code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
##
# Copyright 2009-2016 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for Boost, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
@author: Ward Poelmans (Ghent University)
@author: Petar Forai (IMP/IMBA)
@author: Luca Marsella (CSCS)
@author: Guilherme Peretti-Pezzi (CSCS)
@author: Joachim Hein (Lund University)
"""
from distutils.version import LooseVersion
import fileinput
import glob
import os
import re
import shutil
import sys
import easybuild.tools.toolchain as toolchain
from easybuild.framework.easyblock import EasyBlock
from easybuild.framework.easyconfig import CUSTOM
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import write_file
from easybuild.tools.modules import get_software_root
from easybuild.tools.run import run_cmd
from easybuild.tools.systemtools import UNKNOWN, get_glibc_version, get_shared_lib_ext
class EB_Boost(EasyBlock):
    """Support for building and installing Boost."""

    def __init__(self, *args, **kwargs):
        """Initialize Boost-specific variables."""
        super(EB_Boost, self).__init__(*args, **kwargs)
        # out-of-source build/staging directory, created in configure_step
        self.objdir = None

    @staticmethod
    def extra_options():
        """Add extra easyconfig parameters for Boost."""
        extra_vars = {
            'boost_mpi': [False, "Build mpi boost module", CUSTOM],
            'toolset': [None, "Toolset to use for Boost configuration ('--with-toolset for bootstrap.sh')", CUSTOM],
            'mpi_launcher': [None, "Launcher to use when running MPI regression tests", CUSTOM],
        }
        return EasyBlock.extra_options(extra_vars)

    def patch_step(self):
        """Patch Boost source code before building."""
        super(EB_Boost, self).patch_step()

        # TIME_UTC is also defined in recent glibc versions (> 2.15), so we need to
        # rename it in old Boost versions (<= 1.47) to avoid a clash
        glibc_version = get_glibc_version()
        glibc_is_recent = glibc_version is not UNKNOWN and LooseVersion(glibc_version) > LooseVersion("2.15")
        if glibc_is_recent and LooseVersion(self.version) <= LooseVersion("1.47.0"):
            self.log.info("Patching because the glibc version is too new")
            files_to_patch = ["boost/thread/xtime.hpp"] + glob.glob("libs/interprocess/test/*.hpp")
            files_to_patch += glob.glob("libs/spirit/classic/test/*.cpp") + glob.glob("libs/spirit/classic/test/*.inl")
            for patchfile in files_to_patch:
                try:
                    # rewrite the file in place, keeping a .orig backup
                    for line in fileinput.input("%s" % patchfile, inplace=1, backup='.orig'):
                        line = re.sub(r"TIME_UTC", r"TIME_UTC_", line)
                        sys.stdout.write(line)
                except IOError as err:
                    raise EasyBuildError("Failed to patch %s: %s", patchfile, err)

    def configure_step(self):
        """Configure Boost build using custom tools."""
        # sanity check: Boost.MPI requires an MPI-capable toolchain
        if self.cfg['boost_mpi'] and not self.toolchain.options.get('usempi', None):
            raise EasyBuildError("When enabling building boost_mpi, also enable the 'usempi' toolchain option.")

        # create build directory (Boost doesn't like being built in source dir)
        try:
            self.objdir = os.path.join(self.builddir, 'obj')
            os.mkdir(self.objdir)
            self.log.debug("Successfully created directory %s" % self.objdir)
        except OSError as err:
            raise EasyBuildError("Failed to create directory %s: %s", self.objdir, err)

        # generate config depending on compiler used
        toolset = self.cfg['toolset']
        if toolset is None:
            if self.toolchain.comp_family() == toolchain.INTELCOMP:
                toolset = 'intel-linux'
            elif self.toolchain.comp_family() == toolchain.GCC:
                toolset = 'gcc'
            else:
                raise EasyBuildError("Unknown compiler used, don't know what to specify to --with-toolset, aborting.")

        cmd = "./bootstrap.sh --with-toolset=%s --prefix=%s %s" % (toolset, self.objdir, self.cfg['configopts'])
        run_cmd(cmd, log_all=True, simple=True)

        if self.cfg['boost_mpi']:
            self.toolchain.options['usempi'] = True
            # configure the boost mpi module
            # http://www.boost.org/doc/libs/1_47_0/doc/html/mpi/getting_started.html
            # let Boost.Build know to look here for the config file
            txt = ''
            # Check if using a Cray toolchain and configure MPI accordingly
            if self.toolchain.toolchain_family() == toolchain.CRAYPE:
                if self.toolchain.PRGENV_MODULE_NAME_SUFFIX == 'gnu':
                    craympichdir = os.getenv('CRAY_MPICH2_DIR')
                    craygccversion = os.getenv('GCC_VERSION')
                    txt = '\n'.join([
                        'local CRAY_MPICH2_DIR = %s ;' % craympichdir,
                        'using gcc ',
                        ': %s' % craygccversion,
                        ': CC ',
                        ': <compileflags>-I$(CRAY_MPICH2_DIR)/include ',
                        ' <linkflags>-L$(CRAY_MPICH2_DIR)/lib \ ',
                        '; ',
                        'using mpi ',
                        ': CC ',
                        ': <find-shared-library>mpich ',
                        ': %s' % self.cfg['mpi_launcher'],
                        ';',
                        '',
                    ])
                else:
                    raise EasyBuildError("Bailing out: only PrgEnv-gnu supported for now")
            else:
                txt = "using mpi : %s ;" % os.getenv("MPICXX")
            write_file('user-config.jam', txt, append=True)

    def build_step(self):
        """Build Boost with bjam tool."""
        bjamoptions = " --prefix=%s" % self.objdir

        # specify path for bzip2/zlib if module is loaded
        for lib in ["bzip2", "zlib"]:
            libroot = get_software_root(lib)
            if libroot:
                bjamoptions += " -s%s_INCLUDE=%s/include" % (lib.upper(), libroot)
                bjamoptions += " -s%s_LIBPATH=%s/lib" % (lib.upper(), libroot)

        if self.cfg['boost_mpi']:
            self.log.info("Building boost_mpi library")
            bjammpioptions = "%s --user-config=user-config.jam --with-mpi" % bjamoptions
            # build mpi lib first
            # let bjam know about the user-config.jam file we created in the configure step
            run_cmd("./bjam %s" % bjammpioptions, log_all=True, simple=True)
            # boost.mpi was built, let's 'install' it now
            run_cmd("./bjam %s install" % bjammpioptions, log_all=True, simple=True)

        # install remainder of boost libraries
        self.log.info("Installing boost libraries")
        cmd = "./bjam %s install" % bjamoptions
        run_cmd(cmd, log_all=True, simple=True)

    def install_step(self):
        """Install Boost by copying files to install dir."""
        self.log.info("Copying %s to installation dir %s" % (self.objdir, self.installdir))
        try:
            for f in os.listdir(self.objdir):
                src = os.path.join(self.objdir, f)
                dst = os.path.join(self.installdir, f)
                if os.path.isdir(src):
                    shutil.copytree(src, dst)
                else:
                    shutil.copy2(src, dst)
        except OSError as err:
            raise EasyBuildError("Copying %s to installation dir %s failed: %s", self.objdir, self.installdir, err)

    def sanity_check_step(self):
        """Custom sanity check for Boost."""
        shlib_ext = get_shared_lib_ext()
        custom_paths = {
            'files': ['lib/libboost_system.%s' % shlib_ext],
            'dirs': ['include/boost']
        }
        if self.cfg['boost_mpi']:
            custom_paths["files"].append('lib/libboost_mpi.%s' % shlib_ext)
        if get_software_root('Python'):
            custom_paths["files"].append('lib/libboost_python.%s' % shlib_ext)
        super(EB_Boost, self).sanity_check_step(custom_paths=custom_paths)

    def make_module_extra(self):
        """Set up a BOOST_ROOT environment variable to e.g. ease Boost handling by cmake"""
        txt = super(EB_Boost, self).make_module_extra()
        txt += self.module_generator.set_environment('BOOST_ROOT', self.installdir)
        return txt
| ocaisa/easybuild-easyblocks | easybuild/easyblocks/b/boost.py | Python | gpl-2.0 | 9,501 |
import scipy
# Physical and astronomical constants.
# Units in SI, i.e. not cgs
RSUN = 6.955e8  # solar radius in m
MSUN = 1.9889e30  # solar mass in kg
MJUP = 1.8986e27  # Jupiter mass in kg
RJUP = 7.149e7  # Jupiter (equatorial) radius in m
REARTH = 6.371e6  # Earth radius in m
DAY2S = 86400.0  # seconds per day
DEG2RAD = scipy.pi/180.  # degrees -> radians conversion factor
AU = 1.496e11  # astronomical unit in m
PLANCKH = 6.626e-34  # Planck constant in J s
BOLTZK = 1.38065e-23  # Boltzmann constant in J K^-1
C = 2.9979e8  # speed of light in vacuum in m s^-1
G = 6.673e-11  # gravitational constant in m^3 kg^-1 s^-2
RGAS = 8.314  # gas constant in J mol^-1 K^-1
| tomevans/utils | constants.py | Python | gpl-2.0 | 370 |
"""Parallel testing, supporting arbitrary collection ordering
The Workflow
------------
- Master py.test process starts up, inspects config to decide how many slaves to start, if at all
- env['parallel_base_urls'] is inspected first
- py.test config.option.appliances and the related --appliance cmdline flag are used
if env['parallel_base_urls'] isn't set
- if neither are set, no parallelization happens
- Slaves are started
- Master runs collection, blocks until slaves report their collections
- Slaves each run collection and submit them to the master, then block inside their runtest loop,
waiting for tests to run
- Master diffs slave collections against its own; the test ids are verified to match
across all nodes
- Master enters main runtest loop, uses a generator to build lists of test groups which are then
sent to slaves, one group at a time
- For each phase of each test, the slave serializes test reports, which are then unserialized on
the master and handed to the normal pytest reporting hooks, which is able to deal with test
reports arriving out of order
- Before running the last test in a group, the slave will request more tests from the master
- If more tests are received, they are run
- If no tests are received, the slave will shut down after running its final test
- After all slaves are shut down, the master will do its end-of-session reporting as usual, and
shut down
"""
import collections
import difflib
import json
import os
import signal
import subprocess
from collections import OrderedDict, defaultdict, deque, namedtuple
from datetime import datetime
from itertools import count
from threading import Lock, RLock, Thread, Timer
from time import sleep, time
from urlparse import urlparse
import pytest
import zmq
from _pytest import runner
from functools32 import wraps
from fixtures import terminalreporter
from fixtures.parallelizer import remote
from fixtures.pytest_store import store
from utils import at_exit, conf
from utils.appliance import IPAppliance, stack as appliance_stack
from utils.log import create_sublogger
from utils.net import random_port
from utils.path import conf_path, project_path
from utils.sprout import SproutClient, SproutException
from utils.wait import wait_for
_appliance_help = '''specify appliance URLs to use for distributed testing.
this option can be specified more than once, and must be specified at least two times'''

# appliance URLs from the env config become the default for --appliance;
# the first one is also used as base_url during master collection
env_base_urls = conf.env.get('parallel_base_urls', [])
if env_base_urls:
    conf.runtime['env']['base_url'] = env_base_urls[0]

# Initialize slaveid to None, indicating this as the master process
# slaves will set this to a unique string when they're initialized
conf.runtime['env']['slaveid'] = None

# lock for protecting mutation of recv queue
recv_lock = Lock()

# lock for protecting zmq socket access
zmq_lock = Lock()
def pytest_addoption(parser):
    """Register the parallelizer's command line options under the 'cfme' group."""
    group = parser.getgroup("cfme")
    # (flag, keyword arguments) pairs, registered in order below
    option_specs = [
        ('--appliance', dict(dest='appliances', action='append',
                             default=env_base_urls, metavar='base_url', help=_appliance_help)),
        ('--use-sprout', dict(dest='use_sprout', action='store_true',
                              default=False, help="Use Sprout for provisioning appliances.")),
        ('--sprout-appliances', dict(dest='sprout_appliances', type=int,
                                     default=1, help="How many Sprout appliances to use?.")),
        ('--sprout-timeout', dict(dest='sprout_timeout', type=int,
                                  default=60, help="How many minutes is the lease timeout.")),
        ('--sprout-provision-timeout', dict(dest='sprout_provision_timeout', type=int,
                                            default=60,
                                            help="How many minutes to wait for appliances provisioned.")),
        ('--sprout-group', dict(dest='sprout_group', default=None, help="Which stream to use.")),
        ('--sprout-version', dict(dest='sprout_version', default=None, help="Which version to use.")),
        ('--sprout-date', dict(dest='sprout_date', default=None, help="Which date to use.")),
        ('--sprout-desc', dict(dest='sprout_desc', default=None,
                               help="Set description of the pool.")),
    ]
    for flag, kwargs in option_specs:
        group._addoption(flag, **kwargs)
def pytest_addhooks(pluginmanager):
    """Register the custom hooks defined in the project-local ``hooks`` module."""
    import hooks
    pluginmanager.addhooks(hooks)
@pytest.mark.hookwrapper
def pytest_configure(config):
    # configures the parallel session, then fires pytest_parallel_configured
    # hookwrapper: let all other pytest_configure implementations run first
    yield
    # parallelize when appliances were given explicitly, or when Sprout will
    # provision more than one appliance
    if (config.option.appliances or (config.option.use_sprout and
            config.option.sprout_appliances > 1)):
        session = ParallelSession(config)
        config.pluginmanager.register(session, "parallel_session")
        store.parallelizer_role = 'master'
        config.hook.pytest_parallel_configured(parallel_session=session)
    else:
        # not parallelizing; fire the hook anyway so listeners know
        config.hook.pytest_parallel_configured(parallel_session=None)
def dump_pool_info(printf, pool_data):
    """Pretty-print a Sprout pool status dict, one line per ``printf`` call.

    Args:
        printf: callable taking a single string (e.g. a terminal writer)
        pool_data: pool status dict with ``fulfilled``, ``progress`` and
            ``appliances`` keys, as returned by Sprout's ``request_check``

    The input dict is not mutated (the previous implementation popped the
    ``name`` key out of each appliance entry).
    """
    printf("Fulfilled: {}".format(pool_data["fulfilled"]))
    printf("Progress: {}%".format(pool_data["progress"]))
    printf("Appliances:")
    for appliance in pool_data["appliances"]:
        # name is the header; all remaining fields are listed sorted for stable output
        printf("\t{}:".format(appliance["name"]))
        for key in sorted(appliance.keys()):
            if key != "name":
                printf("\t\t{}: {}".format(key, appliance[key]))
def handle_end_session(signum, frame):
    """Signal handler: when signaled, end the current test session immediately.

    Parameters renamed to the conventional ``signum``/``frame`` — the previous
    name ``signal`` shadowed the ``signal`` module inside the handler.
    """
    if store.parallel_session:
        store.parallel_session.session_finished = True


# ask the master to wind down the session gracefully on SIGQUIT
signal.signal(signal.SIGQUIT, handle_end_session)
class SlaveDict(dict):
    """A normal dict, but with a special "add" method that autogenerates slaveids.

    Mutating methods are protected by a class-level lock shared by all
    instances, and ``remove`` deletes a key from every live instance.
    """
    # intentionally in the class scope so all instances share the slave counter
    slaveid_generator = ('slave{:02d}'.format(i) for i in count())
    # class-level (shared) lock; also used externally to guard related state
    lock = RLock()
    # every instance ever created, so remove() can purge a key everywhere
    _instances = []

    def __init__(self, *args, **kwargs):
        super(SlaveDict, self).__init__(*args, **kwargs)
        with self.lock:
            SlaveDict._instances.append(self)

    # autoincrement the slaveids when something is added
    def add(self, value):
        self[next(self.slaveid_generator)] = value

    # when removing a slave with this method, it is removed from all instances
    # use the normal `del` behavior to only remove from one instance
    def remove(self, key):
        with self.lock:
            for instance in self._instances:
                if key in instance:
                    del(instance[key])

    # helper to wrap dict method wrapper to generate methods protected by a lock
    # like a decorator, but takes a method name instead of wrapping
    def _lock_wrap(method_name):
        wrapped = getattr(dict, method_name)
        @wraps(wrapped)
        def wrapper(self, *args, **kwargs):
            with self.lock:
                return wrapped(self, *args, **kwargs)
        return wrapper

    # all mutating methods should be wrapped; if one is missing here that isn't intentional
    __setitem__ = _lock_wrap('__setitem__')
    __delitem__ = _lock_wrap('__delitem__')

    # destroy now-useless lock wrapper function
    del(_lock_wrap)
class ParallelSession(object):
    def __init__(self, config):
        """Set up master state, and optionally provision appliances via Sprout.

        Also binds the zmq ROUTER socket the slaves connect to, writes out
        ``slave_config.yaml`` for the slave processes, and starts the
        background thread that feeds the recv queue.
        """
        self.config = config
        self.session = None
        self.session_finished = False
        self.countfailures = 0
        # master's test collection, nodeid -> item, filled in pytest_runtestloop
        self.collection = OrderedDict()
        self.sent_tests = 0
        self.log = create_sublogger('master')
        self.maxfail = config.getvalue("maxfail")
        self._failed_collection_errors = {}
        self.terminal = store.terminalreporter
        self.trdist = None
        self.slaves = SlaveDict()
        self.slave_urls = SlaveDict()
        self.slave_tests = defaultdict(set)
        self.test_groups = self._test_item_generator()
        self._pool = []
        self.pool_lock = Lock()
        from utils.conf import cfme_data
        # longest provider keys first, so substring matching prefers the most specific name
        self.provs = sorted(set(cfme_data['management_systems'].keys()),
                            key=len, reverse=True)
        self.slave_allocation = collections.defaultdict(list)
        self.used_prov = set()
        self.failed_slave_test_groups = deque()
        self.slave_spawn_count = 0
        self.sprout_client = None
        self.sprout_timer = None
        self.sprout_pool = None
        if not self.config.option.use_sprout:
            # Without Sprout
            self.appliances = self.config.option.appliances
        else:
            # Using sprout
            self.sprout_client = SproutClient.from_config()
            self.terminal.write(
                "Requesting {} appliances from Sprout at {}\n".format(
                    self.config.option.sprout_appliances, self.sprout_client.api_entry))
            pool_id = self.sprout_client.request_appliances(
                self.config.option.sprout_group,
                count=self.config.option.sprout_appliances,
                version=self.config.option.sprout_version,
                date=self.config.option.sprout_date,
                lease_time=self.config.option.sprout_timeout
            )
            self.terminal.write("Pool {}. Waiting for fulfillment ...\n".format(pool_id))
            self.sprout_pool = pool_id
            # make sure the pool is destroyed when the master exits
            at_exit(self.sprout_client.destroy_pool, self.sprout_pool)
            if self.config.option.sprout_desc is not None:
                self.sprout_client.set_pool_description(
                    pool_id, str(self.config.option.sprout_desc))
            try:
                result = wait_for(
                    lambda: self.sprout_client.request_check(self.sprout_pool)["fulfilled"],
                    num_sec=self.config.option.sprout_provision_timeout * 60,
                    delay=5,
                    message="requesting appliances was fulfilled"
                )
            # NOTE(review): bare except — this also catches KeyboardInterrupt/SystemExit;
            # consider narrowing to the wait_for timeout exception
            except:
                pool = self.sprout_client.request_check(self.sprout_pool)
                dump_pool_info(lambda x: self.terminal.write("{}\n".format(x)), pool)
                self.terminal.write("Destroying the pool on error.\n")
                self.sprout_client.destroy_pool(pool_id)
                raise
            else:
                pool = self.sprout_client.request_check(self.sprout_pool)
                dump_pool_info(lambda x: self.terminal.write("{}\n".format(x)), pool)
            self.terminal.write("Provisioning took {0:.1f} seconds\n".format(result.duration))
            request = self.sprout_client.request_check(self.sprout_pool)
            self.appliances = []
            # Push an appliance to the stack to have proper reference for test collection
            # FIXME: this is a bad hack based on the need for controll of collection partitioning
            appliance_stack.push(
                IPAppliance(address=request["appliances"][0]["ip_address"]))
            self.terminal.write("Appliances were provided:\n")
            for appliance in request["appliances"]:
                url = "https://{}/".format(appliance["ip_address"])
                self.appliances.append(url)
                self.terminal.write("- {} is {}\n".format(url, appliance['name']))
            # NOTE(review): this map() result is discarded — looks like dead code
            map(lambda a: "https://{}/".format(a["ip_address"]), request["appliances"])
            self._reset_timer()
            # Set the base_url for collection purposes on the first appliance
            conf.runtime["env"]["base_url"] = self.appliances[0]
            # Retrieve and print the template_name for Jenkins to pick up
            template_name = request["appliances"][0]["template_name"]
            conf.runtime["cfme_data"]["basic_info"]["appliance_template"] = template_name
            self.terminal.write("appliance_template=\"{}\";\n".format(template_name))
            with project_path.join('.appliance_template').open('w') as template_file:
                template_file.write('export appliance_template="{}"'.format(template_name))
            self.terminal.write("Parallelized Sprout setup finished.\n")
            self.slave_appliances_data = {}
            for appliance in request["appliances"]:
                self.slave_appliances_data[appliance["ip_address"]] = (
                    appliance["template_name"], appliance["provider"]
                )
        # set up the ipc socket
        zmq_endpoint = 'tcp://127.0.0.1:{}'.format(random_port())
        ctx = zmq.Context.instance()
        self.sock = ctx.socket(zmq.ROUTER)
        self.sock.bind('{}'.format(zmq_endpoint))
        # clean out old slave config if it exists
        slave_config = conf_path.join('slave_config.yaml')
        slave_config.check() and slave_config.remove()
        # write out the slave config
        conf.runtime['slave_config'] = {
            'args': self.config.args,
            'options': self.config.option.__dict__,
            'zmq_endpoint': zmq_endpoint,
            'sprout': self.sprout_client is not None and self.sprout_pool is not None,
        }
        if hasattr(self, "slave_appliances_data"):
            conf.runtime['slave_config']["appliance_data"] = self.slave_appliances_data
        conf.runtime['slave_config']['options']['use_sprout'] = False  # Slaves don't use sprout
        conf.save('slave_config')
        for i, base_url in enumerate(self.appliances):
            self.slave_urls.add(base_url)
        for slave in sorted(self.slave_urls):
            self.print_message("using appliance {}".format(self.slave_urls[slave]),
                               slave, green=True)
        # Start the recv queue
        self._recv_queue = deque()
        recv_queuer = Thread(target=_recv_queue, args=(self,))
        recv_queuer.daemon = True
        recv_queuer.start()
    def _slave_audit(self):
        """Reconcile running slave processes against ``slave_urls``.

        - respawns slaves that died unexpectedly and queues their unfinished
          tests for redistribution
        - starts a slave for any appliance URL that has none
        - interrupts slaves whose appliance URL has gone away
        """
        # XXX: There is currently no mechanism to add or remove slave_urls, short of
        # firing up the debugger and doing it manually. This is making room for
        # planned future abilities to dynamically add and remove slaves via automation
        # check for unexpected slave shutdowns and redistribute tests
        for slaveid, slave in self.slaves.items():
            returncode = slave.poll()
            if returncode:
                del(self.slaves[slaveid])
                if returncode == -9:
                    msg = '{} killed due to error, respawning'.format(slaveid)
                else:
                    msg = '{} terminated unexpectedly with status {}, respawning'.format(
                        slaveid, returncode)
                if self.slave_tests[slaveid]:
                    num_failed_tests = len(self.slave_tests[slaveid])
                    self.sent_tests -= num_failed_tests
                    msg += ' and redistributing {} tests'.format(num_failed_tests)
                    with SlaveDict.lock:
                        self.failed_slave_test_groups.append(self.slave_tests.pop(slaveid))
                self.print_message(msg, purple=True)
        # Make sure we have a slave for every slave_url
        for slaveid in list(self.slave_urls):
            if slaveid not in self.slaves:
                self._start_slave(slaveid)
        # If a slave has lost its base_url for any reason, kill that slave
        # Losing a base_url means the associated appliance died :(
        for slaveid in list(self.slaves):
            if slaveid not in self.slave_urls:
                self.print_message("{}'s appliance has died, deactivating slave".format(slaveid))
                self.interrupt(slaveid)
def _start_slave(self, slaveid):
devnull = open(os.devnull, 'w')
try:
base_url = self.slave_urls[slaveid]
except KeyError:
# race condition: slave was removed from slave_urls when something else decided to
# start it; in this case slave_urls wins and the slave should not start
return
# worker output redirected to null; useful info comes via messages and logs
slave = subprocess.Popen(
['python', remote.__file__, slaveid, base_url],
stdout=devnull, stderr=devnull,
)
self.slaves[slaveid] = slave
self.slave_spawn_count += 1
at_exit(slave.kill)
def _reset_timer(self):
if not (self.sprout_client is not None and self.sprout_pool is not None):
if self.sprout_timer:
self.sprout_timer.cancel() # Cancel it anyway
self.terminal.write("Sprout timer cancelled\n")
return
if self.sprout_timer:
self.sprout_timer.cancel()
self.sprout_timer = Timer(
(self.config.option.sprout_timeout / 2) * 60,
self.sprout_ping_pool)
self.sprout_timer.daemon = True
self.sprout_timer.start()
    def sprout_ping_pool(self):
        """Prolong the Sprout pool lease; disable the timer if the pool is gone."""
        try:
            self.sprout_client.prolong_appliance_pool_lease(self.sprout_pool)
        except SproutException as e:
            self.terminal.write(
                "Pool {} does not exist any more, disabling the timer.\n".format(self.sprout_pool))
            # NOTE(review): the message below is missing its closing paren and newline
            self.terminal.write(
                "This can happen before the tests are shut down "
                "(last deleted appliance deleted the pool")
            self.terminal.write("> The exception was: {}".format(str(e)))
            self.sprout_pool = None  # Will disable the timer in next reset call.
        self._reset_timer()
    def send(self, slaveid, event_data):
        """Send data to slave.

        ``event_data`` will be serialized as JSON, and so must be JSON serializable
        """
        event_json = json.dumps(event_data)
        with zmq_lock:
            # ROUTER multipart frame: [destination identity, empty delimiter, payload]
            self.sock.send_multipart([slaveid, '', event_json])
    def recv(self):
        """Return any unprocessed events from the recv queue"""
        try:
            with recv_lock:
                return self._recv_queue.popleft()
        except IndexError:
            # queue is empty; callers unpack a 3-tuple, so return a null event
            return None, None, None
def print_message(self, message, prefix='master', **markup):
"""Print a message from a node to the py.test console
Args:
slaveid: Can be a slaveid or any string, e.g. ``'master'`` is also useful here.
message: The message to print
**markup: If set, overrides the default markup when printing the message
"""
# differentiate master and slave messages by default
if not markup:
if prefix == 'master':
markup = {'blue': True}
else:
markup = {'cyan': True}
stamp = datetime.now().strftime("%Y%m%d %H:%M:%S")
self.terminal.write_ensure_prefix('({})[{}] '.format(prefix, stamp), message, **markup)
    def ack(self, slaveid, event_name):
        """Acknowledge a slave's message"""
        # the ack payload names the event being acknowledged
        self.send(slaveid, 'ack {}'.format(event_name))
    def monitor_shutdown(self, slaveid, respawn=False):
        """Watch a slave shut down in a background thread (see ``_monitor_shutdown_t``)."""
        # non-daemon so slaves get every opportunity to shut down cleanly
        shutdown_thread = Thread(target=self._monitor_shutdown_t, args=(slaveid, respawn))
        shutdown_thread.start()
    def _monitor_shutdown_t(self, slaveid, respawn):
        """Thread body: poll a slave process until it exits, killing it on timeout.

        Afterwards, either drops the slave's URL (``respawn=False``) or just
        drops the process entry so a later audit can respawn it.
        """
        # a KeyError here means self.slaves got mangled, indicating a problem elsewhere
        try:
            slave = self.slaves[slaveid]
        except KeyError:
            self.log.warning('Slave was missing when trying to monitor shutdown')
            return
        start_time = time()
        # configure the polling logic
        polls = 0
        # how often to poll
        poll_sleep_time = .5
        # how often to report (calculated to be around once a minute based on poll_sleep_time)
        poll_report_modulo = 60 / poll_sleep_time
        # maximum time to wait
        poll_num_sec = 300
        # time spent waiting
        def poll_walltime():
            return time() - start_time
        # start the poll
        while poll_walltime() < poll_num_sec:
            polls += 1
            ec = slave.poll()
            if ec is None:
                # process still running, report if needed and continue polling
                if polls % poll_report_modulo == 0:
                    remaining_time = int(poll_num_sec - poll_walltime())
                    self.print_message('{} still shutting down, '
                                       'will continue polling for {} seconds '
                                       .format(slaveid, remaining_time), blue=True)
            else:
                if ec == 0:
                    self.print_message('{} exited'.format(slaveid), green=True)
                else:
                    self.print_message('{} died'.format(slaveid), red=True)
                break
            sleep(poll_sleep_time)
        else:
            # while/else: only reached when the timeout elapsed without a break
            self.print_message('{} failed to shut down gracefully; killed'.format(slaveid),
                               red=True)
            slave.kill()
        if not respawn and slaveid in self.slave_urls:
            self.slave_urls.remove(slaveid)
        elif slaveid in self.slaves:
            del(self.slaves[slaveid])
def interrupt(self, slaveid, **kwargs):
"""Nicely ask a slave to terminate"""
slave = self.slaves.pop(slaveid, None)
if slave and slave.poll() is None:
slave.send_signal(subprocess.signal.SIGINT)
self.monitor_shutdown(slaveid, **kwargs)
def kill(self, slaveid, **kwargs):
"""Rudely kill a slave"""
slave = self.slaves.pop(slaveid, None)
if slave and slave.poll() is None:
slave.kill()
self.monitor_shutdown(slaveid, **kwargs)
    def send_tests(self, slaveid):
        """Send a slave a group of tests"""
        try:
            # prefer tests orphaned by a dead slave, if there are any
            with SlaveDict.lock:
                tests = list(self.failed_slave_test_groups.popleft())
        except IndexError:
            try:
                tests = self.get(slaveid)
                # To return to the old parallelizer distributor, remove the line above
                # and replace it with the line below.
                # tests = self.test_groups.next()
            except StopIteration:
                # no tests left; an empty list tells the slave to shut down
                tests = []
        self.send(slaveid, tests)
        # remember what's outstanding on this slave so it can be redistributed on death
        self.slave_tests[slaveid] |= set(tests)
        collect_len = len(self.collection)
        tests_len = len(tests)
        self.sent_tests += tests_len
        if tests:
            self.print_message('sent {} tests to {} ({}/{}, {:.1f}%)'.format(
                tests_len, slaveid, self.sent_tests, collect_len,
                self.sent_tests * 100. / collect_len
            ))
        return tests
    def pytest_sessionstart(self, session):
        """pytest sessionstart hook

        - sets up the distributed terminal reporter
        - stores the session for use in the runtest loop

        (The zmq ipc socket, slave config and slave processes themselves are
        set up in ``__init__`` and ``_slave_audit``.)
        """
        # If reporter() gave us a fake terminal reporter in __init__, the real
        # terminal reporter is registered by now
        self.terminal = store.terminalreporter
        self.trdist = TerminalDistReporter(self.config, self.terminal)
        self.config.pluginmanager.register(self.trdist, "terminaldistreporter")
        self.session = session
    def pytest_runtestloop(self):
        """pytest runtest loop

        - Disable the master terminal reporter hooks, so we can add our own handlers
          that include the slaveid in the output
        - Send tests to slaves when they ask
        - Log the starting of tests and test results, including slave id
        - Handle clean slave shutdown when they finish their runtest loops
        - Restore the master terminal reporter after testing so we get the final report
        """
        # Build master collection for slave diffing and distribution
        for item in self.session.items:
            self.collection[item.nodeid] = item
        # Fire up the workers after master collection is complete
        # master and the first slave share an appliance, this is a workaround to prevent a slave
        # from altering an appliance while master collection is still taking place
        self._slave_audit()
        try:
            self.print_message("Waiting for {} slave collections".format(len(self.slaves)),
                               red=True)
            # Turn off the terminal reporter to suppress the builtin logstart printing
            terminalreporter.disable()
            while True:
                # spawn/kill/replace slaves if needed
                self._slave_audit()
                if not self.slaves:
                    # All slaves are killed or errored, we're done with tests
                    self.print_message('all slaves have exited', yellow=True)
                    self.session_finished = True
                if self.session_finished:
                    break
                # pull the next slave event off the recv queue (null event if empty)
                slaveid, event_data, event_name = self.recv()
                if event_name == 'collectionfinish':
                    slave_collection = event_data['node_ids']
                    # compare slave collection to the master, all test ids must be the same
                    self.log.debug('diffing {} collection'.format(slaveid))
                    diff_err = report_collection_diff(slaveid, self.collection.keys(),
                                                      slave_collection)
                    if diff_err:
                        self.print_message('collection differs, respawning', slaveid,
                                           purple=True)
                        self.print_message(diff_err, purple=True)
                        self.log.error('{}'.format(diff_err))
                        self.kill(slaveid)
                        self._start_slave(slaveid)
                    else:
                        self.ack(slaveid, event_name)
                elif event_name == 'need_tests':
                    self.send_tests(slaveid)
                    self.log.info('starting master test distribution')
                elif event_name == 'runtest_logstart':
                    self.ack(slaveid, event_name)
                    self.trdist.runtest_logstart(slaveid,
                                                 event_data['nodeid'], event_data['location'])
                elif event_name == 'runtest_logreport':
                    self.ack(slaveid, event_name)
                    report = unserialize_report(event_data['report'])
                    # once call/teardown is reported, the test is no longer outstanding
                    if (report.when in ('call', 'teardown')
                            and report.nodeid in self.slave_tests[slaveid]):
                        self.slave_tests[slaveid].remove(report.nodeid)
                    self.trdist.runtest_logreport(slaveid, report)
                elif event_name == 'internalerror':
                    self.ack(slaveid, event_name)
                    self.print_message(event_data['message'], slaveid, purple=True)
                    with SlaveDict.lock:
                        if slaveid in self.slaves:
                            # If this slave hasn't already quit, kill it with fire (signal 9)
                            self.slaves[slaveid].send_signal(9)
                elif event_name == 'shutdown':
                    self.ack(slaveid, event_name)
                    self.monitor_shutdown(slaveid)
                # total slave spawn count * 3, to allow for each slave's initial spawn
                # and then each slave (on average) can fail two times
                if self.slave_spawn_count >= len(self.appliances) * 3:
                    self.print_message('too many slave respawns, exiting',
                                       red=True, bold=True)
                    raise KeyboardInterrupt('Interrupted due to slave failures')
        except Exception as ex:
            self.log.error('Exception in runtest loop:')
            self.log.exception(ex)
            raise
        finally:
            terminalreporter.enable()
        # Suppress other runtestloop calls
        return True
def _test_item_generator(self):
for tests in self._modscope_item_generator():
yield tests
    def _modscope_item_generator(self):
        """Yield lists of test ids from the master collection, one module at a time."""
        # breaks out tests by module, can work just about any way we want
        # as long as it yields lists of tests id from the master collection
        sent_tests = 0
        module_items_cache = []
        collection_ids = self.collection.keys()
        collection_len = len(collection_ids)
        for i, item_id in enumerate(collection_ids):
            # everything before the first '::' is the module fspath
            i_fspath = item_id.split('::')[0]
            try:
                nextitem_id = collection_ids[i + 1]
                ni_fspath = nextitem_id.split('::')[0]
            except IndexError:
                # end of the collection; force the cache to be flushed below
                nextitem_id = ni_fspath = None
            module_items_cache.append(item_id)
            if i_fspath == ni_fspath:
                # This item and the next item are in the same module
                # loop to the next item
                continue
            else:
                # This item and the next item are in different modules,
                # yield the indices if any items were generated
                if not module_items_cache:
                    continue
                for tests in self._modscope_id_splitter(module_items_cache):
                    tests_len = len(tests)
                    sent_tests += tests_len
                    self.log.info('%d tests remaining to send'
                                  % (collection_len - sent_tests))
                    if tests:
                        yield tests
                # Then clear the cache in-place
                module_items_cache[:] = []
def _modscope_id_splitter(self, module_items):
# given a list of item ids from one test module, break up tests into groups with the same id
parametrized_ids = defaultdict(list)
for item in module_items:
try:
# split on the leftmost bracket, then strip everything after the rightmight bracket
# so 'test_module.py::test_name[parametrized_id]' becomes 'parametrized_id'
parametrized_id = item.split('[')[1].rsplit(']')[0]
except IndexError:
# splits failed, item has no parametrized id
parametrized_id = None
parametrized_ids[parametrized_id].append(item)
for id, tests in parametrized_ids.items():
if id is None:
id = 'no params'
self.log.info('sent tests with param {} {!r}'.format(id, tests))
yield tests
def get(self, slave):
    """Return the next group of tests for ``slave`` to run.

    Groups are matched to slaves by the provider appearing in the test
    parametrization, so a slave keeps reusing providers it has already been
    allocated (up to ``appliance_num_limit`` providers per slave).  If no
    group can be matched that way, the slave's appliance is cleansed and
    reallocated to the first pending provider-parametrized group.

    Raises StopIteration when the pool is exhausted; returns [] when no
    group could be handed out this round.
    """
    with self.pool_lock:
        if not self._pool:
            # Lazily populate the pool and record which providers are in use
            for test_group in self.test_groups:
                self._pool.append(test_group)
                for test in test_group:
                    if '[' in test:
                        found_prov = []
                        for pv in self.provs:
                            if pv in test:
                                found_prov.append(pv)
                                break
                        provs = list(set(found_prov).intersection(self.provs))
                        if provs:
                            self.used_prov = self.used_prov.union(set(provs))
            if self.used_prov:
                self.ratio = float(len(self.slaves)) / float(len(self.used_prov))
            else:
                self.ratio = 0.0
        if not self._pool:
            # nothing left to distribute
            raise StopIteration
        current_allocate = self.slave_allocation.get(slave, None)
        # num_provs_list = [len(v) for k, v in self.slave_allocation.iteritems()]
        # average_num_provs = sum(num_provs_list) / float(len(self.slaves))
        appliance_num_limit = 2
        for test_group in self._pool:
            for test in test_group:
                # If the test is parametrized...
                if '[' in test:
                    found_prov = []
                    for pv in self.provs:
                        if pv in test:
                            found_prov.append(pv)
                            break
                    # The line below can probably be removed now, since we compare
                    # providers in the loop above with self.provs, which is a list
                    # of all providers.
                    provs = list(set(found_prov).intersection(self.provs))
                    # If the parametrization contains a provider...
                    if provs:
                        prov = provs[0]
                        # If this slave/appliance already has providers then...
                        if current_allocate:
                            # If the slave has _our_ provider
                            if prov in current_allocate:
                                # provider is already with the slave, so just return the tests
                                self._pool.remove(test_group)
                                return test_group
                            # If the slave doesn't have _our_ provider
                            else:
                                # Check to see how many providers this slave already has
                                if len(self.slave_allocation[slave]) >= appliance_num_limit:
                                    continue
                                else:
                                    # Adding provider to slave since there are not too many
                                    self.slave_allocation[slave].append(prov)
                                    self._pool.remove(test_group)
                                    return test_group
                        # If this slave doesn't have any providers...
                        else:
                            # Adding provider to slave
                            self.slave_allocation[slave].append(prov)
                            self._pool.remove(test_group)
                            return test_group
                    else:
                        # No providers - ie, not a provider parametrized test
                        self._pool.remove(test_group)
                        return test_group
                else:
                    # No params, so no need to think about providers
                    self._pool.remove(test_group)
                    return test_group
        # Here means no tests were able to be sent by provider affinity
        for test_group in self._pool:
            for test in test_group:
                # If the test is parametrized...
                if '[' in test:
                    found_prov = []
                    for pv in self.provs:
                        if pv in test:
                            found_prov.append(pv)
                            break
                    provs = list(set(found_prov).intersection(self.provs))
                    # If the parametrization contains a provider...
                    if provs:
                        # BUGFIX: bind the provider from *this* test group.
                        # Previously a stale 'prov' left over from the loop
                        # above was reused here (NameError if that loop never
                        # assigned it).
                        prov = provs[0]
                        # Already too many slaves with this provider: recycle
                        # the appliance so it can take the new provider.
                        app_url = self.slave_urls[slave]
                        app_ip = urlparse(app_url).netloc
                        app = IPAppliance(app_ip)
                        self.print_message('cleansing appliance', slave,
                                           purple=True)
                        try:
                            app.delete_all_providers()
                        except Exception:
                            # narrowed from a bare 'except:' so SystemExit and
                            # KeyboardInterrupt still propagate
                            self.print_message('could not cleanse', slave,
                                               red=True)
                        self.slave_allocation[slave] = [prov]
                        self._pool.remove(test_group)
                        return test_group
        return []
def report_collection_diff(slaveid, from_collection, to_collection):
    """Compare the master collection against a slave's collection.

    Both collections are sorted before comparison.  Returns ``None`` when
    they match, otherwise a string containing a unified diff of the two.
    """
    master_sorted = sorted(from_collection)
    slave_sorted = sorted(to_collection)
    if master_sorted == slave_sorted:
        # Well, that was easy.
        return None
    # diff the two, so we get some idea of what's wrong
    diff_lines = difflib.unified_diff(
        master_sorted,
        slave_sorted,
        fromfile='master',
        tofile=slaveid,
    )
    # unified_diff yields lines lazily; stringify them
    rendered = '\n'.join(line.rstrip() for line in diff_lines)
    return '{slaveid} diff:\n{diff}\n'.format(slaveid=slaveid, diff=rendered)
def _recv_queue(session):
    # Poll the zmq socket and populate the session's recv queue deque with
    # slave responses.  Runs until the session reports it is finished.
    # NOTE(review): relies on module-level zmq_lock / recv_lock for socket and
    # queue synchronization -- confirm against the module preamble (not
    # visible in this chunk).
    while not session.session_finished:
        try:
            with zmq_lock:
                # non-blocking receive so session_finished is re-checked
                # promptly instead of blocking on a quiet socket
                slaveid, empty, event_json = session.sock.recv_multipart(flags=zmq.NOBLOCK)
        except zmq.Again:
            # nothing waiting on the socket; poll again
            continue
        event_data = json.loads(event_json)
        event_name = event_data.pop('_event_name')
        if event_name == 'message':
            message = event_data.pop('message')
            # messages are special, handle them immediately
            session.print_message(message, slaveid, **event_data)
            session.ack(slaveid, event_name)
        else:
            # everything else is queued for the runtest loop to consume
            with recv_lock:
                session._recv_queue.append((slaveid, event_data, event_name))
class TerminalDistReporter(object):
    """Terminal Reporter for Distributed Testing.

    This reporter provides distributed-run logging during the runtest loop,
    while the normal terminal reporter is disabled.  It mirrors enough state
    onto the real terminal reporter that reporting works correctly once that
    reporter is re-enabled after the loop.

    The special versions of the pytest reporting hooks below include a slave
    ID where possible; they are invoked from
    :py:class:`ParallelSession`'s runtestloop hook.
    """
    def __init__(self, config, terminal):
        self.config = config
        self.tr = terminal
        # maps location line -> Outcome(word, markup); flushed at teardown
        self.outcomes = {}
    def runtest_logstart(self, slaveid, nodeid, location):
        """Announce on the terminal that a test started on the given slave."""
        line = self.tr._locationline(nodeid, *location)
        self.tr.write_ensure_prefix('({}) {}'.format(slaveid, line),
                                    'running', blue=True)
        self.config.hook.pytest_runtest_logstart(nodeid=nodeid, location=location)
    def runtest_logreport(self, slaveid, report):
        """Relay a test report to the terminal, annotated with the slave id."""
        # Run all the normal logreport hooks
        self.config.hook.pytest_runtest_logreport(report=report)
        # Now do what the terminal reporter would normally do, but include
        # parallelizer info
        outcome, letter, word = self.config.hook.pytest_report_teststatus(report=report)
        # Stash stats on the terminal reporter so it reports properly
        # after it's reenabled at the end of runtestloop
        self.tr.stats.setdefault(outcome, []).append(report)
        line = self.tr._locationline(report.nodeid, *report.location)
        prefix = '({}) {}'.format(slaveid, line)
        try:
            # pytest_report_teststatus returns a (word, markup) tuple when
            # the word would be 'XPASS', so unpack it in that case
            word, markup = word
        except (TypeError, ValueError):
            # word wasn't iterable or didn't have enough values, use it as-is
            pass
        if word in ('PASSED', 'xfail'):
            markup = {'green': True}
        elif word in ('ERROR', 'FAILED', 'XPASS'):
            markup = {'red': True}
        elif word:
            markup = {'yellow': True}
        # For every stage where we can report the outcome, remember it
        if word:
            self.outcomes[line] = Outcome(word, markup)
        # When the teardown report arrives, print the last recorded outcome.
        # This prevents reporting a test as 'PASSED' if its teardown phase
        # fails, for example
        if report.when == 'teardown':
            word, markup = self.outcomes.pop(line)
            self.tr.write_ensure_prefix(prefix, word, **markup)
Outcome = namedtuple('Outcome', ['word', 'markup'])
def unserialize_report(reportdict):
    """
    Generate a :py:class:`TestReport <pytest:_pytest.runner.TestReport>` from a serialized report

    ``reportdict`` is the dict produced on the slave side; its keys map
    directly onto the TestReport constructor arguments.
    """
    return runner.TestReport(**reportdict)
| akrzos/cfme_tests | fixtures/parallelizer/__init__.py | Python | gpl-2.0 | 40,869 |
# ===========================================================================
# eXe
# Copyright 2004-2006, University of Auckland
# Copyright 2004-2008 eXe Project, http://eXeLearning.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
A ImageMagnifier Idevice is one built up from an image and free text.
"""
import logging
from exe.engine.idevice import Idevice
from exe.engine.field import TextAreaField, MagnifierField
from exe.engine.translate import lateTranslate
log = logging.getLogger(__name__)
# ===========================================================================
class ImageMagnifierIdevice(Idevice):
    """
    A ImageMagnifier Idevice is one built up from an image and free text.
    """
    # bumped whenever the persisted format changes; drives upgradeToVersionN
    persistenceVersion = 4

    def __init__(self, defaultImage = None):
        """
        Set up the magnifier and text fields.
        defaultImage: image shown until the author picks one.
        """
        Idevice.__init__(self,
                         x_(u"Image Magnifier"),
                         x_(u"University of Auckland"),
                         x_(u"""The image magnifier is a magnifying tool enabling
learners to magnify the view of the image they have been given. Moving the
magnifying glass over the image allows larger detail to be studied."""),
                         u"", u"")
        self.emphasis = Idevice.NoEmphasis
        self.short_desc = x_("Allow learners to magnify a given view of an image")
        self.imageMagnifier = MagnifierField(
            x_(u"Choose an Image"), x_(u"""Click
on the picture below or the "Add Image" button to select an image file to be
magnified."""))
        self.imageMagnifier.idevice = self
        self.imageMagnifier.defaultImage = defaultImage
        self.text = TextAreaField(x_(u"Text"),
                                  x_("""Enter the text you wish to
associate with the file."""))
        self.text.idevice = self
        self.float = u"left"
        self.caption = u""
        # user-facing instruction strings; lazily translated via the
        # lateTranslate properties below
        self._captionInstruc = x_(u"""Provide a caption for the
image to be magnified.""")
        self._dimensionInstruc = x_(u"""Choose the size you want
your image to display at. The measurements are in pixels. Generally, 100
pixels equals approximately 3cm. Leave both fields blank if you want the
image to display at its original size.""")
        self._alignInstruc = x_(u"""Alignment allows you to
choose where on the screen the image will be positioned.""")
        self._initialZoomInstruc = x_(u"""Set the initial level of zoom
when the IDevice loads, as a percentage of the original image size""")
        self._maxZoomInstruc = x_(u"""Set the maximum level of zoom,
as a percentage of the original image size""")
        self._glassSizeInstruc = x_(u"""Select the size of the magnifying glass""")
        self.systemResources += ['mojomagnify.js']

    # Properties
    captionInstruc = lateTranslate('captionInstruc')
    dimensionInstruc = lateTranslate('dimensionInstruc')
    alignInstruc = lateTranslate('alignInstruc')
    initialZoomInstruc = lateTranslate('initialZoomInstruc')
    maxZoomInstruc = lateTranslate('maxZoomInstruc')
    glassSizeInstruc = lateTranslate('glassSizeInstruc')

    def getResourcesField(self, this_resource):
        """
        implement the specific resource finding mechanism for this iDevice:
        returns the field owning this_resource, or None if not found here.
        """
        # be warned that before upgrading, this iDevice field could not exist:
        if hasattr(self, 'imageMagnifier')\
        and hasattr(self.imageMagnifier, 'imageResource'):
            if this_resource == self.imageMagnifier.imageResource:
                return self.imageMagnifier
        # be warned that before upgrading, this iDevice field could not exist:
        if hasattr(self, 'text') and hasattr(self.text, 'images'):
            for this_image in self.text.images:
                if hasattr(this_image, '_imageResource') \
                and this_resource == this_image._imageResource:
                    return self.text
        return None

    def getRichTextFields(self):
        """
        Like getResourcesField(), a general helper to allow nodes to search
        through all of their fields without having to know the specifics of each
        iDevice type.
        """
        fields_list = []
        if hasattr(self, 'text'):
            fields_list.append(self.text)
        return fields_list

    def burstHTML(self, i):
        """
        takes a BeautifulSoup fragment (i) and bursts its contents to
        import this idevice from a CommonCartridge export
        """
        # ImageMagnifier Idevice:
        #======> WARNING - NOT YET BURSTING!!!!!!!!
        #title = i.find(name='span', attrs={'class' : 'iDeviceTitle' })
        #idevice.title = title.renderContents().decode('utf-8')
        # no title for this idevice
        # WARNING: not yet loading the image or its parameters:
        # Could be in the following tag:
        # <param name="FlashVars" \
        #    value="glassSize=2&height=189&width=267 \
        #    &initialZoomSize=100&file=sunflowers.jpg \
        #    &maxZoomSize=150&targetColor=#FF0000&borderWidth=12
        #inner = i.find(name='div', attrs={'class' : 'iDevice_inner' })
        #idevice.fields[0].content = inner.renderContents().decode('utf-8')
        #idevice.fields[0].content_w_resourcePaths = inner.renderContents().decode('utf-8')
        #idevice.fields[0].content_wo_resourcePaths = inner.renderContents().decode('utf-8')

    def upgradeToVersion1(self):
        """
        Upgrades to v0.14
        """
        self._alignInstruc = x_(u"""Alignment allows you to
choose where on the screen the image will be positioned.""")
        self._initialZoomInstruc = x_(u"""Set the initial level of zoom
when the IDevice loads, as a percentage of the original image size""")
        self._maxZoomInstruc = x_(u"""Set the maximum level of zoom,
as a percentage of the original image size""")
        self._glassSizeInstruc = x_(u"""This chooses the initial size
of the magnifying glass""")

    def upgradeToVersion2(self):
        """
        Upgrades to v0.24
        """
        self.imageMagnifier.isDefaultImage = False

    def upgradeToVersion3(self):
        """
        Replace the Flash magnifier resource with the JS-based mojomagnify.
        """
        if 'magnifier.swf' in self.systemResources:
            self.systemResources.remove('magnifier.swf')
        if 'mojomagnify.js' not in self.systemResources:
            self.systemResources.append('mojomagnify.js')

    def upgradeToVersion4(self):
        """
        Delete icon from system resources
        """
        self._upgradeIdeviceToVersion3()
# ===========================================================================
| UstadMobile/exelearning-ustadmobile-work | exe/engine/imagemagnifieridevice.py | Python | gpl-2.0 | 7,602 |
from django import forms
from django.contrib import messages
from django.contrib.admin.views.decorators import staff_member_required
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404, render

from data.models import ImportTask
class ImportTaskForm(forms.ModelForm):
    # Minimal form for creating an ImportTask from an uploaded data file;
    # only the file itself is user-supplied.
    class Meta:
        model = ImportTask
        fields = ('data_file',)
@staff_member_required
def upload(request):
    """Display the data-file upload form and queue valid submissions.

    On a valid POST the task is saved, enqueued for processing, and the
    user is redirected back to the upload page; otherwise the (possibly
    bound, invalid) form is re-rendered.
    """
    if request.method != 'POST':
        form = ImportTaskForm()
    else:
        form = ImportTaskForm(data=request.POST, files=request.FILES)
        if form.is_valid():
            task = form.save()
            task.enqueue()
            messages.info(request, 'Data file queued for processing')
            return HttpResponseRedirect(reverse('data_upload'))
    return render(request, 'data/upload.html', {
        'import_task_form': form,
    })
@staff_member_required
def enqueue(request, import_task_id):
    """Re-queue an existing ImportTask for processing, then redirect to the
    admin changelist.

    Uses get_object_or_404 so an unknown id yields an HTTP 404 instead of
    an unhandled ImportTask.DoesNotExist (HTTP 500).
    """
    import_task = get_object_or_404(ImportTask, pk=import_task_id)
    import_task.enqueue()
    messages.info(request, 'Data file queued for processing')
    return HttpResponseRedirect(reverse('admin:data_importtask_changelist'))
| ScienceMob/vmicroc | data/views.py | Python | gpl-2.0 | 1,284 |
import copy
class Histogram( object ):
    '''Histogram + a few things.

    This class does not inherit from a ROOT class as we could want to use it
    with a TH1D, TH1F, and even a 2D at some point.
    Histogram contains the original ROOT histogram, obj, and a weighted version,
    weighted, originally set equal to obj (weight == 1).
    - layer : can be used to order histograms
    - stack : to decide whether the histogram
    should be stacked or not (see the Stack class for more information)
    - name : user defined histogram. Useful when manipulating several histograms with
    the same GetName(), coming from different TDirectories.
    '''
    def __init__(self, name, obj, layer=0., legendLine=None, stack=True):
        # name is a user defined name
        self.name = name
        self.realName = name  # can be different if an alias is set
        if legendLine is None:
            self.legendLine = name
        else:
            self.legendLine = legendLine
        self.obj = obj
        self.layer = layer
        self.stack = stack
        self.on = True
        self.style = None
        # after construction, weighted histogram = base histogram
        self.SetWeight(1)
    def Clone(self, newName):
        '''Return a deep copy of this histogram under a new name.'''
        newHist = copy.deepcopy(self)
        newHist.name = newName
        newHist.legendLine = newName
        return newHist
    def __str__(self):
        fmt = '{self.name:<10} / {hname:<50},\t Layer ={self.layer:8.1f}, w = {weighted:8.1f}, u = {unweighted:8.1f}'
        tmp = fmt.format(self=self,
                         hname = self.realName,
                         weighted = self.Yield(weighted=True),
                         unweighted = self.Yield(weighted=False) )
        return tmp
    def Yield(self, weighted=True):
        '''Returns the weighted number of entries in the histogram
        (under and overflow not counted).
        Use weighted=False if you want the unweighted number of entries'''
        hist = self.weighted
        if not weighted:
            hist = self.obj
        return hist.Integral( 0, hist.GetNbinsX()+1)
    def GetBinning(self):
        '''return nbins, xmin, xmax'''
        return self.obj.GetNbinsX(), \
               self.obj.GetXaxis().GetXmin(), \
               self.obj.GetXaxis().GetXmax()
    def Rebin(self, factor):
        '''Rebins by factor (both original and weighted histograms).'''
        self.obj.Rebin( factor )
        self.weighted.Rebin(factor)
    def Divide(self, other):
        '''Divide both histograms, bin by bin, by the other Histogram.'''
        self.obj.Divide( other.obj)
        self.weighted.Divide( other.weighted )
    def NormalizeToBinWidth(self):
        '''Divides each bin content and error by the bin size'''
        for i in range (1,self.obj.GetNbinsX()+1) :
            self.obj.SetBinContent(i, self.obj.GetBinContent(i) / self.obj.GetBinWidth(i))
            self.obj.SetBinError  (i, self.obj.GetBinError(i)   / self.obj.GetBinWidth(i))
        for i in range (1,self.weighted.GetNbinsX()+1) :
            self.weighted.SetBinContent(i, self.weighted.GetBinContent(i) / self.weighted.GetBinWidth(i))
            self.weighted.SetBinError  (i, self.weighted.GetBinError(i)   / self.weighted.GetBinWidth(i))
    def SetWeight(self, weight):
        '''Set the weight and create the weighted histogram.'''
        self.weighted = copy.deepcopy(self.obj)
        self.weight = weight
        self.weighted.Scale(weight)
    def Scale(self, scale):
        '''Scale the histogram (multiply the weight by scale)'''
        self.SetWeight( self.weight * scale )
    def SetStyle(self, style):
        '''Set the style for the original and weighted histograms.'''
        if style is None:
            return
        style.formatHisto( self.obj )
        style.formatHisto( self.weighted )
        self.style = style
    def AddEntry(self, legend, legendLine=None):
        '''By default the legend entry is set to self.legendLine of the histogram.'''
        if legendLine is None:
            legendLine = self.legendLine
        if legendLine is None:
            legendLine = self.name
        # filled style for stacked histograms, point style otherwise
        opt = 'f'
        if not self.stack:
            opt = 'p'
        legend.AddEntry(self.obj, legendLine, opt)
    def Draw(self, opt='hist', weighted=True):
        '''Draw the weighted (or original) histogram.'''
        if weighted is True:
            self.weighted.Draw(opt)
        else:
            self.obj.Draw(opt)
    def GetXaxis(self, opt='', weighted=True):
        '''Return the x axis of the weighted (or original) histogram.'''
        if weighted is True:
            return self.weighted.GetXaxis()
        else:
            return self.obj.GetXaxis()
    def GetYaxis(self, opt='', weighted=True):
        '''Return the y axis of the weighted (or original) histogram.'''
        if weighted is True:
            return self.weighted.GetYaxis()
        else:
            return self.obj.GetYaxis()
    def GetMaximum(self, opt='', weighted=True):
        '''Return the maximum bin content of the weighted (or original) histogram.'''
        if weighted is True:
            return self.weighted.GetMaximum()
        else:
            return self.obj.GetMaximum()
    def Add(self, other, coeff=1):
        '''Add another histogram.
        Provide the optional coeff argument for the coefficient factor (e.g. -1 to subtract)
        '''
        self.obj.Add( other.obj, coeff )
        self.weighted.Add( other.weighted, coeff )
        # re-derive the effective weight from the summed histograms
        integral = self.obj.Integral(0, self.obj.GetNbinsX())
        if integral > 0.:
            self.weight = self.weighted.Integral(0, self.weighted.GetNbinsX()+1)/integral
        return self
    def Integral(self, weighted=True, xmin=None, xmax=None ):
        '''
        Returns the weighted or unweighted integral of this histogram.
        If xmin and xmax are None, underflows and overflows are included.
        '''
        if type( weighted ) is not bool:
            raise ValueError('weighted should be a boolean')
        if xmin is not None:
            bmin = self.obj.FindFixBin( xmin )
        else:
            bmin = None
        if xmax is not None:
            # -1 so the bin containing xmax is excluded
            bmax = self.obj.FindFixBin( xmax ) - 1
        else:
            bmax = None
        hist = self.weighted
        if weighted is False:
            hist = self.obj
        if bmin is None and bmax is None:
            return hist.Integral(0, hist.GetNbinsX()+1)
        elif bmin is not None and bmax is not None:
            if (xmax - xmin) % self.obj.GetBinWidth(1) != 0:
                raise ValueError('boundaries should define an integer number of bins. nbins=%d, xmin=%3.3f, xmax=%3.3f' % (self.obj.GetNbinsX(), self.obj.GetXaxis().GetXmin(), self.obj.GetXaxis().GetXmax()) )
            return hist.Integral(bmin, bmax)
        else:
            raise ValueError('if specifying one boundary, you must specify the other')
    def DrawNormalized(self):
        '''Draw a normalized version of this histogram.
        The original and weighted histograms stay untouched.'''
        self.obj.DrawNormalized()
    def Normalize(self):
        '''Sets the weight to normalize the weighted histogram to 1.
        In other words, the original histogram stays untouched.'''
        self.Scale( 1/self.Integral() )
    def RemoveNegativeValues(self, hist=None):
        '''Clamp negative bin contents to zero, in both histograms by default.'''
        # what about errors??
        if hist is None:
            self.RemoveNegativeValues(self.weighted)
            self.RemoveNegativeValues(self.obj)
        else:
            for ibin in range(1, hist.GetNbinsX()+1):
                if hist.GetBinContent(ibin)<0:
                    hist.SetBinContent(ibin, 0)
    def Blind(self, minx, maxx):
        '''Zero out bin contents and errors for bins in [minx, maxx), in both
        the weighted and the unweighted histograms.'''
        whist = self.weighted
        # BUGFIX: blind the unweighted histogram too; 'uwhist' previously
        # aliased self.weighted, leaving self.obj unblinded (and zeroing the
        # weighted histogram twice).
        uwhist = self.obj
        minbin = whist.FindBin(minx)
        maxbin = min(whist.FindBin(maxx), whist.GetNbinsX() + 1)
        for ibin in range(minbin, maxbin):
            whist.SetBinContent(ibin, 0)
            whist.SetBinError(ibin, 0)
            uwhist.SetBinContent(ibin, 0)
            uwhist.SetBinError(ibin, 0)
| cbernet/cpyroot | tools/DataMC/Histogram.py | Python | gpl-2.0 | 8,121 |
# coding=utf-8
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from app import app
if __name__ == "__main__":
    # Wrap the WSGI app in Tornado's HTTP server so it is served from
    # Tornado's event loop instead of the development server.
    http_server = HTTPServer(WSGIContainer(app))
    # NOTE(review): port 5000 is hard-coded and binds all interfaces by
    # default -- confirm deployment expectations.
    http_server.listen(5000)
    IOLoop.instance().start()
| levythu/swift-layerC | inapi/httpd.py | Python | gpl-2.0 | 287 |
import logging
from joj.lib.base import *
from paste.request import parse_querystring
import urllib2
log = logging.getLogger(__name__)
class LoggedinController(BaseController):
    """Controller serving the post-login landing page."""

    def index(self):
        """Return a minimal page whose body closes its own window on load."""
        page = '<html><head></head><body onload="window.close()"></body></html>'
        return page
| NERC-CEH/jules-jasmin | majic/joj/controllers/loggedin.py | Python | gpl-2.0 | 304 |
# -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2013 Vassilii Khachaturov
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Date strings to translate per each language for display and parsing.
"""
from __future__ import print_function, unicode_literals
#-------------------------------------------------------------------------
#
# set up logging
#
#-------------------------------------------------------------------------
import logging
log = logging.getLogger(".DateStrings")
#-------------------------------------------------------------------------
#
# DateStrings
#
#-------------------------------------------------------------------------
class DateStrings(object):
    """
    String tables for :class:`.DateDisplay` and :class:`.DateParser`.
    """

    # This table needs not be localized, it's only for parsing
    # Swedish calendar dates using Swedish month names.
    # Display of these months uses the regular long_months.
    # TODO should we pack these into alt_long_months instead?
    swedish_SV = (
        "", "Januari", "Februari", "Mars",
        "April", "Maj", "Juni",
        "Juli", "Augusti", "September",
        "Oktober", "November", "December"
    )

    def __init__(self, locale):
        # lexgettext resolves strings that carry lexeme inflection markers
        _ = locale.translation.lexgettext

        # index 0 is a placeholder so month numbers can index directly
        self.long_months = ( "",
            # TRANSLATORS: see
            # http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
            # to learn how to select proper inflection to be used in your localized
            # DateDisplayer code!
            _("localized lexeme inflections||January"),
            _("localized lexeme inflections||February"),
            _("localized lexeme inflections||March"),
            _("localized lexeme inflections||April"),
            _("localized lexeme inflections||May"),
            _("localized lexeme inflections||June"),
            _("localized lexeme inflections||July"),
            _("localized lexeme inflections||August"),
            _("localized lexeme inflections||September"),
            _("localized lexeme inflections||October"),
            _("localized lexeme inflections||November"),
            _("localized lexeme inflections||December") )

        self.short_months = ( "",
            # TRANSLATORS: see
            # http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
            # to learn how to select proper inflection to be used in your localized
            # DateDisplayer code!
            _("localized lexeme inflections - short month form||Jan"),
            _("localized lexeme inflections - short month form||Feb"),
            _("localized lexeme inflections - short month form||Mar"),
            _("localized lexeme inflections - short month form||Apr"),
            _("localized lexeme inflections - short month form||May"),
            _("localized lexeme inflections - short month form||Jun"),
            _("localized lexeme inflections - short month form||Jul"),
            _("localized lexeme inflections - short month form||Aug"),
            _("localized lexeme inflections - short month form||Sep"),
            _("localized lexeme inflections - short month form||Oct"),
            _("localized lexeme inflections - short month form||Nov"),
            _("localized lexeme inflections - short month form||Dec") )

        # plain sgettext for strings without inflection markers
        _ = locale.translation.sgettext
        self.alt_long_months = ( "",
            # TRANSLATORS: see
            # http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
            # to learn how to add proper alternatives to be recognized in your localized
            # DateParser code!
            _("alternative month names for January||"),
            _("alternative month names for February||"),
            _("alternative month names for March||"),
            _("alternative month names for April||"),
            _("alternative month names for May||"),
            _("alternative month names for June||"),
            _("alternative month names for July||"),
            _("alternative month names for August||"),
            _("alternative month names for September||"),
            _("alternative month names for October||"),
            _("alternative month names for November||"),
            _("alternative month names for December||") )

        self.calendar = (
            # Must appear in the order indexed by Date.CAL_... numeric constants
            _("calendar|Gregorian"),
            _("calendar|Julian"),
            _("calendar|Hebrew"),
            _("calendar|French Republican"),
            _("calendar|Persian"),
            _("calendar|Islamic"),
            _("calendar|Swedish") )
        # back to lexgettext for the per-calendar month lexemes below
        _ = locale.translation.lexgettext

        self.hebrew = (
            "",
            # TRANSLATORS: see
            # http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
            # to learn how to select proper inflection to be used in your localized
            # DateDisplayer code!
            _("Hebrew month lexeme|Tishri"),
            _("Hebrew month lexeme|Heshvan"),
            _("Hebrew month lexeme|Kislev"),
            _("Hebrew month lexeme|Tevet"),
            _("Hebrew month lexeme|Shevat"),
            _("Hebrew month lexeme|AdarI"),
            _("Hebrew month lexeme|AdarII"),
            _("Hebrew month lexeme|Nisan"),
            _("Hebrew month lexeme|Iyyar"),
            _("Hebrew month lexeme|Sivan"),
            _("Hebrew month lexeme|Tammuz"),
            _("Hebrew month lexeme|Av"),
            _("Hebrew month lexeme|Elul")
        )

        self.french = (
            "",
            # TRANSLATORS: see
            # http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
            # to learn how to select proper inflection to be used in your localized
            # DateDisplayer code!
            _("French month lexeme|Vendémiaire"),
            _("French month lexeme|Brumaire"),
            _("French month lexeme|Frimaire"),
            _("French month lexeme|Nivôse"),
            _("French month lexeme|Pluviôse"),
            _("French month lexeme|Ventôse"),
            _("French month lexeme|Germinal"),
            _("French month lexeme|Floréal"),
            _("French month lexeme|Prairial"),
            _("French month lexeme|Messidor"),
            _("French month lexeme|Thermidor"),
            _("French month lexeme|Fructidor"),
            _("French month lexeme|Extra"),
        )

        self.islamic = (
            "",
            # TRANSLATORS: see
            # http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
            # to learn how to select proper inflection to be used in your localized
            # DateDisplayer code!
            _("Islamic month lexeme|Muharram"),
            _("Islamic month lexeme|Safar"),
            _("Islamic month lexeme|Rabi`al-Awwal"),
            _("Islamic month lexeme|Rabi`ath-Thani"),
            _("Islamic month lexeme|Jumada l-Ula"),
            _("Islamic month lexeme|Jumada t-Tania"),
            _("Islamic month lexeme|Rajab"),
            _("Islamic month lexeme|Sha`ban"),
            _("Islamic month lexeme|Ramadan"),
            _("Islamic month lexeme|Shawwal"),
            _("Islamic month lexeme|Dhu l-Qa`da"),
            _("Islamic month lexeme|Dhu l-Hijja"),
        )

        self.persian = (
            "",
            # TRANSLATORS: see
            # http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
            # to learn how to select proper inflection to be used in your localized
            # DateDisplayer code!
            _("Persian month lexeme|Farvardin"),
            _("Persian month lexeme|Ordibehesht"),
            _("Persian month lexeme|Khordad"),
            _("Persian month lexeme|Tir"),
            _("Persian month lexeme|Mordad"),
            _("Persian month lexeme|Shahrivar"),
            _("Persian month lexeme|Mehr"),
            _("Persian month lexeme|Aban"),
            _("Persian month lexeme|Azar"),
            _("Persian month lexeme|Dey"),
            _("Persian month lexeme|Bahman"),
            _("Persian month lexeme|Esfand"),
        )

        self.modifiers = ("",
            _("date modifier|before "),
            _("date modifier|after "),
            _("date modifier|about "),
            "", "", "")

        self.qualifiers = ("",
            _("date quality|estimated "),
            _("date quality|calculated "),
            )

        # 6753: localized day names. Eventually should sprout into
        # a per-calendar type thing instead.
        self.long_days = ("",
            _("Sunday"),
            _("Monday"),
            _("Tuesday"),
            _("Wednesday"),
            _("Thursday"),
            _("Friday"),
            _("Saturday"),
            )
__doc__ += """
__main__
--------
Run this code with the appropriate ``LANG`` and ``LC_DATE`` set for your target
language, in order to generate the .po snippets initialized with the strings
from your locale (from the deprecated data provided in _grampslocale).
E.g., for French::
LANG=fr_FR.utf8 LC_ALL=fr_FR.utf8 GRAMPS_RESOURCES=$PWD python -m gramps.gen.datehandler._datestrings
Then merge the output into your language's .po file, and further modify the
strings as needed. Then remove the strings from your language's
:class:`DateParserXX` and :class:`DateHandlerXX` classes.
"""
if __name__ == '__main__':
    # Emit .po snippets for the current locale's date strings; see the
    # module docstring for usage.
    import sys
    from ..utils.grampslocale import GrampsLocale
    from gramps.gen.const import GRAMPS_LOCALE as glocale
    from ._grampslocale import (_deprecated_long_months as old_long,
                                _deprecated_short_months as old_short,
                                _deprecated_long_days as old_days)
    from ._datedisplay import DateDisplay
    import gettext
    lang = glocale.lang
    lang_short = lang[:2]
    available_langs = glocale.get_available_translations()
    if glocale.check_available_translations(lang) is None:
        print ("Translation for current language {lang} not available.\n"
               "Available translations: {list}.\n"
               "Does po/{lang_short}*.po exist in gramps source tree?!\n"
               "Please set your LANG / LC_ALL environment to something else...\n".format(
                   lang=lang, list=available_langs, lang_short=lang_short),
               file=sys.stderr)
        sys.exit(1)

    print ("# Generating snippets for {}*.po\n"
           "# Available languages: {}".format(
               lang_short, available_langs))
    glocale = GrampsLocale(languages=(lang))
    dd = glocale.date_displayer
    ds = dd._ds
    glocale_EN = GrampsLocale(languages=('en'))
    ds_EN = DateStrings(glocale_EN)

    filename = __file__

    def print_po_snippet(en_loc_old_lists, context):
        # Print one msgid/msgstr pair per term, falling back to the
        # deprecated locale data when no explicit localization exists.
        for m, localized, old in zip(*en_loc_old_lists):
            if m == "":
                continue
            if m == localized:
                localized = old
            print ('#: {file}:{line}\n'
                   'msgid "{context}{en_month}"\n'
                   'msgstr "{localized_month}"\n'.format(
                       context = context,
                       file = filename,
                       line = print_po_snippet.line,
                       en_month = m,
                       localized_month = localized))
            print_po_snippet.line += 1
    # synthetic line numbers for the '#:' source references
    print_po_snippet.line = 10000

    # NOTE: a duplicated (dead) copy of this try/except used to appear
    # before print_po_snippet's definition; it has been removed, since the
    # value was unconditionally recomputed here before first use.
    try:
        localized_months = dd.__class__.long_months
    except AttributeError:
        localized_months = old_long
    print_po_snippet((ds_EN.long_months, localized_months, old_long),
                     "localized lexeme inflections||")

    try:
        localized_months = dd.__class__.short_months
    except AttributeError:
        localized_months = old_short
    print_po_snippet((ds_EN.short_months, localized_months, old_short),
                     "localized lexeme inflections - short month form||")

    try:
        loc = dd.__class__.hebrew
        print_po_snippet((ds_EN.hebrew, loc, loc),
                         "Hebrew month lexeme|")
    except AttributeError:
        pass

    try:
        loc = dd.__class__.french
        print_po_snippet((ds_EN.french, loc, loc),
                         "French month lexeme|")
    except AttributeError:
        pass

    try:
        loc = dd.__class__.islamic
        print_po_snippet((ds_EN.islamic, loc, loc),
                         "Islamic month lexeme|")
    except AttributeError:
        pass

    try:
        loc = dd.__class__.persian
        print_po_snippet((ds_EN.persian, loc, loc),
                         "Persian month lexeme|")
    except AttributeError:
        pass

    try:
        loc = dd.__class__._mod_str
        print_po_snippet((ds_EN.modifiers, loc, loc),
                         "date modifier|")
    except AttributeError:
        pass

    try:
        loc = dd.__class__._qual_str
        print_po_snippet((ds_EN.qualifiers, loc, loc),
                         "date quality|")
    except AttributeError:
        pass

    print_po_snippet((ds_EN.long_days, old_days, old_days), "")
| pmghalvorsen/gramps_branch | gramps/gen/datehandler/_datestrings.py | Python | gpl-2.0 | 14,146 |
# Copyright (C) 2011, Endre Karlson
# All rights reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
DetailNav router for the Zenoss JSON API
"""
from zope.interface import implements
from zenoss_api.interfaces import IDetailNav
from zenoss_api.router import RouterBase
from zenoss_api.utils import myArgs
# Metadata describing this router plugin to the zenoss_api loader.
info = {"name": "nav",
        "author": "Endre Karlson endre.karlson@gmail.com",
        "version": "0.1",
        "class": "DetailNav"}
class DetailNav(RouterBase):
    """Wrapper around Zenoss' DetailNavRouter JSON API endpoint."""
    implements(IDetailNav)

    # Location + action
    location = 'detailnav_router'
    action = 'DetailNavRouter'

    def getDetailNavConfigs(self, uid=None, menuIds=None):
        # Fix: the original unpacked only myArgs()[0] and then referenced an
        # undefined name 'kw', so every call raised NameError.
        # NOTE(review): assumes myArgs() returns an (args, kwargs) pair --
        # confirm against zenoss_api.utils.myArgs.
        args, kw = myArgs()
        return self._request(args, **kw)

    def getContextMenus(self, uid):
        args, kw = myArgs()
        return self._request(args, **kw)

    def getSecurityPermissions(self, uid):
        args, kw = myArgs()
        return self._request(args, **kw)
| ekarlso/python-zenoss-api | zenoss_api/routers/nav.py | Python | gpl-3.0 | 1,526 |
#!/usr/bin/env python
"""aligned_parse_reader.py: read parsed and aligned files"""
__author__ = "Fabien Cromieres"
__license__ = "undecided"
__version__ = "1.0"
__email__ = "fabien.cromieres@gmail.com"
__status__ = "Development"
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import io
import exceptions
# Module-level logger for aligned-parse reading diagnostics.
log = logging.getLogger("aparse")
log.setLevel(logging.INFO)
def read_one_parse_info_from_file_object(f):
    """Read a single parsed-sentence record from file object *f*.

    The record starts with a header '# ID=<id> SCORE=<float>' and is
    followed by one tab-separated '<pos>\\t<dpnd>\\t<word>' line per token,
    terminated by a blank line.  Returns an (id, words) pair; raises
    EOFError when the file is exhausted.
    """
    header = f.readline()
    if not header:
        raise exceptions.EOFError()
    sharp, id_part, score_part = header.split()
    assert sharp == "#"
    id_tag, id_ = id_part.split("=")
    assert id_tag == "ID"
    score_tag, score = score_part.split("=")
    assert score_tag == "SCORE"
    score = float(score)
    words = []
    while True:
        line = f.readline().strip()
        if not line:
            return id_, words
        fields = line.split("\t")
        position = int(fields[0])
        dpnd = int(fields[1])  # dependency index: parsed (validated) but unused here
        token = fields[2]
        # Token positions must be consecutive, starting at 0.
        assert position == len(words)
        words.append(token)
def read_one_align_info_from_file_object(f):
    """Read one alignment record from file object *f*.

    The record is a header line '# <id> <score>' followed by one line of
    space-separated links 'l1,l2-r1,r2'.  Returns (id, score, alignment)
    where alignment is a list of (left_indices, right_indices) pairs;
    raises EOFError at end of file.
    """
    header = f.readline()
    if not header:
        raise exceptions.EOFError()
    sharp, id_, score = header.strip().split()
    assert sharp == "#"
    score = float(score)
    links = []
    for link in f.readline().strip().split():
        left, right = link.split("-")
        links.append(([int(tok) for tok in left.split(",")],
                      [int(tok) for tok in right.split(",")]))
    return id_, score, links
def load_aligned_corpus(src_fn, tgt_fn, align_fn, skip_empty_align=True, invert_alignment_links=False):
    """Generator over (src_sentence, tgt_sentence, alignment) triples.

    Reads three parallel files (source parses, target parses, alignments),
    checking that their sentence ids stay in sync.  Records with an empty
    alignment are skipped (and logged) when skip_empty_align is set;
    invert_alignment_links swaps the left/right side of every link.
    """
    src_f = io.open(src_fn, 'rt', encoding="utf8")
    tgt_f = io.open(tgt_fn, 'rt', encoding="utf8")
    align_file = io.open(align_fn, 'rt', encoding="utf8")
    sentence_count = 0
    while True:
        try:
            src_id, src_sentence = read_one_parse_info_from_file_object(src_f)
            tgt_id, tgt_sentence = read_one_parse_info_from_file_object(tgt_f)
            align_id, _score, alignment = read_one_align_info_from_file_object(
                align_file)
        except exceptions.EOFError:
            return
        if skip_empty_align and not alignment:
            log.warn("skipping empty alignment %i %s" % (sentence_count, align_id))
            continue
        # All three files must advance through the same sentence ids.
        assert src_id == tgt_id, "%s != %s @%i" % (src_id, tgt_id, sentence_count)
        assert src_id == align_id, "%s != %s @%i" % (src_id, align_id, sentence_count)
        if invert_alignment_links:
            alignment = [(right, left) for (left, right) in alignment]
        yield src_sentence, tgt_sentence, alignment
        sentence_count += 1
| fabiencro/knmt | nmt_chainer/dataprocessing/aligned_parse_reader.py | Python | gpl-3.0 | 2,892 |
"""Subpackage with PS-specific BSMP objects."""
| lnls-sirius/dev-packages | siriuspy/siriuspy/pwrsupply/bsmp/__init__.py | Python | gpl-3.0 | 48 |
#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import httplib
import socket
import urllib2
from lib.core.data import logger
from lib.core.exception import SqlmapConnectionException
ssl = None
try:
import ssl as _ssl
ssl = _ssl
except ImportError:
pass
_protocols = [ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1]
class HTTPSConnection(httplib.HTTPSConnection):
    """
    Connection class that enables usage of newer SSL protocols.

    Reference: http://bugs.python.org/msg128686
    """

    def __init__(self, *args, **kwargs):
        httplib.HTTPSConnection.__init__(self, *args, **kwargs)

    def connect(self):
        # Build a fresh TCP socket for each protocol attempt below,
        # re-establishing the CONNECT tunnel if one is configured.
        def create_sock():
            sock = socket.create_connection((self.host, self.port), self.timeout)
            if getattr(self, "_tunnel_host", None):
                self.sock = sock
                self._tunnel()
            return sock
        success = False
        # Try each protocol version in turn until a handshake succeeds; a
        # working protocol is moved to the front of the module-level
        # _protocols list so subsequent connections try it first.
        for protocol in _protocols:
            try:
                sock = create_sock()
                _ = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=protocol)
                if _:
                    success = True
                    self.sock = _
                    _protocols.remove(protocol)
                    _protocols.insert(0, protocol)
                    break
                else:
                    # Handshake returned no wrapped socket: discard and retry
                    # with the next protocol.
                    sock.close()
            except ssl.SSLError, errMsg:
                logger.debug("SSL connection error occured ('%s')" % errMsg)
        if not success:
            raise SqlmapConnectionException("can't establish SSL connection")
class HTTPSHandler(urllib2.HTTPSHandler):
    # Use the protocol-fallback HTTPSConnection above when the ssl module is
    # available; otherwise fall back to the stock httplib implementation.
    def https_open(self, req):
        return self.do_open(HTTPSConnection if ssl else httplib.HTTPSConnection, req)
# Bug fix (http://bugs.python.org/issue17849)
# Monkeypatch: keep the original readline reachable as _readline and install
# a shim that ignores any extra positional arguments passed to readline.
def _(self, *args):
    return self._readline()
httplib.LineAndFileWrapper._readline = httplib.LineAndFileWrapper.readline
httplib.LineAndFileWrapper.readline = _
| aron-bordin/Tyrant-Sql | SQL_Map/lib/request/httpshandler.py | Python | gpl-3.0 | 2,072 |
from .models import *
from django.contrib import admin
class PingLogAdmin(admin.ModelAdmin):
    """Django admin configuration for PingLog entries."""
    # Columns shown in the admin change-list view.
    list_display = ('id','hash_key','url','ip_address','user_agent','time')
admin.site.register(PingLog, PingLogAdmin) | thejeshgn/quest | quest/admin.py | Python | gpl-3.0 | 213 |
# -*- coding: utf-8 -*-
#
# test_import_issues.py - Test issue importing.
# Copyright (C) 2008 by Drew Hess <dhess@bothan.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Test issue importing."""
import unittest
import lobbyists
import sqlite3
import util
class TestImportIssues(unittest.TestCase):
    """Tests for importing lobbying issues from parsed filings into sqlite.

    Each test parses the ``issues.xml`` fixture, imports it into a fresh
    in-memory database, and compares whole tables against the expected rows
    below in ascending primary-key order.  The deprecated ``failUnless*``
    aliases (removed in Python 3.12) were replaced with the modern
    ``assertTrue``/``assertEqual`` names, and the copy-pasted assertion
    chains were folded into data-driven loops.
    """

    # Specific-issue text shared by several expected rows below.
    _DEFENSE_APPROPS = ('H.R.3222 & Senate FY08 Defense Appropriations-Navy, Army & SOCOM R&D\n'
                        'H.R.1585 & S.1547 FY08 Defense Authorizations-Navy, Army & SOCOM R&D\n')
    _DHS_APPROPS = _DEFENSE_APPROPS + 'H.R.2638 & S.1644 FY08 DHS AppropriationsBill-CRP'

    # Expected rows of the 'issue' table as (id, code, specific_issue),
    # in ascending id order.
    EXPECTED_ISSUES = [
        (1, 'INSURANCE', 'unspecified'),
        (2, 'TAXATION/INTERNAL REVENUE CODE', 'unspecified'),
        (3, 'BUDGET/APPROPRIATIONS', 'unspecified'),
        (4, 'WELFARE', 'unspecified'),
        (5, 'MEDICARE/MEDICAID', 'unspecified'),
        (6, 'HEALTH ISSUES', 'unspecified'),
        (7, 'FAMILY ISSUES/ABORTION/ADOPTION', 'unspecified'),
        (8, 'LAW ENFORCEMENT/CRIME/CRIMINAL JUSTICE', 'unspecified'),
        (9, 'FOREIGN RELATIONS', 'unspecified'),
        (10, 'FINANCIAL INSTITUTIONS/INVESTMENTS/SECURITIES', 'unspecified'),
        (11, 'REAL ESTATE/LAND USE/CONSERVATION', 'unspecified'),
        (12, 'BANKING', 'unspecified'),
        (13, 'DEFENSE',
         'DEFENSE AUTHORIZATION, DEFENSE APPROPRIATIONS, VETERANS, DEFENSE HEALTH CARE, ARMED FORCES RETIREMENT, ARMED FORCES PERSONNEL BENEFITS, EMERGING DEFENSE RELATED ISSUES'),
        (14, 'BUDGET/APPROPRIATIONS', _DHS_APPROPS),
        (15, 'HOMELAND SECURITY', _DHS_APPROPS),
        (16, 'DEFENSE', _DEFENSE_APPROPS),
        (17, 'ROADS/HIGHWAY',
         '\r\nH.R. 3098 to restore farm truck exemptions from federal motor carrier vehicle regulations.'),
        (18, 'EDUCATION',
         '\r\nFY08 Labor, HHS and Education spending. Perkins Amendment (federal funding for FFA and career and technical education).'),
        (19, 'TRADE (DOMESTIC/FOREIGN)',
         '\r\nU.S. -Peru Trade Promotion Agreement (TPA) - the goal is to increase U.S. agriculture exports and increase market share.'),
        (20, 'AGRICULTURE',
         '\r\nFY08 Agriculture Appropriations Bill - (Sec. 738) amendment to prohibit USDA from spending money for health inspection of horses.\n\nH.R. 3161, the FY08 Ag spending bill - amendments: King/Kingston amendment to strike Sec. 738. It would limit USDA authority for equine health inspection, effectively restricting the movement of all horses; Ackerman amendment prohibits funding for Food Safety and Inspection Service (FSIS) inspections in facilities that process nonambulatory or downer livestock; Whitfield-Spratt-Rahall-Chandler amendment to restrict USDA inspection of horses intended for processing for human consumption.\n\nPayment Limits.\r\nFarm Bill: tax title, reductions in direct payments, counter-cyclical revenue option, senate ag committee markup on farm bill, amendments seeking further reform to payment limits and adjusted gross income restrictions.\n'),
        (21, 'IMMIGRATION',
         '\r\nImmigration - Thanking Senator Lincoln and her staff for the hard work and long hours and dedication they presented in an effort to develop a comprehensive immigration reform.\n'),
        (22, 'TRANSPORTATION',
         '\r\nH.R. 1495 Water Resources Development Act (WRDA) - the WRDA provisions to modernize the locks on the Upper Mississippi and Illinois Rivers are essential if U.S. agriculture is going to remain competitive in the global marketplace.\r\nH.R. 1495 the Water Resources Development Act of 2007 (WRDA) - conference report - Title VIII of the legislation includes authorization for the Corps of Engineers to construct new 1,200 foot locks on the Upper Mississippi and Illinois Rivers\n'),
        (23, 'ENERGY/NUCLEAR', '\r\nComprehensive Energy Bill'),
    ]

    # Expected 'filing_issues' associations as (filing id, first issue id,
    # last issue id), covering an inclusive range, in ascending issue order.
    FILING_ISSUE_RANGES = [
        ('D1C9DB2A-AE4F-4FED-9BCB-024C8373813E', 1, 6),
        ('A55002C7-78C4-41BA-A6CA-01FCF7650116', 7, 12),
        ('F56492FC-4FBD-4824-83E1-0004B30F0519', 13, 13),
        ('05804BE5-57C9-41BF-97B2-0120826D4393', 14, 16),
        ('79E53F91-8C5F-44AD-909D-032AA25D5B00', 17, 23),
    ]

    def _import_fixture(self):
        """Parse issues.xml, import it into a fresh in-memory database and
        return the open connection (row_factory set to sqlite3.Row)."""
        filings = list(lobbyists.parse_filings(util.testpath('issues.xml')))
        con = sqlite3.connect(':memory:')
        con = lobbyists.create_db(con)
        cur = con.cursor()
        self.assertTrue(lobbyists.import_filings(cur, filings))
        con.row_factory = sqlite3.Row
        return con

    def test_import_issues(self):
        """Import issues"""
        con = self._import_fixture()
        cur = con.cursor()
        cur.execute("SELECT * FROM issue")
        rows = list(cur)
        self.assertEqual(len(rows), len(self.EXPECTED_ISSUES))
        for row, (id_, code, specific_issue) in zip(rows, self.EXPECTED_ISSUES):
            self.assertEqual(row['id'], id_)
            self.assertEqual(row['code'], code)
            self.assertEqual(row['specific_issue'], specific_issue)

    def test_import_issues_issue_code(self):
        """Importing issues should fill issue_code table."""
        # issue_code holds each distinct code exactly once, in first-seen
        # (ascending issue id) order.
        expected_codes = []
        for _id, code, _issue in self.EXPECTED_ISSUES:
            if code not in expected_codes:
                expected_codes.append(code)
        con = self._import_fixture()
        cur = con.cursor()
        cur.execute("SELECT * FROM issue_code")
        self.assertEqual([row['code'] for row in cur], expected_codes)

    def test_import_filings_to_issues(self):
        """Issues are matched up with filings in the database."""
        expected = [(filing, issue)
                    for (filing, lo, hi) in self.FILING_ISSUE_RANGES
                    for issue in range(lo, hi + 1)]
        con = self._import_fixture()
        cur = con.cursor()
        cur.execute("SELECT * FROM filing_issues")
        self.assertEqual([(row['filing'], row['issue']) for row in cur],
                         expected)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| dhess/lobbyists | lobbyists/tests/test_import_issues.py | Python | gpl-3.0 | 17,052 |
from PySide2.QtWidgets import QApplication
# Plugin metadata read by cutevariant's plugin manager.
__title__ = "Wordsets editor"
__description__ = "A plugin to manage word sets"
# HTML fragment rendered in the plugin manager's detail view.
__long_description__ = """
<p>This plugin allows to create sets of words that can be matched with the
attributes of the project's variants.</p>
<p>
Once the addition of a word set is started, a manual addition one by one of the
words is possible; for practical reasons it is however advisable to directly
import a text file containing merely 1 word per line.</p>
The set can be reworked at any time via an editor.<br>
<br>
<i>Example of use:</i><br>
<br>
<i>A user wishes to quickly filter all variants of a project related to a set of
relevant genes for him.
He therefore creates a word set and then makes a selection via:</i>
<ul>
<li>the <em>Filters Editor</em> plugin with a filter of the type:
<pre>gene IN ('WORDSET', 'my_word_set')</pre></li>
<li>the <em>VQL Editor</em> plugin with a VQL request of the type:
<pre>SELECT chr,pos,ref,alt,gene FROM variants WHERE gene IN WORDSET['my_word_set']</pre></li>
</ul>
"""
__author__ = "Sacha schutz"
__version__ = "1.0.0"
| labsquare/CuteVariant | cutevariant/gui/plugins/word_set/__init__.py | Python | gpl-3.0 | 1,098 |
"""
Facilities for pyFTS Benchmark module
"""
import matplotlib as plt
import matplotlib.cm as cmx
import matplotlib.colors as pltcolors
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import sqlite3
#from mpl_toolkits.mplot3d import Axes3D
from copy import deepcopy
from pyFTS.common import Util
def open_benchmark_db(name):
    """
    Open a connection with a Sqlite database designed to store benchmark results.

    :param name: database filename
    :return: a sqlite3 database connection
    """
    connection = sqlite3.connect(name)

    # performance optimizations
    for pragma in ("PRAGMA journal_mode = WAL", "PRAGMA synchronous = NORMAL"):
        connection.execute(pragma)

    create_benchmark_tables(connection)
    return connection


def create_benchmark_tables(conn):
    """
    Create a sqlite3 table designed to store benchmark results.

    :param conn: a sqlite3 database connection
    """
    cursor = conn.cursor()
    cursor.execute(
        "CREATE TABLE if not exists benchmarks("
        "ID integer primary key, Date int, Dataset text, Tag text, "
        "Type text, Model text, Transformation text, 'Order' int, "
        "Scheme text, Partitions int, "
        "Size int, Steps int, Method text, Measure text, Value real)")
    conn.commit()
def insert_benchmark(data, conn):
    """
    Insert one benchmark result row into the database.

    :param data: a 13-tuple with the benchmark data, in column order:
        Dataset, Tag, Type, Model, Transformation, Order, Scheme,
        Partitions, Size, Steps, Method, Measure, Value.  The Date column
        is filled automatically with the current timestamp.
    :param conn: a sqlite3 database connection
    """
    sql = ("INSERT INTO benchmarks(Date, Dataset, Tag, Type, Model, "
           "Transformation, 'Order', Scheme, Partitions, "
           "Size, Steps, Method, Measure, Value) "
           "VALUES(datetime('now'),?,?,?,?,?,?,?,?,?,?,?,?,?)")
    conn.cursor().execute(sql, data)
    conn.commit()
def process_common_data(dataset, tag, type, job):
    """
    Wrap benchmark information in a flat list matching the sqlite layout.

    :param dataset: benchmark dataset name
    :param tag: benchmark set alias
    :param type: forecasting type
    :param job: a dictionary whose 'obj' entry is the fitted model
    :return: list [dataset, tag, type, model, transformation, order,
        scheme, partitions, size]; partitioner fields are None for
        benchmark-only models
    """
    model = job["obj"]
    if model.benchmark_only:
        transformation = str(model.transformations[0]) if len(model.transformations) > 0 else None
        return [dataset, tag, type, model.shortname, transformation,
                model.order, None, None, None]
    partitioner = model.partitioner
    transformation = str(partitioner.transformation) if partitioner.transformation is not None else None
    return [dataset, tag, type, model.shortname, transformation,
            model.order, partitioner.name, str(partitioner.partitions),
            len(model)]
def process_common_data2(dataset, tag, type, job):
    """
    Wrap benchmark information in a flat list matching the sqlite layout.

    :param dataset: benchmark dataset name
    :param tag: benchmark set alias
    :param type: forecasting type
    :param job: a dictionary with benchmark data (model, transformation,
        order, partitioner, partitions, size keys)
    :return: list of values in database column order
    """
    keys = ('model', 'transformation', 'order',
            'partitioner', 'partitions', 'size')
    return [dataset, tag, type] + [job[key] for key in keys]
def get_dataframe_from_bd(file, filter):
    """
    Query the sqlite benchmark database and return the results as a pandas
    dataframe.

    :param file: path of the benchmark database
    :param filter: SQL conditions for the WHERE clause, or None for all rows
    :return: pandas dataframe with the query results
    """
    connection = sqlite3.connect(file)
    query = "SELECT * from benchmarks"
    if filter is not None:
        query = "{} WHERE {}".format(query, filter)
    return pd.read_sql_query(query, connection)
def extract_measure(dataframe, measure, data_columns):
    """Return the non-NaN sample values of *measure* from *dataframe*.

    Only the first matching record is used; returns None when the
    dataframe is empty.
    """
    if dataframe.empty:
        return None
    subset = dataframe[dataframe.Measure == measure][data_columns]
    record = subset.to_dict(orient="records")[0]
    return [value for value in record.values() if not np.isnan(value)]
def find_best(dataframe, criteria, ascending):
    """For each (model, order) pair, pick the row ranking best by *criteria*.

    :param dataframe: benchmark results with Model/Order/Scheme/Partitions columns
    :param criteria: column name(s) passed to sort_values
    :param ascending: matching ascending flag(s) for the sort
    :return: dict keyed by str(model)+str(order) with the winning
        Model/Order/Scheme/Partitions values
    """
    best_by_key = {}
    for model in dataframe.Model.unique():
        for order in dataframe.Order.unique():
            subset = dataframe[(dataframe.Model == model) & (dataframe.Order == order)]
            ranked = subset.sort_values(by=criteria, ascending=ascending)
            if ranked.empty:
                continue
            top = ranked.loc[ranked.index[0]]
            best_by_key[str(model) + str(order)] = {
                'Model': model,
                'Order': order,
                'Scheme': top["Scheme"],
                'Partitions': top["Partitions"],
            }
    return best_by_key
def simple_synthetic_dataframe(file, tag, measure, sql=None):
    '''
    Read experiment results from the sqlite3 database in 'file' and
    synthesize the results of metric 'measure' sharing the same 'tag',
    returning a Pandas DataFrame with per (dataset, model) mean/std values
    sorted by (AVG, STD).

    :param file: sqlite3 database file name
    :param tag: common tag of the experiments
    :param measure: metric to synthetize
    :param sql: optional extra SQL condition appended to the filter
    :return: Pandas DataFrame with the mean results
    '''
    extra = '' if sql is None else 'and {}'.format(sql)
    df = get_dataframe_from_bd(file,
                               "tag = '{}' and measure = '{}' {}".format(tag, measure, extra))
    models = df.Model.unique()
    datasets = df.Dataset.unique()
    summary = []
    for dataset in datasets:
        for model in models:
            mask = (df.Dataset == dataset) & (df.Model == model)
            summary.append([dataset, model,
                            np.nanmean(df[mask].Value),
                            np.nanstd(df[mask].Value)])
    dat = pd.DataFrame(summary, columns=['Dataset', 'Model', 'AVG', 'STD'])
    dat = dat.sort_values(['AVG', 'STD'])
    # Keep one row (the first/best after sorting) per (dataset, model) pair.
    best = []
    for dataset in datasets:
        for model in models:
            best.append(dat[(dat.Dataset == dataset) & (dat.Model == model)].index[0])
    ret = dat.loc[best].sort_values(['AVG', 'STD'])
    ret.groupby('Dataset')  # NOTE: no-op kept from the original implementation
    return ret
def analytic_tabular_dataframe(dataframe):
    """Convert an analytic benchmark dataframe (one experiment sample per
    column) into long/tabular form with one Value row per sample.
    """
    experiments = len(dataframe.columns) - len(base_dataframe_columns()) - 1
    data_columns = analytical_data_columns(experiments)
    records = []
    for m in dataframe.Model.unique():
        for o in dataframe.Order.unique():
            for s in dataframe.Scheme.unique():
                for p in dataframe.Partitions.unique():
                    for st in dataframe.Steps.unique():
                        for ms in dataframe.Measure.unique():
                            rows = dataframe[(dataframe.Model == m) & (dataframe.Order == o)
                                             & (dataframe.Scheme == s) & (dataframe.Partitions == p)
                                             & (dataframe.Steps == st) & (dataframe.Measure == ms)]
                            if rows.empty:
                                continue
                            # Emit one tabular row per experiment column.
                            for col in data_columns:
                                records.append([m, o, s, p, st, ms, rows[col].values[0]])
    return pd.DataFrame(records, columns=tabular_dataframe_columns())
def tabular_dataframe_columns():
    """Column names of the long/tabular benchmark dataframe."""
    identification = ["Model", "Order", "Scheme", "Partitions", "Steps"]
    return identification + ["Measure", "Value"]
def base_dataframe_columns():
    """Identification columns shared by all benchmark dataframes."""
    return list(("Model", "Order", "Scheme", "Partitions",
                 "Size", "Steps", "Method"))
def point_dataframe_synthetic_columns():
    """Column names for the synthetic (aggregated) point-forecasting dataframe.

    Fix: the previous implementation returned the result of list.extend(),
    which is always None, so callers built DataFrames without explicit
    column names.  Concatenate and return the full list instead.
    """
    return base_dataframe_columns() + ["RMSEAVG", "RMSESTD",
                                       "SMAPEAVG", "SMAPESTD", "UAVG", "USTD",
                                       "TIMEAVG", "TIMESTD"]
def point_dataframe_analytic_columns(experiments):
    """Column names for the analytic point dataframe: eight identification
    columns followed by one numbered column per experiment sample.
    """
    header = ["Model", "Order", "Scheme", "Partitions", "Size",
              "Steps", "Method", "Measure"]
    return header + [str(k) for k in np.arange(0, experiments)]
def save_dataframe_point(experiments, file, objs, rmse, save, synthetic, smape, times, u, steps, method):
    """
    Create a dataframe to store the benchmark results

    :param experiments: dictionary with the execution results
    :param file: output CSV file name (made unique via Util.uniquefilename)
    :param objs: dict of fitted model objects, keyed by experiment id
    :param rmse: dict of RMSE sample lists, keyed like objs
    :param save: when True, persist the dataframe as a ';'-separated CSV
    :param synthetic: when True, emit one aggregated (mean/std) row per
        model; otherwise emit one raw-sample row per model and measure
    :param smape: dict of SMAPE sample lists, keyed like objs
    :param times: dict of execution-time sample lists, keyed like objs
    :param u: dict of U-statistic sample lists, keyed like objs
    :param steps: dict of forecasting horizons, keyed like objs
    :param method: dict of forecasting methods, keyed like objs
    :return: the assembled pandas DataFrame (None if assembly failed)
    """
    ret = []
    if synthetic:
        # One row per model: identification columns followed by mean/std
        # aggregates of each accuracy measure.
        for k in sorted(objs.keys()):
            try:
                mod = []
                mfts = objs[k]
                mod.append(mfts.shortname)
                mod.append(mfts.order)
                if not mfts.benchmark_only:
                    mod.append(mfts.partitioner.name)
                    mod.append(mfts.partitioner.partitions)
                    mod.append(len(mfts))
                else:
                    # Benchmark-only methods carry no partitioner information.
                    mod.append('-')
                    mod.append('-')
                    mod.append('-')
                mod.append(steps[k])
                mod.append(method[k])
                mod.append(np.round(np.nanmean(rmse[k]), 2))
                mod.append(np.round(np.nanstd(rmse[k]), 2))
                mod.append(np.round(np.nanmean(smape[k]), 2))
                mod.append(np.round(np.nanstd(smape[k]), 2))
                mod.append(np.round(np.nanmean(u[k]), 2))
                mod.append(np.round(np.nanstd(u[k]), 2))
                mod.append(np.round(np.nanmean(times[k]), 4))
                mod.append(np.round(np.nanstd(times[k]), 4))
                ret.append(mod)
            except Exception as ex:
                print("Erro ao salvar ", k)
                print("Exceção ", ex)
        columns = point_dataframe_synthetic_columns()
    else:
        # One row per (model, measure), holding the raw per-experiment samples.
        for k in sorted(objs.keys()):
            try:
                mfts = objs[k]
                n = mfts.shortname
                o = mfts.order
                if not mfts.benchmark_only:
                    s = mfts.partitioner.name
                    p = mfts.partitioner.partitions
                    l = len(mfts)
                else:
                    s = '-'
                    p = '-'
                    l = '-'
                st = steps[k]
                mt = method[k]
                tmp = [n, o, s, p, l, st, mt, 'RMSE']
                tmp.extend(rmse[k])
                ret.append(deepcopy(tmp))
                tmp = [n, o, s, p, l, st, mt, 'SMAPE']
                tmp.extend(smape[k])
                ret.append(deepcopy(tmp))
                tmp = [n, o, s, p, l, st, mt, 'U']
                tmp.extend(u[k])
                ret.append(deepcopy(tmp))
                tmp = [n, o, s, p, l, st, mt, 'TIME']
                tmp.extend(times[k])
                ret.append(deepcopy(tmp))
            except Exception as ex:
                print("Erro ao salvar ", k)
                print("Exceção ", ex)
        columns = point_dataframe_analytic_columns(experiments)
    try:
        dat = pd.DataFrame(ret, columns=columns)
        if save: dat.to_csv(Util.uniquefilename(file), sep=";", index=False)
        return dat
    except Exception as ex:
        # Assembly failed: dump the raw inputs for post-mortem debugging.
        print(ex)
        print(experiments)
        print(columns)
        print(ret)
def cast_dataframe_to_synthetic(infile, outfile, experiments, type):
    """
    Read an analytic benchmark CSV and write its synthetic (aggregated) version.

    For every distinct (Model, Order, Scheme, Partitions, Steps, Method)
    combination found in the analytic file, the raw measurements are collapsed
    into mean/std statistics and one row is written to the synthetic file.

    :param infile: analytic CSV file name (';' separated)
    :param outfile: target synthetic CSV file name (';' separated)
    :param experiments: number of experiment runs in the analytic file
    :param type: benchmark kind: 'point', 'interval' or 'distribution'
    :raise ValueError: when *type* is not recognized
    """
    # NOTE: the parameter name 'type' shadows the builtin, but renaming it
    # would break callers that pass it as a keyword argument.
    if type == 'point':
        analytic_columns = point_dataframe_analytic_columns
        synthetic_columns = point_dataframe_synthetic_columns
        synthetize_measures = cast_dataframe_to_synthetic_point
    elif type == 'interval':
        analytic_columns = interval_dataframe_analytic_columns
        synthetic_columns = interval_dataframe_synthetic_columns
        synthetize_measures = cast_dataframe_to_synthetic_interval
    elif type == 'distribution':
        analytic_columns = probabilistic_dataframe_analytic_columns
        synthetic_columns = probabilistic_dataframe_synthetic_columns
        synthetize_measures = cast_dataframe_to_synthetic_probabilistic
    else:
        raise ValueError("Type parameter has an unknown value!")

    columns = analytic_columns(experiments)
    dat = pd.read_csv(infile, sep=";", usecols=columns)
    models = dat.Model.unique()
    orders = dat.Order.unique()
    schemes = dat.Scheme.unique()
    partitions = dat.Partitions.unique()
    steps = dat.Steps.unique()
    methods = dat.Method.unique()
    data_columns = analytical_data_columns(experiments)
    ret = []

    # Cartesian sweep over all metadata combinations; only non-empty slices
    # (combinations that actually occur in the file) produce a row.
    for m in models:
        for o in orders:
            for s in schemes:
                for p in partitions:
                    for st in steps:
                        for mt in methods:
                            df = dat[(dat.Model == m) & (dat.Order == o) & (dat.Scheme == s) &
                                     (dat.Partitions == p) & (dat.Steps == st) & (dat.Method == mt)]
                            if not df.empty:
                                mod = synthetize_measures(df, data_columns)
                                mod.insert(0, m)
                                mod.insert(1, o)
                                mod.insert(2, s)
                                mod.insert(3, p)
                                # NOTE(review): iat[0, 5] reads column index 5,
                                # which is 'Steps' in the analytic layout, yet the
                                # value lands in the 'Size' slot (index 4 would be
                                # 'Size') -- looks like an off-by-one; confirm.
                                mod.insert(4, df.iat[0,5])
                                mod.insert(5, st)
                                mod.insert(6, mt)
                                ret.append(mod)

    dat = pd.DataFrame(ret, columns=synthetic_columns())
    dat.to_csv(outfile, sep=";", index=False)
def cast_dataframe_to_synthetic_point(df, data_columns):
    """
    Aggregate the raw point-forecast measurements of one model configuration
    into summary statistics.

    :param df: analytic DataFrame slice with one row per measure
    :param data_columns: names of the columns holding the raw measurements
    :return: [RMSE avg, RMSE std, SMAPE avg, SMAPE std, U avg, U std,
        TIME avg, TIME std], each rounded (times to 4 digits, the rest to 2)
    """
    summary = []
    for measure, digits in (('RMSE', 2), ('SMAPE', 2), ('U', 2), ('TIME', 4)):
        values = extract_measure(df, measure, data_columns)
        summary.append(np.round(np.nanmean(values), digits))
        summary.append(np.round(np.nanstd(values), digits))
    return summary
def analytical_data_columns(experiments):
    """Return the analytic measurement column names: '0' .. str(experiments - 1)."""
    return list(map(str, np.arange(0, experiments)))
def scale_params(data):
    """Return (minimum, range) of *data*, ignoring NaNs, for min-max scaling."""
    lower, upper = np.nanmin(data), np.nanmax(data)
    return (lower, upper - lower)
def scale(data, params):
    """Min-max scale *data* with params = (min, range), as built by scale_params."""
    vmin, vlen = params
    return [(value - vmin) / vlen for value in data]
def stats(measure, data):
    """Print the measure name followed by the NaN-ignoring mean and std of *data*."""
    mean, deviation = np.nanmean(data), np.nanstd(data)
    print(measure, mean, deviation)
def unified_scaled_point(experiments, tam, save=False, file=None,
                         sort_columns=['UAVG', 'RMSEAVG', 'USTD', 'RMSESTD'],
                         sort_ascend=[1, 1, 1, 1],save_best=False,
                         ignore=None, replace=None):
    """
    Boxplot RMSE/SMAPE/U of the best point-forecast models of several
    experiments on a common 0-1 scale.

    For each experiment the best models are selected (ranked by
    *sort_columns*/*sort_ascend*), their raw measurements are min-max scaled
    with parameters computed over that experiment only, and the scaled values
    are accumulated per model across all experiments.

    :param experiments: list of (synthetic CSV, analytic CSV, #runs) tuples
    :param tam: figure size passed to matplotlib
    :param save: when True, save the figure to *file*
    :param file: image file name
    :param sort_columns: columns used to rank the models
    :param sort_ascend: ascending flags for *sort_columns*
    :param save_best: unused, kept for interface compatibility
    :param ignore: list of substrings; models whose key contains one are skipped
    :param replace: list of (pattern, label) pairs used to relabel models
    """
    fig, axes = plt.subplots(nrows=3, ncols=1, figsize=tam)

    axes[0].set_title('RMSE')
    axes[1].set_title('SMAPE')
    axes[2].set_title('U Statistic')

    models = {}

    for experiment in experiments:

        mdl = {}

        dat_syn = pd.read_csv(experiment[0], sep=";", usecols=point_dataframe_synthetic_columns())

        bests = find_best(dat_syn, sort_columns, sort_ascend)

        dat_ana = pd.read_csv(experiment[1], sep=";", usecols=point_dataframe_analytic_columns(experiment[2]))

        rmse = []
        smape = []
        u = []
        times = []

        data_columns = analytical_data_columns(experiment[2])

        for b in sorted(bests.keys()):
            if check_ignore_list(b, ignore):
                continue

            if b not in models:
                models[b] = {}
                models[b]['rmse'] = []
                models[b]['smape'] = []
                models[b]['u'] = []
                models[b]['times'] = []

            if b not in mdl:
                mdl[b] = {}
                mdl[b]['rmse'] = []
                mdl[b]['smape'] = []
                mdl[b]['u'] = []
                mdl[b]['times'] = []

            best = bests[b]

            # select the analytic rows of this best model configuration
            tmp = dat_ana[(dat_ana.Model == best["Model"]) & (dat_ana.Order == best["Order"])
                          & (dat_ana.Scheme == best["Scheme"]) & (dat_ana.Partitions == best["Partitions"])]

            tmpl = extract_measure(tmp, 'RMSE', data_columns)
            mdl[b]['rmse'].extend(tmpl)
            rmse.extend(tmpl)

            tmpl = extract_measure(tmp, 'SMAPE', data_columns)
            mdl[b]['smape'].extend(tmpl)
            smape.extend(tmpl)

            tmpl = extract_measure(tmp, 'U', data_columns)
            mdl[b]['u'].extend(tmpl)
            u.extend(tmpl)

            tmpl = extract_measure(tmp, 'TIME', data_columns)
            mdl[b]['times'].extend(tmpl)
            times.extend(tmpl)

            models[b]['label'] = check_replace_list(best["Model"] + " " + str(best["Order"]), replace)

        print("GLOBAL")
        rmse_param = scale_params(rmse)
        stats("rmse", rmse)
        smape_param = scale_params(smape)
        stats("smape", smape)
        u_param = scale_params(u)
        stats("u", u)
        times_param = scale_params(times)

        # Scale with the parameters of THIS experiment and accumulate per
        # model.  Iterate mdl (models present in this experiment) rather than
        # models (every model seen so far): a model that appeared only in an
        # earlier experiment has no entry in mdl and would raise a KeyError
        # here (this mirrors unified_scaled_probabilistic).
        for key in sorted(mdl.keys()):
            models[key]['rmse'].extend(scale(mdl[key]['rmse'], rmse_param))
            models[key]['smape'].extend(scale(mdl[key]['smape'], smape_param))
            models[key]['u'].extend(scale(mdl[key]['u'], u_param))
            models[key]['times'].extend(scale(mdl[key]['times'], times_param))

    rmse = []
    smape = []
    u = []
    times = []
    labels = []

    for key in sorted(models.keys()):
        print(key)
        rmse.append(models[key]['rmse'])
        stats("rmse", models[key]['rmse'])
        smape.append(models[key]['smape'])
        stats("smape", models[key]['smape'])
        u.append(models[key]['u'])
        stats("u", models[key]['u'])
        times.append(models[key]['times'])
        labels.append(models[key]['label'])

    axes[0].boxplot(rmse, labels=labels, autorange=True, showmeans=True)
    axes[0].set_title("RMSE")
    axes[1].boxplot(smape, labels=labels, autorange=True, showmeans=True)
    axes[1].set_title("SMAPE")
    axes[2].boxplot(u, labels=labels, autorange=True, showmeans=True)
    axes[2].set_title("U Statistic")

    plt.tight_layout()

    Util.show_and_save_image(fig, file, save)
def plot_dataframe_point(file_synthetic, file_analytic, experiments, tam, save=False, file=None,
                         sort_columns=['UAVG', 'RMSEAVG', 'USTD', 'RMSESTD'],
                         sort_ascend=[1, 1, 1, 1],save_best=False,
                         ignore=None,replace=None):
    """
    Boxplot RMSE/SMAPE/U of the best point-forecast models of one experiment.

    :param file_synthetic: synthetic (aggregated) CSV written by save_dataframe_point
    :param file_analytic: analytic (raw measurements) CSV written by save_dataframe_point
    :param experiments: number of experiment runs in the analytic file
    :param tam: figure size passed to matplotlib
    :param save: when True, save the figure to *file*
    :param file: image file name
    :param sort_columns: columns used to rank the models when picking the best ones
    :param sort_ascend: ascending flags for *sort_columns*
    :param save_best: when True, also dump the selected best models to a "best" CSV
    :param ignore: list of substrings; models whose key contains one are skipped
    :param replace: list of (pattern, label) pairs used to relabel models
    """
    fig, axes = plt.subplots(nrows=3, ncols=1, figsize=tam)

    axes[0].set_title('RMSE')
    axes[1].set_title('SMAPE')
    axes[2].set_title('U Statistic')

    dat_syn = pd.read_csv(file_synthetic, sep=";", usecols=point_dataframe_synthetic_columns())

    bests = find_best(dat_syn, sort_columns, sort_ascend)

    dat_ana = pd.read_csv(file_analytic, sep=";", usecols=point_dataframe_analytic_columns(experiments))

    data_columns = analytical_data_columns(experiments)

    if save_best:
        dat = pd.DataFrame.from_dict(bests, orient='index')
        dat.to_csv(Util.uniquefilename(file_synthetic.replace("synthetic","best")), sep=";", index=False)

    rmse = []
    smape = []
    u = []
    times = []
    labels = []

    for b in sorted(bests.keys()):
        if check_ignore_list(b, ignore):
            continue
        best = bests[b]
        # select the analytic rows of this best model configuration
        tmp = dat_ana[(dat_ana.Model == best["Model"]) & (dat_ana.Order == best["Order"])
                      & (dat_ana.Scheme == best["Scheme"]) & (dat_ana.Partitions == best["Partitions"])]
        rmse.append( extract_measure(tmp,'RMSE',data_columns) )
        smape.append(extract_measure(tmp, 'SMAPE', data_columns))
        u.append(extract_measure(tmp, 'U', data_columns))
        times.append(extract_measure(tmp, 'TIME', data_columns))
        labels.append(check_replace_list(best["Model"] + " " + str(best["Order"]),replace))

    axes[0].boxplot(rmse, labels=labels, autorange=True, showmeans=True)
    axes[0].set_title("RMSE")
    axes[1].boxplot(smape, labels=labels, autorange=True, showmeans=True)
    axes[1].set_title("SMAPE")
    axes[2].boxplot(u, labels=labels, autorange=True, showmeans=True)
    axes[2].set_title("U Statistic")

    plt.tight_layout()

    Util.show_and_save_image(fig, file, save)
def check_replace_list(m, replace):
    """
    Return the replacement label for model name *m*, or *m* itself when no
    pattern matches.

    :param m: model name/key
    :param replace: optional list of (pattern, label) pairs; the first pair
        whose pattern is a substring of *m* wins
    """
    for pattern, label in (replace or []):
        if pattern in m:
            return label
    return m
def check_ignore_list(b, ignore):
    """
    Return True when model key *b* contains any substring of the optional
    *ignore* list, False otherwise (including when *ignore* is None).
    """
    if ignore is None:
        return False
    return any(token in b for token in ignore)
def save_dataframe_interval(coverage, experiments, file, objs, resolution, save, sharpness, synthetic, times,
                            q05, q25, q75, q95, steps, method):
    """
    Create a DataFrame with interval-forecasting benchmark results and
    optionally persist it as a ';'-separated CSV file.

    :param coverage: dict of coverage measurements per model id
    :param experiments: number of experiment runs (analytic column names)
    :param file: target CSV file name (made unique via Util.uniquefilename)
    :param objs: dict of fitted model objects per model id
    :param resolution: dict of resolution measurements per model id
    :param save: when True, write the DataFrame to *file*
    :param sharpness: dict of sharpness measurements per model id
    :param synthetic: when True, store aggregated statistics (mean/std) per
        model; otherwise one row per (model, measure) with raw measurements
    :param times: dict of execution times per model id
    :param q05: dict of pinball-loss measurements at quantile 0.05
    :param q25: dict of pinball-loss measurements at quantile 0.25
    :param q75: dict of pinball-loss measurements at quantile 0.75
    :param q95: dict of pinball-loss measurements at quantile 0.95
    :param steps: dict of forecasting horizon per model id
    :param method: dict of forecasting method per model id
    :return: the assembled DataFrame
    """
    ret = []

    if synthetic:

        for k in sorted(objs.keys()):
            mod = []
            mfts = objs[k]
            mod.append(mfts.shortname)
            mod.append(mfts.order)
            l = len(mfts)
            if not mfts.benchmark_only:
                mod.append(mfts.partitioner.name)
                mod.append(mfts.partitioner.partitions)
                mod.append(l)
            else:
                mod.append('-')
                mod.append('-')
                mod.append('-')
            mod.append(steps[k])
            mod.append(method[k])
            mod.append(round(np.nanmean(sharpness[k]), 2))
            mod.append(round(np.nanstd(sharpness[k]), 2))
            mod.append(round(np.nanmean(resolution[k]), 2))
            mod.append(round(np.nanstd(resolution[k]), 2))
            mod.append(round(np.nanmean(coverage[k]), 2))
            mod.append(round(np.nanstd(coverage[k]), 2))
            mod.append(round(np.nanmean(times[k]), 2))
            mod.append(round(np.nanstd(times[k]), 2))
            mod.append(round(np.nanmean(q05[k]), 2))
            mod.append(round(np.nanstd(q05[k]), 2))
            mod.append(round(np.nanmean(q25[k]), 2))
            mod.append(round(np.nanstd(q25[k]), 2))
            mod.append(round(np.nanmean(q75[k]), 2))
            mod.append(round(np.nanstd(q75[k]), 2))
            mod.append(round(np.nanmean(q95[k]), 2))
            mod.append(round(np.nanstd(q95[k]), 2))
            # mod now holds exactly 23 values, matching the synthetic column
            # layout Model..Q95STD; a stray extra append of the model size here
            # previously produced 24 values and broke DataFrame construction
            # with a column-count mismatch.
            ret.append(mod)

        columns = interval_dataframe_synthetic_columns()
    else:
        for k in sorted(objs.keys()):
            try:
                mfts = objs[k]
                n = mfts.shortname
                o = mfts.order
                if not mfts.benchmark_only:
                    s = mfts.partitioner.name
                    p = mfts.partitioner.partitions
                    l = len(mfts)
                else:
                    s = '-'
                    p = '-'
                    l = '-'
                st = steps[k]
                mt = method[k]
                tmp = [n, o, s, p, l, st, mt, 'Sharpness']
                tmp.extend(sharpness[k])
                ret.append(deepcopy(tmp))
                tmp = [n, o, s, p, l, st, mt, 'Resolution']
                tmp.extend(resolution[k])
                ret.append(deepcopy(tmp))
                tmp = [n, o, s, p, l, st, mt, 'Coverage']
                tmp.extend(coverage[k])
                ret.append(deepcopy(tmp))
                tmp = [n, o, s, p, l, st, mt, 'TIME']
                tmp.extend(times[k])
                ret.append(deepcopy(tmp))
                tmp = [n, o, s, p, l, st, mt, 'Q05']
                tmp.extend(q05[k])
                ret.append(deepcopy(tmp))
                tmp = [n, o, s, p, l, st, mt, 'Q25']
                tmp.extend(q25[k])
                ret.append(deepcopy(tmp))
                tmp = [n, o, s, p, l, st, mt, 'Q75']
                tmp.extend(q75[k])
                ret.append(deepcopy(tmp))
                tmp = [n, o, s, p, l, st, mt, 'Q95']
                tmp.extend(q95[k])
                ret.append(deepcopy(tmp))
            except Exception as ex:
                # A failing model is reported and skipped; the rest are kept.
                print("Erro ao salvar ", k)
                print("Exceção ", ex)
        columns = interval_dataframe_analytic_columns(experiments)
    dat = pd.DataFrame(ret, columns=columns)
    if save: dat.to_csv(Util.uniquefilename(file), sep=";")
    return dat
def interval_dataframe_analytic_columns(experiments):
    """
    Column names of the analytic interval-forecast CSV: eight metadata
    columns followed by one column ('0'..'N-1') per experiment run.
    """
    header = ["Model", "Order", "Scheme", "Partitions", "Size", "Steps", "Method", "Measure"]
    return header + [str(run) for run in np.arange(0, experiments)]
def interval_dataframe_synthetic_columns():
    """
    Column names of the synthetic (aggregated) interval-forecast CSV: seven
    metadata columns followed by mean/std pairs of each measure.

    A missing comma previously fused "Method" and "SHARPAVG" into the single
    name "MethodSHARPAVG", which broke every column selection on this layout
    (pd.read_csv(usecols=...), find_best sort columns).
    """
    columns = ["Model", "Order", "Scheme", "Partitions", "SIZE", "Steps", "Method",
               "SHARPAVG", "SHARPSTD", "RESAVG", "RESSTD", "COVAVG", "COVSTD",
               "TIMEAVG", "TIMESTD", "Q05AVG", "Q05STD", "Q25AVG", "Q25STD",
               "Q75AVG", "Q75STD", "Q95AVG", "Q95STD"]
    return columns
def cast_dataframe_to_synthetic_interval(df, data_columns):
    """
    Aggregate the raw interval-forecast measurements of one model
    configuration into summary statistics.

    :param df: analytic DataFrame slice with one row per measure
    :param data_columns: names of the columns holding the raw measurements
    :return: mean/std pairs of Sharpness, Resolution, Coverage (2 digits) and
        TIME, Q05, Q25, Q75, Q95 (4 digits), in that order
    """
    stats_spec = (('Sharpness', 2), ('Resolution', 2), ('Coverage', 2),
                  ('TIME', 4), ('Q05', 4), ('Q25', 4), ('Q75', 4), ('Q95', 4))
    summary = []
    for measure, digits in stats_spec:
        values = extract_measure(df, measure, data_columns)
        summary.append(np.round(np.nanmean(values), digits))
        summary.append(np.round(np.nanstd(values), digits))
    return summary
def unified_scaled_interval(experiments, tam, save=False, file=None,
                            sort_columns=['COVAVG', 'SHARPAVG', 'COVSTD', 'SHARPSTD'],
                            sort_ascend=[True, False, True, True],save_best=False,
                            ignore=None, replace=None):
    """
    Boxplot Sharpness/Resolution/Coverage of the best interval-forecast
    models of several experiments on a common 0-1 scale.

    :param experiments: list of (synthetic CSV, analytic CSV, #runs) tuples
    :param tam: figure size passed to matplotlib
    :param save: when True, save the figure to *file*
    :param file: image file name
    :param sort_columns: columns used to rank the models
    :param sort_ascend: ascending flags for *sort_columns*
    :param save_best: unused, kept for interface compatibility
    :param ignore: list of substrings; models whose key contains one are skipped
    :param replace: list of (pattern, label) pairs used to relabel models
    """
    fig, axes = plt.subplots(nrows=3, ncols=1, figsize=tam)

    axes[0].set_title('Sharpness')
    axes[1].set_title('Resolution')
    axes[2].set_title('Coverage')

    models = {}

    for experiment in experiments:

        mdl = {}

        dat_syn = pd.read_csv(experiment[0], sep=";", usecols=interval_dataframe_synthetic_columns())

        bests = find_best(dat_syn, sort_columns, sort_ascend)

        dat_ana = pd.read_csv(experiment[1], sep=";", usecols=interval_dataframe_analytic_columns(experiment[2]))

        sharpness = []
        resolution = []
        coverage = []
        times = []

        data_columns = analytical_data_columns(experiment[2])

        for b in sorted(bests.keys()):
            if check_ignore_list(b, ignore):
                continue

            if b not in models:
                models[b] = {}
                models[b]['sharpness'] = []
                models[b]['resolution'] = []
                models[b]['coverage'] = []
                models[b]['times'] = []

            if b not in mdl:
                mdl[b] = {}
                mdl[b]['sharpness'] = []
                mdl[b]['resolution'] = []
                mdl[b]['coverage'] = []
                mdl[b]['times'] = []

            best = bests[b]

            print(best)

            # select the analytic rows of this best model configuration
            tmp = dat_ana[(dat_ana.Model == best["Model"]) & (dat_ana.Order == best["Order"])
                          & (dat_ana.Scheme == best["Scheme"]) & (dat_ana.Partitions == best["Partitions"])]

            tmpl = extract_measure(tmp, 'Sharpness', data_columns)
            mdl[b]['sharpness'].extend(tmpl)
            sharpness.extend(tmpl)

            tmpl = extract_measure(tmp, 'Resolution', data_columns)
            mdl[b]['resolution'].extend(tmpl)
            resolution.extend(tmpl)

            tmpl = extract_measure(tmp, 'Coverage', data_columns)
            mdl[b]['coverage'].extend(tmpl)
            coverage.extend(tmpl)

            tmpl = extract_measure(tmp, 'TIME', data_columns)
            mdl[b]['times'].extend(tmpl)
            times.extend(tmpl)

            models[b]['label'] = check_replace_list(best["Model"] + " " + str(best["Order"]), replace)

        sharpness_param = scale_params(sharpness)
        resolution_param = scale_params(resolution)
        coverage_param = scale_params(coverage)
        times_param = scale_params(times)

        # Scale with the parameters of THIS experiment and accumulate per
        # model.  Iterate mdl (models present in this experiment) rather than
        # models (every model seen so far): a model that appeared only in an
        # earlier experiment has no entry in mdl and would raise a KeyError
        # here (this mirrors unified_scaled_probabilistic).
        for key in sorted(mdl.keys()):
            models[key]['sharpness'].extend(scale(mdl[key]['sharpness'], sharpness_param))
            models[key]['resolution'].extend(scale(mdl[key]['resolution'], resolution_param))
            models[key]['coverage'].extend(scale(mdl[key]['coverage'], coverage_param))
            models[key]['times'].extend(scale(mdl[key]['times'], times_param))

    sharpness = []
    resolution = []
    coverage = []
    times = []
    labels = []

    for key in sorted(models.keys()):
        sharpness.append(models[key]['sharpness'])
        resolution.append(models[key]['resolution'])
        coverage.append(models[key]['coverage'])
        times.append(models[key]['times'])
        labels.append(models[key]['label'])

    axes[0].boxplot(sharpness, labels=labels, autorange=True, showmeans=True)
    axes[1].boxplot(resolution, labels=labels, autorange=True, showmeans=True)
    axes[2].boxplot(coverage, labels=labels, autorange=True, showmeans=True)

    plt.tight_layout()

    Util.show_and_save_image(fig, file, save)
def plot_dataframe_interval(file_synthetic, file_analytic, experiments, tam, save=False, file=None,
                            sort_columns=['COVAVG', 'SHARPAVG', 'COVSTD', 'SHARPSTD'],
                            sort_ascend=[True, False, True, True],save_best=False,
                            ignore=None, replace=None):
    """
    Boxplot Sharpness/Resolution/Coverage of the best interval-forecast
    models of one experiment.

    :param file_synthetic: synthetic (aggregated) CSV written by save_dataframe_interval
    :param file_analytic: analytic (raw measurements) CSV written by save_dataframe_interval
    :param experiments: number of experiment runs in the analytic file
    :param tam: figure size passed to matplotlib
    :param save: when True, save the figure to *file*
    :param file: image file name
    :param sort_columns: columns used to rank the models when picking the best ones
    :param sort_ascend: ascending flags for *sort_columns*
    :param save_best: when True, also dump the selected best models to a "best" CSV
    :param ignore: list of substrings; models whose key contains one are skipped
    :param replace: list of (pattern, label) pairs used to relabel models
    """
    fig, axes = plt.subplots(nrows=3, ncols=1, figsize=tam)

    axes[0].set_title('Sharpness')
    axes[1].set_title('Resolution')
    axes[2].set_title('Coverage')

    dat_syn = pd.read_csv(file_synthetic, sep=";", usecols=interval_dataframe_synthetic_columns())

    bests = find_best(dat_syn, sort_columns, sort_ascend)

    dat_ana = pd.read_csv(file_analytic, sep=";", usecols=interval_dataframe_analytic_columns(experiments))

    data_columns = analytical_data_columns(experiments)

    if save_best:
        dat = pd.DataFrame.from_dict(bests, orient='index')
        dat.to_csv(Util.uniquefilename(file_synthetic.replace("synthetic","best")), sep=";", index=False)

    sharpness = []
    resolution = []
    coverage = []
    times = []
    labels = []
    bounds_shp = []  # NOTE(review): never used below -- confirm it can be removed

    for b in sorted(bests.keys()):
        if check_ignore_list(b, ignore):
            continue
        best = bests[b]
        # select the analytic rows of this best model configuration
        df = dat_ana[(dat_ana.Model == best["Model"]) & (dat_ana.Order == best["Order"])
                     & (dat_ana.Scheme == best["Scheme"]) & (dat_ana.Partitions == best["Partitions"])]
        sharpness.append( extract_measure(df,'Sharpness',data_columns) )
        resolution.append(extract_measure(df, 'Resolution', data_columns))
        coverage.append(extract_measure(df, 'Coverage', data_columns))
        times.append(extract_measure(df, 'TIME', data_columns))
        labels.append(check_replace_list(best["Model"] + " " + str(best["Order"]), replace))

    axes[0].boxplot(sharpness, labels=labels, autorange=True, showmeans=True)
    axes[0].set_title("Sharpness")
    axes[1].boxplot(resolution, labels=labels, autorange=True, showmeans=True)
    axes[1].set_title("Resolution")
    axes[2].boxplot(coverage, labels=labels, autorange=True, showmeans=True)
    axes[2].set_title("Coverage")
    # coverage lives in [0, 1]; leave headroom above 1
    axes[2].set_ylim([0, 1.1])

    plt.tight_layout()

    Util.show_and_save_image(fig, file, save)
def unified_scaled_interval_pinball(experiments, tam, save=False, file=None,
                                    sort_columns=['COVAVG','SHARPAVG','COVSTD','SHARPSTD'],
                                    sort_ascend=[True, False, True, True], save_best=False,
                                    ignore=None, replace=None):
    """
    Boxplot the pinball losses (quantiles 0.05/0.25/0.75/0.95) of the best
    interval-forecast models of several experiments on a common 0-1 scale.

    :param experiments: list of (synthetic CSV, analytic CSV, #runs) tuples
    :param tam: figure size passed to matplotlib
    :param save: when True, save the figure to *file*
    :param file: image file name
    :param sort_columns: columns used to rank the models
    :param sort_ascend: ascending flags for *sort_columns*
    :param save_best: unused, kept for interface compatibility
    :param ignore: list of substrings; models whose key contains one are skipped
    :param replace: list of (pattern, label) pairs used to relabel models
    """
    fig, axes = plt.subplots(nrows=1, ncols=4, figsize=tam)
    axes[0].set_title(r'$\tau=0.05$')
    axes[1].set_title(r'$\tau=0.25$')
    axes[2].set_title(r'$\tau=0.75$')
    axes[3].set_title(r'$\tau=0.95$')

    models = {}

    for experiment in experiments:

        mdl = {}

        dat_syn = pd.read_csv(experiment[0], sep=";", usecols=interval_dataframe_synthetic_columns())

        bests = find_best(dat_syn, sort_columns, sort_ascend)

        dat_ana = pd.read_csv(experiment[1], sep=";", usecols=interval_dataframe_analytic_columns(experiment[2]))

        q05 = []
        q25 = []
        q75 = []
        q95 = []

        data_columns = analytical_data_columns(experiment[2])

        for b in sorted(bests.keys()):
            if check_ignore_list(b, ignore):
                continue

            if b not in models:
                models[b] = {}
                models[b]['q05'] = []
                models[b]['q25'] = []
                models[b]['q75'] = []
                models[b]['q95'] = []

            if b not in mdl:
                mdl[b] = {}
                mdl[b]['q05'] = []
                mdl[b]['q25'] = []
                mdl[b]['q75'] = []
                mdl[b]['q95'] = []

            best = bests[b]

            print(best)

            # select the analytic rows of this best model configuration
            tmp = dat_ana[(dat_ana.Model == best["Model"]) & (dat_ana.Order == best["Order"])
                          & (dat_ana.Scheme == best["Scheme"]) & (dat_ana.Partitions == best["Partitions"])]

            tmpl = extract_measure(tmp, 'Q05', data_columns)
            mdl[b]['q05'].extend(tmpl)
            q05.extend(tmpl)

            tmpl = extract_measure(tmp, 'Q25', data_columns)
            mdl[b]['q25'].extend(tmpl)
            q25.extend(tmpl)

            tmpl = extract_measure(tmp, 'Q75', data_columns)
            mdl[b]['q75'].extend(tmpl)
            q75.extend(tmpl)

            tmpl = extract_measure(tmp, 'Q95', data_columns)
            mdl[b]['q95'].extend(tmpl)
            q95.extend(tmpl)

            models[b]['label'] = check_replace_list(best["Model"] + " " + str(best["Order"]), replace)

        q05_param = scale_params(q05)
        q25_param = scale_params(q25)
        q75_param = scale_params(q75)
        q95_param = scale_params(q95)

        # Scale with the parameters of THIS experiment and accumulate per
        # model.  Iterate mdl (models present in this experiment) rather than
        # models (every model seen so far): a model that appeared only in an
        # earlier experiment has no entry in mdl and would raise a KeyError
        # here (this mirrors unified_scaled_probabilistic).
        for key in sorted(mdl.keys()):
            models[key]['q05'].extend(scale(mdl[key]['q05'], q05_param))
            models[key]['q25'].extend(scale(mdl[key]['q25'], q25_param))
            models[key]['q75'].extend(scale(mdl[key]['q75'], q75_param))
            models[key]['q95'].extend(scale(mdl[key]['q95'], q95_param))

    q05 = []
    q25 = []
    q75 = []
    q95 = []
    labels = []

    for key in sorted(models.keys()):
        q05.append(models[key]['q05'])
        q25.append(models[key]['q25'])
        q75.append(models[key]['q75'])
        q95.append(models[key]['q95'])
        labels.append(models[key]['label'])

    axes[0].boxplot(q05, labels=labels, vert=False, autorange=True, showmeans=True)
    axes[1].boxplot(q25, labels=labels, vert=False, autorange=True, showmeans=True)
    axes[2].boxplot(q75, labels=labels, vert=False, autorange=True, showmeans=True)
    axes[3].boxplot(q95, labels=labels, vert=False, autorange=True, showmeans=True)

    plt.tight_layout()

    Util.show_and_save_image(fig, file, save)
def plot_dataframe_interval_pinball(file_synthetic, file_analytic, experiments, tam, save=False, file=None,
                                    sort_columns=['COVAVG','SHARPAVG','COVSTD','SHARPSTD'],
                                    sort_ascend=[True, False, True, True], save_best=False,
                                    ignore=None, replace=None):
    """
    Boxplot the pinball losses (quantiles 0.05/0.25/0.75/0.95) of the best
    interval-forecast models of one experiment.

    :param file_synthetic: synthetic (aggregated) CSV written by save_dataframe_interval
    :param file_analytic: analytic (raw measurements) CSV written by save_dataframe_interval
    :param experiments: number of experiment runs in the analytic file
    :param tam: figure size passed to matplotlib
    :param save: when True, save the figure to *file*
    :param file: image file name
    :param sort_columns: columns used to rank the models when picking the best ones
    :param sort_ascend: ascending flags for *sort_columns*
    :param save_best: when True, also dump the selected best models to a "best" CSV
    :param ignore: list of substrings; models whose key contains one are skipped
    :param replace: list of (pattern, label) pairs used to relabel models
    """
    fig, axes = plt.subplots(nrows=1, ncols=4, figsize=tam)
    axes[0].set_title(r'$\tau=0.05$')
    axes[1].set_title(r'$\tau=0.25$')
    axes[2].set_title(r'$\tau=0.75$')
    axes[3].set_title(r'$\tau=0.95$')

    dat_syn = pd.read_csv(file_synthetic, sep=";", usecols=interval_dataframe_synthetic_columns())

    bests = find_best(dat_syn, sort_columns, sort_ascend)

    dat_ana = pd.read_csv(file_analytic, sep=";", usecols=interval_dataframe_analytic_columns(experiments))

    data_columns = analytical_data_columns(experiments)

    if save_best:
        dat = pd.DataFrame.from_dict(bests, orient='index')
        dat.to_csv(Util.uniquefilename(file_synthetic.replace("synthetic","best")), sep=";", index=False)

    q05 = []
    q25 = []
    q75 = []
    q95 = []
    labels = []

    for b in sorted(bests.keys()):
        if check_ignore_list(b, ignore):
            continue
        best = bests[b]
        # select the analytic rows of this best model configuration
        df = dat_ana[(dat_ana.Model == best["Model"]) & (dat_ana.Order == best["Order"])
                     & (dat_ana.Scheme == best["Scheme"]) & (dat_ana.Partitions == best["Partitions"])]
        q05.append(extract_measure(df, 'Q05', data_columns))
        q25.append(extract_measure(df, 'Q25', data_columns))
        q75.append(extract_measure(df, 'Q75', data_columns))
        q95.append(extract_measure(df, 'Q95', data_columns))
        labels.append(check_replace_list(best["Model"] + " " + str(best["Order"]), replace))

    axes[0].boxplot(q05, labels=labels, vert=False, autorange=True, showmeans=True)
    axes[1].boxplot(q25, labels=labels, vert=False, autorange=True, showmeans=True)
    axes[2].boxplot(q75, labels=labels, vert=False, autorange=True, showmeans=True)
    axes[3].boxplot(q95, labels=labels, vert=False, autorange=True, showmeans=True)

    plt.tight_layout()

    Util.show_and_save_image(fig, file, save)
def save_dataframe_probabilistic(experiments, file, objs, crps, times, save, synthetic, steps, method):
    """
    Save benchmark results for m-step ahead probabilistic forecasters.

    :param experiments: number of experiment runs (analytic column names)
    :param file: target CSV file name (made unique via Util.uniquefilename)
    :param objs: dict of fitted model objects per model id
    :param crps: dict of CRPS measurements per model id
    :param times: dict of execution times per model id
    :param save: when True, write the DataFrame to *file*
    :param synthetic: when True, store aggregated statistics (mean/std) per
        model; otherwise one row per (model, measure) with raw measurements
    :param steps: dict of forecasting horizon per model id
    :param method: dict of forecasting method per model id
    :return: the assembled DataFrame
    """
    ret = []

    if synthetic:
        # Single pass over the models; an accidentally duplicated nested loop
        # here previously rebuilt (and reset) ret once per model.
        for k in sorted(objs.keys()):
            try:
                mod = []
                mfts = objs[k]
                mod.append(mfts.shortname)
                mod.append(mfts.order)
                if not mfts.benchmark_only:
                    mod.append(mfts.partitioner.name)
                    mod.append(mfts.partitioner.partitions)
                    mod.append(len(mfts))
                else:
                    # benchmark-only models have no partitioner/size
                    mod.append('-')
                    mod.append('-')
                    mod.append('-')
                mod.append(steps[k])
                mod.append(method[k])
                mod.append(np.round(np.nanmean(crps[k]), 2))
                mod.append(np.round(np.nanstd(crps[k]), 2))
                mod.append(np.round(np.nanmean(times[k]), 4))
                mod.append(np.round(np.nanstd(times[k]), 4))
                ret.append(mod)
            except Exception as ex:
                # A failing model is reported and skipped; the rest are kept.
                print("Erro ao salvar ", k)
                print("Exceção ", ex)
        columns = probabilistic_dataframe_synthetic_columns()
    else:
        for k in sorted(objs.keys()):
            try:
                mfts = objs[k]
                n = mfts.shortname
                o = mfts.order
                if not mfts.benchmark_only:
                    s = mfts.partitioner.name
                    p = mfts.partitioner.partitions
                    l = len(mfts)
                else:
                    s = '-'
                    p = '-'
                    l = '-'
                st = steps[k]
                mt = method[k]
                tmp = [n, o, s, p, l, st, mt, 'CRPS']
                tmp.extend(crps[k])
                ret.append(deepcopy(tmp))
                tmp = [n, o, s, p, l, st, mt, 'TIME']
                tmp.extend(times[k])
                ret.append(deepcopy(tmp))
            except Exception as ex:
                print("Erro ao salvar ", k)
                print("Exceção ", ex)
        columns = probabilistic_dataframe_analytic_columns(experiments)
    dat = pd.DataFrame(ret, columns=columns)
    if save: dat.to_csv(Util.uniquefilename(file), sep=";")
    return dat
def probabilistic_dataframe_analytic_columns(experiments):
    """
    Column names of the analytic probabilistic-forecast CSV: eight metadata
    columns followed by one column ('0'..'N-1') per experiment run.
    """
    header = ["Model", "Order", "Scheme", "Partitions", "Size", "Steps", "Method", "Measure"]
    return header + [str(run) for run in np.arange(0, experiments)]
def probabilistic_dataframe_synthetic_columns():
    """
    Column names of the synthetic (aggregated) probabilistic-forecast CSV:
    seven metadata columns followed by mean/std pairs of CRPS and time.
    """
    return ["Model", "Order", "Scheme", "Partitions", "Size", "Steps", "Method",
            "CRPSAVG", "CRPSSTD", "TIMEAVG", "TIMESTD"]
def cast_dataframe_to_synthetic_probabilistic(df, data_columns):
    """
    Aggregate the raw probabilistic-forecast measurements of one model
    configuration into summary statistics.

    :param df: analytic DataFrame slice with one row per measure
    :param data_columns: names of the columns holding the raw measurements
    :return: [CRPS avg, CRPS std, TIME avg, TIME std], rounded to 2 digits
    """
    summary = []
    for measure, digits in (('CRPS', 2), ('TIME', 2)):
        values = extract_measure(df, measure, data_columns)
        summary.append(np.round(np.nanmean(values), digits))
        summary.append(np.round(np.nanstd(values), digits))
    return summary
def unified_scaled_probabilistic(experiments, tam, save=False, file=None,
                                 sort_columns=['CRPSAVG', 'CRPSSTD'],
                                 sort_ascend=[True, True], save_best=False,
                                 ignore=None, replace=None):
    """
    Boxplot the CRPS of the best probabilistic models of several experiments
    on a common 0-1 scale.

    :param experiments: list of (synthetic CSV, analytic CSV, #runs) tuples
    :param tam: figure size passed to matplotlib
    :param save: when True, save the figure to *file*
    :param file: image file name
    :param sort_columns: columns used to rank the models
    :param sort_ascend: ascending flags for *sort_columns*
    :param save_best: unused, kept for interface compatibility
    :param ignore: list of substrings; models whose key contains one are skipped
    :param replace: list of (pattern, label) pairs used to relabel models
    """
    # Two stacked axes (interval / distribution ahead), mirroring
    # plot_dataframe_probabilistic; a single-axes figure was previously
    # created here, which made the axes[0]/axes[1] indexing below fail
    # (plt.subplots(1, 1) returns a bare Axes, not an array).
    fig, axes = plt.subplots(nrows=2, ncols=1, figsize=tam)
    axes[0].set_title('CRPS Interval Ahead')
    axes[1].set_title('CRPS Distribution Ahead')

    models = {}

    for experiment in experiments:

        print(experiment)

        mdl = {}

        dat_syn = pd.read_csv(experiment[0], sep=";", usecols=probabilistic_dataframe_synthetic_columns())

        bests = find_best(dat_syn, sort_columns, sort_ascend)

        dat_ana = pd.read_csv(experiment[1], sep=";", usecols=probabilistic_dataframe_analytic_columns(experiment[2]))

        crps1 = []
        crps2 = []

        data_columns = analytical_data_columns(experiment[2])

        for b in sorted(bests.keys()):
            if check_ignore_list(b, ignore):
                continue

            if b not in models:
                models[b] = {}
                models[b]['crps1'] = []
                models[b]['crps2'] = []

            if b not in mdl:
                mdl[b] = {}
                mdl[b]['crps1'] = []
                mdl[b]['crps2'] = []

            best = bests[b]

            print(best)

            # select the analytic rows of this best model configuration
            tmp = dat_ana[(dat_ana.Model == best["Model"]) & (dat_ana.Order == best["Order"])
                          & (dat_ana.Scheme == best["Scheme"]) & (dat_ana.Partitions == best["Partitions"])]

            # NOTE(review): these measure labels ('CRPS_Interval' /
            # 'CRPS_Distribution') do not match the 'CRPS' label written by
            # save_dataframe_probabilistic -- confirm against the actual
            # analytic files being read.
            tmpl = extract_measure(tmp, 'CRPS_Interval', data_columns)
            mdl[b]['crps1'].extend(tmpl)
            crps1.extend(tmpl)

            tmpl = extract_measure(tmp, 'CRPS_Distribution', data_columns)
            mdl[b]['crps2'].extend(tmpl)
            crps2.extend(tmpl)

            models[b]['label'] = check_replace_list(best["Model"] + " " + str(best["Order"]), replace)

        crps1_param = scale_params(crps1)
        crps2_param = scale_params(crps2)

        # scale per experiment, then accumulate per model
        for key in sorted(mdl.keys()):
            print(key)
            models[key]['crps1'].extend(scale(mdl[key]['crps1'], crps1_param))
            models[key]['crps2'].extend(scale(mdl[key]['crps2'], crps2_param))

    crps1 = []
    crps2 = []
    labels = []

    for key in sorted(models.keys()):
        crps1.append(models[key]['crps1'])
        crps2.append(models[key]['crps2'])
        labels.append(models[key]['label'])

    axes[0].boxplot(crps1, labels=labels, autorange=True, showmeans=True)
    axes[1].boxplot(crps2, labels=labels, autorange=True, showmeans=True)

    plt.tight_layout()

    Util.show_and_save_image(fig, file, save)
def plot_dataframe_probabilistic(file_synthetic, file_analytic, experiments, tam, save=False, file=None,
                                 sort_columns=['CRPS1AVG', 'CRPS2AVG', 'CRPS1STD', 'CRPS2STD'],
                                 sort_ascend=[True, True, True, True], save_best=False,
                                 ignore=None, replace=None):
    """
    Boxplot the CRPS (interval and distribution ahead) of the best
    probabilistic models of one experiment.

    :param file_synthetic: synthetic (aggregated) CSV written by save_dataframe_probabilistic
    :param file_analytic: analytic (raw measurements) CSV written by save_dataframe_probabilistic
    :param experiments: number of experiment runs in the analytic file
    :param tam: figure size passed to matplotlib
    :param save: when True, save the figure to *file*
    :param file: image file name
    :param sort_columns: columns used to rank the models when picking the best ones
    :param sort_ascend: ascending flags for *sort_columns*
    :param save_best: when True, also dump the selected best models to a "best" CSV
    :param ignore: list of substrings; models whose key contains one are skipped
    :param replace: list of (pattern, label) pairs used to relabel models
    """
    fig, axes = plt.subplots(nrows=2, ncols=1, figsize=tam)
    axes[0].set_title('CRPS')
    axes[1].set_title('CRPS')

    dat_syn = pd.read_csv(file_synthetic, sep=";", usecols=probabilistic_dataframe_synthetic_columns())

    bests = find_best(dat_syn, sort_columns, sort_ascend)

    dat_ana = pd.read_csv(file_analytic, sep=";", usecols=probabilistic_dataframe_analytic_columns(experiments))

    data_columns = analytical_data_columns(experiments)

    if save_best:
        dat = pd.DataFrame.from_dict(bests, orient='index')
        dat.to_csv(Util.uniquefilename(file_synthetic.replace("synthetic","best")), sep=";", index=False)

    crps1 = []
    crps2 = []
    labels = []

    for b in sorted(bests.keys()):
        if check_ignore_list(b, ignore):
            continue
        best = bests[b]
        # select the analytic rows of this best model configuration
        df = dat_ana[(dat_ana.Model == best["Model"]) & (dat_ana.Order == best["Order"])
                     & (dat_ana.Scheme == best["Scheme"]) & (dat_ana.Partitions == best["Partitions"])]
        # NOTE(review): measure labels differ from the 'CRPS' label written by
        # save_dataframe_probabilistic -- confirm against the analytic files.
        crps1.append( extract_measure(df,'CRPS_Interval',data_columns) )
        crps2.append(extract_measure(df, 'CRPS_Distribution', data_columns))
        labels.append(check_replace_list(best["Model"] + " " + str(best["Order"]), replace))

    axes[0].boxplot(crps1, labels=labels, autorange=True, showmeans=True)
    axes[1].boxplot(crps2, labels=labels, autorange=True, showmeans=True)

    plt.tight_layout()

    Util.show_and_save_image(fig, file, save)
| petroniocandido/pyFTS | pyFTS/benchmarks/Util.py | Python | gpl-3.0 | 47,305 |
from peewee import *  # no other way to reach playhouse :(
from playhouse import flask_utils as peewee_flask_utils
from playhouse import signals as peewee_signals
# Shared application database handle.  NOTE(review): FlaskDB() is created
# unconfigured here -- presumably initialised with the Flask app elsewhere
# (FlaskDB.init_app); confirm against the application setup code.
database = peewee_flask_utils.FlaskDB()
| thedod/boilerplate-peewee-flask | application/sitepack/db.py | Python | gpl-3.0 | 204 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django management entry point: point Django at this project's
    # settings module, then dispatch the command-line arguments
    # (runserver, migrate, ...) to the django-admin machinery.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "DjangoServer.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
| AcroManiac/AcroLink | Server/DjangoServer/manage.py | Python | gpl-3.0 | 810 |
# coding=utf-8
import unittest
"""587. Erect the Fence
https://leetcode.com/problems/erect-the-fence/description/
There are some trees, where each tree is represented by (x,y) coordinate in a
two-dimensional garden. Your job is to fence the entire garden using the
**minimum length** of rope as it is expensive. The garden is well fenced only
if all the trees are enclosed. Your task is to help find the coordinates of
trees which are exactly located on the fence perimeter.
**Example 1:**
**Input:** [[1,1],[2,2],[2,0],[2,4],[3,3],[4,2]]
**Output:** [[1,1],[2,0],[4,2],[3,3],[2,4]]
**Explanation:**

**Example 2:**
**Input:** [[1,2],[2,2],[4,2]]
**Output:** [[1,2],[2,2],[4,2]]
**Explanation:**

Even you only have trees in a line, you need to use rope to enclose them.
Note:
1. All trees should be enclosed together. You cannot cut the rope to enclose trees that will separate them in more than one group.
2. All input integers will range from 0 to 100.
3. The garden has at least one tree.
4. All coordinates are distinct.
5. Input points have **NO** order. No order required for output.
Similar Questions:
"""
# Definition for a point.
# class Point(object):
# def __init__(self, a=0, b=0):
# self.x = a
# self.y = b
class Solution(object):
    def outerTrees(self, points):
        """
        Return the trees lying on the convex-hull fence, including points that
        are collinear on the hull boundary (Andrew's monotone chain, popping
        only strict clockwise turns).

        Points may be objects with ``x``/``y`` attributes (the original Point
        class) or plain ``(x, y)`` pairs; the original point objects are
        returned.  The previous version was an empty stub returning None.

        :type points: List[Point]
        :rtype: List[Point]
        """
        def coords(p):
            # Support both Point objects and (x, y) tuples.
            return (p.x, p.y) if hasattr(p, 'x') else (p[0], p[1])

        def cross(o, a, b):
            # z-component of (a - o) x (b - o); negative means clockwise turn.
            ox, oy = coords(o)
            ax, ay = coords(a)
            bx, by = coords(b)
            return (ax - ox) * (by - oy) - (ay - oy) * (bx - ox)

        def half_hull(ordered):
            # Build one hull chain; collinear points (cross == 0) are kept
            # because the problem requires trees ON the fence to be reported.
            chain = []
            for p in ordered:
                while len(chain) >= 2 and cross(chain[-2], chain[-1], p) < 0:
                    chain.pop()
                chain.append(p)
            return chain

        pts = sorted(points, key=coords)
        if len(pts) <= 2:
            return pts

        hull = []
        seen = set()
        # lower hull (left-to-right) + upper hull (right-to-left), deduplicated
        for p in half_hull(pts) + half_hull(pts[::-1]):
            c = coords(p)
            if c not in seen:
                seen.add(c)
                hull.append(p)
        return hull

    def test(self):
        # kept for interface compatibility with the original template
        pass
# Script entry point.  NOTE(review): unittest.main() only discovers
# unittest.TestCase subclasses; Solution is not one, so no test actually
# runs here -- confirm whether a TestCase wrapper was intended.
if __name__ == "__main__":
    unittest.main()
| openqt/algorithms | leetcode/python/lc587-erect-the-fence.py | Python | gpl-3.0 | 1,702 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on May 12, 2014
Model Paper
fields:
conf
year
passcode
paper id
status
title
category1
category1
keywords
@author: riccardo
'''
class Paper(object):
    """
    Model of a conference paper: conference, year, passcode, paper id,
    status, title, two categories and keywords, exposed through Java-style
    get/set/del accessors wrapped as properties.
    """
    # Class-level defaults for the name-mangled "private" fields; instances
    # receive their own values in __init__ (except passcode, see below).
    __conf=""
    __year=""
    __passcode=""
    __pid=""
    __status=""
    __title=""
    __category1=""
    __category2=""
    __keywords=""

    def __init__(self, conf,year,pid, status, title, category1, category2, keywords):
        # NOTE(review): passcode is not an __init__ parameter, so a new
        # instance keeps the class-level empty string until set_passcode()
        # is called -- confirm this is intended.
        self.__conf = conf
        self.__year = year
        self.__pid = pid
        self.__status = status
        self.__title = title
        self.__category1 = category1
        self.__category2 = category2
        self.__keywords = keywords

    def get_passcode(self):
        return self.__passcode

    def get_pid(self):
        return self.__pid

    def get_status(self):
        return self.__status

    def get_title(self):
        return self.__title

    def get_category_1(self):
        return self.__category1

    def get_category_2(self):
        return self.__category2

    def get_keywords(self):
        return self.__keywords

    def set_passcode(self, value):
        self.__passcode = value

    def set_pid(self, value):
        self.__pid = value

    def set_status(self, value):
        self.__status = value

    def set_title(self, value):
        self.__title = value

    def set_category_1(self, value):
        self.__category1 = value

    def set_category_2(self, value):
        self.__category2 = value

    def set_keywords(self, value):
        self.__keywords = value

    def del_passcode(self):
        del self.__passcode

    def del_pid(self):
        del self.__pid

    def del_status(self):
        del self.__status

    def del_title(self):
        del self.__title

    def del_category_1(self):
        del self.__category1

    def del_category_2(self):
        del self.__category2

    def del_keywords(self):
        del self.__keywords

    # property wrappers around the accessors above
    passcode = property(get_passcode, set_passcode, del_passcode, "passcode's docstring")
    pid = property(get_pid, set_pid, del_pid, "pid's docstring")
    status = property(get_status, set_status, del_status, "status's docstring")
    title = property(get_title, set_title, del_title, "title's docstring")
    category1 = property(get_category_1, set_category_1, del_category_1, "category1's docstring")
    category2 = property(get_category_2, set_category_2, del_category_2, "category2's docstring")
    keywords = property(get_keywords, set_keywords, del_keywords, "keywords's docstring")

    def get_conf(self):
        return self.__conf

    def get_year(self):
        return self.__year

    def set_conf(self, value):
        self.__conf = value

    def set_year(self, value):
        self.__year = value

    def del_conf(self):
        del self.__conf

    def del_year(self):
        del self.__year

    conf = property(get_conf, set_conf, del_conf, "conf's docstring")
    year = property(get_year, set_year, del_year, "year's docstring")
| riccardodg/lodstuff | lremap/it.cnr.ilc.lremapowl/src/lremapobj/paper.py | Python | gpl-3.0 | 3,059 |
## pythonFlu - Python wrapping for OpenFOAM C++ API
## Copyright (C) 2010- Alexey Petrov
## Copyright (C) 2009-2010 Pebble Bed Modular Reactor (Pty) Limited (PBMR)
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
## See http://sourceforge.net/projects/pythonflu
##
## Author : Alexey PETROV, Andrey SIMURZIN
##
#--------------------------------------------------------------------------------------
class TLoadHelper( object ):
    """Lazy attribute loader.

    Maps attribute names either to ready values or to dotted import-path
    strings ("pkg.mod.Name"); a string entry is imported on first access.
    The resolved value is cached on the instance, so __getattr__ only
    fires once per attribute.

    Replaces the original py2-only dict.has_key() and string-``exec``
    import/caching with ``in``, :func:`importlib.import_module`,
    :func:`getattr` and :func:`setattr` (works on Python 2.7 and 3).
    """
    def __init__( self, the_dict ):
        # mapping: attribute name -> value, or dotted import path string
        self._dict = the_dict
        pass
    def __getattr__( self, the_attr ):
        # only called when normal attribute lookup fails (first access)
        if the_attr not in self._dict:
            raise AttributeError( "There is no \"%s\" attribute " %the_attr)
        a_result = self._dict[ the_attr ]
        if type( a_result ) == str:
            # dotted path: import the module and pull the named object out
            import importlib
            an_interface = a_result.split( '.' )[ -1 ]
            an_interface_path = ".".join( a_result.split( "." )[ :-1 ] )
            a_result = getattr( importlib.import_module( an_interface_path ), an_interface )
        # cache on the instance so __getattr__ is not hit again
        setattr( self, the_attr, a_result )
        return a_result
#--------------------------------------------------------------------------------------
class TManLoadHelper( TLoadHelper ):
    """Callable variant of TLoadHelper for "managed" (holder-wrapped) objects."""
    def __call__( self, theExpr, theDeps ):
        # wrap the expression in its holder, keeping theDeps alive with it
        result = theExpr.holder( theDeps )
        # hand ownership of the underlying C++ object over to the holder
        # (NOTE(review): assumes theExpr is a SWIG proxy, so .this.disown()
        # prevents Python from deleting the C++ side -- confirm)
        theExpr.this.disown()
        return result
    pass
pass
#--------------------------------------------------------------------------------------
| alexey4petrov/pythonFlu | Foam/helper.py | Python | gpl-3.0 | 2,152 |
from src.deenuxapi.deezer.Model import Model
from src.deenuxapi.deezer.model.Artist import Artist
class Track(Model):
    """A single Deezer track: id, title, performing artist and duration."""

    def __init__(self, id: int, title: str, artist: Artist, duration: int = -1):
        """Create a track.

        :param id: track's ID
        :param title: track's full title
        :param artist: track's artist
        :param duration: track's duration in seconds (default -1 = unknown)
        """
        super().__init__(id)
        self.__title = title
        self.__artist = artist
        self.__duration = duration

    @staticmethod
    def map(obj):
        """Build a Track (with its nested Artist) from a deserialized API dict."""
        track_artist = Artist(
            id=obj['artist']['id'],
            name=obj['artist']['name']
        )
        return Track(
            id=obj['id'],
            title=obj['title'],
            artist=track_artist,
            duration=obj['duration']
        )

    def __str__(self):
        # "Artist - Title", e.g. "Daft Punk - One More Time"
        return '%s - %s' % (self.__artist.name, self.__title)

    # -- accessors ------------------------------------------------------

    @property
    def title(self):
        """Full title of the track."""
        return self.__title

    @title.setter
    def title(self, title: str):
        self.__title = title

    @property
    def artist(self):
        """Artist performing the track."""
        return self.__artist

    @artist.setter
    def artist(self, artist: Artist):
        self.__artist = artist

    @property
    def duration(self):
        """Duration in seconds (-1 if unknown)."""
        return self.__duration

    @duration.setter
    def duration(self, duration: int):
        self.__duration = duration
| alexarnautu/deenux | src/deenuxapi/deezer/model/Track.py | Python | gpl-3.0 | 1,503 |
# Linktastic Module
# - A python2/3 compatible module that can create hardlinks/symlinks on windows-based systems
#
# Linktastic is distributed under the MIT License. The following are the terms and conditions of using Linktastic.
#
# The MIT License (MIT)
# Copyright (c) 2012 Solipsis Development
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial
# portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
# LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import subprocess
from subprocess import CalledProcessError
import os
# Prevent spaces from messing with us!
def _escape_param(param):
return '"%s"' % param
# Private helpers to create links on nt-based systems.
# All four mklink invocations share the same shape, so the subprocess
# plumbing is factored into a single helper; each wrapper only picks the
# mklink switch.

def _mklink(switch, src, dest):
    """Run ``cmd /C mklink [switch] dest src``; raise IOError on failure.

    switch -- mklink option: '/H' hard link, '/J' junction, '/D'
              directory symlink, or '' for a plain file symlink.
    """
    option = '%s ' % switch if switch else ''
    try:
        subprocess.check_output(
            'cmd /C mklink %s%s %s' % (option, _escape_param(dest), _escape_param(src)),
            stderr=subprocess.STDOUT)
    except CalledProcessError as err:
        # surface mklink's own error message to the caller
        raise IOError(err.output.decode('utf-8'))
    # TODO, find out what kind of messages Windows sends us from mklink
    # assume if they ret-coded 0 we're good


def _link_windows(src, dest):
    """Create a hard link to *src* named *dest* (mklink /H)."""
    _mklink('/H', src, dest)


def _symlink_windows(src, dest):
    """Create a file symlink to *src* named *dest* (mklink, no switch)."""
    _mklink('', src, dest)


def _dirlink_windows(src, dest):
    """Create a directory junction to *src* named *dest* (mklink /J).

    NOTE(review): despite the name this creates a *junction*, while
    _junctionlink_windows creates a directory *symlink*; the two names
    look swapped relative to the mklink switches -- confirm before
    renaming, callers may rely on the current behavior.
    """
    _mklink('/J', src, dest)


def _junctionlink_windows(src, dest):
    """Create a directory symlink to *src* named *dest* (mklink /D).

    See the naming note on _dirlink_windows.
    """
    _mklink('/D', src, dest)
# Public API: cross-platform link creation.  On Windows the work is
# delegated to the cmd.exe mklink wrappers above; everywhere else the
# native os.link / os.symlink calls are used directly.

def link(src, dest):
    """Create a hard link to *src* named *dest*; unlike os.link this also
    works on nt systems."""
    if os.name != 'nt':
        os.link(src, dest)
    else:
        _link_windows(src, dest)


def symlink(src, dest):
    """Create a symlink to *src* named *dest*, without failing on nt."""
    if os.name != 'nt':
        os.symlink(src, dest)
    else:
        _symlink_windows(src, dest)


def dirlink(src, dest):
    """Create a directory link: a junction on nt, a symlink elsewhere."""
    if os.name != 'nt':
        os.symlink(src, dest)
    else:
        _dirlink_windows(src, dest)


def junctionlink(src, dest):
    """Create a directory link: a directory symlink on nt, a symlink elsewhere."""
    if os.name != 'nt':
        os.symlink(src, dest)
    else:
        _junctionlink_windows(src, dest)
import mapnik
import subprocess,PIL.Image,cStringIO as StringIO
import time,sys,os
# Half the extent of the Web-Mercator (EPSG:3857) world in metres;
# valid x/y coordinates lie in [-ew, ew].
ew = 20037508.3428
# Metatile factor: render tz x tz tiles (tz*256 px square) in one pass.
tz = 8
def make_mapnik(fn, tabpp = None, scale=None, srs=None, mp=None, avoidEdges=False, abspath=True):
    """Compile the CartoCSS project *fn* with ``carto`` and load it into a
    mapnik Map.

    fn         -- path to the carto project file
    tabpp      -- if given, replace the "planet_osm" table prefix with this
    scale      -- if given, multiply every ScaleDenominator by this factor
    srs        -- if given, override the map's spatial reference system
    mp         -- existing mapnik.Map to load into (default: new tz*256 square map)
    avoidEdges -- if True, add avoid-edges="true" to ShieldSymbolizers
    abspath    -- if True, use fn's directory as base path for relative resources
    """
    # run carto, dropping millstone progress noise from its output
    cc=[l for l in subprocess.check_output(['carto',fn]).split("\n") if not l.startswith('[millstone')]
    if scale!=None:
        for i,c in enumerate(cc):
            if 'ScaleDenominator' in c:
                # crude textual rewrite: both <MinScaleDenominator> and
                # <MaxScaleDenominator> are 21 chars (closing tags 22), so
                # this slice extracts just the number between the tags
                sd=c.strip()[21:-22]
                nsd=str(int(sd)*scale)
                #print i,sd,"=>",nsd,
                c=c.replace(sd, nsd)
                #print c
                cc[i]=c
    # base path for relative file references (symbols, data files, ...)
    bsp=''
    if abspath:
        a,b=os.path.split(fn)
        if a:
            bsp=a
    #for i,c in enumerate(cc):
    #    if 'file' in c:
    #        if 'file=' in c:
    #            cc[i] = c.replace('file="','file="'+a+'/')
    #        elif 'name="file"><![CDATA[' in c:
    #            cc[i] = c.replace('CDATA[','CDATA['+a+'/')
    if avoidEdges:
        for i,c in enumerate(cc):
            if '<ShieldSymbolizer size' in c:
                cs = c.replace("ShieldSymbolizer size", "ShieldSymbolizer avoid-edges=\"true\" size")
                cc[i]=cs
    if tabpp != None:
        # point all layers at a different table prefix than "planet_osm"
        cc=[l.replace("planet_osm",tabpp) for l in cc]
    #cc2=[c.replace("clip=\"false","clip=\"true") for c in cc]
    #cc3=[c.replace("file=\"symbols", "file=\""+root+"/symbols") for c in cc2]
    #cc4=[c.replace("CDATA[data", "CDATA["+root+"/data") for c in cc3]
    if mp==None:
        mp = mapnik.Map(256*tz,256*tz)
    mapnik.load_map_from_string(mp,"\n".join(cc),False,bsp)
    if srs!=None:
        mp.srs=srs
    #mp.buffer_size=128
    return mp
def tilebound(z, x, y, tzp):
    """Return (metatile_x, metatile_y, debug_string, mercator_bbox) for
    tile (x, y) at zoom z, grouping tiles into tzp x tzp metatiles."""
    half_tiles = 1 << (z - 1)
    # side length of one metatile in mercator metres
    meta_size = ew / half_tiles * tzp
    # metatile indices (py2 integer division via '/')
    meta_x = x / tzp
    meta_y = y / tzp
    box = mapnik.Box2d(-ew + meta_size * meta_x,
                       ew - meta_size * (meta_y + 1),
                       -ew + meta_size * (meta_x + 1),
                       ew - meta_size * meta_y)
    info = "%d %d %d {%d %d %d %f} => %s" % (z, x, y, half_tiles, meta_x, meta_y, meta_size, box)
    return meta_x, meta_y, info, box
def render_im(mp,bx,width,height=None, scale_factor=1.0, buffer_size=256):
    """Render bounding box *bx* of map *mp* at width x height pixels and
    return the result as a PIL 'RGBA' image (height defaults to width)."""
    if height==None:
        height=width
    mp.resize(width,height)
    mp.zoom_to_box(bx)
    # extra border rendered around the image so labels/symbols crossing
    # tile edges come out consistent between neighbouring renders
    mp.buffer_size = buffer_size
    im=mapnik.Image(mp.width,mp.height)
    mapnik.render(mp,im, scale_factor)
    return PIL.Image.frombytes('RGBA',(mp.width,mp.height),im.tostring())
def render_tile(mp,z,x,y):
    """Render the metatile containing tile (x, y) at zoom *z* and return an
    iterator of ((z, x', y'), png_bytes) for every 256px subtile."""
    st=time.time()
    # metatile factor grows with zoom: render more subtiles in one pass
    tzp = 1
    if z==13: tzp=2
    if z==14: tzp=4
    if z>=15: tzp=8
    #tzp = tz if z>10 else 1
    xx,yy,mm,bx=tilebound(z,x,y,tzp)
    print mm,
    sys.stdout.flush()
    pim = render_im(mp,bx,tzp*256)
    # elapsed render time, appended to the debug line above
    print "%-8.1fs" % (time.time()-st,)
    return iter_subtiles(pim,xx,yy,z,tzp)
def iter_subtiles(pim, xx,yy,z,tzp,ts=256):
    """Split metatile image *pim* (metatile indices xx, yy) into ts x ts
    subtiles and yield ((z, tile_x, tile_y), png_bytes) for each."""
    for i in xrange(tzp):
        for j in xrange(tzp):
            # absolute tile coordinates of this subtile
            xp = xx*tzp+i
            yp = yy*tzp+j
            # crop box is (left, upper, right, lower) in pixels
            pp = pim.crop([i*ts,j*ts,(i+1)*ts,(j+1)*ts])
            #return pim.tostring('png')
            ss=StringIO.StringIO()
            pp.save(ss,format='PNG')
            yield (z,xp,yp),ss.getvalue()
| jharris2268/osmquadtreeutils | osmquadtreeutils/rendertiles.py | Python | gpl-3.0 | 3,227 |
from django.utils.safestring import mark_safe
from django.contrib.staticfiles.templatetags.staticfiles import static
def locked():
    """Return safe HTML for the closed-padlock icon."""
    icon_url = static('manager/padlock_close.png')
    markup = '<img src="%s" alt="locked" style="border:0px; margin: 0px; padding: 0px"/>' % icon_url
    return mark_safe(markup)
def unlocked():
    """Return safe HTML for the open-padlock icon.

    Bug fix: the alt text previously read "locked", mislabelling the
    open-padlock image (e.g. for screen readers); it now says "unlocked".
    """
    return mark_safe('<img src="%s" alt="unlocked" style="border:0px; margin: 0px; padding: 0px"/>' % (
        static('manager/padlock_open.png')
    ))
| efce/voltPy | manager/helpers/html.py | Python | gpl-3.0 | 458 |
# coding=utf-8
from qtpy import QtWidgets
class MenuShortcuts(QtWidgets.QWidget):
    """
    Window listing the application's keyboard shortcuts in a two-column grid.
    """

    def __init__(self):
        QtWidgets.QWidget.__init__(self)
        self.setWindowTitle('Shortcuts')
        grid = QtWidgets.QGridLayout()
        self.setLayout(grid)
        # header row
        header_key = QtWidgets.QLabel('<b>Key</b>')
        header_desc = QtWidgets.QLabel('<b>Description</b>')
        grid.addWidget(header_key, 0, 0)
        grid.addWidget(header_desc, 0, 1)
        # raised horizontal separator under the header
        separator = QtWidgets.QFrame(self)
        separator.setLineWidth(2)
        separator.setMidLineWidth(1)
        separator.setFrameShape(QtWidgets.QFrame.HLine)
        separator.setFrameShadow(QtWidgets.QFrame.Raised)
        grid.addWidget(separator, 1, 0, 1, 2)
        # next free grid row used by addShortcut()
        self._r = 2

    def addShortcut(self, key, description):
        """Append one row: *key* in column 0, *description* in column 1."""
        grid = self.layout()
        grid.addWidget(QtWidgets.QLabel(key), self._r, 0)
        grid.addWidget(QtWidgets.QLabel(description), self._r, 1)
        self._r += 1
| radjkarl/appBase | appbase/mainWindowRessources/menuShortcuts.py | Python | gpl-3.0 | 1,016 |
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
# RecSQL -- a simple mash-up of sqlite and numpy.recsql
# Copyright (C) 2007-2016 Oliver Beckstein <orbeckst@gmail.com>
# Released under the GNU Public License, version 3 or higher (your choice)
"""
:mod:`recsql.csv_table` --- Parse a simple CSV table
====================================================
Turn a CSV table into a numpy array.
Uses :mod:`csv` (requires python 2.6 or better).
.. autoclass:: Table2array
:members: __init__, recarray
.. autofunction:: make_python_name
"""
from __future__ import with_statement, absolute_import
# notes on csv (from http://farmdev.com/talks/unicode/)
# encode temp. to utf-8
# s_bytes = s_uni.encode('utf-8')
# do stuff
# s_bytes.decode('utf-8')
try:
# needs python >= 2.6
import csv
except ImportError:
import warnings
warnings.warn("csv module not available (needs python >=2.6)", category=ImportWarning)
# ... just go ahead and fail later miserably ...
import numpy
import re
from .convert import Autoconverter
# from the csv examples: http://docs.python.org/library/csv.html#csv-examples
import codecs
class UTF8Recoder(object):
    """
    Iterator that reads an encoded stream and reencodes the input to UTF-8
    """
    def __init__(self, f, encoding):
        # wrap f in a codecs reader that decodes *encoding* -> unicode
        self.reader = codecs.getreader(encoding)(f)
    def __iter__(self):
        return self
    def next(self):
        # Python 2 iterator protocol: next line, re-encoded as UTF-8 bytes
        return self.reader.next().encode("utf-8")
class UnicodeReader(object):
    """
    A CSV reader which will iterate over lines in the CSV file "f",
    which is encoded in the given encoding.
    """
    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Python 2's csv module cannot read unicode, so feed it UTF-8
        # bytes and decode each field back to unicode in next()
        f = UTF8Recoder(f, encoding)
        self.reader = csv.reader(f, dialect=dialect, **kwds)
    def next(self):
        # Python 2 iterator protocol: one row as a list of unicode fields
        row = self.reader.next()
        return [unicode(s, "utf-8") for s in row]
    def __iter__(self):
        return self
def make_python_name(s, default=None, number_prefix='N',encoding="utf-8"):
    """Returns a unicode string that can be used as a legal python identifier.

    :Arguments:
       *s*
          string (any object; it is converted with :func:`str` first)
       *default*
          use *default* if *s* is ``None`` or empty
       *number_prefix*
          string to prepend if *s* starts with a number
       *encoding*
          encoding of the resulting byte string (default: utf-8)
    """
    if s in ('', None):
        s = default
    s = str(s)
    # every character that is not legal in an identifier becomes '_'
    s = re.sub(r"[^a-zA-Z0-9_]", "_", s)
    # identifiers must not start with a digit
    if re.match(r"\d", s):
        s = number_prefix + s
    return unicode(s, encoding)
class Table2array(object):
    """Read a csv file and provide conversion to a :class:`numpy.recarray`.

    * Depending on the arguments, autoconversion of values can take
      place. See :class:`recsql.convert.Autoconverter` for details.
    * Table column headers are always read from the first row of the file.
    * Empty rows are discarded.
    """
    def __init__(self, filename=None, tablename="CSV", encoding="utf-8", **kwargs):
        """Initialize the class.

        :Arguments:
           *filename*
              CSV file (encoded with *encoding*)
           *tablename*
              name of the table
           *autoconvert*
              EXPERIMENTAL. ``True``: replace certain values
              with special python values (see :class:`convert.Autoconverter`) and possibly
              split values into lists (see *sep*).
              ``False``: leave everything as it is (numbers as numbers and strings
              as strings).
           *mode*
              mode of the :class:`~convert.Autoconverter`
        """
        if filename is None:
            raise TypeError("filename is actually required")
        self.tablename = tablename
        # per-value converter (numbers, booleans, optional list splitting)
        self.autoconvert = Autoconverter(**kwargs).convert
        csvtab = UnicodeReader(open(filename, "rb"), encoding=encoding)
        # first row: column headers, sanitized into python identifiers
        # (fall back to the column number for empty/illegal headers)
        self.names = [make_python_name(s,default=n,encoding=encoding) for n,s in enumerate(csvtab.next())]
        # read the rest after the column headers, skipping all-empty rows
        self.records = [tuple(map(self.autoconvert, line)) for line in csvtab \
                            if len(line) > 0 and not numpy.all(numpy.array(line) == '')]
    def recarray(self):
        """Returns data as :class:`numpy.recarray`."""
        return numpy.rec.fromrecords(self.records, names=self.names)
| orbeckst/RecSQL | recsql/csv_table.py | Python | gpl-3.0 | 4,336 |
# coding=utf-8
import unittest
"""962. Maximum Width Ramp
https://leetcode.com/problems/maximum-width-ramp/description/
Given an array `A` of integers, a _ramp_ is a tuple `(i, j)` for which `i <
j` and `A[i] <= A[j]`. The width of such a ramp is `j - i`.
Find the maximum width of a ramp in `A`. If one doesn't exist, return 0.
**Example 1:**
**Input:** [6,0,8,2,1,5]
**Output:** 4
**Explanation:**
The maximum width ramp is achieved at (i, j) = (1, 5): A[1] = 0 and A[5] = 5.
**Example 2:**
**Input:** [9,8,1,0,1,9,4,0,4,1]
**Output:** 7
**Explanation:**
The maximum width ramp is achieved at (i, j) = (2, 9): A[2] = 1 and A[9] = 1.
**Note:**
1. `2 <= A.length <= 50000`
2. `0 <= A[i] <= 50000`
Similar Questions:
"""
class Solution(object):
    def maxWidthRamp(self, A):
        """Return the maximum width ``j - i`` with ``i < j`` and ``A[i] <= A[j]``,
        or 0 if no such ramp exists.

        Monotonic-stack approach, O(n) time / O(n) space: collect the
        strictly decreasing prefix indices (the only useful ramp starts),
        then sweep ``j`` from the right, popping every start index that
        forms a ramp -- each popped index is matched with its farthest j.

        :type A: List[int]
        :rtype: int
        """
        # indices forming a strictly decreasing prefix of values
        candidates = []
        for i, value in enumerate(A):
            if not candidates or A[candidates[-1]] > value:
                candidates.append(i)
        best = 0
        for j in range(len(A) - 1, -1, -1):
            while candidates and A[candidates[-1]] <= A[j]:
                best = max(best, j - candidates.pop())
        return best

    def test(self):
        pass
if __name__ == "__main__":
    # running the file directly executes the (placeholder) unittest cases
    unittest.main()
| openqt/algorithms | leetcode/python/lc962-maximum-width-ramp.py | Python | gpl-3.0 | 1,041 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt, date_diff, formatdate, add_days, today, getdate
from frappe import _
from frappe.model.document import Document
from erpnext.hr.utils import set_employee_name, get_leave_period
from erpnext.hr.doctype.leave_ledger_entry.leave_ledger_entry import expire_allocation, create_leave_ledger_entry
# Validation errors raised by the leave-allocation checks below.
class OverlapError(frappe.ValidationError): pass  # allocation period overlaps an existing one
class BackDatedAllocationError(frappe.ValidationError): pass  # allocation predates a carry-forwarded one
class OverAllocationError(frappe.ValidationError): pass  # more leaves than days in the period
class LessAllocationError(frappe.ValidationError): pass
class ValueMultiplierError(frappe.ValidationError): pass  # leaves not a multiple of 0.5
class LeaveAllocation(Document):
	"""Allocates a number of leaves of one leave type to an employee for a
	date period, optionally carrying forward unused leaves from the
	previous allocation, and maintains the corresponding leave ledger."""

	def validate(self):
		# run all consistency checks and compute totals before saving;
		# set_total_leaves_allocated must run before validate_total_leaves_allocated
		self.validate_period()
		self.validate_new_leaves_allocated_value()
		self.validate_allocation_overlap()
		self.validate_back_dated_allocation()
		self.set_total_leaves_allocated()
		self.validate_total_leaves_allocated()
		self.validate_lwp()
		set_employee_name(self)
		self.validate_leave_allocation_days()

	def validate_leave_allocation_days(self):
		"""Ensure the leave type's max_leaves_allowed is not exceeded within
		the leave period that contains this allocation."""
		company = frappe.db.get_value("Employee", self.employee, "company")
		leave_period = get_leave_period(self.from_date, self.to_date, company)
		max_leaves_allowed = frappe.db.get_value("Leave Type", self.leave_type, "max_leaves_allowed")
		if max_leaves_allowed > 0:
			leave_allocated = 0
			if leave_period:
				leave_allocated = get_leave_allocation_for_period(self.employee, self.leave_type,
					leave_period[0].from_date, leave_period[0].to_date)
			leave_allocated += self.new_leaves_allocated
			if leave_allocated > max_leaves_allowed:
				frappe.throw(_("Total allocated leaves are more days than maximum allocation of {0} leave type for employee {1} in the period")
					.format(self.leave_type, self.employee))

	def on_submit(self):
		self.create_leave_ledger_entry()

		# expire all unused leaves in the ledger on creation of carry forward allocation
		allocation = get_previous_allocation(self.from_date, self.leave_type, self.employee)
		if self.carry_forward and allocation:
			expire_allocation(allocation)

	def on_cancel(self):
		# submit=False writes the reversing ledger entries
		self.create_leave_ledger_entry(submit=False)
		if self.carry_forward:
			self.set_carry_forwarded_leaves_in_previous_allocation(on_cancel=True)

	def validate_period(self):
		if date_diff(self.to_date, self.from_date) <= 0:
			frappe.throw(_("To date cannot be before from date"))

	def validate_lwp(self):
		# leave-without-pay types carry no balance, so allocating them is meaningless
		if frappe.db.get_value("Leave Type", self.leave_type, "is_lwp"):
			frappe.throw(_("Leave Type {0} cannot be allocated since it is leave without pay").format(self.leave_type))

	def validate_new_leaves_allocated_value(self):
		"""validate that leave allocation is in multiples of 0.5"""
		if flt(self.new_leaves_allocated) % 0.5:
			frappe.throw(_("Leaves must be allocated in multiples of 0.5"), ValueMultiplierError)

	def validate_allocation_overlap(self):
		"""Reject an allocation whose period overlaps a submitted allocation
		of the same leave type for the same employee."""
		leave_allocation = frappe.db.sql("""
			select name from `tabLeave Allocation`
			where employee=%s and leave_type=%s and docstatus=1
			and to_date >= %s and from_date <= %s""",
			(self.employee, self.leave_type, self.from_date, self.to_date))

		if leave_allocation:
			frappe.msgprint(_("{0} already allocated for Employee {1} for period {2} to {3}")
				.format(self.leave_type, self.employee, formatdate(self.from_date), formatdate(self.to_date)))

			frappe.throw(_('Reference') + ': <a href="#Form/Leave Allocation/{0}">{0}</a>'
				.format(leave_allocation[0][0]), OverlapError)

	def validate_back_dated_allocation(self):
		"""Disallow an allocation that ends before a later carry-forward
		allocation: its balance was already carried forward there."""
		future_allocation = frappe.db.sql("""select name, from_date from `tabLeave Allocation`
			where employee=%s and leave_type=%s and docstatus=1 and from_date > %s
			and carry_forward=1""", (self.employee, self.leave_type, self.to_date), as_dict=1)

		if future_allocation:
			frappe.throw(_("Leave cannot be allocated before {0}, as leave balance has already been carry-forwarded in the future leave allocation record {1}")
				.format(formatdate(future_allocation[0].from_date), future_allocation[0].name),
				BackDatedAllocationError)

	def set_total_leaves_allocated(self):
		"""Compute unused (carry-forwarded) leaves and the total, capping at
		the leave type's maximum where applicable."""
		self.unused_leaves = get_carry_forwarded_leaves(self.employee,
			self.leave_type, self.from_date, self.carry_forward)

		self.total_leaves_allocated = flt(self.unused_leaves) + flt(self.new_leaves_allocated)

		self.limit_carry_forward_based_on_max_allowed_leaves()

		if self.carry_forward:
			self.set_carry_forwarded_leaves_in_previous_allocation()

		# earned/compensatory leave types legitimately start at zero
		if not self.total_leaves_allocated \
			and not frappe.db.get_value("Leave Type", self.leave_type, "is_earned_leave") \
			and not frappe.db.get_value("Leave Type", self.leave_type, "is_compensatory"):
			frappe.throw(_("Total leaves allocated is mandatory for Leave Type {0}")
				.format(self.leave_type))

	def limit_carry_forward_based_on_max_allowed_leaves(self):
		# if the cap is hit, the carried-forward part absorbs the reduction
		max_leaves_allowed = frappe.db.get_value("Leave Type", self.leave_type, "max_leaves_allowed")
		if max_leaves_allowed and self.total_leaves_allocated > flt(max_leaves_allowed):
			self.total_leaves_allocated = flt(max_leaves_allowed)
			self.unused_leaves = max_leaves_allowed - flt(self.new_leaves_allocated)

	def set_carry_forwarded_leaves_in_previous_allocation(self, on_cancel=False):
		''' Set carry forwarded leaves in previous allocation '''
		previous_allocation = get_previous_allocation(self.from_date, self.leave_type, self.employee)
		if on_cancel:
			self.unused_leaves = 0.0
		if previous_allocation:
			frappe.db.set_value("Leave Allocation", previous_allocation.name,
				'carry_forwarded_leaves_count', self.unused_leaves)

	def validate_total_leaves_allocated(self):
		# Adding a day to include To Date in the difference
		date_difference = date_diff(self.to_date, self.from_date) + 1
		if date_difference < self.total_leaves_allocated:
			frappe.throw(_("Total allocated leaves are more than days in the period"), OverAllocationError)

	def create_leave_ledger_entry(self, submit=True):
		"""Write the ledger entries for this allocation: one (expiring)
		entry for carried-forward leaves, one for the new leaves."""
		if self.unused_leaves:
			expiry_days = frappe.db.get_value("Leave Type", self.leave_type, "expire_carry_forwarded_leaves_after_days")
			end_date = add_days(self.from_date, expiry_days - 1) if expiry_days else self.to_date
			args = dict(
				leaves=self.unused_leaves,
				from_date=self.from_date,
				to_date= min(getdate(end_date), getdate(self.to_date)),
				is_carry_forward=1
			)
			create_leave_ledger_entry(self, args, submit)

		args = dict(
			leaves=self.new_leaves_allocated,
			from_date=self.from_date,
			to_date=self.to_date,
			is_carry_forward=0
		)
		create_leave_ledger_entry(self, args, submit)
def get_previous_allocation(from_date, leave_type, employee):
	''' Returns document properties of previous allocation '''
	# most recent submitted allocation of this leave type that ended
	# strictly before from_date (None if there is none)
	return frappe.db.get_value("Leave Allocation",
		filters={
			'to_date': ("<", from_date),
			'leave_type': leave_type,
			'employee': employee,
			'docstatus': 1
		},
		order_by='to_date DESC',
		fieldname=['name', 'from_date', 'to_date', 'employee', 'leave_type'], as_dict=1)
def get_leave_allocation_for_period(employee, leave_type, from_date, to_date):
	"""Return the total number of leaves of *leave_type* allocated to
	*employee* across all submitted allocations that overlap the period
	[from_date, to_date] (0 if there are none)."""
	leave_allocations = frappe.db.sql("""
		select employee, leave_type, from_date, to_date, total_leaves_allocated
		from `tabLeave Allocation`
		where employee=%(employee)s and leave_type=%(leave_type)s
			and docstatus=1
			and (from_date between %(from_date)s and %(to_date)s
				or to_date between %(from_date)s and %(to_date)s
				or (from_date < %(from_date)s and to_date > %(to_date)s))
	""", {
		"from_date": from_date,
		"to_date": to_date,
		"employee": employee,
		"leave_type": leave_type
	}, as_dict=1)

	# sum over all overlapping allocations (empty result -> 0)
	return sum(allocation.total_leaves_allocated for allocation in leave_allocations)
@frappe.whitelist()
def get_carry_forwarded_leaves(employee, leave_type, date, carry_forward=None):
	''' Returns carry forwarded leaves for the given employee '''
	unused_leaves = 0.0
	previous_allocation = get_previous_allocation(date, leave_type, employee)
	if carry_forward and previous_allocation:
		# raises if the leave type does not allow carry-forwarding
		validate_carry_forward(leave_type)
		unused_leaves = get_unused_leaves(employee, leave_type,
			previous_allocation.from_date, previous_allocation.to_date)
		if unused_leaves:
			# cap at the leave type's maximum_carry_forwarded_leaves, if set
			max_carry_forwarded_leaves = frappe.db.get_value("Leave Type",
				leave_type, "maximum_carry_forwarded_leaves")
			if max_carry_forwarded_leaves and unused_leaves > flt(max_carry_forwarded_leaves):
				unused_leaves = flt(max_carry_forwarded_leaves)

	return unused_leaves
def get_unused_leaves(employee, leave_type, from_date, to_date):
	''' Returns unused leaves between the given period while skipping leave allocation expiry '''
	# net ledger balance within the window; or_filters keeps non-expired
	# entries plus carry-forward entries (so expiry rows are skipped)
	leaves = frappe.get_all("Leave Ledger Entry", filters={
		'employee': employee,
		'leave_type': leave_type,
		'from_date': ('>=', from_date),
		'to_date': ('<=', to_date)
	}, or_filters={
		'is_expired': 0,
		'is_carry_forward': 1
	}, fields=['sum(leaves) as leaves'])
	# SQL SUM over zero rows yields NULL; flt() maps that to 0.0
	return flt(leaves[0]['leaves'])
def validate_carry_forward(leave_type):
	''' Raise if the given leave type does not allow carry-forwarding '''
	if not frappe.db.get_value("Leave Type", leave_type, "is_carry_forward"):
		frappe.throw(_("Leave Type {0} cannot be carry-forwarded").format(leave_type))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
############################# Copyright ################################
# Author: GHOSTnew # 2014 #
########################################################################
# This file is part of HelpBot. #
# HelpBot is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# HelpBot is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with Foobar. If not, see <http://www.gnu.org/licenses/>. #
########################################################################
import socket
import socks
import time
import web
import re
import modules
import ssl
################################ Config #################################################
nick = "HelpBot"  # IRC nickname
real_name = "Help on our Network"  # realname/gecos field sent with USER
nickserv = "password nickserv"  # argument for NickServ IDENTIFY (set None for no ns auth)
channels = ["#opAmanda", "#help"]  # channels to join after connecting
host = "server host"  # IRC server hostname
port = 6697  # server port (6697 is the conventional SSL port)
Tor = False  # route the connection through a local Tor SOCKS5 proxy (127.0.0.1:9050)
password = None  # server password (None by default)
SSL = True  # wrap the socket in SSL, False by default
import_modules = ["noob", "opamanda", "ubuntu_fr", "wikipedia", "sms_killers", "clients"]  # command modules to load
################################ End config ##############################################
class Bot(object):
    """Minimal Python 2 IRC bot: owns the socket and implements connection
    setup plus the basic protocol commands (PRIVMSG, NOTICE, JOIN, KICK...)."""
    def __init__ (self, host, port, nick, real_name,Tor, nickserv=None, password=None, SSL=False):
        self.host = host
        self.port = port
        self.nick = nick
        self.real_name = real_name
        self.password = password
        self.nickserv = nickserv
        self.ssl = SSL
        if Tor == True:
            # route every socket through the local Tor SOCKS5 proxy
            socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050, True)
            socket.socket = socks.socksocket
        self.sock = socket.socket()
    def connect(self):
        """Connect, register (USER/PASS/NICK), identify to NickServ and join channels."""
        self.sock.connect((self.host, self.port))
        if self.ssl == True:
            try:
                self.sock = ssl.wrap_socket(self.sock)
                self.sock.do_handshake()
            except:
                # NOTE(review): bare except hides the real SSL error and the
                # bot keeps using whatever self.sock ended up as -- confirm
                print "Failed to do ssl handshake"
        time.sleep(5)
        self.raw_line('USER ' + self.nick + ' 0 ' + self.nick +' :' + self.real_name)
        if self.password:
            self.raw_line('PASS ' + self.password)
        self.raw_line('NICK ' + self.nick)
        # +B marks the connection as a bot on networks supporting it
        self.raw_line('mode +B')
        time.sleep(2)
        if self.nickserv:
            self.send("NickServ", "IDENTIFY " + nickserv)
            time.sleep(2)
        for chan in channels:
            self.join(chan)
    def disconnect(self):
        # close and immediately create a fresh socket for reconnecting
        self.sock.close()
        self.sock = socket.socket()
    def read(self):
        # one raw chunk from the server (may contain several IRC lines)
        return self.sock.recv(1024)
    def send(self, channel, msg):
        self.sock.send('PRIVMSG ' + channel + ' :' + msg + '\r\n')
    def notice(self, channel, msg):
        self.sock.send('NOTICE ' + channel + ' :' + msg + '\r\n')
    def action(self, channel, msg):
        # CTCP ACTION ("/me"); NOTE(review): no space after ACTION, so the
        # caller apparently must pass msg with a leading space -- confirm
        self.sock.send('PRIVMSG ' + channel + ' :' + '\001ACTION' + msg + '\001\r\n')
    def raw_line(self, line):
        # send an arbitrary raw IRC line
        self.sock.send(line + '\r\n')
    def join(self, channel):
        # prepend '#' if the caller passed a bare channel name
        if channel.startswith('#'):
            self.sock.send('JOIN ' + channel + '\r\n')
        else:
            self.sock.send('JOIN #' + channel + '\r\n')
    def kick(self, channel, nick, reason = ""):
        self.sock.send('KICK ' + channel + ' ' + nick + ' :' + reason + '\r\n')
def main():
    """Connect the bot, answer PINGs, and dispatch PRIVMSG commands to the
    configured modules (each module exposes onhelp() and load())."""
    HelpBot = Bot(host, port, nick, real_name, Tor, nickserv, password, SSL)
    HelpBot.connect()
    while True:
        data = HelpBot.read()
        if not data:
            # empty read: server closed the connection -> reconnect
            print "connexion lost"
            HelpBot.disconnect()
            HelpBot.connect()
            #break
        #print data
        if data.find('PING') != -1:
            # keep-alive: reply with the server's token
            HelpBot.raw_line('PONG ' + data.split()[1] + '\r\n')
        elif data.find('PRIVMSG') != -1:
            # typical line: ":nick!user@host PRIVMSG #chan :cmd arg1 arg2"
            cmd = (':'.join(data.split (':')[2:])).split( )[0]
            channel = ''.join (data.split(':')[:2]).split (' ')[-2]
            nick_source = (data.split (':')[1]).split('!')[0]
            arg = data.split(" ")
            args = ''
            # everything after the command word becomes the argument string
            for index,item in enumerate(arg) :
                if index > 3 :
                    if args == '':
                        args = item
                    else :
                        args += ' ' + item
            # strip the trailing CR left over from the IRC line ending
            args = args.split('\r')[0]
            if cmd == ".help":
                # let every module print its own help text
                for module in import_modules:
                    mod = __import__ ("modules." + module, fromlist=import_modules)
                    mod.onhelp(HelpBot, channel)
            else:
                # give every module a chance to handle the command
                for module in import_modules:
                    mod = __import__ ("modules." + module, fromlist=import_modules)
                    mod.load(HelpBot, cmd, nick_source, channel, args)
if __name__ == "__main__" :
    main()
| GHOSTnew/HelpBot | HelpBot.py | Python | gpl-3.0 | 5,517 |
from model.flyweight import Flyweight
from model.static.database import database
class Service(Flyweight):
    """Flyweight wrapper for one row of the EVE static table ``staServices``.

    Instances are shared per service_id (Flyweight); the _inited guard
    keeps the database from being queried again on re-construction.
    """
    def __init__(self,service_id):
        #prevents reinitializing
        if "_inited" in self.__dict__:
            return
        self._inited = None
        #prevents reinitializing
        self.service_id = service_id
        # NOTE(review): service_id is interpolated straight into the SQL
        # string -- safe only if callers always pass an integer id; prefer a
        # parameterized query if database.get_cursor supports it (confirm).
        cursor = database.get_cursor(
            "select * from staServices where serviceID={};".format(
                self.service_id))
        row = cursor.fetchone()
        self.service_name = row["serviceName"]
        self.description = row["description"]
        cursor.close()
| Iconik/eve-suite | src/model/static/sta/services.py | Python | gpl-3.0 | 624 |
# Package metadata consumed by the packaging/setup machinery.
NAME = 'pokerserver'
DESCRIPTION = 'Poker server for our Python workshop at TNG Technology Consulting.'
VERSION = '2017.3.10'  # calendar-style version: year.month.day
| MartinAltmayer/pokerserver | pokerserver/version.py | Python | gpl-3.0 | 126 |
# -*- coding: utf-8 -*-
"""
General description
-------------------
This example shows how to perform a capacity optimization for
an energy system with storage. The following energy system is modeled:
input/output bgas bel
| | | |
| | | |
wind(FixedSource) |------------------>| |
| | | |
pv(FixedSource) |------------------>| |
| | | |
gas_resource |--------->| | |
(Commodity) | | | |
| | | |
demand(Sink) |<------------------| |
| | | |
| | | |
pp_gas(Transformer) |<---------| | |
|------------------>| |
| | | |
storage(Storage) |<------------------| |
|------------------>| |
The example exists in four variations. The following parameters describe
the main setting for the optimization variation 2:
- optimize gas_resource and storage
- set installed capacities for wind and pv
- set investment cost for storage
- set gas price for kWh
Results show a higher renewable energy share than in variation 1
(78% compared to 51%) due to preinstalled renewable capacities.
Storage is not installed as the gas resource is cheaper.
Have a look at different parameter settings. There are four variations
of this example in the same folder.
Installation requirements
-------------------------
This example requires the version v0.4.x of oemof. Install by:
pip install 'oemof.solph>=0.4,<0.5'
"""
__copyright__ = "oemof developer group"
__license__ = "GPLv3"
###############################################################################
# Imports
###############################################################################
# Default logger of oemof
from oemof.tools import logger
from oemof.tools import economics
from oemof import solph
import logging
import os
import pandas as pd
import pprint as pp
# Hourly resolution for one full year
number_timesteps = 8760
##########################################################################
# Initialize the energy system and read/calculate necessary parameters
##########################################################################
logger.define_logging()
logging.info("Initialize the energy system")
date_time_index = pd.date_range("1/1/2012", periods=number_timesteps, freq="H")
energysystem = solph.EnergySystem(timeindex=date_time_index)
# Read data file with normalized wind/pv profiles and electrical demand
full_filename = os.path.join(os.getcwd(), "storage_investment.csv")
data = pd.read_csv(full_filename, sep=",")
# Gas price in currency per kWh
price_gas = 0.04
# If the period is one year the equivalent periodical costs (epc) of an
# investment are equal to the annuity. Use oemof's economic tools.
epc_storage = economics.annuity(capex=1000, n=20, wacc=0.05)
##########################################################################
# Create oemof objects
##########################################################################
logging.info("Create oemof objects")
# create natural gas bus
bgas = solph.Bus(label="natural_gas")
# create electricity bus
bel = solph.Bus(label="electricity")
energysystem.add(bgas, bel)
# create excess component for the electricity bus to allow overproduction
excess = solph.Sink(label="excess_bel", inputs={bel: solph.Flow()})
# create source object representing the natural gas commodity (annual limit)
gas_resource = solph.Source(
    label="rgas", outputs={bgas: solph.Flow(variable_costs=price_gas)}
)
# create fixed source object representing wind power plants
# (installed capacity fixed at 1 GW; profile comes from the CSV data)
wind = solph.Source(
    label="wind",
    outputs={bel: solph.Flow(fix=data["wind"], nominal_value=1000000)},
)
# create fixed source object representing pv power plants (600 MW installed)
pv = solph.Source(
    label="pv", outputs={bel: solph.Flow(fix=data["pv"], nominal_value=600000)}
)
# create simple sink object representing the electrical demand
demand = solph.Sink(
    label="demand",
    inputs={bel: solph.Flow(fix=data["demand_el"], nominal_value=1)},
)
# create simple transformer object representing a gas power plant
# (58% electrical efficiency, effectively unbounded capacity)
pp_gas = solph.Transformer(
    label="pp_gas",
    inputs={bgas: solph.Flow()},
    outputs={bel: solph.Flow(nominal_value=10e10, variable_costs=0)},
    conversion_factors={bel: 0.58},
)
# create storage object representing a battery; its capacity is an
# investment decision priced at the annuity computed above
storage = solph.components.GenericStorage(
    label="storage",
    inputs={bel: solph.Flow(variable_costs=0.0001)},
    outputs={bel: solph.Flow()},
    loss_rate=0.00,
    initial_storage_level=0,
    invest_relation_input_capacity=1 / 6,
    invest_relation_output_capacity=1 / 6,
    inflow_conversion_factor=1,
    outflow_conversion_factor=0.8,
    investment=solph.Investment(ep_costs=epc_storage),
)
energysystem.add(excess, gas_resource, wind, pv, demand, pp_gas, storage)
##########################################################################
# Optimise the energy system
##########################################################################
logging.info("Optimise the energy system")
# initialise the operational model
om = solph.Model(energysystem)
# if tee_switch is true solver messages will be displayed
logging.info("Solve the optimization problem")
om.solve(solver="cbc", solve_kwargs={"tee": True})
##########################################################################
# Check and plot the results
##########################################################################
# check if the new result object is working for custom components
results = solph.processing.results(om)
custom_storage = solph.views.node(results, "storage")
electricity_bus = solph.views.node(results, "electricity")
meta_results = solph.processing.meta_results(om)
pp.pprint(meta_results)
my_results = electricity_bus["scalars"]
# installed capacity of storage in GWh
# ((storage, None) holds the storage's own scalar results, incl. 'invest')
my_results["storage_invest_GWh"] = (
    results[(storage, None)]["scalars"]["invest"] / 1e6
)
# resulting renewable energy share
# (1 minus gas-plant generation divided by total demand)
my_results["res_share"] = (
    1
    - results[(pp_gas, bel)]["sequences"].sum()
    / results[(bel, demand)]["sequences"].sum()
)
pp.pprint(my_results)
| oemof/oemof_examples | oemof_examples/oemof.solph/v0.4.x/storage_investment/v2_invest_optimize_only_gas_and_storage.py | Python | gpl-3.0 | 6,369 |
#!/usr/bin/env python
#
# Raspberry Pi Internet Radio
# using an HD44780 LCD display
# $Id: radio8x2.py,v 1.7 2017/07/31 07:44:26 bob Exp $
#
# Author : Bob Rathbone
# Site : http://www.bobrathbone.com
#
# This program uses Music Player Daemon 'mpd'and it's client 'mpc'
# See http://mpd.wikia.com/wiki/Music_Player_Daemon_Wiki
#
#
# License: GNU V3, See https://www.gnu.org/copyleft/gpl.html
#
# Disclaimer: Software is provided as is and absolutly no warranties are implied or given.
# The authors shall not be liable for any loss or damage however caused.
#
import os
import RPi.GPIO as GPIO
import signal
import atexit
import traceback
import subprocess
import sys
import time
import string
import datetime
from time import strftime
import shutil
# Class imports
from radio_daemon import Daemon
from radio_class import Radio
from lcd_class import Lcd
from log_class import Log
from rss_class import Rss
# To use GPIO 14 and 15 (Serial RX/TX)
# Remove references to /dev/ttyAMA0 from /boot/cmdline.txt and /etc/inittab
# Direction constants for search/cycle operations
UP = 0
DOWN = 1
# Files used to remember the last station/track between restarts
CurrentStationFile = "/var/lib/radiod/current_station"
CurrentTrackFile = "/var/lib/radiod/current_track"
CurrentFile = CurrentStationFile
PlaylistsDirectory = "/var/lib/mpd/playlists/"
# Module-level helper objects shared by the daemon and the display routines
log = Log()
radio = Radio()
lcd = Lcd()
rss = Rss()
# Signal SIGTERM handler
def signalHandler(signal,frame):
    """SIGTERM handler: log, show 'Stopped' on the LCD and shut the radio down."""
    global lcd
    global log
    log.message("Radio stopped, PID " + str(os.getpid()), log.INFO)
    lcd.line1("Stopped")
    lcd.line2("")
    radio.exit()
# Signal SIGTERM handler
def signalSIGUSR1(signal,frame):
    """SIGUSR1 handler: step the display to the next mode, wrapping to TIME."""
    global log
    global radio
    log.message("Radio got SIGUSR1", log.INFO)
    next_mode = radio.getDisplayMode() + 1
    if next_mode > radio.MODE_LAST:
        next_mode = radio.MODE_TIME
    radio.setDisplayMode(next_mode)
# Daemon class
# Daemon class
class MyDaemon(Daemon):
    """Radio daemon: initialises GPIO/LCD/MPD and runs the display/event loop."""

    def run(self):
        """Main entry point: set up hardware and loop forever updating the display."""
        global CurrentFile
        GPIO.setmode(GPIO.BCM)       # Use BCM GPIO numbers
        GPIO.setwarnings(False)      # Ignore warnings
        # Get switches configuration
        up_switch = radio.getSwitchGpio("up_switch")
        down_switch = radio.getSwitchGpio("down_switch")
        left_switch = radio.getSwitchGpio("left_switch")
        right_switch = radio.getSwitchGpio("right_switch")
        menu_switch = radio.getSwitchGpio("menu_switch")
        boardrevision = radio.getBoardRevision()
        if boardrevision == 1:
            # For rev 1 boards with no inbuilt pull-up/down resistors
            # Wire the GPIO inputs to ground via a 10K resistor
            GPIO.setup(menu_switch, GPIO.IN)
            GPIO.setup(up_switch, GPIO.IN)
            GPIO.setup(down_switch, GPIO.IN)
            GPIO.setup(left_switch, GPIO.IN)
            GPIO.setup(right_switch, GPIO.IN)
        else:
            # For rev 2 boards with inbuilt pull-up/down resistors the
            # following lines are used instead of the above, so
            # there is no need to physically wire the 10k resistors
            GPIO.setup(menu_switch, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            GPIO.setup(up_switch, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            GPIO.setup(down_switch, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            GPIO.setup(left_switch, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
            GPIO.setup(right_switch, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
        # Initialise radio
        log.init('radio')
        signal.signal(signal.SIGTERM,signalHandler)
        signal.signal(signal.SIGUSR1,signalSIGUSR1)
        progcall = str(sys.argv)
        log.message('Radio running pid ' + str(os.getpid()), log.INFO)
        log.message("Radio " + progcall + " daemon version " + radio.getVersion(), log.INFO)
        log.message("GPIO version " + str(GPIO.VERSION), log.INFO)
        lcd.init(boardrevision)
        lcd.setWidth(8)     # 8x2 display
        hostname = exec_cmd('hostname')
        ipaddr = exec_cmd('hostname -I')
        # Display daemon pid on the LCD
        message = "PID " + str(os.getpid())
        lcd.line1(message)
        time.sleep(2)
        # Wait for the IP network (give up after ~10 seconds)
        ipaddr = ""
        waiting4network = True
        count = 10
        while waiting4network:
            lcd.scroll2("Wait for network",no_interrupt)
            ipaddr = exec_cmd('hostname -I')
            time.sleep(1)
            count -= 1
            if (count < 0) or (len(ipaddr) > 1):
                waiting4network = False
        if len(ipaddr) < 1:
            lcd.line2("No IP")
        else:
            lcd.scroll2("IP " + ipaddr, no_interrupt)
        time.sleep(2)
        log.message("Starting MPD", log.INFO)
        lcd.scroll2("Starting MPD", no_interrupt)
        radio.start()
        log.message("MPD started", log.INFO)
        mpd_version = radio.execMpcCommand("version")
        log.message(mpd_version, log.INFO)
        lcd.line1("Ver "+ radio.getVersion())
        lcd.scroll2(mpd_version,no_interrupt)
        time.sleep(1)
        # Auto-load music library if no Internet
        if len(ipaddr) < 1 and radio.autoload():
            log.message("Loading music library",log.INFO)
            radio.setSource(radio.PLAYER)
        # Load radio (stations or media, depending on source)
        reload(lcd,radio)
        radio.play(get_stored_id(CurrentFile))
        log.message("Current ID = " + str(radio.getCurrentID()), log.INFO)
        # Set up switch event processing
        GPIO.add_event_detect(menu_switch, GPIO.RISING, callback=switch_event, bouncetime=200)
        GPIO.add_event_detect(left_switch, GPIO.RISING, callback=switch_event, bouncetime=200)
        GPIO.add_event_detect(right_switch, GPIO.RISING, callback=switch_event, bouncetime=200)
        GPIO.add_event_detect(up_switch, GPIO.RISING, callback=switch_event, bouncetime=200)
        GPIO.add_event_detect(down_switch, GPIO.RISING, callback=switch_event, bouncetime=200)
        # Main processing loop
        count = 0
        while True:
            switch = radio.getSwitch()
            if switch > 0:
                get_switch_states(lcd,radio,rss)
            display_mode = radio.getDisplayMode()
            lcd.setScrollSpeed(0.3) # Scroll speed normal
            ipaddr = exec_cmd('hostname -I')
            # Shutdown command issued: show message and idle forever
            if display_mode == radio.MODE_SHUTDOWN:
                displayShutdown(lcd)
                while True:
                    time.sleep(1)
            if len(ipaddr) < 1:
                lcd.line2("No IP network")
            elif display_mode == radio.MODE_TIME:
                if radio.getReload():
                    log.message("Reload ", log.DEBUG)
                    reload(lcd,radio)
                    radio.setReload(False)
                else:
                    displayTime(lcd,radio)
                    if radio.muted():
                        msg = "Sound muted"
                        if radio.getStreaming():
                            msg = msg + ' *'
                        lcd.line2(msg)
                    else:
                        display_current(lcd,radio)
            elif display_mode == radio.MODE_SEARCH:
                display_search(lcd,radio)
            elif display_mode == radio.MODE_SOURCE:
                display_source_select(lcd,radio)
            elif display_mode == radio.MODE_OPTIONS:
                display_options(lcd,radio)
            elif display_mode == radio.MODE_IP:
                lcd.line2("Radio v" + radio.getVersion())
                if len(ipaddr) < 1:
                    lcd.line1("No IP")
                else:
                    lcd.scroll1("IP " + ipaddr, interrupt)
            elif display_mode == radio.MODE_RSS:
                displayTime(lcd,radio)
                display_rss(lcd,rss)
            elif display_mode == radio.MODE_SLEEP:
                displayTime(lcd,radio)
                display_sleep(lcd,radio)
            # Timer function
            checkTimer(radio)
            # Check state (pause or play)
            checkState(radio)
            # Alarm wakeup function
            if display_mode == radio.MODE_SLEEP and radio.alarmFired():
                log.message("Alarm fired", log.INFO)
                unmuteRadio(lcd,radio)
                displayWakeUpMessage(lcd)
                radio.setDisplayMode(radio.MODE_TIME)
            if radio.volumeChanged():
                lcd.line2("Volume " + str(radio.getVolume()))
                time.sleep(0.5)
            time.sleep(0.1)

    def status(self):
        """Report (log and print) whether the daemon is running, via its pidfile."""
        # Get the pid from the pidfile
        try:
            pf = file(self.pidfile,'r')
            pid = int(pf.read().strip())
            pf.close()
        except IOError:
            pid = None
        if not pid:
            message = "radiod status: not running"
            log.message(message, log.INFO)
            print message
        else:
            message = "radiod running pid " + str(pid)
            log.message(message, log.INFO)
            print message
        return
# End of class overrides
# Interrupt scrolling LCD routine
def interrupt():
    """Scroll-interrupt callback: return True when scrolling should stop."""
    global lcd
    global radio
    global rss
    stop = False
    if radio.getSwitch() > 0:
        stop = get_switch_states(lcd,radio,rss)
    # A running sleep timer forces a rapid time refresh
    if radio.getTimer() and not stop:
        displayTime(lcd,radio)
        stop = checkTimer(radio)
    if radio.volumeChanged():
        lcd.line2("Volume " + str(radio.getVolume()))
        time.sleep(0.5)
    if not stop:
        stop = checkState(radio) or radio.getInterrupt()
    return stop
def no_interrupt():
    """Scroll callback that never interrupts scrolling."""
    return False
# Call back routine called by switch events
def switch_event(switch):
    """GPIO callback: record which switch fired for the main loop to handle."""
    global radio
    radio.setSwitch(switch)
# Check switch states
def get_switch_states(lcd,radio,rss):
    """Process the pending push-button event recorded by switch_event().

    Handles menu cycling, channel up/down, artist search, volume and mute,
    depending on the current display mode.
    Returns True if the current scrolling display should be interrupted.
    """
    interrupt = False       # Interrupt display
    switch = radio.getSwitch()
    # NOTE(review): pid is read but never used below - confirm it can go
    pid = exec_cmd("cat /var/run/radiod.pid")
    display_mode = radio.getDisplayMode()
    input_source = radio.getSource()
    # Get rotary switches configuration
    up_switch = radio.getSwitchGpio("up_switch")
    down_switch = radio.getSwitchGpio("down_switch")
    left_switch = radio.getSwitchGpio("left_switch")
    right_switch = radio.getSwitchGpio("right_switch")
    menu_switch = radio.getSwitchGpio("menu_switch")
    if switch == menu_switch:
        log.message("MENU switch mode=" + str(display_mode), log.DEBUG)
        if radio.muted():
            unmuteRadio(lcd,radio)
        display_mode = display_mode + 1
        # Skip RSS mode if not available
        if display_mode == radio.MODE_RSS and not radio.alarmActive():
            if rss.isAvailable() and not radio.optionChanged():
                lcd.line2("Getting RSS feed")
            else:
                display_mode = display_mode + 1
        if display_mode > radio.MODE_LAST:
            boardrevision = radio.getBoardRevision()
            lcd.init(boardrevision) # Recover corrupted display
            display_mode = radio.MODE_TIME
        radio.setDisplayMode(display_mode)
        log.message("New mode " + radio.getDisplayModeString()+
            "(" + str(display_mode) + ")", log.DEBUG)
        # Shutdown if menu button held for > 3 seconds
        MenuSwitch = GPIO.input(menu_switch)
        count = 15
        while MenuSwitch:
            time.sleep(0.2)
            MenuSwitch = GPIO.input(menu_switch)
            count = count - 1
            if count < 0:
                log.message("Shutdown", log.DEBUG)
                MenuSwitch = False
                radio.setDisplayMode(radio.MODE_SHUTDOWN)
        # Carry out any pending action selected in the options menu
        if radio.getUpdateLibrary():
            update_library(lcd,radio)
            radio.setDisplayMode(radio.MODE_TIME)
        elif radio.getReload():
            source = radio.getSource()
            log.message("Reload " + str(source), log.INFO)
            lcd.line2("Reloading ")
            reload(lcd,radio)
            radio.setReload(False)
            radio.setDisplayMode(radio.MODE_TIME)
        elif radio.optionChanged():
            log.message("optionChanged", log.DEBUG)
            option = radio.getOption()
            # Alarm set-up goes back to sleep mode, everything else to TIME
            if radio.alarmActive() and not radio.getTimer() \
                    and (option == radio.ALARMSETHOURS or option == radio.ALARMSETMINS):
                radio.setDisplayMode(radio.MODE_SLEEP)
                radio.mute()
            else:
                radio.setDisplayMode(radio.MODE_TIME)
            radio.optionChangedFalse()
        elif radio.loadNew():
            log.message("Load new search=" + str(radio.getSearchIndex()), log.DEBUG)
            radio.playNew(radio.getSearchIndex())
            radio.setDisplayMode(radio.MODE_TIME)
        interrupt = True
    elif switch == up_switch:
        if display_mode != radio.MODE_SLEEP:
            log.message("UP switch display_mode " + str(display_mode), log.DEBUG)
            if radio.muted():
                radio.unmute()
            if display_mode == radio.MODE_SOURCE:
                radio.toggleSource()
                radio.setReload(True)
            elif display_mode == radio.MODE_SEARCH:
                # Repeat while the button is held down
                wait = 0.5
                while GPIO.input(up_switch):
                    radio.getNext(UP)
                    display_search(lcd,radio)
                    time.sleep(wait)
                    wait = 0.1
            elif display_mode == radio.MODE_OPTIONS:
                cycle_options(radio,UP)
            else:
                radio.channelUp()
                if display_mode == radio.MODE_RSS:
                    radio.setDisplayMode(radio.MODE_TIME)
            interrupt = True
        else:
            DisplayExitMessage(lcd)
    elif switch == down_switch:
        log.message("DOWN switch display_mode " + str(display_mode), log.DEBUG)
        if display_mode != radio.MODE_SLEEP:
            if radio.muted():
                radio.unmute()
            if display_mode == radio.MODE_SOURCE:
                radio.toggleSource()
                radio.setReload(True)
            elif display_mode == radio.MODE_SEARCH:
                # Repeat while the button is held down
                wait = 0.5
                while GPIO.input(down_switch):
                    radio.getNext(DOWN)
                    display_search(lcd,radio)
                    time.sleep(wait)
                    wait = 0.1
            elif display_mode == radio.MODE_OPTIONS:
                cycle_options(radio,DOWN)
            else:
                radio.channelDown()
                if display_mode == radio.MODE_RSS:
                    radio.setDisplayMode(radio.MODE_TIME)
            interrupt = True
        else:
            DisplayExitMessage(lcd)
    elif switch == left_switch:
        log.message("LEFT switch" ,log.DEBUG)
        if display_mode != radio.MODE_SLEEP:
            if display_mode == radio.MODE_OPTIONS:
                toggle_option(radio,lcd,DOWN)
                interrupt = True
            elif display_mode == radio.MODE_SEARCH and input_source == radio.PLAYER:
                wait = 0.5
                while GPIO.input(left_switch):
                    radio.findNextArtist(DOWN)
                    display_search(lcd,radio)
                    time.sleep(wait)
                    wait = 0.1
                interrupt = True
            else:
                # Decrease volume
                volChange = True
                while volChange:
                    # Mute function (Both buttons depressed)
                    if GPIO.input(right_switch):
                        radio.mute()
                        lcd.line2("Mute")
                        time.sleep(2)
                        volChange = False
                        interrupt = True
                    else:
                        volume = radio.decreaseVolume()
                        displayVolume(lcd,radio)
                        volChange = GPIO.input(left_switch)
                        if volume <= 0:
                            volChange = False
                        time.sleep(0.1)
        else:
            DisplayExitMessage(lcd)
    elif switch == right_switch:
        log.message("RIGHT switch" ,log.DEBUG)
        if display_mode != radio.MODE_SLEEP:
            if display_mode == radio.MODE_OPTIONS:
                toggle_option(radio,lcd,UP)
                interrupt = True
            elif display_mode == radio.MODE_SEARCH and input_source == radio.PLAYER:
                wait = 0.5
                while GPIO.input(right_switch):
                    radio.findNextArtist(UP)
                    display_search(lcd,radio)
                    time.sleep(wait)
                    wait = 0.1
                interrupt = True
            else:
                # Increase volume
                volChange = True
                while volChange:
                    # Mute function (Both buttons depressed)
                    if GPIO.input(left_switch):
                        radio.mute()
                        lcd.line2("Mute")
                        time.sleep(2)
                        volChange = False
                        interrupt = True
                    else:
                        volume = radio.increaseVolume()
                        displayVolume(lcd,radio)
                        volChange = GPIO.input(right_switch)
                        if volume >= 100:
                            volChange = False
                        time.sleep(0.1)
        else:
            DisplayExitMessage(lcd)
    # Reset switch and return interrupt
    radio.setSwitch(0)
    return interrupt
# Cycle through the options
# Only display reload the library if in PLAYER mode
def cycle_options(radio,direction):
    """Step to the next or previous entry of the options menu.

    The "reload library" entry is only offered in PLAYER mode and the
    streaming entry only when streaming is available; both are skipped
    otherwise. Selection wraps around at either end.
    """
    option = radio.getOption()
    log.message("cycle_options direction:" + str(direction)
        + " option: " + str(option), log.DEBUG)
    step = 1 if direction == UP else -1
    option += step
    source = radio.getSource()
    # Don't display reload if not player mode
    if option == radio.RELOADLIB and source != radio.PLAYER:
        option += step
    # Skip streaming option when streaming is unavailable
    if option == radio.STREAMING and not radio.streamingAvailable():
        option += step
    # Wrap around at the ends of the menu
    if option > radio.OPTION_LAST:
        option = radio.RANDOM
    elif option < 0:
        if source == radio.PLAYER:
            option = radio.OPTION_LAST
        else:
            option = radio.OPTION_LAST-1
    radio.setOption(option)
    radio.optionChangedTrue()
# Toggle or change options
def toggle_option(radio,lcd,direction):
    """Toggle or adjust the currently selected option.

    For timer/alarm settings the value keeps changing for as long as the
    left/right button remains pressed (polled via GPIO.input).
    """
    option = radio.getOption()
    log.message("toggle_option option="+ str(option), log.DEBUG)
    # Get switches configuration
    up_switch = radio.getSwitchGpio("up_switch")
    down_switch = radio.getSwitchGpio("down_switch")
    left_switch = radio.getSwitchGpio("left_switch")
    right_switch = radio.getSwitchGpio("right_switch")
    menu_switch = radio.getSwitchGpio("menu_switch")
    if option == radio.RANDOM:
        if radio.getRandom():
            radio.randomOff()
        else:
            radio.randomOn()
    elif option == radio.CONSUME:
        # Consume only makes sense for the media player source
        if radio.getSource() == radio.PLAYER:
            if radio.getConsume():
                radio.consumeOff()
            else:
                radio.consumeOn()
        else:
            lcd.line2("Not allowed")
            time.sleep(2)
    elif option == radio.REPEAT:
        if radio.getRepeat():
            radio.repeatOff()
        else:
            radio.repeatOn()
    elif option == radio.TIMER:
        TimerChange = True
        # Buttons held in: keep incrementing/decrementing the timer
        if radio.getTimer():
            while TimerChange:
                if direction == UP:
                    radio.incrementTimer(1)
                    lcd.line2("Timer " + radio.getTimerString())
                    TimerChange = GPIO.input(right_switch)
                else:
                    radio.decrementTimer(1)
                    lcd.line2("Timer " + radio.getTimerString())
                    TimerChange = GPIO.input(left_switch)
                time.sleep(0.1)
        else:
            radio.timerOn()
    elif option == radio.ALARM:
        radio.alarmCycle(direction)
    elif option == radio.ALARMSETHOURS or option == radio.ALARMSETMINS:
        # Buttons held in: adjust by 60 minutes (hours) or 1 minute
        AlarmChange = True
        twait = 0.4
        value = 1
        unit = " mins"
        if option == radio.ALARMSETHOURS:
            value = 60
            unit = " hours"
        while AlarmChange:
            if direction == UP:
                radio.incrementAlarm(value)
                lcd.line2("Alarm " + radio.getAlarmTime() + unit)
                time.sleep(twait)
                AlarmChange = GPIO.input(right_switch)
            else:
                radio.decrementAlarm(value)
                lcd.line2("Alarm " + radio.getAlarmTime() + unit)
                time.sleep(twait)
                AlarmChange = GPIO.input(left_switch)
            # Speed up after the first repeat
            twait = 0.1
    elif option == radio.STREAMING:
        radio.toggleStreaming()
    elif option == radio.RELOADLIB:
        if radio.getUpdateLibrary():
            radio.setUpdateLibOff()
        else:
            radio.setUpdateLibOn()
    radio.optionChangedTrue()
    return
# Update music library
def update_library(lcd,radio):
    """Ask MPD to rescan the music library, showing progress on the LCD."""
    log.message("Updating library", log.INFO)
    lcd.line1("Updating")
    lcd.line2("library")
    radio.updateLibrary()
# Reload if new source selected (RADIO or PLAYER)
def reload(lcd,radio):
    """Reload stations or the media library after a source change.

    NOTE(review): shadows the Python 2 builtin reload(); intentional here.
    """
    lcd.line1("Loading")
    source = radio.getSource()
    if source == radio.RADIO:
        lcd.line2("stations")
        dirList=os.listdir(PlaylistsDirectory)
        for fname in dirList:
            # NOTE(review): fname is relative, so isfile() checks the cwd,
            # not PlaylistsDirectory - confirm this is the intended filter
            if os.path.isfile(fname):
                continue
            log.message("Loading " + fname, log.DEBUG)
            lcd.line2(fname)
            time.sleep(0.1)
        radio.loadStations()
    elif source == radio.PLAYER:
        lcd.line2("media")
        radio.loadMedia()
        current = radio.execMpcCommand("current")
        # Nothing playable found - trigger a full library update
        if len(current) < 1:
            update_library(lcd,radio)
    return
# Display the RSS feed
def display_rss(lcd,rss):
    """Scroll the next RSS headline on line 2 (at a faster scroll speed)."""
    lcd.setScrollSpeed(0.2) # Scroll RSS faster
    headline = rss.getFeed()
    lcd.scroll2(headline,interrupt)
# Display the currently playing station or track
def display_current(lcd,radio):
    """Display the currently playing station or track on line 2.

    Shows a stream error message if one is pending, scrolls long names,
    and restarts playback when nothing at all is playing.
    """
    source = radio.getSource()
    if source == radio.RADIO:
        current = radio.getCurrentStation()
    else:
        # Media player source: show "<artist> - <title>"
        # (duplicate getCurrentArtist() call and unused current_id/index
        #  locals removed)
        current_artist = radio.getCurrentArtist()
        track_name = radio.getCurrentTitle()
        current = current_artist + " - " + track_name
    # Display any stream error
    if radio.gotError():
        errorStr = radio.getErrorString()
        lcd.scroll2(errorStr,interrupt)
        radio.clearError()
    else:
        leng = len(current)
        if leng > 16:
            lcd.scroll2(current[0:160],interrupt)
        elif leng < 1:
            lcd.line2("No input!")
            time.sleep(1)
            radio.play(1) # Reset station or track
        else:
            lcd.line2(current)
    return
# Get currently playing station or track number from MPC
def get_current_id():
    """Return the playlist number of the current station/track.

    Parses the '#N/M' position marker from 'mpc status' output; defaults
    to 1 when nothing is playing. The result is persisted to CurrentFile
    so it survives a restart.
    """
    current_id = 1
    status = radio.execMpcCommand("status | grep \"\[\" ")
    if len(status) > 1:
        # Extract N from the '#N/M' marker
        x = status.index('#')+1
        y = status.index('/')
        current_id = int(status[x:y])
    # Remember the ID across restarts
    exec_cmd ("echo " + str(current_id) + " > " + CurrentFile)
    return current_id
# Get the last ID stored in /var/lib/radiod
def get_stored_id(current_file):
    """Return the station/track ID stored in current_file.

    Defaults to 5 when the file does not exist. Reads the file directly
    instead of shelling out to 'cat' (behaviour otherwise unchanged:
    only the first line is read, and garbage still raises ValueError).
    """
    current_id = 5
    if os.path.isfile(current_file):
        with open(current_file) as f:
            current_id = int(f.readline().strip())
    return current_id
# Execute system command
def exec_cmd(cmd):
    """Run a shell command and return the first line of its output."""
    pipe = os.popen(cmd)
    first_line = pipe.readline()
    return first_line.rstrip('\n')
# Get list of tracks or stations
def get_mpc_list(cmd):
    """Return the output lines of '/usr/bin/mpc <cmd>' as a list of strings.

    Reading stops at the first empty line (end of output). Renamed the
    accumulator so it no longer shadows the builtin 'list', and replaced
    line.__len__() with the idiomatic len() check.
    """
    lines = []
    pipe = os.popen("/usr/bin/mpc " + cmd)
    while True:
        line = pipe.readline().strip('\n')
        if len(line) < 1:
            break
        lines.append(line)
    return lines
# Source selection display
def display_source_select(lcd,radio):
    """Show the current input source (radio stations or media player)."""
    lcd.line1("Source:")
    source = radio.getSource()
    names = {radio.RADIO: "Radio", radio.PLAYER: "Media"}
    if source in names:
        lcd.line2(names[source])
# Display search (Station or Track)
def display_search(lcd,radio):
    """Show the current search position: artist/track or station name."""
    idx = radio.getSearchIndex()
    if radio.getSource() == radio.PLAYER:
        artist = radio.getArtistName(idx)
        lcd.scroll1("(" + str(idx+1) + ")" + artist[0:160],interrupt)
        track = radio.getTrackNameByIndex(idx)
        lcd.scroll2(track,interrupt)
    else:
        lcd.line1("Search")
        station = radio.getStationName(idx)
        lcd.scroll2(station[0:40] + '('+ str(idx+1) + ')',interrupt)
# Display if in sleep
def display_sleep(lcd,radio):
    """Show sleep-mode status, including the alarm time when one is set."""
    if radio.alarmActive():
        lcd.line2("Alarm " + radio.getAlarmTime())
    else:
        lcd.line2('Sleep mode')
# Unmute radio and get stored volume
def unmuteRadio(lcd,radio):
    """Unmute the radio and show the restored volume on the LCD."""
    radio.unmute()
    displayVolume(lcd,radio)
# Display volume and streaming-on indicator
def displayVolume(lcd,radio):
    """Show the current volume on line 2; a trailing '*' marks streaming."""
    suffix = ' *' if radio.getStreaming() else ''
    lcd.line2("Vol " + str(radio.getVolume()) + suffix)
# Options menu
def display_options(lcd,radio):
    """Show the currently selected entry of the options menu.

    Timer and alarm entries manage line 1 themselves; all other entries
    get a generic "Menu:" heading.
    """
    option = radio.getOption()
    if option != radio.TIMER and option != radio.ALARM \
            and option != radio.ALARMSETHOURS and option != radio.ALARMSETMINS :
        lcd.line1("Menu:")
    if option == radio.RANDOM:
        if radio.getRandom():
            lcd.scroll2("Random on", interrupt)
        else:
            lcd.scroll2("Random off", interrupt)
    elif option == radio.CONSUME:
        if radio.getConsume():
            lcd.scroll2("Consume on", interrupt)
        else:
            lcd.scroll2("Consume off", interrupt)
    elif option == radio.REPEAT:
        if radio.getRepeat():
            lcd.scroll2("Repeat on", interrupt)
        else:
            lcd.scroll2("Repeat off", interrupt)
    elif option == radio.TIMER:
        lcd.line1("Timer:")
        if radio.getTimer():
            lcd.line2(radio.getTimerString())
        else:
            lcd.line2("off")
    elif option == radio.ALARM:
        alarmString = "off"
        lcd.line1("Alarm:")
        alarmType = radio.getAlarmType()
        if alarmType == radio.ALARM_ON:
            alarmString = "on"
        elif alarmType == radio.ALARM_REPEAT:
            alarmString = "repeat"
        elif alarmType == radio.ALARM_WEEKDAYS:
            alarmString = "Weekdays"
        lcd.line2(alarmString)
    elif option == radio.ALARMSETHOURS:
        lcd.line1("Set hour")
        lcd.line2(radio.getAlarmTime())
    elif option == radio.ALARMSETMINS:
        lcd.line1("Set mins")
        lcd.line2(radio.getAlarmTime())
    elif option == radio.STREAMING:
        if radio.getStreaming():
            # Fixed user-facing typo: was "Streamng on"
            lcd.scroll2("Streaming on", interrupt)
        else:
            lcd.scroll2("Streaming off", interrupt)
    elif option == radio.RELOADLIB:
        if radio.getUpdateLibrary():
            lcd.scroll2("Update library:Yes", interrupt)
        else:
            lcd.scroll2("Update library:No", interrupt)
    return
# Display wake up message
def displayWakeUpMessage(lcd):
    """Scroll a time-of-day greeting when the alarm wakes the radio up."""
    hour = datetime.datetime.now().hour
    # Same boundaries as before: evening wins from 16:00 because the
    # original overlapping checks applied it last
    if hour < 12:
        message = 'Good morning'
    elif hour < 16:
        message = 'Good afternoon'
    else:
        message = 'Good evening'
    lcd.scroll2(message, interrupt)
    time.sleep(3)
# Display shutdown messages
def displayShutdown(lcd):
    """Stop MPD, halt the system and show shutdown progress on the LCD.

    WARNING: actually powers the machine off ('shutdown -h now').
    """
    lcd.line1("Stopping radio")
    radio.execCommand("service mpd stop")
    radio.execCommand("shutdown -h now")
    lcd.line2("Shutdown")
    time.sleep(2)
    lcd.line1("Stopped")
    lcd.line2("Turn off")
    return
# Display time and timer/alarm
def displayTime(lcd,radio):
    """Show HH:MM on line 1, appending timer and alarm info when active.

    The original computed the identical "%H:%M" string twice into two
    variables; a single strftime call is kept instead.
    """
    timenow = strftime("%H:%M")
    message = timenow
    if radio.getTimer():
        message = timenow + " " + radio.getTimerString()
    if radio.alarmActive():
        message = message + " " + radio.getAlarmTime()
    lcd.line1(message)
    return
# Sleep exit message
def DisplayExitMessage(lcd):
    """Tell the user how to leave sleep mode (menu button exits)."""
    lcd.line1("Hit menu")
    lcd.line2("to exit")
    time.sleep(1)
# Check Timer fired
def checkTimer(radio):
    """Mute and enter sleep mode when the sleep timer fires.

    Returns True if the timer fired (used to interrupt scrolling).
    """
    fired = radio.fireTimer()
    if fired:
        log.message("Timer fired", log.INFO)
        radio.mute()
        radio.setDisplayMode(radio.MODE_SLEEP)
    return fired
# Check state (play or pause)
# Returns paused True if paused
def checkState(radio):
    """Check MPD state (play/pause) and keep mute/display mode in sync.

    Returns True if the player is paused.
    """
    paused = False
    # NOTE(review): display_mode is fetched but unused - confirm removable
    display_mode = radio.getDisplayMode()
    state = radio.getState()
    radio.getVolume()
    if state == 'pause':
        paused = True
        if not radio.muted():
            if radio.alarmActive() and not radio.getTimer():
                radio.setDisplayMode(radio.MODE_SLEEP)
            radio.mute()
    elif state == 'play':
        if radio.muted():
            # Relies on the module-level lcd object (not a parameter)
            unmuteRadio(lcd,radio)
            radio.setDisplayMode(radio.MODE_TIME)
    return paused
### Main routine ###
# Python 2 command-line front end: start|stop|restart|nodaemon|status|version
if __name__ == "__main__":
    daemon = MyDaemon('/var/run/radiod.pid')
    if len(sys.argv) == 2:
        if 'start' == sys.argv[1]:
            daemon.start()
        elif 'stop' == sys.argv[1]:
            # Stop MPD as well so playback does not continue headless
            os.system("service mpd stop")
            daemon.stop()
        elif 'restart' == sys.argv[1]:
            daemon.restart()
        elif 'nodaemon' == sys.argv[1]:
            # Run in the foreground (not listed in the usage string below)
            daemon.nodaemon()
        elif 'status' == sys.argv[1]:
            daemon.status()
        elif 'version' == sys.argv[1]:
            print "Version " + radio.getVersion()
        else:
            print "Unknown command: " + sys.argv[1]
            sys.exit(2)
        sys.exit(0)
    else:
        print "usage: %s start|stop|restart|status|version" % sys.argv[0]
        sys.exit(2)
# End of script
| bobrathbone/piradio | radio8x2.py | Python | gpl-3.0 | 25,317 |
#!/usr/bin/env python3
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""Generate the html documentation based on the asciidoc files."""
from typing import List, Optional
import re
import os
import sys
import subprocess
import shutil
import tempfile
import argparse
import io
import pathlib
# Repository checkout root (this script lives one level below it)
REPO_ROOT = pathlib.Path(__file__).resolve().parents[1]
# Destination for rendered in-application help pages
DOC_DIR = REPO_ROOT / 'qutebrowser' / 'html' / 'doc'
# Make the repo's 'scripts' package importable below
sys.path.insert(0, str(REPO_ROOT))
from scripts import utils
class AsciiDoc:
"""Abstraction of an asciidoc subprocess."""
FILES = ['faq', 'changelog', 'contributing', 'quickstart', 'userscripts']
    def __init__(self,
                 asciidoc: Optional[str],
                 asciidoc_python: Optional[str],
                 website: Optional[str]) -> None:
        """Store configuration; actual setup happens in prepare().

        Args:
            asciidoc: Path to an asciidoc script, or None to autodetect.
            asciidoc_python: Python interpreter to run asciidoc with.
            website: Output directory for the website build, or None to
                     build the in-application docs instead.
        """
        # Resolved asciidoc command line (set by prepare())
        self._cmd: Optional[List[str]] = None
        self._asciidoc = asciidoc
        self._asciidoc_python = asciidoc_python
        self._website = website
        # Temporary HOME for asciidoc, plus theme/tmp subdirs (prepare())
        self._homedir: Optional[pathlib.Path] = None
        self._themedir: Optional[pathlib.Path] = None
        self._tempdir: Optional[pathlib.Path] = None
        # When True, cleanup() keeps the temp dir for debugging
        self._failed = False
def prepare(self) -> None:
"""Get the asciidoc command and create the homedir to use."""
self._cmd = self._get_asciidoc_cmd()
self._homedir = pathlib.Path(tempfile.mkdtemp())
self._themedir = self._homedir / '.asciidoc' / 'themes' / 'qute'
self._tempdir = self._homedir / 'tmp'
self._tempdir.mkdir(parents=True)
self._themedir.mkdir(parents=True)
def cleanup(self) -> None:
"""Clean up the temporary home directory for asciidoc."""
if self._homedir is not None and not self._failed:
shutil.rmtree(str(self._homedir))
def build(self) -> None:
"""Build either the website or the docs."""
if self._website:
self._build_website()
else:
self._build_docs()
self._copy_images()
    def _build_docs(self) -> None:
        """Render .asciidoc files to .html sites."""
        # Fixed doc pages plus every help page under doc/help
        files = [((REPO_ROOT / 'doc' / '{}.asciidoc'.format(f)),
                  DOC_DIR / (f + ".html")) for f in self.FILES]
        for src in (REPO_ROOT / 'doc' / 'help').glob('*.asciidoc'):
            dst = DOC_DIR / (src.stem + ".html")
            files.append((src, dst))
        # patch image links to use local copy (qute://help serves them)
        replacements = [
            ("https://raw.githubusercontent.com/qutebrowser/qutebrowser/master/doc/img/cheatsheet-big.png",
             "qute://help/img/cheatsheet-big.png"),
            ("https://raw.githubusercontent.com/qutebrowser/qutebrowser/master/doc/img/cheatsheet-small.png",
             "qute://help/img/cheatsheet-small.png")
        ]
        asciidoc_args = ['-a', 'source-highlighter=pygments']
        for src, dst in files:
            assert self._tempdir is not None  # for mypy
            # Write a patched copy of the source into the temp dir and
            # run asciidoc on that instead of the original
            modified_src = self._tempdir / src.name
            with modified_src.open('w', encoding='utf-8') as moded_f, \
                    src.open('r', encoding='utf-8') as f:
                for line in f:
                    for orig, repl in replacements:
                        line = line.replace(orig, repl)
                    moded_f.write(line)
            self.call(modified_src, dst, *asciidoc_args)
def _copy_images(self) -> None:
"""Copy image files to qutebrowser/html/doc."""
print("Copying files...")
dst_path = DOC_DIR / 'img'
dst_path.mkdir(exist_ok=True)
for filename in ['cheatsheet-big.png', 'cheatsheet-small.png']:
src = REPO_ROOT / 'doc' / 'img' / filename
dst = dst_path / filename
shutil.copy(str(src), str(dst))
    def _build_website_file(self, root: pathlib.Path, filename: str) -> None:
        """Build a single website file.

        Rewrites the asciidoc source: prepends the site header, strips
        QUTE_WEB_HIDE sections and author/field lines, retitles the page
        with a " | qutebrowser" suffix, and fixes cross-links to .html.
        """
        src = root / filename
        assert self._website is not None  # for mypy
        dst = pathlib.Path(self._website)
        dst = dst / src.parent.relative_to(REPO_ROOT) / (src.stem + ".html")
        dst.parent.mkdir(exist_ok=True)
        assert self._tempdir is not None  # for mypy
        modified_src = self._tempdir / src.name
        # Start the modified source from the shared website header
        shutil.copy(str(REPO_ROOT / 'www' / 'header.asciidoc'), modified_src)
        outfp = io.StringIO()
        header = modified_src.read_text(encoding='utf-8')
        header += "\n\n"
        with src.open('r', encoding='utf-8') as infp:
            outfp.write("\n\n")
            hidden = False          # inside a QUTE_WEB_HIDE section
            found_title = False
            title = ""
            last_line = ""
            for line in infp:
                line = line.rstrip()
                if line == '// QUTE_WEB_HIDE':
                    assert not hidden
                    hidden = True
                elif line == '// QUTE_WEB_HIDE_END':
                    assert hidden
                    hidden = False
                elif line == "The Compiler <mail@qutebrowser.org>":
                    continue
                elif re.fullmatch(r':\w+:.*', line):
                    # asciidoc field
                    continue
                if not found_title:
                    # Demote the document title to a section heading and
                    # build the new page title from the preceding line
                    if re.fullmatch(r'=+', line):
                        line = line.replace('=', '-')
                        found_title = True
                        title = last_line + " | qutebrowser\n"
                        title += "=" * (len(title) - 1)
                    elif re.fullmatch(r'= .+', line):
                        line = '==' + line[1:]
                        found_title = True
                        title = last_line + " | qutebrowser\n"
                        title += "=" * (len(title) - 1)
                if not hidden:
                    # Point asciidoc cross-references at the rendered .html
                    outfp.write(line.replace(".asciidoc[", ".html[") + '\n')
                last_line = line
        current_lines = outfp.getvalue()
        outfp.close()
        # Assemble: new title, site header, then the processed body
        modified_str = title + "\n\n" + header + current_lines
        modified_src.write_text(modified_str, encoding='utf-8')
        asciidoc_args = ['--theme=qute', '-a toc', '-a toc-placement=manual',
                         '-a', 'source-highlighter=pygments']
        self.call(modified_src, dst, *asciidoc_args)
    def _build_website(self) -> None:
        """Prepare and build the website."""
        theme_file = REPO_ROOT / 'www' / 'qute.css'
        assert self._themedir is not None  # for mypy
        shutil.copy(theme_file, self._themedir)
        assert self._website is not None  # for mypy
        outdir = pathlib.Path(self._website)
        for item_path in pathlib.Path(REPO_ROOT).rglob('*.asciidoc'):
            # Skip include-only sources which are not standalone pages.
            if item_path.stem in ['header', 'OpenSans-License']:
                continue
            self._build_website_file(item_path.parent, item_path.name)
        # Static assets copied verbatim into the output tree (src -> dest).
        copy = {'icons': 'icons', 'doc/img': 'doc/img', 'www/media': 'media/'}
        for src, dest in copy.items():
            full_src = REPO_ROOT / src
            full_dest = outdir / dest
            # Remove a stale copy first so copytree doesn't fail.
            try:
                shutil.rmtree(full_dest)
            except FileNotFoundError:
                pass
            shutil.copytree(full_src, full_dest)
        # Convenience symlinks at the website root.
        for dst, link_name in [
            ('README.html', 'index.html'),
            ((pathlib.Path('doc') / 'quickstart.html'), 'quickstart.html'),
        ]:
            assert isinstance(dst, (str, pathlib.Path))  # for mypy
            try:
                (outdir / link_name).symlink_to(dst)
            except FileExistsError:
                pass
def _get_asciidoc_cmd(self) -> List[str]:
"""Try to find out what commandline to use to invoke asciidoc."""
if self._asciidoc is not None:
python = (sys.executable if self._asciidoc_python is None
else self._asciidoc_python)
return [python, self._asciidoc]
for executable in ['asciidoc', 'asciidoc.py']:
try:
subprocess.run([executable, '--version'],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
check=True)
except OSError:
pass
else:
return [executable]
raise FileNotFoundError
    def call(self, src: pathlib.Path, dst: pathlib.Path, *args):
        """Call asciidoc for the given files.

        Args:
            src: The source .asciidoc file.
            dst: The destination .html file, or None to auto-guess.
            *args: Additional arguments passed to asciidoc.

        Exits the process (status 1) if asciidoc fails.
        """
        print("Calling asciidoc for {}...".format(src.name))
        assert self._cmd is not None  # for mypy
        cmdline = self._cmd[:]
        if dst is not None:
            cmdline += ['--out-file', str(dst)]
        cmdline += args
        cmdline.append(str(src))
        # So the virtualenv's Pygments is found
        bin_path = pathlib.Path(sys.executable).parent
        try:
            env = os.environ.copy()
            # HOME is redirected so asciidoc picks up the prepared theme dir.
            env['HOME'] = str(self._homedir)
            env['PATH'] = str(bin_path) + os.pathsep + env['PATH']
            subprocess.run(cmdline, check=True, env=env)
        except (subprocess.CalledProcessError, OSError) as e:
            self._failed = True
            utils.print_error(str(e))
            print("Keeping modified sources in {}.".format(self._homedir),
                  file=sys.stderr)
            sys.exit(1)
def parse_args() -> argparse.Namespace:
    """Parse command-line arguments.

    Returns:
        Namespace with 'website', 'asciidoc' and 'asciidoc_python' attributes.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--website', help="Build website into a given "
                        "directory.")
    parser.add_argument('--asciidoc', help="Full path to asciidoc.py. "
                        "If not given, it's searched in PATH.",
                        nargs='?')
    # Bug fix: the original help string concatenated to "asciidoc.If not
    # given" (missing space between the two string fragments).
    parser.add_argument('--asciidoc-python', help="Python to use for asciidoc. "
                        "If not given, the current Python interpreter is used.",
                        nargs='?')
    return parser.parse_args()
def run(**kwargs) -> None:
    """Regenerate documentation.

    Creates the output directory, prepares an AsciiDoc builder and runs the
    build, always cleaning up afterwards.
    """
    DOC_DIR.mkdir(exist_ok=True)
    builder = AsciiDoc(**kwargs)
    try:
        builder.prepare()
    except FileNotFoundError:
        utils.print_error("Could not find asciidoc! Please install it, or use "
                          "the --asciidoc argument to point this script to "
                          "the correct python/asciidoc.py location!")
        sys.exit(1)
    try:
        builder.build()
    finally:
        builder.cleanup()
def main(colors: bool = False) -> None:
    """Entry point: generate html files for the online documentation."""
    utils.change_cwd()
    utils.use_color = colors
    namespace = parse_args()
    run(asciidoc=namespace.asciidoc,
        asciidoc_python=namespace.asciidoc_python,
        website=namespace.website)
if __name__ == '__main__':
main(colors=True)
| fiete201/qutebrowser | scripts/asciidoc2html.py | Python | gpl-3.0 | 11,655 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from frame import DStatFrame
from shee.util import get_result_dir_name
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
class DStatNetwork(DStatFrame):
    """DStat dataframe specialised for network throughput columns.

    Loaded values are converted from bytes/s to Mbit/s.
    """
    def __init__(self, filename, frame=None, eth=None, grain=False):
        # Either wrap an already-parsed frame, or read from the dstat file.
        if frame is not None:
            self.df = frame.df
            self.device = frame.device
            self._set_name('network')
        else:
            super(DStatNetwork, self).__init__(filename, 'network')
            sname = get_result_dir_name(filename)
            if eth is not None:
                # Per-interface columns (net/ethN).
                self.filename = sname + '/network/eth' + str(eth) + '/' + sname.split("/")[-1]
                df = self._read_dataframe(['epoch', 'net/eth' + str(eth)], grain=grain)
                self.device = eth
            else:
                # Aggregate columns (net/total).
                self.filename = sname + '/network/' + sname.split("/")[-1]
                df = self._read_dataframe(['epoch', 'net/total'], grain=grain)
            df.columns = df.columns.droplevel()
            # bytes/s -> Mbit/s: divide by (1024*1024/8).
            # NOTE(review): DataFrame.ix is deprecated/removed in modern
            # pandas -- this code targets an old pandas version; verify
            # before upgrading.
            df.ix[:, df.columns != 'epoch'] = df.ix[:, df.columns != 'epoch'].divide(1024*1024/8)
            self.df = df
    def subplot_all(self, plot=False):
        # Draw send/recv as two stacked subplots sharing the time axis.
        plot_title, save_title = self._get_titles()
        # row and column sharing
        fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True)
        hours = mdates.HourLocator()  # major ticks: one per hour
        mins = mdates.MinuteLocator()  # minor ticks: one per minute
        self._set_subplots_title_and_plot(ax1, 'epoch', 'send')
        ax1.set_ylabel(plot_title + 'Mbps')
        self._set_subplots_title_and_plot(ax2, 'epoch', 'recv')
        self._set_subplots_time(ax=ax2, hours=hours, mins=mins)
        ax2.set_ylabel(plot_title + 'Mbps')
        ax2.set_xlabel('time')
        self._rotating_xticks_and_grid([ax1, ax2])
        plt.tight_layout(pad=1, w_pad=1, h_pad=1)
        if plot:
            plt.show()
        else:
            self.save(save_title + "subplots")
        plt.close()
    @staticmethod
    def _rotating_xticks_and_grid(axs):
        # Enable grid and rotate minor tick labels for readability.
        for ax in axs:
            ax.grid(True)
            ax.tick_params(axis='x', pad=20)
            plt.setp(ax.xaxis.get_minorticklabels(), rotation=40)
    def _set_subplots_title_and_plot(self, ax, xlab, ylab):
        # Plot one column (ylab) against the epoch column on the given axes.
        ax.set_title(ylab)
        ax.plot(self.df[xlab], self.df[ylab])
    @staticmethod
    def _set_subplots_time(ax, hours, mins):
        # Hour ticks carry no label; minute ticks are labelled HH:MM.
        ax.xaxis.set_major_locator(hours)
        ax.xaxis.set_minor_locator(mins)
        ax.xaxis.set_major_formatter(mdates.DateFormatter(''))
        ax.xaxis.set_minor_formatter(mdates.DateFormatter('%H:%M'))
"""mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
urlpatterns = [
    # Delegate all URL routing to the gsn app's URLconf.
    url(r'^', include('gsn.urls')),
]
| LSIR/gsn | gsn-webui/app/urls.py | Python | gpl-3.0 | 712 |
#!/usr/bin/env python
'''
access satellite map tile database
some functions are based on code from mapUtils.py in gmapcatcher
Andrew Tridgell
May 2012
released under GNU GPL v3 or later
'''
import math, cv, sys, os, mp_util, httplib2, threading, time, collections, string, hashlib, errno, tempfile
class TileException(Exception):
    '''Raised for tile-handling errors.'''
    def __init__(self, msg):
        super(TileException, self).__init__(msg)
TILE_SERVICES = {
# thanks to http://go2log.com/2011/09/26/fetching-tiles-for-offline-map/
# for the URL mapping info
"GoogleSat" : "http://khm${GOOG_DIGIT}.google.com/kh/v=113&src=app&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}",
"GoogleMap" : "http://mt${GOOG_DIGIT}.google.com/vt/lyrs=m@121&hl=en&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}",
"GoogleHyb" : "http://mt${GOOG_DIGIT}.google.com/vt/lyrs=h@121&hl=en&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}",
"GoogleTer" : "http://mt${GOOG_DIGIT}.google.com/vt/lyrs=t@108,r@121&hl=en&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}",
"GoogleChina" : "http://mt${GOOG_DIGIT}.google.cn/vt/lyrs=m@121&hl=en&gl=cn&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}",
"YahooMap" : "http://maps${Y_DIGIT}.yimg.com/hx/tl?v=4.3&.intl=en&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM}&r=1",
"YahooSat" : "http://maps${Y_DIGIT}.yimg.com/ae/ximg?v=1.9&t=a&s=256&.intl=en&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM}&r=1",
"YahooInMap" : "http://maps.yimg.com/hw/tile?locale=en&imgtype=png&yimgv=1.2&v=4.1&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM_2}",
"YahooInHyb" : "http://maps.yimg.com/hw/tile?imgtype=png&yimgv=0.95&t=h&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM_2}",
"YahooHyb" : "http://maps${Y_DIGIT}.yimg.com/hx/tl?v=4.3&t=h&.intl=en&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM}&r=1",
"MicrosoftBrMap" : "http://imakm${MS_DIGITBR}.maplink3.com.br/maps.ashx?v=${QUAD}|t&call=2.2.4",
"MicrosoftHyb" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/h${QUAD}.png?g=441&mkt=en-us&n=z",
"MicrosoftSat" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/a${QUAD}.png?g=441&mkt=en-us&n=z",
"MicrosoftMap" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/r${QUAD}.png?g=441&mkt=en-us&n=z",
"MicrosoftTer" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/r${QUAD}.png?g=441&mkt=en-us&shading=hill&n=z",
"OpenStreetMap" : "http://tile.openstreetmap.org/${ZOOM}/${X}/${Y}.png",
"OSMARender" : "http://tah.openstreetmap.org/Tiles/tile/${ZOOM}/${X}/${Y}.png",
"OpenAerialMap" : "http://tile.openaerialmap.org/tiles/?v=mgm&layer=openaerialmap-900913&x=${X}&y=${Y}&zoom=${OAM_ZOOM}",
"OpenCycleMap" : "http://andy.sandbox.cloudmade.com/tiles/cycle/${ZOOM}/${X}/${Y}.png"
}
# these are the md5sums of "unavailable" tiles
BLANK_TILES = set(["d16657bbee25d7f15c583f5c5bf23f50",
"c0e76e6e90ff881da047c15dbea380c7",
"d41d8cd98f00b204e9800998ecf8427e"])
# all tiles are 256x256
TILES_WIDTH = 256
TILES_HEIGHT = 256
class TileServiceInfo:
    '''Lookup object supplying the template variables used in tile URLs.'''
    def __init__(self, x, y, zoom):
        self.X = x
        self.Y = y
        self.Z = zoom
        # Bing-style quadkey: one base-4 digit per zoom level, MSB first.
        digits = []
        for i in range(zoom - 1, -1, -1):
            digits.append(str((((y >> i) & 1) << 1) + ((x >> i) & 1)))
        self.ZOOM = zoom
        self.QUAD = ''.join(digits)
        self.YAHOO_Y = 2 ** (zoom - 1) - 1 - y
        self.YAHOO_ZOOM = zoom + 1
        self.YAHOO_ZOOM_2 = 17 - zoom + 1
        self.OAM_ZOOM = 17 - zoom
        self.GOOG_DIGIT = (x + y) & 3
        self.MS_DIGITBR = (((y & 1) << 1) + (x & 1)) + 1
        self.MS_DIGIT = ((y & 3) << 1) + (x & 1)
        self.Y_DIGIT = (x + y + zoom) % 3 + 1
        self.GALILEO = "Galileo"[:(3 * x + y) & 7]
    def __getitem__(self, a):
        # string.Template.substitute() indexes us by variable name.
        return str(getattr(self, a))
class TileInfo:
    '''description of a tile'''
    def __init__(self, tile, zoom, offset=(0,0)):
        self.tile = tile           # (x, y) tile indices at this zoom level
        (self.x, self.y) = tile
        self.zoom = zoom
        (self.offsetx, self.offsety) = offset  # pixel offset within the tile
        self.refresh_time()
    def key(self):
        '''tile cache key'''
        return (self.tile, self.zoom)
    def refresh_time(self):
        '''reset the request time'''
        self.request_time = time.time()
    def coord(self, offset=(0,0)):
        '''return lat,lon within a tile given (offsetx,offsety)'''
        (tilex, tiley) = self.tile
        (offsetx, offsety) = offset
        world_tiles = 1<<self.zoom
        # Normalise tile+offset to [-1, 1] across the whole world map.
        x = ( tilex + 1.0*offsetx/TILES_WIDTH ) / (world_tiles/2.) - 1
        y = ( tiley + 1.0*offsety/TILES_HEIGHT) / (world_tiles/2.) - 1
        lon = x * 180.0
        # Inverse spherical-mercator projection for the latitude.
        y = math.exp(-y*2*math.pi)
        e = (y-1)/(y+1)
        lat = 180.0/math.pi * math.asin(e)
        return (lat, lon)
    def size(self):
        '''return tile size as (width,height) in meters'''
        (lat1, lon1) = self.coord((0,0))
        (lat2, lon2) = self.coord((TILES_WIDTH,0))
        width = mp_util.gps_distance(lat1, lon1, lat2, lon2)
        (lat2, lon2) = self.coord((0,TILES_HEIGHT))
        height = mp_util.gps_distance(lat1, lon1, lat2, lon2)
        return (width,height)
    def distance(self, lat, lon):
        '''distance of this tile from a given lat/lon'''
        # Measured from the tile centre.
        (tlat, tlon) = self.coord((TILES_WIDTH/2,TILES_HEIGHT/2))
        return mp_util.gps_distance(lat, lon, tlat, tlon)
    def path(self):
        '''return relative path of tile image'''
        (x, y) = self.tile
        return "%u/%u/%u.img" % (self.zoom, y, x)
    def url(self, service):
        '''return URL for a tile'''
        url = string.Template(TILE_SERVICES[service])
        (x,y) = self.tile
        tile_info = TileServiceInfo(x, y, self.zoom)
        return url.substitute(tile_info)
class TileInfoScaled(TileInfo):
    '''A TileInfo extended with a scale factor and src/dst pixel placement.'''
    def __init__(self, tile, zoom, scale, src, dst):
        TileInfo.__init__(self, tile, zoom)
        self.scale = scale
        self.srcx, self.srcy = src
        self.dstx, self.dsty = dst
class MPTile:
    '''map tile object'''
    def __init__(self, cache_path=None, download=True, cache_size=500,
                 service="MicrosoftSat", tile_delay=0.3, debug=False,
                 max_zoom=19):
        # Default cache location: ~/.tilecache, falling back to a temp dir
        # when HOME is not set (e.g. on Windows services).
        if cache_path is None:
            try:
                cache_path = os.path.join(os.environ['HOME'], '.tilecache')
            except Exception:
                cache_path = os.path.join(tempfile.gettempdir(), 'MAVtilecache')
        if not os.path.exists(cache_path):
            mp_util.mkdir_p(cache_path)
        self.cache_path = cache_path
        self.max_zoom = max_zoom
        self.min_zoom = 1
        self.download = download        # whether missing tiles may be fetched
        self.cache_size = cache_size    # max entries in the in-memory cache
        self.tile_delay = tile_delay    # seconds between tile downloads
        self.service = service
        self.debug = debug
        if service not in TILE_SERVICES:
            raise TileException('unknown tile service %s' % service)
        # _download_pending is a dictionary of TileInfo objects
        self._download_pending = {}
        self._download_thread = None
        # Placeholder images shown while loading / when a tile is unavailable.
        self._loading = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data', 'loading.jpg')
        self._unavailable = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data', 'unavailable.jpg')
        # LRU-ish in-memory cache: key() -> cv image (or the unavailable path).
        self._tile_cache = collections.OrderedDict()
def coord_to_tile(self, lat, lon, zoom):
'''convert lat/lon/zoom to a TileInfo'''
world_tiles = 1<<zoom
x = world_tiles / 360.0 * (lon + 180.0)
tiles_pre_radian = world_tiles / (2 * math.pi)
e = math.sin(lat * (1/180.*math.pi))
y = world_tiles/2 + 0.5*math.log((1+e)/(1-e)) * (-tiles_pre_radian)
offsetx = int((x - int(x)) * TILES_WIDTH)
offsety = int((y - int(y)) * TILES_HEIGHT)
return TileInfo((int(x) % world_tiles, int(y) % world_tiles), zoom, offset=(offsetx, offsety))
def tile_to_path(self, tile):
'''return full path to a tile'''
return os.path.join(self.cache_path, self.service, tile.path())
    def coord_to_tilepath(self, lat, lon, zoom):
        '''return the tile ID that covers a latitude/longitude at
        a specified zoom level
        '''
        # Convenience wrapper: coord -> TileInfo -> cache path.
        tile = self.coord_to_tile(lat, lon, zoom)
        return self.tile_to_path(tile)
def tiles_pending(self):
'''return number of tiles pending download'''
return len(self._download_pending)
    def downloader(self):
        '''the download thread'''
        http = httplib2.Http()
        while self.tiles_pending() > 0:
            time.sleep(self.tile_delay)
            # NOTE(review): .keys()[:] is Python-2-only (py3 keys() is a
            # view); this module targets Python 2.
            keys = self._download_pending.keys()[:]
            # work out which one to download next, choosing by request_time
            tile_info = self._download_pending[keys[0]]
            for key in keys:
                if self._download_pending[key].request_time > tile_info.request_time:
                    tile_info = self._download_pending[key]
            url = tile_info.url(self.service)
            path = self.tile_to_path(tile_info)
            key = tile_info.key()
            try:
                if self.debug:
                    print("Downloading %s [%u left]" % (url, len(keys)))
                resp,img = http.request(url)
            except httplib2.HttpLib2Error as e:
                #print('Error loading %s' % url)
                # Network failure: remember the tile as unavailable.
                self._tile_cache[key] = self._unavailable
                self._download_pending.pop(key)
                if self.debug:
                    print("Failed %s: %s" % (url, str(e)))
                continue
            if 'content-type' not in resp or resp['content-type'].find('image') == -1:
                # Server replied with something that isn't an image.
                self._tile_cache[key] = self._unavailable
                self._download_pending.pop(key)
                if self.debug:
                    print("non-image response %s" % url)
                continue
            # see if its a blank/unavailable tile
            md5 = hashlib.md5(img).hexdigest()
            if md5 in BLANK_TILES:
                if self.debug:
                    print("blank tile %s" % url)
                self._tile_cache[key] = self._unavailable
                self._download_pending.pop(key)
                continue
            # Write via a temp file + rename so readers never see a partial tile.
            mp_util.mkdir_p(os.path.dirname(path))
            h = open(path+'.tmp','wb')
            h.write(img)
            h.close()
            os.rename(path+'.tmp', path)
            self._download_pending.pop(key)
        self._download_thread = None
def start_download_thread(self):
'''start the downloader'''
if self._download_thread:
return
t = threading.Thread(target=self.downloader)
t.daemon = True
self._download_thread = t
t.start()
    def load_tile_lowres(self, tile):
        '''load a lower resolution tile from cache to fill in a
        map while waiting for a higher resolution tile'''
        if tile.zoom == self.min_zoom:
            return None
        # find the equivalent lower res tile
        (lat,lon) = tile.coord()
        width2 = TILES_WIDTH
        height2 = TILES_HEIGHT
        # Walk down the zoom levels; each step halves the source quadrant.
        for zoom2 in range(tile.zoom-1, self.min_zoom-1, -1):
            width2 /= 2
            height2 /= 2
            if width2 == 0 or height2 == 0:
                break
            tile_info = self.coord_to_tile(lat, lon, zoom2)
            # see if its in the tile cache
            key = tile_info.key()
            if key in self._tile_cache:
                img = self._tile_cache[key]
                if img == self._unavailable:
                    continue
            else:
                path = self.tile_to_path(tile_info)
                try:
                    img = cv.LoadImage(path)
                    # add it to the tile cache
                    self._tile_cache[key] = img
                    # Evict oldest entries once over the cache budget.
                    while len(self._tile_cache) > self.cache_size:
                        self._tile_cache.popitem(0)
                except IOError as e:
                    continue
            # copy out the quadrant we want
            cv.SetImageROI(img, (tile_info.offsetx, tile_info.offsety, width2, height2))
            img2 = cv.CreateImage((width2,height2), 8, 3)
            cv.Copy(img, img2)
            cv.ResetImageROI(img)
            # and scale it
            scaled = cv.CreateImage((TILES_WIDTH, TILES_HEIGHT), 8, 3)
            cv.Resize(img2, scaled)
            #cv.Rectangle(scaled, (0,0), (255,255), (0,255,0), 1)
            return scaled
        return None
    def load_tile(self, tile):
        '''load a tile from cache or tile server'''
        # see if its in the tile cache
        key = tile.key()
        if key in self._tile_cache:
            img = self._tile_cache[key]
            if img == self._unavailable:
                # Known-bad tile: try a scaled-up lower-resolution substitute.
                img = self.load_tile_lowres(tile)
                if img is None:
                    img = cv.LoadImage(self._unavailable)
                return img
        path = self.tile_to_path(tile)
        try:
            ret = cv.LoadImage(path)
            # add it to the tile cache
            self._tile_cache[key] = ret
            # Evict oldest entries once over the cache budget.
            while len(self._tile_cache) > self.cache_size:
                self._tile_cache.popitem(0)
            return ret
        except IOError as e:
            # Only "file not found" is expected here; re-raise anything else.
            if not e.errno in [errno.ENOENT]:
                raise
            pass
        if not self.download:
            img = self.load_tile_lowres(tile)
            if img is None:
                img = cv.LoadImage(self._unavailable)
            return img
        # Queue (or refresh) the download request, then show a placeholder.
        try:
            self._download_pending[key].refresh_time()
        except Exception:
            self._download_pending[key] = tile
        self.start_download_thread()
        img = self.load_tile_lowres(tile)
        if img is None:
            img = cv.LoadImage(self._loading)
        return img
    def scaled_tile(self, tile):
        '''return a scaled tile'''
        # tile.scale > 1 shrinks, < 1 enlarges the 256x256 source image.
        width = int(TILES_WIDTH / tile.scale)
        height = int(TILES_HEIGHT / tile.scale)
        scaled_tile = cv.CreateImage((width,height), 8, 3)
        full_tile = self.load_tile(tile)
        cv.Resize(full_tile, scaled_tile)
        return scaled_tile
def coord_from_area(self, x, y, lat, lon, width, ground_width):
'''return (lat,lon) for a pixel in an area image'''
pixel_width = ground_width / float(width)
dx = x * pixel_width
dy = y * pixel_width
return mp_util.gps_offset(lat, lon, dx, -dy)
def coord_to_pixel(self, lat, lon, width, ground_width, lat2, lon2):
'''return pixel coordinate (px,py) for position (lat2,lon2)
in an area image. Note that the results are relative to top,left
and may be outside the image'''
pixel_width = ground_width / float(width)
if lat is None or lon is None or lat2 is None or lon2 is None:
return (0,0)
dx = mp_util.gps_distance(lat, lon, lat, lon2)
if lon2 < lon:
dx = -dx
dy = mp_util.gps_distance(lat, lon, lat2, lon)
if lat2 > lat:
dy = -dy
dx /= pixel_width
dy /= pixel_width
return (int(dx), int(dy))
    def area_to_tile_list(self, lat, lon, width, height, ground_width, zoom=None):
        '''return a list of TileInfoScaled objects needed for
        an area of land, with ground_width in meters, and
        width/height in pixels.

        lat/lon is the top left corner. If unspecified, the
        zoom is automatically chosen to avoid having to grow
        the tiles
        '''
        pixel_width = ground_width / float(width)
        ground_height = ground_width * (height/(float(width)))
        top_right = mp_util.gps_newpos(lat, lon, 90, ground_width)
        bottom_left = mp_util.gps_newpos(lat, lon, 180, ground_height)
        bottom_right = mp_util.gps_newpos(bottom_left[0], bottom_left[1], 90, ground_width)
        # choose a zoom level if not provided
        if zoom is None:
            zooms = range(self.min_zoom, self.max_zoom+1)
        else:
            zooms = [zoom]
        # Pick the first zoom at which tiles would be shrunk (scale >= 1),
        # so we never have to enlarge (blur) tile imagery.
        for zoom in zooms:
            tile_min = self.coord_to_tile(lat, lon, zoom)
            (twidth,theight) = tile_min.size()
            tile_pixel_width = twidth / float(TILES_WIDTH)
            scale = pixel_width / tile_pixel_width
            if scale >= 1.0:
                break
        scaled_tile_width = int(TILES_WIDTH / scale)
        scaled_tile_height = int(TILES_HEIGHT / scale)
        # work out the bottom right tile
        tile_max = self.coord_to_tile(bottom_right[0], bottom_right[1], zoom)
        # Offset of the top-left corner inside the first tile, in scaled pixels.
        ofsx = int(tile_min.offsetx / scale)
        ofsy = int(tile_min.offsety / scale)
        srcy = ofsy
        dsty = 0
        ret = []
        # place the tiles
        for y in range(tile_min.y, tile_max.y+1):
            srcx = ofsx
            dstx = 0
            for x in range(tile_min.x, tile_max.x+1):
                if dstx < width and dsty < height:
                    ret.append(TileInfoScaled((x,y), zoom, scale, (srcx,srcy), (dstx,dsty)))
                dstx += scaled_tile_width-srcx
                srcx = 0
            dsty += scaled_tile_height-srcy
            srcy = 0
        return ret
    def area_to_image(self, lat, lon, width, height, ground_width, zoom=None, ordered=True):
        '''return an RGB image for an area of land, with ground_width
        in meters, and width/height in pixels.

        lat/lon is the top left corner. The zoom is automatically
        chosen to avoid having to grow the tiles'''
        img = cv.CreateImage((width,height),8,3)
        tlist = self.area_to_tile_list(lat, lon, width, height, ground_width, zoom)
        # order the display by distance from the middle, so the download happens
        # close to the middle of the image first
        if ordered:
            (midlat, midlon) = self.coord_from_area(width/2, height/2, lat, lon, width, ground_width)
            tlist.sort(key=lambda d: d.distance(midlat, midlon), reverse=True)
        for t in tlist:
            scaled_tile = self.scaled_tile(t)
            # Clip the copy region to both the target image and the tile.
            w = min(width - t.dstx, scaled_tile.width - t.srcx)
            h = min(height - t.dsty, scaled_tile.height - t.srcy)
            if w > 0 and h > 0:
                cv.SetImageROI(scaled_tile, (t.srcx, t.srcy, w, h))
                cv.SetImageROI(img, (t.dstx, t.dsty, w, h))
                cv.Copy(scaled_tile, img)
                cv.ResetImageROI(img)
                cv.ResetImageROI(scaled_tile)
        # return as an RGB image
        cv.CvtColor(img, img, cv.CV_BGR2RGB)
        return img
if __name__ == "__main__":
    # Self-test / bulk prefetch tool: downloads all tiles covering an area.
    from optparse import OptionParser
    parser = OptionParser("mp_tile.py [options]")
    parser.add_option("--lat", type='float', default=-35.362938, help="start latitude")
    parser.add_option("--lon", type='float', default=149.165085, help="start longitude")
    parser.add_option("--width", type='float', default=1000.0, help="width in meters")
    parser.add_option("--service", default="YahooSat", help="tile service")
    parser.add_option("--zoom", default=None, type='int', help="zoom level")
    parser.add_option("--max-zoom", type='int', default=19, help="maximum tile zoom")
    parser.add_option("--delay", type='float', default=1.0, help="tile download delay")
    parser.add_option("--boundary", default=None, help="region boundary")
    parser.add_option("--debug", action='store_true', default=False, help="show debug info")
    (opts, args) = parser.parse_args()
    lat = opts.lat
    lon = opts.lon
    ground_width = opts.width
    if opts.boundary:
        # Derive the area from a polygon file's bounding box instead.
        boundary = mp_util.polygon_load(opts.boundary)
        bounds = mp_util.polygon_bounds(boundary)
        lat = bounds[0]+bounds[2]
        lon = bounds[1]
        ground_width = max(mp_util.gps_distance(lat, lon, lat, lon+bounds[3]),
                           mp_util.gps_distance(lat, lon, lat-bounds[2], lon))
    # NOTE(review): Python-2 print statement; this module is Python-2 only.
    print lat, lon, ground_width
    mt = MPTile(debug=opts.debug, service=opts.service,
                tile_delay=opts.delay, max_zoom=opts.max_zoom)
    if opts.zoom is None:
        zooms = range(mt.min_zoom, mt.max_zoom+1)
    else:
        zooms = [opts.zoom]
    for zoom in zooms:
        tlist = mt.area_to_tile_list(lat, lon, width=1024, height=1024,
                                     ground_width=ground_width, zoom=zoom)
        print("zoom %u needs %u tiles" % (zoom, len(tlist)))
        # Queue every tile, then poll until the downloader drains the queue.
        for tile in tlist:
            mt.load_tile(tile)
        while mt.tiles_pending() > 0:
            time.sleep(2)
            print("Waiting on %u tiles" % mt.tiles_pending())
    print('Done')
| kd0aij/matrixpilot_old | Tools/MAVLink/MAVProxy/modules/lib/mp_tile.py | Python | gpl-3.0 | 18,202 |
from dec.grid1 import *
import matplotlib.pyplot as plt
N = 4
#g = Grid_1D.periodic(N)
g = Grid_1D.regular(N)
#g = Grid_1D.chebyshev(N)
# Sample points across the grid domain for plotting the basis functions.
z = linspace(g.xmin, g.xmax, 100) #+ 1e-16
B0, B1, B0d, B1d = g.basis_fn()
H0, H1, H0d, H1d = hodge_star_matrix(g.projection(), g.basis_fn())
# NOTE(review): H1d returned by hodge_star_matrix is immediately overridden
# with inv(H0) here -- confirm this is intentional.
H1d = linalg.inv(H0)
#polynomial fit
#def poly_coeff(basis):
#    A = array([polyfit(z, b(z), len(basis)-1)[::-1] for i, b in enumerate(basis)])
#    return A
#print poly_coeff(g.B0)
#print poly_coeff(g.B1d)
# Figure 1: dual 1-form basis vs inv(H0)^T applied to primal 0-form basis.
plt.figure()
A = linalg.inv(H0).T
U = array([b(z) for b in B1d])
V = dot(A, array([b(z) for b in B0]))
for u, v in zip(U, V):
    plt.plot(z, u)
    plt.plot(z, v, color='k')
plt.scatter(g.verts, 0*g.verts)
plt.scatter(g.verts_dual, 0*g.verts_dual, color='r', marker='x')
# Figure 2: dual 0-form basis vs inv(H1)^T applied to primal 1-form basis.
plt.figure()
A = linalg.inv(H1).T
U = array([b(z) for b in B0d])
V = dot(A, array([b(z) for b in B1]))
for u, v in zip(U, V):
    plt.plot(z, u)
    plt.plot(z, v, color='k')
plt.scatter(g.verts, 0*g.verts)
plt.scatter(g.verts_dual, 0*g.verts_dual, color='r', marker='x')
plt.show()
| drufat/dec | doc/plot/cheb/basis_forms.py | Python | gpl-3.0 | 1,067 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Bottle is a fast and simple micro-framework for small web applications. It
offers request dispatching (Routes) with URL parameter support, templates,
a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
template engines - all in a single file and with no dependencies other than the
Python Standard Library.
Homepage and documentation: http://bottlepy.org/
Copyright (c) 2014, Marcel Hellkamp.
License: MIT (see LICENSE for details)
"""
from __future__ import with_statement
import sys
__author__ = 'Marcel Hellkamp'
__version__ = '0.13-dev'
__license__ = 'MIT'
###############################################################################
# 命令行接口###################################################################
###############################################################################
# INFO: Some server adapters need to monkey-patch std-lib modules before they
# are imported. This is why some of the command-line handling is done here, but
# the actual call to main() is at the end of the file.
def _cli_parse(args):
    """Parse bottle's own command line (everything after argv[0]).

    Returns (options, positional_args, parser)."""
    from optparse import OptionParser
    parser = OptionParser(
        usage="usage: %prog [options] package.module:app")
    add = parser.add_option
    add("--version", action="store_true", help="show version number.")
    add("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.")
    add("-s", "--server", default='wsgiref', help="use SERVER as backend.")
    add("-p", "--plugin", action="append", help="install additional plugin/s.")
    add("-c", "--conf", action="append", metavar="FILE",
        help="load config values from FILE.")
    add("-C", "--param", action="append", metavar="NAME=VALUE",
        help="override config values.")
    add("--debug", action="store_true", help="start server in debug mode.")
    add("--reload", action="store_true", help="auto-reload on file changes.")
    opts, args = parser.parse_args(args[1:])
    return opts, args, parser
def _cli_patch(args):
    """Monkey-patch the stdlib early if the selected server requires it."""
    opts, _, _ = _cli_parse(args)
    if not opts.server:
        return
    if opts.server.startswith('gevent'):
        import gevent.monkey
        gevent.monkey.patch_all()
    elif opts.server.startswith('eventlet'):
        import eventlet
        eventlet.monkey_patch()
if __name__ == '__main__':
_cli_patch(sys.argv)
###############################################################################
# Imports and Python 2/3 unification ###########################################
###############################################################################
import base64, cgi, email.utils, functools, hmac, imp, itertools, mimetypes,\
os, re, tempfile, threading, time, warnings
from types import FunctionType
from datetime import date as datedate, datetime, timedelta
from tempfile import TemporaryFile
from traceback import format_exc, print_exc
from unicodedata import normalize
# inspect.getargspec was removed in Python 3.6, use
# Signature-based version where we can (Python 3.3+)
try:
from inspect import signature
def getargspec(func):
params = signature(func).parameters
args, varargs, keywords, defaults = [], None, None, []
for name, param in params.items():
if param.kind == param.VAR_POSITIONAL:
varargs = name
elif param.kind == param.VAR_KEYWORD:
keywords = name
else:
args.append(name)
if param.default is not param.empty:
defaults.append(param.default)
return (args, varargs, keywords, tuple(defaults) or None)
except ImportError:
from inspect import getargspec
try:
from simplejson import dumps as json_dumps, loads as json_lds
except ImportError: # pragma: no cover
try:
from json import dumps as json_dumps, loads as json_lds
except ImportError:
try:
from django.utils.simplejson import dumps as json_dumps, loads as json_lds
except ImportError:
def json_dumps(data):
raise ImportError(
"JSON support requires Python 2.6 or simplejson.")
json_lds = json_dumps
# We now try to fix 2.5/2.6/3.1/3.2 incompatibilities.
# It ain't pretty but it works... Sorry for the mess.
py = sys.version_info
py3k = py >= (3, 0, 0)
py25 = py < (2, 6, 0)
py31 = (3, 1, 0) <= py < (3, 2, 0)
# Workaround for the missing "as" keyword in py3k.
def _e():
return sys.exc_info()[1]
# Workaround for the "print is a keyword/function" Python 2/3 dilemma
# and a fallback for mod_wsgi (resticts stdout/err attribute access)
try:
_stdout, _stderr = sys.stdout.write, sys.stderr.write
except IOError:
_stdout = lambda x: sys.stdout.write(x)
_stderr = lambda x: sys.stderr.write(x)
# Lots of stdlib and builtin differences.
if py3k:
import http.client as httplib
import _thread as thread
from urllib.parse import urljoin, SplitResult as UrlSplitResult
from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
urlunquote = functools.partial(urlunquote, encoding='latin1')
from http.cookies import SimpleCookie
from collections import MutableMapping as DictMixin
import pickle
from io import BytesIO
from configparser import ConfigParser, Error as ConfigParserError
basestring = str
unicode = str
json_loads = lambda s: json_lds(touni(s))
callable = lambda x: hasattr(x, '__call__')
imap = map
def _raise(*a):
raise a[0](a[1]).with_traceback(a[2])
else: # 2.x
import httplib
import thread
from urlparse import urljoin, SplitResult as UrlSplitResult
from urllib import urlencode, quote as urlquote, unquote as urlunquote
from Cookie import SimpleCookie
from itertools import imap
import cPickle as pickle
from StringIO import StringIO as BytesIO
from ConfigParser import SafeConfigParser as ConfigParser, \
Error as ConfigParserError
if py25:
msg = "Python 2.5 support may be dropped in future versions of Bottle."
warnings.warn(msg, DeprecationWarning)
from UserDict import DictMixin
def next(it):
return it.next()
bytes = str
else: # 2.6, 2.7
from collections import MutableMapping as DictMixin
unicode = unicode
json_loads = json_lds
eval(compile('def _raise(*a): raise a[0], a[1], a[2]', '<py3fix>', 'exec'))
# Some helpers for string/byte handling
def tob(s, enc='utf8'):
    # "to bytes": encode text with *enc*; non-text values go through bytes().
    return s.encode(enc) if isinstance(s, unicode) else bytes(s)
def touni(s, enc='utf8', err='strict'):
    # "to unicode": decode byte strings; everything else is coerced via
    # unicode() (None becomes the empty string).
    if isinstance(s, bytes):
        return s.decode(enc, err)
    else:
        return unicode(s or ("" if s is None else s))
# "to native": the str type of the running interpreter (unicode on py3k).
tonat = touni if py3k else tob
# 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense).
# 3.1 needs a workaround.
if py31:
from io import TextIOWrapper
class NCTextIOWrapper(TextIOWrapper):
def close(self):
pass # Keep wrapped buffer open.
# A bug in functools causes it to break if the wrapper is an instance method
def update_wrapper(wrapper, wrapped, *a, **ka):
try:
functools.update_wrapper(wrapper, wrapped, *a, **ka)
except AttributeError:
pass
# These helpers are used at module level and need to be defined first.
# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense.
def depr(message, strict=False):
    """Emit a DeprecationWarning attributed to the caller's caller."""
    warnings.warn(message, DeprecationWarning, stacklevel=3)
def makelist(data):  # This is just too handy
    """Coerce *data* to a list: containers are copied, truthy scalars are
    wrapped, and falsy values become the empty list."""
    if isinstance(data, (tuple, list, set, dict)):
        return list(data)
    return [data] if data else []
class DictProperty(object):
    """ Property that maps to a key in a local dict-like attribute. """

    def __init__(self, attr, key=None, read_only=False):
        self.attr = attr
        self.key = key
        self.read_only = read_only

    def __call__(self, func):
        # Used as a decorator: remember the getter; default key = func name.
        functools.update_wrapper(self, func, updated=[])
        self.getter = func
        self.key = self.key or func.__name__
        return self

    def __get__(self, obj, cls):
        if obj is None:
            return self
        storage = getattr(obj, self.attr)
        if self.key not in storage:
            storage[self.key] = self.getter(obj)
        return storage[self.key]

    def __set__(self, obj, value):
        if self.read_only:
            raise AttributeError("Read-Only property.")
        getattr(obj, self.attr)[self.key] = value

    def __delete__(self, obj):
        if self.read_only:
            raise AttributeError("Read-Only property.")
        del getattr(obj, self.attr)[self.key]
class cached_property(object):
    """ A property that is computed once per instance and then replaces itself
        with an ordinary instance attribute. Deleting the attribute resets
        the property (the next access recomputes it). """

    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            return self
        # Storing the result in obj.__dict__ shadows this non-data
        # descriptor, so the getter never runs again for this instance.
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result
class lazy_attribute(object):
    """ A property that caches itself to the class object: the first access
        computes the value and overwrites the descriptor on the class. """

    def __init__(self, func):
        functools.update_wrapper(self, func, updated=[])
        self.getter = func

    def __get__(self, obj, cls):
        # Note: the getter receives the class, not the instance.
        result = self.getter(cls)
        setattr(cls, self.__name__, result)
        return result
###############################################################################
# Exceptions and Events ########################################################
###############################################################################
class BottleException(Exception):
    """ A base class for exceptions used by bottle.

        Catching this type catches every framework-specific error. """
    pass
###############################################################################
# Routing ######################################################################
###############################################################################
class RouteError(BottleException):
    """ This is a base class for all routing related exceptions. """
class RouteReset(BottleException):
    """ If raised by a plugin or request handler, the route is reset and all
        plugins are re-applied. """
class RouterUnknownModeError(RouteError):
    """ Raised when the router is asked to use a mode it does not know. """
    pass
class RouteSyntaxError(RouteError):
    """ The route parser found something not supported by this router. """
class RouteBuildError(RouteError):
    """ The route could not be built (e.g. unknown name or missing wildcard
        argument). """
def _re_flatten(p):
""" Turn all capturing groups in a regular expression pattern into
non-capturing groups. """
if '(' not in p:
return p
return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))', lambda m: m.group(0) if
len(m.group(1)) % 2 else m.group(1) + '(?:', p)
class Router(object):
    """ A Router is an ordered collection of route->target pairs. It is used to
        efficiently match WSGI requests against a number of routes and return
        the first target that satisfies the request. The target may be anything,
        usually a string, ID or callable object. A route consists of a path-rule
        and a HTTP method.

        The path-rule is either a static path (e.g. `/contact`) or a dynamic
        path that contains wildcards (e.g. `/wiki/<page>`). The wildcard syntax
        and details on the matching order are described in docs:`routing`.
    """
    default_pattern = '[^/]+'
    default_filter = 're'

    #: The current CPython regexp implementation does not allow more
    #: than 99 matching groups per regular expression.
    _MAX_GROUPS_PER_PATTERN = 99

    def __init__(self, strict=False):
        self.rules = []  # All rules in order
        self._groups = {}  # index of regexes to find them in dyna_routes
        self.builder = {}  # Data structure for the url builder
        self.static = {}  # Search structure for static routes
        self.dyna_routes = {}
        self.dyna_regexes = {}  # Search structure for dynamic routes
        #: If true, static routes are no longer checked first.
        self.strict_order = strict
        # Each filter maps a config string to (regexp, to_python, to_url).
        self.filters = {
            're': lambda conf: (_re_flatten(conf or self.default_pattern),
                                None, None),
            'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))),
            'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))),
            'path': lambda conf: (r'.+?', None, None)
        }

    def add_filter(self, name, func):
        """ Add a filter. The provided function is called with the configuration
        string as parameter and must return a (regexp, to_python, to_url) tuple.
        The first element is a string, the last two are callables or None. """
        self.filters[name] = func

    # Matches both the old ``:name#conf#`` and the new ``<name:filter:conf>``
    # wildcard syntaxes, including escaped wildcards (odd backslash count).
    rule_syntax = re.compile('(\\\\*)'
                             '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'
                             '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'
                             '(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))')

    def _itertokens(self, rule):
        """ Split *rule* into (literal, None, None) and
            (name, filter, config) wildcard tokens. """
        offset, prefix = 0, ''
        for match in self.rule_syntax.finditer(rule):
            prefix += rule[offset:match.start()]
            g = match.groups()
            if len(g[0]) % 2:  # Escaped wildcard
                prefix += match.group(0)[len(g[0]):]
                offset = match.end()
                continue
            if prefix:
                yield prefix, None, None
            # g[2] is the empty marker group of the old ':name' syntax.
            name, filtr, conf = g[4:7] if g[2] is None else g[1:4]
            yield name, filtr or 'default', conf or None
            offset, prefix = match.end(), ''
        if offset <= len(rule) or prefix:
            yield prefix + rule[offset:], None, None

    def add(self, rule, method, target, name=None):
        """ Add a new rule or replace the target for an existing rule. """
        anons = 0  # Number of anonymous wildcards found
        keys = []  # Names of keys
        pattern = ''  # Regular expression pattern with named groups
        filters = []  # Lists of wildcard input filters
        builder = []  # Data structure for the URL builder
        is_static = True

        for key, mode, conf in self._itertokens(rule):
            if mode:
                is_static = False
                if mode == 'default': mode = self.default_filter
                mask, in_filter, out_filter = self.filters[mode](conf)
                if not key:
                    # Anonymous wildcard: match but do not capture by name.
                    pattern += '(?:%s)' % mask
                    key = 'anon%d' % anons
                    anons += 1
                else:
                    pattern += '(?P<%s>%s)' % (key, mask)
                    keys.append(key)
                if in_filter: filters.append((key, in_filter))
                builder.append((key, out_filter or str))
            elif key:
                pattern += re.escape(key)
                builder.append((None, key))

        self.builder[rule] = builder
        if name: self.builder[name] = builder

        if is_static and not self.strict_order:
            self.static.setdefault(method, {})
            self.static[method][self.build(rule)] = (target, None)
            return

        try:
            re_pattern = re.compile('^(%s)$' % pattern)
            re_match = re_pattern.match
        except re.error:
            raise RouteSyntaxError("Could not add Route: %s (%s)" %
                                   (rule, _e()))

        if filters:
            def getargs(path):
                url_args = re_match(path).groupdict()
                for name, wildcard_filter in filters:
                    try:
                        url_args[name] = wildcard_filter(url_args[name])
                    except ValueError:
                        raise HTTPError(400, 'Path has wrong format.')
                return url_args
        elif re_pattern.groupindex:
            def getargs(path):
                return re_match(path).groupdict()
        else:
            getargs = None

        flatpat = _re_flatten(pattern)
        whole_rule = (rule, flatpat, target, getargs)

        if (flatpat, method) in self._groups:
            if DEBUG:
                msg = 'Route <%s %s> overwrites a previously defined route'
                warnings.warn(msg % (method, rule), RuntimeWarning)
            self.dyna_routes[method][
                self._groups[flatpat, method]] = whole_rule
        else:
            self.dyna_routes.setdefault(method, []).append(whole_rule)
            self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1

        self._compile(method)

    def _compile(self, method):
        """ Rebuild the combined regexes for *method*, batching at most
            :attr:`_MAX_GROUPS_PER_PATTERN` rules per compiled pattern. """
        all_rules = self.dyna_routes[method]
        comborules = self.dyna_regexes[method] = []
        maxgroups = self._MAX_GROUPS_PER_PATTERN
        for x in range(0, len(all_rules), maxgroups):
            some = all_rules[x:x + maxgroups]
            combined = (flatpat for (_, flatpat, _, _) in some)
            combined = '|'.join('(^%s$)' % flatpat for flatpat in combined)
            combined = re.compile(combined).match
            rules = [(target, getargs) for (_, _, target, getargs) in some]
            comborules.append((combined, rules))

    def build(self, _name, *anons, **query):
        """ Build an URL by filling the wildcards in a rule. Leftover keyword
            arguments are appended as a query string. """
        builder = self.builder.get(_name)
        if not builder:
            raise RouteBuildError("No route with that name.", _name)
        try:
            for i, value in enumerate(anons):
                query['anon%d' % i] = value
            url = ''.join([f(query.pop(n)) if n else f for (n, f) in builder])
            return url if not query else url + '?' + urlencode(query)
        except KeyError:
            raise RouteBuildError('Missing URL argument: %r' % _e().args[0])

    def match(self, environ):
        """ Return a (target, url_args) tuple or raise HTTPError(400/404/405). """
        verb = environ['REQUEST_METHOD'].upper()
        path = environ['PATH_INFO'] or '/'

        # HEAD requests fall back to GET routes; PROXY and ANY always apply.
        if verb == 'HEAD':
            methods = ['PROXY', verb, 'GET', 'ANY']
        else:
            methods = ['PROXY', verb, 'ANY']

        for method in methods:
            if method in self.static and path in self.static[method]:
                target, getargs = self.static[method][path]
                return target, getargs(path) if getargs else {}
            elif method in self.dyna_regexes:
                for combined, rules in self.dyna_regexes[method]:
                    match = combined(path)
                    if match:
                        target, getargs = rules[match.lastindex - 1]
                        return target, getargs(path) if getargs else {}

        # No matching route found. Collect alternative methods for 405 response
        allowed = set([])
        nocheck = set(methods)
        for method in set(self.static) - nocheck:
            if path in self.static[method]:
                # BUGFIX: advertise the route's supported method, not the
                # request's own (unsupported) verb, in the Allow header.
                allowed.add(method)
        for method in set(self.dyna_regexes) - allowed - nocheck:
            for combined, rules in self.dyna_regexes[method]:
                match = combined(path)
                if match:
                    allowed.add(method)
        if allowed:
            allow_header = ",".join(sorted(allowed))
            raise HTTPError(405, "Method not allowed.", Allow=allow_header)

        # No matching route and no alternative method found. We give up
        raise HTTPError(404, "Not found: " + repr(path))
class Route(object):
    """ This class wraps a route callback along with route specific metadata and
        configuration and applies Plugins on demand. It is also responsible for
        turning an URL path rule into a regular expression usable by the Router.
    """

    def __init__(self, app, rule, method, callback,
                 name=None,
                 plugins=None,
                 skiplist=None, **config):
        #: The application this route is installed to.
        self.app = app
        #: The path-rule string (e.g. ``/wiki/<page>``).
        self.rule = rule
        #: The HTTP method as a string (e.g. ``GET``).
        self.method = method
        #: The original callback with no plugins applied. Useful for introspection.
        self.callback = callback
        #: The name of the route (if specified) or ``None``.
        self.name = name or None
        #: A list of route-specific plugins (see :meth:`Bottle.route`).
        self.plugins = plugins or []
        #: A list of plugins to not apply to this route (see :meth:`Bottle.route`).
        self.skiplist = skiplist or []
        #: Additional keyword arguments passed to the :meth:`Bottle.route`
        #: decorator are stored in this dictionary. Used for route-specific
        #: plugin configuration and meta-data.
        self.config = ConfigDict().load_dict(config)

    @cached_property
    def call(self):
        """ The route callback with all plugins applied. This property is
            created on demand and then cached to speed up subsequent requests."""
        return self._make_callback()

    def reset(self):
        """ Forget any cached values. The next time :attr:`call` is accessed,
            all plugins are re-applied. """
        self.__dict__.pop('call', None)

    def prepare(self):
        """ Do all on-demand work immediately (useful for debugging)."""
        self.call

    def all_plugins(self):
        """ Yield all Plugins affecting this route, most recently installed
            first, honoring the skip list. """
        unique = set()
        for p in reversed(self.app.plugins + self.plugins):
            if True in self.skiplist: break  # ``skip=True`` disables all
            name = getattr(p, 'name', False)
            if name and (name in self.skiplist or name in unique): continue
            if p in self.skiplist or type(p) in self.skiplist: continue
            if name: unique.add(name)
            yield p

    def _make_callback(self):
        """ Apply all plugins to the raw callback and return the result. """
        callback = self.callback
        for plugin in self.all_plugins():
            try:
                if hasattr(plugin, 'apply'):
                    callback = plugin.apply(callback, self)
                else:
                    callback = plugin(callback)
            except RouteReset:  # Try again with changed configuration.
                return self._make_callback()
        # Idiom fix: ``is not`` instead of ``not ... is``.
        if callback is not self.callback:
            update_wrapper(callback, self.callback)
        return callback

    def get_undecorated_callback(self):
        """ Return the callback. If the callback is a decorated function, try to
            recover the original function. """
        func = self.callback
        func = getattr(func, '__func__' if py3k else 'im_func', func)
        closure_attr = '__closure__' if py3k else 'func_closure'
        while hasattr(func, closure_attr) and getattr(func, closure_attr):
            attributes = getattr(func, closure_attr)
            func = attributes[0].cell_contents

            # in case of decorators with multiple arguments
            if not isinstance(func, FunctionType):
                # pick first FunctionType instance from multiple arguments
                func = filter(lambda x: isinstance(x, FunctionType),
                              map(lambda x: x.cell_contents, attributes))
                func = list(func)[0]  # py3 support
        return func

    def get_callback_args(self):
        """ Return a list of argument names the callback (most likely) accepts
            as keyword arguments. If the callback is a decorated function, try
            to recover the original function before inspection. """
        return getargspec(self.get_undecorated_callback())[0]

    def get_config(self, key, default=None):
        """ Lookup a config field and return its value, first checking the
            route.config, then route.app.config."""
        for conf in (self.config, self.app.config):
            if key in conf: return conf[key]
        return default

    def __repr__(self):
        cb = self.get_undecorated_callback()
        return '<%s %r %r>' % (self.method, self.rule, cb)
###############################################################################
# Application Object ###########################################################
###############################################################################
class Bottle(object):
    """ Each Bottle object represents a single, distinct web application and
        consists of routes, callbacks, plugins, resources and configuration.
        Instances are callable WSGI applications.

        :param catchall: If true (default), handle all exceptions. Turn off to
                         let debugging middleware handle exceptions.
    """

    def __init__(self, catchall=True, autojson=True):
        """ :param catchall: catch and handle all exceptions (default: True).
            :param autojson: install :class:`JSONPlugin` so dicts returned by
                callbacks are serialized as JSON (default: True). """
        #: A :class:`ConfigDict` for app specific configuration.
        self.config = ConfigDict()
        self.config._on_change = functools.partial(self.trigger_hook, 'config')
        self.config.meta_set('autojson', 'validate', bool)
        self.config.meta_set('catchall', 'validate', bool)
        self.config['catchall'] = catchall
        self.config['autojson'] = autojson

        #: A :class:`ResourceManager` for application files
        self.resources = ResourceManager()

        self.routes = []  # List of installed :class:`Route` instances.
        self.router = Router()  # Maps requests to :class:`Route` instances.
        self.error_handler = {}

        # Core plugins
        self.plugins = []  # List of installed plugins.
        if self.config['autojson']:
            self.install(JSONPlugin())
        self.install(TemplatePlugin())

    #: If true, most exceptions are caught and returned as :exc:`HTTPError`
    catchall = DictProperty('config', 'catchall')

    __hook_names = 'before_request', 'after_request', 'app_reset', 'config'
    __hook_reversed = 'after_request'

    @cached_property
    def _hooks(self):
        return dict((name, []) for name in self.__hook_names)

    def add_hook(self, name, func):
        """ Attach a callback to a hook. Three hooks are currently implemented:

            before_request
                Executed once before each request. The request context is
                available, but no routing has happened yet.
            after_request
                Executed once after each request regardless of its outcome.
            app_reset
                Called whenever :meth:`Bottle.reset` is called.
        """
        if name in self.__hook_reversed:
            # Reversed hooks run in LIFO order.
            self._hooks[name].insert(0, func)
        else:
            self._hooks[name].append(func)

    def remove_hook(self, name, func):
        """ Remove a callback from a hook. """
        if name in self._hooks and func in self._hooks[name]:
            self._hooks[name].remove(func)
            return True

    def trigger_hook(self, __name, *args, **kwargs):
        """ Trigger a hook and return a list of results. """
        # Iterate over a copy so hooks may remove themselves while running.
        return [hook(*args, **kwargs) for hook in self._hooks[__name][:]]

    def hook(self, name):
        """ Return a decorator that attaches a callback to a hook. See
            :meth:`add_hook` for details."""

        def decorator(func):
            self.add_hook(name, func)
            return func

        return decorator

    def mount(self, prefix, app, **options):
        """ Mount an application (:class:`Bottle` or plain WSGI) to a specific
            URL prefix. Example::

                root_app.mount('/admin/', admin_app)

            :param prefix: path prefix or `mount-point`. If it ends in a slash,
                that slash is mandatory.
            :param app: an instance of :class:`Bottle` or a WSGI application.

            All other parameters are passed to the underlying :meth:`route` call.
        """

        segments = [p for p in prefix.split('/') if p]
        if not segments: raise ValueError('Empty path prefix.')
        path_depth = len(segments)

        def mountpoint_wrapper():
            try:
                request.path_shift(path_depth)
                rs = HTTPResponse([])

                def start_response(status, headerlist, exc_info=None):
                    if exc_info:
                        _raise(*exc_info)
                    rs.status = status
                    for name, value in headerlist:
                        rs.add_header(name, value)
                    return rs.body.append

                body = app(request.environ, start_response)
                rs.body = itertools.chain(rs.body, body) if rs.body else body
                return rs
            finally:
                request.path_shift(-path_depth)

        options.setdefault('skip', True)
        options.setdefault('method', 'PROXY')
        options.setdefault('mountpoint', {'prefix': prefix, 'target': app})
        options['callback'] = mountpoint_wrapper

        self.route('/%s/<:re:.*>' % '/'.join(segments), **options)
        if not prefix.endswith('/'):
            self.route('/' + '/'.join(segments), **options)

    def merge(self, routes):
        """ Merge the routes of another :class:`Bottle` application or a list of
            :class:`Route` objects into this application. The routes keep their
            'owner', meaning that the :data:`Route.app` attribute is not
            changed. """
        if isinstance(routes, Bottle):
            routes = routes.routes
        for route in routes:
            self.add_route(route)

    def install(self, plugin):
        """ Add a plugin to the list of plugins and prepare it for being
            applied to all routes of this application. A plugin may be a simple
            decorator or an object that implements the :class:`Plugin` API.
        """
        if hasattr(plugin, 'setup'): plugin.setup(self)
        if not callable(plugin) and not hasattr(plugin, 'apply'):
            raise TypeError("Plugins must be callable or implement .apply()")
        self.plugins.append(plugin)
        self.reset()
        return plugin

    def uninstall(self, plugin):
        """ Uninstall plugins. Pass an instance to remove a specific plugin, a type
            object to remove all plugins that match that type, a string to remove
            all plugins with a matching ``name`` attribute or ``True`` to remove all
            plugins. Return the list of removed plugins. """
        removed, remove = [], plugin
        # Iterate in reverse so deletions do not shift pending indices.
        for i, plugin in list(enumerate(self.plugins))[::-1]:
            if remove is True or remove is plugin or remove is type(plugin) \
            or getattr(plugin, 'name', True) == remove:
                removed.append(plugin)
                del self.plugins[i]
                if hasattr(plugin, 'close'): plugin.close()
        if removed: self.reset()
        return removed

    def reset(self, route=None):
        """ Reset all routes (force plugins to be re-applied) and clear all
            caches. If an ID or route object is given, only that specific route
            is affected. """
        if route is None: routes = self.routes
        elif isinstance(route, Route): routes = [route]
        else: routes = [self.routes[route]]
        for route in routes:
            route.reset()
        if DEBUG:
            for route in routes:
                route.prepare()
        self.trigger_hook('app_reset')

    def close(self):
        """ Close the application and all installed plugins. """
        for plugin in self.plugins:
            if hasattr(plugin, 'close'): plugin.close()

    def run(self, **kwargs):
        """ Calls :func:`run` with the same parameters. """
        run(self, **kwargs)

    def match(self, environ):
        """ Search for a matching route and return a (:class:`Route` , urlargs)
            tuple. The second value is a dictionary with parameters extracted
            from the URL. Raise :exc:`HTTPError` (404/405) on a non-match."""
        return self.router.match(environ)

    def get_url(self, routename, **kargs):
        """ Return a string that matches a named route """
        scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
        location = self.router.build(routename, **kargs).lstrip('/')
        return urljoin(urljoin('/', scriptname), location)

    def add_route(self, route):
        """ Add a route object, but do not change the :data:`Route.app`
            attribute."""
        self.routes.append(route)
        self.router.add(route.rule, route.method, route, name=route.name)
        if DEBUG: route.prepare()

    def route(self,
              path=None,
              method='GET',
              callback=None,
              name=None,
              apply=None,
              skip=None, **config):
        """ A decorator to bind a function to a request URL. Example::

                @app.route('/hello/<name>')
                def hello(name):
                    return 'Hello %s' % name

            The ``<name>`` part is a wildcard. See :class:`Router` for syntax
            details.

            :param path: Request path or a list of paths to listen to. If no
              path is specified, it is automatically generated from the
              signature of the function.
            :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of
              methods to listen to. (default: `GET`)
            :param callback: An optional shortcut to avoid the decorator
              syntax. ``route(..., callback=func)`` equals ``route(...)(func)``
            :param name: The name for this route. (default: None)
            :param apply: A decorator or plugin or a list of plugins. These are
              applied to the route callback in addition to installed plugins.
            :param skip: A list of plugins, plugin classes or names. Matching
              plugins are not installed to this route. ``True`` skips all.

            Any additional keyword arguments are stored as route-specific
            configuration and passed to plugins (see :meth:`Plugin.apply`).
        """
        # Support bare ``@app.route`` usage (path is the callback itself).
        if callable(path): path, callback = None, path
        plugins = makelist(apply)
        skiplist = makelist(skip)

        def decorator(callback):
            if isinstance(callback, basestring): callback = load(callback)
            for rule in makelist(path) or yieldroutes(callback):
                for verb in makelist(method):
                    verb = verb.upper()
                    route = Route(self, rule, verb, callback,
                                  name=name,
                                  plugins=plugins,
                                  skiplist=skiplist, **config)
                    self.add_route(route)
            return callback

        return decorator(callback) if callback else decorator

    def get(self, path=None, method='GET', **options):
        """ Equals :meth:`route`. """
        return self.route(path, method, **options)

    def post(self, path=None, method='POST', **options):
        """ Equals :meth:`route` with a ``POST`` method parameter. """
        return self.route(path, method, **options)

    def put(self, path=None, method='PUT', **options):
        """ Equals :meth:`route` with a ``PUT`` method parameter. """
        return self.route(path, method, **options)

    def delete(self, path=None, method='DELETE', **options):
        """ Equals :meth:`route` with a ``DELETE`` method parameter. """
        return self.route(path, method, **options)

    def patch(self, path=None, method='PATCH', **options):
        """ Equals :meth:`route` with a ``PATCH`` method parameter. """
        return self.route(path, method, **options)

    def error(self, code=500):
        """ Decorator: Register an output handler for a HTTP error code"""

        def wrapper(handler):
            self.error_handler[int(code)] = handler
            return handler

        return wrapper

    def default_error_handler(self, res):
        """ Render the built-in error page for *res* (an :exc:`HTTPError`). """
        return tob(template(ERROR_PAGE_TEMPLATE, e=res))

    def _handle(self, environ):
        """ Resolve and execute the route for *environ*, returning the raw
            callback result (or an HTTPError/HTTPResponse instance). """
        path = environ['bottle.raw_path'] = environ['PATH_INFO']
        if py3k:
            # WSGI hands PATH_INFO over latin1-decoded; re-interpret as UTF-8.
            environ['PATH_INFO'] = path.encode('latin1').decode('utf8', 'ignore')

        def _inner_handle():
            # Maybe pass variables as locals for better performance?
            try:
                route, args = self.router.match(environ)
                environ['route.handle'] = route
                environ['bottle.route'] = route
                environ['route.url_args'] = args
                return route.call(**args)
            except HTTPResponse:
                return _e()
            except RouteReset:
                route.reset()
                return _inner_handle()
            except (KeyboardInterrupt, SystemExit, MemoryError):
                raise
            except Exception:
                if not self.catchall: raise
                stacktrace = format_exc()
                environ['wsgi.errors'].write(stacktrace)
                return HTTPError(500, "Internal Server Error", _e(), stacktrace)

        try:
            out = None
            environ['bottle.app'] = self
            request.bind(environ)
            response.bind()
            self.trigger_hook('before_request')
            out = _inner_handle()
            return out
        finally:
            if isinstance(out, HTTPResponse):
                out.apply(response)
            self.trigger_hook('after_request')

    def _cast(self, out, peek=None):
        """ Try to convert the parameter into something WSGI compatible and set
        correct HTTP headers when possible.
        Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
        iterable of strings and iterable of unicodes
        """

        # Empty output is done here
        if not out:
            if 'Content-Length' not in response:
                response['Content-Length'] = 0
            return []
        # Join lists of byte or unicode strings. Mixed lists are NOT supported
        if isinstance(out, (tuple, list))\
        and isinstance(out[0], (bytes, unicode)):
            out = out[0][0:0].join(out)  # b'abc'[0:0] -> b''
        # Encode unicode strings
        if isinstance(out, unicode):
            out = out.encode(response.charset)
        # Byte Strings are just returned
        if isinstance(out, bytes):
            if 'Content-Length' not in response:
                response['Content-Length'] = len(out)
            return [out]
        # HTTPError or HTTPException (recursive, because they may wrap anything)
        # TODO: Handle these explicitly in handle() or make them iterable.
        if isinstance(out, HTTPError):
            out.apply(response)
            out = self.error_handler.get(out.status_code,
                                         self.default_error_handler)(out)
            return self._cast(out)
        if isinstance(out, HTTPResponse):
            out.apply(response)
            return self._cast(out.body)

        # File-like objects.
        if hasattr(out, 'read'):
            if 'wsgi.file_wrapper' in request.environ:
                return request.environ['wsgi.file_wrapper'](out)
            elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
                return WSGIFileWrapper(out)

        # Handle Iterables. We peek into them to detect their inner type.
        try:
            iout = iter(out)
            first = next(iout)
            while not first:
                first = next(iout)
        except StopIteration:
            return self._cast('')
        except HTTPResponse:
            first = _e()
        except (KeyboardInterrupt, SystemExit, MemoryError):
            raise
        except:
            if not self.catchall: raise
            first = HTTPError(500, 'Unhandled exception', _e(), format_exc())

        # These are the inner types allowed in iterator or generator objects.
        if isinstance(first, HTTPResponse):
            return self._cast(first)
        elif isinstance(first, bytes):
            new_iter = itertools.chain([first], iout)
        elif isinstance(first, unicode):
            encoder = lambda x: x.encode(response.charset)
            new_iter = imap(encoder, itertools.chain([first], iout))
        else:
            msg = 'Unsupported response type: %s' % type(first)
            return self._cast(HTTPError(500, msg))
        if hasattr(out, 'close'):
            new_iter = _closeiter(new_iter, out.close)
        return new_iter

    def wsgi(self, environ, start_response):
        """ The bottle WSGI-interface. """
        try:
            out = self._cast(self._handle(environ))
            # rfc2616 section 4.3
            if response._status_code in (100, 101, 204, 304)\
            or environ['REQUEST_METHOD'] == 'HEAD':
                if hasattr(out, 'close'): out.close()
                out = []
            start_response(response._status_line, response.headerlist)
            return out
        except (KeyboardInterrupt, SystemExit, MemoryError):
            raise
        except:
            if not self.catchall: raise
            err = '<h1>Critical error while processing request: %s</h1>' \
                  % html_escape(environ.get('PATH_INFO', '/'))
            if DEBUG:
                err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \
                       '<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n' \
                       % (html_escape(repr(_e())), html_escape(format_exc()))
            environ['wsgi.errors'].write(err)
            headers = [('Content-Type', 'text/html; charset=UTF-8')]
            start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info())
            return [tob(err)]

    def __call__(self, environ, start_response):
        """ Each instance of :class:`Bottle` is a WSGI application. """
        return self.wsgi(environ, start_response)

    def __enter__(self):
        """ Use this application as default for all module-level shortcuts. """
        default_app.push(self)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        default_app.pop()

    def __setattr__(self, name, value):
        # Attributes are write-once to surface accidental plugin collisions.
        if name in self.__dict__:
            raise AttributeError("Attribute %s already defined. Plugin conflict?" % name)
        self.__dict__[name] = value
###############################################################################
# HTTP and WSGI Tools ##########################################################
###############################################################################
class BaseRequest(object):
""" A wrapper for WSGI environment dictionaries that adds a lot of
convenient access methods and properties. Most of them are read-only.
Adding new attributes to a request actually adds them to the environ
dictionary (as 'bottle.request.ext.<name>'). This is the recommended
way to store and access request-specific data.
"""
__slots__ = ('environ', )
#: Maximum size of memory buffer for :attr:`body` in bytes.
MEMFILE_MAX = 102400
def __init__(self, environ=None):
""" Wrap a WSGI environ dictionary. """
#: The wrapped WSGI environ dictionary. This is the only real attribute.
#: All other attributes actually are read-only properties.
self.environ = {} if environ is None else environ
self.environ['bottle.request'] = self
    @DictProperty('environ', 'bottle.app', read_only=True)
    def app(self):
        """ Bottle application handling this request. """
        # Populated via environ['bottle.app'] during dispatch; the raise only
        # triggers for requests that were never bound to an application.
        raise RuntimeError('This request is not connected to an application.')
    @DictProperty('environ', 'bottle.route', read_only=True)
    def route(self):
        """ The bottle :class:`Route` object that matches this request. """
        # Populated via environ['bottle.route'] once routing has happened.
        raise RuntimeError('This request is not connected to a route.')
    @DictProperty('environ', 'route.url_args', read_only=True)
    def url_args(self):
        """ The arguments extracted from the URL. """
        # Populated via environ['route.url_args'] once routing has happened.
        raise RuntimeError('This request is not connected to a route.')
@property
def path(self):
""" The value of ``PATH_INFO`` with exactly one prefixed slash (to fix
broken clients and avoid the "empty path" edge case). """
return '/' + self.environ.get('PATH_INFO', '').lstrip('/')
@property
def method(self):
""" The ``REQUEST_METHOD`` value as an uppercase string. """
return self.environ.get('REQUEST_METHOD', 'GET').upper()
    @DictProperty('environ', 'bottle.request.headers', read_only=True)
    def headers(self):
        """ A :class:`WSGIHeaderDict` that provides case-insensitive access to
            HTTP request headers. """
        # Cached in the environ by DictProperty: built once per request.
        return WSGIHeaderDict(self.environ)
    def get_header(self, name, default=None):
        """ Return the value of a request header, or a given default value. """
        # Delegates to :attr:`headers` (case-insensitive lookup).
        return self.headers.get(name, default)
    @DictProperty('environ', 'bottle.request.cookies', read_only=True)
    def cookies(self):
        """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT
            decoded. Use :meth:`get_cookie` if you expect signed cookies. """
        # SimpleCookie yields Morsel objects; keep only the key/value pairs.
        cookies = SimpleCookie(self.environ.get('HTTP_COOKIE', '')).values()
        return FormsDict((c.key, c.value) for c in cookies)
    def get_cookie(self, key, default=None, secret=None):
        """ Return the content of a cookie. To read a `Signed Cookie`, the
            `secret` must match the one used to create the cookie (see
            :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
            cookie or wrong signature), return a default value. """
        value = self.cookies.get(key)
        if secret and value:
            dec = cookie_decode(value, secret)  # (key, value) tuple or None
            # Also verify the decoded key to guard against swapped cookies.
            return dec[1] if dec and dec[0] == key else default
        return value or default
    @DictProperty('environ', 'bottle.request.query', read_only=True)
    def query(self):
        """ The query string parsed into a :class:`FormsDict`. These values
            are sometimes called "URL arguments" or "GET parameters", but not
            to be confused with "URL wildcards", which are provided by the
            :class:`Router`. """
        # Also cached under environ['bottle.get'] for legacy access.
        get = self.environ['bottle.get'] = FormsDict()
        pairs = _parse_qsl(self.environ.get('QUERY_STRING', ''))
        for key, value in pairs:
            get[key] = value
        return get
    @DictProperty('environ', 'bottle.request.forms', read_only=True)
    def forms(self):
        """ Form values parsed from an `url-encoded` or `multipart/form-data`
            encoded POST or PUT request body. The result is returned as a
            :class:`FormsDict`. All keys and values are strings. File uploads
            are stored separately in :attr:`files`. """
        forms = FormsDict()
        # :attr:`POST` mixes form fields and uploads; keep only the fields.
        for name, item in self.POST.allitems():
            if not isinstance(item, FileUpload):
                forms[name] = item
        return forms
    @DictProperty('environ', 'bottle.request.params', read_only=True)
    def params(self):
        """ A :class:`FormsDict` with the combined values of :attr:`query` and
            :attr:`forms`. File uploads are stored in :attr:`files`. """
        params = FormsDict()
        # Query parameters first, then form fields (forms shadow the query).
        for key, value in self.query.allitems():
            params[key] = value
        for key, value in self.forms.allitems():
            params[key] = value
        return params
    @DictProperty('environ', 'bottle.request.files', read_only=True)
    def files(self):
        """ File uploads parsed from `multipart/form-data` encoded POST or PUT
            request body. The values are instances of :class:`FileUpload`.
        """
        files = FormsDict()
        # Complement of :attr:`forms`: keep only the FileUpload items.
        for name, item in self.POST.allitems():
            if isinstance(item, FileUpload):
                files[name] = item
        return files
    @DictProperty('environ', 'bottle.request.json', read_only=True)
    def json(self):
        """ If the ``Content-Type`` header is ``application/json``, this
            property holds the parsed content of the request body. Only requests
            smaller than :attr:`MEMFILE_MAX` are processed to avoid memory
            exhaustion. Invalid JSON raises a 400 error response. """
        # Only the media type is compared; parameters (e.g. charset) are cut off.
        ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0]
        if ctype == 'application/json':
            b = self._get_body_string()
            if not b:
                # Empty body: treat as "no JSON" rather than an error.
                return None
            try:
                return json_loads(b)
            except (ValueError, TypeError):
                raise HTTPError(400, 'Invalid JSON')
        return None
def _iter_body(self, read, bufsize):
maxread = max(0, self.content_length)
while maxread:
part = read(min(maxread, bufsize))
if not part: break
yield part
maxread -= len(part)
    @staticmethod
    def _iter_chunked(read, bufsize):
        """ Yield the decoded payload of a ``chunked`` transfer-encoded body,
            reading via *read* in pieces of at most *bufsize* bytes.
            Raises HTTPError(400) on any framing error. """
        err = HTTPError(400, 'Error while parsing chunked transfer body.')
        rn, sem, bs = tob('\r\n'), tob(';'), tob('')
        while True:
            # Read the chunk-size line byte-by-byte until CRLF.
            header = read(1)
            while header[-2:] != rn:
                c = read(1)
                header += c
                if not c: raise err
                if len(header) > bufsize: raise err
            # Chunk extensions (after ';') are ignored; size is hexadecimal.
            size, _, _ = header.partition(sem)
            try:
                maxread = int(tonat(size.strip()), 16)
            except ValueError:
                raise err
            if maxread == 0: break  # last-chunk (size 0) terminates the body
            buff = bs
            while maxread > 0:
                if not buff:
                    buff = read(min(maxread, bufsize))
                part, buff = buff[:maxread], buff[maxread:]
                if not part: raise err
                yield part
                maxread -= len(part)
            # Each chunk's data must be followed by CRLF.
            if read(2) != rn:
                raise err
@DictProperty('environ', 'bottle.request.body', read_only=True)
def _body(self):
    # Read the entire request body into a seekable buffer, spilling to a
    # temporary file once it grows beyond MEMFILE_MAX, then replace
    # wsgi.input with the buffer so the body can be re-read later.
    try:
        read_func = self.environ['wsgi.input'].read
    except KeyError:
        # No input stream at all: install an empty in-memory one.
        self.environ['wsgi.input'] = BytesIO()
        return self.environ['wsgi.input']
    # Chunked bodies need their framing stripped; plain bodies are
    # bounded by Content-Length.
    body_iter = self._iter_chunked if self.chunked else self._iter_body
    body, body_size, is_temp_file = BytesIO(), 0, False
    for part in body_iter(read_func, self.MEMFILE_MAX):
        body.write(part)
        body_size += len(part)
        if not is_temp_file and body_size > self.MEMFILE_MAX:
            # Too big for memory: switch to a temporary file, copying
            # what was buffered so far.
            body, tmp = TemporaryFile(mode='w+b'), body
            body.write(tmp.getvalue())
            del tmp
            is_temp_file = True
    self.environ['wsgi.input'] = body
    body.seek(0)
    return body
def _get_body_string(self):
    """ Read the body (up to content-length or MEMFILE_MAX) into a byte
        string. Raise HTTPError(413) on requests that are too large. """
    clen = self.content_length
    if clen > self.MEMFILE_MAX:
        raise HTTPError(413, 'Request entity too large')
    if clen < 0:
        # Unknown length: read one byte past the limit to detect overflow.
        clen = self.MEMFILE_MAX + 1
    payload = self.body.read(clen)
    if len(payload) > self.MEMFILE_MAX:  # Fail fast
        raise HTTPError(413, 'Request entity too large')
    return payload
@property
def body(self):
    """ The HTTP request body as a seek-able file-like object. Depending on
        :attr:`MEMFILE_MAX`, this is either a temporary file or a
        :class:`io.BytesIO` instance. Accessing this property for the first
        time reads and replaces the ``wsgi.input`` environ variable.
        Subsequent accesses just do a `seek(0)` on the file object. """
    fileobj = self._body
    fileobj.seek(0)
    return fileobj
@property
def chunked(self):
    """ True if the request body uses chunked transfer encoding. """
    encoding = self.environ.get('HTTP_TRANSFER_ENCODING', '')
    return 'chunked' in encoding.lower()
#: An alias for :attr:`query` -- GET-style parameters always come from the
#: query string, regardless of the request method.
GET = query
@DictProperty('environ', 'bottle.request.post', read_only=True)
def POST(self):
    """ The values of :attr:`forms` and :attr:`files` combined into a single
        :class:`FormsDict`. Values are either strings (form values) or
        instances of :class:`cgi.FieldStorage` (file uploads).
    """
    post = FormsDict()
    # We default to application/x-www-form-urlencoded for everything that
    # is not multipart and take the fast path (also: 3.1 workaround)
    if not self.content_type.startswith('multipart/'):
        # 'latin1' decodes any byte sequence losslessly; FormsDict
        # re-decodes on attribute access.
        pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1'))
        for key, value in pairs:
            post[key] = value
        return post
    # Multipart: parse with cgi.FieldStorage using a minimal environment.
    # QUERY_STRING must be empty so query parameters are not mixed in.
    safe_env = {'QUERY_STRING': ''}  # Build a safe environment for cgi
    for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
        if key in self.environ: safe_env[key] = self.environ[key]
    args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
    if py31:
        # Python 3.1 cgi expects a text stream; wrap without closing it.
        args['fp'] = NCTextIOWrapper(args['fp'],
                                     encoding='utf8',
                                     newline='\n')
    elif py3k:
        args['encoding'] = 'utf8'
    data = cgi.FieldStorage(**args)
    # Keep a reference so the FieldStorage finalizer does not close the
    # underlying file too early (see linked CPython issue).
    self['_cgi.FieldStorage'] = data  #http://bugs.python.org/issue18394
    data = data.list or []
    for item in data:
        if item.filename:
            post[item.name] = FileUpload(item.file, item.name,
                                         item.filename, item.headers)
        else:
            post[item.name] = item.value
    return post
@property
def url(self):
    """ The full request URI including hostname and scheme. If your app
        lives behind a reverse proxy or load balancer and you get confusing
        results, make sure that the ``X-Forwarded-Host`` header is set
        correctly. """
    parts = self.urlparts
    return parts.geturl()
@DictProperty('environ', 'bottle.request.urlparts', read_only=True)
def urlparts(self):
    """ The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.
        The tuple contains (scheme, host, path, query_string and fragment),
        but the fragment is always empty because it is not visible to the
        server. """
    env = self.environ
    # Prefer proxy-provided values so URLs are correct behind a reverse
    # proxy or load balancer.
    http = env.get('HTTP_X_FORWARDED_PROTO') \
         or env.get('wsgi.url_scheme', 'http')
    host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
    if not host:
        # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
        host = env.get('SERVER_NAME', '127.0.0.1')
        port = env.get('SERVER_PORT')
        # Only append the port when it differs from the scheme's default.
        if port and port != ('80' if http == 'http' else '443'):
            host += ':' + port
    path = urlquote(self.fullpath)
    return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')
@property
def fullpath(self):
    """ Request path including :attr:`script_name` (if present). """
    relative = self.path.lstrip('/')
    return urljoin(self.script_name, relative)
@property
def query_string(self):
    """ The raw :attr:`query` part of the URL (everything in between ``?``
        and ``#``) as a string, taken verbatim from the environ. """
    qs = self.environ.get('QUERY_STRING')
    return qs if qs is not None else ''
@property
def script_name(self):
    """ The initial portion of the URL's `path` that was removed by a higher
        level (server or routing middleware) before the application was
        called. This script path is returned with leading and tailing
        slashes. """
    name = self.environ.get('SCRIPT_NAME', '').strip('/')
    if not name:
        return '/'
    return '/' + name + '/'
def path_shift(self, shift=1):
    """ Shift path segments from :attr:`path` to :attr:`script_name` and
        vice versa.

        :param shift: The number of path segments to shift. May be negative
                      to change the shift direction. (default: 1)
    """
    # Delegates to the module-level path_shift() helper, then writes the
    # result back through __setitem__ so dependent caches are cleared.
    script, path = path_shift(self.environ.get('SCRIPT_NAME', '/'), self.path, shift)
    self['SCRIPT_NAME'], self['PATH_INFO'] = script, path
@property
def content_length(self):
    """ The request body length as an integer. The client is responsible to
        set this header. Otherwise, the real length of the body is unknown
        and -1 is returned. In this case, :attr:`body` will be empty. """
    raw = self.environ.get('CONTENT_LENGTH')
    return int(raw) if raw else -1
@property
def content_type(self):
    """ The Content-Type header as a lowercase-string (default: empty). """
    ctype = self.environ.get('CONTENT_TYPE', '')
    return ctype.lower()
@property
def is_xhr(self):
    """ True if the request was triggered by a XMLHttpRequest. This only
        works with JavaScript libraries that support the `X-Requested-With`
        header (most of the popular libraries do). """
    header = self.environ.get('HTTP_X_REQUESTED_WITH', '')
    return header.lower() == 'xmlhttprequest'
@property
def is_ajax(self):
    """ Alias for :attr:`is_xhr`. "Ajax" is not the right term. """
    return self.is_xhr
@property
def auth(self):
    """ HTTP authentication data as a (user, password) tuple. This
        implementation currently supports basic (not digest) authentication
        only. If the authentication happened at a higher level (e.g. in the
        front web-server or a middleware), the password field is None, but
        the user field is looked up from the ``REMOTE_USER`` environ
        variable. On any errors, None is returned. """
    # Try a standard Authorization header first (basic auth only).
    basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION', ''))
    if basic: return basic
    # Fall back to REMOTE_USER as set by the server or middleware.
    ruser = self.environ.get('REMOTE_USER')
    if ruser: return (ruser, None)
    return None
@property
def remote_route(self):
    """ A list of all IPs that were involved in this request, starting with
        the client IP and followed by zero or more proxies. This does only
        work if all proxies support the ```X-Forwarded-For`` header. Note
        that this information can be forged by malicious clients. """
    forwarded = self.environ.get('HTTP_X_FORWARDED_FOR')
    if forwarded:
        return [addr.strip() for addr in forwarded.split(',')]
    direct = self.environ.get('REMOTE_ADDR')
    return [direct] if direct else []
@property
def remote_addr(self):
    """ The client IP as a string. Note that this information can be forged
        by malicious clients. """
    hops = self.remote_route
    if not hops:
        return None
    return hops[0]
def copy(self):
    """ Return a new :class:`Request` with a shallow :attr:`environ` copy. """
    environ_copy = self.environ.copy()
    return Request(environ_copy)
def get(self, value, default=None):
    """ Shortcut for ``self.environ.get(value, default)``. """
    return self.environ.get(value, default)
def __getitem__(self, key):
    """ Shortcut for ``self.environ[key]`` (raises KeyError if missing). """
    return self.environ[key]
def __delitem__(self, key):
    # Assign an empty value first: this routes through __setitem__, which
    # clears any cached values that depend on this key. Then remove the
    # key from the environ for real.
    self[key] = ""
    del (self.environ[key])
def __iter__(self):
    """ Iterate over the keys of the wrapped environ dict. """
    return iter(self.environ)
def __len__(self):
    """ Number of keys in the wrapped environ dict. """
    return len(self.environ)
def keys(self):
    """ Expose the environ's keys (dict-like interface). """
    return self.environ.keys()
def __setitem__(self, key, value):
    """ Change an environ value and clear all caches that depend on it. """
    if self.environ.get('bottle.request.readonly'):
        raise KeyError('The environ dictionary is read-only.')
    self.environ[key] = value
    # Map the changed key to the cached request attributes it invalidates.
    if key == 'wsgi.input':
        stale = ('body', 'forms', 'files', 'params', 'post', 'json')
    elif key == 'QUERY_STRING':
        stale = ('query', 'params')
    elif key.startswith('HTTP_'):
        stale = ('headers', 'cookies')
    else:
        stale = ()
    for cached in stale:
        self.environ.pop('bottle.request.' + cached, None)
def __repr__(self):
    """ Short debug representation: class name, method and full URL. """
    return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url)
def __getattr__(self, name):
""" Search in self.environ for additional user defined attributes. """
try:
var = self.environ['bottle.request.ext.%s' % name]
return var.__get__(self) if hasattr(var, '__get__') else var
except KeyError:
raise AttributeError('Attribute %r not defined.' % name)
def __setattr__(self, name, value):
    """ Store user defined attributes in the environ; 'environ' itself is
        the only real instance attribute. Re-definition is rejected. """
    if name == 'environ':
        return object.__setattr__(self, name, value)
    key = 'bottle.request.ext.%s' % name
    if key in self.environ:
        raise AttributeError("Attribute already defined: %s" % name)
    self.environ[key] = value
def __delattr__(self, name):
    """ Remove a user defined attribute stored in the environ.

        Bug fix: the old signature ``__delattr__(self, name, value)``
        declared a bogus extra required parameter. Python invokes
        ``__delattr__`` with the attribute name only, so ``del request.attr``
        raised a TypeError before ever reaching this body. The data-model
        signature is ``__delattr__(self, name)``.
    """
    try:
        del self.environ['bottle.request.ext.%s' % name]
    except KeyError:
        raise AttributeError("Attribute not defined: %s" % name)
def _hkey(s):
    """ Normalize a header name to canonical Header-Case with dashes. """
    titled = s.title()
    return titled.replace('_', '-')
class HeaderProperty(object):
    """ Descriptor that maps an attribute to a single entry in the owner's
        ``headers`` mapping, with optional read/write conversion. """

    def __init__(self, name, reader=None, writer=str, default=''):
        self.name = name
        self.default = default
        self.reader = reader
        self.writer = writer
        self.__doc__ = 'Current value of the %r header.' % name.title()

    def __get__(self, obj, _):
        if obj is None:
            return self  # class-level access returns the descriptor itself
        raw = obj.headers.get(self.name, self.default)
        if self.reader:
            return self.reader(raw)
        return raw

    def __set__(self, obj, value):
        obj.headers[self.name] = self.writer(value)

    def __delete__(self, obj):
        del obj.headers[self.name]
class BaseResponse(object):
    """ Storage class for a response body as well as headers and cookies.

        This class does support dict-like case-insensitive item-access to
        headers, but is NOT a dict. Most notably, iterating over a response
        yields parts of the body and not the headers.

        :param body: The response body as one of the supported types.
        :param status: Either an HTTP status code (e.g. 200) or a status line
                       including the reason phrase (e.g. '200 OK').
        :param headers: A dictionary or a list of name-value pairs.

        Additional keyword arguments are added to the list of headers.
        Underscores in the header name are replaced with dashes.
    """

    default_status = 200
    default_content_type = 'text/html; charset=UTF-8'

    # Header blacklist for specific response codes
    # (rfc2616 section 10.2.3 and 10.3.5)
    bad_headers = {
        204: set(('Content-Type', )),
        304: set(('Allow', 'Content-Encoding', 'Content-Language',
                  'Content-Length', 'Content-Range', 'Content-Type',
                  'Content-Md5', 'Last-Modified'))
    }

    def __init__(self, body='', status=None, headers=None, **more_headers):
        self._cookies = None
        # Internal header storage: {Title-Cased-Name: [value, ...]}
        self._headers = {}
        self.body = body
        self.status = status or self.default_status
        if headers:
            if isinstance(headers, dict):
                headers = headers.items()
            for name, value in headers:
                self.add_header(name, value)
        if more_headers:
            for name, value in more_headers.items():
                self.add_header(name, value)

    def copy(self, cls=None):
        """ Returns a copy of self. """
        cls = cls or BaseResponse
        assert issubclass(cls, BaseResponse)
        copy = cls()
        copy.status = self.status
        # Copy the value lists so the two responses do not share state.
        copy._headers = dict((k, v[:]) for (k, v) in self._headers.items())
        if self._cookies:
            # Serialize and re-load to get an independent cookie jar.
            copy._cookies = SimpleCookie()
            copy._cookies.load(self._cookies.output(header=''))
        return copy

    def __iter__(self):
        # Iterating a response yields body parts, not headers.
        return iter(self.body)

    def close(self):
        if hasattr(self.body, 'close'):
            self.body.close()

    @property
    def status_line(self):
        """ The HTTP status line as a string (e.g. ``404 Not Found``)."""
        return self._status_line

    @property
    def status_code(self):
        """ The HTTP status code as an integer (e.g. 404)."""
        return self._status_code

    def _set_status(self, status):
        # Accept either an int code or a full 'CODE Reason' status line.
        if isinstance(status, int):
            code, status = status, _HTTP_STATUS_LINES.get(status)
        elif ' ' in status:
            status = status.strip()
            code = int(status.split()[0])
        else:
            raise ValueError('String status line without a reason phrase.')
        if not 100 <= code <= 999:
            raise ValueError('Status code out of range.')
        self._status_code = code
        self._status_line = str(status or ('%d Unknown' % code))

    def _get_status(self):
        return self._status_line

    status = property(
        _get_status, _set_status, None,
        ''' A writeable property to change the HTTP response status. It accepts
            either a numeric code (100-999) or a string with a custom reason
            phrase (e.g. "404 Brain not found"). Both :data:`status_line` and
            :data:`status_code` are updated accordingly. The return value is
            always a status string. ''')
    # Drop the raw accessors from the class namespace; only the property stays.
    del _get_status, _set_status

    @property
    def headers(self):
        """ An instance of :class:`HeaderDict`, a case-insensitive dict-like
            view on the response headers. """
        hdict = HeaderDict()
        hdict.dict = self._headers  # share storage: edits are visible here
        return hdict

    def __contains__(self, name):
        return _hkey(name) in self._headers

    def __delitem__(self, name):
        del self._headers[_hkey(name)]

    def __getitem__(self, name):
        return self._headers[_hkey(name)][-1]

    def __setitem__(self, name, value):
        self._headers[_hkey(name)] = [value if isinstance(value, unicode) else
                                      str(value)]

    def get_header(self, name, default=None):
        """ Return the value of a previously defined header. If there is no
            header with that name, return a default value. """
        return self._headers.get(_hkey(name), [default])[-1]

    def set_header(self, name, value):
        """ Create a new response header, replacing any previously defined
            headers with the same name. """
        self._headers[_hkey(name)] = [value if isinstance(value, unicode)
                                      else str(value)]

    def add_header(self, name, value):
        """ Add an additional response header, not removing duplicates. """
        self._headers.setdefault(_hkey(name), []).append(
            value if isinstance(value, unicode) else str(value))

    def iter_headers(self):
        """ Yield (header, value) tuples, skipping headers that are not
            allowed with the current response status code. """
        return self.headerlist

    @property
    def headerlist(self):
        """ WSGI conform list of (header, value) tuples. """
        out = []
        headers = list(self._headers.items())
        if 'Content-Type' not in self._headers:
            headers.append(('Content-Type', [self.default_content_type]))
        if self._status_code in self.bad_headers:
            # Strip headers that are forbidden for this status code.
            bad_headers = self.bad_headers[self._status_code]
            headers = [h for h in headers if h[0] not in bad_headers]
        out += [(name, val) for (name, vals) in headers for val in vals]
        if self._cookies:
            for c in self._cookies.values():
                out.append(('Set-Cookie', c.OutputString()))
        if py3k:
            # PEP 3333: header values must be latin1-representable strings.
            return [(k, v.encode('utf8').decode('latin1')) for (k, v) in out]
        else:
            return [(k, v.encode('utf8') if isinstance(v, unicode) else v)
                    for (k, v) in out]

    content_type = HeaderProperty('Content-Type')
    content_length = HeaderProperty('Content-Length', reader=int)
    expires = HeaderProperty(
        'Expires',
        reader=lambda x: datetime.utcfromtimestamp(parse_date(x)),
        writer=lambda x: http_date(x))

    @property
    def charset(self, default='UTF-8'):
        """ Return the charset specified in the content-type header (default: utf8). """
        if 'charset=' in self.content_type:
            return self.content_type.split('charset=')[-1].split(';')[0].strip()
        return default

    def set_cookie(self, name, value, secret=None, **options):
        """ Create a new cookie or replace an old one. If the `secret` parameter is
            set, create a `Signed Cookie` (described below).

            :param name: the name of the cookie.
            :param value: the value of the cookie.
            :param secret: a signature key required for signed cookies.

            Additionally, this method accepts all RFC 2109 attributes that are
            supported by :class:`cookie.Morsel`, including:

            :param max_age: maximum age in seconds. (default: None)
            :param expires: a datetime object or UNIX timestamp. (default: None)
            :param domain: the domain that is allowed to read the cookie.
              (default: current domain)
            :param path: limits the cookie to a given path (default: current path)
            :param secure: limit the cookie to HTTPS connections (default: off).
            :param httponly: prevents client-side javascript to read this cookie
              (default: off, requires Python 2.6 or newer).

            If neither `expires` nor `max_age` is set (default), the cookie will
            expire at the end of the browser session (as soon as the browser
            window is closed).

            Signed cookies may store any pickle-able object and are
            cryptographically signed to prevent manipulation. Keep in mind that
            cookies are limited to 4kb in most browsers.

            Warning: Signed cookies are not encrypted (the client can still see
            the content) and not copy-protected (the client can restore an old
            cookie). The main intention is to make pickling and unpickling
            save, not to store secret information at client side.
        """
        if not self._cookies:
            self._cookies = SimpleCookie()
        if secret:
            value = touni(cookie_encode((name, value), secret))
        elif not isinstance(value, basestring):
            raise TypeError('Secret key missing for non-string Cookie.')
        # Cookie size plus options must not exceed 4kb.
        if len(name) + len(value) > 3800:
            raise ValueError('Content does not fit into a cookie.')
        self._cookies[name] = value
        for key, value in options.items():
            if key == 'max_age':
                if isinstance(value, timedelta):
                    value = value.seconds + value.days * 24 * 3600
            if key == 'expires':
                # Normalize datetime/timestamp values to an HTTP date string.
                if isinstance(value, (datedate, datetime)):
                    value = value.timetuple()
                elif isinstance(value, (int, float)):
                    value = time.gmtime(value)
                value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
            if key in ('secure', 'httponly') and not value:
                continue  # falsy boolean flags are simply omitted
            self._cookies[name][key.replace('_', '-')] = value

    def delete_cookie(self, key, **kwargs):
        """ Delete a cookie. Be sure to use the same `domain` and `path`
            settings as used to create the cookie. """
        # An already-expired cookie makes the browser drop it.
        kwargs['max_age'] = -1
        kwargs['expires'] = 0
        self.set_cookie(key, '', **kwargs)

    def __repr__(self):
        out = ''
        for name, value in self.headerlist:
            out += '%s: %s\n' % (name.title(), value.strip())
        return out
def _local_property():
    """ Build a property whose value is stored in thread-local storage,
        raising RuntimeError when read before it was ever set. """
    storage = threading.local()

    def _get(_):
        try:
            return storage.var
        except AttributeError:
            raise RuntimeError("Request context not initialized.")

    def _set(_, value):
        storage.var = value

    def _del(_):
        del storage.var

    return property(_get, _set, _del, 'Thread-local property')
class LocalRequest(BaseRequest):
    """ A thread-local subclass of :class:`BaseRequest` with a different
        set of attributes for each thread. There is usually only one global
        instance of this class (:data:`request`). If accessed during a
        request/response cycle, this instance always refers to the *current*
        request (even on a multithreaded server). """
    bind = BaseRequest.__init__  # re-initialize in place for each request
    environ = _local_property()  # the wrapped environ lives per-thread
class LocalResponse(BaseResponse):
    """ A thread-local subclass of :class:`BaseResponse` with a different
        set of attributes for each thread. There is usually only one global
        instance of this class (:data:`response`). Its attributes are used
        to build the HTTP response at the end of the request/response cycle.
    """
    bind = BaseResponse.__init__  # re-initialize in place for each request
    # All mutable response state is kept in thread-local storage:
    _status_line = _local_property()
    _status_code = _local_property()
    _cookies = _local_property()
    _headers = _local_property()
    body = _local_property()
#: Public aliases: the plain (non thread-local) request/response classes.
Request = BaseRequest
Response = BaseResponse
class HTTPResponse(Response, BottleException):
    """ A :class:`Response` that is also an exception, so it can be raised
        to abort request handling with a specific HTTP response. """

    def __init__(self, body='', status=None, headers=None, **more_headers):
        super(HTTPResponse, self).__init__(body, status, headers, **more_headers)

    def apply(self, other):
        """ Copy this response's state onto another response object. """
        for attr in ('_status_code', '_status_line', '_headers',
                     '_cookies', 'body'):
            setattr(other, attr, getattr(self, attr))
class HTTPError(HTTPResponse):
    """ An :class:`HTTPResponse` representing an error condition, optionally
        carrying the original exception and traceback for error pages. """
    default_status = 500

    def __init__(self, status=None, body=None, exception=None,
                 traceback=None, **more_headers):
        self.exception = exception
        self.traceback = traceback
        super(HTTPError, self).__init__(body, status, **more_headers)
###############################################################################
# Plugins ######################################################################
###############################################################################
class PluginError(BottleException):
    """ Exception raised for plugin-related errors. """
    pass
class JSONPlugin(object):
    """ Plugin that serializes dict return values (and dict bodies of
        :class:`HTTPResponse`) to JSON and sets the Content-Type header. """
    name = 'json'
    api = 2

    def __init__(self, json_dumps=json_dumps):
        # json_dumps may be None/falsy to disable the plugin entirely.
        self.json_dumps = json_dumps

    def apply(self, callback, _):
        dumps = self.json_dumps
        if not dumps: return callback

        def wrapper(*a, **ka):
            try:
                rv = callback(*a, **ka)
            except HTTPError:
                # Treat a raised HTTPError like a returned one so its dict
                # body (if any) is serialized below.
                rv = _e()

            if isinstance(rv, dict):
                #Attempt to serialize, raises exception on failure
                json_response = dumps(rv)
                #Set content type only if serialization successful
                response.content_type = 'application/json'
                return json_response
            elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
                rv.body = dumps(rv.body)
                rv.content_type = 'application/json'
            return rv

        return wrapper
class TemplatePlugin(object):
    """ This plugin applies the :func:`view` decorator to all routes with a
        `template` config parameter. If the parameter is a tuple, the second
        element must be a dict with additional options (e.g. `template_engine`)
        or default variables for the template. """
    name = 'template'
    api = 2

    def setup(self, app):
        app.tpl = self

    def apply(self, callback, route):
        conf = route.config.get('template')
        if isinstance(conf, (tuple, list)) and len(conf) == 2:
            # (template_name, options_dict) form
            tpl_name, tpl_opts = conf
            return view(tpl_name, **tpl_opts)(callback)
        if isinstance(conf, str):
            return view(conf)(callback)
        # No template configured: leave the callback untouched.
        return callback
#: Not a plugin, but part of the plugin API. TODO: Find a better place.
class _ImportRedirect(object):
    def __init__(self, name, impmask):
        """ Create a virtual package that redirects imports (see PEP 302). """
        self.name = name
        self.impmask = impmask
        # Register (or reuse) the virtual package module.
        # NOTE(review): `imp` is deprecated on modern Python 3 -- confirm
        # the file's compatibility targets before modernizing this.
        self.module = sys.modules.setdefault(name, imp.new_module(name))
        self.module.__dict__.update({
            '__file__': __file__,
            '__path__': [],
            '__all__': [],
            '__loader__': self
        })
        # Install ourselves as an import hook.
        sys.meta_path.append(self)

    def find_module(self, fullname, path=None):
        # Only handle direct submodules of the virtual package.
        if '.' not in fullname: return
        packname = fullname.rsplit('.', 1)[0]
        if packname != self.name: return
        return self

    def load_module(self, fullname):
        if fullname in sys.modules: return sys.modules[fullname]
        modname = fullname.rsplit('.', 1)[1]
        # Map the virtual name onto the real module via the mask pattern.
        realname = self.impmask % modname
        __import__(realname)
        # Alias the real module under the virtual package's name.
        module = sys.modules[fullname] = sys.modules[realname]
        setattr(self.module, modname, module)
        module.__loader__ = self
        return module
###############################################################################
# Common Utilities #############################################################
###############################################################################
class MultiDict(DictMixin):
    """ This dict stores multiple values per key, but behaves exactly like a
        normal dict in that it returns only the newest value for any given key.
        There are special methods available to access the full list of values.
    """

    def __init__(self, *a, **k):
        # Every value is stored as a list; the last element is "current".
        self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items())

    def __len__(self):
        return len(self.dict)

    def __iter__(self):
        return iter(self.dict)

    def __contains__(self, key):
        return key in self.dict

    def __delitem__(self, key):
        del self.dict[key]

    def __getitem__(self, key):
        # Plain item access exposes only the most recent value.
        return self.dict[key][-1]

    def __setitem__(self, key, value):
        self.append(key, value)

    def keys(self):
        return self.dict.keys()

    if py3k:
        # Python 3: iterator-based views; the iter* names are kept as
        # aliases for 2/3-compatible calling code.
        def values(self):
            return (v[-1] for v in self.dict.values())

        def items(self):
            return ((k, v[-1]) for k, v in self.dict.items())

        def allitems(self):
            return ((k, v) for k, vl in self.dict.items() for v in vl)

        iterkeys = keys
        itervalues = values
        iteritems = items
        iterallitems = allitems

    else:
        # Python 2: list-returning methods plus lazy iter* variants.
        def values(self):
            return [v[-1] for v in self.dict.values()]

        def items(self):
            return [(k, v[-1]) for k, v in self.dict.items()]

        def iterkeys(self):
            return self.dict.iterkeys()

        def itervalues(self):
            return (v[-1] for v in self.dict.itervalues())

        def iteritems(self):
            return ((k, v[-1]) for k, v in self.dict.iteritems())

        def iterallitems(self):
            return ((k, v) for k, vl in self.dict.iteritems() for v in vl)

        def allitems(self):
            return [(k, v) for k, vl in self.dict.iteritems() for v in vl]

    def get(self, key, default=None, index=-1, type=None):
        """ Return the most recent value for a key.

            :param default: The default value to be returned if the key is not
                   present or the type conversion fails.
            :param index: An index for the list of available values.
            :param type: If defined, this callable is used to cast the value
                    into a specific type. Exception are suppressed and result in
                    the default value to be returned.
        """
        try:
            val = self.dict[key][index]
            return type(val) if type else val
        except Exception:
            # Missing key, bad index or failed conversion: fall through.
            pass
        return default

    def append(self, key, value):
        """ Add a new value to the list of values for this key. """
        self.dict.setdefault(key, []).append(value)

    def replace(self, key, value):
        """ Replace the list of values with a single value. """
        self.dict[key] = [value]

    def getall(self, key):
        """ Return a (possibly empty) list of values for a key. """
        return self.dict.get(key) or []

    #: Aliases for WTForms to mimic other multi-dict APIs (Django)
    getone = get
    getlist = getall
class FormsDict(MultiDict):
    """ This :class:`MultiDict` subclass is used to store request form data.
        Additionally to the normal dict-like item access methods (which return
        unmodified data as native strings), this container also supports
        attribute-like access to its values. Attributes are automatically de-
        or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing
        attributes default to an empty string. """

    #: Encoding used for attribute values.
    input_encoding = 'utf8'
    #: If true (default), unicode strings are first encoded with `latin1`
    #: and then decoded to match :attr:`input_encoding`.
    recode_unicode = True

    def _fix(self, s, encoding=None):
        # Undo the WSGI latin1 round-trip (py3) or decode raw bytes (py2).
        if isinstance(s, unicode) and self.recode_unicode:  # Python 3 WSGI
            return s.encode('latin1').decode(encoding or self.input_encoding)
        elif isinstance(s, bytes):  # Python 2 WSGI
            return s.decode(encoding or self.input_encoding)
        else:
            return s

    def decode(self, encoding=None):
        """ Returns a copy with all keys and values de- or recoded to match
            :attr:`input_encoding`. Some libraries (e.g. WTForms) want a
            unicode dictionary. """
        copy = FormsDict()
        enc = copy.input_encoding = encoding or self.input_encoding
        copy.recode_unicode = False  # values are already decoded below
        for key, value in self.allitems():
            copy.append(self._fix(key, enc), self._fix(value, enc))
        return copy

    def getunicode(self, name, default=None, encoding=None):
        """ Return the value as a unicode string, or the default. """
        try:
            return self._fix(self[name], encoding)
        except (UnicodeError, KeyError):
            return default

    def __getattr__(self, name, default=unicode()):
        # Without this guard, pickle generates a cryptic TypeError:
        if name.startswith('__') and name.endswith('__'):
            return super(FormsDict, self).__getattr__(name)
        return self.getunicode(name, default=default)
class HeaderDict(MultiDict):
    """ A case-insensitive version of :class:`MultiDict` that defaults to
        replace the old value instead of appending it. """

    def __init__(self, *a, **ka):
        self.dict = {}
        if a or ka: self.update(*a, **ka)

    def __contains__(self, key):
        return _hkey(key) in self.dict

    def __delitem__(self, key):
        del self.dict[_hkey(key)]

    def __getitem__(self, key):
        return self.dict[_hkey(key)][-1]

    def __setitem__(self, key, value):
        # Replace (not append); coerce non-unicode values to str.
        self.dict[_hkey(key)] = [value if isinstance(value, unicode) else
                                 str(value)]

    def append(self, key, value):
        self.dict.setdefault(_hkey(key), []).append(
            value if isinstance(value, unicode) else str(value))

    def replace(self, key, value):
        self.dict[_hkey(key)] = [value if isinstance(value, unicode) else
                                 str(value)]

    def getall(self, key):
        return self.dict.get(_hkey(key)) or []

    def get(self, key, default=None, index=-1):
        return MultiDict.get(self, _hkey(key), default, index)

    def filter(self, names):
        # Remove every listed header (case-insensitive) if present.
        for name in [_hkey(n) for n in names]:
            if name in self.dict:
                del self.dict[name]
class WSGIHeaderDict(DictMixin):
    """ This dict-like class wraps a WSGI environ dict and provides convenient
        access to HTTP_* fields. Keys and values are native strings
        (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI
        environment contains non-native string values, these are de- or encoded
        using a lossless 'latin1' character set.

        The API will remain stable even on changes to the relevant PEPs.
        Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one
        that uses non-native strings.)
    """
    #: List of keys that do not have a ``HTTP_`` prefix.
    cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH')

    def __init__(self, environ):
        self.environ = environ

    def _ekey(self, key):
        """ Translate header field name to CGI/WSGI environ key. """
        key = key.replace('-', '_').upper()
        if key in self.cgikeys:
            return key
        return 'HTTP_' + key

    def raw(self, key, default=None):
        """ Return the header value as is (may be bytes or unicode). """
        return self.environ.get(self._ekey(key), default)

    def __getitem__(self, key):
        val = self.environ[self._ekey(key)]
        if py3k:
            if isinstance(val, unicode):
                # Undo the WSGI latin1 round-trip to recover UTF-8 text.
                val = val.encode('latin1').decode('utf8')
            else:
                val = val.decode('utf8')
        return val

    def __setitem__(self, key, value):
        raise TypeError("%s is read-only." % self.__class__)

    def __delitem__(self, key):
        raise TypeError("%s is read-only." % self.__class__)

    def __iter__(self):
        # Yield only header-like environ keys, in Header-Case.
        for key in self.environ:
            if key[:5] == 'HTTP_':
                yield _hkey(key[5:])
            elif key in self.cgikeys:
                yield _hkey(key)

    def keys(self):
        return [x for x in self]

    def __len__(self):
        return len(self.keys())

    def __contains__(self, key):
        return self._ekey(key) in self.environ
class ConfigDict(dict):
""" A dict-like configuration storage with additional support for
namespaces, validators, meta-data, on_change listeners and more.
"""
__slots__ = ('_meta', '_on_change')
def __init__(self):
self._meta = {}
self._on_change = lambda name, value: None
def load_module(self, path, squash):
""" Load values from a Python module.
:param squash: Squash nested dicts into namespaces by using
load_dict(), otherwise use update()
Example: load_config('my.app.settings', True)
Example: load_config('my.app.settings', False)
"""
config_obj = __import__(path)
obj = dict([(key, getattr(config_obj, key))
for key in dir(config_obj) if key.isupper()])
if squash:
self.load_dict(obj)
else:
self.update(obj)
return self
def load_config(self, filename):
""" Load values from an ``*.ini`` style config file.
If the config file contains sections, their names are used as
namespaces for the values within. The two special sections
``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix).
"""
conf = ConfigParser()
conf.read(filename)
for section in conf.sections():
for key, value in conf.items(section):
if section not in ('DEFAULT', 'bottle'):
key = section + '.' + key
self[key] = value
return self
def load_dict(self, source, namespace=''):
""" Load values from a dictionary structure. Nesting can be used to
represent namespaces.
>>> c = ConfigDict()
>>> c.load_dict({'some': {'namespace': {'key': 'value'} } })
{'some.namespace.key': 'value'}
"""
for key, value in source.items():
if isinstance(key, basestring):
nskey = (namespace + '.' + key).strip('.')
if isinstance(value, dict):
self.load_dict(value, namespace=nskey)
else:
self[nskey] = value
else:
raise TypeError('Key has type %r (not a string)' % type(key))
return self
def update(self, *a, **ka):
""" If the first parameter is a string, all keys are prefixed with this
namespace. Apart from that it works just as the usual dict.update().
Example: ``update('some.namespace', key='value')`` """
prefix = ''
if a and isinstance(a[0], basestring):
prefix = a[0].strip('.') + '.'
a = a[1:]
for key, value in dict(*a, **ka).items():
self[prefix + key] = value
def setdefault(self, key, value):
if key not in self:
self[key] = value
return self[key]
    def __setitem__(self, key, value):
        """ Store ``value`` under a string ``key``, running the key's optional
            'filter' meta callable first and firing the change hook. """
        if not isinstance(key, basestring):
            raise TypeError('Key has type %r (not a string)' % type(key))
        # Apply the per-key 'filter' meta callable (identity by default).
        value = self.meta_get(key, 'filter', lambda x: x)(value)
        if key in self and self[key] is value:
            return  # same object already stored -- skip hook and write
        self._on_change(key, value)
        dict.__setitem__(self, key, value)
    def __delitem__(self, key):
        # Fire the change hook (with None as the new value) before removal.
        self._on_change(key, None)
        dict.__delitem__(self, key)
def meta_get(self, key, metafield, default=None):
""" Return the value of a meta field for a key. """
return self._meta.get(key, {}).get(metafield, default)
def meta_set(self, key, metafield, value):
""" Set the meta field for a key to a new value. This triggers the
on-change handler for existing keys. """
self._meta.setdefault(key, {})[metafield] = value
if key in self:
self[key] = self[key]
def meta_list(self, key):
""" Return an iterable of meta field names defined for a key. """
return self._meta.get(key, {}).keys()
class AppStack(list):
    """ A stack-like list. Calling it returns the head of the stack. """
    def __call__(self):
        """ Return the current default application. """
        return self[-1]
    def push(self, value=None):
        """ Add a new :class:`Bottle` instance to the stack """
        # Anything that is not already a Bottle app (including the default
        # None) is replaced by a fresh Bottle instance.
        if not isinstance(value, Bottle):
            value = Bottle()
        self.append(value)
        return value
class WSGIFileWrapper(object):
    """ Iterate a file(-like) object in fixed-size chunks (WSGI file wrapper).

        File methods present on the wrapped object are mirrored onto the
        wrapper so servers can use e.g. ``fileno()`` for sendfile support.
    """
    _mirrored_methods = ('fileno', 'close', 'read', 'readlines', 'tell',
                         'seek')

    def __init__(self, fp, buffer_size=1024 * 64):
        self.fp = fp
        self.buffer_size = buffer_size
        for method in self._mirrored_methods:
            if hasattr(fp, method):
                setattr(self, method, getattr(fp, method))

    def __iter__(self):
        chunk_size, read = self.buffer_size, self.read
        while True:
            chunk = read(chunk_size)
            if not chunk:
                return
            yield chunk
class _closeiter(object):
    """ This only exists to be able to attach a .close method to iterators that
        do not support attribute assignment (most of itertools). """

    def __init__(self, iterator, close=None):
        self.iterator = iterator
        # makelist() normalizes None / a single callable / a list of
        # callables into a plain list.
        self.close_callbacks = makelist(close)

    def __iter__(self):
        return iter(self.iterator)

    def close(self):
        # Invoke the callbacks in registration order.
        for callback in self.close_callbacks:
            callback()
class ResourceManager(object):
    """ This class manages a list of search paths and helps to find and open
        application-bound resources (files).

        :param base: default value for :meth:`add_path` calls.
        :param opener: callable used to open resources.
        :param cachemode: controls which lookups are cached. One of 'all',
                          'found' or 'none'.
    """
    def __init__(self, base='./', opener=open, cachemode='all'):
        self.opener = opener
        self.base = base
        self.cachemode = cachemode
        #: A list of search paths. See :meth:`add_path` for details.
        self.path = []
        #: A cache for resolved paths. ``res.cache.clear()`` clears the cache.
        self.cache = {}
    def add_path(self, path, base=None, index=None, create=False):
        """ Add a new path to the list of search paths. Return False if the
            path does not exist.

            :param path: The new search path. Relative paths are turned into
                an absolute and normalized form. If the path looks like a file
                (not ending in `/`), the filename is stripped off.
            :param base: Path used to absolutize relative search paths.
                Defaults to :attr:`base` which defaults to ``os.getcwd()``.
            :param index: Position within the list of search paths. Defaults
                to last index (appends to the list).
            :param create: If True, create the directory if it is missing.
            The `base` parameter makes it easy to reference files installed
            along with a python module or package::
                res.add_path('./resources/', __file__)
        """
        base = os.path.abspath(os.path.dirname(base or self.base))
        path = os.path.abspath(os.path.join(base, os.path.dirname(path)))
        path += os.sep
        # Re-adding an existing path moves it to the requested position.
        if path in self.path:
            self.path.remove(path)
        if create and not os.path.isdir(path):
            os.makedirs(path)
        if index is None:
            self.path.append(path)
        else:
            self.path.insert(index, path)
        self.cache.clear()
        return os.path.exists(path)
    def __iter__(self):
        """ Iterate over all existing files in all registered paths. """
        search = self.path[:]
        while search:
            path = search.pop()
            if not os.path.isdir(path): continue
            for name in os.listdir(path):
                full = os.path.join(path, name)
                # Recurse into sub-directories; yield plain files.
                if os.path.isdir(full): search.append(full)
                else: yield full
    def lookup(self, name):
        """ Search for a resource and return an absolute file path, or `None`.

            The :attr:`path` list is searched in order. The first match is
            returned. Symlinks are followed. The result is cached to speed up
            future lookups. """
        if name not in self.cache or DEBUG:
            for path in self.path:
                fpath = os.path.join(path, name)
                if os.path.isfile(fpath):
                    if self.cachemode in ('all', 'found'):
                        self.cache[name] = fpath
                    return fpath
            if self.cachemode == 'all':
                self.cache[name] = None
            # BUGFIX: a miss with cachemode 'found' or 'none' used to fall
            # through to ``self.cache[name]`` and raise KeyError instead of
            # returning None as documented.
            return None
        return self.cache[name]
    def open(self, name, mode='r', *args, **kwargs):
        """ Find a resource and return a file object, or raise IOError. """
        fname = self.lookup(name)
        if not fname: raise IOError("Resource %r not found." % name)
        return self.opener(fname, mode=mode, *args, **kwargs)
class FileUpload(object):
    def __init__(self, fileobj, name, filename, headers=None):
        """ Wrapper for file uploads. """
        #: Open file(-like) object (BytesIO buffer or temporary file)
        self.file = fileobj
        #: Name of the upload form field
        self.name = name
        #: Raw filename as sent by the client (may contain unsafe characters)
        self.raw_filename = filename
        #: A :class:`HeaderDict` with additional headers (e.g. content-type)
        self.headers = HeaderDict(headers) if headers else HeaderDict()
    # Convenience accessors for common upload headers.
    content_type = HeaderProperty('Content-Type')
    content_length = HeaderProperty('Content-Length', reader=int, default=-1)
    @cached_property
    def filename(self):
        """ Name of the file on the client file system, but normalized to ensure
            file system compatibility. An empty filename is returned as 'empty'.
            Only ASCII letters, digits, dashes, underscores and dots are
            allowed in the final filename. Accents are removed, if possible.
            Whitespace is replaced by a single dash. Leading or tailing dots
            or dashes are removed. The filename is limited to 255 characters.
        """
        fname = self.raw_filename
        if not isinstance(fname, unicode):
            fname = fname.decode('utf8', 'ignore')
        # Decompose accented characters, then drop everything non-ASCII.
        fname = normalize('NFKD', fname)
        fname = fname.encode('ASCII', 'ignore').decode('ASCII')
        # Keep only the basename; treat backslashes as path separators too.
        fname = os.path.basename(fname.replace('\\', os.path.sep))
        # Whitelist safe characters, then collapse whitespace/dash runs.
        fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip()
        fname = re.sub(r'[-\s]+', '-', fname).strip('.-')
        return fname[:255] or 'empty'
    def _copy_file(self, fp, chunk_size=2 ** 16):
        # Copy the whole buffer to fp, then restore the original position.
        read, write, offset = self.file.read, fp.write, self.file.tell()
        while 1:
            buf = read(chunk_size)
            if not buf: break
            write(buf)
        self.file.seek(offset)
    def save(self, destination, overwrite=False, chunk_size=2 ** 16):
        """ Save file to disk or copy its content to an open file(-like) object.
            If *destination* is a directory, :attr:`filename` is added to the
            path. Existing files are not overwritten by default (IOError).
            :param destination: File path, directory or file(-like) object.
            :param overwrite: If True, replace existing files. (default: False)
            :param chunk_size: Bytes to read at a time. (default: 64kb)
        """
        if isinstance(destination, basestring):  # Except file-likes here
            if os.path.isdir(destination):
                destination = os.path.join(destination, self.filename)
            if not overwrite and os.path.exists(destination):
                raise IOError('File exists.')
            with open(destination, 'wb') as fp:
                self._copy_file(fp, chunk_size)
        else:
            self._copy_file(destination, chunk_size)
###############################################################################
# Application Helper ###########################################################
###############################################################################
def abort(code=500, text='Unknown Error.'):
    """ Aborts execution and causes a HTTP error.

        :param code: HTTP status code. (default: 500)
        :param text: Error body text passed to :exc:`HTTPError`.
    """
    raise HTTPError(code, text)
def redirect(url, code=None):
    """ Aborts execution and causes a 303 or 302 redirect, depending on
        the HTTP protocol version.

        :param url: Redirect target, resolved against the current request URL.
        :param code: Explicit status code. Defaults to 303 for HTTP/1.1
            clients and 302 otherwise.
    """
    if not code:
        code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302
    # Copy the current response so headers/cookies set so far are kept.
    res = response.copy(cls=HTTPResponse)
    res.status = code
    res.body = ""
    res.set_header('Location', urljoin(request.url, url))
    raise res
def _file_iter_range(fp, offset, bytes, maxread=1024 * 1024):
""" Yield chunks from a range in a file. No chunk is bigger than maxread."""
fp.seek(offset)
while bytes > 0:
part = fp.read(min(bytes, maxread))
if not part: break
bytes -= len(part)
yield part
def static_file(filename, root,
                mimetype='auto',
                download=False,
                charset='UTF-8'):
    """ Open a file in a safe way and return :exc:`HTTPResponse` with status
        code 200, 304, 403 or 404. The ``Content-Type``, ``Content-Encoding``,
        ``Content-Length`` and ``Last-Modified`` headers are set if possible.
        Special support for ``If-Modified-Since``, ``Range`` and ``HEAD``
        requests.

        :param filename: Name or path of the file to send.
        :param root: Root path for file lookups. Should be an absolute directory
            path.
        :param mimetype: Defines the content-type header (default: guess from
            file extension)
        :param download: If True, ask the browser to open a `Save as...` dialog
            instead of opening the file with the associated program. You can
            specify a custom filename as a string. If not specified, the
            original filename is used (default: False).
        :param charset: The charset to use for files with a ``text/*``
            mime-type. (default: UTF-8)
    """
    root = os.path.abspath(root) + os.sep
    filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
    headers = dict()
    # Refuse paths that escape the root directory (e.g. via '..').
    if not filename.startswith(root):
        return HTTPError(403, "Access denied.")
    if not os.path.exists(filename) or not os.path.isfile(filename):
        return HTTPError(404, "File does not exist.")
    if not os.access(filename, os.R_OK):
        return HTTPError(403, "You do not have permission to access this file.")
    if mimetype == 'auto':
        # Guess from the download name if one was given, else from the file.
        if download and download != True:
            mimetype, encoding = mimetypes.guess_type(download)
        else:
            mimetype, encoding = mimetypes.guess_type(filename)
        if encoding: headers['Content-Encoding'] = encoding
    if mimetype:
        if mimetype[:5] == 'text/' and charset and 'charset' not in mimetype:
            mimetype += '; charset=%s' % charset
        headers['Content-Type'] = mimetype
    if download:
        download = os.path.basename(filename if download == True else download)
        headers['Content-Disposition'] = 'attachment; filename="%s"' % download
    stats = os.stat(filename)
    headers['Content-Length'] = clen = stats.st_size
    lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
    headers['Last-Modified'] = lm
    # Conditional GET: answer 304 if the client copy is still fresh.
    ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
    if ims:
        ims = parse_date(ims.split(";")[0].strip())
    if ims is not None and ims >= int(stats.st_mtime):
        headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT",
                                        time.gmtime())
        return HTTPResponse(status=304, **headers)
    body = '' if request.method == 'HEAD' else open(filename, 'rb')
    headers["Accept-Ranges"] = "bytes"
    ranges = request.environ.get('HTTP_RANGE')
    if 'HTTP_RANGE' in request.environ:
        # Only the first satisfiable range is served (single-part response).
        ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen))
        if not ranges:
            return HTTPError(416, "Requested Range Not Satisfiable")
        offset, end = ranges[0]
        headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end - 1, clen)
        headers["Content-Length"] = str(end - offset)
        if body: body = _file_iter_range(body, offset, end - offset)
        return HTTPResponse(body, status=206, **headers)
    return HTTPResponse(body, **headers)
###############################################################################
# HTTP Utilities and MISC (TODO) ###############################################
###############################################################################
def debug(mode=True):
    """ Change the debug level.
    There is only one debug level supported at the moment."""
    global DEBUG
    # Re-enable warnings that Python hides by default while debugging.
    if mode: warnings.simplefilter('default')
    DEBUG = bool(mode)
def http_date(value):
    """ Format a date/datetime, epoch number, struct_time or pre-formatted
        string as an RFC 1123 HTTP date string. """
    if isinstance(value, (datedate, datetime)):
        value = value.utctimetuple()
    elif isinstance(value, (int, float)):
        value = time.gmtime(value)
    # Pre-formatted strings pass through untouched.
    if isinstance(value, basestring):
        return value
    return time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
def parse_date(ims):
    """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """
    try:
        parts = email.utils.parsedate_tz(ims)
        # Force tm_isdst=0, then subtract the header's own UTC offset (if
        # any) and the local standard-time offset to get a UTC epoch.
        utc_offset = parts[9] or 0
        return time.mktime(parts[:8] + (0, )) - utc_offset - time.timezone
    except (TypeError, ValueError, IndexError, OverflowError):
        return None
def parse_auth(header):
    """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
    try:
        method, data = header.split(None, 1)
        if method.lower() != 'basic':
            return None
        credentials = touni(base64.b64decode(tob(data)))
        user, pwd = credentials.split(':', 1)
        return user, pwd
    except (KeyError, ValueError):
        # Malformed header or invalid base64 (binascii.Error is a ValueError).
        return None
def parse_range_header(header, maxlen=0):
    """ Yield (start, end) ranges parsed from a HTTP Range header. Skip
        unsatisfiable ranges. The end index is non-inclusive."""
    if not header or not header.startswith('bytes='):
        return
    for spec in header[6:].split(','):
        if '-' not in spec:
            continue
        first, last = spec.split('-', 1)
        try:
            if not first:
                # suffix range: bytes=-100 -> the last 100 bytes
                start, end = max(0, maxlen - int(last)), maxlen
            elif not last:
                # open range: bytes=100- -> everything from offset 100 on
                start, end = int(first), maxlen
            else:
                # closed range: bytes=100-200 -> bytes 100..200 inclusive
                start, end = int(first), min(int(last) + 1, maxlen)
            if 0 <= start < end <= maxlen:
                yield start, end
        except ValueError:
            pass
def _parse_qsl(qs):
    """ Parse a query string into a list of (key, value) pairs. Both '&'
        and ';' are accepted as pair separators; missing values become ''. """
    pairs = []
    for field in qs.replace(';', '&').split('&'):
        if not field:
            continue
        key, _, value = field.partition('=')
        key = urlunquote(key.replace('+', ' '))
        value = urlunquote(value.replace('+', ' '))
        pairs.append((key, value))
    return pairs
def _lscmp(a, b):
""" Compares two strings in a cryptographically safe way:
Runtime is not affected by length of common prefix. """
return not sum(0 if x == y else 1
for x, y in zip(a, b)) and len(a) == len(b)
def cookie_encode(data, key):
    """ Encode and sign a pickle-able object. Return a (byte) string """
    import hashlib  # local import keeps the fix self-contained
    msg = base64.b64encode(pickle.dumps(data, -1))
    # BUGFIX: hmac.new() without an explicit digestmod raises TypeError on
    # Python 3.8+; md5 was the implicit default on older versions, so making
    # it explicit keeps the cookie format backward compatible.
    sig = base64.b64encode(hmac.new(tob(key), msg, digestmod=hashlib.md5).digest())
    return tob('!') + sig + tob('?') + msg
def cookie_decode(data, key):
    """ Verify and decode an encoded string. Return an object or None."""
    import hashlib  # local import keeps the fix self-contained
    data = tob(data)
    if cookie_is_encoded(data):
        sig, msg = data.split(tob('?'), 1)
        # BUGFIX: explicit digestmod (md5 was the pre-3.8 implicit default).
        expected = base64.b64encode(hmac.new(tob(key), msg,
                                             digestmod=hashlib.md5).digest())
        # NOTE: pickle.loads on client data is only acceptable because the
        # HMAC signature is verified first -- never skip the _lscmp check.
        if _lscmp(sig[1:], expected):
            return pickle.loads(base64.b64decode(msg))
    return None
def cookie_is_encoded(data):
    """ Return True if the argument looks like a encoded cookie."""
    has_signature_prefix = data.startswith(tob('!'))
    return bool(has_signature_prefix and tob('?') in data)
def html_escape(string):
    """ Escape HTML special characters ``&<>`` and quotes ``'"``. """
    # BUGFIX: the entity replacement strings were corrupted (e.g. a no-op
    # ``replace('&', '&')``). The ampersand must be escaped first so the
    # later entities are not double-escaped.
    return string.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')\
                 .replace('"', '&quot;').replace("'", '&#039;')
def html_quote(string):
    """ Escape and quote a string to be used as an HTTP attribute."""
    # BUGFIX: the replacement strings were corrupted. Newlines, carriage
    # returns and tabs become numeric character references so they survive
    # inside a quoted attribute value.
    return '"%s"' % html_escape(string).replace('\n', '&#10;')\
        .replace('\r', '&#13;').replace('\t', '&#9;')
def yieldroutes(func):
    """ Return a generator for routes that match the signature (name, args)
        of the func parameter. This may yield more than one route if the
        function takes optional keyword arguments. The output is best
        described by example::

            a()         -> '/a'
            b(x, y)     -> '/b/<x>/<y>'
            c(x, y=5)   -> '/c/<x>' and '/c/<x>/<y>'
            d(x=5, y=6) -> '/d' and '/d/<x>' and '/d/<x>/<y>'
    """
    route = '/' + func.__name__.replace('__', '/').lstrip('/')
    spec = getargspec(func)
    # Arguments without defaults are mandatory path segments.
    mandatory = len(spec[0]) - len(spec[3] or [])
    route += ('/<%s>' * mandatory) % tuple(spec[0][:mandatory])
    yield route
    # Each defaulted argument adds one more, longer route variant.
    for arg in spec[0][mandatory:]:
        route += '/<%s>' % arg
        yield route
def path_shift(script_name, path_info, shift=1):
    """ Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.

        :return: The modified paths.
        :param script_name: The SCRIPT_NAME path.
        :param path_info: The PATH_INFO path.
        :param shift: The number of path fragments to shift. May be negative to
          change the shift direction. (default: 1)
        :raises AssertionError: if there are not enough fragments to shift.
    """
    # BUGFIX (docs): the second parameter was documented as `script_name`
    # twice; it is `path_info`.
    if shift == 0: return script_name, path_info
    pathlist = path_info.strip('/').split('/')
    scriptlist = script_name.strip('/').split('/')
    if pathlist and pathlist[0] == '': pathlist = []
    if scriptlist and scriptlist[0] == '': scriptlist = []
    if 0 < shift <= len(pathlist):
        # Move the first `shift` fragments from PATH_INFO to SCRIPT_NAME.
        moved = pathlist[:shift]
        scriptlist = scriptlist + moved
        pathlist = pathlist[shift:]
    elif 0 > shift >= -len(scriptlist):
        # Move the last `-shift` fragments from SCRIPT_NAME to PATH_INFO.
        moved = scriptlist[shift:]
        pathlist = moved + pathlist
        scriptlist = scriptlist[:shift]
    else:
        empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO'
        raise AssertionError("Cannot shift. Nothing left from %s" % empty)
    new_script_name = '/' + '/'.join(scriptlist)
    new_path_info = '/' + '/'.join(pathlist)
    # Preserve a trailing slash on PATH_INFO.
    if path_info.endswith('/') and pathlist: new_path_info += '/'
    return new_script_name, new_path_info
def auth_basic(check, realm="private", text="Access denied"):
    """ Callback decorator to require HTTP auth (basic).

        :param check: Callable ``check(user, password)`` that returns a truthy
            value when the credentials are valid.
        :param realm: Realm string sent in the WWW-Authenticate challenge.
        :param text: Body of the 401 error response.

        TODO: Add route(check_auth=...) parameter. """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*a, **ka):
            user, password = request.auth or (None, None)
            if user is None or not check(user, password):
                # Reply with a challenge so clients prompt for credentials.
                err = HTTPError(401, text)
                err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
                return err
            return func(*a, **ka)
        return wrapper
    return decorator
# Shortcuts for common Bottle methods.
# They all refer to the current default application.
def make_default_app_wrapper(name):
    """ Return a callable that relays calls to the current default app. """
    @functools.wraps(getattr(Bottle, name))
    def wrapper(*a, **ka):
        # Resolve app() at call time so the *current* default app is used.
        return getattr(app(), name)(*a, **ka)
    return wrapper
# Module-level shortcuts: e.g. ``route(...)`` equals ``app().route(...)``.
route = make_default_app_wrapper('route')
get = make_default_app_wrapper('get')
post = make_default_app_wrapper('post')
put = make_default_app_wrapper('put')
delete = make_default_app_wrapper('delete')
patch = make_default_app_wrapper('patch')
error = make_default_app_wrapper('error')
mount = make_default_app_wrapper('mount')
hook = make_default_app_wrapper('hook')
install = make_default_app_wrapper('install')
uninstall = make_default_app_wrapper('uninstall')
url = make_default_app_wrapper('get_url')
###############################################################################
# Server Adapter ###############################################################
###############################################################################
class ServerAdapter(object):
    """ Base class for the pluggable server backends. Subclasses override
        :meth:`run` to start a concrete WSGI server. """
    quiet = False

    def __init__(self, host='127.0.0.1', port=8080, **options):
        self.options = options
        self.host = host
        self.port = int(port)

    def run(self, handler):  # pragma: no cover
        pass

    def __repr__(self):
        kwargs = ', '.join('%s=%s' % (key, repr(value))
                           for key, value in self.options.items())
        return "%s(%s)" % (self.__class__.__name__, kwargs)
class CGIServer(ServerAdapter):
    """ Run once per request through the stdlib wsgiref CGI handler. """
    quiet = True
    def run(self, handler):  # pragma: no cover
        from wsgiref.handlers import CGIHandler
        def fixed_environ(environ, start_response):
            # CGI environments may lack PATH_INFO; default it to ''.
            environ.setdefault('PATH_INFO', '')
            return handler(environ, start_response)
        CGIHandler().run(fixed_environ)
class FlupFCGIServer(ServerAdapter):
    """ FastCGI server based on the third-party flup package. """
    def run(self, handler):  # pragma: no cover
        import flup.server.fcgi
        # Default to a TCP bind address unless the caller provided one.
        self.options.setdefault('bindAddress', (self.host, self.port))
        flup.server.fcgi.WSGIServer(handler, **self.options).run()
class WSGIRefServer(ServerAdapter):
    """ Single-threaded reference server from the standard library. """
    def run(self, app):  # pragma: no cover
        from wsgiref.simple_server import make_server
        from wsgiref.simple_server import WSGIRequestHandler, WSGIServer
        import socket
        class FixedHandler(WSGIRequestHandler):
            def address_string(self):  # Prevent reverse DNS lookups please.
                return self.client_address[0]
            def log_request(*args, **kw):
                # NOTE: 'self' here is the enclosing adapter (closure), so
                # the adapter's quiet flag silences request logging.
                if not self.quiet:
                    return WSGIRequestHandler.log_request(*args, **kw)
        handler_cls = self.options.get('handler_class', FixedHandler)
        server_cls = self.options.get('server_class', WSGIServer)
        if ':' in self.host:  # Fix wsgiref for IPv6 addresses.
            if getattr(server_cls, 'address_family') == socket.AF_INET:
                class server_cls(server_cls):
                    address_family = socket.AF_INET6
        self.srv = make_server(self.host, self.port, app, server_cls,
                               handler_cls)
        self.port = self.srv.server_port  # update to the actual port (0 means random)
        try:
            self.srv.serve_forever()
        except KeyboardInterrupt:
            self.srv.server_close()  # Prevent ResourceWarning: unclosed socket
            raise
class CherryPyServer(ServerAdapter):
    """ Multi-threaded server based on CherryPy's wsgiserver. """
    def run(self, handler):  # pragma: no cover
        from cherrypy import wsgiserver
        self.options['bind_addr'] = (self.host, self.port)
        self.options['wsgi_app'] = handler
        # SSL options must not reach the constructor; pop them first and
        # assign them as attributes once the server object exists.
        certfile = self.options.get('certfile')
        if certfile:
            del self.options['certfile']
        keyfile = self.options.get('keyfile')
        if keyfile:
            del self.options['keyfile']
        server = wsgiserver.CherryPyWSGIServer(**self.options)
        if certfile:
            server.ssl_certificate = certfile
        if keyfile:
            server.ssl_private_key = keyfile
        try:
            server.start()
        finally:
            server.stop()
class WaitressServer(ServerAdapter):
    """ Pure-python, multi-threaded server based on waitress. """
    def run(self, handler):
        from waitress import serve
        serve(handler, host=self.host, port=self.port, _quiet=self.quiet)
class PasteServer(ServerAdapter):
    """ Server based on paste.httpserver with access logging. """
    def run(self, handler):  # pragma: no cover
        from paste import httpserver
        from paste.translogger import TransLogger
        # Wrap the app in an access logger unless quiet mode is requested.
        handler = TransLogger(handler, setup_console_handler=(not self.quiet))
        httpserver.serve(handler,
                         host=self.host,
                         port=str(self.port), **self.options)
class MeinheldServer(ServerAdapter):
    """ Server based on the meinheld C extension. """
    def run(self, handler):
        from meinheld import server
        server.listen((self.host, self.port))
        server.run(handler)
class FapwsServer(ServerAdapter):
    """ Extremely fast webserver using libev. See http://www.fapws.org/ """
    def run(self, handler):  # pragma: no cover
        import fapws._evwsgi as evwsgi
        from fapws import base, config
        port = self.port
        if float(config.SERVER_IDENT[-2:]) > 0.4:
            # fapws3 silently changed its API in 0.5
            port = str(port)
        evwsgi.start(self.host, port)
        # fapws3 never releases the GIL. Complain upstream. I tried. No luck.
        if 'BOTTLE_CHILD' in os.environ and not self.quiet:
            _stderr("WARNING: Auto-reloading does not work with Fapws3.\n")
            _stderr(" (Fapws3 breaks python thread support)\n")
        evwsgi.set_base_module(base)
        def app(environ, start_response):
            environ['wsgi.multiprocess'] = False
            return handler(environ, start_response)
        evwsgi.wsgi_cb(('', app))
        evwsgi.run()
class TornadoServer(ServerAdapter):
    """ The super hyped asynchronous server by facebook. Untested. """
    def run(self, handler):  # pragma: no cover
        import tornado.wsgi, tornado.httpserver, tornado.ioloop
        # Wrap the WSGI handler so tornado's HTTP server can serve it.
        container = tornado.wsgi.WSGIContainer(handler)
        server = tornado.httpserver.HTTPServer(container)
        server.listen(port=self.port, address=self.host)
        tornado.ioloop.IOLoop.instance().start()
class AppEngineServer(ServerAdapter):
    """ Adapter for Google App Engine. """
    quiet = True
    def run(self, handler):
        from google.appengine.ext.webapp import util
        # A main() function in the handler script enables 'App Caching'.
        # Lets makes sure it is there. This _really_ improves performance.
        module = sys.modules.get('__main__')
        if module and not hasattr(module, 'main'):
            module.main = lambda: util.run_wsgi_app(handler)
        util.run_wsgi_app(handler)
class TwistedServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from twisted.web import server, wsgi
        from twisted.python.threadpool import ThreadPool
        from twisted.internet import reactor
        thread_pool = ThreadPool()
        thread_pool.start()
        # Stop the worker pool when the reactor shuts down.
        reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
        factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler))
        reactor.listenTCP(self.port, factory, interface=self.host)
        # The reactor may already be running (e.g. started elsewhere).
        if not reactor.running:
            reactor.run()
class DieselServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from diesel.protocols.wsgi import WSGIApplication
        app = WSGIApplication(handler, port=self.port)
        app.run()
class GeventServer(ServerAdapter):
    """ Untested. Options:
        * `fast` (default: False) uses libevent's http server, but has some
          issues: No streaming, no pipelining, no SSL.
        * See gevent.wsgi.WSGIServer() documentation for more options.
    """
    def run(self, handler):
        from gevent import wsgi, pywsgi, local
        # Bail out early if gevent's monkey patching is not active.
        if not isinstance(threading.local(), local.local):
            msg = "Bottle requires gevent.monkey.patch_all() (before import)"
            raise RuntimeError(msg)
        if not self.options.pop('fast', None): wsgi = pywsgi
        self.options['log'] = None if self.quiet else 'default'
        address = (self.host, self.port)
        server = wsgi.WSGIServer(address, handler, **self.options)
        # Allow the auto-reloader parent to stop the server via SIGINT.
        if 'BOTTLE_CHILD' in os.environ:
            import signal
            signal.signal(signal.SIGINT, lambda s, f: server.stop())
        server.serve_forever()
class GeventSocketIOServer(ServerAdapter):
    """ Server based on gevent-socketio. """
    def run(self, handler):
        from socketio import server
        address = (self.host, self.port)
        server.SocketIOServer(address, handler, **self.options).serve_forever()
class GunicornServer(ServerAdapter):
    """ Untested. See http://gunicorn.org/configure.html for options. """
    def run(self, handler):
        from gunicorn.app.base import Application
        config = {'bind': "%s:%d" % (self.host, int(self.port))}
        config.update(self.options)
        # Minimal Application subclass that injects our config and handler.
        class GunicornApplication(Application):
            def init(self, parser, opts, args):
                return config
            def load(self):
                return handler
        GunicornApplication().run()
class EventletServer(ServerAdapter):
    """ Untested. Options:
        * `backlog` adjust the eventlet backlog parameter which is the maximum
          number of queued connections. Should be at least 1; the maximum
          value is system-dependent.
        * `family`: (default is 2) socket family, optional. See socket
          documentation for available families.
    """
    def run(self, handler):
        from eventlet import wsgi, listen, patcher
        # Bail out early if eventlet's monkey patching is not active.
        if not patcher.is_monkey_patched(os):
            msg = "Bottle requires eventlet.monkey_patch() (before import)"
            raise RuntimeError(msg)
        # Only forward socket options that listen() actually understands.
        socket_args = {}
        for arg in ('backlog', 'family'):
            try:
                socket_args[arg] = self.options.pop(arg)
            except KeyError:
                pass
        address = (self.host, self.port)
        try:
            wsgi.server(listen(address, **socket_args), handler,
                        log_output=(not self.quiet))
        except TypeError:
            # Fallback, if we have old version of eventlet
            wsgi.server(listen(address), handler)
class RocketServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from rocket import Rocket
        server = Rocket((self.host, self.port), 'wsgi', {'wsgi_app': handler})
        server.start()
class BjoernServer(ServerAdapter):
    """ Fast server written in C: https://github.com/jonashaag/bjoern """
    def run(self, handler):
        from bjoern import run
        run(handler, self.host, self.port)
class AiohttpServer(ServerAdapter):
    """ Untested.
        aiohttp
        https://pypi.python.org/pypi/aiohttp/
    """
    def run(self, handler):
        import asyncio
        from aiohttp.wsgi import WSGIServerHttpProtocol
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)
        protocol_factory = lambda: WSGIServerHttpProtocol(
            handler,
            readpayload=True,
            debug=(not self.quiet))
        self.loop.run_until_complete(self.loop.create_server(protocol_factory,
                                                             self.host,
                                                             self.port))
        # Allow the auto-reloader parent to stop the loop via SIGINT.
        if 'BOTTLE_CHILD' in os.environ:
            import signal
            signal.signal(signal.SIGINT, lambda s, f: self.loop.stop())
        try:
            self.loop.run_forever()
        except KeyboardInterrupt:
            self.loop.stop()
class AutoServer(ServerAdapter):
    """ Untested. """
    # Adapters are tried in order; the first importable one wins.
    adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer,
                WSGIRefServer]
    def run(self, handler):
        for sa in self.adapters:
            try:
                return sa(self.host, self.port, **self.options).run(handler)
            except ImportError:
                pass
#: Maps server names (accepted by ``run(server=...)``) to adapter classes.
server_names = {
    'cgi': CGIServer,
    'flup': FlupFCGIServer,
    'wsgiref': WSGIRefServer,
    'waitress': WaitressServer,
    'cherrypy': CherryPyServer,
    'paste': PasteServer,
    'fapws3': FapwsServer,
    'tornado': TornadoServer,
    'gae': AppEngineServer,
    'twisted': TwistedServer,
    'diesel': DieselServer,
    'meinheld': MeinheldServer,
    'gunicorn': GunicornServer,
    'eventlet': EventletServer,
    'gevent': GeventServer,
    'geventSocketIO': GeventSocketIOServer,
    'rocket': RocketServer,
    'bjoern': BjoernServer,
    'aiohttp': AiohttpServer,
    'auto': AutoServer,
}
###############################################################################
# Application Control ##########################################################
###############################################################################
def load(target, **namespace):
    """ Import a module or fetch an object from a module.

        * ``package.module`` returns `module` as a module object.
        * ``pack.mod:name`` returns the module variable `name` from `pack.mod`.
        * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result.

        The last form accepts not only function calls, but any type of
        expression. Keyword arguments passed to this function are available as
        local variables. Example: ``import_string('re:compile(x)', x='[a-z]')``
    """
    if ':' in target:
        module, target = target.split(":", 1)
    else:
        module, target = target, None
    if module not in sys.modules:
        __import__(module)
    if not target:
        return sys.modules[module]
    if target.isalnum():
        # Simple attribute name: fetch it directly.
        return getattr(sys.modules[module], target)
    # Arbitrary expression: evaluate with the top-level package in scope.
    # NOTE: eval() runs the target string -- only pass trusted input here.
    package_name = module.split('.')[0]
    namespace[package_name] = sys.modules[package_name]
    return eval('%s.%s' % (module, target), namespace)
def load_app(target):
    """ Load a bottle application from a module and make sure that the import
        does not affect the current default application, but returns a separate
        application object. See :func:`load` for the target parameter. """
    global NORUN
    # Set NORUN so any run() call executed during the import is a no-op.
    NORUN, nr_old = True, NORUN
    tmp = default_app.push()  # Create a new "default application"
    try:
        rv = load(target)  # Import the target module
        # Prefer an explicitly returned app; fall back to the implicit one.
        return rv if callable(rv) else tmp
    finally:
        default_app.remove(tmp)  # Remove the temporary added default application
        NORUN = nr_old
# Keep a reference to debug(): run() shadows the name with its parameter.
_debug = debug
def run(app=None,
        server='wsgiref',
        host='127.0.0.1',
        port=8080,
        interval=1,
        reloader=False,
        quiet=False,
        plugins=None,
        debug=None,
        config=None, **kargs):
    """ Start a server instance. This method blocks until the server terminates.

        :param app: WSGI application or target string supported by
               :func:`load_app`. (default: :func:`default_app`)
        :param server: Server adapter to use. See :data:`server_names` keys
               for valid names or pass a :class:`ServerAdapter` subclass.
               (default: `wsgiref`)
        :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on
               all interfaces including the external one. (default: 127.0.0.1)
        :param port: Server port to bind to. Values below 1024 require root
               privileges. (default: 8080)
        :param reloader: Start auto-reloading server? (default: False)
        :param interval: Auto-reloader interval in seconds (default: 1)
        :param quiet: Suppress output to stdout and stderr? (default: False)
        :param plugins: Plugins (or plugin target strings) installed into the
               application before it starts. (default: None)
        :param debug: If not None, set the global debug level first.
        :param config: Optional dict merged into ``app.config``.
        :param options: Options passed to the server adapter.
     """
    if NORUN: return
    # Reloader parent process: spawn a child with BOTTLE_CHILD set, keep the
    # lockfile's mtime fresh while the child lives, and respawn whenever the
    # child exits with status 3 (= reload requested by FileCheckerThread).
    if reloader and not os.environ.get('BOTTLE_CHILD'):
        import subprocess
        lockfile = None
        try:
            fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock')
            os.close(fd)  # We only need this file to exist. We never write to it
            while os.path.exists(lockfile):
                args = [sys.executable] + sys.argv
                environ = os.environ.copy()
                environ['BOTTLE_CHILD'] = 'true'
                environ['BOTTLE_LOCKFILE'] = lockfile
                p = subprocess.Popen(args, env=environ)
                while p.poll() is None:  # Busy wait...
                    os.utime(lockfile, None)  # I am alive!
                    time.sleep(interval)
                if p.poll() != 3:
                    if os.path.exists(lockfile): os.unlink(lockfile)
                    sys.exit(p.poll())
        except KeyboardInterrupt:
            pass
        finally:
            if os.path.exists(lockfile):
                os.unlink(lockfile)
        return
    try:
        if debug is not None: _debug(debug)
        app = app or default_app()
        # Resolve target strings into actual application objects.
        if isinstance(app, basestring):
            app = load_app(app)
        if not callable(app):
            raise ValueError("Application is not callable: %r" % app)
        for plugin in plugins or []:
            if isinstance(plugin, basestring):
                plugin = load(plugin)
            app.install(plugin)
        if config:
            app.config.update(config)
        # Resolve the server adapter: name -> class -> instance.
        if server in server_names:
            server = server_names.get(server)
        if isinstance(server, basestring):
            server = load(server)
        if isinstance(server, type):
            server = server(host=host, port=port, **kargs)
        if not isinstance(server, ServerAdapter):
            raise ValueError("Unknown or unsupported server: %r" % server)
        server.quiet = server.quiet or quiet
        if not server.quiet:
            _stderr("Bottle v%s server starting up (using %s)...\n" %
                    (__version__, repr(server)))
            _stderr("Listening on http://%s:%d/\n" %
                    (server.host, server.port))
            _stderr("Hit Ctrl-C to quit.\n\n")
        if reloader:
            # Child process: watch source files and exit(3) to ask for reload.
            lockfile = os.environ.get('BOTTLE_LOCKFILE')
            bgcheck = FileCheckerThread(lockfile, interval)
            with bgcheck:
                server.run(app)
            if bgcheck.status == 'reload':
                sys.exit(3)
        else:
            server.run(app)
    except KeyboardInterrupt:
        pass
    except (SystemExit, MemoryError):
        raise
    except:
        if not reloader: raise
        if not getattr(server, 'quiet', quiet):
            print_exc()
        time.sleep(interval)
        sys.exit(3)
class FileCheckerThread(threading.Thread):
    """ Interrupt main-thread as soon as a changed module file is detected,
        the lockfile gets deleted or gets too old. """
    def __init__(self, lockfile, interval):
        threading.Thread.__init__(self)
        self.daemon = True
        self.lockfile, self.interval = lockfile, interval
        #: Is one of 'reload', 'error' or 'exit'
        self.status = None
    def run(self):
        exists = os.path.exists
        mtime = lambda p: os.stat(p).st_mtime
        files = dict()
        # Snapshot the mtime of every loaded module's source file.
        for module in list(sys.modules.values()):
            path = getattr(module, '__file__', '')
            # Watch the .py source, not the compiled .pyc/.pyo artifact.
            if path[-4:] in ('.pyo', '.pyc'): path = path[:-1]
            if path and exists(path): files[path] = mtime(path)
        while not self.status:
            # A missing or stale lockfile means the parent process died.
            if not exists(self.lockfile)\
                    or mtime(self.lockfile) < time.time() - self.interval - 5:
                self.status = 'error'
                thread.interrupt_main()
            for path, lmtime in list(files.items()):
                if not exists(path) or mtime(path) > lmtime:
                    self.status = 'reload'
                    thread.interrupt_main()
                    break
            time.sleep(self.interval)
    def __enter__(self):
        self.start()
    def __exit__(self, exc_type, *_):
        if not self.status: self.status = 'exit'  # silent exit
        self.join()
        # Swallow the KeyboardInterrupt raised by interrupt_main().
        return exc_type is not None and issubclass(exc_type, KeyboardInterrupt)
###############################################################################
# Template Adapters ############################################################
###############################################################################
class TemplateError(HTTPError):
    """ Raised when a template cannot be found or fails to load/compile.
        Reported to the client as an HTTP 500 error. """

    def __init__(self, message):
        HTTPError.__init__(self, 500, message)
class BaseTemplate(object):
    """ Base class and minimal API for template adapters """
    extensions = ['tpl', 'html', 'thtml', 'stpl']
    settings = {}  # used in prepare()
    defaults = {}  # used in render()

    def __init__(self,
                 source=None,
                 name=None,
                 lookup=None,
                 encoding='utf8', **settings):
        """ Create a new template.
            If the source parameter (str or buffer) is missing, the name argument
            is used to guess a template filename. Subclasses can assume that
            self.source and/or self.filename are set. Both are strings.
            The lookup, encoding and settings parameters are stored as instance
            variables.
            The lookup parameter stores a list containing directory paths.
            The encoding parameter should be used to decode byte strings or files.
            The settings parameter contains a dict for engine-specific settings.
        """
        self.name = name
        self.source = source.read() if hasattr(source, 'read') else source
        self.filename = source.filename if hasattr(source, 'filename') else None
        self.lookup = [os.path.abspath(x) for x in lookup] if lookup else []
        self.encoding = encoding
        self.settings = self.settings.copy()  # Copy from class variable
        self.settings.update(settings)  # Apply
        if not self.source and self.name:
            self.filename = self.search(self.name, self.lookup)
            if not self.filename:
                raise TemplateError('Template %s not found.' % repr(name))
        if not self.source and not self.filename:
            raise TemplateError('No template specified.')
        self.prepare(**self.settings)

    @classmethod
    def search(cls, name, lookup=None):
        """ Search name in all directories specified in lookup.
            First without, then with common extensions. Return first hit. """
        if not lookup:
            depr('The template lookup path list should not be empty.',
                 True)  # 0.12
            lookup = ['.']
        if os.path.isabs(name) and os.path.isfile(name):
            depr('Absolute template path names are deprecated.', True)  # 0.12
            return os.path.abspath(name)
        for spath in lookup:
            spath = os.path.abspath(spath) + os.sep
            fname = os.path.abspath(os.path.join(spath, name))
            # Refuse candidates that escape the lookup directory (e.g. '../').
            if not fname.startswith(spath): continue
            if os.path.isfile(fname): return fname
            for ext in cls.extensions:
                if os.path.isfile('%s.%s' % (fname, ext)):
                    return '%s.%s' % (fname, ext)

    @classmethod
    def global_config(cls, key, *args):
        """ This reads or sets the global settings stored in class.settings. """
        if args:
            cls.settings = cls.settings.copy()  # Make settings local to class
            cls.settings[key] = args[0]
        else:
            return cls.settings[key]

    def prepare(self, **options):
        """ Run preparations (parsing, caching, ...).
            It should be possible to call this again to refresh a template or to
            update settings.
        """
        raise NotImplementedError

    def render(self, *args, **kwargs):
        """ Render the template with the specified local variables and return
            a single byte or unicode string. If it is a byte string, the encoding
            must match self.encoding. This method must be thread-safe!
            Local variables may be provided in dictionaries (args)
            or directly, as keywords (kwargs).
        """
        raise NotImplementedError
class MakoTemplate(BaseTemplate):
    """ Template adapter for the Mako engine. """

    def prepare(self, **options):
        from mako.template import Template
        from mako.lookup import TemplateLookup
        options['input_encoding'] = self.encoding
        options.setdefault('format_exceptions', bool(DEBUG))
        mako_lookup = TemplateLookup(directories=self.lookup, **options)
        if self.source:
            self.tpl = Template(self.source, lookup=mako_lookup, **options)
        else:
            self.tpl = Template(uri=self.name,
                                filename=self.filename,
                                lookup=mako_lookup, **options)

    def render(self, *args, **kwargs):
        # Positional dicts are merged into the keyword arguments first,
        # then everything is layered on top of the class-level defaults.
        for mapping in args:
            kwargs.update(mapping)
        variables = self.defaults.copy()
        variables.update(kwargs)
        return self.tpl.render(**variables)
class CheetahTemplate(BaseTemplate):
    """ Template adapter for the Cheetah engine. """

    def prepare(self, **options):
        from Cheetah.Template import Template
        # Thread-local variable dict shared with the template's searchList,
        # so concurrent renders do not see each other's variables.
        self.context = threading.local()
        self.context.vars = {}
        options['searchList'] = [self.context.vars]
        if self.source:
            self.tpl = Template(source=self.source, **options)
        else:
            self.tpl = Template(file=self.filename, **options)

    def render(self, *args, **kwargs):
        for mapping in args:
            kwargs.update(mapping)
        self.context.vars.update(self.defaults)
        self.context.vars.update(kwargs)
        rendered = str(self.tpl)
        self.context.vars.clear()
        return rendered
class Jinja2Template(BaseTemplate):
    """ Template adapter for the Jinja2 engine. """

    def prepare(self, filters=None, tests=None, globals=None, **kwargs):
        """ Build the jinja2 Environment.

            :param filters: optional dict of extra template filters.
            :param tests: optional dict of extra template tests.
            :param globals: optional dict of global template variables.
                (The default was a shared mutable ``{}``; it is now ``None``
                to avoid the mutable-default-argument pitfall. Behavior is
                unchanged: an empty dict and ``None`` are both falsy below.)
            Remaining keyword arguments are passed to the Environment.
        """
        from jinja2 import Environment, FunctionLoader
        self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
        if filters: self.env.filters.update(filters)
        if tests: self.env.tests.update(tests)
        if globals: self.env.globals.update(globals)
        if self.source:
            self.tpl = self.env.from_string(self.source)
        else:
            self.tpl = self.env.get_template(self.filename)

    def render(self, *args, **kwargs):
        """ Render with positional dicts and keyword args layered on defaults. """
        for dictarg in args:
            kwargs.update(dictarg)
        _defaults = self.defaults.copy()
        _defaults.update(kwargs)
        return self.tpl.render(**_defaults)

    def loader(self, name):
        """ File loader for jinja2's FunctionLoader; returns text or None. """
        fname = self.search(name, self.lookup)
        if not fname: return
        with open(fname, "rb") as f:
            return f.read().decode(self.encoding)
class SimpleTemplate(BaseTemplate):
    """ Bottle's built-in template adapter (stpl syntax, see StplParser). """

    def prepare(self,
                escape_func=html_escape,
                noescape=False,
                syntax=None, **ka):
        self.cache = {}  # include/rebase sub-template cache, keyed by name
        enc = self.encoding
        self._str = lambda x: touni(x, enc)
        self._escape = lambda x: escape_func(touni(x, enc))
        self.syntax = syntax
        if noescape:
            # Swap the helpers: {{x}} emits raw text, {{!x}} escapes instead.
            self._str, self._escape = self._escape, self._str

    @cached_property
    def co(self):
        # Code object compiled from the translated template source.
        return compile(self.code, self.filename or '<string>', 'exec')

    @cached_property
    def code(self):
        # Translate the stpl source (from self.source or self.filename)
        # into executable python code.
        source = self.source
        if not source:
            with open(self.filename, 'rb') as f:
                source = f.read()
        try:
            source, encoding = touni(source), 'utf8'
        except UnicodeError:
            depr('Template encodings other than utf8 are not supported.')  # 0.11
            source, encoding = touni(source, 'latin1'), 'latin1'
        parser = StplParser(source, encoding=encoding, syntax=self.syntax)
        code = parser.translate()
        self.encoding = parser.encoding
        return code

    def _rebase(self, _env, _name=None, **kwargs):
        # Remember the base template; it is rendered after the child finishes.
        _env['_rebase'] = (_name, kwargs)

    def _include(self, _env, _name=None, **kwargs):
        # Render a sub-template with a copy of the current environment.
        env = _env.copy()
        env.update(kwargs)
        if _name not in self.cache:
            self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
        return self.cache[_name].execute(env['_stdout'], env)

    def execute(self, _stdout, kwargs):
        # Run the compiled template; output fragments accumulate in _stdout.
        env = self.defaults.copy()
        env.update(kwargs)
        env.update({
            '_stdout': _stdout,
            '_printlist': _stdout.extend,
            'include': functools.partial(self._include, env),
            'rebase': functools.partial(self._rebase, env),
            '_rebase': None,
            '_str': self._str,
            '_escape': self._escape,
            'get': env.get,
            'setdefault': env.setdefault,
            'defined': env.__contains__
        })
        eval(self.co, env)
        if env.get('_rebase'):
            subtpl, rargs = env.pop('_rebase')
            rargs['base'] = ''.join(_stdout)  # copy stdout
            del _stdout[:]  # clear stdout
            return self._include(env, subtpl, **rargs)
        return env

    def render(self, *args, **kwargs):
        """ Render the template using keyword arguments as local variables. """
        env = {}
        stdout = []
        for dictarg in args:
            env.update(dictarg)
        env.update(kwargs)
        self.execute(stdout, env)
        return ''.join(stdout)
class StplSyntaxError(TemplateError):
    """ Raised for malformed stpl template syntax. """
    pass
class StplParser(object):
    """ Parser for stpl templates. Translates template source into python
        code (one-shot use; see translate()). """
    _re_cache = {}  #: Cache for compiled re patterns

    # This huge pile of voodoo magic splits python code into 8 different tokens.
    # We use the verbose (?x) regex mode to make this more manageable
    # Group 1 matches any python string literal (with optional prefix chars).
    _re_tok = _re_inl = r'''((?mx)  # verbose and dot-matches-newline mode
    [urbURB]*
    (?: ''(?!')
    |""(?!")
    |'{6}
    |"{6}
    |'(?:[^\\']|\\.)+?'
    |"(?:[^\\"]|\\.)+?"
    |'{3}(?:[^\\]|\\.|\n)+?'{3}
    |"{3}(?:[^\\]|\\.|\n)+?"{3}
    )
    )'''
    _re_inl = _re_tok.replace(r'|\n', '')  # We re-use this string pattern later
    _re_tok += r'''
    # 2: Comments (until end of line, but not the newline itself)
    |(\#.*)
    # 3: Open and close (4) grouping tokens
    |([\[\{\(])
    |([\]\}\)])
    # 5,6: Keywords that start or continue a python block (only start of line)
    |^([\ \t]*(?:if|for|while|with|try|def|class)\b)
    |^([\ \t]*(?:elif|else|except|finally)\b)
    # 7: Our special 'end' keyword (but only if it stands alone)
    |((?:^|;)[\ \t]*end[\ \t]*(?=(?:%(block_close)s[\ \t]*)?\r?$|;|\#))
    # 8: A customizable end-of-code-block template token (only end of line)
    |(%(block_close)s[\ \t]*(?=\r?$))
    # 9: And finally, a single newline. The 10th token is 'everything else'
    |(\r?\n)
    '''
    # Match the start tokens of code areas in a template
    _re_split = r'''(?m)^[ \t]*(\\?)((%(line_start)s)|(%(block_start)s))'''
    # Match inline statements (may contain python strings)
    _re_inl = r'''%%(inline_start)s((?:%s|[^'"\n]+?)*?)%%(inline_end)s''' % _re_inl

    default_syntax = '<% %> % {{ }}'

    def __init__(self, source, syntax=None, encoding='utf8'):
        self.source, self.encoding = touni(source, encoding), encoding
        self.set_syntax(syntax or self.default_syntax)
        self.code_buffer, self.text_buffer = [], []
        self.lineno, self.offset = 1, 0
        self.indent, self.indent_mod = 0, 0
        self.paren_depth = 0

    def get_syntax(self):
        """ Tokens as a space separated string (default: <% %> % {{ }}) """
        return self._syntax

    def set_syntax(self, syntax):
        # Compile (and cache) the three patterns for this token set.
        self._syntax = syntax
        self._tokens = syntax.split()
        if not syntax in self._re_cache:
            names = 'block_start block_close line_start inline_start inline_end'
            etokens = map(re.escape, self._tokens)
            pattern_vars = dict(zip(names.split(), etokens))
            patterns = (self._re_split, self._re_tok, self._re_inl)
            patterns = [re.compile(p % pattern_vars) for p in patterns]
            self._re_cache[syntax] = patterns
        self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax]

    syntax = property(get_syntax, set_syntax)

    def translate(self):
        """ Translate the template source; returns the generated python code. """
        if self.offset: raise RuntimeError('Parser is a one time instance.')
        while True:
            m = self.re_split.search(self.source, pos=self.offset)
            if m:
                text = self.source[self.offset:m.start()]
                self.text_buffer.append(text)
                self.offset = m.end()
                if m.group(1):  # Escape syntax
                    line, sep, _ = self.source[self.offset:].partition('\n')
                    self.text_buffer.append(self.source[m.start():m.start(1)] +
                                            m.group(2) + line + sep)
                    self.offset += len(line + sep)
                    continue
                self.flush_text()
                self.offset += self.read_code(self.source[self.offset:],
                                              multiline=bool(m.group(4)))
            else:
                break
        self.text_buffer.append(self.source[self.offset:])
        self.flush_text()
        return ''.join(self.code_buffer)

    def read_code(self, pysource, multiline):
        # Consume one code area (a '%' line or a '<% ... %>' block);
        # returns the number of characters consumed.
        code_line, comment = '', ''
        offset = 0
        while True:
            m = self.re_tok.search(pysource, pos=offset)
            if not m:
                code_line += pysource[offset:]
                offset = len(pysource)
                self.write_code(code_line.strip(), comment)
                break
            code_line += pysource[offset:m.start()]
            offset = m.end()
            _str, _com, _po, _pc, _blk1, _blk2, _end, _cend, _nl = m.groups()
            if self.paren_depth > 0 and (_blk1 or _blk2):  # a if b else c
                code_line += _blk1 or _blk2
                continue
            if _str:  # Python string
                code_line += _str
            elif _com:  # Python comment (up to EOL)
                comment = _com
                if multiline and _com.strip().endswith(self._tokens[1]):
                    multiline = False  # Allow end-of-block in comments
            elif _po:  # open parenthesis
                self.paren_depth += 1
                code_line += _po
            elif _pc:  # close parenthesis
                if self.paren_depth > 0:
                    # we could check for matching parentheses here, but it's
                    # easier to leave that to python - just check counts
                    self.paren_depth -= 1
                code_line += _pc
            elif _blk1:  # Start-block keyword (if/for/while/def/try/...)
                code_line, self.indent_mod = _blk1, -1
                self.indent += 1
            elif _blk2:  # Continue-block keyword (else/elif/except/...)
                code_line, self.indent_mod = _blk2, -1
            elif _end:  # The non-standard 'end'-keyword (ends a block)
                self.indent -= 1
            elif _cend:  # The end-code-block template token (usually '%>')
                if multiline: multiline = False
                else: code_line += _cend
            else:  # \n
                self.write_code(code_line.strip(), comment)
                self.lineno += 1
                code_line, comment, self.indent_mod = '', '', 0
                if not multiline:
                    break
        return offset

    def flush_text(self):
        # Turn the buffered literal text into a single _printlist() call.
        text = ''.join(self.text_buffer)
        del self.text_buffer[:]
        if not text: return
        parts, pos, nl = [], 0, '\\\n' + ' ' * self.indent
        for m in self.re_inl.finditer(text):
            prefix, pos = text[pos:m.start()], m.end()
            if prefix:
                parts.append(nl.join(map(repr, prefix.splitlines(True))))
            if prefix.endswith('\n'): parts[-1] += nl
            parts.append(self.process_inline(m.group(1).strip()))
        if pos < len(text):
            prefix = text[pos:]
            lines = prefix.splitlines(True)
            # A trailing double-backslash eats the newline that follows it.
            if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3]
            elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4]
            parts.append(nl.join(map(repr, lines)))
        code = '_printlist((%s,))' % ', '.join(parts)
        self.lineno += code.count('\n') + 1
        self.write_code(code)

    @staticmethod
    def process_inline(chunk):
        # A leading '!' ({{!expr}}) disables escaping for this expression.
        if chunk[0] == '!': return '_str(%s)' % chunk[1:]
        return '_escape(%s)' % chunk

    def write_code(self, line, comment=''):
        # Emit one line of generated code at the current indentation level.
        code = ' ' * (self.indent + self.indent_mod)
        code += line.lstrip() + comment + '\n'
        self.code_buffer.append(code)
def template(*args, **kwargs):
    """ Get a rendered template as a string iterator.

        The first positional argument may be a template name, a filename or a
        template string. Additional positional dicts and keyword arguments are
        passed to the template as local variables. Special keyword arguments:
        ``template_adapter``, ``template_lookup`` and ``template_settings``.
    """
    tpl = args[0] if args else None
    adapter = kwargs.pop('template_adapter', SimpleTemplate)
    lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
    cache_key = (id(lookup), tpl)
    if DEBUG or cache_key not in TEMPLATES:
        settings = kwargs.pop('template_settings', {})
        if isinstance(tpl, adapter):
            TEMPLATES[cache_key] = tpl
            if settings:
                tpl.prepare(**settings)
        elif any(marker in tpl for marker in ('\n', '{', '%', '$')):
            # Looks like an inline template string rather than a name.
            TEMPLATES[cache_key] = adapter(source=tpl, lookup=lookup, **settings)
        else:
            TEMPLATES[cache_key] = adapter(name=tpl, lookup=lookup, **settings)
    if not TEMPLATES[cache_key]:
        abort(500, 'Template (%s) not found' % tpl)
    for extra in args[1:]:
        kwargs.update(extra)
    return TEMPLATES[cache_key].render(kwargs)
# Convenience wrappers for template() bound to a specific adapter.
mako_template = functools.partial(template, template_adapter=MakoTemplate)
cheetah_template = functools.partial(template,
                                     template_adapter=CheetahTemplate)
jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
def view(tpl_name, **defaults):
    """ Decorator: renders a template for a handler.

        If the wrapped handler returns a dict (or DictMixin), the dict is
        merged over ``defaults`` and used to render ``tpl_name``. A ``None``
        result renders the template with the defaults only. Any other result
        (e.g. an HTTPResponse) is passed through unchanged.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            outcome = func(*args, **kwargs)
            if isinstance(outcome, (dict, DictMixin)):
                merged = defaults.copy()
                merged.update(outcome)
                return template(tpl_name, **merged)
            if outcome is None:
                return template(tpl_name, defaults)
            return outcome
        return wrapper
    return decorator
# Variants of the view() decorator bound to a specific template adapter.
mako_view = functools.partial(view, template_adapter=MakoTemplate)
cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
###############################################################################
# Constants and Globals ########################################################
###############################################################################
TEMPLATE_PATH = ['./', './views/']  # Default template search path
TEMPLATES = {}  # Global template cache used by template()
DEBUG = False
NORUN = False  # If set, run() does nothing. Used by load_app()

#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found')
HTTP_CODES = httplib.responses.copy()
HTTP_CODES[418] = "I'm a teapot"  # RFC 2324
HTTP_CODES[428] = "Precondition Required"
HTTP_CODES[429] = "Too Many Requests"
HTTP_CODES[431] = "Request Header Fields Too Large"
HTTP_CODES[511] = "Network Authentication Required"
#: Pre-rendered status lines such as '404 Not Found'.
_HTTP_STATUS_LINES = dict((k, '%d %s' % (k, v))
                          for (k, v) in HTTP_CODES.items())
#: The default template used for error pages. Override with @error()
#: The doubled %% survive the '% __name__' substitution below as single '%'
#: characters, which are the stpl line-statement markers.
ERROR_PAGE_TEMPLATE = """
%%try:
%%from %s import DEBUG, request
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
<html>
<head>
<title>Error: {{e.status}}</title>
<style type="text/css">
html {background-color: #eee; font-family: sans-serif;}
body {background-color: #fff; border: 1px solid #ddd;
padding: 15px; margin: 15px;}
pre {background-color: #eee; border: 1px solid #ddd; padding: 5px;}
</style>
</head>
<body>
<h1>Error: {{e.status}}</h1>
<p>Sorry, the requested URL <tt>{{repr(request.url)}}</tt>
caused an error:</p>
<pre>{{e.body}}</pre>
%%if DEBUG and e.exception:
<h2>Exception:</h2>
<pre>{{repr(e.exception)}}</pre>
%%end
%%if DEBUG and e.traceback:
<h2>Traceback:</h2>
<pre>{{e.traceback}}</pre>
%%end
</body>
</html>
%%except ImportError:
<b>ImportError:</b> Could not generate the error page. Please add bottle to
the import path.
%%end
""" % __name__
#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a
#: request callback, this instance always refers to the *current* request
#: (even on a multi-threaded server).
request = LocalRequest()

#: A thread-safe instance of :class:`LocalResponse`. It is used to change the
#: HTTP response for the *current* request.
response = LocalResponse()

#: A thread-safe namespace. Not used by Bottle.
local = threading.local()

# Initialize app stack (create first empty Bottle app) so that module-level
# route decorators have a default application to attach to.
# BC: 0.6.4 and needed for run()
app = default_app = AppStack()
app.push()

#: A virtual package that redirects import statements.
#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`.
ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else
                      __name__ + ".ext", 'bottle_%s').module
if __name__ == '__main__':
    # Command-line entry point: parse options, load config, start a server.
    opt, args, parser = _cli_parse(sys.argv)

    def _cli_error(msg):
        """Print usage information and the error, then exit non-zero."""
        parser.print_help()
        _stderr('\nError: %s\n' % msg)
        sys.exit(1)

    if opt.version:
        _stdout('Bottle %s\n' % __version__)
        sys.exit(0)
    if not args:
        _cli_error("No application entry point specified.")

    sys.path.insert(0, '.')
    # Make 'import bottle' resolve to this running module instance.
    sys.modules.setdefault('bottle', sys.modules['__main__'])

    host, port = (opt.bind or 'localhost'), 8080
    # Split an optional ':port' suffix; the rfind() comparison keeps colons
    # inside a bracketed IPv6 address (e.g. '[::1]:8080') intact.
    if ':' in host and host.rfind(']') < host.rfind(':'):
        host, port = host.rsplit(':', 1)
    host = host.strip('[]')

    config = ConfigDict()

    for cfile in opt.conf or []:
        try:
            if cfile.endswith('.json'):
                with open(cfile, 'rb') as fp:
                    config.load_dict(json_loads(fp.read()))
            else:
                config.load_config(cfile)
        except ConfigParserError:
            _cli_error(str(_e()))
        except IOError:
            _cli_error("Unable to read config file %r" % cfile)
        except (UnicodeError, TypeError, ValueError):
            _cli_error("Unable to parse config file %r: %s" % (cfile, _e()))

    # --param entries: 'key=value' pairs, or bare flags that become True.
    for cval in opt.param or []:
        if '=' in cval:
            config.update((cval.split('=', 1),))
        else:
            config[cval] = True

    run(args[0],
        host=host,
        port=int(port),
        server=opt.server,
        reloader=opt.reload,
        plugins=opt.plugin,
        debug=opt.debug,
        config=config)

# THE END
| hackersql/sq1map | thirdparty/bottle/bottle.py | Python | gpl-3.0 | 152,507 |
import datetime
from backend import db
from cruds.crud_user_type_destinations.models import UserTypeDestinations
from cruds.crud_users.models import Users
from cruds import format_urls_in_text
class WallMessages(db.Model):
    """A wall message sent by a user to a parameterized destination group."""

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Posting date stored as an integer (presumably an epoch timestamp —
    # TODO confirm the unit against the writers of this field).
    date = db.Column(db.Integer)
    # FK to the authoring user.
    sender = db.Column(db.Integer, db.ForeignKey("users.id"))
    # FK to the destination group definition (see get_destinations).
    destination = db.Column(db.Integer, db.ForeignKey("user_type_destinations.id"))
    # Value substituted for the '$' placeholder in the destination query.
    param_value = db.Column(db.Integer())
    message = db.Column(db.Text())

    def set_fields(self, fields):
        """Populate the row from a dict of fields; URLs in the message are linkified."""
        self.date = fields['date']
        self.sender = fields['sender']
        self.destination = fields['user_type_destination_id']
        self.param_value = fields['parameter']
        self.message = format_urls_in_text(fields['message'])

    def get_sender(self):
        """Return a list containing the sending user (empty if the FK is dangling)."""
        return Users.query.filter_by(id=self.sender).all()

    def get_destinations(self):
        """Resolve and return the destination users.

        The related UserTypeDestinations row stores a python snippet
        (``users_query``) with a '$' placeholder; the placeholder is replaced
        with ``param_value`` and the snippet is executed, which must bind a
        ``users`` variable.

        NOTE(review): exec() of a database-stored string is dangerous — anyone
        who can edit ``user_type_destinations.users_query`` can run arbitrary
        code in this process. Consider a whitelist of named queries instead.
        """
        _dict = {}
        query = UserTypeDestinations.query.filter_by(id=self.destination).first().users_query
        query = str(query).replace('$', str(self.param_value))
        exec(query, _dict)
        return _dict['users']
| sandroandrade/emile-server | cruds/crud_wall_messages/models.py | Python | gpl-3.0 | 1,182 |
from rambutan3.check_args.seq.RRangeSizeUniqueSequenceMatcher import RRangeSizeUniqueSequenceMatcher
from rambutan3.check_args.seq.RSequenceEnum import RSequenceEnum
# noinspection PyPep8Naming
def EXACT_SIZE_UNIQUE_TUPLE(*, exact_size: int) -> RRangeSizeUniqueSequenceMatcher:
    """Return a matcher for tuples of unique items with exactly ``exact_size`` elements."""
    return RRangeSizeUniqueSequenceMatcher(RSequenceEnum.TUPLE,
                                           min_size=exact_size,
                                           max_size=exact_size)
| kevinarpe/kevinarpe-rambutan3 | rambutan3/check_args/annotation/EXACT_SIZE_UNIQUE_TUPLE.py | Python | gpl-3.0 | 396 |
from django.contrib import admin
from django.contrib.admin.widgets import AdminIntegerFieldWidget
from django.core.validators import MaxValueValidator, MinValueValidator
from modeltranslation.admin import TranslationAdmin
from django.urls import reverse
from django.utils import timezone as tz
from django.utils.html import format_html
from django.utils.translation import gettext as _
from django import forms
from reversion.admin import VersionAdmin
from ..models import Issue
from ..models import Journal
from ..models import JournalInformation
from ..models import JournalType
from ..models import Language
from ..models import Discipline
# Field names ignored when comparing two JournalInformation revisions.
JOURNAL_INFORMATION_COMPARE_EXCLUDE = [
    # Exclude the translated base fields (ie. about) because the translation fields (ie. about_fr)
    # are already displayed.
    "about",
    "contact",
    "editorial_policy",
    "instruction_for_authors",
    "partners",
    "publishing_ethics",
    "subscriptions",
    "team",
    # Exclude the auto_now date field.
    "updated",
    # Exclude ID fields.
    "id",
    "journal_id",
]
class JournalDisciplineInline(admin.TabularInline):
    """Inline for the journal/discipline M2M relation."""

    model = Journal.disciplines.through

    def get_field_queryset(self, db, db_field, request):
        """Restrict selectable disciplines to those matching the parent journal's type."""
        if db_field.name != "discipline":
            return super().get_field_queryset(db, db_field, request)
        manager = db_field.remote_field.model._default_manager.using(db)
        # `request._obj` is set by JournalAdmin.get_form(); it is None while
        # a journal is being created, in which case no discipline is offered.
        parent_journal = request._obj
        if parent_journal:
            return manager.filter(type__code=parent_journal.type.code)
        return manager.none()
class JournalForm(forms.ModelForm):
    """Admin form for Journal with a bounded ``year_of_addition`` widget."""

    # NOTE(review): these two class attributes are inert on a ModelForm
    # (field/model selection belongs in Meta); kept for backward compatibility.
    fields = "all"
    model = Journal

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Limit `year_of_addition` field values to the current year and the next two years.
        now = tz.now()
        min_year = now.year
        max_year = min_year + 2
        self.fields["year_of_addition"].validators = [
            MinValueValidator(min_year),
            MaxValueValidator(max_year),
        ]
        self.fields["year_of_addition"].widget = AdminIntegerFieldWidget(
            attrs={
                "min": min_year,
                "max": max_year,
            },
        )

    def clean(self):
        # In Django < 2.0, CharField stores empty values as empty strings, causing
        # a unicity constraint error when multiple objects have an empty value for
        # the same field. When we upgrade to Django 2.0, it will not be necessary
        # to convert empty strings to None values.
        # Use .get() because `localidentifier` is absent from cleaned_data when
        # its own field validation failed; the old direct indexing raised
        # KeyError (a 500) in that case.
        if self.cleaned_data.get("localidentifier") == "":
            self.cleaned_data["localidentifier"] = None
        return self.cleaned_data
class JournalAdmin(admin.ModelAdmin):
    """Admin for journals; discipline choices are filtered by the journal type
    through JournalDisciplineInline (see get_form)."""

    form = JournalForm
    search_fields = (
        "code",
        "name",
        "issn_print",
        "issn_web",
        "external_url",
    )
    list_display = (
        "__str__",
        "code",
        "type",
        "open_access",
        "external_url",
        "active",
    )
    list_display_links = (
        "__str__",
        "code",
    )
    list_filter = (
        "collection",
        "type",
        "paper",
        "open_access",
        "active",
        "is_new",
        "year_of_addition",
    )
    filter_horizontal = ("members",)
    fieldsets = [
        (
            "Identification",
            {
                "fields": (
                    (
                        "collection",
                        "type",
                    ),
                    (
                        "code",
                        "localidentifier",
                    ),
                    (
                        "name",
                        "subtitle",
                    ),
                    ("is_new", "year_of_addition"),
                    (
                        "previous_journal",
                        "next_journal",
                    ),
                    (
                        "issn_print",
                        "issn_web",
                    ),
                    ("external_url", "redirect_to_external_url"),
                ),
            },
        ),
        (
            None,
            {
                "fields": (
                    ("open_access", "charges_apc", "paper"),
                    ("first_publication_year", "last_publication_year"),
                ),
            },
        ),
        ("Membres", {"fields": ("members",)}),
        (
            "État",
            {
                "classes": ("collapse",),
                "fields": ("active",),
            },
        ),
    ]
    inlines = (JournalDisciplineInline,)

    def get_form(self, request, obj=None, change=False, **kwargs):
        # Save the journal object on the request to have access to it in `JournalDisciplineInline`.
        request._obj = obj
        return super().get_form(request, obj, change, **kwargs)
class IssueAdmin(admin.ModelAdmin):
    """Admin for issues, with bulk open-access actions and a site link."""

    list_display = (
        "journal",
        "year",
        "volume",
        "number",
        "title",
        "localidentifier",
        "is_published",
        "view_issue_on_site",
    )
    search_fields = (
        "id",
        "localidentifier",
    )
    list_filter = (
        "is_published",
        "journal__collection",
        "journal__name",
    )
    actions = [
        "make_published",
        "make_unpublished",
        "force_free_access_to_true",
        "force_free_access_to_false",
    ]

    def force_free_access_to_true(self, request, queryset):
        """Mark a set of issues as open access"""
        queryset.update(force_free_access=True)

    force_free_access_to_true.short_description = _(
        "Contraindre les numéros sélectionnés en libre d'accès"
    )

    def force_free_access_to_false(self, request, queryset):
        """Mark a set of issues as not open access"""
        queryset.update(force_free_access=False)

    force_free_access_to_false.short_description = _(
        "Ne pas contraindre ces numéros au libre accès"
    )

    def view_issue_on_site(self, obj):
        """Display the link leading to the issue on website"""
        url = reverse(
            "public:journal:issue_detail",
            kwargs={
                "journal_code": obj.journal.code,
                "issue_slug": obj.volume_slug,
                "localidentifier": obj.localidentifier,
            },
        )
        # Unpublished issues on the main collection need a prepublication ticket.
        if not obj.is_published and obj.journal.collection.is_main_collection:
            url = "{url}?ticket={ticket}".format(url=url, ticket=obj.prepublication_ticket)
        # Quote the href value: the previous unquoted attribute produced
        # invalid HTML once the ticket query string was appended.
        return format_html('<a href="{}">{}</a>', url, _("Voir sur le site"))

    view_issue_on_site.short_description = _("Voir le numéro sur le site")

    def get_readonly_fields(self, request, obj=None):
        # `is_published` must only change through the publication actions.
        return self.readonly_fields + ("is_published",)
class JournalInformationAdminForm(forms.ModelForm):
    """Form restricting ``other_languages`` to languages beyond the main two."""

    class Meta:
        model = JournalInformation
        fields = "__all__"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Exclude French & English from other_languages field. These languages are set in the
        # main_languages field.
        # NOTE(review): language ids 1 and 2 are hard-coded; filtering on a
        # stable code field would be safer — confirm fixture ids before changing.
        self.fields["other_languages"].queryset = Language.objects.exclude(id__in=[1, 2])
class JournalInformationAdmin(VersionAdmin, TranslationAdmin):
    """Versioned, translation-aware admin for journal information pages."""

    form = JournalInformationAdminForm
class JournalTypeAdmin(TranslationAdmin):
    """Translation-aware admin for journal types."""
    pass
class DisciplineAdmin(TranslationAdmin):
    """Admin for disciplines; lists the journal types each one belongs to."""

    list_display = ["name", "get_types"]
    list_filter = ["type"]

    def get_types(self, obj):
        """Comma-separated names of the journal types linked to this discipline."""
        return ", ".join(journal_type.name for journal_type in obj.type.all())
# Register the admin classes. JournalType is unregistered first so the
# translation-aware admin replaces the default registration.
admin.site.register(Journal, JournalAdmin)
admin.site.register(Issue, IssueAdmin)
admin.site.register(JournalInformation, JournalInformationAdmin)
admin.site.unregister(JournalType)
admin.site.register(JournalType, JournalTypeAdmin)
admin.site.register(Discipline, DisciplineAdmin)
| erudit/eruditorg | eruditorg/erudit/admin/journal.py | Python | gpl-3.0 | 8,218 |
#!/usr/bin/env python3
from scipy.special import airy
from numpy import abs
def f(xinput):
    """Find a zero of the Airy function Ai(-x) by Newton's method.

    Starts from ``xinput`` and iterates x_{n+1} = x_n + Ai(-x_n)/Aip(-x_n),
    which is Newton's method for g(x) = Ai(-x) (whose derivative with respect
    to x is -Aip(-x)).

    Returns a tuple ``(residual, root)`` where ``residual`` is |Ai(-root)|
    (<= 1e-12 on success) and ``root`` is the located zero.

    Raises RuntimeError if the iteration has not converged after 100 steps.

    Bug fixed: the original stepped by |Ai|/|Aip|, discarding the sign
    information Newton's method needs, so it walked away from the root
    whenever Ai(-x) < 0.
    """
    xoutput = xinput
    ai, aip = airy(-xoutput)[:2]
    for _ in range(100):
        if abs(ai) <= 1e-12:
            return abs(ai), xoutput
        # Signed Newton step: x -= g(x)/g'(x) with g(x) = Ai(-x).
        xoutput = xoutput + ai / aip
        ai, aip = airy(-xoutput)[:2]
    raise RuntimeError('Airy root iteration did not converge from x0=%r' % xinput)
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
"""Supports the Vesuvio instrument at ISIS
backgrounds -- Defines backgrounds for fitting
base -- Helper functions for Vesuvio algorithms
commands -- Used to start processing of Vesuvio data
fitting -- Support routines for fitting
instrument -- Instrument specific data for Vesuvio
profiles -- Defines mass profiles
testing -- Simulates Vesuvio data for use in tests
"""
from __future__ import absolute_import

# Public submodules of the vesuvio package.
__all__ = ['backgrounds', 'base', 'commands', 'fitting', 'instrument',
           'profiles', 'testing']
| mganeva/mantid | scripts/Inelastic/vesuvio/__init__.py | Python | gpl-3.0 | 797 |
# read_hadamard_file.py
# Reads data from a text file to create a 3D
# version of a given Hadamard Matrix.
# Created by Rick Henderson
# Created on June 4, 2015
# Completed June 5, 2015
# Note: A "Hadamard File" is a text file containing rows
# rows of + and - where the + indicates a 1 or a cube
# and the - represents a 0 or a space.
import bpy
# Set the order (size) of the matrix
nOrder = 12

# You can also change these values if you want to alter the offset between the cubes
xOffset = 1.0
yOffset = 1.0
zOffset = 0  # You would have to alter the code more if you want a 3D array of cubes

xpos = 0
ypos = 0
char_number = 0

# Open the file to read from
# Modified technique from DiveIntoPython3.net/files.html
line_number = 0
with open('c:/had12.txt', encoding='utf-8') as a_file:
    for line_number, each_row in enumerate(a_file, start=1):
        # Echo the current row to the console
        print(each_row.rstrip())
        for char_number, a_char in enumerate(each_row, start=1):
            # A '+' marks a cube at (row, column); any other character is a gap.
            if a_char == '+':
                bpy.ops.mesh.primitive_cube_add(radius=0.5)
                bpy.context.object.location[0] = line_number * xOffset
                bpy.context.object.location[1] = char_number * yOffset
        # An entire row has been read, so reset char_number to 0
        char_number = 0

# Program Ends
| rickhenderson/code-samples | python-blender/read_hadamard_file.py | Python | gpl-3.0 | 1,503 |
# Copyright 2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
import sys
import os
import re
import csv
import copy
import warnings
from optparse import OptionParser

from gnuradio import filter, fft

try:
    import numpy as np
except ImportError:
    # Bug fix: the old message pointed at the nonexistent www.np.org.
    raise SystemExit('Please install NumPy to run this script (https://numpy.org/)')
try:
    import numpy.fft as fft_detail
except ImportError:
    raise SystemExit('Could not import fft implementation of numpy')
try:
    from numpy import poly1d
except ImportError:
    raise SystemExit('Please install NumPy to run this script (https://numpy.org)')
try:
    from scipy import signal
except ImportError:
    raise SystemExit('Please install SciPy to run this script (https://www.scipy.org)')
try:
    # Bug fix: QtGui added -- the GUI code below uses QtGui names (e.g.
    # QtGui.qRgb) which previously raised NameError because QtGui was never
    # imported.
    # NOTE(review): several class references below are still Qt4-style
    # (QtGui.QMainWindow, QtGui.QWidget, QtGui.QGraphicsScene); under PyQt5
    # those classes live in QtWidgets -- confirm and migrate separately.
    from PyQt5 import Qt, QtCore, QtGui, QtWidgets
except ImportError:
    raise SystemExit('Please install PyQt5 to run this script (https://www.riverbankcomputing.com/software/pyqt/download5)')
try:
    import pyqtgraph as pg
except ImportError:
    raise SystemExit('Please install pyqtgraph to run this script (http://www.pyqtgraph.org)')
try:
    from gnuradio.filter.pyqt_filter_stacked import Ui_MainWindow
except ImportError:
    raise SystemExit('Could not import from pyqt_filter_stacked. Please build with "pyuic5 pyqt_filter_stacked.ui -o pyqt_filter_stacked.py"')
try:
    from gnuradio.filter.banditems import *
except ImportError:
    raise SystemExit('Could not import from banditems. Please check whether banditems.py is in the library path')
try:
    from gnuradio.filter.polezero_plot import *
except ImportError:
    raise SystemExit('Could not import from polezero_plot. Please check whether polezero_plot.py is in the library path')
# Behavior is not quite working on 3.8 - TODO
# try:
#     from gnuradio.filter.idealbanditems import *
# except ImportError:
#     raise SystemExit('Could not import from idealbanditems. Please check whether idealbanditems.py is in the library path')
try:
    from gnuradio.filter.api_object import *
except ImportError:
    raise SystemExit('Could not import from api_object. Please check whether api_object.py is in the library path')
try:
    from gnuradio.filter.fir_design import *
except ImportError:
    raise SystemExit('Could not import from fir_design. Please check whether fir_design.py is in the library path')

# Qt4 compatibility shim: PyQt5's QString is plain str.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s): return s
# Gnuradio Filter design tool main window
class gr_plot_filter(QtGui.QMainWindow):
    def __init__(self, options, callback=None, restype=""):
        """Build the filter-design main window and wire up all signals.

        Args:
            options: parsed command-line options (from the module's
                OptionParser); not read directly in this constructor.
            callback: optional callable; when set, every completed design is
                delivered to it wrapped in an ApiObject (see design_fir /
                design_iir).
            restype: restricts the selectable filter class -- "iir" removes
                the FIR entry, "fir" removes the IIR entry, "" allows both.

        NOTE(review): several references below are Qt4-style (QtGui.QWidget,
        QtGui.QGraphicsScene); under PyQt5 those classes live in QtWidgets --
        confirm against the Qt bindings in use.
        """
        QtGui.QWidget.__init__(self, None)
        self.gui = Ui_MainWindow()
        self.callback = callback
        # Set Global pyqtgraph options
        pg.setConfigOption('foreground', 'k')  # Default foreground color for text, lines, axes, etc.
        pg.setConfigOption('background', None)  # Default background for GraphicsView.
        pg.setConfigOptions(antialias=True)  # Draw lines with smooth edges at the cost of reduced performance.
        self.gui.setupUi(self)
        # Remove other filter combobox entry if some restriction is specified.
        if restype == "iir":
            ind = self.gui.fselectComboBox.findText("FIR")
            if ind != -1:
                self.gui.fselectComboBox.removeItem(ind)
        elif restype == "fir":
            ind = self.gui.fselectComboBox.findText("IIR(scipy)")
            if ind != -1:
                self.gui.fselectComboBox.removeItem(ind)
        # Menu actions and design-control signal wiring.
        self.gui.action_save.triggered.connect(self.action_save_dialog)
        self.gui.action_save.setEnabled(False)
        self.gui.action_open.triggered.connect(self.action_open_dialog)
        self.gui.filterTypeComboBox.currentIndexChanged['const QString&'].connect(self.changed_filter_type)
        self.gui.iirfilterBandComboBox.currentIndexChanged['const QString&'].connect(self.changed_iirfilter_band)
        self.gui.filterDesignTypeComboBox.currentIndexChanged['const QString&'].connect(self.changed_filter_design_type)
        self.gui.fselectComboBox.currentIndexChanged['const QString&'].connect(self.changed_fselect)
        self.gui.iirfilterTypeComboBox.currentIndexChanged['const QString&'].connect(self.set_order)
        self.gui.designButton.released.connect(self.design)
        # self.gui.tabGroup.currentChanged['int'].connect(self.tab_changed)
        self.gui.nfftEdit.textEdited['QString'].connect(self.nfft_edit_changed)
        self.gui.actionQuick_Access.triggered.connect(self.action_quick_access)
        self.gui.actionSpec_Widget.triggered.connect(self.action_spec_widget)
        self.gui.actionResponse_Widget.triggered.connect(self.action_response_widget)
        self.gui.actionDesign_Widget.triggered.connect(self.action_design_widget)
        self.gui.actionMagnitude_Response.triggered.connect(self.set_actmagresponse)
        self.gui.actionGrid_2.triggered.connect(self.set_actgrid)
        self.gui.actionPhase_Respone.triggered.connect(self.set_actphase)
        self.gui.actionGroup_Delay.triggered.connect(self.set_actgdelay)
        self.gui.actionFilter_Coefficients.triggered.connect(self.set_actfcoeff)
        self.gui.actionBand_Diagram.triggered.connect(self.set_actband)
        # self.gui.actionIdeal_Band.triggered.connect(self.set_drawideal)
        self.gui.actionPole_Zero_Plot_2.triggered.connect(self.set_actpzplot)
        self.gui.actionGridview.triggered.connect(self.set_switchview)
        self.gui.actionPlot_select.triggered.connect(self.set_plotselect)
        self.gui.actionPhase_Delay.triggered.connect(self.set_actpdelay)
        self.gui.actionImpulse_Response.triggered.connect(self.set_actimpres)
        self.gui.actionStep_Response.triggered.connect(self.set_actstepres)
        # Grid-view push buttons (mf* = grid-view frequency pane,
        # mt* = grid-view time pane, m* = grid-view pole/zero controls).
        self.gui.mfmagPush.released.connect(self.set_mfmagresponse)
        self.gui.mfphasePush.released.connect(self.set_mfphaseresponse)
        self.gui.mfgpdlyPush.released.connect(self.set_mfgroupdelay)
        self.gui.mfphdlyPush.released.connect(self.set_mfphasedelay)
        self.gui.mfoverlayPush.clicked.connect(self.set_mfoverlay)
        self.gui.conjPush.clicked.connect(self.set_conj)
        self.gui.mconjPush.clicked.connect(self.set_mconj)
        self.gui.addzeroPush.clicked.connect(self.set_zeroadd)
        self.gui.maddzeroPush.clicked.connect(self.set_mzeroadd)
        self.gui.addpolePush.clicked.connect(self.set_poleadd)
        self.gui.maddpolePush.clicked.connect(self.set_mpoleadd)
        self.gui.delPush.clicked.connect(self.set_delpz)
        self.gui.mdelPush.clicked.connect(self.set_mdelpz)
        self.gui.mttapsPush.clicked.connect(self.set_mttaps)
        self.gui.mtstepPush.clicked.connect(self.set_mtstep)
        self.gui.mtimpPush.clicked.connect(self.set_mtimpulse)
        # Check boxes toggling the individual response tabs/plots.
        self.gui.checkKeepcur.stateChanged['int'].connect(self.set_bufferplots)
        self.gui.checkGrid.stateChanged['int'].connect(self.set_grid)
        self.gui.checkMagres.stateChanged['int'].connect(self.set_magresponse)
        self.gui.checkGdelay.stateChanged['int'].connect(self.set_gdelay)
        self.gui.checkPhase.stateChanged['int'].connect(self.set_phase)
        self.gui.checkFcoeff.stateChanged['int'].connect(self.set_fcoeff)
        self.gui.checkBand.stateChanged['int'].connect(self.set_band)
        self.gui.checkPzplot.stateChanged['int'].connect(self.set_pzplot)
        self.gui.checkPdelay.stateChanged['int'].connect(self.set_pdelay)
        self.gui.checkImpulse.stateChanged['int'].connect(self.set_impres)
        self.gui.checkStep.stateChanged['int'].connect(self.set_stepres)
        # View-state flags: which response is currently shown where.
        self.gridenable = False
        self.mfoverlay = False
        self.mtoverlay = False
        self.iir = False
        self.mfmagresponse = True
        self.mfphaseresponse = False
        self.mfgroupdelay = False
        self.mfphasedelay = False
        self.mttaps = True
        self.mtstep = False
        self.mtimpulse = False
        self.gui.designButton.setShortcut(QtCore.Qt.Key_Return)
        # Design results: FIR taps, IIR numerator (b) / denominator (a),
        # and the cached response arrays filled in by get_fft/iir_plot_all.
        self.taps = []
        self.a = []
        self.b = []
        self.fftdB = []
        self.fftDeg = []
        self.groupDelay = []
        self.phaseDelay = []
        self.gridview = 0
        self.params = []
        self.nfftpts = int(10000)
        self.gui.nfftEdit.setText(str(self.nfftpts))
        # Responses supported by the windowed designer vs. the
        # optimal/equiripple designer (see set_windowed/set_equiripple).
        self.firFilters = ("Low Pass", "Band Pass", "Complex Band Pass", "Band Notch",
                           "High Pass", "Root Raised Cosine", "Gaussian", "Half Band")
        self.optFilters = ("Low Pass", "Band Pass", "Complex Band Pass",
                           "Band Notch", "High Pass", "Half Band")
        self.set_windowed()
        # Initialize to LPF.
        self.gui.filterTypeWidget.setCurrentWidget(self.gui.firlpfPage)
        self.gui.iirfilterTypeComboBox.hide()
        self.gui.iirfilterBandComboBox.hide()
        self.gui.adComboBox.hide()
        self.gui.addpolePush.setEnabled(False)
        self.gui.maddpolePush.setEnabled(False)
        # Create plots.
        self.plots = {'FREQ': None, 'TIME': None, 'PHASE': None, 'GROUP': None,
                      'IMPRES': None, 'STEPRES': None, 'PDELAY': None}
        self.mplots = {'mFREQ': None, 'mTIME': None}
        self.plots['FREQ'] = self.gui.freqPlot
        self.plots['TIME'] = self.gui.timePlot
        self.plots['PHASE'] = self.gui.phasePlot
        self.plots['GROUP'] = self.gui.groupPlot
        self.plots['IMPRES'] = self.gui.impresPlot
        self.plots['STEPRES'] = self.gui.stepresPlot
        self.plots['PDELAY'] = self.gui.pdelayPlot
        # for i in self.plots:
        #     self.plots[i] = pg.PlotWidget(enableMenu=False, viewBox=CustomViewBox())
        #     self.plots[i].plotItem.vb = CustomViewBox()
        self.mplots['mFREQ'] = self.gui.mfreqPlot
        self.mplots['mTIME'] = self.gui.mtimePlot
        # for i in self.mplots:
        #     self.mplots[i] = pg.PlotWidget(enableMenu=False, viewBox=CustomViewBox())
        # # Add plots to layouts.
        # self.gui.freqTab.layout().addWidget(self.plots['FREQ'])
        # self.gui.timeTab.layout().addWidget(self.plots['TIME'])
        # self.gui.phaseTab.layout().addWidget(self.plots['PHASE'])
        # self.gui.groupTab.layout().addWidget(self.plots['GROUP'])
        # self.gui.impresTab.layout().addWidget(self.plots['IMPRES'])
        # self.gui.stepresTab.layout().addWidget(self.plots['STEPRES'])
        # self.gui.pdelayTab.layout().addWidget(self.plots['PDELAY'])
        # self.gui.mfreqTab.layout().addWidget(self.mplots['mFREQ'])
        # self.gui.mtimeTab.layout().addWidget(self.mplots['mTIME'])
        # Set Axis Labels.
        self.labelstyle11b = {'font-family': 'Helvetica', 'font-size': '11pt', 'font-weight': 'bold'}
        self.plots['FREQ'].setLabel('bottom', 'Frequency', units='Hz', **self.labelstyle11b)
        self.plots['FREQ'].setLabel('left', 'Magnitude', units='dB', **self.labelstyle11b)
        self.plots['TIME'].setLabel('bottom', 'Tap number', **self.labelstyle11b)
        self.plots['TIME'].setLabel('left', 'Amplitude', **self.labelstyle11b)
        self.plots['PHASE'].setLabel('bottom', 'Frequency', units='Hz', **self.labelstyle11b)
        self.plots['PHASE'].setLabel('left', 'Phase', units='Radians', **self.labelstyle11b)
        self.plots['GROUP'].setLabel('bottom', 'Frequency', units='Hz', **self.labelstyle11b)
        self.plots['GROUP'].setLabel('left', 'Delay', units='seconds', **self.labelstyle11b)
        self.plots['IMPRES'].setLabel('bottom', 'n', units='Samples', **self.labelstyle11b)
        self.plots['IMPRES'].setLabel('left', 'Amplitude', **self.labelstyle11b)
        self.plots['STEPRES'].setLabel('bottom', 'n', units='Samples', **self.labelstyle11b)
        self.plots['STEPRES'].setLabel('left', 'Amplitude', **self.labelstyle11b)
        self.plots['PDELAY'].setLabel('bottom', 'Frequency', units='Hz', **self.labelstyle11b)
        self.plots['PDELAY'].setLabel('left', 'Phase Delay', units='Radians', **self.labelstyle11b)
        self.labelstyle9b = {'font-family': 'Helvetica', 'font-size': '9pt', 'font-weight': 'bold'}
        self.mplots['mTIME'].setLabel('bottom', 'n', units='Samples/taps', **self.labelstyle9b)
        self.mplots['mTIME'].setLabel('left', 'Amplitude', **self.labelstyle9b)
        # Set up axes.
        for i in self.plots:
            axis = self.plots[i].getAxis('bottom')
            axis.setStyle(tickLength=-10)
            axis = self.plots[i].getAxis('left')
            axis.setStyle(tickLength=-10)
        for i in self.mplots:
            axis = self.mplots[i].getAxis('bottom')
            axis.setStyle(tickLength=-10)
            axis = self.mplots[i].getAxis('left')
            axis.setStyle(tickLength=-10)
        # Set up plot curves.
        self.rcurve = self.plots['TIME'].plot(title="Real")
        self.icurve = self.plots['TIME'].plot(title="Imag")
        self.mtimecurve = self.mplots['mTIME'].plot(title="PSD")
        self.mtimecurve_stems = self.mplots['mTIME'].plot(connect='pairs', name='Stems')
        self.mtimecurve_i_stems = self.mplots['mTIME'].plot(connect='pairs', name='Stems')
        self.mtimecurve_i = self.mplots['mTIME'].plot(title="Impulse Response Imag")
        self.plots['FREQ'].enableAutoRange(enable=True)
        self.freqcurve = self.plots['FREQ'].plot(title="PSD")
        # For the frequency view, set up linked x axes
        self.primary_freq_overlay = self.mplots['mFREQ']
        self.mfreqcurve = self.primary_freq_overlay.plot(title="PSD")
        # Second viewbox shares the x axis so two curves can be overlaid with
        # independent y axes; geometry kept in sync by updateViews().
        self.secondary_freq_overlay_vb = CustomViewBox()
        self.primary_freq_overlay.scene().addItem(self.secondary_freq_overlay_vb)
        self.primary_freq_overlay.getAxis('right').linkToView(self.secondary_freq_overlay_vb)
        self.mfreqcurve2 = pg.PlotCurveItem()
        # self.secondary_freq_overlay_vb.setGeometry(self.primary_freq_overlay.plotItem.vb.sceneBoundingRect())
        self.secondary_freq_overlay_vb.setXLink(self.primary_freq_overlay)
        self.secondary_freq_overlay_vb.addItem(self.mfreqcurve2)
        self.primary_freq_overlay.plotItem.vb.sigResized.connect(self.updateViews)
        self.phasecurve = self.plots['PHASE'].plot(title="Phase")
        self.groupcurve = self.plots['GROUP'].plot(title="Group Delay")
        self.imprescurve_stems = self.plots['IMPRES'].plot(connect='pairs', name='Stems')
        self.imprescurve = self.plots['IMPRES'].plot(title="Impulse Response")
        self.imprescurve_i_stems = self.plots['IMPRES'].plot(connect='pairs', name='Stems')
        self.imprescurve_i = self.plots['IMPRES'].plot(title="Impulse Response Imag")
        self.steprescurve_stems = self.plots['STEPRES'].plot(connect='pairs', name='Stems')
        self.steprescurve = self.plots['STEPRES'].plot(title="Step Response")
        self.steprescurve_i_stems = self.plots['STEPRES'].plot(connect='pairs', name='Stems')
        self.steprescurve_i = self.plots['STEPRES'].plot(title="Step Response Imag")
        self.pdelaycurve = self.plots['PDELAY'].plot(title="Phase Delay")
        # Disable Ideal Band for now
        # self.idbanditems = IdealBandItems()
        self.set_defaultpen()
        # Assigning items.
        # Band-diagram item lists come from gnuradio.filter.banditems.
        self.lpfitems = lpfItems
        self.hpfitems = hpfItems
        self.bpfitems = bpfItems
        self.bnfitems = bnfItems
        # Connect signals.
        self.lpfitems[0].attenChanged.connect(self.set_fatten)
        self.hpfitems[0].attenChanged.connect(self.set_fatten)
        self.bpfitems[0].attenChanged.connect(self.set_fatten)
        self.bnfitems[0].attenChanged.connect(self.set_fatten)
        # Populate the Band-diagram scene.
        self.scene = QtGui.QGraphicsScene()
        self.scene.setSceneRect(0,0,250,250)
        lightback = QtGui.qRgb(0xf8, 0xf8, 0xff)
        backbrush = Qt.QBrush(Qt.QColor(lightback))
        self.scene.setBackgroundBrush(backbrush)
        self.gui.bandView.setScene(self.scene)
        self.gui.mbandView.setScene(self.scene)
        # Install Canvas picker for pz-plot.
        self.cpicker = CanvasPicker(self.gui.pzPlot)
        self.cpicker.curveChanged.connect(self.set_curvetaps)
        self.cpicker.mouseposChanged.connect(self.set_statusbar)
        self.cpicker2 = CanvasPicker(self.gui.mpzPlot)
        self.cpicker2.curveChanged.connect(self.set_mcurvetaps)
        self.cpicker2.mouseposChanged.connect(self.set_mstatusbar)
        # Edit boxes for band-diagrams (Not required todate so may be remove?).
        """
        self.lpfpassEdit = QtGui.QLineEdit()
        self.lpfpassEdit.setMaximumSize(QtCore.QSize(75,20))
        self.lpfpassEdit.setText('Not set')
        self.lpfstartproxy = QtGui.QGraphicsProxyWidget()
        self.lpfstartproxy.setWidget(self.lpfpassEdit)
        self.lpfstartproxy.setPos(400,30)
        self.lpfstopEdit = QtGui.QLineEdit()
        self.lpfstopEdit.setMaximumSize(QtCore.QSize(75,20))
        self.lpfstopEdit.setText('Not set')
        self.lpfstopproxy = QtGui.QGraphicsProxyWidget()
        self.lpfstopproxy.setWidget(self.lpfstopEdit)
        self.lpfstopproxy.setPos(400,50)
        self.lpfitems.append(self.lpfstartproxy)
        self.lpfitems.append(self.lpfstopproxy)
        """
        self.populate_bandview(self.lpfitems)
        # Set up validators for edit boxes.
        self.intVal = Qt.QIntValidator(None)
        self.dblVal = Qt.QDoubleValidator(None)
        self.gui.nfftEdit.setValidator(self.intVal)
        self.gui.sampleRateEdit.setValidator(self.dblVal)
        self.gui.filterGainEdit.setValidator(self.dblVal)
        self.gui.endofLpfPassBandEdit.setValidator(self.dblVal)
        self.gui.startofLpfStopBandEdit.setValidator(self.dblVal)
        self.gui.lpfStopBandAttenEdit.setValidator(self.dblVal)
        self.gui.lpfPassBandRippleEdit.setValidator(self.dblVal)
        self.gui.startofBpfPassBandEdit.setValidator(self.dblVal)
        self.gui.endofBpfPassBandEdit.setValidator(self.dblVal)
        self.gui.bpfTransitionEdit.setValidator(self.dblVal)
        self.gui.bpfStopBandAttenEdit.setValidator(self.dblVal)
        self.gui.bpfPassBandRippleEdit.setValidator(self.dblVal)
        self.gui.startofBnfStopBandEdit.setValidator(self.dblVal)
        self.gui.endofBnfStopBandEdit.setValidator(self.dblVal)
        self.gui.bnfTransitionEdit.setValidator(self.dblVal)
        self.gui.bnfStopBandAttenEdit.setValidator(self.dblVal)
        self.gui.bnfPassBandRippleEdit.setValidator(self.dblVal)
        self.gui.endofHpfStopBandEdit.setValidator(self.dblVal)
        self.gui.startofHpfPassBandEdit.setValidator(self.dblVal)
        self.gui.hpfStopBandAttenEdit.setValidator(self.dblVal)
        self.gui.hpfPassBandRippleEdit.setValidator(self.dblVal)
        self.gui.rrcSymbolRateEdit.setValidator(self.dblVal)
        self.gui.rrcAlphaEdit.setValidator(self.dblVal)
        self.gui.rrcNumTapsEdit.setValidator(self.dblVal)
        self.gui.gausSymbolRateEdit.setValidator(self.dblVal)
        self.gui.gausBTEdit.setValidator(self.dblVal)
        self.gui.gausNumTapsEdit.setValidator(self.dblVal)
        self.gui.iirendofLpfPassBandEdit.setValidator(self.dblVal)
        self.gui.iirstartofLpfStopBandEdit.setValidator(self.dblVal)
        self.gui.iirLpfPassBandAttenEdit.setValidator(self.dblVal)
        self.gui.iirLpfStopBandRippleEdit.setValidator(self.dblVal)
        self.gui.iirstartofHpfPassBandEdit.setValidator(self.dblVal)
        self.gui.iirendofHpfStopBandEdit.setValidator(self.dblVal)
        self.gui.iirHpfPassBandAttenEdit.setValidator(self.dblVal)
        self.gui.iirHpfStopBandRippleEdit.setValidator(self.dblVal)
        self.gui.iirstartofBpfPassBandEdit.setValidator(self.dblVal)
        self.gui.iirendofBpfPassBandEdit.setValidator(self.dblVal)
        self.gui.iirendofBpfStopBandEdit1.setValidator(self.dblVal)
        self.gui.iirstartofBpfStopBandEdit2.setValidator(self.dblVal)
        self.gui.iirBpfPassBandAttenEdit.setValidator(self.dblVal)
        self.gui.iirBpfStopBandRippleEdit.setValidator(self.dblVal)
        self.gui.iirendofBsfPassBandEdit1.setValidator(self.dblVal)
        self.gui.iirstartofBsfPassBandEdit2.setValidator(self.dblVal)
        self.gui.iirstartofBsfStopBandEdit.setValidator(self.dblVal)
        self.gui.iirendofBsfStopBandEdit.setValidator(self.dblVal)
        self.gui.iirBsfPassBandAttenEdit.setValidator(self.dblVal)
        self.gui.iirBsfStopBandRippleEdit.setValidator(self.dblVal)
        self.gui.besselordEdit.setValidator(self.intVal)
        self.gui.iirbesselcritEdit1.setValidator(self.dblVal)
        self.gui.iirbesselcritEdit2.setValidator(self.dblVal)
        self.gui.nTapsEdit.setText("0")
        # GUI window-name -> gr-fft window constant.
        # NOTE(review): the lowercase 'h' in WIN_BLACKMAN_hARRIS appears to
        # mirror the constant's actual spelling in gr-fft -- confirm against
        # the installed gnuradio.fft before "fixing" it.
        self.filterWindows = {"Hamming Window" : fft.window.WIN_HAMMING,
                              "Hann Window" : fft.window.WIN_HANN,
                              "Blackman Window" : fft.window.WIN_BLACKMAN,
                              "Rectangular Window" : fft.window.WIN_RECTANGULAR,
                              "Kaiser Window" : fft.window.WIN_KAISER,
                              "Blackman-harris Window" : fft.window.WIN_BLACKMAN_hARRIS}
        self.EQUIRIPPLE_FILT = 6 # const for equiripple filter window types.
        # Disable functionality that is not quite working in 3.8
        self.gui.checkKeepcur.setEnabled(False)
        self.gui.actionIdeal_Band.setEnabled(False)
        self.show()
def updateViews(self):
# for linking overlay graphs on GridView freq plots
self.secondary_freq_overlay_vb.setGeometry(self.primary_freq_overlay.plotItem.vb.sceneBoundingRect())
# Set up curve pens, lines, and symbols.
def set_defaultpen(self):
blue = QtGui.qRgb(0x00, 0x00, 0xFF)
blueBrush = Qt.QBrush(Qt.QColor(blue))
red = QtGui.qRgb(0xFF, 0x00, 0x00)
redBrush = Qt.QBrush(Qt.QColor(red))
self.freqcurve.setPen(pg.mkPen('b', width=1.5))
self.rcurve.setPen(None)
self.rcurve.setSymbol('o')
self.rcurve.setSymbolPen('b')
self.rcurve.setSymbolBrush(Qt.QBrush(Qt.Qt.gray))
self.rcurve.setSymbolSize(8)
self.icurve.setPen(None)
self.icurve.setSymbol('o')
self.icurve.setSymbolPen('r')
self.icurve.setSymbolBrush(Qt.QBrush(Qt.Qt.gray))
self.icurve.setSymbolSize(8)
self.imprescurve_stems.setPen(pg.mkPen('b', width=1.5))
self.imprescurve.setPen(None)
self.imprescurve.setSymbol('o')
self.imprescurve.setSymbolPen('b')
self.imprescurve.setSymbolBrush(Qt.QBrush(Qt.Qt.gray))
self.imprescurve.setSymbolSize(8)
self.imprescurve_i_stems.setPen(pg.mkPen('b', width=1.5))
self.imprescurve_i.setPen(None)
self.imprescurve_i.setSymbol('o')
self.imprescurve_i.setSymbolPen('r')
self.imprescurve_i.setSymbolBrush(Qt.QBrush(Qt.Qt.gray))
self.imprescurve_i.setSymbolSize(8)
self.steprescurve_stems.setPen(pg.mkPen('b', width=1.5))
self.steprescurve.setPen(None)
self.steprescurve.setSymbol('o')
self.steprescurve.setSymbolPen('b')
self.steprescurve.setSymbolBrush(Qt.QBrush(Qt.Qt.gray))
self.steprescurve.setSymbolSize(8)
self.steprescurve_i_stems.setPen(pg.mkPen('b', width=1.5))
self.steprescurve_i.setPen(None)
self.steprescurve_i.setSymbol('o')
self.steprescurve_i.setSymbolPen('r')
self.steprescurve_i.setSymbolBrush(Qt.QBrush(Qt.Qt.gray))
self.steprescurve_i.setSymbolSize(8)
self.phasecurve.setPen(pg.mkPen('b', width=1.5))
self.groupcurve.setPen(pg.mkPen('b', width=1.5))
self.pdelaycurve.setPen(pg.mkPen('b', width=1.5))
# self.idbanditems.setLinetype()
self.mfreqcurve.setPen(pg.mkPen('b', width=1.5))
self.mfreqcurve2.setPen(pg.mkPen('r', width=1.5))
self.mtimecurve.setPen(None)
self.mtimecurve.setSymbol('o')
self.mtimecurve.setSymbolPen('b')
self.mtimecurve.setSymbolBrush(Qt.QBrush(Qt.Qt.gray))
self.mtimecurve.setSymbolSize(8)
self.mtimecurve_stems.setPen(pg.mkPen('b', width=1.5))
self.mtimecurve_i_stems.setPen(pg.mkPen('b', width=1.5))
self.mtimecurve_i.setPen(None)
self.mtimecurve_i.setSymbol('o')
self.mtimecurve_i.setSymbolPen('r')
self.mtimecurve_i.setSymbolBrush(Qt.QBrush(Qt.Qt.gray))
self.mtimecurve_i.setSymbolSize(8)
def changed_fselect(self, ftype):
if(ftype == "FIR"):
self.gui.iirfilterTypeComboBox.hide()
self.gui.iirfilterBandComboBox.hide()
self.gui.adComboBox.hide()
self.gui.filterDesignTypeComboBox.show()
self.gui.globalParamsBox.show()
self.gui.filterTypeComboBox.show()
self.gui.filterTypeWidget.setCurrentWidget(self.gui.firlpfPage)
self.gui.tabGroup.addTab(self.gui.timeTab, _fromUtf8("Filter Taps"))
self.gui.mttapsPush.setEnabled(True)
self.gui.addpolePush.setEnabled(False)
self.gui.maddpolePush.setEnabled(False)
elif(ftype.startswith("IIR")):
self.gui.filterDesignTypeComboBox.hide()
self.gui.globalParamsBox.hide()
self.gui.filterTypeComboBox.hide()
self.gui.iirfilterTypeComboBox.show()
self.gui.adComboBox.show()
self.gui.iirfilterBandComboBox.show()
self.gui.filterTypeWidget.setCurrentWidget(self.gui.iirlpfPage)
self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.timeTab))
self.gui.mttapsPush.setEnabled(False)
self.gui.addpolePush.setEnabled(True)
self.gui.maddpolePush.setEnabled(True)
#self.design()
def set_order(self, ftype):
if(ftype == "Bessel"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.iirbesselPage)
self.changed_iirfilter_band(self.gui.iirfilterBandComboBox.currentText())
else:
self.changed_iirfilter_band(self.gui.iirfilterBandComboBox.currentText())
#self.design()
def changed_iirfilter_band(self, ftype):
iirftype = self.gui.iirfilterTypeComboBox.currentText()
if(ftype == "Low Pass"):
if(iirftype == "Bessel"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.iirbesselPage)
self.gui.iirbesselcritLabel2.hide()
self.gui.iirbesselcritEdit2.hide()
else:
self.gui.filterTypeWidget.setCurrentWidget(self.gui.iirlpfPage)
elif(ftype == "Band Pass"):
if(iirftype == "Bessel"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.iirbesselPage)
self.gui.iirbesselcritLabel2.show()
self.gui.iirbesselcritEdit2.show()
else:
self.gui.filterTypeWidget.setCurrentWidget(self.gui.iirbpfPage)
elif(ftype == "Band Stop"):
if(iirftype == "Bessel"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.iirbesselPage)
self.gui.iirbesselcritLabel2.show()
self.gui.iirbesselcritEdit2.show()
else:
self.gui.filterTypeWidget.setCurrentWidget(self.gui.iirbsfPage)
elif(ftype == "High Pass"):
if(iirftype == "Bessel"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.iirbesselPage)
self.gui.iirbesselcritLabel2.hide()
self.gui.iirbesselcritEdit2.hide()
else:
self.gui.filterTypeWidget.setCurrentWidget(self.gui.iirhpfPage)
#self.design()
def changed_filter_type(self, ftype):
if(ftype == "Low Pass"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.firlpfPage)
self.remove_bandview()
self.populate_bandview(self.lpfitems)
elif(ftype == "Band Pass"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.firbpfPage)
self.remove_bandview()
self.populate_bandview(self.bpfitems)
elif(ftype == "Complex Band Pass"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.firbpfPage)
self.remove_bandview()
self.populate_bandview(self.bpfitems)
elif(ftype == "Band Notch"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.firbnfPage)
self.remove_bandview()
self.populate_bandview(self.bnfitems)
elif(ftype == "High Pass"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.firhpfPage)
self.remove_bandview()
self.populate_bandview(self.hpfitems)
elif(ftype == "Root Raised Cosine"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.rrcPage)
elif(ftype == "Gaussian"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.gausPage)
elif(ftype == "Half Band"):
self.gui.filterTypeWidget.setCurrentWidget(self.gui.firhbPage)
#self.design()
def changed_filter_design_type(self, design):
if(design == "Equiripple"):
self.set_equiripple()
else:
self.set_windowed()
#self.design()
def set_equiripple(self):
# Stop sending the signal for this function.
self.gui.filterTypeComboBox.blockSignals(True)
self.equiripple = True
self.gui.lpfPassBandRippleLabel.setVisible(True)
self.gui.lpfPassBandRippleEdit.setVisible(True)
self.gui.bpfPassBandRippleLabel.setVisible(True)
self.gui.bpfPassBandRippleEdit.setVisible(True)
self.gui.bnfPassBandRippleLabel.setVisible(True)
self.gui.bnfPassBandRippleEdit.setVisible(True)
self.gui.hpfPassBandRippleLabel.setVisible(True)
self.gui.hpfPassBandRippleEdit.setVisible(True)
# Save current type and repopulate the combo box for
# filters this window type can handle.
currenttype = self.gui.filterTypeComboBox.currentText()
items = self.gui.filterTypeComboBox.count()
for i in range(items):
self.gui.filterTypeComboBox.removeItem(0)
self.gui.filterTypeComboBox.addItems(self.optFilters)
# If the last filter type was valid for this window type,
# go back to it; otherwise, reset.
try:
index = self.optFilters.index(currenttype)
self.gui.filterTypeComboBox.setCurrentIndex(index)
except ValueError:
pass
# Tell gui its ok to start sending this signal again.
self.gui.filterTypeComboBox.blockSignals(False)
def set_windowed(self):
# Stop sending the signal for this function.
self.gui.filterTypeComboBox.blockSignals(True)
self.equiripple = False
self.gui.lpfPassBandRippleLabel.setVisible(False)
self.gui.lpfPassBandRippleEdit.setVisible(False)
self.gui.bpfPassBandRippleLabel.setVisible(False)
self.gui.bpfPassBandRippleEdit.setVisible(False)
self.gui.bnfPassBandRippleLabel.setVisible(False)
self.gui.bnfPassBandRippleEdit.setVisible(False)
self.gui.hpfPassBandRippleLabel.setVisible(False)
self.gui.hpfPassBandRippleEdit.setVisible(False)
# Save current type and repopulate the combo box for
# filters this window type can handle.
currenttype = self.gui.filterTypeComboBox.currentText()
items = self.gui.filterTypeComboBox.count()
for i in range(items):
self.gui.filterTypeComboBox.removeItem(0)
self.gui.filterTypeComboBox.addItems(self.firFilters)
# If the last filter type was valid for this window type,
# go back to it; otherwise, reset.
try:
index = self.optFilters.index(currenttype)
self.gui.filterTypeComboBox.setCurrentIndex(index)
except ValueError:
pass
# Tell gui its ok to start sending this signal again.
self.gui.filterTypeComboBox.blockSignals(False)
def design(self):
ret = True
fs,r = getfloat(self.gui.sampleRateEdit.text())
ret = r and ret
gain,r = getfloat(self.gui.filterGainEdit.text())
ret = r and ret
winstr = self.gui.filterDesignTypeComboBox.currentText()
ftype = self.gui.filterTypeComboBox.currentText()
fsel = self.gui.fselectComboBox.currentText()
if (fsel == "FIR"):
self.b, self.a = [],[]
if(ret):
self.design_fir(ftype, fs, gain, winstr)
elif (fsel.startswith("IIR")):
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
self.design_iir()
if len(w):
reply = QtGui.QMessageBox.information(self, "BadCoefficients",
str(w[-1].message),
QtGui.QMessageBox.Ok)
    # Do FIR design.
    def design_fir(self, ftype, fs, gain, winstr):
        """Design an FIR filter and refresh the active view.

        Args:
            ftype: response name from the filter-type combo box
                (e.g. "Low Pass", "Half Band").
            fs: sample rate as parsed from the GUI (float).
            gain: linear filter gain as parsed from the GUI (float).
            winstr: window-design name; "Equiripple" selects the optimal
                designer, anything else a window from self.filterWindows.

        NOTE(review): the designer functions come from
        gnuradio.filter.fir_design (star import) and are handed ``self``;
        they appear to also set ``self.taps``/``self.params`` as a side
        effect (``self.taps`` is used below without being assigned here) --
        confirm against fir_design.py.
        """
        self.iir = False
        self.cpicker.set_iir(False)
        self.cpicker2.set_iir(False)
        if(winstr == "Equiripple"):
            # Optimal (equiripple) designers take (fs, gain, self).
            designer = {"Low Pass" : design_opt_lpf,
                        "Band Pass" : design_opt_bpf,
                        "Complex Band Pass" : design_opt_cbpf,
                        "Band Notch" : design_opt_bnf,
                        "Half Band" : design_opt_hb,
                        "High Pass" : design_opt_hpf}
            taps,params,r = designer[ftype](fs, gain, self)
        else:
            # Windowed designers additionally take the gr-fft window constant.
            designer = {"Low Pass" : design_win_lpf,
                        "Band Pass" : design_win_bpf,
                        "Complex Band Pass" : design_win_cbpf,
                        "Band Notch" : design_win_bnf,
                        "High Pass" : design_win_hpf,
                        "Half Band" : design_win_hb,
                        "Root Raised Cosine" : design_win_rrc,
                        "Gaussian" : design_win_gaus}
            wintype = int(self.filterWindows[winstr])
            taps,params,r = designer[ftype](fs, gain, wintype, self)
        # r is False when the designer rejected the GUI parameters.
        if(r):
            if self.gridview:
                self.params = params
                self.update_fft(taps, params)
                self.set_mfmagresponse()
                self.set_mttaps()
                self.gui.nTapsEdit.setText(str(self.taps.size))
            else:
                self.draw_plots(taps,params)
            zeros = self.get_zeros()
            poles = self.get_poles()
            self.gui.pzPlot.insertZeros(zeros)
            self.gui.pzPlot.insertPoles(poles)
            self.gui.mpzPlot.insertZeros(zeros)
            self.gui.mpzPlot.insertPoles(poles)
            self.update_fcoeff()
            self.gui.action_save.setEnabled(True)
            # self.set_drawideal()
        # Return taps if callback is enabled.
        if self.callback:
            retobj = ApiObject()
            retobj.update_all("fir", self.params, self.taps, 1)
            self.callback(retobj)
    # Do IIR design.
    def design_iir(self):
        """Design an IIR filter with scipy.signal from the current GUI fields.

        Reads the approximation type (Elliptic/Butterworth/Chebyshev/Bessel),
        the band (low/high/band pass/band stop) and the analog-vs-digital
        selector, builds the band-edge/attenuation parameter list from the
        matching edit boxes, then runs scipy.signal.iirfilter (Bessel, fixed
        order) or scipy.signal.iirdesign (all others).  Results land in
        self.b/self.a and self.z/self.p/self.k, the plots are refreshed, and
        the design is handed to self.callback when one is registered.

        NOTE(review): the two QtGui.QMessageBox references below raise
        NameError under this module's imports (QtGui is not imported; PyQt5
        keeps QMessageBox in QtWidgets) -- and after a design exception the
        code still proceeds to tf2zpk on possibly-stale self.b/self.a.
        Confirm and fix alongside design().
        """
        iirftype = self.gui.iirfilterTypeComboBox.currentText()
        iirbtype = self.gui.iirfilterBandComboBox.currentText()
        atype = self.gui.adComboBox.currentText()
        self.taps = []
        self.iir = True
        ret = True
        params = []
        besselparams = []
        self.cpicker.set_iir(True)
        self.cpicker2.set_iir(True)
        # GUI label -> scipy ftype string.
        iirft = {"Elliptic" : 'ellip',
                 "Butterworth" : 'butter',
                 "Chebyshev-1" : 'cheby1',
                 "Chebyshev-2" : 'cheby2',
                 "Bessel" : 'bessel' }
        # GUI label -> scipy `analog` flag.
        sanalog = {"Analog (rad/second)" : 1,
                   "Digital (normalized 0-1)" : 0 }
        paramtype = { 1 : "analog",
                      0 : "digital" }
        iirabbr = {
            "Low Pass" : "lpf",
            "High Pass" : "hpf",
            "Band Pass" : "bpf",
            "Band Stop" : "bnf" }
        # Band -> ordered list of edit-box values: band edges first, then
        # pass-band atten and stop-band ripple (6 entries for two-edge bands).
        iirboxes = {"Low Pass" : [float(self.gui.iirendofLpfPassBandEdit.text()),
                                  float(self.gui.iirstartofLpfStopBandEdit.text()),
                                  float(self.gui.iirLpfPassBandAttenEdit.text()),
                                  float(self.gui.iirLpfStopBandRippleEdit.text())],
                    "High Pass" : [float(self.gui.iirstartofHpfPassBandEdit.text()),
                                   float(self.gui.iirendofHpfStopBandEdit.text()),
                                   float(self.gui.iirHpfPassBandAttenEdit.text()),
                                   float(self.gui.iirHpfStopBandRippleEdit.text())],
                    "Band Pass" : [float(self.gui.iirstartofBpfPassBandEdit.text()),
                                   float(self.gui.iirendofBpfPassBandEdit.text()),
                                   float(self.gui.iirendofBpfStopBandEdit1.text()),
                                   float(self.gui.iirstartofBpfStopBandEdit2.text()),
                                   float(self.gui.iirBpfPassBandAttenEdit.text()),
                                   float(self.gui.iirBpfStopBandRippleEdit.text())],
                    "Band Stop" : [float(self.gui.iirendofBsfPassBandEdit1.text()),
                                   float(self.gui.iirstartofBsfPassBandEdit2.text()),
                                   float(self.gui.iirstartofBsfStopBandEdit.text()),
                                   float(self.gui.iirendofBsfStopBandEdit.text()),
                                   float(self.gui.iirBsfPassBandAttenEdit.text()),
                                   float(self.gui.iirBsfStopBandRippleEdit.text())] }
        # Remove Ideal band-diagrams if IIR.
        # self.set_drawideal()
        for i in range(len(iirboxes[iirbtype])):
            params.append(iirboxes[iirbtype][i])
        # Two-edge bands: regroup into [pass edges, stop edges, gpass, gstop].
        if len(iirboxes[iirbtype]) == 6:
            params = [params[:2],params[2:4],params[4],params[5]]
        if(iirftype == "Bessel"):
            # Bessel takes explicit critical frequencies and a fixed order
            # instead of pass/stop-band specs.
            if iirbtype == "Low Pass" or iirbtype == "High Pass":
                besselparams.append(float(self.gui.iirbesselcritEdit1.text()))
            else:
                besselparams.append(float(self.gui.iirbesselcritEdit1.text()))
                besselparams.append(float(self.gui.iirbesselcritEdit2.text()))
            order = int(self.gui.besselordEdit.text())
            try:
                (self.b, self.a) = signal.iirfilter(order, besselparams, btype=iirbtype.replace(' ', '').lower(),
                                                    analog=sanalog[atype], ftype=iirft[iirftype], output='ba')
            except Exception as e:
                # NOTE(review): QtGui is not imported -- see docstring.
                reply = QtGui.QMessageBox.information(self, "IIR design error", e.args[0],
                                                      QtGui.QMessageBox.Ok)
            (self.z, self.p, self.k) = signal.tf2zpk(self.b, self.a)
            iirparams = {"filttype": iirft[iirftype], "bandtype": iirabbr[iirbtype], "filtord": order,
                         "paramtype": paramtype[sanalog[atype]], "critfreq": besselparams}
        else:
            try:
                (self.b, self.a) = signal.iirdesign(params[0], params[1], params[2], params[3],
                                                    analog=sanalog[atype], ftype=iirft[iirftype], output='ba')
            except Exception as e:
                # NOTE(review): QtGui is not imported -- see docstring.
                reply = QtGui.QMessageBox.information(self, "IIR design error", e.args[0],
                                                      QtGui.QMessageBox.Ok)
            (self.z, self.p, self.k) = signal.tf2zpk(self.b, self.a)
            # Create parameters.
            iirparams = {"filttype": iirft[iirftype], "bandtype": iirabbr[iirbtype],
                         "paramtype": paramtype[sanalog[atype]], "pbedge": params[0], "sbedge": params[1],
                         "gpass": params[2], "gstop": params[3]}
        self.gui.pzPlot.insertZeros(self.z)
        self.gui.pzPlot.insertPoles(self.p)
        self.gui.mpzPlot.insertZeros(self.z)
        self.gui.mpzPlot.insertPoles(self.p)
        self.iir_plot_all(self.z, self.p, self.k)
        self.update_fcoeff()
        # IIR designs have no FIR tap count to report.
        self.gui.nTapsEdit.setText("-")
        self.params = iirparams
        self.gui.action_save.setEnabled(True)
        # Return api_object if callback is enabled.
        if self.callback:
            retobj = ApiObject()
            retobj.update_all("iir", self.params, (self.b, self.a), 1)
            self.callback(retobj)
# IIR Filter design plot updates.
def iir_plot_all(self,z,p,k):
self.b,self.a = signal.zpk2tf(z,p,k)
w,h = signal.freqz(self.b,self.a)
self.fftdB = 20 * np.log10 (abs(h))
self.freq = w / max(w)
self.fftDeg = np.unwrap(np.arctan2(np.imag(h),np.real(h)))
self.groupDelay = -np.diff(self.fftDeg)
self.phaseDelay = -self.fftDeg[1:] / self.freq[1:]
if self.gridview:
self.set_mfmagresponse()
self.set_mtimpulse()
else:
self.update_freq_curves()
self.update_phase_curves()
self.update_group_curves()
self.update_pdelay_curves()
self.update_step_curves()
self.update_imp_curves()
def nfft_edit_changed(self, nfft):
    """Re-plot the frequency response when the FFT-size box changes."""
    value, valid = getint(nfft)
    if not valid:
        return
    if value == self.nfftpts:
        # No change -- avoid a redundant redraw.
        return
    self.nfftpts = value
    self.update_freq_curves()
# def tab_changed(self, tab):
# if(tab == 0):
# self.update_freq_curves()
# if(tab == 1):
# self.update_time_curves()
# if(tab == 2):
# self.update_phase_curves()
# if(tab == 3):
# self.update_group_curves()
def get_fft(self, fs, taps, Npts):
    """Compute an Npts-point FFT of the taps and derive the response curves.

    Populates self.freq, self.fftdB, self.fftDeg, self.groupDelay and
    self.phaseDelay for the plotting routines.  fs is the sample rate
    used for the frequency axis.
    """
    fftpts = fft_detail.fft(taps, Npts)
    self.freq = np.linspace(start=0, stop=fs, num=Npts, endpoint=False)
    # Catch the divide-by-zero warning from log10(0); a failed design is
    # reported explicitly and the magnitude is zeroed instead.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        self.fftdB = 20.0*np.log10(abs(fftpts))
        if any(self.fftdB == float('-inf')):
            sys.stderr.write('Filter design failed (taking log10 of 0).\n')
            self.fftdB = np.zeros([len(fftpts)])
    self.fftDeg = np.unwrap(np.angle(fftpts))
    # Group delay = -d(phase); phase delay skips index 0 (freq == 0).
    self.groupDelay = -np.diff(self.fftDeg)
    self.phaseDelay = -self.fftDeg[1:] / self.freq[1:]
def update_time_curves(self):
    """Redraw the tap (time-domain) curves in the tab and grid views."""
    ntaps = len(self.taps)
    if(ntaps < 1):
        return
    # Set Data.
    # Complex taps get separate real/imaginary curves.
    if(type(self.taps[0]) == scipy.complex128):
        self.rcurve.setData(np.arange(ntaps), self.taps.real)
        self.icurve.setData(np.arange(ntaps), self.taps.imag)
    else:
        self.rcurve.setData(np.arange(ntaps), self.taps)
        self.icurve.setData([],[]);
    if self.mttaps:
        # Grid view: stem data interleaves (x, 0) and (x, tap) pairs.
        if(type(self.taps[0]) == scipy.complex128):
            self.mtimecurve_stems.setData(np.repeat(np.arange(ntaps), 2),
                                          np.dstack((np.zeros(self.taps.real.shape[0], dtype=int),
                                                     self.taps.real)).flatten())
            self.mtimecurve.setData(np.arange(ntaps), self.taps.real)
            self.mtimecurve_i_stems.setData(np.repeat(np.arange(ntaps), 2),
                                            np.dstack((np.zeros(self.taps.imag.shape[0], dtype=int),
                                                       self.taps.imag)).flatten())
            self.mtimecurve_i.setData(np.arange(ntaps), self.taps.imag)
        else:
            self.mtimecurve.setData(np.arange(ntaps), self.taps)
            self.mtimecurve_stems.setData(np.repeat(np.arange(ntaps), 2),
                                          np.dstack((np.zeros(self.taps.shape[0], dtype=int),
                                                     self.taps)).flatten())
            # Clear any stale imaginary-part curves.
            self.mtimecurve_i_stems.setData([],[])
            self.mtimecurve_i.setData([],[])
    # Configure plots.
    if self.mtoverlay:
        self.mplots['mTIME'].setMouseEnabled(x=True, y=True)
    else:
        self.mplots['mTIME'].setMouseEnabled(x=False, y=False)
        self.mplots['mTIME'].showAxis('right', False)
    # Set plot limits and reset axis zoom.
    self.plot_auto_limit(self.plots['TIME'], xMin=0, xMax=ntaps)
    self.plot_auto_limit(self.mplots['mTIME'], xMin=0, xMax=ntaps)
def update_step_curves(self):
    """Redraw the step-response curves in the tab and grid views."""
    ntaps = len(self.taps)
    if((ntaps < 1) and (not self.iir)):
        return
    # Set Data.
    if self.iir:
        stepres = self.step_response(self.b,self.a)
        # IIR responses are plotted over a fixed 50-sample window.
        ntaps = 50
    else:
        stepres = self.step_response(self.taps)
    # Complex responses get separate real/imaginary stem+marker curves.
    if(type(stepres[0]) == np.complex128):
        self.steprescurve_stems.setData(np.repeat(np.arange(ntaps), 2),
                                        np.dstack((np.zeros(stepres.real.shape[0], dtype=int),
                                                   stepres.real)).flatten())
        self.steprescurve.setData(np.arange(ntaps), stepres.real)
        self.steprescurve_i_stems.setData(np.repeat(np.arange(ntaps), 2),
                                          np.dstack((np.zeros(stepres.imag.shape[0], dtype=int),
                                                     stepres.imag)).flatten())
        self.steprescurve_i.setData(np.arange(ntaps), stepres.imag)
    else:
        self.steprescurve_stems.setData(np.repeat(np.arange(ntaps), 2),
                                        np.dstack((np.zeros(stepres.shape[0], dtype=int),
                                                   stepres)).flatten())
        self.steprescurve.setData(np.arange(ntaps), stepres)
        self.steprescurve_i_stems.setData([],[])
        self.steprescurve_i.setData([],[])
    if self.mtstep:
        # Mirror onto the shared grid-view time plot.
        if(type(stepres[0]) == np.complex128):
            self.mtimecurve_stems.setData(np.repeat(np.arange(ntaps), 2),
                                          np.dstack((np.zeros(stepres.real.shape[0], dtype=int),
                                                     stepres.real)).flatten())
            self.mtimecurve.setData(np.arange(ntaps), stepres.real)
            self.mtimecurve_i_stems.setData(np.repeat(np.arange(ntaps), 2),
                                            np.dstack((np.zeros(stepres.imag.shape[0], dtype=int),
                                                       stepres.imag)).flatten())
            self.mtimecurve_i.setData(np.arange(ntaps), stepres.imag)
        else:
            self.mtimecurve_stems.setData(np.repeat(np.arange(ntaps), 2),
                                          np.dstack((np.zeros(stepres.shape[0], dtype=int),
                                                     stepres)).flatten())
            self.mtimecurve.setData(np.arange(ntaps), stepres)
            self.mtimecurve_i_stems.setData([],[])
            self.mtimecurve_i.setData([],[])
    # Configure plots.
    if self.mtoverlay:
        self.mplots['mTIME'].setMouseEnabled(x=True, y=True)
    else:
        self.mplots['mTIME'].setMouseEnabled(x=False, y=False)
        self.mplots['mTIME'].showAxis('right', False)
    # Set plot limits and reset axis zoom.
    self.plot_auto_limit(self.plots['STEPRES'], xMin=0, xMax=ntaps)
    self.plot_auto_limit(self.mplots['mTIME'], xMin=0, xMax=ntaps)
def update_imp_curves(self):
    """Redraw the impulse-response curves in the tab and grid views.

    Bug fix: the real-valued branch previously set only the stem data --
    it never plotted the marker curve (imprescurve) and never cleared
    stale imaginary-part curves, unlike the otherwise-identical
    update_step_curves()/update_time_curves().  The branch now mirrors
    them.
    """
    ntaps = len(self.taps)
    if((ntaps < 1) and (not self.iir)):
        return
    # Set Data.
    if self.iir:
        impres = self.impulse_response(self.b, self.a)
        # IIR responses are plotted over a fixed 50-sample window.
        ntaps = 50
    else:
        impres = self.impulse_response(self.taps)
    # Complex responses get separate real/imaginary stem+marker curves.
    if(type(impres[0]) == np.complex128):
        self.imprescurve_stems.setData(np.repeat(np.arange(ntaps), 2),
                                       np.dstack((np.zeros(impres.real.shape[0], dtype=int),
                                                  impres.real)).flatten())
        self.imprescurve.setData(np.arange(ntaps), impres.real)
        self.imprescurve_i_stems.setData(np.repeat(np.arange(ntaps), 2),
                                         np.dstack((np.zeros(impres.imag.shape[0], dtype=int),
                                                    impres.imag)).flatten())
        self.imprescurve_i.setData(np.arange(ntaps), impres.imag)
    else:
        self.imprescurve_stems.setData(np.repeat(np.arange(ntaps), 2),
                                       np.dstack((np.zeros(impres.shape[0], dtype=int),
                                                  impres)).flatten())
        # Previously missing: plot the markers and clear imaginary curves.
        self.imprescurve.setData(np.arange(ntaps), impres)
        self.imprescurve_i_stems.setData([],[])
        self.imprescurve_i.setData([],[])
    if self.mtimpulse:
        # Mirror onto the shared grid-view time plot.
        if(type(impres[0]) == np.complex128):
            self.mtimecurve_stems.setData(np.repeat(np.arange(ntaps), 2),
                                          np.dstack((np.zeros(impres.real.shape[0], dtype=int),
                                                     impres.real)).flatten())
            self.mtimecurve.setData(np.arange(ntaps), impres.real)
            self.mtimecurve_i_stems.setData(np.repeat(np.arange(ntaps), 2),
                                            np.dstack((np.zeros(impres.imag.shape[0], dtype=int),
                                                       impres.imag)).flatten())
            self.mtimecurve_i.setData(np.arange(ntaps), impres.imag)
        else:
            self.mtimecurve_stems.setData(np.repeat(np.arange(ntaps), 2),
                                          np.dstack((np.zeros(impres.shape[0], dtype=int),
                                                     impres)).flatten())
            self.mtimecurve.setData(np.arange(ntaps), impres)
            self.mtimecurve_i_stems.setData([],[])
            self.mtimecurve_i.setData([],[])
    # Configure plots.
    if self.mtoverlay:
        self.mplots['mTIME'].setMouseEnabled(x=True, y=True)
    else:
        self.mplots['mTIME'].setMouseEnabled(x=False, y=False)
        self.mplots['mTIME'].showAxis('right', False)
    # Set plot limits and reset axis zoom.
    self.plot_auto_limit(self.plots['IMPRES'], xMin=0, xMax=ntaps)
    self.plot_auto_limit(self.mplots['mTIME'], xMin=0, xMax=ntaps)
def plot_secondary(self):
    """Refresh the secondary (overlaid) curve on the grid-view frequency plot.

    When overlay mode is on, re-plots whichever response type was shown
    last onto the right-hand axis; otherwise disables mouse interaction,
    hides the right axis and clears the secondary curve.
    """
    if (self.mfoverlay):
        # Re-dispatch based on the most recent primary plot type.
        if self.last_mfreq_plot == "freq":
            self.mfmagresponse = True
            self.update_freq_curves(True)
        elif self.last_mfreq_plot == "phase":
            self.mfphaseresponse = True
            self.update_phase_curves(True)
        elif self.last_mfreq_plot == "group":
            self.mfgroupdelay = True
            self.update_group_curves(True)
        elif self.last_mfreq_plot == "pdelay":
            self.mfphasedelay = True
            self.update_pdelay_curves(True)
        self.mplots['mFREQ'].showAxis('right', True)
    else:
        self.mplots['mFREQ'].setMouseEnabled(x=False, y=False)
        self.mplots['mFREQ'].showAxis('right', False)
        self.mfreqcurve2.setData([],[])
def update_freq_curves(self, secondary=False):
    """Redraw the magnitude-response curves.

    Args:
        secondary: when True, draw onto the overlay curve (right axis)
            of the grid-view frequency plot instead of the primary one.
    """
    npts = len(self.fftdB)
    if(npts < 1):
        return
    # Set Data.
    # IIR responses span the whole normalized axis; FIR FFTs are
    # conjugate-symmetric, so only the first half is shown.
    if self.iir:
        self.freqcurve.setData(self.freq[:npts-1], self.fftdB[:npts-1])
    else:
        self.freqcurve.setData(self.freq[:int(npts//2)], self.fftdB[:int(npts//2)])
    if self.mfmagresponse:
        curve = self.mfreqcurve
        if secondary:
            curve = self.mfreqcurve2
        if self.iir:
            curve.setData(self.freq[:npts-1], self.fftdB[:npts-1])
        else:
            curve.setData(self.freq[:int(npts//2)], self.fftdB[:int(npts//2)])
    # Set axes to new scales.
    # Set plot limits and reset axis zoom.
    if self.iir:
        xmax = self.freq[npts-1]
    else:
        xmax = self.freq[npts//2]
    xmin = self.freq[0]
    self.plot_auto_limit(self.plots['FREQ'], xMin=xmin, xMax=xmax)
    self.plot_auto_limit(self.mplots['mFREQ'], xMin=xmin, xMax=xmax)
    if secondary:
        self.mplots['mFREQ'].setLabel('right', 'Magnitude', units='dB', **self.labelstyle9b)
    else:
        self.mplots['mFREQ'].setLabel('left', 'Magnitude', units='dB', **self.labelstyle9b)
    # Guard prevents recursion: plot_secondary() calls back with
    # secondary=True.
    if not secondary:
        self.plot_secondary()
        self.last_mfreq_plot = 'freq'
def update_phase_curves(self, secondary=False):
    """Redraw the phase-response curves (see update_freq_curves)."""
    npts = len(self.fftDeg)
    if(npts < 1):
        return
    # Set Data.
    if self.iir:
        self.phasecurve.setData(self.freq[:npts-1], self.fftDeg[:npts-1])
    else:
        self.phasecurve.setData(self.freq[:int(npts//2)], self.fftDeg[:int(npts//2)])
    if self.mfphaseresponse:
        curve = self.mfreqcurve
        if secondary:
            curve = self.mfreqcurve2
        if self.iir:
            curve.setData(self.freq[:npts-1], self.fftDeg[:npts-1])
        else:
            curve.setData(self.freq[:int(npts//2)], self.fftDeg[:int(npts//2)])
    # Set plot limits and reset axis zoom.
    if self.iir:
        xmax = self.freq[npts-1]
    else:
        xmax = self.freq[npts//2]
    xmin = self.freq[0]
    self.plot_auto_limit(self.plots['PHASE'], xMin=xmin, xMax=xmax)
    self.plot_auto_limit(self.mplots['mFREQ'], xMin=xmin, xMax=xmax)
    # Set Axis title.
    if secondary:
        self.mplots['mFREQ'].setLabel('right', 'Phase', units='Radians', **self.labelstyle9b)
    else:
        self.mplots['mFREQ'].setLabel('left', 'Phase', units='Radians', **self.labelstyle9b)
    # Guard prevents recursion via plot_secondary().
    if not secondary:
        self.plot_secondary()
        self.last_mfreq_plot = 'phase'
def update_group_curves(self, secondary=False):
    """Redraw the group-delay curves (see update_freq_curves)."""
    npts = len(self.groupDelay)
    if(npts < 1):
        return
    # Set Data.
    if self.iir:
        self.groupcurve.setData(self.freq[:npts-1], self.groupDelay[:npts-1])
    else:
        self.groupcurve.setData(self.freq[:int(npts//2)], self.groupDelay[:int(npts//2)])
    if self.mfgroupdelay:
        curve = self.mfreqcurve
        if secondary:
            curve = self.mfreqcurve2
        if self.iir:
            curve.setData(self.freq[:npts-1], self.groupDelay[:npts-1])
        else:
            curve.setData(self.freq[:int(npts//2)], self.groupDelay[:int(npts//2)])
    # Configure plots.
    # NOTE(review): this checks the time-overlay flag (mtoverlay) while
    # configuring the frequency plot; the sibling phase/pdelay updaters
    # have no such section -- confirm intent before changing.
    if self.mtoverlay:
        self.mplots['mFREQ'].setMouseEnabled(x=True, y=True)
    else:
        self.mplots['mFREQ'].setMouseEnabled(x=False, y=False)
        self.mplots['mFREQ'].showAxis('right', False)
    # Set plot limits and reset axis zoom.
    if self.iir:
        xmax = self.freq[npts-1]
    else:
        xmax = self.freq[npts//2]
    xmin = self.freq[0]
    self.plot_auto_limit(self.plots['GROUP'], xMin=xmin, xMax=xmax)
    self.plot_auto_limit(self.mplots['mFREQ'], xMin=xmin, xMax=xmax)
    # Set Axis title.
    if secondary:
        self.mplots['mFREQ'].setLabel('right', 'Delay', units='seconds', **self.labelstyle9b)
    else:
        self.mplots['mFREQ'].setLabel('left', 'Delay', units='seconds', **self.labelstyle9b)
    # Guard prevents recursion via plot_secondary().
    if not secondary:
        self.plot_secondary()
        self.last_mfreq_plot = 'group'
def update_pdelay_curves(self, secondary=False):
    """Redraw the phase-delay curves (see update_freq_curves)."""
    npts = len(self.phaseDelay)
    if(npts < 1):
        return
    # Set Data.
    if self.iir:
        self.pdelaycurve.setData(self.freq[:npts-1], self.phaseDelay[:npts-1])
    else:
        self.pdelaycurve.setData(self.freq[:int(npts//2)], self.phaseDelay[:int(npts//2)])
    if self.mfphasedelay:
        curve = self.mfreqcurve
        if secondary:
            curve = self.mfreqcurve2
        if self.iir:
            curve.setData(self.freq[:npts-1], self.phaseDelay[:npts-1])
        else:
            curve.setData(self.freq[:int(npts//2)], self.phaseDelay[:int(npts//2)])
    # Set plot limits and reset axis zoom.
    if self.iir:
        xmax = self.freq[npts-1]
    else:
        xmax = self.freq[npts//2]
    xmin = self.freq[0]
    self.plot_auto_limit(self.plots['PDELAY'], xMin=xmin, xMax=xmax)
    self.plot_auto_limit(self.mplots['mFREQ'], xMin=xmin, xMax=xmax)
    # Set Axis title.
    if secondary:
        self.mplots['mFREQ'].setLabel('right', 'Phase Delay', **self.labelstyle9b)
    else:
        self.mplots['mFREQ'].setLabel('left', 'Phase Delay', **self.labelstyle9b)
    # Guard prevents recursion via plot_secondary().
    if not secondary:
        self.plot_secondary()
        self.last_mfreq_plot = 'pdelay'
def plot_auto_limit(self, plot, xMin=None, xMax=None, yMin=None, yMax=None):
    """Auto-range *plot*, then clamp its pan/zoom limits.

    Any limit the caller does not supply is taken from the freshly
    auto-ranged view, so the user cannot navigate outside the data.
    """
    # Drop old limits first so autoRange() can see the full data extent.
    plot.setLimits(xMin=None, xMax=None, yMin=None, yMax=None)
    plot.autoRange()
    (x_view, y_view) = plot.viewRange()
    bounds = {
        'xMin': x_view[0] if xMin is None else xMin,
        'xMax': x_view[1] if xMax is None else xMax,
        'yMin': y_view[0] if yMin is None else yMin,
        'yMax': y_view[1] if yMax is None else yMax,
    }
    plot.setLimits(**bounds)
def action_quick_access(self):
    """Toggle visibility of the quick-access frame."""
    widget = self.gui.quickFrame
    widget.show() if widget.isHidden() else widget.hide()
def action_spec_widget(self):
    """Toggle visibility of the filter-spec view."""
    widget = self.gui.filterspecView
    widget.show() if widget.isHidden() else widget.hide()
def action_response_widget(self):
    """Toggle visibility of the response tab group."""
    widget = self.gui.tabGroup
    widget.show() if widget.isHidden() else widget.hide()
def action_design_widget(self):
    """Toggle visibility of the filter design frame."""
    widget = self.gui.filterFrame
    widget.show() if widget.isHidden() else widget.hide()
# Saves and attach the plots for comparison.
def set_bufferplots(self):
    """Snapshot the current curves so a new design can be compared visually.

    NOTE(review): this uses the legacy Qwt API (itemList/attach/detach);
    the pyqtgraph plots used elsewhere in this class do not provide that
    interface -- confirm this path is still reachable.
    """
    if (self.gui.checkKeepcur.checkState() == 0 ):
        # Detach and delete all plots if unchecked.
        for c in self.bufferplots:
            c.detach()
        self.replot_all()
        self.bufferplots = []
    else:
        self.bufferplots = []
        # Iterate through tabgroup children and copy curves.
        for i in range(self.gui.tabGroup.count()):
            page = self.gui.tabGroup.widget(i)
            for item in page.children():
                if isinstance(item, Qwt.QwtPlot):
                    # Change colours as both plots overlay.
                    colours = [QtCore.Qt.darkYellow,QtCore.Qt.black]
                    for c in item.itemList():
                        if isinstance(c, Qwt.QwtPlotCurve):
                            dup = Qwt.QwtPlotCurve()
                            dpen = c.pen()
                            dsym = c.symbol()
                            dsym.setPen(Qt.QPen(colours[0]))
                            dsym.setSize(Qt.QSize(6, 6))
                            dpen.setColor(colours[0])
                            # Consume one colour per duplicated curve.
                            del colours[0]
                            dup.setPen(dpen)
                            dup.setSymbol(dsym)
                            dup.setRenderHint(Qwt.QwtPlotItem.RenderAntialiased)
                            # NOTE: the comprehension variable i shadows the
                            # outer tab index (harmless: i is re-bound by the
                            # outer loop before its next use).
                            dup.setData([c.x(i) for i in range(c.dataSize())],
                                        [c.y(i) for i in range(c.dataSize())])
                            self.bufferplots.append(dup)
                            self.bufferplots[-1].attach(item)
def set_grid(self):
if (self.gui.checkGrid.checkState() == 0):
self.gridenable = False
for i in self.plots:
self.plots[i].showGrid(x=False, y=False)
for i in self.mplots:
self.mplots[i].showGrid(x=False, y=False)
else:
self.gridenable = True
if self.gridview:
for i in self.mplots:
self.mplots[i].showGrid(x=True, y=True)
else:
for i in self.plots:
self.plots[i].showGrid(x=True, y=True)
def set_actgrid(self):
if (self.gui.actionGrid_2.isChecked() == 0 ):
self.gridenable = False
for i in self.plots:
self.plots[i].showGrid(x=False, y=False)
for i in self.mplots:
self.mplots[i].showGrid(x=False, y=False)
else:
self.gridenable = True
if self.gridview:
for i in self.mplots:
self.mplots[i].showGrid(x=True, y=True)
else:
for i in self.plots:
self.plots[i].showGrid(x=True, y=True)
def set_magresponse(self):
    """Show/hide the magnitude-response tab from its checkbox."""
    if (self.gui.checkMagres.checkState() == 0 ):
        self.magres = False
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.freqTab))
    else:
        self.magres = True
        self.gui.tabGroup.addTab(self.gui.freqTab, _fromUtf8("Magnitude Response"))
        self.update_freq_curves()
def set_actmagresponse(self):
    """Show/hide the magnitude-response tab from the View menu action."""
    if (self.gui.actionMagnitude_Response.isChecked() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.freqTab))
    else:
        self.gui.tabGroup.addTab(self.gui.freqTab, _fromUtf8("Magnitude Response"))
        self.update_freq_curves()
def set_switchview(self):
    """Switch between the tab view (stack index 0) and the grid view (1)."""
    if (self.gui.actionGridview.isChecked() == 0 ):
        self.gridview = 0
        self.set_defaultpen()
        self.set_actgrid()
        self.gui.stackedWindows.setCurrentIndex(0)
        # Redraw everything in the tab view for the current design type.
        if self.iir:
            self.iir_plot_all(self.z,self.p,self.k)
        else:
            self.draw_plots(self.taps,self.params)
    else:
        self.gridview = 1
        self.set_actgrid()
        self.gui.stackedWindows.setCurrentIndex(1)
        self.update_freq_curves()
        self.update_time_curves()
        # self.set_drawideal()
def set_plotselect(self):
    """Show or hide the four per-plot selection group boxes together."""
    boxes = (self.gui.mfgroupBox, self.gui.mtgroupBox,
             self.gui.pzgroupBox, self.gui.mpzgroupBox)
    if self.gui.actionPlot_select.isChecked() == 0:
        for box in boxes:
            box.hide()
    else:
        for box in boxes:
            box.show()
def replot_all(self):
    """Force a redraw of every static plot, preserving the historic order."""
    # These two exist in both the tab view and the grid view.
    for name in ('TIME', 'FREQ'):
        self.plots[name].replot()
        self.mplots['m' + name].replot()
    # Tab-view-only plots.
    for name in ('PHASE', 'GROUP', 'IMPRES', 'STEPRES', 'PDELAY'):
        self.plots[name].replot()
def detach_allgrid(self):
    """Turn the grid off on every tab-view plot.

    Bug fix: the previous version iterated over the dict KEYS and called
    showGrid() on the key strings themselves, which raises
    AttributeError; every sibling method indexes self.plots[i].
    """
    for i in self.plots:
        self.plots[i].showGrid(x=False, y=False)
def set_mfmagresponse(self):
    """Select magnitude response for the grid-view frequency plot.

    In overlay mode the flag is forced on, otherwise it toggles; the
    other three response flags are cleared so at most one is active.
    """
    if self.mfoverlay:
        self.mfmagresponse = True
    else:
        self.mfmagresponse = not(self.mfmagresponse)
    # if not self.mfoverlay:
    self.mfphasedelay = False
    self.mfgroupdelay = False
    self.mfphaseresponse = False
    self.update_freq_curves()
def set_mfphaseresponse(self):
    """Select phase response for the grid-view frequency plot."""
    if self.mfoverlay:
        self.mfphaseresponse = True
    else:
        self.mfphaseresponse = not(self.mfphaseresponse)
    # if not self.mfoverlay:
    self.mfphasedelay = False
    self.mfgroupdelay = False
    self.mfmagresponse = False
    self.update_phase_curves()
def set_mfgroupdelay(self):
    """Select group delay for the grid-view frequency plot."""
    if self.mfoverlay:
        self.mfgroupdelay = True
    else:
        self.mfgroupdelay = not(self.mfgroupdelay)
    # if not self.mfoverlay:
    self.mfphasedelay = False
    self.mfphaseresponse = False
    self.mfmagresponse = False
    self.update_group_curves()
def set_mfphasedelay(self):
    """Select phase delay for the grid-view frequency plot."""
    if self.mfoverlay:
        self.mfphasedelay = True
    else:
        self.mfphasedelay = not(self.mfphasedelay)
    # if not self.mfoverlay:
    self.mfgroupdelay = False
    self.mfphaseresponse = False
    self.mfmagresponse = False
    self.update_pdelay_curves()
def ifinlist(self,a,dlist):
    """Return True if *a* matches any element of *dlist*.

    Matching is delegated to compare_instances() (identity), so this is
    an identity-membership test; any() short-circuits like the original
    loop did.
    """
    return any(self.compare_instances(a, d) for d in dlist)

def compare_instances(self,a,b):
    """Return True only when *a* and *b* are the very same object."""
    # Identity, not equality: distinct-but-equal objects do not match.
    return a is b
def detach_firstattached(self, plot):
    """Make room for an overlay curve on *plot* (legacy Qwt helper).

    With more than two attached items, the oldest curve is detached and
    its pen/axis handed to the newest; otherwise the second item is
    moved to the right-hand axis (and recoloured red on the grid-view
    frequency plot).
    """
    items = plot.itemList()
    plot.enableAxis(Qwt.QwtPlot.yRight)
    if len(items) > 2:
        yaxis=items[0].yAxis()
        items[2].setPen(items[0].pen())
        items[2].setYAxis(yaxis)
        items[0].detach()
    else:
        items[1].setYAxis(Qwt.QwtPlot.yRight)
        if plot is self.mplots['mFREQ']:
            items[1].setPen(QtGui.QPen(QtCore.Qt.red, 1, QtCore.Qt.SolidLine))
    # Re-apply the grid after the curve shuffle.
    self.set_actgrid()
def update_fft(self, taps, params):
    """Store a new design and recompute its FFT response curves."""
    self.params = params
    self.taps = np.array(taps)
    # Recompute freq/magnitude/phase/delay arrays at the current FFT size.
    self.get_fft(params["fs"], self.taps, self.nfftpts)
def set_mfoverlay(self):
    # Toggle overlay mode for the grid-view frequency plot.
    self.mfoverlay = not(self.mfoverlay)
def set_conj(self):
    # Conjugate the selected root on the tab-view pole-zero picker.
    self.cpicker.set_conjugate()
def set_mconj(self):
    # Conjugate the selected root on the grid-view pole-zero picker.
    self.cpicker2.set_conjugate()
def set_zeroadd(self):
    # Add a zero via the tab-view pole-zero picker.
    self.cpicker.add_zero()
def set_mzeroadd(self):
    # Add a zero via the grid-view pole-zero picker.
    self.cpicker2.add_zero()
def set_poleadd(self):
    # Add a pole via the tab-view pole-zero picker.
    self.cpicker.add_pole()
def set_mpoleadd(self):
    # Add a pole via the grid-view pole-zero picker.
    self.cpicker2.add_pole()
def set_delpz(self):
    # Delete the selected pole/zero in the tab-view picker.
    self.cpicker.delete_pz()
def set_mdelpz(self):
    # Delete the selected pole/zero in the grid-view picker.
    self.cpicker2.delete_pz()
def set_mttaps(self):
    """Toggle the taps curve on the grid-view time plot."""
    self.mttaps = not(self.mttaps)
    # NOTE(review): this checks the FREQUENCY overlay flag (mfoverlay)
    # while clearing time-domain flags; mtoverlay may have been
    # intended -- confirm before changing.
    if not self.mfoverlay:
        self.mtstep = False
        self.mtimpulse = False
    self.update_time_curves()
def set_mtstep(self):
    """Toggle the step-response curve on the grid-view time plot."""
    self.mtstep = not(self.mtstep)
    # See NOTE in set_mttaps about mfoverlay vs mtoverlay.
    if not self.mfoverlay:
        self.mttaps = False
        self.mtimpulse = False
    self.update_step_curves()
def set_mtimpulse(self):
    """Toggle the impulse-response curve on the grid-view time plot."""
    self.mtimpulse = not(self.mtimpulse)
    # See NOTE in set_mttaps about mfoverlay vs mtoverlay.
    if not self.mfoverlay:
        self.mttaps = False
        self.mtstep = False
    self.update_imp_curves()
def set_gdelay(self):
    """Show/hide the group-delay tab from its checkbox."""
    if (self.gui.checkGdelay.checkState() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.groupTab))
    else:
        self.gui.tabGroup.addTab(self.gui.groupTab, _fromUtf8("Group Delay"))
        self.update_freq_curves()
def set_actgdelay(self):
    """Show/hide the group-delay tab from the View menu action."""
    if (self.gui.actionGroup_Delay.isChecked() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.groupTab))
    else:
        self.gui.tabGroup.addTab(self.gui.groupTab, _fromUtf8("Group Delay"))
        self.update_freq_curves()
def set_phase(self):
    """Show/hide the phase-response tab from its checkbox."""
    if (self.gui.checkPhase.checkState() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.phaseTab))
    else:
        self.gui.tabGroup.addTab(self.gui.phaseTab, _fromUtf8("Phase Response"))
        self.update_freq_curves()
def set_actphase(self):
    """Show/hide the phase-response tab from the View menu action."""
    if (self.gui.actionPhase_Respone.isChecked() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.phaseTab))
    else:
        self.gui.tabGroup.addTab(self.gui.phaseTab, _fromUtf8("Phase Response"))
        self.update_freq_curves()
def set_fcoeff(self):
    """Show/hide the filter-coefficients tab from its checkbox."""
    if (self.gui.checkFcoeff.checkState() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.fcTab))
    else:
        self.gui.tabGroup.addTab(self.gui.fcTab, _fromUtf8("Filter Coefficients"))
        self.update_fcoeff()
def set_actfcoeff(self):
    """Show/hide the filter-coefficients tab from the View menu action."""
    if (self.gui.actionFilter_Coefficients.isChecked() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.fcTab))
    else:
        self.gui.tabGroup.addTab(self.gui.fcTab, _fromUtf8("Filter Coefficients"))
        self.update_fcoeff()
def set_band(self):
    """Show/hide the band-diagram tab from its checkbox."""
    if (self.gui.checkBand.checkState() == 0 ):
        self.gui.filterspecView.removeTab(self.gui.filterspecView.indexOf(self.gui.bandDiagram))
    else:
        self.gui.filterspecView.addTab(self.gui.bandDiagram, _fromUtf8("Band Diagram"))
def set_actband(self):
    """Show/hide the band-diagram tab from the View menu action."""
    if (self.gui.actionBand_Diagram.isChecked() == 0 ):
        self.gui.filterspecView.removeTab(self.gui.filterspecView.indexOf(self.gui.bandDiagram))
    else:
        self.gui.filterspecView.addTab(self.gui.bandDiagram, _fromUtf8("Band Diagram"))
# def set_drawideal(self):
# fsel = self.gui.fselectComboBox.currentText()
# if self.gridview and not(self.mfoverlay):
# plot = self.mplots['mFREQ']
# else:
# plot = self.plots['FREQ']
# if (self.gui.actionIdeal_Band.isChecked() == 0 or fsel == "IIR(scipy)"):
# self.idbanditems.detach_allidealcurves(plot)
# elif(self.params):
# ftype = self.gui.filterTypeComboBox.currentText()
# self.idbanditems.attach_allidealcurves(plot)
# self.idbanditems.plotIdealCurves(ftype, self.params, plot)
# plot.replot()
def set_pzplot(self):
    """Show/hide the pole-zero tab from its checkbox."""
    if (self.gui.checkPzplot.checkState() == 0 ):
        self.gui.filterspecView.removeTab(self.gui.filterspecView.indexOf(self.gui.poleZero))
    else:
        self.gui.filterspecView.addTab(self.gui.poleZero, _fromUtf8("Pole-Zero Plot"))
def set_actpzplot(self):
    """Show/hide the pole-zero tab from the View menu action."""
    if (self.gui.actionPole_Zero_Plot_2.isChecked() == 0 ):
        self.gui.filterspecView.removeTab(self.gui.filterspecView.indexOf(self.gui.poleZero))
    else:
        self.gui.filterspecView.addTab(self.gui.poleZero, _fromUtf8("Pole-Zero Plot"))
def set_pdelay(self):
    """Show/hide the phase-delay tab from its checkbox.

    Bug fix: this handler previously queried checkPzplot (the pole-zero
    plot checkbox, already handled by set_pzplot) instead of the
    phase-delay checkbox, so the tab tracked the wrong control.
    """
    if (self.gui.checkPdelay.checkState() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.pdelayTab))
    else:
        self.gui.tabGroup.addTab(self.gui.pdelayTab, _fromUtf8("Phase Delay"))
def set_actpdelay(self):
    """Show/hide the phase-delay tab from the View menu action."""
    if (self.gui.actionPhase_Delay.isChecked() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.pdelayTab))
    else:
        self.gui.tabGroup.addTab(self.gui.pdelayTab, _fromUtf8("Phase Delay"))
def set_impres(self):
    """Show/hide the impulse-response tab from its checkbox."""
    if (self.gui.checkImpulse.checkState() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.impresTab))
    else:
        self.gui.tabGroup.addTab(self.gui.impresTab, _fromUtf8("Impulse Response"))
def set_actimpres(self):
    """Show/hide the impulse-response tab from the View menu action."""
    if (self.gui.actionImpulse_Response.isChecked() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.impresTab))
    else:
        self.gui.tabGroup.addTab(self.gui.impresTab, _fromUtf8("Impulse Response"))
def set_stepres(self):
    """Show/hide the step-response tab from its checkbox."""
    if (self.gui.checkStep.checkState() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.stepresTab))
    else:
        self.gui.tabGroup.addTab(self.gui.stepresTab, _fromUtf8("Step Response"))
def set_actstepres(self):
    """Show/hide the step-response tab from the View menu action."""
    if (self.gui.actionStep_Response.isChecked() == 0 ):
        self.gui.tabGroup.removeTab(self.gui.tabGroup.indexOf(self.gui.stepresTab))
    else:
        self.gui.tabGroup.addTab(self.gui.stepresTab, _fromUtf8("Step Response"))
def populate_bandview(self,fitems):
    """Add band-diagram items to the scene (widgets via addWidget)."""
    for entry in fitems:
        adder = self.scene.addWidget if entry.isWidgetType() else self.scene.addItem
        adder(entry)
def remove_bandview(self):
    """Remove every item currently in the band-diagram scene."""
    # Copy first: removing while iterating the live item list is unsafe.
    for entry in list(self.scene.items()):
        self.scene.removeItem(entry)
def set_fatten(self,atten):
    """Add *atten* dB to the stop-band attenuation box of the active filter type.

    The original was five near-identical if-blocks; a lookup table keeps
    one code path per widget.  "Band Pass" and "Complex Band Pass" share
    the same edit box, as before, and unknown filter types are ignored,
    matching the original fall-through behaviour.
    """
    atten_boxes = {
        "Low Pass": self.gui.lpfStopBandAttenEdit,
        "High Pass": self.gui.hpfStopBandAttenEdit,
        "Band Pass": self.gui.bpfStopBandAttenEdit,
        "Complex Band Pass": self.gui.bpfStopBandAttenEdit,
        "Band Notch": self.gui.bnfStopBandAttenEdit,
    }
    ftype = self.gui.filterTypeComboBox.currentText()
    box = atten_boxes.get(ftype)
    if box is not None:
        # The validity flag was ignored by the original too.
        boxatten, r = getfloat(box.text())
        box.setText(str(atten + boxatten))
def set_curvetaps(self, zeros_poles):
    """Apply pole/zero edits from the tab-view plot to the design.

    Rebuilds the filter (IIR from the zpk form, FIR from the zero
    polynomial), mirrors the roots onto the grid-view plot, and notifies
    the callback with the updated design if one is registered.
    """
    zr, pl = zeros_poles
    if self.iir:
        self.z = zr
        self.p = pl
        self.iir_plot_all(self.z,self.p,self.k)
        self.gui.mpzPlot.insertZeros(zr)
        self.gui.mpzPlot.insertPoles(pl)
        self.update_fcoeff()
        if self.callback:
            retobj = ApiObject()
            retobj.update_all("iir", self.params, (self.b, self.a), 1)
            self.callback(retobj)
    else:
        # Rebuild taps from the edited zeros, scaled by the first tap.
        hz = poly1d(zr,r=1)
        # print hz.c.
        self.taps = hz.c*self.taps[0]
        self.draw_plots(self.taps,self.params)
        self.update_fcoeff()
        # update the pzplot in other view.
        zeros = self.get_zeros()
        poles = self.get_poles()
        self.gui.mpzPlot.insertZeros(zeros)
        self.gui.mpzPlot.insertPoles(poles)
        self.gui.nTapsEdit.setText(str(self.taps.size))
        if self.callback:
            retobj = ApiObject()
            retobj.update_all("fir", self.params, self.taps, 1)
            self.callback(retobj)
def set_mcurvetaps(self, zeros_poles):
    """Apply pole/zero edits from the grid-view plot to the design.

    Same as set_curvetaps(), but mirrors the roots back onto the
    tab-view plot and, in grid view, refreshes the grid plots directly.
    """
    zr, pl = zeros_poles
    if self.iir:
        self.z = zr
        self.p = pl
        self.iir_plot_all(self.z,self.p,self.k)
        self.gui.pzPlot.insertZeros(zr)
        self.gui.pzPlot.insertPoles(pl)
        self.update_fcoeff()
        if self.callback:
            retobj = ApiObject()
            retobj.update_all("iir", self.params, (self.b, self.a), 1)
            self.callback(retobj)
    else:
        # Rebuild taps from the edited zeros, scaled by the first tap.
        hz = poly1d(zr,r=1)
        # print hz.c.
        self.taps = hz.c*self.taps[0]
        if self.gridview:
            self.update_fft(self.taps, self.params)
            self.set_mfmagresponse()
            self.set_mttaps()
        else:
            self.draw_plots(self.taps,self.params)
        self.update_fcoeff()
        # update the pzplot in other view.
        zeros = self.get_zeros()
        poles = self.get_poles()
        self.gui.pzPlot.insertZeros(zeros)
        self.gui.pzPlot.insertPoles(poles)
        self.gui.nTapsEdit.setText(str(self.taps.size))
        if self.callback:
            retobj = ApiObject()
            retobj.update_all("fir", self.params, self.taps, 1)
            self.callback(retobj)
def set_statusbar(self, point):
    """Show the cursor position on the tab-view pole-zero status bar.

    *point* is an (x, y) pair; x of None clears the message.  Uses
    ``is None`` instead of ``== None`` (identity is the correct test and
    avoids surprises with types that overload equality).
    """
    x, y = point
    if x is None:
        self.gui.pzstatusBar.showMessage("")
    else:
        self.gui.pzstatusBar.showMessage("X: "+str(x)+" Y: "+str(y))
def set_mstatusbar(self, point):
    """Show the cursor position on the grid-view pole-zero status bar."""
    x, y = point
    if x is None:
        self.gui.mpzstatusBar.showMessage("")
    else:
        self.gui.mpzstatusBar.showMessage("X: "+str(x)+" Y: "+str(y))
def get_zeros(self):
    """Return the zeros of the FIR transfer function (roots of the taps)."""
    # poly1d/zeros come from a star import higher in the file
    # (presumably numpy/pylab -- confirm against the file header).
    hz = poly1d(self.taps,r=0)
    return hz.r
def get_poles(self):
    """Return the FIR poles: len(taps)-1 poles at the origin, or [] if no taps."""
    if len(self.taps):
        hp = zeros(len(self.taps)-1,complex)
        return hp
    else:
        return []
def impulse_response(self, b, a=1):
    """Return the filter's impulse response.

    For FIR filters the response length equals len(b); for IIR filters
    a fixed 50-sample window is used.  Removes the unused local 'x'
    from the original and builds the unit impulse with np.zeros.
    """
    length = 50 if self.iir else len(b)
    impulse = np.zeros(length)
    impulse[0] = 1.0
    return signal.lfilter(b, a, impulse)
def step_response(self, b, a=1):
    """Return the filter's step response.

    The step response is the cumulative sum of the impulse response;
    delegating to impulse_response() removes the duplicated filter code
    (the two originals were identical except for the final cumsum).
    """
    return np.cumsum(self.impulse_response(b, a))
def update_fcoeff(self):
    """Render the current coefficients as text in both coefficient boxes."""
    if self.iir:
        b_text = ','.join(str(e) for e in self.b)
        a_text = ','.join(str(e) for e in self.a)
        fcoeff = "b = " + b_text + "\na = " + a_text
    else:
        fcoeff = "taps = " + ','.join(str(e) for e in self.taps)
    # Keep the tab view and the grid view in sync.
    self.gui.filterCoeff.setText(fcoeff)
    self.gui.mfilterCoeff.setText(fcoeff)
def action_save_dialog(self):
    """Save the current design (type, parameters, coefficients) as CSV.

    Rows: a "restype" marker (fir/iir), one row per design parameter,
    then the coefficients ("taps" for FIR, "b"/"a" for IIR).  On
    success the save action is disabled and the plots/coefficient boxes
    are cleared.
    """
    file_dialog_output = QtGui.QFileDialog.getSaveFileName(self, "Save CSV Filter File", ".", "")
    # getSaveFileName returns a (filename, selected_filter) tuple.
    filename = file_dialog_output[0]
    try:
        handle = open(filename, "w")
    except IOError:
        QtGui.QMessageBox.information(self, 'File Name',
                                      ("Could not save to file: %s" % filename),
                                      QtGui.QMessageBox.Ok)
        return
    # 'with' guarantees the handle is closed even if a write raises
    # (the original leaked the handle on CSV errors).
    with handle:
        csvhandle = csv.writer(handle, delimiter=",")
        # Indicate FIR/IIR for easy reading.
        if self.iir:
            csvhandle.writerow(["restype","iir"])
        else:
            csvhandle.writerow(["restype","fir"])
        for k in list(self.params.keys()):
            csvhandle.writerow([k, self.params[k]])
        if self.iir:
            csvhandle.writerow(["b",] + list(self.b))
            csvhandle.writerow(["a",] + list(self.a))
        else:
            csvhandle.writerow(["taps",] + list(self.taps))
    self.gui.action_save.setEnabled(False)
    # Iterate through all plots and delete the curves
    for window in self.plots.values():
        window.drop_plotdata()
    # Clear filter coeffs
    self.gui.filterCoeff.setText("")
    self.gui.mfilterCoeff.setText("")
    # Clear poles and zeros plot
    self.gui.pzPlot.clear()
    self.replot_all()
def action_open_dialog(self):
    """Load a design saved by action_save_dialog() and restore the GUI.

    Parses the CSV back into parameters/coefficients (complex values
    detected by regex), then repopulates either the FIR or the IIR
    design controls to match the loaded filter.
    """
    file_dialog_output = QtGui.QFileDialog.getOpenFileName(self, "Open CSV Filter File", ".", "")
    if(len(file_dialog_output) == 0):
        return
    # file_dialog_output returns tuple of (filename, file filter)
    filename = file_dialog_output[0]
    try:
        handle = open(filename, "r")
    except IOError:
        reply = QtGui.QMessageBox.information(self, 'File Name',
                                              ("Could not open file: %s" % filename),
                                              QtGui.QMessageBox.Ok)
        return
    csvhandle = csv.reader(handle, delimiter=",")
    b_a={}
    taps = []
    params = {}
    for row in csvhandle:
        if (row[0] == "restype"):
            restype = row[1]
        elif(row[0] == "taps"):
            # A trailing 'j' marks complex-valued coefficients.
            testcpx = re.findall(r"[+-]?\d+\.*\d*[Ee]?[-+]?\d+j", row[1])
            if(len(testcpx) > 0): # it's a complex
                taps = [complex(r) for r in row[1:]]
            else:
                taps = [float(r) for r in row[1:]]
        elif(row[0] == "b" or row[0] == "a"):
            testcpx = re.findall(r"[+-]?\d+\.*\d*[Ee]?[-+]?\d+j", row[1])
            if(len(testcpx) > 0): # it's a complex
                b_a[row[0]] = [complex(r) for r in row[1:]]
            else:
                b_a[row[0]]= [float(r) for r in row[1:]]
        else:
            # Anything else is a design parameter: complex, float or string.
            testcpx = re.findall(r"[+-]?\d+\.*\d*[Ee]?[-+]?\d+j", row[1])
            if(len(testcpx) > 0): # it's a complex
                params[row[0]] = complex(row[1])
            else: # assume it's a float.
                try: # if it's not a float, its a string.
                    params[row[0]] = float(row[1])
                except ValueError:
                    params[row[0]] = row[1]
    handle.close()
    if restype == "fir":
        self.iir = False
        self.gui.fselectComboBox.setCurrentIndex(0)
        self.draw_plots(taps, params)
        zeros = self.get_zeros()
        poles = self.get_poles()
        self.gui.pzPlot.insertZeros(zeros)
        self.gui.pzPlot.insertPoles(poles)
        self.gui.mpzPlot.insertZeros(zeros)
        self.gui.mpzPlot.insertPoles(poles)
        self.gui.sampleRateEdit.setText(str(params["fs"]))
        self.gui.filterGainEdit.setText(str(params["gain"]))
        # Set up GUI parameters for each filter type.
        if(params["filttype"] == "lpf"):
            self.gui.filterTypeComboBox.setCurrentIndex(0)
            self.gui.filterDesignTypeComboBox.setCurrentIndex(int(params["wintype"]))
            self.gui.endofLpfPassBandEdit.setText(str(params["pbend"]))
            self.gui.startofLpfStopBandEdit.setText(str(params["sbstart"]))
            self.gui.lpfStopBandAttenEdit.setText(str(params["atten"]))
            if(params["wintype"] == self.EQUIRIPPLE_FILT):
                self.gui.lpfPassBandRippleEdit.setText(str(params["ripple"]))
        elif(params["filttype"] == "bpf"):
            self.gui.filterTypeComboBox.setCurrentIndex(1)
            self.gui.filterDesignTypeComboBox.setCurrentIndex(int(params["wintype"]))
            self.gui.startofBpfPassBandEdit.setText(str(params["pbstart"]))
            self.gui.endofBpfPassBandEdit.setText(str(params["pbend"]))
            self.gui.bpfTransitionEdit.setText(str(params["tb"]))
            self.gui.bpfStopBandAttenEdit.setText(str(params["atten"]))
            if(params["wintype"] == self.EQUIRIPPLE_FILT):
                self.gui.bpfPassBandRippleEdit.setText(str(params["ripple"]))
        elif(params["filttype"] == "cbpf"):
            self.gui.filterTypeComboBox.setCurrentIndex(2)
            self.gui.filterDesignTypeComboBox.setCurrentIndex(int(params["wintype"]))
            self.gui.startofBpfPassBandEdit.setText(str(params["pbstart"]))
            self.gui.endofBpfPassBandEdit.setText(str(params["pbend"]))
            self.gui.bpfTransitionEdit.setText(str(params["tb"]))
            self.gui.bpfStopBandAttenEdit.setText(str(params["atten"]))
            if(params["wintype"] == self.EQUIRIPPLE_FILT):
                self.gui.bpfPassBandRippleEdit.setText(str(params["ripple"]))
        elif(params["filttype"] == "bnf"):
            self.gui.filterTypeComboBox.setCurrentIndex(3)
            self.gui.filterDesignTypeComboBox.setCurrentIndex(int(params["wintype"]))
            self.gui.startofBnfStopBandEdit.setText(str(params["sbstart"]))
            self.gui.endofBnfStopBandEdit.setText(str(params["sbend"]))
            self.gui.bnfTransitionEdit.setText(str(params["tb"]))
            self.gui.bnfStopBandAttenEdit.setText(str(params["atten"]))
            if(params["wintype"] == self.EQUIRIPPLE_FILT):
                self.gui.bnfPassBandRippleEdit.setText(str(params["ripple"]))
        elif(params["filttype"] == "hpf"):
            self.gui.filterTypeComboBox.setCurrentIndex(4)
            self.gui.filterDesignTypeComboBox.setCurrentIndex(int(params["wintype"]))
            self.gui.endofHpfStopBandEdit.setText(str(params["sbend"]))
            self.gui.startofHpfPassBandEdit.setText(str(params["pbstart"]))
            self.gui.hpfStopBandAttenEdit.setText(str(params["atten"]))
            if(params["wintype"] == self.EQUIRIPPLE_FILT):
                self.gui.hpfPassBandRippleEdit.setText(str(params["ripple"]))
        elif(params["filttype"] == "rrc"):
            self.gui.filterTypeComboBox.setCurrentIndex(5)
            self.gui.filterDesignTypeComboBox.setCurrentIndex(int(params["wintype"]))
            self.gui.rrcSymbolRateEdit.setText(str(params["srate"]))
            self.gui.rrcAlphaEdit.setText(str(params["alpha"]))
            self.gui.rrcNumTapsEdit.setText(str(params["ntaps"]))
        elif(params["filttype"] == "gaus"):
            self.gui.filterTypeComboBox.setCurrentIndex(6)
            self.gui.filterDesignTypeComboBox.setCurrentIndex(int(params["wintype"]))
            self.gui.gausSymbolRateEdit.setText(str(params["srate"]))
            self.gui.gausBTEdit.setText(str(params["bt"]))
            self.gui.gausNumTapsEdit.setText(str(params["ntaps"]))
    else:
        # IIR: rebuild from (b, a), mirror poles/zeros to both views.
        self.iir = True
        self.b, self.a = b_a["b"],b_a["a"]
        (self.z,self.p,self.k) = signal.tf2zpk(self.b, self.a)
        self.gui.pzPlot.insertZeros(self.z)
        self.gui.pzPlot.insertPoles(self.p)
        self.gui.mpzPlot.insertZeros(self.z)
        self.gui.mpzPlot.insertPoles(self.p)
        self.iir_plot_all(self.z,self.p,self.k)
        self.update_fcoeff()
        self.gui.nTapsEdit.setText("-")
        self.params = params
        # Set GUI for IIR type.
        # Lookup tables mapping saved parameter strings to combo indices
        # and parameter edit boxes.
        iirft = { "ellip" : 0,
                  "butter" : 1,
                  "cheby1" : 2,
                  "cheby2" : 3,
                  "bessel" : 4 }
        paramtype = { "analog" : 1,
                      "digital" : 0 }
        bandpos = {
            "lpf" : 0,
            "bpf" : 1,
            "bnf" : 2,
            "hpf" : 3}
        iirboxes = {"lpf" : [self.gui.iirendofLpfPassBandEdit,
                             self.gui.iirstartofLpfStopBandEdit,
                             self.gui.iirLpfPassBandAttenEdit,
                             self.gui.iirLpfStopBandRippleEdit],
                    "hpf" : [self.gui.iirstartofHpfPassBandEdit,
                             self.gui.iirendofHpfStopBandEdit,
                             self.gui.iirHpfPassBandAttenEdit,
                             self.gui.iirHpfStopBandRippleEdit],
                    "bpf" : [self.gui.iirstartofBpfPassBandEdit,
                             self.gui.iirendofBpfPassBandEdit,
                             self.gui.iirendofBpfStopBandEdit1,
                             self.gui.iirstartofBpfStopBandEdit2,
                             self.gui.iirBpfPassBandAttenEdit,
                             self.gui.iirBpfStopBandRippleEdit],
                    "bnf" : [self.gui.iirendofBsfPassBandEdit1,
                             self.gui.iirstartofBsfPassBandEdit2,
                             self.gui.iirstartofBsfStopBandEdit,
                             self.gui.iirendofBsfStopBandEdit,
                             self.gui.iirBsfPassBandAttenEdit,
                             self.gui.iirBsfStopBandRippleEdit] }
        self.gui.fselectComboBox.setCurrentIndex(1)
        self.gui.iirfilterTypeComboBox.setCurrentIndex(iirft[params["filttype"]])
        self.gui.iirfilterBandComboBox.setCurrentIndex(bandpos[params["bandtype"]])
        if params["filttype"] == "bessel":
            # critfreq was saved as a stringified list: strip brackets, split.
            critfreq = [float(x) for x in params["critfreq"][1:-1].split(',')]
            self.gui.besselordEdit.setText(str(params["filtord"]))
            self.gui.iirbesselcritEdit1.setText(str(critfreq[0]))
            self.gui.iirbesselcritEdit2.setText(str(critfreq[1]))
        else:
            self.gui.adComboBox.setCurrentIndex(paramtype[params["paramtype"]])
            # 4 boxes = lpf/hpf (scalar edges); 6 boxes = bpf/bnf (band edges).
            if len(iirboxes[params["bandtype"]]) == 4:
                sdata = [params["pbedge"], params["sbedge"], params["gpass"], params["gstop"]]
            else:
                pbedge = list(map(float, params["pbedge"][1:-1].split(',')))
                sbedge = list(map(float, params["sbedge"][1:-1].split(',')))
                sdata = [pbedge[0], pbedge[1], sbedge[0],
                         sbedge[1], params["gpass"], params["gstop"]]
            cboxes = iirboxes[params["bandtype"]]
            for i in range(len(cboxes)):
                cboxes[i].setText(str(sdata[i]))
    def draw_plots(self, taps, params):
        """Adopt a new filter design and refresh every result plot.

        :param taps: filter coefficients; stored as a numpy array
        :param params: dict of design parameters (must contain "fs" when
            truthy); a falsy value skips the curve updates — presumably no
            valid design exists yet
        """
        self.params = params
        self.taps = np.array(taps)
        if self.params:
            # Recompute the FFT once, then refresh all curves derived from it.
            self.get_fft(self.params["fs"], self.taps, self.nfftpts)
            self.update_time_curves()
            self.update_freq_curves()
            self.update_phase_curves()
            self.update_group_curves()
            self.update_pdelay_curves()
            self.update_step_curves()
            self.update_imp_curves()
        # Show the number of taps in the GUI, even for an empty design.
        self.gui.nTapsEdit.setText(str(self.taps.size))
class CustomViewBox(pg.ViewBox):
    """ViewBox with rubber-band (rectangle) zoom; right-click zooms out."""

    def __init__(self, *args, **kwds):
        pg.ViewBox.__init__(self, *args, **kwds)
        self.setMouseMode(self.RectMode)

    def mouseClickEvent(self, ev):
        # Reimplement right-click to zoom out; other buttons do nothing here.
        if ev.button() != QtCore.Qt.RightButton:
            return
        self.autoRange()

    def mouseDragEvent(self, ev):
        # Swallow right-button drags; delegate everything else to the base class.
        if ev.button() == QtCore.Qt.RightButton:
            ev.ignore()
            return
        pg.ViewBox.mouseDragEvent(self, ev)
def setup_options():
    """Create the OptionParser used by the filter design tool.

    :return: an OptionParser with the standard usage string and an
        empty description
    """
    return OptionParser(conflict_handler="resolve",
                        usage="%prog: [options] (input_filename)",
                        description="")
def launch(args, callback=None, restype=""):
    """Launch the filter design tool.

    Without a callback, the Qt event loop is run to completion and the
    resulting design is returned wrapped in an ApiObject. With a callback,
    the window object itself is returned and the caller drives the event
    loop.
    """
    parser = setup_options()
    (options, args) = parser.parse_args()
    if callback is not None:
        # Caller-driven mode: hand back the window; no event loop here.
        return gr_plot_filter(options, callback, restype)
    app = Qt.QApplication(args)
    gplt = gr_plot_filter(options, callback, restype)
    app.exec_()
    retobj = ApiObject()
    if gplt.iir:
        retobj.update_all("iir", gplt.params, (gplt.b, gplt.a), 1)
    else:
        retobj.update_all("fir", gplt.params, gplt.taps, 1)
    return retobj
def main(args):
    """Stand-alone entry point: show the filter design window, then exit."""
    # parse_args() rebinds args to the leftover (non-option) arguments,
    # which are then handed to QApplication.
    (options, args) = setup_options().parse_args()
    app = Qt.QApplication(args)
    gplt = gr_plot_filter(options)  # keep a reference so the window stays alive
    app.exec_()
    app.deleteLater()
    sys.exit()


if __name__ == '__main__':
    main(sys.argv)
| sdh11/gnuradio | gr-filter/python/filter/design/filter_design.py | Python | gpl-3.0 | 91,770 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'qt\ui\fontgen.ui'
#
# Created: Mon Jun 03 01:17:17 2013
# by: PyQt4 UI code generator 4.8.5
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
    # PyQt4 API v1: QString.fromUtf8 converts the generated UTF-8 byte
    # literals used throughout setupUi().
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # API v2 builds expose no QString; literals pass through unchanged.
    _fromUtf8 = lambda s: s
class Ui_FontGenerator(object):
    """Auto-generated (pyuic4) UI definition for the Font Generator dialog.

    Do not edit by hand — regenerate from 'qt/ui/fontgen.ui' instead; all
    manual changes will be lost (see the header of this file).
    """
    def setupUi(self, FontGenerator):
        """Build all widgets, layouts, signal connections and the tab order
        on the given FontGenerator dialog."""
        # Window properties and icon.
        FontGenerator.setObjectName(_fromUtf8("FontGenerator"))
        FontGenerator.resize(570, 493)
        FontGenerator.setWindowTitle(QtGui.QApplication.translate("FontGenerator", "Font Generator - untitled[*]", None, QtGui.QApplication.UnicodeUTF8))
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/monokuma-green.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        FontGenerator.setWindowIcon(icon)
        # Root vertical layout of the dialog.
        self.verticalLayout = QtGui.QVBoxLayout(FontGenerator)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        # --- Top button row: New / Save / Save As / Open ... Generate ---
        self.horizontalLayout_2 = QtGui.QHBoxLayout()
        self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
        self.btnNew = QtGui.QPushButton(FontGenerator)
        self.btnNew.setText(QtGui.QApplication.translate("FontGenerator", "&New", None, QtGui.QApplication.UnicodeUTF8))
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/report.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnNew.setIcon(icon1)
        self.btnNew.setShortcut(QtGui.QApplication.translate("FontGenerator", "Ctrl+N", None, QtGui.QApplication.UnicodeUTF8))
        self.btnNew.setAutoDefault(False)
        self.btnNew.setObjectName(_fromUtf8("btnNew"))
        self.horizontalLayout_2.addWidget(self.btnNew)
        self.btnSave = QtGui.QPushButton(FontGenerator)
        self.btnSave.setText(QtGui.QApplication.translate("FontGenerator", "&Save", None, QtGui.QApplication.UnicodeUTF8))
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/disk.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnSave.setIcon(icon2)
        self.btnSave.setShortcut(QtGui.QApplication.translate("FontGenerator", "Ctrl+S", None, QtGui.QApplication.UnicodeUTF8))
        self.btnSave.setAutoDefault(False)
        self.btnSave.setObjectName(_fromUtf8("btnSave"))
        self.horizontalLayout_2.addWidget(self.btnSave)
        self.btnSaveAs = QtGui.QPushButton(FontGenerator)
        self.btnSaveAs.setText(QtGui.QApplication.translate("FontGenerator", "Save As...", None, QtGui.QApplication.UnicodeUTF8))
        self.btnSaveAs.setShortcut(QtGui.QApplication.translate("FontGenerator", "Ctrl+Shift+S", None, QtGui.QApplication.UnicodeUTF8))
        self.btnSaveAs.setAutoDefault(False)
        self.btnSaveAs.setObjectName(_fromUtf8("btnSaveAs"))
        self.horizontalLayout_2.addWidget(self.btnSaveAs)
        self.btnLoad = QtGui.QPushButton(FontGenerator)
        self.btnLoad.setText(QtGui.QApplication.translate("FontGenerator", "&Open", None, QtGui.QApplication.UnicodeUTF8))
        icon3 = QtGui.QIcon()
        icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/folder.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnLoad.setIcon(icon3)
        self.btnLoad.setShortcut(QtGui.QApplication.translate("FontGenerator", "Ctrl+O", None, QtGui.QApplication.UnicodeUTF8))
        self.btnLoad.setAutoDefault(False)
        self.btnLoad.setObjectName(_fromUtf8("btnLoad"))
        self.horizontalLayout_2.addWidget(self.btnLoad)
        # Spacer pushes the Generate button to the right edge.
        spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout_2.addItem(spacerItem)
        self.btnGenerateFont = QtGui.QPushButton(FontGenerator)
        self.btnGenerateFont.setText(QtGui.QApplication.translate("FontGenerator", "&Generate", None, QtGui.QApplication.UnicodeUTF8))
        icon4 = QtGui.QIcon()
        icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/cog.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnGenerateFont.setIcon(icon4)
        self.btnGenerateFont.setShortcut(QtGui.QApplication.translate("FontGenerator", "Ctrl+G", None, QtGui.QApplication.UnicodeUTF8))
        self.btnGenerateFont.setAutoDefault(False)
        self.btnGenerateFont.setObjectName(_fromUtf8("btnGenerateFont"))
        self.horizontalLayout_2.addWidget(self.btnGenerateFont)
        self.verticalLayout.addLayout(self.horizontalLayout_2)
        # Horizontal separator line below the button row.
        self.line = QtGui.QFrame(FontGenerator)
        self.line.setFrameShape(QtGui.QFrame.HLine)
        self.line.setFrameShadow(QtGui.QFrame.Sunken)
        self.line.setObjectName(_fromUtf8("line"))
        self.verticalLayout.addWidget(self.line)
        # Tab widget holding one font-definition page per tab.
        self.tabFonts = QtGui.QTabWidget(FontGenerator)
        self.tabFonts.setTabsClosable(False)
        self.tabFonts.setMovable(True)
        self.tabFonts.setObjectName(_fromUtf8("tabFonts"))
        self.verticalLayout.addWidget(self.tabFonts)
        # --- Add/Remove tab row ---
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.btnNewTab = QtGui.QPushButton(FontGenerator)
        self.btnNewTab.setText(QtGui.QApplication.translate("FontGenerator", "Add Tab", None, QtGui.QApplication.UnicodeUTF8))
        icon5 = QtGui.QIcon()
        icon5.addPixmap(QtGui.QPixmap(_fromUtf8(":/add.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnNewTab.setIcon(icon5)
        self.btnNewTab.setAutoDefault(False)
        self.btnNewTab.setObjectName(_fromUtf8("btnNewTab"))
        self.horizontalLayout.addWidget(self.btnNewTab)
        self.btnRemoveTab = QtGui.QPushButton(FontGenerator)
        self.btnRemoveTab.setText(QtGui.QApplication.translate("FontGenerator", "Remove", None, QtGui.QApplication.UnicodeUTF8))
        icon6 = QtGui.QIcon()
        icon6.addPixmap(QtGui.QPixmap(_fromUtf8(":/delete.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnRemoveTab.setIcon(icon6)
        self.btnRemoveTab.setAutoDefault(False)
        self.btnRemoveTab.setObjectName(_fromUtf8("btnRemoveTab"))
        self.horizontalLayout.addWidget(self.btnRemoveTab)
        spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem1)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # --- "Export" group: target checkboxes, font choice, tab priority ---
        self.groupBox = QtGui.QGroupBox(FontGenerator)
        self.groupBox.setTitle(QtGui.QApplication.translate("FontGenerator", "Export", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox.setObjectName(_fromUtf8("groupBox"))
        self.horizontalLayout_3 = QtGui.QHBoxLayout(self.groupBox)
        self.horizontalLayout_3.setContentsMargins(-1, 4, -1, 8)
        self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
        self.verticalLayout_2 = QtGui.QVBoxLayout()
        self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
        self.chkGenForGame = QtGui.QCheckBox(self.groupBox)
        self.chkGenForGame.setText(QtGui.QApplication.translate("FontGenerator", "Export to umdimage2", None, QtGui.QApplication.UnicodeUTF8))
        self.chkGenForGame.setChecked(True)
        self.chkGenForGame.setObjectName(_fromUtf8("chkGenForGame"))
        self.verticalLayout_2.addWidget(self.chkGenForGame)
        self.chkGenForEditor = QtGui.QCheckBox(self.groupBox)
        self.chkGenForEditor.setText(QtGui.QApplication.translate("FontGenerator", "Export to editor GFX dir", None, QtGui.QApplication.UnicodeUTF8))
        self.chkGenForEditor.setChecked(True)
        self.chkGenForEditor.setObjectName(_fromUtf8("chkGenForEditor"))
        self.verticalLayout_2.addWidget(self.chkGenForEditor)
        self.horizontalLayout_3.addLayout(self.verticalLayout_2)
        # Radio buttons choosing which in-game font slot is generated.
        self.verticalLayout_3 = QtGui.QVBoxLayout()
        self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
        self.rdoGenFont1 = QtGui.QRadioButton(self.groupBox)
        self.rdoGenFont1.setText(QtGui.QApplication.translate("FontGenerator", "Font 01 (regular text)", None, QtGui.QApplication.UnicodeUTF8))
        self.rdoGenFont1.setChecked(True)
        self.rdoGenFont1.setObjectName(_fromUtf8("rdoGenFont1"))
        self.verticalLayout_3.addWidget(self.rdoGenFont1)
        self.rdoGenFont2 = QtGui.QRadioButton(self.groupBox)
        self.rdoGenFont2.setText(QtGui.QApplication.translate("FontGenerator", "Font 02 (Class Trial minigame text)", None, QtGui.QApplication.UnicodeUTF8))
        self.rdoGenFont2.setChecked(False)
        self.rdoGenFont2.setObjectName(_fromUtf8("rdoGenFont2"))
        self.verticalLayout_3.addWidget(self.rdoGenFont2)
        self.horizontalLayout_3.addLayout(self.verticalLayout_3)
        # "Tab Priority" sub-group: order in which tabs take precedence.
        self.groupBox_2 = QtGui.QGroupBox(self.groupBox)
        self.groupBox_2.setTitle(QtGui.QApplication.translate("FontGenerator", "Tab Priority", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_2.setAlignment(QtCore.Qt.AlignCenter)
        self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
        self.horizontalLayout_6 = QtGui.QHBoxLayout(self.groupBox_2)
        self.horizontalLayout_6.setContentsMargins(-1, 4, -1, 8)
        self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
        self.rdoLeftToRight = QtGui.QRadioButton(self.groupBox_2)
        self.rdoLeftToRight.setText(QtGui.QApplication.translate("FontGenerator", "Left to right", None, QtGui.QApplication.UnicodeUTF8))
        self.rdoLeftToRight.setChecked(True)
        self.rdoLeftToRight.setObjectName(_fromUtf8("rdoLeftToRight"))
        self.horizontalLayout_6.addWidget(self.rdoLeftToRight)
        self.rdoRightToLeft = QtGui.QRadioButton(self.groupBox_2)
        self.rdoRightToLeft.setText(QtGui.QApplication.translate("FontGenerator", "Right to left", None, QtGui.QApplication.UnicodeUTF8))
        self.rdoRightToLeft.setObjectName(_fromUtf8("rdoRightToLeft"))
        self.horizontalLayout_6.addWidget(self.rdoRightToLeft)
        self.horizontalLayout_3.addWidget(self.groupBox_2)
        spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout_3.addItem(spacerItem2)
        self.verticalLayout.addWidget(self.groupBox)
        # --- Bottom row with the Close button ---
        self.horizontalLayout_4 = QtGui.QHBoxLayout()
        self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
        spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout_4.addItem(spacerItem3)
        self.btnClose = QtGui.QPushButton(FontGenerator)
        self.btnClose.setText(QtGui.QApplication.translate("FontGenerator", "Close", None, QtGui.QApplication.UnicodeUTF8))
        self.btnClose.setAutoDefault(False)
        self.btnClose.setObjectName(_fromUtf8("btnClose"))
        self.horizontalLayout_4.addWidget(self.btnClose)
        self.verticalLayout.addLayout(self.horizontalLayout_4)
        self.retranslateUi(FontGenerator)
        self.tabFonts.setCurrentIndex(-1)
        # Wire widget signals to the dialog's handler methods.
        QtCore.QObject.connect(self.btnNewTab, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.add_tab)
        QtCore.QObject.connect(self.btnRemoveTab, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.remove_tab)
        QtCore.QObject.connect(self.btnGenerateFont, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.generate_font)
        QtCore.QObject.connect(self.btnClose, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.accept)
        QtCore.QObject.connect(self.chkGenForGame, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
        QtCore.QObject.connect(self.chkGenForEditor, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
        QtCore.QObject.connect(self.rdoGenFont1, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
        QtCore.QObject.connect(self.rdoGenFont2, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
        QtCore.QObject.connect(self.rdoLeftToRight, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
        QtCore.QObject.connect(self.rdoRightToLeft, QtCore.SIGNAL(_fromUtf8("toggled(bool)")), FontGenerator.export_changed)
        QtCore.QObject.connect(self.btnNew, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.new_clicked)
        QtCore.QObject.connect(self.btnSave, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.save_clicked)
        QtCore.QObject.connect(self.btnSaveAs, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.save_as_clicked)
        QtCore.QObject.connect(self.btnLoad, QtCore.SIGNAL(_fromUtf8("clicked()")), FontGenerator.load_clicked)
        QtCore.QMetaObject.connectSlotsByName(FontGenerator)
        # Explicit keyboard tab order across the dialog.
        FontGenerator.setTabOrder(self.tabFonts, self.btnNewTab)
        FontGenerator.setTabOrder(self.btnNewTab, self.btnRemoveTab)
        FontGenerator.setTabOrder(self.btnRemoveTab, self.chkGenForGame)
        FontGenerator.setTabOrder(self.chkGenForGame, self.chkGenForEditor)
        FontGenerator.setTabOrder(self.chkGenForEditor, self.rdoGenFont1)
        FontGenerator.setTabOrder(self.rdoGenFont1, self.rdoGenFont2)
        FontGenerator.setTabOrder(self.rdoGenFont2, self.rdoLeftToRight)
        FontGenerator.setTabOrder(self.rdoLeftToRight, self.rdoRightToLeft)
        FontGenerator.setTabOrder(self.rdoRightToLeft, self.btnNew)
        FontGenerator.setTabOrder(self.btnNew, self.btnSave)
        FontGenerator.setTabOrder(self.btnSave, self.btnSaveAs)
        FontGenerator.setTabOrder(self.btnSaveAs, self.btnLoad)
        FontGenerator.setTabOrder(self.btnLoad, self.btnGenerateFont)
        FontGenerator.setTabOrder(self.btnGenerateFont, self.btnClose)
    def retranslateUi(self, FontGenerator):
        """No-op: all texts are set inline in setupUi via translate() calls."""
        pass
import icons_rc
| ThunderGemios10/The-Super-Duper-Script-Editor | ui_fontgenerator.py | Python | gpl-3.0 | 13,740 |
# (C) British Crown Copyright 2011 - 2012, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from matplotlib.testing.decorators import image_comparison as mpl_image_comparison
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from matplotlib.collections import PatchCollection
from matplotlib.path import Path
import shapely.geometry
import cartopy.crs as ccrs
import cartopy.mpl_integration.patch as cpatch
from cartopy.tests.mpl import image_comparison
@image_comparison(baseline_images=['poly_interiors'])
def test_polygon_interiors():
    """Image test: polygons with interior rings survive the path<->geos
    round-trip and render correctly in native and non-native projections."""
    ax = plt.subplot(211, projection=ccrs.PlateCarree())
    ax.coastlines()
    ax.set_global() # XXX could be the default???
    # Path with an exterior ring plus one interior ring; code 1 = MOVETO,
    # 2 = LINETO, 79 = CLOSEPOLY.
    pth = Path([[0, 45], [60, 45], [60, -45], [0, -45], [0, -45],
                [10, 20], [10, -20], [40, -20], [40, 20], [10, -20]],
               [1, 2, 2, 2, 79, 1, 2, 2 , 2, 79])
    patches_native = []
    patches = []
    for geos in cpatch.path_to_geos(pth):
        # NOTE(review): the loop variable 'pth' shadows the outer Path above.
        for pth in cpatch.geos_to_path(geos):
            patches.append(mpatches.PathPatch(pth))
        # buffer by 10 degrees (leaves a small hole in the middle)
        geos_buffered = geos.buffer(10)
        for pth in cpatch.geos_to_path(geos_buffered):
            patches_native.append(mpatches.PathPatch(pth))
    # Buffered polygons drawn in the axes' own (native) projection.
    collection = PatchCollection(patches_native, facecolor='red', alpha=0.4,
                                 transform=ax.projection
                                 )
    ax.add_collection(collection)
    # Original polygons drawn through a Geodetic transform.
    collection = PatchCollection(patches, facecolor='yellow', alpha=0.4,
                                 transform=ccrs.Geodetic()
                                 )
    ax.add_collection(collection)
    # test multiple interior polygons
    ax = plt.subplot(212, projection=ccrs.PlateCarree(), xlim=[-5, 15], ylim=[-5, 15])
    ax.coastlines()
    exterior = np.array(shapely.geometry.box(0, 0, 12, 12).exterior.coords)
    # Interior rings must wind opposite to the exterior, hence ccw=False.
    interiors = [
        np.array(shapely.geometry.box(1, 1, 2, 2, ccw=False).exterior.coords),
        np.array(shapely.geometry.box(1, 8, 2, 9, ccw=False).exterior.coords),
    ]
    poly = shapely.geometry.Polygon(exterior, interiors)
    patches = []
    for pth in cpatch.geos_to_path(poly):
        patches.append(mpatches.PathPatch(pth))
    collection = PatchCollection(patches, facecolor='yellow', alpha=0.4,
                                 transform=ccrs.Geodetic()
                                 )
    ax.add_collection(collection)
@image_comparison(baseline_images=['contour_with_interiors'])
def test_contour_interiors():
    """Image test: filled contours producing polygons with holes (and
    degenerate zero-area polygons) render in native and non-native
    projections."""
    # ############## produces a polygon with multiple holes:
    nx, ny = 10, 10
    numlev = 2
    lons, lats = np.meshgrid(np.linspace(-50, 50, nx), np.linspace(-45, 45, ny))
    data = np.sin(np.sqrt(lons**2 + lats**2))
    ax = plt.subplot(221, projection=ccrs.PlateCarree())
    ax.set_global()
    plt.title("Native projection")
    plt.contourf(lons, lats, data, numlev, transform=ccrs.PlateCarree())
    ax.coastlines()
    plt.subplot(222, projection=ccrs.PlateCarree())
    plt.title("Non-native projection")
    ax = plt.gca()
    ax.set_global()
    plt.contourf(lons, lats, data, numlev, transform=ccrs.Geodetic())
    ax.coastlines()
    ############## produces singular polygons (zero area polygons)
    numlev = 2
    x, y = np.meshgrid(np.arange(-5.5, 5.5, 0.25), np.arange(-5.5, 5.5, 0.25))
    dim = x.shape[0]
    # NOTE(review): the extra 'Z =' binding is unused.
    data = Z = np.sin(np.sqrt(x**2 + y**2))
    lats = np.arange(dim) + 30
    lons = np.arange(dim) - 20
    ax = plt.subplot(223, projection=ccrs.PlateCarree())
    ax.set_global()
    plt.title("Native projection")
    plt.contourf(lons, lats, data, numlev, transform=ccrs.PlateCarree())
    ax.coastlines()
    plt.subplot(224, projection=ccrs.PlateCarree())
    plt.title("Non-native projection")
    ax = plt.gca()
    ax.set_global()
    cs = plt.contourf(lons, lats, data, numlev, transform=ccrs.Geodetic())
    ax.coastlines()
if __name__=='__main__':
import nose
nose.runmodule(argv=['-s','--with-doctest'], exit=False) | marqh/cartopy | lib/cartopy/tests/mpl/test_shapely_to_mpl.py | Python | gpl-3.0 | 4,821 |
def changeAllO(file, out):
    """Rewrite a word/concept file, replacing every 'O' (outside) concept
    with a '$-<word>' placeholder derived from the word itself.

    Input lines are 'word<TAB>concept'. Concepts starting with 'I' or 'B'
    (inside/begin chunk tags) are copied unchanged; lines without a tab
    (sentence separators) are written out as blank lines.

    :param file: path of the input word/concept file
    :param out: path of the rewritten output file
    """
    # Context managers guarantee both handles are closed even on error;
    # iterating the handle streams line by line instead of loading the
    # whole file into memory with readlines().
    with open(file) as src, open(out, "w") as dst:
        for line in src:
            fields = line.split("\t")
            if len(fields) > 1:
                if fields[1][:1] in ("I", "B"):
                    # Chunk tag (I-/B-): keep the original line untouched.
                    dst.write(line)
                else:
                    # 'O' concept: substitute the word-derived placeholder.
                    dst.write(fields[0] + "\t" + "$-" + fields[0] + "\n")
            else:
                # No tab: sentence boundary, pass through as a blank line.
                dst.write("\n")
changeAllO("TRAIN.txt", "NLSPARQL.train.data")
changeAllO("TEST.txt", "NLSPARQL.test.data") | marcomanciniunitn/Final-LUS-project | RNN/rnn/lus_rnn_lab/rnn_slu/data/new_data/word-pos-enhanced/enhance.py | Python | gpl-3.0 | 467 |
#!/usr/bin/env python3
# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab
#########################################################################
# Copyright 2019- Martin Sinn m.sinn@gmx.de
#########################################################################
# This file is part of SmartHomeNG
#
# SmartHomeNG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SmartHomeNG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SmartHomeNG If not, see <http://www.gnu.org/licenses/>.
#########################################################################
import threading
from lib.module import Modules
from lib.model.smartplugin import SmartPlugin
from lib.shtime import Shtime
class MqttPlugin(SmartPlugin):
    """
    Extension of SmartPlugin for plugins that communicate over MQTT.

    Wraps the core 'mqtt' module: manages per-item topic subscriptions,
    publishes payloads, and keeps a dict of last item values for display
    in the plugin's web interface.
    """

    # NOTE(review): this used to be the only storage and, being a class
    # attribute, was shared by every MQTT plugin instance. A per-instance
    # dict is now created in __init__(); the class attribute remains only
    # as a fallback for code that reads it before __init__ ran.
    _item_values = {}                    # dict of dicts

    # Initialization of SmartPlugin class called by super().__init__() from the plugin's __init__() method
    def __init__(self):
        """
        Initialization Routine for the mqtt extension class to SmartPlugin

        Returns False (and sets self._init_complete = False) if the core's
        'mqtt' module is not loaded, True otherwise. The return value is
        only meaningful to subclasses that explicitly check it.
        """
        SmartPlugin.__init__(self)

        # per-instance storage, so plugin instances do not share item values
        self._item_values = {}

        # get instance of MQTT module
        try:
            self.mod_mqtt = Modules.get_instance().get_module('mqtt')  # try/except to handle running in a core version that does not support modules
        except Exception:
            self.mod_mqtt = None
        if self.mod_mqtt is None:
            self.logger.error("Module 'mqtt' not loaded. The plugin is not starting")
            self._init_complete = False
            return False

        self._subscribed_topics_lock = threading.Lock()
        self._subscribed_topics = {}        # subscribed topics (a dict of dicts)
        self._subscribe_current_number = 0  # current number of the subscription entry
        self._subscriptions_started = False

        # get broker configuration (for display in web interface)
        self.broker_config = self.mod_mqtt.get_broker_config()

        return True

    def start_subscriptions(self):
        """
        Start subscription to all topics

        Should be called from the run method of a plugin
        """
        if self.mod_mqtt:
            with self._subscribed_topics_lock:
                for topic in self._subscribed_topics:
                    # start subscription to all items for this topic
                    for item_path in self._subscribed_topics[topic]:
                        self._start_subscription(topic, item_path)
            self._subscriptions_started = True
        return

    def stop_subscriptions(self):
        """
        Stop subscription to all topics

        Should be called from the stop method of a plugin
        """
        if self.mod_mqtt:
            with self._subscribed_topics_lock:
                for topic in self._subscribed_topics:
                    # stop subscription to all items for this topic
                    for item_path in self._subscribed_topics[topic]:
                        current = str(self._subscribed_topics[topic][item_path]['current'])
                        self.logger.info("stop(): Unsubscribing from topic {} for item {}".format(topic, item_path))
                        self.mod_mqtt.unsubscribe_topic(self.get_shortname() + '-' + current, topic)
            self._subscriptions_started = False
        return

    def _start_subscription(self, topic, item_path):
        """Subscribe to a single topic/item entry of self._subscribed_topics."""
        current = str(self._subscribed_topics[topic][item_path]['current'])
        qos = self._subscribed_topics[topic][item_path].get('qos', None)
        payload_type = self._subscribed_topics[topic][item_path].get('payload_type', None)
        callback = self._subscribed_topics[topic][item_path].get('callback', None)
        bool_values = self._subscribed_topics[topic][item_path].get('bool_values', None)
        self.logger.info("_start_subscription: Subscribing to topic {}, payload_type '{}' for item {} (callback={})".format(topic, payload_type, item_path, callback))
        self.mod_mqtt.subscribe_topic(self.get_shortname() + '-' + current, topic, callback=callback,
                                      qos=qos, payload_type=payload_type, bool_values=bool_values)
        return

    def add_subscription(self, topic, payload_type, bool_values=None, item=None, callback=None):
        """
        Add mqtt subscription to subscribed_topics list

        subscribing is done directly, if subscriptions have been started by self.start_subscriptions()

        :param topic: topic to subscribe to
        :param payload_type: payload type of the topic (for this subscription to the topic)
        :param bool_values: bool values (for this subscription to the topic)
        :param item: item that should receive the payload as value. Used by the standard handler (if no callback function is specified)
        :param callback: a plugin can provide an own callback function, if special handling of the payload is needed
        :return:
        """
        with self._subscribed_topics_lock:
            # test if topic is new
            if not self._subscribed_topics.get(topic, None):
                self._subscribed_topics[topic] = {}
            # add this item to topic
            if item is None:
                item_path = '*no_item*'
            else:
                item_path = item.path()
            self._subscribed_topics[topic][item_path] = {}
            self._subscribe_current_number += 1
            self._subscribed_topics[topic][item_path]['current'] = self._subscribe_current_number
            self._subscribed_topics[topic][item_path]['item'] = item
            self._subscribed_topics[topic][item_path]['qos'] = None
            self._subscribed_topics[topic][item_path]['payload_type'] = payload_type
            if callback:
                self._subscribed_topics[topic][item_path]['callback'] = callback
            else:
                # default handler writes the payload into the item
                self._subscribed_topics[topic][item_path]['callback'] = self._on_mqtt_message
            self._subscribed_topics[topic][item_path]['bool_values'] = bool_values
        if self._subscriptions_started:
            # directly subscribe to added subscription, if subscriptions are started
            self._start_subscription(topic, item_path)
        return

    def publish_topic(self, topic, payload, item=None, qos=None, retain=False, bool_values=None):
        """
        Publish a topic to mqtt

        :param topic: topic to publish
        :param payload: payload to publish
        :param item: item (if relevant)
        :param qos: qos for this message (optional)
        :param retain: retain flag for this message (optional)
        :param bool_values: bool values (for publishing this topic, optional)
        :return:
        """
        self.mod_mqtt.publish_topic(self.get_shortname(), topic, payload, qos, retain, bool_values)
        if item is not None:
            self.logger.info("publish_topic: Item '{}' -> topic '{}', payload '{}', QoS '{}', retain '{}'".format(item.id(), topic, payload, qos, retain))
            # Update dict for periodic updates of the web interface
            self._update_item_values(item, payload)
        else:
            self.logger.info("publish_topic: topic '{}', payload '{}', QoS '{}', retain '{}'".format(topic, payload, qos, retain))
        return

    # ----------------------------------------------------------------------------------------
    #  methods to handle the broker connection
    # ----------------------------------------------------------------------------------------

    _broker_version = '?'
    _broker = {}
    broker_config = {}
    broker_monitoring = False

    def get_broker_info(self):
        # Refresh cached broker state from the mqtt module (for the web interface)
        if self.mod_mqtt:
            (self._broker, self.broker_monitoring) = self.mod_mqtt.get_broker_info()

    def broker_uptime(self):
        """
        Return formatted uptime of broker
        """
        if self.shtime is None:
            self.shtime = Shtime.get_instance()
        try:
            return self.shtime.seconds_to_displaystring(int(self._broker['uptime']))
        except Exception:
            # uptime not (yet) known
            return '-'

    def mqtt_init(self):
        """
        Dummy method - should not be called any more

        :return: Bool value True
        :rtype: bool
        """
        self.logger.warning("'mqtt_init()' method called. it is not used anymore. The Plugin should remove the call to mqtt_init(), use 'super.__init__()' instead")
        return True

    # -----------------------------------------------------------------------

    def _on_mqtt_message(self, topic, payload, qos=None, retain=None):
        """
        Callback function to handle received messages

        :param topic:
        :param payload:
        :param qos:
        :param retain:
        """
        self.logger.debug("_on_mqtt_message: Received topic '{}', payload '{} (type {})', QoS '{}', retain '{}' ".format(topic, payload, type(payload), qos, retain))
        # get item for topic
        if self._subscribed_topics.get(topic, None):
            # at least 1 item has subscribed to this topic
            for item_path in self._subscribed_topics[topic]:
                item = self._subscribed_topics[topic][item_path].get('item', None)
                if item is not None:
                    # log at INFO level only when the payload differs from the item value
                    try:
                        log_info = (float(payload) != float(item()))
                    except Exception:
                        log_info = (str(payload) != str(item()))
                    if log_info:
                        self.logger.info("_on_mqtt_message: Received topic '{}', payload '{}' (type {}), QoS '{}', retain '{}' for item '{}'".format( topic, payload, item.type(), qos, retain, item.id() ))
                    else:
                        self.logger.debug("_on_mqtt_message: Received topic '{}', payload '{}' (type {}), QoS '{}', retain '{}' for item '{}'".format(topic, payload, item.type(), qos, retain, item.id()))
                    item(payload, self.get_shortname())
                    # Update dict for periodic updates of the web interface
                    self._update_item_values(item, payload)
        else:
            self.logger.error("_on_mqtt_message: No definition found for subscribed topic '{}'".format(topic))
        return

    def _update_item_values(self, item, payload):
        """
        Update dict for periodic updates of the web interface

        :param item:
        :param payload:
        """
        if not self._item_values.get(item.id()):
            self._item_values[item.id()] = {}
        if isinstance(payload, bool):
            self._item_values[item.id()]['value'] = str(payload)
        else:
            self._item_values[item.id()]['value'] = payload
        self._item_values[item.id()]['last_update'] = item.last_update().strftime('%d.%m.%Y %H:%M:%S')
        self._item_values[item.id()]['last_change'] = item.last_change().strftime('%d.%m.%Y %H:%M:%S')
        return
| smarthomeNG/smarthome | lib/model/mqttplugin.py | Python | gpl-3.0 | 11,395 |
from importlib import import_module
from django.apps import AppConfig as BaseAppConfig
class AppConfig(BaseAppConfig):
    """Django app configuration for the 'gestioneide' application."""
    name = "gestioneide"
    def ready(self):
        # Import the receivers module for its side effects: importing it
        # registers the app's signal handlers once the app registry is ready.
        import_module("gestioneide.receivers")
| Etxea/gestioneide | gestioneide/apps.py | Python | gpl-3.0 | 217 |
# coding: utf-8
"""Find the path to picasso dynamic library files."""
import os
import platform
import sys
class PicassoLibraryNotFound(Exception):
    """Error thrown when the picasso shared library is not found."""
    pass


def find_lib_path():
    """Find the path to picasso dynamic library files.

    :return: List of all found library paths to picasso
    :rtype: list(string)
    :raises PicassoLibraryNotFound: if no library file exists and the
        ``PICASSO_BUILD_DOC`` environment variable is not set
    """
    curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
    dll_path = [os.path.join(curr_path, './lib/')]
    if sys.platform == 'win32':
        # Windows builds may produce a native .dll or a MinGW-style .so.
        dll_path = [os.path.join(p, 'picasso.dll') for p in dll_path] \
                   + [os.path.join(p, 'libpicasso.so') for p in dll_path]
    elif sys.platform.startswith('linux'):
        dll_path = [os.path.join(p, 'libpicasso.so') for p in dll_path]
    elif sys.platform == 'darwin':
        # macOS builds may name the library .so or .dylib depending on toolchain.
        dll_path = [os.path.join(p, 'libpicasso.so') for p in dll_path] \
                   + [os.path.join(p, 'libpicasso.dylib') for p in dll_path]
    lib_path = [p for p in dll_path if os.path.exists(p) and os.path.isfile(p)]
    # From github issues, most of installation errors come from machines w/o compilers.
    # BUGFIX: a previous revision printed a message and returned the empty
    # list here, which made the exception below unreachable dead code.
    if not lib_path and not os.environ.get('PICASSO_BUILD_DOC', False):
        raise PicassoLibraryNotFound(
            'Cannot find Picasso Library in the candidate path, ' +
            'did you install compilers and make the project in root path?\n'
            'List of candidates:\n' + ('\n'.join(dll_path)))
    return lib_path
| jasonge27/picasso | python-package/pycasso/libpath.py | Python | gpl-3.0 | 1,608 |
import datetime
import pytz
from django.utils import timezone
from django.contrib.auth.models import User
from django.test import TestCase
from gerencex.core.models import HoursBalance, Timing, Office
from gerencex.core.time_calculations import DateData
class HoursBalanceModelTest(TestCase):
    """Checks that HoursBalance rows chain their running balance via signals."""

    @classmethod
    def setUpTestData(cls):
        cls.user = User.objects.create_user('testuser', 'test@user.com', 'senha123')

    def test_balances(self):
        six_hours = datetime.timedelta(hours=6).seconds
        seven_hours = datetime.timedelta(hours=7).seconds

        first = HoursBalance.objects.create(
            date=datetime.date(2016, 8, 18),
            user=self.user,
            credit=six_hours,
            debit=seven_hours,
        )
        # The row was persisted.
        self.assertTrue(HoursBalance.objects.exists())
        # With no previous row, the balance is simply credit - debit
        # (see the total_balance_handler function at signals.py).
        self.assertEqual(first.balance,
                         int(datetime.timedelta(hours=-1).total_seconds()))

        second = HoursBalance.objects.create(
            date=datetime.date(2016, 8, 19),
            user=self.user,
            credit=six_hours,
            debit=seven_hours,
        )
        # The second row accumulates on top of the first one
        # (see the total_balance_handler function at signals.py).
        self.assertEqual(second.balance,
                         int(datetime.timedelta(hours=-2).total_seconds()))

        # Changing the first row's credit or debit must ripple into the
        # second row's balance (see next_balance_handler at signals.py).
        first.credit = seven_hours
        first.save()
        second = HoursBalance.objects.get(pk=2)
        self.assertEqual(second.balance,
                         int(datetime.timedelta(hours=-1).total_seconds()))
class CreditTriggerTest(TestCase):
    """
    The user credit is always registered at HourBalance via signal, when a checkout occurs.
    See the 'credit_calculation' function, at signals.py
    """
    @classmethod
    def setUpTestData(cls):
        # Office carries the tolerances and regular hours used by the signal.
        Office.objects.create(name='Nenhuma lotação',
                              initials='NL',
                              regular_work_hours=datetime.timedelta(hours=6))
        User.objects.create_user('testuser', 'test@user.com', 'senha123')
        cls.user = User.objects.get(username='testuser')
    def test_credit_triggers(self):
        """Editing check-in/check-out records must re-trigger credit calculation."""
        # Let's record a check in...
        t1 = Timing.objects.create(
            user=self.user,
            date_time=timezone.make_aware(datetime.datetime(2016, 10, 3, 12, 0, 0, 0)),
            checkin=True
        )
        # ...and a checkout
        t2 = Timing.objects.create(
            user=self.user,
            date_time=timezone.make_aware(datetime.datetime(2016, 10, 3, 13, 0, 0, 0)),
            checkin=False
        )
        # Let's record a balance line at HoursBalance
        date = datetime.date(2016, 10, 3)
        new_credit = DateData(self.user, date).credit().seconds
        new_debit = DateData(self.user, date).debit().seconds
        HoursBalance.objects.create(
            date=date,
            user=self.user,
            credit=new_credit,
            debit=new_debit
        )
        # Let's change t2 (checkout record)
        t2.date_time += datetime.timedelta(hours=1)
        t2.save()
        # The balance must have been recalculated via django signal (signals.py)
        # Expected credit = worked time (now 2h) plus both office tolerances.
        checkout_tolerance = self.user.userdetail.office.checkout_tolerance
        checkin_tolerance = self.user.userdetail.office.checkin_tolerance
        tolerance = checkout_tolerance + checkin_tolerance
        reference = datetime.timedelta(hours=2).seconds + tolerance.seconds
        line = HoursBalance.objects.first()
        credit = line.credit
        self.assertEqual(reference, credit)
        # Let's change t1 (checkin record)
        t1.date_time += datetime.timedelta(hours=1)
        t1.save()
        # The balance must have been recalculated via signal
        # Worked time shrinks back to 1h; tolerances still apply.
        modified_reference = datetime.timedelta(hours=1).seconds + tolerance.seconds
        modified_balance_line = HoursBalance.objects.first()
        modified_credit = modified_balance_line.credit
        self.assertEqual(modified_reference, modified_credit)
# TODO: Escrever o teste depois que já houver view para produzir o balanço da divisão e do usuário
class RestdayDebitTriggerTest(TestCase):
    """
    When we record a Restday whose date is prior to the date of the Balance, the balances must
    be recalculated for all users.
    """
    @classmethod
    def setUpTestData(cls):
        # Two offices with different hours-control start dates.
        cls.diacomp1 = Office.objects.create(name='Diacomp 1', initials='diacomp1')
        cls.diacomp2 = Office.objects.create(name='Diacomp 2', initials='diacomp2')
        cls.diacomp1.hours_control_start_date = datetime.date(2016, 9, 1)
        cls.diacomp1.save()
        cls.diacomp2.hours_control_start_date = datetime.date(2016, 10, 1)
        # BUG FIX: the original saved diacomp1 a second time here, so
        # diacomp2's hours_control_start_date was never persisted.
        cls.diacomp2.save()
        # BUG FIX: the original fetched username='testuser', which is never
        # created here ('testuser1'/'testuser2' are), raising
        # User.DoesNotExist during test setup. Keep the created instances.
        cls.user1 = User.objects.create_user('testuser1', 'test1@user.com', 'senha123')
        cls.user2 = User.objects.create_user('testuser2', 'test2@user.com', 'senha123')
    # def test_debit_trigger(self):
def activate_timezone():
    """Activate America/Sao_Paulo as the current timezone for this thread."""
    sao_paulo = pytz.timezone('America/Sao_Paulo')
    return timezone.activate(sao_paulo)
| flavoso/gerencex | gerencex/core/tests/test_hoursbalance_model.py | Python | gpl-3.0 | 5,457 |
from .parameters import Parameters
from .framebuffer import FrameBuffer
from .actionhandler import ActionHandler, ActionPolicy
| Islandman93/reinforcepy | reinforcepy/handlers/__init__.py | Python | gpl-3.0 | 127 |
from os import environ
# if you set a property in SESSION_CONFIG_DEFAULTS, it will be inherited by all configs
# in SESSION_CONFIGS, except those that explicitly override it.
# the session config can be accessed from methods in your apps as self.session.config,
# e.g. self.session.config['participation_fee']
SESSION_CONFIG_DEFAULTS = {
    'real_world_currency_per_point': 1.00,
    'participation_fee': 0.00,
    'doc': "",
}
# Two sessions of the same 'dill_resp' app, differing only in the order in
# which the punishment treatment is applied.
SESSION_CONFIGS = [
    {
        'name': 'dill_resp_punish_first',
        'display_name': "Dilution de responsabilité, Punish First",
        'num_demo_participants': 12,
        'app_sequence': ['dill_resp'],
        'treatment_order': 'punish_first'
    },
    {
        'name': 'dill_resp_punish_last',
        'display_name': "Dilution de responsabilité, Punish Last",
        'num_demo_participants': 12,
        'app_sequence': ['dill_resp'],
        'treatment_order': 'punish_last'
    },
]
# ISO-639 code
# for example: de, fr, ja, ko, zh-hans
LANGUAGE_CODE = 'en'
# e.g. EUR, GBP, CNY, JPY
REAL_WORLD_CURRENCY_CODE = 'USD'
USE_POINTS = True
ROOMS = []
CHANNEL_ROUTING = 'redirect.routing.channel_routing'
# AUTH_LEVEL:
# this setting controls which parts of your site are freely accessible,
# and which are password protected:
# - If it's not set (the default), then the whole site is freely accessible.
# - If you are launching a study and want visitors to only be able to
#   play your app if you provided them with a start link, set it to STUDY.
# - If you would like to put your site online in public demo mode where
#   anybody can play a demo version of your game, but not access the rest
#   of the admin interface, set it to DEMO.
# for flexibility, you can set it in the environment variable OTREE_AUTH_LEVEL
AUTH_LEVEL = environ.get('OTREE_AUTH_LEVEL')
ADMIN_USERNAME = 'admin'
# for security, best to set admin password in an environment variable
ADMIN_PASSWORD = environ.get('OTREE_ADMIN_PASSWORD')
# Consider '', None, and '0' to be empty/false
DEBUG = (environ.get('OTREE_PRODUCTION') in {None, '', '0'})
DEMO_PAGE_INTRO_HTML = """ """
# don't share this with anybody.
SECRET_KEY = '29*rluv^s95qdbcfe6&mql^2$-_^e7nvtxi_j7r%wl#8g27p(q'
# if an app is included in SESSION_CONFIGS, you don't need to list it here
INSTALLED_APPS = ['otree']
| anthropo-lab/XP | EPHEMER/dill_resp_project/settings.py | Python | gpl-3.0 | 2,310 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-20 00:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds a database index to Post.posted
    # and sets its verbose name to 'data'. Do not edit by hand.
    dependencies = [
        ('blog', '0006_auto_20170620_0010'),
    ]
    operations = [
        migrations.AlterField(
            model_name='post',
            name='posted',
            field=models.DateField(db_index=True, verbose_name='data'),
        ),
    ]
| sandrofolk/girox | girox/blog/migrations/0007_auto_20170620_0011.py | Python | gpl-3.0 | 467 |
# -*- coding: utf-8 -*-
"""Unit tests for the Web Service client."""
import base64
import datetime
import json
import unittest
from mock import patch, MagicMock
from requests.exceptions import ConnectionError
from genweb.serveistic.ws_client.problems import (
Client, ClientException, Problem)
class TestWSClient(unittest.TestCase):
    """Exercises header building, result parsing and transport error handling
    of the Serveis TIC 'problems' web-service client.

    NOTE: this module is Python 2 code (str passed to base64.b64encode,
    leading-zero integer literals below) — it will not run under Python 3.
    """
    def setUp(self):
        # Fresh client per test; the endpoint is never actually contacted
        # (requests.get is patched in the transport tests).
        self.client = Client(
            endpoint='http://endpoint',
            login_username='test-username',
            login_password='test-password')
    def test_get_headers(self):
        # All header params are given
        client = Client(
            endpoint='#',
            login_username='username',
            login_password='password',
            content_type='application/xml')
        headers = client._get_headers()
        self.assertEqual(headers, {
            'Content-type': 'application/xml',
            'login.username': 'username',
            'login.password': 'password',
            'Authorization': "Basic {0}".format(
                base64.b64encode("username:password"))})
        # Only mandatory params are given
        client = Client(
            endpoint='#',
            login_username='username',
            login_password='password')
        headers = client._get_headers()
        self.assertEqual(headers, {
            'Content-type': 'application/json',
            'login.username': 'username',
            'login.password': 'password',
            'Authorization': "Basic {0}".format(
                base64.b64encode("username:password"))})
        # login.username and login.password are None
        client = Client(
            endpoint='#',
            login_username=None,
            login_password=None)
        headers = client._get_headers()
        self.assertEqual(headers, {
            'Content-type': 'application/json',
            'login.username': '',
            'login.password': '',
            'Authorization': "Basic {0}".format(base64.b64encode(':'))})
    def test_parse_response_result_empty(self):
        # A response with no 'resultat' key must be rejected.
        response = json.loads('{}')
        try:
            self.client._parse_response_result(response)
            self.fail("ClientException should have been raised")
        except ClientException as cexception:
            self.assertEqual(
                "'resultat' is not present in the response",
                cexception.message)
    def test_parse_response_result_with_undefined_exception(self):
        # ERROR result with neither error code nor message -> both default.
        response = json.loads('''
            {
                "resultat": "ERROR"
            }''')
        try:
            self.client._parse_response_result(response)
            self.fail("ClientException should have been raised")
        except ClientException as cexception:
            self.assertEqual(
                "Error code UNDEFINED: Undefined",
                cexception.message)
    def test_parse_response_result_with_defined_exception(self):
        # Message given, code missing -> code defaults to UNDEFINED.
        response = json.loads('''
            {
                "resultat": "ERROR",
                "resultatMissatge": "This is the message"
            }''')
        try:
            self.client._parse_response_result(response)
            self.fail("ClientException should have been raised")
        except ClientException as cexception:
            self.assertEqual(
                "Error code UNDEFINED: This is the message",
                cexception.message)
        # Code given, message missing -> message defaults to Undefined.
        response = json.loads('''
            {
                "resultat": "ERROR",
                "codiError": "5"
            }''')
        try:
            self.client._parse_response_result(response)
            self.fail("ClientException should have been raised")
        except ClientException as cexception:
            self.assertEqual(
                "Error code 5: Undefined",
                cexception.message)
        # Both code and message given -> both reported verbatim.
        response = json.loads('''
            {
                "resultat": "ERROR",
                "codiError": "5",
                "resultatMissatge": "This is the message"
            }''')
        try:
            self.client._parse_response_result(response)
            self.fail("ClientException should have been raised")
        except ClientException as cexception:
            self.assertEqual(
                "Error code 5: This is the message",
                cexception.message)
    def test_parse_response_list_problems_empty(self):
        # Success result without 'llistaProblemes' is still an error.
        response = json.loads('''
            {
                "resultat": "SUCCESS",
                "resultatMissatge": "This is the message"
            }''')
        try:
            self.client._parse_response_list_problems(response)
            self.fail("ClientException should have been raised")
        except ClientException as cexception:
            self.assertEqual(
                "'llistaProblemes' is not present in the response",
                cexception.message)
    def test_parse_response_list_problems_not_empty(self):
        # Full entry maps to a populated Problem; an empty dict maps to a
        # Problem with empty-string fields.
        response = json.loads('''
            {
                "llistaProblemes":
                [
                    {
                        "assumpte": "Gestió por VPN de gateway para servei atenció",
                        "productNom": "e-Connect",
                        "requirementId": "481897",
                        "creatPerId": "11235",
                        "productId": "33283",
                        "statusId": "PROBLEMA_OBERT",
                        "visiblePortalServeisTIC": "Y",
                        "descripcioProblema": "No es posible acceder a través de la vpn",
                        "creatPerNom": "Jose Antonio",
                        "creatPerCognom": "Tebar Garcia",
                        "dataCreacio": "2014-01-22 14:33:47.362",
                        "dataLimitResolucioString": "2014-02-12 11:13:07.152",
                        "idEmpresa": "1123",
                        "urlProblema": "/problemes/control/problemaDetallDadesGenerals"
                    },
                    {}
                ],
                "resultat": "SUCCESS",
                "resultatMissatge": "Llista problemes retornada"
            }
            ''')
        results = self.client._parse_response_list_problems(response)
        self.assertEqual(len(results), 2)
        self.assertEqual(
            results[0],
            Problem(
                topic=u"Gestió por VPN de gateway para servei atenció",
                description=u"No es posible acceder a través de la vpn",
                url=u"/problemes/control/problemaDetallDadesGenerals",
                date_creation=datetime.datetime(
                    2014, 01, 22, 14, 33, 47, 362000),
                date_fix=datetime.datetime(
                    2014, 02, 12, 11, 13, 07, 152000)))
        self.assertEqual(
            results[1],
            Problem(
                topic=u'',
                description=u'',
                url=u'',
                date_creation=u'',
                date_fix=u''))
    def test_parse_response_list_problems_wrong_format(self):
        # Dates in an unexpected format ('/' separators) degrade to ''.
        response = json.loads('''
            {
                "llistaProblemes":
                [
                    {
                        "assumpte": "Gestió por VPN de gateway para servei atenció",
                        "descripcioProblema": "No es posible acceder a través de la vpn",
                        "dataCreacio": "2014/01/22 14:33:47.362",
                        "urlProblema": "/problemes/control/problemaDetallDadesGenerals"
                    }
                ],
                "resultat": "SUCCESS",
                "resultatMissatge": "Llista problemes retornada"
            }
            ''')
        results = self.client._parse_response_list_problems(response)
        self.assertEqual(len(results), 1)
        self.assertEqual(
            results[0],
            Problem(
                topic=u"Gestió por VPN de gateway para servei atenció",
                description=u"No es posible acceder a través de la vpn",
                url=u"/problemes/control/problemaDetallDadesGenerals",
                date_creation=u'',
                date_fix=u''))
    def test_list_problems(self):
        # Parameter product_id empty
        try:
            self.client.list_problems(" \n \t ")
            self.fail("ClientException should have been raised")
        except ClientException as exception:
            self.assertEqual("Parameter 'product_id' cannot be empty",
                             exception.message)
        try:
            self.client.list_problems(None)
            self.fail("ClientException should have been raised")
        except ClientException as exception:
            self.assertEqual("Parameter 'product_id' cannot be empty",
                             exception.message)
        # Connection error
        with patch('genweb.serveistic.ws_client.problems.requests.get',
                   side_effect=ConnectionError):
            try:
                self.client.list_problems(1)
                self.fail("ClientException should have been raised")
            except ClientException as exception:
                self.assertEqual("The connection with '{0}' could not be "
                                 "established".format(self.client.endpoint),
                                 exception.message)
        # Response status is not OK
        response_mock = MagicMock(status_code=500)
        with patch('genweb.serveistic.ws_client.problems.requests.get',
                   side_effect=(response_mock,)):
            try:
                self.client.list_problems(1)
                self.fail("ClientException should have been raised")
            except ClientException as exception:
                self.assertEqual("Status code is not OK (500)",
                                 exception.message)
        # resultat is present
        response_mock = MagicMock(status_code=200)
        with patch('genweb.serveistic.ws_client.problems.requests.get',
                   side_effect=(response_mock,)), patch(
                'genweb.serveistic.ws_client.problems.Client._parse_response_list_problems',
                side_effect=([],)):
            self.assertEqual([], self.client.list_problems(1))
    def test_list_problems_with_count_parameter(self):
        # The optional count caps the returned list; None means "all".
        response_mock = MagicMock(status_code=200)
        with patch('genweb.serveistic.ws_client.problems.requests.get',
                   side_effect=(response_mock for _ in range(5))), patch(
                'genweb.serveistic.ws_client.problems.Client._parse_response_list_problems',
                side_effect=([1, 2, 3, 4, 5, 6, 7, 8] for _ in range(5))):
            self.assertEqual([1, 2, 3, 4, 5, 6, 7, 8],
                             self.client.list_problems(1))
            self.assertEqual([1, 2, 3, 4, 5, 6, 7, 8],
                             self.client.list_problems(1, None))
            self.assertEqual([],
                             self.client.list_problems(1, 0))
            self.assertEqual([1, 2, 3, 4, 5],
                             self.client.list_problems(1, 5))
            self.assertEqual([1, 2, 3, 4, 5, 6, 7, 8],
                             self.client.list_problems(1, 10))
| UPCnet/genweb.serveistic | genweb/serveistic/tests/test_ws_client_problems.py | Python | gpl-3.0 | 10,661 |
# macgen.py script to generate a MAC address for Red Hat Virtualization guests
#
# from http://www.linux-kvm.com/sites/default/files/macgen.py
import random
def randomMAC():
    """Return a random MAC address within the Xen/KVM guest OUI (00:16:3e).

    The first three octets are the fixed vendor prefix; the fourth is capped
    at 0x7f, and the last two span the full byte range.
    """
    octets = [0x00, 0x16, 0x3e]
    octets.append(random.randint(0x00, 0x7f))
    octets.append(random.randint(0x00, 0xff))
    octets.append(random.randint(0x00, 0xff))
    return ':'.join("%02x" % octet for octet in octets)
| dguglielmi/vmc-core | vmc/thirdparty/macgen.py | Python | gpl-3.0 | 343 |
from .async import TelnetServer
| Scorched-Moon/server | server/miniboa/__init__.py | Python | gpl-3.0 | 32 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from functools import partial
from PyQt5 import QtWidgets, QtCore
from controller.gensec.dialogs.processes.information import Information
from view.dialogs.base_dialog import BaseDialog
from view.gensec.dialogs.processes.ui_posl import Ui_process
class POSL(BaseDialog, Ui_process):
    """Dialog for configuring a POSL measurement process.

    Wires up the generated Qt UI, validates the data-point budget (max 512
    channels across the three stimulation phases) and serialises the form
    into the process dict consumed by the rest of the application.
    """
    def __init__(self, process_data=False, parent=None):
        BaseDialog.__init__(self, parent)
        self.setupUi(self)
        # Numeric id identifying the POSL process type.
        self.id = 4
        self.date_type = ''
        self.comments = ''
        self.channels_calculation = 0
        self.time_per_channel_calculation = 0
        self.information_dialog = None
        self.push_button_accept.clicked.connect(self.accept)
        self.push_button_cancel.clicked.connect(self.close)
        self.push_button_information.clicked.connect(self.showInformationDialog)
        # Each spin box re-validates the shared 512-point budget; the int
        # argument tells the validator which phase changed.
        self.before_stimulation.valueChanged.connect(partial(self.dataPointsValidator, 1))
        self.during_stimulation.valueChanged.connect(partial(self.dataPointsValidator, 2))
        self.after_stimulation.valueChanged.connect(partial(self.dataPointsValidator, 3))
        self.time.valueChanged.connect(self.updateTimePerChannel)
        self.time_measurement.currentIndexChanged.connect(self.updateTimePerChannel)
        # Centre the dialog on the primary screen.
        width = self.sizeHint().width()
        height = self.sizeHint().height()
        widget = QtWidgets.QDesktopWidget()
        main_screen_size = widget.availableGeometry(widget.primaryScreen())
        pos_x = (main_screen_size.width() / 2) - (width / 2)
        pos_y = (main_screen_size.height() / 2) - (height / 2)
        self.setGeometry(QtCore.QRect(pos_x, pos_y, width, height))
        self.fill(process_data)
    def fill(self, process_data):
        """Populate the widgets from a previously saved process dict, if any."""
        if process_data and process_data is not None:
            self.stabilization.setValue(process_data["stabilization"])
            self.heating_rate.setValue(process_data["heating_rate"])
            self.final_temperature.setValue(process_data["final_temp"])
            self.time.setValue(self.convertTime(process_data["time"], process_data["time_unit"]))
            self.optical_power.setValue(process_data["start_optical_power"])
            self.before_stimulation.setValue(process_data["datapoints1"])
            self.during_stimulation.setValue(process_data["datapoints2"])
            self.after_stimulation.setValue(process_data["datapoints3"])
            self.number_of_scan.setValue(process_data["number_scan"])
            # Map stored unit names onto combo-box indexes.
            time_measurements = {
                'ms': 0,
                's': 1,
                'us': 2
            }
            self.time_measurement.setCurrentIndex(time_measurements[process_data["time_unit"]])
            light_source = {
                'Blue': 0,
                'IR': 1,
                'AUX': 2,
            }
            self.ligth_source.setCurrentIndex(light_source[process_data["light_source"]])
            self.time_per_channel_calculation = process_data["timePerChannel"]
            self.channels_calculation = process_data["channels"]
            self.date_type = process_data["date_type"]
            self.comments = process_data["comments"]
            self.updateTimePerChannel()
            self.dataPointsValidator(None)
    def showInformationDialog(self):
        """Open the modal Information dialog for date type / comments."""
        self.information_dialog = Information(self.date_type, self.comments, self)
        self.information_dialog.accepted.connect(self.informationAccepted)
        self.information_dialog.exec_()
    def informationAccepted(self):
        # Pull the edited values back before closing the child dialog.
        self.date_type, self.comments = self.information_dialog.getData()
        self.information_dialog.close()
    def convertTime(self, time, time_measurement):
        """Convert a stored time (always seconds) into the given display unit."""
        if time_measurement == 'ms':
            return float(time) / 0.001
        elif time_measurement == 's':
            return float(time)
        elif time_measurement == 'us':
            return float(time) / 0.000001
    def getTime(self):
        """Return the time widget value converted to seconds (inverse of convertTime)."""
        time = self.time.value()
        if self.time_measurement.currentIndex() == 0:
            time *= 0.001
        elif self.time_measurement.currentIndex() == 1:
            pass
        elif self.time_measurement.currentIndex() == 2:
            # NOTE(review): unlike the other branches this returns a string
            # (via toString), not a float — confirm downstream expectations.
            time = self.toString(time * 0.000001)
        return time
    def toString(self, f):
        """Format a float as a string, forcing a leading '0.' for values below 1."""
        if int(f) < 1:
            s = str(f + 1)
            temp = s.split('.')
            temp[0] = '0'
            s = temp[0] + '.' + temp[1]
        else:
            s = str(f)
        return s
    def updateTimePerChannel(self):
        # Recompute stimulation time per channel. A ZeroDivisionError when no
        # channels are configured is swallowed, keeping the previous value;
        # NOTE(review): the bare except also hides any other error — consider
        # narrowing to ZeroDivisionError.
        try:
            self.time_per_channel_calculation = self.time.value() / self.channels_calculation
        except:
            pass
        time_measurement = str(self.time_measurement.currentText())
        self.time_per_channel.setText(str(round(self.time_per_channel_calculation, 2)) + ' ' + time_measurement)
    def dataPointsValidator(self, button):
        """Cap the total data points at 512, reverting the spin box that
        exceeded the budget ('button' is 1/2/3 for before/during/after)."""
        before = self.before_stimulation.value()
        during = self.during_stimulation.value()
        after = self.after_stimulation.value()
        if (before + during + after) > 512:
            if button == 1:
                self.before_stimulation.setValue(before - 1)
            elif button == 2:
                self.during_stimulation.setValue(during - 1)
            else:
                self.after_stimulation.setValue(after - 1)
        else:
            self.channels_calculation = before + during + after
            self.channels.setText(str(self.channels_calculation))
            self.updateTimePerChannel()
    def getData(self):
        """Return (summary string, full process dict) describing the form state."""
        data = "POSL, " + \
               str(self.ligth_source.currentText()) + ", " + \
               str(self.optical_power.value()) + "%"
        all_ = {
            "id": self.id,
            "light_source": str(self.ligth_source.currentText()),
            "start_optical_power": self.optical_power.value(),
            "number_scan": self.number_of_scan.value(),
            "time": self.getTime(),
            "time_unit": str(self.time_measurement.currentText()),
            "datapoints1": self.before_stimulation.value(),
            "datapoints2": self.during_stimulation.value(),
            "datapoints3": self.after_stimulation.value(),
            "final_temp": self.final_temperature.value(),
            "time_final_temp": self.toString(float(self.getTime()) + self.stabilization.value()),
            "heating_rate": self.heating_rate.value(),
            "stabilization": self.stabilization.value(),
            "date_type": self.date_type,
            "comments": self.comments,
            "channels": self.channels_calculation,
            "timePerChannel": self.time_per_channel_calculation
        }
        return data, all_
| carlos-ferras/Sequence-ToolKit | controller/gensec/dialogs/processes/posl.py | Python | gpl-3.0 | 6,732 |
# -*-coding=utf-8-*-
__author__ = 'Rocky'
# -*-coding=utf-8-*-
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import smtplib
from email import encoders, utils
import urllib.request, urllib.error, urllib.parse
import time
import re
import sys
import os
from bs4 import BeautifulSoup
from email.header import Header
import imp
class GetContent():
    """Scrapes product/deal listings from shihuo.cn, saves the text locally
    and mails the assembled HTML via MailAtt."""
    def __init__(self, id):
        # The first argument given is the id of the question to download.
        # e.g. if the question link is https://www.zhihu.com/question/29372574
        # then run: python zhihu.py 29372574
        id_link = "/question/" + id
        #self.getAnswer(id_link)
        self.getstorelist();
        #self.gettuangoulist();
    def save2file(self, filename, content):
        # Save the content as an e-book (plain text) file, appending.
        filename = filename + ".txt"
        f = open(filename, 'a', encoding="utf-8")
        f.write(content)
        f.close()
    def gettuangoulist(self):
        """Scrape the group-buy (tuangou) page and e-mail the matching items."""
        url = "http://www.shihuo.cn/tuangou"
        print(url)
        user_agent = "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)"
        # Build headers to disguise the request as a regular browser
        header = {"User-Agent": user_agent}
        req = urllib.request.Request(url, headers=header)
        try:
            resp = urllib.request.urlopen(req)
            content= resp.read().decode('utf-8');
            if content is None:
                print("Empty")
                return None
        except:
            print("Time out. Retry")
            time.sleep(30)
            # try to switch with proxy ip
            resp = urllib.request.urlopen(req)
            content = resp.read()
            if content is None:
                print("Empty")
                return None
            # NOTE(review): this unconditional return makes every retry give
            # up even when the second fetch succeeded — confirm intent.
            return None
        # The page HTML has been fetched; extract the wanted content with
        # BeautifulSoup, which makes this convenient
        try:
            bs = BeautifulSoup(content)
        except:
            print("Beautifulsoup error")
            return None
        #print(content)
        title = bs.title
        # The fetched page title
        print(title)
        filename_old = title.string.strip()
        print(filename_old)
        filename = re.sub('[\/:*?"<>|]', '-', filename_old)
        # Filename used to save the content; filenames cannot contain some
        # special characters, so they are filtered out with a regex
        store_list = bs.find_all("li", class_="")
        html_up = """\
        <html>
        <head>识货团购</head>
        <body>
        """
        html_middle = "";
        html_bottom = """\
        </body>
        </html>
        """
        k = 0
        index = 0
        for each_list in store_list:
            store_name = each_list.find("div",class_="h2")
            if(store_name!=None):
                print (store_name.a.string);
            store_price = each_list.find("div",class_="z2")
            if(store_price!=None):
                html_middle= html_middle+str(each_list);
            '''
            store_rmb = store_price.find("span",class_="z1")
            i=0
            if(store_rmb!=None):
                for string in store_rmb.strings:
                    self.save2file(filename,string)
                    i = i + 1;
                    if(i ==1 and float(string)<=299):
                        print (string)
                        html_middle= html_middle+str(each_list);
            #print (store_rmb);
            #for a in each_answer.strings:
            # 循环获取每一个答案的内容,然后保存到文件中
            #self.save2file(filename, str(a))
            '''
            k += 1
            index = index + 1
        html = html_up + html_middle + html_bottom;
        # NOTE(review): hard-coded mail credentials — move to config/env.
        smtp_server = 'smtp.sina.com'
        from_mail = 'he_jingtai@sina.com'
        password = 'hjtgoodno1'
        to_mail = '1024654056@qq.com'
        sender = '17759118897@189.cn'
        receiver = '1024654056@qq.com'
        subject = '图片附件html发送邮件测试'
        send_kindle=MailAtt(smtp_server,from_mail,password,to_mail)
        send_kindle.sendMail(sender,receiver,subject,html)
    def getstorelist(self):
        """Scrape the daigou listing page and e-mail items priced under 200."""
        url = "http://www.shihuo.cn/haitao/daigou?order=date"
        print(url)
        user_agent = "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)"
        # Build headers to disguise the request as a regular browser
        header = {"User-Agent": user_agent}
        req = urllib.request.Request(url, headers=header)
        try:
            resp = urllib.request.urlopen(req)
            content= resp.read().decode('utf-8');
            if content is None:
                print("Empty")
                return None
        except:
            print("Time out. Retry")
            time.sleep(30)
            # try to switch with proxy ip
            resp = urllib.request.urlopen(req)
            content = resp.read()
            if content is None:
                print("Empty")
                return None
            # NOTE(review): this unconditional return makes every retry give
            # up even when the second fetch succeeded — confirm intent.
            return None
        # The page HTML has been fetched; extract the wanted content with
        # BeautifulSoup, which makes this convenient
        try:
            bs = BeautifulSoup(content)
        except:
            print("Beautifulsoup error")
            return None
        #print(content)
        title = bs.title
        # The fetched page title
        print(title)
        filename_old = title.string.strip()
        print(filename_old)
        filename = re.sub('[\/:*?"<>|]', '-', filename_old)
        # Filename used to save the content; filenames cannot contain some
        # special characters, so they are filtered out with a regex
        store_list = bs.find_all("li", class_="")
        html_up = """\
        <html>
        <head>测试一下</head>
        <body>
        """
        html_middle = "";
        html_bottom = """\
        </body>
        </html>
        """
        k = 0
        index = 0
        for each_list in store_list:
            store_name = each_list.find("div",class_="title")
            #if(store_name!=None):
            #print (store_name.a.string);
            store_price = each_list.find("div",class_="price")
            if(store_price!=None):
                store_rmb = each_list.find("span")
                i=0
                for string in store_rmb.strings:
                    self.save2file(filename,string)
                    i = i + 1;
                    if(i ==2 and float(string)<200):
                        html_middle= html_middle+str(each_list);
            #print (store_rmb);
            #for a in each_answer.strings:
            # Loop over every answer's content and save it to the file
            #self.save2file(filename, str(a))
            k += 1
            index = index + 1
        html = html_up + html_middle + html_bottom;
        # NOTE(review): hard-coded mail credentials — move to config/env.
        smtp_server = 'smtp.sina.com'
        from_mail = 'he_jingtai@sina.com'
        password = 'hjtgoodno1'
        to_mail = '1024654056@qq.com'
        sender = '17759118897@189.cn'
        receiver = '1024654056@qq.com'
        subject = '图片附件html发送邮件测试'
        send_kindle=MailAtt(smtp_server,from_mail,password,to_mail)
        send_kindle.sendMail(sender,receiver,subject,html)
    def getAnswer(self, answerID):
        """Scrape the daigou listing, save store names/prices to a text file
        and mail the result (currently unused; see __init__)."""
        host = "http://www.shihuo.cn"
        url = "http://www.shihuo.cn/haitao/daigou?order=date"
        print(url)
        user_agent = "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)"
        # Build headers to disguise the request as a regular browser
        header = {"User-Agent": user_agent}
        req = urllib.request.Request(url, headers=header)
        try:
            resp = urllib.request.urlopen(req)
            content= resp.read().decode('utf-8');
            if content is None:
                print("Empty")
                return None
        except:
            print("Time out. Retry")
            time.sleep(30)
            # try to switch with proxy ip
            resp = urllib.request.urlopen(req)
            content = resp.read()
            if content is None:
                print("Empty")
                return None
        # The page HTML has been fetched; extract the wanted content with
        # BeautifulSoup, which makes this convenient
        try:
            bs = BeautifulSoup(content)
        except:
            print("Beautifulsoup error")
            return None
        #print(content)
        title = bs.title
        # The fetched page title
        print(title)
        filename_old = title.string.strip()
        print(filename_old)
        filename = re.sub('[\/:*?"<>|]', '-', filename_old)
        # Filename used to save the content; filenames cannot contain some
        # special characters, so they are filtered out with a regex
        self.save2file(filename, title.string)
        store_list = bs.find_all("li", class_="")
        k = 0
        index = 0
        for each_list in store_list:
            store_name = each_list.find("div",class_="title")
            if(store_name!=None):
                self.save2file(filename, "\n\n-------------------------store %s via -------------------------\n\n" % k)
                print (store_name.a.string);
                self.save2file(filename, str(store_name.a.string))
            store_price = each_list.find("div",class_="price")
            if(store_price!=None):
                store_rmb = each_list.find("span")
                i=0
                for string in store_rmb.strings:
                    self.save2file(filename,string)
                    i = i + 1;
                    if(i ==2 and float(string)<200):
                        print(string);
            #print (store_rmb);
            #for a in each_answer.strings:
            # Loop over every answer's content and save it to the file
            #self.save2file(filename, str(a))
            k += 1
            index = index + 1
        #detail = bs.find("li", class_="")
        #self.save2file(filename, "\n\n\n\n--------------------Link %s ----------------------\n\n" %url)
        #self.save2file(filename, "\n\n\n\n--------------------Detail----------------------\n\n")
        # Fetch the question's supplementary content
        # NOTE(review): hard-coded mail credentials — move to config/env.
        smtp_server = 'smtp.sina.com'
        from_mail = 'he_jingtai@sina.com'
        password = 'hjtgoodno1'
        to_mail = '17759118897@189.cn'
        sender = '17759118897@189.cn'
        receiver = '1024654056@qq.com'
        subject = '图片附件html发送邮件测试'
        send_kindle=MailAtt(smtp_server,from_mail,password,to_mail)
        # NOTE(review): 'html' is never defined in this method, so this call
        # would raise NameError if getAnswer were invoked — confirm the
        # intended payload.
        send_kindle.sendMail(sender,receiver,subject,html)
        # Call the mail-sending function to deliver the e-book to your kindle
        # mailbox account, so your kindle receives the e-book
        print(filename)
class MailAtt():
    """Thin SMTP helper: sends an HTML body (sendMail) or a text-file
    attachment (send_txt) through a configured account."""
    def __init__(self, smtp_server, from_mail, password, to_mail):
        self.server = smtp_server
        self.username = from_mail.split("@")[0]
        self.from_mail = from_mail
        self.password = password
        self.to_mail = to_mail
        # Initialize mailbox settings
    def sendMail(self,sender,receiver,subject,html):
        """Send 'html' as a UTF-8 HTML e-mail through the 189.cn SMTP server.

        NOTE(review): ignores the server/credentials stored on self and uses
        hard-coded 189.cn credentials instead — confirm intent.
        """
        smtpserver = 'smtp.189.cn'
        username = '17759118897'
        password = 'hjtgood'
        msg = MIMEMultipart('alternative')
        msg['Subject'] = Header(subject,'utf-8')
        htm = MIMEText(html,'html','utf-8')
        msg.attach(htm)
        # Build the inline images (disabled)
        '''
        fp = open('QQ截图未命名1.jpg','rb')
        msgImage = MIMEImage(fp.read())
        fp.close()
        msgImage.add_header('Content-ID','<image1>')
        msg.attach(msgImage)
        fp = open('QQ截图未命名1.jpg','rb')
        msgImage = MIMEImage(fp.read())
        fp.close()
        msgImage.add_header('Content-ID','<image2>')
        msg.attach(msgImage)
        '''
        # Build the attachment (disabled)
        #att = MIMEText(open('Pictures.rar','rb').read(),'base64','utf-8')
        #att["Content-Type"] = 'application/octet-stream'
        #att["Content-Disposition"] = 'attatchment;filename="Pictures.rar"'
        #msg.attach(att)
        smtp = smtplib.SMTP()
        smtp.connect('smtp.189.cn')
        smtp.login(username,password)
        smtp.sendmail(sender,receiver,msg.as_string())
        smtp.quit()
    def send_txt(self, filename):
        # Be especially careful with character encoding when sending
        # attachments; this took a long time to debug because the received
        # file kept coming out garbled.
        self.filename = "C:\\Users\\heinic\\content\\" + filename + ".txt"
        content = open(self.filename, 'rb').read()
        self.msg = MIMEText(content,_subtype='plain',_charset='gb2312')
        self.msg['Subject'] = 'sssssss'
        self.msg['From'] = self.from_mail
        self.msg['To'] = self.to_mail
        try:
            self.smtp = smtplib.SMTP()
            self.smtp.connect(self.server)
            self.smtp.login(self.username, self.password)
            self.smtp.sendmail(self.msg['from'], self.msg['To'], self.msg.as_string())
            self.smtp.close()
            return True
        except Exception as e:
            print (str(e))
            return False
        '''
        self.smtp = smtplib.SMTP()
        self.smtp.connect(self.server)
        self.smtp.login(self.username, self.password)
        #self.smtp = smtplib.SMTP_SSL(self.server, 465)
        #self.smtp.ehlo()
        #self.smtp.login('1024654056@qq.com','mhpdosozbielbddc')#XXX为用户名,XXXXX为密码
        #创建一个带附件的实例
        #msg = MIMEMultipart()
        #构造附件1
        #msg.attach(att1)
        #self.msg = MIMEMultipart()
        self.filename = "C:\\Users\\heinic\\content\\" + filename + ".txt"
        #self.msg['Date'] = utils.formatdate(localtime=1)
        content = open(self.filename, 'rb').read()
        #self.att = MIMEText(content, 'base64', 'gb2312')
        self.msg = MIMEText(content, 'plain', 'utf-8')
        self.msg['to'] = self.to_mail
        self.msg['from'] = self.from_mail
        self.msg['Subject'] = "convert"
        #self.msg.attach(content1)
        #attfile = 'C:\\Users\\heinic\\Desktop\\题库.txt'
        #basename = os.path.basename(attfile)
        #fp = open(self.filename, 'rb')
        #self.att = MIMEText(fp.read(), 'base64', 'gb2312')
        #self.att["Content-Type"] = 'application/octet-stream'
        #self.att.add_header('Content-Disposition', 'attachment',filename=('gbk', '', self.filename))
        #encoders.encode_base64(self.att)
        # print content
        #self.att = MIMEText(open('C:\\Users\\heinic\\Desktop\\题库.txt', 'rb').read(), 'base64', 'gb2312')
        #self.att["Content-Type"] = 'application/octet-stream'
        #self.att["Content-Disposition"] = 'attachment; filename="题库.txt"'#这里的filename可以任意写,写什么名字,邮件中显示什么名字
        #self.att = MIMEText(content, 'base64', 'gb2312')
        #self.att['Content-Type'] = 'application/octet-stream'
        #self.att["Content-Disposition"] = "attachment;filename=\"%s\"" %(self.filename.encode('gb2312'))
        #self.att["Content-Disposition"] = 'attachment; filename="题库.txt"';
        # print self.att["Content-Disposition"]
        #self.msg.attach(self.att)
        self.smtp.sendmail(self.msg['from'], self.msg['to'], self.msg.as_string())
        self.smtp.quit()
        '''
if __name__ == "__main__":
    # Dedicated directory for storing the downloaded e-book content.
    sub_folder = os.path.join(os.getcwd(), "content")
    if not os.path.exists(sub_folder):
        os.mkdir(sub_folder)
    os.chdir(sub_folder)
    id = "35461941"
    #id = sys.argv[1]
    # The first argument is the id of the question to download.
    # e.g. for the link https://www.zhihu.com/question/29372574
    # run: python zhihu.py 29372574
    # id_link = "/question/" + id
    obj = GetContent(id)
    # obj.getAnswer(id_link)
    # BUG FIX: the final print carried trailing non-Python residue
    # ("| hjtgood/test | ...") fused onto the line, which raised NameError
    # at runtime; only the print remains.
    print("Done")
import unittest
from pystan import stanc, StanModel
from pystan._compat import PY2
class TestUTF8(unittest.TestCase):
    """Checks that stanc accepts UTF-8 inside Stan comments but rejects
    non-ASCII identifiers in program code."""
    # Keys expected in every successful stanc() result, in sorted order.
    desired = sorted({"status", "model_cppname", "cppcode", "model_name", "model_code", "include_paths"})
    def test_utf8(self):
        # Plain ASCII model compiles cleanly.
        model_code = 'parameters {real y;} model {y ~ normal(0,1);}'
        result = stanc(model_code=model_code)
        self.assertEqual(sorted(result.keys()), self.desired)
        self.assertTrue(result['cppcode'].startswith("// Code generated by Stan "))
        self.assertEqual(result['status'], 0)
    def test_utf8_linecomment(self):
        # UTF-8 in a // line comment must be tolerated.
        model_code = u'parameters {real y;\n //äöéü\n} model {y ~ normal(0,1);}'
        result = stanc(model_code=model_code)
        self.assertEqual(sorted(result.keys()), self.desired)
        self.assertTrue(result['cppcode'].startswith("// Code generated by Stan "))
        self.assertEqual(result['status'], 0)
    def test_utf8_multilinecomment(self):
        # UTF-8 in a /* */ block comment must be tolerated.
        model_code = u'parameters {real y;\n /*äöéü\näöéü*/\n} model {y ~ normal(0,1);}'
        result = stanc(model_code=model_code)
        self.assertEqual(sorted(result.keys()), self.desired)
        self.assertTrue(result['cppcode'].startswith("// Code generated by Stan "))
        self.assertEqual(result['status'], 0)
    def test_utf8_inprogramcode(self):
        # A non-ASCII identifier is a parse error.
        model_code = u'parameters {real ö;\n} model {ö ~ normal(0,1);}'
        # Py2/Py3 compatibility shim: assertRaisesRegexp was renamed.
        assertRaisesRegex = self.assertRaisesRegexp if PY2 else self.assertRaisesRegex
        with assertRaisesRegex(ValueError, 'Failed to parse Stan model .*'):
            stanc(model_code=model_code)
| ariddell/pystan | pystan/tests/test_utf8.py | Python | gpl-3.0 | 1,618 |
#!/usr/bin/env python
import rospy
from belt_parser import BeltParser
import tf
import tf2_ros
import math
import copy
from memory_definitions.srv import GetDefinition
from processing_belt_interpreter.msg import *
from drivers_ard_others.msg import BeltRange
from geometry_msgs.msg import Pose2D, TransformStamped, PointStamped
from ai_game_manager import StatusServices
from dynamic_reconfigure.server import Server
from processing_belt_interpreter.cfg import BeltInterpreterConfig
from multiprocessing import Lock
class BeltInterpreter(object):
    """ROS node that turns raw belt range-sensor readings into rectangles.

    Subscribes to per-sensor range messages, converts each valid reading
    into a RectangleStamped expressed in that sensor's static TF frame,
    and republishes the whole batch, paced by two one-shot watchdog
    timers (a short one for the belt sensors, a longer one for the
    teraranger).
    """

    def __init__(self):
        super(BeltInterpreter, self).__init__()
        rospy.init_node("belt_interpreter")
        rospy.loginfo("Belt interpreter is initializing...")
        # template for the sensor frame id, with '{}' being the sensor id
        self.SENSOR_FRAME_ID = "belt_{}"
        self.DEF_FILE = "processing/belt.xml"
        self.TOPIC = "/processing/belt_interpreter/rects"
        self.SENSORS_TOPIC = "/drivers/ard_others/belt_ranges"
        self.PUB_RATE = rospy.Rate(10)
        # Rectangle scale factors, tunable at runtime via dynamic_reconfigure.
        self.RECT_SCALE_WIDTH = 1.0
        self.RECT_SCALE_HEIGHT = 1.0
        # Publish deadlines; belt sensors report faster than the teraranger.
        self.WATCHDOG_PERIOD_BELT = rospy.Duration(0.015)
        self.WATCHDOG_PERIOD_TERA = rospy.Duration(0.05)
        # How many past publications to remember (used by process_range to
        # paper over transient bad readings).
        self.PREVIOUS_DATA_SIZE = 2
        filepath = self.fetch_definition()
        self._belt_parser = BeltParser(filepath)
        self._pub = rospy.Publisher(self.TOPIC, BeltRects, queue_size=1)
        self._broadcaster = tf2_ros.StaticTransformBroadcaster()
        self.pub_static_transforms()
        self._sensors_sub = rospy.Subscriber(self.SENSORS_TOPIC, BeltRange,
                                             self.callback)
        self.syn_param_srv = Server(BeltInterpreterConfig, self.dyn_param_cb)
        # Guards all the shared state below (subscriber + timer threads).
        self._mutex = Lock()
        self._watchdog = rospy.Timer(self.WATCHDOG_PERIOD_TERA, self.publish, oneshot=True)
        self._current_rects = {}
        self._current_statuses = {}
        self._data_to_process = []
        self._previous_rects = []
        self._previous_statuses = []
        # Run length of identical out-of-range values per sensor; a long run
        # suggests the sensor is physically disconnected.
        self._same_bad_value_counter = {s: 0 for s in self._belt_parser.Sensors.keys()}
        self._last_bad_value = {s: 0 for s in self._belt_parser.Sensors.keys()}
        rospy.loginfo("Belt interpreter is ready. Listening for sensor data on '{}'.".format(self.SENSORS_TOPIC)) # TODO duplicate log with status_services.ready()
        # Tell ai/game_manager the node initialized successfully.
        StatusServices("processing", "belt_interpreter").ready(True)
        rospy.spin()

    def dyn_param_cb(self, config, level):
        """dynamic_reconfigure callback: update the rectangle scale factors."""
        self.RECT_SCALE_HEIGHT = config["RECT_SCALE_HEIGHT"]
        self.RECT_SCALE_WIDTH = config["RECT_SCALE_WIDTH"]
        rospy.loginfo("Set rect scale to (%f, %f)" % (self.RECT_SCALE_WIDTH, self.RECT_SCALE_HEIGHT))
        return config

    def publish(self, event):
        """Timer callback: publish the accumulated rectangles and reset state."""
        with self._mutex:
            # NOTE(review): this comparison relies on Python 2 dict.keys()
            # returning a list; under Python 3 it is always False — confirm
            # the target runtime before porting.
            if self._current_rects.keys() == ["sensor_tera1"] or not self._current_rects:
                # Only teraranger data (or nothing) arrived: re-arm the slow watchdog.
                if self._watchdog:
                    self._watchdog.shutdown()
                self._watchdog = rospy.Timer(self.WATCHDOG_PERIOD_TERA, self.publish, oneshot=True)
            if len(self._current_rects) > 0:
                # Remember the last PREVIOUS_DATA_SIZE publications per sensor.
                self._previous_rects.append(copy.deepcopy(self._current_rects))
                self._previous_statuses.append(copy.deepcopy(self._current_statuses))
                if(len(self._previous_rects) > self.PREVIOUS_DATA_SIZE):
                    self._previous_rects.pop(0)
                if (len(self._previous_statuses) > self.PREVIOUS_DATA_SIZE):
                    self._previous_statuses.pop(0)
            self._pub.publish(self._current_rects.values())
            self._current_rects.clear()
            self._current_statuses.clear()

    def process_range(self, data):
        """Convert one BeltRange message into a rectangle (or record it as bad)."""
        if data.sensor_id not in self._belt_parser.Sensors.keys():
            rospy.logerr("Received data from belt sensor '{}' but no such sensor is defined"
                         .format(data.sensor_id))
            return
        with self._mutex:
            params = self._belt_parser.Params[self._belt_parser.Sensors[data.sensor_id]["type"]]
            if data.range > params["max_range"] or data.range <= 0:
                # Out-of-range reading: mark the sensor bad for this cycle.
                self._current_statuses.update({data.sensor_id: False})
                if data.range == self._last_bad_value[data.sensor_id]:
                    self._same_bad_value_counter[data.sensor_id] += 1
                else:
                    self._same_bad_value_counter[data.sensor_id] = 0
                self._last_bad_value[data.sensor_id] = data.range
                if self._same_bad_value_counter[data.sensor_id] > 100:
                    rospy.logwarn_throttle(1, "Sensor %s might be disconnected !" % data.sensor_id)
                # If we published this sensor most of the time and its bad, publish the last one we got
                l = [data.sensor_id in d and d[data.sensor_id] for d in self._previous_statuses]
                if sum(l) > math.ceil((self.PREVIOUS_DATA_SIZE + 1) / 2):
                    for d in reversed(self._previous_rects):
                        if data.sensor_id in d:
                            rospy.logdebug('Got bad data for sensor %s but publishing the last good data' % data.sensor_id)
                            r = d[data.sensor_id]
                            r.header.stamp = rospy.Time.now()
                            self._current_rects.update({data.sensor_id: d[data.sensor_id]})
                            return
                return
            self._same_bad_value_counter[data.sensor_id] = 0
            if params["scale_responsive"]:
                # Apply the dynamic_reconfigure scale factors.
                width = self.get_rect_width(data.range, params) * self.RECT_SCALE_WIDTH
                height = self.get_rect_height(data.range, params) * self.RECT_SCALE_HEIGHT
            else:
                width = self.get_rect_width(data.range, params)
                height = self.get_rect_height(data.range, params)
            rect = RectangleStamped()
            rect.header.frame_id = self.SENSOR_FRAME_ID.format(data.sensor_id)
            rect.header.stamp = rospy.Time.now()
            rect.x = self.get_rect_x(data.range, params)
            rect.y = 0
            rect.w = width
            rect.h = height
            rect.a = 0
            self._current_rects.update({data.sensor_id: rect})
            self._current_statuses.update({data.sensor_id: True})

    def get_rect_width(self, r, params):
        """Extent along the sensor's x axis of the uncertainty box at range r."""
        prec = r * params["precision"]
        angle = params["angle"]
        x_far = r + prec
        x_close = math.cos(angle / 2) * (r - prec)
        # called width because along x axis, but it is the smaller side
        width = abs(x_far - x_close)
        return width

    def get_rect_height(self, r, params):
        """Extent across the sensor cone of the uncertainty box at range r."""
        prec = r * params["precision"]
        angle = params["angle"]
        return abs(2 * math.sin(angle / 2) * (r + prec))

    def get_rect_x(self, r, params):
        """Center (along x) of the uncertainty box at range r."""
        prec = r * params["precision"]
        angle = params["angle"]
        x_far = r + prec
        x_close = math.cos(angle / 2) * (r - prec)
        return (x_far + x_close) / 2

    def callback(self, data):
        """Subscriber callback: process a reading and (re-)arm publication."""
        publish_now = False
        # A second reading from the same belt sensor means a full sweep has
        # completed: flush immediately instead of waiting for the watchdog.
        if data.sensor_id in self._current_rects and data.sensor_id != 'sensor_tera1':
            publish_now = True
        self.process_range(data)
        if data.sensor_id != 'sensor_tera1' and not publish_now:
            if self._watchdog:
                self._watchdog.shutdown()
            self._watchdog = rospy.Timer(self.WATCHDOG_PERIOD_BELT, self.publish, oneshot=True)
        elif publish_now:
            self.publish(None)

    def pub_static_transforms(self):
        """Broadcast one static TF (robot -> belt_<id>) per configured sensor."""
        tr_list = []
        for id, s in self._belt_parser.Sensors.items():
            tr = TransformStamped()
            tr.header.stamp = rospy.Time.now()
            tr.header.frame_id = "robot"
            tr.child_frame_id = self.SENSOR_FRAME_ID.format(id)
            tr.transform.translation.x = s["x"]
            tr.transform.translation.y = s["y"]
            tr.transform.translation.z = 0
            quat = tf.transformations.quaternion_from_euler(0, 0, s["a"])
            tr.transform.rotation.x = quat[0]
            tr.transform.rotation.y = quat[1]
            tr.transform.rotation.z = quat[2]
            tr.transform.rotation.w = quat[3]
            tr_list.append(tr)
        self._broadcaster.sendTransform(tr_list)

    def fetch_definition(self):
        """Ask the memory service for the belt definition file; fatal on failure."""
        get_def = rospy.ServiceProxy('/memory/definitions/get', GetDefinition)
        get_def.wait_for_service()
        try:
            res = get_def(self.DEF_FILE)
            if not res.success:
                msg = "Can't fetch belt definition file. Shutting down."
                rospy.logfatal(msg)
                raise rospy.ROSInitException(msg)
            else:
                rospy.logdebug("Belt definition file fetched.")
                return res.path
        except rospy.ServiceException as exc:
            msg = "Exception when fetching belt definition file. Shutting down.\n {}".format(str(exc))
            rospy.logfatal(msg)
            raise rospy.ROSInitException(msg)
if __name__ == '__main__':
    # Constructing the node blocks inside rospy.spin() until shutdown.
    b = BeltInterpreter()
| utcoupe/coupe18 | ros_ws/src/processing_belt_interpreter/src/belt_interpreter_node.py | Python | gpl-3.0 | 9,252 |
#!/usr/bin/python
# Python 2 exercise script: prompt the user for a name and an address.
name = raw_input("please enter your name:")
address = raw_input("please enter your address:")
print "my name is {} and i live in {}".format(name,address) | tuxfux-hlp-notes/python-batches | archieves/batch-65/second.py | Python | gpl-3.0 | 171 |
"""
Chunk (N number of bytes at M offset to a source's beginning) provider.
Primarily for file sources but usable by any iterator that has both
seek and read( N ).
"""
import os
import base64
import base
import exceptions
import logging
log = logging.getLogger( __name__ )
# -----------------------------------------------------------------------------
class ChunkDataProvider( base.DataProvider ):
    """
    Serve one fixed-size chunk of bytes from a seekable source.

    Note: this version does not account for lines and works with Binary datatypes.
    """
    MAX_CHUNK_SIZE = 2 ** 16
    DEFAULT_CHUNK_SIZE = MAX_CHUNK_SIZE
    settings = {
        'chunk_index' : 'int',
        'chunk_size'  : 'int'
    }

    # TODO: subclass from LimitedOffsetDataProvider?
    # see web/framework/base.iterate_file, util/__init__.file_reader, and datatypes.tabular
    def __init__( self, source, chunk_index=0, chunk_size=DEFAULT_CHUNK_SIZE, **kwargs ):
        """
        :param chunk_index: which `chunk_size`-sized section of the source
            to return (0-based).
        :param chunk_size: desired chunk size (generally in bytes).
        """
        super( ChunkDataProvider, self ).__init__( source, **kwargs )
        self.chunk_size = int( chunk_size )
        self.chunk_pos = int( chunk_index ) * self.chunk_size

    def validate_source( self, source ):
        """
        Require a source offering both `seek` and `read`.

        :raises InvalidDataProviderSource: if either method is missing.
        """
        source = super( ChunkDataProvider, self ).validate_source( source )
        for required_attr in ( 'seek', 'read' ):
            if not hasattr( source, required_attr ):
                raise exceptions.InvalidDataProviderSource( source )
        return source

    def __iter__( self ):
        # not a true iterator: a single chunk is produced, then we're done
        self.__enter__()
        self.source.seek( self.chunk_pos, os.SEEK_SET )
        raw = self.source.read( self.chunk_size )
        yield self.encode( raw )
        self.__exit__()

    def encode( self, chunk ):
        """
        Hook applied to each chunk before it is yielded; identity here.
        Override to modify, encode, or decode chunks.
        """
        return chunk
class Base64ChunkDataProvider( ChunkDataProvider ):
    """
    Data provider that yields chunks of base64 encoded data from its file.
    """
    def encode( self, chunk ):
        """
        Return chunks encoded in base 64.
        """
        # b64encode maps the raw bytes read by ChunkDataProvider.__iter__
        # to their base64 representation before they are yielded.
        return base64.b64encode( chunk )
| icaoberg/cellorganizer-galaxy-tools | datatypes/dataproviders/chunk.py | Python | gpl-3.0 | 2,581 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# STARLING PROJECT
#
# LIRIS - Laboratoire d'InfoRmatique en Image et Systèmes d'information
#
# Copyright: 2012 - 2015 Eric Lombardi (eric.lombardi@liris.cnrs.fr), LIRIS (liris.cnrs.fr), CNRS (www.cnrs.fr)
#
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further information, check the COPYING file distributed with this software.
#
#----------------------------------------------------------------------
#
# Get OS version informations.
#
import platform
fullOsVersion = platform.platform()
if 'x86_64-with-Ubuntu-16.04' in fullOsVersion:
shortVersion = 'u1604-64'
elif 'x86_64-with-Ubuntu-14.04' in fullOsVersion:
shortVersion = 'u1404-64'
elif 'x86_64-with-Ubuntu-12.04' in fullOsVersion:
shortVersion = 'u1204-64'
elif 'Windows-7' in fullOsVersion:
shortVersion = 'w7'
else:
shortVersion = 'unknown'
print shortVersion
| liris-vision/starling | tools/getos.py | Python | gpl-3.0 | 1,475 |
from neobunch import Bunch
import os
from nltk.corpus import wordnet as wn
import re
from itertools import izip
from definition.words.word_sampler import lemmatize_all
class NLTKWordNetParser(object):
def __init__(self, opt):
self.opt = opt
def to_list(self, entry,
order_keys=['word', 'pos', 'sense_id',
'wn_id', 'proper_noun',
'lemma_freq', 'definition']):
output = [u'{}'.format(entry[k]) for k in order_keys]
return output
def parse_synset_name(self, name):
parts = name.split('.')
return Bunch(pos=parts[-2],
sense_id=int(parts[-1]), wn_id=name)
def get_entry(self, word, sense):
synset = self.parse_synset_name(sense.name())
synset.word = word
synset.definition = sense.definition()
synset.proper_noun = self.is_proper_noun(sense)
freq = 0
for lemma in sense.lemmas():
freq += lemma.count()
synset.lemma_freq = freq
return synset
def is_proper_noun(self, sense):
cap = 0
for lemma in sense.lemmas():
if lemma.name() != lemma.name().lower():
cap += 1
return cap == len(sense.lemmas())
def get_entries(self, word):
entries = []
query = word
senses = wn.synsets(query)
for sense in senses:
entries.append(self.get_entry(word, sense))
entries.sort(key=lambda x: x.lemma_freq, reverse=True)
return entries
def one_sense_per_pos(self, entries, pref=['v', 'a', 's', 'n', 'r']):
new_entries = []
for p in pref:
for entry in entries:
if entry.pos == p:
new_entries.append(entry)
break
return new_entries
def select_top_entry(self, entries):
if len(entries) < 2:
return entries
if entries[0].lemma_freq > entries[1].lemma_freq:
return [entries[0]]
top_freq = entries[0].lemma_freq
entries = filter(lambda e: e.lemma_freq == top_freq, entries)
entries = self.one_sense_per_pos(entries)
return [entries[0]]
def remove_self_ref(self, word, entries):
new_entries = []
p = re.compile(r' ' + word + r'[ ,:;"\']')
for entry in entries:
if p.search(entry.definition) is None:
new_entries.append(entry)
return new_entries
def preprocess(self, ifp, ofp):
for line in ifp:
word = line.strip()
entries = self.get_entries(word)
entries = self.remove_self_ref(word, entries)
if self.opt.only_first_sense and len(entries) > 1:
entries = self.select_top_entry(entries)
for entry in entries:
ofp.write(u'{}\n'.format(u'\t'.join(self.to_list(entry))))
class DBWordNetParser(object):
    """Parse the raw WordNet database files and emit entries for queried words.

    Loads the `lexnames`, `index.*`, `data.*` and `index.sense` files from
    ``opt.wndb_dir`` into in-memory dicts once, then answers word queries
    against them (falling back to lemmatized forms for inflected words).
    """

    def __init__(self, opt):
        self.opt = opt
        self.dir = opt.wndb_dir
        # Static lookup tables, each read once from the WordNet distribution.
        self.lexnames = DBWordNetParser.read_lexname(self.dir)
        self.idx = DBWordNetParser.read_index_files(self.dir)
        self.data = DBWordNetParser._read_data_files(self.dir, self.lexnames)
        self.sense_numbers = DBWordNetParser.read_sense_file(self.dir)

    # POS letter -> file suffix used by WordNet ('n' -> data.noun, ...).
    _POS_FILE_MAP_ = {'n': 'noun', 'v': 'verb', 'a': 'adj', 'r': 'adv'}
    # Synset types indexed by the integer ss_type of a sense key (1-based).
    _SYNSET_TYPE_MAP_ = [None, 'n', 'v', 'a', 'r', 's']
    # Capitalized multi-word lemmas (underscore-joined), allowing small
    # connective words like "_of_" between capitalized parts.
    _PROPER_NOUN_REGEX_ = re.compile(r'^(([A-Z]|_(for|the|of|to)_)[^_]+_?)+$')

    def to_list(self, word, entry, inflected=False, maybe_proper_noun=False):
        """Serialize one entry as a list of output fields for *word*."""
        source = 'lemma'
        # proper_noun = 'regular_word'
        if inflected:
            source = 'inflection'
        # if maybe_proper_noun:
        #     proper_noun = 'could_be_proper_nound'
        if not inflected:
            # Restore the original capitalization recorded in the synset.
            for eword in entry.words:
                if eword.lower() == word:
                    word = eword
                    break
        else:
            # Simple plural: re-attach the trailing 's' to the cased lemma.
            if entry.lemma == word[:-1] and word[-1] == 's':
                for eword in entry.words:
                    if eword.lower() == entry.lemma:
                        word = eword + 's'
                        break
        output = [word, entry.lemma, entry.synset_type,
                  str(entry.sense_number), entry.pos, source,
                  ','.join(entry.words), entry.gloss]
        return output

    def get_idx_entries(self, word, try_lemma=True):
        """Look up *word* in the index; fall back to its lemmatized forms.

        Returns (entries, inflected_flag).
        NOTE(review): implicitly returns None when the word is unknown and
        try_lemma is False — callers always use the default, so the tuple
        unpacking never sees it; confirm before changing call sites.
        """
        if word in self.idx:
            return self.idx[word], False
        if try_lemma:
            out_entries = []
            tagged_lemmas = lemmatize_all(word)
            for lemma, pos in tagged_lemmas:
                for e in self.idx.get(lemma, []):
                    # Only accept lemmas that differ from the surface form
                    # and whose POS matches the lemmatizer's tag.
                    if e.pos == pos and lemma != word:
                        out_entries.append(e)
            return out_entries, True

    def get_entries(self, idx_entries):
        """Join index entries with their data records into full entries."""
        out_entries = []
        for idx_entry in idx_entries:
            for synset_offset in idx_entry.synset_offsets:
                data_entry = self.data[synset_offset]
                # Key format matches read_sense_file: lemma-ss_type-offset.
                sense_key = '{}-{}-{}'.format(
                    idx_entry.lemma, data_entry.synset_type, data_entry.offset)
                entry = Bunch(lemma=idx_entry.lemma,
                              pos=idx_entry.pos,
                              synset_type=data_entry.synset_type,
                              sense_number=self.sense_numbers[sense_key],
                              gloss=data_entry.gloss,
                              words=[e.word for e in data_entry.words])
                out_entries.append(entry)
        return out_entries

    def preprocess(self, ifp, ofp):
        """Read one word per line from *ifp*; write its entries to *ofp* as TSV."""
        for line in ifp:
            word = line.strip()
            idx_entries, inflected = self.get_idx_entries(word)
            entries = self.get_entries(idx_entries)
            # Flag words where some synonym looks like a proper noun.
            maybe_proper_noun = False
            for entry in entries:
                for other_word in entry.words:
                    if DBWordNetParser.is_proper_noun(other_word):
                        maybe_proper_noun = True
                        break
            for entry in entries:
                # Skip pure proper-noun senses reached via lemmatization.
                if inflected and DBWordNetParser.is_entry_proper_noun(entry):
                    continue
                ofp.write(u'{}\n'.format(
                    u'\t'.join(self.to_list(
                        word, entry, inflected, maybe_proper_noun))))

    @staticmethod
    def is_entry_proper_noun(entry):
        """True when every word of the entry matches the proper-noun pattern."""
        for word in entry.words:
            if not DBWordNetParser.is_proper_noun(word):
                return False
        return True

    @staticmethod
    def is_proper_noun(word):
        """True when *word* matches the capitalized-lemma pattern above."""
        m = DBWordNetParser._PROPER_NOUN_REGEX_.match(word)
        return m is not None

    @staticmethod
    def read_lexname(wndb_path):
        """Read the `lexnames` file: lexicographer file number -> name."""
        lexnames = {}
        lexname_path = os.path.join(wndb_path, 'lexnames')
        with open(lexname_path) as ifp:
            for line in ifp:
                # Leading-space lines are the license header; skip them.
                if line.startswith(' '):
                    continue
                part = line.strip().split('\t')
                lexnames[part[0]] = part[1]
        return lexnames

    @staticmethod
    def read_sense_file(wndb_path):
        """Read `index.sense`: map lemma-ss_type-offset -> sense number."""
        sense_numbers = {}
        idx_sense_path = os.path.join(wndb_path, 'index.sense')
        with open(idx_sense_path) as ifp:
            for line in ifp:
                if line.startswith(' '):
                    continue
                part = line.strip().split(' ')
                # Sense key looks like lemma%ss_type:...; only the leading
                # digit of the remainder (the synset type) is used.
                lemma, key = part[0].split('%')
                synset_type = DBWordNetParser._SYNSET_TYPE_MAP_[int(key[0])]
                offset = part[1]
                number = int(part[2])
                sense_key = '{}-{}-{}'.format(lemma, synset_type, offset)
                sense_numbers[sense_key] = number
        return sense_numbers

    @staticmethod
    def read_index_files(wndb_path, pos_files=['noun', 'verb', 'adj', 'adv']):
        """Read the `index.*` files: lemma -> list of index entries."""
        entries = {}
        for pos_file in pos_files:
            idx_path = os.path.join(wndb_path, 'index.' + pos_file)
            with open(idx_path) as ifp:
                for line in ifp:
                    if line.startswith(' '):
                        continue
                    part = line.strip().split()
                    # part[3] is p_cnt: number of pointer symbols that follow.
                    _ap = 4+int(part[3])
                    lemma = part[0]
                    idx_entry = Bunch(
                        lemma=lemma,
                        pos=part[1],
                        pointers=part[4:_ap],
                        num_tagsenes=int(part[_ap + 1]),
                        # Prefix offsets with the POS file so they key into
                        # the dict built by _read_data_files.
                        synset_offsets=['{}-{}'.format(pos_file, _o)
                                        for _o in part[_ap + 2:]])
                    entries[lemma] = entries.get(lemma, [])
                    entries[lemma].append(idx_entry)
        return entries

    @staticmethod
    def _parse_pointers(tups):
        """Turn 4-field pointer tuples into Bunches with resolvable offsets."""
        pointers = []
        for tup in tups:
            pointers.append(
                Bunch(pointer=tup[0], offset='{}-{}'.format(
                    DBWordNetParser._POS_FILE_MAP_[tup[2]], tup[1]),
                      source=tup[3][:2], target=tup[3][2:]))
        return pointers

    @staticmethod
    def _parse_words(tups):
        """Turn (word, lex_id) pairs into Bunches."""
        words = []
        for tup in tups:
            words.append(Bunch(word=tup[0], lex_id=tup[1]))
        return words

    @staticmethod
    def _chunklist(t, size=2):
        """Group a flat sequence into consecutive tuples of *size* items."""
        it = iter(t)
        return list(izip(*[it]*size))

    @staticmethod
    def _read_data_files(wndb_path, lexnames,
                         pos_files=['noun', 'verb', 'adj', 'adv']):
        """Read the `data.*` files: 'posfile-offset' -> synset record."""
        data = {}
        for pos_file in pos_files:
            data_path = os.path.join(wndb_path, 'data.' + pos_file)
            with open(data_path) as ifp:
                for line in ifp:
                    if line.startswith(' '):
                        continue
                    part = line.strip().split()
                    # w_cnt is hexadecimal in data.* files, hence base 16.
                    _num_words = int(part[3], 16)
                    _num_pointers = int(part[4 + _num_words * 2])
                    _pp = 4 + _num_words*2 + 1
                    # Everything after the '|' separator is the gloss text.
                    _gloss_p = part.index('|') + 1
                    data_entry = Bunch(
                        offset=part[0],
                        lexname=lexnames[part[1]],
                        synset_type=part[2],
                        words=DBWordNetParser._parse_words(
                            DBWordNetParser._chunklist(part[4:_pp - 1])),
                        pointers=DBWordNetParser._parse_pointers(
                            DBWordNetParser._chunklist(
                                part[_pp:_pp + _num_pointers * 4], 4)),
                        gloss=' '.join(part[_gloss_p:]))
                    data['{}-{}'.format(pos_file, part[0])] = data_entry
        return data
if __name__ == '__main__':
    # Smoke test: parse the WordNet database files and dump entries for 'tests'.
    opt = Bunch(wndb_dir='data/wndb')
    parser = DBWordNetParser(opt)
    idx_entries, inflected = parser.get_idx_entries('tests')
    for e in parser.get_entries(idx_entries):
        print(e)
| NorThanapon/dict-definition | definition/readers/wordnet.py | Python | gpl-3.0 | 10,980 |
""" Container for TaskManager plug-ins, to handle the destination of the tasks
"""
import six
from DIRAC import gLogger
from DIRAC.Core.Utilities.List import fromChar
from DIRAC.Core.Utilities.SiteSEMapping import getSitesForSE
from DIRAC.DataManagementSystem.Utilities.DMSHelpers import DMSHelpers
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getSites
from DIRAC.TransformationSystem.Client.PluginBase import PluginBase
class TaskManagerPlugin(PluginBase):
    """A TaskManagerPlugin object should be instantiated by every TaskManager object.

    self.params here could be
    {'Status': 'Created', 'TargetSE': 'Unknown', 'TransformationID': 1086L, 'RunNumber': 0L,
    'Site': 'DIRAC.Test.ch', 'TaskID': 21L, 'InputData': '', 'JobType': 'MCSimulation'}
    which corresponds to paramsDict in TaskManager (which is in fact a tasks dict)
    """

    def _BySE(self):
        """Matches using TargetSE. This is the standard plugin.

        Returns the set of sites associated to any of the task's target SEs;
        an empty set when no usable TargetSE is present.
        """
        destSites = set()
        try:
            seList = ["Unknown"]
            if self.params["TargetSE"]:
                # TargetSE may be a comma-separated string or already a list.
                if isinstance(self.params["TargetSE"], six.string_types):
                    seList = fromChar(self.params["TargetSE"])
                elif isinstance(self.params["TargetSE"], list):
                    seList = self.params["TargetSE"]
        except KeyError:
            pass
        if not seList or seList == ["Unknown"]:
            return destSites
        for se in seList:
            res = getSitesForSE(se)
            if not res["OK"]:
                gLogger.warn("Could not get Sites associated to SE", res["Message"])
            else:
                thisSESites = res["Value"]
                if thisSESites:
                    # We make an OR of the possible sites
                    destSites.update(thisSESites)
        gLogger.debug("Destinations: %s" % ",".join(destSites))
        return destSites

    def _ByJobType(self):
        """By default, all sites are allowed to do every job.
        The actual rules are freely specified in the Operation JobTypeMapping section.
        The content of the section may look like this:

        User
        {
          Exclude = PAK
          Exclude += Ferrara
          Exclude += Bologna
          Exclude += Paris
          Exclude += CERN
          Exclude += IN2P3
          Allow
          {
            Paris = IN2P3
            CERN = CERN
            IN2P3 = IN2P3
          }
        }
        DataReconstruction
        {
          Exclude = PAK
          Exclude += Ferrara
          Exclude += CERN
          Exclude += IN2P3
          Allow
          {
            Ferrara = CERN
            CERN = CERN
            IN2P3 = IN2P3
            IN2P3 += CERN
          }
        }
        Merge
        {
          Exclude = ALL
          Allow
          {
            CERN = CERN
            IN2P3 = IN2P3
          }
        }

        The sites in the exclusion list will be removed.
        The allow section says where each site may help another site
        """
        # 1. get sites list
        res = getSites()
        if not res["OK"]:
            gLogger.error("Could not get the list of sites", res["Message"])
            # NOTE(review): error path returns the S_ERROR dict while the
            # success path returns a set — callers must handle both.
            return res
        destSites = set(res["Value"])
        # 2. get JobTypeMapping "Exclude" value (and add autoAddedSites)
        gLogger.debug("Getting JobTypeMapping 'Exclude' value (and add autoAddedSites)")
        jobType = self.params["JobType"]
        if not jobType:
            raise RuntimeError("No jobType specified")
        excludedSites = set(self.opsH.getValue("JobTypeMapping/%s/Exclude" % jobType, []))
        gLogger.debug("Explicitly excluded sites for %s task: %s" % (jobType, ",".join(excludedSites)))
        autoAddedSites = self.opsH.getValue("JobTypeMapping/AutoAddedSites", [])
        if "WithStorage" in autoAddedSites:
            # Add all sites with storage, such that jobs can run wherever data is
            autoAddedSites.remove("WithStorage")
            autoAddedSites += DMSHelpers().getTiers(withStorage=True)
        # 3. removing sites in Exclude
        if not excludedSites:
            pass
        elif "ALL" in excludedSites:
            destSites = set()
        else:
            destSites -= excludedSites
        # 4. get JobTypeMapping "Allow" section
        res = self.opsH.getOptionsDict("JobTypeMapping/%s/Allow" % jobType)
        if not res["OK"]:
            gLogger.debug(res["Message"])
            allowed = {}
        else:
            allowed = dict((site, set(fromChar(fromSites))) for site, fromSites in res["Value"].items())
        # Per-jobType AutoAddedSites overrides the global one when present.
        autoAddedSites = set(self.opsH.getValue("JobTypeMapping/%s/AutoAddedSites" % jobType, autoAddedSites))
        gLogger.debug("Auto-added sites for %s task: %s" % (jobType, ",".join(autoAddedSites)))
        # 5. add autoAddedSites, if requested
        for autoAddedSite in autoAddedSites:
            allowed.setdefault(autoAddedSite, set()).add(autoAddedSite)
        gLogger.debug("Allowed sites for %s task: %s" % (jobType, ",".join(allowed)))
        # 6. Allowing sites that should be allowed
        taskSiteDestination = self._BySE()
        for destSite, fromSites in allowed.items():
            for fromSite in fromSites:
                if not taskSiteDestination or fromSite in taskSiteDestination:
                    destSites.add(destSite)
        gLogger.debug(
            "Computed list of destination sites for %s task with TargetSE %s: %s"
            % (jobType, self.params["TargetSE"], ",".join(destSites))
        )
        return destSites
| DIRACGrid/DIRAC | src/DIRAC/TransformationSystem/Client/TaskManagerPlugin.py | Python | gpl-3.0 | 5,584 |
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013, John McNamara, jmcnamara@cpan.org
#
import unittest
from ...compatibility import StringIO
from ...vml import Vml
class TestWriteOidmap(unittest.TestCase):
    """
    Test the Vml _write_idmap() method.
    """
    # NOTE(review): class name says "Oidmap" but the method under test is
    # _write_idmap — likely a typo; kept as-is to avoid renaming a test class.

    def setUp(self):
        # Capture Vml output in an in-memory buffer instead of a real file.
        self.fh = StringIO()
        self.vml = Vml()
        self.vml._set_filehandle(self.fh)

    def test_write_idmap(self):
        """Test the _write_idmap() method"""
        self.vml._write_idmap(1)
        exp = """<o:idmap v:ext="edit" data="1"/>"""
        got = self.fh.getvalue()
        self.assertEqual(got, exp)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| ivmech/iviny-scope | lib/xlsxwriter/test/vml/test_write_idmap.py | Python | gpl-3.0 | 748 |
from __future__ import unicode_literals
import sys
import os
import random
import matplotlib
# Make sure that we are using QT5
matplotlib.use('Qt5Agg')
from PyQt5 import QtCore, QtWidgets
from numpy import arange, sin, pi
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
progname = os.path.basename(sys.argv[0])
progversion = "0.1"
class MyMplCanvas(FigureCanvas):
    """Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.)."""

    def __init__(self, parent=None, width=5, height=4, dpi=100):
        # width/height are in inches; dpi scales them to screen pixels.
        fig = Figure(figsize=(width, height), dpi=dpi)
        self.axes = fig.add_subplot(111)
        # Let the subclass draw its initial content before the widget shows.
        self.compute_initial_figure()
        FigureCanvas.__init__(self, fig)
        self.setParent(parent)
        # Allow the canvas to grow/shrink with its parent layout.
        FigureCanvas.setSizePolicy(self,
                                   QtWidgets.QSizePolicy.Expanding,
                                   QtWidgets.QSizePolicy.Expanding)
        FigureCanvas.updateGeometry(self)

    def compute_initial_figure(self):
        # Hook for subclasses; the base canvas starts empty.
        pass
class MyStaticMplCanvas(MyMplCanvas):
    """Simple canvas with a sine plot."""

    def compute_initial_figure(self):
        # sin(2*pi*t) sampled every 0.01 over [0, 3).
        samples = arange(0.0, 3.0, 0.01)
        self.axes.plot(samples, sin(2 * pi * samples))
class MyDynamicMplCanvas(MyMplCanvas):
    """A canvas that updates itself every second with a new plot."""

    def __init__(self, *args, **kwargs):
        MyMplCanvas.__init__(self, *args, **kwargs)
        # The timer is parented to the canvas, so Qt destroys it with the widget.
        timer = QtCore.QTimer(self)
        timer.timeout.connect(self.update_figure)
        timer.start(1000)

    def compute_initial_figure(self):
        # Fixed red line shown until the first timer tick replaces it.
        self.axes.plot([0, 1, 2, 3], [1, 2, 0, 4], 'r')

    def update_figure(self):
        # Build a list of 4 random integers between 0 and 10 (both inclusive)
        l = [random.randint(0, 10) for i in range(4)]
        # Clear the previous frame before replotting, then repaint.
        self.axes.cla()
        self.axes.plot([0, 1, 2, 3], l, 'r')
        self.draw()
class ApplicationWindow(QtWidgets.QMainWindow):
    """Main window embedding one static and one self-updating mpl canvas."""

    def __init__(self):
        QtWidgets.QMainWindow.__init__(self)
        # Free the window's resources as soon as it is closed.
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.setWindowTitle("application main window")
        self.file_menu = QtWidgets.QMenu('&File', self)
        self.file_menu.addAction('&Quit', self.fileQuit,
                                 QtCore.Qt.CTRL + QtCore.Qt.Key_Q)
        self.menuBar().addMenu(self.file_menu)
        self.help_menu = QtWidgets.QMenu('&Help', self)
        self.menuBar().addSeparator()
        self.menuBar().addMenu(self.help_menu)
        self.help_menu.addAction('&About', self.about)
        # Stack the two canvases vertically in the central widget.
        self.main_widget = QtWidgets.QWidget(self)
        l = QtWidgets.QVBoxLayout(self.main_widget)
        sc = MyStaticMplCanvas(self.main_widget, width=5, height=4, dpi=100)
        dc = MyDynamicMplCanvas(self.main_widget, width=5, height=4, dpi=100)
        l.addWidget(sc)
        l.addWidget(dc)
        self.main_widget.setFocus()
        self.setCentralWidget(self.main_widget)
        # Transient status message (2s).
        self.statusBar().showMessage("All hail matplotlib!", 2000)

    def fileQuit(self):
        self.close()

    def closeEvent(self, ce):
        # Route window-manager close events through the same quit path.
        self.fileQuit()

    def about(self):
        QtWidgets.QMessageBox.about(self, "About",
                                    """embedding_in_qt5.py example
Copyright 2005 Florent Rougon, 2006 Darren Dale, 2015 Jens H Nielsen
This program is a simple example of a Qt5 application embedding matplotlib
canvases.
It may be used and modified with no restriction; raw copies as well as
modified versions may be distributed without limitation.
This is modified from the embedding in qt4 example to show the difference
between qt4 and qt5"""
                                    )
# Start the Qt event loop; exits the process with the loop's return code.
qApp = QtWidgets.QApplication(sys.argv)
aw = ApplicationWindow()
aw.setWindowTitle("%s" % progname)
aw.show()
sys.exit(qApp.exec_())
# qApp.exec_()
| RyanChinSang/ECNG3020-ORSS4SCVI | BETA/TestCode/Matplotlib/mpl1.py | Python | gpl-3.0 | 3,865 |
from heppy_fcc.particles.jet import Jet as BaseJet
from vertex import Vertex
from ROOT import TLorentzVector
import math
class Jet(BaseJet):
    """Wrap an FCC EDM jet, exposing its four-momentum as a ROOT TLorentzVector."""

    def __init__(self, fccjet):
        # Keep a handle on the underlying EDM object.
        self.fccjet = fccjet
        self._tlv = TLorentzVector()
        p4 = fccjet.Core().P4
        # EDM stores (px, py, pz, mass); ROOT derives the energy component.
        # NOTE(review): BaseJet.__init__ is not called — presumably
        # intentional; confirm against heppy_fcc's Jet base class.
        self._tlv.SetXYZM(p4.Px, p4.Py, p4.Pz, p4.Mass)
| semkiv/heppy_fcc | particles/fcc/jet.py | Python | gpl-3.0 | 341 |
import websocket
import logging
import json
import config
import threading
from time import sleep
from pprint import pformat
class TimeOutReached(Exception):
    """Raised when a wait loop exhausts its timeout without the expected event."""
    pass
class EventListenerThread(threading.Thread):
    """Background thread that listens on a websocket and stacks JSON events.

    Incoming messages are parsed and appended to `messageStack`; helper
    methods poll that stack for call-flow events by type, call id and
    destination.
    """

    # Class-level defaults; real per-instance values are set in __init__.
    log = None
    ws = None
    messageStack = []
    ws_uri = None
    authtoken = None
    open = False
    #messageStack = dict()

    def __init__(self, uri, token):
        # Bug fix: was `self.log = self.log = logging.getLogger(...)`
        # (accidentally duplicated assignment target).
        self.log = logging.getLogger(self.__class__.__name__)
        super(EventListenerThread, self).__init__()
        self.ws_uri = uri
        self.authtoken = token
        self.open = False
        self.flush()

    def flush(self):
        """Discard all stacked events (gives this instance its own stack)."""
        self.messageStack = []

    def stack_contains(self, event_type, call_id=None, destination=None):
        """True when some stacked event matches type and, if given, call id
        and destination."""
        for item in self.messageStack:
            if item['event'] == event_type:
                if call_id is None:
                    if destination is None:
                        return True
                    elif item['call']['destination'] == destination:
                        return True
                elif item['call']['id'] == call_id:
                    if destination is None:
                        return True
                    elif item['call']['destination'] == destination:
                        return True
        return False

    def WaitForOpen(self, timeout=3.0):
        """Block until the websocket reports open; raise TimeOutReached otherwise."""
        RESOLUTION = 0.1
        timeSlept = 0.0
        while timeSlept < timeout:
            timeSlept += RESOLUTION
            if self.open:
                return
            sleep(RESOLUTION)
        raise TimeOutReached("Did not open websocket in a timely manner")

    def WaitFor(self, event_type, call_id=None, timeout=10.0):
        """Poll the stack until a matching event arrives; raise TimeOutReached
        after *timeout* seconds."""
        RESOLUTION = 0.1
        timeSlept = 0.0
        while timeSlept < timeout:
            timeSlept += RESOLUTION
            if self.stack_contains(event_type=event_type, call_id=call_id):
                return
            sleep(RESOLUTION)
        raise TimeOutReached(event_type + ":" + str(call_id))

    def getLatestEvent(self, event_type, call_id=None, destination=None):
        """Scan newest-first for a matching event.

        NOTE(review): returns item['event'] (i.e. the event type string that
        was passed in) rather than the full item — Get_Latest_Event below is
        the variant that returns the whole event dict. Behavior kept for
        existing callers.
        """
        for item in reversed(self.messageStack):
            if item['event'] == event_type:
                if call_id is None:
                    if destination is None:
                        return item['event']
                    elif item['call']['destination'] == destination:
                        return item['event']
                elif item['call']['id'] == call_id:
                    if destination is None:
                        return item['event']
                    elif item['call']['destination'] == destination:
                        return item['event']
        return False

    def Get_Latest_Event(self, Event_Type, Call_ID=None, Destination=None):
        """Return the newest stacked event matching the filters, or None."""
        try:
            for item in reversed(self.messageStack):
                if item['event'] == Event_Type:
                    if Call_ID is None:
                        if Destination is None:
                            return item
                        elif item['call']['destination'] == Destination:
                            return item
                    elif item['call']['id'] == Call_ID:
                        if Destination is None:
                            return item
                        elif item['call']['destination'] == Destination:
                            return item
        except:
            self.log.critical("Exception in Get_Latest_Event: messageStack = " + str(self.messageStack))
            raise
        self.log.info("Didn't find a match on {Event_Type = " + Event_Type + " & Call_ID = " + str(Call_ID) + " & Destination = " + str(Destination) + "}")
        return None

    def dump_stack(self):
        """Pretty-printed snapshot of the event stack (for debugging)."""
        return pformat(self.messageStack)

    def on_error(self, ws, error):
        self.log.error("Unspecified error:" + str(error))

    def on_open(self, ws):
        self.log.info("Opened websocket")
        self.open = True

    def on_close(self, ws):
        self.log.info("Closed websocket")
        self.open = False

    def on_message(self, ws, message):
        # Every received frame is parsed as JSON and stacked.
        self.log.info(message)
        self.messageStack.append(json.loads(message))

    def connect(self):
        """Create the websocket client, authenticating via a token query param."""
        full_uri = self.ws_uri + "?token=" + self.authtoken
        try:
            self.ws = websocket.WebSocketApp(full_uri,
                                             on_message=self.on_message,
                                             on_error=self.on_error,
                                             on_close=self.on_close)
            self.ws.on_open = self.on_open
            self.log.info("Websocket connected to " + full_uri)
        except:
            self.log.critical("Websocket could not connect to " + full_uri)

    def run(self):
        """Thread body: connect and pump websocket events until closed."""
        try:
            self.log.info("Starting websocket")
            self.connect()
            self.ws.run_forever()
        except:
            self.log.critical("Run in thread failed!")

    def stop(self):
        """Close the websocket if it is open (idempotent)."""
        if self.open:
            self.ws.close()
            self.open = False

    def __del__(self):
        self.stop()
if __name__ == "__main__":
    # Minimal smoke test: start the listener against the configured endpoint
    # and stop it again immediately.
    elt = EventListenerThread(uri=config.call_flow_events, token=config.authtoken)
    elt.start()
    elt.stop()
| AdaHeads/Coverage_Tests | src/event_stack.py | Python | gpl-3.0 | 5,316 |
# This Python file uses the following encoding: utf-8
from django.test import TestCase, RequestFactory
from models import Meeting, Abstract, Author
from django.core.urlresolvers import reverse
from fiber.models import Page
from views import AbstractCreateView
from home.models import Announcement
from datetime import datetime
from django.contrib.auth.models import AnonymousUser, User
# Factory method to create a fiber page tree with five pages.
# def create_django_page_tree():
# mainmenu = Page.objects.create(title='mainmenu')
# home = Page.objects.create(title='home', parent=mainmenu, url='home', template_name='base/home.html')
# Page.objects.create(title='join', parent=home, url='join', template_name='base/join.html')
# Page.objects.create(title='members', parent=home, url='members', template_name='base/members')
# Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
# Factory methods to create test abstracts, meetings, and authors
# def create_meeting(year=2020, title='Jamaica 2020', location='Jamaica', associated_with='AAPA'):
# """
# Creates a Meeting with default values for year, title, location and associated_with.
# """
# return Meeting.object.create(title, year, location=location, associated_with=associated_with)
# Factory method to create a fiber page tree with five home pages plus three meetings pages and their associated
# meeting instances.
# def create_three_meetings_with_pages():
# # Create home fiber tree
# create_django_page_tree()
# # Create meeting instances
# calgary = Meeting(year=2014, title='Calgary 2014', location='Calgary, AB', associated_with='AAPA')
# calgary.create_fiber_page()
# calgary.save()
# san_francisco = Meeting(year=2015, title='San Francisco 2015', location='San Francisco, CA', associated_with='SAA')
# san_francisco.create_fiber_page()
# san_francisco.save()
# atlanta = Meeting(year=2016, title='Atlanta 2016', location='Atlanta, GA', associated_with='AAPA')
# atlanta.create_fiber_page()
# atlanta.save()
def create_abstract(meeting,
                    contact_email='denne.reed@gmail.com',
                    presentation_type='Paper',
                    title='Silly Walks of the Neanderthals',
                    abstract_text="""<p> Test abstract text about silly walks in Neanderthals.</p> """,
                    year=2020):
    """Factory returning an unsaved Abstract with sensible test defaults.

    All model-constructor arguments are passed by keyword: Django maps
    positional Model() arguments to fields in declaration order (starting
    with the auto ``id``), so the original positional call bound ``meeting``
    and friends to the wrong fields.
    """
    return Abstract(meeting=meeting,
                    contact_email=contact_email,
                    presentation_type=presentation_type,
                    title=title,
                    abstract_text=abstract_text,
                    year=year)
def create_author(abstract, author_rank,
                  last_name='Fake',
                  first_name="Ima",
                  name='Ima Fake',
                  department='Fake Anthropology',
                  institution='Chaos University',
                  country='United States of America',
                  email_address='denne.reed@gmail.com'
                  ):
    """Factory returning an unsaved Author for the given abstract and rank.

    All model-constructor arguments are passed by keyword: Django maps
    positional Model() arguments to fields in declaration order (starting
    with the auto ``id``), so the original positional ``abstract,
    author_rank`` call bound them to the wrong fields.
    """
    return Author(abstract=abstract,
                  author_rank=author_rank,
                  last_name=last_name,
                  first_name=first_name,
                  name=name,
                  department=department,
                  institution=institution,
                  country=country,
                  email_address=email_address
                  )
class MeetingCreateMethodTests(TestCase):
    """Checks that Meeting.objects.create persists the supplied field values."""
    def test_meeting_create_method(self):
        count_before = Meeting.objects.count()
        meeting = Meeting.objects.create(title='Pittsburgh 1992', year=1992,
                                         location='Pittsburgh, PA', associated_with='SAA')
        # Exactly one new row, carrying the values we passed in.
        self.assertEqual(Meeting.objects.count(), count_before + 1)
        self.assertEqual(meeting.title, 'Pittsburgh 1992')
        self.assertEqual(meeting.year, 1992)
        self.assertEqual(meeting.associated_with, 'SAA')
class MeetingMethodTests(TestCase):
    """Unit tests for Meeting instance methods: create_fiber_page and has_detail."""
    def setUp(self):
        # Create a basic page tree
        starting_page_count = Page.objects.count()
        mainmenu = Page.objects.create(title='mainmenu')
        Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
        self.assertEqual(Page.objects.count(), starting_page_count+2)  # test two pages saved
        # Create two meetings
        starting_meeting_count = Meeting.objects.count()
        Meeting.objects.create(title='Pittsburgh 1992', year=1992,
                               location='Pittsburgh, PA', associated_with='SAA')
        Meeting.objects.create(year=2014, title='Calgary 2014',
                               location='Calgary', associated_with='AAPA')
        self.assertEqual(Meeting.objects.count(), starting_meeting_count+2)
    def test_meeting_create_fiber_page_method(self):
        """
        Tests the fiber page constructor method.
        """
        # Fetch a meeting
        calgary_2014 = Meeting.objects.get(title='Calgary 2014')
        # Call page constructor method
        starting_page_count = Page.objects.count()
        calgary_2014.create_fiber_page()
        self.assertEqual(Page.objects.count(), starting_page_count+1)
        # Fetch the fiber page we just created
        calgary_2014_fiber_page = Page.objects.get(url__exact='2014')
        # Test the attributes of the fiber page: it hangs off the meetings
        # page and its URL/title mirror the meeting's year/title.
        self.assertEqual(calgary_2014_fiber_page.parent, Page.objects.get(url__exact='meetings'))
        self.assertEqual(calgary_2014_fiber_page.url, '2014')
        self.assertEqual(calgary_2014_fiber_page.title, 'Calgary 2014')
        self.assertEqual(calgary_2014_fiber_page.get_absolute_url(), '/meetings/2014/')
        self.assertEqual(calgary_2014_fiber_page.get_absolute_url(),
                         reverse('meetings:meeting_detail', kwargs={"year": 2014}))
        # Test that the page renders
        response = self.client.get('/meetings/2014/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Calgary')
    def test_meeting_has_detail_method(self):
        """
        Tests the has_detail method: true only when a public fiber page exists.
        """
        calgary_2014 = Meeting.objects.get(year=2014)
        # IF no fiber page then has_detail should be false
        self.assertEqual(calgary_2014.has_detail(), False)
        # Call page constructor method
        calgary_2014.create_fiber_page()
        # If fiber page then has_detail should be true
        self.assertEqual(calgary_2014.has_detail(), True)
        cfp = Page.objects.get(url__exact=2014)  # get the page instance
        cfp.is_public = False  # set to not public
        cfp.save()  # save the change
        self.assertEqual(calgary_2014.has_detail(), False)  # Now has detail should return false
class MeetingsViewTestsNoData(TestCase):
    """Exercises the meetings index view when no Meeting rows exist."""
    def setUp(self):
        # Minimal fiber tree: a main menu with a meetings page beneath it.
        pages_before = Page.objects.count()
        root = Page.objects.create(title='mainmenu')
        Page.objects.create(title='meetings', parent=root, url='meetings', template_name='')
        self.assertEqual(Page.objects.count(), pages_before + 2)  # both pages persisted
    def test_meetings_index_view_with_no_meetings(self):
        """The index still renders and exposes an empty meeting list."""
        response = self.client.get(reverse('meetings:meetings'))
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(response.context['meeting_list'], [])
class MeetingsViewTestsWithData(TestCase):
    """Tests of the meetings index and detail views with three meetings present."""
    def setUp(self):
        # Create basic fiber tree
        starting_page_count = Page.objects.count()
        mainmenu = Page.objects.create(title='mainmenu')
        Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
        self.assertEqual(Page.objects.count(), starting_page_count+2)  # test two pages saved
        # Three meetings, each with its own fiber detail page.
        calgary = Meeting.objects.create(year=2014, title='Calgary 2014',
                                         location='Calgary, AB', associated_with='AAPA')
        calgary.create_fiber_page()
        san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
                                               location='San Francisco, CA', associated_with='SAA')
        san_francisco.create_fiber_page()
        atlanta = Meeting.objects.create(year=2016, title='Atlanta 2016',
                                         location='Atlanta, GA', associated_with='AAPA')
        atlanta.create_fiber_page()
    def test_meetings_index_view_with_meetings(self):
        """Index lists all meetings newest-first and drops links to non-public pages."""
        response = self.client.get(reverse('meetings:meetings'))  # Meetings index should show three meetings
        calgary = Meeting.objects.get(year=2014)  # get meeting instance
        san_francisco = Meeting.objects.get(year=2015)
        atlanta = Meeting.objects.get(year=2016)
        self.assertContains(response, calgary.location, status_code=200,)
        self.assertContains(response, san_francisco.location, status_code=200)
        self.assertContains(response, atlanta.location, status_code=200)
        # Meetings are listed in reverse chronological order.
        self.assertQuerysetEqual(response.context['meeting_list'],
                                 ['<Meeting: Atlanta 2016>',
                                  '<Meeting: San Francisco 2015>',
                                  '<Meeting: Calgary 2014>'])
        self.assertContains(response, "<table>")  # response includes a table element
        self.assertContains(response, '<a href="/meetings/2014/"')  # contains a link to the 2014 meeting detail
        self.assertContains(response, '<a href="/meetings/2015/"')
        self.assertContains(response, '<a href="/meetings/2016/"')
        self.assertEqual(Page.objects.count(), 5)  # should have 5 fiber pages
        self.assertEqual(Meeting.objects.count(), 3)  # should have 3 meetings
        atlanta_fp = Page.objects.get(url__exact=2016)  # Get Atlanta fiber page
        atlanta_fp.is_public = False  # Set to not public
        atlanta_fp.save()  # save the change
        self.assertEqual(atlanta_fp.is_public, False)
        self.assertEqual(atlanta.has_detail(), False)  # meeting should NOT have detail
        self.assertEqual(atlanta_fp.show_in_menu, False)  # meeting fiber page should not be in menu
        response = self.client.get(reverse('meetings:meetings'))  # Reload the page!
        # If fiber page is not public and not in menu there should be no link to it
        self.assertNotContains(response, '<a href="/meetings/2016/"')
    def test_meetings_index_view_with_missing_meetings(self):
        """Index never mentions meetings that were not created."""
        response = self.client.get(reverse('meetings:meetings'))
        # Returns page but does not contain a meeting that does not exist.
        self.assertNotContains(response, "Vancouver", status_code=200)
        self.assertContains(response, "<table>", status_code=200)  # contains a table listing meetings
    def test_meetings_detail_view(self):
        """The per-year detail page renders for an existing meeting."""
        response = self.client.get(reverse('meetings:meeting_detail', args=[2014]))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Calgary')
class AbstractCreateMethodTests(TestCase):
    """Tests ORM creation of Abstract and Author rows, including non-ASCII names."""
    def test_abstract_create_method(self):
        starting_abstract_count = Abstract.objects.count()
        # create a meeting
        san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
                                               location='San Francisco, CA', associated_with='SAA')
        # create an abstract for the meeting
        new_abstract = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='denne.reed@gmail.com',
                                               presentation_type='Paper',
                                               title='Silly Walks of the Neanderthals',
                                               abstract_text="""<p>Silly walks in Neanderthals.</p> """,
                                               year=2015)
        # test that the abstract was created correctly
        self.assertEqual(Abstract.objects.count(), starting_abstract_count+1)
        self.assertEqual(new_abstract.title, 'Silly Walks of the Neanderthals')
        self.assertEqual(new_abstract.year, 2015)
        # First author attaches to the abstract with rank 1.
        starting_author_count = Author.objects.count()
        new_author = Author.objects.create(abstract=new_abstract, author_rank=1, first_name="Bob",
                                           last_name="Reed", institution="University of Texas at Austin",
                                           department="Anthropology", country="United States of America",
                                           email_address="denne.reed@gmail.com")
        self.assertEqual(Author.objects.count(), starting_author_count+1)
        self.assertEqual(new_author.last_name, 'Reed')
        self.assertEqual(new_author.abstract, new_abstract)
        self.assertEqual(new_author.full_name(), "Bob Reed")
        self.assertEqual(new_author.author_rank, 1)
        # Second author uses accented characters to exercise unicode handling.
        author2 = Author.objects.create(abstract=new_abstract, author_rank=2, first_name='Denné',
                                        last_name='Jéhnson', institution="University of Texas at Austin",
                                        department="Anthropology", country="United States of America",
                                        email_address="denne.reed@gmail.com")
        self.assertEqual(Author.objects.count(), starting_author_count+2)
        self.assertEqual(author2.last_name, 'Jéhnson')
        self.assertEqual(author2.abstract, new_abstract)
        self.assertEqual(author2.full_name(), 'Denné Jéhnson')
        self.assertEqual(author2.author_rank, 2)
class AbstractMethodTests(TestCase):
    """Tests Abstract helper methods (lead_author_last_name, pretty_title)."""
    def setUp(self):
        # One meeting with two abstracts; each abstract gets ranked authors.
        san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
                                               location='San Francisco, CA', associated_with='SAA')
        # create an abstract for the meeting
        new_abstract = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='denne.reed@gmail.com',
                                               presentation_type='Paper',
                                               title='Silly Walks of the Neanderthals',
                                               abstract_text="""<p>Silly walks in Neanderthals.</p> """,
                                               year=2015)
        Author.objects.create(abstract=new_abstract, author_rank=1, first_name="Bob",
                              last_name="Reed", institution="University of Texas at Austin",
                              department="Anthropology", country="United States of America",
                              email_address="denne.reed@gmail.com")
        Author.objects.create(abstract=new_abstract, author_rank=2, first_name='Denné',
                              last_name='Jéhnson', institution="University of Texas at Austin",
                              department="Anthropology", country="United States of America",
                              email_address="denne.reed@gmail.com")
        # Second abstract has a non-ASCII title (∂) to exercise pretty_title.
        abstract2 = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='denne.reed@gmail.com',
                                            presentation_type='Poster',
                                            title='∂13 C isotopic values in zombies indicate a C4 diet',
                                            abstract_text="""<p>Yummy plants, ugggh</p> """,
                                            year=2015)
        Author.objects.create(abstract=abstract2, author_rank=1, first_name="Archer",
                              last_name="Flexnick", institution="University of Transylvania",
                              department="Anthropology", country="Romania",
                              email_address="Archer.Flexnick@gmail.com")
        Author.objects.create(abstract=abstract2, author_rank=2, first_name="Felix",
                              last_name="Quustz", institution="University of Transylvania",
                              department="Anthropology", country="Romania",
                              email_address="Felix.Q@gmail.com")
        Author.objects.create(abstract=abstract2, author_rank=3, first_name="Adam",
                              last_name="Ackworth", institution="University of Transylvania",
                              department="Anthropology", country="Romania",
                              email_address="AdamAck@gmail.com")
    def test_lead_author_last_name_method(self):
        abstract = Abstract.objects.get(title='Silly Walks of the Neanderthals')
        self.assertEqual(abstract.lead_author_last_name(), "Reed")  # Last name of lead author should be "Reed"
    def test_pretty_title(self):
        # pretty_title should preserve both plain and unicode titles.
        abstract = Abstract.objects.get(title='Silly Walks of the Neanderthals')
        self.assertEqual(abstract.pretty_title(), 'Silly Walks of the Neanderthals')
        abstract = Abstract.objects.get(title='∂13 C isotopic values in zombies indicate a C4 diet')
        self.assertEqual(abstract.pretty_title(), u'\u220213 C isotopic values in zombies indicate a C4 diet')
class AbstractViewTests(TestCase):
    """Tests the abstract-submission view against data created in setUp."""
    def setUp(self):
        # Create basic fiber tree
        starting_page_count = Page.objects.count()
        mainmenu = Page.objects.create(title='mainmenu')
        meetings_page = Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
        # Create abstract fiber page
        abstract_submission_page = Page.objects.create(title='abstract submission',
                                                       parent=meetings_page, url='abstract')
        Page.objects.create(title='Create Abstract', parent=abstract_submission_page, url='add')
        self.assertEqual(Page.objects.count(), starting_page_count+4)  # test 4 pages saved
        # Create 3 meetings with associated fiber pages
        calgary = Meeting.objects.create(year=2014, title='Calgary 2014',
                                         location='Calgary, AB', associated_with='AAPA')
        calgary.create_fiber_page()
        san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
                                               location='San Francisco, CA', associated_with='SAA')
        san_francisco.create_fiber_page()
        atlanta = Meeting.objects.create(year=2016, title='Atlanta 2016',
                                         location='Atlanta, GA', associated_with='AAPA')
        atlanta.create_fiber_page()
        self.assertEqual(Page.objects.count(), starting_page_count+7)  # test 7 pages saved (4 + 3 meeting pages)
        # Create an abstract with two authors
        self.assertEqual(Meeting.objects.count(), 3)
        self.assertEqual(Abstract.objects.count(), 0)
        san_francisco = Meeting.objects.get(year=2015)
        self.assertEqual(san_francisco.location, 'San Francisco, CA')
        # BUG FIX: previously used a hard-coded meeting_id=24, which does not
        # match any meeting created in this setUp; tie the abstract to the
        # San Francisco meeting fetched above instead.
        new_abstract = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='denne.reed@gmail.com',
                                               presentation_type='Paper',
                                               title='Silly Walks of the Neanderthals',
                                               abstract_text="""<p> Test abstract text about silly walks in Neanderthals.</p> """,
                                               year=2015)  # create a new abstract for the san francisco meeting
        Author.objects.create(abstract=new_abstract, author_rank=1, first_name="Denne",
                              last_name="Reed", institution="University of Texas at Austin",
                              department="Anthropology", country="United States of America",
                              email_address="denne.reed@gmail.com")
        Author.objects.create(abstract=new_abstract, author_rank=2, first_name="Bob",
                              last_name="Frankle", institution="University of Michigan",
                              department="Anthropology", country="United States of America",
                              email_address="bob.frankle@gmail.com")
    def test_create_abstract_view_with_get_method(self):
        """A get request should load a blank version of the form"""
        response = self.client.get(reverse('meetings:create_abstract'))
        self.assertEqual(response.status_code, 200)  # Response should be an HTML page with status code 200
        self.assertTemplateUsed(response, 'meetings/abstract.html')  # Response should render the abstract.html template
        self.assertContains(response, "<form")  # Test that the page loads a form
        self.assertContains(response, "<p>Author 1<br>")  # Test that the page contains an author formset
        self.assertContains(response, "input", count=36)  # Test that the page contains 36 input elements
class AbstractViewTestsWithData(TestCase):
    """Form-submission tests for AbstractCreateView, driven by fixture data.

    The fixtures supply fiber pages and meetings; meeting pk 24 below refers
    to a meeting defined in fixtures/meetings_data.json.
    """
    fixtures = ['fixtures/fiber_data.json', 'fixtures/meetings_data.json']
    def setUp(self):
        # RequestFactory lets us call the view callable directly,
        # bypassing URL routing and middleware.
        self.factory = RequestFactory()
        self.user = User.objects.create(username='bob', email='bob@gmail.com', password='secret')
    def test_get(self):
        request = self.factory.get(reverse('meetings:create_abstract'))
        request.user = AnonymousUser()
        response = AbstractCreateView.as_view()(request)
        self.assertEqual(response.status_code, 200)
    def test_abstract_create_view_with_empty_post_data(self):
        # An empty POST should re-render the form rather than error out.
        request = self.factory.post(reverse('meetings:create_abstract'), {})
        request.user = AnonymousUser()
        response = AbstractCreateView.as_view()(request)
        self.assertEqual(response.status_code, 200)
    def test_abstract_lead_last_name_sorting_method(self):
        queryset = Abstract.objects.filter(pk__in=[31, 33, 34, 35, 36])  # grab 5 posters from 2014
        name_list = []
        for q in queryset: name_list.append(q.lead_author_last_name())
        self.assertEqual(len(name_list), 5)
        self.assertEqual(name_list, ["Schillinger", "Harris", "Harris", "Key", "Werner"])
        # Ordering by author rank then name should alphabetize the lead authors.
        ordered_queryset = queryset.order_by('author__author_rank',
                                             'author__last_name', 'author__first_name')[0:queryset.count()]
        self.assertEqual(len(ordered_queryset), len(queryset))
        ordered_name_list = []
        for q in ordered_queryset: ordered_name_list.append(q.lead_author_last_name())
        self.assertEqual(ordered_name_list, ["Harris", "Harris", "Key", "Schillinger", "Werner"])
    def test_abstract_create_view_with_completed_form(self):
        # A fully valid submission, including one inline author formset row.
        form_data = {
            'meeting': 24,
            'year': 2015,
            'presentation_type': 'Paper',
            'title': """<p>A test title with strange characters ∂13C and species names
            like <em>Australopithecus afarensis</em></p>""",
            'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
            knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
            out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
            slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
            the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
            </p>""",
            'acknowledgements': 'I gratefully acknowledge the academy.',
            'contact_email': 'denne.reed@gmail.com',
            'confirm_email': 'denne.reed@gmail.com',
            'author_set-0-name': 'Denne Reed',
            'author_set-0-department': 'Anthropology',
            'author_set-0-institution': 'University of Texas at Austin',
            'author_set-0-country': 'United States of America',
            'author_set-0-email_address': 'denne.reed@gmail.com',
        }
        request = self.factory.post(reverse('meetings:create_abstract'), form_data)
        request.user = AnonymousUser()
        starting_abstract_count = Abstract.objects.filter(year=2015).count()
        response = AbstractCreateView.as_view()(request)
        # NOTE(review): the original comment claimed a redirect, but 200 is
        # asserted, and starting_abstract_count above is never checked --
        # confirm the form actually validates and saves here.
        self.assertEqual(response.status_code, 200)
    def test_abstract_with_missing_title(self):
        # Same payload as the valid submission, but 'title' is omitted.
        form_data = {
            'meeting': 24,
            'year': 2015,
            'presentation_type': 'Paper',
            #'title': """<p>A test title with strange characters ∂13C and species names
            #like <em>Australopithecus afarensis</em></p>""",
            'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
            knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
            out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
            slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
            the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
            </p>""",
            'acknowledgements': 'I gratefully acknowledge the academy.',
            'contact_email': 'denne.reed@gmail.com',
            'confirm_email': 'denne.reed@gmail.com',
            'author_set-0-name': 'Denne Reed',
            'author_set-0-department': 'Anthropology',
            'author_set-0-institution': 'University of Texas at Austin',
            'author_set-0-country': 'United States of America',
            'author_set-0-email_address': 'denne.reed@gmail.com',
        }
        request = self.factory.post(reverse('meetings:create_abstract'), form_data)
        request.user = AnonymousUser()
        response = AbstractCreateView.as_view()(request)
        self.assertEqual(response.status_code, 200)  # test that on submit we return the form again
        self.assertEqual(response.context_data['form'].errors['title'][0], u'This field is required.')
    def test_abstract_with_missing_confirmation_email(self):
        # 'confirm_email' is omitted entirely; the form must flag it.
        form_data = {
            'meeting': 24,
            'year': 2015,
            'presentation_type': 'Paper',
            'title': """<p>A test title with strange characters ∂13C and species names
            like <em>Australopithecus afarensis</em></p>""",
            'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
            knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
            out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
            slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
            the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
            </p>""",
            'acknowledgements': 'I gratefully acknowledge the academy.',
            'contact_email': 'denne.reed@gmail.com',
            'author_set-0-name': 'Denne Reed',
            'author_set-0-department': 'Anthropology',
            'author_set-0-institution': 'University of Texas at Austin',
            'author_set-0-country': 'United States of America',
            'author_set-0-email_address': 'denne.reed@gmail.com',
        }
        request = self.factory.post(reverse('meetings:create_abstract'), form_data)
        request.user = AnonymousUser()
        response = AbstractCreateView.as_view()(request)
        self.assertEqual(response.status_code, 200)  # test that on submit we return the form again
        self.assertEqual(response.context_data['form'].errors['confirm_email'][0], u'This field is required.')
    def test_abstract_with_malformed_confirmation_email(self):
        form_data = {
            'meeting': 24,
            'year': 2015,
            'presentation_type': 'Paper',
            'title': """<p>A test title with strange characters ∂13C and species names
            like <em>Australopithecus afarensis</em></p>""",
            'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
            knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
            out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
            slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
            the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
            </p>""",
            'acknowledgements': 'I gratefully acknowledge the academy.',
            'contact_email': 'denne.reed@gmail.com',
            'confirm_email': 'denne.reed',  # invalid email address
            'author_set-0-name': 'Denne Reed',
            'author_set-0-department': 'Anthropology',
            'author_set-0-institution': 'University of Texas at Austin',
            'author_set-0-country': 'United States of America',
            'author_set-0-email_address': 'denne.reed@gmail.com',
        }
        request = self.factory.post(reverse('meetings:create_abstract'), form_data)
        request.user = AnonymousUser()
        response = AbstractCreateView.as_view()(request)
        self.assertEqual(response.status_code, 200)  # test that on submit we return the form again
        # test that the form contains an appropriate error message
        self.assertEqual(response.context_data['form'].errors['confirm_email'][0], u'Enter a valid email address.')
    def test_abstract_when_contact_email_not_same_as_confirmation_email(self):
        form_data = {
            'meeting': 24,
            'year': 2015,
            'presentation_type': 'Paper',
            'title': """<p>A test title with strange characters ∂13C and species names
            like <em>Australopithecus afarensis</em></p>""",
            'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
            knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
            out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
            slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
            the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
            </p>""",
            'acknowledgements': 'I gratefully acknowledge the academy.',
            'contact_email': 'denne.reed@gmail.com',  # valid email address
            'confirm_email': 'reedd@mail.utexas.edu',  # valid email address, but not same as above
            'author_set-0-name': 'Denne Reed',
            'author_set-0-department': 'Anthropology',
            'author_set-0-institution': 'University of Texas at Austin',
            'author_set-0-country': 'United States of America',
            'author_set-0-email_address': 'denne.reed@gmail.com',
        }
        request = self.factory.post(reverse('meetings:create_abstract'), form_data)
        request.user = AnonymousUser()
        response = AbstractCreateView.as_view()(request)
        self.assertEqual(response.status_code, 200)  # test that on submit we return the form again
| dennereed/paleoanthro | meetings/tests.py | Python | gpl-3.0 | 31,299 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the ``centrale_operativa`` boolean flag to Partecipazione."""
    dependencies = [
        ('attivita', '0012_attivita_centrale_operativa'),
    ]
    operations = [
        migrations.AddField(
            model_name='partecipazione',
            name='centrale_operativa',
            # db_index=True: the flag is indexed for filtering.
            field=models.BooleanField(default=False, db_index=True),
        ),
    ]
| CroceRossaItaliana/jorvik | attivita/migrations/0013_partecipazione_centrale_operativa.py | Python | gpl-3.0 | 448 |
from pygame import *
from key_dict import *
''' The player class '''
class Cursor:
    """Keyboard-driven grid cursor that builds and removes tiles on a level."""
    def __init__(self, x, y, size):
        # Position in grid (tile) coordinates; size is the tile size in pixels.
        self.x = int(x)
        self.y = int(y)
        self.size = size
        self.speed = 1
        self.cooldown = 0
        self.block = 0
        self.menu_switch = {'Build' : True}
        # Buildable tile types, cycled through with the 'block' key.
        self.menu_block = {
            0 : 'Wall',
            1 : 'Heavy tower',
            2 : 'Light tower',
            3 : 'Torch',
            4 : 'Farm'}
    def check_border(self, level, location):
        """Return True when a coordinate stays inside the map.

        NOTE(review): the same level.map_size bounds both axes, so this
        assumes a square map -- confirm.
        """
        if location < 0 or location >= level.map_size:
            return False
        return True
    def update(self, keys, level, dt):
        """Process held keys for movement and build/remove actions.

        dt is the frame time; the cooldown throttles how often a held
        key is acted upon.
        """
        self.cooldown -= 1 * dt
        if self.cooldown < 0:
            self.cooldown = 0
        # Tile currently under the cursor (row-major index into terrain_map).
        tile = level.terrain_map[self.x + self.y * level.map_size]
        for key in KEY_DICT:
            if keys[key] and self.cooldown == 0:
                if KEY_DICT[key] == 'left' and self.check_border(level, self.x - self.speed):
                    self.x -= self.speed
                if KEY_DICT[key] == 'right' and self.check_border(level, self.x + self.speed):
                    self.x += self.speed
                if KEY_DICT[key] == 'up' and self.check_border(level, self.y - self.speed):
                    self.y -= self.speed
                if KEY_DICT[key] == 'down' and self.check_border(level, self.y + self.speed):
                    self.y += self.speed
                # Toggles between building / building removal
                #if KEY_DICT[key] == 'switch':
                #    self.menu_switch['Build'] = not self.menu_switch['Build']
                # Cycle the selected building type, wrapping at the end.
                if KEY_DICT[key] == 'block':
                    self.block += 1
                    if self.block >= len(self.menu_block):
                        self.block = 0
                if KEY_DICT[key] == 'action':
                    if self.menu_switch['Build'] and level.gold > 0:
                        if tile.passable:
                            level.create_tile(self.x, self.y, self.menu_block[self.block])
                    elif not self.menu_switch['Build']:
                        if not tile.passable:
                            level.break_tile(self.x, self.y)
                            # Refund the tile's price when it is broken.
                            level.gold += tile.tile_price
                # Re-arm delay applied after any handled keypress.
                self.cooldown = 0.2
    def draw(self, screen, xoff, yoff):
        # NOTE(review): int(self.size/(self.size/3)) evaluates to roughly 3
        # for any size >= 3 -- probably intended as a fixed 3 px outline
        # width; confirm before simplifying.
        draw.rect(screen, (255, 255, 255), ((self.x + xoff) * self.size, (self.y + yoff) * self.size, self.size, self.size), int(self.size/(self.size/3)))
| erikjuhani/thefortressheart | cursor.py | Python | gpl-3.0 | 2,603 |
import Log
import pygame as Pygame
def fillJob(screen, color, job):
    """Fill the rectangle occupied by *job* with *color*.

    Jobs that implement their own fillArea(color) hook are delegated to;
    otherwise the job's rect is filled, widened by its border_width (if any)
    so the fill also covers the border area.
    """
    # Prefer the job's own fill routine when it provides one.
    if hasattr(job, "fillArea"):
        job.fillArea(color)
        return
    ## Workaround: extend the fill over the border, when the job has one.
    pad = getattr(job, "border_width", 0)
    Pygame.draw.rect(
        screen,
        color,
        (job.x - pad, job.y - pad, job.width + pad * 2, job.height + pad * 2),
        0)
def draw3DBorder(screen, colors, rect, deepness, background=None):
    """Draw a beveled (3D-looking) border as four trapezoids.

    colors supplies the four edge colors in order top, right, bottom, left;
    rect is (x, y, width, height); deepness is the bevel width in pixels.
    When background is given, the inner area is filled before the bevels
    are drawn.
    """
    x, y, width, height = rect
    if background:
        # NOTE(review): the inner fill is offset by deepness+1 on the
        # top/left but its width/height shrink by only deepness+1 (not
        # 2*(deepness+1)), so it extends past the right/bottom bevels --
        # confirm whether that overdraw is intentional.
        Pygame.draw.rect(
            screen,
            background,
            (x + deepness + 1, y + deepness + 1, width - deepness - 1, height - deepness - 1)
        )
    ## Top
    Pygame.draw.polygon(
        screen,
        colors[0],
        ((x, y), (x + width, y),
         (x + width - deepness, y + deepness), (x + deepness, y + deepness), ),
    )
    ## Bottom
    Pygame.draw.polygon(
        screen,
        colors[2],
        ((x, y + height), (x + width, y + height),
         (x + width - deepness, y + height - deepness), (x + deepness, y + height - deepness), ),
    )
    ## Left
    Pygame.draw.polygon(
        screen,
        colors[3],
        ((x, y), (x + deepness, y + deepness),
         (x + deepness, y + height - deepness), (x, y + height),),
    )
    ## Right
    Pygame.draw.polygon(
        screen,
        colors[1],
        ((x + width, y), (x + width - deepness, y + deepness),
         (x + width - deepness, y + height - deepness), (x + width, y + height)),
    )
| UndeadMastodon/Loltris | Draw.py | Python | gpl-3.0 | 1,695 |
# -*- coding: utf-8 -*-
class BankAccess(object):
def __init__(self):
self.pendingUids = {}
pass
def get(self, uid):
| manuco/LayeredLighting | model/bankaccess.py | Python | gpl-3.0 | 160 |
###############################################################################
# #
# Peekaboo Extended Email Attachment Behavior Observation Owl #
# #
# server.py #
###############################################################################
# #
# Copyright (C) 2016-2020 science + computing ag #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or (at #
# your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, but #
# WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU #
# General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
""" This module implements the Peekaboo server, i.e. the frontend to the
client. """
import asyncio
import email.utils
import logging
import urllib.parse
import sanic
import sanic.headers
import sanic.response
from peekaboo.db import PeekabooDatabaseError
logger = logging.getLogger(__name__)
class PeekabooServer:
""" A class wrapping the server components of Peekaboo. """
    def __init__(self, host, port, job_queue, sample_factory,
                 request_queue_size, db_con):
        """ Initialise a new server and start it. All error conditions are
        returned as exceptions.

        @param host: The local address to bind the socket to.
        @type host: String
        @param port: The local port to listen on for client connections.
        @type port: int
        @param job_queue: A reference to the job queue for submission of
                          samples.
        @type job_queue: JobQueue
        @param sample_factory: A reference to a sample factory for creating new
                               samples.
        @type sample_factory: SampleFactory
        @param request_queue_size: Number of requests that may be pending on
                                   the socket.
        @type request_queue_size: int
        @param db_con: database connection object, stored on the instance
                       for use by the request handlers (exact usage not
                       visible in this fragment -- confirm).
        """
        logger.debug('Starting up server.')
        self.app = sanic.Sanic("PeekabooAV", configure_logging=False)
        self.app.config.FALLBACK_ERROR_FORMAT = "json"
        # silence sanic to a reasonable amount
        logging.getLogger('sanic.root').setLevel(logging.WARNING)
        logging.getLogger('sanic.access').setLevel(logging.WARNING)
        self.loop = asyncio.get_event_loop()
        # Server is created but not started: start_serving=False defers
        # listening until the coroutine is awaited/served explicitly.
        self.server_coroutine = self.app.create_server(
            host=host, port=port, return_asyncio_server=True,
            backlog=request_queue_size,
            asyncio_server_kwargs=dict(start_serving=False))
        self.server = None
        self.job_queue = job_queue
        self.sample_factory = sample_factory
        self.db_con = db_con
        # remember for diagnostics
        self.host = host
        self.port = port
        self.app.add_route(self.hello, '/')
        self.app.add_route(self.ping, '/ping')
        self.app.add_route(self.scan, "/v1/scan", methods=['POST'])
        self.app.add_route(
            self.report, '/v1/report/<job_id:int>', methods=['GET'])
async def hello(self, _):
""" hello endpoint as fallback and catch all
@returns: hello world json response
"""
return sanic.response.json({'hello': 'PeekabooAV'})
async def ping(self, _):
""" ping endpoint for diagnostics
@returns: pong json response
"""
return sanic.response.json({'answer': 'pong'})
async def scan(self, request):
""" scan endpoint for job submission
@param request: sanic request object
@type request: sanic.Request
@returns: json response containing ID of newly created job
"""
# this is sanic's multipart/form-data parser in a version that knows
# that our file field contains binary data. This allows transferring
# files without a filename. The generic parser would treat those as
# text fields and try to decode them using the form charset or UTF-8 as
# a fallback and cause errors such as: UnicodeDecodeError: 'utf-8'
# codec can't decode byte 0xc0 in position 1: invalid start byte
content_type, parameters = sanic.headers.parse_content_header(
request.content_type)
# application/x-www-form-urlencoded is inefficient at transporting
# binary data. Also it needs a separate field to transfer the filename.
# Make clear here that we do not support that format (yet).
if content_type != 'multipart/form-data':
logger.error('Invalid content type %s', content_type)
return sanic.response.json(
{'message': 'Invalid content type, use multipart/form-data'},
400)
boundary = parameters["boundary"].encode("utf-8")
form_parts = request.body.split(boundary)
# split above leaves preamble in form_parts[0] and epilogue in
# form_parts[2]
num_fields = len(form_parts) - 2
if num_fields <= 0:
logger.error('Invalid MIME structure in request, no fields '
'or preamble or epilogue missing')
return sanic.response.json(
{'message': 'Invalid MIME structure in request'}, 400)
if num_fields != 1:
logger.error('Invalid number of fields in form: %d', num_fields)
return sanic.response.json(
{'message': 'Invalid number of fields in form, we accept '
'only one field "file"'}, 400)
form_part = form_parts[1]
file_name = None
content_type = None
field_name = None
line_index = 2
line_end_index = 0
while line_end_index != -1:
line_end_index = form_part.find(b'\r\n', line_index)
# this constitutes a hard requirement for the multipart headers
# (and filenames therein) to be UTF-8-encoded. There are some
# obscure provisions for transferring an encoding in RFC7578
# section 5.1.2 for HTML forms which don't apply here so its
# fallback to UTF-8 applies. This is no problem for our field name
# (ASCII) and file names in RFC2231 encoding. For HTML5-style
# percent-encoded filenames it means that whatever isn't
# percent-encoded needs to be UTF-8 encoded. There are no rules in
# HTML5 currently to percent-encode any UTF-8 byte sequences.
form_line = form_part[line_index:line_end_index].decode('utf-8')
line_index = line_end_index + 2
if not form_line:
break
colon_index = form_line.index(':')
idx = colon_index + 2
form_header_field = form_line[0:colon_index].lower()
# parse_content_header() reverts some of the percent encoding as
# per HTML5 WHATWG spec. As it is a "living standard" (i.e. moving
# target), it has changed over the years. There used to be
# backslash doubling and explicit control sequence encoding. As of
# this writing this has been changed to escaping only newline,
# linefeed and double quote. Sanic only supports the double quote
# part of that: %22 are reverted back to %. Luckily this interacts
# reasonably well with RFC2231 decoding below since that would do
# the same.
#
# There is no way to tell what version of the standard (or draft
# thereof) the client was following when encoding. It seems accepted
# practice in the browser world to just require current versions of
# everything so their behaviour hopefully converges eventually.
# This is also the reason why we do not try to improve upon it here
# because it's bound to become outdated.
#
# NOTE: Since we fork the sanic code here we need to keep track of
# its changes, particularly how it interacts with RFC2231 encoding
# if escaping of the escape character %25 is ever added to the
# HTML5 WHATWG spec. In that case parse_content_header() would
# start breaking the RFC2231 encoding which would explain why its
# use is forbidden in RFC7578 section 4.2 via RFC5987.
form_header_value, form_parameters = sanic.headers.parse_content_header(
form_line[idx:]
)
if form_header_field == 'content-disposition':
field_name = form_parameters.get('name')
file_name = form_parameters.get('filename')
# non-ASCII filenames in RFC2231, "filename*" format
if file_name is None and form_parameters.get('filename*'):
encoding, _, value = email.utils.decode_rfc2231(
form_parameters['filename*']
)
file_name = urllib.parse.unquote(value, encoding=encoding)
elif form_header_field == 'content-type':
content_type = form_header_value
if field_name != 'file':
logger.error('Field file missing from request')
return sanic.response.json(
{'message': 'Field "file" missing from request'}, 400)
file_content = form_part[line_index:-4]
content_disposition = request.headers.get('x-content-disposition')
sample = self.sample_factory.make_sample(
file_content, file_name,
content_type, content_disposition)
try:
await self.db_con.analysis_add(sample)
except PeekabooDatabaseError as dberr:
logger.error('Failed to add analysis to database: %s', dberr)
return sanic.response.json(
{'message': 'Failed to add analysis to database'}, 500)
if not self.job_queue.submit(sample):
logger.error('Error submitting sample to job queue')
return sanic.response.json(
{'message': 'Error submitting sample to job queue'}, 500)
# send answer to client
return sanic.response.json({'job_id': sample.id}, 200)
async def report(self, _, job_id):
""" report endpoint for report retrieval by job ID
@param request: sanic request object
@type request: sanic.Request
@param job_id: job ID extracted from endpoint path
@type job_id: int
@returns: report json response
"""
if not job_id:
return sanic.response.json(
{'message': 'job ID missing from request'}, 400)
try:
job_info = await self.db_con.analysis_retrieve(job_id)
except PeekabooDatabaseError as dberr:
logger.error('Failed to retrieve analysis result from '
'database: %s', dberr)
return sanic.response.json(
{'message': 'Failed to retrieve analysis result '
'from database'}, 500)
if job_info is None:
logger.debug('No analysis result yet for job %d', job_id)
return sanic.response.json(
{'message': 'No analysis result yet for job %d' % job_id}, 404)
reason, result = job_info
return sanic.response.json({
'result': result.name,
'reason': reason,
# FIXME: depends on saving the report to the database
# 'report': report,
}, 200)
def serve(self):
""" Serves requests until shutdown is requested from the outside. """
self.server = self.loop.run_until_complete(self.server_coroutine)
# sanic 21.9 introduced an explicit startup that finalizes the app,
# particularly the request routing. So we need to run it if present.
if hasattr(self.server, 'startup'):
self.loop.run_until_complete(self.server.startup())
self.loop.run_until_complete(self.server.start_serving())
logger.info('Peekaboo server is now listening on %s:%d',
self.host, self.port)
self.loop.run_until_complete(self.server.wait_closed())
logger.debug('Server shut down.')
def shut_down(self):
""" Triggers a shutdown of the server, used by the signal handler and
potentially other components to cause the main loop to exit. """
logger.debug('Server shutdown requested.')
if self.server is not None:
self.server.close()
| scVENUS/PeekabooAV | peekaboo/server.py | Python | gpl-3.0 | 13,675 |
## {{{ http://code.activestate.com/recipes/496882/ (r8)
'''
http://code.activestate.com/recipes/496882/
Author: Michael Palmer 13 Jul 2006
a regex-based JavaScript code compression kludge
'''
import re
class JSCompressor(object):
    """Regex-based JavaScript source compressor.

    Strips comments and (optionally) insignificant whitespace while keeping
    string and regex literals intact by temporarily replacing them with
    numbered placeholders.
    """

    def __init__(self, compressionLevel=2, measureCompression=False):
        '''
        compressionLevel:
            0 - no compression, script returned unchanged. For debugging only -
                try if you suspect that compression compromises your script
            1 - Strip comments and empty lines, don't change line breaks and
                indentation (code remains readable)
            2 - Additionally strip insignificant whitespace (code will become
                quite unreadable)
        measureCompression: append a comment stating the extent of compression
        '''
        self.compressionLevel = compressionLevel
        self.measureCompression = measureCompression

    # a bunch of regexes used in compression.
    # All patterns are raw strings now: sequences such as '\d' or '\/' in
    # plain string literals are invalid escapes (DeprecationWarning and,
    # eventually, SyntaxError on modern Pythons).

    # first, exempt string and regex literals from compression by transient
    # substitution
    findLiterals = re.compile(r'''
        (\'.*?(?<=[^\\])\')   |                   # single-quoted strings
        (\".*?(?<=[^\\])\")   |                   # double-quoted strings
        ((?<![\*\/])\/(?![\/\*]).*?(?<![\\])\/)   # JS regexes, trying hard not to be tripped up by comments
        ''', re.VERBOSE)

    # literals are temporarily replaced by numbered placeholders
    literalMarker = '@_@%d@_@'                # temporary replacement
    backSubst = re.compile(r'@_@(\d+)@_@')    # put the string literals back in

    mlc1 = re.compile(r'(\/\*.*?\*\/)')               # /* ... */ comments on single line
    mlc = re.compile(r'(\/\*.*?\*\/)', re.DOTALL)     # real multiline comments
    slc = re.compile(r'\/\/.*')                       # remove single line comments
    collapseWs = re.compile(r'(?<=\S)[ \t]+')         # collapse successive non-leading white space characters into one

    squeeze = re.compile(r'''
        \s+(?=[\}\]\)\:\&\|\=\;\,\.\+])   |   # remove whitespace preceding control characters
        (?<=[\{\[\(\:\&\|\=\;\,\.\+])\s+  |   # ... or following such
        [ \t]+(?=\W)                      |   # remove spaces or tabs preceding non-word characters
        (?<=\W)[ \t]+                         # ... or following such
        ''', re.VERBOSE | re.DOTALL)

    def compress(self, script):
        '''
        perform compression and return compressed script
        '''
        if self.compressionLevel == 0:
            return script

        lengthBefore = len(script)

        # first, substitute string literals by placeholders to prevent the
        # regexes messing with them
        literals = []

        def insertMarker(mo):
            literals.append(mo.group())
            return self.literalMarker % (len(literals) - 1)

        script = self.findLiterals.sub(insertMarker, script)

        # now, to the literal-stripped carcass, apply some kludgy regexes for
        # deflation...
        script = self.slc.sub('', script)    # strip single line comments
        script = self.mlc1.sub(' ', script)  # replace /* .. */ comments on single lines by space
        script = self.mlc.sub('\n', script)  # replace real multiline comments by newlines

        # remove empty lines and trailing whitespace
        script = '\n'.join([l.rstrip() for l in script.splitlines() if l.strip()])

        if self.compressionLevel == 2:    # squeeze out any dispensible whitespace
            script = self.squeeze.sub('', script)
        elif self.compressionLevel == 1:  # only collapse multiple whitespace characters
            script = self.collapseWs.sub(' ', script)

        # now back-substitute the string and regex literals
        def backsub(mo):
            return literals[int(mo.group(1))]

        script = self.backSubst.sub(backsub, script)

        if self.measureCompression:
            lengthAfter = float(len(script))
            squeezedBy = int(100 * (1 - lengthAfter / lengthBefore))
            script += '\n// squeezed out %s%%\n' % squeezedBy

        return script
if __name__ == '__main__':
    # Demo/self-test: compress the sample script at compression levels 1 and 2.
    # print() calls with a single pre-formatted argument work identically under
    # Python 2 and 3 (the original used Python-2-only print statements).
    script = '''
    /* this is a totally useless multiline comment, containing a silly "quoted string",
    surrounded by several superfluous line breaks
    */
    // and this is an equally important single line comment

    sth = "this string contains 'quotes', a /regex/ and a // comment yet it will survive compression";

    function wurst(){ // this is a great function
        var hans = 33;
    }

    sthelse = 'and another useless string';

    function hans(){ // another function
        var bill = 66; // successive spaces will be collapsed into one;
        var bob = 77   // this line break will be preserved b/c of lacking semicolon
        var george = 88;
    }
    '''
    for level in range(1, 3):
        print('\ncompression level %d :\n--------------' % level)
        compressor = JSCompressor(compressionLevel=level, measureCompression=True)
        cpr = compressor.compress(script)
        print(cpr)
        print('length %d' % len(cpr))
| MaxTyutyunnikov/lino | lino/utils/jscompressor.py | Python | gpl-3.0 | 5,152 |
from couchpotato import get_session
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from couchpotato.core.settings.model import History as Hist
import time
log = CPLog(__name__)
class History(Notification):
    """Notification handler that records download/snatch/cancel events in the
    history table instead of notifying an external service."""

    listen_to = ['movie.downloaded', 'movie.snatched', 'renamer.canceled']

    def notify(self, message = '', data = None, listener = None):
        """Store the event in the history table.

        @param message: human-readable description of the event
        @param data: event payload; only the release 'id' key is used
        @param listener: unused, part of the Notification interface
        @return: True (the event is always recorded)
        """
        # BUG fix: 'data' used to default to a mutable {} shared between all
        # calls; default to None and create a fresh dict per call instead.
        if data is None:
            data = {}

        db = get_session()

        history = Hist(
            added = int(time.time()),
            message = toUnicode(message),
            release_id = data.get('id', 0)
        )
        db.add(history)
        db.commit()
        #db.close()

        return True
| Akylas/CouchPotatoServer | couchpotato/core/notifications/history/main.py | Python | gpl-3.0 | 732 |
"""
Django settings for astrology project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import environ
env = environ.Env()

# Project root: this settings file lives three directory levels below it.
ROOT_DIR = environ.Path(__file__) - 3
APPS_DIR = ROOT_DIR
#BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'dt9u2xahh&cba_897gqfiyhc-=3f-^^lz-n2#qhg44_025ir-f'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

TEMPLATE_DEBUG = True

ALLOWED_HOSTS = []

SITE_ID = 1

# Application definition

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites',
    'request',
    #'easy_timezones',
    # project apps
    'astro',
    'charts',
    'interpretations',
    'users',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'request.middleware.RequestMiddleware',
    #'easy_timezones.middleware.EasyTimezoneMiddleware',
)

ROOT_URLCONF = 'config.urls'

#Django allauth
AUTHENTICATION_BACKENDS = (
    # Needed to login by username in Django admin, regardless of `allauth`
    "django.contrib.auth.backends.ModelBackend",
)

LOGIN_REDIRECT_URL = '/'

TEMPLATE_CONTEXT_PROCESSORS = (
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.static",
    "django.core.context_processors.tz",
    "django.contrib.messages.context_processors.messages",
    'django.core.context_processors.request',
    'astro.utils.default_context_processor'
)

TEMPLATE_DIRS = [ROOT_DIR('templates'), ]

WSGI_APPLICATION = 'config.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ROOT_DIR('db.sqlite'),
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/

# no-op marker so makemessages can pick up the language names for translation
gettext = lambda s: s
LANGUAGES = (
    ('pt-br', gettext('Brazilian Portuguese')),
    ('en-us', gettext('English')),
)

#LANGUAGE_CODE = 'en-us'
LANGUAGE_CODE = 'pt-br'

#TIME_ZONE = 'UTC'
TIME_ZONE = 'America/Sao_Paulo'

USE_I18N = True

USE_L10N = True

USE_TZ = True

GEOIP_DATABASE = ROOT_DIR('static/geoip/database/GeoLiteCity.dat')

LOCALE_PATHS = (
    ROOT_DIR('config/locale/'),
)

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/

STATIC_URL = '/static/'

STATICFILES_DIRS = (
    str(APPS_DIR.path('static')),
)

STATIC_ROOT = ROOT_DIR('staticfiles')

MEDIA_ROOT = ROOT_DIR('media')
MEDIA_URL = '/media/'
# Copyright (C) 2016 East Asian Observatory
# All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful,but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,51 Franklin
# Street, Fifth Floor, Boston, MA 02110-1301, USA
from __future__ import \
absolute_import, division, print_function, \
unicode_literals
from ...type.collection import ResultCollection
from ...db.meta import proposal
from ...util import is_list_like
from .meta import example_request
from .type import ExampleRequest, ExampleRequestCollection
class ExamplePart(object):
    """Mixin implementing database access for example observing requests."""

    def search_example_request(self, proposal_id):
        """
        Retrieve observing requests
        for the given proposal or proposals.
        """

        stmt = example_request.select()
        (iter_field, iter_list) = (None, None)

        if proposal_id is not None:
            if not is_list_like(proposal_id):
                # single proposal: constrain the statement directly
                stmt = stmt.where(
                    example_request.c.proposal_id == proposal_id)
            else:
                # multiple proposals: iterate the statement per chunk
                assert iter_field is None
                (iter_field, iter_list) = (
                    example_request.c.proposal_id, proposal_id)

        results = ExampleRequestCollection()

        with self._transaction() as conn:
            for partial_stmt in self._iter_stmt(stmt, iter_field, iter_list):
                ordered_stmt = partial_stmt.order_by(
                    example_request.c.id.asc())
                for row in conn.execute(ordered_stmt):
                    results[row['id']] = ExampleRequest(**row)

        return results

    def sync_example_proposal_request(self, proposal_id, records):
        """
        Update the observing requests
        for the given proposal.
        """

        with self._transaction() as conn:
            return self._sync_records(
                conn, example_request,
                example_request.c.proposal_id, proposal_id,
                records,
                unique_columns=(example_request.c.instrument,))
| eaobservatory/hedwig | lib/hedwig/facility/example/control.py | Python | gpl-3.0 | 2,612 |
# -*- coding: utf-8 -*-
"""
################################################################################
# #
# media_editing #
# #
################################################################################
# #
# LICENCE INFORMATION #
# #
# This program provides media editing utilities. #
# #
# copyright (C) 2018 Will Breaden Madden, wbm@protonmail.ch #
# #
# This software is released under the terms of the GNU General Public License #
# version 3 (GPLv3). #
# #
# This program is free software: you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# For a copy of the GNU General Public License, see #
# <http://www.gnu.org/licenses/>. #
# #
################################################################################
"""
| wdbm/media_editing | media_editing.py | Python | gpl-3.0 | 2,382 |
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
import os
from math import sqrt
from os.path import expanduser
def extract_patches(path, filename, out_path, patch_size, stride, visualize):
    """Cut a regular grid of patches out of an image and save them to disk.

    @param path: directory containing the source image (with trailing slash)
    @param filename: image file name within *path*
    @param out_path: directory prefix the patch images are written to
    @param patch_size: (width, height) of each patch in pixels
    @param visualize: if True, also draw the patches in a square subplot grid
    """
    img = mpimg.imread(path + filename)
    nRows, nCols, nColor = img.shape
    psx, psy = patch_size
    # Half-sizes via floor division: the original used py2-only xrange and
    # '/', which yields floats (and breaks slicing) under Python 3.
    half_x, half_y = psx // 2, psy // 2

    patches = []
    for r in range(half_y + 1, nRows - half_y - 1, stride):
        for c in range(half_x + 1, nCols - half_x - 1, stride):
            patches.append(img[r - half_y:r + half_y, c - half_x:c + half_x, :])

    name, ext = os.path.splitext(filename)
    for pos, patch in enumerate(patches):
        plt.imsave(out_path + name + "_" + str(pos) + ext, patch)

    if not visualize:
        return

    # Show at most grid_size**2 patches.  BUG fix: the old loop only selected
    # a subplot while pos+1 < grid_size**2, so later imshow() calls landed on
    # whatever axes happened to be active, overdrawing the last cell.
    grid_size = int(sqrt(len(patches)))
    for pos in range(min(len(patches), grid_size ** 2)):
        plt.subplot(grid_size, grid_size, pos + 1)
        plt.imshow(patches[pos])
        plt.axis('off')
if __name__ == "__main__":
    home = expanduser("~")
    nyu_path = home+'/IGNORE_NYU/jpgs/'
    #extract_patches(, [16,16], 100, True)
    # Walk the NYU jpg directory and extract 64x64 patches (stride 100,
    # no visualization) from every image found.
    for root, dirs, files in os.walk(nyu_path, topdown=False):
        for filename in files:
            extract_patches(nyu_path, filename, nyu_path+"/patches/", [64,64], 100, False)
| shengshuyang/StanfordCNNClass | shadow_project/extract_patches.py | Python | gpl-3.0 | 1,314 |
import bpy
# -----------------------------------------------------------------------------
# Draw UI, use an function to be append into 3D View Header
# -----------------------------------------------------------------------------
def ui_3D(self, context):
    """Draw the custom header buttons for the 3D View: grid control, pivot
    centering, smooth shading, UV checker and retopo shading."""
    layout = self.layout

    tools_row = layout.row(align=True)
    tools_row.operator("view.grid_control", text='', icon='GRID')
    tools_row.operator("object.center_pivot_mesh_obj", text='', icon='CURSOR')
    tools_row.operator("object.smooth_shading", text='', icon='SMOOTH')

    shading_row = layout.row(align=True)
    shading_row.operator("unwrap.uv_checker", text='', icon='FORCE_TEXTURE')
    shading_row.operator("object.retopo_shading", text='', icon='EDITMODE_HLT')
# -----------------------------------------------------------------------------
# Draw UI, use an function to be append into UV/Image Editor View Header
# -----------------------------------------------------------------------------
def ui_UV(self, context):
    """Draw the custom header buttons for the UV/Image editor: cursor reset
    and UV checker."""
    button_row = self.layout.row(align=True)
    button_row.operator("unwrap.reset_cursor", text='', icon='CURSOR')
    button_row.operator("unwrap.uv_checker", text='', icon='FORCE_TEXTURE')
def register():
    # Hook the custom button rows into the 3D View and UV editor headers.
    bpy.types.VIEW3D_HT_header.append(ui_3D)
    bpy.types.IMAGE_HT_header.append(ui_UV)
def unregister():
    # Remove the custom button rows from the headers on add-on disable.
    bpy.types.VIEW3D_HT_header.remove(ui_3D)
    bpy.types.IMAGE_HT_header.remove(ui_UV)
| stilobique/Icon-Header | views/header.py | Python | gpl-3.0 | 1,454 |
#!/usr/bin/env python
#
# Copyright 2006,2007,2010,2015 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
import numpy as np
class test_random(gr_unittest.TestCase):
    """Sanity checks for the gr.random number generator wrapper."""
    # NOTE: For tests on the output distribution of the random numbers, see
    # gnuradio-runtime/apps/evaluation_random_numbers.py.

    # Check for range [0,1) of uniform distributed random numbers
    def test_1(self):
        num_tests = 10000
        values = np.zeros(num_tests)
        rndm = gr.random()
        for k in range(num_tests):
            values[k] = rndm.ran1()
        for value in values:
            self.assertLess(value, 1)
            self.assertGreaterEqual(value, 0)

    # Check reseed method (equal seeds agree, different seeds differ)
    def test_2(self):
        num = 5

        # two generators started with the same fixed seed must agree
        rndm0 = gr.random(42)
        rndm1 = gr.random(42)
        for k in range(num):
            self.assertEqual(rndm0.ran1(), rndm1.ran1())

        x = np.zeros(num)
        y = np.zeros(num)
        rndm0 = gr.random(42)   # fixed seed 1
        for k in range(num):
            x[k] = rndm0.ran1()
        rndm1.reseed(43)        # fixed seed 2
        for k in range(num):
            # BUG fix: the second sequence was previously drawn from rndm0
            # again, so the reseeded generator was never exercised.
            y[k] = rndm1.ran1()
        for k in range(num):
            self.assertNotEqual(x[k], y[k])
if __name__ == '__main__':
    # run the suite and write the results to an XML report
    gr_unittest.run(test_random, "test_random.xml")
| surligas/gnuradio | gnuradio-runtime/python/gnuradio/gr/qa_random.py | Python | gpl-3.0 | 2,171 |
# -*- coding:utf-8 -*-
#############settings#############
NAME = "1"         # GitStar username
PASSWORD = "1"     # GitStar password
GITNAME = "1"      # GitHub username
GITPASSWORD = "1"  # GitHub password
#############settings#############
# -*- coding: utf-8 -*-
### required - do no delete
@auth.requires_login()
def plan():
    """Render the plan page; only accessible to authenticated users."""
    return dict()
def new():
    """Register a new contact and its group via a combined factory form."""
    form = SQLFORM.factory(db.contacts,db.groups)
    if form.accepts(request.vars):
        # Insert the contact first, then link the groups record to it.
        _id_user = db.contacts.insert(**db.contacts._filter_fields(form.vars))
        form.vars.contact = _id_user
        id = db.groups.insert(**db.groups._filter_fields(form.vars))
        response.flash = 'User registered successfully'
    return locals()
def update():
    """Edit an existing contact together with its group membership.

    The URL argument is the id of the *groups* record; the contact form is
    shown with the group form appended to it.
    """
    id = request.args(0)
    group_row = db(db.groups.id == id).select()[0]
    form = SQLFORM(db.contacts, group_row.contact.id)
    # BUG fixes: the group form referenced the non-existent table ``db.group``
    # (the table is ``db.groups``) and was assigned to ``group``, shadowing
    # the row and breaking ``group.id`` further down.
    group_form = SQLFORM(db.groups, group_row.id)
    # Adding the group form
    form.append(group_form)
    if form.accepts(request.vars):
        # Updating the contact record (``db.contacts.update(...)`` on the
        # bare table does not update any row)
        db(db.contacts.id == group_row.contact.id).update(
            **db.contacts._filter_fields(form.vars))
        # Updating the group
        old_group = db(db.groups.id == group_row.id).select().first()
        old_group.update_record(group=group_form.vars.group)
        # ``response.flash`` is the user-visible message (was response.session)
        response.flash = 'Updated with success!'
    return locals()
| withanage/HEIDIEditor | controllers/plan.py | Python | gpl-3.0 | 1,068 |
# This file is part of ArcJail.
#
# ArcJail is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ArcJail is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ArcJail. If not, see <http://www.gnu.org/licenses/>.
from listeners.tick import Delay
from controlled_cvars.handlers import float_handler
from ...resource.strings import build_module_strings
from ..damage_hook import get_hook, protected_player_manager
from ..players import player_manager
from .. import build_module_config
from .base_classes.jail_game import JailGame
from . import game_event_handler, stage
strings_module = build_module_strings('lrs/win_reward')
config_manager = build_module_config('lrs/win_reward')

# Length of the reward period (seconds) before the loser is killed.
config_manager.controlled_cvar(
    float_handler,
    "duration",
    default=10,
    description="Duration of Win Reward"
)
# Movement speed factor applied to the loser during the reward.
config_manager.controlled_cvar(
    float_handler,
    "loser_speed",
    default=0.5,
    description="Loser's speed"
)
class WinReward(JailGame):
    """ Last-request game: for a limited time the winner gets to hunt down
    the loser, who is slowed and can only be hurt by the winner. """
    caption = "Win Reward"
    stage_groups = {
        'winreward-start': [
            "equip-damage-hooks",
            "set-start-status",
            "winreward-entry",
        ],
        'winreward-timed-out': ["winreward-timed-out", ],
    }

    def __init__(self, players, **kwargs):
        super().__init__(players, **kwargs)

        # per-player damage counters, keyed by player index; filled in
        # stage_equip_damage_hooks
        self._counters = {}
        self._results = {
            'winner': kwargs['winner'],
            'loser': kwargs['loser'],
        }

    @stage('basegame-entry')
    def stage_basegame_entry(self):
        # base game entry point: jump straight into our own stage group
        self.set_stage_group('winreward-start')

    @stage('equip-damage-hooks')
    def stage_equip_damage_hooks(self):
        winner, loser = self._results['winner'], self._results['loser']

        def hook_hurt_for_loser(counter, info):
            # the loser may only be hurt by the winner
            return info.attacker == winner.index

        for player in self._players:
            p_player = protected_player_manager[player.index]
            counter = self._counters[player.index] = p_player.new_counter()
            if player == winner:
                # 'SW' hook -- presumably allows self/world damage only;
                # TODO confirm against damage_hook.get_hook()
                counter.hook_hurt = get_hook('SW')
            else:
                counter.hook_hurt = hook_hurt_for_loser

            p_player.set_protected()

    @stage('undo-equip-damage-hooks')
    def stage_undo_equip_damage_hooks(self):
        # tear down counters for everybody, including players already dead
        for player in self._players_all:
            p_player = protected_player_manager[player.index]
            p_player.delete_counter(self._counters[player.index])
            p_player.unset_protected()

    @stage('winreward-entry')
    def stage_winreward_entry(self):
        winner, loser = self._results['winner'], self._results['loser']
        loser.speed = config_manager['loser_speed']

        def timeout_callback():
            self.set_stage_group('winreward-timed-out')

        # end the reward period after the configured duration
        self._delays.append(
            Delay(config_manager['duration'], timeout_callback))

    @stage('winreward-timed-out')
    def stage_wireward_timed_out(self):
        # time ran out: the loser dies, kill credited to the winner
        # (method name typo "wireward" kept; the stage id string is correct)
        winner, loser = self._results['winner'], self._results['loser']
        loser.take_damage(loser.health, attacker_index=winner.index)

    @game_event_handler('jailgame-player-death', 'player_death')
    def event_jailgame_player_death(self, game_event):
        player = player_manager.get_by_userid(game_event['userid'])
        if player not in self._players:
            return

        self._players.remove(player)

        winner, loser = self._results['winner'], self._results['loser']
        if player == winner:
            # the winner died first: finish off the loser and end the game
            loser.take_damage(loser.health + 1, attacker_index=winner.index)

        self.set_stage_group('destroy')
| KirillMysnik/ArcJail | srcds/addons/source-python/plugins/arcjail/modules/lrs/win_reward.py | Python | gpl-3.0 | 4,013 |
from bzrc import BZRC, Command
import math, numpy, argparse
from numpy import dot
from random import randint
from time import sleep
# Usage example shown by --help (RawDescriptionHelpFormatter keeps the layout).
desc=''' Example:
python kalman_pigeon.py -p localhost -s 57413 -t [1, 2, 3]
'''
class Point():
    """A hashable 2D point; equality and hashing are based on the (x, y) pair."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __hash__(self):
        return hash((self.x, self.y))

    def __eq__(self, other):
        # only Point instances can compare equal
        if not isinstance(other, Point):
            return False
        return (self.x, self.y) == (other.x, other.y)
class Agent(object):
    """ Controls a single tank acting as a target "pigeon": it either sits
    still (type1), drives off in a straight line (type2) or wanders randomly
    near the enemy (type3). """

    def __init__(self, bzrc):
        self.bzrc = bzrc
        # we control the first (and only) friendly tank
        self.tank = self.bzrc.get_mytanks()[0]
        self.commands = []
        self.constants = self.bzrc.get_constants()
        self.is_running = False

    def update(self):
        # refresh the cached state of our tank from the server
        self.tank = self.bzrc.get_mytanks()[0]

    def type1(self):
        # basically do nothing
        self.is_running = False

    def type2(self):
        # constant x and y
        enemy = self.bzrc.get_othertanks()[0]
        randPoint = getRandomPoint(int(enemy.x), int(enemy.y))
        x = 0
        while True:
            self.update()
            turnToPosition(self.bzrc, self.tank, randPoint.x, randPoint.y)
            # require over 100 consecutive "facing" readings before driving off
            if facingAngle(self.tank, randPoint.x, randPoint.y):
                x += 1
            else:
                x = 0
            if x > 100:
                break
        # stop turning and drive straight ahead at full speed
        self.bzrc.angvel(self.tank.index, 0)
        self.bzrc.speed(self.tank.index, 1)

    def type3(self):
        # wild pigeon: head for a new random point near the enemy every 3 s
        while True:
            self.update()
            enemy = self.bzrc.get_othertanks()[0]
            randPoint = getRandomPoint(int(enemy.x), int(enemy.y))
            moveToPosition(self.bzrc, self.tank, randPoint.x, randPoint.y)
            sleep(3)

    # def evalAndMove(self):
def startRobot(hostname, socket):
    """Open a BZRC connection and build the agent controlling it."""
    connection = BZRC(hostname, socket)
    return connection, Agent(connection)
def normalize_angle(angle):
    """Wrap *angle* (radians) into the interval (-pi, pi]."""
    from math import pi
    two_pi = 2 * pi
    # drop whole turns first, then nudge into the target interval
    wrapped = angle - two_pi * int(angle / two_pi)
    if wrapped <= -pi:
        wrapped += two_pi
    elif wrapped > pi:
        wrapped -= two_pi
    return wrapped
def facingAngle(tank, target_x, target_y):
    """Return True when the tank's signed heading error to the target is
    below 0.5 rad.

    NOTE(review): the error is signed, so large negative errors also pass --
    confirm whether abs() was intended.
    """
    bearing = math.atan2(target_y - tank.y, target_x - tank.x)
    return normalize_angle(bearing - tank.angle) < 0.5
def moveToPosition(bzrc, tank, target_x, target_y):
    """Command the tank to drive (speed 1) while steering toward the target."""
    bearing = math.atan2(target_y - tank.y, target_x - tank.x)
    turn = normalize_angle(bearing - tank.angle)
    # angular velocity proportional to the heading error
    bzrc.do_commands([Command(tank.index, 1, 2 * turn, False)])
def turnToPosition(bzrc, tank, target_x, target_y):
    """Command the tank to turn in place (speed 0) toward the target."""
    bearing = math.atan2(target_y - tank.y, target_x - tank.x)
    turn = normalize_angle(bearing - tank.angle)
    # angular velocity proportional to the heading error
    bzrc.do_commands([Command(tank.index, 0, 2 * turn, False)])
def getDistance(p0, p1):
    """Euclidean distance between two point-like objects with x/y attributes."""
    dx = p0.x - p1.x
    dy = p0.y - p1.y
    return sqrt(dx ** 2 + dy ** 2)
def getRandomPoint(target_x, target_y):
    """Return a random Point within 325 units of (target_x, target_y).

    Candidates are sampled uniformly from a 600x600 box centred on the target
    and rejected until one falls inside the 325-unit radius.
    """
    while True:
        x = randint(target_x - 300, target_x + 300)
        # BUG fix: the y bound previously used target_x
        # (randint(target_y - 300, target_x + 300)), skewing the sample box.
        y = randint(target_y - 300, target_y + 300)
        if getDistance(Point(x, y), Point(target_x, target_y)) <= 325:
            return Point(x, y)
def readCommandLine():
    """Parse the command-line options for the pigeon controller.

    NOTE(review): the --host/--socket/--type options pair ``required=True``
    with a ``default``, so the defaults can never take effect, and the
    --type help string is missing its closing ``}`` -- kept as-is to
    preserve behaviour.
    """
    arg_parser = argparse.ArgumentParser(description=desc, formatter_class=argparse.RawDescriptionHelpFormatter)
    arg_parser.add_argument('--host', '-p', required=True, default='localhost', help='Hostname to connect to')
    arg_parser.add_argument('--socket', '-s', required=True, default=0, help='Team socket to connect to')
    arg_parser.add_argument('--log', '-l', required=False, default=False, help='Boolean value for logging or no logging')
    arg_parser.add_argument('--type', '-t', required=True, default=1, help='{1:"sitting duck", 2:"constant x,y", 3:"wild pigeon"')
    return arg_parser.parse_args()
# Entry point: parse CLI arguments, connect to the BZRFlag server and run the
# behaviour selected via --type.  (Python 2 script: note the print statements.)
if __name__ == '__main__':
try:
args = readCommandLine()
except:
# NOTE(review): bare except; `desc` is presumably a usage string defined
# elsewhere in this file -- confirm it exists before this point.
print desc
raise
hostname = args.host
socket = int(args.socket)
# argparse delivers strings, so the --log flag is compared against the text "True".
if args.log == "True":
log = True
else:
log = False
bzrc, agent = startRobot(hostname, socket)
# Dispatch on the requested behaviour (1: sitting duck, 2: constant x,y, 3: wild pigeon).
if args.type == '1':
agent.type1()
elif args.type == '2':
agent.type2()
elif args.type == '3':
agent.type3()
else:
print desc
raise | dnorth/BZRFlag | src/kalman_pigeon.py | Python | gpl-3.0 | 4,604 |
"""
Поиск значений в последовательностях
"""
def second_max(num_list):
    """Return the second largest value in num_list.

    Raises IndexError when the list holds fewer than two elements.

    Bug fixes versus the previous version: the input list is no longer
    destroyed via pop(), and the computed result is actually returned
    (the old code computed max2 and then fell off the end, returning None).
    """
    if len(num_list) < 2:
        raise IndexError("second_max() needs at least two elements")
    max1 = max(num_list[0], num_list[1])
    max2 = min(num_list[0], num_list[1])
    for val in num_list[2:]:
        if val > max1:
            # New overall maximum: the old maximum becomes the runner-up.
            max2 = max1
            max1 = val
        elif val > max2:
            max2 = val
    return max2
| vladworldss/algo | connectivity/stream_search.py | Python | gpl-3.0 | 406 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Packaging script for supervisorclusterctl (Python 2: uses the print statement).
# Name/version/author metadata is imported from the package itself.
from supervisorclusterctl import __version__, __author__, __programm_name__, __programm_description__
import os
import sys
try:
from setuptools import setup, find_packages
except ImportError:
# setuptools is a hard build requirement; bail out with an actionable message.
print "supervisorclusterctl needs setuptools in order to build. " \
"Please install it using your package manager (usually python-setuptools) or via pip (pip install setuptools)."
sys.exit(1)
# NOTE(review): both dependency lists are intentionally empty.
requirements = []
test_requirements = []
here = os.path.abspath(os.path.dirname(__file__))
# Assemble the long description from README/changelog; missing files are tolerated.
try:
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read().replace('.. :changelog:', '')
except:
README = ''
CHANGES = ''
data_files = []
setup(
name=__programm_name__,
version=__version__,
description=__programm_description__,
long_description=README + '\n\n' + CHANGES,
author=__author__,
keywords = 'supervisor ansible',
author_email='rbrtwnklr@gmail.com',
url='https://github.com/RobWin/supervisorclusterctl.git',
packages=find_packages(exclude=["docs", "test"]),
install_requires=requirements,
tests_require=test_requirements,
test_suite="test",
data_files=data_files,
license='GPLv3',
entry_points={
'console_scripts': [
'supervisorclusterctl = supervisorclusterctl.supervisorclusterctl:main'
],
}
) | RobWin/supervisorclusterctl | setup.py | Python | gpl-3.0 | 1,415 |
#from actstream import action
from django.contrib.auth import get_user_model
from django.db.models.signals import post_save
from django.core.exceptions import ObjectDoesNotExist
from django.dispatch import receiver
from profiles.models import Organisation, OrganisationGroup, UserProfile
UserModel = get_user_model()
@receiver(post_save, sender=UserModel)
def create_user_profile(sender, instance, created, **kwargs):
"""
Create a new profile for the newly registered user.

Runs on every save of a user instance; a UserProfile is only created
when the related-object lookup raises ObjectDoesNotExist and this save
actually created the user.
"""
try:
# NOTE(review): this probes `instance.shroom` although the model imported
# here is UserProfile -- presumably `shroom` is the profile's related
# name; confirm against the UserProfile model definition.
shroom = instance.shroom
pass
except ObjectDoesNotExist:
if created:
UserProfile.objects.create(user=instance)
@receiver(post_save, sender=Organisation)
def create_organisation_group(sender, instance, created, **kwargs):
    """Create the matching OrganisationGroup for a freshly created Organisation."""
    if not created:
        return
    OrganisationGroup.objects.create(organisation=instance, name=instance.full_name)
| AtelierSoude/shrooms | ShroomsAPI/profiles/signals.py | Python | gpl-3.0 | 947 |
# Packaging configuration for the django-ponypusher library.
from distutils.core import setup
from setuptools import find_packages
from ponypusher import VERSION
setup(
#this is the name of the package to install from pypi/chishop
name='django-ponypusher',
description='Pusher app library for django',
author='Raymond McGinlay',
author_email='ray@lvlup.us',
#version number is derived from VERSION attribute defined in __init.py__
version=VERSION,
#apps to be installed in package
packages=['ponypusher', ],
#python pusher library is required
install_requires=['pusher',],
license='GPL v3',
#Description that will appear on pypi/chishop
long_description=open('README.rst').read(),
include_package_data=True,
)
| Level-Up/django-ponypusher | setup.py | Python | gpl-3.0 | 651 |
# -*- coding: utf-8 -*-
from flask import Flask
from flask import Flask,jsonify, request, Response, session,g,redirect, url_for,abort, render_template, flash
from islem import *
from bot import *
import sys
import time
import datetime
reload(sys)
sys.setdefaultencoding("utf-8")
app = Flask(__name__)
toxbot = tox_factory(ProfileHelper.open_profile("tox_save.tox"))
sonek=str(toxbot.self_get_address())[0:2]
karsi_dosyalar="gelen_cevaplar"+sonek
komut_dosyasi="gelen_komutlar"+sonek
@app.route('/')
def indeks():
# Management index page: one table row per friend known to the Tox bot,
# plus a first row describing the bot itself.
arkadaslar=""
for num in toxbot.self_get_friend_list():
# NOTE(review): HTML is built by raw string concatenation -- friend names
# and status messages are not escaped.
arkadaslar+="<tr><td><a href=/toxsys?fno="+str(num)+">"+str(num)+"</td><td>"+toxbot.friend_get_name(num)+"</td><td>"+str(toxbot.friend_get_status_message(num))+"</td><td>"+str(toxbot.friend_get_public_key(num))+"</td></tr>"
return '''<html>
<h2>Tox Yönetim Sayfası</h2>
<table border=1>
<tr><td>no</td><td>isim</td><td>publickey</td></tr>
<tr><td>-1</td><td>'''+toxbot.self_get_name()+'''</td><td>'''+toxbot.self_get_status_message()+'''</td><td>'''+toxbot.self_get_address()+'''</td></tr>
'''+arkadaslar+'''
</tr></table>
<a href="/toxfs">toxfs</a>
</html>'''
@app.route('/toxfs', methods = ['GET','POST'])
def toxfs():
# Queue an arbitrary command for the bot: reads fno/tip/mesaj from the query
# string, wraps them in an Islem record and appends it to the command file
# that the bot process polls.
# localhost:2061
#if request.method == 'GET':
islem=Islem()
islem.fno = request.args.get('fno')
islem.tip = request.args.get('tip')
islem.mesaj = request.args.get('mesaj')
islem.komut="---"
print "islem icerik:"
islem.icerik()
islem.dosyala(komut_dosyasi)
return "komut icra edildi."
#else:
#return '''<html>
#paremetreyle gönderin</html>'''
@app.route('/toxsys', methods = ['GET','POST'])
def toxsys():
# File browser for a remote friend: with only ?fno= it queues a directory
# listing command (@100@dlist) and renders the reply as an HTML table; with
# ?fno=&dosya= it queues a download command (@102@<file>) and blocks until
# the bot reports completion.  Communication happens through the shared
# command/answer files polled by the bot process.
dosyalar_html=""
# localhost:2061
#if request.method == 'GET':
islem=Islem()
if 'fno' in request.args and 'dosya' not in request.args:
islem.fno = request.args.get('fno')
islem.tip = "komut"
islem.mesaj = "x"
islem.komut = "@100@dlist"
print "islem icerik:"
islem.icerik()
islem.dosyala(komut_dosyasi)
cevap_geldi=False
dosya_bek_bas = datetime.datetime.now()
# wait up to 6 s for the reply
# NOTE(review): t_end is computed but never used; the timeout below is
# enforced via the datetime delta instead.
t_end = time.time() + 6
while not cevap_geldi :
if os.path.exists(karsi_dosyalar):
time.sleep(1)
cevaplar=open(karsi_dosyalar,"r").read()
cevaplar=cevaplar.split("\n")
for dosya in cevaplar:
dosyalar_html+="<tr><td><a href=/toxsys?fno="+str(islem.fno)+"&dosya="+dosya+">"+dosya+"</td><td></tr>"
os.remove(karsi_dosyalar)
cevap_geldi=True
return '''<html>
<h3>dosyalar</h3>
<table border=1>
'''+dosyalar_html+'''
</tr>
<a href="./">anasayfa</a>
</html>'''
dosya_bek_son = datetime.datetime.now()
krono=dosya_bek_son-dosya_bek_bas
if krono.total_seconds() > 6 :
break
else:
print "dlist sonucu bekleniyor.",krono.total_seconds()
if 'fno' in request.args and 'dosya' in request.args:
islem.fno = request.args.get('fno')
dosya = request.args.get('dosya')
islem.tip = "komut"
islem.mesaj = "x"
islem.komut = "@102@"+dosya
islem.dosyala(komut_dosyasi)
cevap_geldi=False
# NOTE(review): no timeout here -- the request hangs forever if the bot
# never writes the completion marker.
while not cevap_geldi:
time.sleep(0.5)
# md5sum check
if os.path.exists(karsi_dosyalar):
cevap=open(karsi_dosyalar,"r").read()
if cevap =="dosya_inme_tamam":
cevap_geldi=True
os.remove(karsi_dosyalar)
return "dosya geldi statikte"
else:
return redirect(url_for('indeks'))
# Run the Flask management UI on all interfaces, port 2061 (debug mode enabled).
if __name__ == '__main__':
app.run(debug=True,host='0.0.0.0', port=2061)
| milisarge/toxfs | webserver.py | Python | gpl-3.0 | 3,418 |
# Bootstrap: try to import PIL up to six times, attempting a Pillow install via
# easy_install between failures.  NOTE(review): easy_install is deprecated, and
# the `while True: pass` busy-loop deliberately hangs the process on failure.
if __name__ == '__main__':
print("Loading Modules...")
from setuptools.command import easy_install
def install_with_easyinstall(package):
# Install/upgrade `package` using setuptools' easy_install entry point.
easy_install.main(["-U", package])
imported = False
tries = 0
while not imported:
try:
import socket, importlib
globals()['PIL'] = importlib.import_module('PIL')
imported = True
except Exception as ex:
print("An error occured when importing PIL: " + str(ex))
tries += 1
if tries == 6:
print("Install Failed.")
while True:
pass
print("Installing PIL... [Try " + str(tries) + "/5]")
try:
install_with_easyinstall('Pillow')
# Reload site so the freshly installed package becomes importable.
import site, imp
imp.reload(site)
print("PIL installed.")
except Exception as ex:
print("An error occured when installing PIL: " + str(ex))
import time, math, os, queue #, threading
from multiprocessing import Process, Queue, Value, Manager, Array
globals()["exitFlag"] = False
from tkinter import *
import PIL.Image
from PIL import ImageTk
# Startup banner and worker-count configuration (2 worker processes).
if __name__ == '__main__':
print("All Modules Successfully Loaded!")
print("")
threadnumber = 2
time.sleep(0.5)
def process_data(threadName, q1, q2, im, qlock, ima, rfunc, rerrors, gfunc, gerrors, bfunc, berrors, percent, op):
# Worker-process body: pulls column indices from two alternating work queues
# (q1/q2) and, for every pixel of that column, evaluates the user-supplied
# red/green/blue expressions (rfunc/gfunc/bfunc, run through eval) writing the
# results into the shared flat RGB byte array `ima`.  Error counters and the
# progress percentage are shared multiprocessing Values.
import math
# The helpers below are made available to the eval()'d user expressions.
def funct_if(test,var_true,var_false):
# Ternary helper: var_true when test holds, else var_false.
if (test):
return var_true
else:
return var_false
def scale(var_old_min, var_old_max, var_new_min, var_new_max, var_value):
# Linearly remap var_value from [old_min, old_max] to [new_min, new_max].
OldSRange = (var_old_max - var_old_min)
NewSRange = (var_new_max - var_new_min)
return (((var_value - var_old_min) * NewSRange) / OldSRange) + var_new_min
def is_even(value_to_test):
return value_to_test % 2 == 0
def draw_funct(dfunction, dxmin, dxmax, dymin, dymax, resolution):
# Returns 255 when the current pixel (x, y) lies on the curve described by
# the string `dfunction` (evaluated with `dx` bound), else 0.
dx = scale(0,canvas_width,dxmin,dxmax,x)
cdy = eval(dfunction)
dx = scale(0,canvas_width,dxmin,dxmax,x-resolution)
pdy = eval(dfunction)
dx = scale(0,canvas_width,dxmin,dxmax,x+resolution)
ndy = eval(dfunction)
cdsy = canvas_height - scale(dymin,dymax,0,canvas_height,cdy)
pdsy = canvas_height - scale(dymin,dymax,0,canvas_height,pdy)
ndsy = canvas_height - scale(dymin,dymax,0,canvas_height,ndy)
dyval = scale(0,canvas_height,dymin,dymax,y)
py = scale(dymin,dymax,0,canvas_height,dyval-resolution)
ny = scale(dymin,dymax,0,canvas_height,dyval+resolution)
#if y - cdsy > py - pdsy and y - cdsy < ny - ndsy:
#if (cdsy - y < pdsy - y and cdsy - y > ndsy - y) or (cdsy - y > pdsy - y and cdsy - y < ndsy - y):
if (0 < pdsy - y and 0 > ndsy - y) or (0 > pdsy - y and 0 < ndsy - y) or round(cdsy - y) == 0:
# print("dx: " + str(dx) + " , dy: " + str(dy))
# if y - dsy < resolution + 1 and y - dsy > 0-(resolution + 1): #round(dsy) == y:
return 255
else:
return 0
red = 0
green = 0
blue = 0
canvas_height = im.height
canvas_width = im.width
OldXRange = (canvas_width - 0)
OldYRange = (canvas_height - 0)
NewRange = (255 - 0)
def pix2index(xpix,ypix):
# Map (x, y) pixel coordinates to the flat RGB array index: 3 bytes per
# pixel, rows stored bottom-up relative to y.
return ((((canvas_height - ypix - 1)*canvas_width) + (xpix)) * 3) - 3
def getpix(xval,yval):
# Read one (r, g, b) triple from the shared array; hangs on index errors.
pixindex = pix2index(xval,yval)
try:
rpix = ima[pixindex]
gpix = ima[pixindex + 1]
bpix = ima[pixindex + 2]
except:
print("ERROR WITH INDEX: " + str(pixindex))
while True:
pass
return (rpix,gpix,bpix)
def setpix(xval,yval,val):
# Write one (r, g, b) triple into the shared array.
pixindex = pix2index(xval,yval)
ima[pixindex] = val[0]
ima[pixindex + 1] = val[1]
ima[pixindex + 2] = val[2]
print("[" + str(threadName) + "] Started.")
# rfunccomp = eval('lambda: ' + globals()["rfunc"], locals())
# gfunccomp = eval('lambda: ' + globals()["gfunc"], locals())
# bfunccomp = eval('lambda: ' + globals()["bfunc"], locals())
# Main work loop: runs until the producer sets im.exitFlag.  im.currq selects
# which of the two queues is currently being drained.
while not im.exitFlag:
gotqdata = False
#queueLock.acquire()
if not q1.empty() and im.currq == 1:
try:
qlock.acquire()
datax = q1.get()
qlock.release()
gotqdata = True
except Exception as ex:
print("Q1Error: " + str(ex))
elif not q2.empty() and im.currq == 2:
try:
qlock.acquire()
datax = q2.get()
qlock.release()
gotqdata = True
except Exception as ex:
print("Q2Error: " + str(ex))
else:
time.sleep(0.1)
if gotqdata:
#queueLock.release()
#print ("%s processing %s" % (threadName, data))
# datax is a column index; walk that column top to bottom.
x = datax
#print("[" + str(threadName) + "] Processing " + str(x))
y = canvas_height
while y > 0:
y = y - 1
qlock.acquire()
# Shared processed-pixel counter drives the progress display.
im.tmppix = im.tmppix + 1
qlock.release()
#print("Solving: " + str(x) + "," + str(y))
value = getpix(x,y)
XValue = round((x * NewRange) / OldXRange)
YValue = round((y * NewRange) / OldYRange)
progress = 255 * (percent.value / 100)
if op == 1:
# Op 1: greyscale -- average the three channels.
level = round((value[0]+value[1]+value[2]) / 3)
pixval = (level,level,level)
elif op == 2:
# Op 2: custom -- evaluate the user expressions; failures are counted
# and the channel falls back to 0.
red = value[0]
green = value[1]
blue = value[2]
try:
# r = rfunccomp()
r = eval(rfunc, locals())
except Exception as ex:
print("An Error occured at pixel (" + str(x) + "," + str(y) + "), Colour: " + str(value) + " with the red function: " + rfunc)
print("Error: " + str(ex))
r = 0
rerrors.value = rerrors.value + 1
try:
# g = gfunccomp()
g = eval(gfunc, locals())
except Exception as ex:
print("An Error occured at pixel (" + str(x) + "," + str(y) + "), Colour: " + str(value) + " with the green function: " + gfunc)
print("Error: " + str(ex))
g = 0
gerrors.value = gerrors.value + 1
try:
# b = bfunccomp()
b = eval(bfunc, locals())
except Exception as ex:
print("An Error occured at pixel (" + str(x) + "," + str(y) + "), Colour: " + str(value) + " with the blue function: " + bfunc)
print("Error: " + str(ex))
b = 0
berrors.value = berrors.value + 1
# Clamp each channel to the valid 0..255 byte range.
if r < 0:
r = 0
if r > 255:
r = 255
if g < 0:
g = 0
if g > 255:
g = 255
if b < 0:
b = 0
if b > 255:
b = 255
#print(str(red) + "," + str(green) + "," + str(blue) + ";" + str(r) + "," + str(g) + "," + str(b))
pixval = (round(r),round(g),round(b))
else:
# Op 0 (or anything else): pass the pixel through unchanged.
pixval = value
# print("Changing pixel (" + str(x) + "," + str(y) + ") from " + str(value) + " to " + str(pixval))
#print("Before: " + str(x) + "," + str(y) + ":" + str(getpix(x,y)))
setpix(x,y,pixval)
#print("After: " + str(x) + "," + str(y) + ":" + str(getpix(x,y)))
else:
#queueLock.release()
pass
#time.sleep(1)
print("[" + str(threadName) + "] Exiting.")
# Interactive driver: choose a source image (generate or load), choose an
# operation, fan the per-column work out to worker processes via two queues,
# then reassemble, save and display the result in a Tk window.
if __name__ == '__main__':
print("""Modes:
0: Generate New Image
1: Load Image from File
""")
source = 0
gotsource = False
while not gotsource:
try:
source = int(input("Mode: "))
if source == 0 or source == 1:
gotsource = True
else:
print("Please enter either 0 or 1")
except:
print("Please enter either 0 or 1")
print("")
if source == 0:
# Mode 0: build a blank RGB canvas of user-supplied dimensions.
genapproved = ""
while not genapproved.lower() == "y":
print("")
gotdimensions = False
while not gotdimensions:
try:
genheight = int(input("Image Height in Pixels: "))
genwidth = int(input("Image Width in Pixels: "))
if genheight > 0 and genwidth > 0:
gotdimensions = True
else:
print("Please enter a valid integer")
except:
print("Please enter a valid integer")
filename = input("Image name: ")
genapproved = input("Are these settings correct? [Y/N]: ")
print("")
print("Generating Canvas...")
try:
im = PIL.Image.new("RGB",(genwidth,genheight))
except Exception as ex:
print("An error occured when generating a canvas")
print("Error: " + str(ex))
while True:
pass
time.sleep(1)
print("Canvas Generated Successfully")
elif source == 1:
# Mode 1: load an existing image from disk.
imported = False
while not imported:
try:
filename = input("Image Filename: ")
im = PIL.Image.open(filename)
imported = True
except Exception as ex:
print("An error occured when importing the image: " + str(ex))
else:
print("An Error Occured With Setting The Mode")
while True:
pass
print("""Operations:
0: Nothing
1: Greyscale
2: Custom
""")
opsuccess = False
while not opsuccess:
try:
op = int(input("Operation: "))
if 0 <= op and op <= 2:
opsuccess = True
else:
print("Invalid Op Code")
except:
print("Invalid Op Code")
canvas_height = im.height
canvas_width = im.width
progress = 0
percent = 0
XValue = 0
YValue = 0
x = 0
y = 0
print("")
print("Image Dimensions")
print("Height: " + str(canvas_height))
print("Width: " + str(canvas_width))
print("")
# Select the per-channel expressions for the chosen operation; op 2 asks the
# user for arbitrary Python expressions (evaluated with eval in the workers).
if op == 0:
rfunc = "red"
gfunc = "green"
bfunc = "blue"
elif op == 1:
rfunc = "round((red+green+blue) / 3)"
gfunc = "round((red+green+blue) / 3)"
bfunc = "round((red+green+blue) / 3)"
elif op == 2:
cusapproved = ""
while cusapproved.lower() != "y" :
print("""
Available Varibles:
canvas_height
canvas_width
x
y
progress
percent
XValue
YValue
red
green
blue
Available Functions:
Anything from the math module
funct_if(thing to test,value if true, value if false)
scale(value minimum, value maximum, new minimum, new maximum, value)
is_even(value)
draw_funct(function(use dx instead of x and put in quotation marks), x value minimum, x value maximum, y value minimum, y value maximum, resolution in px)
""")
globals()["rfunc"] = str(input("Red function: "))
globals()["gfunc"] = str(input("Green function: "))
globals()["bfunc"] = str(input("Blue function: "))
cusapproved = input("Are these functions correct? [Y/N]: ")
x = 0
y = 0
pix = 0
tmpx = 0
OldXRange = (im.width - 0)
OldYRange = (im.height - 0)
NewRange = (255 - 0)
print("Starting Conversion...")
starttime = time.time()
# Shared state for the worker processes: two bounded work queues, a namespace
# of flags/counters, a lock, and the image bytes as a shared integer array.
manager = Manager()
#threadList = ["Thread-1", "Thread-2", "Thread-3"]
#queueLock = threading.Lock()
# workQueue = queue.Queue(50000)
workQueue1 = manager.Queue(2500)
workQueue2 = manager.Queue(2500)
threads = []
threadID = 1
threadnum = threadnumber
imlist = list(im.tobytes())
#ima = list(im.getdata())
mcim = manager.Namespace()
mcim.exitFlag = False
mcim.tmppix = 0
mcim.currq = 200
mcim.height = im.height
mcim.width = im.width
mcqlock = manager.Lock()
mcima = Array("i",imlist)
rerrors = Value('d', 0)
gerrors = Value('d', 0)
berrors = Value('d', 0)
percent = Value('d', 0)
# Create new threads
print("Starting Processes...")
for tNum in range(threadnum):
#thread = myThread(threadID, "Thread-" + str(threadID), workQueue)
# process_data(threadName, q, im, exitFlag, tmppix, rfunc, rerrors, gfunc, gerrors, bfunc, berrors, percent)
thread = Process(target=process_data, args=("Process-" + str(threadID), workQueue1 , workQueue2, mcim, mcqlock, mcima, rfunc, rerrors, gfunc, gerrors, bfunc, berrors, percent, op,))
thread.start()
threads.append(thread)
threadID += 1
status = Tk()
status.title(string = "Status")
percentchange = 0
totalpix = im.width * im.height
time.sleep(1)
pixtmp = 0
print("Allocating Pixels...")
# Producer loop: hand out one column index per queue item, flipping between
# the two queues so workers drain one while the other fills.
mcim.currq = 2
while tmpx < im.width:
while (workQueue1.full() and workQueue2.full()) and not (workQueue1.empty() and workQueue2.empty()):
print("FULL: " + str(workQueue1.full() and workQueue2.full()))
print("EMPTY: " + str(not (workQueue1.empty() and workQueue2.empty())))
if workQueue1.full() and workQueue2.empty():
mcim.currq = 1
print("Q1")
elif workQueue2.full() and workQueue1.empty():
mcim.currq = 2
print("Q2")
elif (mcim.currq == 1 and workQueue2.full()) or (mcim.currq == 2 and workQueue1.full()):
time.sleep(0.5)
else:
pass
try:
if mcim.currq == 1:
workQueue2.put(tmpx)
elif mcim.currq == 2:
workQueue1.put(tmpx)
else:
print("invalid currq")
pixtmp += 1
except:
print("put error")
print(str(pixtmp) + "/" + str(im.width))
oldpercent = percent.value
percentl = (mcim.tmppix - 1) / (totalpix / 100)
percent.value = round(percentl,1)
if oldpercent != percent.value:
Label(status,text = (str(percent.value) + "%"), anchor="w").grid(row = 1, column = 1)
status.update()
tmpx = tmpx + 1
print("Finished allocating pixels")
# Wait for the workers to finish every pixel, keeping the progress window live.
while mcim.tmppix != totalpix:
if workQueue1.empty() and not workQueue2.empty():
mcim.currq = 2
elif not workQueue1.empty() and workQueue2.empty():
mcim.currq = 1
oldpercent = percent.value
percentl = (mcim.tmppix - 1) / (totalpix / 100)
percent.value = round(percentl,1)
if oldpercent != percent.value:
Label(status,text = (str(percent.value) + "%"), anchor="w").grid(row = 1, column = 1)
status.update()
time.sleep(0.1)
print("Queue Size: " + str(workQueue1.qsize() + workQueue2.qsize()) + " , ExitFlag: " + str(mcim.exitFlag) + " , " + str(mcim.tmppix) + "/" + str(totalpix) + " , QSIZE+TMPPIX: " + str((workQueue1.qsize() + workQueue2.qsize())*im.height + mcim.tmppix))
mcim.exitFlag = True
print("Stopping Processes...")
for t in threads:
t.join()
Label(status,text = (str(100.0) + "%"), anchor="w").grid(row = 1, column = 1)
status.update()
#imoutput = mcim.im
# Reassemble the output image from the shared byte array.
imoutput = PIL.Image.new(im.mode,im.size)
imoutput.frombytes(bytes(mcima))
endtime = time.time()
processtime = endtime - starttime
s2m = divmod(processtime, 60)
m2h = divmod(s2m[0], 60)
timeseconds = round(s2m[1],3)
timeminutes = round(m2h[1])
timehours = round(m2h[0])
print("Conversion Completed Successfully in " + str(timehours) + " hours, " + str(timeminutes) + " minutes and " + str(timeseconds) + " seconds.")
time.sleep(0.5)
print()
print("Conversion Summary:")
time.sleep(0.5)
print("Your Red Function: Red = " + str(rfunc) + " had " + str(rerrors.value) + " error(s).")
time.sleep(0.5)
print("Your Green Function: Green = " + str(gfunc) + " had " + str(gerrors.value) + " error(s).")
time.sleep(0.5)
print("Your Blue Function: Blue = " + str(bfunc) + " had " + str(berrors.value) + " error(s).")
print("")
time.sleep(1)
print("Saving...")
# Save under the first free "<name>-<n>sav.png" filename.
savid = 0
saved = False
while not saved:
if not os.path.isfile(filename + "-" + str(savid) + "sav.png"):
imoutput.save(filename + "-" + str(savid) + "sav.png", "PNG")
saved = True
else:
savid = savid + 1
print("Saved as: " + filename + "-" + str(savid) + "sav.png")
status.destroy()
# Display the result; the trailing busy-loop keeps the process alive after
# the Tk window is closed (the script never exits on its own).
root = Tk()
photo = ImageTk.PhotoImage(imoutput)
canvas = Canvas(width=canvas_width, height=canvas_height, bg='white')
canvas.pack()
canvas.create_image(canvas_width/2, canvas_height/2, image=photo)
root.mainloop()
while True:
pass
| TNT-Samuel/Coding-Projects | Image Test/_ImageEdit3MultiProcess.py | Python | gpl-3.0 | 17,506 |
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: Walter Purcaro
"""
from urlparse import urljoin
import re
from module.common.json_layer import json_loads
from module.plugins.Crypter import Crypter
from module.utils import save_join
API_KEY = "AIzaSyCKnWLNlkX-L4oD1aEzqqhRw1zczeD6_k0"
class YoutubeBatch(Crypter):
# pyLoad crypter plugin (Python 2): expands a YouTube channel or playlist URL
# into the individual watch URLs, grouped into one pyLoad package per playlist,
# using the YouTube Data API v3.
__name__ = "YoutubeBatch"
__type__ = "crypter"
__pattern__ = r"https?://(?:www\.)?(m\.)?youtube\.com/(?P<TYPE>user|playlist|view_play_list)(/|.*?[?&](?:list|p)=)(?P<ID>[\w-]+)"
__version__ = "1.00"
__description__ = """Youtube.com channel & playlist decrypter"""
__config__ = [("likes", "bool", "Grab user (channel) liked videos", "False"),
("favorites", "bool", "Grab user (channel) favorite videos", "False"),
("uploads", "bool", "Grab channel unplaylisted videos", "True")]
__author_name__ = ("Walter Purcaro")
__author_mail__ = ("vuolter@gmail.com")
def api_response(self, ref, req):
# Call the YouTube Data API v3 endpoint `ref` with query dict `req`
# (the API key is injected here) and return the decoded JSON.
req.update({"key": API_KEY})
url = urljoin("https://www.googleapis.com/youtube/v3/", ref)
page = self.load(url, get=req)
return json_loads(page)
def getChannel(self, user):
# Resolve a username to its channel record (id, title, related playlists),
# or None implicitly when the user has no channel.
channels = self.api_response("channels", {"part": "id,snippet,contentDetails", "forUsername": user, "maxResults": "50"})
if channels["items"]:
channel = channels["items"][0]
return {"id": channel["id"],
"title": channel["snippet"]["title"],
"relatedPlaylists": channel["contentDetails"]["relatedPlaylists"],
"user": user} # One lone channel for user?
def getPlaylist(self, p_id):
# Fetch metadata for a single playlist id; returns None implicitly when
# the playlist does not exist.
playlists = self.api_response("playlists", {"part": "snippet", "id": p_id})
if playlists["items"]:
playlist = playlists["items"][0]
return {"id": p_id,
"title": playlist["snippet"]["title"],
"channelId": playlist["snippet"]["channelId"],
"channelTitle": playlist["snippet"]["channelTitle"]}
def _getPlaylists(self, id, token=None):
# Generator over all playlist ids of a channel, following pagination
# recursively via nextPageToken.
req = {"part": "id", "maxResults": "50", "channelId": id}
if token:
req.update({"pageToken": token})
playlists = self.api_response("playlists", req)
for playlist in playlists["items"]:
yield playlist["id"]
if "nextPageToken" in playlists:
for item in self._getPlaylists(id, playlists["nextPageToken"]):
yield item
def getPlaylists(self, ch_id):
# Materialize full playlist records for every playlist of the channel.
return map(self.getPlaylist, self._getPlaylists(ch_id))
def _getVideosId(self, id, token=None):
# Generator over all video ids contained in a playlist, paginated.
req = {"part": "contentDetails", "maxResults": "50", "playlistId": id}
if token:
req.update({"pageToken": token})
playlist = self.api_response("playlistItems", req)
for item in playlist["items"]:
yield item["contentDetails"]["videoId"]
if "nextPageToken" in playlist:
for item in self._getVideosId(id, playlist["nextPageToken"]):
yield item
def getVideosId(self, p_id):
return list(self._getVideosId(p_id))
def decrypt(self, pyfile):
# Plugin entry point: decide whether the URL is a channel or a playlist,
# collect the playlists to process, then emit one package per playlist.
match = re.match(self.__pattern__, pyfile.url)
m_id = match.group("ID")
m_type = match.group("TYPE")
if m_type == "user":
self.logDebug("Url recognized as Channel")
user = m_id
channel = self.getChannel(user)
if channel:
playlists = self.getPlaylists(channel["id"])
self.logDebug("%s playlist\s found on channel \"%s\"" % (len(playlists), channel["title"]))
# Optionally add the channel's special playlists (likes/favorites/uploads)
# according to the plugin configuration flags.
relatedplaylist = {p_name: self.getPlaylist(p_id) for p_name, p_id in channel["relatedPlaylists"].iteritems()}
self.logDebug("Channel's related playlists found = %s" % relatedplaylist.keys())
relatedplaylist["uploads"]["title"] = "Unplaylisted videos"
relatedplaylist["uploads"]["checkDups"] = True #: checkDups flag
for p_name, p_data in relatedplaylist.iteritems():
if self.getConfig(p_name):
p_data["title"] += " of " + user
playlists.append(p_data)
else:
playlists = []
else:
self.logDebug("Url recognized as Playlist")
playlists = [self.getPlaylist(m_id)]
if not playlists:
self.fail("No playlist available")
# Build watch URLs; the "uploads" pseudo-playlist skips videos already
# added from a regular playlist (checkDups).
addedvideos = []
urlize = lambda x: "https://www.youtube.com/watch?v=" + x
for p in playlists:
p_name = p["title"]
p_videos = self.getVideosId(p["id"])
p_folder = save_join(self.config['general']['download_folder'], p["channelTitle"], p_name)
self.logDebug("%s video\s found on playlist \"%s\"" % (len(p_videos), p_name))
if not p_videos:
continue
elif "checkDups" in p:
p_urls = [urlize(v_id) for v_id in p_videos if v_id not in addedvideos]
self.logDebug("%s video\s available on playlist \"%s\" after duplicates cleanup" % (len(p_urls), p_name))
else:
p_urls = map(urlize, p_videos)
self.packages.append((p_name, p_urls, p_folder)) #: folder is NOT recognized by pyload 0.4.9!
addedvideos.extend(p_videos)
| wangjun/pyload | module/plugins/crypter/YoutubeBatch.py | Python | gpl-3.0 | 6,087 |
import logging
from functools import wraps
from requests.exceptions import HTTPError
from django.utils.decorators import available_attrs
from django.conf import settings
from authclient import _get_user_session_key, SESSION_KEY
from authclient.client import auth_client
logger = logging.getLogger('authclient')
def app_auth_exempt(function=None):
    """Mark a view as exempt from application-level authentication.

    Works both as a bare decorator (``@app_auth_exempt``) and as a
    decorator factory (``@app_auth_exempt()``).  The returned wrapper is
    tagged with ``app_auth_exempt = True`` for middleware to inspect.
    """
    def _mark(view):
        @wraps(view)
        def wrapper(request, *args, **kwargs):
            return view(request, *args, **kwargs)
        wrapper.app_auth_exempt = True
        return wrapper
    return _mark(function) if function else _mark
def refresh_jwt(view_func):
"""
Decorator that refreshes the session's JWT resource token after the
wrapped view has produced its response.

The current token is read from the session; when present it is
exchanged for a fresh one via the auth service and written back under
SESSION_KEY.  A failed refresh (HTTPError) is logged at debug level and
the old token is kept.  (The previous docstring, about cache headers,
was copied from an unrelated decorator.)
"""
# NOTE(review): django.utils.decorators.available_attrs was removed in
# Django 3.0 -- confirm the Django version this project targets.
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view_func(request, *args, **kwargs):
response = view_func(request, *args, **kwargs)
try:
resource_token = _get_user_session_key(request)
except KeyError:
# No token in the session (e.g. anonymous request): nothing to refresh.
pass
else:
try:
resource_token = auth_client.token_refresh.call(
payload={'token': resource_token},
headers={'X-APPLICATION': settings.AUTH_API_TOKEN},
)['resource_token']
except HTTPError:
logger.debug('Failed to refresh the JWT.')
else:
request.session[SESSION_KEY] = resource_token
return response
return _wrapped_view_func
| PyPila/auth-client | authclient/decorators.py | Python | gpl-3.0 | 1,544 |