repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
LinusU/fbthrift
|
refs/heads/master
|
thrift/test/py/TestCppServer.py
|
1
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# this interferes with ServiceRouter/SWIG
# @lint-avoid-python-3-compatibility-imports
#from __future__ import unicode_literals
import json
import math
import multiprocessing
import os
import sys
import tempfile
import threading
import time
from fb303.ContextFacebookBase import FacebookBase
from libfb.testutil import BaseFacebookTestCase
from thrift.transport import TSocket, TSSLSocket, TTransport, THeaderTransport
from thrift.protocol import TBinaryProtocol, THeaderProtocol
from thrift.Thrift import TProcessorEventHandler, TProcessor, TMessageType, \
TServerInterface
from thrift.server.TCppServer import TCppServer, TSSLConfig, TSSLCacheOptions, \
SSLPolicy, SSLVerifyPeerEnum
from thrift.server.TServer import TServerEventHandler
# Load test fixtures from either fbbuild or Buck.
try:
# Try an fbconfig-style main module
from tools.test.stubs.fbpyunit import MainProgram
except ImportError:
# Assume a buck-style main module
from __test_main__ import MainProgram
from concurrent.futures import ProcessPoolExecutor
from test.sleep import SleepService, ttypes
from futuretest.future import FutureSleepService
TIMEOUT = 60 * 1000 # milliseconds
def getClient(addr, service_module=SleepService):
    """Open a framed binary-protocol connection to addr and return a client."""
    sock = TSocket.TSocket(addr[0], addr[1])
    framed = TTransport.TFramedTransport(sock)
    proto = TBinaryProtocol.TBinaryProtocol(framed)
    cli = service_module.Client(proto)
    framed.open()
    return cli
def getHeaderClient(addr, sock_cls=TSocket.TSocket):
    """Open a header-protocol connection to addr with a test header set."""
    raw = sock_cls(addr[0], addr[1])
    header_trans = THeaderTransport.THeaderTransport(raw)
    # The server-side tests assert on this exact header.
    header_trans.set_header("hello", "world")
    proto = THeaderProtocol.THeaderProtocol(header_trans)
    cli = SleepService.Client(proto)
    header_trans.open()
    return cli
class SleepProcessorEventHandler(TProcessorEventHandler):
    """Records the endpoints of the most recent request's connection."""

    def getHandlerContext(self, fn_name, server_context):
        # Stash both endpoint names so tests can assert on them later.
        self.last_sock_name = server_context.getSockName()
        self.last_peer_name = server_context.getPeerName()
class SleepHandler(FacebookBase, SleepService.Iface, TServerInterface):
    """Test handler implementing the SleepService interface."""

    def __init__(self, noop_event, shutdown_event):
        FacebookBase.__init__(self, "sleep")
        TServerInterface.__init__(self)
        # Events the oneway tests use to observe that calls arrived.
        self.noop_event = noop_event
        self.shutdown_event = shutdown_event

    def sleep(self, seconds):
        print("server sleeping...")
        time.sleep(seconds)
        print("server sleeping... done")

    def space(self, s):
        # Accept bytes (py3 thrift) or str transparently.
        text = s.decode('latin1') if isinstance(s, bytes) else s
        return " ".join(text)

    def noop(self):
        self.noop_event.set()

    def shutdown(self):
        self.shutdown_event.set()

    def header(self):
        ctx = self.getRequestContext()
        if ctx is None:
            return False
        headers = ctx.getHeaders()
        return headers is not None and headers.get(b"hello") == b"world"
def is_prime(num):
    """Return True if num is prime by trial division over odd factors.

    Fixes edge cases of the PEP-3148 example this was taken from:
    the original returned False for 2 and True for 0, 1, and
    negatives.  Behavior for odd num >= 3 is unchanged.
    """
    if num < 2:
        # 0, 1, and negatives are not prime.
        return False
    if num == 2:
        # The only even prime.
        return True
    if num % 2 == 0:
        return False
    # Only odd candidates up to floor(sqrt(num)) need checking.
    sqrt_n = int(math.floor(math.sqrt(num)))
    for i in range(3, sqrt_n + 1, 2):
        if num % i == 0:
            return False
    return True
class FutureSleepHandler(FacebookBase, FutureSleepService.Iface,
                         TServerInterface):
    """Handler whose isPrime work runs on a process-pool executor."""

    def __init__(self, executor):
        FacebookBase.__init__(self, "futuresleep")
        TServerInterface.__init__(self)
        self.executor = executor

    def sleep(self, seconds):
        print("future server sleeping...")
        time.sleep(seconds)
        print("future server sleeping... done")

    def future_isPrime(self, num):
        # Off-load the primality test; the server resolves the future.
        return self.executor.submit(is_prime, num)
class SpaceProcess(multiprocessing.Process):
    # Client subprocess: calls space() and reports the socket endpoint
    # names it observed back to the parent through a queue.

    def __init__(self, addr):
        # Create the queue before Process.__init__ so it can be handed
        # to the child as a target argument.
        self.queue = multiprocessing.Queue()
        multiprocessing.Process.__init__(
            self, target=self.target, args=(self.queue,))
        self.addr = addr

    def target(self, queue):
        """Run in the child: call space() and push our endpoint names."""
        client = getClient(self.addr)
        hw = "hello, world"
        hw_spaced = "h e l l o , w o r l d"
        result = client.space(hw)
        # Thrift may hand back bytes under py3; normalize for comparison.
        if isinstance(result, bytes):
            result = result.decode('latin1')
        assert result == hw_spaced
        # Report (local, remote) names so the parent can compare them
        # with what the server-side event handler recorded.
        queue.put((client._iprot.trans.getTransport().getSocketName(),
                   client._iprot.trans.getTransport().getPeerName()))
class FutureProcess(multiprocessing.Process):
    """Client process driving FutureSleepService from several threads."""

    # Stolen from PEP-3148
    PRIMES = [
        112272535095293,
        112582705942171,
        112272535095293,
        115280095190773,
        115797848077099,
    ]

    def __init__(self, addr):
        multiprocessing.Process.__init__(self)
        self.addr = addr

    def isPrime(self, num):
        client = getClient(self.addr, FutureSleepService)
        assert client.isPrime(num)

    def sleep(self):
        client = getClient(self.addr, FutureSleepService)
        client.sleep(2)

    def run(self):
        # Send multiple requests from multiple threads. Unfortunately
        # future client isn't supported yet.
        workers = []
        for prime in self.PRIMES:
            w = threading.Thread(target=self.isPrime, args=(prime,))
            w.start()
            workers.append(w)
        for _ in range(5):
            w = threading.Thread(target=self.sleep)
            w.start()
            workers.append(w)
        for w in workers:
            w.join()
class HeaderProcess(multiprocessing.Process):
    """Client process asserting that headers round-trip to the server."""

    def __init__(self, addr):
        multiprocessing.Process.__init__(self)
        self.addr = addr

    def run(self):
        assert getHeaderClient(self.addr).header()
class ParallelProcess(multiprocessing.Process):
    """Issues four concurrent sleep(3) calls over separate connections."""

    def __init__(self, addr):
        multiprocessing.Process.__init__(self)
        self.addr = addr

    def run(self):
        clients = [getClient(self.addr) for _ in range(4)]
        # Fire every request before collecting any response, so the
        # sleeps overlap on the server.
        for c in clients:
            c.send_sleep(3)
        for c in clients:
            c.recv_sleep()
class OnewayProcess(multiprocessing.Process):
    """Sends oneway calls, then verifies the connection still works."""

    def __init__(self, addr):
        multiprocessing.Process.__init__(self)
        self.addr = addr

    def run(self):
        client = getClient(self.addr)
        client.noop()
        client.shutdown()
        # Requests sent after the oneway request still get responses
        expected = "h e l l o , w o r l d"
        result = client.space("hello, world")
        if isinstance(result, bytes):
            result = result.decode('latin1')
        assert result == expected
class TestHeaderProcessor(TProcessor, BaseFacebookTestCase):
    """Processor that asserts the 'hello' header arrived, then replies
    to any call with a fixed space() payload."""

    def process(self, iprot, oprot, server_context=None):
        # Consume the message envelope before touching headers.
        fname, type, seqid = iprot.readMessageBegin()
        # Header set by getHeaderClient(); keys/values are bytes.
        self.assertTrue(iprot.trans.get_headers().get(b"hello") == b"world")
        result = SleepService.space_result()
        result.success = "h i"
        # Echo the sequence id so the client pairs the reply correctly.
        oprot.writeMessageBegin(fname, TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
class TestServerEventHandler(TServerEventHandler):
    """Counts connection creations/destructions for later assertions."""

    def __init__(self):
        self.connCreated = 0
        self.connDestroyed = 0

    def newConnection(self, context):
        self.connCreated = self.connCreated + 1

    def connectionDestroyed(self, context):
        self.connDestroyed = self.connDestroyed + 1
class TestServer(BaseFacebookTestCase):
    """Base test case that spins up a TCppServer on an ephemeral port."""

    def getProcessor(self):
        # Subclasses override this to serve a different processor.
        processor = SleepService.Processor(
            SleepHandler(self.noop_event, self.shutdown_event))
        self.event_handler = SleepProcessorEventHandler()
        processor.setEventHandler(self.event_handler)
        return processor

    def setUp(self):
        super(TestServer, self).setUp()
        # Events the SleepHandler sets; oneway tests wait on them.
        self.noop_event = threading.Event()
        self.shutdown_event = threading.Event()
        self.serverEventHandler = TestServerEventHandler()
        self.server = TCppServer(self.getProcessor())
        self.configureSSL()
        self.server.setServerEventHandler(self.serverEventHandler)
        self.addCleanup(self.stopServer)
        # Let the kernel choose a port.
        self.server.setPort(0)
        self.server_thread = threading.Thread(target=self.server.serve)
        self.server_thread.start()
        # Poll (up to ~3s) until the server reports its bound address.
        for t in range(30):
            addr = self.server.getAddress()
            if addr:
                break
            time.sleep(0.1)
        self.assertTrue(addr)
        self.server_addr = addr

    def configureSSL(self):
        # Hook for SSL-enabled subclasses; the plain server does nothing.
        pass

    def stopServer(self):
        # Idempotent: addCleanup may fire after an explicit stopServer().
        if self.server:
            self.server.stop()
            self.server = None
class FutureTestServer(TestServer):
    """Server tests for the future-based sleep service."""

    # Shared pool, created once at class-definition time.
    EXECUTOR = ProcessPoolExecutor()

    def getProcessor(self):
        handler = FutureSleepHandler(FutureTestServer.EXECUTOR)
        return FutureSleepService.Processor(handler)

    def testIsPrime(self):
        worker = FutureProcess(self.server_addr)
        worker.start()
        worker.join()
        self.stopServer()
class BaseTestServer(TestServer):
    """End-to-end tests against the plain SleepService server."""

    def testSpace(self):
        space = SpaceProcess(self.server_addr)
        space.start()
        client_sockname, client_peername = space.queue.get()
        space.join()
        self.stopServer()
        self.assertEquals(space.exitcode, 0)
        # The server's peer name is the client's local socket name,
        # and vice versa.
        self.assertEquals(self.event_handler.last_peer_name, client_sockname)
        self.assertEquals(self.event_handler.last_sock_name, client_peername)
        self.assertEquals(self.serverEventHandler.connCreated, 1)
        self.assertEquals(self.serverEventHandler.connDestroyed, 1)

    def testHeader(self):
        header = HeaderProcess(self.server_addr)
        header.start()
        header.join()
        self.stopServer()

    def testParallel(self):
        parallel = ParallelProcess(self.server_addr)
        parallel.start()
        start_time = time.time()
        # this should take about 3 seconds. In practice on an unloaded
        # box, it takes about 3.6 seconds.
        parallel.join()
        duration = time.time() - start_time
        print("total time = {}".format(duration))
        self.stopServer()
        self.assertEqual(parallel.exitcode, 0)
        # Four serialized sleep(3) calls would take ~12s; under 5s
        # proves the requests overlapped.
        self.assertLess(duration, 5)

    def testOneway(self):
        oneway = OnewayProcess(self.server_addr)
        oneway.start()
        oneway.join()
        self.stopServer()
        # Oneway calls return no response; confirm they ran via events.
        self.assertTrue(self.noop_event.wait(5))
        self.assertTrue(self.shutdown_event.wait(5))
class HeaderTestServer(TestServer):
    """Server whose processor checks request headers directly."""

    def getProcessor(self):
        return TestHeaderProcessor()

    def testHeaderInProcessor(self):
        reply = getHeaderClient(self.server_addr).space("hi")
        self.assertEquals(reply, "h i")
        self.stopServer()
class TSSLConfigTest(BaseFacebookTestCase):
    """Sanity checks for TSSLConfig defaults and setter validation."""

    def testDefaults(self):
        config = TSSLConfig()
        # All path/name fields default to the empty string.
        for attr in ('cert_path', 'key_path', 'key_pw_path',
                     'client_ca_path', 'ecc_curve_name'):
            self.assertEquals(getattr(config, attr), '')
        self.assertEquals(config.verify, SSLVerifyPeerEnum.VERIFY)
        self.assertEquals(config.ssl_policy, SSLPolicy.PERMITTED)

    def testEnumSetters(self):
        config = TSSLConfig()
        bogus_values = ('', 'bogus', 5, 0)
        # Non-enum assignments must be rejected by both setters.
        for bogus in bogus_values:
            with self.assertRaises(ValueError):
                config.verify = bogus
        for bogus in bogus_values:
            with self.assertRaises(ValueError):
                config.ssl_policy = bogus
class SSLHeaderTestServer(TestServer):
    """SSL-enabled server tests: handshake, ticket seeds, and config
    validation."""

    def getProcessor(self):
        return TestHeaderProcessor()

    def setupTickets(self):
        # Write a TLS ticket-seed file for the server to load.
        self.ticket_file = tempfile.NamedTemporaryFile(delete=False)
        self.ticket_data = {
            'old': ['00000000'],
            'current': ['11111111'],
            'new': ['22222222']
        }
        with open(self.ticket_file.name, 'w') as f:
            f.write(json.dumps(self.ticket_data))

    def configureSSL(self):
        config = TSSLConfig()
        self.setupTickets()
        self.assertEquals(config.key_path, "")
        config.ssl_policy = SSLPolicy.REQUIRED
        config.cert_path = 'thrift/test/py/test_cert.pem'
        config.client_verify = SSLVerifyPeerEnum.VERIFY
        config.key_path = None
        config.ticket_file_path = self.ticket_file.name
        # expect an error with a cert_path but no key_path
        with self.assertRaises(ValueError):
            self.server.setSSLConfig(config)
        # Reuse the cert file path as the key path; now it validates.
        config.key_path = 'thrift/test/py/test_cert.pem'
        self.server.setSSLConfig(config)
        cache_options = TSSLCacheOptions()
        self.server.setSSLCacheOptions(cache_options)

    def testSSLClient(self):
        ssl_client = getHeaderClient(self.server_addr, TSSLSocket.TSSLSocket)
        self.assertEquals(ssl_client.space("hi"), "h i")
        # Policy is REQUIRED, so a plaintext client must fail.
        client = getHeaderClient(self.server_addr)
        with self.assertRaises(Exception):
            client.space("hi")
        self.stopServer()

    def testTickets(self):
        # The seeds read back must match what setupTickets() wrote.
        tickets = self.server.getTicketSeeds()
        self.assertEquals(tickets, self.ticket_data)

    def tearDown(self):
        # Clean up the ticket file created by setupTickets().
        os.remove(self.ticket_file.name)

    def testValidateSSL(self):
        # Non-config input is rejected.
        valid, msg = self.server.validateSSLConfig({})
        self.assertFalse(valid)
        self.assertIsNotNone(msg)
        cfg = TSSLConfig()
        valid, msg = self.server.validateSSLConfig(cfg)
        self.assertTrue(valid)
        self.assertIsNone(msg)
        # Key without cert: invalid.
        cfg.key_path = 'thrift/test/py/test_cert.pem'
        valid, msg = self.server.validateSSLConfig(cfg)
        self.assertFalse(valid)
        self.assertIsNotNone(msg)
        # Cert without key: invalid.
        cfg.key_path = ''
        cfg.cert_path = 'thrift/test/py/test_cert.pem'
        valid, msg = self.server.validateSSLConfig(cfg)
        self.assertFalse(valid)
        self.assertIsNotNone(msg)
        # Cert plus key: valid.
        cfg.key_path = cfg.cert_path
        valid, msg = self.server.validateSSLConfig(cfg)
        self.assertTrue(valid)
        self.assertIsNone(msg)
        # Nonexistent client CA path: invalid.
        cfg.client_ca_path = 'thrift/test/should/not/exist.pem'
        valid, msg = self.server.validateSSLConfig(cfg)
        self.assertFalse(valid)
        self.assertIsNotNone(msg)
if __name__ == '__main__':
    # MainProgram is either the fbpyunit or the Buck test main,
    # whichever import succeeded above.
    rc = MainProgram(sys.argv).run()
    sys.exit(rc)
|
CGenie/sorl-thumbnail
|
refs/heads/master
|
tests/thumbnail_tests/test_parsers.py
|
17
|
# -*- coding: utf-8 -*-
import unittest
from sorl.thumbnail.helpers import ThumbnailError
from sorl.thumbnail.parsers import parse_crop, parse_geometry
class CropParserTestCase(unittest.TestCase):
    """Tests for sorl.thumbnail.parsers.parse_crop."""

    def test_alias_crop(self):
        self.assertEqual(parse_crop('center', (500, 500), (400, 400)),
                         (50, 50))
        self.assertEqual(parse_crop('right', (500, 500), (400, 400)),
                         (100, 50))

    def test_percent_crop(self):
        self.assertEqual(parse_crop('50% 0%', (500, 500), (400, 400)),
                         (50, 0))
        self.assertEqual(parse_crop('10% 80%', (500, 500), (400, 400)),
                         (10, 80))

    def test_px_crop(self):
        self.assertEqual(parse_crop('200px 33px', (500, 500), (400, 400)),
                         (100, 33))

    def test_bad_crop(self):
        # Negative pixel offsets are not a valid crop specification.
        with self.assertRaises(ThumbnailError):
            parse_crop('-200px', (500, 500), (400, 400))
class GeometryParserTestCase(unittest.TestCase):
    """Tests for sorl.thumbnail.parsers.parse_geometry."""

    def test_geometry(self):
        # Missing dimensions come back as None.
        cases = [
            ('222x30', (222, 30)),
            ('222', (222, None)),
            ('x999', (None, 999)),
        ]
        for spec, expected in cases:
            self.assertEqual(parse_geometry(spec), expected)
|
chacoroot/planetary
|
refs/heads/master
|
addons/mail/__init__.py
|
382
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import ir_attachment
import mail_message_subtype
import mail_alias
import mail_followers
import mail_message
import mail_mail
import mail_thread
import mail_group
import res_partner
import res_users
import report
import wizard
import res_config
import mail_group_menu
import update
import controllers
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
tumbl3w33d/ansible
|
refs/heads/devel
|
lib/ansible/modules/windows/win_group.py
|
52
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2014, Chris Hoffman <choffman@chathamfinancial.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
# Machine-readable metadata consumed by ansible-doc and validation tools.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}

# YAML module documentation rendered by ansible-doc.
# NOTE(review): indentation inside this literal appears stripped by the
# extraction; verify against the upstream module before relying on it.
DOCUMENTATION = r'''
---
module: win_group
version_added: "1.7"
short_description: Add and remove local groups
description:
- Add and remove local groups.
- For non-Windows targets, please use the M(group) module instead.
options:
name:
description:
- Name of the group.
type: str
required: yes
description:
description:
- Description of the group.
type: str
state:
description:
- Create or remove the group.
type: str
choices: [ absent, present ]
default: present
seealso:
- module: group
- module: win_domain_group
- module: win_group_membership
author:
- Chris Hoffman (@chrishoffman)
'''

# Usage examples shown by ansible-doc.
EXAMPLES = r'''
- name: Create a new group
win_group:
name: deploy
description: Deploy Group
state: present
- name: Remove a group
win_group:
name: deploy
state: absent
'''
|
da1z/intellij-community
|
refs/heads/master
|
python/testData/intentions/googleDocStubInlineFunctionBodyNoSpaceBefore_after.py
|
96
|
# NOTE(review): this looks like an IDE intention-test "after" fixture
# (generated Google-style docstring stub) — its content is expected
# output for a test; confirm before editing further.
def f(x, y):
    """
    Args:
        x:
        y:
    Returns:
    """
    return 42
|
z0by/django
|
refs/heads/master
|
django/core/management/base.py
|
83
|
# -*- coding: utf-8 -*-
"""
Base classes for writing management commands (named commands which can
be executed through ``django-admin`` or ``manage.py``).
"""
from __future__ import unicode_literals
import os
import sys
import warnings
from argparse import ArgumentParser
from optparse import OptionParser
import django
from django.core import checks
from django.core.management.color import color_style, no_style
from django.db import connections
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_str
class CommandError(Exception):
    """
    Raised to signal a problem while executing a management command.

    When raised during command execution it is caught by the runner and
    turned into a nicely-printed error message on the appropriate output
    stream (stderr), so raising it with a sensible description is the
    preferred way to report that something went wrong in a command.
    """
    pass
class SystemCheckError(CommandError):
    """Raised when the system check framework reports unrecoverable errors."""
    pass
class CommandParser(ArgumentParser):
    """
    Customized ArgumentParser class to improve some error messages and prevent
    SystemExit in several occasions, as SystemExit is unacceptable when a
    command is called programmatically.
    """
    def __init__(self, cmd, **kwargs):
        # Keep a reference to the owning command so error() can tell
        # whether it was invoked from the command line.
        self.cmd = cmd
        super(CommandParser, self).__init__(**kwargs)

    def parse_args(self, args=None, namespace=None):
        # Catch missing argument for a better error message
        # NOTE(review): if args is None, the any() below would raise
        # TypeError; callers appear to always pass a list — confirm.
        if (hasattr(self.cmd, 'missing_args_message') and
                not (args or any(not arg.startswith('-') for arg in args))):
            self.error(self.cmd.missing_args_message)
        return super(CommandParser, self).parse_args(args, namespace)

    def error(self, message):
        # From the CLI keep argparse's behavior (print + SystemExit);
        # programmatic callers get a catchable CommandError instead.
        if self.cmd._called_from_command_line:
            super(CommandParser, self).error(message)
        else:
            raise CommandError("Error: %s" % message)
def handle_default_options(options):
    """
    Include any default options that all commands should accept here
    so that ManagementUtility can handle them before searching for
    user commands.
    """
    settings_module = options.settings
    if settings_module:
        os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
    extra_path = options.pythonpath
    if extra_path:
        sys.path.insert(0, extra_path)
class OutputWrapper(object):
    """
    Wrapper around stdout/stderr
    """
    @property
    def style_func(self):
        return self._style_func

    @style_func.setter
    def style_func(self, style_func):
        # Only colorize when writing to a real terminal; otherwise fall
        # back to the identity function.
        if style_func and self.isatty():
            self._style_func = style_func
        else:
            self._style_func = lambda x: x

    def __init__(self, out, style_func=None, ending='\n'):
        self._out = out
        # Goes through the property setter above, so _style_func is
        # always callable afterwards.
        # NOTE(review): the style_func parameter is ignored here —
        # confirm whether that is intended.
        self.style_func = None
        self.ending = ending

    def __getattr__(self, name):
        # Delegate everything else (flush, fileno, ...) to the stream.
        return getattr(self._out, name)

    def isatty(self):
        return hasattr(self._out, 'isatty') and self._out.isatty()

    def write(self, msg, style_func=None, ending=None):
        ending = self.ending if ending is None else ending
        if ending and not msg.endswith(ending):
            msg += ending
        style_func = style_func or self.style_func
        self._out.write(force_str(style_func(msg)))
class BaseCommand(object):
"""
The base class from which all management commands ultimately
derive.
Use this class if you want access to all of the mechanisms which
parse the command-line arguments and work out what code to call in
response; if you don't need to change any of that behavior,
consider using one of the subclasses defined in this file.
If you are interested in overriding/customizing various aspects of
the command-parsing and -execution behavior, the normal flow works
as follows:
1. ``django-admin`` or ``manage.py`` loads the command class
and calls its ``run_from_argv()`` method.
2. The ``run_from_argv()`` method calls ``create_parser()`` to get
an ``ArgumentParser`` for the arguments, parses them, performs
any environment changes requested by options like
``pythonpath``, and then calls the ``execute()`` method,
passing the parsed arguments.
3. The ``execute()`` method attempts to carry out the command by
calling the ``handle()`` method with the parsed arguments; any
output produced by ``handle()`` will be printed to standard
output and, if the command is intended to produce a block of
SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.
4. If ``handle()`` or ``execute()`` raised any exception (e.g.
``CommandError``), ``run_from_argv()`` will instead print an error
message to ``stderr``.
Thus, the ``handle()`` method is typically the starting point for
subclasses; many built-in commands and command types either place
all of their logic in ``handle()``, or perform some additional
parsing work in ``handle()`` and then delegate from it to more
specialized methods as needed.
Several attributes affect behavior at various steps along the way:
``args``
A string listing the arguments accepted by the command,
suitable for use in help messages; e.g., a command which takes
a list of application names might set this to '<app_label
app_label ...>'.
``can_import_settings``
A boolean indicating whether the command needs to be able to
import Django settings; if ``True``, ``execute()`` will verify
that this is possible before proceeding. Default value is
``True``.
``help``
A short description of the command, which will be printed in
help messages.
``option_list``
This is the list of ``optparse`` options which will be fed
into the command's ``OptionParser`` for parsing arguments.
Deprecated and will be removed in Django 1.10.
``output_transaction``
A boolean indicating whether the command outputs SQL
statements; if ``True``, the output will automatically be
wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
``False``.
``requires_system_checks``
A boolean; if ``True``, entire Django project will be checked for errors
prior to executing the command. Default value is ``True``.
To validate an individual application's models
rather than all applications' models, call
``self.check(app_configs)`` from ``handle()``, where ``app_configs``
is the list of application's configuration provided by the
app registry.
``leave_locale_alone``
A boolean indicating whether the locale set in settings should be
preserved during the execution of the command instead of translations
being deactivated.
Default value is ``False``.
Make sure you know what you are doing if you decide to change the value
of this option in your custom command if it creates database content
that is locale-sensitive and such content shouldn't contain any
translations (like it happens e.g. with django.contrib.auth
permissions) as activating any locale might cause unintended effects.
This option can't be False when the can_import_settings option is set
to False too because attempting to deactivate translations needs access
to settings. This condition will generate a CommandError.
"""
# Metadata about this command.
option_list = ()
help = ''
args = ''
# Configuration shortcuts that alter various logic.
_called_from_command_line = False
can_import_settings = True
output_transaction = False # Whether to wrap the output in a "BEGIN; COMMIT;"
leave_locale_alone = False
requires_system_checks = True
def __init__(self, stdout=None, stderr=None, no_color=False):
    """Wrap the output streams and pick a color palette."""
    self.stdout = OutputWrapper(stdout or sys.stdout)
    self.stderr = OutputWrapper(stderr or sys.stderr)
    if no_color:
        self.style = no_style()
    else:
        # Color stderr output as errors when a tty supports it.
        self.style = color_style()
        self.stderr.style_func = self.style.ERROR
@property
def use_argparse(self):
return not bool(self.option_list)
def get_version(self):
    """
    Return the version string reported for this command.

    Defaults to the installed Django version, which is right for every
    built-in command; user-supplied commands may override this.
    """
    return django.get_version()
def usage(self, subcommand):
"""
Return a brief description of how to use this command, by
default from the attribute ``self.help``.
"""
usage = '%%prog %s [options] %s' % (subcommand, self.args)
if self.help:
return '%s\n\n%s' % (usage, self.help)
else:
return usage
def create_parser(self, prog_name, subcommand):
    """
    Create and return the ``ArgumentParser`` which will be used to
    parse the arguments to this command.

    When legacy ``option_list`` entries are declared the deprecated
    ``OptionParser`` path is used; otherwise an argparse-based
    ``CommandParser`` is built.
    """
    if not self.use_argparse:
        def store_as_int(option, opt_str, value, parser):
            # optparse 'choice' values arrive as strings; coerce.
            setattr(parser.values, option.dest, int(value))
        # Backwards compatibility: use deprecated optparse module
        warnings.warn("OptionParser usage for Django management commands "
                      "is deprecated, use ArgumentParser instead",
                      RemovedInDjango110Warning)
        parser = OptionParser(prog=prog_name,
                              usage=self.usage(subcommand),
                              version=self.get_version())
        parser.add_option('-v', '--verbosity', action='callback', dest='verbosity', default=1,
                          type='choice', choices=['0', '1', '2', '3'], callback=store_as_int,
                          help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output')
        parser.add_option('--settings',
                          help=(
                              'The Python path to a settings module, e.g. '
                              '"myproject.settings.main". If this isn\'t provided, the '
                              'DJANGO_SETTINGS_MODULE environment variable will be used.'
                          ),
                          )
        # Fixed: a stray trailing comma previously turned this call into
        # a throwaway 1-tuple expression (harmless but unintended).
        parser.add_option('--pythonpath',
                          help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".')
        parser.add_option('--traceback', action='store_true',
                          help='Raise on CommandError exceptions')
        parser.add_option('--no-color', action='store_true', dest='no_color', default=False,
                          help="Don't colorize the command output.")
        for opt in self.option_list:
            parser.add_option(opt)
    else:
        parser = CommandParser(self, prog="%s %s" % (os.path.basename(prog_name), subcommand),
                               description=self.help or None)
        parser.add_argument('--version', action='version', version=self.get_version())
        parser.add_argument('-v', '--verbosity', action='store', dest='verbosity', default='1',
                            type=int, choices=[0, 1, 2, 3],
                            help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output')
        parser.add_argument('--settings',
                            help=(
                                'The Python path to a settings module, e.g. '
                                '"myproject.settings.main". If this isn\'t provided, the '
                                'DJANGO_SETTINGS_MODULE environment variable will be used.'
                            ),
                            )
        parser.add_argument('--pythonpath',
                            help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".')
        parser.add_argument('--traceback', action='store_true',
                            help='Raise on CommandError exceptions')
        parser.add_argument('--no-color', action='store_true', dest='no_color', default=False,
                            help="Don't colorize the command output.")
        if self.args:
            # Keep compatibility and always accept positional arguments, like optparse when args is set
            parser.add_argument('args', nargs='*')
        self.add_arguments(parser)
    return parser
def add_arguments(self, parser):
"""
Entry point for subclassed commands to add custom arguments.
"""
pass
def print_help(self, prog_name, subcommand):
    """
    Build this command's parser and print its help message, which is
    derived from ``self.usage()``.
    """
    self.create_parser(prog_name, subcommand).print_help()
def run_from_argv(self, argv):
    """
    Set up any environment changes requested (e.g., Python path
    and Django settings), then run this command. If the
    command raises a ``CommandError``, intercept it and print it sensibly
    to stderr. If the ``--traceback`` option is present or the raised
    ``Exception`` is not ``CommandError``, raise it.
    """
    self._called_from_command_line = True
    parser = self.create_parser(argv[0], argv[1])

    if self.use_argparse:
        options = parser.parse_args(argv[2:])
        cmd_options = vars(options)
        # Move positional args out of options to mimic legacy optparse
        args = cmd_options.pop('args', ())
    else:
        options, args = parser.parse_args(argv[2:])
        cmd_options = vars(options)
    handle_default_options(options)
    try:
        self.execute(*args, **cmd_options)
    except Exception as e:
        # Re-raise unless this is a plain CommandError without
        # --traceback requested.
        if options.traceback or not isinstance(e, CommandError):
            raise

        # SystemCheckError takes care of its own formatting.
        if isinstance(e, SystemCheckError):
            self.stderr.write(str(e), lambda x: x)
        else:
            self.stderr.write('%s: %s' % (e.__class__.__name__, e))
        sys.exit(1)
    finally:
        # Release any database connections the command opened.
        connections.close_all()
def execute(self, *args, **options):
    """
    Try to execute this command, performing system checks if needed (as
    controlled by the ``requires_system_checks`` attribute, except if
    force-skipped).
    """
    # Allow per-invocation overrides of color and output streams.
    if options.get('no_color'):
        self.style = no_style()
        self.stderr.style_func = None
    if options.get('stdout'):
        self.stdout = OutputWrapper(options['stdout'])
    if options.get('stderr'):
        self.stderr = OutputWrapper(options.get('stderr'), self.stderr.style_func)

    saved_locale = None
    if not self.leave_locale_alone:
        # Only mess with locales if we can assume we have a working
        # settings file, because django.utils.translation requires settings
        # (The final saying about whether the i18n machinery is active will be
        # found in the value of the USE_I18N setting)
        if not self.can_import_settings:
            raise CommandError("Incompatible values of 'leave_locale_alone' "
                               "(%s) and 'can_import_settings' (%s) command "
                               "options." % (self.leave_locale_alone,
                                             self.can_import_settings))
        # Deactivate translations, because django-admin creates database
        # content like permissions, and those shouldn't contain any
        # translations.
        from django.utils import translation
        saved_locale = translation.get_language()
        translation.deactivate_all()

    try:
        if (self.requires_system_checks and
                not options.get('skip_validation') and  # Remove at the end of deprecation for `skip_validation`.
                not options.get('skip_checks')):
            self.check()
        output = self.handle(*args, **options)
        if output:
            if self.output_transaction:
                # This needs to be imported here, because it relies on
                # settings.
                from django.db import connections, DEFAULT_DB_ALIAS
                connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
                if connection.ops.start_transaction_sql():
                    self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()))
            self.stdout.write(output)
            if self.output_transaction:
                self.stdout.write('\n' + self.style.SQL_KEYWORD(connection.ops.end_transaction_sql()))
    finally:
        # Restore the locale deactivated above, if any.
        if saved_locale is not None:
            translation.activate(saved_locale)
def check(self, app_configs=None, tags=None, display_num_errors=False,
          include_deployment_checks=False):
    """
    Uses the system check framework to validate entire Django project.
    Raises CommandError for any serious message (error or critical errors).
    If there are only light messages (like warnings), they are printed to
    stderr and no exception is raised.
    """
    all_issues = checks.run_checks(
        app_configs=app_configs,
        tags=tags,
        include_deployment_checks=include_deployment_checks,
    )

    header, body, footer = "", "", ""
    visible_issue_count = 0  # excludes silenced warnings

    if all_issues:
        # Bucket issues by severity. Note: errors and criticals are
        # included even when silenced, unlike the lighter levels.
        debugs = [e for e in all_issues if e.level < checks.INFO and not e.is_silenced()]
        infos = [e for e in all_issues if checks.INFO <= e.level < checks.WARNING and not e.is_silenced()]
        warnings = [e for e in all_issues if checks.WARNING <= e.level < checks.ERROR and not e.is_silenced()]
        errors = [e for e in all_issues if checks.ERROR <= e.level < checks.CRITICAL]
        criticals = [e for e in all_issues if checks.CRITICAL <= e.level]
        sorted_issues = [
            (criticals, 'CRITICALS'),
            (errors, 'ERRORS'),
            (warnings, 'WARNINGS'),
            (infos, 'INFOS'),
            (debugs, 'DEBUGS'),
        ]

        for issues, group_name in sorted_issues:
            if issues:
                visible_issue_count += len(issues)
                # Serious issues are styled as errors; others as warnings.
                formatted = (
                    self.style.ERROR(force_str(e))
                    if e.is_serious()
                    else self.style.WARNING(force_str(e))
                    for e in issues)
                formatted = "\n".join(sorted(formatted))
                body += '\n%s:\n%s\n' % (group_name, formatted)
        if visible_issue_count:
            header = "System check identified some issues:\n"

    if display_num_errors:
        if visible_issue_count:
            footer += '\n'
        footer += "System check identified %s (%s silenced)." % (
            "no issues" if visible_issue_count == 0 else
            "1 issue" if visible_issue_count == 1 else
            "%s issues" % visible_issue_count,
            len(all_issues) - visible_issue_count,
        )

    if any(e.is_serious() and not e.is_silenced() for e in all_issues):
        msg = self.style.ERROR("SystemCheckError: %s" % header) + body + footer
        raise SystemCheckError(msg)
    else:
        msg = header + body + footer

    if msg:
        if visible_issue_count:
            self.stderr.write(msg, lambda x: x)
        else:
            self.stdout.write(msg)
def handle(self, *args, **options):
"""
The actual logic of the command. Subclasses must implement
this method.
"""
raise NotImplementedError('subclasses of BaseCommand must provide a handle() method')
class AppCommand(BaseCommand):
    """
    A management command which takes one or more installed application
    labels as arguments, and does something with each of them.

    Rather than implementing ``handle()``, subclasses must implement
    ``handle_app_config()``, which will be called once for each
    application.
    """
    missing_args_message = "Enter at least one application label."

    def add_arguments(self, parser):
        # Accept one or more app labels as positional arguments.
        parser.add_argument('args', metavar='app_label', nargs='+',
            help='One or more application label.')

    def handle(self, *app_labels, **options):
        from django.apps import apps
        try:
            app_configs = [apps.get_app_config(app_label) for app_label in app_labels]
        except (LookupError, ImportError) as e:
            raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
        output = []
        for app_config in app_configs:
            app_output = self.handle_app_config(app_config, **options)
            if app_output:
                output.append(app_output)
        return '\n'.join(output)

    def handle_app_config(self, app_config, **options):
        """
        Perform the command's actions for app_config, an AppConfig
        instance corresponding to an application label given on the
        command line.
        """
        # Bug fix: the two adjacent string literals previously had no
        # separating space, producing "...providea handle_app_config()...".
        raise NotImplementedError(
            "Subclasses of AppCommand must provide "
            "a handle_app_config() method.")
class LabelCommand(BaseCommand):
    """
    Management command operating on one or more arbitrary string labels
    given on the command line.

    Subclasses implement ``handle_label()`` (called once per label)
    instead of ``handle()``.  If the arguments should be names of
    installed applications, use ``AppCommand`` instead.
    """
    label = 'label'
    missing_args_message = "Enter at least one %s." % label

    def add_arguments(self, parser):
        # One or more positional labels; the metavar follows self.label
        # so subclasses can rename what the argument is called.
        parser.add_argument('args', metavar=self.label, nargs='+')

    def handle(self, *labels, **options):
        # Run handle_label() for every label, keeping only truthy output.
        pieces = (self.handle_label(label, **options) for label in labels)
        return '\n'.join(piece for piece in pieces if piece)

    def handle_label(self, label, **options):
        """Perform the command's actions for one command-line label."""
        msg = 'subclasses of LabelCommand must provide a handle_label() method'
        raise NotImplementedError(msg)
class NoArgsCommand(BaseCommand):
    """
    Deprecated base class for commands that take no positional
    arguments: ``handle()`` rejects any arguments and delegates to
    ``handle_noargs()``.
    """
    args = ''

    def __init__(self):
        # Emit the deprecation notice before normal initialization.
        warnings.warn(
            "NoArgsCommand class is deprecated and will be removed in Django 1.10. "
            "Use BaseCommand instead, which takes no arguments by default.",
            RemovedInDjango110Warning
        )
        super(NoArgsCommand, self).__init__()

    def handle(self, *args, **options):
        if not args:
            return self.handle_noargs(**options)
        raise CommandError("Command doesn't accept any arguments")

    def handle_noargs(self, **options):
        """Perform this command's actions; subclasses must override."""
        raise NotImplementedError('subclasses of NoArgsCommand must provide a handle_noargs() method')
|
sbuss/voteswap
|
refs/heads/master
|
lib/django/contrib/admin/models.py
|
184
|
from __future__ import unicode_literals
from django.conf import settings
from django.contrib.admin.utils import quote
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import NoReverseMatch, reverse
from django.db import models
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible, smart_text
from django.utils.translation import ugettext, ugettext_lazy as _
# Values stored in LogEntry.action_flag (see is_addition/is_change/is_deletion).
ADDITION = 1
CHANGE = 2
DELETION = 3
class LogEntryManager(models.Manager):
    """Manager for LogEntry; kept available from inside migrations."""
    use_in_migrations = True

    def log_action(self, user_id, content_type_id, object_id, object_repr, action_flag, change_message=''):
        """Record one admin action as a new LogEntry row."""
        entry_fields = dict(
            user_id=user_id,
            content_type_id=content_type_id,
            # object_id column is text; coerce whatever PK type we get.
            object_id=smart_text(object_id),
            # The repr column is capped at 200 characters.
            object_repr=object_repr[:200],
            action_flag=action_flag,
            change_message=change_message,
        )
        self.model.objects.create(**entry_fields)
@python_2_unicode_compatible
class LogEntry(models.Model):
    """A single admin audit-log record: which user performed which
    action (addition/change/deletion) on which object, and when."""
    # When the action happened; fixed at creation time, not editable.
    action_time = models.DateTimeField(
        _('action time'),
        default=timezone.now,
        editable=False,
    )
    # The admin user who performed the action.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        models.CASCADE,
        verbose_name=_('user'),
    )
    # Content type of the edited object; nullable (SET_NULL) so log
    # entries survive deletion of the referenced ContentType.
    content_type = models.ForeignKey(
        ContentType,
        models.SET_NULL,
        verbose_name=_('content type'),
        blank=True, null=True,
    )
    object_id = models.TextField(_('object id'), blank=True, null=True)
    # Truncated string form of the object (max 200 chars; see
    # LogEntryManager.log_action).
    object_repr = models.CharField(_('object repr'), max_length=200)
    # One of ADDITION / CHANGE / DELETION.
    action_flag = models.PositiveSmallIntegerField(_('action flag'))
    change_message = models.TextField(_('change message'), blank=True)
    objects = LogEntryManager()
    class Meta:
        verbose_name = _('log entry')
        verbose_name_plural = _('log entries')
        db_table = 'django_admin_log'
        # Newest entries first.
        ordering = ('-action_time',)
    def __repr__(self):
        return smart_text(self.action_time)
    def __str__(self):
        """Human-readable summary of the logged action."""
        if self.is_addition():
            return ugettext('Added "%(object)s".') % {'object': self.object_repr}
        elif self.is_change():
            return ugettext('Changed "%(object)s" - %(changes)s') % {
                'object': self.object_repr,
                'changes': self.change_message,
            }
        elif self.is_deletion():
            return ugettext('Deleted "%(object)s."') % {'object': self.object_repr}
        return ugettext('LogEntry Object')
    def is_addition(self):
        return self.action_flag == ADDITION
    def is_change(self):
        return self.action_flag == CHANGE
    def is_deletion(self):
        return self.action_flag == DELETION
    def get_edited_object(self):
        "Returns the edited object represented by this log entry"
        return self.content_type.get_object_for_this_type(pk=self.object_id)
    def get_admin_url(self):
        """
        Returns the admin URL to edit the object represented by this log entry.
        """
        if self.content_type and self.object_id:
            url_name = 'admin:%s_%s_change' % (self.content_type.app_label, self.content_type.model)
            try:
                return reverse(url_name, args=(quote(self.object_id),))
            except NoReverseMatch:
                # No matching admin view registered; fall through to None.
                pass
        return None
|
rozap/aircooledrescue
|
refs/heads/master
|
buspeople/middleware.py
|
1
|
import traceback
import sys
class ProcessExceptionMiddleware(object):
    """Middleware-style exception hook that dumps unhandled view
    exceptions (and their tracebacks) to stdout."""

    def process_exception(self, request, exception):
        """Print the exception; returning None lets normal exception
        handling continue.

        Fix: the original used Python-2-only ``print`` statements; the
        function form below is valid on both Python 2 and 3 (each call
        passes a single argument).
        """
        print(exception)  # or log, or whatever.
        # Format the currently handled exception's traceback.
        print('\n'.join(traceback.format_exception(*sys.exc_info())))
|
gweintraub/SerialSculpt
|
refs/heads/master
|
node_modules/socket.io/node_modules/socket.io-client/node_modules/engine.io-client/node_modules/engine.io-parser/node_modules/utf8/tests/generate-test-data.py
|
1788
|
#!/usr/bin/env python
import re
import json
# https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
# http://stackoverflow.com/a/13436167/96656
def unisymbol(codePoint):
    """Return the string for `codePoint`, encoding astral (non-BMP)
    code points as an explicit UTF-16 surrogate pair, or the string
    'Error' when the value is outside the Unicode range.

    Fix: the original relied on Python-2-only ``unichr``; fall back to
    ``chr`` so the function also runs on Python 3.
    """
    try:
        _chr = unichr  # Python 2: build a unicode character
    except NameError:
        _chr = chr  # Python 3: unichr is gone; chr covers all code points
    if codePoint >= 0x0000 and codePoint <= 0xFFFF:
        return _chr(codePoint)
    elif codePoint >= 0x010000 and codePoint <= 0x10FFFF:
        # https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
        highSurrogate = int((codePoint - 0x10000) / 0x400) + 0xD800
        lowSurrogate = int((codePoint - 0x10000) % 0x400) + 0xDC00
        return _chr(highSurrogate) + _chr(lowSurrogate)
    else:
        return 'Error'
def hexify(codePoint):
    """Format a code point as a zero-padded 'U+XXXXXX' label."""
    digits = hex(codePoint)[2:].upper()
    return 'U+' + digits.zfill(6)
def writeFile(filename, contents):
    """Write `contents` (stripped, plus a trailing newline) to
    `filename`, echoing the filename as progress output.

    Fix: the original used a Python-2-only ``print`` statement; the
    single-argument function form works on both Python 2 and 3.
    """
    print(filename)
    with open(filename, 'w') as f:
        f.write(contents.strip() + '\n')
data = []
# Enumerate every Unicode code point (BMP and astral planes).
for codePoint in range(0x000000, 0x10FFFF + 1):
    # Skip non-scalar values.
    if codePoint >= 0xD800 and codePoint <= 0xDFFF:
        continue
    symbol = unisymbol(codePoint)
    # http://stackoverflow.com/a/17199950/96656
    # NOTE(review): Python 2 trick — encode to UTF-8 bytes, then view
    # those bytes as latin1 text so each byte maps to one character.
    bytes = symbol.encode('utf8').decode('latin1')
    data.append({
        'codePoint': codePoint,
        'decoded': symbol,
        'encoded': bytes
    });
jsonData = json.dumps(data, sort_keys=False, indent=2, separators=(',', ': '))
# Use tabs instead of double spaces for indentation
# NOTE(review): the pattern below is a single space although the comment
# above says double spaces — confirm against the intended output format.
jsonData = jsonData.replace(' ', '\t')
# Escape hexadecimal digits in escape sequences
jsonData = re.sub(
    r'\\u([a-fA-F0-9]{4})',
    lambda match: r'\u{}'.format(match.group(1).upper()),
    jsonData
)
writeFile('data.json', jsonData)
|
bradmontgomery/django-avatar
|
refs/heads/master
|
tests/settings.py
|
71
|
from django.conf.urls.defaults import patterns, include, handler500, handler404
# Minimal Django settings for running the avatar app's test suite.
DEFAULT_CHARSET = 'utf-8'
# In-memory SQLite keeps the test run self-contained (legacy
# DATABASE_* settings style).
DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = ':memory:'
# This settings module doubles as the URLconf (urlpatterns below).
ROOT_URLCONF = 'settings'
STATIC_URL = '/site_media/static/'
SITE_ID = 1
INSTALLED_APPS = (
    'django.contrib.sessions',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sites',
    'django.contrib.comments',
    'avatar',
)
TEMPLATE_LOADERS = (
    'django.template.loaders.app_directories.load_template_source',
)
# Avatar app limits under test.
AVATAR_ALLOWED_FILE_EXTS = ('.jpg', '.png')
AVATAR_MAX_SIZE = 1024 * 1024
AVATAR_MAX_AVATARS_PER_USER = 20
urlpatterns = patterns('',
    (r'^avatar/', include('avatar.urls')),
)
def __exported_functionality__():
    """Reference the imported handlers so the imports are used."""
    handlers = (handler500, handler404)
    return handlers
|
kblin/supybot-gsoc
|
refs/heads/stable
|
plugins/Reply/config.py
|
15
|
###
# Copyright (c) 2005, Daniel DiPaolo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.conf as conf
import supybot.registry as registry
def configure(advanced):
    """Supybot first-run configuration hook for the Reply plugin.

    `advanced` is True when the user identified as an advanced user.
    Effect configuration by manipulating the registry as appropriate.
    """
    from supybot.questions import expect, anything, something, yn
    conf.registerPlugin('Reply', True)
# Module-level plugin registration; config variables hang off `Reply`.
Reply = conf.registerPlugin('Reply')
# This is where your configuration variables (if any) should go. For example:
# conf.registerGlobalValue(Reply, 'someConfigVariableName',
# registry.Boolean(False, """Help for someConfigVariableName."""))
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
|
fedorpatlin/ansible
|
refs/heads/devel
|
lib/ansible/plugins/connection/lxc.py
|
89
|
# (c) 2015, Joerg Thalheim <joerg@higgsboson.tk>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import shutil
import traceback
import select
import fcntl
import errno
HAS_LIBLXC = False
try:
import lxc as _lxc
HAS_LIBLXC = True
except ImportError:
pass
from ansible import constants as C
from ansible import errors
from ansible.module_utils._text import to_bytes
from ansible.plugins.connection import ConnectionBase
class Connection(ConnectionBase):
    ''' Local lxc based connections '''
    # Attaches to a locally running LXC container via the python2
    # liblxc bindings; no ssh transport involved.
    transport = 'lxc'
    has_pipelining = True
    become_methods = frozenset(C.BECOME_METHODS)
    def __init__(self, play_context, new_stdin, *args, **kwargs):
        super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
        # The inventory hostname doubles as the LXC container name.
        self.container_name = self._play_context.remote_addr
        self.container = None
    def _connect(self):
        ''' connect to the lxc; nothing to do here '''
        super(Connection, self)._connect()
        if not HAS_LIBLXC:
            msg = "lxc bindings for python2 are not installed"
            raise errors.AnsibleError(msg)
        if self.container:
            return
        self._display.vvv("THIS IS A LOCAL LXC DIR", host=self.container_name)
        self.container = _lxc.Container(self.container_name)
        if self.container.state == "STOPPED":
            raise errors.AnsibleError("%s is not running" % self.container_name)
    def _communicate(self, pid, in_data, stdin, stdout, stderr):
        # Pump in_data into the child's stdin while draining its
        # stdout/stderr pipes, then reap the child and return
        # (returncode, stdout_bytes, stderr_bytes).
        buf = { stdout: [], stderr: [] }
        read_fds = [stdout, stderr]
        if in_data:
            write_fds = [stdin]
        else:
            write_fds = []
        while len(read_fds) > 0 or len(write_fds) > 0:
            try:
                ready_reads, ready_writes, _ = select.select(read_fds, write_fds, [])
            except select.error as e:
                # Restart select() when interrupted by a signal.
                if e.args[0] == errno.EINTR:
                    continue
                raise
            for fd in ready_writes:
                # Write as much as the pipe accepts; keep the remainder.
                in_data = in_data[os.write(fd, in_data):]
                if len(in_data) == 0:
                    write_fds.remove(fd)
            for fd in ready_reads:
                data = os.read(fd, 32768)
                if not data:
                    # EOF on this pipe; stop selecting on it.
                    read_fds.remove(fd)
                buf[fd].append(data)
        (pid, returncode) = os.waitpid(pid, 0)
        return returncode, b"".join(buf[stdout]), b"".join(buf[stderr])
    def _set_nonblocking(self, fd):
        # Mark fd non-blocking so the select() loop above never stalls
        # on a partially filled pipe.
        flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK
        fcntl.fcntl(fd, fcntl.F_SETFL, flags)
        return fd
    def exec_command(self, cmd, in_data=None, sudoable=False):
        ''' run a command on the chroot '''
        super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
        executable = to_bytes(self._play_context.executable, errors='surrogate_or_strict')
        local_cmd = [executable, '-c', to_bytes(cmd, errors='surrogate_or_strict')]
        read_stdout, write_stdout = None, None
        read_stderr, write_stderr = None, None
        read_stdin, write_stdin = None, None
        try:
            read_stdout, write_stdout = os.pipe()
            read_stderr, write_stderr = os.pipe()
            kwargs = {
                'stdout': self._set_nonblocking(write_stdout),
                'stderr': self._set_nonblocking(write_stderr),
                'env_policy': _lxc.LXC_ATTACH_CLEAR_ENV
            }
            if in_data:
                read_stdin, write_stdin = os.pipe()
                kwargs['stdin'] = self._set_nonblocking(read_stdin)
            self._display.vvv("EXEC %s" % (local_cmd), host=self.container_name)
            pid = self.container.attach(_lxc.attach_run_command, local_cmd, **kwargs)
            if pid == -1:
                msg = "failed to attach to container %s" % self.container_name
                raise errors.AnsibleError(msg)
            # Close our copies of the child's pipe ends; rebinding to the
            # return value of os.close() (None) keeps the finally-block
            # below from double-closing them.
            write_stdout = os.close(write_stdout)
            write_stderr = os.close(write_stderr)
            if read_stdin:
                read_stdin = os.close(read_stdin)
            return self._communicate(pid,
                                     in_data,
                                     write_stdin,
                                     read_stdout,
                                     read_stderr)
        finally:
            # Best-effort cleanup of any pipe ends still open (those not
            # already closed and rebound to None above).
            fds = [read_stdout,
                   write_stdout,
                   read_stderr,
                   write_stderr,
                   read_stdin,
                   write_stdin]
            for fd in fds:
                if fd:
                    os.close(fd)
    def put_file(self, in_path, out_path):
        ''' transfer a file from local to lxc '''
        super(Connection, self).put_file(in_path, out_path)
        self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.container_name)
        in_path = to_bytes(in_path, errors='surrogate_or_strict')
        out_path = to_bytes(out_path, errors='surrogate_or_strict')
        if not os.path.exists(in_path):
            msg = "file or module does not exist: %s" % in_path
            raise errors.AnsibleFileNotFound(msg)
        try:
            src_file = open(in_path, "rb")
        except IOError:
            traceback.print_exc()
            raise errors.AnsibleError("failed to open input file to %s" % in_path)
        try:
            def write_file(args):
                # Runs inside the container via attach_wait(); the open
                # source file descriptor is inherited by that process.
                with open(out_path, 'wb+') as dst_file:
                    shutil.copyfileobj(src_file, dst_file)
            try:
                self.container.attach_wait(write_file, None)
            except IOError:
                traceback.print_exc()
                msg = "failed to transfer file to %s" % out_path
                raise errors.AnsibleError(msg)
        finally:
            src_file.close()
    def fetch_file(self, in_path, out_path):
        ''' fetch a file from lxc to local '''
        super(Connection, self).fetch_file(in_path, out_path)
        self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.container_name)
        in_path = to_bytes(in_path, errors='surrogate_or_strict')
        out_path = to_bytes(out_path, errors='surrogate_or_strict')
        try:
            dst_file = open(out_path, "wb")
        except IOError:
            traceback.print_exc()
            msg = "failed to open output file %s" % out_path
            raise errors.AnsibleError(msg)
        try:
            def write_file(args):
                # Runs inside the container via attach_wait().
                try:
                    with open(in_path, 'rb') as src_file:
                        shutil.copyfileobj(src_file, dst_file)
                finally:
                    # this is needed in the lxc child process
                    # to flush internal python buffers
                    dst_file.close()
            try:
                self.container.attach_wait(write_file, None)
            except IOError:
                traceback.print_exc()
                msg = "failed to transfer file from %s to %s" % (in_path, out_path)
                raise errors.AnsibleError(msg)
        finally:
            # Close the parent's copy too (the close() in write_file
            # happened in the attached child process, not here).
            dst_file.close()
    def close(self):
        ''' terminate the connection; nothing to do here '''
        super(Connection, self).close()
        self._connected = False
|
poiesisconsulting/openerp-restaurant
|
refs/heads/master
|
purchase_double_validation/__init__.py
|
441
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import purchase_double_validation_installer
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
pombredanne/NearPy
|
refs/heads/master
|
nearpy/tests/__init__.py
|
2
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Ole Krause-Sparmann
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
|
bnjmnhndrsn/lunchmove
|
refs/heads/master
|
moves/serializers.py
|
2
|
from .models import Move, Spot
from rest_framework import serializers
class MoveSerializer(serializers.ModelSerializer):
    """REST serializer exposing a Move's spot, user, ids and time."""
    class Meta:
        model = Move
        fields = ('spot', 'user', 'id', 'uuid', 'time')
class SpotSerializer(serializers.ModelSerializer):
    """REST serializer exposing a lunch Spot's name and id."""
    class Meta:
        model = Spot
        fields = ('name', 'id')
|
fo2rist/infra-strike
|
refs/heads/master
|
backend/venv/Lib/site-packages/pip/_vendor/requests/packages/chardet/constants.py
|
3007
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Module-wide debug switch (0 = off).
_debug = 0
# Detector result codes (named by convention; call sites not visible here).
eDetecting = 0
eFoundIt = 1
eNotMe = 2
# Prober state-machine states.
eStart = 0
eError = 1
eItsMe = 2
# Confidence threshold — presumably used to shortcut detection once a
# prober is confident enough; verify at call sites.
SHORTCUT_THRESHOLD = 0.95
|
p0cisk/Quantum-GIS
|
refs/heads/master
|
python/ext-libs/nose2/sphinxext.py
|
12
|
import types
from docutils import nodes
from docutils.statemachine import ViewList
from docutils.parsers.rst import Directive, directives
from nose2 import events, session, util
# Source label passed as the second argument to every ViewList.append()
# below, attributing generated rst lines to this autodoc machinery.
AD = u'<autodoc>'
# Conventionally tells unittest to hide this module's frames in tracebacks.
__unittest = True
class AutoPlugin(Directive):
    """Docutils directive ``autoplugin`` that documents a nose2 plugin
    (or every plugin in a module): its configuration variables,
    command-line options, and class reference."""
    required_arguments = 1
    optional_arguments = 1
    final_argument_whitespace = False
    has_content = False
    option_spec = {'module': directives.unchanged}
    def run(self):
        """Resolve the target plugin(s), generate rst for each, and
        parse that rst into docutils nodes."""
        plugin_name = self.arguments[0]
        parent, plugin = util.object_from_name(plugin_name)
        if isinstance(plugin, types.ModuleType):
            # document all plugins in module
            module = plugin
            mod_name = module.__name__
            plugins = self.plugins(module)
        else:
            if 'module' in self.options:
                mod_name = self.options['module']
            else:
                # Derive the module name by stripping the class name
                # (and the joining dot) off the dotted path.
                mod_name = plugin_name[
                    0:plugin_name.index(plugin.__name__) - 1]
            plugins = [plugin]
        rst = ViewList()
        if mod_name:
            rst.append(u'.. automodule :: %s\n' % mod_name, AD)
            rst.append(u'', AD)
        for plug in plugins:
            self.document(rst, plug)
        # parse rst and generate new nodelist
        state = self.state
        node = nodes.section()
        node.document = state.document
        # Temporarily reset section bookkeeping so the generated rst is
        # parsed as if at top level, then restore the surroundings.
        surrounding_title_styles = state.memo.title_styles
        surrounding_section_level = state.memo.section_level
        state.memo.title_styles = []
        state.memo.section_level = 0
        state.nested_parse(rst, 0, node, match_titles=1)
        state.memo.title_styles = surrounding_title_styles
        state.memo.section_level = surrounding_section_level
        return node.children
    def document(self, rst, plugin):
        """Append full documentation for one plugin class to `rst`."""
        # Instantiate the plugin against throwaway config/option buckets
        # that record what it registers rather than applying it.
        ssn = session.Session()
        ssn.configClass = ssn.config = config = ConfigBucket()
        ssn.pluginargs = opts = OptBucket()
        plugin_name = plugin.__name__
        config = ssn.config
        obj = plugin(session=ssn)
        try:
            obj.pluginsLoaded(events.PluginsLoadedEvent([obj]))
        except AttributeError:
            # Plugin does not implement the pluginsLoaded hook.
            pass
        # config options
        if config.vars:
            self.add_config(rst, config)
        # command-line options
        if opts.opts:
            self.headline(rst, u'Command-line options')
            for opt in opts:
                for line in opt.options():
                    rst.append(line, AD)
                rst.append('', AD)
        # class __doc__
        self.headline(rst, u'Plugin class reference: %s' % plugin_name)
        rst.append(u'.. autoclass :: %s' % plugin_name, AD)
        rst.append(u' :members:', AD)
        rst.append(u'', AD)
    def add_config(self, rst, config):
        """Render recorded config variables plus a sample ini block."""
        headline = u'Configuration [%s]' % config.section
        self.headline(rst, headline)
        for var in sorted(config.vars.keys()):
            info = config.vars[var]
            rst.append(u'.. rst:configvar :: %s' % var, AD)
            rst.append(u' ', AD)
            rst.append(u' :Default: %(default)s' % info, AD)
            rst.append(u' :Type: %(type)s' % info, AD)
            rst.append(u'', AD)
        self.headline(rst, u"Sample configuration", '-')
        rst.append(u'The default configuration is equivalent to including '
                   u'the following in a unittest.cfg file.', AD)
        rst.append(u'', AD)
        rst.append(u'.. code-block:: ini', AD)
        rst.append(u' ', AD)
        rst.append(u' [%s]' % config.section, AD)
        for var in sorted(config.vars.keys()):
            info = config.vars[var]
            entry = ' %s = ' % (var)
            if info['type'] != 'list':
                entry = u'%s%s' % (entry, info['default'])
                rst.append(entry, AD)
            elif info['default']:
                # Lists: first value on the assignment line, the rest
                # aligned underneath it.
                pad = ' ' * len(entry)
                entry = u'%s%s' % (entry, info['default'][0])
                rst.append(entry, AD)
                for val in info['default'][1:]:
                    rst.append(u'%s%s' % (pad, val), AD)
            else:
                rst.append(entry, AD)
        rst.append(u'', AD)
    def headline(self, rst, headline, level=u'='):
        """Append a headline underlined with `level` characters."""
        rst.append(headline, AD)
        rst.append(level * len(headline), AD)
        rst.append(u'', AD)
    def plugins(self, module):
        """Yield every events.Plugin subclass found in `module`."""
        for entry in dir(module):
            try:
                item = getattr(module, entry)
            except AttributeError:
                pass
            try:
                if issubclass(item, events.Plugin):
                    yield item
            except TypeError:
                # Non-class attributes make issubclass() raise; skip.
                pass
def setup(app):
    """Sphinx extension entry point: register the ``autoplugin``
    directive and the ``configvar`` object type."""
    app.add_directive('autoplugin', AutoPlugin)
    app.add_object_type('configvar', 'config', u'pair: %s; configvar')
# Unique sentinel recorded as the default when a plugin supplies none.
DEFAULT = object()
class ConfigBucket(object):
    """Stand-in configuration object that records every variable a
    plugin looks up (name, declared type, default) instead of
    returning real settings."""

    def __init__(self):
        self.section = None
        self.vars = {}

    def __call__(self, items):
        self.vars = dict(items)
        return self

    def has_section(self, section):
        # Remember the section name but report it as absent.
        self.section = section
        return False

    def items(self):
        return self.vars.items()

    def _record(self, item, kind, default):
        # Note the lookup and echo the default back to the caller.
        self.vars[item] = {'type': kind, 'default': default}
        return default

    def as_bool(self, item, default=DEFAULT):
        return self._record(item, 'boolean', default)

    as_tri = as_bool

    def as_int(self, item, default=DEFAULT):
        return self._record(item, 'integer', default)

    def as_float(self, item, default=DEFAULT):
        return self._record(item, 'float', default)

    def as_str(self, item, default=DEFAULT):
        return self._record(item, 'str', default)

    def as_list(self, item, default=DEFAULT):
        return self._record(item, 'list', default)

    def __getitem__(self, item):
        # Records the access; returns None like the original.
        self.vars[item] = {'type': None, 'default': DEFAULT}

    def get(self, item, default=DEFAULT):
        return self._record(item, None, default)
class OptBucket(object):
    """Stand-in argument parser that records registered command-line
    options as Opt objects instead of parsing anything."""

    def __init__(self, doc=None, prog='nosetests'):
        self.seen = set()
        self.opts = []
        self.doc = doc
        self.prog = prog

    def __iter__(self):
        return iter(self.opts)

    def format_help(self):
        """Return `doc` with %prog substituted and ':' promoted to '::'."""
        return self.doc.replace('%prog', self.prog).replace(':\n', '::\n')

    def add_argument(self, *arg, **kw):
        # Deduplicate on the tuple of positional option strings.
        if arg not in self.seen:
            self.opts.append(Opt(*arg, **kw))
            self.seen.add(arg)

    def __call__(self, callback, opt=None, longOpt=None, help=None):
        """Optparse-style registration callback: build '-x'/'--xxx'
        strings and record them."""
        opts = []
        if opt is not None:
            opts.append('-' + opt)
        if longOpt is not None:
            opts.append('--' + longOpt)
        # Bug fix: this previously called self.add_option(), which does
        # not exist on this class (an optparse-era leftover) and raised
        # AttributeError; route through add_argument() instead.
        self.add_argument(*opts, help=help)
class Opt(object):
    """Recorded command-line option: its flag strings plus the
    argparse-style keyword metadata it was registered with."""

    def __init__(self, *arg, **kw):
        self.opts = arg
        # Pull out the keywords we understand; anything else is ignored.
        for attr in ('action', 'default', 'metavar', 'help'):
            setattr(self, attr, kw.pop(attr, None))

    def options(self):
        """Render this option as rst ``.. cmdoption ::`` lines."""
        descriptions = []
        for optstring in self.opts:
            if self.action in ('store_true', 'store_false', None):
                # Flags take no value, so no metavar is shown.
                descriptions.append(optstring)
            else:
                descriptions.append(optstring + ' %s' % self.meta(optstring))
        lines = ['.. cmdoption :: ' + ', '.join(descriptions)]
        if self.help:
            lines.extend(['', ' %s' % self.help, ''])
        return lines

    def meta(self, optstring):
        # FIXME optparser default metavar?
        return self.metavar or 'DEFAULT'
|
zhaochl/python-utils
|
refs/heads/master
|
sug/demo/tree_builder.py
|
1
|
#!/usr/bin/env python
# coding=utf-8
from data_util import *
from tire_tree import *
import pygraphviz as pgv
import json
import os
# Number of projects fetched per page in build_tree().
PROJECT_BATCH_SIZE = 100
def build_tree(pid = 0):
    """Build the suggestion trie from every project's title, category,
    keyword and info terms (paging through projects in batches starting
    at `pid`), then dump the finished tree and index to JSON files."""
    while True:
        projs = DataUtil.get_project_term(pid, PROJECT_BATCH_SIZE)
        for proj in projs:
            title = proj[0]
            category = proj[1].split()
            keywords = proj[2].split()
            brief = proj[3].split()
            pid = proj[4]
            projectid = proj[5]
            info = DataUtil.get_project_info(projectid)
            terms = {}
            terms[title] = 1.0
            # category/keywords arrive as alternating "k=term w=weight"
            # tokens, hence the stride-2 walk below.
            # NOTE(review): Python 2 integer division; would need ``//``
            # under Python 3.
            for i in range(len(category)/2):
                term = category[i*2].split('=')[1]
                weight = category[i*2+1].split('=')[1]
                if term[1] == '_':
                    term = term[2:]
                weight = float(weight)
                terms[term] = weight
            for i in range(len(keywords)/2):
                term = keywords[i*2].split('=')[1]
                weight = keywords[i*2+1].split('=')[1]
                if term[1] == '_':
                    term = term[2:]
                weight = float(weight)
                terms[term] = weight
            for e in info:
                name = e[0]
                terms[name] = 1.0
            for term, weight in terms.iteritems():
                # Skip single-character terms (one unicode char, or a
                # single two-byte multibyte char).
                if len(unicode(term)) == 1:
                    continue
                if len(str(term))==len(unicode(term)) and len(str(term))==2:
                    continue
                add_term(term, weight)
        # Short page means we've reached the last batch.
        if len(projs) < PROJECT_BATCH_SIZE:
            break
    treefile = open('tree_nodes.txt', 'w')
    indexfile = open('tree_index.txt', 'w')
    treefile.write(json.dumps(tree_nodes))
    indexfile.write(json.dumps(node_index))
    treefile.close()
    indexfile.close()
def load_tree():
    """Load a previously built tree from disk, or build (and persist)
    it when the cache files are missing."""
    if not os.path.exists('tree_nodes.txt') or not os.path.exists('tree_index.txt'):
        build_tree()
        return
    global tree_nodes
    global node_index
    treefile = open('tree_nodes.txt')
    indexfile = open('tree_index.txt')
    tree_nodes.pop()#pop root node
    # Extend in place (+=) so other modules keep their reference.
    tree_nodes += json.loads(treefile.read())
    index = json.loads(indexfile.read())
    for key, nid in index.iteritems():
        node_index[key] = nid
    treefile.close()
    indexfile.close()
def graph_tree(graph, parent):
    """Recursively add parent->child edges to `graph`; node labels are
    the node id concatenated with its keyword."""
    graph_word1 = str(parent) + tree_nodes[parent][KEYWORD]
    for child in tree_nodes[parent][CHILDREN]:
        graph_word2 = str(child) + tree_nodes[child][KEYWORD]
        graph.add_edge(graph_word1, graph_word2)
        graph_tree(graph, child)
def get_graph(parent=0):
    """Render the (sub)tree rooted at `parent` to tree.dot / tree.png."""
    graph = pgv.AGraph(directed=True, strict=True)
    graph_tree(graph, parent)
    graph.graph_attr['epsilon'] = '0.001'
    print graph.string()
    graph.write('tree.dot')
    graph.layout('dot')
    graph.draw('tree.png')
if __name__ == '__main__':
    # Smoke test: load the tree and print suggestions for "155".
    load_tree()
    sugs = get_suggestion('155')
    for sug in sugs:
        print sug
|
yorkerlin/shogun
|
refs/heads/develop
|
examples/undocumented/python_modular/kernel_simple_locality_improved_string_modular.py
|
26
|
#!/usr/bin/env python
from tools.load import LoadMatrix
# Load the DNA train/test sequences shipped with the examples.
lm=LoadMatrix()
traindat = lm.load_dna('../data/fm_train_dna.dat')
testdat = lm.load_dna('../data/fm_test_dna.dat')
# Two parameter sets: (train, test, length, inner_degree, outer_degree).
parameter_list = [[traindat,testdat,5,5,1],[traindat,testdat,5,3,2]]
def kernel_simple_locality_improved_string_modular (fm_train_dna=traindat,fm_test_dna=testdat,
                                                    length=5,inner_degree=5,outer_degree=1 ):
    """Compute train/test kernel matrices for shogun's
    SimpleLocalityImprovedString kernel on DNA string features.

    Returns (km_train, km_test, kernel).
    """
    from modshogun import StringCharFeatures, DNA
    from modshogun import SimpleLocalityImprovedStringKernel, MSG_DEBUG
    feats_train=StringCharFeatures(fm_train_dna, DNA)
    #feats_train.io.set_loglevel(MSG_DEBUG)
    feats_test=StringCharFeatures(fm_test_dna, DNA)
    kernel=SimpleLocalityImprovedStringKernel(
        feats_train, feats_train, length, inner_degree, outer_degree)
    km_train=kernel.get_kernel_matrix()
    # Re-initialize against the test features for the cross matrix.
    kernel.init(feats_train, feats_test)
    km_test=kernel.get_kernel_matrix()
    return km_train,km_test,kernel
if __name__=='__main__':
    # Run the example with the first parameter set.
    print('SimpleLocalityImprovedString')
    kernel_simple_locality_improved_string_modular(*parameter_list[0])
|
tmpgit/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyArgumentListInspection/dictFromKeys.py
|
52
|
print(dict.fromkeys(<warning descr="Parameter 'seq' unfilled">)</warning>)
print(dict.fromkeys(['foo', 'bar']))
|
samdowd/drumm-farm
|
refs/heads/master
|
drumm_env/lib/python2.7/site-packages/django/forms/widgets.py
|
106
|
"""
HTML Widget classes
"""
from __future__ import unicode_literals
import copy
import datetime
import re
from itertools import chain
from django.conf import settings
from django.forms.utils import flatatt, to_current_timezone
from django.utils import datetime_safe, formats, six
from django.utils.datastructures import MultiValueDict
from django.utils.dates import MONTHS
from django.utils.encoding import (
force_str, force_text, python_2_unicode_compatible,
)
from django.utils.formats import get_format
from django.utils.html import conditional_escape, format_html, html_safe
from django.utils.safestring import mark_safe
from django.utils.six.moves import range
from django.utils.six.moves.urllib.parse import urljoin
from django.utils.translation import ugettext_lazy
__all__ = (
'Media', 'MediaDefiningClass', 'Widget', 'TextInput', 'NumberInput',
'EmailInput', 'URLInput', 'PasswordInput', 'HiddenInput',
'MultipleHiddenInput', 'FileInput', 'ClearableFileInput', 'Textarea',
'DateInput', 'DateTimeInput', 'TimeInput', 'CheckboxInput', 'Select',
'NullBooleanSelect', 'SelectMultiple', 'RadioSelect',
'CheckboxSelectMultiple', 'MultiWidget', 'SplitDateTimeWidget',
'SplitHiddenDateTimeWidget', 'SelectDateWidget',
)
# Asset categories handled by Media; add_css/add_js and the render_* methods
# are looked up dynamically from these names, so they must stay in sync.
MEDIA_TYPES = ('css', 'js')
@html_safe
@python_2_unicode_compatible
class Media(object):
    """Collect the CSS/JS assets declared by widgets and forms and render
    them as <link>/<script> tags."""
    def __init__(self, media=None, **kwargs):
        # Accept either another object exposing css/js attributes via
        # `media`, or explicit css=.../js=... keyword arguments.
        if media:
            media_attrs = media.__dict__
        else:
            media_attrs = kwargs
        self._css = {}
        self._js = []
        for name in MEDIA_TYPES:
            getattr(self, 'add_' + name)(media_attrs.get(name))
    def __str__(self):
        return self.render()
    def render(self):
        return mark_safe('\n'.join(chain(*[getattr(self, 'render_' + name)() for name in MEDIA_TYPES])))
    def render_js(self):
        return [
            format_html(
                '<script type="text/javascript" src="{}"></script>',
                self.absolute_path(path)
            ) for path in self._js
        ]
    def render_css(self):
        # To keep rendering order consistent, we can't just iterate over items().
        # We need to sort the keys, and iterate over the sorted list.
        media = sorted(self._css.keys())
        return chain(*[[
            format_html(
                '<link href="{}" type="text/css" media="{}" rel="stylesheet" />',
                self.absolute_path(path), medium
            ) for path in self._css[medium]
        ] for medium in media])
    def absolute_path(self, path, prefix=None):
        # Already-absolute URLs and root-relative paths pass through untouched.
        if path.startswith(('http://', 'https://', '/')):
            return path
        if prefix is None:
            if settings.STATIC_URL is None:
                # backwards compatibility: fall back to MEDIA_URL
                prefix = settings.MEDIA_URL
            else:
                prefix = settings.STATIC_URL
        return urljoin(prefix, path)
    def __getitem__(self, name):
        "Returns a Media object that only contains media of the given type"
        if name in MEDIA_TYPES:
            return Media(**{str(name): getattr(self, '_' + name)})
        raise KeyError('Unknown media type "%s"' % name)
    def add_js(self, data):
        # Preserve insertion order while dropping duplicate paths.
        if data:
            for path in data:
                if path not in self._js:
                    self._js.append(path)
    def add_css(self, data):
        # data maps a CSS medium (e.g. 'all', 'print') to a list of paths.
        if data:
            for medium, paths in data.items():
                for path in paths:
                    if not self._css.get(medium) or path not in self._css[medium]:
                        self._css.setdefault(medium, []).append(path)
    def __add__(self, other):
        # Merge two Media objects; duplicates are removed by the add_* hooks.
        combined = Media()
        for name in MEDIA_TYPES:
            getattr(combined, 'add_' + name)(getattr(self, '_' + name, None))
            getattr(combined, 'add_' + name)(getattr(other, '_' + name, None))
        return combined
def media_property(cls):
    """Build the `media` property for *cls*, merging media inherited from
    base classes with the class's own inner `Media` definition."""
    def _media(self):
        # Get the media property of the superclass, if it exists
        sup_cls = super(cls, self)
        try:
            base = sup_cls.media
        except AttributeError:
            base = Media()
        # Get the media definition for this class
        definition = getattr(cls, 'Media', None)
        if definition:
            # `extend` may be True (inherit all base media) or an iterable
            # of media types to inherit selectively.
            extend = getattr(definition, 'extend', True)
            if extend:
                if extend is True:
                    m = base
                else:
                    m = Media()
                    for medium in extend:
                        m = m + base[medium]
                return m + Media(definition)
            else:
                return Media(definition)
        else:
            return base
    return property(_media)
class MediaDefiningClass(type):
    """
    Metaclass for classes that can have media definitions.
    """
    def __new__(mcs, name, bases, attrs):
        new_class = (super(MediaDefiningClass, mcs)
                     .__new__(mcs, name, bases, attrs))
        # Only synthesize the `media` property when the class did not
        # define one explicitly.
        if 'media' not in attrs:
            new_class.media = media_property(new_class)
        return new_class
@html_safe
@python_2_unicode_compatible
class SubWidget(object):
    """
    Some widgets are made of multiple HTML elements -- namely, RadioSelect.
    This is a class that represents the "inner" HTML element of a widget.
    """
    def __init__(self, parent_widget, name, value, attrs, choices):
        self.parent_widget = parent_widget
        self.name, self.value = name, value
        self.attrs, self.choices = attrs, choices
    def __str__(self):
        # Delegate rendering to the parent widget; choices are only passed
        # through when non-empty.
        args = [self.name, self.value, self.attrs]
        if self.choices:
            args.append(self.choices)
        return self.parent_widget.render(*args)
class Widget(six.with_metaclass(MediaDefiningClass)):
    """Abstract base class for all form widgets."""
    needs_multipart_form = False  # Determines does this widget need multipart form
    is_localized = False
    is_required = False
    supports_microseconds = True
    def __init__(self, attrs=None):
        # Copy attrs so callers can't mutate the widget's dict afterwards.
        if attrs is not None:
            self.attrs = attrs.copy()
        else:
            self.attrs = {}
    def __deepcopy__(self, memo):
        # Shallow-copy the widget but give it its own attrs dict.
        obj = copy.copy(self)
        obj.attrs = self.attrs.copy()
        memo[id(self)] = obj
        return obj
    @property
    def is_hidden(self):
        # Input subclasses declare input_type; anything without one is
        # considered visible.
        return self.input_type == 'hidden' if hasattr(self, 'input_type') else False
    def subwidgets(self, name, value, attrs=None, choices=()):
        """
        Yields all "subwidgets" of this widget. Used only by RadioSelect to
        allow template access to individual <input type="radio"> buttons.
        Arguments are the same as for render().
        """
        yield SubWidget(self, name, value, attrs, choices)
    def render(self, name, value, attrs=None):
        """
        Returns this Widget rendered as HTML, as a Unicode string.
        The 'value' given is not guaranteed to be valid input, so subclass
        implementations should program defensively.
        """
        raise NotImplementedError('subclasses of Widget must provide a render() method')
    def build_attrs(self, extra_attrs=None, **kwargs):
        "Helper function for building an attribute dictionary."
        attrs = dict(self.attrs, **kwargs)
        if extra_attrs:
            attrs.update(extra_attrs)
        return attrs
    def value_from_datadict(self, data, files, name):
        """
        Given a dictionary of data and this widget's name, returns the value
        of this widget. Returns None if it's not provided.
        """
        return data.get(name)
    def id_for_label(self, id_):
        """
        Returns the HTML ID attribute of this Widget for use by a <label>,
        given the ID of the field. Returns None if no ID is available.
        This hook is necessary because some widgets have multiple HTML
        elements and, thus, multiple IDs. In that case, this method should
        return an ID value that corresponds to the first ID in the widget's
        tags.
        """
        return id_
class Input(Widget):
    """
    Base class for all <input> widgets (except type='checkbox' and
    type='radio', which are special).
    """
    input_type = None  # Subclasses must define this.
    def _format_value(self, value):
        # Apply locale-aware formatting only when localization is enabled.
        if self.is_localized:
            return formats.localize_input(value)
        return value
    def render(self, name, value, attrs=None):
        if value is None:
            value = ''
        final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
        if value != '':
            # Only add the 'value' attribute if a value is non-empty.
            final_attrs['value'] = force_text(self._format_value(value))
        return format_html('<input{} />', flatatt(final_attrs))
class TextInput(Input):
    """<input type="text">; the type may be overridden via attrs['type']."""
    input_type = 'text'
    def __init__(self, attrs=None):
        if attrs is not None:
            # Allow callers to swap the input type (e.g. 'search') through
            # attrs; pop it so it isn't rendered twice.
            self.input_type = attrs.pop('type', self.input_type)
        super(TextInput, self).__init__(attrs)
class NumberInput(TextInput):
    """<input type="number">."""
    input_type = 'number'
class EmailInput(TextInput):
    """<input type="email">."""
    input_type = 'email'
class URLInput(TextInput):
    """<input type="url">."""
    input_type = 'url'
class PasswordInput(TextInput):
    """A masked text input; by default the value is never echoed back into
    the re-rendered form."""
    input_type = 'password'

    def __init__(self, attrs=None, render_value=False):
        super(PasswordInput, self).__init__(attrs)
        self.render_value = render_value

    def render(self, name, value, attrs=None):
        # Suppress the value unless re-display was explicitly requested.
        shown = value if self.render_value else None
        return super(PasswordInput, self).render(name, shown, attrs)
class HiddenInput(Input):
    """<input type="hidden">; makes Widget.is_hidden return True."""
    input_type = 'hidden'
class MultipleHiddenInput(HiddenInput):
    """
    A widget that handles <input type="hidden"> for fields that have a list
    of values.
    """
    def __init__(self, attrs=None, choices=()):
        super(MultipleHiddenInput, self).__init__(attrs)
        # choices can be any iterable
        self.choices = choices
    def render(self, name, value, attrs=None, choices=()):
        if value is None:
            value = []
        final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
        id_ = final_attrs.get('id')
        inputs = []
        for i, v in enumerate(value):
            input_attrs = dict(value=force_text(v), **final_attrs)
            if id_:
                # An ID attribute was given. Add a numeric index as a suffix
                # so that the inputs don't all have the same ID attribute.
                input_attrs['id'] = '%s_%s' % (id_, i)
            inputs.append(format_html('<input{} />', flatatt(input_attrs)))
        return mark_safe('\n'.join(inputs))
    def value_from_datadict(self, data, files, name):
        # MultiValueDict can hold several values per key; plain dicts can't.
        if isinstance(data, MultiValueDict):
            return data.getlist(name)
        return data.get(name)
class FileInput(Input):
    """<input type="file">; requires a multipart form."""
    input_type = 'file'
    needs_multipart_form = True
    def render(self, name, value, attrs=None):
        # File contents are never re-rendered back into the form.
        return super(FileInput, self).render(name, None, attrs=attrs)
    def value_from_datadict(self, data, files, name):
        "File widgets take data from FILES, not POST"
        return files.get(name)
# Sentinel returned by ClearableFileInput.value_from_datadict when the user
# both uploads a new file and ticks "clear"; FileField converts it into a
# ValidationError.
FILE_INPUT_CONTRADICTION = object()
class ClearableFileInput(FileInput):
    """A file input that also shows the current file and, for optional
    fields, a checkbox to clear it."""
    initial_text = ugettext_lazy('Currently')
    input_text = ugettext_lazy('Change')
    clear_checkbox_label = ugettext_lazy('Clear')
    template_with_initial = (
        '%(initial_text)s: <a href="%(initial_url)s">%(initial)s</a> '
        '%(clear_template)s<br />%(input_text)s: %(input)s'
    )
    template_with_clear = '%(clear)s <label for="%(clear_checkbox_id)s">%(clear_checkbox_label)s</label>'
    def clear_checkbox_name(self, name):
        """
        Given the name of the file input, return the name of the clear checkbox
        input.
        """
        return name + '-clear'
    def clear_checkbox_id(self, name):
        """
        Given the name of the clear checkbox input, return the HTML id for it.
        """
        return name + '_id'
    def is_initial(self, value):
        """
        Return whether value is considered to be initial value.
        """
        # An initial value is a stored file object, which exposes .url.
        return bool(value and hasattr(value, 'url'))
    def get_template_substitution_values(self, value):
        """
        Return value-related substitutions.
        """
        return {
            'initial': conditional_escape(value),
            'initial_url': conditional_escape(value.url),
        }
    def render(self, name, value, attrs=None):
        substitutions = {
            'initial_text': self.initial_text,
            'input_text': self.input_text,
            'clear_template': '',
            'clear_checkbox_label': self.clear_checkbox_label,
        }
        # Start with the bare file input; upgrade to the richer template
        # only when there is an initial (stored) file to show.
        template = '%(input)s'
        substitutions['input'] = super(ClearableFileInput, self).render(name, value, attrs)
        if self.is_initial(value):
            template = self.template_with_initial
            substitutions.update(self.get_template_substitution_values(value))
            if not self.is_required:
                # Optional fields additionally get the "clear" checkbox.
                checkbox_name = self.clear_checkbox_name(name)
                checkbox_id = self.clear_checkbox_id(checkbox_name)
                substitutions['clear_checkbox_name'] = conditional_escape(checkbox_name)
                substitutions['clear_checkbox_id'] = conditional_escape(checkbox_id)
                substitutions['clear'] = CheckboxInput().render(checkbox_name, False, attrs={'id': checkbox_id})
                substitutions['clear_template'] = self.template_with_clear % substitutions
        return mark_safe(template % substitutions)
    def value_from_datadict(self, data, files, name):
        upload = super(ClearableFileInput, self).value_from_datadict(data, files, name)
        if not self.is_required and CheckboxInput().value_from_datadict(
                data, files, self.clear_checkbox_name(name)):
            if upload:
                # If the user contradicts themselves (uploads a new file AND
                # checks the "clear" checkbox), we return a unique marker
                # object that FileField will turn into a ValidationError.
                return FILE_INPUT_CONTRADICTION
            # False signals to clear any existing value, as opposed to just None
            return False
        return upload
class Textarea(Widget):
    """A <textarea> widget with a 40x10 default size."""

    def __init__(self, attrs=None):
        # HTML's default 20x2 box is too small to be useful.
        merged = {'cols': '40', 'rows': '10'}
        if attrs:
            merged.update(attrs)
        super(Textarea, self).__init__(merged)

    def render(self, name, value, attrs=None):
        final_attrs = self.build_attrs(attrs, name=name)
        text = '' if value is None else value
        return format_html('<textarea{}>\r\n{}</textarea>',
                           flatatt(final_attrs),
                           force_text(text))
class DateTimeBaseInput(TextInput):
    """Base for date/time text inputs; subclasses set format_key to pick
    the locale format list used for display."""
    format_key = ''
    supports_microseconds = False
    def __init__(self, attrs=None, format=None):
        super(DateTimeBaseInput, self).__init__(attrs)
        # An explicit format overrides the locale-derived one.
        self.format = format if format else None
    def _format_value(self, value):
        return formats.localize_input(value,
            self.format or formats.get_format(self.format_key)[0])
class DateInput(DateTimeBaseInput):
    """Text input formatted with the locale's date formats."""
    format_key = 'DATE_INPUT_FORMATS'
class DateTimeInput(DateTimeBaseInput):
    """Text input formatted with the locale's datetime formats."""
    format_key = 'DATETIME_INPUT_FORMATS'
class TimeInput(DateTimeBaseInput):
    """Text input formatted with the locale's time formats."""
    format_key = 'TIME_INPUT_FORMATS'
# Defined at module level so that CheckboxInput is picklable (#17976)
def boolean_check(v):
    """Default check_test: False, None and '' are unchecked; anything
    else (including 0) counts as checked."""
    return v is not False and v is not None and v != ''
class CheckboxInput(Widget):
    """<input type="checkbox"> with a pluggable checked-ness test."""
    def __init__(self, attrs=None, check_test=None):
        super(CheckboxInput, self).__init__(attrs)
        # check_test is a callable that takes a value and returns True
        # if the checkbox should be checked for that value.
        self.check_test = boolean_check if check_test is None else check_test
    def render(self, name, value, attrs=None):
        final_attrs = self.build_attrs(attrs, type='checkbox', name=name)
        if self.check_test(value):
            final_attrs['checked'] = 'checked'
        if not (value is True or value is False or value is None or value == ''):
            # Only add the 'value' attribute if a value is non-empty.
            final_attrs['value'] = force_text(value)
        return format_html('<input{} />', flatatt(final_attrs))
    def value_from_datadict(self, data, files, name):
        if name not in data:
            # A missing value means False because HTML form submission does not
            # send results for unselected checkboxes.
            return False
        value = data.get(name)
        # Translate true and false strings to boolean values.
        values = {'true': True, 'false': False}
        if isinstance(value, six.string_types):
            value = values.get(value.lower(), value)
        return bool(value)
class Select(Widget):
    """A <select> widget; subclasses toggle allow_multiple_selected."""
    allow_multiple_selected = False
    def __init__(self, attrs=None, choices=()):
        super(Select, self).__init__(attrs)
        # choices can be any iterable, but we may need to render this widget
        # multiple times. Thus, collapse it into a list so it can be consumed
        # more than once.
        self.choices = list(choices)
    def __deepcopy__(self, memo):
        obj = copy.copy(self)
        obj.attrs = self.attrs.copy()
        obj.choices = copy.copy(self.choices)
        memo[id(self)] = obj
        return obj
    def render(self, name, value, attrs=None, choices=()):
        if value is None:
            value = ''
        final_attrs = self.build_attrs(attrs, name=name)
        output = [format_html('<select{}>', flatatt(final_attrs))]
        options = self.render_options(choices, [value])
        if options:
            output.append(options)
        output.append('</select>')
        return mark_safe('\n'.join(output))
    def render_option(self, selected_choices, option_value, option_label):
        if option_value is None:
            option_value = ''
        option_value = force_text(option_value)
        if option_value in selected_choices:
            selected_html = mark_safe(' selected="selected"')
            if not self.allow_multiple_selected:
                # Only allow for a single selection: remove the matched value
                # so duplicate choices aren't all marked selected.
                selected_choices.remove(option_value)
        else:
            selected_html = ''
        return format_html('<option value="{}"{}>{}</option>',
                           option_value,
                           selected_html,
                           force_text(option_label))
    def render_options(self, choices, selected_choices):
        # Normalize to strings.
        selected_choices = set(force_text(v) for v in selected_choices)
        output = []
        for option_value, option_label in chain(self.choices, choices):
            # A (group_label, [(value, label), ...]) pair renders an <optgroup>.
            if isinstance(option_label, (list, tuple)):
                output.append(format_html('<optgroup label="{}">', force_text(option_value)))
                for option in option_label:
                    output.append(self.render_option(selected_choices, *option))
                output.append('</optgroup>')
            else:
                output.append(self.render_option(selected_choices, option_value, option_label))
        return '\n'.join(output)
class NullBooleanSelect(Select):
    """
    A Select Widget intended to be used with NullBooleanField.
    """
    def __init__(self, attrs=None):
        # Internal encoding: '1' = Unknown/None, '2' = True, '3' = False.
        choices = (('1', ugettext_lazy('Unknown')),
                   ('2', ugettext_lazy('Yes')),
                   ('3', ugettext_lazy('No')))
        super(NullBooleanSelect, self).__init__(attrs, choices)
    def render(self, name, value, attrs=None, choices=()):
        try:
            value = {True: '2', False: '3', '2': '2', '3': '3'}[value]
        except KeyError:
            # Anything unrecognized (including None) renders as Unknown.
            value = '1'
        return super(NullBooleanSelect, self).render(name, value, attrs, choices)
    def value_from_datadict(self, data, files, name):
        value = data.get(name)
        # Unknown/missing values map to None via dict.get's default.
        return {'2': True,
                True: True,
                'True': True,
                '3': False,
                'False': False,
                False: False}.get(value)
class SelectMultiple(Select):
    """A <select multiple> widget; value is a list of selected values."""
    allow_multiple_selected = True
    def render(self, name, value, attrs=None, choices=()):
        if value is None:
            value = []
        final_attrs = self.build_attrs(attrs, name=name)
        output = [format_html('<select multiple="multiple"{}>', flatatt(final_attrs))]
        options = self.render_options(choices, value)
        if options:
            output.append(options)
        output.append('</select>')
        return mark_safe('\n'.join(output))
    def value_from_datadict(self, data, files, name):
        # MultiValueDict can hold several values per key; plain dicts can't.
        if isinstance(data, MultiValueDict):
            return data.getlist(name)
        return data.get(name)
@html_safe
@python_2_unicode_compatible
class ChoiceInput(SubWidget):
    """
    An object used by ChoiceFieldRenderer that represents a single
    <input type='$input_type'>.
    """
    input_type = None  # Subclasses must define this
    def __init__(self, name, value, attrs, choice, index):
        self.name = name
        self.value = value
        self.attrs = attrs
        self.choice_value = force_text(choice[0])
        self.choice_label = force_text(choice[1])
        self.index = index
        if 'id' in self.attrs:
            # Suffix the shared field id so each input gets a unique one.
            self.attrs['id'] += "_%d" % self.index
    def __str__(self):
        return self.render()
    def render(self, name=None, value=None, attrs=None, choices=()):
        if self.id_for_label:
            label_for = format_html(' for="{}"', self.id_for_label)
        else:
            label_for = ''
        attrs = dict(self.attrs, **attrs) if attrs else self.attrs
        return format_html(
            '<label{}>{} {}</label>', label_for, self.tag(attrs), self.choice_label
        )
    def is_checked(self):
        return self.value == self.choice_value
    def tag(self, attrs=None):
        attrs = attrs or self.attrs
        final_attrs = dict(attrs, type=self.input_type, name=self.name, value=self.choice_value)
        if self.is_checked():
            final_attrs['checked'] = 'checked'
        return format_html('<input{} />', flatatt(final_attrs))
    @property
    def id_for_label(self):
        return self.attrs.get('id', '')
class RadioChoiceInput(ChoiceInput):
    """A single <input type="radio">; value is a scalar."""
    input_type = 'radio'
    def __init__(self, *args, **kwargs):
        super(RadioChoiceInput, self).__init__(*args, **kwargs)
        # Normalize so is_checked() compares string to string.
        self.value = force_text(self.value)
class CheckboxChoiceInput(ChoiceInput):
    """A single <input type="checkbox">; value is an iterable of selected
    values, normalized to a set of strings."""
    input_type = 'checkbox'
    def __init__(self, *args, **kwargs):
        super(CheckboxChoiceInput, self).__init__(*args, **kwargs)
        self.value = set(force_text(v) for v in self.value)
    def is_checked(self):
        return self.choice_value in self.value
@html_safe
@python_2_unicode_compatible
class ChoiceFieldRenderer(object):
    """
    An object used by RadioSelect to enable customization of radio widgets.
    """
    choice_input_class = None
    outer_html = '<ul{id_attr}>{content}</ul>'
    inner_html = '<li>{choice_value}{sub_widgets}</li>'
    def __init__(self, name, value, attrs, choices):
        self.name = name
        self.value = value
        self.attrs = attrs
        self.choices = choices
    def __getitem__(self, idx):
        choice = self.choices[idx]  # Let the IndexError propagate
        return self.choice_input_class(self.name, self.value, self.attrs.copy(), choice, idx)
    def __str__(self):
        return self.render()
    def render(self):
        """
        Outputs a <ul> for this set of choice fields.
        If an id was given to the field, it is applied to the <ul> (each
        item in the list will get an id of `$id_$i`).
        """
        id_ = self.attrs.get('id')
        output = []
        for i, choice in enumerate(self.choices):
            choice_value, choice_label = choice
            if isinstance(choice_label, (tuple, list)):
                # A grouped choice renders as a nested <ul> via a sub-renderer.
                attrs_plus = self.attrs.copy()
                if id_:
                    attrs_plus['id'] += '_{}'.format(i)
                sub_ul_renderer = self.__class__(
                    name=self.name,
                    value=self.value,
                    attrs=attrs_plus,
                    choices=choice_label,
                )
                sub_ul_renderer.choice_input_class = self.choice_input_class
                output.append(format_html(self.inner_html, choice_value=choice_value,
                                          sub_widgets=sub_ul_renderer.render()))
            else:
                w = self.choice_input_class(self.name, self.value,
                                            self.attrs.copy(), choice, i)
                output.append(format_html(self.inner_html,
                                          choice_value=force_text(w), sub_widgets=''))
        return format_html(self.outer_html,
                           id_attr=format_html(' id="{}"', id_) if id_ else '',
                           content=mark_safe('\n'.join(output)))
class RadioFieldRenderer(ChoiceFieldRenderer):
    """Renders choices as a list of radio inputs."""
    choice_input_class = RadioChoiceInput
class CheckboxFieldRenderer(ChoiceFieldRenderer):
    """Renders choices as a list of checkbox inputs."""
    choice_input_class = CheckboxChoiceInput
class RendererMixin(object):
    """Mixin that delegates rendering to a ChoiceFieldRenderer subclass."""
    renderer = None  # subclasses must define this
    _empty_value = None
    def __init__(self, *args, **kwargs):
        # Override the default renderer if we were passed one.
        renderer = kwargs.pop('renderer', None)
        if renderer:
            self.renderer = renderer
        super(RendererMixin, self).__init__(*args, **kwargs)
    def subwidgets(self, name, value, attrs=None, choices=()):
        for widget in self.get_renderer(name, value, attrs, choices):
            yield widget
    def get_renderer(self, name, value, attrs=None, choices=()):
        """Returns an instance of the renderer."""
        if value is None:
            value = self._empty_value
        final_attrs = self.build_attrs(attrs)
        choices = list(chain(self.choices, choices))
        return self.renderer(name, value, final_attrs, choices)
    def render(self, name, value, attrs=None, choices=()):
        return self.get_renderer(name, value, attrs, choices).render()
    def id_for_label(self, id_):
        # Widgets using this RendererMixin are made of a collection of
        # subwidgets, each with their own <label>, and distinct ID.
        # The IDs are made distinct by a "_X" suffix, where X is the zero-based
        # index of the choice field. Thus, the label for the main widget should
        # reference the first subwidget, hence the "_0" suffix.
        if id_:
            id_ += '_0'
        return id_
class RadioSelect(RendererMixin, Select):
    """Single-choice widget rendered as a radio-button list."""
    renderer = RadioFieldRenderer
    _empty_value = ''
class CheckboxSelectMultiple(RendererMixin, SelectMultiple):
    """Multi-choice widget rendered as a checkbox list."""
    renderer = CheckboxFieldRenderer
    # NOTE(review): mutable class attribute shared by all instances; appears
    # to be read-only in this file — confirm no caller mutates it.
    _empty_value = []
class MultiWidget(Widget):
    """
    A widget that is composed of multiple widgets.
    Its render() method is different than other widgets', because it has to
    figure out how to split a single value for display in multiple widgets.
    The ``value`` argument can be one of two things:
    * A list.
    * A normal value (e.g., a string) that has been "compressed" from
      a list of values.
    In the second case -- i.e., if the value is NOT a list -- render() will
    first "decompress" the value into a list before rendering it. It does so by
    calling the decompress() method, which MultiWidget subclasses must
    implement. This method takes a single "compressed" value and returns a
    list.
    When render() does its HTML rendering, each value in the list is rendered
    with the corresponding widget -- the first value is rendered in the first
    widget, the second value is rendered in the second widget, etc.
    Subclasses may implement format_output(), which takes the list of rendered
    widgets and returns a string of HTML that formats them any way you'd like.
    You'll probably want to use this class with MultiValueField.
    """
    def __init__(self, widgets, attrs=None):
        # Accept widget classes as well as instances.
        self.widgets = [w() if isinstance(w, type) else w for w in widgets]
        super(MultiWidget, self).__init__(attrs)
    @property
    def is_hidden(self):
        return all(w.is_hidden for w in self.widgets)
    def render(self, name, value, attrs=None):
        if self.is_localized:
            for widget in self.widgets:
                widget.is_localized = self.is_localized
        # value is a list of values, each corresponding to a widget
        # in self.widgets.
        if not isinstance(value, list):
            value = self.decompress(value)
        output = []
        final_attrs = self.build_attrs(attrs)
        id_ = final_attrs.get('id')
        for i, widget in enumerate(self.widgets):
            try:
                widget_value = value[i]
            except IndexError:
                widget_value = None
            if id_:
                # Each subwidget gets the shared id plus a numeric suffix.
                final_attrs = dict(final_attrs, id='%s_%s' % (id_, i))
            output.append(widget.render(name + '_%s' % i, widget_value, final_attrs))
        return mark_safe(self.format_output(output))
    def id_for_label(self, id_):
        # See the comment for RadioSelect.id_for_label()
        if id_:
            id_ += '_0'
        return id_
    def value_from_datadict(self, data, files, name):
        return [widget.value_from_datadict(data, files, name + '_%s' % i) for i, widget in enumerate(self.widgets)]
    def format_output(self, rendered_widgets):
        """
        Given a list of rendered widgets (as strings), returns a Unicode string
        representing the HTML for the whole lot.
        This hook allows you to format the HTML design of the widgets, if
        needed.
        """
        return ''.join(rendered_widgets)
    def decompress(self, value):
        """
        Returns a list of decompressed values for the given compressed value.
        The given value can be assumed to be valid, but not necessarily
        non-empty.
        """
        raise NotImplementedError('Subclasses must implement this method.')
    def _get_media(self):
        "Media for a multiwidget is the combination of all media of the subwidgets"
        media = Media()
        for w in self.widgets:
            media = media + w.media
        return media
    media = property(_get_media)
    def __deepcopy__(self, memo):
        obj = super(MultiWidget, self).__deepcopy__(memo)
        obj.widgets = copy.deepcopy(self.widgets)
        return obj
    @property
    def needs_multipart_form(self):
        return any(w.needs_multipart_form for w in self.widgets)
class SplitDateTimeWidget(MultiWidget):
    """
    A Widget that splits datetime input into two <input type="text"> boxes.
    """
    supports_microseconds = False
    def __init__(self, attrs=None, date_format=None, time_format=None):
        widgets = (DateInput(attrs=attrs, format=date_format),
                   TimeInput(attrs=attrs, format=time_format))
        super(SplitDateTimeWidget, self).__init__(widgets, attrs)
    def decompress(self, value):
        # Split a datetime into (date, time); microseconds are dropped.
        if value:
            value = to_current_timezone(value)
            return [value.date(), value.time().replace(microsecond=0)]
        return [None, None]
class SplitHiddenDateTimeWidget(SplitDateTimeWidget):
    """
    A Widget that splits datetime input into two <input type="hidden"> inputs.
    """
    def __init__(self, attrs=None, date_format=None, time_format=None):
        super(SplitHiddenDateTimeWidget, self).__init__(attrs, date_format, time_format)
        # Reuse the text subwidgets but force them to render as hidden.
        for widget in self.widgets:
            widget.input_type = 'hidden'
class SelectDateWidget(Widget):
    """
    A Widget that splits date input into three <select> boxes.
    This also serves as an example of a Widget that has more than one HTML
    element and hence implements value_from_datadict.
    """
    none_value = (0, '---')
    month_field = '%s_month'
    day_field = '%s_day'
    year_field = '%s_year'
    select_widget = Select
    date_re = re.compile(r'(\d{4})-(\d\d?)-(\d\d?)$')
    def __init__(self, attrs=None, years=None, months=None, empty_label=None):
        self.attrs = attrs or {}
        # Optional list or tuple of years to use in the "year" select box.
        if years:
            self.years = years
        else:
            this_year = datetime.date.today().year
            self.years = range(this_year, this_year + 10)
        # Optional dict of months to use in the "month" select box.
        if months:
            self.months = months
        else:
            self.months = MONTHS
        # Optional string, list, or tuple to use as empty_label.
        if isinstance(empty_label, (list, tuple)):
            if not len(empty_label) == 3:
                raise ValueError('empty_label list/tuple must have 3 elements.')
            self.year_none_value = (0, empty_label[0])
            self.month_none_value = (0, empty_label[1])
            self.day_none_value = (0, empty_label[2])
        else:
            if empty_label is not None:
                self.none_value = (0, empty_label)
            self.year_none_value = self.none_value
            self.month_none_value = self.none_value
            self.day_none_value = self.none_value
    @staticmethod
    def _parse_date_fmt():
        # Yield 'year'/'month'/'day' in the order the locale's DATE_FORMAT
        # displays them, honoring backslash escapes in the format string.
        fmt = get_format('DATE_FORMAT')
        escaped = False
        for char in fmt:
            if escaped:
                escaped = False
            elif char == '\\':
                escaped = True
            elif char in 'Yy':
                yield 'year'
            elif char in 'bEFMmNn':
                yield 'month'
            elif char in 'dj':
                yield 'day'
    def render(self, name, value, attrs=None):
        try:
            year_val, month_val, day_val = value.year, value.month, value.day
        except AttributeError:
            year_val = month_val = day_val = None
            if isinstance(value, six.string_types):
                # Try the locale input format first, then the ISO-ish regex.
                if settings.USE_L10N:
                    try:
                        input_format = get_format('DATE_INPUT_FORMATS')[0]
                        v = datetime.datetime.strptime(force_str(value), input_format)
                        year_val, month_val, day_val = v.year, v.month, v.day
                    except ValueError:
                        pass
                if year_val is None:
                    match = self.date_re.match(value)
                    if match:
                        year_val, month_val, day_val = [int(val) for val in match.groups()]
        html = {}
        choices = [(i, i) for i in self.years]
        html['year'] = self.create_select(name, self.year_field, value, year_val, choices, self.year_none_value)
        choices = list(self.months.items())
        html['month'] = self.create_select(name, self.month_field, value, month_val, choices, self.month_none_value)
        choices = [(i, i) for i in range(1, 32)]
        html['day'] = self.create_select(name, self.day_field, value, day_val, choices, self.day_none_value)
        output = []
        # Emit the three selects in the locale's display order.
        for field in self._parse_date_fmt():
            output.append(html[field])
        return mark_safe('\n'.join(output))
    def id_for_label(self, id_):
        # for/else: the `else` only runs when _parse_date_fmt() yields
        # nothing; otherwise the first field's id is returned immediately.
        for first_select in self._parse_date_fmt():
            return '%s_%s' % (id_, first_select)
        else:
            return '%s_month' % id_
    def value_from_datadict(self, data, files, name):
        y = data.get(self.year_field % name)
        m = data.get(self.month_field % name)
        d = data.get(self.day_field % name)
        if y == m == d == "0":
            return None
        if y and m and d:
            if settings.USE_L10N:
                input_format = get_format('DATE_INPUT_FORMATS')[0]
                try:
                    date_value = datetime.date(int(y), int(m), int(d))
                except ValueError:
                    # Invalid date: return the raw string so the field can
                    # raise a validation error instead of crashing here.
                    return '%s-%s-%s' % (y, m, d)
                else:
                    date_value = datetime_safe.new_date(date_value)
                    return date_value.strftime(input_format)
            else:
                return '%s-%s-%s' % (y, m, d)
        return data.get(name)
    def create_select(self, name, field, value, val, choices, none_value):
        if 'id' in self.attrs:
            id_ = self.attrs['id']
        else:
            id_ = 'id_%s' % name
        # Optional fields get the "empty" choice prepended.
        if not self.is_required:
            choices.insert(0, none_value)
        local_attrs = self.build_attrs(id=field % id_)
        s = self.select_widget(choices=choices)
        select_html = s.render(field % name, val, local_attrs)
        return select_html
|
krikru/tensorflow-opencl
|
refs/heads/master
|
tensorflow/python/summary/writer/event_file_writer.py
|
36
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Writes events to disk in a logdir."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import threading
import time
import six
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.platform import gfile
from tensorflow.python.util import compat
class EventFileWriter(object):
  """Writes `Event` protocol buffers to an event file.
  The `EventFileWriter` class creates an event file in the specified directory,
  and asynchronously writes Event protocol buffers to the file. The Event file
  is encoded using the tfrecord format, which is similar to RecordIO.
  @@__init__
  @@add_event
  @@flush
  @@close
  """
  def __init__(self, logdir, max_queue=10, flush_secs=120):
    """Creates a `EventFileWriter` and an event file to write to.
    On construction the summary writer creates a new event file in `logdir`.
    This event file will contain `Event` protocol buffers, which are written to
    disk via the add_event method.
    The other arguments to the constructor control the asynchronous writes to
    the event file:
    * `flush_secs`: How often, in seconds, to flush the added summaries
      and events to disk.
    * `max_queue`: Maximum number of summaries or events pending to be
      written to disk before one of the 'add' calls block.
    Args:
      logdir: A string. Directory where event file will be written.
      max_queue: Integer. Size of the queue for pending events and summaries.
      flush_secs: Number. How often, in seconds, to flush the
        pending events and summaries to disk.
    """
    self._logdir = logdir
    if not gfile.IsDirectory(self._logdir):
      gfile.MakeDirs(self._logdir)
    # Bounded queue: add_event blocks once max_queue events are pending.
    self._event_queue = six.moves.queue.Queue(max_queue)
    self._ev_writer = pywrap_tensorflow.EventsWriter(
        compat.as_bytes(os.path.join(self._logdir, "events")))
    self._closed = False
    # Daemon worker drains the queue and flushes every flush_secs.
    self._worker = _EventLoggerThread(self._event_queue, self._ev_writer,
                                      flush_secs)
    self._worker.start()
  def get_logdir(self):
    """Returns the directory where event file will be written."""
    return self._logdir
  def reopen(self):
    """Reopens the EventFileWriter.
    Can be called after `close()` to add more events in the same directory.
    The events will go into a new events file.
    Does nothing if the EventFileWriter was not closed.
    """
    # NOTE(review): this only clears the closed flag; creation of the new
    # events file appears to be handled by the underlying EventsWriter on
    # the next write — confirm against the C++ implementation.
    if self._closed:
      self._closed = False
  def add_event(self, event):
    """Adds an event to the event file.
    Args:
      event: An `Event` protocol buffer.
    """
    # Events added after close() are silently dropped.
    if not self._closed:
      self._event_queue.put(event)
  def flush(self):
    """Flushes the event file to disk.
    Call this method to make sure that all pending events have been written to
    disk.
    """
    # join() blocks until the worker has task_done()'d every queued event.
    self._event_queue.join()
    self._ev_writer.Flush()
  def close(self):
    """Flushes the event file to disk and close the file.
    Call this method when you do not need the summary writer anymore.
    """
    self.flush()
    self._ev_writer.Close()
    self._closed = True
class _EventLoggerThread(threading.Thread):
"""Thread that logs events."""
def __init__(self, queue, ev_writer, flush_secs):
"""Creates an _EventLoggerThread.
Args:
queue: A Queue from which to dequeue events.
ev_writer: An event writer. Used to log brain events for
the visualizer.
flush_secs: How often, in seconds, to flush the
pending file to disk.
"""
threading.Thread.__init__(self)
self.daemon = True
self._queue = queue
self._ev_writer = ev_writer
self._flush_secs = flush_secs
# The first event will be flushed immediately.
self._next_event_flush_time = 0
def run(self):
while True:
event = self._queue.get()
try:
self._ev_writer.WriteEvent(event)
# Flush the event writer every so often.
now = time.time()
if now > self._next_event_flush_time:
self._ev_writer.Flush()
# Do it again in two minutes.
self._next_event_flush_time = now + self._flush_secs
finally:
self._queue.task_done()
|
bplancher/odoo
|
refs/heads/9.0
|
openerp/addons/base/tests/test_mimetypes.py
|
12
|
import base64
import unittest
from openerp.tools.mimetypes import guess_mimetype
# Base64-encoded 1x1-pixel fixture images used to exercise guess_mimetype.
PNG = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVQI12P4//8/AAX+Av7czFnnAAAAAElFTkSuQmCC'
GIF = "R0lGODdhAQABAIAAAP///////ywAAAAAAQABAAACAkQBADs="
# The triple-quoted fixtures below contain embedded newlines; the base64
# decoder used by the tests tolerates that whitespace.
BMP = """Qk1+AAAAAAAAAHoAAABsAAAAAQAAAAEAAAABABgAAAAAAAQAAAATCwAAEwsAAAAAAAAAAAAAQkdScwAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAD///8A"""
JPG = """/9j/4AAQSkZJRgABAQEASABIAAD//gATQ3JlYXRlZCB3aXRoIEdJTVD/2wBDAP
//////////////////////////////////////////////////////////////////////////////////////2wBDAf///////
///////////////////////////////////////////////////////////////////////////////wgARCAABAAEDAREAAhEB
AxEB/8QAFAABAAAAAAAAAAAAAAAAAAAAAv/EABQBAQAAAAAAAAAAAAAAAAAAAAD/2gAMAwEAAhADEAAAAUf/xAAUEAEAAAAAAAA
AAAAAAAAAAAAA/9oACAEBAAEFAn//xAAUEQEAAAAAAAAAAAAAAAAAAAAA/9oACAEDAQE/AX//xAAUEQEAAAAAAAAAAAAAAAAAAA
AA/9oACAECAQE/AX//xAAUEAEAAAAAAAAAAAAAAAAAAAAA/9oACAEBAAY/An//xAAUEAEAAAAAAAAAAAAAAAAAAAAA/9oACAEBA
AE/IX//2gAMAwEAAgADAAAAEB//xAAUEQEAAAAAAAAAAAAAAAAAAAAA/9oACAEDAQE/EH//xAAUEQEAAAAAAAAAAAAAAAAAAAAA
/9oACAECAQE/EH//xAAUEAEAAAAAAAAAAAAAAAAAAAAA/9oACAEBAAE/EH//2Q=="""
class test_guess_mimetype(unittest.TestCase):
    """Sanity checks for guess_mimetype against small base64 fixtures."""

    def test_default_mimetype_empty(self):
        detected = guess_mimetype('')
        # odoo implementation returns application/octet-stream by default;
        # if available, python-magic returns application/x-empty
        self.assertIn(detected, ('application/octet-stream', 'application/x-empty'))

    def test_default_mimetype(self):
        detected = guess_mimetype('', default='test')
        # if available, python-magic returns application/x-empty
        self.assertIn(detected, ('test', 'application/x-empty'))

    def test_mimetype_octet_stream(self):
        self.assertEqual(guess_mimetype('\0'), 'application/octet-stream')

    def test_mimetype_png(self):
        self.assertEqual(
            guess_mimetype(base64.b64decode(PNG), default='test'), 'image/png')

    def test_mimetype_bmp(self):
        # mimetype should match image/bmp, image/x-ms-bmp, ...
        self.assertRegexpMatches(
            guess_mimetype(base64.b64decode(BMP), default='test'),
            r'image/.*\bbmp')

    def test_mimetype_jpg(self):
        self.assertEqual(
            guess_mimetype(base64.b64decode(JPG), default='test'), 'image/jpeg')

    def test_mimetype_gif(self):
        self.assertEqual(
            guess_mimetype(base64.b64decode(GIF), default='test'), 'image/gif')
# Allow running this test module directly, outside the odoo test runner.
if __name__ == '__main__':
    unittest.main()
|
40223210/w16b_test
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/unittest/test/_test_warnings.py
|
858
|
# helper module for test_runner.Test_TextTestRunner.test_warnings
"""
This module has a number of tests that raise different kinds of warnings.
When the tests are run, the warnings are caught and their messages are printed
to stdout. This module also accepts an arg that is then passed to
unittest.main to affect the behavior of warnings.
Test_TextTestRunner.test_warnings executes this script with different
combinations of warnings args and -W flags and check that the output is correct.
See #10535.
"""
import sys
import unittest
import warnings
def warnfun():
    """Emit the RuntimeWarning fixture ('rw') from a single call site.

    TestWarnings.test_function calls this several times so all the warnings
    originate from the same line, exercising duplicate-warning filtering.
    """
    warnings.warn('rw', RuntimeWarning)
class TestWarnings(unittest.TestCase):
    """Fixture case that deliberately raises a variety of warnings.

    NOTE(review): the deprecated unittest aliases below (``assertEquals``,
    ``failUnless``) are used on purpose so that running this case produces
    unittest's own deprecation warnings -- do not modernize them.
    """
    # unittest warnings will be printed at most once per type (max one message
    # for the fail* methods, and one for the assert* methods)
    def test_assert(self):
        self.assertEquals(2+2, 4)
        self.assertEquals(2*2, 4)
        self.assertEquals(2**2, 4)
    def test_fail(self):
        self.failUnless(1)
        self.failUnless(True)
    def test_other_unittest(self):
        # Non-deprecated assert* methods: these should run warning-free.
        self.assertAlmostEqual(2+2, 4)
        self.assertNotAlmostEqual(4+4, 2)
    # these warnings are normally silenced, but they are printed in unittest
    def test_deprecation(self):
        warnings.warn('dw', DeprecationWarning)
        warnings.warn('dw', DeprecationWarning)
        warnings.warn('dw', DeprecationWarning)
    def test_import(self):
        warnings.warn('iw', ImportWarning)
        warnings.warn('iw', ImportWarning)
        warnings.warn('iw', ImportWarning)
    # user warnings should always be printed
    def test_warning(self):
        warnings.warn('uw')
        warnings.warn('uw')
        warnings.warn('uw')
    # these warnings come from the same place; they will be printed
    # only once by default or three times if the 'always' filter is used
    def test_function(self):
        warnfun()
        warnfun()
        warnfun()
if __name__ == '__main__':
    # Record every warning raised while the tests run so they can be printed
    # afterwards for test_runner to compare against its expected output.
    with warnings.catch_warnings(record=True) as ws:
        # if an arg is provided pass it to unittest.main as 'warnings'
        if len(sys.argv) == 2:
            unittest.main(exit=False, warnings=sys.argv.pop())
        else:
            unittest.main(exit=False)
    # print all the warning messages collected
    for w in ws:
        print(w.message)
|
rmcgibbo/scipy
|
refs/heads/master
|
scipy/special/_ellip_harm.py
|
80
|
from __future__ import division, print_function, absolute_import
import threading
import numpy as np
from ._ufuncs import _ellip_harm
from ._ellip_harm_2 import _ellipsoid, _ellipsoid_norm
# The Cython helpers _ellipsoid and _ellipsoid_norm keep state in module
# globals; this lock serializes calls to them so that the public functions
# are safe to call from multiple threads simultaneously.
_ellip_lock = threading.Lock()
def ellip_harm(h2, k2, n, p, s, signm=1, signn=1):
    r"""
    Ellipsoidal harmonic functions E^p_n(l)

    These are also known as Lame functions of the first kind, and are
    solutions to the Lame equation:

    .. math:: (s^2 - h^2)(s^2 - k^2)E''(s) + s(2s^2 - h^2 - k^2)E'(s) + (a - q s^2)E(s) = 0

    where :math:`q = (n+1)n` and :math:`a` is the eigenvalue (not
    returned) corresponding to the solutions.

    Parameters
    ----------
    h2 : float
        ``h**2``
    k2 : float
        ``k**2``; should be larger than ``h**2``
    n : int
        Degree
    s : float
        Coordinate
    p : int
        Order, can range between [1,2n+1]
    signm : {1, -1}, optional
        Sign of prefactor of functions. Can be +/-1. See Notes.
    signn : {1, -1}, optional
        Sign of prefactor of functions. Can be +/-1. See Notes.

    Returns
    -------
    E : float
        the harmonic :math:`E^p_n(s)`

    See Also
    --------
    ellip_harm_2, ellip_normal

    Notes
    -----
    The geometric interpretation of the ellipsoidal functions is
    explained in [2]_, [3]_, [4]_. The `signm` and `signn` arguments control the
    sign of prefactors for functions according to their type::

        K : +1
        L : signm
        M : signn
        N : signm*signn

    .. versionadded:: 0.15.0

    References
    ----------
    .. [1] Digital Library of Mathematical Functions 29.12
       http://dlmf.nist.gov/29.12
    .. [2] Bardhan and Knepley, "Computational science and
       re-discovery: open-source implementations of
       ellipsoidal harmonics for problems in potential theory",
       Comput. Sci. Disc. 5, 014006 (2012)
       doi:10.1088/1749-4699/5/1/014006
    .. [3] David J.and Dechambre P, "Computation of Ellipsoidal
       Gravity Field Harmonics for small solar system bodies"
       pp. 30-36, 2000
    .. [4] George Dassios, "Ellipsoidal Harmonics: Theory and Applications"
       pp. 418, 2012

    Examples
    --------
    >>> from scipy.special import ellip_harm
    >>> w = ellip_harm(5,8,1,1,2.5)
    >>> w
    2.5

    Check that the functions indeed are solutions to the Lame equation:

    >>> from scipy.interpolate import UnivariateSpline
    >>> def eigenvalue(f, df, ddf):
    ...     r = ((s**2 - h**2)*(s**2 - k**2)*ddf + s*(2*s**2 - h**2 - k**2)*df - n*(n+1)*s**2*f)/f
    ...     return -r.mean(), r.std()
    >>> s = np.linspace(0.1, 10, 200)
    >>> k, h, n, p = 8.0, 2.2, 3, 2
    >>> E = ellip_harm(h**2, k**2, n, p, s)
    >>> E_spl = UnivariateSpline(s, E)
    >>> a, a_err = eigenvalue(E_spl(s), E_spl(s,1), E_spl(s,2))
    >>> a, a_err
    (583.44366156701483, 6.4580890640310646e-11)
    """
    # Direct delegation to the vectorized Cython ufunc; no locking is needed
    # here (unlike ellip_harm_2/ellip_normal, which use global state).
    return _ellip_harm(h2, k2, n, p, s, signm, signn)
# np.vectorize does not work on Cython functions on Numpy < 1.8, so a wrapper is needed
def _ellip_harm_2_vec(h2, k2, n, p, s):
    # Thin pure-Python shim over the Cython _ellipsoid so it can be vectorized.
    return _ellipsoid(h2, k2, n, p, s)
# Broadcast the wrapper over array arguments; otypes='d' forces float64 output.
_ellip_harm_2_vec = np.vectorize(_ellip_harm_2_vec, otypes='d')
def ellip_harm_2(h2, k2, n, p, s):
    r"""
    Ellipsoidal harmonic functions F^p_n(l)

    These are also known as Lame functions of the second kind, and are
    solutions to the Lame equation:

    .. math:: (s^2 - h^2)(s^2 - k^2)F''(s) + s(2s^2 - h^2 - k^2)F'(s) + (a - q s^2)F(s) = 0

    where :math:`q = (n+1)n` and :math:`a` is the eigenvalue (not
    returned) corresponding to the solutions.

    Parameters
    ----------
    h2 : float
        ``h**2``
    k2 : float
        ``k**2``; should be larger than ``h**2``
    n : int
        Degree.
    p : int
        Order, can range between [1,2n+1].
    s : float
        Coordinate

    Returns
    -------
    F : float
        The harmonic :math:`F^p_n(s)`

    Notes
    -----
    Lame functions of the second kind are related to the functions of the first kind:

    .. math::

       F^p_n(s)=(2n + 1)E^p_n(s)\int_{0}^{1/s}\frac{du}{(E^p_n(1/u))^2\sqrt{(1-u^2k^2)(1-u^2h^2)}}

    .. versionadded:: 0.15.0

    See Also
    --------
    ellip_harm, ellip_normal

    Examples
    --------
    >>> from scipy.special import ellip_harm_2
    >>> w = ellip_harm_2(5,8,2,1,10)
    >>> w
    0.00108056853382
    """
    # Lock: the Cython backend uses module-global state (see _ellip_lock).
    # errstate suppresses floating-point warnings from the evaluation.
    with _ellip_lock:
        with np.errstate(all='ignore'):
            return _ellip_harm_2_vec(h2, k2, n, p, s)
def _ellip_normal_vec(h2, k2, n, p):
    # Pure-Python wrapper so np.vectorize can broadcast the Cython function.
    return _ellipsoid_norm(h2, k2, n, p)
# otypes='d' forces float64 output regardless of the input scalar types.
_ellip_normal_vec = np.vectorize(_ellip_normal_vec, otypes='d')
def ellip_normal(h2, k2, n, p):
    r"""
    Ellipsoidal harmonic normalization constants gamma^p_n

    The normalization constant is defined as

    .. math::

       \gamma^p_n=8\int_{0}^{h}dx\int_{h}^{k}dy\frac{(y^2-x^2)(E^p_n(y)E^p_n(x))^2}{\sqrt{(k^2-y^2)(y^2-h^2)(h^2-x^2)(k^2-x^2)}}

    Parameters
    ----------
    h2 : float
        ``h**2``
    k2 : float
        ``k**2``; should be larger than ``h**2``
    n : int
        Degree.
    p : int
        Order, can range between [1,2n+1].

    Returns
    -------
    gamma : float
        The normalization constant :math:`\gamma^p_n`

    See Also
    --------
    ellip_harm, ellip_harm_2

    Notes
    -----
    .. versionadded:: 0.15.0

    Examples
    --------
    >>> from scipy.special import ellip_normal
    >>> w = ellip_normal(5,8,3,7)
    >>> w
    1723.38796997
    """
    # Lock: the Cython backend uses module-global state (see _ellip_lock).
    # errstate suppresses floating-point warnings from the evaluation.
    with _ellip_lock:
        with np.errstate(all='ignore'):
            return _ellip_normal_vec(h2, k2, n, p)
|
noironetworks/horizon
|
refs/heads/master
|
openstack_dashboard/urls.py
|
1
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
URL patterns for the OpenStack Dashboard.
"""
from django.conf import settings
from django.conf.urls import include
from django.conf.urls.static import static
from django.conf.urls import url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views import defaults
import horizon
import horizon.base
from horizon.browsers import views as browsers_views
from horizon.decorators import require_auth
from openstack_dashboard.api import rest
from openstack_dashboard import views
# Core URL map: splash page, the REST API, the extensible page header, and
# everything registered with the horizon framework itself (catch-all last).
urlpatterns = [
    url(r'^$', views.splash, name='splash'),
    url(r'^api/', include(rest.urls)),
    url(r'^header/', views.ExtensibleHeaderView.as_view()),
    url(r'', horizon.base._wrapped_include(horizon.urls)),
]
# add URL for ngdetails
ngdetails_url = url(r'^ngdetails/',
                    browsers_views.AngularDetailsView.as_view(),
                    name='ngdetails')
urlpatterns.append(ngdetails_url)
# The Angular details view must only be reachable by authenticated users.
horizon.base._decorate_urlconf([ngdetails_url], require_auth)
# Mount each configured auth app (defaults to openstack_auth) under /auth/.
for u in getattr(settings, 'AUTHENTICATION_URLS', ['openstack_auth.urls']):
    urlpatterns.append(url(r'^auth/', include(u)))
# Development static app and project media serving using the staticfiles app.
urlpatterns += staticfiles_urlpatterns()
# Convenience function for serving user-uploaded media during
# development. Only active if DEBUG==True and the URL prefix is a local
# path. Production media should NOT be served by Django.
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
    urlpatterns.append(url(r'^500/$', defaults.server_error))
|
azide0x37/modocDB
|
refs/heads/master
|
venv/lib/python2.7/site-packages/setuptools/command/develop.py
|
477
|
from setuptools.command.easy_install import easy_install
from distutils.util import convert_path, subst_vars
from pkg_resources import Distribution, PathMetadata, normalize_path
from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError
import os, sys, setuptools, glob
class develop(easy_install):
    """Set up package for development.

    Installs the package in 'development mode': instead of copying files,
    an .egg-link file pointing at the source checkout is written into the
    installation directory, so edits to the source take effect immediately.
    """
    description = "install package in 'development mode'"

    user_options = easy_install.user_options + [
        ("uninstall", "u", "Uninstall this source package"),
        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
    ]

    boolean_options = easy_install.boolean_options + ['uninstall']

    command_consumes_arguments = False  # override base

    def run(self):
        # Either remove the .egg-link, or (re)install in development mode.
        if self.uninstall:
            self.multi_version = True
            self.uninstall_link()
        else:
            self.install_for_development()
        self.warn_deprecated_options()

    def initialize_options(self):
        self.uninstall = None
        self.egg_path = None
        easy_install.initialize_options(self)
        self.setup_path = None
        self.always_copy_from = '.'  # always copy eggs installed in curdir

    def finalize_options(self):
        ei = self.get_finalized_command("egg_info")
        if ei.broken_egg_info:
            raise DistutilsError(
                "Please rename %r to %r before using 'develop'"
                % (ei.egg_info, ei.broken_egg_info)
            )
        self.args = [ei.egg_name]
        easy_install.finalize_options(self)
        self.expand_basedirs()
        self.expand_dirs()
        # pick up setup-dir .egg files only: no .egg-info
        self.package_index.scan(glob.glob('*.egg'))
        self.egg_link = os.path.join(self.install_dir, ei.egg_name + '.egg-link')
        self.egg_base = ei.egg_base
        if self.egg_path is None:
            self.egg_path = os.path.abspath(ei.egg_base)
        target = normalize_path(self.egg_base)
        if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
            raise DistutilsOptionError(
                "--egg-path must be a relative path from the install"
                " directory to " + target
            )
        # Make a distribution for the package's source
        self.dist = Distribution(
            target,
            PathMetadata(target, os.path.abspath(ei.egg_info)),
            project_name=ei.egg_name
        )
        # Compute the relative path from the install dir back to the setup
        # script and sanity-check that it round-trips to the current dir.
        p = self.egg_base.replace(os.sep, '/')
        if p != os.curdir:
            p = '../' * (p.count('/') + 1)
        self.setup_path = p
        p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
        if p != normalize_path(os.curdir):
            raise DistutilsOptionError(
                "Can't get a consistent path to setup script from"
                " installation directory", p, normalize_path(os.curdir))

    def install_for_development(self):
        if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
            # If we run 2to3 we can not do this inplace:
            # Ensure metadata is up-to-date
            self.reinitialize_command('build_py', inplace=0)
            self.run_command('build_py')
            bpy_cmd = self.get_finalized_command("build_py")
            build_path = normalize_path(bpy_cmd.build_lib)
            # Build extensions
            self.reinitialize_command('egg_info', egg_base=build_path)
            self.run_command('egg_info')
            self.reinitialize_command('build_ext', inplace=0)
            self.run_command('build_ext')
            # Fixup egg-link and easy-install.pth
            ei_cmd = self.get_finalized_command("egg_info")
            self.egg_path = build_path
            self.dist.location = build_path
            self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)  # XXX
        else:
            # Without 2to3 inplace works fine:
            self.run_command('egg_info')
            # Build extensions in-place
            self.reinitialize_command('build_ext', inplace=1)
            self.run_command('build_ext')
        self.install_site_py()  # ensure that target dir is site-safe
        if setuptools.bootstrap_install_from:
            self.easy_install(setuptools.bootstrap_install_from)
            setuptools.bootstrap_install_from = None
        # create an .egg-link in the installation dir, pointing to our egg
        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
        if not self.dry_run:
            # try/finally so the link file is closed even if write() fails.
            f = open(self.egg_link, "w")
            try:
                f.write(self.egg_path + "\n" + self.setup_path)
            finally:
                f.close()
        # postprocess the installed distro, fixing up .pth, installing scripts,
        # and handling requirements
        self.process_distribution(None, self.dist, not self.no_deps)

    def uninstall_link(self):
        if os.path.exists(self.egg_link):
            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
            # try/finally so the handle is released even if reading fails.
            egg_link_file = open(self.egg_link)
            try:
                contents = [line.rstrip() for line in egg_link_file]
            finally:
                egg_link_file.close()
            if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
                log.warn("Link points to %s: uninstall aborted", contents)
                return
            if not self.dry_run:
                os.unlink(self.egg_link)
        if not self.dry_run:
            self.update_pth(self.dist)  # remove any .pth link to us
        if self.distribution.scripts:
            # XXX should also check for entry point scripts!
            log.warn("Note: you must uninstall or replace scripts manually!")

    def install_egg_scripts(self, dist):
        if dist is not self.dist:
            # Installing a dependency, so fall back to normal behavior
            return easy_install.install_egg_scripts(self, dist)
        # create wrapper scripts in the script dir, pointing to dist.scripts
        # new-style...
        self.install_wrapper_scripts(dist)
        # ...and old-style
        for script_name in self.distribution.scripts or []:
            script_path = os.path.abspath(convert_path(script_name))
            script_name = os.path.basename(script_path)
            # 'rU' preserves universal-newline reads for old-style scripts;
            # try/finally guarantees the handle is closed if read() raises.
            f = open(script_path, 'rU')
            try:
                script_text = f.read()
            finally:
                f.close()
            self.install_script(dist, script_name, script_text, script_path)
|
0x0all/nupic
|
refs/heads/master
|
tests/integration/py2/nupic/swarming/experiments/spatial_classification/description.py
|
1
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'/Users/ronmarianetti/nta/eng/lib/python2.6/site-packages/grokengine/frameworks/opf/expGenerator/ExpGenerator.pyc'
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
# _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
# w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
config = {
# Type of model that the rest of these parameters apply to.
'model': "CLA",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': {
'fields': [],
'days': 0,
'hours': 0,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0
},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'NontemporalClassification',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
# at each step 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 0,
# Example:
# dsEncoderSchema = [
# DeferredDictLookup('__field_name_encoder'),
# ],
#
# (value generated from DS_ENCODER_SCHEMA)
'encoders': {
'address': {
'fieldname': u'address',
'n': 300,
'name': u'address',
'type': 'SDRCategoryEncoder',
'w': 21
},
'_classifierInput': {
'name': u'_classifierInput',
'fieldname': u'consumption',
'classifierOnly': True,
'clipInput': True,
'maxval': 200,
'minval': 0,
'n': 1500,
'type': 'ScalarEncoder',
'w': 21
},
'gym': {
'fieldname': u'gym',
'n': 300,
'name': u'gym',
'type': 'SDRCategoryEncoder',
'w': 21
},
'timestamp_dayOfWeek': {
'dayOfWeek': (7, 3),
'fieldname': u'timestamp',
'name': u'timestamp_dayOfWeek',
'type': 'DateEncoder'
},
'timestamp_timeOfDay': {
'fieldname': u'timestamp',
'name': u'timestamp_timeOfDay',
'timeOfDay': (7, 8),
'type': 'DateEncoder'
}
},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
# Valid keys is the desired combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : None,
},
'spEnable': False,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActivePerInhArea': 40,
'seed': 1956,
# coincInputPoolPct
# What percent of the columns's receptive field is available
# for potential synapses. At initialization time, we will
# choose coincInputPoolPct * (2*coincInputRadius+1)^2
'coincInputPoolPct': 0.5,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10. Cells whose activity
# level before inhibition falls below minDutyCycleBeforeInh
# will have their own internal synPermConnectedCell
# threshold set below this default value.
# (This concept applies to both SP and TP and so 'cells'
# is correct here as opposed to 'columns')
'synPermConnected': 0.1,
'synPermActiveInc': 0.1,
'synPermInactiveDec': 0.01,
'randomSP': 0,
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tpEnable' : False,
'tpParams': {
# TP diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nta/trunk/py/nupic/research/TP.py and TP10X*.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selector (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
#
# TODO: need better explanation
'newSynapseCount': 20,
# Maximum number of synapses per segment
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSegmentsPerCell': 128,
# Initial Permanence
# TODO: need better explanation
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TP how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
'pamLength': 1,
},
'clParams': {
'regionName' : 'CLAClassifierRegion',
'implementation': 'py',
# Classifier diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
'clVerbosity' : 0,
# This controls how fast the classifier learns/forgets. Higher values
# make it adapt faster and forget older patterns faster.
'alpha': 0.001,
# This is set after the call to updateConfigFromSubConfig and is
# computed from the aggregationInfo and predictAheadTime.
'steps': '0',
},
'anomalyParams': { u'anomalyCacheRecords': None,
u'autoDetectThreshold': None,
u'autoDetectWaitRecords': None},
'trainSPNetOnlyIfRequested': False,
},
}
# end of config dictionary
# Adjust base config dictionary for any modifications if imported from a
# sub-experiment
updateConfigFromSubConfig(config)
# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
if config['predictAheadTime'] is not None:
  predictionSteps = int(round(aggregationDivide(
      config['predictAheadTime'], config['aggregationInfo'])))
  # The model must predict at least one aggregated step ahead.
  assert (predictionSteps >= 1)
  config['modelParams']['clParams']['steps'] = str(predictionSteps)
# Adjust config by applying ValueGetterBase-derived
# futures. NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment (if any)
applyValueGettersToContainer(config)
control = {
# The environment that the current model is being run in
"environment": 'grok',
# Input stream specification per py/nupic/frameworks/opf/jsonschema/stream_def.json.
#
'dataset' : { u'info': u'testSpatialClassification',
u'streams': [ { u'columns': [u'*'],
u'info': u'test data',
u'source': u'file://test_data.csv'}],
u'version': 1},
# Iteration count: maximum number of iterations. Each iteration corresponds
# to one record from the (possibly aggregated) dataset. The task is
# terminated when either number of iterations reaches iterationCount or
# all records in the (possibly aggregated) database have been processed,
# whichever occurs first.
#
# iterationCount of -1 = iterate over the entire dataset
#'iterationCount' : ITERATION_COUNT,
# A dictionary containing all the supplementary parameters for inference
"inferenceArgs":{u'predictedField': u'consumption', u'predictionSteps': [0]},
# Metrics: A list of MetricSpecs that instantiate the metrics that are
# computed for this experiment
'metrics':[
MetricSpec(field=u'consumption', metric='multiStep',
inferenceElement='multiStepBestPredictions',
params={'window': 1000, 'steps': [0], 'errorMetric': 'avg_err'})
],
# Logged Metrics: A sequence of regular expressions that specify which of
# the metrics from the Inference Specifications section MUST be logged for
# every prediction. The regex's correspond to the automatically generated
# metric labels. This is similar to the way the optimization metric is
# specified in permutations.py.
'loggedMetrics': ['.*'],
}
################################################################################
################################################################################
# Entry point consumed by the OPF framework: bundles the model configuration
# and the experiment control section into a single description object.
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
                                                control=control)
|
subodhchhabra/airflow
|
refs/heads/master
|
airflow/utils/asciiart.py
|
9
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
bug = r"""\
=, .=
=.| ,---. |.=
=.| "-(:::::)-" |.=
\\__/`-.|.-'\__//
`-| .::| .::|-' Pillendreher
_|`-._|_.-'|_ (Scarabaeus sacer)
/.-| | .::|-.\
// ,| .::|::::|. \\
|| //\::::|::' /\\ ||
/'\|| `.__|__.' ||/'\
^ \\ // ^
/'\ /'\
^ ^
"""
nukular = r"""
____/ ( ( ) ) \___
/( ( ( ) _ )) ) )\
(( ( )( ) ) ( ) )
((/ ( _( ) ( _) ) ( () ) )
( ( ( (_) (( ( ) .((_ ) . )_
( ( ) ( ( ) ) ) . ) ( )
( ( ( ( ) ( _ ( _) ). ) . ) ) ( )
( ( ( ) ( ) ( )) ) _)( ) ) )
( ( ( \ ) ( (_ ( ) ( ) ) ) ) )) ( )
( ( ( ( (_ ( ) ( _ ) ) ( ) ) )
( ( ( ( ( ) (_ ) ) ) _) ) _( ( )
(( ( )( ( _ ) _) _(_ ( (_ )
(_((__(_(__(( ( ( | ) ) ) )_))__))_)___)
((__) \\||lll|l||/// \_))
( /(/ ( ) ) )\ )
( ( ( ( | | ) ) )\ )
( /(| / ( )) ) ) )) )
( ( ((((_(|)_))))) )
( ||\(|(|)|/|| )
( |(||(||)|||| )
( //|/l|||)|\\ \ )
(/ / // /|//||||\\ \ \ \ _)
-------------------------------------------------------------------------------
"""
|
phanikiran2/Extending-RED-qdisc-in-ns3-to-support--NLRED
|
refs/heads/master
|
bindings/python/rad_util.py
|
212
|
# Copyright (c) 2007 RADLogic
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Provide various handy Python functions.
Running this script directly will execute the doctests.
Functions:
int2bin(i, n) -- Convert integer to binary string.
bin2int(bin_string) -- Convert binary string to integer.
reverse(input_string) -- Reverse a string.
transpose(matrix) -- Transpose a list of lists.
polygon_area(points_list) -- Calculate the area of an arbitrary polygon.
timestamp() -- Return string containing current time stamp.
pt2str(point) -- Return prettier string version of point tuple.
gcf(a, b) -- Return the greatest common factor of two numbers.
lcm(a, b) -- Return the least common multiple of two numbers.
permutations(input_list) -- Generate all permutations of a list of items.
reduce_fraction(fraction) -- Reduce fraction (num, denom) to simplest form.
quantile(l, p) -- Return p quantile of list l. E.g. p=0.25 for q1.
trim(l) -- Discard values in list more than 1.5*IQR outside IQR.
nice_units(value) -- Return value converted to human readable units.
uniquify(seq) -- Return sequence with duplicate items in sequence seq removed.
reverse_dict(d) -- Return the dictionary with the items as keys and vice-versa.
lsb(x, n) -- Return the n least significant bits of x.
gray_encode(i) -- Gray encode the given integer.
random_vec(bits, max_value=None) -- Return a random binary vector.
binary_range(bits) -- Return list of all possible binary numbers width=bits.
float_range([start], stop, [step]) -- Return range of floats.
find_common_fixes(s1, s2) -- Find common (prefix, suffix) of two strings.
is_rotated(seq1, seq2) -- Return true if the list is a rotation of other list.
getmodule(obj) -- Return the module that contains the object definition of obj.
(use inspect.getmodule instead, though)
get_args(argv) -- Store command-line args in a dictionary.
This module requires Python >= 2.2
"""
__author__ = 'Tim Wegener <twegener@radlogic.com.au>'
__date__ = '$Date: 2007/03/27 03:15:06 $'
__version__ = '$Revision: 0.45 $'
__credits__ = """
David Chandler, for polygon area algorithm.
(http://www.davidchandler.com/AreaOfAGeneralPolygon.pdf)
"""
import re
import sys
import time
import random
try:
True, False
except NameError:
True, False = (1==1, 0==1)
def int2bin(i, n):
    """Convert decimal integer i to n-bit binary number (string).

    Raises ValueError if i is negative or does not fit in n bits.

    >>> int2bin(0, 8)
    '00000000'
    >>> int2bin(123, 8)
    '01111011'
    >>> int2bin(15, 2)
    Traceback (most recent call last):
    ValueError: Value too large for given number of bits.
    """
    # The original converted via hex() plus a hex-digit lookup table (and
    # stripped Python 2 long's trailing 'L'); format(i, 'b') does the same
    # job directly and handles arbitrarily large integers.
    if i < 0:
        # hex(-5) starts with '-', which the old table-based code turned
        # into an obscure KeyError; fail with a clear message instead.
        raise ValueError("int2bin requires a non-negative integer.")
    result = format(i, 'b')
    # Raise an error if truncating to n bits would change the value.
    if '1' in result[:-n]:
        raise ValueError("Value too large for given number of bits.")
    result = result[-n:]
    # Zero-pad if the binary form is shorter than n bits.
    return '0' * (n - len(result)) + result
def bin2int(bin_string):
    """Convert binary number string to decimal integer.

    Note: Python > v2 has int(bin_string, 2)

    >>> bin2int('1111')
    15
    >>> bin2int('0101')
    5
    """
    # The original carried a large commented-out manual implementation;
    # it has been removed.  int() already raises ValueError for strings
    # containing non-binary digits, covering the old validation.
    return int(bin_string, 2)
def reverse(input_string):
    """Return input_string reversed.

    Useful for strings of binary numbers.

    >>> reverse('abc')
    'cba'
    """
    # A slice with step -1 walks the string back to front.
    return input_string[::-1]
def transpose(matrix):
    """Transpose a list of lists.

    >>> transpose([['a', 'b', 'c'], ['d', 'e', 'f'], ['g', 'h', 'i']])
    [['a', 'd', 'g'], ['b', 'e', 'h'], ['c', 'f', 'i']]
    >>> transpose([['a', 'b', 'c'], ['d', 'e', 'f']])
    [['a', 'd'], ['b', 'e'], ['c', 'f']]
    >>> transpose([['a', 'b'], ['d', 'e'], ['g', 'h']])
    [['a', 'd', 'g'], ['b', 'e', 'h']]
    """
    # zip(*matrix) pairs up the i-th element of every row.  Wrap each tuple
    # in list() explicitly: on Python 3 both zip() and map() return lazy
    # iterators, so the original `map(list, zip(*matrix))` no longer
    # produced the documented list-of-lists.
    return [list(row) for row in zip(*matrix)]
def polygon_area(points_list, precision=100):
    """Calculate the area of an arbitrary polygon.

    Return the area of the polygon as a positive float.

    Arguments:
    points_list -- list of point tuples [(x0, y0), (x1, y1), ...]
                   (Unclosed polygons will be closed automatically.)
    precision -- internal arithmetic precision (integer arithmetic).

    >>> polygon_area([(0, 0), (0, 1), (1, 1), (1, 2), (2, 2), (2, 0), (0, 0)])
    3.0

    Credits:
    Area of a General Polygon by David Chandler
    http://www.davidchandler.com/AreaOfAGeneralPolygon.pdf
    """
    # Scale up co-ordinates and convert them to integers.  Work on a copy:
    # the original mutated the caller's list in place.
    pts = [(int(p[0] * precision), int(p[1] * precision))
           for p in points_list]
    # Close the polygon if it is not already closed.
    if pts[-1] != pts[0]:
        pts.append(pts[0])
    # Shoelace-style cross-product sum over consecutive point pairs.
    area = 0
    for (x_i, y_i), (x_next, y_next) in zip(pts, pts[1:]):
        area += (x_next * y_i) - (y_next * x_i)
    # Divide by 2.0 (not integer 2): the sum is an odd/even integer in
    # scaled units, and Python 2's integer division silently dropped the
    # half-unit for odd sums.
    area = abs(area) / 2.0
    # Unscale the area back to the caller's units.
    return float(area) / (precision ** 2)
def timestamp():
    """Return string containing current time stamp.

    Note: In Python 2 onwards can use time.asctime() with no arguments.
    """
    # Delegates to time.asctime(), e.g. 'Mon Jun  1 12:00:00 2020'.
    return time.asctime()
def pt2str(point):
    """Return a prettier string version of a 2-tuple point.

    >>> pt2str((1.8, 1.9))
    '(1.8, 1.9)'
    """
    x_text = str(point[0])
    y_text = str(point[1])
    return "(" + x_text + ", " + y_text + ")"
def gcf(a, b, epsilon=1e-16):
    """Return the greatest common factor of a and b (Euclidean algorithm).

    Arguments:
    a, b -- two numbers.  If both are integers the result is an integer,
            otherwise a float.
    epsilon -- floats smaller in magnitude than this count as zero
               (default: 1e-16).

    >>> gcf(12, 34)
    2
    >>> gcf(13.5, 4)
    0.5
    >>> gcf(-2, 4)
    2
    >>> gcf(5, 0)
    5

    By (a convenient) definition:
    >>> gcf(0, 0)
    0
    """
    larger, smaller = max(a, b), min(a, b)
    # Euclid: repeatedly replace the pair by (smaller, larger mod smaller)
    # until the remainder vanishes (or is negligibly small for floats).
    while smaller and abs(smaller) > epsilon:
        larger, smaller = smaller, larger % smaller
    return abs(larger)
def lcm(a, b, precision=None):
    """Return the least common multiple of a and b, via the gcf function.

    Arguments:
    a, b -- two numbers.  If both are integers the result is an integer,
            otherwise a float.
    precision -- unused; kept only for backward compatibility.

    >>> lcm(21, 6)
    42
    >>> lcm(2.5, 3.5)
    17.5

    By (an arbitrary) definition:
    >>> lcm(0, 0)
    0
    """
    divisor = gcf(a, b)
    if not divisor:
        # Both inputs are zero; lcm(0, 0) is defined as 0.
        return 0
    # Divide first to limit the size of intermediate values.
    # (See http://en.wikipedia.org/wiki/Least_common_multiple )
    return a * (b / divisor)
def permutations(input_list):
    """Return a list containing all permutations of the input list.

    Recursive implementation: the head item is inserted at every position
    of every permutation of the tail.

    >>> sorted(permutations(['a', 'b', 'c']))[0]
    ['a', 'b', 'c']
    """
    if len(input_list) <= 1:
        # Base case: an empty or single-item list has one permutation.
        return [input_list]
    head = input_list[0]
    results = []
    # For every permutation of the tail (recursive call)...
    for tail_perm in permutations(input_list[1:]):
        # ...insert the head at each possible position.
        for position in range(len(input_list)):
            candidate = tail_perm[:]
            candidate.insert(position, head)
            results.append(candidate)
    return results
def reduce_fraction(fraction):
    """Reduce fraction tuple to simplest form. fraction=(num, denom)

    >>> reduce_fraction((14, 7))
    (2, 1)
    >>> reduce_fraction((-2, 4))
    (-1, 2)
    >>> reduce_fraction((0, 4))
    (0, 1)
    >>> reduce_fraction((4, 0))
    (1, 0)
    """
    (numerator, denominator) = fraction
    common_factor = abs(gcf(numerator, denominator))
    if isinstance(numerator, int) and isinstance(denominator, int):
        # Floor division keeps integer fractions integral: on Python 3 the
        # original `/` (true division) returned floats, e.g. (2.0, 1.0).
        return (numerator // common_factor, denominator // common_factor)
    # Non-integer inputs keep the original true-division behaviour.
    return (numerator / common_factor, denominator / common_factor)
def quantile(l, p):
    """Return the p quantile of list l.  E.g. p=0.25 for q1.

    Uses linear interpolation between order statistics.
    See:
    http://rweb.stat.umn.edu/R/library/base/html/quantile.html
    """
    ordered = sorted(l)
    count = len(ordered)
    # Fractional rank (1-based), split into integer and fractional parts.
    rank = 1 + ((count - 1) * p)
    whole = int(rank)
    frac = rank - whole
    if whole < count:
        # Interpolate between the two neighbouring order statistics.
        return (1 - frac) * ordered[whole - 1] + frac * ordered[whole]
    # Rank landed exactly on the last element.
    return ordered[whole - 1]
def trim(l):
    """Discard values in list more than 1.5*IQR outside IQR.

    (IQR is inter-quartile-range.)  Returns a sorted, trimmed copy.

    This function uses rad_util.quantile

    1.5*IQR -- mild outlier
    3*IQR -- extreme outlier

    See:
    http://wind.cc.whecn.edu/~pwildman/statnew/section_7_-_exploratory_data_analysis.htm
    """
    ordered = sorted(l)
    # NOTE: the original also computed the median here but never used it
    # for trimming; that dead computation has been removed.
    q1 = quantile(ordered, 0.25)
    q3 = quantile(ordered, 0.75)
    iqr_extra = (q3 - q1) * 1.5
    lower = q1 - iqr_extra
    upper = q3 + iqr_extra
    # Keep only values inside the fence [q1 - 1.5*IQR, q3 + 1.5*IQR].
    return [x for x in ordered if lower <= x <= upper]
def nice_units(value, dp=0, sigfigs=None, suffix='', space=' ',
               use_extra_prefixes=False, use_full_name=False, mode='si'):
    """Return value converted to human readable units eg milli, micro, etc.

    Arguments:
    value -- number in base units
    dp -- number of decimal places to display (rounded)
    sigfigs -- number of significant figures to display (rounded)
               This overrides dp if set.
    suffix -- optional unit suffix to append to unit multiplier
    space -- separator between value and unit multiplier (default: ' ')
    use_extra_prefixes -- use hecto, deka, deci and centi as well if set.
                          (default: False)
    use_full_name -- use full name for multiplier symbol,
                     e.g. milli instead of m (default: False)
    mode -- 'si' for SI prefixes, 'bin' for binary multipliers (1024, etc.)
            (default: 'si')

    SI prefixes from:
    http://physics.nist.gov/cuu/Units/prefixes.html
    (Greek mu changed to u.)
    Binary prefixes based on:
    http://physics.nist.gov/cuu/Units/binary.html

    >>> nice_units(2e-11)
    '20 p'
    >>> nice_units(2e-11, space='')
    '20p'
    """
    si_prefixes = {1e24: ('Y', 'yotta'),
                   1e21: ('Z', 'zetta'),
                   1e18: ('E', 'exa'),
                   1e15: ('P', 'peta'),
                   1e12: ('T', 'tera'),
                   1e9: ('G', 'giga'),
                   1e6: ('M', 'mega'),
                   1e3: ('k', 'kilo'),
                   1e-3: ('m', 'milli'),
                   1e-6: ('u', 'micro'),
                   1e-9: ('n', 'nano'),
                   1e-12: ('p', 'pico'),
                   1e-15: ('f', 'femto'),
                   1e-18: ('a', 'atto'),
                   1e-21: ('z', 'zepto'),
                   1e-24: ('y', 'yocto')
                   }
    if use_extra_prefixes:
        si_prefixes.update({1e2: ('h', 'hecto'),
                            1e1: ('da', 'deka'),
                            1e-1: ('d', 'deci'),
                            1e-2: ('c', 'centi')
                            })
    bin_prefixes = {2**10: ('K', 'kilo'),
                    2**20: ('M', 'mega'),
                    2**30: ('G', 'giga'),  # was mislabelled 'mega'
                    2**40: ('T', 'tera'),
                    2**50: ('P', 'peta'),
                    2**60: ('E', 'exa')
                    }
    if mode == 'bin':
        prefixes = bin_prefixes
    else:
        prefixes = si_prefixes
    prefixes[1] = ('', '')  # Unity (no prefix).
    # Determine the appropriate multiplier.  sorted() replaces the old
    # keys()/.sort() pair, which fails on Python 3 where dict.keys()
    # returns a view with no .sort() method.
    multipliers = sorted(prefixes.keys())
    mult_i = None
    for i in range(len(multipliers) - 1):
        if multipliers[i] <= value < multipliers[i + 1]:
            mult_i = i
            break
    if mult_i is None:
        # Out of table range: clamp to the smallest or largest multiplier.
        mult_i = 0 if value < multipliers[0] else len(multipliers) - 1
    mult = multipliers[mult_i]
    # Convert value for this multiplier.
    new_value = value / mult
    # Deal with a special case due to rounding: e.g. 999.99 at dp=0 would
    # otherwise display as '1000 ' instead of '1 k'.
    if sigfigs is None:
        if (mult_i < len(multipliers) - 1 and
                round(new_value, dp) ==
                round(multipliers[mult_i + 1] / mult, dp)):
            mult = multipliers[mult_i + 1]
            new_value = value / mult
    label_type = 1 if use_full_name else 0
    # Round and truncate to the requested precision.  '%.*f' takes the
    # width from an argument, so no eval() is needed (the original built
    # and eval()ed a format string).
    if sigfigs is None:
        str_value = '%.*f' % (dp, new_value)
    else:
        str_value = '%.*g' % (sigfigs, new_value)
    return str_value + space + prefixes[mult][label_type] + suffix
def uniquify(seq, preserve_order=False):
    """Return sequence with duplicate items in sequence seq removed.

    Based on a usenet post by Tim Peters: O(N) for hashable items via a
    dict, falling back to an O(N**2) scan for unhashable ones.

    Arguments:
    seq -- sequence
    preserve_order -- if not set the order will be arbitrary
                      (using this option incurs a speed penalty).
                      (default: False)

    >>> uniquify(['a', 'aa', 'b', 'b', 'ccc', 'ccc', 'd'], preserve_order=True)
    ['a', 'aa', 'b', 'ccc', 'd']
    """
    try:
        # Fast path: items are hashable.
        seen = {}
        if preserve_order:
            # Dave Kirby's method (f8): __setitem__ returns None (falsy),
            # so each item is recorded as a side effect of the filter.
            # http://www.peterbe.com/plog/uniqifiers-benchmark
            return [item for item in seq
                    if (item not in seen) and not seen.__setitem__(item, 0)]
        for item in seq:
            seen[item] = 0
        return seen.keys()
    except TypeError:
        # Slow path: an item was unhashable.
        kept = []
        keep = kept.append
        for item in seq:
            if item not in kept:
                keep(item)
        return kept


# Alias to noun form for backward compatibility.
unique = uniquify
def reverse_dict(d):
    """Return a dict with d's items as keys and d's keys as items.

    Note: The results will be arbitrary if the items are not unique
    (later duplicates win).

    >>> sorted(reverse_dict({'a': 1, 'b': 2}).items())
    [(1, 'a'), (2, 'b')]
    """
    # keys() and values() iterate in matching order, so zipping them
    # pairs each value with its key.
    return dict(zip(d.values(), d.keys()))
def lsb(x, n):
    """Return the n least significant bits of x.

    >>> lsb(13, 3)
    5
    """
    # Build an n-bit all-ones mask and select those bits of x.
    mask = (1 << n) - 1
    return x & mask
def gray_encode(i):
    """Gray encode the given integer.

    In a Gray code, consecutive integers' encodings differ in one bit.
    """
    return i ^ (i >> 1)
def random_vec(bits, max_value=None):
    """Generate a random binary vector of length bits and given max value.

    Returns a string of '0'/'1' characters.  If max_value is given (and
    smaller than 2**bits - 1), the vector's integer value is rescaled to
    lie in [0, max_value].
    """
    vector = ""
    # Draw 10 random bits at a time until at least `bits` are available.
    for _ in range(int(bits / 10) + 1):
        chunk = int((2 ** 10) * random.random())
        vector += int2bin(chunk, 10)
    if max_value and (max_value < 2 ** bits - 1):
        # Rescale with pure integer arithmetic (multiply before floor-
        # dividing).  The original `(int(v, 2) / (2**bits - 1)) * max_value`
        # floor-divided to 0 on Python 2, and on Python 3 produced a float
        # that int2bin cannot encode.
        scaled = int(vector, 2) * max_value // (2 ** bits - 1)
        vector = int2bin(scaled, bits)
    return vector[0:bits]
def binary_range(bits):
    """Return a list of all possible binary numbers in order with width=bits.

    >>> binary_range(2)
    ['00', '01', '10', '11']

    It would be nice to extend it to match the
    functionality of python's range() built-in function.
    """
    # Preserve the original behaviour of returning [] for bits < 1.
    if bits < 1:
        return []
    # Enumerate 0 .. 2**bits - 1 directly and zero-pad each binary string;
    # this replaces the original hand-rolled ripple-carry counter, which
    # produced the same counting sequence.
    return [bin(x)[2:].zfill(bits) for x in range(2 ** bits)]
def float_range(start, stop=None, step=None):
    """Return a list containing an arithmetic progression of floats.

    Return a list of floats between 0.0 (or start) and stop with an
    increment of step.  Like range(), stop is excluded; unlike range(),
    float increments are accepted.

    NOTE(review): values are produced by repeated addition, so float
    rounding error accumulates for long ranges (original behaviour).
    """
    # One-argument form: start becomes the stop value.
    if stop is None:
        start, stop = 0.0, float(start)
    else:
        start = float(start)
    if step is None:
        step = 1.0
    values = []
    current = start
    while current < stop:
        values.append(current)
        current += step
    return values
def find_common_fixes(s1, s2):
    """Find common (prefix, suffix) of two strings.

    >>> find_common_fixes('abc', 'def')
    ('', '')
    >>> find_common_fixes('abcelephantdef', 'abccowdef')
    ('abc', 'def')
    >>> find_common_fixes('abcelephantdef', 'abccow')
    ('abc', '')
    >>> find_common_fixes('elephantdef', 'abccowdef')
    ('', 'def')
    """
    limit = min(len(s1), len(s2))
    # Count matching characters from the front...
    prefix_len = 0
    while prefix_len < limit and s1[prefix_len] == s2[prefix_len]:
        prefix_len += 1
    # ...and from the back (at most `limit` characters, as the original).
    suffix_len = 0
    while suffix_len < limit and s1[-(suffix_len + 1)] == s2[-(suffix_len + 1)]:
        suffix_len += 1
    prefix = s1[:prefix_len]
    suffix = s1[len(s1) - suffix_len:]
    return (prefix, suffix)
def is_rotated(seq1, seq2):
    """Return true if the first sequence is a rotation of the second.

    >>> int(is_rotated(['A', 'B', 'C', 'D'], ['C', 'D', 'A', 'B']))
    1
    >>> int(is_rotated(['A', 'B', 'C', 'D'], ['C', 'D', 'B', 'A']))
    0
    """
    # Different lengths can never be rotations of each other.
    if len(seq1) != len(seq2):
        return False
    # Any rotation of seq1 appears as a contiguous slice of seq1 + seq1;
    # only offsets where seq2's head item occurs need checking.
    doubled = seq1 + seq1
    head_item = seq2[0]
    for offset, item in enumerate(seq1):
        if item == head_item and doubled[offset:offset + len(seq1)] == seq2:
            return True
    return False
def getmodule(obj):
    """Return the module that contains the object definition of obj.

    Note: Use inspect.getmodule instead.

    Arguments:
    obj -- python obj, generally a class or a function

    Raises ValueError for a class with no plain-function attributes.

    Works on both Python 3 (functions expose __globals__) and Python 2
    (func_globals); the original only checked func_globals and therefore
    broke on Python 3.

    Discussion:
    This approach is slightly hackish, and won't work in various
    situations.  However, this was the approach recommended by GvR; see
    his post in this thread:
    http://groups.google.com.au/group/comp.lang.python/browse_thread/thread/966a7bdee07e3b34/c3cab3f41ea84236
    """
    def _globals_of(candidate):
        # A function carries its defining module's namespace as
        # __globals__ (Py3) or func_globals (Py2); other objects yield None.
        return (getattr(candidate, '__globals__', None) or
                getattr(candidate, 'func_globals', None))

    func_globals = _globals_of(obj)
    if func_globals is None:
        # Handle classes: look for any function attribute.
        for item in obj.__dict__.values():
            func_globals = _globals_of(item)
            if func_globals is not None:
                break
    if func_globals is None:
        raise ValueError("No functions attached to object: %r" % obj)
    module_name = func_globals['__name__']
    # Get module.
    return sys.modules[module_name]
def round_grid(value, grid, mode=0):
    """Round off the given value to the given grid size.

    Arguments:
    value -- value to be rounded
    grid -- result must be a multiple of this
    mode -- 0 nearest, 1 up, -1 down

    >>> round_grid(7.5, 5)
    10
    >>> round_grid(7.5, 5, mode=-1)
    5
    >>> round_grid(7.3, 5, mode=1)
    10
    >>> round_grid(7.3, 5.0, mode=1)
    10.0
    """
    off_grid = value % grid
    # Defaulting add_one to 0 covers both the round-down mode and the
    # previously-crashing case where value is already on the grid with
    # mode=1 or mode=-1 (add_one was left unbound -> NameError).
    add_one = 0
    if mode == 0:
        # Nearest: bump up when at or past the halfway point.
        add_one = int(off_grid >= (grid / 2.0))
    elif mode == 1 and off_grid:
        # Up: bump whenever value is off-grid.
        add_one = 1
    return (int(value / grid) + add_one) * grid
def get_args(argv):
    """Store command-line args in a dictionary.

    -, -- prefixes are removed.
    Items not prefixed with - or -- are stored as a list, indexed by 'args'.
    For options that take a value use --option=value.

    Consider using optparse or getopt (in Python standard library) instead.
    """
    options = {}
    positional = []
    for token in argv:
        if not token.startswith('-'):
            positional.append(token)
            continue
        # Strip leading dashes, then split on '='; only a single
        # name=value pair carries a value (matching the original).
        parts = re.sub(r'^-+', '', token).split('=')
        options[parts[0]] = parts[1] if len(parts) == 2 else None
    options['args'] = positional
    return options
if __name__ == '__main__':
    # Running this module directly executes the doctests embedded in the
    # docstrings above.
    import doctest
    doctest.testmod(sys.modules['__main__'])
|
pmisik/buildbot
|
refs/heads/master
|
master/buildbot/test/util/fuzz.py
|
6
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import os
from twisted.internet import defer
from twisted.internet import reactor
from twisted.trial import unittest
class FuzzTestCase(unittest.TestCase):
    """Base class for fuzz tests: subclasses implement do_fuzz(endTime).

    The single test method repeatedly invokes do_fuzz until FUZZ_TIME
    seconds have elapsed.  The test only exists at all when the
    BUILDBOT_FUZZ environment variable is set.
    """

    # run each test case for 10s
    FUZZ_TIME = 10

    @defer.inlineCallbacks
    def test_fuzz(self):
        # note that this will loop if do_fuzz doesn't take long enough
        endTime = reactor.seconds() + self.FUZZ_TIME
        while reactor.seconds() < endTime:
            yield self.do_fuzz(endTime)

    # delete this test case entirely if fuzzing is not enabled
    if 'BUILDBOT_FUZZ' not in os.environ:
        del test_fuzz
|
317070/Twitch-plays-LSD-neural-net
|
refs/heads/master
|
mat2npy.py
|
6
|
import numpy as np
import scipy.io


def _export_vgg(mat_path, layer_idxs, params_path, mean_path, classes_path):
    """Convert one MatConvNet VGG .mat model into .npy files.

    mat_path -- input .mat model file
    layer_idxs -- indices of the weight-carrying layers in data['layers']
    params_path -- output file for the [W, b, W, b, ...] parameter list
    mean_path -- output file for the normalization (mean image) array
    classes_path -- output file for the class-name list
    """
    data = scipy.io.loadmat(mat_path)
    print(data.keys())
    params = []
    for i in layer_idxs:
        W = data['layers'][0][i][0][0][0][0][0]
        # Reorder weight axes (3, 2, 0, 1) -- presumably Matlab's HWIO
        # layout to OIHW; confirm against the consuming network code.
        W = np.transpose(W, (3, 2, 0, 1))
        b = data['layers'][0][i][0][0][0][0][1][0]
        #W = W[:,:,::-1,::-1]
        print(W.shape, b.shape)
        params.extend([W, b])
    np.save(params_path, params)
    np.save(mean_path, data['normalization'][0][0][0])
    np.save(classes_path, data["classes"][0][0].tolist()[1][0])


# The 16- and 19-layer models differ only in file names and layer indices;
# the original duplicated the whole conversion loop for each (and used
# Python-2-only print statements).
_export_vgg("data/imagenet-vgg-verydeep-16.mat",
            [0, 2, 5, 7, 10, 12, 14, 17, 19, 21, 24, 26, 28, 31, 33, 35],
            "data/vgg16.npy", "data/mean.npy", "data/classes.npy")
_export_vgg("data/imagenet-vgg-verydeep-19.mat",
            [0, 2, 5, 7, 10, 12, 14, 16, 19, 21, 23, 25, 28, 30, 32, 34, 37, 39, 41],
            "data/vgg19.npy", "data/mean-19.npy", "data/classes-19.npy")
|
kantlove/flask-simple-page
|
refs/heads/master
|
Lib/site-packages/pip/_vendor/cachecontrol/compat.py
|
317
|
# urljoin moved into urllib.parse on Python 3; fall back to the
# Python 2 location.
try:
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin


# Prefer the C-accelerated cPickle on Python 2; Python 3's pickle module
# already provides the fast implementation under the plain name.
try:
    import cPickle as pickle
except ImportError:
    import pickle


# Re-exported here so the rest of cachecontrol imports these vendored
# urllib3 names from a single place.
from pip._vendor.requests.packages.urllib3.response import HTTPResponse
from pip._vendor.requests.packages.urllib3.util import is_fp_closed
|
doduytrung/odoo-8.0
|
refs/heads/master
|
addons/account/wizard/account_report_print_journal.py
|
378
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from lxml import etree
class account_print_journal(osv.osv_memory):
    """Transient wizard that configures and launches the journal report."""

    _inherit = "account.common.journal.report"
    _name = 'account.print.journal'
    _description = 'Account Print Journal'

    # Wizard options: how report lines are sorted, and which journals to
    # include.
    _columns = {
        'sort_selection': fields.selection([('l.date', 'Date'),
                                            ('am.name', 'Journal Entry Number'),],
                                           'Entries Sorted by', required=True),
        'journal_ids': fields.many2many('account.journal', 'account_print_journal_journal_rel', 'account_id', 'journal_id', 'Journals', required=True),
    }
    _defaults = {
        'sort_selection': 'am.name',
        'filter': 'filter_period',
        'journal_ids': False,
    }

    def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        '''
        used to set the domain on 'journal_ids' field: we exclude or only propose the journals of type
        sale/purchase (+refund) accordingly to the presence of the key 'sale_purchase_only' in the context.
        '''
        if context is None:
            context = {}
        res = super(account_print_journal, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
        # Rewrite the view XML so the journal_ids widget only offers the
        # journal types relevant to the caller.
        doc = etree.XML(res['arch'])
        if context.get('sale_purchase_only'):
            domain = "[('type', 'in', ('sale','purchase','sale_refund','purchase_refund'))]"
        else:
            domain = "[('type', 'not in', ('sale','purchase','sale_refund','purchase_refund'))]"
        nodes = doc.xpath("//field[@name='journal_ids']")
        for node in nodes:
            node.set('domain', domain)
        res['arch'] = etree.tostring(doc)
        return res

    def _print_report(self, cr, uid, ids, data, context=None):
        # Merge the wizard's sort option into the report data, then launch
        # the report variant matching the caller's context.
        if context is None:
            context = {}
        data = self.pre_print_report(cr, uid, ids, data, context=context)
        data['form'].update(self.read(cr, uid, ids, ['sort_selection'], context=context)[0])
        if context.get('sale_purchase_only'):
            return self.pool['report'].get_action(cr, uid, [], 'account.report_salepurchasejournal', data=data, context=context)
        else:
            return self.pool['report'].get_action(cr, uid, [], 'account.report_journal', data=data, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
williamluke4/Examenable
|
refs/heads/master
|
node_modules/sitemap/env/lib/python2.7/site-packages/setuptools/tests/test_upload_docs.py
|
522
|
"""upload_docs tests
"""
import sys, os, shutil, tempfile, unittest, site, zipfile
from setuptools.command.upload_docs import upload_docs
from setuptools.dist import Distribution
# Minimal setup.py written into the scratch project that the tests run in.
SETUP_PY = """\
from setuptools import setup
setup(name='foo')
"""
class TestUploadDocsTest(unittest.TestCase):
    """Tests for the upload_docs command's zipfile creation."""

    def setUp(self):
        # Create a scratch project directory containing a minimal setup.py
        # and chdir into it.
        self.dir = tempfile.mkdtemp()
        setup = os.path.join(self.dir, 'setup.py')
        f = open(setup, 'w')
        f.write(SETUP_PY)
        f.close()
        self.old_cwd = os.getcwd()
        os.chdir(self.dir)

        self.upload_dir = os.path.join(self.dir, 'build')
        os.mkdir(self.upload_dir)

        # A test document.
        f = open(os.path.join(self.upload_dir, 'index.html'), 'w')
        f.write("Hello world.")
        f.close()

        # An empty folder.
        os.mkdir(os.path.join(self.upload_dir, 'empty'))

        # Compare version_info tuples: the original string comparison
        # (sys.version >= "2.6") misorders versions such as "2.10".
        if sys.version_info >= (2, 6):
            self.old_base = site.USER_BASE
            site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
            self.old_site = site.USER_SITE
            site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()

    def tearDown(self):
        # Restore the working directory and remove everything setUp made.
        os.chdir(self.old_cwd)
        shutil.rmtree(self.dir)
        if sys.version_info >= (2, 6):
            shutil.rmtree(site.USER_BASE)
            shutil.rmtree(site.USER_SITE)
            site.USER_BASE = self.old_base
            site.USER_SITE = self.old_site

    def test_create_zipfile(self):
        # Test to make sure zipfile creation handles common cases.
        # This explicitly includes a folder containing an empty folder.
        dist = Distribution()
        cmd = upload_docs(dist)
        cmd.upload_dir = self.upload_dir
        cmd.target_dir = self.upload_dir
        tmp_dir = tempfile.mkdtemp()
        tmp_file = os.path.join(tmp_dir, 'foo.zip')
        try:
            cmd.create_zipfile(tmp_file)
            assert zipfile.is_zipfile(tmp_file)
            # The empty folder must not appear in the archive.
            zip_file = zipfile.ZipFile(tmp_file)
            assert zip_file.namelist() == ['index.html']
            zip_file.close()
        finally:
            shutil.rmtree(tmp_dir)
|
ObsidianBlk/GemRB--Unofficial-
|
refs/heads/master
|
gemrb/GUIScripts/bg1/GUICG10.py
|
3
|
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
#character generation, multi-class (GUICG10)
import GemRB
from GUIDefines import *
from ie_stats import *
import GUICommon
import CommonTables
import CharGenCommon
# Module-level GUI references shared between OnLoad and the button
# callbacks below.
ClassWindow = 0
TextAreaControl = 0
DoneButton = 0
def OnLoad():
	"""Build the multi-class selection window (GUICG10) for BG1 chargen."""
	global ClassWindow, TextAreaControl, DoneButton
	GemRB.LoadWindowPack("GUICG", 640, 480)
	ClassWindow = GemRB.LoadWindow(10)
	GUICommon.CloseOtherWindow (ClassWindow.Unload)
	# Table rows are 0-based; the loops run 1..rowcount, so each table
	# access subtracts 1 from the loop index.
	ClassCount = CommonTables.Classes.GetRowCount()+1
	RaceName = CommonTables.Races.GetRowName(GemRB.GetVar("Race")-1 )
	# First pass: turn every multiclass button into a disabled radio button.
	# Column 4 of the class table is non-zero for multiclass combinations;
	# single classes are skipped.  The first twelve buttons use control ids
	# j+2; later entries jump to id j+7 — presumably matching the CHU window
	# layout; TODO confirm against the GUICG10 window resource.
	j=0
	for i in range(1,ClassCount):
		if CommonTables.Classes.GetValue(i-1,4)==0:
			continue
		if j>11:
			Button = ClassWindow.GetControl(j+7)
		else:
			Button = ClassWindow.GetControl(j+2)
		Button.SetState(IE_GUI_BUTTON_DISABLED)
		Button.SetFlags(IE_GUI_BUTTON_RADIOBUTTON,OP_OR)
		j = j + 1
	# Second pass: label each button, and enable it only when the chosen
	# race allows that combination (non-zero cell in the race's column).
	j=0
	for i in range(1,ClassCount):
		ClassName = CommonTables.Classes.GetRowName(i-1)
		Allowed = CommonTables.Classes.GetValue(ClassName, RaceName)
		if CommonTables.Classes.GetValue(i-1,4)==0:
			continue
		if j>11:
			Button = ClassWindow.GetControl(j+7)
		else:
			Button = ClassWindow.GetControl(j+2)
		t = CommonTables.Classes.GetValue(i-1, 0)
		Button.SetText(t )
		j=j+1
		if Allowed ==0:
			continue
		Button.SetState(IE_GUI_BUTTON_ENABLED)
		Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, ClassPress)
		Button.SetVarAssoc("Class", i) #multiclass, actually
	BackButton = ClassWindow.GetControl(14)
	BackButton.SetText(15416)
	DoneButton = ClassWindow.GetControl(0)
	DoneButton.SetText(11973)
	TextAreaControl = ClassWindow.GetControl(12)
	TextAreaControl.SetText(17244)
	DoneButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, NextPress)
	BackButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, CharGenCommon.BackPress)
	# Done stays disabled until a class is picked (see ClassPress).
	DoneButton.SetState(IE_GUI_BUTTON_DISABLED)
	ClassWindow.ShowModal(MODAL_SHADOW_NONE)
	return
def ClassPress():
	"""Show the description of the picked class combo and enable Done."""
	picked = GemRB.GetVar("Class") - 1
	description = CommonTables.Classes.GetValue(picked, 1)
	TextAreaControl.SetText(description)
	DoneButton.SetState(IE_GUI_BUTTON_ENABLED)
	return
def NextPress():
	"""Store the chosen class id on the new character and advance chargen."""
	# Column 5 of the class table holds the numeric class id.
	chosen_row = GemRB.GetVar ("Class") - 1
	class_id = CommonTables.Classes.GetValue (chosen_row, 5)
	slot = GemRB.GetVar ("Slot")
	GemRB.SetPlayerStat (slot, IE_CLASS, class_id)
	CharGenCommon.next()
|
xjnny/NRPhoto
|
refs/heads/master
|
node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
|
2710
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio user preferences file writer."""
import os
import re
import socket # for gethostname
import gyp.common
import gyp.easy_xml as easy_xml
#------------------------------------------------------------------------------
def _FindCommandInPath(command):
"""If there are no slashes in the command given, this function
searches the PATH env to find the given command, and converts it
to an absolute path. We have to do this because MSVS is looking
for an actual file to launch a debugger on, not just a command
line. Note that this happens at GYP time, so anything needing to
be built needs to have a full path."""
if '/' in command or '\\' in command:
# If the command already has path elements (either relative or
# absolute), then assume it is constructed properly.
return command
else:
# Search through the path list and find an existing file that
# we can access.
paths = os.environ.get('PATH','').split(os.pathsep)
for path in paths:
item = os.path.join(path, command)
if os.path.isfile(item) and os.access(item, os.X_OK):
return item
return command
def _QuoteWin32CommandLineArgs(args):
new_args = []
for arg in args:
# Replace all double-quotes with double-double-quotes to escape
# them for cmd shell, and then quote the whole thing if there
# are any.
if arg.find('"') != -1:
arg = '""'.join(arg.split('"'))
arg = '"%s"' % arg
# Otherwise, if there are any spaces, quote the whole arg.
elif re.search(r'[ \t\n]', arg):
arg = '"%s"' % arg
new_args.append(arg)
return new_args
class Writer(object):
  """Visual Studio XML user file writer."""

  def __init__(self, user_file_path, version, name):
    """Initializes the user file.

    Args:
      user_file_path: Path to the user file.
      version: Version info.
      name: Name of the user file.
    """
    self.user_file_path = user_file_path
    self.version = version
    self.name = name
    # Maps configuration name -> ['Configuration', {'Name': ...}, children...]
    self.configurations = {}

  def AddConfig(self, name):
    """Adds a configuration to the project.

    Args:
      name: Configuration name.
    """
    self.configurations[name] = ['Configuration', {'Name': name}]

  def AddDebugSettings(self, config_name, command, environment=None,
                       working_directory=""):
    """Adds a DebugSettings node to the user file for a particular config.

    Args:
      config_name: Name of the configuration the settings apply to.
      command: command line to run.  First element in the list is the
        executable.  All elements of the command will be quoted if
        necessary.
      environment: dict of NAME -> value environment variables (optional).
      working_directory: working directory for the debuggee (optional).
    """
    # FIX: default changed from the mutable `{}` to None (shared-default
    # pitfall); behavior is identical since an empty dict already yielded
    # an empty environment string below.
    command = _QuoteWin32CommandLineArgs(command)
    abs_command = _FindCommandInPath(command[0])
    if environment and isinstance(environment, dict):
      # FIX: .items() instead of Python-2-only .iteritems().
      env_list = ['%s="%s"' % (key, val)
                  for (key, val) in environment.items()]
      environment = ' '.join(env_list)
    else:
      environment = ''
    n_cmd = ['DebugSettings',
             {'Command': abs_command,
              'WorkingDirectory': working_directory,
              'CommandArguments': " ".join(command[1:]),
              'RemoteMachine': socket.gethostname(),
              'Environment': environment,
              'EnvironmentMerge': 'true',
              # Currently these are all "dummy" values that we're just setting
              # in the default manner that MSVS does it.  We could use some of
              # these to add additional capabilities, I suppose, but they might
              # not have parity with other platforms then.
              'Attach': 'false',
              'DebuggerType': '3',  # 'auto' debugger
              'Remote': '1',
              'RemoteCommand': '',
              'HttpUrl': '',
              'PDBPath': '',
              'SQLDebugging': '',
              'DebuggerFlavor': '0',
              'MPIRunCommand': '',
              'MPIRunArguments': '',
              'MPIRunWorkingDirectory': '',
              'ApplicationCommand': '',
              'ApplicationArguments': '',
              'ShimCommand': '',
              'MPIAcceptMode': '',
              'MPIAcceptFilter': ''
             }]
    # Find the config, and add it if it doesn't exist.
    if config_name not in self.configurations:
      self.AddConfig(config_name)
    # Add the DebugSettings onto the appropriate config.
    self.configurations[config_name].append(n_cmd)

  def WriteIfChanged(self):
    """Writes the user file."""
    configs = ['Configurations']
    # FIX: .items() instead of Python-2-only .iteritems(); sorted for a
    # deterministic output order.
    for config, spec in sorted(self.configurations.items()):
      configs.append(spec)
    content = ['VisualStudioUserFile',
               {'Version': self.version.ProjectVersion(),
                'Name': self.name
               },
               configs]
    easy_xml.WriteXmlIfChanged(content, self.user_file_path,
                               encoding="Windows-1252")
|
davidwilson-85/easymap
|
refs/heads/master
|
graphic_output/Pillow-4.2.1/Tests/test_image_toqimage.py
|
1
|
from helper import unittest, PillowTestCase, hopper
from test_imageqt import PillowQtTestCase
from PIL import ImageQt, Image
if ImageQt.qt_is_installed:
from PIL.ImageQt import QImage
try:
from PyQt5 import QtGui
from PyQt5.QtWidgets import QWidget, QHBoxLayout, QLabel, QApplication
QT_VERSION = 5
except (ImportError, RuntimeError):
try:
from PyQt4 import QtGui
from PyQt4.QtGui import QWidget, QHBoxLayout, QLabel, QApplication
QT_VERSION = 4
except (ImportError, RuntimeError):
from PySide import QtGui
from PySide.QtGui import QWidget, QHBoxLayout, QLabel, QApplication
QT_VERSION = 4
class TestToQImage(PillowQtTestCase, PillowTestCase):

    def test_sanity(self):
        # Round-trip every supported mode through toqimage/fromqimage and,
        # where possible, through a PNG save/reload cycle.
        PillowQtTestCase.setUp(self)
        for mode in ('RGB', 'RGBA', 'L', 'P', '1'):
            src = hopper(mode)
            data = ImageQt.toqimage(src)
            self.assertIsInstance(data, QImage)
            self.assertFalse(data.isNull())
            # reload directly from the qimage
            rt = ImageQt.fromqimage(data)
            # Paletted/gray/bilevel sources come back as RGB, so compare
            # against the RGB-converted original.
            if mode in ('L', 'P', '1'):
                self.assert_image_equal(rt, src.convert('RGB'))
            else:
                self.assert_image_equal(rt, src)
            if mode == '1':
                # BW appears to not save correctly on QT4 and QT5
                # kicks out errors on console:
                # libpng warning: Invalid color type/bit depth combination in IHDR
                # libpng error: Invalid IHDR data
                continue
            # Test saving the file
            # NOTE(review): the local name `tempfile` shadows the stdlib
            # module of the same name for the rest of this loop body.
            tempfile = self.tempfile('temp_{}.png'.format(mode))
            data.save(tempfile)
            # Check that it actually worked.
            reloaded = Image.open(tempfile)
            # Gray images appear to come back in palette mode.
            # They're roughly equivalent
            if QT_VERSION == 4 and mode == 'L':
                src = src.convert('P')
            self.assert_image_equal(reloaded, src)

    def test_segfault(self):
        # Regression test: building a widget whose label pixmap is a copy of
        # a QPixmap created from an ImageQt must not crash.
        PillowQtTestCase.setUp(self)
        app = QApplication([])
        ex = Example()
        assert(app)  # Silence warning
        assert(ex)  # Silence warning
if ImageQt.qt_is_installed:
    # Minimal Qt widget used by the segfault regression test: embeds a
    # large ImageQt-backed pixmap copy into a label.
    class Example(QWidget):

        def __init__(self):
            super(Example, self).__init__()
            img = hopper().resize((1000, 1000))
            qimage = ImageQt.ImageQt(img)
            pixmap1 = QtGui.QPixmap.fromImage(qimage)
            hbox = QHBoxLayout(self)
            lbl = QLabel(self)
            # Segfault in the problem
            lbl.setPixmap(pixmap1.copy())

# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
anhstudios/swganh
|
refs/heads/develop
|
data/scripts/templates/object/tangible/furniture/cheap/shared_coffee_table_s01.py
|
2
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
	# Autogenerated template factory: builds the Tangible object for this
	# furniture item (see the NOTICE header before editing by hand).
	result = Tangible()
	result.template = "object/tangible/furniture/cheap/shared_coffee_table_s01.iff"
	result.attribute_template_id = 6
	result.stfName("frn_n","frn_coffee_table_s01")
	#### BEGIN MODIFICATIONS ####
	#### END MODIFICATIONS ####
	return result
|
cloudera/avro
|
refs/heads/trunk
|
lang/c++/scripts/gen-cppcode.py
|
28
|
#!/usr/bin/python
license = '''/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'''
headers = '''
#include <stdint.h>
#include <string>
#include <vector>
#include <map>
#include "Boost.hh"
#include "Exception.hh"
#include "AvroSerialize.hh"
#include "AvroParse.hh"
#include "Layout.hh"
'''
done = False
typeToC= { 'int' : 'int32_t', 'long' :'int64_t', 'float' : 'float', 'double' : 'double',
'boolean' : 'bool', 'null': 'avro::Null', 'string' : 'std::string', 'bytes' : 'std::vector<uint8_t>'}
structList = []
structNames = {}
forwardDeclareList = []
def addStruct(name, declaration) :
    """Register a generated struct declaration once; duplicates are ignored."""
    # FIX: `in` instead of Python-2-only dict.has_key().
    if name not in structNames :
        structNames[name] = True
        structList.append(declaration)
def addForwardDeclare(declaration) :
    """Queue a C++ forward declaration for the named struct."""
    forwardDeclareList.append('struct ' + declaration + ';')
def doPrimitive(type):
    """Map an Avro primitive type; returns (cppType, avroType)."""
    cpp_type = typeToC[type]
    return (cpp_type, type)
def doSymbolic(args):
    """Handle a reference to an already-named type: forward declare it and
    return (cppType, avroType), both being the referenced name."""
    name = args[1]
    addForwardDeclare(name)
    return (name, name)
def addLayout(name, type, var) :
    """Return the C++ line registering the layout of field `var` (of Avro
    type `type`) inside generated struct `name`."""
    result = ' add(new $offsetType$(offset + offsetof($name$, $var$)));\n'
    result = result.replace('$name$', name)
    # FIX: `in` instead of Python-2-only dict.has_key().
    if type in typeToC :
        offsetType = 'avro::PrimitiveLayout'
    else :
        offsetType = type + '_Layout'
    result = result.replace('$offsetType$', offsetType)
    result = result.replace('$var$', var)
    return result
def addSimpleLayout(type) :
    """Return the C++ line registering a positionless layout entry for `type`."""
    result = ' add(new $offsetType$);\n'
    # FIX: `in` instead of Python-2-only dict.has_key().
    if type in typeToC :
        offsetType = 'avro::PrimitiveLayout'
    else :
        offsetType = type + '_Layout'
    return result.replace('$offsetType$', offsetType)
recordfieldTemplate = '$type$ $name$\n'
recordTemplate = '''struct $name$ {
$name$ () :
$initializers$
{ }
$recordfields$};
template <typename Serializer>
inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
s.writeRecord();
$serializefields$ s.writeRecordEnd();
}
template <typename Parser>
inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
p.readRecord();
$parsefields$ p.readRecordEnd();
}
class $name$_Layout : public avro::CompoundLayout {
public:
$name$_Layout(size_t offset = 0) :
CompoundLayout(offset)
{
$offsetlist$ }
};
'''
def doRecord(args):
    # Emit the C++ struct (plus serialize/parse/layout code) for an Avro
    # record.  `args` is the split 'record <name>' line; field lines are
    # pulled from stdin via getNextLine() until an 'end' marker.
    structDef = recordTemplate;
    typename = args[1];
    structDef = structDef.replace('$name$', typename);
    fields = ''
    serializefields = ''
    parsefields = ''
    initlist = ''
    offsetlist = ''
    end = False
    while not end:
        line = getNextLine()
        if line[0] == 'end':
            end = True
            # Drop the trailing ',\n' left after the last initializer.
            initlist = initlist.rstrip(',\n')
        elif line[0] == 'name':
            fieldname = line[1]
            fieldline = getNextLine()
            # Field types may themselves be compound; processType recurses
            # and registers any structs the field type requires.
            fieldtypename, fieldtype = processType(fieldline)
            fields += ' ' + fieldtypename + ' ' + fieldname + ';\n'
            serializefields += ' serialize(s, val.' + fieldname + ');\n'
            initlist += ' ' + fieldname + '(),\n'
            parsefields += ' parse(p, val.' + fieldname + ');\n'
            offsetlist += addLayout(typename, fieldtype, fieldname)
    # Substitute the accumulated pieces into the record template.
    structDef = structDef.replace('$initializers$', initlist)
    structDef = structDef.replace('$recordfields$', fields)
    structDef = structDef.replace('$serializefields$', serializefields)
    structDef = structDef.replace('$parsefields$', parsefields)
    structDef = structDef.replace('$offsetlist$', offsetlist)
    addStruct(typename, structDef)
    return (typename,typename)
uniontypestemplate = 'typedef $type$ Choice$N$Type'
unionTemplate = '''struct $name$ {
$typedeflist$
typedef void* (*GenericSetter)($name$ *, int64_t);
$name$() :
choice(0),
value(T0()),
genericSetter(&$name$::genericSet)
{ }
$setfuncs$
#ifdef AVRO_BOOST_NO_ANYREF
template<typename T>
const T &getValue() const {
const T *ptr = boost::any_cast<T>(&value);
return *ptr;
}
#else
template<typename T>
const T &getValue() const {
return boost::any_cast<const T&>(value);
}
#endif
static void *genericSet($name$ *u, int64_t choice) {
boost::any *val = &(u->value);
void *data = NULL;
switch (choice) {$switch$
}
return data;
}
int64_t choice;
boost::any value;
GenericSetter genericSetter;
};
template <typename Serializer>
inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
s.writeUnion(val.choice);
switch(val.choice) {
$switchserialize$ default :
throw avro::Exception("Unrecognized union choice");
}
}
template <typename Parser>
inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
val.choice = p.readUnion();
switch(val.choice) {
$switchparse$ default :
throw avro::Exception("Unrecognized union choice");
}
}
class $name$_Layout : public avro::CompoundLayout {
public:
$name$_Layout(size_t offset = 0) :
CompoundLayout(offset)
{
add(new avro::PrimitiveLayout(offset + offsetof($name$, choice)));
add(new avro::PrimitiveLayout(offset + offsetof($name$, genericSetter)));
$offsetlist$ }
};
'''
unionser = ' case $choice$:\n serialize(s, val.getValue< $type$ >());\n break;\n'
unionpar = ' case $choice$:\n { $type$ chosenVal; parse(p, chosenVal); val.value = chosenVal; }\n break;\n'
setfunc = ''' void set_$name$(const $type$ &val) {
choice = $N$;
value = val;
};\n'''
switcher = '''\n case $N$:
*val = T$N$();
data = boost::any_cast<T$N$>(val);
break;'''
def doUnion(args):
    # Emit the C++ struct for an Avro union.  Branch type lines are read
    # from stdin until 'end'; each branch contributes a typedef, a setter,
    # serialize/parse switch cases, and a layout entry.  The struct name is
    # built from the branch type names ('Union_of_<t1>_<t2>_...').
    structDef = unionTemplate
    uniontypes = ''
    switchserialize= ''
    switchparse= ''
    typename = 'Union_of'
    setters = ''
    switches = ''
    offsetlist = ''
    i = 0
    end = False
    while not end:
        line = getNextLine()
        if line[0] == 'end': end = True
        else :
            # Branch types may themselves be compound; processType recurses.
            uniontype, name = processType(line)
            typename += '_' + name
            uniontypes += ' ' + 'typedef ' + uniontype + ' T' + str(i) + ';\n'
            switch = unionser
            switch = switch.replace('$choice$', str(i))
            switch = switch.replace('$type$', uniontype)
            switchserialize += switch
            switch = unionpar
            switch = switch.replace('$choice$', str(i))
            switch = switch.replace('$type$', uniontype)
            switchparse += switch
            setter = setfunc
            setter = setter.replace('$name$', name)
            setter = setter.replace('$type$', uniontype)
            setter = setter.replace('$N$', str(i))
            setters += setter
            switch = switcher
            switches += switch.replace('$N$', str(i))
            offsetlist += addSimpleLayout(name)
            i+= 1
    # Substitute the accumulated pieces into the union template.
    structDef = structDef.replace('$name$', typename)
    structDef = structDef.replace('$typedeflist$', uniontypes)
    structDef = structDef.replace('$switchserialize$', switchserialize)
    structDef = structDef.replace('$switchparse$', switchparse)
    structDef = structDef.replace('$setfuncs$', setters)
    structDef = structDef.replace('$switch$', switches)
    structDef = structDef.replace('$offsetlist$', offsetlist)
    addStruct(typename, structDef)
    return (typename,typename)
enumTemplate = '''struct $name$ {
enum EnumSymbols {
$enumsymbols$
};
$name$() :
value($firstsymbol$)
{ }
EnumSymbols value;
};
template <typename Serializer>
inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
s.writeEnum(val.value);
}
template <typename Parser>
inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
val.value = static_cast<$name$::EnumSymbols>(p.readEnum());
}
class $name$_Layout : public avro::CompoundLayout {
public:
$name$_Layout(size_t offset = 0) :
CompoundLayout(offset)
{
add(new avro::PrimitiveLayout(offset + offsetof($name$, value)));
}
};
'''
def doEnum(args):
    # Emit a C++ struct wrapping an enum for an Avro enum type; symbol
    # lines are read from stdin until 'end'.
    structDef = enumTemplate;
    typename = args[1]
    structDef = structDef.replace('$name$', typename)
    end = False
    symbols = '';
    firstsymbol = '';
    while not end:
        line = getNextLine()
        if line[0] == 'end': end = True
        elif line[0] == 'name':
            # The first symbol also becomes the default value in the template.
            if symbols== '' :
                firstsymbol = line[1]
            else :
                symbols += ', '
            symbols += line[1]
        else: print "error"
    structDef = structDef.replace('$enumsymbols$', symbols);
    structDef = structDef.replace('$firstsymbol$', firstsymbol);
    addStruct(typename, structDef)
    return (typename,typename)
arrayTemplate = '''struct $name$ {
typedef $valuetype$ ValueType;
typedef std::vector<ValueType> ArrayType;
typedef ValueType* (*GenericSetter)($name$ *);
$name$() :
value(),
genericSetter(&$name$::genericSet)
{ }
static ValueType *genericSet($name$ *array) {
array->value.push_back(ValueType());
return &array->value.back();
}
void addValue(const ValueType &val) {
value.push_back(val);
}
ArrayType value;
GenericSetter genericSetter;
};
template <typename Serializer>
inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
const size_t size = val.value.size();
if(size) {
s.writeArrayBlock(size);
for(size_t i = 0; i < size; ++i) {
serialize(s, val.value[i]);
}
}
s.writeArrayEnd();
}
template <typename Parser>
inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
val.value.clear();
while(1) {
int size = p.readArrayBlockSize();
if(size > 0) {
val.value.reserve(val.value.size() + size);
while (size-- > 0) {
val.value.push_back($name$::ValueType());
parse(p, val.value.back());
}
}
else {
break;
}
}
}
class $name$_Layout : public avro::CompoundLayout {
public:
$name$_Layout(size_t offset = 0) :
CompoundLayout(offset)
{
add(new avro::PrimitiveLayout(offset + offsetof($name$, genericSetter)));
$offsetlist$ }
};
'''
def doArray(args):
    # Emit Array_of_<T> for an Avro array: read the single element type
    # line from stdin, then expect an 'end' marker.
    structDef = arrayTemplate
    line = getNextLine()
    arraytype, typename = processType(line)
    offsetlist = addSimpleLayout(typename)
    typename = 'Array_of_' + typename
    structDef = structDef.replace('$name$', typename)
    structDef = structDef.replace('$valuetype$', arraytype)
    structDef = structDef.replace('$offsetlist$', offsetlist)
    line = getNextLine()
    if line[0] != 'end': print 'error'
    addStruct(typename, structDef)
    return (typename,typename)
mapTemplate = '''struct $name$ {
typedef $valuetype$ ValueType;
typedef std::map<std::string, ValueType> MapType;
typedef ValueType* (*GenericSetter)($name$ *, const std::string &);
$name$() :
value(),
genericSetter(&$name$::genericSet)
{ }
void addValue(const std::string &key, const ValueType &val) {
value.insert(MapType::value_type(key, val));
}
static ValueType *genericSet($name$ *map, const std::string &key) {
map->value[key] = ValueType();
return &(map->value[key]);
}
MapType value;
GenericSetter genericSetter;
};
template <typename Serializer>
inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
if(val.value.size()) {
s.writeMapBlock(val.value.size());
$name$::MapType::const_iterator iter = val.value.begin();
$name$::MapType::const_iterator end = val.value.end();
while(iter!=end) {
serialize(s, iter->first);
serialize(s, iter->second);
++iter;
}
}
s.writeMapEnd();
}
template <typename Parser>
inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
val.value.clear();
while(1) {
int size = p.readMapBlockSize();
if(size > 0) {
while (size-- > 0) {
std::string key;
parse(p, key);
$name$::ValueType m;
parse(p, m);
val.value.insert($name$::MapType::value_type(key, m));
}
}
else {
break;
}
}
}
class $name$_Layout : public avro::CompoundLayout {
public:
$name$_Layout(size_t offset = 0) :
CompoundLayout(offset)
{
add(new avro::PrimitiveLayout(offset + offsetof($name$, genericSetter)));
$offsetlist$ }
};
'''
def doMap(args):
    # Emit Map_of_<T> for an Avro map: the first line is the key type
    # (must be string, so it is skipped), the second is the value type,
    # then an 'end' marker is expected.
    structDef = mapTemplate
    line = getNextLine() # must be string
    line = getNextLine()
    maptype, typename = processType(line);
    offsetlist = addSimpleLayout(typename)
    typename = 'Map_of_' + typename
    structDef = structDef.replace('$name$', typename)
    structDef = structDef.replace('$valuetype$', maptype)
    structDef = structDef.replace('$offsetlist$', offsetlist)
    line = getNextLine()
    if line[0] != 'end': print 'error'
    addStruct(typename, structDef)
    return (typename,typename)
fixedTemplate = '''struct $name$ {
enum {
fixedSize = $N$
};
$name$() {
memset(value, 0, sizeof(value));
}
uint8_t value[fixedSize];
};
template <typename Serializer>
inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
s.writeFixed(val.value);
}
template <typename Parser>
inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
p.readFixed(val.value);
}
class $name$_Layout : public avro::CompoundLayout {
public:
$name$_Layout(size_t offset = 0) :
CompoundLayout(offset)
{
add(new avro::PrimitiveLayout(offset + offsetof($name$, value)));
}
};
'''
def doFixed(args):
    # Emit a fixed-size byte-array struct; `args` is the split
    # 'fixed <name> <size>' line, and an 'end' marker is expected next.
    structDef = fixedTemplate
    typename = args[1]
    size = args[2]
    line = getNextLine()
    if line[0] != 'end': print 'error'
    structDef = structDef.replace('$name$', typename)
    structDef = structDef.replace('$N$', size)
    addStruct(typename, structDef)
    return (typename,typename)
primitiveTemplate = '''struct $name$ {
$type$ value;
};
template <typename Serializer>
inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
s.writeValue(val.value);
}
template <typename Parser>
inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
p.readValue(val.value);
}
class $name$_Layout : public avro::CompoundLayout {
public:
$name$_Layout(size_t offset = 0) :
CompoundLayout(offset)
{
add(new avro::PrimitiveLayout(offset + offsetof($name$, value)));
}
};
'''
def doPrimitiveStruct(type):
    """Emit a wrapper struct for a top-level primitive schema (e.g. Int)."""
    name = type.capitalize()
    structDef = primitiveTemplate.replace('$name$', name)
    structDef = structDef.replace('$type$', typeToC[type])
    addStruct(name, structDef)
compoundBuilder= { 'record' : doRecord, 'union' : doUnion, 'enum' : doEnum,
'map' : doMap, 'array' : doArray, 'fixed' : doFixed, 'symbolic' : doSymbolic }
def processType(inputs) :
    """Dispatch a split schema line to the primitive or compound handler.

    Returns (cppTypeName, avroTypeName).
    """
    type = inputs[0]
    # FIX: `in` instead of Python-2-only dict.has_key().
    if type in typeToC :
        result = doPrimitive(type)
    else :
        func = compoundBuilder[type]
        result = func(inputs)
    return result
def generateCode() :
    """Read the top-level schema line from stdin and generate its struct(s)."""
    inputs = getNextLine()
    type = inputs[0]
    # FIX: `in` instead of Python-2-only dict.has_key().
    if type in typeToC :
        doPrimitiveStruct(type)
    else :
        func = compoundBuilder[type]
        func(inputs)
def getNextLine():
    """Read and split the next schema line from stdin.

    Sets the module-level `done` flag on EOF or on an empty line.
    """
    try:
        line = raw_input()
    except EOFError:
        # FIX: only EOF is the expected failure; the previous bare `except`
        # also swallowed KeyboardInterrupt and genuine errors.
        line = ''
        globals()["done"] = True
    if line == '':
        globals()["done"] = True
    return line.split(' ')
def writeHeader(filebase, namespace):
    # Print the complete generated C++ header to stdout: license, include
    # guard, forward declarations, struct bodies, then is_serializable
    # specializations inside the avro namespace.
    headerstring = "%s_%s_hh__" % (namespace, filebase)
    print license
    print "#ifndef %s" % headerstring
    print "#define %s" % headerstring
    print headers
    print "namespace %s {\n" % namespace
    for x in forwardDeclareList:
        print "%s\n" % x
    for x in structList:
        print "/*----------------------------------------------------------------------------------*/\n"
        print "%s\n" % x
    print "\n} // namespace %s\n" % namespace
    print "namespace avro {\n"
    # NOTE(review): structNames is a dict, so on older Pythons this
    # iteration order is arbitrary — output ordering may vary between runs.
    for x in structNames:
        print 'template <> struct is_serializable<%s::%s> : public boost::true_type{};' % (namespace, x)
    print "\n} // namespace avro\n"
    print "#endif // %s" % headerstring
def usage():
    # Print command-line help for this generator script.
    print "-h, --help            print this helpful message"
    print "-i, --input=FILE      input file to read (default is stdin)"
    print "-o, --output=PATH     output file to generate (default is stdout)"
    print "-n, --namespace=LABEL namespace for schema (default is avrouser)"
if __name__ == "__main__":
from sys import argv
import getopt,sys
try:
opts, args = getopt.getopt(argv[1:], "hi:o:n:", ["help", "input=", "output=", "namespace="])
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
namespace = 'avrouser'
savein = sys.stdin
saveout = sys.stdout
inputFile = False
outputFile = False
outputFileBase = 'AvroGenerated'
for o, a in opts:
if o in ("-i", "--input"):
try:
inputFile = open(a, 'r')
sys.stdin = inputFile
except:
print "Could not open file " + a
sys.exit()
elif o in ("-o", "--output"):
try:
outputFile = open(a, 'w')
sys.stdout = outputFile
except:
print "Could not open file " + a
outputFileBase = a.rstrip('.hp') # strip for .h, .hh, .hpp
elif o in ("-n", "--namespace"):
namespace = a
elif o in ("-h", "--help"):
usage()
sys.exit()
else:
print "Unhandled option: " + o
usage()
sys.exit()
generateCode()
writeHeader(outputFileBase, namespace)
sys.stdin = savein
sys.stdout = saveout
if inputFile:
inputFile.close()
if outputFile:
outputFile.close()
|
Phil9l/cosmos
|
refs/heads/master
|
code/computational_geometry/src/area_of_triangle/area_of_triangle.py
|
3
|
#! /usr/local/bin/python3
# Part of Cosmos by OpenGenus Foundation
# Programmer: Amariah Del Mar
# Date Written: October 6th, 2017
# Function to find area of a triangle using three different vertices.
class MyPoint:
    """A point in the plane; coordinates default to the origin."""

    def __init__(self, x=0, y=0):
        self.x, self.y = x, y
def area_of_triangle(a, b, c):
    """Area of the triangle with vertices a, b, c (objects with .x/.y),
    computed with the shoelace formula."""
    positive = a.x * b.y + b.x * c.y + c.x * a.y
    negative = a.x * c.y + c.x * b.y + b.x * a.y
    return 0.5 * abs(positive - negative)
def test(a, b, c):
a.x, a.y = 3, 50
b.x, b.y = -6, 8
c.x, c.y = 8, 0
return a, b, c
if __name__ == "__main__":
    # Demo: build three points, load the fixed test coordinates, and print
    # the resulting triangle area.
    pt1 = MyPoint()
    pt2 = MyPoint()
    pt3 = MyPoint()
    pt1, pt2, pt3 = test(pt1, pt2, pt3)
    tri_area = area_of_triangle(pt1, pt2, pt3)
    print(
        "The area of a triangle with vertices ({},{}), ({},{}) and ({},{}) is {}.".format(
            pt1.x, pt1.y, pt2.x, pt2.y, pt3.x, pt3.y, tri_area
        )
    )
|
jeremiahyan/odoo
|
refs/heads/master
|
addons/website_forum/tests/test_forum.py
|
12
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from .common import KARMA, TestForumCommon
from odoo.exceptions import UserError, AccessError
from odoo.tools import mute_logger
from psycopg2 import IntegrityError
class TestForum(TestForumCommon):
def test_crud_rights(self):
    """Check forum.post.vote access rules: users may not edit others'
    votes, transfer vote ownership, vote on their own posts, or vote
    twice on the same post."""
    Post = self.env['forum.post']
    Vote = self.env['forum.post.vote']
    self.user_portal.karma = 500
    self.user_employee.karma = 500

    # create some posts
    self.admin_post = self.post
    self.portal_post = Post.with_user(self.user_portal).create({
        'name': 'Post from Portal User',
        'content': 'I am not a bird.',
        'forum_id': self.forum.id,
    })
    self.employee_post = Post.with_user(self.user_employee).create({
        'name': 'Post from Employee User',
        'content': 'I am not a bird.',
        'forum_id': self.forum.id,
    })

    # vote on some posts
    self.employee_vote_on_admin_post = Vote.with_user(self.user_employee).create({
        'post_id': self.admin_post.id,
        'vote': '1',
    })
    self.portal_vote_on_admin_post = Vote.with_user(self.user_portal).create({
        'post_id': self.admin_post.id,
        'vote': '1',
    })
    self.admin_vote_on_portal_post = Vote.create({
        'post_id': self.portal_post.id,
        'vote': '1',
    })
    self.admin_vote_on_employee_post = Vote.create({
        'post_id': self.employee_post.id,
        'vote': '1',
    })

    # One should not be able to modify someone else's vote
    with self.assertRaises(UserError):
        self.admin_vote_on_portal_post.with_user(self.user_employee).write({
            'vote': '-1',
        })
    with self.assertRaises(UserError):
        self.admin_vote_on_employee_post.with_user(self.user_portal).write({
            'vote': '-1',
        })

    # One should not be able to give his vote to someone else
    # (the write is silently ignored rather than raising — the assert below
    # checks ownership is unchanged)
    self.employee_vote_on_admin_post.with_user(self.user_employee).write({
        'user_id': 1,
    })
    self.assertEqual(self.employee_vote_on_admin_post.user_id, self.user_employee, 'User employee should not be able to give its vote ownership to someone else')

    # One should not be able to change his vote's post to a post of his own (would be self voting)
    with self.assertRaises(UserError):
        self.employee_vote_on_admin_post.with_user(self.user_employee).write({
            'post_id': self.employee_post.id,
        })

    # One should not be able to give his vote to someone else
    self.portal_vote_on_admin_post.with_user(self.user_portal).write({
        'user_id': 1,
    })
    self.assertEqual(self.portal_vote_on_admin_post.user_id, self.user_portal, 'User portal should not be able to give its vote ownership to someone else')

    # One should not be able to change his vote's post to a post of his own (would be self voting)
    with self.assertRaises(UserError):
        self.portal_vote_on_admin_post.with_user(self.user_portal).write({
            'post_id': self.portal_post.id,
        })

    # One should not be able to vote for its own post
    with self.assertRaises(UserError):
        Vote.with_user(self.user_employee).create({
            'post_id': self.employee_post.id,
            'vote': '1',
        })

    # One should not be able to vote for its own post
    with self.assertRaises(UserError):
        Vote.with_user(self.user_portal).create({
            'post_id': self.portal_post.id,
            'vote': '1',
        })

    # Duplicate votes violate a DB-level constraint, so the raised error is
    # an IntegrityError (not UserError); savepoints keep the cursor usable.
    with mute_logger('odoo.sql_db'):
        with self.assertRaises(IntegrityError):
            with self.cr.savepoint():
                # One should not be able to vote more than once on a same post
                Vote.with_user(self.user_employee).create({
                    'post_id': self.admin_post.id,
                    'vote': '1',
                })
        with self.assertRaises(IntegrityError):
            with self.cr.savepoint():
                # One should not be able to vote more than once on a same post
                Vote.with_user(self.user_employee).create({
                    'post_id': self.admin_post.id,
                    'vote': '1',
                })

    # One should not be able to create a vote for someone else
    new_employee_vote = Vote.with_user(self.user_employee).create({
        'post_id': self.portal_post.id,
        'user_id': 1,
        'vote': '1',
    })
    self.assertEqual(new_employee_vote.user_id, self.user_employee, 'Creating a vote for someone else should not be allowed. It should create it for yourself instead')

    # One should not be able to create a vote for someone else
    new_portal_vote = Vote.with_user(self.user_portal).create({
        'post_id': self.employee_post.id,
        'user_id': 1,
        'vote': '1',
    })
    self.assertEqual(new_portal_vote.user_id, self.user_portal, 'Creating a vote for someone else should not be allowed. It should create it for yourself instead')
@mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
def test_ask(self):
    """Posting questions: denied for public users and low-karma portal
    users; allowed (and karma-generating) once karma suffices."""
    Post = self.env['forum.post']

    # Public user asks a question: not allowed
    with self.assertRaises(AccessError):
        Post.with_user(self.user_public).create({
            'name': " Question ?",
            'forum_id': self.forum.id,
        })

    # Portal user asks a question with tags: not allowed, unsufficient karma
    with self.assertRaises(AccessError):
        Post.with_user(self.user_portal).create({
            'name': " Q_0",
            'forum_id': self.forum.id,
            'tag_ids': [(0, 0, {'name': 'Tag0', 'forum_id': self.forum.id})]
        })

    # Portal user asks a question with tags: ok if enough karma
    self.user_portal.karma = KARMA['tag_create']
    Post.with_user(self.user_portal).create({
        'name': " Q0",
        'forum_id': self.forum.id,
        'tag_ids': [(0, 0, {'name': 'Tag1', 'forum_id': self.forum.id})]
    })
    self.assertEqual(self.user_portal.karma, KARMA['tag_create'], 'website_forum: wrong karma generation when asking question')

    # With 'post' karma the new question additionally grants gen_que_new
    # karma to its author (checked by the final assert).
    self.user_portal.karma = KARMA['post']
    Post.with_user(self.user_portal).create({
        'name': " Q0",
        'forum_id': self.forum.id,
        'tag_ids': [(0, 0, {'name': 'Tag42', 'forum_id': self.forum.id})]
    })
    self.assertEqual(self.user_portal.karma, KARMA['post'] + KARMA['gen_que_new'], 'website_forum: wrong karma generation when asking question')
@mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
def test_answer(self):
    """Answering a question requires the 'ans' karma threshold; answering
    itself generates no extra karma here (final assert)."""
    Post = self.env['forum.post']

    # Answers its own question: not allowed, unsufficient karma
    with self.assertRaises(AccessError):
        Post.with_user(self.user_employee).create({
            'name': " A0",
            'forum_id': self.forum.id,
            'parent_id': self.post.id,
        })

    # Answers on question: ok if enough karma
    self.user_employee.karma = KARMA['ans']
    Post.with_user(self.user_employee).create({
        'name': " A0",
        'forum_id': self.forum.id,
        'parent_id': self.post.id,
    })
    self.assertEqual(self.user_employee.karma, KARMA['ans'], 'website_forum: wrong karma generation when answering question')
@mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
def test_vote_crash(self):
    """Upvoting must fail on one's own post and without enough karma."""
    Post = self.env['forum.post']
    self.user_employee.karma = KARMA['ans']
    answer_vals = {
        'name': 'TestAnswer',
        'forum_id': self.forum.id,
        'parent_id': self.post.id}
    own_answer = Post.with_user(self.user_employee).create(answer_vals)
    # A user can never upvote a post he authored himself.
    with self.assertRaises(UserError):
        own_answer.vote(upvote=True)
    # The portal user is below the upvote karma threshold.
    with self.assertRaises(AccessError):
        self.post.with_user(self.user_portal).vote(upvote=True)
def test_vote(self):
    """Upvoting with enough karma credits the question author with
    KARMA['gen_que_upv']."""
    self.post.create_uid.karma = KARMA['ask']
    self.user_portal.karma = KARMA['upv']
    self.post.with_user(self.user_portal).vote(upvote=True)
    self.assertEqual(self.post.create_uid.karma, KARMA['ask'] + KARMA['gen_que_upv'], 'website_forum: wrong karma generation of upvoted question author')
@mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
def test_downvote_crash(self):
    """Downvoting must fail on one's own post and without enough karma."""
    Post = self.env['forum.post']
    self.user_employee.karma = KARMA['ans']
    answer_vals = {
        'name': 'TestAnswer',
        'forum_id': self.forum.id,
        'parent_id': self.post.id}
    own_answer = Post.with_user(self.user_employee).create(answer_vals)
    # A user can never downvote a post he authored himself.
    with self.assertRaises(UserError):
        own_answer.vote(upvote=False)
    # The portal user is below the downvote karma threshold.
    with self.assertRaises(AccessError):
        self.post.with_user(self.user_portal).vote(upvote=False)
def test_downvote(self):
    """Downvoting with enough karma applies the (negative)
    KARMA['gen_que_dwv'] delta to the question author."""
    self.post.create_uid.karma = 50
    self.user_portal.karma = KARMA['dwv']
    self.post.with_user(self.user_portal).vote(upvote=False)
    self.assertEqual(self.post.create_uid.karma, 50 + KARMA['gen_que_dwv'], 'website_forum: wrong karma generation of downvoted question author')
def test_comment_crash(self):
    """Commenting on a post without comment karma raises AccessError."""
    with self.assertRaises(AccessError):
        self.post.with_user(self.user_portal).message_post(body='Should crash', message_type='comment')
def test_comment(self):
    """Posting a notification is always allowed; posting a comment needs
    KARMA['com_all']."""
    self.post.with_user(self.user_employee).message_post(body='Test0', message_type='notification')
    self.user_employee.karma = KARMA['com_all']
    self.post.with_user(self.user_employee).message_post(body='Test1', message_type='comment')
    # 4 = messages from post creation plus the two posted above
    # (exact fixture message count — confirm against setUp if it changes)
    self.assertEqual(len(self.post.message_ids), 4, 'website_forum: wrong behavior of message_post')
def test_flag_a_post(self):
    """Flagging a post needs KARMA['flag'] and moves it to 'flagged'."""
    Post = self.env['forum.post']
    self.user_portal.karma = KARMA['ask']
    post = Post.with_user(self.user_portal).create({
        'name': "Q0",
        'forum_id': self.forum.id,
    })
    # portal user flags a post: not allowed, unsufficient karma
    with self.assertRaises(AccessError):
        post.with_user(self.user_portal).flag()
    # portal user flags a post: ok if enough karma
    self.user_portal.karma = KARMA['flag']
    post.state = 'active'
    post.with_user(self.user_portal).flag()
    self.assertEqual(post.state, 'flagged', 'website_forum: wrong state when flagging a post')
def test_validate_a_post(self):
    """Validating a post needs KARMA['moderate']; validation from
    'pending' also grants the author KARMA['gen_que_new']."""
    Post = self.env['forum.post']
    self.user_portal.karma = KARMA['ask']
    post = Post.with_user(self.user_portal).create({
        'name': "Q0",
        'forum_id': self.forum.id,
    })
    # portal user validate a post: not allowed, unsufficient karma
    with self.assertRaises(AccessError):
        post.with_user(self.user_portal).validate()
    # portal user validate a pending post
    self.user_portal.karma = KARMA['moderate']
    post.state = 'pending'
    init_karma = post.create_uid.karma
    post.with_user(self.user_portal).validate()
    self.assertEqual(post.state, 'active', 'website_forum: wrong state when validate a post after pending')
    self.assertEqual(post.create_uid.karma, init_karma + KARMA['gen_que_new'], 'website_forum: wrong karma when validate a post after pending')
    # portal user validate a flagged post: ok if enough karma
    self.user_portal.karma = KARMA['moderate']
    post.state = 'flagged'
    post.with_user(self.user_portal).validate()
    self.assertEqual(post.state, 'active', 'website_forum: wrong state when validate a post after flagged')
    # portal user validate an offensive post: ok if enough karma
    self.user_portal.karma = KARMA['moderate']
    post.state = 'offensive'
    # NOTE(review): init_karma below is never asserted afterwards — the
    # karma effect of validating an offensive post is not checked here.
    init_karma = post.create_uid.karma
    post.with_user(self.user_portal).validate()
    self.assertEqual(post.state, 'active', 'website_forum: wrong state when validate a post after offensive')
def test_refuse_a_post(self):
    """Refusing a pending post needs KARMA['moderate']; it records the
    moderator and leaves the author's karma untouched."""
    Post = self.env['forum.post']
    self.user_portal.karma = KARMA['ask']
    post = Post.with_user(self.user_portal).create({
        'name': "Q0",
        'forum_id': self.forum.id,
    })
    # portal user validate a post: not allowed, unsufficient karma
    with self.assertRaises(AccessError):
        post.with_user(self.user_portal).refuse()
    # portal user validate a pending post
    self.user_portal.karma = KARMA['moderate']
    post.state = 'pending'
    init_karma = post.create_uid.karma
    post.with_user(self.user_portal).refuse()
    self.assertEqual(post.moderator_id, self.user_portal, 'website_forum: wrong moderator_id when refusing')
    self.assertEqual(post.create_uid.karma, init_karma, 'website_forum: wrong karma when refusing a post')
def test_mark_a_post_as_offensive(self):
    """Marking offensive needs KARMA['moderate']; the author's karma
    takes the (negative) KARMA['gen_ans_flag'] delta."""
    Post = self.env['forum.post']
    self.user_portal.karma = KARMA['ask']
    post = Post.with_user(self.user_portal).create({
        'name': "Q0",
        'forum_id': self.forum.id,
    })
    # portal user mark a post as offensive: not allowed, unsufficient karma
    with self.assertRaises(AccessError):
        post.with_user(self.user_portal).mark_as_offensive(12)
    # portal user mark a post as offensive
    self.user_portal.karma = KARMA['moderate']
    post.state = 'flagged'
    init_karma = post.create_uid.karma
    # 12 — presumably the id of a "reason" record; verify against the
    # mark_as_offensive signature
    post.with_user(self.user_portal).mark_as_offensive(12)
    self.assertEqual(post.state, 'offensive', 'website_forum: wrong state when marking a post as offensive')
    self.assertEqual(post.create_uid.karma, init_karma + KARMA['gen_ans_flag'], 'website_forum: wrong karma when marking a post as offensive')
def test_convert_answer_to_comment_crash(self):
    """Converting a question is a no-op; converting an answer without
    KARMA['com_conv_all'] raises AccessError."""
    Post = self.env['forum.post']
    # converting a question does nothing
    new_msg = self.post.with_user(self.user_portal).convert_answer_to_comment()
    self.assertEqual(new_msg.id, False, 'website_forum: question to comment conversion failed')
    self.assertEqual(Post.search([('name', '=', 'TestQuestion')])[0].forum_id.name, 'TestForum', 'website_forum: question to comment conversion failed')
    with self.assertRaises(AccessError):
        self.answer.with_user(self.user_portal).convert_answer_to_comment()
def test_convert_answer_to_comment(self):
    """With KARMA['com_conv_all'], an answer converts into one comment
    keeping the original author and body."""
    self.user_portal.karma = KARMA['com_conv_all']
    post_author = self.answer.create_uid.partner_id
    new_msg = self.answer.with_user(self.user_portal).convert_answer_to_comment()
    self.assertEqual(len(new_msg), 1, 'website_forum: wrong answer to comment conversion')
    self.assertEqual(new_msg.author_id, post_author, 'website_forum: wrong answer to comment conversion')
    self.assertIn('I am an anteater', new_msg.body, 'website_forum: wrong answer to comment conversion')
def test_edit_post_crash(self):
    """Editing someone else's post without karma raises AccessError."""
    with self.assertRaises(AccessError):
        self.post.with_user(self.user_portal).write({'name': 'I am not your father.'})
def test_edit_post(self):
    """edit_own karma allows editing one's own post; edit_all allows
    editing any post."""
    self.post.create_uid.karma = KARMA['edit_own']
    self.post.write({'name': 'Actually I am your dog.'})
    self.user_portal.karma = KARMA['edit_all']
    self.post.with_user(self.user_portal).write({'name': 'Actually I am your cat.'})
def test_close_post_crash(self):
    """Closing a post without karma raises AccessError."""
    with self.assertRaises(AccessError):
        self.post.with_user(self.user_portal).close(None)
def test_close_post_own(self):
    """close_own karma allows closing one's own post."""
    self.post.create_uid.karma = KARMA['close_own']
    self.post.close(None)
def test_close_post_all(self):
    """close_all karma allows closing any post."""
    self.user_portal.karma = KARMA['close_all']
    self.post.with_user(self.user_portal).close(None)
def test_deactivate_post_crash(self):
    """Deactivating a post without karma raises AccessError."""
    with self.assertRaises(AccessError):
        self.post.with_user(self.user_portal).write({'active': False})
def test_deactivate_post_own(self):
    """unlink_own karma allows deactivating one's own post."""
    self.post.create_uid.karma = KARMA['unlink_own']
    self.post.write({'active': False})
def test_deactivate_post_all(self):
    """unlink_all karma allows deactivating any post."""
    self.user_portal.karma = KARMA['unlink_all']
    self.post.with_user(self.user_portal).write({'active': False})
def test_unlink_post_crash(self):
    """Deleting a post without karma raises AccessError."""
    with self.assertRaises(AccessError):
        self.post.with_user(self.user_portal).unlink()
def test_unlink_post_own(self):
    """unlink_own karma allows deleting one's own post."""
    self.post.create_uid.karma = KARMA['unlink_own']
    self.post.unlink()
def test_unlink_post_all(self):
    """unlink_all karma allows deleting any post."""
    self.user_portal.karma = KARMA['unlink_all']
    self.post.with_user(self.user_portal).unlink()
def test_forum_mode_questions(self):
    """In a 'questions' forum, uid_has_answered combined with the forum
    mode reports that the current user already answered the thread."""
    # Dedicated forum running in 'questions' mode.
    question_forum = self.env['forum.forum'].create({
        'name': 'Questions Forum',
        'mode': 'questions',
        'active': True
    })
    Post = self.env['forum.post']
    thread = Post.create({
        'name': 'My First Post',
        'forum_id': question_forum.id,
        'parent_id': self.post.id,
    })
    # Answering the thread as the current user flips uid_has_answered.
    Post.create({
        'name': 'This is an answer',
        'forum_id': question_forum.id,
        'parent_id': thread.id,
    })
    self.assertEqual(
        not thread.uid_has_answered or thread.forum_id.mode == 'discussions', False)
    self.assertEqual(
        thread.uid_has_answered and thread.forum_id.mode == 'questions', True)
def test_forum_mode_discussions(self):
    """In a 'discussions' forum, the 'already answered' combination used
    for questions forums never triggers."""
    # Dedicated forum running in 'discussions' mode.
    discussion_forum = self.env['forum.forum'].create({
        'name': 'Discussions Forum',
        'mode': 'discussions',
        'active': True
    })
    Post = self.env['forum.post']
    thread = Post.create({
        'name': 'My First Post',
        'forum_id': discussion_forum.id,
        'parent_id': self.post.id,
    })
    # Answer the thread as the current user.
    Post.create({
        'name': 'This is an answer',
        'forum_id': discussion_forum.id,
        'parent_id': thread.id,
    })
    self.assertEqual(
        not thread.uid_has_answered or thread.forum_id.mode == 'discussions', True)
    self.assertEqual(
        thread.uid_has_answered and thread.forum_id.mode == 'questions', False)
|
aferr/LatticeMemCtl
|
refs/heads/master
|
src/python/m5/SimObject.py
|
6
|
# Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2004-2006 The Regents of The University of Michigan
# Copyright (c) 2010 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
# Nathan Binkert
# Andreas Hansson
import sys
from types import FunctionType, MethodType, ModuleType
import m5
from m5.util import *
# Have to import params up top since Param is referenced on initial
# load (when SimObject class references Param to create a class
# variable, the 'name' param)...
from m5.params import *
# There are a few things we need that aren't in params.__all__ since
# normal users don't need them
from m5.params import ParamDesc, VectorParamDesc, \
isNullPointer, SimObjectVector, Port
from m5.proxy import *
from m5.proxy import isproxy
#####################################################################
#
# M5 Python Configuration Utility
#
# The basic idea is to write simple Python programs that build Python
# objects corresponding to M5 SimObjects for the desired simulation
# configuration. For now, the Python emits a .ini file that can be
# parsed by M5. In the future, some tighter integration between M5
# and the Python interpreter may allow bypassing the .ini file.
#
# Each SimObject class in M5 is represented by a Python class with the
# same name. The Python inheritance tree mirrors the M5 C++ tree
# (e.g., SimpleCPU derives from BaseCPU in both cases, and all
# SimObjects inherit from a single SimObject base class). To specify
# an instance of an M5 SimObject in a configuration, the user simply
# instantiates the corresponding Python object. The parameters for
# that SimObject are given by assigning to attributes of the Python
# object, either using keyword assignment in the constructor or in
# separate assignment statements. For example:
#
# cache = BaseCache(size='64KB')
# cache.hit_latency = 3
# cache.assoc = 8
#
# The magic lies in the mapping of the Python attributes for SimObject
# classes to the actual SimObject parameter specifications. This
# allows parameter validity checking in the Python code. Continuing
# the example above, the statements "cache.blurfl=3" or
# "cache.assoc='hello'" would both result in runtime errors in Python,
# since the BaseCache object has no 'blurfl' parameter and the 'assoc'
# parameter requires an integer, respectively. This magic is done
# primarily by overriding the special __setattr__ method that controls
# assignment to object attributes.
#
# Once a set of Python objects have been instantiated in a hierarchy,
# calling 'instantiate(obj)' (where obj is the root of the hierarchy)
# will generate a .ini file.
#
#####################################################################
# list of all SimObject classes, keyed by class name (filled in by
# MetaSimObject.__new__ for every class that declares a 'type')
allClasses = {}
# dict to look up SimObjects based on path
# (presumably full config path -> instance; populated elsewhere — verify)
instanceDict = {}
def public_value(key, value):
    """Return True when a class-dict entry should stay on the class itself
    rather than be treated as a SimObject param/port assignment: either
    the key is "private" (underscore-prefixed) or the value is a
    function, method, module, classmethod, or nested class."""
    if key.startswith('_'):
        return True
    return isinstance(value, (FunctionType, MethodType, ModuleType,
                              classmethod, type))
# The metaclass for SimObject. This class controls how new classes
# that derive from SimObject are instantiated, and provides inherited
# class behavior (just like a class controls how instances of that
# class are instantiated, and provides inherited instance behavior).
class MetaSimObject(type):
    """Metaclass that turns SimObject class bodies into parameter/port
    declarations and maintains the inheritance multidicts."""
    # Attributes that can be set only at initialization time
    init_keywords = { 'abstract' : bool,
                      'cxx_class' : str,
                      'cxx_type' : str,
                      'type' : str }
    # Attributes that can be set any time
    keywords = { 'check' : FunctionType }
# __new__ is called before __init__, and is where the statements
# in the body of the class definition get loaded into the class's
# __dict__. We intercept this to filter out parameter & port assignments
# and only allow "private" attributes to be passed to the base
# __new__ (starting with underscore).
def __new__(mcls, name, bases, dict):
    """Split the class body: keep "public" entries (per public_value) on
    the class, stash everything else in _value_dict for __init__ to
    process; register classes that declare a 'type' in allClasses."""
    assert name not in allClasses, "SimObject %s already present" % name
    # Copy "private" attributes, functions, and classes to the
    # official dict. Everything else goes in _init_dict to be
    # filtered in __init__.
    cls_dict = {}
    value_dict = {}
    for key,val in dict.items():
        if public_value(key, val):
            cls_dict[key] = val
        else:
            # must be a param/port setting
            value_dict[key] = val
    if 'abstract' not in value_dict:
        value_dict['abstract'] = False
    cls_dict['_value_dict'] = value_dict
    cls = super(MetaSimObject, mcls).__new__(mcls, name, bases, cls_dict)
    if 'type' in value_dict:
        allClasses[name] = cls
    return cls
# subclass initialization
def __init__(cls, name, bases, dict):
    """Set up the per-class multidicts, wire up inheritance from the
    (single) SimObject base, default the C++ class keywords, and replay
    the _value_dict entries as param/port/keyword settings."""
    # calls type.__init__()... I think that's a no-op, but leave
    # it here just in case it's not.
    super(MetaSimObject, cls).__init__(name, bases, dict)
    # initialize required attributes
    # class-only attributes
    cls._params = multidict() # param descriptions
    cls._ports = multidict() # port descriptions
    # class or instance attributes
    cls._values = multidict() # param values
    cls._children = multidict() # SimObject children
    cls._port_refs = multidict() # port ref objects
    cls._instantiated = False # really instantiated, cloned, or subclassed
    # We don't support multiple inheritance of sim objects. If you want
    # to, you must fix multidict to deal with it properly. Non sim-objects
    # are ok, though
    bTotal = 0
    for c in bases:
        if isinstance(c, MetaSimObject):
            bTotal += 1
    if bTotal > 1:
        raise TypeError, "SimObjects do not support multiple inheritance"
    base = bases[0]
    # Set up general inheritance via multidicts. A subclass will
    # inherit all its settings from the base class. The only time
    # the following is not true is when we define the SimObject
    # class itself (in which case the multidicts have no parent).
    if isinstance(base, MetaSimObject):
        cls._base = base
        cls._params.parent = base._params
        cls._ports.parent = base._ports
        cls._values.parent = base._values
        cls._children.parent = base._children
        cls._port_refs.parent = base._port_refs
        # mark base as having been subclassed
        base._instantiated = True
    else:
        cls._base = None
    # default keyword values
    if 'type' in cls._value_dict:
        if 'cxx_class' not in cls._value_dict:
            cls._value_dict['cxx_class'] = cls._value_dict['type']
        cls._value_dict['cxx_type'] = '%s *' % cls._value_dict['cxx_class']
    # Export methods are automatically inherited via C++, so we
    # don't want the method declarations to get inherited on the
    # python side (and thus end up getting repeated in the wrapped
    # versions of derived classes). The code below basically
    # suppresses inheritance by substituting in the base (null)
    # versions of these methods unless a different version is
    # explicitly supplied.
    for method_name in ('export_methods', 'export_method_cxx_predecls',
                        'export_method_swig_predecls'):
        if method_name not in cls.__dict__:
            base_method = getattr(MetaSimObject, method_name)
            # Python 2 unbound-method binding of the metaclass stub
            m = MethodType(base_method, cls, MetaSimObject)
            setattr(cls, method_name, m)
    # Now process the _value_dict items. They could be defining
    # new (or overriding existing) parameters or ports, setting
    # class keywords (e.g., 'abstract'), or setting parameter
    # values or port bindings. The first 3 can only be set when
    # the class is defined, so we handle them here. The others
    # can be set later too, so just emulate that by calling
    # setattr().
    for key,val in cls._value_dict.items():
        # param descriptions
        if isinstance(val, ParamDesc):
            cls._new_param(key, val)
        # port objects
        elif isinstance(val, Port):
            cls._new_port(key, val)
        # init-time-only keywords
        elif cls.init_keywords.has_key(key):
            cls._set_keyword(key, val, cls.init_keywords[key])
        # default: use normal path (ends up in __setattr__)
        else:
            setattr(cls, key, val)
def _set_keyword(cls, keyword, val, kwtype):
    """Type-check and assign a class keyword (e.g. 'abstract', 'check');
    plain functions are wrapped as classmethods."""
    if not isinstance(val, kwtype):
        raise TypeError, 'keyword %s has bad type %s (expecting %s)' % \
              (keyword, type(val), kwtype)
    if isinstance(val, FunctionType):
        val = classmethod(val)
    # bypass our own __setattr__ filtering
    type.__setattr__(cls, keyword, val)
def _new_param(cls, name, pdesc):
    """Register a new parameter description and apply its default."""
    # each param desc should be uniquely assigned to one variable
    assert(not hasattr(pdesc, 'name'))
    pdesc.name = name
    cls._params[name] = pdesc
    if hasattr(pdesc, 'default'):
        cls._set_param(name, pdesc.default, pdesc)
def _set_param(cls, name, value, param):
    """Convert and store a parameter value; SimObject-valued params also
    become children so cloning works."""
    assert(param.name == name)
    try:
        value = param.convert(value)
    except Exception, e:
        # prepend context to the conversion error and re-raise
        msg = "%s\nError setting param %s.%s to %s\n" % \
              (e, cls.__name__, name, value)
        e.args = (msg, )
        raise
    cls._values[name] = value
    # if param value is a SimObject, make it a child too, so that
    # it gets cloned properly when the class is instantiated
    if isSimObjectOrVector(value) and not value.has_parent():
        cls._add_cls_child(name, value)
def _add_cls_child(cls, name, child):
    """Attach a SimObject child at class level (parent = the class)."""
    # It's a little funky to have a class as a parent, but these
    # objects should never be instantiated (only cloned, which
    # clears the parent pointer), and this makes it clear that the
    # object is not an orphan and can provide better error
    # messages.
    child.set_parent(cls, name)
    cls._children[name] = child
def _new_port(cls, name, port):
    """Register a new port description on the class."""
    # each port should be uniquely assigned to one variable
    assert(not hasattr(port, 'name'))
    port.name = name
    cls._ports[name] = port
# same as _get_port_ref, effectively, but for classes
def _cls_get_port_ref(cls, attr):
    """Lazily create and cache the single PortRef for port 'attr'."""
    # Return reference that can be assigned to another port
    # via __setattr__. There is only ever one reference
    # object per port, but we create them lazily here.
    ref = cls._port_refs.get(attr)
    if not ref:
        ref = cls._ports[attr].makeRef(cls)
        cls._port_refs[attr] = ref
    return ref
# Set attribute (called on foo.attr = value when foo is an
# instance of class cls).
def __setattr__(cls, attr, value):
    """Route class-level assignments: private attrs pass through, then
    keywords, port bindings, param values, implicit children — in that
    order; anything else is an error."""
    # normal processing for private attributes
    if public_value(attr, value):
        type.__setattr__(cls, attr, value)
        return
    if cls.keywords.has_key(attr):
        cls._set_keyword(attr, value, cls.keywords[attr])
        return
    if cls._ports.has_key(attr):
        cls._cls_get_port_ref(attr).connect(value)
        return
    # forbid late SimObject assignment: instances/subclasses already
    # snapshotted this class's values
    if isSimObjectOrSequence(value) and cls._instantiated:
        raise RuntimeError, \
              "cannot set SimObject parameter '%s' after\n" \
              " class %s has been instantiated or subclassed" \
              % (attr, cls.__name__)
    # check for param
    param = cls._params.get(attr)
    if param:
        cls._set_param(attr, value, param)
        return
    if isSimObjectOrSequence(value):
        # If RHS is a SimObject, it's an implicit child assignment.
        cls._add_cls_child(attr, coerceSimObjectOrVector(value))
        return
    # no valid assignment... raise exception
    raise AttributeError, \
          "Class %s has no parameter \'%s\'" % (cls.__name__, attr)
def __getattr__(cls, attr):
    """Resolve synthetic C++ naming attributes, then param values, then
    children; anything else raises AttributeError."""
    if attr == 'cxx_class_path':
        return cls.cxx_class.split('::')
    if attr == 'cxx_class_name':
        return cls.cxx_class_path[-1]
    if attr == 'cxx_namespaces':
        return cls.cxx_class_path[:-1]
    if cls._values.has_key(attr):
        return cls._values[attr]
    if cls._children.has_key(attr):
        return cls._children[attr]
    raise AttributeError, \
          "object '%s' has no attribute '%s'" % (cls.__name__, attr)
def __str__(cls):
    """A SimObject class prints as its bare class name."""
    return cls.__name__
# See ParamValue.cxx_predecls for description.
def cxx_predecls(cls, code):
    """Emit the #include needed before using this class as a param type."""
    code('#include "params/$cls.hh"')
# See ParamValue.swig_predecls for description.
def swig_predecls(cls, code):
    """Emit the SWIG %import needed before using this class as a param type."""
    code('%import "python/m5/internal/param_$cls.i"')
# Hook for exporting additional C++ methods to Python via SWIG.
# Default is none, override using @classmethod in class definition.
def export_methods(cls, code):
    pass
# Generate the code needed as a prerequisite for the C++ methods
# exported via export_methods() to be compiled in the _wrap.cc
# file. Typically generates one or more #include statements. If
# any methods are exported, typically at least the C++ header
# declaring the relevant SimObject class must be included.
def export_method_cxx_predecls(cls, code):
    pass
# Generate the code needed as a prerequisite for the C++ methods
# exported via export_methods() to be processed by SWIG.
# Typically generates one or more %include or %import statements.
# If any methods are exported, typically at least the C++ header
# declaring the relevant SimObject class must be included.
def export_method_swig_predecls(cls, code):
    pass
# Generate the declaration for this object for wrapping with SWIG.
# Generates code that goes into a SWIG .i file. Called from
# src/SConscript.
def swig_decl(cls, code):
    """Write the SWIG .i module declaring this SimObject class and its
    param struct, including predecls for all locally declared params."""
    class_path = cls.cxx_class.split('::')
    classname = class_path[-1]
    namespaces = class_path[:-1]
    # The 'local' attribute restricts us to the params declared in
    # the object itself, not including inherited params (which
    # will also be inherited from the base class's param struct
    # here).
    params = cls._params.local.values()
    ports = cls._ports.local
    code('%module(package="m5.internal") param_$cls')
    code()
    code('%{')
    code('#include "params/$cls.hh"')
    for param in params:
        param.cxx_predecls(code)
    cls.export_method_cxx_predecls(code)
    code('''\
/**
 * This is a workaround for bug in swig. Prior to gcc 4.6.1 the STL
 * headers like vector, string, etc. used to automatically pull in
 * the cstddef header but starting with gcc 4.6.1 they no longer do.
 * This leads to swig generated a file that does not compile so we
 * explicitly include cstddef. Additionally, including version 2.0.4,
 * swig uses ptrdiff_t without the std:: namespace prefix which is
 * required with gcc 4.6.1. We explicitly provide access to it.
 */
#include <cstddef>
using std::ptrdiff_t;
''')
    code('%}')
    code()
    for param in params:
        param.swig_predecls(code)
    cls.export_method_swig_predecls(code)
    code()
    if cls._base:
        code('%import "python/m5/internal/param_${{cls._base}}.i"')
    code()
    for ns in namespaces:
        code('namespace $ns {')
    if namespaces:
        # flatten the C++ qualified name so SWIG symbols stay unique
        code('// avoid name conflicts')
        sep_string = '_COLONS_'
        flat_name = sep_string.join(class_path)
        code('%rename($flat_name) $classname;')
    code()
    code('// stop swig from creating/wrapping default ctor/dtor')
    code('%nodefault $classname;')
    code('class $classname')
    if cls._base:
        code(' : public ${{cls._base.cxx_class}}')
    code('{')
    code(' public:')
    cls.export_methods(code)
    code('};')
    for ns in reversed(namespaces):
        code('} // namespace $ns')
    code()
    code('%include "params/$cls.hh"')
# Generate the C++ declaration (.hh file) for this SimObject's
# param struct. Called from src/SConscript.
def cxx_param_decl(cls, code):
    """Write params/<cls>.hh: forward-declare the C++ class, include the
    base param struct and enum headers, and emit the ${cls}Params struct
    with one member per locally declared param/port."""
    # The 'local' attribute restricts us to the params declared in
    # the object itself, not including inherited params (which
    # will also be inherited from the base class's param struct
    # here).
    params = cls._params.local.values()
    ports = cls._ports.local
    try:
        ptypes = [p.ptype for p in params]
    except:
        # debugging aid: show which param's ptype failed to resolve
        print cls, p, p.ptype_str
        print params
        raise
    class_path = cls._value_dict['cxx_class'].split('::')
    code('''\
#ifndef __PARAMS__${cls}__
#define __PARAMS__${cls}__
''')
    # A forward class declaration is sufficient since we are just
    # declaring a pointer.
    for ns in class_path[:-1]:
        code('namespace $ns {')
    code('class $0;', class_path[-1])
    for ns in reversed(class_path[:-1]):
        code('} // namespace $ns')
    code()
    # The base SimObject has a couple of params that get
    # automatically set from Python without being declared through
    # the normal Param mechanism; we slip them in here (needed
    # predecls now, actual declarations below)
    if cls == SimObject:
        code('''
#ifndef PY_VERSION
struct PyObject;
#endif
#include <string>
class EventQueue;
''')
    for param in params:
        param.cxx_predecls(code)
    for port in ports.itervalues():
        port.cxx_predecls(code)
    code()
    if cls._base:
        code('#include "params/${{cls._base.type}}.hh"')
        code()
    for ptype in ptypes:
        if issubclass(ptype, Enum):
            code('#include "enums/${{ptype.__name__}}.hh"')
            code()
    # now generate the actual param struct
    code("struct ${cls}Params")
    if cls._base:
        code(" : public ${{cls._base.type}}Params")
    code("{")
    if not hasattr(cls, 'abstract') or not cls.abstract:
        if 'type' in cls.__dict__:
            code(" ${{cls.cxx_type}} create();")
    code.indent()
    if cls == SimObject:
        code('''
SimObjectParams()
{
extern EventQueue mainEventQueue;
eventq = &mainEventQueue;
}
virtual ~SimObjectParams() {}
std::string name;
PyObject *pyobj;
EventQueue *eventq;
''')
    for param in params:
        param.cxx_decl(code)
    for port in ports.itervalues():
        port.cxx_decl(code)
    code.dedent()
    code('};')
    code()
    code('#endif // __PARAMS__${cls}__')
    return code
# The SimObject class is the root of the special hierarchy. Most of
# the code in this class deals with the configuration hierarchy itself
# (parent/child node relationships).
class SimObject(object):
    # Specify metaclass. Any class inheriting from SimObject will
    # get this metaclass.
    __metaclass__ = MetaSimObject
    # registered type name (see MetaSimObject.__new__)
    type = 'SimObject'
    # abstract classes are never instantiated directly
    abstract = True
@classmethod
def export_method_cxx_predecls(cls, code):
    """C++ headers needed to compile the methods listed in export_methods."""
    code('''
#include <Python.h>
#include "sim/serialize.hh"
#include "sim/sim_object.hh"
''')
@classmethod
def export_method_swig_predecls(cls, code):
    """SWIG includes needed to wrap the methods listed in export_methods."""
    code('''
%include <std_string.i>
''')
@classmethod
def export_methods(cls, code):
    """C++ member declarations exported to Python via SWIG for every
    SimObject (init/drain/resume lifecycle, stats, switch-over)."""
    code('''
enum State {
Running,
Draining,
Drained
};
void init();
void loadState(Checkpoint *cp);
void initState();
void regStats();
void resetStats();
void startup();
unsigned int drain(Event *drain_event);
void resume();
void switchOut();
void takeOverFrom(BaseCPU *cpu);
''')
# Initialize new instance. For objects with SimObject-valued
# children, we need to recursively clone the classes represented
# by those param values as well in a consistent "deep copy"-style
# fashion. That is, we want to make sure that each instance is
# cloned only once, and that if there are multiple references to
# the same original object, we end up with the corresponding
# cloned references all pointing to the same cloned instance.
def __init__(self, **kwargs):
    """Instantiate (or deep-clone, when _ancestor/_memo are passed) a
    SimObject, cloning class-level children, SimObject-valued params and
    port refs, then applying any remaining keyword assignments."""
    ancestor = kwargs.get('_ancestor')
    memo_dict = kwargs.get('_memo')
    if memo_dict is None:
        # prepare to memoize any recursively instantiated objects
        memo_dict = {}
    elif ancestor:
        # memoize me now to avoid problems with recursive calls
        memo_dict[ancestor] = self
    if not ancestor:
        ancestor = self.__class__
    # note: marks the class (or cloned instance) as instantiated even
    # when cloning from an instance
    ancestor._instantiated = True
    # initialize required attributes
    self._parent = None
    self._name = None
    self._ccObject = None # pointer to C++ object
    self._ccParams = None
    self._instantiated = False # really "cloned"
    # Clone children specified at class level. No need for a
    # multidict here since we will be cloning everything.
    # Do children before parameter values so that children that
    # are also param values get cloned properly.
    self._children = {}
    for key,val in ancestor._children.iteritems():
        self.add_child(key, val(_memo=memo_dict))
    # Inherit parameter values from class using multidict so
    # individual value settings can be overridden but we still
    # inherit late changes to non-overridden class values.
    self._values = multidict(ancestor._values)
    # clone SimObject-valued parameters
    for key,val in ancestor._values.iteritems():
        val = tryAsSimObjectOrVector(val)
        if val is not None:
            self._values[key] = val(_memo=memo_dict)
    # clone port references. no need to use a multidict here
    # since we will be creating new references for all ports.
    self._port_refs = {}
    for key,val in ancestor._port_refs.iteritems():
        self._port_refs[key] = val.clone(self, memo_dict)
    # apply attribute assignments from keyword args, if any
    for key,val in kwargs.iteritems():
        setattr(self, key, val)
# "Clone" the current instance by creating another instance of
# this instance's class, but that inherits its parameter values
# and port mappings from the current instance. If we're in a
# "deep copy" recursive clone, check the _memo dict to see if
# we've already cloned this instance.
def __call__(self, **kwargs):
    """Clone this instance; top-level clones are only allowed on a tree
    root, recursive clones are memoized via the _memo dict."""
    memo_dict = kwargs.get('_memo')
    if memo_dict is None:
        # no memo_dict: must be top-level clone operation.
        # this is only allowed at the root of a hierarchy
        if self._parent:
            raise RuntimeError, "attempt to clone object %s " \
                  "not at the root of a tree (parent = %s)" \
                  % (self, self._parent)
        # create a new dict and use that.
        memo_dict = {}
        kwargs['_memo'] = memo_dict
    elif memo_dict.has_key(self):
        # clone already done & memoized
        return memo_dict[self]
    return self.__class__(_ancestor = self, **kwargs)
def _get_port_ref(self, attr):
    """Lazily create and cache this instance's PortRef for port 'attr'."""
    # Return reference that can be assigned to another port
    # via __setattr__. There is only ever one reference
    # object per port, but we create them lazily here.
    ref = self._port_refs.get(attr)
    if not ref:
        ref = self._ports[attr].makeRef(self)
        self._port_refs[attr] = ref
    return ref
def __getattr__(self, attr):
    """Resolve ports, then param values, then children, then fall back
    to the wrapped C++ object's attributes."""
    if self._ports.has_key(attr):
        return self._get_port_ref(attr)
    if self._values.has_key(attr):
        return self._values[attr]
    if self._children.has_key(attr):
        return self._children[attr]
    # If the attribute exists on the C++ object, transparently
    # forward the reference there. This is typically used for
    # SWIG-wrapped methods such as init(), regStats(),
    # resetStats(), startup(), drain(), and
    # resume().
    if self._ccObject and hasattr(self._ccObject, attr):
        return getattr(self._ccObject, attr)
    raise AttributeError, "object '%s' has no attribute '%s'" \
          % (self.__class__.__name__, attr)
# Set attribute (called on foo.attr = value when foo is an
# instance of class cls).
    def __setattr__(self, attr, value):
        """Attribute assignment: private attrs go straight through; ports
        connect; params convert/validate; SimObject values become children."""
        # normal processing for private attributes
        if attr.startswith('_'):
            object.__setattr__(self, attr, value)
            return

        if self._ports.has_key(attr):
            # set up port connection
            self._get_port_ref(attr).connect(value)
            return

        # Structural changes are forbidden once this object has been cloned.
        if isSimObjectOrSequence(value) and self._instantiated:
            raise RuntimeError, \
                  "cannot set SimObject parameter '%s' after\n" \
                  "    instance been cloned %s" % (attr, `self`)

        param = self._params.get(attr)
        if param:
            try:
                # convert() coerces the RHS to the parameter's declared type.
                value = param.convert(value)
            except Exception, e:
                msg = "%s\nError setting param %s.%s to %s\n" % \
                      (e, self.__class__.__name__, attr, value)
                e.args = (msg, )
                raise
            self._values[attr] = value
            # implicitly parent unparented objects assigned as params
            if isSimObjectOrVector(value) and not value.has_parent():
                self.add_child(attr, value)
            return

        # if RHS is a SimObject, it's an implicit child assignment
        if isSimObjectOrSequence(value):
            self.add_child(attr, value)
            return

        # no valid assignment... raise exception
        raise AttributeError, "Class %s has no parameter %s" \
              % (self.__class__.__name__, attr)
# this hack allows tacking a '[0]' onto parameters that may or may
# not be vectors, and always getting the first element (e.g. cpus)
    def __getitem__(self, key):
        """Allow obj[0] on a scalar SimObject so code can treat possibly-vector
        parameters (e.g. cpus) uniformly."""
        if key == 0:
            return self
        raise TypeError, "Non-zero index '%s' to SimObject" % key
# Also implemented by SimObjectVector
    def clear_parent(self, old_parent):
        """Detach from old_parent (must currently be our parent)."""
        assert self._parent is old_parent
        self._parent = None
# Also implemented by SimObjectVector
    def set_parent(self, parent, name):
        """Attach to 'parent' under attribute name 'name'."""
        self._parent = parent
        self._name = name
# Also implemented by SimObjectVector
    def get_name(self):
        """Return this object's name within its parent."""
        return self._name
# Also implemented by SimObjectVector
    def has_parent(self):
        """True if this object is attached somewhere in a hierarchy."""
        return self._parent is not None
# clear out child with given name. This code is not likely to be exercised.
# See comment in add_child.
    def clear_child(self, name):
        """Remove and detach the child registered under 'name'."""
        child = self._children[name]
        child.clear_parent(self)
        del self._children[name]
# Add a new child to this object.
    def add_child(self, name, child):
        """Register 'child' (coerced to SimObject/SimObjectVector) under
        'name', replacing any existing child of that name."""
        child = coerceSimObjectOrVector(child)
        if child.has_parent():
            print "warning: add_child('%s'): child '%s' already has parent" % \
                  (name, child.get_name())
        if self._children.has_key(name):
            # This code path had an undiscovered bug that would make it fail
            # at runtime.  It had been here for a long time and was only
            # exposed by a buggy script.  Changes here will probably not be
            # exercised without specialized testing.
            self.clear_child(name)
        child.set_parent(self, name)
        self._children[name] = child
# Take SimObject-valued parameters that haven't been explicitly
# assigned as children and make them children of the object that
# they were assigned to as a parameter value. This guarantees
# that when we instantiate all the parameter objects we're still
# inside the configuration hierarchy.
    def adoptOrphanParams(self):
        """Make unparented SimObject-valued params children of this object,
        so the whole parameter graph stays inside the config hierarchy."""
        for key,val in self._values.iteritems():
            if not isSimObjectVector(val) and isSimObjectSequence(val):
                # need to convert raw SimObject sequences to
                # SimObjectVector class so we can call has_parent()
                val = SimObjectVector(val)
                self._values[key] = val
            if isSimObjectOrVector(val) and not val.has_parent():
                print "warning: %s adopting orphan SimObject param '%s'" \
                      % (self, key)
                self.add_child(key, val)
    def path(self):
        """Return the dotted path from the root to this object; the 'root'
        prefix itself is elided."""
        if not self._parent:
            return '<orphan %s>' % self.__class__
        ppath = self._parent.path()
        if ppath == 'root':
            return self._name
        return ppath + "." + self._name
    def __str__(self):
        # Display as the hierarchical path.
        return self.path()
    def ini_str(self):
        """Value used when this object appears in a .ini file: its path."""
        return self.path()
def find_any(self, ptype):
if isinstance(self, ptype):
return self, True
found_obj = None
for child in self._children.itervalues():
if isinstance(child, ptype):
if found_obj != None and child != found_obj:
raise AttributeError, \
'parent.any matched more than one: %s %s' % \
(found_obj.path, child.path)
found_obj = child
# search param space
for pname,pdesc in self._params.iteritems():
if issubclass(pdesc.ptype, ptype):
match_obj = self._values[pname]
if found_obj != None and found_obj != match_obj:
raise AttributeError, \
'parent.any matched more than one: %s and %s' % (found_obj.path, match_obj.path)
found_obj = match_obj
return found_obj, found_obj != None
    def find_all(self, ptype):
        """Recursively collect all descendants (children and param values)
        that are instances of ptype.  Returns (list_of_matches, True)."""
        all = {}
        # search children
        for child in self._children.itervalues():
            if isinstance(child, ptype) and not isproxy(child) and \
                    not isNullPointer(child):
                all[child] = True
            if isSimObject(child):
                # also add results from the child itself
                child_all, done = child.find_all(ptype)
                all.update(dict(zip(child_all, [done] * len(child_all))))
        # search param space
        for pname,pdesc in self._params.iteritems():
            if issubclass(pdesc.ptype, ptype):
                match_obj = self._values[pname]
                if not isproxy(match_obj) and not isNullPointer(match_obj):
                    all[match_obj] = True
        return all.keys(), True
    def unproxy(self, base):
        """A concrete SimObject unproxies to itself."""
        return self
    def unproxyParams(self):
        """Resolve any proxy parameter values and proxy port connections
        on this object (ports in sorted order for determinism)."""
        for param in self._params.iterkeys():
            value = self._values.get(param)
            if value != None and isproxy(value):
                try:
                    value = value.unproxy(self)
                except:
                    print "Error in unproxying param '%s' of %s" % \
                          (param, self.path())
                    raise
                # Reassign through __setattr__ so param conversion reruns.
                setattr(self, param, value)

        # Unproxy ports in sorted order so that 'append' operations on
        # vector ports are done in a deterministic fashion.
        port_names = self._ports.keys()
        port_names.sort()
        for port_name in port_names:
            port = self._port_refs.get(port_name)
            if port != None:
                port.unproxy(self)
    def print_ini(self, ini_file):
        """Write this object's .ini section (type, children, params, ports)
        to ini_file, and register it in instanceDict by path."""
        print >>ini_file, '[' + self.path() + ']'       # .ini section header

        instanceDict[self.path()] = self

        if hasattr(self, 'type'):
            print >>ini_file, 'type=%s' % self.type

        if len(self._children.keys()):
            print >>ini_file, 'children=%s' % \
                  ' '.join(self._children[n].get_name() \
                  for n in sorted(self._children.keys()))

        for param in sorted(self._params.keys()):
            value = self._values.get(param)
            if value != None:
                print >>ini_file, '%s=%s' % (param,
                                             self._values[param].ini_str())

        for port_name in sorted(self._ports.keys()):
            port = self._port_refs.get(port_name, None)
            if port != None:
                print >>ini_file, '%s=%s' % (port_name, port.ini_str())

        print >>ini_file        # blank line between objects
# generate a tree of dictionaries expressing all the parameters in the
# instantiated system for use by scripts that want to do power, thermal
# visualization, and other similar tasks
    def get_config_as_dict(self):
        """Return a nested attrdict describing this object's type, params,
        children and ports, for power/thermal/visualization scripts."""
        d = attrdict()
        if hasattr(self, 'type'):
            d.type = self.type
        if hasattr(self, 'cxx_class'):
            d.cxx_class = self.cxx_class
        # Add the name and path of this object to be able to link to
        # the stats
        d.name = self.get_name()
        d.path = self.path()

        for param in sorted(self._params.keys()):
            value = self._values.get(param)
            if value != None:
                try:
                    # Use native type for those supported by JSON and
                    # strings for everything else. skipkeys=True seems
                    # to not work as well as one would hope
                    # NOTE(review): 'None' in this list can never match,
                    # since type() never returns None -- type(None) was
                    # probably intended.  None-valued params therefore
                    # fall through to the str() branch.
                    if type(self._values[param].value) in \
                            [str, unicode, int, long, float, bool, None]:
                        d[param] = self._values[param].value
                    else:
                        d[param] = str(self._values[param])
                except AttributeError:
                    # Param value has no .value attribute; skip it.
                    pass

        for n in sorted(self._children.keys()):
            child = self._children[n]
            # Use the name of the attribute (and not get_name()) as
            # the key in the JSON dictionary to capture the hierarchy
            # in the Python code that assembled this system
            d[n] = child.get_config_as_dict()

        for port_name in sorted(self._ports.keys()):
            port = self._port_refs.get(port_name, None)
            if port != None:
                # Represent each port with a dictionary containing the
                # prominent attributes
                d[port_name] = port.get_config_as_dict()

        return d
    def getCCParams(self):
        """Build (once) and return the C++ params struct for this object,
        populating every declared parameter and port connection count."""
        if self._ccParams:
            return self._ccParams

        cc_params_struct = getattr(m5.internal.params, '%sParams' % self.type)
        cc_params = cc_params_struct()
        cc_params.pyobj = self
        cc_params.name = str(self)

        param_names = self._params.keys()
        param_names.sort()
        for param in param_names:
            value = self._values.get(param)
            if value is None:
                fatal("%s.%s without default or user set value",
                      self.path(), param)

            value = value.getValue()
            if isinstance(self._params[param], VectorParamDesc):
                assert isinstance(value, list)
                vec = getattr(cc_params, param)
                assert not len(vec)
                # SWIG vectors must be filled element by element.
                for v in value:
                    vec.append(v)
            else:
                setattr(cc_params, param, value)

        port_names = self._ports.keys()
        port_names.sort()
        for port_name in port_names:
            port = self._port_refs.get(port_name, None)
            if port != None:
                port_count = len(port)
            else:
                port_count = 0
            setattr(cc_params, 'port_' + port_name + '_connection_count',
                    port_count)
        self._ccParams = cc_params
        return self._ccParams
# Get C++ object corresponding to this object, calling C++ if
# necessary to construct it. Does *not* recursively create
# children.
    def getCCObject(self):
        """Get (creating on first call) the C++ object for this node.
        Does *not* recursively create children.  Detects config cycles
        via the -1 sentinel stored during construction."""
        if not self._ccObject:
            # Make sure this object is in the configuration hierarchy
            if not self._parent and not isRoot(self):
                raise RuntimeError, "Attempt to instantiate orphan node"
            # Cycles in the configuration hierarchy are not supported. This
            # will catch the resulting recursion and stop.
            self._ccObject = -1
            params = self.getCCParams()
            self._ccObject = params.create()
        elif self._ccObject == -1:
            raise RuntimeError, "%s: Cycle found in configuration hierarchy." \
                  % self.path()
        return self._ccObject
    def descendants(self):
        """Generator yielding this object and all descendants, pre-order."""
        yield self
        for child in self._children.itervalues():
            for obj in child.descendants():
                yield obj
# Call C++ to create C++ object corresponding to this object
def createCCObject(self):
self.getCCParams()
self.getCCObject() # force creation
    def getValue(self):
        """Parameter-value protocol: a SimObject's value is its C++ object."""
        return self.getCCObject()
# Create C++ port connections corresponding to the connections in
# _port_refs
    def connectPorts(self):
        """Create the C++ connections for all ports referenced on this node."""
        for portRef in self._port_refs.itervalues():
            portRef.ccConnect()
    def getMemoryMode(self):
        """Return the memory mode of a System object; None for anything else."""
        if not isinstance(self, m5.objects.System):
            return None
        return self._ccObject.getMemoryMode()
    def changeTiming(self, mode):
        """Set the memory mode on a System object; no-op otherwise."""
        if isinstance(self, m5.objects.System):
            # i don't know if there's a better way to do this - calling
            # setMemoryMode directly from self._ccObject results in calling
            # SimObject::setMemoryMode, not the System::setMemoryMode
            self._ccObject.setMemoryMode(mode)
    def takeOverFrom(self, old_cpu):
        """Transfer state from old_cpu's C++ object into this one's (CPU switch)."""
        self._ccObject.takeOverFrom(old_cpu._ccObject)
# Function to provide to C++ so it can look up instances based on paths
def resolveSimObject(name):
    """Look up a SimObject by config path (used from C++) and return its
    C++ object, creating it if needed."""
    obj = instanceDict[name]
    return obj.getCCObject()
def isSimObject(value):
    """True if value is a SimObject instance."""
    return isinstance(value, SimObject)
def isSimObjectClass(value):
    """True if value is the SimObject class or a subclass of it."""
    return issubclass(value, SimObject)
def isSimObjectVector(value):
    """True if value is a SimObjectVector instance."""
    return isinstance(value, SimObjectVector)
def isSimObjectSequence(value):
    """True if value is a non-empty list/tuple whose elements are all
    SimObjects or null pointers."""
    if not isinstance(value, (list, tuple)) or len(value) == 0:
        return False
    return all(isNullPointer(elem) or isSimObject(elem) for elem in value)
def isSimObjectOrSequence(value):
    """True if value is a SimObject or a sequence of SimObjects."""
    return isSimObject(value) or isSimObjectSequence(value)
def isRoot(obj):
    """True if obj is the singleton Root instance."""
    from m5.objects import Root
    return obj and obj is Root.getInstance()
def isSimObjectOrVector(value):
    """True if value is a SimObject or a SimObjectVector."""
    return isSimObject(value) or isSimObjectVector(value)
def tryAsSimObjectOrVector(value):
    """Coerce value to a SimObject/SimObjectVector if possible; else None.

    SimObjects and SimObjectVectors pass through unchanged; raw sequences
    of SimObjects are wrapped in a SimObjectVector.
    """
    if isSimObjectOrVector(value):
        return value
    return SimObjectVector(value) if isSimObjectSequence(value) else None
def coerceSimObjectOrVector(value):
    """Like tryAsSimObjectOrVector, but raises TypeError on failure."""
    value = tryAsSimObjectOrVector(value)
    if value is None:
        raise TypeError, "SimObject or SimObjectVector expected"
    return value
baseClasses = allClasses.copy()
baseInstances = instanceDict.copy()
def clear():
    """Reset the class/instance registries to their import-time snapshots."""
    global allClasses, instanceDict

    allClasses = baseClasses.copy()
    instanceDict = baseInstances.copy()
# __all__ defines the list of symbols that get exported when
# 'from config import *' is invoked. Try to keep this reasonably
# short to avoid polluting other namespaces.
__all__ = [ 'SimObject' ]
|
hammerlab/immuno_research
|
refs/heads/master
|
Mar18_no_mincount.py
|
1
|
# Copyright (c) 2014. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pandas as pd
import sklearn
import sklearn.cross_validation
from epitopes import \
(cri_tumor_antigens, iedb, features, reduced_alphabet, reference)
import eval_dataset
from balanced_ensemble import BalancedEnsembleClassifier
# Validation set: known class-I tumor antigens.
cancer_peptides = cri_tumor_antigens.load_peptides(mhc_class = 1)

# self_peptides = reference.load_peptide_set(nrows = 1000)

# sweep over filtering criteria and n-gram transformation
# parameters and for all parameter combos evaluate
# - cross-validation accuracy
# - cross-validation area under ROC curve
# - accuracy on validation set of cancer peptides
# NOTE(review): these four lists are shadowed by the per-combo accs/aucs
# arrays inside the loop; only the dict `d` accumulates results.
params = []
aucs = []
accs = []
recalls = []
d = {'param' : [], 'auc':[], 'acc':[], 'recall': [], 'combined':[]}

"""
Setting a minimum count for IEDB entries seems to
always yield worse performance
"""

# NOTE(review): best_model/best_vectorizer/best_params are never updated
# below -- presumably leftover from an earlier version.
best_model = None
best_vectorizer = None
best_params = None

# Grid: assay filter x reduced alphabet x max n-gram length.
for assay in ('cytotoxicity', None ):
    for alphabet in ('hp2', 'aromatic2', None):
        for max_ngram in (1, 2, 3):
            # n-grams > 1 only make sense on a reduced alphabet here.
            if alphabet is None and max_ngram > 1:
                continue
            param_str = \
                "Assay %s, ngram %s, alphabet %s" % \
                (assay, max_ngram, alphabet)
            d['param'].append(param_str)
            print param_str
            if alphabet == 'hp2':
                alphabet_dict = reduced_alphabet.hp2
            elif alphabet == 'aromatic2':
                alphabet_dict = reduced_alphabet.aromatic2
            else:
                assert alphabet is None, alphabet
                alphabet_dict = None
            # IEDB T-cell data transformed into n-gram feature vectors.
            X, Y, vectorizer = iedb.load_tcell_ngrams(
                assay_group = assay,
                human = True,
                mhc_class = 1,
                max_ngram = max_ngram,
                reduced_alphabet = alphabet_dict,
                min_count = None,
                return_transformer = True)
            print "Data shape", X.shape, "n_true", np.sum(Y)
            ensemble = BalancedEnsembleClassifier()
            accs = sklearn.cross_validation.cross_val_score(
                ensemble, X, Y, cv = 5)
            print "CV accuracy %0.4f (std %0.4f)" % \
                (np.mean(accs), np.std(accs))
            d['acc'].append(np.mean(accs))
            aucs = sklearn.cross_validation.cross_val_score(
                ensemble, X, Y, cv = 5, scoring='roc_auc')
            print "CV AUC %0.4f (std %0.4f)" % \
                (np.mean(aucs), np.std(aucs))
            d['auc'].append(np.mean(aucs))
            # Refit on all data, then score recall on tumor antigens.
            ensemble.fit(X, Y)
            #X_self = vectorizer.transform(self_peptides)
            #Y_pred = ensemble.predict(X_self)
            #print "Self epitope accuracy %0.4f" % \
            #    (1.0 - np.mean(Y_pred))
            X_test = vectorizer.transform(cancer_peptides)
            Y_pred = ensemble.predict(X_test)
            recall = np.mean(Y_pred)
            print "Tumor antigen accuracy %0.4f" % (recall,)
            d['recall'].append(recall)
            print "---"
            print
            # Combined score: average of CV AUC and tumor-antigen recall.
            combined = (np.mean(aucs) + recall) / 2.0
            d['combined'].append(combined)

df = pd.DataFrame(d)
print df.sort('combined', ascending=False)
|
acshi/osf.io
|
refs/heads/develop
|
api_tests/applications/views/test_application_detail.py
|
6
|
import mock
from nose.tools import * # flake8: noqa
from website.models import ApiOAuth2Application, User
from website.util import api_v2_url
from tests.base import ApiTestCase
from osf_tests.factories import ApiOAuth2ApplicationFactory, AuthUserFactory
def _get_application_detail_route(app):
    """Return the v2 API URL for a single OAuth2 application."""
    return api_v2_url("applications/{}/".format(app.client_id), base_route='/')
def _get_application_list_url():
    """Return the v2 API URL for the OAuth2 application list."""
    return api_v2_url("applications/", base_route='/')
class TestApplicationDetail(ApiTestCase):
    """Tests for the OAuth2 application detail endpoint: permissions,
    CRUD behavior, and JSON-API payload validation."""

    def setUp(self):
        super(TestApplicationDetail, self).setUp()

        self.user1 = AuthUserFactory()
        self.user2 = AuthUserFactory()

        self.user1_app = ApiOAuth2ApplicationFactory(owner=self.user1)
        self.user1_app_url = _get_application_detail_route(self.user1_app)

        # Canonical payloads: one valid, plus variants each missing or
        # corrupting a single JSON-API requirement.
        self.missing_id = {
            'data': {
                'type': 'applications',
                'attributes': {
                    'name': 'A shiny new application',
                    'home_url': 'http://osf.io',
                    'callback_url': 'https://cos.io'
                }
            }
        }
        self.missing_type = {
            'data': {
                'id': self.user1_app.client_id,
                'attributes': {
                    'name': 'A shiny new application',
                    'home_url': 'http://osf.io',
                    'callback_url': 'https://cos.io'
                }
            }
        }
        self.incorrect_id = {
            'data': {
                'id': '12345',
                'type': 'applications',
                'attributes': {
                    'name': 'A shiny new application',
                    'home_url': 'http://osf.io',
                    'callback_url': 'https://cos.io'
                }
            }
        }
        self.incorrect_type = {
            'data': {
                'id': self.user1_app.client_id,
                'type': 'Wrong type.',
                'attributes': {
                    'name': 'A shiny new application',
                    'home_url': 'http://osf.io',
                    'callback_url': 'https://cos.io'
                }
            }
        }
        self.correct = {
            'data': {
                'id': self.user1_app.client_id,
                'type': 'applications',
                'attributes': {
                    'name': 'A shiny new application',
                    'home_url': 'http://osf.io',
                    'callback_url': 'https://cos.io'
                }
            }
        }

    def test_owner_can_view(self):
        res = self.app.get(self.user1_app_url, auth=self.user1.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['attributes']['client_id'], self.user1_app.client_id)

    def test_non_owner_cant_view(self):
        res = self.app.get(self.user1_app_url, auth=self.user2.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_returns_401_when_not_logged_in(self):
        res = self.app.get(self.user1_app_url, expect_errors=True)
        assert_equal(res.status_code, 401)

    @mock.patch('framework.auth.cas.CasClient.revoke_application_tokens')
    def test_owner_can_delete(self, mock_method):
        # BUGFIX: was `mock_method.return_value(True)`, which merely *calls*
        # the mock's return_value and configures nothing.
        mock_method.return_value = True
        res = self.app.delete(self.user1_app_url, auth=self.user1.auth)
        assert_equal(res.status_code, 204)

    def test_non_owner_cant_delete(self):
        res = self.app.delete(self.user1_app_url,
                              auth=self.user2.auth,
                              expect_errors=True)
        assert_equal(res.status_code, 403)

    @mock.patch('framework.auth.cas.CasClient.revoke_application_tokens')
    def test_deleting_application_makes_api_view_inaccessible(self, mock_method):
        mock_method.return_value = True  # BUGFIX: assignment, not call
        self.app.delete(self.user1_app_url, auth=self.user1.auth)
        res = self.app.get(self.user1_app_url, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 404)

    def test_updating_one_field_should_not_blank_others_on_patch_update(self):
        user1_app = self.user1_app
        new_name = "The instance formerly known as Prince"
        res = self.app.patch_json_api(self.user1_app_url,
                                      {'data': {'attributes':
                                           {'name': new_name},
                                       'id': self.user1_app.client_id,
                                       'type': 'applications'
                                      }}, auth=self.user1.auth, expect_errors=True)
        user1_app.reload()
        assert_equal(res.status_code, 200)

        # All fields other than name must survive the partial update.
        assert_dict_contains_subset({'client_id': user1_app.client_id,
                                     'client_secret': user1_app.client_secret,
                                     'owner': user1_app.owner._id,
                                     'name': new_name,
                                     'description': user1_app.description,
                                     'home_url': user1_app.home_url,
                                     'callback_url': user1_app.callback_url
                                     },
                                    res.json['data']['attributes'])

    def test_updating_an_instance_does_not_change_the_number_of_instances(self):
        new_name = "The instance formerly known as Prince"
        res = self.app.patch_json_api(self.user1_app_url,
                                      {'data': {
                                          'attributes': {"name": new_name},
                                          'id': self.user1_app.client_id,
                                          'type': 'applications'}}, auth=self.user1.auth)
        assert_equal(res.status_code, 200)

        list_url = _get_application_list_url()
        res = self.app.get(list_url, auth=self.user1.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json['data']),
                     1)

    @mock.patch('framework.auth.cas.CasClient.revoke_application_tokens')
    def test_deleting_application_flags_instance_inactive(self, mock_method):
        mock_method.return_value = True  # BUGFIX: assignment, not call
        self.app.delete(self.user1_app_url, auth=self.user1.auth)
        self.user1_app.reload()
        assert_false(self.user1_app.is_active)

    def test_update_application(self):
        res = self.app.put_json_api(self.user1_app_url, self.correct, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 200)

    def test_update_application_incorrect_type(self):
        res = self.app.put_json_api(self.user1_app_url, self.incorrect_type, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 409)

    def test_update_application_incorrect_id(self):
        res = self.app.put_json_api(self.user1_app_url, self.incorrect_id, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 409)

    def test_update_application_no_type(self):
        res = self.app.put_json_api(self.user1_app_url, self.missing_type, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 400)

    def test_update_application_no_id(self):
        res = self.app.put_json_api(self.user1_app_url, self.missing_id, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 400)

    def test_update_application_no_attributes(self):
        payload = {'id': self.user1_app.client_id, 'type': 'applications', 'name': 'The instance formerly known as Prince'}
        res = self.app.put_json_api(self.user1_app_url, payload, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 400)

    def test_partial_update_application_incorrect_type(self):
        res = self.app.patch_json_api(self.user1_app_url, self.incorrect_type, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 409)

    def test_partial_update_application_incorrect_id(self):
        res = self.app.patch_json_api(self.user1_app_url, self.incorrect_id, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 409)

    def test_partial_update_application_no_type(self):
        res = self.app.patch_json_api(self.user1_app_url, self.missing_type, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 400)

    def test_partial_update_application_no_id(self):
        res = self.app.patch_json_api(self.user1_app_url, self.missing_id, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 400)

    def test_partial_update_application_no_attributes(self):
        payload = {
            'data':
                {'id': self.user1_app.client_id,
                 'type': 'applications',
                 'name': 'The instance formerly known as Prince'
                 }
        }
        res = self.app.patch_json_api(self.user1_app_url, payload, auth=self.user1.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
|
texttochange/vusion-backend
|
refs/heads/develop
|
transports/africastalking_uganda/__init__.py
|
5
|
# NOTE(review): this is the africastalking_uganda package, yet it re-exports
# the MTech *Kenya* transports -- confirm this aliasing is intentional.
from .mtech_kenya import MTechKenyaTransport, MTechKenyaTransportV2

__all__ = ['MTechKenyaTransport', 'MTechKenyaTransportV2']
|
yupengyan/python-oauth2
|
refs/heads/master
|
oauth2/__init__.py
|
37
|
"""
The MIT License
Copyright (c) 2007 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import urllib
import time
import random
import urlparse
import hmac
import binascii
import httplib2
try:
from urlparse import parse_qs, parse_qsl
except ImportError:
from cgi import parse_qs, parse_qsl
VERSION = '1.0' # Hi Blaine!
HTTP_METHOD = 'GET'
SIGNATURE_METHOD = 'PLAINTEXT'
class Error(RuntimeError):
    """Generic exception class for OAuth failures."""
    def __init__(self, message='OAuth error occurred.'):
        # BUGFIX: default message previously misspelled "occured".
        self._message = message

    @property
    def message(self):
        """A hack to get around the deprecation errors in 2.6."""
        return self._message

    def __str__(self):
        return self._message
class MissingSignature(Error):
    """Raised when a request lacks a required OAuth signature."""
    pass
def build_authenticate_header(realm=''):
    """Optional WWW-Authenticate header (401 error)."""
    header_value = 'OAuth realm="%s"' % realm
    return {'WWW-Authenticate': header_value}
def escape(s):
    """Escape a URL including any /."""
    # '~' stays unescaped per RFC 3986's unreserved character set.
    return urllib.quote(s, safe='~')
def generate_timestamp():
    """Get seconds since epoch (UTC)."""
    now = time.time()
    return int(now)
def generate_nonce(length=8):
    """Generate pseudorandom number."""
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
def generate_verifier(length=8):
    """Generate pseudorandom number."""
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
class Consumer(object):
    """A consumer of OAuth-protected services.

    The OAuth consumer is a "third-party" service that wants to access
    protected resources from an OAuth service provider on behalf of an end
    user. It's kind of the OAuth client.

    Usually a consumer must be registered with the service provider by the
    developer of the consumer software. As part of that process, the service
    provider gives the consumer a *key* and a *secret* with which the consumer
    software can identify itself to the service. The consumer will include its
    key in each request to identify itself, but will use its secret only when
    signing requests, to prove that the request is from that particular
    registered consumer.

    Once registered, the consumer can then use its consumer credentials to ask
    the service provider for a request token, kicking off the OAuth
    authorization process.
    """

    # Public identifier sent with every request.
    key = None
    # Shared secret used only when signing requests.
    secret = None

    def __init__(self, key, secret):
        self.key = key
        self.secret = secret

        if self.key is None or self.secret is None:
            raise ValueError("Key and secret must be set.")

    def __str__(self):
        # Serialize as URL-encoded key/secret pair.
        data = {
            'oauth_consumer_key': self.key,
            'oauth_consumer_secret': self.secret
        }
        return urllib.urlencode(data)
class Token(object):
    """An OAuth credential used to request authorization or a protected
    resource.

    Tokens in OAuth comprise a *key* and a *secret*. The key is included in
    requests to identify the token being used, but the secret is used only in
    the signature, to prove that the requester is who the server gave the
    token to.

    When first negotiating the authorization, the consumer asks for a *request
    token* that the live user authorizes with the service provider. The
    consumer then exchanges the request token for an *access token* that can
    be used to access protected resources.
    """

    key = None
    secret = None
    # OAuth 1.0a callback support (set via set_callback / set_verifier).
    callback = None
    callback_confirmed = None
    verifier = None

    def __init__(self, key, secret):
        self.key = key
        self.secret = secret

        if self.key is None or self.secret is None:
            raise ValueError("Key and secret must be set.")

    def set_callback(self, callback):
        """Record the consumer's callback URL and mark it confirmed."""
        self.callback = callback
        self.callback_confirmed = 'true'

    def set_verifier(self, verifier=None):
        """Set the verification code, generating one if not supplied."""
        if verifier is not None:
            self.verifier = verifier
        else:
            self.verifier = generate_verifier()

    def get_callback_url(self):
        """Return the callback URL with oauth_verifier appended (if both set)."""
        if self.callback and self.verifier:
            # Append the oauth_verifier.
            parts = urlparse.urlparse(self.callback)
            scheme, netloc, path, params, query, fragment = parts[:6]
            if query:
                query = '%s&oauth_verifier=%s' % (query, self.verifier)
            else:
                query = 'oauth_verifier=%s' % self.verifier
            return urlparse.urlunparse((scheme, netloc, path, params,
                query, fragment))
        return self.callback

    def to_string(self):
        """Returns this token as a plain string, suitable for storage.

        The resulting string includes the token's secret, so you should never
        send or store this string where a third party can read it.
        """
        data = {
            'oauth_token': self.key,
            'oauth_token_secret': self.secret,
        }
        if self.callback_confirmed is not None:
            data['oauth_callback_confirmed'] = self.callback_confirmed
        return urllib.urlencode(data)

    @staticmethod
    def from_string(s):
        """Deserializes a token from a string like one returned by
        `to_string()`."""
        if not len(s):
            raise ValueError("Invalid parameter string.")

        params = parse_qs(s, keep_blank_values=False)
        if not len(params):
            raise ValueError("Invalid parameter string.")

        try:
            key = params['oauth_token'][0]
        except Exception:
            raise ValueError("'oauth_token' not found in OAuth request.")

        try:
            secret = params['oauth_token_secret'][0]
        except Exception:
            raise ValueError("'oauth_token_secret' not found in "
                "OAuth request.")

        token = Token(key, secret)
        try:
            token.callback_confirmed = params['oauth_callback_confirmed'][0]
        except KeyError:
            pass # 1.0, no callback confirmed.
        return token

    def __str__(self):
        return self.to_string()
def setter(attr):
    """Decorator turning a setter function into a full property.

    The decorated function becomes the setter; the getter/deleter read
    and remove the value stored under the same name in the instance
    __dict__ (where the setter is expected to have placed it).
    """
    name = attr.__name__

    def getter(self):
        try:
            return self.__dict__[name]
        except KeyError:
            raise AttributeError(name)

    def deleter(self):
        del self.__dict__[name]

    return property(getter, attr, deleter)
class Request(dict):
"""The parameters and information for an HTTP request, suitable for
authorizing with OAuth credentials.
When a consumer wants to access a service's protected resources, it does
so using a signed HTTP request identifying itself (the consumer) with its
key, and providing an access token authorized by the end user to access
those resources.
"""
http_method = HTTP_METHOD
http_url = None
version = VERSION
    def __init__(self, method=HTTP_METHOD, url=None, parameters=None):
        """Initialize with optional HTTP method, URL and initial OAuth
        parameters (Request is a dict of those parameters)."""
        if method is not None:
            self.method = method
        if url is not None:
            self.url = url
        if parameters is not None:
            self.update(parameters)
    @setter
    def url(self, value):
        """Normalize and store the request URL: strip default ports and
        any query/fragment, keeping scheme://netloc/path only."""
        parts = urlparse.urlparse(value)
        scheme, netloc, path = parts[:3]

        # Exclude default port numbers.
        if scheme == 'http' and netloc[-3:] == ':80':
            netloc = netloc[:-3]
        elif scheme == 'https' and netloc[-4:] == ':443':
            netloc = netloc[:-4]

        if scheme != 'http' and scheme != 'https':
            raise ValueError("Unsupported URL %s (%s)." % (value, scheme))
        value = '%s://%s%s' % (scheme, netloc, path)
        self.__dict__['url'] = value
    @setter
    def method(self, value):
        # HTTP methods are stored upper-cased for signature consistency.
        self.__dict__['method'] = value.upper()
    def _get_timestamp_nonce(self):
        """Return the (oauth_timestamp, oauth_nonce) pair from this request."""
        return self['oauth_timestamp'], self['oauth_nonce']
    def get_nonoauth_parameters(self):
        """Get any non-OAuth parameters."""
        return dict([(k, v) for k, v in self.iteritems()
                    if not k.startswith('oauth_')])
    def to_header(self, realm=''):
        """Serialize as a header for an HTTPAuth request."""
        # Only oauth_* parameters belong in the Authorization header.
        oauth_params = ((k, v) for k, v in self.items()
                            if k.startswith('oauth_'))
        stringy_params = ((k, escape(str(v))) for k, v in oauth_params)
        header_params = ('%s="%s"' % (k, v) for k, v in stringy_params)
        params_header = ', '.join(header_params)

        auth_header = 'OAuth realm="%s"' % realm
        if params_header:
            auth_header = "%s, %s" % (auth_header, params_header)

        return {'Authorization': auth_header}
    def to_postdata(self):
        """Serialize as post data for a POST request."""
        return self.encode_postdata(self)
    def encode_postdata(self, data):
        """URL-encode a mapping, expanding sequence values properly."""
        # tell urlencode to deal with sequence values and map them correctly
        # to resulting querystring. for example self["k"] = ["v1", "v2"] will
        # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D
        return urllib.urlencode(data, True)
    def to_url(self):
        """Serialize as a URL for a GET request."""
        return '%s?%s' % (self.url, self.to_postdata())
    def get_parameter(self, parameter):
        """Return the named parameter, raising Error if absent."""
        ret = self.get(parameter)
        if ret is None:
            raise Error('Parameter not found: %s' % parameter)

        return ret
    def get_normalized_parameters(self):
        """Return a string that contains the parameters that must be signed."""
        # oauth_signature itself is excluded from the signature base string.
        items = [(k, v) for k, v in self.items() if k != 'oauth_signature']
        encoded_str = urllib.urlencode(sorted(items), True)
        # Encode signature parameters per Oauth Core 1.0 protocol
        # spec draft 7, section 3.6
        # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6)
        # Spaces must be encoded with "%20" instead of "+"
        return encoded_str.replace('+', '%20')
    def sign_request(self, signature_method, consumer, token):
        """Set the signature parameter to the result of sign."""
        if 'oauth_consumer_key' not in self:
            self['oauth_consumer_key'] = consumer.key

        if token and 'oauth_token' not in self:
            self['oauth_token'] = token.key

        self['oauth_signature_method'] = signature_method.name
        self['oauth_signature'] = signature_method.sign(self, consumer, token)
    @classmethod
    def make_timestamp(cls):
        """Get seconds since epoch (UTC)."""
        return str(int(time.time()))
    @classmethod
    def make_nonce(cls):
        """Generate pseudorandom number."""
        return str(random.randint(0, 100000000))
    @classmethod
    def from_request(cls, http_method, http_url, headers=None, parameters=None,
            query_string=None):
        """Combines multiple parameter sources.

        Merges, in order: explicit parameters, the OAuth Authorization
        header, the query string, and URL query parameters.  Returns a
        Request, or None if no parameters were found anywhere.
        """
        if parameters is None:
            parameters = {}

        # Headers
        if headers and 'Authorization' in headers:
            auth_header = headers['Authorization']
            # Check that the authorization header is OAuth.
            if auth_header[:6] == 'OAuth ':
                auth_header = auth_header[6:]
                # NOTE(review): bare except below swallows all parse errors
                # and re-raises as a generic Error.
                try:
                    # Get the parameters from the header.
                    header_params = cls._split_header(auth_header)
                    parameters.update(header_params)
                except:
                    raise Error('Unable to parse OAuth parameters from '
                        'Authorization header.')

        # GET or POST query string.
        if query_string:
            query_params = cls._split_url_string(query_string)
            parameters.update(query_params)

        # URL parameters.
        param_str = urlparse.urlparse(http_url)[4] # query
        url_params = cls._split_url_string(param_str)
        parameters.update(url_params)

        if parameters:
            return cls(http_method, http_url, parameters)

        return None
@classmethod
def from_consumer_and_token(cls, consumer, token=None,
        http_method=HTTP_METHOD, http_url=None, parameters=None):
    """Build a request pre-populated with the standard oauth_* defaults.

    Fills in oauth_consumer_key, oauth_timestamp, oauth_nonce and
    oauth_version without overwriting anything supplied in *parameters*.
    """
    if not parameters:
        parameters = {}
    defaults = {
        'oauth_consumer_key': consumer.key,
        'oauth_timestamp': cls.make_timestamp(),
        'oauth_nonce': cls.make_nonce(),
        'oauth_version': cls.version,
    }
    # Caller-supplied parameters take precedence over the defaults.
    defaults.update(parameters)
    parameters = defaults
    if token:
        parameters['oauth_token'] = token.key
    # Fix: use cls instead of the hard-coded Request class so subclasses
    # get instances of themselves — consistent with from_request and
    # from_token_and_callback above/below.
    return cls(http_method, http_url, parameters)
@classmethod
def from_token_and_callback(cls, token, callback=None,
        http_method=HTTP_METHOD, http_url=None, parameters=None):
    """Build a request carrying *token* (and optionally a callback URL).

    NOTE(review): when a *parameters* dict is passed in, it is mutated
    in place (oauth_token/oauth_callback are written into it).
    """
    if not parameters:
        parameters = {}
    parameters['oauth_token'] = token.key
    if callback:
        parameters['oauth_callback'] = callback
    return cls(http_method, http_url, parameters)
@staticmethod
def _split_header(header):
    """Turn Authorization: header into parameters."""
    params = {}
    # NOTE(review): splitting on ',' breaks if a quoted value contains a
    # comma; acceptable for typical OAuth params, which are URL-escaped.
    parts = header.split(',')
    for param in parts:
        # Ignore realm parameter.
        # NOTE(review): this is a substring test, so any parameter whose
        # text contains 'realm' is skipped, not just the realm itself.
        if param.find('realm') > -1:
            continue
        # Remove whitespace.
        param = param.strip()
        # Split key-value.
        param_parts = param.split('=', 1)
        # Remove quotes and unescape the value.
        params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
    return params
@staticmethod
def _split_url_string(param_str):
    """Turn URL string into parameters."""
    # parse_qs yields {key: [value, ...]}; flatten to the first value.
    parameters = parse_qs(param_str, keep_blank_values=False)
    # Python 2 iteritems(); safe while mutating because only existing
    # keys are reassigned, never added or removed.
    for k, v in parameters.iteritems():
        parameters[k] = urllib.unquote(v[0])
    return parameters
class Server(object):
    """A skeletal implementation of a service provider, providing protected
    resources to requests from authorized consumers.
    This class implements the logic to check requests for authorization. You
    can use it with your web server or web framework to protect certain
    resources with OAuth.
    """
    timestamp_threshold = 300  # In seconds, five minutes.
    version = VERSION
    # Mapping of signature-method name -> SignatureMethod instance.
    signature_methods = None

    def __init__(self, signature_methods=None):
        self.signature_methods = signature_methods or {}

    def add_signature_method(self, signature_method):
        """Register *signature_method* under its name; returns the mapping."""
        self.signature_methods[signature_method.name] = signature_method
        return self.signature_methods

    def verify_request(self, request, consumer, token):
        """Verifies an api call and checks all the parameters."""
        # Raises Error on version mismatch; return value is otherwise unused.
        version = self._get_version(request)
        self._check_signature(request, consumer, token)
        parameters = request.get_nonoauth_parameters()
        return parameters

    def build_authenticate_header(self, realm=''):
        """Optional support for the authenticate header."""
        return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}

    def _get_version(self, request):
        """Verify the correct version request for this server."""
        try:
            version = request.get_parameter('oauth_version')
        # NOTE(review): bare except — a missing parameter silently falls
        # back to the default VERSION.
        except:
            version = VERSION
        if version and version != self.version:
            raise Error('OAuth version %s not supported.' % str(version))
        return version

    def _get_signature_method(self, request):
        """Figure out the signature with some defaults."""
        try:
            signature_method = request.get_parameter('oauth_signature_method')
        except:
            signature_method = SIGNATURE_METHOD
        try:
            # Get the signature method object.
            signature_method = self.signature_methods[signature_method]
        except:
            signature_method_names = ', '.join(self.signature_methods.keys())
            raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names))
        return signature_method

    def _get_verifier(self, request):
        return request.get_parameter('oauth_verifier')

    def _check_signature(self, request, consumer, token):
        """Validate timestamp and signature; raises Error/MissingSignature."""
        timestamp, nonce = request._get_timestamp_nonce()
        self._check_timestamp(timestamp)
        signature_method = self._get_signature_method(request)
        try:
            signature = request.get_parameter('oauth_signature')
        except:
            raise MissingSignature('Missing oauth_signature.')
        # Validate the signature.
        valid = signature_method.check(request, consumer, token, signature)
        if not valid:
            key, base = signature_method.signing_base(request, consumer, token)
            raise Error('Invalid signature. Expected signature base '
                'string: %s' % base)
        # NOTE(review): result unused — re-signs the request after a
        # successful check; looks like dead code.
        built = signature_method.sign(request, consumer, token)

    def _check_timestamp(self, timestamp):
        """Verify that timestamp is recentish."""
        timestamp = int(timestamp)
        now = int(time.time())
        lapsed = now - timestamp
        if lapsed > self.timestamp_threshold:
            raise Error('Expired timestamp: given %d and now %s has a '
                'greater difference than threshold %d' % (timestamp, now, self.timestamp_threshold))
class Client(httplib2.Http):
    """OAuthClient is a worker to attempt to execute a request."""

    def __init__(self, consumer, token=None, cache=None, timeout=None,
            proxy_info=None):
        # Fail fast on wrong types instead of at request time.
        if consumer is not None and not isinstance(consumer, Consumer):
            raise ValueError("Invalid consumer.")
        if token is not None and not isinstance(token, Token):
            raise ValueError("Invalid token.")
        self.consumer = consumer
        self.token = token
        # Default signing scheme; replace via set_signature_method().
        self.method = SignatureMethod_HMAC_SHA1()
        httplib2.Http.__init__(self, cache=cache, timeout=timeout,
            proxy_info=proxy_info)

    def set_signature_method(self, method):
        """Replace the signing scheme; must be a SignatureMethod instance."""
        if not isinstance(method, SignatureMethod):
            raise ValueError("Invalid signature method.")
        self.method = method

    def request(self, uri, method="GET", body=None, headers=None,
            redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None,
            force_auth_header=False):
        """Sign and send an HTTP request via httplib2.

        With force_auth_header=True the OAuth parameters are always sent
        in the Authorization header instead of the body/query string.
        """
        if not isinstance(headers, dict):
            headers = {}
        # Collect existing request parameters so they are included in the
        # signature base string.
        if body and method == "POST":
            parameters = dict(parse_qsl(body))
        elif method == "GET":
            parsed = urlparse.urlparse(uri)
            parameters = parse_qs(parsed.query)
        else:
            parameters = None
        req = Request.from_consumer_and_token(self.consumer, token=self.token,
            http_method=method, http_url=uri, parameters=parameters)
        req.sign_request(self.method, self.consumer, self.token)
        if force_auth_header:
            # ensure we always send Authorization
            headers.update(req.to_header())
        if method == "POST":
            if not force_auth_header:
                body = req.to_postdata()
            else:
                body = req.encode_postdata(req.get_nonoauth_parameters())
            headers['Content-Type'] = 'application/x-www-form-urlencoded'
        elif method == "GET":
            if not force_auth_header:
                uri = req.to_url()
        else:
            if not force_auth_header:
                # don't call update twice.
                headers.update(req.to_header())
        return httplib2.Http.request(self, uri, method=method, body=body,
            headers=headers, redirections=redirections,
            connection_type=connection_type)
class SignatureMethod(object):
    """A way of signing requests.

    The OAuth protocol lets consumers and service providers pick a way to
    sign requests. This interface shows the methods expected by the other
    `oauth` modules for signing requests. Subclass it and implement its
    methods to provide a new way to sign requests.
    """

    def signing_base(self, request, consumer, token):
        """Calculates the string that needs to be signed.

        Returns a 2-tuple of (signing key, message to sign); the message
        is also useful in error output when clients debug their software.
        """
        raise NotImplementedError

    def sign(self, request, consumer, token):
        """Returns the signature for the given request.

        Implementations should build the message via `signing_base()` so
        the base string is available for debugging.
        """
        raise NotImplementedError

    def check(self, request, consumer, token, signature):
        """Return whether *signature* matches what we would have produced."""
        return self.sign(request, consumer, token) == signature
class SignatureMethod_HMAC_SHA1(SignatureMethod):
    name = 'HMAC-SHA1'

    def signing_base(self, request, consumer, token):
        """Return (key, base string) for HMAC-SHA1 signing."""
        sig = (
            escape(request.method),
            escape(request.url),
            escape(request.get_normalized_parameters()),
        )
        # Key is "consumer_secret&token_secret"; token part is empty
        # when no token is supplied.
        key = '%s&' % escape(consumer.secret)
        if token:
            key += escape(token.secret)
        raw = '&'.join(sig)
        return key, raw

    def sign(self, request, consumer, token):
        """Builds the base signature string."""
        key, raw = self.signing_base(request, consumer, token)
        # HMAC object.
        # NOTE(review): Python 2 only — hmac.new() is handed str objects;
        # on Python 3 key/raw would need to be bytes.
        try:
            import hashlib  # 2.5
            hashed = hmac.new(key, raw, hashlib.sha1)
        except ImportError:
            import sha  # Deprecated
            hashed = hmac.new(key, raw, sha)
        # Calculate the digest base 64 (strip the trailing newline).
        return binascii.b2a_base64(hashed.digest())[:-1]
class SignatureMethod_PLAINTEXT(SignatureMethod):
    name = 'PLAINTEXT'

    def signing_base(self, request, consumer, token):
        """Concatenates the consumer key and secret with the token's
        secret."""
        base = '%s&' % escape(consumer.secret)
        if token:
            base = base + escape(token.secret)
        # For PLAINTEXT the key and the message are the same string.
        return base, base

    def sign(self, request, consumer, token):
        """The PLAINTEXT signature is the signing base itself."""
        key, raw = self.signing_base(request, consumer, token)
        return raw
|
xiaoshaozi52/ansible
|
refs/heads/devel
|
lib/ansible/parsing/utils/__init__.py
|
7690
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
hechaoyuyu/swinst
|
refs/heads/master
|
src/wubi/frontends/win32/cd_finish_page.py
|
2
|
# Copyright (c) 2008 Agostino Russo
#
# Written by Agostino Russo <agostino.russo@gmail.com>
#
# This file is part of Wubi the Win32 Ubuntu Installer.
#
# Wubi is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of
# the License, or (at your option) any later version.
#
# Wubi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from winui import ui
from page import Page
import logging
log = logging.getLogger("WinuiCDFinishPage")
class CDFinishPage(Page):
    """Final wizard page for the Live CD flow: prompts for reboot choice."""

    def on_init(self):
        """Build the page: banner image, navigation buttons, radio options."""
        Page.on_init(self)
        self.set_background_color(255,255,255)
        self.insert_vertical_image("%s-vertical.bmp" % self.info.cd_distro.name)
        #navigation
        self.insert_navigation(_("< Back"), _("Finish"), _("Cancel"), default=2)
        self.navigation.button1.on_click = self.on_back
        self.navigation.button2.on_click = self.on_finish
        self.navigation.button3.on_click = self.on_cancel
        #main container
        self.insert_main()
        self.main.set_background_color(255,255,255)
        self.main.title = ui.Label(self.main, 40, 20, self.main.width - 80, 60, _("Reboot required"))
        self.main.title.set_font(size=20, bold=True, family="Arial")
        txt = _("To start the Live CD you need to reboot your machine leaving the CD in the tray. If your machine cannot boot from the CD, the last option should work in most cases.")
        self.main.label = ui.Label(self.main, 40, 90, self.main.width - 80, 40, txt)
        # "Reboot now" option is intentionally disabled (see on_finish).
        #self.main.reboot_now = ui.RadioButton(self.main, 60, 150, self.main.width - 100, 20, _("Reboot now"))
        self.main.reboot_later = ui.RadioButton(self.main, 60, 180, self.main.width - 100, 20, _("I want to manually reboot Later"))
        self.main.cd_boot = ui.RadioButton(self.main, 60, 210, self.main.width - 100, 20, _("Help me to boot from CD"))
        # Default selection: manual reboot later.
        self.main.reboot_later.set_check(True)

    def on_finish(self):
        """Apply the selected option: quit now, or schedule cd_boot task."""
        if self.main.reboot_later.is_checked():
            self.application.quit()
        #elif self.main.reboot_now.is_checked():
        #    self.info.run_task = "reboot"
        elif self.main.cd_boot.is_checked():
            self.info.run_task = "cd_boot"
        self.frontend.stop()

    def on_back(self):
        """Return to the CD menu page."""
        self.frontend.show_page(self.frontend.cd_menu_page)

    def on_cancel(self):
        """Abort the wizard."""
        self.frontend.cancel()
|
rajalokan/nova
|
refs/heads/master
|
nova/db/sqlalchemy/api_migrations/migrate_repo/versions/006_build_request.py
|
25
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.changeset.constraint import ForeignKeyConstraint
from migrate import UniqueConstraint
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import dialects
from sqlalchemy import Enum
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import Text
def InetSmall():
    """Column type: String(39) generally, but a native INET on PostgreSQL."""
    generic = String(length=39)
    return generic.with_variant(dialects.postgresql.INET(), 'postgresql')
def upgrade(migrate_engine):
    """Create the build_requests table (API DB migration 006).

    NOTE: this is an applied schema migration — column definitions must
    not be altered after release.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    # Reflect request_specs so the FK below can reference its id column.
    request_specs = Table('request_specs', meta, autoload=True)
    build_requests = Table('build_requests', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('request_spec_id', Integer, nullable=False),
        Column('project_id', String(length=255), nullable=False),
        Column('user_id', String(length=255), nullable=False),
        Column('display_name', String(length=255)),
        Column('instance_metadata', Text),
        Column('progress', Integer),
        Column('vm_state', String(length=255)),
        Column('task_state', String(length=255)),
        Column('image_ref', String(length=255)),
        Column('access_ip_v4', InetSmall()),
        Column('access_ip_v6', InetSmall()),
        Column('info_cache', Text),
        Column('security_groups', Text, nullable=False),
        Column('config_drive', Boolean, default=False, nullable=False),
        Column('key_name', String(length=255)),
        Column('locked_by', Enum('owner', 'admin',
                                 name='build_requests0locked_by')),
        # One build request per request spec.
        UniqueConstraint('request_spec_id',
                         name='uniq_build_requests0request_spec_id'),
        Index('build_requests_project_id_idx', 'project_id'),
        ForeignKeyConstraint(columns=['request_spec_id'],
                             refcolumns=[request_specs.c.id]),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )
    # checkfirst makes the migration idempotent if the table exists.
    build_requests.create(checkfirst=True)
|
turbokongen/home-assistant
|
refs/heads/dev
|
homeassistant/components/speedtestdotnet/__init__.py
|
12
|
"""Support for testing internet speed via Speedtest.net."""
from datetime import timedelta
import logging
import speedtest
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_MONITORED_CONDITIONS,
CONF_SCAN_INTERVAL,
EVENT_HOMEASSISTANT_STARTED,
)
from homeassistant.core import CoreState, callback
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
CONF_MANUAL,
CONF_SERVER_ID,
DEFAULT_SCAN_INTERVAL,
DEFAULT_SERVER,
DOMAIN,
SENSOR_TYPES,
SPEED_TEST_SERVICE,
)
_LOGGER = logging.getLogger(__name__)

# Legacy YAML configuration schema; async_setup below imports any YAML
# config into a config entry.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Optional(CONF_SERVER_ID): cv.positive_int,
                vol.Optional(
                    CONF_SCAN_INTERVAL, default=timedelta(minutes=DEFAULT_SCAN_INTERVAL)
                ): cv.positive_time_period,
                vol.Optional(CONF_MANUAL, default=False): cv.boolean,
                vol.Optional(
                    CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)
                ): vol.All(cv.ensure_list, [vol.In(list(SENSOR_TYPES))]),
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
def server_id_valid(server_id):
    """Check if server_id is valid."""
    try:
        client = speedtest.Speedtest()
        client.get_servers([int(server_id)])
    except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers):
        # Unknown server id or the config fetch itself failed.
        return False
    else:
        return True
async def async_setup(hass, config):
    """Import integration from config."""
    # Forward any YAML configuration to the config-entry import flow;
    # setup proper happens in async_setup_entry.
    if DOMAIN in config:
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
            )
        )
    return True
async def async_setup_entry(hass, config_entry):
    """Set up the Speedtest.net component."""
    coordinator = SpeedTestDataCoordinator(hass, config_entry)
    await coordinator.async_setup()

    async def _enable_scheduled_speedtests(*_):
        """Activate the data update coordinator."""
        coordinator.update_interval = timedelta(
            minutes=config_entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
        )
        await coordinator.async_refresh()

    if not config_entry.options[CONF_MANUAL]:
        if hass.state == CoreState.running:
            # HA is already up: run the first test now and fail setup
            # (retry later) if it does not succeed.
            await _enable_scheduled_speedtests()
            if not coordinator.last_update_success:
                raise ConfigEntryNotReady
        else:
            # Running a speed test during startup can prevent
            # integrations from being able to setup because it
            # can saturate the network interface.
            hass.bus.async_listen_once(
                EVENT_HOMEASSISTANT_STARTED, _enable_scheduled_speedtests
            )
    hass.data[DOMAIN] = coordinator
    hass.async_create_task(
        hass.config_entries.async_forward_entry_setup(config_entry, "sensor")
    )
    return True
async def async_unload_entry(hass, config_entry):
    """Unload SpeedTest Entry from config_entry."""
    coordinator = hass.data[DOMAIN]
    # Drop the manual-trigger service, then detach the options listener.
    hass.services.async_remove(DOMAIN, SPEED_TEST_SERVICE)
    coordinator.async_unload()
    await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
    hass.data.pop(DOMAIN)
    return True
class SpeedTestDataCoordinator(DataUpdateCoordinator):
    """Get the latest data from speedtest.net."""

    def __init__(self, hass, config_entry):
        """Initialize the data object."""
        self.hass = hass
        self.config_entry = config_entry
        # speedtest.Speedtest client; created lazily in async_setup().
        self.api = None
        # Display-name -> server-record mapping built by update_servers().
        self.servers = {}
        # Unsubscribe callback for the options-update listener.
        self._unsub_update_listener = None
        super().__init__(
            self.hass,
            _LOGGER,
            name=DOMAIN,
            update_method=self.async_update,
        )

    def update_servers(self):
        """Update list of test servers."""
        try:
            server_list = self.api.get_servers()
        except speedtest.ConfigRetrievalError:
            # Best-effort: keep the previous server list on failure.
            _LOGGER.debug("Error retrieving server list")
            return
        self.servers[DEFAULT_SERVER] = {}
        for server in sorted(
            server_list.values(),
            key=lambda server: server[0]["country"] + server[0]["sponsor"],
        ):
            self.servers[
                f"{server[0]['country']} - {server[0]['sponsor']} - {server[0]['name']}"
            ] = server[0]

    def update_data(self):
        """Get the latest data from speedtest.net."""
        # Blocking; always executed via async_add_executor_job.
        self.update_servers()
        self.api.closest.clear()
        if self.config_entry.options.get(CONF_SERVER_ID):
            server_id = self.config_entry.options.get(CONF_SERVER_ID)
            self.api.get_servers(servers=[server_id])
        self.api.get_best_server()
        _LOGGER.debug(
            "Executing speedtest.net speed test with server_id: %s", self.api.best["id"]
        )
        self.api.download()
        self.api.upload()
        return self.api.results.dict()

    async def async_update(self, *_):
        """Update Speedtest data."""
        try:
            return await self.hass.async_add_executor_job(self.update_data)
        except (speedtest.ConfigRetrievalError, speedtest.NoMatchedServers) as err:
            raise UpdateFailed from err

    async def async_set_options(self):
        """Set options for entry."""
        # Migrate pre-options config entries: move the option-like keys
        # out of entry.data into entry.options.
        if not self.config_entry.options:
            data = {**self.config_entry.data}
            options = {
                CONF_SCAN_INTERVAL: data.pop(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL),
                CONF_MANUAL: data.pop(CONF_MANUAL, False),
                CONF_SERVER_ID: str(data.pop(CONF_SERVER_ID, "")),
            }
            self.hass.config_entries.async_update_entry(
                self.config_entry, data=data, options=options
            )

    async def async_setup(self):
        """Set up SpeedTest."""
        try:
            self.api = await self.hass.async_add_executor_job(speedtest.Speedtest)
        except speedtest.ConfigRetrievalError as err:
            raise ConfigEntryNotReady from err

        async def request_update(call):
            """Request update."""
            await self.async_request_refresh()

        await self.async_set_options()
        await self.hass.async_add_executor_job(self.update_servers)
        self.hass.services.async_register(DOMAIN, SPEED_TEST_SERVICE, request_update)
        self._unsub_update_listener = self.config_entry.add_update_listener(
            options_updated_listener
        )

    @callback
    def async_unload(self):
        """Unload the coordinator."""
        if not self._unsub_update_listener:
            return
        self._unsub_update_listener()
        self._unsub_update_listener = None
async def options_updated_listener(hass, entry):
    """Handle options update."""
    coordinator = hass.data[DOMAIN]
    if entry.options[CONF_MANUAL]:
        # Manual mode disables the scheduled refresh entirely.
        coordinator.update_interval = None
        return
    coordinator.update_interval = timedelta(
        minutes=entry.options[CONF_SCAN_INTERVAL]
    )
    await coordinator.async_request_refresh()
|
RefugeeMatchmaking/HackZurich
|
refs/heads/master
|
refugee_matchmaking/users/tests.py
|
1
|
from django.test import TestCase
from django.test import Client
from .models import *
# Create your tests here.
# NOTE(review): this module-level Client is never used below — the test
# methods all go through TestCase's self.client instead.
client=Client()
#Test to check if the input form is working correctly
class Matchmaking_Test(TestCase):
    """POST-driven smoke tests for the matchmaking submission form.

    NOTE(review): test_scoring and test_language make no assertions —
    they only exercise the view; failures surface solely as exceptions.
    """

    def test_user_submission(self):
        """Submit one profile; a None context implies a redirect response."""
        resp=self.client.post('/',{'refugee_or_local': 'L', 'first_name': 'PJtest', 'last_name': 'JayathissaTest', 'location':'Zurich', 'occupation':'Architect', 'about':'guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'A', 'language_1':'en', 'language_2':'es', 'language_3':'pl','birthdate':'1988-09-27', 'social_media':'http://www.example.com' })
        print(resp.context)
        self.assertEqual(resp.context, None)

    def test_scoring(self):
        """Submit a mix of locals and refugees to drive the scoring path."""
        resp=self.client.post('/',{'refugee_or_local': 'L', 'first_name': 'PJtest', 'last_name': 'JayathissaTest', 'location':'Zurich', 'occupation':'Architect', 'about':'guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'A', 'language_1':'es', 'language_2':'en', 'language_3':'ger','birthdate':'1988-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'L', 'first_name': 'Mike', 'last_name': 'Ong', 'location':'Berlin', 'occupation':'Architect', 'about':'guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'S', 'language_1':'es', 'language_2':'en', 'language_3':'pl', 'birthdate':'1989-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'R', 'first_name': 'Karim', 'last_name': 'karim', 'location':'Zurich', 'occupation':'Architect', 'about':'guitar, baseball', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'A', 'language_1':'en', 'language_2':'en', 'language_3':'en', 'birthdate':'1984-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'R', 'first_name': 'Ali', 'last_name': 'aladin', 'location':'Rome', 'occupation':'Architect', 'about':'violin, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'F', 'gender_preference':'S', 'language_1':'en', 'language_2':'en', 'language_3':'en', 'birthdate':'1985-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'L', 'first_name': 'Ognyan', 'last_name': 'theognyan', 'location':'Zurich', 'occupation':'Architect', 'about':'running, guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'A', 'language_1':'en', 'language_2':'en', 'language_3':'en', 'birthdate':'1982-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'L', 'first_name': 'Karl', 'last_name': 'Wruck', 'location':'Bern', 'occupation':'Architect', 'about':'guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'F', 'gender_preference':'A', 'language_1':'en', 'language_2':'en', 'language_3':'en', 'birthdate':'1985-09-27', 'social_media':'http://www.example.com' })
        resp=self.client.post('/',{'refugee_or_local': 'R', 'first_name': 'Sara', 'last_name': 'Akhbar', 'location':'Rapperswil', 'occupation':'Architect', 'about':'guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'A', 'language_1':'es', 'language_2':'en', 'language_3':'es', 'birthdate':'1982-09-27', 'social_media':'http://www.example.com' })

    def test_language(self):
        """Submit profiles with varied/empty language fields."""
        resp=self.client.post('/',{'refugee_or_local': 'L', 'first_name': 'PJtest', 'last_name': 'JayathissaTest', 'location':'Zurich', 'occupation':'Architect', 'about':'guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'A', 'language_1':'es', 'language_2':'en', 'language_3':'','birthdate':'1988-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'L', 'first_name': 'Mike', 'last_name': 'Ong', 'location':'Berlin', 'occupation':'Architect', 'about':'guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'S', 'language_1':'es', 'language_2':'en', 'language_3':'', 'birthdate':'1989-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'R', 'first_name': 'Karim', 'last_name': 'karim', 'location':'Zurich', 'occupation':'Architect', 'about':'guitar, baseball', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'A', 'language_1':'en', 'language_2':'en', 'language_3':'en', 'birthdate':'1984-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'R', 'first_name': 'Ali', 'last_name': 'aladin', 'location':'Rome', 'occupation':'Architect', 'about':'violin, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'F', 'gender_preference':'S', 'language_1':'de', 'language_2':'de', 'language_3':'de', 'birthdate':'1985-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'L', 'first_name': 'Ognyan', 'last_name': 'theognyan', 'location':'Zurich', 'occupation':'Architect', 'about':'running, guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'A', 'language_1':'en', 'language_2':'en', 'language_3':'en', 'birthdate':'1982-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'L', 'first_name': 'Karl', 'last_name': 'Wruck', 'location':'Bern', 'occupation':'Architect', 'about':'guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'F', 'gender_preference':'A', 'language_1':'en', 'language_2':'en', 'language_3':'en', 'birthdate':'1985-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'R', 'first_name': 'Ali', 'last_name': 'aladin', 'location':'Rome', 'occupation':'Architect', 'about':'violin, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'F', 'gender_preference':'S', 'language_1':'it', 'language_2':'it', 'language_3':'it', 'birthdate':'1985-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'L', 'first_name': 'Ognyan', 'last_name': 'theognyan', 'location':'Zurich', 'occupation':'Architect', 'about':'running, guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'A', 'language_1':'it', 'language_2':'it', 'language_3':'de', 'birthdate':'1982-09-27', 'social_media':'http://www.example.com' })
        self.client.post('/',{'refugee_or_local': 'L', 'first_name': 'Karl', 'last_name': 'Wruck', 'location':'Bern', 'occupation':'Architect', 'about':'guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'F', 'gender_preference':'A', 'language_1':'en', 'language_2':'en', 'language_3':'en', 'birthdate':'1985-09-27', 'social_media':'http://www.example.com' })
        resp=self.client.post('/',{'refugee_or_local': 'R', 'first_name': 'Sara', 'last_name': 'Akhbar', 'location':'Rapperswil', 'occupation':'Architect', 'about':'guitar, tennis', 'email':'p.jayatthissa@gmail.com', 'gender':'M', 'gender_preference':'A', 'language_1':'es', 'language_2':'en', 'language_3':'es', 'birthdate':'1982-09-27', 'social_media':'http://www.example.com' })
|
kohnle-lernmodule/palama
|
refs/heads/master
|
exe/__init__.py
|
14
|
# ===========================================================================
# __init__.py
# Copyright 2004-2006, University of Auckland
#
# This is a placeholder
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
This is the main module for the eXe project
"""
|
userzimmermann/robotframework
|
refs/heads/python3
|
utest/model/test_itemlist.py
|
1
|
from six import text_type as unicode
import unittest
from robot.utils.asserts import (assert_equal, assert_true, assert_raises,
assert_raises_with_msg)
from robot.model.itemlist import ItemList
class Object(object):
    """New-style helper item type for the ItemList tests."""
    # Class-level default; test_common_attrs expects ItemList to set it to 2.
    attr = 1

    def __init__(self, id=None):
        self.id = id


class OldStyle:
    """Old-style (classic) class — used for type-mismatch error messages."""
    pass


class CustomItems(ItemList):
    """ItemList subclass; used to check subclass behavior is preserved."""
    pass
class TestItemLists(unittest.TestCase):
def test_create_items(self):
items = ItemList(str)
item = items.create(object=1)
assert_true(isinstance(item, str))
assert_equal(item, '1')
assert_equal(list(items), [item])
def test_create_with_args_and_kwargs(self):
class Item(object):
def __init__(self, arg1, arg2):
self.arg1 = arg1
self.arg2 = arg2
items = ItemList(Item)
item = items.create('value 1', arg2='value 2')
assert_equal(item.arg1, 'value 1')
assert_equal(item.arg2, 'value 2')
assert_equal(list(items), [item])
def test_append_and_extend(self):
items = ItemList(int)
items.append(1)
items.append(2)
items.extend((3, 4))
assert_equal(list(items), [1, 2, 3, 4])
def test_only_matching_types_can_be_added(self):
assert_raises_with_msg(TypeError,
'Only int objects accepted, got str.',
ItemList(int).append, 'not integer')
assert_raises_with_msg(TypeError,
'Only OldStyle objects accepted, got Object.',
ItemList(OldStyle).append, Object())
assert_raises_with_msg(TypeError,
'Only Object objects accepted, got OldStyle.',
ItemList(Object).append, OldStyle())
def test_common_attrs(self):
item1 = Object()
item2 = Object()
parent = object()
items = ItemList(Object, {'attr': 2, 'parent': parent}, [item1])
items.append(item2)
assert_true(item1.parent is parent)
assert_equal(item1.attr, 2)
assert_true(item2.parent is parent)
assert_equal(item2.attr, 2)
assert_equal(list(items), [item1, item2])
def test_getitem(self):
item1 = object()
item2 = object()
items = ItemList(object, items=[item1, item2])
assert_true(items[0] is item1)
assert_true(items[1] is item2)
assert_true(items[-1] is item2)
def test_getitem_slice(self):
items = ItemList(int, items=range(10))
sub = items[:5]
assert_true(isinstance(sub, ItemList))
assert_equal(list(sub), list(range(0, 5)))
assert_equal(list(items), list(range(10)))
sub.append(5)
assert_equal(list(sub), list(range(0, 6)))
assert_equal(list(items), list(range(10)))
backwards = items[::-1]
assert_true(isinstance(backwards, ItemList))
assert_equal(list(backwards), list(reversed(items)))
empty = items[100:]
assert_true(isinstance(empty, ItemList))
assert_equal(list(empty), [])
def test_index(self):
items = ItemList(str, items=('first', 'second'))
assert_equal(items.index('first'), 0)
assert_equal(items.index('second'), 1)
assert_raises(ValueError, items.index, 'nonex')
    def test_index_with_start_and_stop(self):
        # Exhaustively compare index(value, start[, stop]) against the
        # behaviour of the equivalent plain list for every valid window.
        numbers = [0, 1, 2, 3, 2, 1, 0]
        items = ItemList(int, items=numbers)
        for num in sorted(set(numbers)):
            for start in range(len(numbers)):
                if num in numbers[start:]:
                    assert_equal(items.index(num, start),
                                 numbers.index(num, start))
                for end in range(start, len(numbers)):
                    if num in numbers[start:end]:
                        assert_equal(items.index(num, start, end),
                                     numbers.index(num, start, end))
def test_setitem(self):
orig1, orig2 = Object(), Object()
new1, new2 = Object(), Object()
items = ItemList(Object, {'attr': 2}, [orig1, orig2])
items[0] = new1
assert_equal(list(items), [new1, orig2])
assert_equal(new1.attr, 2)
items[-1] = new2
assert_equal(list(items), [new1, new2])
assert_equal(new2.attr, 2)
    def test_setitem_slice(self):
        # Slice assignment supports shrinking, growing and extended slices.
        items = ItemList(int, items=range(10))
        items[:5] = []
        items[-2:] = [42]
        assert_equal(list(items), [5, 6, 7, 42])
        # Common attributes are applied to items inserted via slices too.
        items = CustomItems(Object, {'a': 1}, [Object(i) for i in range(10)])
        items[1::3] = tuple(Object(c) for c in 'abc')
        assert_true(all(obj.a == 1 for obj in items))
        assert_equal([obj.id for obj in items],
                     [0, 'a', 2, 3, 'b', 5, 6, 'c', 8, 9])
    def test_setitem_slice_invalid_type(self):
        # Type checking covers every element assigned through a slice.
        assert_raises_with_msg(TypeError,
                               'Only int objects accepted, got float.',
                               ItemList(int).__setitem__, slice(0), [1, 1.1])
    def test_len(self):
        # len() tracks the number of contained items, including those
        # added via create().
        items = ItemList(object)
        assert_equal(len(items), 0)
        items.create()
        assert_equal(len(items), 1)
    def test_truth(self):
        # An ItemList is falsy when empty and truthy otherwise.
        assert_true(not ItemList(int))
        assert_true(ItemList(int, items=[1]))
    def test_clear(self):
        # clear() empties the list in place.
        items = ItemList(int, items=list(range(10)))
        items.clear()
        assert_equal(len(items), 0)
def test_str(self):
assert_equal(str(ItemList(str, items=['foo', 'bar', 'quux'])),
'[foo, bar, quux]')
def test_unicode(self):
assert_equal(unicode(ItemList(int, items=[1, 2, 3, 4])),
'[1, 2, 3, 4]')
assert_equal(unicode(ItemList(unicode, items=[u'hyv\xe4\xe4', u'y\xf6'])),
u'[hyv\xe4\xe4, y\xf6]')
# Allow running the tests in this module directly.
if __name__ == '__main__':
    unittest.main()
|
Venturi/cms
|
refs/heads/master
|
env/lib/python2.7/site-packages/phonenumbers/data/alt_format_62.py
|
11
|
"""Auto-generated file, do not edit by hand. 62 metadata"""
from ..phonemetadata import NumberFormat
PHONE_ALT_FORMAT_62 = [NumberFormat(pattern='(\\d{2})(\\d{3,4})(\\d{4})', format='\\1 \\2', leading_digits_pattern=['2[124]|[36]1'])]
|
thomnico/DNS-Charm
|
refs/heads/master
|
contrib/tests/test_zone.py
|
1
|
import unittest
from mock import patch, Mock
import sys
from bind.zone import Zone
class TestZone(unittest.TestCase):
    """Unit tests for the bind Zone record container.

    Each test_*_getset method follows the same pattern: the record
    accessor returns the current record list when called without
    arguments, and appends (or updates in place) when a record dict
    is passed.
    """

    def test_dictionary(self):
        # A fresh Zone exposes its records through a 'contents' mapping.
        z = Zone()
        self.assertTrue(hasattr(z, 'contents'))

    def test_a_getset(self):
        z = Zone()
        record = {'ttl': 300, 'addr': '10.0.0.1', 'alias': '@'}
        self.assertEqual(z.a(), [])
        self.assertEqual(z.a(record),
                         [{'ttl': 300, 'addr': '10.0.0.1', 'alias': '@'}])
        # Adding the identical record again must not create a duplicate.
        self.assertEqual(z.a(record),
                         [{'ttl': 300, 'addr': '10.0.0.1', 'alias': '@'}])

    # NOTE(review): method name is missing an 'a' -- it exercises AAAA
    # records; consider renaming to test_aaaa_getset.
    def test_aaa_getset(self):
        z = Zone()
        record = {'ttl': 300, 'addr': '10.0.0.1', 'alias': '@'}
        self.assertEqual(z.aaaa(), [])
        self.assertEqual(z.aaaa(record),
                         [{'ttl': 300, 'addr': '10.0.0.1', 'alias': '@'}])

    def test_caa_getset(self):
        z = Zone()
        record = {'ttl': 300, 'priority': 0, 'issue': 'thawte.com'}
        self.assertEqual(z.caa(), [])
        self.assertEqual(z.caa(record),
                         [{'ttl': 300, 'priority': 0, 'issue': 'thawte.com'}])

    def test_cert_getset(self):
        z = Zone()
        record = {'ttl': 300, 'type': 1, 'key-tag': '12179',
                  'algorithm': 3, 'cert-crl': 'AQPSKmynfz'}
        self.assertEqual(z.cert(), [])
        self.assertEqual(z.cert(record),
                         [{'ttl': 300, 'type': 1, 'key-tag': '12179',
                           'algorithm': 3, 'cert-crl': 'AQPSKmynfz'}])

    def test_cname_getset(self):
        z = Zone()
        record = {'ttl': 300, 'addr': 'abc.foo.com', 'alias': 'd'}
        self.assertEqual(z.cname(), [])
        self.assertEqual(z.cname(record),
                         [{'ttl': 300, 'addr': 'abc.foo.com', 'alias': 'd'}])
        # assert that value pops and gets updated
        record = {'ttl': 300, 'addr': 'def.foo.com', 'alias': 'd'}
        self.assertEqual(z.cname(record), [{'ttl': 300, 'addr': 'def.foo.com',
                                            'alias': 'd'}])

    def test_ns_getset(self):
        z = Zone()
        record = {'alias': 'example.com.', 'addr': '10.0.0.1'}
        self.assertEqual(z.ns(), [])
        self.assertEqual(z.ns(record),
                         [{'addr': '10.0.0.1', 'alias': 'example.com.'}])
        # A record with the same alias replaces the previous entry.
        record = {'alias': 'example.com.', 'addr': '10.0.0.2'}
        self.assertEqual(z.ns(record),
                         [{'addr': '10.0.0.2', 'alias': 'example.com.'}])

    def test_ptr_getset(self):
        z = Zone()
        record = {'ttl': 2, 'addr': 'joe.example.com'}
        self.assertEqual(z.ptr(), [])
        self.assertEqual(z.ptr(record),
                         [{'ttl': 2, 'addr': 'joe.example.com'}])

    def test_soa_getset(self):
        z = Zone()
        record = {'ttl': '@', 'addr': 'ns1.example.com.',
                  'owner': 'hostmaster.example.com', 'serial': '2003080800',
                  'refresh': '12h', 'update-retry': '15m', 'expiry': '3w',
                  'minimum': '3h'}
        self.assertEqual(z.soa(), [])
        self.assertEqual(z.soa(record),
                         [{'ttl': '@', 'addr': 'ns1.example.com.',
                           'owner': 'hostmaster.example.com',
                           'serial': '2003080800',
                           'refresh': '12h',
                           'update-retry': '15m',
                           'expiry': '3w',
                           'minimum': '3h'}])
        # A second SOA record replaces the existing one (zones have one SOA).
        record = {'ttl': '@', 'addr': 'ns1.example.com.',
                  'owner': 'hostmaster.example.com', 'serial': '2003080800',
                  'refresh': '12h', 'update-retry': '15m', 'expiry': '3w',
                  'minimum': '4h'}
        self.assertEqual(z.soa(record),
                         [{'ttl': '@', 'addr': 'ns1.example.com.',
                           'owner': 'hostmaster.example.com',
                           'serial': '2003080800',
                           'refresh': '12h',
                           'update-retry': '15m',
                           'expiry': '3w',
                           'minimum': '4h'}])

    def test_spf_getset(self):
        z = Zone()
        record = {'addr': 'example.com.',
                  'txt': '"v=spf1 mx include:example.net -all"'}
        self.assertEqual(z.spf(), [])
        self.assertEqual(z.spf(record),
                         [{'addr': 'example.com.',
                           'txt': '"v=spf1 mx include:example.net -all"'}])

    def test_srv_getset(self):
        z = Zone()
        record = {'addr': '_ldap._tcp.example.com.',
                  'priority': 0,
                  'weight': 0, 'port': 389,
                  'target': 'old-slow-box.example.com'}
        self.assertEqual(z.srv(), [])
        self.assertEqual(z.srv(record),
                         [{'addr': '_ldap._tcp.example.com.',
                           'priority': 0,
                           'weight': 0,
                           'port': 389,
                           'target': 'old-slow-box.example.com'}])

    def test_txt_getset(self):
        z = Zone()
        record = {'alias': 'joe',
                  'txt': '"Located in a black hole" " somewhere"'}
        self.assertEqual(z.txt(), [])
        self.assertEqual(z.txt(record),
                         [{'alias': 'joe',
                           'txt': '"Located in a black hole" " somewhere"'}])

    # Patch open() under both Python 2 and 3 module names, plus the jinja2
    # template render, so no real filesystem or template work happens.
    @patch('builtins.open' if sys.version_info > (3,) else '__builtin__.open')
    @patch('bind.zone.jinja2.Template.render')
    def test_to_file(self, tm, mopen):
        # Make the mocked open() usable as a context manager.
        mopen.return_value.__enter__ = lambda s: s
        mopen.return_value.__exit__ = Mock()
        mopen.return_value.write = Mock()
        z = Zone()
        z.read_template = Mock()
        z.read_template.return_value = "hi {{data}}"
        z.to_file()
        z.read_template.assert_called_once()
        mopen.assert_called_with('/etc/bind/db.example.com', 'w')
        # An empty zone renders with every record type present but empty.
        tm.assert_called_with(data={'SOA': [], 'AAAA': [], 'TXT': [],
            'PTR': [], 'SPF': [], 'A': [], 'CERT': [], 'CNAME': [], 'SRV': [],
            'CAA': [], 'NS': [], 'NAPTR': []})

    @patch.dict('os.environ', {'CHARM_DIR': '/tmp/foo'})
    @patch('builtins.open' if sys.version_info > (3,) else '__builtin__.open')
    def test_read_template(self, mopen):
        mopen.return_value.__enter__ = lambda s: s
        mopen.return_value.__exit__ = Mock()
        mopen.return_value.read.return_value = "{{foo}}"
        z = Zone()
        self.assertEqual(z.read_template(), "{{foo}}")

    def test_remove(self):
        z = Zone()
        z.contents['A'] = [{'addr': '10.0.0.1', 'alias': 'abc', 'ttl': 300}]
        z.remove('alias', 'A', 'abc')
        self.assertEqual(z.a(), [])
        # Unknown record type raises IndexError; a key that no longer
        # matches any record raises KeyError.
        with self.assertRaises(IndexError):
            z.remove('alias', 'NOPE', 'abc')
        with self.assertRaises(KeyError):
            z.remove('alias', 'A', 'abc')
|
motion2015/edx-platform
|
refs/heads/master
|
lms/djangoapps/bulk_email/migrations/0007_load_course_email_template.py
|
182
|
# -*- coding: utf-8 -*-
from south.v2 import DataMigration
class Migration(DataMigration):
    """South data migration that loads the default course email template
    fixture into the CourseEmailTemplate table."""

    def forwards(self, orm):
        "Load data from fixture."
        # Import inside the method so Django's command framework is only
        # touched when the migration actually runs.
        from django.core.management import call_command
        call_command("loaddata", "course_email_template.json")

    def backwards(self, orm):
        "Perform a no-op to go backwards."
        # The loaded fixture rows are deliberately left in place.
        pass

    # Frozen ORM model definitions captured by South when this migration
    # was generated. They describe the schema as of this migration only;
    # do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'bulk_email.courseemail': {
            'Meta': {'object_name': 'CourseEmail'},
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'html_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'sender': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
            'subject': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
            'text_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'to_option': ('django.db.models.fields.CharField', [], {'default': "'myself'", 'max_length': '64'})
        },
        'bulk_email.courseemailtemplate': {
            'Meta': {'object_name': 'CourseEmailTemplate'},
            'html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'plain_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        'bulk_email.optout': {
            'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'Optout'},
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['bulk_email']
    symmetrical = True
|
burnpanck/traits
|
refs/heads/master
|
traits/adaptation/tests/test_global_adaptation_manager.py
|
1
|
""" Test the setting/getting/resetting/using the global adaptation manager. """
from traits.adaptation.api import adapt, AdaptationError, AdaptationManager, \
AdaptationOffer, get_global_adaptation_manager, provides_protocol, \
register_factory, register_provides, register_offer, \
reset_global_adaptation_manager, set_global_adaptation_manager, \
supports_protocol
import traits.adaptation.tests.abc_examples
from traits.testing.unittest_tools import unittest
class TestGlobalAdaptationManager(unittest.TestCase):
    """ Test the setting/getting/resetting/using the global adaptation manager.
    """

    #: Class attribute pointing at the module containing the example data
    examples = traits.adaptation.tests.abc_examples

    #### 'TestCase' protocol ##################################################

    def setUp(self):
        """ Prepares the test fixture before each test method is called. """
        # Start every test from a pristine global manager so registrations
        # made by other tests (or earlier in this test) cannot leak in.
        reset_global_adaptation_manager()

    #### Tests ################################################################

    def test_reset_adaptation_manager(self):
        # Registrations made before a reset must be gone afterwards.
        ex = self.examples
        adaptation_manager = get_global_adaptation_manager()
        # UKStandard->EUStandard.
        adaptation_manager.register_factory(
            factory = ex.UKStandardToEUStandard,
            from_protocol = ex.UKStandard,
            to_protocol = ex.EUStandard,
        )
        # Create a UKPlug.
        uk_plug = ex.UKPlug()
        reset_global_adaptation_manager()
        adaptation_manager = get_global_adaptation_manager()
        with self.assertRaises(AdaptationError):
            adaptation_manager.adapt(uk_plug, ex.EUStandard)

    def test_set_adaptation_manager(self):
        # A manually-built manager installed via set_global_adaptation_manager
        # must be the one returned by get_global_adaptation_manager.
        ex = self.examples
        adaptation_manager = AdaptationManager()
        # UKStandard->EUStandard.
        adaptation_manager.register_factory(
            factory = ex.UKStandardToEUStandard,
            from_protocol = ex.UKStandard,
            to_protocol = ex.EUStandard
        )
        # Create a UKPlug.
        uk_plug = ex.UKPlug()
        set_global_adaptation_manager(adaptation_manager)
        global_adaptation_manager = get_global_adaptation_manager()
        eu_plug = global_adaptation_manager.adapt(uk_plug, ex.EUStandard)
        self.assertIsNotNone(eu_plug)
        self.assertIsInstance(eu_plug, ex.UKStandardToEUStandard)

    def test_global_convenience_functions(self):
        # The module-level helpers must operate on the global manager.
        ex = self.examples
        # Global `register_factory`.
        register_factory(
            factory = ex.UKStandardToEUStandard,
            from_protocol = ex.UKStandard,
            to_protocol = ex.EUStandard
        )
        uk_plug = ex.UKPlug()
        # Global `adapt`.
        eu_plug = adapt(uk_plug, ex.EUStandard)
        self.assertIsNotNone(eu_plug)
        self.assertIsInstance(eu_plug, ex.UKStandardToEUStandard)
        # Global `provides_protocol`.
        self.assertTrue(provides_protocol(ex.UKPlug, ex.UKStandard))
        # Global `supports_protocol`.
        self.assertTrue(supports_protocol(uk_plug, ex.EUStandard))

    def test_global_register_provides(self):
        from traits.api import Interface
        class IFoo(Interface):
            pass
        obj = {}
        # Global `register_provides`.
        # A "provides" registration adapts by returning the object itself.
        register_provides(dict, IFoo)
        self.assertEqual(obj, adapt(obj, IFoo))

    def test_global_register_offer(self):
        ex = self.examples
        offer = AdaptationOffer(
            factory = ex.UKStandardToEUStandard,
            from_protocol = ex.UKStandard,
            to_protocol = ex.EUStandard
        )
        # Global `register_offer`.
        register_offer(offer)
        uk_plug = ex.UKPlug()
        eu_plug = adapt(uk_plug, ex.EUStandard)
        self.assertIsNotNone(eu_plug)
        self.assertIsInstance(eu_plug, ex.UKStandardToEUStandard)
if __name__ == '__main__':
unittest.main()
#### EOF ######################################################################
|
russellb/nova
|
refs/heads/master
|
nova/network/quantum/melange_ipam_lib.py
|
1
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Nicira Networks, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from netaddr import IPNetwork, IPAddress
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova.network.quantum import melange_connection
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
def get_ipam_lib(net_man):
    """Return the Melange-backed IPAM implementation.

    The net_man argument is accepted for interface compatibility with the
    other get_ipam_lib() factories but is unused here.
    """
    return QuantumMelangeIPAMLib()
class QuantumMelangeIPAMLib(object):
    """Implements Quantum IP Address Management (IPAM) interface
    using the Melange service, which is access using the Melange
    web services API.
    """

    def __init__(self):
        """Initialize class used to connect to Melange server"""
        self.m_conn = melange_connection.MelangeConnection()

    def create_subnet(self, context, label, project_id,
                      quantum_net_id, priority, cidr=None,
                      gateway=None, gateway_v6=None, cidr_v6=None,
                      dns1=None, dns2=None):
        """Contact Melange and create a subnet for any non-NULL
           IPv4 or IPv6 subnets.

           Also create a entry in the Nova networks DB, but only
           to store values not represented in Melange or to
           temporarily provide compatibility with Nova code that
           accesses IPAM data directly via the DB (e.g., nova-api)
        """
        tenant_id = project_id or FLAGS.quantum_default_tenant_id
        if cidr:
            self.m_conn.create_block(quantum_net_id, cidr,
                                     project_id=tenant_id,
                                     gateway=gateway,
                                     dns1=dns1, dns2=dns2)
        if cidr_v6:
            self.m_conn.create_block(quantum_net_id, cidr_v6,
                                     project_id=tenant_id,
                                     gateway=gateway_v6,
                                     dns1=dns1, dns2=dns2)

        net = {"uuid": quantum_net_id,
               "project_id": tenant_id,
               "priority": priority,
               "label": label}
        if FLAGS.quantum_use_dhcp:
            if cidr:
                n = IPNetwork(cidr)
                # Skip network and gateway addresses for the DHCP range start.
                net['dhcp_start'] = IPAddress(n.first + 2)
        else:
            net['dhcp_start'] = None
        admin_context = context.elevated()
        network = db.network_create_safe(admin_context, net)

    def allocate_fixed_ips(self, context, project_id, quantum_net_id,
                           network_tenant_id, vif_ref):
        """Pass call to allocate fixed IP on to Melange"""
        ips = self.m_conn.allocate_ip(quantum_net_id, network_tenant_id,
                                      vif_ref['uuid'], project_id,
                                      vif_ref['address'])
        return [ip['address'] for ip in ips]

    def get_network_id_by_cidr(self, context, cidr, project_id):
        """Find the Quantum UUID associated with a IPv4 CIDR
           address for the specified tenant.
        """
        tenant_id = project_id or FLAGS.quantum_default_tenant_id
        all_blocks = self.m_conn.get_blocks(tenant_id)
        for b in all_blocks['ip_blocks']:
            LOG.debug("block: %s" % b)
            if b['cidr'] == cidr:
                return b['network_id']
        raise exception.NotFound(_("No network found for cidr %s" % cidr))

    def delete_subnets_by_net_id(self, context, net_id, project_id):
        """Find Melange block associated with the Quantum UUID,
           then tell Melange to delete that block.
        """
        admin_context = context.elevated()
        tenant_id = project_id or FLAGS.quantum_default_tenant_id
        all_blocks = self.m_conn.get_blocks(tenant_id)
        for b in all_blocks['ip_blocks']:
            if b['network_id'] == net_id:
                self.m_conn.delete_block(b['id'], tenant_id)

        network = db.network_get_by_uuid(admin_context, net_id)
        db.network_delete_safe(context, network['id'])

    def get_networks_by_tenant(self, admin_context, tenant_id):
        """Return the Nova network records backing every Melange IP block
           owned by the given tenant, de-duplicated by network UUID.
        """
        nets = {}
        blocks = self.m_conn.get_blocks(tenant_id)
        for ip_block in blocks['ip_blocks']:
            network_id = ip_block['network_id']
            network = db.network_get_by_uuid(admin_context, network_id)
            nets[network_id] = network
        return nets.values()

    def get_global_networks(self, admin_context):
        """Return networks owned by the default (shared) tenant."""
        return self.get_networks_by_tenant(admin_context,
                                           FLAGS.quantum_default_tenant_id)

    def get_project_networks(self, admin_context):
        """Return all Nova networks that have a project_id set."""
        try:
            nets = db.network_get_all(admin_context.elevated())
        except exception.NoNetworksFound:
            return []
        # only return networks with a project_id set
        return [net for net in nets if net['project_id']]

    def get_project_and_global_net_ids(self, context, project_id):
        """Fetches all networks associated with this project, or
           that are "global" (i.e., have no project set).

           Returns list sorted by 'priority' (lowest integer value
           is highest priority).
        """
        if project_id is None:
            raise Exception(_("get_project_and_global_net_ids must be called"
                              " with a non-null project_id"))

        admin_context = context.elevated()

        # Decorate with priority
        priority_nets = []
        for tenant_id in (project_id, FLAGS.quantum_default_tenant_id):
            nets = self.get_networks_by_tenant(admin_context, tenant_id)
            for network in nets:
                priority = network['priority']
                priority_nets.append((priority, network['uuid'], tenant_id))

        # Sort by priority
        priority_nets.sort()

        # Undecorate
        return [(network_id, tenant_id)
                for priority, network_id, tenant_id in priority_nets]

    def get_tenant_id_by_net_id(self, context, net_id, vif_id, project_id):
        """Return the first tenant id under which the allocated IPs for the
           given network/vif are visible, or None if no candidate matches.
        """
        ipam_tenant_id = None
        tenant_ids = [FLAGS.quantum_default_tenant_id, project_id, None]
        # Probe candidate tenants in order; Melange raises when a tenant
        # cannot see the allocation, so failures just move to the next one.
        # (Fixed: the old `except Exception, e:` syntax is invalid in
        # Python 3 and the bound exception was never used.)
        for tid in tenant_ids:
            try:
                ips = self.m_conn.get_allocated_ips(net_id, vif_id, tid)
            except Exception:
                continue
            ipam_tenant_id = tid
            break
        return ipam_tenant_id

    # TODO(bgh): Rename this method .. it's now more of a
    # "get_subnets_by_net_id_and_vif_id" method, but we could probably just
    # call it "get_subnets".
    def get_subnets_by_net_id(self, context, tenant_id, net_id, vif_id):
        """Returns information about the IPv4 and IPv6 subnets
           associated with a Quantum Network UUID.
        """
        subnets = []
        ips = self.m_conn.get_allocated_ips(net_id, vif_id, tenant_id)
        for ip_address in ips:
            block = ip_address['ip_block']
            subnet = {'network_id': block['network_id'],
                      'id': block['id'],
                      'cidr': block['cidr'],
                      'gateway': block['gateway'],
                      'broadcast': block['broadcast'],
                      'netmask': block['netmask'],
                      'dns1': block['dns1'],
                      'dns2': block['dns2']}
            if ip_address['version'] == 4:
                subnet['version'] = 4
            else:
                subnet['version'] = 6
            subnets.append(subnet)
        return subnets

    def get_routes_by_ip_block(self, context, block_id, project_id):
        """Returns the list of routes for the IP block"""
        return self.m_conn.get_routes(block_id, project_id)

    def get_v4_ips_by_interface(self, context, net_id, vif_id, project_id):
        """Returns a list of IPv4 address strings associated with
           the specified virtual interface.
        """
        return self._get_ips_by_interface(context, net_id, vif_id,
                                          project_id, 4)

    def get_v6_ips_by_interface(self, context, net_id, vif_id, project_id):
        """Returns a list of IPv6 address strings associated with
           the specified virtual interface.
        """
        return self._get_ips_by_interface(context, net_id, vif_id,
                                          project_id, 6)

    def _get_ips_by_interface(self, context, net_id, vif_id, project_id,
                              ip_version):
        """Helper method to fetch v4 or v6 addresses for a particular
           virtual interface.
        """
        tenant_id = project_id or FLAGS.quantum_default_tenant_id
        ip_list = self.m_conn.get_allocated_ips(net_id, vif_id, tenant_id)
        return [ip['address'] for ip in ip_list
                if IPNetwork(ip['address']).version == ip_version]

    def verify_subnet_exists(self, context, project_id, quantum_net_id):
        """Confirms that a subnet exists that is associated with the
           specified Quantum Network UUID.
        """
        # TODO(bgh): Would be nice if we could just do something like:
        # GET /ipam/tenants/{tenant_id}/networks/{network_id}/ instead
        # of searching through all the blocks.  Checking for a 404
        # will then determine whether it exists.
        tenant_id = project_id or FLAGS.quantum_default_tenant_id
        all_blocks = self.m_conn.get_blocks(tenant_id)
        for b in all_blocks['ip_blocks']:
            if b['network_id'] == quantum_net_id:
                return True
        return False

    def deallocate_ips_by_vif(self, context, project_id, net_id, vif_ref):
        """Deallocate all fixed IPs associated with the specified
           virtual interface.
        """
        tenant_id = project_id or FLAGS.quantum_default_tenant_id
        self.m_conn.deallocate_ips(net_id, vif_ref['uuid'], tenant_id)

    def get_allocated_ips(self, context, subnet_id, project_id):
        """Return (address, interface_id) pairs for every IP allocated
           in the given subnet.
        """
        ips = self.m_conn.get_allocated_ips_for_network(subnet_id, project_id)
        return [(ip['address'], ip['interface_id']) for ip in ips]

    def create_vif(self, vif_id, instance_id, project_id=None):
        """Create a new vif with the specified information.
        """
        tenant_id = project_id or FLAGS.quantum_default_tenant_id
        return self.m_conn.create_vif(vif_id, instance_id, tenant_id)

    def get_floating_ips_by_fixed_address(self, context, fixed_address):
        """This call is not supported in quantum yet"""
        return []
|
JeisonPacateque/Asphalt-Mixtures-Aging-Simulator
|
refs/heads/master
|
app/ui/configure_simulation.py
|
1
|
'''
Copyright (C) 2015 Jeison Pacateque, Santiago Puerto, Wilmar Fernandez
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
'''
from PyQt5 import QtWidgets, QtCore
from app.graphic_controller import SimulationController
from app.output.results import Result
class ConfigureSimulationDialog(QtWidgets.QDialog):
    """
    This dialog enables the user to control the simulation parameters after
    the simulation runs
    """
    def __init__(self, collection):
        # collection is a 3-D array-like sample; only its third dimension
        # (number of vertical slices) is used by this dialog.
        super(ConfigureSimulationDialog, self).__init__()
        self.collection = collection
        _, _, self.size_Z = self.collection.shape
        self._initUI()

    def _initUI(self):
        # Build all widgets and the grid layout; called once from __init__.
        self.title = QtWidgets.QLabel('<b> Select the vertical slice </b>')
        self.slider = QtWidgets.QSlider()
        self.slider.setGeometry(QtCore.QRect(120, 380, 321, 31))
        self.slider.setOrientation(QtCore.Qt.Horizontal)
        print("valude size_z", self.size_Z)
        print("shape of collecton=", self.collection.shape)
        # NOTE(review): valid slice indices are 0..size_Z-1; setRange(0,
        # size_Z) allows selecting one past the end -- confirm intended.
        self.slider.setRange(0, self.size_Z)
        # self.slider.valueChanged.connect(self.changeText)
        self.lcd = QtWidgets.QLCDNumber(self)  # replaces the QLineEdit() object
        # self.lcd.setDigitCount(2)
        self.slider.valueChanged.connect(self.lcd.display)  # replaces the QLineEdit() object
        # self.sliderSelected = QtWidgets.QLineEdit()
        # self.sliderSelected.setGeometry(QtCore.QRect(112, 280, 331, 20))
        self.mechanicsLabel = QtWidgets.QLabel("<b> Young's modulus </b>")
        self.modulusAggregateLabel = QtWidgets.QLabel("Aggregate:")
        self.modulusMasticLabel = QtWidgets.QLabel("Mastic:")
        self.modulusAirLabel = QtWidgets.QLabel("Air voids:")
        self.mechanicalForceLabel = QtWidgets.QLabel("Applied force: ")
        self.aggregate_YM = QtWidgets.QLineEdit()
        self.mastic_YM = QtWidgets.QLineEdit()
        self.air_YM = QtWidgets.QLineEdit()
        self.mechanicalForceEdit = QtWidgets.QLineEdit()
        self.thermalLabel = QtWidgets.QLabel("<b> Thermal conductivity </b>")
        self.thermalAggregateLabel = QtWidgets.QLabel("Aggregate:")
        self.thermalMasticLabel = QtWidgets.QLabel("Mastic:")
        self.thermalAirLabel = QtWidgets.QLabel("Air voids:")
        self.aggregate_TC = QtWidgets.QLineEdit()
        self.mastic_TC = QtWidgets.QLineEdit()
        self.air_TC = QtWidgets.QLineEdit()
        self.chemicalLabel = QtWidgets.QLabel("<b> Chemical constants </b>")
        self.chemicalAggregateLabel = QtWidgets.QLabel("Chemical value1:")
        self.chemicalMasticLabel = QtWidgets.QLabel("Chemical value2:")
        self.chemicalAirLabel = QtWidgets.QLabel("Chemical value3:")
        self.thermalStepsLabel = QtWidgets.QLabel("Steps:")
        self.thermalSteps = QtWidgets.QLineEdit()
        self.aggregate_CH = QtWidgets.QLineEdit()
        self.mastic_CH = QtWidgets.QLineEdit()
        self.air_CH = QtWidgets.QLineEdit()
        self.runSimulationButton = QtWidgets.QPushButton('Run simulation', self)
        self.runSimulationButton.clicked[bool].connect(self.runSimulation)  # Listener
        self.cancelButton = QtWidgets.QPushButton('Cancel', self)
        self.cancelButton.clicked[bool].connect(self.closeWindow)
        self.grid = QtWidgets.QGridLayout()
        self.grid.setSpacing(2)
        self.grid.addWidget(self.title, 0, 0)
        self.grid.addWidget(self.slider, 1, 0)
        # self.grid.addWidget(self.sliderSelected, 1, 1)
        self.grid.addWidget(self.lcd, 1, 1)
        self.grid.addWidget(self.mechanicsLabel, 2, 0)
        self.grid.addWidget(self.modulusAggregateLabel, 3, 0)
        self.grid.addWidget(self.aggregate_YM, 3, 1)
        self.grid.addWidget(self.mechanicalForceLabel, 3, 2)
        self.grid.addWidget(self.mechanicalForceEdit, 3, 3)
        self.grid.addWidget(self.modulusMasticLabel, 4, 0)
        self.grid.addWidget(self.mastic_YM, 4, 1)
        self.grid.addWidget(self.modulusAirLabel, 5, 0)
        self.grid.addWidget(self.air_YM, 5, 1)
        # ==============================================================================
        # This graphical elements are commented because modifying those values strongly
        # affect the behavior of the simulation
        # ==============================================================================
        self.grid.addWidget(self.thermalLabel, 6, 0)
        # self.grid.addWidget(self.thermalAggregateLabel, 7, 0)
        # self.grid.addWidget(self.aggregate_TC, 7, 1)
        # self.grid.addWidget(self.thermalMasticLabel, 8, 0)
        # self.grid.addWidget(self.mastic_TC, 8, 1)
        # self.grid.addWidget(self.thermalAirLabel, 9, 0)
        # self.grid.addWidget(self.air_TC, 9, 1)
        self.grid.addWidget(self.thermalStepsLabel, 7, 1)
        self.grid.addWidget(self.thermalSteps, 7, 2)
        # self.grid.addWidget(self.chemicalLabel, 10, 0)
        # self.grid.addWidget(self.chemicalAggregateLabel, 11, 0)
        # self.grid.addWidget(self.aggregate_CH, 11, 1)
        # self.grid.addWidget(self.chemicalMasticLabel, 12, 0)
        # self.grid.addWidget(self.mastic_CH, 12, 1)
        # self.grid.addWidget(self.chemicalAirLabel, 13, 0)
        # self.grid.addWidget(self.air_CH, 13, 1)
        self.grid.addWidget(self.runSimulationButton, 14, 1)
        self.grid.addWidget(self.cancelButton, 14, 2)
        self.setLayout(self.grid)
        self.setGeometry(10, 35, 560, 520)
        window_size = self.geometry()
        left = window_size.left()
        right = window_size.right() - 500
        top = window_size.top() + 200
        bottom = window_size.bottom() - 20
        # NOTE(review): QRect(x, y, width, height) -- here 'bottom' is passed
        # as width and 'right' as height; confirm the intended geometry for
        # the progress bar that later reuses self.window_size.
        self.window_size = QtCore.QRect(left, top, bottom, right)
        self.setWindowTitle('Configure Simulation')
        self.setDefaultValues()
        self.show()

    def closeWindow(self):
        # Cancel button handler.
        self.close()

    def changeText(self, value):
        # deprecated
        # NOTE(review): references self.sliderSelected, whose creation is
        # commented out in _initUI -- calling this would raise AttributeError.
        self.z = value
        self.sliderSelected.setText(str(self.z))

    def setDefaultValues(self):
        """
        This method writes default test values over the configuration dialog
        """
        E2 = 21000000      # aggregate Young's modulus default
        E1 = 10000000      # mastic Young's modulus default
        E0 = 100           # air voids Young's modulus default
        conductAsphalt = 0.75
        conductRock = 7.8
        conductAir = 0.026
        steps = 10000
        # NOTE(review): unused since the sliderSelected line below is
        # commented out; also float under Python 3 division.
        target_slice = self.size_Z / 2
        mechanical_force = 800
        self.aggregate_YM.setText(str(E2))
        self.mastic_YM.setText(str(E1))
        self.air_YM.setText(str(E0))
        self.aggregate_TC.setText(str(conductRock))
        self.mastic_TC.setText(str(conductAsphalt))
        self.air_TC.setText(str(conductAir))
        self.thermalSteps.setText(str(steps))
        self.mechanicalForceEdit.setText(str(mechanical_force))
        # self.sliderSelected.setText(str(target_slice))
        self.aggregate_CH.setText('Chem Aggregate')
        self.mastic_CH.setText('Chem Mastic')
        self.air_CH.setText('Chem Air')

    def runSimulation(self):
        """
        This method loads the user input and initialize the simulation engine
        """
        # Collect widget text into the options mapping consumed by the
        # SimulationController; numeric inputs are converted here and will
        # raise ValueError on non-numeric user input.
        options = {
            'physical_cons': {
                'aggregate_YM': self.aggregate_YM.text(),
                'aggregate_TC': self.aggregate_TC.text(),
                'aggregate_CH': self.aggregate_CH.text(),
                'mastic_YM': self.mastic_YM.text(),
                'mastic_TC': self.mastic_TC.text(),
                'mastic_CH': self.mastic_CH.text(),
                'air_YM': self.air_YM.text(),
                'air_TC': self.air_TC.text(),
                'air_CH': self.air_CH.text(),
            },
            'inputs': {
                'force_input': int(self.mechanicalForceEdit.text()),
                'thermal_steps': int(self.thermalSteps.text()),
            }
        }
        # slice_id = int(self.sliderSelected.text())
        slice_id = int(self.lcd.value())
        print("slice_id", slice_id)
        # Close the dialog before the simulation starts
        self.progressBar = QtWidgets.QProgressBar(self)
        self.progressBar.setGeometry(QtCore.QRect(self.window_size))
        self.controller = SimulationController(self.collection, slice_id, **options)

        def onFinished():
            # Runs on the controller's finished signal: stop the busy
            # indicator, persist both result sets and notify the user.
            self.progressBar.setRange(0, 1)
            self.progressBar.setValue(1)
            self.progressBar.hide()
            data1, data2 = self.controller.getData()
            output_results1 = Result(data1, "data1")
            output_results1.showResults()
            output_results2 = Result(data2, "data2")
            output_results2.showResults()
            QtWidgets.QMessageBox.about(self, "Information:",
                "Simulation done, results saved at Results folder")
        self.controller.finished.connect(onFinished)
        self.controller.start()
        self.progressBar.show()
        # Range (0, 0) puts the progress bar into indeterminate "busy" mode.
        self.progressBar.setRange(0, 0)
|
shinyChen/browserscope
|
refs/heads/master
|
bin/local_scores.py
|
9
|
#!/usr/bin/python2.5
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License')
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compute scores from locally downloaded data.
Compare local numbers with online numbers.
"""
# Each level
# Each test_set
import bisect
import datetime
import getopt
import logging
import MySQLdb
import os
import re
import sys
sys.path.append('/usr/local/google/google_appengine')
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from categories import all_test_sets
# All distinct browser (family, v1, v2, v3) tuples seen in user_agent.
BROWSERS_SQL = """
SELECT family, v1, v2, v3
FROM user_agent
GROUP BY family, v1, v2, v3
;"""

# Per-test score rows joined back to their user agents. %(...)s slots are
# filled by Python %-formatting first; the doubled %%(...)s slots survive
# that pass and are substituted by the DB-API driver.
SCORE_SQL = """
SELECT %(columns)s
FROM result_time
LEFT JOIN result_parent USING (result_parent_key)
LEFT JOIN user_agent USING (user_agent_key)
WHERE
category = %%(category)s AND
test = %%(test)s AND
family = %%(family)s
%(v_clauses)s
%(limit_clause)s
;"""

# Browser version tuples that have scores in a given category.
CATEGORY_BROWSERS_SQL = """
SELECT family, v1, v2, v3
FROM scores
WHERE category=%s AND family IS NOT NULL
GROUP BY family, v1, v2, v3
;"""

# Number of score rows per category.
CATEGORY_COUNTS_SQL = """
SELECT category, count(*) FROM scores GROUP BY category
;"""

# Same two-stage substitution scheme as SCORE_SQL, against the scores table.
SCORES_SQL = """
SELECT %(columns)s
FROM scores
WHERE
category=%%(category)s AND
test=%%(test)s AND
family=%%(family)s
%(v_clauses)s
%(limit_clause)s
;"""
class UserAgent(object):
    """Helpers for formatting browser user-agent version parts."""

    @staticmethod
    def pretty_print(family, v1=None, v2=None, v3=None):
        """Return a human-readable browser string, e.g. 'Firefox 3.5.1'.

        A v3 that starts with a digit is a regular point release
        ('3.5.1'); otherwise it is appended directly as a pre-release
        tag ('3.5b2').
        """
        if v3:
            if v3[0].isdigit():
                return '%s %s.%s.%s' % (family, v1, v2, v3)
            else:
                return '%s %s.%s%s' % (family, v1, v2, v3)
        elif v2:
            return '%s %s.%s' % (family, v1, v2)
        elif v1:
            return '%s %s' % (family, v1)
        return family

    @classmethod
    def parts_to_string_list(cls, family, v1=None, v2=None, v3=None):
        """Return a list of increasingly specific user agent strings.

        e.g. ['Firefox', 'Firefox 3', 'Firefox 3.5']
        """
        # (Removed an unused 'key = family, v1, v2, v3' local.)
        string_list = []
        if family:
            string_list.append(family)
        if v1:
            string_list.append(cls.pretty_print(family, v1))
        if v2:
            string_list.append(cls.pretty_print(family, v1, v2))
        if v3:
            string_list.append(cls.pretty_print(family, v1, v2, v3))
        return string_list
class LastNRanker(object):
    """Keep a bounded sorted sample of scores and report their median.

    Up to MAX_NUM_SAMPLED_SCORES scores are retained; beyond that, each
    new score evicts the retained score whose rank is furthest from the
    insertion point, keeping the sample centered on the median.
    """
    MAX_NUM_SAMPLED_SCORES = 100

    def __init__(self):
        # Total scores ever added (may exceed len(self.scores)).
        self.num_scores = 0
        # Sorted sample of retained scores.
        self.scores = []

    def GetMedianAndNumScores(self):
        """Return (median of the sampled scores, total number of scores)."""
        num_sampled_scores = len(self.scores)
        if num_sampled_scores:
            # Floor division keeps the index an int on both Python 2 and 3
            # (plain '/' would produce a float index under Python 3).
            return self.scores[num_sampled_scores // 2], self.num_scores
        else:
            return None, 0

    def Add(self, score):
        """Add a score into the last N scores.

        If the sample is full, drops the retained score that is furthest
        (by rank) from the given score.
        """
        num_sampled_scores = len(self.scores)
        if num_sampled_scores < self.MAX_NUM_SAMPLED_SCORES:
            bisect.insort(self.scores, score)
        else:
            index_left = bisect.bisect_left(self.scores, score)
            index_right = bisect.bisect_right(self.scores, score)
            index_center = index_left + (index_right - index_left) // 2
            self.scores.insert(index_left, score)
            if index_center < num_sampled_scores // 2:
                self.scores.pop()
            else:
                self.scores.pop(0)
        self.num_scores += 1

    def GetValues(self):
        """Return the sorted sample of retained scores."""
        return self.scores
class CountRanker(object):
    """Maintain a histogram (list of counts) of integer scores.

    The minimum score is assumed to be 0.
    The maximum score must be MAX_SCORE or less; out-of-range scores are
    clamped into [MIN_SCORE, MAX_SCORE].
    """
    MIN_SCORE = 0
    MAX_SCORE = 100

    def __init__(self):
        # counts[score] == number of times that score was added.
        self.counts = []

    def GetMedianAndNumScores(self):
        """Return (median score, total number of scores added)."""
        median = None
        num_scores = sum(self.counts)
        # Floor division keeps the rank an int on both Python 2 and 3.
        median_rank = num_scores // 2
        index = 0
        for score, count in enumerate(self.counts):
            median = score
            index += count
            if median_rank < index:
                break
        return median, num_scores

    def Add(self, score):
        """Record one score, clamping it into the legal range."""
        if score < self.MIN_SCORE:
            score = self.MIN_SCORE
            # Bug fix: the original logged self.key().name(), but this class
            # has no key() method (a leftover from the App Engine model it was
            # copied from), so the warning itself raised AttributeError.
            logging.warning('CountRanker value out of range (%s to %s): %s',
                            self.MIN_SCORE, self.MAX_SCORE, score)
        elif score > self.MAX_SCORE:
            score = self.MAX_SCORE
            logging.warning('CountRanker value out of range (%s to %s): %s',
                            self.MIN_SCORE, self.MAX_SCORE, score)
        slots_needed = score - len(self.counts) + 1
        if slots_needed > 0:
            self.counts.extend([0] * slots_needed)
        self.counts[score] += 1

    def GetValues(self):
        """Return the raw counts list."""
        return self.counts
def CreateRanker(test, browser, params_str=None):
    """Pick the ranker implementation suited to the test's score range.

    Tests whose scores fit in [0, CountRanker.MAX_SCORE] get the compact
    histogram ranker; everything else gets the sampled-median ranker.
    """
    fits_histogram = test.min_value >= 0 and test.max_value <= CountRanker.MAX_SCORE
    return CountRanker() if fits_histogram else LastNRanker()
def DumpRankers(fh, rankers):
    """Write one CSV line per ranker to *fh* (Python 2 ``print >>`` syntax).

    Each line: category, test, browser, ranker class name, median,
    num_scores, and a '|'-joined list of the ranker's raw values.
    """
    # NOTE(review): keys are unpacked as (category, browser, test_key) tuples,
    # but BuildRankers in this module returns {browser: {test_key: ranker}} --
    # confirm which producer this consumer is meant to match.
    for (category, browser, test_key), ranker in sorted(rankers.items()):
        fields = [
            category,
            test_key,
            browser,
            ranker.__class__.__name__,
        ]
        median, num_scores = ranker.GetMedianAndNumScores()
        fields.append(str(median))
        fields.append(str(num_scores))
        fields.append('|'.join(map(str, ranker.GetValues())))
        print >>fh, ','.join(fields)
def BuildRankers(db, category):
    """Build rankers from all scores of *category*.

    Returns {browser_string: {test_key: ranker}} where each score row is
    fed into the rankers of every version level of its user agent
    ('Firefox', 'Firefox 3', 'Firefox 3.5', ...).
    """
    cursor = db.cursor()
    # Parameters must be passed as a sequence for the DB-API driver; a bare
    # string is an ambiguous parameter sequence.
    cursor.execute('''
SELECT test, family, v1, v2, v3, score
FROM scores
WHERE category=%s AND test IS NOT NULL AND family IS NOT NULL
ORDER by test, family, v1, v2, v3
;''', (category,))
    test_set = all_test_sets.GetTestSet(category)
    last_test_key = None
    test = None
    last_parts = None
    browsers = []
    rankers = {}
    for test_key, family, v1, v2, v3, score in cursor.fetchall():
        if test_key != last_test_key:
            last_test_key = test_key
            test = test_set.GetTest(test_key)
        if test is None:
            # Bug fix: the original only skipped the FIRST row of an unknown
            # test; subsequent rows with the same key fell through with a
            # stale None and crashed in CreateRanker.
            continue
        parts = (family, v1, v2, v3)
        if parts != last_parts:
            # Bug fix: last_parts was never updated, so the browser list was
            # recomputed for every row.
            last_parts = parts
            browsers = UserAgent.parts_to_string_list(family, v1, v2, v3)
        for browser in browsers:
            browser_rankers = rankers.setdefault(browser, {})
            if test_key not in browser_rankers:
                ranker = browser_rankers[test_key] = CreateRanker(test, browser)
            else:
                ranker = browser_rankers[test_key]
            ranker.Add(score)
    return rankers
def GetCategoryBrowsers(db, category):
    """Return four sets of browser strings, one per version level (0-3).

    Level 0 holds family-only strings ('Firefox'), level 1 major versions
    ('Firefox 3'), and so on; a browser with fewer version parts reuses its
    most specific string at the deeper levels.
    """
    cursor = db.cursor()
    # Bug fix: pass the parameter as a tuple so the DB-API driver
    # substitutes it safely (a bare string is an ambiguous sequence).
    cursor.execute(CATEGORY_BROWSERS_SQL, (category,))
    level_browsers = [set() for version_level in range(4)]
    for family, v1, v2, v3 in cursor.fetchall():
        ua_browsers = UserAgent.parts_to_string_list(family, v1, v2, v3)
        max_ua_browsers_index = len(ua_browsers) - 1
        for version_level in range(4):
            level_browsers[version_level].add(
                ua_browsers[min(max_ua_browsers_index, version_level)])
    return level_browsers
def GetCategories():
    """Return the category names of all registered test sets."""
    return [test_set.category for test_set in all_test_sets.GetAllTestSets()]
def CheckTests(db):
    """Warn about (category, test) pairs in the scores table that the
    currently-registered test sets no longer recognize."""
    cursor = db.cursor()
    cursor.execute('''
SELECT category, test, count(*)
FROM scores
WHERE category IS NOT NULL
GROUP BY category, test
ORDER BY category, test
;''')
    for category, test_key, num_scores in cursor.fetchall():
        test_set = all_test_sets.GetTestSet(category)
        if not test_set:
            logging.warn('No test_set for category: %s (num_scores=%s)',
                         category, num_scores)
            continue
        if not test_set.GetTest(test_key):
            logging.warn('No test: %s, %s (num_scores=%s)',
                         category, test_key, num_scores)
def DumpScores(db):
    """Print per-browser median scores for every category as CSV lines:
    category,"browser",median1,...,medianN,max_num_scores
    """
    cursor = db.cursor()
    # NOTE(review): the original referenced CREATE_TEMP_SCORES_SQL,
    # TEMP_CATEGORY_COUNTS_SQL and TEMP_SCORES_SQL, none of which are defined
    # in this module (instant NameError). Mapped to the defined
    # CATEGORY_COUNTS_SQL / SCORES_SQL constants and dropped the temp-table
    # creation step; confirm against the original temp-table workflow.
    cursor.execute(CATEGORY_COUNTS_SQL)
    for category, count in cursor.fetchall():
        logging.info("Num scores for category, %s: %s", category, count)
    cursor.execute(BROWSERS_SQL)
    browser_parts = cursor.fetchall()
    for test_set in all_test_sets.GetAllTestSets():
        category = test_set.category
        logging.info("Dump scores for category: %s", category)
        for family, v1, v2, v3 in browser_parts:
            # Build clauses matching this exact version tuple (NULLs included).
            v_clauses = ''
            for column, value in (('v1', v1), ('v2', v2), ('v3', v3)):
                if value is None:
                    v_clauses += ' AND %s IS NULL' % column
                else:
                    v_clauses += ' AND %s = "%s"' % (column, value)
            max_num_scores = 0
            medians = []
            for test in test_set.tests:
                sql_params = {
                    'category': category,
                    'test': test.key,
                    'family': family,
                }
                sql = SCORES_SQL % {
                    'columns': 'count(*)',
                    'v_clauses': v_clauses,
                    'limit_clause': '',
                }
                sql = re.sub(r'\s+', ' ', sql)
                cursor.execute(sql, sql_params)
                num_scores = cursor.fetchone()[0]
                if num_scores:
                    max_num_scores = max(max_num_scores, num_scores)
                    # The median is the middle row of the score column
                    # (floor division keeps the offset an int on py2 and py3).
                    sql = SCORES_SQL % {
                        'columns': 'score',
                        'v_clauses': v_clauses,
                        'limit_clause': 'limit %d,1' % (num_scores // 2),
                    }
                    cursor.execute(sql, sql_params)
                    medians.append(cursor.fetchone()[0])
                else:
                    medians.append(None)
            if max_num_scores > 0:
                # Bug fix: the original called a bare pretty_print(), which is
                # a UserAgent static method. Single-argument print() works
                # identically on Python 2 and 3.
                print('%s,"%s",%s,%s' % (
                    category, UserAgent.pretty_print(family, v1, v2, v3),
                    ','.join(map(str, medians)), max_num_scores))
def ParseArgs(argv):
    """Parse command-line flags.

    Returns (host, gae_user, params, mysql_default_file, leftover_args);
    each flag value defaults to None when not given.
    """
    options, args = getopt.getopt(
        argv[1:],
        'h:e:p:f:',
        ['host=', 'email=', 'params=', 'mysql_default_file='])
    # Map every accepted spelling onto one canonical slot.
    slots = {'host': None, 'email': None, 'params': None, 'default_file': None}
    aliases = {
        '-h': 'host', '--host': 'host',
        '-e': 'email', '--email': 'email',
        '-p': 'params', '--params': 'params',
        '-f': 'default_file', '--mysql_default_file': 'default_file',
    }
    for option_key, option_value in options:
        slots[aliases[option_key]] = option_value
    return (slots['host'], slots['email'], slots['params'],
            slots['default_file'], args)
def main(argv):
    """Entry point: connect to MySQL, build rankers, dump them to stdout."""
    host, user, params, mysql_default_file, argv = ParseArgs(argv)
    start = datetime.datetime.now()
    db = MySQLdb.connect(read_default_file=mysql_default_file)
    #DumpScores(db)
    # NOTE(review): BuildRankers is declared as BuildRankers(db, category) but
    # is called here without a category (TypeError at runtime), and it returns
    # a {browser: {test_key: ranker}} mapping while DumpRankers unpacks
    # (category, browser, test_key) keys -- confirm the intended call shape
    # before relying on this path.
    rankers = BuildRankers(db)
    DumpRankers(sys.stdout, rankers)
    #CheckTests(db)
    end = datetime.datetime.now()
    logging.info(' start: %s', start)
    logging.info(' end: %s', end)
    logging.info('elapsed: %s', str(end - start)[:-7])


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    main(sys.argv)
|
nirvn/QGIS
|
refs/heads/master
|
python/plugins/processing/tests/ModelerTest.py
|
10
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
ModelerTest
---------------------
Date : November 2016
Copyright : (C) 2016 by Nyall Dawson
Email : nyall dot dawson at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Nyall Dawson'
__date__ = 'November 2016'
__copyright__ = '(C) 2016, Nyall Dawson'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.testing import start_app, unittest
from qgis.core import (QgsProcessingModelAlgorithm,
QgsProcessingModelParameter,
QgsProcessingParameterString,
QgsProcessingParameterNumber,
QgsProcessingParameterField,
QgsProcessingParameterFile)
from processing.modeler.ModelerParametersDialog import (ModelerParametersDialog)
start_app()
class ModelerTest(unittest.TestCase):

    def testModelerParametersDialogAvailableValuesOfType(self):
        """getAvailableValuesOfType from ModelerParametersDialog filters by
        parameter type, accepting a single type or a list of types."""
        model = QgsProcessingModelAlgorithm()
        for name, parameter_class in (
                ('string', QgsProcessingParameterString),
                ('string2', QgsProcessingParameterString),
                ('number', QgsProcessingParameterNumber),
                ('field', QgsProcessingParameterField),
                ('file', QgsProcessingParameterFile)):
            component = QgsProcessingModelParameter(name)
            model.addModelParameter(parameter_class(name), component)

        dlg = ModelerParametersDialog(model, model)

        def names_of(types):
            return {value.parameterName() for value in dlg.getAvailableValuesOfType(types)}

        # single types
        self.assertEqual(names_of(QgsProcessingParameterNumber), {'number'})
        self.assertEqual(names_of(QgsProcessingParameterField), {'field'})
        self.assertEqual(names_of(QgsProcessingParameterFile), {'file'})
        # multiple types at once
        self.assertEqual(
            names_of([QgsProcessingParameterString,
                      QgsProcessingParameterNumber,
                      QgsProcessingParameterFile]),
            {'string', 'string2', 'number', 'file'})
# Run the tests when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
pepeportela/edx-platform
|
refs/heads/master
|
lms/djangoapps/django_comment_client/tests/mock_cs_server/mock_cs_server.py
|
23
|
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from logging import getLogger
logger = getLogger(__name__)
class MockCommentServiceRequestHandler(BaseHTTPRequestHandler):
    '''
    A handler for Comment Service POST and PUT requests.
    '''
    # NOTE(review): BaseHTTPRequestHandler reads 'protocol_version', not
    # 'protocol' -- this attribute looks inert; confirm before removing.
    protocol = "HTTP/1.0"

    def _handle_request(self, method):
        '''
        Shared implementation for POST and PUT (the two were verbatim
        duplicates). Used by the APIs for comment threads, commentables,
        comments, subscriptions, commentables, users.

        Replies 200 with the server's canned JSON response when the request
        carries an X-Edx-Api-Key header, else 500.
        '''
        # Retrieve the request body into a dict.
        # It should have been sent in json format
        length = int(self.headers.getheader('content-length'))
        data_string = self.rfile.read(length)
        post_dict = json.loads(data_string)
        # Log the request (lazy %-args; rendered text matches the original).
        logger.debug(
            "Comment Service received %s request %s to path %s",
            method, json.dumps(post_dict), self.path
        )
        # Every good request has at least an API key
        if 'X-Edx-Api-Key' in self.headers:
            response = self.server._response_str
            # Log the response
            logger.debug("Comment Service: sending response %s", json.dumps(response))
            # Send a response back to the client
            self.send_response(200)
            self.send_header('Content-type', 'application/json')
            self.end_headers()
            self.wfile.write(response)
        else:
            # Respond with failure
            self.send_response(500, 'Bad Request: does not contain API key')
            self.send_header('Content-type', 'text/plain')
            self.end_headers()
            return False

    def do_POST(self):
        '''Handle a POST request from the client.'''
        return self._handle_request('POST')

    def do_PUT(self):
        '''Handle a PUT request from the client.'''
        return self._handle_request('PUT')
class MockCommentServiceServer(HTTPServer):
    '''
    A mock Comment Service server that responds
    to POST requests to localhost.
    '''

    def __init__(self, port_num, response=None):
        '''
        Initialize the mock Comment Service server instance.

        *port_num* is the localhost port to listen to
        *response* is a dictionary that will be JSON-serialized
        and sent in response to comment service requests; defaults to
        {'username': 'new', 'external_id': 1}.
        '''
        # Bug fix: the default was a mutable dict in the signature; use the
        # None-sentinel idiom so call sites cannot share/mutate it.
        if response is None:
            response = {'username': 'new', 'external_id': 1}
        self._response_str = json.dumps(response)
        handler = MockCommentServiceRequestHandler
        address = ('', port_num)
        HTTPServer.__init__(self, address, handler)

    def shutdown(self):
        '''
        Stop the server and free up the port
        '''
        # First call superclass shutdown()
        HTTPServer.shutdown(self)
        # We also need to manually close the socket
        self.socket.close()
|
ArnossArnossi/django
|
refs/heads/master
|
django/contrib/gis/utils/srs.py
|
450
|
from django.contrib.gis.gdal import SpatialReference
from django.db import DEFAULT_DB_ALIAS, connections
def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None,
                  database=None):
    """
    Take a GDAL SpatialReference system and add its information to the
    `spatial_ref_sys` table of the spatial backend, enabling database-level
    spatial transformations. Useful for adding spatial reference systems
    not included by default with the backend:

    >>> from django.contrib.gis.utils import add_srs_entry
    >>> add_srs_entry(3857)

    Keyword Arguments:
     auth_name:
       Value for the `auth_name` field. Defaults to 'EPSG'.

     auth_srid:
       Value for the `auth_srid` field. Defaults to the SRID determined
       by GDAL.

     ref_sys_name:
       For SpatiaLite users only, sets the value of the `ref_sys_name`
       field. Defaults to the name determined by GDAL.

     database:
      The name of the database connection to use; the default is the value
      of `django.db.DEFAULT_DB_ALIAS` (at the time of this writing, its
      value is 'default').
    """
    if not database:
        database = DEFAULT_DB_ALIAS
    connection = connections[database]

    # Only spatial backends that declare support can take a new entry.
    if not hasattr(connection.ops, 'spatial_version'):
        raise Exception('The `add_srs_entry` utility only works '
                        'with spatial backends.')
    if not connection.features.supports_add_srs_entry:
        raise Exception('This utility does not support your database backend.')
    SpatialRefSys = connection.ops.spatial_ref_sys()

    # Coerce the argument into a `SpatialReference` instance when needed.
    if not isinstance(srs, SpatialReference):
        srs = SpatialReference(srs)
    if srs.srid is None:
        raise Exception('Spatial reference requires an SRID to be '
                        'compatible with the spatial backend.')

    # Field values common to both PostGIS and SpatiaLite.
    kwargs = {
        'srid': srs.srid,
        'auth_name': auth_name,
        'auth_srid': auth_srid or srs.srid,
        'proj4text': srs.proj4,
    }

    # Backend-specific fields for the SpatialRefSys model.
    backend_fields = {f.name for f in SpatialRefSys._meta.get_fields()}
    if 'srtext' in backend_fields:
        kwargs['srtext'] = srs.wkt
    if 'ref_sys_name' in backend_fields:
        # Spatialite specific
        kwargs['ref_sys_name'] = ref_sys_name or srs.name

    # Create the spatial_ref_sys row only if one with this SRID is missing.
    # Look up by SRID alone, because the stored wkt/proj may differ from the
    # values assembled in kwargs.
    try:
        SpatialRefSys.objects.using(database).get(srid=srs.srid)
    except SpatialRefSys.DoesNotExist:
        SpatialRefSys.objects.using(database).create(**kwargs)
# Backwards-compatible alias for the utility's previous public name.
add_postgis_srs = add_srs_entry
|
Cat5TV/nems-migrator
|
refs/heads/master
|
data/1.6/nagios/plugins/check_esxi_hardware.py
|
2
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# Script for checking global health of host running VMware ESX/ESXi
#
# Licence : GNU General Public Licence (GPL) http://www.gnu.org/
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
# Pre-req : pywbem
#
# Copyright (c) 2008 David Ligeret
# Copyright (c) 2009 Joshua Daniel Franklin
# Copyright (c) 2010 Branden Schneider
# Copyright (c) 2010-2018 Claudio Kuenzler
# Copyright (c) 2010 Samir Ibradzic
# Copyright (c) 2010 Aaron Rogers
# Copyright (c) 2011 Ludovic Hutin
# Copyright (c) 2011 Carsten Schoene
# Copyright (c) 2011-2012 Phil Randal
# Copyright (c) 2011 Fredrik Aslund
# Copyright (c) 2011 Bertrand Jomin
# Copyright (c) 2011 Ian Chard
# Copyright (c) 2012 Craig Hart
# Copyright (c) 2013 Carl R. Friend
# Copyright (c) 2015 Andreas Gottwald
# Copyright (c) 2015 Stanislav German-Evtushenko
# Copyright (c) 2015 Stefan Roos
# Copyright (c) 2018 Peter Newman
#
# The VMware 4.1 CIM API is documented here:
# http://www.vmware.com/support/developer/cim-sdk/4.1/smash/cim_smash_410_prog.pdf
# http://www.vmware.com/support/developer/cim-sdk/smash/u2/ga/apirefdoc/
#
# The VMware 5.x CIM API is documented here:
# http://pubs.vmware.com/vsphere-50/index.jsp?nav=/5_1_1
#
# This Nagios plugin is maintained here:
# http://www.claudiokuenzler.com/nagios-plugins/check_esxi_hardware.php
#
#@---------------------------------------------------
#@ History
#@---------------------------------------------------
#@ Date : 20080820
#@ Author : David Ligeret
#@ Reason : Initial release
#@---------------------------------------------------
#@ Date : 20080821
#@ Author : David Ligeret
#@ Reason : Add verbose mode
#@---------------------------------------------------
#@ Date : 20090219
#@ Author : Joshua Daniel Franklin
#@ Reason : Add try/except to catch AuthError and CIMError
#@---------------------------------------------------
#@ Date : 20100202
#@ Author : Branden Schneider
#@ Reason : Added HP Support (HealthState)
#@---------------------------------------------------
#@ Date : 20100512
#@ Author : Claudio Kuenzler www.claudiokuenzler.com
#@ Reason : Combined different versions (Joshua and Branden)
#@ Reason : Added hardware type switch (dell or hp)
#@---------------------------------------------------
#@ Date : 20100626/28
#@ Author : Samir Ibradzic www.brastel.com
#@ Reason : Added basic server info
#@ Reason : Wanted to have server name, serial number & bios version at output
#@ Reason : Set default return status to Unknown
#@---------------------------------------------------
#@ Date : 20100702
#@ Author : Aaron Rogers www.cloudmark.com
#@ Reason : GlobalStatus was incorrectly getting (re)set to OK with every CIM element check
#@---------------------------------------------------
#@ Date : 20100705
#@ Author : Claudio Kuenzler www.claudiokuenzler.com
#@ Reason : Due to change 20100702 all Dell servers would return UNKNOWN instead of OK...
#@ Reason : ... so added Aaron's logic at the end of the Dell checks as well
#@---------------------------------------------------
#@ Date : 20101028
#@ Author : Claudio Kuenzler www.claudiokuenzler.com
#@ Reason : Changed text in Usage and Example so people dont forget to use https://
#@---------------------------------------------------
#@ Date : 20110110
#@ Author : Ludovic Hutin (Idea and Coding) / Claudio Kuenzler (Bugfix)
#@ Reason : If Dell Blade Servers are used, Serial Number of Chassis was returned
#@---------------------------------------------------
#@ Date : 20110207
#@ Author : Carsten Schoene carsten.schoene.cc
#@ Reason : Bugfix for Intel systems (in this case Intel SE7520) - use 'intel' as system type
#@---------------------------------------------------
#@ Date : 20110215
#@ Author : Ludovic Hutin
#@ Reason : Plugin now catches Socket Error (Timeout Error) and added a timeout parameter
#@---------------------------------------------------
#@ Date : 20110217/18
#@ Author : Ludovic Hutin / Tom Murphy
#@ Reason : Bugfix in Socket Error if clause
#@---------------------------------------------------
#@ Date : 20110221
#@ Author : Claudio Kuenzler www.claudiokuenzler.com
#@ Reason : Remove recently added Timeout due to incompabatility on Windows
#@ Reason : and changed name of plugin to check_esxi_hardware
#@---------------------------------------------------
#@ Date : 20110426
#@ Author : Claudio Kuenzler www.claudiokuenzler.com
#@ Reason : Added 'ibm' hardware type (compatible to Dell output). Tested by Keith Erekson.
#@---------------------------------------------------
#@ Date : 20110426
#@ Author : Phil Randal
#@ Reason : URLise Dell model and tag numbers (as in check_openmanage)
#@ Reason : Return performance data (as in check_openmanage, using similar names where possible)
#@ Reason : Minor code tidyup - use elementName instead of instance['ElementName']
#@---------------------------------------------------
#@ Date : 20110428
#@ Author : Phil Randal (phil.randal@gmail.com)
#@ Reason : If hardware type is specified as 'auto' try to autodetect vendor
#@ Reason : Return performance data for some HP models
#@ Reason : Indent 'verbose' output to make it easier to read
#@ Reason : Use OptionParser to give better parameter parsing (retaining compatability with original)
#@---------------------------------------------------
#@ Date : 20110503
#@ Author : Phil Randal (phil.randal@gmail.com)
#@ Reason : Fix bug in HP Virtual Fan percentage output
#@ Reason : Slight code reorganisation
#@ Reason : Sort performance data
#@ Reason : Fix formatting of current output
#@---------------------------------------------------
#@ Date : 20110504
#@ Author : Phil Randal (phil.randal@gmail.com)
#@ Reason : Minor code changes and documentation improvements
#@ Reason : Remove redundant mismatched ' character in performance data output
#@ Reason : Output non-integral values for all sensors to fix problem seen with system board voltage sensors
#@ on an IBM server (thanks to Attilio Drei for the sample output)
#@---------------------------------------------------
#@ Date : 20110505
#@ Author : Fredrik Aslund
#@ Reason : Added possibility to use first line of a file as password (file:)
#@---------------------------------------------------
#@ Date : 20110505
#@ Author : Phil Randal (phil.randal@gmail.com)
#@ Reason : Simplfy 'verboseoutput' to use 'verbose' as global variable instead of as parameter
#@ Reason : Don't look at performance data from CIM_NumericSensor if we're not using it
#@ Reason : Add --no-power, --no-volts, --no-current, --no-temp, and --no-fan options
#@---------------------------------------------------
#@ Date : 20110506
#@ Author : Phil Randal (phil.randal@gmail.com)
#@ Reason : Reinstate timeouts with --timeout parameter (but not on Windows)
#@ Reason : Allow file:passwordfile in old-style arguments too
#@---------------------------------------------------
#@ Date : 20110507
#@ Author : Phil Randal (phil.randal@gmail.com)
#@ Reason : On error, include numeric sensor value in output
#@---------------------------------------------------
#@ Date : 20110520
#@ Author : Bertrand Jomin
#@ Reason : Plugin had problems to handle some S/N from IBM Blade Servers
#@---------------------------------------------------
#@ Date : 20110614
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Rewrote file handling and file can now be used for user AND password
#@---------------------------------------------------
#@ Date : 20111003
#@ Author : Ian Chard (ian@chard.org)
#@ Reason : Allow a list of unwanted elements to be specified, which is useful
#@ in cases where hardware isn't well supported by ESXi
#@---------------------------------------------------
#@ Date : 20120402
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Making plugin GPL compatible (Copyright) and preparing for OpenBSD port
#@---------------------------------------------------
#@ Date : 20120405
#@ Author : Phil Randal (phil.randal@gmail.com)
#@ Reason : Fix lookup of warranty info for Dell
#@---------------------------------------------------
#@ Date : 20120501
#@ Author : Craig Hart
#@ Reason : Bugfix in manufacturer discovery when cim entry not found or empty
#@---------------------------------------------------
#@ Date : 20121027
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Added workaround for Dell PE x620 where "System Board 1 Riser Config Err 0: Connected"
#@ element outputs wrong return code. Dell, please fix that.
#@ Added web-link to VMware CIM API 5.x at top of script.
#@---------------------------------------------------
#@ Date : 20130424
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Another workaround for Dell systems "System Board 1 LCD Cable Pres 0: Connected"
#@---------------------------------------------------
#@ Date : 20130702
#@ Author : Carl R. Friend
#@ Reason : Improving wrong authentication timeout and exit UNKNOWN
#@---------------------------------------------------
#@ Date : 20130725
#@ Author : Phil Randal (phil.randal@gmail.com)
#@ Reason : Fix lookup of warranty info for Dell
#@---------------------------------------------------
#@ Date : 20140319
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Another two workarounds for Dell systems (VGA Cable Pres 0, Add-in Card 4 PEM Presence 0)
#@---------------------------------------------------
#@ Date : 20150109
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Output serial number of chassis if a blade server is checked
#@---------------------------------------------------
#@ Date : 20150119
#@ Author : Andreas Gottwald
#@ Reason : Fix NoneType element bug
#@---------------------------------------------------
#@ Date : 20150626
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Added support for patched pywbem 0.7.0 and new version 0.8.0, handle SSL error exception
#@---------------------------------------------------
#@ Date : 20150710
#@ Author : Stanislav German-Evtushenko
#@ Reason : Exit Unknown instead of Critical for timeouts and auth errors
#@---------------------------------------------------
#@ Date : 20151111
#@ Author : Stefan Roos
#@ Reason : Removed unused sensor_value variable and string import.
#@ Reason : Added global hosturl variable declaration after imports.
#@---------------------------------------------------
#@ Date : 20160411
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Distinguish between pywbem 0.7 and 0.8 (which is now released)
#@---------------------------------------------------
#@ Date : 20160531
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Add parameter for variable CIM port (useful when behind NAT)
#@---------------------------------------------------
#@ Date : 20161013
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Added support for pywbem 0.9.x (and upcoming releases)
#@---------------------------------------------------
#@ Date : 20170905
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Added option to ignore LCD/Display related elements (--no-lcd)
#@---------------------------------------------------
#@ Date : 20180329
#@ Author : Claudio Kuenzler (www.claudiokuenzler.com)
#@ Reason : Try to use internal pywbem function to determine version
#@---------------------------------------------------
#@ Date : 20180411
#@ Author : Peter Newman
#@ Reason : Throw an unknown if we can't fetch the data for some reason
#@---------------------------------------------------
#@ Date : 20181001
#@ Author : Claudio Kuenzler
#@ Reason : python3 compatibility
#@---------------------------------------------------
from __future__ import print_function
import sys
import time
import pywbem
import re
import pkg_resources
from optparse import OptionParser,OptionGroup
# Script version string (used by optparse --version).
version = '20181001'
# CIM namespace queried on the ESXi host.
NS = 'root/cimv2'
# Full https URL of the target host; filled in by getopts().
hosturl = ''
# define classes to check 'OperationStatus' instance
ClassesToCheck = [
    'OMC_SMASHFirmwareIdentity',
    'CIM_Chassis',
    'CIM_Card',
    'CIM_ComputerSystem',
    'CIM_NumericSensor',
    'CIM_Memory',
    'CIM_Processor',
    'CIM_RecordLog',
    'OMC_DiscreteSensor',
    'OMC_Fan',
    'OMC_PowerSupply',
    'VMware_StorageExtent',
    'VMware_Controller',
    'VMware_StorageVolume',
    'VMware_Battery',
    'VMware_SASSATAPort'
]
# CIM_NumericSensor SensorType code -> human readable name (per DMTF schema).
sensor_Type = {
    0:'unknown',
    1:'Other',
    2:'Temperature',
    3:'Voltage',
    4:'Current',
    5:'Tachometer',
    6:'Counter',
    7:'Switch',
    8:'Lock',
    9:'Humidity',
    10:'Smoke Detection',
    11:'Presence',
    12:'Air Flow',
    13:'Power Consumption',
    14:'Power Production',
    15:'Pressure',
    16:'Intrusion',
    32768:'DMTF Reserved',
    65535:'Vendor Reserved'
}
# Collected perfdata tuples: (formatted "name=value;warn;crit " string, group id).
data = []
# Perfdata group id -> label prefix used when emitting sorted perfdata.
perf_Prefix = {
    1:'Pow',
    2:'Vol',
    3:'Cur',
    4:'Tem',
    5:'Fan',
    6:'FanP'
}
# parameters
# host name
hostname=''
# cim port
cimport=''
# user
user=''
# password
password=''
# vendor - possible values are 'unknown', 'auto', 'dell', 'hp', 'ibm', 'intel'
vendor='unknown'
# verbose
verbose=False
# Produce performance data output for nagios
perfdata=False
# timeout
timeout = 0
# elements to ignore (full SEL, broken BIOS, etc)
ignore_list=[]
# urlise model and tag numbers (currently only Dell supported, but the code does the right thing for other vendors)
urlise_country=''
# collect perfdata for each category
get_power = True
get_volts = True
get_current = True
get_temp = True
get_fan = True
get_lcd = True
# define exit codes (standard Nagios plugin return codes)
ExitOK = 0
ExitWarning = 1
ExitCritical = 2
ExitUnknown = 3
# Special handling for blade servers
isblade = "no"
# Country code -> language/region path segment used in Dell support-site URLs.
# Codes not listed fall back to the 'en/us/' default below.
_DELL_COUNTRY_PATHS = {
    'at': 'at/de/',  # Austria
    'be': 'be/nl/',  # Belgium
    'cz': 'cz/cs/',  # Czech Republic
    'de': 'de/de/',  # Germany
    'dk': 'dk/da/',  # Denmark
    'es': 'es/es/',  # Spain
    'fi': 'fi/fi/',  # Finland
    'fr': 'fr/fr/',  # France
    'gr': 'gr/en/',  # Greece
    'it': 'it/it/',  # Italy
    'il': 'il/en/',  # Israel
    'me': 'me/en/',  # Middle East
    'no': 'no/no/',  # Norway
    'nl': 'nl/nl/',  # The Netherlands
    'pl': 'pl/pl/',  # Poland
    'pt': 'pt/en/',  # Portugal
    'ru': 'ru/ru/',  # Russia
    'se': 'se/sv/',  # Sweden
    'uk': 'uk/en/',  # United Kingdom
    'za': 'za/en/',  # South Africa
    'br': 'br/pt/',  # Brazil
    'ca': 'ca/en/',  # Canada
    'mx': 'mx/es/',  # Mexico
    'us': 'us/en/',  # United States
    'au': 'au/en/',  # Australia
    'cn': 'cn/zh/',  # China
    'in': 'in/en/',  # India
}

def dell_country(country):
    """Return the Dell support-site path segment for *country*.

    country -- two-letter country code (lower case) as passed via --html.
    Returns the 'cc/lang/' URL fragment; unknown codes return the
    default 'en/us/'.  Behavior is identical to the previous 27-branch
    if-chain, just expressed as a single dict lookup.
    """
    return _DELL_COUNTRY_PATHS.get(country, 'en/us/')
def urlised_server_info(vendor, country, server_info):
    """Turn a Dell PowerEdge model string into an HTML support-site link.

    For vendor 'dell', the 'PowerEdge <model>' portion of *server_info* is
    wrapped in an <a> tag pointing at the Dell troubleshooting page for that
    model.  All other vendors (hp, ibm, intel, unknown) get *server_info*
    back unchanged.
    """
    if vendor != 'dell':
        # No URL tables wired up for non-Dell vendors yet.
        return server_info
    # Dell support URLs (idea and tables borrowed from check_openmanage)
    base = 'http://www.dell.com/support/troubleshooting/' + dell_country(country) + '19/Product/poweredge-'
    if server_info is not None:
        match = re.match('(.*)PowerEdge (.*) (.*)', server_info)
        if match is not None:
            model = match.group(2)
            # Special case: the R210 II uses a non-obvious URL slug.
            if model == 'R210 II':
                model = 'r210-2'
            model = model.lower()
            server_info = (match.group(1) + '<a href="' + base + model
                           + '#ui-tabs-4">PowerEdge ' + match.group(2)
                           + '</a> ' + match.group(3))
    return server_info
# ----------------------------------------------------------------------
def system_tag_url(vendor,country):
    """Return the warranty-lookup URL prefix for *vendor*.

    Only Dell is implemented: the returned prefix only needs the service tag
    appended.  Every other vendor (hp/ibm/intel/...) yields '' so callers can
    skip linkification.
    """
    if vendor != 'dell':
        # hp / ibm / intel lookup tables not implemented yet.
        return ''
    # Dell support sites: warranty URLs differ per country code.
    return ('http://www.dell.com/support/troubleshooting/'
            + dell_country(country)
            + 'nodhs1/Index?t=warranty&servicetag=')
# ----------------------------------------------------------------------
def urlised_serialnumber(vendor,country,SerialNumber):
    """Wrap *SerialNumber* in an HTML warranty-lookup link when available.

    Returns the serial unchanged when it is None or when system_tag_url()
    has no URL for this vendor.
    """
    if SerialNumber is None:
        return SerialNumber
    tag_url = system_tag_url(vendor,country)
    if tag_url != '':
        SerialNumber = '<a href="' + tag_url + SerialNumber + '">' + SerialNumber + '</a>'
    return SerialNumber
# ----------------------------------------------------------------------
def verboseoutput(message) :
    """Print *message* prefixed with a timestamp, but only in --verbose mode.

    Reads the module-level 'verbose' flag set by getopts().
    """
    if not verbose:
        return
    print(time.strftime("%Y%m%d %H:%M:%S"), message)
# ----------------------------------------------------------------------
def getopts() :
    """Parse command-line arguments into the module-level option globals.

    Supports two calling conventions:
      * legacy positional style:  https://host user password vendor [verbose]
        (detected when argv[1] starts with 'https://')
      * optparse style:           -H host -U user -P pass [options...]

    Exits the process (sys.exit(-1)) on missing mandatory parameters.
    Also resolves 'file:<path>' indirection for user/password.
    """
    global hosturl,cimport,user,password,vendor,verbose,perfdata,urlise_country,timeout,ignore_list,get_power,get_volts,get_current,get_temp,get_fan,get_lcd
    usage = "usage: %prog -H hostname -U username -P password [-C port -V system -v -p -I XX]\n" \
        "example: %prog -H my-shiny-new-vmware-server -U root -P fakepassword -C 5989 -V auto -I uk\n\n" \
        "or, verbosely:\n\n" \
        "usage: %prog --host=hostname --user=username --pass=password [--cimport=port --vendor=system --verbose --perfdata --html=XX]\n"
    parser = OptionParser(usage=usage, version="%prog "+version)
    group1 = OptionGroup(parser, 'Mandatory parameters')
    group2 = OptionGroup(parser, 'Optional parameters')
    group1.add_option("-H", "--host", dest="host", help="report on HOST", metavar="HOST")
    group1.add_option("-U", "--user", dest="user", help="user to connect as", metavar="USER")
    group1.add_option("-P", "--pass", dest="password", \
        help="password, if password matches file:<path>, first line of given file will be used as password", metavar="PASS")
    group2.add_option("-C", "--cimport", dest="cimport", help="CIM port (default 5989)", metavar="CIMPORT")
    group2.add_option("-V", "--vendor", dest="vendor", help="Vendor code: auto, dell, hp, ibm, intel, or unknown (default)", \
        metavar="VENDOR", type='choice', choices=['auto','dell','hp','ibm','intel','unknown'],default="unknown")
    group2.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, \
        help="print status messages to stdout (default is to be quiet)")
    group2.add_option("-p", "--perfdata", action="store_true", dest="perfdata", default=False, \
        help="collect performance data for pnp4nagios (default is not to)")
    group2.add_option("-I", "--html", dest="urlise_country", default="", \
        help="generate html links for country XX (default is not to)", metavar="XX")
    group2.add_option("-t", "--timeout", action="store", type="int", dest="timeout", default=0, \
        help="timeout in seconds - no effect on Windows (default = no timeout)")
    group2.add_option("-i", "--ignore", action="store", type="string", dest="ignore", default="", \
        help="comma-separated list of elements to ignore")
    group2.add_option("--no-power", action="store_false", dest="get_power", default=True, \
        help="don't collect power performance data")
    group2.add_option("--no-volts", action="store_false", dest="get_volts", default=True, \
        help="don't collect voltage performance data")
    group2.add_option("--no-current", action="store_false", dest="get_current", default=True, \
        help="don't collect current performance data")
    group2.add_option("--no-temp", action="store_false", dest="get_temp", default=True, \
        help="don't collect temperature performance data")
    group2.add_option("--no-fan", action="store_false", dest="get_fan", default=True, \
        help="don't collect fan performance data")
    group2.add_option("--no-lcd", action="store_false", dest="get_lcd", default=True, \
        help="don't collect lcd/front display status")
    parser.add_option_group(group1)
    parser.add_option_group(group2)
    # check input arguments
    if len(sys.argv) < 2:
        print("no parameters specified\n")
        parser.print_help()
        sys.exit(-1)
    # if first argument starts with 'https://' we have old-style parameters, so handle in old way
    if re.match("https://",sys.argv[1]):
        # check input arguments
        if len(sys.argv) < 5:
            print("too few parameters\n")
            parser.print_help()
            sys.exit(-1)
        if len(sys.argv) > 5 :
            if sys.argv[5] == "verbose" :
                verbose = True
        hosturl = sys.argv[1]
        user = sys.argv[2]
        password = sys.argv[3]
        vendor = sys.argv[4]
    else:
        # we're dealing with new-style parameters, so go get them!
        (options, args) = parser.parse_args()
        # Making sure all mandatory options appeared.
        mandatories = ['host', 'user', 'password']
        for m in mandatories:
            if not options.__dict__[m]:
                print("mandatory parameter '--" + m + "' is missing\n")
                parser.print_help()
                sys.exit(-1)
        hostname=options.host.lower()
        # if user has put "https://" in front of hostname out of habit, do the right thing
        # hosturl will end up as https://hostname
        if re.match('^https://',hostname):
            hosturl = hostname
        else:
            hosturl = 'https://' + hostname
        user=options.user
        password=options.password
        cimport=options.cimport
        vendor=options.vendor.lower()
        verbose=options.verbose
        perfdata=options.perfdata
        urlise_country=options.urlise_country.lower()
        timeout=options.timeout
        ignore_list=options.ignore.split(',')
        get_power=options.get_power
        get_volts=options.get_volts
        get_current=options.get_current
        get_temp=options.get_temp
        get_fan=options.get_fan
        get_lcd=options.get_lcd
    # if user or password starts with 'file:', use the first string in file as user, second as password
    # NOTE(review): in the 'file:' user case both user AND password are read
    # from the file's first line; the elif branch only runs when just the
    # password uses file: indirection.
    if (re.match('^file:', user) or re.match('^file:', password)):
        if re.match('^file:', user):
            filextract = re.sub('^file:', '', user)
            filename = open(filextract, 'r')
            filetext = filename.readline().split()
            user = filetext[0]
            password = filetext[1]
            filename.close()
        elif re.match('^file:', password):
            filextract = re.sub('^file:', '', password)
            filename = open(filextract, 'r')
            filetext = filename.readline().split()
            password = filetext[0]
            filename.close()
# ----------------------------------------------------------------------
# Parse the command line and fill the module-level option globals.
getopts()
# if running on Windows, don't use timeouts and signal.alarm
on_windows = True
os_platform = sys.platform
if os_platform != "win32":
    on_windows = False
    import signal
    def handler(signum, frame):
        # SIGALRM handler: abort with Nagios UNKNOWN when the run exceeds --timeout.
        print('UNKNOWN: Execution time too long!')
        sys.exit(ExitUnknown)
if cimport:
    verboseoutput("Using manually defined CIM port "+cimport)
    hosturl += ':'+cimport
# Append lcd related elements to ignore list if --no-lcd was used
verboseoutput("LCD Status: %s" % get_lcd)
if not get_lcd:
    ignore_list.append("System Board 1 LCD Cable Pres 0: Connected")
    ignore_list.append("System Board 1 VGA Cable Pres 0: Connected")
    ignore_list.append("Front Panel Board 1 FP LCD Cable 0: Connected")
    ignore_list.append("Front Panel Board 1 FP LCD Cable 0: Config Error")
# connection to host
verboseoutput("Connection to "+hosturl)
# pywbem 0.7.0 handling is special, some patched 0.7.0 installations work differently
try:
    pywbemversion = pywbem.__version__
except:
    # Older pywbem builds lack __version__; fall back to package metadata.
    pywbemversion = pkg_resources.get_distribution("pywbem").version
else:
    pywbemversion = pywbem.__version__
verboseoutput("Found pywbem version "+pywbemversion)
if '0.7.' in pywbemversion:
    # Probe the connection first: patched 0.7.0 builds need no_verification=True.
    try:
        conntest = pywbem.WBEMConnection(hosturl, (user,password))
        c = conntest.EnumerateInstances('CIM_Card')
    except:
        #raise
        verboseoutput("Connection error, disable SSL certification verification (probably patched pywbem)")
        wbemclient = pywbem.WBEMConnection(hosturl, (user,password), no_verification=True)
    else:
        verboseoutput("Connection worked")
        wbemclient = pywbem.WBEMConnection(hosturl, (user,password))
# pywbem 0.8.0 and later
else:
    wbemclient = pywbem.WBEMConnection(hosturl, (user,password), NS, no_verification=True)
# Add a timeout for the script. When using with Nagios, the Nagios timeout cannot be < than plugin timeout.
if on_windows == False and timeout > 0:
    signal.signal(signal.SIGALRM, handler)
    signal.alarm(timeout)
# run the check for each defined class
GlobalStatus = ExitUnknown
server_info = ""
bios_info = ""
SerialNumber = ""
ExitMsg = ""
# if vendor is specified as 'auto', try to get vendor from CIM
# note: the default vendor is 'unknown'
if vendor=='auto':
    try:
        c=wbemclient.EnumerateInstances('CIM_Chassis')
    except pywbem.cim_operations.CIMError as args:
        # NOTE(review): args[1] indexes the exception object directly; this
        # relies on pywbem's CIMError supporting subscripting - verify on the
        # pywbem/Python versions in use.
        if ( args[1].find('Socket error') >= 0 ):
            print("UNKNOWN: {}".format(args))
            sys.exit (ExitUnknown)
        elif ( args[1].find('ThreadPool --- Failed to enqueue request') >= 0 ):
            print("UNKNOWN: {}".format(args))
            sys.exit (ExitUnknown)
        else:
            verboseoutput("Unknown CIM Error: %s" % args)
    except pywbem.cim_http.AuthError as arg:
        verboseoutput("Global exit set to UNKNOWN")
        GlobalStatus = ExitUnknown
        print("UNKNOWN: Authentication Error")
        sys.exit (GlobalStatus)
    else:
        # Derive the vendor from the chassis manufacturer string.
        man=c[0][u'Manufacturer']
        if re.match("Dell",man):
            vendor="dell"
        elif re.match("HP",man):
            vendor="hp"
        elif re.match("IBM",man):
            vendor="ibm"
        elif re.match("Intel",man):
            vendor="intel"
        else:
            vendor='unknown'
# Main check loop: enumerate every CIM class of interest and fold each
# instance's health into GlobalStatus / ExitMsg, collecting perfdata on the way.
for classe in ClassesToCheck :
    verboseoutput("Check classe "+classe)
    try:
        instance_list = wbemclient.EnumerateInstances(classe)
    except pywbem.cim_operations.CIMError as args:
        # NOTE(review): args[1] subscripts the exception object - confirm this
        # works on the deployed pywbem/Python combination.
        if ( args[1].find('Socket error') >= 0 ):
            print("UNKNOWN: {}".format(args))
            sys.exit (ExitUnknown)
        elif ( args[1].find('ThreadPool --- Failed to enqueue request') >= 0 ):
            print("UNKNOWN: {}".format(args))
            sys.exit (ExitUnknown)
        else:
            verboseoutput("Unknown CIM Error: %s" % args)
    except pywbem.cim_http.AuthError as arg:
        verboseoutput("Global exit set to UNKNOWN")
        GlobalStatus = ExitUnknown
        print("UNKNOWN: Authentication Error")
        sys.exit (GlobalStatus)
    else:
        # GlobalStatus = ExitOK #ARR
        for instance in instance_list :
            elementName = instance['ElementName']
            if elementName is None :
                elementName = 'Unknown'
            elementNameValue = elementName
            verboseoutput(" Element Name = "+elementName)
            # Ignore element if we don't want it
            if elementName in ignore_list :
                verboseoutput(" (ignored)")
                continue
            # BIOS & Server info
            if elementName == 'System BIOS' :
                bios_info = instance[u'Name'] + ': ' \
                    + instance[u'VersionString'] + ' ' \
                    + str(instance[u'ReleaseDate'].datetime.date())
                verboseoutput(" VersionString = "+instance[u'VersionString'])
            elif elementName == 'Chassis' :
                man = instance[u'Manufacturer']
                if man is None :
                    man = 'Unknown Manufacturer'
                verboseoutput(" Manufacturer = "+man)
                SerialNumber = instance[u'SerialNumber']
                SerialChassis = instance[u'SerialNumber']
                if SerialNumber:
                    verboseoutput(" SerialNumber = "+SerialNumber)
                server_info = man + ' '
                if vendor != 'intel':
                    model = instance[u'Model']
                    if model:
                        verboseoutput(" Model = "+model)
                        server_info += model + ' s/n:'
            elif elementName == 'Server Blade' :
                SerialNumber = instance[u'SerialNumber']
                if SerialNumber:
                    verboseoutput(" SerialNumber = "+SerialNumber)
                    isblade = "yes"
            # Report detail of Numeric Sensors and generate nagios perfdata
            if classe == "CIM_NumericSensor" :
                sensorType = instance[u'sensorType']
                sensStr = sensor_Type.get(sensorType,"Unknown")
                if sensorType:
                    verboseoutput(" sensorType = %d - %s" % (sensorType,sensStr))
                units = instance[u'BaseUnits']
                if units:
                    verboseoutput(" BaseUnits = %d" % units)
                # grab some of these values for Nagios performance data
                # UnitModifier is a power-of-ten exponent applied to the raw reading.
                scale = 10**instance[u'UnitModifier']
                verboseoutput(" Scaled by = %f " % scale)
                cr = int(instance[u'CurrentReading'])*scale
                verboseoutput(" Current Reading = %f" % cr)
                elementNameValue = "%s: %g" % (elementName,cr)
                ltnc = 0
                utnc = 0
                ltc = 0
                utc = 0
                if instance[u'LowerThresholdNonCritical'] is not None:
                    ltnc = instance[u'LowerThresholdNonCritical']*scale
                    verboseoutput(" Lower Threshold Non Critical = %f" % ltnc)
                if instance[u'UpperThresholdNonCritical'] is not None:
                    utnc = instance[u'UpperThresholdNonCritical']*scale
                    verboseoutput(" Upper Threshold Non Critical = %f" % utnc)
                if instance[u'LowerThresholdCritical'] is not None:
                    ltc = instance[u'LowerThresholdCritical']*scale
                    verboseoutput(" Lower Threshold Critical = %f" % ltc)
                if instance[u'UpperThresholdCritical'] is not None:
                    utc = instance[u'UpperThresholdCritical']*scale
                    verboseoutput(" Upper Threshold Critical = %f" % utc)
                #
                if perfdata:
                    perf_el = elementName.replace(' ','_')
                    # Power and Current
                    if sensorType == 4: # Current or Power Consumption
                        if units == 7: # Watts
                            if get_power:
                                data.append( ("%s=%g;%g;%g " % (perf_el, cr, utnc, utc),1) )
                        elif units == 6: # Current
                            if get_current:
                                data.append( ("%s=%g;%g;%g " % (perf_el, cr, utnc, utc),3) )
                    # PSU Voltage
                    elif sensorType == 3: # Voltage
                        if get_volts:
                            data.append( ("%s=%g;%g;%g " % (perf_el, cr, utnc, utc),2) )
                    # Temperatures
                    elif sensorType == 2: # Temperature
                        if get_temp:
                            data.append( ("%s=%g;%g;%g " % (perf_el, cr, utnc, utc),4) )
                    # Fan speeds
                    elif sensorType == 5: # Tachometer
                        if get_fan:
                            if units == 65: # percentage
                                data.append( ("%s=%g%%;%g;%g " % (perf_el, cr, utnc, utc),6) )
                            else:
                                data.append( ("%s=%g;%g;%g " % (perf_el, cr, utnc, utc),5) )
            elif classe == "CIM_Processor" :
                verboseoutput(" Family = %d" % instance['Family'])
                verboseoutput(" CurrentClockSpeed = %dMHz" % instance['CurrentClockSpeed'])
            # HP Check
            if vendor == "hp" :
                if instance['HealthState'] is not None :
                    elementStatus = instance['HealthState']
                    verboseoutput(" Element HealthState = %d" % elementStatus)
                    # Map CIM HealthState values to Nagios exit codes.
                    interpretStatus = {
                        0 : ExitOK, # Unknown
                        5 : ExitOK, # OK
                        10 : ExitWarning, # Degraded
                        15 : ExitWarning, # Minor
                        20 : ExitCritical, # Major
                        25 : ExitCritical, # Critical
                        30 : ExitCritical, # Non-recoverable Error
                    }[elementStatus]
                    if (interpretStatus == ExitCritical) :
                        verboseoutput("Global exit set to CRITICAL")
                        GlobalStatus = ExitCritical
                        ExitMsg += " CRITICAL : %s " % elementNameValue
                    if (interpretStatus == ExitWarning and GlobalStatus != ExitCritical) :
                        verboseoutput("Global exit set to WARNING")
                        GlobalStatus = ExitWarning
                        ExitMsg += " WARNING : %s " % elementNameValue
                    # Added the following for when GlobalStatus is ExitCritical and a warning is detected
                    # This way the ExitMsg gets added but GlobalStatus isn't changed
                    if (interpretStatus == ExitWarning and GlobalStatus == ExitCritical) : # ARR
                        ExitMsg += " WARNING : %s " % elementNameValue #ARR
                    # Added the following so that GlobalStatus gets set to OK if there's no warning or critical
                    if (interpretStatus == ExitOK and GlobalStatus != ExitWarning and GlobalStatus != ExitCritical) : #ARR
                        GlobalStatus = ExitOK #ARR
            # Dell, Intel, IBM and unknown hardware check
            elif (vendor == "dell" or vendor == "intel" or vendor == "ibm" or vendor=="unknown") :
                # Added 20121027 As long as Dell doesnt correct these CIM elements return code we have to ignore it
                # NOTE(review): these appends run once per instance, so the ignore
                # list accumulates duplicates - harmless but wasteful.
                ignore_list.append("System Board 1 Riser Config Err 0: Connected")
                ignore_list.append("Add-in Card 4 PEM Presence 0: Connected")
                if instance['OperationalStatus'] is not None :
                    elementStatus = instance['OperationalStatus'][0]
                    verboseoutput(" Element Op Status = %d" % elementStatus)
                    # Map CIM OperationalStatus values to Nagios exit codes.
                    interpretStatus = {
                        0 : ExitOK, # Unknown
                        1 : ExitCritical, # Other
                        2 : ExitOK, # OK
                        3 : ExitWarning, # Degraded
                        4 : ExitWarning, # Stressed
                        5 : ExitWarning, # Predictive Failure
                        6 : ExitCritical, # Error
                        7 : ExitCritical, # Non-Recoverable Error
                        8 : ExitWarning, # Starting
                        9 : ExitWarning, # Stopping
                        10 : ExitCritical, # Stopped
                        11 : ExitOK, # In Service
                        12 : ExitWarning, # No Contact
                        13 : ExitCritical, # Lost Communication
                        14 : ExitCritical, # Aborted
                        15 : ExitOK, # Dormant
                        16 : ExitCritical, # Supporting Entity in Error
                        17 : ExitOK, # Completed
                        18 : ExitOK, # Power Mode
                        19 : ExitOK, # DMTF Reserved
                        20 : ExitOK # Vendor Reserved
                    }[elementStatus]
                    if (interpretStatus == ExitCritical) :
                        verboseoutput("Global exit set to CRITICAL")
                        GlobalStatus = ExitCritical
                        ExitMsg += " CRITICAL : %s " % elementNameValue
                    if (interpretStatus == ExitWarning and GlobalStatus != ExitCritical) :
                        verboseoutput("Global exit set to WARNING")
                        GlobalStatus = ExitWarning
                        ExitMsg += " WARNING : %s " % elementNameValue
                    # Added same logic as in 20100702 here, otherwise Dell servers would return UNKNOWN instead of OK
                    if (interpretStatus == ExitWarning and GlobalStatus == ExitCritical) : # ARR
                        ExitMsg += " WARNING : %s " % elementNameValue #ARR
                    if (interpretStatus == ExitOK and GlobalStatus != ExitWarning and GlobalStatus != ExitCritical) : #ARR
                        GlobalStatus = ExitOK #ARR
            # Blade serials come back as 'chassis.blade'; keep only the blade part.
            if elementName == 'Server Blade' :
                if SerialNumber :
                    if SerialNumber.find(".") != -1 :
                        SerialNumber = SerialNumber.split('.')[1]
# Munge the output to give links to documentation and warranty info
if (urlise_country != '') :
    SerialNumber = urlised_serialnumber(vendor,urlise_country,SerialNumber)
    server_info = urlised_server_info(vendor,urlise_country,server_info)
# If this is a blade server, also output chassis serial number as additional info
if (isblade == "yes") :
    SerialNumber += " Chassis S/N: %s " % (SerialChassis)
# Output performance data
perf = '|'
if perfdata:
    sdata=[]
    ctr=[0,0,0,0,0,0,0]
    # sort the data so we always get perfdata in the right order
    # we make no assumptions about the order in which CIM returns data
    # first sort by element name (effectively) and insert sequence numbers
    for p in sorted(data):
        p1 = p[1]
        sdata.append( ("P%d%s_%d_%s") % (p1,perf_Prefix[p1], ctr[p1], p[0]) )
        ctr[p1] += 1
    # then sort perfdata into groups and output perfdata string
    for p in sorted(sdata):
        perf += p
# sanitise perfdata - don't output "|" if nothing to report
if perf == '|':
    perf = ''
# Emit the single Nagios status line and exit with the matching return code.
if GlobalStatus == ExitOK :
    print("OK - Server: %s %s %s%s" % (server_info, SerialNumber, bios_info, perf))
elif GlobalStatus == ExitUnknown :
    print("UNKNOWN: %s" % (ExitMsg)) #ARR
else:
    print("%s - Server: %s %s %s%s" % (ExitMsg, server_info, SerialNumber, bios_info, perf))
sys.exit (GlobalStatus)
|
kenorb/wp_site
|
refs/heads/master
|
sites/all/modules/contributions/[wysiwyg]/fckeditor/fckeditor/editor/filemanager/connectors/py/connector.py
|
44
|
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector( FCKeditorConnectorBase,
                          GetFoldersCommandMixin,
                          GetFoldersAndFilesCommandMixin,
                          CreateFolderCommandMixin,
                          UploadFileCommandMixin,
                          BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
    "The Standard connector class."

    def doResponse(self):
        "Main function. Process the request, set headers and return a string as response."
        s = ""
        # Check if this connector is disabled
        if not(Config.Enabled):
            return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
        # Make sure we have valid inputs; silently return (empty response)
        # when any of the three mandatory request fields is missing.
        for key in ("Command","Type","CurrentFolder"):
            if key not in self.request:
                return
        # Get command, resource type and current folder
        command = self.request.get("Command")
        resourceType = self.request.get("Type")
        currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
        # Check for invalid paths
        if currentFolder is None:
            return self.sendError(102, "")
        # Check if it is an allowed command
        if ( not command in Config.ConfigAllowedCommands ):
            return self.sendError( 1, 'The %s command isn\'t allowed' % command )
        if ( not resourceType in Config.ConfigAllowedTypes ):
            return self.sendError( 1, 'Invalid type specified' )
        # Setup paths: QuickUpload uses its own target directories.
        if command == "QuickUpload":
            self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
            self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
        else:
            self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
            self.webUserFilesFolder = Config.FileTypesPath[resourceType]
        if not self.userFilesFolder: # no absolute path given (dangerous...)
            self.userFilesFolder = mapServerPath(self.environ,
                                    self.webUserFilesFolder)
        # Ensure that the directory exists.
        if not os.path.exists(self.userFilesFolder):
            try:
                # BUG FIX: was 'self.createServerFoldercreateServerFolder(...)'
                # (duplicated method name), which always raised AttributeError
                # and made every request with a missing folder return an error.
                self.createServerFolder( self.userFilesFolder )
            except:
                return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
        # File upload doesn't have to return XML, so intercept here
        if (command == "FileUpload"):
            return self.uploadFile(resourceType, currentFolder)
        # Create Url
        url = combinePaths( self.webUserFilesFolder, currentFolder )
        # Begin XML
        s += self.createXmlHeader(command, resourceType, currentFolder, url)
        # Execute the command via a small dispatch table.
        selector = {"GetFolders": self.getFolders,
                    "GetFoldersAndFiles": self.getFoldersAndFiles,
                    "CreateFolder": self.createFolder,
                    }
        s += selector[command](resourceType, currentFolder)
        s += self.createXmlFooter()
        return s
# Running from command line (plain old CGI)
# NOTE: Python 2 print statements - this file targets Python 2 only.
if __name__ == '__main__':
    try:
        # Create a Connector Instance
        conn = FCKeditorConnector()
        data = conn.doResponse()
        for header in conn.headers:
            print '%s: %s' % header
        print
        print data
    except:
        # Any failure: fall back to a plain-text traceback for the browser.
        print "Content-Type: text/plain"
        print
        import cgi
        cgi.print_exception()
|
suneeth51/neutron
|
refs/heads/master
|
neutron/db/migration/alembic_migrations/mlnx_init_ops.py
|
32
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Initial operations for the Mellanox plugin
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the initial schema for the Mellanox (mlnx) plugin.

    Kept byte-identical apart from comments: applied Alembic migrations
    must never change behavior.
    """
    # Pool of segmentation IDs per physical network; 'allocated' marks the
    # IDs currently in use and defaults to false at the database level.
    op.create_table(
        'segmentation_id_allocation',
        sa.Column('physical_network', sa.String(length=64), nullable=False),
        sa.Column('segmentation_id', sa.Integer(), autoincrement=False,
                  nullable=False),
        sa.Column('allocated', sa.Boolean(), nullable=False,
                  server_default=sa.sql.false()),
        sa.PrimaryKeyConstraint('physical_network', 'segmentation_id'))
    # Per-network binding to a network type / physical network / segment,
    # cascaded on network deletion.
    op.create_table(
        'mlnx_network_bindings',
        sa.Column('network_id', sa.String(length=36), nullable=False),
        sa.Column('network_type', sa.String(length=32), nullable=False),
        sa.Column('physical_network', sa.String(length=64), nullable=True),
        sa.Column('segmentation_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['network_id'], ['networks.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('network_id'))
    # Per-port VNIC type, cascaded on port deletion.
    op.create_table(
        'port_profile',
        sa.Column('port_id', sa.String(length=36), nullable=False),
        sa.Column('vnic_type', sa.String(length=32), nullable=False),
        sa.ForeignKeyConstraint(['port_id'], ['ports.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('port_id'))
|
IvanJobs/play
|
refs/heads/master
|
ceph/swift/auth_get.py
|
1
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import urllib
from urllib.request import Request
import hmac
import hashlib
import base64
import datetime
import sys
# demouserid
#access_key = 'Z2ETKC4RQFTR4XBQ1A72'
#secret_key = 'vqdQGtmruGW855mduffA8lsLx+ot9iXIb9QTtT2I'
#host_port = '172.16.6.81:7480'
#host_port = '10.192.40.29'
#hr
#access_key = "9M3C3NCBEWSRDPRJGL0O"
#secret_key = "QCS0ju6dkqblLVQe966KwuE2Cg6cCfS/S2u2K+Qt"
# demo from local vcenter
# Swift v1 auth credentials ('account:user' form) for the local test cluster.
# NOTE(review): hard-coded test credentials/IPs - fine for a scratch script,
# never for production.
user = 'demo:swift'
key='J6X0gj6O4NOMGBPVKbW9Rde4Kx5Fb4ck0TeSJ1pN'
#content = None
#with open('./test03.xls', 'rb') as f:
#    content = f.read()
# Build the GET /auth/v1 request that trades user/key for a token.
req = Request('http://172.16.6.78/auth/v1',
        method = 'GET')
timestr = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
req.add_header('Host', '172.16.6.78')
req.add_header('Date', timestr)
#req.add_header('x-amz-acl', 'public-read-write')
req.add_header('X-Auth-User', user)
req.add_header('X-Auth-Key', key)
# Fire the request and dump the token, storage URL, status and body.
with urllib.request.urlopen(req) as f:
    print(f.getheader('X-Auth-Token', 'nulll'))
    print(f.getheader('X-Storage-Url', 'ok'))
    print(f.status)
    print(f.read().decode('utf-8'))
|
skylerberg/third-party-ci-tools
|
refs/heads/master
|
monitoring/ciwatch/ciwatch/db.py
|
1
|
# Copyright (c) 2015 Tintri. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from ciwatch import models
from ciwatch.config import cfg, get_projects
# Module-level database wiring: runs at import time. Creates the engine from
# the configured connection string, binds the session factory, ensures all
# model tables exist, and opens the single module-wide session used below.
engine = create_engine(cfg.database.connection)
Session = sessionmaker()
Session.configure(bind=engine)
models.Base.metadata.create_all(engine)
session = Session()
def create_projects():
    """Ensure a Project row exists for every configured project name.

    Rows are staged without intermediate commits and flushed to the
    database in one final commit.
    """
    for project_name in get_projects():
        get_or_create(models.Project,
                      commit_=False,
                      name=project_name)
    session.commit()
def update_or_create_comment(commit_=True, **kwargs):
    """Insert or update the Comment keyed by (ci_server_id, patch_set_id).

    kwargs must include 'ci_server_id' and 'patch_set_id'; any other keys
    are written onto the existing row (update) or passed to the Comment
    constructor (insert).  Commits unless commit_ is False.
    """
    comment = session.query(models.Comment).filter_by(
        ci_server_id=kwargs['ci_server_id'],
        patch_set_id=kwargs['patch_set_id']).scalar()
    if comment is not None:
        # BUG FIX: dict.iteritems() is Python-2-only and raises
        # AttributeError on Python 3; items() behaves the same on both.
        for key, value in kwargs.items():
            setattr(comment, key, value)
    else:
        session.add(models.Comment(**kwargs))
    if commit_:
        session.commit()
def get_or_create(model, commit_=True, **kwargs):
    """Return the first *model* row matching **kwargs, creating one if absent.

    Newly created rows are added to the module session and committed
    unless commit_ is False; existing rows are returned untouched.
    """
    row = session.query(model).filter_by(**kwargs).first()
    if not row:
        row = model(**kwargs)
        session.add(row)
        if commit_:
            session.commit()
    return row
|
yedivanseven/LPDE
|
refs/heads/master
|
lpde/geometry/grid.py
|
1
|
class Grid:
    """An immutable pair of positive grid-point counts (x, y).

    Both dimensions are validated on construction: they must be true ints
    (bool is rejected) and at least 1.
    """

    def __init__(self, x: int, y: int) -> None:
        self._x = self._validated(x)
        self._y = self._validated(y)

    @property
    def x(self) -> int:
        # Number of grid points along the first axis.
        return self._x

    @property
    def y(self) -> int:
        # Number of grid points along the second axis.
        return self._y

    @staticmethod
    def _validated(value: int) -> int:
        # Exact type check on purpose: excludes bool and int subclasses.
        if type(value) is not int:
            raise TypeError('Number of grid points must be an integer!')
        if value < 1:
            raise ValueError('Number of grid points must be positive!')
        return value
if __name__ == '__main__':
    # Manual smoke test: build a grid and echo both dimensions.
    grid = Grid(10, 20)
    print(grid.x)
    print(grid.y)
|
kisonecat/sequences-and-series
|
refs/heads/master
|
quizzes/multiplyPowerSeries/__init__.py
|
3
|
from questions import *
class Question(RandomizedQuestion):
    """Randomized quiz question: multiply two power series and read off the
    product's low-order terms.

    NOTE(review): relies on names injected by 'from questions import *' and
    the Sage environment (var, randint, latex, QQ, join, global x) - this
    module only runs inside that environment.
    """
    # Metadata consumed by the quiz framework.
    module = __file__
    video = 'multiply-power-series'
    forum = 10160
    title = 'multiply two power series'

    def good_enough(self):
        # Any randomization is acceptable; no rejection sampling needed.
        return True

    def perturb(self):
        # Build two random series terms a_n x^n with small integer
        # coefficients, expand their first five terms, and keep the product
        # truncated below x^4 as the correct answer.
        x = var('x')
        self.x = x
        nn = var('n')
        self.nn = nn
        self.term_a = (randint(1,5) * nn + randint(1,5)) * x**nn
        self.term_b = (randint(1,5) * nn + randint(1,5)) * x**nn
        self.sum_a = sum([self.term_a(n=i) for i in range(0,5)])
        self.sum_b = sum([self.term_b(n=i) for i in range(0,5)])
        self.the_answer = (self.sum_a * self.sum_b).polynomial(QQ).truncate(4)

    def __init__(self):
        self.perturb()
        super(Question, self).__init__()

    def distractors(self,count):
        # Wrong answers differ from the truth in the x^2 coefficient (and
        # possibly x^3/x^4), so the x^2 term always discriminates.
        results = []
        for i in range(1,count+1):
            wrong_answer = self.the_answer + i*(x**2) * (-1)**(randint(0,1)) + randint(0,8)*(x**3) * (-1)**(randint(0,1)) + randint(0,8)*(x**4) * (-1)**(randint(0,1))
            wrong_answer = wrong_answer.polynomial(QQ).truncate(4)
            results.append((r'\(' + join((latex(wrong_answer).split('+'))[::-1],'+') + r'+ \cdots \)','Focus on the \(x^2\) term, since that is the first to differ among the choices.'))
        return results

    def verify(self):
        return True

    def answer(self):
        # Render terms lowest-degree-first by reversing the '+'-split latex.
        return r'\(' + join((latex(self.the_answer).split('+'))[::-1],'+') + r'+ \cdots \)'
|
bsipocz/astropy
|
refs/heads/hacking
|
astropy/coordinates/tests/test_frames.py
|
1
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from copy import deepcopy
import numpy as np
from astropy import units as u
from astropy.tests.helper import (catch_warnings, pytest,
assert_quantity_allclose as assert_allclose)
from astropy.utils import OrderedDescriptorContainer
from astropy.utils.compat import NUMPY_LT_1_14
from astropy.utils.exceptions import AstropyWarning
from astropy.coordinates import representation as r
from astropy.coordinates.representation import REPRESENTATION_CLASSES
from astropy.units import allclose
from .test_representation import unitphysics # this fixture is used below
def setup_function(func):
    """Snapshot the global representation registry before each test."""
    func.REPRESENTATION_CLASSES_ORIG = deepcopy(REPRESENTATION_CLASSES)


def teardown_function(func):
    """Restore the registry snapshot taken by :func:`setup_function`."""
    REPRESENTATION_CLASSES.clear()
    REPRESENTATION_CLASSES.update(func.REPRESENTATION_CLASSES_ORIG)
def test_frame_attribute_descriptor():
    """ Unit tests of the Attribute descriptor """
    from astropy.coordinates.attributes import Attribute

    class TestAttributes(metaclass=OrderedDescriptorContainer):
        attr_none = Attribute()
        attr_2 = Attribute(default=2)
        attr_3_attr2 = Attribute(default=3, secondary_attribute='attr_2')
        attr_none_attr2 = Attribute(default=None, secondary_attribute='attr_2')
        attr_none_nonexist = Attribute(default=None, secondary_attribute='nonexist')

    t = TestAttributes()
    # Defaults
    assert t.attr_none is None
    assert t.attr_2 == 2
    assert t.attr_3_attr2 == 3
    assert t.attr_none_attr2 == t.attr_2
    assert t.attr_none_nonexist is None  # No default and non-existent secondary attr
    # Setting values via '_'-prefixed internal vars (as would normally done in __init__)
    t._attr_none = 10
    assert t.attr_none == 10
    t._attr_2 = 20
    assert t.attr_2 == 20
    assert t.attr_3_attr2 == 3
    assert t.attr_none_attr2 == t.attr_2
    t._attr_none_attr2 = 40
    assert t.attr_none_attr2 == 40
    # Make sure setting values via public attribute fails
    with pytest.raises(AttributeError) as err:
        t.attr_none = 5
    assert 'Cannot set frame attribute' in str(err.value)


def test_frame_subclass_attribute_descriptor():
    """Check Attribute inheritance/override in a frame subclass."""
    from astropy.coordinates.builtin_frames import FK4
    from astropy.coordinates.attributes import Attribute, TimeAttribute
    from astropy.time import Time

    _EQUINOX_B1980 = Time('B1980', scale='tai')

    class MyFK4(FK4):
        # equinox inherited from FK4, obstime overridden, and newattr is new
        obstime = TimeAttribute(default=_EQUINOX_B1980)
        newattr = Attribute(default='newattr')

    mfk4 = MyFK4()
    assert mfk4.equinox.value == 'B1950.000'
    assert mfk4.obstime.value == 'B1980.000'
    assert mfk4.newattr == 'newattr'
    assert set(mfk4.get_frame_attr_names()) == set(['equinox', 'obstime', 'newattr'])
    mfk4 = MyFK4(equinox='J1980.0', obstime='J1990.0', newattr='world')
    assert mfk4.equinox.value == 'J1980.000'
    assert mfk4.obstime.value == 'J1990.000'
    assert mfk4.newattr == 'world'
def test_create_data_frames():
    """Frames built from representations and from component kwargs agree."""
    from astropy.coordinates.builtin_frames import ICRS
    # from repr
    i1 = ICRS(r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc))
    i2 = ICRS(r.UnitSphericalRepresentation(lon=1*u.deg, lat=2*u.deg))
    # from preferred name
    i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc)
    i4 = ICRS(ra=1*u.deg, dec=2*u.deg)
    assert i1.data.lat == i3.data.lat
    assert i1.data.lon == i3.data.lon
    assert i1.data.distance == i3.data.distance
    assert i2.data.lat == i4.data.lat
    assert i2.data.lon == i4.data.lon
    # now make sure the preferred names work as properties
    assert_allclose(i1.ra, i3.ra)
    assert_allclose(i2.ra, i4.ra)
    assert_allclose(i1.distance, i3.distance)
    with pytest.raises(AttributeError):
        i1.ra = [11.]*u.deg


def test_create_orderered_data():
    """Positional component arguments map to the frame's preferred names."""
    from astropy.coordinates.builtin_frames import ICRS, Galactic, AltAz
    TOL = 1e-10*u.deg
    i = ICRS(1*u.deg, 2*u.deg)
    assert (i.ra - 1*u.deg) < TOL
    assert (i.dec - 2*u.deg) < TOL
    g = Galactic(1*u.deg, 2*u.deg)
    assert (g.l - 1*u.deg) < TOL
    assert (g.b - 2*u.deg) < TOL
    a = AltAz(1*u.deg, 2*u.deg)
    assert (a.az - 1*u.deg) < TOL
    assert (a.alt - 2*u.deg) < TOL
    with pytest.raises(TypeError):
        ICRS(1*u.deg, 2*u.deg, 1*u.deg, 2*u.deg)
    with pytest.raises(TypeError):
        sph = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc)
        ICRS(sph, 1*u.deg, 2*u.deg)


def test_create_nodata_frames():
    """Data-less frames still expose their default frame attributes."""
    from astropy.coordinates.builtin_frames import ICRS, FK4, FK5
    i = ICRS()
    assert len(i.get_frame_attr_names()) == 0
    f5 = FK5()
    assert f5.equinox == FK5.get_frame_attr_names()['equinox']
    f4 = FK4()
    assert f4.equinox == FK4.get_frame_attr_names()['equinox']
    # obstime is special because it's a property that uses equinox if obstime is not set
    assert f4.obstime in (FK4.get_frame_attr_names()['obstime'],
                          FK4.get_frame_attr_names()['equinox'])


def test_no_data_nonscalar_frames():
    """Non-scalar frame attributes broadcast against each other (or fail)."""
    from astropy.coordinates.builtin_frames import AltAz
    from astropy.time import Time
    a1 = AltAz(obstime=Time('2012-01-01') + np.arange(10.) * u.day,
               temperature=np.ones((3, 1)) * u.deg_C)
    assert a1.obstime.shape == (3, 10)
    assert a1.temperature.shape == (3, 10)
    assert a1.shape == (3, 10)
    with pytest.raises(ValueError) as exc:
        AltAz(obstime=Time('2012-01-01') + np.arange(10.) * u.day,
              temperature=np.ones((3,)) * u.deg_C)
    assert 'inconsistent shapes' in str(exc.value)
def test_frame_repr():
    """Pin the exact repr of data-less, scalar and array frames."""
    from astropy.coordinates.builtin_frames import ICRS, FK5
    i = ICRS()
    assert repr(i) == '<ICRS Frame>'
    f5 = FK5()
    assert repr(f5).startswith('<FK5 Frame (equinox=')
    i2 = ICRS(ra=1*u.deg, dec=2*u.deg)
    i3 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.kpc)
    # The expected strings branch on the numpy version because numpy 1.14
    # changed array repr whitespace.
    assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n'
                        ' ({})>').format(' 1., 2.' if NUMPY_LT_1_14
                                         else '1., 2.')
    assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n'
                        ' ({})>').format(' 1., 2., 3.' if NUMPY_LT_1_14
                                         else '1., 2., 3.')
    # try with arrays
    i2 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[2.1, 3.1]*u.deg)
    i3 = ICRS(ra=[1.1, 2.1]*u.deg, dec=[-15.6, 17.1]*u.deg, distance=[11., 21.]*u.kpc)
    assert repr(i2) == ('<ICRS Coordinate: (ra, dec) in deg\n'
                        ' [{}]>').format('( 1.1, 2.1), ( 2.1, 3.1)'
                                         if NUMPY_LT_1_14 else
                                         '(1.1, 2.1), (2.1, 3.1)')
    if NUMPY_LT_1_14:
        assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n'
                            ' [( 1.1, -15.6, 11.), ( 2.1, 17.1, 21.)]>')
    else:
        assert repr(i3) == ('<ICRS Coordinate: (ra, dec, distance) in (deg, deg, kpc)\n'
                            ' [(1.1, -15.6, 11.), (2.1, 17.1, 21.)]>')


def test_frame_repr_vels():
    """Pin the repr of a frame carrying proper-motion (velocity) data."""
    from astropy.coordinates.builtin_frames import ICRS
    i = ICRS(ra=1*u.deg, dec=2*u.deg,
             pm_ra_cosdec=1*u.marcsec/u.yr, pm_dec=2*u.marcsec/u.yr)
    # unit comes out as mas/yr because of the preferred units defined in the
    # frame RepresentationMapping
    assert repr(i) == ('<ICRS Coordinate: (ra, dec) in deg\n'
                       ' ({0})\n'
                       ' (pm_ra_cosdec, pm_dec) in mas / yr\n'
                       ' ({0})>').format(' 1., 2.' if NUMPY_LT_1_14 else
                                         '1., 2.')
def test_converting_units():
    """Preferred display units should survive round-trip transformations."""
    import re
    from astropy.coordinates.baseframe import RepresentationMapping
    from astropy.coordinates.builtin_frames import ICRS, FK5

    # this is a regular expression that with split (see below) removes what's
    # the decimal point to fix rounding problems
    rexrepr = re.compile(r'(.*?=\d\.).*?( .*?=\d\.).*?( .*)')

    # Use values that aren't subject to rounding down to X.9999...
    i2 = ICRS(ra=2.*u.deg, dec=2.*u.deg)
    i2_many = ICRS(ra=[2., 4.]*u.deg, dec=[2., -8.1]*u.deg)

    # converting from FK5 to ICRS and back changes the *internal* representation,
    # but it should still come out in the preferred form
    i4 = i2.transform_to(FK5).transform_to(ICRS)
    i4_many = i2_many.transform_to(FK5).transform_to(ICRS)

    ri2 = ''.join(rexrepr.split(repr(i2)))
    ri4 = ''.join(rexrepr.split(repr(i4)))
    assert ri2 == ri4
    assert i2.data.lon.unit != i4.data.lon.unit  # Internal repr changed

    ri2_many = ''.join(rexrepr.split(repr(i2_many)))
    ri4_many = ''.join(rexrepr.split(repr(i4_many)))
    assert ri2_many == ri4_many
    assert i2_many.data.lon.unit != i4_many.data.lon.unit  # Internal repr changed

    # but that *shouldn't* hold if we turn off units for the representation
    class FakeICRS(ICRS):
        frame_specific_representation_info = {
            'spherical': [RepresentationMapping('lon', 'ra', u.hourangle),
                          RepresentationMapping('lat', 'dec', None),
                          RepresentationMapping('distance', 'distance')]  # should fall back to default of None unit
        }

    fi = FakeICRS(i4.data)
    ri2 = ''.join(rexrepr.split(repr(i2)))
    rfi = ''.join(rexrepr.split(repr(fi)))
    rfi = re.sub('FakeICRS', 'ICRS', rfi)  # Force frame name to match
    assert ri2 != rfi

    # the attributes should also get the right units
    assert i2.dec.unit == i4.dec.unit
    # unless no/explicitly given units
    assert i2.dec.unit != fi.dec.unit
    assert i2.ra.unit != fi.ra.unit
    assert fi.ra.unit == u.hourangle


def test_representation_info():
    """frame_specific_representation_info renames components and sets units."""
    from astropy.coordinates.baseframe import RepresentationMapping
    from astropy.coordinates.builtin_frames import ICRS

    class NewICRS1(ICRS):
        frame_specific_representation_info = {
            r.SphericalRepresentation: [
                RepresentationMapping('lon', 'rara', u.hourangle),
                RepresentationMapping('lat', 'decdec', u.degree),
                RepresentationMapping('distance', 'distance', u.kpc)]
        }

    i1 = NewICRS1(rara=10*u.degree, decdec=-12*u.deg, distance=1000*u.pc,
                  pm_rara_cosdecdec=100*u.mas/u.yr,
                  pm_decdec=17*u.mas/u.yr,
                  radial_velocity=10*u.km/u.s)
    assert allclose(i1.rara, 10*u.deg)
    assert i1.rara.unit == u.hourangle
    assert allclose(i1.decdec, -12*u.deg)
    assert allclose(i1.distance, 1000*u.pc)
    assert i1.distance.unit == u.kpc
    assert allclose(i1.pm_rara_cosdecdec, 100*u.mas/u.yr)
    assert allclose(i1.pm_decdec, 17*u.mas/u.yr)

    # this should auto-set the names of UnitSpherical:
    i1.set_representation_cls(r.UnitSphericalRepresentation,
                              s=r.UnitSphericalCosLatDifferential)
    assert allclose(i1.rara, 10*u.deg)
    assert allclose(i1.decdec, -12*u.deg)
    assert allclose(i1.pm_rara_cosdecdec, 100*u.mas/u.yr)
    assert allclose(i1.pm_decdec, 17*u.mas/u.yr)

    # For backwards compatibility, we also support the string name in the
    # representation info dictionary:
    class NewICRS2(ICRS):
        frame_specific_representation_info = {
            'spherical': [
                RepresentationMapping('lon', 'ang1', u.hourangle),
                RepresentationMapping('lat', 'ang2', u.degree),
                RepresentationMapping('distance', 'howfar', u.kpc)]
        }

    i2 = NewICRS2(ang1=10*u.degree, ang2=-12*u.deg, howfar=1000*u.pc)
    assert allclose(i2.ang1, 10*u.deg)
    assert i2.ang1.unit == u.hourangle
    assert allclose(i2.ang2, -12*u.deg)
    assert allclose(i2.howfar, 1000*u.pc)
    assert i2.howfar.unit == u.kpc

    # Test that the differential kwargs get overridden
    class NewICRS3(ICRS):
        frame_specific_representation_info = {
            r.SphericalCosLatDifferential: [
                RepresentationMapping('d_lon_coslat', 'pm_ang1', u.hourangle/u.year),
                RepresentationMapping('d_lat', 'pm_ang2'),
                RepresentationMapping('d_distance', 'vlos', u.kpc/u.Myr)]
        }

    i3 = NewICRS3(lon=10*u.degree, lat=-12*u.deg, distance=1000*u.pc,
                  pm_ang1=1*u.mas/u.yr, pm_ang2=2*u.mas/u.yr,
                  vlos=100*u.km/u.s)
    assert allclose(i3.pm_ang1, 1*u.mas/u.yr)
    assert i3.pm_ang1.unit == u.hourangle/u.year
    assert allclose(i3.pm_ang2, 2*u.mas/u.yr)
    assert allclose(i3.vlos, 100*u.km/u.s)
    assert i3.vlos.unit == u.kpc/u.Myr
def test_realizing():
    """realize_frame attaches data while keeping frame attributes."""
    from astropy.coordinates.builtin_frames import ICRS, FK5
    from astropy.time import Time
    rep = r.SphericalRepresentation(1*u.deg, 2*u.deg, 3*u.kpc)
    i = ICRS()
    i2 = i.realize_frame(rep)
    assert not i.has_data
    assert i2.has_data
    f = FK5(equinox=Time('J2001'))
    f2 = f.realize_frame(rep)
    assert not f.has_data
    assert f2.has_data
    assert f2.equinox == f.equinox
    assert f2.equinox != FK5.get_frame_attr_names()['equinox']
    # Check that a nicer error message is returned:
    with pytest.raises(TypeError) as excinfo:
        f.realize_frame(f.representation_type)
    assert ('Class passed as data instead of a representation' in
            excinfo.value.args[0])


def test_replicating():
    """replicate(copy=...) controls data sharing; replicate_without_data drops it."""
    from astropy.coordinates.builtin_frames import ICRS, AltAz
    from astropy.time import Time
    i = ICRS(ra=[1]*u.deg, dec=[2]*u.deg)
    icopy = i.replicate(copy=True)
    irepl = i.replicate(copy=False)
    # Mutating the original data must be visible through the shared replica
    # but not through the deep copy.
    i.data._lat[:] = 0*u.deg
    assert np.all(i.data.lat == irepl.data.lat)
    assert np.all(i.data.lat != icopy.data.lat)
    iclone = i.replicate_without_data()
    assert i.has_data
    assert not iclone.has_data
    aa = AltAz(alt=1*u.deg, az=2*u.deg, obstime=Time('J2000'))
    aaclone = aa.replicate_without_data(obstime=Time('J2001'))
    assert not aaclone.has_data
    assert aa.obstime != aaclone.obstime
    assert aa.pressure == aaclone.pressure
    assert aa.obswl == aaclone.obswl


def test_getitem():
    """Indexing a frame slices its data; scalar index gives a scalar frame."""
    from astropy.coordinates.builtin_frames import ICRS
    rep = r.SphericalRepresentation(
        [1, 2, 3]*u.deg, [4, 5, 6]*u.deg, [7, 8, 9]*u.kpc)
    i = ICRS(rep)
    assert len(i.ra) == 3
    iidx = i[1:]
    assert len(iidx.ra) == 2
    iidx2 = i[0]
    assert iidx2.ra.isscalar
def test_transform():
    """
    This test just makes sure the transform architecture works, but does *not*
    actually test all the builtin transforms themselves are accurate
    """
    from astropy.coordinates.builtin_frames import ICRS, FK4, FK5, Galactic
    from astropy.time import Time
    i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg)
    f = i.transform_to(FK5)
    i2 = f.transform_to(ICRS)
    assert i2.data.__class__ == r.UnitSphericalRepresentation
    assert_allclose(i.ra, i2.ra)
    assert_allclose(i.dec, i2.dec)
    i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc)
    f = i.transform_to(FK5)
    i2 = f.transform_to(ICRS)
    assert i2.data.__class__ != r.UnitSphericalRepresentation
    f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001'))
    f4 = f.transform_to(FK4)
    f4_2 = f.transform_to(FK4(equinox=f.equinox))
    # make sure attributes are copied over correctly
    assert f4.equinox == FK4.get_frame_attr_names()['equinox']
    assert f4_2.equinox == f.equinox
    # make sure self-transforms also work
    i = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg)
    i2 = i.transform_to(ICRS)
    assert_allclose(i.ra, i2.ra)
    assert_allclose(i.dec, i2.dec)
    f = FK5(ra=1*u.deg, dec=2*u.deg, equinox=Time('J2001'))
    f2 = f.transform_to(FK5)  # default equinox, so should be *different*
    assert f2.equinox == FK5().equinox
    with pytest.raises(AssertionError):
        assert_allclose(f.ra, f2.ra)
    with pytest.raises(AssertionError):
        assert_allclose(f.dec, f2.dec)
    # finally, check Galactic round-tripping
    i1 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg)
    i2 = i1.transform_to(Galactic).transform_to(ICRS)
    assert_allclose(i1.ra, i2.ra)
    assert_allclose(i1.dec, i2.dec)


def test_transform_to_nonscalar_nodata_frame():
    # https://github.com/astropy/astropy/pull/5254#issuecomment-241592353
    from astropy.coordinates.builtin_frames import ICRS, FK5
    from astropy.time import Time
    times = Time('2016-08-23') + np.linspace(0, 10, 12)*u.day
    coo1 = ICRS(ra=[[0.], [10.], [20.]]*u.deg,
                dec=[[-30.], [30.], [60.]]*u.deg)
    coo2 = coo1.transform_to(FK5(equinox=times))
    assert coo2.shape == (3, 12)


def test_sep():
    """Angular and 3D separations, with and without velocity data."""
    from astropy.coordinates.builtin_frames import ICRS
    i1 = ICRS(ra=0*u.deg, dec=1*u.deg)
    i2 = ICRS(ra=0*u.deg, dec=2*u.deg)
    sep = i1.separation(i2)
    assert sep.deg == 1
    i3 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc)
    i4 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[4, 5]*u.kpc)
    sep3d = i3.separation_3d(i4)
    assert_allclose(sep3d.to(u.kpc), np.array([1, 1])*u.kpc)
    # check that it works even with velocities
    i5 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[5, 6]*u.kpc,
              pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr,
              radial_velocity=[5, 6]*u.km/u.s)
    i6 = ICRS(ra=[1, 2]*u.deg, dec=[3, 4]*u.deg, distance=[7, 8]*u.kpc,
              pm_ra_cosdec=[1, 2]*u.mas/u.yr, pm_dec=[3, 4]*u.mas/u.yr,
              radial_velocity=[5, 6]*u.km/u.s)
    sep3d = i5.separation_3d(i6)
    assert_allclose(sep3d.to(u.kpc), np.array([2, 2])*u.kpc)
    # 3d separations of dimensionless distances should still work
    i7 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=3*u.one)
    i8 = ICRS(ra=1*u.deg, dec=2*u.deg, distance=4*u.one)
    sep3d = i7.separation_3d(i8)
    assert_allclose(sep3d, 1*u.one)
    # but should fail with non-dimensionless
    with pytest.raises(ValueError):
        i7.separation_3d(i3)
def test_time_inputs():
    """
    Test validation and conversion of inputs for equinox and obstime attributes.
    """
    from astropy.time import Time
    from astropy.coordinates.builtin_frames import FK4
    c = FK4(1 * u.deg, 2 * u.deg, equinox='J2001.5', obstime='2000-01-01 12:00:00')
    assert c.equinox == Time('J2001.5')
    assert c.obstime == Time('2000-01-01 12:00:00')
    with pytest.raises(ValueError) as err:
        c = FK4(1 * u.deg, 2 * u.deg, equinox=1.5)
    assert 'Invalid time input' in str(err.value)
    with pytest.raises(ValueError) as err:
        c = FK4(1 * u.deg, 2 * u.deg, obstime='hello')
    assert 'Invalid time input' in str(err.value)
    # A vector time should work if the shapes match, but we don't automatically
    # broadcast the basic data (just like time).
    FK4([1, 2] * u.deg, [2, 3] * u.deg, obstime=['J2000', 'J2001'])
    with pytest.raises(ValueError) as err:
        FK4(1 * u.deg, 2 * u.deg, obstime=['J2000', 'J2001'])
    assert 'shape' in str(err.value)


def test_is_frame_attr_default():
    """
    Check that the `is_frame_attr_default` machinery works as expected
    """
    from astropy.time import Time
    from astropy.coordinates.builtin_frames import FK5
    c1 = FK5(ra=1*u.deg, dec=1*u.deg)
    c2 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=FK5.get_frame_attr_names()['equinox'])
    c3 = FK5(ra=1*u.deg, dec=1*u.deg, equinox=Time('J2001.5'))
    assert c1.equinox == c2.equinox
    assert c1.equinox != c3.equinox
    # An explicitly passed attribute is never "default", even when it equals
    # the default value (c2).
    assert c1.is_frame_attr_default('equinox')
    assert not c2.is_frame_attr_default('equinox')
    assert not c3.is_frame_attr_default('equinox')
    c4 = c1.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg))
    c5 = c2.realize_frame(r.UnitSphericalRepresentation(3*u.deg, 4*u.deg))
    assert c4.is_frame_attr_default('equinox')
    assert not c5.is_frame_attr_default('equinox')


def test_altaz_attributes():
    """AltAz default attributes are None and accept obstime/location."""
    from astropy.time import Time
    from astropy.coordinates import EarthLocation, AltAz
    aa = AltAz(1*u.deg, 2*u.deg)
    assert aa.obstime is None
    assert aa.location is None
    aa2 = AltAz(1*u.deg, 2*u.deg, obstime='J2000')
    assert aa2.obstime == Time('J2000')
    aa3 = AltAz(1*u.deg, 2*u.deg, location=EarthLocation(0*u.deg, 0*u.deg, 0*u.m))
    assert isinstance(aa3.location, EarthLocation)
def test_representation():
    """
    Test the getter and setter properties for `representation`
    """
    from astropy.coordinates.builtin_frames import ICRS

    # Create the frame object.
    icrs = ICRS(ra=1*u.deg, dec=1*u.deg)
    data = icrs.data

    # Create some representation objects.
    icrs_cart = icrs.cartesian
    icrs_spher = icrs.spherical
    icrs_cyl = icrs.cylindrical

    # Testing when `_representation` set to `CartesianRepresentation`.
    icrs.representation_type = r.CartesianRepresentation
    assert icrs.representation_type == r.CartesianRepresentation
    assert icrs_cart.x == icrs.x
    assert icrs_cart.y == icrs.y
    assert icrs_cart.z == icrs.z
    assert icrs.data == data

    # Testing that an ICRS object in CartesianRepresentation must not have spherical attributes.
    for attr in ('ra', 'dec', 'distance'):
        with pytest.raises(AttributeError) as err:
            getattr(icrs, attr)
        assert 'object has no attribute' in str(err.value)

    # Testing when `_representation` set to `CylindricalRepresentation`.
    icrs.representation_type = r.CylindricalRepresentation
    assert icrs.representation_type == r.CylindricalRepresentation
    assert icrs.data == data

    # Testing setter input using text argument for spherical.
    icrs.representation_type = 'spherical'
    assert icrs.representation_type is r.SphericalRepresentation
    assert icrs_spher.lat == icrs.dec
    assert icrs_spher.lon == icrs.ra
    assert icrs_spher.distance == icrs.distance
    assert icrs.data == data

    # Testing that an ICRS object in SphericalRepresentation must not have cartesian attributes.
    for attr in ('x', 'y', 'z'):
        with pytest.raises(AttributeError) as err:
            getattr(icrs, attr)
        assert 'object has no attribute' in str(err.value)

    # Testing setter input using text argument for cylindrical.
    icrs.representation_type = 'cylindrical'
    assert icrs.representation_type is r.CylindricalRepresentation
    assert icrs_cyl.rho == icrs.rho
    assert icrs_cyl.phi == icrs.phi
    assert icrs_cyl.z == icrs.z
    assert icrs.data == data

    # Testing that an ICRS object in CylindricalRepresentation must not have spherical attributes.
    for attr in ('ra', 'dec', 'distance'):
        with pytest.raises(AttributeError) as err:
            getattr(icrs, attr)
        assert 'object has no attribute' in str(err.value)

    with pytest.raises(ValueError) as err:
        icrs.representation_type = 'WRONG'
    assert 'but must be a BaseRepresentation class' in str(err.value)

    with pytest.raises(ValueError) as err:
        icrs.representation_type = ICRS
    assert 'but must be a BaseRepresentation class' in str(err.value)
def test_represent_as():
    """Check represent_as() by string name and by class, with velocities.

    Bug fix: the cart1/cart2 component comparisons below were bare
    expressions (``cart1.x == cart2.x``) whose results were discarded, so
    they could never fail; they are now real assertions.
    """
    from astropy.coordinates.builtin_frames import ICRS

    icrs = ICRS(ra=1*u.deg, dec=1*u.deg)

    cart1 = icrs.represent_as('cartesian')
    cart2 = icrs.represent_as(r.CartesianRepresentation)

    assert cart1.x == cart2.x
    assert cart1.y == cart2.y
    assert cart1.z == cart2.z

    # now try with velocities
    icrs = ICRS(ra=0*u.deg, dec=0*u.deg, distance=10*u.kpc,
                pm_ra_cosdec=0*u.mas/u.yr, pm_dec=0*u.mas/u.yr,
                radial_velocity=1*u.km/u.s)

    # single string
    rep2 = icrs.represent_as('cylindrical')
    assert isinstance(rep2, r.CylindricalRepresentation)
    assert isinstance(rep2.differentials['s'], r.CylindricalDifferential)

    # single class with positional in_frame_units, verify that warning raised
    with catch_warnings() as w:
        icrs.represent_as(r.CylindricalRepresentation, False)
        assert len(w) == 1
        assert w[0].category == AstropyWarning
        assert 'argument position' in str(w[0].message)

    # TODO: this should probably fail in the future once we figure out a better
    # workaround for dealing with UnitSphericalRepresentation's with
    # RadialDifferential's
    # two classes
    # rep2 = icrs.represent_as(r.CartesianRepresentation,
    #                          r.SphericalCosLatDifferential)
    # assert isinstance(rep2, r.CartesianRepresentation)
    # assert isinstance(rep2.differentials['s'], r.SphericalCosLatDifferential)

    with pytest.raises(ValueError):
        icrs.represent_as('odaigahara')
def test_shorthand_representations():
    """Shorthand properties (.cylindrical, .spherical, ...) carry differentials."""
    from astropy.coordinates.builtin_frames import ICRS
    rep = r.CartesianRepresentation([1, 2, 3]*u.pc)
    dif = r.CartesianDifferential([1, 2, 3]*u.km/u.s)
    rep = rep.with_differentials(dif)
    icrs = ICRS(rep)
    cyl = icrs.cylindrical
    assert isinstance(cyl, r.CylindricalRepresentation)
    assert isinstance(cyl.differentials['s'], r.CylindricalDifferential)
    sph = icrs.spherical
    assert isinstance(sph, r.SphericalRepresentation)
    assert isinstance(sph.differentials['s'], r.SphericalDifferential)
    sph = icrs.sphericalcoslat
    assert isinstance(sph, r.SphericalRepresentation)
    assert isinstance(sph.differentials['s'], r.SphericalCosLatDifferential)


def test_dynamic_attrs():
    """Component names are dynamic attributes; unknown names still settable."""
    from astropy.coordinates.builtin_frames import ICRS
    c = ICRS(1*u.deg, 2*u.deg)
    assert 'ra' in dir(c)
    assert 'dec' in dir(c)
    with pytest.raises(AttributeError) as err:
        c.blahblah
    assert "object has no attribute 'blahblah'" in str(err.value)
    with pytest.raises(AttributeError) as err:
        c.ra = 1
    assert "Cannot set any frame attribute" in str(err.value)
    # Non-component attributes can be set freely.
    c.blahblah = 1
    assert c.blahblah == 1


def test_nodata_error():
    """Accessing .data on a data-less frame raises a helpful ValueError."""
    from astropy.coordinates.builtin_frames import ICRS
    i = ICRS()
    with pytest.raises(ValueError) as excinfo:
        i.data
    assert 'does not have associated data' in str(excinfo.value)


def test_len0_data():
    """A zero-length frame still has data and a working repr."""
    from astropy.coordinates.builtin_frames import ICRS
    i = ICRS([]*u.deg, []*u.deg)
    assert i.has_data
    repr(i)


def test_quantity_attributes():
    """QuantityAttribute inputs must have the right unit and shape."""
    from astropy.coordinates.builtin_frames import GCRS
    # make sure we can create a GCRS frame with valid inputs
    GCRS(obstime='J2002', obsgeoloc=[1, 2, 3]*u.km, obsgeovel=[4, 5, 6]*u.km/u.s)
    # make sure it fails for invalid lovs or vels
    with pytest.raises(TypeError):
        GCRS(obsgeoloc=[1, 2, 3])  # no unit
    with pytest.raises(u.UnitsError):
        GCRS(obsgeoloc=[1, 2, 3]*u.km/u.s)  # incorrect unit
    with pytest.raises(ValueError):
        GCRS(obsgeoloc=[1, 3]*u.km)  # incorrect shape
@pytest.mark.remote_data
def test_eloc_attributes():
    """EarthLocationAttribute converts ITRS/GCRS inputs onto the geoid."""
    from astropy.coordinates import AltAz, ITRS, GCRS, EarthLocation
    el = EarthLocation(lon=12.3*u.deg, lat=45.6*u.deg, height=1*u.km)
    it = ITRS(r.SphericalRepresentation(lon=12.3*u.deg, lat=45.6*u.deg, distance=1*u.km))
    gc = GCRS(ra=12.3*u.deg, dec=45.6*u.deg, distance=6375*u.km)
    el1 = AltAz(location=el).location
    assert isinstance(el1, EarthLocation)
    # these should match *exactly* because the EarthLocation
    assert el1.lat == el.lat
    assert el1.lon == el.lon
    assert el1.height == el.height
    el2 = AltAz(location=it).location
    assert isinstance(el2, EarthLocation)
    # these should *not* match because giving something in Spherical ITRS is
    # *not* the same as giving it as an EarthLocation: EarthLocation is on an
    # elliptical geoid. So the longitude should match (because flattening is
    # only along the z-axis), but latitude should not. Also, height is relative
    # to the *surface* in EarthLocation, but the ITRS distance is relative to
    # the center of the Earth
    assert not allclose(el2.lat, it.spherical.lat)
    assert allclose(el2.lon, it.spherical.lon)
    assert el2.height < -6000*u.km
    el3 = AltAz(location=gc).location
    # GCRS inputs implicitly get transformed to ITRS and then onto
    # EarthLocation's elliptical geoid. So both lat and lon shouldn't match
    assert isinstance(el3, EarthLocation)
    assert not allclose(el3.lat, gc.dec)
    assert not allclose(el3.lon, gc.ra)
    assert np.abs(el3.height) < 500*u.km
def test_equivalent_frames():
    """is_equivalent_frame compares class and frame attributes, not data."""
    from astropy.coordinates import SkyCoord
    from astropy.coordinates.builtin_frames import ICRS, FK4, FK5, AltAz
    i = ICRS()
    i2 = ICRS(1*u.deg, 2*u.deg)
    assert i.is_equivalent_frame(i)
    assert i.is_equivalent_frame(i2)
    with pytest.raises(TypeError):
        assert i.is_equivalent_frame(10)
    with pytest.raises(TypeError):
        assert i2.is_equivalent_frame(SkyCoord(i2))
    f0 = FK5()  # this J2000 is TT
    f1 = FK5(equinox='J2000')
    f2 = FK5(1*u.deg, 2*u.deg, equinox='J2000')
    f3 = FK5(equinox='J2010')
    f4 = FK4(equinox='J2010')
    assert f1.is_equivalent_frame(f1)
    assert not i.is_equivalent_frame(f1)
    assert f0.is_equivalent_frame(f1)
    assert f1.is_equivalent_frame(f2)
    assert not f1.is_equivalent_frame(f3)
    assert not f3.is_equivalent_frame(f4)
    aa1 = AltAz()
    aa2 = AltAz(obstime='J2010')
    assert aa2.is_equivalent_frame(aa2)
    assert not aa1.is_equivalent_frame(i)
    assert not aa1.is_equivalent_frame(aa2)


def test_representation_subclass():
    # Regression test for #3354
    from astropy.coordinates.builtin_frames import FK5
    # Normally when instantiating a frame without a distance the frame will try
    # and use UnitSphericalRepresentation internally instead of
    # SphericalRepresentation.
    frame = FK5(representation_type=r.SphericalRepresentation, ra=32 * u.deg, dec=20 * u.deg)
    assert type(frame._data) == r.UnitSphericalRepresentation
    assert frame.representation_type == r.SphericalRepresentation
    # If using a SphericalRepresentation class this used to not work, so we
    # test here that this is now fixed.
    class NewSphericalRepresentation(r.SphericalRepresentation):
        attr_classes = r.SphericalRepresentation.attr_classes
    frame = FK5(representation_type=NewSphericalRepresentation, lon=32 * u.deg, lat=20 * u.deg)
    assert type(frame._data) == r.UnitSphericalRepresentation
    assert frame.representation_type == NewSphericalRepresentation
    # A similar issue then happened in __repr__ with subclasses of
    # SphericalRepresentation.
    assert repr(frame) == ("<FK5 Coordinate (equinox=J2000.000): (lon, lat) in deg\n"
                           " ({})>").format(' 32., 20.' if NUMPY_LT_1_14
                                            else '32., 20.')
    # A more subtle issue is when specifying a custom
    # UnitSphericalRepresentation subclass for the data and
    # SphericalRepresentation or a subclass for the representation.
    class NewUnitSphericalRepresentation(r.UnitSphericalRepresentation):
        attr_classes = r.UnitSphericalRepresentation.attr_classes
        def __repr__(self):
            return "<NewUnitSphericalRepresentation: spam spam spam>"
    frame = FK5(NewUnitSphericalRepresentation(lon=32 * u.deg, lat=20 * u.deg),
                representation_type=NewSphericalRepresentation)
    assert repr(frame) == "<FK5 Coordinate (equinox=J2000.000): spam spam spam>"
def test_getitem_representation():
    """
    Make sure current representation survives __getitem__ even if different
    from data representation.
    """
    from astropy.coordinates.builtin_frames import ICRS
    c = ICRS([1, 1] * u.deg, [2, 2] * u.deg)
    c.representation_type = 'cartesian'
    assert c[0].representation_type is r.CartesianRepresentation


def test_component_error_useful():
    """
    Check that a data-less frame gives useful error messages about not having
    data when the attributes asked for are possible coordinate components
    """
    from astropy.coordinates.builtin_frames import ICRS
    i = ICRS()
    with pytest.raises(ValueError) as excinfo:
        i.ra
    assert 'does not have associated data' in str(excinfo.value)
    with pytest.raises(AttributeError) as excinfo1:
        i.foobar
    with pytest.raises(AttributeError) as excinfo2:
        i.lon  # lon is *not* the component name despite being the underlying representation's name
    assert "object has no attribute 'foobar'" in str(excinfo1.value)
    assert "object has no attribute 'lon'" in str(excinfo2.value)


def test_cache_clear():
    """cache.clear() empties the per-frame representation cache."""
    from astropy.coordinates.builtin_frames import ICRS
    i = ICRS(1*u.deg, 2*u.deg)
    # Add an in frame units version of the rep to the cache.
    repr(i)
    assert len(i.cache['representation']) == 2
    i.cache.clear()
    assert len(i.cache['representation']) == 0


def test_inplace_array():
    """In-place mutation of array data is visible after clearing the cache."""
    from astropy.coordinates.builtin_frames import ICRS
    i = ICRS([[1, 2], [3, 4]]*u.deg, [[10, 20], [30, 40]]*u.deg)
    # Add an in frame units version of the rep to the cache.
    repr(i)
    # Check that repr() has added a rep to the cache
    assert len(i.cache['representation']) == 2
    # Modify the data
    i.data.lon[:, 0] = [100, 200]*u.deg
    # Clear the cache
    i.cache.clear()
    # This will use a second (potentially cached rep)
    assert_allclose(i.ra, [[100, 2], [200, 4]]*u.deg)
    assert_allclose(i.dec, [[10, 20], [30, 40]]*u.deg)


def test_inplace_change():
    """In-place mutation of scalar data is visible after clearing the cache."""
    from astropy.coordinates.builtin_frames import ICRS
    i = ICRS(1*u.deg, 2*u.deg)
    # Add an in frame units version of the rep to the cache.
    repr(i)
    # Check that repr() has added a rep to the cache
    assert len(i.cache['representation']) == 2
    # Modify the data
    i.data.lon[()] = 10*u.deg
    # Clear the cache
    i.cache.clear()
    # This will use a second (potentially cached rep)
    assert i.ra == 10 * u.deg
    assert i.dec == 2 * u.deg
def test_representation_with_multiple_differentials():
    """Frames reject representations carrying more than one differential."""
    from astropy.coordinates.builtin_frames import ICRS
    dif1 = r.CartesianDifferential([1, 2, 3]*u.km/u.s)
    dif2 = r.CartesianDifferential([1, 2, 3]*u.km/u.s**2)
    rep = r.CartesianRepresentation([1, 2, 3]*u.pc,
                                    differentials={'s': dif1, 's2': dif2})
    # check that a ValueError is raised for multiple differentials
    # (original comment said "warning", but the code checks for an error)
    with pytest.raises(ValueError):
        ICRS(rep)
def test_representation_arg_backwards_compatibility():
    """Cartesian construction by class object vs. string name is equivalent.

    Bug fix: the final assertion compared ``c1.representation_type`` to
    itself (a tautology that could never fail); it now compares against
    ``c3``, which was built via the string shorthand.
    """
    # TODO: this test can be removed when the `representation` argument is
    # removed from the BaseCoordinateFrame initializer.
    from astropy.coordinates.builtin_frames import ICRS

    c1 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation_type=r.CartesianRepresentation)

    c2 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation_type=r.CartesianRepresentation)

    c3 = ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
              representation_type='cartesian')

    assert c1.x == c2.x
    assert c1.y == c2.y
    assert c1.z == c2.z

    assert c1.x == c3.x
    assert c1.y == c3.y
    assert c1.z == c3.z

    assert c1.representation_type == c3.representation_type

    # Passing both the old and new keyword arguments is an error.
    with pytest.raises(ValueError):
        ICRS(x=1*u.pc, y=2*u.pc, z=3*u.pc,
             representation_type='cartesian',
             representation='cartesian')
def test_missing_component_error_names():
"""
This test checks that the component names are frame component names, not
representation or differential names, when referenced in an exception raised
when not passing in enough data. For example:
ICRS(ra=10*u.deg)
should state:
TypeError: __init__() missing 1 required positional argument: 'dec'
"""
from astropy.coordinates.builtin_frames import ICRS
with pytest.raises(TypeError) as e:
ICRS(ra=150 * u.deg)
assert "missing 1 required positional argument: 'dec'" in str(e.value)
with pytest.raises(TypeError) as e:
ICRS(ra=150*u.deg, dec=-11*u.deg,
pm_ra=100*u.mas/u.yr, pm_dec=10*u.mas/u.yr)
assert "pm_ra_cosdec" in str(e.value)
def test_non_spherical_representation_unit_creation(unitphysics):
from astropy.coordinates.builtin_frames import ICRS
class PhysicsICRS(ICRS):
default_representation = r.PhysicsSphericalRepresentation
pic = PhysicsICRS(phi=1*u.deg, theta=25*u.deg, r=1*u.kpc)
assert isinstance(pic.data, r.PhysicsSphericalRepresentation)
picu = PhysicsICRS(phi=1*u.deg, theta=25*u.deg)
assert isinstance(picu.data, unitphysics)
def test_attribute_repr():
    """Frame attributes delegate their repr to ``_astropy_repr_in_frame``."""
    from astropy.coordinates.attributes import Attribute
    from astropy.coordinates.baseframe import BaseCoordinateFrame

    class Spam:
        def _astropy_repr_in_frame(self):
            return "TEST REPR"

    class TestFrame(BaseCoordinateFrame):
        attrtest = Attribute(default=Spam())

    # The custom hook's output must appear in the frame's repr.
    assert "TEST REPR" in repr(TestFrame())
def test_component_names_repr():
    """Renamed components appear in the frame repr, and the rename applies
    to whole component names only (no substring replacement)."""
    from astropy.coordinates.baseframe import BaseCoordinateFrame, RepresentationMapping

    # Frame class with new component names that includes a name swap.
    class NameChangeFrame(BaseCoordinateFrame):
        default_representation = r.PhysicsSphericalRepresentation

        frame_specific_representation_info = {
            r.PhysicsSphericalRepresentation: [
                RepresentationMapping('phi', 'theta', u.deg),
                RepresentationMapping('theta', 'phi', u.arcsec),
                RepresentationMapping('r', 'JUSTONCE', u.AU)]
        }

    frame = NameChangeFrame(0*u.deg, 0*u.arcsec, 0*u.AU)
    text = repr(frame)

    # The new names must show up in the frame repr...
    assert "(theta, phi, JUSTONCE)" in text
    # ...and the letter "r" must not have been replaced anywhere else.
    assert text.count("JUSTONCE") == 1
|
Bennson/Projects
|
refs/heads/master
|
Project Euler/008 - Largest Product in a series/Largest_Product_in_a_series.py
|
1
|
# Project Euler 008 -- Largest product in a series.
# Find the WINDOW adjacent digits in the 1000-digit number below with the
# greatest product.
from math import prod

string = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"

WINDOW = 13  # number of adjacent digits whose product we maximize

digits = [int(ch) for ch in string]

# Slide a WINDOW-wide window across the digits and keep the best product.
# (Generalized: the original hard-coded 13 factors and the length 1000.)
maxproduct = max(
    prod(digits[i:i + WINDOW]) for i in range(len(digits) - WINDOW + 1)
)

print(maxproduct)
|
edx/edx-enterprise
|
refs/heads/master
|
integrated_channels/degreed/migrations/0008_auto_20191001_0742.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-10-01 07:42
from django.db import migrations, models


class Migration(migrations.Migration):
    """Add ``catalogs_to_transmit`` to the Degreed customer configuration
    model and to its history (django-simple-history style) table."""

    dependencies = [
        ('degreed', '0007_auto_20190925_0730'),
    ]

    operations = [
        migrations.AddField(
            model_name='degreedenterprisecustomerconfiguration',
            name='catalogs_to_transmit',
            field=models.TextField(blank=True, help_text='A comma-separated list of catalog UUIDs to transmit.', null=True),
        ),
        # Mirror the same column on the historical table so both schemas
        # stay in sync.
        migrations.AddField(
            model_name='historicaldegreedenterprisecustomerconfiguration',
            name='catalogs_to_transmit',
            field=models.TextField(blank=True, help_text='A comma-separated list of catalog UUIDs to transmit.', null=True),
        ),
    ]
|
lllcho/CAPTCHA-breaking
|
refs/heads/master
|
keras-master/keras/layers/normalization.py
|
22
|
from ..layers.core import Layer
from ..utils.theano_utils import shared_zeros
from .. import initializations
import theano.tensor as T
class BatchNormalization(Layer):
    '''
    Batch-normalization layer (legacy Theano-based implementation).

    Reference:
        Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift
            http://arxiv.org/pdf/1502.03167v3.pdf

        mode: 0 -> featurewise normalization
              1 -> samplewise normalization (may sometimes outperform featurewise mode)

        momentum: momentum term in the computation of a running estimate of the mean and std of the data

    NOTE(review): a ``mode`` other than 0 or 1 leaves ``X_normed`` unbound
    in get_output and would raise at runtime -- confirm callers only pass
    0 or 1.
    '''
    def __init__(self, input_shape, epsilon=1e-6, mode=0, momentum=0.9, weights=None):
        super(BatchNormalization,self).__init__()
        self.init = initializations.get("uniform")  # initializer for gamma
        self.input_shape = input_shape
        self.epsilon = epsilon  # small constant guarding divisions by zero
        self.mode = mode
        self.momentum = momentum

        # Learnable scale (gamma, uniform-initialized) and shift (beta, zeros).
        self.gamma = self.init((self.input_shape))
        self.beta = shared_zeros(self.input_shape)

        # Running estimates are created lazily on the first training pass.
        self.running_mean = None
        self.running_std = None

        self.params = [self.gamma, self.beta]
        if weights is not None:
            self.set_weights(weights)

    def get_output(self, train):
        X = self.get_input(train)
        if self.mode == 0:
            # Feature-wise: statistics are taken over the batch axis.
            if train:
                m = X.mean(axis=0)
                # manual computation of std to prevent NaNs
                std = T.mean((X-m)**2 + self.epsilon, axis=0) ** 0.5
                X_normed = (X - m) / (std + self.epsilon)

                # Update the exponential running estimates as a side effect
                # of the training-mode forward pass.
                if self.running_mean is None:
                    self.running_mean = m
                    self.running_std = std
                else:
                    self.running_mean *= self.momentum
                    self.running_mean += (1-self.momentum) * m
                    self.running_std *= self.momentum
                    self.running_std += (1-self.momentum) * std
            else:
                # Inference: reuse the estimates gathered during training.
                X_normed = (X - self.running_mean) / (self.running_std + self.epsilon)
        elif self.mode == 1:
            # Sample-wise: statistics are taken over the last axis.
            m = X.mean(axis=-1, keepdims=True)
            std = X.std(axis=-1, keepdims=True)
            X_normed = (X - m) / (std + self.epsilon)
        out = self.gamma * X_normed + self.beta
        return out

    def get_config(self):
        # NOTE(review): momentum is not serialized here -- confirm whether
        # that omission is intentional.
        return {"name": self.__class__.__name__,
                "input_shape": self.input_shape,
                "epsilon": self.epsilon,
                "mode": self.mode}
class LRN2D(Layer):
    """
    Local Response Normalization across adjacent channels of 2D feature maps.

    This code is adapted from pylearn2.
    License at: https://github.com/lisa-lab/pylearn2/blob/master/LICENSE.txt
    """
    def __init__(self, alpha=1e-4, k=2, beta=0.75, n=5):
        # n must be odd so the normalization window is centered on a channel.
        if n % 2 == 0:
            raise NotImplementedError("LRN2D only works with odd n. n provided: " + str(n))
        super(LRN2D, self).__init__()
        self.alpha = alpha  # scale of the squared-activation sum
        self.k = k          # additive constant inside the normalizer
        self.beta = beta    # exponent applied to the normalizer
        self.n = n          # channel window size (odd)

    def get_output(self, train):
        X = self.get_input(train)
        b, ch, r, c = X.shape
        half_n = self.n // 2
        input_sqr = T.sqr(X)
        # Zero-pad half_n channels on each side so the sliding window is
        # defined at the channel borders.
        extra_channels = T.alloc(0., b, ch + 2*half_n, r, c)
        input_sqr = T.set_subtensor(extra_channels[:, half_n:half_n+ch, :, :], input_sqr)
        # scale = (k + alpha * sum of squares over the window) ** beta
        scale = self.k
        for i in range(self.n):
            scale += self.alpha * input_sqr[:, i:i+ch, :, :]
        scale = scale ** self.beta
        return X / scale

    def get_config(self):
        return {"name": self.__class__.__name__,
                "alpha": self.alpha,
                "k": self.k,
                "beta": self.beta,
                "n": self.n}
|
MER-GROUP/intellij-community
|
refs/heads/master
|
python/testData/intentions/removeLeadingU_after.py
|
83
|
a = "text"
|
evanson/yowsup
|
refs/heads/master
|
yowsup/layers/protocol_presence/__init__.py
|
70
|
from .layer import YowPresenceProtocolLayer
|
jeroenj/CouchPotatoServer
|
refs/heads/master
|
libs/pyasn1/__init__.py
|
193
|
import sys

# Package version, exposed per PEP 396.
# http://www.python.org/dev/peps/pep-0396/
__version__ = '0.1.7'

# Fail fast on interpreters older than the minimum supported version.
if sys.version_info[:2] < (2, 4):
    raise RuntimeError('PyASN1 requires Python 2.4 or later')
|
makinacorpus/django
|
refs/heads/master
|
tests/fixtures_model_package/__init__.py
|
45382
| |
iphoting/healthchecks
|
refs/heads/heroku
|
hc/api/migrations/0071_check_manual_resume.py
|
2
|
# Generated by Django 3.0.4 on 2020-06-02 07:35
from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the nullable ``manual_resume`` flag to the Check model."""

    dependencies = [
        ('api', '0070_auto_20200411_1310'),
    ]

    operations = [
        migrations.AddField(
            model_name='check',
            name='manual_resume',
            field=models.NullBooleanField(default=False),
        ),
    ]
|
evildmp/django-cms
|
refs/heads/master
|
cms/test_utils/project/sampleapp/migrations/0001_initial.py
|
66
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import cms.models.fields


class Migration(migrations.Migration):
    """Create the sampleapp ``Category`` (tree-structured, with a CMS
    placeholder for its description) and ``Picture`` models."""

    dependencies = [
        ('cms', '0002_auto_20140816_1918'),
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(primary_key=True, verbose_name='ID', auto_created=True, serialize=False)),
                ('name', models.CharField(max_length=20)),
                # lft/rght/tree_id/level are MPTT-style tree bookkeeping
                # columns, maintained by the tree library, not by users.
                ('lft', models.PositiveIntegerField(db_index=True, editable=False)),
                ('rght', models.PositiveIntegerField(db_index=True, editable=False)),
                ('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
                ('level', models.PositiveIntegerField(db_index=True, editable=False)),
                ('description', cms.models.fields.PlaceholderField(null=True, to='cms.Placeholder', slotname='category_description', editable=False)),
                ('parent', models.ForeignKey(null=True, to='sampleapp.Category', blank=True)),
            ],
            options={
                'verbose_name_plural': 'categories',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Picture',
            fields=[
                ('id', models.AutoField(primary_key=True, verbose_name='ID', auto_created=True, serialize=False)),
                ('image', models.ImageField(upload_to='pictures')),
                ('category', models.ForeignKey(to='sampleapp.Category')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
OAPDF/oapdftools
|
refs/heads/master
|
oapdf/jrecord.py
|
2
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
######################## Part3: Journal Record #############################
############### General Journal Record class ###############################
class Jrecord(object):
    '''Basic journal record information.

    Plain data holder for one bibliographic record plus parsers for
    NoteFirst XML and EndNote .enw input.

    NOTE: this module targets Python 2 -- ``basestring``, the ``file``
    type and the builtin ``reduce`` are all used below.
    '''

    def __init__(self):
        self.journal = ""
        self.title = ""
        self.authors = []     # author name strings, in citation order
        self.year = ""
        self.volume = ""
        self.issue = ""
        self.pages = ""
        self.doi = ""
        self.issn = ""
        self.publisher = ""
        self.urls = []
        self.pdf = ""         # path/URL of the full-text PDF
        self.abstract = ""
        self.note = ""

    def __getattr__(self, name):
        """Locate the attribute named by a dotted path (e.g. "a.b.c")."""
        def traverse(parent, child):
            # BUGFIX: was ``instance(parent, str)`` -- an undefined name
            # raising NameError whenever a dotted attribute was looked up;
            # ``isinstance`` is what was intended.
            if isinstance(parent, str):
                parent = getattr(self, parent)
            return getattr(parent, child)
        return reduce(traverse, name.split('.'))

    def __getitem__(self, name):
        '''Act as dict: record["title"] == record.title.'''
        return getattr(self, name)

    def reset(self):
        """Clear every field, reusing the existing list objects."""
        self.journal = ""
        self.title = ""
        del self.authors[:]
        self.year = ""
        self.volume = ""
        self.issue = ""
        self.pages = ""
        self.doi = ""
        self.issn = ""
        self.publisher = ""
        del self.urls[:]
        self.pdf = ""
        self.abstract = ""
        self.note = ""

    def __repr__(self):
        # UTF-8 encoded bytes (Python 2 convention for repr of unicode data).
        return (self.doi + ": " + self.title + " | " + self.journal + " | " +
                self.year + "; " + self.volume + "(" + self.issue + ")" +
                ", " + self.pages).encode('utf-8')

    def __str__(self):
        return self.__repr__()

    # Serializers below are placeholders -- not implemented yet.
    def writexml(self):
        pass

    def writeenw(self):
        pass

    def writebib(self):
        pass

    def writeris(self):
        pass

    def parseNoteFirst(self, text=None, infile=None):
        '''Parse NoteFirst record (xml format), return self (or None when
        neither a string nor a file was supplied).'''
        if isinstance(text, basestring):
            pass
        elif isinstance(infile, basestring):
            f = open(infile)
            text = f.read()
            f.close()
        elif isinstance(infile, file):
            text = infile.read()
        else:  # Nothing usable was supplied.
            return None
        soup = BeautifulSoup(text, "html.parser")
        self.title = soup.primarytitle.text
        doi = soup.doi.text
        # Strip any prefix before the "10." DOI registrant code.
        self.doi = doi[doi.find("10."):]
        self.journal = soup.media.info.text
        self.year = soup.year.text
        self.volume = soup.volume.text
        self.issue = soup.issue.text
        self.pages = soup.pagescope.text
        authors = soup.findChildren('fullname')
        self.authors = [author.info.text for author in authors]
        return self

    def parseenw(self, text=None, infile=None):
        '''Parse the endnote enw file, return self (or None when neither a
        string, a list of lines nor a file was supplied).'''
        lines = None
        # Use basestring for both str and unicode!
        if isinstance(text, basestring):
            lines = text.splitlines()
        elif isinstance(text, list):
            lines = text
        elif isinstance(infile, basestring):
            lines = open(infile)
        elif isinstance(infile, file):
            lines = infile
        else:  # Do nothing
            return None
        for line in lines:
            if len(line) > 1:
                # Tag letter of "%T ..."-style lines; payload starts at col 3.
                tag = line[1]
                value = line[3:].strip()
                if tag == "T":
                    self.title = value
                elif tag == "D":
                    self.year = value
                elif tag == "P":
                    self.pages = value
                elif tag == "J":
                    self.journal = value
                elif tag == "V":
                    self.volume = value
                elif tag == "N":
                    self.issue = value
                elif tag == "A":
                    self.authors.append(value)
        if isinstance(infile, basestring):
            lines.close()
        return self
|
dceoy/fract
|
refs/heads/master
|
fract/util/kalmanfilter.py
|
1
|
#!/usr/bin/env python
import logging
import os
import numpy as np
import pandas as pd
from scipy.optimize import minimize_scalar
class KalmanFilter(object):
def __init__(self, x0=0, v0=1e-8, q=1e-8, r=1e-8, keep_history=False):
self.x = np.array([x0]) # estimate of x
self.v = np.array([v0]) # error estimate
self.q = q # process variance
self.r = r # measurement variance
self.y = np.array([np.nan])
self.__keep_history = keep_history
def fit(self, y, x0=None, v0=None, q=None, r=None):
x0_ = x0 or self.x[-1]
v0_ = v0 or self.v[-1]
q_ = q or self.q
r_ = r or self.r
len_y = len(y)
new_x = np.empty(len_y)
new_v = np.empty(len_y)
for i, y_n in enumerate(y):
x_n_1 = (new_x[i - 1] if i else x0_)
v_n_1 = (new_v[i - 1] if i else v0_) + q_
k = v_n_1 / (v_n_1 + r_)
new_x[i] = x_n_1 + k * (y_n - x_n_1)
new_v[i] = (1 - k) * v_n_1
if self.__keep_history:
self.x = np.append(self.x, new_x)
self.v = np.append(self.v, new_v)
self.y = np.append(self.y, y)
else:
self.x = np.array([new_x[-1]])
self.v = np.array([new_v[-1]])
return pd.DataFrame(
{'y': y, 'x': new_x, 'v': new_v},
index=(y.index if hasattr(y, 'index') else range(len_y))
)
class KalmanFilterOptimizer(object):
    """Tune Kalman-filter variances by scalar likelihood optimization.

    The measurement variance ``r`` is found by minimizing the negative
    log-likelihood over ``log(r)``; the process variance is then derived as
    ``q = r * pmv_ratio``.
    """

    def __init__(self, y, x0=0, v0=1e-8, pmv_ratio=1, method='Golden'):
        self.__logger = logging.getLogger(__name__)
        self.y = y
        self.x0 = x0
        self.v0 = v0
        self.__pmv_ratio = pmv_ratio  # process / measurement variance ratio
        self.__method = method        # Brent | Bounded | Golden

    def optimize(self):
        """Return the tuple ``(q, r)`` minimizing ``_loss`` on ``self.y``."""
        res = minimize_scalar(
            fun=self._loss, args=(self.y, self.x0, self.v0, self.__pmv_ratio),
            method=self.__method
        )
        self.__logger.debug(f'{os.linesep}{res}')
        # The optimizer works in log-space; map back to a variance.
        r = np.exp(res.x)
        self.__logger.debug(f'measurement variance:\t{r}')
        q = r * self.__pmv_ratio
        self.__logger.debug(f'process variance:\t{q}')
        return q, r

    @staticmethod
    def _loss(a, y, x0, v0, pmv_ratio=1):
        """Negative log-likelihood of ``y`` for measurement variance exp(a)."""
        r = np.exp(a)
        kf = KalmanFilter(x0=x0, v0=v0, q=(r * pmv_ratio), r=r)
        fitted = kf.fit(y=y)
        innovation_var = fitted['v'] + r
        return np.sum(
            np.log(innovation_var)
            + np.square(fitted['y'] - fitted['x']) / innovation_var
        )
|
Alisa-lisa/FinReporter
|
refs/heads/master
|
core/visualizing/OnlinePlotting.py
|
1
|
""" Basically small util to visualize the graphs """
from core.utils.Aggregators import Approach, Aggregators, PlotTypes
from core.db_style.basic_functions import PostgresDBUnit
from core.pandas_style.basic_functions import PandasUtil
import plotly
from typing import List, Tuple
import plotly.graph_objs as go
from core.utils.formatters import date_from_string, get_months, increment_month
from core.utils.validation import is_time_frame_valid
# TODO: add functionality for aggregated stats except balance (not only for periods)
def collect_date_intervals(start_date: str, end_date: str) -> dict:
    """Map "year-month" keys to a date inside every month between
    ``start_date`` and ``end_date`` (inclusive)."""
    periods = {}
    cursor = date_from_string(start_date)
    end = date_from_string(end_date)
    # Walk month by month from start until we pass the end date.
    while cursor <= end:
        periods["{}-{}".format(cursor.year, cursor.month)] = cursor
        cursor = increment_month(cursor.isoformat())
    return periods
class OnlineVisualizer:
    """Renders plotly charts from a finance data source (DB or DataFrame)."""

    def __init__(self, approach, file_path=None):
        """Connect to a data source.

        :param approach: Approach.DB for Postgres, Approach.DF for a
            file-backed pandas source (``file_path`` is then required)
        :param file_path: path of the data file for the DF approach
        """
        if approach == Approach.DB:
            self.data_source = PostgresDBUnit()
        elif approach == Approach.DF:
            assert file_path is not None
            self.data_source = PandasUtil(file_path=file_path)

    def _prepare_cost_structure(self, start_date: str = None, end_date: str = None,
                                exclude_category: List[str] = None) -> dict:
        """
        Helper function to unite single and several subplots
        :param start_date: str start date
        :param end_date: str end date
        :param exclude_category: list of categories not to show on plot
        :return: dict {category: SUM cost}
        """
        cats = self.data_source.get_categories()
        cats = [x if x != "" else "Unknown" for x in cats]
        # BUGFIX (the old "non-vanishing categories" TODO): the previous
        # version removed items from ``cats`` while iterating over it, which
        # skips the element following each removal.  Filtering into a fresh
        # list excludes every listed category reliably.
        if exclude_category is not None:
            cats = [cat for cat in cats if cat not in exclude_category]
        subsets = {}
        for cat in cats:
            subsets[cat] = self.data_source.generic_metrics_aggregator(
                Aggregators.SUM, start_date, end_date, cat)[0]
        return subsets

    def simple_pie_chart(self, start_date: str = None, end_date: str = None,
                         exclude_category: List[str] = None) -> go.Pie:
        """
        Possible combinations:
        1. total spending per category for time
        2. avg spending per category per time
        """
        subsets = self._prepare_cost_structure(start_date, end_date, exclude_category)
        return go.Pie(labels=list(subsets.keys()),
                      values=list(subsets.values()),
                      hoverinfo='label',
                      textinfo='value',
                      textfont=dict(size=14),
                      name='Cost structure')

    def simple_bar_chart(self, start_date: str = None, end_date: str = None,
                         category: str = None, aggregator: Aggregators = Aggregators.SUM,
                         formatter: str = "%Y-%m-%d") -> go.Bar:
        """
        Aggregates cost of a category per month over the given period.
        :param start_date: str start date of a main date frame
        :param end_date: str end date of a main date frame
        :param category: str cost category name
        :param aggregator: Aggregator to build a bar chart for
        :param formatter: str formatter to parse datetime from
        :return: go.Bar trace (empty when the time frame is invalid)
        """
        res = {}
        if is_time_frame_valid(start_date, end_date, formatter):
            # TODO: check if the category is being validated
            for label, month_date in collect_date_intervals(start_date, end_date).items():
                res[label] = self.data_source.generic_metrics_aggregator(
                    aggregator,
                    month_date.isoformat(),
                    month_date.isoformat(),
                    category)[0]
        else:
            # BUGFIX: corrected the typo "is nt valid" in the message.
            print("Chosen time frame {}/{} is not valid".format(start_date, end_date))
        return go.Bar(
            x=list(res.keys()),
            y=list(res.values()),
            hoverinfo='label',
            name='{} cost'.format(category))

    def simple_balance_chart(self, start_date: str = None, end_date: str = None) -> go.Bar:
        """
        Draws balance over time
        :param start_date: str start date
        :param end_date: str end date
        :return: go.Bar trace of the per-month balance
        """
        res = {}
        for label, month_date in collect_date_intervals(start_date, end_date).items():
            res[label] = self.data_source.balance(month_date.isoformat(),
                                                  month_date.isoformat())
        return go.Bar(
            x=list(res.keys()),
            y=list(res.values()),
            hoverinfo='value',
            name="simple_balance")

    def accumulated_balance_chart(self, start_date: str = None, end_date: str = None) -> go.Scatter:
        """
        Accumulated balance over time
        :param start_date: str start
        :param end_date: str end
        :return: go.Scatter trace of the running balance total
        """
        tmp = {}
        for label, month_date in collect_date_intervals(start_date, end_date).items():
            tmp[label] = self.data_source.balance(month_date.isoformat(),
                                                  month_date.isoformat())
        # Fold the per-month balances into a running total (insertion order).
        res = tmp
        indexes = list(tmp.keys())
        for i in range(1, len(indexes)):
            res[indexes[i]] = res[indexes[i - 1]] + tmp[indexes[i]]
        return go.Scatter(
            x=list(res.keys()),
            y=list(res.values()),
            hoverinfo='value',
            name="accumulated_balance")

    def show_pie_subplots(self, start_date: str = None, end_date: str = None,
                          exclude_category: List[str] = None, name: str = "Cost Structure") -> None:
        """
        Several charts for monthly intervals within given date range
        Hardcoded for 6 month
        :return: None, fig being rendered
        """
        # Validate the time frame covers at most 6 months.
        month = get_months(start_date, end_date)
        if month > 6:
            # BUGFIX: raise a meaningful ValueError (still an Exception for
            # existing callers) instead of print + bare ``raise Exception``.
            raise ValueError("This option works for 6 month period only")
        # Collect the cost structure for every monthly sub-period.
        periods = collect_date_intervals(start_date, end_date)
        cost_structure = {}
        for k, v in periods.items():
            cost_structure[k] = self._prepare_cost_structure(v.isoformat(),
                                                             v.isoformat(),
                                                             exclude_category)
        # Create a separate pie (and its caption) for each data set.
        data = []
        annotations = []
        for i, v in enumerate(cost_structure.values()):
            data.append({
                "values": list(v.values()),
                "labels": list(v.keys()),
                "type": "pie",
                "domain": {"x": [0 + i * .15, .15 + i * .15],
                           "y": [0, -1]},
                "hoverinfo": "label+value+name",
                "name": periods[list(cost_structure.keys())[i]],
                "textinfo": periods[list(cost_structure.keys())[i]]
            })
            annotations.append({
                "font": {"size": 14},
                "text": "{}".format(periods[list(cost_structure.keys())[i]]),
                "x": .055 + i * .15,
                "y": .8,
                "showarrow": False
            })
        layout = {'title': '{} for {}/{}'.format(name, start_date, end_date),
                  'showlegend': True,
                  "annotations": annotations}
        fig = {"data": data, "layout": layout}
        plotly.offline.plot(fig)

    @staticmethod
    def show_combined_plot(aggregators: List, start_date: str, end_date: str, name: str) -> None:
        """
        Plots several figures as one
        :param aggregators: list of plotly traces to render together
        :return: None
        """
        layout = go.Layout(
            title="{} over {}/{}".format(name, start_date, end_date),
        )
        fig = go.Figure(data=aggregators, layout=layout)
        plotly.offline.plot(fig)
|
noironetworks/nova
|
refs/heads/master
|
nova/tests/unit/api/openstack/compute/test_extended_ips.py
|
33
|
# Copyright 2013 Nebula, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
import six
import webob
from nova import compute
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
UUID1 = '00000000-0000-0000-0000-000000000001'
UUID2 = '00000000-0000-0000-0000-000000000002'
UUID3 = '00000000-0000-0000-0000-000000000003'
NW_CACHE = [
{
'address': 'aa:aa:aa:aa:aa:aa',
'id': 1,
'network': {
'bridge': 'br0',
'id': 1,
'label': 'private',
'subnets': [
{
'cidr': '192.168.1.0/24',
'ips': [
{
'address': '192.168.1.100',
'type': 'fixed',
'floating_ips': [
{'address': '5.0.0.1', 'type': 'floating'},
],
},
],
},
]
}
},
{
'address': 'bb:bb:bb:bb:bb:bb',
'id': 2,
'network': {
'bridge': 'br1',
'id': 2,
'label': 'public',
'subnets': [
{
'cidr': '10.0.0.0/24',
'ips': [
{
'address': '10.0.0.100',
'type': 'fixed',
'floating_ips': [
{'address': '5.0.0.2', 'type': 'floating'},
],
}
],
},
]
}
}
]
ALL_IPS = []
for cache in NW_CACHE:
for subnet in cache['network']['subnets']:
for fixed in subnet['ips']:
sanitized = dict(fixed)
sanitized.pop('floating_ips')
ALL_IPS.append(sanitized)
for floating in fixed['floating_ips']:
ALL_IPS.append(floating)
ALL_IPS.sort(key=lambda x: str(x))
def fake_compute_get(*args, **kwargs):
    """Stub for compute.api.API.get: one instance carrying NW_CACHE."""
    return fakes.stub_instance_obj(None, 1, uuid=UUID3, nw_cache=NW_CACHE)
def fake_compute_get_all(*args, **kwargs):
    """Stub for compute.api.API.get_all: two instances carrying NW_CACHE."""
    stubs = [
        fakes.stub_instance_obj(None, idx, uuid=uuid, nw_cache=NW_CACHE)
        for idx, uuid in ((1, UUID1), (2, UUID2))
    ]
    return objects.InstanceList(objects=stubs)
class ExtendedIpsTestV21(test.TestCase):
    """Verify the OS-EXT-IPS attributes in server show/detail responses."""
    content_type = 'application/json'
    # Attribute prefix the API uses for the extended IP "type" field.
    prefix = 'OS-EXT-IPS:'

    def setUp(self):
        super(ExtendedIpsTestV21, self).setUp()
        fakes.stub_out_nw_api(self.stubs)
        # Route compute lookups to the canned instances defined above.
        self.stubs.Set(compute.api.API, 'get', fake_compute_get)
        self.stubs.Set(compute.api.API, 'get_all', fake_compute_get_all)

    def _make_request(self, url):
        # Issue a request against the v2.1 WSGI app (servers API only).
        req = webob.Request.blank(url)
        req.headers['Accept'] = self.content_type
        res = req.get_response(fakes.wsgi_app_v21(init_only=('servers',)))
        return res

    def _get_server(self, body):
        return jsonutils.loads(body).get('server')

    def _get_servers(self, body):
        return jsonutils.loads(body).get('servers')

    def _get_ips(self, server):
        # Yield every IP entry across all of the server's networks.
        for network in six.itervalues(server['addresses']):
            for ip in network:
                yield ip

    def assertServerStates(self, server):
        """Compare the (address, type) pairs in the response with the
        flattened fixture list ALL_IPS."""
        results = []
        for ip in self._get_ips(server):
            results.append({'address': ip.get('addr'),
                            'type': ip.get('%stype' % self.prefix)})
        # NOTE(review): sorted() over dicts relies on Python 2 comparison
        # semantics (module is py2-era) -- confirm before porting.
        self.assertEqual(ALL_IPS, sorted(results))

    def test_show(self):
        """GET of a single server carries the extended IP attributes."""
        url = '/v2/fake/servers/%s' % UUID3
        res = self._make_request(url)

        self.assertEqual(res.status_int, 200)
        self.assertServerStates(self._get_server(res.body))

    def test_detail(self):
        """GET of the detail list carries the attributes on every server."""
        url = '/v2/fake/servers/detail'
        res = self._make_request(url)

        self.assertEqual(res.status_int, 200)
        for i, server in enumerate(self._get_servers(res.body)):
            self.assertServerStates(server)
class ExtendedIpsTestV2(ExtendedIpsTestV21):
    """Same checks as the v2.1 tests, run against the legacy v2 app with the
    Extended_ips extension explicitly selected."""

    def setUp(self):
        super(ExtendedIpsTestV2, self).setUp()
        self.flags(
            osapi_compute_extension=[
                'nova.api.openstack.compute.contrib.select_extensions'],
            osapi_compute_ext_list=['Extended_ips'])

    def _make_request(self, url):
        # Same as the parent, but against the legacy (non-v2.1) WSGI app.
        req = webob.Request.blank(url)
        req.headers['Accept'] = self.content_type
        res = req.get_response(fakes.wsgi_app(init_only=('servers',)))
        return res
|
rogerhu/django
|
refs/heads/master
|
django/contrib/gis/geos/io.py
|
114
|
"""
Module that holds classes for performing I/O operations on GEOS geometry
objects. Specifically, this has Python implementations of WKB/WKT
reader and writer classes.
"""
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.prototypes.io import _WKTReader, _WKBReader, WKBWriter, WKTWriter
__all__ = ['WKBWriter', 'WKTWriter', 'WKBReader', 'WKTReader']
# Public classes for (WKB|WKT)Reader, which return GEOSGeometry
class WKBReader(_WKBReader):
    def read(self, wkb):
        "Returns a GEOSGeometry for the given WKB buffer."
        raw_geom = super(WKBReader, self).read(wkb)
        return GEOSGeometry(raw_geom)
class WKTReader(_WKTReader):
    def read(self, wkt):
        "Returns a GEOSGeometry for the given WKT string."
        raw_geom = super(WKTReader, self).read(wkt)
        return GEOSGeometry(raw_geom)
|
mrkeng/alerta
|
refs/heads/master
|
alerta/plugins/normalise.py
|
3
|
from alerta.plugins import PluginBase
class NormaliseAlert(PluginBase):
    """Prefix an alert's text with its upper-cased severity on receipt."""

    def pre_receive(self, alert):
        severity_label = alert.severity.upper()
        alert.text = '%s: %s' % (severity_label, alert.text)
        return alert

    def post_receive(self, alert):
        # No post-processing; returning None leaves the alert untouched.
        return
|
kashifmin/KKernel_yu_msm8916
|
refs/heads/cm-12.0
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py
|
4653
|
# EventClass.py
#
# This is a library defining some events types classes, which could
# be used by other scripts to analyzing the perf samples.
#
# Currently there are just a few classes defined for examples,
# PerfEvent is the base class for all perf event sample, PebsEvent
# is a HW base Intel x86 PEBS event, and user could add more SW/HW
# event classes based on requirements.
import struct
# Event types, user could add more here
EVTYPE_GENERIC = 0
EVTYPE_PEBS = 1 # Basic PEBS event
EVTYPE_PEBS_LL = 2 # PEBS event with load latency info
EVTYPE_IBS = 3
#
# Currently we don't have good way to tell the event type, but by
# the size of raw buffer, raw PEBS event with load latency data's
# size is 176 bytes, while the pure PEBS event's size is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
    """Pick the event class by raw buffer size: 144 bytes is a plain PEBS
    record, 176 bytes is PEBS with load-latency data, anything else is a
    generic sample (see the size note above this function)."""
    size = len(raw_buf)
    if size == 144:
        return PebsEvent(name, comm, dso, symbol, raw_buf)
    if size == 176:
        return PebsNHM(name, comm, dso, symbol, raw_buf)
    return PerfEvent(name, comm, dso, symbol, raw_buf)
class PerfEvent(object):
    """Base class for one perf sample (NOTE: Python 2 module -- print
    statement below)."""
    event_num = 0  # running count of all events constructed

    def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
        self.name = name        # event name
        self.comm = comm        # task command name
        self.dso = dso          # shared object the sample hit
        self.symbol = symbol    # resolved symbol name
        self.raw_buf = raw_buf  # raw sample payload
        self.ev_type = ev_type  # one of the EVTYPE_* constants
        PerfEvent.event_num += 1

    def show(self):
        # Human-readable one-line summary of the sample.
        print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)
#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
    """Basic Intel PEBS event: the first 80 raw bytes carry EFLAGS, the
    linear IP and a snapshot of the general-purpose registers."""
    pebs_num = 0  # running count of PEBS events constructed

    def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
        # Unpack ten unsigned 64-bit values from the head of the buffer.
        tmp_buf=raw_buf[0:80]
        flags, ip, ax, bx, cx, dx, si, di, bp, sp = struct.unpack('QQQQQQQQQQ', tmp_buf)
        self.flags = flags
        self.ip = ip
        self.ax = ax
        self.bx = bx
        self.cx = cx
        self.dx = dx
        self.si = si
        self.di = di
        self.bp = bp
        self.sp = sp
        PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
        PebsEvent.pebs_num += 1
        del tmp_buf
#
# Intel Nehalem and Westmere support PEBS plus Load Latency info which lie
# in the four 64 bit words write after the PEBS data:
# Status: records the IA32_PERF_GLOBAL_STATUS register value
# DLA: Data Linear Address (EIP)
# DSE: Data Source Encoding, where the latency happens, hit or miss
# in L1/L2/L3 or IO operations
# LAT: the actual latency in cycles
#
class PebsNHM(PebsEvent):
    """Nehalem/Westmere PEBS event with load-latency info: bytes 144..176
    hold the four extra 64-bit words (status, DLA, DSE, latency) described
    in the comment above."""
    pebs_nhm_num = 0  # running count of PEBS-LL events constructed

    def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
        tmp_buf=raw_buf[144:176]
        status, dla, dse, lat = struct.unpack('QQQQ', tmp_buf)
        self.status = status  # IA32_PERF_GLOBAL_STATUS register value
        self.dla = dla        # data linear address
        self.dse = dse        # data source encoding (where latency happened)
        self.lat = lat        # actual latency in cycles
        PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
        PebsNHM.pebs_nhm_num += 1
        del tmp_buf
|
w1ll1am23/home-assistant
|
refs/heads/dev
|
homeassistant/components/alexa/logbook.py
|
19
|
"""Describe logbook events."""
from homeassistant.core import callback
from .const import DOMAIN, EVENT_ALEXA_SMART_HOME
@callback
def async_describe_events(hass, async_describe_event):
    """Describe logbook events."""

    @callback
    def async_describe_logbook_event(event):
        """Describe a logbook event."""
        data = event.data
        entity_id = data["request"].get("entity_id")

        if entity_id:
            # Prefer the entity's friendly name; fall back to the raw id
            # when the state is no longer available.
            state = hass.states.get(entity_id)
            name = state.name if state else entity_id
            message = f"send command {data['request']['namespace']}/{data['request']['name']} for {name}"
        else:
            message = (
                f"send command {data['request']['namespace']}/{data['request']['name']}"
            )

        return {"name": "Amazon Alexa", "message": message, "entity_id": entity_id}

    # Register the describer for Alexa smart-home events.
    async_describe_event(DOMAIN, EVENT_ALEXA_SMART_HOME, async_describe_logbook_event)
|
gangadharkadam/saloon_erp
|
refs/heads/master
|
erpnext/accounts/doctype/shipping_rule_country/shipping_rule_country.py
|
83
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class ShippingRuleCountry(Document):
    """Controller for the Shipping Rule Country doctype; all behavior comes
    from frappe's Document base class."""
    pass
|
frivoal/presto-testo
|
refs/heads/master
|
core/standards/scripts/opjsunit/harness/jsshells.py
|
4
|
import os
import re
import subprocess
import sys
import tempfile

import killableprocess
class JSShell(object):
exec_name = None #Default name of the shell executable
default_repetitions = 1
def __init__(self, path, verbose=False):
self.path = path
self.verbose = verbose
def getCommand(self, filenames, *args):
"""Return a list of [path, *arguments] sutiable for passing to
killableprocess.Popen objects
filenames - a list of filenames to run
args - extra arguments to pass to the command
"""
raise NotImplementedError
def runCommand(self, filenames, args, async = False, use_temp_file=False, valgrind=False):
"""Run the shell loading the files in filename and passing any extra
arguments in args. Returns a tuple of (success, stdout)"""
commandline = self.getCommand(filenames, *args)
if valgrind:
commandline = ["valgrind", "--smc-check=all", "--leak-check=yes", "--error-exitcode=102"] + commandline # 102 is fairly random
if use_temp_file:
stdout = tempfile.TemporaryFile()
else:
stdout = subprocess.PIPE
if self.verbose:
print " ".join(map(lambda s: (' ' in s or '\n' in s) and '"' + re.sub("\\s+", " ", s) + '"' or s, commandline))
try:
proc = killableprocess.Popen(commandline,
stdout=stdout,
stderr=subprocess.STDOUT,
env=os.environ
)
if use_temp_file:
proc.stdout = stdout
except:
raise
sys.stderr.write("Command %s failed\n Did you specify the correct path the the jsshell?\n"%" ".join(commandline))
sys.exit(1)
if async:
return proc
else:
res = proc.wait(timeout)
return res == 0, proc.stdout.read()
class CarakanShell(JSShell):
    """Default Carakan engine shell."""
    name = "carakan"
    exec_name = "jsshell"
    default_repetitions = 2

    def getCommand(self, filenames, *args):
        # <path> -q <extra args> <files>
        return [self.path, "-q"] + list(args) + list(filenames)
class CarakanGCShell(JSShell):
    """Carakan shell with the garbage-collection flag enabled."""
    name = "carakan-gc"
    exec_name = "jsshell"
    default_repetitions = 2

    def getCommand(self, filenames, *args):
        # <path> -q -gc <extra args> <files>
        return [self.path, "-q", "-gc"] + list(args) + list(filenames)
class CarakanNativeShell(JSShell):
    """Carakan shell running with native compilation (-np)."""
    name = "carakan-nc"
    exec_name = "jsshell-nc"
    default_repetitions = 100

    def getCommand(self, filenames, *args):
        # <path> -q -np <extra args> <files>
        return [self.path, "-q", "-np"] + list(args) + list(filenames)
class CarakanNativeX87Shell(JSShell):
    """Native Carakan shell forced into x87 floating-point mode."""
    name = "carakan-nc-x87"
    exec_name = "jsshell-nc"
    default_repetitions = 100

    def getCommand(self, filenames, *args):
        # <path> -q -np -fpmode x87 <extra args> <files>
        return [self.path, "-q", "-np", "-fpmode", "x87"] + list(args) + list(filenames)
class FutharkShell(JSShell):
    """Legacy Futhark engine shell."""
    name = "futhark"
    exec_name = "jsshell-futhark"

    def getCommand(self, filenames, *args):
        # <path> -q <extra args> <files>
        return [self.path, "-q"] + list(args) + list(filenames)
class SpiderMonkeyShell(JSShell):
    """Mozilla SpiderMonkey ``js`` shell; each file is passed via -f."""
    name = "spidermonkey"
    exec_name = "js"

    def getCommand(self, filenames, *args):
        """Build the command line; every input file gets its own -f option."""
        command = [self.path, "-m"] + list(args)
        for script in filenames:
            command += ["-f", script]
        return command
class SquirrelFishShell(JSShell):
    """WebKit SquirrelFish / JavaScriptCore ``jsc`` shell."""
    name = "squirrelfish"
    exec_name = "jsc"

    def getCommand(self, filenames, *args):
        # -s flag kept from the original (its author noted: "I think I
        # want -s here").
        return [self.path, "-s"] + list(args) + list(filenames)
class V8Shell(JSShell):
    """Google V8 ``shell`` binary; args and files are passed directly."""
    name = "v8"
    exec_name = "shell"

    def getCommand(self, filenames, *args):
        """Build the command line: shell path, extra args, then the files."""
        return [self.path] + list(args) + list(filenames)
# Registry mapping engine names (as used on the command line) to shell
# classes.  Built from each class's own ``name`` attribute so the key and
# the class can never drift apart.  The original literal dict omitted
# FutharkShell ("futhark") even though the class is defined above; it is
# now registered as well.
shells = dict((cls.name, cls) for cls in (
    CarakanShell,
    CarakanGCShell,
    CarakanNativeShell,
    CarakanNativeX87Shell,
    FutharkShell,
    SpiderMonkeyShell,
    SquirrelFishShell,
    V8Shell,
))
|
safwanrahman/kitsune
|
refs/heads/master
|
kitsune/gallery/tests/test_api.py
|
7
|
from nose.tools import eq_
from kitsune.sumo.urlresolvers import reverse
from kitsune.sumo.tests import TestCase
class TestImageListView(TestCase):
    """Smoke test for the gallery image list API endpoint."""

    def test_it_works(self):
        # A plain GET on the list endpoint should succeed.
        response = self.client.get(reverse('image-list'))
        eq_(response.status_code, 200)
|
rew4332/tensorflow
|
refs/heads/rew4332-patch-1
|
tensorflow/models/image/cifar10/cifar10_input_test.py
|
34
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for cifar10 input."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
from tensorflow.models.image.cifar10 import cifar10_input
class CIFAR10InputTest(tf.test.TestCase):
  """Tests for cifar10_input.read_cifar10."""

  def _record(self, label, red, green, blue):
    """Build one raw CIFAR-10 record and its expected decoded image.

    A record is a label byte followed by planar R, G, B channels of a
    32x32 image; the expected value is the 32x32x3 pixel array that
    read_cifar10 should decode from it.
    """
    image_size = 32 * 32
    record = bytes(bytearray([label] + [red] * image_size +
                             [green] * image_size + [blue] * image_size))
    expected = [[[red, green, blue]] * 32] * 32
    return record, expected

  def testSimple(self):
    """read_cifar10 yields each record's key/label/image in order, then
    raises OutOfRangeError once the filename queue is exhausted."""
    labels = [9, 3, 0]
    records = [self._record(labels[0], 0, 128, 255),
               self._record(labels[1], 255, 0, 1),
               self._record(labels[2], 254, 255, 0)]
    contents = b"".join([record for record, _ in records])
    expected = [expected for _, expected in records]
    filename = os.path.join(self.get_temp_dir(), "cifar")
    # Use a context manager so the file is flushed and closed before the
    # reader opens it (the original leaked the file handle).
    with open(filename, "wb") as f:
      f.write(contents)

    with self.test_session() as sess:
      q = tf.FIFOQueue(99, [tf.string], shapes=())
      q.enqueue([filename]).run()
      q.close().run()
      result = cifar10_input.read_cifar10(q)

      for i in range(3):
        key, label, uint8image = sess.run([
            result.key, result.label, result.uint8image])
        # Keys are "<filename>:<record index>".
        self.assertEqual("%s:%d" % (filename, i), tf.compat.as_text(key))
        self.assertEqual(labels[i], label)
        self.assertAllEqual(expected[i], uint8image)

      with self.assertRaises(tf.errors.OutOfRangeError):
        sess.run([result.key, result.uint8image])
# Run the test suite when this module is executed directly.
if __name__ == "__main__":
  tf.test.main()
|
racitup/djangoshop-subscribe
|
refs/heads/master
|
shop_subscribe/templatetags/subscribe_tags.py
|
1
|
# -*- coding: utf-8 -*-
from django import template
register = template.Library()
@register.simple_tag(takes_context=True)
def subscribe_form(context):
    """Get an empty subscription form.

    The added ``form``/``action`` context variables are only valid within
    the block scope; the tag itself renders nothing.
    """
    # Imported lazily to avoid circulars at module load time.
    from ..forms import SubscribeForm

    context['action'] = 'DO_NOTHING'
    context['form'] = SubscribeForm(request=context['request'])
    return ''
@register.simple_tag(takes_context=True)
def confirm_form(context):
    """Get an empty confirmation form.

    The added ``form``/``action`` context variables are only valid within
    the block scope; the tag itself renders nothing.
    """
    # Imported lazily to avoid circulars at module load time.
    from ..forms import ConfirmForm_factory

    form_class = ConfirmForm_factory()
    context['action'] = 'DO_NOTHING'
    context['form'] = form_class(request=context['request'])
    return ''
|
beswarm/django-allauth
|
refs/heads/master
|
allauth/socialaccount/providers/facebook/tests.py
|
48
|
try:
from mock import patch
except ImportError:
from unittest.mock import patch
import json
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from django.test.client import RequestFactory
from allauth.socialaccount.tests import create_oauth2_tests
from allauth.tests import MockedResponse
from allauth.socialaccount.models import SocialAccount
from allauth.socialaccount import providers
from allauth.socialaccount.providers import registry
from allauth.account import app_settings as account_settings
from allauth.account.models import EmailAddress
from allauth.utils import get_user_model
from .provider import FacebookProvider
@override_settings(
    SOCIALACCOUNT_AUTO_SIGNUP=True,
    ACCOUNT_SIGNUP_FORM_CLASS=None,
    LOGIN_REDIRECT_URL='/accounts/profile/',
    ACCOUNT_EMAIL_VERIFICATION=account_settings
    .EmailVerificationMethod.NONE,
    SOCIALACCOUNT_PROVIDERS={
        'facebook': {
            'AUTH_PARAMS': {},
            'VERIFIED_EMAIL': False}})
class FacebookTests(create_oauth2_tests(registry.by_id(FacebookProvider.id))):
    # Facebook-specific tests layered on the generic OAuth2 test base that
    # create_oauth2_tests() builds for this provider.  The class-level
    # override disables e-mail verification and marks provider e-mails as
    # unverified by default; individual tests override these as needed.

    def get_mocked_response(self):
        # Canned Graph API user payload used instead of a live call.
        return MockedResponse(200, """
        {
           "id": "630595557",
           "name": "Raymond Penners",
           "first_name": "Raymond",
           "last_name": "Penners",
           "email": "raymond.penners@gmail.com",
           "link": "https://www.facebook.com/raymond.penners",
           "username": "raymond.penners",
           "birthday": "07/17/1973",
           "work": [
              {
                 "employer": {
                    "id": "204953799537777",
                    "name": "IntenCT"
                 }
              }
           ],
           "timezone": 1,
           "locale": "nl_NL",
           "verified": true,
           "updated_time": "2012-11-30T20:40:33+0000"
        }""")

    def test_username_conflict(self):
        # When the Facebook username is already taken locally, signup must
        # fall back to a different generated username (here: 'raymond').
        User = get_user_model()
        User.objects.create(username='raymond.penners')
        self.login(self.get_mocked_response())
        socialaccount = SocialAccount.objects.get(uid='630595557')
        self.assertEqual(socialaccount.user.username, 'raymond')

    def test_username_based_on_provider(self):
        # With no conflict, the provider-supplied username is used as-is.
        self.login(self.get_mocked_response())
        socialaccount = SocialAccount.objects.get(uid='630595557')
        self.assertEqual(socialaccount.user.username, 'raymond.penners')

    def test_media_js(self):
        # The injected FB JS SDK snippet must carry the configured app id.
        provider = providers.registry.by_id(FacebookProvider.id)
        request = RequestFactory().get(reverse('account_login'))
        request.session = {}
        script = provider.media_js(request)
        self.assertTrue('"appId": "app123id"' in script)

    def test_login_by_token(self):
        resp = self.client.get(reverse('account_login'))
        # Patch the requests module used by the view so the access-token
        # lookup returns our canned user payload.
        with patch('allauth.socialaccount.providers.facebook.views'
                   '.requests') as requests_mock:
            mocks = [self.get_mocked_response().json()]
            requests_mock.get.return_value.json \
                = lambda: mocks.pop()
            resp = self.client.post(reverse('facebook_login_by_token'),
                                    data={'access_token': 'dummy'})
            self.assertEqual('http://testserver/accounts/profile/',
                             resp['location'])

    @override_settings(
        SOCIALACCOUNT_PROVIDERS={
            'facebook': {
                'AUTH_PARAMS': {'auth_type': 'reauthenticate'},
                'VERIFIED_EMAIL': False}})
    def test_login_by_token_reauthenticate(self):
        resp = self.client.get(reverse('account_login'))
        # The login page embeds an auth nonce; the view is expected to make
        # two requests (nonce check first, then the user payload), hence
        # the two-element mock list consumed in LIFO order by pop().
        nonce = json.loads(resp.context['fb_data'])['loginOptions']['auth_nonce']
        with patch('allauth.socialaccount.providers.facebook.views'
                   '.requests') as requests_mock:
            mocks = [self.get_mocked_response().json(),
                     {'auth_nonce': nonce}]
            requests_mock.get.return_value.json \
                = lambda: mocks.pop()
            resp = self.client.post(reverse('facebook_login_by_token'),
                                    data={'access_token': 'dummy'})
            self.assertEqual('http://testserver/accounts/profile/',
                             resp['location'])

    @override_settings(
        SOCIALACCOUNT_PROVIDERS={
            'facebook': {
                'VERIFIED_EMAIL': True}})
    def test_login_verified(self):
        # VERIFIED_EMAIL=True: the provider e-mail is trusted as verified.
        emailaddress = self._login_verified()
        self.assertTrue(emailaddress.verified)

    def test_login_unverified(self):
        # Class-level VERIFIED_EMAIL=False applies here: not verified.
        emailaddress = self._login_verified()
        self.assertFalse(emailaddress.verified)

    def _login_verified(self):
        # Shared helper: log in via the mocked response and return the
        # EmailAddress record created for the provider's e-mail.
        resp = self.login(self.get_mocked_response())
        return EmailAddress.objects.get(email='raymond.penners@gmail.com')
|
persandstrom/home-assistant
|
refs/heads/master
|
tests/helpers/test_init.py
|
45
|
"""Test component helpers."""
# pylint: disable=protected-access
from collections import OrderedDict
import unittest
from homeassistant import helpers
from tests.common import get_test_home_assistant
class TestHelpers(unittest.TestCase):
    """Tests homeassistant.helpers module."""

    # pylint: disable=invalid-name
    def setUp(self):
        """Init needed objects."""
        self.hass = get_test_home_assistant()

    # pylint: disable=invalid-name
    def tearDown(self):
        """Stop everything that was started."""
        self.hass.stop()

    def test_extract_domain_configs(self):
        """Test the extraction of domain configuration."""
        # Only the bare domain key and "<domain> <name>" keys should match;
        # 'zoner' and 'zone ' (trailing space, no name) must be skipped.
        config = dict.fromkeys(
            ['zone', 'zoner', 'zone ', 'zone Hallo', 'zone 100'])
        found = set(helpers.extract_domain_configs(config, 'zone'))
        self.assertEqual({'zone', 'zone Hallo', 'zone 100'}, found)

    def test_config_per_platform(self):
        """Test config per platform method."""
        config = OrderedDict()
        config['zone'] = {'platform': 'hello'}
        config['zoner'] = None
        config['zone Hallo'] = [1, {'platform': 'hello 2'}]
        config['zone 100'] = None

        expected = [
            ('hello', config['zone']),
            (None, 1),
            ('hello 2', config['zone Hallo'][1]),
        ]
        assert expected == list(helpers.config_per_platform(config, 'zone'))
|
mcrowson/django
|
refs/heads/master
|
django/contrib/gis/gdal/prototypes/geom.py
|
450
|
from ctypes import POINTER, c_char_p, c_double, c_int, c_void_p
from django.contrib.gis.gdal.envelope import OGREnvelope
from django.contrib.gis.gdal.libgdal import lgdal
from django.contrib.gis.gdal.prototypes.errcheck import check_envelope
from django.contrib.gis.gdal.prototypes.generation import (
const_string_output, double_output, geom_output, int_output, srs_output,
string_output, void_output,
)
# ### Generation routines specific to this module ###
def env_func(f, argtypes):
    "For getting OGREnvelopes."
    # Envelope routines fill an out-parameter and return nothing, so the
    # ctypes errcheck hook (check_envelope) extracts the envelope value
    # for the caller.
    f.argtypes = argtypes
    f.restype = None
    f.errcheck = check_envelope
    return f
def pnt_func(f):
    "For accessing point information."
    # Point accessors take (geometry pointer, point index) and return a
    # C double.
    return double_output(f, [c_void_p, c_int])
def topology_func(f):
    """Configure `f` as a binary topology routine.

    The routine takes two geometry pointers and returns an integer that is
    coerced to a Python bool via the ctypes errcheck hook.
    """
    f.argtypes = [c_void_p, c_void_p]
    f.restype = c_int
    # The original assigned ``f.errchck`` (note the typo), so the boolean
    # coercion was silently never applied; additionally a ctypes errcheck
    # hook receives (result, func, arguments), so plain ``bool`` would have
    # the wrong arity anyway.  Use a proper hook.
    f.errcheck = lambda result, func, cargs: bool(result)
    return f
# ### OGR_G ctypes function prototypes ###
# Each assignment wraps an OGR_G_* C entry point from libgdal with the
# appropriate argtypes/restype/errcheck configuration via the generation
# helpers imported above.

# GeoJSON routines.
from_json = geom_output(lgdal.OGR_G_CreateGeometryFromJson, [c_char_p])
to_json = string_output(lgdal.OGR_G_ExportToJson, [c_void_p], str_result=True, decoding='ascii')
to_kml = string_output(lgdal.OGR_G_ExportToKML, [c_void_p, c_char_p], str_result=True, decoding='ascii')

# GetX, GetY, GetZ all return doubles.
getx = pnt_func(lgdal.OGR_G_GetX)
gety = pnt_func(lgdal.OGR_G_GetY)
getz = pnt_func(lgdal.OGR_G_GetZ)

# Geometry creation routines.
# NOTE(review): the negative ``offset`` values appear to select which
# argument (counted from the end) receives the created geometry handle --
# confirm against generation.geom_output.
from_wkb = geom_output(lgdal.OGR_G_CreateFromWkb, [c_char_p, c_void_p, POINTER(c_void_p), c_int], offset=-2)
from_wkt = geom_output(lgdal.OGR_G_CreateFromWkt, [POINTER(c_char_p), c_void_p, POINTER(c_void_p)], offset=-1)
create_geom = geom_output(lgdal.OGR_G_CreateGeometry, [c_int])
clone_geom = geom_output(lgdal.OGR_G_Clone, [c_void_p])
get_geom_ref = geom_output(lgdal.OGR_G_GetGeometryRef, [c_void_p, c_int])
get_boundary = geom_output(lgdal.OGR_G_GetBoundary, [c_void_p])
geom_convex_hull = geom_output(lgdal.OGR_G_ConvexHull, [c_void_p])
geom_diff = geom_output(lgdal.OGR_G_Difference, [c_void_p, c_void_p])
geom_intersection = geom_output(lgdal.OGR_G_Intersection, [c_void_p, c_void_p])
geom_sym_diff = geom_output(lgdal.OGR_G_SymmetricDifference, [c_void_p, c_void_p])
geom_union = geom_output(lgdal.OGR_G_Union, [c_void_p, c_void_p])

# Geometry modification routines.
add_geom = void_output(lgdal.OGR_G_AddGeometry, [c_void_p, c_void_p])
import_wkt = void_output(lgdal.OGR_G_ImportFromWkt, [c_void_p, POINTER(c_char_p)])

# Destroys a geometry
destroy_geom = void_output(lgdal.OGR_G_DestroyGeometry, [c_void_p], errcheck=False)

# Geometry export routines.
to_wkb = void_output(lgdal.OGR_G_ExportToWkb, None, errcheck=True)  # special handling for WKB.
to_wkt = string_output(lgdal.OGR_G_ExportToWkt, [c_void_p, POINTER(c_char_p)], decoding='ascii')
to_gml = string_output(lgdal.OGR_G_ExportToGML, [c_void_p], str_result=True, decoding='ascii')
get_wkbsize = int_output(lgdal.OGR_G_WkbSize, [c_void_p])

# Geometry spatial-reference related routines.
assign_srs = void_output(lgdal.OGR_G_AssignSpatialReference, [c_void_p, c_void_p], errcheck=False)
get_geom_srs = srs_output(lgdal.OGR_G_GetSpatialReference, [c_void_p])

# Geometry properties
get_area = double_output(lgdal.OGR_G_GetArea, [c_void_p])
get_centroid = void_output(lgdal.OGR_G_Centroid, [c_void_p, c_void_p])
get_dims = int_output(lgdal.OGR_G_GetDimension, [c_void_p])
get_coord_dim = int_output(lgdal.OGR_G_GetCoordinateDimension, [c_void_p])
set_coord_dim = void_output(lgdal.OGR_G_SetCoordinateDimension, [c_void_p, c_int], errcheck=False)
get_geom_count = int_output(lgdal.OGR_G_GetGeometryCount, [c_void_p])
get_geom_name = const_string_output(lgdal.OGR_G_GetGeometryName, [c_void_p], decoding='ascii')
get_geom_type = int_output(lgdal.OGR_G_GetGeometryType, [c_void_p])
get_point_count = int_output(lgdal.OGR_G_GetPointCount, [c_void_p])
# GetPoint writes x/y/z through the three double out-pointers.
get_point = void_output(lgdal.OGR_G_GetPoint,
                        [c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double)], errcheck=False
                        )
geom_close_rings = void_output(lgdal.OGR_G_CloseRings, [c_void_p], errcheck=False)

# Topology routines.
# Each takes two geometries and returns an integer flag describing the
# spatial relationship between them.
ogr_contains = topology_func(lgdal.OGR_G_Contains)
ogr_crosses = topology_func(lgdal.OGR_G_Crosses)
ogr_disjoint = topology_func(lgdal.OGR_G_Disjoint)
ogr_equals = topology_func(lgdal.OGR_G_Equals)
ogr_intersects = topology_func(lgdal.OGR_G_Intersects)
ogr_overlaps = topology_func(lgdal.OGR_G_Overlaps)
ogr_touches = topology_func(lgdal.OGR_G_Touches)
ogr_within = topology_func(lgdal.OGR_G_Within)

# Transformation routines.
geom_transform = void_output(lgdal.OGR_G_Transform, [c_void_p, c_void_p])
geom_transform_to = void_output(lgdal.OGR_G_TransformTo, [c_void_p, c_void_p])

# For retrieving the envelope of the geometry.
get_envelope = env_func(lgdal.OGR_G_GetEnvelope, [c_void_p, POINTER(OGREnvelope)])
|
CuonDeveloper/cuon
|
refs/heads/master
|
cuon_client/cuon_newclient/bin/cuon/Garden/SingleBotanyDivisio.py
|
2
|
# -*- coding: utf-8 -*-
##Copyright (C) [2003] [Jürgen Hamel, D-32584 Löhne]
##This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as
##published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version.
##This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
##warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
##for more details.
##You should have received a copy of the GNU General Public License along with this program; if not, write to the
##Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from cuon.Databases.SingleData import SingleData
import logging
import pygtk
pygtk.require('2.0')
import gtk
import gtk.glade
import gobject
class SingleBotanyDivisio(SingleData):
    """Single-table wrapper for the ``botany_divisio`` table shown in the
    Garden module's tree/list views (name + description columns)."""

    def __init__(self, allTables):
        SingleData.__init__(self)
        # tables.dbd and address
        self.sNameOfTable = "botany_divisio"
        self.xmlTableDef = 0
        # self.loadTable()
        # self.saveTable()
        self.loadTable(allTables)
        # List store columns: name (str), description (str), row id (uint).
        self.setStore( gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_UINT) )
        # NOTE(review): five size entries for a two-column tree -- looks
        # copied from a wider table; confirm the extra values are ignored.
        self.listHeader['size'] = [25,10,25,25,10]
        #
        self.setTreeFields( ['name', 'description'] )
        self.setTreeOrder('name')
        # Column headers are gettext-translated at display time.
        self.setListHeader([_('name'),_('description')])
|
mluke93/osf.io
|
refs/heads/develop
|
api/licenses/urls.py
|
23
|
from django.conf.urls import url
from api.licenses import views
# URL routes for the licenses API: the collection endpoint plus a detail
# endpoint keyed by an alphanumeric license id.
urlpatterns = [
    url(r'^$', views.LicenseList.as_view(), name=views.LicenseList.view_name),
    url(r'^(?P<license_id>\w+)/$', views.LicenseDetail.as_view(), name=views.LicenseDetail.view_name),
]
|
VoIPGRID/PJSIP
|
refs/heads/master
|
tests/pjsua/scripts-sipp/uas-answer-200-update-without-sdp.py
|
70
|
# $Id$
#
import inc_const as const
# Command-line arguments for the single pjsua instance under test;
# $SIPP_URI is substituted by the test driver with the SIPp endpoint.
PJSUA = ["--null-audio --max-calls=1 $SIPP_URI"]

# Expect instance 0 to reach the confirmed-call state, then send the "U"
# key to pjsua -- NOTE(review): presumably triggers the UPDATE handling
# exercised by this scenario; confirm against the pjsua key menu.
PJSUA_EXPECTS = [[0, const.STATE_CONFIRMED, "U"]]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.