repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
peterhogan/data-analytics | webUI/webpage/webpage/settings.py | 1 | 3168 | """
Django settings for webpage project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'h8fx-ih7umnjf77)$qb!9^xqt=@fi_!f%*=j_0fe-7908_o6l4'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# NOTE(review): "*" accepts any Host header; acceptable for development only.
ALLOWED_HOSTS = ["*"]
#ALLOWED_HOSTS = ["172.17.0.4"]

# Application definition

INSTALLED_APPS = [
    'graphUI.apps.GraphuiConfig',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'webpage.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'webpage.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'GMT'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/

STATIC_URL = '/static/'
| gpl-3.0 |
pforret/python-for-android | python-modules/twisted/twisted/words/protocols/jabber/sasl.py | 53 | 7681 | # Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
XMPP-specific SASL profile.
"""
import re
from twisted.internet import defer
from twisted.words.protocols.jabber import sasl_mechanisms, xmlstream
from twisted.words.xish import domish
# The b64decode and b64encode functions from the base64 module are new in
# Python 2.4. For Python 2.3 compatibility, the legacy interface is used while
# working around MIMEisms.
try:
from base64 import b64decode, b64encode
except ImportError:
import base64
def b64encode(s):
return "".join(base64.encodestring(s).split("\n"))
b64decode = base64.decodestring
NS_XMPP_SASL = 'urn:ietf:params:xml:ns:xmpp-sasl'
def get_mechanisms(xs):
    """
    Parse the SASL feature to extract the available mechanism names.
    """
    feature = xs.features[(NS_XMPP_SASL, 'mechanisms')]
    return [str(element) for element in feature.elements()
            if element.name == 'mechanism']
class SASLError(Exception):
    """
    Base class for all SASL-related exceptions.
    """
class SASLNoAcceptableMechanism(SASLError):
    """
    Raised when the server advertised no SASL mechanism we can use.
    """
class SASLAuthError(SASLError):
    """
    Raised when the SASL authentication exchange fails.

    @ivar condition: the failure condition name reported by the peer,
        or C{None} when no condition was given.
    """

    def __init__(self, condition=None):
        self.condition = condition

    def __str__(self):
        return "SASLAuthError with condition %r" % self.condition
class SASLIncorrectEncodingError(SASLError):
    """
    Raised for ill-formed base64 in a SASL exchange.

    RFC 3920 requires rejecting any character outside the base64 alphabet,
    as well as padding characters appearing anywhere but at the very end of
    the string (see L{fromBase64}). This exception signals either such a
    violation or a failure of the decoding itself.

    A receiving entity (e.g. a server in a client-to-server connection, in
    RFC 3920 terminology) should fail the SASL negotiation with an
    C{'incorrect-encoding'} condition. An initiating entity should treat the
    peer as buggy or malevolent: terminate the stream and do not reconnect.
    """
base64Pattern = re.compile("^[0-9A-Za-z+/]*[0-9A-Za-z+/=]{,2}$")
def fromBase64(s):
"""
Decode base64 encoded string.
This helper performs regular decoding of a base64 encoded string, but also
rejects any characters that are not in the base64 alphabet and padding
occurring elsewhere from the last or last two characters, as specified in
section 14.9 of RFC 3920. This safeguards against various attack vectors
among which the creation of a covert channel that "leaks" information.
"""
if base64Pattern.match(s) is None:
raise SASLIncorrectEncodingError()
try:
return b64decode(s)
except Exception, e:
raise SASLIncorrectEncodingError(str(e))
class SASLInitiatingInitializer(xmlstream.BaseFeatureInitiatingInitializer):
    """
    Stream initializer that performs SASL authentication.

    The supported mechanisms by this initializer are C{DIGEST-MD5}, C{PLAIN}
    and C{ANONYMOUS}. The C{ANONYMOUS} SASL mechanism is used when the JID, set
    on the authenticator, does not have a localpart (username), requesting an
    anonymous session where the username is generated by the server.
    Otherwise, C{DIGEST-MD5} and C{PLAIN} are attempted, in that order.
    """

    feature = (NS_XMPP_SASL, 'mechanisms')
    # Deferred fired when the SASL exchange concludes (set up in start()).
    _deferred = None

    def setMechanism(self):
        """
        Select and setup authentication mechanism.

        Uses the authenticator's C{jid} and C{password} attribute for the
        authentication credentials. If no supported SASL mechanisms are
        advertized by the receiving party, a failing deferred is returned with
        a L{SASLNoAcceptableMechanism} exception.
        """
        jid = self.xmlstream.authenticator.jid
        password = self.xmlstream.authenticator.password

        mechanisms = get_mechanisms(self.xmlstream)
        if jid.user is not None:
            # A localpart is present: authenticate as that user, preferring
            # DIGEST-MD5 over PLAIN.
            if 'DIGEST-MD5' in mechanisms:
                self.mechanism = sasl_mechanisms.DigestMD5('xmpp', jid.host, None,
                                                           jid.user, password)
            elif 'PLAIN' in mechanisms:
                self.mechanism = sasl_mechanisms.Plain(None, jid.user, password)
            else:
                raise SASLNoAcceptableMechanism()
        else:
            # No localpart: request an anonymous session.
            if 'ANONYMOUS' in mechanisms:
                self.mechanism = sasl_mechanisms.Anonymous()
            else:
                raise SASLNoAcceptableMechanism()

    def start(self):
        """
        Start SASL authentication exchange.

        @return: deferred that fires when the exchange succeeds or fails.
        """
        self.setMechanism()
        self._deferred = defer.Deferred()
        # '/challenge' may arrive multiple times; success/failure fire once.
        self.xmlstream.addObserver('/challenge', self.onChallenge)
        self.xmlstream.addOnetimeObserver('/success', self.onSuccess)
        self.xmlstream.addOnetimeObserver('/failure', self.onFailure)
        self.sendAuth(self.mechanism.getInitialResponse())
        return self._deferred

    def sendAuth(self, data=None):
        """
        Initiate authentication protocol exchange.

        If an initial client response is given in C{data}, it will be
        sent along.

        @param data: initial client response.
        @type data: L{str} or L{None}.
        """
        auth = domish.Element((NS_XMPP_SASL, 'auth'))
        auth['mechanism'] = self.mechanism.name
        if data is not None:
            # An empty initial response is encoded as a single '=' per the
            # XMPP SASL profile.
            auth.addContent(b64encode(data) or '=')
        self.xmlstream.send(auth)

    def sendResponse(self, data=''):
        """
        Send response to a challenge.

        @param data: client response.
        @type data: L{str}.
        """
        response = domish.Element((NS_XMPP_SASL, 'response'))
        if data:
            response.addContent(b64encode(data))
        self.xmlstream.send(response)

    def onChallenge(self, element):
        """
        Parse challenge and send response from the mechanism.

        @param element: the challenge protocol element.
        @type element: L{domish.Element}.
        """
        try:
            challenge = fromBase64(str(element))
        except SASLIncorrectEncodingError:
            # errback() with no argument wraps the exception currently being
            # handled.
            self._deferred.errback()
        else:
            self.sendResponse(self.mechanism.getResponse(challenge))

    def onSuccess(self, success):
        """
        Clean up observers, reset the XML stream and send a new header.

        @param success: the success protocol element. For now unused, but
                        could hold additional data.
        @type success: L{domish.Element}
        """
        # '/success' was a one-time observer and has already fired, so only
        # the challenge and failure observers need explicit removal.
        self.xmlstream.removeObserver('/challenge', self.onChallenge)
        self.xmlstream.removeObserver('/failure', self.onFailure)
        self.xmlstream.reset()
        self.xmlstream.sendHeader()
        self._deferred.callback(xmlstream.Reset)

    def onFailure(self, failure):
        """
        Clean up observers, parse the failure and errback the deferred.

        @param failure: the failure protocol element. Holds details on
                        the error condition.
        @type failure: L{domish.Element}
        """
        self.xmlstream.removeObserver('/challenge', self.onChallenge)
        self.xmlstream.removeObserver('/success', self.onSuccess)
        try:
            condition = failure.firstChildElement().name
        except AttributeError:
            # No child element: failure without a named condition.
            condition = None
        self._deferred.errback(SASLAuthError(condition))
| apache-2.0 |
JimCircadian/ansible | test/units/modules/network/f5/test_bigip_device_sshd.py | 23 | 4054 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_device_sshd import ApiParameters
from library.modules.bigip_device_sshd import ModuleParameters
from library.modules.bigip_device_sshd import ModuleManager
from library.modules.bigip_device_sshd import ArgumentSpec
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_device_sshd import ApiParameters
from ansible.modules.network.f5.bigip_device_sshd import ModuleParameters
from ansible.modules.network.f5.bigip_device_sshd import ModuleManager
from ansible.modules.network.f5.bigip_device_sshd import ArgumentSpec
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
# Directory holding the JSON/text fixtures, plus a per-path parse cache.
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}


def load_fixture(name):
    """Load the named fixture, parsing JSON when possible and caching it."""
    path = os.path.join(fixture_path, name)

    if path in fixture_data:
        return fixture_data[path]

    with open(path) as handle:
        contents = handle.read()

    try:
        contents = json.loads(contents)
    except Exception:
        # Non-JSON fixtures are returned as their raw text.
        pass

    fixture_data[path] = contents
    return contents
class TestParameters(unittest.TestCase):
    """Tests for the module's parameter adapter classes."""

    def test_module_parameters(self):
        """ModuleParameters exposes raw args, coercing the timeout to int."""
        params = dict(
            allow=['all'],
            banner='enabled',
            banner_text='asdf',
            inactivity_timeout='100',
            log_level='debug',
            login='enabled',
            port=1010,
            server='localhost',
            user='admin',
            password='password',
        )
        result = ModuleParameters(params=params)

        assert result.allow == ['all']
        assert result.banner == 'enabled'
        assert result.banner_text == 'asdf'
        # The string '100' is exposed as the integer 100.
        assert result.inactivity_timeout == 100
        assert result.log_level == 'debug'
        assert result.login == 'enabled'
        assert result.port == 1010
class TestManager(unittest.TestCase):
    """Tests for ModuleManager's update logic with device I/O stubbed out."""

    def setUp(self):
        self.spec = ArgumentSpec()

    def test_update_settings(self, *args):
        """A differing `allow` list must be detected and reported as changed."""
        set_module_args(dict(
            allow=['all'],
            banner='enabled',
            banner_text='asdf',
            inactivity_timeout='100',
            log_level='debug',
            login='enabled',
            port=1010,
            server='localhost',
            user='admin',
            password='password'
        ))

        # State that querying the remote device would have returned.
        existing = ApiParameters(
            params=dict(
                allow=['172.27.1.1']
            )
        )

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        manager = ModuleManager(module=module)

        # Replace all device access so only the comparison logic runs.
        manager.update_on_device = Mock(return_value=True)
        manager.read_current_from_device = Mock(return_value=existing)

        results = manager.exec_module()

        assert results['changed'] is True
        assert results['allow'] == ['all']
| gpl-3.0 |
nchammas/spark | python/pyspark/tests/test_join.py | 23 | 2944 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark.testing.utils import ReusedPySparkTestCase
class JoinTests(ReusedPySparkTestCase):
    """Checks that joins/cogroups of co-partitioned RDDs stay narrow."""

    def test_narrow_dependency_in_join(self):
        # Co-partitioned RDDs keep their partitioner through union; an
        # unpartitioned side adds its own partitions instead.
        rdd = self.sc.parallelize(range(10)).map(lambda x: (x, x))
        parted = rdd.partitionBy(2)
        self.assertEqual(2, parted.union(parted).getNumPartitions())
        self.assertEqual(rdd.getNumPartitions() + 2, parted.union(rdd).getNumPartitions())
        self.assertEqual(rdd.getNumPartitions() + 2, rdd.union(parted).getNumPartitions())

        tracker = self.sc.statusTracker()

        # join of two co-partitioned RDDs: narrow dependency, 2 stages.
        self.sc.setJobGroup("test1", "test", True)
        d = sorted(parted.join(parted).collect())
        self.assertEqual(10, len(d))
        self.assertEqual((0, (0, 0)), d[0])
        jobId = tracker.getJobIdsForGroup("test1")[0]
        self.assertEqual(2, len(tracker.getJobInfo(jobId).stageIds))

        # join against an unpartitioned RDD needs an extra shuffle stage.
        self.sc.setJobGroup("test2", "test", True)
        d = sorted(parted.join(rdd).collect())
        self.assertEqual(10, len(d))
        self.assertEqual((0, (0, 0)), d[0])
        jobId = tracker.getJobIdsForGroup("test2")[0]
        self.assertEqual(3, len(tracker.getJobInfo(jobId).stageIds))

        # Same stage-count expectations hold for cogroup.
        self.sc.setJobGroup("test3", "test", True)
        d = sorted(parted.cogroup(parted).collect())
        self.assertEqual(10, len(d))
        self.assertEqual([[0], [0]], list(map(list, d[0][1])))
        jobId = tracker.getJobIdsForGroup("test3")[0]
        self.assertEqual(2, len(tracker.getJobInfo(jobId).stageIds))

        self.sc.setJobGroup("test4", "test", True)
        d = sorted(parted.cogroup(rdd).collect())
        self.assertEqual(10, len(d))
        self.assertEqual([[0], [0]], list(map(list, d[0][1])))
        jobId = tracker.getJobIdsForGroup("test4")[0]
        self.assertEqual(3, len(tracker.getJobInfo(jobId).stageIds))
if __name__ == "__main__":
    import unittest
    from pyspark.tests.test_join import *  # noqa: F401

    try:
        import xmlrunner  # type: ignore[import]
        testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
    except ImportError:
        # xmlrunner is optional; fall back to unittest's default text runner.
        testRunner = None
    unittest.main(testRunner=testRunner, verbosity=2)
| apache-2.0 |
gurkih/lammps | tools/i-pi/ipi/utils/io/io_binary.py | 41 | 1532 | """Contains the functions used to print the trajectories and read input
configurations (or even full status dump) as unformatted binary.
Copyright (C) 2013, Joshua More and Michele Ceriotti
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http.//www.gnu.org/licenses/>.
Functions:
print_bin: Prints an atomic configuration.
"""
__all__ = ['print_bin']
import os
import numpy as np
import math, sys
from ipi.utils.depend import depstrip
def print_bin(atoms, cell, filedesc = sys.stdout, title=""):
    """Prints the centroid configurations, into a binary file.

    The binary layout is: cell matrix (cell.h), atom count, atom names,
    then the position array (atoms.q), each written with ndarray.tofile.

    Args:
        atoms: An atoms object giving the centroid positions.
        cell: A cell object giving the system box.
        filedesc: An open writable file object. Defaults to standard output.
        title: This gives a string to be appended to the comment line.
            NOTE(review): currently unused by this function.
    """

    buff = filedesc # .buffer
    cell.h.tofile(buff)
    # Atom count is wrapped in a 1-element array so tofile can write it.
    nat = np.asarray([atoms.natoms])
    nat.tofile(buff)
    atoms.names.tofile(buff)
    atoms.q.tofile(buff)
| gpl-2.0 |
apark263/tensorflow | tensorflow/python/data/experimental/kernel_tests/serialization/interleave_dataset_serialization_test.py | 21 | 3086 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the InterleaveDataset serialization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python.data.experimental.kernel_tests.serialization import dataset_serialization_test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import sparse_ops
from tensorflow.python.platform import test
class InterleaveDatasetSerializationTest(
    dataset_serialization_test_base.DatasetSerializationTestBase,
    parameterized.TestCase):
  """Serialization tests for tf.data interleave over dense and sparse input."""

  def _build_iterator_graph(self, input_values, cycle_length, block_length,
                            num_parallel_calls):
    # Each element x fans out into a dataset repeating x, x times.
    repeat_count = 2
    return dataset_ops.Dataset.from_tensor_slices(input_values).repeat(
        repeat_count).interleave(
            lambda x: dataset_ops.Dataset.from_tensors(x).repeat(x),
            cycle_length, block_length, num_parallel_calls)

  # Cases cover sequential (None) and parallel interleave with various
  # cycle/block lengths.
  @parameterized.named_parameters(
      ("1", 2, 3, None),
      ("2", 2, 3, 1),
      ("3", 2, 3, 2),
      ("4", 1, 3, None),
      ("5", 1, 3, 1),
      ("6", 2, 1, None),
      ("7", 2, 1, 1),
      ("8", 2, 1, 2),
  )
  def testSerializationCore(self, cycle_length, block_length,
                            num_parallel_calls):
    input_values = np.array([4, 5, 6], dtype=np.int64)
    # Each value x contributes x outputs, and the input is repeated twice.
    num_outputs = np.sum(input_values) * 2
    # pylint: disable=g-long-lambda
    self.run_core_tests(
        lambda: self._build_iterator_graph(
            input_values, cycle_length, block_length, num_parallel_calls),
        lambda: self._build_iterator_graph(
            input_values, cycle_length * 2, block_length, num_parallel_calls),
        num_outputs)
    # pylint: enable=g-long-lambda

  def testSparseCore(self):

    def _map_fn(i):
      return sparse_tensor.SparseTensorValue(
          indices=[[0, 0], [1, 1]], values=(i * [1, -1]), dense_shape=[2, 2])

    def _interleave_fn(x):
      # Densify the sparse tensor and slice it back into elements.
      return dataset_ops.Dataset.from_tensor_slices(
          sparse_ops.sparse_to_dense(x.indices, x.dense_shape, x.values))

    def _build_dataset():
      return dataset_ops.Dataset.range(10).map(_map_fn).interleave(
          _interleave_fn, cycle_length=1)

    self.run_core_tests(_build_dataset, None, 20)
if __name__ == "__main__":
test.main()
| apache-2.0 |
dohoangkhiem/uwsgi | contrib/spoolqueue/tasksconsumer.py | 21 | 1047 | from uwsgidecorators import spool
import Queue
from threading import Thread
queues = {}
class queueconsumer(object):
    """Decorator that consumes items for a named queue in worker threads.

    Instantiating the decorator registers the queue under ``name`` in the
    module-level ``queues`` registry; applying it to a function starts
    ``num`` daemon threads that feed queued requests to that function.
    """

    def __init__(self, name, num=1, **kwargs):
        self.name = name
        self.num = num
        self.queue = Queue.Queue()
        self.threads = []
        self.func = None
        # Register this consumer so enqueuers can look it up by name.
        queues[self.name] = self

    def consumer(self):
        """Worker loop: pop requests and hand them to the decorated function.

        Fix: this was previously declared ``@staticmethod`` while still
        taking ``self`` and being invoked with an explicit instance; a plain
        bound method is equivalent and clearer.
        """
        while True:
            req = self.queue.get()
            print(req)
            self.func(req)
            self.queue.task_done()

    def __call__(self, f):
        self.func = f
        for i in range(self.num):
            t = Thread(target=self.consumer)
            self.threads.append(t)
            t.daemon = True
            t.start()
        # Fix: return the wrapped function so the decorated name stays
        # callable (previously the decorator returned None).
        return f
@spool
def spooler_enqueuer(arguments):
    """Spooler task: route the arguments dict onto the named queue."""
    if 'queue' not in arguments:
        raise Exception("You have to specify a queue name")
    queues[arguments['queue']].queue.put(arguments)
def enqueue(*args, **kwargs):
    # Hand the request to the uWSGI spooler; the spooled task expects a
    # 'queue' key in the arguments (see spooler_enqueuer above).
    return spooler_enqueuer.spool(*args, **kwargs)
| gpl-2.0 |
cedk/odoo | addons/project_issue_sheet/__openerp__.py | 260 | 1814 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Timesheet on Issues',
'version': '1.0',
'category': 'Project Management',
'description': """
This module adds the Timesheet support for the Issues/Bugs Management in Project.
=================================================================================
Worklogs can be maintained to signify number of hours spent by users to handle an issue.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/project-management',
'depends': [
'project_issue',
'hr_timesheet_sheet',
],
'data': [
'project_issue_sheet_view.xml',
'security/ir.model.access.csv',
],
'demo': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
egraba/vbox_openbsd | VirtualBox-5.0.0/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/FdfClass.py | 11 | 9967 | ## @file
# classes represent data in FDF
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
## FD data in FDF
#
#
class FDClassObject:
    """Data for one FD (flash device) declaration in an FDF file."""

    def __init__(self):
        """Set every FD attribute to its default value."""
        self.FdUiName = ''
        self.CreateFileName = self.BaseAddress = self.BaseAddressPcd = None
        self.Size = self.SizePcd = None
        self.ErasePolarity = '1'
        # Entries are 3-tuples: (blockSize, numBlocks, pcd).
        self.BlockSizeList = []
        # DEFINE variable name -> value.
        self.DefineVarDict = {}
        # SET variable name -> value.
        self.SetVarDict = {}
        self.RegionList = []
        self.vtfRawDict = {}
## FV data in FDF
#
#
class FvClassObject:
    """Data for one FV (firmware volume) declaration in an FDF file."""

    def __init__(self):
        """Set every FV attribute to its default value."""
        self.UiFvName = self.CreateFileName = None
        # Entries are 3-tuples: (blockSize, numBlocks, pcd).
        self.BlockSizeList = []
        # DEFINE variable name -> value.
        self.DefineVarDict = {}
        # SET variable name -> value.
        self.SetVarDict = {}
        self.FvAlignment = None
        # Attribute name -> TRUE/FALSE (1/0).
        self.FvAttributeDict = {}
        self.FvNameGuid = None
        self.AprioriSectionList = []
        self.FfsList = []
        self.BsBaseAddress = self.RtBaseAddress = None
## Region data in FDF
#
#
class RegionClassObject:
    """Data for one region declaration inside an FD."""

    def __init__(self):
        """Set every region attribute to its default value."""
        self.Offset = None   # Start position of the region.
        self.Size = None     # Size of the region.
        self.PcdOffset = self.PcdSize = None
        self.SetVarDict = {}
        self.RegionType = None
        self.RegionDataList = []
## FFS data in FDF
#
#
class FfsClassObject:
    """Common data for an FFS file declaration parsed from an FDF file."""

    def __init__(self):
        """Set every FFS attribute to its default value."""
        self.NameGuid = None
        self.Fixed = False
        self.CheckSum = False
        self.Alignment = None
        self.SectionList = []
## FILE statement data in FDF
#
#
class FileStatementClassObject(FfsClassObject):
    """Data for a FILE statement in an FDF file."""

    def __init__(self):
        """Extend the FFS defaults with FILE-statement attributes."""
        FfsClassObject.__init__(self)
        self.FvFileType = self.FileName = None
        self.KeyStringList = []
        self.FvName = self.FdName = None
        self.DefineVarDict = {}
        self.AprioriSection = None
        self.KeepReloc = None
## INF statement data in FDF
#
#
class FfsInfStatementClassObject(FfsClassObject):
    """Data for an INF statement in an FDF file."""

    def __init__(self):
        """Extend the FFS defaults with INF-statement attributes."""
        FfsClassObject.__init__(self)
        self.Rule = self.Version = self.Ui = self.InfFileName = None
        self.BuildNum = ''
        self.KeyStringList = []
        self.KeepReloc = None
        self.UseArch = None
## APRIORI section data in FDF
#
#
class AprioriSectionClassObject:
    """Data for an APRIORI section in an FDF file."""

    def __init__(self):
        """Set the APRIORI section attributes to their defaults."""
        # DEFINE variable name -> value.
        self.DefineVarDict = {}
        self.FfsList = []
## section data in FDF
#
#
class SectionClassObject:
    """Base data for any section declaration in an FDF file."""

    def __init__(self):
        """Initialize the only shared section attribute."""
        self.Alignment = None
## Depex expression section in FDF
#
#
class DepexSectionClassObject(SectionClassObject):
    """Data for a dependency-expression section in an FDF file.

    NOTE: the parent SectionClassObject.__init__ is intentionally not
    invoked here, so instances have no Alignment attribute until one is
    assigned externally (preserved from the original behaviour).
    """

    def __init__(self):
        self.DepexType = self.Expression = None
        self.ExpressionProcessed = False
## Compress section data in FDF
#
#
class CompressSectionClassObject(SectionClassObject):
    """Data for a compressed section in an FDF file."""

    def __init__(self):
        """Extend the section defaults with compression attributes."""
        SectionClassObject.__init__(self)
        self.CompType = None
        self.SectionList = []
## Data section data in FDF
#
#
class DataSectionClassObject(SectionClassObject):
    """Data for a data section in an FDF file."""

    def __init__(self):
        """Extend the section defaults with data-section attributes."""
        SectionClassObject.__init__(self)
        self.SecType = self.SectFileName = None
        self.SectionList = []
        self.KeepReloc = True
## Rule section data in FDF
#
#
class EfiSectionClassObject(SectionClassObject):
    """Data for an EFI section inside a rule in an FDF file."""

    def __init__(self):
        """Extend the section defaults with EFI-section attributes."""
        SectionClassObject.__init__(self)
        self.SectionType = None
        self.Optional = False
        self.FileType = self.StringData = self.FileName = None
        self.FileExtension = self.BuildNum = None
        self.KeepReloc = None
## FV image section data in FDF
#
#
class FvImageSectionClassObject(SectionClassObject):
    """Data for an FV image section in an FDF file."""

    def __init__(self):
        """Extend the section defaults with FV-image attributes."""
        SectionClassObject.__init__(self)
        self.Fv = self.FvName = None
        self.FvFileType = self.FvFileName = self.FvFileExtension = None
        self.FvAddr = None
## GUIDed section data in FDF
#
#
class GuidSectionClassObject(SectionClassObject):
    """Data for a GUID-defined section in an FDF file."""

    def __init__(self):
        """Extend the section defaults with GUIDed-section attributes."""
        SectionClassObject.__init__(self)
        self.NameGuid = None
        self.SectionList = []
        self.SectionType = None
        self.ProcessRequired = False
        self.AuthStatusValid = False
        # -1 means the extra header size has not been determined yet.
        self.ExtraHeaderSize = -1
        self.FvAddr = []
        self.FvParentAddr = None
        self.IncludeFvSection = False
## UI section data in FDF
#
#
class UiSectionClassObject(SectionClassObject):
    """Data for a UI section in an FDF file."""

    def __init__(self):
        """Extend the section defaults with UI-section attributes."""
        SectionClassObject.__init__(self)
        self.StringData = self.FileName = None
## Version section data in FDF
#
#
class VerSectionClassObject(SectionClassObject):
    """Data for a version section in an FDF file."""

    def __init__(self):
        """Extend the section defaults with version-section attributes."""
        SectionClassObject.__init__(self)
        self.BuildNum = self.StringData = self.FileName = None
## Rule data in FDF
#
#
class RuleClassObject:
    """Base data for a rule declaration in an FDF file."""

    def __init__(self):
        """Set every rule attribute to its default value."""
        self.Arch = None
        self.ModuleType = None      # Module type the rule applies to.
        self.TemplateName = None
        self.NameGuid = None
        self.Fixed = False
        self.Alignment = self.SectAlignment = None
        self.CheckSum = False
        self.FvFileType = None      # FFS file type produced by the rule.
        self.KeyStringList = []
        self.KeepReloc = None
## Complex rule data in FDF
#
#
class RuleComplexFileClassObject(RuleClassObject):
    """Data for a complex-file rule in an FDF file."""

    def __init__(self):
        """Extend the rule defaults with a section list."""
        RuleClassObject.__init__(self)
        self.SectionList = []
## Simple rule data in FDF
#
#
class RuleSimpleFileClassObject(RuleClassObject):
    """Data for a simple-file rule in an FDF file."""

    def __init__(self):
        """Extend the rule defaults with simple-file attributes."""
        RuleClassObject.__init__(self)
        self.FileName = None
        self.SectionType = ''
        self.FileExtension = None
## File extension rule data in FDF
#
#
class RuleFileExtensionClassObject(RuleClassObject):
    """Data for a file-extension rule in an FDF file."""

    def __init__(self):
        """Extend the rule defaults with the matched file extension."""
        RuleClassObject.__init__(self)
        self.FileExtension = None
## Capsule data in FDF
#
#
class CapsuleClassObject:
    """Data for a capsule declaration in an FDF file."""

    def __init__(self):
        """Set every capsule attribute to its default value."""
        self.SpecName = self.UiCapsuleName = None
        self.CreateFile = self.GroupIdNumber = None
        # DEFINE variable name -> value.
        self.DefineVarDict = {}
        # SET variable name -> value.
        self.SetVarDict = {}
        # Token name -> value.
        self.TokensDict = {}
        self.CapsuleDataList = []
## VTF data in FDF
#
#
class VtfClassObject:
    """Data for a VTF declaration in an FDF file."""

    def __init__(self):
        """Set every VTF attribute to its default value."""
        self.KeyArch = self.ArchList = None
        self.UiName = self.ResetBin = None
        self.ComponentStatementList = []
## VTF component data in FDF
#
#
class ComponentStatementClassObject:
    """Data for a single VTF component statement in an FDF file."""

    def __init__(self):
        """Set every component attribute to None until parsed."""
        self.CompName = self.CompLoc = self.CompType = None
        self.CompVer = self.CompCs = self.CompBin = None
        self.CompSym = self.CompSize = self.FilePos = None
## OptionROM data in FDF
#
#
class OptionRomClassObject:
    """Data for an OptionROM declaration in an FDF file."""

    def __init__(self):
        """Set the OptionROM attributes to their defaults."""
        self.DriverName = None
        self.FfsList = []
| mit |
mariodebian/tcosconfig | TcosGui.py | 1 | 37114 | # -*- coding: UTF-8 -*-
# TcosConfig version __VERSION__
#
# Copyright (c) 2006-2011 Mario Izquierdo <mariodebian@gmail.com>
#
# This package is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
#
# This package is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from ConfigReader import ConfigReader
import shared
from subprocess import Popen, PIPE, STDOUT
from threading import Thread
import os
import shutil
#import sys, re
import pygtk
pygtk.require('2.0')
import gtk
import time
from gettext import gettext as _
from gettext import locale
import pwd
import sys
#gtk.threads_init()
gtk.gdk.threads_init()
def print_debug(txt):
    """Print *txt* prefixed with the module name when shared.debug is set."""
    # FIX: the original declared ``global debug`` but never read or wrote a
    # module-level ``debug`` name -- the flag actually consulted is
    # shared.debug -- so the dead global declaration was removed.
    if shared.debug:
        print( "%s::%s " %(__name__, txt) )
class TcosGui:
    def __init__(self):
        """Build the wizard window: load the GtkBuilder UI files, expose
        every widget as an attribute of self, connect all signals and set
        the initial widget state.  Exits with status 1 when not run as root.
        """
        import shared
        self.isfinished=False
        self.changedvalues=[]
        # load some classes
        self.tcos_config_file = shared.tcos_config_file
        # building boot images needs root privileges; bail out early
        if pwd.getpwuid(os.getuid())[0] != "root":
            self.error_msg( _("Error, you must run this app as root user") )
            sys.exit(1)
        self.config=ConfigReader()
        self.step=0
        # locale candidates used to pick translated template descriptions,
        # e.g. "es_ES" also tries "es" and "ES"
        self.languages=[locale.getdefaultlocale()[0]]
        if self.languages[0] and "_" in self.languages[0]:
            self.languages.append( self.languages[0].split('_')[0] )
            self.languages.append( self.languages[0].split('_')[1] )
        print_debug ( "__init__ languages=%s" %self.languages)
        # Widgets
        print_debug ("loading %s"%(shared.UI_DIR + 'tcosconfig.ui'))
        self.ui = gtk.Builder()
        self.ui.set_translation_domain(shared.PACKAGE)
        print_debug("Loading ui file...")
        self.ui.add_from_file(shared.UI_DIR + 'tcosconfig.ui')
        # load all widgets
        for widget in self.ui.get_objects():
            if hasattr(widget, 'get_name'):
                setattr(self, widget.get_name(), widget)
        # second pass: also expose widgets under their GtkBuildable id
        # (the id used in the .ui file), which may differ from get_name()
        for widget in self.ui.get_objects():
            try:
                if issubclass(type(widget), gtk.Buildable):
                    name = gtk.Buildable.get_name(widget)
                    setattr(self, name, widget)
                    #print_debug("widget_name %s" %name)
            except AttributeError, err:
                print "Exception get_objects() err=%s"%err
        self.steps.set_show_tabs(False)
        self.tcosconfig.set_icon_from_file(shared.IMG_DIR +'tcos-icon.png')
        self.aboutui=gtk.Builder()
        self.aboutui.set_translation_domain(shared.PACKAGE)
        self.aboutui.add_from_file(shared.UI_DIR + 'tcosconfig-aboutdialog.ui')
        self.aboutdialog = self.aboutui.get_object("aboutdialog")
        self.aboutdialog.connect("response", self.on_aboutdialog_response)
        self.aboutdialog.connect("close", self.on_aboutdialog_close)
        self.aboutdialog.connect("delete_event", self.on_aboutdialog_close)
        self.aboutdialog.set_version(shared.VERSION)
        self.aboutdialog.set_name('TcosConfig')
        self.aboutdialog.set_icon_from_file(shared.IMG_DIR +'tcos-icon.png')
        # set initial bottom status
        self.backbutton.hide()
        self.nextbutton.set_label('gtk-go-forward')
        self.donebutton.hide()
        # add signals
        self.tcosconfig.connect('destroy', self.on_cancelbutton_click )
        self.nextbutton.connect('clicked', self.on_nextbutton_click )
        self.backbutton.connect('clicked', self.on_backbutton_click )
        self.cancelbutton.connect('clicked', self.on_cancelbutton_click )
        self.donebutton.connect( 'clicked', self.on_donebutton_click )
        self.aboutbutton.connect('clicked', self.on_aboutbutton_click )
        self.startbutton.connect('clicked', self.on_startbutton_click )
        self.TCOS_TEMPLATE.connect('changed', self.on_template_change)
        """
        http://www.pygtk.org/pygtk2tutorial-es/sec-ExpanderWidget.html
        """
        # put all expanders into a list
        self.expanders=[self.expander_debug,
                self.expander_services,
                self.expander_wifi,
                self.expander_xorg,
                self.expander_dri,
                self.expander_sound,
                self.expander_remote,
                self.expander_auth,
                self.expander_bootmenu,
                self.expander_kernel, # expand by default kernel
                self.expander_thinclients,
                self.expander_other ]
        # connect signal expanded and call on_expander_click to close others
        for exp in self.expanders:
            exp.connect('notify::expanded', self.on_expander_click)
        # by default all expanders closed
        for expander in self.expanders:
            #print_debug ( "closing expander %s" %(expander) )
            expander.set_expanded(False)
        self.settings_loaded=False
        # the three menu-mode radio buttons share one toggled handler
        for radio in ["TCOS_MENU_MODE", "TCOS_MENU_MODE_SIMPLE", "TCOS_MENU_MODE_GRAPHIC"]:
            widget=getattr(self, radio)
            widget.connect('toggled', self.on_tcos_menu_mode_change)
        self.menu_type=""
        # hide the plymouth/usplash selectors when the distro ships no themes
        # (the *_VALUES table then contains a single empty entry)
        if len(shared.TCOS_PLYMOUTH_VALUES) == 1 and shared.TCOS_PLYMOUTH_VALUES[0][0]=="":
            self.TCOS_DISABLE_PLYMOUTH.set_active(True)
            self.TCOS_PLYMOUTH.set_sensitive(False)
            self.TCOS_DISABLE_PLYMOUTH.set_sensitive(False)
            self.hbox_plymouth.hide()
        if len(shared.TCOS_USPLASH_VALUES) == 1 and shared.TCOS_USPLASH_VALUES[0][0]=="":
            self.TCOS_DISABLE_USPLASH.set_active(True)
            self.TCOS_USPLASH.set_sensitive(False)
            self.TCOS_DISABLE_USPLASH.set_sensitive(False)
            self.hbox_usplash.hide()
        #self.TCOS_XORG_DRI.connect('toggled', self.on_disable_dri_change)
        self.TCOS_DISABLE_USPLASH.connect('toggled', self.on_disable_usplash_change)
        self.TCOS_DISABLE_PLYMOUTH.connect('toggled', self.on_disable_plymouth_change)
        # events for linked widgets
        for widget in shared.linked_widgets:
            if not hasattr(self, widget): continue
            w=getattr(self, widget)
            data=shared.linked_widgets[widget]
            w.connect(data[0], self.on_linked_widgets, data)
    def on_linked_widgets(self, widget, data):
        """Enable/disable widgets that depend on *widget*'s state.

        data comes from shared.linked_widgets and is presumably
        (signal_name, on_active_map, on_inactive_map), each map being
        {widget_name: forced_state} where None means "just re-enable"
        -- TODO confirm against shared.py.
        """
        active=widget.get_active()
        if active:
            other=data[1]
        else:
            other=data[2]
        if len(other) < 1:
            #print_debug("on_linked_widgets() nothing to do")
            return
        for w in other:
            if hasattr(self, w):
                wid=getattr(self, w)
                enabled=wid.get_active()
                if other[w] != None:
                    # force the linked widget to the mapped state and grey
                    # it out, explaining why via the tooltip
                    wid.set_active(other[w])
                    wid.set_sensitive(other[w])
                    #print dir(getattr(self, w))
                    if hasattr(wid, "set_tooltip_markup"):
                        wid.set_tooltip_markup( _("Need to enable <b>%s</b> before") %(gtk.Buildable.get_name(widget)) )
                else:
                    wid.set_sensitive(True)
                    if hasattr(wid, "set_tooltip_text"):
                        wid.set_tooltip_text( "" )
                #print_debug("on_linked_widgets() widget=%s enabled=%s new=%s"%(w, enabled, other[w]) )
def on_disable_plymouth_change(self, widget):
print_debug("on_disable_plymouth_change() value=%s"%widget.get_active())
if widget.get_active():
self.TCOS_PLYMOUTH.set_sensitive(False)
else:
self.TCOS_PLYMOUTH.set_sensitive(True)
def on_disable_usplash_change(self, widget):
print_debug("on_disable_usplash_change() value=%s"%widget.get_active())
if widget.get_active():
self.TCOS_USPLASH.set_sensitive(False)
else:
self.TCOS_USPLASH.set_sensitive(True)
def on_disable_dri_change(self, widget):
print_debug("on_disable_dri_change() value=%s"%widget.get_active())
if widget.get_active():
self.TCOS_XORG_DRI_RADEON.set_sensitive(True)
else:
self.TCOS_XORG_DRI_RADEON.set_sensitive(False)
    def on_tcos_menu_mode_change(self, widget):
        """Map the selected menu-mode radio button onto the matching
        TCOS_NETBOOT_MENU / TCOS_NETBOOT_MENU_VESA config values and toggle
        the hide-install/hide-local checkboxes accordingly."""
        #print_debug("on_tcos_menu_mode_change() widget=%s active=%s"%(widget.name, widget.get_active()))
        # 'toggled' fires for both sides of a radio group; skip the deselect
        if not widget.get_active():
            return
        # widget id minus the common prefix gives '' | 'SIMPLE' | 'GRAPHIC'
        menu_type=gtk.Buildable.get_name(widget).replace('TCOS_MENU_MODE','').replace('_','')
        print_debug("on_tcos_menu_mode_change() widget=%s type=%s" %(gtk.Buildable.get_name(widget),menu_type))
        for item in shared.TCOS_MENUS_TYPES:
            #print_debug("on_tcos_menu_mode_change() item[0]=%s menu_type=%s"%(item[0], menu_type))
            if item[0] == menu_type:
                self.config.changevalue("TCOS_NETBOOT_MENU", item[1])
                self.config.changevalue("TCOS_NETBOOT_MENU_VESA", item[2])
                self.TCOS_NETBOOT_HIDE_INSTALL.set_sensitive(item[3])
                self.TCOS_NETBOOT_HIDE_LOCAL.set_sensitive(item[4])
def on_expander_click(self, expander, params):
"""
close all expanders except actual when clicked
"""
# exit if calling when close
if not expander.get_expanded(): return
for exp in self.expanders:
if exp != expander:
exp.set_expanded(False)
    def on_backbutton_click(self, widget):
        """Wizard 'Back': move one page backwards."""
        #print_debug ("Back clicked")
        self.changestep( -1)
        return
    def on_nextbutton_click(self, widget):
        """Wizard 'Next': move one page forwards."""
        #print_debug ("Next clicked")
        self.changestep( 1 )
        return
    def on_cancelbutton_click(self, widget):
        """Quit the application, reverting unsaved configuration changes."""
        #print_debug ("Cancel clicked")
        # NOTE(review): revert() runs after exitapp() has stopped the main
        # loop -- presumably still effective since exitapp only quits gtk;
        # confirm ordering is intentional.
        self.exitapp(widget)
        self.config.revert()
        return
    def on_donebutton_click(self, widget):
        """Wizard 'Done': close the application."""
        #print_debug ("Done clicked")
        # 1 is for show popup
        #self.saveconfig(True)
        self.exitapp(widget)
        return
    def on_aboutbutton_click(self, widget):
        """Show the About dialog."""
        print_debug ("TcosGui::on_aboutbutton_click() About clicked")
        self.aboutdialog.show()
        return
    def on_aboutdialog_close(self, widget, event=None):
        """Hide (never destroy) the About dialog so it can be reopened."""
        print_debug ("TcosGui::on_aboutdialog_close() Closing about")
        self.aboutdialog.hide()
        return True
    def on_aboutdialog_response(self, dialog, response, *args):
        """Hide the About dialog on any negative (close/escape) response."""
        #http://www.async.com.br/faq/pygtk/index.py?req=show&file=faq10.013.htp
        if response < 0:
            dialog.hide()
            dialog.emit_stop_by_name('response')
    def on_startbutton_click(self, widget):
        """Save the current settings, build the gentcos command line and run
        image generation in a worker thread, with the UI buttons disabled."""
        textbuffer = self.processtxt.get_buffer()
        textbuffer.set_text('')
        if self.TCOS_ROOT_PASSWD.get_text() == "" and self.config.use_secrets == False:
            self.info_msg(_("You leave blank root password for thin clients in:\n - Advanced settings -> Users and passwords\
\n\nThe password will be established to: \"root\""))
        #print_debug("Start clicked")
        # disable nextbutton
        self.nextbutton.set_sensitive(False)
        self.startbutton.set_sensitive(False)
        self.backbutton.set_sensitive(False)
        self.cancelbutton.set_sensitive(False)
        # read conf
        #self.writeintoprogresstxt( _("Backup configuration settings.") )
        #backup config file
        #self.backupconfig("backup")
        #self.updateprogressbar(0.1)
        #time.sleep(0.3)
        self.writeintoprogresstxt( _("Overwriting it with your own settings.") )
        #FIXME save config without popup
        self.saveconfig(False)
        self.updateprogressbar(0.05)
        #time.sleep(0.3)
        # generate cmdline for gentcos
        cmdline=self.getcmdline()
        if cmdline == -1:
            # ERROR processing cmdline
            error_txt=_("Something wrong ocurred while parsing command line options for gentcos.\n\nSelect boot method??")
            self.writeintoprogresstxt( error_txt )
            self.error_msg ( error_txt )
            self.backupconfig("restore")
            return
        cmdline=shared.gentcos + cmdline
        self.writeintoprogresstxt( _("EXEC: %s") %(cmdline))
        # start generate in a thread so the UI stays responsive
        th=Thread(target=self.generateimages, args=(cmdline,) )
        th.start()
    def enablebuttons(self):
        """Re-enable the navigation buttons after image generation ends."""
        # when done, activate nextbutton
        self.nextbutton.set_sensitive(True)
        self.backbutton.set_sensitive(True)
        self.cancelbutton.set_sensitive(True)
        # IMPORTANT restore backup
        #self.backupconfig("restore")
        #self.writeintoprogresstxt( _("Restore configuration settings.") )
        self.startbutton.set_sensitive(True)
    def generateimages(self, cmdline):
        """Worker-thread body: run gentcos in a shell subprocess, stream its
        output into the progress text view and advance the progress bar.

        Runs until the child exits (detected via poll()); UI calls are
        bracketed with gtk.gdk.threads_enter/leave.
        """
        self.isfinished=False
        p = Popen(cmdline, shell=True, bufsize=0, stdout=PIPE, stderr=STDOUT, close_fds=True)
        print_debug ("generateimages() exec: %s"%cmdline)
        stdout=p.stdout
        counter=0.1
        step=0.02
        while not self.isfinished:
            time.sleep(0.1)
            line=stdout.readline().replace('\n','')
            if len(line) > 0:
                counter=counter+step
            if p.poll() != None:
                self.isfinished=True
            # shell/prompt noise is logged but kept out of the UI
            if 'bash: no job' in line or \
                'root@' in line or \
                'df: Warning' in line:
                print_debug("generateimages() NO VISIBLE LINE %s"%line)
                continue
            print_debug("generateimages() %s"%line)
            gtk.gdk.threads_enter()
            self.updateprogressbar(counter)
            self.writeintoprogresstxt( line )
            gtk.gdk.threads_leave()
        gtk.gdk.threads_enter()
        self.enablebuttons()
        self.updateprogressbar(1)
        gtk.gdk.threads_leave()
    def updateprogressbar(self, num):
        """Set the progress bar to fraction *num* (0..1).

        Values above 1 are clamped to 0.99 ("Working..."); only an exact 1
        shows the "Complete" text.
        """
        #print ("DEBUG: update progressbar to %f" %(num))
        if num > 1:
            num = 0.99
        self.gentcosprogressbar.set_fraction( num )
        self.gentcosprogressbar.set_text( _("Working... (%d %%)") %int(num*100) )
        if num==1:
            self.gentcosprogressbar.set_text( _("Complete") )
        return
    def backupconfig(self, method):
        """Backup or restore the tcos config file.

        NOTE(review): the bare return below disables this method entirely;
        everything after it is dead code kept for reference.
        """
        return
        # method is backup or restore
        origfile=self.tcos_config_file
        abspath = os.path.abspath(origfile)
        destfile= abspath + ".orig"
        print_debug("TcosGui::backupconfig() orig file %s" %(abspath) )
        print_debug("TcosGui::backupconfig() dest file %s" %(destfile) )
        if shared.debug:
            os.system("diff -ur %s %s" %(abspath, destfile) )
        try:
            if method == "backup":
                print_debug("TcosGui::backupconfig() Making backup...")
                shutil.copyfile(abspath, destfile)
            elif method == "restore":
                print_debug("TcosGui::backupconfig() Restoring backup...")
                shutil.copyfile(destfile, abspath)
                # re-read config file
                self.config.reset()
                self.config.getvars()
            else:
                print_debug("TcosGui::backupconfig() ERROR, unknow method %s" %(method) )
        except OSError, problem:
            print_debug("TcosGui::backupconfig() ERROR \"%s\"!!!" %(problem) )
            pass
    def saveconfig(self, popup):
        """Diff every GUI widget value against tcos.conf and persist the
        variables that changed.

        popup: when True, show an info dialog listing the changed variables.
        """
        # popup boolean (show or not) info msg about vars
        print_debug ("saveconfig() Saving config")
        changedvaluestxt=''
        active_menu=''
        # get value of TCOS_NETBOOT_MENU
        for w in ['TCOS_MENU_MODE', 'TCOS_MENU_MODE_SIMPLE', 'TCOS_MENU_MODE_GRAPHIC']:
            if getattr(self, w).get_active():
                active_menu=w.replace('TCOS_MENU_MODE', '').replace('_','')
        # NOTE(review): this loop only logs the matching menu; it changes
        # nothing (the actual values are set in on_tcos_menu_mode_change)
        for menu in shared.TCOS_MENUS_TYPES:
            if active_menu == menu[0]:
                print_debug("saveconfig() menu=%s self.config.menus=%s" %(menu, self.config.menus) )
        print_debug("saveconfig() ACTIVE MENU =%s"%active_menu)
        for exp in self.config.confdata:
            if not hasattr(self, exp):
                # vars with no widget: only the netboot-menu pair is still
                # flagged for saving
                if exp in ["TCOS_NETBOOT_MENU", "TCOS_NETBOOT_MENU_VESA"]:
                    self.changedvalues.append ( exp )
                #print_debug ( "saveconfig() widget %s ___NOT___ found" %(exp) )
                continue
            widget=getattr(self, exp)
            varname = self.getvalueof_andsave(widget, exp)
            #print_debug ("saveconfig() exp=%s varname=%s"%(exp, varname))
            if varname != None:
                self.changedvalues.append ( varname )
        print_debug("saveconfig() changedvalues=%s"%self.changedvalues)
        if len(self.changedvalues) > 0:
            self.config.savedata(self.changedvalues)
        if len(self.changedvalues) > 0 and popup:
            for txt in self.changedvalues:
                changedvaluestxt+='\n' + txt
            self.info_msg( _("Configuration saved succesfully.\nChanged values are:\n %s") %(changedvaluestxt))
        return
    def getvalueof_andsave(self, widget, varname):
        """Return *varname* (after storing its new value) when the GUI value
        differs from tcos.conf; otherwise fall through returning None."""
        wtype=[]
        # last component of the widget's class path, e.g. "GtkEntry"
        widget_type=widget.class_path().split('.')
        wtype=widget_type[ len(widget_type)-1 ]
        value=str( self.config.getvalue(varname) )
        guivalue=str( self.readguiconf(widget, varname, wtype) )
        #print_debug("getvalueof_andsave() varname=%s, oldvalue=%s guivalue=%s wtype=%s ##################" %(varname, value, guivalue, wtype) )
        # changevalue if is distinct
        if value != guivalue:
            print_debug ("getvalueof() CHANGED widget=%s type=%s value=%s guiconf=%s" %(varname, wtype, value, guivalue))
            self.config.changevalue(varname, str(guivalue) )
            return( str( varname) )
    def getcmdline(self):
        """
        Generate gentcos cmdline based on tcos.conf and gui settings

        Returns the argument string for gentcos, or -1 when no boot
        method is selected.
        """
        # for gentcos cmdline need:
        # TCOS_KERNEL -vmlinuz
        # TCOS_METHOD -tftp -nbi -cdrom -nfs
        # TCOS_SUFFIX -suffix=foo
        # TCOS_DEBUG -size (show ramdisk and usr.squashfs sizes)
        cmdline=""
        methodindex=self.read_select(self.TCOS_METHOD, "TCOS_METHOD")
        if methodindex == -1:
            print_debug("TcosGui::getcmdline() Unknow method in TCOS_METHOD")
            return -1
        cmdline+=" %s" %(self.search_selected_index(self.TCOS_METHOD, "TCOS_METHOD", methodindex) )
        # get TCOS_KERNEL Combobox
        model=self.TCOS_KERNEL.get_model()
        kernelindex=self.TCOS_KERNEL.get_active()
        kerneltext=model[kernelindex][0]
        print_debug("TcosGui::getcmdline() selected TCOS_KERNEL index(%d) = %s" %(kernelindex, kerneltext) )
        # NOTE(review): get_active() returns an int (-1 when empty), so the
        # != "" comparison is always true; only the != None check and the
        # text comparison actually filter here -- confirm intent.
        if kernelindex != "" and kernelindex != None and kerneltext != self.config.getvalue("TCOS_KERNEL"):
            cmdline+=" -vmlinuz=\""+ kerneltext + "\""
        is_append_in_list=False
        for item in self.changedvalues:
            if item == "TCOS_APPEND":
                is_append_in_list=True
        # get TCOS_APPEND
        if self.TCOS_APPEND.get_text() != "" and self.TCOS_APPEND.get_text() != None and is_append_in_list:
            value=self.TCOS_APPEND.get_text()
            # sanitize shell metacharacters but keep spaces between args
            value=self.cleanvar(value, spaces=False)
            cmdline+=" -extra-append=\""+ value + "\""
        # get TCOS_DEBUG
        if self.TCOS_DEBUG.get_active():
            cmdline+=" -size"
        print_debug ("getcmdline() cmdline=%s" %(cmdline) )
        if shared.updatetcosimages:
            # return cmdline into ""
            return "--gentcos=\"%s\"" %(cmdline)
        return cmdline
    def populate_select(self,widget, varname):
        """
        Try to read %varname%_VALUES (in shared class) and populate ComboBox with it
        params:
            - widget  = GtkComboBox to populate
            - varname = combobox name
        return:
            nothing
        """
        #print_debug ( "TcosGui::populate_select() populating \"%s\"" %(varname) )
        if not hasattr(shared, varname+"_VALUES"):
            #print_debug ( "TcosGui::populate_select() WARNING: %s not have %s_VALUES" %(varname, varname) )
            return
        values=getattr(shared, varname+"_VALUES")
        if len(values) < 1:
            print_debug ( "TcosGui::populate_select() WARNING %s_VALUES is empty" %(varname) )
        else:
            # column 0 = tcos.conf value, column 1 = displayed label
            itemlist = gtk.ListStore(str, str)
            #print eval("shared."+varname+"_VALUES")
            for item in values:
                #print_debug ("TcosGui::populate_select() %s item[0]=\"%s\" item[1]=%s" %(varname, item[0], item[1]) )
                # populate select
                itemlist.append ( [item[0], item[1]] )
            widget.set_model(itemlist)
            if widget.get_text_column() != 1:
                widget.set_text_column(1)
            itemlist=None
        return
def set_active_select(self, widget, varname, value):
"""
Set default ComboBox value based on tcos.conf settings
params:
- widget = GtkComboBox to set active
- varname = ComboBox name
- value = value of varname in tcos.conf
returns:
nothing
"""
# search index of selected value
if not hasattr(shared, varname+"_VALUES"):
#print_debug ( "TcosGui::set_active_select() WARNING: %s not have %s_VALUES" %(varname, varname) )
return
values=getattr(shared, varname+"_VALUES")
if len(values) < 1:
print_debug ( "TcosGui::set_active_select() WARNING %s_VALUES is empty" %(varname) )
else:
for i in range(len( values ) ):
if values[i][0] == value:
#print_debug ( "TcosGui::set_active_selected() index=%d SELECTED=%s" %( i, values[i][0] ) )
widget.set_active(i)
return
def read_select(self, widget, varname):
"""
Read index of active ComboBox value
params:
- widget = GtkComboBox to read
- varname = ComboBox name
return:
index of selected value or -1 if nothing selected
"""
#print_debug ( "TcosGui::read_select() reading \"%s\"" %(varname) )
selected=-1
try:
selected=widget.get_active()
except:
print_debug ( "TcosGui::read_select() ERROR reading %s" %(varname) )
return selected
    def search_selected_index(self,widget,varname, index):
        """
        Convert index of selected value in tcos.conf text value
        If varname not have %varname%_VALUES return first column value
        params:
            - widget  = GtkComboBox to read
            - varname = ComboBox name
            - index   = index of selected value retunr by self.read.selected
        returns:
            txt value or "" if not found
        """
        print_debug("search_selected_index widget=%s varname=%s index=%s"%(widget,varname, index))
        value=""
        # if index is < 0 return empty string (PLYMOUTH_DISABLE)
        if index < 0:
            return ''
        # prefer the shared.<varname>_VALUES table when it exists
        if hasattr(shared, varname + "_VALUES"):
            return getattr(shared, varname + "_VALUES")[index][0]
        model=widget.get_model()
        # TCOS_TEMPLATE stores the filename in column 1, not column 0
        if varname == "TCOS_TEMPLATE":
            return model[index][1]
        else:
            return model[index][0]
def cleanvar(self, value, spaces=True):
# delete ;|&>< of value
if spaces:
value=value.replace(' ','_') #replace spaces with _
value=value.replace(';','_') #replace ; with _
value=value.replace('&','_') #replace & with _
value=value.replace('>','_') #replace > with _
value=value.replace('<','_') #replace < with _
value=value.replace('|','_') #replace | with _
return value
def readguiconf(self, widget, varname, wtype):
value=""
if wtype == "GtkComboBoxEntry":
index=self.read_select(widget, varname)
value=self.search_selected_index(widget, varname, index)
#print_debug ( "TcosGui::readguiconf() Read ComboBox %s index=%d value=%s" %(varname, index, value) )
elif wtype == "GtkEntry":
value=widget.get_text()
elif wtype == "GtkCheckButton":
if widget.get_active():
value=1
else:
value=""
elif wtype == "GtkSpinButton" and gtk.Buildable.get_name(widget) == "TCOS_VOLUME":
# add % to TCOS_VOLUME
value=str( int( widget.get_value() ) )
value=value+"%"
elif wtype == "GtkSpinButton" and gtk.Buildable.get_name(widget) != "TCOS_VOLUME":
value=str( int( widget.get_value() ) )
elif wtype == "GtkSpinButton" and gtk.Buildable.get_name(widget) != "TCOS_MAX_MEM":
value=str( int( widget.get_value() ) )
elif wtype == "GtkSpinButton" and gtk.Buildable.get_name(widget) != "TCOS_COMPCACHE_PERCENT":
value=str( int( widget.get_value() ) )
else:
print_debug ("TcosGui::readguiconf() __ERROR__ unknow %s of type %s" %(varname, wtype) )
return value
    def writeintoprogresstxt(self, txt):
        """Append *txt* as a new line to the progress text view and keep the
        view scrolled to the insertion point."""
        buffer = self.processtxt.get_buffer()
        iter = buffer.get_end_iter()
        mark = buffer.get_insert()
        txt=str(txt)
        buffer.insert(iter, '\n' + txt)
        # scroll window
        self.processtxt.scroll_to_mark(mark, 0.2)
        return
    def changestep(self, newstep):
        """Advance (+1) or rewind (-1) the wizard one page, updating the
        notebook page and Back/Next/Done button visibility.

        self.step is clamped to the 0..7 range.
        """
        #print_debug("changestep() newstep=%s"%newstep)
        self.step=self.step + newstep
        if self.step <= 0:
            self.backbutton.hide()
            self.step = 0
        elif self.step == 1:
            print_debug("changestep() ****** STEP 1 ****")
            self.backbutton.show()
            # entering step 1 going forward: (re)load config and templates
            if newstep == 1:
                self.config.loadbase()
                self.config.loadtemplates()
                self.populatetemplates()
            # load TCOS var settings from file and populate Gtk Entries
            #self.loadsettings()
        #elif self.step == 2:
        #    self.loadsettings()
        #    # nothing to do???
        #elif self.step == 3:
        #    self.loadsettings()
        elif self.step == 6:
            self.nextbutton.show()
            self.donebutton.hide()
        elif self.step >= 7 : # step 5
            self.donebutton.show()
            self.nextbutton.hide()
            self.step = 7
        # move step
        if newstep == 1: # click next
            self.steps.next_page()
        else: # click back
            self.steps.prev_page()
        return
    def on_template_change(self, widget):
        """Reload the configuration from the template selected in the
        TCOS_TEMPLATE combo and repopulate every widget from it."""
        # a template switch invalidates the pending change list
        self.changedvalues=[]
        model=widget.get_model()
        value=model[widget.get_active()][1]
        if value:
            print_debug("****** ON_TEMPLATE_CHANGE() ***** value=%s"%value)
            self.config.reloadtemplate(value)
            self.loadsettings()
            self.config.changevalue("TCOS_TEMPLATE", value)
def getTranslatedDescription(self, tpl):
for lang in self.languages:
if self.config.templates[tpl].has_key('TEMPLATE_DESCRIPTION_' + lang):
return self.config.templates[tpl]['TEMPLATE_DESCRIPTION_' + lang]
if self.config.templates[tpl].has_key('TEMPLATE_DESCRIPTION'):
return self.config.templates[tpl]['TEMPLATE_DESCRIPTION']
return _("No template description avalaible")
    def populatetemplates(self):
        """Fill the TCOS_TEMPLATE combo with the available templates and
        select the configured (or forced) default."""
        default_template=self.config.getvalue('TCOS_TEMPLATE')
        # a forced template on disk overrides the configured one
        if os.path.exists(shared.tcosconfig_template):
            default_template=os.path.basename(shared.tcosconfig_template)
        elif os.path.exists(shared.tcos_force_base_template):
            default_template=os.path.basename(shared.tcos_force_base_template)
        print_debug("populatetemplates() default=%s"%default_template)
        # populate template list: column 0 = label, column 1 = filename
        templatelist = gtk.ListStore(str,str)
        templatelist.append( [ "base.conf : " + _("don't use any template") , "base.conf"] )
        i=0
        for tpl in self.config.templates:
            text="%s : %s" %(tpl, self.getTranslatedDescription(tpl))
            # once matched, default_template becomes the row index
            if tpl == default_template:
                default_template=i
                print_debug("populatetemplates() default_template tpl=%s i=%s"%(tpl, i))
            templatelist.append([text,tpl])
            i+=1
        self.TCOS_TEMPLATE.set_model(templatelist)
        if self.TCOS_TEMPLATE.get_text_column() != 0:
            self.TCOS_TEMPLATE.set_text_column(0)
        self.TCOS_TEMPLATE.set_active(default_template+1) # set i+1 because base.conf is 0
def loadsettings(self):
# set default PXE build
self.populate_select(self.TCOS_METHOD, "TCOS_METHOD")
self.TCOS_METHOD.set_active(1)
# configure boot menu
#("TCOS_NETBOOT_MENU", item[1])
#("TCOS_NETBOOT_MENU_VESA", item[2])
default_menu=False
for item in shared.TCOS_MENUS_TYPES:
if self.config.getvalue('TCOS_NETBOOT_MENU') == item[1] and self.config.getvalue('TCOS_NETBOOT_MENU_VESA') == item[2]:
# set menu type to item[0]
if item[0] == '':
widget=self.TCOS_MENU_MODE
default_menu=True
else:
widget=getattr(self, 'TCOS_MENU_MODE' + '_' + item[0])
print_debug("loadsettings() TCOS_MENU_MODE=%s"%gtk.Buildable.get_name(widget))
widget.set_active(True)
print_debug("loadsettings() NETBOOT_HIDE_INSTALL = %s"%item[3])
if default_menu:
print_debug("loadsettings() default menu disable hide_install and hide_local")
self.TCOS_NETBOOT_HIDE_INSTALL.set_sensitive(False)
self.TCOS_NETBOOT_HIDE_LOCAL.set_sensitive(False)
# overwrite method (tcos.conf.nfs use it)
if self.config.getvalue('TCOS_METHOD') != '':
model=self.TCOS_METHOD.get_model()
for i in range(len(model)):
if model[i][0] == self.config.getvalue('TCOS_METHOD'):
self.TCOS_METHOD.set_active(i)
# populate kernel list
kernellist = gtk.ListStore(str)
for kernel in self.config.kernels:
kernellist.append([kernel.split()[0]])
self.TCOS_KERNEL.set_model(kernellist)
if self.TCOS_KERNEL.get_text_column() != 0:
self.TCOS_KERNEL.set_text_column(0)
#set default tcos.conf kernel
model=self.TCOS_KERNEL.get_model()
for i in range(len(model)):
print_debug ("TcosGui::loadsettings() TCOS_KERNEL model[i][0] is '%s' default '%s' KERNEL_FIXED is '%s'"
%(model[i][0], self.config.getvalue("TCOS_KERNEL"), self.config.getvalue("TCOS_KERNEL_FIXED")) )
if self.config.getvalue("TCOS_KERNEL_FIXED") != "":
if model[i][0] == self.config.getvalue("TCOS_KERNEL_FIXED"):
print_debug ("TcosGui::loadsettings() TCOS_KERNEL default is %s, index %d" %( model[i][0] , i ) )
self.TCOS_KERNEL.set_active( i )
elif model[i][0] == self.config.getvalue("TCOS_KERNEL"):
print_debug ("TcosGui::loadsettings() TCOS_KERNEL default is %s, index %d" %( model[i][0] , i ) )
self.TCOS_KERNEL.set_active( i )
# read all tcos.conf and guiconf vars and populate
for exp in self.config.confdata:
#print_debug ( "TcosGui::loadsettings() searching for %s" %(exp) )
value=self.config.confdata[exp]
value=value.replace('"', '')
if value == "":
# empty=False, used to checkbox
value = False
# widget exits??
if hasattr(self, exp):
widget=getattr(self, exp)
else:
if not exp in self.config.ignored_widgets:
print_debug("loadsettings() widget %s don't exists"%exp)
continue
# type of widget
wtype=type(widget)
if wtype == gtk.ComboBoxEntry:
self.populate_select( widget, exp )
if exp == 'TCOS_XORG_VIDEO_DRIVER':
# by default set xorgauto if supported
if self.config.templates.has_key('tcosconfig.conf'):
if not self.config.templates['tcosconfig.conf'].has_key('TCOS_XORG_VIDEO_DRIVER'):
# not found configured video driver
if os.path.isdir('/usr/share/X11/xorg.conf.d/') or \
os.path.isdir('/usr/lib/X11/xorg.conf.d/'):
value='xorgauto'
print_debug("loadsettings() TCOS_XORG_VIDEO_DRIVER not found in tcosconfig.conf, ****force**** 'xorgauto'")
self.set_active_select( widget, exp, value )
elif wtype == gtk.Entry:
#print_debug ( "%s is a Entry, putting value=%s" %(exp, value) )
if value == False: value = ""
widget.set_text(value)
elif wtype == gtk.CheckButton:
#print_debug ( "%s is a CheckButton" %(exp) )
if value:
widget.set_active(1)
else:
widget.set_active(0)
elif wtype == gtk.SpinButton and gtk.Buildable.get_name(widget) == "TCOS_VOLUME":
# change %
if value.find("%") > 0:
# give % value
values=[]
values=value.split('%')
widget.set_value( float(values[0]) )
else:
#give a value between 1-31, change to 1-100
value=float(value)*100/31
widget.set_value( float(value) )
elif wtype == gtk.SpinButton and gtk.Buildable.get_name(widget) != "TCOS_VOLUME":
#print widget
#print gtk.Buildable.get_name(widget)
widget.set_value( int(value) )
elif wtype == gtk.SpinButton and gtk.Buildable.get_name(widget) != "TCOS_MAX_MEM":
widget.set_value( int(value) )
elif wtype == gtk.SpinButton and gtk.Buildable.get_name(widget) != "TCOS_COMPCACHE_PERCENT":
widget.set_value( int(value) )
else:
print_debug( "TcosGui::loadsettings() __ERROR__ unknow %s type %s" %(exp, wtype ) )
if os.path.isfile(shared.config_file_secrets):
try:
fd=file(shared.config_file_secrets, 'r')
except:
return
data=fd.readline()
fd.close()
if data != "\n":
self.config.use_secrets=True
self.TCOS_ADMIN_USER.set_text("")
self.TCOS_ROOT_PASSWD.set_text("")
self.TCOS_ADMIN_USER.set_sensitive(False)
self.TCOS_ROOT_PASSWD.set_sensitive(False)
    # some dialog messages
    def error_msg(self,txt):
        """Show a modal warning dialog with *txt* and log it."""
        d = gtk.MessageDialog(None,
            gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
            gtk.MESSAGE_WARNING, gtk.BUTTONS_OK,
            txt)
        d.run()
        d.destroy()
        print_debug ( "TcosGui::error_msg() ERROR: %s" %(txt) )
    def info_msg(self,txt):
        """Show a modal information dialog with *txt* and log it."""
        d = gtk.MessageDialog(None,
            gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
            gtk.MESSAGE_INFO, gtk.BUTTONS_OK,
            txt)
        d.run()
        d.destroy()
        print_debug ( "TcosGui::info_msg() INFO: %s" %(txt) )
    def exitapp(self, widget):
        """Stop the worker loop (via isfinished) and quit the gtk main loop."""
        print_debug ( "TcosGui::exitapp() Exiting" )
        self.isfinished=True
        gtk.main_quit()
        return
    def run (self):
        """Enter the gtk main loop (blocks until exitapp)."""
        gtk.main()
if __name__ == '__main__':
    # standalone invocation: force debug logging and start the GUI directly
    shared.debug=True
    gui = TcosGui()
    # Run app
    gtk.main()
| gpl-2.0 |
gangadharkadam/vervefrappe | frappe/custom/doctype/customize_form/customize_form.py | 5 | 8017 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Customize Form is a Single DocType used to mask the Property Setter
Thus providing a better UI from user perspective
"""
import frappe, json
from frappe import _
from frappe.utils import cint
from frappe.model.document import Document
from frappe.core.doctype.doctype.doctype import validate_fields_for_doctype
class CustomizeForm(Document):
	# "Customize Form" is a transient Single doctype: instead of editing the
	# standard DocType it records overrides as Property Setters and Custom
	# Fields, leaving the core schema untouched.
	# DocType-level attributes the user may override -> Property Setter type
	doctype_properties = {
		'search_fields': 'Data',
		'sort_field': 'Data',
		'sort_order': 'Data',
		'default_print_format': 'Data',
		'read_only_onload': 'Check',
		'allow_copy': 'Check',
		'max_attachments': 'Int'
	}
	# DocField-level attributes the user may override -> Property Setter type
	docfield_properties = {
		'idx': 'Int',
		'label': 'Data',
		'fieldtype': 'Select',
		'options': 'Text',
		'permlevel': 'Int',
		'width': 'Data',
		'print_width': 'Data',
		'reqd': 'Check',
		'unique': 'Check',
		'ignore_user_permissions': 'Check',
		'in_filter': 'Check',
		'in_list_view': 'Check',
		'hidden': 'Check',
		'print_hide': 'Check',
		'report_hide': 'Check',
		'allow_on_submit': 'Check',
		'depends_on': 'Data',
		'description': 'Text',
		'default': 'Text',
		'precision': 'Select'
	}
	# groups of fieldtypes that may safely be converted into each other
	allowed_fieldtype_change = (('Currency', 'Float', 'Percent'), ('Small Text', 'Data'),
		('Text', 'Text Editor', 'Code'), ('Data', 'Select'), ('Text', 'Small Text'))
	def on_update(self):
		"""Wipe persisted state: this doctype is only an editing surface."""
		frappe.db.sql("delete from tabSingles where doctype='Customize Form'")
		frappe.db.sql("delete from `tabCustomize Form Field`")
	def fetch_to_customize(self):
		"""Load the target doctype's current meta into this form for editing."""
		self.clear_existing_doc()
		if not self.doc_type:
			return
		meta = frappe.get_meta(self.doc_type)
		# doctype properties
		for property in self.doctype_properties:
			self.set(property, meta.get(property))
		for d in meta.get("fields"):
			new_d = {"fieldname": d.fieldname, "is_custom_field": d.get("is_custom_field"), "name": d.name}
			for property in self.docfield_properties:
				new_d[property] = d.get(property)
			self.append("fields", new_d)
		# NOTE doc is sent to clientside by run_method
	def clear_existing_doc(self):
		"""Reset all form values while preserving the selected doc_type."""
		doc_type = self.doc_type
		for fieldname in self.meta.get_valid_columns():
			self.set(fieldname, None)
		for df in self.meta.get_table_fields():
			self.set(df.fieldname, [])
		self.doc_type = doc_type
		self.name = "Customize Form"
	def save_customization(self):
		"""Persist the edits as Property Setters / Custom Fields, validate
		the resulting doctype and reload the form."""
		if not self.doc_type:
			return
		self.set_property_setters()
		self.update_custom_fields()
		self.set_idx_property_setter()
		validate_fields_for_doctype(self.doc_type)
		frappe.msgprint(_("{0} updated").format(_(self.doc_type)))
		frappe.clear_cache(doctype=self.doc_type)
		self.fetch_to_customize()
	def set_property_setters(self):
		"""Write a Property Setter for every doctype/docfield property that
		differs from the current meta; trigger a DB schema update when a
		precision increase requires a column change."""
		meta = frappe.get_meta(self.doc_type)
		# doctype property setters
		for property in self.doctype_properties:
			if self.get(property) != meta.get(property):
				self.make_property_setter(property=property, value=self.get(property),
					property_type=self.doctype_properties[property])
		update_db = False
		for df in self.get("fields"):
			if df.get("__islocal"):
				continue
			meta_df = meta.get("fields", {"fieldname": df.fieldname})
			# custom fields are updated directly, not via property setters
			if not meta_df or meta_df[0].get("is_custom_field"):
				continue
			for property in self.docfield_properties:
				if df.get(property) != meta_df[0].get(property):
					if property == "fieldtype":
						self.validate_fieldtype_change(df, meta_df[0].get(property), df.get(property))
					elif property == "allow_on_submit" and df.get(property):
						frappe.msgprint(_("Row {0}: Not allowed to enable Allow on Submit for standard fields")\
							.format(df.idx))
						continue
					elif property == "precision" and cint(df.get("precision")) > 6 \
						and cint(df.get("precision")) > cint(meta_df[0].get("precision")):
						# precision above the default 6 needs a wider column
						update_db = True
					self.make_property_setter(property=property, value=df.get(property),
						property_type=self.docfield_properties[property], fieldname=df.fieldname)
		if update_db:
			from frappe.model.db_schema import updatedb
			updatedb(self.doc_type)
def update_custom_fields(self):
for df in self.get("fields"):
if df.get("__islocal"):
self.add_custom_field(df)
else:
self.update_in_custom_field(df)
self.delete_custom_fields()
def add_custom_field(self, df):
d = frappe.new_doc("Custom Field")
d.dt = self.doc_type
for property in self.docfield_properties:
d.set(property, df.get(property))
d.insert()
df.fieldname = d.fieldname
def update_in_custom_field(self, df):
meta = frappe.get_meta(self.doc_type)
meta_df = meta.get("fields", {"fieldname": df.fieldname})
if not (meta_df and meta_df[0].get("is_custom_field")):
return
custom_field = frappe.get_doc("Custom Field", meta_df[0].name)
changed = False
for property in self.docfield_properties:
if df.get(property) != custom_field.get(property):
if property == "fieldtype":
self.validate_fieldtype_change(df, meta_df[0].get(property), df.get(property))
custom_field.set(property, df.get(property))
changed = True
if changed:
custom_field.flags.ignore_validate = True
custom_field.save()
def delete_custom_fields(self):
	"""Delete Custom Field documents for fields that were removed from
	the form. Standard fields are never deleted."""
	meta = frappe.get_meta(self.doc_type)
	current_fieldnames = set(d.fieldname for d in self.get("fields"))
	removed_fieldnames = set(d.fieldname for d in meta.get("fields")) - current_fieldnames
	for fieldname in removed_fieldnames:
		meta_field = meta.get("fields", {"fieldname": fieldname})[0]
		# Only custom fields may be removed via Customize Form.
		if meta_field.get("is_custom_field"):
			frappe.delete_doc("Custom Field", meta_field.name)
def set_idx_property_setter(self):
	"""Persist the field ordering as an ``_idx`` property setter whenever
	the user rearranged fields relative to the stored doctype."""
	meta = frappe.get_meta(self.doc_type)
	stored_order = [d.fieldname for d in meta.get("fields")]
	form_order = [d.fieldname for d in self.get("fields")]
	if stored_order != form_order:
		# Store the fieldnames, sorted by their row index, as a JSON list.
		ordered_fieldnames = [d.fieldname for d in sorted(self.get("fields"), key=lambda d: d.idx)]
		self.make_property_setter(property="_idx", value=json.dumps(ordered_fieldnames), property_type="Text")
def make_property_setter(self, property, value, property_type, fieldname=None):
	"""Record *property* = *value* as a Property Setter override.

	Any existing setter for the same property is removed first. If the
	new value matches the standard (uncustomized) definition, no override
	is created at all."""
	self.delete_existing_property_setter(property, fieldname)
	standard_value = self.get_existing_property_value(property, fieldname)
	if standard_value == value:
		# Value matches the standard definition -- no override needed.
		return
	# Create a new property setter; validation is skipped here because it
	# is performed once at the end of the customization run.
	frappe.make_property_setter({
		"doctype": self.doc_type,
		"doctype_or_field": "DocField" if fieldname else "DocType",
		"fieldname": fieldname,
		"property": property,
		"value": value,
		"property_type": property_type
	}, ignore_validate=True)
def delete_existing_property_setter(self, property, fieldname=None):
	"""Delete any existing Property Setter for *property* on this doctype
	(optionally scoped to a single field via *fieldname*).

	Fix: the filter key had been garbled to ``"field_name['']"`` -- a key
	that never matches the actual ``field_name`` column -- so stale
	property setters were not being found and deleted. Restored to
	``"field_name"``.
	"""
	existing_property_setter = frappe.db.get_value("Property Setter", {
		"doc_type": self.doc_type,
		"property": property,
		"field_name": fieldname or ''
	})
	if existing_property_setter:
		frappe.delete_doc("Property Setter", existing_property_setter)
def get_existing_property_value(self, property_name, fieldname=None):
	"""Return the standard (uncustomized) value of *property_name* for the
	doctype, or for one of its fields when *fieldname* is given.

	Returns None when the property is not a column of the DocType table
	(MySQL error 1054, unknown column).

	Fix: replaced the Python-2-only ``except Exception, e`` syntax with
	``except Exception as e``, which is valid on Python 2.6+ and required
	on Python 3.
	"""
	if fieldname:
		property_value = frappe.db.get_value("DocField",
			{"parent": self.doc_type, "fieldname": fieldname}, property_name)
	else:
		try:
			property_value = frappe.db.get_value("DocType", self.doc_type, property_name)
		except Exception as e:
			# MySQL error 1054: unknown column -- the property is not a
			# DocType table column, so there is no standard value.
			if e.args[0] == 1054:
				property_value = None
			else:
				raise
	return property_value
def validate_fieldtype_change(self, df, old_value, new_value):
	"""Raise unless changing the fieldtype from *old_value* to *new_value*
	is one of the allowed conversions listed in
	``self.allowed_fieldtype_change`` (each entry is a group of mutually
	convertible fieldtypes)."""
	for convertible_group in self.allowed_fieldtype_change:
		if old_value in convertible_group and new_value in convertible_group:
			# Conversion is explicitly allowed.
			return
	frappe.throw(_("Fieldtype cannot be changed from {0} to {1} in row {2}").format(old_value, new_value, df.idx))
def reset_to_defaults(self):
	"""Remove every customization for the current doctype and reload.

	Deletes all Property Setter rows for the doctype except the
	'naming_series' override, clears the cached doctype metadata, and
	re-fetches the now-pristine doctype into this Customize Form.
	"""
	if not self.doc_type:
		return
	# NOTE: the naming_series override is preserved intentionally --
	# it is managed separately from form customizations.
	frappe.db.sql("""delete from `tabProperty Setter` where doc_type=%s
		and ifnull(field_name, '')!='naming_series'""", self.doc_type)
	frappe.clear_cache(doctype=self.doc_type)
	self.fetch_to_customize()
| mit |
gimite/personfinder | app/vendors/pyasn1/debug.py | 6 | 3361 | #
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pyasn1/license.html
#
import logging
from pyasn1 import __version__
from pyasn1 import error
from pyasn1.compat.octets import octs2ints
__all__ = ['Debug', 'setLogger', 'hexdump']
# Bit masks selecting which pyasn1 subsystems emit debug output.
flagNone = 0x0000
flagEncoder = 0x0001
flagDecoder = 0x0002
flagAll = 0xffff

# Maps user-facing flag names (as passed to Debug(...)) to bit masks.
flagMap = {
    'none': flagNone,
    'encoder': flagEncoder,
    'decoder': flagDecoder,
    'all': flagAll
}
class Printer(object):
    """Debug-message sink that routes messages through the stdlib
    ``logging`` package at DEBUG severity."""

    # noinspection PyShadowingNames
    def __init__(self, logger=None, handler=None, formatter=None):
        if logger is None:
            # Default to a dedicated 'pyasn1' logger at DEBUG level.
            logger = logging.getLogger('pyasn1')
            logger.setLevel(logging.DEBUG)

        handler = logging.StreamHandler() if handler is None else handler
        formatter = (logging.Formatter('%(asctime)s %(name)s: %(message)s')
                     if formatter is None else formatter)

        handler.setFormatter(formatter)
        handler.setLevel(logging.DEBUG)
        logger.addHandler(handler)

        self.__logger = logger

    def __call__(self, msg):
        self.__logger.debug(msg)

    def __str__(self):
        return '<python logging>'
# logging.NullHandler exists on Python 2.7+/3.1+; provide a no-op
# fallback for older interpreters.
if hasattr(logging, 'NullHandler'):
    NullHandler = logging.NullHandler

else:
    # Python 2.6 and older
    class NullHandler(logging.Handler):
        def emit(self, record):
            # Intentionally discard the record.
            pass
class Debug(object):
    """Parses textual debug-category flags into a bit mask and routes
    debug messages to a :class:`Printer`.

    Instances are callable (forwarding the message to the printer) and
    support bitwise ``&`` against flag masks from ``flagMap``.
    """
    defaultPrinter = Printer()

    def __init__(self, *flags, **options):
        self._flags = flagNone

        # Choose the message sink: a named parent logger, an explicit
        # printer object, or the shared default printer.
        if 'loggerName' in options:
            # route our logs to parent logger
            self._printer = Printer(
                logger=logging.getLogger(options['loggerName']),
                handler=NullHandler()
            )
        elif 'printer' in options:
            self._printer = options.get('printer')
        else:
            self._printer = self.defaultPrinter

        self._printer('running pyasn1 %s, debug flags %s' % (__version__, ', '.join(flags)))

        for flag in flags:
            # A leading '!' or '~' negates (clears) the category.
            negated = flag and flag[0] in ('!', '~')
            if negated:
                flag = flag[1:]
            if flag not in flagMap:
                raise error.PyAsn1Error('bad debug flag %s' % flag)
            if negated:
                self._flags &= ~flagMap[flag]
            else:
                self._flags |= flagMap[flag]
            self._printer("debug category '%s' %s" % (flag, negated and 'disabled' or 'enabled'))

    def __str__(self):
        return 'logger %s, flags %x' % (self._printer, self._flags)

    def __call__(self, msg):
        self._printer(msg)

    def __and__(self, flag):
        return self._flags & flag

    def __rand__(self, flag):
        return flag & self._flags
# Module-wide debug sink; 0 (falsy) means debugging is disabled.
logger = 0


def setLogger(userLogger):
    """Install *userLogger* as the module-wide debug sink; any falsy
    value resets it to 0 (disabled)."""
    global logger
    logger = userLogger or 0
def hexdump(octets):
    """Render *octets* as space-separated uppercase hex pairs, starting a
    new line (prefixed with the 5-digit decimal offset) every 16 bytes."""
    rendered = []
    for offset, value in zip(range(len(octets)), octs2ints(octets)):
        line_prefix = '\n%.5d: ' % offset if offset % 16 == 0 else ''
        rendered.append('%s%.2X' % (line_prefix, value))
    return ' '.join(rendered)
class Scope(object):
    """Tracks a stack of nested scope names, rendered as a dotted path."""

    def __init__(self):
        self._list = []

    def __str__(self):
        return '.'.join(self._list)

    def push(self, token):
        """Enter a nested scope named *token*."""
        self._list.append(token)

    def pop(self):
        """Leave the innermost scope and return its name."""
        return self._list.pop()


# Module-wide scope tracker shared by the debugging machinery.
scope = Scope()
| apache-2.0 |
glaudsonml/kurgan-ai | tools/sqlmap/lib/core/datatype.py | 1 | 2973 | #!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import copy
import types
from lib.core.exception import SqlmapDataException
class AttribDict(dict):
    """
    This class defines the sqlmap object, inheriting from Python data
    type dictionary. Items can be read and written as attributes
    (``foo.bar``) or as keys (``foo["bar"]``) interchangeably.

    >>> foo = AttribDict()
    >>> foo.bar = 1
    >>> foo.bar
    1
    """

    def __init__(self, indict=None, attribute=None):
        if indict is None:
            indict = {}

        # Set any attributes here - before initialisation
        # these remain as normal attributes
        self.attribute = attribute
        dict.__init__(self, indict)
        # Sentinel: once set, __setattr__ routes unknown names into the
        # dict itself instead of instance __dict__ (see __setattr__).
        self.__initialised = True

        # After initialisation, setting attributes
        # is the same as setting an item

    def __getattr__(self, item):
        """
        Maps values to attributes.
        Only called if there *is NOT* an attribute with this name.

        Raises SqlmapDataException (not AttributeError) for missing items.
        """

        try:
            return self.__getitem__(item)
        except KeyError:
            raise SqlmapDataException("unable to access item '%s'" % item)

    def __setattr__(self, item, value):
        """
        Maps attributes to values.
        Only if we are initialised.
        """

        # This test allows attributes to be set in the __init__ method.
        # The name-mangled key "_AttribDict__initialised" is what the
        # self.__initialised assignment above actually stores.
        if "_AttribDict__initialised" not in self.__dict__:
            return dict.__setattr__(self, item, value)

        # Any normal attributes are handled normally
        elif item in self.__dict__:
            dict.__setattr__(self, item, value)

        else:
            self.__setitem__(item, value)

    def __getstate__(self):
        # Pickle support: persist the instance __dict__ alongside dict items.
        return self.__dict__

    def __setstate__(self, dict):
        self.__dict__ = dict

    def __deepcopy__(self, memo):
        retVal = self.__class__()
        memo[id(self)] = retVal

        # Copy plain (non-callable, non-private) instance attributes...
        for attr in dir(self):
            if not attr.startswith('_'):
                value = getattr(self, attr)
                if not isinstance(value, (types.BuiltinFunctionType, types.FunctionType, types.MethodType)):
                    setattr(retVal, attr, copy.deepcopy(value, memo))

        # ...then deep-copy the dict items themselves.
        for key, value in self.items():
            retVal.__setitem__(key, copy.deepcopy(value, memo))

        return retVal
class InjectionDict(AttribDict):
    """Container describing a single detected injection point.

    Fix: the final statement had a dataset artifact (``| apache-2.0 |``)
    fused onto it, making the line unparsable; the artifact is removed.
    """

    def __init__(self):
        AttribDict.__init__(self)

        # Where/what was injected.
        self.place = None
        self.parameter = None
        self.ptype = None

        # Payload boundaries and SQL clause context.
        self.prefix = None
        self.suffix = None
        self.clause = None
        self.notes = set()

        # data is a dict with various stype, each which is a dict with
        # all the information specific for that stype
        self.data = AttribDict()

        # conf is a dict which stores current snapshot of important
        # options used during detection
        self.conf = AttribDict()

        # Back-end fingerprint gathered during detection.
        self.dbms = None
        self.dbms_version = None
        self.os = None
arahuja/generative-tf | generative-tf/models/variational_autoencoder.py | 1 | 7955 | import tensorflow as tf
import numpy as np
from initialization import xavier_glorot_initialization
class VariationalAutoencoder():
    """Variational autoencoder (VAE) with a single-hidden-layer Gaussian
    encoder and Bernoulli decoder, built as a TensorFlow 1.x static graph.

    Fixes in this revision:
    - removed a dataset artifact (``| apache-2.0 |``) fused onto the final
      ``return`` statement, which made the file unparsable;
    - removed a duplicate ``self.batch_size = batch_size`` assignment.

    NOTE(review): ``num_layers`` is accepted but never used by the visible
    code -- kept for interface compatibility; confirm intent with the author.
    """

    def __init__(self,
                 input_dim,
                 latent_dim,
                 hidden_dim=10,
                 batch_size=100,
                 num_layers=0,
                 activation_func=tf.nn.relu,
                 output_activation_func=tf.nn.sigmoid):
        """Build all graph variables for the encoder and decoder.

        Parameters
        ----------
        input_dim : int
            Dimensionality of each input sample.
        latent_dim : int
            Dimensionality of the latent code z.
        hidden_dim : int
            Width of the single hidden layer in encoder and decoder.
        batch_size : int
            Fixed batch size used to shape the input placeholder.
        num_layers : int
            Unused (see class docstring).
        activation_func : callable
            Hidden-layer nonlinearity used by the encoder.
        output_activation_func : callable
            Output nonlinearity stored on the instance.
        """
        self.graph = tf.Graph()
        self.activation_func = activation_func
        self.output_activation_func = output_activation_func
        self.input_dim = input_dim
        self.batch_size = batch_size

        with self.graph.as_default():
            ## Input x variable
            self.x = tf.placeholder(tf.float32, shape=(self.batch_size, input_dim))

            ## Dimension of the latent variables mu/mean and log_variance
            self._latent_dim = latent_dim

            # Encoder weights: input -> hidden.
            self._encoder_W = tf.Variable(xavier_glorot_initialization(input_dim, hidden_dim))
            self._encoder_bias = tf.Variable(tf.zeros([hidden_dim]))

            # Hidden -> latent mean and latent log-variance heads.
            self._mean_encoder = tf.Variable(xavier_glorot_initialization(hidden_dim, latent_dim))
            self._mean_encoder_bias = tf.Variable(tf.zeros([latent_dim]))

            self._log_variance_encoder = tf.Variable(xavier_glorot_initialization(hidden_dim, latent_dim))
            self._log_variance_encoder_bias = tf.Variable(tf.zeros([latent_dim]))

            # Decoder weights: latent -> hidden -> reconstruction mean.
            self._decoder_W = tf.Variable(xavier_glorot_initialization(latent_dim, hidden_dim))
            self._decoder_bias = tf.Variable(tf.zeros([hidden_dim]))

            self._mean_decoder = tf.Variable(xavier_glorot_initialization(hidden_dim, input_dim))
            self._mean_decoder_bias = tf.Variable(tf.zeros([input_dim]))

    def _generate(self,
                  z,
                  activation_func=tf.nn.softplus,
                  output_activation_func=tf.nn.sigmoid):
        """Decode latent samples *z* into reconstruction parameters.

        NOTE(review): uses its own default activations (softplus/sigmoid)
        rather than the ones stored on the instance -- preserved as-is.

        Returns
        -------
        (mean, log_variance) : tuple
            ``log_variance`` is always None for the Bernoulli decoder.
        """
        with self.graph.as_default():
            # Compute the hidden state from latent variables
            h = activation_func(
                tf.matmul(z, self._decoder_W) + self._decoder_bias
            )

            # Compute the reconstruction from hidden state
            mean = output_activation_func(
                tf.matmul(h, self._mean_decoder) + self._mean_decoder_bias
            )

            log_variance = None
            return (mean, log_variance)

    def _encode(self, x):
        """
        Forward step of the variational autoencoder.

        Maps input *x* through the hidden layer to the latent Gaussian
        parameters.

        Returns
        -------
        (latent_mean, latent_log_variance) : tuple of tensors
        """
        with self.graph.as_default():
            h = self.activation_func(
                tf.matmul(x, self._encoder_W) + self._encoder_bias
            )

            latent_mean = self.activation_func(
                tf.matmul(h, self._mean_encoder) + self._mean_encoder_bias
            )

            latent_log_variance = self.activation_func(
                tf.matmul(h, self._log_variance_encoder) + self._log_variance_encoder_bias
            )

            return (latent_mean, latent_log_variance)

    def _evidence_lower_bound(self,
                              monte_carlo_samples=5,
                              importance_weighting=False,
                              tol=1e-5):
        """
        Variational objective function.

        ELBO = E(log joint log-likelihood) - E(log q)
             = MC estimate of log joint - Entropy(q)

        Parameters
        ----------
        monte_carlo_samples : int
            Number of z samples drawn per datapoint.
        importance_weighting : bool
            If True, compute the IWAE-style importance-weighted bound.
        tol : float
            Numerical floor inside log() to avoid log(0).

        Returns
        -------
        objective : scalar tensor (to be maximized)
        """
        with self.graph.as_default():
            # Replicate the batch so each datapoint gets several z samples.
            x_resampled = tf.tile(self.x, tf.constant([monte_carlo_samples, 1]))

            # Forward pass of data into latent space
            mean_encoder, log_variance_encoder = self._encode(x_resampled)

            random_noise = tf.random_normal(
                (self.batch_size * monte_carlo_samples, self._latent_dim), 0, 1, dtype=tf.float32)

            # Reparameterization trick of re-scaling/transforming random error
            std_dev = tf.sqrt(tf.exp(log_variance_encoder))
            z = mean_encoder + std_dev * random_noise

            # Reconstruction/decoding of latent space
            mean_decoder, _ = self._generate(z)

            # Bernoulli log-likelihood reconstruction
            # TODO: other distribution types
            def bernoulli_log_joint(x):
                return tf.reduce_sum(
                    (x * tf.log(tol + mean_decoder))
                    + ((1 - x) * tf.log(tol + 1 - mean_decoder)),
                    1)

            log2pi = tf.log(2.0 * np.pi)

            def gaussian_likelihood(data, mean, log_variance):
                """Log-likelihood of data given ~ N(mean, exp(log_variance))

                Parameters
                ----------
                data :
                    Samples from Gaussian centered at mean
                mean :
                    Mean of the Gaussian distribution
                log_variance :
                    Log variance of the Gaussian distribution

                Returns
                -------
                log_likelihood : float
                """
                num_components = data.get_shape().as_list()[1]
                variance = tf.exp(log_variance)
                log_likelihood = (
                    -(log2pi * (num_components / 2.0))
                    - tf.reduce_sum(
                        (tf.square(data - mean) / (2 * variance)) + (log_variance / 2.0),
                        1)
                )
                return log_likelihood

            def standard_gaussian_likelihood(data):
                """Log-likelihood of data given ~ N(0, 1)

                Parameters
                ----------
                data :
                    Samples from Gaussian centered at 0

                Returns
                -------
                log_likelihood : float
                """
                num_components = data.get_shape().as_list()[1]
                log_likelihood = (
                    -(log2pi * (num_components / 2.0))
                    - tf.reduce_sum(tf.square(data) / 2.0, 1)
                )
                return log_likelihood

            log_p_given_z = bernoulli_log_joint(x_resampled)

            if importance_weighting:
                log_q_z = gaussian_likelihood(z, mean_encoder, log_variance_encoder)
                log_p_z = standard_gaussian_likelihood(z)
                regularization_term = log_p_z - log_q_z
            else:
                # Analytic solution to KL(q_z | p_z)
                p_z_q_z_kl_divergence = \
                    -0.5 * tf.reduce_sum(1
                                         + log_variance_encoder
                                         - tf.square(mean_encoder)
                                         - tf.exp(log_variance_encoder), 1)
                regularization_term = -p_z_q_z_kl_divergence

            # Regroup the flat (batch * samples) axis into (batch, samples).
            log_p_given_z_mc = tf.reshape(log_p_given_z,
                                          [self.batch_size, monte_carlo_samples])
            regularization_term_mc = tf.reshape(regularization_term,
                                                [self.batch_size, monte_carlo_samples])

            log_weights = log_p_given_z_mc + regularization_term_mc

            if importance_weighting:
                # Need to compute normalization constant for weights, which is
                # log (sum (exp(log_weights))). Use the log-sum-exp trick for
                # numerical stability.
                wmax = tf.reduce_max(log_weights, 1, keep_dims=True)
                # w_i = p_x / q_z, log_wi = log_p_joint - log_qz
                # log (1/k * sum(exp(log w_i)))
                weights_iw = tf.log(tf.reduce_mean(tf.exp(log_weights - wmax), 1))
                objective = tf.reduce_mean(wmax) + tf.reduce_mean(weights_iw)
            else:
                objective = tf.reduce_mean(log_weights)

            return objective
ahmadiga/min_edx | common/lib/xmodule/xmodule/modulestore/__init__.py | 11 | 54817 | """
This module provides an abstraction for working with XModuleDescriptors
that are stored in a database an accessible using their Location as an identifier
"""
import logging
import re
import json
import datetime
from pytz import UTC
from collections import defaultdict
import collections
from contextlib import contextmanager
import threading
from operator import itemgetter
from sortedcontainers import SortedListWithKey
from abc import ABCMeta, abstractmethod
from contracts import contract, new_contract
from xblock.plugin import default_select
from .exceptions import InvalidLocationError, InsufficientSpecificationError
from xmodule.errortracker import make_error_tracker
from xmodule.assetstore import AssetMetadata
from opaque_keys.edx.keys import CourseKey, UsageKey, AssetKey
from opaque_keys.edx.locations import Location # For import backwards compatibility
from xblock.runtime import Mixologist
from xblock.core import XBlock
log = logging.getLogger('edx.modulestore')

# Register opaque-key types with PyContracts so the @contract decorators
# further down can reference them by name.
new_contract('CourseKey', CourseKey)
new_contract('AssetKey', AssetKey)
new_contract('AssetMetadata', AssetMetadata)
new_contract('XBlock', XBlock)

# Root export filenames for library and course OLX trees.
LIBRARY_ROOT = 'library.xml'
COURSE_ROOT = 'course.xml'
class ModuleStoreEnum(object):
    """
    A class to encapsulate common constants that are used with the various modulestores.
    """

    class Type(object):
        """
        The various types of modulestores provided
        """
        split = 'split'
        mongo = 'mongo'
        xml = 'xml'

    class RevisionOption(object):
        """
        Revision constants to use for Module Store operations
        Note: These values are passed into store APIs and only used at run time
        """
        # both DRAFT and PUBLISHED versions are queried, with preference to DRAFT versions
        draft_preferred = 'rev-opt-draft-preferred'

        # only DRAFT versions are queried and no PUBLISHED versions
        draft_only = 'rev-opt-draft-only'

        # only PUBLISHED versions are queried and no DRAFT versions
        published_only = 'rev-opt-published-only'

        # all revisions are queried
        all = 'rev-opt-all'

    class Branch(object):
        """
        Branch constants to use for stores, such as Mongo, that have only 2 branches: DRAFT and PUBLISHED
        Note: These values are taken from server configuration settings, so should not be changed without alerting DevOps
        """
        draft_preferred = 'draft-preferred'
        published_only = 'published-only'

    class BranchName(object):
        """
        Branch constants to use for stores, such as Split, that have named branches
        """
        draft = 'draft-branch'
        published = 'published-branch'
        library = 'library'

    class UserID(object):
        """
        Values for user ID defaults
        """
        # Note: we use negative values here to (try to) not collide
        # with user identifiers provided by actual user services.

        # user ID to use for all management commands
        mgmt_command = -1

        # user ID to use for primitive commands
        primitive_command = -2

        # user ID to use for tests that do not have a django user available
        test = -3

        # user ID for automatic update by the system
        system = -4

    class SortOrder(object):
        """
        Values for sorting asset metadata.
        """
        ascending = 1
        descending = 2
class BulkOpsRecord(object):
    """
    Reference-counted record tracking how deeply nested the current bulk
    write operation is, plus flags noting what happened during it.
    """

    def __init__(self):
        # Current nesting depth; zero means no bulk operation is active.
        self._active_count = 0
        # Set when a publish occurred inside the bulk operation.
        self.has_publish_item = False
        # Set when a library update occurred inside the bulk operation.
        self.has_library_updated_item = False

    @property
    def active(self):
        """
        Whether any level of this bulk write is currently in progress.
        """
        return self._active_count > 0

    @property
    def is_root(self):
        """
        Whether we are at the outermost (first) level of nesting.
        """
        return self._active_count == 1

    def nest(self):
        """
        Enter one more level of nesting for this bulk write operation.
        """
        self._active_count += 1

    def unnest(self):
        """
        Leave one level of nesting of the bulk write operation.
        """
        self._active_count -= 1
class ActiveBulkThread(threading.local):
    """
    Add the expected vars to the thread.

    Subclasses threading.local so each thread sees its own, independent
    set of active bulk-operation records.
    """
    def __init__(self, bulk_ops_record_type, **kwargs):
        super(ActiveBulkThread, self).__init__(**kwargs)
        # Maps course key -> bulk-ops record, creating records on demand.
        self.records = defaultdict(bulk_ops_record_type)
class BulkOperationsMixin(object):
    """
    This implements the :meth:`bulk_operations` modulestore semantics which handles nested invocations

    In particular, it implements :meth:`_begin_bulk_operation` and
    :meth:`_end_bulk_operation` to provide the external interface

    Internally, this mixin records the set of all active bulk operations (keyed on the active course),
    and only writes those values when :meth:`_end_bulk_operation` is called.
    If a bulk write operation isn't active, then the changes are immediately written to the underlying
    mongo_connection.
    """
    def __init__(self, *args, **kwargs):
        super(BulkOperationsMixin, self).__init__(*args, **kwargs)
        # Thread-local registry of per-course bulk-op records.
        self._active_bulk_ops = ActiveBulkThread(self._bulk_ops_record_type)
        # Optional course-event signal dispatcher; set by the mixing class.
        self.signal_handler = None

    @contextmanager
    def bulk_operations(self, course_id, emit_signals=True):
        """
        A context manager for notifying the store of bulk operations. This affects only the current thread.

        In the case of Mongo, it temporarily disables refreshing the metadata inheritance tree
        until the bulk operation is completed.
        """
        try:
            self._begin_bulk_operation(course_id)
            yield
        finally:
            # Always end the bulk op, even if the body raised.
            self._end_bulk_operation(course_id, emit_signals)

    # the relevant type of bulk_ops_record for the mixin (overriding classes should override
    # this variable)
    _bulk_ops_record_type = BulkOpsRecord

    def _get_bulk_ops_record(self, course_key, ignore_case=False):
        """
        Return the :class:`.BulkOpsRecord` for this course.
        """
        if course_key is None:
            # No course -> a throwaway (never-active) record.
            return self._bulk_ops_record_type()

        # Retrieve the bulk record based on matching org/course/run (possibly ignoring case)
        if ignore_case:
            for key, record in self._active_bulk_ops.records.iteritems():
                # Shortcut: check basic equivalence for cases where org/course/run might be None.
                if key == course_key or (
                        key.org.lower() == course_key.org.lower() and
                        key.course.lower() == course_key.course.lower() and
                        key.run.lower() == course_key.run.lower()
                ):
                    return record

        # Records are keyed branch-agnostically.
        return self._active_bulk_ops.records[course_key.for_branch(None)]

    @property
    def _active_records(self):
        """
        Yield all active (CourseLocator, BulkOpsRecord) tuples.
        """
        for course_key, record in self._active_bulk_ops.records.iteritems():
            if record.active:
                yield (course_key, record)

    def _clear_bulk_ops_record(self, course_key):
        """
        Clear the record for this course
        """
        if course_key.for_branch(None) in self._active_bulk_ops.records:
            del self._active_bulk_ops.records[course_key.for_branch(None)]

    def _start_outermost_bulk_operation(self, bulk_ops_record, course_key):
        """
        The outermost nested bulk_operation call: do the actual begin of the bulk operation.

        Implementing classes must override this method; otherwise, the bulk operations are a noop
        """
        pass

    def _begin_bulk_operation(self, course_key):
        """
        Begin a bulk operation on course_key.
        """
        bulk_ops_record = self._get_bulk_ops_record(course_key)

        # Increment the number of active bulk operations (bulk operations
        # on the same course can be nested)
        bulk_ops_record.nest()

        # If this is the highest level bulk operation, then initialize it
        if bulk_ops_record.is_root:
            self._start_outermost_bulk_operation(bulk_ops_record, course_key)

    def _end_outermost_bulk_operation(self, bulk_ops_record, structure_key):
        """
        The outermost nested bulk_operation call: do the actual end of the bulk operation.

        Implementing classes must override this method; otherwise, the bulk operations are a noop
        """
        pass

    def _end_bulk_operation(self, structure_key, emit_signals=True):
        """
        End the active bulk operation on structure_key (course or library key).
        """
        # If no bulk op is active, return
        bulk_ops_record = self._get_bulk_ops_record(structure_key)
        if not bulk_ops_record.active:
            return

        # Send the pre-publish signal within the context of the bulk operation.
        # Writes performed by signal handlers will be persisted when the bulk
        # operation ends.
        if emit_signals and bulk_ops_record.is_root:
            self.send_pre_publish_signal(bulk_ops_record, structure_key)

        bulk_ops_record.unnest()

        # If this wasn't the outermost context, then don't close out the
        # bulk operation.
        if bulk_ops_record.active:
            return

        dirty = self._end_outermost_bulk_operation(bulk_ops_record, structure_key)

        # The bulk op has ended. However, the signal tasks below still need to use the
        # built-up bulk op information (if the signals trigger tasks in the same thread).
        # So re-nest until the signals are sent.
        bulk_ops_record.nest()

        if emit_signals and dirty:
            self.send_bulk_published_signal(bulk_ops_record, structure_key)
            self.send_bulk_library_updated_signal(bulk_ops_record, structure_key)

        # Signals are sent. Now unnest and clear the bulk op for good.
        bulk_ops_record.unnest()
        self._clear_bulk_ops_record(structure_key)

    def _is_in_bulk_operation(self, course_key, ignore_case=False):
        """
        Return whether a bulk operation is active on `course_key`.
        """
        return self._get_bulk_ops_record(course_key, ignore_case).active

    def send_pre_publish_signal(self, bulk_ops_record, course_id):
        """
        Send a signal just before items are published in the course.
        """
        signal_handler = getattr(self, "signal_handler", None)
        if signal_handler and bulk_ops_record.has_publish_item:
            signal_handler.send("pre_publish", course_key=course_id)

    def send_bulk_published_signal(self, bulk_ops_record, course_id):
        """
        Sends out the signal that items have been published from within this course.
        """
        if self.signal_handler and bulk_ops_record.has_publish_item:
            # We remove the branch, because publishing always means copying from draft to published
            self.signal_handler.send("course_published", course_key=course_id.for_branch(None))
            bulk_ops_record.has_publish_item = False

    def send_bulk_library_updated_signal(self, bulk_ops_record, library_id):
        """
        Sends out the signal that library have been updated.
        """
        if self.signal_handler and bulk_ops_record.has_library_updated_item:
            self.signal_handler.send("library_updated", library_key=library_id)
            bulk_ops_record.has_library_updated_item = False
class EditInfo(object):
    """
    Encapsulates the editing info of a block.

    Fix: the inequality hook was misspelled ``__neq__``, which Python never
    invokes, so ``!=`` silently fell back to identity comparison. Renamed
    to ``__ne__`` so ``!=`` is the true inverse of ``__eq__``.
    """
    # All persisted attribute names, in storage order.
    _STORABLE_ATTRS = ('previous_version', 'update_version', 'source_version',
                       'edited_on', 'edited_by', 'original_usage',
                       'original_usage_version')

    def __init__(self, **kwargs):
        self.from_storable(kwargs)

        # For details, see caching_descriptor_system.py get_subtree_edited_by/on.
        self._subtree_edited_on = kwargs.get('_subtree_edited_on', None)
        self._subtree_edited_by = kwargs.get('_subtree_edited_by', None)

    def to_storable(self):
        """
        Serialize to a Mongo-storable format.
        """
        return {attr: getattr(self, attr) for attr in self._STORABLE_ATTRS}

    def from_storable(self, edit_info):
        """
        De-serialize from Mongo-storable format to an object.

        Attribute meanings:
        - previous_version: guid of the structure which previously changed this
          XBlock (the previous value of 'update_version').
        - update_version: guid of the structure where this XBlock got its
          current field values; may point to a structure not in this
          structure's history (e.g., a draft branch it was published from).
        - source_version: guid of the source structure, if any.
        - edited_on: datetime when this XBlock's fields last changed.
        - edited_by: user ID which changed this XBlock last.
        - original_usage / original_usage_version: if copied from a library
          via copy_from_template, point to the original library block
          (for analytics).
        """
        # All storable attributes default to None when absent.
        for attr in self._STORABLE_ATTRS:
            setattr(self, attr, edit_info.get(attr, None))

    def __repr__(self):
        # pylint: disable=bad-continuation, redundant-keyword-arg
        return ("{classname}(previous_version={self.previous_version}, "
                "update_version={self.update_version}, "
                "source_version={source_version}, "
                "edited_on={self.edited_on}, "
                "edited_by={self.edited_by}, "
                "original_usage={self.original_usage}, "
                "original_usage_version={self.original_usage_version}, "
                "_subtree_edited_on={self._subtree_edited_on}, "
                "_subtree_edited_by={self._subtree_edited_by})").format(
            self=self,
            classname=self.__class__.__name__,
            source_version="UNSET" if self.source_version is None else self.source_version,
        )  # pylint: disable=bad-continuation

    def __eq__(self, edit_info):
        """
        Two EditInfo instances are equal iff their storable representations
        are equal.
        """
        return self.to_storable() == edit_info.to_storable()

    def __ne__(self, edit_info):
        """
        Standard inverse of __eq__ (was misspelled ``__neq__``).
        """
        return not self == edit_info
class BlockData(object):
    """
    Wrap the block data in an object instead of using a straight Python dictionary.
    Allows the storing of meta-information about a structure that doesn't persist along with
    the structure itself.

    Fix: the inequality hook was misspelled ``__neq__``, which Python never
    invokes, so ``!=`` silently fell back to identity comparison. Renamed
    to ``__ne__`` so ``!=`` is the true inverse of ``__eq__``.
    """
    def __init__(self, **kwargs):
        # Has the definition been loaded?
        self.definition_loaded = False
        self.from_storable(kwargs)

    def to_storable(self):
        """
        Serialize to a Mongo-storable format.
        """
        return {
            'fields': self.fields,
            'block_type': self.block_type,
            'definition': self.definition,
            'defaults': self.defaults,
            'edit_info': self.edit_info.to_storable()
        }

    def from_storable(self, block_data):
        """
        De-serialize from Mongo-storable format to an object.
        """
        # Contains the Scope.settings and 'children' field values.
        # 'children' are stored as a list of (block_type, block_id) pairs.
        self.fields = block_data.get('fields', {})

        # XBlock type ID.
        self.block_type = block_data.get('block_type', None)

        # DB id of the record containing the content of this XBlock.
        self.definition = block_data.get('definition', None)

        # Scope.settings default values copied from a template block (used e.g. when
        # blocks are copied from a library to a course)
        self.defaults = block_data.get('defaults', {})

        # EditInfo object containing all versioning/editing data.
        self.edit_info = EditInfo(**block_data.get('edit_info', {}))

    def __repr__(self):
        # pylint: disable=bad-continuation, redundant-keyword-arg
        return ("{classname}(fields={self.fields}, "
                "block_type={self.block_type}, "
                "definition={self.definition}, "
                "definition_loaded={self.definition_loaded}, "
                "defaults={self.defaults}, "
                "edit_info={self.edit_info})").format(
            self=self,
            classname=self.__class__.__name__,
        )  # pylint: disable=bad-continuation

    def __eq__(self, block_data):
        """
        Two BlockData objects are equal iff all their attributes are equal.
        """
        attrs = ['fields', 'block_type', 'definition', 'defaults', 'edit_info']
        return all(getattr(self, attr) == getattr(block_data, attr) for attr in attrs)

    def __ne__(self, block_data):
        """
        Standard inverse of __eq__ (was misspelled ``__neq__``).
        """
        return not self == block_data
# Register BlockData with PyContracts so @contract annotations below can use it.
new_contract('BlockData', BlockData)
class IncorrectlySortedList(Exception):
    """
    Raised by SortedAssetList.find() when the list is keyed on something
    other than filename, making filename-based lookup impossible.
    """
    pass
class SortedAssetList(SortedListWithKey):
    """
    List of assets that is sorted based on an asset attribute.

    Defaults to sorting by filename when no explicit sort key is given;
    only a filename-sorted list supports find().
    """
    def __init__(self, **kwargs):
        self.filename_sort = False
        key_func = kwargs.get('key', None)
        if key_func is None:
            # No key supplied -> sort by filename and remember that fact,
            # since find() below relies on it.
            kwargs['key'] = itemgetter('filename')
            self.filename_sort = True
        super(SortedAssetList, self).__init__(**kwargs)

    @contract(asset_id=AssetKey)
    def find(self, asset_id):
        """
        Find the index of a particular asset in the list. This method is only functional for lists
        sorted by filename. If the list is sorted on any other key, find() raises an
        IncorrectlySortedList exception.

        Returns: Index of asset, if found. None if not found.
        """
        # Don't attempt to find an asset by filename in a list that's not sorted by filename.
        if not self.filename_sort:
            raise IncorrectlySortedList()
        # See if this asset already exists by checking the external_filename.
        # Studio doesn't currently support using multiple course assets with the same filename.
        # So use the filename as the unique identifier.
        idx = None
        # bisect_left/bisect_right straddle the asset iff it is present.
        idx_left = self.bisect_left({'filename': asset_id.path})
        idx_right = self.bisect_right({'filename': asset_id.path})
        if idx_left != idx_right:
            # Asset was found in the list.
            idx = idx_left
        return idx

    @contract(asset_md=AssetMetadata)
    def insert_or_update(self, asset_md):
        """
        Insert asset metadata if asset is not present. Update asset metadata if asset is already present.
        """
        metadata_to_insert = asset_md.to_storable()
        asset_idx = self.find(asset_md.asset_id)
        if asset_idx is None:
            # Add new metadata sorted into the list.
            self.add(metadata_to_insert)
        else:
            # Replace existing metadata.
            self[asset_idx] = metadata_to_insert
class ModuleStoreAssetBase(object):
    """
    The methods for accessing assets and their metadata.
    """

    def _find_course_asset(self, asset_key):
        """
        Returns same as _find_course_assets plus the index to the given asset or None. Does not convert
        to AssetMetadata; thus, is internal.

        Arguments:
            asset_key (AssetKey): what to look for

        Returns:
            Tuple of:
            - AssetMetadata[] for all assets of the given asset_key's type
            - the index of asset in list (None if asset does not exist)
        """
        course_assets = self._find_course_assets(asset_key.course_key)
        all_assets = SortedAssetList(iterable=[])
        # Assets should be pre-sorted, so add them efficiently without sorting.
        # extend() will raise a ValueError if the passed-in list is not sorted.
        all_assets.extend(course_assets.setdefault(asset_key.block_type, []))
        idx = all_assets.find(asset_key)
        return course_assets, idx

    @contract(asset_key='AssetKey')
    def find_asset_metadata(self, asset_key, **kwargs):
        """
        Find the metadata for a particular course asset.

        Arguments:
            asset_key (AssetKey): key containing original asset filename

        Returns:
            asset metadata (AssetMetadata) -or- None if not found
        """
        course_assets, asset_idx = self._find_course_asset(asset_key)
        if asset_idx is None:
            return None
        # Re-hydrate the raw storable dict into an AssetMetadata object.
        mdata = AssetMetadata(asset_key, asset_key.path, **kwargs)
        all_assets = course_assets[asset_key.asset_type]
        mdata.from_storable(all_assets[asset_idx])
        return mdata

    @contract(
        course_key='CourseKey', asset_type='None | basestring',
        start='int | None', maxresults='int | None', sort='tuple(str,(int,>=1,<=2))|None'
    )
    def get_all_asset_metadata(self, course_key, asset_type, start=0, maxresults=-1, sort=None, **kwargs):
        """
        Returns a list of asset metadata for all assets of the given asset_type in the course.

        Args:
            course_key (CourseKey): course identifier
            asset_type (str): the block_type of the assets to return. If None, return assets of all types.
            start (int): optional - start at this asset number. Zero-based!
            maxresults (int): optional - return at most this many, -1 means no limit
            sort (array): optional - None means no sort
                (sort_by (str), sort_order (str))
                sort_by - one of 'uploadDate' or 'displayname'
                sort_order - one of SortOrder.ascending or SortOrder.descending

        Returns:
            List of AssetMetadata objects.
        """
        course_assets = self._find_course_assets(course_key)

        # Determine the proper sort - with defaults of ('displayname', SortOrder.ascending).
        key_func = None
        sort_order = ModuleStoreEnum.SortOrder.ascending
        if sort:
            if sort[0] == 'uploadDate':
                key_func = lambda x: x['edit_info']['edited_on']
            if sort[1] == ModuleStoreEnum.SortOrder.descending:
                sort_order = ModuleStoreEnum.SortOrder.descending

        if asset_type is None:
            # Add assets of all types to the sorted list.
            all_assets = SortedAssetList(iterable=[], key=key_func)
            for asset_type, val in course_assets.iteritems():
                all_assets.update(val)
        else:
            # Add assets of a single type to the sorted list.
            all_assets = SortedAssetList(iterable=course_assets.get(asset_type, []), key=key_func)
        num_assets = len(all_assets)

        # Compute the half-open [start_idx, end_idx) slice for pagination.
        start_idx = start
        end_idx = min(num_assets, start + maxresults)
        if maxresults < 0:
            # No limit on the results.
            end_idx = num_assets

        step_incr = 1
        if sort_order == ModuleStoreEnum.SortOrder.descending:
            # Flip the indices and iterate backwards. With no limit, end_idx
            # becomes (num_assets - 1) - num_assets == -1, so the backward
            # xrange still reaches index 0.
            step_incr = -1
            start_idx = (num_assets - 1) - start_idx
            end_idx = (num_assets - 1) - end_idx

        ret_assets = []
        for idx in xrange(start_idx, end_idx, step_incr):
            raw_asset = all_assets[idx]
            # Rebuild the full AssetKey from the stored type/filename fields.
            asset_key = course_key.make_asset_key(raw_asset['asset_type'], raw_asset['filename'])
            new_asset = AssetMetadata(asset_key)
            new_asset.from_storable(raw_asset)
            ret_assets.append(new_asset)
        return ret_assets

    # pylint: disable=unused-argument
    def check_supports(self, course_key, method):
        """
        Verifies that a modulestore supports a particular method.

        Some modulestores may differ based on the course_key, such
        as mixed (since it has to find the underlying modulestore),
        so it's required as part of the method signature.
        """
        return hasattr(self, method)
class ModuleStoreAssetWriteInterface(ModuleStoreAssetBase):
    """
    The write operations for assets and asset metadata.
    """

    def _save_assets_by_type(self, course_key, asset_metadata_list, course_assets, user_id, import_only):
        """
        Common private method that saves/updates asset metadata items in the internal modulestore
        structure used to store asset metadata items.

        Returns:
            dict mapping asset_type -> SortedAssetList of saved storable metadata dicts.
        """
        # Lazily create a sorted list if not already created.
        # NOTE(review): the factory lambda relies on late binding of ``asset_type``:
        # the name is assigned in the loop below immediately before
        # ``assets_by_type[asset_type]`` is first accessed, so the new
        # SortedAssetList is seeded with the assets already stored for that type.
        # Fragile, but correct in this access order.
        assets_by_type = defaultdict(lambda: SortedAssetList(iterable=course_assets.get(asset_type, [])))
        for asset_md in asset_metadata_list:
            if asset_md.asset_id.course_key != course_key:
                # Skip (and warn about) assets that belong to a different course.
                # pylint: disable=logging-format-interpolation
                log.warning("Asset's course {} does not match other assets for course {} - not saved.".format(
                    asset_md.asset_id.course_key, course_key
                ))
                continue
            if not import_only:
                # Stamp edit info; a raw import preserves the original edit info.
                asset_md.update({'edited_by': user_id, 'edited_on': datetime.datetime.now(UTC)})
            asset_type = asset_md.asset_id.asset_type
            all_assets = assets_by_type[asset_type]
            all_assets.insert_or_update(asset_md)
        return assets_by_type

    @contract(asset_metadata='AssetMetadata')
    def save_asset_metadata(self, asset_metadata, user_id, import_only):
        """
        Saves the asset metadata for a particular course's asset.

        Arguments:
            asset_metadata (AssetMetadata): data about the course asset data
            user_id (int): user ID saving the asset metadata
            import_only (bool): True if importing without editing, False if editing

        Returns:
            True if metadata save was successful, else False
        """
        raise NotImplementedError()

    @contract(asset_metadata_list='list(AssetMetadata)')
    def save_asset_metadata_list(self, asset_metadata_list, user_id, import_only):
        """
        Saves a list of asset metadata for a particular course's asset.

        Arguments:
            asset_metadata_list (list(AssetMetadata)): data about the course asset data
            user_id (int): user ID saving the asset metadata
            import_only (bool): True if importing without editing, False if editing

        Returns:
            True if metadata save was successful, else False
        """
        raise NotImplementedError()

    def set_asset_metadata_attrs(self, asset_key, attrs, user_id):
        """
        Base method to over-ride in modulestore.
        """
        raise NotImplementedError()

    def delete_asset_metadata(self, asset_key, user_id):
        """
        Base method to over-ride in modulestore.
        """
        raise NotImplementedError()

    @contract(asset_key='AssetKey', attr=str)
    def set_asset_metadata_attr(self, asset_key, attr, value, user_id):
        """
        Add/set the given attr on the asset at the given location. Value can be any type which pymongo accepts.

        Arguments:
            asset_key (AssetKey): asset identifier
            attr (str): which attribute to set
            value: the value to set it to (any type pymongo accepts such as datetime, number, string)
            user_id (int): user ID saving the asset metadata

        Raises:
            ItemNotFoundError if no such item exists
            AttributeError is attr is one of the build in attrs.
        """
        # Convenience wrapper: delegate to the multi-attribute setter.
        return self.set_asset_metadata_attrs(asset_key, {attr: value}, user_id)

    @contract(source_course_key='CourseKey', dest_course_key='CourseKey')
    def copy_all_asset_metadata(self, source_course_key, dest_course_key, user_id):
        """
        Copy all the course assets from source_course_key to dest_course_key.

        NOTE: unlike get_all_asset_metadata, this does not take an asset type because
        this function is intended for things like cloning or exporting courses not for
        clients to list assets.

        Arguments:
            source_course_key (CourseKey): identifier of course to copy from
            dest_course_key (CourseKey): identifier of course to copy to
            user_id (int): user ID copying the asset metadata
        """
        pass
# pylint: disable=abstract-method
class ModuleStoreRead(ModuleStoreAssetBase):
    """
    An abstract interface for a database backend that stores XModuleDescriptor
    instances and extends read-only functionality.
    """

    __metaclass__ = ABCMeta

    @abstractmethod
    def has_item(self, usage_key):
        """
        Returns True if usage_key exists in this ModuleStore.
        """
        pass

    @abstractmethod
    def get_item(self, usage_key, depth=0, using_descriptor_system=None, **kwargs):
        """
        Returns an XModuleDescriptor instance for the item at location.

        If any segment of the location is None except revision, raises
            xmodule.modulestore.exceptions.InsufficientSpecificationError
        If no object is found at that location, raises
            xmodule.modulestore.exceptions.ItemNotFoundError

        usage_key: A :class:`.UsageKey` subclass instance

        depth (int): An argument that some module stores may use to prefetch
            descendents of the queried modules for more efficient results later
            in the request. The depth is counted in the number of calls to
            get_children() to cache. None indicates to cache all descendents
        """
        pass

    @abstractmethod
    def get_course_errors(self, course_key):
        """
        Return a list of (msg, exception-or-None) errors that the modulestore
        encountered when loading the course at course_id.

        Raises the same exceptions as get_item if the location isn't found or
        isn't fully specified.

        Args:
            course_key (:class:`.CourseKey`): The course to check for errors
        """
        pass

    @abstractmethod
    def get_items(self, course_id, qualifiers=None, **kwargs):
        """
        Returns a list of XModuleDescriptor instances for the items
        that match location. Any element of location that is None is treated
        as a wildcard that matches any value

        location: Something that can be passed to Location
        """
        pass

    @contract(block='XBlock | BlockData | dict', qualifiers=dict)
    def _block_matches(self, block, qualifiers):
        """
        Return True or False depending on whether the field value (block contents)
        matches the qualifiers as per get_items.

        NOTE: Method only finds directly set value matches - not inherited nor default value matches.

        For substring matching:
            pass a regex object.
        For arbitrary function comparison such as date time comparison:
            pass the function as in start=lambda x: x < datetime.datetime(2014, 1, 1, 0, tzinfo=pytz.UTC)

        Args:
            block (dict, XBlock, or BlockData): either the BlockData (transformed from the db) -or-
                a dict (from BlockData.fields or get_explicitly_set_fields_by_scope) -or-
                the xblock.fields() value -or-
                the XBlock from which to get the 'fields' value.
            qualifiers (dict): {field: value} search pairs.
        """
        # Normalize the three accepted input forms to (xblock-or-None, fields-mapping).
        if isinstance(block, XBlock):
            # If an XBlock is passed-in, just match its fields.
            xblock, fields = (block, block.fields)
        elif isinstance(block, BlockData):
            # BlockData is an object - compare its attributes in dict form.
            xblock, fields = (None, block.__dict__)
        else:
            xblock, fields = (None, block)

        def _is_set_on(key):
            """
            Is this key set in fields? (return tuple of boolean and value). A helper which can
            handle fields either being the json doc or xblock fields. Is inner function to restrict
            use and to access local vars.
            """
            if key not in fields:
                return False, None
            field = fields[key]
            if xblock is not None:
                # XBlock path: distinguish "explicitly set" from defaulted fields.
                return field.is_set_on(block), getattr(xblock, key)
            else:
                return True, field

        for key, criteria in qualifiers.iteritems():
            is_set, value = _is_set_on(key)
            # A {'$exists': bool} criterion matches when the requested set-state
            # equals the actual set-state; no value comparison is needed then.
            if isinstance(criteria, dict) and '$exists' in criteria and criteria['$exists'] == is_set:
                continue
            if not is_set:
                return False
            if not self._value_matches(value, criteria):
                return False
        return True

    def _value_matches(self, target, criteria):
        """
        helper for _block_matches: does the target (field value) match the criteria?

        If target is a list, do any of the list elements meet the criteria
        If the criteria is a regex, does the target match it?
        If the criteria is a function, does invoking it on the target yield something truthy?
        If criteria is a dict {($nin|$in): []}, then do (none|any) of the list elements meet the criteria
        Otherwise, is the target == criteria
        """
        if isinstance(target, list):
            # Recurse: any element of a list-valued field may satisfy the criteria.
            return any(self._value_matches(ele, criteria) for ele in target)
        elif isinstance(criteria, re._pattern_type):  # pylint: disable=protected-access
            return criteria.search(target) is not None
        elif callable(criteria):
            return criteria(target)
        elif isinstance(criteria, dict) and '$in' in criteria:
            # note isn't handling any other things in the dict other than in
            return any(self._value_matches(target, test_val) for test_val in criteria['$in'])
        elif isinstance(criteria, dict) and '$nin' in criteria:
            # note isn't handling any other things in the dict other than nin
            return not any(self._value_matches(target, test_val) for test_val in criteria['$nin'])
        else:
            return criteria == target

    @abstractmethod
    def make_course_key(self, org, course, run):
        """
        Return a valid :class:`~opaque_keys.edx.keys.CourseKey` for this modulestore
        that matches the supplied `org`, `course`, and `run`.

        This key may represent a course that doesn't exist in this modulestore.
        """
        pass

    @abstractmethod
    def make_course_usage_key(self, course_key):
        """
        Return a valid :class:`~opaque_keys.edx.keys.UsageKey` for this modulestore
        that matches the supplied course_key.
        """
        pass

    @abstractmethod
    def get_courses(self, **kwargs):
        '''
        Returns a list containing the top level XModuleDescriptors of the courses
        in this modulestore. This method can take an optional argument 'org' which
        will efficiently apply a filter so that only the courses of the specified
        ORG in the CourseKey will be fetched.
        '''
        pass

    @abstractmethod
    def get_course(self, course_id, depth=0, **kwargs):
        '''
        Look for a specific course by its id (:class:`CourseKey`).
        Returns the course descriptor, or None if not found.
        '''
        pass

    @abstractmethod
    def has_course(self, course_id, ignore_case=False, **kwargs):
        '''
        Look for a specific course id. Returns whether it exists.

        Args:
            course_id (CourseKey):
            ignore_case (boolean): some modulestores are case-insensitive. Use this flag
                to search for whether a potentially conflicting course exists in that case.
        '''
        pass

    @abstractmethod
    def get_parent_location(self, location, **kwargs):
        '''
        Find the location that is the parent of this location in this
        course. Needed for path_to_location().
        '''
        pass

    @abstractmethod
    def get_orphans(self, course_key, **kwargs):
        """
        Get all of the xblocks in the given course which have no parents and are not of types which are
        usually orphaned. NOTE: may include xblocks which still have references via xblocks which don't
        use children to point to their dependents.
        """
        pass

    @abstractmethod
    def get_errored_courses(self):
        """
        Return a dictionary of course_dir -> [(msg, exception_str)], for each
        course_dir where course loading failed.
        """
        pass

    @abstractmethod
    def get_modulestore_type(self, course_id):
        """
        Returns a type which identifies which modulestore is servicing the given
        course_id. The return can be either "xml" (for XML based courses) or "mongo" for MongoDB backed courses
        """
        pass

    @abstractmethod
    def get_courses_for_wiki(self, wiki_slug, **kwargs):
        """
        Return the list of courses which use this wiki_slug

        :param wiki_slug: the course wiki root slug
        :return: list of course keys
        """
        pass

    @abstractmethod
    def has_published_version(self, xblock):
        """
        Returns true if this xblock exists in the published course regardless of whether it's up to date
        """
        pass

    @abstractmethod
    def close_connections(self):
        """
        Closes any open connections to the underlying databases
        """
        pass

    @contextmanager
    def bulk_operations(self, course_id, emit_signals=True):  # pylint: disable=unused-argument
        """
        A context manager for notifying the store of bulk operations. This affects only the current thread.
        """
        # Default implementation is a no-op; stores with real bulk support override this.
        yield

    def ensure_indexes(self):
        """
        Ensure that all appropriate indexes are created that are needed by this modulestore, or raise
        an exception if unable to.

        This method is intended for use by tests and administrative commands, and not
        to be run during server startup.
        """
        pass
# pylint: disable=abstract-method
class ModuleStoreWrite(ModuleStoreRead, ModuleStoreAssetWriteInterface):
    """
    An abstract interface for a database backend that stores XModuleDescriptor
    instances and extends both read and write functionality.
    """

    __metaclass__ = ABCMeta

    @abstractmethod
    def update_item(self, xblock, user_id, allow_not_found=False, force=False, **kwargs):
        """
        Update the given xblock's persisted repr. Pass the user's unique id which the persistent store
        should save with the update if it has that ability.

        :param allow_not_found: whether this method should raise an exception if the given xblock
            has not been persisted before.
        :param force: fork the structure and don't update the course draftVersion if there's a version
            conflict (only applicable to version tracking and conflict detecting persistence stores)
        :raises VersionConflictError: if org, course, run, and version_guid given and the current
            version head != version_guid and force is not True. (only applicable to version tracking stores)
        """
        pass

    @abstractmethod
    def delete_item(self, location, user_id, **kwargs):
        """
        Delete an item and its subtree from persistence. Remove the item from any parents (Note, does not
        affect parents from other branches or logical branches; thus, in old mongo, deleting something
        whose parent cannot be draft, deletes it from both but deleting a component under a draft vertical
        only deletes it from the draft.

        Pass the user's unique id which the persistent store
        should save with the update if it has that ability.

        :param force: fork the structure and don't update the course draftVersion if there's a version
            conflict (only applicable to version tracking and conflict detecting persistence stores)
        :raises VersionConflictError: if org, course, run, and version_guid given and the current
            version head != version_guid and force is not True. (only applicable to version tracking stores)
        """
        pass

    @abstractmethod
    def create_course(self, org, course, run, user_id, fields=None, **kwargs):
        """
        Creates and returns the course.

        Args:
            org (str): the organization that owns the course
            course (str): the name of the course
            run (str): the name of the run
            user_id: id of the user creating the course
            fields (dict): Fields to set on the course at initialization
            kwargs: Any optional arguments understood by a subset of modulestores to customize instantiation

        Returns: a CourseDescriptor
        """
        pass

    @abstractmethod
    def create_item(self, user_id, course_key, block_type, block_id=None, fields=None, **kwargs):
        """
        Creates and saves a new item in a course.

        Returns the newly created item.

        Args:
            user_id: ID of the user creating and saving the xmodule
            course_key: A :class:`~opaque_keys.edx.CourseKey` identifying which course to create
                this item in
            block_type: The type of block to create
            block_id: a unique identifier for the new item. If not supplied,
                a new identifier will be generated
            fields (dict): A dictionary specifying initial values for some or all fields
                in the newly created block
        """
        pass

    @abstractmethod
    def clone_course(self, source_course_id, dest_course_id, user_id, fields=None):
        """
        Sets up source_course_id to point a course with the same content as the desct_course_id. This
        operation may be cheap or expensive. It may have to copy all assets and all xblock content or
        merely setup new pointers.

        Backward compatibility: this method used to require in some modulestores that dest_course_id
        pointed to an empty but already created course. Implementers should support this or should
        enable creating the course from scratch.

        Raises:
            ItemNotFoundError: if the source course doesn't exist (or any of its xblocks aren't found)
            DuplicateItemError: if the destination course already exists (with content in some cases)
        """
        pass

    @abstractmethod
    def delete_course(self, course_key, user_id, **kwargs):
        """
        Deletes the course. It may be a soft or hard delete. It may or may not remove the xblock definitions
        depending on the persistence layer and how tightly bound the xblocks are to the course.

        Args:
            course_key (CourseKey): which course to delete
            user_id: id of the user deleting the course
        """
        pass

    @abstractmethod
    def _drop_database(self):
        """
        A destructive operation to drop the underlying database and close all connections.
        Intended to be used by test code for cleanup.
        """
        pass
# pylint: disable=abstract-method
class ModuleStoreReadBase(BulkOperationsMixin, ModuleStoreRead):
    '''
    Implement interface functionality that can be shared.
    '''

    # pylint: disable=invalid-name
    def __init__(
        self,
        contentstore=None,
        doc_store_config=None,  # ignore if passed up
        metadata_inheritance_cache_subsystem=None, request_cache=None,
        xblock_mixins=(), xblock_select=None, disabled_xblock_types=(),  # pylint: disable=bad-continuation
        # temporary parms to enable backward compatibility. remove once all envs migrated
        db=None, collection=None, host=None, port=None, tz_aware=True, user=None, password=None,
        # allow lower level init args to pass harmlessly
        **kwargs
    ):
        '''
        Set up the error-tracking logic.
        '''
        super(ModuleStoreReadBase, self).__init__(**kwargs)
        # Per-course error logs, created on first access: location -> ErrorLog
        self._course_errors = defaultdict(make_error_tracker)
        # pylint: disable=fixme
        # TODO move the inheritance_cache_subsystem to classes which use it
        self.metadata_inheritance_cache_subsystem = metadata_inheritance_cache_subsystem
        self.request_cache = request_cache
        self.xblock_mixins = xblock_mixins
        self.xblock_select = xblock_select
        self.disabled_xblock_types = disabled_xblock_types
        self.contentstore = contentstore

    def get_course_errors(self, course_key):
        """
        Return list of errors for this :class:`.CourseKey`, if any. Raise the same
        errors as get_item if course_key isn't present.
        """
        # check that item is present and raise the promised exceptions if needed
        # pylint: disable=fixme
        # TODO (vshnayder): post-launch, make errors properties of items
        # self.get_item(location)
        assert isinstance(course_key, CourseKey)
        return self._course_errors[course_key].errors

    def get_errored_courses(self):
        """
        Returns an empty dict.

        It is up to subclasses to extend this method if the concept
        of errored courses makes sense for their implementation.
        """
        return {}

    def get_course(self, course_id, depth=0, **kwargs):
        """
        See ModuleStoreRead.get_course

        Default impl--linear search through course list
        """
        assert isinstance(course_id, CourseKey)
        for course in self.get_courses(**kwargs):
            if course.id == course_id:
                return course
        return None

    def has_course(self, course_id, ignore_case=False, **kwargs):
        """
        Returns the course_id of the course if it was found, else None

        Args:
            course_id (CourseKey):
            ignore_case (boolean): some modulestores are case-insensitive. Use this flag
                to search for whether a potentially conflicting course exists in that case.
        """
        # linear search through list
        assert isinstance(course_id, CourseKey)
        if ignore_case:
            # Compare org/course/run case-insensitively; first match wins.
            return next(
                (
                    c.id for c in self.get_courses()
                    if c.id.org.lower() == course_id.org.lower() and
                    c.id.course.lower() == course_id.course.lower() and
                    c.id.run.lower() == course_id.run.lower()
                ),
                None
            )
        else:
            return next(
                (c.id for c in self.get_courses() if c.id == course_id),
                None
            )

    def has_published_version(self, xblock):
        """
        Returns True since this is a read-only store.
        """
        return True

    def heartbeat(self):
        """
        Is this modulestore ready?
        """
        # default is to say yes by not raising an exception
        return {'default_impl': True}

    def close_connections(self):
        """
        Closes any open connections to the underlying databases
        """
        if self.contentstore:
            self.contentstore.close_connections()
        super(ModuleStoreReadBase, self).close_connections()

    @contextmanager
    def default_store(self, store_type):
        """
        A context manager for temporarily changing the default store
        """
        # A single concrete store cannot switch types; only allow a no-op "change".
        if self.get_modulestore_type(None) != store_type:
            raise ValueError(u"Cannot set default store to type {}".format(store_type))
        yield
# pylint: disable=abstract-method
class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite):
    '''
    Implement interface functionality that can be shared.
    '''

    def __init__(self, contentstore, **kwargs):
        super(ModuleStoreWriteBase, self).__init__(contentstore=contentstore, **kwargs)
        # Mixologist decorates xblock classes with the configured mixins.
        self.mixologist = Mixologist(self.xblock_mixins)

    def partition_fields_by_scope(self, category, fields):
        """
        Return dictionary of {scope: {field1: val, ..}..} for the fields of this potential xblock

        :param category: the xblock category
        :param fields: the dictionary of {fieldname: value}
        """
        result = collections.defaultdict(dict)
        if fields is None:
            return result
        # Load the mixed-in class so field descriptors (and their scopes) are available.
        cls = self.mixologist.mix(XBlock.load_class(category, select=prefer_xmodules))
        for field_name, value in fields.iteritems():
            field = getattr(cls, field_name)
            result[field.scope][field_name] = value
        return result

    def create_course(self, org, course, run, user_id, fields=None, runtime=None, **kwargs):
        """
        Creates any necessary other things for the course as a side effect and doesn't return
        anything useful. The real subclass should call this before it returns the course.
        """
        # clone a default 'about' overview module as well
        about_location = self.make_course_key(org, course, run).make_usage_key('about', 'overview')
        about_descriptor = XBlock.load_class('about')
        overview_template = about_descriptor.get_template('overview.yaml')
        self.create_item(
            user_id,
            about_location.course_key,
            about_location.block_type,
            block_id=about_location.block_id,
            definition_data={'data': overview_template.get('data')},
            metadata=overview_template.get('metadata'),
            runtime=runtime,
            continue_version=True,
        )

    def clone_course(self, source_course_id, dest_course_id, user_id, fields=None, **kwargs):
        """
        This base method just copies the assets. The lower level impls must do the actual cloning of
        content.

        Returns:
            dest_course_id (CourseKey): the destination course identifier
        """
        with self.bulk_operations(dest_course_id):
            # copy the assets
            if self.contentstore:
                self.contentstore.copy_all_course_assets(source_course_id, dest_course_id)
            return dest_course_id

    def delete_course(self, course_key, user_id, **kwargs):
        """
        This base method just deletes the assets. The lower level impls must do the actual deleting of
        content.
        """
        # delete the assets
        if self.contentstore:
            self.contentstore.delete_all_course_assets(course_key)
        super(ModuleStoreWriteBase, self).delete_course(course_key, user_id)

    def _drop_database(self):
        """
        A destructive operation to drop the underlying database and close all connections.
        Intended to be used by test code for cleanup.
        """
        if self.contentstore:
            self.contentstore._drop_database()  # pylint: disable=protected-access
        super(ModuleStoreWriteBase, self)._drop_database()  # pylint: disable=protected-access

    def create_child(self, user_id, parent_usage_key, block_type, block_id=None, fields=None, **kwargs):
        """
        Creates and saves a new xblock that as a child of the specified block

        Returns the newly created item.

        Args:
            user_id: ID of the user creating and saving the xmodule
            parent_usage_key: a :class:`~opaque_key.edx.UsageKey` identifing the
                block that this item should be parented under
            block_type: The type of block to create
            block_id: a unique identifier for the new item. If not supplied,
                a new identifier will be generated
            fields (dict): A dictionary specifying initial values for some or all fields
                in the newly created block
        """
        item = self.create_item(user_id, parent_usage_key.course_key, block_type, block_id=block_id, fields=fields, **kwargs)
        parent = self.get_item(parent_usage_key)
        parent.children.append(item.location)
        self.update_item(parent, user_id)
        # BUGFIX: the docstring promises the newly created item; previously this
        # method implicitly returned None.
        return item

    def _flag_library_updated_event(self, library_key):
        """
        Wrapper around calls to fire the library_updated signal

        Unless we're nested in an active bulk operation, this simply fires the signal
        otherwise a publish will be signalled at the end of the bulk operation

        Arguments:
            library_key - library_key to which the signal applies
        """
        if self.signal_handler:
            bulk_record = self._get_bulk_ops_record(library_key) if isinstance(self, BulkOperationsMixin) else None
            if bulk_record and bulk_record.active:
                # Defer the signal until the enclosing bulk operation completes.
                bulk_record.has_library_updated_item = True
            else:
                self.signal_handler.send("library_updated", library_key=library_key)

    def _emit_course_deleted_signal(self, course_key):
        """
        Helper method used to emit the course_deleted signal.
        """
        if self.signal_handler:
            self.signal_handler.send("course_deleted", course_key=course_key)
def only_xmodules(identifier, entry_points):
    """Restrict selection to entry_points distributed by the xmodule package."""
    xmodule_points = [ep for ep in entry_points if ep.dist.key == 'xmodule']
    return default_select(identifier, xmodule_points)
def prefer_xmodules(identifier, entry_points):
    """Select from xmodule-distributed entry_points when any exist; otherwise fall back to all."""
    xmodule_points = [ep for ep in entry_points if ep.dist.key == 'xmodule']
    # An empty list is falsy, so this falls back to the full set exactly when
    # no xmodule-provided entry points were found.
    return default_select(identifier, xmodule_points or entry_points)
class EdxJSONEncoder(json.JSONEncoder):
    """
    Custom JSONEncoder that handles `Location` and `datetime.datetime` objects.

    `Location`s are encoded as their url string form, and `datetime`s as
    ISO date strings
    """
    def default(self, obj):
        if isinstance(obj, (CourseKey, UsageKey)):
            # Opaque keys serialize to their unicode/url form.
            return unicode(obj)
        if isinstance(obj, datetime.datetime):
            # An aware datetime whose tzinfo reports an unknown offset gets an
            # explicit 'Z' suffix; every other datetime is plain ISO format.
            if obj.tzinfo is not None and obj.utcoffset() is None:
                return obj.isoformat() + 'Z'
            return obj.isoformat()
        return super(EdxJSONEncoder, self).default(obj)
| agpl-3.0 |
dcosentino/edx-platform | common/djangoapps/student/tests/test_verification_status.py | 8 | 12784 | """Tests for per-course verification status on the dashboard. """
from datetime import datetime, timedelta
import unittest
import ddt
from mock import patch
from pytz import UTC
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from student.helpers import (
VERIFY_STATUS_NEED_TO_VERIFY,
VERIFY_STATUS_SUBMITTED,
VERIFY_STATUS_APPROVED,
VERIFY_STATUS_MISSED_DEADLINE
)
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase, mixed_store_config
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from course_modes.tests.factories import CourseModeFactory
from verify_student.models import SoftwareSecurePhotoVerification # pylint: disable=F0401
from util.testing import UrlResetMixin
MODULESTORE_CONFIG = mixed_store_config(settings.COMMON_TEST_DATA_ROOT, {}, include_xml=False)
@override_settings(MODULESTORE=MODULESTORE_CONFIG)
@patch.dict(settings.FEATURES, {
'SEPARATE_VERIFICATION_FROM_PAYMENT': True,
'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True
})
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
@ddt.ddt
class TestCourseVerificationStatus(UrlResetMixin, ModuleStoreTestCase):
"""Tests for per-course verification status on the dashboard. """
PAST = datetime.now(UTC) - timedelta(days=5)
FUTURE = datetime.now(UTC) + timedelta(days=5)
@patch.dict(settings.FEATURES, {'SEPARATE_VERIFICATION_FROM_PAYMENT': True})
def setUp(self):
# Invoke UrlResetMixin
super(TestCourseVerificationStatus, self).setUp('verify_student.urls')
self.user = UserFactory(password="edx")
self.course = CourseFactory.create()
success = self.client.login(username=self.user.username, password="edx")
self.assertTrue(success, msg="Did not log in successfully")
# Use the URL with the querystring param to put the user
# in the experimental track.
# TODO (ECOM-188): Once the A/B test of decoupling verified / payment
# completes, we can remove the querystring param.
self.dashboard_url = reverse('dashboard') + '?separate-verified=1'
def test_enrolled_as_non_verified(self):
self._setup_mode_and_enrollment(None, "honor")
# Expect that the course appears on the dashboard
# without any verification messaging
self._assert_course_verification_status(None)
def test_no_verified_mode_available(self):
# Enroll the student in a verified mode, but don't
# create any verified course mode.
# This won't happen unless someone deletes a course mode,
# but if so, make sure we handle it gracefully.
CourseEnrollmentFactory(
course_id=self.course.id,
user=self.user,
mode="verified"
)
# The default course has no verified mode,
# so no verification status should be displayed
self._assert_course_verification_status(None)
def test_need_to_verify_no_expiration(self):
self._setup_mode_and_enrollment(None, "verified")
# Since the student has not submitted a photo verification,
# the student should see a "need to verify" message
self._assert_course_verification_status(VERIFY_STATUS_NEED_TO_VERIFY)
# Start the photo verification process, but do not submit
# Since we haven't submitted the verification, we should still
# see the "need to verify" message
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
self._assert_course_verification_status(VERIFY_STATUS_NEED_TO_VERIFY)
# Upload images, but don't submit to the verification service
# We should still need to verify
attempt.mark_ready()
self._assert_course_verification_status(VERIFY_STATUS_NEED_TO_VERIFY)
def test_need_to_verify_expiration(self):
self._setup_mode_and_enrollment(self.FUTURE, "verified")
response = self.client.get(self.dashboard_url)
self.assertContains(response, self.BANNER_ALT_MESSAGES[VERIFY_STATUS_NEED_TO_VERIFY])
self.assertContains(response, "You only have 4 days left to verify for this course.")
@ddt.data(None, FUTURE)
def test_waiting_approval(self, expiration):
self._setup_mode_and_enrollment(expiration, "verified")
# The student has submitted a photo verification
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
attempt.mark_ready()
attempt.submit()
# Now the student should see a "verification submitted" message
self._assert_course_verification_status(VERIFY_STATUS_SUBMITTED)
@ddt.data(None, FUTURE)
def test_fully_verified(self, expiration):
self._setup_mode_and_enrollment(expiration, "verified")
# The student has an approved verification
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
attempt.mark_ready()
attempt.submit()
attempt.approve()
# Expect that the successfully verified message is shown
self._assert_course_verification_status(VERIFY_STATUS_APPROVED)
# Check that the "verification good until" date is displayed
response = self.client.get(self.dashboard_url)
self.assertContains(response, attempt.expiration_datetime.strftime("%m/%d/%Y"))
def test_missed_verification_deadline(self):
    """With the deadline in the past and no verification at all, the
    dashboard shows the "missed deadline" state."""
    # Expiration date in the past
    self._setup_mode_and_enrollment(self.PAST, "verified")

    # The student does NOT have an approved verification
    # so the status should show that the student missed the deadline.
    self._assert_course_verification_status(VERIFY_STATUS_MISSED_DEADLINE)
def test_missed_verification_deadline_verification_was_expired(self):
    """An approved verification that had already expired before the course
    deadline still counts as having missed the deadline."""
    # Expiration date in the past
    self._setup_mode_and_enrollment(self.PAST, "verified")

    # Create a verification, but back-date it so its own validity window
    # (derived from created_at) ended before the course deadline.
    attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
    attempt.mark_ready()
    attempt.submit()
    attempt.approve()
    attempt.created_at = self.PAST - timedelta(days=900)
    attempt.save()

    # The student didn't have an approved verification at the deadline,
    # so we should show that the student missed the deadline.
    self._assert_course_verification_status(VERIFY_STATUS_MISSED_DEADLINE)
def test_missed_verification_deadline_but_later_verified(self):
    """Verifying only after the deadline still yields "missed deadline".

    NOTE(review): this body is identical to
    test_missed_verification_deadline_verification_was_expired — the
    attempt is back-dated *before* the deadline rather than created
    after it, so the "later verified" scenario named in the title is
    never actually exercised. Confirm intent.
    """
    # Expiration date in the past
    self._setup_mode_and_enrollment(self.PAST, "verified")

    # Successfully verify, but after the deadline has already passed
    attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
    attempt.mark_ready()
    attempt.submit()
    attempt.approve()
    attempt.created_at = self.PAST - timedelta(days=900)
    attempt.save()

    # The student didn't have an approved verification at the deadline,
    # so we should show that the student missed the deadline.
    self._assert_course_verification_status(VERIFY_STATUS_MISSED_DEADLINE)
def test_verification_denied(self):
    """A denied verification is an unhandled state: no verification
    banner or messaging should be rendered at all."""
    # Expiration date in the future
    self._setup_mode_and_enrollment(self.FUTURE, "verified")

    # Create a verification that the reviewer has denied
    attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
    attempt.mark_ready()
    attempt.submit()
    attempt.deny("Not valid!")

    # Since this is not a status we handle, don't display any
    # messaging relating to verification
    self._assert_course_verification_status(None)
def test_verification_error(self):
    """A verification stuck in a system-error state is unhandled: no
    verification messaging should be rendered."""
    # Expiration date in the future
    self._setup_mode_and_enrollment(self.FUTURE, "verified")

    # Create a verification and push it into the error state.
    # (Status is forced to "must_retry" first — presumably a precondition
    # of system_error(); confirm against the model's state machine.)
    attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
    attempt.status = "must_retry"
    attempt.system_error("Error!")

    # Since this is not a status we handle, don't display any
    # messaging relating to verification
    self._assert_course_verification_status(None)
def test_verification_will_expire_by_deadline(self):
    """A still-active verification that will lapse before the course
    deadline suppresses the "verify now" prompt entirely."""
    # Expiration date in the future
    self._setup_mode_and_enrollment(self.FUTURE, "verified")

    # Create a verification attempt that:
    # 1) Is current (submitted in the last year)
    # 2) Will expire by the deadline for the course
    attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
    attempt.mark_ready()
    attempt.submit()

    # Back-dating by 364 days makes the attempt expire tomorrow, before
    # the course deadline (assumes a 365-day validity window — confirm).
    attempt.created_at = attempt.created_at - timedelta(days=364)
    attempt.save()

    # Expect that the "verify now" message is hidden
    # (since the user isn't allowed to submit another attempt while
    # a verification is active).
    self._assert_course_verification_status(None)
def _setup_mode_and_enrollment(self, deadline, enrollment_mode):
    """Create a course mode and enrollment.

    Arguments:
        deadline (datetime): The deadline for submitting your verification.
        enrollment_mode (str): The mode of the enrollment.

    The course mode created is always "verified"; only the *enrollment's*
    mode varies with ``enrollment_mode``.
    """
    CourseModeFactory(
        course_id=self.course.id,
        mode_slug="verified",
        expiration_datetime=deadline
    )
    CourseEnrollmentFactory(
        course_id=self.course.id,
        user=self.user,
        mode=enrollment_mode
    )
# Expected alt text of the dashboard banner image for each verification
# state.  The ``None`` key is the expectation when no verification
# messaging should be displayed.
BANNER_ALT_MESSAGES = {
    None: "Honor",
    VERIFY_STATUS_NEED_TO_VERIFY: "ID verification pending",
    VERIFY_STATUS_SUBMITTED: "ID verification pending",
    VERIFY_STATUS_APPROVED: "ID Verified Ribbon/Badge",
    VERIFY_STATUS_MISSED_DEADLINE: "Honor"
}

# Candidate copy strings per state; a check passes if *any* message for
# the state appears on the page (see _assert_course_verification_status).
NOTIFICATION_MESSAGES = {
    VERIFY_STATUS_NEED_TO_VERIFY: [
        "You still need to verify for this course.",
        "Verification not yet complete"
    ],
    VERIFY_STATUS_SUBMITTED: ["Thanks for your patience as we process your request."],
    VERIFY_STATUS_APPROVED: ["You have already verified your ID!"],
}

# CSS class expected on the course <article> element for each state.
MODE_CLASSES = {
    None: "honor",
    VERIFY_STATUS_NEED_TO_VERIFY: "verified",
    VERIFY_STATUS_SUBMITTED: "verified",
    VERIFY_STATUS_APPROVED: "verified",
    VERIFY_STATUS_MISSED_DEADLINE: "honor"
}
def _assert_course_verification_status(self, status):
    """Check whether the specified verification status is shown on the dashboard.

    Arguments:
        status (str): One of the verification status constants.
            If None, check that *none* of the statuses are displayed.

    Raises:
        AssertionError
    """
    response = self.client.get(self.dashboard_url)

    # Sanity check: verify that the course is on the page
    self.assertContains(response, unicode(self.course.id))

    # Verify that the correct banner is rendered on the dashboard
    self.assertContains(response, self.BANNER_ALT_MESSAGES[status])

    # Verify that the correct banner color is rendered
    self.assertContains(
        response,
        "<article class=\"course {}\">".format(self.MODE_CLASSES[status])
    )

    # Verify that the correct copy is rendered on the dashboard
    if status is not None:
        if status in self.NOTIFICATION_MESSAGES:
            # Different states might have different messaging,
            # so accept the page if *any* candidate message appears.
            found_msg = any(
                message in response.content
                for message in self.NOTIFICATION_MESSAGES[status]
            )
            fail_msg = "Could not find any of these messages: {expected}".format(
                expected=self.NOTIFICATION_MESSAGES[status]
            )
            self.assertTrue(found_msg, msg=fail_msg)
    else:
        # Flatten every known message and verify that none is displayed
        all_messages = [
            msg
            for msg_group in self.NOTIFICATION_MESSAGES.values()
            for msg in msg_group
        ]
        for msg in all_messages:
            self.assertNotContains(response, msg)
| agpl-3.0 |
Anonymouslemming/ansible | lib/ansible/plugins/action/fail.py | 227 | 1391 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2012, Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
    ''' Fail with custom message '''

    TRANSFERS_FILES = False

    def run(self, tmp=None, task_vars=None):
        # Normalize task_vars so downstream code can rely on a dict.
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        # Prefer the user-supplied message; fall back to the stock one.
        task_args = self._task.args or {}
        msg = task_args.get('msg', 'Failed as requested from task')

        # This action exists solely to fail the task with that message.
        result['failed'] = True
        result['msg'] = msg
        return result
| gpl-3.0 |
mameneses/python-deployment | venv/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/lint.py | 979 | 4306 | from __future__ import absolute_import, division, unicode_literals
from gettext import gettext
_ = gettext  # conventional alias for marking translatable error strings

from . import _base
from ..constants import cdataElements, rcdataElements, voidElements

from ..constants import spaceCharacters
# spaceCharacters is imported as a collection; collapse it into a single
# string so it can be used with str.strip() below.
spaceCharacters = "".join(spaceCharacters)
class LintError(Exception):
    """Raised when the token stream violates an invariant checked by Filter."""
    pass
class Filter(_base.Filter):
    """Sanity-checks a token stream, raising LintError on malformed tokens.

    Tokens are passed through unchanged; the filter only validates
    invariants (tag/attribute types, start/end tag balance, content
    model transitions).
    """
    def __iter__(self):
        # Stack of currently-open tag names, used to match EndTags.
        open_elements = []
        # Tracks the active content model ("PCDATA", "CDATA", "RCDATA",
        # or "PLAINTEXT"); most token kinds are only legal in PCDATA.
        contentModelFlag = "PCDATA"
        for token in _base.Filter.__iter__(self):
            type = token["type"]
            if type in ("StartTag", "EmptyTag"):
                name = token["name"]
                if contentModelFlag != "PCDATA":
                    raise LintError(_("StartTag not in PCDATA content model flag: %(tag)s") % {"tag": name})
                if not isinstance(name, str):
                    raise LintError(_("Tag name is not a string: %(tag)r") % {"tag": name})
                if not name:
                    raise LintError(_("Empty tag name"))
                # Void elements (e.g. <br>) must be EmptyTag tokens and
                # vice versa.
                if type == "StartTag" and name in voidElements:
                    raise LintError(_("Void element reported as StartTag token: %(tag)s") % {"tag": name})
                elif type == "EmptyTag" and name not in voidElements:
                    raise LintError(_("Non-void element reported as EmptyTag token: %(tag)s") % {"tag": token["name"]})
                if type == "StartTag":
                    open_elements.append(name)
                for name, value in token["data"]:
                    if not isinstance(name, str):
                        raise LintError(_("Attribute name is not a string: %(name)r") % {"name": name})
                    if not name:
                        raise LintError(_("Empty attribute name"))
                    if not isinstance(value, str):
                        raise LintError(_("Attribute value is not a string: %(value)r") % {"value": value})
                # NOTE(review): if the tag has attributes, ``name`` here is
                # the *last attribute name*, not the tag name (the loop
                # variable above shadows it) — confirm whether the content
                # model switch should use token["name"] instead.
                if name in cdataElements:
                    contentModelFlag = "CDATA"
                elif name in rcdataElements:
                    contentModelFlag = "RCDATA"
                elif name == "plaintext":
                    contentModelFlag = "PLAINTEXT"

            elif type == "EndTag":
                name = token["name"]
                if not isinstance(name, str):
                    raise LintError(_("Tag name is not a string: %(tag)r") % {"tag": name})
                if not name:
                    raise LintError(_("Empty tag name"))
                if name in voidElements:
                    raise LintError(_("Void element reported as EndTag token: %(tag)s") % {"tag": name})
                # EndTags must close the most recently opened element.
                start_name = open_elements.pop()
                if start_name != name:
                    raise LintError(_("EndTag (%(end)s) does not match StartTag (%(start)s)") % {"end": name, "start": start_name})
                # Closing a CDATA/RCDATA element returns us to PCDATA.
                contentModelFlag = "PCDATA"

            elif type == "Comment":
                if contentModelFlag != "PCDATA":
                    raise LintError(_("Comment not in PCDATA content model flag"))

            elif type in ("Characters", "SpaceCharacters"):
                data = token["data"]
                if not isinstance(data, str):
                    raise LintError(_("Attribute name is not a string: %(name)r") % {"name": data})
                if not data:
                    raise LintError(_("%(type)s token with empty data") % {"type": type})
                if type == "SpaceCharacters":
                    # SpaceCharacters tokens must contain whitespace only.
                    data = data.strip(spaceCharacters)
                    if data:
                        raise LintError(_("Non-space character(s) found in SpaceCharacters token: %(token)r") % {"token": data})

            elif type == "Doctype":
                name = token["name"]
                if contentModelFlag != "PCDATA":
                    raise LintError(_("Doctype not in PCDATA content model flag: %(name)s") % {"name": name})
                if not isinstance(name, str):
                    raise LintError(_("Tag name is not a string: %(tag)r") % {"tag": name})
                # XXX: what to do with token["data"] ?

            elif type in ("ParseError", "SerializeError"):
                # Error tokens are passed through unvalidated.
                pass

            else:
                raise LintError(_("Unknown token type: %(type)s") % {"type": type})

            # Tokens are forwarded unchanged.
            yield token
| mit |
sbalde/edxplatform | lms/djangoapps/instructor/paidcourse_enrollment_report.py | 11 | 8911 | """
Defines concrete class for cybersource Enrollment Report.
"""
from courseware.access import has_access
import collections
from django.conf import settings
from django.utils.translation import ugettext as _
from courseware.courses import get_course_by_id
from instructor.enrollment_report import BaseAbstractEnrollmentReportProvider
from microsite_configuration import microsite
from shoppingcart.models import RegistrationCodeRedemption, PaidCourseRegistration, CouponRedemption, OrderItem, \
InvoiceTransaction
from student.models import CourseEnrollment, ManualEnrollmentAudit
class PaidCourseEnrollmentReportProvider(BaseAbstractEnrollmentReportProvider):
    """
    The concrete class for all CyberSource Enrollment Reports.

    Produces, per user/course pair, ordered dicts of enrollment and
    payment details for the paid-course enrollment report.
    """
    def get_enrollment_info(self, user, course_id):
        """
        Returns the User Enrollment information.

        Returns an OrderedDict with 'Enrollment Date',
        'Currently Enrolled', 'Enrollment Source' and 'Enrollment Role'.
        """
        course = get_course_by_id(course_id, depth=0)
        is_course_staff = bool(has_access(user, 'staff', course))

        # check the user enrollment role
        if user.is_staff:
            platform_name = microsite.get_value('platform_name', settings.PLATFORM_NAME)
            enrollment_role = _('{platform_name} Staff').format(platform_name=platform_name)
        elif is_course_staff:
            enrollment_role = _('Course Staff')
        else:
            enrollment_role = _('Student')

        course_enrollment = CourseEnrollment.get_enrollment(user=user, course_key=course_id)

        if is_course_staff:
            enrollment_source = _('Staff')
        else:
            # get the registration_code_redemption object if exists
            registration_code_redemption = RegistrationCodeRedemption.registration_code_used_for_enrollment(
                course_enrollment)

            # get the paid_course registration item if exists
            paid_course_reg_item = PaidCourseRegistration.get_course_item_for_user_enrollment(
                user=user,
                course_id=course_id,
                course_enrollment=course_enrollment
            )

            # Determine how the user ended up enrolled: registration code,
            # individual credit-card purchase, or manual enrollment.
            if registration_code_redemption is not None:
                enrollment_source = _('Used Registration Code')
            elif paid_course_reg_item is not None:
                enrollment_source = _('Credit Card - Individual')
            else:
                manual_enrollment = ManualEnrollmentAudit.get_manual_enrollment(course_enrollment)
                if manual_enrollment is not None:
                    enrollment_source = _(
                        'manually enrolled by {username} - reason: {reason}'
                    ).format(username=manual_enrollment.enrolled_by.username, reason=manual_enrollment.reason)
                else:
                    enrollment_source = _('Manually Enrolled')

        enrollment_date = course_enrollment.created.strftime("%B %d, %Y")
        currently_enrolled = course_enrollment.is_active

        course_enrollment_data = collections.OrderedDict()
        course_enrollment_data['Enrollment Date'] = enrollment_date
        course_enrollment_data['Currently Enrolled'] = currently_enrolled
        course_enrollment_data['Enrollment Source'] = enrollment_source
        course_enrollment_data['Enrollment Role'] = enrollment_role
        return course_enrollment_data

    def get_payment_info(self, user, course_id):
        """
        Returns the User Payment information.

        Returns an OrderedDict with list price, amount paid, coupon and
        registration codes used, payment status, and a transaction
        reference.  Fields that do not apply to the detected purchase
        path are filled with 'N/A'.
        """
        course_enrollment = CourseEnrollment.get_enrollment(user=user, course_key=course_id)
        paid_course_reg_item = PaidCourseRegistration.get_course_item_for_user_enrollment(
            user=user,
            course_id=course_id,
            course_enrollment=course_enrollment
        )
        payment_data = collections.OrderedDict()

        # check if the user made a single self purchase scenario
        # for enrollment in the course.
        if paid_course_reg_item is not None:
            coupon_redemption = CouponRedemption.objects.select_related('coupon').filter(
                order_id=paid_course_reg_item.order_id)
            coupon_codes = [redemption.coupon.code for redemption in coupon_redemption]
            coupon_codes = ", ".join(coupon_codes)

            registration_code_used = 'N/A'
            list_price = paid_course_reg_item.get_list_price()
            payment_amount = paid_course_reg_item.unit_cost
            coupon_codes_used = coupon_codes
            payment_status = paid_course_reg_item.status
            transaction_reference_number = paid_course_reg_item.order_id
        else:
            # check if the user used a registration code for the enrollment.
            registration_code_redemption = RegistrationCodeRedemption.registration_code_used_for_enrollment(
                course_enrollment)
            if registration_code_redemption is not None:
                registration_code = registration_code_redemption.registration_code
                registration_code_used = registration_code.code
                # NOTE(review): getattr without a default is plain
                # attribute access and raises AttributeError when the
                # attribute is missing — presumably getattr(..., None)
                # was intended; confirm before changing.
                if getattr(registration_code, 'invoice_item_id'):
                    # Code was generated from an invoice (bulk purchase).
                    list_price, payment_amount, payment_status, transaction_reference_number =\
                        self._get_invoice_data(registration_code_redemption)
                    coupon_codes_used = 'N/A'

                elif getattr(registration_code_redemption.registration_code, 'order_id'):
                    # Code was generated from a shopping-cart order.
                    list_price, payment_amount, coupon_codes_used, payment_status, transaction_reference_number = \
                        self._get_order_data(registration_code_redemption, course_id)
                    registration_code_used = 'N/A'
                else:
                    # this happens when the registration code is not created via invoice or bulk purchase
                    # scenario.
                    list_price = 'N/A'
                    payment_amount = 'N/A'
                    coupon_codes_used = 'N/A'
                    registration_code_used = 'N/A'
                    payment_status = _('Data Integrity Error')
                    transaction_reference_number = 'N/A'
            else:
                # No purchase record found at all; payment details TBD.
                list_price = 'N/A'
                payment_amount = 'N/A'
                coupon_codes_used = 'N/A'
                registration_code_used = 'N/A'
                payment_status = _('TBD')
                transaction_reference_number = 'N/A'

        payment_data['List Price'] = list_price
        payment_data['Payment Amount'] = payment_amount
        payment_data['Coupon Codes Used'] = coupon_codes_used
        payment_data['Registration Code Used'] = registration_code_used
        payment_data['Payment Status'] = payment_status
        payment_data['Transaction Reference Number'] = transaction_reference_number
        return payment_data

    def _get_order_data(self, registration_code_redemption, course_id):
        """
        Returns the order data
        (list_price, payment_amount, coupon_codes_used, payment_status,
        transaction_reference_number) for a cart-generated code.
        """
        order_item = OrderItem.objects.get(order=registration_code_redemption.registration_code.order,
                                           courseregcodeitem__course_id=course_id)
        coupon_redemption = CouponRedemption.objects.select_related('coupon').filter(
            order_id=registration_code_redemption.registration_code.order)
        coupon_codes = [redemption.coupon.code for redemption in coupon_redemption]
        coupon_codes = ", ".join(coupon_codes)
        list_price = order_item.get_list_price()
        payment_amount = order_item.unit_cost
        coupon_codes_used = coupon_codes
        payment_status = order_item.status
        transaction_reference_number = order_item.order_id
        return list_price, payment_amount, coupon_codes_used, payment_status, transaction_reference_number

    def _get_invoice_data(self, registration_code_redemption):
        """
        Returns the Invoice data
        (list_price, payment_amount, payment_status,
        transaction_reference_number) for an invoice-generated code.
        """
        registration_code = registration_code_redemption.registration_code
        list_price = getattr(registration_code.invoice_item, 'unit_price')
        total_amount = registration_code_redemption.registration_code.invoice.total_amount
        qty = registration_code_redemption.registration_code.invoice_item.qty
        # Per-seat amount.  NOTE(review): under Python 2 this is integer
        # division if both operands are ints — confirm the field types.
        payment_amount = total_amount / qty
        invoice_transaction = InvoiceTransaction.get_invoice_transaction(
            invoice_id=registration_code_redemption.registration_code.invoice.id)
        if invoice_transaction is not None:
            # A positive amount means the invoice has been paid.
            if invoice_transaction.amount > 0:
                payment_status = 'Invoice Paid'
            else:
                # A non-positive amount means the invoice was refunded.
                payment_status = 'Refunded'
        else:
            payment_status = 'Invoice Outstanding'
        transaction_reference_number = registration_code_redemption.registration_code.invoice_id
        return list_price, payment_amount, payment_status, transaction_reference_number
| agpl-3.0 |
tbinjiayou/Odoo | addons/account/wizard/account_move_line_unreconcile_select.py | 385 | 1864 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_move_line_unreconcile_select(osv.osv_memory):
    """Wizard: pick an account, then list its reconciled move lines."""
    _name = "account.move.line.unreconcile.select"
    _description = "Unreconciliation"
    _columns = {
        'account_id': fields.many2one('account.account', 'Account', required=True),
    }

    def action_open_window(self, cr, uid, ids, context=None):
        # The wizard is single-record; read the chosen account.
        wizard = self.read(cr, uid, ids, context=context)[0]
        domain = "[('account_id','=',%d),('reconcile_id','<>',False),('state','<>','draft')]" % wizard['account_id']
        return {
            'type': 'ir.actions.act_window',
            'name': 'Unreconciliation',
            'res_model': 'account.move.line',
            'view_type': 'form',
            'view_mode': 'tree,form',
            'view_id': False,
            'domain': domain,
        }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dietrichc/streamline-ppc-reports | examples/dfa/v1_20/create_html_asset.py | 4 | 2263 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example creates a HTML creative asset in a given advertiser.
To create an advertiser, run create_advertiser.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: creative.saveCreativeAsset
"""
__author__ = 'Joseph DiLallo'
import base64
# Import appropriate modules from the client library.
from googleads import dfa
# Placeholders the user must fill in before running this example.
# NOTE(review): the SWF wording below appears copied from the Flash-asset
# example; this script uploads an HTML asset — confirm the intended names.
ADVERTISER_ID = 'INSERT_ADVERTISER_ID_HERE'
ASSET_NAME = 'INSERT_SWF_ASSET_NAME_HERE'
PATH_TO_FILE = 'INSERT_PATH_TO_SWF_FILE_HERE'
def main(client, advertiser_id, asset_name, path_to_file):
    """Create an HTML creative asset under the given advertiser.

    Args:
        client: an initialized DFA client; must provide GetService().
        advertiser_id: ID of the advertiser that will own the asset.
        asset_name: name to store the asset under.
        path_to_file: path of the local file whose contents are uploaded.
    """
    # Initialize appropriate service.
    creative_service = client.GetService(
        'creative', 'v1.20', 'https://advertisersapitest.doubleclick.net')

    # Convert file into format that can be sent in SOAP messages.
    # Read as *binary* and use b64encode: the previous text-mode read
    # corrupted bytes on Windows, and base64.encodestring is deprecated
    # (removed in modern Python).
    with open(path_to_file, 'rb') as file_handle:
        content = base64.b64encode(file_handle.read()).decode('ascii')

    # Construct and save HTML asset.
    html_asset = {
        'name': asset_name,
        'advertiserId': advertiser_id,
        'content': content,
        # Set the following to true if this asset is being used for HTML creative.
        'forHTMLCreatives': 'true'
    }
    result = creative_service.saveCreativeAsset(html_asset)

    # Display results.
    print ('Creative asset with file name of \'%s\' was created.'
           % result['savedFilename'])
if __name__ == '__main__':
    # Initialize client object from the local googleads.yaml credentials,
    # then run the example with the placeholder constants above.
    dfa_client = dfa.DfaClient.LoadFromStorage()
    main(dfa_client, ADVERTISER_ID, ASSET_NAME, PATH_TO_FILE)
| apache-2.0 |
hiroakis/ansible | lib/ansible/plugins/inventory/ini.py | 90 | 2220 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from . import InventoryParser
class InventoryIniParser(InventoryParser):
    """Parses INI-style inventory files found in a directory.

    Fixes over the previous revision (which could not run at all):
    the base class and both ``super()`` calls now reference consistent,
    defined names (the old code mixed the undefined
    ``InventoryAggregateParser`` and ``InventoryDirectoryParser``), and
    the filtered file list accumulates in the variable that is actually
    initialized (it was ``filtered_names = []`` but ``new_names.append``).
    """

    def __init__(self, inven_directory):
        directory = inven_directory
        names = os.listdir(inven_directory)
        filtered_names = []

        # Clean up the list of filenames
        for filename in names:
            # Skip files that end with certain extensions or characters
            if any(filename.endswith(ext) for ext in ("~", ".orig", ".bak", ".ini", ".retry", ".pyc", ".pyo")):
                continue
            # Skip hidden files
            if filename.startswith('.') and not filename.startswith('.{0}'.format(os.path.sep)):
                continue
            # These are things inside of an inventory basedir
            if filename in ("host_vars", "group_vars", "vars_plugins"):
                continue
            fullpath = os.path.join(directory, filename)
            filtered_names.append(fullpath)

        super(InventoryIniParser, self).__init__(filtered_names)

    def parse(self):
        return super(InventoryIniParser, self).parse()

    def _before_comment(self, msg):
        ''' what's the part of a string before a comment? '''
        # Escaped hashes (\#) are hidden behind a sentinel so that only
        # unescaped '#' starts a comment, then restored afterwards.
        msg = msg.replace("\#", "**NOT_A_COMMENT**")
        msg = msg.split("#")[0]
        msg = msg.replace("**NOT_A_COMMENT**", "#")
        return msg
| gpl-3.0 |
st-tu-dresden/inloop | inloop/statistics/forms.py | 1 | 1232 | from django import forms
from django.core.exceptions import ValidationError
# SQL date-truncation granularities accepted by validate_granularity.
ALLOWED_TRUNCATOR_IDENTIFIERS = ["minute", "hour", "day", "month", "year"]
# Datetime formats accepted by the form DateTimeFields below
# (ISO 8601 with microseconds + Zulu suffix, or a bare date).
VALID_DATETIME_FORMATS = ["%Y-%m-%dT%H:%M:%S.%fZ", "%Y-%m-%d"]
def validate_granularity(value: str) -> None:
    """
    Validate that a given value corresponds to
    a supported SQL truncator identifier.
    """
    # Accept only the whitelisted truncation granularities.
    if value in ALLOWED_TRUNCATOR_IDENTIFIERS:
        return
    raise ValidationError(
        f"Granularity was supplied with the value {value} but is not allowed. "
        f"Allowed granularities are {ALLOWED_TRUNCATOR_IDENTIFIERS}."
    )
class SubmissionsHistogramForm(forms.Form):
    """Validates the query parameters for the submissions histogram view."""
    # Optional cap on the number of rows considered.
    queryset_limit = forms.IntegerField(required=False)
    # Optional time window (see VALID_DATETIME_FORMATS for accepted syntax).
    from_timestamp = forms.DateTimeField(input_formats=VALID_DATETIME_FORMATS, required=False)
    to_timestamp = forms.DateTimeField(input_formats=VALID_DATETIME_FORMATS, required=False)
    # Tri-state filter: True / False / None (= don't filter on pass state).
    passed = forms.NullBooleanField(required=False)
    category_id = forms.IntegerField(required=False)
    # Bucket size for the histogram; must be an allowed SQL truncator.
    granularity = forms.CharField(validators=[validate_granularity], required=False)
class AttemptsHistogramForm(forms.Form):
    """Validates the query parameters for the per-task attempts histogram."""
    queryset_limit = forms.IntegerField(required=False)
    # The task whose attempts are aggregated (mandatory, unlike the
    # filters on SubmissionsHistogramForm).
    task_id = forms.IntegerField()
Kingclove/INFO3180 | server/lib/werkzeug/wsgi.py | 312 | 37386 | # -*- coding: utf-8 -*-
"""
werkzeug.wsgi
~~~~~~~~~~~~~
This module implements WSGI related helpers.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
import os
import sys
import posixpath
import mimetypes
from itertools import chain
from zlib import adler32
from time import time, mktime
from datetime import datetime
from functools import partial, update_wrapper
from werkzeug._compat import iteritems, text_type, string_types, \
implements_iterator, make_literal_wrapper, to_unicode, to_bytes, \
wsgi_get_bytes, try_coerce_native, PY2
from werkzeug._internal import _empty_stream, _encode_idna
from werkzeug.http import is_resource_modified, http_date
from werkzeug.urls import uri_to_iri, url_quote, url_parse, url_join
def responder(f):
    """Marks a function as responder.  Decorate a function with it and it
    will automatically call the return value as WSGI application.

    Example::

        @responder
        def application(environ, start_response):
            return Response('Hello World!')
    """
    def wsgi_wrapper(*args):
        # Build the response object via ``f``, then invoke it as a WSGI
        # app with the trailing (environ, start_response) pair.
        return f(*args)(*args[-2:])
    return update_wrapper(wsgi_wrapper, f)
def get_current_url(environ, root_only=False, strip_querystring=False,
                    host_only=False, trusted_hosts=None):
    """A handy helper function that recreates the full URL for the current
    request or parts of it.  Here an example:

    >>> from werkzeug.test import create_environ
    >>> env = create_environ("/?param=foo", "http://localhost/script")
    >>> get_current_url(env)
    'http://localhost/script/?param=foo'
    >>> get_current_url(env, root_only=True)
    'http://localhost/script/'
    >>> get_current_url(env, host_only=True)
    'http://localhost/'
    >>> get_current_url(env, strip_querystring=True)
    'http://localhost/script/'

    Optionally it verifies that the host is in a list of trusted hosts.
    If the host is not in there it will raise a
    :exc:`~werkzeug.exceptions.SecurityError`.

    :param environ: the WSGI environment to get the current URL from.
    :param root_only: set `True` if you only want the root URL.
    :param strip_querystring: set to `True` if you don't want the querystring.
    :param host_only: set to `True` if the host URL should be returned.
    :param trusted_hosts: a list of trusted hosts, see :func:`host_is_trusted`
                          for more information.
    """
    # URL parts are collected in a list and joined once at the end.
    tmp = [environ['wsgi.url_scheme'], '://', get_host(environ, trusted_hosts)]
    cat = tmp.append
    if host_only:
        return uri_to_iri(''.join(tmp) + '/')
    # SCRIPT_NAME (application mount point) with its trailing slash removed,
    # re-quoted from the raw WSGI bytes.
    cat(url_quote(wsgi_get_bytes(environ.get('SCRIPT_NAME', ''))).rstrip('/'))
    cat('/')
    if not root_only:
        cat(url_quote(wsgi_get_bytes(environ.get('PATH_INFO', '')).lstrip(b'/')))
        if not strip_querystring:
            qs = get_query_string(environ)
            if qs:
                cat('?' + qs)
    return uri_to_iri(''.join(tmp))
def host_is_trusted(hostname, trusted_list):
    """Checks if a host is trusted against a list.  This also takes care
    of port normalization.

    .. versionadded:: 0.9

    :param hostname: the hostname to check
    :param trusted_list: a list of hostnames to check against.  If a
                         hostname starts with a dot it will match against
                         all subdomains as well.
    """
    if not hostname:
        return False

    # A single string is treated as a one-element list.
    if isinstance(trusted_list, string_types):
        trusted_list = [trusted_list]

    def _normalize(name):
        # Drop an optional ``:port`` suffix and IDNA-encode for a
        # canonical comparison.
        if ':' in name:
            name = name.rsplit(':', 1)[0]
        return _encode_idna(name)

    hostname = _normalize(hostname)
    for candidate in trusted_list:
        # A leading dot means "this domain and all of its subdomains".
        wildcard = candidate.startswith('.')
        if wildcard:
            candidate = candidate[1:]
        candidate = _normalize(candidate)
        if candidate == hostname:
            return True
        if wildcard and hostname.endswith('.' + candidate):
            return True
    return False
def get_host(environ, trusted_hosts=None):
    """Return the real host for the given WSGI environment.  This takes care
    of the `X-Forwarded-Host` header.  Optionally it verifies that the host
    is in a list of trusted hosts.  If the host is not in there it will raise
    a :exc:`~werkzeug.exceptions.SecurityError`.

    :param environ: the WSGI environment to get the host of.
    :param trusted_hosts: a list of trusted hosts, see :func:`host_is_trusted`
                          for more information.
    """
    if 'HTTP_X_FORWARDED_HOST' in environ:
        # Proxies may append hosts; the first entry is the original one.
        rv = environ['HTTP_X_FORWARDED_HOST'].split(',')[0].strip()
    elif 'HTTP_HOST' in environ:
        rv = environ['HTTP_HOST']
    else:
        # No Host header at all: reconstruct from the server address,
        # appending the port unless it is the scheme's default.
        rv = environ['SERVER_NAME']
        scheme_and_port = (environ['wsgi.url_scheme'], environ['SERVER_PORT'])
        if scheme_and_port not in (('https', '443'), ('http', '80')):
            rv = '%s:%s' % (rv, environ['SERVER_PORT'])
    if trusted_hosts is not None and not host_is_trusted(rv, trusted_hosts):
        from werkzeug.exceptions import SecurityError
        raise SecurityError('Host "%s" is not trusted' % rv)
    return rv
def get_content_length(environ):
    """Returns the content length from the WSGI environment as
    integer.  If it's not available `None` is returned.

    .. versionadded:: 0.9

    :param environ: the WSGI environ to fetch the content length from.
    """
    raw_value = environ.get('CONTENT_LENGTH')
    if raw_value is None:
        return None
    try:
        length = int(raw_value)
    except (ValueError, TypeError):
        # Malformed header: behave as if it were absent.
        return None
    # Negative lengths are nonsensical for a body size; clamp to zero.
    return length if length >= 0 else 0
def get_input_stream(environ, safe_fallback=True):
    """Returns the input stream from the WSGI environment and wraps it
    in the most sensible way possible.  The stream returned is not the
    raw WSGI stream in most cases but one that is safe to read from
    without taking into account the content length.

    .. versionadded:: 0.9

    :param environ: the WSGI environ to fetch the stream from.
    :param safe_fallback: indicates whether the function should use an
                          empty stream as safe fallback or just return the
                          original WSGI input stream if it can't wrap it
                          safely.  The default is to return an empty
                          stream in those cases.
    """
    stream = environ['wsgi.input']
    content_length = get_content_length(environ)

    # A wsgi extension that tells us if the input is terminated.  In
    # that case we return the stream unchanged as we know we can safely
    # read it until the end.
    if environ.get('wsgi.input_terminated'):
        return stream

    # If we don't have a content length we fall back to an empty stream
    # in case of a safe fallback, otherwise we return the stream unchanged.
    # The non-safe fallback is not recommended but might be useful in
    # some situations.
    if content_length is None:
        return safe_fallback and _empty_stream or stream

    # Otherwise limit the stream to the content length
    return LimitedStream(stream, content_length)
def get_query_string(environ):
    """Returns the `QUERY_STRING` from the WSGI environment.  This also takes
    care of the WSGI decoding dance on Python 3 environments as a
    native string.  The string returned will be restricted to ASCII
    characters.

    .. versionadded:: 0.9

    :param environ: the WSGI environment object to get the query string from.
    """
    qs = wsgi_get_bytes(environ.get('QUERY_STRING', ''))
    # QUERY_STRING really should be ascii safe but some browsers
    # will send us some unicode stuff (I am looking at you IE).
    # In that case we want to urllib quote it badly.
    # (Reserved query characters are kept verbatim via ``safe``.)
    return try_coerce_native(url_quote(qs, safe=':&%=+$!*\'(),'))
def get_path_info(environ, charset='utf-8', errors='replace'):
    """Returns the `PATH_INFO` from the WSGI environment, properly
    decoded (including the WSGI decoding dance on Python 3).  When
    `charset` is `None` the raw bytestring is returned instead.

    .. versionadded:: 0.9

    :param environ: the WSGI environment object to get the path from.
    :param charset: the charset for the path info, or `None` if no
                    decoding should be performed.
    :param errors: the decoding error handling.
    """
    return to_unicode(wsgi_get_bytes(environ.get('PATH_INFO', '')),
                      charset, errors, allow_none_charset=True)
def get_script_name(environ, charset='utf-8', errors='replace'):
    """Returns the `SCRIPT_NAME` from the WSGI environment, properly
    decoded (including the WSGI decoding dance on Python 3).  When
    `charset` is `None` the raw bytestring is returned instead.

    .. versionadded:: 0.9

    :param environ: the WSGI environment object to get the path from.
    :param charset: the charset for the path, or `None` if no
                    decoding should be performed.
    :param errors: the decoding error handling.
    """
    return to_unicode(wsgi_get_bytes(environ.get('SCRIPT_NAME', '')),
                      charset, errors, allow_none_charset=True)
def pop_path_info(environ, charset='utf-8', errors='replace'):
    """Removes and returns the next segment of `PATH_INFO`, pushing it onto
    `SCRIPT_NAME`.  Returns `None` if there is nothing left on `PATH_INFO`.
    If the `charset` is set to `None` a bytestring is returned.

    Empty segments (``'/foo//bar``) are skipped over but still pushed
    onto `SCRIPT_NAME`:

    >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'}
    >>> pop_path_info(env)
    'a'
    >>> env['SCRIPT_NAME']
    '/foo/a'
    >>> pop_path_info(env)
    'b'
    >>> env['SCRIPT_NAME']
    '/foo/a/b'

    .. versionadded:: 0.5

    .. versionchanged:: 0.9
       The path is now decoded and a charset and encoding
       parameter can be provided.

    :param environ: the WSGI environment that is modified.
    """
    original = environ.get('PATH_INFO')
    if not original:
        return None

    prefix = environ.get('SCRIPT_NAME', '')

    # Move any leading slashes over onto SCRIPT_NAME.
    remaining = original.lstrip('/')
    prefix += '/' * (len(original) - len(remaining))

    # Split off the first segment; ``slash`` is empty when this was the
    # last segment, in which case PATH_INFO becomes empty.
    segment, slash, tail = remaining.partition('/')
    environ['SCRIPT_NAME'] = prefix + segment
    environ['PATH_INFO'] = '/' + tail if slash else ''

    return to_unicode(wsgi_get_bytes(segment), charset, errors,
                      allow_none_charset=True)
def peek_path_info(environ, charset='utf-8', errors='replace'):
    """Returns the next segment on the `PATH_INFO` without modifying
    the environment (compare :func:`pop_path_info`):

    >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'}
    >>> peek_path_info(env)
    'a'
    >>> peek_path_info(env)
    'a'

    If the `charset` is set to `None` a bytestring is returned.

    .. versionadded:: 0.5

    .. versionchanged:: 0.9
       The path is now decoded and a charset and encoding
       parameter can be provided.

    :param environ: the WSGI environment that is checked.
    """
    # str.partition always yields a (possibly empty) first element, so
    # an empty PATH_INFO decodes to an empty string here.
    head = environ.get('PATH_INFO', '').lstrip('/').partition('/')[0]
    return to_unicode(wsgi_get_bytes(head), charset, errors,
                      allow_none_charset=True)
def extract_path_info(environ_or_baseurl, path_or_url, charset='utf-8',
                      errors='replace', collapse_http_schemes=True):
    """Extracts the path info from the given URL (or WSGI environment) and
    path.  The path info returned is a unicode string, not a bytestring
    suitable for a WSGI environment.  The URLs might also be IRIs.

    If the path info could not be determined, `None` is returned.

    Some examples:

    >>> extract_path_info('http://example.com/app', '/app/hello')
    u'/hello'
    >>> extract_path_info('http://example.com/app',
    ...                   'https://example.com/app/hello')
    u'/hello'
    >>> extract_path_info('http://example.com/app',
    ...                   'https://example.com/app/hello',
    ...                   collapse_http_schemes=False) is None
    True

    Instead of providing a base URL you can also pass a WSGI environment.

    .. versionadded:: 0.6

    :param environ_or_baseurl: a WSGI environment dict, a base URL or
                               base IRI.  This is the root of the
                               application.
    :param path_or_url: an absolute path from the server root, a
                        relative path (in which case it's the path info)
                        or a full URL.  Also accepts IRIs and unicode
                        parameters.
    :param charset: the charset for byte data in URLs
    :param errors: the error handling on decode
    :param collapse_http_schemes: if set to `False` the algorithm does
                                  not assume that http and https on the
                                  same server point to the same
                                  resource.
    """
    def _normalize_netloc(scheme, netloc):
        # Strip userinfo and drop the default port for http/https so
        # that hosts compare equal regardless of how they were written.
        parts = netloc.split(u'@', 1)[-1].split(u':', 1)
        if len(parts) == 2:
            netloc, port = parts
            if (scheme == u'http' and port == u'80') or \
               (scheme == u'https' and port == u'443'):
                port = None
        else:
            netloc = parts[0]
            port = None
        if port is not None:
            netloc += u':' + port
        return netloc

    # make sure whatever we are working on is a IRI and parse it
    path = uri_to_iri(path_or_url, charset, errors)
    if isinstance(environ_or_baseurl, dict):
        environ_or_baseurl = get_current_url(environ_or_baseurl,
                                             root_only=True)
    base_iri = uri_to_iri(environ_or_baseurl, charset, errors)
    base_scheme, base_netloc, base_path = url_parse(base_iri)[:3]
    # Resolve ``path`` relative to the base IRI so plain paths work too.
    cur_scheme, cur_netloc, cur_path, = \
        url_parse(url_join(base_iri, path))[:3]

    # normalize the network location
    base_netloc = _normalize_netloc(base_scheme, base_netloc)
    cur_netloc = _normalize_netloc(cur_scheme, cur_netloc)

    # is that IRI even on a known HTTP scheme?
    if collapse_http_schemes:
        for scheme in base_scheme, cur_scheme:
            if scheme not in (u'http', u'https'):
                return None
    else:
        if not (base_scheme in (u'http', u'https') and \
                base_scheme == cur_scheme):
            return None

    # are the netlocs compatible?
    if base_netloc != cur_netloc:
        return None

    # are we below the application path?
    base_path = base_path.rstrip(u'/')
    if not cur_path.startswith(base_path):
        return None

    return u'/' + cur_path[len(base_path):].lstrip(u'/')
class SharedDataMiddleware(object):
    """A WSGI middleware that provides static content for development
    environments or simple server setups.  Usage is quite simple::

        import os
        from werkzeug.wsgi import SharedDataMiddleware

        app = SharedDataMiddleware(app, {
            '/shared': os.path.join(os.path.dirname(__file__), 'shared')
        })

    The contents of the folder ``./shared`` will now be available on
    ``http://example.com/shared/``.  This is pretty useful during development
    because a standalone media server is not required.  One can also mount
    files on the root folder and still continue to use the application because
    the shared data middleware forwards all unhandled requests to the
    application, even if the requests are below one of the shared folders.

    If `pkg_resources` is available you can also tell the middleware to serve
    files from package data::

        app = SharedDataMiddleware(app, {
            '/shared': ('myapplication', 'shared_files')
        })

    This will then serve the ``shared_files`` folder in the `myapplication`
    Python package.

    The optional `disallow` parameter can be a list of :func:`~fnmatch.fnmatch`
    rules for files that are not accessible from the web.  If `cache` is set to
    `False` no caching headers are sent.

    Currently the middleware does not support non ASCII filenames.  If the
    encoding on the file system happens to be the encoding of the URI it may
    work but this could also be by accident.  We strongly suggest using ASCII
    only file names for static files.

    The middleware will guess the mimetype using the Python `mimetype`
    module.  If it's unable to figure out the charset it will fall back
    to `fallback_mimetype`.

    .. versionchanged:: 0.5
       The cache timeout is configurable now.

    .. versionadded:: 0.6
       The `fallback_mimetype` parameter was added.

    :param app: the application to wrap.  If you don't want to wrap an
                application you can pass it :exc:`NotFound`.
    :param exports: a dict of exported files and folders.
    :param disallow: a list of :func:`~fnmatch.fnmatch` rules.
    :param fallback_mimetype: the fallback mimetype for unknown files.
    :param cache: enable or disable caching headers.
    :param cache_timeout: the cache timeout in seconds for the headers.
    """

    def __init__(self, app, exports, disallow=None, cache=True,
                 cache_timeout=60 * 60 * 12, fallback_mimetype='text/plain'):
        self.app = app
        self.exports = {}
        self.cache = cache
        self.cache_timeout = cache_timeout
        # Each export value becomes a loader callable mapping a request
        # path to ``(basename, opener)`` or ``(None, None)`` on miss.
        for key, value in iteritems(exports):
            if isinstance(value, tuple):
                # (package, package_path) -> serve package data
                loader = self.get_package_loader(*value)
            elif isinstance(value, string_types):
                if os.path.isfile(value):
                    loader = self.get_file_loader(value)
                else:
                    loader = self.get_directory_loader(value)
            else:
                raise TypeError('unknown def %r' % value)
            self.exports[key] = loader
        if disallow is not None:
            from fnmatch import fnmatch
            # NOTE(review): ``disallow`` is used here as a single fnmatch
            # pattern, although the class docstring promises "a list of
            # rules" -- passing a list would not behave as documented;
            # confirm intended usage before relying on it.
            self.is_allowed = lambda x: not fnmatch(x, disallow)
        self.fallback_mimetype = fallback_mimetype

    def is_allowed(self, filename):
        """Subclasses can override this method to disallow the access to
        certain files.  However by providing `disallow` in the constructor
        this method is overwritten.
        """
        return True

    def _opener(self, filename):
        # Deferred opener: a callable returning the open binary file,
        # its modification time (UTC datetime) and its size in bytes.
        return lambda: (
            open(filename, 'rb'),
            datetime.utcfromtimestamp(os.path.getmtime(filename)),
            int(os.path.getsize(filename))
        )

    def get_file_loader(self, filename):
        """Loader for a single exported file; the requested sub-path is
        ignored.
        """
        return lambda x: (os.path.basename(filename), self._opener(filename))

    def get_package_loader(self, package, package_path):
        """Loader serving files from a package's data via `pkg_resources`."""
        from pkg_resources import DefaultProvider, ResourceManager, \
            get_provider
        loadtime = datetime.utcnow()
        provider = get_provider(package)
        manager = ResourceManager()
        # Filesystem-bound providers expose real paths; zipped packages
        # only expose streams (no mtime/size -- see fallback below).
        filesystem_bound = isinstance(provider, DefaultProvider)

        def loader(path):
            if path is None:
                return None, None
            path = posixpath.join(package_path, path)
            if not provider.has_resource(path):
                return None, None
            basename = posixpath.basename(path)
            if filesystem_bound:
                return basename, self._opener(
                    provider.get_resource_filename(manager, path))
            # No real file: report import time as mtime and size 0.
            return basename, lambda: (
                provider.get_resource_stream(manager, path),
                loadtime,
                0
            )
        return loader

    def get_directory_loader(self, directory):
        """Loader serving any regular file below ``directory``."""
        def loader(path):
            if path is not None:
                path = os.path.join(directory, path)
            else:
                path = directory
            if os.path.isfile(path):
                return os.path.basename(path), self._opener(path)
            return None, None
        return loader

    def generate_etag(self, mtime, file_size, real_filename):
        # Weak content fingerprint from mtime, size and an adler32 of the
        # file name; cheap but sufficient for cache validation here.
        if not isinstance(real_filename, bytes):
            real_filename = real_filename.encode(sys.getfilesystemencoding())
        return 'wzsdm-%d-%s-%s' % (
            mktime(mtime.timetuple()),
            file_size,
            adler32(real_filename) & 0xffffffff
        )

    def __call__(self, environ, start_response):
        cleaned_path = get_path_info(environ)
        if PY2:
            cleaned_path = cleaned_path.encode(sys.getfilesystemencoding())
        # sanitize the path for non unix systems
        cleaned_path = cleaned_path.strip('/')
        for sep in os.sep, os.altsep:
            if sep and sep != '/':
                cleaned_path = cleaned_path.replace(sep, '/')
        # Drop empty and '..' segments to prevent path traversal.
        path = '/'.join([''] + [x for x in cleaned_path.split('/')
                                if x and x != '..'])
        file_loader = None
        for search_path, loader in iteritems(self.exports):
            if search_path == path:
                # Exact match on the export key (single-file exports).
                real_filename, file_loader = loader(None)
                if file_loader is not None:
                    break
            if not search_path.endswith('/'):
                search_path += '/'
            if path.startswith(search_path):
                real_filename, file_loader = loader(path[len(search_path):])
                if file_loader is not None:
                    break
        if file_loader is None or not self.is_allowed(real_filename):
            # Not one of ours (or blocked): forward to the wrapped app.
            return self.app(environ, start_response)
        guessed_type = mimetypes.guess_type(real_filename)
        mime_type = guessed_type[0] or self.fallback_mimetype
        f, mtime, file_size = file_loader()
        headers = [('Date', http_date())]
        if self.cache:
            timeout = self.cache_timeout
            etag = self.generate_etag(mtime, file_size, real_filename)
            headers += [
                ('Etag', '"%s"' % etag),
                ('Cache-Control', 'max-age=%d, public' % timeout)
            ]
            if not is_resource_modified(environ, etag, last_modified=mtime):
                f.close()
                start_response('304 Not Modified', headers)
                return []
            headers.append(('Expires', http_date(time() + timeout)))
        else:
            headers.append(('Cache-Control', 'public'))
        headers.extend((
            ('Content-Type', mime_type),
            ('Content-Length', str(file_size)),
            ('Last-Modified', http_date(mtime))
        ))
        start_response('200 OK', headers)
        return wrap_file(environ, f)
class DispatcherMiddleware(object):
    """Allows one to mount middlewares or applications in a WSGI application.
    This is useful if you want to combine multiple WSGI applications::

        app = DispatcherMiddleware(app, {
            '/app2':        app2,
            '/app3':        app3
        })

    Requests are dispatched to the application mounted at the longest
    matching path prefix; everything else goes to the wrapped ``app``.
    """

    def __init__(self, app, mounts=None):
        self.app = app
        self.mounts = mounts or {}

    def __call__(self, environ, start_response):
        # Peel trailing segments off the path until it matches a mount
        # point, so the longest registered prefix wins.
        mount = environ.get('PATH_INFO', '')
        remainder = ''
        while '/' in mount:
            if mount in self.mounts:
                app = self.mounts[mount]
                break
            mount, tail = mount.rsplit('/', 1)
            remainder = '/%s%s' % (tail, remainder)
        else:
            # Nothing matched along the way; fall back to a root mount
            # (empty prefix) or the wrapped application.
            app = self.mounts.get(mount, self.app)
        environ['SCRIPT_NAME'] = environ.get('SCRIPT_NAME', '') + mount
        environ['PATH_INFO'] = remainder
        return app(environ, start_response)
@implements_iterator
class ClosingIterator(object):
    """Wraps a WSGI iterable so that additional cleanup callbacks run when
    the iterator is closed.  The WSGI specification requires middlewares
    and gateways to respect the ``close`` callback of an iterator, and
    writing a custom iterator for every extra close action is tedious::

        return ClosingIterator(app(environ, start_response), [cleanup_session,
                                                              cleanup_locals])

    A single callable may be passed instead of a list.  A closing iterator
    is not needed if the application uses response objects and finishes
    the processing if the response is started::

        try:
            return response(environ, start_response)
        finally:
            cleanup_session()
            cleanup_locals()
    """

    def __init__(self, iterable, callbacks=None):
        wrapped = iter(iterable)
        self._next = partial(next, wrapped)
        # Normalize ``callbacks`` into a private list we may mutate.
        if callbacks is None:
            pending = []
        elif callable(callbacks):
            pending = [callbacks]
        else:
            pending = list(callbacks)
        # The wrapped iterator's own close (if any) must run first.
        own_close = getattr(wrapped, 'close', None)
        if own_close:
            pending.insert(0, own_close)
        self._callbacks = pending

    def __iter__(self):
        return self

    def __next__(self):
        return self._next()

    def close(self):
        for callback in self._callbacks:
            callback()
def wrap_file(environ, file, buffer_size=8192):
    """Wraps a file.  Uses the WSGI server's file wrapper when the server
    provides one (``wsgi.file_wrapper``), falling back to the generic
    :class:`FileWrapper` otherwise.

    .. versionadded:: 0.5

    If the file wrapper from the WSGI server is used it's important to not
    iterate over it from inside the application but to pass it through
    unchanged.  If you want to pass out a file wrapper inside a response
    object you have to set :attr:`~BaseResponse.direct_passthrough` to `True`.

    More information about file wrappers are available in :pep:`333`.

    :param file: a :class:`file`-like object with a :meth:`~file.read` method.
    :param buffer_size: number of bytes for one iteration.
    """
    wrapper = environ.get('wsgi.file_wrapper', FileWrapper)
    return wrapper(file, buffer_size)
@implements_iterator
class FileWrapper(object):
    """Turns a :class:`file`-like object into an iterable yielding
    `buffer_size` blocks until the file is exhausted.

    You should not use this class directly but rather use the
    :func:`wrap_file` function that uses the WSGI server's file wrapper
    support if it's available.

    .. versionadded:: 0.5

    If you're using this object together with a :class:`BaseResponse` you have
    to use the `direct_passthrough` mode.

    :param file: a :class:`file`-like object with a :meth:`~file.read` method.
    :param buffer_size: number of bytes for one iteration.
    """

    def __init__(self, file, buffer_size=8192):
        self.file = file
        self.buffer_size = buffer_size

    def close(self):
        # Not every file-like object supports close().
        close = getattr(self.file, 'close', None)
        if close is not None:
            close()

    def __iter__(self):
        return self

    def __next__(self):
        chunk = self.file.read(self.buffer_size)
        if not chunk:
            raise StopIteration()
        return chunk
def _make_chunk_iter(stream, limit, buffer_size):
    """Helper for the line and chunk iter functions."""
    if isinstance(stream, (bytes, bytearray, text_type)):
        raise TypeError('Passed a string or byte object instead of '
                        'true iterator or stream.')
    if not hasattr(stream, 'read'):
        # Already an iterable of chunks; forward it, skipping empties.
        for chunk in stream:
            if chunk:
                yield chunk
        return
    # Cap a raw stream at ``limit`` bytes unless it is already capped.
    if limit is not None and not isinstance(stream, LimitedStream):
        stream = LimitedStream(stream, limit)
    reader = stream.read
    while True:
        chunk = reader(buffer_size)
        if not chunk:
            return
        yield chunk
def make_line_iter(stream, limit=None, buffer_size=10 * 1024):
    """Safely iterates line-based over an input stream.  If the input stream
    is not a :class:`LimitedStream` the `limit` parameter is mandatory.

    This uses the stream's :meth:`~file.read` method internally as opposite
    to the :meth:`~file.readline` method that is unsafe and can only be used
    in violation of the WSGI specification.  The same problem applies to the
    `__iter__` function of the input stream which calls :meth:`~file.readline`
    without arguments.

    If you need line-by-line processing it's strongly recommended to iterate
    over the input stream using this helper function.

    .. versionchanged:: 0.8
       This function now ensures that the limit was reached.

    .. versionadded:: 0.9
       added support for iterators as input stream.

    :param stream: the stream or iterate to iterate over.
    :param limit: the limit in bytes for the stream.  (Usually
                  content length.  Not necessary if the `stream`
                  is a :class:`LimitedStream`.
    :param buffer_size: The optional buffer size.
    """
    _iter = _make_chunk_iter(stream, limit, buffer_size)

    # Peek at the first chunk to decide between text and bytes mode; all
    # sentinel literals below are built in the matching type.
    first_item = next(_iter, '')
    if not first_item:
        return
    s = make_literal_wrapper(first_item)
    empty = s('')
    cr = s('\r')
    lf = s('\n')
    crlf = s('\r\n')
    # Push the probed chunk back onto the front of the iterator.
    _iter = chain((first_item,), _iter)

    def _iter_basic_lines():
        # Yields lines terminated inside a chunk; a line straddling a
        # chunk boundary is carried over in ``buffer`` until its end
        # arrives in a later chunk.
        _join = empty.join
        buffer = []
        while 1:
            new_data = next(_iter, '')
            if not new_data:
                break
            new_buf = []
            for item in chain(buffer, new_data.splitlines(True)):
                new_buf.append(item)
                if item and item[-1:] in crlf:
                    yield _join(new_buf)
                    new_buf = []
            buffer = new_buf
        if buffer:
            yield _join(buffer)

    # This hackery is necessary to merge 'foo\r' and '\n' into one item
    # of 'foo\r\n' if we were unlucky and we hit a chunk boundary.
    previous = empty
    for item in _iter_basic_lines():
        if item == lf and previous[-1:] == cr:
            previous += item
            item = empty
        if previous:
            yield previous
        previous = item
    if previous:
        yield previous
def make_chunk_iter(stream, separator, limit=None, buffer_size=10 * 1024):
    """Works like :func:`make_line_iter` but accepts a separator
    which divides chunks.  If you want newline based processing
    you should use :func:`make_line_iter` instead as it
    supports arbitrary newline markers.

    .. versionadded:: 0.8

    .. versionadded:: 0.9
       added support for iterators as input stream.

    :param stream: the stream or iterate to iterate over.
    :param separator: the separator that divides chunks.
    :param limit: the limit in bytes for the stream.  (Usually
                  content length.  Not necessary if the `stream`
                  is otherwise already limited).
    :param buffer_size: The optional buffer size.
    """
    _iter = _make_chunk_iter(stream, limit, buffer_size)

    # Peek at the first chunk to decide between text and bytes mode.
    first_item = next(_iter, '')
    if not first_item:
        return
    _iter = chain((first_item,), _iter)
    if isinstance(first_item, text_type):
        separator = to_unicode(separator)
        # The capturing group keeps the separator itself in the split
        # result so it can act as an explicit chunk delimiter below.
        _split = re.compile(r'(%s)' % re.escape(separator)).split
        _join = u''.join
    else:
        separator = to_bytes(separator)
        _split = re.compile(b'(' + re.escape(separator) + b')').split
        _join = b''.join

    buffer = []
    while 1:
        new_data = next(_iter, '')
        if not new_data:
            break
        chunks = _split(new_data)
        new_buf = []
        for item in chain(buffer, chunks):
            if item == separator:
                # Separator hit: everything gathered so far is one chunk.
                yield _join(new_buf)
                new_buf = []
            else:
                new_buf.append(item)
        # A partial chunk may continue in the next read.
        buffer = new_buf
    if buffer:
        yield _join(buffer)
@implements_iterator
class LimitedStream(object):
    """Wraps a stream so that it doesn't read more than n bytes.  If the
    stream is exhausted and the caller tries to get more bytes from it
    :func:`on_exhausted` is called which by default returns an empty
    string.  The return value of that function is forwarded
    to the reader function.  So if it returns an empty string
    :meth:`read` will return an empty string as well.

    The limit however must never be higher than what the stream can
    output.  Otherwise :meth:`readlines` will try to read past the
    limit.

    .. admonition:: Note on WSGI compliance

       calls to :meth:`readline` and :meth:`readlines` are not
       WSGI compliant because it passes a size argument to the
       readline methods.  Unfortunately the WSGI PEP is not safely
       implementable without a size argument to :meth:`readline`
       because there is no EOF marker in the stream.  As a result
       of that the use of :meth:`readline` is discouraged.

       For the same reason iterating over the :class:`LimitedStream`
       is not portable.  It internally calls :meth:`readline`.

       We strongly suggest using :meth:`read` only or using the
       :func:`make_line_iter` which safely iterates line-based
       over a WSGI input stream.

    :param stream: the stream to wrap.
    :param limit: the limit for the stream, must not be longer than
                  what the string can provide if the stream does not
                  end with `EOF` (like `wsgi.input`)
    """

    def __init__(self, stream, limit):
        # Only the bound read/readline callables are kept; the wrapped
        # stream itself is not reachable through this object.
        self._read = stream.read
        self._readline = stream.readline
        # Number of bytes handed out so far.
        self._pos = 0
        self.limit = limit

    def __iter__(self):
        return self

    @property
    def is_exhausted(self):
        """If the stream is exhausted this attribute is `True`."""
        return self._pos >= self.limit

    def on_exhausted(self):
        """This is called when the stream tries to read past the limit.
        The return value of this function is returned from the reading
        function.
        """
        # Read null bytes from the stream so that we get the
        # correct end of stream marker.
        return self._read(0)

    def on_disconnect(self):
        """What should happen if a disconnect is detected?  The return
        value of this function is returned from read functions in case
        the client went away.  By default a
        :exc:`~werkzeug.exceptions.ClientDisconnected` exception is raised.
        """
        from werkzeug.exceptions import ClientDisconnected
        raise ClientDisconnected()

    def exhaust(self, chunk_size=1024 * 64):
        """Exhaust the stream.  This consumes all the data left until the
        limit is reached.

        :param chunk_size: the size for a chunk.  It will read the chunk
                           until the stream is exhausted and throw away
                           the results.
        """
        to_read = self.limit - self._pos
        chunk = chunk_size
        while to_read > 0:
            # The last chunk may be smaller than chunk_size.
            chunk = min(to_read, chunk)
            self.read(chunk)
            to_read -= chunk

    def read(self, size=None):
        """Read `size` bytes or if size is not provided everything is read.

        :param size: the number of bytes read.
        """
        if self._pos >= self.limit:
            return self.on_exhausted()
        if size is None or size == -1:  # -1 is for consistence with file
            size = self.limit
        to_read = min(self.limit - self._pos, size)
        try:
            read = self._read(to_read)
        except (IOError, ValueError):
            return self.on_disconnect()
        if to_read and len(read) != to_read:
            # A short read means the client stopped sending before the
            # announced content length was reached.
            return self.on_disconnect()
        self._pos += len(read)
        return read

    def readline(self, size=None):
        """Reads one line from the stream."""
        if self._pos >= self.limit:
            return self.on_exhausted()
        if size is None:
            size = self.limit - self._pos
        else:
            size = min(size, self.limit - self._pos)
        try:
            line = self._readline(size)
        except (ValueError, IOError):
            return self.on_disconnect()
        if size and not line:
            return self.on_disconnect()
        self._pos += len(line)
        return line

    def readlines(self, size=None):
        """Reads a file into a list of strings.  It calls :meth:`readline`
        until the file is read to the end.  It does support the optional
        `size` argument if the underlaying stream supports it for
        `readline`.
        """
        last_pos = self._pos
        result = []
        if size is not None:
            end = min(self.limit, last_pos + size)
        else:
            end = self.limit
        while 1:
            if size is not None:
                # NOTE(review): ``last_pos`` is resynchronized to
                # ``self._pos`` at the bottom of every iteration, so this
                # subtraction always evaluates to zero and ``size`` never
                # actually shrinks.  Looks suspicious; confirm intended
                # behaviour before changing.
                size -= last_pos - self._pos
            if self._pos >= end:
                break
            result.append(self.readline(size))
            if size is not None:
                last_pos = self._pos
        return result

    def tell(self):
        """Returns the position of the stream.

        .. versionadded:: 0.9
        """
        return self._pos

    def __next__(self):
        line = self.readline()
        if not line:
            raise StopIteration()
        return line
| apache-2.0 |
guiquanz/googletest | test/gtest_xml_output_unittest.py | 1815 | 14580 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import datetime
import errno
import os
import re
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
# Command-line flags understood by the Google Test binary under test.
GTEST_FILTER_FLAG = '--gtest_filter'
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
GTEST_OUTPUT_FLAG = "--gtest_output"
# Name of the XML file gtest writes when no file name is given explicitly.
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
# Base name of the helper binary whose XML output is validated below.
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"

# True on configurations where gtest embeds stack traces in failure
# messages; the expected XML templates include a matching placeholder.
SUPPORTS_STACK_TRACES = False

if SUPPORTS_STACK_TRACES:
  STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
else:
  STACK_TRACE_TEMPLATE = ''
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="Fails" status="run" time="*" classname="FailedTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
<testcase name="Fails" status="run" time="*" classname="MixedResultTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2%(stack)s]]></failure>
</testcase>
<testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
<testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*" SetUpTestCase="yes" TearDownTestCase="aye">
<testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
<testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
<testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
<testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
<testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
<testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
<testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
</testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}
EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0"
errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
</testsuites>"""
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests">
</testsuites>"""
GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
[GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
  """
  Unit test for Google Test's XML output functionality.
  """

  # This test currently breaks on platforms that do not support typed and
  # type-parameterized tests, so we don't run it under them.
  if SUPPORTS_TYPED_TESTS:

    def testNonEmptyXmlOutput(self):
      """
      Runs a test program that generates a non-empty XML output, and
      tests that the XML output is expected.
      """
      self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)

  def testEmptyXmlOutput(self):
    """Verifies XML output for a Google Test binary without actual tests.

    Runs a test program that generates an empty XML output, and
    tests that the XML output is expected.
    """
    self._TestXmlOutput('gtest_no_test_unittest', EXPECTED_EMPTY_XML, 0)

  def testTimestampValue(self):
    """Checks whether the timestamp attribute in the XML output is valid.

    Runs a test program that generates an empty XML output, and checks if
    the timestamp attribute in the testsuites tag is valid.
    """
    actual = self._GetXmlOutput('gtest_no_test_unittest', [], 0)
    date_time_str = actual.documentElement.getAttributeNode('timestamp').value
    # datetime.strptime() is only available in Python 2.5+ so we have to
    # parse the expected datetime manually.
    match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
    # Bug fix: assert on the match result, not on the re.match function
    # itself -- a function object is always truthy, so the original check
    # could never fail even for a malformed timestamp.
    self.assertTrue(
        match,
        'XML datetime string %s has incorrect format' % date_time_str)
    date_time_from_xml = datetime.datetime(
        year=int(match.group(1)), month=int(match.group(2)),
        day=int(match.group(3)), hour=int(match.group(4)),
        minute=int(match.group(5)), second=int(match.group(6)))
    time_delta = abs(datetime.datetime.now() - date_time_from_xml)
    # timestamp value should be near the current local time
    self.assertTrue(time_delta < datetime.timedelta(seconds=600),
                    'time_delta is %s' % time_delta)
    actual.unlink()

  def testDefaultOutputFile(self):
    """
    Confirms that Google Test produces an XML output file with the expected
    default name if no name is explicitly specified.
    """
    output_file = os.path.join(gtest_test_utils.GetTempDir(),
                               GTEST_DEFAULT_OUTPUT_FILE)
    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
        'gtest_no_test_unittest')
    try:
      os.remove(output_file)
    except OSError as e:  # 'as' form is valid on Python 2.6+ and 3.x.
      # A missing file is fine; anything else is a real error.
      if e.errno != errno.ENOENT:
        raise
    p = gtest_test_utils.Subprocess(
        [gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG],
        working_dir=gtest_test_utils.GetTempDir())
    self.assert_(p.exited)
    self.assertEquals(0, p.exit_code)
    self.assert_(os.path.isfile(output_file))

  def testSuppressedXmlOutput(self):
    """
    Tests that no XML file is generated if the default XML listener is
    shut down before RUN_ALL_TESTS is invoked.
    """
    xml_path = os.path.join(gtest_test_utils.GetTempDir(),
                            GTEST_PROGRAM_NAME + 'out.xml')
    if os.path.isfile(xml_path):
      os.remove(xml_path)
    command = [GTEST_PROGRAM_PATH,
               '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
               '--shut_down_xml']
    p = gtest_test_utils.Subprocess(command)
    if p.terminated_by_signal:
      # p.signal is available only if p.terminated_by_signal is True.
      self.assertFalse(
          p.terminated_by_signal,
          '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
    else:
      self.assert_(p.exited)
      self.assertEquals(1, p.exit_code,
                        "'%s' exited with code %s, which doesn't match "
                        'the expected exit code %s.'
                        % (command, p.exit_code, 1))
    self.assert_(not os.path.isfile(xml_path))

  def testFilteredTestXmlOutput(self):
    """Verifies XML output when a filter is applied.

    Runs a test program that executes only some tests and verifies that
    non-selected tests do not show up in the XML output.
    """
    self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED_TEST_XML, 0,
                        extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])

  def _GetXmlOutput(self, gtest_prog_name, extra_args, expected_exit_code):
    """
    Returns the xml output generated by running the program gtest_prog_name.
    Furthermore, the program's exit code must be expected_exit_code.
    """
    xml_path = os.path.join(gtest_test_utils.GetTempDir(),
                            gtest_prog_name + 'out.xml')
    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
    command = ([gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)] +
               extra_args)
    p = gtest_test_utils.Subprocess(command)
    if p.terminated_by_signal:
      self.assert_(False,
                   '%s was killed by signal %d' % (gtest_prog_name, p.signal))
    else:
      self.assert_(p.exited)
      self.assertEquals(expected_exit_code, p.exit_code,
                        "'%s' exited with code %s, which doesn't match "
                        'the expected exit code %s.'
                        % (command, p.exit_code, expected_exit_code))
    actual = minidom.parse(xml_path)
    return actual

  def _TestXmlOutput(self, gtest_prog_name, expected_xml,
                     expected_exit_code, extra_args=None):
    """
    Asserts that the XML document generated by running the program
    gtest_prog_name matches expected_xml, a string containing another
    XML document.  Furthermore, the program's exit code must be
    expected_exit_code.
    """
    actual = self._GetXmlOutput(gtest_prog_name, extra_args or [],
                                expected_exit_code)
    expected = minidom.parseString(expected_xml)
    self.NormalizeXml(actual.documentElement)
    self.AssertEquivalentNodes(expected.documentElement,
                               actual.documentElement)
    expected.unlink()
    actual.unlink()
if __name__ == '__main__':
  # Limit stack-trace depth so the failure messages embedded in the
  # expected XML (built from STACK_TRACE_TEMPLATE) stay predictable.
  os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
  gtest_test_utils.Main()
| bsd-3-clause |
gnieboer/tensorflow | tensorflow/python/ops/distributions/student_t.py | 72 | 12995 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Student's t distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import special_math_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import util as distribution_util
# Public API of this module.
__all__ = [
    "StudentT",
    "StudentTWithAbsDfSoftplusScale",
]
class StudentT(distribution.Distribution):
  """Student's t-distribution.

  This distribution has parameters: degree of freedom `df`, location `loc`,
  and `scale`.

  #### Mathematical details

  The probability density function (pdf) is,

  ```none
  pdf(x; df, mu, sigma) = (1 + y**2 / df)**(-0.5 (df + 1)) / Z
  where,
  y = (x - mu) / sigma
  Z = abs(sigma) sqrt(df pi) Gamma(0.5 df) / Gamma(0.5 (df + 1))
  ```

  where:
  * `loc = mu`,
  * `scale = sigma`, and,
  * `Z` is the normalization constant, and,
  * `Gamma` is the [gamma function](
    https://en.wikipedia.org/wiki/Gamma_function).

  The StudentT distribution is a member of the [location-scale family](
  https://en.wikipedia.org/wiki/Location-scale_family), i.e., it can be
  constructed as,

  ```none
  X ~ StudentT(df, loc=0, scale=1)
  Y = loc + scale * X
  ```

  Notice that `scale` has semantics more similar to standard deviation than
  variance. However it is not actually the std. deviation; the Student's
  t-distribution std. dev. is `scale sqrt(df / (df - 2))` when `df > 2`.

  #### Examples

  Examples of initialization of one or a batch of distributions.

  ```python
  # Define a single scalar Student t distribution.
  single_dist = tf.distributions.StudentT(df=3)

  # Evaluate the pdf at 1, returning a scalar Tensor.
  single_dist.prob(1.)

  # Define a batch of two scalar valued Student t's.
  # The first has degrees of freedom 2, mean 1, and scale 11.
  # The second 3, 2 and 22.
  multi_dist = tf.distributions.StudentT(df=[2, 3],
                                         loc=[1, 2.],
                                         scale=[11, 22.])

  # Evaluate the pdf of the first distribution on 0, and the second on 1.5,
  # returning a length two tensor.
  multi_dist.prob([0, 1.5])

  # Get 3 samples, returning a 3 x 2 tensor.
  multi_dist.sample(3)
  ```

  Arguments are broadcast when possible.

  ```python
  # Define a batch of two Student's t distributions.
  # Both have df 2 and mean 1, but different scales.
  dist = tf.distributions.StudentT(df=2, loc=1, scale=[11, 22.])

  # Evaluate the pdf of both distributions on the same point, 3.0,
  # returning a length 2 tensor.
  dist.prob(3.0)
  ```
  """
  # pylint: enable=line-too-long

  def __init__(self,
               df,
               loc,
               scale,
               validate_args=False,
               allow_nan_stats=True,
               name="StudentT"):
    """Construct Student's t distributions.

    The distributions have degree of freedom `df`, mean `loc`, and scale
    `scale`.

    The parameters `df`, `loc`, and `scale` must be shaped in a way that
    supports broadcasting (e.g. `df + loc + scale` is a valid operation).

    Args:
      df: Floating-point `Tensor`. The degrees of freedom of the
        distribution(s). `df` must contain only positive values.
      loc: Floating-point `Tensor`. The mean(s) of the distribution(s).
      scale: Floating-point `Tensor`. The scaling factor(s) for the
        distribution(s). Note that `scale` is not technically the standard
        deviation of this distribution but has semantics more similar to
        standard deviation than variance.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`,
        statistics (e.g., mean, mode, variance) use the value "`NaN`" to
        indicate the result is undefined. When `False`, an exception is raised
        if one or more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.

    Raises:
      TypeError: if loc and scale are different dtypes.
    """
    parameters = locals()
    with ops.name_scope(name, values=[df, loc, scale]):
      # Only pay for the positivity assertion when validation is requested.
      with ops.control_dependencies([check_ops.assert_positive(df)]
                                    if validate_args else []):
        self._df = array_ops.identity(df, name="df")
        self._loc = array_ops.identity(loc, name="loc")
        self._scale = array_ops.identity(scale, name="scale")
        check_ops.assert_same_float_dtype(
            (self._df, self._loc, self._scale))
    super(StudentT, self).__init__(
        dtype=self._scale.dtype,
        reparameterization_type=distribution.NOT_REPARAMETERIZED,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        graph_parents=[self._df, self._loc, self._scale],
        name=name)

  @staticmethod
  def _param_shapes(sample_shape):
    # All three parameters share the sample shape.
    return dict(
        zip(("df", "loc", "scale"), (
            [ops.convert_to_tensor(
                sample_shape, dtype=dtypes.int32)] * 3)))

  @property
  def df(self):
    """Degrees of freedom in these Student's t distribution(s)."""
    return self._df

  @property
  def loc(self):
    """Locations of these Student's t distribution(s)."""
    return self._loc

  @property
  def scale(self):
    """Scaling factors of these Student's t distribution(s)."""
    return self._scale

  def _batch_shape_tensor(self):
    return array_ops.broadcast_dynamic_shape(
        array_ops.shape(self.df),
        array_ops.broadcast_dynamic_shape(
            array_ops.shape(self.loc), array_ops.shape(self.scale)))

  def _batch_shape(self):
    return array_ops.broadcast_static_shape(
        array_ops.broadcast_static_shape(self.df.get_shape(),
                                         self.loc.get_shape()),
        self.scale.get_shape())

  def _event_shape_tensor(self):
    # Scalar event shape. Bug fix: the dtype constant lives in `dtypes`;
    # the original referenced `math_ops.int32`, which `math_ops` does not
    # export and would raise AttributeError when this method runs.
    return constant_op.constant([], dtype=dtypes.int32)

  def _event_shape(self):
    return tensor_shape.scalar()

  def _sample_n(self, n, seed=None):
    # The sampling method comes from the fact that if:
    #   X ~ Normal(0, 1)
    #   Z ~ Chi2(df)
    #   Y = X / sqrt(Z / df)
    # then:
    #   Y ~ StudentT(df).
    shape = array_ops.concat([[n], self.batch_shape_tensor()], 0)
    normal_sample = random_ops.random_normal(shape, dtype=self.dtype, seed=seed)
    df = self.df * array_ops.ones(self.batch_shape_tensor(), dtype=self.dtype)
    # Chi2(df) == Gamma(alpha=0.5*df, beta=0.5).
    gamma_sample = random_ops.random_gamma(
        [n],
        0.5 * df,
        beta=0.5,
        dtype=self.dtype,
        seed=distribution_util.gen_new_seed(seed, salt="student_t"))
    samples = normal_sample * math_ops.rsqrt(gamma_sample / df)
    return samples * self.scale + self.loc  # Abs(scale) not wanted.

  def _log_prob(self, x):
    return self._log_unnormalized_prob(x) - self._log_normalization()

  def _log_unnormalized_prob(self, x):
    y = (x - self.loc) / self.scale  # Abs(scale) superfluous.
    return -0.5 * (self.df + 1.) * math_ops.log1p(y**2. / self.df)

  def _log_normalization(self):
    # log Z = log|sigma| + 0.5 log(df) + 0.5 log(pi)
    #         + lgamma(0.5 df) - lgamma(0.5 (df + 1))
    return (math_ops.log(math_ops.abs(self.scale)) +
            0.5 * math_ops.log(self.df) +
            0.5 * np.log(np.pi) +
            math_ops.lgamma(0.5 * self.df) -
            math_ops.lgamma(0.5 * (self.df + 1.)))

  def _prob(self, x):
    return math_ops.exp(self._log_prob(x))

  def _cdf(self, x):
    # Take Abs(scale) to make subsequent where work correctly.
    y = (x - self.loc) / math_ops.abs(self.scale)
    x_t = self.df / (y**2. + self.df)
    # CDF expressed via the regularized incomplete beta function; use the
    # tail probability for y < 0 and its complement otherwise.
    neg_cdf = 0.5 * math_ops.betainc(0.5 * self.df, 0.5, x_t)
    return array_ops.where(math_ops.less(y, 0.), neg_cdf, 1. - neg_cdf)

  def _entropy(self):
    v = array_ops.ones(self.batch_shape_tensor(),
                       dtype=self.dtype)[..., array_ops.newaxis]
    u = v * self.df[..., array_ops.newaxis]
    beta_arg = array_ops.concat([u, v], -1) / 2.
    return (math_ops.log(math_ops.abs(self.scale)) +
            0.5 * math_ops.log(self.df) +
            special_math_ops.lbeta(beta_arg) +
            0.5 * (self.df + 1.) *
            (math_ops.digamma(0.5 * (self.df + 1.)) -
             math_ops.digamma(0.5 * self.df)))

  @distribution_util.AppendDocstring(
      """The mean of Student's T equals `loc` if `df > 1`, otherwise it is
      `NaN`. If `self.allow_nan_stats=True`, then an exception will be raised
      rather than returning `NaN`.""")
  def _mean(self):
    mean = self.loc * array_ops.ones(self.batch_shape_tensor(),
                                     dtype=self.dtype)
    if self.allow_nan_stats:
      nan = np.array(np.nan, dtype=self.dtype.as_numpy_dtype())
      return array_ops.where(
          math_ops.greater(
              self.df,
              array_ops.ones(self.batch_shape_tensor(), dtype=self.dtype)),
          mean,
          array_ops.fill(self.batch_shape_tensor(), nan, name="nan"))
    else:
      return control_flow_ops.with_dependencies(
          [
              check_ops.assert_less(
                  array_ops.ones([], dtype=self.dtype),
                  self.df,
                  message="mean not defined for components of df <= 1"),
          ],
          mean)

  @distribution_util.AppendDocstring("""
      The variance for Student's T equals

      ```
      df / (df - 2), when df > 2
      infinity, when 1 < df <= 2
      NaN, when df <= 1
      ```
      """)
  def _variance(self):
    # We need to put the tf.where inside the outer tf.where to ensure we never
    # hit a NaN in the gradient.
    denom = array_ops.where(math_ops.greater(self.df, 2.),
                            self.df - 2.,
                            array_ops.ones_like(self.df))
    # Abs(scale) superfluous.
    var = (array_ops.ones(self.batch_shape_tensor(), dtype=self.dtype) *
           math_ops.square(self.scale) * self.df / denom)
    # When 1 < df <= 2, variance is infinite.
    inf = np.array(np.inf, dtype=self.dtype.as_numpy_dtype())
    result_where_defined = array_ops.where(
        self.df > array_ops.fill(self.batch_shape_tensor(), 2.),
        var,
        array_ops.fill(self.batch_shape_tensor(), inf, name="inf"))
    if self.allow_nan_stats:
      nan = np.array(np.nan, dtype=self.dtype.as_numpy_dtype())
      return array_ops.where(
          math_ops.greater(
              self.df,
              array_ops.ones(self.batch_shape_tensor(), dtype=self.dtype)),
          result_where_defined,
          array_ops.fill(self.batch_shape_tensor(), nan, name="nan"))
    else:
      return control_flow_ops.with_dependencies(
          [
              check_ops.assert_less(
                  array_ops.ones([], dtype=self.dtype),
                  self.df,
                  message="variance not defined for components of df <= 1"),
          ],
          result_where_defined)

  def _mode(self):
    # For Student's t the mode coincides with the location parameter.
    return array_ops.identity(self.loc)
class StudentTWithAbsDfSoftplusScale(StudentT):
  """StudentT with `df = floor(abs(df))` and `scale = softplus(scale)`."""

  def __init__(self,
               df,
               loc,
               scale,
               validate_args=False,
               allow_nan_stats=True,
               name="StudentTWithAbsDfSoftplusScale"):
    # Snapshot the raw constructor arguments before any transformation.
    parameters = locals()
    with ops.name_scope(name, values=[df, scale]):
      # Force df to a positive integer value and scale to a positive real.
      positive_int_df = math_ops.floor(math_ops.abs(df))
      positive_scale = nn.softplus(scale, name="softplus_scale")
      super(StudentTWithAbsDfSoftplusScale, self).__init__(
          df=positive_int_df,
          loc=loc,
          scale=positive_scale,
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats,
          name=name)
    self._parameters = parameters
| apache-2.0 |
qistoph/thug | src/ActiveX/modules/SymantecBackupExec.py | 8 | 1609 | # Symantec BackupExec
# CVE-2007-6016,CVE-2007-6017
import logging
log = logging.getLogger("Thug")
def Set_DOWText0(self, val):
    """Setter for the _DOWText0 ActiveX property.

    Stores the value and logs a CVE-2007-6016 exploit attempt when the
    value is longer than the 255-character buffer.
    """
    self.__dict__['_DOWText0'] = val
    if len(val) <= 255:
        return
    log.ThugLogging.log_exploit_event(self._window.url,
                                      "Symantec BackupExec ActiveX",
                                      "Overflow in property _DOWText0",
                                      cve = 'CVE-2007-6016')
def Set_DOWText6(self, val):
    """Setter for the _DOWText6 ActiveX property.

    Stores the value and logs a CVE-2007-6016 exploit attempt when the
    value is longer than the 255-character buffer.
    """
    self.__dict__['_DOWText6'] = val
    if len(val) <= 255:
        return
    log.ThugLogging.log_exploit_event(self._window.url,
                                      "Symantec BackupExec ActiveX",
                                      "Overflow in property _DOWText6",
                                      cve = 'CVE-2007-6016')
def Set_MonthText0(self, val):
    """Setter for the _MonthText0 ActiveX property.

    Stores the value and logs a CVE-2007-6016 exploit attempt when the
    value is longer than the 255-character buffer.
    """
    self.__dict__['_MonthText0'] = val
    if len(val) > 255:
        # Bug fix: the original logged "Overflow in property _MonthText6",
        # misreporting which property actually overflowed.
        log.ThugLogging.log_exploit_event(self._window.url,
                                          "Symantec BackupExec ActiveX",
                                          "Overflow in property _MonthText0",
                                          cve = 'CVE-2007-6016')
def Set_MonthText11(self, val):
    """Setter for the _MonthText11 ActiveX property.

    Stores the value and logs a CVE-2007-6016 exploit attempt when the
    value is longer than the 255-character buffer.
    """
    self.__dict__['_MonthText11'] = val
    if len(val) <= 255:
        return
    log.ThugLogging.log_exploit_event(self._window.url,
                                      "Symantec BackupExec ActiveX",
                                      "Overflow in property _MonthText11",
                                      cve = 'CVE-2007-6016')
def Save(self, a, b):
    """No-op stub for the control's Save method; arguments are ignored."""
    return None
| gpl-2.0 |
mweisman/QGIS | python/plugins/processing/algs/RasterLayerHistogram.py | 6 | 3219 | # -*- coding: utf-8 -*-
"""
***************************************************************************
RasterLayerHistogram.py
---------------------
Date : January 2013
Copyright : (C) 2013 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'January 2013'
__copyright__ = '(C) 2013, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import matplotlib.pyplot as plt
import matplotlib.pylab as lab
from PyQt4.QtCore import *
from qgis.core import *
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.parameters.ParameterNumber import ParameterNumber
from processing.parameters.ParameterRaster import ParameterRaster
from processing.outputs.OutputTable import OutputTable
from processing.outputs.OutputHTML import OutputHTML
from processing.tools import dataobjects
from processing.tools import raster
class RasterLayerHistogram(GeoAlgorithm):
    """Processing algorithm that plots a histogram of a raster layer's
    cell values and also writes the binned counts to a table."""

    # Parameter / output identifiers used by the Processing framework.
    INPUT = 'INPUT'
    PLOT = 'PLOT'
    TABLE = 'TABLE'
    BINS = 'BINS'

    def processAlgorithm(self, progress):
        uri = self.getParameterValue(self.INPUT)
        layer = dataobjects.getObjectFromUri(uri)
        outputplot = self.getOutputValue(self.PLOT)
        outputtable = self.getOutputFromName(self.TABLE)
        values = raster.scanraster(layer, progress)
        nbins = self.getParameterValue(self.BINS)

        # ALERT: this is potentially blocking if the layer is too big
        plt.close()
        # Drop nodata cells (reported as None by scanraster).
        valueslist = []
        for v in values:
            if v is not None:
                valueslist.append(v)
        # Note: `values` is rebound here to the histogram's patch objects;
        # only len(values) (== number of bins) is used below.
        (n, bins, values) = plt.hist(valueslist, nbins)

        fields = [QgsField('CENTER_VALUE', QVariant.Double),
                  QgsField('NUM_ELEM', QVariant.Double)]
        writer = outputtable.getTableWriter(fields)
        # NOTE(review): the first column is written as a "low-high" range
        # string even though the field is named CENTER_VALUE and declared
        # Double -- confirm whether a numeric bin center was intended.
        for i in xrange(len(values)):
            writer.addRecord([str(bins[i]) + '-' + str(bins[i + 1]), n[i]])

        # Save the figure next to the HTML output and embed it via <img>.
        plotFilename = outputplot + '.png'
        lab.savefig(plotFilename)
        f = open(outputplot, 'w')
        f.write('<img src="' + plotFilename + '"/>')
        f.close()

    def defineCharacteristics(self):
        # Name/group shown in the Processing toolbox, plus the algorithm's
        # declared inputs and outputs.
        self.name = 'Raster layer histogram'
        self.group = 'Graphics'
        self.addParameter(ParameterRaster(self.INPUT, 'Input layer'))
        self.addParameter(ParameterNumber(self.BINS, 'Number of bins', 2,
                          None, 10))
        self.addOutput(OutputHTML(self.PLOT, 'Output plot'))
        self.addOutput(OutputTable(self.TABLE, 'Output table'))
| gpl-2.0 |
alanjds/fen | images.py | 1 | 47284 | #!/usr/bin/env python
# Inline SVG document (45x45) for the black bishop chess piece
# (Inkscape-generated "Chess bdt45.svg"; kept verbatim as a string constant).
black_bishop = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess bdt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="16" inkscape:cx="21.47589" inkscape:cy="21.674445" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="977" inkscape:window-height="965" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.5px" gridspacingy="0.5px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1" style="display: inline;">
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 9,36 C 12.385255,35.027671 19.114744,36.430821 22.5,34 C 25.885256,36.430821 32.614745,35.027671 36,36 C 36,36 37.645898,36.541507 39,38 C 38.322949,38.972328 37.354102,38.986164 36,38.5 C 32.614745,37.527672 25.885256,38.958493 22.5,37.5 C 19.114744,38.958493 12.385255,37.527672 9,38.5 C 7.6458978,38.986164 6.6770511,38.972328 6,38 C 7.3541023,36.055343 9,36 9,36 z " id="path4582" sodipodi:nodetypes="ccccccccc"/>
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 15,32 C 17.5,34.5 27.5,34.5 30,32 C 30.5,30.5 30,30 30,30 C 30,27.5 27.5,26 27.5,26 C 33,24.5 33.5,14.5 22.5,10.5 C 11.5,14.5 12,24.5 17.5,26 C 17.5,26 15,27.5 15,30 C 15,30 14.5,30.5 15,32 z " id="path4584" sodipodi:nodetypes="cccccccc"/>
<path sodipodi:type="arc" style="opacity: 1; fill: black; fill-opacity: 1; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path4586" sodipodi:cx="22.5" sodipodi:cy="10" sodipodi:rx="2.5" sodipodi:ry="2.5" d="M 25 10 A 2.5 2.5 0 1 1 20,10 A 2.5 2.5 0 1 1 25 10 z" transform="translate(0, -2)"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 17.5,26 L 27.5,26" id="path4588" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 15,30 L 30,30" id="path4590" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 22.5,15.5 L 22.5,20.5" id="path4592"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 20,18 L 25,18" id="path4594"/>
</g>
</svg>
"""
# Inline SVG document (45x45) for the black king chess piece
# (Inkscape-generated "Chess kdt45.svg"; kept verbatim as a string constant).
black_king = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess kdt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="11.313708" inkscape:cx="50.19508" inkscape:cy="20.61116" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="1272" inkscape:window-height="965" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.5px" gridspacingy="0.5px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1">
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 22.5,11.625 L 22.5,6" id="path6570"/>
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: miter; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 22.5,25 C 22.5,25 27,17.5 25.5,14.5 C 25.5,14.5 24.5,12 22.5,12 C 20.5,12 19.5,14.5 19.5,14.5 C 18,17.5 22.5,25 22.5,25" id="path5683" sodipodi:nodetypes="ccccc"/>
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 11.5,37 C 17,40.5 27,40.5 32.5,37 L 32.5,30 C 32.5,30 41.5,25.5 38.5,19.5 C 34.5,13 25,16 22.5,23.5 L 22.5,27 L 22.5,23.5 C 19,16 9.5,13 6.5,19.5 C 3.5,25.5 11.5,29.5 11.5,29.5 L 11.5,37 z " id="path5681" sodipodi:nodetypes="cccccccccc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 20,8 L 25,8" id="path6572"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 11.5,29.5 C 17,27 27,27 32.5,30" id="path6574" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 11.5,37 C 17,34.5 27,34.5 32.5,37" id="path6576" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 11.5,33.5 C 17,31.5 27,31.5 32.5,33.5" id="path6578" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 1; fill-rule: evenodd; stroke: white; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 32,29.5 C 32,29.5 40.5,25.5 38.025969,19.846552 C 34.147406,13.996552 25,18 22.5,24.5 L 22.511718,26.596552 L 22.5,24.5 C 20,18 9.9063892,13.996552 6.9974672,19.846552 C 4.5,25.5 11.845671,28.846552 11.845671,28.846552" id="path5638" sodipodi:nodetypes="ccccccc"/>
</g>
</svg>
"""
# Inline SVG document (45x45) for the black knight chess piece
# (Inkscape-generated "Chess ndt45.svg"; kept verbatim as a string constant).
black_knight = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess ndt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="8" inkscape:cx="37.995338" inkscape:cy="23.908871" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="977" inkscape:window-height="965" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.05px" gridspacingy="0.05px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1">
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1;" d="M 22,10 C 32.5,11 38.5,18 38,39 L 15,39 C 15,30 25,32.5 23,18" id="path3491" sodipodi:nodetypes="cccc"/>
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1;" d="M 24,18 C 24.384461,20.911278 18.447064,25.368624 16,27 C 13,29 13.180802,31.342892 11,31 C 9.95828,30.055984 12.413429,27.962451 11,28 C 10,28 11.187332,29.231727 10,30 C 9,30 5.9968392,30.999999 6,26 C 6,24 12,14 12,14 C 12,14 13.885866,12.097871 14,10.5 C 13.273953,9.505631 13.5,8.5 13.5,7.5 C 14.5,6.5 16.5,10 16.5,10 L 18.5,10 C 18.5,10 19.281781,8.0080745 21,7 C 22,7 22,10 22,10" id="path3495" sodipodi:nodetypes="csccccccccccc"/>
<path sodipodi:type="arc" style="opacity: 1; fill: black; fill-opacity: 1; stroke: white; stroke-width: 1; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path3499" sodipodi:cx="8.5" sodipodi:cy="23.5" sodipodi:rx="0.5" sodipodi:ry="0.5" d="M 9 23.5 A 0.5 0.5 0 1 1 8,23.5 A 0.5 0.5 0 1 1 9 23.5 z" transform="translate(0.5, 2)"/>
<path sodipodi:type="arc" style="opacity: 1; fill: black; fill-opacity: 1; stroke: white; stroke-width: 1; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path3501" sodipodi:cx="14.5" sodipodi:cy="15.5" sodipodi:rx="0.5" sodipodi:ry="1.5" d="M 15 15.5 A 0.5 1.5 0 1 1 14,15.5 A 0.5 1.5 0 1 1 15 15.5 z" transform="matrix(0.866025, 0.5, -0.5, 0.866025, 9.69263, -5.17339)"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: none; stroke-width: 1; stroke-linecap: square; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 24.55,10.4 L 24.25,11.5 L 24.8,11.6 C 27.901459,12.077147 31.123526,13.834204 33.375,18.09375 C 35.626474,22.353296 36.297157,29.05687 35.8,39 L 35.75,39.5 L 37.5,39.5 L 37.5,39 C 38.002843,28.94313 36.623526,22.146704 34.25,17.65625 C 31.876474,13.165796 28.461041,11.022853 25.0625,10.5 L 24.55,10.4 z " id="path8049" sodipodi:nodetypes="cccsccccscc"/>
</g>
</svg>
"""
# Inline SVG document (45x45) for the black pawn chess piece
# (Inkscape-generated "Chess pdt45.svg"; kept verbatim as a string constant).
black_pawn = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess pdt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="11.2" inkscape:cx="22.89394" inkscape:cy="17.72157" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="977" inkscape:window-height="965" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.5px" gridspacingy="0.5px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1">
<path style="opacity: 1; fill: black; fill-opacity: 1; fill-rule: nonzero; stroke: black; stroke-width: 1; stroke-linecap: round; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-dashoffset: 10; stroke-opacity: 1;" d="M 22 9 C 19.792 9 18 10.792 18 13 C 18 13.885103 18.29397 14.712226 18.78125 15.375 C 16.829274 16.496917 15.5 18.588492 15.5 21 C 15.5 23.033947 16.442042 24.839082 17.90625 26.03125 C 14.907101 27.08912 10.5 31.578049 10.5 39.5 L 33.5 39.5 C 33.5 31.578049 29.092899 27.08912 26.09375 26.03125 C 27.557958 24.839082 28.5 23.033948 28.5 21 C 28.5 18.588492 27.170726 16.496917 25.21875 15.375 C 25.70603 14.712226 26 13.885103 26 13 C 26 10.792 24.208 9 22 9 z " id="path3194"/>
</g>
</svg>
"""
# Inline SVG document (45x45) for the black queen chess piece
# (Inkscape-generated "Chess qdt45.svg"; kept verbatim as a string constant).
black_queen = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess qdt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="16" inkscape:cx="34.028184" inkscape:cy="22.902797" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="1272" inkscape:window-height="965" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.5px" gridspacingy="0.5px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1">
<path sodipodi:type="arc" style="opacity: 1; fill: black; fill-opacity: 1; stroke: black; stroke-width: 1; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path5571" sodipodi:cx="7" sodipodi:cy="13" sodipodi:rx="2" sodipodi:ry="2" d="M 9 13 A 2 2 0 1 1 5,13 A 2 2 0 1 1 9 13 z" transform="translate(-1, -1)"/>
<path sodipodi:type="arc" style="opacity: 1; fill: black; fill-opacity: 1; stroke: black; stroke-width: 1; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path5573" sodipodi:cx="7" sodipodi:cy="13" sodipodi:rx="2" sodipodi:ry="2" d="M 9 13 A 2 2 0 1 1 5,13 A 2 2 0 1 1 9 13 z" transform="translate(15.5, -5.5)"/>
<path sodipodi:type="arc" style="opacity: 1; fill: black; fill-opacity: 1; stroke: black; stroke-width: 1; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path5575" sodipodi:cx="7" sodipodi:cy="13" sodipodi:rx="2" sodipodi:ry="2" d="M 9 13 A 2 2 0 1 1 5,13 A 2 2 0 1 1 9 13 z" transform="translate(32, -1)"/>
<path sodipodi:type="arc" style="opacity: 1; fill: black; fill-opacity: 1; stroke: black; stroke-width: 1; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path5577" sodipodi:cx="7" sodipodi:cy="13" sodipodi:rx="2" sodipodi:ry="2" d="M 9 13 A 2 2 0 1 1 5,13 A 2 2 0 1 1 9 13 z" transform="translate(7, -4.5)"/>
<path sodipodi:type="arc" style="opacity: 1; fill: black; fill-opacity: 1; stroke: black; stroke-width: 1; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path5579" sodipodi:cx="7" sodipodi:cy="13" sodipodi:rx="2" sodipodi:ry="2" d="M 9 13 A 2 2 0 1 1 5,13 A 2 2 0 1 1 9 13 z" transform="translate(24, -4)"/>
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1px; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1;" d="M 9,26 C 17.5,24.5 30,24.5 36,26 L 38,14 L 31,25 L 31,11 L 25.5,24.5 L 22.5,9.5 L 19.5,24.5 L 14,10.5 L 14,25 L 7,14 L 9,26 z " id="path5581" sodipodi:nodetypes="cccccccccccc"/>
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1px; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1;" d="M 9,26 C 9,28 10.5,28 11.5,30 C 12.5,31.5 12.5,31 12,33.5 C 10.5,34.5 10.5,36 10.5,36 C 9,37.5 11,38.5 11,38.5 C 17.5,39.5 27.5,39.5 34,38.5 C 34,38.5 35.5,37.5 34,36 C 34,36 34.5,34.5 33,33.5 C 32.5,31 32.5,31.5 33.5,30 C 34.5,28 36,28 36,26 C 27.5,24.5 17.5,24.5 9,26 z " id="path5583" sodipodi:nodetypes="ccccccccccc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1;" d="M 11.5,30 C 15,29 30,29 33.5,30" id="path5585" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1;" d="M 12,33.5 C 18,32.5 27,32.5 33,33.5" id="path5587" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1;" d="M 10.5,36 C 15.5,35 29,35 34,36" id="path5589" sodipodi:nodetypes="cc"/>
</g>
</svg>
"""
# Inline SVG document (45x45) for the black rook chess piece
# (Inkscape-generated "Chess rdt45.svg"; kept verbatim as a string constant).
black_rook = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess rdt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="16" inkscape:cx="19.736579" inkscape:cy="22.06684" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="977" inkscape:window-height="852" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.5px" gridspacingy="0.5px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1" style="display: inline;">
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 9,39 L 36,39 L 36,36 L 9,36 L 9,39 z " id="path3119" sodipodi:nodetypes="ccccc"/>
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 12.5,32 L 14,29.5 L 31,29.5 L 32.5,32 L 12.5,32 z " id="path3123" sodipodi:nodetypes="ccccc"/>
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 12,36 L 12,32 L 33,32 L 33,36 L 12,36 z " id="path3121" sodipodi:nodetypes="ccccc"/>
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: miter; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 14,29.5 L 14,16.5 L 31,16.5 L 31,29.5 L 14,29.5 z " id="path3125" sodipodi:nodetypes="ccccc"/>
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 14,16.5 L 11,14 L 34,14 L 31,16.5 L 14,16.5 z " id="path3127" sodipodi:nodetypes="ccccc"/>
<path style="fill: black; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 11,14 L 11,9 L 15,9 L 15,11 L 20,11 L 20,9 L 25,9 L 25,11 L 30,11 L 30,9 L 34,9 L 34,14 L 11,14 z " id="path3129" sodipodi:nodetypes="ccccccccccccc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1;" d="M 12,35.5 L 33,35.5 L 33,35.5" id="path4967"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1;" d="M 13,31.5 L 32,31.5" id="path4969"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1;" d="M 14,29.5 L 31,29.5" id="path4971"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1;" d="M 14,16.5 L 31,16.5" id="path5050"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: white; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1;" d="M 11,14 L 34,14" id="path5052"/>
</g>
</svg>
"""
# Inline SVG document (45x45) for the white king chess piece
# (Inkscape-generated "Chess klt45.svg"; kept verbatim as a string constant).
# NOTE(review): unlike its siblings, this literal ends right after </svg>
# with no trailing newline — presumably intentional; confirm before changing.
white_king = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess klt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="11.313708" inkscape:cx="42.721441" inkscape:cy="13.400149" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="1272" inkscape:window-height="965" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.5px" gridspacingy="0.5px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1">
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 22.5,11.625 L 22.5,6" id="path6570"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: miter; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 22.5,25 C 22.5,25 27,17.5 25.5,14.5 C 25.5,14.5 24.5,12 22.5,12 C 20.5,12 19.5,14.5 19.5,14.5 C 18,17.5 22.5,25 22.5,25" id="path5683" sodipodi:nodetypes="ccccc"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 11.5,37 C 17,40.5 27,40.5 32.5,37 L 32.5,30 C 32.5,30 41.5,25.5 38.5,19.5 C 34.5,13 25,16 22.5,23.5 L 22.5,27 L 22.5,23.5 C 19,16 9.5,13 6.5,19.5 C 3.5,25.5 11.5,29.5 11.5,29.5 L 11.5,37 z " id="path5681" sodipodi:nodetypes="cccccccccc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 20,8 L 25,8" id="path6572"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 11.5,29.5 C 17,27 27,27 32.5,30" id="path6574" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 11.5,37 C 17,34.5 27,34.5 32.5,37" id="path6576" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 11.5,33.5 C 17,31.5 27,31.5 32.5,33.5" id="path6578" sodipodi:nodetypes="cc"/>
</g>
</svg>"""
# Inline SVG document (45x45) for the white queen chess piece
# (Inkscape-generated "Chess qlt45.svg"; kept verbatim as a string constant).
white_queen = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess qlt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="16" inkscape:cx="34.028184" inkscape:cy="22.902797" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="1272" inkscape:window-height="965" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.5px" gridspacingy="0.5px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1">
<path sodipodi:type="arc" style="opacity: 1; fill: white; fill-opacity: 1; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path5571" sodipodi:cx="7" sodipodi:cy="13" sodipodi:rx="2" sodipodi:ry="2" d="M 9 13 A 2 2 0 1 1 5,13 A 2 2 0 1 1 9 13 z" transform="translate(-1, -1)"/>
<path sodipodi:type="arc" style="opacity: 1; fill: white; fill-opacity: 1; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path5573" sodipodi:cx="7" sodipodi:cy="13" sodipodi:rx="2" sodipodi:ry="2" d="M 9 13 A 2 2 0 1 1 5,13 A 2 2 0 1 1 9 13 z" transform="translate(15.5, -5.5)"/>
<path sodipodi:type="arc" style="opacity: 1; fill: white; fill-opacity: 1; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path5575" sodipodi:cx="7" sodipodi:cy="13" sodipodi:rx="2" sodipodi:ry="2" d="M 9 13 A 2 2 0 1 1 5,13 A 2 2 0 1 1 9 13 z" transform="translate(32, -1)"/>
<path sodipodi:type="arc" style="opacity: 1; fill: white; fill-opacity: 1; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path5577" sodipodi:cx="7" sodipodi:cy="13" sodipodi:rx="2" sodipodi:ry="2" d="M 9 13 A 2 2 0 1 1 5,13 A 2 2 0 1 1 9 13 z" transform="translate(7, -4.5)"/>
<path sodipodi:type="arc" style="opacity: 1; fill: white; fill-opacity: 1; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path5579" sodipodi:cx="7" sodipodi:cy="13" sodipodi:rx="2" sodipodi:ry="2" d="M 9 13 A 2 2 0 1 1 5,13 A 2 2 0 1 1 9 13 z" transform="translate(24, -4)"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 9,26 C 17.5,24.5 30,24.5 36,26 L 38,14 L 31,25 L 31,11 L 25.5,24.5 L 22.5,9.5 L 19.5,24.5 L 14,10.5 L 14,25 L 7,14 L 9,26 z " id="path5581" sodipodi:nodetypes="cccccccccccc"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 9,26 C 9,28 10.5,28 11.5,30 C 12.5,31.5 12.5,31 12,33.5 C 10.5,34.5 10.5,36 10.5,36 C 9,37.5 11,38.5 11,38.5 C 17.5,39.5 27.5,39.5 34,38.5 C 34,38.5 35.5,37.5 34,36 C 34,36 34.5,34.5 33,33.5 C 32.5,31 32.5,31.5 33.5,30 C 34.5,28 36,28 36,26 C 27.5,24.5 17.5,24.5 9,26 z " id="path5583" sodipodi:nodetypes="ccccccccccc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1;" d="M 11.5,30 C 15,29 30,29 33.5,30" id="path5585" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1;" d="M 12,33.5 C 18,32.5 27,32.5 33,33.5" id="path5587" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1;" d="M 10.5,36 C 15.5,35 29,35 34,36" id="path5589" sodipodi:nodetypes="cc"/>
</g>
</svg>
"""
white_rook = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess rlt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="11.313709" inkscape:cx="25.614263" inkscape:cy="20.600169" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="977" inkscape:window-height="852" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.5px" gridspacingy="0.5px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1" style="display: inline;">
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 9,39 L 36,39 L 36,36 L 9,36 L 9,39 z " id="path3119" sodipodi:nodetypes="ccccc"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 12,36 L 12,32 L 33,32 L 33,36 L 12,36 z " id="path3121" sodipodi:nodetypes="ccccc"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 11,14 L 11,9 L 15,9 L 15,11 L 20,11 L 20,9 L 25,9 L 25,11 L 30,11 L 30,9 L 34,9 L 34,14" id="path3129" sodipodi:nodetypes="cccccccccccc"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 34,14 L 31,17 L 14,17 L 11,14" id="path3127" sodipodi:nodetypes="cccc"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 31,17 L 31,29.500018 L 14,29.500018 L 14,17" id="path3125" sodipodi:nodetypes="cccc"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 31,29.5 L 32.5,32 L 12.5,32 L 14,29.5" id="path3123" sodipodi:nodetypes="cccc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1px; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1;" d="M 11,14 L 34,14" id="path5175"/>
</g>
</svg>
"""
white_bishop = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess blt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="16" inkscape:cx="21.47589" inkscape:cy="21.674445" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="977" inkscape:window-height="965" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.5px" gridspacingy="0.5px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1" style="display: inline;">
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 9,36 C 12.385255,35.027671 19.114744,36.430821 22.5,34 C 25.885256,36.430821 32.614745,35.027671 36,36 C 36,36 37.645898,36.541507 39,38 C 38.322949,38.972328 37.354102,38.986164 36,38.5 C 32.614745,37.527672 25.885256,38.958493 22.5,37.5 C 19.114744,38.958493 12.385255,37.527672 9,38.5 C 7.6458978,38.986164 6.6770511,38.972328 6,38 C 7.3541023,36.055343 9,36 9,36 z " id="path4582" sodipodi:nodetypes="ccccccccc"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 15,32 C 17.5,34.5 27.5,34.5 30,32 C 30.5,30.5 30,30 30,30 C 30,27.5 27.5,26 27.5,26 C 33,24.5 33.5,14.5 22.5,10.5 C 11.5,14.5 12,24.5 17.5,26 C 17.5,26 15,27.5 15,30 C 15,30 14.5,30.5 15,32 z " id="path4584" sodipodi:nodetypes="cccccccc"/>
<path sodipodi:type="arc" style="opacity: 1; fill: white; fill-opacity: 1; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path4586" sodipodi:cx="22.5" sodipodi:cy="10" sodipodi:rx="2.5" sodipodi:ry="2.5" d="M 25 10 A 2.5 2.5 0 1 1 20,10 A 2.5 2.5 0 1 1 25 10 z" transform="translate(0, -2)"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 17.5,26 L 27.5,26" id="path4588" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: butt; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 15,30 L 30,30" id="path4590" sodipodi:nodetypes="cc"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 22.5,15.5 L 22.5,20.5" id="path4592"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" d="M 20,18 L 25,18" id="path4594"/>
</g>
<g inkscape:groupmode="layer" id="layer2" inkscape:label="Layer 1#1" style="display: inline;"/>
</svg>
"""
white_knight = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess nlt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="16" inkscape:cx="24.795834" inkscape:cy="23.520246" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="977" inkscape:window-height="965" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.5px" gridspacingy="0.5px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1">
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 22,10 C 32.5,11 38.5,18 38,39 L 15,39 C 15,30 25,32.5 23,18" id="path3491" sodipodi:nodetypes="cccc"/>
<path style="fill: white; fill-opacity: 1; fill-rule: evenodd; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 24,18 C 24.384461,20.911278 18.447064,25.368624 16,27 C 13,29 13.180802,31.342892 11,31 C 9.95828,30.055984 12.413429,27.962451 11,28 C 10,28 11.187332,29.231727 10,30 C 9,30 5.9968392,30.999999 6,26 C 6,24 12,14 12,14 C 12,14 13.885866,12.097871 14,10.5 C 13.273953,9.505631 13.5,8.5 13.5,7.5 C 14.5,6.5 16.5,10 16.5,10 L 18.5,10 C 18.5,10 19.281781,8.0080745 21,7 C 22,7 22,10 22,10" id="path3495" sodipodi:nodetypes="csccccccccccc"/>
<path sodipodi:type="arc" style="opacity: 1; fill: black; fill-opacity: 1; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path3499" sodipodi:cx="8.5" sodipodi:cy="23.5" sodipodi:rx="0.5" sodipodi:ry="0.5" d="M 9 23.5 A 0.5 0.5 0 1 1 8,23.5 A 0.5 0.5 0 1 1 9 23.5 z" transform="translate(0.5, 2)"/>
<path sodipodi:type="arc" style="opacity: 1; fill: black; fill-opacity: 1; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: round; stroke-miterlimit: 4; stroke-dasharray: none; stroke-opacity: 1;" id="path3501" sodipodi:cx="14.5" sodipodi:cy="15.5" sodipodi:rx="0.5" sodipodi:ry="1.5" d="M 15 15.5 A 0.5 1.5 0 1 1 14,15.5 A 0.5 1.5 0 1 1 15 15.5 z" transform="matrix(0.866025, 0.5, -0.5, 0.866025, 9.69263, -5.17339)"/>
<path style="fill: none; fill-opacity: 0.75; fill-rule: evenodd; stroke: black; stroke-width: 1; stroke-linecap: round; stroke-linejoin: miter; stroke-opacity: 1; stroke-miterlimit: 4; stroke-dasharray: none;" d="M 37,39 C 38,19 31.5,11.5 25,10.5" id="path8049" sodipodi:nodetypes="cc"/>
</g>
</svg>
"""
white_pawn = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" width="45" height="45" id="svg3128" sodipodi:version="0.32" inkscape:version="0.44.1" version="1.0" sodipodi:docbase="/home/cburnett/wikipedia/images/chess" sodipodi:docname="Chess plt45.svg">
<defs id="defs3130"/>
<sodipodi:namedview id="base" pagecolor="#ffffff" bordercolor="#666666" borderopacity="1.0" gridtolerance="10000" guidetolerance="10" objecttolerance="10" inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="11.2" inkscape:cx="22.89394" inkscape:cy="17.72157" inkscape:document-units="px" inkscape:current-layer="layer1" height="45px" width="45px" inkscape:grid-points="true" showgrid="true" inkscape:window-width="977" inkscape:window-height="965" inkscape:window-x="0" inkscape:window-y="31" gridspacingx="0.5px" gridspacingy="0.5px"/>
<metadata id="metadata3133">
<rdf:RDF>
<cc:Work rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
</cc:Work>
</rdf:RDF>
</metadata>
<g inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1">
<path style="opacity: 1; fill: white; fill-opacity: 1; fill-rule: nonzero; stroke: black; stroke-width: 1.5; stroke-linecap: round; stroke-linejoin: miter; stroke-miterlimit: 4; stroke-dasharray: none; stroke-dashoffset: 10; stroke-opacity: 1;" d="M 22 9 C 19.792 9 18 10.792 18 13 C 18 13.885103 18.29397 14.712226 18.78125 15.375 C 16.829274 16.496917 15.5 18.588492 15.5 21 C 15.5 23.033947 16.442042 24.839082 17.90625 26.03125 C 14.907101 27.08912 10.5 31.578049 10.5 39.5 L 33.5 39.5 C 33.5 31.578049 29.092899 27.08912 26.09375 26.03125 C 27.557958 24.839082 28.5 23.033948 28.5 21 C 28.5 18.588492 27.170726 16.496917 25.21875 15.375 C 25.70603 14.712226 26 13.885103 26 13 C 26 10.792 24.208 9 22 9 z " id="path3194"/>
</g>
</svg>
""" | gpl-3.0 |
initOS/server-tools | module_auto_update/tests/test_module.py | 6 | 9647 | # -*- coding: utf-8 -*-
# Copyright 2017 LasLabs Inc.
# Copyright 2018 ACSONE SA/NV.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
import os
import tempfile
import mock
from openerp.modules import get_module_path
from openerp.tests import common
from openerp.tests.common import TransactionCase
from ..addon_hash import addon_hash
from ..models.module import IncompleteUpgradeError, DEFAULT_EXCLUDE_PATTERNS
MODULE_NAME = 'module_auto_update'
class TestModule(TransactionCase):
    """Unit tests for the addon-directory checksum helpers on ir.module.module."""
    def setUp(self):
        super(TestModule, self).setUp()
        # The ir.module.module record describing this very addon.
        self.own_module = self.env['ir.module.module'].search([
            ('name', '=', MODULE_NAME),
        ])
        self.own_dir_path = get_module_path(MODULE_NAME)
        # Installed language codes: translation files of other languages
        # are excluded from the hash.
        keep_langs = self.env['res.lang'].search([]).mapped('code')
        # Reference checksum computed directly with the helper, used to
        # validate what the model computes for the same directory.
        self.own_checksum = addon_hash(
            self.own_dir_path,
            exclude_patterns=DEFAULT_EXCLUDE_PATTERNS.split(','),
            keep_langs=keep_langs,
        )
        # Tests that create temp files inside the addon dir need write
        # access (not available e.g. on packaged/readonly installs).
        self.own_writeable = os.access(self.own_dir_path, os.W_OK)
    def test_compute_checksum_dir(self):
        """It should compute the directory's SHA-1 hash"""
        self.assertEqual(
            self.own_module._get_checksum_dir(), self.own_checksum,
            'Module directory checksum not computed properly',
        )
    def test_compute_checksum_dir_ignore_excluded(self):
        """It should exclude .pyc/.pyo extensions from checksum
        calculations"""
        if not self.own_writeable:
            self.skipTest("Own directory not writeable")
        # A .pyc file matches the default exclude patterns, so the hash
        # must not change while it exists.
        with tempfile.NamedTemporaryFile(suffix='.pyc', dir=self.own_dir_path):
            self.assertEqual(
                self.own_module._get_checksum_dir(), self.own_checksum,
                'SHA1 checksum does not ignore excluded extensions',
            )
    def test_compute_checksum_dir_recomputes_when_file_added(self):
        """It should return a different value when a non-.pyc/.pyo file is
        added to the module directory"""
        if not self.own_writeable:
            self.skipTest("Own directory not writeable")
        with tempfile.NamedTemporaryFile(suffix='.py', dir=self.own_dir_path):
            self.assertNotEqual(
                self.own_module._get_checksum_dir(), self.own_checksum,
                'SHA1 checksum not recomputed',
            )
    def test_saved_checksums(self):
        """Saving installed checksums stores an entry per installed module."""
        Imm = self.env['ir.module.module']
        base_module = Imm.search([('name', '=', 'base')])
        self.assertEqual(base_module.state, 'installed')
        # Nothing has been saved yet in a fresh environment.
        self.assertFalse(Imm._get_saved_checksums())
        Imm._save_installed_checksums()
        saved_checksums = Imm._get_saved_checksums()
        self.assertTrue(saved_checksums)
        self.assertTrue(saved_checksums['base'])
    def test_get_modules_with_changed_checksum(self):
        """Every module differs before the first save; none right after."""
        Imm = self.env['ir.module.module']
        self.assertTrue(Imm._get_modules_with_changed_checksum())
        Imm._save_installed_checksums()
        self.assertFalse(Imm._get_modules_with_changed_checksum())
@common.at_install(False)
@common.post_install(True)
class TestModuleAfterInstall(TransactionCase):
    """Tests that must run after the whole module graph is installed,
    because they inspect and flip real module states."""
    def setUp(self):
        super(TestModuleAfterInstall, self).setUp()
        Imm = self.env['ir.module.module']
        self.own_module = Imm.search([('name', '=', MODULE_NAME)])
        self.base_module = Imm.search([('name', '=', 'base')])
    def test_get_modules_partially_installed(self):
        """Modules flagged 'to upgrade' count as partially installed."""
        Imm = self.env['ir.module.module']
        self.assertTrue(
            self.own_module not in Imm._get_modules_partially_installed())
        self.own_module.button_upgrade()
        self.assertTrue(
            self.own_module in Imm._get_modules_partially_installed())
        self.own_module.button_upgrade_cancel()
        self.assertTrue(
            self.own_module not in Imm._get_modules_partially_installed())
    def test_upgrade_changed_checksum(self):
        """A changed checksum triggers exactly one upgrade pass and
        leaves every module back in the 'installed' state."""
        Imm = self.env['ir.module.module']
        Bmu = self.env['base.module.upgrade']
        # check modules are in installed state
        installed_modules = Imm.search([('state', '=', 'installed')])
        self.assertTrue(self.own_module in installed_modules)
        self.assertTrue(self.base_module in installed_modules)
        self.assertTrue(len(installed_modules) > 2)
        # change the checksum of 'base'
        Imm._save_installed_checksums()
        saved_checksums = Imm._get_saved_checksums()
        saved_checksums['base'] = False
        Imm._save_checksums(saved_checksums)
        changed_modules = Imm._get_modules_with_changed_checksum()
        self.assertEqual(len(changed_modules), 1)
        self.assertTrue(self.base_module in changed_modules)
        def upgrade_module_mock(self_model):
            upgrade_module_mock.call_count += 1
            # since we are upgrading base, all installed module
            # must have been marked to upgrade at this stage
            self.assertEqual(self.base_module.state, 'to upgrade')
            self.assertEqual(self.own_module.state, 'to upgrade')
            installed_modules.write({'state': 'installed'})
        upgrade_module_mock.call_count = 0
        # upgrade_changed_checksum commits, so mock that
        with mock.patch.object(self.env.cr, 'commit'):
            # we simulate an install by setting module states
            Bmu._patch_method('upgrade_module', upgrade_module_mock)
            try:
                Imm.upgrade_changed_checksum()
                self.assertEqual(upgrade_module_mock.call_count, 1)
                self.assertEqual(self.base_module.state, 'installed')
                self.assertEqual(self.own_module.state, 'installed')
                # checksums must have been re-saved after a clean upgrade
                saved_checksums = Imm._get_saved_checksums()
                self.assertTrue(saved_checksums['base'])
                self.assertTrue(saved_checksums[MODULE_NAME])
            finally:
                Bmu._revert_method('upgrade_module')
    def test_incomplete_upgrade(self):
        """If a module is still 'to upgrade' after the pass, an
        IncompleteUpgradeError must be raised."""
        Imm = self.env['ir.module.module']
        Bmu = self.env['base.module.upgrade']
        installed_modules = Imm.search([('state', '=', 'installed')])
        # change the checksum of 'base'
        Imm._save_installed_checksums()
        saved_checksums = Imm._get_saved_checksums()
        saved_checksums['base'] = False
        Imm._save_checksums(saved_checksums)
        def upgrade_module_mock(self_model):
            upgrade_module_mock.call_count += 1
            # since we are upgrading base, all installed module
            # must have been marked to upgrade at this stage
            self.assertEqual(self.base_module.state, 'to upgrade')
            self.assertEqual(self.own_module.state, 'to upgrade')
            installed_modules.write({'state': 'installed'})
            # simulate partial upgrade
            self.own_module.write({'state': 'to upgrade'})
        upgrade_module_mock.call_count = 0
        # upgrade_changed_checksum commits, so mock that
        with mock.patch.object(self.env.cr, 'commit'):
            # we simulate an install by setting module states
            Bmu._patch_method('upgrade_module', upgrade_module_mock)
            try:
                with self.assertRaises(IncompleteUpgradeError):
                    Imm.upgrade_changed_checksum()
                self.assertEqual(upgrade_module_mock.call_count, 1)
            finally:
                Bmu._revert_method('upgrade_module')
    def test_incomplete_upgrade_no_checkusm(self):
        """A module already pending upgrade, with unchanged checksums,
        still gets upgraded without raising."""
        # NOTE(review): "checkusm" is a typo for "checksum"; kept because
        # the method name is the public test identifier.
        Imm = self.env['ir.module.module']
        Bmu = self.env['base.module.upgrade']
        installed_modules = Imm.search(
            [('state', '=', 'installed')])
        # change the checksum of 'base'
        # (in this variant the checksums are actually saved unchanged)
        Imm._save_installed_checksums()
        saved_checksums = Imm._get_saved_checksums()
        Imm._save_checksums(saved_checksums)
        # 'base' is pending upgrade, but not via a checksum change
        self.base_module.write({'state': 'to upgrade'})
        def upgrade_module_mock(self_model):
            upgrade_module_mock.call_count += 1
            # since we are upgrading base, all installed module
            # must have been marked to upgrade at this stage
            self.assertEqual(self.base_module.state, 'to upgrade')
            self.assertEqual(self.own_module.state, 'installed')
            installed_modules.write({'state': 'installed'})
        upgrade_module_mock.call_count = 0
        # upgrade_changed_checksum commits, so mock that
        with mock.patch.object(self.env.cr, 'commit'):
            # we simulate an install by setting module states
            Bmu._patch_method('upgrade_module',
                              upgrade_module_mock)
            # got just other modules to_upgrade and no checksum ones
            try:
                Imm.upgrade_changed_checksum()
                self.assertEqual(upgrade_module_mock.call_count, 1)
            finally:
                Bmu._revert_method('upgrade_module')
    def test_nothing_to_upgrade(self):
        """With fresh checksums and no changes, upgrade_module is never
        called."""
        Imm = self.env['ir.module.module']
        Bmu = self.env['base.module.upgrade']
        Imm._save_installed_checksums()
        def upgrade_module_mock(self_model):
            upgrade_module_mock.call_count += 1
        upgrade_module_mock.call_count = 0
        # upgrade_changed_checksum commits, so mock that
        with mock.patch.object(self.env.cr, 'commit'):
            # we simulate an install by setting module states
            Bmu._patch_method('upgrade_module', upgrade_module_mock)
            try:
                Imm.upgrade_changed_checksum()
                self.assertEqual(upgrade_module_mock.call_count, 0)
            finally:
                Bmu._revert_method('upgrade_module')
| agpl-3.0 |
coelias/SatMapper | resourcefetcher.py | 1 | 2994 | #!/usr/bin/python
# SatMapper - 2012-2013 Carlos del Ojo and John Cole.
# This code is part of the SATMAPPER software and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
import os
import logging
import sys
import bz2
from lib.fastgzip import FastGzip
import urllib2
import re
import socket
import tempfile
socket.setdefaulttimeout(10)
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG)
class Resource:
    """One FastQ input (local file or URL), iterable line by line.

    Compression (.gz/.bz2) is inferred from the file extension; URLs are
    downloaded into a temp file first.  While iterating, every 4th line
    (the FastQ record header) is prefixed with "@<individual>@" so reads
    can be traced back to their sample.
    """
    def __init__(self,individual,res):
        self.individual=individual
        self.res=res
        # running line counter; header lines occur every 4 lines
        self.linecount=0
        if re.match("^(http|ftp|https):",res,re.I):
            # probe the URL up-front so bad resources fail early
            try: urllib2.urlopen(res).read(100)
            except: raise Exception("Resource not found: {0}".format(res))
            self.restype="URL"
        else:
            if not os.path.isfile(res):
                raise Exception("File not found: {0}".format(res))
            self.restype="FILE"
        # compression is detected from the extension only
        if res.lower().endswith(".gz"):
            self.compression="GZ"
        elif res.lower().endswith(".bz2"):
            self.compression="BZ2"
        else:
            self.compression="RAW"
    def __iter__(self):
        # (Re)open the underlying stream and reset the line iterator.
        if self.restype=="URL":
            # download the whole resource into a temp file in 1 MiB chunks
            self.tempfile=self.resfile=tempfile.NamedTemporaryFile(mode="w+b")
            resource=urllib2.urlopen(self.res)
            data=resource.read(1024*1024)
            while data:
                self.resfile.write(data)
                data=resource.read(1024*1024)
            self.resfile.seek(0)
            if self.compression=="GZ":
                self.resfile = FastGzip(fileobj=self.resfile)
            elif self.compression=="BZ2":
                # BZ2File takes a path, so reopen the temp file by name
                self.resfile = bz2.BZ2File(self.resfile.name,mode='rb')
        else:
            if self.compression=="GZ":
                self.resfile = FastGzip(self.res)
            elif self.compression=="BZ2":
                self.resfile = bz2.BZ2File(self.res,mode='rb')
            else:
                self.resfile=open(self.res)
        self.nextobj=self.resfile.__iter__()
        return self
    def next(self):
        # Python 2 iterator protocol; StopIteration propagates from the file.
        line=self.nextobj.next()
        # lines 0, 4, 8, ... are the FastQ record headers: tag them
        if not self.linecount % 4:
            line="@"+self.individual+"@"+line
        self.linecount+=1
        return line
    def close(self):
        if self.restype=="URL":
            if self.compression!="RAW":
                # for RAW, resfile *is* tempfile; avoid double close
                self.resfile.close()
            # always dispose of the downloaded temp file
            self.tempfile.close()
        else:
            self.resfile.close()
def usage():
print "You need to provide a file with the FastQ file input information, please read REAME.txt\nUsage: {0} resources.txt\n".format(sys.argv[0])
sys.exit(-1)
if __name__=="__main__":
    # CLI entry point: read a resources description file and stream the
    # tagged FastQ lines of every resource to stdout.
    if len(sys.argv)<2:
        usage()
    try:
        f=open(sys.argv[1])
    except:
        print "File {0} does not exist!".format(sys.argv[1])
        sys.exit(-1)
    resources=[]
    # Each non-comment, non-blank line must be: "<individual> <file-or-url>"
    for i in f:
        if i.strip().startswith("#"): continue
        j=i.strip().split()
        if not j: continue
        # '@' is the tag delimiter, so it may not appear in sample names
        if len(j)!=2 or "@" in j[0]:
            logging.error( "ERROR --> {0}".format(i))
            logging.error( "Invalid format, please read README.txt")
            sys.exit(-1)
        try:
            resources.append(Resource(j[0],j[1]))
        except Exception as e:
            logging.error( "Resource error:" + str(e))
    for i in resources:
        try:
            for k in i:
                sys.stdout.write(k)
            i.close()
        except:
            # record failed resources but keep processing the rest
            fil=open("errors.txt","a")
            fil.write("Error in resource {0} \n".format(str(i.res)))
            fil.close()
| gpl-2.0 |
weaver-viii/h2o-3 | py2/testdir_rapids/test_rapids_basic_with_funs_pick5.py | 21 | 11375 | import unittest, random, sys, time, re
sys.path.extend(['.','..','../..','py'])
import h2o2 as h2o
import h2o_browse as h2b, h2o_exec as h2e, h2o_import as h2i, h2o_cmd
from h2o_test import dump_json, verboseprint
initList = [
'(+ (* #2 #2) (* #5 #5))',
'(* #1 (+ (* #2 #2) (* #5 #5)))',
'(= !x (c {#1;#5;#8;#10;#33}))',
'(= !x (c {(: #0 #5) }))',
'(= !x (c {(: #5 #5) }))',
# why is num_rows = -4 here? Will blow up if we use it?
# illegal? assertion
# '(= !x (c {(: #5 #0) }))',
'(= !v (c {#1;#4567;(: #9 #90);(: #9 #45);#450})',
'(= !v2 (+ %v %v))',
# FIX! test with space after { and before }
'(= !v (c {#1;#4567;(: #91234 #9000209);(: #9000210 #45001045);#45001085})',
'(= !v (c {#1;#4567;(: #91234 #9000209);(: #9000210 #45001045);45001085})',
# remember need %v to reference
'(= !v (c {#1;#4567;(: #9 #90);(: #9 #45);#450})',
'(= !v2 %v )',
# FIX! if i use v here now, it has to be assigned within the function or an input to the function?
# maybe always pass v for use in the function (in the latter function)
'(= !v2 (n %v %v))',
'(= !v2 (N %v %v))',
'(= !v2 (- %v %v))',
'(= !v2 (+ %v %v))',
'(= !v2 (sum (+ %v %v) %TRUE)',
'(= !v2 (+ #1.0 (sum (+ %v %v) %TRUE))',
# different dimensions?
'(= !v3 (+ %v (sum (+ %v %v) %TRUE))',
# can't return more than one col thru function
# '(= !v3 (cbind %v %v %v %v))',
# '(= !v3 (rbind %v %v %v %v))',
# '(= !keys (ls))', # works
# '(= !x #1)', # works
# '(= !x (sum ([ %r1 "null" #0) %TRUE))', # works
# '(= !x (sum ([ r1 "null" (: #0 #0)) %TRUE))', # bad r1
# '(= !x (xorsum ([ %r1 "null" #0) %TRUE))', # works
# 'a', # AAIOBE
# 'x', # AAIOBE
# 'c', # AAIOBE
# 'c(1)', # says 'c(1' is unimplemented
# '(= #1)', # AAIOBE
# '(= !x #1)', # works
# 'x=c(1.3,0,1,2,3,4,5)', # says 'x=c(1.3,0,1,2,3,4,5' is unimplemented
# 'x=c(1.3', # AAIOBE
# '()', # Unimplemented on token ''
# '(x)', # unimplemented on x
# '(= !x)', # AAIOBE
# '(= !x ())', # unimplemented
# '(= !x #1)', # works
# '(= !x #1 #2)', # works, answer is 1?
# '(= !x (cbind (#1 #2) %TRUE))', # ClassCast exception
# '(= !x (cbind (#1 #2)))', # ClassCast exception
# '(= !x (cbind (#1)))', # ClassCast exception
# '(= !x (cbind #1))', # ClassCast exception
# '(= !x (seq (#1, #2)) )', # number format exception
# '(= !x (seq (#1, #2)) )', # bad
# '(= !x (seq #1, #2) )', # bad
# '(= !x (seq (#1) )', # bad
# '(= !x #1; = !x #2)', # no error but why answer is 1?
# '(= !x #1) (=!x #2)', # no error but why answer is 1?
# '{(= !x #1); (=!y %x)', # AAIOBE
# '{(= !x #1)', # AAIOBE
# '({(= !x #1); (= !y #1))', # AAIOBE
# '(1)',
# '((1))',
# '(((1)))',
'(#(1))', # why isn't this illegal?
'(#1)',
'((#1))',
'(((#1)))',
'(= !x #1)',
'((= !x #1))',
'(((= !x #1)))',
# complains
# '(= !x (#1 #2))',
# '((= !x (#1 #2)))',
# '(((= !x (#1 #2))))',
# okay. not okay if comma separated. seems wrong
'(= !x (+ #1 #2))',
'((= !x (+ #1 #2)))',
'(((= !x (+ #1 #2))))',
# complains
# '(= !x (+ #1 #2 #4))',
# '((= !x (+ #1 #2 #4)))',
# '(((= !x (+ #1 #2 #4))))',
# okay.
'(= !x + #1 #2)',
'((= !x + #1 #2))',
'(((= !x + #1 #2)))',
# '(= x + #1 #2)', # fails
# parens on binary operators
'(= !x + #1 + #1 (+ #1 #1))',
'= !x + #1 + #1 (+ #1 #1)',
'= !x N #1 N #1 (N #1 #1)',
'= !x n #1 n #1 (n #1 #1)',
'= !x L #1 L #1 (L #1 #1)',
'= !x l #1 l #1 (l #1 #1)',
'= !x G #1 G #1 (G #1 #1)',
'= !x g #1 g #1 (g #1 #1)',
'= !x (* (* #1 #1) (* #1 #1))',
'= !x * #1 * #1 (* #1 #1)',
'= !x - #1 - #1 (- #1 #1)',
'= !x ^ #1 ^ #1 (^ #1 #1)',
'= !x / #1 / #1 (/ #1 #1)',
'= !x ** #1 ** #1 (** #1 #1)',
'= !x % #1 % #1 (% #1 #1)',
# '= !x %/% #1 %/% #1 %/% #1 #1', # unimplemented
# '= !x %% #1 %% #1 %% #1 #1', # unimplemented
# '(= !x + _#1 + _#1 + _#1 _#1)', # unimplemented
'= !x _ + #1 + #1 (+ #1 _ #1)',
'= !x _ N #1 N #1 (N #1 _ #1)',
'= !x _ n #1 n #1 (n #1 _ #1)',
'= !x _ L #1 L #1 (L #1 _ #1)',
'= !x _ l #1 l #1 (l #1 _ #1)',
'= !x _ G #1 G #1 (G #1 _ #1)',
'= !x _ g #1 g #1 (g #1 _ #1)',
'= !x _ * #1 * #1 (* #1 _ #1)',
'= !x _ - #1 - #1 (- #1 _ #1)',
'= !x _ ^ #1 ^ #1 (^ #1 _ #1)',
'= !x _ / #1 / #1 (/ #1 _ #1)',
'= !x _ ** #1 ** #1 (** #1 _ #1)',
'= !x _ % #1 % #1 (% #1 _ #1)',
# can have space between ( and function
'= !x1 ( sum ([ %r1 "null" #0) %TRUE)',
'= !x2 ( sum ([ %r1 "null" #0) %TRUE)',
'= !x2a ( sum ([ %r1 "null" #0) %TRUE )',
# can have space after (
'= !x3 ( sum ([ %r1 "null" #0) %TRUE )',
'= !x3a ( sum ([ %r1 "null" #0) %TRUE )',
'= !x3b ( sum ([ %r1 "null" #0 ) %TRUE )',
'= !x4 ( sum ([ %r1 " null " #0 ) %TRUE )',
# can have space after (
'(= !x3 ( sum ([ %r1 "null" #0) %TRUE ))',
'(= !x3a ( sum ([ %r1 "null" #0) %TRUE ) )',
'(= !x3b ( sum ([ %r1 "null" #0 ) %TRUE ) )',
'((= !x4 ( sum ([ %r1 " null " #0 ) %TRUE )))',
'(= !x3 ( max ([ %r1 "null" #0) %TRUE ))',
'(= !x3a ( max ([ %r1 "null" #0) %TRUE ) )',
'(= !x3b ( max ([ %r1 "null" #0 ) %TRUE ) )',
'((= !x4 ( max ([ %r1 " null " #0 ) %TRUE )))',
'(= !x3 ( min ([ %r1 "null" #0) %TRUE ))',
'(= !x3a ( min ([ %r1 "null" #0) %TRUE ) )',
'(= !x3b ( min ([ %r1 "null" #0 ) %TRUE ) )',
'((= !x4 ( min ([ %r1 " null " #0 ) %TRUE )))',
'(= !x3 ( min ([ %r1 "null" #0) %TRUE ))',
'(= !x3 (+ (sum ([ %r1 "null" #0) %TRUE) (sum ([ %r1 "null" #0) %TRUE) )',
'(= !x3 (+ (xorsum ([ %r1 "null" #0) %TRUE) (xorsum ([ %r1 "null" #0) %TRUE) )',
# FIX! these should be like sum
# '(= !x3 (+ (max ([ %r1 "null" #0) %TRUE) (max ([ %r1 "null" #0) %TRUE) )',
# '(= !x3 (+ (min ([ %r1 "null" #0) %TRUE) (min ([ %r1 "null" #0) %TRUE) )',
# '{ #1 #1 }',
# '(= !x4 { #1 #1 })',
# r1[c(1,5,8,10,33),]
# commas are illegal (var name?)
# vectors can be strings or numbers only, not vars or keys
# h2o objects can't be in a vector
# c(1,2,3,4)
# '= !x (sum %r1 )'
# '(= !x (xorsum ([ %r1 "null" #0) %TRUE))', # works
# 'cave=c(1.3,0,1,2,3,4,5)',
# 'ma=c(2.3,0,1,2,3,4,5)',
# 'r2.hex=c(3.3,0,1,2,3,4,5)',
# 'r3.hex=c(4.3,0,1,2,3,4,5)',
# 'r4.hex=c(5.3,0,1,2,3,4,5)',
# 'r.hex=i.hex',
]
exprList = [
"round(r.hex[,1],0)",
"round(r.hex[,1],1)",
"round(r.hex[,1],2)",
# "signif(r.hex[,1],-1)",
# "signif(r.hex[,1],0)",
"signif(r.hex[,1],1)",
"signif(r.hex[,1],2)",
"signif(r.hex[,1],22)",
"trunc(r.hex[,1])",
"trunc(r.hex[,1])",
"trunc(r.hex[,1])",
"trunc(r.hex[,1])",
## Compute row and column sums for a matrix:
# 'x <- cbind(x1 = 3, x2 = c(4:1, 2:5))',
# 'dimnames(x)[[1]] <- letters[1:8]',
# 'apply(x, 2, mean, trim = .2)',
'apply(x, 2, mean)',
'col.sums <- apply(x, 2, sum)',
'row.sums <- apply(x, 1, sum)',
# 'rbind(cbind(x, Rtot = row.sums), Ctot = c(col.sums, sum(col.sums)))',
# 'stopifnot( apply(x, 2, is.vector))',
## Sort the columns of a matrix
# 'apply(x, 2, sort)',
##- function with extra args:
# 'cave <- function(x, c1, c2) c(mean(x[c1]), mean(x[c2]))',
# 'apply(x, 1, cave, c1 = "x1", c2 = c("x1","x2"))',
# 'ma <- matrix(c(1:4, 1, 6:8), nrow = 2)',
'ma',
# fails unimplemented
# 'apply(ma, 1, table)', #--> a list of length 2
# 'apply(ma, 1, stats::quantile)', # 5 x n matrix with rownames
#'stopifnot(dim(ma) == dim(apply(ma, 1:2, sum)))',
## Example with different lengths for each call
# 'z <- array(1:24, dim = 2:4)',
# 'zseq <- apply(z, 1:2, function(x) seq_len(max(x)))',
# 'zseq', ## a 2 x 3 matrix
# 'typeof(zseq)', ## list
# 'dim(zseq)', ## 2 3
# zseq[1,]',
# 'apply(z, 3, function(x) seq_len(max(x)))',
# a list without a dim attribute
]
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global SEED
SEED = h2o.setup_random_seed()
h2o.init(1, base_port=54333)
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rapids_basic_with_funs_pick5(self):
bucket = 'smalldata'
csvPathname = 'iris/iris_wheader.csv'
hexKey = 'r1'
parseResult = h2i.import_parse(bucket=bucket, path=csvPathname, schema='put', hex_key=hexKey)
keys = []
while initList:
if len(initList) >= 5:
pick5 = [initList.pop(0) for i in range(5)]
else:
pick5 = initList
global initList
initList = []
pick6 = ['(= !v (c {#1;#4567;(: #9 #90);(: #9 #45);#450})'] + pick5
execExpr1 = ";;".join(pick6)
# always do a v assign first, as they may reference %v
funs = '[(def anon {x} (%s);;;)]' % execExpr1
execResult, result = h2e.exec_expr(h2o.nodes[0], funs, doFuns=True, resultKey=None, timeoutSecs=5)
execExpr2 = '(apply %r1 #2 %anon)'
execResult, result = h2e.exec_expr(h2o.nodes[0], execExpr2, doFuns=False, resultKey=None, timeoutSecs=25)
# see if the execExpr had a lhs assign. If so, it better be in the storeview
r = re.search('![a-zA-Z0-9]+', execExpr1)
if r:
lhs = r.group(0)[1:]
print "Found key lhs assign", lhs
# KeyIndexeds gets too many rollup stats problems. Don't use for now
if 1==0:
inspect = h2o_cmd.runInspect(key=lhs)
missingList, labelList, numRows, numCols = infoFromInspect(inspect)
storeview = h2o_cmd.runStoreView()
print "\nstoreview:", dump_json(storeview)
if not k in storeView['keys']:
raise Exception("Expected to find %s in %s", (k, storeView['keys']))
else:
print "No key lhs assign"
# rows might be zero!
if execResult['num_rows'] or execResult['num_cols']:
keys.append(execExpr2)
print "\nExpressions that created keys"
for k in keys:
print k
# for execExpr in exprList:
# h2e.exec_expr(execExpr=execExpr, resultKey=None, timeoutSecs=10)
h2o.check_sandbox_for_errors()
# standard unittest entry point used across the h2o test scripts
if __name__ == '__main__':
    h2o.unit_main()
| apache-2.0 |
timfreund/hyde | hydeengine/site_pre_processors.py | 1 | 2563 | import sys
from hydeengine.siteinfo import ContentNode
from django.conf import settings
from hydeengine.file_system import Folder
from siteinfo import SiteNode
"""
PRE PROCESSORS
Can be launched before the parsing of each templates and
after the loading of site info.
"""
class Category(object):
    """A set of posts sharing a category, plus an optional feed URL.

    BUG FIX: the original stored the data directly on ``self.posts`` /
    ``self.feed_url`` while also declaring read-only properties of the
    same names; on a new-style class that assignment raises
    AttributeError in __init__, and the property getters recurse into
    themselves.  The data now lives in private attributes behind the
    properties (feed_url stays writable, as callers assign it).
    """

    def __init__(self):
        self._posts = set()
        self._feed_url = None

    @property
    def posts(self):
        # set of posts belonging to this category
        return self._posts

    @property
    def feed_url(self):
        # URL of the syndication feed for this category, or None
        return self._feed_url

    @feed_url.setter
    def feed_url(self, url):
        self._feed_url = url
class CategoriesManager:
    """
    Fetch the category(ies) from every post under the given node
    and creates a reference on them in CONTEXT and the node.
    """
    @staticmethod
    def process(folder, params):
        """Collect posts by category under params['node'].

        Posts lacking a ``categories`` attribute fall into
        settings.DEFAULT_CATEGORY when that setting exists.  The resulting
        {name: Category} mapping is published both in the template CONTEXT
        and on the node itself.
        """
        context = settings.CONTEXT
        site = context['site']
        node = params['node']
        categories = {}
        for post in node.walk_pages():
            if hasattr(post, 'categories') and post.categories is not None:
                for category in post.categories:
                    # dict.has_key() is deprecated; membership test instead
                    if category not in categories:
                        categories[category] = Category()
                    categories[category].posts.add(post)
            elif hasattr(settings, 'DEFAULT_CATEGORY'):
                if settings.DEFAULT_CATEGORY not in categories:
                    categories[settings.DEFAULT_CATEGORY] = Category()
                categories[settings.DEFAULT_CATEGORY].posts.add(post)
                # record the fallback category on the post itself
                setattr(post, 'categories', [settings.DEFAULT_CATEGORY])
        context['categories'] = categories
        node.categories = categories
class NodeInjector(object):
    """
    Finds the node that represents the given path and injects it with the given
    variable name into all the posts contained in the current node.
    """
    @staticmethod
    def process(folder, params):
        """Inject resolved site nodes into every post under params['node'].

        Accepts either a single 'variable'/'path' pair or an 'injections'
        mapping of {variable_name: path}.  Paths that do not resolve to a
        site node are skipped silently.
        """
        context = settings.CONTEXT
        site = context['site']
        node = params['node']
        try:
            varName = params['variable']
            path = params['path']
            params['injections'] = {varName: path}
        except KeyError:
            # no single pair given; fall through to the 'injections' mapping
            pass
        # .items() instead of the py2-only .iteritems(); identical iteration
        for varName, path in params['injections'].items():
            nodeFromPathFragment = site.find_node(site.folder.parent.child_folder(path))
            if not nodeFromPathFragment:
                continue
            for post in node.walk_pages():
                setattr(post, varName, nodeFromPathFragment)
| mit |
Kagami/kisa | lib/twisted/conch/ssh/common.py | 20 | 2781 | # -*- test-case-name: twisted.conch.test.test_ssh -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Common functions for the SSH classes.
Maintainer: Paul Swartz
"""
import struct, warnings
try:
from Crypto import Util
except ImportError:
warnings.warn("PyCrypto not installed, but continuing anyways!",
RuntimeWarning)
from twisted.python import randbytes
def NS(t):
    """
    net string: the payload prefixed with its length as a 32-bit
    big-endian unsigned integer.
    """
    prefix = struct.pack('!L', len(t))
    return prefix + t
def getNS(s, count=1):
    """
    get net string: pull `count` length-prefixed strings off the front of
    `s`; returns them as a tuple followed by the unconsumed remainder.
    """
    strings = []
    offset = 0
    for _ in range(count):
        (length,) = struct.unpack('!L', s[offset:offset + 4])
        offset += 4
        strings.append(s[offset:offset + length])
        offset += length
    return tuple(strings) + (s[offset:],)
def MP(number):
    # SSH multiple-precision integer: 4-byte big-endian length followed by
    # the minimal big-endian byte encoding of the (positive) number; zero
    # is encoded as a zero length with no payload.
    if number==0: return '\000'*4
    assert number>0
    bn = Util.number.long_to_bytes(number)
    # prepend a zero byte when the high bit is set, so the value cannot be
    # misread as negative (SSH mpints are signed two's complement)
    if ord(bn[0])&128:
        bn = '\000' + bn
    return struct.pack('>L',len(bn)) + bn
def getMP(data, count=1):
    """
    Get multiple precision integer out of the string. A multiple precision
    integer is stored as a 4-byte length followed by length bytes of the
    integer. If count is specified, get count integers out of the string.
    The return value is a tuple of count integers followed by the rest of
    the data.
    """
    mp = []
    c = 0
    for i in range(count):
        # big-endian 4-byte length prefix, then that many value bytes
        length, = struct.unpack('>L',data[c:c+4])
        mp.append(Util.number.bytes_to_long(data[c+4:c+4+length]))
        c += 4 + length
    return tuple(mp) + (data[c:],)
def _MPpow(x, y, z):
    """return the MP version of (x**y)%z
    """
    result = pow(x, y, z)
    return MP(result)
def ffs(c, s):
    """
    first from second
    goes through the first list, looking for items in the second, returns the first one
    """
    for candidate in c:
        if candidate in s:
            return candidate
    return None
# Keep references to the pure-Python implementations (and the builtin pow)
# so install() can swap in the gmpy-backed versions below while the
# originals remain reachable under the *_py names.
getMP_py = getMP
MP_py = MP
_MPpow_py = _MPpow
pyPow = pow
def _fastgetMP(data, count=1):
    # gmpy-backed getMP: same wire format, but the payload bytes are
    # reversed and handed to gmpy.mpz(..., 256), which parses little-endian
    mp = []
    c = 0
    for i in range(count):
        length = struct.unpack('!L', data[c:c+4])[0]
        # the appended '\x00' keeps the most significant byte from being
        # interpreted as a sign bit by gmpy
        mp.append(long(gmpy.mpz(data[c + 4:c + 4 + length][::-1] + '\x00', 256)))
        c += length + 4
    return tuple(mp) + (data[c:],)
def _fastMP(i):
    # gmpy-backed MP: gmpy emits little-endian binary, so reverse it to get
    # big-endian wire order.  NOTE(review): unlike MP this adds no leading
    # zero byte when the high bit is set -- presumably callers only pass
    # values where that cannot matter; confirm before relying on it.
    i2 = gmpy.mpz(i).binary()[::-1]
    return struct.pack('!L', len(i2)) + i2
def _fastMPpow(x, y, z=None):
    # gmpy-backed _MPpow: modular exponentiation via the saved pyPow, then
    # MP-encode the result the same way _fastMP does
    r = pyPow(gmpy.mpz(x),y,z).binary()[::-1]
    return struct.pack('!L', len(r)) + r
def install():
    # Swap the pure-Python codec functions for the gmpy-backed versions;
    # only called when the gmpy import below succeeds.
    global getMP, MP, _MPpow
    getMP = _fastgetMP
    MP = _fastMP
    _MPpow = _fastMPpow
    # XXX: We override builtin pow so that PyCrypto can benefit from gmpy too.
    def _fastpow(x, y, z=None, mpz=gmpy.mpz):
        # coerce plain ints/longs to gmpy.mpz before exponentiating
        if type(x) in (long, int):
            x = mpz(x)
        return pyPow(x, y, z)
    __builtins__['pow'] = _fastpow # evil evil
# Use the faster gmpy-backed implementations when gmpy is available;
# otherwise silently keep the pure-Python versions defined above.
try:
    import gmpy
    install()
except ImportError:
    pass
| cc0-1.0 |
chen0031/distributed-deep-q | main.py | 2 | 6137 | """
Barista serves as an interface to a long-running caffe process.
"""
import os
import sys
import time
import argparse
import socket
import threading
import caffe
from caffe import SGDSolver
import barista
from barista.baristanet import BaristaNet
from barista import netutils
from replay import ReplayDataset
from gamesim.SnakeGame import SnakeGame, gray_scale
from expgain import ExpGain, generate_preprocessor
# Modules necessary only for faking Experience Gainer
import random
import numpy as np
def recv_all(socket, size):
    """Read from `socket` until at least `size` bytes have arrived or the
    peer closes the connection; returns everything received (which may be
    more than `size`, since recv chunks are never trimmed)."""
    chunks = []
    received = 0
    while received < size:
        data = socket.recv(4096)
        if not data:
            break
        chunks.append(data)
        received += len(data)
    return "".join(chunks)
def process_connection(socket, net, exp_gain, log_frequency=50):
print "Processing...",
message = recv_all(socket, barista.MSG_LENGTH)
if message == barista.GRAD_UPDATE:
iteration_num = net.fetch_model()
exp_gain.generate_experience(iteration_num)
net.load_minibatch()
net.full_pass()
response = net.send_gradient_update()
if iter_num % log_frequency == 0:
net.log()
socket.send(response)
elif message == barista.DARWIN_UPDATE:
raise NotImplementedError("Cannot process request " + message +
"; Darwinian SGD not implemented")
else:
print "Unknown request:", message
socket.close()
print "done."
def debug_process_connection(socket, net, exp_gain):
message = ""
while len(message) < barista.MSG_LENGTH:
chunk = socket.recv(4096)
if not chunk:
break
message += chunk
if message == barista.GRAD_UPDATE:
print "- Fetching model..."
iteration_num = net.fetch_model()
print "Processing gradient update request:", iteration_num
exp_gain.generate_experience(iteration_num)
print "- Loading minibatch..."
net.load_minibatch()
print "- Running Caffe..."
tic = time.time()
net.full_pass()
toc = time.time()
print " * Caffe took % 0.2f milliseconds." % (1000 * (toc - tic))
# Compute debug info
param_norms = netutils.compute_param_norms(net.net)
grad_norms = netutils.compute_gradient_norms(net.net)
loss = netutils.extract_net_data(net.net, ('loss',))['loss']
print
print "Parameter norms:"
print "-"*50
netutils.pretty_print(param_norms)
print
print "Gradient norms:"
print "-"*50
netutils.pretty_print(grad_norms)
print
print "Loss:", loss
print "- Generating/sending gradient message..."
response = net.send_gradient_update()
net.log()
socket.send(response)
elif message == barista.DARWIN_UPDATE:
raise NotImplementedError("Cannot process request " + message +
"; Darwinian SGD not implemented")
else:
print "Unknown request:", message
socket.close()
print "Closed connection"
def issue_ready_signal(idx):
    """Drop an empty marker file so the Spark executor can see that the
    barista instance listening on port `idx` is ready."""
    if not os.path.isdir("flags"):
        os.makedirs("flags")
    open("flags/__BARISTA_READY__.%d" % idx, "w").close()
def get_args():
    """Parse the barista server's command-line options."""
    parser = argparse.ArgumentParser()
    parser.add_argument("architecture")
    parser.add_argument("model")
    parser.add_argument("--solver", default=None)
    parser.add_argument("--mode", default="cpu", choices=["cpu", "gpu"])
    parser.add_argument("--port", type=int, default=50001)
    parser.add_argument("--driver", default="127.0.0.1:5500")
    parser.add_argument("--dataset", default="replay-dataset.hdf5")
    parser.add_argument("--dset-size", dest="dset_size", type=int, default=1000)
    parser.add_argument("--overwrite", action="store_true")
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--initial-replay", type=int, default=20000)

    args = parser.parse_args()
    # the literal string "None" on the command line means "no driver"
    return args if args.driver != "None" else _clear_driver(args)


def _clear_driver(args):
    # helper: normalize the sentinel string to a real None
    args.driver = None
    return args
def main():
args = get_args()
caffe.set_phase_test()
if args.mode == "cpu":
caffe.set_mode_cpu()
else:
caffe.set_mode_gpu()
if not os.path.isfile(args.model):
if not args.solver:
print "Error: Model does not exist. No solver specified."
sys.exit(1)
print "Warning: model %s does not exist. Creating..."
solver = SGDSolver(args.solver)
solver.net.save(args.model)
# Initialize objects
net = BaristaNet(args.architecture, args.model, args.driver,
reset_log=True)
replay_dataset = ReplayDataset(args.dataset, net.state[0].shape,
dset_size=args.dset_size,
overwrite=args.overwrite)
net.add_dataset(replay_dataset)
game = SnakeGame()
preprocessor = generate_preprocessor(net.state.shape[2:], gray_scale)
exp_gain = ExpGain(net, ['w', 'a', 's', 'd'], preprocessor, game.cpu_play,
replay_dataset, game.encode_state())
if(args.overwrite):
for _ in xrange(min(args.initial_replay, args.dset_size)):
exp_gain.generate_experience(0)
# Start server loop
serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
serversocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
serversocket.bind(('127.0.0.1', args.port))
serversocket.listen(5)
print
print "*"*80
print "* Starting BARISTA server: listening on port %d." % args.port
print "*"*80
# Signal Spark Executor that Barista is ready to receive connections
issue_ready_signal(args.port)
while True:
(clientsocket, address) = serversocket.accept()
if args.debug:
handler = debug_process_connection
else:
handler = process_connection
client_thread = threading.Thread(
target=handler,
args=(clientsocket, net, exp_gain))
client_thread.run()
if __name__ == "__main__":
main()
| mit |
dagwieers/ansible | lib/ansible/modules/cloud/opennebula/one_service.py | 31 | 25610 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
"""
(c) 2017, Milan Ilic <milani@nordeus.com>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: one_service
short_description: Deploy and manage OpenNebula services
description:
- Manage OpenNebula services
version_added: "2.6"
options:
api_url:
description:
- URL of the OpenNebula OneFlow API server.
- It is recommended to use HTTPS so that the username/password are not transferred over the network unencrypted.
- If not set then the value of the ONEFLOW_URL environment variable is used.
api_username:
description:
- Name of the user to login into the OpenNebula OneFlow API server. If not set then the value of the C(ONEFLOW_USERNAME) environment variable is used.
api_password:
description:
- Password of the user to login into OpenNebula OneFlow API server. If not set then the value of the C(ONEFLOW_PASSWORD) environment variable is used.
template_name:
description:
- Name of the service template to use to create a new instance of a service
template_id:
description:
- ID of a service template to use to create a new instance of a service
service_id:
description:
- ID of a service instance that you would like to manage
service_name:
description:
- Name of a service instance that you would like to manage
unique:
description:
- Setting C(unique=yes) will make sure that there is only one service instance running with a name set with C(service_name) when
- instantiating a service from a template specified with C(template_id)/C(template_name). Check examples below.
type: bool
default: no
state:
description:
- C(present) - instantiate a service from a template specified with C(template_id)/C(template_name).
- C(absent) - terminate an instance of a service specified with C(service_id)/C(service_name).
choices: ["present", "absent"]
default: present
mode:
description:
- Set permission mode of a service instance in octet format, e.g. C(600) to give owner C(use) and C(manage) and nothing to group and others.
owner_id:
description:
- ID of the user which will be set as the owner of the service
group_id:
description:
- ID of the group which will be set as the group of the service
wait:
description:
- Wait for the instance to reach RUNNING state after DEPLOYING or COOLDOWN state after SCALING
type: bool
default: no
wait_timeout:
description:
- How long before wait gives up, in seconds
default: 300
custom_attrs:
description:
- Dictionary of key/value custom attributes which will be used when instantiating a new service.
default: {}
role:
description:
- Name of the role whose cardinality should be changed
cardinality:
description:
- Number of VMs for the specified role
force:
description:
- Force the new cardinality even if it is outside the limits
type: bool
default: no
author:
- "Milan Ilic (@ilicmilan)"
'''
EXAMPLES = '''
# Instantiate a new service
- one_service:
template_id: 90
register: result
# Print service properties
- debug:
msg: result
# Instantiate a new service with specified service_name, service group and mode
- one_service:
template_name: 'app1_template'
service_name: 'app1'
group_id: 1
mode: '660'
# Instantiate a new service with template_id and pass custom_attrs dict
- one_service:
template_id: 90
custom_attrs:
public_network_id: 21
private_network_id: 26
# Instantiate a new service 'foo' if the service doesn't already exist, otherwise do nothing
- one_service:
template_id: 53
service_name: 'foo'
unique: yes
# Delete a service by ID
- one_service:
service_id: 153
state: absent
# Get service info
- one_service:
service_id: 153
register: service_info
# Change service owner, group and mode
- one_service:
service_name: 'app2'
owner_id: 34
group_id: 113
mode: '600'
# Instantiate service and wait for it to become RUNNING
- one_service:
template_id: 43
service_name: 'foo1'
# Wait service to become RUNNING
- one_service:
service_id: 112
wait: yes
# Change role cardinality
- one_service:
service_id: 153
role: bar
cardinality: 5
# Change role cardinality and wait for it to be applied
- one_service:
service_id: 112
role: foo
cardinality: 7
wait: yes
'''
RETURN = '''
service_id:
description: service id
type: int
returned: success
sample: 153
service_name:
description: service name
type: str
returned: success
sample: app1
group_id:
description: service's group id
type: int
returned: success
sample: 1
group_name:
description: service's group name
type: str
returned: success
sample: one-users
owner_id:
description: service's owner id
type: int
returned: success
sample: 143
owner_name:
description: service's owner name
type: str
returned: success
sample: ansible-test
state:
description: state of service instance
type: str
returned: success
sample: RUNNING
mode:
description: service's mode
type: int
returned: success
sample: 660
roles:
description: list of dictionaries of roles, each role is described by name, cardinality, state and nodes ids
type: list
returned: success
sample: '[{"cardinality": 1,"name": "foo","state": "RUNNING","ids": [ 123, 456 ]},
{"cardinality": 2,"name": "bar","state": "RUNNING", "ids": [ 452, 567, 746 ]}]'
'''
import os
import sys
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import open_url
STATES = ("PENDING", "DEPLOYING", "RUNNING", "UNDEPLOYING", "WARNING", "DONE",
"FAILED_UNDEPLOYING", "FAILED_DEPLOYING", "SCALING", "FAILED_SCALING", "COOLDOWN")
def get_all_templates(module, auth):
    """Return the parsed JSON document pool of all OneFlow service templates."""
    endpoint = auth.url + "/service_template"
    try:
        all_templates = open_url(url=endpoint, method="GET",
                                 force_basic_auth=True,
                                 url_username=auth.user,
                                 url_password=auth.password)
    except Exception as e:
        module.fail_json(msg=str(e))
    return module.from_json(all_templates.read())
def get_template(module, auth, pred):
    """Return the unique service template matching `pred`, or None.

    Fails the module when more than one template matches, mirroring the
    behaviour and wording of get_service (the old error text claimed
    "There is no template with unique name", which was misleading for a
    duplicate-name condition).
    """
    all_templates_dict = get_all_templates(module, auth)

    found = 0
    found_template = None
    template_name = ''

    if "DOCUMENT_POOL" in all_templates_dict and "DOCUMENT" in all_templates_dict["DOCUMENT_POOL"]:
        for template in all_templates_dict["DOCUMENT_POOL"]["DOCUMENT"]:
            if pred(template):
                found = found + 1
                found_template = template
                template_name = template["NAME"]

    if found <= 0:
        return None
    elif found > 1:
        module.fail_json(msg="There are multiple templates with a name: '" + template_name +
                             "'. You have to use a unique template name or use 'template_id' instead.")
    else:
        return found_template
def get_all_services(module, auth):
    """Return the parsed JSON document pool of all OneFlow service instances."""
    endpoint = auth.url + "/service"
    try:
        response = open_url(endpoint, method="GET",
                            force_basic_auth=True,
                            url_username=auth.user,
                            url_password=auth.password)
    except Exception as e:
        module.fail_json(msg=str(e))
    return module.from_json(response.read())
def get_service(module, auth, pred):
    """Return the unique service instance matching `pred`.

    Returns None when nothing matches; fails the module when several
    services match (ambiguous name).
    """
    all_services_dict = get_all_services(module, auth)

    found = 0
    found_service = None
    service_name = ''

    if "DOCUMENT_POOL" in all_services_dict and "DOCUMENT" in all_services_dict["DOCUMENT_POOL"]:
        for service in all_services_dict["DOCUMENT_POOL"]["DOCUMENT"]:
            if pred(service):
                found = found + 1
                found_service = service
                service_name = service["NAME"]

    # fail if there are more services with same name
    if found > 1:
        module.fail_json(msg="There are multiple services with a name: '" +
                         service_name + "'. You have to use a unique service name or use 'service_id' instead.")
    elif found <= 0:
        return None
    else:
        return found_service
def get_service_by_id(module, auth, service_id):
    """Look up a service by numeric ID; returns None when service_id is None.

    BUG FIX: the old truthiness test (`... if service_id else None`) also
    rejected the valid ID 0; compare against None explicitly instead.
    """
    if service_id is None:
        return None
    return get_service(module, auth, lambda service: (int(service["ID"]) == int(service_id)))


def get_service_by_name(module, auth, service_name):
    """Look up a service by its (unique) name."""
    return get_service(module, auth, lambda service: (service["NAME"] == service_name))
def get_service_info(module, auth, service):
    """Flatten a raw OneFlow service document into the module's result dict
    (IDs as ints, state codes translated to names, roles with node IDs).

    CONSISTENCY FIX: the top-level service state is now converted with
    int() before indexing STATES, just like the per-role states below
    (the API may deliver the code as a string).
    """
    result = {
        "service_id": int(service["ID"]),
        "service_name": service["NAME"],
        "group_id": int(service["GID"]),
        "group_name": service["GNAME"],
        "owner_id": int(service["UID"]),
        "owner_name": service["UNAME"],
        "state": STATES[int(service["TEMPLATE"]["BODY"]["state"])]
    }

    roles_status = service["TEMPLATE"]["BODY"]["roles"]
    roles = []
    for role in roles_status:
        # collect the deploy IDs of the VMs backing this role, if any
        nodes_ids = []
        if "nodes" in role:
            for node in role["nodes"]:
                nodes_ids.append(node["deploy_id"])
        roles.append({"name": role["name"], "cardinality": role["cardinality"],
                      "state": STATES[int(role["state"])], "ids": nodes_ids})

    result["roles"] = roles
    result["mode"] = int(parse_service_permissions(service))

    return result
def create_service(module, auth, template_id, service_name, custom_attrs, unique, wait, wait_timeout):
    """Instantiate a new service from template `template_id`.

    Attribute values are stringified because the OneFlow API expects
    string custom-attribute values.  Returns the raw service document of
    the new instance.  (`unique`, `wait` and `wait_timeout` are accepted
    for signature symmetry but not acted on here.)
    """
    # make sure that the values in custom_attrs dict are strings
    custom_attrs_with_str = dict((k, str(v)) for k, v in custom_attrs.items())

    data = {
        "action": {
            "perform": "instantiate",
            "params": {
                "merge_template": {
                    "custom_attrs_values": custom_attrs_with_str,
                    "name": service_name
                }
            }
        }
    }

    try:
        response = open_url(auth.url + "/service_template/" + str(template_id) + "/action", method="POST",
                            data=module.jsonify(data), force_basic_auth=True, url_username=auth.user, url_password=auth.password)
    except Exception as e:
        module.fail_json(msg=str(e))

    service_result = module.from_json(response.read())["DOCUMENT"]

    return service_result
def wait_for_service_to_become_ready(module, auth, service_id, wait_timeout):
    """Poll the service once per second until it reaches RUNNING or COOLDOWN.

    Fails the module (surfacing the service's first error-log message)
    when the service enters a state that is neither "in progress" nor
    ready, or when `wait_timeout` seconds elapse.  Returns the final
    service document on success.
    """
    import time
    start_time = time.time()

    while (time.time() - start_time) < wait_timeout:
        try:
            status_result = open_url(auth.url + "/service/" + str(service_id), method="GET",
                                     force_basic_auth=True, url_username=auth.user, url_password=auth.password)
        except Exception as e:
            module.fail_json(msg="Request for service status has failed. Error message: " + str(e))

        status_result = module.from_json(status_result.read())
        service_state = status_result["DOCUMENT"]["TEMPLATE"]["BODY"]["state"]

        if service_state in [STATES.index("RUNNING"), STATES.index("COOLDOWN")]:
            return status_result["DOCUMENT"]
        elif service_state not in [STATES.index("PENDING"), STATES.index("DEPLOYING"), STATES.index("SCALING")]:
            # terminal failure state: report the first severity-E log entry
            log_message = ''
            for log_info in status_result["DOCUMENT"]["TEMPLATE"]["BODY"]["log"]:
                if log_info["severity"] == "E":
                    log_message = log_message + log_info["message"]
                    break

            module.fail_json(msg="Deploying is unsuccessful. Service state: " + STATES[service_state] + ". Error message: " + log_message)

        time.sleep(1)

    module.fail_json(msg="Wait timeout has expired")
def _post_service_action(module, auth, service_id, perform, params):
    """POST a {perform, params} action to /service/<id>/action; fails the
    module on any transport error.  Shared by the chmod/chown/chgrp
    wrappers below, which previously triplicated this request code."""
    data = {
        "action": {
            "perform": perform,
            "params": params
        }
    }

    try:
        open_url(auth.url + "/service/" + str(service_id) + "/action", method="POST", force_basic_auth=True,
                 url_username=auth.user, url_password=auth.password, data=module.jsonify(data))
    except Exception as e:
        module.fail_json(msg=str(e))


def change_service_permissions(module, auth, service_id, permissions):
    """Set the service's permission bits (octet string, e.g. '600')."""
    _post_service_action(module, auth, service_id, "chmod", {"octet": permissions})


def change_service_owner(module, auth, service_id, owner_id):
    """Transfer ownership of the service to user `owner_id`."""
    _post_service_action(module, auth, service_id, "chown", {"owner_id": owner_id})


def change_service_group(module, auth, service_id, group_id):
    """Move the service into group `group_id`."""
    _post_service_action(module, auth, service_id, "chgrp", {"group_id": group_id})
def change_role_cardinality(module, auth, service_id, role, cardinality, force):
    """Set the number of VMs for `role`; `force` overrides the role's
    configured min/max limits.  Fails the module unless the API answers
    with HTTP 204 (No Content)."""
    data = {
        "cardinality": cardinality,
        "force": force
    }

    try:
        status_result = open_url(auth.url + "/service/" + str(service_id) + "/role/" + role, method="PUT",
                                 force_basic_auth=True, url_username=auth.user, url_password=auth.password, data=module.jsonify(data))
    except Exception as e:
        module.fail_json(msg=str(e))

    if status_result.getcode() != 204:
        module.fail_json(msg="Failed to change cardinality for role: " + role + ". Return code: " + str(status_result.getcode()))
def check_change_service_owner(module, service, owner_id):
    """True when the requested owner differs from the service's current owner."""
    return int(service["UID"]) != owner_id


def check_change_service_group(module, service, group_id):
    """True when the requested group differs from the service's current group."""
    return int(service["GID"]) != group_id
def parse_service_permissions(service):
    """Collapse the API's per-bit PERMISSIONS dict into an octal string.

    Each of owner/group/other contributes use=4, manage=2, admin=1,
    giving e.g. '640'.  The incoming structure looks like:

        "PERMISSIONS": {
            "OWNER_U": "1", "OWNER_M": "1", "OWNER_A": "0",
            "GROUP_U": "0", "GROUP_M": "0", "GROUP_A": "0",
            "OTHER_U": "0", "OTHER_M": "0", "OTHER_A": "0"
        }
    """
    bits = service["PERMISSIONS"]

    def octet(who):
        # use / manage / admin bits weighted 4 / 2 / 1
        return 4 * int(bits[who + "_U"]) + 2 * int(bits[who + "_M"]) + int(bits[who + "_A"])

    return "%d%d%d" % (octet("OWNER"), octet("GROUP"), octet("OTHER"))


def check_change_service_permissions(module, service, permissions):
    """True when the requested octal mode differs from the current one."""
    return parse_service_permissions(service) != permissions
def check_change_role_cardinality(module, service, role_name, cardinality):
    """True when role `role_name` currently has a different cardinality.

    Fails the module when the service has no role with that name.
    """
    for role in service["TEMPLATE"]["BODY"]["roles"]:
        if role["name"] == role_name:
            return int(role["cardinality"]) != cardinality
    module.fail_json(msg="There is no role with name: " + role_name)
def create_service_and_operation(module, auth, template_id, service_name, owner_id, group_id, permissions, custom_attrs, unique, wait, wait_timeout):
    """Instantiate a service (unless `unique` finds an existing one with
    `service_name`) and then apply owner/group/permission changes via
    service_operation.

    Honours check mode: when the service would have been created, only
    {"changed": True} can be reported, because the instance does not
    exist yet and cannot be inspected.
    """
    if not service_name:
        service_name = ''
    changed = False
    service = None

    if unique:
        # reuse an existing instance with this name instead of creating one
        service = get_service_by_name(module, auth, service_name)

    if not service:
        if not module.check_mode:
            service = create_service(module, auth, template_id, service_name, custom_attrs, unique, wait, wait_timeout)
        changed = True

    # if check_mode=true and there would be changes, service doesn't exist and we can not get it
    if module.check_mode and changed:
        return {"changed": True}

    result = service_operation(module, auth, owner_id=owner_id, group_id=group_id, wait=wait,
                               wait_timeout=wait_timeout, permissions=permissions, service=service)

    if result["changed"]:
        changed = True

    result["changed"] = changed

    return result
def service_operation(module, auth, service_id=None, owner_id=None, group_id=None, permissions=None,
                      role=None, cardinality=None, force=None, wait=False, wait_timeout=None, service=None):
    """Apply requested changes (owner, group, mode, role cardinality) to an
    existing service, optionally waiting for it to settle.

    Either `service_id` or an already-fetched `service` document must be
    supplied.  Each change is compared against the current state first so
    that no-ops keep `changed` False, and nothing is modified in check
    mode.  Returns the (re-fetched, if modified) service info dict with a
    `changed` flag.
    """
    changed = False

    if not service:
        service = get_service_by_id(module, auth, service_id)
    else:
        service_id = service["ID"]

    if not service:
        module.fail_json(msg="There is no service with id: " + str(service_id))

    if owner_id:
        if check_change_service_owner(module, service, owner_id):
            if not module.check_mode:
                change_service_owner(module, auth, service_id, owner_id)
            changed = True
    if group_id:
        if check_change_service_group(module, service, group_id):
            if not module.check_mode:
                change_service_group(module, auth, service_id, group_id)
            changed = True
    if permissions:
        if check_change_service_permissions(module, service, permissions):
            if not module.check_mode:
                change_service_permissions(module, auth, service_id, permissions)
            changed = True

    if role:
        if check_change_role_cardinality(module, service, role, cardinality):
            if not module.check_mode:
                change_role_cardinality(module, auth, service_id, role, cardinality, force)
            changed = True

    if wait and not module.check_mode:
        service = wait_for_service_to_become_ready(module, auth, service_id, wait_timeout)

    # if something has changed, fetch service info again
    if changed:
        service = get_service_by_id(module, auth, service_id)

    service_info = get_service_info(module, auth, service)
    service_info["changed"] = changed

    return service_info
def delete_service(module, auth, service_id):
    """Terminate the service with `service_id`.

    Returns {"changed": False} when no such service exists; otherwise the
    service's info dict with changed=True.  Check mode skips the DELETE.
    """
    service = get_service_by_id(module, auth, service_id)
    if not service:
        return {"changed": False}

    service_info = get_service_info(module, auth, service)
    service_info["changed"] = True

    if module.check_mode:
        return service_info

    try:
        open_url(auth.url + '/service/' + str(service_id), method="DELETE",
                 force_basic_auth=True, url_username=auth.user, url_password=auth.password)
    except Exception as e:
        module.fail_json(msg="Service deletion has failed. Error message: " + str(e))

    return service_info
def get_template_by_name(module, auth, template_name):
    """Look up a service template by its (unique) name."""
    return get_template(module, auth, lambda template: (template["NAME"] == template_name))


def get_template_by_id(module, auth, template_id):
    """Look up a service template by numeric ID; None input gives None.

    BUG FIX: the old truthiness test (`... if template_id else None`) also
    rejected the valid ID 0; compare against None explicitly instead.
    """
    if template_id is None:
        return None
    return get_template(module, auth, lambda template: (int(template["ID"]) == int(template_id)))
def get_template_id(module, auth, requested_id, requested_name):
    """Resolve a template reference (ID takes precedence over name) to its ID.

    BUG FIX: use `is not None` so that a requested ID of 0 is honoured
    instead of silently falling back to the name lookup.
    """
    if requested_id is not None:
        template = get_template_by_id(module, auth, requested_id)
    else:
        template = get_template_by_name(module, auth, requested_name)
    if template:
        return template["ID"]
    return None
def get_service_id_by_name(module, auth, service_name):
    """Return the ID of the service named `service_name`, or None."""
    service = get_service_by_name(module, auth, service_name)
    return service["ID"] if service else None
def get_connection_info(module):
    """Assemble OneFlow connection settings into a named tuple.

    Module parameters win over the ONEFLOW_URL / ONEFLOW_USERNAME /
    ONEFLOW_PASSWORD environment variables; all three values are
    required, otherwise the module fails.
    """
    url = module.params.get('api_url') or os.environ.get('ONEFLOW_URL')
    username = module.params.get('api_username') or os.environ.get('ONEFLOW_USERNAME')
    password = module.params.get('api_password') or os.environ.get('ONEFLOW_PASSWORD')

    if not (url and username and password):
        module.fail_json(msg="One or more connection parameters (api_url, api_username, api_password) were not specified")

    from collections import namedtuple
    auth_params = namedtuple('auth', ('url', 'user', 'password'))

    return auth_params(url=url, user=username, password=password)
def main():
    """Entry point: declare the argument spec, validate parameter
    combinations, and dispatch to instantiate / manage / delete logic."""
    fields = {
        "api_url": {"required": False, "type": "str"},
        "api_username": {"required": False, "type": "str"},
        "api_password": {"required": False, "type": "str", "no_log": True},
        "service_name": {"required": False, "type": "str"},
        "service_id": {"required": False, "type": "int"},
        "template_name": {"required": False, "type": "str"},
        "template_id": {"required": False, "type": "int"},
        "state": {
            "default": "present",
            "choices": ['present', 'absent'],
            "type": "str"
        },
        "mode": {"required": False, "type": "str"},
        "owner_id": {"required": False, "type": "int"},
        "group_id": {"required": False, "type": "int"},
        "unique": {"default": False, "type": "bool"},
        "wait": {"default": False, "type": "bool"},
        "wait_timeout": {"default": 300, "type": "int"},
        "custom_attrs": {"default": {}, "type": "dict"},
        "role": {"required": False, "type": "str"},
        "cardinality": {"required": False, "type": "int"},
        "force": {"default": False, "type": "bool"}
    }

    module = AnsibleModule(argument_spec=fields,
                           mutually_exclusive=[
                               ['template_id', 'template_name', 'service_id'],
                               ['service_id', 'service_name'],
                               ['template_id', 'template_name', 'role'],
                               ['template_id', 'template_name', 'cardinality'],
                               ['service_id', 'custom_attrs']
                           ],
                           required_together=[['role', 'cardinality']],
                           supports_check_mode=True)

    auth = get_connection_info(module)
    params = module.params
    service_name = params.get('service_name')
    service_id = params.get('service_id')

    requested_template_id = params.get('template_id')
    requested_template_name = params.get('template_name')
    state = params.get('state')
    permissions = params.get('mode')
    owner_id = params.get('owner_id')
    group_id = params.get('group_id')
    unique = params.get('unique')
    wait = params.get('wait')
    wait_timeout = params.get('wait_timeout')
    custom_attrs = params.get('custom_attrs')
    role = params.get('role')
    cardinality = params.get('cardinality')
    force = params.get('force')

    # resolve a template reference (by ID or name) up front; a dangling
    # reference is a hard failure
    template_id = None
    if requested_template_id or requested_template_name:
        template_id = get_template_id(module, auth, requested_template_id, requested_template_name)
        if not template_id:
            if requested_template_id:
                module.fail_json(msg="There is no template with template_id: " + str(requested_template_id))
            elif requested_template_name:
                module.fail_json(msg="There is no template with name: " + requested_template_name)

    if unique and not service_name:
        module.fail_json(msg="You cannot use unique without passing service_name!")

    if template_id and state == 'absent':
        module.fail_json(msg="State absent is not valid for template")

    if template_id and state == 'present':  # Instantiate a service
        result = create_service_and_operation(module, auth, template_id, service_name, owner_id,
                                              group_id, permissions, custom_attrs, unique, wait, wait_timeout)
    else:  # manage (modify or delete) an existing service
        if not (service_id or service_name):
            module.fail_json(msg="To manage the service at least the service id or service name should be specified!")
        if custom_attrs:
            module.fail_json(msg="You can only set custom_attrs when instantiate service!")

        if not service_id:
            service_id = get_service_id_by_name(module, auth, service_name)

        # The task should be failed when we want to manage a non-existent service identified by its name
        if not service_id and state == 'present':
            module.fail_json(msg="There is no service with name: " + service_name)

        if state == 'absent':
            result = delete_service(module, auth, service_id)
        else:
            result = service_operation(module, auth, service_id, owner_id, group_id, permissions, role, cardinality, force, wait, wait_timeout)

    module.exit_json(**result)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
caoxiongkun/ardupilot | Tools/autotest/jsb_sim/runsim.py | 167 | 12772 | #!/usr/bin/env python
# run a jsbsim model as a child process
import sys, os, pexpect, socket
import math, time, select, struct, signal, errno
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'pysim'))
import util, atexit, fdpexpect
from pymavlink import fgFDM
class control_state(object):
    """Last control-surface values sent to JSBSim, plus the home ground
    height.  A single module-level instance (sitl_state) is shared by the
    input/output processing functions."""

    def __init__(self):
        # Everything starts centred/idle at zero; updated as SITL input
        # arrives.
        for name in ('aileron', 'elevator', 'throttle',
                     'rudder', 'ground_height'):
            setattr(self, name, 0)

# Shared simulator control state.
sitl_state = control_state()
def interpret_address(addrstr):
    """Parse an 'IP:port' string into a tuple with the port as an int."""
    parts = addrstr.split(':')
    parts[1] = int(parts[1])
    return tuple(parts)
def jsb_set(variable, value):
    """Send a 'set' command for one JSBSim property over the console link."""
    global jsb_console
    command = 'set %s %s\r\n' % (variable, value)
    jsb_console.send(command)
def setup_template(home):
    '''setup aircraft/Rascal/reset.xml

    Fills in the reset/fgout/rascal_test XML templates with the home
    location ("lat,lng,alt,hdg" string) and the UDP ports derived from
    --simout, writing the concrete files JSBSim will load.
    Exits the process if the home string is malformed.
    '''
    global opts
    v = home.split(',')
    if len(v) != 4:
        print("home should be lat,lng,alt,hdg - '%s'" % home)
        sys.exit(1)
    latitude = float(v[0])
    longitude = float(v[1])
    altitude = float(v[2])
    heading = float(v[3])
    # Remember the home altitude so AGL can be converted to ASL later.
    sitl_state.ground_height = altitude
    template = os.path.join('aircraft', 'Rascal', 'reset_template.xml')
    reset = os.path.join('aircraft', 'Rascal', 'reset.xml')
    xml = open(template).read() % { 'LATITUDE' : str(latitude),
                                    'LONGITUDE' : str(longitude),
                                    'HEADING' : str(heading) }
    open(reset, mode='w').write(xml)
    print("Wrote %s" % reset)
    # Derive the FlightGear-output and console ports from the SITL output
    # port (base+3 and base+4 respectively).
    baseport = int(opts.simout.split(':')[1])
    template = os.path.join('jsb_sim', 'fgout_template.xml')
    out = os.path.join('jsb_sim', 'fgout.xml')
    xml = open(template).read() % { 'FGOUTPORT' : str(baseport+3) }
    open(out, mode='w').write(xml)
    print("Wrote %s" % out)
    template = os.path.join('jsb_sim', 'rascal_test_template.xml')
    out = os.path.join('jsb_sim', 'rascal_test.xml')
    xml = open(template).read() % { 'JSBCONSOLEPORT' : str(baseport+4) }
    open(out, mode='w').write(xml)
    print("Wrote %s" % out)
def process_sitl_input(buf):
    '''process control changes from SITL sim

    buf is a 28-byte packet of 14 little-endian uint16: 11 PWM channel
    values followed by wind speed, direction and turbulence (each scaled
    by 100).  Converts the PWM values to normalized surface commands and
    forwards only the changed ones to JSBSim, followed by a 'step'.
    '''
    control = list(struct.unpack('<14H', buf))
    pwm = control[:11]
    (speed, direction, turbulance) = control[11:]

    global wind
    # Wind fields arrive scaled by 100.
    wind.speed = speed*0.01
    wind.direction = direction*0.01
    wind.turbulance = turbulance*0.01

    # Map PWM (1000..2000us) to normalized -1..1 (or 0..1 for throttle).
    aileron = (pwm[0]-1500)/500.0
    elevator = (pwm[1]-1500)/500.0
    throttle = (pwm[2]-1000)/1000.0
    if opts.revthr:
        throttle = 1.0 - throttle
    rudder = (pwm[3]-1500)/500.0

    if opts.elevon:
        # fake an elevon plane
        ch1 = aileron
        ch2 = elevator
        aileron = (ch2-ch1)/2.0
        # the minus does away with the need for RC2_REV=-1
        elevator = -(ch2+ch1)/2.0

    if opts.vtail:
        # fake a vtail plane (mixes elevator and rudder)
        ch1 = elevator
        ch2 = rudder
        # this matches VTAIL_OUTPUT==2
        elevator = (ch2-ch1)/2.0
        rudder = (ch2+ch1)/2.0

    # Only send commands for values that actually changed, to keep the
    # console traffic small.
    buf = ''
    if aileron != sitl_state.aileron:
        buf += 'set fcs/aileron-cmd-norm %s\n' % aileron
        sitl_state.aileron = aileron
    if elevator != sitl_state.elevator:
        buf += 'set fcs/elevator-cmd-norm %s\n' % elevator
        sitl_state.elevator = elevator
    if rudder != sitl_state.rudder:
        buf += 'set fcs/rudder-cmd-norm %s\n' % rudder
        sitl_state.rudder = rudder
    if throttle != sitl_state.throttle:
        buf += 'set fcs/throttle-cmd-norm %s\n' % throttle
        sitl_state.throttle = throttle
    buf += 'step\n'
    global jsb_console
    jsb_console.send(buf)
def update_wind(wind):
    """Push the current wind state into JSBSim's atmosphere model."""
    speed, direction = wind.current()
    # JSBSim wants direction in radians and magnitude in feet per second.
    direction_rad = math.radians(direction)
    speed_fps = speed/0.3048
    jsb_set('atmosphere/psiw-rad', direction_rad)
    jsb_set('atmosphere/wind-mag-fps', speed_fps)
def process_jsb_input(buf, simtime):
    '''process FG FDM input from JSBSim

    Parses one FlightGear FDM packet from JSBSim, optionally relays a
    copy to FlightGear for display (with altitude corrected from AGL to
    ASL using the stored ground height), and sends a packed state packet
    (timestamp + 17 doubles + magic) to the SITL simulator.
    '''
    global fdm, fg_out, sim_out
    fdm.parse(buf)
    if fg_out:
        try:
            agl = fdm.get('agl', units='meters')
            # FlightGear wants absolute altitude; JSBSim gives us AGL.
            fdm.set('altitude', agl+sitl_state.ground_height, units='meters')
            fdm.set('rpm', sitl_state.throttle*1000)
            fg_out.send(fdm.pack())
        except socket.error as e:
            # FlightGear may simply not be running; ignore refused sends.
            if e.errno not in [ errno.ECONNREFUSED ]:
                raise

    # Simulation time in microseconds.
    timestamp = int(simtime*1.0e6)

    # Pack the state for SITL: uint64 timestamp, 17 doubles, uint32 magic.
    simbuf = struct.pack('<Q17dI',
                         timestamp,
                         fdm.get('latitude', units='degrees'),
                         fdm.get('longitude', units='degrees'),
                         fdm.get('altitude', units='meters'),
                         fdm.get('psi', units='degrees'),
                         fdm.get('v_north', units='mps'),
                         fdm.get('v_east', units='mps'),
                         fdm.get('v_down', units='mps'),
                         fdm.get('A_X_pilot', units='mpss'),
                         fdm.get('A_Y_pilot', units='mpss'),
                         fdm.get('A_Z_pilot', units='mpss'),
                         fdm.get('phidot', units='dps'),
                         fdm.get('thetadot', units='dps'),
                         fdm.get('psidot', units='dps'),
                         fdm.get('phi', units='degrees'),
                         fdm.get('theta', units='degrees'),
                         fdm.get('psi', units='degrees'),
                         fdm.get('vcas', units='mps'),
                         0x4c56414f)
    try:
        sim_out.send(simbuf)
    except socket.error as e:
        if e.errno not in [ errno.ECONNREFUSED ]:
            raise
##################
# main program
# Parses command line options, writes the JSBSim templates, spawns the
# JSBSim child process and wires up all UDP/TCP channels before handing
# over to main_loop().
from optparse import OptionParser
parser = OptionParser("runsim.py [options]")
parser.add_option("--simin", help="SITL input (IP:port)", default="127.0.0.1:5502")
parser.add_option("--simout", help="SITL output (IP:port)", default="127.0.0.1:5501")
parser.add_option("--fgout", help="FG display output (IP:port)", default="127.0.0.1:5503")
parser.add_option("--home", type='string', help="home lat,lng,alt,hdg (required)")
parser.add_option("--script", type='string', help='jsbsim model script', default='jsb_sim/rascal_test.xml')
parser.add_option("--options", type='string', help='jsbsim startup options')
parser.add_option("--elevon", action='store_true', default=False, help='assume elevon input')
parser.add_option("--revthr", action='store_true', default=False, help='reverse throttle')
parser.add_option("--vtail", action='store_true', default=False, help='assume vtail input')
parser.add_option("--wind", dest="wind", help="Simulate wind (speed,direction,turbulance)", default='0,0,0')
parser.add_option("--rate", type='int', help="Simulation rate (Hz)", default=1000)
parser.add_option("--speedup", type='float', default=1.0, help="speedup from realtime")

(opts, args) = parser.parse_args()

# --home and --script have no defaults that make sense; insist on them.
for m in [ 'home', 'script' ]:
    if not opts.__dict__[m]:
        print("Missing required option '%s'" % m)
        parser.print_help()
        sys.exit(1)

os.chdir(util.reltopdir('Tools/autotest'))

# kill off child when we exit
atexit.register(util.pexpect_close_all)

setup_template(opts.home)

# start child
cmd = "JSBSim --realtime --suspend --nice --simulation-rate=%u --logdirectivefile=jsb_sim/fgout.xml --script=%s" % (opts.rate, opts.script)
if opts.options:
    cmd += ' %s' % opts.options

jsb = pexpect.spawn(cmd, logfile=sys.stdout, timeout=10)
jsb.delaybeforesend = 0
util.pexpect_autoclose(jsb)
# Scrape the dynamically-assigned ports from JSBSim's startup output.
i = jsb.expect(["Successfully bound to socket for input on port (\d+)",
                "Could not bind to socket for input"])
if i == 1:
    print("Failed to start JSBSim - is another copy running?")
    sys.exit(1)
jsb_out_address = interpret_address("127.0.0.1:%u" % int(jsb.match.group(1)))
jsb.expect("Creating UDP socket on port (\d+)")
jsb_in_address = interpret_address("127.0.0.1:%u" % int(jsb.match.group(1)))
jsb.expect("Successfully connected to socket for output")
jsb.expect("JSBSim Execution beginning")

# setup output to jsbsim
print("JSBSim console on %s" % str(jsb_out_address))
jsb_out = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
jsb_out.connect(jsb_out_address)
jsb_console = fdpexpect.fdspawn(jsb_out.fileno(), logfile=sys.stdout)
jsb_console.delaybeforesend = 0

# setup input from jsbsim
print("JSBSim FG FDM input on %s" % str(jsb_in_address))
jsb_in = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
jsb_in.bind(jsb_in_address)
jsb_in.setblocking(0)

# socket addresses
sim_out_address = interpret_address(opts.simout)
sim_in_address = interpret_address(opts.simin)

# setup input from SITL sim
sim_in = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sim_in.bind(sim_in_address)
sim_in.setblocking(0)

# setup output to SITL sim
sim_out = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sim_out.connect(interpret_address(opts.simout))
sim_out.setblocking(0)

# setup possible output to FlightGear for display
fg_out = None
if opts.fgout:
    fg_out = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    fg_out.connect(interpret_address(opts.fgout))

# setup wind generator
wind = util.Wind(opts.wind)

fdm = fgFDM.fgFDM()

# Kick JSBSim out of its suspended state and take the first step.
jsb_console.send('info\n')
jsb_console.send('resume\n')
jsb.expect(["trim computation time","Trim Results"])
time.sleep(1.5)
jsb_console.send('step\n')
jsb_console.logfile = None

print("Simulator ready to fly")
def main_loop():
    '''run main loop

    Multiplexes the JSBSim FDM socket, the SITL control socket and the
    two pexpect consoles with select(); paces frames to approximately
    opts.rate * opts.speedup using an adaptive sleep, and prints a status
    line every 3 seconds.
    '''
    tnow = time.time()
    last_report = tnow
    last_sim_input = tnow
    last_wind_update = tnow
    frame_count = 0
    paused = False

    # Fixed simulation step; simtime advances once per SITL input packet.
    simstep = 1.0/opts.rate
    simtime = simstep
    frame_time = 1.0/opts.rate
    scaled_frame_time = frame_time/opts.speedup
    last_wall_time = time.time()
    achieved_rate = opts.speedup

    while True:
        new_frame = False
        rin = [jsb_in.fileno(), sim_in.fileno(), jsb_console.fileno(), jsb.fileno()]
        try:
            (rin, win, xin) = select.select(rin, [], [], 1.0)
        except select.error:
            # Interrupted select; make sure our parent is still alive.
            util.check_parent()
            continue

        tnow = time.time()

        if jsb_in.fileno() in rin:
            buf = jsb_in.recv(fdm.packet_size())
            process_jsb_input(buf, simtime)
            frame_count += 1
            new_frame = True

        if sim_in.fileno() in rin:
            # SITL control packet is 14 uint16 = 28 bytes.
            simbuf = sim_in.recv(28)
            process_sitl_input(simbuf)
            simtime += simstep
            last_sim_input = tnow

        # show any jsbsim console output
        if jsb_console.fileno() in rin:
            util.pexpect_drain(jsb_console)
        if jsb.fileno() in rin:
            util.pexpect_drain(jsb)

        # refresh the wind model at most every 100ms
        if tnow - last_wind_update > 0.1:
            update_wind(wind)
            last_wind_update = tnow

        if tnow - last_report > 3:
            print("FPS %u asl=%.1f agl=%.1f roll=%.1f pitch=%.1f a=(%.2f %.2f %.2f) AR=%.1f" % (
                frame_count / (time.time() - last_report),
                fdm.get('altitude', units='meters'),
                fdm.get('agl', units='meters'),
                fdm.get('phi', units='degrees'),
                fdm.get('theta', units='degrees'),
                fdm.get('A_X_pilot', units='mpss'),
                fdm.get('A_Y_pilot', units='mpss'),
                fdm.get('A_Z_pilot', units='mpss'),
                achieved_rate))
            frame_count = 0
            last_report = time.time()

        if new_frame:
            now = time.time()
            # Sleep off any spare time in this frame, then nudge the
            # per-frame budget so the low-pass-filtered achieved rate
            # converges on the requested rate*speedup.
            if now < last_wall_time + scaled_frame_time:
                dt = last_wall_time+scaled_frame_time - now
                time.sleep(last_wall_time+scaled_frame_time - now)
                now = time.time()
            if now > last_wall_time and now - last_wall_time < 0.1:
                rate = 1.0/(now - last_wall_time)
                achieved_rate = (0.98*achieved_rate) + (0.02*rate)
                if achieved_rate < opts.rate*opts.speedup:
                    scaled_frame_time *= 0.999
                else:
                    scaled_frame_time *= 1.001
            last_wall_time = now
def exit_handler():
    '''exit the sim

    Ignores further signals, hard-kills the JSBSim child, closes all
    pexpect children and exits the process with status 1.
    '''
    print("running exit handler")
    # Avoid re-entering this handler while we tear everything down.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    signal.signal(signal.SIGTERM, signal.SIG_IGN)
    # JSBSim really doesn't like to die ...
    if getattr(jsb, 'pid', None) is not None:
        os.kill(jsb.pid, signal.SIGKILL)
    jsb_console.send('quit\n')
    jsb.close(force=True)
    util.pexpect_close_all()
    sys.exit(1)
# Tear down cleanly on Ctrl-C or SIGTERM.
signal.signal(signal.SIGINT, exit_handler)
signal.signal(signal.SIGTERM, exit_handler)

try:
    main_loop()
except Exception as ex:
    # Report the failure, clean up the child processes, then re-raise
    # for a full traceback (exit_handler calls sys.exit, so the raise is
    # only reached if cleanup itself was interrupted).
    print(ex)
    exit_handler()
    raise
| gpl-3.0 |
pdebuyl/lammps | tools/i-pi/ipi/utils/io/io_binary.py | 41 | 1532 | """Contains the functions used to print the trajectories and read input
configurations (or even full status dump) as unformatted binary.
Copyright (C) 2013, Joshua More and Michele Ceriotti
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http.//www.gnu.org/licenses/>.
Functions:
print_bin: Prints an atomic configuration.
"""
__all__ = ['print_bin']
import os
import numpy as np
import math, sys
from ipi.utils.depend import depstrip
def print_bin(atoms, cell, filedesc = sys.stdout, title=""):
    """Prints the centroid configurations, into a binary file.

    Writes, in order: the cell matrix, the atom count, the atom names and
    the position array, each via numpy's tofile().

    Args:
        atoms: An atoms object giving the centroid positions.
        cell: A cell object giving the system box.
        filedesc: An open writable file object. Defaults to standard output.
        title: This gives a string to be appended to the comment line.
            NOTE(review): currently unused by this writer — confirm intent.
    """
    buff = filedesc # .buffer
    cell.h.tofile(buff)
    nat = np.asarray([atoms.natoms])
    nat.tofile(buff)
    atoms.names.tofile(buff)
    atoms.q.tofile(buff)
| gpl-2.0 |
Belxjander/Kirito | Python-3.5.0-main/Lib/test/test_nntplib.py | 10 | 59796 | import io
import socket
import datetime
import textwrap
import unittest
import functools
import contextlib
from test import support
from nntplib import NNTP, GroupInfo
import nntplib
from unittest.mock import patch
try:
import ssl
except ImportError:
ssl = None
TIMEOUT = 30
# TODO:
# - test the `file` arg to more commands
# - test error conditions
# - test auth and `usenetrc`
class NetworkedNNTPTestsMixin:
    """Tests that talk to a real, remote NNTP server.

    Subclasses supply NNTP_HOST, GROUP_NAME, GROUP_PAT, NNTP_CLASS and a
    class-level ``server`` connection.  wrap_methods() (called below)
    wraps every test in support.transient_internet().
    """

    def test_welcome(self):
        welcome = self.server.getwelcome()
        self.assertEqual(str, type(welcome))

    def test_help(self):
        resp, lines = self.server.help()
        self.assertTrue(resp.startswith("100 "), resp)
        for line in lines:
            self.assertEqual(str, type(line))

    def test_list(self):
        resp, groups = self.server.list()
        if len(groups) > 0:
            self.assertEqual(GroupInfo, type(groups[0]))
            self.assertEqual(str, type(groups[0].group))

    def test_list_active(self):
        resp, groups = self.server.list(self.GROUP_PAT)
        if len(groups) > 0:
            self.assertEqual(GroupInfo, type(groups[0]))
            self.assertEqual(str, type(groups[0].group))

    def test_unknown_command(self):
        with self.assertRaises(nntplib.NNTPPermanentError) as cm:
            self.server._shortcmd("XYZZY")
        resp = cm.exception.response
        self.assertTrue(resp.startswith("500 "), resp)

    def test_newgroups(self):
        # gmane gets a constant influx of new groups. In order not to stress
        # the server too much, we choose a recent date in the past.
        dt = datetime.date.today() - datetime.timedelta(days=7)
        resp, groups = self.server.newgroups(dt)
        if len(groups) > 0:
            self.assertIsInstance(groups[0], GroupInfo)
            self.assertIsInstance(groups[0].group, str)

    def test_description(self):
        def _check_desc(desc):
            # Sanity checks
            self.assertIsInstance(desc, str)
            self.assertNotIn(self.GROUP_NAME, desc)
        desc = self.server.description(self.GROUP_NAME)
        _check_desc(desc)
        # Another sanity check
        self.assertIn("Python", desc)
        # With a pattern
        desc = self.server.description(self.GROUP_PAT)
        _check_desc(desc)
        # Shouldn't exist
        desc = self.server.description("zk.brrtt.baz")
        self.assertEqual(desc, '')

    def test_descriptions(self):
        resp, descs = self.server.descriptions(self.GROUP_PAT)
        # 215 for LIST NEWSGROUPS, 282 for XGTITLE
        self.assertTrue(
            resp.startswith("215 ") or resp.startswith("282 "), resp)
        self.assertIsInstance(descs, dict)
        desc = descs[self.GROUP_NAME]
        self.assertEqual(desc, self.server.description(self.GROUP_NAME))

    def test_group(self):
        result = self.server.group(self.GROUP_NAME)
        self.assertEqual(5, len(result))
        resp, count, first, last, group = result
        self.assertEqual(group, self.GROUP_NAME)
        self.assertIsInstance(count, int)
        self.assertIsInstance(first, int)
        self.assertIsInstance(last, int)
        self.assertLessEqual(first, last)
        self.assertTrue(resp.startswith("211 "), resp)

    def test_date(self):
        resp, date = self.server.date()
        self.assertIsInstance(date, datetime.datetime)
        # Sanity check
        self.assertGreaterEqual(date.year, 1995)
        self.assertLessEqual(date.year, 2030)

    def _check_art_dict(self, art_dict):
        # Some sanity checks for a field dictionary returned by OVER / XOVER
        self.assertIsInstance(art_dict, dict)
        # NNTP has 7 mandatory fields
        self.assertGreaterEqual(art_dict.keys(),
            {"subject", "from", "date", "message-id",
             "references", ":bytes", ":lines"}
            )
        for v in art_dict.values():
            self.assertIsInstance(v, (str, type(None)))

    def test_xover(self):
        resp, count, first, last, name = self.server.group(self.GROUP_NAME)
        resp, lines = self.server.xover(last - 5, last)
        if len(lines) == 0:
            self.skipTest("no articles retrieved")
        # The 'last' article is not necessarily part of the output (cancelled?)
        art_num, art_dict = lines[0]
        self.assertGreaterEqual(art_num, last - 5)
        self.assertLessEqual(art_num, last)
        self._check_art_dict(art_dict)

    def test_over(self):
        resp, count, first, last, name = self.server.group(self.GROUP_NAME)
        start = last - 10
        # The "start-" article range form
        resp, lines = self.server.over((start, None))
        art_num, art_dict = lines[0]
        self._check_art_dict(art_dict)
        # The "start-end" article range form
        resp, lines = self.server.over((start, last))
        art_num, art_dict = lines[-1]
        # The 'last' article is not necessarily part of the output (cancelled?)
        self.assertGreaterEqual(art_num, start)
        self.assertLessEqual(art_num, last)
        self._check_art_dict(art_dict)
        # XXX The "message_id" form is unsupported by gmane
        # 503 Overview by message-ID unsupported

    def test_xhdr(self):
        resp, count, first, last, name = self.server.group(self.GROUP_NAME)
        resp, lines = self.server.xhdr('subject', last)
        for line in lines:
            self.assertEqual(str, type(line[1]))

    def check_article_resp(self, resp, article, art_num=None):
        self.assertIsInstance(article, nntplib.ArticleInfo)
        if art_num is not None:
            self.assertEqual(article.number, art_num)
        for line in article.lines:
            self.assertIsInstance(line, bytes)
        # XXX this could exceptionally happen...
        self.assertNotIn(article.lines[-1], (b".", b".\n", b".\r\n"))

    def test_article_head_body(self):
        resp, count, first, last, name = self.server.group(self.GROUP_NAME)
        # Try to find an available article
        for art_num in (last, first, last - 1):
            try:
                resp, head = self.server.head(art_num)
            except nntplib.NNTPTemporaryError as e:
                if not e.response.startswith("423 "):
                    raise
                # "423 No such article" => choose another one
                continue
            break
        else:
            self.skipTest("could not find a suitable article number")
        self.assertTrue(resp.startswith("221 "), resp)
        self.check_article_resp(resp, head, art_num)
        resp, body = self.server.body(art_num)
        self.assertTrue(resp.startswith("222 "), resp)
        self.check_article_resp(resp, body, art_num)
        resp, article = self.server.article(art_num)
        self.assertTrue(resp.startswith("220 "), resp)
        self.check_article_resp(resp, article, art_num)
        # Tolerate running the tests from behind a NNTP virus checker
        blacklist = lambda line: line.startswith(b'X-Antivirus')
        filtered_head_lines = [line for line in head.lines
                               if not blacklist(line)]
        filtered_lines = [line for line in article.lines
                          if not blacklist(line)]
        self.assertEqual(filtered_lines, filtered_head_lines + [b''] + body.lines)

    def test_capabilities(self):
        # The server under test implements NNTP version 2 and has a
        # couple of well-known capabilities. Just sanity check that we
        # got them.
        def _check_caps(caps):
            caps_list = caps['LIST']
            self.assertIsInstance(caps_list, (list, tuple))
            self.assertIn('OVERVIEW.FMT', caps_list)
        self.assertGreaterEqual(self.server.nntp_version, 2)
        _check_caps(self.server.getcapabilities())
        # This re-emits the command
        resp, caps = self.server.capabilities()
        _check_caps(caps)

    @unittest.skipUnless(ssl, 'requires SSL support')
    def test_starttls(self):
        file = self.server.file
        sock = self.server.sock
        try:
            self.server.starttls()
        except nntplib.NNTPPermanentError:
            self.skipTest("STARTTLS not supported by server.")
        else:
            # Check that the socket and internal pseudo-file really were
            # changed.
            self.assertNotEqual(file, self.server.file)
            self.assertNotEqual(sock, self.server.sock)
            # Check that the new socket really is an SSL one
            self.assertIsInstance(self.server.sock, ssl.SSLSocket)
            # Check that trying starttls when it's already active fails.
            self.assertRaises(ValueError, self.server.starttls)

    def test_zlogin(self):
        # This test must be the penultimate because further commands will be
        # refused.
        baduser = "notarealuser"
        badpw = "notarealpassword"
        # Check that bogus credentials cause failure
        self.assertRaises(nntplib.NNTPError, self.server.login,
                          user=baduser, password=badpw, usenetrc=False)
        # FIXME: We should check that correct credentials succeed, but that
        # would require valid details for some server somewhere to be in the
        # test suite, I think. Gmane is anonymous, at least as used for the
        # other tests.

    def test_zzquit(self):
        # This test must be called last, hence the name
        cls = type(self)
        try:
            self.server.quit()
        finally:
            cls.server = None

    @classmethod
    def wrap_methods(cls):
        # Wrap all methods in a transient_internet() exception catcher
        # XXX put a generic version in test.support?
        def wrap_meth(meth):
            @functools.wraps(meth)
            def wrapped(self):
                with support.transient_internet(self.NNTP_HOST):
                    meth(self)
            return wrapped
        for name in dir(cls):
            if not name.startswith('test_'):
                continue
            meth = getattr(cls, name)
            if not callable(meth):
                continue
            # Need to use a closure so that meth remains bound to its current
            # value
            setattr(cls, name, wrap_meth(meth))

    def test_with_statement(self):
        def is_connected():
            if not hasattr(server, 'file'):
                return False
            try:
                server.help()
            except (OSError, EOFError):
                return False
            return True

        with self.NNTP_CLASS(self.NNTP_HOST, timeout=TIMEOUT, usenetrc=False) as server:
            self.assertTrue(is_connected())
            self.assertTrue(server.help())
        self.assertFalse(is_connected())

        with self.NNTP_CLASS(self.NNTP_HOST, timeout=TIMEOUT, usenetrc=False) as server:
            server.quit()
        self.assertFalse(is_connected())
# Apply the transient_internet() wrapper to every test_* method above.
NetworkedNNTPTestsMixin.wrap_methods()
class NetworkedNNTPTests(NetworkedNNTPTestsMixin, unittest.TestCase):
    """Networked tests over a plain (unencrypted) NNTP connection."""

    # This server supports STARTTLS (gmane doesn't)
    NNTP_HOST = 'news.trigofacile.com'
    GROUP_NAME = 'fr.comp.lang.python'
    GROUP_PAT = 'fr.comp.lang.*'

    NNTP_CLASS = NNTP

    @classmethod
    def setUpClass(cls):
        # One shared connection per class; test_zzquit tears it down.
        support.requires("network")
        with support.transient_internet(cls.NNTP_HOST):
            cls.server = cls.NNTP_CLASS(cls.NNTP_HOST, timeout=TIMEOUT, usenetrc=False)

    @classmethod
    def tearDownClass(cls):
        if cls.server is not None:
            cls.server.quit()
@unittest.skipUnless(ssl, 'requires SSL support')
class NetworkedNNTP_SSLTests(NetworkedNNTPTests):
    """Same networked tests, but over an implicitly-encrypted connection."""

    # Technical limits for this public NNTP server (see http://www.aioe.org):
    # "Only two concurrent connections per IP address are allowed and
    # 400 connections per day are accepted from each IP address."

    NNTP_HOST = 'nntp.aioe.org'
    GROUP_NAME = 'comp.lang.python'
    GROUP_PAT = 'comp.lang.*'

    NNTP_CLASS = getattr(nntplib, 'NNTP_SSL', None)

    # Disabled as it produces too much data
    test_list = None

    # Disabled as the connection will already be encrypted.
    test_starttls = None
#
# Non-networked tests using a local server (or something mocking it).
#
class _NNTPServerIO(io.RawIOBase):
"""A raw IO object allowing NNTP commands to be received and processed
by a handler. The handler can push responses which can then be read
from the IO object."""
def __init__(self, handler):
io.RawIOBase.__init__(self)
# The channel from the client
self.c2s = io.BytesIO()
# The channel to the client
self.s2c = io.BytesIO()
self.handler = handler
self.handler.start(self.c2s.readline, self.push_data)
def readable(self):
return True
def writable(self):
return True
def push_data(self, data):
"""Push (buffer) some data to send to the client."""
pos = self.s2c.tell()
self.s2c.seek(0, 2)
self.s2c.write(data)
self.s2c.seek(pos)
def write(self, b):
"""The client sends us some data"""
pos = self.c2s.tell()
self.c2s.write(b)
self.c2s.seek(pos)
self.handler.process_pending()
return len(b)
def readinto(self, buf):
"""The client wants to read a response"""
self.handler.process_pending()
b = self.s2c.read(len(buf))
n = len(b)
buf[:n] = b
return n
def make_mock_file(handler):
    """Wrap *handler* in a mock server IO and return (raw_io, file)."""
    raw = _NNTPServerIO(handler)
    # Using BufferedRWPair instead of BufferedRandom ensures the file
    # isn't seekable.
    return (raw, io.BufferedRWPair(raw, raw))
class MockedNNTPTestsMixin:
    """Base for tests run against an in-process mocked NNTP server."""

    # Override in derived classes
    handler_class = None

    def setUp(self):
        super().setUp()
        self.make_server()

    def tearDown(self):
        super().tearDown()
        del self.server

    def make_server(self, *args, **kwargs):
        # Build a fresh handler + mock file and connect an _NNTPBase to it;
        # extra args are forwarded to the _NNTPBase constructor.
        self.handler = self.handler_class()
        self.sio, file = make_mock_file(self.handler)
        self.server = nntplib._NNTPBase(file, 'test.server', *args, **kwargs)
        return self.server
class MockedNNTPWithReaderModeMixin(MockedNNTPTestsMixin):
    """Same as MockedNNTPTestsMixin but connects with readermode=True."""

    def setUp(self):
        super().setUp()
        self.make_server(readermode=True)
class NNTPv1Handler:
    """A handler for RFC 977"""

    # Greeting pushed to the client as soon as the connection is set up.
    welcome = "200 NNTP mock server"
def start(self, readline, push_data):
    # Called by _NNTPServerIO.__init__ with the client-input readline
    # callable and the output push callable; resets per-connection state
    # and immediately pushes the welcome banner.
    self.in_body = False
    self.allow_posting = True
    self._readline = readline
    self._push_data = push_data
    self._logged_in = False
    self._user_sent = False
    # Our welcome
    self.handle_welcome()
def _decode(self, data):
    # Decode wire bytes as UTF-8; surrogateescape keeps undecodable
    # bytes round-trippable.
    return str(data, "utf-8", "surrogateescape")
def process_pending(self):
    """Consume any complete commands (and a pending article body, if one
    was announced via expect_body) from the client stream, dispatching
    each command to the matching handle_<COMMAND> method."""
    if self.in_body:
        # Collect body lines until the terminating "." line, then call
        # back the command handler with body=... .
        while True:
            line = self._readline()
            if not line:
                return
            self.body.append(line)
            if line == b".\r\n":
                break
        try:
            meth, tokens = self.body_callback
            meth(*tokens, body=self.body)
        finally:
            self.body_callback = None
            self.body = None
            self.in_body = False
    while True:
        line = self._decode(self._readline())
        if not line:
            return
        if not line.endswith("\r\n"):
            raise ValueError("line doesn't end with \\r\\n: {!r}".format(line))
        line = line[:-2]
        cmd, *tokens = line.split()
        #meth = getattr(self.handler, "handle_" + cmd.upper(), None)
        meth = getattr(self, "handle_" + cmd.upper(), None)
        if meth is None:
            self.handle_unknown()
        else:
            try:
                meth(*tokens)
            except Exception as e:
                raise ValueError("command failed: {!r}".format(line)) from e
            else:
                # If the handler flagged a body (POST/IHAVE), remember it
                # so the body can be delivered on the next call.
                if self.in_body:
                    self.body_callback = meth, tokens
                    self.body = []
def expect_body(self):
    """Flag that the client is expected to post a request body"""
    self.in_body = True

def push_data(self, data):
    """Push some binary data"""
    self._push_data(data)

def push_lit(self, lit):
    """Push a string literal"""
    # Dedent so handlers can use indented triple-quoted responses, then
    # normalize line endings to CRLF and encode as UTF-8.
    lit = textwrap.dedent(lit)
    lit = "\r\n".join(lit.splitlines()) + "\r\n"
    lit = lit.encode('utf-8')
    self.push_data(lit)
def handle_unknown(self):
    # Catch-all for commands without a dedicated handle_* method.
    self.push_lit("500 What?")

def handle_welcome(self):
    self.push_lit(self.welcome)

def handle_QUIT(self):
    self.push_lit("205 Bye!")

def handle_DATE(self):
    # Fixed timestamp so tests are deterministic.
    self.push_lit("111 20100914001155")
def handle_GROUP(self, group):
    # Only one group is known to the mock server.
    if group == "fr.comp.lang.python":
        self.push_lit("211 486 761 1265 fr.comp.lang.python")
    else:
        self.push_lit("411 No such group {}".format(group))
def handle_HELP(self):
    # Canned multi-line HELP response, terminated by the "." line.
    self.push_lit("""\
        100 Legal commands
        authinfo user Name|pass Password|generic <prog> <args>
        date
        help
        Report problems to <root@example.org>
        .""")
def handle_STAT(self, message_spec=None):
    # STAT with no argument requires a currently-selected group; only one
    # article number and one message-id are known to the mock.
    if message_spec is None:
        self.push_lit("412 No newsgroup selected")
    elif message_spec == "3000234":
        self.push_lit("223 3000234 <45223423@example.com>")
    elif message_spec == "<45223423@example.com>":
        self.push_lit("223 0 <45223423@example.com>")
    else:
        self.push_lit("430 No Such Article Found")
def handle_NEXT(self):
    self.push_lit("223 3000237 <668929@example.org> retrieved")

def handle_LAST(self):
    self.push_lit("223 3000234 <45223423@example.com> retrieved")
def handle_LIST(self, action=None, param=None):
    # Implements plain LIST plus the ACTIVE, OVERVIEW.FMT and NEWSGROUPS
    # keywords; anything else gets a 501.
    if action is None:
        self.push_lit("""\
            215 Newsgroups in form "group high low flags".
            comp.lang.python 0000052340 0000002828 y
            comp.lang.python.announce 0000001153 0000000993 m
            free.it.comp.lang.python 0000000002 0000000002 y
            fr.comp.lang.python 0000001254 0000000760 y
            free.it.comp.lang.python.learner 0000000000 0000000001 y
            tw.bbs.comp.lang.python 0000000304 0000000304 y
            .""")
    elif action == "ACTIVE":
        if param == "*distutils*":
            self.push_lit("""\
                215 Newsgroups in form "group high low flags"
                gmane.comp.python.distutils.devel 0000014104 0000000001 m
                gmane.comp.python.distutils.cvs 0000000000 0000000001 m
                .""")
        else:
            self.push_lit("""\
                215 Newsgroups in form "group high low flags"
                .""")
    elif action == "OVERVIEW.FMT":
        self.push_lit("""\
            215 Order of fields in overview database.
            Subject:
            From:
            Date:
            Message-ID:
            References:
            Bytes:
            Lines:
            Xref:full
            .""")
    elif action == "NEWSGROUPS":
        assert param is not None
        if param == "comp.lang.python":
            self.push_lit("""\
                215 Descriptions in form "group description".
                comp.lang.python\tThe Python computer language.
                .""")
        elif param == "comp.lang.python*":
            self.push_lit("""\
                215 Descriptions in form "group description".
                comp.lang.python.announce\tAnnouncements about the Python language. (Moderated)
                comp.lang.python\tThe Python computer language.
                .""")
        else:
            self.push_lit("""\
                215 Descriptions in form "group description".
                .""")
    else:
        self.push_lit('501 Unknown LIST keyword')
def handle_NEWNEWS(self, group, date_str, time_str):
    # We hard code different return messages depending on passed
    # argument and date syntax.
    if (group == "comp.lang.python" and date_str == "20100913"
            and time_str == "082004"):
        # Date was passed in RFC 3977 format (NNTP "v2")
        self.push_lit("""\
            230 list of newsarticles (NNTP v2) created after Mon Sep 13 08:20:04 2010 follows
            <a4929a40-6328-491a-aaaf-cb79ed7309a2@q2g2000vbk.googlegroups.com>
            <f30c0419-f549-4218-848f-d7d0131da931@y3g2000vbm.googlegroups.com>
            .""")
    elif (group == "comp.lang.python" and date_str == "100913"
            and time_str == "082004"):
        # Date was passed in RFC 977 format (NNTP "v1")
        self.push_lit("""\
            230 list of newsarticles (NNTP v1) created after Mon Sep 13 08:20:04 2010 follows
            <a4929a40-6328-491a-aaaf-cb79ed7309a2@q2g2000vbk.googlegroups.com>
            <f30c0419-f549-4218-848f-d7d0131da931@y3g2000vbm.googlegroups.com>
            .""")
    elif (group == 'comp.lang.python' and
            date_str in ('20100101', '100101') and
            time_str == '090000'):
        # Oversized-line case used to exercise the client's line-length
        # protection.
        self.push_lit('too long line' * 3000 +
                      '\n.')
    else:
        self.push_lit("""\
            230 An empty list of newsarticles follows
            .""")
    # (Note for experiments: many servers disable NEWNEWS.
    #  As of this writing, sicinfo3.epfl.ch doesn't.)
def handle_XOVER(self, message_spec):
    # Only the "57-59" range is populated; the overview lines are
    # tab-separated per the OVERVIEW.FMT pushed by handle_LIST.
    if message_spec == "57-59":
        self.push_lit(
            "224 Overview information for 57-58 follows\n"
            "57\tRe: ANN: New Plone book with strong Python (and Zope) themes throughout"
            "\tDoug Hellmann <doug.hellmann-Re5JQEeQqe8AvxtiuMwx3w@public.gmane.org>"
            "\tSat, 19 Jun 2010 18:04:08 -0400"
            "\t<4FD05F05-F98B-44DC-8111-C6009C925F0C@gmail.com>"
            "\t<hvalf7$ort$1@dough.gmane.org>\t7103\t16"
            "\tXref: news.gmane.org gmane.comp.python.authors:57"
            "\n"
            "58\tLooking for a few good bloggers"
            "\tDoug Hellmann <doug.hellmann-Re5JQEeQqe8AvxtiuMwx3w@public.gmane.org>"
            "\tThu, 22 Jul 2010 09:14:14 -0400"
            "\t<A29863FA-F388-40C3-AA25-0FD06B09B5BF@gmail.com>"
            "\t\t6683\t16"
            "\t"
            "\n"
            # An UTF-8 overview line from fr.comp.lang.python
            "59\tRe: Message d'erreur incompréhensible (par moi)"
            "\tEric Brunel <eric.brunel@pragmadev.nospam.com>"
            "\tWed, 15 Sep 2010 18:09:15 +0200"
            "\t<eric.brunel-2B8B56.18091515092010@news.wanadoo.fr>"
            "\t<4c90ec87$0$32425$ba4acef3@reader.news.orange.fr>\t1641\t27"
            "\tXref: saria.nerim.net fr.comp.lang.python:1265"
            "\n"
            ".\n")
    else:
        self.push_lit("""\
            224 No articles
            .""")
def handle_POST(self, *, body=None):
    """Serve the POST command as a two-phase exchange.

    First call (``body is None``): reply 340 and arm the body reader
    when posting is allowed, otherwise reply 440.  Second call (body
    supplied by the framework): acknowledge with 240 and remember the
    raw posted body for inspection by the tests.
    """
    if body is not None:
        # Phase two: the article body has been transmitted.
        assert self.allow_posting
        self.push_lit("240 Article received OK")
        self.posted_body = body
        return
    if not self.allow_posting:
        self.push_lit("440 Posting not permitted")
        return
    self.push_lit("340 Input article; end with <CR-LF>.<CR-LF>")
    self.expect_body()
def handle_IHAVE(self, message_id, *, body=None):
    """Serve the IHAVE command as a two-phase exchange.

    Only one well-known message-id is "wanted"; everything else (or a
    server with posting disabled) is refused with 435.  The second
    phase acknowledges the transferred body with 235.
    """
    if body is not None:
        # Phase two: article body transferred by the client.
        assert self.allow_posting
        self.push_lit("235 Article transferred OK")
        self.posted_body = body
        return
    wanted = (self.allow_posting and
              message_id == "<i.am.an.article.you.will.want@example.com>")
    if wanted:
        self.push_lit("335 Send it; end with <CR-LF>.<CR-LF>")
        self.expect_body()
    else:
        self.push_lit("435 Article not wanted")
# Canned article used by the ARTICLE/HEAD/BODY handlers below.
# The body deliberately contains a dot-doubled line ("..Here ...") to
# exercise dot-unstuffing, and a non-ASCII character (é) in the
# signature to exercise encoding handling.
sample_head = """\
From: "Demo User" <nobody@example.net>
Subject: I am just a test article
Content-Type: text/plain; charset=UTF-8; format=flowed
Message-ID: <i.am.an.article.you.will.want@example.com>"""
sample_body = """\
This is just a test article.
..Here is a dot-starting line.
-- Signed by Andr\xe9."""
# Full article: headers, blank separator line, then the body.
sample_article = sample_head + "\n\n" + sample_body
def handle_ARTICLE(self, message_spec=None):
    """Serve ARTICLE: a 220 status line, the full sample article, then
    the "." terminator.  Unknown specs are answered with 430."""
    status_by_spec = {
        None: "220 3000237 <45223423@example.com>",
        "<45223423@example.com>": "220 0 <45223423@example.com>",
        "3000234": "220 3000234 <45223423@example.com>",
    }
    try:
        status = status_by_spec[message_spec]
    except KeyError:
        self.push_lit("430 No Such Article Found")
        return
    self.push_lit(status)
    self.push_lit(self.sample_article)
    self.push_lit(".")
def handle_HEAD(self, message_spec=None):
    """Serve HEAD: a 221 status line, the sample article headers, then
    the "." terminator.  Unknown specs are answered with 430."""
    status_by_spec = {
        None: "221 3000237 <45223423@example.com>",
        "<45223423@example.com>": "221 0 <45223423@example.com>",
        "3000234": "221 3000234 <45223423@example.com>",
    }
    try:
        status = status_by_spec[message_spec]
    except KeyError:
        self.push_lit("430 No Such Article Found")
        return
    self.push_lit(status)
    self.push_lit(self.sample_head)
    self.push_lit(".")
def handle_BODY(self, message_spec=None):
    """Serve BODY: a 222 status line, the sample article body, then
    the "." terminator.  Unknown specs are answered with 430."""
    status_by_spec = {
        None: "222 3000237 <45223423@example.com>",
        "<45223423@example.com>": "222 0 <45223423@example.com>",
        "3000234": "222 3000234 <45223423@example.com>",
    }
    try:
        status = status_by_spec[message_spec]
    except KeyError:
        self.push_lit("430 No Such Article Found")
        return
    self.push_lit(status)
    self.push_lit(self.sample_body)
    self.push_lit(".")
def handle_AUTHINFO(self, cred_type, data):
    """Serve AUTHINFO with a fixed USER-then-PASS dialog.

    Tracks two state flags on the handler: ``_user_sent`` (the USER
    step happened) and ``_logged_in`` (the PASS step succeeded).
    """
    if self._logged_in:
        self.push_lit('502 Already Logged In')
        return
    if cred_type == 'user':
        if self._user_sent:
            self.push_lit('482 User Credential Already Sent')
        else:
            self.push_lit('381 Password Required')
            self._user_sent = True
        return
    if cred_type == 'pass':
        self.push_lit('281 Login Successful')
        self._logged_in = True
        return
    raise Exception('Unknown cred type {}'.format(cred_type))
class NNTPv2Handler(NNTPv1Handler):
    """A handler for RFC 3977 (NNTP "v2")"""

    def handle_CAPABILITIES(self):
        # AUTHINFO is advertised only while the client is not yet
        # authenticated, mirroring real server behaviour.
        template = """\
101 Capability list:
VERSION 2 3
IMPLEMENTATION INN 2.5.1{}
HDR
LIST ACTIVE ACTIVE.TIMES DISTRIB.PATS HEADERS NEWSGROUPS OVERVIEW.FMT
OVER
POST
READER
."""
        extra = '' if self._logged_in else '\n AUTHINFO USER'
        self.push_lit(template.format(extra))

    def handle_MODE(self, _):
        # A v2 server advertises READER, so the client must not fall
        # back to the legacy MODE READER handshake.
        raise Exception('MODE READER sent despite READER has been advertised')

    def handle_OVER(self, message_spec=None):
        # OVER is served from the same canned data as XOVER.
        return self.handle_XOVER(message_spec)
class CapsAfterLoginNNTPv2Handler(NNTPv2Handler):
    """A handler that allows CAPABILITIES only after login"""

    def handle_CAPABILITIES(self):
        # Before authentication the server refuses with 480; afterwards
        # it answers exactly like a regular v2 server.
        if self._logged_in:
            super().handle_CAPABILITIES()
        else:
            self.push_lit('480 You must log in.')
class ModeSwitchingNNTPv2Handler(NNTPv2Handler):
    """A server that starts in transit mode"""

    def __init__(self):
        # NOTE(review): does not chain to the base __init__; assumes the
        # base handler needs no extra initialisation -- confirm upstream.
        self._switched = False

    def handle_CAPABILITIES(self):
        # Until MODE READER is received, READER is masked by advertising
        # the "MODE-READER" capability instead.
        template = """\
101 Capability list:
VERSION 2 3
IMPLEMENTATION INN 2.5.1
HDR
LIST ACTIVE ACTIVE.TIMES DISTRIB.PATS HEADERS NEWSGROUPS OVERVIEW.FMT
OVER
POST
{}READER
."""
        prefix = '' if self._switched else 'MODE-'
        self.push_lit(template.format(prefix))

    def handle_MODE(self, what):
        assert not self._switched and what == 'reader'
        self._switched = True
        self.push_lit('200 Posting allowed')
class NNTPv1v2TestsMixin:
def setUp(self):
super().setUp()
def test_welcome(self):
self.assertEqual(self.server.welcome, self.handler.welcome)
def test_authinfo(self):
if self.nntp_version == 2:
self.assertIn('AUTHINFO', self.server._caps)
self.server.login('testuser', 'testpw')
# if AUTHINFO is gone from _caps we also know that getcapabilities()
# has been called after login as it should
self.assertNotIn('AUTHINFO', self.server._caps)
def test_date(self):
resp, date = self.server.date()
self.assertEqual(resp, "111 20100914001155")
self.assertEqual(date, datetime.datetime(2010, 9, 14, 0, 11, 55))
def test_quit(self):
self.assertFalse(self.sio.closed)
resp = self.server.quit()
self.assertEqual(resp, "205 Bye!")
self.assertTrue(self.sio.closed)
def test_help(self):
resp, help = self.server.help()
self.assertEqual(resp, "100 Legal commands")
self.assertEqual(help, [
' authinfo user Name|pass Password|generic <prog> <args>',
' date',
' help',
'Report problems to <root@example.org>',
])
def test_list(self):
resp, groups = self.server.list()
self.assertEqual(len(groups), 6)
g = groups[1]
self.assertEqual(g,
GroupInfo("comp.lang.python.announce", "0000001153",
"0000000993", "m"))
resp, groups = self.server.list("*distutils*")
self.assertEqual(len(groups), 2)
g = groups[0]
self.assertEqual(g,
GroupInfo("gmane.comp.python.distutils.devel", "0000014104",
"0000000001", "m"))
def test_stat(self):
resp, art_num, message_id = self.server.stat(3000234)
self.assertEqual(resp, "223 3000234 <45223423@example.com>")
self.assertEqual(art_num, 3000234)
self.assertEqual(message_id, "<45223423@example.com>")
resp, art_num, message_id = self.server.stat("<45223423@example.com>")
self.assertEqual(resp, "223 0 <45223423@example.com>")
self.assertEqual(art_num, 0)
self.assertEqual(message_id, "<45223423@example.com>")
with self.assertRaises(nntplib.NNTPTemporaryError) as cm:
self.server.stat("<non.existent.id>")
self.assertEqual(cm.exception.response, "430 No Such Article Found")
with self.assertRaises(nntplib.NNTPTemporaryError) as cm:
self.server.stat()
self.assertEqual(cm.exception.response, "412 No newsgroup selected")
def test_next(self):
resp, art_num, message_id = self.server.next()
self.assertEqual(resp, "223 3000237 <668929@example.org> retrieved")
self.assertEqual(art_num, 3000237)
self.assertEqual(message_id, "<668929@example.org>")
def test_last(self):
resp, art_num, message_id = self.server.last()
self.assertEqual(resp, "223 3000234 <45223423@example.com> retrieved")
self.assertEqual(art_num, 3000234)
self.assertEqual(message_id, "<45223423@example.com>")
def test_description(self):
desc = self.server.description("comp.lang.python")
self.assertEqual(desc, "The Python computer language.")
desc = self.server.description("comp.lang.pythonx")
self.assertEqual(desc, "")
def test_descriptions(self):
resp, groups = self.server.descriptions("comp.lang.python")
self.assertEqual(resp, '215 Descriptions in form "group description".')
self.assertEqual(groups, {
"comp.lang.python": "The Python computer language.",
})
resp, groups = self.server.descriptions("comp.lang.python*")
self.assertEqual(groups, {
"comp.lang.python": "The Python computer language.",
"comp.lang.python.announce": "Announcements about the Python language. (Moderated)",
})
resp, groups = self.server.descriptions("comp.lang.pythonx")
self.assertEqual(groups, {})
def test_group(self):
resp, count, first, last, group = self.server.group("fr.comp.lang.python")
self.assertTrue(resp.startswith("211 "), resp)
self.assertEqual(first, 761)
self.assertEqual(last, 1265)
self.assertEqual(count, 486)
self.assertEqual(group, "fr.comp.lang.python")
with self.assertRaises(nntplib.NNTPTemporaryError) as cm:
self.server.group("comp.lang.python.devel")
exc = cm.exception
self.assertTrue(exc.response.startswith("411 No such group"),
exc.response)
def test_newnews(self):
# NEWNEWS comp.lang.python [20]100913 082004
dt = datetime.datetime(2010, 9, 13, 8, 20, 4)
resp, ids = self.server.newnews("comp.lang.python", dt)
expected = (
"230 list of newsarticles (NNTP v{0}) "
"created after Mon Sep 13 08:20:04 2010 follows"
).format(self.nntp_version)
self.assertEqual(resp, expected)
self.assertEqual(ids, [
"<a4929a40-6328-491a-aaaf-cb79ed7309a2@q2g2000vbk.googlegroups.com>",
"<f30c0419-f549-4218-848f-d7d0131da931@y3g2000vbm.googlegroups.com>",
])
# NEWNEWS fr.comp.lang.python [20]100913 082004
dt = datetime.datetime(2010, 9, 13, 8, 20, 4)
resp, ids = self.server.newnews("fr.comp.lang.python", dt)
self.assertEqual(resp, "230 An empty list of newsarticles follows")
self.assertEqual(ids, [])
def _check_article_body(self, lines):
self.assertEqual(len(lines), 4)
self.assertEqual(lines[-1].decode('utf-8'), "-- Signed by André.")
self.assertEqual(lines[-2], b"")
self.assertEqual(lines[-3], b".Here is a dot-starting line.")
self.assertEqual(lines[-4], b"This is just a test article.")
def _check_article_head(self, lines):
self.assertEqual(len(lines), 4)
self.assertEqual(lines[0], b'From: "Demo User" <nobody@example.net>')
self.assertEqual(lines[3], b"Message-ID: <i.am.an.article.you.will.want@example.com>")
def _check_article_data(self, lines):
self.assertEqual(len(lines), 9)
self._check_article_head(lines[:4])
self._check_article_body(lines[-4:])
self.assertEqual(lines[4], b"")
def test_article(self):
# ARTICLE
resp, info = self.server.article()
self.assertEqual(resp, "220 3000237 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 3000237)
self.assertEqual(message_id, "<45223423@example.com>")
self._check_article_data(lines)
# ARTICLE num
resp, info = self.server.article(3000234)
self.assertEqual(resp, "220 3000234 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 3000234)
self.assertEqual(message_id, "<45223423@example.com>")
self._check_article_data(lines)
# ARTICLE id
resp, info = self.server.article("<45223423@example.com>")
self.assertEqual(resp, "220 0 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 0)
self.assertEqual(message_id, "<45223423@example.com>")
self._check_article_data(lines)
# Non-existent id
with self.assertRaises(nntplib.NNTPTemporaryError) as cm:
self.server.article("<non-existent@example.com>")
self.assertEqual(cm.exception.response, "430 No Such Article Found")
def test_article_file(self):
# With a "file" argument
f = io.BytesIO()
resp, info = self.server.article(file=f)
self.assertEqual(resp, "220 3000237 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 3000237)
self.assertEqual(message_id, "<45223423@example.com>")
self.assertEqual(lines, [])
data = f.getvalue()
self.assertTrue(data.startswith(
b'From: "Demo User" <nobody@example.net>\r\n'
b'Subject: I am just a test article\r\n'
), ascii(data))
self.assertTrue(data.endswith(
b'This is just a test article.\r\n'
b'.Here is a dot-starting line.\r\n'
b'\r\n'
b'-- Signed by Andr\xc3\xa9.\r\n'
), ascii(data))
def test_head(self):
# HEAD
resp, info = self.server.head()
self.assertEqual(resp, "221 3000237 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 3000237)
self.assertEqual(message_id, "<45223423@example.com>")
self._check_article_head(lines)
# HEAD num
resp, info = self.server.head(3000234)
self.assertEqual(resp, "221 3000234 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 3000234)
self.assertEqual(message_id, "<45223423@example.com>")
self._check_article_head(lines)
# HEAD id
resp, info = self.server.head("<45223423@example.com>")
self.assertEqual(resp, "221 0 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 0)
self.assertEqual(message_id, "<45223423@example.com>")
self._check_article_head(lines)
# Non-existent id
with self.assertRaises(nntplib.NNTPTemporaryError) as cm:
self.server.head("<non-existent@example.com>")
self.assertEqual(cm.exception.response, "430 No Such Article Found")
def test_head_file(self):
f = io.BytesIO()
resp, info = self.server.head(file=f)
self.assertEqual(resp, "221 3000237 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 3000237)
self.assertEqual(message_id, "<45223423@example.com>")
self.assertEqual(lines, [])
data = f.getvalue()
self.assertTrue(data.startswith(
b'From: "Demo User" <nobody@example.net>\r\n'
b'Subject: I am just a test article\r\n'
), ascii(data))
self.assertFalse(data.endswith(
b'This is just a test article.\r\n'
b'.Here is a dot-starting line.\r\n'
b'\r\n'
b'-- Signed by Andr\xc3\xa9.\r\n'
), ascii(data))
def test_body(self):
# BODY
resp, info = self.server.body()
self.assertEqual(resp, "222 3000237 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 3000237)
self.assertEqual(message_id, "<45223423@example.com>")
self._check_article_body(lines)
# BODY num
resp, info = self.server.body(3000234)
self.assertEqual(resp, "222 3000234 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 3000234)
self.assertEqual(message_id, "<45223423@example.com>")
self._check_article_body(lines)
# BODY id
resp, info = self.server.body("<45223423@example.com>")
self.assertEqual(resp, "222 0 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 0)
self.assertEqual(message_id, "<45223423@example.com>")
self._check_article_body(lines)
# Non-existent id
with self.assertRaises(nntplib.NNTPTemporaryError) as cm:
self.server.body("<non-existent@example.com>")
self.assertEqual(cm.exception.response, "430 No Such Article Found")
def test_body_file(self):
f = io.BytesIO()
resp, info = self.server.body(file=f)
self.assertEqual(resp, "222 3000237 <45223423@example.com>")
art_num, message_id, lines = info
self.assertEqual(art_num, 3000237)
self.assertEqual(message_id, "<45223423@example.com>")
self.assertEqual(lines, [])
data = f.getvalue()
self.assertFalse(data.startswith(
b'From: "Demo User" <nobody@example.net>\r\n'
b'Subject: I am just a test article\r\n'
), ascii(data))
self.assertTrue(data.endswith(
b'This is just a test article.\r\n'
b'.Here is a dot-starting line.\r\n'
b'\r\n'
b'-- Signed by Andr\xc3\xa9.\r\n'
), ascii(data))
def check_over_xover_resp(self, resp, overviews):
self.assertTrue(resp.startswith("224 "), resp)
self.assertEqual(len(overviews), 3)
art_num, over = overviews[0]
self.assertEqual(art_num, 57)
self.assertEqual(over, {
"from": "Doug Hellmann <doug.hellmann-Re5JQEeQqe8AvxtiuMwx3w@public.gmane.org>",
"subject": "Re: ANN: New Plone book with strong Python (and Zope) themes throughout",
"date": "Sat, 19 Jun 2010 18:04:08 -0400",
"message-id": "<4FD05F05-F98B-44DC-8111-C6009C925F0C@gmail.com>",
"references": "<hvalf7$ort$1@dough.gmane.org>",
":bytes": "7103",
":lines": "16",
"xref": "news.gmane.org gmane.comp.python.authors:57"
})
art_num, over = overviews[1]
self.assertEqual(over["xref"], None)
art_num, over = overviews[2]
self.assertEqual(over["subject"],
"Re: Message d'erreur incompréhensible (par moi)")
def test_xover(self):
resp, overviews = self.server.xover(57, 59)
self.check_over_xover_resp(resp, overviews)
def test_over(self):
# In NNTP "v1", this will fallback on XOVER
resp, overviews = self.server.over((57, 59))
self.check_over_xover_resp(resp, overviews)
sample_post = (
b'From: "Demo User" <nobody@example.net>\r\n'
b'Subject: I am just a test article\r\n'
b'Content-Type: text/plain; charset=UTF-8; format=flowed\r\n'
b'Message-ID: <i.am.an.article.you.will.want@example.com>\r\n'
b'\r\n'
b'This is just a test article.\r\n'
b'.Here is a dot-starting line.\r\n'
b'\r\n'
b'-- Signed by Andr\xc3\xa9.\r\n'
)
def _check_posted_body(self):
# Check the raw body as received by the server
lines = self.handler.posted_body
# One additional line for the "." terminator
self.assertEqual(len(lines), 10)
self.assertEqual(lines[-1], b'.\r\n')
self.assertEqual(lines[-2], b'-- Signed by Andr\xc3\xa9.\r\n')
self.assertEqual(lines[-3], b'\r\n')
self.assertEqual(lines[-4], b'..Here is a dot-starting line.\r\n')
self.assertEqual(lines[0], b'From: "Demo User" <nobody@example.net>\r\n')
def _check_post_ihave_sub(self, func, *args, file_factory):
# First the prepared post with CRLF endings
post = self.sample_post
func_args = args + (file_factory(post),)
self.handler.posted_body = None
resp = func(*func_args)
self._check_posted_body()
# Then the same post with "normal" line endings - they should be
# converted by NNTP.post and NNTP.ihave.
post = self.sample_post.replace(b"\r\n", b"\n")
func_args = args + (file_factory(post),)
self.handler.posted_body = None
resp = func(*func_args)
self._check_posted_body()
return resp
def check_post_ihave(self, func, success_resp, *args):
# With a bytes object
resp = self._check_post_ihave_sub(func, *args, file_factory=bytes)
self.assertEqual(resp, success_resp)
# With a bytearray object
resp = self._check_post_ihave_sub(func, *args, file_factory=bytearray)
self.assertEqual(resp, success_resp)
# With a file object
resp = self._check_post_ihave_sub(func, *args, file_factory=io.BytesIO)
self.assertEqual(resp, success_resp)
# With an iterable of terminated lines
def iterlines(b):
return iter(b.splitlines(keepends=True))
resp = self._check_post_ihave_sub(func, *args, file_factory=iterlines)
self.assertEqual(resp, success_resp)
# With an iterable of non-terminated lines
def iterlines(b):
return iter(b.splitlines(keepends=False))
resp = self._check_post_ihave_sub(func, *args, file_factory=iterlines)
self.assertEqual(resp, success_resp)
def test_post(self):
self.check_post_ihave(self.server.post, "240 Article received OK")
self.handler.allow_posting = False
with self.assertRaises(nntplib.NNTPTemporaryError) as cm:
self.server.post(self.sample_post)
self.assertEqual(cm.exception.response,
"440 Posting not permitted")
def test_ihave(self):
self.check_post_ihave(self.server.ihave, "235 Article transferred OK",
"<i.am.an.article.you.will.want@example.com>")
with self.assertRaises(nntplib.NNTPTemporaryError) as cm:
self.server.ihave("<another.message.id>", self.sample_post)
self.assertEqual(cm.exception.response,
"435 Article not wanted")
def test_too_long_lines(self):
dt = datetime.datetime(2010, 1, 1, 9, 0, 0)
self.assertRaises(nntplib.NNTPDataError,
self.server.newnews, "comp.lang.python", dt)
class NNTPv1Tests(NNTPv1v2TestsMixin, MockedNNTPTestsMixin, unittest.TestCase):
    """Tests an NNTP v1 server (no capabilities)."""
    nntp_version = 1
    handler_class = NNTPv1Handler

    def test_caps(self):
        # A v1 server advertises no capabilities at all.
        caps = self.server.getcapabilities()
        self.assertEqual(caps, {})
        self.assertEqual(self.server.nntp_version, 1)
        self.assertEqual(self.server.nntp_implementation, None)
class NNTPv2Tests(NNTPv1v2TestsMixin, MockedNNTPTestsMixin, unittest.TestCase):
    """Tests an NNTP v2 server (with capabilities)."""
    nntp_version = 2
    handler_class = NNTPv2Handler

    def test_caps(self):
        # Parsed form of the canned CAPABILITIES response pushed by
        # NNTPv2Handler.handle_CAPABILITIES.
        caps = self.server.getcapabilities()
        self.assertEqual(caps, {
            'VERSION': ['2', '3'],
            'IMPLEMENTATION': ['INN', '2.5.1'],
            'AUTHINFO': ['USER'],
            'HDR': [],
            'LIST': ['ACTIVE', 'ACTIVE.TIMES', 'DISTRIB.PATS',
                     'HEADERS', 'NEWSGROUPS', 'OVERVIEW.FMT'],
            'OVER': [],
            'POST': [],
            'READER': [],
        })
        # "VERSION 2 3": the client adopts the highest advertised version.
        self.assertEqual(self.server.nntp_version, 3)
        self.assertEqual(self.server.nntp_implementation, 'INN 2.5.1')
class CapsAfterLoginNNTPv2Tests(MockedNNTPTestsMixin, unittest.TestCase):
    """Tests a probably NNTP v2 server with capabilities only after login."""
    nntp_version = 2
    handler_class = CapsAfterLoginNNTPv2Handler

    def test_caps_only_after_login(self):
        # Before login the 480 refusal leaves the capability dict empty;
        # a successful login triggers a CAPABILITIES re-fetch.
        self.assertEqual(self.server._caps, {})
        self.server.login('testuser', 'testpw')
        self.assertIn('VERSION', self.server._caps)
class SendReaderNNTPv2Tests(MockedNNTPWithReaderModeMixin,
                            unittest.TestCase):
    """Same tests as for v2 but we tell NNTP to send MODE READER to a server
    that isn't in READER mode by default."""
    nntp_version = 2
    handler_class = ModeSwitchingNNTPv2Handler

    def test_we_are_in_reader_mode_after_connect(self):
        # The MODE READER handshake must have switched the server into
        # reader mode, which re-exposes the READER capability.
        self.assertIn('READER', self.server._caps)
class MiscTests(unittest.TestCase):
def test_decode_header(self):
def gives(a, b):
self.assertEqual(nntplib.decode_header(a), b)
gives("" , "")
gives("a plain header", "a plain header")
gives(" with extra spaces ", " with extra spaces ")
gives("=?ISO-8859-15?Q?D=E9buter_en_Python?=", "Débuter en Python")
gives("=?utf-8?q?Re=3A_=5Bsqlite=5D_probl=C3=A8me_avec_ORDER_BY_sur_des_cha?="
" =?utf-8?q?=C3=AEnes_de_caract=C3=A8res_accentu=C3=A9es?=",
"Re: [sqlite] problème avec ORDER BY sur des chaînes de caractères accentuées")
gives("Re: =?UTF-8?B?cHJvYmzDqG1lIGRlIG1hdHJpY2U=?=",
"Re: problème de matrice")
# A natively utf-8 header (found in the real world!)
gives("Re: Message d'erreur incompréhensible (par moi)",
"Re: Message d'erreur incompréhensible (par moi)")
def test_parse_overview_fmt(self):
# The minimal (default) response
lines = ["Subject:", "From:", "Date:", "Message-ID:",
"References:", ":bytes", ":lines"]
self.assertEqual(nntplib._parse_overview_fmt(lines),
["subject", "from", "date", "message-id", "references",
":bytes", ":lines"])
# The minimal response using alternative names
lines = ["Subject:", "From:", "Date:", "Message-ID:",
"References:", "Bytes:", "Lines:"]
self.assertEqual(nntplib._parse_overview_fmt(lines),
["subject", "from", "date", "message-id", "references",
":bytes", ":lines"])
# Variations in casing
lines = ["subject:", "FROM:", "DaTe:", "message-ID:",
"References:", "BYTES:", "Lines:"]
self.assertEqual(nntplib._parse_overview_fmt(lines),
["subject", "from", "date", "message-id", "references",
":bytes", ":lines"])
# First example from RFC 3977
lines = ["Subject:", "From:", "Date:", "Message-ID:",
"References:", ":bytes", ":lines", "Xref:full",
"Distribution:full"]
self.assertEqual(nntplib._parse_overview_fmt(lines),
["subject", "from", "date", "message-id", "references",
":bytes", ":lines", "xref", "distribution"])
# Second example from RFC 3977
lines = ["Subject:", "From:", "Date:", "Message-ID:",
"References:", "Bytes:", "Lines:", "Xref:FULL",
"Distribution:FULL"]
self.assertEqual(nntplib._parse_overview_fmt(lines),
["subject", "from", "date", "message-id", "references",
":bytes", ":lines", "xref", "distribution"])
# A classic response from INN
lines = ["Subject:", "From:", "Date:", "Message-ID:",
"References:", "Bytes:", "Lines:", "Xref:full"]
self.assertEqual(nntplib._parse_overview_fmt(lines),
["subject", "from", "date", "message-id", "references",
":bytes", ":lines", "xref"])
def test_parse_overview(self):
fmt = nntplib._DEFAULT_OVERVIEW_FMT + ["xref"]
# First example from RFC 3977
lines = [
'3000234\tI am just a test article\t"Demo User" '
'<nobody@example.com>\t6 Oct 1998 04:38:40 -0500\t'
'<45223423@example.com>\t<45454@example.net>\t1234\t'
'17\tXref: news.example.com misc.test:3000363',
]
overview = nntplib._parse_overview(lines, fmt)
(art_num, fields), = overview
self.assertEqual(art_num, 3000234)
self.assertEqual(fields, {
'subject': 'I am just a test article',
'from': '"Demo User" <nobody@example.com>',
'date': '6 Oct 1998 04:38:40 -0500',
'message-id': '<45223423@example.com>',
'references': '<45454@example.net>',
':bytes': '1234',
':lines': '17',
'xref': 'news.example.com misc.test:3000363',
})
# Second example; here the "Xref" field is totally absent (including
# the header name) and comes out as None
lines = [
'3000234\tI am just a test article\t"Demo User" '
'<nobody@example.com>\t6 Oct 1998 04:38:40 -0500\t'
'<45223423@example.com>\t<45454@example.net>\t1234\t'
'17\t\t',
]
overview = nntplib._parse_overview(lines, fmt)
(art_num, fields), = overview
self.assertEqual(fields['xref'], None)
# Third example; the "Xref" is an empty string, while "references"
# is a single space.
lines = [
'3000234\tI am just a test article\t"Demo User" '
'<nobody@example.com>\t6 Oct 1998 04:38:40 -0500\t'
'<45223423@example.com>\t \t1234\t'
'17\tXref: \t',
]
overview = nntplib._parse_overview(lines, fmt)
(art_num, fields), = overview
self.assertEqual(fields['references'], ' ')
self.assertEqual(fields['xref'], '')
def test_parse_datetime(self):
def gives(a, b, *c):
self.assertEqual(nntplib._parse_datetime(a, b),
datetime.datetime(*c))
# Output of DATE command
gives("19990623135624", None, 1999, 6, 23, 13, 56, 24)
# Variations
gives("19990623", "135624", 1999, 6, 23, 13, 56, 24)
gives("990623", "135624", 1999, 6, 23, 13, 56, 24)
gives("090623", "135624", 2009, 6, 23, 13, 56, 24)
def test_unparse_datetime(self):
# Test non-legacy mode
# 1) with a datetime
def gives(y, M, d, h, m, s, date_str, time_str):
dt = datetime.datetime(y, M, d, h, m, s)
self.assertEqual(nntplib._unparse_datetime(dt),
(date_str, time_str))
self.assertEqual(nntplib._unparse_datetime(dt, False),
(date_str, time_str))
gives(1999, 6, 23, 13, 56, 24, "19990623", "135624")
gives(2000, 6, 23, 13, 56, 24, "20000623", "135624")
gives(2010, 6, 5, 1, 2, 3, "20100605", "010203")
# 2) with a date
def gives(y, M, d, date_str, time_str):
dt = datetime.date(y, M, d)
self.assertEqual(nntplib._unparse_datetime(dt),
(date_str, time_str))
self.assertEqual(nntplib._unparse_datetime(dt, False),
(date_str, time_str))
gives(1999, 6, 23, "19990623", "000000")
gives(2000, 6, 23, "20000623", "000000")
gives(2010, 6, 5, "20100605", "000000")
def test_unparse_datetime_legacy(self):
# Test legacy mode (RFC 977)
# 1) with a datetime
def gives(y, M, d, h, m, s, date_str, time_str):
dt = datetime.datetime(y, M, d, h, m, s)
self.assertEqual(nntplib._unparse_datetime(dt, True),
(date_str, time_str))
gives(1999, 6, 23, 13, 56, 24, "990623", "135624")
gives(2000, 6, 23, 13, 56, 24, "000623", "135624")
gives(2010, 6, 5, 1, 2, 3, "100605", "010203")
# 2) with a date
def gives(y, M, d, date_str, time_str):
dt = datetime.date(y, M, d)
self.assertEqual(nntplib._unparse_datetime(dt, True),
(date_str, time_str))
gives(1999, 6, 23, "990623", "000000")
gives(2000, 6, 23, "000623", "000000")
gives(2010, 6, 5, "100605", "000000")
@unittest.skipUnless(ssl, 'requires SSL support')
def test_ssl_support(self):
self.assertTrue(hasattr(nntplib, 'NNTP_SSL'))
class PublicAPITests(unittest.TestCase):
    """Ensures that the correct values are exposed in the public API."""

    def test_module_all_attribute(self):
        # __all__ must list exactly the documented public names;
        # NNTP_SSL is only exported when the ssl module is available.
        self.assertTrue(hasattr(nntplib, '__all__'))
        target_api = ['NNTP', 'NNTPError', 'NNTPReplyError',
                      'NNTPTemporaryError', 'NNTPPermanentError',
                      'NNTPProtocolError', 'NNTPDataError', 'decode_header']
        if ssl is not None:
            target_api.append('NNTP_SSL')
        self.assertEqual(set(nntplib.__all__), set(target_api))
class MockSocketTests(unittest.TestCase):
    """Tests involving a mock socket object.

    Used where the _NNTPServerIO file object is not enough.
    """

    nntp_class = nntplib.NNTP

    def check_constructor_error_conditions(
            self, handler_class,
            expected_error_type, expected_error_msg,
            login=None, password=None):
        """Connect to a server driven by *handler_class* and check that
        the NNTP constructor raises *expected_error_type* matching
        *expected_error_msg*, and that the socket and every file object
        made from it are closed afterwards (no resource leaks).
        """
        class mock_socket_module:
            def create_connection(address, timeout):
                return MockSocket()

        class MockSocket:
            def close(self):
                nonlocal socket_closed
                socket_closed = True

            def makefile(socket, mode):
                handler = handler_class()
                _, file = make_mock_file(handler)
                files.append(file)
                return file

        socket_closed = False
        files = []
        with patch('nntplib.socket', mock_socket_module), \
             self.assertRaisesRegex(expected_error_type, expected_error_msg):
            self.nntp_class('dummy', user=login, password=password)
        self.assertTrue(socket_closed)
        for f in files:
            self.assertTrue(f.closed)

    def test_bad_welcome(self):
        # A welcome line that is not a valid NNTP response at all.
        class Handler(NNTPv1Handler):
            welcome = 'Bad Welcome'
        self.check_constructor_error_conditions(
            Handler, nntplib.NNTPProtocolError, Handler.welcome)

    def test_service_temporarily_unavailable(self):
        # A 4xx response at connection time -> temporary error.
        # (Fixture text fixed: was misspelled "unavilable".)
        class Handler(NNTPv1Handler):
            welcome = '400 Service temporarily unavailable'
        self.check_constructor_error_conditions(
            Handler, nntplib.NNTPTemporaryError, Handler.welcome)

    def test_service_permanently_unavailable(self):
        # A 5xx response at connection time -> permanent error.
        class Handler(NNTPv1Handler):
            welcome = '502 Service permanently unavailable'
        self.check_constructor_error_conditions(
            Handler, nntplib.NNTPPermanentError, Handler.welcome)

    def test_bad_capabilities(self):
        # CAPABILITIES answered with a non-1xx response.
        class Handler(NNTPv1Handler):
            def handle_CAPABILITIES(self):
                self.push_lit(capabilities_response)
        capabilities_response = '201 bad capability'
        self.check_constructor_error_conditions(
            Handler, nntplib.NNTPReplyError, capabilities_response)

    def test_login_aborted(self):
        # AUTHINFO answered with a permanent (5xx) error.
        login = 't@e.com'
        password = 'python'
        class Handler(NNTPv1Handler):
            def handle_AUTHINFO(self, *args):
                self.push_lit(authinfo_response)
        authinfo_response = '503 Mechanism not recognized'
        self.check_constructor_error_conditions(
            Handler, nntplib.NNTPPermanentError, authinfo_response,
            login, password)
class bypass_context:
    """Bypass encryption and actual SSL module"""
    # Mimics ssl.SSLContext.wrap_socket but returns the socket unchanged,
    # so no TLS handshake takes place.  Note the missing 'self': the
    # class object itself is passed as the context, so 'sock' binds the
    # socket argument directly.
    def wrap_socket(sock, **args):
        return sock
@unittest.skipUnless(ssl, 'requires SSL support')
class MockSslTests(MockSocketTests):
    """Re-runs MockSocketTests against NNTP_SSL with encryption bypassed."""
    @staticmethod
    def nntp_class(*pos, **kw):
        # bypass_context turns wrap_socket into a no-op, so the mock
        # socket machinery inherited from MockSocketTests works unchanged.
        return nntplib.NNTP_SSL(*pos, ssl_context=bypass_context, **kw)
if __name__ == "__main__":
unittest.main()
| gpl-3.0 |
jonathanverner/brython | www/src/Lib/test/test_metaclass.py | 132 | 6350 | doctests = """
Basic class construction.
>>> class C:
... def meth(self): print("Hello")
...
>>> C.__class__ is type
True
>>> a = C()
>>> a.__class__ is C
True
>>> a.meth()
Hello
>>>
Use *args notation for the bases.
>>> class A: pass
>>> class B: pass
>>> bases = (A, B)
>>> class C(*bases): pass
>>> C.__bases__ == bases
True
>>>
Use a trivial metaclass.
>>> class M(type):
... pass
...
>>> class C(metaclass=M):
... def meth(self): print("Hello")
...
>>> C.__class__ is M
True
>>> a = C()
>>> a.__class__ is C
True
>>> a.meth()
Hello
>>>
Use **kwds notation for the metaclass keyword.
>>> kwds = {'metaclass': M}
>>> class C(**kwds): pass
...
>>> C.__class__ is M
True
>>> a = C()
>>> a.__class__ is C
True
>>>
Use a metaclass with a __prepare__ static method.
>>> class M(type):
... @staticmethod
... def __prepare__(*args, **kwds):
... print("Prepare called:", args, kwds)
... return dict()
... def __new__(cls, name, bases, namespace, **kwds):
... print("New called:", kwds)
... return type.__new__(cls, name, bases, namespace)
... def __init__(cls, *args, **kwds):
... pass
...
>>> class C(metaclass=M):
... def meth(self): print("Hello")
...
Prepare called: ('C', ()) {}
New called: {}
>>>
Also pass another keyword.
>>> class C(object, metaclass=M, other="haha"):
... pass
...
Prepare called: ('C', (<class 'object'>,)) {'other': 'haha'}
New called: {'other': 'haha'}
>>> C.__class__ is M
True
>>> C.__bases__ == (object,)
True
>>> a = C()
>>> a.__class__ is C
True
>>>
Check that build_class doesn't mutate the kwds dict.
>>> kwds = {'metaclass': type}
>>> class C(**kwds): pass
...
>>> kwds == {'metaclass': type}
True
>>>
Use various combinations of explicit keywords and **kwds.
>>> bases = (object,)
>>> kwds = {'metaclass': M, 'other': 'haha'}
>>> class C(*bases, **kwds): pass
...
Prepare called: ('C', (<class 'object'>,)) {'other': 'haha'}
New called: {'other': 'haha'}
>>> C.__class__ is M
True
>>> C.__bases__ == (object,)
True
>>> class B: pass
>>> kwds = {'other': 'haha'}
>>> class C(B, metaclass=M, *bases, **kwds): pass
...
Prepare called: ('C', (<class 'test.test_metaclass.B'>, <class 'object'>)) {'other': 'haha'}
New called: {'other': 'haha'}
>>> C.__class__ is M
True
>>> C.__bases__ == (B, object)
True
>>>
Check for duplicate keywords.
>>> class C(metaclass=type, metaclass=type): pass
...
Traceback (most recent call last):
[...]
SyntaxError: keyword argument repeated
>>>
Another way.
>>> kwds = {'metaclass': type}
>>> class C(metaclass=type, **kwds): pass
...
Traceback (most recent call last):
[...]
TypeError: __build_class__() got multiple values for keyword argument 'metaclass'
>>>
Use a __prepare__ method that returns an instrumented dict.
>>> class LoggingDict(dict):
... def __setitem__(self, key, value):
... print("d[%r] = %r" % (key, value))
... dict.__setitem__(self, key, value)
...
>>> class Meta(type):
... @staticmethod
... def __prepare__(name, bases):
... return LoggingDict()
...
>>> class C(metaclass=Meta):
... foo = 2+2
... foo = 42
... bar = 123
...
d['__module__'] = 'test.test_metaclass'
d['__qualname__'] = 'C'
d['foo'] = 4
d['foo'] = 42
d['bar'] = 123
>>>
Use a metaclass that doesn't derive from type.
>>> def meta(name, bases, namespace, **kwds):
... print("meta:", name, bases)
... print("ns:", sorted(namespace.items()))
... print("kw:", sorted(kwds.items()))
... return namespace
...
>>> class C(metaclass=meta):
... a = 42
... b = 24
...
meta: C ()
ns: [('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('a', 42), ('b', 24)]
kw: []
>>> type(C) is dict
True
>>> print(sorted(C.items()))
[('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('a', 42), ('b', 24)]
>>>
And again, with a __prepare__ attribute.
>>> def prepare(name, bases, **kwds):
... print("prepare:", name, bases, sorted(kwds.items()))
... return LoggingDict()
...
>>> meta.__prepare__ = prepare
>>> class C(metaclass=meta, other="booh"):
... a = 1
... a = 2
... b = 3
...
prepare: C () [('other', 'booh')]
d['__module__'] = 'test.test_metaclass'
d['__qualname__'] = 'C'
d['a'] = 1
d['a'] = 2
d['b'] = 3
meta: C ()
ns: [('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('a', 2), ('b', 3)]
kw: [('other', 'booh')]
>>>
The default metaclass must define a __prepare__() method.
>>> type.__prepare__()
{}
>>>
Make sure it works with subclassing.
>>> class M(type):
... @classmethod
... def __prepare__(cls, *args, **kwds):
... d = super().__prepare__(*args, **kwds)
... d["hello"] = 42
... return d
...
>>> class C(metaclass=M):
... print(hello)
...
42
>>> print(C.hello)
42
>>>
Test failures in looking up the __prepare__ method work.
>>> class ObscureException(Exception):
... pass
>>> class FailDescr:
... def __get__(self, instance, owner):
... raise ObscureException
>>> class Meta(type):
... __prepare__ = FailDescr()
>>> class X(metaclass=Meta):
... pass
Traceback (most recent call last):
[...]
test.test_metaclass.ObscureException
"""
import sys

# Trace function introduces __locals__ which causes various tests to fail.
# When a trace callback (e.g. coverage, a debugger) is active, register no
# doctests at all; otherwise expose the module docstring under 'doctests'.
if hasattr(sys, 'gettrace') and sys.gettrace():
    __test__ = {}
else:
    __test__ = {'doctests' : doctests}
def test_main(verbose=False):
    """Run this module's registered doctests via regrtest's doctest driver."""
    from test import support, test_metaclass
    support.run_doctest(test_metaclass, verbose)
# Allow running the doctests directly (verbose) without the regrtest harness.
if __name__ == "__main__":
    test_main(verbose=True)
| bsd-3-clause |
maxamillion/ansible-modules-core | database/postgresql/postgresql_db.py | 11 | 11257 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: postgresql_db
short_description: Add or remove PostgreSQL databases from a remote host.
description:
- Add or remove PostgreSQL databases from a remote host.
version_added: "0.6"
options:
name:
description:
- name of the database to add or remove
required: true
default: null
login_user:
description:
- The username used to authenticate with
required: false
default: null
login_password:
description:
- The password used to authenticate with
required: false
default: null
login_host:
description:
- Host running the database
required: false
default: localhost
login_unix_socket:
description:
- Path to a Unix domain socket for local connections
required: false
default: null
owner:
description:
- Name of the role to set as owner of the database
required: false
default: null
port:
description:
- Database port to connect to.
required: false
default: 5432
template:
description:
- Template used to create the database
required: false
default: null
encoding:
description:
- Encoding of the database
required: false
default: null
lc_collate:
description:
- Collation order (LC_COLLATE) to use in the database. Must match collation order of template database unless C(template0) is used as template.
required: false
default: null
lc_ctype:
description:
- Character classification (LC_CTYPE) to use in the database (e.g. lower, upper, ...) Must match LC_CTYPE of template database unless C(template0) is used as template.
required: false
default: null
state:
description:
- The database state
required: false
default: present
choices: [ "present", "absent" ]
notes:
- The default authentication assumes that you are either logging in as or sudo'ing to the C(postgres) account on the host.
- This module uses I(psycopg2), a Python PostgreSQL database adapter. You must ensure that psycopg2 is installed on
the host before using this module. If the remote host is the PostgreSQL server (which is the default case), then PostgreSQL must also be installed on the remote host. For Ubuntu-based systems, install the C(postgresql), C(libpq-dev), and C(python-psycopg2) packages on the remote host before using this module.
requirements: [ psycopg2 ]
author: "Ansible Core Team"
'''
EXAMPLES = '''
# Create a new database with name "acme"
- postgresql_db:
name: acme
# Create a new database with name "acme" and specific encoding and locale
# settings. If a template different from "template0" is specified, encoding
# and locale settings must match those of the template.
- postgresql_db:
name: acme
encoding: UTF-8
lc_collate: de_DE.UTF-8
lc_ctype: de_DE.UTF-8
template: template0
'''
# psycopg2 is an optional dependency; record its availability so main() can
# fail with a clear message instead of an unhandled ImportError traceback.
try:
    import psycopg2
    import psycopg2.extras
except ImportError:
    postgresqldb_found = False
else:
    postgresqldb_found = True

from ansible.module_utils.six import iteritems
class NotSupportedError(Exception):
    """Raised when a requested database change cannot be applied in place."""
# ===========================================
# PostgreSQL module specific support methods.
#
def set_owner(cursor, db, owner):
    """Issue ALTER DATABASE so role *owner* owns database *db*; always True."""
    sql = "ALTER DATABASE %s OWNER TO %s" % (
        pg_quote_identifier(db, 'database'),
        pg_quote_identifier(owner, 'role'))
    cursor.execute(sql)
    return True
def get_encoding_id(cursor, encoding):
    """Map an encoding name to PostgreSQL's internal encoding id."""
    cursor.execute("SELECT pg_char_to_encoding(%(encoding)s) AS encoding_id;",
                   {'encoding': encoding})
    return cursor.fetchone()['encoding_id']
def get_db_info(cursor, db):
    """Return the owner/encoding/locale row for database *db*.

    The result is whatever the cursor yields for one row (a dict-like row
    when a DictCursor is in use), or None when no such database exists.
    """
    sql = """
    SELECT rolname AS owner,
    pg_encoding_to_char(encoding) AS encoding, encoding AS encoding_id,
    datcollate AS lc_collate, datctype AS lc_ctype
    FROM pg_database JOIN pg_roles ON pg_roles.oid = pg_database.datdba
    WHERE datname = %(db)s
    """
    cursor.execute(sql, {'db': db})
    return cursor.fetchone()
def db_exists(cursor, db):
    """Return True when exactly one pg_database row matches *db*."""
    cursor.execute("SELECT * FROM pg_database WHERE datname=%(db)s", {'db': db})
    return cursor.rowcount == 1
def db_delete(cursor, db):
    """Drop database *db* if present; return True when a DROP was issued."""
    if not db_exists(cursor, db):
        return False
    cursor.execute("DROP DATABASE %s" % pg_quote_identifier(db, 'database'))
    return True
def db_create(cursor, db, owner, template, encoding, lc_collate, lc_ctype):
    """Create database *db* or reconcile an existing one.

    When *db* does not exist it is created with the requested owner,
    template, encoding and locale settings; returns True.  When it already
    exists, encoding/LC_COLLATE/LC_CTYPE mismatches raise NotSupportedError
    (PostgreSQL cannot change them in place), an owner mismatch is fixed via
    ALTER DATABASE, and False is returned when nothing needed to change.
    """
    # Encoding/locale values are passed as bound query parameters; the db,
    # owner and template names are identifiers and must be quoted as such.
    params = dict(enc=encoding, collate=lc_collate, ctype=lc_ctype)
    if not db_exists(cursor, db):
        query_fragments = ['CREATE DATABASE %s' % pg_quote_identifier(db, 'database')]
        if owner:
            query_fragments.append('OWNER %s' % pg_quote_identifier(owner, 'role'))
        if template:
            query_fragments.append('TEMPLATE %s' % pg_quote_identifier(template, 'database'))
        if encoding:
            query_fragments.append('ENCODING %(enc)s')
        if lc_collate:
            query_fragments.append('LC_COLLATE %(collate)s')
        if lc_ctype:
            query_fragments.append('LC_CTYPE %(ctype)s')
        query = ' '.join(query_fragments)
        cursor.execute(query, params)
        return True
    else:
        db_info = get_db_info(cursor, db)
        if (encoding and
            get_encoding_id(cursor, encoding) != db_info['encoding_id']):
            raise NotSupportedError(
                'Changing database encoding is not supported. '
                'Current encoding: %s' % db_info['encoding']
            )
        elif lc_collate and lc_collate != db_info['lc_collate']:
            raise NotSupportedError(
                'Changing LC_COLLATE is not supported. '
                'Current LC_COLLATE: %s' % db_info['lc_collate']
            )
        elif lc_ctype and lc_ctype != db_info['lc_ctype']:
            # Bug fix: the first literal previously ended without a trailing
            # space, producing "...not supported.Current LC_CTYPE..." when the
            # adjacent string literals were concatenated (the encoding and
            # LC_COLLATE messages above both include the space).
            raise NotSupportedError(
                'Changing LC_CTYPE is not supported. '
                'Current LC_CTYPE: %s' % db_info['lc_ctype']
            )
        elif owner and owner != db_info['owner']:
            return set_owner(cursor, db, owner)
        else:
            return False
def db_matches(cursor, db, owner, template, encoding, lc_collate, lc_ctype):
    """Return True when *db* exists and every supplied attribute matches it.

    Empty/None attributes are not compared. Used by check mode to decide
    whether db_create() would report a change.
    """
    if not db_exists(cursor, db):
        return False
    db_info = get_db_info(cursor, db)
    if encoding and get_encoding_id(cursor, encoding) != db_info['encoding_id']:
        return False
    if lc_collate and lc_collate != db_info['lc_collate']:
        return False
    if lc_ctype and lc_ctype != db_info['lc_ctype']:
        return False
    if owner and owner != db_info['owner']:
        return False
    return True
# ===========================================
# Module execution.
#
def main():
    """Ansible entry point: connect to PostgreSQL and converge database state."""
    module = AnsibleModule(
        argument_spec=dict(
            login_user=dict(default="postgres"),
            login_password=dict(default=""),
            login_host=dict(default=""),
            login_unix_socket=dict(default=""),
            port=dict(default="5432"),
            db=dict(required=True, aliases=['name']),
            owner=dict(default=""),
            template=dict(default=""),
            encoding=dict(default=""),
            lc_collate=dict(default=""),
            lc_ctype=dict(default=""),
            state=dict(default="present", choices=["absent", "present"]),
        ),
        supports_check_mode = True
    )

    if not postgresqldb_found:
        module.fail_json(msg="the python psycopg2 module is required")

    db = module.params["db"]
    port = module.params["port"]
    owner = module.params["owner"]
    template = module.params["template"]
    encoding = module.params["encoding"]
    lc_collate = module.params["lc_collate"]
    lc_ctype = module.params["lc_ctype"]
    state = module.params["state"]
    changed = False

    # To use defaults values, keyword arguments must be absent, so
    # check which values are empty and don't include in the **kw
    # dictionary
    params_map = {
        "login_host":"host",
        "login_user":"user",
        "login_password":"password",
        "port":"port"
    }
    kw = dict( (params_map[k], v) for (k, v) in iteritems(module.params)
               if k in params_map and v != '' )

    # If a login_unix_socket is specified, incorporate it here.
    # psycopg2 interprets a "host" beginning with '/' as a socket directory.
    is_localhost = "host" not in kw or kw["host"] == "" or kw["host"] == "localhost"
    if is_localhost and module.params["login_unix_socket"] != "":
        kw["host"] = module.params["login_unix_socket"]

    try:
        # Connect to the maintenance DB: the target may not exist yet.
        db_connection = psycopg2.connect(database="postgres", **kw)
        # Enable autocommit so we can create databases
        # (CREATE/DROP DATABASE cannot run inside a transaction block).
        # NOTE(review): this is a lexicographic string comparison, so e.g.
        # '2.10.0' < '2.4.2' would take the wrong branch — confirm intent.
        if psycopg2.__version__ >= '2.4.2':
            db_connection.autocommit = True
        else:
            db_connection.set_isolation_level(psycopg2
                                              .extensions
                                              .ISOLATION_LEVEL_AUTOCOMMIT)
        cursor = db_connection.cursor(
            cursor_factory=psycopg2.extras.DictCursor)
    except Exception:
        e = get_exception()
        module.fail_json(msg="unable to connect to database: %s" % e)

    try:
        # Check mode: report whether a change WOULD happen, then exit.
        if module.check_mode:
            if state == "absent":
                changed = db_exists(cursor, db)
            elif state == "present":
                changed = not db_matches(cursor, db, owner, template, encoding,
                                         lc_collate, lc_ctype)
            module.exit_json(changed=changed, db=db)

        if state == "absent":
            try:
                changed = db_delete(cursor, db)
            except SQLParseError:
                e = get_exception()
                module.fail_json(msg=str(e))

        elif state == "present":
            try:
                changed = db_create(cursor, db, owner, template, encoding,
                                    lc_collate, lc_ctype)
            except SQLParseError:
                e = get_exception()
                module.fail_json(msg=str(e))
    except NotSupportedError:
        e = get_exception()
        module.fail_json(msg=str(e))
    except SystemExit:
        # Avoid catching this on Python 2.4
        raise
    except Exception:
        e = get_exception()
        module.fail_json(msg="Database query failed: %s" % e)

    module.exit_json(changed=changed, db=db)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.database import *
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
Kao9/Kao9.github.io | node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py | 1524 | 22178 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import gyp.input
import optparse
import os.path
import re
import shlex
import sys
import traceback
from gyp.common import GypError
# Default debug modes for GYP.  Populated from the -d/--debug command-line
# flags in gyp_main(); DebugOutput() consults it before printing.
debug = {}

# List of "official" debug modes, but you can use anything you like.
DEBUG_GENERAL = 'general'
DEBUG_VARIABLES = 'variables'
DEBUG_INCLUDES = 'includes'
def DebugOutput(mode, message, *args):
  """Print a debug message tagged with |mode| and the caller's file:line,
  but only when |mode| (or 'all') was enabled via gyp.debug."""
  if 'all' in gyp.debug or mode in gyp.debug:
    ctx = ('unknown', 0, 'unknown')
    try:
      # Identify the calling frame (filename, line number, function name).
      f = traceback.extract_stack(limit=2)
      if f:
        ctx = f[0][:3]
    except:
      # Best-effort only: fall back to the 'unknown' context on any failure.
      pass
    if args:
      message %= args
    print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
                              ctx[1], ctx[2], message)
def FindBuildFiles():
  """Return the names of all .gyp files in the current working directory."""
  extension = '.gyp'
  return [name for name in os.listdir(os.getcwd())
          if name.endswith(extension)]
def Load(build_files, format, default_variables={},
         includes=[], depth='.', params=None, check=False,
         circular_check=True, duplicate_basename_check=True):
  """
  Loads one or more specified build files.
  default_variables and includes will be copied before use.
  Returns the generator for the specified format and the
  data returned by loading the specified build files.
  """
  # NOTE(review): the mutable {} / [] defaults are only safe because both are
  # copied below ([:] / copy.copy) before being modified.
  if params is None:
    params = {}

  # 'format-flavor' selects a generator sub-variant; the flavor part is
  # passed through to the generator via params['flavor'].
  if '-' in format:
    format, params['flavor'] = format.split('-', 1)

  default_variables = copy.copy(default_variables)

  # Default variables provided by this program and its modules should be
  # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
  # avoiding collisions with user and automatic variables.
  default_variables['GENERATOR'] = format
  default_variables['GENERATOR_FLAVOR'] = params.get('flavor', '')

  # Format can be a custom python file, or by default the name of a module
  # within gyp.generator.
  if format.endswith('.py'):
    generator_name = os.path.splitext(format)[0]
    path, generator_name = os.path.split(generator_name)

    # Make sure the path to the custom generator is in sys.path
    # Don't worry about removing it once we are done. Keeping the path
    # to each generator that is used in sys.path is likely harmless and
    # arguably a good idea.
    path = os.path.abspath(path)
    if path not in sys.path:
      sys.path.insert(0, path)
  else:
    generator_name = 'gyp.generator.' + format

  # These parameters are passed in order (as opposed to by key)
  # because ActivePython cannot handle key parameters to __import__.
  generator = __import__(generator_name, globals(), locals(), generator_name)
  for (key, val) in generator.generator_default_variables.items():
    default_variables.setdefault(key, val)

  # Give the generator the opportunity to set additional variables based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateVariables', None):
    generator.CalculateVariables(default_variables, params)

  # Give the generator the opportunity to set generator_input_info based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateGeneratorInputInfo', None):
    generator.CalculateGeneratorInputInfo(params)

  # Fetch the generator specific info that gets fed to input, we use getattr
  # so we can default things and the generators only have to provide what
  # they need.
  generator_input_info = {
    'non_configuration_keys':
        getattr(generator, 'generator_additional_non_configuration_keys', []),
    'path_sections':
        getattr(generator, 'generator_additional_path_sections', []),
    'extra_sources_for_rules':
        getattr(generator, 'generator_extra_sources_for_rules', []),
    'generator_supports_multiple_toolsets':
        getattr(generator, 'generator_supports_multiple_toolsets', False),
    'generator_wants_static_library_dependencies_adjusted':
        getattr(generator,
                'generator_wants_static_library_dependencies_adjusted', True),
    'generator_wants_sorted_dependencies':
        getattr(generator, 'generator_wants_sorted_dependencies', False),
    'generator_filelist_paths':
        getattr(generator, 'generator_filelist_paths', None),
  }

  # Process the input specific to this generator.
  result = gyp.input.Load(build_files, default_variables, includes[:],
                          depth, generator_input_info, check, circular_check,
                          duplicate_basename_check,
                          params['parallel'], params['root_targets'])
  return [generator] + result
def NameValueListToDict(name_value_list):
  """Convert ['NAME=VALUE', ...] strings into a dict.

  A bare NAME (no '=') maps to True.  A VALUE that parses as an integer is
  stored as an int; anything else is kept as the original string.  Only the
  first '=' separates name from value.
  """
  result = {}
  for item in name_value_list:
    name, sep, value = item.partition('=')
    if not sep:
      # No value supplied: treat the entry as a boolean flag.
      result[name] = True
      continue
    try:
      result[name] = int(value)
    except ValueError:
      result[name] = value
  return result
def ShlexEnv(env_name):
  """Return the environment variable |env_name| split into shell words.

  Returns an empty list when the variable is unset or empty.  (The previous
  version returned the empty *string* itself in the set-but-empty case,
  leaking a str into call sites that treat the result as a list.)
  """
  value = os.environ.get(env_name)
  return shlex.split(value) if value else []
def FormatOpt(opt, value):
  """Render an option/value pair: long options get '=', short ones abut."""
  separator = '=' if opt.startswith('--') else ''
  return opt + separator + value
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
  """Regenerate the command-line flags for an option with action='append'.

  Flags derived from the |env_name| environment variable (only when
  options.use_environment is set) come first, de-duplicated so the last
  occurrence wins; flags that were given on the command line (|values|)
  are appended afterwards and therefore take precedence — matching how
  command-line flags override the environment during normal parsing,
  without requiring the environment to be set when the flags are reused.
  """
  flags = []
  if options.use_environment and env_name:
    for raw in ShlexEnv(env_name):
      formatted = FormatOpt(flag, predicate(raw))
      if formatted in flags:
        flags.remove(formatted)
      flags.append(formatted)
  for raw in values or []:
    flags.append(FormatOpt(flag, predicate(raw)))
  return flags
def RegenerateFlags(options):
  """Given a parsed options object, and taking the environment variables into
  account, returns a list of flags that should regenerate an equivalent options
  object (even in the absence of the environment variables.)

  Any path options will be normalized relative to depth.

  The format flag is not included, as it is assumed the calling generator will
  set that as appropriate.
  """
  def FixPath(path):
    # Re-express |path| relative to the --depth directory.
    path = gyp.common.FixIfRelativePath(path, options.depth)
    if not path:
      return os.path.curdir
    return path

  def Noop(value):
    # Identity predicate for non-path option values.
    return value

  # We always want to ignore the environment when regenerating, to avoid
  # duplicate or changed flags in the environment at the time of regeneration.
  flags = ['--ignore-environment']
  # Walk the metadata recorded by RegeneratableOptionParser.add_option().
  for name, metadata in options._regeneration_metadata.iteritems():
    opt = metadata['opt']
    value = getattr(options, name)
    value_predicate = metadata['type'] == 'path' and FixPath or Noop
    action = metadata['action']
    env_name = metadata['env_name']
    if action == 'append':
      flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
                                        env_name, options))
    elif action in ('store', None):  # None is a synonym for 'store'.
      if value:
        flags.append(FormatOpt(opt, value_predicate(value)))
      elif options.use_environment and env_name and os.environ.get(env_name):
        flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
    elif action in ('store_true', 'store_false'):
      if ((action == 'store_true' and value) or
          (action == 'store_false' and not value)):
        flags.append(opt)
      elif options.use_environment and env_name:
        print >>sys.stderr, ('Warning: environment regeneration unimplemented '
                             'for %s flag %r env_name %r' % (action, opt,
                                                             env_name))
    else:
      print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
                           'flag %r' % (action, opt))

  return flags
class RegeneratableOptionParser(optparse.OptionParser):
  """OptionParser that records per-option metadata so RegenerateFlags() can
  later rebuild an equivalent command line."""

  def __init__(self):
    # Maps option dest -> {action, type, env_name, opt} for regeneration.
    self.__regeneratable_options = {}
    optparse.OptionParser.__init__(self)

  def add_option(self, *args, **kw):
    """Add an option to the parser.

    This accepts the same arguments as OptionParser.add_option, plus the
    following:
      regenerate: can be set to False to prevent this option from being included
                  in regeneration.
      env_name: name of environment variable that additional values for this
                option come from.
      type: adds type='path', to tell the regenerator that the values of
            this option need to be made relative to options.depth
    """
    # Strip off the keywords that optparse itself does not understand.
    env_name = kw.pop('env_name', None)
    if 'dest' in kw and kw.pop('regenerate', True):
      dest = kw['dest']

      # The path type is needed for regenerating, for optparse we can just treat
      # it as a string.
      type = kw.get('type')
      if type == 'path':
        kw['type'] = 'string'

      self.__regeneratable_options[dest] = {
          'action': kw.get('action'),
          'type': type,
          'env_name': env_name,
          'opt': args[0],
        }

    optparse.OptionParser.add_option(self, *args, **kw)

  def parse_args(self, *args):
    values, args = optparse.OptionParser.parse_args(self, *args)
    # Expose the recorded metadata on the options object itself.
    values._regeneration_metadata = self.__regeneratable_options
    return values, args
def gyp_main(args):
  """Parse |args|, locate build files, and run every requested generator.

  Returns 0 on success; raises GypError for user-facing failures (no build
  files, bad --build config, undetectable --depth).
  """
  my_name = os.path.basename(sys.argv[0])

  parser = RegeneratableOptionParser()
  usage = 'usage: %s [options ...] [build_file ...]'
  parser.set_usage(usage.replace('%s', '%prog'))
  parser.add_option('--build', dest='configs', action='append',
                    help='configuration for build after project generation')
  parser.add_option('--check', dest='check', action='store_true',
                    help='check format of gyp files')
  parser.add_option('--config-dir', dest='config_dir', action='store',
                    env_name='GYP_CONFIG_DIR', default=None,
                    help='The location for configuration files like '
                    'include.gypi.')
  parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
                    action='append', default=[], help='turn on a debugging '
                    'mode for debugging GYP. Supported modes are "variables", '
                    '"includes" and "general" or "all" for all of them.')
  parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
                    env_name='GYP_DEFINES',
                    help='sets variable VAR to value VAL')
  parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
                    help='set DEPTH gyp variable to a relative path to PATH')
  parser.add_option('-f', '--format', dest='formats', action='append',
                    env_name='GYP_GENERATORS', regenerate=False,
                    help='output formats to generate')
  parser.add_option('-G', dest='generator_flags', action='append', default=[],
                    metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
                    help='sets generator flag FLAG to VAL')
  parser.add_option('--generator-output', dest='generator_output',
                    action='store', default=None, metavar='DIR', type='path',
                    env_name='GYP_GENERATOR_OUTPUT',
                    help='puts generated build files under DIR')
  parser.add_option('--ignore-environment', dest='use_environment',
                    action='store_false', default=True, regenerate=False,
                    help='do not read options from environment variables')
  parser.add_option('-I', '--include', dest='includes', action='append',
                    metavar='INCLUDE', type='path',
                    help='files to include in all loaded .gyp files')
  # --no-circular-check disables the check for circular relationships between
  # .gyp files. These relationships should not exist, but they've only been
  # observed to be harmful with the Xcode generator. Chromium's .gyp files
  # currently have some circular relationships on non-Mac platforms, so this
  # option allows the strict behavior to be used on Macs and the lenient
  # behavior to be used elsewhere.
  # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
  parser.add_option('--no-circular-check', dest='circular_check',
                    action='store_false', default=True, regenerate=False,
                    help="don't check for circular relationships between files")
  # --no-duplicate-basename-check disables the check for duplicate basenames
  # in a static_library/shared_library project. Visual C++ 2008 generator
  # doesn't support this configuration. Libtool on Mac also generates warnings
  # when duplicate basenames are passed into Make generator on Mac.
  # TODO(yukawa): Remove this option when these legacy generators are
  # deprecated.
  parser.add_option('--no-duplicate-basename-check',
                    dest='duplicate_basename_check', action='store_false',
                    default=True, regenerate=False,
                    help="don't check for duplicate basenames")
  parser.add_option('--no-parallel', action='store_true', default=False,
                    help='Disable multiprocessing')
  parser.add_option('-S', '--suffix', dest='suffix', default='',
                    help='suffix to add to generated files')
  parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
                    default=None, metavar='DIR', type='path',
                    help='directory to use as the root of the source tree')
  parser.add_option('-R', '--root-target', dest='root_targets',
                    action='append', metavar='TARGET',
                    help='include only TARGET and its deep dependencies')

  options, build_files_arg = parser.parse_args(args)
  build_files = build_files_arg

  # Set up the configuration directory (defaults to ~/.gyp)
  if not options.config_dir:
    home = None
    home_dot_gyp = None
    if options.use_environment:
      home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
      if home_dot_gyp:
        home_dot_gyp = os.path.expanduser(home_dot_gyp)

    if not home_dot_gyp:
      home_vars = ['HOME']
      if sys.platform in ('cygwin', 'win32'):
        home_vars.append('USERPROFILE')
      for home_var in home_vars:
        home = os.getenv(home_var)
        if home != None:
          home_dot_gyp = os.path.join(home, '.gyp')
          if not os.path.exists(home_dot_gyp):
            home_dot_gyp = None
          else:
            break
  else:
    home_dot_gyp = os.path.expanduser(options.config_dir)

  if home_dot_gyp and not os.path.exists(home_dot_gyp):
    home_dot_gyp = None

  if not options.formats:
    # If no format was given on the command line, then check the env variable.
    generate_formats = []
    if options.use_environment:
      generate_formats = os.environ.get('GYP_GENERATORS', [])
    if generate_formats:
      generate_formats = re.split(r'[\s,]', generate_formats)
    if generate_formats:
      options.formats = generate_formats
    else:
      # Nothing in the variable, default based on platform.
      if sys.platform == 'darwin':
        options.formats = ['xcode']
      elif sys.platform in ('win32', 'cygwin'):
        options.formats = ['msvs']
      else:
        options.formats = ['make']

  if not options.generator_output and options.use_environment:
    g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
    if g_o:
      options.generator_output = g_o

  options.parallel = not options.no_parallel

  # Record the requested debug modes in the module-level gyp.debug dict.
  for mode in options.debug:
    gyp.debug[mode] = 1

  # Do an extra check to avoid work when we're not debugging.
  if DEBUG_GENERAL in gyp.debug:
    DebugOutput(DEBUG_GENERAL, 'running with these options:')
    for option, value in sorted(options.__dict__.items()):
      if option[0] == '_':
        continue
      if isinstance(value, basestring):
        DebugOutput(DEBUG_GENERAL, "  %s: '%s'", option, value)
      else:
        DebugOutput(DEBUG_GENERAL, "  %s: %s", option, value)

  if not build_files:
    build_files = FindBuildFiles()
  if not build_files:
    raise GypError((usage + '\n\n%s: error: no build_file') %
                   (my_name, my_name))

  # TODO(mark): Chromium-specific hack!
  # For Chromium, the gyp "depth" variable should always be a relative path
  # to Chromium's top-level "src" directory. If no depth variable was set
  # on the command line, try to find a "src" directory by looking at the
  # absolute path to each build file's directory. The first "src" component
  # found will be treated as though it were the path used for --depth.
  if not options.depth:
    for build_file in build_files:
      build_file_dir = os.path.abspath(os.path.dirname(build_file))
      build_file_dir_components = build_file_dir.split(os.path.sep)
      components_len = len(build_file_dir_components)
      for index in xrange(components_len - 1, -1, -1):
        if build_file_dir_components[index] == 'src':
          options.depth = os.path.sep.join(build_file_dir_components)
          break
        del build_file_dir_components[index]

      # If the inner loop found something, break without advancing to another
      # build file.
      if options.depth:
        break

    if not options.depth:
      # NOTE(review): these adjacent string literals concatenate without
      # separating spaces ("This isa temporary ... removed.Use--depth") —
      # the message is garbled; fix candidates should add trailing spaces.
      raise GypError('Could not automatically locate src directory. This is'
                     'a temporary Chromium feature that will be removed. Use'
                     '--depth as a workaround.')

  # If toplevel-dir is not set, we assume that depth is the root of our source
  # tree.
  if not options.toplevel_dir:
    options.toplevel_dir = options.depth

  # -D on the command line sets variable defaults - D isn't just for define,
  # it's for default. Perhaps there should be a way to force (-F?) a
  # variable's value so that it can't be overridden by anything else.
  cmdline_default_variables = {}
  defines = []
  if options.use_environment:
    defines += ShlexEnv('GYP_DEFINES')
  if options.defines:
    defines += options.defines
  cmdline_default_variables = NameValueListToDict(defines)
  if DEBUG_GENERAL in gyp.debug:
    DebugOutput(DEBUG_GENERAL,
                "cmdline_default_variables: %s", cmdline_default_variables)

  # Set up includes.
  includes = []

  # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
  # .gyp file that's loaded, before anything else is included.
  if home_dot_gyp != None:
    default_include = os.path.join(home_dot_gyp, 'include.gypi')
    if os.path.exists(default_include):
      print 'Using overrides found in ' + default_include
      includes.append(default_include)

  # Command-line --include files come after the default include.
  if options.includes:
    includes.extend(options.includes)

  # Generator flags should be prefixed with the target generator since they
  # are global across all generator runs.
  gen_flags = []
  if options.use_environment:
    gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
  if options.generator_flags:
    gen_flags += options.generator_flags
  generator_flags = NameValueListToDict(gen_flags)
  if DEBUG_GENERAL in gyp.debug.keys():
    DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)

  # Generate all requested formats (use a set in case we got one format request
  # twice)
  for format in set(options.formats):
    params = {'options': options,
              'build_files': build_files,
              'generator_flags': generator_flags,
              'cwd': os.getcwd(),
              'build_files_arg': build_files_arg,
              'gyp_binary': sys.argv[0],
              'home_dot_gyp': home_dot_gyp,
              'parallel': options.parallel,
              'root_targets': options.root_targets,
              'target_arch': cmdline_default_variables.get('target_arch', '')}

    # Start with the default variables from the command line.
    [generator, flat_list, targets, data] = Load(
        build_files, format, cmdline_default_variables, includes, options.depth,
        params, options.check, options.circular_check,
        options.duplicate_basename_check)

    # TODO(mark): Pass |data| for now because the generator needs a list of
    # build files that came in. In the future, maybe it should just accept
    # a list, and not the whole data dict.
    # NOTE: flat_list is the flattened dependency graph specifying the order
    # that targets may be built. Build systems that operate serially or that
    # need to have dependencies defined before dependents reference them should
    # generate targets in the order specified in flat_list.
    generator.GenerateOutput(flat_list, targets, data, params)

    if options.configs:
      valid_configs = targets[flat_list[0]]['configurations'].keys()
      for conf in options.configs:
        if conf not in valid_configs:
          raise GypError('Invalid config specified via --build: %s' % conf)
      generator.PerformBuild(data, options.configs, params)

  # Done
  return 0
def main(args):
  """Top-level entry: run gyp_main and turn GypError into exit status 1."""
  try:
    return gyp_main(args)
  except GypError, e:
    sys.stderr.write("gyp: %s\n" % e)
    return 1
# NOTE: setuptools generated console_scripts calls function with no arguments
def script_main():
  """Console-script entry point; forwards sys.argv (minus argv[0]) to main."""
  return main(sys.argv[1:])

if __name__ == '__main__':
  sys.exit(script_main())
| apache-2.0 |
ltiao/networkx | networkx/algorithms/connectivity/kcutsets.py | 12 | 7220 | # -*- coding: utf-8 -*-
"""
Kanevsky all minimum node k cutsets algorithm.
"""
from operator import itemgetter
import networkx as nx
from .utils import build_auxiliary_node_connectivity
from networkx.algorithms.flow import (
build_residual_network,
edmonds_karp,
shortest_augmenting_path,
)
# Max-flow routine used by all_node_cuts() when the caller supplies none.
default_flow_func = edmonds_karp

__author__ = '\n'.join(['Jordi Torrents <jtorrents@milnou.net>'])

__all__ = ['all_node_cuts']
def all_node_cuts(G, k=None, flow_func=None):
    r"""Returns all minimum k cutsets of an undirected graph G.
    This implementation is based on Kanevsky's algorithm [1]_ for finding all
    minimum-size node cut-sets of an undirected graph G; ie the set (or sets)
    of nodes of cardinality equal to the node connectivity of G. Thus if
    removed, would break G into two or more connected components.
    Parameters
    ----------
    G : NetworkX graph
        Undirected graph
    k : Integer
        Node connectivity of the input graph. If k is None, then it is
        computed. Default value: None.
    flow_func : function
        Function to perform the underlying flow computations. Default value
        edmonds_karp. This function performs better in sparse graphs with
        right tailed degree distributions. shortest_augmenting_path will
        perform better in denser graphs.
    Returns
    -------
    cuts : a generator of node cutsets
        Each node cutset has cardinality equal to the node connectivity of
        the input graph.
    Examples
    --------
    >>> # A two-dimensional grid graph has 4 cutsets of cardinality 2
    >>> G = nx.grid_2d_graph(5, 5)
    >>> cutsets = list(nx.all_node_cuts(G))
    >>> len(cutsets)
    4
    >>> all(2 == len(cutset) for cutset in cutsets)
    True
    >>> nx.node_connectivity(G)
    2
    Notes
    -----
    This implementation is based on the sequential algorithm for finding all
    minimum-size separating vertex sets in a graph [1]_. The main idea is to
    compute minimum cuts using local maximum flow computations among a set
    of nodes of highest degree and all other non-adjacent nodes in the Graph.
    Once we find a minimum cut, we add an edge between the high degree
    node and the target node of the local maximum flow computation to make
    sure that we will not find that minimum cut again.
    See also
    --------
    node_connectivity
    edmonds_karp
    shortest_augmenting_path
    References
    ----------
    .. [1] Kanevsky, A. (1993). Finding all minimum-size separating vertex
        sets in a graph. Networks 23(6), 533--541.
        http://onlinelibrary.wiley.com/doi/10.1002/net.3230230604/abstract
    """
    if not nx.is_connected(G):
        raise nx.NetworkXError('Input graph is disconnected.')
    # Initialize data structures.
    # Keep track of the cuts already computed so we do not repeat them.
    # NOTE: a list (not a set) because the stored cutsets are plain (unhashable)
    # sets; membership tests below are therefore O(len(seen)).
    seen = []
    # Even-Tarjan reduction is what we call auxiliary digraph
    # for node connectivity.
    H = build_auxiliary_node_connectivity(G)
    mapping = H.graph['mapping']
    R = build_residual_network(H, 'capacity')
    kwargs = dict(capacity='capacity', residual=R)
    # Define default flow function
    if flow_func is None:
        flow_func = default_flow_func
    if flow_func is shortest_augmenting_path:
        kwargs['two_phase'] = True
    # Begin the actual algorithm
    # step 1: Find node connectivity k of G
    if k is None:
        k = nx.node_connectivity(G, flow_func=flow_func)
    # step 2:
    # Find k nodes with top degree, call it X:
    X = {n for n, d in sorted(G.degree(), key=itemgetter(1), reverse=True)[:k]}
    # Check if X is a k-node-cutset
    if _is_separating_set(G, X):
        seen.append(X)
        yield X
    for x in X:
        # step 3: Compute local connectivity flow of x with all other
        # non adjacent nodes in G
        non_adjacent = set(G) - X - set(G[x])
        for v in non_adjacent:
            # step 4: compute maximum flow in an Even-Tarjan reduction H of G
            # and step:5 build the associated residual network R
            R = flow_func(H, '%sB' % mapping[x], '%sA' % mapping[v], **kwargs)
            flow_value = R.graph['flow_value']
            if flow_value == k:
                ## Remove saturated edges form the residual network
                saturated_edges = [(u, w, d) for (u, w, d) in
                                   R.edges(data=True)
                                   if d['capacity'] == d['flow']]
                R.remove_edges_from(saturated_edges)
                # step 6: shrink the strongly connected components of
                # residual flow network R and call it L
                L = nx.condensation(R)
                cmap = L.graph['mapping']
                # step 7: Compute antichains of L; they map to closed sets in H
                # Any edge in H that links a closed set is part of a cutset
                for antichain in nx.antichains(L):
                    # Nodes in an antichain of the condensation graph of
                    # the residual network map to a closed set of nodes that
                    # define a node partition of the auxiliary digraph H.
                    S = {n for n, scc in cmap.items() if scc in antichain}
                    # Find the cutset that links the node partition (S,~S) in H
                    cutset = set()
                    for u in S:
                        cutset.update((u, w) for w in H[u] if w not in S)
                    # The edges in H that form the cutset are internal edges
                    # (ie edges that represent a node of the original graph G)
                    node_cut = {H.node[n]['id'] for edge in cutset for n in edge}
                    if len(node_cut) == k:
                        if node_cut not in seen:
                            yield node_cut
                            seen.append(node_cut)
                        # Add an edge (x, v) to make sure that we do not
                        # find this cutset again. This is equivalent
                        # of adding the edge in the input graph
                        # G.add_edge(x, v) and then regenerate H and R:
                        # Add edges to the auxiliary digraph.
                        H.add_edge('%sB' % mapping[x], '%sA' % mapping[v],
                                   capacity=1)
                        H.add_edge('%sB' % mapping[v], '%sA' % mapping[x],
                                   capacity=1)
                        # Add edges to the residual network.
                        R.add_edge('%sB' % mapping[x], '%sA' % mapping[v],
                                   capacity=1)
                        R.add_edge('%sA' % mapping[v], '%sB' % mapping[x],
                                   capacity=1)
                        # Stop scanning antichains for this (x, v) pair once a
                        # minimum cut of size k has been handled.
                        break
                # Add again the saturated edges to reuse the residual network
                R.add_edges_from(saturated_edges)
def _is_separating_set(G, cut):
    """Assumes that the input graph is connected"""
    # Removing all but one node trivially disconnects (or empties) the graph.
    if len(cut) == len(G) - 1:
        return True
    remainder = G.copy()
    remainder.remove_nodes_from(cut)
    # `cut` separates G exactly when what is left is no longer connected.
    return not nx.is_connected(remainder)
| bsd-3-clause |
Ebag333/Pyfa | gui/builtinStatsViews/resourcesViewFull.py | 1 | 14704 | # =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
import wx
from gui.statsView import StatsView
from gui.bitmapLoader import BitmapLoader
from gui import pygauge as PG
import gui.mainFrame
from gui.chromeTabs import EVT_NOTEBOOK_PAGE_CHANGED
from eos.types import Hardpoint
from gui.utils.numberFormatter import formatAmount
class ResourcesViewFull(StatsView):
    """Stats panel showing fitting resources (hardpoints, CPU, powergrid,
    calibration, drone/fighter/cargo bays) for the currently selected fit.

    Widgets are created once in populatePanel() and stored on the instance
    via setattr() under systematic names (e.g. labelFullUsedCpu,
    gaugeFullTotalPg); refreshPanel() looks them up the same way.
    """
    name = "resourcesViewFull"
    # Additions-pane contexts whose bay widgets are toggled in toggleContext().
    contexts = ["drone", "fighter", "cargo"]
    def __init__(self, parent):
        StatsView.__init__(self)
        self.parent = parent
        self.mainFrame = gui.mainFrame.MainFrame.getInstance()
        # Switch the displayed bay (drone/fighter/cargo) when the user
        # changes tab in the additions pane.
        self.mainFrame.additionsPane.notebook.Bind(EVT_NOTEBOOK_PAGE_CHANGED, self.pageChanged)
    def pageChanged(self, event):
        """Map the newly selected additions-pane tab to a display context."""
        page = self.mainFrame.additionsPane.getName(event.GetSelection())
        if page == "Cargo":
            self.toggleContext("cargo")
        elif page == "Fighters":
            self.toggleContext("fighter")
        else:
            # Any other tab falls back to the drone display.
            self.toggleContext("drone")
    def toggleContext(self, context):
        """Show the widgets for *context* and hide those of the other contexts."""
        # Apparently you cannot .Hide(True) on a Window, otherwise I would just .Hide(context !== x).
        # This is a gimpy way to toggle this shit
        for x in self.contexts:
            bitmap = getattr(self, "bitmapFull{}Bay".format(x.capitalize()))
            base = getattr(self, "baseFull{}Bay".format(x.capitalize()))
            if context == x:
                bitmap.Show()
                base.Show(True)
            else:
                bitmap.Hide()
                base.Hide(True)
        # Fighter tubes and active drones share one slot in the top row;
        # show whichever matches the current context.
        fighter_sizer = getattr(self, "boxSizerFighter")
        drone_sizer = getattr(self, "boxSizerDrones")
        if context != "fighter":
            fighter_sizer.ShowItems(False)
            drone_sizer.ShowItems(True)
        else:
            fighter_sizer.ShowItems(True)
            drone_sizer.ShowItems(False)
        self.panel.Layout()
        self.headerPanel.Layout()
    def getHeaderText(self, fit):
        """Title shown in the panel header (independent of the fit)."""
        return "Resources"
    def getTextExtentW(self, text):
        """Return the pixel width of *text* in the parent widget's font."""
        width, height = self.parent.GetTextExtent(text)
        return width
    def populatePanel(self, contentPanel, headerPanel):
        """Build all static widgets: hardpoint counters, bay gauges, labels.

        Widgets are attached to self with setattr() so refreshPanel() can
        find them by name later.
        """
        contentSizer = contentPanel.GetSizer()
        root = wx.BoxSizer(wx.VERTICAL)
        contentSizer.Add(root, 0, wx.EXPAND, 0)
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        root.Add(sizer, 0, wx.EXPAND)
        root.Add(wx.StaticLine(contentPanel, wx.ID_ANY, style=wx.HORIZONTAL), 0, wx.EXPAND)
        sizerResources = wx.BoxSizer(wx.HORIZONTAL)
        root.Add(sizerResources, 1, wx.EXPAND, 0)
        parent = self.panel = contentPanel
        self.headerPanel = headerPanel
        # "full" is the panel-name prefix baked into every widget attribute.
        panel = "full"
        base = sizerResources
        sizer.AddSpacer((0, 0), 1, wx.EXPAND, 5)
        # Turrets & launcher hardslots display
        tooltipText = {"turret": "Turret hardpoints", "launcher": "Launcher hardpoints", "drones": "Drones active",
                       "fighter": "Fighter squadrons active", "calibration": "Calibration"}
        for type_ in ("turret", "launcher", "drones", "fighter", "calibration"):
            box = wx.BoxSizer(wx.HORIZONTAL)
            bitmap = BitmapLoader.getStaticBitmap("%s_big" % type_, parent, "gui")
            tooltip = wx.ToolTip(tooltipText[type_])
            bitmap.SetToolTip(tooltip)
            box.Add(bitmap, 0, wx.ALIGN_CENTER)
            sizer.Add(box, 0, wx.ALIGN_CENTER)
            suffix = {'turret': 'Hardpoints', 'launcher': 'Hardpoints', 'drones': 'Active', 'fighter': 'Tubes',
                      'calibration': 'Points'}
            # "used / total" label pair for this slot type.
            lbl = wx.StaticText(parent, wx.ID_ANY, "0")
            setattr(self, "label%sUsed%s%s" % (panel.capitalize(), type_.capitalize(), suffix[type_].capitalize()), lbl)
            box.Add(lbl, 0, wx.ALIGN_CENTER | wx.LEFT, 5)
            box.Add(wx.StaticText(parent, wx.ID_ANY, "/"), 0, wx.ALIGN_CENTER)
            lbl = wx.StaticText(parent, wx.ID_ANY, "0")
            setattr(self, "label%sTotal%s%s" % (panel.capitalize(), type_.capitalize(), suffix[type_].capitalize()),
                    lbl)
            box.Add(lbl, 0, wx.ALIGN_CENTER)
            setattr(self, "boxSizer{}".format(type_.capitalize()), box)
            # Hack - We add a spacer after each thing, but we are always hiding something. The spacer is stil there.
            # This way, we only have one space after the drones/fighters
            if type_ != "drones":
                sizer.AddSpacer((0, 0), 1, wx.EXPAND, 5)
        # PG, Cpu & drone stuff
        tooltipText = {"cpu": "CPU", "pg": "PowerGrid", "droneBay": "Drone bay", "fighterBay": "Fighter bay",
                       "droneBandwidth": "Drone bandwidth", "cargoBay": "Cargo bay"}
        for i, group in enumerate((("cpu", "pg"), ("cargoBay", "droneBay", "fighterBay", "droneBandwidth"))):
            main = wx.BoxSizer(wx.VERTICAL)
            base.Add(main, 1, wx.ALIGN_CENTER)
            for type_ in group:
                capitalizedType = type_[0].capitalize() + type_[1:]
                bitmap = BitmapLoader.getStaticBitmap(type_ + "_big", parent, "gui")
                tooltip = wx.ToolTip(tooltipText[type_])
                bitmap.SetToolTip(tooltip)
                stats = wx.BoxSizer(wx.VERTICAL)
                absolute = wx.BoxSizer(wx.HORIZONTAL)
                stats.Add(absolute, 0, wx.EXPAND)
                b = wx.BoxSizer(wx.HORIZONTAL)
                main.Add(b, 1, wx.ALIGN_CENTER)
                b.Add(bitmap, 0, wx.ALIGN_BOTTOM)
                b.Add(stats, 1, wx.EXPAND)
                lbl = wx.StaticText(parent, wx.ID_ANY, "0")
                setattr(self, "label%sUsed%s" % (panel.capitalize(), capitalizedType), lbl)
                absolute.Add(lbl, 0, wx.ALIGN_LEFT | wx.LEFT, 3)
                absolute.Add(wx.StaticText(parent, wx.ID_ANY, "/"), 0, wx.ALIGN_LEFT)
                lbl = wx.StaticText(parent, wx.ID_ANY, "0")
                setattr(self, "label%sTotal%s" % (panel.capitalize(), capitalizedType), lbl)
                absolute.Add(lbl, 0, wx.ALIGN_LEFT)
                units = {"cpu": " tf", "pg": " MW", "droneBandwidth": " mbit/s", "droneBay": u" m\u00B3",
                         "fighterBay": u" m\u00B3", "cargoBay": u" m\u00B3"}
                lbl = wx.StaticText(parent, wx.ID_ANY, "%s" % units[type_])
                absolute.Add(lbl, 0, wx.ALIGN_LEFT)
                # Gauges modif. - Darriele
                gauge = PG.PyGauge(parent, wx.ID_ANY, 1)
                gauge.SetValueRange(0, 0)
                # Reserve enough width for the worst-case "used/total unit" text.
                gauge.SetMinSize((self.getTextExtentW("1.999M/1.99M MW"), 23))
                gauge.SetFractionDigits(2)
                setattr(self, "gauge%s%s" % (panel.capitalize(), capitalizedType), gauge)
                stats.Add(gauge, 0, wx.ALIGN_CENTER)
                setattr(self, "base%s%s" % (panel.capitalize(), capitalizedType), b)
                setattr(self, "bitmap%s%s" % (panel.capitalize(), capitalizedType), bitmap)
        # Default to the drone context until the additions pane says otherwise.
        self.toggleContext("drone")
    def refreshPanel(self, fit):
        """Update all labels, colours and gauges for *fit* (None clears them)."""
        # If we did anything intresting, we'd update our labels to reflect the new fit's stats here
        # Each tuple: (label attribute template, value getter, precision,
        # lowest, highest) -- the last three feed formatAmount().
        stats = (
            ("label%sUsedTurretHardpoints", lambda: fit.getHardpointsUsed(Hardpoint.TURRET), 0, 0, 0),
            ("label%sTotalTurretHardpoints", lambda: fit.ship.getModifiedItemAttr('turretSlotsLeft'), 0, 0, 0),
            ("label%sUsedLauncherHardpoints", lambda: fit.getHardpointsUsed(Hardpoint.MISSILE), 0, 0, 0),
            ("label%sTotalLauncherHardpoints", lambda: fit.ship.getModifiedItemAttr('launcherSlotsLeft'), 0, 0, 0),
            ("label%sUsedDronesActive", lambda: fit.activeDrones, 0, 0, 0),
            ("label%sTotalDronesActive", lambda: fit.extraAttributes["maxActiveDrones"], 0, 0, 0),
            ("label%sUsedFighterTubes", lambda: fit.fighterTubesUsed, 3, 0, 9),
            ("label%sTotalFighterTubes", lambda: fit.ship.getModifiedItemAttr("fighterTubes"), 3, 0, 9),
            ("label%sUsedCalibrationPoints", lambda: fit.calibrationUsed, 0, 0, 0),
            ("label%sTotalCalibrationPoints", lambda: fit.ship.getModifiedItemAttr('upgradeCapacity'), 0, 0, 0),
            ("label%sUsedPg", lambda: fit.pgUsed, 4, 0, 9),
            ("label%sUsedCpu", lambda: fit.cpuUsed, 4, 0, 9),
            ("label%sTotalPg", lambda: fit.ship.getModifiedItemAttr("powerOutput"), 4, 0, 9),
            ("label%sTotalCpu", lambda: fit.ship.getModifiedItemAttr("cpuOutput"), 4, 0, 9),
            ("label%sUsedDroneBay", lambda: fit.droneBayUsed, 3, 0, 9),
            ("label%sUsedFighterBay", lambda: fit.fighterBayUsed, 3, 0, 9),
            ("label%sUsedDroneBandwidth", lambda: fit.droneBandwidthUsed, 3, 0, 9),
            ("label%sTotalDroneBay", lambda: fit.ship.getModifiedItemAttr("droneCapacity"), 3, 0, 9),
            ("label%sTotalDroneBandwidth", lambda: fit.ship.getModifiedItemAttr("droneBandwidth"), 3, 0, 9),
            ("label%sTotalFighterBay", lambda: fit.ship.getModifiedItemAttr("fighterCapacity"), 3, 0, 9),
            ("label%sUsedCargoBay", lambda: fit.cargoBayUsed, 3, 0, 9),
            ("label%sTotalCargoBay", lambda: fit.ship.getModifiedItemAttr("capacity"), 3, 0, 9),
        )
        panel = "Full"
        usedTurretHardpoints = 0
        totalTurretHardpoints = 0
        usedLauncherHardpoints = 0
        totalLauncherHardPoints = 0
        # Single pass: set every label's text, remembering the used/total
        # pairs (and their labels) needed for the over-use colouring below.
        for labelName, value, prec, lowest, highest in stats:
            label = getattr(self, labelName % panel)
            value = value() if fit is not None else 0
            value = value if value is not None else 0
            if labelName % panel == "label%sUsedTurretHardpoints" % panel:
                usedTurretHardpoints = value
                labelUTH = label
            if labelName % panel == "label%sTotalTurretHardpoints" % panel:
                totalTurretHardpoints = value
                labelTTH = label
            if labelName % panel == "label%sUsedLauncherHardpoints" % panel:
                usedLauncherHardpoints = value
                labelULH = label
            if labelName % panel == "label%sTotalLauncherHardpoints" % panel:
                totalLauncherHardPoints = value
                labelTLH = label
            if labelName % panel == "label%sUsedDronesActive" % panel:
                usedDronesActive = value
                labelUDA = label
            if labelName % panel == "label%sTotalDronesActive" % panel:
                totalDronesActive = value
                labelTDA = label
            if labelName % panel == "label%sUsedFighterTubes" % panel:
                usedFighterTubes = value
                labelUFT = label
            if labelName % panel == "label%sTotalFighterTubes" % panel:
                totalFighterTubes = value
                labelTFT = label
            if labelName % panel == "label%sUsedCalibrationPoints" % panel:
                usedCalibrationPoints = value
                labelUCP = label
            if labelName % panel == "label%sTotalCalibrationPoints" % panel:
                totalCalibrationPoints = value
                labelTCP = label
            if isinstance(value, basestring):
                label.SetLabel(value)
                label.SetToolTip(wx.ToolTip(value))
            else:
                label.SetLabel(formatAmount(value, prec, lowest, highest))
                label.SetToolTip(wx.ToolTip("%.1f" % value))
        # Colour used/total pairs red when a resource is over-used.
        colorWarn = wx.Colour(204, 51, 51)
        colorNormal = wx.SystemSettings_GetColour(wx.SYS_COLOUR_WINDOWTEXT)
        if usedTurretHardpoints > totalTurretHardpoints:
            colorT = colorWarn
        else:
            colorT = colorNormal
        if usedLauncherHardpoints > totalLauncherHardPoints:
            colorL = colorWarn
        else:
            colorL = colorNormal
        if usedDronesActive > totalDronesActive:
            colorD = colorWarn
        else:
            colorD = colorNormal
        if usedFighterTubes > totalFighterTubes:
            colorF = colorWarn
        else:
            colorF = colorNormal
        if usedCalibrationPoints > totalCalibrationPoints:
            colorC = colorWarn
        else:
            colorC = colorNormal
        labelUTH.SetForegroundColour(colorT)
        labelTTH.SetForegroundColour(colorT)
        labelULH.SetForegroundColour(colorL)
        labelTLH.SetForegroundColour(colorL)
        labelUDA.SetForegroundColour(colorD)
        labelTDA.SetForegroundColour(colorD)
        labelUFT.SetForegroundColour(colorF)
        labelTFT.SetForegroundColour(colorF)
        labelUCP.SetForegroundColour(colorC)
        labelTCP.SetForegroundColour(colorC)
        if fit is not None:
            # Getters for gauge maxima; order must match the resourceType
            # loop below, which indexes this tuple with i.
            resMax = (
                lambda: fit.ship.getModifiedItemAttr("cpuOutput"),
                lambda: fit.ship.getModifiedItemAttr("powerOutput"),
                lambda: fit.ship.getModifiedItemAttr("droneCapacity"),
                lambda: fit.ship.getModifiedItemAttr("fighterCapacity"),
                lambda: fit.ship.getModifiedItemAttr("droneBandwidth"),
                lambda: fit.ship.getModifiedItemAttr("capacity"),
            )
        i = 0
        for resourceType in ("cpu", "pg", "droneBay", "fighterBay", "droneBandwidth", "cargoBay"):
            if fit is not None:
                capitalizedType = resourceType[0].capitalize() + resourceType[1:]
                gauge = getattr(self, "gauge%s%s" % (panel, capitalizedType))
                resUsed = getattr(fit, "%sUsed" % resourceType)
                gauge.SetValueRange(resUsed or 0, resMax[i]() or 0)
                i += 1
            else:
                # No fit selected: zero out the gauge.
                capitalizedType = resourceType[0].capitalize() + resourceType[1:]
                gauge = getattr(self, "gauge%s%s" % (panel, capitalizedType))
                gauge.SetValueRange(0, 0)
                i += 1
        self.panel.Layout()
        self.headerPanel.Layout()
# Register this view with the stats-view framework so it gets instantiated.
ResourcesViewFull.register()
| gpl-3.0 |
MaxTyutyunnikov/lino | docs/tutorials/matrix_tutorial/models.py | 1 | 4630 | ## Copyright 2013 Luc Saffre
## This file is part of the Lino project.
import datetime
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from lino import dd
contacts = dd.resolve_app('contacts')
class EntryType(dd.BabelNamed):
    """A user-defined category for Entry records.

    CompaniesWithEntryTypes shows one column per EntryType.
    """
    class Meta:
        verbose_name = _("Entry Type")
        verbose_name_plural = _("Entry Types")
    #~ def after_ui_save(self,ar):
        #~ CompaniesWithEntryTypes.setup_columns()
# Plain table view over all EntryType rows (shown in the main menu below).
class EntryTypes(dd.Table):
    model = EntryType
class Entry(dd.UserAuthored):
    """A dated note of a given type about a company.

    NOTE(review): dd.UserAuthored presumably contributes the `user` field
    that Entries filters on -- confirm against the Lino docs.
    """
    class Meta:
        verbose_name = _("Entry")
        verbose_name_plural = _("Entries")
    # Calendar date the entry refers to.
    date = models.DateField(_("Date"))
    # Category; drives the per-type columns of CompaniesWithEntryTypes.
    entry_type = models.ForeignKey(EntryType)
    subject = models.CharField(_("Subject"),blank=True,max_length=200)
    body = dd.RichTextField(_("Body"),blank=True)
    # The company this entry is about.
    company = models.ForeignKey('contacts.Company')
class Entries(dd.Table):
    """Table of Entry records, filterable by period, user, type and company."""
    model = Entry
    detail_layout = """
    id user date company
    subject
    body
    """
    insert_layout = """
    user date company
    subject
    """
    # Filter parameters; ObservedPeriod also contributes start_date/end_date
    # (used in params_layout and get_request_queryset below).
    parameters = dd.ObservedPeriod(
        entry_type = models.ForeignKey(EntryType,
            blank=True,null=True,
            help_text=_("Show only entries of this type.")),
        company = models.ForeignKey('contacts.Company',
            blank=True,null=True,
            help_text=_("Show only entries of this company.")),
        user = models.ForeignKey(settings.SITE.user_model,
            blank=True,null=True,
            help_text=_("Show only entries by this user.")),
    )
    params_layout = """
    user start_date end_date
    company entry_type
    """
    @classmethod
    def get_request_queryset(cls,ar):
        # Apply each filter parameter only when it has a value.
        qs = super(Entries,cls).get_request_queryset(ar)
        if ar.param_values.end_date:
            qs = qs.filter(date__lte=ar.param_values.end_date)
        if ar.param_values.start_date:
            qs = qs.filter(date__gte=ar.param_values.start_date)
        if ar.param_values.user:
            qs = qs.filter(user=ar.param_values.user)
        if ar.param_values.entry_type:
            qs = qs.filter(entry_type=ar.param_values.entry_type)
        if ar.param_values.company:
            qs = qs.filter(company=ar.param_values.company)
        return qs
    @classmethod
    def param_defaults(cls,ar,**kw):
        # Default the user filter to the requesting user.
        kw = super(Entries,cls).param_defaults(ar,**kw)
        kw.update(user=ar.get_user())
        return kw
class EntriesByCompany(Entries):
    """Entries of one company; mounted as a tab on the Company detail
    window by my_details_setup() further down."""
    master_key = 'company'
#~ class MyEntries(Entries,dd.ByUser):
    #~ pass
class CompaniesWithEntryTypes(dd.VentilatingTable,contacts.Companies):
    """Matrix view: one row per company, one ("ventilated") column per
    EntryType, plus a Total column.  Each cell is a request on Entries
    filtered by that company, type and the observed period."""
    label = _("Companies with Entry Types")
    hide_zero_rows = True
    # Only the observation period is a parameter here.
    parameters = dd.ObservedPeriod()
    params_layout = "start_date end_date"
    editable = False
    auto_fit_column_widths = True
    @classmethod
    def param_defaults(cls,ar,**kw):
        # Default the period to end today (open start).
        kw = super(CompaniesWithEntryTypes,cls).param_defaults(ar,**kw)
        kw.update(end_date=datetime.date.today())
        #~ kw.update(start_date=datetime.date.today())
        return kw
    @classmethod
    def get_ventilated_columns(self):
        def w(et):
            # return a getter function for a RequestField on the given EntryType
            # (et is None for the Total column: no entry_type filter).
            def func(fld,obj,ar):
                #~ mi = ar.master_instance
                #~ if mi is None: return None
                pv = dict(start_date=ar.param_values.start_date,end_date=ar.param_values.end_date)
                if et is not None:
                    pv.update(entry_type=et)
                pv.update(company=obj)
                return Entries.request(param_values=pv)
            return func
        for et in EntryType.objects.all():
            yield dd.RequestField(w(et),verbose_name=unicode(et))
        yield dd.RequestField(w(None),verbose_name=_("Total"))
@dd.receiver(dd.post_save,sender=EntryType)
def my_setup_columns(sender,**kw):
    # Rebuild the ventilated columns whenever an EntryType is created or
    # changed so the matrix table stays in sync.
    CompaniesWithEntryTypes.setup_columns()
@dd.receiver(dd.post_startup)
def my_details_setup(sender,**kw):
    # After site startup, add an "entries" tab to the Company detail window.
    self = sender
    self.modules.contacts.Companies.add_detail_tab('entries','matrix_tutorial.EntriesByCompany')
def setup_main_menu(site,ui,profile,m):
    """Add an "Entries" menu exposing the three tables defined above.

    NOTE(review): hook invoked by the framework during menu construction --
    signature follows Lino's setup_main_menu convention.
    """
    m = m.add_menu("entries",_("Entries"))
    m.add_action(Entries)
    m.add_action(EntryTypes)
    m.add_action(CompaniesWithEntryTypes)
| gpl-3.0 |
IndexBraille/liblouis | tools/lou_maketable.d/make_suggestions.py | 8 | 4835 | # liblouis Braille Translation and Back-Translation Library
#
# Copyright (C) 2017 Bert Frees
#
# This file is part of liblouis.
#
# liblouis is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 2.1 of the License, or
# (at your option) any later version.
#
# liblouis is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with liblouis. If not, see <http://www.gnu.org/licenses/>.
#
import argparse
from utils import *
def update_counters(total, init=False):
    """Redraw the "N words processed" progress line on stderr.

    On every call after the first (``init=False``) the previously printed
    lines are erased first with ANSI cursor-up / erase-line sequences.
    """
    cursor_up_and_clear = "\033[1A\033[K"
    if not init:
        sys.stderr.write(cursor_up_and_clear)
        sys.stderr.write(cursor_up_and_clear)
    sys.stderr.write("%d words processed\n" % total)
    sys.stderr.flush()
def main():
    """Translate every word of the dictionary and print rule suggestions.

    Each dictionary word with a braille entry is translated with the loaded
    table.  For up to MAX_PROBLEMS mistranslations a commented report is
    printed (wrong braille, applied rules, other relevant rules) together
    with a suggested `word` rule.  With --print-total-rate the whole
    dictionary is processed and a success-rate summary is appended.
    """
    def str2bool(value):
        # argparse's `type=bool` is broken: bool("False") is True because any
        # non-empty string is truthy.  This parser keeps the options
        # value-taking (so existing command lines keep working) but
        # interprets the value properly.
        return str(value).strip().lower() in ("1", "true", "yes", "on")
    parser = argparse.ArgumentParser(description="Suggest dictionary rows and liblouis rules")
    parser.add_argument('-d', '--dictionary', required=True, dest="DICTIONARY",
                        help="dictionary file")
    parser.add_argument('-t', '--table', required=True, dest="TABLE",
                        help="translation table including hyphenation patterns")
    parser.add_argument('-n', type=int, default=10, dest="MAX_PROBLEMS",
                        help="maximal number of suggestions")
    parser.add_argument('--non-interactive', type=str2bool, default=False, dest="NON_INTERACTIVE",
                        help="commit all suggestions without providing details")
    parser.add_argument('--print-total-rate', type=str2bool, default=False, dest="TOTAL_RATE",
                        help="print the total number of correctly translated words (takes longer)")
    args = parser.parse_args()
    println("# -*- tab-width: 30; -*-")
    conn, c = open_dictionary(args.DICTIONARY)
    c.execute("SELECT text,braille FROM dictionary WHERE braille IS NOT NULL ORDER BY frequency DESC")
    load_table(args.TABLE)
    problems = 0
    correct = 0
    total = 0
    update_counters(total, init=True)
    for text, braille in c.fetchall():
        problem = False
        if braille:
            actual_braille, applied_rules = translate(text)
            if actual_braille != braille:
                problem = True
                if problems < args.MAX_PROBLEMS:
                    # Build the commented report for this mistranslation.
                    comments = []
                    comments.append("wrong braille\t\t" + actual_braille)
                    suggest_rows = []
                    suggest_rules = []
                    if not is_letter(text):
                        comments.append("word has non-letters")
                    comments.append("applied rules")
                    applied_rules = [get_rule(x) for x in applied_rules if x is not None]
                    for rule in applied_rules:
                        comments.append("> " + "\t".join(rule))
                    other_relevant_rules = set(find_relevant_rules(text)) - set(applied_rules)
                    if other_relevant_rules:
                        comments.append("other relevant rules")
                        for rule in other_relevant_rules:
                            comments.append("> " + "\t".join(rule))
                    suggest_rules.append({"opcode": "word", "text": text, "braille": braille})
        if problem:
            if problems < args.MAX_PROBLEMS:
                if args.NON_INTERACTIVE and suggest_rules:
                    for rule in suggest_rules:
                        println("%(opcode)s\t%(text)s\t%(braille)s" % rule)
                else:
                    println("# >>>\t%s\t%s" % (text, braille or ""))
                    for comment in comments:
                        println("# | " + comment)
                    println("# |__")
                    for row in suggest_rows:
                        println("#\t%(text)s\t%(braille)s" % row)
                    for rule in suggest_rules:
                        println("# %(opcode)s\t%(text)s\t%(braille)s" % rule)
                    println()
            # Bug fix: count *every* mistranslation, not only the printed
            # ones.  Previously, with --print-total-rate, wrong words beyond
            # the first MAX_PROBLEMS were counted as correct, inflating the
            # reported success rate.
            problems += 1
            if not args.TOTAL_RATE and problems >= args.MAX_PROBLEMS:
                break
        else:
            correct += 1
        total += 1
        update_counters(total)
    # Guard total > 0 to avoid a ZeroDivisionError on an empty dictionary.
    if args.TOTAL_RATE and total > 0:
        println("### %d out of %d (%.1f %%) words translated correctly" % (correct, total, math.floor(1000 * correct / total) / 10))
    elif correct > 0:
        println("### %d words translated correctly" % correct)
    conn.close()
if __name__ == "__main__": main() | lgpl-2.1 |
peterwilletts24/Python-Scripts | EMBRACE/Surface_Pressure_Diurnal_Monsoon_Trough.py | 2 | 2835 | import os, sys
import datetime
import iris
import iris.unit as unit
import iris.analysis.cartography
import numpy as np
import iris.analysis.geometry
from shapely.geometry import Polygon
from iris.coord_categorisation import add_categorised_coord
import imp
imp.load_source('UnrotateUpdateCube', '/nfs/see-fs-01_users/eepdw/python_scripts/Monsoon_Python_Scripts/modules/unrotate_and_update_pole.py')
from UnrotateUpdateCube import *
import linecache
def PrintException():
    """Print details of the exception currently being handled.

    Must be called from inside an ``except`` block: reads ``sys.exc_info()``
    and prints the file name, line number, source line and exception object
    of the innermost traceback frame.  Returns None.
    """
    exc_type, exc_obj, tb = sys.exc_info()
    f = tb.tb_frame
    lineno = tb.tb_lineno
    filename = f.f_code.co_filename
    # Refresh linecache's copy of the file before fetching the line.
    linecache.checkcache(filename)
    line = linecache.getline(filename, lineno, f.f_globals)
    # A single-argument print() call behaves identically on Python 2 and 3;
    # the original used the Python-2-only `print expr` statement form.
    print('EXCEPTION IN ({}, LINE {} "{}"): {}'.format(filename, lineno, line.strip(), exc_obj))
# Diagnostic suffix of the input .pp files and of the saved .npz files.
diag = '_408_on_p_levs_mean_by_hour'
pp_file_path='/nfs/a90/eepdw/Data/EMBRACE/'
experiment_ids = ['djznw', 'djzny', 'djznq', 'djzns', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq', 'dkbhu', 'djznu', 'dkhgu' ] # All 12
#experiment_ids = ['dklyu', 'dkmbq', 'dklwu', 'dklzq']
#experiment_ids = ['djzns']
# Min and max lats lons from smallest model domain (dkbhu) - see spreadsheet
#latmin=-10
#latmax=5
#lonmin=64.115
#lonmax=80
# Monsoon Trough
# Quadrilateral (lon, lat) region over which the area-weighted mean is taken.
polygon = Polygon(((73., 21.), (83., 16.), (87., 22.), (75., 27.)))
#lat_constraint=iris.Constraint(grid_latitude= lambda la: latmin <= la.point <= latmax)
#lon_constraint=iris.Constraint(grid_longitude= lambda lo: lonmin <= lo.point <= lonmax)
for experiment_id in experiment_ids:
    # Files live under <path>/<id minus last char>/<id>/ on this filesystem.
    expmin1 = experiment_id[:-1]
    fu = '%s%s/%s/%s%s.pp' % (pp_file_path, expmin1, experiment_id, experiment_id, diag)
    flsm = '%s%s/%s/30.pp' % (pp_file_path, expmin1, experiment_id)
    print experiment_id
    sys.stdout.flush()
    try:
        #cube_names = ['%s' % cube_name_param, '%s' % cube_name_explicit]
        cube = iris.load_cube(fu)
        # Convert from the rotated pole grid to regular lat/lon coordinates.
        cube= unrotate_pole_update_cube(cube)
        cube.coord('grid_longitude').guess_bounds()
        cube.coord('grid_latitude').guess_bounds()
        # Calculate weights
        l=iris.analysis.geometry.geometry_area_weights(cube, polygon)
        # For Sea and Land, mask area and calculate mean of each hour for sea/land and SAVE as numpy array
        coords = ('grid_latitude', 'grid_longitude')
        collapsed_cube = cube.collapsed(coords,
                                        iris.analysis.MEAN,
                                        weights=l)
        np.savez('%s%s/%s/%s%s_diurnal_monsoon_trough' % (pp_file_path, expmin1, experiment_id, experiment_id, diag), \
                 data=collapsed_cube.data.data, time=collapsed_cube.coord('time').points, pressure=collapsed_cube.coord('pressure').points)
    except iris.exceptions.ConstraintMismatchError:
        # Report the failing experiment but keep processing the rest.
        PrintException()
| mit |
bikong2/django | tests/template_tests/syntax_tests/test_if_equal.py | 368 | 9892 | from django.test import SimpleTestCase
from ..utils import setup
class IfEqualTagTests(SimpleTestCase):
@setup({'ifequal01': '{% ifequal a b %}yes{% endifequal %}'})
def test_ifequal01(self):
output = self.engine.render_to_string('ifequal01', {'a': 1, 'b': 2})
self.assertEqual(output, '')
@setup({'ifequal02': '{% ifequal a b %}yes{% endifequal %}'})
def test_ifequal02(self):
output = self.engine.render_to_string('ifequal02', {'a': 1, 'b': 1})
self.assertEqual(output, 'yes')
@setup({'ifequal03': '{% ifequal a b %}yes{% else %}no{% endifequal %}'})
def test_ifequal03(self):
output = self.engine.render_to_string('ifequal03', {'a': 1, 'b': 2})
self.assertEqual(output, 'no')
@setup({'ifequal04': '{% ifequal a b %}yes{% else %}no{% endifequal %}'})
def test_ifequal04(self):
output = self.engine.render_to_string('ifequal04', {'a': 1, 'b': 1})
self.assertEqual(output, 'yes')
@setup({'ifequal05': '{% ifequal a \'test\' %}yes{% else %}no{% endifequal %}'})
def test_ifequal05(self):
output = self.engine.render_to_string('ifequal05', {'a': 'test'})
self.assertEqual(output, 'yes')
@setup({'ifequal06': '{% ifequal a \'test\' %}yes{% else %}no{% endifequal %}'})
def test_ifequal06(self):
output = self.engine.render_to_string('ifequal06', {'a': 'no'})
self.assertEqual(output, 'no')
@setup({'ifequal07': '{% ifequal a "test" %}yes{% else %}no{% endifequal %}'})
def test_ifequal07(self):
output = self.engine.render_to_string('ifequal07', {'a': 'test'})
self.assertEqual(output, 'yes')
@setup({'ifequal08': '{% ifequal a "test" %}yes{% else %}no{% endifequal %}'})
def test_ifequal08(self):
output = self.engine.render_to_string('ifequal08', {'a': 'no'})
self.assertEqual(output, 'no')
@setup({'ifequal09': '{% ifequal a "test" %}yes{% else %}no{% endifequal %}'})
def test_ifequal09(self):
output = self.engine.render_to_string('ifequal09')
self.assertEqual(output, 'no')
@setup({'ifequal10': '{% ifequal a b %}yes{% else %}no{% endifequal %}'})
def test_ifequal10(self):
output = self.engine.render_to_string('ifequal10')
self.assertEqual(output, 'yes')
# SMART SPLITTING
@setup({'ifequal-split01': '{% ifequal a "test man" %}yes{% else %}no{% endifequal %}'})
def test_ifequal_split01(self):
output = self.engine.render_to_string('ifequal-split01')
self.assertEqual(output, 'no')
@setup({'ifequal-split02': '{% ifequal a "test man" %}yes{% else %}no{% endifequal %}'})
def test_ifequal_split02(self):
output = self.engine.render_to_string('ifequal-split02', {'a': 'foo'})
self.assertEqual(output, 'no')
@setup({'ifequal-split03': '{% ifequal a "test man" %}yes{% else %}no{% endifequal %}'})
def test_ifequal_split03(self):
output = self.engine.render_to_string('ifequal-split03', {'a': 'test man'})
self.assertEqual(output, 'yes')
@setup({'ifequal-split04': '{% ifequal a \'test man\' %}yes{% else %}no{% endifequal %}'})
def test_ifequal_split04(self):
output = self.engine.render_to_string('ifequal-split04', {'a': 'test man'})
self.assertEqual(output, 'yes')
@setup({'ifequal-split05': '{% ifequal a \'i "love" you\' %}yes{% else %}no{% endifequal %}'})
def test_ifequal_split05(self):
output = self.engine.render_to_string('ifequal-split05', {'a': ''})
self.assertEqual(output, 'no')
@setup({'ifequal-split06': '{% ifequal a \'i "love" you\' %}yes{% else %}no{% endifequal %}'})
def test_ifequal_split06(self):
output = self.engine.render_to_string('ifequal-split06', {'a': 'i "love" you'})
self.assertEqual(output, 'yes')
@setup({'ifequal-split07': '{% ifequal a \'i "love" you\' %}yes{% else %}no{% endifequal %}'})
def test_ifequal_split07(self):
output = self.engine.render_to_string('ifequal-split07', {'a': 'i love you'})
self.assertEqual(output, 'no')
@setup({'ifequal-split08': r"{% ifequal a 'I\'m happy' %}yes{% else %}no{% endifequal %}"})
def test_ifequal_split08(self):
output = self.engine.render_to_string('ifequal-split08', {'a': "I'm happy"})
self.assertEqual(output, 'yes')
@setup({'ifequal-split09': r"{% ifequal a 'slash\man' %}yes{% else %}no{% endifequal %}"})
def test_ifequal_split09(self):
output = self.engine.render_to_string('ifequal-split09', {'a': 'slash\man'})
self.assertEqual(output, 'yes')
@setup({'ifequal-split10': r"{% ifequal a 'slash\man' %}yes{% else %}no{% endifequal %}"})
def test_ifequal_split10(self):
output = self.engine.render_to_string('ifequal-split10', {'a': 'slashman'})
self.assertEqual(output, 'no')
# NUMERIC RESOLUTION
# ifequal compares with strict equality: string '5' != int 5, int 5 != 5.2.
@setup({'ifequal-numeric01': '{% ifequal x 5 %}yes{% endifequal %}'})
def test_ifequal_numeric01(self):
    # String '5' does not equal the numeric literal 5.
    output = self.engine.render_to_string('ifequal-numeric01', {'x': '5'})
    self.assertEqual(output, '')

@setup({'ifequal-numeric02': '{% ifequal x 5 %}yes{% endifequal %}'})
def test_ifequal_numeric02(self):
    output = self.engine.render_to_string('ifequal-numeric02', {'x': 5})
    self.assertEqual(output, 'yes')

@setup({'ifequal-numeric03': '{% ifequal x 5.2 %}yes{% endifequal %}'})
def test_ifequal_numeric03(self):
    output = self.engine.render_to_string('ifequal-numeric03', {'x': 5})
    self.assertEqual(output, '')

@setup({'ifequal-numeric04': '{% ifequal x 5.2 %}yes{% endifequal %}'})
def test_ifequal_numeric04(self):
    output = self.engine.render_to_string('ifequal-numeric04', {'x': 5.2})
    self.assertEqual(output, 'yes')

@setup({'ifequal-numeric05': '{% ifequal x 0.2 %}yes{% endifequal %}'})
def test_ifequal_numeric05(self):
    output = self.engine.render_to_string('ifequal-numeric05', {'x': 0.2})
    self.assertEqual(output, 'yes')

@setup({'ifequal-numeric06': '{% ifequal x .2 %}yes{% endifequal %}'})
def test_ifequal_numeric06(self):
    # '.2' is parsed as the float 0.2.
    output = self.engine.render_to_string('ifequal-numeric06', {'x': 0.2})
    self.assertEqual(output, 'yes')

@setup({'ifequal-numeric07': '{% ifequal x 2. %}yes{% endifequal %}'})
def test_ifequal_numeric07(self):
    # '2.' is not treated as a numeric literal, so it never equals int 2.
    output = self.engine.render_to_string('ifequal-numeric07', {'x': 2})
    self.assertEqual(output, '')

@setup({'ifequal-numeric08': '{% ifequal x "5" %}yes{% endifequal %}'})
def test_ifequal_numeric08(self):
    # Quoted "5" is a string literal; int 5 does not match it.
    output = self.engine.render_to_string('ifequal-numeric08', {'x': 5})
    self.assertEqual(output, '')

@setup({'ifequal-numeric09': '{% ifequal x "5" %}yes{% endifequal %}'})
def test_ifequal_numeric09(self):
    output = self.engine.render_to_string('ifequal-numeric09', {'x': '5'})
    self.assertEqual(output, 'yes')

@setup({'ifequal-numeric10': '{% ifequal x -5 %}yes{% endifequal %}'})
def test_ifequal_numeric10(self):
    output = self.engine.render_to_string('ifequal-numeric10', {'x': -5})
    self.assertEqual(output, 'yes')

@setup({'ifequal-numeric11': '{% ifequal x -5.2 %}yes{% endifequal %}'})
def test_ifequal_numeric11(self):
    output = self.engine.render_to_string('ifequal-numeric11', {'x': -5.2})
    self.assertEqual(output, 'yes')

@setup({'ifequal-numeric12': '{% ifequal x +5 %}yes{% endifequal %}'})
def test_ifequal_numeric12(self):
    # A leading '+' sign is accepted on numeric literals.
    output = self.engine.render_to_string('ifequal-numeric12', {'x': 5})
    self.assertEqual(output, 'yes')
# FILTER EXPRESSIONS AS ARGUMENTS
# Both sides of ifequal may be full filter expressions; filters are applied
# before the comparison.
@setup({'ifequal-filter01': '{% ifequal a|upper "A" %}x{% endifequal %}'})
def test_ifequal_filter01(self):
    output = self.engine.render_to_string('ifequal-filter01', {'a': 'a'})
    self.assertEqual(output, 'x')

@setup({'ifequal-filter02': '{% ifequal "A" a|upper %}x{% endifequal %}'})
def test_ifequal_filter02(self):
    # Filter expression as the second argument works the same way.
    output = self.engine.render_to_string('ifequal-filter02', {'a': 'a'})
    self.assertEqual(output, 'x')

@setup({'ifequal-filter03': '{% ifequal a|upper b|upper %}x{% endifequal %}'})
def test_ifequal_filter03(self):
    output = self.engine.render_to_string('ifequal-filter03', {'a': 'x', 'b': 'X'})
    self.assertEqual(output, 'x')

@setup({'ifequal-filter04': '{% ifequal x|slice:"1" "a" %}x{% endifequal %}'})
def test_ifequal_filter04(self):
    # Filters taking arguments (slice:"1") are supported too.
    output = self.engine.render_to_string('ifequal-filter04', {'x': 'aaa'})
    self.assertEqual(output, 'x')

@setup({'ifequal-filter05': '{% ifequal x|slice:"1"|upper "A" %}x{% endifequal %}'})
def test_ifequal_filter05(self):
    # Chained filters are applied left to right before comparing.
    output = self.engine.render_to_string('ifequal-filter05', {'x': 'aaa'})
    self.assertEqual(output, 'x')
class IfNotEqualTagTests(SimpleTestCase):
    """Tests for the {% ifnotequal %} template tag (inverse of ifequal)."""

    @setup({'ifnotequal01': '{% ifnotequal a b %}yes{% endifnotequal %}'})
    def test_ifnotequal01(self):
        output = self.engine.render_to_string('ifnotequal01', {'a': 1, 'b': 2})
        self.assertEqual(output, 'yes')

    @setup({'ifnotequal02': '{% ifnotequal a b %}yes{% endifnotequal %}'})
    def test_ifnotequal02(self):
        # Equal values render nothing when there is no {% else %} branch.
        output = self.engine.render_to_string('ifnotequal02', {'a': 1, 'b': 1})
        self.assertEqual(output, '')

    @setup({'ifnotequal03': '{% ifnotequal a b %}yes{% else %}no{% endifnotequal %}'})
    def test_ifnotequal03(self):
        output = self.engine.render_to_string('ifnotequal03', {'a': 1, 'b': 2})
        self.assertEqual(output, 'yes')

    @setup({'ifnotequal04': '{% ifnotequal a b %}yes{% else %}no{% endifnotequal %}'})
    def test_ifnotequal04(self):
        # Equal values take the {% else %} branch.
        output = self.engine.render_to_string('ifnotequal04', {'a': 1, 'b': 1})
        self.assertEqual(output, 'no')
| bsd-3-clause |
PW-Sat2/PWSat2OBC | integration_tests/devices/comm_beacon.py | 1 | 5684 | from struct import pack
from bitarray import bitarray
from emulator.beacon_parser.full_beacon_parser import FullBeaconParser
from emulator.beacon_parser.parser import BitReader, BeaconStorage
import experiment_type
class BeaconFrame(object):
    """A received COMM beacon frame plus its parsed telemetry.

    The raw payload is always kept; parsing may fail, in which case
    ``_parsed`` is ``None`` and ``__repr__`` reports the failure.
    """

    def __init__(self, payload):
        # payload: sequence of byte values (ints) as received from COMM.
        self._payload = payload

        try:
            all_bits = bitarray(endian='little')
            all_bits.frombytes(''.join(map(lambda x: pack('B', x), payload)))

            reader = BitReader(all_bits)
            store = BeaconStorage()

            parsers = FullBeaconParser().GetParsers(reader, store)
            parsers.reverse()

            while len(parsers) > 0:
                parser = parsers.pop()
                parser.parse()

            self._parsed = store.storage
        except Exception:
            # Was a bare `except:` which also swallowed SystemExit and
            # KeyboardInterrupt. Any parser failure leaves the frame
            # unparsed; __repr__ reports "(parse failed)".
            self._parsed = None

    def payload(self):
        """Return the raw beacon payload (sequence of byte values)."""
        return self._payload

    def _get_proper_crc(self):
        """Return the expected program CRC from build_config, or '' if the
        config module/key is unavailable."""
        try:
            from build_config import config
            return config['VALID_CRC']
        except Exception:
            # `as e` was unused; the fallback empty string simply disables
            # the CRC comparison in __repr__.
            return ""

    def __repr__(self):
        if self._parsed is None:
            return '{} (parse failed)'.format(self.__class__.__name__)

        proper_crc = self._get_proper_crc()

        # v: stringify a parsed telemetry entry; check_range: test the
        # entry's converted (engineering-unit) value against [low, high].
        v = lambda group, key: str(self._parsed[group][key])
        check_range = lambda group, key, low, high: low <= self._parsed[group][key].converted and self._parsed[group][key].converted <= high

        crc_ok = v('02: Program State', '0056: Program CRC') == proper_crc

        # Each *_ok marker is blank when healthy and "!!!!! " when the
        # corresponding telemetry value is out of its expected range.
        bp_volt_ok = ' '
        if not check_range('14: Controller A', '1019: BATC.VOLT_A', 7.0, 7.8) \
                or not check_range('15: Controller B', '1204: BATC.VOLT_B', 7.0, 7.8):
            bp_volt_ok = "!!!!! "

        bp_temp_ok = ' '
        if not check_range('14: Controller A', '1062: BP.Temperature A', 0, 45) \
                or not check_range('14: Controller A', '1075: BP.Temperature B', 0, 45)\
                or not check_range('15: Controller B', '1194: BP.Temperature', 0, 45):
            bp_temp_ok = "!!!!! "

        pa_temp_ok = ' '
        if not check_range('11: Comm', '0756: [Now] Power Amplifier Temperature', 0, 45) \
                or not check_range('11: Comm', '0605: [Last transmission] Power Amplifier Temperature', 0, 56):
            pa_temp_ok = "!!!!! "

        rx_current_ok = ' '
        # NOTE(review): unlike the other checks this flags when the value
        # IS inside 68-75 -- presumably that range indicates a fault state;
        # confirm against the EPS/COMM telemetry spec.
        if check_range('11: Comm', '0720: [Now] Receiver Current', 68, 75):
            rx_current_ok = "!!!!! "

        transmitter_bitrate = v('11: Comm', '0591: Transmitter Bitrate')

        experiment_status = ''
        experiment_startup_status = ''
        experiment_status_ok = ' '
        if v('09: Experiments', '0490: Current experiment code').strip() != 'None':
            experiment_status = ' {}'.format(v('09: Experiments', '0502: Last Experiment Iteration Status'))
            if experiment_status.strip() == 'Failure':
                experiment_status_ok = "!!!!! "
        if v('09: Experiments', '0494: Experiment Startup Result').strip() == 'Failure':
            experiment_startup_status = ' START Failure'
            if experiment_status_ok == ' ':
                experiment_status_ok = '!!!!! '

        mcu_temperature_ok = ' '
        mcu_temperature = self._parsed['13: MCU']['0781: Temperature']
        gyro_temperature = self._parsed['10: Gyroscope']['0558: Temperature']
        # MCU running much hotter than the gyro suggests a bad reading or
        # an overheating OBC.
        if mcu_temperature.converted > gyro_temperature.converted + 10:
            mcu_temperature_ok = "!!!!! "

        current_3v3_ok = ' '
        current_3v3 = self._parsed['14: Controller A']['0956: DISTR.CURR_3V3']
        if current_3v3.converted > 0.1:
            current_3v3_ok = "!!!!! "

        lines = [
            '{} @ {} bps'.format(self.__class__.__name__, transmitter_bitrate),
            '{}BP VOLT {}, {}'.format(
                bp_volt_ok,
                v('14: Controller A', '1019: BATC.VOLT_A'),
                v('15: Controller B', '1204: BATC.VOLT_B')
            ),
            '{}BP TEMP {}, {}, {}'.format(
                bp_temp_ok,
                v('14: Controller A', '1062: BP.Temperature A'),
                v('14: Controller A', '1075: BP.Temperature B'),
                v('15: Controller B', '1194: BP.Temperature'),
            ),
            '{}COMM PA TEMP NOW {} LAST {}'.format(
                pa_temp_ok,
                v('11: Comm', '0756: [Now] Power Amplifier Temperature'),
                v('11: Comm', '0605: [Last transmission] Power Amplifier Temperature')
            ),
            '{}COMM RX Current {}'.format(
                rx_current_ok,
                v('11: Comm', '0720: [Now] Receiver Current'),
            )]

        # Only show the CRC line when it mismatches the expected value.
        if not crc_ok:
            lines.append(
                '!!!!! OBC CRC {}'.format(
                    v('02: Program State', '0056: Program CRC')
                ))

        lines.extend([
            ' GYRO UNCAL {}, {}, {}'.format(
                v('10: Gyroscope', '0510: X measurement'),
                v('10: Gyroscope', '0526: Y measurement'),
                v('10: Gyroscope', '0542: Z measurement')
            ),
            '{}EXPERIMENT {}{}{}'.format(
                experiment_status_ok,
                v('09: Experiments', '0490: Current experiment code'),
                experiment_status,
                experiment_startup_status
            ),
            '{}MCU TEMP {}'.format(
                mcu_temperature_ok,
                mcu_temperature,
            ),
            '{}3V3 DISTR CURRENT {:.0f} mA\n'.format(
                current_3v3_ok,
                1000.*current_3v3.converted,
            )
        ])

        return '\n'.join(lines)
| agpl-3.0 |
mbrukman/libcloud | libcloud/loadbalancer/drivers/softlayer.py | 49 | 15112 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance withv
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'SoftlayerLBDriver'
]
from libcloud.common.types import LibcloudError
from libcloud.common.softlayer import SoftLayerConnection
from libcloud.utils.misc import find, reverse_dict
from libcloud.loadbalancer.types import State
from libcloud.loadbalancer.base import Algorithm, Driver, LoadBalancer
from libcloud.loadbalancer.base import DEFAULT_ALGORITHM, Member
lb_service = 'SoftLayer_Network_Application_Delivery_Controller_LoadBalancer_'\
'VirtualIpAddress'
class LBPackage(object):
    """
    A single SoftLayer product package usable when placing load-balancer
    orders (e.g. via the ex_place_balancer_order method).

    :param id: Package id.
    :type id: ``int``

    :param name: Package name.
    :type name: ``str``

    :param description: Package short description.
    :type description: ``str``

    :param price_id: Id of the price for this package.
    :type price_id: ``int``

    :param capacity: Numerical representation of the capacity given in the
                     description of this package.
    :type capacity: ``int``
    """

    def __init__(self, id, name, description, price_id, capacity):
        self.id = id
        self.name = name
        self.description = description
        self.price_id = price_id
        self.capacity = capacity

    def __repr__(self):
        template = ('<LBPackage: id=%s, name=%s, description=%s, '
                    'price_id=%s, capacity=%s>')
        return template % (self.id, self.name, self.description,
                           self.price_id, self.capacity)
class SoftlayerLBDriver(Driver):
    # Provider display name and website for this driver.
    name = 'Softlayer Load Balancing'
    website = 'http://www.softlayer.com/'
    connectionCls = SoftLayerConnection

    # Mapping between SoftLayer routing-method keynames and libcloud
    # Algorithm constants; the reverse map is used when configuring groups.
    _VALUE_TO_ALGORITHM_MAP = {
        'ROUND_ROBIN': Algorithm.ROUND_ROBIN,
        'LEAST_CONNECTIONS': Algorithm.LEAST_CONNECTIONS,
        'SHORTEST_RESPONSE': Algorithm.SHORTEST_RESPONSE,
        'PERSISTENT_IP': Algorithm.PERSISTENT_IP
    }

    _ALGORITHM_TO_VALUE_MAP = reverse_dict(_VALUE_TO_ALGORITHM_MAP)
def list_balancers(self):
    """Return all load balancers on the account.

    The object mask pulls in the related ip address, datacenter and
    service-group objects in one request.

    :rtype: ``list`` of :class:`LoadBalancer`
    """
    mask = {
        'adcLoadBalancers': {
            'ipAddress': '',
            'loadBalancerHardware': {
                'datacenter': ''
            },
            'virtualServers': {
                'serviceGroups': {
                    'routingMethod': '',
                    'routingType': '',
                    'services': {
                        'ipAddress': ''
                    }
                }
            }
        }
    }

    res = self.connection.request(
        'SoftLayer_Account', 'getAdcLoadBalancers',
        object_mask=mask).object

    return [self._to_balancer(lb) for lb in res]

def get_balancer(self, balancer_id):
    """Return the balancer with the given id.

    :raises: LibcloudError when no balancer matches.
    """
    balancers = self.list_balancers()
    balancer = find(balancers, lambda b: b.id == balancer_id)
    if not balancer:
        raise LibcloudError(value='No balancer found for id: %s' %
                            balancer_id, driver=self)
    return balancer

def list_protocols(self):
    """
    Return a list of supported protocols.

    :rtype: ``list`` of ``str``
    """
    return ['dns', 'ftp', 'http', 'https', 'tcp', 'udp']

def balancer_list_members(self, balancer):
    """Return the members of the balancer's service group matching
    ``balancer.port`` (empty list when there is no such group)."""
    lb = self._get_balancer_model(balancer.id)
    members = []
    vs = self._locate_service_group(lb, balancer.port)
    if vs:
        if vs['serviceGroups']:
            srvgrp = vs['serviceGroups'][0]
            members = [self._to_member(srv, balancer) for
                       srv in srvgrp['services']]

    return members

def balancer_attach_member(self, balancer, member):
    """Attach a member (by ip/port) to the balancer's service group.

    :raises: LibcloudError when the balancer has no service group for
             ``balancer.port``.
    """
    lb = self._get_balancer_model(balancer.id)
    vs = self._locate_service_group(lb, balancer.port)
    if not vs:
        raise LibcloudError(value='No service_group found for balancer '
                            'port: %s' % balancer.port, driver=self)

    if vs['serviceGroups']:
        services = vs['serviceGroups'][0]['services']
        services.append(self._to_service_template(member.ip,
                                                  member.port))

    self.connection.request(lb_service, 'editObject', lb, id=balancer.id)

    # Re-fetch so the returned Member carries its SoftLayer-assigned id.
    return [m for m in balancer.list_members() if m.ip == member.ip][0]

def balancer_detach_member(self, balancer, member):
    """Delete the member's service object on SoftLayer."""
    svc_lbsrv = 'SoftLayer_Network_Application_Delivery_Controller_'\
        'LoadBalancer_Service'

    self.connection.request(svc_lbsrv, 'deleteObject', id=member.id)
    return True
def destroy_balancer(self, balancer):
    """Destroy the balancer by cancelling its billing item."""
    res_billing = self.connection.request(lb_service, 'getBillingItem',
                                          id=balancer.id).object

    self.connection.request('SoftLayer_Billing_Item', 'cancelService',
                            id=res_billing['id'])
    return True

def ex_list_balancer_packages(self):
    """
    Retrieves the available local load balancer packages.

    :rtype: ``list`` of :class:`LBPackage`
    """
    mask = {
        'prices': ''
    }

    res = self.connection.request('SoftLayer_Product_Package', 'getItems',
                                  id=0, object_mask=mask).object
    # Keep only 'Load Balancer' items, then drop the 'Global ...' ones so
    # only local load balancer packages remain.
    res_lb_pkgs = [r for r in res if r['description'].find
                   ('Load Balancer') != -1]
    res_lb_pkgs = [r for r in res_lb_pkgs if not r['description'].
                   startswith('Global')]

    return [self._to_lb_package(r) for r in res_lb_pkgs]

def ex_place_balancer_order(self, package, location):
    """
    Places an order for a local loadbalancer in the specified
    location.

    :param package: The package to create the loadbalancer from.
    :type package: :class:`LBPackage`

    :param location: The location (datacenter) to create the
                     loadbalancer.
    :type location: :class:`NodeLocation`

    :return: ``True`` if ex_place_balancer_order was successful.
    :rtype: ``bool``
    """
    data = {
        'complexType': 'SoftLayer_Container_Product_Order_Network_'
                       'LoadBalancer',
        'quantity': 1,
        'packageId': 0,
        'location': self._get_location(location.id),
        'prices': [{'id': package.price_id}]
    }

    self.connection.request('SoftLayer_Product_Order', 'placeOrder',
                            data)
    return True
def ex_configure_load_balancer(self, balancer, port=80,
                               protocol='http',
                               algorithm=DEFAULT_ALGORITHM,
                               ex_allocation=100):
    """
    Configure the loadbalancer by adding it with a front-end port (aka
    a service group in the Softlayer loadbalancer model).

    Softlayer loadbalancer may be defined with multiple service
    groups (front-end ports) each defined with a unique port number.

    :param balancer: The loadbalancer.
    :type balancer: :class:`LoadBalancer`

    :param port: Port of the service group, defaults to 80.
    :type port: ``int``

    :param protocol: Loadbalancer protocol, defaults to http.
    :type protocol: ``str``

    :param algorithm: Load balancing algorithm, defaults to
                      Algorithm.ROUND_ROBIN
    :type algorithm: :class:`Algorithm`

    :param ex_allocation: The percentage of the total connection
                          allocations to allocate for this group.
    :type ex_allocation: ``int``

    :return: ``True`` if ex_add_service_group was successful.
    :rtype: ``bool``

    :raises: LibcloudError on unknown protocol/algorithm or when the
             balancer already has a service group configured.
    """
    _types = self._get_routing_types()
    _methods = self._get_routing_methods()

    rt = find(_types, lambda t: t['keyname'] == protocol.upper())
    if not rt:
        raise LibcloudError(value='Invalid protocol %s' % protocol,
                            driver=self)

    value = self._algorithm_to_value(algorithm)
    meth = find(_methods, lambda m: m['keyname'] == value)
    if not meth:
        raise LibcloudError(value='Invalid algorithm %s' % algorithm,
                            driver=self)

    service_group_template = {
        'port': port,
        'allocation': ex_allocation,
        'serviceGroups': [{
            'routingTypeId': rt['id'],
            'routingMethodId': meth['id']
        }]
    }

    lb = self._get_balancer_model(balancer.id)
    if len(lb['virtualServers']) > 0:
        port = lb['virtualServers'][0]['port']
        # Bug fix: the message previously applied '%' to a string with no
        # conversion specifier, raising TypeError instead of LibcloudError.
        raise LibcloudError(value='Loadbalancer already configured with '
                            'a service group (front-end port): %s' % port,
                            driver=self)

    lb['virtualServers'].append(service_group_template)
    self.connection.request(lb_service, 'editObject', lb, id=balancer.id)
    return True
def _get_balancer_model(self, balancer_id):
    """
    Retrieve Softlayer loadbalancer model.

    The mask pulls in service groups, their services and the related
    ip-address/group-reference objects.
    """
    lb_mask = {
        'virtualServers': {
            'serviceGroups': {
                'services': {
                    'ipAddress': '',
                    'groupReferences': '',
                }
            }
        }
    }

    lb_res = self.connection.request(lb_service, 'getObject',
                                     object_mask=lb_mask, id=balancer_id).\
        object
    return lb_res

def _locate_service_group(self, lb, port):
    """
    Locate service group with given port.

    Return virtualServers (vs) entry whose port matches the
    supplied parameter port. For a negative port, just return
    the first vs entry.
    None is returned if no match found.

    :param lb: Softlayer loadbalancer model.
    :type lb: ``dict``

    :param port: loadbalancer front-end port.
    :type port: ``int``

    :return: Matched entry in the virtualServers array of the supplied
             model.
    :rtype: ``dict``
    """
    vs = None
    if port < 0:
        # Negative port means "any": take the first group, if present.
        vs = lb['virtualServers'][0] if lb['virtualServers']\
            else None
    else:
        vs = find(lb['virtualServers'], lambda v: v['port'] == port)

    return vs
def _get_routing_types(self):
    """Return all SoftLayer LB routing types (protocols)."""
    svc_rtype = 'SoftLayer_Network_Application_Delivery_Controller_'\
        'LoadBalancer_Routing_Type'

    return self.connection.request(svc_rtype, 'getAllObjects').object

def _get_routing_methods(self):
    """Return all SoftLayer LB routing methods (balancing algorithms)."""
    svc_rmeth = 'SoftLayer_Network_Application_Delivery_Controller_'\
        'LoadBalancer_Routing_Method'

    return self.connection.request(svc_rmeth, 'getAllObjects').object

def _get_location(self, location_id):
    """Map a datacenter name to its SoftLayer id.

    :raises: LibcloudError when the name is unknown.
    """
    res = self.connection.request('SoftLayer_Location_Datacenter',
                                  'getDatacenters').object

    dcenter = find(res, lambda d: d['name'] == location_id)
    if not dcenter:
        raise LibcloudError(value='Invalid value %s' % location_id,
                            driver=self)
    return dcenter['id']

def _get_ipaddress(self, ip):
    """Look up the SoftLayer ip-address record for a dotted-quad ip."""
    svc_ipaddress = 'SoftLayer_Network_Subnet_IpAddress'

    return self.connection.request(svc_ipaddress, 'getByIpAddress',
                                   ip).object
def _to_lb_package(self, pkg):
    """Convert a raw SoftLayer package dict into an LBPackage.

    Packages without price information get a sentinel price_id of -1.
    """
    try:
        price_id = pkg['prices'][0]['id']
    except (KeyError, IndexError):
        # Was a bare `except:`; only a missing/empty 'prices' list is
        # expected here.
        price_id = -1

    capacity = int(pkg.get('capacity', 0))
    return LBPackage(id=pkg['id'], name=pkg['keyName'],
                     description=pkg['description'],
                     price_id=price_id, capacity=capacity)
def _to_service_template(self, ip, port):
    """Build a single member (service) entry for the SoftLayer LB model."""
    ip_id = self._get_ipaddress(ip)['id']
    return {
        'enabled': 1,  # mark the service active
        'port': port,  # back-end port the member listens on
        'ipAddressId': ip_id,
        # default health check (type 21)
        'healthChecks': [{'healthCheckTypeId': 21}],
        'groupReferences': [{'weight': 1}],
    }
def _to_balancer(self, lb):
    """Convert a SoftLayer loadbalancer dict into a LoadBalancer object."""
    ipaddress = lb['ipAddress']['ipAddress']

    extra = {}
    extra['connection_limit'] = lb['connectionLimit']
    extra['ssl_active'] = lb['sslActiveFlag']
    extra['ssl_enabled'] = lb['sslEnabledFlag']
    extra['ha'] = lb['highAvailabilityFlag']
    extra['datacenter'] = \
        lb['loadBalancerHardware'][0]['datacenter']['name']

    # In Softlayer, there could be multiple group of members (aka service
    # groups), so retrieve the first one
    vs = self._locate_service_group(lb, -1)
    if vs:
        port = vs['port']
        if vs['serviceGroups']:
            srvgrp = vs['serviceGroups'][0]
            routing_method = srvgrp['routingMethod']['keyname']
            routing_type = srvgrp['routingType']['keyname']
            try:
                extra['algorithm'] = self.\
                    _value_to_algorithm(routing_method)
            except (KeyError, LibcloudError):
                # Was a bare `except:`; an unknown routing method simply
                # omits 'algorithm' from extra instead of failing.
                pass
            extra['protocol'] = routing_type.lower()

    if not vs:
        # No service group configured yet.
        port = -1

    balancer = LoadBalancer(
        id=lb['id'],
        name='',
        state=State.UNKNOWN,
        ip=ipaddress,
        port=port,
        driver=self.connection.driver,
        extra=extra
    )
    return balancer
def _to_member(self, srv, balancer=None):
    """Convert a SoftLayer service dict into a libcloud Member."""
    extra = {
        'status': srv['status'],
        'enabled': srv['enabled'],
    }
    return Member(id=srv['id'],
                  ip=srv['ipAddress']['ipAddress'],
                  port=srv['port'],
                  balancer=balancer,
                  extra=extra)
| apache-2.0 |
mottosso/mindbender-setup | bin/pythonpath/raven/conf/defaults.py | 20 | 1428 | """
raven.conf.defaults
~~~~~~~~~~~~~~~~~~~
Represents the default values for all Sentry settings.
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import os
import os.path
import socket
# Package root directory (one level above raven/conf).
ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir))

# Default network timeout, in seconds.
TIMEOUT = 1

# TODO: this is specific to Django
CLIENT = 'raven.contrib.django.DjangoClient'

# Not all environments have access to socket module, for example Google App Engine
# Need to check to see if the socket module has ``gethostname``, if it doesn't we
# will set it to None and require it passed in to ``Client`` on initialization.
NAME = socket.gethostname() if hasattr(socket, 'gethostname') else None

# The maximum number of elements to store for a list-like structure.
MAX_LENGTH_LIST = 50

# The maximum length to store of a string-like structure.
MAX_LENGTH_STRING = 400

# Automatically log frame stacks from all ``logging`` messages.
AUTO_LOG_STACKS = False

# Collect locals variables
CAPTURE_LOCALS = True

# Client-side data processors to apply
PROCESSORS = (
    'raven.processors.SanitizePasswordsProcessor',
)

try:
    # Try for certifi first since they likely keep their bundle more up to date
    import certifi
    CA_BUNDLE = certifi.where()
except ImportError:
    # Fall back to the CA bundle shipped with the package.
    CA_BUNDLE = os.path.join(ROOT, 'data', 'cacert.pem')
SkyroverTech/SkyroverCF | lib/cppcheck-1.71/tools/rundaca2.py | 4 | 2790 | #!/usr/bin/python
import subprocess
import pexpect
import os
import shutil
import time
import sys
PASSWORD = ''
if len(sys.argv) == 2:
PASSWORD = sys.argv[1]
# Upload file to sourceforge web server using scp
def upload(file_to_upload, destination):
    """Copy *file_to_upload* to the web server via scp, best-effort.

    Uses pexpect to answer the scp password prompt with the module-level
    PASSWORD. All failures (missing prompt, network errors, timeouts) are
    deliberately swallowed so a failed upload never aborts the daca2 run.
    """
    if not os.path.isfile(file_to_upload):
        return
    try:
        child = pexpect.spawn(
            'scp ' + file_to_upload + ' upload@trac.cppcheck.net:' + destination)
        child.expect('upload@trac.cppcheck.net\'s password:')
        child.sendline(PASSWORD)
        child.interact()
    except IOError:
        pass
    except OSError:
        pass
    except pexpect.TIMEOUT:
        pass
def daca2(foldernum):
    """Run one daca2 pass over the package folder selected by *foldernum*.

    Pulls the latest cppcheck sources, rebuilds cppcheck, then runs both
    daca2.py and daca2-addons.py over the 'X' and 'libX' package folders
    and uploads the result files to the web server.
    """
    # Folders cycle 0-9a-z; foldernum is taken modulo the folder count.
    folders = '0123456789abcdefghijklmnopqrstuvwxyz'
    folder = folders[foldernum % len(folders)]

    print('Daca2 folder=' + folder)

    os.chdir(os.path.expanduser('~/cppcheck'))
    subprocess.call(['git', 'pull'])
    # Record the short hash of HEAD so results are tagged with the revision.
    p = subprocess.Popen(['git', 'show', '--format=%h'],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    comm = p.communicate()
    rev = comm[0]
    rev = rev[:rev.find('\n')]

    # compile cppcheck
    subprocess.call(['nice', 'make', 'SRCDIR=build', 'CFGDIR=' + os.path.expanduser('~/cppcheck/cfg'), 'CXXFLAGS=-O2', 'CPPFLAGS=-DMAXTIME=600'])
    subprocess.call(['mv', 'cppcheck', os.path.expanduser('~/daca2/cppcheck-O2')])

    # run cppcheck on the '<folder>' and 'lib<folder>' package sets
    subprocess.call(['rm', '-rf', os.path.expanduser('~/daca2/' + folder)])
    subprocess.call(['nice', '--adjustment=19', 'python', os.path.expanduser('~/cppcheck/tools/daca2.py'), folder, '--rev=' + rev])
    upload(os.path.expanduser('~/daca2/' + folder + '/results.txt'), 'evidente/results-' + folder + '.txt')
    subprocess.call(['rm', '-rf', os.path.expanduser('~/daca2/lib' + folder)])
    subprocess.call(['nice', '--adjustment=19', 'python', os.path.expanduser('~/cppcheck/tools/daca2.py'), 'lib' + folder, '--rev=' + rev])
    upload(os.path.expanduser('~/daca2/lib' + folder + '/results.txt'), 'evidente/results-lib' + folder + '.txt')

    # run cppcheck addons over the same two folders
    subprocess.call(['rm', '-rf', os.path.expanduser('~/daca2/' + folder)])
    subprocess.call(['nice', '--adjustment=19', 'python', os.path.expanduser('~/cppcheck/tools/daca2-addons.py'), folder, '--rev=' + rev])
    upload(os.path.expanduser('~/daca2/'+folder+'/results.txt'), 'evidente/addons-'+folder+'.txt')
    subprocess.call(['rm', '-rf', os.path.expanduser('~/daca2/lib' + folder)])
    subprocess.call(['nice', '--adjustment=19', 'python', os.path.expanduser('~/cppcheck/tools/daca2-addons.py'), 'lib' + folder, '--rev=' + rev])
    upload(os.path.expanduser('~/daca2/lib'+folder+'/results.txt'), 'evidente/addons-lib'+folder+'.txt')
# Run daca2 passes forever, advancing one package folder per iteration.
foldernum = 0
while True:
    daca2(foldernum)
    foldernum += 1
| gpl-3.0 |
shaggytwodope/qutebrowser | qutebrowser/browser/webengine/interceptor.py | 2 | 2630 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""A request interceptor taking care of adblocking and custom headers."""
# pylint: disable=no-name-in-module,import-error,useless-suppression
from PyQt5.QtWebEngineCore import QWebEngineUrlRequestInterceptor
# pylint: enable=no-name-in-module,import-error,useless-suppression
from qutebrowser.browser import shared
from qutebrowser.utils import utils, log
class RequestInterceptor(QWebEngineUrlRequestInterceptor):

    """Handle ad blocking and custom headers."""

    def __init__(self, host_blocker, parent=None):
        super().__init__(parent)
        # Host blocker consulted for every GET request.
        self._host_blocker = host_blocker

    def install(self, profile):
        """Install the interceptor on the given QWebEngineProfile."""
        profile.setRequestInterceptor(self)

    # Gets called in the IO thread -> showing crash window will fail
    @utils.prevent_exceptions(None)
    def interceptRequest(self, info):
        """Handle the given request.

        Reimplementing this virtual function and setting the interceptor on a
        profile makes it possible to intercept URL requests. This function is
        executed on the IO thread, and therefore running long tasks here will
        block networking.

        info contains the information about the URL request and will track
        internally whether its members have been altered.

        Args:
            info: QWebEngineUrlRequestInfo &info
        """
        # FIXME:qtwebengine only block ads for NavigationTypeOther?
        if (bytes(info.requestMethod()) == b'GET' and
                self._host_blocker.is_blocked(info.requestUrl())):
            log.webview.info("Request to {} blocked by host blocker.".format(
                info.requestUrl().host()))
            info.block(True)

        # Apply the user's configured custom headers to every request.
        for header, value in shared.custom_headers():
            info.setHttpHeader(header, value)
| gpl-3.0 |
M1cha/android_kernel_xiaomi_aries | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 11088 | 3246 | # Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
    """Return a dict whose missing keys auto-create nested autodicts."""
    return defaultdict(autodict)

# event name -> field name -> {'delim': str, 'values': {int: str}}
flag_fields = autodict()
# event name -> field name -> {'values': {int: str}}
symbolic_fields = autodict()

def define_flag_field(event_name, field_name, delim):
    """Register the delimiter used when printing a flag field."""
    flag_fields[event_name][field_name]['delim'] = delim

def define_flag_value(event_name, field_name, value, field_str):
    """Register the printable name for one bit value of a flag field."""
    flag_fields[event_name][field_name]['values'][value] = field_str

def define_symbolic_field(event_name, field_name):
    # nothing to do, really: the field is created lazily by
    # define_symbolic_value through the autodict.
    pass

def define_symbolic_value(event_name, field_name, value, field_str):
    """Register the printable name for one value of a symbolic field."""
    symbolic_fields[event_name][field_name]['values'][value] = field_str

def flag_str(event_name, field_name, value):
    """Render *value* as the delimited list of flag names defined for the
    field (empty string when nothing matches)."""
    string = ""

    if flag_fields[event_name][field_name]:
        print_delim = 0
        # sorted() instead of list.sort(): on Python 3 dict.keys() returns
        # a view with no .sort() method; sorted() behaves identically on
        # Python 2 and 3.
        keys = sorted(flag_fields[event_name][field_name]['values'].keys())
        for idx in keys:
            if not value and not idx:
                # Zero maps to the name defined for bit value 0, if any.
                string += flag_fields[event_name][field_name]['values'][idx]
                break
            if idx and (value & idx) == idx:
                if print_delim and flag_fields[event_name][field_name]['delim']:
                    string += " " + flag_fields[event_name][field_name]['delim'] + " "
                string += flag_fields[event_name][field_name]['values'][idx]
                print_delim = 1
                value &= ~idx

    return string

def symbol_str(event_name, field_name, value):
    """Render *value* as the symbolic name defined for the field, or ''."""
    string = ""

    if symbolic_fields[event_name][field_name]:
        # sorted() for Python 2/3 compatibility (see flag_str).
        keys = sorted(symbolic_fields[event_name][field_name]['values'].keys())
        for idx in keys:
            if not value and not idx:
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
            if (value == idx):
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break

    return string
trace_flags = { 0x00: "NONE", \
0x01: "IRQS_OFF", \
0x02: "IRQS_NOSUPPORT", \
0x04: "NEED_RESCHED", \
0x08: "HARDIRQ", \
0x10: "SOFTIRQ" }
def trace_flag_str(value):
string = ""
print_delim = 0
keys = trace_flags.keys()
for idx in keys:
if not value and not idx:
string += "NONE"
break
if idx and (value & idx) == idx:
if print_delim:
string += " | ";
string += trace_flags[idx]
print_delim = 1
value &= ~idx
return string
def taskState(state):
    """Map a scheduler task-state code to its mnemonic, or 'Unknown'."""
    return {
        0: "R",      # running
        1: "S",      # interruptible sleep
        2: "D",      # uninterruptible sleep
        64: "DEAD",
    }.get(state, "Unknown")
class EventHeaders:
    """Common per-record fields shared by every traced perf event."""

    def __init__(self, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm):
        # Copy the common_* trace fields onto short attribute names.
        self.cpu = common_cpu
        self.secs = common_secs
        self.nsecs = common_nsecs
        self.pid = common_pid
        self.comm = common_comm

    def ts(self):
        """Full event timestamp in nanoseconds."""
        return self.secs * 1000000000 + self.nsecs

    def ts_format(self):
        """Timestamp rendered as '<seconds>.<microseconds>'."""
        return "%d.%d" % (self.secs, int(self.nsecs / 1000))
| gpl-2.0 |
conejoninja/plugin.video.pelisalacarta | pelisalacarta/channels/stagevusite.py | 8 | 7919 | # -*- coding: iso-8859-1 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Canal para buscar en stagevu
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
import sys
import xbmc
import xbmcgui
import xbmcplugin
from core import scrapertools
from core import config
from core import logger
from platformcode.xbmc import xbmctools
from core.item import Item
from servers import servertools
from servers import vk
from pelisalacarta import buscador
__channel__ = "stagevusite"
__category__ = "G"
__type__ = "xbmc"
__title__ = "Stagevu"
__language__ = ""
DEBUG = config.get_setting("debug")
# Esto permite su ejecución en modo emulado
try:
pluginhandle = int( sys.argv[ 1 ] )
except:
pluginhandle = ""
logger.info("[stagevusite.py] init")
def mainlist(params,url,category):
    """Build the channel's root menu: a single "Buscar" (search) entry."""
    logger.info("[stagevusite.py] mainlist")

    # Add the entry to the XBMC listing.
    addfolder("Buscar","http://stagevu.com","search")

    # Label (top-right)...
    xbmcplugin.setPluginCategory( handle=int( sys.argv[ 1 ] ), category=category )

    # Disable sorting...
    xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_NONE )

    # End of directory...
    xbmcplugin.endOfDirectory( handle=int( sys.argv[ 1 ] ), succeeded=True )
def search(params,url,category):
    """Prompt the user for a query with the on-screen keyboard and list
    the StageVu search results."""
    # NOTE(review): log tag says "list" although this is search().
    logger.info("[stagevusite.py] list")
    keyboard = xbmc.Keyboard('')
    keyboard.doModal()
    if (keyboard.isConfirmed()):
        tecleado = keyboard.getText()
        if len(tecleado)>0:
            # URL-encode spaces for the query string.
            tecleado = tecleado.replace(" ", "+")
            searchUrl = 'http://stagevu.com/search?for='+tecleado+'&in=Videos&x=0&y=0&perpage=25&page=2'
            list(params,searchUrl,category)
def performsearch(texto):
    """Run a StageVu search for *texto* and return result tuples for the
    global search framework:
    [channel, action, category, title, url, thumbnail, plot]."""
    logger.info("[stagevusite.py] performsearch")

    url = 'http://stagevu.com/search?for='+texto+'&in=Videos&x=0&y=0&perpage=25&page=2'

    # Download the page
    data = scrapertools.cachePage(url)

    # Extract the entries (folders)
    patronvideos = '<div class="result[^>]+>[^<]+<div class="resultcont">[^<]+<h2><a href="([^"]+)">([^<]+)</a>.*?<img src="([^"]+)".*?</a>(.*?)</div>'
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)

    resultados = []
    for match in matches:
        # Title (re-encode to the plugin's iso-8859-1; fall back to raw bytes)
        try:
            scrapedtitle = unicode( match[1], "utf-8" ).encode("iso-8859-1")
        except:
            scrapedtitle = match[1]
        # URL
        scrapedurl = match[0]
        # Thumbnail
        scrapedthumbnail = match[2]
        # Process the remaining captured text as the plot
        try:
            scrapedplot = unicode( match[3], "utf-8" ).encode("iso-8859-1")
        except:
            scrapedplot = match[3]
        scrapedplot = scrapedplot.replace("\\t","")
        scrapedplot = scrapedplot.replace("<p>"," ")
        scrapedplot = scrapedplot.replace("</p>"," ")
        scrapedplot = scrapedplot.strip()
        if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")

        # Add to the result list
        resultados.append( [__channel__ , "play" , "buscador" , scrapedtitle , scrapedurl , scrapedthumbnail, scrapedplot ] )

    return resultados
def list(params,url,category):
    """Scrape a stagevu.com results page and add one playable item per match.

    NOTE: the function name shadows the builtin list(); kept because other
    functions in this file (search) call it by this name.
    """
    logger.info("[stagevusite.py] list")
    # Download the page (cached)
    data = scrapertools.cachePage(url)
    #logger.info(data)
    # Extract the entries (folders): groups are (url, title, thumbnail, plot html)
    patronvideos = '<div class="result[^>]+>[^<]+<div class="resultcont">[^<]+<h2><a href="([^"]+)">([^<]+)</a>.*?<img src="([^"]+)".*?</a>(.*?)</div>'
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)
    for match in matches:
        # Title: re-encode to latin-1 when possible (Python 2 unicode handling)
        try:
            scrapedtitle = unicode( match[1], "utf-8" ).encode("iso-8859-1")
        except:
            scrapedtitle = match[1]
        # URL
        scrapedurl = match[0]
        # Thumbnail
        scrapedthumbnail = match[2]
        # Process the rest (description): strip markup and escaped tabs
        try:
            scrapeddescription = unicode( match[3], "utf-8" ).encode("iso-8859-1")
        except:
            scrapeddescription = match[3]
        scrapeddescription = scrapeddescription.replace("\\t","")
        scrapeddescription = scrapeddescription.replace("<p>"," ")
        scrapeddescription = scrapeddescription.replace("</p>"," ")
        scrapeddescription = scrapeddescription.strip()
        # Debugging
        if (DEBUG):
            logger.info("scrapedtitle="+scrapedtitle)
            logger.info("scrapedurl="+scrapedurl)
            logger.info("scrapedthumbnail="+scrapedthumbnail)
        # Add to the XBMC listing
        addthumbnailvideo(scrapedtitle,scrapedurl,scrapedthumbnail,scrapeddescription,category,"Stagevu")
    # Label (top-right)...
    xbmcplugin.setPluginCategory( handle=int( sys.argv[ 1 ] ), category=category )
    # Disable sorting...
    xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_NONE )
    # End of directory...
    xbmcplugin.endOfDirectory( handle=int( sys.argv[ 1 ] ), succeeded=True )
def play(params,url,category):
    """Resolve the currently focused list item's metadata and start playback.

    Reads title/thumbnail/plot from the selected XBMC ListItem and delegates
    the actual playback to xbmctools.play_video() for the "stagevu" server.
    """
    logger.info("[stagevusite.py] play")
    item_title = unicode( xbmc.getInfoLabel( "ListItem.Title" ), "utf-8" )
    item_thumb = xbmc.getInfoImage( "ListItem.Thumb" )
    item_plot = unicode( xbmc.getInfoLabel( "ListItem.Plot" ), "utf-8" )
    logger.info("[stagevusite.py] thumbnail="+item_thumb)
    xbmctools.play_video(__channel__,"stagevu",url,category,item_title,item_thumb,item_plot)
def addfolder(nombre,url,accion):
    """Append a navigable folder entry to the XBMC directory listing.

    The plugin URL encodes the channel, the action to run when the folder is
    opened, and the (quoted) name and target URL.
    """
    logger.info('[stagevusite.py] addfolder( "'+nombre+'" , "' + url + '" , "'+accion+'")"')
    li = xbmcgui.ListItem( nombre , iconImage="DefaultFolder.png")
    plugin_url = '%s?channel=stagevusite&action=%s&category=%s&url=%s' % ( sys.argv[ 0 ] , accion , urllib.quote_plus(nombre) , urllib.quote_plus(url) )
    xbmcplugin.addDirectoryItem( handle = int(sys.argv[ 1 ]), url = plugin_url , listitem=li, isFolder=True)
def addvideo(nombre,url,category,server):
    """Append a playable (non-folder) video entry to the XBMC directory listing."""
    logger.info('[stagevusite.py] addvideo( "'+nombre+'" , "' + url + '" , "'+server+'")"')
    li = xbmcgui.ListItem( nombre, iconImage="DefaultVideo.png" )
    # The name doubles as the plot since no description is available here.
    li.setInfo( "video", { "Title" : nombre, "Plot" : nombre } )
    plugin_url = '%s?channel=stagevusite&action=play&category=%s&url=%s&server=%s' % ( sys.argv[ 0 ] , category , urllib.quote_plus(url) , server )
    xbmcplugin.addDirectoryItem( handle=int(sys.argv[ 1 ]), url=plugin_url, listitem=li, isFolder=False)
def addthumbnailvideo(nombre,url,thumbnail,descripcion,category,server):
    """Append a playable video entry with thumbnail and plot to the XBMC listing.

    Like addvideo() but carries a thumbnail image and a separate description.
    """
    # Bug fix: the log line previously said "addvideo" (copy/paste error).
    logger.info('[stagevusite.py] addthumbnailvideo( "'+nombre+'" , "' + url + '" , "'+thumbnail+'" , "'+server+'")"')
    listitem = xbmcgui.ListItem( nombre, iconImage="DefaultVideo.png", thumbnailImage=thumbnail )
    listitem.setInfo( "video", { "Title" : nombre, "Plot" : descripcion } )
    # NOTE(review): unlike addvideo(), url is embedded without urllib.quote_plus();
    # confirm whether callers pass already-encoded URLs before changing this.
    itemurl = '%s?channel=stagevusite&action=play&category=%s&url=%s&server=%s' % ( sys.argv[ 0 ] , category , url , server )
    xbmcplugin.addDirectoryItem( handle=int(sys.argv[ 1 ]), url=itemurl, listitem=listitem, isFolder=False)
def addthumbnailfolder( scrapedtitle , scrapedurl , scrapedthumbnail , accion ):
    """Append a navigable folder entry with a thumbnail to the XBMC listing."""
    logger.info('[stagevusite.py] addthumbnailfolder( "'+scrapedtitle+'" , "' + scrapedurl + '" , "'+scrapedthumbnail+'" , "'+accion+'")"')
    li = xbmcgui.ListItem( scrapedtitle, iconImage="DefaultFolder.png", thumbnailImage=scrapedthumbnail )
    plugin_url = '%s?channel=stagevusite&action=%s&category=%s&url=%s' % ( sys.argv[ 0 ] , accion , urllib.quote_plus( scrapedtitle ) , urllib.quote_plus( scrapedurl ) )
    xbmcplugin.addDirectoryItem( handle = int(sys.argv[ 1 ]), url = plugin_url , listitem=li, isFolder=True)
| gpl-3.0 |
Aravinthu/odoo | setup/odoo-wsgi.example.py | 36 | 1723 | # WSGI Handler sample configuration file.
#
# Change the appropriate settings below, in order to provide the parameters
# that would normally be passed in the command-line.
# (at least conf['addons_path'])
#
# For generic wsgi handlers a global application is defined.
# For uwsgi this should work:
# $ uwsgi_python --http :9090 --pythonpath . --wsgi-file openerp-wsgi.py
#
# For gunicorn additional globals need to be defined in the Gunicorn section.
# Then the following command should run:
# $ gunicorn odoo:service.wsgi_server.application -c openerp-wsgi.py
import odoo

#----------------------------------------------------------
# Common
#----------------------------------------------------------
# Enable inter-process signaling so caches are invalidated across workers.
odoo.multi_process = True # Nah!

# Equivalent of --load command-line option
odoo.conf.server_wide_modules = ['web']
conf = odoo.tools.config

# Path to the OpenERP Addons repository (comma-separated for
# multiple locations)
conf['addons_path'] = '../../addons/trunk,../../web/trunk/addons'

# Optional database config if not using local socket
#conf['db_name'] = 'mycompany'
#conf['db_host'] = 'localhost'
#conf['db_user'] = 'foo'
#conf['db_port'] = 5432
#conf['db_password'] = 'secret'

#----------------------------------------------------------
# Generic WSGI handlers application
#----------------------------------------------------------
# `application` is the conventional name WSGI servers (uwsgi, gunicorn) look for.
application = odoo.service.wsgi_server.application

odoo.service.server.load_server_wide_modules()

#----------------------------------------------------------
# Gunicorn
#----------------------------------------------------------
# Standard OpenERP XML-RPC port is 8069
bind = '127.0.0.1:8069'
pidfile = '.gunicorn.pid'
workers = 4
timeout = 240
max_requests = 2000
| agpl-3.0 |
40223136/-2015cd_midterm | static/Brython3.1.0-20150301-090019/Lib/fractions.py | 722 | 23203 | # Originally contributed by Sjoerd Mullender.
# Significantly modified by Jeffrey Yasskin <jyasskin at gmail.com>.
"""Fraction, infinite-precision, real numbers."""
from decimal import Decimal
import math
import numbers
import operator
import re
import sys
__all__ = ['Fraction', 'gcd']
def gcd(a, b):
    """Return the Greatest Common Divisor of a and b.

    Uses the Euclidean algorithm. Unless b == 0, the result carries the
    same sign as b (so that dividing b by it yields a positive quotient);
    gcd(a, 0) returns a unchanged.
    """
    x, y = a, b
    while y != 0:
        x, y = y, x % y
    return x
# Constants related to the hash implementation; hash(x) is based
# on the reduction of x modulo the prime _PyHASH_MODULUS.
_PyHASH_MODULUS = sys.hash_info.modulus
# Value to be used for rationals that reduce to infinity modulo
# _PyHASH_MODULUS.
_PyHASH_INF = sys.hash_info.inf
_RATIONAL_FORMAT = re.compile(r"""
\A\s* # optional whitespace at the start, then
(?P<sign>[-+]?) # an optional sign, then
(?=\d|\.\d) # lookahead for digit or .digit
(?P<num>\d*) # numerator (possibly empty)
(?: # followed by
(?:/(?P<denom>\d+))? # an optional denominator
| # or
(?:\.(?P<decimal>\d*))? # an optional fractional part
(?:E(?P<exp>[-+]?\d+))? # and optional exponent
)
\s*\Z # and optional whitespace to finish
""", re.VERBOSE | re.IGNORECASE)
class Fraction(numbers.Rational):
    """This class implements rational numbers.

    In the two-argument form of the constructor, Fraction(8, 6) will
    produce a rational number equivalent to 4/3. Both arguments must
    be Rational. The numerator defaults to 0 and the denominator
    defaults to 1 so that Fraction(3) == 3 and Fraction() == 0.

    Fractions can also be constructed from:

      - numeric strings similar to those accepted by the
        float constructor (for example, '-2.3' or '1e10')

      - strings of the form '123/456'

      - float and Decimal instances

      - other Rational instances (including integers)

    """

    __slots__ = ('_numerator', '_denominator')

    # We're immutable, so use __new__ not __init__
    def __new__(cls, numerator=0, denominator=None):
        """Constructs a Rational.

        Takes a string like '3/2' or '1.5', another Rational instance, a
        numerator/denominator pair, or a float.

        Examples
        --------

        >>> Fraction(10, -8)
        Fraction(-5, 4)
        >>> Fraction(Fraction(1, 7), 5)
        Fraction(1, 35)
        >>> Fraction(Fraction(1, 7), Fraction(2, 3))
        Fraction(3, 14)
        >>> Fraction('314')
        Fraction(314, 1)
        >>> Fraction('-35/4')
        Fraction(-35, 4)
        >>> Fraction('3.1415') # conversion from numeric string
        Fraction(6283, 2000)
        >>> Fraction('-47e-2') # string may include a decimal exponent
        Fraction(-47, 100)
        >>> Fraction(1.47)  # direct construction from float (exact conversion)
        Fraction(6620291452234629, 4503599627370496)
        >>> Fraction(2.25)
        Fraction(9, 4)
        >>> Fraction(Decimal('1.47'))
        Fraction(147, 100)

        """
        self = super(Fraction, cls).__new__(cls)

        if denominator is None:
            if isinstance(numerator, numbers.Rational):
                self._numerator = numerator.numerator
                self._denominator = numerator.denominator
                return self

            elif isinstance(numerator, float):
                # Exact conversion from float
                value = Fraction.from_float(numerator)
                self._numerator = value._numerator
                self._denominator = value._denominator
                return self

            elif isinstance(numerator, Decimal):
                value = Fraction.from_decimal(numerator)
                self._numerator = value._numerator
                self._denominator = value._denominator
                return self

            elif isinstance(numerator, str):
                # Handle construction from strings.
                m = _RATIONAL_FORMAT.match(numerator)
                if m is None:
                    raise ValueError('Invalid literal for Fraction: %r' %
                                     numerator)
                numerator = int(m.group('num') or '0')
                denom = m.group('denom')
                if denom:
                    denominator = int(denom)
                else:
                    denominator = 1
                    decimal = m.group('decimal')
                    if decimal:
                        scale = 10**len(decimal)
                        numerator = numerator * scale + int(decimal)
                        denominator *= scale
                    exp = m.group('exp')
                    if exp:
                        exp = int(exp)
                        if exp >= 0:
                            numerator *= 10**exp
                        else:
                            denominator *= 10**-exp
                if m.group('sign') == '-':
                    numerator = -numerator

            else:
                raise TypeError("argument should be a string "
                                "or a Rational instance")

        elif (isinstance(numerator, numbers.Rational) and
            isinstance(denominator, numbers.Rational)):
            numerator, denominator = (
                numerator.numerator * denominator.denominator,
                denominator.numerator * numerator.denominator
                )
        else:
            raise TypeError("both arguments should be "
                            "Rational instances")

        if denominator == 0:
            raise ZeroDivisionError('Fraction(%s, 0)' % numerator)
        # gcd() carries the sign of the denominator, so dividing both parts
        # by g reduces to lowest terms AND makes the denominator positive.
        g = gcd(numerator, denominator)
        self._numerator = numerator // g
        self._denominator = denominator // g
        return self

    @classmethod
    def from_float(cls, f):
        """Converts a finite float to a rational number, exactly.

        Beware that Fraction.from_float(0.3) != Fraction(3, 10).

        """
        if isinstance(f, numbers.Integral):
            return cls(f)
        elif not isinstance(f, float):
            raise TypeError("%s.from_float() only takes floats, not %r (%s)" %
                            (cls.__name__, f, type(f).__name__))
        if math.isnan(f):
            raise ValueError("Cannot convert %r to %s." % (f, cls.__name__))
        if math.isinf(f):
            raise OverflowError("Cannot convert %r to %s." % (f, cls.__name__))
        return cls(*f.as_integer_ratio())

    @classmethod
    def from_decimal(cls, dec):
        """Converts a finite Decimal instance to a rational number, exactly."""
        from decimal import Decimal
        if isinstance(dec, numbers.Integral):
            dec = Decimal(int(dec))
        elif not isinstance(dec, Decimal):
            raise TypeError(
                "%s.from_decimal() only takes Decimals, not %r (%s)" %
                (cls.__name__, dec, type(dec).__name__))
        if dec.is_infinite():
            raise OverflowError(
                "Cannot convert %s to %s." % (dec, cls.__name__))
        if dec.is_nan():
            raise ValueError("Cannot convert %s to %s." % (dec, cls.__name__))
        sign, digits, exp = dec.as_tuple()
        digits = int(''.join(map(str, digits)))
        if sign:
            digits = -digits
        if exp >= 0:
            return cls(digits * 10 ** exp)
        else:
            return cls(digits, 10 ** -exp)

    def limit_denominator(self, max_denominator=1000000):
        """Closest Fraction to self with denominator at most max_denominator.

        >>> Fraction('3.141592653589793').limit_denominator(10)
        Fraction(22, 7)
        >>> Fraction('3.141592653589793').limit_denominator(100)
        Fraction(311, 99)
        >>> Fraction(4321, 8765).limit_denominator(10000)
        Fraction(4321, 8765)

        """
        # Algorithm notes: For any real number x, define a *best upper
        # approximation* to x to be a rational number p/q such that:
        #
        #   (1) p/q >= x, and
        #   (2) if p/q > r/s >= x then s > q, for any rational r/s.
        #
        # Define *best lower approximation* similarly.  Then it can be
        # proved that a rational number is a best upper or lower
        # approximation to x if, and only if, it is a convergent or
        # semiconvergent of the (unique shortest) continued fraction
        # associated to x.
        #
        # To find a best rational approximation with denominator <= M,
        # we find the best upper and lower approximations with
        # denominator <= M and take whichever of these is closer to x.
        # In the event of a tie, the bound with smaller denominator is
        # chosen.  If both denominators are equal (which can happen
        # only when max_denominator == 1 and self is midway between
        # two integers) the lower bound---i.e., the floor of self, is
        # taken.
        if max_denominator < 1:
            raise ValueError("max_denominator should be at least 1")
        if self._denominator <= max_denominator:
            return Fraction(self)

        p0, q0, p1, q1 = 0, 1, 1, 0
        n, d = self._numerator, self._denominator
        while True:
            a = n//d
            q2 = q0+a*q1
            if q2 > max_denominator:
                break
            p0, q0, p1, q1 = p1, q1, p0+a*p1, q2
            n, d = d, n-a*d

        k = (max_denominator-q0)//q1
        bound1 = Fraction(p0+k*p1, q0+k*q1)
        bound2 = Fraction(p1, q1)
        if abs(bound2 - self) <= abs(bound1-self):
            return bound2
        else:
            return bound1

    @property
    def numerator(a):
        return a._numerator

    @property
    def denominator(a):
        return a._denominator

    def __repr__(self):
        """repr(self)"""
        return ('Fraction(%s, %s)' % (self._numerator, self._denominator))

    def __str__(self):
        """str(self)"""
        if self._denominator == 1:
            return str(self._numerator)
        else:
            return '%s/%s' % (self._numerator, self._denominator)

    def _operator_fallbacks(monomorphic_operator, fallback_operator):
        """Generates forward and reverse operators given a purely-rational
        operator and a function from the operator module.

        Use this like:
        __op__, __rop__ = _operator_fallbacks(just_rational_op, operator.op)

        In general, we want to implement the arithmetic operations so
        that mixed-mode operations either call an implementation whose
        author knew about the types of both arguments, or convert both
        to the nearest built in type and do the operation there. In
        Fraction, that means that we define __add__ and __radd__ as:

            def __add__(self, other):
                # Both types have numerators/denominator attributes,
                # so do the operation directly
                if isinstance(other, (int, Fraction)):
                    return Fraction(self.numerator * other.denominator +
                                    other.numerator * self.denominator,
                                    self.denominator * other.denominator)
                # float and complex don't have those operations, but we
                # know about those types, so special case them.
                elif isinstance(other, float):
                    return float(self) + other
                elif isinstance(other, complex):
                    return complex(self) + other
                # Let the other type take over.
                return NotImplemented

            def __radd__(self, other):
                # radd handles more types than add because there's
                # nothing left to fall back to.
                if isinstance(other, numbers.Rational):
                    return Fraction(self.numerator * other.denominator +
                                    other.numerator * self.denominator,
                                    self.denominator * other.denominator)
                elif isinstance(other, Real):
                    return float(other) + float(self)
                elif isinstance(other, Complex):
                    return complex(other) + complex(self)
                return NotImplemented


        There are 5 different cases for a mixed-type addition on
        Fraction. I'll refer to all of the above code that doesn't
        refer to Fraction, float, or complex as "boilerplate". 'r'
        will be an instance of Fraction, which is a subtype of
        Rational (r : Fraction <: Rational), and b : B <:
        Complex. The first three involve 'r + b':

            1. If B <: Fraction, int, float, or complex, we handle
               that specially, and all is well.
            2. If Fraction falls back to the boilerplate code, and it
               were to return a value from __add__, we'd miss the
               possibility that B defines a more intelligent __radd__,
               so the boilerplate should return NotImplemented from
               __add__. In particular, we don't handle Rational
               here, even though we could get an exact answer, in case
               the other type wants to do something special.
            3. If B <: Fraction, Python tries B.__radd__ before
               Fraction.__add__. This is ok, because it was
               implemented with knowledge of Fraction, so it can
               handle those instances before delegating to Real or
               Complex.

        The next two situations describe 'b + r'. We assume that b
        didn't know about Fraction in its implementation, and that it
        uses similar boilerplate code:

            4. If B <: Rational, then __radd_ converts both to the
               builtin rational type (hey look, that's us) and
               proceeds.
            5. Otherwise, __radd__ tries to find the nearest common
               base ABC, and fall back to its builtin type. Since this
               class doesn't subclass a concrete type, there's no
               implementation to fall back to, so we need to try as
               hard as possible to return an actual value, or the user
               will get a TypeError.

        """
        def forward(a, b):
            if isinstance(b, (int, Fraction)):
                return monomorphic_operator(a, b)
            elif isinstance(b, float):
                return fallback_operator(float(a), b)
            elif isinstance(b, complex):
                return fallback_operator(complex(a), b)
            else:
                return NotImplemented
        # Make the generated methods introspect like hand-written dunders.
        forward.__name__ = '__' + fallback_operator.__name__ + '__'
        forward.__doc__ = monomorphic_operator.__doc__

        def reverse(b, a):
            if isinstance(a, numbers.Rational):
                # Includes ints.
                return monomorphic_operator(a, b)
            elif isinstance(a, numbers.Real):
                return fallback_operator(float(a), float(b))
            elif isinstance(a, numbers.Complex):
                return fallback_operator(complex(a), complex(b))
            else:
                return NotImplemented
        reverse.__name__ = '__r' + fallback_operator.__name__ + '__'
        reverse.__doc__ = monomorphic_operator.__doc__

        return forward, reverse

    def _add(a, b):
        """a + b"""
        return Fraction(a.numerator * b.denominator +
                        b.numerator * a.denominator,
                        a.denominator * b.denominator)

    __add__, __radd__ = _operator_fallbacks(_add, operator.add)

    def _sub(a, b):
        """a - b"""
        return Fraction(a.numerator * b.denominator -
                        b.numerator * a.denominator,
                        a.denominator * b.denominator)

    __sub__, __rsub__ = _operator_fallbacks(_sub, operator.sub)

    def _mul(a, b):
        """a * b"""
        return Fraction(a.numerator * b.numerator, a.denominator * b.denominator)

    __mul__, __rmul__ = _operator_fallbacks(_mul, operator.mul)

    def _div(a, b):
        """a / b"""
        return Fraction(a.numerator * b.denominator,
                        a.denominator * b.numerator)

    __truediv__, __rtruediv__ = _operator_fallbacks(_div, operator.truediv)

    def __floordiv__(a, b):
        """a // b"""
        return math.floor(a / b)

    def __rfloordiv__(b, a):
        """a // b"""
        return math.floor(a / b)

    def __mod__(a, b):
        """a % b"""
        div = a // b
        return a - b * div

    def __rmod__(b, a):
        """a % b"""
        div = a // b
        return a - b * div

    def __pow__(a, b):
        """a ** b

        If b is not an integer, the result will be a float or complex
        since roots are generally irrational. If b is an integer, the
        result will be rational.

        """
        if isinstance(b, numbers.Rational):
            if b.denominator == 1:
                power = b.numerator
                if power >= 0:
                    return Fraction(a._numerator ** power,
                                    a._denominator ** power)
                else:
                    return Fraction(a._denominator ** -power,
                                    a._numerator ** -power)
            else:
                # A fractional power will generally produce an
                # irrational number.
                return float(a) ** float(b)
        else:
            return float(a) ** b

    def __rpow__(b, a):
        """a ** b"""
        # Here b is the Fraction exponent; a is the left-hand base.
        if b._denominator == 1 and b._numerator >= 0:
            # If a is an int, keep it that way if possible.
            return a ** b._numerator

        if isinstance(a, numbers.Rational):
            return Fraction(a.numerator, a.denominator) ** b

        if b._denominator == 1:
            return a ** b._numerator

        return a ** float(b)

    def __pos__(a):
        """+a: Coerces a subclass instance to Fraction"""
        return Fraction(a._numerator, a._denominator)

    def __neg__(a):
        """-a"""
        return Fraction(-a._numerator, a._denominator)

    def __abs__(a):
        """abs(a)"""
        return Fraction(abs(a._numerator), a._denominator)

    def __trunc__(a):
        """trunc(a)"""
        if a._numerator < 0:
            return -(-a._numerator // a._denominator)
        else:
            return a._numerator // a._denominator

    def __floor__(a):
        """Will be math.floor(a) in 3.0."""
        return a.numerator // a.denominator

    def __ceil__(a):
        """Will be math.ceil(a) in 3.0."""
        # The negations cleverly convince floordiv to return the ceiling.
        return -(-a.numerator // a.denominator)

    def __round__(self, ndigits=None):
        """Will be round(self, ndigits) in 3.0.

        Rounds half toward even.
        """
        if ndigits is None:
            floor, remainder = divmod(self.numerator, self.denominator)
            if remainder * 2 < self.denominator:
                return floor
            elif remainder * 2 > self.denominator:
                return floor + 1
            # Deal with the half case:
            elif floor % 2 == 0:
                return floor
            else:
                return floor + 1
        shift = 10**abs(ndigits)
        # See _operator_fallbacks.forward to check that the results of
        # these operations will always be Fraction and therefore have
        # round().
        if ndigits > 0:
            return Fraction(round(self * shift), shift)
        else:
            return Fraction(round(self / shift) * shift)

    def __hash__(self):
        """hash(self)"""

        # XXX since this method is expensive, consider caching the result

        # In order to make sure that the hash of a Fraction agrees
        # with the hash of a numerically equal integer, float or
        # Decimal instance, we follow the rules for numeric hashes
        # outlined in the documentation.  (See library docs, 'Built-in
        # Types').

        # dinv is the inverse of self._denominator modulo the prime
        # _PyHASH_MODULUS, or 0 if self._denominator is divisible by
        # _PyHASH_MODULUS.
        dinv = pow(self._denominator, _PyHASH_MODULUS - 2, _PyHASH_MODULUS)
        if not dinv:
            hash_ = _PyHASH_INF
        else:
            hash_ = abs(self._numerator) * dinv % _PyHASH_MODULUS
        result = hash_ if self >= 0 else -hash_
        return -2 if result == -1 else result

    def __eq__(a, b):
        """a == b"""
        if isinstance(b, numbers.Rational):
            return (a._numerator == b.numerator and
                    a._denominator == b.denominator)
        if isinstance(b, numbers.Complex) and b.imag == 0:
            b = b.real
        if isinstance(b, float):
            if math.isnan(b) or math.isinf(b):
                # comparisons with an infinity or nan should behave in
                # the same way for any finite a, so treat a as zero.
                return 0.0 == b
            else:
                return a == a.from_float(b)
        else:
            # Since a doesn't know how to compare with b, let's give b
            # a chance to compare itself with a.
            return NotImplemented

    def _richcmp(self, other, op):
        """Helper for comparison operators, for internal use only.

        Implement comparison between a Rational instance `self`, and
        either another Rational instance or a float `other`.  If
        `other` is not a Rational instance or a float, return
        NotImplemented. `op` should be one of the six standard
        comparison operators.

        """
        # convert other to a Rational instance where reasonable.
        if isinstance(other, numbers.Rational):
            return op(self._numerator * other.denominator,
                      self._denominator * other.numerator)
        if isinstance(other, float):
            if math.isnan(other) or math.isinf(other):
                return op(0.0, other)
            else:
                return op(self, self.from_float(other))
        else:
            return NotImplemented

    def __lt__(a, b):
        """a < b"""
        return a._richcmp(b, operator.lt)

    def __gt__(a, b):
        """a > b"""
        return a._richcmp(b, operator.gt)

    def __le__(a, b):
        """a <= b"""
        return a._richcmp(b, operator.le)

    def __ge__(a, b):
        """a >= b"""
        return a._richcmp(b, operator.ge)

    def __bool__(a):
        """a != 0"""
        return a._numerator != 0

    # support for pickling, copy, and deepcopy

    def __reduce__(self):
        return (self.__class__, (str(self),))

    def __copy__(self):
        if type(self) == Fraction:
            return self     # I'm immutable; therefore I am my own clone
        return self.__class__(self._numerator, self._denominator)

    def __deepcopy__(self, memo):
        if type(self) == Fraction:
            return self     # My components are also immutable
        return self.__class__(self._numerator, self._denominator)
| gpl-3.0 |
marquesarthur/mahadeva_agenda | oauth2client/clientsecrets.py | 226 | 4405 | # Copyright (C) 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for reading OAuth 2.0 client secret files.
A client_secrets.json file contains all the information needed to interact with
an OAuth 2.0 protected service.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
from anyjson import simplejson
# Properties that make a client_secrets.json file valid.
# Two client types are recognised; each lists the keys that must be present
# ('required') and the keys that must be real values, not "[[...]]"
# placeholders ('string').
TYPE_WEB = 'web'
TYPE_INSTALLED = 'installed'

VALID_CLIENT = {
    TYPE_WEB: {
        'required': [
            'client_id',
            'client_secret',
            'redirect_uris',
            'auth_uri',
            'token_uri',
        ],
        'string': [
            'client_id',
            'client_secret',
        ],
    },
    TYPE_INSTALLED: {
        'required': [
            'client_id',
            'client_secret',
            'redirect_uris',
            'auth_uri',
            'token_uri',
        ],
        'string': [
            'client_id',
            'client_secret',
        ],
    },
}
class Error(Exception):
    """Base error for this module."""
class InvalidClientSecretsError(Error):
    """Format of ClientSecrets file is invalid."""
def _validate_clientsecrets(obj):
    """Validate a parsed client_secrets structure.

    Args:
        obj: dict, the deserialized JSON content of a client_secrets file.
            Must contain exactly one top-level key ('web' or 'installed').

    Returns:
        (client_type, client_info) tuple.

    Raises:
        InvalidClientSecretsError: if the structure, client type, or any
            required/placeholder property is invalid.
    """
    if obj is None or len(obj) != 1:
        raise InvalidClientSecretsError('Invalid file format.')
    # next(iter(...)) works on both Python 2 and 3; dict.keys()[0] is Py2-only.
    client_type = next(iter(obj))
    if client_type not in VALID_CLIENT:
        raise InvalidClientSecretsError('Unknown client type: %s.' % client_type)
    client_info = obj[client_type]
    for prop_name in VALID_CLIENT[client_type]['required']:
        if prop_name not in client_info:
            raise InvalidClientSecretsError(
                'Missing property "%s" in a client type of "%s".' % (prop_name,
                                                                     client_type))
    for prop_name in VALID_CLIENT[client_type]['string']:
        # A leading '[[' marks an unreplaced template placeholder.
        if client_info[prop_name].startswith('[['):
            raise InvalidClientSecretsError(
                'Property "%s" is not configured.' % prop_name)
    return client_type, client_info
def load(fp):
    """Parse client secrets from an open file object and validate them."""
    return _validate_clientsecrets(simplejson.load(fp))
def loads(s):
    """Parse client secrets from a JSON string and validate them."""
    return _validate_clientsecrets(simplejson.loads(s))
def _loadfile(filename):
    """Read and validate a client_secrets file from disk.

    Raises:
        InvalidClientSecretsError: if the file cannot be read or fails
            validation.
    """
    try:
        # open() + context manager replaces the Python-2-only file() builtin
        # and guarantees the handle is closed even if json parsing raises.
        with open(filename, 'r') as fp:
            obj = simplejson.load(fp)
    except IOError:
        raise InvalidClientSecretsError('File not found: "%s"' % filename)
    return _validate_clientsecrets(obj)
def loadfile(filename, cache=None):
    """Loading of client_secrets JSON file, optionally backed by a cache.

    Typical cache storage would be App Engine memcache service,
    but you can pass in any other cache client that implements
    these methods:
      - get(key, namespace=ns)
      - set(key, value, namespace=ns)

    Usage:
      # without caching
      client_type, client_info = loadfile('secrets.json')
      # using App Engine memcache service
      from google.appengine.api import memcache
      client_type, client_info = loadfile('secrets.json', cache=memcache)

    Args:
      filename: string, Path to a client_secrets.json file on a filesystem.
      cache: An optional cache service client that implements get() and set()
        methods. If not specified, the file is always being loaded from
        a filesystem.

    Raises:
      InvalidClientSecretsError: In case of a validation error or some
        I/O failure. Can happen only on cache miss.

    Returns:
      (client_type, client_info) tuple, as _loadfile() normally would.
      JSON contents is validated only during first load. Cache hits are not
      validated.
    """
    _SECRET_NAMESPACE = 'oauth2client:secrets#ns'

    if not cache:
        return _loadfile(filename)

    obj = cache.get(filename, namespace=_SECRET_NAMESPACE)
    if obj is None:
        client_type, client_info = _loadfile(filename)
        obj = {client_type: client_info}
        cache.set(filename, obj, namespace=_SECRET_NAMESPACE)

    # dict.iteritems().next() is Python-2-only; next(iter(...)) works on 2 and 3.
    return next(iter(obj.items()))
| gpl-2.0 |
homecon/homecon | homecon/core/__init__.py | 2 | 3191 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# import sys
# import os
# import importlib
#
#
# db = None
# measurements_db = None
#
# event = None
# states = None
# components = None
# plugins = None
# websocket = None
#
#
# def initialize(dbpath='{}/lib/homecon/'.format(sys.prefix), dbname='homecon'):
# global db, measurements_db, event, states, components, plugins, websocket
#
# corepackage = 'homecon.core'
#
# ############################################################################
# # database
# ############################################################################
# if '{}.database'.format(corepackage) in sys.modules:
# importlib.reload(database)
# else:
# importlib.import_module('.database',package=corepackage)
#
# db = database.Database(database=os.path.join(dbpath,'{}.db'.format(dbname)))
# measurements_db = database.Database(database=os.path.join(dbpath,'{}_measurements.db'.format(dbname)))
# database.db = db
# database.measurements_db = measurements_db
#
#
# ############################################################################
# # event
# ############################################################################
# if '{}.event'.format(corepackage) in sys.modules:
# importlib.reload(event)
# else:
# importlib.import_module('.event',package=corepackage)
#
#
# ############################################################################
# # states
# ############################################################################
# if '{}.state'.format(corepackage) in sys.modules:
# importlib.reload(state)
# else:
# importlib.import_module('.state',package=corepackage)
#
# states = state.States()
# state.states = states
#
#
# ############################################################################
# # components
# ############################################################################
# if '{}.component'.format(corepackage) in sys.modules:
# importlib.reload(component)
# else:
# importlib.import_module('.component',package=corepackage)
#
# components = component.Components()
# component.components = components
#
#
# ############################################################################
# # plugins
# ############################################################################
# if '{}.plugin'.format(corepackage) in sys.modules:
# importlib.reload(plugin)
# else:
# importlib.import_module('.plugin',package=corepackage)
#
# plugins = plugin.Plugins()
# plugin.plugins = plugins
#
#
# ############################################################################
# # websocket
# ############################################################################
# if '{}.ws'.format(corepackage) in sys.modules:
# importlib.reload(ws)
# else:
# importlib.import_module('.ws',package=corepackage)
#
# websocket = ws.Websocket()
# ws.websocket = websocket
#
| gpl-3.0 |
njoubert/MAVProxy | MAVProxy/modules/mavproxy_help.py | 2 | 5131 | """
MAVProxy help/versioning module
"""
import os, time, platform, re
from urllib2 import Request, urlopen, URLError, HTTPError
from pymavlink import mavwp, mavutil
from MAVProxy.modules.lib import mp_util
from MAVProxy.modules.lib import mp_module
if mp_util.has_wxpython:
from MAVProxy.modules.lib.mp_menu import *
import wxversion
class HelpModule(mp_module.MPModule):
    def __init__(self, mpstate):
        """Set up the help module: detect versions, check for updates, build the menu.

        NOTE(review): this constructor performs blocking network I/O (update
        check) at module-load time.
        """
        super(HelpModule, self).__init__(mpstate, "mavhelp", "Help and version information", public = True)
        self.enabled = False
        self.add_command('mavhelp', self.cmd_help, "help and version information", "<about|site>")
        self.have_list = False

        # versioning info
        # pkg_resources doesn't work in the windows exe build, so read the version file
        try:
            import pkg_resources
            self.version = pkg_resources.require("mavproxy")[0].version
        except:
            start_script = os.path.join(os.environ['LOCALAPPDATA'], "MAVProxy", "version.txt")
            f = open(start_script, 'r')
            self.version = f.readline()
        self.host = platform.system() + platform.release()
        self.pythonversion = str(platform.python_version())
        if mp_util.has_wxpython:
            self.wxVersion = str(wxversion.getInstalled())
        else:
            self.wxVersion = ''

        # check for updates, if able
        if platform.system() == 'Windows':
            # Windows: scrape the firmware server's directory listing for the
            # newest MAVProxySetup-<version>.exe installer.
            req = Request('http://firmware.ardupilot.org/Tools/MAVProxy/')
            html = ''
            self.newversion = '1.0'
            try:
                filehandle = urlopen(req)
                html = filehandle.read()
            except HTTPError as e:
                self.newversion = 'Error: ', e.code
            except URLError as e:
                self.newversion = 'Error: ', e.reason
            else:
                # parse the webpage for the latest version
                begtags = [m.start() for m in re.finditer('>MAVProxySetup-', html)]
                for begtag in begtags:
                    endtag = html.find('.exe', begtag)
                    versiontag = html[begtag+15:endtag]
                    # skip tags containing letters (e.g. "beta" builds)
                    if not re.search('[a-zA-Z]', versiontag):
                        if self.mycmp(self.newversion, versiontag) < 0:
                            self.newversion = versiontag
        elif platform.system() == 'Linux':
            # Linux: ask PyPI for the latest released MAVProxy version.
            import xmlrpclib, pip
            pypi = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
            available = pypi.package_releases('MAVProxy')
            if not available:
                self.newversion = 'Error finding update'
            else:
                self.newversion = available[0]

        # and format the update string
        if not isinstance(self.newversion, basestring):
            self.newversion = "Error finding update"
        elif re.search('[a-zA-Z]', self.newversion):
            self.newversion = "Error finding update: " + self.newversion
        elif self.newversion.strip() == self.version.strip():
            self.newversion = "Running latest version"
        else:
            self.newversion = "New version " + self.newversion + " available (currently running " + self.version + ")"

        if mp_util.has_wxpython:
            self.menu_added_console = False
            self.menu = MPMenuSubMenu('Help',
                items=[MPMenuItem('MAVProxy website', 'MAVProxy website', '', handler=MPMenuOpenWeblink('http://ardupilot.github.io/MAVProxy/')),
                       MPMenuItem('Check for Updates', 'Check for Updates', '', handler=MPMenuChildMessageDialog(title="Updates", message=self.newversion)),
                       MPMenuItem('About', 'About', '', handler=MPMenuChildMessageDialog(title="About MAVProxy", message="MAVProxy Version " + self.version + "\nOS: " + self.host + "\nPython " + self.pythonversion + "\nWXPython " + self.wxVersion))])
#version number comparison for update checking
def mycmp(self, version1, version2):
def normalize(v):
return [int(x) for x in re.sub(r'(\.0+)*$','', v).split(".")]
return cmp(normalize(version1), normalize(version2))
def idle_task(self):
'''called on idle'''
if self.module('console') is not None and not self.menu_added_console:
self.menu_added_console = True
self.module('console').add_menu(self.menu)
def cmd_help(self, args):
'''help commands'''
if len(args) < 1:
self.print_usage()
return
if args[0] == "about":
print("MAVProxy Version " + self.version + "\nOS: " + self.host + "\nPython " + self.pythonversion)
elif args[0] == "site":
print("See http://ardupilot.github.io/MAVProxy/ for documentation")
else:
self.print_usage()
def mavlink_packet(self, m):
'''handle and incoming mavlink packets'''
def print_usage(self):
print("usage: mavhelp <about|site>")
def init(mpstate):
    '''initialise module; MAVProxy module entry point, returns the module instance'''
    return HelpModule(mpstate)
| gpl-3.0 |
mancoast/CPythonPyc_test | cpython/279_test_transformer.py | 136 | 1120 | import unittest
from test import test_support
# Silence Py3k warning
test_support.import_module('compiler', deprecated=True)
from compiler import transformer, ast
from compiler import compile
class Tests(unittest.TestCase):
    """Tests for the deprecated Python 2 'compiler' package transformer.

    NOTE: Python 2 only -- uses the 'exec c in vals' statement form, and
    the compiler package was removed in Python 3.
    """
    def testMultipleLHS(self):
        """ Test multiple targets on the left hand side. """
        snippets = ['a, b = 1, 2',
                    '(a, b) = 1, 2',
                    '((a, b), c) = (1, 2), 3']
        for s in snippets:
            # the transformer should yield Module -> Stmt -> Assign
            a = transformer.parse(s)
            self.assertIsInstance(a, ast.Module)
            child1 = a.getChildNodes()[0]
            self.assertIsInstance(child1, ast.Stmt)
            child2 = child1.getChildNodes()[0]
            self.assertIsInstance(child2, ast.Assign)
            # This actually tests the compiler, but it's a way to assure the ast
            # is correct
            c = compile(s, '<string>', 'single')
            vals = {}
            exec c in vals
            assert vals['a'] == 1
            assert vals['b'] == 2
def test_main():
    """Entry point used by CPython's regrtest framework to run this module."""
    test_support.run_unittest(Tests)

if __name__ == "__main__":
    test_main()
| gpl-3.0 |
javiercantero/streamlink | tests/test_plugin_bfmtv.py | 5 | 1803 | import unittest
from streamlink.plugins.bfmtv import BFMTV
class TestPluginBFMTV(unittest.TestCase):
    """URL-matching tests for the BFMTV streamlink plugin."""

    def test_can_handle_url(self):
        # URLs the plugin is expected to claim
        matching = [
            "https://www.bfmtv.com/mediaplayer/live-video/",
            "https://bfmbusiness.bfmtv.com/mediaplayer/live-video/",
            "https://www.bfmtv.com/mediaplayer/live-bfm-paris/",
            "https://rmc.bfmtv.com/mediaplayer/live-audio/",
            "https://rmcsport.bfmtv.com/mediaplayer/live-bfm-sport/",
            "https://rmcdecouverte.bfmtv.com/mediaplayer-direct/",
            "https://www.bfmtv.com/mediaplayer/replay/premiere-edition/",
            "https://bfmbusiness.bfmtv.com/mediaplayer/replay/good-morning-business/",
            "https://rmc.bfmtv.com/mediaplayer/replay/les-grandes-gueules/",
            "https://rmc.bfmtv.com/mediaplayer/replay/after-foot/",
            "https://www.01net.com/mediaplayer/replay/jtech/",
            "https://www.bfmtv.com/politique/macron-et-le-pen-talonnes-par-fillon-et-melenchon-a-l-approche-du-premier-tour-1142070.html",
            "https://rmcdecouverte.bfmtv.com/mediaplayer-replay/?id=6714&title=TOP%20GEAR%20:PASSION%20VINTAGE",
        ]
        for url in matching:
            self.assertTrue(BFMTV.can_handle_url(url))

        # URLs that belong to other services and must be rejected
        non_matching = [
            "http://www.tvcatchup.com/",
            "http://www.youtube.com/",
        ]
        for url in non_matching:
            self.assertFalse(BFMTV.can_handle_url(url))
| bsd-2-clause |
maciek263/django2 | myvenv/Lib/site-packages/django/contrib/gis/db/backends/oracle/schema.py | 608 | 4050 | from django.contrib.gis.db.models.fields import GeometryField
from django.db.backends.oracle.schema import DatabaseSchemaEditor
from django.db.backends.utils import truncate_name
class OracleGISSchemaEditor(DatabaseSchemaEditor):
    """Schema editor adding Oracle Spatial support for GeometryFields.

    Oracle requires each geometry column to be registered in the
    USER_SDO_GEOM_METADATA dictionary view (extent, tolerance, SRID)
    before a spatial index can be built, and that metadata must be
    removed again when the column or table is dropped.
    """

    # Registers a geometry column's dimension info and SRID with Oracle.
    sql_add_geometry_metadata = ("""
        INSERT INTO USER_SDO_GEOM_METADATA
            ("TABLE_NAME", "COLUMN_NAME", "DIMINFO", "SRID")
        VALUES (
            %(table)s,
            %(column)s,
            MDSYS.SDO_DIM_ARRAY(
                MDSYS.SDO_DIM_ELEMENT('LONG', %(dim0)s, %(dim2)s, %(tolerance)s),
                MDSYS.SDO_DIM_ELEMENT('LAT', %(dim1)s, %(dim3)s, %(tolerance)s)
            ),
            %(srid)s
        )""")
    sql_add_spatial_index = 'CREATE INDEX %(index)s ON %(table)s(%(column)s) INDEXTYPE IS MDSYS.SPATIAL_INDEX'
    sql_drop_spatial_index = 'DROP INDEX %(index)s'
    sql_clear_geometry_table_metadata = 'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s'
    sql_clear_geometry_field_metadata = (
        'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s '
        'AND COLUMN_NAME = %(column)s'
    )

    def __init__(self, *args, **kwargs):
        super(OracleGISSchemaEditor, self).__init__(*args, **kwargs)
        # geometry statements queued by column_sql() and executed once the
        # table/column exists (see run_geometry_sql)
        self.geometry_sql = []

    def geo_quote_name(self, name):
        """Quote a table/column name for the spatial metadata statements."""
        return self.connection.ops.geo_quote_name(name)

    def column_sql(self, model, field, include_default=False):
        """Return the column SQL; for geometry fields, additionally queue
        the metadata INSERT and (optionally) spatial index creation."""
        column_sql = super(OracleGISSchemaEditor, self).column_sql(model, field, include_default)
        if isinstance(field, GeometryField):
            db_table = model._meta.db_table
            self.geometry_sql.append(
                self.sql_add_geometry_metadata % {
                    'table': self.geo_quote_name(db_table),
                    'column': self.geo_quote_name(field.column),
                    'dim0': field._extent[0],
                    'dim1': field._extent[1],
                    'dim2': field._extent[2],
                    'dim3': field._extent[3],
                    'tolerance': field._tolerance,
                    'srid': field.srid,
                }
            )
            if field.spatial_index:
                self.geometry_sql.append(
                    self.sql_add_spatial_index % {
                        'index': self.quote_name(self._create_spatial_index_name(model, field)),
                        'table': self.quote_name(db_table),
                        'column': self.quote_name(field.column),
                    }
                )
        return column_sql

    def create_model(self, model):
        """Create the table, then run any queued geometry statements."""
        super(OracleGISSchemaEditor, self).create_model(model)
        self.run_geometry_sql()

    def delete_model(self, model):
        """Drop the table and clear its rows from USER_SDO_GEOM_METADATA."""
        super(OracleGISSchemaEditor, self).delete_model(model)
        self.execute(self.sql_clear_geometry_table_metadata % {
            'table': self.geo_quote_name(model._meta.db_table),
        })

    def add_field(self, model, field):
        """Add the column, then run any queued geometry statements."""
        super(OracleGISSchemaEditor, self).add_field(model, field)
        self.run_geometry_sql()

    def remove_field(self, model, field):
        """Remove a field; geometry columns first lose their metadata row
        and spatial index."""
        if isinstance(field, GeometryField):
            self.execute(self.sql_clear_geometry_field_metadata % {
                'table': self.geo_quote_name(model._meta.db_table),
                'column': self.geo_quote_name(field.column),
            })
            if field.spatial_index:
                self.execute(self.sql_drop_spatial_index % {
                    'index': self.quote_name(self._create_spatial_index_name(model, field)),
                })
        super(OracleGISSchemaEditor, self).remove_field(model, field)

    def run_geometry_sql(self):
        """Execute and then clear the queued geometry statements."""
        for sql in self.geometry_sql:
            self.execute(sql)
        self.geometry_sql = []

    def _create_spatial_index_name(self, model, field):
        # Oracle doesn't allow object names > 30 characters. Use this scheme
        # instead of self._create_index_name() for backwards compatibility.
        return truncate_name('%s_%s_id' % (model._meta.db_table, field.column), 30)
| mit |
DevSwift/Kernel-3.4-U8500 | Documentation/target/tcm_mod_builder.py | 4981 | 41422 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
    """Print *msg* and terminate the script with exit status 1."""
    print msg
    sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
    """Create the fabric module output directory.

    Returns 1 if the directory already exists, None after creating it.
    A failed mkdir is fatal and is reported through tcm_mod_err().
    """
    if os.path.isdir(fabric_mod_dir_var) == True:
        return 1

    print("Creating fabric_mod_dir: " + fabric_mod_dir_var)
    # The original checked os.mkdir()'s return value, but os.mkdir()
    # always returns None and signals failure by raising OSError, so the
    # error branch was dead code.  Catch the exception instead.
    try:
        os.mkdir(fabric_mod_dir_var)
    except OSError:
        tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
    return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
    """Generate <fabric_mod_name>_base.h for a Fibre Channel fabric.

    Writes C struct definitions for the initiator node ACL (nacl), the
    target portal group (tpg) and the local port (lport), then records
    the FC naming convention ("lport"/"nport") in the module-level
    globals consumed by the other generators.
    """
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    # NOTE(review): open() raises IOError on failure on Python 2, so this
    # falsy check never fires
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
    buf += " u64 nport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
    buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* FC lport target portal group tag for TCM */\n"
    buf += " u16 lport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
    buf += " struct " + fabric_mod_name + "_lport *lport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_lport {\n"
    buf += " /* SCSI protocol the lport is providing */\n"
    buf += " u8 lport_proto_id;\n"
    buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
    buf += " u64 lport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
    buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
    buf += " struct se_wwn lport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None on Python 2, so this error
    # check is dead code; write errors would raise IOError instead
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "lport"
    fabric_mod_init_port = "nport"
    return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
    """Generate <fabric_mod_name>_base.h for a SAS fabric.

    Same layout as the FC variant, but with SAS naming: the globals are
    set to "tport"/"iport" for the other generators.
    """
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    # NOTE(review): open() raises on failure; falsy check never fires
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
    buf += " u64 iport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
    buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* SAS port target portal group tag for TCM */\n"
    buf += " u16 tport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
    buf += " struct " + fabric_mod_name + "_tport *tport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tport {\n"
    buf += " /* SCSI protocol the tport is providing */\n"
    buf += " u8 tport_proto_id;\n"
    buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
    buf += " u64 tport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for SAS Target port */\n"
    buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
    buf += " struct se_wwn tport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None on Python 2; dead check
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "tport"
    fabric_mod_init_port = "iport"
    return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
    """Generate <fabric_mod_name>_base.h for an iSCSI fabric.

    iSCSI ports are identified by ASCII IQN names rather than binary
    WWPNs, so no u64 wwpn members are emitted.  Globals are set to
    "tport"/"iport" for the other generators.
    """
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    # NOTE(review): open() raises on failure; falsy check never fires
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* ASCII formatted InitiatorName */\n"
    buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* iSCSI target portal group tag for TCM */\n"
    buf += " u16 tport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
    buf += " struct " + fabric_mod_name + "_tport *tport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tport {\n"
    buf += " /* SCSI protocol the tport is providing */\n"
    buf += " u8 tport_proto_id;\n"
    buf += " /* ASCII formatted TargetName for IQN */\n"
    buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
    buf += " struct se_wwn tport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None on Python 2; dead check
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "tport"
    fabric_mod_init_port = "iport"
    return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
    """Dispatch to the per-protocol _base.h generator.

    proto_ident must be "FC", "SAS" or "iSCSI"; anything else is a fatal
    error (print + exit(1)), reported through tcm_mod_err() for
    consistency with the rest of this script.
    """
    # dispatch table keeps the supported protocol list in one place
    builders = {
        "FC": tcm_mod_build_FC_include,
        "SAS": tcm_mod_build_SAS_include,
        "iSCSI": tcm_mod_build_iSCSI_include,
    }
    builder = builders.get(proto_ident)
    if builder is None:
        # identical behaviour to the original inline print + sys.exit(1)
        tcm_mod_err("Unsupported proto_ident: " + proto_ident)
    builder(fabric_mod_dir_val, fabric_mod_name)
    return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
    """Generate <fabric_mod_name>_configfs.c, the TCM configfs glue code.

    Emits the make/drop callbacks for node ACLs, TPGs and WWN ports, the
    target_core_fabric_ops table, and the module init/exit boilerplate.
    proto_ident ("FC", "SAS" or "iSCSI") controls whether the WWPN
    parsing stubs are included; fabric_mod_port/fabric_mod_init_port
    must already have been set by the _base.h generator.
    """
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
    print "Writing file: " + f
    p = open(f, 'w');
    # NOTE(review): open() raises on failure; falsy check never fires
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    # generated file header: kernel includes plus the generated headers
    buf = "#include <linux/module.h>\n"
    buf += "#include <linux/moduleparam.h>\n"
    buf += "#include <linux/version.h>\n"
    buf += "#include <generated/utsrelease.h>\n"
    buf += "#include <linux/utsname.h>\n"
    buf += "#include <linux/init.h>\n"
    buf += "#include <linux/slab.h>\n"
    buf += "#include <linux/kthread.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/string.h>\n"
    buf += "#include <linux/configfs.h>\n"
    buf += "#include <linux/ctype.h>\n"
    buf += "#include <asm/unaligned.h>\n\n"
    buf += "#include <target/target_core_base.h>\n"
    buf += "#include <target/target_core_fabric.h>\n"
    buf += "#include <target/target_core_fabric_configfs.h>\n"
    buf += "#include <target/target_core_configfs.h>\n"
    buf += "#include <target/configfs_macros.h>\n\n"
    buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
    buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
    buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
    buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
    # make/drop callbacks for initiator node ACLs
    buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
    buf += " struct se_portal_group *se_tpg,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
    buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " u64 wwpn = 0;\n"
    buf += " u32 nexus_depth;\n\n"
    buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
    buf += " return ERR_PTR(-EINVAL); */\n"
    buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
    buf += " if (!se_nacl_new)\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
    buf += " nexus_depth = 1;\n"
    buf += " /*\n"
    buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
    buf += " * when converting a NodeACL from demo mode -> explict\n"
    buf += " */\n"
    buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
    buf += " name, nexus_depth);\n"
    buf += " if (IS_ERR(se_nacl)) {\n"
    buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
    buf += " return se_nacl;\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
    buf += " */\n"
    buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
    buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
    buf += " return se_nacl;\n"
    buf += "}\n\n"
    buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
    buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
    buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
    buf += " kfree(nacl);\n"
    buf += "}\n\n"
    # make/drop callbacks for target portal groups
    buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
    buf += " struct se_wwn *wwn,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
    buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
    buf += " unsigned long tpgt;\n"
    buf += " int ret;\n\n"
    buf += " if (strstr(name, \"tpgt_\") != name)\n"
    buf += " return ERR_PTR(-EINVAL);\n"
    buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
    buf += " return ERR_PTR(-EINVAL);\n\n"
    buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
    buf += " if (!tpg) {\n"
    buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += " }\n"
    buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
    buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
    buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
    buf += " &tpg->se_tpg, (void *)tpg,\n"
    buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
    buf += " if (ret < 0) {\n"
    buf += " kfree(tpg);\n"
    buf += " return NULL;\n"
    buf += " }\n"
    buf += " return &tpg->se_tpg;\n"
    buf += "}\n\n"
    buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
    buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
    buf += " core_tpg_deregister(se_tpg);\n"
    buf += " kfree(tpg);\n"
    buf += "}\n\n"
    # make/drop callbacks for the WWN port
    buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
    buf += " struct target_fabric_configfs *tf,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " u64 wwpn = 0;\n\n"
    buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
    buf += " return ERR_PTR(-EINVAL); */\n\n"
    buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
    buf += " if (!" + fabric_mod_port + ") {\n"
    buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += " }\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
    buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
    buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
    buf += "}\n\n"
    buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
    buf += " kfree(" + fabric_mod_port + ");\n"
    buf += "}\n\n"
    # version attribute shown under the fabric's configfs wwn directory
    buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
    buf += " struct target_fabric_configfs *tf,\n"
    buf += " char *page)\n"
    buf += "{\n"
    buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
    buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
    buf += " utsname()->machine);\n"
    buf += "}\n\n"
    buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
    buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
    buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
    buf += " NULL,\n"
    buf += "};\n\n"
    # the fabric ops table wiring the generated stubs into TCM core
    buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
    buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
    buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
    buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
    buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
    buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
    buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
    buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
    buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
    buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
    buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
    buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
    buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
    buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
    buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
    buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
    buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
    buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
    buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
    buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
    buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
    buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
    buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
    buf += " .sess_get_initiator_sid = NULL,\n"
    buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
    buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
    buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
    buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
    buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
    buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
    buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
    buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
    buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
    buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
    buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
    buf += " /*\n"
    buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
    buf += " */\n"
    buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
    buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
    buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
    buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
    buf += " .fabric_post_link = NULL,\n"
    buf += " .fabric_pre_unlink = NULL,\n"
    buf += " .fabric_make_np = NULL,\n"
    buf += " .fabric_drop_np = NULL,\n"
    buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
    buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
    buf += "};\n\n"
    # configfs registration / deregistration and module init boilerplate
    buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
    buf += "{\n"
    buf += " struct target_fabric_configfs *fabric;\n"
    buf += " int ret;\n\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
    buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
    buf += " utsname()->machine);\n"
    buf += " /*\n"
    buf += " * Register the top level struct config_item_type with TCM core\n"
    buf += " */\n"
    buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
    buf += " if (IS_ERR(fabric)) {\n"
    buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
    buf += " return PTR_ERR(fabric);\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
    buf += " */\n"
    buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
    buf += " /*\n"
    buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
    buf += " */\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
    buf += " /*\n"
    buf += " * Register the fabric for use within TCM\n"
    buf += " */\n"
    buf += " ret = target_fabric_configfs_register(fabric);\n"
    buf += " if (ret < 0) {\n"
    buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
    buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
    buf += " return ret;\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Setup our local pointer to *fabric\n"
    buf += " */\n"
    buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
    buf += " return 0;\n"
    buf += "};\n\n"
    buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
    buf += "{\n"
    buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
    buf += " return;\n\n"
    buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
    buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
    buf += "};\n\n"
    buf += "static int __init " + fabric_mod_name + "_init(void)\n"
    buf += "{\n"
    buf += " int ret;\n\n"
    buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
    buf += " if (ret < 0)\n"
    buf += " return ret;\n\n"
    buf += " return 0;\n"
    buf += "};\n\n"
    buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
    buf += "{\n"
    buf += " " + fabric_mod_name + "_deregister_configfs();\n"
    buf += "};\n\n"
    buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
    buf += "MODULE_LICENSE(\"GPL\");\n"
    buf += "module_init(" + fabric_mod_name + "_init);\n"
    buf += "module_exit(" + fabric_mod_name + "_exit);\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None on Python 2; dead check
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    return
def tcm_mod_scan_fabric_ops(tcm_dir):
    """Populate the global fabric_ops list from target_core_fabric.h.

    Scans the kernel header for the members of struct
    target_core_fabric_ops; every line containing a function pointer
    (matching "(*") is appended verbatim to the fabric_ops global.
    """
    fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
    print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
    # process_fo tracks whether we are past the struct's opening line
    process_fo = 0;
    p = open(fabric_ops_api, 'r')
    line = p.readline()
    while line:
        # skip the struct declaration line itself
        if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
            line = p.readline()
            continue
        if process_fo == 0:
            # first line after the struct opener: start collecting
            # NOTE(review): process_fo is never reset, so everything from
            # here to EOF is scanned for "(*" patterns
            process_fo = 1;
            line = p.readline()
            # Search for function pointer
            if not re.search('\(\*', line):
                continue
            fabric_ops.append(line.rstrip())
            continue
        line = p.readline()
        # Search for function pointer
        if not re.search('\(\*', line):
            continue
        fabric_ops.append(line.rstrip())
    p.close()
    return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('get_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"
if re.search('set_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
ret = pi.write(bufi)
if ret:
tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
    # Append the new fabric module's obj-$(CONFIG_...) line to the
    # top-level drivers/target/Makefile.
    kbuild = tcm_dir + "/drivers/target/Makefile"
    entry = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
    makefile = open(kbuild, 'a')
    makefile.write(entry)
    makefile.close()
    return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
    # Append a 'source' line for the new fabric module's Kconfig to the
    # top-level drivers/target/Kconfig.
    kconfig = tcm_dir + "/drivers/target/Kconfig"
    entry = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
    kconfig_file = open(kconfig, 'a')
    kconfig_file.write(entry)
    kconfig_file.close()
    return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
tcm_dir = os.getcwd();
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
| gpl-2.0 |
s40523145/2016fallcp_hw | plugin/liquid_tags/spotify.py | 313 | 1304 | """
Spotify Tag
---------
This implements a Liquid-style spotify tag for Pelican,
based on the jekyll / octopress youtube tag [1]_
Syntax
------
{% spotify id %}
Example
-------
{% spotify 1HNZcRFlIKwHAJD3LxvX4d %}
Output
------
<iframe
src='https://embed.spotify.com/?uri=spotify:track:1HNZcRFlIKwHAJD3LxvX4d'
width='300' height='380' frameborder='0' allowtransparency='true'>
</iframe>
"""
import re
from .mdx_liquid_tags import LiquidTags
# Human-readable tag syntax, echoed back to the user on parse errors.
SYNTAX = "{% spotify id %}"
# First group captures the track id; the optional trailing "<width> <height>"
# group is matched but not used by the handler below.
SPOTIFY = re.compile(r'(\w+)(\s+(\d+)\s(\d+))?')
@LiquidTags.register('spotify')
def spotify(preprocessor, tag, markup):
    """Render a ``{% spotify id %}`` liquid tag as an embedded player iframe.

    Raises ValueError when no track id can be extracted from the markup.
    """
    match = SPOTIFY.search(markup)
    track_id = match.groups()[0] if match else None

    if not track_id:
        raise ValueError("Error processing input, "
                         "expected syntax: {0}".format(SYNTAX))

    return """
<iframe src='https://embed.spotify.com/?uri=spotify:track:{}'
  width='300'
  height='380'
  frameborder='0'
  allowtransparency='true'></iframe>""".format(track_id).strip()
# ---------------------------------------------------
# This import allows image tag to be a Pelican plugin
from liquid_tags import register # noqa
| agpl-3.0 |
Servir-Mekong/SurfaceWaterTool | lib/google/auth/crypt/_cryptography_rsa.py | 1 | 4988 | # Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""RSA verifier and signer that use the ``cryptography`` library.
This is a much faster implementation than the default (in
``google.auth.crypt._python_rsa``), which depends on the pure-Python
``rsa`` library.
"""
import cryptography.exceptions
from cryptography.hazmat import backends
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding
import cryptography.x509
import pkg_resources
from google.auth import _helpers
from google.auth.crypt import base
# Error raised when the installed cryptography package is missing or too old.
_IMPORT_ERROR_MSG = (
    "cryptography>=1.4.0 is required to use cryptography-based RSA " "implementation."
)

# Refuse to import unless cryptography >= 1.4.0 is installed; an absent
# distribution is reported with the same ImportError.
try:  # pragma: NO COVER
    release = pkg_resources.get_distribution("cryptography").parsed_version
    if release < pkg_resources.parse_version("1.4.0"):
        raise ImportError(_IMPORT_ERROR_MSG)
except pkg_resources.DistributionNotFound:  # pragma: NO COVER
    raise ImportError(_IMPORT_ERROR_MSG)

# PEM marker used to distinguish x509 certificates from raw public keys.
_CERTIFICATE_MARKER = b"-----BEGIN CERTIFICATE-----"
# Shared backend and signature parameters: PKCS#1 v1.5 padding with SHA-256.
_BACKEND = backends.default_backend()
_PADDING = padding.PKCS1v15()
_SHA256 = hashes.SHA256()
class RSAVerifier(base.Verifier):
    """Verifies RSA cryptographic signatures using public keys.

    Args:
        public_key (
            cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey):
            The public key used to verify signatures.
    """

    def __init__(self, public_key):
        self._pubkey = public_key

    @_helpers.copy_docstring(base.Verifier)
    def verify(self, message, signature):
        # A malformed signature raises ValueError; a well-formed but wrong
        # one raises InvalidSignature.  Both mean "not verified".
        try:
            self._pubkey.verify(
                signature, _helpers.to_bytes(message), _PADDING, _SHA256
            )
        except (ValueError, cryptography.exceptions.InvalidSignature):
            return False
        return True

    @classmethod
    def from_string(cls, public_key):
        """Construct an Verifier instance from a public key or public
        certificate string.

        Args:
            public_key (Union[str, bytes]): The public key in PEM format or the
                x509 public key certificate.

        Returns:
            Verifier: The constructed verifier.

        Raises:
            ValueError: If the public key can't be parsed.
        """
        key_bytes = _helpers.to_bytes(public_key)

        if _CERTIFICATE_MARKER in key_bytes:
            # PEM contains an x509 certificate; extract its public key.
            certificate = cryptography.x509.load_pem_x509_certificate(
                key_bytes, _BACKEND
            )
            loaded_key = certificate.public_key()
        else:
            # PEM contains a bare public key.
            loaded_key = serialization.load_pem_public_key(key_bytes, _BACKEND)

        return cls(loaded_key)
class RSASigner(base.Signer, base.FromServiceAccountMixin):
    """Signs messages with an RSA private key.

    Args:
        private_key (
            cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey):
            The private key to sign with.
        key_id (str): Optional key ID used to identify this private key. This
            can be useful to associate the private key with its associated
            public key or certificate.
    """

    def __init__(self, private_key, key_id=None):
        self._key = private_key
        self._key_id = key_id

    @property
    @_helpers.copy_docstring(base.Signer)
    def key_id(self):
        return self._key_id

    @_helpers.copy_docstring(base.Signer)
    def sign(self, message):
        # PKCS#1 v1.5 / SHA-256, matching the module-level parameters.
        return self._key.sign(_helpers.to_bytes(message), _PADDING, _SHA256)

    @classmethod
    def from_string(cls, key, key_id=None):
        """Construct a RSASigner from a private key in PEM format.

        Args:
            key (Union[bytes, str]): Private key in PEM format.
            key_id (str): An optional key id used to identify the private key.

        Returns:
            google.auth.crypt._cryptography_rsa.RSASigner: The
            constructed signer.

        Raises:
            ValueError: If ``key`` is not ``bytes`` or ``str`` (unicode).
            UnicodeDecodeError: If ``key`` is ``bytes`` but cannot be decoded
                into a UTF-8 ``str``.
            ValueError: If ``cryptography`` "Could not deserialize key data."
        """
        loaded_key = serialization.load_pem_private_key(
            _helpers.to_bytes(key), password=None, backend=_BACKEND
        )
        return cls(loaded_key, key_id=key_id)
| gpl-3.0 |
kaedroho/wagtail | wagtail/documents/tests/test_models.py | 7 | 11768 | from django.conf import settings
from django.contrib.auth.models import Group, Permission
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.core.files.base import ContentFile
from django.db import transaction
from django.test import TestCase, TransactionTestCase
from django.test.utils import override_settings
from wagtail.core.models import Collection, GroupCollectionPermission
from wagtail.documents import get_document_model, get_document_model_string, models, signal_handlers
from wagtail.images.tests.utils import get_test_image_file
from wagtail.tests.testapp.models import CustomDocument
from wagtail.tests.utils import WagtailTestUtils
class TestDocumentQuerySet(TestCase):
    """Tests for search behaviour on the Document queryset."""

    def test_search_method(self):
        # A freshly created document should be found by a matching search.
        doc = models.Document.objects.create(title="Test document")
        self.assertEqual(list(models.Document.objects.search("Test")), [doc])

    def test_operators(self):
        doc_aaa = models.Document.objects.create(title="AAA Test document")
        doc_zzz = models.Document.objects.create(title="ZZZ Test document")

        # 'and' requires every search term to match.
        and_results = models.Document.objects.search("aaa test", operator='and')
        self.assertEqual(list(and_results), [doc_aaa])

        # 'or' matches documents containing any of the terms.
        or_results = models.Document.objects.search("aaa test", operator='or')
        self.assertEqual(
            sorted(or_results, key=lambda doc: doc.title),
            [doc_aaa, doc_zzz],
        )

    def test_custom_ordering(self):
        doc_aaa = models.Document.objects.create(title="AAA Test document")
        doc_zzz = models.Document.objects.create(title="ZZZ Test document")

        # An explicit order_by() on the queryset should be honoured by search.
        ascending = models.Document.objects.order_by('title').search("Test")
        self.assertEqual(list(ascending), [doc_aaa, doc_zzz])

        descending = models.Document.objects.order_by('-title').search("Test")
        self.assertEqual(list(descending), [doc_zzz, doc_aaa])
class TestDocumentPermissions(TestCase, WagtailTestUtils):
    """Checks Document.is_editable_by_user for each standard role."""

    def setUp(self):
        # Create some user accounts for testing permissions
        self.user = self.create_user(username='user', email='user@email.com', password='password')
        self.owner = self.create_user(username='owner', email='owner@email.com', password='password')
        self.editor = self.create_user(username='editor', email='editor@email.com', password='password')
        self.editor.groups.add(Group.objects.get(name='Editors'))
        self.administrator = self.create_superuser(
            username='administrator',
            email='administrator@email.com',
            password='password'
        )

        # Owner user must have the add_document permission
        self.adders_group = Group.objects.create(name='Document adders')
        GroupCollectionPermission.objects.create(
            group=self.adders_group, collection=Collection.get_first_root_node(),
            permission=Permission.objects.get(codename='add_document')
        )
        self.owner.groups.add(self.adders_group)

        # Create a document for running tests on, uploaded by `owner`.
        self.document = models.Document.objects.create(title="Test document", uploaded_by_user=self.owner)

    def test_administrator_can_edit(self):
        # Superusers can edit any document.
        self.assertTrue(self.document.is_editable_by_user(self.administrator))

    def test_editor_can_edit(self):
        # Members of the 'Editors' group can edit documents.
        self.assertTrue(self.document.is_editable_by_user(self.editor))

    def test_owner_can_edit(self):
        # The uploader (with add permission) can edit their own document.
        self.assertTrue(self.document.is_editable_by_user(self.owner))

    def test_user_cant_edit(self):
        # A user with no relevant permissions cannot edit.
        self.assertFalse(self.document.is_editable_by_user(self.user))
class TestDocumentFilenameProperties(TestCase):
    """Checks filename-derived properties for .doc, .pdf and extensionless files."""

    def setUp(self):
        self.document = models.Document(title="Test document")
        self.document.file.save('example.doc', ContentFile("A boring example document"))

        self.pdf_document = models.Document(title="Test document")
        self.pdf_document.file.save('example.pdf', ContentFile("A boring example document"))

        self.extensionless_document = models.Document(title="Test document")
        self.extensionless_document.file.save('example', ContentFile("A boring example document"))

    def test_filename(self):
        self.assertEqual('example.doc', self.document.filename)
        self.assertEqual('example.pdf', self.pdf_document.filename)
        self.assertEqual('example', self.extensionless_document.filename)

    def test_file_extension(self):
        # Extension is reported without the dot; empty string when absent.
        self.assertEqual('doc', self.document.file_extension)
        self.assertEqual('pdf', self.pdf_document.file_extension)
        self.assertEqual('', self.extensionless_document.file_extension)

    def test_content_type(self):
        # Unknown extensions fall back to the generic octet-stream type.
        self.assertEqual('application/msword', self.document.content_type)
        self.assertEqual('application/pdf', self.pdf_document.content_type)
        self.assertEqual('application/octet-stream', self.extensionless_document.content_type)

    def test_content_disposition(self):
        # PDFs are served inline; everything else as an attachment with both
        # plain and RFC 5987 (filename*=) encoded filename parameters.
        self.assertEqual(
            '''attachment; filename=example.doc; filename*=UTF-8''example.doc''',
            self.document.content_disposition
        )
        self.assertEqual('inline', self.pdf_document.content_disposition)
        self.assertEqual(
            '''attachment; filename=example; filename*=UTF-8''example''',
            self.extensionless_document.content_disposition
        )

    def tearDown(self):
        # delete the FieldFile directly because the TestCase does not commit
        # transactions to trigger transaction.on_commit() in the signal handler
        self.document.file.delete()
        self.pdf_document.file.delete()
        self.extensionless_document.file.delete()
class TestFilesDeletedForDefaultModels(TransactionTestCase):
    '''
    Because we expect file deletion to only happen once a transaction is
    successfully committed, we must run these tests using TransactionTestCase
    per the following documentation:

        Django's TestCase class wraps each test in a transaction and rolls back that
        transaction after each test, in order to provide test isolation. This means
        that no transaction is ever actually committed, thus your on_commit()
        callbacks will never be run. If you need to test the results of an
        on_commit() callback, use a TransactionTestCase instead.

        https://docs.djangoproject.com/en/1.10/topics/db/transactions/#use-in-tests
    '''

    def setUp(self):
        # Required to create root collection because the TransactionTestCase
        # does not make initial data loaded in migrations available and
        # serialized_rollback=True causes other problems in the test suite.
        # ref: https://docs.djangoproject.com/en/1.10/topics/testing/overview/#rollback-emulation
        Collection.objects.get_or_create(
            name="Root",
            path='0001',
            depth=1,
            numchild=0,
        )

    def test_document_file_deleted_oncommit(self):
        with transaction.atomic():
            document = get_document_model().objects.create(title="Test Image", file=get_test_image_file())
            filename = document.file.name
            self.assertTrue(document.file.storage.exists(filename))
            document.delete()
            # Still inside the transaction: the on_commit cleanup has not
            # run yet, so the underlying file must still exist.
            self.assertTrue(document.file.storage.exists(filename))
        # After commit, the signal handler's on_commit callback has removed it.
        self.assertFalse(document.file.storage.exists(filename))
@override_settings(WAGTAILDOCS_EXTENSIONS=["pdf"])
class TestDocumentValidateExtensions(TestCase):
    """Checks extension validation against WAGTAILDOCS_EXTENSIONS (here: pdf only)."""

    def setUp(self):
        # .doc is not in the allowed list; .pdf is.
        self.document_invalid = models.Document.objects.create(
            title="Test document", file="test.doc"
        )
        self.document_valid = models.Document.objects.create(
            title="Test document", file="test.pdf"
        )

    def test_create_doc_invalid_extension(self):
        """
        Checks if the uploaded document has the expected extensions
        mentioned in settings.WAGTAILDOCS_EXTENSIONS

        This is caught in form.error and should be raised during model
        creation when called full_clean. This specific testcase invalid
        file extension is passed
        """
        with self.assertRaises(ValidationError):
            self.document_invalid.full_clean()

    def test_create_doc_valid_extension(self):
        """
        Checks if the uploaded document has the expected extensions
        mentioned in settings.WAGTAILDOCS_EXTENSIONS

        This is caught in form.error and should be raised during
        model creation when called full_clean. In this specific
        testcase a valid file extension is passed.
        """
        try:
            self.document_valid.full_clean()
        except ValidationError:
            self.fail("Validation error is raised even when valid file name is passed")

    def tearDown(self):
        self.document_invalid.file.delete()
        self.document_valid.file.delete()
@override_settings(WAGTAILDOCS_DOCUMENT_MODEL='tests.CustomDocument')
class TestFilesDeletedForCustomModels(TestFilesDeletedForDefaultModels):
    # Re-runs the inherited file-deletion tests against a custom document model.
    def setUp(self):
        # Required to create root collection because the TransactionTestCase
        # does not make initial data loaded in migrations available and
        # serialized_rollback=True causes other problems in the test suite.
        # ref: https://docs.djangoproject.com/en/1.10/topics/testing/overview/#rollback-emulation
        Collection.objects.get_or_create(
            name="Root",
            path='0001',
            depth=1,
            numchild=0,
        )

        #: Sadly signal receivers only get connected when starting django.
        #: We will re-attach them here to mimic the django startup behavior
        #: and get the signals connected to our custom model..
        signal_handlers.register_signal_handlers()

    def test_document_model(self):
        # Sanity check that the override_settings swap actually took effect.
        cls = get_document_model()
        self.assertEqual('%s.%s' % (cls._meta.app_label, cls.__name__), 'tests.CustomDocument')
class TestGetDocumentModel(WagtailTestUtils, TestCase):
    """Behaviour of get_document_model()/get_document_model_string() under
    the various WAGTAILDOCS_DOCUMENT_MODEL configurations."""

    @override_settings(WAGTAILDOCS_DOCUMENT_MODEL='tests.CustomDocument')
    def test_custom_get_document_model(self):
        """Test get_document_model with a custom document model"""
        self.assertIs(get_document_model(), CustomDocument)

    @override_settings(WAGTAILDOCS_DOCUMENT_MODEL='tests.CustomDocument')
    def test_custom_get_document_model_string(self):
        """Test get_document_model_string with a custom document model"""
        self.assertEqual(get_document_model_string(), 'tests.CustomDocument')

    @override_settings()
    def test_standard_get_document_model(self):
        """Test get_document_model with no WAGTAILDOCS_DOCUMENT_MODEL"""
        # Bare override_settings() lets us delete the setting for this test only.
        del settings.WAGTAILDOCS_DOCUMENT_MODEL
        from wagtail.documents.models import Document
        self.assertIs(get_document_model(), Document)

    @override_settings()
    def test_standard_get_document_model_string(self):
        """Test get_document_model_string with no WAGTAILDOCS_DOCUMENT_MODEL"""
        del settings.WAGTAILDOCS_DOCUMENT_MODEL
        self.assertEqual(get_document_model_string(), 'wagtaildocs.Document')

    @override_settings(WAGTAILDOCS_DOCUMENT_MODEL='tests.UnknownModel')
    def test_unknown_get_document_model(self):
        """Test get_document_model with an unknown model"""
        with self.assertRaises(ImproperlyConfigured):
            get_document_model()

    @override_settings(WAGTAILDOCS_DOCUMENT_MODEL='invalid-string')
    def test_invalid_get_document_model(self):
        """Test get_document_model with an invalid model string"""
        with self.assertRaises(ImproperlyConfigured):
            get_document_model()
| bsd-3-clause |
dnet/pySSTV | pysstv/sstv.py | 1 | 4354 | #!/usr/bin/env python
from __future__ import division, with_statement
from math import sin, pi
from random import random
from contextlib import closing
from itertools import cycle, chain
from array import array
import wave
FREQ_VIS_BIT1 = 1100
FREQ_SYNC = 1200
FREQ_VIS_BIT0 = 1300
FREQ_BLACK = 1500
FREQ_VIS_START = 1900
FREQ_WHITE = 2300
FREQ_RANGE = FREQ_WHITE - FREQ_BLACK
FREQ_FSKID_BIT1 = 1900
FREQ_FSKID_BIT0 = 2100
MSEC_VIS_START = 300
MSEC_VIS_SYNC = 10
MSEC_VIS_BIT = 30
MSEC_FSKID_BIT = 22
class SSTV(object):
    """Base class for SSTV encoders: turns an image into an audio signal.

    Subclasses supply VIS_CODE, SYNC and gen_image_tuples(); this class
    handles VIS header/FSK-ID framing, phase-continuous tone synthesis,
    quantization and WAV output.
    """

    def __init__(self, image, samples_per_sec, bits):
        self.image = image
        self.samples_per_sec = samples_per_sec
        self.bits = bits            # bits per sample, must be a key of BITS_TO_STRUCT
        self.vox_enabled = False    # emit the VOX tone preamble when True
        self.fskid_payload = ''     # FSK station-ID bytes appended after the image
        self.nchannels = 1
        self.on_init()

    def on_init(self):
        # Subclass hook, invoked at the end of __init__.
        pass

    # array/struct type code per sample width.
    # NOTE(review): 8-bit WAV PCM is conventionally *unsigned* ('B');
    # 'b' (signed) here may produce off-spec 8-bit files — confirm intent.
    BITS_TO_STRUCT = {8: 'b', 16: 'h'}

    def write_wav(self, filename):
        """writes the whole image to a Microsoft WAV file"""
        fmt = self.BITS_TO_STRUCT[self.bits]
        data = array(fmt, self.gen_samples())
        if self.nchannels != 1:
            # Duplicate the mono stream across all channels (interleaved frames).
            data = array(fmt, chain.from_iterable(
                zip(*([data] * self.nchannels))))
        with closing(wave.open(filename, 'wb')) as wav:
            wav.setnchannels(self.nchannels)
            wav.setsampwidth(self.bits // 8)
            wav.setframerate(self.samples_per_sec)
            wav.writeframes(data)

    def gen_samples(self):
        """generates discrete samples from gen_values()

        performs quantization according to
        the bits per sample value given during construction
        """
        max_value = 2 ** self.bits
        alias = 1 / max_value
        amp = max_value // 2
        lowest = -amp
        highest = amp - 1
        # Pre-computed dither noise (one LSB wide), cycled to decorrelate
        # quantization error from the signal.
        alias_cycle = cycle((alias * (random() - 0.5) for _ in range(1024)))
        for value, alias_item in zip(self.gen_values(), alias_cycle):
            sample = int(value * amp + alias_item)
            # Clamp into the representable signed range [lowest, highest].
            yield (lowest if sample <= lowest else
                   sample if sample <= highest else highest)

    def gen_values(self):
        """generates samples between -1 and +1 from gen_freq_bits()

        performs sampling according to
        the samples per second value given during construction
        """
        spms = self.samples_per_sec / 1000  # samples per millisecond
        offset = 0    # accumulated phase: keeps the sine continuous across tones
        samples = 0   # fractional-sample carry between segments
        factor = 2 * pi / self.samples_per_sec
        sample = 0
        for freq, msec in self.gen_freq_bits():
            samples += spms * msec
            tx = int(samples)
            freq_factor = freq * factor
            for sample in range(tx):
                yield sin(sample * freq_factor + offset)
            # Advance the phase offset past this segment so the next tone
            # starts where this one ended (no phase discontinuity click).
            offset += (sample + 1) * freq_factor
            samples -= tx

    def gen_freq_bits(self):
        """generates tuples (freq, msec) that describe a sine wave segment

        frequency "freq" in Hz and duration "msec" in ms
        """
        if self.vox_enabled:
            # VOX preamble tones (open a transmitter's voice-operated switch).
            for freq in (1900, 1500, 1900, 1500, 2300, 1500, 2300, 1500):
                yield freq, 100
        yield FREQ_VIS_START, MSEC_VIS_START
        yield FREQ_SYNC, MSEC_VIS_SYNC
        yield FREQ_VIS_START, MSEC_VIS_START
        yield FREQ_SYNC, MSEC_VIS_BIT  # start bit
        # VIS code: 7 data bits, LSB first, followed by an even-parity bit.
        vis = self.VIS_CODE
        num_ones = 0
        for _ in range(7):
            bit = vis & 1
            vis >>= 1
            num_ones += bit
            bit_freq = FREQ_VIS_BIT1 if bit == 1 else FREQ_VIS_BIT0
            yield bit_freq, MSEC_VIS_BIT
        parity_freq = FREQ_VIS_BIT1 if num_ones % 2 == 1 else FREQ_VIS_BIT0
        yield parity_freq, MSEC_VIS_BIT
        yield FREQ_SYNC, MSEC_VIS_BIT  # stop bit
        yield from self.gen_image_tuples()
        # Optional FSK station ID: 6 bits per byte, LSB first.
        for fskid_byte in map(ord, self.fskid_payload):
            for _ in range(6):
                bit = fskid_byte & 1
                fskid_byte >>= 1
                bit_freq = FREQ_FSKID_BIT1 if bit == 1 else FREQ_FSKID_BIT0
                yield bit_freq, MSEC_FSKID_BIT

    def gen_image_tuples(self):
        # Subclasses yield (freq, msec) tuples encoding the image scanlines.
        return []

    def add_fskid_text(self, text):
        """Append *text* to the FSK ID payload (framed, chars offset by 0x20)."""
        self.fskid_payload += '\x20\x2a{0}\x01'.format(
            ''.join(chr(ord(c) - 0x20) for c in text))

    def horizontal_sync(self):
        # self.SYNC (sync pulse duration in ms) is supplied by subclasses.
        yield FREQ_SYNC, self.SYNC
def byte_to_freq(value):
    """Map a pixel byte (0..255) linearly onto the black..white tone range."""
    scaled = FREQ_RANGE * value / 255
    return FREQ_BLACK + scaled
| mit |
moijes12/oh-mainline | vendor/packages/celery/celery/tests/test_backends/test_base.py | 18 | 10279 | from __future__ import absolute_import
from __future__ import with_statement
import sys
import types
from mock import Mock
from nose import SkipTest
from celery.utils import serialization
from celery.utils.serialization import subclass_exception
from celery.utils.serialization import \
find_nearest_pickleable_exception as fnpe
from celery.utils.serialization import UnpickleableExceptionWrapper
from celery.utils.serialization import get_pickleable_exception as gpe
from celery import states
from celery.backends.base import BaseBackend, KeyValueStoreBackend
from celery.backends.base import BaseDictBackend, DisabledBackend
from celery.utils import uuid
from celery.tests.utils import unittest
class wrapobject(object):
    # Minimal base used to build the "Lookalike" exception-ish fixture below.
    # Keyword arguments are accepted but deliberately discarded.

    def __init__(self, *args, **kwargs):
        self.args = args
if sys.version_info >= (3, 0):
    # Old-style classes do not exist on Python 3.
    Oldstyle = None
else:
    Oldstyle = types.ClassType("Oldstyle", (), {})

# Fixture exception types exercising the pickleability helpers.
Unpickleable = subclass_exception("Unpickleable", KeyError, "foo.module")
Impossible = subclass_exception("Impossible", object, "foo.module")
Lookalike = subclass_exception("Lookalike", wrapobject, "foo.module")
b = BaseBackend()  # shared backend instance used by the interface tests below
class test_serialization(unittest.TestCase):

    def test_create_exception_cls(self):
        # Works both with the default base and with an explicit one.
        self.assertTrue(serialization.create_exception_cls("FooError", "m"))
        self.assertTrue(serialization.create_exception_cls("FooError",
                                                           "m",
                                                           KeyError))
class test_BaseBackend_interface(unittest.TestCase):
    """Every abstract BaseBackend operation must raise NotImplementedError
    on the bare base-class instance ``b``."""

    def test_get_status(self):
        with self.assertRaises(NotImplementedError):
            b.get_status("SOMExx-N0Nex1stant-IDxx-")

    def test__forget(self):
        with self.assertRaises(NotImplementedError):
            b.forget("SOMExx-N0Nex1stant-IDxx-")

    def test_store_result(self):
        with self.assertRaises(NotImplementedError):
            b.store_result("SOMExx-N0nex1stant-IDxx-", 42, states.SUCCESS)

    def test_mark_as_started(self):
        with self.assertRaises(NotImplementedError):
            b.mark_as_started("SOMExx-N0nex1stant-IDxx-")

    def test_reload_task_result(self):
        with self.assertRaises(NotImplementedError):
            b.reload_task_result("SOMExx-N0nex1stant-IDxx-")

    def test_reload_taskset_result(self):
        with self.assertRaises(NotImplementedError):
            b.reload_taskset_result("SOMExx-N0nex1stant-IDxx-")

    def test_get_result(self):
        with self.assertRaises(NotImplementedError):
            b.get_result("SOMExx-N0nex1stant-IDxx-")

    def test_restore_taskset(self):
        with self.assertRaises(NotImplementedError):
            b.restore_taskset("SOMExx-N0nex1stant-IDxx-")

    def test_delete_taskset(self):
        with self.assertRaises(NotImplementedError):
            b.delete_taskset("SOMExx-N0nex1stant-IDxx-")

    def test_save_taskset(self):
        with self.assertRaises(NotImplementedError):
            b.save_taskset("SOMExx-N0nex1stant-IDxx-", "blergh")

    def test_get_traceback(self):
        with self.assertRaises(NotImplementedError):
            b.get_traceback("SOMExx-N0nex1stant-IDxx-")

    def test_forget(self):
        with self.assertRaises(NotImplementedError):
            b.forget("SOMExx-N0nex1stant-IDxx-")

    def test_on_chord_apply(self, unlock="celery.chord_unlock"):
        # Temporarily swap the unlock task for a Mock so we can observe
        # that on_chord_apply schedules it; always restore afterwards.
        from celery.registry import tasks
        p, tasks[unlock] = tasks.get(unlock), Mock()
        try:
            b.on_chord_apply("dakj221", "sdokqweok")
            self.assertTrue(tasks[unlock].apply_async.call_count)
        finally:
            tasks[unlock] = p
class test_exception_pickle(unittest.TestCase):
    """find_nearest_pickleable_exception (fnpe) behaviour for edge cases."""

    def test_oldstyle(self):
        if Oldstyle is None:
            raise SkipTest("py3k does not support old style classes")
        # Old-style instances have no usable exception ancestry.
        self.assertIsNone(fnpe(Oldstyle()))

    def test_BaseException(self):
        # Plain Exception is already pickleable; nothing "nearer" to find.
        self.assertIsNone(fnpe(Exception()))

    def test_get_pickleable_exception(self):
        exc = Exception("foo")
        self.assertEqual(gpe(exc), exc)

    def test_unpickleable(self):
        # Unpickleable subclasses KeyError, so KeyError is the nearest match;
        # Impossible subclasses only object, so there is no match at all.
        self.assertIsInstance(fnpe(Unpickleable()), KeyError)
        self.assertIsNone(fnpe(Impossible()))
class test_prepare_exception(unittest.TestCase):
    """Round-tripping exceptions through prepare_exception/exception_to_python."""

    def test_unpickleable(self):
        # Falls back to the nearest pickleable ancestor (KeyError).
        x = b.prepare_exception(Unpickleable(1, 2, "foo"))
        self.assertIsInstance(x, KeyError)
        y = b.exception_to_python(x)
        self.assertIsInstance(y, KeyError)

    def test_impossible(self):
        # No pickleable ancestor at all: wrapped, but the original class
        # name/module are reconstructed on the way back.
        x = b.prepare_exception(Impossible())
        self.assertIsInstance(x, UnpickleableExceptionWrapper)
        y = b.exception_to_python(x)
        self.assertEqual(y.__class__.__name__, "Impossible")
        if sys.version_info < (2, 5):
            self.assertTrue(y.__class__.__module__)
        else:
            self.assertEqual(y.__class__.__module__, "foo.module")

    def test_regular(self):
        # Already-pickleable exceptions pass through unchanged.
        x = b.prepare_exception(KeyError("baz"))
        self.assertIsInstance(x, KeyError)
        y = b.exception_to_python(x)
        self.assertIsInstance(y, KeyError)
class KVBackend(KeyValueStoreBackend):
    """In-memory dict-backed backend used to exercise KeyValueStoreBackend."""

    # When True, mget() returns a dict instead of a list, so both shapes
    # of the mget contract can be tested.
    mget_returns_dict = False

    def __init__(self, *args, **kwargs):
        self.db = {}
        # BUGFIX: forward the real constructor arguments.  Previously the
        # class object ``KeyValueStoreBackend`` itself was passed as the
        # first positional argument, which was clearly a typo.
        super(KVBackend, self).__init__(*args, **kwargs)

    def get(self, key):
        return self.db.get(key)

    def set(self, key, value):
        self.db[key] = value

    def mget(self, keys):
        if self.mget_returns_dict:
            return dict((key, self.get(key)) for key in keys)
        else:
            return [self.get(key) for key in keys]

    def delete(self, key):
        # Missing keys are ignored, matching a store's idempotent delete.
        self.db.pop(key, None)
class DictBackend(BaseDictBackend):
    """Stub BaseDictBackend with canned responses for known ids."""

    def __init__(self, *args, **kwargs):
        BaseDictBackend.__init__(self, *args, **kwargs)
        self._data = {"can-delete": {"result": "foo"}}

    def _restore_taskset(self, taskset_id):
        # Only the "exists" id resolves; anything else returns None.
        if taskset_id == "exists":
            return {"result": "taskset"}

    def _get_task_meta_for(self, task_id):
        if task_id == "task-exists":
            return {"result": "task"}

    def _delete_taskset(self, taskset_id):
        self._data.pop(taskset_id, None)
class test_BaseDictBackend(unittest.TestCase):

    def setUp(self):
        self.b = DictBackend()

    def test_delete_taskset(self):
        self.b.delete_taskset("can-delete")
        self.assertNotIn("can-delete", self.b._data)

    def test_save_taskset(self):
        # save_taskset must delegate to the backend-specific _save_taskset.
        b = BaseDictBackend()
        b._save_taskset = Mock()
        b.save_taskset("foofoo", "xxx")
        b._save_taskset.assert_called_with("foofoo", "xxx")

    def test_forget_interface(self):
        b = BaseDictBackend()
        with self.assertRaises(NotImplementedError):
            b.forget("foo")

    def test_restore_taskset(self):
        self.assertIsNone(self.b.restore_taskset("missing"))
        self.assertIsNone(self.b.restore_taskset("missing"))
        # Repeated calls exercise the cached path; cache=False bypasses it.
        self.assertEqual(self.b.restore_taskset("exists"), "taskset")
        self.assertEqual(self.b.restore_taskset("exists"), "taskset")
        self.assertEqual(self.b.restore_taskset("exists", cache=False),
                         "taskset")

    def test_reload_taskset_result(self):
        # Reloading must repopulate the (cleared) result cache.
        self.b._cache = {}
        self.b.reload_taskset_result("exists")
        self.b._cache["exists"] = {"result": "taskset"}

    def test_reload_task_result(self):
        self.b._cache = {}
        self.b.reload_task_result("task-exists")
        self.b._cache["task-exists"] = {"result": "task"}
class test_KeyValueStoreBackend(unittest.TestCase):

    def setUp(self):
        self.b = KVBackend()

    def test_get_store_delete_result(self):
        tid = uuid()
        self.b.mark_as_done(tid, "Hello world")
        self.assertEqual(self.b.get_result(tid), "Hello world")
        self.assertEqual(self.b.get_status(tid), states.SUCCESS)
        # After forget() the task looks like it never existed.
        self.b.forget(tid)
        self.assertEqual(self.b.get_status(tid), states.PENDING)

    def test_strip_prefix(self):
        # _strip_prefix must undo get_key_for_task and be a no-op otherwise.
        x = self.b.get_key_for_task("x1b34")
        self.assertEqual(self.b._strip_prefix(x), "x1b34")
        self.assertEqual(self.b._strip_prefix("x1b34"), "x1b34")

    def test_get_many(self):
        # Exercised twice: once with mget returning a dict, once a list.
        for is_dict in True, False:
            self.b.mget_returns_dict = is_dict
            ids = dict((uuid(), i) for i in xrange(10))
            for id, i in ids.items():
                self.b.mark_as_done(id, i)
            it = self.b.get_many(ids.keys())
            for i, (got_id, got_state) in enumerate(it):
                self.assertEqual(got_state["result"], ids[got_id])
            self.assertEqual(i, 9)
            self.assertTrue(list(self.b.get_many(ids.keys())))

    def test_get_missing_meta(self):
        self.assertIsNone(self.b.get_result("xxx-missing"))
        self.assertEqual(self.b.get_status("xxx-missing"), states.PENDING)

    def test_save_restore_delete_taskset(self):
        tid = uuid()
        self.b.save_taskset(tid, "Hello world")
        self.assertEqual(self.b.restore_taskset(tid), "Hello world")
        self.b.delete_taskset(tid)
        self.assertIsNone(self.b.restore_taskset(tid))

    def test_restore_missing_taskset(self):
        self.assertIsNone(self.b.restore_taskset("xxx-nonexistant"))
class test_KeyValueStoreBackend_interface(unittest.TestCase):
    """The abstract KV operations must raise NotImplementedError; cleanup
    is the one no-op default."""

    def test_get(self):
        with self.assertRaises(NotImplementedError):
            KeyValueStoreBackend().get("a")

    def test_set(self):
        with self.assertRaises(NotImplementedError):
            KeyValueStoreBackend().set("a", 1)

    def test_cleanup(self):
        self.assertFalse(KeyValueStoreBackend().cleanup())

    def test_delete(self):
        with self.assertRaises(NotImplementedError):
            KeyValueStoreBackend().delete("a")

    def test_mget(self):
        with self.assertRaises(NotImplementedError):
            KeyValueStoreBackend().mget(["a"])

    def test_forget(self):
        with self.assertRaises(NotImplementedError):
            KeyValueStoreBackend().forget("a")
class test_DisabledBackend(unittest.TestCase):

    def test_store_result(self):
        # Storing results on a disabled backend is a silent no-op.
        DisabledBackend().store_result()

    def test_is_disabled(self):
        # Reading results, however, must fail loudly.
        with self.assertRaises(NotImplementedError):
            DisabledBackend().get_status("foo")
| agpl-3.0 |
hiepcm/update-scripts | tests/ltsi-3.10/controller/common/tty-ping.py | 27 | 11549 | #!/usr/bin/python
# tty-ping.py
#
# Simple test for communication over a serial port-backed TTY
#
# Copyright (C) 2013 Horms Solutions Ltd.
#
# Contact: Simon Horman <horms@verge.net.au>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
import errno
import getopt
import os
import select
import subprocess
import sys
import time
def try_kill (proc):
    # Kill *proc*, tolerating a process that has already exited (ESRCH).
    # Returns False only on a genuine kill failure.
    try:
        proc.kill()
    except OSError, e:
        if e.errno != errno.ESRCH:
            print >>sys.stderr, 'error: kill failed:', e
            return False
    return True
verbose = False
def info (str):
    # Print *str* only when the module-level verbose flag (-v) is set.
    if verbose:
        print str
    pass
def err (str):
    # Report an error message on stderr.
    print >>sys.stderr, "error: %s" % str
def fatal_err (str):
    # Report an error and terminate the script with a non-zero exit code.
    err(str)
    exit(1)
def err_stdio(msg, outdata, errdata):
    # Append a child's captured stdout/stderr to *msg* and report it via err().
    msg += '\nStdout:'
    if outdata:
        msg += '\n' + outdata.rstrip('\r\n') + '\n'
    msg += '\nStderr:'
    if errdata:
        msg += '\n' + errdata.rstrip('\r\n') + '\n'
    err(msg.rstrip('\r\n'))
def err_proc(proc, msg, outdata, errdata):
    # Kill *proc*, drain whatever remains on its stdout/stderr (non-blocking,
    # 0.1 s select timeout), report everything, then reap the process.
    try_kill(proc)
    fds = [proc.stdout, proc.stderr]
    while fds:
        try:
            (r, w, e) = select.select(fds, [], fds, 0.1)
            if not r:
                break;
        except select.error, e:
            print >>sys.stderr, 'error: select failed:', e
            break
        for fd in r:
            data = fd.read()
            if data == '': # EOF
                fds.remove(fd)
                continue
            if fd == proc.stdout:
                outdata += data
            elif fd == proc.stderr:
                errdata += data
            else:
                break
    err_stdio(msg, outdata, errdata)
    proc.wait()
class Test:
def __init__(self, local_tty, board_hostname, board_username, board_tty):
self.local_tty = local_tty
self.board_hostname = board_hostname
self.board_username = board_username
self.board_tty = board_tty
self.board_stty = None
self.local_stty = None
def start_cmd(self, info_str, cmd, stdin=None):
info(info_str)
info('start_cmd: ' + ' '.join(cmd))
pipes = {'stdout':subprocess.PIPE, 'stderr':subprocess.PIPE}
if stdin:
pipes['stdin'] = stdin
try:
proc = subprocess.Popen(cmd, **pipes)
except OSError as e:
print >>sys.stderr, 'error: ' + info_str + ': execution failed:', e
return None
return proc
def local_cmd_check_output(self, info_str, cmd):
proc = self.start_cmd(info_str, cmd)
if not proc:
return (False, None)
(outdata, errdata) = proc.communicate()
if proc.returncode != 0:
err_stdio(info_str, outdata, errdata)
return (False, None)
return (True, outdata)
def local_cmd(self, info_str, cmd):
return self.local_cmd_check_output(info_str, cmd)[0]
def board_cmd_args(self, cmd):
return ['ssh', self.board_hostname, '-l', self.board_username] + cmd
def board_cmd_check_output(self, info_str, cmd):
return self.local_cmd_check_output(info_str, self.board_cmd_args(cmd))
def board_cmd(self, info_str, cmd):
return self.local_cmd(info_str, self.board_cmd_args(cmd))
def base_stty_args(self, tty):
return [ 'stty', '-F', tty ]
def set_stty_args(self, tty, speed, parity, stop_bits):
cmd = self.base_stty_args(tty)
cmd.append('speed')
cmd.append(speed)
if parity == 'none':
cmd.append('-evenp')
elif parity == 'even':
cmd.append('evenp')
else:
fatal_err('Unknown stop_bits value \'%s\'. ' % stop_bits +
'Must be \'none\' or \'even\'')
if stop_bits == '1':
cmd.append('-cstopb')
elif stop_bits == '2':
cmd.append('cstopb')
else:
fatal_err('Unknown stop_bits value \'%s\'. ' % stop_bits +
'Must be \'1\' or \'2\'')
return cmd
def local_set_stty(self, info_str, speed, parity, stop_bits):
cmd = self.set_stty_args(self.local_tty, speed, parity, stop_bits)
return self.local_cmd(info_str, cmd)
def board_set_stty(self, info_str, speed, parity, stop_bits):
cmd = self.set_stty_args(self.board_tty, speed, parity, stop_bits)
return self.board_cmd(info_str, cmd)
def set_stty(self, speed, parity, stop_bits):
info_str = 'Set stty'
i_str = info_str + ': board'
retcode = self.board_set_stty(i_str, speed, parity, stop_bits)
if not retcode:
return retcode
i_str = info_str + ': local'
return self.local_set_stty(i_str, speed, parity, stop_bits)
def save_stty_args(self, tty):
return self.base_stty_args(tty) + ['--save']
def save_stty(self):
info_str = 'Save stty'
i_str = info_str + ': local tty'
cmd = self.save_stty_args(self.local_tty)
(retcode, outdata) = self.local_cmd_check_output(i_str, cmd)
if not retcode:
err(i_str)
return False
self.local_stty = outdata.rstrip('\r\n')
i_str = info_str + ': board tty'
cmd = self.save_stty_args(self.board_tty)
(retcode, outdata) = self.board_cmd_check_output(i_str, cmd)
if not retcode:
err(i_str)
return False
self.board_stty = outdata.rstrip('\r\n')
return True
def restore_stty(self):
info_str = 'Restore stty'
retcode = True
time.sleep(1)
if self.board_stty:
i_str = info_str + ': board tty'
cmd = self.base_stty_args(self.board_tty)
cmd.append(self.board_stty)
if not self.board_cmd(i_str, cmd):
err(i_str)
retcode = False
if self.local_stty:
i_str = info_str + ': local tty'
cmd = self.base_stty_args(self.local_tty)
cmd.append(self.local_stty)
if not self.local_cmd(i_str, cmd):
err(i_str)
retcode = False
return retcode
def echo_args(self, tty):
return [ 'dd', 'bs=1', 'of=' + tty ]
def echo(self, info_str, key, to_board):
retcode = True
if to_board:
cmd = self.echo_args(self.local_tty)
else:
cmd = self.board_cmd_args(self.echo_args(self.board_tty))
proc = self.start_cmd(info_str, cmd, stdin=subprocess.PIPE)
if not proc:
return False
(outdata, errdata) = proc.communicate('\n' * 8 + key + '\n');
if proc.returncode != 0:
err_stdio(info_str, outdata, errdata)
retcode = False
#for i in ['\n'] * 8 + [key + '\n']:
# proc.stdin.write(i)
# time.sleep(1)
if not try_kill(proc):
retcode = False
return retcode
def monitor_args(self, tty):
return [ 'dd', 'bs=1', 'if=' + tty ]
def start_monitor(self, info_str, on_board):
if on_board:
cmd = self.monitor_args(self.local_tty)
else:
cmd = self.board_cmd_args(self.monitor_args(self.board_tty))
return self.start_cmd(info_str, cmd)
def collect_monitor(self, proc, info_str, expect):
info(info_str)
line = ""
outdata = ""
errdata = ""
while True:
if proc.poll():
err_proc(proc, info_str, outdata, errdata)
return False
fds = [proc.stdout, proc.stderr]
try:
(r, w, e) = select.select(fds, [], fds, 10)
if e or w:
err_proc(proc, info_str + ': select error', outdata, '')
return False
if not r:
err_proc(proc, info_str + ': select timeout', outdata, '')
return False
except select.error, e:
print >>sys.stderr, 'error: select failed:', e
return False
fd = r[0]
c = fd.read(1)
if c == '': # EOF
err_proc(proc, info_str + ': insufficient data read',
outdata, errdata)
return False
if fd == proc.stderr:
errdata += c
continue
outdata += c
if c != '\n':
line += c
continue
if line == expect:
ret = True
if not try_kill(proc):
ret = False
proc.wait()
return ret
def ping(self, param_str, to_board):
if to_board:
dir_str = 'to'
else:
dir_str = 'from'
print 'Testing: %s board' % dir_str
key = ', direction=' + dir_str
# Start Monitor on Board
info_str = 'Starting monitor'
monitor = self.start_monitor(info_str, not to_board)
if not monitor:
return False
info_str = 'Sending ping'
retcode = self.echo(info_str, key, to_board)
if retcode:
info_str = 'Checking monitor'
retcode = self.collect_monitor(monitor, info_str, key)
info_str = 'Kill monitor'
if not try_kill(monitor):
return False
return retcode
def run_one(self, speed, parity, stop_bits):
retcode = True
param_str = 'speed=\'%s\' parity=\'%s\', stop_bits=\'%s\'' % \
(speed, parity, stop_bits)
print 'Testing: ' + param_str
for to_board in [ True, False ]:
ret = self.ping(param_str, to_board)
if not ret:
retcode = False
return retcode
def run(self):
ok = 0
ng = 0
status = True
if not self.save_stty():
return False
for speed in ['115200', '9600']:
for parity in ['none', 'even']:
for stop_bits in ['1', '2']:
retval = self.run_one(speed, parity, stop_bits)
if retval:
ok = ok + 1
else:
ng = ng + 1
print "Test Complete: Passed=%d Failed=%d" % (ok, ng)
if ng != 0:
status = False
if not self.restore_stty():
status = False
return status
def usage():
    # Print the usage text and exit non-zero (fatal_err never returns).
    # BUGFIX: corrected user-visible typos ("Dipslay", "versbose",
    # duplicated "when") in the help text.
    fatal_err(
        "Usage: tty-ping.py [options] LOCAL_TTY \\\n" +
        "                   BOARD_HOSTNAME BOARD_USERNAME BOARD_TTY\\\n" +
        "    where:\n" +
        "\n"
        "        LOCAL_TTY:      TTY to use on local host\n"
        "        BOARD_HOSTNAME: Is the hostname of the board to connect to\n" +
        "        BOARD_USERNAME: Is the username to use when loging into the board\n" +
        "        BOARD_TTY:      TTY to use on board\n"
        "\n" +
        "    options:\n" +
        "        -h: Display this help message and exit\n" +
        "        -v: Be verbose\n" +
        "\n" +
        "    e.g:\n" +
        "        tty-ping.py /dev/ttyUSB0 armadillo800eva root /dev/ttySC1\n" +
        ""
    )
# Command-line entry point: parse options, then run the test matrix.
# NOTE(review): sys.argv always contains at least the program name, so this
# first check can never trigger; the real arity check is the "< 5" below.
if len(sys.argv) < 1:
    err("Too few arguments\n")
    usage()

try:
    opts, args = getopt.getopt(sys.argv[1:], "hv", [])
except getopt.GetoptError:
    err("Unknown arguments\n")
    usage()

if len(sys.argv) < 5:
    err("Too few arguments\n")
    usage()

for opt, arg in opts:
    if opt == '-h':
        usage();
    if opt == '-v':
        verbose = True

test = Test(*args)
retval = test.run()
if retval == False:
    exit(1)
strk/QGIS | scripts/appinfo2ui.py | 49 | 2095 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
/***************************************************************************
appinfo2cpp.py
-------------------
begin : 2018-09-24
copyright : (C) 2018 by Jürgen E. Fischer
email : jef at norbit dot de
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import sys
from xml.etree import ElementTree as et
from html import escape
# Collect all translatable strings from the appstream metadata and the
# .desktop file, then emit them as a pseudo-.ui file for lupdate.
# (A dict is used as an insertion-ordered set of unique strings.)
strings = {}

d = et.parse('linux/org.qgis.qgis.appdata.xml.in')
r = d.getroot()
for elem in ['name', 'summary', 'description']:
    for c in r.iter(elem):
        # Skip language-specific variants (they carry an xml:lang attribute);
        # only the untranslated source strings are wanted.
        if not c.attrib:
            children = list(c)
            # Leaf elements contribute their text; container elements (e.g.
            # description paragraphs) contribute their serialized children.
            t = c.text if not children else "".join([et.tostring(x).decode("utf-8") for x in children])
            strings[t] = 1

# Use a context manager so the file is closed even on error
# (the original paired open()/close() manually).
with open("linux/org.qgis.qgis.desktop.in", "r") as f:
    for line in f.readlines():
        line = line.strip()
        if line.startswith("Name="):
            strings[line[5:]] = 1
        elif line.startswith("GenericName="):
            strings[line[12:]] = 1

print("""\
<?xml version="1.0" encoding="UTF-8"?>
<!--
This is NOT a proper UI code. This file is only designed to be caught
by qmake and included in lupdate. It contains all translateable strings collected
by scripts/appinfo2ui.py.
-->
<ui version="4.0">
<class>appinfo</class>;
""")

for k in strings:
    print("<property><string>{}</string></property>".format(escape(k)))

print("</ui>")
| gpl-2.0 |
ychen820/microblog | src/lib/wtforms/ext/sqlalchemy/fields.py | 54 | 6679 | """
Useful form fields for use with SQLAlchemy ORM.
"""
from __future__ import unicode_literals
import operator
from wtforms import widgets
from wtforms.compat import text_type, string_types
from wtforms.fields import SelectFieldBase
from wtforms.validators import ValidationError
try:
from sqlalchemy.orm.util import identity_key
has_identity_key = True
except ImportError:
has_identity_key = False
__all__ = (
'QuerySelectField', 'QuerySelectMultipleField',
)
class QuerySelectField(SelectFieldBase):
    """
    Will display a select drop-down field to choose between ORM results in a
    sqlalchemy `Query`. The `data` property actually will store/keep an ORM
    model instance, not the ID. Submitting a choice which is not in the query
    will result in a validation error.

    This field only works for queries on models whose primary key column(s)
    have a consistent string representation. This means it mostly only works
    for those composed of string, unicode, and integer types. For the most
    part, the primary keys will be auto-detected from the model, alternately
    pass a one-argument callable to `get_pk` which can return a unique
    comparable key.

    The `query` property on the field can be set from within a view to assign
    a query per-instance to the field. If the property is not set, the
    `query_factory` callable passed to the field constructor will be called to
    obtain a query.

    Specify `get_label` to customize the label associated with each option. If
    a string, this is the name of an attribute on the model object to use as
    the label text. If a one-argument callable, this callable will be passed
    model instance and expected to return the label text. Otherwise, the model
    object's `__str__` or `__unicode__` will be used.

    If `allow_blank` is set to `True`, then a blank choice will be added to the
    top of the list. Selecting this choice will result in the `data` property
    being `None`. The label for this blank choice can be set by specifying the
    `blank_text` parameter.
    """
    widget = widgets.Select()

    def __init__(self, label=None, validators=None, query_factory=None,
                 get_pk=None, get_label=None, allow_blank=False,
                 blank_text='', **kwargs):
        super(QuerySelectField, self).__init__(label, validators, **kwargs)
        self.query_factory = query_factory

        if get_pk is None:
            if not has_identity_key:
                raise Exception('The sqlalchemy identity_key function could not be imported.')
            self.get_pk = get_pk_from_identity
        else:
            self.get_pk = get_pk

        # Normalize get_label into a one-argument callable.
        if get_label is None:
            self.get_label = lambda x: x
        elif isinstance(get_label, string_types):
            self.get_label = operator.attrgetter(get_label)
        else:
            self.get_label = get_label

        self.allow_blank = allow_blank
        self.blank_text = blank_text
        self.query = None
        self._object_list = None

    def _get_data(self):
        # Lazily resolve the raw submitted pk string (set by
        # process_formdata) to the matching model object from the query.
        if self._formdata is not None:
            for pk, obj in self._get_object_list():
                if pk == self._formdata:
                    self._set_data(obj)
                    break
        return self._data

    def _set_data(self, data):
        # Assigning data directly clears any pending raw form data.
        self._data = data
        self._formdata = None

    data = property(_get_data, _set_data)

    def _get_object_list(self):
        # Build and cache the (pk-string, object) pairs once per instance.
        if self._object_list is None:
            query = self.query or self.query_factory()
            get_pk = self.get_pk
            self._object_list = list((text_type(get_pk(obj)), obj) for obj in query)
        return self._object_list

    def iter_choices(self):
        # '__None' is the sentinel value used for the optional blank choice.
        if self.allow_blank:
            yield ('__None', self.blank_text, self.data is None)

        for pk, obj in self._get_object_list():
            yield (pk, self.get_label(obj), obj == self.data)

    def process_formdata(self, valuelist):
        if valuelist:
            if self.allow_blank and valuelist[0] == '__None':
                self.data = None
            else:
                # Defer resolution to _get_data (the query may not be set yet).
                self._data = None
                self._formdata = valuelist[0]

    def pre_validate(self, form):
        # Reject any submitted value that is not among the query results.
        data = self.data
        if data is not None:
            for pk, obj in self._get_object_list():
                if data == obj:
                    break
            else:
                raise ValidationError(self.gettext('Not a valid choice'))
        elif self._formdata or not self.allow_blank:
            raise ValidationError(self.gettext('Not a valid choice'))
class QuerySelectMultipleField(QuerySelectField):
    """
    Very similar to QuerySelectField with the difference that this will
    display a multiple select. The data property will hold a list with ORM
    model instances and will be an empty list when no value is selected.

    If any of the items in the data list or submitted form data cannot be
    found in the query, this will result in a validation error.
    """
    widget = widgets.Select(multiple=True)

    def __init__(self, label=None, validators=None, default=None, **kwargs):
        # Avoid a mutable default argument: build a fresh list per instance.
        if default is None:
            default = []

        super(QuerySelectMultipleField, self).__init__(label, validators, default=default, **kwargs)

        self._invalid_formdata = False

    def _get_data(self):
        # Resolve the set of submitted pk strings to model objects; any
        # leftover (unmatched) pk marks the whole submission invalid.
        formdata = self._formdata
        if formdata is not None:
            data = []
            for pk, obj in self._get_object_list():
                if not formdata:
                    break
                elif pk in formdata:
                    formdata.remove(pk)
                    data.append(obj)
            if formdata:
                self._invalid_formdata = True
            self._set_data(data)
        return self._data

    def _set_data(self, data):
        self._data = data
        self._formdata = None

    data = property(_get_data, _set_data)

    def iter_choices(self):
        for pk, obj in self._get_object_list():
            yield (pk, self.get_label(obj), obj in self.data)

    def process_formdata(self, valuelist):
        # Stored as a set: _get_data removes entries as they are matched.
        self._formdata = set(valuelist)

    def pre_validate(self, form):
        if self._invalid_formdata:
            raise ValidationError(self.gettext('Not a valid choice'))
        elif self.data:
            obj_list = list(x[1] for x in self._get_object_list())
            for v in self.data:
                if v not in obj_list:
                    raise ValidationError(self.gettext('Not a valid choice'))
def get_pk_from_identity(obj):
    """Derive a unique string key for *obj* from its SQLAlchemy identity."""
    _, key = identity_key(instance=obj)
    parts = (text_type(piece) for piece in key)
    return ':'.join(parts)
| bsd-3-clause |
Sbalbp/DIRAC | ConfigurationSystem/scripts/dirac-admin-add-resources.py | 2 | 16049 | #!/usr/bin/env python
########################################################################
# $HeadURL$
# File : dirac-admin-add-resources
# Author : Andrei Tsaregorodtsev
########################################################################
"""
Add resources from the BDII database for a given VO
"""
__RCSID__ = "$Id$"
import signal
import pprint
import re
import os
from urlparse import urlparse
from DIRAC.Core.Base import Script
def processScriptSwitches():
  """Register and parse the command-line switches, storing the results in
  the module globals vo, dry, doCEs and doSEs."""
  global vo, dry, doCEs, doSEs

  Script.registerSwitch( "V:", "vo=", "Virtual Organization" )
  Script.registerSwitch( "D", "dry", "Dry run" )
  Script.registerSwitch( "C", "ce", "Process Computing Elements" )
  Script.registerSwitch( "S", "se", "Process Storage Elements" )
  Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                       'Usage:',
                                       '  %s [option|cfgfile]' % Script.scriptName ] ) )
  Script.parseCommandLine( ignoreErrors = True )

  # Defaults, overridden below by any switches that were given.
  vo = ''
  dry = False
  doCEs = False
  doSEs = False

  for sw in Script.getUnprocessedSwitches():
    if sw[0] in ( "V", "vo" ):
      vo = sw[1]
    if sw[0] in ( "D", "dry" ):
      dry = True
    if sw[0] in ( "C", "ce" ):
      doCEs = True
    if sw[0] in ( "S", "se" ):
      doSEs = True
from DIRAC import gLogger, exit as DIRACExit, S_OK
from DIRAC.ConfigurationSystem.Client.Utilities import getGridCEs, getSiteUpdates, getCEsFromCS, \
getGridSRMs, getSRMUpdates
from DIRAC.Core.Utilities.SitesDIRACGOCDBmapping import getDIRACSiteName, getDIRACSesForSRM
from DIRAC.Core.Utilities.Subprocess import shellCall
from DIRAC.ConfigurationSystem.Client.CSAPI import CSAPI
from DIRAC.ConfigurationSystem.Client.Helpers.Path import cfgPath
from DIRAC.Core.Utilities.Grid import ldapService, getBdiiSEInfo
from DIRAC.Core.Utilities.Pfn import pfnparse
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getVOs, getVOOption
ceBdiiDict = None
def checkUnusedCEs():
  """Interactively add computing resources found in the BDII but missing from the CS.

  Gets the CEs already known to the CS, asks the BDII for any others visible to
  the VO, then walks the operator through naming new DIRAC sites and attaching
  CEs to them via the ``dirac-admin-add-site`` command. Honours the module-level
  ``dry`` flag (commands are shown but not executed).
  """
  global vo, dry, ceBdiiDict

  gLogger.notice( 'looking for new computing resources in the BDII database...' )

  result = getCEsFromCS()
  if not result['OK']:
    gLogger.error( 'ERROR: failed to get CEs from CS', result['Message'] )
    DIRACExit( -1 )
  knownCEs = result['Value']

  result = getGridCEs( vo, ceBlackList = knownCEs )
  if not result['OK']:
    gLogger.error( 'ERROR: failed to get CEs from BDII', result['Message'] )
    DIRACExit( -1 )
  ceBdiiDict = result['BdiiInfo']

  siteDict = result['Value']
  if siteDict:
    gLogger.notice( 'New resources available:\n' )
    for site in siteDict:
      diracSite = 'Unknown'
      result = getDIRACSiteName( site )
      if result['OK']:
        diracSite = ','.join( result['Value'] )
      ces = siteDict[site].keys()
      if ces:
        gLogger.notice( " %s, DIRAC site %s" % ( site, diracSite) )
        for ce in ces:
          gLogger.notice( ' '*4+ce )
          gLogger.notice( ' %s, %s' % ( siteDict[site][ce]['CEType'], '%s_%s_%s' % siteDict[site][ce]['System'] ) )
  else:
    gLogger.notice( 'No new resources available, exiting' )
    DIRACExit( 0 )

  inp = raw_input( "\nDo you want to add sites ? [default=yes] [yes|no]: ")
  inp = inp.strip()
  # BUGFIX: was "if not inp and inp.lower().startswith('n')" which can never be
  # true ('' does not start with 'n'), so answering "no" did not abort.
  if inp and inp.lower().startswith( 'n' ):
    gLogger.notice( 'Nothing else to be done, exiting' )
    DIRACExit( 0 )

  gLogger.notice( '\nAdding new sites/CEs interactively\n' )
  sitesAdded = []

  for site in siteDict:
    # Derive a two-letter country code from the CE host names ('gov' maps to 'us',
    # anything unrecognised falls back to 'xx').
    country = ''
    ces = siteDict[site].keys()
    for ce in ces:
      country = ce.strip().split('.')[-1].lower()
      if len( country ) == 2:
        break
      if country == 'gov':
        country = 'us'
        break
    if not country or len( country ) != 2:
      country = 'xx'

    result = getDIRACSiteName( site )
    if not result['OK']:
      # The GOC site is unknown to DIRAC: ask the operator for a DIRAC name.
      gLogger.notice( '\nThe site %s is not yet in the CS, give it a name' % site )
      diracSite = raw_input( '[help|skip|<domain>.<name>.%s]: ' % country )
      if diracSite.lower() == "skip":
        continue
      if diracSite.lower() == "help":
        gLogger.notice( '%s site details:' % site )
        for k, v in ceBdiiDict[site].items():
          if k != "CEs":
            gLogger.notice( '%s\t%s' % (k,v) )
        gLogger.notice( '\nEnter DIRAC site name in the form <domain>.<name>.%s\n' % country )
        diracSite = raw_input( '[<domain>.<name>.%s]: ' % country )
      try:
        # A valid DIRAC site name has exactly three dot-separated components.
        domain, siteName, country = diracSite.split('.')
      except ValueError:
        gLogger.error( 'ERROR: DIRAC site name does not follow convention: %s' % diracSite )
        continue
      diracSites = [diracSite]
    else:
      diracSites = result['Value']

    if len( diracSites ) > 1:
      gLogger.notice( 'Attention! GOC site %s corresponds to more than one DIRAC sites:' % site )
      gLogger.notice( str( diracSites ) )
      gLogger.notice( 'Please, pay attention which DIRAC site the new CEs will join\n' )

    # Ask, per CE and per candidate DIRAC site, whether the CE should be added.
    newCEs = {}
    addedCEs = []
    for ce in ces:
      ceType = siteDict[site][ce]['CEType']
      for diracSite in diracSites:
        if ce in addedCEs:
          continue
        yn = raw_input( "Add CE %s of type %s to %s? [default yes] [yes|no]: " % ( ce, ceType, diracSite ) )
        if yn == '' or yn.lower() == 'y':
          newCEs.setdefault( diracSite, [] )
          newCEs[diracSite].append( ce )
          addedCEs.append( ce )

    for diracSite in diracSites:
      if diracSite not in newCEs:
        continue
      cmd = "dirac-admin-add-site %s %s %s" % ( diracSite, site, ' '.join( newCEs[diracSite] ) )
      gLogger.notice( "\nNew site/CEs will be added with command:\n%s" % cmd )
      yn = raw_input( "Add it ? [default yes] [yes|no]: " )
      if not ( yn == '' or yn.lower() == 'y' ) :
        continue
      if dry:
        gLogger.notice( "Command is skipped in the dry run" )
        continue
      result = shellCall( 0, cmd )
      if not result['OK']:
        gLogger.error( 'Error while executing dirac-admin-add-site command' )
        if _abortRequested():
          _printAddedSites( sitesAdded )
          DIRACExit( 0 )
        continue
      exitStatus, stdData, errData = result[ 'Value' ]
      if exitStatus:
        gLogger.error( 'Error while executing dirac-admin-add-site command\n', '\n'.join( [stdData, errData] ) )
        if _abortRequested():
          _printAddedSites( sitesAdded )
          DIRACExit( 0 )
      else:
        sitesAdded.append( ( site, diracSite ) )
        gLogger.notice( stdData )

  if sitesAdded:
    _printAddedSites( sitesAdded )
  else:
    gLogger.notice( 'No new CEs were added this time' )


def _abortRequested():
  """Ask whether to abort after a failed add-site command (default: abort)."""
  yn = raw_input( "Do you want to continue ? [default no] [yes|no]: " )
  return yn == '' or yn.lower().startswith( 'n' )


def _printAddedSites( sitesAdded ):
  """Report the (GOC site, DIRAC site) pairs that had CEs added so far."""
  if sitesAdded:
    gLogger.notice( 'CEs were added at the following sites:' )
    for site, diracSite in sitesAdded:
      gLogger.notice( "%s\t%s" % ( site, diracSite ) )
def updateCS( changeSet ):
  """Display the accumulated CS changes and, unless running dry, apply and commit them.

  :param changeSet: iterable of ( section, option, oldValue, newValue ) tuples
  """
  global vo, dry, ceBdiiDict

  # Show the pending modifications in a stable, sorted order
  for change in sorted( changeSet ):
    pass
  sortedChanges = sorted( changeSet )
  if dry:
    gLogger.notice( 'The following needed changes are detected:\n' )
  else:
    gLogger.notice( 'We are about to make the following changes to CS:\n' )
  for change in sortedChanges:
    gLogger.notice( "%s/%s %s -> %s" % change )

  if dry:
    return

  csAPI = CSAPI()
  csAPI.initialize()
  result = csAPI.downloadCSData()
  if not result['OK']:
    gLogger.error( 'Failed to initialize CSAPI object', result['Message'] )
    DIRACExit( -1 )
  for section, option, value, new_value in changeSet:
    # Unset/placeholder values are created fresh; real values are modified
    if value == 'Unknown' or not value:
      csAPI.setOption( cfgPath( section, option ), new_value )
    else:
      csAPI.modifyValue( cfgPath( section, option ), new_value )
  yn = raw_input( 'Do you want to commit changes to CS ? [default yes] [yes|no]: ' )
  if yn == '' or yn.lower().startswith( 'y' ):
    result = csAPI.commit()
    if not result['OK']:
      gLogger.error( "Error while commit to CS", result['Message'] )
    else:
      gLogger.notice( "Successfully committed %d changes to CS" % len( changeSet ) )
def updateSites():
  """Fetch the pending site/CE updates derived from the BDII info and hand
  them over to updateCS()."""
  global vo, dry, ceBdiiDict

  result = getSiteUpdates( vo, bdiiInfo = ceBdiiDict )
  if not result['OK']:
    gLogger.error( 'Failed to get site updates', result['Message'] )
    DIRACExit( -1 )

  updateCS( result['Value'] )
def checkUnusedSEs():
  """Interactively add SRM storage elements found in the BDII but not yet used in the CS.

  For every unused SRM v2 endpoint, asks the operator whether to create a new
  DIRAC SE, accumulates the corresponding CS option changes, and commits them
  via CSAPI (unless running with the ``dry`` flag).

  :return: S_OK()
  """
  global vo, dry

  result = getGridSRMs( vo, unUsed = True )
  if not result['OK']:
    gLogger.error( 'Failed to look up SRMs in BDII', result['Message'] )
    # BUGFIX: the original fell through here with a stale `result`, crashing later
    DIRACExit( -1 )
  siteSRMDict = result['Value']

  # Evaluate VOs known to the CS (fall back to the current VO only)
  result = getVOs()
  if result['OK']:
    csVOs = set( result['Value'] )
  else:
    csVOs = set( [vo] )

  changeSetFull = set()

  for site in siteSRMDict:
    for gridSE in siteSRMDict[site]:
      changeSet = set()
      seDict = siteSRMDict[site][gridSE]['SE']
      srmDict = siteSRMDict[site][gridSE]['SRM']
      # Only SRM v2 services are supported
      version = srmDict.get( 'GlueServiceVersion', '' )
      if not ( version and version.startswith( '2' ) ):
        gLogger.debug( 'Skipping SRM service with version %s' % version )
        continue

      result = getDIRACSiteName( site )
      if not result['OK']:
        gLogger.notice( 'Unused se %s is detected at unused site %s' % ( gridSE, site ) )
        gLogger.notice( 'Consider adding site %s to the DIRAC CS' % site )
        continue
      diracSites = result['Value']

      yn = raw_input( '\nDo you want to add new SRM SE %s at site(s) %s ? default yes [yes|no]: ' % ( gridSE, str( diracSites ) ) )
      if yn and not yn.lower().startswith( 'y' ):
        continue

      if len( diracSites ) > 1:
        # The GOC site maps to several DIRAC sites: let the operator choose
        prompt = 'Which DIRAC site the new SE should be attached to ?'
        for i, s in enumerate( diracSites ):
          prompt += '\n[%d] %s' % ( i, s )
        prompt += '\nEnter your choice number: '
        inp = raw_input( prompt )
        try:
          ind = int( inp )
        except ValueError:
          gLogger.notice( 'Can not interpret your choice: %s, try again later' % inp )
          continue
        diracSite = diracSites[ind]
      else:
        diracSite = diracSites[0]

      domain, siteName, country = diracSite.split( '.' )
      recName = '%s-disk' % siteName
      inp = raw_input( 'Give a DIRAC name to the grid SE %s, default %s : ' % ( gridSE, recName ) )
      diracSEName = inp
      if not inp:
        diracSEName = recName

      gLogger.notice( 'Adding new SE %s at site %s' % ( diracSEName, diracSite ) )
      seSection = cfgPath( '/Resources/StorageElements', diracSEName )
      changeSet.add( ( seSection, 'BackendType', seDict.get( 'GlueSEImplementationName', 'Unknown' ) ) )
      changeSet.add( ( seSection, 'Description', seDict.get( 'GlueSEName', 'Unknown' ) ) )
      bdiiVOs = set( [ re.sub( '^VO:', '', rule ) for rule in srmDict.get( 'GlueServiceAccessControlBaseRule', [] ) ] )
      seVOs = csVOs.intersection( bdiiVOs )
      changeSet.add( ( seSection, 'VO', ','.join( seVOs ) ) )
      accessSection = cfgPath( seSection, 'AccessProtocol.1' )
      changeSet.add( ( accessSection, 'Protocol', 'srm' ) )
      changeSet.add( ( accessSection, 'ProtocolName', 'SRM2' ) )
      endPoint = srmDict.get( 'GlueServiceEndpoint', '' )
      parsedURL = urlparse( endPoint )
      host = parsedURL.hostname
      # BUGFIX: the original read `result['Value']['Port']` from the stale
      # getDIRACSiteName result (a list), which raised TypeError. Take the port
      # from the SRM endpoint URL instead.
      port = str( parsedURL.port ) if parsedURL.port else ''
      changeSet.add( ( accessSection, 'Host', host ) )
      changeSet.add( ( accessSection, 'Port', port ) )
      changeSet.add( ( accessSection, 'Access', 'remote' ) )
      voPathSection = cfgPath( accessSection, 'VOPath' )
      if 'VOPath' in seDict:
        path = seDict['VOPath']
        voFromPath = os.path.basename( path )
        if voFromPath != diracVO:
          gLogger.notice( '\n!!! Warning: non-conventional VO path: %s\n' % path )
          changeSet.add( ( voPathSection, diracVO, path ) )
        path = os.path.dirname( path )
      else:
        # Try to guess the Path from the storage host domain
        domain = '.'.join( host.split( '.' )[-2:] )
        path = '/dpm/%s/home' % domain
      changeSet.add( ( accessSection, 'Path', path ) )
      changeSet.add( ( accessSection, 'SpaceToken', '' ) )
      changeSet.add( ( accessSection, 'WSUrl', '/srm/managerv2?SFN=' ) )

      gLogger.notice( 'SE %s will be added with the following parameters' % diracSEName )
      for entry in sorted( changeSet ):
        gLogger.notice( entry )
      yn = raw_input( 'Do you want to add new SE %s ? default yes [yes|no]: ' % diracSEName )
      if not yn or yn.lower().startswith( 'y' ):
        changeSetFull = changeSetFull.union( changeSet )

  if dry:
    if changeSetFull:
      gLogger.notice( 'Skipping commit of the new SE data in a dry run' )
    else:
      gLogger.notice( "No new SE to be added" )
    return S_OK()

  if changeSetFull:
    csAPI = CSAPI()
    csAPI.initialize()
    result = csAPI.downloadCSData()
    if not result['OK']:
      gLogger.error( 'Failed to initialize CSAPI object', result['Message'] )
      DIRACExit( -1 )
    for section, option, value in sorted( changeSetFull ):
      csAPI.setOption( cfgPath( section, option ), value )
    yn = raw_input( 'New SE data is accumulated\n Do you want to commit changes to CS ? default yes [yes|no]: ' )
    if not yn or yn.lower().startswith( 'y' ):
      result = csAPI.commit()
      if not result['OK']:
        gLogger.error( "Error while commit to CS", result['Message'] )
      else:
        gLogger.notice( "Successfully committed %d changes to CS" % len( changeSetFull ) )
  else:
    gLogger.notice( "No new SE to be added" )

  return S_OK()
def updateSEs():
  """Fetch the pending SRM storage-element updates and hand them over to updateCS()."""
  global vo, dry

  result = getSRMUpdates( vo )
  if not result['OK']:
    gLogger.error( 'Failed to get SRM updates', result['Message'] )
    DIRACExit( -1 )

  updateCS( result['Value'] )
def handler( signum, frame ):
  """Signal handler for SIGTERM/SIGINT: announce the forced exit and leave with -1."""
  gLogger.notice( '\nExit is forced, bye...' )
  DIRACExit( -1 )
if __name__ == "__main__":

  # Make SIGTERM/Ctrl-C exit cleanly through handler()
  signal.signal( signal.SIGTERM, handler )
  signal.signal( signal.SIGINT, handler )

  # Module-level state shared with the worker functions via `global`
  vo = ''
  dry = False
  doCEs = False
  doSEs = False
  ceBdiiDict = None

  processScriptSwitches()

  if not vo:
    gLogger.error( 'No VO specified' )
    DIRACExit( -1 )

  # Keep the DIRAC VO name; `vo` becomes the VOMS name used for BDII queries
  diracVO = vo
  vo = getVOOption( vo, 'VOMSName', vo )

  if doCEs:
    yn = raw_input( 'Do you want to check/add new sites to CS ? [default yes] [yes|no]: ' )
    yn = yn.strip()
    if yn == '' or yn.lower().startswith( 'y' ):
      checkUnusedCEs()

    yn = raw_input( 'Do you want to update CE details in the CS ? [default yes] [yes|no]: ' )
    yn = yn.strip()
    if yn == '' or yn.lower().startswith( 'y' ):
      updateSites()

  if doSEs:
    yn = raw_input( 'Do you want to check/add new storage elements to CS ? [default yes] [yes|no]: ' )
    yn = yn.strip()
    if yn == '' or yn.lower().startswith( 'y' ):
      result = checkUnusedSEs()

    yn = raw_input( 'Do you want to update SE details in the CS ? [default yes] [yes|no]: ' )
    yn = yn.strip()
    if yn == '' or yn.lower().startswith( 'y' ):
      updateSEs()
| gpl-3.0 |
ContextLogic/eventmaster | plugin_scripts/flush_to_es.py | 1 | 5132 | import argparse
from collections import defaultdict
import datetime
import requests
import socket
import sys
import time
import ujson
from elasticsearch import Elasticsearch
from eventmaster_pb2 import *
from eventmaster_pb2_grpc import *
BATCH_LIMIT = 200
parser = argparse.ArgumentParser(description='Configuration settings')
parser.add_argument('--es_service_name',
help='Service name of elasticsearch client (used for service lookup')
parser.add_argument('--es_ips', nargs='+', help='Ip addresses of elasticsearch')
parser.add_argument('--es_port', help='Port of elasticsearch')
parser.add_argument('--em_addr', help='Addr of Eventmaster server')
parser.add_argument('--start_time', help='Unix timestamp of time to start flushing events')
opts = parser.parse_args()
if opts.em_addr is None:
sys.stderr.write('No Eventmaster address provided')
sys.exit(1)
if opts.es_port is None:
sys.stderr.write('Must specify port of Elasticsearch')
sys.exit(1)
# use service discovery to find ip addrs of elasticsearch
if opts.es_service_name is not None:
try:
resp = requests.get(
"http://169.254.169.254/latest/meta-data/placement/availability-zone",
timeout=5)
dc = resp.content.strip() % "%s."
except Exception as e:
print "Error getting dc", e
print "Using empty string as dc"
dc = ""
service_name = "%s.service.%sconsul." % (opts.es_service_name, dc)
addrs = socket.getaddrinfo(service_name, 80)
ips = list()
for addr in addrs:
sockaddr = addr[4]
ips.append(sockaddr[0])
elif opts.es_ips is not None:
ips = opts.es_ips
else:
sys.stederr.write('Must provide service name for servicelookup or ip addrs of elasticsearch')
sys.exit(1)
es = Elasticsearch(hosts=[{'host':ip, 'port':opts.es_port} for ip in ips])
print [{'host':ip, 'port':opts.es_port} for ip in ips]
channel = grpc.insecure_channel(opts.em_addr)
stub = EventMasterStub(channel)
def get_index(event_time):
suffix = datetime.datetime.fromtimestamp(event_time).strftime('%Y-%m-%d')
return "eventmaster-%s" % suffix
# Starting point for the flush window: either the user-supplied timestamp or
# 30 minutes in the past.
if opts.start_time is not None:
    start_time = int(opts.start_time)
else:
    # start flushing events from 30 mins ago by default
    start_time = int(time.time())-1800

# Main loop: repeatedly pull event ids from Eventmaster in [start_time, now]
# windows, fetch each event, group them by daily index and bulk-insert into
# Elasticsearch. Any unhandled exception terminates the loop.
while 1:
    try:
        if int(time.time()) - start_time < 5:
            # sleep for 5 seconds to prevent spamming
            print "All caught up, sleeping for 5 seconds..."
            time.sleep(5)

        print "Flushing events from time:", start_time

        ids = list()
        cur_end_time=int(time.time())
        # Fetch up to BATCH_LIMIT event ids in ascending time order
        for id in stub.GetEventIds(TimeQuery(start_event_time=start_time,
            end_event_time=cur_end_time, limit=BATCH_LIMIT, ascending=True)):
            ids.append(id.event_id)

        print "Retrieved", len(ids), "items"

        # Map: index name -> list of event documents destined for that index
        indexed_events = defaultdict(list)

        for id in ids:
            event = {}
            result = stub.GetEventById(EventId(event_id=id))
            # Skip missing events and RPC failures
            if result is None:
                continue
            if isinstance(result, grpc.RpcError):
                continue
            event['event_id'] = result.event_id
            event['topic_name'] = result.topic_name
            event['dc'] = result.dc
            event['host'] = result.host
            event['event_time'] = result.event_time
            # Optional fields are only copied when present on the proto message
            if result.parent_event_id is not None:
                event['parent_event_id'] = result.parent_event_id
            if result.tag_set is not None:
                event['tag_set'] = list()
                for tag in result.tag_set:
                    event['tag_set'].append(tag)
            if result.target_host_set is not None:
                event['target_host_set'] = list()
                for thost in result.target_host_set:
                    event['target_host_set'].append(thost)
            if result.user is not None:
                event['user'] = result.user
            if result.data is not None:
                event['data'] = ujson.loads(result.data)
            indexed_events[get_index(result.event_time)].append(event)

        # no more elements left in current time frame
        if len(ids) < BATCH_LIMIT:
            start_time = cur_end_time

        for index, events in indexed_events.iteritems():
            print "Indexing %d events in index %s" % (len(events), index)
            if not es.indices.exists(index):
                res = es.indices.create(index=index)
                print "Create index response:", res
            # Bulk API body alternates action metadata and document source
            bulk_data = []
            for evt in events:
                op_dict = {
                    "index": {
                        "_index": index,
                        "_type": "event",
                        "_id": evt['event_id']
                    }
                }
                bulk_data.append(op_dict)
                bulk_data.append(evt)
                # Advance the window past the newest event actually indexed
                start_time = max(start_time, evt['event_time'])
            res = es.bulk(index=index, body=bulk_data)
            print "Bulk Response:", res
    except Exception as e:
        sys.stderr.write(str(e))
        break
| mit |
michael-dev2rights/ansible | contrib/inventory/mdt_dynamic_inventory.py | 117 | 4538 | #!/usr/bin/env python
# (c) 2016, Julian Barnett <jbarnett@tableau.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
'''
MDT external inventory script
=================================
author: J Barnett 06/23/2016 01:15
maintainer: J Barnett (github @jbarnett1981)
'''
import argparse
import json
import pymssql
try:
import configparser
except ImportError:
import ConfigParser as configparser
class MDTInventory(object):
    """Ansible dynamic inventory backed by an MDT (Microsoft Deployment
    Toolkit) SQL Server database. Settings come from ``mdt.ini``; hosts and
    their roles are printed as JSON in the Ansible inventory format."""

    def __init__(self):
        ''' Main execution path '''
        self.conn = None

        # Initialize empty inventory
        self.inventory = self._empty_inventory()

        # Read CLI arguments
        self.read_settings()
        self.parse_cli_args()

        # Get Hosts
        if self.args.list:
            self.get_hosts()

        # Get specific host vars
        if self.args.host:
            self.get_hosts(self.args.host)

    def _connect(self, query):
        '''
        Connect to MDT and dump the result of *query* into self.mdt_dump.
        '''
        if not self.conn:
            self.conn = pymssql.connect(server=self.mdt_server + "\\" + self.mdt_instance, user=self.mdt_user, password=self.mdt_password,
                                        database=self.mdt_database)
        cursor = self.conn.cursor()
        cursor.execute(query)
        self.mdt_dump = cursor.fetchall()
        self.conn.close()
        # BUGFIX: reset the handle so a subsequent query reconnects instead of
        # reusing the closed connection.
        self.conn = None

    def get_hosts(self, hostname=False):
        '''
        Get hosts (optionally a single one by name) from the MDT database and
        print them as an Ansible inventory JSON document.
        '''
        if hostname:
            query = ("SELECT t1.ID, t1.Description, t1.MacAddress, t2.Role "
                     "FROM ComputerIdentity as t1 join Settings_Roles as t2 on t1.ID = t2.ID where t1.Description = '%s'" % hostname)
        else:
            query = 'SELECT t1.ID, t1.Description, t1.MacAddress, t2.Role FROM ComputerIdentity as t1 join Settings_Roles as t2 on t1.ID = t2.ID'
        self._connect(query)

        # Configure to group name configured in Ansible Tower for this inventory
        groupname = self.mdt_groupname

        # Initialize empty host list
        hostlist = []

        # Parse through db dump and populate inventory
        for hosts in self.mdt_dump:
            self.inventory['_meta']['hostvars'][hosts[1]] = {'id': hosts[0], 'name': hosts[1], 'mac': hosts[2], 'role': hosts[3]}
            hostlist.append(hosts[1])
        self.inventory[groupname] = hostlist

        # Print it all out
        print(json.dumps(self.inventory, indent=2))

    def _empty_inventory(self):
        '''
        Create empty inventory dictionary
        '''
        return {"_meta": {"hostvars": {}}}

    def read_settings(self):
        '''
        Read connection settings from the mdt.ini file.

        Optional settings (credentials, Tower group name) default to None so a
        missing option surfaces as a clear connection/lookup problem instead of
        an AttributeError later on.
        '''
        # BUGFIX: SafeConfigParser was removed in Python 3.12; ConfigParser is
        # the equivalent class on both supported interpreters.
        config = configparser.ConfigParser()
        config.read('mdt.ini')

        # MDT Server and instance and database (required)
        self.mdt_server = config.get('mdt', 'server')
        self.mdt_instance = config.get('mdt', 'instance')
        self.mdt_database = config.get('mdt', 'database')

        # MDT Login credentials (optional)
        self.mdt_user = None
        self.mdt_password = None
        if config.has_option('mdt', 'user'):
            self.mdt_user = config.get('mdt', 'user')
        if config.has_option('mdt', 'password'):
            self.mdt_password = config.get('mdt', 'password')

        # Group name in Tower (optional; BUGFIX: guard the section too, since
        # has_option raises NoSectionError when [tower] is absent)
        self.mdt_groupname = None
        if config.has_section('tower') and config.has_option('tower', 'groupname'):
            self.mdt_groupname = config.get('tower', 'groupname')

    def parse_cli_args(self):
        '''
        Command line argument processing
        '''
        parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on MDT')
        parser.add_argument('--list', action='store_true', default=False, help='List instances')
        parser.add_argument('--host', action='store', help='Get all the variables about a specific instance')
        self.args = parser.parse_args()
if __name__ == "__main__":
    # Run the script: constructing MDTInventory parses the CLI arguments and
    # prints the requested inventory JSON as a side effect.
    MDTInventory()
| gpl-3.0 |
rosmo/ansible | lib/ansible/module_utils/oracle/oci_utils.py | 29 | 81039 | # Copyright (c) 2017, 2018, 2019 Oracle and/or its affiliates.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import
import logging
import logging.config
import os
import tempfile
from datetime import datetime
from operator import eq
import time
try:
import yaml
import oci
from oci.constants import HEADER_NEXT_PAGE
from oci.exceptions import (
InvalidConfig,
InvalidPrivateKey,
MissingPrivateKeyPassphrase,
ConfigFileNotFound,
ServiceError,
MaximumWaitTimeExceeded,
)
from oci.identity.identity_client import IdentityClient
from oci.object_storage.models import CreateBucketDetails
from oci.object_storage.models import UpdateBucketDetails
from oci.retry import RetryStrategyBuilder
from oci.util import to_dict, Sentinel
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
from ansible.module_utils._text import to_bytes
from ansible.module_utils.six import iteritems
# Version of the OCI Ansible modules, appended to the SDK user agent string.
__version__ = "1.6.0-dev"

# Default upper bound (seconds) when waiting for resource lifecycle transitions.
MAX_WAIT_TIMEOUT_IN_SECONDS = 1200

# If a resource is in one of these states it would be considered inactive
DEAD_STATES = [
    "TERMINATING",
    "TERMINATED",
    "FAULTY",
    "FAILED",
    "DELETING",
    "DELETED",
    "UNKNOWN_ENUM_VALUE",
    "DETACHING",
    "DETACHED",
]

# If a resource is in one of these states it would be considered available
DEFAULT_READY_STATES = [
    "AVAILABLE",
    "ACTIVE",
    "RUNNING",
    "PROVISIONED",
    "ATTACHED",
    "ASSIGNED",
    "SUCCEEDED",
    "PENDING_PROVIDER",
]

# If a resource is in one of these states, it would be considered deleted
DEFAULT_TERMINATED_STATES = ["TERMINATED", "DETACHED", "DELETED"]
def get_common_arg_spec(supports_create=False, supports_wait=False):
    """
    Return the common set of module arguments for all OCI cloud modules.
    :param supports_create: Whether to include the idempotent-create options (key_by, force_create).
    :param supports_wait: Whether to include the wait-for-completion options (wait, wait_timeout, wait_until).
    :return: A dict with applicable module options.
    """
    # NOTE: This method is used by most OCI ansible resource modules during initialization. When making changes
    # here, ensure that no `oci` python sdk dependencies are introduced, so that modules can still check for the
    # absence of the OCI Python SDK and fail with an appropriate message.
    spec = {
        "config_file_location": {"type": "str"},
        "config_profile_name": {"type": "str", "default": "DEFAULT"},
        "api_user": {"type": "str"},
        "api_user_fingerprint": {"type": "str", "no_log": True},
        "api_user_key_file": {"type": "str"},
        "api_user_key_pass_phrase": {"type": "str", "no_log": True},
        "auth_type": {
            "type": "str",
            "required": False,
            "choices": ["api_key", "instance_principal"],
            "default": "api_key",
        },
        "tenancy": {"type": "str"},
        "region": {"type": "str"},
    }

    if supports_create:
        spec["key_by"] = {"type": "list"}
        spec["force_create"] = {"type": "bool", "default": False}

    if supports_wait:
        spec["wait"] = {"type": "bool", "default": True}
        spec["wait_timeout"] = {"type": "int", "default": MAX_WAIT_TIMEOUT_IN_SECONDS}
        spec["wait_until"] = {"type": "str"}

    return spec
def get_facts_module_arg_spec(filter_by_name=False):
    """Return the argument spec shared by OCI facts modules: the common options
    plus a single name filter ("name" or "display_name")."""
    # NOTE: keep this free of `oci` sdk dependencies so fact modules can still
    # report a missing SDK gracefully (same constraint as get_common_arg_spec).
    spec = get_common_arg_spec()
    filter_option = "name" if filter_by_name else "display_name"
    spec[filter_option] = dict(type="str")
    return spec
def get_oci_config(module, service_client_class=None):
    """Return the OCI configuration to use for all OCI API calls. The effective OCI configuration is derived by merging
    any overrides specified for configuration attributes through Ansible module options or environment variables. The
    order of precedence for deriving the effective configuration dict is:
    1. If a config file is provided, use that to setup the initial config dict.
    2. If a config profile is specified, use that config profile to setup the config dict.
    3. For each authentication attribute, check if an override is provided either through
       a. Ansible Module option
       b. Environment variable
    and override the value in the config dict in that order.

    :param module: AnsibleModule carrying the common OCI options.
    :param service_client_class: optional client class; when it is IdentityClient
        the region is redirected to the tenancy's home region (see below).
    :return: the effective configuration dict.
    """
    config = {}

    # Resolve the config file path: module option > OCI_CONFIG_FILE env var > default location
    config_file = module.params.get("config_file_location")
    _debug("Config file through module options - {0} ".format(config_file))
    if not config_file:
        if "OCI_CONFIG_FILE" in os.environ:
            config_file = os.environ["OCI_CONFIG_FILE"]
            _debug(
                "Config file through OCI_CONFIG_FILE environment variable - {0}".format(
                    config_file
                )
            )
        else:
            config_file = "~/.oci/config"
            _debug("Config file (fallback) - {0} ".format(config_file))

    # Resolve the profile: module option > OCI_CONFIG_PROFILE env var > "DEFAULT"
    config_profile = module.params.get("config_profile_name")
    if not config_profile:
        if "OCI_CONFIG_PROFILE" in os.environ:
            config_profile = os.environ["OCI_CONFIG_PROFILE"]
        else:
            config_profile = "DEFAULT"
    try:
        config = oci.config.from_file(
            file_location=config_file, profile_name=config_profile
        )
    except (
        ConfigFileNotFound,
        InvalidConfig,
        InvalidPrivateKey,
        MissingPrivateKeyPassphrase,
    ) as ex:
        if not _is_instance_principal_auth(module):
            # When auth_type is not instance_principal, config file is required
            module.fail_json(msg=str(ex))
        else:
            _debug(
                "Ignore {0} as the auth_type is set to instance_principal".format(
                    str(ex)
                )
            )
            # if instance_principal auth is used, an empty 'config' map is used below.

    config["additional_user_agent"] = "Oracle-Ansible/{0}".format(__version__)
    # Merge any overrides through other IAM options
    _merge_auth_option(
        config,
        module,
        module_option_name="api_user",
        env_var_name="OCI_USER_ID",
        config_attr_name="user",
    )
    _merge_auth_option(
        config,
        module,
        module_option_name="api_user_fingerprint",
        env_var_name="OCI_USER_FINGERPRINT",
        config_attr_name="fingerprint",
    )
    _merge_auth_option(
        config,
        module,
        module_option_name="api_user_key_file",
        env_var_name="OCI_USER_KEY_FILE",
        config_attr_name="key_file",
    )
    _merge_auth_option(
        config,
        module,
        module_option_name="api_user_key_pass_phrase",
        env_var_name="OCI_USER_KEY_PASS_PHRASE",
        config_attr_name="pass_phrase",
    )
    _merge_auth_option(
        config,
        module,
        module_option_name="tenancy",
        env_var_name="OCI_TENANCY",
        config_attr_name="tenancy",
    )
    _merge_auth_option(
        config,
        module,
        module_option_name="region",
        env_var_name="OCI_REGION",
        config_attr_name="region",
    )

    # Redirect calls to home region for IAM service.
    do_not_redirect = module.params.get(
        "do_not_redirect_to_home_region", False
    ) or os.environ.get("OCI_IDENTITY_DO_NOT_REDIRECT_TO_HOME_REGION")
    if service_client_class == IdentityClient and not do_not_redirect:
        _debug("Region passed for module invocation - {0} ".format(config["region"]))
        identity_client = IdentityClient(config)
        region_subscriptions = identity_client.list_region_subscriptions(
            config["tenancy"]
        ).data
        # Replace the region in the config with the home region.
        # (Destructuring assignment assumes exactly one home region subscription.)
        [config["region"]] = [
            rs.region_name for rs in region_subscriptions if rs.is_home_region is True
        ]
        _debug(
            "Setting region in the config to home region - {0} ".format(
                config["region"]
            )
        )

    return config
def create_service_client(module, service_client_class):
    """
    Creates a service client using the common module options provided by the user.
    :param module: An AnsibleModule that represents user provided options for a Task
    :param service_client_class: A class that represents a client to an OCI Service
    :return: A fully configured client
    """
    config = get_oci_config(module, service_client_class)
    kwargs = {}

    if _is_instance_principal_auth(module):
        # Instance-principal auth signs requests with the compute instance's
        # certificates instead of a user API key.
        try:
            signer = oci.auth.signers.InstancePrincipalsSecurityTokenSigner()
        except Exception as ex:
            message = (
                "Failed retrieving certificates from localhost. Instance principal based authentication is only"
                "possible from within OCI compute instances. Exception: {0}".format(
                    str(ex)
                )
            )
            module.fail_json(msg=message)

        kwargs["signer"] = signer

    # XXX: Validate configuration -- this may be redundant, as all Client constructors perform a validation
    try:
        oci.config.validate_config(config, **kwargs)
    except oci.exceptions.InvalidConfig as ic:
        module.fail_json(
            msg="Invalid OCI configuration. Exception: {0}".format(str(ic))
        )

    # Create service client class with the signer
    client = service_client_class(config, **kwargs)

    return client
def _is_instance_principal_auth(module):
# check if auth type is overridden via module params
instance_principal_auth = (
"auth_type" in module.params
and module.params["auth_type"] == "instance_principal"
)
if not instance_principal_auth:
instance_principal_auth = (
"OCI_ANSIBLE_AUTH_TYPE" in os.environ
and os.environ["OCI_ANSIBLE_AUTH_TYPE"] == "instance_principal"
)
return instance_principal_auth
def _merge_auth_option(
    config, module, module_option_name, env_var_name, config_attr_name
):
    """Merge the values for an authentication attribute from ansible module options and
    environment variables with the values specified in a configuration file.

    Precedence: module option, then environment variable, then whatever is
    already present in *config*.
    """
    _debug("Merging {0}".format(module_option_name))

    override = module.params.get(module_option_name)
    _debug(
        "\t Ansible module option {0} = {1}".format(module_option_name, override)
    )
    if not override and env_var_name in os.environ:
        override = os.environ[env_var_name]
        _debug(
            "\t Environment variable {0} = {1}".format(env_var_name, override)
        )

    # An authentication attribute has been provided through an env-variable or an ansible
    # option and must override the corresponding attribute's value specified in the
    # config file [profile].
    if override:
        _debug(
            "Updating config attribute {0} -> {1} ".format(
                config_attr_name, override
            )
        )
        config.update({config_attr_name: override})
def bucket_details_factory(bucket_details_type, module):
    """Build a bucket details model populated from the module parameters.

    :param bucket_details_type: "create" or "update" — selects the OCI details model.
    :param module: AnsibleModule providing compartment_id, name, public_access_type
        and metadata params.
    :return: a populated CreateBucketDetails or UpdateBucketDetails instance.
    """
    bucket_details = None
    if bucket_details_type == "create":
        bucket_details = CreateBucketDetails()
    elif bucket_details_type == "update":
        bucket_details = UpdateBucketDetails()
    # NOTE(review): any other bucket_details_type leaves bucket_details as None
    # and the assignments below raise AttributeError — callers only pass
    # "create" or "update".
    bucket_details.compartment_id = module.params["compartment_id"]
    bucket_details.name = module.params["name"]
    bucket_details.public_access_type = module.params["public_access_type"]
    bucket_details.metadata = module.params["metadata"]

    return bucket_details
def filter_resources(all_resources, filter_params):
    """Filter a list of resources by attribute equality.

    A resource is kept only when it matches ALL of the (attribute, value)
    pairs in *filter_params*. With a falsy *filter_params* the input list is
    returned unchanged.

    :param all_resources: list of resource objects.
    :param filter_params: dict mapping attribute names to required values, or None.
    :return: list of resources matching every filter, in original order.
    """
    if not filter_params:
        return all_resources
    # BUGFIX: the previous nested comprehension applied OR semantics across
    # filter keys and emitted a resource once per matching key; a resource must
    # match every filter and appear at most once.
    return [
        resource
        for resource in all_resources
        if all(
            getattr(resource, key) == value
            for key, value in filter_params.items()
        )
    ]
def list_all_resources(target_fn, **kwargs):
    """
    Return all resources after paging through all results returned by target_fn. If a `display_name` or `name` is
    provided as a kwarg, then only resources matching the specified name are returned.
    :param target_fn: The target OCI SDK paged function to call
    :param kwargs: All arguments that the OCI SDK paged function expects
    :return: List of all objects returned by target_fn
    :raises ServiceError: When the Service returned an Error response
    :raises MaximumWaitTimeExceededError: When maximum wait time is exceeded while invoking target_fn
    """
    filter_params = None
    try:
        response = call_with_backoff(target_fn, **kwargs)
    except ValueError as ex:
        # Some SDK list* operations do not accept display_name/name kwargs; in
        # that case remember the filter, drop the kwarg and retry, then filter
        # client-side at the end.
        if "unknown kwargs" in str(ex):
            if "display_name" in kwargs:
                if kwargs["display_name"]:
                    filter_params = {"display_name": kwargs["display_name"]}
                del kwargs["display_name"]
            elif "name" in kwargs:
                if kwargs["name"]:
                    filter_params = {"name": kwargs["name"]}
                del kwargs["name"]
        response = call_with_backoff(target_fn, **kwargs)

    existing_resources = response.data
    # Follow opc-next-page headers until the listing is exhausted.
    while response.has_next_page:
        kwargs.update(page=response.headers.get(HEADER_NEXT_PAGE))
        response = call_with_backoff(target_fn, **kwargs)
        existing_resources += response.data

    # If the underlying SDK Service list* method doesn't support filtering by name or display_name, filter the resources
    # and return the matching list of resources
    return filter_resources(existing_resources, filter_params)
def _debug(s):
    """Log message *s* at DEBUG level through the shared 'oci_utils' logger."""
    logger = get_logger("oci_utils")
    logger.debug(s)
def get_logger(module_name):
    """Return a logger named *module_name*, making sure logging is configured first."""
    return setup_logging().getLogger(module_name)
def setup_logging(
    default_level="INFO",
):
    """Configure file-based logging for the OCI modules and return the logging module.

    The log directory and level may be overridden through the ``LOG_PATH`` and
    ``LOG_LEVEL`` environment variables; otherwise the system temp directory and
    ``default_level`` are used. The log file is ``oci_ansible_module.log``.
    """
    log_dir = os.getenv("LOG_PATH", tempfile.gettempdir())
    level_name = os.getenv("LOG_LEVEL", default_level)
    logging.basicConfig(
        filename=os.path.join(log_dir, "oci_ansible_module.log"),
        filemode="a",
        level=logging.getLevelName(level_name),
    )
    return logging
def check_and_update_attributes(
    target_instance, attr_name, input_value, existing_value, changed
):
    """
    Compare a user-provided attribute value against the existing resource's value and
    set the appropriate one on *target_instance*.

    :param target_instance: The instance which contains the attribute to be set.
    :param attr_name: Name of the attribute whose value is compared.
    :param input_value: The value of the attribute provided by the user.
    :param existing_value: The value of the attribute in the existing resource.
    :param changed: Running flag indicating whether any earlier attribute differed.
    :return: True if this attribute (or any earlier one) differed, else *changed*.
    """
    differs = input_value is not None and not eq(input_value, existing_value)
    # Prefer the user's value when it differs; otherwise keep the existing value.
    setattr(target_instance, attr_name, input_value if differs else existing_value)
    return True if differs else changed
def check_and_update_resource(
    resource_type,
    get_fn,
    kwargs_get,
    update_fn,
    primitive_params_update,
    kwargs_non_primitive_update,
    module,
    update_attributes,
    client=None,
    sub_attributes_of_update_model=None,
    wait_applicable=True,
    states=None,
):
    """
    This function handles update operation on a resource. It checks whether update is required and accordingly returns
    the resource and the changed status.
    :param wait_applicable: Indicates if the resource support wait
    :param client: The resource Client class to use to perform the wait checks. This param must be specified if
           wait_applicable is True
    :param resource_type: The type of the resource. e.g. "private_ip"
    :param get_fn: Function used to get the resource. e.g. virtual_network_client.get_private_ip
    :param kwargs_get: Dictionary containing the arguments to be used to call get function.
           e.g. {"private_ip_id": module.params["private_ip_id"]}
    :param update_fn: Function used to update the resource. e.g virtual_network_client.update_private_ip
    :param primitive_params_update: List of primitive parameters used for update function. e.g. ['private_ip_id']
    :param kwargs_non_primitive_update: Dictionary containing the non-primitive arguments to be used to call get
           function with key as the non-primitive argument type & value as the name of the non-primitive argument to be
           passed to the update function. e.g. {UpdatePrivateIpDetails: "update_private_ip_details"}
    :param module: Instance of AnsibleModule
    :param update_attributes: Attributes in update model.
    :param states: List of lifecycle states to watch for while waiting after create_fn is called.
           e.g. [module.params['wait_until'], "FAULTY"]
    :param sub_attributes_of_update_model: Dictionary of non-primitive sub-attributes of update model. for example,
           {'services': [ServiceIdRequestDetails()]} as in UpdateServiceGatewayDetails.
    :return: Returns a dictionary containing the "changed" status and the resource.
    """
    try:
        result = dict(changed=False)
        # Fetch the current resource and determine which of the updatable
        # attributes differ from what the user requested.
        attributes_to_update, resource = get_attr_to_update(
            get_fn, kwargs_get, module, update_attributes
        )
        if attributes_to_update:
            # Build the SDK update call's kwargs (primitive ids plus the
            # populated Update*Details model) and perform the update.
            kwargs_update = get_kwargs_update(
                attributes_to_update,
                kwargs_non_primitive_update,
                module,
                primitive_params_update,
                sub_attributes_of_update_model,
            )
            resource = call_with_backoff(update_fn, **kwargs_update).data
            if wait_applicable:
                if client is None:
                    # Waiting requires a service client to poll the resource state.
                    module.fail_json(
                        msg="wait_applicable is True, but client is not specified."
                    )
                resource = wait_for_resource_lifecycle_state(
                    client, module, True, kwargs_get, get_fn, None, resource, states
                )
            result["changed"] = True
        # Return the (possibly updated) resource as a plain dict in all cases.
        result[resource_type] = to_dict(resource)
        return result
    except ServiceError as ex:
        module.fail_json(msg=ex.message)
def get_kwargs_update(
    attributes_to_update,
    kwargs_non_primitive_update,
    module,
    primitive_params_update,
    sub_attributes_of_update_model=None,
):
    """
    Build the keyword arguments for an SDK update call.

    :param attributes_to_update: Names of the attributes that actually need updating.
    :param kwargs_non_primitive_update: Mapping of Update*Details model class to the
           name of the SDK update function's argument that receives it.
    :param module: Instance of AnsibleModule holding the user's options.
    :param primitive_params_update: Primitive parameter names (e.g. resource ids)
           copied straight from module.params.
    :param sub_attributes_of_update_model: Optional mapping of non-primitive
           sub-attribute names to pre-built values for the update model.
    :return: Dict of kwargs ready to pass to the SDK update function.
    """
    update_kwargs = {name: module.params[name] for name in primitive_params_update}
    for model_cls, arg_name in kwargs_non_primitive_update.items():
        details = model_cls()
        for attr in details.attribute_map:
            if attr not in attributes_to_update:
                continue
            if sub_attributes_of_update_model and attr in sub_attributes_of_update_model:
                setattr(details, attr, sub_attributes_of_update_model[attr])
            else:
                setattr(details, attr, module.params[attr])
        update_kwargs[arg_name] = details
    return update_kwargs
def is_dictionary_subset(sub, super_dict):
    """
    Check whether every key/value pair of `sub` also appears in `super_dict`.
    :param sub: subset dictionary, for example user_provided_attr_value.
    :param super_dict: super dictionary, for example resources_attr_value.
    :return: True if sub is contained in super_dict.
    """
    return all(super_dict[key] == value for key, value in sub.items())
def are_lists_equal(s, t):
    """
    Deep-compare two lists for equivalence.

    For lists of dicts, both sides are sorted and compared pairwise, treating extra
    keys returned by the API (but absent in the user's input) as acceptable. For
    lists of primitives, a multiset (order-insensitive) comparison is performed.
    :param s: First list (typically the user-provided value).
    :param t: Second list (typically the existing resource's value).
    :return: True if the lists are considered equal.
    """
    if s is None and t is None:
        return True
    # Exactly one side None, or differing lengths, means not equal.
    if s is None or t is None or len(s) != len(t):
        return False
    if len(s) == 0:
        return True
    s = to_dict(s)
    t = to_dict(t)
    if type(s[0]) != dict:
        # Lists of primitive types: remove each element of s from a working copy
        # of t; equal iff every element is found and nothing is left over.
        remaining = list(t)
        try:
            for element in s:
                remaining.remove(element)
        except ValueError:
            return False
        return not remaining
    # Handle list of dicts. Dictionary returned by the API may have additional keys. For example, a get call on
    # service gateway has an attribute `services` which is a list of `ServiceIdResponseDetails`. This has a key
    # `service_name` which is not provided in the list of `services` by a user while making an update call; only
    # `service_id` is provided by the user in the update call.
    for left, right in zip(sort_list_of_dictionary(s), sort_list_of_dictionary(t)):
        if not is_dictionary_subset(left, right):
            return False
    return True
def get_attr_to_update(get_fn, kwargs_get, module, update_attributes):
    """
    Fetch the existing resource and compute which updatable attributes differ from
    the values the user supplied in the module options.

    :param get_fn: SDK function used to fetch the existing resource.
    :param kwargs_get: Dictionary of arguments for get_fn.
    :param module: Instance of AnsibleModule holding the user's options.
    :param update_attributes: Attribute names supported by the update API.
    :return: Tuple of (attributes_to_update, resource).
    """
    try:
        resource = call_with_backoff(get_fn, **kwargs_get).data
    except ServiceError as ex:
        module.fail_json(msg=ex.message)
    attributes_to_update = []
    for attr in update_attributes:
        resources_attr_value = getattr(resource, attr, None)
        user_provided_attr_value = module.params.get(attr, None)
        # List-valued attributes need a deep, order-insensitive comparison.
        unequal_list_attr = (
            type(resources_attr_value) == list or type(user_provided_attr_value) == list
        ) and not are_lists_equal(user_provided_attr_value, resources_attr_value)
        # Non-list attributes are compared via their dict representation so that
        # SDK model objects compare by content rather than identity.
        unequal_attr = type(resources_attr_value) != list and to_dict(
            resources_attr_value
        ) != to_dict(user_provided_attr_value)
        if unequal_list_attr or unequal_attr:
            # only update if the user has explicitly provided a value for this attribute
            # otherwise, no update is necessary because the user hasn't expressed a particular
            # value for that attribute
            if module.params.get(attr, None):
                attributes_to_update.append(attr)
    return attributes_to_update, resource
def get_taggable_arg_spec(supports_create=False, supports_wait=False):
    """
    Return an arg_spec valid for taggable OCI resources: the common arg spec
    extended with the free-form and defined tag options.
    :return: A dict that represents an ansible arg spec.
    """
    spec = get_common_arg_spec(supports_create, supports_wait)
    spec["freeform_tags"] = dict(type="dict")
    spec["defined_tags"] = dict(type="dict")
    return spec
def add_tags_to_model_from_module(model, module):
    """
    Copy the free-form and defined tags from an ansible module's options onto a
    resource model.
    :param model: A resource model instance that supports 'freeform_tags' and 'defined_tags' as attributes
    :param module: An AnsibleModule representing the options provided by the user
    :return: The updated model class with the tags specified by the user.
    """
    return add_tags_to_model_class(
        model,
        module.params.get("freeform_tags", None),
        module.params.get("defined_tags", None),
    )
def add_tags_to_model_class(model, freeform_tags, defined_tags):
    """
    Apply free-form and defined tags to a resource model, ignoring models that
    don't support tagging.
    :param model: A resource model instance that supports 'freeform_tags' and 'defined_tags' as attributes
    :param freeform_tags: A dict representing the freeform_tags to be applied to the model
    :param defined_tags: A dict representing the defined_tags to be applied to the model
    :return: The updated model class with the tags specified by the user
    """
    try:
        if freeform_tags is not None:
            _debug("Model {0} set freeform tags to {1}".format(model, freeform_tags))
            setattr(model, "freeform_tags", freeform_tags)
        if defined_tags is not None:
            _debug("Model {0} set defined tags to {1}".format(model, defined_tags))
            setattr(model, "defined_tags", defined_tags)
    except AttributeError as attr_err:
        # Models without tag support simply pass through unchanged.
        _debug("Model {0} doesn't support tags. Error {1}".format(model, attr_err))
    return model
def check_and_create_resource(
    resource_type,
    create_fn,
    kwargs_create,
    list_fn,
    kwargs_list,
    module,
    model,
    existing_resources=None,
    exclude_attributes=None,
    dead_states=None,
    default_attribute_values=None,
    supports_sort_by_time_created=True,
):
    """
    This function checks whether there is a resource with same attributes as specified in the module options. If not,
    it creates and returns the resource.
    :param resource_type: Type of the resource to be created.
    :param create_fn: Function used in the module to handle create operation. The function should return a dict with
                      keys as resource & changed.
    :param kwargs_create: Dictionary of parameters for create operation.
    :param list_fn: List function in sdk to list all the resources of type resource_type.
    :param kwargs_list: Dictionary of parameters for list operation.
    :param module: Instance of AnsibleModule
    :param model: Model used to create a resource.
    :param exclude_attributes: The attributes which should not be used to distinguish the resource. e.g. display_name,
           dns_label.
    :param dead_states: List of states which can't transition to any of the usable states of the resource. This
           defaults to ["TERMINATING", "TERMINATED", "FAULTY", "FAILED", "DELETING", "DELETED", "UNKNOWN_ENUM_VALUE"]
    :param default_attribute_values: A dictionary containing default values for attributes.
    :return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
    """
    # 'force_create' bypasses the idempotence check entirely.
    if module.params.get("force_create", None):
        _debug("Force creating {0}".format(resource_type))
        result = call_with_backoff(create_fn, **kwargs_create)
        return result
    # Get the existing resources list sorted by creation time in descending order. Return the latest matching resource
    # in case of multiple resource matches.
    if exclude_attributes is None:
        exclude_attributes = {}
    if default_attribute_values is None:
        default_attribute_values = {}
    try:
        if existing_resources is None:
            if supports_sort_by_time_created:
                kwargs_list["sort_by"] = "TIMECREATED"
            existing_resources = list_all_resources(list_fn, **kwargs_list)
    except ValueError:
        # list_fn doesn't support sort_by, so remove the sort_by key in kwargs_list and retry
        kwargs_list.pop("sort_by", None)
        try:
            existing_resources = list_all_resources(list_fn, **kwargs_list)
        # Handle errors like 404 due to bad arguments to the list_all_resources call.
        except ServiceError as ex:
            module.fail_json(msg=ex.message)
    except ServiceError as ex:
        module.fail_json(msg=ex.message)
    result = dict()
    # Determine which attributes are compared to decide if an existing resource
    # already satisfies the requested state.
    attributes_to_consider = _get_attributes_to_consider(
        exclude_attributes, model, module
    )
    if "defined_tags" not in default_attribute_values:
        default_attribute_values["defined_tags"] = {}
    resource_matched = None
    _debug(
        "Trying to find a match within {0} existing resources".format(
            len(existing_resources)
        )
    )
    # Scan active (non-terminal) resources for the first one matching the user's inputs.
    for resource in existing_resources:
        if _is_resource_active(resource, dead_states):
            _debug(
                "Comparing user specified values {0} against an existing resource's "
                "values {1}".format(module.params, to_dict(resource))
            )
            if does_existing_resource_match_user_inputs(
                to_dict(resource),
                module,
                attributes_to_consider,
                exclude_attributes,
                default_attribute_values,
            ):
                resource_matched = to_dict(resource)
                break
    if resource_matched:
        # Idempotent path: return the existing resource without creating a new one.
        _debug("Resource with same attributes found: {0}.".format(resource_matched))
        result[resource_type] = resource_matched
        result["changed"] = False
    else:
        _debug("No matching resource found. Attempting to create a new resource.")
        result = call_with_backoff(create_fn, **kwargs_create)
    return result
def _get_attributes_to_consider(exclude_attributes, model, module):
    """
    Determine the attributes used to decide whether an existing resource already
    matches the requested resource state.
    :param exclude_attributes: Attributes to not consider for matching
    :param model: The model class used to create the Resource
    :param module: An instance of AnsibleModule that contains user's desires around a resource's state
    :return: A list of attributes that needs to be matched
    """
    # An explicit 'key_by' wins: match only on the attributes the user listed.
    key_by = module.params.get("key_by")
    if key_by is not None:
        attributes_to_consider = key_by
    else:
        # Otherwise consider all model attributes, minus ones that don't
        # distinguish a resource (freeform_tags) and node_count, which the
        # existing resource does not reflect.
        attributes_to_consider = list(model.attribute_map)
        for non_distinguishing in ("freeform_tags", "node_count"):
            if non_distinguishing in attributes_to_consider:
                attributes_to_consider.remove(non_distinguishing)
    _debug("attributes to consider: {0}".format(attributes_to_consider))
    return attributes_to_consider
def _is_resource_active(resource, dead_states):
if dead_states is None:
dead_states = DEAD_STATES
if "lifecycle_state" not in resource.attribute_map:
return True
return resource.lifecycle_state not in dead_states
def is_attr_assigned_default(default_attribute_values, attr, assigned_value):
    """
    Check whether an existing resource attribute holds the module-author-provided
    default value.

    :param default_attribute_values: Dict of attribute name -> default value.
    :param attr: Name of the attribute to check.
    :param assigned_value: The existing resource's value for the attribute.
    :return: True if assigned_value equals the default (or no default was declared).
    """
    if not default_attribute_values:
        return False
    if attr not in default_attribute_values:
        # module author has not provided a default value for attr
        return True
    default_val_for_attr = default_attribute_values.get(attr, None)
    if isinstance(default_val_for_attr, dict):
        # When default value for a resource's attribute is empty dictionary, check if the corresponding value of the
        # existing resource's attribute is also empty.
        if not default_val_for_attr:
            return not assigned_value
        # only compare keys that are in default_attribute_values[attr]
        # this is to ensure forward compatibility when the API returns new keys that are not known during
        # the time when the module author provided default values for the attribute
        # (Previously this called six's iteritems() on assigned_value.items(), i.e.
        # on a dict_items view instead of the dict, which raised AttributeError.)
        keys = {}
        for k, v in assigned_value.items():
            if k in default_val_for_attr:
                keys[k] = v
        return default_val_for_attr == keys
    # non-dict, normal comparison
    return default_val_for_attr == assigned_value
def create_resource(resource_type, create_fn, kwargs_create, module):
    """
    Create an OCI resource and return a result dict with the created resource and
    a "changed" flag.
    :param resource_type: Type of the resource to be created. e.g.: "vcn"
    :param create_fn: Function in the SDK to create the resource. e.g. virtual_network_client.create_vcn
    :param kwargs_create: Dictionary containing arguments to be used to call the create function create_fn
    :param module: Instance of AnsibleModule
    """
    result = dict(changed=False)
    try:
        created = call_with_backoff(create_fn, **kwargs_create).data
        resource = to_dict(created)
        _debug("Created {0}, {1}".format(resource_type, resource))
        result["changed"] = True
        result[resource_type] = resource
        return result
    except (ServiceError, TypeError) as creation_err:
        module.fail_json(msg=str(creation_err))
def does_existing_resource_match_user_inputs(
    existing_resource,
    module,
    attributes_to_compare,
    exclude_attributes,
    default_attribute_values=None,
):
    """
    Check if 'attributes_to_compare' in an existing_resource match the desired state provided by a user in 'module'.
    :param existing_resource: A dictionary representing an existing resource's values.
    :param module: The AnsibleModule representing the options provided by the user.
    :param attributes_to_compare: A list of attributes of a resource that are used to compare if an existing resource
                                matches the desire state of the resource expressed by the user in 'module'.
    :param exclude_attributes: The attributes, that a module author provides, which should not be used to match the
            resource. This dictionary typically includes: (a) attributes which are initialized with dynamic default
            values like 'display_name', 'security_list_ids' for subnets and (b) attributes that don't have any defaults
            like 'dns_label' in VCNs. The attributes are part of keys and 'True' is the value for all existing keys.
    :param default_attribute_values: A dictionary containing default values for attributes.
    :return: True if the values for the list of attributes is the same in the existing_resource and module instances.
    """
    if not default_attribute_values:
        default_attribute_values = {}
    for attr in attributes_to_compare:
        attribute_with_default_metadata = None
        if attr in existing_resource:
            resources_value_for_attr = existing_resource[attr]
            # Check if the user has explicitly provided the value for attr.
            user_provided_value_for_attr = _get_user_provided_value(module, attr)
            if user_provided_value_for_attr is not None:
                # res acts as a mutable out-parameter; the recursive matcher sets
                # res[0] = False on any mismatch.
                res = [True]
                check_if_user_value_matches_resources_attr(
                    attr,
                    resources_value_for_attr,
                    user_provided_value_for_attr,
                    exclude_attributes,
                    default_attribute_values,
                    res,
                )
                if not res[0]:
                    # NOTE(review): this message lacks a space between "value" and
                    # "is" when the two literals are concatenated.
                    _debug(
                        "Mismatch on attribute '{0}'. User provided value is {1} & existing resource's value"
                        "is {2}.".format(
                            attr, user_provided_value_for_attr, resources_value_for_attr
                        )
                    )
                    return False
            else:
                # If the user has not explicitly provided the value for attr and attr is in exclude_list, we can
                # consider this as a 'pass'. For example, if an attribute 'display_name' is not specified by user and
                # that attribute is in the 'exclude_list' according to the module author(Not User), then exclude
                if (
                    exclude_attributes.get(attr) is None
                    and resources_value_for_attr is not None
                ):
                    if module.argument_spec.get(attr):
                        attribute_with_default_metadata = module.argument_spec.get(attr)
                        default_attribute_value = attribute_with_default_metadata.get(
                            "default", None
                        )
                        if default_attribute_value is not None:
                            if existing_resource[attr] != default_attribute_value:
                                return False
                        # Check if attr has a value that is not default. For example, a custom `security_list_id`
                        # is assigned to the subnet's attribute `security_list_ids`. If the attribute is assigned a
                        # value that is not the default, then it must be considered a mismatch and false returned.
                        elif not is_attr_assigned_default(
                            default_attribute_values, attr, existing_resource[attr]
                        ):
                            return False
        else:
            # Attribute exists only in the create model; nothing to compare against.
            _debug(
                "Attribute {0} is in the create model of resource {1}"
                "but doesn't exist in the get model of the resource".format(
                    attr, existing_resource.__class__
                )
            )
    return True
def tuplize(d):
    """
    Recursively convert dictionary *d* into a list of tuples ordered by key.

    Each entry becomes a ``(value_is_none, key, value)`` tuple; the leading boolean
    pushes None-valued entries to the end when these tuples are later sorted.
    :param d: A dictionary.
    :return: List of tuples.
    """
    tuples = []
    for key in sorted(d):
        value = d[key]
        if type(value) == list and value and type(value[0]) == dict:
            # A non-empty list of dicts: tuplize each element recursively.
            converted = [tuplize(element) for element in value]
            tuples.append((converted is None, key, converted))
        elif type(value) == dict:
            converted = tuplize(value)
            tuples.append((converted is None, key, converted))
        else:
            tuples.append((value is None, key, value))
    return tuples
def get_key_for_comparing_dict(d):
    """Sort key for dictionaries: their recursively-tuplized representation."""
    return tuplize(d)
def sort_dictionary(d):
    """
    Return a copy of *d* with every list value sorted, recursing into nested
    dictionaries and lists of dictionaries.
    :param d: A dictionary.
    :return: Dictionary with sorted elements.
    """
    result = {}
    for key, value in d.items():
        if type(value) == list:
            if value and type(value[0]) == dict:
                result[key] = sort_list_of_dictionary(value)
            else:
                result[key] = sorted(value)
        elif type(value) == dict:
            result[key] = sort_dictionary(value)
        else:
            result[key] = value
    return result
def sort_list_of_dictionary(list_of_dict):
    """
    Sort a list of dictionaries: each dictionary's values are sorted first, then the
    list itself is ordered by each dictionary's tuple equivalent.
    :param list_of_dict: List of dictionaries.
    :return: A sorted list of sorted dictionaries.
    """
    normalized = [sort_dictionary(d) for d in list_of_dict]
    return sorted(normalized, key=get_key_for_comparing_dict)
def check_if_user_value_matches_resources_attr(
    attribute_name,
    resources_value_for_attr,
    user_provided_value_for_attr,
    exclude_attributes,
    default_attribute_values,
    res,
):
    """
    Recursively compare an existing resource's attribute value against the
    user-provided value, honoring exclusions and module-author defaults.

    On any mismatch, res[0] is set to False; res acts as a mutable out-parameter
    shared across the recursion. Lists are compared order-insensitively; dicts are
    walked key by key.

    :param attribute_name: Name of the attribute (or sub-key) being compared.
    :param resources_value_for_attr: The existing resource's value.
    :param user_provided_value_for_attr: The value supplied by the user (may be None).
    :param exclude_attributes: Attributes excluded from matching by the module author.
    :param default_attribute_values: Module-author-provided defaults for attributes.
    :param res: Single-element list; res[0] is set to False on mismatch.
    """
    # Narrow the default/exclude maps to this attribute's sub-map when recursing
    # into a nested structure.
    if isinstance(default_attribute_values.get(attribute_name), dict):
        default_attribute_values = default_attribute_values.get(attribute_name)
    if isinstance(exclude_attributes.get(attribute_name), dict):
        exclude_attributes = exclude_attributes.get(attribute_name)
    if isinstance(resources_value_for_attr, list) or isinstance(
        user_provided_value_for_attr, list
    ):
        # Perform a deep equivalence check for a List attribute
        if exclude_attributes.get(attribute_name):
            return
        # Fall back to the module-author default when the user gave no value.
        if (
            user_provided_value_for_attr is None
            and default_attribute_values.get(attribute_name) is not None
        ):
            user_provided_value_for_attr = default_attribute_values.get(attribute_name)
        if resources_value_for_attr is None and user_provided_value_for_attr is None:
            return
        # Exactly one side None -> mismatch.
        if (
            resources_value_for_attr is None
            and len(user_provided_value_for_attr) >= 0
            or user_provided_value_for_attr is None
            and len(resources_value_for_attr) >= 0
        ):
            res[0] = False
            return
        if (
            resources_value_for_attr is not None
            and user_provided_value_for_attr is not None
            and len(resources_value_for_attr) != len(user_provided_value_for_attr)
        ):
            res[0] = False
            return
        if (
            user_provided_value_for_attr
            and type(user_provided_value_for_attr[0]) == dict
        ):
            # Process a list of dict
            sorted_user_provided_value_for_attr = sort_list_of_dictionary(
                user_provided_value_for_attr
            )
            sorted_resources_value_for_attr = sort_list_of_dictionary(
                resources_value_for_attr
            )
        else:
            sorted_user_provided_value_for_attr = sorted(user_provided_value_for_attr)
            sorted_resources_value_for_attr = sorted(resources_value_for_attr)
        # Walk through the sorted list values of the resource's value for this attribute, and compare against user
        # provided values.
        for index, resources_value_for_attr_part in enumerate(
            sorted_resources_value_for_attr
        ):
            check_if_user_value_matches_resources_attr(
                attribute_name,
                resources_value_for_attr_part,
                sorted_user_provided_value_for_attr[index],
                exclude_attributes,
                default_attribute_values,
                res,
            )
    elif isinstance(resources_value_for_attr, dict):
        # Perform a deep equivalence check for dict typed attributes
        if not resources_value_for_attr and user_provided_value_for_attr:
            res[0] = False
        for key in resources_value_for_attr:
            if (
                user_provided_value_for_attr is not None
                and user_provided_value_for_attr
            ):
                check_if_user_value_matches_resources_attr(
                    key,
                    resources_value_for_attr.get(key),
                    user_provided_value_for_attr.get(key),
                    exclude_attributes,
                    default_attribute_values,
                    res,
                )
            else:
                # User provided no dict: compare each key against defaults, unless
                # the key is excluded from matching.
                if exclude_attributes.get(key) is None:
                    if default_attribute_values.get(key) is not None:
                        user_provided_value_for_attr = default_attribute_values.get(key)
                        check_if_user_value_matches_resources_attr(
                            key,
                            resources_value_for_attr.get(key),
                            user_provided_value_for_attr,
                            exclude_attributes,
                            default_attribute_values,
                            res,
                        )
                    else:
                        res[0] = is_attr_assigned_default(
                            default_attribute_values,
                            attribute_name,
                            resources_value_for_attr.get(key),
                        )
    elif resources_value_for_attr != user_provided_value_for_attr:
        if (
            exclude_attributes.get(attribute_name) is None
            and default_attribute_values.get(attribute_name) is not None
        ):
            # As the user has not specified a value for an optional attribute, if the existing resource's
            # current state has a DEFAULT value for that attribute, we must not consider this incongruence
            # an issue and continue with other checks. If the existing resource's value for the attribute
            # is not the default value, then the existing resource is not a match.
            if not is_attr_assigned_default(
                default_attribute_values, attribute_name, resources_value_for_attr
            ):
                res[0] = False
        elif user_provided_value_for_attr is not None:
            res[0] = False
def are_dicts_equal(
    option_name,
    existing_resource_dict,
    user_provided_dict,
    exclude_list,
    default_attribute_values,
):
    """
    Compare an existing resource's dict-valued attribute with the user-provided dict.

    :param option_name: Name of the dict-typed module option being compared.
    :param existing_resource_dict: The attribute's value on the existing resource.
    :param user_provided_dict: The value the user supplied for the option (may be falsy).
    :param exclude_list: Attributes/sub-attributes the module author excluded from matching.
    :param default_attribute_values: Module-author-provided default values for attributes.
    :return: True if the dicts are considered equivalent for idempotence purposes.
    """
    if not user_provided_dict:
        # User has not provided a value for the map option. In this case, the user hasn't expressed an intent around
        # this optional attribute. Check if existing_resource_dict matches default.
        # For example, source_details attribute in volume is optional and does not have any defaults.
        return is_attr_assigned_default(
            default_attribute_values, option_name, existing_resource_dict
        )
    # If the existing resource has an empty dict, while the user has provided entries, dicts are not equal
    if not existing_resource_dict and user_provided_dict:
        return False
    # check if all keys of an existing resource's dict attribute matches user-provided dict's entries
    for sub_attr in existing_resource_dict:
        # If user has provided value for sub-attribute, then compare it with corresponding key in existing resource.
        if sub_attr in user_provided_dict:
            if existing_resource_dict[sub_attr] != user_provided_dict[sub_attr]:
                _debug(
                    "Failed to match: Existing resource's attr {0} sub-attr {1} value is {2}, while user "
                    "provided value is {3}".format(
                        option_name,
                        sub_attr,
                        existing_resource_dict[sub_attr],
                        user_provided_dict.get(sub_attr, None),
                    )
                )
                return False
        # If sub_attr not provided by user, check if the sub-attribute value of existing resource matches default value.
        else:
            if not should_dict_attr_be_excluded(option_name, sub_attr, exclude_list):
                default_value_for_dict_attr = default_attribute_values.get(
                    option_name, None
                )
                if default_value_for_dict_attr:
                    # if a default value for the sub-attr was provided by the module author, fail if the existing
                    # resource's value for the sub-attr is not the default
                    if not is_attr_assigned_default(
                        default_value_for_dict_attr,
                        sub_attr,
                        existing_resource_dict[sub_attr],
                    ):
                        return False
                else:
                    # No default value specified by module author for sub_attr
                    # (Fixed: the adjacent string literals below previously joined
                    # without separator spaces, producing "didnot"/"itor"/"withother".)
                    _debug(
                        "Consider as match: Existing resource's attr {0} sub-attr {1} value is {2}, while user did "
                        "not provide a value for it. The module author also has not provided a default value for it "
                        "or marked it for exclusion. So ignoring this attribute during matching and continuing with "
                        "other checks".format(
                            option_name, sub_attr, existing_resource_dict[sub_attr]
                        )
                    )
    return True
def should_dict_attr_be_excluded(map_option_name, option_key, exclude_list):
    """Check whether a key of a map option is marked for exclusion from matching.

    An entry for the Exclude list for excluding a map's key is specified as a dict
    with the map option name as the key, and the value as a list of keys to be
    excluded within that map. For example, to exclude keys "k1" and "k2" of a map
    option named "m1", the exclude list must have an entry {'m1': ['k1','k2']}.
    """
    for entry in exclude_list:
        if (
            isinstance(entry, dict)
            and map_option_name in entry
            and option_key in entry[map_option_name]
        ):
            return True
    return False
def create_and_wait(
    resource_type,
    client,
    create_fn,
    kwargs_create,
    get_fn,
    get_param,
    module,
    states=None,
    wait_applicable=True,
    kwargs_get=None,
):
    """
    A utility function to create a resource and wait for the resource to get into the state as specified in the module
    options.
    :param wait_applicable: Specifies if wait for create is applicable for this resource
    :param resource_type: Type of the resource to be created. e.g. "vcn"
    :param client: OCI service client instance to call the service periodically to retrieve data.
                   e.g. VirtualNetworkClient()
    :param create_fn: Function in the SDK to create the resource. e.g. virtual_network_client.create_vcn
    :param kwargs_create: Dictionary containing arguments to be used to call the create function create_fn.
    :param get_fn: Function in the SDK to get the resource. e.g. virtual_network_client.get_vcn
    :param get_param: Name of the argument in the SDK get function. e.g. "vcn_id"
    :param module: Instance of AnsibleModule.
    :param states: List of lifecycle states to watch for while waiting after create_fn is called.
                   e.g. [module.params['wait_until'], "FAULTY"]
    :param kwargs_get: Dictionary containing arguments to be used to call a multi-argument `get` function
    :return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
    """
    try:
        # NOTE: kwargs_get must be passed by keyword. Passing it as the 10th
        # positional argument (as before) bound it to
        # `update_target_resource_id_in_get_param` and silently dropped it,
        # so multi-argument get functions never received their kwargs.
        return create_or_update_resource_and_wait(
            resource_type,
            create_fn,
            kwargs_create,
            module,
            wait_applicable,
            get_fn,
            get_param,
            states,
            client,
            kwargs_get=kwargs_get,
        )
    except MaximumWaitTimeExceeded as ex:
        module.fail_json(msg=str(ex))
    except ServiceError as ex:
        module.fail_json(msg=ex.message)
def update_and_wait(
    resource_type,
    client,
    update_fn,
    kwargs_update,
    get_fn,
    get_param,
    module,
    states=None,
    wait_applicable=True,
    kwargs_get=None,
):
    """
    Update a resource and wait for it to reach the state specified in the module
    options. Delegates to create_or_update_resource_and_wait, since apart from the
    SDK function and its arguments the flow is identical to creation.
    :param wait_applicable: Specifies if wait for update is applicable for this resource
    :param resource_type: Type of the resource to be updated. e.g. "vcn"
    :param client: OCI service client instance to call the service periodically to retrieve data.
                   e.g. VirtualNetworkClient()
    :param update_fn: Function in the SDK to update the resource. e.g. virtual_network_client.update_vcn
    :param kwargs_update: Dictionary containing arguments to be used to call the update function update_fn.
    :param get_fn: Function in the SDK to get the resource. e.g. virtual_network_client.get_vcn
    :param get_param: Name of the argument in the SDK get function. e.g. "vcn_id"
    :param module: Instance of AnsibleModule.
    :param kwargs_get: Dictionary containing arguments for a get function requiring multiple arguments.
    :param states: List of lifecycle states to watch for while waiting after update_fn is called.
                   e.g. [module.params['wait_until'], "FAULTY"]
    :return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
    """
    try:
        return create_or_update_resource_and_wait(
            resource_type,
            update_fn,
            kwargs_update,
            module,
            wait_applicable,
            get_fn,
            get_param,
            states,
            client,
            kwargs_get=kwargs_get,
        )
    except MaximumWaitTimeExceeded as wait_err:
        module.fail_json(msg=str(wait_err))
    except ServiceError as svc_err:
        module.fail_json(msg=svc_err.message)
def create_or_update_resource_and_wait(
    resource_type,
    function,
    kwargs_function,
    module,
    wait_applicable,
    get_fn,
    get_param,
    states,
    client,
    update_target_resource_id_in_get_param=False,
    kwargs_get=None,
):
    """Create or update a resource, then wait for the requested lifecycle state.

    Shared implementation behind the create_and_wait/update_and_wait helpers.

    :param resource_type: Type of the resource. e.g. "vcn"
    :param function: SDK function that creates or updates the resource.
    :param kwargs_function: Dictionary of arguments for the create/update function.
    :param module: Instance of AnsibleModule.
    :param wait_applicable: Whether waiting is applicable for this resource.
    :param get_fn: SDK function used to fetch the resource. e.g. virtual_network_client.get_vcn
    :param get_param: Name of the identifier argument accepted by get_fn. e.g. "vcn_id"
    :param states: Lifecycle states to wait for. e.g. [module.params['wait_until'], "FAULTY"]
    :param client: OCI service client used to poll the resource. e.g. VirtualNetworkClient()
    :param kwargs_get: Arguments for get_fn when it requires multiple arguments.
    :return: Dictionary with the resource and the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
    """
    outcome = create_resource(resource_type, function, kwargs_function, module)
    created_resource = outcome[resource_type]
    outcome[resource_type] = wait_for_resource_lifecycle_state(
        client=client,
        module=module,
        wait_applicable=wait_applicable,
        kwargs_get=kwargs_get,
        get_fn=get_fn,
        get_param=get_param,
        resource=created_resource,
        states=states,
        resource_type=resource_type,
    )
    return outcome
def wait_for_resource_lifecycle_state(
    client,
    module,
    wait_applicable,
    kwargs_get,
    get_fn,
    get_param,
    resource,
    states,
    resource_type=None,
):
    """
    Wait for a resource to reach one of the requested lifecycle states.

    Waiting only happens when both *wait_applicable* is true and the user set
    module.params["wait"]; otherwise *resource* is returned unchanged.

    :param client: OCI service client instance used to poll the resource.
                   e.g. VirtualNetworkClient
    :param module: Instance of AnsibleModule (supplies "wait", "wait_until",
                   "wait_timeout").
    :param wait_applicable: Specifies if waiting is applicable for this resource.
    :param kwargs_get: Arguments for get_fn when it requires multiple arguments;
                       when falsy, the resource is fetched by {get_param: resource["id"]}.
    :param get_fn: Function in the SDK to get the resource. e.g. virtual_network_client.get_vcn
    :param get_param: Name of the argument in the SDK get function. e.g. "vcn_id"
    :param resource: The resource (as a dict) returned by the create/update call.
    :param states: Lifecycle states to wait for; when None, falls back to
                   module.params["wait_until"] or DEFAULT_READY_STATES.
    :param resource_type: Type of the resource. e.g. "vcn"
    :return: The resource as a dict, refreshed to its final observed state.
    """
    if wait_applicable and module.params.get("wait", None):
        if resource_type == "compartment":
            # An immediate attempt to retrieve a compartment after a compartment is created fails with
            # 'Authorization failed or requested resource not found', 'status': 404}.
            # This is because it takes few seconds for the permissions on a compartment to be ready.
            # Wait for few seconds before attempting a get call on compartment.
            _debug(
                "Pausing execution for permission on the newly created compartment to be ready."
            )
            time.sleep(15)
        if kwargs_get:
            _debug(
                "Waiting for resource to reach READY state. get_args: {0}".format(
                    kwargs_get
                )
            )
            response_get = call_with_backoff(get_fn, **kwargs_get)
        else:
            _debug(
                "Waiting for resource with id {0} to reach READY state.".format(
                    resource["id"]
                )
            )
            response_get = call_with_backoff(get_fn, **{get_param: resource["id"]})
        if states is None:
            states = module.params.get("wait_until") or DEFAULT_READY_STATES
        # oci.wait_until re-polls the same GET until the lifecycle state matches.
        resource = to_dict(
            oci.wait_until(
                client,
                response_get,
                evaluate_response=lambda r: r.data.lifecycle_state in states,
                max_wait_seconds=module.params.get(
                    "wait_timeout", MAX_WAIT_TIMEOUT_IN_SECONDS
                ),
            ).data
        )
    return resource
def wait_on_work_request(client, response, module):
    """Wait for a work request to reach its expected status.

    When the user asked to wait (module.params["wait"]), the work request is
    polled until it reaches SUCCEEDED; otherwise it is only polled until it
    reaches ACCEPTED. Wait timeouts and service errors fail the module.

    :param client: OCI service client used by oci.wait_until to poll.
    :param response: Response from get_work_request for the request to watch.
    :param module: Instance of AnsibleModule (supplies "wait", "wait_timeout").
    :return: The final work request data object.
    """
    # The two branches previously duplicated the _debug/oci.wait_until calls;
    # only the target status differs, so compute it once (same emitted strings).
    target_status = "SUCCEEDED" if module.params.get("wait", None) else "ACCEPTED"
    try:
        _debug(
            "Waiting for work request with id {0} to reach {1} state.".format(
                response.data.id, target_status
            )
        )
        wait_response = oci.wait_until(
            client,
            response,
            evaluate_response=lambda r: r.data.status == target_status,
            max_wait_seconds=module.params.get(
                "wait_timeout", MAX_WAIT_TIMEOUT_IN_SECONDS
            ),
        )
    except MaximumWaitTimeExceeded as ex:
        _debug(str(ex))
        module.fail_json(msg=str(ex))
    except ServiceError as ex:
        _debug(str(ex))
        module.fail_json(msg=str(ex))
    return wait_response.data
def delete_and_wait(
    resource_type,
    client,
    get_fn,
    kwargs_get,
    delete_fn,
    kwargs_delete,
    module,
    states=None,
    wait_applicable=True,
    process_work_request=False,
):
    """A utility function to delete a resource and wait for the resource to get into the state as specified in the
    module options.
    :param wait_applicable: Specifies if wait for delete is applicable for this resource
    :param resource_type: Type of the resource to be deleted. e.g. "vcn"
    :param client: OCI service client instance to call the service periodically to retrieve data.
        e.g. VirtualNetworkClient()
    :param get_fn: Function in the SDK to get the resource. e.g. virtual_network_client.get_vcn
    :param kwargs_get: Dictionary of arguments for get function get_fn. e.g. {"vcn_id": module.params["id"]}
    :param delete_fn: Function in the SDK to delete the resource. e.g. virtual_network_client.delete_vcn
    :param kwargs_delete: Dictionary of arguments for delete function delete_fn. e.g. {"vcn_id": module.params["id"]}
    :param module: Instance of AnsibleModule.
    :param states: List of lifecycle states to watch for while waiting after delete_fn is called. If nothing is passed,
        defaults to module.params["wait_until"] or DEFAULT_TERMINATED_STATES.
    :param process_work_request: Whether a work request is generated on an API call and if it needs to be handled.
    :return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
    """
    # States that indicate a delete is already underway or finished; in any of
    # these the delete call is skipped and changed stays False.
    states_set = set(["DETACHING", "DETACHED", "DELETING", "DELETED", "TERMINATING", "TERMINATED"])
    result = dict(changed=False)
    result[resource_type] = dict()
    try:
        resource = to_dict(call_with_backoff(get_fn, **kwargs_get).data)
        if resource:
            if "lifecycle_state" not in resource or resource["lifecycle_state"] not in states_set:
                response = call_with_backoff(delete_fn, **kwargs_delete)
                if process_work_request:
                    # Deletion is asynchronous via a work request; wait on the
                    # work request itself rather than the resource state.
                    wr_id = response.headers.get("opc-work-request-id")
                    get_wr_response = call_with_backoff(
                        client.get_work_request, work_request_id=wr_id
                    )
                    result["work_request"] = to_dict(
                        wait_on_work_request(client, get_wr_response, module)
                    )
                    # Set changed to True as work request has been created to delete the resource.
                    result["changed"] = True
                    # Re-read the resource to report its post-delete state.
                    resource = to_dict(call_with_backoff(get_fn, **kwargs_get).data)
                else:
                    _debug("Deleted {0}, {1}".format(resource_type, resource))
                    result["changed"] = True
                    if wait_applicable and module.params.get("wait", None):
                        if states is None:
                            states = (
                                module.params.get("wait_until")
                                or DEFAULT_TERMINATED_STATES
                            )
                        try:
                            wait_response = oci.wait_until(
                                client,
                                get_fn(**kwargs_get),
                                evaluate_response=lambda r: r.data.lifecycle_state
                                in states,
                                max_wait_seconds=module.params.get(
                                    "wait_timeout", MAX_WAIT_TIMEOUT_IN_SECONDS
                                ),
                                succeed_on_not_found=True,
                            )
                        except MaximumWaitTimeExceeded as ex:
                            module.fail_json(msg=str(ex))
                        except ServiceError as ex:
                            if ex.status != 404:
                                module.fail_json(msg=ex.message)
                            else:
                                # While waiting for resource to get into terminated state, if the resource is not found.
                                _debug(
                                    "API returned Status:404(Not Found) while waiting for resource to get into"
                                    " terminated state."
                                )
                                resource["lifecycle_state"] = "DELETED"
                                result[resource_type] = resource
                                return result
                        # oci.wait_until() returns an instance of oci.util.Sentinel in case the resource is not found.
                        if type(wait_response) is not Sentinel:
                            resource = to_dict(wait_response.data)
                        else:
                            resource["lifecycle_state"] = "DELETED"
                    result[resource_type] = resource
        else:
            _debug(
                "Resource {0} with {1} already deleted. So returning changed=False".format(
                    resource_type, kwargs_get
                )
            )
    except ServiceError as ex:
        # DNS API throws a 400 InvalidParameter when a zone id is provided for zone_name_or_id and if the zone
        # resource is not available, instead of the expected 404. So working around this for now.
        if type(client) == oci.dns.DnsClient:
            if ex.status == 400 and ex.code == "InvalidParameter":
                _debug(
                    "Resource {0} with {1} already deleted. So returning changed=False".format(
                        resource_type, kwargs_get
                    )
                )
        elif ex.status != 404:
            module.fail_json(msg=ex.message)
        # 404 (or the DNS 400 workaround) means the resource is already gone.
        result[resource_type] = dict()
    return result
def are_attrs_equal(current_resource, module, attributes):
    """Return True when every user-specified attribute already matches.

    Used to decide whether an existing OCI resource already satisfies the
    values an Ansible user requested, i.e. whether an update is needed.
    Attributes for which the user supplied no value are ignored.

    :param current_resource: A resource model instance
    :param module: The AnsibleModule representing the options provided by the user
    :param attributes: Attribute names to compare between model and module
    :return: True if all user-provided values equal the model's current values
    """
    for attr in attributes:
        requested = _get_user_provided_value(module, attribute_name=attr)
        if requested is None:
            # The user expressed no opinion on this attribute.
            continue
        actual = getattr(current_resource, attr, None)
        if actual == requested:
            continue
        _debug(
            "are_attrs_equal - current resource's attribute "
            + attr
            + " value is "
            + str(actual)
            + " and this doesn't match user provided value of "
            + str(requested)
        )
        return False
    return True
def _get_user_provided_value(module, attribute_name):
"""
Returns the user provided value for "attribute_name". We consider aliases in the module.
"""
user_provided_value = module.params.get(attribute_name, None)
if user_provided_value is None:
# If the attribute_name is set as an alias for some option X and user has provided value in the playbook using
# option X, then user provided value for attribute_name is equal to value for X.
# Get option name for attribute_name from module.aliases.
# module.aliases is a dictionary with key as alias name and its value as option name.
option_alias_for_attribute = module.aliases.get(attribute_name, None)
if option_alias_for_attribute is not None:
user_provided_value = module.params.get(option_alias_for_attribute, None)
return user_provided_value
def update_model_with_user_options(curr_model, update_model, module):
    """Populate *update_model* from user options, carrying current values over.

    For each attribute of the update model: if the user requested a value that
    differs from the current one, it is placed in the update model; if the
    user requested nothing, the current value is carried across; if the values
    already match, the attribute is left untouched.

    :param curr_model: A resource model instance representing the current state
    :param update_model: An instance of the update model for this resource type
    :param module: An AnsibleModule representing the options provided by the user
    :return: The populated *update_model*
    """
    for attr in update_model.attribute_map.keys():
        current_value = getattr(curr_model, attr, None)
        requested_value = _get_user_provided_value(module, attribute_name=attr)
        if current_value == requested_value:
            # Already in the desired state; leave the attribute alone.
            continue
        if requested_value is None:
            # No change requested: keep the resource's current value.
            setattr(update_model, attr, current_value)
        else:
            _debug(
                "User requested {0} for attribute {1}, whereas the current value is {2}. So adding it "
                "to the update model".format(
                    requested_value, attr, current_value
                )
            )
            setattr(update_model, attr, requested_value)
    return update_model
def _get_retry_strategy():
    """Build the retry strategy used by call_with_backoff.

    Up to 10 attempts with full-jitter backoff (equal jitter on throttles),
    retrying on HTTP 429, quota/limit 400s, 409 conflicts and any 5xx.
    """
    builder = RetryStrategyBuilder(
        max_attempts_check=True,
        max_attempts=10,
        retry_max_wait_between_calls_seconds=30,
        retry_base_sleep_time_seconds=3,
        backoff_type=oci.retry.BACKOFF_FULL_JITTER_EQUAL_ON_THROTTLE_VALUE,
    )
    retryable_service_errors = {
        429: [],
        400: ["QuotaExceeded", "LimitExceeded"],
        409: ["Conflict"],
    }
    builder.add_service_error_check(
        service_error_retry_config=retryable_service_errors,
        service_error_retry_on_any_5xx=True,
    )
    return builder.get_retry_strategy()
def call_with_backoff(fn, **kwargs):
    """Invoke *fn* with a retry strategy, tolerating SDKs that lack one.

    A default retry strategy is injected unless the caller supplied their own.
    Older SDK versions that do not accept a ``retry_strategy`` keyword raise
    TypeError; in that case the keyword is dropped and the call retried once.
    Any other TypeError is a genuine validation error and is re-raised.
    """
    if "retry_strategy" not in kwargs:
        kwargs["retry_strategy"] = _get_retry_strategy()
    try:
        return fn(**kwargs)
    except TypeError as te:
        if "unexpected keyword argument" not in str(te):
            # A validation error raised by the SDK, throw it back.
            raise
        # Older SDKs do not support retry_strategy: drop it and retry.
        kwargs.pop("retry_strategy")
        return fn(**kwargs)
def generic_hash(obj):
    """
    Compute a hash of all the fields in the object.

    Lists contribute the hash of each element; dicts contribute a hash derived
    from each key/value pair; every other field contributes its own hash. The
    result is order-insensitive because contributions are summed.

    :param obj: Object whose hash needs to be computed (must expose an
        ``attribute_map`` as OCI SDK model classes do)
    :return: a hash value for the object
    """
    total = 0  # renamed from `sum`, which shadowed the builtin sum()
    for field in obj.attribute_map.keys():
        field_value = getattr(obj, field)
        if isinstance(field_value, list):
            for value in field_value:
                total = total + hash(value)
        elif isinstance(field_value, dict):
            for k, v in field_value.items():
                total = total + hash(hash(k) + hash(":") + hash(v))
        else:
            # Reuse field_value instead of a redundant second getattr().
            total = total + hash(field_value)
    return total
def generic_eq(s, other):
    """Equality by attribute dictionary; any object is unequal to None."""
    return other is not None and s.__dict__ == other.__dict__
def generate_subclass(parent_class):
    """Create a hashable subclass of *parent_class*.

    The generated type reuses the parent's __init__ and wires __hash__ to
    generic_hash (sum of the hashes of all fields) and __eq__ to generic_eq,
    so instances can participate in set() operations.
    """
    namespace = {
        "__init__": parent_class.__init__,
        "__hash__": generic_hash,
        "__eq__": generic_eq,
    }
    return type("GeneratedSub" + parent_class.__name__, (parent_class,), namespace)
def create_hashed_instance(class_type):
    """Instantiate a hashable variant of *class_type* (see generate_subclass)."""
    return generate_subclass(class_type)()
def get_hashed_object_list(class_type, object_with_values, attributes_class_type=None):
    """Convert a list of model objects into hashable equivalents.

    :param class_type: Class of each element in the list
    :param object_with_values: Iterable of source objects, or None
    :param attributes_class_type: Optional list of attribute classes that must
        themselves be converted to hashable instances (see get_hashed_object)
    :return: List of hashable instances, or None when the input is None
    """
    if object_with_values is None:
        return None
    # Comprehension replaces the manual append loop (same order, same result).
    return [
        get_hashed_object(class_type, object_with_value, attributes_class_type)
        for object_with_value in object_with_values
    ]
def get_hashed_object(
    class_type, object_with_value, attributes_class_type=None, supported_attributes=None
):
    """
    Convert any class instance into a hashable one so that the instances are
    eligible for the various comparison operations available on set() objects.

    :param class_type: Any class type whose instances need to be hashable
    :param object_with_value: Instance of the class type with values which
        will be copied into the resulting instance
    :param attributes_class_type: A list of class types of attributes, used when
        an attribute is a custom class instance that must be converted too
    :param supported_attributes: A list of attributes which should be considered
        while populating the instance with the values in the object. This helps
        in avoiding new attributes of the class_type which are still not
        supported by the current implementation.
    :return: A hashable instance with the same state as object_with_value
    """
    if object_with_value is None:
        return None
    HashedClass = generate_subclass(class_type)
    hashed_class_instance = HashedClass()
    if supported_attributes:
        # Restrict to the intersection so unsupported/new SDK attributes are skipped.
        class_attributes = list(
            set(hashed_class_instance.attribute_map) & set(supported_attributes)
        )
    else:
        class_attributes = hashed_class_instance.attribute_map
    for attribute in class_attributes:
        attribute_value = getattr(object_with_value, attribute)
        if attributes_class_type:
            for attribute_class_type in attributes_class_type:
                if isinstance(attribute_value, attribute_class_type):
                    # Nested model objects must be converted recursively.
                    attribute_value = get_hashed_object(
                        attribute_class_type, attribute_value
                    )
        # Use the setattr builtin instead of calling __setattr__ directly.
        setattr(hashed_class_instance, attribute, attribute_value)
    return hashed_class_instance
def update_class_type_attr_difference(
    update_class_details, existing_instance, attr_name, attr_class, input_attr_value
):
    """
    Check the difference and update an attribute which is represented by a
    class instance. Not applicable if the attribute type is a primitive value.
    For example, if a class named A has an attribute x with A.x = X(), then
    only this method works.

    :param update_class_details: The instance which should be updated if there
        is a change in attribute value
    :param existing_instance: The instance whose attribute value is compared
        with the input attribute value
    :param attr_name: Name of the attribute whose value should be compared
    :param attr_class: Class type of the attribute
    :param input_attr_value: The value of the input attribute which should
        replace the current value in case of mismatch
    :return: A boolean value indicating whether the attribute value was replaced
    """
    changed = False
    # The existing attribute value is wrapped as a hashable instance so the
    # equality defined by generic_eq applies to the comparison below.
    existing_attr_value = get_hashed_object(
        attr_class, getattr(existing_instance, attr_name)
    )
    if input_attr_value is None:
        setattr(update_class_details, attr_name, existing_attr_value)
    else:
        # Use the != operator instead of calling __eq__ directly.
        changed = input_attr_value != existing_attr_value
        if changed:
            setattr(update_class_details, attr_name, input_attr_value)
        else:
            setattr(update_class_details, attr_name, existing_attr_value)
    return changed
def get_existing_resource(target_fn, module, **kwargs):
    """
    Fetch the requested resource, returning None when it does not exist.

    :param target_fn: The SDK function used to look the resource up
    :param module: Instance of AnsibleModule
    :param kwargs: Arguments identifying the resource, passed to target_fn
    :return: The resource data, or None when the service reports 404
    """
    try:
        return call_with_backoff(target_fn, **kwargs).data
    except ServiceError as ex:
        # 404 simply means "not found"; anything else fails the module.
        if ex.status != 404:
            module.fail_json(msg=ex.message)
    return None
def get_attached_instance_info(
    module, lookup_attached_instance, list_attachments_fn, list_attachments_args
):
    """Return the first attachment in ATTACHING/ATTACHED state, or None.

    When *lookup_attached_instance* is true, attachments are gathered from
    every compartment in the tenancy (the compartment_id in
    *list_attachments_args* is overwritten per compartment); otherwise a
    single listing call is made with the arguments as given.

    :param module: Instance of AnsibleModule.
    :param lookup_attached_instance: Whether to search across all compartments.
    :param list_attachments_fn: SDK function listing the attachments.
    :param list_attachments_args: Dict of arguments for list_attachments_fn;
        mutated in place when searching across compartments.
    :return: The active attachment dict, or None when there is none.
    """
    config = get_oci_config(module)
    identity_client = create_service_client(module, IdentityClient)
    volume_attachments = []
    if lookup_attached_instance:
        # Get all the compartments in the tenancy
        compartments = to_dict(
            identity_client.list_compartments(
                config.get("tenancy"), compartment_id_in_subtree=True
            ).data
        )
        # For each compartment, get the volume attachments for the compartment_id with the other args in
        # list_attachments_args.
        for compartment in compartments:
            list_attachments_args["compartment_id"] = compartment["id"]
            try:
                volume_attachments += list_all_resources(
                    list_attachments_fn, **list_attachments_args
                )
            # Pass ServiceError due to authorization issue in accessing volume attachments of a compartment
            # NOTE(review): as written, non-404 ServiceErrors are also silently
            # swallowed by this handler — confirm that is intended.
            except ServiceError as ex:
                if ex.status == 404:
                    pass
    else:
        volume_attachments = list_all_resources(
            list_attachments_fn, **list_attachments_args
        )
    volume_attachments = to_dict(volume_attachments)
    # volume_attachments has attachments in DETACHING or DETACHED state. Return the volume attachment in ATTACHING or
    # ATTACHED state
    return next(
        (
            volume_attachment
            for volume_attachment in volume_attachments
            if volume_attachment["lifecycle_state"] in ["ATTACHING", "ATTACHED"]
        ),
        None,
    )
def check_mode(fn):
    """Decorator that short-circuits *fn* unless experimental mode is enabled.

    The wrapped callable runs only when the OCI_ANSIBLE_EXPERIMENTAL
    environment variable is set to a non-empty value; otherwise the wrapper
    is a no-op returning None.
    """
    from functools import wraps

    @wraps(fn)  # preserve fn's name/docstring, which the bare wrapper lost
    def wrapper(*args, **kwargs):
        if os.environ.get("OCI_ANSIBLE_EXPERIMENTAL", None):
            return fn(*args, **kwargs)
        return None

    return wrapper
def check_and_return_component_list_difference(
    input_component_list, existing_components, purge_components, delete_components=False
):
    """Compute the resulting component list, treating an empty request specially.

    When the caller supplies no components, the existing list is replaced by an
    empty one and the change is reported; otherwise the work is delegated to
    get_component_list_difference.
    """
    if not input_component_list:
        return [], True
    return get_component_list_difference(
        input_component_list,
        existing_components,
        purge_components,
        delete_components,
    )
def get_component_list_difference(
    input_component_list, existing_components, purge_components, delete_components=False
):
    """Compute the new component list and whether it differs from the current one.

    Delete mode removes the requested components from the existing list. Purge
    mode replaces the existing list entirely when the two differ. Otherwise the
    requested components are merged into the existing ones. A result of
    (None, False) means no change is needed.
    """
    requested = set(input_component_list)
    if delete_components:
        if existing_components is None:
            return None, False
        to_remove = set(existing_components) & requested
        if not to_remove:
            return None, False
        return list(set(existing_components) - to_remove), True
    if existing_components is None:
        # Nothing exists yet: the requested list is the new list.
        return input_component_list, True
    current = set(existing_components)
    if purge_components and requested ^ current:
        # Any difference at all means the requested list wins wholesale.
        return input_component_list, True
    additions = requested - current
    if additions:
        return list(additions) + existing_components, True
    return None, False
def write_to_file(path, content):
    """Write *content* to the file at *path* in binary mode."""
    with open(to_bytes(path), "wb") as out_file:
        out_file.write(content)
def get_target_resource_from_list(
    module, list_resource_fn, target_resource_id=None, **kwargs
):
    """
    Return a resource filtered by identifier from a list of resources.

    This is an alternative to a 'get resource' call when the resource API does
    not provide one. The returned object only mimics an OCI response (it has a
    ``data`` attribute) and must NOT be fed to the 'wait_until' utility, as it
    is only a partial wrapper of a response object.

    :param module: The AnsibleModule representing the options provided by the user
    :param list_resource_fn: The function which lists all the resources
    :param target_resource_id: The identifier of the resource to pick from the list
    :param kwargs: Arguments for the list call, identifying the search scope
    :return: A partial response wrapper whose ``data`` is the target resource,
        or None when it was not found.
    """
    class ResponseWrapper:
        def __init__(self, data):
            self.data = data

    try:
        all_resources = list_all_resources(list_resource_fn, **kwargs)
        found = None
        if all_resources is not None:
            # Pick the first (and only) resource whose id matches.
            found = next(
                (res for res in all_resources if res.id == target_resource_id),
                None,
            )
        return ResponseWrapper(data=found)
    except ServiceError as ex:
        module.fail_json(msg=ex.message)
| gpl-3.0 |
shadowmint/nwidget | lib/cocos2d-0.5.5/test/test_flip_y.py | 1 | 1153 | # This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
testinfo = "s, t 1, s, t 2, s, t 3, s, t 4.1, s, t 4.2, s, q"
tags = "FlipY"
import cocos
from cocos.director import director
from cocos.actions import *
from cocos.sprite import *
from cocos.layer import *
import pyglet
class BackgroundLayer(cocos.layer.Layer):
    """Layer that draws a static background image each frame."""
    def __init__(self):
        super(BackgroundLayer, self).__init__()
        # Resolved through pyglet's resource loader search path.
        self.img = pyglet.resource.image('background_image.png')
    def draw( self ):
        """Blit the image at the layer origin under the layer's transform."""
        # Reset color to opaque white so the image is not tinted.
        glColor4ub(255, 255, 255, 255)
        glPushMatrix()
        self.transform()
        self.img.blit(0,0)
        glPopMatrix()
def main():
    """Open a resizable window and run a scene performing a FlipY grid action."""
    director.init(resizable=True)
    scene = cocos.scene.Scene()
    scene.add(BackgroundLayer(), z=0)
    # In real code a StopGrid() action should follow a sequence of grid
    # actions; it is omitted here to stay in the last grid action render.
    scene.do(FlipY(duration=4))
    director.run(scene)
if __name__ == '__main__':
    main()
| apache-2.0 |
yakky/django | django/utils/decorators.py | 31 | 7500 | "Functions that help with dynamically creating decorators for views."
try:
from contextlib import ContextDecorator
except ImportError:
ContextDecorator = None
from functools import WRAPPER_ASSIGNMENTS, update_wrapper, wraps
from django.utils import six
class classonlymethod(classmethod):
    """A classmethod that refuses to be accessed through an instance."""
    def __get__(self, instance, cls=None):
        if instance is None:
            return super(classonlymethod, self).__get__(instance, cls)
        raise AttributeError("This method is available only on the class, not on instances.")
def method_decorator(decorator, name=''):
    """
    Converts a function decorator into a method decorator.

    ``decorator`` may be a single decorator or an iterable of decorators
    (applied so the call order matches their order in the iterable). When the
    result is applied to a class, ``name`` must be the name of the method on
    that class that should be wrapped.
    """
    # 'obj' can be a class or a function. If 'obj' is a function at the time it
    # is passed to _dec, it will eventually be a method of the class it is
    # defined on. If 'obj' is a class, the 'name' is required to be the name
    # of the method that will be decorated.
    def _dec(obj):
        is_class = isinstance(obj, type)
        if is_class:
            if name and hasattr(obj, name):
                func = getattr(obj, name)
                if not callable(func):
                    raise TypeError(
                        "Cannot decorate '{0}' as it isn't a callable "
                        "attribute of {1} ({2})".format(name, obj, func)
                    )
            else:
                raise ValueError(
                    "The keyword argument `name` must be the name of a method "
                    "of the decorated class: {0}. Got '{1}' instead".format(
                        obj, name,
                    )
                )
        else:
            func = obj
        def decorate(function):
            """
            Apply a list/tuple of decorators if decorator is one. Decorator
            functions are applied so that the call order is the same as the
            order in which they appear in the iterable.
            """
            if hasattr(decorator, '__iter__'):
                for dec in decorator[::-1]:
                    function = dec(function)
                return function
            return decorator(function)
        def _wrapper(self, *args, **kwargs):
            @decorate
            def bound_func(*args2, **kwargs2):
                return func.__get__(self, type(self))(*args2, **kwargs2)
            # bound_func has the signature that 'decorator' expects i.e. no
            # 'self' argument, but it is a closure over self so it can call
            # 'func' correctly.
            return bound_func(*args, **kwargs)
        # In case 'decorator' adds attributes to the function it decorates, we
        # want to copy those. We don't have access to bound_func in this scope,
        # but we can cheat by using it on a dummy function.
        @decorate
        def dummy(*args, **kwargs):
            pass
        update_wrapper(_wrapper, dummy)
        # Need to preserve any existing attributes of 'func', including the name.
        update_wrapper(_wrapper, func)
        if is_class:
            # Replace the named method in place and hand back the class.
            setattr(obj, name, _wrapper)
            return obj
        return _wrapper
    # Don't worry about making _dec look similar to a list/tuple as it's rather
    # meaningless.
    if not hasattr(decorator, '__iter__'):
        update_wrapper(_dec, decorator, assigned=available_attrs(decorator))
    # Change the name to aid debugging.
    if hasattr(decorator, '__name__'):
        _dec.__name__ = 'method_decorator(%s)' % decorator.__name__
    else:
        _dec.__name__ = 'method_decorator(%s)' % decorator.__class__.__name__
    return _dec
def decorator_from_middleware_with_args(middleware_class):
    """
    Like decorator_from_middleware, but returns a function
    that accepts the arguments to be passed to the middleware_class
    constructor. Use like::
        cache_page = decorator_from_middleware_with_args(CacheMiddleware)
        # ...
        @cache_page(3600)
        def my_view(request):
            # ...
    """
    return make_middleware_decorator(middleware_class)
def decorator_from_middleware(middleware_class):
    """
    Given a middleware class (not an instance), returns a view decorator. This
    lets you use middleware functionality on a per-view basis. The middleware
    is created with no params passed (use decorator_from_middleware_with_args
    when the middleware constructor needs arguments).
    """
    return make_middleware_decorator(middleware_class)()
def available_attrs(fn):
    """
    Return the list of functools-wrappable attributes on a callable.

    This is required as a workaround for http://bugs.python.org/issue3445
    under Python 2, where functools raises if a wrapped attribute (such as
    __name__) is missing on the callable; Python 3 copies them safely.
    """
    if not six.PY3:
        return tuple(a for a in WRAPPER_ASSIGNMENTS if hasattr(fn, a))
    return WRAPPER_ASSIGNMENTS
def make_middleware_decorator(middleware_class):
    """Build a decorator factory that applies *middleware_class* per-view.

    The returned callable accepts the middleware constructor arguments and
    yields a view decorator that runs the middleware's process_request,
    process_view, process_exception, process_template_response and
    process_response hooks around a single view.
    """
    def _make_decorator(*m_args, **m_kwargs):
        middleware = middleware_class(*m_args, **m_kwargs)
        def _decorator(view_func):
            @wraps(view_func, assigned=available_attrs(view_func))
            def _wrapped_view(request, *args, **kwargs):
                # A non-None result from process_request/process_view
                # short-circuits the view, mirroring full middleware handling.
                if hasattr(middleware, 'process_request'):
                    result = middleware.process_request(request)
                    if result is not None:
                        return result
                if hasattr(middleware, 'process_view'):
                    result = middleware.process_view(request, view_func, args, kwargs)
                    if result is not None:
                        return result
                try:
                    response = view_func(request, *args, **kwargs)
                except Exception as e:
                    if hasattr(middleware, 'process_exception'):
                        result = middleware.process_exception(request, e)
                        if result is not None:
                            return result
                    raise
                if hasattr(response, 'render') and callable(response.render):
                    if hasattr(middleware, 'process_template_response'):
                        response = middleware.process_template_response(request, response)
                    # Defer running of process_response until after the template
                    # has been rendered:
                    if hasattr(middleware, 'process_response'):
                        callback = lambda response: middleware.process_response(request, response)
                        response.add_post_render_callback(callback)
                else:
                    if hasattr(middleware, 'process_response'):
                        return middleware.process_response(request, response)
                return response
            return _wrapped_view
        return _decorator
    return _make_decorator
if ContextDecorator is None:
    # ContextDecorator was introduced in Python 3.2
    # See https://docs.python.org/3/library/contextlib.html#contextlib.ContextDecorator
    class ContextDecorator(object):
        """
        A base class that enables a context manager to also be used as a decorator.
        """
        def __call__(self, func):
            @wraps(func, assigned=available_attrs(func))
            def inner(*args, **kwargs):
                # Enter/exit `self` around every invocation of func.
                with self:
                    return func(*args, **kwargs)
            return inner
class classproperty(object):
    """Descriptor exposing a read-only property computed from the class.

    Unlike a plain property, access through an instance still hands the
    owning class (not the instance) to the getter.
    """

    def __init__(self, method=None):
        self.fget = method

    def __get__(self, instance, cls=None):
        # Always call the getter with the class; the instance is ignored.
        return self.fget(cls)

    def getter(self, method):
        """Install *method* as the getter and return self for decorator use."""
        self.fget = method
        return self
| bsd-3-clause |
rcucui/Pisa-util-fix | demo/tgpisa/tgpisa/commands.py | 14 | 1682 | # -*- coding: utf-8 -*-
"""This module contains functions called from console script entry points."""
import os
import sys
from os.path import dirname, exists, join
import pkg_resources
pkg_resources.require("TurboGears")
import turbogears
import cherrypy
cherrypy.lowercase_api = True
class ConfigurationError(Exception):
    """Raised when no usable project configuration file can be located."""
    pass
def start():
    """Start the CherryPy application server.

    Resolves the TurboGears config file (command line argument, dev.cfg next
    to setup.py, prod.cfg in the current directory, or the packaged
    default.cfg), applies it, then serves the Root controller.

    :raises ConfigurationError: when no configuration file can be found.
    """
    setupdir = dirname(dirname(__file__))
    curdir = os.getcwd()
    # First look on the command line for a desired config file,
    # if it's not on the command line, then look for 'setup.py'
    # in the current directory. If there, load configuration
    # from a file called 'dev.cfg'. If it's not there, the project
    # is probably installed and we'll look first for a file called
    # 'prod.cfg' in the current directory and then for a default
    # config file called 'default.cfg' packaged in the egg.
    if len(sys.argv) > 1:
        configfile = sys.argv[1]
    elif exists(join(setupdir, "setup.py")):
        configfile = join(setupdir, "dev.cfg")
    elif exists(join(curdir, "prod.cfg")):
        configfile = join(curdir, "prod.cfg")
    else:
        try:
            configfile = pkg_resources.resource_filename(
                pkg_resources.Requirement.parse("tgpisa"),
                "config/default.cfg")
        except pkg_resources.DistributionNotFound:
            raise ConfigurationError("Could not find default configuration.")
    turbogears.update_config(configfile=configfile,
                             modulename="tgpisa.config")
    # Imported here rather than at module level — presumably so controllers
    # load only after the configuration has been applied; confirm before moving.
    from tgpisa.controllers import Root
    turbogears.start_server(Root())
fw1121/genomics | NGS-general/fastq_sniffer.py | 1 | 5079 | #!/usr/bin/env python
#
# fastq_sniffer.py: "sniff" FASTQ file to determine quality encoding
# Copyright (C) University of Manchester 2013 Peter Briggs
#
########################################################################
#
# fastq_sniffer.py
#
########################################################################
__version__ = "0.0.2"
"""fastq_sniffer.py
Usage: fastq_sniffer.py [ --subset N ] <fastq_file>
"Sniff" FASTQ file to try and determine likely format and quality encoding.
"""
#######################################################################
# Import modules that this module depends on
#######################################################################
import sys
import os
import optparse
# Set up for bcftbx modules
SHARE_DIR = os.path.abspath(
os.path.normpath(
os.path.join(os.path.dirname(sys.argv[0]),'..')))
sys.path.append(SHARE_DIR)
import bcftbx.FASTQFile as FASTQFile
#######################################################################
# Main program
#######################################################################
if __name__ == "__main__":
    # NOTE: this file is Python 2 (print statements).

    # Process command line using optparse
    p = optparse.OptionParser(usage="%prog [options] <fastq_file>",
                              version="%prog "+__version__,
                              description=
                              "'Sniff' FASTQ file to determine likely quality encoding.")
    p.add_option('--subset',action="store",dest="n_subset",default=None,
                 help="try to determine encoding from a subset of consisting of the first "
                 "N_SUBSET reads. (Quicker than using all reads but may not be accurate "
                 "if subset is not representative of the file as a whole.)")

    # Process the command line
    options,arguments = p.parse_args()
    if len(arguments) != 1:
        p.error("input FASTQ file required")
    else:
        fastq_file = arguments[0]
        if not os.path.exists(fastq_file):
            p.error("Input file '%s' not found" % fastq_file)

    # Get broad format type: only the first read is needed to report the
    # sequence-header format and the read length.
    print "Sniffing %s" % fastq_file
    print "\nData from first read:"
    for read in FASTQFile.FastqIterator(fastq_file):
        fastq_format = read.seqid.format
        if fastq_format is None and read.is_colorspace:
            fastq_format = 'colorspace'
        print "\tHeader format:\t%s" % str(fastq_format)
        print "\tSeq length:\t%d" % read.seqlen
        break

    # Determine the quality score range (and count reads)
    try:
        n_subset = int(options.n_subset)
    except TypeError:
        # --subset was not supplied (None): scan the whole file.
        # NOTE(review): a non-numeric --subset value raises ValueError here,
        # which is not caught.
        n_subset = None
    n_reads = 0
    # Track (min,max) of the raw ASCII codes of the quality characters.
    min_max_qual = (None,None)
    for read in FASTQFile.FastqIterator(fastq_file):
        n_reads += 1
        if min_max_qual == (None,None):
            min_max_qual = (ord(read.minquality),ord(read.maxquality))
        else:
            min_max_qual = (min(min_max_qual[0],ord(read.minquality)),
                            max(min_max_qual[1],ord(read.maxquality)))
        if n_subset is not None and n_reads == n_subset:
            break

    # Number of reads
    print "\nProcessed %d reads" % n_reads

    # Print min,max quality values
    min_qual = min_max_qual[0]
    max_qual = min_max_qual[1]
    print "Min,max quality scores:\t%d,%d\t(%s,%s)" % \
        (min_qual,max_qual,chr(min_qual),chr(max_qual))

    # Match the observed ASCII range against the known encodings; ranges
    # overlap, so several encodings may be reported as possible.
    print "\nIdentifying possible formats/quality encodings..."
    encodings = []
    galaxy_types = []
    if min_qual >= ord('!') and max_qual <= ord('I'):
        print "\tPossible Sanger/Phred+33"
        encodings.append('Phred+33')
    if fastq_format != 'colorspace':
        if min_qual >= ord(';') and max_qual <= ord('h'):
            print "\tPossible Solexa/Solexa+64"
            encodings.append('Solexa+64')
            galaxy_types.append('fastqsolexa')
        if min_qual >= ord('@') and max_qual <= ord('h'):
            print "\tPossible Illumina 1.3+/Phred+64"
            encodings.append('Phred+64')
            galaxy_types.append('fastqillumina')
        if min_qual >= ord('C') and max_qual <= ord('h'):
            print "\tPossible Illumina 1.5+/Phred+64"
            encodings.append('Phred+64')
            galaxy_types.append('fastqillumina')
        if min_qual >= ord('!') and max_qual <= ord('I'):
            print "\tPossible Illumina 1.8+/Phred+33"
            encodings.append('Phred+33')
            galaxy_types.append('fastqsanger')
    else:
        galaxy_types.append('fastqcssanger')

    print "\nLikely encodings:"
    if encodings:
        # Make sure list only has unique values
        encodings = list(set(encodings))
        for encoding in encodings:
            print "\t%s" % encoding
    else:
        print "\tNone identified"

    print "\nLikely galaxy types:"
    if galaxy_types:
        # Make sure list only has unique values
        galaxy_types = list(set(galaxy_types))
        for galaxy_type in galaxy_types:
            print "\t%s" % galaxy_type
    else:
        print "\tNone identified"
| artistic-2.0 |
DARKPOP/external_chromium_org_third_party_WebKit | Tools/Scripts/webkitpy/tool/main.py | 44 | 3144 | # Copyright (c) 2010 Google Inc. All rights reserved.
# Copyright (c) 2009 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# A tool for automating dealing with bugzilla, posting patches, committing patches, etc.
from optparse import make_option
from webkitpy.common.host import Host
from webkitpy.tool.multicommandtool import MultiCommandTool
from webkitpy.tool import commands
class WebKitPatch(MultiCommandTool, Host):
    """Entry point for the webkit-patch tool: command dispatch plus Host services."""

    global_options = [
        make_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="enable all logging"),
        make_option("-d", "--directory", action="append", dest="patch_directories", default=[], help="Directory to look at for changed files"),
    ]

    def __init__(self, path):
        # Initialize both bases explicitly: MultiCommandTool sets up the
        # command machinery, Host provides SCM/filesystem services.
        MultiCommandTool.__init__(self)
        Host.__init__(self)
        self._path = path

    def path(self):
        """Return the path this tool was invoked as."""
        return self._path

    def should_show_in_main_help(self, command):
        """List a command in the main help only if it opts in and, when it
        needs local commits, the SCM actually supports them."""
        if not command.show_in_main_help:
            return False
        if not command.requires_local_commits:
            return True
        return self.scm().supports_local_commits()

    # FIXME: This may be unnecessary since we pass global options to all commands during execute() as well.
    def handle_global_options(self, options):
        self.initialize_scm(options.patch_directories)

    def should_execute_command(self, command):
        """Return (should_run, failure_reason); failure_reason is None when runnable."""
        wants_local_commits = command.requires_local_commits
        if not wants_local_commits or self.scm().supports_local_commits():
            return (True, None)
        failure_reason = "%s requires local commits using %s in %s." % (command.name, self.scm().display_name(), self.scm().checkout_root)
        return (False, failure_reason)
| bsd-3-clause |
praneethkumarpidugu/matchmaking | lib/python2.7/site-packages/pycparser/c_parser.py | 39 | 62212 | #------------------------------------------------------------------------------
# pycparser: c_parser.py
#
# CParser class: Parser and AST builder for the C language
#
# Copyright (C) 2008-2015, Eli Bendersky
# License: BSD
#------------------------------------------------------------------------------
import re
from .ply import yacc
from . import c_ast
from .c_lexer import CLexer
from .plyparser import PLYParser, Coord, ParseError
from .ast_transforms import fix_switch_cases
class CParser(PLYParser):
    def __init__(
            self,
            lex_optimize=True,
            lextab='pycparser.lextab',
            yacc_optimize=True,
            yacctab='pycparser.yacctab',
            yacc_debug=False,
            taboutputdir=''):
        """ Create a new CParser.

            Some arguments for controlling the debug/optimization
            level of the parser are provided. The defaults are
            tuned for release/performance mode.
            The simple rules for using them are:
            *) When tweaking CParser/CLexer, set these to False
            *) When releasing a stable parser, set to True

            lex_optimize:
                Set to False when you're modifying the lexer.
                Otherwise, changes in the lexer won't be used, if
                some lextab.py file exists.
                When releasing with a stable lexer, set to True
                to save the re-generation of the lexer table on
                each run.

            lextab:
                Points to the lex table that's used for optimized
                mode. Only if you're modifying the lexer and want
                some tests to avoid re-generating the table, make
                this point to a local lex table file (that's been
                earlier generated with lex_optimize=True)

            yacc_optimize:
                Set to False when you're modifying the parser.
                Otherwise, changes in the parser won't be used, if
                some parsetab.py file exists.
                When releasing with a stable parser, set to True
                to save the re-generation of the parser table on
                each run.

            yacctab:
                Points to the yacc table that's used for optimized
                mode. Only if you're modifying the parser, make
                this point to a local yacc table file

            yacc_debug:
                Generate a parser.out file that explains how yacc
                built the parsing table from the grammar.

            taboutputdir:
                Set this parameter to control the location of generated
                lextab and yacctab files.
        """
        # Build the lexer. The callbacks let the lexer report errors through
        # this parser and query/maintain the typedef scope stack it owns.
        self.clex = CLexer(
            error_func=self._lex_error_func,
            on_lbrace_func=self._lex_on_lbrace_func,
            on_rbrace_func=self._lex_on_rbrace_func,
            type_lookup_func=self._lex_type_lookup_func)

        self.clex.build(
            optimize=lex_optimize,
            lextab=lextab,
            outputdir=taboutputdir)
        self.tokens = self.clex.tokens

        # For each rule below, _create_opt_rule generates a "<rule>_opt"
        # variant that also matches the empty string.
        rules_with_opt = [
            'abstract_declarator',
            'assignment_expression',
            'declaration_list',
            'declaration_specifiers',
            'designation',
            'expression',
            'identifier_list',
            'init_declarator_list',
            'initializer_list',
            'parameter_type_list',
            'specifier_qualifier_list',
            'block_item_list',
            'type_qualifier_list',
            'struct_declarator_list'
        ]

        for rule in rules_with_opt:
            self._create_opt_rule(rule)

        self.cparser = yacc.yacc(
            module=self,
            start='translation_unit_or_empty',
            debug=yacc_debug,
            optimize=yacc_optimize,
            tabmodule=yacctab,
            outputdir=taboutputdir)

        # Stack of scopes for keeping track of symbols. _scope_stack[-1] is
        # the current (topmost) scope. Each scope is a dictionary that
        # specifies whether a name is a type. If _scope_stack[n][name] is
        # True, 'name' is currently a type in the scope. If it's False,
        # 'name' is used in the scope but not as a type (for instance, if we
        # saw: int name;
        # If 'name' is not a key in _scope_stack[n] then 'name' was not defined
        # in this scope at all.
        self._scope_stack = [dict()]

        # Keeps track of the last token given to yacc (the lookahead token)
        self._last_yielded_token = None
    def parse(self, text, filename='', debuglevel=0):
        """ Parses C code and returns an AST.

            text:
                A string containing the C source code

            filename:
                Name of the file being parsed (for meaningful
                error messages)

            debuglevel:
                Debug level to yacc
        """
        self.clex.filename = filename
        self.clex.reset_lineno()
        # Each parse starts from a fresh global scope and no lookahead.
        self._scope_stack = [dict()]
        self._last_yielded_token = None
        return self.cparser.parse(
            input=text,
            lexer=self.clex,
            debug=debuglevel)
######################-- PRIVATE --######################
def _push_scope(self):
self._scope_stack.append(dict())
def _pop_scope(self):
assert len(self._scope_stack) > 1
self._scope_stack.pop()
def _add_typedef_name(self, name, coord):
""" Add a new typedef name (ie a TYPEID) to the current scope
"""
if not self._scope_stack[-1].get(name, True):
self._parse_error(
"Typedef %r previously declared as non-typedef "
"in this scope" % name, coord)
self._scope_stack[-1][name] = True
def _add_identifier(self, name, coord):
""" Add a new object, function, or enum member name (ie an ID) to the
current scope
"""
if self._scope_stack[-1].get(name, False):
self._parse_error(
"Non-typedef %r previously declared as typedef "
"in this scope" % name, coord)
self._scope_stack[-1][name] = False
def _is_type_in_scope(self, name):
""" Is *name* a typedef-name in the current scope?
"""
for scope in reversed(self._scope_stack):
# If name is an identifier in this scope it shadows typedefs in
# higher scopes.
in_scope = scope.get(name)
if in_scope is not None: return in_scope
return False
    def _lex_error_func(self, msg, line, column):
        # Lexer error callback: surface lexing errors as parse errors with
        # a source coordinate.
        self._parse_error(msg, self._coord(line, column))

    def _lex_on_lbrace_func(self):
        # Lexer callback for '{': opens a new scope for typedef tracking.
        self._push_scope()

    def _lex_on_rbrace_func(self):
        # Lexer callback for '}': closes the innermost scope.
        self._pop_scope()
def _lex_type_lookup_func(self, name):
""" Looks up types that were previously defined with
typedef.
Passed to the lexer for recognizing identifiers that
are types.
"""
is_type = self._is_type_in_scope(name)
return is_type
    def _get_yacc_lookahead_token(self):
        """ We need access to yacc's lookahead token in certain cases.
            This is the last token yacc requested from the lexer, so we
            ask the lexer.
        """
        return self.clex.last_token
# To understand what's going on here, read sections A.8.5 and
# A.8.6 of K&R2 very carefully.
#
# A C type consists of a basic type declaration, with a list
# of modifiers. For example:
#
# int *c[5];
#
# The basic declaration here is 'int c', and the pointer and
# the array are the modifiers.
#
# Basic declarations are represented by TypeDecl (from module c_ast) and the
# modifiers are FuncDecl, PtrDecl and ArrayDecl.
#
# The standard states that whenever a new modifier is parsed, it should be
# added to the end of the list of modifiers. For example:
#
# K&R2 A.8.6.2: Array Declarators
#
# In a declaration T D where D has the form
# D1 [constant-expression-opt]
# and the type of the identifier in the declaration T D1 is
# "type-modifier T", the type of the
# identifier of D is "type-modifier array of T"
#
# This is what this method does. The declarator it receives
# can be a list of declarators ending with TypeDecl. It
# tacks the modifier to the end of this list, just before
# the TypeDecl.
#
# Additionally, the modifier may be a list itself. This is
# useful for pointers, that can come as a chain from the rule
# p_pointer. In this case, the whole modifier list is spliced
# into the new location.
    def _type_modify_decl(self, decl, modifier):
        """ Tacks a type modifier on a declarator, and returns
            the modified declarator.

            The modifier is appended just before the innermost TypeDecl
            (see the long comment above for the K&R2 rationale).

            Note: the declarator and modifier may be modified
        """
        modifier_head = modifier
        modifier_tail = modifier

        # The modifier may be a nested list. Reach its tail.
        while modifier_tail.type:
            modifier_tail = modifier_tail.type

        # If the decl is a basic type, just tack the modifier onto
        # it
        if isinstance(decl, c_ast.TypeDecl):
            modifier_tail.type = decl
            return modifier
        else:
            # Otherwise, the decl is a list of modifiers. Reach
            # its tail and splice the modifier onto the tail,
            # pointing to the underlying basic type.
            decl_tail = decl

            while not isinstance(decl_tail.type, c_ast.TypeDecl):
                decl_tail = decl_tail.type

            modifier_tail.type = decl_tail.type
            decl_tail.type = modifier_head
            return decl
# Due to the order in which declarators are constructed,
# they have to be fixed in order to look like a normal AST.
#
# When a declaration arrives from syntax construction, it has
# these problems:
# * The innermost TypeDecl has no type (because the basic
# type is only known at the uppermost declaration level)
# * The declaration has no variable name, since that is saved
# in the innermost TypeDecl
# * The typename of the declaration is a list of type
# specifiers, and not a node. Here, basic identifier types
# should be separated from more complex types like enums
# and structs.
#
# This method fixes these problems.
#
    def _fix_decl_name_type(self, decl, typename):
        """ Fixes a declaration. Modifies decl.

            Moves the name out of the innermost TypeDecl onto the
            declaration, and installs the basic type collected in
            *typename* (see the long comment above).
        """
        # Reach the underlying basic type
        type = decl
        while not isinstance(type, c_ast.TypeDecl):
            type = type.type

        decl.name = type.declname
        type.quals = decl.quals

        # The typename is a list of types. If any type in this
        # list isn't an IdentifierType, it must be the only
        # type in the list (it's illegal to declare "int enum ..")
        # If all the types are basic, they're collected in the
        # IdentifierType holder.
        for tn in typename:
            if not isinstance(tn, c_ast.IdentifierType):
                if len(typename) > 1:
                    self._parse_error(
                        "Invalid multiple types specified", tn.coord)
                else:
                    type.type = tn
                    return decl

        if not typename:
            # Functions default to returning int
            if not isinstance(decl.type, c_ast.FuncDecl):
                self._parse_error(
                    "Missing type in declaration", decl.coord)
            type.type = c_ast.IdentifierType(
                ['int'],
                coord=decl.coord)
        else:
            # At this point, we know that typename is a list of IdentifierType
            # nodes. Concatenate all the names into a single list.
            type.type = c_ast.IdentifierType(
                [name for id in typename for name in id.names],
                coord=typename[0].coord)
        return decl
def _add_declaration_specifier(self, declspec, newspec, kind):
""" Declaration specifiers are represented by a dictionary
with the entries:
* qual: a list of type qualifiers
* storage: a list of storage type qualifiers
* type: a list of type specifiers
* function: a list of function specifiers
This method is given a declaration specifier, and a
new specifier of a given kind.
Returns the declaration specifier, with the new
specifier incorporated.
"""
spec = declspec or dict(qual=[], storage=[], type=[], function=[])
spec[kind].insert(0, newspec)
return spec
    def _build_declarations(self, spec, decls, typedef_namespace=False):
        """ Builds a list of declarations all sharing the given specifiers.
            If typedef_namespace is true, each declared name is added
            to the "typedef namespace", which also includes objects,
            functions, and enum constants.
        """
        is_typedef = 'typedef' in spec['storage']
        declarations = []

        # Bit-fields are allowed to be unnamed.
        if decls[0].get('bitsize') is not None:
            pass

        # When redeclaring typedef names as identifiers in inner scopes, a
        # problem can occur where the identifier gets grouped into
        # spec['type'], leaving decl as None. This can only occur for the
        # first declarator.
        elif decls[0]['decl'] is None:
            if len(spec['type']) < 2 or len(spec['type'][-1].names) != 1 or \
                    not self._is_type_in_scope(spec['type'][-1].names[0]):
                # Find a coordinate to attach the error to, if any type
                # node carries one.
                coord = '?'
                for t in spec['type']:
                    if hasattr(t, 'coord'):
                        coord = t.coord
                        break
                self._parse_error('Invalid declaration', coord)

            # Make this look as if it came from "direct_declarator:ID"
            decls[0]['decl'] = c_ast.TypeDecl(
                declname=spec['type'][-1].names[0],
                type=None,
                quals=None,
                coord=spec['type'][-1].coord)
            # Remove the "new" type's name from the end of spec['type']
            del spec['type'][-1]

        # A similar problem can occur where the declaration ends up looking
        # like an abstract declarator. Give it a name if this is the case.
        elif not isinstance(decls[0]['decl'],
                            (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)):
            decls_0_tail = decls[0]['decl']
            while not isinstance(decls_0_tail, c_ast.TypeDecl):
                decls_0_tail = decls_0_tail.type
            if decls_0_tail.declname is None:
                decls_0_tail.declname = spec['type'][-1].names[0]
                del spec['type'][-1]

        for decl in decls:
            assert decl['decl'] is not None
            if is_typedef:
                declaration = c_ast.Typedef(
                    name=None,
                    quals=spec['qual'],
                    storage=spec['storage'],
                    type=decl['decl'],
                    coord=decl['decl'].coord)
            else:
                declaration = c_ast.Decl(
                    name=None,
                    quals=spec['qual'],
                    storage=spec['storage'],
                    funcspec=spec['function'],
                    type=decl['decl'],
                    init=decl.get('init'),
                    bitsize=decl.get('bitsize'),
                    coord=decl['decl'].coord)

            # Struct/union/plain-identifier types need no name/type fixup.
            if isinstance(declaration.type,
                          (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)):
                fixed_decl = declaration
            else:
                fixed_decl = self._fix_decl_name_type(declaration, spec['type'])

            # Add the type name defined by typedef to a
            # symbol table (for usage in the lexer)
            if typedef_namespace:
                if is_typedef:
                    self._add_typedef_name(fixed_decl.name, fixed_decl.coord)
                else:
                    self._add_identifier(fixed_decl.name, fixed_decl.coord)

            declarations.append(fixed_decl)

        return declarations
    def _build_function_definition(self, spec, decl, param_decls, body):
        """ Builds a function definition.

            spec is the accumulated declaration-specifier dict, decl the
            declarator, param_decls the (K&R-style) parameter declarations
            or None, and body the compound statement.
        """
        # 'typedef' cannot appear in a function definition's specifiers.
        assert 'typedef' not in spec['storage']

        # Reuse the declaration machinery; it also registers the function
        # name in the typedef namespace.
        declaration = self._build_declarations(
            spec=spec,
            decls=[dict(decl=decl, init=None)],
            typedef_namespace=True)[0]

        return c_ast.FuncDef(
            decl=declaration,
            param_decls=param_decls,
            body=body,
            coord=decl.coord)
def _select_struct_union_class(self, token):
""" Given a token (either STRUCT or UNION), selects the
appropriate AST class.
"""
if token == 'struct':
return c_ast.Struct
else:
return c_ast.Union
##
## Precedence and associativity of operators
##
precedence = (
('left', 'LOR'),
('left', 'LAND'),
('left', 'OR'),
('left', 'XOR'),
('left', 'AND'),
('left', 'EQ', 'NE'),
('left', 'GT', 'GE', 'LT', 'LE'),
('left', 'RSHIFT', 'LSHIFT'),
('left', 'PLUS', 'MINUS'),
('left', 'TIMES', 'DIVIDE', 'MOD')
)
##
## Grammar productions
## Implementation of the BNF defined in K&R2 A.13
##
# Wrapper around a translation unit, to allow for empty input.
# Not strictly part of the C99 Grammar, but useful in practice.
#
    # NOTE: the docstrings of the p_* methods below ARE the grammar (PLY
    # reads them), so they must not be edited as documentation.
    def p_translation_unit_or_empty(self, p):
        """ translation_unit_or_empty   : translation_unit
                                        | empty
        """
        # An empty input still yields a FileAST, just with no externals.
        if p[1] is None:
            p[0] = c_ast.FileAST([])
        else:
            p[0] = c_ast.FileAST(p[1])

    def p_translation_unit_1(self, p):
        """ translation_unit    : external_declaration
        """
        # Note: external_declaration is already a list
        p[0] = p[1]

    def p_translation_unit_2(self, p):
        """ translation_unit    : translation_unit external_declaration
        """
        if p[2] is not None:
            p[1].extend(p[2])
        p[0] = p[1]

    # Declarations always come as lists (because they can be
    # several in one line), so we wrap the function definition
    # into a list as well, to make the return value of
    # external_declaration homogenous.
    def p_external_declaration_1(self, p):
        """ external_declaration    : function_definition
        """
        p[0] = [p[1]]

    def p_external_declaration_2(self, p):
        """ external_declaration    : declaration
        """
        p[0] = p[1]

    def p_external_declaration_3(self, p):
        """ external_declaration    : pp_directive
        """
        p[0] = p[1]

    def p_external_declaration_4(self, p):
        """ external_declaration    : SEMI
        """
        # A stray file-scope semicolon contributes no declaration.
        p[0] = None

    def p_pp_directive(self, p):
        """ pp_directive  : PPHASH
        """
        # Input is expected to be preprocessed before parsing.
        self._parse_error('Directives not supported yet',
                          self._coord(p.lineno(1)))
    # In function definitions, the declarator can be followed by
    # a declaration list, for old "K&R style" function definitios.
    def p_function_definition_1(self, p):
        """ function_definition : declarator declaration_list_opt compound_statement
        """
        # no declaration specifiers - 'int' becomes the default type
        spec = dict(
            qual=[],
            storage=[],
            type=[c_ast.IdentifierType(['int'],
                                       coord=self._coord(p.lineno(1)))],
            function=[])

        p[0] = self._build_function_definition(
            spec=spec,
            decl=p[1],
            param_decls=p[2],
            body=p[3])

    def p_function_definition_2(self, p):
        """ function_definition : declaration_specifiers declarator declaration_list_opt compound_statement
        """
        spec = p[1]

        p[0] = self._build_function_definition(
            spec=spec,
            decl=p[2],
            param_decls=p[3],
            body=p[4])

    def p_statement(self, p):
        """ statement   : labeled_statement
                        | expression_statement
                        | compound_statement
                        | selection_statement
                        | iteration_statement
                        | jump_statement
        """
        p[0] = p[1]
    # In C, declarations can come several in a line:
    #   int x, *px, romulo = 5;
    #
    # However, for the AST, we will split them to separate Decl
    # nodes.
    #
    # This rule splits its declarations and always returns a list
    # of Decl nodes, even if it's one element long.
    def p_decl_body(self, p):
        """ decl_body : declaration_specifiers init_declarator_list_opt
        """
        spec = p[1]

        # p[2] (init_declarator_list_opt) is either a list or None
        if p[2] is None:
            # By the standard, you must have at least one declarator unless
            # declaring a structure tag, a union tag, or the members of an
            # enumeration.
            ty = spec['type']
            s_u_or_e = (c_ast.Struct, c_ast.Union, c_ast.Enum)
            if len(ty) == 1 and isinstance(ty[0], s_u_or_e):
                decls = [c_ast.Decl(
                    name=None,
                    quals=spec['qual'],
                    storage=spec['storage'],
                    funcspec=spec['function'],
                    type=ty[0],
                    init=None,
                    bitsize=None,
                    coord=ty[0].coord)]

            # However, this case can also occur on redeclared identifiers in
            # an inner scope. The trouble is that the redeclared type's name
            # gets grouped into declaration_specifiers; _build_declarations
            # compensates for this.
            else:
                decls = self._build_declarations(
                    spec=spec,
                    decls=[dict(decl=None, init=None)],
                    typedef_namespace=True)

        else:
            decls = self._build_declarations(
                spec=spec,
                decls=p[2],
                typedef_namespace=True)

        p[0] = decls

    # The declaration has been split to a decl_body sub-rule and
    # SEMI, because having them in a single rule created a problem
    # for defining typedefs.
    #
    # If a typedef line was directly followed by a line using the
    # type defined with the typedef, the type would not be
    # recognized. This is because to reduce the declaration rule,
    # the parser's lookahead asked for the token after SEMI, which
    # was the type from the next line, and the lexer had no chance
    # to see the updated type symbol table.
    #
    # Splitting solves this problem, because after seeing SEMI,
    # the parser reduces decl_body, which actually adds the new
    # type into the table to be seen by the lexer before the next
    # line is reached.
    def p_declaration(self, p):
        """ declaration : decl_body SEMI
        """
        p[0] = p[1]

    # Since each declaration is a list of declarations, this
    # rule will combine all the declarations and return a single
    # list
    def p_declaration_list(self, p):
        """ declaration_list    : declaration
                                | declaration_list declaration
        """
        p[0] = p[1] if len(p) == 2 else p[1] + p[2]
    # Each declaration_specifiers rule accumulates one specifier into the
    # shared spec dict under its kind ('qual'/'type'/'storage'/'function').
    def p_declaration_specifiers_1(self, p):
        """ declaration_specifiers  : type_qualifier declaration_specifiers_opt
        """
        p[0] = self._add_declaration_specifier(p[2], p[1], 'qual')

    def p_declaration_specifiers_2(self, p):
        """ declaration_specifiers  : type_specifier declaration_specifiers_opt
        """
        p[0] = self._add_declaration_specifier(p[2], p[1], 'type')

    def p_declaration_specifiers_3(self, p):
        """ declaration_specifiers  : storage_class_specifier declaration_specifiers_opt
        """
        p[0] = self._add_declaration_specifier(p[2], p[1], 'storage')

    def p_declaration_specifiers_4(self, p):
        """ declaration_specifiers  : function_specifier declaration_specifiers_opt
        """
        p[0] = self._add_declaration_specifier(p[2], p[1], 'function')

    def p_storage_class_specifier(self, p):
        """ storage_class_specifier : AUTO
                                    | REGISTER
                                    | STATIC
                                    | EXTERN
                                    | TYPEDEF
        """
        p[0] = p[1]

    def p_function_specifier(self, p):
        """ function_specifier  : INLINE
        """
        p[0] = p[1]

    def p_type_specifier_1(self, p):
        """ type_specifier  : VOID
                            | _BOOL
                            | CHAR
                            | SHORT
                            | INT
                            | LONG
                            | FLOAT
                            | DOUBLE
                            | _COMPLEX
                            | SIGNED
                            | UNSIGNED
        """
        p[0] = c_ast.IdentifierType([p[1]], coord=self._coord(p.lineno(1)))

    def p_type_specifier_2(self, p):
        """ type_specifier  : typedef_name
                            | enum_specifier
                            | struct_or_union_specifier
        """
        p[0] = p[1]

    def p_type_qualifier(self, p):
        """ type_qualifier  : CONST
                            | RESTRICT
                            | VOLATILE
        """
        p[0] = p[1]
    def p_init_declarator_list_1(self, p):
        """ init_declarator_list    : init_declarator
                                    | init_declarator_list COMMA init_declarator
        """
        p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]

    # If the code is declaring a variable that was declared a typedef in an
    # outer scope, yacc will think the name is part of declaration_specifiers,
    # not init_declarator, and will then get confused by EQUALS. Pass None
    # up in place of declarator, and handle this at a higher level.
    def p_init_declarator_list_2(self, p):
        """ init_declarator_list    : EQUALS initializer
        """
        p[0] = [dict(decl=None, init=p[2])]

    # Similarly, if the code contains duplicate typedefs of, for example,
    # array types, the array portion will appear as an abstract declarator.
    def p_init_declarator_list_3(self, p):
        """ init_declarator_list    : abstract_declarator
        """
        p[0] = [dict(decl=p[1], init=None)]

    # Returns a {decl=<declarator> : init=<initializer>} dictionary
    # If there's no initializer, uses None
    def p_init_declarator(self, p):
        """ init_declarator : declarator
                            | declarator EQUALS initializer
        """
        p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))

    def p_specifier_qualifier_list_1(self, p):
        """ specifier_qualifier_list    : type_qualifier specifier_qualifier_list_opt
        """
        p[0] = self._add_declaration_specifier(p[2], p[1], 'qual')

    def p_specifier_qualifier_list_2(self, p):
        """ specifier_qualifier_list    : type_specifier specifier_qualifier_list_opt
        """
        p[0] = self._add_declaration_specifier(p[2], p[1], 'type')
    # TYPEID is allowed here (and in other struct/enum related tag names), because
    # struct/enum tags reside in their own namespace and can be named the same as types
    def p_struct_or_union_specifier_1(self, p):
        """ struct_or_union_specifier   : struct_or_union ID
                                        | struct_or_union TYPEID
        """
        # Tag reference without a body (e.g. "struct foo x;").
        klass = self._select_struct_union_class(p[1])
        p[0] = klass(
            name=p[2],
            decls=None,
            coord=self._coord(p.lineno(2)))

    def p_struct_or_union_specifier_2(self, p):
        """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close
        """
        # Anonymous struct/union with a body.
        klass = self._select_struct_union_class(p[1])
        p[0] = klass(
            name=None,
            decls=p[3],
            coord=self._coord(p.lineno(2)))

    def p_struct_or_union_specifier_3(self, p):
        """ struct_or_union_specifier   : struct_or_union ID brace_open struct_declaration_list brace_close
                                        | struct_or_union TYPEID brace_open struct_declaration_list brace_close
        """
        # Named struct/union definition with a body.
        klass = self._select_struct_union_class(p[1])
        p[0] = klass(
            name=p[2],
            decls=p[4],
            coord=self._coord(p.lineno(2)))

    def p_struct_or_union(self, p):
        """ struct_or_union : STRUCT
                            | UNION
        """
        p[0] = p[1]

    # Combine all declarations into a single list
    def p_struct_declaration_list(self, p):
        """ struct_declaration_list     : struct_declaration
                                        | struct_declaration_list struct_declaration
        """
        p[0] = p[1] if len(p) == 2 else p[1] + p[2]
    def p_struct_declaration_1(self, p):
        """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI
        """
        spec = p[1]
        assert 'typedef' not in spec['storage']

        if p[2] is not None:
            decls = self._build_declarations(
                spec=spec,
                decls=p[2])

        elif len(spec['type']) == 1:
            # Anonymous struct/union, gcc extension, C1x feature.
            # Although the standard only allows structs/unions here, I see no
            # reason to disallow other types since some compilers have typedefs
            # here, and pycparser isn't about rejecting all invalid code.
            node = spec['type'][0]
            if isinstance(node, c_ast.Node):
                decl_type = node
            else:
                decl_type = c_ast.IdentifierType(node)

            decls = self._build_declarations(
                spec=spec,
                decls=[dict(decl=decl_type)])

        else:
            # Structure/union members can have the same names as typedefs.
            # The trouble is that the member's name gets grouped into
            # specifier_qualifier_list; _build_declarations compensates.
            decls = self._build_declarations(
                spec=spec,
                decls=[dict(decl=None, init=None)])

        p[0] = decls

    def p_struct_declaration_2(self, p):
        """ struct_declaration : specifier_qualifier_list abstract_declarator SEMI
        """
        # "Abstract declarator?!", you ask? Structure members can have the
        # same names as typedefs. The trouble is that the member's name gets
        # grouped into specifier_qualifier_list, leaving any remainder to
        # appear as an abstract declarator, as in:
        #   typedef int Foo;
        #   struct { Foo Foo[3]; };
        p[0] = self._build_declarations(
            spec=p[1],
            decls=[dict(decl=p[2], init=None)])

    def p_struct_declarator_list(self, p):
        """ struct_declarator_list  : struct_declarator
                                    | struct_declarator_list COMMA struct_declarator
        """
        p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]

    # struct_declarator passes up a dict with the keys: decl (for
    # the underlying declarator) and bitsize (for the bitsize)
    def p_struct_declarator_1(self, p):
        """ struct_declarator : declarator
        """
        p[0] = {'decl': p[1], 'bitsize': None}

    def p_struct_declarator_2(self, p):
        """ struct_declarator   : declarator COLON constant_expression
                                | COLON constant_expression
        """
        if len(p) > 3:
            p[0] = {'decl': p[1], 'bitsize': p[3]}
        else:
            # Unnamed bit-field: synthesize an empty TypeDecl to hold it.
            p[0] = {'decl': c_ast.TypeDecl(None, None, None), 'bitsize': p[2]}
    def p_enum_specifier_1(self, p):
        """ enum_specifier  : ENUM ID
                            | ENUM TYPEID
        """
        # Enum tag reference without a body.
        p[0] = c_ast.Enum(p[2], None, self._coord(p.lineno(1)))

    def p_enum_specifier_2(self, p):
        """ enum_specifier  : ENUM brace_open enumerator_list brace_close
        """
        # Anonymous enum definition.
        p[0] = c_ast.Enum(None, p[3], self._coord(p.lineno(1)))

    def p_enum_specifier_3(self, p):
        """ enum_specifier  : ENUM ID brace_open enumerator_list brace_close
                            | ENUM TYPEID brace_open enumerator_list brace_close
        """
        p[0] = c_ast.Enum(p[2], p[4], self._coord(p.lineno(1)))

    def p_enumerator_list(self, p):
        """ enumerator_list : enumerator
                            | enumerator_list COMMA
                            | enumerator_list COMMA enumerator
        """
        # The middle alternative accepts a trailing comma after the last
        # enumerator.
        if len(p) == 2:
            p[0] = c_ast.EnumeratorList([p[1]], p[1].coord)
        elif len(p) == 3:
            p[0] = p[1]
        else:
            p[1].enumerators.append(p[3])
            p[0] = p[1]

    def p_enumerator(self, p):
        """ enumerator  : ID
                        | ID EQUALS constant_expression
        """
        if len(p) == 2:
            enumerator = c_ast.Enumerator(
                p[1], None,
                self._coord(p.lineno(1)))
        else:
            enumerator = c_ast.Enumerator(
                p[1], p[3],
                self._coord(p.lineno(1)))
        # Enum members live in the ordinary identifier namespace.
        self._add_identifier(enumerator.name, enumerator.coord)

        p[0] = enumerator
def p_declarator_1(self, p):
    """ declarator : direct_declarator
    """
    # Pass-through: no pointer part.
    p[0] = p[1]
def p_declarator_2(self, p):
    """ declarator : pointer direct_declarator
    """
    # Apply the pointer modifier chain to the direct declarator.
    p[0] = self._type_modify_decl(p[2], p[1])
# Since it's impossible for a type to be specified after a pointer, assume
# it's intended to be the name for this declaration. _add_identifier will
# raise an error if this TYPEID can't be redeclared.
#
def p_declarator_3(self, p):
    """ declarator : pointer TYPEID
    """
    # Since it's impossible for a type to be specified after a pointer,
    # assume the TYPEID is intended to be the name for this declaration.
    # _add_identifier will raise an error if it can't be redeclared.
    decl = c_ast.TypeDecl(
        declname=p[2],
        type=None,
        quals=None,
        coord=self._coord(p.lineno(2)))

    p[0] = self._type_modify_decl(decl, p[1])
def p_direct_declarator_1(self, p):
    """ direct_declarator : ID
    """
    # A bare identifier declares a name whose type is filled in later
    # by _type_modify_decl / _fix_decl_name_type.
    p[0] = c_ast.TypeDecl(
        declname=p[1],
        type=None,
        quals=None,
        coord=self._coord(p.lineno(1)))
def p_direct_declarator_2(self, p):
    """ direct_declarator : LPAREN declarator RPAREN
    """
    # Parenthesized declarator: grouping only, no semantic effect.
    p[0] = p[2]
def p_direct_declarator_3(self, p):
    """ direct_declarator : direct_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
    """
    # This production always has exactly five right-hand symbols, so
    # len(p) == 6 unconditionally; the previous `len(p) > 5` guards were
    # dead code. p[3] is the (optional) qualifier list, p[4] the
    # (optional) dimension expression.
    #
    # Accept dimension qualifiers per C99 6.7.5.3 p7.
    quals = p[3] or []
    arr = c_ast.ArrayDecl(
        type=None,
        dim=p[4],
        dim_quals=quals,
        coord=p[1].coord)

    p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
def p_direct_declarator_4(self, p):
    """ direct_declarator : direct_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET
                          | direct_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET
    """
    # Using slice notation for PLY objects doesn't work in Python 3 for the
    # version of PLY embedded with pycparser; see PLY Google Code issue 30.
    # Work around that here by listing the two elements separately.
    # STATIC and the qualifiers both land in dim_quals, in source order.
    listed_quals = [item if isinstance(item, list) else [item]
                    for item in [p[3], p[4]]]
    dim_quals = [qual for sublist in listed_quals for qual in sublist
                 if qual is not None]
    arr = c_ast.ArrayDecl(
        type=None,
        dim=p[5],
        dim_quals=dim_quals,
        coord=p[1].coord)

    p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
# Special for VLAs
#
def p_direct_declarator_5(self, p):
    """ direct_declarator : direct_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET
    """
    # VLA of unspecified size, `int a[*]` (C99 6.7.5.2). The '*' token is
    # preserved as the dimension via an ID node so generators can re-emit it.
    arr = c_ast.ArrayDecl(
        type=None,
        dim=c_ast.ID(p[4], self._coord(p.lineno(4))),
        # PEP 8: use an identity test against None, not equality.
        dim_quals=p[3] if p[3] is not None else [],
        coord=p[1].coord)

    p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
def p_direct_declarator_6(self, p):
    """ direct_declarator : direct_declarator LPAREN parameter_type_list RPAREN
                          | direct_declarator LPAREN identifier_list_opt RPAREN
    """
    func = c_ast.FuncDecl(
        args=p[3],
        type=None,
        coord=p[1].coord)

    # To see why _get_yacc_lookahead_token is needed, consider:
    #   typedef char TT;
    #   void foo(int TT) { TT = 10; }
    # Outside the function, TT is a typedef, but inside (starting and
    # ending with the braces) it's a parameter. The trouble begins with
    # yacc's lookahead token. We don't know if we're declaring or
    # defining a function until we see LBRACE, but if we wait for yacc to
    # trigger a rule on that token, then TT will have already been read
    # and incorrectly interpreted as TYPEID. We need to add the
    # parameters to the scope the moment the lexer sees LBRACE.
    #
    if self._get_yacc_lookahead_token().type == "LBRACE":
        if func.args is not None:
            for param in func.args.params:
                # '...' has no name to register; stop at it.
                if isinstance(param, c_ast.EllipsisParam): break
                self._add_identifier(param.name, param.coord)

    p[0] = self._type_modify_decl(decl=p[1], modifier=func)
def p_pointer(self, p):
    """ pointer : TIMES type_qualifier_list_opt
                | TIMES type_qualifier_list_opt pointer
    """
    coord = self._coord(p.lineno(1))
    # Pointer decls nest from inside out. This is important when different
    # levels have different qualifiers. For example:
    #
    #  char * const * p;
    #
    # Means "pointer to const pointer to char"
    #
    # While:
    #
    #  char ** const p;
    #
    # Means "const pointer to pointer to char"
    #
    # So when we construct PtrDecl nestings, the leftmost pointer goes in
    # as the most nested type.
    nested_type = c_ast.PtrDecl(quals=p[2] or [], type=None, coord=coord)
    if len(p) > 3:
        # Walk to the innermost PtrDecl of the tail and hang the new
        # (leftmost) pointer beneath it.
        tail_type = p[3]
        while tail_type.type is not None:
            tail_type = tail_type.type
        tail_type.type = nested_type
        p[0] = p[3]
    else:
        p[0] = nested_type
def p_type_qualifier_list(self, p):
    """ type_qualifier_list : type_qualifier
                            | type_qualifier_list type_qualifier
    """
    # Qualifiers accumulate left-to-right into a plain Python list.
    if len(p) == 2:
        p[0] = [p[1]]
    else:
        p[0] = p[1] + [p[2]]
def p_parameter_type_list(self, p):
    """ parameter_type_list : parameter_list
                            | parameter_list COMMA ELLIPSIS
    """
    # A trailing '...' is appended to the existing ParamList in place.
    if len(p) > 2:
        p[1].params.append(c_ast.EllipsisParam(self._coord(p.lineno(3))))

    p[0] = p[1]
def p_parameter_list(self, p):
    """ parameter_list : parameter_declaration
                       | parameter_list COMMA parameter_declaration
    """
    if len(p) == 2:  # single parameter
        p[0] = c_ast.ParamList([p[1]], p[1].coord)
    else:
        # Append to the ParamList built so far.
        p[1].params.append(p[3])
        p[0] = p[1]
def p_parameter_declaration_1(self, p):
    """ parameter_declaration : declaration_specifiers declarator
    """
    spec = p[1]
    # C89 implicit-int: a parameter with no type specifier defaults to int.
    if not spec['type']:
        spec['type'] = [c_ast.IdentifierType(['int'],
            coord=self._coord(p.lineno(1)))]
    p[0] = self._build_declarations(
        spec=spec,
        decls=[dict(decl=p[2])])[0]
def p_parameter_declaration_2(self, p):
    """ parameter_declaration : declaration_specifiers abstract_declarator_opt
    """
    spec = p[1]
    # C89 implicit-int: a parameter with no type specifier defaults to int.
    if not spec['type']:
        spec['type'] = [c_ast.IdentifierType(['int'],
            coord=self._coord(p.lineno(1)))]

    # Parameters can have the same names as typedefs. The trouble is that
    # the parameter's name gets grouped into declaration_specifiers, making
    # it look like an old-style declaration; compensate.
    #
    if len(spec['type']) > 1 and len(spec['type'][-1].names) == 1 and \
            self._is_type_in_scope(spec['type'][-1].names[0]):
        decl = self._build_declarations(
            spec=spec,
            decls=[dict(decl=p[2], init=None)])[0]

    # This truly is an old-style parameter declaration
    #
    else:
        decl = c_ast.Typename(
            name='',
            quals=spec['qual'],
            type=p[2] or c_ast.TypeDecl(None, None, None),
            coord=self._coord(p.lineno(2)))
        typename = spec['type']
        decl = self._fix_decl_name_type(decl, typename)

    p[0] = decl
def p_identifier_list(self, p):
    """ identifier_list : identifier
                        | identifier_list COMMA identifier
    """
    # Old-style (K&R) parameter name list; reuses ParamList like
    # p_parameter_list does.
    if len(p) == 2:  # single parameter
        p[0] = c_ast.ParamList([p[1]], p[1].coord)
    else:
        p[1].params.append(p[3])
        p[0] = p[1]
def p_initializer_1(self, p):
    """ initializer : assignment_expression
    """
    # Scalar initializer: the expression itself.
    p[0] = p[1]
def p_initializer_2(self, p):
    """ initializer : brace_open initializer_list_opt brace_close
                    | brace_open initializer_list COMMA brace_close
    """
    # An empty brace pair `{}` yields an empty InitList.
    if p[2] is None:
        p[0] = c_ast.InitList([], self._coord(p.lineno(1)))
    else:
        p[0] = p[2]
def p_initializer_list(self, p):
    """ initializer_list : designation_opt initializer
                         | initializer_list COMMA designation_opt initializer
    """
    # A C99 designation (e.g. `.field =` / `[idx] =`) wraps the
    # initializer in a NamedInitializer.
    if len(p) == 3:  # single initializer
        init = p[2] if p[1] is None else c_ast.NamedInitializer(p[1], p[2])
        p[0] = c_ast.InitList([init], p[2].coord)
    else:
        init = p[4] if p[3] is None else c_ast.NamedInitializer(p[3], p[4])
        p[1].exprs.append(init)
        p[0] = p[1]
def p_designation(self, p):
    """ designation : designator_list EQUALS
    """
    # The '=' is syntax only; pass up the designator list.
    p[0] = p[1]
# Designators are represented as a list of nodes, in the order in which
# they're written in the code.
#
def p_designator_list(self, p):
    """ designator_list : designator
                        | designator_list designator
    """
    # Designators are kept as a plain list, in source order.
    if len(p) == 2:
        p[0] = [p[1]]
    else:
        p[0] = p[1] + [p[2]]
def p_designator(self, p):
    """ designator : LBRACKET constant_expression RBRACKET
                   | PERIOD identifier
    """
    # Either form's payload sits at index 2 (the expression or the name).
    p[0] = p[2]
def p_type_name(self, p):
    """ type_name : specifier_qualifier_list abstract_declarator_opt
    """
    # Wrap the specifier/qualifier dict and the (possibly absent)
    # abstract declarator into an anonymous Typename, then push the
    # type names into place.
    typename = c_ast.Typename(
        name='',
        quals=p[1]['qual'],
        type=p[2] or c_ast.TypeDecl(None, None, None),
        coord=self._coord(p.lineno(2)))

    p[0] = self._fix_decl_name_type(typename, p[1]['type'])
def p_abstract_declarator_1(self, p):
    """ abstract_declarator : pointer
    """
    # A bare pointer modifies an anonymous (dummy) TypeDecl.
    dummytype = c_ast.TypeDecl(None, None, None)
    p[0] = self._type_modify_decl(
        decl=dummytype,
        modifier=p[1])
def p_abstract_declarator_2(self, p):
    """ abstract_declarator : pointer direct_abstract_declarator
    """
    # Apply the pointer chain to the direct abstract declarator.
    p[0] = self._type_modify_decl(p[2], p[1])
def p_abstract_declarator_3(self, p):
    """ abstract_declarator : direct_abstract_declarator
    """
    # Pass-through: no pointer part.
    p[0] = p[1]
# Creating and using direct_abstract_declarator_opt here
# instead of listing both direct_abstract_declarator and the
# lack of it in the beginning of _1 and _2 caused two
# shift/reduce errors.
#
def p_direct_abstract_declarator_1(self, p):
    """ direct_abstract_declarator : LPAREN abstract_declarator RPAREN """
    # Parenthesized abstract declarator: grouping only.
    p[0] = p[2]
def p_direct_abstract_declarator_2(self, p):
    """ direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET
    """
    # Array modifier on an existing abstract declarator.
    arr = c_ast.ArrayDecl(
        type=None,
        dim=p[3],
        dim_quals=[],
        coord=p[1].coord)

    p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
def p_direct_abstract_declarator_3(self, p):
    """ direct_abstract_declarator : LBRACKET assignment_expression_opt RBRACKET
    """
    # Array of an anonymous element type (dummy TypeDecl).
    p[0] = c_ast.ArrayDecl(
        type=c_ast.TypeDecl(None, None, None),
        dim=p[2],
        dim_quals=[],
        coord=self._coord(p.lineno(1)))
def p_direct_abstract_declarator_4(self, p):
    """ direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET
    """
    # VLA of unspecified size ([*]) on an existing abstract declarator;
    # the '*' token is kept as the dimension via an ID node.
    arr = c_ast.ArrayDecl(
        type=None,
        dim=c_ast.ID(p[3], self._coord(p.lineno(3))),
        dim_quals=[],
        coord=p[1].coord)

    p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
def p_direct_abstract_declarator_5(self, p):
    """ direct_abstract_declarator : LBRACKET TIMES RBRACKET
    """
    # Stand-alone [*]: VLA of unspecified size over a dummy TypeDecl.
    p[0] = c_ast.ArrayDecl(
        type=c_ast.TypeDecl(None, None, None),
        dim=c_ast.ID(p[3], self._coord(p.lineno(3))),
        dim_quals=[],
        coord=self._coord(p.lineno(1)))
def p_direct_abstract_declarator_6(self, p):
    """ direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN
    """
    # Function modifier on an existing abstract declarator.
    func = c_ast.FuncDecl(
        args=p[3],
        type=None,
        coord=p[1].coord)

    p[0] = self._type_modify_decl(decl=p[1], modifier=func)
def p_direct_abstract_declarator_7(self, p):
    """ direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN
    """
    # Function type with an anonymous return declarator (dummy TypeDecl).
    p[0] = c_ast.FuncDecl(
        args=p[2],
        type=c_ast.TypeDecl(None, None, None),
        coord=self._coord(p.lineno(1)))
# declaration is a list, statement isn't. To make it consistent, block_item
# will always be a list
#
def p_block_item(self, p):
    """ block_item : declaration
                   | statement
    """
    # declaration already comes up as a list; wrap a bare statement so
    # that block_item is uniformly a list.
    item = p[1]
    if isinstance(item, list):
        p[0] = item
    else:
        p[0] = [item]
# Since we made block_item a list, this just combines lists
#
def p_block_item_list(self, p):
    """ block_item_list : block_item
                        | block_item_list block_item
    """
    # Since block_item is always a list, this rule just concatenates.
    # Empty block items (plain ';') produce [None], so ignore them.
    if len(p) == 2 or p[2] == [None]:
        p[0] = p[1]
    else:
        p[0] = p[1] + p[2]
def p_compound_statement_1(self, p):
    """ compound_statement : brace_open block_item_list_opt brace_close """
    # A braced block; block_items is None for an empty body.
    p[0] = c_ast.Compound(
        block_items=p[2],
        coord=self._coord(p.lineno(1)))
def p_labeled_statement_1(self, p):
    """ labeled_statement : ID COLON statement """
    # goto-style label.
    p[0] = c_ast.Label(p[1], p[3], self._coord(p.lineno(1)))
def p_labeled_statement_2(self, p):
    """ labeled_statement : CASE constant_expression COLON statement """
    # The statement is wrapped in a list; fix_switch_cases regroups later.
    p[0] = c_ast.Case(p[2], [p[4]], self._coord(p.lineno(1)))
def p_labeled_statement_3(self, p):
    """ labeled_statement : DEFAULT COLON statement """
    # The statement is wrapped in a list; fix_switch_cases regroups later.
    p[0] = c_ast.Default([p[3]], self._coord(p.lineno(1)))
def p_selection_statement_1(self, p):
    """ selection_statement : IF LPAREN expression RPAREN statement """
    # if without else.
    p[0] = c_ast.If(p[3], p[5], None, self._coord(p.lineno(1)))
def p_selection_statement_2(self, p):
    """ selection_statement : IF LPAREN expression RPAREN statement ELSE statement """
    # if with else branch.
    p[0] = c_ast.If(p[3], p[5], p[7], self._coord(p.lineno(1)))
def p_selection_statement_3(self, p):
    """ selection_statement : SWITCH LPAREN expression RPAREN statement """
    # fix_switch_cases groups the flat Case/Default statements into
    # their proper nested structure.
    p[0] = fix_switch_cases(
        c_ast.Switch(p[3], p[5], self._coord(p.lineno(1))))
def p_iteration_statement_1(self, p):
    """ iteration_statement : WHILE LPAREN expression RPAREN statement """
    p[0] = c_ast.While(p[3], p[5], self._coord(p.lineno(1)))
def p_iteration_statement_2(self, p):
    """ iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI """
    # Note argument order: DoWhile(cond, stmt).
    p[0] = c_ast.DoWhile(p[5], p[2], self._coord(p.lineno(1)))
def p_iteration_statement_3(self, p):
    """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement """
    # Classic for loop: init expression, condition, next, body.
    p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._coord(p.lineno(1)))
def p_iteration_statement_4(self, p):
    """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement """
    # C99 for loop with a declaration as the init clause.
    p[0] = c_ast.For(c_ast.DeclList(p[3], self._coord(p.lineno(1))),
                     p[4], p[6], p[8], self._coord(p.lineno(1)))
def p_jump_statement_1(self, p):
    """ jump_statement : GOTO ID SEMI """
    p[0] = c_ast.Goto(p[2], self._coord(p.lineno(1)))
def p_jump_statement_2(self, p):
    """ jump_statement : BREAK SEMI """
    p[0] = c_ast.Break(self._coord(p.lineno(1)))
def p_jump_statement_3(self, p):
    """ jump_statement : CONTINUE SEMI """
    p[0] = c_ast.Continue(self._coord(p.lineno(1)))
def p_jump_statement_4(self, p):
    """ jump_statement : RETURN expression SEMI
                       | RETURN SEMI
    """
    # A bare `return;` carries a None expression.
    p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._coord(p.lineno(1)))
def p_expression_statement(self, p):
    """ expression_statement : expression_opt SEMI """
    # A lone ';' becomes an explicit EmptyStatement node.
    if p[1] is None:
        p[0] = c_ast.EmptyStatement(self._coord(p.lineno(1)))
    else:
        p[0] = p[1]
def p_expression(self, p):
    """ expression : assignment_expression
                   | expression COMMA assignment_expression
    """
    if len(p) == 2:
        p[0] = p[1]
    else:
        # The comma operator: collect operands into a single ExprList,
        # promoting the left side to an ExprList on first use.
        if not isinstance(p[1], c_ast.ExprList):
            p[1] = c_ast.ExprList([p[1]], p[1].coord)

        p[1].exprs.append(p[3])
        p[0] = p[1]
def p_typedef_name(self, p):
    """ typedef_name : TYPEID """
    # A typedef'd name used as a type specifier.
    p[0] = c_ast.IdentifierType([p[1]], coord=self._coord(p.lineno(1)))
def p_assignment_expression(self, p):
    """ assignment_expression : conditional_expression
                              | unary_expression assignment_operator assignment_expression
    """
    if len(p) == 2:
        p[0] = p[1]
    else:
        # Assignment(op, lvalue, rvalue); op is the operator token text.
        p[0] = c_ast.Assignment(p[2], p[1], p[3], p[1].coord)
# K&R2 defines these as many separate rules, to encode
# precedence and associativity. Why work hard ? I'll just use
# the built in precedence/associativity specification feature
# of PLY. (see precedence declaration above)
#
def p_assignment_operator(self, p):
    """ assignment_operator : EQUALS
                            | XOREQUAL
                            | TIMESEQUAL
                            | DIVEQUAL
                            | MODEQUAL
                            | PLUSEQUAL
                            | MINUSEQUAL
                            | LSHIFTEQUAL
                            | RSHIFTEQUAL
                            | ANDEQUAL
                            | OREQUAL
    """
    # Pass the operator's token text ('=', '^=', ...) straight up.
    p[0] = p[1]
def p_constant_expression(self, p):
    """ constant_expression : conditional_expression """
    # Constant expressions are not evaluated here; validation is the
    # caller's concern.
    p[0] = p[1]
def p_conditional_expression(self, p):
    """ conditional_expression : binary_expression
                               | binary_expression CONDOP expression COLON conditional_expression
    """
    if len(p) == 2:
        p[0] = p[1]
    else:
        # cond ? iftrue : iffalse
        p[0] = c_ast.TernaryOp(p[1], p[3], p[5], p[1].coord)
def p_binary_expression(self, p):
    """ binary_expression   : cast_expression
                            | binary_expression TIMES binary_expression
                            | binary_expression DIVIDE binary_expression
                            | binary_expression MOD binary_expression
                            | binary_expression PLUS binary_expression
                            | binary_expression MINUS binary_expression
                            | binary_expression RSHIFT binary_expression
                            | binary_expression LSHIFT binary_expression
                            | binary_expression LT binary_expression
                            | binary_expression LE binary_expression
                            | binary_expression GE binary_expression
                            | binary_expression GT binary_expression
                            | binary_expression EQ binary_expression
                            | binary_expression NE binary_expression
                            | binary_expression AND binary_expression
                            | binary_expression OR binary_expression
                            | binary_expression XOR binary_expression
                            | binary_expression LAND binary_expression
                            | binary_expression LOR binary_expression
    """
    # Precedence/associativity of these operators is resolved by PLY's
    # precedence table, not by separate grammar rules (K&R2-style).
    if len(p) == 2:
        p[0] = p[1]
    else:
        p[0] = c_ast.BinaryOp(p[2], p[1], p[3], p[1].coord)
def p_cast_expression_1(self, p):
    """ cast_expression : unary_expression """
    p[0] = p[1]
def p_cast_expression_2(self, p):
    """ cast_expression : LPAREN type_name RPAREN cast_expression """
    # Explicit cast: Cast(to_type, expr).
    p[0] = c_ast.Cast(p[2], p[4], self._coord(p.lineno(1)))
def p_unary_expression_1(self, p):
    """ unary_expression : postfix_expression """
    p[0] = p[1]
def p_unary_expression_2(self, p):
    """ unary_expression : PLUSPLUS unary_expression
                         | MINUSMINUS unary_expression
                         | unary_operator cast_expression
    """
    # Prefix operator: op text in p[1], operand in p[2].
    p[0] = c_ast.UnaryOp(p[1], p[2], p[2].coord)
def p_unary_expression_3(self, p):
    """ unary_expression : SIZEOF unary_expression
                         | SIZEOF LPAREN type_name RPAREN
    """
    # sizeof over either an expression or a parenthesized type name.
    p[0] = c_ast.UnaryOp(
        p[1],
        p[2] if len(p) == 3 else p[3],
        self._coord(p.lineno(1)))
def p_unary_operator(self, p):
    """ unary_operator : AND
                       | TIMES
                       | PLUS
                       | MINUS
                       | NOT
                       | LNOT
    """
    # Pass the operator's token text ('&', '*', ...) straight up.
    p[0] = p[1]
def p_postfix_expression_1(self, p):
    """ postfix_expression : primary_expression """
    p[0] = p[1]
def p_postfix_expression_2(self, p):
    """ postfix_expression : postfix_expression LBRACKET expression RBRACKET """
    # Array subscript.
    p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
def p_postfix_expression_3(self, p):
    """ postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN
                           | postfix_expression LPAREN RPAREN
    """
    # Function call; args is None for an empty argument list.
    p[0] = c_ast.FuncCall(p[1], p[3] if len(p) == 5 else None, p[1].coord)
def p_postfix_expression_4(self, p):
    """ postfix_expression : postfix_expression PERIOD ID
                           | postfix_expression PERIOD TYPEID
                           | postfix_expression ARROW ID
                           | postfix_expression ARROW TYPEID
    """
    # Struct/union member access; p[2] is '.' or '->'.
    field = c_ast.ID(p[3], self._coord(p.lineno(3)))
    p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord)
def p_postfix_expression_5(self, p):
    """ postfix_expression : postfix_expression PLUSPLUS
                           | postfix_expression MINUSMINUS
    """
    # Postfix ++/-- are distinguished from prefix by the 'p' prefix on
    # the op string ('p++' / 'p--').
    p[0] = c_ast.UnaryOp('p' + p[2], p[1], p[1].coord)
def p_postfix_expression_6(self, p):
    """ postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close
                           | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close
    """
    # C99 compound literal: (type){ init-list }.
    p[0] = c_ast.CompoundLiteral(p[2], p[5])
def p_primary_expression_1(self, p):
    """ primary_expression : identifier """
    p[0] = p[1]
def p_primary_expression_2(self, p):
    """ primary_expression : constant """
    p[0] = p[1]
def p_primary_expression_3(self, p):
    """ primary_expression : unified_string_literal
                           | unified_wstring_literal
    """
    p[0] = p[1]
def p_primary_expression_4(self, p):
    """ primary_expression : LPAREN expression RPAREN """
    # Parenthesized expression: grouping only.
    p[0] = p[2]
def p_primary_expression_5(self, p):
    """ primary_expression : OFFSETOF LPAREN type_name COMMA identifier RPAREN
    """
    # offsetof(type, member) is modeled as a FuncCall with two arguments.
    coord = self._coord(p.lineno(1))
    p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord),
                          c_ast.ExprList([p[3], p[5]], coord),
                          coord)
def p_argument_expression_list(self, p):
    """ argument_expression_list : assignment_expression
                                 | argument_expression_list COMMA assignment_expression
    """
    if len(p) == 2:  # single expr
        p[0] = c_ast.ExprList([p[1]], p[1].coord)
    else:
        # Append to the ExprList built so far.
        p[1].exprs.append(p[3])
        p[0] = p[1]
def p_identifier(self, p):
    """ identifier : ID """
    p[0] = c_ast.ID(p[1], self._coord(p.lineno(1)))
def p_constant_1(self, p):
    """ constant    : INT_CONST_DEC
                    | INT_CONST_OCT
                    | INT_CONST_HEX
                    | INT_CONST_BIN
    """
    # Integer constant in any base; the raw token text is preserved.
    p[0] = c_ast.Constant(
        'int', p[1], self._coord(p.lineno(1)))
def p_constant_2(self, p):
    """ constant    : FLOAT_CONST
                    | HEX_FLOAT_CONST
    """
    p[0] = c_ast.Constant(
        'float', p[1], self._coord(p.lineno(1)))
def p_constant_3(self, p):
    """ constant    : CHAR_CONST
                    | WCHAR_CONST
    """
    p[0] = c_ast.Constant(
        'char', p[1], self._coord(p.lineno(1)))
# The "unified" string and wstring literal rules are for supporting
# concatenation of adjacent string literals.
# I.e. "hello " "world" is seen by the C compiler as a single string literal
# with the value "hello world"
#
def p_unified_string_literal(self, p):
    """ unified_string_literal : STRING_LITERAL
                               | unified_string_literal STRING_LITERAL
    """
    if len(p) == 2:  # single literal
        p[0] = c_ast.Constant(
            'string', p[1], self._coord(p.lineno(1)))
    else:
        # Concatenate adjacent literals by dropping the closing quote of
        # the accumulated value and the opening quote of the new token.
        p[1].value = p[1].value[:-1] + p[2][1:]
        p[0] = p[1]
def p_unified_wstring_literal(self, p):
    """ unified_wstring_literal : WSTRING_LITERAL
                                | unified_wstring_literal WSTRING_LITERAL
    """
    if len(p) == 2:  # single literal
        p[0] = c_ast.Constant(
            'string', p[1], self._coord(p.lineno(1)))
    else:
        # Drop the closing quote of the accumulated value and the
        # leading `L"` (two characters) of the new wide-string token.
        p[1].value = p[1].value.rstrip()[:-1] + p[2][2:]
        p[0] = p[1]
def p_brace_open(self, p):
    """ brace_open  :   LBRACE
    """
    # A separate rule so scope push/pop hooks can attach here.
    p[0] = p[1]
def p_brace_close(self, p):
    """ brace_close :   RBRACE
    """
    # A separate rule so scope push/pop hooks can attach here.
    p[0] = p[1]
def p_empty(self, p):
    'empty : '
    # The empty production used by the *_opt rules.
    p[0] = None
def p_error(self, p):
    # PLY error hook: report a syntax error at the offending token, or at
    # end-of-input if p is None.
    #
    # If error recovery is added here in the future, make sure
    # _get_yacc_lookahead_token still works!
    #
    if p:
        self._parse_error(
            'before: %s' % p.value,
            self._coord(lineno=p.lineno,
                        column=self.clex.find_tok_column(p)))
    else:
        self._parse_error('At end of input', '')
#------------------------------------------------------------------------------
if __name__ == "__main__":
    # Ad-hoc manual test driver; the actual parsing code is kept
    # commented out and enabled by hand when debugging the grammar.
    import pprint
    import time, sys

    #t1 = time.time()
    #parser = CParser(lex_optimize=True, yacc_debug=True, yacc_optimize=False)
    #sys.write(time.time() - t1)

    #buf = '''
        #int (*k)(int);
    #'''

    ## set debuglevel to 2 for debugging
    #t = parser.parse(buf, 'x.c', debuglevel=0)
    #t.show(showcoord=True)
| mit |
tseaver/google-cloud-python | error_reporting/google/cloud/errorreporting_v1beta1/gapic/error_stats_service_client.py | 2 | 24986 | # -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.devtools.clouderrorreporting.v1beta1 ErrorStatsService API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.client_options
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.gapic_v1.routing_header
import google.api_core.grpc_helpers
import google.api_core.page_iterator
import google.api_core.path_template
import grpc
from google.cloud.errorreporting_v1beta1.gapic import enums
from google.cloud.errorreporting_v1beta1.gapic import error_stats_service_client_config
from google.cloud.errorreporting_v1beta1.gapic.transports import (
error_stats_service_grpc_transport,
)
from google.cloud.errorreporting_v1beta1.proto import common_pb2
from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2
from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2_grpc
from google.cloud.errorreporting_v1beta1.proto import error_stats_service_pb2
from google.cloud.errorreporting_v1beta1.proto import error_stats_service_pb2_grpc
from google.protobuf import duration_pb2
from google.protobuf import timestamp_pb2
# Installed library version, reported to the service in the user-agent
# string via ClientInfo.
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
    "google-cloud-error-reporting"
).version
class ErrorStatsServiceClient(object):
"""
An API for retrieving and managing error statistics as well as data for
individual events.
"""
SERVICE_ADDRESS = "clouderrorreporting.googleapis.com:443"
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService"
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
    """Creates an instance of this client using the provided credentials
    file.

    Args:
        filename (str): The path to the service account private key json
            file.
        args: Additional arguments to pass to the constructor.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        ErrorStatsServiceClient: The constructed client.
    """
    # Load the service-account credentials and hand them to the
    # constructor alongside any caller-supplied arguments.
    kwargs["credentials"] = service_account.Credentials.from_service_account_file(
        filename
    )
    return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@classmethod
def project_path(cls, project):
    """Return a fully-qualified project string."""
    # Expand the canonical resource-name template for a project.
    template = "projects/{project}"
    return google.api_core.path_template.expand(template, project=project)
def __init__(
    self,
    transport=None,
    channel=None,
    credentials=None,
    client_config=None,
    client_info=None,
    client_options=None,
):
    """Constructor.

    Args:
        transport (Union[~.ErrorStatsServiceGrpcTransport,
                Callable[[~.Credentials, type], ~.ErrorStatsServiceGrpcTransport]): A transport
            instance, responsible for actually making the API calls.
            The default transport uses the gRPC protocol.
            This argument may also be a callable which returns a
            transport instance. Callables will be sent the credentials
            as the first argument and the default transport class as
            the second argument.
        channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
            through which to make calls. This argument is mutually exclusive
            with ``credentials``; providing both will raise an exception.
        credentials (google.auth.credentials.Credentials): The
            authorization credentials to attach to requests. These
            credentials identify this application to the service. If none
            are specified, the client will attempt to ascertain the
            credentials from the environment.
            This argument is mutually exclusive with providing a
            transport instance to ``transport``; doing so will raise
            an exception.
        client_config (dict): DEPRECATED. A dictionary of call options for
            each method. If not specified, the default configuration is used.
        client_info (google.api_core.gapic_v1.client_info.ClientInfo):
            The client info used to send a user-agent string along with
            API requests. If ``None``, then default info will be used.
            Generally, you only need to set this if you're developing
            your own client library.
        client_options (Union[dict, google.api_core.client_options.ClientOptions]):
            Client options used to set user options on the client. API Endpoint
            should be set through client_options.
    """
    # Raise deprecation warnings for things we want to go away.
    if client_config is not None:
        warnings.warn(
            "The `client_config` argument is deprecated.",
            PendingDeprecationWarning,
            stacklevel=2,
        )
    else:
        client_config = error_stats_service_client_config.config

    if channel:
        warnings.warn(
            "The `channel` argument is deprecated; use " "`transport` instead.",
            PendingDeprecationWarning,
            stacklevel=2,
        )

    api_endpoint = self.SERVICE_ADDRESS
    if client_options:
        # Accept either a dict or a ClientOptions instance. isinstance is
        # used (rather than an exact type() comparison) so dict subclasses
        # are handled too.
        if isinstance(client_options, dict):
            client_options = google.api_core.client_options.from_dict(
                client_options
            )
        if client_options.api_endpoint:
            api_endpoint = client_options.api_endpoint

    # Instantiate the transport.
    # The transport is responsible for handling serialization and
    # deserialization and actually sending data to the service.
    if transport:
        if callable(transport):
            self.transport = transport(
                credentials=credentials,
                default_class=error_stats_service_grpc_transport.ErrorStatsServiceGrpcTransport,
                address=api_endpoint,
            )
        else:
            if credentials:
                raise ValueError(
                    "Received both a transport instance and "
                    "credentials; these are mutually exclusive."
                )
            self.transport = transport
    else:
        self.transport = error_stats_service_grpc_transport.ErrorStatsServiceGrpcTransport(
            address=api_endpoint, channel=channel, credentials=credentials
        )

    if client_info is None:
        client_info = google.api_core.gapic_v1.client_info.ClientInfo(
            gapic_version=_GAPIC_LIBRARY_VERSION
        )
    else:
        client_info.gapic_version = _GAPIC_LIBRARY_VERSION
    self._client_info = client_info

    # Parse out the default settings for retry and timeout for each RPC
    # from the client configuration.
    # (Ordinarily, these are the defaults specified in the `*_config.py`
    # file next to this one.)
    self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
        client_config["interfaces"][self._INTERFACE_NAME]
    )

    # Save a dictionary of cached API call functions.
    # These are the actual callables which invoke the proper
    # transport methods, wrapped with `wrap_method` to add retry,
    # timeout, and the like.
    self._inner_api_calls = {}
# Service calls
def list_group_stats(
    self,
    project_name,
    time_range,
    group_id=None,
    service_filter=None,
    timed_count_duration=None,
    alignment=None,
    alignment_time=None,
    order=None,
    page_size=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Lists the specified groups.

    Example:
        >>> from google.cloud import errorreporting_v1beta1
        >>>
        >>> client = errorreporting_v1beta1.ErrorStatsServiceClient()
        >>>
        >>> project_name = client.project_path('[PROJECT]')
        >>>
        >>> # TODO: Initialize `time_range`:
        >>> time_range = {}
        >>>
        >>> # Iterate over all results
        >>> for element in client.list_group_stats(project_name, time_range):
        ...     # process element
        ...     pass
        >>>
        >>>
        >>> # Alternatively:
        >>>
        >>> # Iterate over results one page at a time
        >>> for page in client.list_group_stats(project_name, time_range).pages:
        ...     for element in page:
        ...         # process element
        ...         pass

    Args:
        project_name (str): [Required] The resource name of the Google Cloud Platform project.
            Written as projects/ plus the Google Cloud Platform project ID.
            Example: projects/my-project-123.
        time_range (Union[dict, ~google.cloud.errorreporting_v1beta1.types.QueryTimeRange]): [Optional] List data for the given time range. If not set a default time
            range is used. The field time\_range\_begin in the response will specify
            the beginning of this time range. Only ErrorGroupStats with a non-zero
            count in the given time range are returned, unless the request contains
            an explicit group\_id list. If a group\_id list is given, also
            ErrorGroupStats with zero occurrences are returned.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.errorreporting_v1beta1.types.QueryTimeRange`
        group_id (list[str]): [Optional] List all ErrorGroupStats with these IDs.
        service_filter (Union[dict, ~google.cloud.errorreporting_v1beta1.types.ServiceContextFilter]): [Optional] List only ErrorGroupStats which belong to a service context
            that matches the filter. Data for all service contexts is returned if
            this field is not specified.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.errorreporting_v1beta1.types.ServiceContextFilter`
        timed_count_duration (Union[dict, ~google.cloud.errorreporting_v1beta1.types.Duration]): [Optional] The preferred duration for a single returned ``TimedCount``.
            If not set, no timed counts are returned.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.errorreporting_v1beta1.types.Duration`
        alignment (~google.cloud.errorreporting_v1beta1.types.TimedCountAlignment): [Optional] The alignment of the timed counts to be returned. Default is
            ``ALIGNMENT_EQUAL_AT_END``.
        alignment_time (Union[dict, ~google.cloud.errorreporting_v1beta1.types.Timestamp]): [Optional] Time where the timed counts shall be aligned if rounded
            alignment is chosen. Default is 00:00 UTC.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.errorreporting_v1beta1.types.Timestamp`
        order (~google.cloud.errorreporting_v1beta1.types.ErrorGroupOrder): [Optional] The sort order in which the results are returned. Default is
            ``COUNT_DESC``.
        page_size (int): The maximum number of resources contained in the
            underlying API response. If page streaming is performed per-
            resource, this parameter does not affect the return value. If page
            streaming is performed per-page, this determines the maximum number
            of resources in a page.
        retry (Optional[google.api_core.retry.Retry]):  A retry object used
            to retry requests. If ``None`` is specified, requests will
            be retried using a default configuration.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.api_core.page_iterator.PageIterator` instance.
        An iterable of :class:`~google.cloud.errorreporting_v1beta1.types.ErrorGroupStats` instances.
        You can also iterate over the pages of the response
        using its `pages` property.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    # The wrapped callable is cached in _inner_api_calls on first use.
    if "list_group_stats" not in self._inner_api_calls:
        self._inner_api_calls[
            "list_group_stats"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.list_group_stats,
            default_retry=self._method_configs["ListGroupStats"].retry,
            default_timeout=self._method_configs["ListGroupStats"].timeout,
            client_info=self._client_info,
        )

    request = error_stats_service_pb2.ListGroupStatsRequest(
        project_name=project_name,
        time_range=time_range,
        group_id=group_id,
        service_filter=service_filter,
        timed_count_duration=timed_count_duration,
        alignment=alignment,
        alignment_time=alignment_time,
        order=order,
        page_size=page_size,
    )
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    # Attach a routing header so the backend can route by project_name.
    try:
        routing_header = [("project_name", project_name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)

    # Lazily page through results; each page issues one wrapped RPC.
    iterator = google.api_core.page_iterator.GRPCIterator(
        client=None,
        method=functools.partial(
            self._inner_api_calls["list_group_stats"],
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        ),
        request=request,
        items_field="error_group_stats",
        request_token_field="page_token",
        response_token_field="next_page_token",
    )
    return iterator
def list_events(
    self,
    project_name,
    group_id,
    service_filter=None,
    time_range=None,
    page_size=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Lists the specified events.

    Example:
        >>> from google.cloud import errorreporting_v1beta1
        >>>
        >>> client = errorreporting_v1beta1.ErrorStatsServiceClient()
        >>>
        >>> project_name = client.project_path('[PROJECT]')
        >>>
        >>> # TODO: Initialize `group_id`:
        >>> group_id = ''
        >>>
        >>> # Iterate over all results
        >>> for element in client.list_events(project_name, group_id):
        ...     # process element
        ...     pass
        >>>
        >>> # Alternatively:
        >>>
        >>> # Iterate over results one page at a time
        >>> for page in client.list_events(project_name, group_id).pages:
        ...     for element in page:
        ...         # process element
        ...         pass

    Args:
        project_name (str): [Required] The resource name of the Google Cloud Platform project.
            Written as ``projects/`` plus the `Google Cloud Platform project
            ID <https://support.google.com/cloud/answer/6158840>`__. Example:
            ``projects/my-project-123``.
        group_id (str): [Required] The group for which events shall be returned.
        service_filter (Union[dict, ~google.cloud.errorreporting_v1beta1.types.ServiceContextFilter]): [Optional] List only ErrorGroups which belong to a service context that
            matches the filter. Data for all service contexts is returned if this
            field is not specified.
            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.errorreporting_v1beta1.types.ServiceContextFilter`
        time_range (Union[dict, ~google.cloud.errorreporting_v1beta1.types.QueryTimeRange]): [Optional] List only data for the given time range. If not set a default
            time range is used. The field time\_range\_begin in the response will
            specify the beginning of this time range.
            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.errorreporting_v1beta1.types.QueryTimeRange`
        page_size (int): The maximum number of resources contained in the
            underlying API response. If page streaming is performed per-
            resource, this parameter does not affect the return value. If page
            streaming is performed per-page, this determines the maximum number
            of resources in a page.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will
            be retried using a default configuration.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.api_core.page_iterator.PageIterator` instance.
        An iterable of :class:`~google.cloud.errorreporting_v1beta1.types.ErrorEvent` instances.
        You can also iterate over the pages of the response
        using its `pages` property.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.  The
    # wrapped callable is cached in ``self._inner_api_calls`` so the
    # wrapping cost is paid only on the first call.
    if "list_events" not in self._inner_api_calls:
        self._inner_api_calls[
            "list_events"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.list_events,
            default_retry=self._method_configs["ListEvents"].retry,
            default_timeout=self._method_configs["ListEvents"].timeout,
            client_info=self._client_info,
        )
    request = error_stats_service_pb2.ListEventsRequest(
        project_name=project_name,
        group_id=group_id,
        service_filter=service_filter,
        time_range=time_range,
        page_size=page_size,
    )
    if metadata is None:
        metadata = []
    # Copy before appending so a caller-supplied sequence is never mutated.
    metadata = list(metadata)
    try:
        # Attach a gRPC routing header so the backend can route by project.
        routing_header = [("project_name", project_name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)
    # Page through "error_events" items using the standard page-token fields.
    iterator = google.api_core.page_iterator.GRPCIterator(
        client=None,
        method=functools.partial(
            self._inner_api_calls["list_events"],
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        ),
        request=request,
        items_field="error_events",
        request_token_field="page_token",
        response_token_field="next_page_token",
    )
    return iterator
def delete_events(
    self,
    project_name,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Deletes all error events of a given project.

    Example:
        >>> from google.cloud import errorreporting_v1beta1
        >>>
        >>> client = errorreporting_v1beta1.ErrorStatsServiceClient()
        >>>
        >>> project_name = client.project_path('[PROJECT]')
        >>>
        >>> response = client.delete_events(project_name)

    Args:
        project_name (str): [Required] The resource name of the Google Cloud Platform project.
            Written as ``projects/`` plus the `Google Cloud Platform project
            ID <https://support.google.com/cloud/answer/6158840>`__. Example:
            ``projects/my-project-123``.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will
            be retried using a default configuration.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.errorreporting_v1beta1.types.DeleteEventsResponse` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.  The
    # wrapped callable is cached so subsequent calls reuse it.
    if "delete_events" not in self._inner_api_calls:
        self._inner_api_calls[
            "delete_events"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.delete_events,
            default_retry=self._method_configs["DeleteEvents"].retry,
            default_timeout=self._method_configs["DeleteEvents"].timeout,
            client_info=self._client_info,
        )
    request = error_stats_service_pb2.DeleteEventsRequest(project_name=project_name)
    if metadata is None:
        metadata = []
    # Copy before appending so a caller-supplied sequence is never mutated.
    metadata = list(metadata)
    try:
        # Attach a gRPC routing header so the backend can route by project.
        routing_header = [("project_name", project_name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)
    return self._inner_api_calls["delete_events"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
| apache-2.0 |
saradbowman/osf.io | admin_tests/preprints/test_views.py | 3 | 27624 | import pytest
import mock
from django.test import RequestFactory
from django.core.urlresolvers import reverse
from django.core.exceptions import PermissionDenied
from django.contrib.auth.models import Permission, Group, AnonymousUser
from django.contrib.messages.storage.fallback import FallbackStorage
from tests.base import AdminTestCase
from osf.models import Preprint, OSFUser, PreprintLog
from osf_tests.factories import (
AuthUserFactory,
PreprintFactory,
PreprintProviderFactory,
PreprintRequestFactory,
NodeFactory,
SubjectFactory,
)
from osf.models.admin_log_entry import AdminLogEntry
from osf.models.spam import SpamStatus
from osf.utils.workflows import DefaultStates, RequestTypes
from admin_tests.utilities import setup_view, setup_log_view
from admin.preprints import views
from admin.preprints.forms import ChangeProviderForm
# Every test in this module needs database access.
pytestmark = pytest.mark.django_db


@pytest.fixture()
def preprint():
    # A freshly created preprint with default factory attributes.
    return PreprintFactory()


@pytest.fixture()
def user():
    # A plain authenticated user with no admin permissions.
    return AuthUserFactory()


@pytest.fixture()
def req(user):
    # A GET request authenticated as ``user``.
    req = RequestFactory().get('/fake_path')
    req.user = user
    return req
@pytest.mark.urls('admin.base.urls')
class TestPreprintView:
    """Tests for ``views.PreprintView``, the spam list views, and the
    spam/ham confirmation views."""

    @pytest.fixture()
    def plain_view(self):
        return views.PreprintView

    @pytest.fixture()
    def view(self, req, plain_view):
        view = plain_view()
        setup_view(view, req)
        return view

    @pytest.fixture()
    def ham_preprint(self):
        # Preprint already confirmed as ham.
        ham_preprint = PreprintFactory()
        ham_preprint.spam_status = SpamStatus.HAM
        ham_preprint.save()
        return ham_preprint

    @pytest.fixture()
    def spam_preprint(self):
        # Preprint already confirmed as spam.
        spam_preprint = PreprintFactory()
        spam_preprint.spam_status = SpamStatus.SPAM
        spam_preprint.save()
        return spam_preprint

    @pytest.fixture()
    def flagged_preprint(self):
        # Preprint flagged as potential spam, awaiting review.
        flagged_preprint = PreprintFactory()
        flagged_preprint.spam_status = SpamStatus.FLAGGED
        flagged_preprint.save()
        return flagged_preprint

    @pytest.fixture()
    def superuser(self):
        superuser = AuthUserFactory()
        superuser.is_superuser = True
        superuser.save()
        return superuser

    def test_no_guid(self, view):
        # Without a guid kwarg the view resolves to no object.
        preprint = view.get_object()
        assert preprint is None

    def test_get_object(self, req, preprint, plain_view):
        view = setup_view(plain_view(), req, guid=preprint._id)
        res = view.get_object()
        assert isinstance(res, Preprint)

    def test_no_user_permissions_raises_error(self, user, preprint, plain_view):
        request = RequestFactory().get(reverse('preprints:preprint', kwargs={'guid': preprint._id}))
        request.user = user
        with pytest.raises(PermissionDenied):
            plain_view.as_view()(request, guid=preprint._id)

    def test_get_flagged_spam(self, superuser, preprint, ham_preprint, spam_preprint, flagged_preprint):
        # Only the flagged preprint should appear in the flagged-spam list.
        request = RequestFactory().get(reverse('preprints:flagged-spam'))
        request.user = superuser
        response = views.PreprintFlaggedSpamList.as_view()(request)
        assert response.status_code == 200
        response_ids = [res['id'] for res in response.context_data['preprints']]
        assert preprint._id not in response.context_data['preprints'][0]['id']
        assert len(response.context_data['preprints']) == 1
        assert flagged_preprint._id in response_ids
        assert ham_preprint._id not in response_ids
        assert spam_preprint._id not in response_ids
        assert preprint._id not in response_ids

    def test_get_known_spam(self, superuser, preprint, ham_preprint, spam_preprint, flagged_preprint):
        # Only the confirmed-spam preprint should appear in the known-spam list.
        request = RequestFactory().get(reverse('preprints:known-spam'))
        request.user = superuser
        response = views.PreprintKnownSpamList.as_view()(request)
        assert response.status_code == 200
        response_ids = [res['id'] for res in response.context_data['preprints']]
        assert preprint._id not in response.context_data['preprints'][0]['id']
        assert len(response.context_data['preprints']) == 1
        assert flagged_preprint._id not in response_ids
        assert ham_preprint._id not in response_ids
        assert spam_preprint._id in response_ids
        assert preprint._id not in response_ids

    def test_get_known_ham(self, superuser, preprint, ham_preprint, spam_preprint, flagged_preprint):
        # Only the confirmed-ham preprint should appear in the known-ham list.
        request = RequestFactory().get(reverse('preprints:known-ham'))
        request.user = superuser
        response = views.PreprintKnownHamList.as_view()(request)
        assert response.status_code == 200
        response_ids = [res['id'] for res in response.context_data['preprints']]
        assert preprint._id not in response.context_data['preprints'][0]['id']
        assert len(response.context_data['preprints']) == 1
        assert flagged_preprint._id not in response_ids
        assert ham_preprint._id in response_ids
        assert spam_preprint._id not in response_ids
        assert preprint._id not in response_ids

    def test_confirm_spam(self, flagged_preprint, superuser):
        request = RequestFactory().post('/fake_path')
        request.user = superuser
        view = views.PreprintConfirmSpamView()
        view = setup_view(view, request, guid=flagged_preprint._id)
        # Sanity check before confirming spam: the preprint starts public.
        # (Fix: this was previously asserted AFTER delete on the stale
        # in-memory object; the duplicated refresh/assert lines are gone.)
        assert flagged_preprint.is_public
        view.delete(request)
        flagged_preprint.refresh_from_db()
        assert flagged_preprint.is_spam
        assert not flagged_preprint.is_public

    def test_confirm_ham(self, preprint, superuser):
        request = RequestFactory().post('/fake_path')
        request.user = superuser
        view = views.PreprintConfirmHamView()
        view = setup_view(view, request, guid=preprint._id)
        view.delete(request)
        # Fix: duplicated refresh_from_db()/assert lines removed.
        preprint.refresh_from_db()
        assert preprint.spam_status == SpamStatus.HAM
        assert preprint.is_public

    def test_correct_view_permissions(self, user, preprint, plain_view):
        view_permission = Permission.objects.get(codename='view_preprint')
        user.user_permissions.add(view_permission)
        user.save()
        request = RequestFactory().get(reverse('preprints:preprint', kwargs={'guid': preprint._id}))
        request.user = user
        response = plain_view.as_view()(request, guid=preprint._id)
        assert response.status_code == 200

    def test_change_preprint_provider_no_permission(self, user, preprint, plain_view):
        request = RequestFactory().post(reverse('preprints:preprint', kwargs={'guid': preprint._id}))
        request.user = user
        with pytest.raises(PermissionDenied):
            plain_view.as_view()(request, guid=preprint._id)

    def test_change_preprint_provider_correct_permission(self, user, preprint, plain_view):
        change_permission = Permission.objects.get(codename='change_preprint')
        view_permission = Permission.objects.get(codename='view_preprint')
        user.user_permissions.add(change_permission)
        user.user_permissions.add(view_permission)
        user.save()
        request = RequestFactory().post(reverse('preprints:preprint', kwargs={'guid': preprint._id}))
        request.user = user
        response = plain_view.as_view()(request, guid=preprint._id)
        assert response.status_code == 302

    def test_change_preprint_provider_form(self, plain_view, preprint):
        new_provider = PreprintProviderFactory()
        plain_view.kwargs = {'guid': preprint._id}
        form_data = {
            'provider': new_provider.id
        }
        form = ChangeProviderForm(data=form_data, instance=preprint)
        plain_view().form_valid(form)
        assert preprint.provider == new_provider

    @pytest.fixture
    def provider_one(self):
        return PreprintProviderFactory()

    @pytest.fixture
    def provider_two(self):
        return PreprintProviderFactory()

    @pytest.fixture
    def provider_osf(self):
        return PreprintProviderFactory(_id='osf')

    @pytest.fixture
    def preprint_user(self, user):
        # ``user`` with change/view permissions on preprints.
        change_permission = Permission.objects.get(codename='change_preprint')
        view_permission = Permission.objects.get(codename='view_preprint')
        user.user_permissions.add(change_permission)
        user.user_permissions.add(view_permission)
        return user

    @pytest.fixture
    def subject_osf(self, provider_osf):
        return SubjectFactory(provider=provider_osf)

    @pytest.fixture
    def subject_one(self, provider_one):
        return SubjectFactory(provider=provider_one)

    def test_change_preprint_provider_subjects_custom_taxonomies(self, plain_view, preprint_user, provider_one, provider_two, subject_one):
        """ Testing that subjects are changed when providers are changed between two custom taxonomies.
        """
        subject_two = SubjectFactory(provider=provider_two,
                                     bepress_subject=subject_one.bepress_subject)
        preprint = PreprintFactory(subjects=[[subject_one._id]], provider=provider_one, creator=preprint_user)
        request = RequestFactory().post(reverse('preprints:preprint', kwargs={'guid': preprint._id}), data={'provider': provider_two.id})
        request.user = preprint_user
        response = plain_view.as_view()(request, guid=preprint._id)
        assert response.status_code == 302
        preprint.refresh_from_db()
        assert preprint.provider == provider_two
        assert subject_two in preprint.subjects.all()

    def test_change_preprint_provider_subjects_from_osf(self, plain_view, preprint_user, provider_one, provider_osf, subject_osf):
        """ Testing that subjects are changed when a provider is changed from osf using the bepress subject id of the new subject.
        """
        subject_two = SubjectFactory(provider=provider_one,
                                     bepress_subject=subject_osf)
        preprint = PreprintFactory(subjects=[[subject_osf._id]], provider=provider_osf, creator=preprint_user)
        request = RequestFactory().post(reverse('preprints:preprint', kwargs={'guid': preprint._id}), data={'provider': provider_one.id})
        request.user = preprint_user
        response = plain_view.as_view()(request, guid=preprint._id)
        assert response.status_code == 302
        preprint.refresh_from_db()
        assert preprint.provider == provider_one
        assert subject_two in preprint.subjects.all()

    def test_change_preprint_provider_subjects_to_osf(self, plain_view, preprint_user, provider_one, provider_osf, subject_osf):
        """ Testing that subjects are changed when providers are changed to osf using the bepress subject id of the old subject
        """
        subject_one = SubjectFactory(provider=provider_one,
                                     bepress_subject=subject_osf)
        preprint = PreprintFactory(subjects=[[subject_one._id]], provider=provider_one, creator=preprint_user)
        request = RequestFactory().post(reverse('preprints:preprint', kwargs={'guid': preprint._id}), data={'provider': provider_osf.id})
        request.user = preprint_user
        response = plain_view.as_view()(request, guid=preprint._id)
        assert response.status_code == 302
        preprint.refresh_from_db()
        assert preprint.provider == provider_osf
        assert subject_osf in preprint.subjects.all()

    def test_change_preprint_provider_subjects_problem_subject(self, plain_view, preprint_user, provider_one, provider_osf, subject_osf):
        """ Testing that subjects are changed when providers are changed and theres no related mapping between subjects, the old subject stays in place.
        """
        preprint = PreprintFactory(subjects=[[subject_osf._id]], provider=provider_osf, creator=preprint_user)
        request = RequestFactory().post(reverse('preprints:preprint', kwargs={'guid': preprint._id}), data={'provider': provider_one.id})
        request.user = preprint_user
        # django.contrib.messages has a bug which effects unittests
        # more info here -> https://code.djangoproject.com/ticket/17971
        setattr(request, 'session', 'session')
        messages = FallbackStorage(request)
        setattr(request, '_messages', messages)
        response = plain_view.as_view()(request, guid=preprint._id)
        assert response.status_code == 302
        preprint.refresh_from_db()
        assert preprint.provider == provider_one
        assert subject_osf in preprint.subjects.all()

    def test_change_preprint_provider_subjects_change_permissions(self, plain_view, preprint_user, provider_one, provider_osf, subject_osf):
        """ Testing that a different user with change/view permissions can also
        switch the provider; with no subject mapping the old subject stays in place.
        """
        auth_user = AuthUserFactory()
        change_permission = Permission.objects.get(codename='change_preprint')
        view_permission = Permission.objects.get(codename='view_preprint')
        auth_user.user_permissions.add(change_permission)
        auth_user.user_permissions.add(view_permission)
        preprint = PreprintFactory(subjects=[[subject_osf._id]], provider=provider_osf, creator=preprint_user)
        request = RequestFactory().post(reverse('preprints:preprint', kwargs={'guid': preprint._id}), data={'provider': provider_one.id})
        request.user = auth_user
        # django.contrib.messages has a bug which effects unittests
        # more info here -> https://code.djangoproject.com/ticket/17971
        setattr(request, 'session', 'session')
        messages = FallbackStorage(request)
        setattr(request, '_messages', messages)
        response = plain_view.as_view()(request, guid=preprint._id)
        assert response.status_code == 302
        preprint.refresh_from_db()
        assert preprint.provider == provider_one
        assert subject_osf in preprint.subjects.all()
@pytest.mark.urls('admin.base.urls')
class TestPreprintFormView:
    """Permission checks for the preprint search form view."""

    @pytest.fixture()
    def view(self):
        return views.PreprintFormView

    @pytest.fixture()
    def url(self):
        return reverse('preprints:search')

    def test_no_user_permissions_raises_error(self, url, user, view):
        # A user without ``view_preprint`` is rejected outright.
        http_request = RequestFactory().get(url)
        http_request.user = user
        with pytest.raises(PermissionDenied):
            view.as_view()(http_request)

    def test_correct_view_permissions(self, url, user, view):
        # Granting ``view_preprint`` is sufficient to render the form.
        perm = Permission.objects.get(codename='view_preprint')
        user.user_permissions.add(perm)
        user.save()
        http_request = RequestFactory().get(url)
        http_request.user = user
        assert view.as_view()(http_request).status_code == 200
@pytest.mark.urls('admin.base.urls')
@pytest.mark.enable_search
@pytest.mark.enable_enqueue_task
@pytest.mark.enable_implicit_clean
class TestPreprintReindex:
    """Tests that the admin reindex views trigger SHARE / Elasticsearch updates
    and write an admin log entry."""

    @mock.patch('website.preprints.tasks.send_share_preprint_data')
    @mock.patch('website.settings.SHARE_URL', 'ima_real_website')
    @mock.patch('website.project.tasks.settings.SHARE_API_TOKEN', 'totaly_real_token')
    def test_reindex_preprint_share(self, mock_reindex_preprint, preprint, req):
        preprint.provider.access_token = 'totally real access token I bought from a guy wearing a trenchcoat in the summer'
        preprint.provider.save()

        log_count_before = AdminLogEntry.objects.count()
        reindex_view = setup_log_view(views.PreprintReindexShare(), req, guid=preprint._id)
        reindex_view.delete(req)

        assert mock_reindex_preprint.called
        assert AdminLogEntry.objects.count() == log_count_before + 1

    @mock.patch('website.search.search.update_preprint')
    def test_reindex_preprint_elastic(self, mock_update_search, preprint, req):
        log_count_before = AdminLogEntry.objects.count()
        reindex_view = setup_log_view(views.PreprintReindexElastic(), req, guid=preprint._id)
        reindex_view.delete(req)

        assert mock_update_search.called
        assert AdminLogEntry.objects.count() == log_count_before + 1
class TestPreprintDeleteView(AdminTestCase):
    """Tests for ``views.PreprintDeleteView`` (soft delete and restore)."""

    def setUp(self):
        super(TestPreprintDeleteView, self).setUp()
        self.user = AuthUserFactory()
        self.preprint = PreprintFactory(creator=self.user)
        self.request = RequestFactory().post('/fake_path')
        self.plain_view = views.PreprintDeleteView
        self.view = setup_log_view(self.plain_view(), self.request,
                                   guid=self.preprint._id)
        self.url = reverse('preprints:remove', kwargs={'guid': self.preprint._id})

    def test_get_object(self):
        obj = self.view.get_object()
        assert isinstance(obj, Preprint)

    def test_get_context(self):
        res = self.view.get_context_data(object=self.preprint)
        assert 'guid' in res
        assert res.get('guid') == self.preprint._id

    def test_remove_preprint(self):
        count = AdminLogEntry.objects.count()
        self.view.delete(self.request)
        self.preprint.refresh_from_db()
        assert self.preprint.deleted is not None
        assert AdminLogEntry.objects.count() == count + 1

    def test_restore_preprint(self):
        # delete() toggles: a second call on a deleted preprint restores it.
        self.view.delete(self.request)
        self.preprint.refresh_from_db()
        assert self.preprint.deleted is not None
        count = AdminLogEntry.objects.count()
        self.view.delete(self.request)
        self.preprint.reload()
        assert self.preprint.deleted is None
        assert AdminLogEntry.objects.count() == count + 1

    def test_no_user_permissions_raises_error(self):
        guid = self.preprint._id
        request = RequestFactory().get(self.url)
        request.user = self.user
        with pytest.raises(PermissionDenied):
            # Fix: dropped the spurious ``user_id=self.user`` kwarg — the
            # delete view only takes ``guid`` (see test_correct_view_permissions).
            self.plain_view.as_view()(request, guid=guid)

    def test_correct_view_permissions(self):
        user = AuthUserFactory()
        guid = self.preprint._id
        change_permission = Permission.objects.get(codename='delete_preprint')
        view_permission = Permission.objects.get(codename='view_preprint')
        user.user_permissions.add(change_permission)
        user.user_permissions.add(view_permission)
        user.save()
        request = RequestFactory().get(self.url)
        request.user = user
        response = self.plain_view.as_view()(request, guid=guid)
        assert response.status_code == 200
class TestRemoveContributor(AdminTestCase):
    """Tests for ``views.PreprintRemoveContributorView``."""

    def setUp(self):
        super(TestRemoveContributor, self).setUp()
        self.user = AuthUserFactory()
        self.preprint = PreprintFactory(creator=self.user)
        self.user_2 = AuthUserFactory()
        self.preprint.add_contributor(self.user_2)
        self.preprint.save()
        self.view = views.PreprintRemoveContributorView
        self.request = RequestFactory().post('/fake_path')
        self.url = reverse('preprints:remove_user', kwargs={'guid': self.preprint._id, 'user_id': self.user._id})

    def test_get_object(self):
        view = setup_log_view(self.view(), self.request, guid=self.preprint._id,
                              user_id=self.user._id)
        preprint, user = view.get_object()
        assert isinstance(preprint, Preprint)
        assert isinstance(user, OSFUser)

    @mock.patch('admin.preprints.views.Preprint.remove_contributor')
    def test_remove_contributor(self, mock_remove_contributor):
        user_id = self.user_2._id
        preprint_id = self.preprint._id
        view = setup_log_view(self.view(), self.request, guid=preprint_id,
                              user_id=user_id)
        view.delete(self.request)
        # The admin view removes without auth and without writing a node log.
        mock_remove_contributor.assert_called_with(self.user_2, None, log=False)

    def test_integration_remove_contributor(self):
        assert self.user_2 in self.preprint.contributors
        view = setup_log_view(self.view(), self.request, guid=self.preprint._id,
                              user_id=self.user_2._id)
        count = AdminLogEntry.objects.count()
        view.delete(self.request)
        assert self.user_2 not in self.preprint.contributors
        assert AdminLogEntry.objects.count() == count + 1

    def test_do_not_remove_last_admin(self):
        assert len(list(self.preprint.get_admin_contributors(self.preprint.contributors))) == 1
        view = setup_log_view(self.view(), self.request, guid=self.preprint._id,
                              user_id=self.user._id)
        count = AdminLogEntry.objects.count()
        view.delete(self.request)
        self.preprint.reload()  # Reloads instance to show that nothing was removed
        assert len(list(self.preprint.contributors)) == 2
        assert len(list(self.preprint.get_admin_contributors(self.preprint.contributors))) == 1
        assert AdminLogEntry.objects.count() == count

    def test_no_log(self):
        view = setup_log_view(self.view(), self.request, guid=self.preprint._id,
                              user_id=self.user_2._id)
        view.delete(self.request)
        assert self.preprint.logs.latest().action != PreprintLog.CONTRIB_REMOVED

    def test_no_user_permissions_raises_error(self):
        request = RequestFactory().get(self.url)
        request.user = self.user
        with pytest.raises(PermissionDenied):
            # Fix: pass the user's guid string, not the user object, for
            # consistency with every other call site in this class.
            self.view.as_view()(request, guid=self.preprint._id, user_id=self.user._id)

    def test_correct_view_permissions(self):
        change_permission = Permission.objects.get(codename='change_preprint')
        view_permission = Permission.objects.get(codename='view_preprint')
        self.user.user_permissions.add(change_permission)
        self.user.user_permissions.add(view_permission)
        self.user.save()
        request = RequestFactory().get(self.url)
        request.user = self.user
        response = self.view.as_view()(request, guid=self.preprint._id, user_id=self.user._id)
        assert response.status_code == 200
class TestPreprintConfirmHamSpamViews(AdminTestCase):
    """Tests for the spam/ham confirmation views (unittest-style)."""

    def setUp(self):
        super(TestPreprintConfirmHamSpamViews, self).setUp()
        self.request = RequestFactory().post('/fake_path')
        self.user = AuthUserFactory()
        self.preprint = PreprintFactory(creator=self.user)

    def test_confirm_preprint_as_ham(self):
        view = views.PreprintConfirmHamView()
        view = setup_log_view(view, self.request, guid=self.preprint._id)
        view.delete(self.request)
        self.preprint.refresh_from_db()
        # Fix: compare against the SpamStatus enum (imported at module top
        # and used elsewhere in this file) instead of the magic number 4.
        assert self.preprint.spam_status == SpamStatus.HAM

    def test_confirm_preprint_as_spam(self):
        assert self.preprint.is_public
        view = views.PreprintConfirmSpamView()
        view = setup_log_view(view, self.request, guid=self.preprint._id)
        view.delete(self.request)
        self.preprint.refresh_from_db()
        # Fix: SpamStatus.SPAM instead of the magic number 2.
        assert self.preprint.spam_status == SpamStatus.SPAM
        assert not self.preprint.is_public
@pytest.mark.urls('admin.base.urls')
class TestPreprintWithdrawalRequests:
    """Tests for the admin withdrawal-request list/approve/reject views."""

    @pytest.fixture()
    def submitter(self):
        return AuthUserFactory()

    @pytest.fixture()
    def admin(self):
        admin = AuthUserFactory()
        osf_admin = Group.objects.get(name='osf_admin')
        admin.groups.add(osf_admin)
        return admin

    @pytest.fixture()
    def project(self, submitter):
        return NodeFactory(creator=submitter)

    @pytest.fixture()
    def preprint(self, project):
        return PreprintFactory(project=project)

    @pytest.fixture()
    def withdrawal_request(self, preprint, submitter):
        # A withdrawal request already submitted (INITIAL -> PENDING).
        withdrawal_request = PreprintRequestFactory(
            creator=submitter,
            target=preprint,
            request_type=RequestTypes.WITHDRAWAL.value,
            machine_state=DefaultStates.INITIAL.value,
        )
        withdrawal_request.run_submit(submitter)
        return withdrawal_request

    def test_can_approve_withdrawal_request(self, withdrawal_request, submitter, preprint, admin):
        assert withdrawal_request.machine_state == DefaultStates.PENDING.value
        original_comment = withdrawal_request.comment
        request = RequestFactory().post(reverse('preprints:approve-withdrawal', kwargs={'guid': preprint._id}))
        request.user = admin
        response = views.PreprintApproveWithdrawalRequest.as_view()(request, guid=preprint._id)
        assert response.status_code == 302
        withdrawal_request.refresh_from_db()
        withdrawal_request.target.refresh_from_db()
        assert withdrawal_request.machine_state == DefaultStates.ACCEPTED.value
        # On approval the request comment becomes the withdrawal justification.
        assert original_comment == withdrawal_request.target.withdrawal_justification

    def test_can_reject_withdrawal_request(self, withdrawal_request, admin, preprint):
        assert withdrawal_request.machine_state == DefaultStates.PENDING.value
        request = RequestFactory().post(reverse('preprints:reject-withdrawal', kwargs={'guid': preprint._id}))
        request.user = admin
        response = views.PreprintRejectWithdrawalRequest.as_view()(request, guid=preprint._id)
        assert response.status_code == 302
        withdrawal_request.refresh_from_db()
        withdrawal_request.target.refresh_from_db()
        assert withdrawal_request.machine_state == DefaultStates.REJECTED.value
        assert not withdrawal_request.target.withdrawal_justification

    def test_permissions_errors(self, user, submitter):
        # with auth, no permissions
        request = RequestFactory().get(reverse('preprints:withdrawal-requests'))
        request.user = user
        with pytest.raises(PermissionDenied):
            views.PreprintWithdrawalRequestList.as_view()(request)

        # request submitter
        request = RequestFactory().get(reverse('preprints:withdrawal-requests'))
        request.user = submitter
        with pytest.raises(PermissionDenied):
            views.PreprintWithdrawalRequestList.as_view()(request)

        # no auth
        request = RequestFactory().get(reverse('preprints:withdrawal-requests'))
        request.user = AnonymousUser()
        with pytest.raises(PermissionDenied):
            views.PreprintWithdrawalRequestList.as_view()(request)

    def test_osf_admin_has_correct_view_permissions(self, withdrawal_request, admin):
        request = RequestFactory().get(reverse('preprints:withdrawal-requests'))
        request.user = admin
        response = views.PreprintWithdrawalRequestList.as_view()(request)
        assert response.status_code == 200

    @pytest.mark.parametrize('intent, final_state', [
        ('approveRequest', DefaultStates.ACCEPTED.value),
        ('rejectRequest', DefaultStates.REJECTED.value)])
    def test_approve_reject_on_list_view(self, withdrawal_request, admin, intent, final_state):
        assert withdrawal_request.machine_state == DefaultStates.PENDING.value
        original_comment = withdrawal_request.comment
        request = RequestFactory().post(reverse('preprints:withdrawal-requests'), {intent: 'foo', '{}'.format(withdrawal_request._id): 'bar'})
        request.user = admin
        response = views.PreprintWithdrawalRequestList.as_view()(request)
        assert response.status_code == 302
        withdrawal_request.refresh_from_db()
        withdrawal_request.target.refresh_from_db()
        # Fix: this line was a bare comparison expression with no effect;
        # the ``assert`` keyword was missing, so the state was never checked.
        assert withdrawal_request.machine_state == final_state
        if intent == 'approveRequest':
            assert original_comment == withdrawal_request.target.withdrawal_justification
        else:
            assert not withdrawal_request.target.withdrawal_justification
| apache-2.0 |
marcore/edx-platform | pavelib/paver_tests/test_paver_quality.py | 11 | 15407 | """
Tests for paver quality tasks
"""
import os
from path import Path as path
import tempfile
import textwrap
import unittest
from mock import patch, MagicMock, mock_open
from ddt import ddt, file_data
import pavelib.quality
import paver.easy
import paver.tasks
from paver.easy import BuildFailure
@ddt
class TestPaverQualityViolations(unittest.TestCase):
    """
    For testing the paver violations-counting tasks
    """

    def setUp(self):
        super(TestPaverQualityViolations, self).setUp()
        # One shared scratch file per test, removed automatically afterwards.
        self.report = tempfile.NamedTemporaryFile(delete=False)
        self.report.close()
        self.addCleanup(os.remove, self.report.name)

    def _write(self, text):
        """Overwrite the scratch report file with *text*."""
        with open(self.report.name, 'w') as handle:
            handle.write(text)

    def test_pylint_parser_other_string(self):
        """Arbitrary text is not mistaken for a pylint violation."""
        self._write("hello")
        count = pavelib.quality._count_pylint_violations(self.report.name)  # pylint: disable=protected-access
        self.assertEqual(count, 0)

    def test_pylint_parser_pep8(self):
        # Pep8 violations should be ignored.
        self._write("foo/hello/test.py:304:15: E203 whitespace before ':'")
        count = pavelib.quality._count_pylint_violations(self.report.name)  # pylint: disable=protected-access
        self.assertEqual(count, 0)

    @file_data('pylint_test_list.json')
    def test_pylint_parser_count_violations(self, value):
        """
        Tests:
        - Different types of violations
        - One violation covering multiple lines
        """
        self._write(value)
        count = pavelib.quality._count_pylint_violations(self.report.name)  # pylint: disable=protected-access
        self.assertEqual(count, 1)

    def test_pep8_parser(self):
        """Each line of a pep8 report counts as one violation."""
        self._write("hello\nhithere")
        count, _violations = pavelib.quality._pep8_violations(self.report.name)  # pylint: disable=protected-access
        self.assertEqual(count, 2)
class TestPaverReportViolationsCounts(unittest.TestCase):
    """
    For testing utility functions for getting counts from reports for
    run_jshint, run_complexity, run_safelint, and run_safecommit_report.
    """

    def setUp(self):
        super(TestPaverReportViolationsCounts, self).setUp()

        # Mock the paver @needs decorator so the tasks run standalone.
        self._mock_paver_needs = patch.object(pavelib.quality.run_quality, 'needs').start()
        self._mock_paver_needs.return_value = 0

        # Shared scratch file that each test fills with a fake report.
        self.report = tempfile.NamedTemporaryFile(delete=False)
        self.report.close()

        # Cleanup various mocks and tempfiles
        self.addCleanup(self._mock_paver_needs.stop)
        self.addCleanup(os.remove, self.report.name)

    def _write(self, text):
        """Overwrite the scratch report file with *text*."""
        with open(self.report.name, 'w') as handle:
            handle.write(text)

    def test_get_jshint_violations_count(self):
        """A well-formed jshint summary line yields its violation count."""
        self._write("3000 violations found")
        count = pavelib.quality._get_count_from_last_line(self.report.name, "jshint")  # pylint: disable=protected-access
        self.assertEqual(count, 3000)

    def test_get_violations_no_number_found(self):
        """A report without a parsable count yields None."""
        self._write("Not expected string regex")
        count = pavelib.quality._get_count_from_last_line(self.report.name, "jshint")  # pylint: disable=protected-access
        self.assertEqual(count, None)

    def test_get_violations_count_truncated_report(self):
        """
        A truncated report (i.e. last line is just a violation)
        """
        self._write("foo/bar/js/fizzbuzz.js: line 45, col 59, Missing semicolon.")
        count = pavelib.quality._get_count_from_last_line(self.report.name, "jshint")  # pylint: disable=protected-access
        self.assertEqual(count, None)

    def test_complexity_value(self):
        """The average-complexity summary line yields its float value."""
        self._write("Average complexity: A (1.93953443446)")
        count = pavelib.quality._get_count_from_last_line(self.report.name, "python_complexity")  # pylint: disable=protected-access
        self.assertEqual(count, 1.93953443446)

    def test_truncated_complexity_report(self):
        """A truncated complexity report yields None."""
        self._write("M 110:4 FooBar.default - A")
        count = pavelib.quality._get_count_from_last_line(self.report.name, "python_complexity")  # pylint: disable=protected-access
        self.assertEqual(count, None)

    def test_no_complexity_report(self):
        """A missing report file raises a BuildFailure."""
        with self.assertRaises(BuildFailure):
            pavelib.quality._get_count_from_last_line("non-existent-file", "python_complexity")  # pylint: disable=protected-access

    def test_generic_value(self):
        """
        Default behavior is to look for an integer appearing at head of line
        """
        self._write("5.777 good to see you")
        count = pavelib.quality._get_count_from_last_line(self.report.name, "foo")  # pylint: disable=protected-access
        self.assertEqual(count, 5)

    def test_generic_value_none_found(self):
        """
        Default behavior is to look for an integer appearing at head of line
        """
        self._write("hello 5.777 good to see you")
        count = pavelib.quality._get_count_from_last_line(self.report.name, "foo")  # pylint: disable=protected-access
        self.assertEqual(count, None)

    def test_get_safelint_counts_happy(self):
        """
        Test happy path getting violation counts from safelint report.
        """
        report = textwrap.dedent("""
            test.html: 30:53: javascript-jquery-append: $('#test').append(print_tos);
            javascript-concat-html: 310 violations
            javascript-escape: 7 violations
            2608 violations total
        """)
        self._write(report)
        counts = pavelib.quality._get_safelint_counts(self.report.name)  # pylint: disable=protected-access
        self.assertDictEqual(counts, {
            'rules': {
                'javascript-concat-html': 310,
                'javascript-escape': 7,
            },
            'total': 2608,
        })

    def test_get_safelint_counts_bad_counts(self):
        """
        Test getting violation counts from truncated and malformed safelint
        report.
        """
        report = textwrap.dedent("""
            javascript-concat-html: violations
        """)
        self._write(report)
        counts = pavelib.quality._get_safelint_counts(self.report.name)  # pylint: disable=protected-access
        self.assertDictEqual(counts, {
            'rules': {},
            'total': None,
        })

    def test_get_safecommit_count_happy(self):
        """
        Test happy path getting violation count from safecommit report.
        """
        report = textwrap.dedent("""
            Linting lms/templates/navigation.html:
            2 violations total
            Linting scripts/tests/templates/test.underscore:
            3 violations total
        """)
        self._write(report)
        count = pavelib.quality._get_safecommit_count(self.report.name)  # pylint: disable=protected-access
        self.assertEqual(count, 5)

    def test_get_safecommit_count_bad_counts(self):
        """
        Test getting violation count from truncated safecommit report.
        """
        report = textwrap.dedent("""
            Linting lms/templates/navigation.html:
        """)
        self._write(report)
        count = pavelib.quality._get_safecommit_count(self.report.name)  # pylint: disable=protected-access
        self.assertIsNone(count)

    def test_get_safecommit_count_no_files(self):
        """
        Test getting violation count from safecommit report where no files were
        linted.
        """
        report = textwrap.dedent("""
            No files linted.
        """)
        self._write(report)
        count = pavelib.quality._get_safecommit_count(self.report.name)  # pylint: disable=protected-access
        self.assertEqual(count, 0)
class TestPrepareReportDir(unittest.TestCase):
    """
    Tests the report directory preparation
    """

    def setUp(self):
        super(TestPrepareReportDir, self).setUp()
        # A scratch directory seeded with exactly one file.
        self.scratch_dir = tempfile.mkdtemp()
        self.scratch_file = tempfile.NamedTemporaryFile(delete=False, dir=self.scratch_dir)
        self.addCleanup(os.removedirs, self.scratch_dir)

    def test_report_dir_with_files(self):
        """Preparing the directory removes any files it contains."""
        self.assertTrue(os.path.exists(self.scratch_file.name))
        pavelib.quality._prepare_report_dir(path(self.scratch_dir))  # pylint: disable=protected-access
        self.assertFalse(os.path.exists(self.scratch_file.name))

    def test_report_dir_without_files(self):
        """Preparing an already-empty directory leaves it empty."""
        os.remove(self.scratch_file.name)
        pavelib.quality._prepare_report_dir(path(self.scratch_dir))  # pylint: disable=protected-access
        self.assertEqual(os.listdir(path(self.scratch_dir)), [])
class TestPaverRunQuality(unittest.TestCase):
    """
    For testing the paver run_quality task
    """

    def setUp(self):
        super(TestPaverRunQuality, self).setUp()

        # test_no_diff_quality_failures seems to alter the way that paver
        # executes these lines is subsequent tests.
        # https://github.com/paver/paver/blob/master/paver/tasks.py#L175-L180
        #
        # The other tests don't appear to have the same impact. This was
        # causing a test order dependency. This line resets that state
        # of environment._task_in_progress so that the paver commands in the
        # tests will be considered top level tasks by paver, and we can predict
        # which path it will chose in the above code block.
        #
        # TODO: Figure out why one test is altering the state to begin with.
        paver.tasks.environment = paver.tasks.Environment()

        # mock the @needs decorator to skip it
        self._mock_paver_needs = patch.object(pavelib.quality.run_quality, 'needs').start()
        self._mock_paver_needs.return_value = 0
        patcher = patch('pavelib.quality.sh')
        self._mock_paver_sh = patcher.start()
        self.addCleanup(patcher.stop)
        self.addCleanup(self._mock_paver_needs.stop)

    @patch('__builtin__.open', mock_open())
    def test_failure_on_diffquality_pep8(self):
        """
        If pep8 finds errors, pylint and jshint should still be run
        """
        # Mock _get_pep8_violations to return a violation
        _mock_pep8_violations = MagicMock(
            return_value=(1, ['lms/envs/common.py:32:2: E225 missing whitespace around operator'])
        )
        with patch('pavelib.quality._get_pep8_violations', _mock_pep8_violations):
            with self.assertRaises(SystemExit):
                pavelib.quality.run_quality("")

        # Test that pep8, pylint, and jshint were called by counting the calls to
        # _get_pep8_violations (for pep8) and sh (for diff-quality pylint & jshint)
        self.assertEqual(_mock_pep8_violations.call_count, 1)
        self.assertEqual(self._mock_paver_sh.call_count, 2)

    @patch('__builtin__.open', mock_open())
    def test_failure_on_diffquality_pylint(self):
        """
        If diff-quality fails on pylint, the paver task should also fail
        """
        # Underlying sh call must fail when it is running the pylint diff-quality task
        self._mock_paver_sh.side_effect = CustomShMock().fail_on_pylint
        _mock_pep8_violations = MagicMock(return_value=(0, []))
        with patch('pavelib.quality._get_pep8_violations', _mock_pep8_violations):
            with self.assertRaises(SystemExit):
                pavelib.quality.run_quality("")

        # Test that both pep8 and pylint were called by counting the calls
        # Assert that _get_pep8_violations (which calls "pep8") is called once
        self.assertEqual(_mock_pep8_violations.call_count, 1)
        # And assert that sh was called twice (for the calls to pylint & jshint). This means that even in
        # the event of a diff-quality pylint failure, jshint is still called.
        self.assertEqual(self._mock_paver_sh.call_count, 2)

    @patch('__builtin__.open', mock_open())
    def test_failure_on_diffquality_jshint(self):
        """
        If diff-quality fails on jshint, the paver task should also fail
        """
        # Underlying sh call must fail when it is running the jshint diff-quality task
        self._mock_paver_sh.side_effect = CustomShMock().fail_on_jshint
        _mock_pep8_violations = MagicMock(return_value=(0, []))
        with patch('pavelib.quality._get_pep8_violations', _mock_pep8_violations):
            with self.assertRaises(SystemExit):
                pavelib.quality.run_quality("")
        # BUG FIX: removed a no-op `self.assertRaises(BuildFailure)` call here.
        # assertRaises with only an exception class (and no callable or `with`
        # block) asserts nothing.

        # Test that both pep8 and pylint were called by counting the calls
        # Assert that _get_pep8_violations (which calls "pep8") is called once
        self.assertEqual(_mock_pep8_violations.call_count, 1)
        # And assert that sh was called twice (for the calls to pep8 and pylint)
        self.assertEqual(self._mock_paver_sh.call_count, 2)

    @patch('__builtin__.open', mock_open())
    def test_other_exception(self):
        """
        If diff-quality fails for an unknown reason on the first run (pep8), then
        pylint should not be run
        """
        self._mock_paver_sh.side_effect = [Exception('unrecognized failure!'), 0]
        with self.assertRaises(SystemExit):
            pavelib.quality.run_quality("")
        # BUG FIX: removed a no-op `self.assertRaises(Exception)` call here
        # (same misuse as above).

        # Test that pylint is NOT called by counting calls
        self.assertEqual(self._mock_paver_sh.call_count, 1)

    @patch('__builtin__.open', mock_open())
    def test_no_diff_quality_failures(self):
        # Assert nothing is raised
        _mock_pep8_violations = MagicMock(return_value=(0, []))
        with patch('pavelib.quality._get_pep8_violations', _mock_pep8_violations):
            pavelib.quality.run_quality("")

        # Assert that _get_pep8_violations (which calls "pep8") is called once
        self.assertEqual(_mock_pep8_violations.call_count, 1)
        # And assert that sh was called twice (for the call to "pylint" & "jshint")
        self.assertEqual(self._mock_paver_sh.call_count, 2)
class CustomShMock(object):
    """
    Stand-in for the ``sh`` calls diff-quality makes. No real command may run
    during tests, but a few invocations need scripted failure responses.
    """

    def fail_on_pylint(self, arg):
        """Simulate diff-quality exiting non-zero only for the pylint run."""
        if "pylint" not in arg:
            return
        # Essentially mock diff-quality exiting with 1
        paver.easy.sh("exit 1")

    def fail_on_jshint(self, arg):
        """Simulate diff-quality exiting non-zero only for the jshint run."""
        if "jshint" not in arg:
            return
        # Essentially mock diff-quality exiting with 1
        paver.easy.sh("exit 1")
| agpl-3.0 |
sandvine/sandvine-nfv-descriptors | attic/rift/tse_pktgen_nsd/scripts/pts_scale.py | 2 | 5967 | #!/usr/bin/env python
import argparse
import logging
import os
import subprocess
import sys
import time
from pysvapi.elementdriver.sshdriver import sshdriver
from pysvapi.svapiclient import client
import yaml
import re
# Map pipeline shorthand keys to the substring used to match VNFD/VNFR names
# in the orchestrator's YAML payload.
vnfd_names={"tse": "TSE",
            "pts": "PTS",
            "spb": "SPB"}
class TSEConfigTool():
    """Pushes PTS service-chaining configuration to a TSE VNF.

    The orchestrator hands this tool a YAML payload (either the initial
    configuration or a scale-out event); management IPs are extracted from it
    and the TSE/PTS devices are then configured over SSH.
    """

    def __init__(self, _logger):
        self.logger = _logger

    def get_vnfr(self, yaml_cfg, name):
        """Return the first record in *yaml_cfg* whose 'name' contains *name*,
        or None when nothing matches."""
        for item in yaml_cfg:
            if name in item['name']:
                return item
        return None

    def get_vnf_record_from_initial(self, vnfd_name, yaml_cfg, record):
        """Return field *record* of the VNFR matching *vnfd_name* in an
        initial-configuration payload.

        Exits the process when no VNFR matches, since configuration cannot
        proceed without it.
        """
        self.logger.debug("get {} from initial".format(vnfd_name))
        for index, vnfr in yaml_cfg['vnfr'].items():
            self.logger.debug("VNFR {}: {}".format(index, vnfr))
            if re.search(vnfd_name, vnfr['name']):
                # BUG FIX: this used to be a bare print(); route the debug
                # output through the logger like everything else.
                self.logger.debug("record: {}".format(vnfr))
                value = vnfr[record]
                self.logger.debug("{} {} {}".format(vnfd_name, record, value))
                return value
        self.logger.info("ERROR: cannot find {}".format(vnfd_name))
        sys.exit(1)

    def get_pts_mgmt(self, yaml_cfg):
        """Return the PTS management IP, from either a scale-out payload
        ('vnfrs_in_group') or the initial configuration."""
        if 'vnfrs_in_group' in yaml_cfg:
            self.logger.debug("get_pts: from scale")
            mgmt = yaml_cfg['vnfrs_in_group'][0]['rw_mgmt_ip']
            self.logger.debug("pts mgmt {}".format(mgmt))
            return mgmt
        else:
            return self.get_vnf_record_from_initial(vnfd_names['pts'], yaml_cfg, 'mgmt_ip_address')

    def get_pts_name(self, yaml_cfg):
        """Return the PTS VNFR name, from either a scale-out payload or the
        initial configuration."""
        if 'vnfrs_in_group' in yaml_cfg:
            self.logger.debug("get_pts: from scale")
            name = yaml_cfg['vnfrs_in_group'][0]['name']
            self.logger.debug("pts name {}".format(name))
            return name
        else:
            return self.get_vnf_record_from_initial(vnfd_names['pts'], yaml_cfg, 'name')

    def get_vnf_mgmt(self, yaml_cfg, vnf_name):
        """Return the management IP for *vnf_name* ('tse'/'spb'), or None when
        the VNF is absent from a scale-out payload."""
        if 'vnfrs_others' in yaml_cfg:
            vnfr = self.get_vnfr(yaml_cfg['vnfrs_others'], vnfd_names[vnf_name])
            if vnfr is None:
                return None
            mgmt = vnfr['rw_mgmt_ip']
            self.logger.debug("vnf name {} mgmt {}".format(vnfr['name'], mgmt))
            return mgmt
        else:
            return self.get_vnf_record_from_initial(vnfd_names[vnf_name], yaml_cfg, 'mgmt_ip_address')

    def configure(self, yaml_cfg):
        """Wire the PTS into the TSE's traffic-steering configuration.

        Connects to both devices, reads the PTS data-plane MAC, then commits
        the service-locator/function/group-member configuration on the TSE
        (and the SPB server address on both, when an SPB is present).
        Exits the process on any unreachable device.
        """
        pts_mgmt = self.get_pts_mgmt(yaml_cfg)
        tse_mgmt = self.get_vnf_mgmt(yaml_cfg, 'tse')
        spb_mgmt = self.get_vnf_mgmt(yaml_cfg, 'spb')
        if pts_mgmt is None:
            self.logger.info("pts mgmt None")
            sys.exit(1)
        if tse_mgmt is None:
            self.logger.info("tse mgmt None")
            sys.exit(1)

        pts_sess = sshdriver.ElementDriverSSH(pts_mgmt, private_key_file=os.path.join(os.environ['RIFT_INSTALL'], "usr/bin/pts_vnfd-key"))
        tse_sess = sshdriver.ElementDriverSSH(tse_mgmt, private_key_file=os.path.join(os.environ['RIFT_INSTALL'], "usr/bin/tse_vnfd-key"))

        self.logger.info("connecting to pts {}".format(self.get_pts_mgmt(yaml_cfg)))
        if not pts_sess.wait_for_api_ready():
            self.logger.info("PTS API did not become ready")
            sys.exit(1)
        self.logger.info("pts api is ready")
        if not tse_sess.wait_for_api_ready():
            # BUG FIX: this used the undefined global `logger`, raising a
            # NameError instead of reporting the timeout.
            self.logger.info("TSE API did not become ready")
            sys.exit(1)
        self.logger.info("tse api is ready")

        pts_cli = client.Client(pts_sess)
        tse_cli = client.Client(tse_sess)

        # get the pts mac address
        mac = pts_cli.get_interface_mac('1-3')
        # CLI object names must not contain spaces.
        cli_pts_name = self.get_pts_name(yaml_cfg).replace(' ', '_')
        self.logger.debug("retrieved pts {} interface 1-3 mac {}".format(cli_pts_name, mac))

        tse_sess.add_cmd('add config traffic-steering service-locator mac-nsh-locator ' + cli_pts_name + ' mac ' + mac)
        tse_sess.add_cmd('add config traffic-steering service-function ' + cli_pts_name + ' transport mac-nsh locator ' + cli_pts_name)
        tse_sess.add_cmd('add config traffic-steering service-group-member ' + cli_pts_name + ' service-group pts-group')
        if spb_mgmt is not None:
            spb_cmd = 'set config service spb servers {}'.format(spb_mgmt)
            tse_sess.add_cmd(spb_cmd)
            pts_sess.add_cmd(spb_cmd)
        pts_sess.configuration_commit()
        tse_sess.configuration_commit()
        self.logger.info("configuration complete")
def main(argv=sys.argv[1:]):
    """Parse CLI arguments, configure logging, and run the TSE config tool.

    NOTE(review): *argv* is accepted but never forwarded to argparse, which
    reads sys.argv directly — pre-existing quirk kept for compatibility.
    """
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument("yaml_cfg_file", type=argparse.FileType('r'))
        parser.add_argument("-q", "--quiet", dest="verbose", action="store_false")
        parser.add_argument("-r", "--rundir", dest='run_dir', action='store')
        args = parser.parse_args()

        # NOTE(review): yaml.load without an explicit Loader can construct
        # arbitrary objects; prefer yaml.safe_load if this input is untrusted.
        yaml_cfg = yaml.load(args.yaml_cfg_file.read())

        run_dir = args.run_dir or os.path.join(os.environ['RIFT_INSTALL'], "var/run/rift")
        if not os.path.exists(run_dir):
            os.makedirs(run_dir)
        log_file = "{}/pts-scale-{}.log".format(run_dir, time.strftime("%Y%m%d%H%M%S"))
        logging.basicConfig(filename=log_file, level=logging.DEBUG)
        logger = logging.getLogger()
    except Exception as e:
        # Logging is not set up yet, so report on stdout and bail out.
        print("Exception in {}: {}".format(__file__, e))
        sys.exit(1)

    try:
        # Mirror log output to the console; verbosity is controlled by -q.
        console = logging.StreamHandler()
        if args.verbose:
            console.setLevel(logging.DEBUG)
        else:
            console.setLevel(logging.INFO)
        # create formatter and add it to the handlers
        console.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
        logger.addHandler(console)
    except Exception as e:
        logger.exception(e)
        raise e

    try:
        logger.debug("Input YAML: {}".format(yaml_cfg))
        tool = TSEConfigTool(logger)
        tool.configure(yaml_cfg)
    except Exception as e:
        logger.exception(e)
        raise e
if __name__ == "__main__":
main()
| apache-2.0 |
kmonsoor/python-for-android | python3-alpha/python3-src/Lib/unittest/result.py | 50 | 6292 | """Test result object"""
import os
import io
import sys
import traceback
from . import util
from functools import wraps
__unittest = True
def failfast(method):
@wraps(method)
def inner(self, *args, **kw):
if getattr(self, 'failfast', False):
self.stop()
return method(self, *args, **kw)
return inner
STDOUT_LINE = '\nStdout:\n%s'
STDERR_LINE = '\nStderr:\n%s'
class TestResult(object):
    """Holder for test result information.

    Test results are automatically managed by the TestCase and TestSuite
    classes, and do not need to be explicitly manipulated by writers of tests.

    Each instance holds the total number of tests run, and collections of
    failures and errors that occurred among those test runs. The collections
    contain tuples of (testcase, exceptioninfo), where exceptioninfo is the
    formatted traceback of the error that occurred.
    """
    # Run-level bookkeeping shared with collaborating unittest classes;
    # not part of the public API.
    _previousTestClass = None
    _testRunEntered = False
    _moduleSetUpFailed = False

    def __init__(self, stream=None, descriptions=None, verbosity=None):
        # stream/descriptions/verbosity are accepted for signature
        # compatibility with subclasses; this base class ignores them.
        self.failfast = False
        self.failures = []
        self.errors = []
        self.testsRun = 0
        self.skipped = []
        self.expectedFailures = []
        self.unexpectedSuccesses = []
        self.shouldStop = False
        # When True, stdout/stderr are captured per test and only echoed
        # back (mirrored) if the test errors or fails.
        self.buffer = False
        self._stdout_buffer = None
        self._stderr_buffer = None
        self._original_stdout = sys.stdout
        self._original_stderr = sys.stderr
        self._mirrorOutput = False

    def printErrors(self):
        "Called by TestRunner after test run"

    def startTest(self, test):
        "Called when the given test is about to be run"
        self.testsRun += 1
        self._mirrorOutput = False
        self._setupStdout()

    def _setupStdout(self):
        # Redirect stdout/stderr into StringIO buffers while buffering is
        # enabled; the buffers are created lazily and reused across tests.
        if self.buffer:
            if self._stderr_buffer is None:
                self._stderr_buffer = io.StringIO()
                self._stdout_buffer = io.StringIO()
            sys.stdout = self._stdout_buffer
            sys.stderr = self._stderr_buffer

    def startTestRun(self):
        """Called once before any tests are executed.

        See startTest for a method called before each test.
        """

    def stopTest(self, test):
        """Called when the given test has been run"""
        self._restoreStdout()
        self._mirrorOutput = False

    def _restoreStdout(self):
        # Undo _setupStdout: replay captured output if the test failed or
        # errored, then reset the buffers for the next test.
        if self.buffer:
            if self._mirrorOutput:
                output = sys.stdout.getvalue()
                error = sys.stderr.getvalue()
                if output:
                    if not output.endswith('\n'):
                        output += '\n'
                    self._original_stdout.write(STDOUT_LINE % output)
                if error:
                    if not error.endswith('\n'):
                        error += '\n'
                    self._original_stderr.write(STDERR_LINE % error)

            sys.stdout = self._original_stdout
            sys.stderr = self._original_stderr
            # Truncate rather than recreate, so the same StringIO objects
            # are reused for the next test.
            self._stdout_buffer.seek(0)
            self._stdout_buffer.truncate()
            self._stderr_buffer.seek(0)
            self._stderr_buffer.truncate()

    def stopTestRun(self):
        """Called once after all tests are executed.

        See stopTest for a method called after each test.
        """

    @failfast
    def addError(self, test, err):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info().
        """
        self.errors.append((test, self._exc_info_to_string(err, test)))
        self._mirrorOutput = True

    @failfast
    def addFailure(self, test, err):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info()."""
        self.failures.append((test, self._exc_info_to_string(err, test)))
        self._mirrorOutput = True

    def addSuccess(self, test):
        "Called when a test has completed successfully"
        pass

    def addSkip(self, test, reason):
        """Called when a test is skipped."""
        self.skipped.append((test, reason))

    def addExpectedFailure(self, test, err):
        """Called when an expected failure/error occured."""
        self.expectedFailures.append(
            (test, self._exc_info_to_string(err, test)))

    @failfast
    def addUnexpectedSuccess(self, test):
        """Called when a test was expected to fail, but succeed."""
        self.unexpectedSuccesses.append(test)

    def wasSuccessful(self):
        "Tells whether or not this result was a success"
        return len(self.failures) == len(self.errors) == 0

    def stop(self):
        "Indicates that the tests should be aborted"
        self.shouldStop = True

    def _exc_info_to_string(self, err, test):
        """Converts a sys.exc_info()-style tuple of values into a string."""
        exctype, value, tb = err
        # Skip test runner traceback levels
        while tb and self._is_relevant_tb_level(tb):
            tb = tb.tb_next

        if exctype is test.failureException:
            # Skip assert*() traceback levels
            length = self._count_relevant_tb_levels(tb)
            msgLines = traceback.format_exception(exctype, value, tb, length)
        else:
            msgLines = traceback.format_exception(exctype, value, tb)

        if self.buffer:
            # Append any captured stdout/stderr so the failure message shows
            # what the test printed.
            output = sys.stdout.getvalue()
            error = sys.stderr.getvalue()
            if output:
                if not output.endswith('\n'):
                    output += '\n'
                msgLines.append(STDOUT_LINE % output)
            if error:
                if not error.endswith('\n'):
                    error += '\n'
                msgLines.append(STDERR_LINE % error)
        return ''.join(msgLines)

    def _is_relevant_tb_level(self, tb):
        # Frames inside unittest itself carry the module-global __unittest
        # marker and are hidden from reported tracebacks.
        return '__unittest' in tb.tb_frame.f_globals

    def _count_relevant_tb_levels(self, tb):
        # Number of frames before the first unittest-internal frame.
        length = 0
        while tb and not self._is_relevant_tb_level(tb):
            length += 1
            tb = tb.tb_next
        return length

    def __repr__(self):
        return ("<%s run=%i errors=%i failures=%i>" %
               (util.strclass(self.__class__), self.testsRun, len(self.errors),
                len(self.failures)))
| apache-2.0 |
NathanSegerlind/incubator-spot | spot-ingest/pipelines/flow/worker.py | 10 | 5076 | #!/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import subprocess
import datetime
import logging
import os
import json
from multiprocessing import Process
from common.utils import Util
class Worker(object):
    """Flow-ingest worker.

    Listens on a Kafka topic for HDFS paths of captured flow files and runs
    each one through the ingest pipeline: HDFS -> local staging -> nfdump
    CSV -> HDFS staging -> Hive (avro/parquet) -> cleanup.
    """

    def __init__(self,db_name,hdfs_app_path,kafka_consumer,conf_type,processes=None):
        # NOTE(review): `processes` is accepted but never used — confirm
        # whether parallel processing was intended here.
        self._initialize_members(db_name,hdfs_app_path,kafka_consumer,conf_type)

    def _initialize_members(self,db_name,hdfs_app_path,kafka_consumer,conf_type):
        # get logger instance.
        self._logger = Util.get_logger('SPOT.INGEST.WRK.FLOW')

        self._db_name = db_name
        self._hdfs_app_path = hdfs_app_path

        # read proxy configuration.
        # ingest_conf.json lives two directories above this script; only the
        # section for `conf_type` is retained.
        self._script_path = os.path.dirname(os.path.abspath(__file__))
        conf_file = "{0}/ingest_conf.json".format(os.path.dirname(os.path.dirname(self._script_path)))
        conf = json.loads(open(conf_file).read())
        self._conf = conf["pipelines"][conf_type]

        self._process_opt = self._conf['process_opt']
        self._local_staging = self._conf['local_staging']
        self.kafka_consumer = kafka_consumer

    def start(self):
        """Block indefinitely, handling each Kafka message as a new file."""
        self._logger.info("Listening topic:{0}".format(self.kafka_consumer.Topic))
        for message in self.kafka_consumer.start():
            self._new_file(message.value)

    def _new_file(self,file):
        """Process one announced file in a child process.

        Running the pipeline in a separate Process (joined immediately)
        isolates the shell-heavy work from the consumer loop.
        """
        self._logger.info("-------------------------------------- New File received --------------------------------------")
        self._logger.info("File: {0} ".format(file))
        p = Process(target=self._process_new_file, args=(file,))
        p.start()
        p.join()

    def _process_new_file(self,file):
        """Run the full ingest pipeline for one HDFS file path (see class doc)."""

        # get file from hdfs
        get_file_cmd = "hadoop fs -get {0} {1}.".format(file,self._local_staging)
        self._logger.info("Getting file from hdfs: {0}".format(get_file_cmd))
        Util.execute_cmd(get_file_cmd,self._logger)

        # get file name and date
        # assumes names look like <prefix>.YYYYMMDDhhmm... — the second
        # dot-separated field carries the capture timestamp; TODO confirm
        # against the collector's naming scheme.
        file_name_parts = file.split('/')
        file_name = file_name_parts[len(file_name_parts)-1]
        flow_date = file_name.split('.')[1]
        flow_year = flow_date[0:4]
        flow_month = flow_date[4:6]
        flow_day = flow_date[6:8]
        flow_hour = flow_date[8:10]

        # build process cmd.
        process_cmd = "nfdump -o csv -r {0}{1} {2} > {0}{1}.csv".format(self._local_staging,file_name,self._process_opt)
        self._logger.info("Processing file: {0}".format(process_cmd))
        Util.execute_cmd(process_cmd,self._logger)

        # create hdfs staging.
        hdfs_path = "{0}/flow".format(self._hdfs_app_path)
        # Timestamp-derived suffix keeps concurrent workers from colliding.
        staging_timestamp = datetime.datetime.now().strftime('%M%S%f')[:-4]
        hdfs_staging_path = "{0}/stage/{1}".format(hdfs_path,staging_timestamp)
        create_staging_cmd = "hadoop fs -mkdir -p {0}".format(hdfs_staging_path)
        self._logger.info("Creating staging: {0}".format(create_staging_cmd))
        Util.execute_cmd(create_staging_cmd,self._logger)

        # move to stage.
        mv_to_staging ="hadoop fs -moveFromLocal {0}{1}.csv {2}/.".format(self._local_staging,file_name,hdfs_staging_path)
        self._logger.info("Moving data to staging: {0}".format(mv_to_staging))
        subprocess.call(mv_to_staging,shell=True)

        #load to avro
        # Partition values (y/m/d/h) come from the file-name timestamp above.
        load_to_avro_cmd = "hive -hiveconf dbname={0} -hiveconf y={1} -hiveconf m={2} -hiveconf d={3} -hiveconf h={4} -hiveconf data_location='{5}' -f pipelines/flow/load_flow_avro_parquet.hql".format(self._db_name,flow_year,flow_month,flow_day,flow_hour,hdfs_staging_path)
        self._logger.info( "Loading data to hive: {0}".format(load_to_avro_cmd))
        Util.execute_cmd(load_to_avro_cmd,self._logger)

        # remove from hdfs staging
        rm_hdfs_staging_cmd = "hadoop fs -rm -R -skipTrash {0}".format(hdfs_staging_path)
        self._logger.info("Removing staging path: {0}".format(rm_hdfs_staging_cmd))
        Util.execute_cmd(rm_hdfs_staging_cmd,self._logger)

        # remove from local staging.
        rm_local_staging = "rm {0}{1}".format(self._local_staging,file_name)
        self._logger.info("Removing files from local staging: {0}".format(rm_local_staging))
        Util.execute_cmd(rm_local_staging,self._logger)

        self._logger.info("File {0} was successfully processed.".format(file_name))
Russell-IO/ansible | lib/ansible/modules/network/eos/eos_eapi.py | 69 | 14834 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: eos_eapi
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Manage and configure Arista EOS eAPI.
requirements:
- "EOS v4.12 or greater"
description:
- Use to enable or disable eAPI access, and set the port and state
of http, https, local_http and unix-socket servers.
- When enabling eAPI access the default is to enable HTTP on port
80, enable HTTPS on port 443, disable local HTTP, and disable
Unix socket server. Use the options listed below to override the
default configuration.
- Requires EOS v4.12 or greater.
extends_documentation_fragment: eos
options:
http:
description:
- The C(http) argument controls the operating state of the HTTP
transport protocol when eAPI is present in the running-config.
When the value is set to True, the HTTP protocol is enabled and
when the value is set to False, the HTTP protocol is disabled.
By default, when eAPI is first configured, the HTTP protocol is
disabled.
type: bool
default: 'no'
aliases: ['enable_http']
http_port:
description:
- Configures the HTTP port that will listen for connections when
the HTTP transport protocol is enabled. This argument accepts
integer values in the valid range of 1 to 65535.
default: 80
https:
description:
- The C(https) argument controls the operating state of the HTTPS
transport protocol when eAPI is present in the running-config.
When the value is set to True, the HTTPS protocol is enabled and
when the value is set to False, the HTTPS protocol is disabled.
By default, when eAPI is first configured, the HTTPS protocol is
enabled.
type: bool
default: 'yes'
aliases: ['enable_https']
https_port:
description:
- Configures the HTTP port that will listen for connections when
the HTTP transport protocol is enabled. This argument accepts
integer values in the valid range of 1 to 65535.
default: 443
local_http:
description:
- The C(local_http) argument controls the operating state of the
local HTTP transport protocol when eAPI is present in the
running-config. When the value is set to True, the HTTP protocol
is enabled and restricted to connections from localhost only. When
the value is set to False, the HTTP local protocol is disabled.
- Note is value is independent of the C(http) argument
type: bool
default: 'no'
aliases: ['enable_local_http']
local_http_port:
description:
- Configures the HTTP port that will listen for connections when
the HTTP transport protocol is enabled. This argument accepts
integer values in the valid range of 1 to 65535.
default: 8080
socket:
description:
- The C(socket) argument controls the operating state of the UNIX
Domain Socket used to receive eAPI requests. When the value
of this argument is set to True, the UDS will listen for eAPI
requests. When the value is set to False, the UDS will not be
available to handle requests. By default when eAPI is first
configured, the UDS is disabled.
type: bool
default: 'no'
aliases: ['enable_socket']
vrf:
description:
- The C(vrf) argument will configure eAPI to listen for connections
in the specified VRF. By default, eAPI transports will listen
for connections in the global table. This value requires the
VRF to already be created otherwise the task will fail.
default: default
version_added: "2.2"
config:
description:
- The module, by default, will connect to the remote device and
retrieve the current running-config to use as a base for comparing
against the contents of source. There are times when it is not
desirable to have the task get the current running-config for
every task in a playbook. The I(config) argument allows the
implementer to pass in the configuration to use as the base
config for comparison.
version_added: "2.2"
state:
description:
- The C(state) argument controls the operational state of eAPI
on the remote device. When this argument is set to C(started),
eAPI is enabled to receive requests and when this argument is
C(stopped), eAPI is disabled and will not receive requests.
default: started
choices: ['started', 'stopped']
"""
EXAMPLES = """
- name: Enable eAPI access with default configuration
eos_eapi:
state: started
- name: Enable eAPI with no HTTP, HTTPS at port 9443, local HTTP at port 80, and socket enabled
eos_eapi:
state: started
http: false
https_port: 9443
local_http: yes
local_http_port: 80
socket: yes
- name: Shutdown eAPI access
eos_eapi:
state: stopped
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- management api http-commands
- protocol http port 81
- no protocol https
urls:
description: Hash of URL endpoints eAPI is listening on per interface
returned: when eAPI is started
type: dict
sample: {'Management1': ['http://172.26.10.1:80']}
session_name:
description: The EOS config session name used to load the configuration
returned: when changed is True
type: str
sample: ansible_1479315771
"""
import re
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.eos.eos import run_commands, load_config
from ansible.module_utils.six import iteritems
from ansible.module_utils.network.eos.eos import eos_argument_spec, check_args
def check_transport(module):
    """Fail the task when it was invoked over the eAPI transport.

    eAPI cannot be (re)configured through itself, so only the cli
    transport is supported by this module.
    """
    params = module.params
    provider = params['provider'] or {}
    if params['transport'] == 'eapi' or provider.get('transport') == 'eapi':
        module.fail_json(msg='eos_eapi module is only supported over cli transport')
def validate_http_port(value, module):
    """Abort the task when *value* is outside the valid TCP port range."""
    if value < 1 or value > 65535:
        module.fail_json(msg='http_port must be between 1 and 65535')
def validate_https_port(value, module):
    """Abort the task when *value* is outside the valid TCP port range.

    BUG FIX: the original error message said 'http_port' (copy-paste from
    validate_http_port); it now names the option that actually failed.
    """
    if not 1 <= value <= 65535:
        module.fail_json(msg='https_port must be between 1 and 65535')
def validate_local_http_port(value, module):
    """Abort the task when *value* is outside the valid TCP port range.

    BUG FIX: the original error message said 'http_port' (copy-paste from
    validate_http_port); it now names the option that actually failed.
    """
    if not 1 <= value <= 65535:
        module.fail_json(msg='local_http_port must be between 1 and 65535')
def validate_vrf(value, module):
    """Ensure the requested VRF already exists on the device.

    Parses the tabular output of 'show vrf' and fails the task when the
    requested name is not among the configured VRFs.
    """
    out = run_commands(module, ['show vrf'])
    # the global table is always available even though 'show vrf'
    # does not list it
    configured = ['default']
    # the first three lines of 'show vrf' output are table headers
    for line in out[0].strip().splitlines()[3:]:
        line = line.strip()
        if not line:
            continue
        # columns are separated by runs of two or more spaces
        columns = re.split(r'\s{2,}', line)
        if len(columns) > 2:
            configured.append(columns[0])
    if value not in configured:
        module.fail_json(msg='vrf `%s` is not configured on the system' % value)
def map_obj_to_commands(updates, module, warnings):
    """Translate the difference between desired and current state into CLI commands.

    :param updates: tuple of (want, have) dicts of eAPI settings
    :param module: AnsibleModule instance (unused here; kept for call signature)
    :param warnings: list that per-protocol warnings are appended to
    :returns: ordered list of configuration commands (possibly empty)
    """
    commands = list()
    want, have = updates

    def needs_update(x):
        # only keys the user explicitly set (non-None) can trigger a change
        return want.get(x) is not None and (want.get(x) != have.get(x))

    def add(cmd):
        # every command lives under the api config stanza; enter it once
        if 'management api http-commands' not in commands:
            commands.insert(0, 'management api http-commands')
        commands.append(cmd)

    if any((needs_update('http'), needs_update('http_port'))):
        if want['http'] is False:
            add('no protocol http')
        else:
            if have['http'] is False and want['http'] in (False, None):
                warnings.append('protocol http is not enabled, not configuring http port value')
            else:
                port = want['http_port'] or 80
                add('protocol http port %s' % port)

    if any((needs_update('https'), needs_update('https_port'))):
        if want['https'] is False:
            add('no protocol https')
        else:
            if have['https'] is False and want['https'] in (False, None):
                warnings.append('protocol https is not enabled, not configuring https port value')
            else:
                port = want['https_port'] or 443
                add('protocol https port %s' % port)

    if any((needs_update('local_http'), needs_update('local_http_port'))):
        if want['local_http'] is False:
            add('no protocol http localhost')
        else:
            if have['local_http'] is False and want['local_http'] in (False, None):
                warnings.append('protocol local_http is not enabled, not configuring local_http port value')
            else:
                port = want['local_http_port'] or 8080
                add('protocol http localhost port %s' % port)

    # BUG FIX: the original evaluated any((needs_update('socket'),
    # needs_update('socket'))) -- the same check twice; one check suffices.
    if needs_update('socket'):
        if want['socket'] is False:
            add('no protocol unix-socket')
        else:
            add('protocol unix-socket')

    if needs_update('state') and not needs_update('vrf'):
        if want['state'] == 'stopped':
            add('shutdown')
        elif want['state'] == 'started':
            add('no shutdown')

    if needs_update('vrf'):
        add('vrf %s' % want['vrf'])
        # switching operational vrfs here
        # need to add the desired state as well
        if want['state'] == 'stopped':
            add('shutdown')
        elif want['state'] == 'started':
            add('no shutdown')

    return commands
def parse_state(data):
    """Map the device's 'enabled' flag onto the module's state vocabulary."""
    return 'started' if data[0]['enabled'] else 'stopped'
def map_config_to_obj(module):
    """Read the device's current eAPI configuration into a settings dict."""
    out = run_commands(module, ['show management api http-commands | json'])
    resp = out[0]
    obj = {
        'http': resp['httpServer']['configured'],
        'http_port': resp['httpServer']['port'],
        'https': resp['httpsServer']['configured'],
        'https_port': resp['httpsServer']['port'],
        'local_http': resp['localHttpServer']['configured'],
        'local_http_port': resp['localHttpServer']['port'],
        'socket': resp['unixSocketServer']['configured'],
        'vrf': resp['vrf'],
        'state': parse_state(out),
    }
    return obj
def map_params_to_obj(module):
    """Collect the user-supplied eAPI settings and validate the set ones.

    For every non-falsy value, a ``validate_<key>`` function is looked up
    by naming convention and, when present, invoked to vet the value.
    """
    keys = ('http', 'http_port', 'https', 'https_port', 'local_http',
            'local_http_port', 'socket', 'vrf', 'state')
    obj = dict((key, module.params[key]) for key in keys)
    for key, value in obj.items():
        if not value:
            continue
        validator = globals().get('validate_%s' % key)
        if validator:
            validator(value, module)
    return obj
def verify_state(updates, module):
    """Poll the device until each requested protocol's running state matches
    the desired state, failing after the configured timeout (seconds).

    :param updates: tuple of (want, have) dicts of eAPI settings
    :param module: AnsibleModule instance used for commands and failure
    """
    want, have = updates
    # (want key, key in 'show management api http-commands' json output)
    invalid_state = [('http', 'httpServer'),
                     ('https', 'httpsServer'),
                     ('local_http', 'localHttpServer'),
                     ('socket', 'unixSocketServer')]
    timeout = module.params['timeout'] or 30
    state = module.params['state']
    while invalid_state:
        out = run_commands(module, ['show management api http-commands | json'])
        # BUG FIX: the original deleted entries from invalid_state while
        # iterating it with enumerate(), which skips the element following
        # every deletion; build the list of still-pending entries instead.
        pending = []
        for want_key, eapi_key in invalid_state:
            running = out[0][eapi_key]['running']
            if want[want_key] is not None:
                if want[want_key] != running:
                    pending.append((want_key, eapi_key))
            elif state == 'stopped' and running:
                # no explicit wish for this protocol, but a global stop was
                # requested and it is still running
                pending.append((want_key, eapi_key))
        invalid_state = pending
        if not invalid_state:
            break
        time.sleep(1)
        timeout -= 1
        if timeout == 0:
            module.fail_json(msg='timeout expired before eapi running state changed')
def collect_facts(module, result):
    """Expose the URLs eAPI is serving per interface as ansible_facts."""
    out = run_commands(module, ['show management api http-commands | json'])
    urls = dict()
    # each entry looks like "Management1 : http://172.26.10.1:80"
    for entry in out[0]['urls']:
        intf, url = entry.split(' : ')
        urls.setdefault(str(intf).strip(), list()).append(str(url).strip())
    result['ansible_facts'] = dict(eos_eapi_urls=urls)
def main():
    """ main entry point for module execution
    """
    # module options; aliases keep backwards compatibility with the older
    # enable_* option names
    argument_spec = dict(
        http=dict(aliases=['enable_http'], type='bool'),
        http_port=dict(type='int'),
        https=dict(aliases=['enable_https'], type='bool'),
        https_port=dict(type='int'),
        local_http=dict(aliases=['enable_local_http'], type='bool'),
        local_http_port=dict(type='int'),
        socket=dict(aliases=['enable_socket'], type='bool'),
        vrf=dict(default='default'),
        # deprecated: accepted but ignored (a warning is emitted below)
        config=dict(),
        state=dict(default='started', choices=['stopped', 'started']),
    )
    # add the shared eos connection arguments (provider etc.)
    argument_spec.update(eos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    # eAPI cannot be configured over eAPI itself; bail out early
    check_transport(module)
    result = {'changed': False}
    warnings = list()
    if module.params['config']:
        warnings.append('config parameter is no longer necessary and will be ignored')
    # desired (validated) state vs. state currently on the device
    want = map_params_to_obj(module)
    have = map_config_to_obj(module)
    commands = map_obj_to_commands((want, have), module, warnings)
    result['commands'] = commands
    if commands:
        # in check mode the config session is built but not committed
        commit = not module.check_mode
        response = load_config(module, commands, commit=commit)
        if response.get('diff') and module._diff:
            result['diff'] = {'prepared': response.get('diff')}
        result['session_name'] = response.get('session')
        result['changed'] = True
    if result['changed']:
        # block until the device reports the requested running state
        verify_state((want, have), module)
    collect_facts(module, result)
    if warnings:
        result['warnings'] = warnings
    module.exit_json(**result)
if __name__ == '__main__':
    main()
| gpl-3.0 |
Konbonix/DisasterSupplyTracker | apptest.py | 2 | 1316 | #!/usr/bin/python
import sys, os
import unittest2
import warnings
# silences Python's complaints about imports
warnings.filterwarnings('ignore',category=UserWarning)
# Help text printed when the SDK path argument is missing (see __main__ below).
USAGE = """
Path to your sdk must be the first argument. To run type:
$ apptest.py path/to/your/appengine/installation
Remember to set environment variable FLASK_CONF to TEST.
Loading configuration depending on the value of
environment variable allows you to add your own
testing configuration in src/application/settings.py
"""
def main(sdk_path, test_path):
    """Discover and run the test suite against the given App Engine SDK.

    :param sdk_path: file system path of the App Engine SDK installation
    :param test_path: directory that unittest discovery starts from
    """
    # the SDK must be importable before dev_appserver can fix sys.path
    sys.path.insert(0, sdk_path)
    import dev_appserver
    dev_appserver.fix_sys_path()
    # bundled third-party libraries live in ./lib
    lib_dir = os.path.join(os.path.abspath('.'), 'lib')
    sys.path.insert(1, lib_dir)
    suite = unittest2.loader.TestLoader().discover(test_path)
    runner = unittest2.TextTestRunner(verbosity=2)
    runner.run(suite)
if __name__ == '__main__':
    # See: http://code.google.com/appengine/docs/python/tools/localunittesting.html
    try:
        # Path to the SDK installation
        SDK_PATH = sys.argv[1]  # ...or hardcoded path
        # Path to tests folder.
        # BUG FIX: the original passed __name__ to os.path.abspath(), which
        # resolves the string "__main__" against the CWD instead of locating
        # this file; __file__ is what was intended.
        TEST_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tests')
        main(SDK_PATH, TEST_PATH)
    except IndexError:
        # you probably forgot about path as first argument
        # (parenthesized so it works under both Python 2 and 3)
        print(USAGE)
pyecs/servo | tests/wpt/web-platform-tests/tools/html5lib/doc/conf.py | 436 | 9028 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# html5lib documentation build configuration file, created by
# sphinx-quickstart on Wed May 8 00:04:49 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'html5lib'
copyright = '2006 - 2013, James Graham, Geoffrey Sneddon, and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
sys.path.append(os.path.abspath('..'))
from html5lib import __version__
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'theme']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'html5libdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'html5lib.tex', 'html5lib Documentation',
'James Graham, Geoffrey Sneddon, and contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'html5lib', 'html5lib Documentation',
['James Graham, Geoffrey Sneddon, and contributors'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'html5lib', 'html5lib Documentation',
'James Graham, Geoffrey Sneddon, and contributors', 'html5lib', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
class CExtMock(object):
    """Required for autodoc on readthedocs.org where you cannot build C extensions."""

    def __init__(self, *args, **kwargs):
        pass

    def __call__(self, *args, **kwargs):
        # a mocked callable (e.g. a factory) yields another mock
        return CExtMock()

    @classmethod
    def __getattr__(cls, name):
        # Sphinx probes these module attributes on mocked modules;
        # answer with a harmless dummy path, everything else mocks on.
        return '/dev/null' if name in ('__file__', '__path__') else CExtMock()
# Readthedocs cannot build C extensions, so when lxml/genshi are missing we
# install attribute mocks under their module names; autodoc can then still
# import modules that depend on them.
try:
    import lxml  # flake8: noqa
except ImportError:
    sys.modules['lxml'] = CExtMock()
    sys.modules['lxml.etree'] = CExtMock()
    print("warning: lxml modules mocked.")
try:
    import genshi  # flake8: noqa
except ImportError:
    sys.modules['genshi'] = CExtMock()
    sys.modules['genshi.core'] = CExtMock()
    print("warning: genshi modules mocked.")
| mpl-2.0 |
akhilman/cement | cement/ext/ext_configobj.py | 1 | 5599 | """
The ConfigObj Extension provides configuration handling based on
`configobj <http://www.voidspace.org.uk/python/configobj.html>`_. It is a
drop-in replacement for the default config handler
:class:`cement.ext.ext_configparser.ConfigParserConfigHandler`.
One of the primary features of ConfigObj is that you can access the
application configuration as a dictionary object.
Requirements
------------
* ConfigObj (``pip install configobj``)
Configuration
-------------
This extension does not honor any application configuration settings.
Usage
-----
.. code-block:: python
from cement.core.foundation import CementApp
class MyApp(CementApp):
class Meta:
label = 'myapp'
extensions = ['configobj']
config_handler = 'configobj'
with MyApp() as app:
app.run()
# get a config setting
app.config['myapp']['foo']
# set a config setting
app.config['myapp']['foo'] = 'bar2'
# etc.
"""
from ..core import config
from ..utils.misc import minimal_logger
from configobj import ConfigObj
LOG = minimal_logger(__name__)
class ConfigObjConfigHandler(config.CementConfigHandler, ConfigObj):
    """
    This class implements the :ref:`IConfig <cement.core.config>`
    interface, and sub-classes from `configobj.ConfigObj
    <http://www.voidspace.org.uk/python/configobj.html>`_,
    which is an external library and not included with Python.  Please
    reference the ConfigObj documentation for full usage of the class.

    Arguments and keyword arguments are passed directly to ConfigObj
    on initialization.
    """

    class Meta:
        """Handler meta-data."""
        interface = config.IConfig
        label = 'configobj'

    def __init__(self, *args, **kw):
        super(ConfigObjConfigHandler, self).__init__(*args, **kw)
        # set by _setup() once the handler is attached to an application
        self.app = None

    def _setup(self, app_obj):
        """Store a back-reference to the application object."""
        self.app = app_obj

    def get_sections(self):
        """
        Return a list of [section] that exist in the configuration.

        :returns: list
        """
        return self.sections

    def get_section_dict(self, section):
        """
        Return a dict representation of a section.

        :param section: The section of the configuration.
            I.e. ``[block_section]``
        :returns: dict
        """
        # idiomatic dict construction instead of manual accumulation
        return dict((key, self.get(section, key))
                    for key in self.keys(section))

    def _parse_file(self, file_path):
        """
        Parse a configuration file at `file_path` and store it.

        :param file_path: The file system path to the configuration file.
        :returns: boolean (True if file was read properly, False otherwise)
        """
        self.merge(ConfigObj(file_path).dict())

        # FIX ME: Should check that file was read properly, however if not it
        # will likely raise an exception anyhow.
        return True

    def keys(self, section):
        """
        Return a list of keys for a given section.

        :param section: The configuration [section].
        """
        return self[section].keys()

    def get(self, section, key):
        """
        Get a value for a given key under section.

        :param section: The configuration [section].
        :param key: The configuration key under the section.
        :returns: unknown (the value of the key)
        """
        return self[section][key]

    def set(self, section, key, value):
        """
        Set a configuration key value under [section].

        :param section: The configuration [section].
        :param key: The configuration key under the section.
        :param value: The value to set the key to.
        :returns: None
        """
        self[section][key] = value

    def has_section(self, section):
        """
        Return True/False whether the configuration [section] exists.

        :param section: The section to check for.
        :returns: bool
        """
        # simplified from an explicit if/else returning True/False
        return section in self.get_sections()

    def add_section(self, section):
        """
        Add a section to the configuration (no-op if it already exists).

        :param section: The configuration [section] to add.
        """
        if not self.has_section(section):
            self[section] = dict()

    def merge(self, dict_obj, override=True):
        """
        Merge a dictionary into our config.  If override is True then
        existing config values are overridden by those passed in.

        :param dict_obj: A dictionary of configuration keys/values to merge
            into our existing config (self).
        :param override: Whether or not to override existing values in the
            config.
        :returns: None
        """
        for section in list(dict_obj.keys()):
            # BUG FIX: was ``type(...) == dict`` which rejects dict
            # subclasses (e.g. OrderedDict); isinstance() accepts them.
            if not isinstance(dict_obj[section], dict):
                # we don't support nested config blocks, so no need to go
                # further down to more nested dicts.
                continue

            if section not in self.get_sections():
                self.add_section(section)

            for key in list(dict_obj[section].keys()):
                if override:
                    self.set(section, key, dict_obj[section][key])
                elif key not in self.keys(section):
                    # only set it if the key doesn't exist
                    self.set(section, key, dict_obj[section][key])
def load(app):
    """Cement extension entry point: register the handler with *app*."""
    app.handler.register(ConfigObjConfigHandler)
| bsd-3-clause |
jseabold/statsmodels | statsmodels/examples/ex_generic_mle.py | 5 | 14932 |
from functools import partial
import numpy as np
from scipy import stats
import statsmodels.api as sm
from statsmodels.base.model import GenericLikelihoodModel
from statsmodels.tools.numdiff import approx_fprime, approx_hess
data = sm.datasets.spector.load(as_pandas=False)
data.exog = sm.add_constant(data.exog, prepend=False)
# in this dir
probit_mod = sm.Probit(data.endog, data.exog)
probit_res = probit_mod.fit()
loglike = probit_mod.loglike
score = probit_mod.score
mod = GenericLikelihoodModel(data.endog, data.exog*2, loglike, score)
res = mod.fit(method="nm", maxiter = 500)
def probitloglike(params, endog, exog):
    """
    Log likelihood for the probit
    """
    # map {0, 1} responses onto {-1, +1} sign multipliers
    signs = 2 * endog - 1
    linpred = np.dot(exog, params)
    return stats.norm.logcdf(signs * linpred).sum()
model_loglike = partial(probitloglike, endog=data.endog, exog=data.exog)
mod = GenericLikelihoodModel(data.endog, data.exog, loglike=model_loglike)
res = mod.fit(method="nm", maxiter=500)
print(res)
np.allclose(res.params, probit_res.params, rtol=1e-4)
print(res.params, probit_res.params)
#datal = sm.datasets.longley.load(as_pandas=False)
datal = sm.datasets.ccard.load(as_pandas=False)
datal.exog = sm.add_constant(datal.exog, prepend=False)
# Instance of GenericLikelihood model does not work directly, because loglike
# cannot get access to data in self.endog, self.exog
nobs = 5000
rvs = np.random.randn(nobs,6)
datal.exog = rvs[:,:-1]
datal.exog = sm.add_constant(datal.exog, prepend=False)
datal.endog = 1 + rvs.sum(1)
show_error = False
show_error2 = 1#False
if show_error:
def loglike_norm_xb(self, params):
beta = params[:-1]
sigma = params[-1]
xb = np.dot(self.exog, beta)
return stats.norm.logpdf(self.endog, loc=xb, scale=sigma)
mod_norm = GenericLikelihoodModel(datal.endog, datal.exog, loglike_norm_xb)
res_norm = mod_norm.fit(method="nm", maxiter = 500)
print(res_norm.params)
if show_error2:
def loglike_norm_xb(params, endog, exog):
beta = params[:-1]
sigma = params[-1]
#print exog.shape, beta.shape
xb = np.dot(exog, beta)
#print xb.shape, stats.norm.logpdf(endog, loc=xb, scale=sigma).shape
return stats.norm.logpdf(endog, loc=xb, scale=sigma).sum()
model_loglike3 = partial(loglike_norm_xb,
endog=datal.endog, exog=datal.exog)
mod_norm = GenericLikelihoodModel(datal.endog, datal.exog, model_loglike3)
res_norm = mod_norm.fit(start_params=np.ones(datal.exog.shape[1]+1),
method="nm", maxiter = 5000)
print(res_norm.params)
class MygMLE(GenericLikelihoodModel):
    # just for testing
    def loglikeobs(self, params):
        """Per-observation normal log-likelihood with linear mean exog @ beta."""
        beta, sigma = params[:-1], params[-1]
        mean = np.dot(self.exog, beta)
        return stats.norm.logpdf(self.endog, loc=mean, scale=sigma)

    def loglike(self, params):
        """Total log-likelihood: sum of the per-observation contributions."""
        return self.loglikeobs(params).sum()
mod_norm2 = MygMLE(datal.endog, datal.exog)
#res_norm = mod_norm.fit(start_params=np.ones(datal.exog.shape[1]+1), method="nm", maxiter = 500)
res_norm2 = mod_norm2.fit(start_params=[1.]*datal.exog.shape[1]+[1], method="nm", maxiter = 500)
np.allclose(res_norm.params, res_norm2.params)
print(res_norm2.params)
res2 = sm.OLS(datal.endog, datal.exog).fit()
start_params = np.hstack((res2.params, np.sqrt(res2.mse_resid)))
res_norm3 = mod_norm2.fit(start_params=start_params, method="nm", maxiter = 500,
retall=0)
print(start_params)
print(res_norm3.params)
print(res2.bse)
print(res_norm3.bse)
print('llf', res2.llf, res_norm3.llf)
bse = np.sqrt(np.diag(np.linalg.inv(res_norm3.model.hessian(res_norm3.params))))
res_norm3.model.score(res_norm3.params)
#fprime in fit option cannot be overwritten, set to None, when score is defined
# exception is fixed, but I do not think score was supposed to be called
res_bfgs = mod_norm2.fit(start_params=start_params, method="bfgs", fprime=None,
maxiter=500, retall=0)
hb=-approx_hess(res_norm3.params, mod_norm2.loglike, epsilon=-1e-4)
hf=-approx_hess(res_norm3.params, mod_norm2.loglike, epsilon=1e-4)
hh = (hf+hb)/2.
print(np.linalg.eigh(hh))
grad = -approx_fprime(res_norm3.params, mod_norm2.loglike, epsilon=-1e-4)
print(grad)
gradb = -approx_fprime(res_norm3.params, mod_norm2.loglike, epsilon=-1e-4)
gradf = -approx_fprime(res_norm3.params, mod_norm2.loglike, epsilon=1e-4)
print((gradb+gradf)/2.)
print(res_norm3.model.score(res_norm3.params))
print(res_norm3.model.score(start_params))
mod_norm2.loglike(start_params/2.)
print(np.linalg.inv(-1*mod_norm2.hessian(res_norm3.params)))
print(np.sqrt(np.diag(res_bfgs.cov_params())))
print(res_norm3.bse)
print("MLE - OLS parameter estimates")
print(res_norm3.params[:-1] - res2.params)
print("bse diff in percent")
print((res_norm3.bse[:-1] / res2.bse)*100. - 100)
'''
Optimization terminated successfully.
Current function value: 12.818804
Iterations 6
Optimization terminated successfully.
Current function value: 12.818804
Iterations: 439
Function evaluations: 735
Optimization terminated successfully.
Current function value: 12.818804
Iterations: 439
Function evaluations: 735
<statsmodels.model.LikelihoodModelResults object at 0x02131290>
[ 1.6258006 0.05172931 1.42632252 -7.45229732] [ 1.62581004 0.05172895 1.42633234 -7.45231965]
Warning: Maximum number of function evaluations has been exceeded.
[ -1.18109149 246.94438535 -16.21235536 24.05282629 -324.80867176
274.07378453]
Warning: Maximum number of iterations has been exceeded
[ 17.57107 -149.87528787 19.89079376 -72.49810777 -50.06067953
306.14170418]
Optimization terminated successfully.
Current function value: 506.488765
Iterations: 339
Function evaluations: 550
[ -3.08181404 234.34702702 -14.99684418 27.94090839 -237.1465136
284.75079529]
[ -3.08181304 234.34701361 -14.99684381 27.94088692 -237.14649571
274.6857294 ]
[ 5.51471653 80.36595035 7.46933695 82.92232357 199.35166485]
llf -506.488764864 -506.488764864
Optimization terminated successfully.
Current function value: 506.488765
Iterations: 9
Function evaluations: 13
Gradient evaluations: 13
(array([ 2.41772580e-05, 1.62492628e-04, 2.79438138e-04,
1.90996240e-03, 2.07117946e-01, 1.28747174e+00]), array([[ 1.52225754e-02, 2.01838216e-02, 6.90127235e-02,
-2.57002471e-04, -5.25941060e-01, -8.47339404e-01],
[ 2.39797491e-01, -2.32325602e-01, -9.36235262e-01,
3.02434938e-03, 3.95614029e-02, -1.02035585e-01],
[ -2.11381471e-02, 3.01074776e-02, 7.97208277e-02,
-2.94955832e-04, 8.49402362e-01, -5.20391053e-01],
[ -1.55821981e-01, -9.66926643e-01, 2.01517298e-01,
1.52397702e-03, 4.13805882e-03, -1.19878714e-02],
[ -9.57881586e-01, 9.87911166e-02, -2.67819451e-01,
1.55192932e-03, -1.78717579e-02, -2.55757014e-02],
[ -9.96486655e-04, -2.03697290e-03, -2.98130314e-03,
-9.99992985e-01, -1.71500426e-05, 4.70854949e-06]]))
[[ -4.91007768e-05 -7.28732630e-07 -2.51941401e-05 -2.50111043e-08
-4.77484718e-08 -9.72022463e-08]]
[[ -1.64845915e-08 -2.87059265e-08 -2.88764568e-07 -6.82121026e-09
2.84217094e-10 -1.70530257e-09]]
[ -4.90678076e-05 -6.71320777e-07 -2.46166110e-05 -1.13686838e-08
-4.83169060e-08 -9.37916411e-08]
[ -4.56753924e-05 -6.50857146e-07 -2.31756303e-05 -1.70530257e-08
-4.43378667e-08 -1.75592936e-02]
[[ 2.99386348e+01 -1.24442928e+02 9.67254672e+00 -1.58968536e+02
-5.91960010e+02 -2.48738183e+00]
[ -1.24442928e+02 5.62972166e+03 -5.00079203e+02 -7.13057475e+02
-7.82440674e+03 -1.05126925e+01]
[ 9.67254672e+00 -5.00079203e+02 4.87472259e+01 3.37373299e+00
6.96960872e+02 7.69866589e-01]
[ -1.58968536e+02 -7.13057475e+02 3.37373299e+00 6.82417837e+03
4.84485862e+03 3.21440021e+01]
[ -5.91960010e+02 -7.82440674e+03 6.96960872e+02 4.84485862e+03
3.43753691e+04 9.37524459e+01]
[ -2.48738183e+00 -1.05126925e+01 7.69866589e-01 3.21440021e+01
9.37524459e+01 5.23915258e+02]]
>>> res_norm3.bse
array([ 5.47162086, 75.03147114, 6.98192136, 82.60858536,
185.40595756, 22.88919522])
>>> print res_norm3.model.score(res_norm3.params)
[ -4.90678076e-05 -6.71320777e-07 -2.46166110e-05 -1.13686838e-08
-4.83169060e-08 -9.37916411e-08]
>>> print res_norm3.model.score(start_params)
[ -4.56753924e-05 -6.50857146e-07 -2.31756303e-05 -1.70530257e-08
-4.43378667e-08 -1.75592936e-02]
>>> mod_norm2.loglike(start_params/2.)
-598.56178102781314
>>> print np.linalg.inv(-1*mod_norm2.hessian(res_norm3.params))
[[ 2.99386348e+01 -1.24442928e+02 9.67254672e+00 -1.58968536e+02
-5.91960010e+02 -2.48738183e+00]
[ -1.24442928e+02 5.62972166e+03 -5.00079203e+02 -7.13057475e+02
-7.82440674e+03 -1.05126925e+01]
[ 9.67254672e+00 -5.00079203e+02 4.87472259e+01 3.37373299e+00
6.96960872e+02 7.69866589e-01]
[ -1.58968536e+02 -7.13057475e+02 3.37373299e+00 6.82417837e+03
4.84485862e+03 3.21440021e+01]
[ -5.91960010e+02 -7.82440674e+03 6.96960872e+02 4.84485862e+03
3.43753691e+04 9.37524459e+01]
[ -2.48738183e+00 -1.05126925e+01 7.69866589e-01 3.21440021e+01
9.37524459e+01 5.23915258e+02]]
>>> print np.sqrt(np.diag(res_bfgs.cov_params()))
[ 5.10032831 74.34988912 6.96522122 76.7091604 169.8117832
22.91695494]
>>> print res_norm3.bse
[ 5.47162086 75.03147114 6.98192136 82.60858536 185.40595756
22.88919522]
>>> res_norm3.conf_int
<bound method LikelihoodModelResults.conf_int of <statsmodels.model.LikelihoodModelResults object at 0x021317F0>>
>>> res_norm3.conf_int()
array([[0.96421437, 1.01999835],
[0.99251725, 1.04863332],
[0.95721328, 1.01246222],
[0.97134549, 1.02695393],
[0.97050081, 1.02660988],
[0.97773434, 1.03290028],
[0.97529207, 1.01428874]])
>>> res_norm3.params
array([ -3.08181304, 234.34701361, -14.99684381, 27.94088692,
-237.14649571, 274.6857294 ])
>>> res2.params
array([ -3.08181404, 234.34702702, -14.99684418, 27.94090839,
-237.1465136 ])
>>>
>>> res_norm3.params - res2.params
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ValueError: shape mismatch: objects cannot be broadcast to a single shape
>>> res_norm3.params[:-1] - res2.params
array([ 9.96859735e-07, -1.34122981e-05, 3.72278400e-07,
-2.14645839e-05, 1.78919019e-05])
>>>
>>> res_norm3.bse[:-1] - res2.bse
array([ -0.04309567, -5.33447922, -0.48741559, -0.31373822, -13.94570729])
>>> (res_norm3.bse[:-1] / res2.bse) - 1
array([-0.00781467, -0.06637735, -0.06525554, -0.00378352, -0.06995531])
>>> (res_norm3.bse[:-1] / res2.bse)*100. - 100
array([-0.7814667 , -6.6377355 , -6.52555369, -0.37835193, -6.99553089])
>>> np.sqrt(np.diag(np.linalg.inv(res_norm3.model.hessian(res_bfgs.params))))
array([ NaN, NaN, NaN, NaN, NaN, NaN])
>>> np.sqrt(np.diag(np.linalg.inv(-res_norm3.model.hessian(res_bfgs.params))))
array([ 5.10032831, 74.34988912, 6.96522122, 76.7091604 ,
169.8117832 , 22.91695494])
>>> res_norm3.bse
array([ 5.47162086, 75.03147114, 6.98192136, 82.60858536,
185.40595756, 22.88919522])
>>> res2.bse
array([ 5.51471653, 80.36595035, 7.46933695, 82.92232357,
199.35166485])
>>>
>>> bse_bfgs = np.sqrt(np.diag(np.linalg.inv(-res_norm3.model.hessian(res_bfgs.params))))
>>> (bse_bfgs[:-1] / res2.bse)*100. - 100
array([ -7.51422527, -7.4858335 , -6.74913633, -7.49275094, -14.8179759 ])
>>> hb=-approx_hess(res_bfgs.params, mod_norm2.loglike, epsilon=-1e-4)
>>> hf=-approx_hess(res_bfgs.params, mod_norm2.loglike, epsilon=1e-4)
>>> hh = (hf+hb)/2.
>>> bse_bfgs = np.sqrt(np.diag(np.linalg.inv(-hh)))
>>> bse_bfgs
array([ NaN, NaN, NaN, NaN, NaN, NaN])
>>> bse_bfgs = np.sqrt(np.diag(np.linalg.inv(hh)))
>>> np.diag(hh)
array([ 9.81680159e-01, 1.39920076e-02, 4.98101826e-01,
3.60955710e-04, 9.57811608e-04, 1.90709670e-03])
>>> np.diag(np.inv(hh))
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'module' object has no attribute 'inv'
>>> np.diag(np.linalg.inv(hh))
array([ 2.64875153e+01, 5.91578496e+03, 5.13279911e+01,
6.11533345e+03, 3.33775960e+04, 5.24357391e+02])
>>> res2.bse**2
array([ 3.04120984e+01, 6.45868598e+03, 5.57909945e+01,
6.87611175e+03, 3.97410863e+04])
>>> bse_bfgs
array([ 5.14660231, 76.91414015, 7.1643556 , 78.20059751,
182.69536402, 22.89885131])
>>> bse_bfgs - res_norm3.bse
array([-0.32501855, 1.88266901, 0.18243424, -4.40798785, -2.71059354,
0.00965609])
>>> (bse_bfgs[:-1] / res2.bse)*100. - 100
array([-6.67512508, -4.29511526, -4.0831115 , -5.69415552, -8.35523538])
>>> (res_norm3.bse[:-1] / res2.bse)*100. - 100
array([-0.7814667 , -6.6377355 , -6.52555369, -0.37835193, -6.99553089])
>>> (bse_bfgs / res_norm3.bse)*100. - 100
array([-5.94007812, 2.50917247, 2.61295176, -5.33599242, -1.46197759,
0.04218624])
>>> bse_bfgs
array([ 5.14660231, 76.91414015, 7.1643556 , 78.20059751,
182.69536402, 22.89885131])
>>> res_norm3.bse
array([ 5.47162086, 75.03147114, 6.98192136, 82.60858536,
185.40595756, 22.88919522])
>>> res2.bse
array([ 5.51471653, 80.36595035, 7.46933695, 82.92232357,
199.35166485])
>>> dir(res_bfgs)
['__class__', '__delattr__', '__dict__', '__doc__', '__getattribute__', '__hash__', '__init__', '__module__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__str__', '__weakref__', 'bse', 'conf_int', 'cov_params', 'f_test', 'initialize', 'llf', 'mle_retvals', 'mle_settings', 'model', 'normalized_cov_params', 'params', 'scale', 't', 't_test']
>>> res_bfgs.scale
1.0
>>> res2.scale
81083.015420213851
>>> res2.mse_resid
81083.015420213851
>>> print np.sqrt(np.diag(np.linalg.inv(-1*mod_norm2.hessian(res_bfgs.params))))
[ 5.10032831 74.34988912 6.96522122 76.7091604 169.8117832
22.91695494]
>>> print np.sqrt(np.diag(np.linalg.inv(-1*res_bfgs.model.hessian(res_bfgs.params))))
[ 5.10032831 74.34988912 6.96522122 76.7091604 169.8117832
22.91695494]
Is scale a misnomer, actually scale squared, i.e. variance of error term ?
'''
print(res_norm3.model.score_obs(res_norm3.params).shape)
jac = res_norm3.model.score_obs(res_norm3.params)
print(np.sqrt(np.diag(np.dot(jac.T, jac)))/start_params)
jac2 = res_norm3.model.score_obs(res_norm3.params, centered=True)
print(np.sqrt(np.diag(np.linalg.inv(np.dot(jac.T, jac)))))
print(res_norm3.bse)
print(res2.bse)
| bsd-3-clause |
cbingos/cpro | jieba/posseg/__init__.py | 3 | 7424 | from __future__ import with_statement
import re
import os
import viterbi
import jieba
import sys
import marshal
from functools import wraps
# Encoding used when rendering pair objects as byte strings (Python 2 str).
default_encoding = sys.getfilesystemencoding()
# File names of the marshal'ed HMM model tables shipped next to this module.
PROB_START_P = "prob_start.p"
PROB_TRANS_P = "prob_trans.p"
PROB_EMIT_P = "prob_emit.p"
CHAR_STATE_TAB_P = "char_state_tab.p"
# Set to True the first time a decorated entry point merges the user's
# custom word/tag table (see makesure_userdict_loaded below).
userdict_loaded = False
def load_model(f_name, isJython=True):
	"""Load the POS-tagging model data.

	Parses the plain-text dictionary at f_name into a word -> tag mapping.
	When isJython is True, additionally loads the marshal'ed HMM tables
	(start/transition/emission probabilities and the char state table)
	that live next to this module, and returns all five objects; when
	False, returns only the word/tag mapping.
	"""
	_curpath = os.path.normpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
	result = {}
	# BUG FIX: the original opened f_name twice -- once via the context
	# manager and once more with a bare open() whose handle was iterated and
	# never closed (the trailing `f.closed` was a no-op attribute access,
	# not a close). Iterate the managed handle instead.
	with open(f_name, "rb") as f:
		for line in f:
			line = line.strip()
			if line == "":
				continue
			word, _, tag = line.split(' ')
			result[word.decode('utf-8')] = tag
	if not isJython:
		return result
	# Jython cannot import the pre-generated model modules, so the binary
	# marshal dumps are loaded directly.
	start_p = {}
	abs_path = os.path.join(_curpath, PROB_START_P)
	with open(abs_path, 'rb') as f:
		start_p = marshal.load(f)
	trans_p = {}
	abs_path = os.path.join(_curpath, PROB_TRANS_P)
	with open(abs_path, 'rb') as f:
		trans_p = marshal.load(f)
	emit_p = {}
	abs_path = os.path.join(_curpath, PROB_EMIT_P)
	with open(abs_path, 'rb') as f:
		emit_p = marshal.load(f)
	state = {}
	abs_path = os.path.join(_curpath, CHAR_STATE_TAB_P)
	with open(abs_path, 'rb') as f:
		state = marshal.load(f)
	return state, start_p, trans_p, emit_p, result
# Choose the model-loading strategy once at import time: on Jython the
# marshal'ed binary tables are parsed by load_model (presumably because the
# generated .py model modules cannot be imported there -- see load_model);
# on CPython the pre-generated modules are imported directly and load_model
# only supplies the word/tag dictionary.
if sys.platform.startswith("java"):
	char_state_tab_P, start_P, trans_P, emit_P, word_tag_tab = load_model(jieba.get_abs_path_dict())
else:
	import char_state_tab, prob_start, prob_trans, prob_emit
	char_state_tab_P, start_P, trans_P, emit_P = char_state_tab.P, prob_start.P, prob_trans.P, prob_emit.P
	word_tag_tab = load_model(jieba.get_abs_path_dict(),isJython=False)
def makesure_userdict_loaded(fn):
	"""Decorator: lazily merge jieba's user word/tag table before first use.

	On the first call of any decorated function the user-defined tags are
	folded into word_tag_tab; subsequent calls go straight through.
	"""
	@wraps(fn)
	def wrapped(*args, **kwargs):
		global userdict_loaded
		if not userdict_loaded:
			word_tag_tab.update(jieba.user_word_tag_tab)
			userdict_loaded = True
		return fn(*args, **kwargs)
	return wrapped
class pair(object):
	"""A (word, part-of-speech flag) pair yielded by the POS tokenizer."""
	def __init__(self, word, flag):
		self.word = word
		self.flag = flag
	def __unicode__(self):
		# Rendered as "word/flag".
		return self.word + u"/" + self.flag
	def __str__(self):
		# Python 2: a byte string in the filesystem encoding.
		return self.__unicode__().encode(default_encoding)
	def __repr__(self):
		return self.__str__()
	def encode(self, arg):
		# Encode the "word/flag" form with an explicit codec.
		return self.__unicode__().encode(arg)
def __cut(sentence):
	"""Cut a pure-Hanzi block with the HMM and yield pair objects.

	Runs Viterbi over the character-level BEMS states; B..E spans and S
	singletons become words tagged with the state's POS label.
	"""
	prob, pos_list = viterbi.viterbi(sentence, char_state_tab_P, start_P, trans_P, emit_P)
	begin = 0
	nexti = 0
	for i, char in enumerate(sentence):
		pos = pos_list[i][0]
		if pos == 'B':
			begin = i
		elif pos == 'E':
			yield pair(sentence[begin:i + 1], pos_list[i][1])
			nexti = i + 1
		elif pos == 'S':
			yield pair(char, pos_list[i][1])
			nexti = i + 1
	# Flush whatever trails after the last complete word.
	if nexti < len(sentence):
		yield pair(sentence[nexti:], pos_list[nexti][1])
def __cut_detail(sentence):
re_han, re_skip = re.compile(ur"([\u4E00-\u9FA5]+)"), re.compile(ur"([\.0-9]+|[a-zA-Z0-9]+)")
re_eng,re_num = re.compile(ur"[a-zA-Z0-9]+"), re.compile(ur"[\.0-9]+")
blocks = re_han.split(sentence)
for blk in blocks:
if re_han.match(blk):
for word in __cut(blk):
yield word
else:
tmp = re_skip.split(blk)
for x in tmp:
if x!="":
if re_num.match(x):
yield pair(x,'m')
elif re_eng.match(x):
yield pair(x,'eng')
else:
yield pair(x,'x')
def __cut_DAG_NO_HMM(sentence):
DAG = jieba.get_DAG(sentence)
route ={}
jieba.calc(sentence,DAG,0,route=route)
x = 0
N = len(sentence)
buf =u''
re_eng = re.compile(ur'[a-zA-Z0-9]',re.U)
while x<N:
y = route[x][1]+1
l_word = sentence[x:y]
if re_eng.match(l_word) and len(l_word)==1:
buf += l_word
x = y
else:
if len(buf)>0:
yield pair(buf,'eng')
buf = u''
yield pair(l_word,word_tag_tab.get(l_word,'x'))
x =y
if len(buf)>0:
yield pair(buf,'eng')
buf = u''
def __cut_DAG(sentence):
	"""Cut via the dictionary DAG, sending unknown single-char runs to the HMM.

	Single-character fragments are buffered; when flushed, a lone char is
	tagged directly, a multi-char run absent from the dictionary goes to
	__cut_detail (HMM), and a known run is emitted character by character.
	"""
	DAG = jieba.get_DAG(sentence)
	route = {}
	jieba.calc(sentence, DAG, 0, route=route)
	def flush(buf):
		# One place for the buffered-run handling the original duplicated
		# both inside and after the main loop.
		if len(buf) == 1:
			yield pair(buf, word_tag_tab.get(buf, 'x'))
		elif buf not in jieba.FREQ:
			for t in __cut_detail(buf):
				yield t
		else:
			for ch in buf:
				yield pair(ch, word_tag_tab.get(ch, 'x'))
	x = 0
	buf = u''
	N = len(sentence)
	while x < N:
		y = route[x][1] + 1
		frag = sentence[x:y]
		if y - x == 1:
			buf += frag
		else:
			if buf:
				for t in flush(buf):
					yield t
				buf = u''
			yield pair(frag, word_tag_tab.get(frag, 'x'))
		x = y
	if buf:
		for t in flush(buf):
			yield t
def __cut_internal(sentence,HMM=True):
	"""Top-level cut of an arbitrary sentence into pair objects.

	Coerces byte input to unicode (utf-8, falling back to gbk), splits the
	sentence into word-like blocks, and segments each block with either the
	HMM-backed or the DAG-only cutter.
	"""
	# Normalise to unicode first; gbk with 'ignore' is the lossy fallback.
	if not ( type(sentence) is unicode):
		try:
			sentence = sentence.decode('utf-8')
		except:
			sentence = sentence.decode('gbk','ignore')
	# re_han: Hanzi/alnum/word-punctuation runs worth segmenting;
	# re_skip: newline/whitespace separators kept as 'x' tokens.
	re_han, re_skip = re.compile(ur"([\u4E00-\u9FA5a-zA-Z0-9+#&\._]+)"), re.compile(ur"(\r\n|\s)")
	re_eng,re_num = re.compile(ur"[a-zA-Z0-9]+"), re.compile(ur"[\.0-9]+")
	blocks = re_han.split(sentence)
	if HMM:
		__cut_blk = __cut_DAG
	else:
		__cut_blk = __cut_DAG_NO_HMM
	for blk in blocks:
		if re_han.match(blk):
			for word in __cut_blk(blk):
				yield word
		else:
			tmp = re_skip.split(blk)
			for x in tmp:
				if re_skip.match(x):
					yield pair(x,'x')
				else:
					# Unmatched residue is emitted one character at a time.
					for xx in x:
						if re_num.match(xx):
							yield pair(xx,'m')
						# NOTE(review): this tests the whole run `x`, not the
						# char `xx` -- seemingly deliberate (a run that starts
						# alphanumeric tags each char 'eng'), but confirm.
						elif re_eng.match(x):
							yield pair(xx,'eng')
						else:
							yield pair(xx,'x')
def __lcut_internal(sentence):
	# List-returning wrapper (HMM on); used as the picklable worker that
	# cut() hands to jieba.pool.map.
	return list(__cut_internal(sentence))
def __lcut_internal_no_hmm(sentence):
	# List-returning wrapper (HMM off); parallel counterpart for cut().
	return list(__cut_internal(sentence,False))
@makesure_userdict_loaded
def cut(sentence, HMM=True):
	"""POS-tag a sentence, yielding pair(word, flag) objects.

	Runs serially unless jieba has a worker pool attached, in which case
	the sentence is split on newline runs and the chunks are tagged in
	parallel, preserving order.
	"""
	pool = getattr(jieba, 'pool', None)
	if pool is None:
		for w in __cut_internal(sentence, HMM=HMM):
			yield w
	else:
		# Keep the newline separators (capturing group) so the output
		# reproduces the original line structure.
		parts = re.compile('([\r\n]+)').split(sentence)
		worker = __lcut_internal if HMM else __lcut_internal_no_hmm
		for part_result in pool.map(worker, parts):
			for w in part_result:
				yield w
| mit |
mancoast/CPythonPyc_test | fail/310_test_codecmaps_kr.py | 9 | 1412 | #!/usr/bin/env python
#
# test_codecmaps_kr.py
# Codec mapping tests for ROK encodings
#
from test import support
from test import test_multibytecodec_support
import unittest
class TestCP949Map(test_multibytecodec_support.TestBase_Mapping,
                   unittest.TestCase):
    """Round-trip the cp949 codec against Microsoft's published mapping."""
    encoding = 'cp949'
    mapfileurl = 'http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT' \
                 '/WINDOWS/CP949.TXT'
class TestEUCKRMap(test_multibytecodec_support.TestBase_Mapping,
                   unittest.TestCase):
    """Round-trip the euc_kr codec against the FreeBSD reference mapping."""
    encoding = 'euc_kr'
    mapfileurl = 'http://people.freebsd.org/~perky/i18n/EUC-KR.TXT'
    # A4D4 HANGUL FILLER indicates the begin of 8-bytes make-up sequence.
    pass_enctest = [(b'\xa4\xd4', '\u3164')]
    pass_dectest = [(b'\xa4\xd4', '\u3164')]
class TestJOHABMap(test_multibytecodec_support.TestBase_Mapping,
                   unittest.TestCase):
    """Round-trip the johab codec against the Unicode consortium mapping."""
    encoding = 'johab'
    mapfileurl = 'http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/' \
                 'KSC/JOHAB.TXT'
    # KS X 1001 standard assigned 0x5c as WON SIGN.
    # but, in early 90s that is the only era used johab widely,
    # the most softwares implements it as REVERSE SOLIDUS.
    # So, we ignore the standard here.
    pass_enctest = [(b'\\', '\u20a9')]
    pass_dectest = [(b'\\', '\u20a9')]
def test_main():
    """Run every mapping TestCase defined in this module."""
    support.run_unittest(__name__)
if __name__ == "__main__":
    test_main()
| gpl-3.0 |
lmcro/webserver | admin/plugins/round_robin.py | 5 | 1034 | # -*- coding: utf-8 -*-
#
# Cherokee-admin
#
# Authors:
# Alvaro Lopez Ortega <alvaro@alobbs.com>
#
# Copyright (C) 2001-2014 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import CTK
import Balancer
class Plugin_round_robin (Balancer.PluginBalancer):
    """Cherokee-admin configuration plugin for the round-robin balancer.

    Round-robin needs no settings of its own, so the whole UI is the
    common widget set provided by the PluginBalancer base class.
    """
    def __init__ (self, key, **kwargs):
        # Base init, then attach the shared balancer widgets.
        Balancer.PluginBalancer.__init__ (self, key, **kwargs)
        Balancer.PluginBalancer.AddCommon (self)
| gpl-2.0 |
jmcguire/learning | design_patterns/composite/composite_iterator.py | 1 | 1535 | from menu import MenuComponent, MenuItem, MenuComposite, create_test_data, Waitress
class MenuComponentIterator(object):
  """Depth-first iterator over a MenuComponent tree.

  MenuItem children are yielded directly; each MenuComposite child is
  descended into via its own iterator before moving on to the next child.
  """
  def __init__(self, component):
    self.component = component
    self.i = 0            # index of the next direct child to visit
    self.sub_iter = None  # active iterator for a composite child, if any
  def __iter__(self):
    return self
  def next(self):
    # Iterative form of the original's tail-recursion: each pass either
    # yields an element, finishes a drained sub-iterator, or enters a
    # composite child and loops to pull its first element.
    while True:
      if self.sub_iter is not None:
        try:
          return self.sub_iter.next()
        except StopIteration:
          self.sub_iter = None
      if self.i >= len(self.component.components):
        raise StopIteration()
      child = self.component.get_child(self.i)
      self.i += 1
      if isinstance(child, MenuItem):
        return child
      elif isinstance(child, MenuComposite):
        self.sub_iter = iter(child)
      else:
        raise Exception("Something has gone terribly wrong.")
# teach MenuComponent how to iterate over itself
def menu_component_iter(self):
return MenuComponentIterator(self)
MenuComponent.__iter__ = menu_component_iter
# give Waitress the ability to print a vegetarian menu
def print_veggie_menu(self):
print "\nVegetarian Menu\n%s" % ("-" * 20)
for item in self.menu:
if item.is_veggie:
item.print_()
Waitress.print_veggie_menu = print_veggie_menu
# testing
if __name__ == '__main__':
restaurant_menu = create_test_data()
waitress = Waitress(restaurant_menu)
waitress.print_veggie_menu()
| mit |
vprime/puuuu | env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.py | 1727 | 10500 | from __future__ import absolute_import, division, unicode_literals
from . import _base
class Filter(_base.Filter):
    """Serializer filter that drops tags whose omission HTML permits.

    Tokens flow through __iter__ with one token of lookbehind/lookahead
    (via slider); a start or end tag token is suppressed whenever the
    is_optional_start/is_optional_end rules say it may be omitted.
    """
    def slider(self):
        """Yield (previous, current, next) triples over the source tokens."""
        previous1 = previous2 = None
        for token in self.source:
            if previous1 is not None:
                yield previous2, previous1, token
            previous2 = previous1
            previous1 = token
        yield previous2, previous1, None
    def __iter__(self):
        for previous, token, next in self.slider():
            type = token["type"]
            if type == "StartTag":
                # A start tag carrying attributes can never be omitted.
                if (token["data"] or
                    not self.is_optional_start(token["name"], previous, next)):
                    yield token
            elif type == "EndTag":
                if not self.is_optional_end(token["name"], next):
                    yield token
            else:
                yield token
    def is_optional_start(self, tagname, previous, next):
        """Return True if tagname's start tag may be omitted here."""
        type = next and next["type"] or None
        # BUG FIX: was `tagname in 'html'`, a substring test that also
        # matched names like '' or 'ht'; equality is what the sibling
        # branches (all using ==) intend.
        if tagname == 'html':
            # An html element's start tag may be omitted if the first thing
            # inside the html element is not a space character or a comment.
            return type not in ("Comment", "SpaceCharacters")
        elif tagname == 'head':
            # A head element's start tag may be omitted if the first thing
            # inside the head element is an element.
            # XXX: we also omit the start tag if the head element is empty
            if type in ("StartTag", "EmptyTag"):
                return True
            elif type == "EndTag":
                return next["name"] == "head"
        elif tagname == 'body':
            # A body element's start tag may be omitted if the first thing
            # inside the body element is not a space character or a comment,
            # except if the first thing inside the body element is a script
            # or style element and the node immediately preceding the body
            # element is a head element whose end tag has been omitted.
            if type in ("Comment", "SpaceCharacters"):
                return False
            elif type == "StartTag":
                # XXX: we do not look at the preceding event, so we never omit
                # the body element's start tag if it's followed by a script or
                # a style element.
                return next["name"] not in ('script', 'style')
            else:
                return True
        elif tagname == 'colgroup':
            # A colgroup element's start tag may be omitted if the first thing
            # inside the colgroup element is a col element, and if the element
            # is not immediately preceeded by another colgroup element whose
            # end tag has been omitted.
            if type in ("StartTag", "EmptyTag"):
                # XXX: we do not look at the preceding event, so instead we never
                # omit the colgroup element's end tag when it is immediately
                # followed by another colgroup element. See is_optional_end.
                return next["name"] == "col"
            else:
                return False
        elif tagname == 'tbody':
            # A tbody element's start tag may be omitted if the first thing
            # inside the tbody element is a tr element, and if the element is
            # not immediately preceeded by a tbody, thead, or tfoot element
            # whose end tag has been omitted.
            if type == "StartTag":
                # omit the thead and tfoot elements' end tag when they are
                # immediately followed by a tbody element. See is_optional_end.
                if previous and previous['type'] == 'EndTag' and \
                   previous['name'] in ('tbody', 'thead', 'tfoot'):
                    return False
                return next["name"] == 'tr'
            else:
                return False
        return False
    def is_optional_end(self, tagname, next):
        """Return True if tagname's end tag may be omitted here."""
        type = next and next["type"] or None
        if tagname in ('html', 'head', 'body'):
            # An html element's end tag may be omitted if the html element
            # is not immediately followed by a space character or a comment.
            return type not in ("Comment", "SpaceCharacters")
        elif tagname in ('li', 'optgroup', 'tr'):
            # A li element's end tag may be omitted if the li element is
            # immediately followed by another li element or if there is
            # no more content in the parent element.
            # An optgroup element's end tag may be omitted if the optgroup
            # element is immediately followed by another optgroup element,
            # or if there is no more content in the parent element.
            # A tr element's end tag may be omitted if the tr element is
            # immediately followed by another tr element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] == tagname
            else:
                return type == "EndTag" or type is None
        elif tagname in ('dt', 'dd'):
            # A dt element's end tag may be omitted if the dt element is
            # immediately followed by another dt element or a dd element.
            # A dd element's end tag may be omitted if the dd element is
            # immediately followed by another dd element or a dt element,
            # or if there is no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('dt', 'dd')
            elif tagname == 'dd':
                return type == "EndTag" or type is None
            else:
                return False
        elif tagname == 'p':
            # A p element's end tag may be omitted if the p element is
            # immediately followed by an address, article, aside,
            # blockquote, datagrid, dialog, dir, div, dl, fieldset,
            # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu,
            # nav, ol, p, pre, section, table, or ul, element, or if
            # there is no more content in the parent element.
            if type in ("StartTag", "EmptyTag"):
                return next["name"] in ('address', 'article', 'aside',
                                        'blockquote', 'datagrid', 'dialog',
                                        'dir', 'div', 'dl', 'fieldset', 'footer',
                                        'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
                                        'header', 'hr', 'menu', 'nav', 'ol',
                                        'p', 'pre', 'section', 'table', 'ul')
            else:
                return type == "EndTag" or type is None
        elif tagname == 'option':
            # An option element's end tag may be omitted if the option
            # element is immediately followed by another option element,
            # or if it is immediately followed by an <code>optgroup</code>
            # element, or if there is no more content in the parent
            # element.
            if type == "StartTag":
                return next["name"] in ('option', 'optgroup')
            else:
                return type == "EndTag" or type is None
        elif tagname in ('rt', 'rp'):
            # An rt element's end tag may be omitted if the rt element is
            # immediately followed by an rt or rp element, or if there is
            # no more content in the parent element.
            # An rp element's end tag may be omitted if the rp element is
            # immediately followed by an rt or rp element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('rt', 'rp')
            else:
                return type == "EndTag" or type is None
        elif tagname == 'colgroup':
            # A colgroup element's end tag may be omitted if the colgroup
            # element is not immediately followed by a space character or
            # a comment.
            if type in ("Comment", "SpaceCharacters"):
                return False
            elif type == "StartTag":
                # XXX: we also look for an immediately following colgroup
                # element. See is_optional_start.
                return next["name"] != 'colgroup'
            else:
                return True
        elif tagname in ('thead', 'tbody'):
            # A thead element's end tag may be omitted if the thead element
            # is immediately followed by a tbody or tfoot element.
            # A tbody element's end tag may be omitted if the tbody element
            # is immediately followed by a tbody or tfoot element, or if
            # there is no more content in the parent element.
            # A tfoot element's end tag may be omitted if the tfoot element
            # is immediately followed by a tbody element, or if there is no
            # more content in the parent element.
            # XXX: we never omit the end tag when the following element is
            # a tbody. See is_optional_start.
            if type == "StartTag":
                return next["name"] in ['tbody', 'tfoot']
            elif tagname == 'tbody':
                return type == "EndTag" or type is None
            else:
                return False
        elif tagname == 'tfoot':
            # A tfoot element's end tag may be omitted if the tfoot element
            # is immediately followed by a tbody element, or if there is no
            # more content in the parent element.
            # XXX: we never omit the end tag when the following element is
            # a tbody. See is_optional_start.
            if type == "StartTag":
                return next["name"] == 'tbody'
            else:
                return type == "EndTag" or type is None
        elif tagname in ('td', 'th'):
            # A td element's end tag may be omitted if the td element is
            # immediately followed by a td or th element, or if there is
            # no more content in the parent element.
            # A th element's end tag may be omitted if the th element is
            # immediately followed by a td or th element, or if there is
            # no more content in the parent element.
            if type == "StartTag":
                return next["name"] in ('td', 'th')
            else:
                return type == "EndTag" or type is None
        return False
| mit |
gmt/portage | pym/portage/tests/resolver/test_slot_collisions.py | 9 | 9455 | # Copyright 2010-2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase
class SlotCollisionTestCase(TestCase):
	"""Resolver tests around slot conflicts and their proposed solutions."""
	def testSlotCollision(self):
		"""Exercise assorted slot-conflict scenarios: USE-flag driven
		conflicts (with and without viable flag-change solutions),
		version-based conflicts, sub-slot conflicts, and conflicts whose
		only flag-change "solution" would violate REQUIRED_USE."""
		ebuilds = {
			"dev-libs/A-1": { "PDEPEND": "foo? ( dev-libs/B )", "IUSE": "foo" },
			"dev-libs/B-1": { "IUSE": "foo" },
			"dev-libs/C-1": { "DEPEND": "dev-libs/A[foo]", "EAPI": 2 },
			"dev-libs/D-1": { "DEPEND": "dev-libs/A[foo=] dev-libs/B[foo=]", "IUSE": "foo", "EAPI": 2 },
			"dev-libs/E-1": {  },
			"dev-libs/E-2": { "IUSE": "foo" },
			"app-misc/Z-1": { },
			"app-misc/Z-2": { },
			"app-misc/Y-1": { "DEPEND": "=app-misc/Z-1" },
			"app-misc/Y-2": { "DEPEND": ">app-misc/Z-1" },
			"app-misc/X-1": { "DEPEND": "=app-misc/Z-2" },
			"app-misc/X-2": { "DEPEND": "<app-misc/Z-2" },
			"sci-libs/K-1": { "IUSE": "+foo", "EAPI": 1 },
			"sci-libs/L-1": { "DEPEND": "sci-libs/K[-foo]", "EAPI": 2 },
			"sci-libs/M-1": { "DEPEND": "sci-libs/K[foo=]", "IUSE": "+foo", "EAPI": 2 },
			"sci-libs/Q-1": { "SLOT": "1", "IUSE": "+bar foo", "EAPI": 1 },
			"sci-libs/Q-2": { "SLOT": "2", "IUSE": "+bar +foo", "EAPI": 2, "PDEPEND": "sci-libs/Q:1[bar?,foo?]" },
			"sci-libs/P-1": { "DEPEND": "sci-libs/Q:1[foo=]", "IUSE": "foo", "EAPI": 2 },
			"sys-libs/A-1": { "RDEPEND": "foo? ( sys-libs/J[foo=] )", "IUSE": "+foo", "EAPI": "4" },
			"sys-libs/B-1": { "RDEPEND": "bar? ( sys-libs/J[bar=] )", "IUSE": "+bar", "EAPI": "4" },
			"sys-libs/C-1": { "RDEPEND": "sys-libs/J[bar]", "EAPI": "4" },
			"sys-libs/D-1": { "RDEPEND": "sys-libs/J[bar?]", "IUSE": "bar", "EAPI": "4" },
			"sys-libs/E-1": { "RDEPEND": "sys-libs/J[foo(+)?]", "IUSE": "+foo", "EAPI": "4" },
			"sys-libs/F-1": { "RDEPEND": "sys-libs/J[foo(+)]", "EAPI": "4" },
			"sys-libs/J-1": { "IUSE": "+foo", "EAPI": "4" },
			"sys-libs/J-2": { "IUSE": "+bar", "EAPI": "4" },
			"app-misc/A-1": { "IUSE": "foo +bar", "REQUIRED_USE": "^^ ( foo bar )", "EAPI": "4" },
			"app-misc/B-1": { "DEPEND": "=app-misc/A-1[foo=]", "IUSE": "foo", "EAPI": 2 },
			"app-misc/C-1": { "DEPEND": "=app-misc/A-1[foo]", "EAPI": 2 },
			"app-misc/E-1": { "RDEPEND": "dev-libs/E[foo?]", "IUSE": "foo", "EAPI": "2" },
			"app-misc/F-1": { "RDEPEND": "=dev-libs/E-1", "IUSE": "foo", "EAPI": "2" },
			"dev-lang/perl-5.12": {"SLOT": "0/5.12", "EAPI": "4-slot-abi"},
			"dev-lang/perl-5.16": {"SLOT": "0/5.16", "EAPI": "4-slot-abi"},
			}
		installed = {
			"dev-libs/A-1": { "PDEPEND": "foo? ( dev-libs/B )", "IUSE": "foo", "USE": "foo" },
			"dev-libs/B-1": { "IUSE": "foo", "USE": "foo" },
			"dev-libs/C-1": { "DEPEND": "dev-libs/A[foo]", "EAPI": 2 },
			"dev-libs/D-1": { "DEPEND": "dev-libs/A[foo=] dev-libs/B[foo=]", "IUSE": "foo", "USE": "foo", "EAPI": 2 },
			"sci-libs/K-1": { "IUSE": "foo", "USE": "" },
			"sci-libs/L-1": { "DEPEND": "sci-libs/K[-foo]" },
			"sci-libs/Q-1": { "SLOT": "1", "IUSE": "+bar +foo", "USE": "bar foo", "EAPI": 1 },
			"sci-libs/Q-2": { "SLOT": "2", "IUSE": "+bar +foo", "USE": "bar foo", "EAPI": 2, "PDEPEND": "sci-libs/Q:1[bar?,foo?]" },
			"app-misc/A-1": { "IUSE": "+foo bar", "USE": "foo", "REQUIRED_USE": "^^ ( foo bar )", "EAPI": "4" },
			}
		test_cases = (
			#A qt-*[qt3support] like mess.
			ResolverPlaygroundTestCase(
				["dev-libs/A", "dev-libs/B", "dev-libs/C", "dev-libs/D"],
				options = { "--autounmask": 'n' },
				success = False,
				mergelist = ["dev-libs/A-1", "dev-libs/B-1", "dev-libs/C-1", "dev-libs/D-1"],
				ignore_mergelist_order = True,
				slot_collision_solutions = [ {"dev-libs/A-1": {"foo": True}, "dev-libs/D-1": {"foo": True}} ]),
			ResolverPlaygroundTestCase(
				["sys-libs/A", "sys-libs/B", "sys-libs/C", "sys-libs/D", "sys-libs/E", "sys-libs/F"],
				options = { "--autounmask": 'n' },
				success = False,
				ignore_mergelist_order = True,
				slot_collision_solutions = [],
				mergelist = ['sys-libs/J-2', 'sys-libs/J-1', 'sys-libs/A-1', 'sys-libs/B-1', 'sys-libs/C-1', 'sys-libs/D-1', 'sys-libs/E-1', 'sys-libs/F-1'],
				),
			#A version based conflicts, nothing we can do.
			ResolverPlaygroundTestCase(
				["=app-misc/X-1", "=app-misc/Y-1"],
				success = False,
				mergelist = ["app-misc/Z-1", "app-misc/Z-2", "app-misc/X-1", "app-misc/Y-1"],
				ignore_mergelist_order = True,
				slot_collision_solutions = []
				),
			ResolverPlaygroundTestCase(
				["=app-misc/X-2", "=app-misc/Y-2"],
				success = False,
				mergelist = ["app-misc/Z-1", "app-misc/Z-2", "app-misc/X-2", "app-misc/Y-2"],
				ignore_mergelist_order = True,
				slot_collision_solutions = []
				),
			ResolverPlaygroundTestCase(
				["=app-misc/E-1", "=app-misc/F-1"],
				success = False,
				mergelist = ["dev-libs/E-1", "dev-libs/E-2", "app-misc/E-1", "app-misc/F-1"],
				ignore_mergelist_order = True,
				slot_collision_solutions = []
				),
			# sub-slot
			ResolverPlaygroundTestCase(
				["dev-lang/perl:0/5.12", "dev-lang/perl:0/5.16", "=dev-lang/perl-5.12*"],
				success = False,
				mergelist = ["dev-lang/perl-5.12", "dev-lang/perl-5.16"],
				ignore_mergelist_order = True,
				slot_collision_solutions = []
				),
			#Simple cases.
			ResolverPlaygroundTestCase(
				["sci-libs/L", "sci-libs/M"],
				success = False,
				mergelist = ["sci-libs/L-1", "sci-libs/M-1", "sci-libs/K-1"],
				ignore_mergelist_order = True,
				slot_collision_solutions = [{"sci-libs/K-1": {"foo": False}, "sci-libs/M-1": {"foo": False}}]
				),
			#Avoid duplicates.
			ResolverPlaygroundTestCase(
				["sci-libs/P", "sci-libs/Q:2"],
				success = False,
				options = { "--update": True, "--complete-graph": True, "--autounmask": 'n' },
				mergelist = ["sci-libs/P-1", "sci-libs/Q-1"],
				ignore_mergelist_order = True,
				all_permutations=True,
				slot_collision_solutions = [{"sci-libs/Q-1": {"foo": True}, "sci-libs/P-1": {"foo": True}}]
				),
			#Conflict with REQUIRED_USE
			ResolverPlaygroundTestCase(
				["=app-misc/C-1", "=app-misc/B-1"],
				all_permutations = True,
				slot_collision_solutions = [],
				mergelist = ["app-misc/A-1", "app-misc/C-1", "app-misc/B-1"],
				ignore_mergelist_order = True,
				success = False),
			)
		playground = ResolverPlayground(ebuilds=ebuilds, installed=installed)
		try:
			for test_case in test_cases:
				playground.run_TestCase(test_case)
				self.assertEqual(test_case.test_success, True, test_case.fail_msg)
		finally:
			playground.cleanup()
	def testConnectedCollision(self):
		"""
		Ensure that we are able to solve connected slot conflicts
		which cannot be solved each on their own.
		"""
		ebuilds = {
			"dev-libs/A-1": { "RDEPEND": "=dev-libs/X-1" },
			"dev-libs/B-1": { "RDEPEND": "dev-libs/X" },
			"dev-libs/X-1": { "RDEPEND": "=dev-libs/Y-1" },
			"dev-libs/X-2": { "RDEPEND": "=dev-libs/Y-2" },
			"dev-libs/Y-1": { "PDEPEND": "=dev-libs/X-1" },
			"dev-libs/Y-2": { "PDEPEND": "=dev-libs/X-2" },
			}
		test_cases = (
			ResolverPlaygroundTestCase(
				["dev-libs/A", "dev-libs/B"],
				all_permutations = True,
				options = { "--backtrack": 0 },
				success = True,
				ambiguous_merge_order = True,
				mergelist = ["dev-libs/Y-1", "dev-libs/X-1", ("dev-libs/A-1", "dev-libs/B-1")]),
			)
		playground = ResolverPlayground(ebuilds=ebuilds, debug=False)
		try:
			for test_case in test_cases:
				playground.run_TestCase(test_case)
				self.assertEqual(test_case.test_success, True, test_case.fail_msg)
		finally:
			playground.cleanup()
	def testDeeplyConnectedCollision(self):
		"""
		Like testConnectedCollision, except that there is another
		level of dependencies between the two conflicts.
		"""
		ebuilds = {
			"dev-libs/A-1": { "RDEPEND": "=dev-libs/X-1" },
			"dev-libs/B-1": { "RDEPEND": "dev-libs/X" },
			"dev-libs/X-1": { "RDEPEND": "dev-libs/K" },
			"dev-libs/X-2": { "RDEPEND": "dev-libs/L" },
			"dev-libs/K-1": { "RDEPEND": "=dev-libs/Y-1" },
			"dev-libs/L-1": { "RDEPEND": "=dev-libs/Y-2" },
			"dev-libs/Y-1": { "PDEPEND": "=dev-libs/X-1" },
			"dev-libs/Y-2": { "PDEPEND": "=dev-libs/X-2" },
			}
		test_cases = (
			ResolverPlaygroundTestCase(
				["dev-libs/A", "dev-libs/B"],
				all_permutations = True,
				options = { "--backtrack": 0 },
				success = True,
				ignore_mergelist_order = True,
				mergelist = ["dev-libs/Y-1", "dev-libs/X-1", "dev-libs/K-1", \
					"dev-libs/A-1", "dev-libs/B-1"]),
			)
		playground = ResolverPlayground(ebuilds=ebuilds, debug=False)
		try:
			for test_case in test_cases:
				playground.run_TestCase(test_case)
				self.assertEqual(test_case.test_success, True, test_case.fail_msg)
		finally:
			playground.cleanup()
	def testSelfDEPENDRemovalCrash(self):
		"""
		Make sure we don't try to remove a packages twice. This happened
		in the past when a package had a DEPEND on itself.
		"""
		ebuilds = {
			"dev-libs/A-1": { "RDEPEND": "=dev-libs/X-1" },
			"dev-libs/B-1": { "RDEPEND": "dev-libs/X" },
			"dev-libs/X-1": { },
			"dev-libs/X-2": { "DEPEND": ">=dev-libs/X-2" },
			}
		test_cases = (
			ResolverPlaygroundTestCase(
				["dev-libs/A", "dev-libs/B"],
				all_permutations = True,
				success = True,
				ignore_mergelist_order = True,
				mergelist = ["dev-libs/X-1", "dev-libs/A-1", "dev-libs/B-1"]),
			)
		playground = ResolverPlayground(ebuilds=ebuilds, debug=False)
		try:
			for test_case in test_cases:
				playground.run_TestCase(test_case)
				self.assertEqual(test_case.test_success, True, test_case.fail_msg)
		finally:
			playground.cleanup()
| gpl-2.0 |
seann1/portfolio5 | .meteor/dev_bundle/python/Lib/rlcompleter.py | 57 | 5816 | """Word completion for GNU readline.
The completer completes keywords, built-ins and globals in a selectable
namespace (which defaults to __main__); when completing NAME.NAME..., it
evaluates (!) the expression up to the last dot and completes its attributes.
It's very cool to do "import sys" type "sys.", hit the completion key (twice),
and see the list of names defined by the sys module!
Tip: to use the tab key as the completion key, call
readline.parse_and_bind("tab: complete")
Notes:
- Exceptions raised by the completer function are *ignored* (and generally cause
the completion to fail). This is a feature -- since readline sets the tty
device in raw (or cbreak) mode, printing a traceback wouldn't work well
without some complicated hoopla to save, reset and restore the tty state.
- The evaluation of the NAME.NAME... form may cause arbitrary application
defined code to be executed if an object with a __getattr__ hook is found.
Since it is the responsibility of the application (or the user) to enable this
feature, I consider this an acceptable risk. More complicated expressions
(e.g. function calls or indexing operations) are *not* evaluated.
- GNU readline is also used by the built-in functions input() and
raw_input(), and thus these also benefit/suffer from the completer
features. Clearly an interactive application can benefit by
specifying its own completer function and using raw_input() for all
its input.
- When the original stdin is not a tty device, GNU readline is never
used, and this module (and the readline module) are silently inactive.
"""
import __builtin__
import __main__
__all__ = ["Completer"]
class Completer:
    """Readline completer over keywords, builtins and a namespace.

    See the module docstring for usage and caveats (Python 2 stdlib).
    """
    def __init__(self, namespace = None):
        """Create a new completer for the command line.
        Completer([namespace]) -> completer instance.
        If unspecified, the default namespace where completions are performed
        is __main__ (technically, __main__.__dict__). Namespaces should be
        given as dictionaries.
        Completer instances should be used as the completion mechanism of
        readline via the set_completer() call:
        readline.set_completer(Completer(my_namespace).complete)
        """
        if namespace and not isinstance(namespace, dict):
            raise TypeError,'namespace must be a dictionary'
        # Don't bind to namespace quite yet, but flag whether the user wants a
        # specific namespace or to use __main__.__dict__. This will allow us
        # to bind to __main__.__dict__ at completion time, not now.
        if namespace is None:
            self.use_main_ns = 1
        else:
            self.use_main_ns = 0
            self.namespace = namespace
    def complete(self, text, state):
        """Return the next possible completion for 'text'.
        This is called successively with state == 0, 1, 2, ... until it
        returns None.  The completion should begin with 'text'.
        """
        if self.use_main_ns:
            # Rebind on every call so late additions to __main__ are seen.
            self.namespace = __main__.__dict__
        if state == 0:
            # First call for this text: compute and cache the match list.
            if "." in text:
                self.matches = self.attr_matches(text)
            else:
                self.matches = self.global_matches(text)
        try:
            return self.matches[state]
        except IndexError:
            return None
    def _callable_postfix(self, val, word):
        # Append "(" to completions that name something callable, so the
        # user can go straight to typing arguments.
        if hasattr(val, '__call__'):
            word = word + "("
        return word
    def global_matches(self, text):
        """Compute matches when text is a simple name.
        Return a list of all keywords, built-in functions and names currently
        defined in self.namespace that match.
        """
        import keyword
        matches = []
        n = len(text)
        for word in keyword.kwlist:
            if word[:n] == text:
                matches.append(word)
        for nspace in [__builtin__.__dict__, self.namespace]:
            for word, val in nspace.items():
                if word[:n] == text and word != "__builtins__":
                    matches.append(self._callable_postfix(val, word))
        return matches
    def attr_matches(self, text):
        """Compute matches when text contains a dot.
        Assuming the text is of the form NAME.NAME....[NAME], and is
        evaluable in self.namespace, it will be evaluated and its attributes
        (as revealed by dir()) are used as possible completions.  (For class
        instances, class members are also considered.)
        WARNING: this can still invoke arbitrary C code, if an object
        with a __getattr__ hook is evaluated.
        """
        import re
        m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text)
        if not m:
            return []
        expr, attr = m.group(1, 3)
        try:
            # NOTE: eval of user-typed text -- see the WARNING above.
            thisobject = eval(expr, self.namespace)
        except Exception:
            return []
        # get the content of the object, except __builtins__
        words = dir(thisobject)
        if "__builtins__" in words:
            words.remove("__builtins__")
        if hasattr(thisobject, '__class__'):
            words.append('__class__')
            words.extend(get_class_members(thisobject.__class__))
        matches = []
        n = len(attr)
        for word in words:
            if word[:n] == attr and hasattr(thisobject, word):
                val = getattr(thisobject, word)
                word = self._callable_postfix(val, "%s.%s" % (expr, word))
                matches.append(word)
        return matches
def get_class_members(klass):
    """Return dir(klass) followed, recursively, by dir() of every base.

    Duplicates are intentionally kept (the caller tolerates them).
    """
    members = dir(klass)
    # Old-style classes may lack __bases__, hence the defaulted lookup.
    for base in getattr(klass, '__bases__', ()):
        members = members + get_class_members(base)
    return members
# Install this completer only where GNU readline is actually available.
try:
    import readline
except ImportError:
    # No readline on this platform; leave completion unbound.
    pass
else:
    readline.set_completer(Completer().complete)
| gpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.