| repo_name | ref | path | copies | content |
|---|---|---|---|---|
arborh/tensorflow
|
refs/heads/master
|
tensorflow/python/kernel_tests/linalg/linear_operator_composition_test.py
|
6
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linalg as linalg_lib
from tensorflow.python.ops.linalg import linear_operator_test_util
from tensorflow.python.platform import test
linalg = linalg_lib
rng = np.random.RandomState(0)
class SquareLinearOperatorCompositionTest(
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest."""
def setUp(self):
# Increase from 1e-6 to 1e-4 and 2e-4.
self._atol[dtypes.float32] = 2e-4
self._atol[dtypes.complex64] = 1e-4
self._rtol[dtypes.float32] = 2e-4
self._rtol[dtypes.complex64] = 1e-4
@staticmethod
def skip_these_tests():
# Cholesky not implemented.
return ["cholesky"]
def operator_and_matrix(self, build_info, dtype, use_placeholder,
ensure_self_adjoint_and_pd=False):
shape = list(build_info.shape)
# Either 1 or 2 matrices, depending.
num_operators = rng.randint(low=1, high=3)
if ensure_self_adjoint_and_pd:
# The random PD matrices are also symmetric. Here we are computing
# A @ A ... @ A. Since A is symmetric and PD, so are any powers of it.
matrices = [
linear_operator_test_util.random_positive_definite_matrix(
shape, dtype, force_well_conditioned=True)] * num_operators
else:
matrices = [
linear_operator_test_util.random_positive_definite_matrix(
shape, dtype, force_well_conditioned=True)
for _ in range(num_operators)
]
lin_op_matrices = matrices
if use_placeholder:
lin_op_matrices = [
array_ops.placeholder_with_default(
matrix, shape=None) for matrix in matrices]
operator = linalg.LinearOperatorComposition(
[linalg.LinearOperatorFullMatrix(l) for l in lin_op_matrices],
is_positive_definite=True if ensure_self_adjoint_and_pd else None,
is_self_adjoint=True if ensure_self_adjoint_and_pd else None,
is_square=True)
matmul_order_list = list(reversed(matrices))
mat = matmul_order_list[0]
for other_mat in matmul_order_list[1:]:
mat = math_ops.matmul(other_mat, mat)
return operator, mat
def test_is_x_flags(self):
# Matrix with two positive eigenvalues, 1 and 1.
# The matrix values do not affect auto-setting of the flags.
matrix = [[1., 0.], [1., 1.]]
operator = linalg.LinearOperatorComposition(
[linalg.LinearOperatorFullMatrix(matrix)],
is_positive_definite=True,
is_non_singular=True,
is_self_adjoint=False)
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
self.assertFalse(operator.is_self_adjoint)
def test_is_non_singular_auto_set(self):
# Matrix with two positive eigenvalues, 11 and 8.
# The matrix values do not affect auto-setting of the flags.
matrix = [[11., 0.], [1., 8.]]
operator_1 = linalg.LinearOperatorFullMatrix(matrix, is_non_singular=True)
operator_2 = linalg.LinearOperatorFullMatrix(matrix, is_non_singular=True)
operator = linalg.LinearOperatorComposition(
[operator_1, operator_2],
is_positive_definite=False, # No reason it HAS to be False...
is_non_singular=None)
self.assertFalse(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
with self.assertRaisesRegexp(ValueError, "always non-singular"):
linalg.LinearOperatorComposition(
[operator_1, operator_2], is_non_singular=False)
def test_name(self):
matrix = [[11., 0.], [1., 8.]]
operator_1 = linalg.LinearOperatorFullMatrix(matrix, name="left")
operator_2 = linalg.LinearOperatorFullMatrix(matrix, name="right")
operator = linalg.LinearOperatorComposition([operator_1, operator_2])
self.assertEqual("left_o_right", operator.name)
def test_different_dtypes_raises(self):
operators = [
linalg.LinearOperatorFullMatrix(rng.rand(2, 3, 3)),
linalg.LinearOperatorFullMatrix(rng.rand(2, 3, 3).astype(np.float32))
]
with self.assertRaisesRegexp(TypeError, "same dtype"):
linalg.LinearOperatorComposition(operators)
def test_empty_operators_raises(self):
with self.assertRaisesRegexp(ValueError, "non-empty"):
linalg.LinearOperatorComposition([])
class NonSquareLinearOperatorCompositionTest(
linear_operator_test_util.NonSquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest."""
def setUp(self):
# Increase from 1e-6 to 1e-4
self._atol[dtypes.float32] = 1e-4
self._atol[dtypes.complex64] = 1e-4
self._rtol[dtypes.float32] = 1e-4
self._rtol[dtypes.complex64] = 1e-4
def operator_and_matrix(
self, build_info, dtype, use_placeholder,
ensure_self_adjoint_and_pd=False):
del ensure_self_adjoint_and_pd
shape = list(build_info.shape)
# Create 2 matrices/operators, A1, A2, which becomes A = A1 A2.
# Use inner dimension of 2.
k = 2
batch_shape = shape[:-2]
shape_1 = batch_shape + [shape[-2], k]
shape_2 = batch_shape + [k, shape[-1]]
matrices = [
linear_operator_test_util.random_normal(
shape_1, dtype=dtype), linear_operator_test_util.random_normal(
shape_2, dtype=dtype)
]
lin_op_matrices = matrices
if use_placeholder:
lin_op_matrices = [
array_ops.placeholder_with_default(
matrix, shape=None) for matrix in matrices]
operator = linalg.LinearOperatorComposition(
[linalg.LinearOperatorFullMatrix(l) for l in lin_op_matrices])
matmul_order_list = list(reversed(matrices))
mat = matmul_order_list[0]
for other_mat in matmul_order_list[1:]:
mat = math_ops.matmul(other_mat, mat)
return operator, mat
@test_util.run_deprecated_v1
def test_static_shapes(self):
operators = [
linalg.LinearOperatorFullMatrix(rng.rand(2, 3, 4)),
linalg.LinearOperatorFullMatrix(rng.rand(2, 4, 5))
]
operator = linalg.LinearOperatorComposition(operators)
self.assertAllEqual((2, 3, 5), operator.shape)
@test_util.run_deprecated_v1
def test_shape_tensors_when_statically_available(self):
operators = [
linalg.LinearOperatorFullMatrix(rng.rand(2, 3, 4)),
linalg.LinearOperatorFullMatrix(rng.rand(2, 4, 5))
]
operator = linalg.LinearOperatorComposition(operators)
with self.cached_session():
self.assertAllEqual((2, 3, 5), operator.shape_tensor().eval())
@test_util.run_deprecated_v1
def test_shape_tensors_when_only_dynamically_available(self):
mat_1 = rng.rand(1, 2, 3, 4)
mat_2 = rng.rand(1, 2, 4, 5)
mat_ph_1 = array_ops.placeholder(dtypes.float64)
mat_ph_2 = array_ops.placeholder(dtypes.float64)
feed_dict = {mat_ph_1: mat_1, mat_ph_2: mat_2}
operators = [
linalg.LinearOperatorFullMatrix(mat_ph_1),
linalg.LinearOperatorFullMatrix(mat_ph_2)
]
operator = linalg.LinearOperatorComposition(operators)
with self.cached_session():
self.assertAllEqual(
(1, 2, 3, 5), operator.shape_tensor().eval(feed_dict=feed_dict))
if __name__ == "__main__":
linear_operator_test_util.add_tests(SquareLinearOperatorCompositionTest)
linear_operator_test_util.add_tests(NonSquareLinearOperatorCompositionTest)
test.main()
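# --- Added illustration (not part of the original TensorFlow test) ---
# A minimal NumPy sketch of the reference computation used in
# operator_and_matrix above: the dense equivalent of
# LinearOperatorComposition([A1, ..., An]) multiplies the factors in reversed
# order, so that applying the result to x equals A1 @ (A2 @ ... (An @ x)).
# The helper name below is illustrative only.
def _dense_composition_reference(matrices):
  """Return the dense matrix equal to composing `matrices` left to right."""
  mat = matrices[-1]
  for other in reversed(matrices[:-1]):
    mat = np.matmul(other, mat)
  return mat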
|
rajpushkar83/base
|
refs/heads/master
|
fabric/pypi.py
|
3
|
from fabric.api import task, local
import build
__all__ = ['upload', 'register']
@task
def upload():
"""upload the dist to pypi"""
build.sdist()
local("python shell_plugins.py.in sdist upload")
@task
def register():
"""register with pypi. Needs only to be done once."""
local("python shell_plugins.py.in register")
|
jlzeller/django-ajax-filtered-fields
|
refs/heads/master
|
ajax_filtered_fields/urls.py
|
1
|
from django.conf.urls import patterns, url, include
from ajax_filtered_fields import settings
from ajax_filtered_fields.views import json_index
if settings.AUTH_DECORATOR:
json_index = settings.AUTH_DECORATOR(json_index)
urlpatterns = patterns('',
(r'^json_index/$', json_index),
)
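# --- Added illustration (not part of the original file) ---
# AUTH_DECORATOR is expected to be a regular Django view decorator (or a falsy
# value to skip wrapping). A hypothetical project configuration could point it
# at login_required, for example:
#
#     from django.contrib.auth.decorators import login_required
#     AJAX_FILTERED_FIELDS_AUTH_DECORATOR = login_required
#
# The exact setting name read by ajax_filtered_fields.settings is an
# assumption here.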
|
wesm/statsmodels
|
refs/heads/master
|
scikits/statsmodels/datasets/longley/data.py
|
1
|
"""Longley dataset"""
__docformat__ = 'restructuredtext'
COPYRIGHT = """This is public domain."""
TITLE = __doc__
SOURCE = """
The classic 1967 Longley Data
http://www.itl.nist.gov/div898/strd/lls/data/Longley.shtml
::
Longley, J.W. (1967) "An Appraisal of Least Squares Programs for the
Electronic Computer from the Point of View of the User." Journal of
the American Statistical Association. 62.319, 819-41.
"""
DESCRSHORT = """"""
DESCRLONG = """The Longley dataset contains various US macroeconomic
variables that are known to be highly collinear. It has been used to appraise
the accuracy of least squares routines."""
NOTE = """
Number of Observations - 16
Number of Variables - 6
Variable name definitions::
TOTEMP - Total Employment
GNPDEFL - GNP deflator
GNP - GNP
UNEMP - Number of unemployed
ARMED - Size of armed forces
POP - Population
YEAR - Year (1947 - 1962)
"""
from numpy import recfromtxt, array, column_stack
import scikits.statsmodels.tools.datautils as du
from os.path import dirname, abspath
def load():
"""
Load the Longley data and return a Dataset class.
Returns
-------
Dataset instance
See DATASET_PROPOSAL.txt for more information.
"""
data = _get_data()
return du.process_recarray(data, endog_idx=0, dtype=float)
def load_pandas():
"""
Load the Longley data and return a Dataset class.
Returns
-------
Dataset instance
See DATASET_PROPOSAL.txt for more information.
"""
data = _get_data()
return du.process_recarray_pandas(data, endog_idx=0)
def _get_data():
filepath = dirname(abspath(__file__))
data = recfromtxt(open(filepath+'/longley.csv',"rb"), delimiter=",",
names=True, dtype=float, usecols=(1,2,3,4,5,6,7))
return data
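# --- Added usage sketch (illustrative, not part of the original module) ---
# A hypothetical example of consuming this dataset module; the import path
# (scikits.statsmodels.api) and the OLS call are assumptions based on the
# scikits.statsmodels API of this era.
def _example_usage():
    import scikits.statsmodels.api as sm
    data = load()
    # endog is TOTEMP; exog holds the remaining regressors listed in NOTE.
    results = sm.OLS(data.endog, data.exog).fit()
    return results.params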
|
ghber/My-Django-Nonrel
|
refs/heads/master
|
django/contrib/sites/__init__.py
|
12133432
| |
jvanz/core
|
refs/heads/master
|
wizards/com/sun/star/wizards/web/__init__.py
|
12133432
| |
precompiler/python-101
|
refs/heads/master
|
mastering-python/ch05/__init__.py
|
12133432
| |
galtys/odoo
|
refs/heads/8.0
|
addons/crm_mass_mailing/__openerp__.py
|
333
|
{
'name': 'Campaign in Mass Mailing',
'version': '1.0',
'summary': 'This module allows specifying a campaign, a source and a channel for a mass mailing campaign.',
'author': 'OpenERP SA',
'description': """
Mass Mailing with Crm Marketing
================================
Link module mass mailing with the marketing mixin from crm.
""",
'depends': ['crm', 'mass_mailing'],
'data': [
'mass_mailing.xml',
],
'installable': True,
'auto_install': True,
}
|
TeamEOS/external_chromium_org_third_party_WebKit
|
refs/heads/lp5.1
|
Tools/Scripts/webkitpy/common/system/workspace_mock.py
|
191
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class MockWorkspace(object):
def find_unused_filename(self, directory, name, extension, search_limit=10):
return "%s/%s.%s" % (directory, name, extension)
def create_zip(self, zip_path, source_path):
self.zip_path = zip_path
self.source_path = source_path
return object() # Something that is not None
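# --- Added usage sketch (not part of the original webkitpy file) ---
# A minimal illustration of how a test might exercise this mock; the paths and
# names below are hypothetical.
if __name__ == '__main__':
    workspace = MockWorkspace()
    assert workspace.find_unused_filename('/tmp', 'results', 'zip') == '/tmp/results.zip'
    assert workspace.create_zip('/tmp/results.zip', '/var/results') is not None
    assert workspace.zip_path == '/tmp/results.zip'
    assert workspace.source_path == '/var/results'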
|
soulfx/gmusic-playlist
|
refs/heads/master
|
test/test-common.py
|
4
|
from atestframe import *
from common import *
class TestCommon(unittest.TestCase):
def test_get_csv_fields(self):
""" test that quoted and unquoted fields are being recognized """
fields = get_csv_fields(u'something,"good",to "eat","like a ""hot""",dog',u',')
self.assertEqual(fields[0],u'something')
self.assertEqual(fields[1],u'good')
self.assertEqual(fields[2],u'to "eat"')
self.assertEqual(fields[3],u'like a "hot"')
self.assertEqual(fields[4],u'dog')
fields = get_csv_fields(u',hello',u',')
self.assertEqual(fields[0],u'')
self.assertEqual(fields[1],u'hello')
fields = get_csv_fields(u'test,"commas, in, the, field"',u',')
self.assertEqual(len(fields),2)
self.assertEqual(fields[0],u'test')
self.assertEqual(fields[1],u'commas, in, the, field')
def test_handle_quote_input(self):
""" test that quotes are being removed as expected """
self.assertEqual(handle_quote_input(u''),u'')
self.assertEqual(handle_quote_input(u'a'),u'a')
self.assertEqual(handle_quote_input(u'""'),u'')
self.assertEqual(handle_quote_input(u'""asdf""'),u'"asdf"')
self.assertEqual(handle_quote_input(u'"asdf"'),u'asdf')
def test_handle_quote_output(self):
""" test that quotes are applied only when needed """
self.assertEqual(handle_quote_output("nothing to quote"),"nothing to quote")
self.assertEqual(handle_quote_output('this "needs" quoting'),'"this ""needs"" quoting"')
self.assertEqual(handle_quote_output('tsep, in field'),'"tsep, in field"')
def test_quote_unquote(self):
""" test for verifying the quoting and unquoting that occurs in track values """
test_values = (("", ""),
("bog", "bog"),
("\"bog", "\"\"\"bog\""),
("\"bog\"", "\"\"\"bog\"\"\""),
("b\"o\"g", "\"b\"\"o\"\"g\""),
("\"", "\"\"\"\""))
for (invalue, expected) in test_values:
actual_out = handle_quote_output(invalue)
self.assertEqual(actual_out, expected)
actual_in = handle_quote_input(actual_out)
self.assertEqual(actual_in, invalue)
run_test()
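# --- Added sketch (not part of the original test file) ---
# A minimal CSV-style quoting helper consistent with the expectations asserted
# above; it is an illustration only, not the actual handle_quote_output
# implementation from common.py.
def _csv_quote_sketch(value, tsep=u','):
    """Quote value if it contains the separator or a quote, doubling quotes."""
    if u'"' in value or tsep in value:
        return u'"' + value.replace(u'"', u'""') + u'"'
    return value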
|
fredmorcos/attic
|
refs/heads/master
|
projects/grafeo/attic/grafeo_20100227_python/grafeo/ui/gtk/MainWindow.py
|
1
|
import gtk
from grafeo.config import Paths
class MainWindow(gtk.Window):
def __init__(self):
gtk.Window.__init__(self)
self.set_title('Grafeo')
self.set_size_request(600, 400)
self.set_default_size(600, 400)
self.set_icon_from_file(Paths.get_img_filename('grafeo.svg'))
builder = gtk.Builder()
builder.add_from_file(Paths.get_gtk_ui_filename('MainVBox.ui'))
self.mainVBox = builder.get_object('mainVBox')
self.add(self.mainVBox)
|
KSG-IT/ksg-nett
|
refs/heads/develop
|
common/templatetags/__init__.py
|
12133432
| |
seijim/cloud-robotics-azure-platform-v1-sdk
|
refs/heads/master
|
SampleCode_Client/Python/MqttApi_D2D/paho/mqtt/__init__.py
|
10
|
__version__ = "1.2"
class MQTTException(Exception):
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
|
tomsilver/nupic
|
refs/heads/master
|
tests/unit/nupic/support/configuration_test.py
|
7
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import os
import shutil
from StringIO import StringIO
import sys
import tempfile
import unittest2 as unittest
import uuid
from mock import Mock, patch
from pkg_resources import resource_filename
from xml.parsers.expat import ExpatError
# ParseError not present in xml module for python2.6
try:
from xml.etree.ElementTree import ParseError
except ImportError:
from xml.parsers.expat import ExpatError as ParseError
import nupic
import nupic.support.configuration_base as configuration
class ConfigurationTest(unittest.TestCase):
def setUp(self):
"""configuration.Configuration relies on static methods
which load files by name. Since we need to be able to run tests and
potentially change the content of those files between tests without
interfering with one another and with the system configuration, this
setUp() function will allocate temporary files used only during the tests, using
conf/nupic-default.xml and conf/nupic-site.xml (relative to the unit tests)
as templates.
"""
self.files = {}
with tempfile.NamedTemporaryFile(
prefix='nupic-default.xml-unittest-', delete=False) as outp:
self.addCleanup(os.remove, outp.name)
with open(resource_filename(__name__, 'conf/nupic-default.xml')) as inp:
outp.write(inp.read())
self.files['nupic-default.xml'] = outp.name
with tempfile.NamedTemporaryFile(
prefix='nupic-site.xml-unittest-', delete=False) as outp:
self.addCleanup(os.remove, outp.name)
with open(resource_filename(__name__, 'conf/nupic-site.xml')) as inp:
outp.write(inp.read())
self.files['nupic-site.xml'] = outp.name
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testGetStringMissingRaisesKeyError(self, findConfigFileMock, environMock):
findConfigFileMock.side_effect = self.files.get
environMock.get.return_value = None
configuration.Configuration.clear()
with self.assertRaises(KeyError):
configuration.Configuration.getString(uuid.uuid1().hex)
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testGetString(self, findConfigFileMock, environMock):
environMock.get.return_value = None
findConfigFileMock.side_effect = self.files.get
configuration.Configuration.clear()
configuration.Configuration.set('foo', 'bar')
result = configuration.Configuration.getString('foo')
self.assertEqual(result, 'bar')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testGetBoolMissingRaisesKeyError(self, findConfigFileMock, environMock):
findConfigFileMock.side_effect = self.files.get
environMock.get.return_value = None
configuration.Configuration.clear()
with self.assertRaises(KeyError):
configuration.Configuration.getBool(uuid.uuid1().hex)
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testGetBoolOutOfRangeRaisesValueError(self, findConfigFileMock,
environMock):
environMock.get.return_value = None
findConfigFileMock.side_effect = self.files.get
configuration.Configuration.clear()
configuration.Configuration.set('foobool2', '2')
with self.assertRaises(ValueError):
configuration.Configuration.getBool('foobool2')
configuration.Configuration.set('fooboolneg1', '-1')
with self.assertRaises(ValueError):
configuration.Configuration.getBool('fooboolneg1')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testGetBool(self, findConfigFileMock, environMock):
environMock.get.return_value = None
findConfigFileMock.side_effect = self.files.get
configuration.Configuration.clear()
configuration.Configuration.set('foobool0', '0')
result = configuration.Configuration.getBool('foobool0')
self.assertEqual(result, False)
configuration.Configuration.set('foobool1', '1')
result = configuration.Configuration.getBool('foobool1')
self.assertEqual(result, True)
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testGetIntMissingRaisesKeyError(self, findConfigFileMock, environMock):
findConfigFileMock.side_effect = self.files.get
environMock.get.return_value = None
configuration.Configuration.clear()
with self.assertRaises(KeyError):
configuration.Configuration.getInt(uuid.uuid1().hex)
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testGetInt(self, findConfigFileMock, environMock):
environMock.get.return_value = None
findConfigFileMock.side_effect = self.files.get
configuration.Configuration.clear()
configuration.Configuration.set('fooint', '-127')
result = configuration.Configuration.getInt('fooint')
self.assertEqual(result, -127)
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testGetFloatMissingRaisesKeyError(self, findConfigFileMock, environMock):
findConfigFileMock.side_effect = self.files.get
environMock.get.return_value = None
configuration.Configuration.clear()
with self.assertRaises(KeyError):
configuration.Configuration.getFloat(uuid.uuid1().hex)
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testGetFloat(self, findConfigFileMock, environMock):
environMock.get.return_value = None
findConfigFileMock.side_effect = self.files.get
configuration.Configuration.clear()
configuration.Configuration.set('foofloat', '-127.65')
result = configuration.Configuration.getFloat('foofloat')
self.assertEqual(result, -127.65)
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testGetMissingReturnsNone(self, findConfigFile, environ):
findConfigFile.side_effect = self.files.get
environ.get.return_value = None
configuration.Configuration.clear()
result = configuration.Configuration.get(uuid.uuid1().hex)
self.assertTrue(result is None)
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testSetAndGet(self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
configuration.Configuration.set('foo', 'bar')
result = configuration.Configuration.get('foo')
self.assertTrue(result == 'bar')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testDict(self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
configuration.Configuration.set('foo', 'bar')
configuration.Configuration.set('apple', 'banana')
result = configuration.Configuration.dict()
self.assertTrue(isinstance(result, dict))
self.assertTrue('foo' in result)
self.assertTrue(result['foo'] == 'bar')
self.assertTrue('apple' in result)
self.assertTrue(result['apple'] == 'banana')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testDictReadsFilesFirstTime(self, findConfigFile,
environ): # pylint: disable=W0613
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
result = configuration.Configuration.dict()
self.assertTrue(isinstance(result, dict))
self.assertTrue(len(result) == 1, result)
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testDictReplacesKeysFromEnvironment(self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
key = uuid.uuid1().hex
env = {'NTA_CONF_PROP_' + key: 'foo'}
environ.keys.side_effect = env.keys
environ.__getitem__.side_effect = env.__getitem__
result = configuration.Configuration.dict()
self.assertTrue(key in result)
self.assertTrue(result[key] == 'foo')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testClear(self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
configuration.Configuration.set('foo', 'bar')
configuration.Configuration.set('apple', 'banana')
self.assertTrue(configuration.Configuration.get('foo') == 'bar')
self.assertTrue(configuration.Configuration.get('apple') == 'banana')
configuration.Configuration.clear()
self.assertTrue(configuration.Configuration.get('foo') is None)
self.assertTrue(configuration.Configuration.get('apple') is None)
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testGetFromEnvironment(self, findConfigFile, environ):
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
key = uuid.uuid1().hex
environ.get.side_effect = {'NTA_CONF_PROP_' + key: 'foo'}.get
self.assertTrue(configuration.Configuration.get(key) == 'foo')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testReadConfigFileFromPath(self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
prefix, _, filename = self.files['nupic-default.xml'].rpartition(os.sep)
configuration.Configuration.readConfigFile(filename, prefix)
self.assertTrue(configuration.Configuration.get('dummy') == 'dummy value')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testReadConfigFileUnexpectedElementAtRoot(self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
with open(self.files['nupic-default.xml'], 'w') as outp:
outp.write('\n'.join((
'<?xml version="1.0"?>',
'<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>',
'<foo/>')))
outp.flush()
with patch('sys.stderr', new_callable=StringIO):
self.assertRaises(RuntimeError, configuration.Configuration.get, 'foo')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testReadConfigFileMissingDocumentRoot(self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
with open(self.files['nupic-default.xml'], 'w') as outp:
outp.write('\n'.join((
'<?xml version="1.0"?>',
'<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>')))
outp.flush()
with patch('sys.stderr', new_callable=StringIO):
self.assertRaises((ExpatError, ParseError), configuration.Configuration.get, 'foo')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testReadConfigFileMissingNonPropertyConfigurationChildren(
self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
with open(self.files['nupic-default.xml'], 'w') as outp:
outp.write('\n'.join((
'<?xml version="1.0"?>',
'<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>',
'<configuration>',
' <foo>bar<baz/></foo>',
'</configuration>')))
outp.flush()
self.assertEqual(configuration.Configuration.dict(), \
dict(dummy='dummy value'))
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testReadConfigFileEmptyValue(self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
with open(self.files['nupic-default.xml'], 'w') as outp:
outp.write('\n'.join((
'<?xml version="1.0"?>',
'<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>',
'<configuration>',
' <property>',
' <name>foo</name>',
' </property>',
'</configuration>')))
outp.flush()
with patch('sys.stderr', new_callable=StringIO):
self.assertRaises(Exception, configuration.Configuration.get, 'foo')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testReadConfigFileEmptyNameAndValue(self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
with open(self.files['nupic-default.xml'], 'w') as outp:
outp.write('\n'.join((
'<?xml version="1.0"?>',
'<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>',
'<configuration>',
' <property>',
' <name></name>',
' <value></value>',
' </property>',
'</configuration>')))
outp.flush()
with patch('sys.stderr', new_callable=StringIO):
self.assertRaises(RuntimeError, configuration.Configuration.get, 'foo')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testReadConfigFileMissingEnvVars(self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
with open(self.files['nupic-default.xml'], 'w') as outp:
outp.write('\n'.join((
'<?xml version="1.0"?>',
'<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>',
'<configuration>',
' <property>',
' <name>foo</name>',
' <value>${env.foo}</value>',
' </property>',
'</configuration>')))
outp.flush()
with patch('sys.stderr', new_callable=StringIO):
self.assertRaises(RuntimeError, configuration.Configuration.get, 'foo')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testReadConfigFileMalformedEnvReference(self, findConfigFile,
environ): # pylint: disable=W0613
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
with open(self.files['nupic-default.xml'], 'w') as outp:
outp.write('\n'.join((
'<?xml version="1.0"?>',
'<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>',
'<configuration>',
' <property>',
' <name>foo</name>',
' <value>${env.foo</value>',
' </property>',
'</configuration>')))
outp.flush()
with patch('sys.stderr', new_callable=StringIO):
self.assertRaises(RuntimeError, configuration.Configuration.get, 'foo')
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testReadConfigFileEnvironmentOverride(self, findConfigFile, environ):
environ.get.return_value = None
findConfigFile.side_effect = self.files.get
configuration.Configuration.clear()
with open(self.files['nupic-default.xml'], 'w') as outp:
outp.write('\n'.join((
'<?xml version="1.0"?>',
'<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>',
'<configuration>',
' <property>',
' <name>foo</name>',
' <value>${env.NTA_CONF_PROP_foo}</value>',
' </property>',
'</configuration>')))
outp.flush()
env = {'NTA_CONF_PROP_foo': 'bar'}
environ.__getitem__.side_effect = env.__getitem__
environ.get.side_effect = env.get
environ.__contains__.side_effect = env.__contains__
result = configuration.Configuration.get('foo')
self.assertEqual(result, 'bar')
@patch.object(configuration.Configuration, 'getConfigPaths',
spec=configuration.Configuration.getConfigPaths)
def testFindConfigFile(self, getConfigPaths):
prefix, _, filename = self.files['nupic-default.xml'].rpartition(os.sep)
def replacePaths(**_):
return [prefix]
getConfigPaths.side_effect = replacePaths
configuration.Configuration.clear()
result = configuration.Configuration.findConfigFile(filename)
self.assertTrue(result == self.files['nupic-default.xml'])
getConfigPaths.assert_called_with()
@patch.object(configuration.Configuration, 'getConfigPaths',
spec=configuration.Configuration.getConfigPaths)
def testFindConfigFileReturnsNoneForMissingFile(self, getConfigPaths):
prefix, _, _ = self.files['nupic-default.xml'].rpartition(os.sep)
def replacePaths(**_):
return [prefix]
getConfigPaths.side_effect = replacePaths
configuration.Configuration.clear()
result = configuration.Configuration.findConfigFile(uuid.uuid1().hex)
self.assertTrue(result is None)
getConfigPaths.assert_called_with()
@patch.object(configuration.Configuration, '_configPaths',
spec=configuration.Configuration._configPaths)
@patch.object(configuration.os, 'environ', spec=dict)
def testGetConfigPaths(
self, environ, configPaths): # pylint: disable=W0613
result = configuration.Configuration.getConfigPaths()
self.assertEqual(result, configPaths)
@unittest.skip('NUP-2081')
@patch.object(configuration.Configuration, '_configPaths',
spec=configuration.Configuration._configPaths)
@patch.object(configuration.os, 'environ', spec=dict)
def testGetConfigPathsForNone(
self, environ, configPaths): # pylint: disable=W0613
configuration.Configuration._configPaths = None # pylint: disable=W0212
result = configuration.Configuration.getConfigPaths()
self.assertTrue(isinstance(result, list))
self.assertEqual(result, [os.path.join(os.environ['NUPIC'],
'config', 'default')])
@patch.object(configuration.Configuration, '_configPaths',
spec=configuration.Configuration._configPaths)
@patch.object(configuration.os, 'environ', spec=dict)
def testGetConfigPathsForNoneWithNTA_CONF_PATHInEnv(
self, environ, configPaths): # pylint: disable=W0613
configuration.Configuration._configPaths = None # pylint: disable=W0212
env = {'NTA_CONF_PATH': ''}
environ.__getitem__.side_effect = env.__getitem__
environ.get.side_effect = env.get
environ.__contains__.side_effect = env.__contains__
result = configuration.Configuration.getConfigPaths()
self.assertTrue(isinstance(result, list))
self.assertEqual(len(result), 1)
self.assertEqual(result[0], env['NTA_CONF_PATH'])
def testSetConfigPathsForNoneWithNTA_CONF_PATHInEnv(self):
paths = [Mock()]
configuration.Configuration.setConfigPaths(paths)
self.assertEqual(
paths,
configuration.Configuration._configPaths) # pylint: disable=W0212
@patch.object(configuration.os, 'environ', spec=dict)
@patch.object(configuration.Configuration, 'findConfigFile',
spec=configuration.Configuration.findConfigFile)
def testConfiguration(self, findConfigFile, environ):
configuration.Configuration.clear()
findConfigFile.side_effect = self.files.get
with open(self.files['nupic-default.xml'], 'w') as outp:
with open(resource_filename(__name__, 'conf/testFile1.xml')) as inp:
outp.write(inp.read())
with open(self.files['nupic-site.xml'], 'w') as outp:
with open(resource_filename(__name__, 'conf/testFile2.xml')) as inp:
outp.write(inp.read())
env = {'USER': 'foo', 'HOME': 'bar'}
environ.__getitem__.side_effect = env.__getitem__
environ.get.side_effect = env.get
environ.__contains__.side_effect = env.__contains__
environ.keys.side_effect = env.keys
# Test the resulting configuration
self.assertEqual(configuration.Configuration.get('database.host'),
'TestHost')
self.assertEqual(configuration.Configuration.get('database.password'),
'pass')
self.assertEqual(configuration.Configuration.get('database.emptypassword'),
'')
self.assertEqual(configuration.Configuration.get('database.missingfield'),
None)
self.assertEqual(configuration.Configuration.get('database.user'), 'root')
expectedValue = 'foo'
actualValue = configuration.Configuration.get(
'var.environment.standalone.user')
self.assertTrue(actualValue == expectedValue,
"expected %r, but got %r" % (expectedValue, actualValue))
expectedValue = "The user " + os.environ['USER'] + " rocks!"
actualValue = configuration.Configuration.get(
'var.environment.user.in.the.middle')
self.assertTrue(actualValue == expectedValue,
"expected %r, but got %r" % (expectedValue, actualValue))
expectedValue = ("User " + os.environ['USER'] + " and home " +
os.environ['HOME'] + " in the middle")
actualValue = configuration.Configuration.get(
'var.environment.user.and.home.in.the.middle')
self.assertTrue(actualValue == expectedValue,
"expected %r, but got %r" % (expectedValue, actualValue))
env['NTA_CONF_PROP_database_host'] = 'FooBar'
self.assertEqual(configuration.Configuration.get('database.host'), 'FooBar')
allProps = configuration.Configuration.dict()
self.assertTrue(allProps['database.host'] == 'FooBar')
del env['NTA_CONF_PROP_database_host']
environ.__getitem__.side_effect = env.__getitem__
environ.get.side_effect = env.get
environ.__contains__.side_effect = env.__contains__
# Change a property
configuration.Configuration.set('database.host', 'matrix')
self.assertEqual(configuration.Configuration.get('database.host'), 'matrix')
@patch.object(configuration.os, 'environ', spec=dict)
def testConfiguration2(self, environ):
configuration.Configuration.clear()
tmpDir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpDir)
with open(os.path.join(tmpDir, 'nupic-default.xml'), 'w') as outp:
with open(resource_filename(__name__, 'conf/testFile1.xml')) as inp:
outp.write(inp.read())
with open(os.path.join(tmpDir, 'nupic-site.xml'), 'w') as outp:
with open(resource_filename(__name__, 'conf/testFile2.xml')) as inp:
outp.write(inp.read())
env = {
'USER': 'foo',
'HOME': 'bar',
'NTA_CONF_PATH': tmpDir
}
environ.__getitem__.side_effect = env.__getitem__
environ.get.side_effect = env.get
environ.__contains__.side_effect = env.__contains__
environ.keys.side_effect = env.keys
# Test the resulting configuration
self.assertEqual(configuration.Configuration.get('database.host'),
'TestHost')
self.assertEqual(configuration.Configuration.get('database.password'),
'pass')
self.assertEqual(
configuration.Configuration.get('database.emptypassword'), '')
self.assertEqual(configuration.Configuration.get('database.missingfield'),
None)
self.assertEqual(configuration.Configuration.get('database.user'), 'root')
expectedValue = 'foo'
actualValue = configuration.Configuration.get(
'var.environment.standalone.user')
self.assertEqual(actualValue, expectedValue,
"expected %r, but got %r" % (expectedValue, actualValue))
expectedValue = "The user " + os.environ['USER'] + " rocks!"
actualValue = configuration.Configuration.get(
'var.environment.user.in.the.middle')
self.assertEqual(actualValue, expectedValue,
"expected %r, but got %r" % (expectedValue, actualValue))
expectedValue = ("User " + os.environ['USER'] + " and home " +
os.environ['HOME'] + " in the middle")
actualValue = configuration.Configuration.get(
'var.environment.user.and.home.in.the.middle')
self.assertEqual(actualValue, expectedValue,
"expected %r, but got %r" % (expectedValue, actualValue))
env['NTA_CONF_PROP_database_host'] = 'FooBar'
self.assertEqual(configuration.Configuration.get('database.host'),
'FooBar')
allProps = configuration.Configuration.dict()
self.assertEqual(allProps['database.host'], 'FooBar')
del env['NTA_CONF_PROP_database_host']
# Change a property
configuration.Configuration.set('database.host', 'matrix')
self.assertEqual(configuration.Configuration.get('database.host'),
'matrix')
configuration.Configuration.clear()
tmpDir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpDir)
with open(os.path.join(tmpDir, 'nupic-default.xml'), 'w') as outp:
with open(resource_filename(__name__, 'conf/testFile1.xml')) as inp:
outp.write(inp.read())
with open(os.path.join(tmpDir, 'nupic-site.xml'), 'w') as outp:
with open(resource_filename(__name__, 'conf/testFile2.xml')) as inp:
outp.write(inp.read())
tmpDir2 = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpDir2)
with open(os.path.join(tmpDir2, 'nupic-site.xml'), 'w') as outp:
with open(resource_filename(__name__, 'conf/testFile3.xml')) as inp:
outp.write(inp.read())
env['NTA_CONF_PATH'] = os.pathsep.join([tmpDir, tmpDir2])
# Test the resulting configuration
self.assertEqual(configuration.Configuration.get('database.host'),
'TestHost')
self.assertEqual(configuration.Configuration.get('database.password'),
'pass')
self.assertEqual(
configuration.Configuration.get('database.emptypassword'), '')
self.assertEqual(configuration.Configuration.get('database.missingfield'),
None)
self.assertEqual(configuration.Configuration.get('database.user'),
'root')
# Change a property
configuration.Configuration.set('database.host', 'matrix')
self.assertEqual(configuration.Configuration.get('database.host'),
'matrix')
if __name__ == '__main__':
unittest.main(argv=[sys.argv[0], "--verbose"] + sys.argv[1:])
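# --- Added illustration (not part of the original NuPIC test file) ---
# Several tests above rely on the convention that an environment variable
# named 'NTA_CONF_PROP_<key>' overrides the value of '<key>' read from the
# XML configuration files. A minimal stand-alone sketch of that lookup order
# (illustrative only, not the nupic.support implementation):
def _lookupWithEnvOverride(key, fileValues, environ):
  envKey = 'NTA_CONF_PROP_' + key
  if envKey in environ:
    return environ[envKey]
  return fileValues.get(key)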
|
jusdng/odoo
|
refs/heads/8.0
|
addons/account_anglo_saxon/product.py
|
384
|
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class product_category(osv.osv):
_inherit = "product.category"
_columns = {
'property_account_creditor_price_difference_categ': fields.property(
type='many2one',
relation='account.account',
string="Price Difference Account",
help="This account will be used to value price difference between purchase price and cost price."),
# Redefine fields to change the help text for the Anglo-Saxon methodology.
'property_account_income_categ': fields.property(
type='many2one',
relation='account.account',
string="Income Account",
help="This account will be used to value outgoing stock using sale price."),
'property_account_expense_categ': fields.property(
type='many2one',
relation='account.account',
string="Expense Account",
help="This account will be used to value outgoing stock using cost price."),
}
class product_template(osv.osv):
_inherit = "product.template"
_columns = {
'property_account_creditor_price_difference': fields.property(
type='many2one',
relation='account.account',
string="Price Difference Account",
help="This account will be used to value price difference between purchase price and cost price."),
# Redefine fields to change the help text for the Anglo-Saxon methodology.
'property_account_income': fields.property(
type='many2one',
relation='account.account',
string="Income Account",
help="This account will be used to value outgoing stock using sale price."),
'property_account_expense': fields.property(
type='many2one',
relation='account.account',
string="Expense Account",
help="This account will be used to value outgoing stock using cost price."),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
pombreda/syzygy
|
refs/heads/master
|
third_party/numpy/files/numpy/setupscons.py
|
42
|
#!/usr/bin/env python
from os.path import join as pjoin
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
from numpy.distutils.misc_util import scons_generate_config_py
pkgname = 'numpy'
config = Configuration(pkgname, parent_package, top_path,
setup_name = 'setupscons.py')
config.add_subpackage('distutils')
config.add_subpackage('testing')
config.add_subpackage('f2py')
config.add_subpackage('core')
config.add_subpackage('lib')
config.add_subpackage('oldnumeric')
config.add_subpackage('numarray')
config.add_subpackage('fft')
config.add_subpackage('linalg')
config.add_subpackage('random')
config.add_subpackage('ma')
config.add_subpackage('matrixlib')
config.add_subpackage('compat')
config.add_subpackage('polynomial')
config.add_subpackage('doc')
config.add_data_dir('doc')
config.add_data_dir('tests')
def add_config(*args, **kw):
# Generate __config__, handle inplace issues.
if kw['scons_cmd'].inplace:
target = pjoin(kw['pkg_name'], '__config__.py')
else:
target = pjoin(kw['scons_cmd'].build_lib, kw['pkg_name'],
'__config__.py')
scons_generate_config_py(target)
config.add_sconscript(None, post_hook = add_config)
return config
if __name__ == '__main__':
print 'This is the wrong setup.py file to run'
|
sander76/home-assistant
|
refs/heads/dev
|
homeassistant/helpers/httpx_client.py
|
5
|
"""Helper for httpx."""
from __future__ import annotations
import sys
from typing import Any, Callable
import httpx
from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE, __version__
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers.frame import warn_use
from homeassistant.loader import bind_hass
DATA_ASYNC_CLIENT = "httpx_async_client"
DATA_ASYNC_CLIENT_NOVERIFY = "httpx_async_client_noverify"
SERVER_SOFTWARE = "HomeAssistant/{0} httpx/{1} Python/{2[0]}.{2[1]}".format(
__version__, httpx.__version__, sys.version_info
)
USER_AGENT = "User-Agent"
@callback
@bind_hass
def get_async_client(hass: HomeAssistant, verify_ssl: bool = True) -> httpx.AsyncClient:
"""Return default httpx AsyncClient.
This method must be run in the event loop.
"""
key = DATA_ASYNC_CLIENT if verify_ssl else DATA_ASYNC_CLIENT_NOVERIFY
client: httpx.AsyncClient | None = hass.data.get(key)
if client is None:
client = hass.data[key] = create_async_httpx_client(hass, verify_ssl)
return client
class HassHttpXAsyncClient(httpx.AsyncClient):
"""httpx AsyncClient that suppresses context management."""
async def __aenter__(self: HassHttpXAsyncClient) -> HassHttpXAsyncClient:
"""Prevent an integration from reopen of the client via context manager."""
return self
async def __aexit__(self, *args: Any) -> None:
"""Prevent an integration from close of the client via context manager."""
@callback
def create_async_httpx_client(
hass: HomeAssistant,
verify_ssl: bool = True,
auto_cleanup: bool = True,
**kwargs: Any,
) -> httpx.AsyncClient:
"""Create a new httpx.AsyncClient with kwargs, i.e. for cookies.
If auto_cleanup is True, the client will be
automatically closed on Home Assistant shutdown.
This method must be run in the event loop.
"""
client = HassHttpXAsyncClient(
verify=verify_ssl,
headers={USER_AGENT: SERVER_SOFTWARE},
**kwargs,
)
original_aclose = client.aclose
client.aclose = warn_use( # type: ignore
client.aclose, "closes the Home Assistant httpx client"
)
if auto_cleanup:
_async_register_async_client_shutdown(hass, client, original_aclose)
return client
@callback
def _async_register_async_client_shutdown(
hass: HomeAssistant,
client: httpx.AsyncClient,
original_aclose: Callable[..., Any],
) -> None:
"""Register httpx AsyncClient aclose on Home Assistant shutdown.
This method must be run in the event loop.
"""
async def _async_close_client(event: Event) -> None:
"""Close httpx client."""
await original_aclose()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, _async_close_client)
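# --- Added usage sketch (not part of the original Home Assistant helper) ---
# An illustration of how an integration might use the shared client; the
# coroutine name and URL are hypothetical.
async def _example_fetch_status(hass: HomeAssistant) -> int:
    """Fetch a URL with the shared verified client (illustrative only)."""
    client = get_async_client(hass)
    response = await client.get("https://example.com/status")  # hypothetical URL
    return response.status_code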
|
Snifer/BurpSuite-Plugins
|
refs/heads/master
|
Sqlmap/plugins/dbms/hsqldb/takeover.py
|
7
|
#!/usr/bin/env python
"""
Copyright (c) 2006-2014 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.exception import SqlmapUnsupportedFeatureException
from plugins.generic.takeover import Takeover as GenericTakeover
class Takeover(GenericTakeover):
def __init__(self):
GenericTakeover.__init__(self)
def osCmd(self):
errMsg = "on HSQLDB it is not possible to execute commands"
raise SqlmapUnsupportedFeatureException(errMsg)
def osShell(self):
errMsg = "on HSQLDB it is not possible to execute commands"
raise SqlmapUnsupportedFeatureException(errMsg)
def osPwn(self):
errMsg = "on HSQLDB it is not possible to establish an "
errMsg += "out-of-band connection"
raise SqlmapUnsupportedFeatureException(errMsg)
def osSmb(self):
errMsg = "on HSQLDB it is not possible to establish an "
errMsg += "out-of-band connection"
raise SqlmapUnsupportedFeatureException(errMsg)
|
moijes12/oh-mainline
|
refs/heads/master
|
mysite/search/tests.py
|
8
|
# This file is part of OpenHatch.
# Copyright (C) 2010 Parker Phinney
# Copyright (C) 2010 Jack Grigg
# Copyright (C) 2009, 2010 OpenHatch, Inc.
# Copyright (C) 2010 Jessica McKellar
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from mysite.base.tests import make_twill_url, TwillTests
import mysite.base.models
import mysite.account.tests
from mysite.profile.models import Person
import mysite.profile.models
import mysite.search.view_helpers
from mysite.search.models import (Project, Bug, ProjectInvolvementQuestion,
Answer, BugAlert)
from mysite.search import views
import datetime
import logging
import mysite.project.views
from django.utils.unittest import skipIf
from django.utils.unittest import expectedFailure
import django.db
import django.conf
from django.utils import http
import json
import mock
from twill import commands as tc
from django.test import TestCase
import django.core.cache
from django.core.urlresolvers import reverse
from django.core.files.base import ContentFile
from django.contrib.auth.models import User
logger = logging.getLogger(__name__)
class SearchTest(TwillTests):
def search_via_twill(self, query=None):
search_url = "http://openhatch.org/search/"
if query:
search_url += '?q=%s' % query
tc.go(make_twill_url(search_url))
def search_via_client(self, query=None):
search_url = "/search/"
return self.client.get(search_url, {u'q': query})
def compare_lists(self, one, two):
self.assertEqual(len(one), len(two))
self.assertEqual(set(one), set(two))
def compare_lists_of_dicts(self, one, two, sort_key=None):
if sort_key is not None:
sort_fn = lambda thing: thing[sort_key]
else:
sort_fn = None
sorted_one = sorted(one, key=sort_fn)
sorted_two = sorted(two, key=sort_fn)
for k in range(len(sorted_one)):
try:
self.assertEqual(sorted_one[k], sorted_two[k])
except AssertionError:
import sys
print >> sys.stderr, sorted_one
print >> sys.stderr, sorted_two
raise
for k in range(len(sorted_two)):
try:
self.assertEqual(sorted_one[k], sorted_two[k])
except AssertionError:
import sys
print >> sys.stderr, sorted_one
print >> sys.stderr, sorted_two
raise
class TestThatQueryTokenizesRespectingQuotationMarks(TwillTests):
def test(self):
difficult = "With spaces (and parens)"
query = mysite.search.view_helpers.Query.create_from_GET_data(
{u'q': u'"%s"' % difficult})
self.assertEqual(query.terms, [difficult])
# Make there be a bug to find
project = Project.create_dummy(name=difficult)
Bug.create_dummy(project=project)
# How many bugs?
num_bugs = query.get_bugs_unordered().count()
self.assertEqual(num_bugs, 1)
class SearchResults(TwillTests):
fixtures = [u'bugs-for-two-projects.json']
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
def test_query_object_is_false_when_no_terms_or_facets(self):
query = mysite.search.view_helpers.Query.create_from_GET_data({})
self.assertFalse(query)
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
def test_show_no_bugs_if_no_query(self):
# Call up search page with no query.
response = self.client.get(u'/search/')
# The variable u'bunch_of_bugs', passed to the template, is a blank
# list.
self.assertEqual(response.context[0][u'bunch_of_bugs'], [])
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
def test_json_view(self):
tc.go(make_twill_url((
u'http://openhatch.org/search/?format=json&jsoncallback=callback&'
u'q=python')))
response = tc.show()
self.assert_(response.startswith(u'callback'))
json_string_with_parens = response.split(u'callback', 1)[1]
self.assert_(json_string_with_parens[0] == u'(')
self.assert_(json_string_with_parens[-1] == u')')
json_string = json_string_with_parens[1:-1]
objects = json.loads(json_string)
self.assert_(u'pk' in objects[0][u'bugs'][0])
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
@expectedFailure
def testPagination(self):
url = u'http://openhatch.org/search/'
tc.go(make_twill_url(url))
tc.fv(u'search_opps', u'q', u'python')
tc.submit()
# Grab descriptions of first 10 Exaile bugs
bugs = Bug.all_bugs.filter(
project__name=u'Exaile').order_by(u'-last_touched')[:10]
for bug in bugs:
tc.find(bug.description)
# Hit the next button
tc.follow(u'Next')
# Grab descriptions of next 10 Exaile bugs
bugs = Bug.all_bugs.filter(
project__name=u'Exaile').order_by(u'-last_touched')[10:20]
for bug in bugs:
tc.find(bug.description)
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
def testPaginationWithAnyFacet(self):
url = u'http://openhatch.org/search/?q=&language='
tc.go(make_twill_url(url))
bugs = Bug.all_bugs.order_by(u'-last_touched')
for bug in bugs[:10]:
tc.find(bug.description)
print "Found bug ", bug
tc.follow(u'Next')
bugs = bugs[10:20]
for bug in bugs:
tc.find(bug.description)
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
@expectedFailure
def testPaginationAndChangingSearchQuery(self):
url = u'http://openhatch.org/search/'
tc.go(make_twill_url(url))
tc.fv(u'search_opps', u'q', u'python')
tc.submit()
# Grab descriptions of first 10 Exaile bugs
bugs = Bug.all_bugs.filter(
project__name=u'Exaile').order_by(u'-last_touched')[:10]
for bug in bugs:
tc.find(bug.description)
# Hit the next button
tc.follow(u'Next')
# Grab descriptions of next 10 Exaile bugs
bugs = Bug.all_bugs.filter(
project__name=u'Exaile').order_by(u'-last_touched')[10:20]
for bug in bugs:
tc.find(bug.description)
# Now, change the query - do we stay that paginated?
tc.fv(u'search_opps', u'q', u'c#')
tc.submit()
# Grab descriptions of first 10 GNOME-Do bugs
bugs = Bug.all_bugs.filter(
project__name=u'GNOME-Do').order_by(u'-last_touched')[:10]
for bug in bugs:
tc.find(bug.description)
class SplitIntoTerms(TestCase):
def test_split_into_terms(self):
easy = '1 2 3'
self.assertEqual(
mysite.search.view_helpers.Query.split_into_terms(easy),
['1', '2', '3'])
easy = '"1"'
self.assertEqual(
mysite.search.view_helpers.Query.split_into_terms(easy),
['1'])
easy = 'c#'
self.assertEqual(
mysite.search.view_helpers.Query.split_into_terms(easy),
['c#'])
class IconGetsScaled(SearchTest):
@skipIf(not mysite.base.depends.Image, ("Skipping this test. Install PIL "
"to run it; see "
"ADVANCED_INSTALLATION.mkd."))
def test_project_scales_its_icon_down_for_use_in_badge(self):
'''This test shows that the Project class successfully stores
a scaled-down version of its icon in the icon_smaller_for_badge
field.'''
# Step 1: Create a project with an icon
p = mysite.search.models.Project.create_dummy()
image_data = open(
mysite.account.tests.photo('static/sample-photo.png')).read()
p.icon_raw.save('', ContentFile(image_data))
p.save()
# Assertion 1: p.icon_smaller_for_badge is false (since not scaled yet)
self.assertFalse(p.icon_smaller_for_badge)
# Step 2: Call the scaling method
p.update_scaled_icons_from_self_icon()
p.save()
# Assertion 2: Verify that it is now a true value
self.assert_(p.icon_smaller_for_badge,
"Expected p.icon_smaller_for_badge to be a true value.")
# Assertion 3: Verify that it has the right width
self.assertEqual(
p.icon_smaller_for_badge.width, 40,
"Expected p.icon_smaller_for_badge to be 40 pixels wide.")
@skipIf(not mysite.base.depends.Image, ("Skipping this test. Install PIL "
"to run it; see "
"ADVANCED_INSTALLATION.mkd."))
def test_short_icon_is_scaled_correctly(self):
        '''Sometimes icons are rectangular and wider than they are tall. These
        icons shouldn't be squashed into a square, but scaled while preserving
        their original aspect ratios.'''
# Step 1: Create a project with an icon
p = mysite.search.models.Project.create_dummy()
# account.tests.photo finds the right path.
image_data = open(mysite.account.tests.photo(
'static/images/icons/test-project-icon-64px-by-18px.png')).read()
p.icon_raw.save('', ContentFile(image_data))
p.save()
# Assertion 1: p.icon_smaller_for_badge is false (since not scaled yet)
self.assertFalse(p.icon_smaller_for_badge)
# Step 2: Call the scaling method
p.update_scaled_icons_from_self_icon()
p.save()
# Assertion 2: Verify that it is now a true value
self.assert_(p.icon_smaller_for_badge,
"Expected p.icon_smaller_for_badge to be a true value.")
# Assertion 3: Verify that it has the right width
self.assertEqual(
p.icon_smaller_for_badge.width, 40,
"Expected p.icon_smaller_for_badge to be 40 pixels wide.")
        # Assertion 4: Verify that it has the right height
# If we want to scale exactly we'll get 11.25 pixels, which rounds to
# 11.
self.assertEqual(p.icon_smaller_for_badge.height, 11)
class SearchOnFullWords(SearchTest):
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
@expectedFailure
def test_find_perl_not_properly(self):
Project.create_dummy()
Bug.create_dummy(description='properly')
perl_bug = Bug.create_dummy(description='perl')
self.assertEqual(Bug.all_bugs.all().count(), 2)
results = mysite.search.view_helpers.Query(
terms=['perl']).get_bugs_unordered()
self.assertEqual(list(results), [perl_bug])
class SearchTemplateDecodesQueryString(SearchTest):
def test_facets_appear_in_search_template_context(self):
response = self.client.get('/search/', {'language': 'Python'})
expected_facets = {'language': 'Python'}
self.assertEqual(response.context['query'].active_facet_options,
expected_facets)
class FacetsFilterResults(SearchTest):
def test_facets_filter_results(self):
facets = {u'language': u'Python'}
# Those facets should pick up this bug:
python_project = Project.create_dummy(language='Python')
python_bug = Bug.create_dummy(project=python_project)
# But not this bug
not_python_project = Project.create_dummy(language='Nohtyp')
Bug.create_dummy(project=not_python_project)
results = mysite.search.view_helpers.Query(
terms=[], active_facet_options=facets).get_bugs_unordered()
self.assertEqual(list(results), [python_bug])
def test_any_facet(self):
"""In the search_index() method in the search module, the
        truthiness of the Query object is evaluated to determine whether
or not any results should be returned.
Here, we test that if a facet in the GET data is the empty string,
the query is still considered to be True. A facet
set to the empty string is used to signify that the user selected the
"any" option on the search page.
If a facet is not provided at all, the user did not select anything
on the search page, meaning no results should be returned.
"""
language_query = mysite.search.view_helpers.Query.create_from_GET_data(
{'language': ''})
project_query = mysite.search.view_helpers.Query.create_from_GET_data(
{'project': ''})
self.assertTrue(language_query)
self.assertTrue(project_query)
class QueryGetPossibleFacets(SearchTest):
"""Ask a query, what facets are you going to show on the left?
E.g., search for gtk, it says C (541)."""
def test_get_possible_facets(self):
# Create three projects
project1 = Project.create_dummy(language=u'c')
project2 = Project.create_dummy(language=u'd')
project3 = Project.create_dummy(language=u'e')
# Give each project a bug
Bug.create_dummy(project=project1, description=u'bug',
good_for_newcomers=True)
Bug.create_dummy(project=project2, description=u'bug')
Bug.create_dummy(project=project3, description=u'bAg')
# Search for bugs matching "bug", while constraining to the language C
query = mysite.search.view_helpers.Query(
terms=[u'bug'],
terms_string=u'bug',
active_facet_options={u'language': u'c'})
possible_facets = dict(query.get_possible_facets())
self.assertEqual(query.get_bugs_unordered().count(), 1)
# We expect that, language-wise, you should be able to select any of
# the other languages, or 'deselect' your language constraint.
self.compare_lists_of_dicts(
possible_facets[u'language'][u'options'],
[
{u'name': u'c', u'query_string': u'q=bug&language=c',
u'is_active': True, u'count': 1},
{u'name': u'd', u'query_string': u'q=bug&language=d',
u'is_active': False, u'count': 1},
# e is excluded because its bug (u'bAg') doesn't match the
# term 'bug'
],
sort_key=u'name'
)
self.compare_lists_of_dicts(
possible_facets[u'toughness'][u'options'],
[
# There's no 'any' option for toughness unless you've
# selected a specific toughness value
{u'name': u'bitesize',
u'is_active': False,
u'query_string': u'q=bug&toughness=bitesize&language=c',
u'count': 1},
],
sort_key=u'name'
)
self.assertEqual(
possible_facets['language']['the_any_option'],
{u'name': u'any', u'query_string': u'q=bug&language=',
u'is_active': False, u'count': 2},
)
def test_possible_facets_always_includes_active_facet(self):
# even when active facet has no results.
c = Project.create_dummy(language=u'c')
Project.create_dummy(language=u'd')
Project.create_dummy(language=u'e')
Bug.create_dummy(project=c, description=u'bug')
query = mysite.search.view_helpers.Query.create_from_GET_data(
{u'q': u'nothing matches this', u'language': u'c'})
language_options = dict(
query.get_possible_facets())['language']['options']
language_options_named_c = [
opt for opt in language_options if opt['name'] == 'c']
self.assertEqual(len(language_options_named_c), 1)
class SingleTerm(SearchTest):
"""Search for just a single term."""
def setUp(self):
SearchTest.setUp(self)
python_project = Project.create_dummy(language='Python')
perl_project = Project.create_dummy(language='Perl')
c_project = Project.create_dummy(language='C')
# bitesize, matching bug in Python
Bug.create_dummy(project=python_project, good_for_newcomers=True,
description='screensaver')
# nonbitesize, matching bug in Python
Bug.create_dummy(project=python_project, good_for_newcomers=False,
description='screensaver')
# nonbitesize, matching bug in Perl
Bug.create_dummy(project=perl_project, good_for_newcomers=False,
description='screensaver')
# nonbitesize, nonmatching bug in C
Bug.create_dummy(project=c_project, good_for_newcomers=False,
description='toast')
GET_data = {'q': 'screensaver'}
query = mysite.search.view_helpers.Query.create_from_GET_data(GET_data)
self.assertEqual(query.terms, ['screensaver'])
self.assertFalse(query.active_facet_options) # No facets
self.output_possible_facets = dict(query.get_possible_facets())
def test_toughness_facet(self):
# What options do we expect?
toughness_option_bitesize = {
'name': 'bitesize',
'count': 1,
'is_active': False,
'query_string': 'q=screensaver&toughness=bitesize'
}
toughness_option_any = {
'name': 'any',
'count': 3,
'is_active': True,
'query_string': 'q=screensaver&toughness='
}
expected_toughness_facet_options = [toughness_option_bitesize]
self.assertEqual(
self.output_possible_facets['toughness']['options'],
expected_toughness_facet_options
)
self.assertEqual(
self.output_possible_facets['toughness']['the_any_option'],
toughness_option_any
)
def test_languages_facet(self):
# What options do we expect?
languages_option_python = {
'name': 'Python',
'count': 2,
'is_active': False,
'query_string': 'q=screensaver&language=Python'
}
languages_option_perl = {
'name': 'Perl',
'count': 1,
'is_active': False,
'query_string': 'q=screensaver&language=Perl'
}
languages_option_any = {
'name': 'any',
'count': 3,
'is_active': True,
'query_string': 'q=screensaver&language='
}
expected_languages_facet_options = [
languages_option_python,
languages_option_perl,
]
self.compare_lists_of_dicts(
self.output_possible_facets['language']['options'],
expected_languages_facet_options
)
self.assertEqual(
self.output_possible_facets['language']['the_any_option'],
languages_option_any)
class SingleFacetOption(SearchTest):
"""Browse bugs matching a single facet option."""
def setUp(self):
SearchTest.setUp(self)
python_project = Project.create_dummy(language='Python')
perl_project = Project.create_dummy(language='Perl')
c_project = Project.create_dummy(language='C')
# bitesize, matching bug in Python
Bug.create_dummy(project=python_project, good_for_newcomers=True,
description='screensaver')
# nonbitesize, matching bug in Python
Bug.create_dummy(project=python_project, good_for_newcomers=False,
description='screensaver')
# nonbitesize, matching bug in Perl
Bug.create_dummy(project=perl_project, good_for_newcomers=False,
description='screensaver')
# nonbitesize, nonmatching bug in C
Bug.create_dummy(project=c_project, good_for_newcomers=False,
description='toast')
GET_data = {u'language': u'Python'}
query = mysite.search.view_helpers.Query.create_from_GET_data(GET_data)
self.assertFalse(query.terms) # No terms
self.assertEqual(query.active_facet_options, {u'language': u'Python'})
self.output_possible_facets = dict(query.get_possible_facets())
def test_toughness_facet(self):
# What options do we expect?
toughness_option_bitesize = {
u'name': u'bitesize',
u'count': 1,
u'is_active': False,
u'query_string': u'q=&toughness=bitesize&language=Python'
}
toughness_option_any = {
u'name': u'any',
u'count': 2,
u'is_active': True,
u'query_string': u'q=&toughness=&language=Python'
}
expected_toughness_facet_options = [toughness_option_bitesize]
self.compare_lists_of_dicts(
self.output_possible_facets[u'toughness'][u'options'],
expected_toughness_facet_options
)
self.assertEqual(
self.output_possible_facets[u'toughness'][u'the_any_option'],
toughness_option_any
)
def test_languages_facet(self):
# What options do we expect?
languages_option_python = {
u'name': u'Python',
u'count': 2,
u'is_active': True,
u'query_string': u'q=&language=Python'
}
languages_option_perl = {
u'name': u'Perl',
u'count': 1,
u'is_active': False,
u'query_string': u'q=&language=Perl'
}
languages_option_c = {
u'name': u'C',
u'count': 1,
u'is_active': False,
u'query_string': u'q=&language=C'
}
languages_option_any = {
u'name': u'any',
u'count': 4,
u'is_active': False,
u'query_string': u'q=&language='
}
expected_languages_facet_options = [
languages_option_python,
languages_option_perl,
languages_option_c,
]
self.compare_lists_of_dicts(
self.output_possible_facets[u'language'][u'options'],
expected_languages_facet_options
)
self.assertEqual(
self.output_possible_facets[u'language'][u'the_any_option'],
languages_option_any,
)
class QueryGetToughnessFacetOptions(SearchTest):
def test_get_toughness_facet_options(self):
        # We create three bugs, two of which are "bitesize", but constrain
        # the Query so that we're only looking at bugs in Python.
        # Since only one of the bitesize bugs is in a Python project (the
        # other is in a project whose language is Perl), we expect only 1
        # bitesize bug to show up, and 2 total bugs.
python_project = Project.create_dummy(language=u'Python')
perl_project = Project.create_dummy(language=u'Perl')
Bug.create_dummy(project=python_project, good_for_newcomers=True)
Bug.create_dummy(project=python_project, good_for_newcomers=False)
Bug.create_dummy(project=perl_project, good_for_newcomers=True)
query = mysite.search.view_helpers.Query(
active_facet_options={u'language': u'Python'},
terms_string=u'')
output = query.get_facet_options(u'toughness', [u'bitesize', u''])
bitesize_dict = [d for d in output if d[u'name'] == u'bitesize'][0]
all_dict = [d for d in output if d[u'name'] == u'any'][0]
self.assertEqual(bitesize_dict[u'count'], 1)
self.assertEqual(all_dict[u'count'], 2)
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
@expectedFailure
def test_get_toughness_facet_options_with_terms(self):
python_project = Project.create_dummy(language=u'Python')
perl_project = Project.create_dummy(language=u'Perl')
Bug.create_dummy(project=python_project, good_for_newcomers=True,
description=u'a')
Bug.create_dummy(project=python_project, good_for_newcomers=False,
description=u'a')
Bug.create_dummy(project=perl_project, good_for_newcomers=True,
description=u'b')
GET_data = {u'q': u'a'}
query = mysite.search.view_helpers.Query.create_from_GET_data(GET_data)
output = query.get_facet_options(u'toughness', [u'bitesize', u''])
bitesize_dict = [d for d in output if d[u'name'] == u'bitesize'][0]
all_dict = [d for d in output if d[u'name'] == u'any'][0]
self.assertEqual(bitesize_dict[u'count'], 1)
self.assertEqual(all_dict[u'count'], 2)
class QueryGetPossibleLanguageFacetOptionNames(SearchTest):
def setUp(self):
SearchTest.setUp(self)
python_project = Project.create_dummy(language=u'Python')
perl_project = Project.create_dummy(language=u'Perl')
c_project = Project.create_dummy(language=u'C')
unknown_project = Project.create_dummy(language=u'')
Bug.create_dummy(project=python_project, title=u'a')
Bug.create_dummy(project=perl_project, title=u'a')
Bug.create_dummy(project=c_project, title=u'b')
Bug.create_dummy(project=unknown_project, title=u'unknowable')
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
@expectedFailure
def test_with_term(self):
        # In the setUp we create four bugs, but only two of them would match
        # a search for 'a'. They are in two different languages, so let's make
        # sure that we show only those two languages.
GET_data = {u'q': u'a'}
query = mysite.search.view_helpers.Query.create_from_GET_data(GET_data)
language_names = query.get_language_names()
self.assertEqual(
sorted(language_names),
sorted([u'Python', u'Perl']))
def test_with_active_language_facet(self):
        # In the setUp we create bugs in four languages (one project has an
        # empty language, which is reported as 'Unknown').
        # Here, we verify that the get_language_names() method correctly
        # returns all four languages, even though the GET data shows that
        # we are browsing by language.
GET_data = {u'language': u'Python'}
query = mysite.search.view_helpers.Query.create_from_GET_data(GET_data)
language_names = query.get_language_names()
self.assertEqual(
sorted(language_names),
sorted([u'Python', u'Perl', u'C', u'Unknown']))
def test_with_language_as_unknown(self):
        # In the setUp we create bugs in four languages (including 'Unknown').
        # Here, we verify that the get_language_names() method correctly
        # returns all four languages, even though the GET data shows that
        # we are browsing by the 'Unknown' language.
GET_data = {u'language': u'Unknown'}
query = mysite.search.view_helpers.Query.create_from_GET_data(GET_data)
language_names = query.get_language_names()
self.assertEqual(
sorted(language_names),
sorted([u'Python', u'Perl', u'C', u'Unknown']))
def test_with_language_as_unknown_and_query(self):
        # In the setUp we create one bug in a project with no language
        # (reported as 'Unknown').
        # Here, we verify that filtering by the 'Unknown' language together
        # with a search term still finds that one matching bug.
GET_data = {u'language': u'Unknown', u'q': u'unknowable'}
query = mysite.search.view_helpers.Query.create_from_GET_data(GET_data)
match_count = query.get_bugs_unordered().count()
self.assertEqual(match_count, 1)
class QueryGetPossibleProjectFacetOptions(SearchTest):
def setUp(self):
SearchTest.setUp(self)
projects = [
Project.create_dummy(name=u'Miro'),
Project.create_dummy(name=u'Dali'),
Project.create_dummy(name=u'Magritte')
]
for p in projects:
Bug.create_dummy(project=p)
def test_select_a_project_and_see_other_project_options(self):
GET_data = {u'project': u'Miro'}
query = mysite.search.view_helpers.Query.create_from_GET_data(GET_data)
possible_project_names = [x['name'] for x in dict(
query.get_possible_facets())['project']['options']]
self.assertEqual(
sorted(possible_project_names),
sorted(list(Project.objects.values_list('name', flat=True))))
class QueryContributionType(SearchTest):
def setUp(self):
SearchTest.setUp(self)
python_project = Project.create_dummy(language=u'Python')
perl_project = Project.create_dummy(language=u'Perl')
c_project = Project.create_dummy(language=u'C')
Bug.create_dummy(project=python_project, title=u'a')
Bug.create_dummy(project=perl_project, title=u'a',
concerns_just_documentation=True)
Bug.create_dummy(project=c_project, title=u'b')
def test_contribution_type_is_an_available_facet(self):
GET_data = {}
starting_query = mysite.search.view_helpers.Query.create_from_GET_data(
GET_data)
self.assert_(
u'contribution_type' in dict(starting_query.get_possible_facets()))
def test_contribution_type_options_are_reasonable(self):
GET_data = {}
starting_query = mysite.search.view_helpers.Query.create_from_GET_data(
GET_data)
cto = starting_query.get_facet_options(u'contribution_type',
[u'documentation'])
documentation_one, = [k for k in cto if k[u'name'] == u'documentation']
any_one = starting_query.get_facet_options(
u'contribution_type', [u''])[0]
self.assertEqual(documentation_one[u'count'], 1)
self.assertEqual(any_one[u'count'], 3)
class QueryProject(SearchTest):
def setUp(self):
SearchTest.setUp(self)
python_project = Project.create_dummy(language=u'Python',
name='thingamajig')
c_project = Project.create_dummy(language=u'C',
name='thingamabob')
Bug.create_dummy(project=python_project, title=u'a')
Bug.create_dummy(project=python_project, title=u'a',
concerns_just_documentation=True)
Bug.create_dummy(project=c_project, title=u'b')
def test_project_is_an_available_facet(self):
GET_data = {}
starting_query = mysite.search.view_helpers.Query.create_from_GET_data(
GET_data)
self.assert_(u'project' in dict(starting_query.get_possible_facets()))
def test_contribution_type_options_are_reasonable(self):
GET_data = {}
starting_query = mysite.search.view_helpers.Query.create_from_GET_data(
GET_data)
cto = starting_query.get_facet_options(u'project',
[u'thingamajig',
u'thingamabob'])
jig_ones, = [k for k in cto if k[u'name'] == u'thingamajig']
any_one = starting_query.get_facet_options(u'project', [u''])[0]
self.assertEqual(jig_ones[u'count'], 2)
self.assertEqual(any_one[u'count'], 3)
class QueryStringCaseInsensitive(SearchTest):
def test_Language(self):
"""Do we redirect queries that use non-lowercase facet keys to pages
that use lowercase facet keys?"""
redirects = self.client.get(
u'/search/', {u'LANguaGE': u'pytHon'}, follow=True).redirect_chain
self.assertEqual(
redirects, [(u'http://testserver/search/?language=pytHon', 302)])
class HashQueryData(SearchTest):
def test_queries_with_identical_data_hash_alike(self):
GET_data = {u'q': u'socialguides', u'language': u'looxii'}
one = mysite.search.view_helpers.Query.create_from_GET_data(GET_data)
two = mysite.search.view_helpers.Query.create_from_GET_data(GET_data)
self.assertEqual(one.get_sha1(), two.get_sha1())
def test_queries_with_equiv_data_expressed_differently_hash_alike(self):
GET_data_1 = {u'q': u'socialguides zetapage', u'language': u'looxii'}
GET_data_2 = {u'q': u'zetapage socialguides', u'language': u'looxii'}
one = mysite.search.view_helpers.Query.create_from_GET_data(GET_data_1)
two = mysite.search.view_helpers.Query.create_from_GET_data(GET_data_2)
self.assertEqual(one.get_sha1(), two.get_sha1())
def test_queries_with_different_data_hash_differently(self):
GET_data_1 = {u'q': u'socialguides zetapage', u'language': u'looxii'}
GET_data_2 = {u'q': u'socialguides ninjapost', u'language': u'looxii'}
one = mysite.search.view_helpers.Query.create_from_GET_data(GET_data_1)
two = mysite.search.view_helpers.Query.create_from_GET_data(GET_data_2)
self.assertNotEqual(one.get_sha1(), two.get_sha1())
# How on earth do we test for collisions?
class FakeCache(object):
def __init__(self):
self._data = {}
def get(self, key):
return self._data.get(key, None)
def set(self, key, value):
self._data[key] = value
class QueryGrabHitCount(SearchTest):
@mock.patch('django.core.cache')
def test_eventhive_grab_hitcount_once_stored(self, fake_cache):
fake_cache.cache = FakeCache()
data = {u'q': u'eventhive', u'language': u'shoutNOW'}
query = mysite.search.view_helpers.Query.create_from_GET_data(data)
stored_hit_count = 10
# Get the cache key used to store the hit count.
hit_count_cache_key = query.get_hit_count_cache_key()
# Set the cache value.
django.core.cache.cache.set(hit_count_cache_key, stored_hit_count)
# Test that it is fetched correctly.
self.assertEqual(stored_hit_count,
django.core.cache.cache.get(hit_count_cache_key))
self.assertEqual(query.get_or_create_cached_hit_count(),
stored_hit_count)
@mock.patch('django.core.cache')
def test_shoutnow_cache_hitcount_on_grab(self, fake_cache):
fake_cache.cache = FakeCache()
project = Project.create_dummy(language=u'shoutNOW')
Bug.create_dummy(project=project)
data = {u'language': u'shoutNOW'}
query = mysite.search.view_helpers.Query.create_from_GET_data(data)
expected_hit_count = 1
self.assertEqual(query.get_or_create_cached_hit_count(),
expected_hit_count)
# Get the cache key used to store the hit count.
hit_count_cache_key = query.get_hit_count_cache_key()
# Get the cache value.
stored_hit_count = django.core.cache.cache.get(hit_count_cache_key)
logger.debug("Stored: %s" % stored_hit_count)
# Test that it was stored correctly.
self.assertEqual(stored_hit_count, expected_hit_count)
class ClearCacheWhenBugsChange(SearchTest):
@expectedFailure
def test_cached_cleared_after_bug_save_or_delete(self):
data = {u'language': u'shoutNOW'}
query = mysite.search.view_helpers.Query.create_from_GET_data(data)
old_hcc_timestamp = (
mysite.base.models.Timestamp.get_timestamp_for_string(
'hit_count_cache_timestamp')
)
# Cache entry created after hit count retrieval
query.get_or_create_cached_hit_count()
new_hcc_timestamp = (
mysite.base.models.Timestamp.get_timestamp_for_string(
'hit_count_cache_timestamp')
)
self.assertEqual(old_hcc_timestamp, new_hcc_timestamp)
# Cache cleared after bug save
project = Project.create_dummy(language=u'shoutNOW')
bug = Bug.create_dummy(project=project)
newer_hcc_timestamp = (
mysite.base.models.Timestamp.get_timestamp_for_string(
'hit_count_cache_timestamp')
)
self.assertNotEqual(new_hcc_timestamp, newer_hcc_timestamp)
# Cache entry created after hit count retrieval
query.get_or_create_cached_hit_count()
newest_hcc_timestamp = (
mysite.base.models.Timestamp.get_timestamp_for_string(
'hit_count_cache_timestamp')
)
self.assertEqual(newer_hcc_timestamp, newest_hcc_timestamp)
# Cache cleared after bug deletion
bug.delete()
newester_hcc_timestamp = (
mysite.base.models.Timestamp.get_timestamp_for_string(
'hit_count_cache_timestamp')
)
self.assertNotEqual(newest_hcc_timestamp, newester_hcc_timestamp)
class DontRecommendFutileSearchTerms(TwillTests):
def test_removal_of_futile_terms(self):
mysite.search.models.Bug.create_dummy_with_project(
description=u'useful')
self.assertEqual(
Person.only_terms_with_results([u'useful', u'futile']),
[u'useful'])
class PublicizeBugTrackerIndex(SearchTest):
def setUp(self):
SearchTest.setUp(self)
self.search_page_response = self.client.get(
reverse(mysite.search.views.search_index))
self.bug_tracker_count = mysite.search.view_helpers.get_project_count()
def test_search_template_contains_bug_tracker_count(self):
self.assertEqual(
self.search_page_response.context[0][u'project_count'],
self.bug_tracker_count)
class TestPotentialMentors(TwillTests):
fixtures = ['user-paulproteus', 'person-paulproteus']
def test(self):
'''Create a Banshee project mentor and verify that the Banshee project
has one mentor.'''
banshee = Project.create_dummy(name='Banshee', language='C#')
can_mentor, _ = mysite.profile.models.TagType.objects.get_or_create(
name=u'can_mentor')
willing_to_mentor_banshee, _ = (
mysite.profile.models.Tag.objects.
get_or_create(tag_type=can_mentor, text=u'Banshee'))
link = mysite.profile.models.Link_Person_Tag(
person=Person.objects.get(user__username=u'paulproteus'),
tag=willing_to_mentor_banshee)
link.save()
banshee_mentor_count = banshee.mentor_count
self.assertEqual(1, banshee_mentor_count)
class SuggestAlertOnLastResultsPage(TwillTests):
fixtures = ['user-paulproteus']
def exercise_alert(self, anonymous=True):
"""The 'anonymous' parameter allows the alert functionality to be
tested for anonymous and logged-in users."""
if not anonymous:
self.login_with_twill()
# Create some dummy data
p = Project.create_dummy(language='ruby')
# 15 bugs matching 'ruby'
for i in range(15):
b = Bug.create_dummy(description='ruby')
b.project = p
b.save()
        # Visit the first page of a volunteer opportunity search results listing.
opps_view = mysite.search.views.search_index
query = u'ruby'
opps_query_string = {u'q': query, u'start': 1, u'end': 10}
opps_url = make_twill_url('http://openhatch.org' + reverse(opps_view)
+ '?' + http.urlencode(opps_query_string))
tc.go(opps_url)
# Make sure we *don't* have the comment that flags this as a page that
# offers an email alert subscription button
tc.notfind(
"this page should offer a link to sign up for an email alert")
# Visit the last page of results
GET = {u'q': query, u'start': 11, u'end': 20}
query_string = http.urlencode(GET)
opps_url = make_twill_url(
'http://openhatch.org' + reverse(opps_view) + '?' + query_string)
tc.go(opps_url)
# make sure we /do/ have the comment that flags this as a page that
# offers an email alert subscription button
tc.find("this page should offer a link to sign up for an email alert")
if not anonymous:
# if the user is logged in, make sure that we have autopopulated
# the form with her email address
tc.find(User.objects.get(username='paulproteus').email)
# Submit the 'alert' form.
email_address = 'yetanother@ema.il'
tc.fv('alert', 'email', email_address)
tc.submit()
if anonymous:
client = self.client
else:
client = self.login_with_client()
alert_data_in_form = {
'query_string': query_string,
'how_many_bugs_at_time_of_request':
Bug.open_ones.filter(project=p).count(),
'email': email_address,
}
        # Twill fails here for some reason, so let's continue the journey with
        # Django's built-in test client.
response = client.post(
reverse(mysite.search.views.subscribe_to_bug_alert_do),
alert_data_in_form)
# This response should be a HTTP redirect instruction
self.assertEqual(response.status_code, 302)
redirect_target_url = response._headers['location'][1]
self.assert_(query_string in redirect_target_url)
        # Follow the redirect back to the search results page
response = client.get(redirect_target_url)
self.assertContains(
response,
"this page should confirm that an email alert has been registered")
# At this point, make sure that the DB contains a record of
# * What the query was.
# * When the request was made.
# * How many bugs were returned by the query at the time of
# request.
# There should be only one alert
all_alerts = BugAlert.objects.all()
self.assertEqual(all_alerts.count(), 1)
alert_record = all_alerts[0]
self.assert_(alert_record)
assert_that_record_has_this_data = alert_data_in_form
# For the logged-in user, also check that the record contains the
# identity of the user who made the alert request.
if not anonymous:
assert_that_record_has_this_data['user'] = (
User.objects.get(username='paulproteus'))
for key, expected_value in assert_that_record_has_this_data.items():
self.assertEqual(
alert_record.__getattribute__(key), expected_value,
'alert.%s = %s not (expected) %s' % (
key, alert_record.__getattribute__(key), expected_value)
)
# run the above test for our two use cases: logged in and not
def test_alert_anon(self):
self.exercise_alert(anonymous=True)
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
@expectedFailure
def test_alert_logged_in(self):
self.exercise_alert(anonymous=False)
class DeleteAnswer(TwillTests):
fixtures = ['user-paulproteus']
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
@expectedFailure
def test_delete_paragraph_answer(self):
# create dummy question
p = Project.create_dummy(name='Ubuntu')
question__pk = 0
q = ProjectInvolvementQuestion.create_dummy(
pk=question__pk, is_bug_style=False)
# create our dummy answer
a = Answer.create_dummy(
            text='i am saying things',
question=q,
project=p,
author=User.objects.get(username='paulproteus'))
# delete our answer
POST_data = {
'answer__pk': a.pk,
}
POST_handler = reverse(mysite.project.views.delete_paragraph_answer_do)
response = self.login_with_client().post(POST_handler, POST_data)
# go back to the project page and make sure that our answer isn't there
# anymore
project_url = p.get_url()
self.assertRedirects(response, project_url)
project_page = self.login_with_client().get(project_url)
self.assertNotContains(project_page, a.text)
# and make sure our answer isn't in the db anymore
self.assertEqual(Answer.objects.filter(pk=a.pk).count(), 0)
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
@expectedFailure
def test_delete_bug_answer(self):
# create dummy question
p = Project.create_dummy(name='Ubuntu')
        # It's important that this pk corresponds to the pk of an actual
        # bug-style question, as specified in our view; otherwise,
        # get_or_create will try to create one, but it won't be able to
        # because of a unique key error.
question__pk = 2
q = ProjectInvolvementQuestion.create_dummy(
pk=question__pk, is_bug_style=True)
# create our dummy answer
a = Answer.create_dummy(
title='i want this bug fixed',
text='for these reasons',
question=q, project=p,
author=User.objects.get(username='paulproteus'))
# delete our answer
POST_data = {
'answer__pk': a.pk,
}
POST_handler = reverse(mysite.project.views.delete_paragraph_answer_do)
response = self.login_with_client().post(POST_handler, POST_data)
# go back to the project page and make sure that our answer isn't there
# anymore
project_url = p.get_url()
self.assertRedirects(response, project_url)
project_page = self.login_with_client().get(project_url)
self.assertNotContains(project_page, a.title)
# and make sure our answer isn't in the db anymore
self.assertEqual(Answer.objects.filter(pk=a.pk).count(), 0)
class CreateBugAnswer(TwillTests):
fixtures = ['user-paulproteus']
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
@expectedFailure
def test_create_bug_answer(self):
# go to the project page
p = Project.create_dummy(name='Ubuntu')
question__pk = 1
question = ProjectInvolvementQuestion.create_dummy(
key_string='non_code_participation', is_bug_style=True)
question.save()
title = 'omfg i wish this bug would go away'
text = 'kthxbai'
POST_data = {
'project__pk': p.pk,
'question__pk': str(question__pk),
'answer__title': title,
'answer__text': text
}
POST_handler = reverse(mysite.project.views.create_answer_do)
response = self.login_with_client().post(POST_handler, POST_data)
# try to get the BugAnswer which we just submitted from the database
our_bug_answer = Answer.objects.get(title=title)
# make sure it has the right attributes
self.assertEqual(our_bug_answer.text, text)
self.assertEqual(our_bug_answer.question.pk, question__pk)
self.assertEqual(our_bug_answer.project.pk, p.pk)
project_url = p.get_url()
self.assertRedirects(response, project_url)
project_page = self.login_with_client().get(project_url)
# make sure that our data shows up on the page
self.assertContains(project_page, title)
self.assertContains(project_page, text)
class WeTakeOwnershipOfAnswersAtLogin(TwillTests):
fixtures = ['user-paulproteus', 'person-paulproteus']
def test_create_answer_but_take_ownership_at_login_time(self):
session = {}
# Create the Answer object, but set its User to None
answer = Answer.create_dummy()
answer.author = None
answer.is_published = False
answer.save()
        # Verify that the Answer object is not available via Answer.objects
self.assertFalse(Answer.objects.all())
# Store the Answer IDs in the session
mysite.project.view_helpers.note_in_session_we_control_answer_id(
session, answer.id)
self.assertEqual(session['answer_ids_that_are_ours'], [answer.id])
        # If you want to look at those answers, you can do so this way:
stored_answers = (mysite.project.view_helpers.
get_unsaved_answers_from_session(session))
self.assertEqual([answer.id for answer in stored_answers],
[answer.id])
        # Verify that the Answer object is still not available via Answer.objects
self.assertFalse(Answer.objects.all())
# At login time, take ownership of those Answer IDs
mysite.project.view_helpers.take_control_of_our_answers(
User.objects.get(username='paulproteus'), session)
# And now we own it!
self.assertEqual(Answer.objects.all().count(), 1)
class CreateAnonymousAnswer(TwillTests):
fixtures = ['user-paulproteus']
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
@expectedFailure
def test_create_answer_anonymously(self):
# Steps for this test
# 1. User fills in the form anonymously
# 2. We test that the Answer is not yet saved
# 3. User logs in
# 4. We test that the Answer is saved
p = Project.create_dummy(name='Myproject')
q = ProjectInvolvementQuestion.create_dummy(
key_string='where_to_start', is_bug_style=False)
# Do a GET on the project page to prove cookies work.
self.client.get(p.get_url())
# POST some text to the answer creation post handler
answer_text = ('Help produce official documentation, share the '
'solution to a problem, or check, proof and test '
'other documents for accuracy.')
POST_data = {
'project__pk': p.pk,
'question__pk': q.pk,
'answer__text': answer_text,
}
response = self.client.post(
reverse(mysite.project.views.create_answer_do),
POST_data, follow=True)
self.assertEqual(response.redirect_chain, [('http://testserver/account/login/?next=%2Fprojects%2FMyproject', 302)])
# If this were an Ajaxy post handler, we might assert something about
# the response, like
# self.assertEqual(response.content, '1')
# check that the db contains a record with this text
try:
record = Answer.all_even_unowned.get(
text=POST_data['answer__text'])
except Answer.DoesNotExist:
print "All Answers:", Answer.all_even_unowned.all()
raise Answer.DoesNotExist
self.assertEqual(record.project, p)
self.assertEqual(record.question, q)
self.assertFalse(Answer.objects.all()) # it's unowned
# Now, the session will know about the answer, but the answer will
# not be published.
# Visit the login page, assert that the page contains the text of the
# answer.
response = self.client.get(reverse('oh_login'))
self.assertContains(response, POST_data['answer__text'])
# But when the user is logged in and *then* visits the project page
login_worked = self.client.login(
username='paulproteus',
password="paulproteus's unbreakable password")
self.assert_(login_worked)
self.client.get(p.get_url())
# Now, the Answer should have an author whose username is paulproteus
answer = Answer.objects.get()
self.assertEqual(answer.text, POST_data['answer__text'])
self.assertEqual(answer.author.username, 'paulproteus')
# Finally, go to the project page and make sure that our Answer has
# appeared
response = self.client.get(p.get_url())
self.assertContains(response, answer_text)
class CreateAnswer(TwillTests):
fixtures = ['user-paulproteus']
def test_create_answer(self):
p = Project.create_dummy()
q = ProjectInvolvementQuestion.create_dummy(
key_string='where_to_start', is_bug_style=False)
# POST some text to the answer creation post handler
POST_data = {
'project__pk': p.pk,
'question__pk': q.pk,
'answer__text': ('Help produce official documentation, share the '
'solution to a problem, or check, proof and '
'test other documents for accuracy.'),
}
self.login_with_client().post(
reverse(mysite.project.views.create_answer_do), POST_data)
# If this were an Ajaxy post handler, we might assert something about
# the response, like
# self.assertEqual(response.content, '1')
# check that the db contains a record with this text
try:
record = Answer.objects.get(text=POST_data['answer__text'])
except Answer.DoesNotExist:
print "All Answers:", Answer.objects.all()
raise Answer.DoesNotExist
self.assertEqual(record.author,
User.objects.get(username='paulproteus'))
self.assertEqual(record.project, p)
self.assertEqual(record.question, q)
# check that the project page now includes this text
project_page = self.client.get(p.get_url())
self.assertContains(project_page, POST_data['answer__text'])
self.assertContains(project_page, record.author.username)
@skipIf(django.db.connection.vendor == 'sqlite',
"Skipping because using sqlite database")
@expectedFailure
def test_multiparagraph_answer(self):
"""
If a multi-paragraph answer is submitted, display it as a
multi-paragraph answer.
"""
# go to the project page
p = Project.create_dummy(name='Ubuntu')
q = ProjectInvolvementQuestion.create_dummy(
key_string='where_to_start', is_bug_style=False)
q.save()
text = ['This is a multiparagraph answer.',
'This is the second paragraph.',
'This is the third paragraph.']
POST_data = {
'project__pk': p.pk,
'question__pk': q.pk,
'answer__text': "\n".join(text)
}
POST_handler = reverse(mysite.project.views.create_answer_do)
self.login_with_client().post(POST_handler, POST_data)
project_page = self.login_with_client().get(p.get_url())
# Django documents publicly that linebreaks replaces one "\n" with
# "<br />".
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#linebreaks
self.assertContains(project_page, "<br />".join(text))
def test_answer_with_background_color(self):
"""
If a user submits HTML with embedded styles, they should be dropped.
"""
# go to the project page
p = Project.create_dummy(name='Ubuntu')
q = ProjectInvolvementQuestion.create_dummy(
key_string='where_to_start', is_bug_style=False)
q.save()
text = u'<p style="background-color: red;">red</p>'
POST_data = {
'project__pk': p.pk,
'question__pk': q.pk,
'answer__text': text
}
# Submit the data while logged in
POST_handler = reverse(mysite.project.views.create_answer_do)
self.login_with_client().post(POST_handler, POST_data)
# Look at page while logged out (so we see the anonymous rendering)
project_page = self.client.get(p.get_url())
        # The embedded style attribute should have been stripped when the
        # answer was rendered
self.assertNotContains(project_page, '''background-color: red''')
class BugKnowsItsFreshness(TestCase):
def test(self):
b = mysite.search.models.Bug.create_dummy_with_project()
b.last_polled = datetime.datetime.now()
self.assertTrue(b.data_is_more_fresh_than_one_day())
b.last_polled -= datetime.timedelta(
days=1, hours=1)
self.assertFalse(b.data_is_more_fresh_than_one_day())
class WeCanPollSomethingToCheckIfAProjectIconIsLoaded(TestCase):
def test(self):
# Create a dummy project
p = Project.create_dummy()
# Make sure its ohloh icon download time is null
self.assertEqual(p.date_icon_was_fetched_from_ohloh, None)
# get the thing we poll
response = self.client.get(reverse(
mysite.search.views.project_has_icon,
kwargs={'project_name': p.name}))
self.assertEqual(response.content, 'keep polling')
# okay, so now say we finished polling
p.date_icon_was_fetched_from_ohloh = datetime.datetime.utcnow()
p.save()
# so what now?
response = self.client.get(reverse(
mysite.search.views.project_has_icon,
kwargs={'project_name': p.name}))
self.assertEqual(response.content, p.get_url_of_icon_or_generic())
|
fxfitz/ansible
|
refs/heads/devel
|
lib/ansible/modules/files/copy.py
|
10
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: copy
version_added: "historical"
short_description: Copies files to remote locations
description:
- The C(copy) module copies a file from the local or remote machine to a location on the remote machine.
Use the M(fetch) module to copy files from remote locations to the local box.
If you need variable interpolation in copied files, use the M(template) module.
- For Windows targets, use the M(win_copy) module instead.
options:
src:
description:
- Local path to a file to copy to the remote server; can be absolute or relative.
If path is a directory, it is copied recursively. In this case, if path ends
with "/", only inside contents of that directory are copied to destination.
Otherwise, if it does not end with "/", the directory itself with all contents
is copied. This behavior is similar to Rsync.
content:
description:
- When used instead of I(src), sets the contents of a file directly to the specified value.
For anything advanced or with formatting also look at the template module.
version_added: "1.1"
dest:
description:
- 'Remote absolute path where the file should be copied to. If I(src) is a directory, this must be a directory too.
If I(dest) is a nonexistent path and if either I(dest) ends with "/" or I(src) is a directory, I(dest) is created.
If I(src) and I(dest) are files, the parent directory of I(dest) isn''t created: the task fails if it doesn''t already exist.'
required: yes
backup:
description:
- Create a backup file including the timestamp information so you can get
the original file back if you somehow clobbered it incorrectly.
type: bool
default: 'no'
version_added: "0.7"
force:
description:
      - The default is C(yes), which will replace the remote file when contents
        are different from the source. If C(no), the file will only be transferred
        if the destination does not exist.
type: bool
default: 'yes'
aliases: [ thirsty ]
version_added: "1.1"
mode:
description:
- "Mode the file or directory should be. For those used to I(/usr/bin/chmod) remember that
modes are actually octal numbers. You must either specify the leading zero so that
Ansible's YAML parser knows it is an octal number (like C(0644) or C(01777)) or quote it
        (like C('644') or C('0644')) so Ansible receives a string and can do its own conversion from
string into number. Giving Ansible a number without following one of these rules will end
up with a decimal number which will have unexpected results. As of version 1.8, the mode
may be specified as a symbolic mode (for example, C(u+rwx) or C(u=rw,g=r,o=r)). As of
version 2.3, the mode may also be the special string C(preserve). C(preserve) means that
the file will be given the same permissions as the source file."
directory_mode:
description:
- When doing a recursive copy set the mode for the directories. If this is not set we will use the system
defaults. The mode is only set on directories which are newly created, and will not affect those that
already existed.
version_added: "1.5"
remote_src:
description:
      - If C(no), it will search for I(src) on the originating/master machine.
- If C(yes) it will go to the remote/target machine for the I(src). Default is C(no).
- Currently I(remote_src) does not support recursive copying.
- I(remote_src) only works with C(mode=preserve) as of version 2.6.
type: bool
default: 'no'
version_added: "2.0"
follow:
description:
- This flag indicates that filesystem links in the destination, if they exist, should be followed.
type: bool
default: 'no'
version_added: "1.8"
local_follow:
description:
- This flag indicates that filesystem links in the source tree, if they exist, should be followed.
type: bool
default: 'yes'
version_added: "2.4"
checksum:
description:
- SHA1 checksum of the file being transferred. Used to validate that the copy of the file was successful.
- If this is not provided, ansible will use the local calculated checksum of the src file.
version_added: '2.5'
extends_documentation_fragment:
- files
- validate
- decrypt
author:
- Ansible Core Team
- Michael DeHaan
notes:
- The M(copy) module's recursive copy facility does not scale to lots (>hundreds) of files.
  For an alternative, see the M(synchronize) module, which is a wrapper around C(rsync).
- For Windows targets, use the M(win_copy) module instead.
'''
EXAMPLES = r'''
- name: example copying file with owner and permissions
copy:
src: /srv/myfiles/foo.conf
dest: /etc/foo.conf
owner: foo
group: foo
mode: 0644
- name: The same example as above, but using a symbolic mode equivalent to 0644
copy:
src: /srv/myfiles/foo.conf
dest: /etc/foo.conf
owner: foo
group: foo
mode: u=rw,g=r,o=r
- name: Another symbolic mode example, adding some permissions and removing others
copy:
src: /srv/myfiles/foo.conf
dest: /etc/foo.conf
owner: foo
group: foo
mode: u+rw,g-wx,o-rwx
- name: Copy a new "ntp.conf" file into place, backing up the original if it differs from the copied version
copy:
src: /mine/ntp.conf
dest: /etc/ntp.conf
owner: root
group: root
mode: 0644
backup: yes
- name: Copy a new "sudoers" file into place, after passing validation with visudo
copy:
src: /mine/sudoers
dest: /etc/sudoers
validate: /usr/sbin/visudo -cf %s
- name: Copy a "sudoers" file on the remote machine for editing
copy:
src: /etc/sudoers
dest: /etc/sudoers.edit
remote_src: yes
validate: /usr/sbin/visudo -cf %s
- name: Copy using the 'content' for inline data
copy:
content: '# This file was moved to /etc/other.conf'
    dest: /etc/mine.conf
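# Illustrative sketch (hypothetical paths): per the src documentation above, a
# src directory ending in "/" copies only the directory's contents, rsync-style.
- name: Recursively copy the contents of a local directory to the remote machine
  copy:
    src: /srv/myfiles/
    dest: /srv/remote_files/
    owner: foo
    group: foo
    mode: 0755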
'''
RETURN = r'''
dest:
description: destination file/path
returned: success
type: string
sample: /path/to/file.txt
src:
description: source file used for the copy on the target machine
returned: changed
type: string
sample: /home/httpd/.ansible/tmp/ansible-tmp-1423796390.97-147729857856000/source
md5sum:
description: md5 checksum of the file after running copy
returned: when supported
type: string
sample: 2a5aeecc61dc98c4d780b14b330e3282
checksum:
description: sha1 checksum of the file after running copy
returned: success
type: string
sample: 6e642bb8dd5c2e027bf21dd923337cbb4214f827
backup_file:
description: name of backup file created
returned: changed and if backup=yes
type: string
sample: /path/to/file.txt.2015-02-12@22:09~
gid:
description: group id of the file, after execution
returned: success
type: int
sample: 100
group:
description: group of the file, after execution
returned: success
type: string
sample: httpd
owner:
description: owner of the file, after execution
returned: success
type: string
sample: httpd
uid:
description: owner id of the file, after execution
returned: success
type: int
sample: 100
mode:
description: permissions of the target, after execution
returned: success
type: string
sample: 0644
size:
description: size of the target, after execution
returned: success
type: int
sample: 1220
state:
description: state of the target, after execution
returned: success
type: string
sample: file
'''
import os
import os.path
import shutil
import stat
import errno
import tempfile
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_bytes, to_native
class AnsibleModuleError(Exception):
def __init__(self, results):
self.results = results
def split_pre_existing_dir(dirname):
'''
Return the first pre-existing directory and a list of the new directories that will be created.
'''
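    # Illustrative example (hypothetical paths): if only /srv exists on disk,
    # split_pre_existing_dir('/srv/www/site') returns ('/srv', ['www', 'site']).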
head, tail = os.path.split(dirname)
b_head = to_bytes(head, errors='surrogate_or_strict')
if head == '':
return ('.', [tail])
if not os.path.exists(b_head):
if head == '/':
raise AnsibleModuleError(results={'msg': "The '/' directory doesn't exist on this machine."})
(pre_existing_dir, new_directory_list) = split_pre_existing_dir(head)
else:
return (head, [tail])
new_directory_list.append(tail)
return (pre_existing_dir, new_directory_list)
def adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed):
'''
Walk the new directories list and make sure that permissions are as we would expect
'''
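    # Illustrative example (hypothetical paths): given ('/srv', ['www', 'site']),
    # this applies directory_args to /srv/www and then /srv/www/site in turn.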
if new_directory_list:
working_dir = os.path.join(pre_existing_dir, new_directory_list.pop(0))
directory_args['path'] = working_dir
changed = module.set_fs_attributes_if_different(directory_args, changed)
changed = adjust_recursive_directory_permissions(working_dir, new_directory_list, module, directory_args, changed)
return changed
def main():
module = AnsibleModule(
# not checking because of daisy chain to file module
argument_spec=dict(
src=dict(type='path'),
_original_basename=dict(type='str'), # used to handle 'dest is a directory' via template, a slight hack
content=dict(type='str', no_log=True),
dest=dict(type='path', required=True),
backup=dict(type='bool', default=False),
force=dict(type='bool', default=True, aliases=['thirsty']),
validate=dict(type='str'),
directory_mode=dict(type='raw'),
remote_src=dict(type='bool'),
local_follow=dict(type='bool'),
checksum=dict(),
),
add_file_common_args=True,
supports_check_mode=True,
)
src = module.params['src']
b_src = to_bytes(src, errors='surrogate_or_strict')
dest = module.params['dest']
# Make sure we always have a directory component for later processing
if os.path.sep not in dest:
dest = '.{0}{1}'.format(os.path.sep, dest)
b_dest = to_bytes(dest, errors='surrogate_or_strict')
backup = module.params['backup']
force = module.params['force']
_original_basename = module.params.get('_original_basename', None)
validate = module.params.get('validate', None)
follow = module.params['follow']
mode = module.params['mode']
owner = module.params['owner']
group = module.params['group']
remote_src = module.params['remote_src']
checksum = module.params['checksum']
if not os.path.exists(b_src):
module.fail_json(msg="Source %s not found" % (src))
if not os.access(b_src, os.R_OK):
module.fail_json(msg="Source %s not readable" % (src))
if os.path.isdir(b_src):
module.fail_json(msg="Remote copy does not support recursive copy of directory: %s" % (src))
# Preserve is usually handled in the action plugin but mode + remote_src has to be done on the
# remote host
if module.params['mode'] == 'preserve':
module.params['mode'] = '0%03o' % stat.S_IMODE(os.stat(b_src).st_mode)
mode = module.params['mode']
checksum_src = module.sha1(src)
checksum_dest = None
# Backwards compat only. This will be None in FIPS mode
try:
md5sum_src = module.md5(src)
except ValueError:
md5sum_src = None
changed = False
if checksum and checksum_src != checksum:
module.fail_json(
msg='Copied file does not match the expected checksum. Transfer failed.',
checksum=checksum_src,
expected_checksum=checksum
)
# Special handling for recursive copy - create intermediate dirs
if _original_basename and dest.endswith(os.sep):
dest = os.path.join(dest, _original_basename)
b_dest = to_bytes(dest, errors='surrogate_or_strict')
dirname = os.path.dirname(dest)
b_dirname = to_bytes(dirname, errors='surrogate_or_strict')
if not os.path.exists(b_dirname):
try:
(pre_existing_dir, new_directory_list) = split_pre_existing_dir(dirname)
except AnsibleModuleError as e:
            e.results['msg'] += ' Could not copy to {0}'.format(dest)
module.fail_json(**e.results)
os.makedirs(b_dirname)
directory_args = module.load_file_common_arguments(module.params)
directory_mode = module.params["directory_mode"]
if directory_mode is not None:
directory_args['mode'] = directory_mode
else:
directory_args['mode'] = None
adjust_recursive_directory_permissions(pre_existing_dir, new_directory_list, module, directory_args, changed)
if os.path.isdir(b_dest):
basename = os.path.basename(src)
if _original_basename:
basename = _original_basename
dest = os.path.join(dest, basename)
b_dest = to_bytes(dest, errors='surrogate_or_strict')
if os.path.exists(b_dest):
if os.path.islink(b_dest) and follow:
b_dest = os.path.realpath(b_dest)
dest = to_native(b_dest, errors='surrogate_or_strict')
if not force:
module.exit_json(msg="file already exists", src=src, dest=dest, changed=False)
if os.access(b_dest, os.R_OK):
checksum_dest = module.sha1(dest)
else:
if not os.path.exists(os.path.dirname(b_dest)):
try:
# os.path.exists() can return false in some
# circumstances where the directory does not have
# the execute bit for the current user set, in
# which case the stat() call will raise an OSError
os.stat(os.path.dirname(b_dest))
except OSError as e:
if "permission denied" in to_native(e).lower():
module.fail_json(msg="Destination directory %s is not accessible" % (os.path.dirname(dest)))
module.fail_json(msg="Destination directory %s does not exist" % (os.path.dirname(dest)))
if not os.access(os.path.dirname(b_dest), os.W_OK) and not module.params['unsafe_writes']:
module.fail_json(msg="Destination %s not writable" % (os.path.dirname(dest)))
backup_file = None
if checksum_src != checksum_dest or os.path.islink(b_dest):
if not module.check_mode:
try:
if backup:
if os.path.exists(b_dest):
backup_file = module.backup_local(dest)
# allow for conversion from symlink.
if os.path.islink(b_dest):
os.unlink(b_dest)
open(b_dest, 'w').close()
if validate:
# if we have a mode, make sure we set it on the temporary
# file source as some validations may require it
if mode is not None:
module.set_mode_if_different(src, mode, False)
if owner is not None:
module.set_owner_if_different(src, owner, False)
if group is not None:
module.set_group_if_different(src, group, False)
if "%s" not in validate:
module.fail_json(msg="validate must contain %%s: %s" % (validate))
(rc, out, err) = module.run_command(validate % src)
if rc != 0:
module.fail_json(msg="failed to validate", exit_status=rc, stdout=out, stderr=err)
b_mysrc = b_src
if remote_src:
_, b_mysrc = tempfile.mkstemp(dir=os.path.dirname(b_dest))
shutil.copyfile(b_src, b_mysrc)
try:
shutil.copystat(b_src, b_mysrc)
except OSError as err:
if err.errno == errno.ENOSYS and mode == "preserve":
module.warn("Unable to copy stats {0}".format(to_native(b_src)))
else:
raise
module.atomic_move(b_mysrc, dest, unsafe_writes=module.params['unsafe_writes'])
except (IOError, OSError):
module.fail_json(msg="failed to copy: %s to %s" % (src, dest), traceback=traceback.format_exc())
changed = True
else:
changed = False
res_args = dict(
dest=dest, src=src, md5sum=md5sum_src, checksum=checksum_src, changed=changed
)
if backup_file:
res_args['backup_file'] = backup_file
module.params['dest'] = dest
if not module.check_mode:
file_args = module.load_file_common_arguments(module.params)
res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])
module.exit_json(**res_args)
if __name__ == '__main__':
main()
|
syphar/django
|
refs/heads/master
|
django/db/backends/oracle/features.py
|
8
|
from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.utils import InterfaceError
class DatabaseFeatures(BaseDatabaseFeatures):
empty_fetchmany_value = ()
interprets_empty_strings_as_nulls = True
uses_savepoints = True
has_select_for_update = True
has_select_for_update_nowait = True
has_select_for_update_skip_locked = True
can_return_id_from_insert = True
allow_sliced_subqueries = False
supports_subqueries_in_group_by = False
supports_transactions = True
supports_timezones = False
supports_bitwise_or = False
has_native_duration_field = True
can_defer_constraint_checks = True
supports_partially_nullable_unique_constraints = False
truncates_names = True
has_bulk_insert = True
supports_tablespaces = True
supports_sequence_reset = False
can_introspect_default = False # Pending implementation by an interested person.
can_introspect_max_length = False
can_introspect_time_field = False
atomic_transactions = False
supports_combined_alters = False
nulls_order_largest = True
requires_literal_defaults = True
closed_cursor_error_class = InterfaceError
bare_select_suffix = " FROM DUAL"
uppercases_column_names = True
# select for update with limit can be achieved on Oracle, but not with the current backend.
supports_select_for_update_with_limit = False
supports_temporal_subtraction = True
    # Oracle doesn't ignore the case of quoted identifiers, but the current
    # backend does, because it uppercases all identifiers.
ignores_quoted_identifier_case = True
def introspected_boolean_field_type(self, field=None, created_separately=False):
"""
Some versions of Oracle -- we've seen this on 11.2.0.1 and suspect
it goes back -- have a weird bug where, when an integer column is
added to an existing table with a default, its precision is later
reported on introspection as 0, regardless of the real precision.
For Django introspection, this means that such columns are reported
as IntegerField even if they are really BigIntegerField or BooleanField.
The bug is solved in Oracle 11.2.0.2 and up.
"""
if self.connection.oracle_full_version < '11.2.0.2' and field and field.has_default() and created_separately:
return 'IntegerField'
return super(DatabaseFeatures, self).introspected_boolean_field_type(field, created_separately)
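        # Illustrative note (not part of the original file): the version guard above
        # relies on plain string comparison, e.g. '11.2.0.1' < '11.2.0.2' is True,
        # which is sufficient for the dotted Oracle versions it needs to distinguish.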
|
kcah27/HnTool
|
refs/heads/master
|
HnTool/modules/vsftpd.py
|
1
|
# -*- coding: utf-8 -*-
#
# HnTool rules - vsftpd
# Copyright (C) 2010 Hugo Doria <mail@hugodoria.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import os
from HnTool.modules.rule import Rule as MasterRule
class Rule(MasterRule):
def __init__(self, options):
MasterRule.__init__(self, options)
self.short_name="vsftpd"
self.long_name="Checks security problems on VsFTPd servers"
self.type="config"
self.required_files = ['/etc/vsftpd.conf', '/etc/vsftpd/vsftpd.conf']
def requires(self):
return self.required_files
def vsftpdParser(self, pfile):
        '''Method to parse a vsftpd.conf file. Returns a dict with
        all key/value pairs from the file.'''
if os.path.isfile(pfile):
fp = open(pfile,'r')
keysValues = {}
for line in fp.readlines():
if not line.startswith('#'):
                    line = line.strip().split('=', 1)  # split on the first '=' only, so values may contain '='
if len(line) >= 2:
keysValues[line[0]] = line[1]
fp.close()
return keysValues
def analyze(self, options):
check_results = self.check_results
vsftpd_conf_file = self.required_files
        # getting the lines in a [key, value] format
for vsftpd_conf in vsftpd_conf_file:
if os.path.isfile(vsftpd_conf):
lines = self.vsftpdParser(vsftpd_conf)
# checking if VsFTPd is running on Standalone method
if 'listen' in lines:
if lines['listen'].upper() == 'YES':
check_results['info'].append('Running on StandAlone')
else:
check_results['info'].append('Not running on StandAlone')
else:
check_results['info'].append('Running on StandAlone')
# checking if VsFTPd is using the default port
if 'listen_port' in lines:
if int(lines['listen_port']) == 21:
check_results['info'].append('Using the default port (21)')
else:
check_results['info'].append('Not using the default port (21)')
else:
check_results['info'].append('Using the default port (21)')
# checking if chroot is enabled on VsFTPd
if 'chroot_local_user' in lines:
if lines['chroot_local_user'].upper() == 'YES':
check_results['ok'].append('Chrooting local users is enabled')
else:
check_results['high'].append('Chrooting local users is disabled')
else:
check_results['high'].append('Chrooting local users is disabled')
# checking if anonymous login is enabled
if 'anonymous_enable' in lines:
if lines['anonymous_enable'].upper() == 'YES':
check_results['info'].append('Anonymous login is allowed')
else:
check_results['info'].append('Anonymous login is not allowed')
else:
check_results['info'].append('Anonymous login is allowed')
# checking if ascii_download_enable or ascii_upload_enable is enabled
if 'ascii_download_enable' in lines or 'ascii_upload_enable' in lines:
                    # use .get() so a missing key does not raise a KeyError when only one of the two options is set
                    if lines.get('ascii_download_enable', 'NO').upper() == 'YES' or \
                       lines.get('ascii_upload_enable', 'NO').upper() == 'YES':
                        check_results['high'].append('ASCII mode data transfers are allowed (DoS is possible)')
                    else:
                        check_results['ok'].append('ASCII mode data transfers are not allowed')
                else:
                    check_results['high'].append('ASCII mode data transfers are allowed (DoS is possible)')
return check_results
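# Illustrative sketch (not part of the original module). For a config file containing:
#
#     listen=YES
#     chroot_local_user=NO
#     anonymous_enable=YES
#
# vsftpdParser() would return
#     {'listen': 'YES', 'chroot_local_user': 'NO', 'anonymous_enable': 'YES'}
# and analyze() would then report 'Running on StandAlone' (info),
# 'Chrooting local users is disabled' (high) and 'Anonymous login is allowed' (info).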
|
sklnet/opendroid-enigma2
|
refs/heads/master
|
lib/python/Plugins/SystemPlugins/NetworkWizard/plugin.py
|
19
|
from Screens.Screen import Screen
from Plugins.Plugin import PluginDescriptor
from Components.config import getConfigListEntry, config, ConfigBoolean
config.misc.firstrun = ConfigBoolean(default = True)
def NetworkWizardMain(session, **kwargs):
session.open(NetworkWizard)
def startSetup(menuid):
if menuid != "system":
return [ ]
return [(_("Network wizard"), NetworkWizardMain, "nw_wizard", 40)]
def NetworkWizard(*args, **kwargs):
from NetworkWizard import NetworkWizard
return NetworkWizard(*args, **kwargs)
def Plugins(**kwargs):
list = []
if config.misc.firstrun.value:
list.append(PluginDescriptor(name=_("Network wizard"), where = PluginDescriptor.WHERE_WIZARD, needsRestart = False, fnc=(25, NetworkWizard)))
return list
|
blueboxgroup/nova
|
refs/heads/master
|
nova/tests/unit/api/openstack/compute/contrib/test_block_device_mapping.py
|
7
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from mox3 import mox
from oslo_config import cfg
from oslo_serialization import jsonutils
from webob import exc
from nova.api.openstack.compute import extensions
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import block_device_mapping
from nova.api.openstack.compute.plugins.v3 import servers as servers_v21
from nova.api.openstack.compute import servers as servers_v2
from nova import block_device
from nova.compute import api as compute_api
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit.image import fake
from nova.tests.unit import matchers
CONF = cfg.CONF
class BlockDeviceMappingTestV21(test.TestCase):
validation_error = exception.ValidationError
def _setup_controller(self):
ext_info = plugins.LoadedExtensionInfo()
self.controller = servers_v21.ServersController(
extension_info=ext_info)
CONF.set_override('extensions_blacklist', 'os-block-device-mapping',
'osapi_v3')
self.no_bdm_v2_controller = servers_v21.ServersController(
extension_info=ext_info)
CONF.set_override('extensions_blacklist', '', 'osapi_v3')
def setUp(self):
super(BlockDeviceMappingTestV21, self).setUp()
self._setup_controller()
fake.stub_out_image_service(self.stubs)
self.bdm = [{
'no_device': None,
'source_type': 'volume',
'destination_type': 'volume',
'uuid': 'fake',
'device_name': 'vdb',
'delete_on_termination': False,
}]
def _get_servers_body(self, no_image=False):
body = {
'server': {
'name': 'server_test',
'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
'flavorRef': 'http://localhost/123/flavors/3',
'metadata': {
'hello': 'world',
'open': 'stack',
},
},
}
if no_image:
del body['server']['imageRef']
return body
def _test_create(self, params, no_image=False, override_controller=None):
body = self._get_servers_body(no_image)
body['server'].update(params)
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.headers['content-type'] = 'application/json'
req.body = jsonutils.dumps(body)
if override_controller:
override_controller.create(req, body=body).obj['server']
else:
self.controller.create(req, body=body).obj['server']
def test_create_instance_with_block_device_mapping_disabled(self):
bdm = [{'device_name': 'foo'}]
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('block_device_mapping', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: bdm}
self._test_create(params,
override_controller=self.no_bdm_v2_controller)
def test_create_instance_with_volumes_enabled_no_image(self):
"""Test that the create will fail if there is no image
and no bdms supplied in the request
"""
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.assertRaises(exc.HTTPBadRequest,
self._test_create, {}, no_image=True)
def test_create_instance_with_bdms_and_no_image(self):
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertThat(
block_device.BlockDeviceDict(self.bdm[0]),
matchers.DictMatches(kwargs['block_device_mapping'][0])
)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.mox.StubOutWithMock(compute_api.API, '_validate_bdm')
self.mox.StubOutWithMock(compute_api.API, '_get_bdm_image_metadata')
compute_api.API._validate_bdm(
mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn(True)
compute_api.API._get_bdm_image_metadata(
mox.IgnoreArg(), mox.IgnoreArg(), False).AndReturn({})
self.mox.ReplayAll()
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self._test_create(params, no_image=True)
def test_create_instance_with_device_name_not_string(self):
self.bdm[0]['device_name'] = 123
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(self.validation_error,
self._test_create, params, no_image=True)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_bdm_param_not_list(self, mock_create):
self.params = {'block_device_mapping': '/dev/vdb'}
self.assertRaises(self.validation_error,
self._test_create, self.params)
def test_create_instance_with_device_name_empty(self):
self.bdm[0]['device_name'] = ''
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(self.validation_error,
self._test_create, params, no_image=True)
def test_create_instance_with_device_name_too_long(self):
self.bdm[0]['device_name'] = 'a' * 256
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(self.validation_error,
self._test_create, params, no_image=True)
def test_create_instance_with_space_in_device_name(self):
self.bdm[0]['device_name'] = 'v da'
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertTrue(kwargs['legacy_bdm'])
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(self.validation_error,
self._test_create, params, no_image=True)
def test_create_instance_with_invalid_size(self):
self.bdm[0]['volume_size'] = 'hello world'
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(self.validation_error,
self._test_create, params, no_image=True)
def test_create_instance_bdm(self):
bdm = [{
'source_type': 'volume',
'device_name': 'fake_dev',
'uuid': 'fake_vol'
}]
bdm_expected = [{
'source_type': 'volume',
'device_name': 'fake_dev',
'volume_id': 'fake_vol'
}]
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertFalse(kwargs['legacy_bdm'])
for expected, received in zip(bdm_expected,
kwargs['block_device_mapping']):
self.assertThat(block_device.BlockDeviceDict(expected),
matchers.DictMatches(received))
return old_create(*args, **kwargs)
def _validate_bdm(*args, **kwargs):
pass
self.stubs.Set(compute_api.API, 'create', create)
self.stubs.Set(compute_api.API, '_validate_bdm', _validate_bdm)
params = {block_device_mapping.ATTRIBUTE_NAME: bdm}
self._test_create(params, no_image=True)
def test_create_instance_bdm_missing_device_name(self):
del self.bdm[0]['device_name']
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertFalse(kwargs['legacy_bdm'])
self.assertNotIn(None,
kwargs['block_device_mapping'][0]['device_name'])
return old_create(*args, **kwargs)
def _validate_bdm(*args, **kwargs):
pass
self.stubs.Set(compute_api.API, 'create', create)
self.stubs.Set(compute_api.API, '_validate_bdm', _validate_bdm)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self._test_create(params, no_image=True)
def test_create_instance_bdm_validation_error(self):
def _validate(*args, **kwargs):
raise exception.InvalidBDMFormat(details='Wrong BDM')
self.stubs.Set(block_device.BlockDeviceDict,
'_validate', _validate)
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
self.assertRaises(exc.HTTPBadRequest,
self._test_create, params, no_image=True)
@mock.patch('nova.compute.api.API._get_bdm_image_metadata')
def test_create_instance_non_bootable_volume_fails(self, fake_bdm_meta):
params = {block_device_mapping.ATTRIBUTE_NAME: self.bdm}
fake_bdm_meta.side_effect = exception.InvalidBDMVolumeNotBootable(id=1)
self.assertRaises(exc.HTTPBadRequest, self._test_create, params,
no_image=True)
def test_create_instance_bdm_api_validation_fails(self):
self.validation_fail_test_validate_called = False
self.validation_fail_instance_destroy_called = False
bdm_exceptions = ((exception.InvalidBDMSnapshot, {'id': 'fake'}),
(exception.InvalidBDMVolume, {'id': 'fake'}),
(exception.InvalidBDMImage, {'id': 'fake'}),
(exception.InvalidBDMBootSequence, {}),
(exception.InvalidBDMLocalsLimit, {}))
ex_iter = iter(bdm_exceptions)
def _validate_bdm(*args, **kwargs):
self.validation_fail_test_validate_called = True
ex, kargs = ex_iter.next()
raise ex(**kargs)
def _instance_destroy(*args, **kwargs):
self.validation_fail_instance_destroy_called = True
self.stubs.Set(compute_api.API, '_validate_bdm', _validate_bdm)
self.stubs.Set(objects.Instance, 'destroy', _instance_destroy)
for _unused in xrange(len(bdm_exceptions)):
params = {block_device_mapping.ATTRIBUTE_NAME:
[self.bdm[0].copy()]}
self.assertRaises(exc.HTTPBadRequest,
self._test_create, params)
self.assertTrue(self.validation_fail_test_validate_called)
self.assertTrue(self.validation_fail_instance_destroy_called)
self.validation_fail_test_validate_called = False
self.validation_fail_instance_destroy_called = False
class BlockDeviceMappingTestV2(BlockDeviceMappingTestV21):
validation_error = exc.HTTPBadRequest
def _setup_controller(self):
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {'os-volumes': 'fake',
'os-block-device-mapping-v2-boot': 'fake'}
self.controller = servers_v2.Controller(self.ext_mgr)
self.ext_mgr_bdm_v2 = extensions.ExtensionManager()
self.ext_mgr_bdm_v2.extensions = {'os-volumes': 'fake'}
self.no_bdm_v2_controller = servers_v2.Controller(
self.ext_mgr_bdm_v2)
def test_create_instance_with_block_device_mapping_disabled(self):
bdm = [{'device_name': 'foo'}]
old_create = compute_api.API.create
def create(*args, **kwargs):
            self.assertIsNone(kwargs['block_device_mapping'])
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
params = {block_device_mapping.ATTRIBUTE_NAME: bdm}
self._test_create(params,
override_controller=self.no_bdm_v2_controller)
|
datalogics/scons
|
refs/heads/master
|
test/runtest/baseline/pass.py
|
2
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test how we handle a passing test specified on the command line.
"""
import TestRuntest
test = TestRuntest.TestRuntest()
test.subdir('test')
test.write_passing_test(['test', 'pass.py'])
# NOTE: The "test/pass.py : PASS" line has spaces at the end.
expect = r"""qmtest.py run --output baseline.qmr --format none --result-stream="scons_tdb.AegisBaselineStream" test/pass.py
--- TEST RESULTS -------------------------------------------------------------
test/pass.py : PASS
--- TESTS WITH UNEXPECTED OUTCOMES -------------------------------------------
test/pass.py : PASS
--- STATISTICS ---------------------------------------------------------------
1 (100%) tests unexpected PASS
"""
test.run(arguments = '-b . test/pass.py', stdout = expect)
test.pass_test()
|
jonatanSh/challenge-framework
|
refs/heads/master
|
challenge_framework/users/urls.py
|
1
|
from django.conf.urls import url
from .views import SignUp, Api, Login, Logout
app_name = 'users'
urlpatterns = [
url(r'^signup$', SignUp.as_view(), name="signup"),
url(r'^api$', Api.as_view()),
url(r'^login$', Login.as_view(), name="login"),
url(r'^logout$', Logout.as_view(), name="logout"),
]
|
luoyetx/mxnet
|
refs/heads/master
|
example/autoencoder/data.py
|
18
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=missing-docstring
from __future__ import print_function
import os
import numpy as np
from sklearn.datasets import fetch_mldata
def get_mnist():
np.random.seed(1234) # set seed for deterministic ordering
data_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
data_path = os.path.join(data_path, '../../data')
mnist = fetch_mldata('MNIST original', data_home=data_path)
p = np.random.permutation(mnist.data.shape[0])
X = mnist.data[p].astype(np.float32)*0.02
Y = mnist.target[p]
return X, Y
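# Usage sketch (not part of the original file): assuming the download succeeds,
#     X, Y = get_mnist()
# yields X with shape (70000, 784) as float32 (pixel values scaled by 0.02) and
# Y with the 70000 digit labels, both shuffled with a fixed seed so repeated
# calls return the same ordering.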
|
saurabh6790/med_lib_rels
|
refs/heads/master
|
core/doctype/default_home_page/default_home_page.py
|
578
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
class DocType:
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
|
jmhsi/justin_tinker
|
refs/heads/master
|
data_science/courses/learning_dl_packages/models/research/syntaxnet/dragnn/python/composite_optimizer.py
|
12
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""An optimizer that switches between several methods."""
import tensorflow as tf
from tensorflow.python.training import optimizer
class CompositeOptimizer(optimizer.Optimizer):
"""Optimizer that switches between several methods.
"""
def __init__(self,
optimizer1,
optimizer2,
switch,
use_locking=False,
name='Composite'):
"""Construct a new Composite optimizer.
Args:
optimizer1: A tf.python.training.optimizer.Optimizer object.
optimizer2: A tf.python.training.optimizer.Optimizer object.
switch: A tf.bool Tensor, selecting whether to use the first or the second
optimizer.
use_locking: Bool. If True apply use locks to prevent concurrent updates
to variables.
name: Optional name prefix for the operations created when applying
gradients. Defaults to "Composite".
"""
super(CompositeOptimizer, self).__init__(use_locking, name)
self._optimizer1 = optimizer1
self._optimizer2 = optimizer2
self._switch = switch
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
return tf.cond(
self._switch,
lambda: self._optimizer1.apply_gradients(grads_and_vars,
global_step, name),
lambda: self._optimizer2.apply_gradients(grads_and_vars,
global_step, name)
)
def get_slot(self, var, name):
slot1 = self._optimizer1.get_slot(var, name)
slot2 = self._optimizer2.get_slot(var, name)
if slot1 and slot2:
raise LookupError('Slot named %s for variable %s populated for both '
'optimizers' % (name, var.name))
return slot1 or slot2
def get_slot_names(self):
return sorted(self._optimizer1.get_slot_names() +
self._optimizer2.get_slot_names())
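# Usage sketch (not part of the original file; all names below are made up):
#
#     switch = tf.less(global_step, 1000)  # bool scalar tensor
#     opt = CompositeOptimizer(tf.train.MomentumOptimizer(0.1, 0.9),
#                              tf.train.AdamOptimizer(0.001),
#                              switch)
#     train_op = opt.apply_gradients(grads_and_vars, global_step=global_step)
#
# While `switch` evaluates to True the Momentum update is applied, otherwise the
# Adam update; get_slot()/get_slot_names() then expose whichever optimizer's slots exist.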
|
Soya93/Extract-Refactoring
|
refs/heads/master
|
python/testData/inspections/unusedImport/subpackageInInitPy/module_a.py
|
83
|
#! /usr/bin/env python
from package1 import ClassB
b = ClassB()
|
gotomypc/scikit-learn
|
refs/heads/master
|
sklearn/utils/graph.py
|
289
|
"""
Graph utilities and algorithms
Graphs are represented with their adjacency matrices, preferably using
sparse matrices.
"""
# Authors: Aric Hagberg <hagberg@lanl.gov>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Jake Vanderplas <vanderplas@astro.washington.edu>
# License: BSD 3 clause
import numpy as np
from scipy import sparse
from .validation import check_array
from .graph_shortest_path import graph_shortest_path
###############################################################################
# Path and connected component analysis.
# Code adapted from networkx
def single_source_shortest_path_length(graph, source, cutoff=None):
"""Return the shortest path length from source to all reachable nodes.
Returns a dictionary of shortest path lengths keyed by target.
Parameters
----------
graph: sparse matrix or 2D array (preferably LIL matrix)
Adjacency matrix of the graph
source : node label
Starting node for path
cutoff : integer, optional
Depth to stop the search - only
paths of length <= cutoff are returned.
Examples
--------
>>> from sklearn.utils.graph import single_source_shortest_path_length
>>> import numpy as np
>>> graph = np.array([[ 0, 1, 0, 0],
... [ 1, 0, 1, 0],
... [ 0, 1, 0, 1],
... [ 0, 0, 1, 0]])
>>> single_source_shortest_path_length(graph, 0)
{0: 0, 1: 1, 2: 2, 3: 3}
>>> single_source_shortest_path_length(np.ones((6, 6)), 2)
{0: 1, 1: 1, 2: 0, 3: 1, 4: 1, 5: 1}
"""
if sparse.isspmatrix(graph):
graph = graph.tolil()
else:
graph = sparse.lil_matrix(graph)
seen = {} # level (number of hops) when seen in BFS
level = 0 # the current level
    next_level = [source]  # list of nodes to check at next level
while next_level:
this_level = next_level # advance to next level
        next_level = set()  # and start a new set of nodes to check (the fringe)
for v in this_level:
if v not in seen:
seen[v] = level # set the level of vertex v
next_level.update(graph.rows[v])
if cutoff is not None and cutoff <= level:
break
level += 1
return seen # return all path lengths as dictionary
if hasattr(sparse, 'connected_components'):
connected_components = sparse.connected_components
else:
from .sparsetools import connected_components
###############################################################################
# Graph laplacian
def graph_laplacian(csgraph, normed=False, return_diag=False):
""" Return the Laplacian matrix of a directed graph.
For non-symmetric graphs the out-degree is used in the computation.
Parameters
----------
csgraph : array_like or sparse matrix, 2 dimensions
compressed-sparse graph, with shape (N, N).
normed : bool, optional
If True, then compute normalized Laplacian.
return_diag : bool, optional
If True, then return diagonal as well as laplacian.
Returns
-------
lap : ndarray
The N x N laplacian matrix of graph.
diag : ndarray
The length-N diagonal of the laplacian matrix.
diag is returned only if return_diag is True.
Notes
-----
The Laplacian matrix of a graph is sometimes referred to as the
"Kirchoff matrix" or the "admittance matrix", and is useful in many
parts of spectral graph theory. In particular, the eigen-decomposition
of the laplacian matrix can give insight into many properties of the graph.
For non-symmetric directed graphs, the laplacian is computed using the
out-degree of each node.
"""
if csgraph.ndim != 2 or csgraph.shape[0] != csgraph.shape[1]:
raise ValueError('csgraph must be a square matrix or array')
if normed and (np.issubdtype(csgraph.dtype, np.int)
or np.issubdtype(csgraph.dtype, np.uint)):
csgraph = check_array(csgraph, dtype=np.float64, accept_sparse=True)
if sparse.isspmatrix(csgraph):
return _laplacian_sparse(csgraph, normed=normed,
return_diag=return_diag)
else:
return _laplacian_dense(csgraph, normed=normed,
return_diag=return_diag)
def _laplacian_sparse(graph, normed=False, return_diag=False):
n_nodes = graph.shape[0]
if not graph.format == 'coo':
lap = (-graph).tocoo()
else:
lap = -graph.copy()
diag_mask = (lap.row == lap.col)
if not diag_mask.sum() == n_nodes:
# The sparsity pattern of the matrix has holes on the diagonal,
# we need to fix that
diag_idx = lap.row[diag_mask]
diagonal_holes = list(set(range(n_nodes)).difference(diag_idx))
new_data = np.concatenate([lap.data, np.ones(len(diagonal_holes))])
new_row = np.concatenate([lap.row, diagonal_holes])
new_col = np.concatenate([lap.col, diagonal_holes])
lap = sparse.coo_matrix((new_data, (new_row, new_col)),
shape=lap.shape)
diag_mask = (lap.row == lap.col)
lap.data[diag_mask] = 0
w = -np.asarray(lap.sum(axis=1)).squeeze()
if normed:
w = np.sqrt(w)
w_zeros = (w == 0)
w[w_zeros] = 1
lap.data /= w[lap.row]
lap.data /= w[lap.col]
lap.data[diag_mask] = (1 - w_zeros[lap.row[diag_mask]]).astype(
lap.data.dtype)
else:
lap.data[diag_mask] = w[lap.row[diag_mask]]
if return_diag:
return lap, w
return lap
def _laplacian_dense(graph, normed=False, return_diag=False):
n_nodes = graph.shape[0]
lap = -np.asarray(graph) # minus sign leads to a copy
# set diagonal to zero
lap.flat[::n_nodes + 1] = 0
w = -lap.sum(axis=0)
if normed:
w = np.sqrt(w)
w_zeros = (w == 0)
w[w_zeros] = 1
lap /= w
lap /= w[:, np.newaxis]
lap.flat[::n_nodes + 1] = (1 - w_zeros).astype(lap.dtype)
else:
lap.flat[::n_nodes + 1] = w.astype(lap.dtype)
if return_diag:
return lap, w
return lap
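# Worked example (not part of the original file): for the 2-node path graph with
# adjacency matrix A = [[0., 1.], [1., 0.]], the degrees are [1, 1], so
# graph_laplacian(np.array([[0., 1.], [1., 0.]])) returns D - A =
# [[ 1., -1.], [-1.,  1.]]; with normed=True each off-diagonal entry is divided
# by sqrt(d_i * d_j) and the diagonal is set to 1, which gives the same matrix
# here because both degrees equal 1.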
|
zhoulingjun/jieba
|
refs/heads/master
|
test/test.py
|
62
|
#encoding=utf-8
import sys
sys.path.append("../")
import jieba
def cuttest(test_sent):
result = jieba.cut(test_sent)
print(" / ".join(result))
if __name__ == "__main__":
cuttest("这是一个伸手不见五指的黑夜。我叫孙悟空,我爱北京,我爱Python和C++。")
cuttest("我不喜欢日本和服。")
cuttest("雷猴回归人间。")
cuttest("工信处女干事每月经过下属科室都要亲口交代24口交换机等技术性器件的安装工作")
cuttest("我需要廉租房")
cuttest("永和服装饰品有限公司")
cuttest("我爱北京天安门")
cuttest("abc")
cuttest("隐马尔可夫")
cuttest("雷猴是个好网站")
cuttest("“Microsoft”一词由“MICROcomputer(微型计算机)”和“SOFTware(软件)”两部分组成")
cuttest("草泥马和欺实马是今年的流行词汇")
cuttest("伊藤洋华堂总府店")
cuttest("中国科学院计算技术研究所")
cuttest("罗密欧与朱丽叶")
cuttest("我购买了道具和服装")
cuttest("PS: 我觉得开源有一个好处,就是能够敦促自己不断改进,避免敞帚自珍")
cuttest("湖北省石首市")
cuttest("湖北省十堰市")
cuttest("总经理完成了这件事情")
cuttest("电脑修好了")
cuttest("做好了这件事情就一了百了了")
cuttest("人们审美的观点是不同的")
cuttest("我们买了一个美的空调")
cuttest("线程初始化时我们要注意")
cuttest("一个分子是由好多原子组织成的")
cuttest("祝你马到功成")
cuttest("他掉进了无底洞里")
cuttest("中国的首都是北京")
cuttest("孙君意")
cuttest("外交部发言人马朝旭")
cuttest("领导人会议和第四届东亚峰会")
cuttest("在过去的这五年")
cuttest("还需要很长的路要走")
cuttest("60周年首都阅兵")
cuttest("你好人们审美的观点是不同的")
cuttest("买水果然后来世博园")
cuttest("买水果然后去世博园")
cuttest("但是后来我才知道你是对的")
cuttest("存在即合理")
cuttest("的的的的的在的的的的就以和和和")
cuttest("I love你,不以为耻,反以为rong")
cuttest("因")
cuttest("")
cuttest("hello你好人们审美的观点是不同的")
cuttest("很好但主要是基于网页形式")
cuttest("hello你好人们审美的观点是不同的")
cuttest("为什么我不能拥有想要的生活")
cuttest("后来我才")
cuttest("此次来中国是为了")
cuttest("使用了它就可以解决一些问题")
cuttest(",使用了它就可以解决一些问题")
cuttest("其实使用了它就可以解决一些问题")
cuttest("好人使用了它就可以解决一些问题")
cuttest("是因为和国家")
cuttest("老年搜索还支持")
cuttest("干脆就把那部蒙人的闲法给废了拉倒!RT @laoshipukong : 27日,全国人大常委会第三次审议侵权责任法草案,删除了有关医疗损害责任“举证倒置”的规定。在医患纠纷中本已处于弱势地位的消费者由此将陷入万劫不复的境地。 ")
cuttest("大")
cuttest("")
cuttest("他说的确实在理")
cuttest("长春市长春节讲话")
cuttest("结婚的和尚未结婚的")
cuttest("结合成分子时")
cuttest("旅游和服务是最好的")
cuttest("这件事情的确是我的错")
cuttest("供大家参考指正")
cuttest("哈尔滨政府公布塌桥原因")
cuttest("我在机场入口处")
cuttest("邢永臣摄影报道")
cuttest("BP神经网络如何训练才能在分类时增加区分度?")
cuttest("南京市长江大桥")
cuttest("应一些使用者的建议,也为了便于利用NiuTrans用于SMT研究")
cuttest('长春市长春药店')
cuttest('邓颖超生前最喜欢的衣服')
cuttest('胡锦涛是热爱世界和平的政治局常委')
cuttest('程序员祝海林和朱会震是在孙健的左面和右面, 范凯在最右面.再往左是李松洪')
cuttest('一次性交多少钱')
cuttest('两块五一套,三块八一斤,四块七一本,五块六一条')
cuttest('小和尚留了一个像大和尚一样的和尚头')
cuttest('我是中华人民共和国公民;我爸爸是共和党党员; 地铁和平门站')
cuttest('张晓梅去人民医院做了个B超然后去买了件T恤')
cuttest('AT&T是一件不错的公司,给你发offer了吗?')
cuttest('C++和c#是什么关系?11+122=133,是吗?PI=3.14159')
cuttest('你认识那个和主席握手的的哥吗?他开一辆黑色的士。')
cuttest('枪杆子中出政权')
cuttest('张三风同学走上了不归路')
cuttest('阿Q腰间挂着BB机手里拿着大哥大,说:我一般吃饭不AA制的。')
cuttest('在1号店能买到小S和大S八卦的书,还有3D电视。')
|
twidi/satchmo
|
refs/heads/master
|
satchmo/apps/satchmo_ext/product_feeds/urls.py
|
6
|
"""
URLs for product feeds. Note that these do not have to be added to the URLconf manually; they are added automatically by the satchmo core if this app is installed.
"""
from django.conf.urls.defaults import *
import logging
log = logging.getLogger('product_feeds.urls')
urlpatterns = patterns('satchmo_ext.product_feeds.views',
(r'atom/$', 'product_feed', {}, 'satchmo_atom_feed'),
(r'atom/(?P<category>([-\w])*)/$', 'product_feed', {}, 'satchmo_atom_category_feed'),
(r'products.csv$', 'admin_product_feed', {'template' : "product_feeds/product_feed.csv"}, 'satchmo_product_feed'),
)
feedpatterns = patterns('',
(r'^feed/', include('satchmo_ext.product_feeds.urls'))
)
def add_feed_urls(sender, patterns=(), **kwargs):
log.debug("Adding product_feed urls")
patterns += feedpatterns
|
anistark/mozillians
|
refs/heads/master
|
mozillians/api/tests/test_models.py
|
5
|
from django.test import TestCase
from nose.tools import ok_
from mozillians.users.tests import UserFactory
from mozillians.api.models import APIApp
class APIAppTests(TestCase):
def test_save_generates_key(self):
owner = UserFactory.create()
api_app = APIApp.objects.create(owner=owner, name='Test',
description='Foo',
key='')
ok_(api_app.key != '')
|
wojons/rethinkdb
|
refs/heads/next
|
test/rql_test/connections/http_support/flask/testsuite/test_apps/flask_broken/__init__.py
|
629
|
import flask.ext.broken.b
import missing_module
|
geekboxzone/lollipop_external_chromium_org
|
refs/heads/geekbox
|
tools/python/google/platform_utils_linux.py
|
193
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Platform-specific utility methods shared by several scripts."""
import os
import subprocess
import google.path_utils
class PlatformUtility(object):
def __init__(self, base_dir):
"""Args:
base_dir: the base dir for running tests.
"""
self._base_dir = base_dir
self._httpd_cmd_string = None # used for starting/stopping httpd
self._bash = "/bin/bash"
def _UnixRoot(self):
"""Returns the path to root."""
return "/"
def GetFilesystemRoot(self):
"""Returns the root directory of the file system."""
return self._UnixRoot()
def GetTempDirectory(self):
"""Returns the file system temp directory
Note that this does not use a random subdirectory, so it's not
intrinsically secure. If you need a secure subdir, use the tempfile
package.
"""
return os.getenv("TMPDIR", "/tmp")
def FilenameToUri(self, path, use_http=False, use_ssl=False, port=8000):
"""Convert a filesystem path to a URI.
Args:
path: For an http URI, the path relative to the httpd server's
DocumentRoot; for a file URI, the full path to the file.
use_http: if True, returns a URI of the form http://127.0.0.1:8000/.
If False, returns a file:/// URI.
use_ssl: if True, returns HTTPS URL (https://127.0.0.1:8000/).
This parameter is ignored if use_http=False.
port: The port number to append when returning an HTTP URI
"""
if use_http:
protocol = 'http'
if use_ssl:
protocol = 'https'
return "%s://127.0.0.1:%d/%s" % (protocol, port, path)
return "file://" + path
def GetStartHttpdCommand(self, output_dir,
httpd_conf_path, mime_types_path,
document_root=None, apache2=False):
"""Prepares the config file and output directory to start an httpd server.
Returns a list of strings containing the server's command line+args.
Args:
output_dir: the path to the server's output directory, for log files.
It will be created if necessary.
httpd_conf_path: full path to the httpd.conf file to be used.
mime_types_path: full path to the mime.types file to be used.
document_root: full path to the DocumentRoot. If None, the DocumentRoot
from the httpd.conf file will be used. Note that the httpd.conf
file alongside this script does not specify any DocumentRoot, so if
you're using that one, be sure to specify a document_root here.
apache2: boolean if true will cause this function to return start
command for Apache 2.x as opposed to Apache 1.3.x. This flag
is ignored on Linux (but preserved here for compatibility in
        function signature with win), where apache2 is always used.
"""
exe_name = "apache2"
cert_file = google.path_utils.FindUpward(self._base_dir, 'tools',
'python', 'google',
'httpd_config', 'httpd2.pem')
ssl_enabled = os.path.exists('/etc/apache2/mods-enabled/ssl.conf')
httpd_vars = {
"httpd_executable_path":
os.path.join(self._UnixRoot(), "usr", "sbin", exe_name),
"httpd_conf_path": httpd_conf_path,
"ssl_certificate_file": cert_file,
"document_root" : document_root,
"server_root": os.path.join(self._UnixRoot(), "usr"),
"mime_types_path": mime_types_path,
"output_dir": output_dir,
"ssl_mutex": "file:"+os.path.join(output_dir, "ssl_mutex"),
"ssl_session_cache":
"shmcb:" + os.path.join(output_dir, "ssl_scache") + "(512000)",
"user": os.environ.get("USER", "#%d" % os.geteuid()),
"lock_file": os.path.join(output_dir, "accept.lock"),
}
google.path_utils.MaybeMakeDirectory(output_dir)
# We have to wrap the command in bash
# -C: process directive before reading config files
# -c: process directive after reading config files
# Apache wouldn't run CGIs with permissions==700 unless we add
# -c User "<username>"
httpd_cmd_string = (
'%(httpd_executable_path)s'
' -f %(httpd_conf_path)s'
' -c \'TypesConfig "%(mime_types_path)s"\''
' -c \'CustomLog "%(output_dir)s/access_log.txt" common\''
' -c \'ErrorLog "%(output_dir)s/error_log.txt"\''
' -c \'PidFile "%(output_dir)s/httpd.pid"\''
' -C \'User "%(user)s"\''
' -C \'ServerRoot "%(server_root)s"\''
' -c \'LockFile "%(lock_file)s"\''
)
if document_root:
httpd_cmd_string += ' -C \'DocumentRoot "%(document_root)s"\''
if ssl_enabled:
httpd_cmd_string += (
' -c \'SSLCertificateFile "%(ssl_certificate_file)s"\''
' -c \'SSLMutex "%(ssl_mutex)s"\''
' -c \'SSLSessionCache "%(ssl_session_cache)s"\''
)
# Save a copy of httpd_cmd_string to use for stopping httpd
self._httpd_cmd_string = httpd_cmd_string % httpd_vars
httpd_cmd = [self._bash, "-c", self._httpd_cmd_string]
return httpd_cmd
def GetStopHttpdCommand(self):
"""Returns a list of strings that contains the command line+args needed to
stop the http server used in the http tests.
This tries to fetch the pid of httpd (if available) and returns the
command to kill it. If pid is not available, kill all httpd processes
"""
if not self._httpd_cmd_string:
return ["true"] # Haven't been asked for the start cmd yet. Just pass.
# Add a sleep after the shutdown because sometimes it takes some time for
# the port to be available again.
return [self._bash, "-c", self._httpd_cmd_string + ' -k stop && sleep 5']
|
siosio/intellij-community
|
refs/heads/master
|
python/testData/completion/rbStringPath/a.py
|
12
|
def f(storage_path):
pass
f(rb"./foo<caret>")
|
TalkingCactus/tgstation
|
refs/heads/master
|
tools/midi2piano/pyperclip/__init__.py
|
110
|
"""
Pyperclip
A cross-platform clipboard module for Python. (only handles plain text for now)
By Al Sweigart al@inventwithpython.com
BSD License
Usage:
import pyperclip
pyperclip.copy('The text to be copied to the clipboard.')
spam = pyperclip.paste()
if not pyperclip.copy:
print("Copy functionality unavailable!")
On Windows, no additional modules are needed.
On Mac, the module uses pbcopy and pbpaste, which should come with the os.
On Linux, install xclip or xsel via package manager. For example, in Debian:
sudo apt-get install xclip
Otherwise on Linux, you will need the gtk or PyQt4 modules installed.
gtk and PyQt4 modules are not available for Python 3,
and this module does not work with PyGObject yet.
"""
__version__ = '1.5.27'
import platform
import os
import subprocess
from .clipboards import (init_osx_clipboard,
init_gtk_clipboard, init_qt_clipboard,
init_xclip_clipboard, init_xsel_clipboard,
init_klipper_clipboard, init_no_clipboard)
from .windows import init_windows_clipboard
# `import PyQt4` sys.exit()s if DISPLAY is not in the environment.
# Thus, we need to detect the presence of $DISPLAY manually
# and not load PyQt4 if it is absent.
HAS_DISPLAY = os.getenv("DISPLAY", False)
CHECK_CMD = "where" if platform.system() == "Windows" else "which"
def _executable_exists(name):
return subprocess.call([CHECK_CMD, name],
stdout=subprocess.PIPE, stderr=subprocess.PIPE) == 0
def determine_clipboard():
# Determine the OS/platform and set
# the copy() and paste() functions accordingly.
if 'cygwin' in platform.system().lower():
# FIXME: pyperclip currently does not support Cygwin,
# see https://github.com/asweigart/pyperclip/issues/55
pass
elif os.name == 'nt' or platform.system() == 'Windows':
return init_windows_clipboard()
if os.name == 'mac' or platform.system() == 'Darwin':
return init_osx_clipboard()
if HAS_DISPLAY:
# Determine which command/module is installed, if any.
try:
import gtk # check if gtk is installed
except ImportError:
pass
else:
return init_gtk_clipboard()
try:
import PyQt4 # check if PyQt4 is installed
except ImportError:
pass
else:
return init_qt_clipboard()
if _executable_exists("xclip"):
return init_xclip_clipboard()
if _executable_exists("xsel"):
return init_xsel_clipboard()
if _executable_exists("klipper") and _executable_exists("qdbus"):
return init_klipper_clipboard()
return init_no_clipboard()
def set_clipboard(clipboard):
global copy, paste
clipboard_types = {'osx': init_osx_clipboard,
'gtk': init_gtk_clipboard,
'qt': init_qt_clipboard,
'xclip': init_xclip_clipboard,
'xsel': init_xsel_clipboard,
'klipper': init_klipper_clipboard,
'windows': init_windows_clipboard,
'no': init_no_clipboard}
copy, paste = clipboard_types[clipboard]()
copy, paste = determine_clipboard()
__all__ = ["copy", "paste"]
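# Usage sketch (not part of the original file): copy()/paste() are already bound
# by the determine_clipboard() call above; to force a specific backend, rebind
# them explicitly (assuming xclip is installed):
#
#     import pyperclip
#     pyperclip.set_clipboard('xclip')
#     pyperclip.copy('hello')
#     assert pyperclip.paste() == 'hello'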
|
jamesrobertlloyd/gpss-research
|
refs/heads/master
|
source/cblparallel/counter.py
|
13
|
"""
Copyright (c) 2011,2012 George Dahl
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject
to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software. THE
SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from sys import stderr
class Counter(object):
def __init__(self, step=10):
self.cur = 0
self.step = step
def tick(self):
self.cur += 1
if self.cur % self.step == 0:
stderr.write( str(self.cur ) )
stderr.write( "\r" )
stderr.flush()
def done(self):
stderr.write( str(self.cur ) )
stderr.write( "\n" )
stderr.flush()
class Progress(object):
def __init__(self, numSteps):
self.total = numSteps
self.cur = 0
self.curPercent = 0
def tick(self):
self.cur += 1
newPercent = (100*self.cur)/self.total
if newPercent > self.curPercent:
self.curPercent = newPercent
stderr.write( str(self.curPercent)+"%" )
stderr.write( "\r" )
stderr.flush()
def done(self):
stderr.write( '100%' )
stderr.write( "\n" )
stderr.flush()
def ProgressLine(line):
stderr.write(line)
stderr.write( "\r" )
stderr.flush()
def main():
from time import sleep
for i in range(500):
s = str(2.379*i)
ProgressLine(s)
sleep(0.02)
c = Counter(5)
for i in range(500):
c.tick()
sleep(.005)
c.done()
p = Progress(5000)
for i in range(5000):
p.tick()
sleep(.0005)
p.done()
if __name__ == "__main__":
main()
|
DakRomo/2017Challenges
|
refs/heads/master
|
challenge_1/python/DTCitron/src/main.py
|
3
|
import sys
if __name__ == "__main__":
# Read in all inputs
input = sys.argv[1:]
# Join list of inputs into a string
input_string = " ".join(input)
# Reverse now
reverse_string = input_string[::-1]
# Print
print reverse_string
|
a-parhom/edx-platform
|
refs/heads/master
|
common/djangoapps/xblock_django/migrations/0002_auto_20160204_0809.py
|
60
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('xblock_django', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='xblockdisableconfig',
name='disabled_create_blocks',
field=models.TextField(default=b'', help_text='Space-separated list of XBlock types whose creation to disable in Studio.', blank=True),
),
]
|
pluckljn/paimei
|
refs/heads/master
|
console/modules/PAIMEIexplorer.py
|
6
|
#
# PaiMei
# Copyright (C) 2006 Pedram Amini <pedram.amini@gmail.com>
#
# $Id: PAIMEIexplorer.py 194 2007-04-05 15:31:53Z cameron $
#
# This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this program; if not, write to the Free
# Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
'''
@author: Pedram Amini
@license: GNU General Public License 2.0 or later
@contact: pedram.amini@gmail.com
@organization: www.openrce.org
'''
import wx
import wx.html as html
import sys
import _PAIMEIexplorer
#######################################################################################################################
class PAIMEIexplorer (wx.Panel):
'''
The PIDA module explorer panel.
'''
documented_properties = {
"pida_modules" : "Dictionary of loaded PIDA modules.",
"pida_copy(module_name)" : "Copy the specified module from pstalker to the explorer pane.",
}
list_book = None # handle to list book.
main_frame = None # handle to top most frame.
pida_modules = {} # dictionary of loaded PIDA modules.
def __init__(self, *args, **kwds):
# begin wxGlade: PAIMEIexplorer.__init__
kwds["style"] = wx.TAB_TRAVERSAL
wx.Panel.__init__(self, *args, **kwds)
self.log_splitter = wx.SplitterWindow(self, -1, style=wx.SP_3D|wx.SP_BORDER)
self.log_window = wx.Panel(self.log_splitter, -1)
self.top_window = wx.Panel(self.log_splitter, -1)
self.disassmbly_column_staticbox = wx.StaticBox(self.top_window, -1, "Disassembly")
self.special_column_staticbox = wx.StaticBox(self.top_window, -1, "Special")
self.browser_column_staticbox = wx.StaticBox(self.top_window, -1, "Browser")
self.pida_modules_static = wx.StaticText(self.top_window, -1, "PIDA Modules")
self.pida_modules_list = _PAIMEIexplorer.PIDAModulesListCtrl.PIDAModulesListCtrl(self.top_window, -1, top=self, style=wx.LC_REPORT|wx.SUNKEN_BORDER)
self.add_module = wx.Button(self.top_window, -1, "Add Module(s)")
self.explorer = _PAIMEIexplorer.ExplorerTreeCtrl.ExplorerTreeCtrl(self.top_window, -1, top=self, style=wx.TR_HAS_BUTTONS|wx.TR_LINES_AT_ROOT|wx.TR_DEFAULT_STYLE|wx.SUNKEN_BORDER)
self.disassembly = _PAIMEIexplorer.HtmlWindow.HtmlWindow(self.top_window, -1, top=self, style=wx.NO_FULL_REPAINT_ON_RESIZE)
self.special = wx.TextCtrl(self.top_window, -1, "", style=wx.TE_MULTILINE|wx.TE_READONLY)
self.log = wx.TextCtrl(self.log_window, -1, "", style=wx.TE_MULTILINE|wx.TE_READONLY|wx.TE_LINEWRAP)
self.__set_properties()
self.__do_layout()
# end wxGlade
# set the default sash position to be 100 pixels from the bottom (small log window).
self.log_splitter.SetSashPosition(-100)
self.list_book = kwds["parent"] # handle to list book.
self.main_frame = self.list_book.top # handle to top most frame.
# log window bindings.
self.Bind(wx.EVT_TEXT_MAXLEN, self.OnMaxLogLengthReached, self.log)
# explorer tree ctrl.
self.explorer.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self.explorer.on_item_activated)
self.explorer.Bind(wx.EVT_TREE_SEL_CHANGED, self.explorer.on_item_sel_changed)
self.explorer.Bind(wx.EVT_TREE_ITEM_RIGHT_CLICK, self.explorer.on_item_right_click)
self.explorer.Bind(wx.EVT_RIGHT_UP, self.explorer.on_item_right_click)
self.explorer.Bind(wx.EVT_RIGHT_DOWN, self.explorer.on_item_right_down)
# pida modules list ctrl.
self.Bind(wx.EVT_BUTTON, self.pida_modules_list.on_add_module, self.add_module)
self.pida_modules_list.Bind(wx.EVT_COMMAND_RIGHT_CLICK, self.pida_modules_list.on_right_click)
self.pida_modules_list.Bind(wx.EVT_RIGHT_UP, self.pida_modules_list.on_right_click)
self.pida_modules_list.Bind(wx.EVT_RIGHT_DOWN, self.pida_modules_list.on_right_down)
self.pida_modules_list.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.pida_modules_list.on_activated)
self.msg("PaiMei Explorer")
self.msg("Module by Pedram Amini\n")
####################################################################################################################
def __set_properties (self):
# set the max length to whatever the widget supports (typically 32k).
self.log.SetMaxLength(0)
# begin wxGlade: PAIMEIexplorer.__set_properties
self.SetFont(wx.Font(8, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, "MS Shell Dlg 2"))
self.pida_modules_static.SetFont(wx.Font(8, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, "MS Shell Dlg 2"))
self.pida_modules_list.SetFont(wx.Font(8, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, "MS Shell Dlg 2"))
self.add_module.SetFont(wx.Font(8, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, "MS Shell Dlg 2"))
self.explorer.SetFont(wx.Font(8, wx.DEFAULT, wx.NORMAL, wx.NORMAL, 0, "MS Shell Dlg 2"))
self.special.SetFont(wx.Font(10, wx.MODERN, wx.NORMAL, wx.NORMAL, 0, "Courier"))
self.log.SetFont(wx.Font(8, wx.MODERN, wx.NORMAL, wx.NORMAL, 0, "Lucida Console"))
self.log_splitter.SetMinimumPaneSize(25)
# end wxGlade
####################################################################################################################
def __do_layout (self):
# begin wxGlade: PAIMEIexplorer.__do_layout
overall = wx.BoxSizer(wx.HORIZONTAL)
log_window_sizer = wx.BoxSizer(wx.HORIZONTAL)
columns = wx.BoxSizer(wx.HORIZONTAL)
special_column = wx.StaticBoxSizer(self.special_column_staticbox, wx.VERTICAL)
disassmbly_column = wx.StaticBoxSizer(self.disassmbly_column_staticbox, wx.VERTICAL)
browser_column = wx.StaticBoxSizer(self.browser_column_staticbox, wx.VERTICAL)
browser_column.Add(self.pida_modules_static, 0, wx.ADJUST_MINSIZE, 0)
browser_column.Add(self.pida_modules_list, 1, wx.EXPAND, 0)
browser_column.Add(self.add_module, 0, wx.EXPAND|wx.ADJUST_MINSIZE, 0)
browser_column.Add(self.explorer, 5, wx.EXPAND, 0)
columns.Add(browser_column, 1, wx.EXPAND, 0)
disassmbly_column.Add(self.disassembly, 1, wx.GROW, 0)
columns.Add(disassmbly_column, 2, wx.EXPAND, 0)
special_column.Add(self.special, 1, wx.EXPAND|wx.ADJUST_MINSIZE, 0)
columns.Add(special_column, 1, wx.EXPAND, 0)
self.top_window.SetAutoLayout(True)
self.top_window.SetSizer(columns)
columns.Fit(self.top_window)
columns.SetSizeHints(self.top_window)
log_window_sizer.Add(self.log, 1, wx.EXPAND, 0)
self.log_window.SetAutoLayout(True)
self.log_window.SetSizer(log_window_sizer)
log_window_sizer.Fit(self.log_window)
log_window_sizer.SetSizeHints(self.log_window)
self.log_splitter.SplitHorizontally(self.top_window, self.log_window)
overall.Add(self.log_splitter, 1, wx.EXPAND, 0)
self.SetAutoLayout(True)
self.SetSizer(overall)
overall.Fit(self)
overall.SetSizeHints(self)
# end wxGlade
####################################################################################################################
def OnMaxLogLengthReached (self, event):
'''
        Clear the log window when the max length is reached.
@todo: Make this smarter by maybe only clearing half the lines.
'''
self.log.SetValue("")
####################################################################################################################
def err (self, message):
'''
Write an error message to log window.
'''
self.log.AppendText("[!] %s\n" % message)
####################################################################################################################
def msg (self, message):
'''
Write a log message to log window.
'''
self.log.AppendText("[*] %s\n" % message)
####################################################################################################################
def pida_copy (self, module_name):
'''
Load the specified module name from the pstalker module directly into the explorer tree control.
@type module_name: String
@param module_name: Name of module to copy and load from pstalker module.
'''
other = self.main_frame.modules["pstalker"].pida_modules
if not other.has_key(module_name):
self.err("Specified module name %s, not found." % module_name)
return
self.pida_modules[module_name] = other[module_name]
# determine the function and basic block counts for this module.
function_count = len(self.pida_modules[module_name].nodes)
basic_block_count = 0
for function in self.pida_modules[module_name].nodes.values():
basic_block_count += len(function.nodes)
idx = len(self.pida_modules) - 1
self.pida_modules_list.InsertStringItem(idx, "")
self.pida_modules_list.SetStringItem(idx, 0, "%d" % function_count)
self.pida_modules_list.SetStringItem(idx, 1, "%d" % basic_block_count)
self.pida_modules_list.SetStringItem(idx, 2, module_name)
|
operasoftware/presto-testo
|
refs/heads/master
|
wpt/websockets/websock_handlers/echo_wsh.py
|
8
|
#!/usr/bin/python
from mod_pywebsocket import msgutil
_GOODBYE_MESSAGE = u'Goodbye'
def web_socket_do_extra_handshake(request):
# This example handler accepts any request. See origin_check_wsh.py for how
# to reject access from untrusted scripts based on origin value.
pass # Always accept.
def web_socket_transfer_data(request):
while True:
line = request.ws_stream.receive_message()
if line is None:
return
if isinstance(line, unicode):
request.ws_stream.send_message(line, binary=False)
if line == _GOODBYE_MESSAGE:
return
else:
request.ws_stream.send_message(line, binary=True)
|
alheinecke/tensorflow-xsmm
|
refs/heads/master
|
tensorflow/tensorboard/scripts/generate_testdata.py
|
39
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate some standard test data for debugging TensorBoard.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import bisect
import math
import os
import os.path
import random
import shutil
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import summary_pb2
from tensorflow.core.util import event_pb2
from tensorflow.python.client import session as session_lib
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import logging_ops
from tensorflow.python.platform import app
from tensorflow.python.platform import flags
from tensorflow.python.summary.writer import writer as writer_lib
tf.flags.DEFINE_string("target", None, """The directoy where serialized data
will be written""")
flags.DEFINE_boolean("overwrite", False, """Whether to remove and overwrite
TARGET if it already exists.""")
FLAGS = tf.flags.FLAGS
# Hardcode a start time and reseed so script always generates the same data.
_start_time = 0
random.seed(0)
def _MakeHistogramBuckets():
v = 1E-12
buckets = []
neg_buckets = []
while v < 1E20:
buckets.append(v)
neg_buckets.append(-v)
v *= 1.1
# Should include DBL_MAX, but won't bother for test data.
return neg_buckets[::-1] + [0] + buckets
def _MakeHistogram(values):
"""Convert values into a histogram proto using logic from histogram.cc."""
limits = _MakeHistogramBuckets()
counts = [0] * len(limits)
for v in values:
idx = bisect.bisect_left(limits, v)
counts[idx] += 1
limit_counts = [(limits[i], counts[i]) for i in xrange(len(limits))
if counts[i]]
bucket_limit = [lc[0] for lc in limit_counts]
bucket = [lc[1] for lc in limit_counts]
sum_sq = sum(v * v for v in values)
return summary_pb2.HistogramProto(
min=min(values),
max=max(values),
num=len(values),
sum=sum(values),
sum_squares=sum_sq,
bucket_limit=bucket_limit,
bucket=bucket)
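# Illustrative example (not in the original file): _MakeHistogram([1.0, 2.0, 4.0])
# returns a HistogramProto with min=1.0, max=4.0, num=3, sum=7.0 and
# sum_squares=21.0; bucket_limit/bucket only list the geometric buckets (spaced
# by a factor of 1.1 around zero) that actually received a count.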
def WriteScalarSeries(writer, tag, f, n=5):
"""Write a series of scalar events to writer, using f to create values."""
step = 0
wall_time = _start_time
for i in xrange(n):
v = f(i)
value = summary_pb2.Summary.Value(tag=tag, simple_value=v)
summary = summary_pb2.Summary(value=[value])
event = event_pb2.Event(wall_time=wall_time, step=step, summary=summary)
writer.add_event(event)
step += 1
wall_time += 10
def WriteHistogramSeries(writer, tag, mu_sigma_tuples, n=20):
"""Write a sequence of normally distributed histograms to writer."""
step = 0
wall_time = _start_time
for [mean, stddev] in mu_sigma_tuples:
data = [random.normalvariate(mean, stddev) for _ in xrange(n)]
histo = _MakeHistogram(data)
summary = summary_pb2.Summary(
value=[summary_pb2.Summary.Value(
tag=tag, histo=histo)])
event = event_pb2.Event(wall_time=wall_time, step=step, summary=summary)
writer.add_event(event)
step += 10
wall_time += 100
def WriteImageSeries(writer, tag, n_images=1):
"""Write a few dummy images to writer."""
step = 0
session = session_lib.Session()
p = array_ops.placeholder("uint8", (1, 4, 4, 3))
s = logging_ops.image_summary(tag, p)
for _ in xrange(n_images):
im = np.random.random_integers(0, 255, (1, 4, 4, 3))
summ = session.run(s, feed_dict={p: im})
writer.add_summary(summ, step)
step += 20
session.close()
def WriteAudioSeries(writer, tag, n_audio=1):
"""Write a few dummy audio clips to writer."""
step = 0
session = session_lib.Session()
min_frequency_hz = 440
max_frequency_hz = 880
sample_rate = 4000
duration_frames = sample_rate * 0.5 # 0.5 seconds.
frequencies_per_run = 1
num_channels = 2
p = array_ops.placeholder("float32", (frequencies_per_run, duration_frames,
num_channels))
s = logging_ops.audio_summary(tag, p, sample_rate)
for _ in xrange(n_audio):
# Generate a different frequency for each channel to show stereo works.
frequencies = np.random.random_integers(
min_frequency_hz,
max_frequency_hz,
size=(frequencies_per_run, num_channels))
tiled_frequencies = np.tile(frequencies, (1, duration_frames))
tiled_increments = np.tile(
np.arange(0, duration_frames),
(num_channels, 1)).T.reshape(1, duration_frames * num_channels)
tones = np.sin(2.0 * np.pi * tiled_frequencies * tiled_increments /
sample_rate)
tones = tones.reshape(frequencies_per_run, duration_frames, num_channels)
summ = session.run(s, feed_dict={p: tones})
writer.add_summary(summ, step)
step += 20
session.close()
def GenerateTestData(path):
"""Generates the test data directory."""
run1_path = os.path.join(path, "run1")
os.makedirs(run1_path)
writer1 = writer_lib.FileWriter(run1_path)
WriteScalarSeries(writer1, "foo/square", lambda x: x * x)
WriteScalarSeries(writer1, "bar/square", lambda x: x * x)
WriteScalarSeries(writer1, "foo/sin", math.sin)
WriteScalarSeries(writer1, "foo/cos", math.cos)
WriteHistogramSeries(writer1, "histo1", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1],
[1, 1]])
WriteImageSeries(writer1, "im1")
WriteImageSeries(writer1, "im2")
WriteAudioSeries(writer1, "au1")
run2_path = os.path.join(path, "run2")
os.makedirs(run2_path)
writer2 = writer_lib.FileWriter(run2_path)
WriteScalarSeries(writer2, "foo/square", lambda x: x * x * 2)
WriteScalarSeries(writer2, "bar/square", lambda x: x * x * 3)
WriteScalarSeries(writer2, "foo/cos", lambda x: math.cos(x) * 2)
WriteHistogramSeries(writer2, "histo1", [[0, 2], [0.3, 2], [0.5, 2], [0.7, 2],
[1, 2]])
WriteHistogramSeries(writer2, "histo2", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1],
[1, 1]])
WriteImageSeries(writer2, "im1")
WriteAudioSeries(writer2, "au2")
graph_def = graph_pb2.GraphDef()
node1 = graph_def.node.add()
node1.name = "a"
node1.op = "matmul"
node2 = graph_def.node.add()
node2.name = "b"
node2.op = "matmul"
node2.input.extend(["a:0"])
writer1.add_graph(graph_def)
node3 = graph_def.node.add()
node3.name = "c"
node3.op = "matmul"
node3.input.extend(["a:0", "b:0"])
writer2.add_graph(graph_def)
writer1.close()
writer2.close()
def main(unused_argv=None):
target = FLAGS.target
if not target:
print("The --target flag is required.")
return -1
if os.path.exists(target):
if FLAGS.overwrite:
if os.path.isdir(target):
shutil.rmtree(target)
else:
os.remove(target)
else:
print("Refusing to overwrite target %s without --overwrite" % target)
return -2
GenerateTestData(target)
if __name__ == "__main__":
app.run()
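# Hypothetical invocation (script name assumed; flags are the ones defined above):
#   python generate_testdata.py --target=/tmp/tb_testdata --overwrite=true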
|
PhilipGarnero/django-pyscss
|
refs/heads/master
|
tests/utils.py
|
2
|
import shutil
from django.test import TestCase
from django.core.management import call_command
from django.conf import settings
class CollectStaticTestCase(TestCase):
@classmethod
def setUpClass(cls):
super(CollectStaticTestCase, cls).setUpClass()
call_command('collectstatic', interactive=False, verbosity=0)
class NoCollectStaticTestCase(TestCase):
@classmethod
def setUpClass(cls):
super(NoCollectStaticTestCase, cls).setUpClass()
shutil.rmtree(settings.STATIC_ROOT, ignore_errors=True)
def clean_css(string):
# The output of the compiled CSS doesn't have a newline between the ; and
# the } for some reason.
return string.strip() \
.replace('\n', '') \
.replace('; ', ';')
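# Illustrative behaviour (input chosen for the example only):
#   clean_css('a {\n  color: red;\n}\n') -> 'a {  color: red;}'
# Newlines are dropped, and any '; ' sequence would collapse to ';'.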
|
vitorio/bite-project
|
refs/heads/master
|
deps/gdata-python-client/src/gdata/Crypto/test.py
|
225
|
#
# Test script for the Python Cryptography Toolkit.
#
__revision__ = "$Id: test.py,v 1.7 2002/07/11 14:31:19 akuchling Exp $"
import os, sys
# Add the build directory to the front of sys.path
from distutils.util import get_platform
s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
s = os.path.join(os.getcwd(), s)
sys.path.insert(0, s)
s = os.path.join(os.getcwd(), 'test')
sys.path.insert(0, s)
from Crypto.Util import test
args = sys.argv[1:]
quiet = "--quiet" in args
if quiet: args.remove('--quiet')
if not quiet:
print '\nStream Ciphers:'
print '==============='
if args: test.TestStreamModules(args, verbose= not quiet)
else: test.TestStreamModules(verbose= not quiet)
if not quiet:
print '\nBlock Ciphers:'
print '=============='
if args: test.TestBlockModules(args, verbose= not quiet)
else: test.TestBlockModules(verbose= not quiet)
|
PrefPy/opra
|
refs/heads/master
|
compsocsite/polls/migrations/0122_question_single_enabled.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2018-07-11 19:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('polls', '0121_remove_question_single_enabled'),
]
operations = [
migrations.AddField(
model_name='question',
name='single_enabled',
field=models.BooleanField(default=False),
),
]
|
ysavchuk/ansible
|
refs/heads/devel
|
lib/ansible/plugins/action/script.py
|
67
|
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
TRANSFERS_FILES = True
def run(self, tmp=None, task_vars=None):
''' handler for file transfer operations '''
if self._play_context.check_mode:
return dict(skipped=True, msg='check mode not supported for this module')
if not tmp:
tmp = self._make_tmp_path()
creates = self._task.args.get('creates')
if creates:
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
result = self._execute_module(module_name='stat', module_args=dict(path=creates), task_vars=task_vars, tmp=tmp, persist_files=True)
stat = result.get('stat', None)
if stat and stat.get('exists', False):
return dict(skipped=True, msg=("skipped, since %s exists" % creates))
removes = self._task.args.get('removes')
if removes:
# do not run the command if the line contains removes=filename
# and the filename does not exist. This allows idempotence
# of command executions.
result = self._execute_module(module_name='stat', module_args=dict(path=removes), task_vars=task_vars, tmp=tmp, persist_files=True)
stat = result.get('stat', None)
if stat and not stat.get('exists', False):
return dict(skipped=True, msg=("skipped, since %s does not exist" % removes))
# the script name is the first item in the raw params, so we split it
# out now so we know the file name we need to transfer to the remote,
# and everything else is an argument to the script which we need later
# to append to the remote command
parts = self._task.args.get('_raw_params', '').strip().split()
source = parts[0]
args = ' '.join(parts[1:])
if self._task._role is not None:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
else:
source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)
# transfer the file to a remote tmp location
tmp_src = self._connection._shell.join_path(tmp, os.path.basename(source))
self._connection.put_file(source, tmp_src)
sudoable = True
# set file permissions, more permissive when the copy is done as a different user
if self._play_context.become and self._play_context.become_user != 'root':
chmod_mode = 'a+rx'
sudoable = False
else:
chmod_mode = '+rx'
self._remote_chmod(tmp, chmod_mode, tmp_src, sudoable=sudoable)
# add preparation steps to one ssh roundtrip executing the script
env_string = self._compute_environment_string()
script_cmd = ' '.join([env_string, tmp_src, args])
result = self._low_level_execute_command(cmd=script_cmd, tmp=None, sudoable=True)
# clean up after
if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES:
self._remove_tmp_path(tmp)
result['changed'] = True
return result
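# Illustrative playbook task exercising this action plugin (paths are examples only):
#   - name: provision the host once
#     script: /usr/local/bin/provision.sh --init creates=/var/run/provisioned
# With 'creates', the stat check above skips execution when the file already
# exists; 'removes' skips it when the file is absent.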
|
storm-computers/odoo
|
refs/heads/9.0
|
addons/mrp/report/bom_structure.py
|
15
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.osv import osv
from openerp.report import report_sxw
class bom_structure(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(bom_structure, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'get_children': self.get_children,
})
def get_children(self, object, level=0):
result = []
def _get_rec(object, level, qty=1.0):
for l in object:
res = {}
res['pname'] = l.product_id.name_get()[0][1]
res['pcode'] = l.product_id.default_code
res['pqty'] = l.product_qty * qty
res['uname'] = l.product_uom.name
res['level'] = level
res['code'] = l.bom_id.code
result.append(res)
if l.child_line_ids:
if level<6:
level += 1
_get_rec(l.child_line_ids, level, qty=res['pqty'])
if level>0 and level<6:
level -= 1
return result
children = _get_rec(object,level)
return children
class report_mrpbomstructure(osv.AbstractModel):
_name = 'report.mrp.report_mrpbomstructure'
_inherit = 'report.abstract_report'
_template = 'mrp.report_mrpbomstructure'
_wrapped_report_class = bom_structure
|
vitaly4uk/django
|
refs/heads/master
|
django/contrib/sites/shortcuts.py
|
615
|
from __future__ import unicode_literals
from django.apps import apps
def get_current_site(request):
"""
Checks if contrib.sites is installed and returns either the current
``Site`` object or a ``RequestSite`` object based on the request.
"""
# Imports are inside the function because its point is to avoid importing
# the Site models when django.contrib.sites isn't installed.
if apps.is_installed('django.contrib.sites'):
from .models import Site
return Site.objects.get_current(request)
else:
from .requests import RequestSite
return RequestSite(request)
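# Illustrative use in a view (view, template, and context names are assumed):
#   def homepage(request):
#       site = get_current_site(request)
#       return render(request, 'home.html', {'site_name': site.name})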
|
azukov/py-orbit
|
refs/heads/master
|
py/orbit/teapot/teapot_matrix_lattice.py
|
2
|
"""
The TEAPOT MATRIX_Lattice is a subclass of the MATRIX_Lattice class. The Matrix lattice is generated
by using the TEAPOT lattice. The matrices are the linear part of the TEAPOT elements
tracking. The number of transport matrices in the lattice is equal to the sum of
all parts of TEAPOT elements. The RF cavities in the Matrix lattice are the
TEAPOT RF Cavity class instances.
"""
import os
import math
#import bunch
from bunch import Bunch
# import the function that creates multidimensional arrays
from orbit.utils import orbitFinalize
# import general accelerator elements and lattice
from orbit.lattice import AccLattice, AccNode, AccActionsContainer
# import C++ matrix, phase vector, and generator classes
from orbit.teapot_base import MatrixGenerator
from orbit.matrix_lattice import MATRIX_Lattice, BaseMATRIX
# import the MAD parser to construct lattices of TEAPOT elements.
from orbit.teapot import TEAPOT_Lattice, RingRFTEAPOT, BaseTEAPOT
class TEAPOT_MATRIX_Lattice(MATRIX_Lattice):
"""
The subclass of the MATRIX_Lattice class. Shell class for the BaseMATRIX nodes collection.
The TEAPOT_Lattice instance is needed to create the TEAPOT_MATRIX_Lattice instance.
The nodes for RF elements are the usual TEAPOT RingRFTEAPOT nodes. The Bunch instance
is needed for the MATRIX_Lattice constructor to specify the particle's energy and
other parameters.
"""
def __init__(self, teapot_lattice, bunch, name = None):
MATRIX_Lattice.__init__(self,name)
if(isinstance(teapot_lattice,TEAPOT_Lattice) != True):
orbitFinalize("Constructor orbit.teapot.TEAPOT_MATRIX_Lattice needs the TEAPOT_Lattice instance.")
#memorize the TEAPOT LATTICE
if(name == None):
name = teapot_lattice.getName()
self.setName(name)
self.teapot_lattice = teapot_lattice
self.bunch = Bunch()
self.lost_bunch = Bunch()
bunch.copyEmptyBunchTo(self.bunch)
bunch.copyEmptyBunchTo(self.lost_bunch)
self.matrixGenerator = MatrixGenerator()
#----------make MATRIX lattice from TEAPOT
def twissAction(paramsDict):
node = paramsDict["node"]
bunch = paramsDict["bunch"]
active_index = node.getActivePartIndex()
n_parts = node.getnParts()
length = node.getLength(active_index)
if(isinstance(node,BaseTEAPOT) == True and isinstance(node,RingRFTEAPOT) == False):
self.matrixGenerator.initBunch(bunch)
node.track(paramsDict)
#bunch is ready
matrixNode = BaseMATRIX(node.getName()+"_"+str(active_index))
matrixNode.addParam("matrix_parent_node",node)
matrixNode.addParam("matrix_parent_node_type",node.getType())
matrixNode.addParam("matrix_parent_node_n_nodes",n_parts)
matrixNode.addParam("matrix_parent_node_active_index",active_index)
matrixNode.setLength(length)
self.matrixGenerator.calculateMatrix(bunch,matrixNode.getMatrix())
#print "============= name=",matrixNode.getName(),
#print " type=",matrixNode.getParam("matrix_parent_node_type"),
#print " L=",matrixNode.getLength()
self.addNode(matrixNode)
if(isinstance(node,RingRFTEAPOT) == True):
rf_node = RingRFTEAPOT(node.getName())
rf_node.setParamsDict(node.getParamsDict().copy())
self.addNode(rf_node)
accContainer = AccActionsContainer()
accContainer.addAction(twissAction,AccActionsContainer.BODY)
paramsDict = {}
paramsDict["bunch"] = self.bunch
paramsDict["lostbunch"] = self.lost_bunch
paramsDict["position"] = 0.
paramsDict["useCharge"] = self.teapot_lattice.getUseRealCharge()
self.teapot_lattice.trackActions(accContainer,paramsDict)
self.makeOneTurnMatrix()
self.initialize()
def getKinEnergy(self):
return self.bunch.getSyncParticle().kinEnergy()
def rebuild(self, Ekin = -1.0):
if(Ekin > 0.):
self.bunch.getSyncParticle().kinEnergy(Ekin)
for matrixNode in self.getNodes():
if(isinstance(matrixNode,BaseMATRIX) == True):
node = matrixNode.getParam("matrix_parent_node")
active_index = matrixNode.getParam("matrix_parent_node_active_index")
n_parts = matrixNode.getParam("matrix_parent_node_n_nodes")
if(n_parts != node.getnParts()):
msg = " orbit.teapot.TEAPOT_MATRIX_Lattice class" + os.linesep
msg = msg + " rebuild(Ekin = -1.0) method" + os.linesep
msg = msg + " TEAPOT node="+node.getName() + os.linesep
msg = msg + " has been changed!" + os.linesep
msg = msg + " Stop!" + os.linesep
orbitFinalize(msg)
self.matrixGenerator.initBunch(self.bunch)
paramsDict = {}
paramsDict["bunch"] = self.bunch
paramsDict["node"] = node
node.setActivePartIndex(active_index)
node.track(paramsDict)
self.matrixGenerator.calculateMatrix(self.bunch,matrixNode.getMatrix())
self.makeOneTurnMatrix()
def getRingParametersDict(self):
"""
		Returns the dictionary with different ring parameters
calculated from the one turn transport matrix. It overloads the
getRingParametersDict(p,m) method from the parent MATRIX_Lattice
class.
"""
momentum = self.bunch.getSyncParticle().momentum()
mass = self.bunch.getSyncParticle().mass()
return MATRIX_Lattice.getRingParametersDict(self, momentum, mass)
def getRingMatrix(self):
"""
		Returns the one turn transport matrix of the ring. It wraps the
		getOneTurnMatrix() method of the parent MATRIX_Lattice class.
"""
return MATRIX_Lattice.getOneTurnMatrix(self)
def getRingOrbit(self,z0):
"""
Returns the tuple ([(position, X),...],[(position,Y),...] ).
		It uses the trackOrbit(z0) method of the parent MATRIX_Lattice
		class.
"""
return self.trackOrbit(z0)
def getRingTwissDataX(self):
"""
Returns the tuple (tuneX, [(position, alphaX),...],[(position,betaX),...] ).
It overloads the getRingTwissDataX(p,m) method from the parent MATRIX_Lattice
class.
"""
res_dict = self.getRingParametersDict()
alpha_x = res_dict["alpha x"]
beta_x = res_dict["beta x [m]"]
return self.trackTwissData(alpha_x,beta_x,"x")
def getRingTwissDataY(self):
"""
Returns the tuple (tuneY, [(position, alphaY),...],[(position,betaY),...] ).
It overloads the getRingTwissDataY(p,m) method from the parent MATRIX_Lattice
class.
"""
res_dict = self.getRingParametersDict()
alpha_y = res_dict["alpha y"]
beta_y = res_dict["beta y [m]"]
return self.trackTwissData(alpha_y,beta_y,"y")
def getRingDispersionDataX(self):
"""
Returns the tuple ([(position, dispX),...],[(position,disp_pX),...] ).
It overloads the getRingDispersionDataX(p,m) method from the parent MATRIX_Lattice
class.
"""
res_dict = self.getRingParametersDict()
disp = res_dict["dispersion x [m]"]
disp_p = res_dict["dispersion prime x"]
momentum = res_dict["momentum [GeV/c]"]
mass = res_dict["mass [GeV]"]
return self.trackDispersionData(momentum, mass, disp, disp_p,"x")
def getRingDispersionDataY(self):
"""
Returns the tuple ([(position, dispY),...],[(position,disp_pY),...] ).
It overloads the getRingDispersionDataY(p,m) method from the parent MATRIX_Lattice
class.
"""
res_dict = self.getRingParametersDict()
disp = res_dict["dispersion y [m]"]
disp_p = res_dict["dispersion prime y"]
momentum = res_dict["momentum [GeV/c]"]
mass = res_dict["mass [GeV]"]
return self.trackDispersionData(momentum, mass, disp, disp_p,"y")
def getTransferTwissDataX(self,alpha_x,beta_x):
"""
Returns the tuple (tuneX, [(position, alphaX),...],[(position,betaX),...] ).
It overloads the getRingTwissDataX(p,m) method from the parent MATRIX_Lattice
class.
"""
res_dict = self.getRingParametersDict()
return self.trackTwissData(alpha_x,beta_x,"x")
def getTransferTwissDataY(self,alpha_y,beta_y):
"""
Returns the tuple (tuneY, [(position, alphaY),...],[(position,betaY),...] ).
It overloads the getRingTwissDataY(p,m) method from the parent MATRIX_Lattice
class.
"""
res_dict = self.getRingParametersDict()
return self.trackTwissData(alpha_y,beta_y,"y")
def getTransferDispersionDataX(self,disp,disp_p):
"""
Returns the tuple ([(position, dispX),...],[(position,disp_pX),...] ).
It overloads the getRingDispersionDataX(p,m) method from the parent MATRIX_Lattice
class.
"""
res_dict = self.getRingParametersDict()
momentum = res_dict["momentum [GeV/c]"]
mass = res_dict["mass [GeV]"]
return self.trackDispersionData(momentum, mass, disp, disp_p,"x")
def getTransferDispersionDataY(self,disp,disp_p):
"""
Returns the tuple ([(position, dispY),...],[(position,disp_pY),...] ).
It overloads the getRingDispersionDataY(p,m) method from the parent MATRIX_Lattice
class.
"""
res_dict = self.getRingParametersDict()
momentum = res_dict["momentum [GeV/c]"]
mass = res_dict["mass [GeV]"]
return self.trackDispersionData(momentum, mass, disp, disp_p,"y")
def getChromaticitiesXY(self):
"""
Calculates chromaticities for X,Y planes for the whole ring
"""
(tuneX,tmp0,tmp1) = self.getRingTwissDataX()
(tuneY,tmp0,tmp1) = self.getRingTwissDataY()
tuneX = tuneX[1][-1]
tuneY = tuneY[1][-1]
self.matrixGenerator.initBunchChromCoeff(self.bunch)
#track bunch through the TEAPOT nodes
def twissAction(paramsDict):
node = paramsDict["node"]
if(isinstance(node, BaseTEAPOT) == True and isinstance(node,RingRFTEAPOT) == False):
node.track(paramsDict)
accContainer = AccActionsContainer()
accContainer.addAction(twissAction,AccActionsContainer.BODY)
paramsDict = {}
paramsDict["bunch"] = self.bunch
self.teapot_lattice.trackActions(accContainer,paramsDict)
res_coeff = self.matrixGenerator.calcChromCoeff(self.bunch)
(coeff_x_dE,coeff_xp_dE,coeff_y_dE,coeff_yp_dE) = res_coeff
momentum = self.bunch.getSyncParticle().momentum()
mass = self.bunch.getSyncParticle().mass()
Ekin = self.bunch.getSyncParticle().kinEnergy()
chromX = - (momentum/(2*math.sin(2*math.pi*tuneX)))*(coeff_x_dE+coeff_xp_dE)
chromX = (momentum/(mass+Ekin))*chromX
chromY = - (momentum/(2*math.sin(2*math.pi*tuneY)))*(coeff_y_dE+coeff_yp_dE)
chromY = (momentum/(mass+Ekin))*chromY
return (chromX/(2*math.pi),chromY/(2*math.pi))
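# Hypothetical usage sketch (assumes a TEAPOT_Lattice and a Bunch already exist):
#   matrix_lattice = TEAPOT_MATRIX_Lattice(teapot_lattice, bunch)
#   (tune_x, alpha_x, beta_x) = matrix_lattice.getRingTwissDataX()
#   (chrom_x, chrom_y) = matrix_lattice.getChromaticitiesXY()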
|
Zhongqilong/mykbengineer
|
refs/heads/master
|
kbe/res/scripts/common/Lib/test/test_plistlib.py
|
78
|
# Copyright (C) 2003-2013 Python Software Foundation
import unittest
import plistlib
import os
import datetime
import codecs
import binascii
import collections
import struct
from test import support
from io import BytesIO
ALL_FORMATS=(plistlib.FMT_XML, plistlib.FMT_BINARY)
# The testdata is generated using Mac/Tools/plistlib_generate_testdata.py
# (which uses PyObjC to control the Cocoa classes for generating plists)
TESTDATA={
plistlib.FMT_XML: binascii.a2b_base64(b'''
PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPCFET0NU
WVBFIHBsaXN0IFBVQkxJQyAiLS8vQXBwbGUvL0RURCBQTElTVCAxLjAvL0VO
IiAiaHR0cDovL3d3dy5hcHBsZS5jb20vRFREcy9Qcm9wZXJ0eUxpc3QtMS4w
LmR0ZCI+CjxwbGlzdCB2ZXJzaW9uPSIxLjAiPgo8ZGljdD4KCTxrZXk+YUJp
Z0ludDwva2V5PgoJPGludGVnZXI+OTIyMzM3MjAzNjg1NDc3NTc2NDwvaW50
ZWdlcj4KCTxrZXk+YUJpZ0ludDI8L2tleT4KCTxpbnRlZ2VyPjkyMjMzNzIw
MzY4NTQ3NzU4NTI8L2ludGVnZXI+Cgk8a2V5PmFEYXRlPC9rZXk+Cgk8ZGF0
ZT4yMDA0LTEwLTI2VDEwOjMzOjMzWjwvZGF0ZT4KCTxrZXk+YURpY3Q8L2tl
eT4KCTxkaWN0PgoJCTxrZXk+YUZhbHNlVmFsdWU8L2tleT4KCQk8ZmFsc2Uv
PgoJCTxrZXk+YVRydWVWYWx1ZTwva2V5PgoJCTx0cnVlLz4KCQk8a2V5PmFV
bmljb2RlVmFsdWU8L2tleT4KCQk8c3RyaW5nPk3DpHNzaWcsIE1hw588L3N0
cmluZz4KCQk8a2V5PmFub3RoZXJTdHJpbmc8L2tleT4KCQk8c3RyaW5nPiZs
dDtoZWxsbyAmYW1wOyAnaGknIHRoZXJlISZndDs8L3N0cmluZz4KCQk8a2V5
PmRlZXBlckRpY3Q8L2tleT4KCQk8ZGljdD4KCQkJPGtleT5hPC9rZXk+CgkJ
CTxpbnRlZ2VyPjE3PC9pbnRlZ2VyPgoJCQk8a2V5PmI8L2tleT4KCQkJPHJl
YWw+MzIuNTwvcmVhbD4KCQkJPGtleT5jPC9rZXk+CgkJCTxhcnJheT4KCQkJ
CTxpbnRlZ2VyPjE8L2ludGVnZXI+CgkJCQk8aW50ZWdlcj4yPC9pbnRlZ2Vy
PgoJCQkJPHN0cmluZz50ZXh0PC9zdHJpbmc+CgkJCTwvYXJyYXk+CgkJPC9k
aWN0PgoJPC9kaWN0PgoJPGtleT5hRmxvYXQ8L2tleT4KCTxyZWFsPjAuNTwv
cmVhbD4KCTxrZXk+YUxpc3Q8L2tleT4KCTxhcnJheT4KCQk8c3RyaW5nPkE8
L3N0cmluZz4KCQk8c3RyaW5nPkI8L3N0cmluZz4KCQk8aW50ZWdlcj4xMjwv
aW50ZWdlcj4KCQk8cmVhbD4zMi41PC9yZWFsPgoJCTxhcnJheT4KCQkJPGlu
dGVnZXI+MTwvaW50ZWdlcj4KCQkJPGludGVnZXI+MjwvaW50ZWdlcj4KCQkJ
PGludGVnZXI+MzwvaW50ZWdlcj4KCQk8L2FycmF5PgoJPC9hcnJheT4KCTxr
ZXk+YU5lZ2F0aXZlQmlnSW50PC9rZXk+Cgk8aW50ZWdlcj4tODAwMDAwMDAw
MDA8L2ludGVnZXI+Cgk8a2V5PmFOZWdhdGl2ZUludDwva2V5PgoJPGludGVn
ZXI+LTU8L2ludGVnZXI+Cgk8a2V5PmFTdHJpbmc8L2tleT4KCTxzdHJpbmc+
RG9vZGFoPC9zdHJpbmc+Cgk8a2V5PmFuRW1wdHlEaWN0PC9rZXk+Cgk8ZGlj
dC8+Cgk8a2V5PmFuRW1wdHlMaXN0PC9rZXk+Cgk8YXJyYXkvPgoJPGtleT5h
bkludDwva2V5PgoJPGludGVnZXI+NzI4PC9pbnRlZ2VyPgoJPGtleT5uZXN0
ZWREYXRhPC9rZXk+Cgk8YXJyYXk+CgkJPGRhdGE+CgkJUEd4dmRITWdiMlln
WW1sdVlYSjVJR2QxYm1zK0FBRUNBenhzYjNSeklHOW1JR0pwYm1GeWVTQm5k
VzVyCgkJUGdBQkFnTThiRzkwY3lCdlppQmlhVzVoY25rZ1ozVnVhejRBQVFJ
RFBHeHZkSE1nYjJZZ1ltbHVZWEo1CgkJSUdkMWJtcytBQUVDQXp4c2IzUnpJ
RzltSUdKcGJtRnllU0JuZFc1clBnQUJBZ004Ykc5MGN5QnZaaUJpCgkJYVc1
aGNua2daM1Z1YXo0QUFRSURQR3h2ZEhNZ2IyWWdZbWx1WVhKNUlHZDFibXMr
QUFFQ0F6eHNiM1J6CgkJSUc5bUlHSnBibUZ5ZVNCbmRXNXJQZ0FCQWdNOGJH
OTBjeUJ2WmlCaWFXNWhjbmtnWjNWdWF6NEFBUUlECgkJUEd4dmRITWdiMlln
WW1sdVlYSjVJR2QxYm1zK0FBRUNBdz09CgkJPC9kYXRhPgoJPC9hcnJheT4K
CTxrZXk+c29tZURhdGE8L2tleT4KCTxkYXRhPgoJUEdKcGJtRnllU0JuZFc1
clBnPT0KCTwvZGF0YT4KCTxrZXk+c29tZU1vcmVEYXRhPC9rZXk+Cgk8ZGF0
YT4KCVBHeHZkSE1nYjJZZ1ltbHVZWEo1SUdkMWJtcytBQUVDQXp4c2IzUnpJ
RzltSUdKcGJtRnllU0JuZFc1clBnQUJBZ004CgliRzkwY3lCdlppQmlhVzVo
Y25rZ1ozVnVhejRBQVFJRFBHeHZkSE1nYjJZZ1ltbHVZWEo1SUdkMWJtcytB
QUVDQXp4cwoJYjNSeklHOW1JR0pwYm1GeWVTQm5kVzVyUGdBQkFnTThiRzkw
Y3lCdlppQmlhVzVoY25rZ1ozVnVhejRBQVFJRFBHeHYKCWRITWdiMllnWW1s
dVlYSjVJR2QxYm1zK0FBRUNBenhzYjNSeklHOW1JR0pwYm1GeWVTQm5kVzVy
UGdBQkFnTThiRzkwCgljeUJ2WmlCaWFXNWhjbmtnWjNWdWF6NEFBUUlEUEd4
dmRITWdiMllnWW1sdVlYSjVJR2QxYm1zK0FBRUNBdz09Cgk8L2RhdGE+Cgk8
a2V5PsOFYmVucmFhPC9rZXk+Cgk8c3RyaW5nPlRoYXQgd2FzIGEgdW5pY29k
ZSBrZXkuPC9zdHJpbmc+CjwvZGljdD4KPC9wbGlzdD4K'''),
plistlib.FMT_BINARY: binascii.a2b_base64(b'''
YnBsaXN0MDDfEBABAgMEBQYHCAkKCwwNDg8QERITFCgpLzAxMjM0NTc2OFdh
QmlnSW50WGFCaWdJbnQyVWFEYXRlVWFEaWN0VmFGbG9hdFVhTGlzdF8QD2FO
ZWdhdGl2ZUJpZ0ludFxhTmVnYXRpdmVJbnRXYVN0cmluZ1thbkVtcHR5RGlj
dFthbkVtcHR5TGlzdFVhbkludFpuZXN0ZWREYXRhWHNvbWVEYXRhXHNvbWVN
b3JlRGF0YWcAxQBiAGUAbgByAGEAYRN/////////1BQAAAAAAAAAAIAAAAAA
AAAsM0GcuX30AAAA1RUWFxgZGhscHR5bYUZhbHNlVmFsdWVaYVRydWVWYWx1
ZV1hVW5pY29kZVZhbHVlXWFub3RoZXJTdHJpbmdaZGVlcGVyRGljdAgJawBN
AOQAcwBzAGkAZwAsACAATQBhAN9fEBU8aGVsbG8gJiAnaGknIHRoZXJlIT7T
HyAhIiMkUWFRYlFjEBEjQEBAAAAAAACjJSYnEAEQAlR0ZXh0Iz/gAAAAAAAA
pSorLCMtUUFRQhAMoyUmLhADE////+1foOAAE//////////7VkRvb2RhaNCg
EQLYoTZPEPo8bG90cyBvZiBiaW5hcnkgZ3Vuaz4AAQIDPGxvdHMgb2YgYmlu
YXJ5IGd1bms+AAECAzxsb3RzIG9mIGJpbmFyeSBndW5rPgABAgM8bG90cyBv
ZiBiaW5hcnkgZ3Vuaz4AAQIDPGxvdHMgb2YgYmluYXJ5IGd1bms+AAECAzxs
b3RzIG9mIGJpbmFyeSBndW5rPgABAgM8bG90cyBvZiBiaW5hcnkgZ3Vuaz4A
AQIDPGxvdHMgb2YgYmluYXJ5IGd1bms+AAECAzxsb3RzIG9mIGJpbmFyeSBn
dW5rPgABAgM8bG90cyBvZiBiaW5hcnkgZ3Vuaz4AAQIDTTxiaW5hcnkgZ3Vu
az5fEBdUaGF0IHdhcyBhIHVuaWNvZGUga2V5LgAIACsAMwA8AEIASABPAFUA
ZwB0AHwAiACUAJoApQCuALsAygDTAOQA7QD4AQQBDwEdASsBNgE3ATgBTwFn
AW4BcAFyAXQBdgF/AYMBhQGHAYwBlQGbAZ0BnwGhAaUBpwGwAbkBwAHBAcIB
xQHHAsQC0gAAAAAAAAIBAAAAAAAAADkAAAAAAAAAAAAAAAAAAALs'''),
}
class TestPlistlib(unittest.TestCase):
def tearDown(self):
try:
os.unlink(support.TESTFN)
except:
pass
def _create(self, fmt=None):
pl = dict(
aString="Doodah",
aList=["A", "B", 12, 32.5, [1, 2, 3]],
aFloat = 0.5,
anInt = 728,
aBigInt = 2 ** 63 - 44,
aBigInt2 = 2 ** 63 + 44,
aNegativeInt = -5,
aNegativeBigInt = -80000000000,
aDict=dict(
anotherString="<hello & 'hi' there!>",
aUnicodeValue='M\xe4ssig, Ma\xdf',
aTrueValue=True,
aFalseValue=False,
deeperDict=dict(a=17, b=32.5, c=[1, 2, "text"]),
),
someData = b"<binary gunk>",
someMoreData = b"<lots of binary gunk>\0\1\2\3" * 10,
nestedData = [b"<lots of binary gunk>\0\1\2\3" * 10],
aDate = datetime.datetime(2004, 10, 26, 10, 33, 33),
anEmptyDict = dict(),
anEmptyList = list()
)
pl['\xc5benraa'] = "That was a unicode key."
return pl
def test_create(self):
pl = self._create()
self.assertEqual(pl["aString"], "Doodah")
self.assertEqual(pl["aDict"]["aFalseValue"], False)
def test_io(self):
pl = self._create()
with open(support.TESTFN, 'wb') as fp:
plistlib.dump(pl, fp)
with open(support.TESTFN, 'rb') as fp:
pl2 = plistlib.load(fp)
self.assertEqual(dict(pl), dict(pl2))
self.assertRaises(AttributeError, plistlib.dump, pl, 'filename')
self.assertRaises(AttributeError, plistlib.load, 'filename')
def test_invalid_type(self):
pl = [ object() ]
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
self.assertRaises(TypeError, plistlib.dumps, pl, fmt=fmt)
def test_int(self):
for pl in [0, 2**8-1, 2**8, 2**16-1, 2**16, 2**32-1, 2**32,
2**63-1, 2**64-1, 1, -2**63]:
for fmt in ALL_FORMATS:
with self.subTest(pl=pl, fmt=fmt):
data = plistlib.dumps(pl, fmt=fmt)
pl2 = plistlib.loads(data)
self.assertIsInstance(pl2, int)
self.assertEqual(pl, pl2)
data2 = plistlib.dumps(pl2, fmt=fmt)
self.assertEqual(data, data2)
for fmt in ALL_FORMATS:
for pl in (2 ** 64 + 1, 2 ** 127-1, -2**64, -2 ** 127):
with self.subTest(pl=pl, fmt=fmt):
self.assertRaises(OverflowError, plistlib.dumps,
pl, fmt=fmt)
def test_bytes(self):
pl = self._create()
data = plistlib.dumps(pl)
pl2 = plistlib.loads(data)
self.assertNotIsInstance(pl, plistlib._InternalDict)
self.assertEqual(dict(pl), dict(pl2))
data2 = plistlib.dumps(pl2)
self.assertEqual(data, data2)
def test_indentation_array(self):
data = [[[[[[[[{'test': b'aaaaaa'}]]]]]]]]
self.assertEqual(plistlib.loads(plistlib.dumps(data)), data)
def test_indentation_dict(self):
data = {'1': {'2': {'3': {'4': {'5': {'6': {'7': {'8': {'9': b'aaaaaa'}}}}}}}}}
self.assertEqual(plistlib.loads(plistlib.dumps(data)), data)
def test_indentation_dict_mix(self):
data = {'1': {'2': [{'3': [[[[[{'test': b'aaaaaa'}]]]]]}]}}
self.assertEqual(plistlib.loads(plistlib.dumps(data)), data)
def test_appleformatting(self):
for use_builtin_types in (True, False):
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt, use_builtin_types=use_builtin_types):
pl = plistlib.loads(TESTDATA[fmt],
use_builtin_types=use_builtin_types)
data = plistlib.dumps(pl, fmt=fmt)
self.assertEqual(data, TESTDATA[fmt],
"generated data was not identical to Apple's output")
def test_appleformattingfromliteral(self):
self.maxDiff = None
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
pl = self._create(fmt=fmt)
pl2 = plistlib.loads(TESTDATA[fmt], fmt=fmt)
self.assertEqual(dict(pl), dict(pl2),
"generated data was not identical to Apple's output")
pl2 = plistlib.loads(TESTDATA[fmt])
self.assertEqual(dict(pl), dict(pl2),
"generated data was not identical to Apple's output")
def test_bytesio(self):
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
b = BytesIO()
pl = self._create(fmt=fmt)
plistlib.dump(pl, b, fmt=fmt)
pl2 = plistlib.load(BytesIO(b.getvalue()), fmt=fmt)
self.assertEqual(dict(pl), dict(pl2))
pl2 = plistlib.load(BytesIO(b.getvalue()))
self.assertEqual(dict(pl), dict(pl2))
def test_keysort_bytesio(self):
pl = collections.OrderedDict()
pl['b'] = 1
pl['a'] = 2
pl['c'] = 3
for fmt in ALL_FORMATS:
for sort_keys in (False, True):
with self.subTest(fmt=fmt, sort_keys=sort_keys):
b = BytesIO()
plistlib.dump(pl, b, fmt=fmt, sort_keys=sort_keys)
pl2 = plistlib.load(BytesIO(b.getvalue()),
dict_type=collections.OrderedDict)
self.assertEqual(dict(pl), dict(pl2))
if sort_keys:
self.assertEqual(list(pl2.keys()), ['a', 'b', 'c'])
else:
self.assertEqual(list(pl2.keys()), ['b', 'a', 'c'])
def test_keysort(self):
pl = collections.OrderedDict()
pl['b'] = 1
pl['a'] = 2
pl['c'] = 3
for fmt in ALL_FORMATS:
for sort_keys in (False, True):
with self.subTest(fmt=fmt, sort_keys=sort_keys):
data = plistlib.dumps(pl, fmt=fmt, sort_keys=sort_keys)
pl2 = plistlib.loads(data, dict_type=collections.OrderedDict)
self.assertEqual(dict(pl), dict(pl2))
if sort_keys:
self.assertEqual(list(pl2.keys()), ['a', 'b', 'c'])
else:
self.assertEqual(list(pl2.keys()), ['b', 'a', 'c'])
def test_keys_no_string(self):
pl = { 42: 'aNumber' }
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
self.assertRaises(TypeError, plistlib.dumps, pl, fmt=fmt)
b = BytesIO()
self.assertRaises(TypeError, plistlib.dump, pl, b, fmt=fmt)
def test_skipkeys(self):
pl = {
42: 'aNumber',
'snake': 'aWord',
}
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
data = plistlib.dumps(
pl, fmt=fmt, skipkeys=True, sort_keys=False)
pl2 = plistlib.loads(data)
self.assertEqual(pl2, {'snake': 'aWord'})
fp = BytesIO()
plistlib.dump(
pl, fp, fmt=fmt, skipkeys=True, sort_keys=False)
data = fp.getvalue()
pl2 = plistlib.loads(fp.getvalue())
self.assertEqual(pl2, {'snake': 'aWord'})
def test_tuple_members(self):
pl = {
'first': (1, 2),
'second': (1, 2),
'third': (3, 4),
}
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
data = plistlib.dumps(pl, fmt=fmt)
pl2 = plistlib.loads(data)
self.assertEqual(pl2, {
'first': [1, 2],
'second': [1, 2],
'third': [3, 4],
})
self.assertIsNot(pl2['first'], pl2['second'])
def test_list_members(self):
pl = {
'first': [1, 2],
'second': [1, 2],
'third': [3, 4],
}
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
data = plistlib.dumps(pl, fmt=fmt)
pl2 = plistlib.loads(data)
self.assertEqual(pl2, {
'first': [1, 2],
'second': [1, 2],
'third': [3, 4],
})
self.assertIsNot(pl2['first'], pl2['second'])
def test_dict_members(self):
pl = {
'first': {'a': 1},
'second': {'a': 1},
'third': {'b': 2 },
}
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
data = plistlib.dumps(pl, fmt=fmt)
pl2 = plistlib.loads(data)
self.assertEqual(pl2, {
'first': {'a': 1},
'second': {'a': 1},
'third': {'b': 2 },
})
self.assertIsNot(pl2['first'], pl2['second'])
def test_controlcharacters(self):
for i in range(128):
c = chr(i)
testString = "string containing %s" % c
if i >= 32 or c in "\r\n\t":
# \r, \n and \t are the only legal control chars in XML
plistlib.dumps(testString, fmt=plistlib.FMT_XML)
else:
self.assertRaises(ValueError,
plistlib.dumps,
testString)
def test_nondictroot(self):
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
test1 = "abc"
test2 = [1, 2, 3, "abc"]
result1 = plistlib.loads(plistlib.dumps(test1, fmt=fmt))
result2 = plistlib.loads(plistlib.dumps(test2, fmt=fmt))
self.assertEqual(test1, result1)
self.assertEqual(test2, result2)
def test_invalidarray(self):
for i in ["<key>key inside an array</key>",
"<key>key inside an array2</key><real>3</real>",
"<true/><key>key inside an array3</key>"]:
self.assertRaises(ValueError, plistlib.loads,
("<plist><array>%s</array></plist>"%i).encode())
def test_invaliddict(self):
for i in ["<key><true/>k</key><string>compound key</string>",
"<key>single key</key>",
"<string>missing key</string>",
"<key>k1</key><string>v1</string><real>5.3</real>"
"<key>k1</key><key>k2</key><string>double key</string>"]:
self.assertRaises(ValueError, plistlib.loads,
("<plist><dict>%s</dict></plist>"%i).encode())
self.assertRaises(ValueError, plistlib.loads,
("<plist><array><dict>%s</dict></array></plist>"%i).encode())
def test_invalidinteger(self):
self.assertRaises(ValueError, plistlib.loads,
b"<plist><integer>not integer</integer></plist>")
def test_invalidreal(self):
self.assertRaises(ValueError, plistlib.loads,
b"<plist><integer>not real</integer></plist>")
def test_xml_encodings(self):
base = TESTDATA[plistlib.FMT_XML]
for xml_encoding, encoding, bom in [
(b'utf-8', 'utf-8', codecs.BOM_UTF8),
(b'utf-16', 'utf-16-le', codecs.BOM_UTF16_LE),
(b'utf-16', 'utf-16-be', codecs.BOM_UTF16_BE),
# Expat does not support UTF-32
#(b'utf-32', 'utf-32-le', codecs.BOM_UTF32_LE),
#(b'utf-32', 'utf-32-be', codecs.BOM_UTF32_BE),
]:
pl = self._create(fmt=plistlib.FMT_XML)
with self.subTest(encoding=encoding):
data = base.replace(b'UTF-8', xml_encoding)
data = bom + data.decode('utf-8').encode(encoding)
pl2 = plistlib.loads(data)
self.assertEqual(dict(pl), dict(pl2))
def test_nonstandard_refs_size(self):
# Issue #21538: Refs and offsets are 24-bit integers
data = (b'bplist00'
b'\xd1\x00\x00\x01\x00\x00\x02QaQb'
b'\x00\x00\x08\x00\x00\x0f\x00\x00\x11'
b'\x00\x00\x00\x00\x00\x00'
b'\x03\x03'
b'\x00\x00\x00\x00\x00\x00\x00\x03'
b'\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x13')
self.assertEqual(plistlib.loads(data), {'a': 'b'})
class TestPlistlibDeprecated(unittest.TestCase):
def test_io_deprecated(self):
pl_in = {
'key': 42,
'sub': {
'key': 9,
'alt': 'value',
'data': b'buffer',
}
}
pl_out = plistlib._InternalDict({
'key': 42,
'sub': plistlib._InternalDict({
'key': 9,
'alt': 'value',
'data': plistlib.Data(b'buffer'),
})
})
self.addCleanup(support.unlink, support.TESTFN)
with self.assertWarns(DeprecationWarning):
plistlib.writePlist(pl_in, support.TESTFN)
with self.assertWarns(DeprecationWarning):
pl2 = plistlib.readPlist(support.TESTFN)
self.assertEqual(pl_out, pl2)
os.unlink(support.TESTFN)
with open(support.TESTFN, 'wb') as fp:
with self.assertWarns(DeprecationWarning):
plistlib.writePlist(pl_in, fp)
with open(support.TESTFN, 'rb') as fp:
with self.assertWarns(DeprecationWarning):
pl2 = plistlib.readPlist(fp)
self.assertEqual(pl_out, pl2)
def test_bytes_deprecated(self):
pl = {
'key': 42,
'sub': {
'key': 9,
'alt': 'value',
'data': b'buffer',
}
}
with self.assertWarns(DeprecationWarning):
data = plistlib.writePlistToBytes(pl)
with self.assertWarns(DeprecationWarning):
pl2 = plistlib.readPlistFromBytes(data)
self.assertIsInstance(pl2, plistlib._InternalDict)
self.assertEqual(pl2, plistlib._InternalDict(
key=42,
sub=plistlib._InternalDict(
key=9,
alt='value',
data=plistlib.Data(b'buffer'),
)
))
with self.assertWarns(DeprecationWarning):
data2 = plistlib.writePlistToBytes(pl2)
self.assertEqual(data, data2)
def test_dataobject_deprecated(self):
in_data = { 'key': plistlib.Data(b'hello') }
out_data = { 'key': b'hello' }
buf = plistlib.dumps(in_data)
cur = plistlib.loads(buf)
self.assertEqual(cur, out_data)
self.assertNotEqual(cur, in_data)
cur = plistlib.loads(buf, use_builtin_types=False)
self.assertNotEqual(cur, out_data)
self.assertEqual(cur, in_data)
with self.assertWarns(DeprecationWarning):
cur = plistlib.readPlistFromBytes(buf)
self.assertNotEqual(cur, out_data)
self.assertEqual(cur, in_data)
def test_main():
support.run_unittest(TestPlistlib, TestPlistlibDeprecated)
if __name__ == '__main__':
test_main()
|
bbuubbi/Django-OCR
|
refs/heads/master
|
OCRSite/OCR/tests.py
|
24123
|
from django.test import TestCase
# Create your tests here.
|
android-ia/platform_external_chromium_org
|
refs/heads/master
|
tools/perf/metrics/speedindex.py
|
28
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
from metrics import Metric
from telemetry.core import bitmap
from telemetry.value import scalar
class SpeedIndexMetric(Metric):
"""The speed index metric is one way of measuring page load speed.
It is meant to approximate user perception of page load speed, and it
is based on the amount of time that it takes to paint to the visual
portion of the screen. It includes paint events that occur after the
onload event, and it doesn't include time loading things off-screen.
This speed index metric is based on WebPageTest.org (WPT).
For more info see: http://goo.gl/e7AH5l
"""
def __init__(self):
super(SpeedIndexMetric, self).__init__()
self._impl = None
@classmethod
def CustomizeBrowserOptions(cls, options):
options.AppendExtraBrowserArgs('--disable-infobars')
def Start(self, _, tab):
"""Start recording events.
This method should be called in the WillNavigateToPage method of
a PageTest, so that all the events can be captured. If it's called
in DidNavigateToPage, that will be too late.
"""
self._impl = (VideoSpeedIndexImpl() if tab.video_capture_supported else
PaintRectSpeedIndexImpl())
self._impl.Start(tab)
def Stop(self, _, tab):
"""Stop timeline recording."""
assert self._impl, 'Must call Start() before Stop()'
assert self.IsFinished(tab), 'Must wait for IsFinished() before Stop()'
self._impl.Stop(tab)
# Optional argument chart_name is not in base class Metric.
# pylint: disable=W0221
def AddResults(self, tab, results, chart_name=None):
"""Calculate the speed index and add it to the results."""
index = self._impl.CalculateSpeedIndex(tab)
# Release the tab so that it can be disconnected.
self._impl = None
results.AddValue(scalar.ScalarValue(
results.current_page, '%s.speed_index' % chart_name, 'ms', index))
def IsFinished(self, tab):
"""Decide whether the timeline recording should be stopped.
    The point at which timeline recording is stopped determines which paint
    events are used in the speed index metric calculation. In general, the
    recording should continue while data is still being received, because
    this suggests that painting may continue.
A page may repeatedly request resources in an infinite loop; a timeout
should be placed in any measurement that uses this metric, e.g.:
def IsDone():
return self._speedindex.IsFinished(tab)
util.WaitFor(IsDone, 60)
Returns:
True if 2 seconds have passed since last resource received, false
otherwise.
"""
return tab.HasReachedQuiescence()
class SpeedIndexImpl(object):
def Start(self, tab):
raise NotImplementedError()
def Stop(self, tab):
raise NotImplementedError()
def GetTimeCompletenessList(self, tab):
"""Returns a list of time to visual completeness tuples.
In the WPT PHP implementation, this is also called 'visual progress'.
"""
raise NotImplementedError()
def CalculateSpeedIndex(self, tab):
"""Calculate the speed index.
The speed index number conceptually represents the number of milliseconds
that the page was "visually incomplete". If the page were 0% complete for
1000 ms, then the score would be 1000; if it were 0% complete for 100 ms
then 90% complete (ie 10% incomplete) for 900 ms, then the score would be
1.0*100 + 0.1*900 = 190.
Returns:
A single number, milliseconds of visual incompleteness.
"""
time_completeness_list = self.GetTimeCompletenessList(tab)
prev_completeness = 0.0
speed_index = 0.0
prev_time = time_completeness_list[0][0]
for time, completeness in time_completeness_list:
      # Add the incremental value for the interval just before this event.
elapsed_time = time - prev_time
incompleteness = (1.0 - prev_completeness)
speed_index += elapsed_time * incompleteness
# Update variables for next iteration.
prev_completeness = completeness
prev_time = time
return int(speed_index)
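  # Worked example of the docstring above (illustrative numbers): for a
  # time/completeness list of [(0, 0.0), (100, 0.9), (1000, 1.0)] the loop
  # accumulates 100 * 1.0 + 900 * 0.1 = 190, matching the 1.0*100 + 0.1*900
  # case described in CalculateSpeedIndex.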
class VideoSpeedIndexImpl(SpeedIndexImpl):
def __init__(self):
super(VideoSpeedIndexImpl, self).__init__()
self._time_completeness_list = None
def Start(self, tab):
assert tab.video_capture_supported
# Blank out the current page so it doesn't count towards the new page's
# completeness.
tab.Highlight(bitmap.WHITE)
# TODO(tonyg): Bitrate is arbitrary here. Experiment with screen capture
# overhead vs. speed index accuracy and set the bitrate appropriately.
tab.StartVideoCapture(min_bitrate_mbps=4)
def Stop(self, tab):
# Ignore white because Chrome may blank out the page during load and we want
# that to count as 0% complete. Relying on this fact, we also blank out the
# previous page to white. The tolerance of 8 experimentally does well with
# video capture at 4mbps. We should keep this as low as possible with
# supported video compression settings.
video_capture = tab.StopVideoCapture()
histograms = [(time, bmp.ColorHistogram(ignore_color=bitmap.WHITE,
tolerance=8))
for time, bmp in video_capture.GetVideoFrameIter()]
start_histogram = histograms[0][1]
final_histogram = histograms[-1][1]
total_distance = start_histogram.Distance(final_histogram)
def FrameProgress(histogram):
if total_distance == 0:
if histogram.Distance(final_histogram) == 0:
return 1.0
else:
return 0.0
return 1 - histogram.Distance(final_histogram) / total_distance
self._time_completeness_list = [(time, FrameProgress(hist))
for time, hist in histograms]
def GetTimeCompletenessList(self, tab):
assert self._time_completeness_list, 'Must call Stop() first.'
return self._time_completeness_list
class PaintRectSpeedIndexImpl(SpeedIndexImpl):
def __init__(self):
super(PaintRectSpeedIndexImpl, self).__init__()
def Start(self, tab):
tab.StartTimelineRecording()
def Stop(self, tab):
tab.StopTimelineRecording()
def GetTimeCompletenessList(self, tab):
events = tab.timeline_model.GetAllEvents()
viewport = self._GetViewportSize(tab)
paint_events = self._IncludedPaintEvents(events)
time_area_dict = self._TimeAreaDict(paint_events, viewport)
total_area = sum(time_area_dict.values())
assert total_area > 0.0, 'Total paint event area must be greater than 0.'
completeness = 0.0
time_completeness_list = []
# TODO(tonyg): This sets the start time to the start of the first paint
# event. That can't be correct. The start time should be navigationStart.
# Since the previous screen is not cleared at navigationStart, we should
# probably assume the completeness is 0 until the first paint and add the
# time of navigationStart as the start. We need to confirm what WPT does.
time_completeness_list.append(
(tab.timeline_model.GetAllEvents()[0].start, completeness))
for time, area in sorted(time_area_dict.items()):
completeness += float(area) / total_area
# Visual progress is rounded to the nearest percentage point as in WPT.
time_completeness_list.append((time, round(completeness, 2)))
return time_completeness_list
def _GetViewportSize(self, tab):
"""Returns dimensions of the viewport."""
return tab.EvaluateJavaScript('[ window.innerWidth, window.innerHeight ]')
def _IncludedPaintEvents(self, events):
"""Get all events that are counted in the calculation of the speed index.
There's one category of paint event that's filtered out: paint events
that occur before the first 'ResourceReceiveResponse' and 'Layout' events.
Previously in the WPT speed index, paint events that contain children paint
events were also filtered out.
"""
def FirstLayoutTime(events):
"""Get the start time of the first layout after a resource received."""
has_received_response = False
for event in events:
if event.name == 'ResourceReceiveResponse':
has_received_response = True
elif has_received_response and event.name == 'Layout':
return event.start
assert False, 'There were no layout events after resource receive events.'
first_layout_time = FirstLayoutTime(events)
paint_events = [e for e in events
if e.start >= first_layout_time and e.name == 'Paint']
return paint_events
def _TimeAreaDict(self, paint_events, viewport):
"""Make a dict from time to adjusted area value for events at that time.
The adjusted area value of each paint event is determined by how many paint
events cover the same rectangle, and whether it's a full-window paint event.
"Adjusted area" can also be thought of as "points" of visual completeness --
each rectangle has a certain number of points and these points are
distributed amongst the paint events that paint that rectangle.
Args:
paint_events: A list of paint events
viewport: A tuple (width, height) of the window.
Returns:
A dictionary of times of each paint event (in milliseconds) to the
adjusted area that the paint event is worth.
"""
width, height = viewport
fullscreen_area = width * height
def ClippedArea(rectangle):
"""Returns rectangle area clipped to viewport size."""
_, x0, y0, x1, y1 = rectangle
clipped_width = max(0, min(width, x1) - max(0, x0))
clipped_height = max(0, min(height, y1) - max(0, y0))
return clipped_width * clipped_height
grouped = self._GroupEventByRectangle(paint_events)
event_area_dict = collections.defaultdict(int)
for rectangle, events in grouped.items():
# The area points for each rectangle are divided up among the paint
# events in that rectangle.
area = ClippedArea(rectangle)
update_count = len(events)
adjusted_area = float(area) / update_count
# Paint events for the largest-area rectangle are counted as 50%.
if area == fullscreen_area:
adjusted_area /= 2
for event in events:
# The end time for an event is used for that event's time.
event_time = event.end
event_area_dict[event_time] += adjusted_area
return event_area_dict
def _GetRectangle(self, paint_event):
"""Get the specific rectangle on the screen for a paint event.
Each paint event belongs to a frame (as in html <frame> or <iframe>).
This, together with location and dimensions, comprises a rectangle.
In the WPT source, this 'rectangle' is also called a 'region'.
"""
def GetBox(quad):
"""Gets top-left and bottom-right coordinates from paint event.
In the timeline data from devtools, paint rectangle dimensions are
represented x-y coordinates of four corners, clockwise from the top-left.
See: function WebInspector.TimelinePresentationModel.quadFromRectData
in file src/out/Debug/obj/gen/devtools/TimelinePanel.js.
"""
x0, y0, _, _, x1, y1, _, _ = quad
return (x0, y0, x1, y1)
assert paint_event.name == 'Paint'
frame = paint_event.args['frameId']
return (frame,) + GetBox(paint_event.args['data']['clip'])
def _GroupEventByRectangle(self, paint_events):
"""Group all paint events according to the rectangle that they update."""
result = collections.defaultdict(list)
for event in paint_events:
assert event.name == 'Paint'
result[self._GetRectangle(event)].append(event)
return result
|
mapr/hue
|
refs/heads/hue-3.9.0-mapr
|
desktop/core/ext-py/python-ldap-2.3.13/Lib/dsml.py
|
44
|
"""
dsml - generate and parse DSMLv1 data
(see http://www.oasis-open.org/committees/dsml/)
See http://www.python-ldap.org/ for details.
$Id: dsml.py,v 1.16 2010/05/07 08:15:47 stroeder Exp $
Python compatibility note:
Tested with Python 2.0+.
"""
__version__ = '2.3.12'
import string,base64
def list_dict(l):
"""
return a dictionary with all items of l being the keys of the dictionary
"""
d = {}
for i in l:
d[i]=None
return d
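# For example (illustrative input): list_dict(['cn', 'sn']) -> {'cn': None, 'sn': None}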
special_entities = (
  ('&','&amp;'),
  ('<','&lt;'),
  ('"','&quot;'),
  ("'",'&apos;'),
)
def replace_char(s):
for char,entity in special_entities:
s = string.replace(s,char,entity)
return s
class DSMLWriter:
def __init__(
self,f,base64_attrs=[],dsml_comment='',indent=' '
):
"""
Parameters:
f
File object for output.
base64_attrs
Attribute types to be base64-encoded.
dsml_comment
Text placed in comment lines behind <dsml:dsml>.
indent
          String used for indentation of the next nested level.
"""
self._output_file = f
self._base64_attrs = list_dict(map(string.lower,base64_attrs))
self._dsml_comment = dsml_comment
self._indent = indent
def _needs_base64_encoding(self,attr_type,attr_value):
if self._base64_attrs:
return self._base64_attrs.has_key(string.lower(attr_type))
else:
try:
unicode(attr_value,'utf-8')
except UnicodeError:
return 1
else:
return 0
def writeHeader(self):
"""
Write the header
"""
self._output_file.write('\n'.join([
'<?xml version="1.0" encoding="UTF-8"?>',
'<!DOCTYPE root PUBLIC "dsml.dtd" "http://www.dsml.org/1.0/dsml.dtd">',
'<dsml:dsml xmlns:dsml="http://www.dsml.org/DSML">',
'%s<dsml:directory-entries>\n' % (self._indent),
])
)
if self._dsml_comment:
self._output_file.write('%s<!--\n' % (self._indent))
self._output_file.write('%s%s\n' % (self._indent,self._dsml_comment))
self._output_file.write('%s-->\n' % (self._indent))
def writeFooter(self):
"""
Write the footer
"""
self._output_file.write('%s</dsml:directory-entries>\n' % (self._indent))
self._output_file.write('</dsml:dsml>\n')
def unparse(self,dn,entry):
return self.writeRecord(dn,entry)
def writeRecord(self,dn,entry):
"""
dn
string-representation of distinguished name
entry
dictionary holding the LDAP entry {attr:data}
"""
# Write line dn: first
self._output_file.write(
'%s<dsml:entry dn="%s">\n' % (
self._indent*2,replace_char(dn)
)
)
objectclasses = entry.get('objectclass',entry.get('objectClass',[]))
self._output_file.write('%s<dsml:objectclass>\n' % (self._indent*3))
for oc in objectclasses:
self._output_file.write('%s<dsml:oc-value>%s</dsml:oc-value>\n' % (self._indent*4,oc))
self._output_file.write('%s</dsml:objectclass>\n' % (self._indent*3))
attr_types = entry.keys()[:]
try:
attr_types.remove('objectclass')
attr_types.remove('objectClass')
except ValueError:
pass
attr_types.sort()
for attr_type in attr_types:
self._output_file.write('%s<dsml:attr name="%s">\n' % (self._indent*3,attr_type))
for attr_value_item in entry[attr_type]:
needs_base64_encoding = self._needs_base64_encoding(
attr_type,attr_value_item
)
if needs_base64_encoding:
attr_value_item = base64.encodestring(attr_value_item)
else:
attr_value_item = replace_char(attr_value_item)
self._output_file.write('%s<dsml:value%s>\n' % (
self._indent*4,
' encoding="base64"'*needs_base64_encoding
)
)
self._output_file.write('%s%s\n' % (
self._indent*5,
attr_value_item
)
)
self._output_file.write('%s</dsml:value>\n' % (
self._indent*4,
)
)
self._output_file.write('%s</dsml:attr>\n' % (self._indent*3))
self._output_file.write('%s</dsml:entry>\n' % (self._indent*2))
return
try:
import xml.sax,xml.sax.handler
except ImportError:
pass
else:
class DSMLv1Handler(xml.sax.handler.ContentHandler):
"""
Content handler class for DSMLv1
"""
def __init__(self,parser_instance):
self._parser_instance = parser_instance
xml.sax.handler.ContentHandler.__init__(self)
def startDocument(self):
pass
def endDocument(self):
pass
def startElement(self,raw_name,attrs):
      assert raw_name.startswith('dsml:'),'Illegal name'
name = raw_name[5:]
if name=='dsml':
pass
elif name=='directory-entries':
self._parsing_entries = 1
elif name=='entry':
self._dn = attrs['dn']
self._entry = {}
elif name=='attr':
self._attr_type = attrs['name'].encode('utf-8')
self._attr_values = []
elif name=='value':
self._attr_value = ''
self._base64_encoding = attrs.get('encoding','').lower()=='base64'
# Handle object class tags
elif name=='objectclass':
self._object_classes = []
elif name=='oc-value':
self._oc_value = ''
# Unhandled tags
else:
raise ValueError,'Unknown tag %s' % (raw_name)
def endElement(self,raw_name):
assert raw_name.startswith('dsml:'),'Illegal name'
name = raw_name[5:]
if name=='dsml':
pass
elif name=='directory-entries':
self._parsing_entries = 0
elif name=='entry':
self._parser_instance.handle(self._dn,self._entry)
del self._dn
del self._entry
elif name=='attr':
self._entry[self._attr_type] = self._attr_values
del self._attr_type
del self._attr_values
elif name=='value':
if self._base64_encoding:
attr_value = base64.decodestring(self._attr_value.strip())
else:
attr_value = self._attr_value.strip().encode('utf-8')
self._attr_values.append(attr_value)
del attr_value
del self._attr_value
del self._base64_encoding
# Handle object class tags
elif name=='objectclass':
self._entry['objectClass'] = self._object_classes
del self._object_classes
elif name=='oc-value':
self._object_classes.append(self._oc_value.strip().encode('utf-8'))
del self._oc_value
# Unhandled tags
else:
raise ValueError,'Unknown tag %s' % (raw_name)
def characters(self,ch):
if self.__dict__.has_key('_oc_value'):
self._oc_value = self._oc_value + ch
elif self.__dict__.has_key('_attr_value'):
self._attr_value = self._attr_value + ch
else:
pass
class DSMLParser:
"""
Base class for a DSMLv1 parser. Applications should sub-class this
class and override method handle() to implement something meaningful.
Public class attributes:
records_read
Counter for records processed so far
"""
def __init__(
self,
input_file,
ContentHandlerClass,
ignored_attr_types=None,
max_entries=0,
):
"""
Parameters:
input_file
File-object to read the DSMLv1 input from
ignored_attr_types
Attributes with these attribute type names will be ignored.
max_entries
          If non-zero specifies the maximum number of entries to be
          read from input_file.
"""
self._input_file = input_file
self._max_entries = max_entries
self._ignored_attr_types = list_dict(map(string.lower,(ignored_attr_types or [])))
self._current_record = None,None
self.records_read = 0
self._parser = xml.sax.make_parser()
self._parser.setFeature(xml.sax.handler.feature_namespaces,0)
content_handler = ContentHandlerClass(self)
self._parser.setContentHandler(content_handler)
def handle(self,*args,**kwargs):
"""
      Process a single DSMLv1 content record. This method should be
implemented by applications using DSMLParser.
"""
import pprint
pprint.pprint(args)
pprint.pprint(kwargs)
def parse(self):
"""
      Continuously read and parse DSML records.
"""
self._parser.parse(self._input_file)
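# Minimal subclass sketch (file name and output are illustrative only):
#   class PrintingParser(DSMLParser):
#     def handle(self, dn, entry):
#       print dn, entry
#   PrintingParser(open('entries.dsml'), DSMLv1Handler).parse()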
|
litnimax/addons-yelizariev
|
refs/heads/8.0
|
mass_mailing_extra/__init__.py
|
2148
|
import models
|
2ndquadrant-it/barman
|
refs/heads/master
|
barman/server.py
|
1
|
# Copyright (C) 2011-2020 2ndQuadrant Limited
#
# This file is part of Barman.
#
# Barman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Barman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Barman. If not, see <http://www.gnu.org/licenses/>.
"""
This module represents a Server.
Barman is able to manage multiple servers.
"""
import errno
import json
import logging
import os
import re
import shutil
import sys
import tarfile
import time
from collections import namedtuple
from contextlib import closing, contextmanager
from glob import glob
from tempfile import NamedTemporaryFile
import barman
from barman import output, xlog
from barman.backup import BackupManager
from barman.command_wrappers import BarmanSubProcess, Command, Rsync
from barman.copy_controller import RsyncCopyController
from barman.exceptions import (ArchiverFailure, BadXlogSegmentName,
CommandFailedException, ConninfoException,
LockFileBusy, LockFileException,
LockFilePermissionDenied,
PostgresDuplicateReplicationSlot,
PostgresException,
PostgresInvalidReplicationSlot,
PostgresIsInRecovery,
PostgresReplicationSlotInUse,
PostgresReplicationSlotsFull,
PostgresSuperuserRequired,
PostgresUnsupportedFeature, SyncError,
SyncNothingToDo, SyncToBeDeleted, TimeoutError,
UnknownBackupIdException)
from barman.infofile import BackupInfo, LocalBackupInfo, WalFileInfo
from barman.lockfile import (ServerBackupIdLock, ServerBackupLock,
ServerBackupSyncLock, ServerCronLock,
ServerWalArchiveLock, ServerWalReceiveLock,
ServerWalSyncLock, ServerXLOGDBLock)
from barman.postgres import PostgreSQLConnection, StreamingConnection
from barman.process import ProcessManager
from barman.remote_status import RemoteStatusMixin
from barman.retention_policies import RetentionPolicyFactory
from barman.utils import (BarmanEncoder, file_md5, force_str, fsync_dir,
fsync_file, human_readable_timedelta,
is_power_of_two, mkpath, pretty_size, timeout)
from barman.wal_archiver import (FileWalArchiver, StreamingWalArchiver,
WalArchiver)
PARTIAL_EXTENSION = '.partial'
PRIMARY_INFO_FILE = 'primary.info'
SYNC_WALS_INFO_FILE = 'sync-wals.info'
_logger = logging.getLogger(__name__)
# NamedTuple for a better readability of SyncWalInfo
SyncWalInfo = namedtuple('SyncWalInfo', 'last_wal last_position')
class CheckStrategy(object):
"""
This strategy for the 'check' collects the results of
every check and does not print any message.
This basic class is also responsible for immediately
logging any performed check with an error in case of
check failure and a debug message in case of success.
"""
# create a namedtuple object called CheckResult to manage check results
CheckResult = namedtuple('CheckResult', 'server_name check status')
# Default list used as a filter to identify non-critical checks
NON_CRITICAL_CHECKS = ['minimum redundancy requirements',
'backup maximum age',
'failed backups',
'archiver errors',
'empty incoming directory',
'empty streaming directory',
'incoming WALs directory',
'streaming WALs directory',
]
def __init__(self, ignore_checks=NON_CRITICAL_CHECKS):
"""
Silent Strategy constructor
:param list ignore_checks: list of checks that can be ignored
"""
self.ignore_list = ignore_checks
self.check_result = []
self.has_error = False
self.running_check = None
def init_check(self, check_name):
"""
Mark in the debug log when barman starts the execution of a check
:param str check_name: the name of the check that is starting
"""
self.running_check = check_name
_logger.debug("Starting check: '%s'" % check_name)
def _check_name(self, check):
if not check:
check = self.running_check
assert check
return check
def result(self, server_name, status, hint=None, check=None):
"""
Store the result of a check (with no output).
Log any check result (error or debug level).
        :param str server_name: the server being checked
        :param bool status: True if succeeded
        :param str,None hint: hint to print if not None
:param str,None check: the check name
"""
check = self._check_name(check)
if not status:
# If the name of the check is not in the filter list,
# treat it as a blocking error, then notify the error
# and change the status of the strategy
if check not in self.ignore_list:
self.has_error = True
_logger.error(
"Check '%s' failed for server '%s'" %
(check, server_name))
else:
# otherwise simply log the error (as info)
_logger.info(
"Ignoring failed check '%s' for server '%s'" %
(check, server_name))
else:
_logger.debug(
"Check '%s' succeeded for server '%s'" %
(check, server_name))
        # Store the result without outputting anything
result = self.CheckResult(server_name, check, status)
self.check_result.append(result)
self.running_check = None
class CheckOutputStrategy(CheckStrategy):
"""
This strategy for the 'check' command immediately sends
the result of a check to the designated output channel.
This class derives from the basic CheckStrategy, reuses
the same logic and adds output messages.
"""
def __init__(self):
"""
Output Strategy constructor
"""
super(CheckOutputStrategy, self).__init__(ignore_checks=())
def result(self, server_name, status, hint=None, check=None):
"""
Store the result of a check.
Log any check result (error or debug level).
Output the result to the user
:param str server_name: the server being checked
:param str check: the check name
:param bool status: True if succeeded
        :param str,None hint: hint to print if not None
"""
check = self._check_name(check)
super(CheckOutputStrategy, self).result(
server_name, status, hint, check)
# Send result to output
output.result('check', server_name, check, status, hint)
class Server(RemoteStatusMixin):
"""
This class represents the PostgreSQL server to backup.
"""
XLOG_DB = "xlog.db"
# the strategy for the management of the results of the various checks
__default_check_strategy = CheckOutputStrategy()
def __init__(self, config):
"""
Server constructor.
:param barman.config.ServerConfig config: the server configuration
"""
super(Server, self).__init__()
self.config = config
self.path = self._build_path(self.config.path_prefix)
self.process_manager = ProcessManager(self.config)
# If 'primary_ssh_command' is specified, the source of the backup
# for this server is a Barman installation (not a Postgres server)
self.passive_node = config.primary_ssh_command is not None
self.enforce_retention_policies = False
self.postgres = None
self.streaming = None
self.archivers = []
# Postgres configuration is available only if node is not passive
if not self.passive_node:
# Initialize the main PostgreSQL connection
try:
# Check that 'conninfo' option is properly set
if config.conninfo is None:
raise ConninfoException(
"Missing 'conninfo' parameter for server '%s'" %
config.name)
self.postgres = PostgreSQLConnection(
config.conninfo,
config.immediate_checkpoint,
config.slot_name)
# If the PostgreSQLConnection creation fails, disable the Server
except ConninfoException as e:
self.config.disabled = True
self.config.msg_list.append(
"PostgreSQL connection: " + force_str(e).strip())
# Initialize the streaming PostgreSQL connection only when
# backup_method is postgres or the streaming_archiver is in use
if config.backup_method == 'postgres' or config.streaming_archiver:
try:
if config.streaming_conninfo is None:
raise ConninfoException(
"Missing 'streaming_conninfo' parameter for "
"server '%s'"
% config.name)
self.streaming = StreamingConnection(
config.streaming_conninfo)
# If the StreamingConnection creation fails, disable the server
except ConninfoException as e:
self.config.disabled = True
self.config.msg_list.append(
"Streaming connection: " + force_str(e).strip())
# Initialize the backup manager
self.backup_manager = BackupManager(self)
if not self.passive_node:
# Initialize the StreamingWalArchiver
# WARNING: Order of items in self.archivers list is important!
# The files will be archived in that order.
if self.config.streaming_archiver:
try:
self.archivers.append(StreamingWalArchiver(
self.backup_manager))
# If the StreamingWalArchiver creation fails,
# disable the server
except AttributeError as e:
_logger.debug(e)
self.config.disabled = True
self.config.msg_list.append('Unable to initialise the '
'streaming archiver')
# IMPORTANT: The following lines of code have been
# temporarily commented in order to make the code
# back-compatible after the introduction of 'archiver=off'
# as default value in Barman 2.0.
# When the back compatibility feature for archiver will be
# removed, the following lines need to be decommented.
# ARCHIVER_OFF_BACKCOMPATIBILITY - START OF CODE
# # At least one of the available archive modes should be enabled
# if len(self.archivers) < 1:
# self.config.disabled = True
# self.config.msg_list.append(
# "No archiver enabled for server '%s'. "
# "Please turn on 'archiver', 'streaming_archiver' or both"
# % config.name)
# ARCHIVER_OFF_BACKCOMPATIBILITY - END OF CODE
# Sanity check: if file based archiver is disabled, and only
# WAL streaming is enabled, a replication slot name must be
# configured.
if not self.config.archiver and \
self.config.streaming_archiver and \
self.config.slot_name is None:
self.config.disabled = True
self.config.msg_list.append(
"Streaming-only archiver requires 'streaming_conninfo' "
"and 'slot_name' options to be properly configured")
# ARCHIVER_OFF_BACKCOMPATIBILITY - START OF CODE
# IMPORTANT: This is a back-compatibility feature that has
# been added in Barman 2.0. It highlights a deprecated
# behaviour, and helps users during this transition phase.
# It forces 'archiver=on' when both archiver and streaming_archiver
# are set to 'off' (default values) and displays a warning,
# requesting users to explicitly set the value in the
# configuration.
# When this back-compatibility feature will be removed from Barman
# (in a couple of major releases), developers will need to remove
# this block completely and reinstate the block of code you find
# a few lines below (search for ARCHIVER_OFF_BACKCOMPATIBILITY
# throughout the code).
if self.config.archiver is False and \
self.config.streaming_archiver is False:
output.warning("No archiver enabled for server '%s'. "
"Please turn on 'archiver', "
"'streaming_archiver' or both",
self.config.name)
output.warning("Forcing 'archiver = on'")
self.config.archiver = True
# ARCHIVER_OFF_BACKCOMPATIBILITY - END OF CODE
# Initialize the FileWalArchiver
# WARNING: Order of items in self.archivers list is important!
# The files will be archived in that order.
if self.config.archiver:
try:
self.archivers.append(FileWalArchiver(self.backup_manager))
except AttributeError as e:
_logger.debug(e)
self.config.disabled = True
self.config.msg_list.append('Unable to initialise the '
'file based archiver')
# Set bandwidth_limit
if self.config.bandwidth_limit:
try:
self.config.bandwidth_limit = int(self.config.bandwidth_limit)
except ValueError:
_logger.warning('Invalid bandwidth_limit "%s" for server "%s" '
'(fallback to "0")' % (
self.config.bandwidth_limit,
self.config.name))
self.config.bandwidth_limit = None
# set tablespace_bandwidth_limit
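        # The value is a space-separated list of "tablespace:limit" rules;
        # for instance, "tbs_data:2000 tbs_index:500" (hypothetical names and
        # figures) would override the global bandwidth_limit for those two
        # tablespaces only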
if self.config.tablespace_bandwidth_limit:
rules = {}
for rule in self.config.tablespace_bandwidth_limit.split():
try:
key, value = rule.split(':', 1)
value = int(value)
if value != self.config.bandwidth_limit:
rules[key] = value
except ValueError:
_logger.warning(
"Invalid tablespace_bandwidth_limit rule '%s'" % rule)
if len(rules) > 0:
self.config.tablespace_bandwidth_limit = rules
else:
self.config.tablespace_bandwidth_limit = None
# Set minimum redundancy (default 0)
if self.config.minimum_redundancy.isdigit():
self.config.minimum_redundancy = int(
self.config.minimum_redundancy)
if self.config.minimum_redundancy < 0:
_logger.warning('Negative value of minimum_redundancy "%s" '
'for server "%s" (fallback to "0")' % (
self.config.minimum_redundancy,
self.config.name))
self.config.minimum_redundancy = 0
else:
_logger.warning('Invalid minimum_redundancy "%s" for server "%s" '
'(fallback to "0")' % (
self.config.minimum_redundancy,
self.config.name))
self.config.minimum_redundancy = 0
# Initialise retention policies
self._init_retention_policies()
def _init_retention_policies(self):
# Set retention policy mode
if self.config.retention_policy_mode != 'auto':
_logger.warning(
'Unsupported retention_policy_mode "%s" for server "%s" '
'(fallback to "auto")' % (
self.config.retention_policy_mode, self.config.name))
self.config.retention_policy_mode = 'auto'
# If retention_policy is present, enforce them
if self.config.retention_policy:
# Check wal_retention_policy
if self.config.wal_retention_policy != 'main':
_logger.warning(
'Unsupported wal_retention_policy value "%s" '
'for server "%s" (fallback to "main")' % (
self.config.wal_retention_policy, self.config.name))
self.config.wal_retention_policy = 'main'
# Create retention policy objects
try:
rp = RetentionPolicyFactory.create(
self, 'retention_policy', self.config.retention_policy)
# Reassign the configuration value (we keep it in one place)
self.config.retention_policy = rp
_logger.debug('Retention policy for server %s: %s' % (
self.config.name, self.config.retention_policy))
try:
rp = RetentionPolicyFactory.create(
self, 'wal_retention_policy',
self.config.wal_retention_policy)
# Reassign the configuration value
# (we keep it in one place)
self.config.wal_retention_policy = rp
_logger.debug(
'WAL retention policy for server %s: %s' % (
self.config.name,
self.config.wal_retention_policy))
except ValueError:
_logger.exception(
'Invalid wal_retention_policy setting "%s" '
'for server "%s" (fallback to "main")' % (
self.config.wal_retention_policy,
self.config.name))
rp = RetentionPolicyFactory.create(
self, 'wal_retention_policy', 'main')
self.config.wal_retention_policy = rp
self.enforce_retention_policies = True
except ValueError:
_logger.exception(
'Invalid retention_policy setting "%s" for server "%s"' % (
self.config.retention_policy, self.config.name))
def get_identity_file_path(self):
"""
Get the path of the file that should contain the identity
of the cluster
:rtype: str
"""
return os.path.join(
self.config.backup_directory,
'identity.json')
def write_identity_file(self):
"""
Store the identity of the server if it doesn't already exist.
"""
file_path = self.get_identity_file_path()
# Do not write the identity if file already exists
if os.path.exists(file_path):
return
systemid = self.systemid
if systemid:
try:
with open(file_path, "w") as fp:
json.dump(
{
"systemid": systemid,
"version": self.postgres.server_major_version
},
fp,
indent=4,
sort_keys=True)
fp.write("\n")
except IOError:
_logger.exception(
'Cannot write system Id file for server "%s"' % (
self.config.name))
def read_identity_file(self):
"""
Read the server identity
:rtype: dict[str,str]
"""
file_path = self.get_identity_file_path()
try:
with open(file_path, "r") as fp:
return json.load(fp)
except IOError:
_logger.exception(
'Cannot read system Id file for server "%s"' % (
self.config.name))
return {}
def close(self):
"""
Close all the open connections to PostgreSQL
"""
if self.postgres:
self.postgres.close()
if self.streaming:
self.streaming.close()
def check(self, check_strategy=__default_check_strategy):
"""
Implements the 'server check' command and makes sure SSH and PostgreSQL
        connections work properly. It also checks that backup directories exist
(and if not, it creates them).
The check command will time out after a time interval defined by the
check_timeout configuration value (default 30 seconds)
:param CheckStrategy check_strategy: the strategy for the management
of the results of the various checks
"""
try:
with timeout(self.config.check_timeout):
# Check WAL archive
self.check_archive(check_strategy)
# Postgres configuration is not available on passive nodes
if not self.passive_node:
self.check_postgres(check_strategy)
# Check barman directories from barman configuration
self.check_directories(check_strategy)
# Check retention policies
self.check_retention_policy_settings(check_strategy)
# Check for backup validity
self.check_backup_validity(check_strategy)
# Executes the backup manager set of checks
self.backup_manager.check(check_strategy)
                # Check if the msg_list of the server
                # contains messages and output any failures
self.check_configuration(check_strategy)
# Check the system Id coherence between
# streaming and normal connections
self.check_identity(check_strategy)
# Executes check() for every archiver, passing
# remote status information for efficiency
for archiver in self.archivers:
archiver.check(check_strategy)
# Check archiver errors
self.check_archiver_errors(check_strategy)
except TimeoutError:
# The check timed out.
# Add a failed entry to the check strategy for this.
_logger.debug("Check command timed out executing '%s' check"
% check_strategy.running_check)
check_strategy.result(self.config.name, False,
hint='barman check command timed out',
check='check timeout')
def check_archive(self, check_strategy):
"""
Checks WAL archive
:param CheckStrategy check_strategy: the strategy for the management
of the results of the various checks
"""
check_strategy.init_check("WAL archive")
# Make sure that WAL archiving has been setup
# XLOG_DB needs to exist and its size must be > 0
# NOTE: we do not need to acquire a lock in this phase
xlogdb_empty = True
if os.path.exists(self.xlogdb_file_name):
with open(self.xlogdb_file_name, "rb") as fxlogdb:
if os.fstat(fxlogdb.fileno()).st_size > 0:
xlogdb_empty = False
# NOTE: This check needs to be only visible if it fails
if xlogdb_empty:
# Skip the error if we have a terminated backup
# with status WAITING_FOR_WALS.
# TODO: Improve this check
backup_id = self.get_last_backup_id([BackupInfo.WAITING_FOR_WALS])
if not backup_id:
check_strategy.result(
self.config.name, False,
hint='please make sure WAL shipping is setup')
# Check the number of wals in the incoming directory
self._check_wal_queue(check_strategy,
'incoming',
'archiver')
# Check the number of wals in the streaming directory
self._check_wal_queue(check_strategy,
'streaming',
'streaming_archiver')
def _check_wal_queue(self, check_strategy, dir_name, archiver_name):
"""
        Check if one of the WAL queue directories is beyond the
        max file threshold
"""
# Read the wal queue location from the configuration
config_name = "%s_wals_directory" % dir_name
assert hasattr(self.config, config_name)
incoming_dir = getattr(self.config, config_name)
# Check if the archiver is enabled
assert hasattr(self.config, archiver_name)
enabled = getattr(self.config, archiver_name)
# Inspect the wal queue directory
file_count = 0
for file_item in glob(os.path.join(incoming_dir, '*')):
# Ignore temporary files
if file_item.endswith('.tmp'):
continue
file_count += 1
max_incoming_wal = self.config.max_incoming_wals_queue
# Subtract one from the count because of .partial file inside the
# streaming directory
if dir_name == 'streaming':
file_count -= 1
# If this archiver is disabled, check the number of files in the
# corresponding directory.
# If the directory is NOT empty, fail the check and warn the user.
# NOTE: This check is visible only when it fails
check_strategy.init_check("empty %s directory" % dir_name)
if not enabled:
if file_count > 0:
check_strategy.result(
self.config.name, False,
hint="'%s' must be empty when %s=off"
% (incoming_dir, archiver_name))
# No more checks are required if the archiver
# is not enabled
return
        # At this point, if max_incoming_wal is None, it means that no limit
        # is set, so we just need to return
if max_incoming_wal is None:
return
check_strategy.init_check("%s WALs directory" % dir_name)
if file_count > max_incoming_wal:
msg = 'there are too many WALs in queue: ' \
'%s, max %s' % (file_count, max_incoming_wal)
check_strategy.result(self.config.name, False, hint=msg)
def check_postgres(self, check_strategy):
"""
Checks PostgreSQL connection
:param CheckStrategy check_strategy: the strategy for the management
of the results of the various checks
"""
check_strategy.init_check('PostgreSQL')
# Take the status of the remote server
remote_status = self.get_remote_status()
if remote_status.get('server_txt_version'):
check_strategy.result(self.config.name, True)
else:
check_strategy.result(self.config.name, False)
return
# Check for superuser privileges or
# privileges needed to perform backups
if remote_status.get('has_backup_privileges') is not None:
check_strategy.init_check(
'superuser or standard user with backup privileges')
if remote_status.get('has_backup_privileges'):
check_strategy.result(
self.config.name, True)
else:
check_strategy.result(
self.config.name, False,
hint='privileges for PostgreSQL backup functions are '
'required (see documentation)',
check='no access to backup functions'
)
if 'streaming_supported' in remote_status:
check_strategy.init_check("PostgreSQL streaming")
hint = None
# If a streaming connection is available,
# add its status to the output of the check
if remote_status['streaming_supported'] is None:
hint = remote_status['connection_error']
elif not remote_status['streaming_supported']:
hint = ('Streaming connection not supported'
' for PostgreSQL < 9.2')
check_strategy.result(self.config.name,
remote_status.get('streaming'), hint=hint)
# Check wal_level parameter: must be different from 'minimal'
# the parameter has been introduced in postgres >= 9.0
if 'wal_level' in remote_status:
check_strategy.init_check("wal_level")
if remote_status['wal_level'] != 'minimal':
check_strategy.result(
self.config.name, True)
else:
check_strategy.result(
self.config.name, False,
hint="please set it to a higher level than 'minimal'")
# Check the presence and the status of the configured replication slot
# This check will be skipped if `slot_name` is undefined
if self.config.slot_name:
check_strategy.init_check("replication slot")
slot = remote_status['replication_slot']
# The streaming_archiver is enabled
if self.config.streaming_archiver is True:
# Error if PostgreSQL is too old
if not remote_status['replication_slot_support']:
check_strategy.result(
self.config.name,
False,
hint="slot_name parameter set but PostgreSQL server "
"is too old (%s < 9.4)" %
remote_status['server_txt_version'])
# Replication slots are supported
else:
# The slot is not present
if slot is None:
check_strategy.result(
self.config.name, False,
hint="replication slot '%s' doesn't exist. "
"Please execute 'barman receive-wal "
"--create-slot %s'" % (self.config.slot_name,
self.config.name))
else:
# The slot is present but not initialised
if slot.restart_lsn is None:
check_strategy.result(
self.config.name, False,
hint="slot '%s' not initialised: is "
"'receive-wal' running?" %
self.config.slot_name)
# The slot is present but not active
elif slot.active is False:
check_strategy.result(
self.config.name, False,
hint="slot '%s' not active: is "
"'receive-wal' running?" %
self.config.slot_name)
else:
check_strategy.result(self.config.name,
True)
else:
# If the streaming_archiver is disabled and the slot_name
# option is present in the configuration, we check that
# a replication slot with the specified name is NOT present
# and NOT active.
# NOTE: This is not a failure, just a warning.
if slot is not None:
if slot.restart_lsn \
is not None:
slot_status = 'initialised'
# Check if the slot is also active
if slot.active:
slot_status = 'active'
# Warn the user
check_strategy.result(
self.config.name,
True,
hint="WARNING: slot '%s' is %s but not required "
"by the current config" % (
self.config.slot_name, slot_status))
def _make_directories(self):
"""
Make backup directories in case they do not exist
"""
for key in self.config.KEYS:
if key.endswith('_directory') and hasattr(self.config, key):
val = getattr(self.config, key)
if val is not None and not os.path.isdir(val):
# noinspection PyTypeChecker
os.makedirs(val)
def check_directories(self, check_strategy):
"""
Checks backup directories and creates them if they do not exist
:param CheckStrategy check_strategy: the strategy for the management
of the results of the various checks
"""
check_strategy.init_check("directories")
if not self.config.disabled:
try:
self._make_directories()
except OSError as e:
check_strategy.result(self.config.name, False,
"%s: %s" % (e.filename, e.strerror))
else:
check_strategy.result(self.config.name, True)
def check_configuration(self, check_strategy):
"""
        Check for error messages in the message list
        of the server and output any errors
:param CheckStrategy check_strategy: the strategy for the management
of the results of the various checks
"""
check_strategy.init_check('configuration')
if len(self.config.msg_list):
check_strategy.result(self.config.name, False)
for conflict_paths in self.config.msg_list:
output.info("\t\t%s" % conflict_paths)
def check_retention_policy_settings(self, check_strategy):
"""
Checks retention policy setting
:param CheckStrategy check_strategy: the strategy for the management
of the results of the various checks
"""
check_strategy.init_check("retention policy settings")
config = self.config
if config.retention_policy and not self.enforce_retention_policies:
check_strategy.result(self.config.name, False, hint='see log')
else:
check_strategy.result(self.config.name, True)
def check_backup_validity(self, check_strategy):
"""
Check if backup validity requirements are satisfied
:param CheckStrategy check_strategy: the strategy for the management
of the results of the various checks
"""
check_strategy.init_check('backup maximum age')
# first check: check backup maximum age
if self.config.last_backup_maximum_age is not None:
# get maximum age information
backup_age = self.backup_manager.validate_last_backup_maximum_age(
self.config.last_backup_maximum_age)
# format the output
check_strategy.result(
self.config.name, backup_age[0],
hint="interval provided: %s, latest backup age: %s" % (
human_readable_timedelta(
self.config.last_backup_maximum_age), backup_age[1]))
else:
            # no last_backup_maximum_age provided by the user
check_strategy.result(
self.config.name,
True,
hint="no last_backup_maximum_age provided")
def check_archiver_errors(self, check_strategy):
"""
Checks the presence of archiving errors
:param CheckStrategy check_strategy: the strategy for the management
of the results of the check
"""
check_strategy.init_check('archiver errors')
if os.path.isdir(self.config.errors_directory):
errors = os.listdir(self.config.errors_directory)
else:
errors = []
check_strategy.result(
self.config.name,
len(errors) == 0,
hint=WalArchiver.summarise_error_files(errors)
)
def check_identity(self, check_strategy):
"""
Check the systemid retrieved from the streaming connection
is the same that is retrieved from the standard connection,
and then verifies it matches the one stored on disk.
:param CheckStrategy check_strategy: the strategy for the management
of the results of the various checks
"""
check_strategy.init_check("systemid coherence")
remote_status = self.get_remote_status()
# Get system identifier from streaming and standard connections
systemid_from_streaming = remote_status.get('streaming_systemid')
systemid_from_postgres = remote_status.get('postgres_systemid')
        # If both are available, make sure they are coherent with each other
if systemid_from_streaming and systemid_from_postgres:
if systemid_from_streaming != systemid_from_postgres:
check_strategy.result(
self.config.name,
systemid_from_streaming == systemid_from_postgres,
hint="is the streaming DSN targeting the same server "
"of the PostgreSQL connection string?")
return
systemid_from_server = (
systemid_from_streaming or systemid_from_postgres)
if not systemid_from_server:
# Can't check without system Id information
check_strategy.result(self.config.name, True,
hint="no system Id available")
return
# Retrieves the content on disk and matches it with the live ID
file_path = self.get_identity_file_path()
if not os.path.exists(file_path):
# We still don't have the systemid cached on disk,
# so let's wait until we store it
check_strategy.result(self.config.name, True,
hint="no system Id stored on disk")
return
identity_from_file = self.read_identity_file()
if systemid_from_server != identity_from_file.get('systemid'):
check_strategy.result(
self.config.name,
False,
hint='the system Id of the connected PostgreSQL server '
'changed, stored in "%s"' % file_path)
else:
check_strategy.result(self.config.name, True)
def status_postgres(self):
"""
Status of PostgreSQL server
"""
remote_status = self.get_remote_status()
if remote_status['server_txt_version']:
output.result('status', self.config.name,
"pg_version",
"PostgreSQL version",
remote_status['server_txt_version'])
else:
output.result('status', self.config.name,
"pg_version",
"PostgreSQL version",
"FAILED trying to get PostgreSQL version")
return
        # Define the cluster state as pg_controldata does.
if remote_status['is_in_recovery']:
output.result('status', self.config.name, 'is_in_recovery',
'Cluster state', "in archive recovery")
else:
output.result('status', self.config.name, 'is_in_recovery',
'Cluster state', "in production")
if remote_status['pgespresso_installed']:
output.result('status', self.config.name, 'pgespresso',
'pgespresso extension', "Available")
else:
output.result('status', self.config.name, 'pgespresso',
'pgespresso extension', "Not available")
if remote_status.get('current_size') is not None:
output.result('status', self.config.name,
'current_size',
'Current data size',
pretty_size(remote_status['current_size']))
if remote_status['data_directory']:
output.result('status', self.config.name,
"data_directory",
"PostgreSQL Data directory",
remote_status['data_directory'])
if remote_status['current_xlog']:
output.result('status', self.config.name,
"current_xlog",
"Current WAL segment",
remote_status['current_xlog'])
def status_wal_archiver(self):
"""
Status of WAL archiver(s)
"""
for archiver in self.archivers:
archiver.status()
def status_retention_policies(self):
"""
Status of retention policies enforcement
"""
if self.enforce_retention_policies:
output.result('status', self.config.name,
"retention_policies",
"Retention policies",
"enforced "
"(mode: %s, retention: %s, WAL retention: %s)" % (
self.config.retention_policy_mode,
self.config.retention_policy,
self.config.wal_retention_policy))
else:
output.result('status', self.config.name,
"retention_policies",
"Retention policies",
"not enforced")
def status(self):
"""
Implements the 'server-status' command.
"""
if self.config.description:
output.result('status', self.config.name,
"description",
"Description", self.config.description)
output.result('status', self.config.name,
"active",
"Active", self.config.active)
output.result('status', self.config.name,
"disabled",
"Disabled", self.config.disabled)
# Postgres status is available only if node is not passive
if not self.passive_node:
self.status_postgres()
self.status_wal_archiver()
output.result('status', self.config.name,
"passive_node",
"Passive node",
self.passive_node)
self.status_retention_policies()
# Executes the backup manager status info method
self.backup_manager.status()
def fetch_remote_status(self):
"""
Get the status of the remote server
        This method does not raise any exception in case of errors,
        but sets the missing values to None in the resulting dictionary.
:rtype: dict[str, None|str]
"""
result = {}
# Merge status for a postgres connection
if self.postgres:
result.update(self.postgres.get_remote_status())
# Merge status for a streaming connection
if self.streaming:
result.update(self.streaming.get_remote_status())
# Merge status for each archiver
for archiver in self.archivers:
result.update(archiver.get_remote_status())
# Merge status defined by the BackupManager
result.update(self.backup_manager.get_remote_status())
return result
def show(self):
"""
Shows the server configuration
"""
# Populate result map with all the required keys
result = self.config.to_json()
# Is the server a passive node?
result['passive_node'] = self.passive_node
# Skip remote status if the server is passive
if not self.passive_node:
remote_status = self.get_remote_status()
result.update(remote_status)
# Backup maximum age section
if self.config.last_backup_maximum_age is not None:
age = self.backup_manager.validate_last_backup_maximum_age(
self.config.last_backup_maximum_age)
            # If the latest backup is between the limits of the
            # last_backup_maximum_age configuration, display how old
            # the latest backup is.
if age[0]:
msg = "%s (latest backup: %s )" % \
(human_readable_timedelta(
self.config.last_backup_maximum_age),
age[1])
else:
# If latest backup is outside the limits of the
# last_backup_maximum_age configuration (or the configuration
# value is none), warn the user.
msg = "%s (WARNING! latest backup is %s old)" % \
(human_readable_timedelta(
self.config.last_backup_maximum_age),
age[1])
result['last_backup_maximum_age'] = msg
else:
result['last_backup_maximum_age'] = "None"
output.result('show_server', self.config.name, result)
def delete_backup(self, backup):
"""Deletes a backup
:param backup: the backup to delete
"""
try:
# Lock acquisition: if you can acquire a ServerBackupLock
# it means that no backup process is running on that server,
# so there is no need to check the backup status.
# Simply proceed with the normal delete process.
server_backup_lock = ServerBackupLock(
self.config.barman_lock_directory,
self.config.name)
server_backup_lock.acquire(server_backup_lock.raise_if_fail,
server_backup_lock.wait)
server_backup_lock.release()
except LockFileBusy:
# Otherwise if the lockfile is busy, a backup process is actually
# running on that server. To be sure that it's safe
# to delete the backup, we must check its status and its position
# in the catalogue.
# If it is the first and it is STARTED or EMPTY, we are trying to
# remove a running backup. This operation must be forbidden.
# Otherwise, normally delete the backup.
first_backup_id = self.get_first_backup_id(BackupInfo.STATUS_ALL)
if backup.backup_id == first_backup_id \
and backup.status in (BackupInfo.STARTED,
BackupInfo.EMPTY):
output.error("Cannot delete a running backup (%s %s)"
% (self.config.name, backup.backup_id))
return
except LockFilePermissionDenied as e:
# We cannot access the lockfile.
# Exit without removing the backup.
output.error("Permission denied, unable to access '%s'" % e)
return
try:
# Take care of the backup lock.
# Only one process can modify a backup at a time
lock = ServerBackupIdLock(self.config.barman_lock_directory,
self.config.name,
backup.backup_id)
with lock:
deleted = self.backup_manager.delete_backup(backup)
                # At this point no-one should try locking a backup that
                # doesn't exist, so we can remove the lock
                # WARNING: the previous statement is true only as long as
                # no-one waits on this lock
if deleted:
os.remove(lock.filename)
return deleted
except LockFileBusy:
# If another process is holding the backup lock,
# warn the user and terminate
output.error(
"Another process is holding the lock for "
"backup %s of server %s." % (
backup.backup_id, self.config.name))
return
except LockFilePermissionDenied as e:
# We cannot access the lockfile.
# warn the user and terminate
output.error("Permission denied, unable to access '%s'" % e)
return
def backup(self, wait=False, wait_timeout=None):
"""
Performs a backup for the server
:param bool wait: wait for all the required WAL files to be archived
:param int|None wait_timeout: the time, in seconds, the backup
will wait for the required WAL files to be archived
before timing out
"""
# The 'backup' command is not available on a passive node.
# We assume that if we get here the node is not passive
assert not self.passive_node
try:
# Default strategy for check in backup is CheckStrategy
# This strategy does not print any output - it only logs checks
strategy = CheckStrategy()
self.check(strategy)
if strategy.has_error:
output.error("Impossible to start the backup. Check the log "
"for more details, or run 'barman check %s'"
% self.config.name)
return
# check required backup directories exist
self._make_directories()
except OSError as e:
output.error('failed to create %s directory: %s',
e.filename, e.strerror)
return
# Save the database identity
self.write_identity_file()
        # Make sure we are not wasting a precious streaming PostgreSQL
        # connection that may have been opened by the self.check() call
if self.streaming:
self.streaming.close()
try:
# lock acquisition and backup execution
with ServerBackupLock(self.config.barman_lock_directory,
self.config.name):
backup_info = self.backup_manager.backup(
wait=wait, wait_timeout=wait_timeout)
# Archive incoming WALs and update WAL catalogue
self.archive_wal(verbose=False)
# Invoke sanity check of the backup
if backup_info.status == BackupInfo.WAITING_FOR_WALS:
self.check_backup(backup_info)
            # At this point it is safe to remove any remaining WAL file
            # before the first backup
previous_backup = self.get_previous_backup(backup_info.backup_id)
if not previous_backup:
self.backup_manager.remove_wal_before_backup(backup_info)
if backup_info.status == BackupInfo.WAITING_FOR_WALS:
output.warning(
"IMPORTANT: this backup is classified as "
"WAITING_FOR_WALS, meaning that Barman has not received "
"yet all the required WAL files for the backup "
"consistency.\n"
"This is a common behaviour in concurrent backup "
"scenarios, and Barman automatically set the backup as "
"DONE once all the required WAL files have been "
"archived.\n"
"Hint: execute the backup command with '--wait'")
except LockFileBusy:
output.error("Another backup process is running")
except LockFilePermissionDenied as e:
output.error("Permission denied, unable to access '%s'" % e)
def get_available_backups(
self, status_filter=BackupManager.DEFAULT_STATUS_FILTER):
"""
Get a list of available backups
        :param status_filter: the status of backups to return,
            defaults to BackupManager.DEFAULT_STATUS_FILTER
"""
return self.backup_manager.get_available_backups(status_filter)
def get_last_backup_id(
self, status_filter=BackupManager.DEFAULT_STATUS_FILTER):
"""
Get the id of the latest/last backup in the catalog (if exists)
:param status_filter: The status of the backup to return,
default to DEFAULT_STATUS_FILTER.
:return string|None: ID of the backup
"""
return self.backup_manager.get_last_backup_id(status_filter)
def get_first_backup_id(
self, status_filter=BackupManager.DEFAULT_STATUS_FILTER):
"""
Get the id of the oldest/first backup in the catalog (if exists)
:param status_filter: The status of the backup to return,
default to DEFAULT_STATUS_FILTER.
:return string|None: ID of the backup
"""
return self.backup_manager.get_first_backup_id(status_filter)
def list_backups(self):
"""
Lists all the available backups for the server
"""
retention_status = self.report_backups()
backups = self.get_available_backups(BackupInfo.STATUS_ALL)
for key in sorted(backups.keys(), reverse=True):
backup = backups[key]
backup_size = backup.size or 0
wal_size = 0
rstatus = None
if backup.status in BackupInfo.STATUS_COPY_DONE:
try:
wal_info = self.get_wal_info(backup)
backup_size += wal_info['wal_size']
wal_size = wal_info['wal_until_next_size']
except BadXlogSegmentName as e:
output.error(
"invalid WAL segment name %r\n"
"HINT: Please run \"barman rebuild-xlogdb %s\" "
"to solve this issue",
force_str(e), self.config.name)
if self.enforce_retention_policies and \
retention_status[backup.backup_id] != BackupInfo.VALID:
rstatus = retention_status[backup.backup_id]
output.result('list_backup', backup, backup_size, wal_size,
rstatus)
def get_backup(self, backup_id):
"""
Return the backup information for the given backup id.
        If the backup_id is None or the backup.info file doesn't exist,
        it returns None.
:param str|None backup_id: the ID of the backup to return
:rtype: barman.infofile.LocalBackupInfo|None
"""
return self.backup_manager.get_backup(backup_id)
def get_previous_backup(self, backup_id):
"""
Get the previous backup (if any) from the catalog
        :param backup_id: the backup id from which to return the previous backup
"""
return self.backup_manager.get_previous_backup(backup_id)
def get_next_backup(self, backup_id):
"""
Get the next backup (if any) from the catalog
        :param backup_id: the backup id from which to return the next backup
"""
return self.backup_manager.get_next_backup(backup_id)
def get_required_xlog_files(self, backup, target_tli=None,
target_time=None, target_xid=None):
"""
Get the xlog files required for a recovery
"""
begin = backup.begin_wal
end = backup.end_wal
# If timeline isn't specified, assume it is the same timeline
# of the backup
if not target_tli:
target_tli, _, _ = xlog.decode_segment_name(end)
with self.xlogdb() as fxlogdb:
for line in fxlogdb:
wal_info = WalFileInfo.from_xlogdb_line(line)
# Handle .history files: add all of them to the output,
# regardless of their age
if xlog.is_history_file(wal_info.name):
yield wal_info
continue
if wal_info.name < begin:
continue
tli, _, _ = xlog.decode_segment_name(wal_info.name)
if tli > target_tli:
continue
yield wal_info
if wal_info.name > end:
end = wal_info.name
if target_time and target_time < wal_info.time:
break
# return all the remaining history files
for line in fxlogdb:
wal_info = WalFileInfo.from_xlogdb_line(line)
if xlog.is_history_file(wal_info.name):
yield wal_info
# TODO: merge with the previous
def get_wal_until_next_backup(self, backup, include_history=False):
"""
Get the xlog files between backup and the next
:param BackupInfo backup: a backup object, the starting point
to retrieve WALs
        :param bool include_history: option for the inclusion of
            history files into the output
"""
begin = backup.begin_wal
next_end = None
if self.get_next_backup(backup.backup_id):
next_end = self.get_next_backup(backup.backup_id).end_wal
backup_tli, _, _ = xlog.decode_segment_name(begin)
with self.xlogdb() as fxlogdb:
for line in fxlogdb:
wal_info = WalFileInfo.from_xlogdb_line(line)
# Handle .history files: add all of them to the output,
# regardless of their age, if requested (the 'include_history'
# parameter is True)
if xlog.is_history_file(wal_info.name):
if include_history:
yield wal_info
continue
if wal_info.name < begin:
continue
tli, _, _ = xlog.decode_segment_name(wal_info.name)
if tli > backup_tli:
continue
if not xlog.is_wal_file(wal_info.name):
continue
if next_end and wal_info.name > next_end:
break
yield wal_info
def get_wal_full_path(self, wal_name):
"""
Build the full path of a WAL for a server given the name
:param wal_name: WAL file name
"""
# Build the path which contains the file
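        # As an illustration, WAL "000000010000000200000003" would typically
        # live under "<wals_directory>/0000000100000002/" (assuming
        # xlog.hash_dir() keeps the timeline+log prefix of the segment name)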
hash_dir = os.path.join(self.config.wals_directory,
xlog.hash_dir(wal_name))
# Build the WAL file full path
full_path = os.path.join(hash_dir, wal_name)
return full_path
def get_wal_possible_paths(self, wal_name, partial=False):
"""
Build a list of possible positions of a WAL file
:param str wal_name: WAL file name
:param bool partial: add also the '.partial' paths
"""
paths = list()
# Path in the archive
hash_dir = os.path.join(self.config.wals_directory,
xlog.hash_dir(wal_name))
full_path = os.path.join(hash_dir, wal_name)
paths.append(full_path)
# Path in incoming directory
incoming_path = os.path.join(self.config.incoming_wals_directory,
wal_name)
paths.append(incoming_path)
# Path in streaming directory
streaming_path = os.path.join(self.config.streaming_wals_directory,
wal_name)
paths.append(streaming_path)
# If partial files are required check also the '.partial' path
if partial:
paths.append(streaming_path + PARTIAL_EXTENSION)
# Add the streaming_path again to handle races with pg_receivewal
# completing the WAL file
paths.append(streaming_path)
        # The following two paths are only useful to retrieve the last
        # incomplete segment archived before a promotion.
paths.append(full_path + PARTIAL_EXTENSION)
paths.append(incoming_path + PARTIAL_EXTENSION)
# Append the archive path again, to handle races with the archiver
paths.append(full_path)
return paths
def get_wal_info(self, backup_info):
"""
Returns information about WALs for the given backup
:param barman.infofile.LocalBackupInfo backup_info: the target backup
"""
begin = backup_info.begin_wal
end = backup_info.end_wal
# counters
wal_info = dict.fromkeys(
('wal_num', 'wal_size',
'wal_until_next_num', 'wal_until_next_size',
'wal_until_next_compression_ratio',
'wal_compression_ratio'), 0)
# First WAL (always equal to begin_wal) and Last WAL names and ts
wal_info['wal_first'] = None
wal_info['wal_first_timestamp'] = None
wal_info['wal_last'] = None
wal_info['wal_last_timestamp'] = None
# WAL rate (default 0.0 per second)
wal_info['wals_per_second'] = 0.0
for item in self.get_wal_until_next_backup(backup_info):
if item.name == begin:
wal_info['wal_first'] = item.name
wal_info['wal_first_timestamp'] = item.time
if item.name <= end:
wal_info['wal_num'] += 1
wal_info['wal_size'] += item.size
else:
wal_info['wal_until_next_num'] += 1
wal_info['wal_until_next_size'] += item.size
wal_info['wal_last'] = item.name
wal_info['wal_last_timestamp'] = item.time
# Calculate statistics only for complete backups
# If the cron is not running for any reason, the required
# WAL files could be missing
if wal_info['wal_first'] and wal_info['wal_last']:
# Estimate WAL ratio
# Calculate the difference between the timestamps of
# the first WAL (begin of backup) and the last WAL
# associated to the current backup
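            # For example (hypothetical figures), 30 WAL files spanning
            # 600 seconds would give wals_per_second = 30 / 600 = 0.05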
wal_last_timestamp = wal_info['wal_last_timestamp']
wal_first_timestamp = wal_info['wal_first_timestamp']
wal_info['wal_total_seconds'] = (
wal_last_timestamp - wal_first_timestamp)
if wal_info['wal_total_seconds'] > 0:
wal_num = wal_info['wal_num']
wal_until_next_num = wal_info['wal_until_next_num']
wal_total_seconds = wal_info['wal_total_seconds']
wal_info['wals_per_second'] = (
float(wal_num + wal_until_next_num) / wal_total_seconds)
# evaluation of compression ratio for basebackup WAL files
wal_info['wal_theoretical_size'] = \
wal_info['wal_num'] * float(backup_info.xlog_segment_size)
try:
wal_size = wal_info['wal_size']
wal_info['wal_compression_ratio'] = (
1 - (wal_size / wal_info['wal_theoretical_size']))
except ZeroDivisionError:
wal_info['wal_compression_ratio'] = 0.0
# evaluation of compression ratio of WAL files
wal_until_next_num = wal_info['wal_until_next_num']
wal_info['wal_until_next_theoretical_size'] = (
wal_until_next_num * float(backup_info.xlog_segment_size))
try:
wal_until_next_size = wal_info['wal_until_next_size']
until_next_theoretical_size = (
wal_info['wal_until_next_theoretical_size'])
wal_info['wal_until_next_compression_ratio'] = (
1 - (wal_until_next_size / until_next_theoretical_size))
except ZeroDivisionError:
wal_info['wal_until_next_compression_ratio'] = 0.0
return wal_info
def recover(self, backup_info, dest, tablespaces=None, remote_command=None,
**kwargs):
"""
Performs a recovery of a backup
:param barman.infofile.LocalBackupInfo backup_info: the backup
to recover
:param str dest: the destination directory
:param dict[str,str]|None tablespaces: a tablespace
name -> location map (for relocation)
:param str|None remote_command: default None. The remote command to
recover the base backup, in case of remote backup.
:kwparam str|None target_tli: the target timeline
:kwparam str|None target_time: the target time
:kwparam str|None target_xid: the target xid
:kwparam str|None target_lsn: the target LSN
:kwparam str|None target_name: the target name created previously with
pg_create_restore_point() function call
:kwparam bool|None target_immediate: end recovery as soon as
consistency is reached
:kwparam bool exclusive: whether the recovery is exclusive or not
:kwparam str|None target_action: the recovery target action
:kwparam bool|None standby_mode: the standby mode
"""
return self.backup_manager.recover(
backup_info, dest, tablespaces, remote_command, **kwargs)
def get_wal(self, wal_name, compression=None, output_directory=None,
peek=None, partial=False):
"""
Retrieve a WAL file from the archive
:param str wal_name: id of the WAL file to find into the WAL archive
:param str|None compression: compression format for the output
:param str|None output_directory: directory where to deposit the
WAL file
        :param int|None peek: if defined, list the next N WAL files
:param bool partial: retrieve also partial WAL files
"""
# If used through SSH identify the client to add it to logs
source_suffix = ''
ssh_connection = os.environ.get('SSH_CONNECTION')
if ssh_connection:
# The client IP is the first value contained in `SSH_CONNECTION`
# which contains four space-separated values: client IP address,
# client port number, server IP address, and server port number.
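            # Hypothetical example: SSH_CONNECTION="198.51.100.7 52311 203.0.113.9 22"
            # would produce the suffix " (SSH host: 198.51.100.7)"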
source_suffix = ' (SSH host: %s)' % (ssh_connection.split()[0],)
# Sanity check
if not xlog.is_any_xlog_file(wal_name):
output.error("'%s' is not a valid wal file name%s",
wal_name, source_suffix)
return
# If peek is requested we only output a list of files
if peek:
# Get the next ``peek`` files following the provided ``wal_name``.
# If ``wal_name`` is not a simple wal file,
# we cannot guess the names of the following WAL files.
            # So ``wal_name`` is the only possible result, if it exists.
if xlog.is_wal_file(wal_name):
# We can't know what was the segment size of PostgreSQL WAL
# files at backup time. Because of this, we generate all
# the possible names for a WAL segment, and then we check
# if the requested one is included.
wal_peek_list = xlog.generate_segment_names(wal_name)
else:
wal_peek_list = iter([wal_name])
# Output the content of wal_peek_list until we have displayed
# enough files or find a missing file
count = 0
while count < peek:
try:
wal_peek_name = next(wal_peek_list)
except StopIteration:
# No more item in wal_peek_list
break
                # Get the list of possible locations. We do not prefetch
                # partial files
wal_peek_paths = self.get_wal_possible_paths(wal_peek_name,
partial=False)
# If the next WAL file is found, output the name
# and continue to the next one
if any(os.path.exists(path) for path in wal_peek_paths):
count += 1
output.info(wal_peek_name, log=False)
continue
# If ``wal_peek_file`` doesn't exist, check if we need to
# look in the following segment
tli, log, seg = xlog.decode_segment_name(wal_peek_name)
# If `seg` is not a power of two, it is not possible that we
# are at the end of a WAL group, so we are done
if not is_power_of_two(seg):
break
# This is a possible WAL group boundary, let's try the
# following group
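                # As a hypothetical example: if "000000010000000100000040" is
                # missing and 0x40 is a power of two (as with 64MB segments),
                # the search restarts from "000000010000000200000000"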
seg = 0
log += 1
# Install a new generator from the start of the next segment.
                # If the file doesn't exist we will terminate because
                # zero is not a power of two
wal_peek_name = xlog.encode_segment_name(tli, log, seg)
wal_peek_list = xlog.generate_segment_names(wal_peek_name)
# Do not output anything else
return
# If an output directory was provided write the file inside it
# otherwise we use standard output
if output_directory is not None:
destination_path = os.path.join(output_directory, wal_name)
destination_description = "into '%s' file" % destination_path
# Use the standard output for messages
logger = output
try:
destination = open(destination_path, 'wb')
except IOError as e:
output.error("Unable to open '%s' file%s: %s",
destination_path, source_suffix, e)
return
else:
destination_description = 'to standard output'
# Do not use the standard output for messages, otherwise we would
# taint the output stream
logger = _logger
try:
# Python 3.x
destination = sys.stdout.buffer
except AttributeError:
# Python 2.x
destination = sys.stdout
# Get the list of WAL file possible paths
wal_paths = self.get_wal_possible_paths(wal_name, partial)
for wal_file in wal_paths:
# Check for file existence
if not os.path.exists(wal_file):
continue
logger.info(
"Sending WAL '%s' for server '%s' %s%s",
os.path.basename(wal_file), self.config.name,
destination_description, source_suffix)
try:
# Try returning the wal_file to the client
self.get_wal_sendfile(wal_file, compression, destination)
# We are done, return to the caller
return
except CommandFailedException:
# If an external command fails we cannot really know why,
# but if the WAL file disappeared, we assume
# it has been moved in the archive so we ignore the error.
# This file will be retrieved later, as the last entry
# returned by get_wal_possible_paths() is the archive position
if not os.path.exists(wal_file):
pass
else:
raise
except OSError as exc:
# If the WAL file disappeared just ignore the error
# This file will be retrieved later, as the last entry
# returned by get_wal_possible_paths() is the archive
# position
if exc.errno == errno.ENOENT and exc.filename == wal_file:
pass
else:
raise
logger.info("Skipping vanished WAL file '%s'%s",
wal_file, source_suffix)
output.error("WAL file '%s' not found in server '%s'%s",
wal_name, self.config.name, source_suffix)
def get_wal_sendfile(self, wal_file, compression, destination):
"""
Send a WAL file to the destination file, using the required compression
:param str wal_file: WAL file path
:param str compression: required compression
:param destination: file stream to use to write the data
"""
# Identify the wal file
wal_info = self.backup_manager.compression_manager \
.get_wal_file_info(wal_file)
# Get a decompressor for the file (None if not compressed)
wal_compressor = self.backup_manager.compression_manager \
.get_compressor(wal_info.compression)
# Get a compressor for the output (None if not compressed)
out_compressor = self.backup_manager.compression_manager \
.get_compressor(compression)
# Initially our source is the stored WAL file and we do not have
# any temporary file
source_file = wal_file
uncompressed_file = None
compressed_file = None
# If the required compression is different from the source we
# decompress/compress it into the required format (getattr is
# used here to gracefully handle None objects)
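        # For instance, a gzip-compressed WAL requested without output
        # compression is first decompressed to a temporary file; requesting
        # bzip2 output would then re-compress that intermediate file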
if getattr(wal_compressor, 'compression', None) != \
getattr(out_compressor, 'compression', None):
# If source is compressed, decompress it into a temporary file
if wal_compressor is not None:
uncompressed_file = NamedTemporaryFile(
dir=self.config.wals_directory,
prefix='.%s.' % os.path.basename(wal_file),
suffix='.uncompressed')
# decompress wal file
wal_compressor.decompress(source_file, uncompressed_file.name)
source_file = uncompressed_file.name
# If output compression is required compress the source
# into a temporary file
if out_compressor is not None:
compressed_file = NamedTemporaryFile(
dir=self.config.wals_directory,
prefix='.%s.' % os.path.basename(wal_file),
suffix='.compressed')
out_compressor.compress(source_file, compressed_file.name)
source_file = compressed_file.name
# Copy the prepared source file to destination
with open(source_file, 'rb') as input_file:
shutil.copyfileobj(input_file, destination)
# Remove temp files
if uncompressed_file is not None:
uncompressed_file.close()
if compressed_file is not None:
compressed_file.close()
def put_wal(self, fileobj):
"""
Receive a WAL file from SERVER_NAME and securely store it in the
incoming directory.
The file will be read from the fileobj passed as parameter.
"""
# If used through SSH identify the client to add it to logs
source_suffix = ''
ssh_connection = os.environ.get('SSH_CONNECTION')
if ssh_connection:
# The client IP is the first value contained in `SSH_CONNECTION`
# which contains four space-separated values: client IP address,
# client port number, server IP address, and server port number.
source_suffix = ' (SSH host: %s)' % (ssh_connection.split()[0],)
# Incoming directory is where the files will be extracted
dest_dir = self.config.incoming_wals_directory
# Ensure the presence of the destination directory
mkpath(dest_dir)
incoming_file = namedtuple('incoming_file', [
'name',
'tmp_path',
'path',
'checksum',
])
# Stream read tar from stdin, store content in incoming directory
# The closing wrapper is needed only for Python 2.6
extracted_files = {}
validated_files = {}
md5sums = {}
try:
with closing(tarfile.open(mode='r|', fileobj=fileobj)) as tar:
for item in tar:
name = item.name
# Strip leading './' - tar has been manually created
if name.startswith('./'):
name = name[2:]
# Requires a regular file as tar item
if not item.isreg():
output.error(
"Unsupported file type '%s' for file '%s' "
"in put-wal for server '%s'%s",
item.type, name, self.config.name, source_suffix)
return
# Subdirectories are not supported
if '/' in name:
output.error(
"Unsupported filename '%s' "
"in put-wal for server '%s'%s",
name, self.config.name, source_suffix)
return
# Checksum file
if name == 'MD5SUMS':
# Parse content and store it in md5sums dictionary
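                        # Each line is expected in md5sum format, e.g.
                        # "d41d8cd98f00b204e9800998ecf8427e  ./000000010000000000000001"
                        # (a " *" separator marks binary mode)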
for line in tar.extractfile(item).readlines():
line = line.decode().rstrip()
try:
# Split checksums and path info
checksum, path = re.split(
r' [* ]', line, 1)
except ValueError:
output.warning(
"Bad checksum line '%s' found "
"in put-wal for server '%s'%s",
line, self.config.name, source_suffix)
continue
# Strip leading './' from path in the checksum file
if path.startswith('./'):
path = path[2:]
md5sums[path] = checksum
else:
# Extract using a temp name (with PID)
tmp_path = os.path.join(dest_dir, '.%s-%s' % (
os.getpid(), name))
path = os.path.join(dest_dir, name)
tar.makefile(item, tmp_path)
# Set the original timestamp
tar.utime(item, tmp_path)
# Add the tuple to the dictionary of extracted files
extracted_files[name] = incoming_file(
name, tmp_path, path, file_md5(tmp_path))
validated_files[name] = False
# For each received checksum verify the corresponding file
for name in md5sums:
# Check that file is present in the tar archive
if name not in extracted_files:
output.error(
"Checksum without corresponding file '%s' "
"in put-wal for server '%s'%s",
name, self.config.name, source_suffix)
return
# Verify the checksum of the file
if extracted_files[name].checksum != md5sums[name]:
output.error(
"Bad file checksum '%s' (should be %s) "
"for file '%s' "
"in put-wal for server '%s'%s",
extracted_files[name].checksum, md5sums[name],
name, self.config.name, source_suffix)
return
_logger.info(
"Received file '%s' with checksum '%s' "
"by put-wal for server '%s'%s",
name, md5sums[name], self.config.name,
source_suffix)
validated_files[name] = True
# Put the files in the final place, atomically and fsync all
for item in extracted_files.values():
# Final verification of checksum presence for each file
if not validated_files[item.name]:
output.error(
"Missing checksum for file '%s' "
"in put-wal for server '%s'%s",
item.name, self.config.name, source_suffix)
return
# If a file with the same name exists, returns an error.
# PostgreSQL archive command will retry again later and,
# at that time, Barman's WAL archiver should have already
# managed this file.
if os.path.exists(item.path):
output.error(
"Impossible to write already existing file '%s' "
"in put-wal for server '%s'%s",
item.name, self.config.name, source_suffix)
return
os.rename(item.tmp_path, item.path)
fsync_file(item.path)
fsync_dir(dest_dir)
finally:
# Cleanup of any remaining temp files (where applicable)
for item in extracted_files.values():
if os.path.exists(item.tmp_path):
os.unlink(item.tmp_path)
def cron(self, wals=True, retention_policies=True, keep_descriptors=False):
"""
Maintenance operations
:param bool wals: WAL archive maintenance
:param bool retention_policies: retention policy maintenance
:param bool keep_descriptors: whether to keep subprocess descriptors,
defaults to False
"""
try:
            # This is the highest level of locking in the cron:
            # it stops the execution of multiple cron runs on the same server
with ServerCronLock(self.config.barman_lock_directory,
self.config.name):
# When passive call sync.cron() and never run
# local WAL archival
if self.passive_node:
self.sync_cron(keep_descriptors)
# WAL management and maintenance
elif wals:
# Execute the archive-wal sub-process
self.cron_archive_wal(keep_descriptors)
if self.config.streaming_archiver:
# Spawn the receive-wal sub-process
self.cron_receive_wal(keep_descriptors)
else:
# Terminate the receive-wal sub-process if present
self.kill('receive-wal', fail_if_not_present=False)
# Verify backup
self.cron_check_backup(keep_descriptors)
# Retention policies execution
if retention_policies:
self.backup_manager.cron_retention_policy()
except LockFileBusy:
output.info(
"Another cron process is already running on server %s. "
"Skipping to the next server" % self.config.name)
except LockFilePermissionDenied as e:
output.error("Permission denied, unable to access '%s'" % e)
except (OSError, IOError) as e:
output.error("%s", e)
def cron_archive_wal(self, keep_descriptors):
"""
Method that handles the start of an 'archive-wal' sub-process.
This method must be run protected by ServerCronLock
:param bool keep_descriptors: whether to keep subprocess descriptors
attached to this process.
"""
try:
# Try to acquire ServerWalArchiveLock, if the lock is available,
# no other 'archive-wal' processes are running on this server.
#
            # There is a very small race condition window here because
# even if we are protected by ServerCronLock, the user could run
# another 'archive-wal' command manually. However, it would result
# in one of the two commands failing on lock acquisition,
# with no other consequence.
with ServerWalArchiveLock(
self.config.barman_lock_directory,
self.config.name):
# Output and release the lock immediately
output.info("Starting WAL archiving for server %s",
self.config.name, log=False)
# Init a Barman sub-process object
archive_process = BarmanSubProcess(
subcommand='archive-wal',
config=barman.__config__.config_file,
args=[self.config.name],
keep_descriptors=keep_descriptors)
# Launch the sub-process
archive_process.execute()
except LockFileBusy:
# Another archive process is running for the server,
# warn the user and skip to the next one.
output.info(
"Another archive-wal process is already running "
"on server %s. Skipping to the next server"
% self.config.name)
def cron_receive_wal(self, keep_descriptors):
"""
Method that handles the start of a 'receive-wal' sub process
This method must be run protected by ServerCronLock
:param bool keep_descriptors: whether to keep subprocess
descriptors attached to this process.
"""
try:
# Try to acquire ServerWalReceiveLock, if the lock is available,
# no other 'receive-wal' processes are running on this server.
#
            # There is a very small race condition window here because
# even if we are protected by ServerCronLock, the user could run
# another 'receive-wal' command manually. However, it would result
# in one of the two commands failing on lock acquisition,
# with no other consequence.
with ServerWalReceiveLock(
self.config.barman_lock_directory,
self.config.name):
# Output and release the lock immediately
output.info("Starting streaming archiver "
"for server %s",
self.config.name, log=False)
# Start a new receive-wal process
receive_process = BarmanSubProcess(
subcommand='receive-wal',
config=barman.__config__.config_file,
args=[self.config.name],
keep_descriptors=keep_descriptors)
# Launch the sub-process
receive_process.execute()
except LockFileBusy:
# Another receive-wal process is running for the server
# exit without message
_logger.debug("Another STREAMING ARCHIVER process is running for "
"server %s" % self.config.name)
def cron_check_backup(self, keep_descriptors):
"""
Method that handles the start of a 'check-backup' sub process
:param bool keep_descriptors: whether to keep subprocess
descriptors attached to this process.
"""
backup_id = self.get_first_backup_id([BackupInfo.WAITING_FOR_WALS])
if not backup_id:
# Nothing to be done for this server
return
try:
# Try to acquire ServerBackupIdLock, if the lock is available,
# no other 'check-backup' processes are running on this backup.
#
            # There is a very small race condition window here because
# even if we are protected by ServerCronLock, the user could run
# another command that takes the lock. However, it would result
# in one of the two commands failing on lock acquisition,
# with no other consequence.
with ServerBackupIdLock(
self.config.barman_lock_directory,
self.config.name,
backup_id):
# Output and release the lock immediately
output.info("Starting check-backup for backup %s of server %s",
backup_id, self.config.name, log=False)
# Start a check-backup process
check_process = BarmanSubProcess(
subcommand='check-backup',
config=barman.__config__.config_file,
args=[self.config.name, backup_id],
keep_descriptors=keep_descriptors)
check_process.execute()
except LockFileBusy:
# Another process is holding the backup lock
_logger.debug("Another process is holding the backup lock for %s "
"of server %s" % (backup_id, self.config.name))
def archive_wal(self, verbose=True):
"""
Perform the WAL archiving operations.
Usually run as subprocess of the barman cron command,
but can be executed manually using the barman archive-wal command
        :param bool verbose: if False, produce output only when there is
            at least one file to archive
"""
output.debug("Starting archive-wal for server %s", self.config.name)
try:
# Take care of the archive lock.
# Only one archive job per server is admitted
with ServerWalArchiveLock(self.config.barman_lock_directory,
self.config.name):
self.backup_manager.archive_wal(verbose)
except LockFileBusy:
# If another process is running for this server,
# warn the user and skip to the next server
output.info("Another archive-wal process is already running "
"on server %s. Skipping to the next server"
% self.config.name)
def create_physical_repslot(self):
"""
Create a physical replication slot using the streaming connection
"""
if not self.streaming:
output.error("Unable to create a physical replication slot: "
"streaming connection not configured")
return
# Replication slots are not supported by PostgreSQL < 9.4
try:
if self.streaming.server_version < 90400:
output.error("Unable to create a physical replication slot: "
"not supported by '%s' "
"(9.4 or higher is required)" %
self.streaming.server_major_version)
return
except PostgresException as exc:
msg = "Cannot connect to server '%s'" % self.config.name
output.error(msg, log=False)
_logger.error("%s: %s", msg, force_str(exc).strip())
return
if not self.config.slot_name:
output.error("Unable to create a physical replication slot: "
"slot_name configuration option required")
return
output.info(
"Creating physical replication slot '%s' on server '%s'",
self.config.slot_name,
self.config.name)
try:
self.streaming.create_physical_repslot(self.config.slot_name)
output.info("Replication slot '%s' created", self.config.slot_name)
except PostgresDuplicateReplicationSlot:
output.error("Replication slot '%s' already exists",
self.config.slot_name)
except PostgresReplicationSlotsFull:
output.error("All replication slots for server '%s' are in use\n"
"Free one or increase the max_replication_slots "
"value on your PostgreSQL server.",
self.config.name)
except PostgresException as exc:
output.error(
"Cannot create replication slot '%s' on server '%s': %s",
self.config.slot_name,
self.config.name,
force_str(exc).strip())
def drop_repslot(self):
"""
Drop a replication slot using the streaming connection
"""
if not self.streaming:
output.error("Unable to drop a physical replication slot: "
"streaming connection not configured")
return
# Replication slots are not supported by PostgreSQL < 9.4
try:
if self.streaming.server_version < 90400:
output.error("Unable to drop a physical replication slot: "
"not supported by '%s' (9.4 or higher is "
"required)" %
self.streaming.server_major_version)
return
except PostgresException as exc:
msg = "Cannot connect to server '%s'" % self.config.name
output.error(msg, log=False)
_logger.error("%s: %s", msg, force_str(exc).strip())
return
if not self.config.slot_name:
output.error("Unable to drop a physical replication slot: "
"slot_name configuration option required")
return
output.info(
"Dropping physical replication slot '%s' on server '%s'",
self.config.slot_name,
self.config.name)
try:
self.streaming.drop_repslot(self.config.slot_name)
output.info("Replication slot '%s' dropped", self.config.slot_name)
except PostgresInvalidReplicationSlot:
output.error("Replication slot '%s' does not exist",
self.config.slot_name)
except PostgresReplicationSlotInUse:
output.error(
"Cannot drop replication slot '%s' on server '%s' "
"because it is in use.",
self.config.slot_name,
self.config.name)
except PostgresException as exc:
output.error(
"Cannot drop replication slot '%s' on server '%s': %s",
self.config.slot_name,
self.config.name,
force_str(exc).strip())
def receive_wal(self, reset=False):
"""
Enable the reception of WAL files using streaming protocol.
Usually started by barman cron command.
        If executed manually, the barman process will not terminate but
        will continuously receive WAL files from the PostgreSQL server.
:param reset: When set, resets the status of receive-wal
"""
# Execute the receive-wal command only if streaming_archiver
# is enabled
if not self.config.streaming_archiver:
output.error("Unable to start receive-wal process: "
"streaming_archiver option set to 'off' in "
"barman configuration file")
return
if not reset:
output.info("Starting receive-wal for server %s", self.config.name)
try:
# Take care of the receive-wal lock.
# Only one receiving process per server is permitted
with ServerWalReceiveLock(self.config.barman_lock_directory,
self.config.name):
try:
# Only the StreamingWalArchiver implementation
# does something.
                    # WARNING: This code assumes that there is only one
# StreamingWalArchiver in the archivers list.
for archiver in self.archivers:
archiver.receive_wal(reset)
except ArchiverFailure as e:
output.error(e)
except LockFileBusy:
# If another process is running for this server,
if reset:
output.info("Unable to reset the status of receive-wal "
"for server %s. Process is still running"
% self.config.name)
else:
output.info("Another receive-wal process is already running "
"for server %s." % self.config.name)
@property
def systemid(self):
"""
Get the system identifier, as returned by the PostgreSQL server
:return str: the system identifier
"""
status = self.get_remote_status()
# Main PostgreSQL connection has higher priority
if status.get('postgres_systemid'):
return status.get('postgres_systemid')
# Fallback: streaming connection
return status.get('streaming_systemid')
@property
def xlogdb_file_name(self):
"""
The name of the file containing the XLOG_DB
:return str: the name of the file that contains the XLOG_DB
"""
return os.path.join(self.config.wals_directory, self.XLOG_DB)
@contextmanager
def xlogdb(self, mode='r'):
"""
Context manager to access the xlogdb file.
This method uses locking to make sure only one process is accessing
the database at a time. The database file will be created
        if it does not exist.
Usage example:
with server.xlogdb('w') as file:
file.write(new_line)
:param str mode: open the file with the required mode
(default read-only)
"""
if not os.path.exists(self.config.wals_directory):
os.makedirs(self.config.wals_directory)
xlogdb = self.xlogdb_file_name
with ServerXLOGDBLock(self.config.barman_lock_directory,
self.config.name):
# If the file doesn't exist and it is required to read it,
# we open it in a+ mode, to be sure it will be created
if not os.path.exists(xlogdb) and mode.startswith('r'):
if '+' not in mode:
mode = "a%s+" % mode[1:]
else:
mode = "a%s" % mode[1:]
with open(xlogdb, mode) as f:
# execute the block nested in the with statement
try:
yield f
finally:
# we are exiting the context
# if file is writable (mode contains w, a or +)
# make sure the data is written to disk
# http://docs.python.org/2/library/os.html#os.fsync
if any((c in 'wa+') for c in f.mode):
f.flush()
os.fsync(f.fileno())
def report_backups(self):
if not self.enforce_retention_policies:
return dict()
else:
return self.config.retention_policy.report()
def rebuild_xlogdb(self):
"""
Rebuild the whole xlog database guessing it from the archive content.
"""
return self.backup_manager.rebuild_xlogdb()
def get_backup_ext_info(self, backup_info):
"""
Return a dictionary containing all available information about a backup
The result is equivalent to the sum of information from
* BackupInfo object
* the Server.get_wal_info() return value
* the context in the catalog (if available)
* the retention policy status
:param backup_info: the target backup
:rtype dict: all information about a backup
"""
backup_ext_info = backup_info.to_dict()
if backup_info.status in BackupInfo.STATUS_COPY_DONE:
try:
previous_backup = self.backup_manager.get_previous_backup(
backup_ext_info['backup_id'])
next_backup = self.backup_manager.get_next_backup(
backup_ext_info['backup_id'])
if previous_backup:
backup_ext_info[
'previous_backup_id'] = previous_backup.backup_id
else:
backup_ext_info['previous_backup_id'] = None
if next_backup:
backup_ext_info['next_backup_id'] = next_backup.backup_id
else:
backup_ext_info['next_backup_id'] = None
except UnknownBackupIdException:
# no next_backup_id and previous_backup_id items
# means "Not available"
pass
backup_ext_info.update(self.get_wal_info(backup_info))
if self.enforce_retention_policies:
policy = self.config.retention_policy
backup_ext_info['retention_policy_status'] = \
policy.backup_status(backup_info.backup_id)
else:
backup_ext_info['retention_policy_status'] = None
# Check any child timeline exists
children_timelines = self.get_children_timelines(
backup_ext_info['timeline'],
forked_after=backup_info.end_xlog)
backup_ext_info['children_timelines'] = \
children_timelines
return backup_ext_info
def show_backup(self, backup_info):
"""
Output all available information about a backup
:param backup_info: the target backup
"""
try:
backup_ext_info = self.get_backup_ext_info(backup_info)
output.result('show_backup', backup_ext_info)
except BadXlogSegmentName as e:
output.error(
"invalid xlog segment name %r\n"
"HINT: Please run \"barman rebuild-xlogdb %s\" "
"to solve this issue",
force_str(e), self.config.name)
output.close_and_exit()
@staticmethod
def _build_path(path_prefix=None):
"""
If a path_prefix is provided build a string suitable to be used in
PATH environment variable by joining the path_prefix with the
current content of PATH environment variable.
        If `path_prefix` is None, returns None.
:rtype: str|None
"""
if not path_prefix:
return None
sys_path = os.environ.get('PATH')
return "%s%s%s" % (path_prefix, os.pathsep, sys_path)
def kill(self, task, fail_if_not_present=True):
"""
Given the name of a barman sub-task type,
attempts to stop all the processes
:param string task: The task we want to stop
:param bool fail_if_not_present: Display an error when the process
is not present (default: True)
"""
process_list = self.process_manager.list(task)
for process in process_list:
if self.process_manager.kill(process):
output.info('Stopped process %s(%s)',
process.task, process.pid)
return
else:
output.error('Cannot terminate process %s(%s)',
process.task, process.pid)
return
if fail_if_not_present:
output.error('Termination of %s failed: '
'no such process for server %s',
task, self.config.name)
def switch_wal(self, force=False, archive=None, archive_timeout=None):
"""
Execute the switch-wal command on the target server
"""
closed_wal = None
try:
if force:
# If called with force, execute a checkpoint before the
# switch_wal command
_logger.info('Force a CHECKPOINT before pg_switch_wal()')
self.postgres.checkpoint()
# Perform the switch_wal. expect a WAL name only if the switch
# has been successfully executed, False otherwise.
closed_wal = self.postgres.switch_wal()
if closed_wal is None:
# Something went wrong during the execution of the
# pg_switch_wal command
output.error("Unable to perform pg_switch_wal "
"for server '%s'." % self.config.name)
return
if closed_wal:
                # The switch_wal command has been executed successfully
output.info(
"The WAL file %s has been closed on server '%s'" %
(closed_wal, self.config.name))
else:
                # It is not necessary to perform a switch_wal
output.info("No switch required for server '%s'" %
self.config.name)
except PostgresIsInRecovery:
output.info("No switch performed because server '%s' "
"is a standby." % self.config.name)
except PostgresSuperuserRequired:
# Superuser rights are required to perform the switch_wal
output.error("Barman switch-wal requires superuser rights")
return
# If the user has asked to wait for a WAL file to be archived,
# wait until a new WAL file has been found
# or the timeout has expired
if archive:
self.wait_for_wal(closed_wal, archive_timeout)
def wait_for_wal(self, wal_file=None, archive_timeout=None):
"""
Wait for a WAL file to be archived on the server
:param str|None wal_file: Name of the WAL file, or None if we should
just wait for a new WAL file to be archived
:param int|None archive_timeout: Timeout in seconds
"""
max_msg = ""
if archive_timeout:
max_msg = " (max: %s seconds)" % archive_timeout
initial_wals = dict()
if not wal_file:
wals = self.backup_manager.get_latest_archived_wals_info()
initial_wals = dict([(tli, wals[tli].name) for tli in wals])
if wal_file:
output.info(
"Waiting for the WAL file %s from server '%s'%s",
wal_file, self.config.name, max_msg)
else:
output.info(
"Waiting for a WAL file from server '%s' to be archived%s",
self.config.name, max_msg)
# Wait for a new file until end_time or forever if no archive_timeout
end_time = None
if archive_timeout:
end_time = time.time() + archive_timeout
while not end_time or time.time() < end_time:
self.archive_wal(verbose=False)
# Finish if the closed wal file is in the archive.
if wal_file:
if os.path.exists(self.get_wal_full_path(wal_file)):
break
else:
# Check if any new file has been archived, on any timeline
wals = self.backup_manager.get_latest_archived_wals_info()
current_wals = dict([(tli, wals[tli].name) for tli in wals])
if current_wals != initial_wals:
break
# sleep a bit before retrying
time.sleep(.1)
else:
if wal_file:
output.error("The WAL file %s has not been received "
"in %s seconds",
wal_file, archive_timeout)
else:
output.info(
"A WAL file has not been received in %s seconds",
archive_timeout)
def replication_status(self, target='all'):
"""
Implements the 'replication-status' command.
"""
if target == 'hot-standby':
client_type = PostgreSQLConnection.STANDBY
elif target == 'wal-streamer':
client_type = PostgreSQLConnection.WALSTREAMER
else:
client_type = PostgreSQLConnection.ANY_STREAMING_CLIENT
try:
standby_info = self.postgres.get_replication_stats(client_type)
if standby_info is None:
output.error('Unable to connect to server %s' %
self.config.name)
else:
output.result('replication_status', self.config.name,
target, self.postgres.current_xlog_location,
standby_info)
except PostgresUnsupportedFeature as e:
output.info(" Requires PostgreSQL %s or higher", e)
except PostgresSuperuserRequired:
output.info(" Requires superuser rights")
def get_children_timelines(self, tli, forked_after=None):
"""
Get a list of the children of the passed timeline
:param int tli: Id of the timeline to check
:param str forked_after: XLog location after which the timeline
must have been created
:return List[xlog.HistoryFileData]: the list of timelines that
have the timeline with id 'tli' as parent
"""
comp_manager = self.backup_manager.compression_manager
if forked_after:
forked_after = xlog.parse_lsn(forked_after)
children = []
# Search all the history files after the passed timeline
children_tli = tli
while True:
children_tli += 1
history_path = os.path.join(self.config.wals_directory,
"%08X.history" % children_tli)
            # If the file doesn't exist, stop searching
if not os.path.exists(history_path):
break
# Create the WalFileInfo object using the file
wal_info = comp_manager.get_wal_file_info(history_path)
# Get content of the file. We need to pass a compressor manager
# here to handle an eventual compression of the history file
history_info = xlog.decode_history_file(
wal_info,
self.backup_manager.compression_manager)
            # Save the history info only if it is reachable from this timeline.
for tinfo in history_info:
# The history file contains the full genealogy
# but we keep only the line with `tli` timeline as parent.
if tinfo.parent_tli != tli:
continue
# We need to return this history info only if this timeline
# has been forked after the passed LSN
if forked_after and tinfo.switchpoint < forked_after:
continue
children.append(tinfo)
return children
def check_backup(self, backup_info):
"""
Make sure that we have all the WAL files required
by a physical backup for consistency (from the
first to the last WAL file)
:param backup_info: the target backup
"""
output.debug("Checking backup %s of server %s",
backup_info.backup_id, self.config.name)
try:
# No need to check a backup which is not waiting for WALs.
# Doing that we could also mark as DONE backups which
# were previously FAILED due to copy errors
if backup_info.status == BackupInfo.FAILED:
output.error(
"The validity of a failed backup cannot be checked")
return
# Take care of the backup lock.
            # Only one process can modify a backup at a time
with ServerBackupIdLock(self.config.barman_lock_directory,
self.config.name,
backup_info.backup_id):
orig_status = backup_info.status
self.backup_manager.check_backup(backup_info)
if orig_status == backup_info.status:
output.debug(
"Check finished: the status of backup %s of server %s "
"remains %s",
backup_info.backup_id,
self.config.name,
backup_info.status)
else:
output.debug(
"Check finished: the status of backup %s of server %s "
"changed from %s to %s",
backup_info.backup_id,
self.config.name,
orig_status,
backup_info.status)
except LockFileBusy:
# If another process is holding the backup lock,
# notify the user and terminate.
# This is not an error condition because it happens when
# another process is validating the backup.
output.info(
"Another process is holding the lock for "
"backup %s of server %s." % (
backup_info.backup_id, self.config.name))
return
except LockFilePermissionDenied as e:
# We cannot access the lockfile.
# warn the user and terminate
output.error("Permission denied, unable to access '%s'" % e)
return
def sync_status(self, last_wal=None, last_position=None):
"""
Return server status for sync purposes.
The method outputs JSON, containing:
* list of backups (with DONE status)
* server configuration
* last read position (in xlog.db)
* last read wal
* list of archived wal files
        If last_wal is provided, the method will discard all the WAL files
older than last_wal.
If last_position is provided the method will try to read
the xlog.db file using last_position as starting point.
If the wal file at last_position does not match last_wal, read from the
start and use last_wal as limit
:param str|None last_wal: last read wal
:param int|None last_position: last read position (in xlog.db)
"""
sync_status = {}
wals = []
# Get all the backups using default filter for
# get_available_backups method
# (BackupInfo.DONE)
backups = self.get_available_backups()
        # Retrieve the first WAL associated with a backup; it will be used
        # to filter out WAL files that are too old to be useful
first_useful_wal = None
if backups:
first_useful_wal = backups[sorted(backups.keys())[0]].begin_wal
# Read xlogdb file.
with self.xlogdb() as fxlogdb:
starting_point = self.set_sync_starting_point(fxlogdb,
last_wal,
last_position)
check_first_wal = starting_point == 0 and last_wal is not None
# The wal_info and line variables are used after the loop.
# We initialize them here to avoid errors with an empty xlogdb.
line = None
wal_info = None
for line in fxlogdb:
# Parse the line
wal_info = WalFileInfo.from_xlogdb_line(line)
# Check if user is requesting data that is not available.
# TODO: probably the check should be something like
# TODO: last_wal + 1 < wal_info.name
if check_first_wal:
if last_wal < wal_info.name:
raise SyncError(
"last_wal '%s' is older than the first"
" available wal '%s'" % (last_wal, wal_info.name))
else:
check_first_wal = False
# If last_wal is provided, discard any line older than last_wal
if last_wal:
if wal_info.name <= last_wal:
continue
# Else don't return any WAL older than first available backup
elif first_useful_wal and wal_info.name < first_useful_wal:
continue
wals.append(wal_info)
if wal_info is not None:
# Check if user is requesting data that is not available.
if last_wal is not None and last_wal > wal_info.name:
raise SyncError(
"last_wal '%s' is newer than the last available wal "
" '%s'" % (last_wal, wal_info.name))
# Set last_position with the current position - len(last_line)
# (returning the beginning of the last line)
sync_status['last_position'] = fxlogdb.tell() - len(line)
# Set the name of the last wal of the file
sync_status['last_name'] = wal_info.name
else:
# we started over
sync_status['last_position'] = 0
sync_status['last_name'] = ''
sync_status['backups'] = backups
sync_status['wals'] = wals
sync_status['version'] = barman.__version__
sync_status['config'] = self.config
json.dump(sync_status, sys.stdout, cls=BarmanEncoder, indent=4)
def sync_cron(self, keep_descriptors):
"""
Manage synchronisation operations between passive node and
master node.
The method recover information from the remote master
server, evaluate if synchronisation with the master is required
and spawn barman sub processes, syncing backups and WAL files
:param bool keep_descriptors: whether to keep subprocess descriptors
attached to this process.
"""
# Recover information from primary node
sync_wal_info = self.load_sync_wals_info()
# Use last_wal and last_position for the remote call to the
# master server
try:
remote_info = self.primary_node_info(sync_wal_info.last_wal,
sync_wal_info.last_position)
except SyncError as exc:
output.error("Failed to retrieve the primary node status: %s"
% force_str(exc))
return
# Perform backup synchronisation
if remote_info['backups']:
# Get the list of backups that need to be synced
# with the local server
local_backup_list = self.get_available_backups()
# Subtract the list of the already
# synchronised backups from the remote backup lists,
# obtaining the list of backups still requiring synchronisation
sync_backup_list = set(remote_info['backups']) - set(
local_backup_list)
else:
            # No backup synchronisation required
output.info("No backup synchronisation required for server %s",
self.config.name, log=False)
sync_backup_list = []
for backup_id in sorted(sync_backup_list):
# Check if this backup_id needs to be synchronized by spawning a
# sync-backup process.
# The same set of checks will be executed by the spawned process.
# This "double check" is necessary because we don't want the cron
# to spawn unnecessary processes.
try:
local_backup_info = self.get_backup(backup_id)
self.check_sync_required(backup_id,
remote_info,
local_backup_info)
except SyncError as e:
# It means that neither the local backup
# nor the remote one exist.
# This should not happen here.
output.exception("Unexpected state: %s", e)
break
except SyncToBeDeleted:
# The backup does not exist on primary server
# and is FAILED here.
# It must be removed by the sync-backup process.
pass
except SyncNothingToDo:
# It could mean that the local backup is in DONE state or
# that it is obsolete according to
# the local retention policies.
# In both cases, continue with the next backup.
continue
# Now that we are sure that a backup-sync subprocess is necessary,
# we need to acquire the backup lock, to be sure that
# there aren't other processes synchronising the backup.
            # If the lock cannot be acquired, another synchronisation process
# is running, so we give up.
try:
with ServerBackupSyncLock(self.config.barman_lock_directory,
self.config.name, backup_id):
output.info("Starting copy of backup %s for server %s",
backup_id, self.config.name)
except LockFileBusy:
output.info("A synchronisation process for backup %s"
" on server %s is already in progress",
backup_id, self.config.name, log=False)
# Stop processing this server
break
# Init a Barman sub-process object
sub_process = BarmanSubProcess(
subcommand='sync-backup',
config=barman.__config__.config_file,
args=[self.config.name, backup_id],
keep_descriptors=keep_descriptors)
# Launch the sub-process
sub_process.execute()
# Stop processing this server
break
# Perform WAL synchronisation
if remote_info['wals']:
# We need to acquire a sync-wal lock, to be sure that
# there aren't other processes synchronising the WAL files.
            # If the lock cannot be acquired, another synchronisation process
# is running, so we give up.
try:
with ServerWalSyncLock(self.config.barman_lock_directory,
self.config.name,):
output.info("Started copy of WAL files for server %s",
self.config.name)
except LockFileBusy:
output.info("WAL synchronisation already running"
" for server %s", self.config.name, log=False)
return
# Init a Barman sub-process object
sub_process = BarmanSubProcess(
subcommand='sync-wals',
config=barman.__config__.config_file,
args=[self.config.name],
keep_descriptors=keep_descriptors)
# Launch the sub-process
sub_process.execute()
else:
# no WAL synchronisation is required
output.info("No WAL synchronisation required for server %s",
self.config.name, log=False)
def check_sync_required(self,
backup_name,
primary_info,
local_backup_info):
"""
Check if it is necessary to sync a backup.
If the backup is present on the Primary node:
* if it does not exist locally: continue (synchronise it)
* if it exists and is DONE locally: raise SyncNothingToDo
(nothing to do)
* if it exists and is FAILED locally: continue (try to recover it)
If the backup is not present on the Primary node:
* if it does not exist locally: raise SyncError (wrong call)
* if it exists and is DONE locally: raise SyncNothingToDo
(nothing to do)
* if it exists and is FAILED locally: raise SyncToBeDeleted (remove it)
If a backup needs to be synchronised but it is obsolete according
to local retention policies, raise SyncNothingToDo,
else return to the caller.
:param str backup_name: str name of the backup to sync
:param dict primary_info: dict containing the Primary node status
:param barman.infofile.BackupInfo local_backup_info: BackupInfo object
representing the current backup state
:raise SyncError: There is an error in the user request
:raise SyncNothingToDo: Nothing to do for this request
:raise SyncToBeDeleted: Backup is not recoverable and must be deleted
"""
backups = primary_info['backups']
# Backup not present on Primary node, and not present
# locally. Raise exception.
if backup_name not in backups \
and local_backup_info is None:
raise SyncError("Backup %s is absent on %s server" %
(backup_name, self.config.name))
# Backup not present on Primary node, but is
# present locally with status FAILED: backup incomplete.
# Remove the backup and warn the user
if backup_name not in backups \
and local_backup_info is not None \
and local_backup_info.status == BackupInfo.FAILED:
raise SyncToBeDeleted(
"Backup %s is absent on %s server and is incomplete locally" %
(backup_name, self.config.name))
# Backup not present on Primary node, but is
# present locally with status DONE. Sync complete, local only.
if backup_name not in backups \
and local_backup_info is not None \
and local_backup_info.status == BackupInfo.DONE:
raise SyncNothingToDo(
"Backup %s is absent on %s server, but present locally "
"(local copy only)" % (backup_name, self.config.name))
# Backup present on Primary node, and present locally
# with status DONE. Sync complete.
if backup_name in backups \
and local_backup_info is not None \
and local_backup_info.status == BackupInfo.DONE:
raise SyncNothingToDo("Backup %s is already synced with"
" %s server" % (backup_name,
self.config.name))
# Retention Policy: if the local server has a Retention policy,
# check that the remote backup is not obsolete.
enforce_retention_policies = self.enforce_retention_policies
retention_policy_mode = self.config.retention_policy_mode
if enforce_retention_policies and retention_policy_mode == 'auto':
# All the checks regarding retention policies are in
# this boolean method.
if self.is_backup_locally_obsolete(backup_name, backups):
# The remote backup is obsolete according to
# local retention policies.
# Nothing to do.
raise SyncNothingToDo("Remote backup %s/%s is obsolete for "
"local retention policies." %
(primary_info['config']['name'],
backup_name))
def load_sync_wals_info(self):
"""
Load the content of SYNC_WALS_INFO_FILE for the given server
:return collections.namedtuple: last read wal and position information
"""
sync_wals_info_file = os.path.join(self.config.wals_directory,
SYNC_WALS_INFO_FILE)
if not os.path.exists(sync_wals_info_file):
return SyncWalInfo(None, None)
try:
with open(sync_wals_info_file) as f:
return SyncWalInfo._make(f.readline().split('\t'))
except (OSError, IOError) as e:
raise SyncError("Cannot open %s file for server %s: %s" % (
SYNC_WALS_INFO_FILE, self.config.name, e))
def primary_node_info(self, last_wal=None, last_position=None):
"""
Invoke sync-info directly on the specified primary node
The method issues a call to the sync-info method on the primary
node through an SSH connection
:param barman.server.Server self: the Server object
:param str|None last_wal: last read wal
:param int|None last_position: last read position (in xlog.db)
:raise SyncError: if the ssh command fails
"""
# First we need to check if the server is in passive mode
_logger.debug("primary sync-info(%s, %s, %s)",
self.config.name,
last_wal,
last_position)
if not self.passive_node:
raise SyncError("server %s is not passive" % self.config.name)
# Issue a call to 'barman sync-info' to the primary node,
# using primary_ssh_command option to establish an
# SSH connection.
remote_command = Command(cmd=self.config.primary_ssh_command,
shell=True, check=True, path=self.path)
        # We run it in a loop to retry when the master reports an error.
while True:
try:
# Build the command string
cmd_str = "barman sync-info %s " % self.config.name
# If necessary we add last_wal and last_position
# to the command string
if last_wal is not None:
cmd_str += "%s " % last_wal
if last_position is not None:
cmd_str += "%s " % last_position
# Then issue the command
remote_command(cmd_str)
# All good, exit the retry loop with 'break'
break
except CommandFailedException as exc:
# In case we requested synchronisation with a last WAL info,
# we try again requesting the full current status, but only if
# exit code is 1. A different exit code means that
# the error is not from Barman (i.e. ssh failure)
if exc.args[0]['ret'] == 1 and last_wal is not None:
last_wal = None
last_position = None
output.warning(
"sync-info is out of sync. "
"Self-recovery procedure started: "
"requesting full synchronisation from "
"primary server %s" % self.config.name)
continue
# Wrap the CommandFailed exception with a SyncError
# for custom message and logging.
raise SyncError("sync-info execution on remote "
"primary server %s failed: %s" %
(self.config.name, exc.args[0]['err']))
# Save the result on disk
primary_info_file = os.path.join(self.config.backup_directory,
PRIMARY_INFO_FILE)
# parse the json output
remote_info = json.loads(remote_command.out)
try:
# TODO: rename the method to make it public
# noinspection PyProtectedMember
self._make_directories()
# Save remote info to disk
# We do not use a LockFile here. Instead we write all data
# in a new file (adding '.tmp' extension) then we rename it
# replacing the old one.
            # This works because renaming is an atomic operation
            # (a POSIX requirement)
primary_info_file_tmp = primary_info_file + '.tmp'
with open(primary_info_file_tmp, 'w') as info_file:
info_file.write(remote_command.out)
os.rename(primary_info_file_tmp, primary_info_file)
except (OSError, IOError) as e:
# Wrap file access exceptions using SyncError
raise SyncError("Cannot open %s file for server %s: %s" % (
PRIMARY_INFO_FILE,
self.config.name, e))
return remote_info
def is_backup_locally_obsolete(self, backup_name, remote_backups):
"""
Check if a remote backup is obsolete according with the local
retention policies.
:param barman.server.Server self: Server object
:param str backup_name: str name of the backup to sync
:param dict remote_backups: dict containing the Primary node status
:return bool: returns if the backup is obsolete or not
"""
# Get the local backups and add the remote backup info. This will
# simulate the situation after the copy of the remote backup.
local_backups = self.get_available_backups(BackupInfo.STATUS_NOT_EMPTY)
backup = remote_backups[backup_name]
local_backups[backup_name] = LocalBackupInfo.from_json(self, backup)
# Execute the local retention policy on the modified list of backups
report = self.config.retention_policy.report(source=local_backups)
# If the added backup is obsolete return true.
return report[backup_name] == BackupInfo.OBSOLETE
def sync_backup(self, backup_name):
"""
Method for the synchronisation of a backup from a primary server.
        The method checks that the server is passive and whether it is
        possible to sync with the primary. It acquires a lock at backup level
        and copies the backup from the primary node using rsync.
During the sync process the backup on the Passive node
is marked as SYNCING and if the sync fails
(due to network failure, user interruption...) it is marked as FAILED.
:param barman.server.Server self: the passive Server object to sync
:param str backup_name: the name of the backup to sync.
"""
_logger.debug("sync_backup(%s, %s)", self.config.name, backup_name)
if not self.passive_node:
raise SyncError("server %s is not passive" % self.config.name)
local_backup_info = self.get_backup(backup_name)
# Step 1. Parse data from Primary server.
_logger.info(
"Synchronising with server %s backup %s: step 1/3: "
"parse server information", self.config.name, backup_name)
try:
primary_info = self.load_primary_info()
self.check_sync_required(backup_name,
primary_info, local_backup_info)
except SyncError as e:
# Invocation error: exit with return code 1
output.error("%s", e)
return
except SyncToBeDeleted as e:
# The required backup does not exist on primary,
# therefore it should be deleted also on passive node,
# as it's not in DONE status.
output.warning("%s, purging local backup", e)
self.delete_backup(local_backup_info)
return
except SyncNothingToDo as e:
# Nothing to do. Log as info level and exit
output.info("%s", e)
return
# If the backup is present on Primary node, and is not present at all
# locally or is present with FAILED status, execute sync.
# Retrieve info about the backup from PRIMARY_INFO_FILE
remote_backup_info = primary_info['backups'][backup_name]
remote_backup_dir = primary_info['config']['basebackups_directory']
# Try to acquire the backup lock, if the lock is not available abort
# the copy.
try:
with ServerBackupSyncLock(self.config.barman_lock_directory,
self.config.name, backup_name):
try:
backup_manager = self.backup_manager
# Build a BackupInfo object
local_backup_info = LocalBackupInfo.from_json(
self,
remote_backup_info)
local_backup_info.set_attribute('status',
BackupInfo.SYNCING)
local_backup_info.save()
backup_manager.backup_cache_add(local_backup_info)
# Activate incremental copy if requested
# Calculate the safe_horizon as the start time of the older
# backup involved in the copy
# NOTE: safe_horizon is a tz-aware timestamp because
# BackupInfo class ensures that property
reuse_mode = self.config.reuse_backup
safe_horizon = None
reuse_dir = None
if reuse_mode:
prev_backup = backup_manager.get_previous_backup(
backup_name)
next_backup = backup_manager.get_next_backup(
backup_name)
# If a newer backup is present, using it is preferable
# because that backup will remain valid longer
if next_backup:
safe_horizon = local_backup_info.begin_time
reuse_dir = next_backup.get_basebackup_directory()
elif prev_backup:
safe_horizon = prev_backup.begin_time
reuse_dir = prev_backup.get_basebackup_directory()
else:
reuse_mode = None
# Try to copy from the Primary node the backup using
# the copy controller.
copy_controller = RsyncCopyController(
ssh_command=self.config.primary_ssh_command,
network_compression=self.config.network_compression,
path=self.path,
reuse_backup=reuse_mode,
safe_horizon=safe_horizon,
retry_times=self.config.basebackup_retry_times,
retry_sleep=self.config.basebackup_retry_sleep,
workers=self.config.parallel_jobs)
copy_controller.add_directory(
'basebackup',
":%s/%s/" % (remote_backup_dir, backup_name),
local_backup_info.get_basebackup_directory(),
exclude_and_protect=['/backup.info', '/.backup.lock'],
bwlimit=self.config.bandwidth_limit,
reuse=reuse_dir,
item_class=RsyncCopyController.PGDATA_CLASS)
_logger.info(
"Synchronising with server %s backup %s: step 2/3: "
"file copy", self.config.name, backup_name)
copy_controller.copy()
# Save the backup state and exit
_logger.info("Synchronising with server %s backup %s: "
"step 3/3: finalise sync",
self.config.name, backup_name)
local_backup_info.set_attribute('status', BackupInfo.DONE)
local_backup_info.save()
except CommandFailedException as e:
# Report rsync errors
msg = 'failure syncing server %s backup %s: %s' % (
self.config.name, backup_name, e)
output.error(msg)
# Set the BackupInfo status to FAILED
local_backup_info.set_attribute('status',
BackupInfo.FAILED)
local_backup_info.set_attribute('error', msg)
local_backup_info.save()
return
# Catch KeyboardInterrupt (Ctrl+c) and all the exceptions
except BaseException as e:
msg_lines = force_str(e).strip().splitlines()
if local_backup_info:
# Use only the first line of exception message
# in local_backup_info error field
local_backup_info.set_attribute("status",
BackupInfo.FAILED)
# If the exception has no attached message
# use the raw type name
if not msg_lines:
msg_lines = [type(e).__name__]
local_backup_info.set_attribute(
"error",
"failure syncing server %s backup %s: %s" % (
self.config.name, backup_name, msg_lines[0]))
local_backup_info.save()
output.error("Backup failed syncing with %s: %s\n%s",
self.config.name, msg_lines[0],
'\n'.join(msg_lines[1:]))
except LockFileException:
output.error("Another synchronisation process for backup %s "
"of server %s is already running.",
backup_name, self.config.name)
def sync_wals(self):
"""
Method for the synchronisation of WAL files on the passive node,
by copying them from the primary server.
The method checks if the server is passive, then tries to acquire
a sync-wal lock.
Recovers the id of the last locally archived WAL file from the
status file ($wals_directory/sync-wals.info).
Reads the primary.info file and parses it, then obtains the list of
WAL files that have not yet been synchronised with the master.
Rsync is used for file synchronisation with the primary server.
Once the copy is finished, acquires a lock on xlog.db, updates it
then releases the lock.
Before exiting, the method updates the last_wal
and last_position fields in the sync-wals.info file.
:param barman.server.Server self: the Server object to synchronise
"""
_logger.debug("sync_wals(%s)", self.config.name)
if not self.passive_node:
raise SyncError("server %s is not passive" % self.config.name)
# Try to acquire the sync-wal lock if the lock is not available,
# abort the sync-wal operation
try:
with ServerWalSyncLock(self.config.barman_lock_directory,
self.config.name, ):
try:
# Need to load data from status files: primary.info
# and sync-wals.info
sync_wals_info = self.load_sync_wals_info()
primary_info = self.load_primary_info()
# We want to exit if the compression on master is different
# from the one on the local server
if primary_info['config']['compression'] \
!= self.config.compression:
raise SyncError("Compression method on server %s "
"(%s) does not match local "
"compression method (%s) " %
(self.config.name,
primary_info['config']['compression'],
self.config.compression))
# If the first WAL that needs to be copied is older
# than the begin WAL of the first locally available backup,
# synchronisation is skipped. This means that we need
# to copy a WAL file which won't be associated to any local
# backup. Consider the following scenarios:
#
# bw: indicates the begin WAL of the first backup
# sw: the first WAL to be sync-ed
#
# The following examples use truncated names for WAL files
# (e.g. 1 instead of 000000010000000000000001)
#
# Case 1: bw = 10, sw = 9 - SKIP and wait for backup
# Case 2: bw = 10, sw = 10 - SYNC
# Case 3: bw = 10, sw = 15 - SYNC
#
# Search for the first WAL file (skip history,
# backup and partial files)
first_remote_wal = None
for wal in primary_info['wals']:
if xlog.is_wal_file(wal['name']):
first_remote_wal = wal['name']
break
first_backup_id = self.get_first_backup_id()
first_backup = self.get_backup(first_backup_id) \
if first_backup_id else None
                    # If there are no backups on the local server,
                    # no WAL synchronisation is required
if not first_backup:
output.warning("No base backup for server %s"
% self.config.name)
return
if first_backup.begin_wal > first_remote_wal:
output.warning("Skipping WAL synchronisation for "
"server %s: no available local backup "
"for %s" % (self.config.name,
first_remote_wal))
return
local_wals = []
wal_file_paths = []
for wal in primary_info['wals']:
# filter all the WALs that are smaller
# or equal to the name of the latest synchronised WAL
if sync_wals_info.last_wal and \
wal['name'] <= sync_wals_info.last_wal:
continue
                        # Generate WalFileInfo objects using the remote WAL metadata.
# This list will be used for the update of the xlog.db
wal_info_file = WalFileInfo(**wal)
local_wals.append(wal_info_file)
wal_file_paths.append(wal_info_file.relpath())
# Rsync Options:
# recursive: recursive copy of subdirectories
# perms: preserve permissions on synced files
# times: preserve modification timestamps during
# synchronisation
# protect-args: force rsync to preserve the integrity of
# rsync command arguments and filename.
# inplace: for inplace file substitution
# and update of files
rsync = Rsync(
args=['--recursive', '--perms', '--times',
'--protect-args', '--inplace'],
ssh=self.config.primary_ssh_command,
bwlimit=self.config.bandwidth_limit,
allowed_retval=(0,),
network_compression=self.config.network_compression,
path=self.path)
# Source and destination of the rsync operations
src = ':%s/' % primary_info['config']['wals_directory']
dest = '%s/' % self.config.wals_directory
# Perform the rsync copy using the list of relative paths
# obtained from the primary.info file
rsync.from_file_list(wal_file_paths, src, dest)
# If everything is synced without errors,
# update xlog.db using the list of WalFileInfo object
with self.xlogdb('a') as fxlogdb:
for wal_info in local_wals:
fxlogdb.write(wal_info.to_xlogdb_line())
# We need to update the sync-wals.info file with the latest
# synchronised WAL and the latest read position.
self.write_sync_wals_info_file(primary_info)
except CommandFailedException as e:
msg = "WAL synchronisation for server %s " \
"failed: %s" % (self.config.name, e)
output.error(msg)
return
except BaseException as e:
msg_lines = force_str(e).strip().splitlines()
# Use only the first line of exception message
# If the exception has no attached message
# use the raw type name
if not msg_lines:
msg_lines = [type(e).__name__]
output.error("WAL synchronisation for server %s "
"failed with: %s\n%s",
self.config.name, msg_lines[0],
'\n'.join(msg_lines[1:]))
except LockFileException:
output.error("Another sync-wal operation is running "
"for server %s ", self.config.name)
@staticmethod
def set_sync_starting_point(xlogdb_file, last_wal, last_position):
"""
Check if the xlog.db file has changed between two requests
from the client and set the start point for reading the file
:param file xlogdb_file: an open and readable xlog.db file object
:param str|None last_wal: last read name
:param int|None last_position: last read position
        :return int: the position that has been set
"""
# If last_position is None start reading from the beginning of the file
position = int(last_position) if last_position is not None else 0
# Seek to required position
xlogdb_file.seek(position)
        # Read 24 characters (the length of a WAL name)
wal_name = xlogdb_file.read(24)
# If the WAL name is the requested one start from last_position
if wal_name == last_wal:
# Return to the line start
xlogdb_file.seek(position)
return position
# If the file has been truncated, start over
xlogdb_file.seek(0)
return 0
def write_sync_wals_info_file(self, primary_info):
"""
Write the content of SYNC_WALS_INFO_FILE on disk
:param dict primary_info:
"""
try:
with open(os.path.join(self.config.wals_directory,
SYNC_WALS_INFO_FILE), 'w') as syncfile:
syncfile.write("%s\t%s" % (primary_info['last_name'],
primary_info['last_position']))
except (OSError, IOError):
# Wrap file access exceptions using SyncError
raise SyncError("Unable to write %s file for server %s" %
(SYNC_WALS_INFO_FILE, self.config.name))
def load_primary_info(self):
"""
Load the content of PRIMARY_INFO_FILE for the given server
:return dict: primary server information
"""
primary_info_file = os.path.join(self.config.backup_directory,
PRIMARY_INFO_FILE)
try:
with open(primary_info_file) as f:
return json.load(f)
except (OSError, IOError) as e:
# Wrap file access exceptions using SyncError
raise SyncError("Cannot open %s file for server %s: %s" % (
PRIMARY_INFO_FILE, self.config.name, e))
|
jjdmol/LOFAR
|
refs/heads/master
|
CEP/Pipeline/recipes/sip/master/selfcal_awimager.py
|
1
|
# LOFAR IMAGING PIPELINE
#
# Example recipe with simple job distribution
# Wouter Klijn, 2010
# swinbank@transientskp.org
# Nicolas Vilchez, 2014
# vilchez@astron.nl
# ------------------------------------------------------------------------------
import sys
import copy
import lofarpipe.support.lofaringredient as ingredient
from lofarpipe.support.baserecipe import BaseRecipe
from lofarpipe.support.remotecommand import RemoteCommandRecipeMixIn
from lofarpipe.support.remotecommand import ComputeJob
from lofarpipe.support.data_map import DataMap, validate_data_maps,\
align_data_maps
class selfcal_awimager(BaseRecipe, RemoteCommandRecipeMixIn):
"""
Master script for the awimager. Collects arguments from command line and
pipeline inputs.
1. Load mapfiles and validate these
    2. Run the awimager node scripts
    3. Retrieve output. Construct an output map file of the successful runs
Details regarding the implementation of the imaging step can be found in
the node recipe
**CommandLine Arguments**
    A mapfile containing (node, datafile) pairs. The measurement sets used as
    input for the awimager executable
"""
inputs = {
'executable': ingredient.ExecField(
'--executable',
help = "The full path to the awimager executable"
),
'parset': ingredient.FileField(
'-p', '--parset',
help = "The full path to a awimager configuration parset."
),
'working_directory': ingredient.StringField(
'-w', '--working-directory',
help = "Working directory used on output nodes. Results location"
),
'output_image': ingredient.StringField(
'--output-image',
help = "Path of the image to be create by the awimager"
),
'mapfile': ingredient.StringField(
'--mapfile',
help = "Full path for output mapfile. A list of the"
"successfully generated images will be written here"
),
'sourcedb_path': ingredient.StringField(
'--sourcedb-path',
help = "Full path of sourcedb used to create a mask for known sources"
),
'mask_patch_size': ingredient.FloatField(
'--mask-patch-size',
help = "Scale factor for patches in the awimager mask"
),
'autogenerate_parameters': ingredient.BoolField(
'--autogenerate-parameters',
default = True,
help = "Turns on the autogeneration of: cellsize, image-size, fov."
" MSSS 'type' functionality"
),
'specify_fov': ingredient.BoolField(
'--specify-fov',
default = False,
help = "calculated Image parameters are relative to fov, parameter"
" is active when autogenerate_parameters is False"
),
'fov': ingredient.FloatField(
'--fov',
default = 0.0,
help = "calculated Image parameters are relative to this"
" Field Of View in arcSec. This parameter is obligatory when"
" specify_fov is True"
),
'major_cycle': ingredient.IntField(
'--major_cycle',
help = "The number of the current cycle to modify the parset."
),
'nr_cycles': ingredient.IntField(
'--nr-cycles',
help = "The number major cycles."
) ,
'perform_self_cal': ingredient.BoolField(
'--perform-self-cal',
default=False,
help = "Control the usage of the self callibartion functionality"
)
}
outputs = {
'mapfile': ingredient.StringField(),
}
def go(self):
"""
This member contains all the functionality of the imager_awimager.
Functionality is all located at the node side of the script.
"""
super(selfcal_awimager, self).go()
self.logger.info("Starting imager_awimager run")
# *********************************************************************
# 1. collect the inputs and validate
input_map = DataMap.load(self.inputs['args'][0])
sourcedb_map = DataMap.load(self.inputs['sourcedb_path'])
if not validate_data_maps(input_map, sourcedb_map):
self.logger.error(
"the supplied input_ms mapfile and sourcedb mapfile"
"are incorrect. Aborting")
self.logger.error(repr(input_map))
self.logger.error(repr(sourcedb_map))
return 1
# *********************************************************************
# 2. Start the node side of the awimager recipe
# Compile the command to be executed on the remote machine
node_command = "python %s" % (self.__file__.replace("master", "nodes"))
jobs = []
output_map = copy.deepcopy(input_map)
align_data_maps(input_map, output_map, sourcedb_map)
sourcedb_map.iterator = input_map.iterator = output_map.iterator = \
DataMap.SkipIterator
for measurement_item, source_item in zip(input_map, sourcedb_map):
if measurement_item.skip or source_item.skip:
jobs.append(None)
continue
# both the sourcedb and the measurement are in a map
# unpack both
host , measurement_path = measurement_item.host, measurement_item.file
host2 , sourcedb_path = source_item.host, source_item.file
# construct and save the output name
arguments = [self.inputs['executable'],
self.environment,
self.inputs['parset'],
self.inputs['working_directory'],
self.inputs['output_image'],
measurement_path,
sourcedb_path,
self.inputs['mask_patch_size'],
self.inputs['autogenerate_parameters'],
self.inputs['specify_fov'],
self.inputs['fov'],
self.inputs['major_cycle'],
self.inputs['nr_cycles'],
self.inputs['perform_self_cal']
]
jobs.append(ComputeJob(host, node_command, arguments))
self._schedule_jobs(jobs)
# *********************************************************************
# 3. Check output of the node scripts
for job, output_item in zip(jobs, output_map):
            # job is None for skipped items
            if job is None or "image" not in job.results:
output_item.file = "failed"
output_item.skip = True
else:
output_item.file = job.results["image"]
output_item.skip = False
        # Check whether at least one run finished successfully
        successful_runs = None
        for item in output_map:
            if not item.skip:
                successful_runs = True
                break
        if not successful_runs:
self.logger.error(
"None of the started awimager run finished correct")
self.logger.error(
"No work left to be done: exiting with error status")
return 1
        # If partial success
        if self.error.isSet():
            self.logger.error("Failed awimager node run detected. Continuing "
                              "with successful tasks.")
self._store_data_map(self.inputs['mapfile'], output_map,
"mapfile containing produces awimages")
self.outputs["mapfile"] = self.inputs['mapfile']
return 0
if __name__ == "__main__":
sys.exit(selfcal_awimager().main())
|
fudong1127/python-bitcoin
|
refs/heads/master
|
testscript.py
|
4
|
#!/usr/bin/python
#
# testscript.py
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import sys
import time
import Log
import MemPool
import ChainDb
import cStringIO
from bitcoin.coredefs import NETWORKS
from bitcoin.core import CBlock
from bitcoin.serialize import ser_uint256
from bitcoin.scripteval import VerifySignature
NET_SETTINGS = {
'mainnet' : {
'log' : '/spare/tmp/testscript.log',
'db' : '/spare/tmp/chaindb'
},
'testnet3' : {
'log' : '/spare/tmp/testtestscript.log',
'db' : '/spare/tmp/chaintest'
}
}
MY_NETWORK = 'mainnet'
SETTINGS = NET_SETTINGS[MY_NETWORK]
start_height = 0
end_height = -1
if len(sys.argv) > 1:
start_height = int(sys.argv[1])
if len(sys.argv) > 2:
end_height = int(sys.argv[2])
if len(sys.argv) > 3:
SETTINGS['log'] = sys.argv[3]
log = Log.Log(SETTINGS['log'])
mempool = MemPool.MemPool(log)
chaindb = ChainDb.ChainDb(SETTINGS['db'], log, mempool,
NETWORKS[MY_NETWORK], True)
chaindb.blk_cache.max = 500
if end_height < 0 or end_height > chaindb.getheight():
end_height = chaindb.getheight()
scanned = 0
scanned_tx = 0
failures = 0
opcount = {}
SKIP_TX = {
}
def scan_tx(tx):
tx.calc_sha256()
if tx.sha256 in SKIP_TX:
return True
# log.write("...Scanning TX %064x" % (tx.sha256,))
for i in xrange(len(tx.vin)):
txin = tx.vin[i]
txfrom = chaindb.gettx(txin.prevout.hash)
if not VerifySignature(txfrom, tx, i, 0):
log.write("TX %064x/%d failed" % (tx.sha256, i))
log.write("FROMTX %064x" % (txfrom.sha256,))
log.write(txfrom.__repr__())
log.write("TOTX %064x" % (tx.sha256,))
log.write(tx.__repr__())
return False
return True
for height in xrange(end_height):
if height < start_height:
continue
heightidx = ChainDb.HeightIdx()
heightidx.deserialize(chaindb.height[str(height)])
blkhash = heightidx.blocks[0]
ser_hash = ser_uint256(blkhash)
f = cStringIO.StringIO(chaindb.blocks[ser_hash])
block = CBlock()
block.deserialize(f)
start_time = time.time()
for tx_tmp in block.vtx:
if tx_tmp.is_coinbase():
continue
scanned_tx += 1
if not scan_tx(tx_tmp):
failures += 1
sys.exit(1)
end_time = time.time()
scanned += 1
# if (scanned % 1000) == 0:
log.write("Scanned %d tx, height %d (%d failures), %.2f sec" % (
scanned_tx, height, failures, end_time - start_time))
log.write("Scanned %d tx, %d blocks (%d failures)" % (
scanned_tx, scanned, failures))
#for k,v in opcount.iteritems():
# print k, v
|
m4ns0ur/grumpy
|
refs/heads/master
|
third_party/stdlib/sre_constants.py
|
7
|
#
# Secret Labs' Regular Expression Engine
#
# various symbols used by the regular expression engine.
# run this script to update the _sre include files!
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
__all__ = [
'ANY', 'ANY_ALL', 'ASSERT', 'ASSERT_NOT', 'AT', 'ATCODES', 'AT_BEGINNING',
'AT_BEGINNING_LINE', 'AT_BEGINNING_STRING', 'AT_BOUNDARY', 'AT_END',
'AT_END_LINE', 'AT_END_STRING', 'AT_LOCALE', 'AT_LOC_BOUNDARY',
'AT_LOC_NON_BOUNDARY', 'AT_MULTILINE', 'AT_NON_BOUNDARY', 'AT_UNICODE',
'AT_UNI_BOUNDARY', 'AT_UNI_NON_BOUNDARY', 'BIGCHARSET', 'BRANCH', 'CALL',
'CATEGORY', 'CATEGORY_DIGIT', 'CATEGORY_LINEBREAK', 'CATEGORY_LOC_NOT_WORD',
'CATEGORY_LOC_WORD', 'CATEGORY_NOT_DIGIT', 'CATEGORY_NOT_LINEBREAK',
'CATEGORY_NOT_SPACE', 'CATEGORY_NOT_WORD', 'CATEGORY_SPACE',
'CATEGORY_UNI_DIGIT', 'CATEGORY_UNI_LINEBREAK', 'CATEGORY_UNI_NOT_DIGIT',
'CATEGORY_UNI_NOT_LINEBREAK', 'CATEGORY_UNI_NOT_SPACE',
'CATEGORY_UNI_NOT_WORD', 'CATEGORY_UNI_SPACE', 'CATEGORY_UNI_WORD',
'CATEGORY_WORD', 'CHARSET', 'CHCODES', 'CH_LOCALE', 'CH_UNICODE', 'FAILURE',
'GROUPREF', 'GROUPREF_EXISTS', 'GROUPREF_IGNORE', 'IN', 'INFO', 'IN_IGNORE',
'JUMP', 'LITERAL', 'LITERAL_IGNORE', 'MAGIC', 'MARK', 'MAXREPEAT',
'MAX_REPEAT', 'MAX_UNTIL', 'MIN_REPEAT', 'MIN_REPEAT_ONE', 'MIN_UNTIL',
'NEGATE', 'NOT_LITERAL', 'NOT_LITERAL_IGNORE', 'OPCODES', 'OP_IGNORE',
'RANGE', 'REPEAT', 'REPEAT_ONE', 'SRE_FLAG_DOTALL', 'SRE_FLAG_IGNORECASE',
'SRE_FLAG_LOCALE', 'SRE_FLAG_MULTILINE', 'SRE_FLAG_TEMPLATE',
'SRE_FLAG_UNICODE', 'SRE_FLAG_VERBOSE', 'SRE_INFO_CHARSET',
'SRE_INFO_LITERAL', 'SRE_INFO_PREFIX', 'SUBPATTERN', 'SUCCESS',
'SRE_FLAG_DEBUG', 'MAXCODE', 'error'
]
# update when constants are added or removed
MAGIC = 20031017
MAXCODE = 65535
# try:
# from _sre import MAXREPEAT
# except ImportError:
# import _sre
# MAXREPEAT = _sre.MAXREPEAT = 65535
MAXREPEAT = 65535
# SRE standard exception (access as sre.error)
# should this really be here?
class error(Exception):
pass
# operators
FAILURE = "failure"
SUCCESS = "success"
ANY = "any"
ANY_ALL = "any_all"
ASSERT = "assert"
ASSERT_NOT = "assert_not"
AT = "at"
BIGCHARSET = "bigcharset"
BRANCH = "branch"
CALL = "call"
CATEGORY = "category"
CHARSET = "charset"
GROUPREF = "groupref"
GROUPREF_IGNORE = "groupref_ignore"
GROUPREF_EXISTS = "groupref_exists"
IN = "in"
IN_IGNORE = "in_ignore"
INFO = "info"
JUMP = "jump"
LITERAL = "literal"
LITERAL_IGNORE = "literal_ignore"
MARK = "mark"
MAX_REPEAT = "max_repeat"
MAX_UNTIL = "max_until"
MIN_REPEAT = "min_repeat"
MIN_UNTIL = "min_until"
NEGATE = "negate"
NOT_LITERAL = "not_literal"
NOT_LITERAL_IGNORE = "not_literal_ignore"
RANGE = "range"
REPEAT = "repeat"
REPEAT_ONE = "repeat_one"
SUBPATTERN = "subpattern"
MIN_REPEAT_ONE = "min_repeat_one"
# positions
AT_BEGINNING = "at_beginning"
AT_BEGINNING_LINE = "at_beginning_line"
AT_BEGINNING_STRING = "at_beginning_string"
AT_BOUNDARY = "at_boundary"
AT_NON_BOUNDARY = "at_non_boundary"
AT_END = "at_end"
AT_END_LINE = "at_end_line"
AT_END_STRING = "at_end_string"
AT_LOC_BOUNDARY = "at_loc_boundary"
AT_LOC_NON_BOUNDARY = "at_loc_non_boundary"
AT_UNI_BOUNDARY = "at_uni_boundary"
AT_UNI_NON_BOUNDARY = "at_uni_non_boundary"
# categories
CATEGORY_DIGIT = "category_digit"
CATEGORY_NOT_DIGIT = "category_not_digit"
CATEGORY_SPACE = "category_space"
CATEGORY_NOT_SPACE = "category_not_space"
CATEGORY_WORD = "category_word"
CATEGORY_NOT_WORD = "category_not_word"
CATEGORY_LINEBREAK = "category_linebreak"
CATEGORY_NOT_LINEBREAK = "category_not_linebreak"
CATEGORY_LOC_WORD = "category_loc_word"
CATEGORY_LOC_NOT_WORD = "category_loc_not_word"
CATEGORY_UNI_DIGIT = "category_uni_digit"
CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit"
CATEGORY_UNI_SPACE = "category_uni_space"
CATEGORY_UNI_NOT_SPACE = "category_uni_not_space"
CATEGORY_UNI_WORD = "category_uni_word"
CATEGORY_UNI_NOT_WORD = "category_uni_not_word"
CATEGORY_UNI_LINEBREAK = "category_uni_linebreak"
CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak"
OPCODES = [
# failure=0 success=1 (just because it looks better that way :-)
FAILURE, SUCCESS,
ANY, ANY_ALL,
ASSERT, ASSERT_NOT,
AT,
BRANCH,
CALL,
CATEGORY,
CHARSET, BIGCHARSET,
GROUPREF, GROUPREF_EXISTS, GROUPREF_IGNORE,
IN, IN_IGNORE,
INFO,
JUMP,
LITERAL, LITERAL_IGNORE,
MARK,
MAX_UNTIL,
MIN_UNTIL,
NOT_LITERAL, NOT_LITERAL_IGNORE,
NEGATE,
RANGE,
REPEAT,
REPEAT_ONE,
SUBPATTERN,
MIN_REPEAT_ONE
]
ATCODES = [
AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY,
AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING,
AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY,
AT_UNI_NON_BOUNDARY
]
CHCODES = [
CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE,
CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD,
CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD,
CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT,
CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD,
CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK,
CATEGORY_UNI_NOT_LINEBREAK
]
def makedict(list):
d = {}
i = 0
for item in list:
d[item] = i
i = i + 1
return d
OPCODES = makedict(OPCODES)
ATCODES = makedict(ATCODES)
CHCODES = makedict(CHCODES)
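# After the conversion above each name maps to its position in the list, e.g.
# OPCODES[FAILURE] == 0 and OPCODES[SUCCESS] == 1.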
# replacement operations for "ignore case" mode
OP_IGNORE = {
GROUPREF: GROUPREF_IGNORE,
IN: IN_IGNORE,
LITERAL: LITERAL_IGNORE,
NOT_LITERAL: NOT_LITERAL_IGNORE
}
AT_MULTILINE = {
AT_BEGINNING: AT_BEGINNING_LINE,
AT_END: AT_END_LINE
}
AT_LOCALE = {
AT_BOUNDARY: AT_LOC_BOUNDARY,
AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY
}
AT_UNICODE = {
AT_BOUNDARY: AT_UNI_BOUNDARY,
AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY
}
CH_LOCALE = {
CATEGORY_DIGIT: CATEGORY_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE,
CATEGORY_WORD: CATEGORY_LOC_WORD,
CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK
}
CH_UNICODE = {
CATEGORY_DIGIT: CATEGORY_UNI_DIGIT,
CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT,
CATEGORY_SPACE: CATEGORY_UNI_SPACE,
CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE,
CATEGORY_WORD: CATEGORY_UNI_WORD,
CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD,
CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK,
CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK
}
# flags
SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking)
SRE_FLAG_IGNORECASE = 2 # case insensitive
SRE_FLAG_LOCALE = 4 # honour system locale
SRE_FLAG_MULTILINE = 8 # treat target as multiline string
SRE_FLAG_DOTALL = 16 # treat target as a single string
SRE_FLAG_UNICODE = 32 # use unicode locale
SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments
SRE_FLAG_DEBUG = 128 # debugging
# flags for INFO primitive
SRE_INFO_PREFIX = 1 # has prefix
SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix)
SRE_INFO_CHARSET = 4 # pattern starts with character from given set
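# For illustration (not part of the original module): the flag values are
# powers of two, so they combine as a bitmask, e.g.
#   SRE_FLAG_IGNORECASE | SRE_FLAG_MULTILINE == 10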
# if __name__ == "__main__":
# def dump(f, d, prefix):
# items = d.items()
# items.sort(key=lambda a: a[1])
# for k, v in items:
# f.write("#define %s_%s %s\n" % (prefix, k.upper(), v))
# f = open("sre_constants.h", "w")
# f.write("""\
# /*
# * Secret Labs' Regular Expression Engine
# *
# * regular expression matching engine
# *
# * NOTE: This file is generated by sre_constants.py. If you need
# * to change anything in here, edit sre_constants.py and run it.
# *
# * Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
# *
# * See the _sre.c file for information on usage and redistribution.
# */
# """)
# f.write("#define SRE_MAGIC %d\n" % MAGIC)
# dump(f, OPCODES, "SRE_OP")
# dump(f, ATCODES, "SRE")
# dump(f, CHCODES, "SRE")
# f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE)
# f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE)
# f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE)
# f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE)
# f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL)
# f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE)
# f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE)
# f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX)
# f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL)
# f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET)
# f.close()
# print "done"
|
honestbleeps/django-oauth-toolkit
|
refs/heads/master
|
oauth2_provider/views/generic.py
|
10
|
from django.views.generic import View
from oauthlib.oauth2 import Server
from ..settings import oauth2_settings
from .mixins import ProtectedResourceMixin, ScopedResourceMixin, ReadWriteScopedResourceMixin
class ProtectedResourceView(ProtectedResourceMixin, View):
"""
Generic view protecting resources by providing OAuth2 authentication out of the box
"""
server_class = Server
validator_class = oauth2_settings.OAUTH2_VALIDATOR_CLASS
oauthlib_backend_class = oauth2_settings.OAUTH2_BACKEND_CLASS
class ScopedProtectedResourceView(ScopedResourceMixin, ProtectedResourceView):
"""
Generic view protecting resources by providing OAuth2 authentication and Scopes handling
out of the box
"""
pass
class ReadWriteScopedResourceView(ReadWriteScopedResourceMixin, ProtectedResourceView):
"""
Generic view protecting resources with OAuth2 authentication and read/write scopes.
GET, HEAD, OPTIONS http methods require "read" scope. Otherwise "write" scope is required.
"""
pass
|
wangmingjob/OnlineJudge
|
refs/heads/master
|
contest/migrations/0006_merge.py
|
6
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contest', '0005_contestsubmission'),
('contest', '0005_contestproblem_score'),
]
operations = [
]
|
salamer/django
|
refs/heads/master
|
tests/test_runner/runner.py
|
193
|
from django.test.runner import DiscoverRunner
class CustomOptionsTestRunner(DiscoverRunner):
def __init__(self, verbosity=1, interactive=True, failfast=True, option_a=None, option_b=None, option_c=None, **kwargs):
super(CustomOptionsTestRunner, self).__init__(verbosity=verbosity, interactive=interactive,
failfast=failfast)
self.option_a = option_a
self.option_b = option_b
self.option_c = option_c
@classmethod
def add_arguments(cls, parser):
        parser.add_argument('--option_a', '-a', action='store', dest='option_a', default='1')
        parser.add_argument('--option_b', '-b', action='store', dest='option_b', default='2')
        parser.add_argument('--option_c', '-c', action='store', dest='option_c', default='3')
def run_tests(self, test_labels, extra_tests=None, **kwargs):
print("%s:%s:%s" % (self.option_a, self.option_b, self.option_c))
|
edxnercel/edx-platform
|
refs/heads/master
|
common/lib/calc/calc/__init__.py
|
270
|
"""
Ideally, we wouldn't need to pull in all the calc symbols here,
but courses were using 'import calc', so we need this for
backwards compatibility
"""
from calc import *
|
sarakha63/persomov
|
refs/heads/master
|
couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/yourupload.py
|
35
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
class YourUploadIE(InfoExtractor):
_VALID_URL = r'''(?x)https?://(?:www\.)?
(?:yourupload\.com/watch|
embed\.yourupload\.com|
embed\.yucache\.net
)/(?P<id>[A-Za-z0-9]+)
'''
_TESTS = [
{
'url': 'http://yourupload.com/watch/14i14h',
'md5': 'bf5c2f95c4c917536e80936af7bc51e1',
'info_dict': {
'id': '14i14h',
'ext': 'mp4',
'title': 'BigBuckBunny_320x180.mp4',
'thumbnail': 're:^https?://.*\.jpe?g',
}
},
{
'url': 'http://embed.yourupload.com/14i14h',
'only_matching': True,
},
{
'url': 'http://embed.yucache.net/14i14h?client_file_id=803349',
'only_matching': True,
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
url = 'http://embed.yucache.net/{0:}'.format(video_id)
webpage = self._download_webpage(url, video_id)
title = self._og_search_title(webpage)
thumbnail = self._og_search_thumbnail(webpage)
url = self._og_search_video_url(webpage)
formats = [{
'format_id': 'sd',
'url': url,
}]
return {
'id': video_id,
'title': title,
'formats': formats,
'thumbnail': thumbnail,
}
|
zhouyuan/teuthology
|
refs/heads/master
|
scripts/test/test_nuke.py
|
13
|
from script import Script
class TestNuke(Script):
script_name = 'teuthology-nuke'
|
nathanial/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/tests/modeltests/validators/models.py
|
12133432
| |
primiano/blink-gitcs
|
refs/heads/master
|
Tools/Scripts/webkitpy/common/net/networktransaction.py
|
190
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import time
import urllib2
_log = logging.getLogger(__name__)
class NetworkTimeout(Exception):
def __str__(self):
return 'NetworkTimeout'
class NetworkTransaction(object):
def __init__(self, initial_backoff_seconds=10, grown_factor=1.5, timeout_seconds=(10 * 60), convert_404_to_None=False):
self._initial_backoff_seconds = initial_backoff_seconds
self._grown_factor = grown_factor
self._timeout_seconds = timeout_seconds
self._convert_404_to_None = convert_404_to_None
def run(self, request):
self._total_sleep = 0
self._backoff_seconds = self._initial_backoff_seconds
while True:
try:
return request()
except urllib2.HTTPError, e:
if self._convert_404_to_None and e.code == 404:
return None
self._check_for_timeout()
_log.warn("Received HTTP status %s loading \"%s\". Retrying in %s seconds..." % (e.code, e.filename, self._backoff_seconds))
self._sleep()
def _check_for_timeout(self):
if self._total_sleep + self._backoff_seconds > self._timeout_seconds:
raise NetworkTimeout()
def _sleep(self):
time.sleep(self._backoff_seconds)
self._total_sleep += self._backoff_seconds
self._backoff_seconds *= self._grown_factor
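# A minimal usage sketch (hypothetical URL): run() retries the callable with
# exponential backoff until the cumulative sleep would exceed timeout_seconds.
#
#   transaction = NetworkTransaction(convert_404_to_None=True)
#   body = transaction.run(lambda: urllib2.urlopen("http://example.com/api").read())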
|
antoinecarme/pyaf
|
refs/heads/master
|
tests/artificial/transf_RelativeDifference/trend_MovingAverage/cycle_30/ar_12/test_artificial_1024_RelativeDifference_MovingAverage_30_12_100.py
|
1
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 30, transform = "RelativeDifference", sigma = 0.0, exog_count = 100, ar_order = 12);
|
heeraj123/oh-mainline
|
refs/heads/master
|
vendor/packages/docutils/test/test_parsers/test_rst/test_directives/test_compound.py
|
18
|
#! /usr/bin/env python
# $Id: test_compound.py 7072 2011-07-06 15:52:30Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
Tests for the 'compound' directive from body.py.
"""
from __init__ import DocutilsTestSupport
def suite():
s = DocutilsTestSupport.ParserTestSuite()
s.generateTests(totest)
return s
totest = {}
totest['compound'] = [
["""\
.. compound::
Compound paragraphs are single logical paragraphs
which contain embedded
* lists
* tables
* literal blocks
* and other body elements
and are split into multiple physical paragraphs.
""",
"""\
<document source="test data">
<compound>
<paragraph>
Compound paragraphs are single logical paragraphs
which contain embedded
<bullet_list bullet="*">
<list_item>
<paragraph>
lists
<list_item>
<paragraph>
tables
<list_item>
<paragraph>
literal blocks
<list_item>
<paragraph>
and other body elements
<paragraph>
and are split into multiple physical paragraphs.
"""],
["""\
.. compound::
:name: interesting
:class: log
This is an extremely interesting compound paragraph containing a
simple paragraph, a literal block with some useless log messages::
Connecting... OK
Transmitting data... OK
Disconnecting... OK
and another simple paragraph which is actually just a continuation
of the first simple paragraph, with the literal block in between.
""",
"""\
<document source="test data">
<compound classes="log" ids="interesting" names="interesting">
<paragraph>
This is an extremely interesting compound paragraph containing a
simple paragraph, a literal block with some useless log messages:
<literal_block xml:space="preserve">
Connecting... OK
Transmitting data... OK
Disconnecting... OK
<paragraph>
and another simple paragraph which is actually just a continuation
of the first simple paragraph, with the literal block in between.
"""],
["""\
.. compound:: content may start on same line
second paragraph
""",
"""\
<document source="test data">
<compound>
<paragraph>
content may start on same line
<paragraph>
second paragraph
"""],
]
if __name__ == '__main__':
import unittest
unittest.main(defaultTest='suite')
|
Sparker0i/fosswebsite
|
refs/heads/master
|
fosswebsite/wsgi.py
|
8
|
"""
WSGI config for fosswebsite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fosswebsite.settings")
application = get_wsgi_application()
|
CUFCTL/DLBD
|
refs/heads/master
|
face-detection-code/object_detection/builders/box_predictor_builder.py
|
2
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Function to build box predictor from configuration."""
from object_detection.core import box_predictor
from object_detection.protos import box_predictor_pb2
def build(argscope_fn, box_predictor_config, is_training, num_classes):
"""Builds box predictor based on the configuration.
Builds box predictor based on the configuration. See box_predictor.proto for
configurable options. Also, see box_predictor.py for more details.
Args:
argscope_fn: A function that takes the following inputs:
* hyperparams_pb2.Hyperparams proto
* a boolean indicating if the model is in training mode.
and returns a tf slim argscope for Conv and FC hyperparameters.
box_predictor_config: box_predictor_pb2.BoxPredictor proto containing
configuration.
    is_training: Whether the model is in training mode.
num_classes: Number of classes to predict.
Returns:
box_predictor: box_predictor.BoxPredictor object.
Raises:
ValueError: On unknown box predictor.
"""
if not isinstance(box_predictor_config, box_predictor_pb2.BoxPredictor):
raise ValueError('box_predictor_config not of type '
'box_predictor_pb2.BoxPredictor.')
box_predictor_oneof = box_predictor_config.WhichOneof('box_predictor_oneof')
if box_predictor_oneof == 'convolutional_box_predictor':
conv_box_predictor = box_predictor_config.convolutional_box_predictor
conv_hyperparams = argscope_fn(conv_box_predictor.conv_hyperparams,
is_training)
box_predictor_object = box_predictor.ConvolutionalBoxPredictor(
is_training=is_training,
num_classes=num_classes,
conv_hyperparams=conv_hyperparams,
min_depth=conv_box_predictor.min_depth,
max_depth=conv_box_predictor.max_depth,
num_layers_before_predictor=(conv_box_predictor.
num_layers_before_predictor),
use_dropout=conv_box_predictor.use_dropout,
dropout_keep_prob=conv_box_predictor.dropout_keep_probability,
kernel_size=conv_box_predictor.kernel_size,
box_code_size=conv_box_predictor.box_code_size,
apply_sigmoid_to_scores=conv_box_predictor.apply_sigmoid_to_scores,
class_prediction_bias_init=(conv_box_predictor.
class_prediction_bias_init),
use_depthwise=conv_box_predictor.use_depthwise
)
return box_predictor_object
if box_predictor_oneof == 'weight_shared_convolutional_box_predictor':
conv_box_predictor = (box_predictor_config.
weight_shared_convolutional_box_predictor)
conv_hyperparams = argscope_fn(conv_box_predictor.conv_hyperparams,
is_training)
box_predictor_object = box_predictor.WeightSharedConvolutionalBoxPredictor(
is_training=is_training,
num_classes=num_classes,
conv_hyperparams=conv_hyperparams,
depth=conv_box_predictor.depth,
num_layers_before_predictor=(conv_box_predictor.
num_layers_before_predictor),
kernel_size=conv_box_predictor.kernel_size,
box_code_size=conv_box_predictor.box_code_size,
class_prediction_bias_init=conv_box_predictor.class_prediction_bias_init
)
return box_predictor_object
if box_predictor_oneof == 'mask_rcnn_box_predictor':
mask_rcnn_box_predictor = box_predictor_config.mask_rcnn_box_predictor
fc_hyperparams = argscope_fn(mask_rcnn_box_predictor.fc_hyperparams,
is_training)
conv_hyperparams = None
if mask_rcnn_box_predictor.HasField('conv_hyperparams'):
conv_hyperparams = argscope_fn(mask_rcnn_box_predictor.conv_hyperparams,
is_training)
box_predictor_object = box_predictor.MaskRCNNBoxPredictor(
is_training=is_training,
num_classes=num_classes,
fc_hyperparams=fc_hyperparams,
use_dropout=mask_rcnn_box_predictor.use_dropout,
dropout_keep_prob=mask_rcnn_box_predictor.dropout_keep_probability,
box_code_size=mask_rcnn_box_predictor.box_code_size,
conv_hyperparams=conv_hyperparams,
predict_instance_masks=mask_rcnn_box_predictor.predict_instance_masks,
mask_height=mask_rcnn_box_predictor.mask_height,
mask_width=mask_rcnn_box_predictor.mask_width,
mask_prediction_num_conv_layers=(
mask_rcnn_box_predictor.mask_prediction_num_conv_layers),
mask_prediction_conv_depth=(
mask_rcnn_box_predictor.mask_prediction_conv_depth),
predict_keypoints=mask_rcnn_box_predictor.predict_keypoints)
return box_predictor_object
if box_predictor_oneof == 'rfcn_box_predictor':
rfcn_box_predictor = box_predictor_config.rfcn_box_predictor
conv_hyperparams = argscope_fn(rfcn_box_predictor.conv_hyperparams,
is_training)
box_predictor_object = box_predictor.RfcnBoxPredictor(
is_training=is_training,
num_classes=num_classes,
conv_hyperparams=conv_hyperparams,
crop_size=[rfcn_box_predictor.crop_height,
rfcn_box_predictor.crop_width],
num_spatial_bins=[rfcn_box_predictor.num_spatial_bins_height,
rfcn_box_predictor.num_spatial_bins_width],
depth=rfcn_box_predictor.depth,
box_code_size=rfcn_box_predictor.box_code_size)
return box_predictor_object
raise ValueError('Unknown box predictor: {}'.format(box_predictor_oneof))
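# A minimal usage sketch (hypothetical argscope_fn and hyperparams; not part of
# the original module):
#
#   config = box_predictor_pb2.BoxPredictor()
#   config.convolutional_box_predictor.conv_hyperparams.CopyFrom(my_conv_hyperparams)
#   predictor = build(my_argscope_fn, config, is_training=True, num_classes=90)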
|
R4chel/RecommendationGraph
|
refs/heads/master
|
recgraph/crawler/crawler.py
|
1
|
'''
Created July 28, 2014
@author Adam Campbell, Rachel Ehrlich, Max Fowler
'''
import json
import re
import time
from py2neo import neo4j, cypher
import pywikibot
import mwparserfromhell
from recgraph.settings import GRAPHDB
def get_pages(infobox_type):
site = pywikibot.getSite('en')
infobox_template = pywikibot.Page(site, "Template:Infobox " + infobox_type)
pages = list(infobox_template.embeddedin(False, 0))
return pages
def parse_title(title):
site = pywikibot.getSite('en')
page = pywikibot.Page(site, title)
return parse_page(page)
def parse_page(page):
text = page.get()
return mwparserfromhell.parse(text)
def clean(s):
''' remove references and comments '''
if s is not None:
s = re.sub("<ref>([^<>])*</ref>","",s)
s = re.sub("<ref name([^<>])*/>","",s)
s = re.sub("<ref name([^<>])*>([^<>])*</ref>","",s)
s = re.sub("<!--([^<>])*-->","",s)
s = re.sub("<br>","")
s = re.sub("<br/>",", ",s)
s = re.sub("<br />",", ",s)
s = s.strip()
s = s.strip(',')
return s
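# For illustration (hypothetical wikitext): clean() strips reference tags and
# HTML comments, e.g.
#   '[[Richard Pryor]]<ref name=rp/><!-- needs cite -->'  ->  '[[Richard Pryor]]'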
def extract_title(s):
if s is not None:
s = re.sub("\[\[","",s)
s = re.sub("\]\]", "",s)
l = s.split("|")
s = l[0]
return s
'''def get_infobox(page):
#check if title is in list
val = wikipedia_utils.GetWikipediaPage(title)
if val is None:
return None
res = wikipedia_utils.ParseTemplates(val["text"])
infobox_comedian = dict(res["templates"]).get("Infobox comedian")
return infobox_comedian
'''
def extract_list(str):
if str is None:
return
str = clean(str)
list = str.split(",")
clean_list = []
for item in list:
clean_list.append(extract_title(item).encode())
return clean_list
def extract_names(str):
if str is None:
return
regex = re.compile('\[\[([^\]\[]*)\]\]')
clean_str = clean(str)
print clean_str
    m = regex.match(clean_str)
if m is None:
return []
return m.groups()
def extract_names_for_db(str):
if str is None:
return
regex = re.compile('\[\[([^\]\[]*)\]\]')
clean_str = clean(str)
print clean_str
m = re.findall(regex, clean_str)
if m is None:
return []
return m
def write_edges_to_db(db, node, list, category, relationship, is_forward):
if list is None:
return
for item in list:
item = extract_title(item).strip().lower().encode()
if len(item) == 0:
continue
new_node = db.get_or_create_indexed_node("NameIndex", category, item, {category: item})
new_node.add_labels(category.title())
        path = neo4j.Path(node, relationship, new_node) if is_forward else neo4j.Path(new_node, relationship, node)
path.get_or_create(db)
def store_edges_to_db(db,infobox_label,rel_label, is_forward_rel):
nodes = get_all_comedian_nodes(db)
print len(nodes)
i = 0
for row in nodes:
time.sleep(2) #
print i
i += 1
node = row[0]
props = node.get_properties()
name = props['name']
infobox = get_infobox(name)
if infobox is not None:
list = extract_names_for_db(infobox.get(infobox_label))
write_edges_to_db(db, node, list,infobox_label, rel_label, is_forward_rel)
def open_db():
return GRAPHDB
def store_people_to_db(people, db):
for person in people:
name = person.title().strip().encode()
node = db.get_or_create_indexed_node("NameIndex", "name", name, {"name": name})
node.add_labels("Person", "ComedianInfobox")
def get_all_people_nodes(db):
nodes, metadata = cypher.execute(db, "START n=node(*) MATCH (n:Person) RETURN n")
return nodes
def get_all_comedian_nodes(db):
nodes, metadata = cypher.execute(db, "START n=node(*) MATCH (n:ComedianInfobox) RETURN n")
return nodes
def print_influence_edge_list_for_gephi(db):
fout = open('influence_edge_list_gephi.csv', 'w')
influence_results, metadata = cypher.execute(db, "START n=node(*) MATCH (n)-[b:INFLUENCED]->(c) RETURN n,c")
for row in influence_results:
person1_props = row[0].get_properties()
person1 = person1_props['name']
person2_props = row[1].get_properties()
person2 = person2_props['name']
str = person2 + "," + person1
fout.write(str+"\r\n")
fout.close()
def print_edge_list(db):
fout = open('topic_edge_list.txt', 'w')
topic_results, metadata = cypher.execute(db, "START n=node(*) MATCH (n)-[b:SPOKE_ABOUT]->(c) RETURN n,c")
for row in topic_results:
person_props = row[0].get_properties()
person = person_props['name']
topic_props = row[1].get_properties()
topic = topic_props['subject']
str = person + "#" + topic + "#S"
print str
fout.write(str+"\r\n")
fout.close()
fout = open('influence_edge_list.txt', 'w')
influence_results, metadata = cypher.execute(db, "START n=node(*) MATCH (n)-[b:INFLUENCED]->(c) RETURN n,c")
for row in influence_results:
person1_props = row[0].get_properties()
person1 = person1_props['name']
person2_props = row[1].get_properties()
person2 = person2_props['name']
str = person1 + "#" + person2 + "#I"
fout.write(str+"\r\n")
fout.close()
def convert_to_json_influence(db):
name_dict = {}
nodes_list = []
edge_list = []
nodes, metadata = cypher.execute(db, "START n=node(*) MATCH (n:ComedianInfobox) RETURN n")
i = 0
for row in nodes:
node = row[0]
props = node.get_properties()
name = props['name']
name_dict[name] = i
json_str = '{"name": "'+ name + '"}'
nodes_list.append(json_str)
i += 1
nodes_list_str = ",".join(nodes_list)
influence_results, metadata = cypher.execute(db, "START n=node(*) MATCH (n:ComedianInfobox)-[b:INFLUENCED]->(c:ComedianInfobox) RETURN n,c")
for row in influence_results:
person1_props = row[0].get_properties()
person1_name = person1_props['name']
person1 = name_dict[person1_name]
person2_props = row[1].get_properties()
person2_name = person2_props['name']
person2 = name_dict[person2_name]
json_str = '{"source":' + str(person1) + ', "target": '+ str(person2) + '}'
edge_list.append(json_str)
edge_list_str = ",".join(edge_list)
fout = open('influences_json.json','w')
complete_json_str = '{ "nodes":[' + nodes_list_str + '], "links":[' + edge_list_str + ']}'
json.dump(complete_json_str, fout, indent=4)
fout.close()
|
zainabg/NOX
|
refs/heads/master
|
doc/manual/source/conf.py
|
14
|
# -*- coding: utf-8 -*-
#
# NOX documentation build configuration file, created by
# sphinx-quickstart on Sun May 18 15:09:51 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import sys, os
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.append(os.path.abspath('some/directory'))
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
#extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'NOX'
copyright = '2008, Nicira Networks'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
version = '0.6'
# The full version, including alpha/beta/rc tags.
release = '0.6.0'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
# We exclude anything whose name begins with . or #.
# This makes it more pleasant to use Emacs (which creates temporary
# files whose names begin with #) or Vim (which creates temporary
# files whose names begin with .) with Sphinx.
import os
unused_docs = []
for root, dirs, files in os.walk('.'):
for file in files:
if file.startswith('.') or file.startswith('#'):
fullName = os.path.join(root, file)
unused_docs += [fullName[2:]]
# List of directories, relative to source directories, that shouldn't be searched
# for source files.
#exclude_dirs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'sphinxdoc.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# The name of an image file (within the static path) to place at the top of
# the sidebar.
#html_logo = "nox-icon-small.jpg"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['.static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_use_modindex = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
html_copy_source = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'NOXdoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('index', 'NOX.tex', 'NOX Documentation', 'Nicira Networks', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
|
udoprog/metap2p
|
refs/heads/master
|
share/metap2p_service/controllers/__init__.py
|
12133432
| |
pilou-/ansible
|
refs/heads/devel
|
test/units/mock/__init__.py
|
12133432
| |
Affirm/moto
|
refs/heads/master
|
moto/ec2/responses/security_groups.py
|
3
|
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from moto.ec2.utils import filters_from_querystring
def try_parse_int(value, default=None):
try:
return int(value)
except (TypeError, ValueError):
return default
class SecurityGroups(BaseResponse):
def _process_rules_from_querystring(self):
group_name_or_id = (self._get_param('GroupName') or
self._get_param('GroupId'))
querytree = {}
for key, value in self.querystring.items():
key_splitted = key.split('.')
key_splitted = [try_parse_int(e, e) for e in key_splitted]
d = querytree
for subkey in key_splitted[:-1]:
if subkey not in d:
d[subkey] = {}
d = d[subkey]
d[key_splitted[-1]] = value
ip_permissions = querytree.get('IpPermissions') or {}
for ip_permission_idx in sorted(ip_permissions.keys()):
ip_permission = ip_permissions[ip_permission_idx]
ip_protocol = ip_permission.get('IpProtocol', [None])[0]
from_port = ip_permission.get('FromPort', [None])[0]
to_port = ip_permission.get('ToPort', [None])[0]
ip_ranges = []
ip_ranges_tree = ip_permission.get('IpRanges') or {}
for ip_range_idx in sorted(ip_ranges_tree.keys()):
ip_ranges.append(ip_ranges_tree[ip_range_idx]['CidrIp'][0])
source_groups = []
source_group_ids = []
groups_tree = ip_permission.get('Groups') or {}
for group_idx in sorted(groups_tree.keys()):
group_dict = groups_tree[group_idx]
if 'GroupId' in group_dict:
source_group_ids.append(group_dict['GroupId'][0])
elif 'GroupName' in group_dict:
source_groups.append(group_dict['GroupName'][0])
yield (group_name_or_id, ip_protocol, from_port, to_port, ip_ranges,
source_groups, source_group_ids)
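    # For reference, querystring keys of the following shape (hypothetical
    # values) are folded into the nested ``querytree`` dict parsed above:
    #   IpPermissions.1.IpProtocol = tcp
    #   IpPermissions.1.FromPort = 22
    #   IpPermissions.1.ToPort = 22
    #   IpPermissions.1.IpRanges.1.CidrIp = 10.0.0.0/8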
def authorize_security_group_egress(self):
if self.is_not_dryrun('GrantSecurityGroupEgress'):
for args in self._process_rules_from_querystring():
self.ec2_backend.authorize_security_group_egress(*args)
return AUTHORIZE_SECURITY_GROUP_EGRESS_RESPONSE
def authorize_security_group_ingress(self):
if self.is_not_dryrun('GrantSecurityGroupIngress'):
for args in self._process_rules_from_querystring():
self.ec2_backend.authorize_security_group_ingress(*args)
return AUTHORIZE_SECURITY_GROUP_INGRESS_REPONSE
def create_security_group(self):
name = self._get_param('GroupName')
description = self._get_param('GroupDescription')
vpc_id = self._get_param('VpcId')
if self.is_not_dryrun('CreateSecurityGroup'):
group = self.ec2_backend.create_security_group(
name, description, vpc_id=vpc_id)
template = self.response_template(CREATE_SECURITY_GROUP_RESPONSE)
return template.render(group=group)
def delete_security_group(self):
# TODO this should raise an error if there are instances in the group.
# See
# http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DeleteSecurityGroup.html
name = self._get_param('GroupName')
sg_id = self._get_param('GroupId')
if self.is_not_dryrun('DeleteSecurityGroup'):
if name:
self.ec2_backend.delete_security_group(name)
elif sg_id:
self.ec2_backend.delete_security_group(group_id=sg_id)
return DELETE_GROUP_RESPONSE
def describe_security_groups(self):
groupnames = self._get_multi_param("GroupName")
group_ids = self._get_multi_param("GroupId")
filters = filters_from_querystring(self.querystring)
groups = self.ec2_backend.describe_security_groups(
group_ids=group_ids,
groupnames=groupnames,
filters=filters
)
template = self.response_template(DESCRIBE_SECURITY_GROUPS_RESPONSE)
return template.render(groups=groups)
def revoke_security_group_egress(self):
if self.is_not_dryrun('RevokeSecurityGroupEgress'):
for args in self._process_rules_from_querystring():
success = self.ec2_backend.revoke_security_group_egress(*args)
if not success:
return "Could not find a matching egress rule", dict(status=404)
return REVOKE_SECURITY_GROUP_EGRESS_RESPONSE
def revoke_security_group_ingress(self):
if self.is_not_dryrun('RevokeSecurityGroupIngress'):
for args in self._process_rules_from_querystring():
self.ec2_backend.revoke_security_group_ingress(*args)
return REVOKE_SECURITY_GROUP_INGRESS_REPONSE
CREATE_SECURITY_GROUP_RESPONSE = """<CreateSecurityGroupResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
<groupId>{{ group.id }}</groupId>
</CreateSecurityGroupResponse>"""
DELETE_GROUP_RESPONSE = """<DeleteSecurityGroupResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DeleteSecurityGroupResponse>"""
DESCRIBE_SECURITY_GROUPS_RESPONSE = """<DescribeSecurityGroupsResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<securityGroupInfo>
{% for group in groups %}
<item>
<ownerId>123456789012</ownerId>
<groupId>{{ group.id }}</groupId>
<groupName>{{ group.name }}</groupName>
<groupDescription>{{ group.description }}</groupDescription>
{% if group.vpc_id %}
<vpcId>{{ group.vpc_id }}</vpcId>
{% endif %}
<ipPermissions>
{% for rule in group.ingress_rules %}
<item>
<ipProtocol>{{ rule.ip_protocol }}</ipProtocol>
{% if rule.from_port %}
<fromPort>{{ rule.from_port }}</fromPort>
{% endif %}
{% if rule.to_port %}
<toPort>{{ rule.to_port }}</toPort>
{% endif %}
<groups>
{% for source_group in rule.source_groups %}
<item>
<userId>123456789012</userId>
<groupId>{{ source_group.id }}</groupId>
<groupName>{{ source_group.name }}</groupName>
</item>
{% endfor %}
</groups>
<ipRanges>
{% for ip_range in rule.ip_ranges %}
<item>
<cidrIp>{{ ip_range }}</cidrIp>
</item>
{% endfor %}
</ipRanges>
</item>
{% endfor %}
</ipPermissions>
<ipPermissionsEgress>
{% for rule in group.egress_rules %}
<item>
<ipProtocol>{{ rule.ip_protocol }}</ipProtocol>
<fromPort>{{ rule.from_port }}</fromPort>
<toPort>{{ rule.to_port }}</toPort>
<groups>
{% for source_group in rule.source_groups %}
<item>
<userId>123456789012</userId>
<groupId>{{ source_group.id }}</groupId>
<groupName>{{ source_group.name }}</groupName>
</item>
{% endfor %}
</groups>
<ipRanges>
{% for ip_range in rule.ip_ranges %}
<item>
<cidrIp>{{ ip_range }}</cidrIp>
</item>
{% endfor %}
</ipRanges>
</item>
{% endfor %}
</ipPermissionsEgress>
<tagSet>
{% for tag in group.get_tags() %}
<item>
<resourceId>{{ tag.resource_id }}</resourceId>
<resourceType>{{ tag.resource_type }}</resourceType>
<key>{{ tag.key }}</key>
<value>{{ tag.value }}</value>
</item>
{% endfor %}
</tagSet>
</item>
{% endfor %}
</securityGroupInfo>
</DescribeSecurityGroupsResponse>"""
AUTHORIZE_SECURITY_GROUP_INGRESS_REPONSE = """<AuthorizeSecurityGroupIngressResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</AuthorizeSecurityGroupIngressResponse>"""
REVOKE_SECURITY_GROUP_INGRESS_REPONSE = """<RevokeSecurityGroupIngressResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</RevokeSecurityGroupIngressResponse>"""
AUTHORIZE_SECURITY_GROUP_EGRESS_RESPONSE = """
<AuthorizeSecurityGroupEgressResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</AuthorizeSecurityGroupEgressResponse>"""
REVOKE_SECURITY_GROUP_EGRESS_RESPONSE = """<RevokeSecurityGroupEgressResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</RevokeSecurityGroupEgressResponse>"""
|
jimi-c/ansible
|
refs/heads/devel
|
test/units/cli/test_vault.py
|
57
|
# (c) 2017, Adrian Likins <alikins@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch
from units.mock.vault_helper import TextVaultSecret
from ansible import errors
from ansible.cli.vault import VaultCLI
# TODO: make these tests assert something, likely by verifying
# mock calls
class TestVaultCli(unittest.TestCase):
def setUp(self):
self.tty_patcher = patch('ansible.cli.sys.stdin.isatty', return_value=False)
self.mock_isatty = self.tty_patcher.start()
def tearDown(self):
self.tty_patcher.stop()
def test_parse_empty(self):
cli = VaultCLI([])
self.assertRaisesRegexp(errors.AnsibleOptionsError,
'.*Missing required action.*',
cli.parse)
# FIXME: something weird seems to be afoot when parsing actions
# cli = VaultCLI(args=['view', '/dev/null/foo', 'mysecret3'])
# will skip '/dev/null/foo'. something in cli.CLI.set_action() ?
        # maybe self.args gets modified in a loop?
def test_parse_view_file(self):
cli = VaultCLI(args=['ansible-vault', 'view', '/dev/null/foo'])
cli.parse()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
def test_view_missing_file_no_secret(self, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = []
cli = VaultCLI(args=['ansible-vault', 'view', '/dev/null/foo'])
cli.parse()
self.assertRaisesRegexp(errors.AnsibleOptionsError,
"A vault password is required to use Ansible's Vault",
cli.run)
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
def test_encrypt_missing_file_no_secret(self, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = []
cli = VaultCLI(args=['ansible-vault', 'encrypt', '/dev/null/foo'])
cli.parse()
self.assertRaisesRegexp(errors.AnsibleOptionsError,
"A vault password is required to use Ansible's Vault",
cli.run)
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
def test_encrypt(self, mock_vault_editor, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault', 'encrypt', '/dev/null/foo'])
cli.parse()
cli.run()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
def test_encrypt_string(self, mock_vault_editor, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault', 'encrypt_string',
'some string to encrypt'])
cli.parse()
cli.run()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
@patch('ansible.cli.vault.display.prompt', return_value='a_prompt')
def test_encrypt_string_prompt(self, mock_display, mock_vault_editor, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault',
'encrypt_string',
'--prompt',
'some string to encrypt'])
cli.parse()
cli.run()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
@patch('ansible.cli.vault.sys.stdin.read', return_value='This is data from stdin')
def test_encrypt_string_stdin(self, mock_stdin_read, mock_vault_editor, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault',
'encrypt_string',
'--stdin-name',
'the_var_from_stdin',
'-'])
cli.parse()
cli.run()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
def test_encrypt_string_names(self, mock_vault_editor, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault', 'encrypt_string',
'--name', 'foo1',
'--name', 'foo2',
'some string to encrypt'])
cli.parse()
cli.run()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
def test_encrypt_string_more_args_than_names(self, mock_vault_editor, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault', 'encrypt_string',
'--name', 'foo1',
'some string to encrypt',
'other strings',
'a few more string args'])
cli.parse()
cli.run()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
def test_create(self, mock_vault_editor, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault', 'create', '/dev/null/foo'])
cli.parse()
cli.run()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
def test_edit(self, mock_vault_editor, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault', 'edit', '/dev/null/foo'])
cli.parse()
cli.run()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
def test_decrypt(self, mock_vault_editor, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault', 'decrypt', '/dev/null/foo'])
cli.parse()
cli.run()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
def test_view(self, mock_vault_editor, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault', 'view', '/dev/null/foo'])
cli.parse()
cli.run()
@patch('ansible.cli.vault.VaultCLI.setup_vault_secrets')
@patch('ansible.cli.vault.VaultEditor')
def test_rekey(self, mock_vault_editor, mock_setup_vault_secrets):
mock_setup_vault_secrets.return_value = [('default', TextVaultSecret('password'))]
cli = VaultCLI(args=['ansible-vault', 'rekey', '/dev/null/foo'])
cli.parse()
cli.run()
|
netscaler/neutron
|
refs/heads/master
|
neutron/tests/unit/midonet/test_midonet_plugin.py
|
2
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Midokura Japan K.K.
# Copyright (C) 2013 Midokura PTE LTD
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Rossella Sblendido, Midokura Europe SARL
# @author: Ryu Ishimoto, Midokura Japan KK
# @author: Tomoe Sugihara, Midokura Japan KK
import mock
import os
import sys
import neutron.common.test_lib as test_lib
import neutron.tests.unit.midonet.mock_lib as mock_lib
import neutron.tests.unit.test_db_plugin as test_plugin
import neutron.tests.unit.test_extension_security_group as sg
MIDOKURA_PKG_PATH = "neutron.plugins.midonet.plugin"
# Need to mock the midonetclient module since the plugin will try to load it.
sys.modules["midonetclient"] = mock.Mock()
class MidonetPluginV2TestCase(test_plugin.NeutronDbPluginV2TestCase):
_plugin_name = ('%s.MidonetPluginV2' % MIDOKURA_PKG_PATH)
def setUp(self):
self.mock_api = mock.patch(
'neutron.plugins.midonet.midonet_lib.MidoClient')
etc_path = os.path.join(os.path.dirname(__file__), 'etc')
test_lib.test_config['config_files'] = [os.path.join(
etc_path, 'midonet.ini.test')]
self.instance = self.mock_api.start()
mock_cfg = mock_lib.MidonetLibMockConfig(self.instance.return_value)
mock_cfg.setup()
super(MidonetPluginV2TestCase, self).setUp(self._plugin_name)
def tearDown(self):
super(MidonetPluginV2TestCase, self).tearDown()
self.mock_api.stop()
class TestMidonetNetworksV2(test_plugin.TestNetworksV2,
MidonetPluginV2TestCase):
pass
class TestMidonetSecurityGroupsTestCase(sg.SecurityGroupDBTestCase):
_plugin_name = ('%s.MidonetPluginV2' % MIDOKURA_PKG_PATH)
def setUp(self):
self.mock_api = mock.patch(
'neutron.plugins.midonet.midonet_lib.MidoClient')
etc_path = os.path.join(os.path.dirname(__file__), 'etc')
test_lib.test_config['config_files'] = [os.path.join(
etc_path, 'midonet.ini.test')]
self.instance = self.mock_api.start()
mock_cfg = mock_lib.MidonetLibMockConfig(self.instance.return_value)
mock_cfg.setup()
super(TestMidonetSecurityGroupsTestCase, self).setUp(self._plugin_name)
class TestMidonetSecurityGroup(sg.TestSecurityGroups,
TestMidonetSecurityGroupsTestCase):
pass
class TestMidonetSubnetsV2(test_plugin.TestSubnetsV2,
MidonetPluginV2TestCase):
# IPv6 is not supported by MidoNet yet. Ignore tests that attempt to
# create IPv6 subnet.
def test_create_subnet_inconsistent_ipv6_cidrv4(self):
pass
def test_create_subnet_inconsistent_ipv6_dns_v4(self):
pass
def test_create_subnet_with_v6_allocation_pool(self):
pass
def test_update_subnet_inconsistent_ipv6_gatewayv4(self):
pass
def test_update_subnet_inconsistent_ipv6_hostroute_dst_v4(self):
pass
def test_update_subnet_inconsistent_ipv6_hostroute_np_v4(self):
pass
def test_create_subnet_inconsistent_ipv6_gatewayv4(self):
pass
class TestMidonetPortsV2(test_plugin.TestPortsV2,
MidonetPluginV2TestCase):
# IPv6 is not supported by MidoNet yet. Ignore tests that attempt to
# create IPv6 subnet.
def test_requested_subnet_id_v4_and_v6(self):
pass
|
sergiohgz/incubator-airflow
|
refs/heads/master
|
airflow/contrib/hooks/salesforce_hook.py
|
7
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""
This module contains a Salesforce Hook
which allows you to connect to your Salesforce instance,
retrieve data from it, and write that data to a file
for other uses.
NOTE: this hook also relies on the simple_salesforce package:
https://github.com/simple-salesforce/simple-salesforce
"""
from simple_salesforce import Salesforce
from airflow.hooks.base_hook import BaseHook
import json
import pandas as pd
import time
from airflow.utils.log.logging_mixin import LoggingMixin
class SalesforceHook(BaseHook, LoggingMixin):
def __init__(
self,
conn_id,
*args,
**kwargs
):
"""
Create new connection to Salesforce
and allows you to pull data out of SFDC and save it to a file.
You can then use that file with other
Airflow operators to move the data into another data source
:param conn_id: the name of the connection that has the parameters
we need to connect to Salesforce.
            The connection should be of type `http` and include a
            user's security token in the `Extras` field.
.. note::
For the HTTP connection type, you can include a
JSON structure in the `Extras` field.
We need a user's security token to connect to Salesforce.
So we define it in the `Extras` field as:
`{"security_token":"YOUR_SECRUITY_TOKEN"}`
"""
self.conn_id = conn_id
self._args = args
self._kwargs = kwargs
# get the connection parameters
self.connection = self.get_connection(conn_id)
self.extras = self.connection.extra_dejson
def sign_in(self):
"""
Sign into Salesforce.
        If we have already signed in, this will just return the original object.
"""
if hasattr(self, 'sf'):
return self.sf
# connect to Salesforce
sf = Salesforce(
username=self.connection.login,
password=self.connection.password,
security_token=self.extras['security_token'],
instance_url=self.connection.host,
sandbox=self.extras.get('sandbox', False)
)
self.sf = sf
return sf
def make_query(self, query):
"""
Make a query to Salesforce. Returns the result as a dictionary.
:param query: The query to make to Salesforce
"""
self.sign_in()
self.log.info("Querying for all objects")
query = self.sf.query_all(query)
self.log.info(
"Received results: Total size: %s; Done: %s",
query['totalSize'], query['done']
)
query = json.loads(json.dumps(query))
return query
def describe_object(self, obj):
"""
Get the description of an object from Salesforce.
This description is the object's schema
and some extra metadata that Salesforce stores for each object
:param obj: Name of the Salesforce object
that we are getting a description of.
"""
self.sign_in()
return json.loads(json.dumps(self.sf.__getattr__(obj).describe()))
def get_available_fields(self, obj):
"""
Get a list of all available fields for an object.
This only returns the names of the fields.
"""
self.sign_in()
desc = self.describe_object(obj)
return [f['name'] for f in desc['fields']]
def _build_field_list(self, fields):
# join all of the fields in a comma separated list
return ",".join(fields)
def get_object_from_salesforce(self, obj, fields):
"""
Get all instances of the `object` from Salesforce.
For each model, only get the fields specified in fields.
All we really do underneath the hood is run:
SELECT <fields> FROM <obj>;
"""
field_string = self._build_field_list(fields)
query = "SELECT {0} FROM {1}".format(field_string, obj)
self.log.info(
"Making query to Salesforce: %s",
query if len(query) < 30 else " ... ".join([query[:15], query[-15:]])
)
return self.make_query(query)
@classmethod
def _to_timestamp(cls, col):
"""
Convert a column of a dataframe to UNIX timestamps if applicable
:param col: A Series object representing a column of a dataframe.
"""
# try and convert the column to datetimes
# the column MUST have a four digit year somewhere in the string
# there should be a better way to do this,
# but just letting pandas try and convert every column without a format
# caused it to convert floats as well
# For example, a column of integers
# between 0 and 10 are turned into timestamps
# if the column cannot be converted,
# just return the original column untouched
try:
col = pd.to_datetime(col)
except ValueError:
log = LoggingMixin().log
log.warning(
"Could not convert field to timestamps: %s", col.name
)
return col
# now convert the newly created datetimes into timestamps
# we have to be careful here
# because NaT cannot be converted to a timestamp
# so we have to return NaN
converted = []
for i in col:
try:
converted.append(i.timestamp())
except ValueError:
converted.append(pd.np.NaN)
except AttributeError:
converted.append(pd.np.NaN)
# return a new series that maintains the same index as the original
return pd.Series(converted, index=col.index)
def write_object_to_file(
self,
query_results,
filename,
fmt="csv",
coerce_to_timestamp=False,
record_time_added=False
):
"""
Write query results to file.
Acceptable formats are:
- csv:
comma-separated-values file. This is the default format.
- json:
JSON array. Each element in the array is a different row.
- ndjson:
JSON array but each element is new-line delimited
instead of comma delimited like in `json`
This requires a significant amount of cleanup.
Pandas doesn't handle output to CSV and json in a uniform way.
This is especially painful for datetime types.
Pandas wants to write them as strings in CSV,
but as millisecond Unix timestamps in JSON.
By default, this function will try and leave all values as
they are represented in Salesforce.
You can use the `coerce_to_timestamp` flag to force all datetimes
to become Unix timestamps (UTC).
This can be greatly beneficial as it will make all of your
datetime fields look the same,
and makes it easier to work with in other database environments
:param query_results: the results from a SQL query
:param filename: the name of the file where the data
should be dumped to
:param fmt: the format you want the output in.
*Default:* csv.
:param coerce_to_timestamp: True if you want all datetime fields to be
converted into Unix timestamps.
False if you want them to be left in the
same format as they were in Salesforce.
Leaving the value as False will result
in datetimes being strings.
*Defaults to False*
:param record_time_added: *(optional)* True if you want to add a
Unix timestamp field to the resulting data
that marks when the data
was fetched from Salesforce.
*Default: False*.
"""
fmt = fmt.lower()
if fmt not in ['csv', 'json', 'ndjson']:
raise ValueError("Format value is not recognized: {0}".format(fmt))
# this line right here will convert all integers to floats if there are
# any None/np.nan values in the column
# that's because None/np.nan cannot exist in an integer column
# we should write all of our timestamps as FLOATS in our final schema
df = pd.DataFrame.from_records(query_results, exclude=["attributes"])
df.columns = [c.lower() for c in df.columns]
# convert columns with datetime strings to datetimes
# not all strings will be datetimes, so we ignore any errors that occur
# we get the object's definition at this point and only consider
# features that are DATE or DATETIME
if coerce_to_timestamp and df.shape[0] > 0:
# get the object name out of the query results
# it's stored in the "attributes" dictionary
# for each returned record
object_name = query_results[0]['attributes']['type']
self.log.info("Coercing timestamps for: %s", object_name)
schema = self.describe_object(object_name)
# possible columns that can be converted to timestamps
# are the ones that are either date or datetime types
# strings are too general and we risk unintentional conversion
possible_timestamp_cols = [
i['name'].lower()
for i in schema['fields']
if i['type'] in ["date", "datetime"] and
i['name'].lower() in df.columns
]
df[possible_timestamp_cols] = df[possible_timestamp_cols].apply(
lambda x: self._to_timestamp(x)
)
if record_time_added:
fetched_time = time.time()
df["time_fetched_from_salesforce"] = fetched_time
# write the CSV or JSON file depending on the option
# NOTE:
# datetimes here are an issue.
# There is no good way to manage the difference
# for to_json, the options are an epoch or an ISO string
# but for to_csv, it will be a string output by datetime
# For JSON we decided to output the epoch timestamp in seconds
# (as is fairly standard for JavaScript)
# And for csv, we do a string
if fmt == "csv":
# there are also a ton of newline objects
# that mess up our ability to write to csv
# we remove these newlines so that the output is a valid CSV format
self.log.info("Cleaning data and writing to CSV")
possible_strings = df.columns[df.dtypes == "object"]
df[possible_strings] = df[possible_strings].apply(
lambda x: x.str.replace("\r\n", "")
)
df[possible_strings] = df[possible_strings].apply(
lambda x: x.str.replace("\n", "")
)
# write the dataframe
df.to_csv(filename, index=False)
elif fmt == "json":
df.to_json(filename, "records", date_unit="s")
elif fmt == "ndjson":
df.to_json(filename, "records", lines=True, date_unit="s")
return df
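# A minimal usage sketch (not part of this module): the connection id
# 'salesforce_default', the object name, the field names, and the output path
# below are illustrative assumptions, not values defined by this hook.
#
#     from airflow.contrib.hooks.salesforce_hook import SalesforceHook
#
#     hook = SalesforceHook(conn_id='salesforce_default')
#     results = hook.get_object_from_salesforce(
#         obj='Account', fields=['Id', 'Name', 'CreatedDate'])
#     hook.write_object_to_file(
#         results['records'], filename='/tmp/accounts.csv',
#         fmt='csv', coerce_to_timestamp=True)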
|
jmmauricio/pypstools
|
refs/heads/master
|
tests/test_psys_map_sing.py
|
1
|
# -*- coding: utf-8 -*-
"""
Example converting ds txt output to python dict and plotting results
Created on Sat Mar 21 12:13:25 2015
@author: jmmauricio
"""
# add pypstools to the path
import sys,os
sys.path.insert(0,os.path.abspath(os.path.join(os.getcwd(),'..')))
import pypstools
pub = pypstools.publisher
geo_data = {'geojson_file':'./geojson_simplified.json',
'bottom_lat':-25.275, # sing
'top_lat':-17.372,
'left_lon':-71.960,
'right_lon':-64.666,
'mask_oceans':True}
plot_data = {'out_dir':'./png',
'out_formats':['svg'],
'map_name':'sing_simple'}
m = pub.psys_map(geo_data, plot_data)
|
chongtianfeiyu/kbengine
|
refs/heads/master
|
kbe/res/scripts/common/Lib/encodings/iso8859_7.py
|
272
|
""" Python Character Mapping Codec iso8859_7 generated from 'MAPPINGS/ISO8859/8859-7.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-7',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\x80' # 0x80 -> <control>
'\x81' # 0x81 -> <control>
'\x82' # 0x82 -> <control>
'\x83' # 0x83 -> <control>
'\x84' # 0x84 -> <control>
'\x85' # 0x85 -> <control>
'\x86' # 0x86 -> <control>
'\x87' # 0x87 -> <control>
'\x88' # 0x88 -> <control>
'\x89' # 0x89 -> <control>
'\x8a' # 0x8A -> <control>
'\x8b' # 0x8B -> <control>
'\x8c' # 0x8C -> <control>
'\x8d' # 0x8D -> <control>
'\x8e' # 0x8E -> <control>
'\x8f' # 0x8F -> <control>
'\x90' # 0x90 -> <control>
'\x91' # 0x91 -> <control>
'\x92' # 0x92 -> <control>
'\x93' # 0x93 -> <control>
'\x94' # 0x94 -> <control>
'\x95' # 0x95 -> <control>
'\x96' # 0x96 -> <control>
'\x97' # 0x97 -> <control>
'\x98' # 0x98 -> <control>
'\x99' # 0x99 -> <control>
'\x9a' # 0x9A -> <control>
'\x9b' # 0x9B -> <control>
'\x9c' # 0x9C -> <control>
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
'\x9f' # 0x9F -> <control>
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\u2018' # 0xA1 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0xA2 -> RIGHT SINGLE QUOTATION MARK
'\xa3' # 0xA3 -> POUND SIGN
'\u20ac' # 0xA4 -> EURO SIGN
'\u20af' # 0xA5 -> DRACHMA SIGN
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\u037a' # 0xAA -> GREEK YPOGEGRAMMENI
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\ufffe'
'\u2015' # 0xAF -> HORIZONTAL BAR
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\xb3' # 0xB3 -> SUPERSCRIPT THREE
'\u0384' # 0xB4 -> GREEK TONOS
'\u0385' # 0xB5 -> GREEK DIALYTIKA TONOS
'\u0386' # 0xB6 -> GREEK CAPITAL LETTER ALPHA WITH TONOS
'\xb7' # 0xB7 -> MIDDLE DOT
'\u0388' # 0xB8 -> GREEK CAPITAL LETTER EPSILON WITH TONOS
'\u0389' # 0xB9 -> GREEK CAPITAL LETTER ETA WITH TONOS
'\u038a' # 0xBA -> GREEK CAPITAL LETTER IOTA WITH TONOS
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u038c' # 0xBC -> GREEK CAPITAL LETTER OMICRON WITH TONOS
'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
'\u038e' # 0xBE -> GREEK CAPITAL LETTER UPSILON WITH TONOS
'\u038f' # 0xBF -> GREEK CAPITAL LETTER OMEGA WITH TONOS
'\u0390' # 0xC0 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
'\u0391' # 0xC1 -> GREEK CAPITAL LETTER ALPHA
'\u0392' # 0xC2 -> GREEK CAPITAL LETTER BETA
'\u0393' # 0xC3 -> GREEK CAPITAL LETTER GAMMA
'\u0394' # 0xC4 -> GREEK CAPITAL LETTER DELTA
'\u0395' # 0xC5 -> GREEK CAPITAL LETTER EPSILON
'\u0396' # 0xC6 -> GREEK CAPITAL LETTER ZETA
'\u0397' # 0xC7 -> GREEK CAPITAL LETTER ETA
'\u0398' # 0xC8 -> GREEK CAPITAL LETTER THETA
'\u0399' # 0xC9 -> GREEK CAPITAL LETTER IOTA
'\u039a' # 0xCA -> GREEK CAPITAL LETTER KAPPA
'\u039b' # 0xCB -> GREEK CAPITAL LETTER LAMDA
'\u039c' # 0xCC -> GREEK CAPITAL LETTER MU
'\u039d' # 0xCD -> GREEK CAPITAL LETTER NU
'\u039e' # 0xCE -> GREEK CAPITAL LETTER XI
'\u039f' # 0xCF -> GREEK CAPITAL LETTER OMICRON
'\u03a0' # 0xD0 -> GREEK CAPITAL LETTER PI
'\u03a1' # 0xD1 -> GREEK CAPITAL LETTER RHO
'\ufffe'
'\u03a3' # 0xD3 -> GREEK CAPITAL LETTER SIGMA
'\u03a4' # 0xD4 -> GREEK CAPITAL LETTER TAU
'\u03a5' # 0xD5 -> GREEK CAPITAL LETTER UPSILON
'\u03a6' # 0xD6 -> GREEK CAPITAL LETTER PHI
'\u03a7' # 0xD7 -> GREEK CAPITAL LETTER CHI
'\u03a8' # 0xD8 -> GREEK CAPITAL LETTER PSI
'\u03a9' # 0xD9 -> GREEK CAPITAL LETTER OMEGA
'\u03aa' # 0xDA -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
'\u03ab' # 0xDB -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
'\u03ac' # 0xDC -> GREEK SMALL LETTER ALPHA WITH TONOS
'\u03ad' # 0xDD -> GREEK SMALL LETTER EPSILON WITH TONOS
'\u03ae' # 0xDE -> GREEK SMALL LETTER ETA WITH TONOS
'\u03af' # 0xDF -> GREEK SMALL LETTER IOTA WITH TONOS
'\u03b0' # 0xE0 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
'\u03b1' # 0xE1 -> GREEK SMALL LETTER ALPHA
'\u03b2' # 0xE2 -> GREEK SMALL LETTER BETA
'\u03b3' # 0xE3 -> GREEK SMALL LETTER GAMMA
'\u03b4' # 0xE4 -> GREEK SMALL LETTER DELTA
'\u03b5' # 0xE5 -> GREEK SMALL LETTER EPSILON
'\u03b6' # 0xE6 -> GREEK SMALL LETTER ZETA
'\u03b7' # 0xE7 -> GREEK SMALL LETTER ETA
'\u03b8' # 0xE8 -> GREEK SMALL LETTER THETA
'\u03b9' # 0xE9 -> GREEK SMALL LETTER IOTA
'\u03ba' # 0xEA -> GREEK SMALL LETTER KAPPA
'\u03bb' # 0xEB -> GREEK SMALL LETTER LAMDA
'\u03bc' # 0xEC -> GREEK SMALL LETTER MU
'\u03bd' # 0xED -> GREEK SMALL LETTER NU
'\u03be' # 0xEE -> GREEK SMALL LETTER XI
'\u03bf' # 0xEF -> GREEK SMALL LETTER OMICRON
'\u03c0' # 0xF0 -> GREEK SMALL LETTER PI
'\u03c1' # 0xF1 -> GREEK SMALL LETTER RHO
'\u03c2' # 0xF2 -> GREEK SMALL LETTER FINAL SIGMA
'\u03c3' # 0xF3 -> GREEK SMALL LETTER SIGMA
'\u03c4' # 0xF4 -> GREEK SMALL LETTER TAU
'\u03c5' # 0xF5 -> GREEK SMALL LETTER UPSILON
'\u03c6' # 0xF6 -> GREEK SMALL LETTER PHI
'\u03c7' # 0xF7 -> GREEK SMALL LETTER CHI
'\u03c8' # 0xF8 -> GREEK SMALL LETTER PSI
'\u03c9' # 0xF9 -> GREEK SMALL LETTER OMEGA
'\u03ca' # 0xFA -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
'\u03cb' # 0xFB -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
'\u03cc' # 0xFC -> GREEK SMALL LETTER OMICRON WITH TONOS
'\u03cd' # 0xFD -> GREEK SMALL LETTER UPSILON WITH TONOS
'\u03ce' # 0xFE -> GREEK SMALL LETTER OMEGA WITH TONOS
'\ufffe'
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
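# A short round-trip sketch (illustrative only): once the encodings package
# exposes this codec under the name 'iso8859-7', text/bytes conversion goes
# through the tables defined above.
#
#     text = '\u0391\u03b8\u03ae\u03bd\u03b1'      # "Athens" in Greek letters
#     raw = text.encode('iso8859-7')               # charmap_encode via encoding_table
#     assert raw.decode('iso8859-7') == text       # charmap_decode via decoding_table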
|
4Kaylum/SkyBot
|
refs/heads/deleting-cogs
|
cogs/dnd_commands.py
|
1
|
import re
import typing
import random
import string
from urllib.parse import quote
from discord.ext import commands
import voxelbotutils as utils
class DNDCommands(utils.Cog):
DICE_REGEX = re.compile(r"^(?P<count>\d+)?[dD](?P<type>\d+) *(?P<modifier>(?P<modifier_parity>[+-]) *(?P<modifier_amount>\d+))?$")
ATTRIBUTES = {
"strength": "STR",
"dexterity": "DEX",
"constitution": "CON",
"intelligence": "INT",
"wisdom": "WIS",
"charisma": "CHR",
}
@utils.command(aliases=['roll'])
@commands.bot_has_permissions(send_messages=True)
async def dice(self, ctx:utils.Context, *, dice:str):
"""
Rolls a dice for you.
"""
# Validate the dice
if not dice:
raise utils.errors.MissingRequiredArgumentString(dice)
match = self.DICE_REGEX.search(dice)
if not match:
raise commands.BadArgument("Your dice was not in the format `AdB+C`.")
# Roll em
dice_count = int(match.group("count") or 1)
dice_type = int(match.group("type"))
modifier = int((match.group("modifier") or "+0").replace(" ", ""))
rolls = [random.randint(1, dice_type) for _ in range(dice_count)]
total = sum(rolls) + modifier
dice_string = f"{dice_count}d{dice_type}{modifier:+}"
if not modifier:
dice_string = dice_string[:-2]
# Output formatted
if dice_count > 1 or modifier:
equals_string = f"{sum(rolls)} {'+' if modifier > 0 else '-'} {abs(modifier)}"
if modifier:
return await ctx.send(f"Total **{total:,}** ({dice_string})\n({', '.join([str(i) for i in rolls])}) = {equals_string}")
return await ctx.send(f"Total **{total:,}** ({dice_string})\n({', '.join([str(i) for i in rolls])}) = {equals_string}")
return await ctx.send(f"Total **{total}** ({dice_string})")
async def send_web_request(self, resource:str, item:str) -> typing.Optional[dict]:
"""
Send a web request to the dnd5eapi website.
"""
url = f"https://www.dnd5eapi.co/api/{resource}/{quote(item.lower().replace(' ', '-'))}/"
headers = {"User-Agent": self.bot.user_agent}
async with self.bot.session.get(url, headers=headers) as r:
v = await r.json()
if v.get("error"):
return None
return v
@staticmethod
def group_field_descriptions(embed, field_name, input_list) -> None:
"""
Add fields grouped to the embed character limit.
"""
original_field_name = field_name
joiner = "\n"
action_text = [f"**{i['name']}**{joiner}{i['desc']}" for i in input_list]
add_text = ""
for index, text in enumerate(action_text):
if len(add_text) + len(text) + 1 > 1024:
embed.add_field(
field_name, add_text, inline=False,
)
field_name = f"{original_field_name} Continued"
add_text = ""
add_text += joiner + text
if add_text:
embed.add_field(
field_name, add_text, inline=False,
)
@utils.group(aliases=["d&d"])
@commands.bot_has_permissions(send_messages=True)
async def dnd(self, ctx:utils.Context):
"""
The parent group for the D&D commands.
"""
if ctx.invoked_subcommand is None:
return await ctx.send_help(ctx.command)
@dnd.command(name="spell", aliases=["spells"])
@commands.bot_has_permissions(send_messages=True, embed_links=True)
async def dnd_spell(self, ctx:utils.Context, *, spell_name:str):
"""
Gives you information on a D&D spell.
"""
async with ctx.typing():
data = await self.send_web_request("spells", spell_name)
if not data:
return await ctx.send("I couldn't find any information for that spell.")
embed = utils.Embed(
use_random_colour=True,
title=data['name'],
description=data['desc'][0],
).add_field(
"Casting Time", data['casting_time'],
).add_field(
"Range", data['range'],
).add_field(
"Components", ', '.join(data['components']),
).add_field(
"Material", data.get('material', 'N/A'),
).add_field(
"Duration", data['duration'],
).add_field(
"Classes", ', '.join([i['name'] for i in data['classes']]),
).add_field(
"Ritual", data['ritual'],
).add_field(
"Concentration", data['concentration'],
)
if data.get('higher_level'):
embed.add_field(
"Higher Level", "\n".join(data['higher_level']), inline=False,
)
elif data.get('damage'):
text = ""
if data['damage'].get('damage_at_character_level'):
text += "\nCharacter level " + ", ".join([f"{i}: {o}" for i, o in data['damage']['damage_at_character_level'].items()])
if data['damage'].get('damage_at_slot_level'):
text += "\nSlot level " + ", ".join([f"{i}: {o}" for i, o in data['damage']['damage_at_slot_level'].items()])
embed.add_field(
"Damage", text.strip(), inline=False,
)
return await ctx.send(embed=embed)
@dnd.command(name="monster", aliases=["monsters"])
@commands.bot_has_permissions(send_messages=True, embed_links=True)
async def dnd_monster(self, ctx:utils.Context, *, monster_name:str):
"""
Gives you information on a D&D monster.
"""
async with ctx.typing():
data = await self.send_web_request("monsters", monster_name)
if not data:
return await ctx.send("I couldn't find any information for that monster.")
embed = utils.Embed(
use_random_colour=True,
title=data['name'],
description="\n".join([
f"{data['size'].capitalize()} | {data['type']} | {data['hit_points']:,} ({data['hit_dice']}) HP | {data['xp']:,} XP",
", ".join([f"{o} {data[i]}" for i, o in self.ATTRIBUTES.items()]),
])
).add_field(
"Proficiencies", ", ".join([f"{i['proficiency']['name']} {i['value']}" for i in data['proficiencies']]) or "None",
).add_field(
"Damage Vulnerabilities", "\n".join(data['damage_vulnerabilities']).capitalize() or "None",
).add_field(
"Damage Resistances", "\n".join(data['damage_resistances']).capitalize() or "None",
).add_field(
"Damage Immunities", "\n".join(data['damage_immunities']).capitalize() or "None",
).add_field(
"Condition Immunities", "\n".join([i['name'] for i in data['condition_immunities']]).capitalize() or "None",
).add_field(
"Senses", "\n".join([f"{i.replace('_', ' ').capitalize()} {o}" for i, o in data['senses'].items()]) or "None",
)
self.group_field_descriptions(embed, "Actions", data['actions'])
self.group_field_descriptions(embed, "Legendary Actions", data.get('legendary_actions', list()))
if data.get('special_abilities'):
embed.add_field(
"Special Abilities", "\n".join([f"**{i['name']}**\n{i['desc']}" for i in data['special_abilities'] if i['name'] != 'Spellcasting']) or "None", inline=False,
)
spellcasting = [i for i in data.get('special_abilities', list()) if i['name'] == 'Spellcasting']
if spellcasting:
spellcasting = spellcasting[0]
embed.add_field(
"Spellcasting", spellcasting['desc'].replace('\n\n', '\n'), inline=False,
)
return await ctx.send(embed=embed)
@dnd.command(name="condition", aliases=["conditions"])
@commands.bot_has_permissions(send_messages=True, embed_links=True)
async def dnd_condition(self, ctx:utils.Context, *, condition_name:str):
"""
Gives you information on a D&D condition.
"""
async with ctx.typing():
data = await self.send_web_request("conditions", condition_name)
if not data:
return await ctx.send("I couldn't find any information for that condition.")
embed = utils.Embed(
use_random_colour=True,
title=data['name'],
description="\n".join(data['desc']),
)
return await ctx.send(embed=embed)
@dnd.command(name="class", aliases=["classes"])
@commands.bot_has_permissions(send_messages=True, embed_links=True)
async def dnd_class(self, ctx:utils.Context, *, class_name:str):
"""
Gives you information on a D&D class.
"""
async with ctx.typing():
data = await self.send_web_request("classes", class_name)
if not data:
return await ctx.send("I couldn't find any information for that class.")
embed = utils.Embed(
use_random_colour=True,
title=data['name'],
).add_field(
"Proficiencies", ", ".join([i['name'] for i in data['proficiencies']]),
).add_field(
"Saving Throws", ", ".join([i['name'] for i in data['saving_throws']]),
).add_field(
"Starting Equipment", "\n".join([f"{i['quantity']}x {i['equipment']['name']}" for i in data['starting_equipment']]),
)
return await ctx.send(embed=embed)
def setup(bot:utils.Bot):
x = DNDCommands(bot)
bot.add_cog(x)
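# A minimal sketch (illustrative, not executed) of how DICE_REGEX parses the
# "AdB+C" dice notation used by the `dice` command; the sample roll is made up.
#
#     import random
#     match = DNDCommands.DICE_REGEX.search("2d20+3")
#     count = int(match.group("count") or 1)                              # 2 dice
#     sides = int(match.group("type"))                                    # 20 sides each
#     modifier = int((match.group("modifier") or "+0").replace(" ", ""))  # +3
#     total = sum(random.randint(1, sides) for _ in range(count)) + modifier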
|
cwrowley/dmdtools
|
refs/heads/master
|
python/scripts/streaming_dmd_example.py
|
1
|
""" An example highlighting the difference between DMD and streaming DMD
Streaming DMD is a modification of the "standard" DMD procedure that
produces *APPROXIMATIONS* of the DMD modes and eigenvalues. The benefit
of this procedure is that it can be applied to data sets with large
(in theory, infinite) numbers of snapshots provided the underlying
system is effectively low-rank.
Returns
-------
Outputs a plot comparing the streaming and standard eigenvalues
"""
import sys
sys.path.append('..')
import dmdtools
import numpy as np
import matplotlib.pyplot as plt
max_rank = 0 # maximum allowable rank of the DMD operator (0 = unlimited)
n_snaps = 501 # total number of snapshots to be processed
n_states = 4000 # number of states
noise_cov = 1.e-4 # measurement noise covariance
dt = 0.01 # timestep
np.random.seed(0)
def snapshots(n_states, n_snaps, noise_cov=0):
# Define the example system
v1 = np.random.randn(n_states)
v2 = np.random.randn(n_states)
v3 = np.random.randn(n_states)
v4 = np.random.randn(n_states)
# characteristic frequencies
f1 = 5.2
f2 = 1.0
for k in range(n_snaps):
x = (v1 * np.cos(2 * np.pi * f1 * dt * k) +
v2 * np.cos(2 * np.pi * f2 * dt * k) +
v3 * np.sin(2 * np.pi * f1 * dt * k) +
v4 * np.sin(2 * np.pi * f2 * dt * k))
yield x + np.sqrt(noise_cov) * np.random.randn(n_states)
def standard_dmd():
X = np.zeros((n_states, n_snaps-1))
Y = np.zeros((n_states, n_snaps-1))
snaps = snapshots(n_states, n_snaps, noise_cov)
x = next(snaps)
for k, y in enumerate(snaps):
X[:, k] = x
Y[:, k] = y
x = y
DMD = dmdtools.DMD()
DMD.fit(X, Y)
return DMD.modes, DMD.evals
def streaming_dmd():
sdmd = dmdtools.StreamingDMD(max_rank)
snaps = snapshots(n_states, n_snaps, noise_cov)
x = next(snaps)
for y in snaps:
sdmd.update(x, y)
x = y
return sdmd.compute_modes()
def main(streaming):
modes, evals = streaming_dmd() if streaming else standard_dmd()
fdmd = np.abs(np.angle(evals)) / (2 * np.pi * dt)
n_modes = len(fdmd)
ydmd = np.zeros(n_modes)
for i in range(n_modes):
ydmd[i] = np.linalg.norm(modes[:, i] * np.abs(evals[i]))
ydmd /= max(ydmd)
plt.stem(fdmd, ydmd)
plt.show()
def compare_methods():
np.random.seed(0)
modes, evals = standard_dmd()
np.random.seed(0)
modes2, evals2 = streaming_dmd()
evals.sort()
evals2.sort()
# print("standard:")
# print(evals)
# print("\nstreaming:")
# print(evals2)
plt.plot(evals.real, evals.imag, 'x')
plt.plot(evals2.real, evals2.imag, '+')
plt.legend(["DMD", "Streaming"])
plt.title("DMD Spectrum")
plt.xlabel(r"$\Re(\lambda)$")
plt.ylabel(r"$\Im(\lambda)$")
plt.show()
print(np.allclose(evals, evals2))
if __name__ == "__main__":
streaming = True
#main(streaming)
compare_methods()
|
metala/avr-gcc-scons-skel
|
refs/heads/master
|
site_scons/site_tools/winavr.py
|
1
|
"""
Tool-specific initialization for WinAVR (AVR-GCC).
"""
import os
import re
import subprocess
import SCons.Util
import SCons.Tool.cc as cc
__author__ = "Marin Ivanov"
__copyright__ = "Copyright 2012"
__credits__ = ["Valori Ivanov"]
__license__ = "BSD 2-clause"
__version__ = "1.0.0"
__maintainer__ = "Marin Ivanov"
__email__ = "dev@metala.org"
__status__ = "Development"
compiler = 'avr-gcc'
objcopy = 'avr-objcopy'
def generate(env):
"""Add Builders and construction variables for gcc to an Environment."""
cc.generate(env)
env['CC'] = env.Detect(compiler) or 'avr-gcc'
env['OBJCOPY'] = env.Detect(objcopy) or 'avr-objcopy'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
env['SPAWN'] = _echospawn
env['ESCAPE'] = lambda x:x
env['WINAVR_PATH'] = _detect_winavr()
#env['CPPPATH'] = env['WINAVR_PATH'] + '\\avr\\include\\avr'
env.Append(BUILDERS = {
'Elf': _get_elf_builder(),
'Hex': _get_hex_builder(),
})
def exists(env):
return env.Detect(compiler) and env.Detect(objcopy)
def _detect_winavr():
import os
if 'AVR32_HOME' in os.environ:
return os.environ['AVR32_HOME']
def _echospawn(sh, escape, cmd, args, env):
return subprocess.call(args)
def _get_elf_builder():
return SCons.Builder.Builder(action = "$CC -mmcu=${MCU} -Wl,-Map=${TARGET}.map -Os -Xlinker -Map=${TARGET}.map -Wl,--gc-sections -o ${TARGET} ${SOURCES}")
def _get_hex_builder():
return SCons.Builder.Builder(action = "$OBJCOPY -O ihex -R .eeprom $SOURCES $TARGET")
|
playpauseandstop/aiohttp
|
refs/heads/master
|
examples/legacy/crawl.py
|
12
|
#!/usr/bin/env python3
import asyncio
import logging
import re
import signal
import sys
import urllib.parse
import aiohttp
class Crawler:
def __init__(self, rooturl, loop, maxtasks=100):
self.rooturl = rooturl
self.loop = loop
self.todo = set()
self.busy = set()
self.done = {}
self.tasks = set()
self.sem = asyncio.Semaphore(maxtasks, loop=loop)
# connector stores cookies between requests and uses connection pool
self.session = aiohttp.ClientSession(loop=loop)
@asyncio.coroutine
def run(self):
t = asyncio.ensure_future(self.addurls([(self.rooturl, '')]),
loop=self.loop)
yield from asyncio.sleep(1, loop=self.loop)
while self.busy:
yield from asyncio.sleep(1, loop=self.loop)
yield from t
yield from self.session.close()
self.loop.stop()
@asyncio.coroutine
def addurls(self, urls):
for url, parenturl in urls:
url = urllib.parse.urljoin(parenturl, url)
url, frag = urllib.parse.urldefrag(url)
if (url.startswith(self.rooturl) and
url not in self.busy and
url not in self.done and
url not in self.todo):
self.todo.add(url)
yield from self.sem.acquire()
task = asyncio.ensure_future(self.process(url), loop=self.loop)
task.add_done_callback(lambda t: self.sem.release())
task.add_done_callback(self.tasks.remove)
self.tasks.add(task)
@asyncio.coroutine
def process(self, url):
print('processing:', url)
self.todo.remove(url)
self.busy.add(url)
try:
resp = yield from self.session.get(url)
except Exception as exc:
print('...', url, 'has error', repr(str(exc)))
self.done[url] = False
else:
if (resp.status == 200 and
('text/html' in resp.headers.get('content-type'))):
data = (yield from resp.read()).decode('utf-8', 'replace')
urls = re.findall(r'(?i)href=["\']?([^\s"\'<>]+)', data)
asyncio.Task(self.addurls([(u, url) for u in urls]))
resp.close()
self.done[url] = True
self.busy.remove(url)
print(len(self.done), 'completed tasks,', len(self.tasks),
'still pending, todo', len(self.todo))
def main():
loop = asyncio.get_event_loop()
c = Crawler(sys.argv[1], loop)
asyncio.ensure_future(c.run(), loop=loop)
try:
loop.add_signal_handler(signal.SIGINT, loop.stop)
except RuntimeError:
pass
loop.run_forever()
print('todo:', len(c.todo))
print('busy:', len(c.busy))
print('done:', len(c.done), '; ok:', sum(c.done.values()))
print('tasks:', len(c.tasks))
if __name__ == '__main__':
if '--iocp' in sys.argv:
from asyncio import events, windows_events
sys.argv.remove('--iocp')
logging.info('using iocp')
el = windows_events.ProactorEventLoop()
events.set_event_loop(el)
main()
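# Usage sketch (the URL below is illustrative): pass the root URL to crawl as
# the first command-line argument, e.g.
#
#     python crawl.py http://example.com/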
|
Kamekameha/crunchy-xml-decoder
|
refs/heads/master
|
crunchy-xml-decoder/requests/packages/urllib3/contrib/pyopenssl.py
|
65
|
'''SSL with SNI_-support for Python 2. Follow these instructions if you would
like to verify SSL certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.
This needs the following packages installed:
* pyOpenSSL (tested with 0.13)
* ndg-httpsclient (tested with 0.3.2)
* pyasn1 (tested with 0.1.6)
You can install them with the following command:
pip install pyopenssl ndg-httpsclient pyasn1
To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this::
try:
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
except ImportError:
pass
Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.
Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).
If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
Module Variables
----------------
:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:
ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS``
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
'''
try:
from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
except SyntaxError as e:
raise ImportError(e)
import OpenSSL.SSL
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.type import univ, constraint
from socket import _fileobject, timeout
import ssl
import select
from .. import connection
from .. import util
__all__ = ['inject_into_urllib3', 'extract_from_urllib3']
# SNI only *really* works if we can read the subjectAltName of certificates.
HAS_SNI = SUBJ_ALT_NAME_SUPPORT
# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD,
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}
_openssl_verify = {
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
+ OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
# A secure default.
# Sources for more information on TLS ciphers:
#
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# The general intent is:
# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
# - prefer ECDHE over DHE for better performance,
# - prefer any AES-GCM over any AES-CBC for better performance and security,
# - use 3DES as fallback which is secure but slow,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \
"ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \
"!aNULL:!MD5:!DSS"
orig_util_HAS_SNI = util.HAS_SNI
orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
def inject_into_urllib3():
'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'
connection.ssl_wrap_socket = ssl_wrap_socket
util.HAS_SNI = HAS_SNI
def extract_from_urllib3():
'Undo monkey-patching by :func:`inject_into_urllib3`.'
connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
util.HAS_SNI = orig_util_HAS_SNI
### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
class SubjectAltName(BaseSubjectAltName):
'''ASN.1 implementation for subjectAltNames support'''
# There is no limit to how many SAN certificates a certificate may have,
# however this needs to have some limit so we'll set an arbitrarily high
# limit.
sizeSpec = univ.SequenceOf.sizeSpec + \
constraint.ValueSizeConstraint(1, 1024)
### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
def get_subj_alt_name(peer_cert):
# Search through extensions
dns_name = []
if not SUBJ_ALT_NAME_SUPPORT:
return dns_name
general_names = SubjectAltName()
for i in range(peer_cert.get_extension_count()):
ext = peer_cert.get_extension(i)
ext_name = ext.get_short_name()
if ext_name != 'subjectAltName':
continue
# PyOpenSSL returns extension data in ASN.1 encoded form
ext_dat = ext.get_data()
decoded_dat = der_decoder.decode(ext_dat,
asn1Spec=general_names)
for name in decoded_dat:
if not isinstance(name, SubjectAltName):
continue
for entry in range(len(name)):
component = name.getComponentByPosition(entry)
if component.getName() != 'dNSName':
continue
dns_name.append(str(component.getComponent()))
return dns_name
class WrappedSocket(object):
'''API-compatibility wrapper for Python OpenSSL's Connection-class.
Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
collector of pypy.
'''
def __init__(self, connection, socket, suppress_ragged_eofs=True):
self.connection = connection
self.socket = socket
self.suppress_ragged_eofs = suppress_ragged_eofs
self._makefile_refs = 0
def fileno(self):
return self.socket.fileno()
def makefile(self, mode, bufsize=-1):
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
def recv(self, *args, **kwargs):
try:
data = self.connection.recv(*args, **kwargs)
except OpenSSL.SSL.SysCallError as e:
if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
return b''
else:
raise
except OpenSSL.SSL.WantReadError:
rd, wd, ed = select.select(
[self.socket], [], [], self.socket.gettimeout())
if not rd:
raise timeout('The read operation timed out')
else:
return self.recv(*args, **kwargs)
else:
return data
def settimeout(self, timeout):
return self.socket.settimeout(timeout)
def sendall(self, data):
return self.connection.sendall(data)
def close(self):
if self._makefile_refs < 1:
return self.connection.shutdown()
else:
self._makefile_refs -= 1
def getpeercert(self, binary_form=False):
x509 = self.connection.get_peer_certificate()
if not x509:
return x509
if binary_form:
return OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_ASN1,
x509)
return {
'subject': (
(('commonName', x509.get_subject().CN),),
),
'subjectAltName': [
('DNS', value)
for value in get_subj_alt_name(x509)
]
}
def _reuse(self):
self._makefile_refs += 1
def _drop(self):
if self._makefile_refs < 1:
self.close()
else:
self._makefile_refs -= 1
def _verify_callback(cnx, x509, err_no, err_depth, return_code):
return err_no == 0
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ca_certs=None, server_hostname=None,
ssl_version=None):
ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
if certfile:
ctx.use_certificate_file(certfile)
if keyfile:
ctx.use_privatekey_file(keyfile)
if cert_reqs != ssl.CERT_NONE:
ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
if ca_certs:
try:
ctx.load_verify_locations(ca_certs, None)
except OpenSSL.SSL.Error as e:
raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
else:
ctx.set_default_verify_paths()
# Disable TLS compression to mitigate CRIME attack (issue #309)
OP_NO_COMPRESSION = 0x20000
ctx.set_options(OP_NO_COMPRESSION)
# Set list of supported ciphersuites.
ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)
cnx = OpenSSL.SSL.Connection(ctx, sock)
cnx.set_tlsext_host_name(server_hostname)
cnx.set_connect_state()
while True:
try:
cnx.do_handshake()
except OpenSSL.SSL.WantReadError:
select.select([sock], [], [])
continue
except OpenSSL.SSL.Error as e:
raise ssl.SSLError('bad handshake', e)
break
return WrappedSocket(cnx, sock)
|
ciprian-radu/unimap.ns-3noc
|
refs/heads/master
|
bindings/python/ns3modulegen_core_customizations.py
|
9
|
import re
from pybindgen.typehandlers import base as typehandlers
from pybindgen import ReturnValue, Parameter
from pybindgen.cppmethod import CustomCppMethodWrapper, CustomCppConstructorWrapper
from pybindgen.typehandlers.codesink import MemoryCodeSink
from pybindgen.typehandlers import ctypeparser
from pybindgen import cppclass
import warnings
from pybindgen.typehandlers.base import CodeGenerationError
import sys
class SmartPointerTransformation(typehandlers.TypeTransformation):
"""
This class provides a "type transformation" that tends to support
NS-3 smart pointers. Parameters such as "Ptr<Foo> foo" are
transformed into something like Parameter.new("Foo*", "foo",
transfer_ownership=False). Return values such as Ptr<Foo> are
transformed into ReturnValue.new("Foo*",
caller_owns_return=False). Since the underlying objects have
reference counting, PyBindGen does the right thing.
"""
def __init__(self):
super(SmartPointerTransformation, self).__init__()
self.rx = re.compile(r'(ns3::|::ns3::|)Ptr<([^>]+)>\s*$')
def _get_untransformed_type_traits(self, name):
m = self.rx.match(name)
is_const = False
if m is None:
return None, False
else:
name1 = m.group(2).strip()
if name1.startswith('const '):
name1 = name1[len('const '):]
is_const = True
if name1.endswith(' const'):
name1 = name1[:-len(' const')]
is_const = True
new_name = name1+' *'
if new_name.startswith('::'):
new_name = new_name[2:]
return new_name, is_const
def get_untransformed_name(self, name):
new_name, dummy_is_const = self._get_untransformed_type_traits(name)
return new_name
def create_type_handler(self, type_handler, *args, **kwargs):
if issubclass(type_handler, Parameter):
kwargs['transfer_ownership'] = False
elif issubclass(type_handler, ReturnValue):
kwargs['caller_owns_return'] = False
else:
raise AssertionError
## fix the ctype, add ns3:: namespace
orig_ctype, is_const = self._get_untransformed_type_traits(args[0])
if is_const:
correct_ctype = 'ns3::Ptr< %s const >' % orig_ctype[:-2]
else:
correct_ctype = 'ns3::Ptr< %s >' % orig_ctype[:-2]
args = tuple([correct_ctype] + list(args[1:]))
handler = type_handler(*args, **kwargs)
handler.set_tranformation(self, orig_ctype)
return handler
def untransform(self, type_handler, declarations, code_block, expression):
return 'const_cast<%s> (ns3::PeekPointer (%s))' % (type_handler.untransformed_ctype, expression)
def transform(self, type_handler, declarations, code_block, expression):
assert type_handler.untransformed_ctype[-1] == '*'
return 'ns3::Ptr< %s > (%s)' % (type_handler.untransformed_ctype[:-1], expression)
## register the type transformation
transf = SmartPointerTransformation()
typehandlers.return_type_matcher.register_transformation(transf)
typehandlers.param_type_matcher.register_transformation(transf)
del transf
class ArgvParam(Parameter):
"""
Converts a python list-of-strings argument to a pair of 'int argc,
char *argv[]' arguments to pass into C.
One Python argument becomes two C function arguments -> it's a miracle!
Note: this parameter type handler is not registered by any name;
must be used explicitly.
"""
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = []
def convert_c_to_python(self, wrapper):
raise NotImplementedError
def convert_python_to_c(self, wrapper):
py_name = wrapper.declarations.declare_variable('PyObject*', 'py_' + self.name)
argc_var = wrapper.declarations.declare_variable('int', 'argc')
name = wrapper.declarations.declare_variable('char**', self.name)
idx = wrapper.declarations.declare_variable('Py_ssize_t', 'idx')
wrapper.parse_params.add_parameter('O!', ['&PyList_Type', '&'+py_name], self.name)
#wrapper.before_call.write_error_check('!PyList_Check(%s)' % py_name) # XXX
wrapper.before_call.write_code("%s = (char **) malloc(sizeof(char*)*PyList_Size(%s));"
% (name, py_name))
wrapper.before_call.add_cleanup_code('free(%s);' % name)
wrapper.before_call.write_code('''
for (%(idx)s = 0; %(idx)s < PyList_Size(%(py_name)s); %(idx)s++)
{
''' % vars())
wrapper.before_call.sink.indent()
wrapper.before_call.write_code('''
PyObject *item = PyList_GET_ITEM(%(py_name)s, %(idx)s);
''' % vars())
#wrapper.before_call.write_error_check('item == NULL')
wrapper.before_call.write_error_check(
'!PyString_Check(item)',
failure_cleanup=('PyErr_SetString(PyExc_TypeError, '
'"argument %s must be a list of strings");') % self.name)
wrapper.before_call.write_code(
'%s[%s] = PyString_AsString(item);' % (name, idx))
wrapper.before_call.sink.unindent()
wrapper.before_call.write_code('}')
wrapper.before_call.write_code('%s = PyList_Size(%s);' % (argc_var, py_name))
wrapper.call_params.append(argc_var)
wrapper.call_params.append(name)
class CallbackImplProxyMethod(typehandlers.ReverseWrapperBase):
"""
Class that generates a proxy virtual method that calls a similarly named python method.
"""
def __init__(self, return_value, parameters):
super(CallbackImplProxyMethod, self).__init__(return_value, parameters)
def generate_python_call(self):
"""code to call the python method"""
build_params = self.build_params.get_parameters(force_tuple_creation=True)
if build_params[0][0] == '"':
build_params[0] = '(char *) ' + build_params[0]
args = self.before_call.declare_variable('PyObject*', 'args')
self.before_call.write_code('%s = Py_BuildValue(%s);'
% (args, ', '.join(build_params)))
self.before_call.add_cleanup_code('Py_DECREF(%s);' % args)
self.before_call.write_code('py_retval = PyObject_CallObject(m_callback, %s);' % args)
self.before_call.write_error_check('py_retval == NULL')
self.before_call.add_cleanup_code('Py_DECREF(py_retval);')
def generate_callback_classes(out, callbacks):
for callback_impl_num, template_parameters in enumerate(callbacks):
sink = MemoryCodeSink()
cls_name = "ns3::Callback< %s >" % ', '.join(template_parameters)
#print >> sys.stderr, "***** trying to register callback: %r" % cls_name
class_name = "PythonCallbackImpl%i" % callback_impl_num
sink.writeln('''
class %s : public ns3::CallbackImpl<%s>
{
public:
PyObject *m_callback;
%s(PyObject *callback)
{
Py_INCREF(callback);
m_callback = callback;
}
virtual ~%s()
{
Py_DECREF(m_callback);
m_callback = NULL;
}
virtual bool IsEqual(ns3::Ptr<const ns3::CallbackImplBase> other_base) const
{
const %s *other = dynamic_cast<const %s*> (ns3::PeekPointer (other_base));
if (other != NULL)
return (other->m_callback == m_callback);
else
return false;
}
''' % (class_name, ', '.join(template_parameters), class_name, class_name, class_name, class_name))
sink.indent()
callback_return = template_parameters[0]
return_ctype = ctypeparser.parse_type(callback_return)
if ('const' in return_ctype.remove_modifiers()):
kwargs = {'is_const': True}
else:
kwargs = {}
try:
return_type = ReturnValue.new(str(return_ctype), **kwargs)
except (typehandlers.TypeLookupError, typehandlers.TypeConfigurationError), ex:
warnings.warn("***** Unable to register callback; Return value '%s' error (used in %s): %r"
% (callback_return, cls_name, ex),
Warning)
continue
arguments = []
ok = True
callback_parameters = [arg for arg in template_parameters[1:] if arg != 'ns3::empty']
for arg_num, arg_type in enumerate(callback_parameters):
arg_name = 'arg%i' % (arg_num+1)
param_ctype = ctypeparser.parse_type(arg_type)
if ('const' in param_ctype.remove_modifiers()):
kwargs = {'is_const': True}
else:
kwargs = {}
try:
arguments.append(Parameter.new(str(param_ctype), arg_name, **kwargs))
except (typehandlers.TypeLookupError, typehandlers.TypeConfigurationError), ex:
warnings.warn("***** Unable to register callback; parameter '%s %s' error (used in %s): %r"
% (arg_type, arg_name, cls_name, ex),
Warning)
ok = False
if not ok:
continue
wrapper = CallbackImplProxyMethod(return_type, arguments)
wrapper.generate(sink, 'operator()', decl_modifiers=[])
sink.unindent()
sink.writeln('};\n')
sink.flush_to(out)
class PythonCallbackParameter(Parameter):
"Class handlers"
CTYPES = [cls_name]
#print >> sys.stderr, "***** registering callback handler: %r" % ctypeparser.normalize_type_string(cls_name)
DIRECTIONS = [Parameter.DIRECTION_IN]
PYTHON_CALLBACK_IMPL_NAME = class_name
TEMPLATE_ARGS = template_parameters
def convert_python_to_c(self, wrapper):
"parses python args to get C++ value"
assert isinstance(wrapper, typehandlers.ForwardWrapperBase)
if self.default_value is None:
py_callback = wrapper.declarations.declare_variable('PyObject*', self.name)
wrapper.parse_params.add_parameter('O', ['&'+py_callback], self.name)
wrapper.before_call.write_error_check(
'!PyCallable_Check(%s)' % py_callback,
'PyErr_SetString(PyExc_TypeError, "parameter \'%s\' must be callbale");' % self.name)
callback_impl = wrapper.declarations.declare_variable(
'ns3::Ptr<%s>' % self.PYTHON_CALLBACK_IMPL_NAME,
'%s_cb_impl' % self.name)
wrapper.before_call.write_code("%s = ns3::Create<%s> (%s);"
% (callback_impl, self.PYTHON_CALLBACK_IMPL_NAME, py_callback))
wrapper.call_params.append(
'ns3::Callback<%s> (%s)' % (', '.join(self.TEMPLATE_ARGS), callback_impl))
else:
py_callback = wrapper.declarations.declare_variable('PyObject*', self.name, 'NULL')
wrapper.parse_params.add_parameter('O', ['&'+py_callback], self.name, optional=True)
value = wrapper.declarations.declare_variable(
'ns3::Callback<%s>' % ', '.join(self.TEMPLATE_ARGS),
self.name+'_value',
self.default_value)
wrapper.before_call.write_code("if (%s) {" % (py_callback,))
wrapper.before_call.indent()
wrapper.before_call.write_error_check(
'!PyCallable_Check(%s)' % py_callback,
'PyErr_SetString(PyExc_TypeError, "parameter \'%s\' must be callbale");' % self.name)
wrapper.before_call.write_code("%s = ns3::Callback<%s> (ns3::Create<%s> (%s));"
% (value, ', '.join(self.TEMPLATE_ARGS),
self.PYTHON_CALLBACK_IMPL_NAME, py_callback))
wrapper.before_call.unindent()
wrapper.before_call.write_code("}") # closes: if (py_callback) {
wrapper.call_params.append(value)
def convert_c_to_python(self, wrapper):
raise typehandlers.NotSupportedError("Reverse wrappers for ns3::Callback<...> types "
"(python using callbacks defined in C++) not implemented.")
# def write_preamble(out):
# pybindgen.write_preamble(out)
# out.writeln("#include \"ns3/everything.h\"")
def Simulator_customizations(module):
Simulator = module['ns3::Simulator']
## Simulator::Schedule(delay, callback, ...user..args...)
Simulator.add_custom_method_wrapper("Schedule", "_wrap_Simulator_Schedule",
flags=["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"])
## Simulator::ScheduleNow(callback, ...user..args...)
Simulator.add_custom_method_wrapper("ScheduleNow", "_wrap_Simulator_ScheduleNow",
flags=["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"])
## Simulator::ScheduleDestroy(callback, ...user..args...)
Simulator.add_custom_method_wrapper("ScheduleDestroy", "_wrap_Simulator_ScheduleDestroy",
flags=["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"])
Simulator.add_custom_method_wrapper("Run", "_wrap_Simulator_Run",
flags=["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"])
def CommandLine_customizations(module):
CommandLine = module['ns3::CommandLine']
CommandLine.add_method('Parse', None, [ArgvParam(None, 'argv')],
is_static=False)
CommandLine.add_custom_method_wrapper("AddValue", "_wrap_CommandLine_AddValue",
flags=["METH_VARARGS", "METH_KEYWORDS"])
def Object_customizations(module):
## ---------------------------------------------------------------------
## Here we generate custom constructor code for all classes that
## derive from ns3::Object. The custom constructors are needed in
## order to support kwargs only and to translate kwargs into ns3
## attributes, etc.
## ---------------------------------------------------------------------
Object = module['ns3::Object']
## add a GetTypeId method to all generated helper classes
def helper_class_hook(helper_class):
decl = """
static ns3::TypeId GetTypeId (void)
{
static ns3::TypeId tid = ns3::TypeId ("%s")
.SetParent< %s > ()
;
return tid;
}""" % (helper_class.name, helper_class.class_.full_name)
helper_class.add_custom_method(decl)
helper_class.add_post_generation_code(
"NS_OBJECT_ENSURE_REGISTERED (%s);" % helper_class.name)
Object.add_helper_class_hook(helper_class_hook)
def ns3_object_instance_creation_function(cpp_class, code_block, lvalue,
parameters, construct_type_name):
assert lvalue
assert not lvalue.startswith('None')
if cpp_class.cannot_be_constructed:
raise CodeGenerationError("%s cannot be constructed (%s)"
% cpp_class.full_name)
if cpp_class.incomplete_type:
raise CodeGenerationError("%s cannot be constructed (incomplete type)"
% cpp_class.full_name)
code_block.write_code("%s = new %s(%s);" % (lvalue, construct_type_name, parameters))
code_block.write_code("%s->Ref ();" % (lvalue))
def ns3_object_post_instance_creation_function(cpp_class, code_block, lvalue,
parameters, construct_type_name):
code_block.write_code("ns3::CompleteConstruct(%s);" % (lvalue, ))
Object.set_instance_creation_function(ns3_object_instance_creation_function)
Object.set_post_instance_creation_function(ns3_object_post_instance_creation_function)
def Attribute_customizations(module):
# Fix up for the "const AttributeValue &v = EmptyAttribute()"
# case, as used extensively by helper classes.
# Here's why we need to do this: pybindgen.gccxmlscanner, when
# scanning parameter default values, is only provided with the
# value as a simple C expression string. (py)gccxml does not
# report the type of the default value.
# As a workaround, here we iterate over all parameters of all
# methods of all classes and tell pybindgen what is the type of
# the default value for attributes.
for cls in module.classes:
for meth in cls.get_all_methods():
for param in meth.parameters:
if isinstance(param, cppclass.CppClassRefParameter):
if param.cpp_class.name == 'AttributeValue' \
and param.default_value is not None \
and param.default_value_type is None:
param.default_value_type = 'ns3::EmptyAttributeValue'
def TypeId_customizations(module):
TypeId = module['ns3::TypeId']
TypeId.add_custom_method_wrapper("LookupByNameFailSafe", "_wrap_TypeId_LookupByNameFailSafe",
flags=["METH_VARARGS", "METH_KEYWORDS", "METH_STATIC"])
def add_std_ofstream(module):
module.add_include('<fstream>')
ostream = module.add_class('ostream', foreign_cpp_namespace='::std')
ostream.set_cannot_be_constructed("abstract base class")
ofstream = module.add_class('ofstream', foreign_cpp_namespace='::std', parent=ostream)
ofstream.add_enum('openmode', [
('app', 'std::ios_base::app'),
('ate', 'std::ios_base::ate'),
('binary', 'std::ios_base::binary'),
('in', 'std::ios_base::in'),
('out', 'std::ios_base::out'),
('trunc', 'std::ios_base::trunc'),
])
ofstream.add_constructor([Parameter.new("const char *", 'filename'),
Parameter.new("::std::ofstream::openmode", 'mode', default_value="std::ios_base::out")])
ofstream.add_method('close', None, [])
import pybindgen.typehandlers.base
for alias in "std::_Ios_Openmode", "std::ios::openmode":
pybindgen.typehandlers.base.param_type_matcher.add_type_alias(alias, "int")
for flag in 'in', 'out', 'ate', 'app', 'trunc', 'binary':
module.after_init.write_code('PyModule_AddIntConstant(m, (char *) "STD_IOS_%s", std::ios::%s);'
% (flag.upper(), flag))
def add_ipv4_address_tp_hash(module):
module.body.writeln('''
long
_ns3_Ipv4Address_tp_hash (PyObject *obj)
{
PyNs3Ipv4Address *addr = reinterpret_cast<PyNs3Ipv4Address *> (obj);
return static_cast<long> (ns3::Ipv4AddressHash () (*addr->obj));
}
''')
module.header.writeln('long _ns3_Ipv4Address_tp_hash (PyObject *obj);')
module['Ipv4Address'].pytype.slots['tp_hash'] = "_ns3_Ipv4Address_tp_hash"
|
spulec/freezegun
|
refs/heads/master
|
tests/test_configure.py
|
1
|
from unittest import mock
import freezegun
import freezegun.config
def setup_function():
freezegun.config.reset_config()
def teardown_function():
freezegun.config.reset_config()
def test_default_ignore_list_is_overridden():
freezegun.configure(default_ignore_list=['threading', 'tensorflow'])
with mock.patch("freezegun.api._freeze_time.__init__", return_value=None) as _freeze_time_init_mock:
freezegun.freeze_time("2020-10-06")
expected_ignore_list = [
'threading',
'tensorflow',
]
_freeze_time_init_mock.assert_called_once_with(
time_to_freeze_str="2020-10-06",
tz_offset=0,
ignore=expected_ignore_list,
tick=False,
as_arg=False,
as_kwarg='',
auto_tick_seconds=0,
)
def test_extend_default_ignore_list():
freezegun.configure(extend_ignore_list=['tensorflow'])
with mock.patch("freezegun.api._freeze_time.__init__", return_value=None) as _freeze_time_init_mock:
freezegun.freeze_time("2020-10-06")
expected_ignore_list = [
'nose.plugins',
'six.moves',
'django.utils.six.moves',
'google.gax',
'threading',
'Queue',
'selenium',
'_pytest.terminal.',
'_pytest.runner.',
'gi',
'tensorflow',
]
_freeze_time_init_mock.assert_called_once_with(
time_to_freeze_str="2020-10-06",
tz_offset=0,
ignore=expected_ignore_list,
tick=False,
as_arg=False,
as_kwarg='',
auto_tick_seconds=0,
)
|
cwurld/django-phonegap
|
refs/heads/master
|
django_phonegap/data_port/serializers.py
|
1
|
__author__ = 'Chuck Martin'
from rest_framework import serializers
from models import Message
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = Message
exclude = ('user',)
|
sunbenxin/python-script
|
refs/heads/master
|
ask_ok.py
|
1
|
#!/usr/bin/python
def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
while True:
ok = raw_input(prompt)
if ok in ('y', 'ye', 'yes'):
return True
if ok in ('n', 'no', 'nop', 'nope'):
return False
retries = retries - 1
if retries < 0:
raise IOError('refusenik user')
print complaint
if __name__ == '__main__':
pass
print __name__
else:
ask_ok('Do you really want to quit?')
ask_ok('OK to overwrite the file?', 2)
ask_ok('OK to overwrite the file?', 2, 'Come on, only yes or no!')
|
BrandonY/python-docs-samples
|
refs/heads/master
|
speech/cloud-client/transcribe_streaming.py
|
1
|
#!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Speech API sample application using the streaming API.
Example usage:
python transcribe_streaming.py resources/audio.raw
"""
# [START import_libraries]
import argparse
import io
# [END import_libraries]
# [START def_transcribe_streaming]
def transcribe_streaming(stream_file):
"""Streams transcription of the given audio file."""
from google.cloud import speech
from google.cloud.speech import enums
from google.cloud.speech import types
client = speech.SpeechClient()
# [START migration_streaming_request]
with io.open(stream_file, 'rb') as audio_file:
content = audio_file.read()
# In practice, stream should be a generator yielding chunks of audio data.
stream = [content]
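    # Illustrative sketch only (not part of the original sample): a real
    # client would usually feed smaller chunks instead of one request, e.g.
    #   def chunked(data, size=32 * 1024):
    #       for i in range(0, len(data), size):
    #           yield data[i:i + size]
    #   stream = chunked(content)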
requests = (types.StreamingRecognizeRequest(audio_content=chunk)
for chunk in stream)
config = types.RecognitionConfig(
encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,
sample_rate_hertz=16000,
language_code='en-US')
streaming_config = types.StreamingRecognitionConfig(config=config)
# streaming_recognize returns a generator.
# [START migration_streaming_response]
responses = client.streaming_recognize(streaming_config, requests)
# [END migration_streaming_request]
for response in responses:
for result in response.results:
print('Finished: {}'.format(result.is_final))
print('Stability: {}'.format(result.stability))
alternatives = result.alternatives
for alternative in alternatives:
print('Confidence: {}'.format(alternative.confidence))
print('Transcript: {}'.format(alternative.transcript))
# [END migration_streaming_response]
# [END def_transcribe_streaming]
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('stream', help='File to stream to the API')
args = parser.parse_args()
transcribe_streaming(args.stream)
|
MalloyPower/parsing-python
|
refs/heads/master
|
front-end/testsuite-python-lib/Python-2.2/Lib/test/test_ucn.py
|
15
|
""" Test script for the Unicode implementation.
Written by Bill Tutt.
Modified for Python 2.0 by Fredrik Lundh (fredrik@pythonware.com)
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""#"
from test_support import verify, verbose
print 'Testing General Unicode Character Name, and case insensitivity...',
# General and case insensitivity test:
try:
# put all \N escapes inside exec'd raw strings, to make sure this
# script runs even if the compiler chokes on \N escapes
exec r"""
s = u"\N{LATIN CAPITAL LETTER T}" \
u"\N{LATIN SMALL LETTER H}" \
u"\N{LATIN SMALL LETTER E}" \
u"\N{SPACE}" \
u"\N{LATIN SMALL LETTER R}" \
u"\N{LATIN CAPITAL LETTER E}" \
u"\N{LATIN SMALL LETTER D}" \
u"\N{SPACE}" \
u"\N{LATIN SMALL LETTER f}" \
u"\N{LATIN CAPITAL LeTtEr o}" \
u"\N{LATIN SMaLl LETTER x}" \
u"\N{SPACE}" \
u"\N{LATIN SMALL LETTER A}" \
u"\N{LATIN SMALL LETTER T}" \
u"\N{LATIN SMALL LETTER E}" \
u"\N{SPACE}" \
u"\N{LATIN SMALL LETTER T}" \
u"\N{LATIN SMALL LETTER H}" \
u"\N{LATIN SMALL LETTER E}" \
u"\N{SpAcE}" \
u"\N{LATIN SMALL LETTER S}" \
u"\N{LATIN SMALL LETTER H}" \
u"\N{LATIN SMALL LETTER E}" \
u"\N{LATIN SMALL LETTER E}" \
u"\N{LATIN SMALL LETTER P}" \
u"\N{FULL STOP}"
verify(s == u"The rEd fOx ate the sheep.", s)
"""
except UnicodeError, v:
print v
print "done."
import unicodedata
print "Testing name to code mapping....",
for char in "SPAM":
name = "LATIN SMALL LETTER %s" % char
code = unicodedata.lookup(name)
verify(unicodedata.name(code) == name)
print "done."
print "Testing code to name mapping for all characters....",
count = 0
for code in range(65536):
try:
char = unichr(code)
name = unicodedata.name(char)
verify(unicodedata.lookup(name) == char)
count += 1
except (KeyError, ValueError):
pass
print "done."
print "Found", count, "characters in the unicode name database"
# misc. symbol testing
print "Testing misc. symbols for unicode character name expansion....",
exec r"""
verify(u"\N{PILCROW SIGN}" == u"\u00b6")
verify(u"\N{REPLACEMENT CHARACTER}" == u"\uFFFD")
verify(u"\N{HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK}" == u"\uFF9F")
verify(u"\N{FULLWIDTH LATIN SMALL LETTER A}" == u"\uFF41")
"""
print "done."
# strict error testing:
print "Testing unicode character name expansion strict error handling....",
try:
unicode("\N{blah}", 'unicode-escape', 'strict')
except UnicodeError:
pass
else:
raise AssertionError, "failed to raise an exception when given a bogus character name"
try:
unicode("\N{" + "x" * 100000 + "}", 'unicode-escape', 'strict')
except UnicodeError:
pass
else:
raise AssertionError, "failed to raise an exception when given a very " \
"long bogus character name"
try:
unicode("\N{SPACE", 'unicode-escape', 'strict')
except UnicodeError:
pass
else:
raise AssertionError, "failed to raise an exception for a missing closing brace."
try:
unicode("\NSPACE", 'unicode-escape', 'strict')
except UnicodeError:
pass
else:
raise AssertionError, "failed to raise an exception for a missing opening brace."
print "done."
|
rynkowsg/hamster
|
refs/heads/master
|
src/hamster/lib/stuff.py
|
2
|
# - coding: utf-8 -
# Copyright (C) 2008-2010 Toms Bauģis <toms.baugis at gmail.com>
# This file is part of Project Hamster.
# Project Hamster is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Project Hamster is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Project Hamster. If not, see <http://www.gnu.org/licenses/>.
# some widgets that repeat all over the place
# cells, columns, trees and other
import logging
import gtk
import pango
from pango import ELLIPSIZE_END
from itertools import groupby
import datetime as dt
import calendar
import time
import re
import locale
import os
def format_duration(minutes, human = True):
"""formats duration in a human readable format.
accepts either minutes or timedelta"""
if isinstance(minutes, dt.timedelta):
minutes = duration_minutes(minutes)
if not minutes:
if human:
return ""
else:
return "00:00"
hours = minutes / 60
minutes = minutes % 60
formatted_duration = ""
if human:
if minutes % 60 == 0:
# duration in round hours
formatted_duration += _("%dh") % (hours)
elif hours == 0:
# duration less than hour
formatted_duration += _("%dmin") % (minutes % 60.0)
else:
# x hours, y minutes
formatted_duration += _("%dh %dmin") % (hours, minutes % 60)
else:
formatted_duration += "%02d:%02d" % (hours, minutes)
return formatted_duration
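# Illustrative behaviour sketch (example values only, not from the original
# module):
#   format_duration(75)               -> "1h 15min"
#   format_duration(120)              -> "2h"
#   format_duration(45, human=False)  -> "00:45"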
def format_range(start_date, end_date):
dates_dict = dateDict(start_date, "start_")
dates_dict.update(dateDict(end_date, "end_"))
if start_date == end_date:
# label of date range if looking on single day
# date format for overview label when only single day is visible
# Using python datetime formatting syntax. See:
# http://docs.python.org/library/time.html#time.strftime
title = start_date.strftime(_("%B %d, %Y"))
elif start_date.year != end_date.year:
# label of date range if start and end years don't match
# letter after prefixes (start_, end_) is the one of
# standard python date formatting ones- you can use all of them
# see http://docs.python.org/library/time.html#time.strftime
title = _(u"%(start_B)s %(start_d)s, %(start_Y)s – %(end_B)s %(end_d)s, %(end_Y)s") % dates_dict
elif start_date.month != end_date.month:
# label of date range if start and end month do not match
# letter after prefixes (start_, end_) is the one of
# standard python date formatting ones- you can use all of them
# see http://docs.python.org/library/time.html#time.strftime
title = _(u"%(start_B)s %(start_d)s – %(end_B)s %(end_d)s, %(end_Y)s") % dates_dict
else:
# label of date range for interval in same month
# letter after prefixes (start_, end_) is the one of
# standard python date formatting ones- you can use all of them
# see http://docs.python.org/library/time.html#time.strftime
title = _(u"%(start_B)s %(start_d)s – %(end_d)s, %(end_Y)s") % dates_dict
return title
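# Illustrative example (assumed dates): format_range(dt.date(2010, 1, 1),
# dt.date(2010, 1, 15)) falls into the same-month branch and yields roughly
# u"January 01 – 15, 2010", subject to the active locale and translations.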
def week(view_date):
# aligns start and end date to week
start_date = view_date - dt.timedelta(view_date.weekday() + 1)
start_date = start_date + dt.timedelta(locale_first_weekday())
end_date = start_date + dt.timedelta(6)
return start_date, end_date
def month(view_date):
# aligns start and end date to month
start_date = view_date - dt.timedelta(view_date.day - 1) #set to beginning of month
first_weekday, days_in_month = calendar.monthrange(view_date.year, view_date.month)
end_date = start_date + dt.timedelta(days_in_month - 1)
return start_date, end_date
def duration_minutes(duration):
"""returns minutes from duration, otherwise we keep bashing in same math"""
if isinstance(duration, list):
res = dt.timedelta()
for entry in duration:
res += entry
return duration_minutes(res)
elif isinstance(duration, dt.timedelta):
return duration.seconds / 60 + duration.days * 24 * 60
else:
return duration
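# Illustrative examples (assumed inputs, not from the original source):
#   duration_minutes(dt.timedelta(hours=1, minutes=30)) == 90
#   duration_minutes([dt.timedelta(minutes=20), dt.timedelta(minutes=25)]) == 45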
def zero_hour(date):
return dt.datetime.combine(date.date(), dt.time(0,0))
# it seems that python or something has bug of sorts, that breaks stuff for
# japanese locale, so we have this locale from and to ut8 magic in some places
# see bug 562298
def locale_from_utf8(utf8_str):
try:
retval = unicode (utf8_str, "utf-8").encode(locale.getpreferredencoding())
except:
retval = utf8_str
return retval
def locale_to_utf8(locale_str):
try:
retval = unicode (locale_str, locale.getpreferredencoding()).encode("utf-8")
except:
retval = locale_str
return retval
def locale_first_weekday():
"""figure if week starts on monday or sunday"""
first_weekday = 6 #by default settle on monday
try:
process = os.popen("locale first_weekday week-1stday")
week_offset, week_start = process.read().split('\n')[:2]
process.close()
week_start = dt.date(*time.strptime(week_start, "%Y%m%d")[:3])
week_offset = dt.timedelta(int(week_offset) - 1)
beginning = week_start + week_offset
first_weekday = int(beginning.strftime("%w"))
except:
logging.warn("WARNING - Failed to get first weekday from locale")
return first_weekday
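# Illustrative note (assumed output for a typical en_US locale): the
# `locale first_weekday week-1stday` call above prints two bare lines such as
#   1
#   19971130
# which the parsing above turns into beginning == 1997-11-30 (a Sunday),
# i.e. first_weekday == 0.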
def totals(iter, keyfunc, sumfunc):
"""groups items by field described in keyfunc and counts totals using value
from sumfunc
"""
data = sorted(iter, key=keyfunc)
res = {}
for k, group in groupby(data, keyfunc):
res[k] = sum([sumfunc(entry) for entry in group])
return res
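# Illustrative example (assumed data, not from the original source):
#   totals([("a", 1), ("a", 2), ("b", 3)],
#          keyfunc=lambda e: e[0],
#          sumfunc=lambda e: e[1]) == {"a": 3, "b": 3}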
def dateDict(date, prefix = ""):
"""converts date into dictionary, having prefix for all the keys"""
res = {}
res[prefix+"a"] = date.strftime("%a")
res[prefix+"A"] = date.strftime("%A")
res[prefix+"b"] = date.strftime("%b")
res[prefix+"B"] = date.strftime("%B")
res[prefix+"c"] = date.strftime("%c")
res[prefix+"d"] = date.strftime("%d")
res[prefix+"H"] = date.strftime("%H")
res[prefix+"I"] = date.strftime("%I")
res[prefix+"j"] = date.strftime("%j")
res[prefix+"m"] = date.strftime("%m")
res[prefix+"M"] = date.strftime("%M")
res[prefix+"p"] = date.strftime("%p")
res[prefix+"S"] = date.strftime("%S")
res[prefix+"U"] = date.strftime("%U")
res[prefix+"w"] = date.strftime("%w")
res[prefix+"W"] = date.strftime("%W")
res[prefix+"x"] = date.strftime("%x")
res[prefix+"X"] = date.strftime("%X")
res[prefix+"y"] = date.strftime("%y")
res[prefix+"Y"] = date.strftime("%Y")
res[prefix+"Z"] = date.strftime("%Z")
for i, value in res.items():
res[i] = locale_to_utf8(value)
return res
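# Illustrative example (assumed date): dateDict(dt.datetime(2010, 1, 15),
# "start_")["start_Y"] == "2010"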
def escape_pango(text):
if not text:
return text
text = text.replace ("&", "&")
text = text.replace("<", "<")
text = text.replace(">", ">")
return text
|
pombredanne/pywb
|
refs/heads/master
|
pywb/perms/perms_handler.py
|
1
|
from pywb.utils.canonicalize import UrlCanonicalizer
from pywb.utils.wbexception import NotFoundException
from pywb.framework.basehandlers import WbUrlHandler
from pywb.framework.archivalrouter import ArchivalRouter, Route
from pywb.framework.wbrequestresponse import WbResponse
BLOCK = '["block"]'
ALLOW = '["allow"]'
RESPONSE_TYPE = 'application/json'
NOT_FOUND = 'Please specify a url to check for access'
#=================================================================
class PermsHandler(WbUrlHandler):
def __init__(self, perms_policy, url_canon):
self.perms_policy = perms_policy
self.url_canon = url_canon
def __call__(self, wbrequest):
perms_checker = self.perms_policy(wbrequest)
if wbrequest.wb_url:
return self.check_single_url(wbrequest, perms_checker)
# elif wbrequest.env['REQUEST_METHOD'] == 'POST':
# return self.check_bulk(wbrequest, perms_checker)
else:
raise NotFoundException(NOT_FOUND)
def check_single_url(self, wbrequest, perms_checker):
urlkey = self.url_canon(wbrequest.wb_url.url)
urlkey = urlkey.encode('utf-8')
if not perms_checker.allow_url_lookup(urlkey):
response_text = BLOCK
else:
response_text = ALLOW
#TODO: other types of checking
return WbResponse.text_response(response_text,
content_type=RESPONSE_TYPE)
#TODO
# def check_bulk_urls(self, wbrequest, perms_checker):
# pass
#
#=================================================================
def create_perms_checker_app(config):
"""
Create permissions checker standalone app
Running under the '/check-access' route
"""
port = config.get('port')
perms_policy = config.get('perms_policy')
canonicalizer = UrlCanonicalizer(config.get('surt_ordered', True))
handler = PermsHandler(perms_policy, canonicalizer)
routes = [Route('check-access', handler)]
return ArchivalRouter(routes, port=port)
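# Minimal usage sketch; the keys mirror the config.get() calls above, while
# the concrete values (port, policy callable) are assumptions:
#   app = create_perms_checker_app({'port': 8080,
#                                   'perms_policy': my_perms_policy,
#                                   'surt_ordered': True})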
|
JING-TIME/ustc-course
|
refs/heads/master
|
app/__init__.py
|
1
|
"""
ustc courses
:copyright: (c) 2015 by the USTC-Courses Team.
"""
import os
from flask import Flask,request
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager,current_user,user_logged_in,user_loaded_from_cookie
from flask_wtf.csrf import CSRFProtect
from flask_babel import Babel
from datetime import datetime
from flask_debugtoolbar import DebugToolbarExtension
app = Flask(__name__)
app.config.from_object('config.default')
toolbar = DebugToolbarExtension(app)
db = SQLAlchemy(app)
app.csrf = CSRFProtect(app)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'home.signin'
babel = Babel(app)
@babel.localeselector
def get_locale():
return request.accept_languages.best_match(app.config['LANGUAGES'].keys())
def log_login(app,user):
'''update the last login time of the user'''
user.last_login_time = datetime.utcnow()
db.session.commit()
user_logged_in.connect(log_login)
user_loaded_from_cookie.connect(log_login)
from app.views import *
app.register_blueprint(home,url_prefix='')
app.register_blueprint(course,url_prefix='/course')
app.register_blueprint(review, url_prefix='/review')
app.register_blueprint(api, url_prefix='/api')
app.register_blueprint(user, url_prefix='/user')
app.register_blueprint(teacher, url_prefix='/teacher')
|
alex/logbook
|
refs/heads/master
|
benchmark/bench_redirect_to_logging.py
|
30
|
"""Tests redirects from logging to logbook"""
from logging import getLogger, StreamHandler
from logbook.compat import LoggingHandler
from cStringIO import StringIO
log = getLogger('Test logger')
def run():
out = StringIO()
log.addHandler(StreamHandler(out))
with LoggingHandler():
for x in xrange(500):
log.warning('this is not handled')
assert out.getvalue().count('\n') == 500
|
spartonia/django-oscar
|
refs/heads/master
|
src/oscar/apps/search/signals.py
|
28
|
from django.dispatch import Signal
user_search = Signal(providing_args=["session_id", "user", "query"])
|
norayr/unisubs
|
refs/heads/staging
|
apps/auth/migrations/0028_auto__del_field_customuser_partner.py
|
5
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'CustomUser.partner'
db.delete_column('auth_customuser', 'partner')
def backwards(self, orm):
# Adding field 'CustomUser.partner'
db.add_column('auth_customuser', 'partner', self.gf('django.db.models.fields.CharField')(blank=True, max_length=32, null=True, db_index=True), keep_default=False)
models = {
'accountlinker.thirdpartyaccount': {
'Meta': {'unique_together': "(('type', 'username'),)", 'object_name': 'ThirdPartyAccount'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'oauth_access_token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'oauth_refresh_token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'auth.announcement': {
'Meta': {'object_name': 'Announcement'},
'content': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'auth.awards': {
'Meta': {'object_name': 'Awards'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {}),
'type': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True'})
},
'auth.customuser': {
'Meta': {'object_name': 'CustomUser', '_ormbases': ['auth.User']},
'autoplay_preferences': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'award_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'can_send_messages': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'is_partner': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'notify_by_email': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'notify_by_message': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'partner_fk': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Partner']", 'null': 'True', 'blank': 'True'}),
'picture': ('utils.amazon.fields.S3EnabledImageField', [], {'thumb_options': "{'upscale': True, 'crop': 'smart'}", 'max_length': '100', 'blank': 'True'}),
'preferred_language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'valid_email': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.emailconfirmation': {
'Meta': {'object_name': 'EmailConfirmation'},
'confirmation_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sent': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.logintoken': {
'Meta': {'object_name': 'LoginToken'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'login_token'", 'unique': 'True', 'to': "orm['auth.CustomUser']"})
},
'auth.message': {
'Meta': {'object_name': 'Message'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_message_set'", 'to': "orm['auth.User']"})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'auth.userlanguage': {
'Meta': {'unique_together': "(['user', 'language'],)", 'object_name': 'UserLanguage'},
'follow_requests': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'proficiency': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'teams.application': {
'Meta': {'unique_together': "(('team', 'user', 'status'),)", 'object_name': 'Application'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_applications'", 'to': "orm['auth.CustomUser']"})
},
'teams.partner': {
'Meta': {'object_name': 'Partner'},
'can_request_paid_captions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'teams.project': {
'Meta': {'unique_together': "(('team', 'name'), ('team', 'slug'))", 'object_name': 'Project'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'guidelines': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'workflow_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'teams.team': {
'Meta': {'object_name': 'Team'},
'applicants': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'applicated_teams'", 'symmetrical': 'False', 'through': "orm['teams.Application']", 'to': "orm['auth.CustomUser']"}),
'application_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'auth_provider_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'header_html_text': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'highlight': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'last_notification_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'logo': ('utils.amazon.fields.S3EnabledImageField', [], {'thumb_options': "{'upscale': True, 'autocrop': True}", 'max_length': '100', 'blank': 'True'}),
'max_tasks_per_member': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'membership_policy': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
'page_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'teams'", 'null': 'True', 'to': "orm['teams.Partner']"}),
'points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'projects_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'subtitle_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'task_assign_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'task_expiration': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'third_party_accounts': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'tseams'", 'symmetrical': 'False', 'to': "orm['accountlinker.ThirdPartyAccount']"}),
'translate_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'through': "orm['teams.TeamMember']", 'to': "orm['auth.CustomUser']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'intro_for_teams'", 'null': 'True', 'to': "orm['videos.Video']"}),
'video_policy': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'through': "orm['teams.TeamVideo']", 'symmetrical': 'False'}),
'workflow_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'teams.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'default': "'contributor'", 'max_length': '16', 'db_index': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'members'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_members'", 'to': "orm['auth.CustomUser']"})
},
'teams.teamvideo': {
'Meta': {'unique_together': "(('team', 'video'),)", 'object_name': 'TeamVideo'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'all_languages': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'completed_languages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.SubtitleLanguage']", 'symmetrical': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'partner_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Project']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'thumb_options': "{'upscale': True, 'crop': 'smart'}", 'null': 'True', 'thumb_sizes': '((290, 165), (120, 90))', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'video': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['videos.Video']", 'unique': 'True'})
},
'videos.subtitlelanguage': {
'Meta': {'unique_together': "(('video', 'language', 'standard_language'),)", 'object_name': 'SubtitleLanguage'},
'created': ('django.db.models.fields.DateTimeField', [], {}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_languages'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
'had_version': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'has_version': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_forked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_original': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'percent_done': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'standard_language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']", 'null': 'True', 'blank': 'True'}),
'subtitle_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subtitles_fetched_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'videos.video': {
'Meta': {'object_name': 'Video'},
'allow_community_edits': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'allow_video_urls_edit': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'complete_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'edited': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'featured': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_videos'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'languages_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'moderated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'moderating'", 'null': 'True', 'to': "orm['teams.Team']"}),
's3_thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'thumb_options': "{'upscale': True, 'crop': 'smart'}", 'max_length': '100', 'thumb_sizes': '((290, 165), (120, 90))', 'blank': 'True'}),
'small_thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'subtitles_fetched_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'video_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'was_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'widget_views_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'writelock_owners'", 'null': 'True', 'to': "orm['auth.CustomUser']"}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
}
}
complete_apps = ['auth']
|
flgiordano/netcash
|
refs/heads/master
|
+/google-cloud-sdk/lib/surface/compute/operations/list.py
|
1
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for listing operations."""
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import constants
from googlecloudsdk.api_lib.compute import request_helper
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
def AddFlags(parser, is_ga):
"""Helper function for adding flags dependant on the release track."""
base_classes.BaseLister.Args(parser)
if is_ga:
scope = parser.add_mutually_exclusive_group()
scope.add_argument(
'--zones',
metavar='ZONE',
help=('If provided, only zonal resources are shown. '
'If arguments are provided, only resources from the given '
'zones are shown.'),
type=arg_parsers.ArgList(),
action=arg_parsers.FloatingListValuesCatcher(switch_value=[]))
scope.add_argument(
'--regions',
metavar='REGION',
help=('If provided, only regional resources are shown. '
'If arguments are provided, only resources from the given '
'regions are shown.'),
type=arg_parsers.ArgList(),
action=arg_parsers.FloatingListValuesCatcher(switch_value=[]))
scope.add_argument(
'--global',
action='store_true',
help='If provided, only global resources are shown.',
default=False)
else:
parser.add_argument(
'--zones',
metavar='ZONE',
help=('If arguments are provided, only resources from the given '
'zones are shown. If no arguments are provided all zonal '
'operations are shown.'),
type=arg_parsers.ArgList(),
action=arg_parsers.FloatingListValuesCatcher(switch_value=[]))
parser.add_argument(
'--regions',
metavar='REGION',
help=('If arguments are provided, only resources from the given '
'regions are shown. If no arguments are provided all regional '
'operations are shown.'),
type=arg_parsers.ArgList(),
action=arg_parsers.FloatingListValuesCatcher(switch_value=[]))
parser.add_argument(
'--global',
action='store_true',
help='If provided, all global resources are shown.',
default=False)
parser.add_argument(
'--accounts',
action='store_true',
help='If provided, all accounts resources are shown.',
default=False)
@base.ReleaseTracks(base.ReleaseTrack.GA)
class ListGA(base_classes.BaseLister):
"""List Google Compute Engine operations."""
def __init__(self, *args, **kwargs):
super(ListGA, self).__init__(*args, **kwargs)
self._ga = True
@staticmethod
def Args(parser):
AddFlags(parser, True)
@property
def global_service(self):
return self.compute.globalOperations
@property
def regional_service(self):
return self.compute.regionOperations
@property
def zonal_service(self):
return self.compute.zoneOperations
@property
def account_service(self):
return self.clouduseraccounts.globalAccountsOperations
@property
def resource_type(self):
return 'operations'
@property
def allowed_filtering_types(self):
return ['globalOperations', 'regionOperations', 'zoneOperations']
def NoArguments(self, args):
"""Determine if the user provided any flags indicating scope."""
no_compute_args = (args.zones is None and args.regions is None and
not getattr(args, 'global'))
if self._ga:
return no_compute_args
else:
return no_compute_args and not args.accounts
def GetResources(self, args, errors):
"""Yields zonal, regional, and/or global resources."""
# This is True if the user provided no flags indicating scope.
no_scope_flags = self.NoArguments(args)
requests = []
filter_expr = self.GetFilterExpr(args)
max_results = constants.MAX_RESULTS_PER_PAGE
project = self.project
# TODO(user): Start using aggregatedList for zones and regions when the
# operations list API supports them.
if no_scope_flags:
requests.append(
(self.global_service,
'AggregatedList',
self.global_service.GetRequestType('AggregatedList')(
filter=filter_expr,
maxResults=max_results,
project=project)))
if not self._ga:
# Add a request to get all Compute Account operations.
requests.append(
(self.account_service,
'List',
self.account_service.GetRequestType('List')(
filter=filter_expr,
maxResults=max_results,
project=project)))
else:
if getattr(args, 'global'):
requests.append(
(self.global_service,
'List',
self.global_service.GetRequestType('List')(
filter=filter_expr,
maxResults=max_results,
project=project)))
if args.regions is not None:
args_region_names = [
self.CreateGlobalReference(region, resource_type='regions').Name()
for region in args.regions or []]
# If no regions were provided by the user, fetch a list.
region_names = (
args_region_names or [res.name for res in self.FetchChoiceResources(
attribute='region',
service=self.compute.regions,
flag_names=['--regions'])])
for region_name in region_names:
requests.append(
(self.regional_service,
'List',
self.regional_service.GetRequestType('List')(
filter=filter_expr,
maxResults=constants.MAX_RESULTS_PER_PAGE,
region=region_name,
project=self.project)))
if args.zones is not None:
args_zone_names = [
self.CreateGlobalReference(zone, resource_type='zones').Name()
for zone in args.zones or []]
# If no zones were provided by the user, fetch a list.
zone_names = (
args_zone_names or [res.name for res in self.FetchChoiceResources(
attribute='zone',
service=self.compute.zones,
flag_names=['--zones'])])
for zone_name in zone_names:
requests.append(
(self.zonal_service,
'List',
self.zonal_service.GetRequestType('List')(
filter=filter_expr,
maxResults=constants.MAX_RESULTS_PER_PAGE,
zone=zone_name,
project=self.project)))
if not self._ga and args.accounts:
requests.append(
(self.account_service,
'List',
self.account_service.GetRequestType('List')(
filter=filter_expr,
maxResults=max_results,
project=project)))
return request_helper.MakeRequests(
requests=requests,
http=self.http,
batch_url=self.batch_url,
errors=errors,
custom_get_requests=None)
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA)
class ListBeta(ListGA):
"""List Google Compute Engine operations."""
def __init__(self, *args, **kwargs):
super(ListBeta, self).__init__(*args, **kwargs)
self._ga = False
@staticmethod
def Args(parser):
AddFlags(parser, False)
ListGA.detailed_help = base_classes.GetGlobalRegionalListerHelp('operations')
ListBeta.detailed_help = {
'brief': 'List Google Compute Engine operations',
'DESCRIPTION': """\
*{command}* displays all Google Compute Engine operations in a
project.
By default, all global, regional, zonal and Compute Accounts operations
are listed. The results can be narrowed by providing combinations of
the --zones, --regions, --global and --accounts flags.
""",
'EXAMPLES': """\
To list all operations in a project in table form, run:
$ {command}
To list the URIs of all operations in a project, run:
$ {command} --uri
To list all operations in zones us-central1-b and
europe-west1-d, run:
$ {command} --zones us-central1-b,europe-west1-d
To list all global operations in a project, run:
$ {command} --global
To list all regional operations in a project, run:
$ {command} --regions ""
To list all operations in the us-central1 and europe-west1
regions and all operations in the us-central1-a zone, run:
$ {command} --zones us-central1-a --regions us-central1,europe-west1
To list all Compute Accounts operations, run:
$ {command} --accounts
""",
}
|