| repo_name (stringlengths 5–100) | ref (stringlengths 12–67) | path (stringlengths 4–244) | copies (stringlengths 1–8) | content (stringlengths 0–1.05M ⌀) |
|---|---|---|---|---|
RPGOne/Skynet
|
refs/heads/Miho
|
scikit-learn-c604ac39ad0e5b066d964df3e8f31ba7ebda1e0e/sklearn/linear_model/tests/__init__.py
|
12133432
| |
sipwise/repoapi
|
refs/heads/master
|
panel/__init__.py
|
12133432
| |
FCP-INDI/nipype
|
refs/heads/master
|
nipype/interfaces/dipy/tests/__init__.py
|
12133432
| |
mdaniel/intellij-community
|
refs/heads/master
|
python/helpers/tests/generator3_tests/data/SkeletonGeneration/cache_skeleton_generated_and_reused_when_sdk_skeleton_is_outdated/mod.py
|
12133432
| |
sertac/django
|
refs/heads/master
|
tests/migrations2/test_migrations_2_first/__init__.py
|
12133432
| |
kwantopia/shoppley-migrate
|
refs/heads/master
|
shoppley.com/shoppley/apps/shoppleyuser/__init__.py
|
12133432
| |
ruslanloman/nova
|
refs/heads/master
|
nova/db/sqlalchemy/api_migrations/migrate_repo/versions/__init__.py
|
12133432
| |
the-invoice/smart2onyma
|
refs/heads/master
|
smart2onyma/__init__.py
|
12133432
| |
iabdalkader/micropython
|
refs/heads/master
|
tests/basics/async_def.py
|
54
|
# test async def

def dec(f):
    print('decorator')
    return f

# test definition with a decorator
@dec
async def foo():
    print('foo')

coro = foo()
try:
    coro.send(None)
except StopIteration:
    print('StopIteration')
|
darjus-amzn/boto
|
refs/heads/develop
|
boto/cloudfront/signers.py
|
170
|
# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class Signer(object):
    def __init__(self):
        self.id = None
        self.key_pair_ids = []

    def startElement(self, name, attrs, connection):
        return None

    def endElement(self, name, value, connection):
        if name == 'Self':
            self.id = 'Self'
        elif name == 'AwsAccountNumber':
            self.id = value
        elif name == 'KeyPairId':
            self.key_pair_ids.append(value)


class ActiveTrustedSigners(list):
    def startElement(self, name, attrs, connection):
        if name == 'Signer':
            s = Signer()
            self.append(s)
            return s

    def endElement(self, name, value, connection):
        pass


class TrustedSigners(list):
    def startElement(self, name, attrs, connection):
        return None

    def endElement(self, name, value, connection):
        if name == 'Self':
            self.append(name)
        elif name == 'AwsAccountNumber':
            self.append(value)
|
peragro/peragro-index
|
refs/heads/master
|
tests/__init__.py
|
2
|
"""The tests collector used in setup.py"""
from __future__ import absolute_import
import os
import unittest
def suite():
"""Return a list of tests"""
loader = unittest.TestLoader()
directory = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')
return loader.discover(directory, 'test_*.py')
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
|
edwinksl/edwinksl.github.io
|
refs/heads/source
|
publishconf.py
|
1
|
#!/usr/bin/env python
# This file is only used if you use `make publish` or explicitly specify it as your config file
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.edwinksl.com'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = 'edwinksl'
# GOOGLE_ANALYTICS = ''
GOOGLE_ANALYTICS_UNIVERSAL = 'UA-100873462-1'
GOOGLE_ANALYTICS_UNIVERSAL_PROPERTY = 'auto'
|
jeffrimko/PopPage
|
refs/heads/master
|
tests/testlib.py
|
1
|
"""Provides a library to aid testig."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
import os
import os.path as op
import random
from string import ascii_uppercase
import subprocess
import sys
import unittest
from time import sleep
from auxly.filesys import Cwd, File, delete, makedirs, countfiles
import auxly.shell as sh
# Allows development version of library to be used instead of installed.
appdir = op.normpath(op.join(op.abspath(op.dirname(__file__)), r"../app"))
sys.path.insert(0, appdir)
##==============================================================#
## SECTION: Global Definitions #
##==============================================================#
OUTDIR = "./__output__"
OUTFILE = OUTDIR + "/outfile.txt"
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class BaseTest(unittest.TestCase):
    def setUp(test):
        makedirs(OUTDIR)

    def tearDown(test):
        super(BaseTest, test).tearDown()
        while op.exists("./__output__"):
            # NOTE: This is a hacky fix to avoid Dropbox related error.
            try: delete("./__output__")
            except: pass
##==============================================================#
## SECTION: Function Definitions #
##==============================================================#
#: Random uppercase string of length x.
getrands = lambda x: "".join(random.choice(ascii_uppercase) for _ in range(x))
def get_args():
    """Returns a default utility args dictionary before parsing."""
    args = {}
    args['--inpath'] = None
    args['--outpath'] = None
    args['--defaults'] = None
    args['--keysep'] = "::"
    args['--string'] = []
    args['--file'] = []
    args['VAL'] = []
    args['PATH'] = []
    return args

def call(args, app_path="../app"):
    """Call PopPage as a CLI utility."""
    cmd = "python %s/poppage.py " % (app_path) + args
    print(cmd)
    return sh.call(cmd)
|
Voluntarynet/BitmessageKit
|
refs/heads/master
|
BitmessageKit/Vendor/static-python/Lib/plat-mac/lib-scriptpackages/Explorer/__init__.py
|
73
|
"""
Package generated from /Applications/Internet Explorer.app
"""
from warnings import warnpy3k
warnpy3k("In 3.x, the Explorer module is removed.", stacklevel=2)
import aetools
Error = aetools.Error
import Standard_Suite
import URL_Suite
import Netscape_Suite
import Microsoft_Internet_Explorer
import Web_Browser_Suite
import Required_Suite
_code_to_module = {
'****' : Standard_Suite,
'GURL' : URL_Suite,
'MOSS' : Netscape_Suite,
'MSIE' : Microsoft_Internet_Explorer,
'WWW!' : Web_Browser_Suite,
'reqd' : Required_Suite,
}
_code_to_fullname = {
'****' : ('Explorer.Standard_Suite', 'Standard_Suite'),
'GURL' : ('Explorer.URL_Suite', 'URL_Suite'),
'MOSS' : ('Explorer.Netscape_Suite', 'Netscape_Suite'),
'MSIE' : ('Explorer.Microsoft_Internet_Explorer', 'Microsoft_Internet_Explorer'),
'WWW!' : ('Explorer.Web_Browser_Suite', 'Web_Browser_Suite'),
'reqd' : ('Explorer.Required_Suite', 'Required_Suite'),
}
from Standard_Suite import *
from URL_Suite import *
from Netscape_Suite import *
from Microsoft_Internet_Explorer import *
from Web_Browser_Suite import *
from Required_Suite import *
def getbaseclasses(v):
    if not getattr(v, '_propdict', None):
        v._propdict = {}
        v._elemdict = {}
        for superclassname in getattr(v, '_superclassnames', []):
            superclass = eval(superclassname)
            getbaseclasses(superclass)
            v._propdict.update(getattr(superclass, '_propdict', {}))
            v._elemdict.update(getattr(superclass, '_elemdict', {}))
        v._propdict.update(getattr(v, '_privpropdict', {}))
        v._elemdict.update(getattr(v, '_privelemdict', {}))
import StdSuites
#
# Set property and element dictionaries now that all classes have been defined
#
getbaseclasses(application)
#
# Indices of types declared in this module
#
_classdeclarations = {
'capp' : application,
}
class Explorer(Standard_Suite_Events,
        URL_Suite_Events,
        Netscape_Suite_Events,
        Microsoft_Internet_Explorer_Events,
        Web_Browser_Suite_Events,
        Required_Suite_Events,
        aetools.TalkTo):
    _signature = 'MSIE'
    _moduleName = 'Explorer'
    _elemdict = application._elemdict
    _propdict = application._propdict
|
tarc/gyp
|
refs/heads/master
|
test/rules-rebuild/gyptest-default.py
|
345
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that a rule that generates multiple outputs rebuilds
correctly when the inputs change.
"""
import TestGyp
test = TestGyp.TestGyp(workdir='workarea_default')
test.run_gyp('same_target.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('same_target.gyp', chdir='relocate/src')
expect = """\
Hello from main.c
Hello from prog1.in!
Hello from prog2.in!
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
test.sleep()
contents = test.read(['relocate', 'src', 'prog1.in'])
contents = contents.replace('!', ' AGAIN!')
test.write(['relocate', 'src', 'prog1.in'], contents)
test.build('same_target.gyp', chdir='relocate/src')
expect = """\
Hello from main.c
Hello from prog1.in AGAIN!
Hello from prog2.in!
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
test.sleep()
contents = test.read(['relocate', 'src', 'prog2.in'])
contents = contents.replace('!', ' AGAIN!')
test.write(['relocate', 'src', 'prog2.in'], contents)
test.build('same_target.gyp', chdir='relocate/src')
expect = """\
Hello from main.c
Hello from prog1.in AGAIN!
Hello from prog2.in AGAIN!
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
# Test that modifying a rule's inputs (specifically, make-sources.py) causes
# the targets to be built.
test.sleep()
contents = test.read(['relocate', 'src', 'make-sources.py'])
contents = contents.replace('%s', 'the amazing %s')
test.write(['relocate', 'src', 'make-sources.py'], contents)
test.build('same_target.gyp', chdir='relocate/src')
expect = """\
Hello from main.c
Hello from the amazing prog1.in AGAIN!
Hello from the amazing prog2.in AGAIN!
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
test.pass_test()
|
ajgallegog/gem5_arm
|
refs/heads/master
|
src/mem/slicc/__init__.py
|
92
|
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
piyush0609/scipy
|
refs/heads/master
|
scipy/optimize/tests/test__differential_evolution.py
|
59
|
"""
Unit tests for the differential global minimization algorithm.
"""
from scipy.optimize import _differentialevolution
from scipy.optimize._differentialevolution import DifferentialEvolutionSolver
from scipy.optimize import differential_evolution
import numpy as np
from scipy.optimize import rosen
from numpy.testing import (assert_equal, TestCase, assert_allclose,
run_module_suite, assert_almost_equal,
assert_string_equal)
class TestDifferentialEvolutionSolver(TestCase):
def setUp(self):
self.old_seterr = np.seterr(invalid='raise')
self.limits = np.array([[0., 0.],
[2., 2.]])
self.bounds = [(0., 2.), (0., 2.)]
self.dummy_solver = DifferentialEvolutionSolver(self.quadratic,
[(0, 100)])
#dummy_solver2 will be used to test mutation strategies
self.dummy_solver2 = DifferentialEvolutionSolver(self.quadratic,
[(0, 1)],
popsize=7,
mutation=0.5)
#create a population that's only 7 members long
#[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7]
population = np.atleast_2d(np.arange(0.1, 0.8, 0.1)).T
self.dummy_solver2.population = population
def tearDown(self):
np.seterr(**self.old_seterr)
def quadratic(self, x):
return x[0]**2
def test__strategy_resolves(self):
#test that the correct mutation function is resolved by
#different requested strategy arguments
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
strategy='best1exp')
assert_equal(solver.strategy, 'best1exp')
assert_equal(solver.mutation_func.__name__, '_best1')
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
strategy='best1bin')
assert_equal(solver.strategy, 'best1bin')
assert_equal(solver.mutation_func.__name__, '_best1')
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
strategy='rand1bin')
assert_equal(solver.strategy, 'rand1bin')
assert_equal(solver.mutation_func.__name__, '_rand1')
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
strategy='rand1exp')
assert_equal(solver.strategy, 'rand1exp')
assert_equal(solver.mutation_func.__name__, '_rand1')
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
strategy='rand2exp')
assert_equal(solver.strategy, 'rand2exp')
assert_equal(solver.mutation_func.__name__, '_rand2')
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
strategy='best2bin')
assert_equal(solver.strategy, 'best2bin')
assert_equal(solver.mutation_func.__name__, '_best2')
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
strategy='rand2bin')
assert_equal(solver.strategy, 'rand2bin')
assert_equal(solver.mutation_func.__name__, '_rand2')
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
strategy='rand2exp')
assert_equal(solver.strategy, 'rand2exp')
assert_equal(solver.mutation_func.__name__, '_rand2')
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
strategy='randtobest1bin')
assert_equal(solver.strategy, 'randtobest1bin')
assert_equal(solver.mutation_func.__name__, '_randtobest1')
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
strategy='randtobest1exp')
assert_equal(solver.strategy, 'randtobest1exp')
assert_equal(solver.mutation_func.__name__, '_randtobest1')
def test__mutate1(self):
#strategies */1/*, i.e. rand/1/bin, best/1/exp, etc.
result = np.array([0.05])
trial = self.dummy_solver2._best1((2, 3, 4, 5, 6))
assert_allclose(trial, result)
result = np.array([0.25])
trial = self.dummy_solver2._rand1((2, 3, 4, 5, 6))
assert_allclose(trial, result)
def test__mutate2(self):
#strategies */2/*, i.e. rand/2/bin, best/2/exp, etc.
#[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7]
result = np.array([-0.1])
trial = self.dummy_solver2._best2((2, 3, 4, 5, 6))
assert_allclose(trial, result)
result = np.array([0.1])
trial = self.dummy_solver2._rand2((2, 3, 4, 5, 6))
assert_allclose(trial, result)
def test__randtobest1(self):
#strategies randtobest/1/*
result = np.array([0.1])
trial = self.dummy_solver2._randtobest1(1, (2, 3, 4, 5, 6))
assert_allclose(trial, result)
def test_can_init_with_dithering(self):
mutation = (0.5, 1)
solver = DifferentialEvolutionSolver(self.quadratic,
self.bounds,
mutation=mutation)
self.assertEqual(solver.dither, list(mutation))
def test_invalid_mutation_values_arent_accepted(self):
func = rosen
mutation = (0.5, 3)
self.assertRaises(ValueError,
DifferentialEvolutionSolver,
func,
self.bounds,
mutation=mutation)
mutation = (-1, 1)
self.assertRaises(ValueError,
DifferentialEvolutionSolver,
func,
self.bounds,
mutation=mutation)
mutation = (0.1, np.nan)
self.assertRaises(ValueError,
DifferentialEvolutionSolver,
func,
self.bounds,
mutation=mutation)
mutation = (0.5)
solver = DifferentialEvolutionSolver(func,
self.bounds,
mutation=mutation)
assert_equal(0.5, solver.scale)
assert_equal(None, solver.dither)
def test__scale_parameters(self):
trial = np.array([0.3])
assert_equal(30, self.dummy_solver._scale_parameters(trial))
# it should also work with the limits reversed
self.dummy_solver.limits = np.array([[100], [0.]])
assert_equal(30, self.dummy_solver._scale_parameters(trial))
def test__unscale_parameters(self):
trial = np.array([30])
assert_equal(0.3, self.dummy_solver._unscale_parameters(trial))
# it should also work with the limits reversed
self.dummy_solver.limits = np.array([[100], [0.]])
assert_equal(0.3, self.dummy_solver._unscale_parameters(trial))
def test__ensure_constraint(self):
trial = np.array([1.1, -100, 2., 300., -0.00001])
self.dummy_solver._ensure_constraint(trial)
assert_equal(np.all(trial <= 1), True)
def test_differential_evolution(self):
# test that the Jmin of DifferentialEvolutionSolver
# is the same as the function evaluation
solver = DifferentialEvolutionSolver(self.quadratic, [(-2, 2)])
result = solver.solve()
assert_almost_equal(result.fun, self.quadratic(result.x))
def test_best_solution_retrieval(self):
# test that the getter property method for the best solution works.
solver = DifferentialEvolutionSolver(self.quadratic, [(-2, 2)])
result = solver.solve()
assert_equal(result.x, solver.x)
def test_callback_terminates(self):
# test that if the callback returns true, then the minimization halts
bounds = [(0, 2), (0, 2)]
def callback(param, convergence=0.):
return True
result = differential_evolution(rosen, bounds, callback=callback)
assert_string_equal(result.message,
'callback function requested stop early '
'by returning True')
def test_args_tuple_is_passed(self):
# test that the args tuple is passed to the cost function properly.
bounds = [(-10, 10)]
args = (1., 2., 3.)
def quadratic(x, *args):
if type(args) != tuple:
raise ValueError('args should be a tuple')
return args[0] + args[1] * x + args[2] * x**2.
result = differential_evolution(quadratic,
bounds,
args=args,
polish=True)
assert_almost_equal(result.fun, 2 / 3.)
def test_init_with_invalid_strategy(self):
#test that passing an invalid strategy raises ValueError
func = rosen
bounds = [(-3, 3)]
self.assertRaises(ValueError,
differential_evolution,
func,
bounds,
strategy='abc')
def test_bounds_checking(self):
#test that the bounds checking works
func = rosen
bounds = [(-3, None)]
self.assertRaises(ValueError,
differential_evolution,
func,
bounds)
bounds = [(-3)]
self.assertRaises(ValueError,
differential_evolution,
func,
bounds)
bounds = [(-3, 3), (3, 4, 5)]
self.assertRaises(ValueError,
differential_evolution,
func,
bounds)
def test_select_samples(self):
#select_samples should return 5 separate random numbers.
limits = np.arange(12., dtype='float64').reshape(2, 6)
bounds = list(zip(limits[0, :], limits[1, :]))
solver = DifferentialEvolutionSolver(None, bounds, popsize=1)
candidate = 0
r1, r2, r3, r4, r5 = solver._select_samples(candidate, 5)
assert_equal(
len(np.unique(np.array([candidate, r1, r2, r3, r4, r5]))), 6)
def test_maxiter_stops_solve(self):
#test that if the maximum number of iterations is exceeded
#the solver stops.
solver = DifferentialEvolutionSolver(rosen, self.bounds, maxiter=1)
result = solver.solve()
assert_equal(result.success, False)
assert_equal(result.message,
'Maximum number of iterations has been exceeded.')
def test_maxfun_stops_solve(self):
#test that if the maximum number of function evaluations is exceeded
#during initialisation the solver stops
solver = DifferentialEvolutionSolver(rosen, self.bounds, maxfun=1)
result = solver.solve()
assert_equal(result.nfev, 2)
assert_equal(result.success, False)
assert_equal(result.message,
'Maximum number of function evaluations has '
'been exceeded.')
#test that if the maximum number of function evaluations is exceeded
#during the actual minimisation, then the solver stops.
#Have to turn polishing off, as this will still occur even if maxfun
#is reached. For popsize=5 and len(bounds)=2, then there are only 10
#function evaluations during initialisation.
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
popsize=5,
polish=False,
maxfun=40)
result = solver.solve()
assert_equal(result.nfev, 41)
assert_equal(result.success, False)
assert_equal(result.message,
'Maximum number of function evaluations has '
'been exceeded.')
def test_quadratic(self):
# test the quadratic function from object
solver = DifferentialEvolutionSolver(self.quadratic,
[(-100, 100)],
tol=0.02)
solver.solve()
def test_quadratic_from_diff_ev(self):
# test the quadratic function from differential_evolution function
differential_evolution(self.quadratic,
[(-100, 100)],
tol=0.02)
def test_seed_gives_repeatability(self):
result = differential_evolution(self.quadratic,
[(-100, 100)],
polish=False,
seed=1,
tol=0.5)
result2 = differential_evolution(self.quadratic,
[(-100, 100)],
polish=False,
seed=1,
tol=0.5)
assert_equal(result.x, result2.x)
def test_exp_runs(self):
# test whether exponential mutation loop runs
solver = DifferentialEvolutionSolver(rosen,
self.bounds,
strategy='best1exp',
maxiter=1)
solver.solve()
def test__make_random_gen(self):
# If seed is None, return the RandomState singleton used by np.random.
# If seed is an int, return a new RandomState instance seeded with seed.
# If seed is already a RandomState instance, return it.
# Otherwise raise ValueError.
rsi = _differentialevolution._make_random_gen(1)
assert_equal(type(rsi), np.random.RandomState)
rsi = _differentialevolution._make_random_gen(rsi)
assert_equal(type(rsi), np.random.RandomState)
rsi = _differentialevolution._make_random_gen(None)
assert_equal(type(rsi), np.random.RandomState)
self.assertRaises(
ValueError, _differentialevolution._make_random_gen, 'a')
def test_gh_4511_regression(self):
# This modification of the differential evolution docstring example
# uses a custom popsize that had triggered an off-by-one error.
# Because we do not care about solving the optimization problem in
# this test, we use maxiter=1 to reduce the testing time.
bounds = [(-5, 5), (-5, 5)]
result = differential_evolution(rosen, bounds, popsize=1815, maxiter=1)
if __name__ == '__main__':
run_module_suite()
|
alu042/edx-platform
|
refs/heads/master
|
common/djangoapps/auth_exchange/tests/test_forms.py
|
113
|
# pylint: disable=no-member
"""
Tests for OAuth token exchange forms
"""
import unittest
from django.conf import settings
from django.contrib.sessions.middleware import SessionMiddleware
from django.test import TestCase
from django.test.client import RequestFactory
import httpretty
from provider import scope
import social.apps.django_app.utils as social_utils
from auth_exchange.forms import AccessTokenExchangeForm
from auth_exchange.tests.utils import AccessTokenExchangeTestMixin
from third_party_auth.tests.utils import ThirdPartyOAuthTestMixinFacebook, ThirdPartyOAuthTestMixinGoogle
class AccessTokenExchangeFormTest(AccessTokenExchangeTestMixin):
    """
    Mixin that defines test cases for AccessTokenExchangeForm
    """
    def setUp(self):
        super(AccessTokenExchangeFormTest, self).setUp()
        self.request = RequestFactory().post("dummy_url")
        redirect_uri = 'dummy_redirect_url'
        SessionMiddleware().process_request(self.request)
        self.request.social_strategy = social_utils.load_strategy(self.request)
        # pylint: disable=no-member
        self.request.backend = social_utils.load_backend(self.request.social_strategy, self.BACKEND, redirect_uri)

    def _assert_error(self, data, expected_error, expected_error_description):
        form = AccessTokenExchangeForm(request=self.request, data=data)
        self.assertEqual(
            form.errors,
            {"error": expected_error, "error_description": expected_error_description}
        )
        self.assertNotIn("partial_pipeline", self.request.session)

    def _assert_success(self, data, expected_scopes):
        form = AccessTokenExchangeForm(request=self.request, data=data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data["user"], self.user)
        self.assertEqual(form.cleaned_data["client"], self.oauth_client)
        self.assertEqual(scope.to_names(form.cleaned_data["scope"]), expected_scopes)


# This is necessary because cms does not implement third party auth
@unittest.skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
@httpretty.activate
class AccessTokenExchangeFormTestFacebook(
        AccessTokenExchangeFormTest,
        ThirdPartyOAuthTestMixinFacebook,
        TestCase
):
    """
    Tests for AccessTokenExchangeForm used with Facebook
    """
    pass


# This is necessary because cms does not implement third party auth
@unittest.skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
@httpretty.activate
class AccessTokenExchangeFormTestGoogle(
        AccessTokenExchangeFormTest,
        ThirdPartyOAuthTestMixinGoogle,
        TestCase
):
    """
    Tests for AccessTokenExchangeForm used with Google
    """
    pass
|
apocquet/django
|
refs/heads/master
|
django/contrib/gis/serializers/geojson.py
|
275
|
from __future__ import unicode_literals
from django.contrib.gis.gdal import HAS_GDAL
from django.core.serializers.base import (
SerializationError, SerializerDoesNotExist,
)
from django.core.serializers.json import Serializer as JSONSerializer
if HAS_GDAL:
from django.contrib.gis.gdal import CoordTransform, SpatialReference
class Serializer(JSONSerializer):
    """
    Convert a queryset to GeoJSON, http://geojson.org/
    """
    def _init_options(self):
        super(Serializer, self)._init_options()
        self.geometry_field = self.json_kwargs.pop('geometry_field', None)
        self.srid = self.json_kwargs.pop('srid', 4326)

    def start_serialization(self):
        self._init_options()
        self._cts = {}  # cache of CoordTransform's
        self.stream.write(
            '{"type": "FeatureCollection", "crs": {"type": "name", "properties": {"name": "EPSG:%d"}},'
            ' "features": [' % self.srid)

    def end_serialization(self):
        self.stream.write(']}')

    def start_object(self, obj):
        super(Serializer, self).start_object(obj)
        self._geometry = None
        if self.geometry_field is None:
            # Find the first declared geometry field
            for field in obj._meta.fields:
                if hasattr(field, 'geom_type'):
                    self.geometry_field = field.name
                    break

    def get_dump_object(self, obj):
        data = {
            "type": "Feature",
            "properties": self._current,
        }
        if self._geometry:
            if self._geometry.srid != self.srid:
                # If needed, transform the geometry in the srid of the global geojson srid
                if not HAS_GDAL:
                    raise SerializationError(
                        'Unable to convert geometry to SRID %s when GDAL is not installed.' % self.srid
                    )
                if self._geometry.srid not in self._cts:
                    srs = SpatialReference(self.srid)
                    self._cts[self._geometry.srid] = CoordTransform(self._geometry.srs, srs)
                self._geometry.transform(self._cts[self._geometry.srid])
            data["geometry"] = eval(self._geometry.geojson)
        else:
            data["geometry"] = None
        return data

    def handle_field(self, obj, field):
        if field.name == self.geometry_field:
            self._geometry = field.value_from_object(obj)
        else:
            super(Serializer, self).handle_field(obj, field)


class Deserializer(object):
    def __init__(self, *args, **kwargs):
        raise SerializerDoesNotExist("geojson is a serialization-only serializer")
|
beatrizjesus/my-first-blog
|
refs/heads/master
|
blog/tests.py
|
24123
|
from django.test import TestCase
# Create your tests here.
|
Lab603/PicEncyclopedias
|
refs/heads/master
|
jni-build/jni-build/jni/include/tensorflow/contrib/learn/python/learn/tests/dataframe/binary_transform_test.py
|
5
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for binary transforms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.dataframe import tensorflow_dataframe as df
from tensorflow.contrib.learn.python.learn.dataframe.transforms.binary_transforms import BINARY_TRANSFORMS
NUMPY_ARRAY_SIZE = 100
SCALAR = 50.0
TEST_NAME_PREFIX = "testBinaryOp_"
class BinaryTransformTestCase(tf.test.TestCase):
  """Test class for binary transforms."""

  @classmethod
  def add_test_case(cls, fn_name, op):
    def _test(self):
      rng = np.arange(-NUMPY_ARRAY_SIZE // 2,
                      NUMPY_ARRAY_SIZE // 2,
                      dtype="float32")
      frame = df.TensorFlowDataFrame.from_numpy(rng,
                                                batch_size=len(rng),
                                                shuffle=False)
      frame["sqr"] = frame["value"].square()
      self.assertTrue(hasattr(frame["value"], fn_name))
      frame["series_result"] = getattr(frame["value"], fn_name)(frame["sqr"])
      frame["scalar_result"] = getattr(frame["value"], fn_name)(SCALAR)

      frame_built = frame.build()
      expected_series_tensor = op(frame_built["value"], frame_built["sqr"])
      actual_series_tensor = frame_built["series_result"]
      expected_scalar_tensor = op(frame_built["value"], SCALAR)
      actual_scalar_tensor = frame_built["scalar_result"]

      session = tf.Session()
      coord = tf.train.Coordinator()
      threads = tf.train.start_queue_runners(sess=session, coord=coord)
      actual_series, expected_series, actual_scalar, expected_scalar = (
          session.run([actual_series_tensor, expected_series_tensor,
                       actual_scalar_tensor, expected_scalar_tensor]))
      coord.request_stop()
      coord.join(threads)

      np.testing.assert_almost_equal(expected_series, actual_series)
      np.testing.assert_almost_equal(expected_scalar, actual_scalar)

    setattr(cls, "{}{}".format(TEST_NAME_PREFIX, op.__name__), _test)


for bt in BINARY_TRANSFORMS:
  BinaryTransformTestCase.add_test_case(*bt)

# Check that the number of test methods matches the number of binary transforms.
test_methods = [test for test in dir(BinaryTransformTestCase)
                if test.startswith(TEST_NAME_PREFIX)]
assert len(test_methods) == len(BINARY_TRANSFORMS)

if __name__ == "__main__":
  tf.test.main()
|
Nikoala/CouchPotatoServer
|
refs/heads/develop
|
couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/condenast.py
|
115
|
# coding: utf-8
from __future__ import unicode_literals
import re
import json
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_parse_urlparse,
compat_urlparse,
)
from ..utils import (
orderedSet,
)
class CondeNastIE(InfoExtractor):
"""
Condé Nast is a media group, some of its sites use a custom HTML5 player
that works the same in all of them.
"""
# The keys are the supported sites and the values are the name to be shown
# to the user and in the extractor description.
_SITES = {
'wired': 'WIRED',
'gq': 'GQ',
'vogue': 'Vogue',
'glamour': 'Glamour',
'wmagazine': 'W Magazine',
'vanityfair': 'Vanity Fair',
'cnevids': 'Condé Nast',
}
_VALID_URL = r'http://(video|www|player)\.(?P<site>%s)\.com/(?P<type>watch|series|video|embed)/(?P<id>[^/?#]+)' % '|'.join(_SITES.keys())
IE_DESC = 'Condé Nast media group: %s' % ', '.join(sorted(_SITES.values()))
EMBED_URL = r'(?:https?:)?//player\.(?P<site>%s)\.com/(?P<type>embed)/.+?' % '|'.join(_SITES.keys())
_TEST = {
'url': 'http://video.wired.com/watch/3d-printed-speakers-lit-with-led',
'md5': '1921f713ed48aabd715691f774c451f7',
'info_dict': {
'id': '5171b343c2b4c00dd0c1ccb3',
'ext': 'mp4',
'title': '3D Printed Speakers Lit With LED',
'description': 'Check out these beautiful 3D printed LED speakers. You can\'t actually buy them, but LumiGeek is working on a board that will let you make you\'re own.',
}
}
def _extract_series(self, url, webpage):
title = self._html_search_regex(r'<div class="cne-series-info">.*?<h1>(.+?)</h1>',
webpage, 'series title', flags=re.DOTALL)
url_object = compat_urllib_parse_urlparse(url)
base_url = '%s://%s' % (url_object.scheme, url_object.netloc)
m_paths = re.finditer(r'<p class="cne-thumb-title">.*?<a href="(/watch/.+?)["\?]',
webpage, flags=re.DOTALL)
paths = orderedSet(m.group(1) for m in m_paths)
build_url = lambda path: compat_urlparse.urljoin(base_url, path)
entries = [self.url_result(build_url(path), 'CondeNast') for path in paths]
return self.playlist_result(entries, playlist_title=title)
def _extract_video(self, webpage, url_type):
if url_type != 'embed':
description = self._html_search_regex(
[
r'<div class="cne-video-description">(.+?)</div>',
r'<div class="video-post-content">(.+?)</div>',
],
webpage, 'description', fatal=False, flags=re.DOTALL)
else:
description = None
params = self._search_regex(r'var params = {(.+?)}[;,]', webpage,
'player params', flags=re.DOTALL)
video_id = self._search_regex(r'videoId: [\'"](.+?)[\'"]', params, 'video id')
player_id = self._search_regex(r'playerId: [\'"](.+?)[\'"]', params, 'player id')
target = self._search_regex(r'target: [\'"](.+?)[\'"]', params, 'target')
data = compat_urllib_parse.urlencode({'videoId': video_id,
'playerId': player_id,
'target': target,
})
base_info_url = self._search_regex(r'url = [\'"](.+?)[\'"][,;]',
webpage, 'base info url',
default='http://player.cnevids.com/player/loader.js?')
info_url = base_info_url + data
info_page = self._download_webpage(info_url, video_id,
'Downloading video info')
video_info = self._search_regex(r'var video = ({.+?});', info_page, 'video info')
video_info = json.loads(video_info)
formats = [{
'format_id': '%s-%s' % (fdata['type'].split('/')[-1], fdata['quality']),
'url': fdata['src'],
'ext': fdata['type'].split('/')[-1],
'quality': 1 if fdata['quality'] == 'high' else 0,
} for fdata in video_info['sources'][0]]
self._sort_formats(formats)
return {
'id': video_id,
'formats': formats,
'title': video_info['title'],
'thumbnail': video_info['poster_frame'],
'description': description,
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
site = mobj.group('site')
url_type = mobj.group('type')
item_id = mobj.group('id')
self.to_screen('Extracting from %s with the Condé Nast extractor' % self._SITES[site])
webpage = self._download_webpage(url, item_id)
if url_type == 'series':
return self._extract_series(url, webpage)
else:
return self._extract_video(webpage, url_type)
|
vperron/sentry
|
refs/heads/master
|
tests/sentry/web/frontend/test_admin.py
|
23
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from exam import fixture
from sentry.testutils import TestCase
class EnvStatusTest(TestCase):
    @fixture
    def path(self):
        return reverse('sentry-admin-status')

    def test_requires_auth(self):
        resp = self.client.get(self.path)
        self.assertEquals(resp.status_code, 302)

    def test_renders_template(self):
        self.login_as(self.user)
        resp = self.client.get(self.path)
        self.assertEquals(resp.status_code, 200)
        self.assertTemplateUsed(resp, 'sentry/admin/status/env.html')


class PackageStatusTest(TestCase):
    @fixture
    def path(self):
        return reverse('sentry-admin-packages-status')

    def test_requires_auth(self):
        resp = self.client.get(self.path)
        self.assertEquals(resp.status_code, 302)

    def test_renders_template(self):
        self.login_as(self.user)
        resp = self.client.get(self.path)
        self.assertEquals(resp.status_code, 200)
        self.assertTemplateUsed(resp, 'sentry/admin/status/packages.html')


class MailStatusTest(TestCase):
    @fixture
    def path(self):
        return reverse('sentry-admin-mail-status')

    def test_requires_auth(self):
        resp = self.client.get(self.path)
        self.assertEquals(resp.status_code, 302)

    def test_renders_template(self):
        self.login_as(self.user)
        resp = self.client.get(self.path)
        self.assertEquals(resp.status_code, 200)
        self.assertTemplateUsed(resp, 'sentry/admin/status/mail.html')


class OverviewTest(TestCase):
    @fixture
    def path(self):
        return reverse('sentry-admin-overview')

    def test_requires_auth(self):
        resp = self.client.get(self.path)
        self.assertEquals(resp.status_code, 302)

    def test_renders_template(self):
        self.login_as(self.user)
        resp = self.client.get(self.path)
        self.assertEquals(resp.status_code, 200)
        self.assertTemplateUsed(resp, 'sentry/admin/stats.html')


class ManageUsersTest(TestCase):
    @fixture
    def path(self):
        return reverse('sentry-admin-users')

    def test_does_render(self):
        self.login_as(self.user)
        resp = self.client.get(self.path)
        assert resp.status_code == 200
        self.assertTemplateUsed(resp, 'sentry/admin/users/list.html')
        assert self.user in resp.context['user_list']


class ManageTeamsTest(TestCase):
    @fixture
    def path(self):
        return reverse('sentry-admin-teams')

    def test_does_render(self):
        team = self.create_team()
        self.create_project(team=team)
        self.create_project(team=team)
        self.login_as(self.user)
        resp = self.client.get(self.path)
        assert resp.status_code == 200
        self.assertTemplateUsed(resp, 'sentry/admin/teams/list.html')
        assert team in resp.context['team_list']


class ManageProjectsTest(TestCase):
    @fixture
    def path(self):
        return reverse('sentry-admin-projects')

    def test_does_render(self):
        project = self.create_project()
        project2 = self.create_project()
        self.login_as(self.user)
        resp = self.client.get(self.path)
        assert resp.status_code == 200
        self.assertTemplateUsed(resp, 'sentry/admin/projects/list.html')
        assert project in resp.context['project_list']
        assert project2 in resp.context['project_list']
|
linvictor88/vse-lbaas-driver
|
refs/heads/master
|
quantum/db/extraroute_db.py
|
2
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013, Nachi Ueno, NTT MCL, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo.config import cfg
import sqlalchemy as sa
from sqlalchemy import orm
from quantum.common import utils
from quantum.db import db_base_plugin_v2
from quantum.db import l3_db
from quantum.db import model_base
from quantum.db import models_v2
from quantum.extensions import extraroute
from quantum.extensions import l3
from quantum.openstack.common import log as logging
LOG = logging.getLogger(__name__)
extra_route_opts = [
#TODO(nati): use quota framework when it support quota for attributes
cfg.IntOpt('max_routes', default=30,
help=_("Maximum number of routes")),
]
cfg.CONF.register_opts(extra_route_opts)
class RouterRoute(model_base.BASEV2, models_v2.Route):
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id',
ondelete="CASCADE"),
primary_key=True)
router = orm.relationship(l3_db.Router,
backref=orm.backref("route_list",
lazy='joined',
cascade='delete'))
class ExtraRoute_db_mixin(l3_db.L3_NAT_db_mixin):
"""Mixin class to support extra route configuration on router."""
def _extend_router_dict_extraroute(self, router_res, router_db):
router_res['routes'] = (ExtraRoute_db_mixin.
_make_extra_route_list(
router_db['route_list']
))
db_base_plugin_v2.QuantumDbPluginV2.register_dict_extend_funcs(
l3.ROUTERS, [_extend_router_dict_extraroute])
def update_router(self, context, id, router):
r = router['router']
with context.session.begin(subtransactions=True):
#check if route exists and have permission to access
router_db = self._get_router(context, id)
if 'routes' in r:
self._update_extra_routes(context,
router_db,
r['routes'])
router_updated = super(ExtraRoute_db_mixin, self).update_router(
context, id, router)
router_updated['routes'] = self._get_extra_routes_by_router_id(
context, id)
return router_updated
def _get_subnets_by_cidr(self, context, cidr):
query_subnets = context.session.query(models_v2.Subnet)
return query_subnets.filter_by(cidr=cidr).all()
def _validate_routes_nexthop(self, context, ports, routes, nexthop):
#Note(nati): Nexthop should be connected,
# so we need to check
# nexthop belongs to one of cidrs of the router ports
cidrs = []
for port in ports:
cidrs += [self._get_subnet(context,
ip['subnet_id'])['cidr']
for ip in port['fixed_ips']]
if not netaddr.all_matching_cidrs(nexthop, cidrs):
raise extraroute.InvalidRoutes(
routes=routes,
reason=_('the nexthop is not connected with router'))
#Note(nati) nexthop should not be same as fixed_ips
for port in ports:
for ip in port['fixed_ips']:
if nexthop == ip['ip_address']:
raise extraroute.InvalidRoutes(
routes=routes,
reason=_('the nexthop is used by router'))
def _validate_routes(self, context,
router_id, routes):
if len(routes) > cfg.CONF.max_routes:
raise extraroute.RoutesExhausted(
router_id=router_id,
quota=cfg.CONF.max_routes)
filters = {'device_id': [router_id]}
ports = self.get_ports(context, filters)
for route in routes:
self._validate_routes_nexthop(
context, ports, routes, route['nexthop'])
def _update_extra_routes(self, context, router, routes):
self._validate_routes(context, router['id'],
routes)
old_routes = self._get_extra_routes_by_router_id(
context, router['id'])
added, removed = utils.diff_list_of_dict(old_routes,
routes)
LOG.debug('Added routes are %s' % added)
for route in added:
router_routes = RouterRoute(
router_id=router['id'],
destination=route['destination'],
nexthop=route['nexthop'])
context.session.add(router_routes)
LOG.debug('Removed routes are %s' % removed)
for route in removed:
del_context = context.session.query(RouterRoute)
del_context.filter_by(router_id=router['id'],
destination=route['destination'],
nexthop=route['nexthop']).delete()
@staticmethod
def _make_extra_route_list(extra_routes):
return [{'destination': route['destination'],
'nexthop': route['nexthop']}
for route in extra_routes]
def _get_extra_routes_by_router_id(self, context, id):
query = context.session.query(RouterRoute)
query.filter(RouterRoute.router_id == id)
return self._make_extra_route_list(query)
def get_router(self, context, id, fields=None):
with context.session.begin(subtransactions=True):
router = super(ExtraRoute_db_mixin, self).get_router(
context, id, fields)
return router
def get_routers(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
with context.session.begin(subtransactions=True):
routers = super(ExtraRoute_db_mixin, self).get_routers(
context, filters, fields, sorts=sorts, limit=limit,
marker=marker, page_reverse=page_reverse)
return routers
def _confirm_router_interface_not_in_use(self, context, router_id,
subnet_id):
super(ExtraRoute_db_mixin, self)._confirm_router_interface_not_in_use(
context, router_id, subnet_id)
subnet_db = self._get_subnet(context, subnet_id)
subnet_cidr = netaddr.IPNetwork(subnet_db['cidr'])
extra_routes = self._get_extra_routes_by_router_id(context, router_id)
for route in extra_routes:
if netaddr.all_matching_cidrs(route['nexthop'], [subnet_cidr]):
raise extraroute.RouterInterfaceInUseByRoute(
router_id=router_id, subnet_id=subnet_id)
|
hyperized/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/fortios/fortios_system_session_helper.py
|
13
|
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_session_helper
short_description: Configure session helper in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify system feature and session_helper category.
Examples include all parameters, and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
system_session_helper:
description:
- Configure session helper.
default: null
type: dict
suboptions:
id:
description:
- Session helper ID.
required: true
type: int
name:
description:
- Helper name.
type: str
choices:
- ftp
- tftp
- ras
- h323
- tns
- mms
- sip
- pptp
- rtsp
- dns-udp
- dns-tcp
- pmap
- rsh
- dcerpc
- mgcp
- gtp-c
- gtp-u
- gtp-b
port:
description:
- Protocol port.
type: int
protocol:
description:
- Protocol number.
type: int
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure session helper.
fortios_system_session_helper:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
system_session_helper:
id: "3"
name: "default_name_4"
port: "5"
protocol: "6"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_system_session_helper_data(json):
option_list = ['id', 'name', 'port',
'protocol']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
for elem in data:
elem = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
def system_session_helper(data, fos):
vdom = data['vdom']
state = data['state']
system_session_helper_data = data['system_session_helper']
filtered_data = underscore_to_hyphen(filter_system_session_helper_data(system_session_helper_data))
if state == "present":
return fos.set('system',
'session-helper',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('system',
'session-helper',
mkey=filtered_data['id'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system(data, fos):
if data['system_session_helper']:
resp = system_session_helper(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"system_session_helper": {
"required": False, "type": "dict", "default": None,
"options": {
"id": {"required": True, "type": "int"},
"name": {"required": False, "type": "str",
"choices": ["ftp", "tftp", "ras",
"h323", "tns", "mms",
"sip", "pptp", "rtsp",
"dns-udp", "dns-tcp", "pmap",
"rsh", "dcerpc", "mgcp",
"gtp-c", "gtp-u", "gtp-b"]},
"port": {"required": False, "type": "int"},
"protocol": {"required": False, "type": "int"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_system(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_system(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
|
efortuna/AndroidSDKClone
|
refs/heads/master
|
ndk/prebuilt/linux-x86_64/lib/python2.7/md5.py
|
255
|
# $Id$
#
# Copyright (C) 2005 Gregory P. Smith (greg@krypto.org)
# Licensed to PSF under a Contributor Agreement.
import warnings
warnings.warn("the md5 module is deprecated; use hashlib instead",
DeprecationWarning, 2)
from hashlib import md5
new = md5
blocksize = 1 # legacy value (wrong in any useful sense)
digest_size = 16
|
jso0003auburn/Is-It-Raining
|
refs/heads/master
|
lib/tweepy/error.py
|
10
|
# Tweepy
# Copyright 2009-2010 Joshua Roesslein
# See LICENSE for details.
from __future__ import print_function
import six
class TweepError(Exception):
    """Tweepy exception"""

    def __init__(self, reason, response=None):
        self.reason = six.text_type(reason)
        self.response = response
        Exception.__init__(self, reason)

    def __str__(self):
        return self.reason
|
nachoaguadoc/aimlx-demos
|
refs/heads/master
|
controller/argumentation_controller.py
|
1
|
from flask import Blueprint
from flask import Flask, abort
from flask import jsonify
from flask import render_template
from flask import request,send_from_directory
import requests
import config as conf
import helpers
argumentation_api = Blueprint('argumentation_api', __name__)
@argumentation_api.route('')
def getArgumentation():
    return render_template('argumentation/argumentation.html')

@argumentation_api.route('', methods=['POST'])
def submitArgumentation():
    parameters = request.get_json(force=True)
    print("Demo argumentation:", parameters)
    if request.method == 'POST':
        result = requests.post(conf.argumentation['url'], json=parameters)
        resultdict = result.json()
        return jsonify(resultdict)
|
javaos74/neutron
|
refs/heads/master
|
neutron/db/vlantransparent_db.py
|
54
|
# Copyright (c) 2015 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.api.v2 import attributes
from neutron.db import db_base_plugin_v2
from neutron.extensions import vlantransparent
class Vlantransparent_db_mixin(object):
    """Mixin class to add vlan transparent methods to db_base_plugin_v2."""

    def _extend_network_dict_vlan_transparent(self, network_res, network_db):
        network_res[vlantransparent.VLANTRANSPARENT] = (
            network_db.vlan_transparent)
        return network_res

    db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
        attributes.NETWORKS, ['_extend_network_dict_vlan_transparent'])
|
abhilashnta/edx-platform
|
refs/heads/master
|
common/djangoapps/student/migrations/0046_auto__add_entranceexamconfiguration__add_unique_entranceexamconfigurat.py
|
93
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'EntranceExamConfiguration'
db.create_table('student_entranceexamconfiguration', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('course_id', self.gf('xmodule_django.models.CourseKeyField')(max_length=255, db_index=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_index=True, blank=True)),
('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('skip_entrance_exam', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.send_create_signal('student', ['EntranceExamConfiguration'])
# Adding unique constraint on 'EntranceExamConfiguration', fields ['user', 'course_id']
db.create_unique('student_entranceexamconfiguration', ['user_id', 'course_id'])
def backwards(self, orm):
# Removing unique constraint on 'EntranceExamConfiguration', fields ['user', 'course_id']
db.delete_unique('student_entranceexamconfiguration', ['user_id', 'course_id'])
# Deleting model 'EntranceExamConfiguration'
db.delete_table('student_entranceexamconfiguration')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'student.anonymoususerid': {
'Meta': {'object_name': 'AnonymousUserId'},
'anonymous_user_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseaccessrole': {
'Meta': {'unique_together': "(('user', 'org', 'course_id', 'role'),)", 'object_name': 'CourseAccessRole'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'org': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseenrollmentallowed': {
'Meta': {'unique_together': "(('email', 'course_id'),)", 'object_name': 'CourseEnrollmentAllowed'},
'auto_enroll': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'student.dashboardconfiguration': {
'Meta': {'object_name': 'DashboardConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recent_enrollment_time_delta': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'student.entranceexamconfiguration': {
'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'EntranceExamConfiguration'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'skip_entrance_exam': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.linkedinaddtoprofileconfiguration': {
'Meta': {'object_name': 'LinkedInAddToProfileConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'company_identifier': ('django.db.models.fields.TextField', [], {}),
'dashboard_tracking_code': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'trk_partner_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10', 'blank': 'True'})
},
'student.loginfailures': {
'Meta': {'object_name': 'LoginFailures'},
'failure_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lockout_until': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.passwordhistory': {
'Meta': {'object_name': 'PasswordHistory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'time_set': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.pendingemailchange': {
'Meta': {'object_name': 'PendingEmailChange'},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.pendingnamechange': {
'Meta': {'object_name': 'PendingNameChange'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.registration': {
'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
'allow_certificate': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'city': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'goals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'level_of_education': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'mailing_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
'year_of_birth': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
},
'student.usersignupsource': {
'Meta': {'object_name': 'UserSignupSource'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.userstanding': {
'Meta': {'object_name': 'UserStanding'},
'account_status': ('django.db.models.fields.CharField', [], {'max_length': '31', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'standing_last_changed_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'standing'", 'unique': 'True', 'to': "orm['auth.User']"})
},
'student.usertestgroup': {
'Meta': {'object_name': 'UserTestGroup'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
}
}
complete_apps = ['student']
|
sgzsh269/django
|
refs/heads/master
|
django/views/static.py
|
300
|
"""
Views and functions for serving static files. These are only to be used
during development, and SHOULD NOT be used in a production setting.
"""
from __future__ import unicode_literals
import mimetypes
import os
import posixpath
import re
import stat
from django.http import (
FileResponse, Http404, HttpResponse, HttpResponseNotModified,
HttpResponseRedirect,
)
from django.template import Context, Engine, TemplateDoesNotExist, loader
from django.utils.http import http_date, parse_http_date
from django.utils.six.moves.urllib.parse import unquote
from django.utils.translation import ugettext as _, ugettext_lazy
def serve(request, path, document_root=None, show_indexes=False):
"""
Serve static files below a given point in the directory structure.
To use, put a URL pattern such as::
from django.views.static import serve
url(r'^(?P<path>.*)$', serve, {'document_root': '/path/to/my/files/'})
in your URLconf. You must provide the ``document_root`` param. You may
also set ``show_indexes`` to ``True`` if you'd like to serve a basic index
of the directory. This index view will use the template hardcoded below,
but if you'd like to override it, you can create a template called
``static/directory_index.html``.
"""
path = posixpath.normpath(unquote(path))
path = path.lstrip('/')
newpath = ''
for part in path.split('/'):
if not part:
# Strip empty path components.
continue
drive, part = os.path.splitdrive(part)
head, part = os.path.split(part)
if part in (os.curdir, os.pardir):
# Strip '.' and '..' in path.
continue
newpath = os.path.join(newpath, part).replace('\\', '/')
if newpath and path != newpath:
return HttpResponseRedirect(newpath)
fullpath = os.path.join(document_root, newpath)
if os.path.isdir(fullpath):
if show_indexes:
return directory_index(newpath, fullpath)
raise Http404(_("Directory indexes are not allowed here."))
if not os.path.exists(fullpath):
raise Http404(_('"%(path)s" does not exist') % {'path': fullpath})
# Respect the If-Modified-Since header.
statobj = os.stat(fullpath)
if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
statobj.st_mtime, statobj.st_size):
return HttpResponseNotModified()
content_type, encoding = mimetypes.guess_type(fullpath)
content_type = content_type or 'application/octet-stream'
response = FileResponse(open(fullpath, 'rb'), content_type=content_type)
response["Last-Modified"] = http_date(statobj.st_mtime)
if stat.S_ISREG(statobj.st_mode):
response["Content-Length"] = statobj.st_size
if encoding:
response["Content-Encoding"] = encoding
return response
DEFAULT_DIRECTORY_INDEX_TEMPLATE = """
{% load i18n %}
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<meta http-equiv="Content-Language" content="en-us" />
<meta name="robots" content="NONE,NOARCHIVE" />
<title>{% blocktrans %}Index of {{ directory }}{% endblocktrans %}</title>
</head>
<body>
<h1>{% blocktrans %}Index of {{ directory }}{% endblocktrans %}</h1>
<ul>
{% if directory != "/" %}
<li><a href="../">../</a></li>
{% endif %}
{% for f in file_list %}
<li><a href="{{ f|urlencode }}">{{ f }}</a></li>
{% endfor %}
</ul>
</body>
</html>
"""
template_translatable = ugettext_lazy("Index of %(directory)s")
def directory_index(path, fullpath):
try:
t = loader.select_template([
'static/directory_index.html',
'static/directory_index',
])
except TemplateDoesNotExist:
t = Engine().from_string(DEFAULT_DIRECTORY_INDEX_TEMPLATE)
files = []
for f in os.listdir(fullpath):
if not f.startswith('.'):
if os.path.isdir(os.path.join(fullpath, f)):
f += '/'
files.append(f)
c = Context({
'directory': path + '/',
'file_list': files,
})
return HttpResponse(t.render(c))
def was_modified_since(header=None, mtime=0, size=0):
"""
Was something modified since the user last downloaded it?
header
This is the value of the If-Modified-Since header. If this is None,
I'll just return True.
mtime
This is the modification time of the item we're talking about.
size
This is the size of the item we're talking about.
"""
try:
if header is None:
raise ValueError
matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header,
re.IGNORECASE)
header_mtime = parse_http_date(matches.group(1))
header_len = matches.group(3)
if header_len and int(header_len) != size:
raise ValueError
if int(mtime) > header_mtime:
raise ValueError
except (AttributeError, ValueError, OverflowError):
return True
return False
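# Example (illustrative header value): for a request carrying
#   If-Modified-Since: Sat, 29 Oct 1994 19:43:31 GMT; length=3145
# serve() answers with 304 Not Modified only when the file's mtime is not newer
# than that date and, if the optional length parameter is present, the file size
# still equals it; otherwise the full response is sent.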
|
Tejal011089/fbd_erpnext
|
refs/heads/develop
|
erpnext/patches/v4_0/move_warehouse_user_to_restrictions.py
|
120
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import frappe.permissions
def execute():
for warehouse, user in frappe.db.sql("""select parent, user from `tabWarehouse User`"""):
frappe.permissions.add_user_permission("Warehouse", warehouse, user)
frappe.delete_doc_if_exists("DocType", "Warehouse User")
frappe.reload_doc("stock", "doctype", "warehouse")
|
DrMeers/django
|
refs/heads/master
|
django/contrib/gis/db/backends/oracle/adapter.py
|
110
|
from cx_Oracle import CLOB
from django.contrib.gis.db.backends.adapter import WKTAdapter
class OracleSpatialAdapter(WKTAdapter):
input_size = CLOB
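    # Note: the WKT representation of a large geometry can exceed Oracle's
    # VARCHAR2 bind limit, so advertising CLOB as the bind input size lets
    # cx_Oracle bind such values as LOBs.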
|
sztanko/solsticestreets
|
refs/heads/master
|
python/solstreets/azimuth_processor.py
|
1
|
from typing import Generator, Iterable, Optional, Tuple
import math
import logging
from functools import lru_cache
from collections import Counter
from datetime import datetime
import ephem
from .entities import Street, StreetSegment, from_street
from .geo_utils import get_segment_details
logging.basicConfig(level="DEBUG")
log = logging.getLogger(__file__)
def combine_segments(street_segments: Iterable[StreetSegment]) -> Generator[StreetSegment, None, None]:
    previous_segment: Optional[StreetSegment] = None
c = 0
c_combined = 0
for segment in street_segments:
c += 1
if previous_segment:
if previous_segment.points[1] == segment.points[0]:
previous_segment.points[1] = segment.points[1] # extend the segment
previous_segment.length += segment.length # and add up the length
continue
c_combined += 1
yield previous_segment
previous_segment = segment
    if previous_segment is not None:
        # Emit the trailing segment; skip it when the input was empty.
        c_combined += 1
        yield previous_segment
if c > 0:
log.info(f"Processed {c} segments, combined them into {c_combined} ({c_combined*100/c:.2f} %)")
class AzimuthProcessor:
def __init__(self, ts: datetime, threshold: float):
self.ts = ts
self.stats = Counter()
self.solstice_azimuths = []
self.threshold = threshold
@lru_cache(maxsize=None)
def get_sun_azimuth_cached(self, lng: float, lat: float) -> Tuple[float, float]:
o = ephem.Observer()
o.lat = str(lat)
o.lon = str(lng)
o.date = self.ts
s = ephem.Sun()
out = []
for d in [o.next_rising(s), o.next_setting(s)]:
o.date = d
s.compute(o)
az = math.degrees(s.az)
if az > 180:
az = az - 180
# log.info(f"Sunrise is on {d}, azimuth is {az}")
out.append(az)
self.solstice_azimuths = out
return tuple(out)
def get_sun_azimuth(self, lng: float, lat: float) -> Tuple[float, float]:
return self.get_sun_azimuth_cached(round(lng, 0), round(lat, 0))
def is_aligned_towards_sun(self, p1: Tuple[float, float], p2: Tuple[float, float]) -> bool:
seg_az = get_segment_details([p1, p2])[0]
sun_az = self.get_sun_azimuth(p1[0], p1[1])
# Count
self.stats[round(seg_az)] += 1
        # sun_az holds both the sunrise and sunset azimuths; the segment counts
        # as aligned when it is within the threshold of either one.
        return min(abs(az - seg_az) for az in sun_az) < self.threshold
def add_alignment_data(self, street_segment: StreetSegment) -> StreetSegment:
p1, p2 = street_segment.points
seg_az, length = get_segment_details([p1, p2])
sun_az = self.get_sun_azimuth(p1[0], p1[1])
# Count
self.stats[round(seg_az)] += length
street_segment.slope = seg_az
street_segment.sun_diff = min(abs(az - seg_az) for az in sun_az)
street_segment.length = length
return street_segment
def process_segments(self, street: Street) -> Generator[StreetSegment, None, None]:
previous_point = None
for point in street.points:
if previous_point:
segment = from_street(street, previous_point, point)
self.add_alignment_data(segment)
yield segment
previous_point = point
def get_stats(self) -> dict:
return {
"sun_azimuths": self.solstice_azimuths,
"last_update_ts": datetime.now().timestamp(),
"street_histogram": [self.stats.get(i, 0) for i in range(0, 180)],
}
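# Usage sketch (illustrative only; the date, threshold and `streets` iterable are
# assumptions, not part of this module):
#
#   processor = AzimuthProcessor(datetime(2021, 6, 21), threshold=5.0)
#   for street in streets:  # streets: Iterable[Street]
#       for segment in combine_segments(processor.process_segments(street)):
#           ...  # e.g. keep segments whose segment.sun_diff is small
#   stats = processor.get_stats()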
|
hhru/pycerberus-deb
|
refs/heads/master
|
tests/i18n_custom_test.py
|
2
|
# -*- coding: UTF-8 -*-
#
# The MIT License
#
# Copyright (c) 2009-2010 Felix Schwarz <felix.schwarz@oss.schwarz.eu>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from pycerberus.test_util import ValidationTest
from pycerberus.validators import IntegerValidator
class FrameworkValidator(IntegerValidator):
def translation_parameters(self, context):
return {'domain': 'framework'}
class ValidatorWithAdditionalKeys(FrameworkValidator):
def messages(self):
return {'foo': 'bar'}
def translation_parameters(self, context):
return {'domain': 'fnord'}
def translate_message(self, key, native_message, translation_parameters, context):
assert key == 'foo'
return 'A message from an application validator.'
class SimpleDerivedValidator(ValidatorWithAdditionalKeys):
pass
class ValidatorRedefiningKeys(FrameworkValidator):
def messages(self):
return {'empty': 'fnord'}
def translation_parameters(self, context):
# We need to change back the domain as this validator is used to get
# a real message - if the .mo file for the gettext domain does not
# exist, gettext will raise an error.
return {'domain': 'pycerberus'}
class ValidatorWithNonGettextTranslation(FrameworkValidator):
def translation_parameters(self, context):
# we change the domain here on purpose - if gettext would check for
# locale files for this domain, it would raise an exception because the
# file is not there...
return {'domain': 'application'}
def translate_message(self, key, native_message, translation_parameters, context):
assert key == 'inactive'
if context['locale'] == 'de':
return u'db Übersetzung'
return 'db translation'
def messages(self):
return {'inactive': 'Untranslated message'}
class CustomizedI18NBehaviorTest(ValidationTest):
validator_class = ValidatorWithAdditionalKeys
def domain_for_key(self, key):
gettext_args = self.validator()._implementation(key, 'translation_parameters', {})()
return gettext_args.get('domain')
def test_validator_can_define_more_translations_while_keeping_existing_ones(self):
self.assert_equals('Bitte geben Sie einen Wert ein.', self.message_for_key('empty'))
self.assert_equals('A message from an application validator.', self.message_for_key('foo'))
def test_validator_can_define_custom_parameters_for_translation_mechanism(self):
self.assert_equals('pycerberus', self.domain_for_key('empty'))
self.assert_equals('fnord', self.domain_for_key('foo'))
def test_parameters_for_translation_are_inherited_from_super_class(self):
self.assert_equals('fnord', self.domain_for_key('foo'))
self.init_validator(SimpleDerivedValidator())
self.assert_equals('fnord', self.domain_for_key('foo'))
def test_use_parameters_for_translation_from_class_where_key_is_defined(self):
self.init_validator(SimpleDerivedValidator())
self.assert_equals('framework', self.domain_for_key('invalid_type'))
self.assert_equals('fnord', self.domain_for_key('foo'))
def test_validators_can_use_their_own_translations_for_existing_keys(self):
self.assert_equals(u'Bitte geben Sie einen Wert ein.', self.message_for_key('empty'))
self.init_validator(ValidatorRedefiningKeys())
self.assert_equals('fnord', self.message_for_key('empty'))
def test_validators_can_use_other_translation_systems_than_gettext(self):
self.init_validator(ValidatorWithNonGettextTranslation())
self.assert_equals('db translation', self.message_for_key('inactive', locale='en'))
self.assert_equals(u'db Übersetzung', self.message_for_key('inactive', locale='de'))
def test_different_translation_system_is_only_applied_to_messages_declared_in_that_class(self):
self.init_validator(ValidatorWithNonGettextTranslation())
# This translation is present in the included mo files but not returned
# by the custom translation method.
self.assert_equals(u'Bitte geben Sie einen Wert ein.', self.message_for_key('empty'))
|
dagwieers/ansible
|
refs/heads/devel
|
lib/ansible/modules/storage/netapp/na_elementsw_volume.py
|
22
|
#!/usr/bin/python
# (c) 2017, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""Element OS Software Volume Manager"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_elementsw_volume
short_description: NetApp Element Software Manage Volumes
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.7'
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- Create, destroy, or update volumes on ElementSW
options:
state:
description:
- Whether the specified volume should exist or not.
required: true
choices: ['present', 'absent']
name:
description:
- The name of the volume to manage.
- It accepts volume_name or volume_id
required: true
account_id:
description:
- Account ID for the owner of this volume.
- It accepts Account_id or Account_name
required: true
enable512e:
description:
- Required when C(state=present)
- Should the volume provide 512-byte sector emulation?
type: bool
aliases:
- 512emulation
qos:
description: Initial quality of service settings for this volume. Configure as dict in playbooks.
attributes:
description: A YAML dictionary of attributes that you would like to apply on this volume.
size:
description:
- The size of the volume in (size_unit).
- Required when C(state = present).
size_unit:
description:
- The unit used to interpret the size parameter.
choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb']
default: 'gb'
access:
description:
- Access allowed for the volume.
- readOnly Only read operations are allowed.
- readWrite Reads and writes are allowed.
- locked No reads or writes are allowed.
- replicationTarget Identify a volume as the target volume for a paired set of volumes.
- If the volume is not paired, the access status is locked.
- If unspecified, the access settings of the clone will be the same as the source.
choices: ['readOnly', 'readWrite', 'locked', 'replicationTarget']
password:
description:
- ElementSW access account password
aliases:
- pass
username:
description:
- ElementSW access account user-name
aliases:
- user
'''
EXAMPLES = """
- name: Create Volume
na_elementsw_volume:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: present
name: AnsibleVol
qos: {minIOPS: 1000, maxIOPS: 20000, burstIOPS: 50000}
account_id: 3
enable512e: False
size: 1
size_unit: gb
- name: Update Volume
na_elementsw_volume:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: present
name: AnsibleVol
account_id: 3
access: readWrite
- name: Delete Volume
na_elementsw_volume:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: absent
name: AnsibleVol
account_id: 2
"""
RETURN = """
msg:
description: Success message
returned: success
type: str
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.netapp_elementsw_module import NaElementSWModule
HAS_SF_SDK = netapp_utils.has_sf_sdk()
try:
import solidfire.common
except Exception:
HAS_SF_SDK = False
class ElementOSVolume(object):
"""
Contains methods to parse arguments,
derive details of ElementSW objects
and send requests to ElementOS via
the ElementSW SDK
"""
def __init__(self):
"""
Parse arguments, setup state variables,
        check parameters and ensure SDK is installed
"""
self._size_unit_map = netapp_utils.SF_BYTE_MAP
self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=True, choices=['present', 'absent']),
name=dict(required=True, type='str'),
account_id=dict(required=True),
enable512e=dict(type='bool', aliases=['512emulation']),
qos=dict(required=False, type='dict', default=None),
attributes=dict(required=False, type='dict', default=None),
size=dict(type='int'),
size_unit=dict(default='gb',
choices=['bytes', 'b', 'kb', 'mb', 'gb', 'tb',
'pb', 'eb', 'zb', 'yb'], type='str'),
access=dict(required=False, type='str', default=None, choices=['readOnly', 'readWrite',
'locked', 'replicationTarget']),
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
required_if=[
('state', 'present', ['size', 'enable512e'])
],
supports_check_mode=True
)
param = self.module.params
# set up state variables
self.state = param['state']
self.name = param['name']
self.account_id = param['account_id']
self.enable512e = param['enable512e']
self.qos = param['qos']
self.attributes = param['attributes']
self.access = param['access']
self.size_unit = param['size_unit']
if param['size'] is not None:
self.size = param['size'] * self._size_unit_map[self.size_unit]
else:
self.size = None
if HAS_SF_SDK is False:
self.module.fail_json(msg="Unable to import the ElementSW Python SDK")
else:
try:
self.sfe = netapp_utils.create_sf_connection(module=self.module)
except solidfire.common.ApiServerError:
self.module.fail_json(msg="Unable to create the connection")
self.elementsw_helper = NaElementSWModule(self.sfe)
# add telemetry attributes
if self.attributes is not None:
self.attributes.update(self.elementsw_helper.set_element_attributes(source='na_elementsw_volume'))
else:
self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_volume')
def get_account_id(self):
"""
Return account id if found
"""
try:
# Update and return self.account_id
self.account_id = self.elementsw_helper.account_exists(self.account_id)
return self.account_id
except Exception as err:
self.module.fail_json(msg="Error: account_id %s does not exist" % self.account_id, exception=to_native(err))
def get_volume(self):
"""
Return volume details if found
"""
# Get volume details
volume_id = self.elementsw_helper.volume_exists(self.name, self.account_id)
if volume_id is not None:
# Return volume_details
volume_details = self.elementsw_helper.get_volume(volume_id)
if volume_details is not None:
return volume_details
return None
def create_volume(self):
"""
Create Volume
:return: True if created, False if fails
"""
try:
self.sfe.create_volume(name=self.name,
account_id=self.account_id,
total_size=self.size,
enable512e=self.enable512e,
qos=self.qos,
attributes=self.attributes)
except Exception as err:
self.module.fail_json(msg="Error provisioning volume %s of size %s" % (self.name, self.size),
exception=to_native(err))
def delete_volume(self, volume_id):
"""
Delete and purge the volume using volume id
:return: Success : True , Failed : False
"""
try:
self.sfe.delete_volume(volume_id=volume_id)
self.sfe.purge_deleted_volume(volume_id=volume_id)
# Delete method will delete and also purge the volume instead of moving the volume state to inactive.
except Exception as err:
# Throwing the exact error message instead of generic error message
self.module.fail_json(msg=err.message,
exception=to_native(err))
def update_volume(self, volume_id):
"""
Update the volume with the specified param
:return: Success : True, Failed : False
"""
try:
self.sfe.modify_volume(volume_id,
account_id=self.account_id,
access=self.access,
qos=self.qos,
total_size=self.size,
attributes=self.attributes)
except Exception as err:
# Throwing the exact error message instead of generic error message
self.module.fail_json(msg=err.message,
exception=to_native(err))
def apply(self):
# Perform pre-checks, call functions and exit
changed = False
volume_exists = False
update_volume = False
self.get_account_id()
volume_detail = self.get_volume()
if volume_detail:
volume_exists = True
volume_id = volume_detail.volume_id
if self.state == 'absent':
# Checking for state change(s) here, and applying it later in the code allows us to support
# check_mode
changed = True
elif self.state == 'present':
# Checking all the params for update operation
if volume_detail.access is not None and self.access is not None and volume_detail.access != self.access:
update_volume = True
changed = True
elif volume_detail.account_id is not None and self.account_id is not None \
and volume_detail.account_id != self.account_id:
update_volume = True
changed = True
elif volume_detail.qos is not None and self.qos is not None:
"""
Actual volume_detail.qos has ['burst_iops', 'burst_time', 'curve', 'max_iops', 'min_iops'] keys.
                    As only minIOPS, maxIOPS and burstIOPS are important to consider, only these values are checked.
"""
volume_qos = volume_detail.qos.__dict__
if volume_qos['min_iops'] != self.qos['minIOPS'] or volume_qos['max_iops'] != self.qos['maxIOPS'] \
or volume_qos['burst_iops'] != self.qos['burstIOPS']:
update_volume = True
changed = True
else:
# If check fails, do nothing
pass
if volume_detail.total_size is not None and volume_detail.total_size != self.size:
size_difference = abs(float(volume_detail.total_size - self.size))
# Change size only if difference is bigger than 0.001
if size_difference / self.size > 0.001:
update_volume = True
changed = True
else:
# If check fails, do nothing
pass
if volume_detail.attributes is not None and self.attributes is not None and \
volume_detail.attributes != self.attributes:
update_volume = True
changed = True
else:
if self.state == 'present':
changed = True
result_message = ""
if changed:
if self.module.check_mode:
result_message = "Check mode, skipping changes"
else:
if self.state == 'present':
if not volume_exists:
self.create_volume()
result_message = "Volume created"
elif update_volume:
self.update_volume(volume_id)
result_message = "Volume updated"
elif self.state == 'absent':
self.delete_volume(volume_id)
result_message = "Volume deleted"
self.module.exit_json(changed=changed, msg=result_message)
def main():
# Create object and call apply
na_elementsw_volume = ElementOSVolume()
na_elementsw_volume.apply()
if __name__ == '__main__':
main()
|
Peddle/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/tests/bug8245/tests.py
|
125
|
from django.contrib import admin
from django.utils.unittest import TestCase
class Bug8245Test(TestCase):
"""
Test for bug #8245 - don't raise an AlreadyRegistered exception when using
autodiscover() and an admin.py module contains an error.
"""
def test_bug_8245(self):
# The first time autodiscover is called, we should get our real error.
with self.assertRaises(Exception) as cm:
admin.autodiscover()
self.assertEqual(str(cm.exception), "Bad admin module")
# Calling autodiscover again should raise the very same error it did
# the first time, not an AlreadyRegistered error.
with self.assertRaises(Exception) as cm:
admin.autodiscover()
self.assertEqual(str(cm.exception), "Bad admin module")
|
F483/ngcccbase
|
refs/heads/master
|
ngcccbase/tests/test_blockchain.py
|
4
|
import unittest
import os, tempfile, shutil
import time
from ngcccbase.pwallet import PersistentWallet
from ngcccbase.services.chroma import ChromaBlockchainState
from ngcccbase.blockchain import VerifierBlockchainState
class TestVerifierBlockchainState(unittest.TestCase):
@classmethod
def setUpClass(cls):
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL)
#cls.tempdir = tempfile.mkdtemp()
cls.tempdir = '/path/to/folder'
cls.pwallet = PersistentWallet(os.path.join(cls.tempdir, 'testnet.wallet'), True)
cls.pwallet.init_model()
cls.vbs = VerifierBlockchainState(cls.tempdir, ChromaBlockchainState())
@classmethod
def tearDownClass(cls):
#shutil.rmtree(cls.tempdir)
pass
def test_(self):
self.vbs.start()
while self.vbs.is_running():
time.sleep(0.1)
if __name__ == '__main__':
unittest.main()
|
Raphx/fuzza
|
refs/heads/master
|
tests/transformer/test_base64.py
|
1
|
import unittest
from fuzza.transformer._base64 import transform
class TestTransformerBase64(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.data = [
b'this is string',
b'aaa',
b'kkoopq'
]
def test_base64_encode_correctly(self):
expected = [
b'dGhpcyBpcyBzdHJpbmc=',
b'YWFh',
b'a2tvb3Bx'
]
self.assertListEqual(
transform(self.data),
expected
)
|
PlayUAV/MissionPlanner
|
refs/heads/master
|
Lib/site-packages/scipy/linalg/benchmarks/bench_basic.py
|
57
|
import sys
from numpy.testing import *
import numpy.linalg as linalg
def random(size):
return rand(*size)
class TestSolve(TestCase):
def bench_random(self):
basic_solve = linalg.solve
print
print ' Solving system of linear equations'
print ' =================================='
print ' | contiguous | non-contiguous '
print '----------------------------------------------'
print ' size | scipy | basic | scipy | basic '
for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
repeat *= 2
print '%5s' % size,
sys.stdout.flush()
a = random([size,size])
# larger diagonal ensures non-singularity:
for i in range(size): a[i,i] = 10*(.1+a[i,i])
b = random([size])
print '| %6.2f ' % measure('solve(a,b)',repeat),
sys.stdout.flush()
print '| %6.2f ' % measure('basic_solve(a,b)',repeat),
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert not a.flags['CONTIGUOUS']
print '| %6.2f ' % measure('solve(a,b)',repeat),
sys.stdout.flush()
print '| %6.2f ' % measure('basic_solve(a,b)',repeat),
sys.stdout.flush()
print ' (secs for %s calls)' % (repeat)
class TestInv(TestCase):
def bench_random(self):
basic_inv = linalg.inv
print
print ' Finding matrix inverse'
print ' =================================='
print ' | contiguous | non-contiguous '
print '----------------------------------------------'
print ' size | scipy | basic | scipy | basic'
for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
repeat *= 2
print '%5s' % size,
sys.stdout.flush()
a = random([size,size])
# large diagonal ensures non-singularity:
for i in range(size): a[i,i] = 10*(.1+a[i,i])
print '| %6.2f ' % measure('inv(a)',repeat),
sys.stdout.flush()
print '| %6.2f ' % measure('basic_inv(a)',repeat),
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert not a.flags['CONTIGUOUS']
print '| %6.2f ' % measure('inv(a)',repeat),
sys.stdout.flush()
print '| %6.2f ' % measure('basic_inv(a)',repeat),
sys.stdout.flush()
print ' (secs for %s calls)' % (repeat)
class TestDet(TestCase):
def bench_random(self):
basic_det = linalg.det
print
print ' Finding matrix determinant'
print ' =================================='
print ' | contiguous | non-contiguous '
print '----------------------------------------------'
print ' size | scipy | basic | scipy | basic '
for size,repeat in [(20,1000),(100,150),(500,2),(1000,1)][:-1]:
repeat *= 2
print '%5s' % size,
sys.stdout.flush()
a = random([size,size])
print '| %6.2f ' % measure('det(a)',repeat),
sys.stdout.flush()
print '| %6.2f ' % measure('basic_det(a)',repeat),
sys.stdout.flush()
a = a[-1::-1,-1::-1] # turn into a non-contiguous array
assert not a.flags['CONTIGUOUS']
print '| %6.2f ' % measure('det(a)',repeat),
sys.stdout.flush()
print '| %6.2f ' % measure('basic_det(a)',repeat),
sys.stdout.flush()
print ' (secs for %s calls)' % (repeat)
if __name__ == "__main__":
run_module_suite()
|
noam09/kodi
|
refs/heads/master
|
script.module.israeliveresolver/lib/livestreamer/requests/packages/chardet/langhungarianmodel.py
|
2762
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
Latin2_HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
win1250HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 94.7368%
# first 1024 sequences: 5.2623%
# rest sequences: 0.8894%
# negative sequences: 0.0009%
HungarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
)
Latin2HungarianModel = {
'charToOrderMap': Latin2_HungarianCharToOrderMap,
'precedenceMatrix': HungarianLangModel,
'mTypicalPositiveRatio': 0.947368,
'keepEnglishLetter': True,
'charsetName': "ISO-8859-2"
}
Win1250HungarianModel = {
'charToOrderMap': win1250HungarianCharToOrderMap,
'precedenceMatrix': HungarianLangModel,
'mTypicalPositiveRatio': 0.947368,
'keepEnglishLetter': True,
'charsetName': "windows-1250"
}
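# How these models are used (summary): the single-byte charset prober maps each
# input byte through 'charToOrderMap' and scores consecutive order pairs against
# 'precedenceMatrix'; 'mTypicalPositiveRatio' then normalises the resulting
# confidence for the two Hungarian encodings defined above.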
# flake8: noqa
|
vidoardes/XS-ICS
|
refs/heads/master
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py
|
12980
|
# SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
import wx
except ImportError:
raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
Y_OFFSET = 100
RECT_HEIGHT = 100
RECT_SPACE = 50
EVENT_MARKING_WIDTH = 5
def __init__(self, sched_tracer, title, parent = None, id = -1):
wx.Frame.__init__(self, parent, id, title)
(self.screen_width, self.screen_height) = wx.GetDisplaySize()
self.screen_width -= 10
self.screen_height -= 10
self.zoom = 0.5
self.scroll_scale = 20
self.sched_tracer = sched_tracer
self.sched_tracer.set_root_win(self)
(self.ts_start, self.ts_end) = sched_tracer.interval()
self.update_width_virtual()
self.nr_rects = sched_tracer.nr_rectangles() + 1
self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
# whole window panel
self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
# scrollable container
self.scroll = wx.ScrolledWindow(self.panel)
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
self.scroll.EnableScrolling(True, True)
self.scroll.SetFocus()
# scrollable drawing area
self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Fit()
self.Fit()
self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
self.txt = None
self.Show(True)
def us_to_px(self, val):
return val / (10 ** 3) * self.zoom
def px_to_us(self, val):
return (val / self.zoom) * (10 ** 3)
def scroll_start(self):
(x, y) = self.scroll.GetViewStart()
return (x * self.scroll_scale, y * self.scroll_scale)
def scroll_start_us(self):
(x, y) = self.scroll_start()
return self.px_to_us(x)
def paint_rectangle_zone(self, nr, color, top_color, start, end):
offset_px = self.us_to_px(start - self.ts_start)
width_px = self.us_to_px(end - self.ts_start)
offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
width_py = RootFrame.RECT_HEIGHT
dc = self.dc
if top_color is not None:
(r, g, b) = top_color
top_color = wx.Colour(r, g, b)
brush = wx.Brush(top_color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
width_py -= RootFrame.EVENT_MARKING_WIDTH
offset_py += RootFrame.EVENT_MARKING_WIDTH
(r ,g, b) = color
color = wx.Colour(r, g, b)
brush = wx.Brush(color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
def update_rectangles(self, dc, start, end):
start += self.ts_start
end += self.ts_start
self.sched_tracer.fill_zone(start, end)
def on_paint(self, event):
dc = wx.PaintDC(self.scroll_panel)
self.dc = dc
width = min(self.width_virtual, self.screen_width)
(x, y) = self.scroll_start()
start = self.px_to_us(x)
end = self.px_to_us(x + width)
self.update_rectangles(dc, start, end)
def rect_from_ypixel(self, y):
y -= RootFrame.Y_OFFSET
rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
return -1
return rect
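# Worked example (added for clarity, not in the original): with Y_OFFSET = 100,
# RECT_HEIGHT = 100 and RECT_SPACE = 50, a click at y = 260 gives
# rect = (260 - 100) / 150 = 1 and height = 10, i.e. the second row;
# a y that lands in the 50 px spacing band (height > RECT_HEIGHT) returns -1.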
def update_summary(self, txt):
if self.txt:
self.txt.Destroy()
self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
def on_mouse_down(self, event):
(x, y) = event.GetPositionTuple()
rect = self.rect_from_ypixel(y)
if rect == -1:
return
t = self.px_to_us(x) + self.ts_start
self.sched_tracer.mouse_down(rect, t)
def update_width_virtual(self):
self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
def __zoom(self, x):
self.update_width_virtual()
(xpos, ypos) = self.scroll.GetViewStart()
xpos = self.us_to_px(x) / self.scroll_scale
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
self.Refresh()
def zoom_in(self):
x = self.scroll_start_us()
self.zoom *= 2
self.__zoom(x)
def zoom_out(self):
x = self.scroll_start_us()
self.zoom /= 2
self.__zoom(x)
def on_key_press(self, event):
key = event.GetRawKeyCode()
if key == ord("+"):
self.zoom_in()
return
if key == ord("-"):
self.zoom_out()
return
key = event.GetKeyCode()
(x, y) = self.scroll.GetViewStart()
if key == wx.WXK_RIGHT:
self.scroll.Scroll(x + 1, y)
elif key == wx.WXK_LEFT:
self.scroll.Scroll(x - 1, y)
elif key == wx.WXK_DOWN:
self.scroll.Scroll(x, y + 1)
elif key == wx.WXK_UP:
self.scroll.Scroll(x, y - 1)
|
guewen/OpenUpgrade
|
refs/heads/master
|
addons/mail/mail_followers.py
|
22
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import osv, fields
from openerp import tools, SUPERUSER_ID
from openerp.tools.translate import _
from openerp.tools.mail import plaintext2html
class mail_followers(osv.Model):
""" mail_followers holds the data related to the follow mechanism inside
OpenERP. Partners can choose to follow documents (records) of any kind
that inherits from mail.thread. Following a document allows a partner to receive
notifications for new messages.
A subscription is characterized by:
:param: res_model: model of the followed objects
:param: res_id: ID of resource (may be 0 for all objects)
"""
_name = 'mail.followers'
_rec_name = 'partner_id'
_log_access = False
_description = 'Document Followers'
_columns = {
'res_model': fields.char('Related Document Model', size=128,
required=True, select=1,
help='Model of the followed resource'),
'res_id': fields.integer('Related Document ID', select=1,
help='Id of the followed resource'),
'partner_id': fields.many2one('res.partner', string='Related Partner',
ondelete='cascade', required=True, select=1),
'subtype_ids': fields.many2many('mail.message.subtype', string='Subtype',
help="Message subtypes followed, meaning subtypes that will be pushed onto the user's Wall."),
}
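# Hypothetical usage sketch (not part of the original module): a follower is a
# plain ORM record, so subscribing a partner to a record could look roughly like
# (record_id and partner_id are placeholders):
#   self.pool.get('mail.followers').create(cr, uid, {
#       'res_model': 'my.model', 'res_id': record_id, 'partner_id': partner_id,
#   }, context=context)
# In practice these rows are typically managed through mail.thread (e.g. its
# subscribe helpers) rather than created directly.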
class mail_notification(osv.Model):
""" Class holding notifications pushed to partners. Followers and partners
added in 'contacts to notify' receive notifications. """
_name = 'mail.notification'
_rec_name = 'partner_id'
_log_access = False
_description = 'Notifications'
_columns = {
'partner_id': fields.many2one('res.partner', string='Contact',
ondelete='cascade', required=True, select=1),
'read': fields.boolean('Read', select=1),
'starred': fields.boolean('Starred', select=1,
help='Starred message that goes into the todo mailbox'),
'message_id': fields.many2one('mail.message', string='Message',
ondelete='cascade', required=True, select=1),
}
_defaults = {
'read': False,
'starred': False,
}
def init(self, cr):
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('mail_notification_partner_id_read_starred_message_id',))
if not cr.fetchone():
cr.execute('CREATE INDEX mail_notification_partner_id_read_starred_message_id ON mail_notification (partner_id, read, starred, message_id)')
def get_partners_to_email(self, cr, uid, ids, message, context=None):
""" Return the list of partners to notify, based on their preferences.
:param browse_record message: mail.message to notify
:param list partners_to_notify: optional list of partner ids restricting
the notifications to process
"""
notify_pids = []
for notification in self.browse(cr, uid, ids, context=context):
if notification.read:
continue
partner = notification.partner_id
# Do not send to partners without email address defined
if not partner.email:
continue
# Do not send to partners with the same email address as the author (can cause loops or a bounce effect due to a messy database)
if message.author_id and message.author_id.email == partner.email:
continue
# Partner does not want to receive any emails or is opt-out
if partner.notify_email == 'none':
continue
notify_pids.append(partner.id)
return notify_pids
def get_signature_footer(self, cr, uid, user_id, res_model=None, res_id=None, context=None):
""" Format a standard footer for notification emails (such as pushed messages
notification or invite emails).
Format:
<p>--<br />
Administrator
</p>
<div>
<small>Sent from <a ...>Your Company</a> using <a ...>OpenERP</a>.</small>
</div>
"""
footer = ""
if not user_id:
return footer
# add user signature
user = self.pool.get("res.users").browse(cr, SUPERUSER_ID, [user_id], context=context)[0]
if user.signature:
signature = plaintext2html(user.signature)
else:
signature = "--<br />%s" % user.name
footer = tools.append_content_to_html(footer, signature, plaintext=False, container_tag='p')
# add company signature
if user.company_id.website:
website_url = ('http://%s' % user.company_id.website) if not user.company_id.website.lower().startswith(('http:', 'https:')) \
else user.company_id.website
company = "<a style='color:inherit' href='%s'>%s</a>" % (website_url, user.company_id.name)
else:
company = user.company_id.name
sent_by = _('Sent from %(company)s using %(openerp)s')
signature_company = '<small>%s</small>' % (sent_by % {
'company': company,
'openerp': "<a style='color:inherit' href='https://www.openerp.com/'>OpenERP</a>"
})
footer = tools.append_content_to_html(footer, signature_company, plaintext=False, container_tag='div')
return footer
def update_message_notification(self, cr, uid, ids, message_id, partner_ids, context=None):
existing_pids = set()
new_pids = set()
new_notif_ids = []
for notification in self.browse(cr, uid, ids, context=context):
existing_pids.add(notification.partner_id.id)
# update existing notifications
self.write(cr, uid, ids, {'read': False}, context=context)
# create new notifications
new_pids = set(partner_ids) - existing_pids
for new_pid in new_pids:
new_notif_ids.append(self.create(cr, uid, {'message_id': message_id, 'partner_id': new_pid, 'read': False}, context=context))
return new_notif_ids
def _notify_email(self, cr, uid, ids, message_id, force_send=False, user_signature=True, context=None):
message = self.pool['mail.message'].browse(cr, SUPERUSER_ID, message_id, context=context)
# compute partners
email_pids = self.get_partners_to_email(cr, uid, ids, message, context=context)
if not email_pids:
return True
# compute email body (signature, company data)
body_html = message.body
user_id = message.author_id and message.author_id.user_ids and message.author_id.user_ids[0] and message.author_id.user_ids[0].id or None
if user_signature:
signature_company = self.get_signature_footer(cr, uid, user_id, res_model=message.model, res_id=message.res_id, context=context)
body_html = tools.append_content_to_html(body_html, signature_company, plaintext=False, container_tag='div')
# compute email references
references = message.parent_id.message_id if message.parent_id else False
# create email values
max_recipients = 100
chunks = [email_pids[x:x + max_recipients] for x in xrange(0, len(email_pids), max_recipients)]
email_ids = []
for chunk in chunks:
mail_values = {
'mail_message_id': message.id,
'auto_delete': True,
'body_html': body_html,
'recipient_ids': [(4, id) for id in chunk],
'references': references,
}
email_ids.append(self.pool.get('mail.mail').create(cr, uid, mail_values, context=context))
if force_send and len(chunks) < 6: # for more than 500 followers, use the queue system
self.pool.get('mail.mail').send(cr, uid, email_ids, context=context)
return True
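# Worked example of the chunking above (illustrative, not in the original):
# with max_recipients = 100, 250 partners to email yield chunks of 100/100/50,
# i.e. three mail.mail records; force_send only bypasses the mail queue when
# len(chunks) < 6, i.e. for at most 500 recipients.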
def _notify(self, cr, uid, message_id, partners_to_notify=None, context=None,
force_send=False, user_signature=True):
""" Send by email the notification depending on the user preferences
:param list partners_to_notify: optional list of partner ids restricting
the notifications to process
:param bool force_send: if True, the generated mail.mail is
immediately sent after being created, as if the scheduler
was executed for this message only.
:param bool user_signature: if True, the generated mail.mail body is
the body of the related mail.message with the author's signature
"""
notif_ids = self.search(cr, SUPERUSER_ID, [('message_id', '=', message_id), ('partner_id', 'in', partners_to_notify)], context=context)
# update or create notifications
new_notif_ids = self.update_message_notification(cr, SUPERUSER_ID, notif_ids, message_id, partners_to_notify, context=context)
# mail_notify_noemail (do not send email) or no partner_ids: do not send, return
if context and context.get('mail_notify_noemail'):
return True
# browse as SUPERUSER_ID because of access to res_partner not necessarily allowed
self._notify_email(cr, SUPERUSER_ID, new_notif_ids, message_id, force_send, user_signature, context=context)
|
tsnoam/python-telegram-bot
|
refs/heads/master
|
telegram/__init__.py
|
1
|
#!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2016
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""A library that provides a Python interface to the Telegram Bot API"""
from .base import TelegramObject
from .user import User
from .chat import Chat
from .photosize import PhotoSize
from .audio import Audio
from .voice import Voice
from .document import Document
from .sticker import Sticker
from .video import Video
from .contact import Contact
from .location import Location
from .chataction import ChatAction
from .userprofilephotos import UserProfilePhotos
from .replymarkup import ReplyMarkup
from .replykeyboardmarkup import ReplyKeyboardMarkup
from .replykeyboardhide import ReplyKeyboardHide
from .forcereply import ForceReply
from .error import TelegramError
from .inputfile import InputFile
from .file import File
from .nullhandler import NullHandler
from .emoji import Emoji
from .parsemode import ParseMode
from .message import Message
from .inlinequery import InlineQuery
from .choseninlineresult import ChosenInlineResult
from .inlinequeryresult import InlineQueryResultArticle, InlineQueryResultGif,\
InlineQueryResultMpeg4Gif, InlineQueryResultPhoto, InlineQueryResultVideo
from .update import Update
from .bot import Bot
def Updater(*args, **kwargs):
"""
Load the updater module on invocation and return an Updater instance.
"""
import warnings
warnings.warn("telegram.Updater is being deprecated, please use "
"telegram.ext.Updater from now on.")
from .ext.updater import Updater as Up
return Up(*args, **kwargs)
def Dispatcher(*args, **kwargs):
"""
Load the dispatcher module on invocation and return a Dispatcher instance.
"""
import warnings
warnings.warn("telegram.Dispatcher is being deprecated, please use "
"telegram.ext.Dispatcher from now on.")
from .ext.dispatcher import Dispatcher as Dis
return Dis(*args, **kwargs)
def JobQueue(*args, **kwargs):
"""
Load the jobqueue module on invocation and return a JobQueue instance.
"""
import warnings
warnings.warn("telegram.JobQueue is being deprecated, please use "
"telegram.ext.JobQueue from now on.")
from .ext.jobqueue import JobQueue as JobQ
return JobQ(*args, **kwargs)
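# Migration note (added for clarity, not part of the original file): the three
# wrappers above only emit a deprecation warning and forward to telegram.ext,
# so new code is expected to import from there directly, e.g.:
#   from telegram.ext import Updater, Dispatcher, JobQueue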
__author__ = 'devs@python-telegram-bot.org'
__version__ = '3.4'
__all__ = ('Audio', 'Bot', 'Chat', 'Emoji', 'TelegramError', 'InputFile',
'Contact', 'ForceReply', 'ReplyKeyboardHide', 'ReplyKeyboardMarkup',
'UserProfilePhotos', 'ChatAction', 'Location', 'Video', 'Document',
'Sticker', 'File', 'PhotoSize', 'Update', 'ParseMode', 'Message',
'User', 'TelegramObject', 'NullHandler', 'Voice', 'InlineQuery',
'ReplyMarkup', 'ChosenInlineResult', 'InlineQueryResultArticle',
'InlineQueryResultGif', 'InlineQueryResultPhoto',
'InlineQueryResultMpeg4Gif', 'InlineQueryResultVideo')
|
grilo/ansible-1
|
refs/heads/devel
|
test/integration/targets/module_utils/module_utils/a/b/c/__init__.py
|
12133432
| |
adelton/django
|
refs/heads/master
|
tests/admin_scripts/custom_templates/app_template/__init__.py
|
12133432
| |
coderbhupendra/cltk
|
refs/heads/master
|
cltk/text_reuse/__init__.py
|
12133432
| |
rlowrance/dot-vimrc
|
refs/heads/master
|
.vim/.vim/.vim/.vim/.vim/ftplugin/orgmode/vimbuffer.py
|
9
|
# -*- coding: utf-8 -*-
"""
vimbuffer
~~~~~~~~~~
VimBuffer and VimBufferContent are the interface between liborgmode and
vim.
VimBuffer extends the liborgmode.document.Document().
Document() is just a general implementation for loading an org file. It
has no interface to an actual file or vim buffer. This is the task of
vimbuffer.VimBuffer(). It is the interface to vim. The main tasks for
VimBuffer are to provide read and write access to a real vim buffer.
VimBufferContent is a helper class for VimBuffer. Basically, it hides the
details of encoding - everything read from or written to VimBufferContent
is UTF-8.
"""
from UserList import UserList
import vim
import settings
from exceptions import BufferNotFound, BufferNotInSync
from liborgmode.documents import Document, MultiPurposeList, Direction
from liborgmode.headings import Heading
class VimBuffer(Document):
def __init__(self, bufnr=0):
u"""
:bufnr: 0: current buffer, every other number refers to another buffer
"""
Document.__init__(self)
self._bufnr = vim.current.buffer.number if bufnr == 0 else bufnr
self._changedtick = -1
self._cached_heading = None
if self._bufnr == vim.current.buffer.number:
self._content = VimBufferContent(vim.current.buffer)
else:
_buffer = None
for b in vim.buffers:
if self._bufnr == b.number:
_buffer = b
break
if not _buffer:
raise BufferNotFound(u'Unable to locate buffer number #%d' % self._bufnr)
self._content = VimBufferContent(_buffer)
self.update_changedtick()
self._orig_changedtick = self._changedtick
@property
def tabstop(self):
return int(vim.eval(u'&ts'.encode(u'utf-8')))
@property
def tag_column(self):
return int(settings.get('org_tag_column', '77'))
@property
def is_insync(self):
if self._changedtick == self._orig_changedtick:
self.update_changedtick()
return self._changedtick == self._orig_changedtick
@property
def bufnr(self):
u"""
:returns: The buffer's number for the current document
"""
return self._bufnr
def changedtick():
""" Number of changes in vimbuffer """
def fget(self):
return self._changedtick
def fset(self, value):
self._changedtick = value
return locals()
changedtick = property(**changedtick())
def get_todo_states(self, strip_access_key=True):
u""" Returns a list containing a tuple of two lists of allowed todo
states split by todo and done states. Multiple todo-done state
sequences can be defined.
:returns: [([todo states], [done states]), ..]
"""
states = settings.get(u'org_todo_keywords', [])
if type(states) not in (list, tuple):
return []
def parse_states(s, stop=0):
res = []
if not s:
return res
if type(s[0]) in (unicode, str):
r = []
for i in s:
_i = i
if type(_i) == str:
_i = _i.decode(u'utf-8')
if type(_i) == unicode and _i:
if strip_access_key and u'(' in _i:
_i = _i[:_i.index(u'(')]
if _i:
r.append(_i)
else:
r.append(_i)
if not u'|' in r:
if not stop:
res.append((r[:-1], [r[-1]]))
else:
res = (r[:-1], [r[-1]])
else:
seperator_pos = r.index(u'|')
if not stop:
res.append((r[0:seperator_pos], r[seperator_pos + 1:]))
else:
res = (r[0:seperator_pos], r[seperator_pos + 1:])
elif type(s) in (list, tuple) and not stop:
for i in s:
r = parse_states(i, stop=1)
if r:
res.append(r)
return res
return parse_states(states)
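# Example (illustrative, not in the original): with
# g:org_todo_keywords = ['TODO', 'WAITING', '|', 'DONE'] this returns
# [(['TODO', 'WAITING'], ['DONE'])]; access keys such as 'TODO(t)' are
# stripped when strip_access_key is True.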
def update_changedtick(self):
if self.bufnr == vim.current.buffer.number:
self._changedtick = int(vim.eval(u'b:changedtick'.encode(u'utf-8')))
else:
vim.command(u'unlet! g:org_changedtick | let g:org_lz = &lz | let g:org_hidden = &hidden | set lz hidden'.encode(u'utf-8'))
# TODO is this likely to fail? maybe some error handling should be added
vim.command((u'keepalt buffer %d | let g:org_changedtick = b:changedtick | buffer %d' % \
(self.bufnr, vim.current.buffer.number)).encode(u'utf-8'))
vim.command(u'let &lz = g:org_lz | let &hidden = g:org_hidden | unlet! g:org_lz g:org_hidden | redraw'.encode(u'utf-8'))
self._changedtick = int(vim.eval(u'g:org_changedtick'.encode(u'utf-8')))
def write(self):
u""" write the changes to the vim buffer
:returns: True if something was written, otherwise False
"""
if not self.is_dirty:
return False
self.update_changedtick()
if not self.is_insync:
raise BufferNotInSync(u'Buffer is not in sync with vim!')
# write meta information
if self.is_dirty_meta_information:
meta_end = 0 if self._orig_meta_information_len is None else self._orig_meta_information_len
self._content[:meta_end] = self.meta_information
self._orig_meta_information_len = len(self.meta_information)
# remove deleted headings
already_deleted = []
for h in sorted(self._deleted_headings, cmp=lambda x, y: cmp(x._orig_start, y._orig_start), reverse=True):
if h._orig_start is not None and h._orig_start not in already_deleted:
# this is a heading that actually exists on the buffer and it
# needs to be removed
del self._content[h._orig_start:h._orig_start + h._orig_len]
already_deleted.append(h._orig_start)
del self._deleted_headings[:]
del already_deleted
# update changed headings and add new headings
for h in self.all_headings():
if h.is_dirty:
if h._orig_start is not None:
# this is a heading that existed before and was changed. It
# needs to be replaced
if h.is_dirty_heading:
self._content[h.start:h.start + 1] = [unicode(h)]
if h.is_dirty_body:
self._content[h.start + 1:h.start + h._orig_len] = h.body
else:
# this is a new heading. It needs to be inserted
self._content[h.start:h.start] = [unicode(h)] + h.body
h._dirty_heading = False
h._dirty_body = False
# for all headings the length and start offset needs to be updated
h._orig_start = h.start
h._orig_len = len(h)
self._dirty_meta_information = False
self._dirty_document = False
self.update_changedtick()
self._orig_changedtick = self._changedtick
return True
def write_heading(self, heading, including_children=True):
""" WARNING: use this function only when you know what you are doing!
This function writes a heading to the vim buffer. It offers performance
advantages over the regular write() function. This advantage is
combined with no sanity checks! Whenever you use this function, make
sure the heading you are writing contains the right offsets
(Heading._orig_start, Heading._orig_len).
Usage example:
# Retrieve a potentially dirty document
d = ORGMODE.get_document(allow_dirty=True)
# Don't rely on the DOM, retrieve the heading afresh
h = d.find_heading(direction=Direction.FORWARD, position=100)
# Update tags
h.tags = ['tag1', 'tag2']
# Write the heading
d.write_heading(h)
This function can't be used to delete a heading!
:heading: Write this heading to the vim buffer
:including_children: Also include children in the update
:returns: The written heading
"""
if including_children and heading.children:
for child in heading.children[::-1]:
self.write_heading(child, including_children)
if heading.is_dirty:
if heading._orig_start is not None:
# this is a heading that existed before and was changed. It
# needs to be replaced
if heading.is_dirty_heading:
self._content[heading._orig_start:heading._orig_start + 1] = [unicode(heading)]
if heading.is_dirty_body:
self._content[heading._orig_start + 1:heading._orig_start + heading._orig_len] = heading.body
else:
# this is a new heading. It needs to be inserted
raise ValueError('Heading must contain the attribute _orig_start! %s' % heading)
heading._dirty_heading = False
heading._dirty_body = False
# for all headings the length offset needs to be updated
heading._orig_len = len(heading)
return heading
def previous_heading(self, position=None):
u""" Find the next heading (search forward) and return the related object
:returns: Heading object or None
"""
h = self.current_heading(position=position)
if h:
return h.previous_heading
def current_heading(self, position=None):
u""" Find the current heading (search backward) and return the related object
:returns: Heading object or None
"""
if position is None:
position = vim.current.window.cursor[0] - 1
if not self.headings:
return
def binaryFindInDocument():
hi = len(self.headings)
lo = 0
while lo < hi:
mid = (lo+hi)//2
h = self.headings[mid]
if h.end_of_last_child < position:
lo = mid + 1
elif h.start > position:
hi = mid
else:
return binaryFindHeading(h)
def binaryFindHeading(heading):
if not heading.children or heading.end >= position:
return heading
hi = len(heading.children)
lo = 0
while lo < hi:
mid = (lo+hi)//2
h = heading.children[mid]
if h.end_of_last_child < position:
lo = mid + 1
elif h.start > position:
hi = mid
else:
return binaryFindHeading(h)
# look at the cache to find the heading
h_tmp = self._cached_heading
if h_tmp is not None:
if h_tmp.end_of_last_child > position and \
h_tmp.start < position:
if h_tmp.end < position:
self._cached_heading = binaryFindHeading(h_tmp)
return self._cached_heading
self._cached_heading = binaryFindInDocument()
return self._cached_heading
def next_heading(self, position=None):
u""" Find the next heading (search forward) and return the related object
:returns: Heading object or None
"""
h = self.current_heading(position=position)
if h:
return h.next_heading
def find_current_heading(self, position=None, heading=Heading):
u""" Find the next heading backwards from the position of the cursor.
The difference to the function current_heading is that the returned
object is not built into the DOM. In case the DOM doesn't exist or is
out of sync this function is much faster in fetching the current
heading.
:position: The position to start the search from
:heading: The base class for the returned heading
:returns: Heading object or None
"""
return self.find_heading(vim.current.window.cursor[0] - 1 \
if position is None else position, \
direction=Direction.BACKWARD, heading=heading, \
connect_with_document=False)
class VimBufferContent(MultiPurposeList):
u""" Vim Buffer Content is a UTF-8 wrapper around a vim buffer. When
retrieving or setting items in the buffer an automatic conversion is
performed.
This ensures UTF-8 usage on the side of liborgmode and the vim plugin
vim-orgmode.
"""
def __init__(self, vimbuffer, on_change=None):
MultiPurposeList.__init__(self, on_change=on_change)
# replace data with vimbuffer to make operations change the actual
# buffer
self.data = vimbuffer
def __contains__(self, item):
i = item
if type(i) is unicode:
i = item.encode(u'utf-8')
return MultiPurposeList.__contains__(self, i)
def __getitem__(self, i):
item = MultiPurposeList.__getitem__(self, i)
if type(item) is str:
return item.decode(u'utf-8')
return item
def __getslice__(self, i, j):
return [item.decode(u'utf-8') if type(item) is str else item \
for item in MultiPurposeList.__getslice__(self, i, j)]
def __setitem__(self, i, item):
_i = item
if type(_i) is unicode:
_i = item.encode(u'utf-8')
MultiPurposeList.__setitem__(self, i, _i)
def __setslice__(self, i, j, other):
o = []
o_tmp = other
if type(o_tmp) not in (list, tuple) and not isinstance(o_tmp, UserList):
o_tmp = list(o_tmp)
for item in o_tmp:
if type(item) == unicode:
o.append(item.encode(u'utf-8'))
else:
o.append(item)
MultiPurposeList.__setslice__(self, i, j, o)
def __add__(self, other):
raise NotImplementedError()
# TODO: implement me
if isinstance(other, UserList):
return self.__class__(self.data + other.data)
elif isinstance(other, type(self.data)):
return self.__class__(self.data + other)
else:
return self.__class__(self.data + list(other))
def __radd__(self, other):
raise NotImplementedError()
# TODO: implement me
if isinstance(other, UserList):
return self.__class__(other.data + self.data)
elif isinstance(other, type(self.data)):
return self.__class__(other + self.data)
else:
return self.__class__(list(other) + self.data)
def __iadd__(self, other):
o = []
o_tmp = other
if type(o_tmp) not in (list, tuple) and not isinstance(o_tmp, UserList):
o_tmp = list(o_tmp)
for i in o_tmp:
if type(i) is unicode:
o.append(i.encode(u'utf-8'))
else:
o.append(i)
return MultiPurposeList.__iadd__(self, o)
def append(self, item):
i = item
if type(item) is str:
i = item.encode(u'utf-8')
MultiPurposeList.append(self, i)
def insert(self, i, item):
_i = item
if type(_i) is str:
_i = item.encode(u'utf-8')
MultiPurposeList.insert(self, i, _i)
def index(self, item, *args):
i = item
if type(i) is unicode:
i = item.encode(u'utf-8')
return MultiPurposeList.index(self, i, *args)
def pop(self, i=-1):
return MultiPurposeList.pop(self, i).decode(u'utf-8')
def extend(self, other):
o = []
o_tmp = other
if type(o_tmp) not in (list, tuple) and not isinstance(o_tmp, UserList):
o_tmp = list(o_tmp)
for i in o_tmp:
if type(i) is unicode:
o.append(i.encode(u'utf-8'))
else:
o.append(i)
MultiPurposeList.extend(self, o)
# vim: set noexpandtab:
|
bellkev/asg-sim
|
refs/heads/master
|
asgsim/plots/autoscaling.py
|
1
|
from collections import defaultdict
import csv
from math import log, sqrt
from multiprocessing import Pool
from numpy import mean
from ..batches import generate_jobs, load_results, STATIC_MINIMA, STATIC_MINIMA_LIMITED, BOOT_TIMES
from ..cost import costs, run_job, costs_from_job_results, cost_ci, compare_result_means, COST_PER_BUILDER_HOUR_EXPENSIVE
from ..model import Model, run_model
from .utils import plt, plt_title, plt_save, make_scaling_plot
def compare_result_means_expensive(a, b):
return compare_result_means(a, b, cost_per_builder_hour=COST_PER_BUILDER_HOUR_EXPENSIVE)
def generate_candidate_jobs(sorted_auto, path, fraction=0.01, static_minima=STATIC_MINIMA, **kwargs):
minima = defaultdict(list)
candidates_per_key = max(1, int(len(sorted_auto) / float(len(static_minima) * len(BOOT_TIMES)) * fraction)) # take the best `fraction`
print 'candidates per key:', candidates_per_key
for result in sorted_auto:
params = result['input']
result_key = (params['build_run_time'], params['builds_per_hour'], params['builder_boot_time'])
if len(minima[result_key]) < candidates_per_key:
minima[result_key].append(result)
generate_jobs([result['input'] for key in minima for result in minima[key]], path, **kwargs)
def param_match(d1, result):
d2 = result['input']
return all(d1[key] == d2[key] for key in set(d1.keys()).intersection(set(d2.keys())))
def param_match_pred(d):
return lambda x: param_match(d, x)
def auto_key_fn(x):
return (x['build_run_time'], x['builds_per_hour'], x['builder_boot_time'])
def static_key_fn(x):
return (x['build_run_time'], x['builds_per_hour'])
def min_auto_params(static, auto, **kwargs):
"""
Given static and autoscaling results, return the parameters
that minimize cost for each set of independent variables,
with savings over static fleets added into the param dict.
"""
static_costs = {}
min_auto_costs = {}
min_autos = {}
for result in static:
static_costs[static_key_fn(result['input'])] = mean(costs_from_job_results(result, **kwargs))
for result in auto:
key = auto_key_fn(result['input'])
cost = mean(costs_from_job_results(result, **kwargs))
if key not in min_autos or cost < min_auto_costs[key]:
min_autos[key] = result
min_auto_costs[key] = cost
ret = []
for key in min_autos:
params = min_autos[key]['input']
params['savings'] = 1 - min_auto_costs[key] / static_costs[key[:2]]
ret.append(params)
return ret
def one_min_auto_params(static, auto, param_filter, **kwargs):
"""
Given static and autoscaling results and a parameter filter,
ensure that the filter uniquely specifies a set of independent
variables and return the corresponding minimum-cost parameters.
"""
pred = param_match_pred(param_filter)
params = min_auto_params(filter(pred, static), filter(pred, auto), **kwargs)
assert len(params) == 1
return params[0]
def dump_params(static, auto, path):
rows = min_auto_params(static, auto)
with open(path, 'w') as f:
writer = csv.DictWriter(f, sorted(rows[0].keys()))
writer.writeheader()
for row in rows:
writer.writerow(row)
def make_log_contour_plot(static, auto, path):
rows = min_auto_params(static, auto)
log_boot_build_times = [log(row['builder_boot_time'] / float(row['build_run_time']), 2) for row in rows]
log_boot_sec_pers = [log(row['builder_boot_time'] / (3600.0 / row['builds_per_hour']), 2) for row in rows]
savings = [row['savings'] for row in rows]
plt.tricontourf(log_boot_build_times, log_boot_sec_pers, savings)
plt.xlabel('log(boot_time/build_time)')
plt.ylabel('log(boot_time/sec_per)')
plt.colorbar()
plt_save(path)
def make_linear_contour_plot_for_boot_time(static, auto, boot_time, path):
rows = [row for row in min_auto_params(static, auto) if row['builder_boot_time'] == boot_time]
plt.tricontourf([row['build_run_time'] for row in rows], [row['builds_per_hour'] for row in rows], [row['savings'] for row in rows], 20)
plt.xlabel('build_run_time')
plt.ylabel('builds_per_hour')
plt.colorbar()
plt_save(path)
def make_savings_v_boot_time_plot(static, auto):
pred = param_match_pred({'builds_per_hour': 50.0, 'build_run_time': 600})
rows = min_auto_params(filter(pred, static), filter(pred, auto))
plt_title('Max Savings Over Static Fleet (50 builds / hr, 10 min / build)')
plt.xlabel('Builder Boot Time (m)')
plt.ylabel('Savings (%)')
plt.axis([0, 11, 0, 35])
plt.plot([params['builder_boot_time'] / 60.0 for params in rows], [params['savings'] * 100.0 for params in rows], 'bo')
plt_save('plots/savings_v_boot_time')
def make_savings_v_build_time_plot(static, auto):
boot_time = 300
slow_pred = param_match_pred({'builder_boot_time': boot_time, 'builds_per_hour': 2.0})
fast_pred = param_match_pred({'builder_boot_time': boot_time, 'builds_per_hour': 50.0})
slow = min_auto_params(filter(slow_pred, static), filter(slow_pred, auto))
fast = min_auto_params(filter(fast_pred, static), filter(fast_pred, auto))
plt_title('Max Savings Over Static Fleet (5 min builder boot time)')
plt.xlabel('Build Run Time (m)')
plt.ylabel('Savings (%)')
plt.axis([0, 41, 0, 50])
s_handle, = plt.plot([params['build_run_time'] / 60.0 for params in slow], [params['savings'] * 100.0 for params in slow], 'bo', label='2 builds / hr')
f_handle, = plt.plot([params['build_run_time'] / 60.0 for params in fast], [params['savings'] * 100.0 for params in fast], 'gs', label='50 builds / hr')
plt.legend(handles=(s_handle, f_handle), loc='upper left')
plt_save('plots/savings_v_build_time')
def make_savings_v_traffic_plot(static, auto):
boot_time = 300
slow_pred = param_match_pred({'builder_boot_time': boot_time, 'build_run_time': 2400})
fast_pred = param_match_pred({'builder_boot_time': boot_time, 'build_run_time': 300})
slow = min_auto_params(filter(slow_pred, static), filter(slow_pred, auto))
fast = min_auto_params(filter(fast_pred, static), filter(fast_pred, auto))
plt_title('Max Savings Over Static Fleet (5 min builder boot time)')
plt.xlabel('Builds Per Hour')
plt.ylabel('Savings (%)')
plt.axis([0, 205, 0, 50])
s_handle, = plt.plot([params['builds_per_hour'] for params in slow], [params['savings'] * 100.0 for params in slow], 'bo', label='40 min builds')
f_handle, = plt.plot([params['builds_per_hour'] for params in fast], [params['savings'] * 100.0 for params in fast], 'gs', label='5 min builds')
plt.legend(handles=(s_handle, f_handle), loc='upper right')
plt_save('plots/savings_v_traffic')
def make_savings_v_dev_cost_plot(static, auto):
dev_costs = [0.01, 0.1, 1, 10, 100]
rows = [one_min_auto_params(static, auto,
{'builder_boot_time': 300, 'build_run_time': 300, 'builds_per_hour': 50.0},
cost_per_builder_hour=COST_PER_BUILDER_HOUR_EXPENSIVE,
cost_per_dev_hour=dev_cost)
for dev_cost in dev_costs]
plt.plot([log(dev_cost, 10) for dev_cost in dev_costs], [row['savings'] for row in rows], 'bo')
plt_save('plots/savings_dev_cost_expensive')
def make_savings_v_traffic_plot_varying(static_const, auto_const, static_sine, auto_sine):
boot_time = 300
pred = param_match_pred({'builder_boot_time': boot_time, 'build_run_time': 300})
const = min_auto_params(filter(pred, static_const), filter(pred, auto_const))
sine = min_auto_params(filter(pred, static_sine), filter(pred, auto_sine))
plt_title('Max Savings Over Static Fleet (5 min boot time, 5 min / build)')
plt.xlabel('Builds Per Hour')
plt.ylabel('Savings (%)')
c_handle, = plt.plot([params['builds_per_hour'] for params in const], [params['savings'] * 100.0 for params in const], 'bo', label='Constant Traffic')
s_handle, = plt.plot([params['builds_per_hour'] for params in sine], [params['savings'] * 100.0 for params in sine], 'gs', label='Sine-Varying Traffic')
plt.legend(handles=(c_handle, s_handle), loc='upper left')
plt.axis([0, 210, 0, 50])
plt_save('plots/savings_v_traffic_varying')
def make_constant_traffic_plots():
make_savings_v_build_time_plot(load_results('job-archives/2c517e8/static'), load_results('job-archives/2c517e8/candidates2'))
make_savings_v_traffic_plot(load_results('job-archives/2c517e8/static'), load_results('job-archives/2c517e8/candidates2'))
make_savings_v_boot_time_plot(load_results('job-archives/2c517e8/static'), load_results('job-archives/2c517e8/candidates2'))
make_savings_v_dev_cost_plot(load_results('job-archives/2c517e8/static-expensive'), load_results('job-archives/2c517e8/auto'))
params = one_min_auto_params(load_results('job-archives/2c517e8/static'), load_results('job-archives/2c517e8/candidates2'),
{'build_run_time': 2400, 'builds_per_hour': 2, 'builder_boot_time': 300})
params['ticks'] = 2000
make_scaling_plot(params, 'Auto Scaling Fleet Capacity and Usage (40 min / build, 2 builds / hr)',
'plots/slow_auto_scaling', axis=[0, 20000 / 60, 0, 8])
def make_varying_traffic_plots():
make_savings_v_traffic_plot_varying(load_results('job-archives/2c517e8/static'), load_results('job-archives/2c517e8/candidates2'),
load_results('job-archives/9987101/static'), load_results('job-archives/9987101/candidates2'))
params = one_min_auto_params(load_results('job-archives/9987101/static'), load_results('job-archives/9987101/candidates2'),
{'build_run_time': 300, 'builds_per_hour': 200.0, 'builder_boot_time': 300})
params['ticks'] = 1500 * 60
make_scaling_plot(params, 'Auto Scaling Fleet Capacity and Usage (5 min / build, 200 builds / hr)',
'plots/sine_auto_scaling', axis=[0, 1500, 0, 35])
def compute_params(params):
builds_per_hour = params['builds_per_hour']
build_run_time = params['build_run_time']
volume = builds_per_hour * build_run_time / 3600.0
desired_capacity = 1.17 + 2.52 * sqrt(volume) # results of sqrt regression
hysteresis = desired_capacity * 0.2
scale_up_threshold = int(round(desired_capacity - hysteresis))
scale_down_threshold = int(round(desired_capacity + hysteresis))
scale_up_change = max(1, int(round(desired_capacity * 0.4)))
scale_down_change = max(1, int(round(desired_capacity * 0.2)))
fmt_str = 'up_thresh: %d down_thresh: %d up_change: %d down_change: %d'
return {'autoscale': True,
'builds_per_hour': builds_per_hour,
'builds_per_hour_fn': params['builds_per_hour_fn'],
'build_run_time': build_run_time,
'builder_boot_time': params['builder_boot_time'],
'alarm_period_duration': 300,
'scale_up_alarm_period_count': 1,
'scale_down_alarm_period_count': 2,
'scale_up_change': scale_up_change,
'scale_down_change': scale_down_change,
'scale_up_threshold': scale_up_threshold,
'scale_down_threshold': scale_down_threshold,
'initial_builder_count': scale_up_threshold,
'sec_per_tick': params['sec_per_tick'],
'ticks': params['ticks']}
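# Worked example of the heuristic above (illustrative, not in the original):
# for 50 builds/hour at 600 s per build, volume = 50 * 600 / 3600 ~ 8.33,
# desired_capacity = 1.17 + 2.52 * sqrt(8.33) ~ 8.44 and hysteresis ~ 1.69,
# giving scale_up_threshold = 7, scale_down_threshold = 10,
# scale_up_change = 3 and scale_down_change = 2.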
def non_autoscaling_params(params):
optimum_fleet_sizes = {(30, 50.0): 4,
(60, 50.0): 5,
(120, 50.0): 7,
(300, 50.0): 12,
(600, 50.0): 19,
(1200, 50.0): 31,
(2400, 50.0): 55,
(300, 10.0): 5,
(300, 50.0): 12,
(300, 100.0): 19,
(300, 200.0): 31,
(300, 500.0): 63,
(300, 1000.0): 114}
ret = params.copy()
ret['autoscale'] = False
ret['initial_builder_count'] = optimum_fleet_sizes[static_key_fn(params)]
return ret
def test_recommendations():
static_results = load_results('job-archives/9987101/static')
auto_results = load_results('job-archives/9987101/candidates2')
experimental_params = [p for p in min_auto_params(static_results, auto_results) if p['builder_boot_time'] == 300]
recommended_params = map(compute_params, experimental_params)
static_params = map(non_autoscaling_params, experimental_params)
# Add some high-traffic situations that weren't covered by batch runs
extra_params = [{'build_run_time': run_time, 'builds_per_hour': per_hour,
'builds_per_hour_fn': Model.SINE, 'builder_boot_time': boot_time,
'ticks': 20000, 'sec_per_tick': 10}
for run_time, per_hour in [(2400, 50.0), (300, 500.0), (300, 1000.0)]
for boot_time in [300]]
recommended_params.extend(map(compute_params, extra_params))
static_params.extend(map(non_autoscaling_params, extra_params))
percent_savings = lambda a, s: int(round((1 - a / s) * 100))
p = Pool(8)
for e, r, s in zip(experimental_params, recommended_params, static_params):
e['trials'] = r['trials'] = s['trials'] = 50
experimental_costs = map(mean, p.map(costs, experimental_params))
recommended_costs = map(mean, p.map(costs, recommended_params))
static_costs = map(mean, p.map(costs, static_params))
p.close()
volumes = [p['builds_per_hour'] * p['build_run_time'] / 3600.0 for p in static_params]
experimental_savings = [percent_savings(e, s) for e, s in zip(experimental_costs, static_costs)]
recommended_savings = [percent_savings(r, s) for r, s in zip(recommended_costs, static_costs)]
fmt_str = 'builder_boot_time: %d build_run_time: %d builds_per_hour: %.0f recommended_savings: %d'
plt_title('Savings Over Static Fleet vs Build Volume (5 min boot times)')
plt.xlabel('Volume (build hrs / hr) (erlangs)')
plt.ylabel('Savings (%)')
e_handle, = plt.plot(volumes[:len(experimental_savings)], experimental_savings, 'bo', label='Best parameters measured')
r_handle, = plt.plot(volumes, recommended_savings, 'gs', label='Parameters from formula')
plt.legend(handles=(e_handle, r_handle), loc='lower right')
plt.axis([0, 90, 0, 65])
plt_save('plots/test_recommendations')
if __name__ == '__main__':
make_constant_traffic_plots()
make_varying_traffic_plots()
test_recommendations()
|
domix/FrameworkBenchmarks
|
refs/heads/master
|
dancer/__init__.py
|
12133432
| |
openstack/vitrage
|
refs/heads/master
|
vitrage/tests/unit/datasources/heat/__init__.py
|
12133432
| |
deevarvar/myLab
|
refs/heads/master
|
baidu_code/bcoreapi/monitor/__init__.py
|
12133432
| |
AevumDecessus/fragforce.org
|
refs/heads/dev
|
ffdonations/migrations/__init__.py
|
12133432
| |
Pandaaaa906/ChemErpSystem
|
refs/heads/master
|
WriteTogether/migrations/0001_initial.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-08-16 02:04
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='InspectionFinishDate',
fields=[
('recid', models.AutoField(primary_key=True, serialize=False)),
('created_date', models.DateTimeField(blank=True, default=django.utils.timezone.now, verbose_name='\u521b\u5efa\u65f6\u95f4')),
('modified_date', models.DateTimeField(auto_now=True, null=True, verbose_name='\u4fee\u6539\u65f6\u95f4')),
('date', models.DateField(verbose_name='\u9001\u68c0\u5305\u88c5\u5b8c\u6210\u65f6\u95f4')),
('created_by', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='writetogether_inspectionfinishdate_created', to=settings.AUTH_USER_MODEL, verbose_name='\u521b\u5efa\u4eba')),
],
options={
'verbose_name': '\u9001\u68c0\u5b8c\u6210\u65f6\u95f4',
},
),
migrations.CreateModel(
name='InspectionItem',
fields=[
('recid', models.AutoField(primary_key=True, serialize=False)),
('created_date', models.DateTimeField(blank=True, default=django.utils.timezone.now, verbose_name='\u521b\u5efa\u65f6\u95f4')),
('modified_date', models.DateTimeField(auto_now=True, null=True, verbose_name='\u4fee\u6539\u65f6\u95f4')),
('inspect_company', models.TextField(verbose_name='\u9001\u68c0\u516c\u53f8')),
('inspect_quantity', models.TextField(verbose_name='\u9001\u68c0\u91cf')),
('inspect_label', models.TextField(verbose_name='\u9001\u68c0\u6807\u7b7e')),
('inspect_item', models.TextField(choices=[('HPLC/LCMS', 'HPLC/LCMS'), ('HPLC', 'HPLC'), ('LCMS', 'LCMS'), ('HNMR', 'HNMR'), ('CNMR', 'CNMR'), ('GCMS', 'GCMS'), ('TGA', 'TGA'), ('IR', 'IR'), ('GC', 'GC'), ('MS', 'MS'), ('W', '\u6c34\u5206'), ('CZCZ', '\u70bd\u707c\u6b8b\u6e23')], verbose_name='\u9001\u68c0\u9879\u76ee')),
('inspect_method', models.TextField(verbose_name='\u9001\u68c0\u65b9\u6cd5')),
('inspect_info', models.TextField(verbose_name='\u6837\u54c1\u4fe1\u606f')),
('info', models.TextField(verbose_name='\u5907\u6ce8')),
('created_by', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='writetogether_inspectionitem_created', to=settings.AUTH_USER_MODEL, verbose_name='\u521b\u5efa\u4eba')),
('modified_by', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='writetogether_inspectionitem_modified', to=settings.AUTH_USER_MODEL, verbose_name='\u4fee\u6539\u4eba')),
],
options={
'verbose_name': '\u9001\u68c0\u6761\u76ee',
},
),
migrations.CreateModel(
name='POEnter',
fields=[
('recid', models.AutoField(primary_key=True, serialize=False)),
('created_date', models.DateTimeField(blank=True, default=django.utils.timezone.now, verbose_name='\u521b\u5efa\u65f6\u95f4')),
('modified_date', models.DateTimeField(auto_now=True, null=True, verbose_name='\u4fee\u6539\u65f6\u95f4')),
('date', models.DateField(verbose_name='\u5165\u5e93\u65f6\u95f4')),
('created_by', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='writetogether_poenter_created', to=settings.AUTH_USER_MODEL, verbose_name='\u521b\u5efa\u4eba')),
('modified_by', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='writetogether_poenter_modified', to=settings.AUTH_USER_MODEL, verbose_name='\u4fee\u6539\u4eba')),
],
options={
'verbose_name': '\u91c7\u8d2d\u8ba2\u5355\u6761\u76ee',
},
),
migrations.CreateModel(
name='PurchaseOrderItem',
fields=[
('recid', models.AutoField(primary_key=True, serialize=False)),
('created_date', models.DateTimeField(blank=True, default=django.utils.timezone.now, verbose_name='\u521b\u5efa\u65f6\u95f4')),
('modified_date', models.DateTimeField(auto_now=True, null=True, verbose_name='\u4fee\u6539\u65f6\u95f4')),
('purchase_order_id', models.TextField(verbose_name='\u9500\u552e\u5355\u53f7')),
('cat_no_unit', models.TextField(verbose_name='\u8d27\u53f7')),
('name', models.TextField(verbose_name='\u4ea7\u54c1\u540d\u79f0')),
('cas', models.TextField(verbose_name='CAS\u53f7')),
('quantity', models.FloatField(verbose_name='\u6570\u91cf')),
('valid', models.BooleanField(default=True)),
('info', models.TextField(verbose_name='\u5907\u6ce8')),
('created_by', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='writetogether_purchaseorderitem_created', to=settings.AUTH_USER_MODEL, verbose_name='\u521b\u5efa\u4eba')),
('modified_by', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='writetogether_purchaseorderitem_modified', to=settings.AUTH_USER_MODEL, verbose_name='\u4fee\u6539\u4eba')),
],
options={
'verbose_name': '\u91c7\u8d2d\u8ba2\u5355\u6761\u76ee',
},
),
migrations.AddField(
model_name='poenter',
name='purchase_item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='WriteTogether.PurchaseOrderItem'),
),
migrations.AddField(
model_name='inspectionitem',
name='purchase_item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='WriteTogether.PurchaseOrderItem'),
),
migrations.AddField(
model_name='inspectionfinishdate',
name='inspect_item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='WriteTogether.InspectionItem'),
),
migrations.AddField(
model_name='inspectionfinishdate',
name='modified_by',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='writetogether_inspectionfinishdate_modified', to=settings.AUTH_USER_MODEL, verbose_name='\u4fee\u6539\u4eba'),
),
]
|
aperigault/ansible
|
refs/heads/devel
|
lib/ansible/modules/net_tools/nios/__init__.py
|
12133432
| |
OpenLD/enigma2-wetek
|
refs/heads/master
|
lib/python/Plugins/SystemPlugins/SoftwareManager/__init__.py
|
12133432
| |
tomevans/utils
|
refs/heads/master
|
constants.py
|
1
|
import scipy
# Units in SI, i.e. not cgs
RSUN = 6.955e8
MSUN = 1.9889e30
MJUP = 1.8986e27
RJUP = 7.149e7
REARTH = 6.371e6
DAY2S = 86400.0
DEG2RAD = scipy.pi/180.
AU = 1.496e11
PLANCKH = 6.626e-34
BOLTZK = 1.38065e-23
C = 2.9979e8 # speed of light in vacuum in m s^-1
G = 6.673e-11 # gravitational constant in m^3 kg^-1 s^-2
RGAS = 8.314 # gas constant in J mol^-1 K^-1
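# Quick sanity check (illustrative, not part of the original module):
# Jupiter's surface gravity from these constants,
#   g_jup = G * MJUP / RJUP**2
#         = 6.673e-11 * 1.8986e27 / (7.149e7)**2 ~ 24.8 m s^-2
# which matches the commonly quoted value of about 24.8 m s^-2.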
|
whs/django
|
refs/heads/master
|
django/contrib/sessions/serializers.py
|
59
|
import pickle
from django.core.signing import JSONSerializer as BaseJSONSerializer
class PickleSerializer:
"""
Simple wrapper around pickle to be used in signing.dumps and
signing.loads.
"""
def dumps(self, obj):
return pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)
def loads(self, data):
return pickle.loads(data)
JSONSerializer = BaseJSONSerializer
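# Usage sketch (added for illustration, not part of the original module):
# both serializers plug into django.core.signing, e.g.
#   from django.core import signing
#   token = signing.dumps({'user': 1}, serializer=JSONSerializer)
#   signing.loads(token, serializer=JSONSerializer)  # -> {'user': 1}
# PickleSerializer works the same way but can carry arbitrary Python objects.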
|
paulrouget/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/fetch/cross-origin-resource-policy/resources/hello.py
|
25
|
def main(request, response):
headers = [("Cross-Origin-Resource-Policy", request.GET['corp'])]
if 'origin' in request.headers:
headers.append(('Access-Control-Allow-Origin', request.headers['origin']))
return 200, headers, "hello"
|
unnikrishnankgs/va
|
refs/heads/master
|
venv/lib/python3.5/site-packages/tensorflow/models/compression/entropy_coder/dataset/gen_synthetic_dataset.py
|
9
|
# Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate a synthetic dataset."""
import os
import numpy as np
import tensorflow as tf
import synthetic_model
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string(
'dataset_dir', None,
"""Directory where to write the dataset and the configs.""")
tf.app.flags.DEFINE_integer(
'count', 1000,
"""Number of samples to generate.""")
def int64_feature(values):
"""Returns a TF-Feature of int64s.
Args:
values: A scalar or list of values.
Returns:
A TF-Feature.
"""
if not isinstance(values, (tuple, list)):
values = [values]
return tf.train.Feature(int64_list=tf.train.Int64List(value=values))
def float_feature(values):
"""Returns a TF-Feature of floats.
Args:
values: A scalar or list of values.
Returns:
A TF-Feature.
"""
if not isinstance(values, (tuple, list)):
values = [values]
return tf.train.Feature(float_list=tf.train.FloatList(value=values))
def AddToTFRecord(code, tfrecord_writer):
example = tf.train.Example(features=tf.train.Features(feature={
'code_shape': int64_feature(code.shape),
'code': float_feature(code.flatten().tolist()),
}))
tfrecord_writer.write(example.SerializeToString())
def GenerateDataset(filename, count, code_shape):
with tf.python_io.TFRecordWriter(filename) as tfrecord_writer:
for _ in xrange(count):
code = synthetic_model.GenerateSingleCode(code_shape)
# Convert {0,1} codes to {-1,+1} codes.
code = 2.0 * code - 1.0
AddToTFRecord(code, tfrecord_writer)
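# Illustrative reader sketch (not part of the original script): each record
# stores the code shape and the flattened {-1,+1} code, so a serialized example
# (`serialized` is a placeholder) could be parsed back roughly as follows,
# assuming the TF 1.x APIs used above:
#   features = tf.parse_single_example(serialized, {
#       'code_shape': tf.FixedLenFeature([3], tf.int64),
#       'code': tf.VarLenFeature(tf.float32)})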
def main(argv=None): # pylint: disable=unused-argument
GenerateDataset(os.path.join(FLAGS.dataset_dir, 'synthetic_dataset'),
FLAGS.count,
[35, 48, 8])
if __name__ == '__main__':
tf.app.run()
|
azatoth/scons
|
refs/heads/master
|
test/option-k.py
|
5
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os.path
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
test.subdir('work1', 'work2', 'work3')
test.write('succeed.py', r"""
import sys
file = open(sys.argv[1], 'wb')
file.write("succeed.py: %s\n" % sys.argv[1])
file.close()
sys.exit(0)
""")
test.write('fail.py', r"""
import sys
sys.exit(1)
""")
#
# Test: work1
#
test.write(['work1', 'SConstruct'], """\
Succeed = Builder(action = r'%(_python_)s ../succeed.py $TARGETS')
Fail = Builder(action = r'%(_python_)s ../fail.py $TARGETS')
env = Environment(BUILDERS = { 'Succeed' : Succeed, 'Fail' : Fail })
env.Fail(target = 'aaa.1', source = 'aaa.in')
env.Succeed(target = 'aaa.out', source = 'aaa.1')
env.Succeed(target = 'bbb.out', source = 'bbb.in')
""" % locals())
test.write(['work1', 'aaa.in'], "aaa.in\n")
test.write(['work1', 'bbb.in'], "bbb.in\n")
test.run(chdir = 'work1',
arguments = 'aaa.out bbb.out',
stderr = 'scons: *** [aaa.1] Error 1\n',
status = 2)
test.must_not_exist(test.workpath('work1', 'aaa.1'))
test.must_not_exist(test.workpath('work1', 'aaa.out'))
test.must_not_exist(test.workpath('work1', 'bbb.out'))
test.run(chdir = 'work1',
arguments = '-k aaa.out bbb.out',
stderr = 'scons: *** [aaa.1] Error 1\n',
status = 2)
test.must_not_exist(test.workpath('work1', 'aaa.1'))
test.must_not_exist(test.workpath('work1', 'aaa.out'))
test.must_match(['work1', 'bbb.out'], "succeed.py: bbb.out\n")
test.unlink(['work1', 'bbb.out'])
test.run(chdir = 'work1',
arguments = '--keep-going aaa.out bbb.out',
stderr = 'scons: *** [aaa.1] Error 1\n',
status = 2)
test.must_not_exist(test.workpath('work1', 'aaa.1'))
test.must_not_exist(test.workpath('work1', 'aaa.out'))
test.must_match(['work1', 'bbb.out'], "succeed.py: bbb.out\n")
expect = """\
scons: Reading SConscript files ...
scons: done reading SConscript files.
scons: Cleaning targets ...
Removed bbb.out
scons: done cleaning targets.
"""
test.run(chdir = 'work1',
arguments = '--clean --keep-going aaa.out bbb.out',
stdout = expect)
test.must_not_exist(test.workpath('work1', 'aaa.1'))
test.must_not_exist(test.workpath('work1', 'aaa.out'))
test.must_not_exist(test.workpath('work1', 'bbb.out'))
#
# Test: work2
#
test.write(['work2', 'SConstruct'], """\
Succeed = Builder(action = r'%(_python_)s ../succeed.py $TARGETS')
Fail = Builder(action = r'%(_python_)s ../fail.py $TARGETS')
env = Environment(BUILDERS = { 'Succeed' : Succeed, 'Fail' : Fail })
env.Fail('aaa.out', 'aaa.in')
env.Succeed('bbb.out', 'aaa.out')
env.Succeed('ccc.out', 'ccc.in')
env.Succeed('ddd.out', 'ccc.in')
""" % locals())
test.write(['work2', 'aaa.in'], "aaa.in\n")
test.write(['work2', 'ccc.in'], "ccc.in\n")
test.run(chdir = 'work2',
arguments = '-k .',
status = 2,
stderr = None,
stdout = """\
scons: Reading SConscript files ...
scons: done reading SConscript files.
scons: Building targets ...
%(_python_)s ../fail.py aaa.out
%(_python_)s ../succeed.py ccc.out
%(_python_)s ../succeed.py ddd.out
scons: done building targets (errors occurred during build).
""" % locals())
test.must_not_exist(['work2', 'aaa.out'])
test.must_not_exist(['work2', 'bbb.out'])
test.must_match(['work2', 'ccc.out'], "succeed.py: ccc.out\n")
test.must_match(['work2', 'ddd.out'], "succeed.py: ddd.out\n")
#
# Test: work3
#
# Check that the -k (keep-going) switch works correctly when the Nodes
# form a DAG. The test case is the following:
#
# all
# |
# +-----+-----+-------------+
# | | |
# a1 a2 a3
# | | |
# + +---+---+ +---+---+
# \ | / | |
# \ bbb.out / a4 ccc.out
# \ / /
# \ / /
# \ / /
# aaa.out (fails)
#
test.write(['work3', 'SConstruct'], """\
Succeed = Builder(action = r'%(_python_)s ../succeed.py $TARGETS')
Fail = Builder(action = r'%(_python_)s ../fail.py $TARGETS')
env = Environment(BUILDERS = { 'Succeed' : Succeed, 'Fail' : Fail })
a = env.Fail('aaa.out', 'aaa.in')
b = env.Succeed('bbb.out', 'bbb.in')
c = env.Succeed('ccc.out', 'ccc.in')
a1 = Alias( 'a1', a )
a2 = Alias( 'a2', a+b)
a4 = Alias( 'a4', c)
a3 = Alias( 'a3', a4+c)
Alias('all', a1+a2+a3)
""" % locals())
test.write(['work3', 'aaa.in'], "aaa.in\n")
test.write(['work3', 'bbb.in'], "bbb.in\n")
test.write(['work3', 'ccc.in'], "ccc.in\n")
# Test regular build (i.e. without -k)
test.run(chdir = 'work3',
arguments = '.',
status = 2,
stderr = None,
stdout = """\
scons: Reading SConscript files ...
scons: done reading SConscript files.
scons: Building targets ...
%(_python_)s ../fail.py aaa.out
scons: building terminated because of errors.
""" % locals())
test.must_not_exist(['work3', 'aaa.out'])
test.must_not_exist(['work3', 'bbb.out'])
test.must_not_exist(['work3', 'ccc.out'])
test.run(chdir = 'work3',
arguments = '-c .')
test.must_not_exist(['work3', 'aaa.out'])
test.must_not_exist(['work3', 'bbb.out'])
test.must_not_exist(['work3', 'ccc.out'])
# Current directory
test.run(chdir = 'work3',
arguments = '-k .',
status = 2,
stderr = None,
stdout = """\
scons: Reading SConscript files ...
scons: done reading SConscript files.
scons: Building targets ...
%(_python_)s ../fail.py aaa.out
%(_python_)s ../succeed.py bbb.out
%(_python_)s ../succeed.py ccc.out
scons: done building targets (errors occurred during build).
""" % locals())
test.must_not_exist(['work3', 'aaa.out'])
test.must_exist(['work3', 'bbb.out'])
test.must_exist(['work3', 'ccc.out'])
test.run(chdir = 'work3',
arguments = '-c .')
test.must_not_exist(['work3', 'aaa.out'])
test.must_not_exist(['work3', 'bbb.out'])
test.must_not_exist(['work3', 'ccc.out'])
# Single target
test.run(chdir = 'work3',
arguments = '--keep-going all',
status = 2,
stderr = None,
stdout = """\
scons: Reading SConscript files ...
scons: done reading SConscript files.
scons: Building targets ...
%(_python_)s ../fail.py aaa.out
%(_python_)s ../succeed.py bbb.out
%(_python_)s ../succeed.py ccc.out
scons: done building targets (errors occurred during build).
""" % locals())
test.must_not_exist(['work3', 'aaa.out'])
test.must_exist(['work3', 'bbb.out'])
test.must_exist(['work3', 'ccc.out'])
test.run(chdir = 'work3',
arguments = '-c .')
test.must_not_exist(['work3', 'aaa.out'])
test.must_not_exist(['work3', 'bbb.out'])
test.must_not_exist(['work3', 'ccc.out'])
# Separate top-level targets
test.run(chdir = 'work3',
arguments = '-k a1 a2 a3',
status = 2,
stderr = None,
stdout = """\
scons: Reading SConscript files ...
scons: done reading SConscript files.
scons: Building targets ...
%(_python_)s ../fail.py aaa.out
%(_python_)s ../succeed.py bbb.out
%(_python_)s ../succeed.py ccc.out
scons: done building targets (errors occurred during build).
""" % locals())
test.must_not_exist(['work3', 'aaa.out'])
test.must_exist(['work3', 'bbb.out'])
test.must_exist(['work3', 'ccc.out'])
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
sysalexis/kbengine
|
refs/heads/master
|
kbe/res/scripts/common/Lib/distutils/tests/test_file_util.py
|
59
|
"""Tests for distutils.file_util."""
import unittest
import os
import shutil
import errno
from unittest.mock import patch
from distutils.file_util import move_file
from distutils import log
from distutils.tests import support
from distutils.errors import DistutilsFileError
from test.support import run_unittest
class FileUtilTestCase(support.TempdirManager, unittest.TestCase):
def _log(self, msg, *args):
if len(args) > 0:
self._logs.append(msg % args)
else:
self._logs.append(msg)
def setUp(self):
super(FileUtilTestCase, self).setUp()
self._logs = []
self.old_log = log.info
log.info = self._log
tmp_dir = self.mkdtemp()
self.source = os.path.join(tmp_dir, 'f1')
self.target = os.path.join(tmp_dir, 'f2')
self.target_dir = os.path.join(tmp_dir, 'd1')
def tearDown(self):
log.info = self.old_log
super(FileUtilTestCase, self).tearDown()
def test_move_file_verbosity(self):
f = open(self.source, 'w')
try:
f.write('some content')
finally:
f.close()
move_file(self.source, self.target, verbose=0)
wanted = []
self.assertEqual(self._logs, wanted)
# back to original state
move_file(self.target, self.source, verbose=0)
move_file(self.source, self.target, verbose=1)
wanted = ['moving %s -> %s' % (self.source, self.target)]
self.assertEqual(self._logs, wanted)
# back to original state
move_file(self.target, self.source, verbose=0)
self._logs = []
# now the target is a dir
os.mkdir(self.target_dir)
move_file(self.source, self.target_dir, verbose=1)
wanted = ['moving %s -> %s' % (self.source, self.target_dir)]
self.assertEqual(self._logs, wanted)
def test_move_file_exception_unpacking_rename(self):
# see issue 22182
with patch("os.rename", side_effect=OSError("wrong", 1)), \
self.assertRaises(DistutilsFileError):
with open(self.source, 'w') as fobj:
fobj.write('spam eggs')
move_file(self.source, self.target, verbose=0)
def test_move_file_exception_unpacking_unlink(self):
# see issue 22182
with patch("os.rename", side_effect=OSError(errno.EXDEV, "wrong")), \
patch("os.unlink", side_effect=OSError("wrong", 1)), \
self.assertRaises(DistutilsFileError):
with open(self.source, 'w') as fobj:
fobj.write('spam eggs')
move_file(self.source, self.target, verbose=0)
def test_suite():
return unittest.makeSuite(FileUtilTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
|
ocefpaf/folium
|
refs/heads/master
|
tests/plugins/test_heat_map.py
|
2
|
# -*- coding: utf-8 -*-
"""
Test HeatMap
------------
"""
import folium
from folium.plugins import HeatMap
from folium.utilities import normalize
from jinja2 import Template
import numpy as np
import pytest
def test_heat_map():
np.random.seed(3141592)
data = (np.random.normal(size=(100, 2)) * np.array([[1, 1]]) +
np.array([[48, 5]]))
m = folium.Map([48., 5.], tiles='stamentoner', zoom_start=6)
hm = HeatMap(data)
m.add_child(hm)
m._repr_html_()
out = normalize(m._parent.render())
# We verify that the script import is present.
script = '<script src="https://cdn.jsdelivr.net/gh/python-visualization/folium@master/folium/templates/leaflet_heat.min.js"></script>' # noqa
assert script in out
# We verify that the script part is correct.
tmpl = Template("""
var {{this.get_name()}} = L.heatLayer(
{{this.data}},
{
minOpacity: {{this.min_opacity}},
maxZoom: {{this.max_zoom}},
radius: {{this.radius}},
blur: {{this.blur}},
gradient: {{this.gradient}}
})
.addTo({{this._parent.get_name()}});
""")
assert tmpl.render(this=hm)
bounds = m.get_bounds()
np.testing.assert_allclose(
bounds,
[[46.218566840847025, 3.0302801394447734],
[50.75345011431167, 7.132453997672826]])
def test_heatmap_data():
data = HeatMap(np.array([[3, 4, 1], [5, 6, 1], [7, 8, 0.5]])).data
assert isinstance(data, list)
assert len(data) == 3
for i in range(len(data)):
assert isinstance(data[i], list)
assert len(data[i]) == 3
def test_heat_map_exception():
with pytest.raises(ValueError):
HeatMap(np.array([[4, 5, 1], [3, 6, np.nan]]))
with pytest.raises(Exception):
HeatMap(np.array([3, 4, 5]))
|
joshpeng/Network-Intrusions-Flask
|
refs/heads/master
|
app/performance.py
|
1
|
from __future__ import division
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.externals import joblib
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_score
from sklearn.metrics import recall_score
from sklearn.metrics import f1_score
def get_perf(y_true, y_pred, scorer, pos=''):
result = 0
if scorer == 'acc':
result = accuracy_score(y_true, y_pred)
elif scorer == 'prec':
result = precision_score(y_true, y_pred, labels=[pos + '.'], average='weighted')
elif scorer == 'rec':
result = recall_score(y_true, y_pred, pos_label=pos + '.', average='weighted')
elif scorer == 'f1':
result = f1_score(y_true, y_pred, pos_label=pos + '.', average='weighted')
return result
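# Illustrative usage sketch, not part of the original module. The label values
# below are invented; they only assume the dataset convention of class labels
# ending in '.', which get_perf() appends to the pos argument.
if __name__ == '__main__':
    y_true = ['normal.', 'smurf.', 'normal.', 'smurf.']
    y_pred = ['normal.', 'normal.', 'normal.', 'smurf.']
    print(get_perf(y_true, y_pred, 'acc'))                # overall accuracy
    print(get_perf(y_true, y_pred, 'prec', pos='smurf'))  # weighted precision for 'smurf.'
    print(get_perf(y_true, y_pred, 'rec', pos='smurf'))   # weighted recall for 'smurf.'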
|
yeyanchao/calibre
|
refs/heads/master
|
src/calibre/gui2/update.py
|
1
|
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
import traceback
from PyQt4.Qt import (QThread, pyqtSignal, Qt, QUrl, QDialog, QGridLayout,
QLabel, QCheckBox, QDialogButtonBox, QIcon, QPixmap)
import mechanize
from calibre.constants import (__appname__, __version__, iswindows, isosx,
isportable)
from calibre import browser, prints, as_unicode
from calibre.utils.config import prefs
from calibre.gui2 import config, dynamic, open_url
from calibre.gui2.dialogs.plugin_updater import get_plugin_updates_available
URL = 'http://status.calibre-ebook.com/latest'
#URL = 'http://localhost:8000/latest'
NO_CALIBRE_UPDATE = '-0.0.0'
VSEP = '|'
def get_newest_version():
br = browser()
req = mechanize.Request(URL)
req.add_header('CALIBRE_VERSION', __version__)
req.add_header('CALIBRE_OS',
'win' if iswindows else 'osx' if isosx else 'oth')
req.add_header('CALIBRE_INSTALL_UUID', prefs['installation_uuid'])
version = br.open(req).read().strip()
try:
version = version.decode('utf-8')
except UnicodeDecodeError:
version = u''
return version
class CheckForUpdates(QThread):
update_found = pyqtSignal(object)
INTERVAL = 24*60*60
def __init__(self, parent):
QThread.__init__(self, parent)
def run(self):
while True:
calibre_update_version = NO_CALIBRE_UPDATE
plugins_update_found = 0
try:
version = get_newest_version()
if version and version != __version__ and len(version) < 10:
calibre_update_version = version
except Exception as e:
prints('Failed to check for calibre update:', as_unicode(e))
try:
update_plugins = get_plugin_updates_available(raise_error=True)
if update_plugins is not None:
plugins_update_found = len(update_plugins)
except Exception as e:
prints('Failed to check for plugin update:', as_unicode(e))
if (calibre_update_version != NO_CALIBRE_UPDATE or
plugins_update_found > 0):
self.update_found.emit('%s%s%d'%(calibre_update_version,
VSEP, plugins_update_found))
self.sleep(self.INTERVAL)
class UpdateNotification(QDialog):
def __init__(self, calibre_version, plugin_updates, parent=None):
QDialog.__init__(self, parent)
self.resize(400, 250)
self.l = QGridLayout()
self.setLayout(self.l)
self.logo = QLabel()
self.logo.setMaximumWidth(110)
self.logo.setPixmap(QPixmap(I('lt.png')).scaled(100, 100,
Qt.IgnoreAspectRatio, Qt.SmoothTransformation))
self.label = QLabel(('<p>'+
_('New version <b>%(ver)s</b> of %(app)s is available for download. '
'See the <a href="http://calibre-ebook.com/whats-new'
'">new features</a>.'))%dict(
app=__appname__, ver=calibre_version))
self.label.setOpenExternalLinks(True)
self.label.setWordWrap(True)
self.setWindowTitle(_('Update available!'))
self.setWindowIcon(QIcon(I('lt.png')))
self.l.addWidget(self.logo, 0, 0)
self.l.addWidget(self.label, 0, 1)
self.cb = QCheckBox(
_('Show this notification for future updates'), self)
self.l.addWidget(self.cb, 1, 0, 1, -1)
self.cb.setChecked(config.get('new_version_notification'))
self.cb.stateChanged.connect(self.show_future)
self.bb = QDialogButtonBox(self)
b = self.bb.addButton(_('&Get update'), self.bb.AcceptRole)
b.setDefault(True)
b.setIcon(QIcon(I('arrow-down.png')))
if plugin_updates > 0:
b = self.bb.addButton(_('Update &plugins'), self.bb.ActionRole)
b.setIcon(QIcon(I('plugins/plugin_updater.png')))
b.clicked.connect(self.get_plugins, type=Qt.QueuedConnection)
self.bb.addButton(self.bb.Cancel)
self.l.addWidget(self.bb, 2, 0, 1, -1)
self.bb.accepted.connect(self.accept)
self.bb.rejected.connect(self.reject)
dynamic.set('update to version %s'%calibre_version, False)
def get_plugins(self):
from calibre.gui2.dialogs.plugin_updater import (PluginUpdaterDialog,
FILTER_UPDATE_AVAILABLE)
d = PluginUpdaterDialog(self.parent(),
initial_filter=FILTER_UPDATE_AVAILABLE)
d.exec_()
def show_future(self, *args):
config.set('new_version_notification', bool(self.cb.isChecked()))
def accept(self):
url = ('http://calibre-ebook.com/download_' +
('portable' if isportable else 'windows' if iswindows
else 'osx' if isosx else 'linux'))
open_url(QUrl(url))
QDialog.accept(self)
class UpdateMixin(object):
def __init__(self, opts):
self.last_newest_calibre_version = NO_CALIBRE_UPDATE
if not opts.no_update_check:
self.update_checker = CheckForUpdates(self)
self.update_checker.update_found.connect(self.update_found,
type=Qt.QueuedConnection)
self.update_checker.start()
def recalc_update_label(self, number_of_plugin_updates):
self.update_found('%s%s%d'%(self.last_newest_calibre_version, VSEP,
number_of_plugin_updates), no_show_popup=True)
def update_found(self, version, force=False, no_show_popup=False):
try:
calibre_version, plugin_updates = version.split(VSEP)
plugin_updates = int(plugin_updates)
except:
traceback.print_exc()
return
self.last_newest_calibre_version = calibre_version
has_calibre_update = calibre_version and calibre_version != NO_CALIBRE_UPDATE
has_plugin_updates = plugin_updates > 0
self.plugin_update_found(plugin_updates)
if not has_calibre_update and not has_plugin_updates:
self.status_bar.update_label.setVisible(False)
return
if has_calibre_update:
plt = u''
if has_plugin_updates:
plt = _(' (%d plugin updates)')%plugin_updates
msg = (u'<span style="color:green; font-weight: bold">%s: '
u'<a href="update:%s">%s%s</a></span>') % (
_('Update found'), version, calibre_version, plt)
else:
msg = (u'<a href="update:%s">%d %s</a>')%(version, plugin_updates,
_('updated plugins'))
self.status_bar.update_label.setText(msg)
self.status_bar.update_label.setVisible(True)
if has_calibre_update:
if (force or (config.get('new_version_notification') and
dynamic.get('update to version %s'%calibre_version, True))):
if not no_show_popup:
self._update_notification__ = UpdateNotification(calibre_version,
plugin_updates, parent=self)
self._update_notification__.show()
elif has_plugin_updates:
if force:
from calibre.gui2.dialogs.plugin_updater import (PluginUpdaterDialog,
FILTER_UPDATE_AVAILABLE)
d = PluginUpdaterDialog(self,
initial_filter=FILTER_UPDATE_AVAILABLE)
d.exec_()
if d.do_restart:
self.quit(restart=True)
def plugin_update_found(self, number_of_updates):
# Change the plugin icon to indicate there are updates available
plugin = self.iactions.get('Plugin Updater', None)
if not plugin:
return
if number_of_updates:
plugin.qaction.setText(_('Plugin Updates')+'*')
plugin.qaction.setIcon(QIcon(I('plugins/plugin_updater_updates.png')))
plugin.qaction.setToolTip(
_('There are %d plugin updates available')%number_of_updates)
else:
plugin.qaction.setText(_('Plugin Updates'))
plugin.qaction.setIcon(QIcon(I('plugins/plugin_updater.png')))
plugin.qaction.setToolTip(_('Install and configure user plugins'))
def update_link_clicked(self, url):
url = unicode(url)
if url.startswith('update:'):
version = url[len('update:'):]
self.update_found(version, force=True)
|
opencord/voltha
|
refs/heads/master
|
netconf/session/session_mgr.py
|
1
|
#!/usr/bin/env python
#
# Copyright 2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from session import Session
import structlog
log = structlog.get_logger()
class SessionManager:
instance = None
def __init__(self):
self.next_session_id = 1
self.sessions = {}
def create_session(self, user):
session = Session(self.next_session_id, user)
self.sessions[self.next_session_id] = session
self.next_session_id += 1
return session
def remove_session(self, session):
session_id = session.session_id
if session_id in self.sessions.keys():
del self.sessions[session_id]
log.info('remove-session', session_id=session_id)
else:
log.error('invalid-session', session_id=session_id)
def get_session_manager_instance():
if SessionManager.instance == None:
SessionManager.instance = SessionManager()
return SessionManager.instance
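# Illustrative usage sketch, not part of the original module. The 'admin' user
# string is a stand-in; in voltha's netconf flow the user would come from the
# SSH/authentication layer rather than a literal string.
if __name__ == '__main__':
    mgr = get_session_manager_instance()
    session = mgr.create_session('admin')
    log.info('example-session', session_id=session.session_id)
    mgr.remove_session(session)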
|
ContinuumIO/watchdog
|
refs/heads/master
|
docs/source/conf.py
|
8
|
# -*- coding: utf-8 -*-
#
# watchdog documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 30 00:43:58 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os.path
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
TOP_DIR_PATH = os.path.abspath('../../')
SRC_DIR_PATH = os.path.join(TOP_DIR_PATH, 'src')
sys.path.insert(0, SRC_DIR_PATH)
import watchdog.version
PROJECT_NAME = 'watchdog'
AUTHOR_NAME = 'Yesudeep Mangalapilly'
COPYRIGHT = '2010, Yesudeep Mangalapilly'
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = PROJECT_NAME
copyright = COPYRIGHT
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = watchdog.version.VERSION_STRING
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
html_theme = 'pyramid'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % PROJECT_NAME
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', '%s.tex' % PROJECT_NAME, '%s Documentation' % PROJECT_NAME,
AUTHOR_NAME, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', PROJECT_NAME, '%s Documentation' % PROJECT_NAME,
[AUTHOR_NAME], 1)
]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = PROJECT_NAME
epub_author = AUTHOR_NAME
epub_publisher = AUTHOR_NAME
epub_copyright = COPYRIGHT
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
|
monash-merc/cvl-fabric-launcher
|
refs/heads/master
|
wsgidav/lock_manager.py
|
4
|
# (c) 2009-2011 Martin Wendt and contributors; see WsgiDAV http://wsgidav.googlecode.com/
# Original PyFileServer (c) 2005 Ho Chun Wei.
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
Implements the `LockManager` object that provides the locking functionality.
The LockManager requires a LockStorage object to implement persistence.
Two alternative lock storage classes are defined in the lock_storage module:
- wsgidav.lock_storage.LockStorageDict
- wsgidav.lock_storage.LockStorageShelve
The lock data model is a dictionary with these fields:
root:
Resource URL.
principal:
Name of the authenticated user that created the lock.
type:
Must be 'write'.
scope:
Must be 'shared' or 'exclusive'.
depth:
Must be '0' or 'infinity'.
owner:
String identifying the owner.
timeout:
Seconds remaining until lock expiration.
This value is passed to create() and refresh()
expire:
Converted timeout for persistence: expire = time() + timeout.
token:
Automatically generated unique token.
See `Developers info`_ for more information about the WsgiDAV architecture.
.. _`Developers info`: http://docs.wsgidav.googlecode.com/hg/html/develop.html
"""
from pprint import pprint
from dav_error import DAVError, HTTP_LOCKED, PRECONDITION_CODE_LockConflict
from wsgidav.dav_error import DAVErrorCondition
import sys
import util
import random
import time
from rw_lock import ReadWriteLock
__docformat__ = "reStructuredText"
_logger = util.getModuleLogger(__name__)
#===============================================================================
# Tool functions
#===============================================================================
def generateLockToken():
return "opaquelocktoken:" + str(hex(random.getrandbits(256)))
def normalizeLockRoot(path):
# Normalize root: /foo/bar
assert path
if type(path) is unicode:
path = path.encode("utf-8")
path = "/" + path.strip("/")
return path
def isLockExpired(lock):
expire = float(lock["expire"])
return expire >= 0 and expire < time.time()
def lockString(lockDict):
"""Return readable rep."""
if not lockDict:
return "Lock: None"
if lockDict["expire"] < 0:
expire = "Infinite (%s)" % (lockDict["expire"])
else:
expire = "%s (in %s seconds)" % (util.getLogTime(lockDict["expire"]),
lockDict["expire"] - time.time())
return "Lock(<%s..>, '%s', %s, %s, depth-%s, until %s" % (
lockDict.get("token","?"*30)[18:22], # first 4 significant token characters
lockDict.get("root"),
lockDict.get("principal"),
lockDict.get("scope"),
lockDict.get("depth"),
expire,
)
def validateLock(lock):
assert type(lock["root"]) is str
assert lock["root"].startswith("/")
assert lock["type"] == "write"
assert lock["scope"] in ("shared", "exclusive")
assert lock["depth"] in ("0", "infinity")
assert type(lock["owner"]) is str
# raises TypeError:
timeout = float(lock["timeout"])
assert timeout > 0 or timeout == -1, "timeout must be positive or -1"
assert type(lock["principal"]) is str
if "token" in lock:
assert type(lock["token"]) is str
#===============================================================================
# LockManager
#===============================================================================
class LockManager(object):
"""
Implements locking functionality using a custom storage layer.
"""
LOCK_TIME_OUT_DEFAULT = 604800 # 1 week, in seconds
def __init__(self, storage):
"""
storage:
LockManagerStorage object
"""
assert hasattr(storage, "getLockList")
self._lock = ReadWriteLock()
self.storage = storage
self.storage.open()
def __del__(self):
self.storage.close()
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.storage)
def _dump(self, msg="", out=None):
if out is None:
out = sys.stdout
urlDict = {} # { <url>: [<tokenlist>] }
ownerDict = {} # { <LOCKOWNER>: [<tokenlist>] }
userDict = {} # { <LOCKUSER>: [<tokenlist>] }
tokenDict = {} # { <token>: <LOCKURLS> }
print >>out, "%s: %s" % (self, msg)
for lock in self.storage.getLockList("/", includeRoot=True,
includeChildren=True,
tokenOnly=False):
tok = lock["token"]
tokenDict[tok] = lockString(lock)
userDict.setdefault(lock["principal"], []).append(tok)
ownerDict.setdefault(lock["owner"], []).append(tok)
urlDict.setdefault(lock["root"], []).append(tok)
# assert ("URL2TOKEN:" + v["root"]) in self._dict, "Inconsistency: missing URL2TOKEN:%s" % v["root"]
# assert v["token"] in self._dict["URL2TOKEN:" + v["root"]], "Inconsistency: missing token %s in URL2TOKEN:%s" % (v["token"], v["root"])
print >>out, "Locks:"
pprint(tokenDict, indent=0, width=255)
if tokenDict:
print >>out, "Locks by URL:"
pprint(urlDict, indent=4, width=255, stream=out)
print >>out, "Locks by principal:"
pprint(userDict, indent=4, width=255, stream=out)
print >>out, "Locks by owner:"
pprint(ownerDict, indent=4, width=255, stream=out)
def _generateLock(self, principal,
locktype, lockscope, lockdepth, lockowner, path, timeout):
"""Acquire lock and return lockDict.
principal
Name of the principal.
locktype
Must be 'write'.
lockscope
Must be 'shared' or 'exclusive'.
lockdepth
Must be '0' or 'infinity'.
lockowner
String identifying the owner.
path
Resource URL.
timeout
Seconds to live
This function does NOT check whether the new lock creates a conflict!
"""
if timeout is None:
timeout = LockManager.LOCK_TIME_OUT_DEFAULT
elif timeout < 0:
timeout = -1
lockDict = {"root": path,
"type": locktype,
"scope": lockscope,
"depth": lockdepth,
"owner": lockowner,
"timeout": timeout,
"principal": principal,
}
#
self.storage.create(path, lockDict)
return lockDict
def acquire(self, url, locktype, lockscope, lockdepth, lockowner, timeout,
principal, tokenList):
"""Check for permissions and acquire a lock.
On success return new lock dictionary.
On error raise a DAVError with an embedded DAVErrorCondition.
"""
url = normalizeLockRoot(url)
self._lock.acquireWrite()
try:
# Raises DAVError on conflict:
self._checkLockPermission(url, locktype, lockscope, lockdepth, tokenList, principal)
return self._generateLock(principal, locktype, lockscope, lockdepth, lockowner, url, timeout)
finally:
self._lock.release()
def refresh(self, token, timeout=None):
"""Set new timeout for lock, if existing and valid."""
if timeout is None:
timeout = LockManager.LOCK_TIME_OUT_DEFAULT
return self.storage.refresh(token, timeout)
def getLock(self, token, key=None):
"""Return lockDict, or None, if not found or invalid.
Side effect: if lock is expired, it will be purged and None is returned.
key:
name of lock attribute that will be returned instead of a dictionary.
"""
assert key in (None, "type", "scope", "depth", "owner", "root",
"timeout", "principal", "token")
lock = self.storage.get(token)
if key is None or lock is None:
return lock
return lock[key]
def release(self, token):
"""Delete lock."""
self.storage.delete(token)
def isTokenLockedByUser(self, token, principal):
"""Return True, if <token> exists, is valid, and bound to <principal>."""
return self.getLock(token, "principal") == principal
# def getUrlLockList(self, url, principal=None):
def getUrlLockList(self, url):
"""Return list of lockDict, if <url> is protected by at least one direct, valid lock.
Side effect: expired locks for this url are purged.
"""
url = normalizeLockRoot(url)
lockList = self.storage.getLockList(url, includeRoot=True,
includeChildren=False,
tokenOnly=False)
return lockList
def getIndirectUrlLockList(self, url, principal=None):
"""Return a list of valid lockDicts, that protect <path> directly or indirectly.
If a principal is given, only locks owned by this principal are returned.
Side effect: expired locks for this path and all parents are purged.
"""
url = normalizeLockRoot(url)
lockList = []
u = url
while u:
ll = self.storage.getLockList(u, includeRoot=True,
includeChildren=False,
tokenOnly=False)
for l in ll:
if u != url and l["depth"] != "infinity":
continue # We only consider parents with Depth: infinity
# TODO: handle shared locks in some way?
# if l["scope"] == "shared" and lockscope == "shared" and principal != l["principal"]:
# continue # Only compatible with shared locks by other users
if principal is None or principal == l["principal"]:
lockList.append(l)
u = util.getUriParent(u)
return lockList
def isUrlLocked(self, url):
"""Return True, if url is directly locked."""
lockList = self.getUrlLockList(url)
return len(lockList) > 0
def isUrlLockedByToken(self, url, locktoken):
"""Check, if url (or any of it's parents) is locked by locktoken."""
lockUrl = self.getLock(locktoken, "root")
return lockUrl and util.isEqualOrChildUri(lockUrl, url)
def removeAllLocksFromUrl(self, url):
self._lock.acquireWrite()
try:
lockList = self.getUrlLockList(url)
for lock in lockList:
self.release(lock["token"])
finally:
self._lock.release()
def _checkLockPermission(self, url, locktype, lockscope, lockdepth,
tokenList, principal):
"""Check, if <principal> can lock <url>, otherwise raise an error.
If locking <url> would create a conflict, DAVError(HTTP_LOCKED) is
raised. An embedded DAVErrorCondition contains the conflicting resource.
@see http://www.webdav.org/specs/rfc4918.html#lock-model
- Parent locks WILL NOT be conflicting, if they are depth-0.
- Exclusive depth-infinity parent locks WILL be conflicting, even if
they are owned by <principal>.
- Child locks WILL NOT be conflicting, if we request a depth-0 lock.
- Exclusive child locks WILL be conflicting, even if they are owned by
<principal>. (7.7)
- It is not enough to check whether a lock is owned by <principal>, but
also the token must be passed with the request. (Because <principal>
may run two different applications on his client.)
- <principal> cannot lock-exclusive, if he holds a parent shared-lock.
(This would only make sense, if he was the only shared-lock holder.)
- TODO: litmus tries to acquire a shared lock on one resource twice
(locks: 27 'double_sharedlock') and fails when we return HTTP_LOCKED.
So we allow multiple shared locks on a resource, even for the same
principal.
@param url: URL that shall be locked
@param locktype: "write"
@param lockscope: "shared"|"exclusive"
@param lockdepth: "0"|"infinity"
@param tokenList: list of lock tokens, that the user submitted in If: header
@param principal: name of the principal requesting a lock
@return: None (or raise)
"""
assert locktype == "write"
assert lockscope in ("shared", "exclusive")
assert lockdepth in ("0", "infinity")
_logger.debug("checkLockPermission(%s, %s, %s, %s)" % (url, lockscope, lockdepth, principal))
# Error precondition to collect conflicting URLs
errcond = DAVErrorCondition(PRECONDITION_CODE_LockConflict)
self._lock.acquireRead()
try:
# Check url and all parents for conflicting locks
u = url
while u:
ll = self.getUrlLockList(u)
for l in ll:
_logger.debug(" check parent %s, %s" % (u, lockString(l)))
if u != url and l["depth"] != "infinity":
# We only consider parents with Depth: infinity
continue
elif l["scope"] == "shared" and lockscope == "shared":
# Only compatible with shared locks (even by same principal)
continue
# Lock conflict
_logger.debug(" -> DENIED due to locked parent %s" % lockString(l))
errcond.add_href(l["root"])
u = util.getUriParent(u)
if lockdepth == "infinity":
# Check child URLs for conflicting locks
childLocks = self.storage.getLockList(url,
includeRoot=False,
includeChildren=True,
tokenOnly=False)
for l in childLocks:
assert util.isChildUri(url, l["root"])
# if util.isChildUri(url, l["root"]):
_logger.debug(" -> DENIED due to locked child %s" % lockString(l))
errcond.add_href(l["root"])
finally:
self._lock.release()
# If there were conflicts, raise HTTP_LOCKED for <url>, and pass
# conflicting resource with 'no-conflicting-lock' precondition
if len(errcond.hrefs) > 0:
raise DAVError(HTTP_LOCKED, errcondition=errcond)
return
def checkWritePermission(self, url, depth, tokenList, principal):
"""Check, if <principal> can modify <url>, otherwise raise HTTP_LOCKED.
If modifying <url> is prevented by a lock, DAVError(HTTP_LOCKED) is
raised. An embedded DAVErrorCondition contains the conflicting locks.
<url> may be modified by <principal>, if it is not currently locked
directly or indirectly (i.e. by a locked parent).
For depth-infinity operations, <url> also must not have locked children.
It is not enough to check whether a lock is owned by <principal>, but
also the token must be passed with the request. Because <principal> may
run two different applications.
See http://www.webdav.org/specs/rfc4918.html#lock-model
http://www.webdav.org/specs/rfc4918.html#rfc.section.7.4
TODO: verify assumptions:
- Parent locks WILL NOT be conflicting, if they are depth-0.
- Exclusive child locks WILL be conflicting, even if they are owned by <principal>.
@param url: URL that shall be modified, created, moved, or deleted
@param depth: "0"|"infinity"
@param tokenList: list of lock tokens, that the principal submitted in If: header
@param principal: name of the principal requesting a lock
@return: None or raise error
"""
assert depth in ("0", "infinity")
_logger.debug("checkWritePermission(%s, %s, %s, %s)" % (url, depth, tokenList, principal))
# Error precondition to collect conflicting URLs
errcond = DAVErrorCondition(PRECONDITION_CODE_LockConflict)
self._lock.acquireRead()
try:
# Check url and all parents for conflicting locks
u = url
while u:
ll = self.getUrlLockList(u)
_logger.debug(" checking %s" % u)
for l in ll:
_logger.debug(" l=%s" % lockString(l))
if u != url and l["depth"] != "infinity":
# We only consider parents with Depth: infinity
continue
elif principal == l["principal"] and l["token"] in tokenList:
# User owns this lock
continue
else:
# Token is owned by principal, but not passed with lock list
_logger.debug(" -> DENIED due to locked parent %s" % lockString(l))
errcond.add_href(l["root"])
u = util.getUriParent(u)
if depth == "infinity":
# Check child URLs for conflicting locks
childLocks = self.storage.getLockList(url,
includeRoot=False,
includeChildren=True,
tokenOnly=False)
for l in childLocks:
assert util.isChildUri(url, l["root"])
# if util.isChildUri(url, l["root"]):
_logger.debug(" -> DENIED due to locked child %s" % lockString(l))
errcond.add_href(l["root"])
finally:
self._lock.release()
# If there were conflicts, raise HTTP_LOCKED for <url>, and pass
# conflicting resource with 'no-conflicting-lock' precondition
if len(errcond.hrefs) > 0:
raise DAVError(HTTP_LOCKED, errcondition=errcond)
return
#===============================================================================
# test
#===============================================================================
def test():
# l = ShelveLockManager("wsgidav-locks.shelve")
# l._lazyOpen()
# l._dump()
# l.generateLock("martin", "", lockscope, lockdepth, lockowner, lockroot, timeout)
pass
if __name__ == "__main__":
test()
|
jneight/django-xadmin
|
refs/heads/batch-fix
|
xadmin/plugins/quickform.py
|
2
|
from django.db import models
from django import forms
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.forms.models import modelform_factory
import copy
from xadmin.sites import site
from xadmin.util import get_model_from_relation, vendor
from xadmin.views import BaseAdminPlugin, ModelFormAdminView
from xadmin.layout import Layout
class QuickFormPlugin(BaseAdminPlugin):
def init_request(self, *args, **kwargs):
if self.request.method == 'GET' and self.request.is_ajax() or self.request.GET.get('_ajax'):
self.admin_view.add_form_template = 'xadmin/views/quick_form.html'
self.admin_view.change_form_template = 'xadmin/views/quick_form.html'
return True
return False
def get_model_form(self, __, **kwargs):
if '_field' in self.request.GET:
defaults = {
"form": self.admin_view.form,
"fields": self.request.GET['_field'].split(','),
"formfield_callback": self.admin_view.formfield_for_dbfield,
}
return modelform_factory(self.model, **defaults)
return __()
def get_form_layout(self, __):
if '_field' in self.request.GET:
return Layout(*self.request.GET['_field'].split(','))
return __()
def get_context(self, context):
context['form_url'] = self.request.path
return context
class RelatedFieldWidgetWrapper(forms.Widget):
"""
This class is a wrapper to a given widget to add the add icon for the
admin interface.
"""
def __init__(self, widget, rel, add_url, rel_add_url):
self.is_hidden = widget.is_hidden
self.needs_multipart_form = widget.needs_multipart_form
self.attrs = widget.attrs
self.choices = widget.choices
self.is_required = widget.is_required
self.widget = widget
self.rel = rel
self.add_url = add_url
self.rel_add_url = rel_add_url
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.widget = copy.deepcopy(self.widget, memo)
obj.attrs = self.widget.attrs
memo[id(self)] = obj
return obj
@property
def media(self):
media = self.widget.media + vendor('xadmin.plugin.quick-form.js')
return media
def render(self, name, value, *args, **kwargs):
self.widget.choices = self.choices
output = []
if self.add_url:
output.append(u'<a href="%s" title="%s" class="btn btn-primary btn-sm btn-ajax pull-right" data-for-id="id_%s" data-refresh-url="%s"><i class="icon-plus"></i></a>'
% (
self.add_url, (_('Create New %s') % self.rel.to._meta.verbose_name), name,
"%s?_field=%s&%s=" % (self.rel_add_url, name, name)))
output.extend(['<div class="control-wrap" id="id_%s_wrap_container">' % name,
self.widget.render(name, value, *args, **kwargs), '</div>'])
return mark_safe(u''.join(output))
def build_attrs(self, extra_attrs=None, **kwargs):
"Helper function for building an attribute dictionary."
self.attrs = self.widget.build_attrs(extra_attrs=None, **kwargs)
return self.attrs
def value_from_datadict(self, data, files, name):
return self.widget.value_from_datadict(data, files, name)
def id_for_label(self, id_):
return self.widget.id_for_label(id_)
class QuickAddBtnPlugin(BaseAdminPlugin):
def formfield_for_dbfield(self, formfield, db_field, **kwargs):
if formfield and self.model in self.admin_site._registry and isinstance(db_field, (models.ForeignKey, models.ManyToManyField)):
rel_model = get_model_from_relation(db_field)
if rel_model in self.admin_site._registry and self.has_model_perm(rel_model, 'add'):
add_url = self.get_model_url(rel_model, 'add')
formfield.widget = RelatedFieldWidgetWrapper(
formfield.widget, db_field.rel, add_url, self.get_model_url(self.model, 'add'))
return formfield
site.register_plugin(QuickFormPlugin, ModelFormAdminView)
site.register_plugin(QuickAddBtnPlugin, ModelFormAdminView)
|
Tigge/platinumshrimp
|
refs/heads/master
|
plugins/titlegiver/__init__.py
|
6
|
__author__ = "reggna"
|
simonemurzilli/geonode
|
refs/heads/master
|
geonode/catalogue/backends/pycsw_local.py
|
18
|
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import os
from lxml import etree
from django.conf import settings
from ConfigParser import SafeConfigParser
from owslib.iso import MD_Metadata
from pycsw import server
from geonode.catalogue.backends.generic import CatalogueBackend as GenericCatalogueBackend
from geonode.catalogue.backends.generic import METADATA_FORMATS
from shapely.geometry.base import ReadingError
# pycsw settings that the user shouldn't have to worry about
CONFIGURATION = {
'server': {
'home': '.',
'url': settings.CATALOGUE['default']['URL'],
'encoding': 'UTF-8',
'language': settings.LANGUAGE_CODE,
'maxrecords': '10',
# 'loglevel': 'DEBUG',
# 'logfile': '/tmp/pycsw.log',
# 'federatedcatalogues': 'http://geo.data.gov/geoportal/csw/discovery',
# 'pretty_print': 'true',
# 'domainquerytype': 'range',
# 'domaincounts': 'true',
'profiles': 'apiso,ebrim',
},
'repository': {
'source': 'geonode',
'mappings': os.path.join(os.path.dirname(__file__), 'pycsw_local_mappings.py')
}
}
class CatalogueBackend(GenericCatalogueBackend):
def __init__(self, *args, **kwargs):
super(CatalogueBackend, self).__init__(*args, **kwargs)
self.catalogue.formats = ['Atom', 'DIF', 'Dublin Core', 'ebRIM', 'FGDC', 'ISO']
self.catalogue.local = True
def remove_record(self, uuid):
pass
def create_record(self, item):
pass
def get_record(self, uuid):
results = self._csw_local_dispatch(identifier=uuid)
if len(results) < 1:
return None
result = etree.fromstring(results).find('{http://www.isotc211.org/2005/gmd}MD_Metadata')
if result is None:
return None
record = MD_Metadata(result)
record.keywords = []
if hasattr(record, 'identification') and hasattr(record.identification, 'keywords'):
for kw in record.identification.keywords:
record.keywords.extend(kw['keywords'])
record.links = {}
record.links['metadata'] = self.catalogue.urls_for_uuid(uuid)
record.links['download'] = self.catalogue.extract_links(record)
return record
def search_records(self, keywords, start, limit, bbox):
with self.catalogue:
lresults = self._csw_local_dispatch(keywords, start+1, limit, bbox)
# serialize XML
e = etree.fromstring(lresults)
self.catalogue.records = \
[MD_Metadata(x) for x in e.findall('//{http://www.isotc211.org/2005/gmd}MD_Metadata')]
# build results into JSON for API
results = [self.catalogue.metadatarecord2dict(doc) for v, doc in self.catalogue.records.iteritems()]
result = {'rows': results,
'total': e.find('{http://www.opengis.net/cat/csw/2.0.2}SearchResults').attrib.get(
'numberOfRecordsMatched'),
'next_page': e.find('{http://www.opengis.net/cat/csw/2.0.2}SearchResults').attrib.get(
'nextRecord')
}
return result
def _csw_local_dispatch(self, keywords=None, start=0, limit=10, bbox=None, identifier=None):
"""
HTTP-less CSW
"""
# serialize pycsw settings into SafeConfigParser
# object for interaction with pycsw
mdict = dict(settings.PYCSW['CONFIGURATION'], **CONFIGURATION)
if 'server' in settings.PYCSW['CONFIGURATION']:
# override server system defaults with user specified directives
mdict['server'].update(settings.PYCSW['CONFIGURATION']['server'])
config = SafeConfigParser()
for section, options in mdict.iteritems():
config.add_section(section)
for option, value in options.iteritems():
config.set(section, option, value)
# fake HTTP environment variable
os.environ['QUERY_STRING'] = ''
# init pycsw
csw = server.Csw(config)
# fake HTTP method
csw.requesttype = 'POST'
# fake HTTP request parameters
if identifier is None: # it's a GetRecords request
formats = []
for f in self.catalogue.formats:
formats.append(METADATA_FORMATS[f][0])
csw.kvp = {
'elementsetname': 'full',
'typenames': formats,
'resulttype': 'results',
'constraintlanguage': 'CQL_TEXT',
'constraint': 'csw:AnyText like "%%%s%%"' % keywords,
'outputschema': 'http://www.isotc211.org/2005/gmd',
'startposition': start,
'maxrecords': limit
}
response = csw.getrecords()
else: # it's a GetRecordById request
csw.kvp = {
'id': [identifier],
'outputschema': 'http://www.isotc211.org/2005/gmd',
}
# FIXME(Ariel): Remove this try/except block when pycsw deals with
# empty geometry fields better.
# https://gist.github.com/ingenieroariel/717bb720a201030e9b3a
try:
response = csw.getrecordbyid()
except ReadingError:
return []
return etree.tostring(response)
|
mcmcplotlib/mcmcplotlib
|
refs/heads/gh-pages
|
_downloads/1b7f8df4a92ddc1424eebaed150db6ed/mpl_plot_hdi.py
|
2
|
"""
Plot HDI
========
_thumb: .8, .8
"""
import matplotlib.pyplot as plt
import numpy as np
import arviz as az
az.style.use("arviz-darkgrid")
x_data = np.random.normal(0, 1, 100)
y_data = 2 + x_data * 0.5
y_data_rep = np.random.normal(y_data, 0.5, (200, 100))
az.plot_hdi(x_data, y_data_rep, color="k", plot_kwargs={"ls": "--"})
plt.plot(x_data, y_data, "C6")
plt.show()
|
CPqD/RouteFlow
|
refs/heads/master
|
pox/pox/web/jsonrpc.py
|
25
|
# Copyright 2011,2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
A library for implementing JSON-RPC based web services
This is lightweight, low on features, and not a whole lot of effort
has been paid to really complying with the JSON-RPC spec. Feel
free to improve it. ;)
It'd be nice to factor the JSON-RPC stuff out so that it could
be used with something besides just HTTP.
Also, it has some capability for compatibility with Qooxdoo.
"""
import json
import sys
from pox.web.webcore import *
from pox.core import core
log = core.getLogger()
# A long polling handler can return this if it notices that the
# connection has closed.
ABORT = object()
class JSONRPCHandler (SplitRequestHandler):
"""
Meant for implementing JSON-RPC web services
Implement RPC methods by prefacing them with "_exec_".
config keys of note:
"auth" is a function which takes a username and password and returns
True if they are a valid user. If set, turns on authentication.
"auth_realm" is the optional authentication realm name.
"qx" turns on Qooxdoo mode by default (it's usually switched on by
seeing a "service" key in the request).
There are a couple of extensions to JSON-RPC:
If you want to use positional AND named parameters in a request, use
"params" for the former and "kwparams" for the latter.
There's an optional "service" key in requests. This comes from qooxdoo.
If it is given, look for the _exec_ method on some other object instead
of self. Put the additional services in an arg named 'services'.
"""
protocol_version = 'HTTP/1.1'
QX_ERR_ILLEGAL_SERVICE = 1
QX_ERR_SERVICE_NOT_FOUND = 2
QX_ERR_CLASS_NOT_FOUND = 3
QX_ERR_METHOD_NOT_FOUND = 4
QX_ERR_PARAMETER_MISMATCH = 5
QX_ERR_PERMISSION_DENIED = 6
QX_ORIGIN_SERVER = 1
QX_ORIGIN_METHOD = 2
ERR_PARSE_ERROR = -32700 # WE USE THIS
ERR_INVALID_REQUEST = -32600
ERR_METHOD_NOT_FOUND = -32601 # WE USE THIS
ERR_INVALID_PARAMS = -32602
ERR_INTERNAL_ERROR = -32603 # WE USE THIS
ERR_SERVER_ERROR = -32000 # to -32099 WE USE THIS
ERR_METHOD_ERROR = 99 # We use this for errors in methods
ERROR_XLATE = {
ERR_PARSE_ERROR : (1, QX_ERR_ILLEGAL_SERVICE), # Nonsense
ERR_METHOD_NOT_FOUND : (1, QX_ERR_METHOD_NOT_FOUND),
ERR_INTERNAL_ERROR : (),
ERR_SERVER_ERROR : (),
}
_qx = False
def _init (self):
# Maybe the following arg-adding feature should just be part of
# SplitRequestHandler?
for k,v in self.args.iteritems():
setattr(self, "_arg_" + k, v)
self.auth_function = self.args.get('auth', None)
self.auth_realm = self.args.get('auth_realm', "JSONRPC")
self._qx = self.args.get('qx', self._qx)
def _send_auth_header (self):
if self.auth_function:
self.send_header('WWW-Authenticate',
'Basic realm="%s"' % (self.auth_realm,))
def _do_auth (self):
if not self.auth_function:
return True
auth = self.headers.get("Authorization", "").strip().lower()
success = False
if auth.startswith("basic "):
try:
auth = base64.decodestring(auth[6:].strip()).split(':', 1)
success = self.auth_function(auth[0], auth[1])
except:
pass
if not success:
self.send_response(401, "Authorization Required")
self._send_auth_header()
self.end_headers()
return success
def _translate_error (self, e):
if not 'error' in e: return
if self._qx:
if e['code'] < 0:
c,o = ERROR_XLATE.get(e['code'], (1, self.QX_ERR_ILLEGAL_SERVICE))
e['code'] = c
e['origin'] = o
else:
e['origin'] = QX_ORIGIN_METHOD
def _handle (self, data):
try:
try:
service = self
if 'services' in self.args:
if 'service' in data:
service = self.args['services'].get(data['service'], self)
self._qx = True # This is a qooxdoo request
method = "_exec_" + data.get('method')
method = getattr(service, method)
except:
response = {}
response['error'] = {'code':self.ERR_METHOD_NOT_FOUND,
'message':'Method not found'}
return response
params = data.get('params', [])
if isinstance(params, dict):
kw = params
params = []
else:
kw = data.get('kwparams', {})
try:
r = method(*params,**kw)
#TODO: jsonrpc version?
return r
except:
response = {}
t,v,_ = sys.exc_info()
response['error'] = {'message': "%s: %s" % (t,v),
'code':self.ERR_METHOD_ERROR}
import traceback
response['error']['data'] = {'traceback':traceback.format_exc()}
log.exception("While handling %s...", data.get('method'))
return response
except:
response = {}
t,v,_ = sys.exc_info()
response['error'] = {'message': "%s: %s" % (t,v),
'code':self.ERR_INTERNAL_ERROR}
return response
def do_POST (self):
if not self._do_auth():
return
dumps_opts = {}
#FIXME: this is a hack
if 'pretty' in self.path:
dumps_opts = {'sort_keys':True, 'indent':2}
def reply (response):
orig = response
#if not isinstance(response, basestring):
if isinstance(response, list):
for r in response: self._translate_error(r)
else:
self._translate_error(response)
response = json.dumps(response, default=str, **dumps_opts)
response = response.strip()
if len(response) and not response.endswith("\n"): response += "\n"
try:
self.send_response(200, "OK")
self.send_header("Content-Type", "application/json")
self.send_header("Content-Length", len(response))
self.end_headers()
self.wfile.write(response)
except IOError as e:
if e.errno == 32:
if isinstance(orig, dict) and 'error' in orig:
log.info("Socket closed when writing error response")
else:
log.warning("Socket closed when writing response")
#log.debug(" response was: " + response)
else:
log.exception("Exception while trying to send JSON-RPC response")
try:
self.wfile.close()
except:
pass
return False
except:
log.exception("Exception while trying to send JSON-RPC response")
return False
return True
l = self.headers.get("Content-Length", "")
data = ''
if l == "":
data = self.rfile.read()
else:
data = self.rfile.read(int(l))
try:
data = json.loads(data)
except:
response = {}
response['error'] = {'code':self.ERR_PARSE_ERROR,
'message':'Parse error'}
return reply(response)
single = False
if not isinstance(data, list):
data = [data]
single = True
responses = []
for req in data:
response = self._handle(req) # Should never raise an exception
if response is ABORT:
return
if 'id' in req or 'error' in response:
response['id'] = req.get('id')
responses.append(response)
if len(responses) == 0:
responses = ''
else:
if single:
responses = responses[0]
reply(responses)
class QXJSONRPCHandler (JSONRPCHandler):
"""
A subclass of JSONRPCHandler which speaks something closer to
qooxdoo's version of JSON-RPC.
"""
_qx = True
#TODO: Implement the <SCRIPT> based GET method for cross-domain
def make_error (msg = "Unknown Error",
code = JSONRPCHandler.ERR_SERVER_ERROR,
data = None):
e = {'code':code,'message':msg}
if data is not None:
e['data'] = data
r = {'error':e}
return r
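# Illustrative sketch, not part of POX: a minimal handler exposing one RPC
# method via the "_exec_" naming convention described in JSONRPCHandler's
# docstring. The class and method names are invented, and wiring the handler
# into webcore's splitter is not shown.
class _ExampleEchoHandler (JSONRPCHandler):
  def _exec_echo (self, message=""):
    return {'result': message}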
|
bikashgupta11/javarobot
|
refs/heads/master
|
src/main/resources/jython/Lib/selenium/webdriver/chrome/__init__.py
|
2454
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
|
kutenai/django
|
refs/heads/master
|
tests/transactions/tests.py
|
24
|
from __future__ import unicode_literals
import sys
import threading
import time
from unittest import skipIf, skipUnless
from django.db import (
DatabaseError, Error, IntegrityError, OperationalError, connection,
transaction,
)
from django.test import (
TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
)
from .models import Reporter
@skipUnless(connection.features.uses_savepoints, "'atomic' requires transactions and savepoints.")
class AtomicTests(TransactionTestCase):
"""
Tests for the atomic decorator and context manager.
The tests make assertions on internal attributes because there isn't a
robust way to ask the database for its current transaction state.
Since the decorator syntax is converted into a context manager (see the
implementation), there are only a few basic tests with the decorator
syntax and the bulk of the tests use the context manager syntax.
"""
available_apps = ['transactions']
def test_decorator_syntax_commit(self):
@transaction.atomic
def make_reporter():
Reporter.objects.create(first_name="Tintin")
make_reporter()
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
def test_decorator_syntax_rollback(self):
@transaction.atomic
def make_reporter():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
with self.assertRaisesMessage(Exception, "Oops"):
make_reporter()
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_alternate_decorator_syntax_commit(self):
@transaction.atomic()
def make_reporter():
Reporter.objects.create(first_name="Tintin")
make_reporter()
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
def test_alternate_decorator_syntax_rollback(self):
@transaction.atomic()
def make_reporter():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
with self.assertRaisesMessage(Exception, "Oops"):
make_reporter()
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_commit(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
def test_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_nested_commit_commit(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
self.assertQuerysetEqual(
Reporter.objects.all(),
['<Reporter: Archibald Haddock>', '<Reporter: Tintin>']
)
def test_nested_commit_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
def test_nested_rollback_commit(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with transaction.atomic():
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_nested_rollback_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_merged_commit_commit(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
self.assertQuerysetEqual(
Reporter.objects.all(),
['<Reporter: Archibald Haddock>', '<Reporter: Tintin>']
)
def test_merged_commit_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
# Writes in the outer block are rolled back too.
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_merged_rollback_commit(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_merged_rollback_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_reuse_commit_commit(self):
atomic = transaction.atomic()
with atomic:
Reporter.objects.create(first_name="Tintin")
with atomic:
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Archibald Haddock>', '<Reporter: Tintin>'])
def test_reuse_commit_rollback(self):
atomic = transaction.atomic()
with atomic:
Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
def test_reuse_rollback_commit(self):
atomic = transaction.atomic()
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(last_name="Tintin")
with atomic:
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_reuse_rollback_rollback(self):
atomic = transaction.atomic()
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_force_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
            # The atomic block shouldn't roll back, but force it.
self.assertFalse(transaction.get_rollback())
transaction.set_rollback(True)
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_prevent_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
sid = transaction.savepoint()
# trigger a database error inside an inner atomic without savepoint
with self.assertRaises(DatabaseError):
with transaction.atomic(savepoint=False):
with connection.cursor() as cursor:
cursor.execute(
"SELECT no_such_col FROM transactions_reporter")
# prevent atomic from rolling back since we're recovering manually
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
transaction.savepoint_rollback(sid)
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
class AtomicInsideTransactionTests(AtomicTests):
"""All basic tests for atomic should also pass within an existing transaction."""
def setUp(self):
self.atomic = transaction.atomic()
self.atomic.__enter__()
def tearDown(self):
self.atomic.__exit__(*sys.exc_info())
@skipIf(
connection.features.autocommits_when_autocommit_is_off,
"This test requires a non-autocommit mode that doesn't autocommit."
)
class AtomicWithoutAutocommitTests(AtomicTests):
"""All basic tests for atomic should also pass when autocommit is turned off."""
def setUp(self):
transaction.set_autocommit(False)
def tearDown(self):
# The tests access the database after exercising 'atomic', initiating
        # a transaction; a rollback is required before restoring autocommit.
transaction.rollback()
transaction.set_autocommit(True)
@skipUnless(connection.features.uses_savepoints, "'atomic' requires transactions and savepoints.")
class AtomicMergeTests(TransactionTestCase):
"""Test merging transactions with savepoint=False."""
available_apps = ['transactions']
def test_merged_outer_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Calculus")
raise Exception("Oops, that's his last name")
                # The third insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
            # The second insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The first block has a savepoint and must roll back.
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_merged_inner_savepoint_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Calculus")
raise Exception("Oops, that's his last name")
                # The third insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The second block has a savepoint and must roll back.
self.assertEqual(Reporter.objects.count(), 1)
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
@skipUnless(connection.features.uses_savepoints, "'atomic' requires transactions and savepoints.")
class AtomicErrorsTests(TransactionTestCase):
available_apps = ['transactions']
def test_atomic_prevents_setting_autocommit(self):
autocommit = transaction.get_autocommit()
with transaction.atomic():
with self.assertRaises(transaction.TransactionManagementError):
transaction.set_autocommit(not autocommit)
# Make sure autocommit wasn't changed.
self.assertEqual(connection.autocommit, autocommit)
def test_atomic_prevents_calling_transaction_methods(self):
with transaction.atomic():
with self.assertRaises(transaction.TransactionManagementError):
transaction.commit()
with self.assertRaises(transaction.TransactionManagementError):
transaction.rollback()
def test_atomic_prevents_queries_in_broken_transaction(self):
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with transaction.atomic():
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
# The transaction is marked as needing rollback.
with self.assertRaises(transaction.TransactionManagementError):
r2.save(force_update=True)
self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Haddock")
@skipIfDBFeature('atomic_transactions')
def test_atomic_allows_queries_after_fixing_transaction(self):
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with transaction.atomic():
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
# Mark the transaction as no longer needing rollback.
transaction.set_rollback(False)
r2.save(force_update=True)
self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Calculus")
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_atomic_prevents_queries_in_broken_transaction_after_client_close(self):
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
connection.close()
# The connection is closed and the transaction is marked as
# needing rollback. This will raise an InterfaceError on databases
# that refuse to create cursors on closed connections (PostgreSQL)
# and a TransactionManagementError on other databases.
with self.assertRaises(Error):
Reporter.objects.create(first_name="Cuthbert", last_name="Calculus")
        # The connection is usable again.
self.assertEqual(Reporter.objects.count(), 0)
@skipUnless(connection.vendor == 'mysql', "MySQL-specific behaviors")
class AtomicMySQLTests(TransactionTestCase):
available_apps = ['transactions']
@skipIf(threading is None, "Test requires threading")
def test_implicit_savepoint_rollback(self):
"""MySQL implicitly rolls back savepoints when it deadlocks (#22291)."""
other_thread_ready = threading.Event()
def other_thread():
try:
with transaction.atomic():
Reporter.objects.create(id=1, first_name="Tintin")
other_thread_ready.set()
# We cannot synchronize the two threads with an event here
# because the main thread locks. Sleep for a little while.
time.sleep(1)
# 2) ... and this line deadlocks. (see below for 1)
Reporter.objects.exclude(id=1).update(id=2)
finally:
# This is the thread-local connection, not the main connection.
connection.close()
other_thread = threading.Thread(target=other_thread)
other_thread.start()
other_thread_ready.wait()
with self.assertRaisesMessage(OperationalError, 'Deadlock found'):
# Double atomic to enter a transaction and create a savepoint.
with transaction.atomic():
with transaction.atomic():
# 1) This line locks... (see above for 2)
Reporter.objects.create(id=1, first_name="Tintin")
other_thread.join()
class AtomicMiscTests(TransactionTestCase):
available_apps = []
def test_wrap_callable_instance(self):
"""#20028 -- Atomic must support wrapping callable instances."""
class Callable(object):
def __call__(self):
pass
# Must not raise an exception
transaction.atomic(Callable())
@skipUnlessDBFeature('can_release_savepoints')
def test_atomic_does_not_leak_savepoints_on_failure(self):
"""#23074 -- Savepoints must be released after rollback."""
# Expect an error when rolling back a savepoint that doesn't exist.
# Done outside of the transaction block to ensure proper recovery.
with self.assertRaises(Error):
# Start a plain transaction.
with transaction.atomic():
# Swallow the intentional error raised in the sub-transaction.
with self.assertRaisesMessage(Exception, "Oops"):
# Start a sub-transaction with a savepoint.
with transaction.atomic():
sid = connection.savepoint_ids[-1]
raise Exception("Oops")
# This is expected to fail because the savepoint no longer exists.
connection.savepoint_rollback(sid)
@skipIf(connection.features.autocommits_when_autocommit_is_off,
"This test requires a non-autocommit mode that doesn't autocommit.")
def test_orm_query_without_autocommit(self):
"""#24921 -- ORM queries must be possible after set_autocommit(False)."""
transaction.set_autocommit(False)
try:
Reporter.objects.create(first_name="Tintin")
finally:
transaction.rollback()
transaction.set_autocommit(True)
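# --- Illustrative sketch (not part of the test suite) ---
# The behaviour exercised above, condensed into a hedged example of how
# application code might use the same API; Reporter is the test model
# imported at the top of this file.
#
# def create_reporters():
#     with transaction.atomic():                    # outer transaction
#         Reporter.objects.create(first_name="Tintin")
#         with transaction.atomic(savepoint=False): # merged inner block
#             Reporter.objects.create(first_name="Archibald", last_name="Haddock")
#     # an exception raised anywhere inside rolls back both inserts together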
|
akrzos/cfme_tests
|
refs/heads/master
|
cfme/tests/infrastructure/test_host_provisioning.py
|
2
|
import pytest
from cfme.infrastructure import host
from cfme.fixtures import pytest_selenium as sel
from cfme.infrastructure.pxe import get_pxe_server_from_config, get_template_from_config
from cfme.provisioning import provisioning_form
from cfme.services import requests
from cfme.web_ui import flash, fill
from utils.conf import cfme_data
from utils.log import logger
from utils.wait import wait_for
from utils import testgen, version
pytestmark = [
pytest.mark.meta(server_roles="+automate +notifier"),
pytest.mark.usefixtures('uses_infra_providers'),
]
def pytest_generate_tests(metafunc):
# Filter out providers without host provisioning data defined
argnames, argvalues, idlist = testgen.infra_providers(metafunc, required_fields=[
['host_provisioning', 'pxe_server'],
['host_provisioning', 'pxe_image'],
['host_provisioning', 'pxe_image_type'],
['host_provisioning', 'pxe_kickstart'],
['host_provisioning', 'datacenter'],
['host_provisioning', 'cluster'],
['host_provisioning', 'datastores'],
['host_provisioning', 'hostname'],
['host_provisioning', 'root_password'],
['host_provisioning', 'ip_addr'],
['host_provisioning', 'subnet_mask'],
['host_provisioning', 'gateway'],
['host_provisioning', 'dns'],
])
pargnames, pargvalues, pidlist = testgen.pxe_servers(metafunc)
argnames = argnames + ['pxe_server', 'pxe_cust_template']
pxe_server_names = [pval[0] for pval in pargvalues]
new_idlist = []
new_argvalues = []
for i, argvalue_tuple in enumerate(argvalues):
args = dict(zip(argnames, argvalue_tuple))
try:
prov_data = args['provider'].data['host_provisioning']
except KeyError:
# No host provisioning data available
continue
stream = prov_data.get('runs_on_stream', '')
if not version.current_version().is_in_series(str(stream)):
continue
pxe_server_name = prov_data.get('pxe_server', '')
if pxe_server_name not in pxe_server_names:
continue
pxe_cust_template = prov_data.get('pxe_kickstart', '')
if pxe_cust_template not in cfme_data.get('customization_templates', {}).keys():
continue
argvalues[i].append(get_pxe_server_from_config(pxe_server_name))
argvalues[i].append(get_template_from_config(pxe_cust_template))
new_idlist.append(idlist[i])
new_argvalues.append(argvalues[i])
testgen.parametrize(metafunc, argnames, new_argvalues, ids=new_idlist, scope="module")
@pytest.fixture(scope="module")
def setup_pxe_servers_host_prov(pxe_server, pxe_cust_template, host_provisioning):
if not pxe_server.exists():
pxe_server.create()
pxe_server.set_pxe_image_type(host_provisioning['pxe_image'],
host_provisioning['pxe_image_type'])
if not pxe_cust_template.exists():
pxe_cust_template.create()
@pytest.mark.meta(blockers=[1203775, 1232427])
@pytest.mark.usefixtures('setup_pxe_servers_host_prov')
def test_host_provisioning(setup_provider, cfme_data, host_provisioning, provider, smtp_test,
request):
"""Tests host provisioning
Metadata:
test_flag: host_provision
"""
# Add host before provisioning
test_host = host.get_from_config('esx')
test_host.create()
# Populate provisioning_data before submitting host provisioning form
pxe_server, pxe_image, pxe_image_type, pxe_kickstart, datacenter, cluster, datastores,\
prov_host_name, root_password, ip_addr, subnet_mask, gateway, dns = map(
host_provisioning.get,
('pxe_server', 'pxe_image', 'pxe_image_type', 'pxe_kickstart', 'datacenter', 'cluster',
'datastores', 'hostname', 'root_password', 'ip_addr', 'subnet_mask', 'gateway', 'dns'))
def cleanup_host():
try:
logger.info('Cleaning up host %s on provider %s', prov_host_name, provider.key)
mgmt_system = provider.mgmt
host_list = mgmt_system.list_host()
if host_provisioning['ip_addr'] in host_list:
wait_for(mgmt_system.is_host_connected, [host_provisioning['ip_addr']])
mgmt_system.remove_host_from_cluster(host_provisioning['ip_addr'])
ipmi = test_host.get_ipmi()
ipmi.power_off()
            # During host provisioning, the host name gets changed from what's specified at
            # creation time. If host provisioning succeeds, the original name is reverted to;
            # otherwise the changed names are retained upon failure.
renamed_host_name1 = "{} ({})".format('IPMI', host_provisioning['ipmi_address'])
renamed_host_name2 = "{} ({})".format('VMware ESXi', host_provisioning['ip_addr'])
host_list_ui = host.get_all_hosts()
if host_provisioning['hostname'] in host_list_ui:
test_host.delete(cancel=False)
host.wait_for_host_delete(test_host)
elif renamed_host_name1 in host_list_ui:
host_renamed_obj1 = host.Host(name=renamed_host_name1)
host_renamed_obj1.delete(cancel=False)
host.wait_for_host_delete(host_renamed_obj1)
elif renamed_host_name2 in host_list_ui:
host_renamed_obj2 = host.Host(name=renamed_host_name2)
host_renamed_obj2.delete(cancel=False)
host.wait_for_host_delete(host_renamed_obj2)
except:
# The mgmt_sys classes raise Exception :\
logger.warning('Failed to clean up host %s on provider %s',
prov_host_name, provider.key)
request.addfinalizer(cleanup_host)
pytest.sel.force_navigate('infrastructure_provision_host', context={
'host': test_host, })
note = ('Provisioning host {} on provider {}'.format(prov_host_name, provider.key))
provisioning_data = {
'email': 'template_provisioner@example.com',
'first_name': 'Template',
'last_name': 'Provisioner',
'notes': note,
'pxe_server': pxe_server,
'pxe_image': {'name': [pxe_image]},
'provider_name': provider.name,
'cluster': "{} / {}".format(datacenter, cluster),
'datastore_name': {'name': datastores},
'root_password': root_password,
'prov_host_name': prov_host_name,
'ip_address': ip_addr,
'subnet_mask': subnet_mask,
'gateway': gateway,
'dns_servers': dns,
'custom_template': {'name': [pxe_kickstart]},
}
fill(provisioning_form, provisioning_data, action=provisioning_form.host_submit_button)
flash.assert_success_message(
"Host Request was Submitted, you will be notified when your Hosts are ready")
row_description = 'PXE install on [{}] from image [{}]'.format(prov_host_name, pxe_image)
cells = {'Description': row_description}
row, __ = wait_for(requests.wait_for_request, [cells],
fail_func=requests.reload, num_sec=1500, delay=20)
assert row.last_message.text == 'Host Provisioned Successfully'
assert row.status.text != 'Error'
# Navigate to host details page and verify Provider and cluster names
sel.force_navigate('infrastructure_host', context={'host': test_host, })
assert test_host.get_detail('Relationships', 'Infrastructure Provider') ==\
provider.name, 'Provider name does not match'
assert test_host.get_detail('Relationships', 'Cluster') ==\
host_provisioning['cluster'], 'Cluster does not match'
# Navigate to host datastore page and verify that the requested datastore has been assigned
# to the host
requested_ds = host_provisioning['datastores']
datastores = test_host.get_datastores()
assert set(requested_ds).issubset(datastores), 'Datastores are missing some members'
# Wait for e-mails to appear
def verify():
return len(
smtp_test.get_emails(
                subject_like="Your host provisioning request has Completed - Host:%{}%".format(
prov_host_name))
) > 0
wait_for(verify, message="email receive check", delay=5)
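# --- Illustrative sketch (not part of the original test) ---
# The utils.wait.wait_for polling pattern used twice above, shown as a minimal
# hedged example; check_condition is a hypothetical predicate.
#
# def check_condition():
#     return some_state_is_ready()  # hypothetical check
#
# wait_for(check_condition, message="condition check", num_sec=300, delay=10)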
|
ronanki/merlin
|
refs/heads/master
|
src/work_in_progress/oliver/run_dnn_hourly_check.py
|
3
|
import cPickle
import gzip
import os, sys, errno
import time
import math
# numpy & theano imports need to be done in this order (only for some numpy installations, not sure why)
import numpy
# we need to explicitly import this in some cases, not sure why this doesn't get imported with numpy itself
import numpy.distutils.__config__
# and only after that can we import theano
import theano
from utils.providers import ListDataProvider
from frontend.label_normalisation import HTSLabelNormalisation, XMLLabelNormalisation
from frontend.silence_remover import SilenceRemover
from frontend.silence_remover import trim_silence
from frontend.min_max_norm import MinMaxNormalisation
#from frontend.acoustic_normalisation import CMPNormalisation
from frontend.acoustic_composition import AcousticComposition
from frontend.parameter_generation import ParameterGeneration
#from frontend.feature_normalisation_base import FeatureNormBase
from frontend.mean_variance_norm import MeanVarianceNorm
# the new class for label composition and normalisation
from frontend.label_composer import LabelComposer
import configuration
from models.dnn import DNN
from models.ms_dnn import MultiStreamDNN
from models.ms_dnn_gv import MultiStreamDNNGv
from models.sdae import StackedDenoiseAutoEncoder
from utils.compute_distortion import DistortionComputation, IndividualDistortionComp
from utils.generate import generate_wav
from utils.learn_rates import ExpDecreaseLearningRate
#import matplotlib.pyplot as plt
# our custom logging class that can also plot
#from logplot.logging_plotting import LoggerPlotter, MultipleTimeSeriesPlot, SingleWeightMatrixPlot
from logplot.logging_plotting import LoggerPlotter, MultipleSeriesPlot, SingleWeightMatrixPlot
import logging # as logging
import logging.config
import StringIO
def extract_file_id_list(file_list):
file_id_list = []
for file_name in file_list:
file_id = os.path.basename(os.path.splitext(file_name)[0])
file_id_list.append(file_id)
return file_id_list
def read_file_list(file_name):
logger = logging.getLogger("read_file_list")
file_lists = []
fid = open(file_name)
for line in fid.readlines():
line = line.strip()
if len(line) < 1:
continue
file_lists.append(line)
fid.close()
logger.debug('Read file list from %s' % file_name)
return file_lists
def make_output_file_list(out_dir, in_file_lists):
out_file_lists = []
for in_file_name in in_file_lists:
file_id = os.path.basename(in_file_name)
out_file_name = out_dir + '/' + file_id
out_file_lists.append(out_file_name)
return out_file_lists
def prepare_file_path_list(file_id_list, file_dir, file_extension, new_dir_switch=True):
if not os.path.exists(file_dir) and new_dir_switch:
os.makedirs(file_dir)
file_name_list = []
for file_id in file_id_list:
file_name = file_dir + '/' + file_id + file_extension
file_name_list.append(file_name)
return file_name_list
def visualize_dnn(dnn):
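    # NOTE: relies on 'logger' and 'plotlogger' being defined in an enclosing
    # scope; within this file it is only referenced from commented-out code.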
layer_num = len(dnn.params) / 2 ## including input and output
for i in xrange(layer_num):
fig_name = 'Activation weights W' + str(i)
fig_title = 'Activation weights of W' + str(i)
xlabel = 'Neuron index of hidden layer ' + str(i)
ylabel = 'Neuron index of hidden layer ' + str(i+1)
if i == 0:
xlabel = 'Input feature index'
if i == layer_num-1:
ylabel = 'Output feature index'
logger.create_plot(fig_name, SingleWeightMatrixPlot)
plotlogger.add_plot_point(fig_name, fig_name, dnn.params[i*2].get_value(borrow=True).T)
plotlogger.save_plot(fig_name, title=fig_name, xlabel=xlabel, ylabel=ylabel)
def train_DNN(train_xy_file_list, valid_xy_file_list, \
nnets_file_name, n_ins, n_outs, ms_outs, hyper_params, buffer_size, plot=False):
# get loggers for this function
# this one writes to both console and file
logger = logging.getLogger("main.train_DNN")
logger.debug('Starting train_DNN')
if plot:
# this one takes care of plotting duties
plotlogger = logging.getLogger("plotting")
# create an (empty) plot of training convergence, ready to receive data points
logger.create_plot('training convergence',MultipleSeriesPlot)
try:
assert numpy.sum(ms_outs) == n_outs
except AssertionError:
        logger.critical('the summation of multi-stream outputs does not equal %d' %(n_outs))
raise
####parameters#####
finetune_lr = float(hyper_params['learning_rate'])
training_epochs = int(hyper_params['training_epochs'])
batch_size = int(hyper_params['batch_size'])
l1_reg = float(hyper_params['l1_reg'])
l2_reg = float(hyper_params['l2_reg'])
private_l2_reg = float(hyper_params['private_l2_reg'])
warmup_epoch = int(hyper_params['warmup_epoch'])
momentum = float(hyper_params['momentum'])
warmup_momentum = float(hyper_params['warmup_momentum'])
use_rprop = int(hyper_params['use_rprop'])
hidden_layers_sizes = hyper_params['hidden_layers_sizes']
stream_weights = hyper_params['stream_weights']
private_hidden_sizes = hyper_params['private_hidden_sizes']
buffer_utt_size = buffer_size
early_stop_epoch = int(hyper_params['early_stop_epochs'])
hidden_activation = hyper_params['hidden_activation']
output_activation = hyper_params['output_activation']
stream_lr_weights = hyper_params['stream_lr_weights']
use_private_hidden = hyper_params['use_private_hidden']
model_type = hyper_params['model_type']
## use a switch to turn on pretraining
## pretraining may not help too much, if this case, we turn it off to save time
do_pretraining = hyper_params['do_pretraining']
pretraining_epochs = int(hyper_params['pretraining_epochs'])
pretraining_lr = float(hyper_params['pretraining_lr'])
buffer_size = int(buffer_size / batch_size) * batch_size
###################
(train_x_file_list, train_y_file_list) = train_xy_file_list
(valid_x_file_list, valid_y_file_list) = valid_xy_file_list
logger.debug('Creating training data provider')
train_data_reader = ListDataProvider(x_file_list = train_x_file_list, y_file_list = train_y_file_list, n_ins = n_ins, n_outs = n_outs, buffer_size = buffer_size, shuffle = True)
logger.debug('Creating validation data provider')
valid_data_reader = ListDataProvider(x_file_list = valid_x_file_list, y_file_list = valid_y_file_list, n_ins = n_ins, n_outs = n_outs, buffer_size = buffer_size, shuffle = False)
shared_train_set_xy, temp_train_set_x, temp_train_set_y = train_data_reader.load_next_partition()
train_set_x, train_set_y = shared_train_set_xy
shared_valid_set_xy, temp_valid_set_x, temp_valid_set_y = valid_data_reader.load_next_partition()
valid_set_x, valid_set_y = shared_valid_set_xy
train_data_reader.reset()
valid_data_reader.reset()
# frames_per_hour = 720000.0
# tframes = train_set_x.get_value().shape[0]
# vframes = valid_set_x.get_value().shape[0]
# print 'Training frames: %s (%s hours)'%(tframes, tframes / frames_per_hour)
# print 'Validation frames: %s (%s hours)'%(tframes, tframes / frames_per_hour)
# sys.exit('999')
    ## temporarily we use the training set as pretrain_set_x.
    ## we need to support any data for pretraining
pretrain_set_x = train_set_x
# numpy random generator
numpy_rng = numpy.random.RandomState(123)
logger.info('building the model')
dnn_model = None
pretrain_fn = None ## not all the model support pretraining right now
train_fn = None
valid_fn = None
    valid_model = None ## valid_fn and valid_model are the same; reserved to compute multi-stream distortion
if model_type == 'DNN':
dnn_model = DNN(numpy_rng=numpy_rng, n_ins=n_ins, n_outs = n_outs,
l1_reg = l1_reg, l2_reg = l2_reg,
hidden_layers_sizes = hidden_layers_sizes,
hidden_activation = hidden_activation,
output_activation = output_activation,
use_rprop = use_rprop, rprop_init_update=finetune_lr)
train_fn, valid_fn, valid_score_i = dnn_model.build_finetune_functions(
(train_set_x, train_set_y), (valid_set_x, valid_set_y), batch_size=batch_size, return_valid_score_i=True)
elif model_type == 'SDAE':
##basic model is ready.
        ## if the corruption level is set to zero, it becomes a normal autoencoder
dnn_model = StackedDenoiseAutoEncoder(numpy_rng=numpy_rng, n_ins=n_ins, n_outs = n_outs,
l1_reg = l1_reg, l2_reg = l2_reg,
hidden_layers_sizes = hidden_layers_sizes)
if do_pretraining:
pretraining_fn = dnn_model.pretraining_functions(pretrain_set_x, batch_size)
train_fn, valid_fn = dnn_model.build_finetune_functions(
(train_set_x, train_set_y), (valid_set_x, valid_set_y), batch_size=batch_size)
elif model_type == 'MSDNN': ##model is ready, but the hyper-parameters are not optimised.
dnn_model = MultiStreamDNN(numpy_rng=numpy_rng, n_ins=n_ins, ms_outs=ms_outs,
l1_reg = l1_reg, l2_reg = l2_reg,
hidden_layers_sizes = hidden_layers_sizes,
stream_weights = stream_weights,
hidden_activation = hidden_activation,
output_activation = output_activation)
train_fn, valid_fn = dnn_model.build_finetune_functions(
(train_set_x, train_set_y), (valid_set_x, valid_set_y),
batch_size=batch_size, lr_weights = stream_lr_weights)
elif model_type == 'MSDNN_GV': ## not fully ready
dnn_model = MultiStreamDNNGv(numpy_rng=numpy_rng, n_ins=n_ins, ms_outs=ms_outs,
l1_reg = l1_reg, l2_reg = l2_reg,
hidden_layers_sizes = hidden_layers_sizes,
stream_weights = stream_weights,
hidden_activation = hidden_activation,
output_activation = output_activation)
train_fn, valid_fn = dnn_model.build_finetune_functions(
(train_set_x, train_set_y), (valid_set_x, valid_set_y),
batch_size=batch_size, lr_weights = stream_lr_weights)
else:
logger.critical('%s type NN model is not supported!' %(model_type))
raise
## if pretraining is supported in one model, add the switch here
## be careful to use autoencoder for pretraining here:
## for SDAE, currently only sigmoid function is supported in the hidden layers, as our input is scaled to [0, 1]
    ## however, tanh works better and converges faster in finetuning
##
## Will extend this soon...
if do_pretraining and model_type == 'SDAE':
logger.info('pretraining the %s model' %(model_type))
corruption_level = 0.0
## in SDAE we do layer-wise pretraining using autoencoders
for i in xrange(dnn_model.n_layers):
for epoch in xrange(pretraining_epochs):
sub_start_time = time.clock()
pretrain_loss = []
while (not train_data_reader.is_finish()):
shared_train_set_xy, temp_train_set_x, temp_train_set_y = train_data_reader.load_next_partition()
pretrain_set_x.set_value(numpy.asarray(temp_train_set_x, dtype=theano.config.floatX), borrow=True)
n_train_batches = pretrain_set_x.get_value().shape[0] / batch_size
for batch_index in xrange(n_train_batches):
pretrain_loss.append(pretraining_fn[i](index=batch_index,
corruption=corruption_level,
learning_rate=pretraining_lr))
sub_end_time = time.clock()
                logger.info('Pre-training layer %i, epoch %d, cost %s, time spent %.2f' % (i+1, epoch+1, numpy.mean(pretrain_loss), (sub_end_time - sub_start_time)))
train_data_reader.reset()
logger.info('fine-tuning the %s model' %(model_type))
start_time = time.clock()
best_dnn_model = dnn_model
best_validation_loss = sys.float_info.max
previous_loss = sys.float_info.max
early_stop = 0
epoch = 0
previous_finetune_lr = finetune_lr
hours_seen = 0
seen_frames = 0
train_error = []
sub_start_time = time.clock()
# =============================================================================
# The original script (run_dnn.py) has a training routine that looks like this:
#
# foreach epoch:
# foreach partition:
# foreach minibatch:
# train_model
# validate_performance_and_stop_if_converged
#
    # The current script's routine looks like this:
#
# foreach epoch:
# foreach partition:
# foreach minibatch:
# train_model
# if we've seen another hour of data:
# validate_performance_and_stop_if_converged
#
# In order to jump out of these multiple loops when converged, we'll use this variable:
#
break_main_loop = False
while (epoch < training_epochs):
epoch = epoch + 1
current_momentum = momentum
current_finetune_lr = finetune_lr
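        # Warm-up epochs keep the initial learning rate (and warm-up momentum);
        # after that the learning rate is halved on every epoch.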
if epoch <= warmup_epoch:
current_finetune_lr = finetune_lr
current_momentum = warmup_momentum
else:
current_finetune_lr = previous_finetune_lr * 0.5
previous_finetune_lr = current_finetune_lr
while (not train_data_reader.is_finish()):
shared_train_set_xy, temp_train_set_x, temp_train_set_y = train_data_reader.load_next_partition()
train_set_x.set_value(numpy.asarray(temp_train_set_x, dtype=theano.config.floatX), borrow=True)
train_set_y.set_value(numpy.asarray(temp_train_set_y, dtype=theano.config.floatX), borrow=True)
n_train_batches = train_set_x.get_value().shape[0] / batch_size
logger.debug('this partition: %d frames (divided into %d batches of size %d)' %(train_set_x.get_value(borrow=True).shape[0], n_train_batches, batch_size) )
for minibatch_index in xrange(n_train_batches):
this_train_error = train_fn(minibatch_index, current_finetune_lr, current_momentum)
train_error.append(this_train_error)
if numpy.isnan(this_train_error):
logger.warning('training error over minibatch %d of %d was %s' % (minibatch_index+1,n_train_batches,this_train_error) )
seen_frames += batch_size
if seen_frames >= 720000: ## Hardcoded checking intervals and framerate: 720000 frames per hour at 5ms frame rate
hours_seen += 1
logger.debug('seen %s hour(s) of data -- calculating validation loss'%(hours_seen))
                    ### calculating validation error in 1 big batch can fail for big data --
### use minibatches
#validation_losses = valid_fn()
#this_validation_loss = numpy.mean(validation_losses)
valid_error = []
valid_data_reader.reset()
while (not valid_data_reader.is_finish()):
shared_valid_set_xy, temp_valid_set_x, temp_valid_set_y = valid_data_reader.load_next_partition()
valid_set_x.set_value(numpy.asarray(temp_valid_set_x, dtype=theano.config.floatX), borrow=True)
valid_set_y.set_value(numpy.asarray(temp_valid_set_y, dtype=theano.config.floatX), borrow=True)
n_valid_batches = valid_set_x.get_value().shape[0] / batch_size
for minibatch_index in xrange(n_valid_batches):
v_loss = valid_score_i(minibatch_index)
valid_error.append(v_loss)
#print ' validation for batch %s (%s frames): %s'%(minibatch_index, batch_size, v_loss)
this_validation_loss = numpy.mean(valid_error)
this_validation_loss_std = numpy.std(valid_error)
print 'Mean validation loss: %s, std over minibatches: %s'%(this_validation_loss, this_validation_loss_std)
# this has a possible bias if the minibatches were not all of identical size
                    # but it should not be significant if minibatches are small
this_train_valid_loss = numpy.mean(train_error)
## It might also be interesting to look at how consistent performance is across minibatches:
this_train_valid_loss_std = numpy.std(train_error)
sub_end_time = time.clock()
loss_difference = this_validation_loss - previous_loss
logger.info('epoch %i, validation error %f (std: %f), train error %f (std: %f) time spent %.2f' %(epoch, this_validation_loss, this_validation_loss_std, this_train_valid_loss, this_train_valid_loss_std, (sub_end_time - sub_start_time)))
if plot:
plotlogger.add_plot_point('training convergence','validation set',(hours_seen,this_validation_loss))
plotlogger.add_plot_point('training convergence','training set',(hours_seen,this_train_valid_loss))
plotlogger.save_plot('training convergence',title='Progress of training and validation error',xlabel='hours of data seen',ylabel='error')
if this_validation_loss < best_validation_loss:
best_dnn_model = dnn_model
best_validation_loss = this_validation_loss
logger.debug('validation loss decreased, so saving model')
early_stop = 0
else:
logger.debug('validation loss did not improve')
dbn = best_dnn_model
early_stop += 1
if early_stop > early_stop_epoch:
# too many consecutive checks without surpassing the best model
logger.debug('stopping early')
break_main_loop = True
break
if math.isnan(this_validation_loss):
break_main_loop = True
break
previous_loss = this_validation_loss
sub_start_time = time.clock()
seen_frames = 0
train_error = []
if break_main_loop:
break
if break_main_loop:
break
train_data_reader.reset()
end_time = time.clock()
cPickle.dump(best_dnn_model, open(nnets_file_name, 'wb'))
logger.info('overall training time: %.2fm validation error %f' % ((end_time - start_time) / 60., best_validation_loss))
if plot:
plotlogger.save_plot('training convergence',title='Final training and validation error',xlabel='epochs',ylabel='error')
return best_validation_loss
def dnn_generation(valid_file_list, nnets_file_name, n_ins, n_outs, out_file_list):
logger = logging.getLogger("dnn_generation")
logger.debug('Starting dnn_generation')
plotlogger = logging.getLogger("plotting")
dnn_model = cPickle.load(open(nnets_file_name, 'rb'))
# visualize_dnn(dbn)
file_number = len(valid_file_list)
for i in xrange(file_number):
logger.info('generating %4d of %4d: %s' % (i+1,file_number,valid_file_list[i]) )
fid_lab = open(valid_file_list[i], 'rb')
features = numpy.fromfile(fid_lab, dtype=numpy.float32)
fid_lab.close()
features = features[:(n_ins * (features.size / n_ins))]
features = features.reshape((-1, n_ins))
temp_set_x = features.tolist()
test_set_x = theano.shared(numpy.asarray(temp_set_x, dtype=theano.config.floatX))
predicted_parameter = dnn_model.parameter_prediction(test_set_x=test_set_x)
# predicted_parameter = test_out()
### write to cmp file
predicted_parameter = numpy.array(predicted_parameter, 'float32')
temp_parameter = predicted_parameter
fid = open(out_file_list[i], 'wb')
predicted_parameter.tofile(fid)
logger.debug('saved to %s' % out_file_list[i])
fid.close()
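## --- Illustrative sketch (not part of the original script) ---
## dnn_generation() writes each predicted parameter matrix as raw float32 via
## numpy.tofile(); a hedged example of reading one such file back, mirroring
## how the input label files are loaded above (the file name and n_outs are
## placeholders):
##
##   params = numpy.fromfile(open('gen/utt0001.cmp', 'rb'), dtype=numpy.float32)
##   params = params.reshape((-1, n_outs))  # one row per frame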
## generate bottleneck layer as features
def dnn_hidden_generation(valid_file_list, nnets_file_name, n_ins, n_outs, out_file_list):
logger = logging.getLogger("dnn_generation")
logger.debug('Starting dnn_generation')
plotlogger = logging.getLogger("plotting")
dnn_model = cPickle.load(open(nnets_file_name, 'rb'))
file_number = len(valid_file_list)
for i in xrange(file_number):
logger.info('generating %4d of %4d: %s' % (i+1,file_number,valid_file_list[i]) )
fid_lab = open(valid_file_list[i], 'rb')
features = numpy.fromfile(fid_lab, dtype=numpy.float32)
fid_lab.close()
features = features[:(n_ins * (features.size / n_ins))]
features = features.reshape((-1, n_ins))
temp_set_x = features.tolist()
test_set_x = theano.shared(numpy.asarray(temp_set_x, dtype=theano.config.floatX))
predicted_parameter = dnn_model.generate_top_hidden_layer(test_set_x=test_set_x)
### write to cmp file
predicted_parameter = numpy.array(predicted_parameter, 'float32')
temp_parameter = predicted_parameter
fid = open(out_file_list[i], 'wb')
predicted_parameter.tofile(fid)
logger.debug('saved to %s' % out_file_list[i])
fid.close()
def main_function(cfg):
# get a logger for this main function
logger = logging.getLogger("main")
# get another logger to handle plotting duties
plotlogger = logging.getLogger("plotting")
# later, we might do this via a handler that is created, attached and configured
# using the standard config mechanism of the logging module
# but for now we need to do it manually
plotlogger.set_plot_path(cfg.plot_dir)
#### parameter setting########
hidden_layers_sizes = cfg.hyper_params['hidden_layers_sizes']
####prepare environment
try:
file_id_list = read_file_list(cfg.file_id_scp)
logger.debug('Loaded file id list from %s' % cfg.file_id_scp)
except IOError:
# this means that open(...) threw an error
logger.critical('Could not load file id list from %s' % cfg.file_id_scp)
raise
###total file number including training, development, and testing
total_file_number = len(file_id_list)
data_dir = cfg.data_dir
nn_cmp_dir = os.path.join(data_dir, 'nn' + cfg.combined_feature_name + '_' + str(cfg.cmp_dim))
nn_cmp_norm_dir = os.path.join(data_dir, 'nn_norm' + cfg.combined_feature_name + '_' + str(cfg.cmp_dim))
model_dir = os.path.join(cfg.work_dir, 'nnets_model')
gen_dir = os.path.join(cfg.work_dir, 'gen')
in_file_list_dict = {}
for feature_name in cfg.in_dir_dict.keys():
in_file_list_dict[feature_name] = prepare_file_path_list(file_id_list, cfg.in_dir_dict[feature_name], cfg.file_extension_dict[feature_name], False)
nn_cmp_file_list = prepare_file_path_list(file_id_list, nn_cmp_dir, cfg.cmp_ext)
nn_cmp_norm_file_list = prepare_file_path_list(file_id_list, nn_cmp_norm_dir, cfg.cmp_ext)
###normalisation information
norm_info_file = os.path.join(data_dir, 'norm_info' + cfg.combined_feature_name + '_' + str(cfg.cmp_dim) + '_' + cfg.output_feature_normalisation + '.dat')
### normalise input full context label
    # currently supporting two different forms of linguistic features
# later, we should generalise this
if cfg.label_style == 'HTS':
label_normaliser = HTSLabelNormalisation(question_file_name=cfg.question_file_name)
lab_dim = label_normaliser.dimension
logger.info('Input label dimension is %d' % lab_dim)
suffix=str(lab_dim)
# no longer supported - use new "composed" style labels instead
elif cfg.label_style == 'composed':
# label_normaliser = XMLLabelNormalisation(xpath_file_name=cfg.xpath_file_name)
suffix='composed'
if cfg.process_labels_in_work_dir:
label_data_dir = cfg.work_dir
else:
label_data_dir = data_dir
# the number can be removed
binary_label_dir = os.path.join(label_data_dir, 'binary_label_'+suffix)
nn_label_dir = os.path.join(label_data_dir, 'nn_no_silence_lab_'+suffix)
nn_label_norm_dir = os.path.join(label_data_dir, 'nn_no_silence_lab_norm_'+suffix)
# nn_label_norm_mvn_dir = os.path.join(data_dir, 'nn_no_silence_lab_norm_'+suffix)
in_label_align_file_list = prepare_file_path_list(file_id_list, cfg.in_label_align_dir, cfg.lab_ext, False)
binary_label_file_list = prepare_file_path_list(file_id_list, binary_label_dir, cfg.lab_ext)
nn_label_file_list = prepare_file_path_list(file_id_list, nn_label_dir, cfg.lab_ext)
nn_label_norm_file_list = prepare_file_path_list(file_id_list, nn_label_norm_dir, cfg.lab_ext)
# to do - sanity check the label dimension here?
min_max_normaliser = None
label_norm_file = 'label_norm_%s.dat' %(cfg.label_style)
label_norm_file = os.path.join(label_data_dir, label_norm_file)
if cfg.NORMLAB and (cfg.label_style == 'HTS'):
# simple HTS labels
logger.info('preparing label data (input) using standard HTS style labels')
label_normaliser.perform_normalisation(in_label_align_file_list, binary_label_file_list)
remover = SilenceRemover(n_cmp = lab_dim, silence_pattern = ['*-#+*'])
remover.remove_silence(binary_label_file_list, in_label_align_file_list, nn_label_file_list)
min_max_normaliser = MinMaxNormalisation(feature_dimension = lab_dim, min_value = 0.01, max_value = 0.99)
###use only training data to find min-max information, then apply on the whole dataset
min_max_normaliser.find_min_max_values(nn_label_file_list[0:cfg.train_file_number])
min_max_normaliser.normalise_data(nn_label_file_list, nn_label_norm_file_list)
if cfg.NORMLAB and (cfg.label_style == 'composed'):
# new flexible label preprocessor
logger.info('preparing label data (input) using "composed" style labels')
label_composer = LabelComposer()
label_composer.load_label_configuration(cfg.label_config_file)
logger.info('Loaded label configuration')
# logger.info('%s' % label_composer.configuration.labels )
lab_dim=label_composer.compute_label_dimension()
logger.info('label dimension will be %d' % lab_dim)
if cfg.precompile_xpaths:
label_composer.precompile_xpaths()
# there are now a set of parallel input label files (e.g, one set of HTS and another set of Ossian trees)
# create all the lists of these, ready to pass to the label composer
in_label_align_file_list = {}
for label_style, label_style_required in label_composer.label_styles.iteritems():
if label_style_required:
logger.info('labels of style %s are required - constructing file paths for them' % label_style)
if label_style == 'xpath':
in_label_align_file_list['xpath'] = prepare_file_path_list(file_id_list, cfg.xpath_label_align_dir, cfg.utt_ext, False)
elif label_style == 'hts':
in_label_align_file_list['hts'] = prepare_file_path_list(file_id_list, cfg.hts_label_align_dir, cfg.lab_ext, False)
else:
logger.critical('unsupported label style %s specified in label configuration' % label_style)
raise Exception
# now iterate through the files, one at a time, constructing the labels for them
num_files=len(file_id_list)
logger.info('the label styles required are %s' % label_composer.label_styles)
for i in xrange(num_files):
logger.info('making input label features for %4d of %4d' % (i+1,num_files))
# iterate through the required label styles and open each corresponding label file
# a dictionary of file descriptors, pointing at the required files
required_labels={}
for label_style, label_style_required in label_composer.label_styles.iteritems():
# the files will be a parallel set of files for a single utterance
# e.g., the XML tree and an HTS label file
if label_style_required:
required_labels[label_style] = open(in_label_align_file_list[label_style][i] , 'r')
logger.debug(' opening label file %s' % in_label_align_file_list[label_style][i])
logger.debug('label styles with open files: %s' % required_labels)
label_composer.make_labels(required_labels,out_file_name=binary_label_file_list[i],fill_missing_values=cfg.fill_missing_values,iterate_over_frames=cfg.iterate_over_frames)
# now close all opened files
for fd in required_labels.itervalues():
fd.close()
# silence removal
if cfg.remove_silence_using_binary_labels:
silence_feature = 0 ## use first feature in label -- hardcoded for now
logger.info('Silence removal from label using silence feature: %s'%(label_composer.configuration.labels[silence_feature]))
logger.info('Silence will be removed from CMP files in same way')
## Binary labels have 2 roles: both the thing trimmed and the instructions for trimming:
trim_silence(binary_label_file_list, nn_label_file_list, lab_dim, \
binary_label_file_list, lab_dim, silence_feature, percent_to_keep=5)
else:
logger.info('No silence removal done')
# start from the labels we have just produced, not trimmed versions
nn_label_file_list = binary_label_file_list
min_max_normaliser = MinMaxNormalisation(feature_dimension = lab_dim, min_value = 0.01, max_value = 0.99)
###use only training data to find min-max information, then apply on the whole dataset
min_max_normaliser.find_min_max_values(nn_label_file_list[0:cfg.train_file_number])
min_max_normaliser.normalise_data(nn_label_file_list, nn_label_norm_file_list)
if min_max_normaliser != None:
### save label normalisation information for unseen testing labels
label_min_vector = min_max_normaliser.min_vector
label_max_vector = min_max_normaliser.max_vector
label_norm_info = numpy.concatenate((label_min_vector, label_max_vector), axis=0)
label_norm_info = numpy.array(label_norm_info, 'float32')
fid = open(label_norm_file, 'wb')
label_norm_info.tofile(fid)
fid.close()
logger.info('saved %s vectors to %s' %(label_min_vector.size, label_norm_file))
### make output acoustic data
if cfg.MAKECMP:
logger.info('creating acoustic (output) features')
delta_win = [-0.5, 0.0, 0.5]
acc_win = [1.0, -2.0, 1.0]
acoustic_worker = AcousticComposition(delta_win = delta_win, acc_win = acc_win)
acoustic_worker.prepare_nn_data(in_file_list_dict, nn_cmp_file_list, cfg.in_dimension_dict, cfg.out_dimension_dict)
if cfg.remove_silence_using_binary_labels:
## do this to get lab_dim:
label_composer = LabelComposer()
label_composer.load_label_configuration(cfg.label_config_file)
lab_dim=label_composer.compute_label_dimension()
silence_feature = 0 ## use first feature in label -- hardcoded for now
logger.info('Silence removal from CMP using binary label file')
## overwrite the untrimmed audio with the trimmed version:
trim_silence(nn_cmp_file_list, nn_cmp_file_list, cfg.cmp_dim, \
binary_label_file_list, lab_dim, silence_feature, percent_to_keep=5)
else: ## back off to previous method using HTS labels:
remover = SilenceRemover(n_cmp = cfg.cmp_dim, silence_pattern = ['*-#+*'])
remover.remove_silence(nn_cmp_file_list, in_label_align_file_list, nn_cmp_file_list) # save to itself
### save acoustic normalisation information for normalising the features back
var_dir = os.path.join(data_dir, 'var')
if not os.path.exists(var_dir):
os.makedirs(var_dir)
var_file_dict = {}
for feature_name in cfg.out_dimension_dict.keys():
var_file_dict[feature_name] = os.path.join(var_dir, feature_name + '_' + str(cfg.out_dimension_dict[feature_name]))
### normalise output acoustic data
if cfg.NORMCMP:
logger.info('normalising acoustic (output) features using method %s' % cfg.output_feature_normalisation)
cmp_norm_info = None
if cfg.output_feature_normalisation == 'MVN':
normaliser = MeanVarianceNorm(feature_dimension=cfg.cmp_dim)
###calculate mean and std vectors on the training data, and apply on the whole dataset
global_mean_vector = normaliser.compute_mean(nn_cmp_file_list[0:cfg.train_file_number], 0, cfg.cmp_dim)
global_std_vector = normaliser.compute_std(nn_cmp_file_list[0:cfg.train_file_number], global_mean_vector, 0, cfg.cmp_dim)
normaliser.feature_normalisation(nn_cmp_file_list, nn_cmp_norm_file_list)
cmp_norm_info = numpy.concatenate((global_mean_vector, global_std_vector), axis=0)
elif cfg.output_feature_normalisation == 'MINMAX':
min_max_normaliser = MinMaxNormalisation(feature_dimension = cfg.cmp_dim)
global_mean_vector = min_max_normaliser.compute_mean(nn_cmp_file_list[0:cfg.train_file_number])
global_std_vector = min_max_normaliser.compute_std(nn_cmp_file_list[0:cfg.train_file_number], global_mean_vector)
min_max_normaliser = MinMaxNormalisation(feature_dimension = cfg.cmp_dim, min_value = 0.01, max_value = 0.99)
min_max_normaliser.find_min_max_values(nn_cmp_file_list[0:cfg.train_file_number])
min_max_normaliser.normalise_data(nn_cmp_file_list, nn_cmp_norm_file_list)
cmp_min_vector = min_max_normaliser.min_vector
cmp_max_vector = min_max_normaliser.max_vector
cmp_norm_info = numpy.concatenate((cmp_min_vector, cmp_max_vector), axis=0)
else:
logger.critical('Normalisation type %s is not supported!\n' %(cfg.output_feature_normalisation))
raise
cmp_norm_info = numpy.array(cmp_norm_info, 'float32')
fid = open(norm_info_file, 'wb')
cmp_norm_info.tofile(fid)
fid.close()
logger.info('saved %s vectors to %s' %(cfg.output_feature_normalisation, norm_info_file))
# logger.debug(' value was\n%s' % cmp_norm_info)
feature_index = 0
for feature_name in cfg.out_dimension_dict.keys():
feature_std_vector = numpy.array(global_std_vector[:,feature_index:feature_index+cfg.out_dimension_dict[feature_name]], 'float32')
fid = open(var_file_dict[feature_name], 'w')
feature_std_vector.tofile(fid)
fid.close()
logger.info('saved %s variance vector to %s' %(feature_name, var_file_dict[feature_name]))
# logger.debug(' value was\n%s' % feature_std_vector)
feature_index += cfg.out_dimension_dict[feature_name]
train_x_file_list = nn_label_norm_file_list[0:cfg.train_file_number]
train_y_file_list = nn_cmp_norm_file_list[0:cfg.train_file_number]
valid_x_file_list = nn_label_norm_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number]
valid_y_file_list = nn_cmp_norm_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number]
test_x_file_list = nn_label_norm_file_list[cfg.train_file_number+cfg.valid_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
test_y_file_list = nn_cmp_norm_file_list[cfg.train_file_number+cfg.valid_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
# we need to know the label dimension before training the DNN
# computing that requires us to look at the labels
#
# currently, there are two ways to do this
if cfg.label_style == 'HTS':
label_normaliser = HTSLabelNormalisation(question_file_name=cfg.question_file_name)
lab_dim = label_normaliser.dimension
elif cfg.label_style == 'composed':
label_composer = LabelComposer()
label_composer.load_label_configuration(cfg.label_config_file)
lab_dim=label_composer.compute_label_dimension()
logger.info('label dimension is %d' % lab_dim)
combined_model_arch = str(len(hidden_layers_sizes))
for hid_size in hidden_layers_sizes:
combined_model_arch += '_' + str(hid_size)
# nnets_file_name = '%s/%s_%s_%d.%d.%d.%d.%d.train.%d.model' \
# %(model_dir, cfg.model_type, cfg.combined_feature_name, int(cfg.multistream_switch),
# len(hidden_layers_sizes), hidden_layers_sizes[0],
# lab_dim, cfg.cmp_dim, cfg.train_file_number)
nnets_file_name = '%s/%s_%s_%d_%s_%d.%d.train.%d.model' \
%(model_dir, cfg.model_type, cfg.combined_feature_name, int(cfg.multistream_switch),
combined_model_arch, lab_dim, cfg.cmp_dim, cfg.train_file_number)
### DNN model training
if cfg.TRAINDNN:
logger.info('training DNN')
try:
os.makedirs(model_dir)
except OSError as e:
if e.errno == errno.EEXIST:
# not an error - just means directory already exists
pass
else:
logger.critical('Failed to create model directory %s' % model_dir)
logger.critical(' OS error was: %s' % e.strerror)
raise
try:
# print 'start DNN'
train_DNN(train_xy_file_list = (train_x_file_list, train_y_file_list), \
valid_xy_file_list = (valid_x_file_list, valid_y_file_list), \
nnets_file_name = nnets_file_name, \
n_ins = lab_dim, n_outs = cfg.cmp_dim, ms_outs = cfg.multistream_outs, \
hyper_params = cfg.hyper_params, buffer_size = cfg.buffer_size, plot = cfg.plot)
except KeyboardInterrupt:
logger.critical('train_DNN interrupted via keyboard')
# Could 'raise' the exception further, but that causes a deep traceback to be printed
# which we don't care about for a keyboard interrupt. So, just bail out immediately
sys.exit(1)
except:
logger.critical('train_DNN threw an exception')
raise
### generate parameters from DNN
temp_dir_name = '%s_%s_%d_%d_%d_%d_%d_%d' \
%(cfg.model_type, cfg.combined_feature_name, int(cfg.do_post_filtering), \
cfg.train_file_number, lab_dim, cfg.cmp_dim, \
len(hidden_layers_sizes), hidden_layers_sizes[0])
gen_dir = os.path.join(gen_dir, temp_dir_name)
gen_file_id_list = file_id_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
test_x_file_list = nn_label_norm_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
if cfg.DNNGEN:
logger.info('generating from DNN')
try:
os.makedirs(gen_dir)
except OSError as e:
if e.errno == errno.EEXIST:
# not an error - just means directory already exists
pass
else:
logger.critical('Failed to create generation directory %s' % gen_dir)
logger.critical(' OS error was: %s' % e.strerror)
raise
gen_file_list = prepare_file_path_list(gen_file_id_list, gen_dir, cfg.cmp_ext)
# dnn_generation(valid_x_file_list, nnets_file_name, lab_dim, cfg.cmp_dim, gen_file_list)
dnn_generation(test_x_file_list, nnets_file_name, lab_dim, cfg.cmp_dim, gen_file_list)
logger.debug('denormalising generated output using method %s' % cfg.output_feature_normalisation)
fid = open(norm_info_file, 'rb')
cmp_min_max = numpy.fromfile(fid, dtype=numpy.float32)
fid.close()
cmp_min_max = cmp_min_max.reshape((2, -1))
cmp_min_vector = cmp_min_max[0, ]
cmp_max_vector = cmp_min_max[1, ]
if cfg.output_feature_normalisation == 'MVN':
denormaliser = MeanVarianceNorm(feature_dimension = cfg.cmp_dim)
denormaliser.feature_denormalisation(gen_file_list, gen_file_list, cmp_min_vector, cmp_max_vector)
elif cfg.output_feature_normalisation == 'MINMAX':
denormaliser = MinMaxNormalisation(cfg.cmp_dim, min_value = 0.01, max_value = 0.99, min_vector = cmp_min_vector, max_vector = cmp_max_vector)
denormaliser.denormalise_data(gen_file_list, gen_file_list)
else:
logger.critical('denormalising method %s is not supported!\n' %(cfg.output_feature_normalisation))
raise
        ## perform MLPG to smooth parameter trajectory
        ## if lf0 is included, the output features must have vuv.
generator = ParameterGeneration(gen_wav_features = cfg.gen_wav_features)
generator.acoustic_decomposition(gen_file_list, cfg.cmp_dim, cfg.out_dimension_dict, cfg.file_extension_dict, var_file_dict)
### generate wav
if cfg.GENWAV:
logger.info('reconstructing waveform(s)')
generate_wav(gen_dir, gen_file_id_list, cfg) # generated speech
# generate_wav(nn_cmp_dir, gen_file_id_list) # reference copy synthesis speech
### evaluation: calculate distortion
if cfg.CALMCD:
logger.info('calculating MCD')
ref_data_dir = os.path.join(data_dir, 'ref_data')
ref_mgc_list = prepare_file_path_list(gen_file_id_list, ref_data_dir, cfg.mgc_ext)
ref_bap_list = prepare_file_path_list(gen_file_id_list, ref_data_dir, cfg.bap_ext)
ref_lf0_list = prepare_file_path_list(gen_file_id_list, ref_data_dir, cfg.lf0_ext)
in_gen_label_align_file_list = in_label_align_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
calculator = IndividualDistortionComp()
spectral_distortion = 0.0
bap_mse = 0.0
f0_mse = 0.0
vuv_error = 0.0
valid_file_id_list = file_id_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number]
test_file_id_list = file_id_list[cfg.train_file_number+cfg.valid_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
if cfg.remove_silence_using_binary_labels:
## get lab_dim:
label_composer = LabelComposer()
label_composer.load_label_configuration(cfg.label_config_file)
lab_dim=label_composer.compute_label_dimension()
## use first feature in label -- hardcoded for now
silence_feature = 0
## Use these to trim silence:
untrimmed_test_labels = binary_label_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
if cfg.in_dimension_dict.has_key('mgc'):
if cfg.remove_silence_using_binary_labels:
untrimmed_reference_data = in_file_list_dict['mgc'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
trim_silence(untrimmed_reference_data, ref_mgc_list, cfg.mgc_dim, \
untrimmed_test_labels, lab_dim, silence_feature)
else:
remover = SilenceRemover(n_cmp = cfg.mgc_dim, silence_pattern = ['*-#+*'])
remover.remove_silence(in_file_list_dict['mgc'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number], in_gen_label_align_file_list, ref_mgc_list)
valid_spectral_distortion = calculator.compute_distortion(valid_file_id_list, ref_data_dir, gen_dir, cfg.mgc_ext, cfg.mgc_dim)
test_spectral_distortion = calculator.compute_distortion(test_file_id_list , ref_data_dir, gen_dir, cfg.mgc_ext, cfg.mgc_dim)
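        # The two factors below convert the distortion to Mel-Cepstral Distortion in dB
        # (assuming compute_distortion returns the plain Euclidean cepstral distance):
        # MCD = (10 / ln 10) * sqrt(2 * sum_d (c_d - c_hat_d)^2)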
valid_spectral_distortion *= (10 /numpy.log(10)) * numpy.sqrt(2.0) ##MCD
test_spectral_distortion *= (10 /numpy.log(10)) * numpy.sqrt(2.0) ##MCD
if cfg.in_dimension_dict.has_key('bap'):
if cfg.remove_silence_using_binary_labels:
untrimmed_reference_data = in_file_list_dict['bap'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
trim_silence(untrimmed_reference_data, ref_bap_list, cfg.bap_dim, \
untrimmed_test_labels, lab_dim, silence_feature)
else:
remover = SilenceRemover(n_cmp = cfg.bap_dim, silence_pattern = ['*-#+*'])
remover.remove_silence(in_file_list_dict['bap'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number], in_gen_label_align_file_list, ref_bap_list)
valid_bap_mse = calculator.compute_distortion(valid_file_id_list, ref_data_dir, gen_dir, cfg.bap_ext, cfg.bap_dim)
test_bap_mse = calculator.compute_distortion(test_file_id_list , ref_data_dir, gen_dir, cfg.bap_ext, cfg.bap_dim)
valid_bap_mse = valid_bap_mse / 10.0 ##Cassia's bap is computed from 10*log|S(w)|. if use HTS/SPTK style, do the same as MGC
test_bap_mse = test_bap_mse / 10.0 ##Cassia's bap is computed from 10*log|S(w)|. if use HTS/SPTK style, do the same as MGC
if cfg.in_dimension_dict.has_key('lf0'):
if cfg.remove_silence_using_binary_labels:
untrimmed_reference_data = in_file_list_dict['lf0'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
trim_silence(untrimmed_reference_data, ref_lf0_list, cfg.lf0_dim, \
untrimmed_test_labels, lab_dim, silence_feature)
else:
remover = SilenceRemover(n_cmp = cfg.lf0_dim, silence_pattern = ['*-#+*'])
remover.remove_silence(in_file_list_dict['lf0'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number], in_gen_label_align_file_list, ref_lf0_list)
valid_f0_mse, valid_vuv_error = calculator.compute_distortion(valid_file_id_list, ref_data_dir, gen_dir, cfg.lf0_ext, cfg.lf0_dim)
test_f0_mse , test_vuv_error = calculator.compute_distortion(test_file_id_list , ref_data_dir, gen_dir, cfg.lf0_ext, cfg.lf0_dim)
logger.info('Develop: DNN -- MCD: %.3f dB; BAP: %.3f dB; F0: %.3f Hz; VUV: %.3f%%' \
%(valid_spectral_distortion, valid_bap_mse, valid_f0_mse, valid_vuv_error*100.))
logger.info('Test : DNN -- MCD: %.3f dB; BAP: %.3f dB; F0: %.3f Hz; VUV: %.3f%%' \
%(test_spectral_distortion , test_bap_mse , test_f0_mse , test_vuv_error*100.))
# this can be removed
#
if 0: #to calculate distortion of HMM baseline
hmm_gen_no_silence_dir = '/afs/inf.ed.ac.uk/group/project/dnn_tts/data/nick/nick_hmm_pf_2400_no_silence'
hmm_gen_dir = '/afs/inf.ed.ac.uk/group/project/dnn_tts/data/nick/nick_hmm_pf_2400'
if 1:
hmm_mgc_list = prepare_file_path_list(gen_file_id_list, hmm_gen_dir, cfg.mgc_ext)
hmm_bap_list = prepare_file_path_list(gen_file_id_list, hmm_gen_dir, cfg.bap_ext)
hmm_lf0_list = prepare_file_path_list(gen_file_id_list, hmm_gen_dir, cfg.lf0_ext)
hmm_mgc_no_silence_list = prepare_file_path_list(gen_file_id_list, hmm_gen_no_silence_dir, cfg.mgc_ext)
hmm_bap_no_silence_list = prepare_file_path_list(gen_file_id_list, hmm_gen_no_silence_dir, cfg.bap_ext)
hmm_lf0_no_silence_list = prepare_file_path_list(gen_file_id_list, hmm_gen_no_silence_dir, cfg.lf0_ext)
in_gen_label_align_file_list = in_label_align_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
remover = SilenceRemover(n_cmp = cfg.mgc_dim, silence_pattern = ['*-#+*'])
remover.remove_silence(hmm_mgc_list, in_gen_label_align_file_list, hmm_mgc_no_silence_list)
remover = SilenceRemover(n_cmp = cfg.bap_dim, silence_pattern = ['*-#+*'])
remover.remove_silence(hmm_bap_list, in_gen_label_align_file_list, hmm_bap_no_silence_list)
remover = SilenceRemover(n_cmp = cfg.lf0_dim, silence_pattern = ['*-#+*'])
remover.remove_silence(hmm_lf0_list, in_gen_label_align_file_list, hmm_lf0_no_silence_list)
calculator = IndividualDistortionComp()
spectral_distortion = calculator.compute_distortion(valid_file_id_list, ref_data_dir, hmm_gen_no_silence_dir, cfg.mgc_ext, cfg.mgc_dim)
bap_mse = calculator.compute_distortion(valid_file_id_list, ref_data_dir, hmm_gen_no_silence_dir, cfg.bap_ext, cfg.bap_dim)
f0_mse, vuv_error = calculator.compute_distortion(valid_file_id_list, ref_data_dir, hmm_gen_no_silence_dir, cfg.lf0_ext, cfg.lf0_dim)
spectral_distortion *= (10 /numpy.log(10)) * numpy.sqrt(2.0)
bap_mse = bap_mse / 10.0
logger.info('Develop: HMM -- MCD: %.3f dB; BAP: %.3f dB; F0: %.3f Hz; VUV: %.3f%%' %(spectral_distortion, bap_mse, f0_mse, vuv_error*100.))
spectral_distortion = calculator.compute_distortion(test_file_id_list, ref_data_dir, hmm_gen_no_silence_dir, cfg.mgc_ext, cfg.mgc_dim)
bap_mse = calculator.compute_distortion(test_file_id_list, ref_data_dir, hmm_gen_no_silence_dir, cfg.bap_ext, cfg.bap_dim)
f0_mse, vuv_error = calculator.compute_distortion(test_file_id_list, ref_data_dir, hmm_gen_no_silence_dir, cfg.lf0_ext, cfg.lf0_dim)
spectral_distortion *= (10 /numpy.log(10)) * numpy.sqrt(2.0)
bap_mse = bap_mse / 10.0
logger.info('Test : HMM -- MCD: %.3f dB; BAP: %.3f dB; F0: %.3f Hz; VUV: %.3f%%' %(spectral_distortion, bap_mse, f0_mse, vuv_error*100.))
if __name__ == '__main__':
# these things should be done even before trying to parse the command line
# create a configuration instance
# and get a short name for this instance
cfg=configuration.cfg
# set up logging to use our custom class
logging.setLoggerClass(LoggerPlotter)
# get a logger for this main function
logger = logging.getLogger("main")
if len(sys.argv) != 2:
logger.critical('usage: run_dnn.sh [config file name]')
sys.exit(1)
config_file = sys.argv[1]
config_file = os.path.abspath(config_file)
cfg.configure(config_file)
if cfg.profile:
logger.info('profiling is activated')
import cProfile, pstats
cProfile.run('main_function(cfg)', 'mainstats')
# create a stream for the profiler to write to
profiling_output = StringIO.StringIO()
p = pstats.Stats('mainstats', stream=profiling_output)
# print stats to that stream
# here we just report the top 10 functions, sorted by total amount of time spent in each
p.strip_dirs().sort_stats('tottime').print_stats(10)
# print the result to the log
logger.info('---Profiling result follows---\n%s' % profiling_output.getvalue() )
profiling_output.close()
logger.info('---End of profiling result---')
else:
main_function(cfg)
sys.exit(0)
|
vasiliykochergin/euca2ools
|
refs/heads/master
|
euca2ools/commands/ec2/resetsnapshotattribute.py
|
5
|
# Copyright 2014 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from requestbuilder import Arg
from euca2ools.commands.ec2 import EC2Request
class ResetSnapshotAttribute(EC2Request):
DESCRIPTION = 'Reset an attribute of a snapshot to its default value'
ARGS = [Arg('SnapshotId', metavar='SNAPSHOT', help='''ID of the
snapshot whose attribute should be reset (required)'''),
Arg('-c', '--create-volume-permission', dest='Attribute',
action='store_const', const='createVolumePermission',
required=True, help='''clear the list of users and
groups allowed to create volumes''')]
def print_result(self, _):
print self.tabify(('createVolumePermission', self.args['SnapshotId'],
'RESET'))
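# Illustrative command-line usage (a sketch only: the snapshot ID is hypothetical and
# the command name assumes the usual euca2ools naming convention for this request):
#
#   euca-reset-snapshot-attribute --create-volume-permission snap-12345678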
|
ixc/django_polymorphic
|
refs/heads/master
|
polymorphic/admin/forms.py
|
4
|
from django import forms
from django.contrib.admin.widgets import AdminRadioSelect
from django.utils.translation import ugettext_lazy as _
class PolymorphicModelChoiceForm(forms.Form):
"""
The default form for the ``add_type_form``. Can be overwritten and replaced.
"""
#: Define the label for the radiofield
type_label = _('Type')
ct_id = forms.ChoiceField(label=type_label, widget=AdminRadioSelect(attrs={'class': 'radiolist'}))
def __init__(self, *args, **kwargs):
# Allow to easily redefine the label (a commonly expected usecase)
super(PolymorphicModelChoiceForm, self).__init__(*args, **kwargs)
self.fields['ct_id'].label = self.type_label
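# Minimal sketch (a hypothetical subclass, not part of the original module): the class
# docstring above notes that the default form can be overwritten and replaced, and
# redefining ``type_label`` is the commonly expected customisation.
class ArticleTypeChoiceForm(PolymorphicModelChoiceForm):
    type_label = _('Article type')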
|
rvrheenen/OpenKattis
|
refs/heads/master
|
Python/forests/forests.py
|
1
|
n_people, n_trees = [int(x) for x in input().split()]
people = [[] for _ in range(n_people)]
while True:
try:
p, t = [int(x) for x in input().split()]
people[p-1].append(t)
except EOFError as e:
break
opinions = set([str(sorted(heard)) for heard in people])
print(len(opinions))
|
prakritish/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/amazon/elasticache.py
|
27
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: elasticache
short_description: Manage cache clusters in Amazon Elasticache.
description:
- Manage cache clusters in Amazon Elasticache.
- Returns information about the specified cache cluster.
version_added: "1.4"
author: "Jim Dalton (@jsdalton)"
options:
state:
description:
- C(absent) or C(present) are idempotent actions that will create or destroy a cache cluster as needed. C(rebooted) will reboot the cluster,
resulting in a momentary outage.
choices: ['present', 'absent', 'rebooted']
required: true
name:
description:
- The cache cluster identifier
required: true
engine:
description:
- Name of the cache engine to be used.
required: false
default: memcached
choices: ['redis', 'memcached']
cache_engine_version:
description:
- The version number of the cache engine
required: false
default: None
node_type:
description:
- The compute and memory capacity of the nodes in the cache cluster
required: false
default: cache.m1.small
num_nodes:
description:
- The initial number of cache nodes that the cache cluster will have. Required when state=present.
required: false
cache_port:
description:
- The port number on which each of the cache nodes will accept connections
required: false
default: None
cache_parameter_group:
description:
- The name of the cache parameter group to associate with this cache cluster. If this argument is omitted, the default cache parameter group
for the specified engine will be used.
required: false
default: None
version_added: "2.0"
aliases: [ 'parameter_group' ]
cache_subnet_group:
description:
- The subnet group name to associate with. Only use if inside a vpc. Required if inside a vpc
required: false
default: None
version_added: "2.0"
security_group_ids:
description:
- A list of vpc security group names to associate with this cache cluster. Only use if inside a vpc
required: false
default: None
version_added: "1.6"
cache_security_groups:
description:
- A list of cache security group names to associate with this cache cluster. Must be an empty list if inside a vpc
required: false
default: None
zone:
description:
- The EC2 Availability Zone in which the cache cluster will be created
required: false
default: None
wait:
description:
- Wait for cache cluster result before returning
required: false
default: yes
choices: [ "yes", "no" ]
hard_modify:
description:
- Whether to destroy and recreate an existing cache cluster if necessary in order to modify its state
required: false
default: no
choices: [ "yes", "no" ]
extends_documentation_fragment:
- aws
- ec2
"""
EXAMPLES = """
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.
# Basic example
- elasticache:
name: "test-please-delete"
state: present
engine: memcached
cache_engine_version: 1.4.14
node_type: cache.m1.small
num_nodes: 1
cache_port: 11211
cache_security_groups:
- default
zone: us-east-1d
# Ensure cache cluster is gone
- elasticache:
name: "test-please-delete"
state: absent
# Reboot cache cluster
- elasticache:
name: "test-please-delete"
state: rebooted
"""
import sys
import time
try:
import boto
from boto.elasticache.layer1 import ElastiCacheConnection
from boto.regioninfo import RegionInfo
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
class ElastiCacheManager(object):
"""Handles elasticache creation and destruction"""
EXIST_STATUSES = ['available', 'creating', 'rebooting', 'modifying']
def __init__(self, module, name, engine, cache_engine_version, node_type,
num_nodes, cache_port, cache_parameter_group, cache_subnet_group,
cache_security_groups, security_group_ids, zone, wait,
hard_modify, region, **aws_connect_kwargs):
self.module = module
self.name = name
self.engine = engine
self.cache_engine_version = cache_engine_version
self.node_type = node_type
self.num_nodes = num_nodes
self.cache_port = cache_port
self.cache_parameter_group = cache_parameter_group
self.cache_subnet_group = cache_subnet_group
self.cache_security_groups = cache_security_groups
self.security_group_ids = security_group_ids
self.zone = zone
self.wait = wait
self.hard_modify = hard_modify
self.region = region
self.aws_connect_kwargs = aws_connect_kwargs
self.changed = False
self.data = None
self.status = 'gone'
self.conn = self._get_elasticache_connection()
self._refresh_data()
def ensure_present(self):
"""Ensure cache cluster exists or create it if not"""
if self.exists():
self.sync()
else:
self.create()
def ensure_absent(self):
"""Ensure cache cluster is gone or delete it if not"""
self.delete()
    def ensure_rebooted(self):
        """Ensure cache cluster is rebooted"""
        self.reboot()
def exists(self):
"""Check if cache cluster exists"""
return self.status in self.EXIST_STATUSES
def create(self):
"""Create an ElastiCache cluster"""
if self.status == 'available':
return
if self.status in ['creating', 'rebooting', 'modifying']:
if self.wait:
self._wait_for_status('available')
return
if self.status == 'deleting':
if self.wait:
self._wait_for_status('gone')
else:
msg = "'%s' is currently deleting. Cannot create."
self.module.fail_json(msg=msg % self.name)
try:
response = self.conn.create_cache_cluster(cache_cluster_id=self.name,
num_cache_nodes=self.num_nodes,
cache_node_type=self.node_type,
engine=self.engine,
engine_version=self.cache_engine_version,
cache_security_group_names=self.cache_security_groups,
security_group_ids=self.security_group_ids,
cache_parameter_group_name=self.cache_parameter_group,
cache_subnet_group_name=self.cache_subnet_group,
preferred_availability_zone=self.zone,
port=self.cache_port)
except boto.exception.BotoServerError as e:
self.module.fail_json(msg=e.message)
self._refresh_data()
self.changed = True
if self.wait:
self._wait_for_status('available')
return True
def delete(self):
"""Destroy an ElastiCache cluster"""
if self.status == 'gone':
return
if self.status == 'deleting':
if self.wait:
self._wait_for_status('gone')
return
if self.status in ['creating', 'rebooting', 'modifying']:
if self.wait:
self._wait_for_status('available')
else:
msg = "'%s' is currently %s. Cannot delete."
self.module.fail_json(msg=msg % (self.name, self.status))
try:
response = self.conn.delete_cache_cluster(cache_cluster_id=self.name)
except boto.exception.BotoServerError as e:
self.module.fail_json(msg=e.message)
cache_cluster_data = response['DeleteCacheClusterResponse']['DeleteCacheClusterResult']['CacheCluster']
self._refresh_data(cache_cluster_data)
self.changed = True
if self.wait:
self._wait_for_status('gone')
def sync(self):
"""Sync settings to cluster if required"""
if not self.exists():
msg = "'%s' is %s. Cannot sync."
self.module.fail_json(msg=msg % (self.name, self.status))
if self.status in ['creating', 'rebooting', 'modifying']:
if self.wait:
self._wait_for_status('available')
else:
# Cluster can only be synced if available. If we can't wait
# for this, then just be done.
return
if self._requires_destroy_and_create():
if not self.hard_modify:
msg = "'%s' requires destructive modification. 'hard_modify' must be set to true to proceed."
self.module.fail_json(msg=msg % self.name)
if not self.wait:
msg = "'%s' requires destructive modification. 'wait' must be set to true."
self.module.fail_json(msg=msg % self.name)
self.delete()
self.create()
return
if self._requires_modification():
self.modify()
def modify(self):
"""Modify the cache cluster. Note it's only possible to modify a few select options."""
nodes_to_remove = self._get_nodes_to_remove()
try:
response = self.conn.modify_cache_cluster(cache_cluster_id=self.name,
num_cache_nodes=self.num_nodes,
cache_node_ids_to_remove=nodes_to_remove,
cache_security_group_names=self.cache_security_groups,
cache_parameter_group_name=self.cache_parameter_group,
security_group_ids=self.security_group_ids,
apply_immediately=True,
engine_version=self.cache_engine_version)
except boto.exception.BotoServerError as e:
self.module.fail_json(msg=e.message)
self._refresh_data()
self.changed = True
if self.wait:
self._wait_for_status('available')
def reboot(self):
"""Reboot the cache cluster"""
if not self.exists():
msg = "'%s' is %s. Cannot reboot."
self.module.fail_json(msg=msg % (self.name, self.status))
if self.status == 'rebooting':
return
if self.status in ['creating', 'modifying']:
if self.wait:
self._wait_for_status('available')
else:
msg = "'%s' is currently %s. Cannot reboot."
self.module.fail_json(msg=msg % (self.name, self.status))
# Collect ALL nodes for reboot
cache_node_ids = [cn['CacheNodeId'] for cn in self.data['CacheNodes']]
try:
response = self.conn.reboot_cache_cluster(cache_cluster_id=self.name,
cache_node_ids_to_reboot=cache_node_ids)
except boto.exception.BotoServerError as e:
self.module.fail_json(msg=e.message)
self._refresh_data()
self.changed = True
if self.wait:
self._wait_for_status('available')
def get_info(self):
"""Return basic info about the cache cluster"""
info = {
'name': self.name,
'status': self.status
}
if self.data:
info['data'] = self.data
return info
def _wait_for_status(self, awaited_status):
"""Wait for status to change from present status to awaited_status"""
status_map = {
'creating': 'available',
'rebooting': 'available',
'modifying': 'available',
'deleting': 'gone'
}
if self.status == awaited_status:
# No need to wait, we're already done
return
if status_map[self.status] != awaited_status:
msg = "Invalid awaited status. '%s' cannot transition to '%s'"
self.module.fail_json(msg=msg % (self.status, awaited_status))
if awaited_status not in set(status_map.values()):
msg = "'%s' is not a valid awaited status."
self.module.fail_json(msg=msg % awaited_status)
while True:
time.sleep(1)
self._refresh_data()
if self.status == awaited_status:
break
def _requires_modification(self):
"""Check if cluster requires (nondestructive) modification"""
# Check modifiable data attributes
modifiable_data = {
'NumCacheNodes': self.num_nodes,
'EngineVersion': self.cache_engine_version
}
for key, value in modifiable_data.items():
if value is not None and self.data[key] != value:
return True
# Check cache security groups
cache_security_groups = []
for sg in self.data['CacheSecurityGroups']:
cache_security_groups.append(sg['CacheSecurityGroupName'])
if set(cache_security_groups) != set(self.cache_security_groups):
return True
# check vpc security groups
if len(self.security_group_ids) > 0:
vpc_security_groups = []
security_groups = self.data['SecurityGroups'] or []
for sg in security_groups:
vpc_security_groups.append(sg['SecurityGroupId'])
if set(vpc_security_groups) != set(self.security_group_ids):
return True
return False
def _requires_destroy_and_create(self):
"""
Check whether a destroy and create is required to synchronize cluster.
"""
unmodifiable_data = {
'node_type': self.data['CacheNodeType'],
'engine': self.data['Engine'],
'cache_port': self._get_port()
}
# Only check for modifications if zone is specified
if self.zone is not None:
unmodifiable_data['zone'] = self.data['PreferredAvailabilityZone']
for key, value in unmodifiable_data.items():
if getattr(self, key) is not None and getattr(self, key) != value:
return True
return False
def _get_elasticache_connection(self):
"""Get an elasticache connection"""
try:
endpoint = "elasticache.%s.amazonaws.com" % self.region
connect_region = RegionInfo(name=self.region, endpoint=endpoint)
return ElastiCacheConnection(
region=connect_region,
**self.aws_connect_kwargs
)
except boto.exception.NoAuthHandlerFound as e:
self.module.fail_json(msg=e.message)
def _get_port(self):
"""Get the port. Where this information is retrieved from is engine dependent."""
if self.data['Engine'] == 'memcached':
return self.data['ConfigurationEndpoint']['Port']
elif self.data['Engine'] == 'redis':
# Redis only supports a single node (presently) so just use
# the first and only
return self.data['CacheNodes'][0]['Endpoint']['Port']
def _refresh_data(self, cache_cluster_data=None):
"""Refresh data about this cache cluster"""
if cache_cluster_data is None:
try:
response = self.conn.describe_cache_clusters(cache_cluster_id=self.name,
show_cache_node_info=True)
except boto.exception.BotoServerError:
self.data = None
self.status = 'gone'
return
cache_cluster_data = response['DescribeCacheClustersResponse']['DescribeCacheClustersResult']['CacheClusters'][0]
self.data = cache_cluster_data
self.status = self.data['CacheClusterStatus']
# The documentation for elasticache lies -- status on rebooting is set
# to 'rebooting cache cluster nodes' instead of 'rebooting'. Fix it
# here to make status checks etc. more sane.
if self.status == 'rebooting cache cluster nodes':
self.status = 'rebooting'
def _get_nodes_to_remove(self):
"""If there are nodes to remove, it figures out which need to be removed"""
num_nodes_to_remove = self.data['NumCacheNodes'] - self.num_nodes
if num_nodes_to_remove <= 0:
return None
if not self.hard_modify:
msg = "'%s' requires removal of cache nodes. 'hard_modify' must be set to true to proceed."
self.module.fail_json(msg=msg % self.name)
cache_node_ids = [cn['CacheNodeId'] for cn in self.data['CacheNodes']]
return cache_node_ids[-num_nodes_to_remove:]
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state ={'required': True, 'choices': ['present', 'absent', 'rebooted']},
name ={'required': True},
engine ={'required': False, 'default': 'memcached'},
cache_engine_version ={'required': False},
node_type ={'required': False, 'default': 'cache.m1.small'},
num_nodes ={'required': False, 'default': None, 'type': 'int'},
# alias for compat with the original PR 1950
cache_parameter_group ={'required': False, 'default': None, 'aliases': ['parameter_group']},
cache_port ={'required': False, 'type': 'int'},
cache_subnet_group ={'required': False, 'default': None},
cache_security_groups ={'required': False, 'default': [], 'type': 'list'},
security_group_ids ={'required': False, 'default': [], 'type': 'list'},
zone ={'required': False, 'default': None},
wait ={'required': False, 'type' : 'bool', 'default': True},
hard_modify ={'required': False, 'type': 'bool', 'default': False}
)
)
module = AnsibleModule(
argument_spec=argument_spec,
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
name = module.params['name']
state = module.params['state']
engine = module.params['engine']
cache_engine_version = module.params['cache_engine_version']
node_type = module.params['node_type']
num_nodes = module.params['num_nodes']
cache_port = module.params['cache_port']
cache_subnet_group = module.params['cache_subnet_group']
cache_security_groups = module.params['cache_security_groups']
security_group_ids = module.params['security_group_ids']
zone = module.params['zone']
wait = module.params['wait']
hard_modify = module.params['hard_modify']
cache_parameter_group = module.params['cache_parameter_group']
if cache_subnet_group and cache_security_groups:
module.fail_json(msg="Can't specify both cache_subnet_group and cache_security_groups")
if state == 'present' and not num_nodes:
module.fail_json(msg="'num_nodes' is a required parameter. Please specify num_nodes > 0")
if not region:
module.fail_json(msg=str("Either region or AWS_REGION or EC2_REGION environment variable or boto config aws_region or ec2_region must be set."))
elasticache_manager = ElastiCacheManager(module, name, engine,
cache_engine_version, node_type,
num_nodes, cache_port,
cache_parameter_group,
cache_subnet_group,
cache_security_groups,
security_group_ids, zone, wait,
hard_modify, region, **aws_connect_kwargs)
if state == 'present':
elasticache_manager.ensure_present()
elif state == 'absent':
elasticache_manager.ensure_absent()
elif state == 'rebooted':
elasticache_manager.ensure_rebooted()
facts_result = dict(changed=elasticache_manager.changed,
elasticache=elasticache_manager.get_info())
module.exit_json(**facts_result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
|
jideobs/twilioAngular
|
refs/heads/master
|
venv/lib/python2.7/site-packages/twilio/rest/resources/task_router/activities.py
|
38
|
from .. import NextGenInstanceResource, NextGenListResource
class Activity(NextGenInstanceResource):
"""
An Activity resource.
See the `TaskRouter API reference
<https://www.twilio.com/docs/taskrouter/activities>_`
for more information.
.. attribute:: sid
The unique ID for this Activity.
.. attribute:: account_sid
The unique ID of the Account that owns this Activity.
.. attribute:: workspace_sid
The unique ID of the :class:`Workspace` that owns this Activity.
.. attribute:: friendly_name
A human-readable name for the Activity, such as 'on-call', 'break',
'email', etc. These names will be used to calculate and expose
statistics about workers, and give you visibility into the state of
each of your workers.
.. attribute:: available
Boolean value indicating whether the worker should be eligible to
receive a Task when they occupy this Activity. For example, in an
activity called 'On Call', the worker would be unavailable to receive
additional Task assignments.
.. attribute:: date_created
The date this Activity was created, given as UTC in ISO 8601 format.
.. attribute:: date_updated
The date this Activity was last updated, given as UTC in ISO 8601
format.
"""
def delete(self):
"""
Delete an activity.
"""
return self.parent.delete_instance(self.name)
def update(self, **kwargs):
"""
Update an activity.
"""
return self.parent.update_instance(self.name, kwargs)
class Activities(NextGenListResource):
""" A list of Activity resources """
name = "Activities"
instance = Activity
def create(self, friendly_name, available):
"""
Create an Activity.
:param friendly_name: A human-readable name for the activity, such as
'On Call', 'Break', 'Email', etc. Must be unique in this Workspace.
These names will be used to calculate and expose statistics about
workers, and give you visibility into the state of each of your
workers.
:param available: Boolean value indicating whether the worker should be
eligible to receive a Task when they occupy this Activity. For
example, a call center might have an activity named 'On Call' with
an availability set to 'false'.
"""
return self.create_instance({'friendly_name': friendly_name,
'available': available})
def delete(self, sid):
"""
Delete the given activity
"""
return self.delete_instance(sid)
def update(self, sid, **kwargs):
"""
Update an :class:`Activity` with the given parameters.
All the parameters are describe above in :meth:`create`
"""
return self.update_instance(sid, kwargs)
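# Illustrative usage sketch (assumes an ``activities`` list resource has already been
# obtained from a TaskRouter workspace client; the friendly names are hypothetical):
#
#   activity = activities.create(friendly_name='On Call', available=False)
#   activities.update(activity.sid, friendly_name='Break')
#   activities.delete(activity.sid)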
|
Fireblend/chromium-crosswalk
|
refs/heads/master
|
tools/telemetry/telemetry/core/backends/remote/__init__.py
|
1201
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
|
gaddman/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/docker/docker_stack.py
|
23
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Dario Zanzico (git@dariozanzico.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: docker_stack
author: "Dario Zanzico (@dariko)"
short_description: docker stack module
description:
- Manage docker stacks using the 'docker stack' command
on the target node
(see examples)
version_added: "2.8"
options:
name:
required: true
description:
- Stack name
state:
description:
- Service state.
default: "present"
choices:
- present
- absent
compose:
required: true
default: []
description:
- List of compose definitions. Any element may be a string
referring to the path of the compose file on the target host
or the YAML contents of a compose file nested as dictionary.
prune:
required: false
default: false
description:
- If true will add the C(--prune) option to the C(docker stack deploy) command.
This will have docker remove the services not present in the
current stack definition.
type: bool
with_registry_auth:
required: false
default: false
description:
- If true will add the C(--with-registry-auth) option to the C(docker stack deploy) command.
This will have docker send registry authentication details to Swarm agents.
type: bool
resolve_image:
required: false
choices: ["always", "changed", "never"]
description:
- If set will add the C(--resolve-image) option to the C(docker stack deploy) command.
This will have docker query the registry to resolve image digest and
supported platforms. If not set, docker use "always" by default.
absent_retries:
required: false
default: 0
description:
- If C(>0) and C(state==absent) the module will retry up to
C(absent_retries) times to delete the stack until all the
resources have been effectively deleted.
If the last try still reports the stack as not completely
removed the module will fail.
absent_retries_interval:
required: false
default: 1
description:
- Interval in seconds between C(absent_retries)
requirements:
- jsondiff
- pyyaml
'''
RETURN = '''
docker_stack_spec_diff:
description: |
dictionary containing the differences between the 'Spec' field
of the stack services before and after applying the new stack
definition.
sample: >
"docker_stack_specs_diff":
{'test_stack_test_service': {u'TaskTemplate': {u'ContainerSpec': {delete: [u'Env']}}}}
returned: on change
type: dict
'''
EXAMPLES = '''
- name: deploy 'stack1' stack from file
docker_stack:
state: present
name: stack1
compose:
- /opt/stack.compose
- name: deploy 'stack2' from base file and yaml overrides
docker_stack:
state: present
name: stack2
compose:
- /opt/stack.compose
- version: '3'
services:
web:
image: nginx:latest
environment:
ENVVAR: envvar
- name: deprovision 'stack1'
docker_stack:
state: absent
'''
import json
import tempfile
from ansible.module_utils.six import string_types
from time import sleep
try:
from jsondiff import diff as json_diff
HAS_JSONDIFF = True
except ImportError:
HAS_JSONDIFF = False
try:
from yaml import dump as yaml_dump
HAS_YAML = True
except ImportError:
HAS_YAML = False
from ansible.module_utils.basic import AnsibleModule, os
def docker_stack_services(module, stack_name):
docker_bin = module.get_bin_path('docker', required=True)
rc, out, err = module.run_command([docker_bin,
"stack",
"services",
stack_name,
"--format",
"{{.Name}}"])
if err == "Nothing found in stack: %s\n" % stack_name:
return []
return out.strip().split('\n')
def docker_service_inspect(module, service_name):
docker_bin = module.get_bin_path('docker', required=True)
rc, out, err = module.run_command([docker_bin,
"service",
"inspect",
service_name])
if rc != 0:
return None
else:
ret = json.loads(out)[0]['Spec']
return ret
def docker_stack_deploy(module, stack_name, compose_files):
docker_bin = module.get_bin_path('docker', required=True)
command = [docker_bin, "stack", "deploy"]
if module.params["prune"]:
command += ["--prune"]
if module.params["with_registry_auth"]:
command += ["--with-registry-auth"]
if module.params["resolve_image"]:
command += ["--resolve-image",
module.params["resolve_image"]]
for compose_file in compose_files:
command += ["--compose-file",
compose_file]
command += [stack_name]
return module.run_command(command)
def docker_stack_inspect(module, stack_name):
ret = {}
for service_name in docker_stack_services(module, stack_name):
ret[service_name] = docker_service_inspect(module, service_name)
return ret
def docker_stack_rm(module, stack_name, retries, interval):
docker_bin = module.get_bin_path('docker', required=True)
command = [docker_bin, "stack", "rm", stack_name]
rc, out, err = module.run_command(command)
while err != "Nothing found in stack: %s\n" % stack_name and retries > 0:
sleep(interval)
retries = retries - 1
rc, out, err = module.run_command(command)
return rc, out, err
def main():
module = AnsibleModule(
argument_spec={
'name': dict(required=True, type='str'),
'compose': dict(required=False, type='list', default=[]),
'prune': dict(default=False, type='bool'),
'with_registry_auth': dict(default=False, type='bool'),
'resolve_image': dict(type='str', choices=['always', 'changed', 'never']),
'state': dict(default='present', choices=['present', 'absent']),
'absent_retries': dict(type='int', default=0),
'absent_retries_interval': dict(type='int', default=1)
},
supports_check_mode=False
)
if not HAS_JSONDIFF:
return module.fail_json(msg="jsondiff is not installed, try 'pip install jsondiff'")
if not HAS_YAML:
return module.fail_json(msg="yaml is not installed, try 'pip install pyyaml'")
state = module.params['state']
compose = module.params['compose']
name = module.params['name']
absent_retries = module.params['absent_retries']
absent_retries_interval = module.params['absent_retries_interval']
if state == 'present':
if not compose:
module.fail_json(msg=("compose parameter must be a list "
"containing at least one element"))
compose_files = []
for i, compose_def in enumerate(compose):
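            # dict entries are dumped to a temporary YAML file so that `docker stack
            # deploy` can consume them; plain strings are treated as paths to compose
            # files that already exist on the target host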
if isinstance(compose_def, dict):
compose_file_fd, compose_file = tempfile.mkstemp()
module.add_cleanup_file(compose_file)
with os.fdopen(compose_file_fd, 'w') as stack_file:
compose_files.append(compose_file)
stack_file.write(yaml_dump(compose_def))
elif isinstance(compose_def, string_types):
compose_files.append(compose_def)
else:
                module.fail_json(msg="compose element '%s' must be a "
                                     "string or a dictionary" % compose_def)
before_stack_services = docker_stack_inspect(module, name)
rc, out, err = docker_stack_deploy(module, name, compose_files)
after_stack_services = docker_stack_inspect(module, name)
if rc != 0:
module.fail_json(msg="docker stack up deploy command failed",
out=out,
rc=rc, err=err)
before_after_differences = json_diff(before_stack_services,
after_stack_services)
for k in before_after_differences.keys():
if isinstance(before_after_differences[k], dict):
before_after_differences[k].pop('UpdatedAt', None)
before_after_differences[k].pop('Version', None)
if not list(before_after_differences[k].keys()):
before_after_differences.pop(k)
if not before_after_differences:
module.exit_json(changed=False)
else:
module.exit_json(
changed=True,
docker_stack_spec_diff=json_diff(before_stack_services,
after_stack_services,
dump=True))
else:
if docker_stack_services(module, name):
rc, out, err = docker_stack_rm(module, name, absent_retries, absent_retries_interval)
if rc != 0:
module.fail_json(msg="'docker stack down' command failed",
out=out,
rc=rc,
err=err)
else:
module.exit_json(changed=True, msg=out, err=err, rc=rc)
module.exit_json(changed=False)
if __name__ == "__main__":
main()
|
SciTools/biggus
|
refs/heads/master
|
biggus/tests/unit/init/test_mean.py
|
3
|
# (C) British Crown Copyright 2014 - 2016, Met Office
#
# This file is part of Biggus.
#
# Biggus is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Biggus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Biggus. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for `biggus.mean`."""
from __future__ import absolute_import, division, print_function
from six.moves import (filter, input, map, range, zip) # noqa
import numpy as np
import numpy.ma as ma
import unittest
import biggus
from biggus import mean
class TestInvalidAxis(unittest.TestCase):
def setUp(self):
self.array = biggus.NumpyArrayAdapter(np.arange(12))
def test_none(self):
with self.assertRaises(biggus.AxisSupportError):
mean(self.array)
def test_too_large(self):
with self.assertRaises(ValueError):
mean(self.array, axis=1)
def test_too_small(self):
with self.assertRaises(ValueError):
mean(self.array, axis=-2)
def test_multiple(self):
array = biggus.NumpyArrayAdapter(np.arange(12).reshape(3, 4))
with self.assertRaises(biggus.AxisSupportError):
mean(array, axis=(0, 1))
class TestAggregationDtype(unittest.TestCase):
def _check(self, source, target):
array = biggus.NumpyArrayAdapter(np.arange(2, dtype=source))
agg = mean(array, axis=0)
self.assertEqual(agg.dtype, target)
def test_int_to_float(self):
dtypes = [np.int8, np.int16, np.int32, np.int]
for dtype in dtypes:
self._check(dtype, np.float)
def test_bool_to_float(self):
self._check(np.bool, np.float)
def test_floats(self):
dtypes = [np.float16, np.float32, np.float]
for dtype in dtypes:
self._check(dtype, dtype)
def test_complex(self):
self._check(np.complex, np.complex)
class TestNumpyArrayAdapter(unittest.TestCase):
def setUp(self):
self.data = np.arange(12)
def _check(self, data, dtype=None, shape=None):
data = np.asarray(data, dtype=dtype)
if shape is not None:
data = data.reshape(shape)
array = biggus.NumpyArrayAdapter(data)
result = mean(array, axis=0).ndarray()
expected = np.mean(data, axis=0)
if expected.ndim == 0:
expected = np.asarray(expected)
np.testing.assert_array_equal(result, expected)
def test_flat_int(self):
self._check(self.data)
def test_multi_int(self):
self._check(self.data, shape=(3, 4))
def test_flat_float(self):
self._check(self.data, dtype=np.float)
def test_multi_float(self):
self._check(self.data, dtype=np.float, shape=(3, 4))
class TestNumpyArrayAdapterMasked(unittest.TestCase):
def _check(self, data):
array = biggus.NumpyArrayAdapter(data)
result = mean(array, axis=0).masked_array()
expected = ma.mean(data, axis=0)
if expected.ndim == 0:
expected = ma.asarray(expected)
np.testing.assert_array_equal(result.filled(), expected.filled())
np.testing.assert_array_equal(result.mask, expected.mask)
def test_no_mask_flat(self):
for dtype in [np.int, np.float]:
data = ma.arange(12, dtype=dtype)
self._check(data)
def test_no_mask_multi(self):
for dtype in [np.int, np.float]:
data = ma.arange(12, dtype=dtype).reshape(3, 4)
self._check(data)
def test_flat(self):
for dtype in [np.int, np.float]:
data = ma.arange(12, dtype=dtype)
data[::2] = ma.masked
self._check(data)
data.mask = ma.nomask
data[1::2] = ma.masked
self._check(data)
def test_multi(self):
for dtype in [np.int, np.float]:
data = ma.arange(12, dtype=dtype)
data[::2] = ma.masked
self._check(data.reshape(3, 4))
data = ma.arange(12, dtype=dtype)
data[1::2] = ma.masked
self._check(data.reshape(3, 4))
data = ma.arange(12, dtype=dtype).reshape(3, 4)
data[::2] = ma.masked
self._check(data)
data = ma.arange(12, dtype=dtype).reshape(3, 4)
data[1::2] = ma.masked
self._check(data)
if __name__ == '__main__':
unittest.main()
|
tdruez/django-registration
|
refs/heads/master
|
registration/tests/test_forms.py
|
3
|
"""
Exercise django-registration's built-in form classes.
"""
from django.contrib.auth.models import User
from django.test import TestCase
from django.utils.six import text_type
from .. import forms
class RegistrationFormTests(TestCase):
valid_data = {
'username': 'testuser',
'email': 'test@example.com',
'password1': 'swordfish',
'password2': 'swordfish',
}
def test_username_format(self):
"""
Invalid usernames are rejected.
"""
bad_usernames = [
'user!example', 'valid?',
]
for username in bad_usernames:
data = self.valid_data.copy()
data.update(username=username)
form = forms.RegistrationForm(data=data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['username'],
[text_type(forms.BAD_USERNAME)]
)
def test_user_uniqueness(self):
"""
Existing usernames cannot be re-used.
"""
User.objects.create(
username='testuser',
email='test@example.com',
password='swordfish'
)
form = forms.RegistrationForm(data=self.valid_data.copy())
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['username'],
[text_type(forms.DUPLICATE_USER)]
)
def test_password_match(self):
"""
Both submitted passwords must match.
"""
data = self.valid_data.copy()
data.update(password2='swordfishes')
form = forms.RegistrationForm(data=data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['__all__'],
[text_type(forms.PASSWORD_MISMATCH)]
)
def test_tos_field(self):
"""
The terms-of-service field on RegistrationFormTermsOfService
is required.
"""
form = forms.RegistrationFormTermsOfService(
data=self.valid_data.copy()
)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['tos'],
[text_type(forms.TOS_REQUIRED)]
)
def test_email_uniqueness(self):
"""
Email uniqueness is enforced by RegistrationFormUniqueEmail.
"""
User.objects.create(
username='testuser2',
email='test@example.com',
password='swordfish'
)
form = forms.RegistrationFormUniqueEmail(
data=self.valid_data.copy()
)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['email'],
[text_type(forms.DUPLICATE_EMAIL)]
)
data = self.valid_data.copy()
data.update(email='test2@example.com')
form = forms.RegistrationFormUniqueEmail(
data=data
)
self.assertTrue(form.is_valid())
def test_no_free_email(self):
"""
Free email domains are disallowed by
RegistrationFormNoFreeEmail.
"""
for domain in forms.RegistrationFormNoFreeEmail.bad_domains:
data = self.valid_data.copy()
data.update(
email='testuser@%s' % domain
)
form = forms.RegistrationFormNoFreeEmail(
data=data
)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['email'],
[text_type(forms.FREE_EMAIL)]
)
form = forms.RegistrationFormNoFreeEmail(
data=self.valid_data.copy()
)
self.assertTrue(form.is_valid())
|
consulo/consulo-python
|
refs/heads/master
|
plugin/src/main/dist/helpers/pycharm/buildout_engulfer.py
|
85
|
# Expects two env variables:
# PYCHARM_ENGULF_SCRIPT = which script should be engulfed.
# PYCHARM_PREPEND_SYSPATH = which entries should be added to the beginning of sys.path;
# items must be separated by path separator. May be unset.
#
# Given script is loaded and compiled, then sys.path is prepended as requested.
# On win32, getpass is changed to insecure but working version.
# Then the compiled script evaluated, as if it were run by python interpreter itself.
# Works OK with debugger.
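# Example environment (hypothetical paths):
#   PYCHARM_ENGULF_SCRIPT=/home/user/project/bin/buildout-generated-script
#   PYCHARM_PREPEND_SYSPATH=/home/user/project/eggs/foo.egg:/home/user/project/eggs/bar.egg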
import os
import sys
target = os.getenv("PYCHARM_ENGULF_SCRIPT")
assert target, "PYCHARM_ENGULF_SCRIPT must be set"
print("Running script through buildout: " + target)
filepath = os.path.abspath(target)
f = None
try:
f = open(filepath, "r")
source = "\n".join((s.rstrip() for s in f.readlines()))
finally:
if f:
f.close()
from fix_getpass import fixGetpass
fixGetpass()
#prependable = os.getenv("PYCHARM_PREPEND_SYSPATH")
#if prependable:
# sys.path[0:0] = [x for x in prependable.split(os.path.pathsep)]
# include engulfed's path, everyone expects this
our_path = os.path.dirname(filepath)
if our_path not in sys.path:
sys.path.append(our_path)
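# compile() is invoked here only as an up-front syntax check against the real file name;
# its result is discarded and the raw source is exec'd below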
compile(source, target, "exec")
exec(source)
# here we come
|
miguelparaiso/OdooAccessible
|
refs/heads/master
|
addons/l10n_cn/__init__.py
|
339
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2007-2014 Jeff Wang(<http://jeff@osbzr.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
dharhas/wibblywobbly
|
refs/heads/master
|
pelicanconf.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
import os
AUTHOR = u'Dharhas Pothina'
SITENAME = u'wibbly wobbly'
SITESUBTITLE = u'Musings on data through time and space'
SITEURL = '' # change in publishconf.py
# Times and dates
DEFAULT_DATE_FORMAT = '%b %d, %Y'
TIMEZONE = 'US/Central'
DEFAULT_LANG = u'en'
# Set the article URL
ARTICLE_URL = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/'
ARTICLE_SAVE_AS = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/index.html'
# Title menu options
MENUITEMS = [('Archives', '/archives.html'),
]
NEWEST_FIRST_ARCHIVES = False
#Github include settings
GITHUB_USER = 'dharhas'
GITHUB_REPO_COUNT = 3
GITHUB_SKIP_FORK = True
GITHUB_SHOW_USER_LINK = True
# Blogroll
#LINKS = (('Pelican', 'http://docs.notmyidea.org/alexis/pelican/'),
# ('Python.org', 'http://python.org'),
# ('Jinja2', 'http://jinja.pocoo.org'),
# ('You can modify those links in your config file', '#'),)
# Social widget
#SOCIAL = (('You can add links in your config file', '#'),
# ('Another social link', '#'),)
DEFAULT_PAGINATION = 10
# STATIC_OUT_DIR requires https://github.com/jakevdp/pelican/tree/specify-static
#STATIC_OUT_DIR = ''
#STATIC_PATHS = ['images', 'figures', 'downloads']
#FILES_TO_COPY = [('favicon.png', 'favicon.png')]
# This requires Pelican 3.3+
STATIC_PATHS = ['images', 'figures', 'downloads', 'favicon.png']
CODE_DIR = 'downloads/code'
NOTEBOOK_DIR = 'downloads/notebooks'
# Theme and plugins
# Theme requires http://github.com/duilio/pelican-octopress-theme/
# Plugins require http://github.com/getpelican/pelican-plugins/
THEME = os.path.join(os.environ.get('HOME'),
'blog/pelican-octopress-theme/')
PLUGIN_PATH = os.path.join(os.environ.get('HOME'),
'blog/pelican-plugins')
PLUGINS = ['liquid_tags.img', 'liquid_tags.video',
'liquid_tags.include_code', 'liquid_tags.notebook',
'liquid_tags.literal']
# The theme file should be updated so that the base header contains the line:
#
# {% if EXTRA_HEADER %}
# {{ EXTRA_HEADER }}
# {% endif %}
#
# This header file is automatically generated by the notebook plugin
if not os.path.exists('_nb_header.html'):
import warnings
warnings.warn("_nb_header.html not found. "
"Rerun make html to finalize build.")
else:
EXTRA_HEADER = open('_nb_header.html').read().decode('utf-8')
# Sharing
#TWITTER_USER = 'jakevdp'
GOOGLE_PLUS_USER = 'dharhas'
GOOGLE_PLUS_ONE = True
GOOGLE_PLUS_HIDDEN = False
FACEBOOK_LIKE = False
#TWITTER_TWEET_BUTTON = True
#TWITTER_LATEST_TWEETS = True
#TWITTER_FOLLOW_BUTTON = True
#TWITTER_TWEET_COUNT = 3
#TWITTER_SHOW_REPLIES = 'false'
#TWITTER_SHOW_FOLLOWER_COUNT = 'true'
# RSS/Atom feeds
FEED_DOMAIN = SITEURL
FEED_ATOM = 'atom.xml'
# Search
SEARCH_BOX = True
|
xNovax/SickRage
|
refs/heads/master
|
lib/hachoir_core/field/seekable_field_set.py
|
74
|
from hachoir_core.field import BasicFieldSet, GenericFieldSet, ParserError, createRawField
from hachoir_core.error import HACHOIR_ERRORS
# getgaps(int, int, [listof (int, int)]) -> generator of (int, int)
# Gets all the gaps not covered by a block in `blocks` from `start` for `length` units.
def getgaps(start, length, blocks):
'''
Example:
>>> list(getgaps(0, 20, [(15,3), (6,2), (6,2), (1,2), (2,3), (11,2), (9,5)]))
[(0, 1), (5, 1), (8, 1), (14, 1), (18, 2)]
'''
# done this way to avoid mutating the original
blocks = sorted(blocks, key=lambda b: b[0])
end = start+length
for s, l in blocks:
if s > start:
yield (start, s-start)
start = s
if s+l > start:
start = s+l
if start < end:
yield (start, end-start)
class RootSeekableFieldSet(GenericFieldSet):
def seekBit(self, address, relative=True):
if not relative:
address -= self.absolute_address
if address < 0:
raise ParserError("Seek below field set start (%s.%s)" % divmod(address, 8))
self._current_size = address
return None
def seekByte(self, address, relative=True):
return self.seekBit(address*8, relative)
def _fixLastField(self):
"""
Try to fix last field when we know current field set size.
Returns new added field if any, or None.
"""
assert self._size is not None
# Stop parser
message = ["stop parser"]
self._field_generator = None
# If last field is too big, delete it
while self._size < self._current_size:
field = self._deleteField(len(self._fields)-1)
message.append("delete field %s" % field.path)
assert self._current_size <= self._size
blocks = [(x.absolute_address, x.size) for x in self._fields]
fields = []
self._size = max(self._size, max(a+b for a,b in blocks) - self.absolute_address)
for start, length in getgaps(self.absolute_address, self._size, blocks):
self.seekBit(start, relative=False)
field = createRawField(self, length, "unparsed[]")
self.setUniqueFieldName(field)
self._fields.append(field.name, field)
fields.append(field)
message.append("found unparsed segment: start %s, length %s" % (start, length))
self.seekBit(self._size + self.absolute_address, relative=False)
message = ", ".join(message)
if fields:
self.warning("[Autofix] Fix parser error: " + message)
return fields
def _stopFeeding(self):
new_field = None
if self._size is None:
if self._parent:
self._size = self._current_size
new_field = self._fixLastField()
self._field_generator = None
return new_field
class SeekableFieldSet(RootSeekableFieldSet):
def __init__(self, parent, name, description=None, size=None):
assert issubclass(parent.__class__, BasicFieldSet)
RootSeekableFieldSet.__init__(self, parent, name, parent.stream, description, size)
|
theheros/kbengine
|
refs/heads/master
|
kbe/res/scripts/common/Lib/test/test_grp.py
|
3
|
"""Test script for the grp module."""
import unittest
from test import support
grp = support.import_module('grp')
class GroupDatabaseTestCase(unittest.TestCase):
def check_value(self, value):
# check that a grp tuple has the entries and
# attributes promised by the docs
self.assertEqual(len(value), 4)
self.assertEqual(value[0], value.gr_name)
self.assertIsInstance(value.gr_name, str)
self.assertEqual(value[1], value.gr_passwd)
self.assertIsInstance(value.gr_passwd, str)
self.assertEqual(value[2], value.gr_gid)
self.assertIsInstance(value.gr_gid, int)
self.assertEqual(value[3], value.gr_mem)
self.assertIsInstance(value.gr_mem, list)
def test_values(self):
entries = grp.getgrall()
for e in entries:
self.check_value(e)
if len(entries) > 1000: # Huge group file (NIS?) -- skip the rest
return
for e in entries:
e2 = grp.getgrgid(e.gr_gid)
self.check_value(e2)
self.assertEqual(e2.gr_gid, e.gr_gid)
name = e.gr_name
if name.startswith('+') or name.startswith('-'):
# NIS-related entry
continue
e2 = grp.getgrnam(name)
self.check_value(e2)
# There are instances where getgrall() returns group names in
# lowercase while getgrgid() returns proper casing.
# Discovered on Ubuntu 5.04 (custom).
self.assertEqual(e2.gr_name.lower(), name.lower())
def test_errors(self):
self.assertRaises(TypeError, grp.getgrgid)
self.assertRaises(TypeError, grp.getgrnam)
self.assertRaises(TypeError, grp.getgrall, 42)
# try to get some errors
bynames = {}
bygids = {}
for (n, p, g, mem) in grp.getgrall():
if not n or n == '+':
continue # skip NIS entries etc.
bynames[n] = g
bygids[g] = n
allnames = list(bynames.keys())
namei = 0
fakename = allnames[namei]
while fakename in bynames:
chars = list(fakename)
for i in range(len(chars)):
if chars[i] == 'z':
chars[i] = 'A'
break
elif chars[i] == 'Z':
continue
else:
chars[i] = chr(ord(chars[i]) + 1)
break
else:
namei = namei + 1
try:
fakename = allnames[namei]
except IndexError:
# should never happen... if so, just forget it
break
fakename = ''.join(chars)
self.assertRaises(KeyError, grp.getgrnam, fakename)
# Choose a non-existent gid.
fakegid = 4127
while fakegid in bygids:
fakegid = (fakegid * 3) % 0x10000
self.assertRaises(KeyError, grp.getgrgid, fakegid)
def test_main():
support.run_unittest(GroupDatabaseTestCase)
if __name__ == "__main__":
test_main()
|
g1011999/Android-kernel-for-kindle-fire
|
refs/heads/master
|
tools/perf/scripts/python/check-perf-trace.py
|
948
|
# perf trace event handlers, generated by perf trace -g python
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
print "trace_begin"
pass
def trace_end():
print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
vec):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "vec=%s\n" % \
(symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
call_site, ptr, bytes_req, bytes_alloc,
gfp_flags):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "call_site=%u, ptr=%u, bytes_req=%u, " \
"bytes_alloc=%u, gfp_flags=%s\n" % \
(call_site, ptr, bytes_req, bytes_alloc,
flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
try:
unhandled[event_name] += 1
except TypeError:
unhandled[event_name] = 1
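# Editor's note (a sketch based on how autodict from Core.py appears to behave,
# not part of the generated handlers): a never-seen key maps to an empty
# autodict rather than a number, so the first "+= 1" raises TypeError and the
# except branch above seeds the counter:
#
#     counts = autodict()
#     counts['sched__sched_switch'] += 1    # TypeError -> handler sets it to 1
#     counts['sched__sched_switch'] += 1    # now 2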
def print_header(event_name, cpu, secs, nsecs, pid, comm):
print "%-20s %5u %05u.%09u %8u %-20s " % \
(event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
% (common_pc(context), trace_flag_str(common_flags(context)), \
common_lock_depth(context))
def print_unhandled():
keys = unhandled.keys()
if not keys:
return
print "\nunhandled events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for event_name in keys:
print "%-40s %10d\n" % (event_name, unhandled[event_name])
|
kronicz/ecommerce-2
|
refs/heads/master
|
lib/python2.7/site-packages/django/conf/locale/sl/formats.py
|
115
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'd. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y. H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j. M. Y'
SHORT_DATETIME_FORMAT = 'j.n.Y. H:i'
FIRST_DAY_OF_WEEK = 0
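# Editor's illustrative note (hedged, not part of Django): with the formats
# above, formatting an example value would look roughly like this; the
# localized month name is an assumption about Django's Slovenian translation.
#
#     from datetime import datetime
#     from django.utils import formats
#     formats.date_format(datetime(2006, 10, 25, 14, 30), 'DATETIME_FORMAT')
#     # -> '25. oktober 2006. 14:30'  (with LANGUAGE_CODE = 'sl')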
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
'%d-%m-%Y', # '25-10-2006'
'%d. %m. %Y', '%d. %m. %y', # '25. 10. 2006', '25. 10. 06'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%d.%m.%y %H:%M:%S', # '25.10.06 14:30:59'
'%d.%m.%y %H:%M:%S.%f', # '25.10.06 14:30:59.000200'
'%d.%m.%y %H:%M', # '25.10.06 14:30'
'%d.%m.%y', # '25.10.06'
'%d-%m-%Y %H:%M:%S', # '25-10-2006 14:30:59'
'%d-%m-%Y %H:%M:%S.%f', # '25-10-2006 14:30:59.000200'
'%d-%m-%Y %H:%M', # '25-10-2006 14:30'
'%d-%m-%Y', # '25-10-2006'
'%d. %m. %Y %H:%M:%S', # '25. 10. 2006 14:30:59'
'%d. %m. %Y %H:%M:%S.%f', # '25. 10. 2006 14:30:59.000200'
'%d. %m. %Y %H:%M', # '25. 10. 2006 14:30'
'%d. %m. %Y', # '25. 10. 2006'
'%d. %m. %y %H:%M:%S', # '25. 10. 06 14:30:59'
'%d. %m. %y %H:%M:%S.%f', # '25. 10. 06 14:30:59.000200'
'%d. %m. %y %H:%M', # '25. 10. 06 14:30'
'%d. %m. %y', # '25. 10. 06'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
mostaphaRoudsari/Honeybee
|
refs/heads/master
|
src/Honeybee_Glare Analysis.py
|
1
|
#
# Honeybee: A Plugin for Environmental Analysis (GPL) started by Mostapha Sadeghipour Roudsari
#
# This file is part of Honeybee.
#
# Copyright (c) 2013-2020, Mostapha Sadeghipour Roudsari <mostapha@ladybug.tools>
# Honeybee is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Honeybee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Honeybee; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>
"""
Glare Analysis
This component uses evalglare for glare calculations. Evalglare is developed by J. Wienold at Fraunhofer ISE.
http://www.ise.fraunhofer.de/en/
Check this link for more information about glare analysis. Thanks to Christoph Reinhart, Shelby Doyle, J Alstan Jakubiec and Rashida Mogri.
http://web.mit.edu/tito_/www/Projects/Glare/GlareRecommendationsForPractice.html
-
Provided by Honeybee 0.0.66
Args:
_HDRImagePath: Path to an HDR image file
taskPositionUV_: Task position in x and y coordinates
taskPositionAngle_: Task position opening angle in degrees
_runIt: Set to True to run the analysis
Returns:
readMe: ...
glareCheckImage: Path to HDR image of the glare study
DGP: Daylight glare probability.
DGI: Daylight glare index
glareComfortRange: Comfort Ranges. Imperceptible Glare [0.35 > DGP], Perceptible Glare [0.4 > DGP >= 0.35], Disturbing Glare [0.45 > DGP >= 0.4], Intolerable Glare [DGP >= 0.45]
imageWithTaskArea: Path to HDR image with task area marked with blue circle
"""
ghenv.Component.Name = "Honeybee_Glare Analysis"
ghenv.Component.NickName = 'glareAnalysis'
ghenv.Component.Message = 'VER 0.0.66\nJUL_07_2020'
ghenv.Component.IconDisplayMode = ghenv.Component.IconDisplayMode.application
ghenv.Component.Category = "HB-Legacy"
ghenv.Component.SubCategory = "04 | Daylight | Daylight"
#compatibleHBVersion = VER 0.0.56\nFEB_01_2015
#compatibleLBVersion = VER 0.0.59\nFEB_01_2015
try: ghenv.Component.AdditionalHelpFromDocStrings = "1"
except: pass
import scriptcontext as sc
import Grasshopper.Kernel as gh
import os
import subprocess
import math
def runCmdAndGetTheResults(command, shellKey = True):
p = subprocess.Popen(["cmd", command], shell=shellKey, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
# p.kill()
return out, err
def readGlareResults(glareRes):
resultDict = {}
try:
result, possibleNotice = glareRes.split("Notice:")
except:
result = glareRes
possibleNotice = None
keys, values = result.strip().split(":")
keys = keys.split(",")
values = values[1:].split(" ")
# remove empty strings
for keyCount, key in enumerate(keys):
resultDict[key.strip()] = values[keyCount].strip()
return resultDict, possibleNotice
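# Editor's sketch of the evalglare output line that readGlareResults expects,
# reconstructed from the string handling above (the field names and numbers are
# illustrative; the exact set of fields can differ between evalglare versions):
#
#     dgp,av_lum,E_v,lum_backg,E_v_dir,dgi,ugr,vcp,cgi,Lveil: 0.28 250.1 2100.5 180.3 1500.2 18.5 19.1 78.0 17.9 0.0
#
# Everything before ":" becomes the keys and the space-separated numbers after
# it become the values, so e.g. resultDict['dgp'] ends up as the string "0.28".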
def DGPComfortRange(DGP):
"""
This is a helper function that takes in a DGP value and returns a comfort range.
:param DGP: DGP value as a String
:return: comfort range as a String
"""
DGP = float(DGP)
if (DGP) < 0.35:
return "Imperceptible Glare"
elif DGP >= 0.35 and DGP < 0.40:
return "Perceptible Glare"
elif DGP >= 0.40 and DGP < 0.45:
return "Disturbing Glare"
elif DGP >= 0.45:
return "Intolerable Glare"
def main(HDRImagePath, taskPosition, taskPositionAngle):
# import the classes
if sc.sticky.has_key('honeybee_release'):
try:
if not sc.sticky['honeybee_release'].isCompatible(ghenv.Component): return -1
if sc.sticky['honeybee_release'].isInputMissing(ghenv.Component): return -1
except:
warning = "You need a newer version of Honeybee to use this compoent." + \
" Use updateHoneybee component to update userObjects.\n" + \
"If you have already updated userObjects drag Honeybee_Honeybee component " + \
"into canvas and try again."
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, warning)
return -1
hb_folders = sc.sticky["honeybee_folders"]
hb_RADPath = hb_folders["RADPath"]
hb_RADLibPath = hb_folders["RADLibPath"]
hb_DSPath = hb_folders["DSPath"]
hb_DSCore = hb_folders["DSCorePath"]
hb_DSLibPath = hb_folders["DSLibPath"]
hb_EPPath = hb_folders["EPPath"]
else:
print "You should first let Honeybee to fly..."
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, "You should first let Honeybee to fly...")
return -1
# make sure the image is the result of a luminance analysis and not an illuminance one.
# I check for the -i flag of rpict - this will work for all the Honeybee generated renders.
# It may or may not work for other cases; this could be changed to a popup window
# so the user can select between the options.
isLuminance = True
with open(HDRImagePath, "r") as hdrFile:
for lineCount, line in enumerate(hdrFile):
if lineCount<10:
if line.strip().lower().startswith("rpict"):
if line.find("-i") > -1:
isLuminance = False
break
else:
break
if not isLuminance:
warningMsg = "This image is the result of an illuminance analysis and not a luminance analysis which is needed for glare analysis!"
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, warningMsg)
return -1
# http://www.ise.fraunhofer.de/en/downloads-englisch/software/evalglare_windows.zip/at_download/file
notes = ""
# try to find evalglare and check the version
out, err = runCmdAndGetTheResults("/c " + hb_RADPath + "\evalglare -v")
msg = "Failed to find evalglare.exe.\n" + \
"Make sure you have evalglare 1.x.x installed at " + hb_RADPath +\
"You can download evalglare from: \n" + \
"http://www.ise.fraunhofer.de/en/downloads-englisch/software/evalglare_windows.zip/at_download/file"
try:
if out.split(" ")[0].strip() == "evalglare" and float(out.split(" ")[1].strip())> 1.0:
msg = "\nThis component is using " + out.split("\n")[0] + " for glare analysis.\n"
print msg
notes += msg + "\n"
else:
print msg
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Error, msg)
return -1
except:
print msg
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Error, msg)
return -1
#task position in x and y coordinates
taskP = False
if taskPosition != None and taskPositionAngle != None:
taskPX= taskPosition.X
taskPY = taskPosition.Y
taskPA = math.radians(taskPositionAngle)
taskP = True
if taskP and (taskPX > 1 or taskPY > 1):
msg = "U and V valeus for taskPositionUV should be between 0 and 1." + \
"%.3f"%taskPX + " and " + "%.3f"%taskPY + " are not acceptable input." + \
"glare study will be run for the image and not the task plane"
taskP = False
elif taskP == True:
msg = "Task position is provided.\n"
elif taskP == False:
msg = "No task position is provided. The result will be calculated for the whole scene.\n"
print msg
notes += msg + "\n"
# check size and proportion of the image
command = '/c ' + hb_RADPath + '\getinfo -d ' + HDRImagePath
out, err = runCmdAndGetTheResults(command)
try:
# image size
x = float(out.split(" ")[-1].strip())
y = float(out.split(" ")[-3].strip())
except:
msg = "Failed to find size of the picture. It will be set to 800.\n"
print msg
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg)
notes += msg + "\n"
x = y = 800
if x!=y:
msg = "You need a fisheye HDR image for an accurate study.\nThis image seems not to be a fisheye image which may produce inaccurate results.\n"
print msg
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg)
notes += msg + "\n"
# resize the image if needed
if x > 800 or y > 800:
msg = "Due to performance reasons of the evalglare code, the image should be smaller than 800x800 pixels. " + \
"Honeybee is resizing the image...\n"
print msg
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg)
notes += msg + "\n"
proportion = max(x,y)/800
resizedImage = ".".join(HDRImagePath.split(".")[:-1]) + "_resized." + HDRImagePath.split(".")[-1]
pflitLine = "/c " + hb_RADPath + "\pfilt -x /" + str(proportion) + " -y /" + str(proportion) + \
" " + HDRImagePath +" > " + resizedImage
out, err = runCmdAndGetTheResults(pflitLine)
x = x/proportion
y = y/proportion
HDRImagePath = resizedImage
glareCheckImage = ".".join(HDRImagePath.split(".")[:-1]) + "_chkFile." + HDRImagePath.split(".")[-1]
glareNoTextImage = ".".join(HDRImagePath.split(".")[:-1]) + "_noText." + HDRImagePath.split(".")[-1]
# run the analysis
evalGlareLine = "/c " + hb_RADPath + "\evalglare -c " + glareNoTextImage + " " + HDRImagePath
glareRes, err = runCmdAndGetTheResults(evalGlareLine)
if "error: no valid view specified" in err.strip():
# since pcomp is used to merge images, the HDR image doesn't carry view information;
# add the default Honeybee view information for a fish-eye camera
evalGlareLine = "/c " + hb_RADPath + "\evalglare -vth -vv 180 -vh 180 -c " + glareNoTextImage + " " + HDRImagePath
glareRes, err = runCmdAndGetTheResults(evalGlareLine)
notes += "Results for the image:\n" + glareRes + "\n"
# read the results
totalGlareResultDict, possibleNotice = readGlareResults(glareRes)
# add the results to the picture
DGP = totalGlareResultDict['dgp']
DGI = totalGlareResultDict['dgi']
textHeight = x / 28
if textHeight < 8: textHeight = 8
addNumbersLine = "/c " + hb_RADPath + r"\psign -h " + str(textHeight) + " -cb 0 0 0 -cf 1 1 1 DGP=" + str(DGP) +" This view has "+ str(DGPComfortRange(DGP))+ " | " + \
hb_RADPath + r"\pcompos " + glareNoTextImage + " 0 0 - " + str(textHeight/2) + " " + str(y) + " > " + glareCheckImage
runCmdAndGetTheResults(addNumbersLine)
if possibleNotice!=None: notes += "Notice: " + possibleNotice + "\n"
# if a task position is provided, run the analysis again for the task area
if taskP:
glareTaskPCheckImage = ".".join(HDRImagePath.split(".")[:-1]) + "_TPChkFile." + HDRImagePath.split(".")[-1]
glareTaskPNoText = ".".join(HDRImagePath.split(".")[:-1]) + "_TPnoText." + HDRImagePath.split(".")[-1]
xPixle = int(taskPX * x)
yPixle = int(taskPY * y) # 0,0 coordinate for evalglare located at top left
taskPA = math.radians(taskPositionAngle)
TArguments = " ".join([str(xPixle), str(yPixle), "%.3f"%taskPA])
evalGlareTaskPLine = "/c " + hb_RADPath + "\evalglare -c " + glareTaskPNoText + " -T " + \
TArguments + " " + HDRImagePath
glareTaskRes, err = runCmdAndGetTheResults(evalGlareTaskPLine)
notes += "Results for the task position:\n" + glareTaskRes + "\n"
if err.strip() == "error: no valid view specified":
# since pcomp is used to merge images, the HDR image doesn't carry view information;
# add the default Honeybee view information for a fish-eye camera
evalGlareTaskPLine = "/c " + hb_RADPath + "\evalglare -vth -vv 180 -vh 180 -c " + glareTaskPNoText + " -T " + \
TArguments + " " + HDRImagePath
glareTaskRes, err = runCmdAndGetTheResults(evalGlareTaskPLine)
taskPGlareResultDict, possibleNotice = readGlareResults(glareTaskRes)
# add the results to the picture
DGP = taskPGlareResultDict['dgp']
DGI = taskPGlareResultDict['dgi']
addNumbersTLine = "/c " + hb_RADPath + r"\psign -h " + str(textHeight) + " -cb 0 0 0 -cf 1 1 1 DGP=" + str(DGP) + " This view has " + str(DGPComfortRange(DGP))+ " | "+ \
hb_RADPath + r"\pcompos " + glareTaskPNoText + " 0 0 - " + str(textHeight/2) + " " + str(y) + " > " + glareTaskPCheckImage
runCmdAndGetTheResults(addNumbersTLine)
if possibleNotice!=None: notes += "Notice: " + possibleNotice + "\n"
return notes, glareCheckImage, totalGlareResultDict, glareTaskPCheckImage, taskPGlareResultDict
else:
return notes, glareCheckImage, totalGlareResultDict, None, None
if _HDRImagePath and _runIt:
result = main(_HDRImagePath, taskPositionUV_, taskPositionAngle_)
if result!= -1:
readMe, glareCheckImage, totalGlareResultDict, imageWithTaskArea, taskPGlareResultDict = result
if taskPGlareResultDict!=None:
DGP = taskPGlareResultDict['dgp']
glareComfortRange = DGPComfortRange(DGP)
DGI = taskPGlareResultDict['dgi']
else:
DGP = totalGlareResultDict['dgp']
glareComfortRange = DGPComfortRange(DGP)
DGI = totalGlareResultDict['dgi']
else:
readMe = "Provide a valid HDR Image and set _runIt to True."
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning,readMe)
|
pniedzielski/fb-hackathon-2013-11-21
|
refs/heads/master
|
src/repl.it/jsrepl/extern/python/closured/lib/python2.7/lib2to3/fixes/fix_methodattrs.py
|
326
|
"""Fix bound method attributes (method.im_? -> method.__?__).
"""
# Author: Christian Heimes
# Local imports
from .. import fixer_base
from ..fixer_util import Name
MAP = {
"im_func" : "__func__",
"im_self" : "__self__",
"im_class" : "__self__.__class__"
}
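# Editor's illustrative before/after for this fixer (hypothetical source):
#
#     f = obj.method.im_func              # before
#     f = obj.method.__func__             # after running 2to3 with "methodattrs"
#
#     cls = obj.method.im_class           # before
#     cls = obj.method.__self__.__class__ # after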
class FixMethodattrs(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """
power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
"""
def transform(self, node, results):
attr = results["attr"][0]
new = unicode(MAP[attr.value])
attr.replace(Name(new, prefix=attr.prefix))
|
macs03/demo-cms
|
refs/heads/master
|
cms/lib/python2.7/site-packages/django/conf/locale/fa/formats.py
|
234
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j F Y'
TIME_FORMAT = 'G:i:s'
DATETIME_FORMAT = 'j F Y، ساعت G:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'Y/n/j'
SHORT_DATETIME_FORMAT = 'Y/n/j، G:i:s'
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
|
sergiorua/libcloud
|
refs/heads/trunk
|
libcloud/compute/ssh.py
|
10
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Wraps multiple ways to communicate over SSH.
"""
have_paramiko = False
try:
import paramiko
have_paramiko = True
except ImportError:
pass
# Depending on your version of Paramiko, it may cause a deprecation
# warning on Python 2.6.
# Ref: https://bugs.launchpad.net/paramiko/+bug/392973
import os
import time
import subprocess
import logging
import warnings
from os.path import split as psplit
from os.path import join as pjoin
from libcloud.utils.logging import ExtraLogFormatter
from libcloud.utils.py3 import StringIO
from libcloud.utils.py3 import b
__all__ = [
'BaseSSHClient',
'ParamikoSSHClient',
'ShellOutSSHClient',
'SSHCommandTimeoutError'
]
class SSHCommandTimeoutError(Exception):
"""
Exception which is raised when an SSH command times out.
"""
def __init__(self, cmd, timeout):
self.cmd = cmd
self.timeout = timeout
message = 'Command didn\'t finish in %s seconds' % (timeout)
super(SSHCommandTimeoutError, self).__init__(message)
def __repr__(self):
return ('<SSHCommandTimeoutError: cmd="%s",timeout=%s)>' %
(self.cmd, self.timeout))
def __str__(self):
return self.message
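# Editor's illustration: calling run('sleep 60', timeout=5) on the
# ParamikoSSHClient defined below would raise
# SSHCommandTimeoutError(cmd='sleep 60', timeout=5) once five seconds elapse,
# and str(exc) would read roughly "Command didn't finish in 5 seconds".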
class BaseSSHClient(object):
"""
Base class representing a connection over SSH/SCP to a remote node.
"""
def __init__(self, hostname, port=22, username='root', password=None,
key=None, key_files=None, timeout=None):
"""
:type hostname: ``str``
:keyword hostname: Hostname or IP address to connect to.
:type port: ``int``
:keyword port: TCP port to communicate on, defaults to 22.
:type username: ``str``
:keyword username: Username to use, defaults to root.
:type password: ``str``
:keyword password: Password to authenticate with or a password used
to unlock a private key if a password protected key
is used.
:param key: Deprecated in favor of ``key_files`` argument.
:type key_files: ``str`` or ``list``
:keyword key_files: A list of paths to the private key files to use.
"""
if key is not None:
message = ('You are using deprecated "key" argument which has '
'been replaced with "key_files" argument')
warnings.warn(message, DeprecationWarning)
# key_files takes precedence
key_files = key if not key_files else key_files
self.hostname = hostname
self.port = port
self.username = username
self.password = password
self.key_files = key_files
self.timeout = timeout
def connect(self):
"""
Connect to the remote node over SSH.
:return: True if the connection has been successfully established,
False otherwise.
:rtype: ``bool``
"""
raise NotImplementedError(
'connect not implemented for this ssh client')
def put(self, path, contents=None, chmod=None, mode='w'):
"""
Upload a file to the remote node.
:type path: ``str``
:keyword path: File path on the remote node.
:type contents: ``str``
:keyword contents: File Contents.
:type chmod: ``int``
:keyword chmod: chmod file to this after creation.
:type mode: ``str``
:keyword mode: Mode in which the file is opened.
:return: Full path to the location where a file has been saved.
:rtype: ``str``
"""
raise NotImplementedError(
'put not implemented for this ssh client')
def delete(self, path):
"""
Delete/Unlink a file on the remote node.
:type path: ``str``
:keyword path: File path on the remote node.
:return: True if the file has been successfully deleted, False
otherwise.
:rtype: ``bool``
"""
raise NotImplementedError(
'delete not implemented for this ssh client')
def run(self, cmd):
"""
Run a command on a remote node.
:type cmd: ``str``
:keyword cmd: Command to run.
:return ``list`` of [stdout, stderr, exit_status]
"""
raise NotImplementedError(
'run not implemented for this ssh client')
def close(self):
"""
Shutdown connection to the remote node.
:return: True if the connection has been successfully closed, False
otherwise.
:rtype: ``bool``
"""
raise NotImplementedError(
'close not implemented for this ssh client')
def _get_and_setup_logger(self):
logger = logging.getLogger('libcloud.compute.ssh')
path = os.getenv('LIBCLOUD_DEBUG')
if path:
handler = logging.FileHandler(path)
handler.setFormatter(ExtraLogFormatter())
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
return logger
class ParamikoSSHClient(BaseSSHClient):
"""
A SSH Client powered by Paramiko.
"""
# Maximum number of bytes to read at once from a socket
CHUNK_SIZE = 1024
# How long to sleep while waiting for command to finish
SLEEP_DELAY = 1.5
def __init__(self, hostname, port=22, username='root', password=None,
key=None, key_files=None, key_material=None, timeout=None):
"""
Authentication is always attempted in the following order:
- The key passed in (if key is provided)
- Any key we can find through an SSH agent (only if no password and no key is provided)
- Any "id_rsa" or "id_dsa" key discoverable in ~/.ssh/ (only if no password and no key is provided)
- Plain username/password auth, if a password was given
"""
if key_files and key_material:
raise ValueError(('key_files and key_material arguments are '
'mutually exclusive'))
super(ParamikoSSHClient, self).__init__(hostname=hostname, port=port,
username=username,
password=password,
key=key,
key_files=key_files,
timeout=timeout)
self.key_material = key_material
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.logger = self._get_and_setup_logger()
def connect(self):
conninfo = {'hostname': self.hostname,
'port': self.port,
'username': self.username,
'allow_agent': False,
'look_for_keys': False}
if self.password:
conninfo['password'] = self.password
if self.key_files:
conninfo['key_filename'] = self.key_files
if self.key_material:
conninfo['pkey'] = self._get_pkey_object(key=self.key_material)
if not self.password and not (self.key_files or self.key_material):
conninfo['allow_agent'] = True
conninfo['look_for_keys'] = True
if self.timeout:
conninfo['timeout'] = self.timeout
extra = {'_hostname': self.hostname, '_port': self.port,
'_username': self.username, '_timeout': self.timeout}
self.logger.debug('Connecting to server', extra=extra)
self.client.connect(**conninfo)
return True
def put(self, path, contents=None, chmod=None, mode='w'):
extra = {'_path': path, '_mode': mode, '_chmod': chmod}
self.logger.debug('Uploading file', extra=extra)
sftp = self.client.open_sftp()
# less than ideal, but we need to mkdir stuff otherwise file() fails
head, tail = psplit(path)
if path[0] == "/":
sftp.chdir("/")
else:
# Relative path - start from a home directory (~)
sftp.chdir('.')
for part in head.split("/"):
if part != "":
try:
sftp.mkdir(part)
except IOError:
# so, there doesn't seem to be a way to
# catch EEXIST consistently *sigh*
pass
sftp.chdir(part)
cwd = sftp.getcwd()
ak = sftp.file(tail, mode=mode)
ak.write(contents)
if chmod is not None:
ak.chmod(chmod)
ak.close()
sftp.close()
if path[0] == '/':
file_path = path
else:
file_path = pjoin(cwd, path)
return file_path
def delete(self, path):
extra = {'_path': path}
self.logger.debug('Deleting file', extra=extra)
sftp = self.client.open_sftp()
sftp.unlink(path)
sftp.close()
return True
def run(self, cmd, timeout=None):
"""
Note: This function is based on paramiko's exec_command()
method.
:param timeout: How long to wait (in seconds) for the command to
finish (optional).
:type timeout: ``float``
"""
extra = {'_cmd': cmd}
self.logger.debug('Executing command', extra=extra)
# Use the system default buffer size
bufsize = -1
transport = self.client.get_transport()
chan = transport.open_session()
start_time = time.time()
chan.exec_command(cmd)
stdout = StringIO()
stderr = StringIO()
# Create a stdin file and immediately close it to prevent any
# interactive script from hanging the process.
stdin = chan.makefile('wb', bufsize)
stdin.close()
# Receive all the output
# Note #1: This is used instead of chan.makefile approach to prevent
# buffering issues and hanging if the executed command produces a lot
# of output.
#
# Note #2: If you are going to remove "ready" checks inside the loop
# you are going to have a bad time. Trying to consume from a channel
# which is not ready will block indefinitely.
exit_status_ready = chan.exit_status_ready()
while not exit_status_ready:
current_time = time.time()
elapsed_time = (current_time - start_time)
if timeout and (elapsed_time > timeout):
# TODO: Is this the right way to clean up?
chan.close()
raise SSHCommandTimeoutError(cmd=cmd, timeout=timeout)
if chan.recv_ready():
data = chan.recv(self.CHUNK_SIZE)
while data:
stdout.write(b(data).decode('utf-8'))
ready = chan.recv_ready()
if not ready:
break
data = chan.recv(self.CHUNK_SIZE)
if chan.recv_stderr_ready():
data = chan.recv_stderr(self.CHUNK_SIZE)
while data:
stderr.write(b(data).decode('utf-8'))
ready = chan.recv_stderr_ready()
if not ready:
break
data = chan.recv_stderr(self.CHUNK_SIZE)
# We need to check the exit status here, because the command could
# print some output and exit during the sleep below.
exit_status_ready = chan.exit_status_ready()
if exit_status_ready:
break
# Short sleep to prevent busy waiting
time.sleep(self.SLEEP_DELAY)
# Receive the exit status code of the command we ran.
status = chan.recv_exit_status()
stdout = stdout.getvalue()
stderr = stderr.getvalue()
extra = {'_status': status, '_stdout': stdout, '_stderr': stderr}
self.logger.debug('Command finished', extra=extra)
return [stdout, stderr, status]
def close(self):
self.logger.debug('Closing server connection')
self.client.close()
return True
def _get_pkey_object(self, key):
"""
Try to detect private key type and return paramiko.PKey object.
"""
for cls in [paramiko.RSAKey, paramiko.DSSKey, paramiko.ECDSAKey]:
try:
key = cls.from_private_key(StringIO(key))
except paramiko.ssh_exception.SSHException:
# Invalid key, try other key type
pass
else:
return key
msg = 'Invalid or unsupported key type'
raise paramiko.ssh_exception.SSHException(msg)
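# A minimal usage sketch (editor's illustration; the hostname, key path and
# command are made-up values, and in normal libcloud usage the compute driver
# constructs and drives this client for you during deployment):
#
#     client = ParamikoSSHClient(hostname='203.0.113.10', username='root',
#                                key_files='/home/user/.ssh/id_rsa')
#     client.connect()
#     stdout, stderr, status = client.run('uname -a')
#     client.close()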
class ShellOutSSHClient(BaseSSHClient):
"""
This client shells out to "ssh" binary to run commands on the remote
server.
Note: This client should not be used in production.
"""
def __init__(self, hostname, port=22, username='root', password=None,
key=None, key_files=None, timeout=None):
super(ShellOutSSHClient, self).__init__(hostname=hostname,
port=port, username=username,
password=password,
key=key,
key_files=key_files,
timeout=timeout)
if self.password:
raise ValueError('ShellOutSSHClient only supports key auth')
child = subprocess.Popen(['ssh'], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
child.communicate()
if child.returncode == 127:
raise ValueError('ssh client is not available')
self.logger = self._get_and_setup_logger()
def connect(self):
"""
This client doesn't support persistent connections; it establishes a new
connection every time the "run" method is called.
"""
return True
def run(self, cmd):
return self._run_remote_shell_command([cmd])
def put(self, path, contents=None, chmod=None, mode='w'):
if mode == 'w':
redirect = '>'
elif mode == 'a':
redirect = '>>'
else:
raise ValueError('Invalid mode: ' + mode)
cmd = ['echo "%s" %s %s' % (contents, redirect, path)]
self._run_remote_shell_command(cmd)
return path
def delete(self, path):
cmd = ['rm', '-rf', path]
self._run_remote_shell_command(cmd)
return True
def close(self):
return True
def _get_base_ssh_command(self):
cmd = ['ssh']
if self.key_files:
cmd += ['-i', self.key_files]
if self.timeout:
cmd += ['-oConnectTimeout=%s' % (self.timeout)]
cmd += ['%s@%s' % (self.username, self.hostname)]
return cmd
def _run_remote_shell_command(self, cmd):
"""
Run a command on a remote server.
:param cmd: Command to run.
:type cmd: ``list`` of ``str``
:return: Command stdout, stderr and status code.
:rtype: ``tuple``
"""
base_cmd = self._get_base_ssh_command()
full_cmd = base_cmd + [' '.join(cmd)]
self.logger.debug('Executing command: "%s"' % (' '.join(full_cmd)))
child = subprocess.Popen(full_cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = child.communicate()
return (stdout, stderr, child.returncode)
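# Editor's illustration of what the above builds (hypothetical values): with
# key_files='/home/user/.ssh/id_rsa', timeout=10 and hostname='203.0.113.10',
# run('ls -la /tmp') shells out to roughly
#
#     ssh -i /home/user/.ssh/id_rsa -oConnectTimeout=10 root@203.0.113.10 'ls -la /tmp'
#
# except that subprocess receives the pieces as an argument list, so the remote
# command is passed as a single argument rather than being shell-quoted here.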
class MockSSHClient(BaseSSHClient):
pass
SSHClient = ParamikoSSHClient
if not have_paramiko:
SSHClient = MockSSHClient
|
dietrichc/streamline-ppc-reports
|
refs/heads/master
|
examples/dfp/v201403/creative_service/create_creative_from_template.py
|
1
|
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates a new template creative for a given advertiser.
To determine which companies are advertisers, run get_advertisers.py.
To determine which creative templates exist, run
get_all_creative_templates.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: CreativeService.createCreative
"""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
import base64
import os
import uuid
# Import appropriate modules from the client library.
from googleads import dfp
# Set id of the advertiser (company) that the creative will be assigned to.
ADVERTISER_ID = 'INSERT_ADVERTISER_COMPANY_ID_HERE'
def main(client, advertiser_id):
# Initialize appropriate service.
creative_service = client.GetService('CreativeService', version='v201403')
# Use the image banner with optional third party tracking template.
creative_template_id = '10000680'
image_data = open(os.path.join(os.path.split(__file__)[0], '..', '..', 'data',
'medium_rectangle.jpg'), 'r').read()
image_data = base64.encodestring(image_data)
# Create creative from templates.
creative = {
'xsi_type': 'TemplateCreative',
'name': 'Template Creative #%s' % uuid.uuid4(),
'advertiserId': advertiser_id,
'size': {'width': '300', 'height': '250'},
'creativeTemplateId': creative_template_id,
'creativeTemplateVariableValues': [
{
'xsi_type': 'AssetCreativeTemplateVariableValue',
'uniqueName': 'Imagefile',
'assetByteArray': image_data,
'fileName': 'image%s.jpg' % uuid.uuid4()
},
{
'xsi_type': 'LongCreativeTemplateVariableValue',
'uniqueName': 'Imagewidth',
'value': '300'
},
{
'xsi_type': 'LongCreativeTemplateVariableValue',
'uniqueName': 'Imageheight',
'value': '250'
},
{
'xsi_type': 'UrlCreativeTemplateVariableValue',
'uniqueName': 'ClickthroughURL',
'value': 'www.google.com'
},
{
'xsi_type': 'StringCreativeTemplateVariableValue',
'uniqueName': 'Targetwindow',
'value': '_blank'
}
]
}
# Call service to create the creative.
creative = creative_service.createCreative(creative)
# Display results.
print ('Template creative with id \'%s\', name \'%s\', and type \'%s\' was '
'created and can be previewed at %s.'
% (creative['id'], creative['name'], creative['Creative.Type'],
creative['previewUrl']))
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, ADVERTISER_ID)
|
UdK-VPT/Open_eQuarter
|
refs/heads/master
|
mole/webinteraction/__init__.py
|
12133432
| |
thnee/ansible
|
refs/heads/devel
|
test/units/module_utils/__init__.py
|
12133432
|