repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
piquadrat/django | refs/heads/master | django/contrib/gis/db/backends/postgis/introspection.py | 42 | from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.postgresql.introspection import DatabaseIntrospection
class GeoIntrospectionError(Exception):
    """Internal signal: the column was not found in ``geometry_columns``,
    so the ``geography_columns`` view should be consulted instead."""
class PostGISIntrospection(DatabaseIntrospection):
    """Database introspection for PostgreSQL that understands the PostGIS
    geometry/geography column types and spatial metadata tables."""

    # Reverse dictionary for PostGIS geometry types not populated until
    # introspection is actually performed.
    postgis_types_reverse = {}

    # PostGIS maintenance tables/views; they must never be reported as
    # user tables (e.g. by inspectdb).
    ignored_tables = DatabaseIntrospection.ignored_tables + [
        'geography_columns',
        'geometry_columns',
        'raster_columns',
        'spatial_ref_sys',
        'raster_overviews',
    ]

    # Overridden from parent to include raster indices in retrieval.
    # Raster indices have pg_index.indkey value 0 because they are an
    # expression over the raster column through the ST_ConvexHull function.
    # So the default query has to be adapted to include raster indices.
    _get_indexes_query = """
        SELECT DISTINCT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary
        FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, pg_catalog.pg_index idx,
             pg_catalog.pg_attribute attr, pg_catalog.pg_type t
        WHERE
            c.oid = idx.indrelid
            AND idx.indexrelid = c2.oid
            AND attr.attrelid = c.oid
            AND t.oid = attr.atttypid
            AND (
                attr.attnum = idx.indkey[0] OR
                (t.typname LIKE 'raster' AND idx.indkey = '0')
            )
            AND attr.attnum > 0
            AND c.relname = %s"""

    def get_postgis_types(self):
        """
        Return a dictionary with keys that are the PostgreSQL object
        identification integers for the PostGIS geometry and/or
        geography types (if supported).
        """
        field_types = [
            ('geometry', 'GeometryField'),
            # The value for the geography type is actually a tuple
            # to pass in the `geography=True` keyword to the field
            # definition.
            ('geography', ('GeometryField', {'geography': True})),
        ]
        postgis_types = {}

        # The OID integers associated with the geometry type may
        # be different across versions; hence, this is why we have
        # to query the PostgreSQL pg_type table corresponding to the
        # PostGIS custom data types.
        oid_sql = 'SELECT "oid" FROM "pg_type" WHERE "typname" = %s'
        cursor = self.connection.cursor()
        try:
            for field_type in field_types:
                cursor.execute(oid_sql, (field_type[0],))
                # One typname may map to several OIDs; register them all
                # under the same Django field definition.
                for result in cursor.fetchall():
                    postgis_types[result[0]] = field_type[1]
        finally:
            cursor.close()

        return postgis_types

    def get_field_type(self, data_type, description):
        """Map a column's type OID to a Django field name, lazily adding
        the PostGIS OIDs to ``data_types_reverse`` on first use."""
        if not self.postgis_types_reverse:
            # If the PostGIS types reverse dictionary is not populated, do so
            # now. In order to prevent unnecessary requests upon connection
            # initialization, the `data_types_reverse` dictionary is not updated
            # with the PostGIS custom types until introspection is actually
            # performed -- in other words, when this function is called.
            self.postgis_types_reverse = self.get_postgis_types()
            self.data_types_reverse.update(self.postgis_types_reverse)
        return super().get_field_type(data_type, description)

    def get_geometry_type(self, table_name, geo_col):
        """
        The geometry type OID used by PostGIS does not indicate the particular
        type of field that a geometry column is (e.g., whether it's a
        PointField or a PolygonField). Thus, this routine queries the PostGIS
        metadata tables to determine the geometry type.
        """
        cursor = self.connection.cursor()
        try:
            try:
                # First seeing if this geometry column is in the `geometry_columns`
                cursor.execute('SELECT "coord_dimension", "srid", "type" '
                               'FROM "geometry_columns" '
                               'WHERE "f_table_name"=%s AND "f_geometry_column"=%s',
                               (table_name, geo_col))
                row = cursor.fetchone()
                if not row:
                    raise GeoIntrospectionError
            except GeoIntrospectionError:
                # Not a geometry column; fall back to `geography_columns`.
                cursor.execute('SELECT "coord_dimension", "srid", "type" '
                               'FROM "geography_columns" '
                               'WHERE "f_table_name"=%s AND "f_geography_column"=%s',
                               (table_name, geo_col))
                row = cursor.fetchone()

            if not row:
                raise Exception('Could not find a geometry or geography column for "%s"."%s"' %
                                (table_name, geo_col))

            # OGRGeomType does not require GDAL and makes it easy to convert
            # from OGC geom type name to Django field.
            field_type = OGRGeomType(row[2]).django

            # Getting any GeometryField keyword arguments that are not the default.
            dim = row[0]
            srid = row[1]
            field_params = {}
            # 4326 (WGS84) and 2 dimensions are GeometryField defaults, so
            # only emit kwargs that differ from them.
            if srid != 4326:
                field_params['srid'] = srid
            if dim != 2:
                field_params['dim'] = dim
        finally:
            cursor.close()

        return field_type, field_params
|
zhangyunfeng/.emacs.d | refs/heads/master | elpa/yasnippet-20170326.1030/snippets/rename_add_contr.py | 134 | #!/usr/bin/env python
import os
import re
from os.path import join
from shutil import move
def rename(root, f):
    """Strip the '.yasnippet' extension from a snippet file.

    Moves ``root/f`` to the same path minus its extension.  Files that do
    not end in '.yasnippet' are left untouched.

    :param root: directory containing the file
    :param f: bare file name within ``root``
    """
    if f.endswith('.yasnippet'):
        # rsplit keeps dots inside the base name intact; the previous
        # ``f.split('.')`` raised ValueError on names like 'a.b.yasnippet'.
        base, _ = f.rsplit('.', 1)
        print("move %s to %s" % (join(root, f), join(root, base)))
        move(join(root, f), join(root, base))
# Header fragments for snippet files, used with insert() below.
CONT = "# contributor: Andrea crotti\n# --"
END = "# --\n\n"
orig = "# --\n\n"  # regex pattern handed to re.sub in insert()
to = "# --\n"      # replacement text
def insert(root, f, orig, to):
    """Rewrite file ``root/f`` in place, substituting regex ``orig``
    with ``to`` (via ``re.sub``).

    :param root: directory containing the file
    :param f: bare file name within ``root``
    :param orig: regular-expression pattern to replace
    :param to: replacement string
    """
    fname = join(root, f)
    # `with` blocks fix the leaked file handles of the original version
    # (open(...).read() / open(...).write() never closed the files).
    with open(fname) as fh:
        text = fh.read()
    new_text = re.sub(orig, to, text)
    with open(fname, 'w') as fh:
        fh.write(new_text)
if __name__ == '__main__':
    # Walk the snippet tree; only directories whose path contains "mode"
    # (e.g. python-mode/) hold snippet files to rename.
    for root, dirs, files in os.walk('.'):
        if "mode" in root:
            # os.popen("git add *yasnippet")
            for f in files:
                rename(root, f)
                # insert(root, f, orig, to)
|
nfvs/flask-login | refs/heads/master | flask_login/utils.py | 2 | # -*- coding: utf-8 -*-
'''
flask_login.utils
-----------------
General utilities.
'''
import hmac
from hashlib import sha512
from functools import wraps
from werkzeug.local import LocalProxy
from werkzeug.security import safe_str_cmp
from werkzeug.urls import url_decode, url_encode
from flask import (_request_ctx_stack, current_app, request, session, url_for,
has_request_context)
from ._compat import text_type, urlparse, urlunparse
from .config import COOKIE_NAME, EXEMPT_METHODS
from .signals import user_logged_in, user_logged_out, user_login_confirmed
#: A proxy for the current user. If no user is logged in, this will be an
#: anonymous user
#: (the lambda defers the lookup so that ``_get_user``, defined later in
#: this module, is resolved lazily per access).
current_user = LocalProxy(lambda: _get_user())
def encode_cookie(payload):
    '''
    Sign *payload* with the application's secret key and return the
    ``payload|digest`` pair suitable for storing in a cookie.

    :param payload: The value to encode, as `unicode`.
    :type payload: unicode
    '''
    digest = _cookie_digest(payload)
    return payload + u'|' + digest
def decode_cookie(cookie):
    '''
    Decode a cookie produced by `encode_cookie`.  Returns the original
    payload, or ``None`` implicitly when the cookie is malformed or its
    signature does not verify.

    :param cookie: An encoded cookie.
    :type cookie: str
    '''
    try:
        payload, digest = cookie.rsplit(u'|', 1)
        if hasattr(digest, 'decode'):
            digest = digest.decode('ascii')  # pragma: no cover
    except ValueError:
        return None

    if not safe_str_cmp(_cookie_digest(payload), digest):
        return None
    return payload
def make_next_param(login_url, current_url):
    '''
    Strip the scheme and host from *current_url* when they match those of
    *login_url*, producing the shortest URL that can safely be passed as a
    ``next`` parameter.

    :param login_url: The login URL being redirected to.
    :type login_url: str
    :param current_url: The URL to reduce.
    :type current_url: str
    '''
    login_parts = urlparse(login_url)
    current_parts = urlparse(current_url)

    scheme_ok = not login_parts.scheme or login_parts.scheme == current_parts.scheme
    host_ok = not login_parts.netloc or login_parts.netloc == current_parts.netloc
    if scheme_ok and host_ok:
        return urlunparse(('', '', current_parts.path,
                           current_parts.params, current_parts.query, ''))
    return current_url
def expand_login_view(login_view):
    '''
    Return the URL for the login view.  A value that already looks like a
    URL (absolute or path-relative) is returned unchanged; anything else is
    treated as an endpoint name and resolved with ``url_for``.

    :param login_view: The name of the login view or a URL for the login view.
    :type login_view: str
    '''
    if not login_view.startswith(('https://', 'http://', '/')):
        return url_for(login_view)
    return login_view
def login_url(login_view, next_url=None, next_field='next'):
    '''
    Build the URL of a login page.  With only `login_view` given this is
    just that view's URL; when `next_url` is supplied, it is appended as a
    ``next=URL`` query parameter so the login view can redirect back.

    :param login_view: The name of the login view. (Alternately, the actual
                       URL to the login view.)
    :type login_view: str
    :param next_url: The URL to give the login view for redirection.
    :type next_url: str
    :param next_field: What field to store the next URL in. (It defaults to
                       ``next``.)
    :type next_field: str
    '''
    base = expand_login_view(login_view)
    if next_url is None:
        return base

    parsed = urlparse(base)
    params = url_decode(parsed.query)
    params[next_field] = make_next_param(base, next_url)
    rebuilt = parsed._replace(query=url_encode(params, sort=True))
    return urlunparse(rebuilt)
def login_fresh():
    '''
    ``True`` when the current session was marked fresh at login time.
    '''
    try:
        return session['_fresh']
    except KeyError:
        return False
def login_user(user, remember=False, force=False, fresh=True):
    '''
    Logs a user in. You should pass the actual user object to this. If the
    user's `is_active` property is ``False``, they will not be logged in
    unless `force` is ``True``.

    This will return ``True`` if the log in attempt succeeds, and ``False`` if
    it fails (i.e. because the user is inactive).

    :param user: The user object to log in.
    :type user: object
    :param remember: Whether to remember the user after their session expires.
        Defaults to ``False``.
    :type remember: bool
    :param force: If the user is inactive, setting this to ``True`` will log
        them in regardless. Defaults to ``False``.
    :type force: bool
    :param fresh: setting this to ``False`` will log in the user with a session
        marked as not "fresh". Defaults to ``True``.
    :type fresh: bool
    '''
    if not force and not user.is_active:
        return False

    # id_attribute names the method providing the user's id (it is called
    # here, not merely read).
    user_id = getattr(user, current_app.login_manager.id_attribute)()
    session['user_id'] = user_id
    session['_fresh'] = fresh
    # Bind the session to an identifier derived from the client, so a stolen
    # session can be detected by the manager.
    session['_id'] = current_app.login_manager._session_identifier_generator()

    if remember:
        # The remember-me cookie itself is written later, during response
        # processing; this session flag merely requests it.
        session['remember'] = 'set'

    # Cache the user on the request context so `current_user` resolves to it.
    _request_ctx_stack.top.user = user
    user_logged_in.send(current_app._get_current_object(), user=_get_user())
    return True
def logout_user():
    '''
    Logs a user out. (You do not need to pass the actual user.) This will
    also clean up the remember me cookie if it exists.
    '''
    # Capture the user before mutating the session so the logged-out signal
    # still carries the user that was signed in.
    user = _get_user()

    if 'user_id' in session:
        session.pop('user_id')

    if '_fresh' in session:
        session.pop('_fresh')

    cookie_name = current_app.config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
    if cookie_name in request.cookies:
        # Flag the response machinery to delete the remember-me cookie.
        session['remember'] = 'clear'

    user_logged_out.send(current_app._get_current_object(), user=user)

    # Re-resolve the (now anonymous) user for the rest of the request.
    current_app.login_manager.reload_user()
    return True
def confirm_login():
    '''
    This sets the current session as fresh. Sessions become stale when they
    are reloaded from a cookie.
    '''
    session['_fresh'] = True
    # Regenerate the session identifier now that the login was re-confirmed.
    session['_id'] = current_app.login_manager._session_identifier_generator()
    user_login_confirmed.send(current_app._get_current_object())
def login_required(func):
    '''
    Decorator ensuring the current user is authenticated before the wrapped
    view runs; otherwise the :attr:`LoginManager.unauthorized` callback is
    invoked.  For example::

        @app.route('/post')
        @login_required
        def post():
            pass

    For one-off checks inside a view you can instead write::

        if not current_user.is_authenticated:
            return current_app.login_manager.unauthorized()

    ...which is essentially what this decorator adds.  Setting the
    application configuration variable `LOGIN_DISABLED` to `True` turns the
    check off globally (handy for unit tests).

    .. Note ::

        Per `W3 guidelines for CORS preflight requests
        <http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_,
        HTTP ``OPTIONS`` requests are exempt from login checks.

    :param func: The view function to decorate.
    :type func: function
    '''
    @wraps(func)
    def decorated_view(*args, **kwargs):
        # CORS preflight requests and globally-disabled login checks pass
        # straight through; evaluation order matches the original chain so
        # `current_user` (which may trigger user loading) is only touched
        # when the check is actually required.
        exempt = (request.method in EXEMPT_METHODS or
                  current_app.login_manager._login_disabled)
        if not exempt and not current_user.is_authenticated:
            return current_app.login_manager.unauthorized()
        return func(*args, **kwargs)
    return decorated_view
def fresh_login_required(func):
    '''
    Decorator requiring the current login to be *fresh* — i.e. the session
    was not restored from a 'remember me' cookie.  Sensitive operations,
    like changing a password or e-mail, should be protected with this, to
    impede the efforts of cookie thieves.

    Unauthenticated users get :meth:`LoginManager.unauthorized` as usual;
    authenticated-but-stale sessions get :meth:`LoginManager.needs_refresh`
    instead (so a :attr:`LoginManager.refresh_view` must be configured).

    Behaves identically to the :func:`login_required` decorator with respect
    to configuration variables.

    .. Note ::

        Per `W3 guidelines for CORS preflight requests
        <http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_,
        HTTP ``OPTIONS`` requests are exempt from login checks.

    :param func: The view function to decorate.
    :type func: function
    '''
    @wraps(func)
    def decorated_view(*args, **kwargs):
        exempt = (request.method in EXEMPT_METHODS or
                  current_app.login_manager._login_disabled)
        if not exempt:
            if not current_user.is_authenticated:
                return current_app.login_manager.unauthorized()
            if not login_fresh():
                return current_app.login_manager.needs_refresh()
        return func(*args, **kwargs)
    return decorated_view
def set_login_view(login_view, blueprint=None):
    '''
    Sets the login view for the app or blueprint. If a blueprint is passed,
    the login view is set for this blueprint on ``blueprint_login_views``.

    :param login_view: The name of the login view or the actual URL of it.
    :type login_view: str
    :param blueprint: The blueprint which this login view should be set on.
        Defaults to ``None``.
    :type blueprint: object
    '''
    num_login_views = len(current_app.login_manager.blueprint_login_views)
    # NOTE(review): if blueprint is None while blueprint-specific views
    # already exist, `blueprint.name` below raises AttributeError — confirm
    # callers never hit that combination.
    if blueprint is not None or num_login_views != 0:

        (current_app.login_manager
            .blueprint_login_views[blueprint.name]) = login_view

        # On the first blueprint-specific registration, preserve the
        # app-wide login view as the fallback under the None key.
        if (current_app.login_manager.login_view is not None and
                None not in current_app.login_manager.blueprint_login_views):
            (current_app.login_manager
                .blueprint_login_views[None]) = (current_app.login_manager
                                                 .login_view)

            current_app.login_manager.login_view = None
    else:
        current_app.login_manager.login_view = login_view
def _get_user():
    """Return the user bound to the current request context, loading it on
    first access; ``None`` outside of a request context."""
    if has_request_context():
        ctx = _request_ctx_stack.top
        if not hasattr(ctx, 'user'):
            current_app.login_manager._load_user()
    return getattr(_request_ctx_stack.top, 'user', None)
def _cookie_digest(payload, key=None):
    """Return the hex SHA-512 HMAC of *payload* under *key* (defaults to
    the app's secret key)."""
    mac = hmac.new(_secret_key(key), payload.encode('utf-8'), sha512)
    return mac.hexdigest()
def _get_remote_addr():
    """Best-effort client address: the first entry of ``X-Forwarded-For``
    when present, else the socket's remote address; may be ``None``."""
    address = request.headers.get('X-Forwarded-For', request.remote_addr)
    if address is None:
        return None
    # An 'X-Forwarded-For' header carries a comma separated list of
    # addresses; the first one is the originating client.
    return address.encode('utf-8').split(b',')[0].strip()
def _create_identifier():
    """Derive a stable identifier for this client from its remote address
    and user agent (SHA-512 hex digest)."""
    agent = request.headers.get('User-Agent')
    if agent is not None:
        agent = agent.encode('utf-8')
    base = '{0}|{1}'.format(_get_remote_addr(), agent)
    if str is bytes:
        base = text_type(base, 'utf-8', errors='replace')  # pragma: no cover
    return sha512(base.encode('utf8')).hexdigest()
def _user_context_processor():
    """Template context processor exposing ``current_user`` to templates."""
    return {'current_user': _get_user()}
def _secret_key(key=None):
    """Return the signing key as bytes, defaulting to the application's
    ``SECRET_KEY`` configuration value."""
    secret = key if key is not None else current_app.config['SECRET_KEY']
    if isinstance(secret, text_type):  # pragma: no cover
        secret = secret.encode('latin1')  # ensure bytes
    return secret
|
dphang/sage | refs/heads/master | dota/parser/__init__.py | 12133432 | |
seem-sky/FrameworkBenchmarks | refs/heads/master | php-fuel/__init__.py | 12133432 | |
CodeWingX/yowsup | refs/heads/master | yowsup/layers/protocol_media/protocolentities/test_message_media.py | 69 | from yowsup.layers.protocol_media.protocolentities.message_media import MediaMessageProtocolEntity
from yowsup.layers.protocol_messages.protocolentities.test_message import MessageProtocolEntityTest
from yowsup.structs import ProtocolTreeNode
class MediaMessageProtocolEntityTest(MessageProtocolEntityTest):
    """Runs the shared message-entity tests against
    MediaMessageProtocolEntity, using a message node that carries a
    <media type="MEDIA_TYPE"> child."""

    def setUp(self):
        super(MediaMessageProtocolEntityTest, self).setUp()
        self.ProtocolEntity = MediaMessageProtocolEntity
        media_child = ProtocolTreeNode("media", {"type": "MEDIA_TYPE"}, None, None)
        self.node.addChild(media_child)
|
benthomasson/ansible | refs/heads/devel | test/units/modules/network/aruba/aruba_module.py | 42 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
def set_module_args(args):
    """Install *args* as the argument payload AnsibleModule will parse
    when the module under test instantiates it."""
    serialized = json.dumps({'ANSIBLE_MODULE_ARGS': args})
    basic._ANSIBLE_ARGS = to_bytes(serialized)
# Directory holding the device-output fixtures used by these tests.
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
# Cache of already-loaded fixtures, keyed by absolute path.
fixture_data = {}
def load_fixture(name):
    """Load fixture *name* from ``fixture_path``, caching the result.

    The file content is parsed as JSON when possible; otherwise the raw
    text is returned.

    :param name: bare fixture file name
    :return: parsed JSON object, or the raw file text
    """
    path = os.path.join(fixture_path, name)

    if path in fixture_data:
        return fixture_data[path]

    with open(path) as f:
        data = f.read()

    try:
        data = json.loads(data)
    except ValueError:
        # Not JSON (json.loads raises ValueError/JSONDecodeError for
        # malformed input); keep the raw text.  The previous bare
        # ``except:`` silently swallowed every exception type.
        pass

    fixture_data[path] = data
    return data
class AnsibleExitJson(Exception):
    """Raised in place of ``exit_json`` so tests can intercept results."""
class AnsibleFailJson(Exception):
    """Raised in place of ``fail_json`` so tests can intercept failures."""
class TestArubaModule(unittest.TestCase):
    """Base class for Aruba network-module tests.

    Subclasses are expected to set ``self.module`` to the Ansible module
    under test and to override ``load_fixtures`` to stub device responses.
    """

    def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
        """Run the module and assert its failed/changed state and the
        commands it produced.

        :param failed: expect the module to call fail_json
        :param changed: expected value of the result's 'changed' flag
        :param commands: expected device commands (order-insensitive
            when `sort` is true)
        :param defaults: accepted for interface parity with sibling test
            bases; unused here
        :return: the result dict captured from the module
        """
        self.load_fixtures(commands)

        if failed:
            result = self.failed()
            self.assertTrue(result['failed'], result)
        else:
            result = self.changed(changed)
            self.assertEqual(result['changed'], changed, result)

        if commands is not None:
            if sort:
                self.assertEqual(sorted(commands), sorted(result['commands']), result['commands'])
            else:
                self.assertEqual(commands, result['commands'], result['commands'])

        return result

    def failed(self):
        """Run the module expecting failure; return the fail_json kwargs."""
        def fail_json(*args, **kwargs):
            kwargs['failed'] = True
            raise AnsibleFailJson(kwargs)

        # Patch fail_json to raise so the module's exit is observable.
        with patch.object(basic.AnsibleModule, 'fail_json', fail_json):
            with self.assertRaises(AnsibleFailJson) as exc:
                self.module.main()

        result = exc.exception.args[0]
        self.assertTrue(result['failed'], result)
        return result

    def changed(self, changed=False):
        """Run the module expecting success; return the exit_json kwargs."""
        def exit_json(*args, **kwargs):
            if 'changed' not in kwargs:
                kwargs['changed'] = False
            raise AnsibleExitJson(kwargs)

        # Patch exit_json to raise so the module's exit is observable.
        with patch.object(basic.AnsibleModule, 'exit_json', exit_json):
            with self.assertRaises(AnsibleExitJson) as exc:
                self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], changed, result)
        return result

    def load_fixtures(self, commands=None):
        """Hook for subclasses to install device-output fixtures."""
        pass
|
wezhang/vim-setup | refs/heads/master | bundle/python-mode/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/format.py | 17 | # Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Python code format's checker.
By default try to follow Guido's style guide :
http://www.python.org/doc/essays/styleguide.html
Some parts of the process_token method is based from The Tab Nanny std module.
"""
import keyword
import sys
import tokenize
# This checker drives the tokenize module directly; bail out early on
# interpreters whose tokenize predates the NL token.
if not hasattr(tokenize, 'NL'):
    raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")

from astroid import nodes

from pylint.interfaces import ITokenChecker, IAstroidChecker, IRawChecker
from pylint.checkers import BaseTokenChecker
from pylint.checkers.utils import check_messages
from pylint.utils import WarningScope, OPTION_RGX

# Keywords that may open an indented block from a continued line.
_CONTINUATION_BLOCK_OPENERS = ['elif', 'except', 'for', 'if', 'while', 'def', 'class']
# Keyword tokens this checker treats specially.
_KEYWORD_TOKENS = ['assert', 'del', 'elif', 'except', 'for', 'if', 'in', 'not',
                   'raise', 'return', 'while', 'yield']
if sys.version_info < (3, 0):
    # 'print' is a keyword only on Python 2.
    _KEYWORD_TOKENS.append('print')

# Operators expected to carry surrounding whitespace.
_SPACED_OPERATORS = ['==', '<', '>', '!=', '<>', '<=', '>=',
                     '+=', '-=', '*=', '**=', '/=', '//=', '&=', '|=', '^=',
                     '%=', '>>=', '<<=']
_OPENING_BRACKETS = ['(', '[', '{']
_CLOSING_BRACKETS = [')', ']', '}']
# A tab counts as this many columns when measuring indentation.
_TAB_LENGTH = 8

# Token types that end a physical line.
_EOL = frozenset([tokenize.NEWLINE, tokenize.NL, tokenize.COMMENT])
_JUNK_TOKENS = (tokenize.COMMENT, tokenize.NL)

# Whitespace checking policy constants
_MUST = 0
_MUST_NOT = 1
_IGNORE = 2

# Whitespace checking config constants
_DICT_SEPARATOR = 'dict-separator'
_TRAILING_COMMA = 'trailing-comma'
_NO_SPACE_CHECK_CHOICES = [_TRAILING_COMMA, _DICT_SEPARATOR]
# Message definitions for this checker: id -> (template, symbol, help[, extra]).
MSGS = {
    'C0301': ('Line too long (%s/%s)',
              'line-too-long',
              'Used when a line is longer than a given number of characters.'),
    'C0302': ('Too many lines in module (%s)', # was W0302
              'too-many-lines',
              'Used when a module has too much lines, reducing its readability.'
             ),
    'C0303': ('Trailing whitespace',
              'trailing-whitespace',
              'Used when there is whitespace between the end of a line and the '
              'newline.'),
    'C0304': ('Final newline missing',
              'missing-final-newline',
              'Used when the last line in a file is missing a newline.'),
    'W0311': ('Bad indentation. Found %s %s, expected %s',
              'bad-indentation',
              'Used when an unexpected number of indentation\'s tabulations or '
              'spaces has been found.'),
    'C0330': ('Wrong %s indentation%s.\n%s%s',
              'bad-continuation',
              'TODO'),
    'W0312': ('Found indentation with %ss instead of %ss',
              'mixed-indentation',
              'Used when there are some mixed tabs and spaces in a module.'),
    'W0301': ('Unnecessary semicolon', # was W0106
              'unnecessary-semicolon',
              'Used when a statement is ended by a semi-colon (";"), which \
isn\'t necessary (that\'s python, not C ;).'),
    'C0321': ('More than one statement on a single line',
              'multiple-statements',
              'Used when more than on statement are found on the same line.',
              {'scope': WarningScope.NODE}),
    'C0325' : ('Unnecessary parens after %r keyword',
               'superfluous-parens',
               'Used when a single item in parentheses follows an if, for, or '
               'other keyword.'),
    'C0326': ('%s space %s %s %s\n%s',
              'bad-whitespace',
              ('Used when a wrong number of spaces is used around an operator, '
               'bracket or block opener.'),
              {'old_names': [('C0323', 'no-space-after-operator'),
                             ('C0324', 'no-space-after-comma'),
                             ('C0322', 'no-space-before-operator')]})
    }

if sys.version_info < (3, 0):
    # Checks that only make sense for Python-2 syntax.
    MSGS.update({
        'W0331': ('Use of the <> operator',
                  'old-ne-operator',
                  'Used when the deprecated "<>" operator is used instead \
of "!=".'),
        'W0332': ('Use of "l" as long integer identifier',
                  'lowercase-l-suffix',
                  'Used when a lower case "l" is used to mark a long integer. You '
                  'should use a upper case "L" since the letter "l" looks too much '
                  'like the digit "1"'),
        'W0333': ('Use of the `` operator',
                  'backtick',
                  'Used when the deprecated "``" (backtick) operator is used '
                  'instead  of the str() function.',
                  {'scope': WarningScope.NODE}),
        })
def _underline_token(token):
length = token[3][1] - token[2][1]
offset = token[2][1]
return token[4] + (' ' * offset) + ('^' * length)
def _column_distance(token1, token2):
if token1 == token2:
return 0
if token2[3] < token1[3]:
token1, token2 = token2, token1
if token1[3][0] != token2[2][0]:
return None
return token2[2][1] - token1[3][1]
def _last_token_on_line_is(tokens, line_end, token):
return (
line_end > 0 and tokens.token(line_end-1) == token or
line_end > 1 and tokens.token(line_end-2) == token
and tokens.type(line_end-1) == tokenize.COMMENT)
def _token_followed_by_eol(tokens, position):
return (tokens.type(position+1) == tokenize.NL or
tokens.type(position+1) == tokenize.COMMENT and
tokens.type(position+2) == tokenize.NL)
def _get_indent_length(line):
"""Return the length of the indentation on the given token's line."""
result = 0
for char in line:
if char == ' ':
result += 1
elif char == '\t':
result += _TAB_LENGTH
else:
break
return result
def _get_indent_hint_line(bar_positions, bad_position):
"""Return a line with |s for each of the positions in the given lists."""
if not bar_positions:
return ''
markers = [(pos, '|') for pos in bar_positions]
markers.append((bad_position, '^'))
markers.sort()
line = [' '] * (markers[-1][0] + 1)
for position, marker in markers:
line[position] = marker
return ''.join(line)
class _ContinuedIndent(object):
__slots__ = ('valid_outdent_offsets',
'valid_continuation_offsets',
'context_type',
'token',
'position')
def __init__(self,
context_type,
token,
position,
valid_outdent_offsets,
valid_continuation_offsets):
self.valid_outdent_offsets = valid_outdent_offsets
self.valid_continuation_offsets = valid_continuation_offsets
self.context_type = context_type
self.position = position
self.token = token
# The contexts for hanging indents.
# A hanging indented dictionary value after :
HANGING_DICT_VALUE = 'dict-value'
# Hanging indentation in an expression.
HANGING = 'hanging'
# Hanging indentation in a block header.
HANGING_BLOCK = 'hanging-block'
# Continued indentation inside an expression.
CONTINUED = 'continued'
# Continued indentation in a block header.
CONTINUED_BLOCK = 'continued-block'

# Values stored by _BeforeBlockOffsets to distinguish offsets valid for
# single-line statements from those valid for multi-line bodies.
SINGLE_LINE = 'single'
WITH_BODY = 'multi'

# Human-readable (kind, qualifier) fragments for bad-continuation messages.
_CONTINUATION_MSG_PARTS = {
    HANGING_DICT_VALUE: ('hanging', ' in dict value'),
    HANGING: ('hanging', ''),
    HANGING_BLOCK: ('hanging', ' before block'),
    CONTINUED: ('continued', ''),
    CONTINUED_BLOCK: ('continued', ' before block'),
    }
def _Offsets(*args):
"""Valid indentation offsets for a continued line."""
return dict((a, None) for a in args)
def _BeforeBlockOffsets(single, with_body):
    """Valid alternative indent offsets for continued lines before blocks.

    :param single: Valid offset for statements on a single logical line.
    :param with_body: Valid offset for statements on several lines.
    """
    offsets = {}
    offsets[single] = SINGLE_LINE
    offsets[with_body] = WITH_BODY
    return offsets
class TokenWrapper(object):
    """Readable accessors over a raw list of 5-tuples produced by the
    ``tokenize`` module."""

    def __init__(self, tokens):
        self._raw_tokens = tokens

    def token(self, idx):
        """The token's string content."""
        return self._raw_tokens[idx][1]

    def type(self, idx):
        """The token's tokenize type code."""
        return self._raw_tokens[idx][0]

    def start_line(self, idx):
        """Row where the token starts."""
        return self._raw_tokens[idx][2][0]

    def start_col(self, idx):
        """Column where the token starts."""
        return self._raw_tokens[idx][2][1]

    def line(self, idx):
        """The full physical source line the token appears on."""
        return self._raw_tokens[idx][4]
class ContinuedLineState(object):
    """Tracker for continued indentation inside a logical line.

    Maintains a stack of _ContinuedIndent contexts — one per currently-open
    bracket / lambda / dict colon — and answers which indentation offsets
    are valid for a given token.
    """

    def __init__(self, tokens, config):
        # Index of the first non-junk token on the current physical line,
        # or -1 when none has been seen yet.
        self._line_start = -1
        self._cont_stack = []
        self._is_block_opener = False
        self.retained_warnings = []
        self._config = config
        self._tokens = TokenWrapper(tokens)

    @property
    def has_content(self):
        return bool(self._cont_stack)

    @property
    def _block_indent_size(self):
        return len(self._config.indent_string.replace('\t', ' ' * _TAB_LENGTH))

    @property
    def _continuation_size(self):
        return self._config.indent_after_paren

    def handle_line_start(self, pos):
        """Record the first non-junk token at the start of a line."""
        if self._line_start > -1:
            return
        self._is_block_opener = self._tokens.token(pos) in _CONTINUATION_BLOCK_OPENERS
        self._line_start = pos

    def next_physical_line(self):
        """Prepares the tracker for a new physical line (NL)."""
        self._line_start = -1
        self._is_block_opener = False

    def next_logical_line(self):
        """Prepares the tracker for a new logical line (NEWLINE).

        A new logical line only starts with block indentation.
        """
        self.next_physical_line()
        self.retained_warnings = []
        self._cont_stack = []

    def add_block_warning(self, token_position, state, valid_offsets):
        self.retained_warnings.append((token_position, state, valid_offsets))

    def get_valid_offsets(self, idx):
        """Returns the valid offsets for the token at the given position."""
        # The closing brace on a dict or the 'for' in a dict comprehension may
        # reset two indent levels because the dict value is ended implicitly
        stack_top = -1
        if self._tokens.token(idx) in ('}', 'for') and self._cont_stack[-1].token == ':':
            stack_top = -2
        indent = self._cont_stack[stack_top]
        if self._tokens.token(idx) in _CLOSING_BRACKETS:
            valid_offsets = indent.valid_outdent_offsets
        else:
            valid_offsets = indent.valid_continuation_offsets
        return indent, valid_offsets.copy()

    def _hanging_indent_after_bracket(self, bracket, position):
        """Extracts indentation information for a hanging indent."""
        indentation = _get_indent_length(self._tokens.line(position))
        if self._is_block_opener and self._continuation_size == self._block_indent_size:
            return _ContinuedIndent(
                HANGING_BLOCK,
                bracket,
                position,
                _Offsets(indentation + self._continuation_size, indentation),
                _BeforeBlockOffsets(indentation + self._continuation_size,
                                    indentation + self._continuation_size * 2))
        elif bracket == ':':
            if self._cont_stack[-1].context_type == CONTINUED:
                # If the dict key was on the same line as the open brace, the new
                # correct indent should be relative to the key instead of the
                # current indent level
                paren_align = self._cont_stack[-1].valid_outdent_offsets
                next_align = self._cont_stack[-1].valid_continuation_offsets.copy()
                # BUG FIX: `next_align.keys()[0]` breaks on Python 3 where
                # dict views are not subscriptable; next(iter(...)) fetches
                # the (single) existing offset on both Python 2 and 3.
                next_align[next(iter(next_align)) + self._continuation_size] = True
            else:
                next_align = _Offsets(indentation + self._continuation_size, indentation)
                paren_align = _Offsets(indentation + self._continuation_size, indentation)
            return _ContinuedIndent(HANGING_DICT_VALUE, bracket, position, paren_align, next_align)
        else:
            return _ContinuedIndent(
                HANGING,
                bracket,
                position,
                _Offsets(indentation, indentation + self._continuation_size),
                _Offsets(indentation + self._continuation_size))

    def _continuation_inside_bracket(self, bracket, pos):
        """Extracts indentation information for a continued indent."""
        indentation = _get_indent_length(self._tokens.line(pos))
        if self._is_block_opener and self._tokens.start_col(pos+1) - indentation == self._block_indent_size:
            return _ContinuedIndent(
                CONTINUED_BLOCK,
                bracket,
                pos,
                _Offsets(self._tokens.start_col(pos)),
                _BeforeBlockOffsets(self._tokens.start_col(pos+1),
                                    self._tokens.start_col(pos+1) + self._continuation_size))
        else:
            return _ContinuedIndent(
                CONTINUED,
                bracket,
                pos,
                _Offsets(self._tokens.start_col(pos)),
                _Offsets(self._tokens.start_col(pos+1)))

    def pop_token(self):
        self._cont_stack.pop()

    def push_token(self, token, position):
        """Pushes a new token for continued indentation on the stack.

        Tokens that can modify continued indentation offsets are:
          * opening brackets
          * 'lambda'
          * : inside dictionaries

        push_token relies on the caller to filter out those
        interesting tokens.

        :param token: The concrete token
        :param position: The position of the token in the stream.
        """
        if _token_followed_by_eol(self._tokens, position):
            self._cont_stack.append(
                self._hanging_indent_after_bracket(token, position))
        else:
            self._cont_stack.append(
                self._continuation_inside_bracket(token, position))
class FormatChecker(BaseTokenChecker):
    """checks for :
    * unauthorized constructions
    * strict indentation
    * line length
    * use of <> instead of !=
    """
    __implements__ = (ITokenChecker, IAstroidChecker, IRawChecker)
    # configuration section name
    name = 'format'
    # messages
    msgs = MSGS
    # configuration options
    # for available dict keys/values see the optik parser 'add_option' method
    options = (('max-line-length',
                {'default' : 80, 'type' : "int", 'metavar' : '<int>',
                 'help' : 'Maximum number of characters on a single line.'}),
               ('ignore-long-lines',
                {'type': 'regexp', 'metavar': '<regexp>',
                 'default': r'^\s*(# )?<?https?://\S+>?$',
                 'help': ('Regexp for a line that is allowed to be longer than '
                          'the limit.')}),
               ('single-line-if-stmt',
                {'default': False, 'type' : 'yn', 'metavar' : '<y_or_n>',
                 'help' : ('Allow the body of an if to be on the same '
                           'line as the test if there is no else.')}),
               ('no-space-check',
                {'default': ','.join(_NO_SPACE_CHECK_CHOICES),
                 'type': 'multiple_choice',
                 'choices': _NO_SPACE_CHECK_CHOICES,
                 'help': ('List of optional constructs for which whitespace '
                          'checking is disabled')}),
               ('max-module-lines',
                {'default' : 1000, 'type' : 'int', 'metavar' : '<int>',
                 'help': 'Maximum number of lines in a module'}
                ),
               ('indent-string',
                {'default' : '    ', 'type' : "string", 'metavar' : '<string>',
                 'help' : 'String used as indentation unit. This is usually \
"    " (4 spaces) or "\\t" (1 tab).'}),
               ('indent-after-paren',
                {'type': 'int', 'metavar': '<int>', 'default': 4,
                 'help': 'Number of spaces of indent required inside a hanging '
                         ' or continued line.'}),
               )
    def __init__(self, linter=None):
        BaseTokenChecker.__init__(self, linter)
        # Per-module state, (re)initialized in process_tokens:
        # _lines maps line numbers to their source text,
        # _visited_lines tracks lines already checked by visit_default.
        self._lines = None
        self._visited_lines = None
        # Stack of currently open bracket tokens; [None] marks the bottom.
        self._bracket_stack = [None]
    def _pop_token(self):
        # Keep the bracket stack and the continued-line state in sync:
        # a closing token ends both the bracket scope and its
        # continuation-indent context.
        self._bracket_stack.pop()
        self._current_line.pop_token()
    def _push_token(self, token, idx):
        # Mirror of _pop_token: record an opening token both for bracket
        # tracking and for continuation-indent checking.
        self._bracket_stack.append(token)
        self._current_line.push_token(token, idx)
    def new_line(self, tokens, line_end, line_start):
        """a new line has been encountered, process it if necessary"""
        # A semicolon ending a line is always unnecessary.
        if _last_token_on_line_is(tokens, line_end, ';'):
            self.add_message('unnecessary-semicolon', line=tokens.start_line(line_end))
        line_num = tokens.start_line(line_start)
        line = tokens.line(line_start)
        # Remember the source text of meaningful lines for visit_default;
        # only the first physical line of a multi-line token is kept.
        if tokens.type(line_start) not in _JUNK_TOKENS:
            self._lines[line_num] = line.split('\n')[0]
        self.check_lines(line, line_num)
    def process_module(self, module):
        """Record whether 'print' is a function in this module.

        With `from __future__ import print_function`, parentheses after
        `print` are a call and must not be flagged as superfluous.
        """
        self._keywords_with_parens = set()
        if 'print_function' in module.future_imports:
            self._keywords_with_parens.add('print')
    def _check_keyword_parentheses(self, tokens, start):
        """Check that there are not unnecessary parens after a keyword.
        Parens are unnecessary if there is exactly one balanced outer pair on a
        line, and it is followed by a colon, and contains no commas (i.e. is not a
        tuple).
        Args:
        tokens: list of Tokens; the entire list of Tokens.
        start: int; the position of the keyword in the token list.
        """
        # If the next token is not a paren, we're fine.
        if self._inside_brackets(':') and tokens[start][1] == 'for':
            self._pop_token()
        if tokens[start+1][1] != '(':
            return
        found_and_or = False
        depth = 0
        keyword_token = tokens[start][1]
        line_num = tokens[start][2][0]
        # Scan forward, tracking paren depth, until the outer pair closes.
        for i in xrange(start, len(tokens) - 1):
            token = tokens[i]
            # If we hit a newline, then assume any parens were for continuation.
            if token[0] == tokenize.NL:
                return
            if token[1] == '(':
                depth += 1
            elif token[1] == ')':
                depth -= 1
                if not depth:
                    # ')' can't happen after if (foo), since it would be a syntax error.
                    if (tokens[i+1][1] in (':', ')', ']', '}', 'in') or
                        tokens[i+1][0] in (tokenize.NEWLINE, tokenize.ENDMARKER,
                                           tokenize.COMMENT)):
                        # The empty tuple () is always accepted.
                        if i == start + 2:
                            return
                        if keyword_token == 'not':
                            if not found_and_or:
                                self.add_message('superfluous-parens', line=line_num,
                                                 args=keyword_token)
                        elif keyword_token in ('return', 'yield'):
                            self.add_message('superfluous-parens', line=line_num,
                                             args=keyword_token)
                        elif keyword_token not in self._keywords_with_parens:
                            if not (tokens[i+1][1] == 'in' and found_and_or):
                                self.add_message('superfluous-parens', line=line_num,
                                                 args=keyword_token)
                    return
            elif depth == 1:
                # This is a tuple, which is always acceptable.
                if token[1] == ',':
                    return
                # 'and' and 'or' are the only boolean operators with lower precedence
                # than 'not', so parens are only required when they are found.
                elif token[1] in ('and', 'or'):
                    found_and_or = True
                # A yield inside an expression must always be in parentheses,
                # quit early without error.
                elif token[1] == 'yield':
                    return
                # A generator expression always has a 'for' token in it, and
                # the 'for' token is only legal inside parens when it is in a
                # generator expression. The parens are necessary here, so bail
                # without an error.
                elif token[1] == 'for':
                    return
    def _opening_bracket(self, tokens, i):
        """Handle an opening bracket: record it and check spacing around it."""
        self._push_token(tokens[i][1], i)
        # Special case: ignore slices
        if tokens[i][1] == '[' and tokens[i+1][1] == ':':
            return
        # A bracket directly after a name (call/index) or after a closing
        # bracket must not be preceded by a space; otherwise spacing before
        # it is unconstrained.
        if (i > 0 and (tokens[i-1][0] == tokenize.NAME and
                       not (keyword.iskeyword(tokens[i-1][1]))
                       or tokens[i-1][1] in _CLOSING_BRACKETS)):
            self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
        else:
            self._check_space(tokens, i, (_IGNORE, _MUST_NOT))
    def _closing_bracket(self, tokens, i):
        """Handle a closing bracket: unwind state and check spacing."""
        # A pending ':' context (dict value) ends with the enclosing bracket.
        if self._inside_brackets(':'):
            self._pop_token()
        self._pop_token()
        # Special case: ignore slices
        if tokens[i-1][1] == ':' and tokens[i][1] == ']':
            return
        policy_before = _MUST_NOT
        # Optionally tolerate a space after a trailing comma.
        if tokens[i][1] in _CLOSING_BRACKETS and tokens[i-1][1] == ',':
            if _TRAILING_COMMA in self.config.no_space_check:
                policy_before = _IGNORE
        self._check_space(tokens, i, (policy_before, _IGNORE))
def _check_equals_spacing(self, tokens, i):
"""Check the spacing of a single equals sign."""
if self._inside_brackets('(') or self._inside_brackets('lambda'):
self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
else:
self._check_space(tokens, i, (_MUST, _MUST))
    def _open_lambda(self, tokens, i): # pylint:disable=unused-argument
        # 'lambda' opens an implicit argument-list scope in which '=' for
        # default values must not be surrounded by spaces.
        self._push_token('lambda', i)
    def _handle_colon(self, tokens, i):
        """Check spacing around ':' and maintain lambda/dict state."""
        # Special case: ignore slices
        if self._inside_brackets('['):
            return
        if (self._inside_brackets('{') and
            _DICT_SEPARATOR in self.config.no_space_check):
            policy = (_IGNORE, _IGNORE)
        else:
            policy = (_MUST_NOT, _MUST)
        self._check_space(tokens, i, policy)
        # A ':' closes a lambda's argument list, or opens a dict value.
        if self._inside_brackets('lambda'):
            self._pop_token()
        elif self._inside_brackets('{'):
            self._push_token(':', i)
    def _handle_comma(self, tokens, i):
        """Check spacing around ',' and close a pending dict-value context."""
        # Only require a following whitespace if this is
        # not a hanging comma before a closing bracket.
        if tokens[i+1][1] in _CLOSING_BRACKETS:
            self._check_space(tokens, i, (_MUST_NOT, _IGNORE))
        else:
            self._check_space(tokens, i, (_MUST_NOT, _MUST))
        # A comma ends the current dict value started by ':'.
        if self._inside_brackets(':'):
            self._pop_token()
    def _check_surrounded_by_space(self, tokens, i):
        """Check that a binary operator is surrounded by exactly one space."""
        self._check_space(tokens, i, (_MUST, _MUST))
    def _check_space(self, tokens, i, policies):
        """Check whitespace before and after tokens[i].

        policies is a (before, after) pair of _MUST / _MUST_NOT / _IGNORE
        constants; a 'bad-whitespace' message is emitted for each violated
        side (or one 'around' message when both sides violate the same
        policy).
        """
        def _policy_string(policy):
            # Human-readable fragments for the message text.
            if policy == _MUST:
                return 'Exactly one', 'required'
            else:
                return 'No', 'allowed'
        def _name_construct(token):
            # Describe the syntactic construct for the message text.
            if tokens[i][1] == ',':
                return 'comma'
            elif tokens[i][1] == ':':
                return ':'
            elif tokens[i][1] in '()[]{}':
                return 'bracket'
            elif tokens[i][1] in ('<', '>', '<=', '>=', '!=', '=='):
                return 'comparison'
            else:
                if self._inside_brackets('('):
                    return 'keyword argument assignment'
                else:
                    return 'assignment'
        good_space = [True, True]
        pairs = [(tokens[i-1], tokens[i]), (tokens[i], tokens[i+1])]
        for other_idx, (policy, token_pair) in enumerate(zip(policies, pairs)):
            # Skip the check when the neighbour is an end-of-line token or
            # the policy does not care about this side.
            if token_pair[other_idx][0] in _EOL or policy == _IGNORE:
                continue
            distance = _column_distance(*token_pair)
            if distance is None:
                continue
            good_space[other_idx] = (
                (policy == _MUST and distance == 1) or
                (policy == _MUST_NOT and distance == 0))
        warnings = []
        if not any(good_space) and policies[0] == policies[1]:
            # Both sides wrong with the same policy: one 'around' message.
            warnings.append((policies[0], 'around'))
        else:
            for ok, policy, position in zip(good_space, policies, ('before', 'after')):
                if not ok:
                    warnings.append((policy, position))
        for policy, position in warnings:
            construct = _name_construct(tokens[i])
            count, state = _policy_string(policy)
            self.add_message('bad-whitespace', line=tokens[i][2][0],
                             args=(count, state, position, construct,
                                   _underline_token(tokens[i])))
    def _inside_brackets(self, left):
        # True when the innermost open construct is `left`
        # (e.g. '(', '{', 'lambda' or ':' for a dict value).
        return self._bracket_stack[-1] == left
    def _handle_old_ne_operator(self, tokens, i):
        # '<>' is the deprecated Python 2 spelling of '!='.
        if tokens[i][1] == '<>':
            self.add_message('old-ne-operator', line=tokens[i][2][0])
def _prepare_token_dispatcher(self):
raw = [
(_KEYWORD_TOKENS,
self._check_keyword_parentheses),
(_OPENING_BRACKETS, self._opening_bracket),
(_CLOSING_BRACKETS, self._closing_bracket),
(['='], self._check_equals_spacing),
(_SPACED_OPERATORS, self._check_surrounded_by_space),
([','], self._handle_comma),
([':'], self._handle_colon),
(['lambda'], self._open_lambda),
(['<>'], self._handle_old_ne_operator),
]
dispatch = {}
for tokens, handler in raw:
for token in tokens:
dispatch[token] = handler
return dispatch
    def process_tokens(self, tokens):
        """process tokens and search for :
        _ non strict indentation (i.e. not always using the <indent> parameter as
        indent unit)
        _ too long lines (i.e. longer than <max_chars>)
        _ optionally bad construct (if given, bad_construct must be a compiled
        regular expression).
        """
        # Reset all per-module state; this is a single pass over the token
        # stream, so ordering of the branches below matters.
        self._bracket_stack = [None]
        indents = [0]
        check_equal = False
        line_num = 0
        self._lines = {}
        self._visited_lines = {}
        token_handlers = self._prepare_token_dispatcher()
        self._current_line = ContinuedLineState(tokens, self.config)
        for idx, (tok_type, token, start, _, line) in enumerate(tokens):
            if start[0] != line_num:
                line_num = start[0]
                # A tokenizer oddity: if an indented line contains a multi-line
                # docstring, the line member of the INDENT token does not contain
                # the full line; therefore we check the next token on the line.
                if tok_type == tokenize.INDENT:
                    self.new_line(TokenWrapper(tokens), idx-1, idx+1)
                else:
                    self.new_line(TokenWrapper(tokens), idx-1, idx)
            if tok_type == tokenize.NEWLINE:
                # a program statement, or ENDMARKER, will eventually follow,
                # after some (possibly empty) run of tokens of the form
                # (NL | COMMENT)* (INDENT | DEDENT+)?
                # If an INDENT appears, setting check_equal is wrong, and will
                # be undone when we see the INDENT.
                check_equal = True
                self._process_retained_warnings(TokenWrapper(tokens), idx)
                self._current_line.next_logical_line()
            elif tok_type == tokenize.INDENT:
                check_equal = False
                self.check_indent_level(token, indents[-1]+1, line_num)
                indents.append(indents[-1]+1)
            elif tok_type == tokenize.DEDENT:
                # there's nothing we need to check here! what's important is
                # that when the run of DEDENTs ends, the indentation of the
                # program statement (or ENDMARKER) that triggered the run is
                # equal to what's left at the top of the indents stack
                check_equal = True
                if len(indents) > 1:
                    del indents[-1]
            elif tok_type == tokenize.NL:
                # Physical (non-logical) line break: check continuation indent.
                self._check_continued_indentation(TokenWrapper(tokens), idx+1)
                self._current_line.next_physical_line()
            elif tok_type != tokenize.COMMENT:
                self._current_line.handle_line_start(idx)
                # This is the first concrete token following a NEWLINE, so it
                # must be the first token of the next program statement, or an
                # ENDMARKER; the "line" argument exposes the leading whitespace
                # for this statement; in the case of ENDMARKER, line is an empty
                # string, so will properly match the empty string with which the
                # "indents" stack was seeded
                if check_equal:
                    check_equal = False
                    self.check_indent_level(line, indents[-1], line_num)
            if tok_type == tokenize.NUMBER and token.endswith('l'):
                self.add_message('lowercase-l-suffix', line=line_num)
            # Dispatch to a token-specific handler when one is registered.
            try:
                handler = token_handlers[token]
            except KeyError:
                pass
            else:
                handler(tokens, idx)
        line_num -= 1 # to be ok with "wc -l"
        if line_num > self.config.max_module_lines:
            self.add_message('too-many-lines', args=line_num, line=1)
    def _process_retained_warnings(self, tokens, current_pos):
        """Emit continuation warnings delayed until the block opener ended."""
        single_line_block_stmt = not _last_token_on_line_is(tokens, current_pos, ':')
        for indent_pos, state, offsets in self._current_line.retained_warnings:
            block_type = offsets[tokens.start_col(indent_pos)]
            # Keep only the alternative offsets as a hint for the message.
            hints = dict((k, v) for k, v in offsets.iteritems()
                         if v != block_type)
            if single_line_block_stmt and block_type == WITH_BODY:
                self._add_continuation_message(state, hints, tokens, indent_pos)
            elif not single_line_block_stmt and block_type == SINGLE_LINE:
                self._add_continuation_message(state, hints, tokens, indent_pos)
    def _check_continued_indentation(self, tokens, next_idx):
        """Validate the indentation of the continuation line at next_idx."""
        # Do not issue any warnings if the next line is empty.
        if not self._current_line.has_content or tokens.type(next_idx) == tokenize.NL:
            return
        state, valid_offsets = self._current_line.get_valid_offsets(next_idx)
        # Special handling for hanging comments. If the last line ended with a
        # comment and the new line contains only a comment, the line may also be
        # indented to the start of the previous comment.
        if (tokens.type(next_idx) == tokenize.COMMENT and
            tokens.type(next_idx-2) == tokenize.COMMENT):
            valid_offsets[tokens.start_col(next_idx-2)] = True
        # We can only decide if the indentation of a continued line before opening
        # a new block is valid once we know of the body of the block is on the
        # same line as the block opener. Since the token processing is single-pass,
        # emitting those warnings is delayed until the block opener is processed.
        if (state.context_type in (HANGING_BLOCK, CONTINUED_BLOCK)
            and tokens.start_col(next_idx) in valid_offsets):
            self._current_line.add_block_warning(next_idx, state, valid_offsets)
        elif tokens.start_col(next_idx) not in valid_offsets:
            self._add_continuation_message(state, valid_offsets, tokens, next_idx)
    def _add_continuation_message(self, state, offsets, tokens, position):
        """Emit a 'bad-continuation' message with an indentation hint line."""
        readable_type, readable_position = _CONTINUATION_MSG_PARTS[state.context_type]
        hint_line = _get_indent_hint_line(offsets, tokens.start_col(position))
        self.add_message(
            'bad-continuation',
            line=tokens.start_line(position),
            args=(readable_type, readable_position, tokens.line(position), hint_line))
@check_messages('multiple-statements')
def visit_default(self, node):
"""check the node line number and check it if not yet done"""
if not node.is_statement:
return
if not node.root().pure_python:
return # XXX block visit of child nodes
prev_sibl = node.previous_sibling()
if prev_sibl is not None:
prev_line = prev_sibl.fromlineno
else:
# The line on which a finally: occurs in a try/finally
# is not directly represented in the AST. We infer it
# by taking the last line of the body and adding 1, which
# should be the line of finally:
if (isinstance(node.parent, nodes.TryFinally)
and node in node.parent.finalbody):
prev_line = node.parent.body[0].tolineno + 1
else:
prev_line = node.parent.statement().fromlineno
line = node.fromlineno
assert line, node
if prev_line == line and self._visited_lines.get(line) != 2:
self._check_multi_statement_line(node, line)
return
if line in self._visited_lines:
return
try:
tolineno = node.blockstart_tolineno
except AttributeError:
tolineno = node.tolineno
assert tolineno, node
lines = []
for line in xrange(line, tolineno + 1):
self._visited_lines[line] = 1
try:
lines.append(self._lines[line].rstrip())
except KeyError:
lines.append('')
    def _check_multi_statement_line(self, node, line):
        """Check for lines containing multiple statements."""
        # Do not warn about multiple nested context managers
        # in with statements.
        if isinstance(node, nodes.With):
            return
        # For try... except... finally..., the two nodes
        # appear to be on the same line due to how the AST is built.
        if (isinstance(node, nodes.TryExcept) and
            isinstance(node.parent, nodes.TryFinally)):
            return
        # 'if cond: stmt' without an else may be allowed by configuration.
        if (isinstance(node.parent, nodes.If) and not node.parent.orelse
            and self.config.single_line_if_stmt):
            return
        self.add_message('multiple-statements', node=node)
        # Mark the line so the same line is not reported again.
        self._visited_lines[line] = 2
    @check_messages('backtick')
    def visit_backquote(self, node):
        """Flag use of the deprecated backtick (repr) syntax."""
        self.add_message('backtick', node=node)
    def check_lines(self, lines, i):
        """check lines have less than a maximum number of characters
        """
        max_chars = self.config.max_line_length
        ignore_long_line = self.config.ignore_long_lines
        for line in lines.splitlines(True):
            if not line.endswith('\n'):
                self.add_message('missing-final-newline', line=i)
            else:
                stripped_line = line.rstrip()
                if line[len(stripped_line):] not in ('\n', '\r\n'):
                    self.add_message('trailing-whitespace', line=i)
                # Don't count excess whitespace in the line length.
                line = stripped_line
            # A 'pylint: disable' pragma does not count towards the length;
            # measure only the code before the comment in that case.
            mobj = OPTION_RGX.search(line)
            if mobj and mobj.group(1).split('=', 1)[0].strip() == 'disable':
                line = line.split('#')[0].rstrip()
            if len(line) > max_chars and not ignore_long_line.search(line):
                self.add_message('line-too-long', line=i, args=(len(line), max_chars))
            i += 1
    def check_indent_level(self, string, expected, line_num):
        """return the indent level of the string
        """
        indent = self.config.indent_string
        if indent == '\\t': # \t is not interpreted in the configuration file
            indent = '\t'
        level = 0
        unit_size = len(indent)
        # Count how many whole indentation units prefix the string.
        while string[:unit_size] == indent:
            string = string[unit_size:]
            level += 1
        suppl = ''
        # Collect leftover whitespace; mixing tabs and spaces is an error.
        while string and string[0] in ' \t':
            if string[0] != indent[0]:
                if string[0] == '\t':
                    args = ('tab', 'space')
                else:
                    args = ('space', 'tab')
                self.add_message('mixed-indentation', args=args, line=line_num)
                return level
            suppl += string[0]
            string = string[1:]
        if level != expected or suppl:
            i_type = 'spaces'
            if indent[0] == '\t':
                i_type = 'tabs'
            self.add_message('bad-indentation', line=line_num,
                             args=(level * unit_size + len(suppl), i_type,
                                   expected * unit_size))
def register(linter):
    """required method to auto register this checker """
    # Called by pylint's plugin machinery when this module is loaded.
    linter.register_checker(FormatChecker(linter))
|
tiagocoutinho/bliss | refs/heads/master | scripts/TestBench.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the bliss project
#
# Copyright (c) 2016 Beamline Control Unit, ESRF
# Distributed under the GNU LGPLv3. See LICENSE for more info.
# Simple python script to test bliss and beacon
#
# needs a ". blissrc"
from bliss.config import static
# Load the beacon-backed static configuration and look up the object
# named "ra" defined there (presumably a motor axis -- TODO confirm).
cfg = static.get_config()
ra = cfg.get("ra")
# Python 2 print statement: report the axis velocity.
print "ra velocity : %g" % ra.velocity()
|
liamgh/liamgreenhughes-sl4a-tf101 | refs/heads/master | python/src/Demo/parser/test_parser.py | 41 | #! /usr/bin/env python
# (Force the script to use the latest build.)
#
# test_parser.py
import parser, traceback
_numFailed = 0
def testChunk(t, fileName):
    """Round-trip *t* through parser.suite/ast2tuple/tuple2ast and
    report a failure (incrementing the global counter) on mismatch."""
    global _numFailed
    print '----', fileName,
    try:
        ast = parser.suite(t)
        tup = parser.ast2tuple(ast)
        # this discards the first AST; a huge memory savings when running
        # against a large source file like Tkinter.py.
        ast = None
        new = parser.tuple2ast(tup)
    except parser.ParserError, err:
        print
        print 'parser module raised exception on input file', fileName + ':'
        traceback.print_exc()
        _numFailed = _numFailed + 1
    else:
        # The tuple form of the re-built AST must match the original.
        if tup != parser.ast2tuple(new):
            print
            print 'parser module failed on input file', fileName
            _numFailed = _numFailed + 1
        else:
            print 'o.k.'
def testFile(fileName):
    """Run the parser round-trip test on the contents of one file."""
    # Use a with-statement so the file handle is closed deterministically
    # instead of relying on garbage collection.
    with open(fileName) as f:
        t = f.read()
    testChunk(t, fileName)
def test():
    """Test every file named on the command line, or all *.py files in
    the current directory, and exit non-zero if any test failed."""
    import sys
    args = sys.argv[1:]
    if not args:
        import glob
        args = glob.glob("*.py")
        args.sort()
    # Python 2: map() runs testFile for its side effects.
    map(testFile, args)
    sys.exit(_numFailed != 0)
# Run the parser round-trip tests when invoked as a script.
if __name__ == '__main__':
    test()
|
XiaosongWei/crosswalk-test-suite | refs/heads/master | misc/sampleapp-android-tests/sampleapp/spacedodgegame_stop.py | 3 | #!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Wang, Hongjuan<hongjuanx.wang@intel.com>
import unittest
import os
import sys
import commands
import comm
import time
class TestSampleAppFunctions(unittest.TestCase):
    """Functional test: force-stop the Spacedodgegame Crosswalk app via adb."""
    def test_stop(self):
        # comm.setUp() selects the adb device used below.
        comm.setUp()
        app_name = "Spacedodgegame"
        # Find whether the app have launched
        cmdacti = "adb -s " + comm.device + \
            " shell dumpsys activity activities | grep org.xwalk.%s" % \
            app_name.lower()
        launched = commands.getstatusoutput(cmdacti)
        if launched[0] != 0:
            # grep found no matching activity: launch the app first so
            # that force-stop has something to stop.
            print "Stop APK ---------------->%s App haven't launched, "\
                "need to launch it!" % app_name
            cmdstart = "adb -s " + comm.device + " shell am start -n "\
                "org.xwalk.%s/.%sActivity" % \
                (app_name.lower(), app_name)
            comm.app_launch(cmdstart, self)
            time.sleep(1)
        cmdstop = "adb -s " + comm.device + \
            " shell am force-stop org.xwalk.%s" % app_name.lower()
        comm.app_stop(cmdstop, self)
# Run the sample-app stop test when invoked directly.
if __name__ == '__main__':
    unittest.main()
|
mihaip/readerisdead | refs/heads/master | third_party/web/test.py | 118 | """test utilities
(part of web.py)
"""
import unittest
import sys, os
import web
TestCase = unittest.TestCase
TestSuite = unittest.TestSuite
def load_modules(names):
    """Import each named module and return the list of module objects."""
    modules = []
    for name in names:
        # A non-empty fromlist ("x") makes __import__ return the named
        # (possibly dotted) module itself rather than its top package.
        modules.append(__import__(name, None, None, "x"))
    return modules
def module_suite(module, classnames=None):
    """Makes a suite from a module.

    When *classnames* is given, only those named tests are loaded; a
    module-defined suite() factory takes precedence otherwise.
    """
    if classnames:
        return unittest.TestLoader().loadTestsFromNames(classnames, module)
    elif hasattr(module, 'suite'):
        return module.suite()
    else:
        return unittest.TestLoader().loadTestsFromModule(module)
def doctest_suite(module_names):
    """Makes a test suite from doctests."""
    import doctest
    suite = TestSuite()
    for mod in load_modules(module_names):
        suite.addTest(doctest.DocTestSuite(mod))
    return suite
def suite(module_names):
    """Creates a suite from multiple modules."""
    # Note: the local variable shadows this function's name on purpose.
    suite = TestSuite()
    for mod in load_modules(module_names):
        suite.addTest(module_suite(mod))
    return suite
def runTests(suite):
    """Run *suite* with a text runner and return the TestResult."""
    return unittest.TextTestRunner().run(suite)
def main(suite=None):
    """Run *suite* (or tests discovered in __main__) and exit accordingly."""
    if not suite:
        main_module = __import__('__main__')
        # allow command line switches
        args = [a for a in sys.argv[1:] if not a.startswith('-')]
        suite = module_suite(main_module, args or None)
    result = runTests(suite)
    # Exit status 0 on success, 1 on any failure.
    sys.exit(not result.wasSuccessful())
|
kinsamanka/machinekit | refs/heads/master | lib/python/machinekit/launcher.py | 10 | import os
import sys
from time import *
import subprocess
import signal
from machinekit import compat
_processes = []
_realtimeStarted = False
_exiting = False
# ends a running Machinekit session
def end_session():
    """Stop all registered processes and, if we started it, realtime."""
    stop_processes()
    if _realtimeStarted: # Stop realtime only when explicitely started
        stop_realtime()
# checks wheter a single command is available or not
def check_command(command):
    """Exit with an error if *command* is not found on PATH (via which)."""
    process = subprocess.Popen('which ' + command, stdout=subprocess.PIPE,
                               shell=True)
    process.wait()
    if process.returncode != 0:
        print((command + ' not found, check Machinekit installation'))
        sys.exit(1)
# checks the whole Machinekit installation
def check_installation():
    """Verify that every required Machinekit command-line tool is on PATH."""
    for tool in ('realtime', 'configserver', 'halcmd', 'haltalk', 'webtalk'):
        check_command(tool)
# checks for a running session and cleans it up if necessary
def cleanup_session():
    """Find leftover Machinekit processes from a previous session and kill them."""
    pids = []
    commands = ['configserver', 'halcmd', 'haltalk', 'webtalk', 'rtapi']
    process = subprocess.Popen(['ps', '-A'], stdout=subprocess.PIPE)
    out, _ = process.communicate()
    for line in out.splitlines():
        for command in commands:
            if command in line:
                # First column of ps output is the pid.
                pid = int(line.split(None, 1)[0])
                pids.append(pid)
    if pids != []:
        stop_realtime()
        sys.stdout.write("cleaning up leftover session... ")
        sys.stdout.flush()
        for pid in pids:
            # NOTE(review): killpg is given a pid, which only works when
            # the pid is also a process-group leader -- verify.
            try:
                os.killpg(pid, signal.SIGTERM)
            except OSError:
                # Process already gone; nothing to do.
                pass
        sys.stdout.write('done\n')
# starts a command, waits for termination and checks the output
def check_process(command):
    """Run *command* to completion, raising CalledProcessError on failure."""
    sys.stdout.write("running " + command.split(None, 1)[0] + "... ")
    sys.stdout.flush()
    subprocess.check_call(command, shell=True)
    sys.stdout.write('done\n')
# starts and registers a process
def start_process(command, check=True, wait=1.0):
    """Start *command* in its own process group and register it.

    With check=True, wait *wait* seconds and raise CalledProcessError
    if the process already exited.
    """
    sys.stdout.write("starting " + command.split(None, 1)[0] + "... ")
    sys.stdout.flush()
    # setsid puts the child in a new process group so killpg can later
    # terminate it together with any of its own children.
    process = subprocess.Popen(command, shell=True, preexec_fn=os.setsid)
    process.command = command
    if check:
        sleep(wait)
        process.poll()
        if (process.returncode is not None):
            raise subprocess.CalledProcessError(process.returncode, command, None)
    _processes.append(process)
    sys.stdout.write('done\n')
# stops a registered process by its name
def stop_process(command):
    """Stop the registered process whose command name matches *command*."""
    for process in _processes:
        # Compare only the executable name, not the full command line.
        processCommand = process.command.split(None, 1)[0]
        if command == processCommand:
            sys.stdout.write('stopping ' + command + '... ')
            sys.stdout.flush()
            os.killpg(process.pid, signal.SIGTERM)
            process.wait()
            sys.stdout.write('done\n')
# stops all registered processes
def stop_processes():
    """Send SIGTERM to every registered process group and wait for exit."""
    for process in _processes:
        sys.stdout.write('stopping ' + process.command.split(None, 1)[0]
                         + '... ')
        sys.stdout.flush()
        os.killpg(process.pid, signal.SIGTERM)
        process.wait()
        sys.stdout.write('done\n')
# loads a HAL configuraton file
def load_hal_file(filename, ini=None):
    """Load a HAL configuration file, Python (.py) or halcmd syntax."""
    sys.stdout.write("loading " + filename + '... ')
    sys.stdout.flush()
    _, ext = os.path.splitext(filename)
    if ext == '.py':
        # Python HAL file: ensure RTAPI is initialized, optionally load
        # the ini, then execute the file in this interpreter (Python 2
        # execfile).
        from machinekit import rtapi
        if not rtapi.__rtapicmd:
            rtapi.init_RTAPI()
        if ini is not None:
            from machinekit import config
            config.load_ini(ini)
        execfile(filename)
    else:
        # Classic HAL file: hand it to halcmd, passing the ini if given.
        command = 'halcmd'
        if ini is not None:
            command += ' -i ' + ini
        command += ' -f ' + filename
        subprocess.check_call(command, shell=True)
    sys.stdout.write('done\n')
# loads a BBIO configuration file
def load_bbio_file(filename):
    """Apply a BeagleBone pin configuration file via config-pin."""
    check_command('config-pin')
    sys.stdout.write("loading " + filename + '... ')
    sys.stdout.flush()
    subprocess.check_call('config-pin -f ' + filename, shell=True)
    sys.stdout.write('done\n')
# installs a comp RT component
def install_comp(filename):
    """Compile and install a .comp/.icomp RT component if it is out of date."""
    install = True
    base, ext = os.path.splitext(os.path.basename(filename))
    flavor = compat.default_flavor()
    moduleDir = compat.get_rtapi_config("RTLIB_DIR")
    moduleName = flavor.name + '/' + base + flavor.mod_ext
    modulePath = os.path.join(moduleDir, moduleName)
    # Skip the build when the installed module is newer than the source.
    if os.path.exists(modulePath):
        compTime = os.path.getmtime(filename)
        moduleTime = os.path.getmtime(modulePath)
        if (compTime < moduleTime):
            install = False
    if install is True:
        # .icomp files use the instcomp compiler, .comp files use comp.
        if ext == '.icomp':
            cmdBase = 'instcomp'
        else:
            cmdBase = 'comp'
        sys.stdout.write("installing " + filename + '... ')
        sys.stdout.flush()
        if os.access(moduleDir, os.W_OK):  # if we have write access we might not need sudo
            cmd = '%s --install %s' % (cmdBase, filename)
        else:
            cmd = 'sudo %s --install %s' % (cmdBase, filename)
        subprocess.check_call(cmd, shell=True)
        sys.stdout.write('done\n')
# starts realtime
def start_realtime():
    """Start the realtime environment and remember that we started it."""
    global _realtimeStarted
    sys.stdout.write("starting realtime...")
    sys.stdout.flush()
    subprocess.check_call('realtime start', shell=True)
    sys.stdout.write('done\n')
    _realtimeStarted = True
# stops realtime
def stop_realtime():
    """Stop the realtime environment and clear the started flag."""
    global _realtimeStarted
    sys.stdout.write("stopping realtime... ")
    sys.stdout.flush()
    subprocess.check_call('realtime stop', shell=True)
    sys.stdout.write('done\n')
    _realtimeStarted = False
# rip the Machinekit environment
def rip_environment(path=None, force=False):
    """Source a run-in-place environment script and import its variables.

    Without *path*, the script location is guessed from ~/.bashrc or
    ~/machinekit/scripts. With force=False this is a no-op when the
    environment is already ripped (EMC2_PATH set).
    """
    if force is False and os.getenv('EMC2_PATH') is not None:  # check if already ripped
        return
    if path is None:
        command = None
        scriptFilePath = os.environ['HOME'] + '/.bashrc'
        if os.path.exists(scriptFilePath):
            with open(scriptFilePath) as f:  # use the bashrc
                content = f.readlines()
                for line in content:
                    if 'rip-environment' in line:
                        line = line.strip()
                        if (line[0] == '.'):
                            command = line
        # Fall back to the default run-in-place checkout location.
        scriptFilePath = os.environ['HOME'] + '/machinekit/scripts/rip-environment'
        if os.path.exists(scriptFilePath):
            command = '. ' + scriptFilePath
        if (command is None):
            sys.stderr.write('Unable to rip environment')
            sys.exit(1)
    else:
        command = '. ' + path + '/scripts/rip-environment'
    # Run the script in a shell and copy the resulting environment into
    # this process.
    process = subprocess.Popen(command + ' && env',
                               stdout=subprocess.PIPE,
                               shell=True)
    for line in process.stdout:
        (key, _, value) = line.partition('=')
        os.environ[key] = value.rstrip()
    sys.path.append(os.environ['PYTHONPATH'])
# checks the running processes and exits when exited
def check_processes():
    """Poll registered processes; tear down and exit if any has died.

    Note: the list is modified during iteration, which is safe here only
    because the function exits immediately after the first removal.
    """
    for process in _processes:
        process.poll()
        if (process.returncode is not None):
            _processes.remove(process)
            end_session()
            if (process.returncode != 0):
                sys.exit(1)
            else:
                sys.exit(0)
# register exit signal handlers
def register_exit_handler():
    """Install _exitHandler for SIGINT and SIGTERM."""
    signal.signal(signal.SIGINT, _exitHandler)
    signal.signal(signal.SIGTERM, _exitHandler)
def _exitHandler(signum, frame):
    """Signal handler: end the session once, then exit cleanly."""
    del signum  # unused
    del frame  # unused
    global _exiting
    if not _exiting:
        _exiting = True  # prevent double execution
        end_session()
        sys.exit(0)
# set the Machinekit debug level
def set_debug_level(level):
    """Export *level* as the DEBUG environment variable for Machinekit."""
    os.environ.update(DEBUG=str(level))
# set the Machinekit ini
def set_machinekit_ini(ini):
    """Point the MACHINEKIT_INI environment variable at *ini*."""
    os.environ.update(MACHINEKIT_INI=ini)
|
victorywang80/Maintenance | refs/heads/master | saltstack/src/salt/modules/ebuild.py | 1 | # -*- coding: utf-8 -*-
'''
Support for Portage
:optdepends: - portage Python adapter
For now all package names *MUST* include the package category,
i.e. ``'vim'`` will not work, ``'app-editors/vim'`` will.
'''
# Import python libs
import copy
import logging
import re
# Import salt libs
import salt.utils
# Import third party libs
HAS_PORTAGE = False
try:
import portage
HAS_PORTAGE = True
except ImportError:
import os
import sys
if os.path.isdir('/usr/lib/portage/pym'):
try:
# In a virtualenv, the portage python path needs to be manually added
sys.path.insert(0, '/usr/lib/portage/pym')
import portage
HAS_PORTAGE = True
except ImportError:
pass
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'pkg'
def __virtual__():
    '''
    Confirm this module is on a Gentoo based system
    '''
    # Requires both the portage Python bindings and a Gentoo grain.
    if HAS_PORTAGE and __grains__['os'] == 'Gentoo':
        return __virtualname__
    return False
def _vartree():
    # Tree of installed packages for the current portage root.
    return portage.db[portage.root]['vartree']
def _porttree():
    # Tree of available (ebuild) packages for the current portage root.
    return portage.db[portage.root]['porttree']
def _p_to_cp(p):
    # Resolve a package name to its category/package form, or None when
    # the port tree has no match.
    ret = _porttree().dbapi.xmatch("match-all", p)
    if ret:
        return portage.cpv_getkey(ret[0])
    return None
def _allnodes():
    # getallnodes() is expensive, so cache the result in the salt
    # per-invocation __context__ dict.
    if 'portage._allnodes' in __context__:
        return __context__['portage._allnodes']
    else:
        ret = _porttree().getallnodes()
        __context__['portage._allnodes'] = ret
        return ret
def _cpv_to_cp(cpv):
    # Strip the version from a category/package-version atom; fall back
    # to the input unchanged when portage cannot extract a key.
    return portage.cpv_getkey(cpv) or cpv
def _cpv_to_version(cpv):
    # Extract just the version part of a category/package-version atom.
    return portage.versions.cpv_getversion(cpv)
def _process_emerge_err(stdout, stderr):
'''
Used to parse emerge output to provide meaningful output when emerge fails
'''
ret = {}
changes = {}
rexp = re.compile(r'^[<>=][^ ]+/[^ ]+ [^\n]+', re.M)
slot_conflicts = re.compile(r'^[^ \n]+/[^ ]+:[^ ]', re.M).findall(stderr)
if slot_conflicts:
changes['slot conflicts'] = slot_conflicts
blocked = re.compile(r'(?m)^\[blocks .+\] '
r'([^ ]+/[^ ]+-[0-9]+[^ ]+)'
r'.*$').findall(stdout)
unsatisfied = re.compile(
r'Error: The above package list contains').findall(stderr)
# If there were blocks and emerge could not resolve it.
if blocked and unsatisfied:
changes['blocked'] = blocked
sections = re.split('\n\n', stderr)
for section in sections:
if 'The following keyword changes' in section:
changes['keywords'] = rexp.findall(section)
elif 'The following license changes' in section:
changes['license'] = rexp.findall(section)
elif 'The following USE changes' in section:
changes['use'] = rexp.findall(section)
elif 'The following mask changes' in section:
changes['mask'] = rexp.findall(section)
ret['changes'] = {'Needed changes': changes}
return ret
def check_db(*names, **kwargs):
    '''
    .. versionadded:: 0.17.0

    Returns a dict containing the following information for each specified
    package:

    1. A key ``found``, which will be a boolean value denoting if a match was
       found in the package database.
    2. If ``found`` is ``False``, then a second key called ``suggestions`` will
       be present, which will contain a list of possible matches. This list
       will be empty if the package name was specified in ``category/pkgname``
       format, since the suggestions are only intended to disambiguate
       ambiguous package names (ones submitted without a category).

    CLI Examples:

    .. code-block:: bash

        salt '*' pkg.check_db <package1> <package2> <package3>
    '''
    ### NOTE: kwargs is not used here but needs to be present due to it being
    ### required in the check_db function in other package providers.
    ret = {}
    for name in names:
        if name in ret:
            log.warning('pkg.check_db: Duplicate package name {0!r} '
                        'submitted'.format(name))
            continue
        entry = ret.setdefault(name, {})
        if '/' in name:
            # Fully-qualified category/pkgname: look it up directly.
            entry['found'] = name in _allnodes()
            if entry['found'] is False:
                entry['suggestions'] = []
        else:
            # Bare package name: never a direct hit; offer candidates instead.
            entry['found'] = False
            entry['suggestions'] = porttree_matches(name)
    return ret
def ex_mod_init(low):
    '''
    Enforce a nice tree structure for /etc/portage/package.* configuration
    files. Always returns True; ``low`` is accepted for the module-init
    calling convention but not used here.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.ex_mod_init
    '''
    __salt__['portage_config.enforce_nice_config']()
    return True
def latest_version(*names, **kwargs):
    '''
    Return the latest version of the named package available for upgrade or
    installation. If more than one package name is specified, a dict of
    name/version pairs is returned.

    If the latest version of a given package is already installed, an empty
    string will be returned for that package.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.latest_version <package name>
        salt '*' pkg.latest_version <package1> <package2> <package3> ...
    '''
    refresh = salt.utils.is_true(kwargs.pop('refresh', True))
    if not names:
        return ''
    if refresh:
        # Sync the tree first so "latest" reflects upstream.
        refresh_db()
    # Every requested name starts out as "already up to date".
    ret = dict.fromkeys(names, '')
    for name in names:
        installed = _cpv_to_version(_vartree().dep_bestmatch(name))
        avail = _cpv_to_version(_porttree().dep_bestmatch(name))
        if not avail:
            continue
        newer = not installed or salt.utils.compare_versions(
            ver1=installed, oper='<', ver2=avail, cmp_func=version_cmp)
        if newer:
            ret[name] = avail
    # Return a bare string when only one package name was passed.
    return ret[names[0]] if len(names) == 1 else ret
# available_version is being deprecated
available_version = latest_version
def _get_upgradable():
    '''
    Utility function to get upgradable packages

    Sample return data:
    { 'pkgname': '1.2.3-45', ... }
    '''
    out = __salt__['cmd.run_stdout'](
        'emerge --pretend --update --newuse --deep --ask n world')
    upgrade_re = re.compile(r'(?m)^\[.+\] '
                            r'([^ ]+/[^ ]+)'   # Package string
                            '-'
                            r'([0-9]+[^ ]+)'   # Version
                            r'.*$')
    # findall yields (name, version) tuples, which map directly to a dict.
    return dict(upgrade_re.findall(out))
def list_upgrades(refresh=True):
    '''
    List all available package upgrades.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.list_upgrades
    '''
    do_refresh = salt.utils.is_true(refresh)
    if do_refresh:
        refresh_db()
    return _get_upgradable()
def upgrade_available(name):
    '''
    Check whether or not an upgrade is available for a given package

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.upgrade_available <package name>
    '''
    # latest_version() returns '' when the package is already current.
    return bool(latest_version(name))
def version(*names, **kwargs):
    '''
    Returns a string representing the package version or an empty string if not
    installed. If more than one package name is specified, a dict of
    name/version pairs is returned.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.version <package name>
        salt '*' pkg.version <package1> <package2> <package3> ...
    '''
    # Thin delegation: pkg_resource.version calls back into this module's
    # list_pkgs() to do the real work.
    return __salt__['pkg_resource.version'](*names, **kwargs)
def porttree_matches(name):
    '''
    Returns a list containing the matches for a given package name from the
    portage tree. Note that the specific version of the package will not be
    provided for packages that have several versions in the portage tree, but
    rather the name of the package (i.e. "dev-python/paramiko").
    '''
    matches = []
    # Hoist the porttree lookup: the original resolved _porttree() twice per
    # category iteration.
    dbapi = _porttree().dbapi
    for category in dbapi.categories:
        cp = category + "/" + name
        if dbapi.cp_list(cp):
            matches.append(cp)
    return matches
def list_pkgs(versions_as_list=False, **kwargs):
    '''
    List the packages currently installed in a dict::

        {'<package_name>': '<version>'}

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.list_pkgs
    '''
    versions_as_list = salt.utils.is_true(versions_as_list)
    # 'removed' not yet implemented or not applicable
    if salt.utils.is_true(kwargs.get('removed')):
        return {}

    cached = __context__.get('pkg.list_pkgs')
    if cached is not None:
        if versions_as_list:
            return cached
        # Never mutate the cached copy; stringify a deep copy instead.
        stringified = copy.deepcopy(cached)
        __salt__['pkg_resource.stringify'](stringified)
        return stringified

    ret = {}
    for cpv in _vartree().dbapi.cpv_all():
        __salt__['pkg_resource.add_pkg'](ret,
                                         _cpv_to_cp(cpv),
                                         _cpv_to_version(cpv))
    __salt__['pkg_resource.sort_pkglist'](ret)
    __context__['pkg.list_pkgs'] = copy.deepcopy(ret)
    if not versions_as_list:
        __salt__['pkg_resource.stringify'](ret)
    return ret
def refresh_db():
    '''
    Updates the portage tree (emerge --sync). Uses eix-sync if available.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.refresh_db
    '''
    if 'eix.sync' in __salt__:
        return __salt__['eix.sync']()

    def _webrsync():
        # Shared webrsync helper (was duplicated in both branches below).
        # We prefer 'delta-webrsync' to 'webrsync'
        cmd = 'emerge-webrsync -q'
        if salt.utils.which('emerge-delta-webrsync'):
            cmd = 'emerge-delta-webrsync -q'
        return __salt__['cmd.retcode'](cmd) == 0

    # Fixed missing space: "'makeconf.features_contains'in __salt__"
    if 'makeconf.features_contains' in __salt__ \
            and __salt__['makeconf.features_contains']('webrsync-gpg'):
        # GPG sign verify is supported only for "webrsync"
        return _webrsync()
    if __salt__['cmd.retcode']('emerge --sync --ask n --quiet') == 0:
        return True
    # We fall back to "webrsync" if "rsync" fails for some reason
    return _webrsync()
def install(name=None,
            refresh=False,
            pkgs=None,
            sources=None,
            slot=None,
            fromrepo=None,
            uses=None,
            **kwargs):
    '''
    Install the passed package(s), add refresh=True to sync the portage tree
    before package is installed.

    name
        The name of the package to be installed. Note that this parameter is
        ignored if either "pkgs" or "sources" is passed. Additionally, please
        note that this option can only be used to emerge a package from the
        portage tree. To install a tbz2 package manually, use the "sources"
        option described below.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install <package name>

    refresh
        Whether or not to sync the portage tree before installing.

    version
        Install a specific version of the package, e.g. 1.0.9-r1. Ignored
        if "pkgs" or "sources" is passed.

    slot
        Similar to version, but specifies a valid slot to be installed. It
        will install the latest available version in the specified slot.
        Ignored if "pkgs" or "sources" or "version" is passed.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install sys-devel/gcc slot='4.4'

    fromrepo
        Similar to slot, but specifies the repository from the package will be
        installed. It will install the latest available version in the
        specified repository.
        Ignored if "pkgs" or "sources" or "version" is passed.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install salt fromrepo='gentoo'

    uses
        Similar to slot, but specifies a list of use flag.
        Ignored if "pkgs" or "sources" or "version" is passed.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install sys-devel/gcc uses='["nptl","-nossp"]'

    Multiple Package Installation Options:

    pkgs
        A list of packages to install from the portage tree. Must be passed as
        a python list.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install pkgs='["foo","bar","~category/package:slot::repository[use]"]'

    sources
        A list of tbz2 packages to install. Must be passed as a list of dicts,
        with the keys being package names, and the values being the source URI
        or local path to the package.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install sources='[{"foo": "salt://foo.tbz2"},{"bar": "salt://bar.tbz2"}]'

    Returns a dict containing the new package names and versions::

        {'<package>': {'old': '<old-version>',
                       'new': '<new-version>'}}
    '''
    log.debug('Called modules.pkg.install: {0}'.format(
        {
            'name': name,
            'refresh': refresh,
            'pkgs': pkgs,
            'sources': sources,
            'kwargs': kwargs
        }
    ))
    if salt.utils.is_true(refresh):
        refresh_db()
    pkg_params, pkg_type = __salt__['pkg_resource.parse_targets'](name,
                                                                  pkgs,
                                                                  sources,
                                                                  **kwargs)
    # Handle version kwarg for a single package target
    if pkgs is None and sources is None:
        version_num = kwargs.get('version')
        if version_num:
            pkg_params = {name: version_num}
        else:
            # No explicit version: fold the slot/repo/USE selectors into the
            # "version" string; the parsing loop below understands this form.
            version_num = ''
            if slot is not None:
                version_num += ':{0}'.format(slot)
            if fromrepo is not None:
                version_num += '::{0}'.format(fromrepo)
            if uses is not None:
                version_num += '["{0}"]'.format('","'.join(uses))
            pkg_params = {name: version_num}
    if pkg_params is None or len(pkg_params) == 0:
        return {}
    elif pkg_type == 'file':
        # tbz2file makes emerge install the given binary package files.
        emerge_opts = 'tbz2file'
    else:
        emerge_opts = ''
    changes = {}
    if pkg_type == 'repository':
        # Translate each short name to category/package form and build quoted
        # emerge atoms (prefix + cp + version + selectors).
        targets = list()
        for param, version_num in pkg_params.iteritems():
            original_param = param
            param = _p_to_cp(param)
            if param is None:
                raise portage.dep.InvalidAtom(original_param)
            if version_num is None:
                targets.append(param)
            else:
                keyword = None
                # Groups: keyword marker '~', comparison '<'/'>', '=', and the
                # remaining version/slot/repo/USE string.
                match = re.match('^(~)?([<>])?(=)?([^<>=]*)$', version_num)
                if match:
                    keyword, gt_lt, eq, verstr = match.groups()
                    prefix = gt_lt or ''
                    prefix += eq or ''
                    # We need to delete quotes around use flag list elements
                    verstr = verstr.replace("'", "")
                    # If no prefix characters were supplied and verstr contains a version, use '='
                    if len(verstr) > 0 and verstr[0] != ':' and verstr[0] != '[':
                        prefix = prefix or '='
                        target = '"{0}{1}-{2}"'.format(prefix, param, verstr)
                    else:
                        target = '"{0}{1}"'.format(param, verstr)
                else:
                    target = '"{0}"'.format(param)
                if '[' in target:
                    # USE dependencies are persisted in package.use (recorded
                    # as a change) and then stripped from the atom.
                    old = __salt__['portage_config.get_flags_from_package_conf']('use', target[1:-1])
                    __salt__['portage_config.append_use_flags'](target[1:-1])
                    new = __salt__['portage_config.get_flags_from_package_conf']('use', target[1:-1])
                    if old != new:
                        changes[param + '-USE'] = {'old': old, 'new': new}
                    target = target[:target.rfind('[')] + '"'
                if keyword is not None:
                    # A leading '~' requests the unstable (~ARCH) keyword;
                    # record it in package.accept_keywords.
                    __salt__['portage_config.append_to_package_conf']('accept_keywords', target[1:-1], ['~ARCH'])
                    changes[param + '-ACCEPT_KEYWORD'] = {'old': '', 'new': '~ARCH'}
                targets.append(target)
    else:
        targets = pkg_params
    cmd = 'emerge --quiet --ask n {0} {1}'.format(emerge_opts, ' '.join(targets))
    old = list_pkgs()
    call = __salt__['cmd.run_all'](cmd)
    # Invalidate the cached package list before re-reading it.
    __context__.pop('pkg.list_pkgs', None)
    if call['retcode'] != 0:
        return _process_emerge_err(call['stdout'], call['stderr'])
    new = list_pkgs()
    changes.update(salt.utils.compare_dicts(old, new))
    return changes
def update(pkg, slot=None, fromrepo=None, refresh=False):
    '''
    Updates the passed package (emerge --update package)

    slot
        Restrict the update to a particular slot. It will update to the
        latest version within the slot.

    fromrepo
        Restrict the update to a particular repository. It will update to the
        latest version within the repository.

    Return a dict containing the new package names and versions::

        {'<package>': {'old': '<old-version>',
                       'new': '<new-version>'}}

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.update <package name>
    '''
    if salt.utils.is_true(refresh):
        refresh_db()

    # Assemble the fully-qualified atom: pkg[:slot][::repo]
    atom_parts = [pkg]
    if slot is not None:
        atom_parts.append(':{0}'.format(slot))
    if fromrepo is not None:
        atom_parts.append('::{0}'.format(fromrepo))
    full_atom = ''.join(atom_parts)

    old = list_pkgs()
    call = __salt__['cmd.run_all'](
        'emerge --update --newuse --oneshot --ask n --quiet {0}'.format(full_atom))
    __context__.pop('pkg.list_pkgs', None)
    if call['retcode'] != 0:
        return _process_emerge_err(call['stdout'], call['stderr'])
    new = list_pkgs()
    return salt.utils.compare_dicts(old, new)
def upgrade(refresh=True):
    '''
    Run a full system upgrade (emerge --update world)

    Return a dict containing the new package names and versions::

        {'<package>': {'old': '<old-version>',
                       'new': '<new-version>'}}

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.upgrade
    '''
    if salt.utils.is_true(refresh):
        refresh_db()

    before = list_pkgs()
    result = __salt__['cmd.run_all'](
        'emerge --update --newuse --deep --ask n --quiet world')
    __context__.pop('pkg.list_pkgs', None)
    if result['retcode'] != 0:
        return _process_emerge_err(result['stdout'], result['stderr'])
    after = list_pkgs()
    return salt.utils.compare_dicts(before, after)
def remove(name=None, slot=None, fromrepo=None, pkgs=None, **kwargs):
    '''
    Remove packages via emerge --unmerge.

    name
        The name of the package to be deleted.

    slot
        Restrict the remove to a specific slot. Ignored if ``name`` is None.

    fromrepo
        Restrict the remove to a specific repository. Ignored if ``name`` is
        None.

    Multiple Package Options:

    pkgs
        Uninstall multiple packages. ``slot`` and ``fromrepo`` arguments are
        ignored if this argument is present. Must be passed as a python list.

    .. versionadded:: 0.16.0

    Returns a dict containing the changes.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.remove <package name>
        salt '*' pkg.remove <package name> slot=4.4 fromrepo=gentoo
        salt '*' pkg.remove <package name> slot=4.4 fromrepo=gentoo
        salt '*' pkg.remove <package1>,<package2>,<package3>
        salt '*' pkg.remove pkgs='["foo", "bar"]'
    '''
    old = list_pkgs()
    pkg_params = __salt__['pkg_resource.parse_targets'](name, pkgs)[0]
    if name and not pkgs and (slot is not None or fromrepo is not None) \
            and len(pkg_params) == 1:
        # Build a single fully-qualified atom, e.g. "pkg:slot::repo".
        # BUG FIX: the slot/fromrepo-qualified target lists were previously
        # computed and then unconditionally overwritten by the bare name.
        fullatom = name
        if slot is not None:
            fullatom = '{0}:{1}'.format(fullatom, slot)
        if fromrepo is not None:
            fullatom = '{0}::{1}'.format(fullatom, fromrepo)
        targets = [fullatom]
    else:
        # Only attempt to unmerge packages that are actually installed.
        targets = [x for x in pkg_params if x in old]
    if not targets:
        return {}
    # BUG FIX: a space was missing between '--ask n' and the target list,
    # producing commands such as 'emerge ... --ask nfoo'.
    cmd = 'emerge --unmerge --quiet --quiet-unmerge-warn --ask n {0}'.format(
        ' '.join(targets))
    __salt__['cmd.run_all'](cmd)
    # Invalidate the cached package list before re-reading it.
    __context__.pop('pkg.list_pkgs', None)
    new = list_pkgs()
    return salt.utils.compare_dicts(old, new)
def purge(name=None, slot=None, fromrepo=None, pkgs=None, **kwargs):
    '''
    Portage does not have a purge, this function calls remove followed
    by depclean to emulate a purge process

    name
        The name of the package to be deleted.

    slot
        Restrict the remove to a specific slot. Ignored if name is None.

    fromrepo
        Restrict the remove to a specific repository. Ignored if ``name`` is
        None.

    Multiple Package Options:

    pkgs
        Uninstall multiple packages. ``slot`` and ``fromrepo`` arguments are
        ignored if this argument is present. Must be passed as a python list.

    .. versionadded:: 0.16.0

    Returns a dict containing the changes.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.purge <package name>
        salt '*' pkg.purge <package name> slot=4.4
        salt '*' pkg.purge <package1>,<package2>,<package3>
        salt '*' pkg.purge pkgs='["foo", "bar"]'
    '''
    # Unmerge first, then sweep up orphaned dependencies; merge both change sets.
    changes = remove(name=name, slot=slot, fromrepo=fromrepo, pkgs=pkgs)
    changes.update(depclean(name=name, slot=slot, fromrepo=fromrepo, pkgs=pkgs))
    return changes
def depclean(name=None, slot=None, fromrepo=None, pkgs=None):
    '''
    Portage has a function to remove unused dependencies. If a package
    is provided, it will only removed the package if no other package
    depends on it.

    name
        The name of the package to be cleaned.

    slot
        Restrict the remove to a specific slot. Ignored if ``name`` is None.

    fromrepo
        Restrict the remove to a specific repository. Ignored if ``name`` is
        None.

    pkgs
        Clean multiple packages. ``slot`` and ``fromrepo`` arguments are
        ignored if this argument is present. Must be passed as a python list.

    Return a list containing the removed packages:

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.depclean <package name>
    '''
    old = list_pkgs()
    pkg_params = __salt__['pkg_resource.parse_targets'](name, pkgs)[0]
    if name and not pkgs and (slot is not None or fromrepo is not None) \
            and len(pkg_params) == 1:
        # Build a single fully-qualified atom, e.g. "pkg:slot::repo".
        # BUG FIX: the slot/fromrepo-qualified target lists were previously
        # computed and then unconditionally overwritten by the bare name.
        fullatom = name
        if slot is not None:
            fullatom = '{0}:{1}'.format(fullatom, slot)
        if fromrepo is not None:
            fullatom = '{0}::{1}'.format(fullatom, fromrepo)
        targets = [fullatom]
    else:
        targets = [x for x in pkg_params if x in old]
    # NOTE(review): with no targets this runs a global 'emerge --depclean',
    # which removes every unused dependency on the system -- confirm that is
    # the intended behavior for an empty target list.
    cmd = 'emerge --depclean --ask n --quiet {0}'.format(' '.join(targets))
    __salt__['cmd.run_all'](cmd)
    __context__.pop('pkg.list_pkgs', None)
    new = list_pkgs()
    return salt.utils.compare_dicts(old, new)
def version_cmp(pkg1, pkg2):
    '''
    Do a cmp-style comparison on two packages. Return -1 if pkg1 < pkg2, 0 if
    pkg1 == pkg2, and 1 if pkg1 > pkg2. Return None if there was a problem
    making the comparison.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.version_cmp '0.2.4-0' '0.2.4.1-0'
    '''
    # Strip the optional '~' prefix and any trailing slot/USE selectors,
    # keeping only the version portion for portage's comparator.
    strip_re = r'^~?([^:\[]+):?[^\[]*\[?.*$'
    match1 = re.match(strip_re, pkg1)
    match2 = re.match(strip_re, pkg2)
    if not (match1 and match2):
        return None
    return portage.versions.vercmp(match1.group(1), match2.group(1))
def version_clean(version):
    '''
    Clean the version string removing extra data.

    Strips a leading '~', comparison operators ('<', '>', '='), and any
    trailing slot/repository/USE selectors, e.g. '~1.2.3:4[nls]' -> '1.2.3'.
    Returns None when no version component can be extracted.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.version_clean <version_string>
    '''
    # BUG FIX: the previous implementation returned the re.Match object
    # itself instead of the cleaned version string.
    match = re.match(r'^~?[<>]?=?([^<>=:\[]+).*$', version)
    if match:
        return match.group(1)
    return None
def check_extra_requirements(pkgname, pkgver):
    '''
    Check if the installed package already has the given requirements.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.check_extra_requirements 'sys-devel/gcc' '~>4.1.2:4.1::gentoo[nls,fortran]'
    '''
    # Groups: keyword marker '~', comparison '<'/'>', '=', and the remaining
    # version/slot/repo/USE string.
    match = re.match('^(~)?([<>])?(=)?([^<>=]*)$', pkgver)
    if not match:
        # Unparsable requirement string: nothing extra to enforce.
        return True
    keyword, gt_lt, eq, verstr = match.groups()
    prefix = (gt_lt or '') + (eq or '')
    # We need to delete quotes around use flag list elements
    verstr = verstr.replace("'", "")
    # BUG FIX: guard against an empty verstr before indexing verstr[0]
    # (mirrors the identical logic in install()).
    if len(verstr) > 0 and verstr[0] != ':' and verstr[0] != '[':
        # If no prefix characters were supplied and verstr contains a version, use '='
        prefix = prefix or '='
        atom = '{0}{1}-{2}'.format(prefix, pkgname, verstr)
    else:
        atom = '{0}{1}'.format(pkgname, verstr)
    cpv = _porttree().dbapi.xmatch('bestmatch-visible', atom)
    if cpv == '':
        # Nothing visible in the tree satisfies the atom.
        return False
    try:
        cur_repo, cur_use = _vartree().dbapi.aux_get(cpv, ['repository', 'USE'])
    except KeyError:
        # Best match is not installed.
        return False
    # Repository requirement (the '::repo' part of the atom), if any.
    des_repo = re.match(r'^.+::([^\[]+).*$', atom)
    if des_repo and des_repo.group(1) != cur_repo:
        return False
    # Every desired USE flag must be enabled; every '-flag' must be disabled.
    des_uses = set(portage.dep.dep_getusedeps(atom))
    cur_use = cur_use.split()
    if len([x for x in des_uses.difference(cur_use)
            if x[0] != '-' or x[1:] in cur_use]) > 0:
        return False
    if keyword:
        # '~' prefix requires the unstable keyword to be accepted already.
        if not __salt__['portage_config.has_flag']('accept_keywords', atom, '~ARCH'):
            return False
    return True
|
vishdha/erpnext | refs/heads/develop | erpnext/support/doctype/warranty_claim/test_warranty_claim.py | 121 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
test_records = frappe.get_test_records('Warranty Claim')
class TestWarrantyClaim(unittest.TestCase):
    """Placeholder test case for the Warranty Claim doctype.

    No assertions are defined yet; the module-level ``test_records`` load
    above is the only fixture work this module performs.
    """
    pass
|
pkimber/old_story | refs/heads/master | story/models.py | 1 | from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
import reversion
from base.model_utils import TimeStampedModel
from moderate.models import ModerateModel
class Area(models.Model):
    """Geographic area used to group events and stories."""
    name = models.CharField(max_length=100)
    slug = models.SlugField(max_length=100)
    class Meta:
        ordering = ['name']
        verbose_name = 'Area'
        verbose_name_plural = 'Areas'
    def __unicode__(self):
        return unicode('{}'.format(self.name))
reversion.register(Area)
class Event(ModerateModel, TimeStampedModel):
    """Moderated, timestamped event tied to an :class:`Area`.

    The submitter is recorded either via ``owner`` (a site user) or via the
    free-form ``email``/``name`` fields; all three are optional at the DB
    level (no equivalent of Story.save()'s check is enforced here).
    """
    owner = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True)
    email = models.EmailField(blank=True, null=True)
    name = models.CharField(max_length=100, blank=True)
    area = models.ForeignKey(Area)
    title = models.CharField(max_length=100)
    description = models.TextField(blank=True, null=True)
    class Meta:
        ordering = ['modified']
        verbose_name = 'Event'
        verbose_name_plural = 'Events'
    def __unicode__(self):
        return unicode('{}'.format(self.title))
reversion.register(Event)
def _default_moderate_state():
    # NOTE(review): ``ModerateState`` is never imported in this module (only
    # ``ModerateModel`` is), so calling this raises NameError. Confirm it
    # should be imported from ``moderate.models``. This default callable is
    # not referenced anywhere in this file.
    return ModerateState.pending()
class Story(ModerateModel, TimeStampedModel):
    """News story submitted by a site user or anonymously (name + email)."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True)
    email = models.EmailField(blank=True, null=True)
    name = models.CharField(max_length=100, blank=True)
    area = models.ForeignKey(Area)
    title = models.CharField(max_length=100)
    description = models.TextField()
    picture = models.ImageField(upload_to='story/%Y/%m/%d', blank=True)
    class Meta:
        ordering = ['-created']
        verbose_name = 'Story'
        verbose_name_plural = 'Stories'
    def __unicode__(self):
        return unicode('{}'.format(self.title))
    def save(self, *args, **kwargs):
        # Enforce attribution: either a site user, or both a name and email.
        if self.user:
            pass
        elif self.email and self.name:
            pass
        else:
            raise ValueError(
                "Story must have a 'user' or a 'name' AND 'email'"
            )
        super(Story, self).save(*args, **kwargs)
    def get_absolute_url(self):
        return reverse('story.detail', args=[self.pk])
    def user_can_edit(self, user):
        """
        A member of staff can edit anything. A standard user can only edit
        their own stories if they haven't been moderated
        """
        # NOTE(review): ``date_moderated`` is not defined in this file --
        # presumably provided by ModerateModel; confirm.
        result = False
        if user.is_staff:
            result = True
        elif user.is_active and not self.date_moderated:
            result = user == self.user
        return result
    def _author(self):
        # Display name: prefer the free-form name, fall back to the username.
        return self.name or self.user.username
    author = property(_author)
reversion.register(Story)
|
HPPTECH/hpp_IOSTressTest | refs/heads/master | Refer/IOST_OLD_SRC/IOST_0.17/Libs/IOST_AboutDialog.py | 2 | #!/usr/bin/python
#======================================================================
#
# Project : hpp_IOStressTest
# File : IOST_AboutDialog.py
# Date : Sep 21, 2016
# Author : HuuHoang Nguyen
# Contact : hhnguyen@apm.com
# : hoangnh.hpp@gmail.com
# License : MIT License
# Copyright : 2016
# Description: The hpp_IOStressTest is under the MIT License, a copy of license which may be found in LICENSE
#
#======================================================================
import io
import os
import re
import operator
import sys
import base64
import time
from IOST_Prepare import IOST_Prepare
from IOST_Config import *
import gtk
import gtk.glade
class IOST_AboutDialog():
    """Wrapper around the glade-defined Help -> About dialog.

    NOTE(review): this class reads ``self.IOST_Objs`` and ``self.IOST_Data``
    which it never assigns; it appears designed to be mixed into (or share
    state with) a class that provides them -- confirm before instantiating
    it standalone.
    """
    def __init__(self, glade_filename, window_name, object_name, main_builder):
        """Look up (or build) the About dialog widget and set its version."""
        self.IOST_AboutDialog_window_name = window_name
        self.IOST_AboutDialog_object_name = object_name
        if not main_builder:
            # No shared builder supplied: load the glade file ourselves.
            self.IOST_AboutDialog_Builder = gtk.Builder()
            self.IOST_AboutDialog_Builder.add_from_file(glade_filename)
            self.IOST_AboutDialog_Builder.connect_signals(self)
        else:
            self.IOST_AboutDialog_Builder = main_builder
        # Dialog widgets are keyed by window name + object name.
        self.IOST_Objs[window_name][window_name + object_name] = self.IOST_AboutDialog_Builder.get_object(window_name + object_name)
        self.IOST_Objs[window_name][window_name + object_name].set_version(self.IOST_Data["ProjectVersion"])

    def Run(self, window_name, object_name):
        """Show the dialog modally, then hide it when dismissed."""
        self.IOST_Objs[window_name][window_name + object_name].run()
        self.IOST_Objs[window_name][window_name + object_name].hide()

    def ActiveLink(self, object_name):
        """Hide the dialog when a link is activated."""
        self.IOST_Objs[self.IOST_AboutDialog_window_name][self.IOST_AboutDialog_window_name + self.IOST_AboutDialog_object_name].hide()

    def on_IOST_WHelpAbout_destroy(self, object, data=None):
        """Signal handler: hide the dialog on destroy."""
        self.IOST_Objs[self.IOST_AboutDialog_window_name][self.IOST_AboutDialog_window_name + self.IOST_AboutDialog_object_name].hide()

    def on_IOST_WHelpAbout_DialogActionArea_destroy(self, object, data=None):
        """Signal handler: hide the dialog when its action area is destroyed."""
        self.IOST_Objs[self.IOST_AboutDialog_window_name][self.IOST_AboutDialog_window_name + self.IOST_AboutDialog_object_name].hide()

    def on_IOST_WHelpAbout_button_press_event(self, widget, event, data=None):
        """Signal handler: hide the dialog on button press."""
        # BUG FIX: the widget key was missing the window-name prefix used by
        # every other handler, which would raise KeyError at runtime.
        self.IOST_Objs[self.IOST_AboutDialog_window_name][self.IOST_AboutDialog_window_name + self.IOST_AboutDialog_object_name].hide()

    def on_IOST_WHelpAbout_DialogVB_button_press_event(self, widget, event, data=None):
        """Signal handler: hide the dialog on button press in the VBox."""
        # BUG FIX: 'IOST_AboutDialog_objectt_name' typo (double 't') would
        # raise AttributeError at runtime.
        self.IOST_Objs[self.IOST_AboutDialog_window_name][self.IOST_AboutDialog_window_name + self.IOST_AboutDialog_object_name].hide()
|
molmod/yaff | refs/heads/master | yaff/pes/test/test_scaling.py | 1 | # -*- coding: utf-8 -*-
# YAFF is yet another force-field code.
# Copyright (C) 2011 Toon Verstraelen <Toon.Verstraelen@UGent.be>,
# Louis Vanduyfhuys <Louis.Vanduyfhuys@UGent.be>, Center for Molecular Modeling
# (CMM), Ghent University, Ghent, Belgium; all rights reserved unless otherwise
# stated.
#
# This file is part of YAFF.
#
# YAFF is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# YAFF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
from __future__ import division
from nose.tools import assert_raises
import numpy as np
from yaff.test.common import get_system_water32, get_system_glycine, \
get_system_quartz, get_system_caffeine, get_system_mil53
from yaff import *
def test_scaling_water32():
    # Scalings(system, 0.5, 0.0, 1.0): 1-2 pairs scaled 0.5, 1-3 pairs
    # excluded (0.0); 1-4 pairs are fully included (1.0) and therefore do
    # not appear in the scaling table.
    system = get_system_water32()
    stab = Scalings(system, 0.5, 0.0, 1.0).stab
    # Pairs are stored with a > b.
    assert (stab['a'] > stab['b']).all()
    assert len(stab) == system.natom
    for i0, i1, scale, nbond in stab:
        # Index arithmetic below assumes the O, H, H atom ordering per
        # molecule used by the water32 fixture.
        if system.numbers[i1] == 8:
            # Oxygen: partner is one of its two hydrogens (one bond away).
            assert (i0 == i1+1) or (i0 == i1+2)
            assert scale == 0.5
            assert nbond == 1
        elif system.numbers[i1] == 1:
            # Hydrogen: partner is the other hydrogen (two bonds away).
            assert i0 == i1+1
            assert scale == 0.0
            assert nbond == 2
def test_scaling_glycine():
    # Deliberately unusual scalings: 1-2 pairs fully included (1.0, so not
    # listed in the table), 1-3 pairs scaled 0.5, 1-4 pairs scaled 0.2.
    system = get_system_glycine()
    stab = Scalings(system, 1.0, 0.5, 0.2).stab # warning: absurd numbers
    assert (stab['a'] > stab['b']).all()
    # Only the 1-3 and 1-4 pairs appear; each pair is counted once (//2).
    assert len(stab) == sum(len(system.neighs2[i]) + len(system.neighs3[i]) for i in range(system.natom))//2
    for i0, i1, scale, nbond in stab:
        if i0 in system.neighs2[i1]:
            assert scale == 0.5
            assert nbond == 2
        elif i0 in system.neighs3[i1]:
            assert scale == 0.2
            assert nbond == 3
def test_scaling_quartz():
    '''Default scalings on a quartz supercell exclude all 1-2 and 1-3 pairs.'''
    system = get_system_quartz().supercell(2, 2, 2)
    stab = Scalings(system).stab
    assert (stab['a'] > stab['b']).all()
    expected = sum(len(system.neighs1[i]) + len(system.neighs2[i])
                   for i in range(system.natom)) // 2
    assert len(stab) == expected
    for i0, i1, scale, nbond in stab:
        assert scale == 0.0
        assert i0 in system.neighs1[i1] or i0 in system.neighs2[i1]
        assert nbond in (1, 2)
def test_iter_paths1():
    '''Both 3-bond paths between caffeine atoms 2 and 8 are found.'''
    system = get_system_caffeine()
    found = set(iter_paths(system, 2, 8, 3))
    assert found == set([(2, 7, 6, 8), (2, 9, 4, 8)])
    assert len(found) == 2
    assert all(len(path) == 4 for path in found)
def test_iter_paths2():
    '''Both 5-bond paths between caffeine atoms 13 and 5 are found.'''
    system = get_system_caffeine()
    found = set(iter_paths(system, 13, 5, 5))
    assert found == set([(13, 4, 8, 6, 7, 5), (13, 4, 9, 2, 7, 5)])
    assert len(found) == 2
    assert all(len(path) == 6 for path in found)
def test_iter_paths3():
    '''Exactly one 2-bond path exists between caffeine atoms 18 and 19.'''
    system = get_system_caffeine()
    found = set(iter_paths(system, 18, 19, 2))
    assert found == set([(18, 12, 19)])
    assert len(found) == 1
    assert all(len(path) == 3 for path in found)
def test_scaling_mil53():
    system = get_system_mil53()
    # Constructing default Scalings for MIL-53 must fail with an
    # AssertionError (the reason is internal to Scalings and not visible
    # here -- presumably an internal consistency check; confirm in yaff).
    with assert_raises(AssertionError):
        scalings = Scalings(system)
|
Proggie02/TestRepo | refs/heads/master | django/contrib/sites/models.py | 103 | from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
SITE_CACHE = {}
class SiteManager(models.Manager):
    def get_current(self):
        """
        Returns the current ``Site`` based on the SITE_ID in the
        project's settings. The ``Site`` object is cached the first
        time it's retrieved from the database.
        """
        from django.conf import settings
        try:
            sid = settings.SITE_ID
        except AttributeError:
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured("You're using the Django \"sites framework\" without having set the SITE_ID setting. Create a site in your database and set the SITE_ID setting to fix this error.")
        try:
            # EAFP: check the module-level cache first, fall back to the DB.
            current_site = SITE_CACHE[sid]
        except KeyError:
            current_site = self.get(pk=sid)
            SITE_CACHE[sid] = current_site
        return current_site
    def clear_cache(self):
        """Clears the ``Site`` object cache."""
        global SITE_CACHE
        SITE_CACHE = {}
@python_2_unicode_compatible
class Site(models.Model):
    """A single website (domain + display name) served by this project."""
    domain = models.CharField(_('domain name'), max_length=100)
    name = models.CharField(_('display name'), max_length=50)
    objects = SiteManager()
    class Meta:
        db_table = 'django_site'
        verbose_name = _('site')
        verbose_name_plural = _('sites')
        ordering = ('domain',)
    def __str__(self):
        return self.domain
    def save(self, *args, **kwargs):
        super(Site, self).save(*args, **kwargs)
        # Cached information will likely be incorrect now.
        if self.id in SITE_CACHE:
            del SITE_CACHE[self.id]
    def delete(self):
        # Capture the pk before the row (and pk attribute) disappears.
        pk = self.pk
        super(Site, self).delete()
        try:
            del SITE_CACHE[pk]
        except KeyError:
            pass
@python_2_unicode_compatible
class RequestSite(object):
    """
    A class that shares the primary interface of Site (i.e., it has
    ``domain`` and ``name`` attributes) but gets its data from a Django
    HttpRequest object rather than from a database.

    The save() and delete() methods raise NotImplementedError.
    """
    def __init__(self, request):
        # Both attributes mirror the request's host; there is no separate
        # display name without a database row.
        self.domain = self.name = request.get_host()
    def __str__(self):
        return self.domain
    def save(self, force_insert=False, force_update=False):
        raise NotImplementedError('RequestSite cannot be saved.')
    def delete(self):
        raise NotImplementedError('RequestSite cannot be deleted.')
def get_current_site(request):
    """
    Checks if contrib.sites is installed and returns either the current
    ``Site`` object or a ``RequestSite`` object based on the request.
    """
    if not Site._meta.installed:
        return RequestSite(request)
    return Site.objects.get_current()
|
jspargo/AneMo | refs/heads/master | django/lib/python2.7/site-packages/django/contrib/gis/geoip/base.py | 68 | import os
import re
from ctypes import c_char_p
from django.core.validators import ipv4_re
from django.contrib.gis.geoip.libgeoip import GEOIP_SETTINGS
from django.contrib.gis.geoip.prototypes import (
GeoIP_open, GeoIP_delete, GeoIP_database_info,
GeoIP_lib_version, GeoIP_record_by_addr, GeoIP_record_by_name,
GeoIP_country_code_by_addr, GeoIP_country_code_by_name,
GeoIP_country_name_by_addr, GeoIP_country_name_by_name)
from django.utils import six
from django.utils.encoding import force_bytes
# Regular expressions for recognizing the GeoIP free database editions.
free_regex = re.compile(r'^GEO-\d{3}FREE')
lite_regex = re.compile(r'^GEO-\d{3}LITE')
#### GeoIP classes ####
class GeoIPException(Exception):
    """Raised for invalid GeoIP configuration (bad cache option, missing or
    unusable database path) or an unrecognized database edition."""
    pass
class GeoIP(object):
# The flags for GeoIP memory caching.
# GEOIP_STANDARD - read database from filesystem, uses least memory.
#
# GEOIP_MEMORY_CACHE - load database into memory, faster performance
# but uses more memory
#
# GEOIP_CHECK_CACHE - check for updated database. If database has been
# updated, reload filehandle and/or memory cache. This option
# is not thread safe.
#
# GEOIP_INDEX_CACHE - just cache the most frequently accessed index
# portion of the database, resulting in faster lookups than
# GEOIP_STANDARD, but less memory usage than GEOIP_MEMORY_CACHE -
# useful for larger databases such as GeoIP Organization and
# GeoIP City. Note, for GeoIP Country, Region and Netspeed
# databases, GEOIP_INDEX_CACHE is equivalent to GEOIP_MEMORY_CACHE
#
# GEOIP_MMAP_CACHE - load database into mmap shared memory ( not available
# on Windows).
GEOIP_STANDARD = 0
GEOIP_MEMORY_CACHE = 1
GEOIP_CHECK_CACHE = 2
GEOIP_INDEX_CACHE = 4
GEOIP_MMAP_CACHE = 8
cache_options = dict((opt, None) for opt in (0, 1, 2, 4, 8))
# Paths to the city & country binary databases.
_city_file = ''
_country_file = ''
# Initially, pointers to GeoIP file references are NULL.
_city = None
_country = None
    def __init__(self, path=None, cache=0, country=None, city=None):
        """
        Initializes the GeoIP object, no parameters are required to use default
        settings. Keyword arguments may be passed in to customize the locations
        of the GeoIP data sets.

        * path: Base directory to where GeoIP data is located or the full path
            to where the city or country data files (*.dat) are located.
            Assumes that both the city and country data sets are located in
            this directory; overrides the GEOIP_PATH settings attribute.

        * cache: The cache settings when opening up the GeoIP datasets,
            and may be an integer in (0, 1, 2, 4, 8) corresponding to
            the GEOIP_STANDARD, GEOIP_MEMORY_CACHE, GEOIP_CHECK_CACHE,
            GEOIP_INDEX_CACHE, and GEOIP_MMAP_CACHE, `GeoIPOptions` C API
            settings, respectively. Defaults to 0, meaning that the data is read
            from the disk.

        * country: The name of the GeoIP country data file. Defaults to
            'GeoIP.dat'; overrides the GEOIP_COUNTRY settings attribute.

        * city: The name of the GeoIP city data file. Defaults to
            'GeoLiteCity.dat'; overrides the GEOIP_CITY settings attribute.
        """
        # Checking the given cache option.
        if cache in self.cache_options:
            self._cache = cache
        else:
            raise GeoIPException('Invalid GeoIP caching option: %s' % cache)
        # Getting the GeoIP data path.
        if not path:
            path = GEOIP_SETTINGS.get('GEOIP_PATH', None)
            if not path:
                raise GeoIPException('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
        if not isinstance(path, six.string_types):
            raise TypeError('Invalid path type: %s' % type(path).__name__)
        if os.path.isdir(path):
            # Constructing the GeoIP database filenames using the settings
            # dictionary.  If the database files for the GeoLite country
            # and/or city datasets exist, then try and open them.
            country_db = os.path.join(path, country or GEOIP_SETTINGS.get('GEOIP_COUNTRY', 'GeoIP.dat'))
            if os.path.isfile(country_db):
                self._country = GeoIP_open(force_bytes(country_db), cache)
                self._country_file = country_db
            city_db = os.path.join(path, city or GEOIP_SETTINGS.get('GEOIP_CITY', 'GeoLiteCity.dat'))
            if os.path.isfile(city_db):
                self._city = GeoIP_open(force_bytes(city_db), cache)
                self._city_file = city_db
        elif os.path.isfile(path):
            # Otherwise, some detective work will be needed to figure
            # out whether the given database path is for the GeoIP country
            # or city databases.  The edition string reported by the C
            # library is matched against the free/lite regexes defined at
            # module level.
            ptr = GeoIP_open(force_bytes(path), cache)
            info = GeoIP_database_info(ptr)
            if lite_regex.match(info):
                # GeoLite City database detected.
                self._city = ptr
                self._city_file = path
            elif free_regex.match(info):
                # GeoIP Country database detected.
                self._country = ptr
                self._country_file = path
            else:
                raise GeoIPException('Unable to recognize database edition: %s' % info)
        else:
            raise GeoIPException('GeoIP path must be a valid file or directory.')
def __del__(self):
# Cleaning any GeoIP file handles lying around.
if GeoIP_delete is None:
return
if self._country:
GeoIP_delete(self._country)
if self._city:
GeoIP_delete(self._city)
def _check_query(self, query, country=False, city=False, city_or_country=False):
"Helper routine for checking the query and database availability."
# Making sure a string was passed in for the query.
if not isinstance(query, six.string_types):
raise TypeError('GeoIP query must be a string, not type %s' % type(query).__name__)
# Extra checks for the existence of country and city databases.
if city_or_country and not (self._country or self._city):
raise GeoIPException('Invalid GeoIP country and city data files.')
elif country and not self._country:
raise GeoIPException('Invalid GeoIP country data file: %s' % self._country_file)
elif city and not self._city:
raise GeoIPException('Invalid GeoIP city data file: %s' % self._city_file)
# Return the query string back to the caller. GeoIP only takes bytestrings.
return force_bytes(query)
def city(self, query):
"""
Returns a dictionary of city information for the given IP address or
Fully Qualified Domain Name (FQDN). Some information in the dictionary
may be undefined (None).
"""
enc_query = self._check_query(query, city=True)
if ipv4_re.match(query):
# If an IP address was passed in
return GeoIP_record_by_addr(self._city, c_char_p(enc_query))
else:
# If a FQDN was passed in.
return GeoIP_record_by_name(self._city, c_char_p(enc_query))
def country_code(self, query):
"Returns the country code for the given IP Address or FQDN."
enc_query = self._check_query(query, city_or_country=True)
if self._country:
if ipv4_re.match(query):
return GeoIP_country_code_by_addr(self._country, enc_query)
else:
return GeoIP_country_code_by_name(self._country, enc_query)
else:
return self.city(query)['country_code']
def country_name(self, query):
"Returns the country name for the given IP Address or FQDN."
enc_query = self._check_query(query, city_or_country=True)
if self._country:
if ipv4_re.match(query):
return GeoIP_country_name_by_addr(self._country, enc_query)
else:
return GeoIP_country_name_by_name(self._country, enc_query)
else:
return self.city(query)['country_name']
def country(self, query):
"""
Returns a dictionary with the country code and name when given an
IP address or a Fully Qualified Domain Name (FQDN). For example, both
'24.124.1.80' and 'djangoproject.com' are valid parameters.
"""
# Returning the country code and name
return {'country_code': self.country_code(query),
'country_name': self.country_name(query),
}
#### Coordinate retrieval routines ####
def coords(self, query, ordering=('longitude', 'latitude')):
cdict = self.city(query)
if cdict is None:
return None
else:
return tuple(cdict[o] for o in ordering)
def lon_lat(self, query):
"Returns a tuple of the (longitude, latitude) for the given query."
return self.coords(query)
def lat_lon(self, query):
"Returns a tuple of the (latitude, longitude) for the given query."
return self.coords(query, ('latitude', 'longitude'))
def geos(self, query):
"Returns a GEOS Point object for the given query."
ll = self.lon_lat(query)
if ll:
from django.contrib.gis.geos import Point
return Point(ll, srid=4326)
else:
return None
#### GeoIP Database Information Routines ####
@property
def country_info(self):
"Returns information about the GeoIP country database."
if self._country is None:
ci = 'No GeoIP Country data in "%s"' % self._country_file
else:
ci = GeoIP_database_info(self._country)
return ci
@property
def city_info(self):
"Retuns information about the GeoIP city database."
if self._city is None:
ci = 'No GeoIP City data in "%s"' % self._city_file
else:
ci = GeoIP_database_info(self._city)
return ci
@property
def info(self):
"Returns information about the GeoIP library and databases in use."
info = ''
if GeoIP_lib_version:
info += 'GeoIP Library:\n\t%s\n' % GeoIP_lib_version()
return info + 'Country:\n\t%s\nCity:\n\t%s' % (self.country_info, self.city_info)
#### Methods for compatibility w/the GeoIP-Python API. ####
@classmethod
def open(cls, full_path, cache):
return GeoIP(full_path, cache)
def _rec_by_arg(self, arg):
if self._city:
return self.city(arg)
else:
return self.country(arg)
region_by_addr = city
region_by_name = city
record_by_addr = _rec_by_arg
record_by_name = _rec_by_arg
country_code_by_addr = country_code
country_code_by_name = country_code
country_name_by_addr = country_name
country_name_by_name = country_name
|
aaltinisik/OCBAltinkaya | refs/heads/8.0 | addons/portal_project_issue/__init__.py | 493 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
|
acsone/multi-company | refs/heads/8.0 | product_multi_company/models/__init__.py | 2 | # -*- coding: utf-8 -*-
# (c) 2015 Serv. Tecnol. Avanzados - Pedro M. Baeza
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import product_template
|
saurabh6790/aimobilize-app-backup | refs/heads/master | stock/report/serial_no_service_contract_expiry/__init__.py | 12133432 | |
PopCap/GameIdea | refs/heads/master | Engine/Source/ThirdParty/HTML5/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/pythonwin/pywin/mfc/__init__.py | 12133432 | |
ydaniv/django-currencies | refs/heads/master | currencies/tests/__init__.py | 12133432 | |
drmrd/ansible | refs/heads/devel | lib/ansible/modules/monitoring/logstash_plugin.py | 30 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Loic Blot <loic.blot@unix-experience.fr>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: logstash_plugin
short_description: Manage Logstash plugins
description:
- Manages Logstash plugins.
version_added: "2.3"
author: Loic Blot (@nerzhul)
options:
name:
description:
- Install plugin with that name.
required: True
state:
description:
- Apply plugin state.
choices: ["present", "absent"]
default: present
plugin_bin:
description:
- Specify logstash-plugin to use for plugin management.
default: /usr/share/logstash/bin/logstash-plugin
proxy_host:
description:
- Proxy host to use during plugin installation.
proxy_port:
description:
- Proxy port to use during plugin installation.
version:
description:
- Specify plugin Version of the plugin to install.
If plugin exists with previous version, it will NOT be updated.
'''
EXAMPLES = '''
- name: Install Logstash beats input plugin
logstash_plugin:
state: present
name: logstash-input-beats
- name: Install specific version of a plugin
logstash_plugin:
state: present
name: logstash-input-syslog
version: '3.2.0'
- name: Uninstall Logstash plugin
logstash_plugin:
state: absent
name: logstash-filter-multiline
'''
from ansible.module_utils.basic import AnsibleModule
# Maps the module's 'state' choices to the logstash-plugin subcommands
# that realize them.
PACKAGE_STATE_MAP = dict(
    present="install",
    absent="remove"
)
def is_plugin_present(module, plugin_bin, plugin_name):
    """Return True when `plugin_name` is already installed.

    Runs ``<plugin_bin> list <plugin_name>`` and treats a zero exit
    status as "present".
    """
    command = " ".join([plugin_bin, "list", plugin_name])
    rc, _out, _err = module.run_command(command)
    return rc == 0
def parse_error(string):
    """Extract the text following the 'reason: ' marker from tool output.

    Returns the whole string unchanged when the marker is absent.
    """
    marker = "reason: "
    position = string.find(marker)
    if position == -1:
        return string
    return string[position + len(marker):].strip()
def install_plugin(module, plugin_bin, plugin_name, version, proxy_host, proxy_port):
    """Install `plugin_name` with logstash-plugin, honoring check mode.

    Optionally pins `version` and forwards JVM proxy properties when
    both proxy_host and proxy_port are supplied.  Fails the module via
    fail_json() on a non-zero exit status; otherwise returns the tuple
    (True, cmd, stdout, stderr).
    """
    pieces = [plugin_bin, PACKAGE_STATE_MAP["present"], plugin_name]
    if version:
        pieces.append("--version %s" % version)
    if proxy_host and proxy_port:
        pieces.append("-DproxyHost=%s -DproxyPort=%s" % (proxy_host, proxy_port))
    cmd = " ".join(pieces)
    if module.check_mode:
        rc, out, err = 0, "check mode", ""
    else:
        rc, out, err = module.run_command(cmd)
    if rc != 0:
        module.fail_json(msg=parse_error(out))
    return True, cmd, out, err
def remove_plugin(module, plugin_bin, plugin_name):
    """Uninstall `plugin_name` with logstash-plugin, honoring check mode.

    Fails the module via fail_json() on a non-zero exit status;
    otherwise returns the tuple (True, cmd, stdout, stderr).
    """
    cmd = " ".join([plugin_bin, PACKAGE_STATE_MAP["absent"], plugin_name])
    if module.check_mode:
        rc, out, err = 0, "check mode", ""
    else:
        rc, out, err = module.run_command(cmd)
    if rc != 0:
        module.fail_json(msg=parse_error(out))
    return True, cmd, out, err
def main():
    """Ansible module entry point: converge the requested plugin state."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            state=dict(default="present", choices=PACKAGE_STATE_MAP.keys()),
            plugin_bin=dict(default="/usr/share/logstash/bin/logstash-plugin", type="path"),
            proxy_host=dict(default=None),
            proxy_port=dict(default=None),
            version=dict(default=None)
        ),
        supports_check_mode=True
    )

    params = module.params
    name = params["name"]
    state = params["state"]
    plugin_bin = params["plugin_bin"]

    present = is_plugin_present(module, plugin_bin, name)

    # Nothing to do when the observed state already matches the request.
    if (present and state == "present") or (not present and state == "absent"):
        module.exit_json(changed=False, name=name, state=state)

    if state == "present":
        changed, cmd, out, err = install_plugin(
            module, plugin_bin, name, params["version"],
            params["proxy_host"], params["proxy_port"])
    elif state == "absent":
        changed, cmd, out, err = remove_plugin(module, plugin_bin, name)

    module.exit_json(changed=changed, cmd=cmd, name=name, state=state, stdout=out, stderr=err)


if __name__ == '__main__':
    main()
|
neumerance/cloudloon2 | refs/heads/master | .venv/lib/python2.7/site-packages/django/contrib/sessions/backends/cached_db.py | 102 | """
Cached, database-backed sessions.
"""
from django.contrib.sessions.backends.db import SessionStore as DBStore
from django.core.cache import cache
from django.core.exceptions import SuspiciousOperation
from django.utils import timezone
KEY_PREFIX = "django.contrib.sessions.cached_db"
class SessionStore(DBStore):
    """
    Implements cached, database backed sessions.
    """
    def __init__(self, session_key=None):
        super(SessionStore, self).__init__(session_key)

    @property
    def cache_key(self):
        # Namespace the cache entry so session keys cannot collide with
        # other users of the shared cache.
        return KEY_PREFIX + self._get_or_create_session_key()

    def load(self):
        # Try the cache first; fall back to the database on a miss (or
        # when the backend rejects the key outright).
        try:
            data = cache.get(self.cache_key, None)
        except Exception:
            # Some backends (e.g. memcache) raise an exception on invalid
            # cache keys. If this happens, reset the session. See #17810.
            data = None
        if data is None:
            # Duplicate DBStore.load, because we need to keep track
            # of the expiry date to set it properly in the cache.
            try:
                s = Session.objects.get(
                    session_key=self.session_key,
                    expire_date__gt=timezone.now()
                )
                data = self.decode(s.session_data)
                cache.set(self.cache_key, data,
                          self.get_expiry_age(expiry=s.expire_date))
            except (Session.DoesNotExist, SuspiciousOperation):
                # Unknown or tampered session: start a fresh one.
                self.create()
                data = {}
        return data

    def exists(self, session_key):
        # A cache hit is authoritative; otherwise defer to the database.
        if (KEY_PREFIX + session_key) in cache:
            return True
        return super(SessionStore, self).exists(session_key)

    def save(self, must_create=False):
        super(SessionStore, self).save(must_create)
        # Mirror the freshly saved session into the cache for fast loads.
        cache.set(self.cache_key, self._session, self.get_expiry_age())

    def delete(self, session_key=None):
        super(SessionStore, self).delete(session_key)
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        cache.delete(KEY_PREFIX + session_key)

    def flush(self):
        """
        Removes the current session data from the database and regenerates the
        key.
        """
        self.clear()
        self.delete(self.session_key)
        self.create()
# At bottom to avoid circular import
from django.contrib.sessions.models import Session
|
40223247/2015cd_midterm-master | refs/heads/master | static/Brython3.1.3-20150514-095342/Lib/site-packages/pygame/draw.py | 603 | from javascript import console
from browser import timer
import math
class Queue:
    """Minimal FIFO queue feeding the canvas draw-animation loop.

    Only the operations the drawing code needs are provided: put(),
    get() and empty().
    """

    def __init__(self):
        self._list = []

    def empty(self):
        """Return True when no elements are queued."""
        return len(self._list) == 0

    def put(self, element):
        """Append `element` to the back of the queue."""
        self._list.append(element)

    def get(self):
        """Pop and return the front element.

        Raises IndexError when the queue is empty.  (The original code
        raised the undefined name `BaseError`, which surfaced as a
        NameError; IndexError matches stdlib container semantics.)
        """
        if not self._list:
            raise IndexError('get() on an empty Queue')
        # pop(0) replaces the original copy-the-tail slicing; same
        # FIFO result without rebuilding the list by hand.
        return self._list.pop(0)
# Registry of one DrawManager per canvas, created lazily by the
# aaline/aapolygon helpers below.
dm={}
def aaline(canvas, color, startpos, endpos, width, outline, blend=1):
    """Queue an animated line draw on `canvas` (speed 10 px/tick).

    A DrawManager (with its timer) is created the first time a given
    canvas is drawn on.  `blend` is accepted for pygame API
    compatibility but unused here.
    """
    manager = dm.get(canvas)
    if manager is None:
        manager = dm[canvas] = DrawManager(canvas)
        manager.process()
    line = DrawLine(startpos[0], startpos[1], endpos[0], endpos[1],
                    color, width, outline, speed=10)
    manager.add_line(line)
def aapolygon(canvas, color, coordinates, width, outline, blend=1):
    """Queue a polygon draw on `canvas`.

    `blend` is accepted for pygame API compatibility but unused here.
    """
    manager = dm.get(canvas)
    if manager is None:
        manager = dm[canvas] = DrawManager(canvas)
        manager.process()
    manager.add_polygon(DrawPolygon(coordinates, color, width, outline, speed=10))
def aapolygon_bg(canvas, shape):
    """Queue a background capture of the area `shape` will cover.

    The captured pixels are restored before the next polygon draws.
    """
    manager = dm.get(canvas)
    if manager is None:
        manager = dm[canvas] = DrawManager(canvas)
        manager.process()
    manager.add_polygon_bg(shape)
class DrawPolygon:
    """Value object describing a polygon draw request.

    The first coordinate becomes the path's starting point (`moveTo`);
    the remaining coordinates are the successive segments.  `speed` is
    accepted for symmetry with DrawLine but is not stored.
    """

    def __init__(self, coordinates, color, width, outline, speed=10):
        start, tail = coordinates[0], coordinates[1:]
        self.moveTo = start
        self.segments = tail
        self.color = color
        self.width = width
        self.outline = outline
class DrawLine:
    """Describes a line draw request, optionally split into animation steps.

    When `speed` is 0 or the line is shorter than `speed` pixels, the
    whole line is emitted as one segment; otherwise it is cut into
    roughly `speed`-pixel pieces so DrawManager can render it
    incrementally (one piece per timer tick).
    """

    def __init__(self, x0, y0, x1, y1, color, width, outline, speed=None):
        self._type = 'LINE'
        self._x0 = x0
        self._x1 = x1
        self._y0 = y0
        self._y1 = y1
        self._speed = speed
        self._color = color
        self._width = width
        self._outline = outline

    def _segment(self, x0, y0, x1, y1):
        """Build one segment dict in the shape DrawManager consumes."""
        return {'type': self._type, 'x0': x0, 'y0': y0,
                'x1': x1, 'y1': y1, 'color': self._color}

    def get_segments(self):
        """Return the list of segment dicts for this line.

        Fixes two defects in the original body: the segment dict
        literals were syntactically invalid (':' instead of ',' after
        the 'type' value), and the trailing partial segment repeated
        the last computed point instead of extending to the true
        endpoint (x1, y1).
        """
        whole_line = [self._segment(self._x0, self._y0, self._x1, self._y1)]
        if self._speed == 0:
            # Speed 0 means "no animation": a single segment suffices.
            return whole_line
        dx = self._x1 - self._x0
        dy = self._y1 - self._y0
        distance = math.sqrt(dx * dx + dy * dy)
        if distance < self._speed:
            # Short enough to draw in one step.
            return whole_line
        segments = []
        num_segments = math.floor(distance / self._speed)
        pos_x, pos_y = self._x0, self._y0
        for i in range(1, num_segments + 1):
            x = self._x0 + i / num_segments * dx
            y = self._y0 + i / num_segments * dy
            segments.append(self._segment(pos_x, pos_y, x, y))
            pos_x, pos_y = x, y
        if pos_x != self._x1 or pos_y != self._y1:
            # Cover any remainder left over by the integer segment count.
            segments.append(self._segment(pos_x, pos_y, self._x1, self._y1))
        return segments
class DrawManager:
    """Replays queued draw operations on a canvas from a 10 ms timer.

    Segments queued by add_line()/add_polygon() are popped one per tick
    in __interval(), which makes lines appear to be drawn progressively.
    """

    def __init__(self, canvas):
        self._queue=Queue()
        self._canvas=canvas
        self._ctx=canvas.getContext('2d')
        self._interval=None
        self._bg=None #used to capture bg before polygon is drawn

    def __del__(self):
        # Stop the tick timer before dropping the queue.
        # NOTE(review): Brython's timer module exposes clear_interval;
        # confirm the clear_Interval spelling used here actually exists.
        if self._interval is not None:
            timer.clear_Interval(self._interval)
        self._interval=None
        del self._queue

    def rect_from_shape(self, points):
        # Compute the axis-aligned bounding box of `points`, shifted by
        # half the canvas size and padded by half a pixel on each side.
        # NOTE(review): the y terms fold _h2 inside floor/ceil while the
        # x terms add _w2 outside -- looks inconsistent; confirm intended.
        _width=self._canvas.width
        _height=self._canvas.height
        _min_x=_width
        _max_x=0
        _min_y=_height
        _max_y=0
        for _point in points:
            _x, _y = _point
            _min_x=min(_min_x, _x)
            _min_y=min(_min_y, _y)
            _max_x=max(_max_x, _x)
            _max_y=max(_max_y, _y)
        _w2=_width/2
        _h2=_height/2
        return math.floor(_min_x-0.5)+_w2, math.floor(_min_y-0.5+_h2), \
               math.ceil(_max_x+0.5)+_w2, math.ceil(_max_y+0.5+_h2)

    def __interval(self):
        # Timer callback: draw at most one queued operation per tick.
        if not self._queue.empty():
            _dict=self._queue.get()
            if _dict['type'] == 'LINE':
                self._ctx.beginPath()
                self._ctx.moveTo(_dict['x0'], _dict['y0'])
                self._ctx.lineTo(_dict['x1'], _dict['y1'])
                #if _dict['outline'] is not None:
                #   self._ctx.strokeStyle=_dict['outline'] #set line color
                if _dict['color'] is not None:
                    self._ctx.fillStyle=_dict['color']
                self._ctx.stroke()
            elif _dict['type'] == 'POLYGON':
                # Restore any previously captured background first, so the
                # new polygon replaces (not overdraws) the old frame.
                if self._bg is not None:
                    self._ctx.putImageData(self._bg[0], self._bg[1], self._bg[2])
                    console.log(self._bg[0])
                    self._bg=None
                self._ctx.beginPath()
                _moveTo=_dict['moveTo']
                self._ctx.moveTo(_moveTo[0], _moveTo[1])
                for _segment in _dict['segments']:
                    self._ctx.lineTo(_segment[0], _segment[1])
                if _dict['width']:
                    self._ctx.lineWidth=_dict['width']
                if _dict['outline']:
                    self._ctx.strokeStyle=_dict['outline']
                if _dict['color']:
                    self._ctx.fillStyle=_dict['color']
                    self._ctx.fill()
                self._ctx.closePath()
                self._ctx.stroke()
            elif _dict['type'] == 'POLYGON_BG':
                # Snapshot the pixels under the polygon's bounding box so
                # they can be restored before the next POLYGON draw.
                _x0,_y0,_x1,_y1=self.rect_from_shape(_dict['shape'])
                console.log(_x0,_y0,_x1, _y1)
                self._bg=[]
                self._bg.append(self._ctx.getImageData(_x0,_y0,abs(_x1)-abs(_x0),abs(_y1)-abs(_y0)))
                self._bg.append(_x0)
                self._bg.append(_y0)

    def process(self):
        # Start draining the queue at ~100 ticks per second.
        self._interval=timer.set_interval(self.__interval, 10)

    def add_line(self, dl): #color, startpos, endpos, width, outline, speed=None):
        # Queue each animation segment of the line individually.
        for _segment in dl.get_segments():
            self._queue.put(_segment)

    def add_polygon(self, dp):
        self._queue.put({'type': 'POLYGON', 'moveTo': dp.moveTo,
                         'segments': dp.segments, 'color': dp.color,
                         'outline': dp.outline, 'width': dp.width})

    def add_polygon_bg(self, shape):
        self._queue.put({'type': 'POLYGON_BG', 'shape': shape})
|
MeteorAdminz/viper | refs/heads/master | viper/modules/rats/smallnet.py | 12 | # Originally written by Kevin Breen (@KevTheHermit):
# https://github.com/kevthehermit/RATDecoders/blob/master/SmallNet.py
def ver_52(data):
    """Extract the configuration from a SmallNet 5.2 config blob.

    `data` is the decoded config string whose fields are delimited by
    the '!!<3SAFIA<3!!' marker; index 0 (anything before the first
    marker) is not a config field.  Returns a dict of setting name ->
    raw string value.  Key spellings (including typos such as
    'Disbale') are preserved for compatibility with existing consumers.
    """
    # (index in the split string, output key)
    field_map = (
        (1, 'Domain'),
        (2, 'Port'),
        (3, 'Disbale Registry'),
        (4, 'Disbale TaskManager'),
        (5, 'Install Server'),
        (8, 'Registry Key'),
        (9, 'Install Name'),
        (10, 'Disbale UAC'),
        (13, 'Anti-Sandboxie'),
        (14, 'Anti-Anubis'),
        (15, 'Anti-VirtualBox'),
        (16, 'Anti-VmWare'),
        (17, 'Anti-VirtualPC'),
        (18, 'ServerID'),
        (19, 'USB Spread'),
        (20, 'P2P Spread'),
        (21, 'RAR Spread'),
        (22, 'MSN Spread'),
        (23, 'Yahoo Spread'),
        (24, 'LAN Spread'),
        (25, 'Disbale Firewall'),
        (26, 'Delay Execution MiliSeconds'),
        (27, 'Attribute Read Only'),
        (28, 'Attribute System File'),
        (29, 'Attribute Hidden'),
        (30, 'Attribute Compressed'),
        (31, 'Attribute Temporary'),
        (32, 'Attribute Archive'),
        (33, 'Modify Creation Date'),
        (34, 'Modified Creation Data'),
        (35, 'Thread Persistance'),
        (36, 'Anti-ZoneAlarm'),
        (37, 'Anti-SpyTheSpy'),
        (38, 'Anti-NetStat'),
        (39, 'Anti-TiGeRFirewall'),
        (40, 'Anti-TCPview'),
        (41, 'Anti-CurrentPorts'),
        (42, 'Anti-RogueKiller'),
        (43, 'Enable MessageBox'),
        (44, 'MessageBox Message'),
        (45, 'MessageBox Icon'),
        (46, 'MessageBox Buttons'),
        (47, 'MessageBox Title'),
    )
    config_parts = data.split('!!<3SAFIA<3!!')
    config_dict = {key: config_parts[idx] for idx, key in field_map}
    # Install-path flags.  str.split() yields strings, so the original
    # `== 1` int comparisons could never match; compare against '1'
    # instead (assumes the builder emits "0"/"1" flags -- TODO confirm
    # against more samples).  Later flags intentionally override earlier
    # ones, matching the original cascade of independent ifs.
    if config_parts[6] == '1':
        config_dict['Install Path'] = 'Temp'
    if config_parts[7] == '1':
        config_dict['Install Path'] = 'Windows'
    if config_parts[11] == '1':
        config_dict['Install Path'] = 'System32'
    if config_parts[12] == '1':
        config_dict['Install Path'] = 'Program Files'
    return config_dict
def ver_5(data):
    """Extract the configuration from a SmallNet 5.0 config blob.

    Same layout as ver_52() but with fewer fields and the
    '!!ElMattadorDz!!' delimiter.  Returns a dict of setting name ->
    raw string value; key spellings are preserved as-is.
    """
    # (index in the split string, output key)
    field_map = (
        (1, 'Domain'),
        (2, 'Port'),
        (3, 'Disable Registry'),
        (4, 'Disbale TaskManager'),
        (5, 'Install Server'),
        (8, 'Registry Key'),
        (9, 'Install Name'),
        (10, 'Disbale UAC'),
        (13, 'Anti-Sandboxie'),
        (14, 'Anti-Anubis'),
        (15, 'Anti-VirtualBox'),
        (16, 'Anti-VmWare'),
        (17, 'Anti-VirtualPC'),
        (18, 'ServerID'),
        (19, 'USB Spread'),
        (20, 'P2P Spread'),
        (21, 'RAR Spread'),
        (22, 'MSN Spread'),
        (23, 'Yahoo Spread'),
        (24, 'LAN Spread'),
        (25, 'Disbale Firewall'),
        (26, 'Delay Execution MiliSeconds'),
    )
    config_parts = data.split('!!ElMattadorDz!!')
    config_dict = {key: config_parts[idx] for idx, key in field_map}
    # str.split() yields strings, so the original `== 1` comparisons
    # could never match; compare against '1' (assumes "0"/"1" flags --
    # TODO confirm against samples).  Later flags override earlier ones.
    if config_parts[6] == '1':
        config_dict['Install Path'] = 'Temp'
    if config_parts[7] == '1':
        config_dict['Install Path'] = 'Windows'
    if config_parts[11] == '1':
        config_dict['Install Path'] = 'System32'
    if config_parts[12] == '1':
        config_dict['Install Path'] = 'Program Files'
    return config_dict
def config(data):
    """Dispatch to the right SmallNet parser based on the delimiter
    present in `data`; returns None when neither marker is found.
    """
    if '!!<3SAFIA<3!!' in data:
        return ver_52(data)
    if '!!ElMattadorDz!!' in data:
        return ver_5(data)
|
wangjun/FileBackup | refs/heads/master | config/base.py | 2 | #coding:UTF-8
"""
基本配置
"""
# Backup source definitions: each entry names a directory to archive
# ('path') and the filename prefix used for its backup output ('name').
backup_paths=[
    {'path':'/home/yubang/tmp/backup123','name':'back1_'}
]
|
Universal-Model-Converter/UMC3.0a | refs/heads/master | data/Python/x86/Lib/test/test_grp.py | 39 | """Test script for the grp module."""
import unittest
from test import test_support
grp = test_support.import_module('grp')
class GroupDatabaseTestCase(unittest.TestCase):
    """Sanity checks for the C `grp` module against the live group database."""

    def check_value(self, value):
        # check that a grp tuple has the entries and
        # attributes promised by the docs
        self.assertEqual(len(value), 4)
        self.assertEqual(value[0], value.gr_name)
        self.assertIsInstance(value.gr_name, basestring)
        self.assertEqual(value[1], value.gr_passwd)
        self.assertIsInstance(value.gr_passwd, basestring)
        self.assertEqual(value[2], value.gr_gid)
        self.assertIsInstance(value.gr_gid, (long, int))
        self.assertEqual(value[3], value.gr_mem)
        self.assertIsInstance(value.gr_mem, list)

    def test_values(self):
        # Every entry must round-trip through getgrgid()/getgrnam().
        entries = grp.getgrall()
        for e in entries:
            self.check_value(e)
        if len(entries) > 1000:  # Huge group file (NIS?) -- skip the rest
            return
        for e in entries:
            e2 = grp.getgrgid(e.gr_gid)
            self.check_value(e2)
            self.assertEqual(e2.gr_gid, e.gr_gid)
            name = e.gr_name
            if name.startswith('+') or name.startswith('-'):
                # NIS-related entry
                continue
            e2 = grp.getgrnam(name)
            self.check_value(e2)
            # There are instances where getgrall() returns group names in
            # lowercase while getgrgid() returns proper casing.
            # Discovered on Ubuntu 5.04 (custom).
            self.assertEqual(e2.gr_name.lower(), name.lower())

    def test_errors(self):
        self.assertRaises(TypeError, grp.getgrgid)
        self.assertRaises(TypeError, grp.getgrnam)
        self.assertRaises(TypeError, grp.getgrall, 42)
        # try to get some errors
        bynames = {}
        bygids = {}
        for (n, p, g, mem) in grp.getgrall():
            if not n or n == '+':
                continue # skip NIS entries etc.
            bynames[n] = g
            bygids[g] = n
        allnames = bynames.keys()
        namei = 0
        fakename = allnames[namei]
        # Mutate the candidate name until it no longer exists in the
        # database: replace a 'z' with 'A', skip over 'Z', otherwise bump
        # the character by one; fall back to the next real name when
        # every character is exhausted.
        while fakename in bynames:
            chars = list(fakename)
            for i in xrange(len(chars)):
                if chars[i] == 'z':
                    chars[i] = 'A'
                    break
                elif chars[i] == 'Z':
                    continue
                else:
                    chars[i] = chr(ord(chars[i]) + 1)
                    break
            else:
                namei = namei + 1
                try:
                    fakename = allnames[namei]
                except IndexError:
                    # should never happen... if so, just forget it
                    break
            fakename = ''.join(chars)
        self.assertRaises(KeyError, grp.getgrnam, fakename)
        # Choose a non-existent gid.
        fakegid = 4127
        while fakegid in bygids:
            fakegid = (fakegid * 3) % 0x10000
        self.assertRaises(KeyError, grp.getgrgid, fakegid)
def test_main():
    # Entry point used both by regrtest and by direct execution.
    test_support.run_unittest(GroupDatabaseTestCase)

if __name__ == "__main__":
    test_main()
|
spool/django-allauth | refs/heads/master | allauth/socialaccount/providers/hubic/views.py | 10 | import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
from .provider import HubicProvider
class HubicOAuth2Adapter(OAuth2Adapter):
    """OAuth2 endpoint definitions for the Hubic (OVH) provider."""
    provider_id = HubicProvider.id
    access_token_url = 'https://api.hubic.com/oauth/token'
    authorize_url = 'https://api.hubic.com/oauth/auth'
    profile_url = 'https://api.hubic.com/1.0/account'
    # Force https in the callback redirect URI.
    redirect_uri_protocol = 'https'

    def complete_login(self, request, app, token, **kwargs):
        # Fetch the Hubic account profile with the fresh access token
        # and build a SocialLogin from the returned JSON.
        token_type = kwargs['response']['token_type']
        resp = requests.get(
            self.profile_url,
            headers={'Authorization': '%s %s' % (token_type, token.token)})
        extra_data = resp.json()
        return self.get_provider().sociallogin_from_response(request,
                                                             extra_data)
# Ready-to-use view callables wired to the Hubic adapter.
oauth2_login = OAuth2LoginView.adapter_view(HubicOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(HubicOAuth2Adapter)
|
longman694/youtube-dl | refs/heads/mod | youtube_dl/extractor/toutv.py | 8 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
int_or_none,
js_to_json,
urlencode_postdata,
extract_attributes,
smuggle_url,
)
class TouTvIE(InfoExtractor):
    """Extractor for ici.tou.tv (Radio-Canada's Tou.tv service)."""
    _NETRC_MACHINE = 'toutv'
    IE_NAME = 'tou.tv'
    _VALID_URL = r'https?://ici\.tou\.tv/(?P<id>[a-zA-Z0-9_-]+(?:/S[0-9]+E[0-9]+)?)'
    # Populated by _real_initialize() after a successful login; both stay
    # None for anonymous access.
    _access_token = None
    _claims = None

    _TESTS = [{
        'url': 'http://ici.tou.tv/garfield-tout-court/S2015E17',
        'info_dict': {
            'id': '122017',
            'ext': 'mp4',
            'title': 'Saison 2015 Épisode 17',
            'description': 'La photo de famille 2',
            'upload_date': '20100717',
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
        'skip': '404 Not Found',
    }, {
        'url': 'http://ici.tou.tv/hackers',
        'only_matching': True,
    }]

    def _real_initialize(self):
        # Log in with the configured credentials (if any) and cache the
        # resulting access token plus media-validation claims.
        email, password = self._get_login_info()
        if email is None:
            return
        state = 'http://ici.tou.tv//'
        webpage = self._download_webpage(state, None, 'Downloading homepage')
        toutvlogin = self._parse_json(self._search_regex(
            r'(?s)toutvlogin\s*=\s*({.+?});', webpage, 'toutvlogin'), None, js_to_json)
        authorize_url = toutvlogin['host'] + '/auth/oauth/v2/authorize'
        login_webpage = self._download_webpage(
            authorize_url, None, 'Downloading login page', query={
                'client_id': toutvlogin['clientId'],
                'redirect_uri': 'https://ici.tou.tv/login/loginCallback',
                'response_type': 'token',
                'scope': 'media-drmt openid profile email id.write media-validation.read.privileged',
                'state': state,
            })
        login_form = self._search_regex(
            r'(?s)(<form[^>]+(?:id|name)="Form-login".+?</form>)', login_webpage, 'login form')
        form_data = self._hidden_inputs(login_form)
        form_data.update({
            'login-email': email,
            'login-password': password,
        })
        post_url = extract_attributes(login_form).get('action') or authorize_url
        _, urlh = self._download_webpage_handle(
            post_url, None, 'Logging in', data=urlencode_postdata(form_data))
        # The access token comes back as a fragment/query parameter on
        # the redirect URL rather than in the response body.
        self._access_token = self._search_regex(
            r'access_token=([\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})',
            urlh.geturl(), 'access token')
        self._claims = self._download_json(
            'https://services.radio-canada.ca/media/validation/v2/getClaims',
            None, 'Extracting Claims', query={
                'token': self._access_token,
                'access_token': self._access_token,
            })['claims']

    def _real_extract(self, url):
        path = self._match_id(url)
        metadata = self._download_json('http://ici.tou.tv/presentation/%s' % path, path)
        # IsDrm does not necessarily mean the video is DRM protected (see
        # https://github.com/rg3/youtube-dl/issues/13994).
        if metadata.get('IsDrm'):
            self.report_warning('This video is probably DRM protected.', path)
        video_id = metadata['IdMedia']
        details = metadata['Details']
        title = details['OriginalTitle']
        # Delegate the actual media resolution to the radiocanada extractor,
        # smuggling the auth data along when logged in.
        video_url = 'radiocanada:%s:%s' % (metadata.get('AppCode', 'toutv'), video_id)
        if self._access_token and self._claims:
            video_url = smuggle_url(video_url, {
                'access_token': self._access_token,
                'claims': self._claims,
            })
        return {
            '_type': 'url_transparent',
            'url': video_url,
            'id': video_id,
            'title': title,
            'thumbnail': details.get('ImageUrl'),
            'duration': int_or_none(details.get('LengthInSeconds')),
        }
|
googleapis/googleapis-gen | refs/heads/master | google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/pagers.py | 1 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional
from google.cloud.recommendationengine_v1beta1.types import prediction_apikey_registry_service
class ListPredictionApiKeyRegistrationsPager:
    """A pager for iterating through ``list_prediction_api_key_registrations`` requests.

    Wraps an initial
    :class:`google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsResponse`
    and exposes ``__iter__`` over its ``prediction_api_key_registrations``
    field, transparently issuing follow-up
    ``ListPredictionApiKeyRegistrations`` requests while the current
    response carries a ``next_page_token``.

    Attributes of the most recent response are proxied through the pager
    itself, so the usual response fields remain available.
    """
    def __init__(self,
            method: Callable[..., prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse],
            request: prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest,
            response: prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The API method originally called; reused for
                follow-up pages.
            request (google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsRequest):
                The initial request object.
            response (google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        self._request = prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute access to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse]:
        current = self._response
        yield current
        while current.next_page_token:
            self._request.page_token = current.next_page_token
            current = self._method(self._request, metadata=self._metadata)
            self._response = current
            yield current

    def __iter__(self) -> Iterable[prediction_apikey_registry_service.PredictionApiKeyRegistration]:
        for page in self.pages:
            for registration in page.prediction_api_key_registrations:
                yield registration

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListPredictionApiKeyRegistrationsAsyncPager:
    """A pager for iterating through ``list_prediction_api_key_registrations`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsResponse` object, and
    provides an ``__aiter__`` method to iterate through its
    ``prediction_api_key_registrations`` field.

    If there are more pages, the ``__aiter__`` method will make additional
    ``ListPredictionApiKeyRegistrations`` requests and continue to iterate
    through the ``prediction_api_key_registrations`` field on the
    corresponding responses.

    All the usual :class:`google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., Awaitable[prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse]],
            request: prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest,
            response: prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsRequest):
                The initial request object.
            response (google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so that page_token mutations below never affect
        # the caller's request object.
        self._request = prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attribute lookups to the most recent response.
        return getattr(self._response, name)

    @property
    # Accessing this property calls the async generator function, returning
    # an async iterator over whole response pages.
    async def pages(self) -> AsyncIterable[prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterable[prediction_apikey_registry_service.PredictionApiKeyRegistration]:
        # Flatten pages into individual registrations via a nested async
        # generator (``yield`` cannot appear directly in __aiter__).
        async def async_generator():
            async for page in self.pages:
                for response in page.prediction_api_key_registrations:
                    yield response

        return async_generator()

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
|
davidyezsetz/kuma | refs/heads/master | vendor/packages/pylint/test/input/func_return_yield_mix.py | 6 | """pylint should detect yield and return mix inside genrators"""
__revision__ = None
def somegen():
    """this is a bad generator"""
    # Intentionally mixes `return <value>` with `yield` inside one generator:
    # this file is pylint test input and must keep triggering that warning.
    # Do not "fix" the mix.
    if True:
        return 1
    else:
        yield 2
|
MycChiu/tensorflow | refs/heads/master | tensorflow/python/saved_model/loader_impl.py | 29 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Loader implementation for SavedModel with hermetic, language-neutral exports.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from google.protobuf import message
from google.protobuf import text_format
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.core.protobuf import saved_model_pb2
from tensorflow.python.framework import ops
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import tf_logging
from tensorflow.python.saved_model import constants
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.util import compat
def _parse_saved_model(export_dir):
  """Reads the savedmodel.pb or savedmodel.pbtxt file containing `SavedModel`.

  The binary (pb) file is preferred when both formats are present.

  Args:
    export_dir: Directory containing the SavedModel file.

  Returns:
    A `SavedModel` protocol buffer.

  Raises:
    IOError: If the file does not exist, or cannot be successfully parsed.
  """
  # Build the path to the SavedModel in pbtxt format.
  path_to_pbtxt = os.path.join(
      compat.as_bytes(export_dir),
      compat.as_bytes(constants.SAVED_MODEL_FILENAME_PBTXT))
  # Build the path to the SavedModel in pb format.
  path_to_pb = os.path.join(
      compat.as_bytes(export_dir),
      compat.as_bytes(constants.SAVED_MODEL_FILENAME_PB))

  # Parse the SavedModel protocol buffer.
  saved_model = saved_model_pb2.SavedModel()
  if file_io.file_exists(path_to_pb):
    try:
      file_content = file_io.FileIO(path_to_pb, "rb").read()
      saved_model.ParseFromString(file_content)
      return saved_model
    except message.DecodeError as e:
      raise IOError("Cannot parse file %s: %s." % (path_to_pb, str(e)))
  elif file_io.file_exists(path_to_pbtxt):
    try:
      file_content = file_io.FileIO(path_to_pbtxt, "rb").read()
      # The pbtxt variant is UTF-8 text; decode before merging.
      text_format.Merge(file_content.decode("utf-8"), saved_model)
      return saved_model
    except text_format.ParseError as e:
      raise IOError("Cannot parse file %s: %s." % (path_to_pbtxt, str(e)))
  else:
    raise IOError("SavedModel file does not exist at: %s/{%s|%s}" %
                  (export_dir,
                   constants.SAVED_MODEL_FILENAME_PBTXT,
                   constants.SAVED_MODEL_FILENAME_PB))
def _get_asset_tensors(export_dir, meta_graph_def_to_load):
  """Gets the asset tensors, if defined in the meta graph def to load.

  Args:
    export_dir: Directory where the SavedModel is located.
    meta_graph_def_to_load: The meta graph def from the SavedModel to be loaded.

  Returns:
    A dictionary of asset tensors, keyed by the name of the asset tensor. The
    value in the map corresponds to the absolute path of the asset file.
  """
  # Collection-def that may contain the assets key.
  collection_def = meta_graph_def_to_load.collection_def

  asset_tensor_dict = {}
  if constants.ASSETS_KEY in collection_def:
    # Location of the assets for SavedModel.
    assets_directory = os.path.join(
        compat.as_bytes(export_dir),
        compat.as_bytes(constants.ASSETS_DIRECTORY))
    # Assets are stored as serialized AssetFileDef protos inside Any protos.
    assets_any_proto = collection_def[constants.ASSETS_KEY].any_list.value
    # Process each asset and add it to the asset tensor dictionary.
    for asset_any_proto in assets_any_proto:
      asset_proto = meta_graph_pb2.AssetFileDef()
      asset_any_proto.Unpack(asset_proto)
      asset_tensor_dict[asset_proto.tensor_info.name] = os.path.join(
          compat.as_bytes(assets_directory),
          compat.as_bytes(asset_proto.filename))

  return asset_tensor_dict
def _get_main_op_tensor(meta_graph_def_to_load):
  """Gets the main op tensor, if one exists.

  Args:
    meta_graph_def_to_load: The meta graph def from the SavedModel to be loaded.

  Returns:
    The main op tensor if present, `None` otherwise.

  Raises:
    RuntimeError: If the collection def corresponding to the main op key has
      other than exactly one tensor.
  """
  collections = meta_graph_def_to_load.collection_def
  if constants.MAIN_OP_KEY not in collections:
    return None
  node_names = collections[constants.MAIN_OP_KEY].node_list.value
  if len(node_names) != 1:
    raise RuntimeError("Expected exactly one SavedModel main op.")
  return ops.get_collection(constants.MAIN_OP_KEY)[0]
def _get_legacy_init_op_tensor(meta_graph_def_to_load):
  """Gets the legacy init op tensor, if one exists.

  Args:
    meta_graph_def_to_load: The meta graph def from the SavedModel to be loaded.

  Returns:
    The legacy init op tensor if present, `None` otherwise.

  Raises:
    RuntimeError: If the collection def corresponding to the legacy init op key
      has other than exactly one tensor.
  """
  collections = meta_graph_def_to_load.collection_def
  if constants.LEGACY_INIT_OP_KEY not in collections:
    return None
  node_names = collections[constants.LEGACY_INIT_OP_KEY].node_list.value
  if len(node_names) != 1:
    raise RuntimeError("Expected exactly one legacy serving init op.")
  return ops.get_collection(constants.LEGACY_INIT_OP_KEY)[0]
def maybe_saved_model_directory(export_dir):
  """Checks whether the provided export directory could contain a SavedModel.

  Note that the method does not load any data by itself. If the method returns
  `false`, the export directory definitely does not contain a SavedModel. If
  the method returns `true`, the export directory may contain a SavedModel but
  provides no guarantee that it can be loaded.

  Args:
    export_dir: Absolute string path to possible export location. For example,
                '/my/foo/model'.

  Returns:
    True if the export directory contains SavedModel files, False otherwise.
  """
  candidates = (
      os.path.join(export_dir, constants.SAVED_MODEL_FILENAME_PBTXT),
      os.path.join(export_dir, constants.SAVED_MODEL_FILENAME_PB),
  )
  return any(file_io.file_exists(p) for p in candidates)
def load(sess, tags, export_dir, **saver_kwargs):
  """Loads the model from a SavedModel as specified by tags.

  Args:
    sess: The TensorFlow session to restore the variables.
    tags: Set of string tags to identify the required MetaGraphDef. These should
        correspond to the tags used when saving the variables using the
        SavedModel `save()` API.
    export_dir: Directory in which the SavedModel protocol buffer and variables
        to be loaded are located.
    **saver_kwargs: Optional keyword arguments passed through to Saver.

  Returns:
    The `MetaGraphDef` protocol buffer loaded in the provided session. This
    can be used to further extract signature-defs, collection-defs, etc.

  Raises:
    RuntimeError: MetaGraphDef associated with the tags cannot be found.
  """
  # Build the SavedModel protocol buffer and find the requested meta graph def.
  saved_model = _parse_saved_model(export_dir)
  found_match = False
  # The tag set must match exactly (not a subset) for a graph to be selected.
  for meta_graph_def in saved_model.meta_graphs:
    if set(meta_graph_def.meta_info_def.tags) == set(tags):
      meta_graph_def_to_load = meta_graph_def
      found_match = True
      break

  if not found_match:
    raise RuntimeError("MetaGraphDef associated with tags " + str(tags).strip(
        "[]") + " could not be found in SavedModel")

  # Build a saver by importing the meta graph def to load.
  saver = tf_saver.import_meta_graph(meta_graph_def_to_load, **saver_kwargs)

  if saver:
    # Build the checkpoint path where the variables are located.
    variables_path = os.path.join(
        compat.as_bytes(export_dir),
        compat.as_bytes(constants.VARIABLES_DIRECTORY),
        compat.as_bytes(constants.VARIABLES_FILENAME))

    # Restore the variables using the built saver in the provided session.
    saver.restore(sess, variables_path)
  else:
    tf_logging.info("The specified SavedModel has no variables; no "
                    "checkpoints were restored.")

  # Get asset tensors, if any.
  asset_tensors_dictionary = _get_asset_tensors(export_dir,
                                                meta_graph_def_to_load)

  # Run the main op (preferred) or fall back to the legacy init op, feeding
  # the asset paths so initialization can read them (e.g. vocab files).
  main_op_tensor = _get_main_op_tensor(meta_graph_def_to_load)
  if main_op_tensor is not None:
    sess.run(fetches=[main_op_tensor], feed_dict=asset_tensors_dictionary)
  else:
    legacy_init_op_tensor = _get_legacy_init_op_tensor(meta_graph_def_to_load)
    if legacy_init_op_tensor is not None:
      sess.run(fetches=[legacy_init_op_tensor],
               feed_dict=asset_tensors_dictionary)

  return meta_graph_def_to_load
|
sss/calibre-at-bzr | refs/heads/upstream/master | src/calibre/utils/filenames.py | 2 | '''
Make strings safe for use as ASCII filenames, while trying to preserve as much
meaning as possible.
'''
import os, errno
from math import ceil
from calibre import sanitize_file_name, isbytestring, force_unicode
from calibre.constants import (preferred_encoding, iswindows,
filesystem_encoding)
from calibre.utils.localization import get_udc
def ascii_text(orig):
    '''
    Transliterate a string to plain ASCII using the Unicode decomposition
    helper (get_udc). If transliteration fails, fall back to a lossy
    encode/decode with 'replace'. Python 2 code (uses `unicode`).
    '''
    udc = get_udc()
    try:
        ascii = udc.decode(orig)
    except:
        # Transliteration failed; degrade gracefully rather than raise.
        if isinstance(orig, unicode):
            orig = orig.encode('ascii', 'replace')
        ascii = orig.decode(preferred_encoding,
                'replace').encode('ascii', 'replace')
    return ascii
def ascii_filename(orig, substitute='_'):
    '''
    Return *orig* transliterated to a safe ASCII file name, with '?' (the
    marker for untransliterable characters) and control characters replaced.
    '''
    safe = ascii_text(orig).replace('?', '_')
    # Replace control characters (codepoints below 32) with the substitute.
    cleaned = ''.join(substitute if ord(ch) < 32 else ch for ch in safe)
    return sanitize_file_name(cleaned, substitute=substitute)
def supports_long_names(path):
    '''
    Return True if the filesystem containing *path* accepts a ~300 character
    file name, by actually creating and removing a probe file.
    '''
    try:
        probe = os.path.join(path, ('a' * 300) + '.txt')
        open(probe, 'wb').close()
        os.remove(probe)
    except:  # bare except kept: any failure at all means "not supported"
        return False
    return True
def shorten_component(s, by_what):
    '''
    Shorten *s* by roughly *by_what* characters, keeping the head and tail
    and dropping the middle. Returns *s* unchanged when it is too short.
    '''
    if len(s) < by_what:
        return s
    keep = (len(s) - by_what) // 2
    if keep <= 0:
        return s
    return s[:keep] + s[-keep:]
def shorten_components_to(length, components, more_to_take=0):
    '''
    Proportionally shorten the path *components* so that joining them with
    os.sep yields a path of at most *length* characters. more_to_take is used
    internally to retry more aggressively via recursion when the first
    proportional pass was insufficient.
    '''
    filepath = os.sep.join(components)
    extra = len(filepath) - (length - more_to_take)
    if extra < 1:
        return components
    deltas = []
    for x in components:
        # Each component gives up characters proportional to its share of
        # the total path length.
        pct = len(x)/float(len(filepath))
        deltas.append(int(ceil(pct*extra)))
    ans = []

    for i, x in enumerate(components):
        delta = deltas[i]
        if delta > len(x):
            # Cannot shorten this component enough: keep one character for
            # the final component (the file name), drop others entirely.
            r = x[0] if x is components[-1] else ''
        else:
            if x is components[-1]:
                # Final component: shorten the base name but preserve the
                # file extension.
                b, e = os.path.splitext(x)
                if e == '.': e = ''
                r = shorten_component(b, delta)+e
                if r.startswith('.'): r = x[0]+r
            else:
                r = shorten_component(x, delta)
            r = r.strip()
        if not r:
            # Never emit an empty component; fall back to a single char.
            r = x.strip()[0] if x.strip() else 'x'
        ans.append(r)
    if len(os.sep.join(ans)) > length:
        # Still too long (rounding/minimum-length effects): retry harder.
        return shorten_components_to(length, components, more_to_take+2)
    return ans
def find_executable_in_path(name, path=None):
    '''
    Search *path* (default: the PATH environment variable) for an executable
    called *name*, returning its absolute path or None if not found.
    '''
    if path is None:
        path = os.environ.get('PATH', '')
    if iswindows and not name.endswith('.exe'):
        name += '.exe'
    separator = ';' if iswindows else ':'
    for base in path.split(separator):
        candidate = os.path.abspath(os.path.join(base, name))
        if os.access(candidate, os.X_OK):
            return candidate
def is_case_sensitive(path):
    '''
    Return True if the filesystem is case sensitive.

    path must be the path to an existing directory. You must have permission
    to create and delete files in this directory. The results of this test
    apply to the filesystem containing the directory in path.
    '''
    is_case_sensitive = False
    # Windows filesystems are assumed case insensitive.
    if not iswindows:
        # Create a file, then probe for it under a differently-cased name;
        # if the probe misses, the filesystem is case sensitive.
        name1, name2 = ('calibre_test_case_sensitivity.txt',
                        'calibre_TesT_CaSe_sensitiVitY.Txt')
        f1, f2 = os.path.join(path, name1), os.path.join(path, name2)
        if os.path.exists(f1):
            os.remove(f1)
        open(f1, 'w').close()
        is_case_sensitive = not os.path.exists(f2)
        os.remove(f1)
    return is_case_sensitive
def case_preserving_open_file(path, mode='wb', mkdir_mode=0777):
    '''
    Open the file pointed to by path with the specified mode. If any
    directories in path do not exist, they are created. Returns the
    opened file object and the path to the opened file object. This path is
    guaranteed to have the same case as the on disk path. For case insensitive
    filesystems, the returned path may be different from the passed in path.
    The returned path is always unicode and always an absolute path.

    If mode is None, then this function assumes that path points to a directory
    and return the path to the directory as the file object.

    mkdir_mode specifies the mode with which any missing directories in path
    are created.

    Note: Python 2 code (0777 octal literal in the signature).
    '''
    if isbytestring(path):
        path = path.decode(filesystem_encoding)
    path = os.path.abspath(path)
    sep = force_unicode(os.sep, 'ascii')
    if path.endswith(sep):
        path = path[:-1]
    if not path:
        raise ValueError('Path must not point to root')
    components = path.split(sep)
    if not components:
        raise ValueError('Invalid path: %r'%path)
    cpath = sep
    if iswindows:
        # Always upper case the drive letter and add a trailing slash so that
        # the first os.listdir works correctly
        cpath = components[0].upper() + sep
    bdir = path if mode is None else os.path.dirname(path)
    if not os.path.exists(bdir):
        os.makedirs(bdir, mkdir_mode)

    # Walk all the directories in path, putting the on disk case version of
    # the directory into cpath
    dirs = components[1:] if mode is None else components[1:-1]
    for comp in dirs:
        cdir = os.path.join(cpath, comp)
        cl = comp.lower()
        try:
            candidates = [c for c in os.listdir(cpath) if c.lower() == cl]
        except:
            # Dont have permission to do the listdir, assume the case is
            # correct as we have no way to check it.
            pass
        else:
            if len(candidates) == 1:
                cdir = os.path.join(cpath, candidates[0])
            # else: We are on a case sensitive file system so cdir must already
            # be correct
        cpath = cdir

    if mode is None:
        # Caller asked for the directory itself, not a file inside it.
        ans = fpath = cpath
    else:
        fname = components[-1]
        ans = open(os.path.join(cpath, fname), mode)
        # Ensure file and all its metadata is written to disk so that subsequent
        # listdir() has file name in it. I don't know if this is actually
        # necessary, but given the diversity of platforms, best to be safe.
        ans.flush()
        os.fsync(ans.fileno())

        cl = fname.lower()
        try:
            candidates = [c for c in os.listdir(cpath) if c.lower() == cl]
        except EnvironmentError:
            # The containing directory, somehow disappeared?
            candidates = []
        if len(candidates) == 1:
            fpath = os.path.join(cpath, candidates[0])
        else:
            # We are on a case sensitive filesystem
            fpath = os.path.join(cpath, fname)
    return ans, fpath
def samefile_windows(src, dst):
    # Windows-specific samefile(): compare normalized path strings first,
    # then fall back to comparing the file identity reported by
    # GetFileInformationByHandle.
    import win32file
    from pywintypes import error

    samestring = (os.path.normcase(os.path.abspath(src)) ==
            os.path.normcase(os.path.abspath(dst)))
    if samestring:
        return True

    handles = []

    def get_fileid(x):
        # Return an identity tuple for the file at x, or None on failure.
        if isbytestring(x): x = x.decode(filesystem_encoding)
        try:
            # FILE_FLAG_BACKUP_SEMANTICS allows opening directories too.
            h = win32file.CreateFile(x, 0, 0, None, win32file.OPEN_EXISTING,
                    win32file.FILE_FLAG_BACKUP_SEMANTICS, 0)
            handles.append(h)
            data = win32file.GetFileInformationByHandle(h)
        except (error, EnvironmentError):
            return None
        # data[4] is the volume serial number; data[8]/data[9] are the
        # high/low parts of the file index (unique per volume).
        return (data[4], data[8], data[9])

    a, b = get_fileid(src), get_fileid(dst)
    for h in handles:
        win32file.CloseHandle(h)
    if a is None and b is None:
        # Neither file could be opened; assume they differ.
        return False
    return a == b
def samefile(src, dst):
    '''
    Check if two paths point to the same actual file on the filesystem. Handles
    symlinks, case insensitivity, mapped drives, etc.

    Returns True iff both paths exist and point to the same file on disk.

    Note: On windows will return True if the two string are identical (upto
    case) even if the file does not exist. This is because I have no way of
    knowing how reliable the GetFileInformationByHandle method is.
    '''
    if iswindows:
        return samefile_windows(src, dst)

    if hasattr(os.path, 'samefile'):
        # Unix: the OS resolves symlinks and hard links for us.
        try:
            return os.path.samefile(src, dst)
        except EnvironmentError:
            return False

    # Fallback for platforms with neither mechanism: compare canonicalized
    # path strings only.
    def canonical(p):
        return os.path.normcase(os.path.abspath(p))
    return canonical(src) == canonical(dst)
class WindowsAtomicFolderMove(object):
    '''
    Move all the files inside a specified folder in an atomic fashion,
    preventing any other process from locking a file while the operation is
    incomplete. Raises an IOError if another process has locked a file before
    the operation starts. Note that this only operates on the files in the
    folder, not any sub-folders.

    Note: Python 2 / win32-only code (iteritems, pywin32).
    '''

    def __init__(self, path):
        # Maps normalized absolute file path -> open win32 file handle.
        self.handle_map = {}

        import win32file, winerror
        from pywintypes import error

        if isbytestring(path): path = path.decode(filesystem_encoding)

        if not os.path.exists(path):
            return

        for x in os.listdir(path):
            f = os.path.normcase(os.path.abspath(os.path.join(path, x)))
            if not os.path.isfile(f): continue
            try:
                # Ensure the file is not read-only
                win32file.SetFileAttributes(f, win32file.FILE_ATTRIBUTE_NORMAL)
            except:
                pass
            try:
                # Share only DELETE so no other process can lock the file
                # for the duration of the move.
                h = win32file.CreateFile(f, win32file.GENERIC_READ,
                        win32file.FILE_SHARE_DELETE, None,
                        win32file.OPEN_EXISTING, win32file.FILE_FLAG_SEQUENTIAL_SCAN, 0)
            except error as e:
                self.close_handles()
                if getattr(e, 'winerror', 0) == winerror.ERROR_SHARING_VIOLATION:
                    err = IOError(errno.EACCES,
                            _('File is open in another process'))
                    err.filename = f
                    raise err
                raise
            except:
                self.close_handles()
                raise
            self.handle_map[f] = h

    def copy_path_to(self, path, dest):
        # Copy the already-opened source file *path* to *dest*, preferring a
        # hard link, falling back to a read/write copy via the held handle.
        import win32file
        handle = None
        for p, h in self.handle_map.iteritems():
            if samefile_windows(path, p):
                handle = h
                break
        if handle is None:
            if os.path.exists(path):
                raise ValueError(u'The file %r did not exist when this move'
                        ' operation was started'%path)
            else:
                raise ValueError(u'The file %r does not exist'%path)
        try:
            win32file.CreateHardLink(dest, path)
            # Size check: CreateHardLink can misbehave on network shares.
            if os.path.getsize(dest) != os.path.getsize(path):
                raise Exception('This apparently can happen on network shares. Sigh.')
            return
        except:
            pass
        with lopen(dest, 'wb') as f:
            while True:
                hr, raw = win32file.ReadFile(handle, 1024*1024)
                if hr != 0:
                    raise IOError(hr, u'Error while reading from %r'%path)
                if not raw:
                    break
                f.write(raw)

    def release_file(self, path):
        # Close and forget the handle for *path*, letting others lock it.
        key = None
        for p, h in self.handle_map.iteritems():
            if samefile_windows(path, p):
                key = (p, h)
                break
        if key is not None:
            import win32file
            win32file.CloseHandle(key[1])
            self.handle_map.pop(key[0])

    def close_handles(self):
        # Close every handle we still hold.
        import win32file
        for h in self.handle_map.itervalues():
            win32file.CloseHandle(h)
        self.handle_map = {}

    def delete_originals(self):
        # Delete all source files (deletable because the handles were opened
        # with FILE_SHARE_DELETE), then release the handles.
        import win32file
        for path in self.handle_map.iterkeys():
            win32file.DeleteFile(path)
        self.close_handles()
def hardlink_file(src, dest):
    # Create a hard link at dest pointing to src. On Windows the result is
    # verified by size because CreateHardLink can silently misbehave on
    # network shares; elsewhere defer to os.link.
    if iswindows:
        import win32file
        win32file.CreateHardLink(dest, src)
        if os.path.getsize(dest) != os.path.getsize(src):
            raise Exception('This apparently can happen on network shares. Sigh.')
        return
    os.link(src, dest)
|
abhilashnta/edx-platform | refs/heads/master | cms/djangoapps/contentstore/features/help.py | 90 | # pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
from nose.tools import assert_false # pylint: disable=no-name-in-module
from lettuce import step, world
@step(u'I should see online help for "([^"]*)"$')
def see_online_help_for(step, page_name):
    """Lettuce step: assert that the nav help link referencing *page_name*
    and the sock PDF help link are both present on the current page."""
    # make sure the online Help link exists on this page and contains the expected page name
    elements_found = world.browser.find_by_xpath(
        '//li[contains(@class, "nav-account-help")]//a[contains(@href, "{page_name}")]'.format(
            page_name=page_name
        )
    )
    assert_false(elements_found.is_empty())

    # make sure the PDF link on the sock of this page exists
    # for now, the PDF link stays constant for all the pages so we just check for "pdf"
    elements_found = world.browser.find_by_xpath(
        '//section[contains(@class, "sock")]//li[contains(@class, "js-help-pdf")]//a[contains(@href, "pdf")]'
    )
    assert_false(elements_found.is_empty())
|
rrrichter/bateuabad | refs/heads/master | bateuabad/settings.py | 1 | """
Django settings for bateuabad project.
Generated by 'django-admin startproject' using Django 1.9.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
LOCAL_SECRET_KEY = 'dummy'
LOCAL_DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'dummy',
'USER': 'dummy',
'PASSWORD': 'dummy',
'HOST': 'localhost',
'PORT': 5432,
}
}
DEBUG = True
if 'ISINHEROKU' in os.environ:
DEBUG = False
else:
from secrets_settings import *
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
if 'ISINHEROKU' in os.environ:
SECRET_KEY = os.environ.get('SECRET_KEY')
else:
SECRET_KEY = LOCAL_SECRET_KEY
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'bateuabad.apps.BateuabadConfig',
'board.apps.BoardConfig',
'polls.apps.PollsConfig',
'bootstrap3',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'bateuabad.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'bateuabad.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
if 'ISINHEROKU' in os.environ:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASSWORD'),
'HOST': 'localhost',
'PORT': 5432,
}
}
else:
DATABASES = LOCAL_DATABASES
# Update database configuration with $DATABASE_URL.
import dj_database_url
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Sao_Paulo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') |
suncycheng/intellij-community | refs/heads/master | python/testData/hierarchy/call/Static/OverriddenMethod/main.py | 80 | from file_1 import A
class B(A):
def target_func(self, p):
p.another_func()
class C(object):
def func1(self, a):
a.target_func(A())
def func2(self):
a = A()
b = B()
a.target_func(b)
def bar1(a):
a.target_func(a)
def bar2(a, b):
atf, btf = a.target_func, b.target_func
bar1(A())
bar2(A(), B())
B().target_<caret>func(A()) |
cberry777/dd-agent | refs/heads/master | checks.d/hdfs_namenode.py | 11 | # (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
'''
HDFS NameNode Metrics
---------------------
hdfs.namenode.capacity_total Total disk capacity in bytes
hdfs.namenode.capacity_used Disk usage in bytes
hdfs.namenode.capacity_remaining Remaining disk space left in bytes
hdfs.namenode.total_load Total load on the file system
hdfs.namenode.fs_lock_queue_length Lock queue length
hdfs.namenode.blocks_total Total number of blocks
hdfs.namenode.max_objects Maximum number of files HDFS supports
hdfs.namenode.files_total Total number of files
hdfs.namenode.pending_replication_blocks Number of blocks pending replication
hdfs.namenode.under_replicated_blocks Number of under replicated blocks
hdfs.namenode.scheduled_replication_blocks Number of blocks scheduled for replication
hdfs.namenode.pending_deletion_blocks Number of pending deletion blocks
hdfs.namenode.num_live_data_nodes Total number of live data nodes
hdfs.namenode.num_dead_data_nodes Total number of dead data nodes
hdfs.namenode.num_decom_live_data_nodes Number of decommissioning live data nodes
hdfs.namenode.num_decom_dead_data_nodes Number of decommissioning dead data nodes
hdfs.namenode.volume_failures_total Total volume failures
hdfs.namenode.estimated_capacity_lost_total Estimated capacity lost in bytes
hdfs.namenode.num_decommissioning_data_nodes Number of decommissioning data nodes
hdfs.namenode.num_stale_data_nodes Number of stale data nodes
hdfs.namenode.num_stale_storages Number of stale storages
hdfs.namenode.missing_blocks Number of missing blocks
hdfs.namenode.corrupt_blocks Number of corrupt blocks
'''
# stdlib
from urlparse import urljoin
# 3rd party
import requests
from requests.exceptions import Timeout, HTTPError, InvalidURL, ConnectionError
from simplejson import JSONDecodeError
# Project
from checks import AgentCheck
# Service check names
JMX_SERVICE_CHECK = 'hdfs.namenode.jmx.can_connect'
# URL Paths
JMX_PATH = 'jmx'
# Namesystem state bean
HDFS_NAME_SYSTEM_STATE_BEAN = 'Hadoop:service=NameNode,name=FSNamesystemState'
# Namesystem bean
HDFS_NAME_SYSTEM_BEAN = 'Hadoop:service=NameNode,name=FSNamesystem'
# Metric types
GAUGE = 'gauge'
# HDFS metrics
HDFS_NAME_SYSTEM_STATE_METRICS = {
'CapacityTotal' : ('hdfs.namenode.capacity_total', GAUGE),
'CapacityUsed' : ('hdfs.namenode.capacity_used', GAUGE),
'CapacityRemaining' : ('hdfs.namenode.capacity_remaining', GAUGE),
'TotalLoad' : ('hdfs.namenode.total_load', GAUGE),
'FsLockQueueLength' : ('hdfs.namenode.fs_lock_queue_length', GAUGE),
'BlocksTotal' : ('hdfs.namenode.blocks_total', GAUGE),
'MaxObjects' : ('hdfs.namenode.max_objects', GAUGE),
'FilesTotal' : ('hdfs.namenode.files_total', GAUGE),
'PendingReplicationBlocks' : ('hdfs.namenode.pending_replication_blocks', GAUGE),
'UnderReplicatedBlocks' : ('hdfs.namenode.under_replicated_blocks', GAUGE),
'ScheduledReplicationBlocks' : ('hdfs.namenode.scheduled_replication_blocks', GAUGE),
'PendingDeletionBlocks' : ('hdfs.namenode.pending_deletion_blocks', GAUGE),
'NumLiveDataNodes' : ('hdfs.namenode.num_live_data_nodes', GAUGE),
'NumDeadDataNodes' : ('hdfs.namenode.num_dead_data_nodes', GAUGE),
'NumDecomLiveDataNodes' : ('hdfs.namenode.num_decom_live_data_nodes', GAUGE),
'NumDecomDeadDataNodes' : ('hdfs.namenode.num_decom_dead_data_nodes', GAUGE),
'VolumeFailuresTotal' : ('hdfs.namenode.volume_failures_total', GAUGE),
'EstimatedCapacityLostTotal' : ('hdfs.namenode.estimated_capacity_lost_total', GAUGE),
'NumDecommissioningDataNodes' : ('hdfs.namenode.num_decommissioning_data_nodes', GAUGE),
'NumStaleDataNodes' : ('hdfs.namenode.num_stale_data_nodes', GAUGE),
'NumStaleStorages' : ('hdfs.namenode.num_stale_storages', GAUGE),
}
HDFS_NAME_SYSTEM_METRICS = {
'MissingBlocks' : ('hdfs.namenode.missing_blocks', GAUGE),
'CorruptBlocks' : ('hdfs.namenode.corrupt_blocks', GAUGE)
}
class HDFSNameNode(AgentCheck):
    '''
    Agent check that collects HDFS NameNode metrics from the JMX REST API.

    Requires one instance config key: 'hdfs_namenode_jmx_uri'.
    '''

    def check(self, instance):
        '''
        Entry point: read the JMX URI from the instance configuration,
        collect both namesystem metric groups, and emit an OK service
        check if everything succeeded (failures raise from the request
        helper, which emits CRITICAL before re-raising).
        '''
        jmx_address = instance.get('hdfs_namenode_jmx_uri')

        if jmx_address is None:
            raise Exception('The JMX URL must be specified in the instance configuration')

        # Get metrics from JMX
        self._hdfs_namenode_metrics(jmx_address,
            HDFS_NAME_SYSTEM_STATE_BEAN,
            HDFS_NAME_SYSTEM_STATE_METRICS)

        self._hdfs_namenode_metrics(jmx_address,
            HDFS_NAME_SYSTEM_BEAN,
            HDFS_NAME_SYSTEM_METRICS)

        self.service_check(JMX_SERVICE_CHECK,
            AgentCheck.OK,
            tags=['namenode_url:' + jmx_address],
            message='Connection to %s was successful' % jmx_address)

    def _hdfs_namenode_metrics(self, jmx_uri, bean_name, metrics):
        '''
        Get HDFS namenode metrics from JMX.

        :param jmx_uri: base NameNode HTTP address.
        :param bean_name: expected JMX bean name (used both as the query
            and to validate the response).
        :param metrics: mapping of JMX attribute -> (metric name, type).
        '''
        response = self._rest_request_to_json(jmx_uri,
            JMX_PATH,
            query_params={'qry':bean_name})

        beans = response.get('beans', [])
        tags = ['namenode_url:' + jmx_uri]

        if beans:
            bean = next(iter(beans))
            # BUG FIX: the fetched name used to be assigned to `bean_name`,
            # shadowing the expected-name parameter, so the comparison below
            # was always False and the sanity check never fired.
            actual_bean_name = bean.get('name')

            if actual_bean_name != bean_name:
                raise Exception("Unexpected bean name {0}".format(actual_bean_name))

            for metric, (metric_name, metric_type) in metrics.iteritems():
                metric_value = bean.get(metric)

                if metric_value is not None:
                    self._set_metric(metric_name, metric_type, metric_value, tags)

            # Derived metric: fraction of total capacity in use.
            if 'CapacityUsed' in bean and 'CapacityTotal' in bean:
                self._set_metric('hdfs.namenode.capacity_in_use', GAUGE,
                    float(bean['CapacityUsed']) / float(bean['CapacityTotal']), tags)

    def _set_metric(self, metric_name, metric_type, value, tags=None):
        '''
        Set a metric. Only GAUGE is currently supported; anything else is
        logged as an error rather than raised so one bad entry cannot
        abort the whole collection.
        '''
        if metric_type == GAUGE:
            self.gauge(metric_name, value, tags=tags)
        else:
            self.log.error('Metric type "%s" unknown' % (metric_type))

    def _rest_request_to_json(self, address, object_path, query_params):
        '''
        Query the given URL and return the JSON response.

        Emits a CRITICAL service check and re-raises on any timeout,
        connection, HTTP, or JSON decoding failure.
        '''
        response_json = None

        service_check_tags = ['namenode_url:' + address]

        url = address

        if object_path:
            url = self._join_url_dir(url, object_path)

        # Add query_params as arguments
        if query_params:
            query = '&'.join(['{0}={1}'.format(key, value) for key, value in query_params.iteritems()])
            url = urljoin(url, '?' + query)

        self.log.debug('Attempting to connect to "%s"' % url)

        try:
            # NOTE(review): assumes the base AgentCheck provides
            # `default_integration_http_timeout` -- confirm against the
            # agent version in use.
            response = requests.get(url, timeout=self.default_integration_http_timeout)
            response.raise_for_status()
            response_json = response.json()

        except Timeout as e:
            self.service_check(JMX_SERVICE_CHECK,
                AgentCheck.CRITICAL,
                tags=service_check_tags,
                message="Request timeout: {0}, {1}".format(url, e))
            raise

        except (HTTPError,
                InvalidURL,
                ConnectionError) as e:
            self.service_check(JMX_SERVICE_CHECK,
                AgentCheck.CRITICAL,
                tags=service_check_tags,
                message="Request failed: {0}, {1}".format(url, e))
            raise

        except JSONDecodeError as e:
            self.service_check(JMX_SERVICE_CHECK,
                AgentCheck.CRITICAL,
                tags=service_check_tags,
                message='JSON Parse failed: {0}, {1}'.format(url, e))
            raise

        except ValueError as e:
            self.service_check(JMX_SERVICE_CHECK,
                AgentCheck.CRITICAL,
                tags=service_check_tags,
                message=str(e))
            raise

        return response_json

    def _join_url_dir(self, url, *args):
        '''
        Join a URL with multiple directories, preserving a single slash
        between each segment.
        '''
        for path in args:
            url = url.rstrip('/') + '/'
            url = urljoin(url, path.lstrip('/'))

        return url
|
leaflabs/leafysd | refs/heads/master | test/test_acquire.py | 1 | """Tests acquisition: ACQUIRE to disk, sleep, stop, send and check samples."""
from __future__ import print_function
import unittest
import shutil
import sys
import tempfile
import time
import os
import test_helpers
from daemon_control import *
SLEEP_TIME_SEC = 60. * 0.2
class TestAcquire(test_helpers.DaemonTest):
    """Acquire to disk, wait, stop, then store the samples and validate them."""

    def setUp(self):
        # Scratch directory that receives the HDF5 output of each test.
        self.tmpdir = tempfile.mkdtemp()
        super(TestAcquire, self).setUp()

    def tearDown(self):
        shutil.rmtree(self.tmpdir)
        super(TestAcquire, self).tearDown()

    def _expect_success(self, responses, label):
        # Unwrap a single control response and assert it reports SUCCESS.
        self.assertIsNotNone(responses)
        response = responses[0]
        self.assertEqual(response.type, ControlResponse.SUCCESS,
                         msg='\n' + label + ' response:\n' + str(response))

    def do_testAcquire(self, start_sample):
        if not test_helpers.DO_IT_LIVE:
            raise unittest.SkipTest()

        acquire_cmd = self.getAcquireCommand(enable=True,
                                             start_sample=start_sample,
                                             exp_cookie=start_sample)

        # Start acquisition
        self._expect_success(do_control_cmds([acquire_cmd]), 'enable')

        # Run for a while
        print('Sleeping', SLEEP_TIME_SEC / 60., 'minutes while acquiring... ',
              end='', file=sys.stderr)
        sys.stderr.flush()
        time.sleep(SLEEP_TIME_SEC)
        print('done.', file=sys.stderr)

        # Stop acquisition
        acquire_cmd.acquire.enable = False
        self._expect_success(do_control_cmds([acquire_cmd]), 'disable')

        # Get the data to disk
        store_path = os.path.join(self.tmpdir, "all_samples.h5")
        store_cmd = ControlCommand(type=ControlCommand.STORE)
        store_cmd.store.path = store_path
        store_cmd.store.start_sample = start_sample
        store_cmd.store.backend = STORE_HDF5
        store_responses = do_control_cmds([store_cmd])
        self.assertIsNotNone(store_responses)
        store_response = store_responses[0]
        self.assertEqual(store_response.type, ControlResponse.STORE_FINISHED)
        result = store_response.store
        detail = '\nstore response:\n' + str(result)
        self.assertEqual(result.status, ControlResStore.DONE, msg=detail)
        self.assertEqual(result.path, store_path)

        # Expect roughly SLEEP_TIME_SEC worth of samples, with slack for
        # start/stop latency.
        min_nsamples_expected = SLEEP_TIME_SEC * test_helpers.SAMPLE_RATE_HZ
        fudge_factor = 0.95
        self.assertTrue(result.nsamples > fudge_factor * min_nsamples_expected)

        # Ensure the data looks ok
        self.ensureHDF5OK(result.path, result.nsamples, exp_cookie=start_sample)

    def testAcquire0(self):
        self.do_testAcquire(0)

    def testAcquire1920(self):
        self.do_testAcquire(1920)

    def testAcquire192000(self):
        self.do_testAcquire(192000)
|
gerco/duplicati | refs/heads/master | thirdparty/rumps/rumps.py | 6 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# rumps: Ridiculously Uncomplicated Mac os x Python Statusbar apps.
# Copyright: (c) 2015, Jared Suttles. All rights reserved.
# License: BSD, see LICENSE for details.
#
# This file is a join of the rumps.py and utils.py source from:
# https://github.com/jaredks/rumps/commit/ae11371bddcafbabc5f62d1eadddf83bfbd653dc
#
# The method _nsimage_from_file has been modified to also allow
# loading a manually constructed NSImage
# The notification class has also been modified to allow an image
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
from collections import OrderedDict
_NOTIFICATIONS = True
try:
from Foundation import NSUserNotification, NSUserNotificationCenter
except ImportError:
_NOTIFICATIONS = False
from Foundation import (NSDate, NSTimer, NSRunLoop, NSDefaultRunLoopMode, NSSearchPathForDirectoriesInDomains,
NSMakeRect, NSLog, NSObject)
from AppKit import NSApplication, NSStatusBar, NSMenu, NSMenuItem, NSAlert, NSTextField, NSImage
from PyObjCTools import AppHelper
import os
import weakref
from collections import Mapping, Iterable
_TIMERS = weakref.WeakKeyDictionary()
separator = object()
# ListDict: OrderedDict subclass with insertion methods for modifying the order of the linked list in O(1) time
# https://gist.github.com/jaredks/6276032
class ListDict(OrderedDict):
    """OrderedDict subclass with methods for inserting items before or after
    an existing key (based on https://gist.github.com/jaredks/6276032).

    BUG FIX: the previous implementation referenced the nonexistent attribute
    ``self.OrderedDict`` (a corruption of upstream's private
    ``_OrderedDict__map`` linked list), so ``insert_after``/``insert_before``
    raised ``AttributeError``. This version uses only public ``OrderedDict``
    behavior -- O(n) per insertion instead of O(1), but it works on both the
    pure-Python and C implementations.
    """

    def __insertion(self, existing_key, key_value, offset):
        # Insert `key_value` at the slot `offset` places past `existing_key`
        # (0 == before it, 1 == after it). New pairs are placed with
        # OrderedDict.__setitem__ directly so subclass overrides (Menu adds
        # NSMenuItems separately) are bypassed, while evicting a duplicate
        # key goes through `del self[key]` so subclass __delitem__ still
        # runs -- both behaviors mirror the original implementation.
        key, value = key_value
        if existing_key not in self:
            raise KeyError(existing_key)
        order = list(self)
        pos = order.index(existing_key) + offset
        if pos > 0 and order[pos - 1] == key:
            # The key already occupies the target slot: just update its value.
            OrderedDict.__setitem__(self, key, value)
            return
        if key in self:
            del self[key]
            order = list(self)
            if key != existing_key:
                pos = order.index(existing_key) + offset
            # else: `pos` already indexes the slot the key vacated.
        # Append the new pair, then rotate every displaced key behind it.
        OrderedDict.__setitem__(self, key, value)
        for displaced in order[pos:]:
            displaced_value = self[displaced]
            OrderedDict.__delitem__(self, displaced)
            OrderedDict.__setitem__(self, displaced, displaced_value)

    def insert_after(self, existing_key, key_value):
        """Insert the (key, value) pair immediately after `existing_key`."""
        self.__insertion(existing_key, key_value, 1)

    def insert_before(self, existing_key, key_value):
        """Insert the (key, value) pair immediately before `existing_key`."""
        self.__insertion(existing_key, key_value, 0)
def debug_mode(choice):
    """Enable/disable printing helpful information for debugging the program. Default is off."""
    # Swap the module-level `_log` between a no-op and an NSLog-backed logger.
    global _log
    if not choice:
        def _log(*_):
            pass
    else:
        def _log(*args):
            NSLog(' '.join(map(str, args)))


debug_mode(False)
def alert(title=None, message='', ok=None, cancel=None):
    """Generate a simple alert window.

    .. versionchanged:: 0.2.0
        Providing a `cancel` string will set the button text rather than only using text "Cancel". `title` is no longer
        a required parameter.

    :param title: the text positioned at the top of the window in larger font. If ``None``, a default localized title
                  is used. If not ``None`` or a string, will use the string representation of the object.
    :param message: the text positioned below the `title` in smaller font. If not a string, will use the string
                    representation of the object.
    :param ok: the text for the "ok" button. Must be either a string or ``None``. If ``None``, a default
               localized button title will be used.
    :param cancel: the text for the "cancel" button. If a string, the button will have that text. If `cancel`
                   evaluates to ``True``, will create a button with text "Cancel". Otherwise, this button will not be
                   created.
    :return: a number representing the button pressed. The "ok" button is ``1`` and "cancel" is ``0``.
    """
    message = unicode(message)
    title = unicode(title) if title is not None else None
    _require_string_or_none(ok)
    # A truthy non-string `cancel` gets the default button text.
    if not isinstance(cancel, basestring):
        cancel = 'Cancel' if cancel else None
    window = NSAlert.alertWithMessageText_defaultButton_alternateButton_otherButton_informativeTextWithFormat_(
        title, ok, cancel, None, message)
    window.setAlertStyle_(0)  # informational style
    _log('alert opened with message: {0}, title: {1}'.format(repr(message), repr(title)))
    return window.runModal()
def notification(title, subtitle, message, data=None, sound=True, image=None):
    """Send a notification to Notification Center (Mac OS X 10.8+). If running on a version of Mac OS X that does not
    support notifications, a ``RuntimeError`` will be raised. Apple says,

        "The userInfo content must be of reasonable serialized size (less than 1k) or an exception will be thrown."

    So don't do that!

    :param title: text in a larger font.
    :param subtitle: text in a smaller font below the `title`.
    :param message: text representing the body of the notification below the `subtitle`.
    :param data: will be passed to the application's "notification center" (see :func:`rumps.notifications`) when this
                 notification is clicked.
    :param sound: whether the notification should make a noise when it arrives.
    :param image: local modification to upstream rumps -- an NSImage to attach
                  as the notification's content image, or ``None`` for no image.
    """
    if not _NOTIFICATIONS:
        raise RuntimeError('Mac OS X 10.8+ is required to send notifications')
    if data is not None and not isinstance(data, Mapping):
        raise TypeError('notification data must be a mapping')
    _require_string_or_none(title, subtitle, message)
    notification = NSUserNotification.alloc().init()
    notification.setTitle_(title)
    notification.setSubtitle_(subtitle)
    notification.setInformativeText_(message)
    notification.setUserInfo_({} if data is None else data)
    if sound:
        notification.setSoundName_("NSUserNotificationDefaultSoundName")
    # Idiom fix: identity comparison against None instead of `!=`.
    if image is not None:
        notification.setContentImage_(image)
    # Deliver immediately.
    notification.setDeliveryDate_(NSDate.dateWithTimeInterval_sinceDate_(0, NSDate.date()))
    NSUserNotificationCenter.defaultUserNotificationCenter().scheduleNotification_(notification)
def application_support(name):
    """Return the application support folder path for the given `name`, creating it if it doesn't exist."""
    # 14 == NSApplicationSupportDirectory, 1 == NSUserDomainMask, 1 == expand tilde.
    base_dir = NSSearchPathForDirectoriesInDomains(14, 1, 1).objectAtIndex_(0)
    support_dir = os.path.join(base_dir, name)
    if not os.path.isdir(support_dir):
        os.mkdir(support_dir)
    return support_dir
def timers():
    """Return a list of all :class:`rumps.Timer` objects. These can be active or inactive."""
    # Snapshot the weak registry into a plain list.
    return [registered for registered in _TIMERS]
def quit_application(sender=None):
    """Quit the application. Some menu item should call this function so that the application can exit gracefully."""
    shared_app = NSApplication.sharedApplication()
    _log('closing application')
    shared_app.terminate_(sender)
def _nsimage_from_file(filename, dimensions=None):
    """Take a path to an image file and return an NSImage object.

    Local modification to upstream rumps: `filename` may already be an
    NSImage, in which case it is returned unchanged.

    :param filename: a file path to an image, or an NSImage instance.
    :param dimensions: a two-element sequence for the image size; defaults
        to (20, 20).
    """
    # Idiom fixes: identity check against None, and isinstance instead of an
    # exact type comparison -- NSImage subclasses are now accepted too
    # (backward compatible: everything accepted before still is).
    if filename is not None and isinstance(filename, NSImage):
        return filename
    try:
        _log('attempting to open image at {0}'.format(filename))
        with open(filename):
            pass
    except IOError:  # literal file path didn't work -- try to locate image based on main script path
        try:
            from __main__ import __file__ as main_script_path
            main_script_path = os.path.dirname(main_script_path)
            filename = os.path.join(main_script_path, filename)
        except ImportError:
            pass
        _log('attempting (again) to open image at {0}'.format(filename))
        with open(filename):  # file doesn't exist
            pass              # otherwise silently errors in NSImage which isn't helpful for debugging
    image = NSImage.alloc().initByReferencingFile_(filename)
    image.setScalesWhenResized_(True)
    image.setSize_((20, 20) if dimensions is None else dimensions)
    return image
def _require_string(*objs):
    """Raise TypeError unless every argument is a string."""
    for candidate in objs:
        if isinstance(candidate, basestring):
            continue
        raise TypeError('a string is required but given {0}, a {1}'.format(candidate, type(candidate).__name__))
def _require_string_or_none(*objs):
    """Raise TypeError unless every argument is a string or None."""
    for candidate in objs:
        if candidate is None or isinstance(candidate, basestring):
            continue
        raise TypeError('a string or None is required but given {0}, a {1}'.format(candidate, type(candidate).__name__))
# Decorators and helper function serving to register functions for dealing with interaction and events
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def timer(interval):
    """Decorator for registering a function as a callback in a new thread. The function will be repeatedly called every
    `interval` seconds. This decorator accomplishes the same thing as creating a :class:`rumps.Timer` object by using
    the decorated function and `interval` as parameters and starting it on application launch.

    .. code-block:: python

        @rumps.timer(2)
        def repeating_function(sender):
            print 'hi'

    :param interval: a number representing the time in seconds before the decorated function should be called.
    """
    def decorator(f):
        # Stash pending timers on the decorator itself; App.run starts them.
        if '*timers' not in timer.__dict__:
            timer.__dict__['*timers'] = []
        timer.__dict__['*timers'].append(Timer(f, interval))
        return f
    return decorator
def clicked(*args, **options):
    """Decorator for registering a function as a callback for a click action on a :class:`rumps.MenuItem` within the
    application. The passed `args` must specify an existing path in the main menu. The :class:`rumps.MenuItem`
    instance at the end of that path will have its :meth:`rumps.MenuItem.set_callback` method called, passing in the
    decorated function.

    .. versionchanged:: 0.2.1
        Accepts `key` keyword argument.

    .. code-block:: python

        @rumps.clicked('Animal', 'Dog', 'Corgi')
        def corgi_button(sender):
            import subprocess
            subprocess.call(['say', '"corgis are the cutest"'])

    :param args: a series of strings representing the path to a :class:`rumps.MenuItem` in the main menu of the
                 application.
    :param key: a string representing the key shortcut as an alternative means of clicking the menu item.
    """
    def decorator(f):
        # Registration is deferred: at decoration time no App instance (and
        # therefore no menu) exists yet, so we queue `register_click` and the
        # App's run machinery invokes it later with the instance.
        def register_click(self):
            menuitem = self._menu  # self not defined yet but will be later in 'run' method
            if menuitem is None:
                raise ValueError('no menu created')
            # Walk (and create, where missing) the path of menu items.
            for arg in args:
                try:
                    menuitem = menuitem[arg]
                except KeyError:
                    menuitem.add(arg)
                    menuitem = menuitem[arg]
            menuitem.set_callback(f, options.get('key'))
        # delay registering the button until we have a current instance to be able to traverse the menu
        buttons = clicked.__dict__.setdefault('*buttons', [])
        buttons.append(register_click)
        return f
    return decorator
def notifications(f):
    """Decorator for registering a function to serve as a "notification center" for the application. This function will
    receive the data associated with an incoming OS X notification sent using :func:`rumps.notification`. This occurs
    whenever the user clicks on a notification for this application in the OS X Notification Center.

    .. code-block:: python

        @rumps.notifications
        def notification_center(info):
            if 'unix' in info:
                print 'i know this'
    """
    # Remember the handler on the decorator object itself; the App looks it
    # up when a notification is activated.
    setattr(notifications, '*notification_center', f)
    return f
def _call_as_function_or_method(f, event):
    # The idea here is that when using decorators in a class, the functions passed are not bound so we have to
    # determine later if the functions we have (those saved as callbacks) for particular events need to be passed
    # 'self'.
    #
    # This works for an App subclass method or a standalone decorated function. Will attempt to call function with event
    # alone then try with self and event. This might not be a great idea if the function is unbound and normally takes
    # two arguments... but people shouldn't be decorating functions that consume more than a single parameter anyway!
    #
    # Decorating methods of a class subclassing something other than App should produce AttributeError eventually which
    # is hopefully understandable.
    try:
        r = f(event)
        _log('given function {0} is outside an App subclass definition'.format(repr(f)))
        return r
    except TypeError as e:  # possibly try it with self if TypeError makes sense
        # NOTE: `e.message` is Python 2 only; the suffix text it is matched
        # against is also the CPython 2 wording of this TypeError.
        if e.message.endswith('takes exactly 2 arguments (1 given)'):
            # Looks like an unbound method: retry with the singleton App
            # instance as 'self'.
            r = f(getattr(App, '*app_instance'), event)
            _log('given function {0} is probably inside a class (which should be an App subclass)'.format(repr(f)))
            return r
        raise e
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class Menu(ListDict):
    """Wrapper for Objective-C's NSMenu class.

    Implements core functionality of menus in rumps. :class:`rumps.MenuItem` subclasses `Menu`.
    """

    # NOTE:
    # Only ever used as the main menu since every other menu would exist as a submenu of a MenuItem

    # Sentinel passed as `key` meaning "derive the key from the value's title".
    _choose_key = object()

    def __init__(self):
        # Counter used to generate unique keys for separator entries.
        self._separators = 1
        # MenuItem.__init__ may already have set `_menu`; only create one here
        # when acting as the standalone main menu.
        if not hasattr(self, '_menu'):
            self._menu = NSMenu.alloc().init()
        super(Menu, self).__init__()

    def __setitem__(self, key, value):
        # Duplicate keys are silently ignored; otherwise keep the NSMenu and
        # the ordered dict in sync.
        if key not in self:
            key, value = self._process_new_menuitem(key, value)
            self._menu.addItem_(value._menuitem)
            super(Menu, self).__setitem__(key, value)

    def __delitem__(self, key):
        # Remove from the NSMenu as well as the ordered dict.
        value = self[key]
        self._menu.removeItem_(value._menuitem)
        super(Menu, self).__delitem__(key)

    def add(self, menuitem):
        """Adds the object to the menu as a :class:`rumps.MenuItem` using the :attr:`rumps.MenuItem.title` as the
        key. `menuitem` will be converted to a `MenuItem` object if not one already.
        """
        self.__setitem__(self._choose_key, menuitem)

    def clear(self):
        """Remove all `MenuItem` objects from within the menu of this `MenuItem`."""
        self._menu.removeAllItems()
        super(Menu, self).clear()

    def copy(self):
        # Copying is unsupported: NSMenuItems cannot belong to two menus.
        raise NotImplementedError

    @classmethod
    def fromkeys(cls, *args, **kwargs):
        raise NotImplementedError

    def update(self, iterable, **kwargs):
        """Update with objects from `iterable` after each is converted to a :class:`rumps.MenuItem`, ignoring
        existing keys. This update is a bit different from the usual ``dict.update`` method. It works recursively and
        will parse a variety of Python containers and objects, creating `MenuItem` object and submenus as necessary.

        If the `iterable` is an instance of :class:`rumps.MenuItem`, then add to the menu.

        Otherwise, for each element in the `iterable`,

        - if the element is a string or is not an iterable itself, it will be converted to a
          :class:`rumps.MenuItem` and the key will be its string representation.

        - if the element is a :class:`rumps.MenuItem` already, it will remain the same and the key will be its
          :attr:`rumps.MenuItem.title` attribute.

        - if the element is an iterable having a length of 2, the first value will be converted to a
          :class:`rumps.MenuItem` and the second will act as the submenu for that `MenuItem`

        - if the element is an iterable having a length of anything other than 2, a ``ValueError`` will be raised

        - if the element is a mapping, each key-value pair will act as an iterable having a length of 2
        """
        def parse_menu(iterable, menu, depth):
            if isinstance(iterable, MenuItem):
                menu.add(iterable)
                return
            for n, ele in enumerate(iterable.iteritems() if isinstance(iterable, Mapping) else iterable):
                # for mappings we recurse but don't drop down a level in the menu
                if not isinstance(ele, MenuItem) and isinstance(ele, Mapping):
                    parse_menu(ele, menu, depth)
                # any iterables other than strings and MenuItems
                elif not isinstance(ele, (basestring, MenuItem)) and isinstance(ele, Iterable):
                    try:
                        menuitem, submenu = ele
                    except TypeError:
                        raise ValueError('menu iterable element #{0} at depth {1} has length {2}; must be a single '
                                         'menu item or a pair consisting of a menu item and its '
                                         'submenu'.format(n, depth, len(tuple(ele))))
                    menuitem = MenuItem(menuitem)
                    menu.add(menuitem)
                    parse_menu(submenu, menuitem, depth+1)
                # menu item / could be visual separator where ele is None or separator
                else:
                    menu.add(ele)
        parse_menu(iterable, self, 0)
        parse_menu(kwargs, self, 0)

    # ListDict insertion methods
    #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    def insert_after(self, existing_key, menuitem):
        """Insert a :class:`rumps.MenuItem` in the menu after the `existing_key`.

        :param existing_key: a string key for an existing `MenuItem` value.
        :param menuitem: an object to be added. It will be converted to a `MenuItem` if not one already.
        """
        key, menuitem = self._process_new_menuitem(self._choose_key, menuitem)
        self._insert_helper(existing_key, key, menuitem, 1)
        super(Menu, self).insert_after(existing_key, (key, menuitem))

    def insert_before(self, existing_key, menuitem):
        """Insert a :class:`rumps.MenuItem` in the menu before the `existing_key`.

        :param existing_key: a string key for an existing `MenuItem` value.
        :param menuitem: an object to be added. It will be converted to a `MenuItem` if not one already.
        """
        key, menuitem = self._process_new_menuitem(self._choose_key, menuitem)
        self._insert_helper(existing_key, key, menuitem, 0)
        super(Menu, self).insert_before(existing_key, (key, menuitem))

    def _insert_helper(self, existing_key, key, menuitem, pos):
        # Place the NSMenuItem relative to the anchor item; the ordered-dict
        # side is handled by the ListDict insert_* call that follows.
        if existing_key == key:  # this would mess stuff up...
            raise ValueError('same key provided for location and insertion')
        existing_menuitem = self[existing_key]
        index = self._menu.indexOfItem_(existing_menuitem._menuitem)
        self._menu.insertItem_atIndex_(menuitem._menuitem, index + pos)

    # Processing MenuItems
    #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    def _process_new_menuitem(self, key, value):
        # Normalize `value` to a MenuItem (or SeparatorMenuItem) and resolve
        # the key: `_choose_key` means "use the item's title" (or a generated
        # separator key).
        if value is None:
            value = separator
        if value is not separator:
            value = MenuItem(value)  # safely convert if not already MenuItem
            if key is self._choose_key:
                key = value.title
            if key != value.title:
                _log('WARNING: key {0} is not the same as the title of the corresponding MenuItem {1}; while this '
                     'would occur if the title is dynamically altered, having different names at the time of menu '
                     'creation may not be desired '.format(repr(key), repr(value.title)))
        else:
            value = SeparatorMenuItem()
            if key is self._choose_key:
                key = 'separator_' + str(self._separators)
                self._separators += 1
        return key, value
class MenuItem(Menu):
    """Represents an item within the application's menu.

    A :class:`rumps.MenuItem` is a button inside a menu but it can also serve as a menu itself whose elements are
    other `MenuItem` instances.

    Encapsulates and abstracts Objective-C NSMenuItem (and possibly a corresponding NSMenu as a submenu).

    A couple of important notes:

    - A new `MenuItem` instance can be created from any object with a string representation.

    - Attempting to create a `MenuItem` by passing an existing `MenuItem` instance as the first parameter will not
      result in a new instance but will instead return the existing instance.

    Remembers the order of items added to menu and has constant time lookup. Can insert new `MenuItem` object before or
    after other specified ones.

    .. note::
        When adding a `MenuItem` instance to a menu, the value of :attr:`title` at that time will serve as its key for
        lookup performed on menus even if the `title` changes during program execution.

    :param title: the name of this menu item. If not a string, will use the string representation of the object.
    :param callback: the function serving as callback for when a click event occurs on this menu item.
    :param key: the key shortcut to click this menu item. Must be a string or ``None``.
    :param icon: a path to an image. If set to ``None``, the current image (if any) is removed.
    :param dimensions: a sequence of numbers whose length is two, specifying the dimensions of the icon.
    """

    # NOTE:
    # Because of the quirks of PyObjC, a class level dictionary **inside an NSObject subclass for 10.9.x** is required
    # in order to have callback_ be a @classmethod. And we need callback_ to be class level because we can't use
    # instances in setTarget_ method of NSMenuItem. Otherwise this would be much more straightfoward like Timer class.
    #
    # So the target is always the NSApp class and action is always the @classmethod callback_ -- for every function
    # decorated with @clicked(...). All we do is lookup the MenuItem instance and the user-provided callback function
    # based on the NSMenuItem (the only argument passed to callback_).

    def __new__(cls, *args, **kwargs):
        # Passing an existing MenuItem returns it unchanged (see class docs).
        if args and isinstance(args[0], MenuItem):  # can safely wrap MenuItem instances
            return args[0]
        return super(MenuItem, cls).__new__(cls, *args, **kwargs)

    def __init__(self, title, callback=None, key=None, icon=None, dimensions=None):
        if isinstance(title, MenuItem):  # don't initialize already existing instances
            return
        self._menuitem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(unicode(title), None, '')
        # Target is the NSApp class; see the NOTE above.
        self._menuitem.setTarget_(NSApp)
        self._menu = self._icon = None
        self.set_callback(callback, key)
        self.set_icon(icon, dimensions)
        super(MenuItem, self).__init__()

    def __setitem__(self, key, value):
        # Lazily create the submenu the first time a child item is added.
        if self._menu is None:
            self._menu = NSMenu.alloc().init()
            self._menuitem.setSubmenu_(self._menu)
        super(MenuItem, self).__setitem__(key, value)

    def __repr__(self):
        return '<{0}: [{1} -> {2}; callback: {3}]>'.format(type(self).__name__, repr(self.title), map(str, self),
                                                           repr(self.callback))

    @property
    def title(self):
        """The text displayed in a menu for this menu item. If not a string, will use the string representation of the
        object.
        """
        return self._menuitem.title()

    @title.setter
    def title(self, new_title):
        new_title = unicode(new_title)
        self._menuitem.setTitle_(new_title)

    @property
    def icon(self):
        """The path to an image displayed next to the text for this menu item. If set to ``None``, the current image
        (if any) is removed.

        .. versionchanged:: 0.2.0
            Setting icon to ``None`` after setting it to an image will correctly remove the icon. Returns the path to an
            image rather than exposing a `PyObjC` class.
        """
        return self._icon

    @icon.setter
    def icon(self, icon_path):
        self.set_icon(icon_path)

    def set_icon(self, icon_path, dimensions=None):
        """Sets the icon displayed next to the text for this menu item. If set to ``None``, the current image (if any)
        is removed. Can optionally supply `dimensions`.

        .. versionchanged:: 0.2.0
            Setting `icon` to ``None`` after setting it to an image will correctly remove the icon. Passing `dimensions`
            a sequence whose length is not two will no longer silently error.

        :param icon_path: a file path to an image.
        :param dimensions: a sequence of numbers whose length is two.
        """
        new_icon = _nsimage_from_file(icon_path, dimensions) if icon_path is not None else None
        self._icon = icon_path
        self._menuitem.setImage_(new_icon)

    @property
    def state(self):
        """The state of the menu item. The "on" state is symbolized by a check mark. The "mixed" state is symbolized
        by a dash.

        .. table:: Setting states

           =====  ======
           State  Number
           =====  ======
            ON      1
            OFF     0
           MIXED   -1
           =====  ======
        """
        return self._menuitem.state()

    @state.setter
    def state(self, new_state):
        self._menuitem.setState_(new_state)

    def set_callback(self, callback, key=None):
        """Set the function serving as callback for when a click event occurs on this menu item. When `callback` is
        ``None``, it will disable the callback function and grey out the menu item. If `key` is a string, set as the
        key shortcut. If it is ``None``, no adjustment will be made to the current key shortcut.

        .. versionchanged:: 0.2.0
            Allowed passing ``None`` as both `callback` and `key`. Additionally, passing a `key` that is neither a
            string nor ``None`` will result in a standard ``TypeError`` rather than various, uninformative `PyObjC`
            internal errors depending on the object.

        :param callback: the function to be called when the user clicks on this menu item.
        :param key: the key shortcut to click this menu item.
        """
        _require_string_or_none(key)
        if key is not None:
            self._menuitem.setKeyEquivalent_(key)
        # Register (menu item, callback) under the NSMenuItem so NSApp's
        # class-level callback_ can find them later; a None callback greys
        # out the item by clearing its action.
        NSApp._ns_to_py_and_callback[self._menuitem] = self, callback
        self._menuitem.setAction_('callback:' if callback is not None else None)

    @property
    def callback(self):
        """Return the current callback function.

        .. versionadded:: 0.2.0
        """
        return NSApp._ns_to_py_and_callback[self._menuitem][1]

    @property
    def key(self):
        """The key shortcut to click this menu item.

        .. versionadded:: 0.2.0
        """
        return self._menuitem.keyEquivalent()
class SeparatorMenuItem(object):
    """Visual separator between :class:`rumps.MenuItem` objects in the application menu."""
    def __init__(self):
        # Wrap the native separator; Menu code only ever touches `_menuitem`.
        self._menuitem = NSMenuItem.separatorItem()
class Timer(object):
    """
    Python abstraction of an Objective-C event timer in a new thread for application. Controls the callback function,
    interval, and starting/stopping the run loop.

    .. versionchanged:: 0.2.0
        Method `__call__` removed.

    :param callback: Function that should be called every `interval` seconds. It will be passed this
                     :class:`rumps.Timer` object as its only parameter.
    :param interval: The time in seconds to wait before calling the `callback` function.
    """
    def __init__(self, callback, interval):
        self.set_callback(callback)
        # Desired interval in seconds; _status tracks whether the NSTimer
        # is currently scheduled on the run loop.
        self._interval = interval
        self._status = False

    def __repr__(self):
        return ('<{0}: [callback: {1}; interval: {2}; '
                'status: {3}]>').format(type(self).__name__, repr(getattr(self, '*callback').__name__),
                                        self._interval, 'ON' if self._status else 'OFF')

    @property
    def interval(self):
        """The time in seconds to wait before calling the :attr:`callback` function."""
        return self._interval  # self._nstimer.timeInterval() when active but could be inactive

    @interval.setter
    def interval(self, new_interval):
        if self._status:
            # Only restart the live timer once the current interval has
            # already elapsed; otherwise the change would fire early.
            if abs(self._nsdate.timeIntervalSinceNow()) >= self._nstimer.timeInterval():
                self.stop()
                self._interval = new_interval
                self.start()
        else:
            self._interval = new_interval

    @property
    def callback(self):
        """The current function specified as the callback."""
        return getattr(self, '*callback')

    def is_alive(self):
        """Whether the timer thread loop is currently running."""
        return self._status

    def start(self):
        """Start the timer thread loop."""
        if not self._status:
            self._nsdate = NSDate.date()
            # 'callback:' selector routes to callback_ below.
            self._nstimer = NSTimer.alloc().initWithFireDate_interval_target_selector_userInfo_repeats_(
                self._nsdate, self._interval, self, 'callback:', None, True)
            NSRunLoop.currentRunLoop().addTimer_forMode_(self._nstimer, NSDefaultRunLoopMode)
            # Register in the weak global registry used by timers().
            _TIMERS[self] = None
            self._status = True

    def stop(self):
        """Stop the timer thread loop."""
        if self._status:
            self._nstimer.invalidate()
            del self._nstimer
            del self._nsdate
            self._status = False

    def set_callback(self, callback):
        """Set the function that should be called every :attr:`interval` seconds. It will be passed this
        :class:`rumps.Timer` object as its only parameter.
        """
        # Stored under a name no attribute access can collide with.
        setattr(self, '*callback', callback)

    def callback_(self, _):
        # Objective-C timer entry point; forwards to the Python callback.
        _log(self)
        return _call_as_function_or_method(getattr(self, '*callback'), self)
class Window(object):
    """Generate a window to consume user input in the form of both text and button clicked.

    .. versionchanged:: 0.2.0
        Providing a `cancel` string will set the button text rather than only using text "Cancel". `message` is no
        longer a required parameter.

    :param message: the text positioned below the `title` in smaller font. If not a string, will use the string
        representation of the object.
    :param title: the text positioned at the top of the window in larger font. If not a string, will use the string
        representation of the object.
    :param default_text: the text within the editable textbox. If not a string, will use the string representation of
        the object.
    :param ok: the text for the "ok" button. Must be either a string or ``None``. If ``None``, a default
        localized button title will be used.
    :param cancel: the text for the "cancel" button. If a string, the button will have that text. If `cancel`
        evaluates to ``True``, will create a button with text "Cancel". Otherwise, this button will not be
        created.
    :param dimensions: the size of the editable textbox. Must be sequence with a length of 2.
    """

    def __init__(self, message='', title='', default_text='', ok=None, cancel=None, dimensions=(320, 160)):
        message = unicode(message)
        title = unicode(title)
        self._cancel = bool(cancel)  # remembered so run() can renumber button return codes
        self._icon = None
        _require_string_or_none(ok)
        # A truthy non-string `cancel` means "give me the default Cancel button";
        # falsy means no cancel button at all.
        if not isinstance(cancel, basestring):
            cancel = 'Cancel' if cancel else None
        self._alert = NSAlert.alertWithMessageText_defaultButton_alternateButton_otherButton_informativeTextWithFormat_(
            title, ok, cancel, None, message)
        self._alert.setAlertStyle_(0)  # informational style
        self._textfield = NSTextField.alloc().initWithFrame_(NSMakeRect(0, 0, *dimensions))
        self._textfield.setSelectable_(True)
        self._alert.setAccessoryView_(self._textfield)
        self.default_text = default_text

    @property
    def title(self):
        """The text positioned at the top of the window in larger font. If not a string, will use the string
        representation of the object.
        """
        return self._alert.messageText()

    @title.setter
    def title(self, new_title):
        new_title = unicode(new_title)
        self._alert.setMessageText_(new_title)

    @property
    def message(self):
        """The text positioned below the :attr:`title` in smaller font. If not a string, will use the string
        representation of the object.
        """
        return self._alert.informativeText()

    @message.setter
    def message(self, new_message):
        new_message = unicode(new_message)
        self._alert.setInformativeText_(new_message)

    @property
    def default_text(self):
        """The text within the editable textbox. An example would be
        "Type your message here."

        If not a string, will use the string representation of the object.
        """
        return self._default_text

    @default_text.setter
    def default_text(self, new_text):
        new_text = unicode(new_text)
        self._default_text = new_text  # kept so run() can restore it after each dialog
        self._textfield.setStringValue_(new_text)

    @property
    def icon(self):
        """The path to an image displayed for this window. If set to ``None``, will default to the icon for the
        application using :attr:`rumps.App.icon`.

        .. versionchanged:: 0.2.0
            If the icon is set to an image then changed to ``None``, it will correctly be changed to the application
            icon.
        """
        return self._icon

    @icon.setter
    def icon(self, icon_path):
        new_icon = _nsimage_from_file(icon_path) if icon_path is not None else None
        self._icon = icon_path
        self._alert.setIcon_(new_icon)

    def add_button(self, name):
        """Create a new button.

        .. versionchanged:: 0.2.0
            The `name` parameter is required to be a string.

        :param name: the text for a new button. Must be a string.
        """
        _require_string(name)
        self._alert.addButtonWithTitle_(name)

    def add_buttons(self, iterable=None, *args):
        """Create multiple new buttons.

        .. versionchanged:: 0.2.0
            Since each element is passed to :meth:`rumps.Window.add_button`, they must be strings.
        """
        if iterable is None:
            return
        if isinstance(iterable, basestring):
            self.add_button(iterable)
        else:
            for ele in iterable:
                self.add_button(ele)
        for arg in args:
            self.add_button(arg)

    def run(self):
        """Launch the window. :class:`rumps.Window` instances can be reused to retrieve user input as many times as
        needed.

        :return: a :class:`rumps.rumps.Response` object that contains the text and the button clicked as an integer.
        """
        _log(self)
        # NOTE(review): % 999 folds AppKit's large button return codes (1000+ for
        # buttons created via add_button) into small ints; together with the
        # cancel adjustment below this is meant to yield ok->1, cancel->0,
        # added buttons->2, 3, ... -- confirm against NSAlert return-code docs.
        clicked = self._alert.runModal() % 999
        if clicked > 2 and self._cancel:
            clicked -= 1
        self._textfield.validateEditing()
        text = self._textfield.stringValue()
        self.default_text = self._default_text  # reset default text
        return Response(clicked, text)
class Response(object):
    """Holds information from user interaction with a :class:`rumps.Window` after it has been closed."""

    def __init__(self, clicked, text):
        self._clicked = clicked
        self._text = text

    def __repr__(self):
        # Truncate long text so the repr stays one readable line.
        preview = self._text
        if len(preview) >= 21:
            preview = preview[:17] + '...'
        return '<{0}: [clicked: {1}, text: {2}]>'.format(type(self).__name__, self._clicked, repr(preview))

    @property
    def clicked(self):
        """Return a number representing the button pressed by the user.

        The "ok" button will return ``1`` and the "cancel" button will return ``0``. This makes it convenient to
        write a conditional like,

        .. code-block:: python

            if response.clicked:
                do_thing_for_ok_pressed()
            else:
                do_thing_for_cancel_pressed()

        Where `response` is an instance of :class:`rumps.rumps.Response`.

        Additional buttons added using methods :meth:`rumps.Window.add_button` and
        :meth:`rumps.Window.add_buttons` will return ``2``, ``3``, ... in the order they were added.
        """
        return self._clicked

    @property
    def text(self):
        """Return the text collected from the user."""
        return self._text
class NSApp(NSObject):
    """Objective-C delegate class for NSApplication. Don't instantiate - use App instead."""

    # Maps each NSMenuItem to its (MenuItem, callback) pair so the single
    # class-level action callback_ below can dispatch to the right Python code.
    _ns_to_py_and_callback = {}

    def userNotificationCenter_didActivateNotification_(self, notification_center, notification):
        # Delegate hook: the user clicked one of our delivered notifications.
        notification_center.removeDeliveredNotification_(notification)
        data = dict(notification.userInfo())
        try:
            notification_function = getattr(notifications, '*notification_center')
        except AttributeError:  # notification center function not specified -> no error but warning in log
            _log('WARNING: notification received but no function specified for answering it; use @notifications '
                 'decorator to register a function.')
        else:
            _call_as_function_or_method(notification_function, data)

    def initializeStatusBar(self):
        # -1 requests a variable-length status item that sizes to its content.
        self.nsstatusitem = NSStatusBar.systemStatusBar().statusItemWithLength_(-1)  # variable dimensions
        self.nsstatusitem.setHighlightMode_(True)
        self.setStatusBarIcon()
        self.setStatusBarTitle()
        mainmenu = self._app['_menu']  # _app is the App instance's __dict__ (set in App.run)
        quit_button = self._app['_quit_button']
        if quit_button is not None:
            quit_button.set_callback(quit_application)
            mainmenu.add(quit_button)
        else:
            _log('WARNING: the default quit button is disabled. To exit the application gracefully, another button '
                 'should have a callback of quit_application or call it indirectly.')
        self.nsstatusitem.setMenu_(mainmenu._menu)  # mainmenu of our status bar spot (_menu attribute is NSMenu)

    def setStatusBarTitle(self):
        self.nsstatusitem.setTitle_(self._app['_title'])
        self.fallbackOnName()

    def setStatusBarIcon(self):
        self.nsstatusitem.setImage_(self._app['_icon_nsimage'])
        self.fallbackOnName()

    def fallbackOnName(self):
        # With neither a title nor an icon, show the application name instead.
        if not (self.nsstatusitem.title() or self.nsstatusitem.image()):
            self.nsstatusitem.setTitle_(self._app['_name'])

    @classmethod
    def callback_(cls, nsmenuitem):
        # Shared Objective-C action for all menu items: look up the Python
        # MenuItem and its callback, then invoke the callback with the MenuItem.
        self, callback = cls._ns_to_py_and_callback[nsmenuitem]
        _log(self)
        return _call_as_function_or_method(callback, self)
class App(object):
    """Represents the statusbar application.

    Provides a simple and pythonic interface for all those long and ugly `PyObjC` calls. :class:`rumps.App` may be
    subclassed so that the application logic can be encapsulated within a class. Alternatively, an `App` can be
    instantiated and the various callback functions can exist at module level.

    .. versionchanged:: 0.2.0
        `name` parameter must be a string and `title` must be either a string or ``None``. `quit_button` parameter
        added.

    :param name: the name of the application.
    :param title: text that will be displayed for the application in the statusbar.
    :param icon: file path to the icon that will be displayed for the application in the statusbar.
    :param menu: an iterable of Python objects or pairs of objects that will be converted into the main menu for the
        application. Parsing is implemented by calling :meth:`rumps.MenuItem.update`.
    :param quit_button: the quit application menu item within the main menu. If ``None``, the default quit button
        will not be added.
    """

    # NOTE:
    # Serves as a setup class for NSApp since Objective-C classes shouldn't be instantiated normally.
    # This is the most user-friendly way.
    def __init__(self, name, title=None, icon=None, menu=None, quit_button='Quit'):
        _require_string(name)
        self._name = name
        self._icon = self._icon_nsimage = self._title = None
        self.icon = icon  # property setters below validate and convert the values
        self.title = title
        self.quit_button = quit_button
        self._menu = Menu()
        if menu is not None:
            self.menu = menu
        self._application_support = application_support(self._name)

    # Properties
    #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    @property
    def name(self):
        """The name of the application. Determines the application support folder name. Will also serve as the title
        text of the application if :attr:`title` is not set.
        """
        return self._name

    @property
    def title(self):
        """The text that will be displayed for the application in the statusbar. Can be ``None`` in which case the
        icon will be used or, if there is no icon set the application text will fallback on the application
        :attr:`name`.

        .. versionchanged:: 0.2.0
            If the title is set then changed to ``None``, it will correctly be removed. Must be either a string or
            ``None``.
        """
        return self._title

    @title.setter
    def title(self, title):
        _require_string_or_none(title)
        self._title = title
        try:
            self._nsapp.setStatusBarTitle()
        except AttributeError:
            # _nsapp doesn't exist until run() is called; before that, only store
            pass

    @property
    def icon(self):
        """A path to an image representing the icon that will be displayed for the application in the statusbar.
        Can be ``None`` in which case the text from :attr:`title` will be used.

        .. versionchanged:: 0.2.0
            If the icon is set to an image then changed to ``None``, it will correctly be removed.
        """
        return self._icon

    @icon.setter
    def icon(self, icon_path):
        new_icon = _nsimage_from_file(icon_path) if icon_path is not None else None
        self._icon = icon_path
        self._icon_nsimage = new_icon
        try:
            self._nsapp.setStatusBarIcon()
        except AttributeError:
            # _nsapp doesn't exist until run() is called; before that, only store
            pass

    @property
    def menu(self):
        """Represents the main menu of the statusbar application. Setting `menu` works by calling
        :meth:`rumps.MenuItem.update`.
        """
        return self._menu

    @menu.setter
    def menu(self, iterable):
        self._menu.update(iterable)

    @property
    def quit_button(self):
        """The quit application menu item within the main menu. This is a special :class:`rumps.MenuItem` object that
        will both replace any function callback with :func:`rumps.quit_application` and add itself to the end of the
        main menu when :meth:`rumps.App.run` is called. If set to ``None``, the default quit button will not be
        added.

        .. warning::
            If set to ``None``, some other menu item should call :func:`rumps.quit_application` so that the
            application can exit gracefully.

        .. versionadded:: 0.2.0
        """
        return self._quit_button

    @quit_button.setter
    def quit_button(self, quit_text):
        if quit_text is None:
            self._quit_button = None
        else:
            self._quit_button = MenuItem(quit_text)

    # Open files in application support folder
    #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    def open(self, *args):
        """Open a file within the application support folder for this application.

        .. code-block:: python

            app = App('Cool App')
            with app.open('data.json') as f:
                pass

        Is a shortcut for,

        .. code-block:: python

            app = App('Cool App')
            filename = os.path.join(application_support(app.name), 'data.json')
            with open(filename) as f:
                pass

        """
        return open(os.path.join(self._application_support, args[0]), *args[1:])

    # Run the application
    #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    def run(self, **options):
        """Performs various setup tasks including creating the underlying Objective-C application, starting the
        timers, and registering callback functions for click events. Then starts the application run loop.

        .. versionchanged:: 0.2.1
            Accepts `debug` keyword argument.

        :param debug: determines if application should log information useful for debugging. Same effect as calling
            :func:`rumps.debug_mode`.
        """
        # Sentinel object distinguishes "debug not passed" from debug=None/False.
        dont_change = object()
        debug = options.get('debug', dont_change)
        if debug is not dont_change:
            debug_mode(debug)

        nsapplication = NSApplication.sharedApplication()
        nsapplication.activateIgnoringOtherApps_(True)  # NSAlerts in front
        self._nsapp = NSApp.alloc().init()
        self._nsapp._app = self.__dict__  # allow for dynamic modification based on this App instance
        nsapplication.setDelegate_(self._nsapp)
        if _NOTIFICATIONS:
            NSUserNotificationCenter.defaultUserNotificationCenter().setDelegate_(self._nsapp)

        setattr(App, '*app_instance', self)  # class level ref to running instance (for passing self to App subclasses)
        t = b = None
        for t in getattr(timer, '*timers', []):
            t.start()
        for b in getattr(clicked, '*buttons', []):
            b(self)  # we waited on registering clicks so we could pass self to access _menu attribute
        del t, b

        self._nsapp.initializeStatusBar()
        AppHelper.runEventLoop()
|
vsocrates/mhealth-grabapp | refs/heads/master | Lab3/randomizer.py | 1 | from random import shuffle
# Write a random permutation of the integers 0..10927 to "orderpy.random",
# one integer per line.
x = list(range(10928))  # list(range(...)) instead of an identity comprehension
shuffle(x)
with open("orderpy.random", "w") as f:
    for num in x:
        f.write("%d\n" % num)
|
leighpauls/k2cro4 | refs/heads/master | third_party/trace-viewer/third_party/closure_linter/closure_linter/common/tokens_test.py | 126 | #!/usr/bin/env python
# Copyright 2011 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest as googletest
from closure_linter.common import tokens
def _CreateDummyToken():
  """Return a single unlinked tokens.Token with fixed placeholder values."""
  return tokens.Token('foo', None, 1, 1)
def _CreateDummyTokens(count):
  """Return a list of `count` unlinked dummy tokens."""
  # List comprehension replaces the manual append loop.
  return [_CreateDummyToken() for _ in xrange(count)]
def _SetTokensAsNeighbors(neighbor_tokens):
for i in xrange(len(neighbor_tokens)):
prev_index = i - 1
next_index = i + 1
if prev_index >= 0:
neighbor_tokens[i].previous = neighbor_tokens[prev_index]
if next_index < len(neighbor_tokens):
neighbor_tokens[i].next = neighbor_tokens[next_index]
class TokensTest(googletest.TestCase):
  """Unit tests for tokens.Token line/type predicates and neighbor iteration."""

  def testIsFirstInLine(self):
    # First token in file (has no previous).
    self.assertTrue(_CreateDummyToken().IsFirstInLine())
    a, b = _CreateDummyTokens(2)
    _SetTokensAsNeighbors([a, b])
    # Tokens on same line
    a.line_number = 30
    b.line_number = 30
    self.assertFalse(b.IsFirstInLine())
    # Tokens on different lines
    b.line_number = 31
    self.assertTrue(b.IsFirstInLine())

  def testIsLastInLine(self):
    # Last token in file (has no next).
    self.assertTrue(_CreateDummyToken().IsLastInLine())
    a, b = _CreateDummyTokens(2)
    _SetTokensAsNeighbors([a, b])
    # Tokens on same line
    a.line_number = 30
    b.line_number = 30
    self.assertFalse(a.IsLastInLine())
    b.line_number = 31
    self.assertTrue(a.IsLastInLine())

  def testIsType(self):
    a = tokens.Token('foo', 'fakeType1', 1, 1)
    self.assertTrue(a.IsType('fakeType1'))
    self.assertFalse(a.IsType('fakeType2'))

  def testIsAnyType(self):
    a = tokens.Token('foo', 'fakeType1', 1, 1)
    self.assertTrue(a.IsAnyType(['fakeType1', 'fakeType2']))
    self.assertFalse(a.IsAnyType(['fakeType3', 'fakeType4']))

  def testRepr(self):
    a = tokens.Token('foo', 'fakeType1', 1, 1)
    self.assertEquals('<Token: fakeType1, "foo", None, 1, None>', str(a))

  def testIter(self):
    # Forward iteration from a token walks the .next chain, inclusive of start.
    dummy_tokens = _CreateDummyTokens(5)
    _SetTokensAsNeighbors(dummy_tokens)
    a, b, c, d, e = dummy_tokens
    i = iter(a)
    self.assertListEqual([a, b, c, d, e], list(i))

  def testReverseIter(self):
    # Reverse iteration from a token walks the .previous chain, inclusive of start.
    dummy_tokens = _CreateDummyTokens(5)
    _SetTokensAsNeighbors(dummy_tokens)
    a, b, c, d, e = dummy_tokens
    ri = reversed(e)
    self.assertListEqual([e, d, c, b, a], list(ri))
# Run the tests above when this file is executed directly.
if __name__ == '__main__':
  googletest.main()
|
ygrass/handsome | refs/heads/master | handsome/clean_media_temp.py | 1 | # -*- coding: utf-8 -*-
# remove all files of an expired modification date = mtime
# you could also use creation date (ctime) or last access date (atime)
# os.stat(filename) returns (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime)
import glob
import os
import time
from datetime import datetime, timedelta
if __name__ == '__main__':
root = os.path.realpath(os.path.dirname(__file__))
media_temp_files = os.path.join(root, '..', 'media', 'tmp', '*')
for f in glob.glob(media_temp_files):
# retrieves the stats for the current file
# the tuple element at index 8 is the last-modified-date
stats = os.stat(f)
last_modify_date = time.localtime(stats[8])
three_days_ago = datetime.now() - timedelta(days=3)
if three_days_ago.timetuple() > last_modify_date:
try:
os.remove(f)
except OSError:
print 'Could not remove file', f
|
sainathadapa/i3-project-focus-workflow | refs/heads/master | switch_to_next_project.py | 1 | # -*- coding: utf-8 -*-
import subprocess
import json
import sys
import necessaryFuncs as nf
# Cycle i3 focus to the workspaces of the next project (wrapping around).
proc_out = subprocess.run(['i3-msg', '-t', 'get_workspaces'], stdout=subprocess.PIPE)
wkList = json.loads(proc_out.stdout.decode('utf-8'))
focWkName = nf.getFocusedWK(wkList)
allProjectNames = nf.getListOfProjects(wkList)
# BUG FIX: the original tested len(allProjectNames) == 0 BEFORE the None
# check, so a None return raised TypeError instead of exiting cleanly.
if not allProjectNames:
    sys.exit(1)
currentProjName = nf.getProjectFromWKName(focWkName)
if currentProjName is None:
    nextProjIndex = 0
else:
    # Advance to the next project, wrapping back to the first.
    nextProjIndex = (allProjectNames.index(currentProjName) + 1) % len(allProjectNames)
nxtProjWks = nf.getWKNamesFromProj(wkList, allProjectNames[nextProjIndex])
visWks = nf.getVisibleWKs(wkList)
wksToMakeVisible = list(set(nxtProjWks) - set(visWks))
focOutput = nf.getOutputForWK(wkList, focWkName)
focOutputWks = nf.getWorkspacesOnOutput(wkList, focOutput)
wkToBeFocused = list(set(focOutputWks).intersection(nxtProjWks))
parCommToRun = ['workspace ' + x for x in wksToMakeVisible]
# BUG FIX: guard the empty case -- indexing wksToMakeVisible[-1] raised
# IndexError whenever every target workspace was already visible.
if wkToBeFocused and (not wksToMakeVisible or wksToMakeVisible[-1] != wkToBeFocused[0]):
    parCommToRun.append('workspace ' + wkToBeFocused[0])
commandToRun = ["i3-msg", '; '.join(parCommToRun)]
subprocess.call(commandToRun)
|
bringsvor/account-financial-tools | refs/heads/8.0 | account_credit_control/scenarios/features/steps/account_credit_control.py | 40 | # -*- coding: utf-8 -*-
# flake8: noqa
import time
from behave import given, when
from support import model, assert_equal
@given(u'I configure the following accounts on the credit control policy with oid: "{policy_oid}"')
def impl(ctx, policy_oid):
    """Attach the accounts listed in the step table to the given credit control policy."""
    policy = model('credit.control.policy').get(policy_oid)
    # BUG FIX: was 'No policy % found' -- '% f' is a float conversion, so the
    # assert message itself raised TypeError instead of naming the policy.
    assert policy, 'No policy %s found' % policy_oid
    acc_obj = model('account.account')
    accounts = []
    for row in ctx.table:
        acc = acc_obj.get(['code = %s' % row['account code']])
        assert acc, "Account with code %s not found" % row['account code']
        accounts.append(acc)
    policy.write({'account_ids': [x.id for x in accounts]})
@when(u'I launch the credit run')
def impl(ctx):
    """Generate credit lines for the credit.control.run found by a prior step."""
    assert ctx.found_item
    # Must be a cleaner way to do it
    assert 'credit.control.run' == ctx.found_item._model._name
    ctx.found_item.generate_credit_lines()
@given(u'I clean all the credit lines')
def impl(ctx):
    """Remove existing credit.control.line records before the scenario runs."""
    # NOTE(review): browse([]) with unlink appears intended to wipe all lines;
    # confirm the ORM treats an empty browse as "all records" here.
    model('credit.control.line').browse([]).unlink()
@then(u'my credit run should be in state "done"')
def impl(ctx):
    """Re-read the run from the database and check its workflow state is 'done'."""
    assert ctx.found_item
    # Must be a cleaner way to do it
    assert model("credit.control.run").get(ctx.found_item.id).state == 'done'
@then(u'the generated credit lines should have the following values')
def impl(ctx):
    """Check that each row of the step table matches exactly one generated credit line."""

    def _row_to_dict(row):
        # Keep only the cells that actually contain a value.
        return dict((name, row[name]) for name in row.headings if row[name])

    rows = map(_row_to_dict, ctx.table)

    def _parse_date(value):
        # Table cells may hold strftime patterns (e.g. %Y-%m-%d) for relative dates.
        return time.strftime(value) if '%' in value else value

    for row in rows:
        account = model('account.account').get(['name = %s' % row['account']])
        assert account, "no account named %s found" % row['account']
        policy = model('credit.control.policy').get(['name = %s' % row['policy']])
        assert policy, "No policy %s found" % row['policy']
        partner = model('res.partner').get(['name = %s' % row['partner']])
        assert partner, "No partner %s found" % row['partner']
        maturity_date = _parse_date(row['date due'])
        move_line = model('account.move.line').get(['name = %s' % row['move line'],
                                                    'date_maturity = %s' % maturity_date])
        assert move_line, "No move line %s found" % row['move line']
        level = model('credit.control.policy.level').get(['name = %s' % row['policy level'],
                                                          'policy_id = %s' % policy.id])
        # BUG FIX: was "No level % found" -- '% f' is a float conversion, so the
        # assert message raised TypeError instead of naming the missing level.
        assert level, "No level %s found" % row['policy level']
        domain = [['account_id', '=', account.id],
                  ['policy_id', '=', policy.id],
                  ['partner_id', '=', partner.id],
                  ['policy_level_id', '=', level.id],
                  ['amount_due', '=', row.get('amount due', 0.0)],
                  ['state', '=', row['state']],
                  ['level', '=', row.get('level', 0.0)],
                  ['channel', '=', row['channel']],
                  ['balance_due', '=', row.get('balance', 0.0)],
                  ['date_due', '=', _parse_date(row['date due'])],
                  ['date', '=', _parse_date(row['date'])],
                  ['move_line_id', '=', move_line.id],
                  ]
        if row.get('currency'):
            currency = model('res.currency').get(['name = %s' % row['currency']])  # typo 'curreny' fixed
            assert currency, "No currency %s found" % row['currency']
            domain.append(('currency_id', '=', currency.id))
        lines = model('credit.control.line').search(domain)
        assert lines, "no line found for %s" % repr(row)
        assert len(lines) == 1, "Too many lines found for %s" % repr(row)
    date_lines = model('credit.control.line').search([('date', '=', ctx.found_item.date)])
    assert len(date_lines) == len(ctx.table.rows), "Too many lines generated"
def open_invoice(ctx):
    """Validate the invoice in ctx.found_item by sending the 'invoice_open' workflow signal."""
    assert ctx.found_item
    ctx.found_item._send('invoice_open')
    # _send refreshes the object, so the new state is readable immediately
    assert ctx.found_item.state == 'open'
@then(u'I open the credit invoice')
def impl(ctx):
    # "then" phrasing of the step; delegates to the shared open_invoice helper.
    open_invoice(ctx)
@given(u'I open the credit invoice')
def impl(ctx):
    # "given" phrasing of the step; delegates to the shared open_invoice helper.
    open_invoice(ctx)
@given(u'there is "{state}" credit lines')
def impl(ctx, state):
    """Assert at least one credit line exists in the given state."""
    assert model('credit.control.line').search(['state = %s' % state])
@given(u'I mark all draft email to state "{state}"')
def impl(ctx, state):
    """Run the credit.control.marker wizard over every draft credit line."""
    wiz = model('credit.control.marker').create({'name': state})
    lines = model('credit.control.line').search([('state', '=', 'draft')])
    assert lines
    ctx.lines = lines  # saved so the following "should be in state" step can verify
    wiz.write({'line_ids': lines})
    wiz.mark_lines()
@then(u'the draft line should be in state "{state}"')
def impl(ctx, state):
    """Check that none of the previously marked lines remained in another state."""
    assert ctx.lines
    lines = model('credit.control.line').search([('state', '!=', state),
                                                 ('id', 'in', ctx.lines)])
    assert not lines
@given(u'I ignore the "{partner}" credit line at level "{level:d}" for move line "{move_line_name}" with amount "{amount:f}"')
def impl(ctx, partner, level, move_line_name, amount):
    """Mark the matching credit line as ignored via the marker wizard."""
    print ctx, partner, level, move_line_name, amount  # NOTE(review): leftover debug print; consider removing
    to_ignore = model('credit.control.line').search([('partner_id.name', '=', partner),
                                                     ('level', '=', level),
                                                     ('amount_due', '=', amount),
                                                     ('move_line_id.name', '=', move_line_name)])
    assert to_ignore
    wiz = model('credit.control.marker').create({'name': 'ignored'})
    ctx.lines = to_ignore
    wiz.write({'line_ids': to_ignore})
    wiz.mark_lines()
    assert model('credit.control.line').get(to_ignore[0]).state == 'ignored'
@given(u'I have for "{partner}" "{number:d}" credit lines at level "{level:d}" for move line "{move_line_name}" with amount "{amount:f}" respectively in state "draft" and "ignored"')
def impl(ctx, partner, number, level, move_line_name, amount):
    """Assert exactly `number` matching lines exist, covering both draft and ignored states."""
    to_check = model('credit.control.line').search([('partner_id.name', '=', partner),
                                                    ('level', '=', level),
                                                    ('amount_due', '=', amount),
                                                    ('move_line_id.name', '=', move_line_name),
                                                    ('state', 'in', ('draft', 'ignored'))])
    assert_equal(len(to_check), int(number), msg="More than %s found" % number)
    lines = model('credit.control.line').browse(to_check)
    # Both states must be represented among the matched lines.
    assert set(['ignored', 'draft']) == set(lines.state)
|
spektom/incubator-airflow | refs/heads/master | airflow/models/base.py | 4 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any
from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base
from airflow.configuration import conf
SQL_ALCHEMY_SCHEMA = conf.get("core", "SQL_ALCHEMY_SCHEMA")

# Bind all tables to the configured schema when one is set; a blank or
# whitespace-only value means "no schema" (SQLAlchemy's default behavior).
if SQL_ALCHEMY_SCHEMA and not SQL_ALCHEMY_SCHEMA.isspace():
    metadata = MetaData(schema=SQL_ALCHEMY_SCHEMA)
else:
    metadata = None

Base = declarative_base(metadata=metadata)  # type: Any

ID_LEN = 250
# used for typing
class Operator:
    """Typing placeholder for operator classes.

    NOTE(review): appears to exist only so annotations can reference
    ``Operator`` without importing concrete operator classes -- confirm.
    """
    pass
|
mchaparro/Skeleton-Django1.6-Bootstrap-3.0 | refs/heads/master | skeleton/migrations/0001_initial.py | 1 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Create the skeleton_usuario table plus its groups/user_permissions M2M tables."""
        # Adding model 'Usuario'
        db.create_table(u'skeleton_usuario', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('password', self.gf('django.db.models.fields.CharField')(max_length=128)),
            ('last_login', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
            ('is_superuser', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('usuario', self.gf('django.db.models.fields.CharField')(unique=True, max_length=15, db_index=True)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=50, null=True, blank=True)),
            ('nombre', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('apellidos', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('fecha', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('fecha_mod', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('is_admin', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal('skeleton', ['Usuario'])

        # Adding M2M table for field groups on 'Usuario'
        m2m_table_name = db.shorten_name(u'skeleton_usuario_groups')
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('usuario', models.ForeignKey(orm['skeleton.usuario'], null=False)),
            ('group', models.ForeignKey(orm[u'auth.group'], null=False))
        ))
        db.create_unique(m2m_table_name, ['usuario_id', 'group_id'])

        # Adding M2M table for field user_permissions on 'Usuario'
        m2m_table_name = db.shorten_name(u'skeleton_usuario_user_permissions')
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('usuario', models.ForeignKey(orm['skeleton.usuario'], null=False)),
            ('permission', models.ForeignKey(orm[u'auth.permission'], null=False))
        ))
        db.create_unique(m2m_table_name, ['usuario_id', 'permission_id'])
    def backwards(self, orm):
        """Reverse of forwards(): drop the usuario table and both M2M tables."""
        # Deleting model 'Usuario'
        db.delete_table(u'skeleton_usuario')

        # Removing M2M table for field groups on 'Usuario'
        db.delete_table(db.shorten_name(u'skeleton_usuario_groups'))

        # Removing M2M table for field user_permissions on 'Usuario'
        db.delete_table(db.shorten_name(u'skeleton_usuario_user_permissions'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'skeleton.usuario': {
'Meta': {'object_name': 'Usuario'},
'apellidos': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'fecha': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fecha_mod': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'usuario': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '15', 'db_index': 'True'})
}
}
complete_apps = ['skeleton'] |
drexly/tonginBlobStore | refs/heads/master | lib/django/views/generic/list.py | 471 | from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.core.paginator import InvalidPage, Paginator
from django.db.models.query import QuerySet
from django.http import Http404
from django.utils import six
from django.utils.translation import ugettext as _
from django.views.generic.base import ContextMixin, TemplateResponseMixin, View
class MultipleObjectMixin(ContextMixin):
    """
    A mixin for views manipulating multiple objects.
    """
    allow_empty = True          # render empty lists instead of raising Http404
    queryset = None             # explicit queryset; takes precedence over `model`
    model = None                # model whose _default_manager supplies the objects
    paginate_by = None          # page size; None disables pagination
    paginate_orphans = 0        # extra items allowed onto the last page
    context_object_name = None  # context name; defaults to "<model>_list" when unset
    paginator_class = Paginator
    page_kwarg = 'page'         # URLconf kwarg / GET parameter carrying the page number
    ordering = None             # field name or sequence of names passed to order_by()

    def get_queryset(self):
        """
        Return the list of items for this view.

        The return value must be an iterable and may be an instance of
        `QuerySet` in which case `QuerySet` specific behavior will be enabled.
        """
        if self.queryset is not None:
            queryset = self.queryset
            if isinstance(queryset, QuerySet):
                # .all() yields a fresh queryset so cached results aren't
                # shared between requests on the class attribute.
                queryset = queryset.all()
        elif self.model is not None:
            queryset = self.model._default_manager.all()
        else:
            raise ImproperlyConfigured(
                "%(cls)s is missing a QuerySet. Define "
                "%(cls)s.model, %(cls)s.queryset, or override "
                "%(cls)s.get_queryset()." % {
                    'cls': self.__class__.__name__
                }
            )
        ordering = self.get_ordering()
        if ordering:
            # A single field name is normalized to a 1-tuple for order_by(*...)
            if isinstance(ordering, six.string_types):
                ordering = (ordering,)
            queryset = queryset.order_by(*ordering)
        return queryset

    def get_ordering(self):
        """
        Return the field or fields to use for ordering the queryset.
        """
        return self.ordering

    def paginate_queryset(self, queryset, page_size):
        """
        Paginate the queryset, if needed.

        Returns a 4-tuple (paginator, page, object_list, has_other_pages).
        Raises Http404 for an invalid page number.
        """
        paginator = self.get_paginator(
            queryset, page_size, orphans=self.get_paginate_orphans(),
            allow_empty_first_page=self.get_allow_empty())
        page_kwarg = self.page_kwarg
        # Page number can come from the URLconf kwargs or the query string;
        # fall back to the first page.
        page = self.kwargs.get(page_kwarg) or self.request.GET.get(page_kwarg) or 1
        try:
            page_number = int(page)
        except ValueError:
            if page == 'last':
                page_number = paginator.num_pages
            else:
                raise Http404(_("Page is not 'last', nor can it be converted to an int."))
        try:
            page = paginator.page(page_number)
            return (paginator, page, page.object_list, page.has_other_pages())
        except InvalidPage as e:
            raise Http404(_('Invalid page (%(page_number)s): %(message)s') % {
                'page_number': page_number,
                'message': str(e)
            })

    def get_paginate_by(self, queryset):
        """
        Get the number of items to paginate by, or ``None`` for no pagination.
        """
        return self.paginate_by

    def get_paginator(self, queryset, per_page, orphans=0,
                      allow_empty_first_page=True, **kwargs):
        """
        Return an instance of the paginator for this view.
        """
        return self.paginator_class(
            queryset, per_page, orphans=orphans,
            allow_empty_first_page=allow_empty_first_page, **kwargs)

    def get_paginate_orphans(self):
        """
        Returns the maximum number of orphans extend the last page by when
        paginating.
        """
        return self.paginate_orphans

    def get_allow_empty(self):
        """
        Returns ``True`` if the view should display empty lists, and ``False``
        if a 404 should be raised instead.
        """
        return self.allow_empty

    def get_context_object_name(self, object_list):
        """
        Get the name of the item to be used in the context.
        """
        if self.context_object_name:
            return self.context_object_name
        elif hasattr(object_list, 'model'):
            return '%s_list' % object_list.model._meta.model_name
        else:
            return None

    def get_context_data(self, **kwargs):
        """
        Get the context for this view.
        """
        queryset = kwargs.pop('object_list', self.object_list)
        page_size = self.get_paginate_by(queryset)
        context_object_name = self.get_context_object_name(queryset)
        if page_size:
            paginator, page, queryset, is_paginated = self.paginate_queryset(queryset, page_size)
            context = {
                'paginator': paginator,
                'page_obj': page,
                'is_paginated': is_paginated,
                'object_list': queryset
            }
        else:
            context = {
                'paginator': None,
                'page_obj': None,
                'is_paginated': False,
                'object_list': queryset
            }
        # Also expose the list under its model-specific name, when one exists.
        if context_object_name is not None:
            context[context_object_name] = queryset
        context.update(kwargs)
        return super(MultipleObjectMixin, self).get_context_data(**context)
class BaseListView(MultipleObjectMixin, View):
    """
    A base view for displaying a list of objects.

    Handles GET by resolving the queryset, enforcing ``allow_empty`` and
    rendering the context built by MultipleObjectMixin.
    """
    def get(self, request, *args, **kwargs):
        """Fetch the object list, 404 on forbidden-empty lists, render."""
        self.object_list = self.get_queryset()
        if not self.get_allow_empty():
            # When pagination is enabled and object_list is a queryset,
            # an .exists() probe is a cheap query, whereas len() would pull
            # the whole unpaginated queryset into memory.
            paginated = self.get_paginate_by(self.object_list) is not None
            if paginated and hasattr(self.object_list, 'exists'):
                empty = not self.object_list.exists()
            else:
                empty = len(self.object_list) == 0
            if empty:
                raise Http404(_("Empty list and '%(class_name)s.allow_empty' is False.")
                        % {'class_name': self.__class__.__name__})
        return self.render_to_response(self.get_context_data())
class MultipleObjectTemplateResponseMixin(TemplateResponseMixin):
    """
    Mixin for responding with a template and list of objects.

    Extends the candidate template list with an automatically derived
    '<app_label>/<model_name><suffix>.html' name when the object list is a
    queryset.
    """
    template_name_suffix = '_list'
    def get_template_names(self):
        """
        Return a list of template names to be used for the request. Must return
        a list. May not be called if render_to_response is overridden.
        """
        try:
            names = super(MultipleObjectTemplateResponseMixin, self).get_template_names()
        except ImproperlyConfigured:
            # No template_name configured -- that is acceptable here, since a
            # name may still be derived from the queryset's model below.
            names = []
        # The derived name goes LAST so that any user-supplied names keep
        # taking precedence over the automatically generated one.
        if hasattr(self.object_list, 'model'):
            opts = self.object_list.model._meta
            auto_name = "%s/%s%s.html" % (opts.app_label, opts.model_name, self.template_name_suffix)
            names.append(auto_name)
        return names
class ListView(MultipleObjectTemplateResponseMixin, BaseListView):
    """
    Render some list of objects, set by `self.model` or `self.queryset`.
    `self.queryset` can actually be any iterable of items, not just a queryset.

    Combines BaseListView's GET handling (queryset resolution, allow_empty
    enforcement) with the automatic "<app_label>/<model_name>_list.html"
    template selection of MultipleObjectTemplateResponseMixin.
    """
|
SucharithaPrabhakar/leosatellite | refs/heads/master | examples/tutorial/first.py | 102 | # /*
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License version 2 as
# * published by the Free Software Foundation;
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# */
import ns.applications
import ns.core
import ns.internet
import ns.network
import ns.point_to_point
# Simple two-node UDP echo simulation (ns-3 "first" tutorial, Python port):
# one client on node 0 sends a single packet to an echo server on node 1
# over a point-to-point link.
# Log INFO-level events for the echo applications so packet send/receive
# activity is visible on the console.
ns.core.LogComponentEnable("UdpEchoClientApplication", ns.core.LOG_LEVEL_INFO)
ns.core.LogComponentEnable("UdpEchoServerApplication", ns.core.LOG_LEVEL_INFO)
# Two nodes connected by a 5 Mbps, 2 ms point-to-point link.
nodes = ns.network.NodeContainer()
nodes.Create(2)
pointToPoint = ns.point_to_point.PointToPointHelper()
pointToPoint.SetDeviceAttribute("DataRate", ns.core.StringValue("5Mbps"))
pointToPoint.SetChannelAttribute("Delay", ns.core.StringValue("2ms"))
devices = pointToPoint.Install(nodes)
# Install the TCP/IP stack and assign addresses from 10.1.1.0/24.
stack = ns.internet.InternetStackHelper()
stack.Install(nodes)
address = ns.internet.Ipv4AddressHelper()
address.SetBase(ns.network.Ipv4Address("10.1.1.0"),
                ns.network.Ipv4Mask("255.255.255.0"))
interfaces = address.Assign(devices)
# Echo server on node 1, UDP port 9, active from t=1s to t=10s.
echoServer = ns.applications.UdpEchoServerHelper(9)
serverApps = echoServer.Install(nodes.Get(1))
serverApps.Start(ns.core.Seconds(1.0))
serverApps.Stop(ns.core.Seconds(10.0))
# Echo client on node 0 targeting the server's address: one 1024-byte
# packet, starting at t=2s (after the server is up).
echoClient = ns.applications.UdpEchoClientHelper(interfaces.GetAddress(1), 9)
echoClient.SetAttribute("MaxPackets", ns.core.UintegerValue(1))
echoClient.SetAttribute("Interval", ns.core.TimeValue(ns.core.Seconds(1.0)))
echoClient.SetAttribute("PacketSize", ns.core.UintegerValue(1024))
clientApps = echoClient.Install(nodes.Get(0))
clientApps.Start(ns.core.Seconds(2.0))
clientApps.Stop(ns.core.Seconds(10.0))
# Run the event loop to completion, then release simulator resources.
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
|
massot/odoo | refs/heads/8.0 | addons/account/wizard/account_reconcile.py | 226 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round
import openerp.addons.decimal_precision as dp
class account_move_line_reconcile(osv.osv_memory):
    """
    Account move line reconcile wizard: shows the debit/credit totals of the
    selected move lines and lets the user reconcile them fully, partially, or
    via a write-off entry for the residual amount.

    Expects the ids of the selected account.move.line records in
    context['active_ids'].
    """
    _name = 'account.move.line.reconcile'
    _description = 'Account move line reconcile'
    _columns = {
        # All fields are informational (readonly) and computed in default_get.
        'trans_nbr': fields.integer('# of Transaction', readonly=True),
        'credit': fields.float('Credit amount', readonly=True, digits_compute=dp.get_precision('Account')),
        'debit': fields.float('Debit amount', readonly=True, digits_compute=dp.get_precision('Account')),
        'writeoff': fields.float('Write-Off amount', readonly=True, digits_compute=dp.get_precision('Account')),
    }
    def default_get(self, cr, uid, fields, context=None):
        # Pre-populate the wizard form with the totals of the selected lines.
        res = super(account_move_line_reconcile, self).default_get(cr, uid, fields, context=context)
        data = self.trans_rec_get(cr, uid, context['active_ids'], context)
        if 'trans_nbr' in fields:
            res.update({'trans_nbr':data['trans_nbr']})
        if 'credit' in fields:
            res.update({'credit':data['credit']})
        if 'debit' in fields:
            res.update({'debit':data['debit']})
        if 'writeoff' in fields:
            res.update({'writeoff':data['writeoff']})
        return res
    def trans_rec_get(self, cr, uid, ids, context=None):
        """Sum debit/credit of the selected (unreconciled) lines and compute
        the rounded write-off residual.  Note: iterates context['active_ids'],
        not the ``ids`` argument."""
        account_move_line_obj = self.pool.get('account.move.line')
        if context is None:
            context = {}
        credit = debit = 0
        account_id = False
        count = 0
        for line in account_move_line_obj.browse(cr, uid, context['active_ids'], context=context):
            # NOTE(review): the second test is only evaluated when the first
            # is already true, so it looks redundant (a null reconcile_id
            # also has a falsy .id) -- confirm before simplifying.
            if not line.reconcile_id and not line.reconcile_id.id:
                count += 1
                credit += line.credit
                debit += line.debit
                account_id = line.account_id.id
        # Round using the 'Account' decimal precision so the displayed
        # write-off matches what would actually be posted.
        precision = self.pool['decimal.precision'].precision_get(cr, uid, 'Account')
        writeoff = float_round(debit-credit, precision_digits=precision)
        credit = float_round(credit, precision_digits=precision)
        debit = float_round(debit, precision_digits=precision)
        return {'trans_nbr': count, 'account_id': account_id, 'credit': credit, 'debit': debit, 'writeoff': writeoff}
    def trans_rec_addendum_writeoff(self, cr, uid, ids, context=None):
        # Delegate to the write-off wizard (opens its form).
        return self.pool.get('account.move.line.reconcile.writeoff').trans_rec_addendum(cr, uid, ids, context)
    def trans_rec_reconcile_partial_reconcile(self, cr, uid, ids, context=None):
        # Delegate to the write-off wizard's partial reconciliation.
        return self.pool.get('account.move.line.reconcile.writeoff').trans_rec_reconcile_partial(cr, uid, ids, context)
    def trans_rec_reconcile_full(self, cr, uid, ids, context=None):
        """Fully reconcile the selected lines in the current period; no
        write-off account/journal is needed (both are passed as False)."""
        account_move_line_obj = self.pool.get('account.move.line')
        period_obj = self.pool.get('account.period')
        date = False
        period_id = False
        journal_id= False
        account_id = False
        if context is None:
            context = {}
        # Use today's date to locate the accounting period.
        date = time.strftime('%Y-%m-%d')
        ids = period_obj.find(cr, uid, dt=date, context=context)
        if ids:
            period_id = ids[0]
        account_move_line_obj.reconcile(cr, uid, context['active_ids'], 'manual', account_id,
                                        period_id, journal_id, context=context)
        return {'type': 'ir.actions.act_window_close'}
class account_move_line_reconcile_writeoff(osv.osv_memory):
    """
    Write-off companion wizard: lets the user pick the journal, account,
    date, comment and analytic account used to book the write-off entry
    when reconciling, then performs the reconciliation.
    """
    _name = 'account.move.line.reconcile.writeoff'
    _description = 'Account move line reconcile (writeoff)'
    _columns = {
        'journal_id': fields.many2one('account.journal','Write-Off Journal', required=True),
        'writeoff_acc_id': fields.many2one('account.account','Write-Off account', required=True),
        'date_p': fields.date('Date'),
        'comment': fields.char('Comment', required=True),
        'analytic_id': fields.many2one('account.analytic.account', 'Analytic Account', domain=[('parent_id', '!=', False)]),
    }
    _defaults = {
        # Default the write-off date to today and pre-fill the comment.
        'date_p': lambda *a: time.strftime('%Y-%m-%d'),
        'comment': _('Write-off'),
    }
    def trans_rec_addendum(self, cr, uid, ids, context=None):
        """Open this wizard's form view in a new dialog window."""
        mod_obj = self.pool.get('ir.model.data')
        if context is None:
            context = {}
        # Look up the form view by its XML id to force that specific view.
        model_data_ids = mod_obj.search(cr, uid,[('model','=','ir.ui.view'),('name','=','account_move_line_reconcile_writeoff')], context=context)
        resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
        return {
            'name': _('Reconcile Writeoff'),
            'context': context,
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'account.move.line.reconcile.writeoff',
            'views': [(resource_id,'form')],
            'type': 'ir.actions.act_window',
            'target': 'new',
        }
    def trans_rec_reconcile_partial(self, cr, uid, ids, context=None):
        """Partially reconcile the selected lines (no write-off posted)."""
        account_move_line_obj = self.pool.get('account.move.line')
        if context is None:
            context = {}
        account_move_line_obj.reconcile_partial(cr, uid, context['active_ids'], 'manual', context=context)
        return {'type': 'ir.actions.act_window_close'}
    def trans_rec_reconcile(self, cr, uid, ids, context=None):
        """Reconcile the selected lines, booking the write-off with the
        journal/account/date entered in this wizard."""
        context = dict(context or {})
        account_move_line_obj = self.pool.get('account.move.line')
        period_obj = self.pool.get('account.period')
        # NOTE(review): dead branch -- context was normalized to a dict above,
        # so it can no longer be None here.
        if context is None:
            context = {}
        data = self.read(cr, uid, ids,context=context)[0]
        account_id = data['writeoff_acc_id'][0]
        # Pass the user's choices to reconcile() through the context.
        context['date_p'] = data['date_p']
        journal_id = data['journal_id'][0]
        context['comment'] = data['comment']
        if data['analytic_id']:
            context['analytic_id'] = data['analytic_id'][0]
        # NOTE(review): if date_p is unset or no period matches, period_id is
        # never assigned and the reconcile() call below raises NameError --
        # confirm whether a period is always guaranteed here.
        if context['date_p']:
            date = context['date_p']
        ids = period_obj.find(cr, uid, dt=date, context=context)
        if ids:
            period_id = ids[0]
        account_move_line_obj.reconcile(cr, uid, context['active_ids'], 'manual', account_id,
                                        period_id, journal_id, context=context)
        return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
movermeyer/namebench | refs/heads/master | nb_third_party/dns/name.py | 228 | # Copyright (C) 2001-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Names.
@var root: The DNS root name.
@type root: dns.name.Name object
@var empty: The empty DNS name.
@type empty: dns.name.Name object
"""
import cStringIO
import struct
import sys
if sys.hexversion >= 0x02030000:
import encodings.idna
import dns.exception
# Name-relation constants returned (as the first tuple element) by
# Name.fullcompare(), describing how two names relate to each other.
NAMERELN_NONE = 0           # no relation (e.g. differing relativity)
NAMERELN_SUPERDOMAIN = 1    # self contains other
NAMERELN_SUBDOMAIN = 2      # self is contained in other
NAMERELN_EQUAL = 3          # names are equal (case-insensitively)
NAMERELN_COMMONANCESTOR = 4 # share a suffix, but neither contains the other
class EmptyLabel(dns.exception.SyntaxError):
    """Raised if a label is empty anywhere but at the end of a name
    (e.g. consecutive dots in text form)."""
    pass
class BadEscape(dns.exception.SyntaxError):
    """Raised if an escaped code in a text format name is invalid
    (non-digit in a decimal escape, or a trailing backslash)."""
    pass
class BadPointer(dns.exception.FormError):
    """Raised if a compression pointer points forward instead of backward."""
    pass
class BadLabelType(dns.exception.FormError):
    """Raised if the label type of a wire format name is unknown
    (length octet in the reserved 64..191 range)."""
    pass
class NeedAbsoluteNameOrOrigin(dns.exception.DNSException):
    """Raised if an attempt is made to convert a non-absolute name to
    wire when there is also a non-absolute (or missing) origin."""
    pass
class NameTooLong(dns.exception.FormError):
    """Raised if a name is > 255 octets long (wire-format length)."""
    pass
class LabelTooLong(dns.exception.SyntaxError):
    """Raised if a label is > 63 octets long."""
    pass
class AbsoluteConcatenation(dns.exception.DNSException):
    """Raised if an attempt is made to append anything other than the
    empty name to an absolute name."""
    pass
class NoParent(dns.exception.DNSException):
    """Raised if an attempt is made to get the parent of the root name
    or the empty name."""
    pass
_escaped = {
'"' : True,
'(' : True,
')' : True,
'.' : True,
';' : True,
'\\' : True,
'@' : True,
'$' : True
}
def _escapify(label):
"""Escape the characters in label which need it.
@returns: the escaped string
@rtype: string"""
text = ''
for c in label:
if c in _escaped:
text += '\\' + c
elif ord(c) > 0x20 and ord(c) < 0x7F:
text += c
else:
text += '\\%03d' % ord(c)
return text
def _validate_labels(labels):
"""Check for empty labels in the middle of a label sequence,
labels that are too long, and for too many labels.
@raises NameTooLong: the name as a whole is too long
@raises LabelTooLong: an individual label is too long
@raises EmptyLabel: a label is empty (i.e. the root label) and appears
in a position other than the end of the label sequence"""
l = len(labels)
total = 0
i = -1
j = 0
for label in labels:
ll = len(label)
total += ll + 1
if ll > 63:
raise LabelTooLong
if i < 0 and label == '':
i = j
j += 1
if total > 255:
raise NameTooLong
if i >= 0 and i != l - 1:
raise EmptyLabel
class Name(object):
    """A DNS name.
    The dns.name.Name class represents a DNS name as a tuple of labels.
    Instances of the class are immutable.
    @ivar labels: The tuple of labels in the name.  Each label is a string of
    up to 63 octets."""
    # __slots__ together with the raising __setattr__ below makes instances
    # compact and effectively immutable.
    __slots__ = ['labels']
    def __init__(self, labels):
        """Initialize a domain name from a list of labels.
        @param labels: the labels
        @type labels: any iterable whose values are strings
        """
        # Must go through object.__setattr__ because our own __setattr__
        # unconditionally raises.
        super(Name, self).__setattr__('labels', tuple(labels))
        _validate_labels(self.labels)
    def __setattr__(self, name, value):
        # Enforce immutability: no attribute may ever be (re)assigned.
        raise TypeError("object doesn't support attribute assignment")
    def is_absolute(self):
        """Is the most significant label of this name the root label?
        @rtype: bool
        """
        return len(self.labels) > 0 and self.labels[-1] == ''
    def is_wild(self):
        """Is this name wild? (I.e. Is the least significant label '*'?)
        @rtype: bool
        """
        return len(self.labels) > 0 and self.labels[0] == '*'
    def __hash__(self):
        """Return a case-insensitive hash of the name.
        @rtype: int
        """
        # DNS names compare case-insensitively, hence lower() here.
        # (Python 2 idioms: long literal 0L and sys.maxint.)
        h = 0L
        for label in self.labels:
            for c in label:
                h += ( h << 3 ) + ord(c.lower())
        return int(h % sys.maxint)
    def fullcompare(self, other):
        """Compare two names, returning a 3-tuple (relation, order, nlabels).
        I{relation} describes the relationship between the names,
        and is one of: dns.name.NAMERELN_NONE,
        dns.name.NAMERELN_SUPERDOMAIN, dns.name.NAMERELN_SUBDOMAIN,
        dns.name.NAMERELN_EQUAL, or dns.name.NAMERELN_COMMONANCESTOR
        I{order} is < 0 if self < other, > 0 if self > other, and ==
        0 if self == other.  A relative name is always less than an
        absolute name.  If both names have the same relativity, then
        the DNSSEC order relation is used to order them.
        I{nlabels} is the number of significant labels that the two names
        have in common.
        """
        sabs = self.is_absolute()
        oabs = other.is_absolute()
        if sabs != oabs:
            # Names of differing relativity are unrelated; the absolute
            # name sorts after the relative one.
            if sabs:
                return (NAMERELN_NONE, 1, 0)
            else:
                return (NAMERELN_NONE, -1, 0)
        l1 = len(self.labels)
        l2 = len(other.labels)
        ldiff = l1 - l2
        if ldiff < 0:
            l = l1
        else:
            l = l2
        order = 0
        nlabels = 0
        namereln = NAMERELN_NONE
        # Compare labels from the right (most significant) end inward,
        # case-insensitively.
        while l > 0:
            l -= 1
            l1 -= 1
            l2 -= 1
            label1 = self.labels[l1].lower()
            label2 = other.labels[l2].lower()
            if label1 < label2:
                order = -1
                if nlabels > 0:
                    namereln = NAMERELN_COMMONANCESTOR
                return (namereln, order, nlabels)
            elif label1 > label2:
                order = 1
                if nlabels > 0:
                    namereln = NAMERELN_COMMONANCESTOR
                return (namereln, order, nlabels)
            nlabels += 1
        # All compared labels matched: the longer name is the subdomain.
        order = ldiff
        if ldiff < 0:
            namereln = NAMERELN_SUPERDOMAIN
        elif ldiff > 0:
            namereln = NAMERELN_SUBDOMAIN
        else:
            namereln = NAMERELN_EQUAL
        return (namereln, order, nlabels)
    def is_subdomain(self, other):
        """Is self a subdomain of other?
        The notion of subdomain includes equality.
        @rtype: bool
        """
        (nr, o, nl) = self.fullcompare(other)
        if nr == NAMERELN_SUBDOMAIN or nr == NAMERELN_EQUAL:
            return True
        return False
    def is_superdomain(self, other):
        """Is self a superdomain of other?
        The notion of superdomain includes equality.
        @rtype: bool
        """
        (nr, o, nl) = self.fullcompare(other)
        if nr == NAMERELN_SUPERDOMAIN or nr == NAMERELN_EQUAL:
            return True
        return False
    def canonicalize(self):
        """Return a name which is equal to the current name, but is in
        DNSSEC canonical form (all labels lowercased).
        @rtype: dns.name.Name object
        """
        return Name([x.lower() for x in self.labels])
    # Rich comparisons all delegate to fullcompare()'s order component,
    # giving case-insensitive DNSSEC ordering.
    def __eq__(self, other):
        if isinstance(other, Name):
            return self.fullcompare(other)[1] == 0
        else:
            return False
    def __ne__(self, other):
        if isinstance(other, Name):
            return self.fullcompare(other)[1] != 0
        else:
            return True
    def __lt__(self, other):
        if isinstance(other, Name):
            return self.fullcompare(other)[1] < 0
        else:
            return NotImplemented
    def __le__(self, other):
        if isinstance(other, Name):
            return self.fullcompare(other)[1] <= 0
        else:
            return NotImplemented
    def __ge__(self, other):
        if isinstance(other, Name):
            return self.fullcompare(other)[1] >= 0
        else:
            return NotImplemented
    def __gt__(self, other):
        if isinstance(other, Name):
            return self.fullcompare(other)[1] > 0
        else:
            return NotImplemented
    def __repr__(self):
        return '<DNS name ' + self.__str__() + '>'
    def __str__(self):
        return self.to_text(False)
    def to_text(self, omit_final_dot = False):
        """Convert name to text format.
        @param omit_final_dot: If True, don't emit the final dot (denoting the
        root label) for absolute names.  The default is False.
        @rtype: string
        """
        # Special renderings: '@' for the empty name, '.' for the root.
        if len(self.labels) == 0:
            return '@'
        if len(self.labels) == 1 and self.labels[0] == '':
            return '.'
        if omit_final_dot and self.is_absolute():
            l = self.labels[:-1]
        else:
            l = self.labels
        s = '.'.join(map(_escapify, l))
        return s
    def to_unicode(self, omit_final_dot = False):
        """Convert name to Unicode text format.
        IDN ACE labels are converted to Unicode.
        @param omit_final_dot: If True, don't emit the final dot (denoting the
        root label) for absolute names.  The default is False.
        @rtype: string
        """
        if len(self.labels) == 0:
            return u'@'
        if len(self.labels) == 1 and self.labels[0] == '':
            return u'.'
        if omit_final_dot and self.is_absolute():
            l = self.labels[:-1]
        else:
            l = self.labels
        s = u'.'.join([encodings.idna.ToUnicode(_escapify(x)) for x in l])
        return s
    def to_digestable(self, origin=None):
        """Convert name to a format suitable for digesting in hashes.
        The name is canonicalized and converted to uncompressed wire format.
        @param origin: If the name is relative and origin is not None, then
        origin will be appended to it.
        @type origin: dns.name.Name object
        @raises NeedAbsoluteNameOrOrigin: All names in wire format are
        absolute.  If self is a relative name, then an origin must be supplied;
        if it is missing, then this exception is raised
        @rtype: string
        """
        if not self.is_absolute():
            if origin is None or not origin.is_absolute():
                raise NeedAbsoluteNameOrOrigin
            labels = list(self.labels)
            labels.extend(list(origin.labels))
        else:
            labels = self.labels
        # Wire form: one length octet followed by the (lowercased) label.
        dlabels = ["%s%s" % (chr(len(x)), x.lower()) for x in labels]
        return ''.join(dlabels)
    def to_wire(self, file = None, compress = None, origin = None):
        """Convert name to wire format, possibly compressing it.
        @param file: the file where the name is emitted (typically
        a cStringIO file).  If None, a string containing the wire name
        will be returned.
        @type file: file or None
        @param compress: The compression table.  If None (the default) names
        will not be compressed.
        @type compress: dict
        @param origin: If the name is relative and origin is not None, then
        origin will be appended to it.
        @type origin: dns.name.Name object
        @raises NeedAbsoluteNameOrOrigin: All names in wire format are
        absolute.  If self is a relative name, then an origin must be supplied;
        if it is missing, then this exception is raised
        """
        if file is None:
            file = cStringIO.StringIO()
            want_return = True
        else:
            want_return = False
        if not self.is_absolute():
            if origin is None or not origin.is_absolute():
                raise NeedAbsoluteNameOrOrigin
            labels = list(self.labels)
            labels.extend(list(origin.labels))
        else:
            labels = self.labels
        i = 0
        for label in labels:
            # n is the remaining suffix starting at this label; it is the
            # unit of compression lookup.
            n = Name(labels[i:])
            i += 1
            if not compress is None:
                pos = compress.get(n)
            else:
                pos = None
            if not pos is None:
                # Known suffix: emit a 2-octet compression pointer
                # (top two bits set, i.e. 0xc000 | offset) and stop.
                value = 0xc000 + pos
                s = struct.pack('!H', value)
                file.write(s)
                break
            else:
                # Only offsets below 0xc000 are representable as pointers,
                # so larger positions are not added to the table.
                if not compress is None and len(n) > 1:
                    pos = file.tell()
                    if pos < 0xc000:
                        compress[n] = pos
                l = len(label)
                file.write(chr(l))
                if l > 0:
                    file.write(label)
        if want_return:
            return file.getvalue()
    def __len__(self):
        """The length of the name (in labels).
        @rtype: int
        """
        return len(self.labels)
    def __getitem__(self, index):
        return self.labels[index]
    def __getslice__(self, start, stop):
        # Python 2 slicing protocol; returns a tuple of labels, not a Name.
        return self.labels[start:stop]
    def __add__(self, other):
        return self.concatenate(other)
    def __sub__(self, other):
        return self.relativize(other)
    def split(self, depth):
        """Split a name into a prefix and suffix at depth.
        @param depth: the number of labels in the suffix
        @type depth: int
        @raises ValueError: the depth was not >= 0 and <= the length of the
        name.
        @returns: the tuple (prefix, suffix)
        @rtype: tuple
        """
        l = len(self.labels)
        if depth == 0:
            return (self, dns.name.empty)
        elif depth == l:
            return (dns.name.empty, self)
        elif depth < 0 or depth > l:
            raise ValueError('depth must be >= 0 and <= the length of the name')
        return (Name(self[: -depth]), Name(self[-depth :]))
    def concatenate(self, other):
        """Return a new name which is the concatenation of self and other.
        @rtype: dns.name.Name object
        @raises AbsoluteConcatenation: self is absolute and other is
        not the empty name
        """
        if self.is_absolute() and len(other) > 0:
            raise AbsoluteConcatenation
        labels = list(self.labels)
        labels.extend(list(other.labels))
        return Name(labels)
    def relativize(self, origin):
        """If self is a subdomain of origin, return a new name which is self
        relative to origin.  Otherwise return self.
        @rtype: dns.name.Name object
        """
        if not origin is None and self.is_subdomain(origin):
            return Name(self[: -len(origin)])
        else:
            return self
    def derelativize(self, origin):
        """If self is a relative name, return a new name which is the
        concatenation of self and origin.  Otherwise return self.
        @rtype: dns.name.Name object
        """
        if not self.is_absolute():
            return self.concatenate(origin)
        else:
            return self
    def choose_relativity(self, origin=None, relativize=True):
        """Return a name with the relativity desired by the caller.  If
        origin is None, then self is returned.  Otherwise, if
        relativize is true the name is relativized, and if relativize is
        false the name is derelativized.
        @rtype: dns.name.Name object
        """
        if origin:
            if relativize:
                return self.relativize(origin)
            else:
                return self.derelativize(origin)
        else:
            return self
    def parent(self):
        """Return the parent of the name (the name minus its least
        significant label).
        @rtype: dns.name.Name object
        @raises NoParent: the name is either the root name or the empty name,
        and thus has no parent.
        """
        if self == root or self == empty:
            raise NoParent
        return Name(self.labels[1:])
# Module-level singletons: the absolute root name ('.') and the empty
# (relative) name.
root = Name([''])
empty = Name([])
def from_unicode(text, origin = root):
    """Convert unicode text into a Name object.
    Labels are encoded in IDN ACE form.
    @param text: the unicode name to parse
    @param origin: appended when the parsed name is relative (default: root,
    which makes the result absolute)
    @rtype: dns.name.Name object
    """
    if not isinstance(text, unicode):
        raise ValueError("input to from_unicode() must be a unicode string")
    if not (origin is None or isinstance(origin, Name)):
        raise ValueError("origin must be a Name or None")
    labels = []
    label = u''
    # State of the backslash-escape scanner: after a '\\', either one
    # literal character or exactly three decimal digits (\DDD) follow.
    escaping = False
    edigits = 0
    total = 0
    if text == u'@':
        text = u''
    if text:
        if text == u'.':
            return Name([''])	# no Unicode "u" on this constant!
        for c in text:
            if escaping:
                if edigits == 0:
                    if c.isdigit():
                        total = int(c)
                        edigits += 1
                    else:
                        # '\\x' escapes the single character x.
                        label += c
                        escaping = False
                else:
                    if not c.isdigit():
                        raise BadEscape
                    total *= 10
                    total += int(c)
                    edigits += 1
                    if edigits == 3:
                        escaping = False
                        label += chr(total)
            elif c == u'.' or c == u'\u3002' or \
                 c == u'\uff0e' or c == u'\uff61':
                # ASCII dot plus the CJK/halfwidth full stops all end a label.
                if len(label) == 0:
                    raise EmptyLabel
                labels.append(encodings.idna.ToASCII(label))
                label = u''
            elif c == u'\\':
                escaping = True
                edigits = 0
                total = 0
            else:
                label += c
        if escaping:
            raise BadEscape
        if len(label) > 0:
            labels.append(encodings.idna.ToASCII(label))
        else:
            # Trailing dot: the name is absolute; append the root label.
            labels.append('')
    # Relative names get the origin's labels appended.
    if (len(labels) == 0 or labels[-1] != '') and not origin is None:
        labels.extend(list(origin.labels))
    return Name(labels)
def from_text(text, origin = root):
    """Convert text into a Name object.
    @param text: the name to parse ('@' means the empty name)
    @param origin: appended when the parsed name is relative (default: root,
    which makes the result absolute)
    @rtype: dns.name.Name object
    """
    if not isinstance(text, str):
        # Unicode input is handled by from_unicode (IDN-aware) when the
        # interpreter is new enough to have encodings.idna.
        if isinstance(text, unicode) and sys.hexversion >= 0x02030000:
            return from_unicode(text, origin)
        else:
            raise ValueError("input to from_text() must be a string")
    if not (origin is None or isinstance(origin, Name)):
        raise ValueError("origin must be a Name or None")
    labels = []
    label = ''
    # State of the backslash-escape scanner: after a '\\', either one
    # literal character or exactly three decimal digits (\DDD) follow.
    escaping = False
    edigits = 0
    total = 0
    if text == '@':
        text = ''
    if text:
        if text == '.':
            return Name([''])
        for c in text:
            if escaping:
                if edigits == 0:
                    if c.isdigit():
                        total = int(c)
                        edigits += 1
                    else:
                        # '\\x' escapes the single character x.
                        label += c
                        escaping = False
                else:
                    if not c.isdigit():
                        raise BadEscape
                    total *= 10
                    total += int(c)
                    edigits += 1
                    if edigits == 3:
                        escaping = False
                        label += chr(total)
            elif c == '.':
                if len(label) == 0:
                    raise EmptyLabel
                labels.append(label)
                label = ''
            elif c == '\\':
                escaping = True
                edigits = 0
                total = 0
            else:
                label += c
        if escaping:
            raise BadEscape
        if len(label) > 0:
            labels.append(label)
        else:
            # Trailing dot: the name is absolute; append the root label.
            labels.append('')
    # Relative names get the origin's labels appended.
    if (len(labels) == 0 or labels[-1] != '') and not origin is None:
        labels.extend(list(origin.labels))
    return Name(labels)
def from_wire(message, current):
    """Convert possibly compressed wire format into a Name.
    @param message: the entire DNS message
    @type message: string
    @param current: the offset of the beginning of the name from the start
    of the message
    @type current: int
    @raises dns.name.BadPointer: a compression pointer did not point backwards
    in the message
    @raises dns.name.BadLabelType: an invalid label type was encountered.
    @returns: a tuple consisting of the name that was read and the number
    of bytes of the wire format message which were consumed reading it
    @rtype: (dns.name.Name object, int) tuple
    """
    if not isinstance(message, str):
        raise ValueError("input to from_wire() must be a byte string")
    labels = []
    # Every compression pointer must point strictly backwards; tracking the
    # largest offset already visited prevents pointer loops.
    biggest_pointer = current
    hops = 0
    count = ord(message[current])
    current += 1
    cused = 1
    while count != 0:
        if count < 64:
            # Ordinary label of `count` octets.
            labels.append(message[current : current + count])
            current += count
            # cused counts only bytes consumed at the original location,
            # not bytes read while following pointers.
            if hops == 0:
                cused += count
        elif count >= 192:
            # Compression pointer: 14-bit offset in the low bits of two
            # octets (top two bits are 11).
            current = (count & 0x3f) * 256 + ord(message[current])
            if hops == 0:
                cused += 1
            if current >= biggest_pointer:
                raise BadPointer
            biggest_pointer = current
            hops += 1
        else:
            # Length octets 64..191 are reserved label types.
            raise BadLabelType
        count = ord(message[current])
        current += 1
        if hops == 0:
            cused += 1
    # The terminating zero octet is the root label.
    labels.append('')
    return (Name(labels), cused)
|
cherryleer/storm | refs/heads/moved-to-apache | storm-core/src/py/storm/DistributedRPC.py | 37 | #
# Autogenerated by Thrift Compiler (0.7.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from thrift.Thrift import *
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
# The C-accelerated (de)serializer is optional; fall back to the pure
# Python protocol implementation when it is unavailable.
try:
  from thrift.protocol import fastbinary
except:
  fastbinary = None
class Iface:
  # Thrift-generated service interface for DistributedRPC; implementations
  # override execute().
  def execute(self, functionName, funcArgs):
    """
    Parameters:
     - functionName
     - funcArgs
    """
    pass
class Client(Iface):
  # Thrift-generated synchronous client stub for the DistributedRPC service.
  def __init__(self, iprot, oprot=None):
    # A single protocol may serve both directions; oprot overrides the
    # output side when given.
    self._iprot = self._oprot = iprot
    if oprot is not None:
      self._oprot = oprot
    self._seqid = 0
  def execute(self, functionName, funcArgs):
    """
    Invoke the remote function and return its result (blocking call).
    Parameters:
     - functionName
     - funcArgs
    """
    self.send_execute(functionName, funcArgs)
    return self.recv_execute()
  def send_execute(self, functionName, funcArgs):
    # Serialize the call message and flush it on the transport.
    self._oprot.writeMessageBegin('execute', TMessageType.CALL, self._seqid)
    args = execute_args()
    args.functionName = functionName
    args.funcArgs = funcArgs
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_execute(self, ):
    # Read the reply; transport-level failures arrive as EXCEPTION
    # messages, application failures as the `e` field of the result.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = execute_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "execute failed: unknown result");
class Processor(Iface, TProcessor):
  # Thrift-generated server-side dispatcher: reads a call off the input
  # protocol, invokes the handler, and writes the reply.
  def __init__(self, handler):
    self._handler = handler
    # Maps method name -> unbound processing function.
    self._processMap = {}
    self._processMap["execute"] = Processor.process_execute
  def process(self, iprot, oprot):
    (name, type, seqid) = iprot.readMessageBegin()
    if name not in self._processMap:
      # Unknown method: drain the arguments and reply with an exception.
      iprot.skip(TType.STRUCT)
      iprot.readMessageEnd()
      x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
      oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
      x.write(oprot)
      oprot.writeMessageEnd()
      oprot.trans.flush()
      return
    else:
      self._processMap[name](self, seqid, iprot, oprot)
    return True
  def process_execute(self, seqid, iprot, oprot):
    args = execute_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = execute_result()
    # Declared application exceptions are serialized into the result
    # struct rather than propagated.
    try:
      result.success = self._handler.execute(args.functionName, args.funcArgs)
    except DRPCExecutionException, e:
      result.e = e
    oprot.writeMessageBegin("execute", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class execute_args:
  """
  Thrift-generated argument struct for DistributedRPC.execute.
  Attributes:
   - functionName
   - funcArgs
  """
  # Field metadata used by the (de)serializers; index = field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'functionName', None, None, ), # 1
    (2, TType.STRING, 'funcArgs', None, None, ), # 2
  )
  def __hash__(self):
    return 0 + hash(self.functionName) + hash(self.funcArgs)
  def __init__(self, functionName=None, funcArgs=None,):
    self.functionName = functionName
    self.funcArgs = funcArgs
  def read(self, iprot):
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic field-by-field decoding; unknown fields are skipped
    # for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.functionName = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.funcArgs = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(); otherwise emit fields one by one,
    # omitting unset (None) fields.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('execute_args')
    if self.functionName is not None:
      oprot.writeFieldBegin('functionName', TType.STRING, 1)
      oprot.writeString(self.functionName.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.funcArgs is not None:
      oprot.writeFieldBegin('funcArgs', TType.STRING, 2)
      oprot.writeString(self.funcArgs.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class execute_result:
    """
    Result struct for the execute() RPC (Thrift-generated).

    Attributes:
     - success: string returned by the server on success
     - e: declared DRPCExecutionException raised by the handler
    """

    # Thrift field metadata: (field id, type, name, type args, default).
    thrift_spec = (
        (0, TType.STRING, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'e', (DRPCExecutionException, DRPCExecutionException.thrift_spec), None, ),  # 1
    )

    def __hash__(self):
        return 0 + hash(self.success) + hash(self.e)

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        # Fast path: C-accelerated decoder when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding; unknown fields skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString().decode('utf-8')
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = DRPCExecutionException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast-path mirror of read(); None fields are simply omitted.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('execute_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8'))
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs with no required fields validate trivially.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
|
colinnewell/odoo | refs/heads/8.0 | openerp/addons/base/report/preview_report.py | 447 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.report import report_sxw
class rmlparser(report_sxw.rml_parse):
    # RML report parser that stamps the rendered preview report with the
    # company of the first record being printed.
    def set_context(self, objects, data, ids, report_type = None):
        # Delegate to the standard parser, then take the company used for
        # the header/footer from the first object (a res.company record,
        # per the report registration below).
        super(rmlparser,self).set_context(objects, data, ids, report_type)
        self.setCompany(objects[0])
# Register the preview report against the res.company model so it can be
# rendered from a company record, using the company's external header layout.
report_sxw.report_sxw('report.preview.report', 'res.company',
    'addons/base/report/preview_report.rml', parser=rmlparser, header='external')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
vanda/DigitalLabels | refs/heads/master | labels/models.py | 1 | import logging
import os
import urllib2
import httplib
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from django.utils import simplejson
from django.utils.html import strip_tags
from django.utils.safestring import mark_safe
from sorl.thumbnail import ImageField, get_thumbnail
# Create your models here.
# import the logging library
# Hostname of the collections API; can be overridden in Django settings.
COLLECTIONS_API_HOSTNAME = getattr(
    settings, 'COLLECTIONS_API_HOSTNAME', 'www.vam.ac.uk')

# Media server used to fetch collection images.
# BUG FIX: the original guard tested hasattr(settings, 'MEDIA_SERVER') while
# reading settings.COLLECTIONS_API_MEDIA_SERVER — so a settings file defining
# only MEDIA_SERVER raised AttributeError, and one defining only
# COLLECTIONS_API_MEDIA_SERVER was silently ignored.  Using getattr on the
# attribute actually read fixes both cases.
COLLECTIONS_API_MEDIA_SERVER = getattr(
    settings, 'COLLECTIONS_API_MEDIA_SERVER', 'media.vam.ac.uk')
# Get an instance of a logger
logger = logging.getLogger('labels')
class BaseScreen(models.Model):
    """Abstract base model for a display screen (digital label or portal)."""
    name = models.CharField(max_length=255, null=False)
    # Images cycled through when the screen times out.
    timeout_images = models.ManyToManyField("Image", blank=True)
    # Lazily-populated thumbnail URL cache (shared convention with BaseLabel).
    _thumbnail_url = None

    def __unicode__(self):
        return self.name

    def referrer(self):
        # Lower-cased concrete model name, e.g. 'digitallabel' or 'portal'.
        return self._meta.object_name.lower()

    @property
    def model_name(self):
        # Same value as referrer(), exposed as a property.
        return self._meta.object_name.lower()

    def _Objects(self):
        # Number of related museum objects (used as an admin list column).
        return self.museumobjects.count()

    class Meta:
        abstract = True
class BaseLabel(models.Model):
    """Abstract base for label models: admin links plus thumbnail helpers."""
    # Lazily-populated cache for the 44x44 thumbnail URL.
    _thumbnail_url = None

    def digital_label(self):
        # Admin change-view link to the owning DigitalLabel.
        href = reverse('admin:%s_%s_change' % (self._meta.app_label, 'digitallabel'),
                       args=[self.digitallabel.pk])
        return mark_safe('<a href="%s">%s</a>' % (href, self.digitallabel))
    digital_label.allow_tags = True

    def _portal(self):
        # Admin change-view link to the owning Portal.
        href = reverse('admin:%s_%s_change' % (self._meta.app_label, 'portal'),
                       args=[self.portal.pk])
        return mark_safe('<a href="%s">%s</a>' % (href, self.portal))
    _portal.allow_tags = True

    def admin_template(self):
        # Admin URL name for this concrete model's change view.
        return 'admin:%s_%s_change' % (self._meta.app_label, self._meta.object_name.lower())

    @property
    def display_text(self):
        # BUG FIX: the original did `return NotImplementedError`, which handed
        # the exception *class* to callers (e.g. strip_tags in thumbnail_tag)
        # instead of signalling that subclasses must override this property.
        raise NotImplementedError

    @property
    def thumbnail_url(self):
        # Build (and cache) a thumbnail URL from the highest-priority image.
        if not self._thumbnail_url:
            if self.image_set.count() > 0:
                # images are sorted by priority, so take the first
                image_file = self.image_set.all()[0]
                im = get_thumbnail(image_file.local_filename, '44x44',
                                   quality=85, pad=True)
                self._thumbnail_url = im.url
        return self._thumbnail_url

    def thumbnail_tag(self):
        # <img> tag for admin list display, or a placeholder with no images.
        if self.thumbnail_url:
            return mark_safe('<img alt="%s" src="%s" />' % (
                strip_tags(self.display_text), self.thumbnail_url))
        else:
            return mark_safe('<em>No Images</em>')
    thumbnail_tag.allow_tags = True
    thumbnail_tag.short_description = 'Thumb'

    class Meta:
        abstract = True
class MuseumObject(BaseLabel):
    """
    A label describing an individual object
    """
    name = models.CharField(max_length=255, null=False, blank=True)
    date_text = models.CharField(max_length=255, null=False, blank=True)
    artist_maker = models.CharField("Designer / Maker",
        max_length=255, null=False, blank=True)
    restored_altered = models.CharField("Restored / Altered",
        max_length=255, null=False, blank=True)
    place = models.TextField("Place of Design / Manufacture", blank=True)
    materials_techniques = models.TextField(blank=True)
    museum_number = models.CharField(max_length=255, null=False, blank=True)
    # "O" number used to look the object up in the collections API.
    object_number = models.CharField(max_length=16, null=False, blank=True,
        db_index=True, help_text="""Optional. Unique "O" number, For
example, O9138, as used on
Search the Collections""")
    credit_line = models.CharField(max_length=255, null=False, blank=True)
    artfund = models.BooleanField(default=False)
    main_text = models.TextField(blank=True)
    # Flag checked by save-time signals to re-fetch content from the API.
    redownload = models.BooleanField(help_text="""WARNING: This may
replace your existing content""")

    @property
    def display_text(self):
        # Text shown in thumbnails and admin listings.
        return self.name

    class Meta:
        verbose_name = "object"

    def __unicode__(self):
        if self.museum_number:
            return u"%s %s (%s)" % (self.object_number,
                self.name, self.museum_number)
        else:
            return self.name

    # Per-instance cache for the JSON record fetched from the API.
    _museumobject_json = None

    @property
    def museumobject_json(self):
        # Fetch (once) the object's JSON record from the collections API.
        # HTTP errors are deliberately swallowed: a missing or failing record
        # just leaves the cache as None.  (Python 2 `except X, e` syntax.)
        if self._museumobject_json == None and self.object_number:
            item_url = 'http://%s/api/json/museumobject/%s/' % (
                COLLECTIONS_API_HOSTNAME,
                self.object_number)
            try:
                response = urllib2.urlopen(item_url)
                self._museumobject_json = simplejson.load(response)[0]
            except urllib2.HTTPError, e:
                if e.code == 404:
                    # Missing object
                    pass
                else:
                    # other error
                    pass
        return self._museumobject_json

    def create_cms_labels(self):
        # Import every label attached to the API record as a CMSLabel row.
        museum_object = self.museumobject_json
        if museum_object:
            for l in museum_object['fields']['labels']:
                cms_label = CMSLabel()
                cms_label.date = l['fields']['date']
                cms_label.text = l['fields']['label_text']
                cms_label.museumobject = self
                cms_label.save()

    def create_images(self):
        # Import every image referenced by the API record, downloading the
        # file from the media server; the primary image gets position 0 so it
        # sorts first.  Failed downloads remove the placeholder row.
        museum_object = self.museumobject_json
        if museum_object:
            for i in museum_object['fields']['image_set']:
                image_id = i['fields']['image_id']
                try:
                    cms_image, cr = Image.objects.get_or_create(
                        museumobject=self, image_id=image_id)
                    # retreive image from media server
                    image_success = cms_image.store_vadar_image()
                    if image_success:
                        cms_image.caption = image_id
                        cms_image.image_file = os.path.join(
                            cms_image.image_file.field.upload_to,
                            unicode(cms_image.image_id) + '.jpg')
                        if image_id == \
                                museum_object['fields']['primary_image_id']:
                            cms_image.position = 0
                        cms_image.save()
                    else:
                        cms_image.delete()
                except urllib2.HTTPError, e:
                    cms_image.image_file = ''
                    if e.code == 404:
                        # Missing object
                        pass
                    else:
                        # other error
                        pass
class TextLabel(BaseLabel):
    """
    A label describing biography or a historical notes
    """
    title = models.CharField(max_length=255, null=False, blank=True)
    main_text = models.TextField(blank=True)

    @property
    def display_text(self):
        # Text shown in thumbnails and admin listings.
        return self.title

    def __unicode__(self):
        return self.title
class CMSLabel(models.Model):
    """A dated label text imported from the collections API for one object."""
    date = models.CharField(max_length=255, null=False)
    text = models.TextField()
    museumobject = models.ForeignKey(MuseumObject)

    def __unicode__(self):
        return u"%s for %s" % (self.date, self.museumobject.museum_number)
class Image(models.Model):
    """An image attached to either a MuseumObject or a TextLabel."""
    # Collections-system image id, e.g. used to build the download URL.
    image_id = models.CharField(max_length=16, null=False, blank=True)
    caption = models.TextField(blank=True)
    image_file = ImageField(upload_to="labels/images")
    # Sort order; lower numbers appear first (0 marks the primary image).
    position = models.PositiveIntegerField(null=False, default=1)
    museumobject = models.ForeignKey(MuseumObject, null=True, blank=True)
    textlabel = models.ForeignKey(TextLabel, null=True, blank=True)

    class Meta:
        ordering = ['position']

    def __unicode__(self):
        # Describe the image by whichever owner it has, else its caption.
        if self.museumobject:
            desc = self.museumobject
        elif self.textlabel:
            desc = self.textlabel
        else:
            desc = self.caption
        return '%s - %s' % (os.path.basename(self.image_file.name), desc)

    def object_link(self):
        # Admin change-view link to the owning MuseumObject.
        href = reverse('admin:%s_%s_change' % (self._meta.app_label, 'museumobject'),
            args=[self.museumobject.pk])
        return mark_safe('<a href="%s">%s</a>' % (href, self.museumobject))
    object_link.allow_tags = True

    def label_link(self):
        # Admin change-view link to the owning TextLabel.
        href = reverse('admin:%s_%s_change' % (self._meta.app_label, 'textlabel'),
            args=[self.textlabel.pk])
        return mark_safe('<a href="%s">%s</a>' % (href, self.textlabel))
    label_link.allow_tags = True

    @property
    def local_filename(self):
        """Where is the file stored regardless of source"""
        if unicode(self.image_file):
            return os.path.join(settings.MEDIA_ROOT,
                self.image_file.field.upload_to,
                unicode(self.image_file.file))
        else:
            return None

    def thumb(self):
        # 44x44 thumbnail tag for admin list display.
        im = get_thumbnail(self.local_filename, '44x44',
            quality=85, pad=True)
        return mark_safe('<img alt="%s" src="%s" />' % (
            strip_tags(self.caption), im.url))
    thumb.allow_tags = True

    @property
    def local_vadar_filename(self):
        """Where should this image be stored if it can be retrieved?"""
        if self.image_id:
            return "%s%s/%s.jpg" % (settings.MEDIA_ROOT,
                self.image_file.field.upload_to,
                unicode(self.image_id))
        else:
            raise Exception('No Image ID set')

    def store_vadar_image(self):
        # Download the image from the media server into local_vadar_filename.
        # Returns True on success, False on any download/type failure (the
        # failure path also clears image_file).  Python 2 `except X, e` syntax.
        # create the url and the request
        image_url = 'http://%s/media/thira/collection_images/%s/%s.jpg' % \
            (COLLECTIONS_API_MEDIA_SERVER, self.image_id[:6], self.image_id)
        req = urllib2.Request(image_url)
        # Open the url
        try:
            logging.info("downloading " + image_url)
            f = urllib2.urlopen(req)
            meta = f.info()
            if meta.type == 'image/jpeg':
                # Open our local file for writing
                local_file = open(self.local_vadar_filename, "wb")
                # Write to our local file
                local_file.write(f.read())
                local_file.close()
                return True
            else:
                logging.error("Image Error: Wrong type %s" % (meta.type))
                return False
        # handle errors
        except urllib2.HTTPError, e:
            logging.error("HTTP Error: %s %s" % (e.code, image_url))
            self.image_file = None
            return False
        except httplib.BadStatusLine, e:
            logging.error("HTTP Bad Status: %s %s" % (e.reason, image_url))
            self.image_file = None
            return False
        except urllib2.URLError, e:
            logging.error("URL Error: %s %s" % (e.reason, image_url))
            self.image_file = None
            return False
class DigitalLabel(BaseScreen):
    # A screen presenting an ordered set of museum objects.
    museumobjects = models.ManyToManyField(MuseumObject, through='DigitalLabelObject')
class Portal(BaseScreen):
    # A screen presenting both museum objects and free-text labels.
    museumobjects = models.ManyToManyField(MuseumObject, through='PortalObject')
    textlabels = models.ManyToManyField(TextLabel, through='PortalTextLabel')

    def _Labels(self):
        # Number of related text labels (admin list column).
        return self.textlabels.count()
class BaseRelation(models.Model):
    # Abstract ordered through-model base; subclasses add the endpoints.
    position = models.PositiveIntegerField(null=False, default=1)

    class Meta:
        ordering = ['position']

    def target(self):
        # Subclasses return the non-screen side of the relation.
        raise NotImplementedError()
class BaseObjectRelation(BaseRelation):
    # Ordered relation from a screen to a MuseumObject.
    museumobject = models.ForeignKey(MuseumObject)

    def target(self):
        return self.museumobject

    class Meta:
        abstract = True
class DigitalLabelObject(BaseObjectRelation):
    # Through-model linking a DigitalLabel to one of its objects.
    digitallabel = models.ForeignKey(DigitalLabel)
    gateway_object = models.BooleanField(default=False)
class PortalObject(BaseObjectRelation):
    # Through-model linking a Portal to one of its objects.
    portal = models.ForeignKey(Portal)
class PortalTextLabel(BaseRelation):
    # Through-model linking a Portal to one of its text labels.
    portal = models.ForeignKey(Portal)
    textlabel = models.ForeignKey(TextLabel)
    biography = models.BooleanField(default=False)

    def target(self):
        return self.textlabel
# Signal wiring.  NOTE(review): imports kept at the bottom of the module,
# presumably to avoid a circular import with labels.signals — confirm.
from django.db.models.signals import pre_save, post_save, m2m_changed
from labels.signals import get_api_data, get_related_api_data, \
    create_thumbnails, timeout_thumbnails
# Refresh API data around MuseumObject saves, regenerate thumbnails when an
# Image is saved, and invalidate them when a screen's timeout images change.
pre_save.connect(get_api_data, MuseumObject)
post_save.connect(get_related_api_data, MuseumObject)
post_save.connect(create_thumbnails, Image)
m2m_changed.connect(timeout_thumbnails, sender=BaseScreen.timeout_images.through)
|
wang1986one/SPlayer | refs/heads/master | Thirdparty/jsoncpp/test/generate_expected.py | 257 | import glob
import os.path
for path in glob.glob( '*.json' ):
text = file(path,'rt').read()
target = os.path.splitext(path)[0] + '.expected'
if os.path.exists( target ):
print 'skipping:', target
else:
print 'creating:', target
file(target,'wt').write(text)
|
laurent-george/weboob | refs/heads/master | modules/citibank/__init__.py | 7 | # -*- coding: utf-8 -*-
# Copyright(C) 2014 Oleg Plakhotniuk
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from .module import CitibankModule
__all__ = ['CitibankModule']
|
bobcyw/django | refs/heads/master | tests/validation/test_picklable.py | 576 | import pickle
from unittest import TestCase
from django.core.exceptions import ValidationError
class PickableValidationErrorTestCase(TestCase):
    """Regression tests: ValidationError must survive a pickle round trip."""

    @staticmethod
    def _roundtrip(err):
        """Pickle *err* and return the unpickled copy."""
        return pickle.loads(pickle.dumps(err))

    def test_validationerror_is_picklable(self):
        # Single message with a code, pickled directly...
        err = ValidationError('a', code='something')
        copy = self._roundtrip(err)
        self.assertIs(copy, copy.error_list[0])
        self.assertEqual(err.message, copy.message)
        self.assertEqual(err.code, copy.code)

        # ...and pickled after wrapping in another ValidationError.
        err = ValidationError('a', code='something')
        copy = self._roundtrip(ValidationError(err))
        self.assertIs(copy, copy.error_list[0])
        self.assertEqual(err.message, copy.message)
        self.assertEqual(err.code, copy.code)

        # A list of plain messages, direct and wrapped.
        err = ValidationError(['a', 'b'])
        copy = self._roundtrip(err)
        self.assertEqual(err.error_list[0].message, copy.error_list[0].message)
        self.assertEqual(err.error_list[1].message, copy.error_list[1].message)

        err = ValidationError(['a', 'b'])
        copy = self._roundtrip(ValidationError(err))
        self.assertEqual(err.error_list[0].message, copy.error_list[0].message)
        self.assertEqual(err.error_list[1].message, copy.error_list[1].message)

        # A list of nested ValidationErrors.
        err = ValidationError([ValidationError('a'), ValidationError('b')])
        copy = self._roundtrip(err)
        self.assertIs(copy.args[0][0], copy.error_list[0])
        self.assertEqual(err.error_list[0].message, copy.error_list[0].message)
        self.assertEqual(err.error_list[1].message, copy.error_list[1].message)

        # A dict mapping field names to message lists.
        message_dict = {'field1': ['a', 'b'], 'field2': ['c', 'd']}
        err = ValidationError(message_dict)
        copy = self._roundtrip(err)
        self.assertEqual(copy.message_dict, message_dict)
|
tareqak/tareq-lisp | refs/heads/master | chapters/.ycm_extra_conf.py | 2 | # Generated by YCM Generator at 2015-10-30 02:12:26.660952
# This file is NOT licensed under the GPLv3, which is the license for the rest
# of YouCompleteMe.
#
# Here's the license text for this file:
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org/>
import os
import ycm_core
# Fallback compile flags used when no compilation database is configured:
# treat files as C99 with warnings promoted to errors.
flags = [
    '-x',
    'c',
    '-Wall',
    '-Werror',
    '-std=c99',
]

# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# You can get CMake to generate this file for you by adding:
#   set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
# to your CMakeLists.txt file.
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
compilation_database_folder = ''

if os.path.exists( compilation_database_folder ):
    database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
    # No database configured; FlagsForFile() falls back to 'flags' above.
    database = None

# Extensions probed when mapping a header onto a sibling source file.
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def DirectoryOfThisScript():
    """Return the absolute path of the directory containing this file."""
    here = os.path.abspath(__file__)
    return os.path.dirname(here)
def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
    """Return a copy of *flags* with relative include paths rooted at
    *working_directory*.

    Handles both the two-token form ('-I', 'path') and the fused form
    ('-Ipath'); with no working directory the flags are returned unchanged.
    Empty flag strings are dropped.
    """
    if not working_directory:
        return list(flags)
    path_flags = ['-isystem', '-I', '-iquote', '--sysroot=']
    absolute_flags = []
    expect_path = False
    for flag in flags:
        rewritten = flag
        if expect_path:
            # Previous token was a bare path flag: this token is its path.
            expect_path = False
            if not flag.startswith('/'):
                rewritten = os.path.join(working_directory, flag)
        for prefix in path_flags:
            if flag == prefix:
                expect_path = True
                break
            if flag.startswith(prefix):
                relative = flag[len(prefix):]
                rewritten = prefix + os.path.join(working_directory, relative)
                break
        if rewritten:
            absolute_flags.append(rewritten)
    return absolute_flags
def IsHeaderFile(filename):
    """True if *filename* carries a C/C++ header extension."""
    _, extension = os.path.splitext(filename)
    return extension in ('.h', '.hxx', '.hpp', '.hh')
def GetCompilationInfoForFile(filename):
    """Look up compilation info, mapping headers onto a sibling source file.

    The compilation database generated by CMake has no entries for header
    files, so for a header we probe each known source extension next to it
    and return the flags of the first sibling with non-empty flags; None if
    no usable sibling exists.
    """
    if not IsHeaderFile(filename):
        return database.GetCompilationInfoForFile(filename)
    stem = os.path.splitext(filename)[0]
    for ext in SOURCE_EXTENSIONS:
        candidate = stem + ext
        if not os.path.exists(candidate):
            continue
        info = database.GetCompilationInfoForFile(candidate)
        if info.compiler_flags_:
            return info
    return None
def FlagsForFile(filename, **kwargs):
    """Entry point called by YouCompleteMe to obtain flags for *filename*."""
    if not database:
        # No compilation database: root the static flag list at this script.
        relative_to = DirectoryOfThisScript()
        return {
            'flags': MakeRelativePathsInFlagsAbsolute(flags, relative_to),
            'do_cache': True
        }
    # Bear in mind that compilation_info.compiler_flags_ does NOT return a
    # python list, but a "list-like" StringVec object.
    compilation_info = GetCompilationInfoForFile(filename)
    if not compilation_info:
        return None
    final_flags = MakeRelativePathsInFlagsAbsolute(
        compilation_info.compiler_flags_,
        compilation_info.compiler_working_dir_)
    return {
        'flags': final_flags,
        'do_cache': True
    }
|
Denisolt/Tensorflow_Chat_Bot | refs/heads/master | local/lib/python2.7/site-packages/scipy/__config__.py | 3 | # This file is generated by /private/var/folders/gw/_2jq29095y7b__wtby9dg_5h0000gn/T/pip-WgBZjV-build/-c
# It contains system_info results at the time of building this package.
__all__ = ["get_info","show"]
# Build-time BLAS/LAPACK configuration captured when the package was built
# (generated file: macOS Accelerate framework, NO_ATLAS_INFO).
lapack_opt_info={'extra_link_args': ['-Wl,-framework', '-Wl,Accelerate'], 'define_macros': [('NO_ATLAS_INFO', 3)], 'extra_compile_args': ['-msse3']}
blas_opt_info={'extra_link_args': ['-Wl,-framework', '-Wl,Accelerate'], 'define_macros': [('NO_ATLAS_INFO', 3)], 'extra_compile_args': ['-msse3', '-I/System/Library/Frameworks/vecLib.framework/Headers']}
def get_info(name):
    """Return the recorded build-info dict for *name* or *name* + '_info'.

    Falls back to an empty dict when neither key exists in this module.
    """
    module_globals = globals()
    return module_globals.get(name, module_globals.get(name + "_info", {}))
def show():
    """Print every public dict-valued global (the *_info entries) readably."""
    for name, info in globals().items():
        # Skip private names and anything that is not a plain dict.
        if name[0] == "_" or type(info) is not dict:
            continue
        print(name + ":")
        if not info:
            print("  NOT AVAILABLE")
        for key, value in info.items():
            text = str(value)
            # Very long source lists are elided to keep the output readable.
            if key == "sources" and len(text) > 200:
                text = text[:60] + " ...\n... " + text[-60:]
            print("    %s = %s" % (key, text))
|
ncdesouza/bookworm | refs/heads/master | env/lib/python2.7/site-packages/wtforms/compat.py | 96 | import sys
if sys.version_info[0] >= 3:
text_type = str
string_types = (str, )
iteritems = lambda o: o.items()
itervalues = lambda o: o.values()
izip = zip
else:
text_type = unicode
string_types = (basestring, )
iteritems = lambda o: o.iteritems()
itervalues = lambda o: o.itervalues()
from itertools import izip
def with_metaclass(meta, base=object):
    """Create an intermediate base class built with metaclass *meta*.

    Lets class definitions declare a metaclass in a way that works on both
    Python 2 and Python 3; the temporary class is named 'NewBase'.
    """
    temporary_base = meta("NewBase", (base,), {})
    return temporary_base
|
OpenAcademy-OpenStack/nova-scheduler | refs/heads/master | nova/tests/virt/baremetal/db/base.py | 53 | # Copyright (c) 2012 NTT DOCOMO, INC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Bare-metal DB test base class."""
from oslo.config import cfg
from nova import context as nova_context
from nova import test
from nova.virt.baremetal.db import migration as bm_migration
from nova.virt.baremetal.db.sqlalchemy import session as bm_session
# Module-level cache so the baremetal DB fixture is built only once per run.
_DB_CACHE = None
CONF = cfg.CONF
# Make the baremetal 'sql_connection' option available on CONF.
CONF.import_opt('sql_connection',
                'nova.virt.baremetal.db.sqlalchemy.session',
                group='baremetal')
class Database(test.Database):
    # Baremetal variant of the test database fixture; no post-migration
    # fix-ups are needed for the baremetal schema.
    def post_migrations(self):
        pass
class BMDBTestCase(test.TestCase):
    """Base test case providing an in-memory baremetal database fixture."""

    def setUp(self):
        super(BMDBTestCase, self).setUp()
        # Use a throwaway in-memory SQLite database for baremetal tables.
        self.flags(sql_connection='sqlite://', group='baremetal')
        global _DB_CACHE
        if not _DB_CACHE:
            # Build the migrated schema once and reuse it across test cases.
            _DB_CACHE = Database(bm_session, bm_migration,
                                 sql_connection=CONF.baremetal.sql_connection,
                                 sqlite_db=None,
                                 sqlite_clean_db=None)
        self.useFixture(_DB_CACHE)
        self.context = nova_context.get_admin_context()
|
iyedb/py3_fn_pipeline | refs/heads/master | pipes3.py | 1 | from __future__ import print_function
class PipelineException(Exception):
    """Raised when a pipeline stage fails with an unexpected exception."""

    def __init__(self, stage, e):
        self.stage = stage  # the callable that failed
        self.e = e          # the original exception

    def __str__(self):
        stage_name = getattr(self.stage, '__name__')
        message = "Pipeline failed at stage '%s' with exception: %s" % (
            stage_name, self.e)
        return repr(message)
class PipelineStopExc(PipelineException):
    """Raised by a stage to halt the pipeline early, with an optional result."""

    def __init__(self, stage, reason, result=None):
        super(PipelineStopExc, self).__init__(stage, None)
        self.reason = reason  # human-readable explanation of the stop
        self.result = result  # optional partial result to hand to the caller

    def __str__(self):
        stage_name = getattr(self.stage, '__name__')
        message = 'Pipeline stopped at stage %s: %s' % (stage_name, self.reason)
        return repr(message)
class Pipeline(object):
    """A singly-linked chain of callables applied left to right.

    Each node wraps one callable; ``chain`` appends a new stage at the tail
    and ``apply`` threads a value through every stage in order.
    """

    def __init__(self, func):
        self.func = func
        self.next = None  # following stage, or None at the tail

    def apply(self, *args):
        """Run this stage on *args* and forward the result down the chain.

        PipelineStopExc propagates untouched; any other exception is wrapped
        in a PipelineException naming the failing stage.
        """
        try:
            stage_result = self.func(*args)
        except PipelineStopExc:
            raise
        except (BaseException, Exception) as exc:
            raise PipelineException(self.func, exc) from exc
        if self.next is None:
            return stage_result
        return self.next.apply(stage_result)

    def chain(self, func):
        """Append *func* as the final stage; returns self for fluent use."""
        tail = self
        while tail.next is not None:
            tail = tail.next
        tail.next = Pipeline(func)
        return self

    def __call__(self, *args, **kwargs):
        # Keyword arguments are accepted but not forwarded, matching the
        # original interface where stages only receive positional values.
        return self.apply(*args)
if __name__ == '__main__':
    # Small self-demo of the pipeline combinators.
    def fun(x, y):
        return x + y

    def square(x):
        return x*x

    def inc(x):
        return x + 1

    def read_file(name):
        # raise PipelineStopExc(read_file, 'won\'t read the file', None)
        with open(name) as f:
            return f.read()

    def count_words(txt):
        return len(txt.split())

    p = Pipeline(fun)
    # fun(1, 1) -> 2, then the chained lambda increments it -> prints 3.
    print(p.chain(lambda x: x + 1).apply(1, 1))
    try:
        # Read this script and count its words through a two-stage pipeline.
        word_count = Pipeline(read_file).chain(count_words).apply('./pipes3.py')
        #word_count = Pipeline(read_file).chain(count_words)('./pipes3.py')
    except PipelineStopExc as stop:
        print(stop, stop.result)
    #except PipelineException as pex:
    #    print(pex)
    else:
        print(word_count)
|
Pakketeretet2/lammps | refs/heads/master | tools/i-pi/ipi/inputs/outputs.py | 41 | """Deals with creating the output objects.
Copyright (C) 2013, Joshua More and Michele Ceriotti
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http.//www.gnu.org/licenses/>.
Classes:
InputOutputs: Creates a list of all the output objects.
InputProperties: Deals with property output.
InputTrajectory: Deals with trajectory output.
InputCheckpoint: Deals with restart file output.
"""
import numpy as np
from copy import copy
import ipi.engine.outputs
from ipi.utils.depend import *
from ipi.utils.inputvalue import *
from ipi.engine.properties import getkey
__all__=['InputOutputs', 'InputProperties', 'InputTrajectory',
'InputCheckpoint']
class InputProperties(InputArray):
    """Simple input class to describe output for properties.

    Storage class for PropertyOutput.

    Attributes:
        filename: The name of the file to output to.
        stride: The number of steps that should be taken between outputting the
            data to file.
        flush: An integer describing how often the output streams are flushed,
            so that it doesn't wait for the buffer to fill before outputting to
            file.
    """

    default_help = """This class deals with the output of properties to one file. Between each property tag there should be an array of strings, each of which specifies one property to be output."""
    default_label = "PROPERTIES"

    # Extend the inherited InputArray attributes with output metadata.
    attribs = copy(InputArray.attribs)
    attribs["filename"] = (InputAttribute,{ "dtype" : str, "default": "out",
        "help": "A string to specify the name of the file that is output. The file name is given by 'prefix'.'filename' + format_specifier. The format specifier may also include a number if multiple similar files are output."} )
    attribs["stride"] = (InputAttribute,{ "dtype" : int, "default": 1,
        "help": "The number of steps between successive writes." } )
    attribs["flush"] = (InputAttribute, {"dtype" : int, "default" : 1,
        "help" : "How often should streams be flushed. 1 means each time, zero means never." })

    def __init__(self, help=None, default=None, dtype=None, dimension=None):
        """Initializes InputProperties.

        Just calls the parent initialization function with appropriate arguments.
        """
        # The dtype is forced to str: properties are named by string.
        super(InputProperties,self).__init__(help=help, default=default, dtype=str, dimension=dimension)

    def fetch(self):
        """Returns a PropertyOutput object built from the parsed fields."""
        return ipi.engine.outputs.PropertyOutput(filename=self.filename.fetch(),
            stride=self.stride.fetch(), flush=self.flush.fetch(), outlist=super(InputProperties,self).fetch())

    def store(self, prop):
        """Stores a PropertyOutput object."""
        super(InputProperties,self).store(prop.outlist)
        self.stride.store(prop.stride)
        self.flush.store(prop.flush)
        self.filename.store(prop.filename)

    def check(self):
        """Checks for optional parameters."""
        super(InputProperties,self).check()
        if self.stride.fetch() < 0:
            raise ValueError("The stride length for the properties file output must be positive.")
class InputTrajectory(InputValue):
    """Simple input class to describe output for trajectories.

    Storage class for TrajectoryOutput.

    Attributes:
        filename: The (base) name of the file to output to.
        stride: The number of steps that should be taken between outputting the
            data to file.
        format: The format of the trajectory output file.
        cell_units: The units that the cell parameters are given in.
        bead: If the trajectory is a per-bead property, this can be used to
            specify a single bead to output. If negative, it defaults to
            the centroid.
        flush: An integer describing how often the output streams are flushed,
            so that it doesn't wait for the buffer to fill before outputting to
            file.
    """

    default_help = """This class defines how one trajectory file should be output. Between each trajectory tag one string should be given, which specifies what data is to be output."""
    default_label = "TRAJECTORY"

    # Extend the inherited InputValue attributes with trajectory metadata.
    attribs = copy(InputValue.attribs)
    attribs["filename"] = (InputAttribute,{ "dtype" : str, "default": "traj",
        "help": "A string to specify the name of the file that is output. The file name is given by 'prefix'.'filename' + format_specifier. The format specifier may also include a number if multiple similar files are output."} )
    attribs["stride"] = (InputAttribute,{ "dtype" : int, "default": 1,
        "help": "The number of steps between successive writes." } )
    attribs["format"] = (InputAttribute,{ "dtype" : str, "default": "xyz",
        "help": "The output file format.",
        "options": ['xyz', 'pdb'] } )
    attribs["cell_units"] = (InputAttribute,{ "dtype" : str, "default": "",
        "help": "The units for the cell dimensions." } )
    attribs["bead"] = (InputAttribute,{ "dtype" : int, "default": -1,
        "help": "Print out only the specified bead. A negative value means print all." } )
    attribs["flush"] = (InputAttribute, {"dtype" : int, "default" : 1,
        "help" : "How often should streams be flushed. 1 means each time, zero means never." })

    def __init__(self, help=None, default=None, dtype=None, dimension=None):
        """Initializes InputTrajectory.

        Just calls the parent initialization function with appropriate arguments.
        """
        # The dtype is forced to str: the trajectory is named by string.
        super(InputTrajectory,self).__init__(help=help, default=default, dtype=str, dimension=dimension)

    def fetch(self):
        """Returns a TrajectoryOutput object built from the parsed fields."""
        return ipi.engine.outputs.TrajectoryOutput(filename=self.filename.fetch(), stride=self.stride.fetch(),
            flush=self.flush.fetch(), what=super(InputTrajectory,self).fetch(),
            format=self.format.fetch(), cell_units=self.cell_units.fetch(), ibead=self.bead.fetch())

    def store(self, traj):
        """Stores a PropertyOutput object."""
        super(InputTrajectory,self).store(traj.what)
        self.stride.store(traj.stride)
        self.flush.store(traj.flush)
        self.filename.store(traj.filename)
        self.format.store(traj.format)
        self.cell_units.store(traj.cell_units)
        self.bead.store(traj.ibead)

    def check(self):
        """Checks for optional parameters."""
        super(InputTrajectory,self).check()
        if self.stride.fetch() < 0:
            raise ValueError("The stride length for the trajectory file output must be positive.")
class InputCheckpoint(InputValue):
   """Simple input class to describe output of checkpoint files.

   Storage class for CheckpointOutput.

   Attributes:
      filename: The (base) name of the file to output to.
      stride: The number of steps that should be taken between outputting the
         data to file.
      overwrite: whether checkpoints should be overwritten, or multiple
         files output.
   """

   default_help = """This class defines how a checkpoint file should be output. Optionally, between the checkpoint tags, you can specify one integer giving the current step of the simulation. By default this integer will be zero."""
   default_label = "CHECKPOINT"

   attribs = copy(InputValue.attribs)
   attribs["filename"] = (InputAttribute, {"dtype": str, "default": "restart",
         "help": "A string to specify the name of the file that is output. The file name is given by 'prefix'.'filename' + format_specifier. The format specifier may also include a number if multiple similar files are output."})
   attribs["stride"] = (InputAttribute, {"dtype": int, "default": 1,
         "help": "The number of steps between successive writes."})
   attribs["overwrite"] = (InputAttribute, {"dtype": bool, "default": True,
         "help": "This specifies whether or not each consecutive checkpoint file will overwrite the old one."})

   def __init__(self, help=None, default=None, dtype=None, dimension=None):
      """Initializes InputCheckpoint.

      Just calls the parent initialization function with appropriate arguments.
      The dtype is forced to int since the (optional) tag data is the step.
      """

      super(InputCheckpoint, self).__init__(help=help, default=default, dtype=int, dimension=dimension)

   def fetch(self):
      """Returns a CheckpointOutput object."""

      step = super(InputCheckpoint, self).fetch()
      return ipi.engine.outputs.CheckpointOutput(self.filename.fetch(), self.stride.fetch(), self.overwrite.fetch(), step=step)

   def parse(self, xml=None, text=""):
      """Overwrites the standard parse function so that we can specify this tag
      in the input without any data.

      We can use the syntax <checkpoint /> to do this

      Args:
         xml: An xml node containing all the data for the parent tag.
         text: The data to read the data from. Will be None if we have not
            specified any data.
      """

      # Quick hack to allow an empty element: if parsing the (absent) data
      # fails, fall back to step 0. Catch Exception instead of a bare except
      # so KeyboardInterrupt/SystemExit still propagate.
      try:
         super(InputCheckpoint, self).parse(xml, text)
      except Exception:  # TODO narrow this down to the actual parse error type
         self.value = 0  # This could hide actual errors, at least in theory.

   def store(self, chk):
      """Stores a CheckpointOutput object."""

      super(InputCheckpoint, self).store(chk.step)
      self.stride.store(chk.stride)
      self.filename.store(chk.filename)
      self.overwrite.store(chk.overwrite)

   def check(self):
      """Checks for optional parameters."""

      super(InputCheckpoint, self).check()
      # Zero passes this check; only negative strides are rejected, hence
      # "non-negative" in the message.
      if self.stride.fetch() < 0:
         raise ValueError("The stride length for the checkpoint file output must be non-negative.")
class InputOutputs(Input):
   """List of outputs input class.

   An example of a dynamic input class: a variable number of tags might be
   present, corresponding to different output requests. This allows for
   instance to print multiple property outputs, with different content
   and/or output frequency.

   Attributes:
      prefix: A string that will be prepended to all output files from this
         simulation.

   Dynamic fields:
      trajectory: Specifies a trajectory to be output.
      properties: Specifies some properties to be output.
      checkpoint: Specifies a checkpoint file to be output.
   """

   attribs = {"prefix": (InputAttribute, {"dtype": str,
         "default": "i-pi",
         "help": "A string that will be prepended to each output file name. The file name is given by 'prefix'.'filename' + format_specifier. The format specifier may also include a number if multiple similar files are output."})
   }

   dynamic = {
      "properties": (InputProperties, {"help": "Each of the properties tags specify how to create a file in which one or more properties are written, one line per frame. "}),
      "trajectory": (InputTrajectory, {"help": "Each of the trajectory tags specify how to create a trajectory file, containing a list of per-atom coordinate properties. "}),
      "checkpoint": (InputCheckpoint, {"help": "Each of the checkpoint tags specify how to create a checkpoint file, which can be used to restart a simulation. "}),
   }

   default_help = """This class defines how properties, trajectories and checkpoints should be output during the simulation. May contain zero, one or many instances of properties, trajectory or checkpoint tags, each giving instructions on how one output file should be created and managed."""
   default_label = "OUTPUTS"

   @classmethod
   def make_default(cls):
      """Used to make the default value of the outputs class for use when no
      output is specified.

      Needed since this is a fairly complicated default, with many mutable
      objects, and the default has to be generated by a function that does not
      use any mutable objects as arguments.
      """

      return [ipi.engine.outputs.PropertyOutput(filename="i-pi.md", stride=10, outlist=["time", "step", "conserved", "temperature", "potential", "kinetic_cv"]),
            ipi.engine.outputs.TrajectoryOutput(filename="i-pi.pos", stride=100, what="positions", format="xyz"),
            ipi.engine.outputs.CheckpointOutput(filename="i-pi.checkpoint", stride=1000, overwrite=True)]

   def fetch(self):
      """Returns a list of the output objects included in this dynamic
      container.

      Returns:
         A list of output objects (one per dynamic tag), with the global
         prefix already folded into each file name.
      """

      super(InputOutputs, self).fetch()
      outlist = [p.fetch() for (n, p) in self.extra]
      prefix = self.prefix.fetch()
      if prefix != "":
         # Fold the global prefix into each output's file name.
         for p in outlist:
            p.filename = prefix + "." + p.filename
      return outlist

   def store(self, plist):
      """Stores a list of the output objects, creating a sequence of
      dynamic containers.

      Args:
         plist: A list of output objects (PropertyOutput, TrajectoryOutput
            or CheckpointOutput) to be wrapped in input containers.
      """

      super(InputOutputs, self).store()
      self.extra = []
      self.prefix.store("")
      # Wrap each engine output object in the matching input class.
      for el in plist:
         if isinstance(el, ipi.engine.outputs.PropertyOutput):
            ip = InputProperties()
            ip.store(el)
            self.extra.append(("properties", ip))
         elif isinstance(el, ipi.engine.outputs.TrajectoryOutput):
            ip = InputTrajectory()
            ip.store(el)
            self.extra.append(("trajectory", ip))
         elif isinstance(el, ipi.engine.outputs.CheckpointOutput):
            ip = InputCheckpoint()
            ip.store(el)
            self.extra.append(("checkpoint", ip))
|
angelapper/edx-platform | refs/heads/master | common/lib/calc/setup.py | 19 | from setuptools import setup
# Packaging metadata for the 'calc' math-expression library.
setup(
    name="calc",
    version="0.2",
    packages=["calc"],
    # Exact pins: this package is built and tested against these releases.
    install_requires=[
        "pyparsing==2.0.7",
        "numpy==1.6.2",
        "scipy==0.14.0",
    ],
)
|
fernandoacorreia/DjangoWAWSLogging | refs/heads/master | DjangoWAWSLogging/env/Lib/site-packages/pywin32-218-py2.7-win32.egg/win32comext/axdebug/documents.py | 18 | """ Management of documents for AXDebugging.
"""
import axdebug, gateways
import pythoncom
from util import _wrap, _wrap_remove, RaiseNotImpl, trace
from win32com.server.util import unwrap
import codecontainer
import contexts
from win32com.server.exception import Exception
import win32api, winerror, os, string, sys
#def trace(*args):
# pass
def GetGoodFileName(fname):
    """Canonicalize a debugger file name.

    Pseudo-names such as "<string>" are returned untouched; real paths are
    expanded to their full form so that later dictionary lookups compare
    consistently.
    """
    if fname[0] == "<":
        return fname
    return win32api.GetFullPathName(fname)
class DebugDocumentProvider(gateways.DebugDocumentProvider):
    """Trivial IDebugDocumentProvider that hands out a pre-built document."""
    def __init__(self, doc):
        # doc: the document object this provider wraps and returns.
        self.doc = doc
    def GetName(self, dnt):
        # Delegate name queries straight to the wrapped document.
        return self.doc.GetName(dnt)
    def GetDocumentClassId(self):
        return self.doc.GetDocumentClassId()
    def GetDocument(self):
        # The document already exists - no lazy creation needed.
        return self.doc
class DebugDocumentText(gateways.DebugDocumentInfo, gateways.DebugDocumentText, gateways.DebugDocument):
    """IDebugDocumentText implementation backed by a code container.

    Aggregates the Info/Text/Document gateway interfaces and forwards all
    text-related queries to the underlying codeContainer object.
    """
    _com_interfaces_ = gateways.DebugDocumentInfo._com_interfaces_ + \
                       gateways.DebugDocumentText._com_interfaces_ + \
                       gateways.DebugDocument._com_interfaces_
    _public_methods_ = gateways.DebugDocumentInfo._public_methods_ + \
                       gateways.DebugDocumentText._public_methods_ + \
                       gateways.DebugDocument._public_methods_
    # A class which implements a DebugDocumentText, using the functionality
    # provided by a codeContainer
    def __init__(self, codeContainer):
        gateways.DebugDocumentText.__init__(self)
        gateways.DebugDocumentInfo.__init__(self)
        gateways.DebugDocument.__init__(self)
        self.codeContainer = codeContainer
    def _Close(self):
        # Drop references so the container (and its contexts) can be released.
        self.docContexts = None
        # self.codeContainer._Close()
        self.codeContainer = None
    # IDebugDocumentInfo
    def GetName(self, dnt):
        # dnt selects which flavour of name is requested; the container decides.
        return self.codeContainer.GetName(dnt)
    def GetDocumentClassId(self):
        # Fixed CLSID returned for all documents of this type.
        return "{DF630910-1C1D-11d0-AE36-8C0F5E000000}"
    # IDebugDocument has no methods!
    #
    # IDebugDocumentText methods.
    # def GetDocumentAttributes
    def GetSize(self):
        # trace("GetSize")
        # Returns (number of lines, number of characters) of the source text.
        return self.codeContainer.GetNumLines(), self.codeContainer.GetNumChars()
    def GetPositionOfLine(self, cLineNumber):
        return self.codeContainer.GetPositionOfLine(cLineNumber)
    def GetLineOfPosition(self, charPos):
        return self.codeContainer.GetLineOfPosition(charPos)
    def GetText(self, charPos, maxChars, wantAttr):
        # Get all the attributes, else the tokenizer will get upset.
        # XXX - not yet!
        # trace("GetText", charPos, maxChars, wantAttr)
        cont = self.codeContainer
        attr = cont.GetSyntaxColorAttributes()
        return cont.GetText(), attr
    def GetPositionOfContext(self, context):
        trace("GetPositionOfContext", context)
        context = unwrap(context)
        # NOTE(review): assumes the unwrapped context exposes offset/length
        # attributes - confirm against the contexts module.
        return context.offset, context.length
    # Return a DebugDocumentContext.
    def GetContextOfPosition(self, charPos, maxChars):
        # Make one
        # NOTE(review): 'doc' is assigned but never used; the _wrap result is
        # discarded - possibly vestigial.
        doc = _wrap(self, axdebug.IID_IDebugDocument)
        rc = self.codeContainer.GetCodeContextAtPosition(charPos)
        return rc.QueryInterface(axdebug.IID_IDebugDocumentContext)
class CodeContainerProvider:
    """An abstract Python class which provides code containers!

    Given a Python file name (as the debugger knows it by) this will
    return a CodeContainer interface suitable for use.

    This provides a simple base implementation that simply supports
    a dictionary of nodes and providers.
    """
    def __init__(self):
        # Maps canonical file name -> (codeContainer, debug document node).
        self.ccsAndNodes = {}
    def AddCodeContainer(self, cc, node = None):
        # Key by the canonical name so later lookups with differently spelled
        # paths still match (see GetGoodFileName).
        fname = GetGoodFileName(cc.fileName)
        self.ccsAndNodes[fname] = cc, node
    def FromFileName(self, fname):
        # Returns the container for fname, or None if it was never registered.
        cc, node = self.ccsAndNodes.get(GetGoodFileName(fname), (None, None))
        # if cc is None:
        # print "FromFileName for %s returning None" % fname
        return cc
    def Close(self):
        # NOTE: .itervalues() means this module is Python 2 only.
        for cc, node in self.ccsAndNodes.itervalues():
            try:
                # Must close the node before closing the provider
                # as node may make calls on provider (eg Reset breakpoints etc)
                if node is not None:
                    node.Close()
                cc._Close()
            except pythoncom.com_error:
                # Best-effort cleanup: a failed COM call must not stop the
                # remaining containers from being closed.
                pass
        self.ccsAndNodes = {}
|
ttfseiko/openerp-trunk | refs/heads/master | openerp/addons/website_google_map/__openerp__.py | 37 | {
    # OpenERP addon manifest for the website_google_map module: pure data,
    # read by the module loader (see keys 'depends'/'installable' below).
    'name': 'Website Google Map',
    'category': 'Hidden',
    'summary': '',
    'version': '1.0',
    'description': """
OpenERP Website Google Map
========================
        """,
    'author': 'OpenERP SA',
    # Modules that must be installed before this one.
    'depends': ['base_geolocalize', 'website_partner', 'crm_partner_assign'],
    'data': [
        'views/google_map.xml',
    ],
    'installable': True,
    'auto_install': False,
}
|
FlaPer87/django-nonrel | refs/heads/master | django/http/utils.py | 145 | """
Functions that modify an HTTP request or response in some way.
"""
# This group of functions are run as part of the response handling, after
# everything else, including all response middleware. Think of them as
# "compulsory response middleware". Be careful about what goes here, because
# it's a little fiddly to override this behavior, so they should be truly
# universally applicable.
def fix_location_header(request, response):
    """
    Ensures that we always use an absolute URI in any location header in the
    response. This is required by RFC 2616, section 14.30.

    Code constructing response objects is free to insert relative paths, as
    this function converts them to absolute paths.
    """
    # Nothing to do unless a Location header exists and a host is known.
    if 'Location' not in response or not request.get_host():
        return response
    response['Location'] = request.build_absolute_uri(response['Location'])
    return response
def conditional_content_removal(request, response):
    """
    Removes the content of responses for HEAD requests, 1xx, 204 and 304
    responses. Ensures compliance with RFC 2616, section 4.3.
    """
    status = response.status_code
    # 1xx, 204 and 304 responses must not carry a body.
    if status in (204, 304) or 100 <= status < 200:
        response.content = ''
        response['Content-Length'] = 0
    # HEAD responses keep their headers but never a body.
    if request.method == 'HEAD':
        response.content = ''
    return response
def fix_IE_for_attach(request, response):
    """
    This function will prevent Django from serving a Content-Disposition header
    while expecting the browser to cache it (only when the browser is IE). This
    leads to IE not allowing the client to download.
    """
    # Only IE user agents are affected; all other browsers keep their headers.
    if 'MSIE' not in request.META.get('HTTP_USER_AGENT', '').upper():
        return response

    # Cache-Control directives that make IE refuse the download.
    offending_headers = ('no-cache', 'no-store')
    if response.has_header('Content-Disposition'):
        try:
            del response['Pragma']
        except KeyError:
            pass
        if response.has_header('Cache-Control'):
            # Strip only the offending directives, preserving any others.
            cache_control_values = [value.strip() for value in
                                    response['Cache-Control'].split(',')
                                    if value.strip().lower() not in offending_headers]
            if not cache_control_values:
                del response['Cache-Control']
            else:
                response['Cache-Control'] = ', '.join(cache_control_values)

    return response
def fix_IE_for_vary(request, response):
    """
    This function will fix the bug reported at
    http://support.microsoft.com/kb/824847/en-us?spid=8722&sid=global
    by clearing the Vary header whenever the mime-type is not safe
    enough for Internet Explorer to handle. Poor thing.
    """
    user_agent = request.META.get('HTTP_USER_AGENT', '')
    if 'MSIE' not in user_agent.upper():
        return response

    # These mime-types that are decreed "Vary-safe" for IE:
    safe_mime_types = ('text/html', 'text/plain', 'text/sgml')

    # The MIME type is everything before the first ';' (parameters such as
    # the character set can be ignored).
    mime_type = response['Content-Type'].partition(';')[0]
    if mime_type not in safe_mime_types:
        try:
            del response['Vary']
        except KeyError:
            pass

    return response
|
mattesCZ/mtbmap | refs/heads/master | osm_data_processing/relations2lines/lineelement.py | 1 | # -*- coding: utf-8 -*-
from .osmcsymbol import OsmcSymbol
# Lower index means higher priority: iwn > nwn > rwn > lwn
# (international > national > regional > local walking networks).
networkOrder = ['iwn', 'nwn', 'rwn', 'lwn']


class LineElement:
    """Comparable wrapper pairing a route relation's OSMC symbol with its
    network tag.

    Ordering compares network importance first (see ``networkOrder``) and
    falls back to OsmcSymbol comparison when the networks tie or are both
    unrecognised.
    """

    def __init__(self, relation):
        # relation must expose 'osmcSymbol' and 'network' attributes.
        self.osmcSymbol = relation.osmcSymbol
        self.network = relation.network

    def __eq__(self, other):
        # Equality depends on the symbol only; the network is ignored.
        return self.osmcSymbol == other.osmcSymbol

    def __lt__(self, other):
        if (other.network in networkOrder) and (self.network in networkOrder):
            # Both elements have a recognised network.
            if self.network == other.network:
                return OsmcSymbol(self.osmcSymbol) < OsmcSymbol(other.osmcSymbol)
            else:
                # A lower index in networkOrder means a higher priority.
                return networkOrder.index(self.network) > networkOrder.index(other.network)
        elif (other.network not in networkOrder) and (self.network not in networkOrder):
            # Neither network is recognised: compare symbols only.
            return OsmcSymbol(self.osmcSymbol) < OsmcSymbol(other.osmcSymbol)
        elif self.network in networkOrder:
            # Only self has a recognised network: self is not less than other.
            return False
        else:
            # Only other has a recognised network: self is less than other.
            return True

    def __repr__(self):
        return repr((self.osmcSymbol, self.network))
|
idjaw/netman | refs/heads/master | tests/adapters/compliance_tests/get_interface_test.py | 1 | # Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from hamcrest import assert_that, is_
from netman.core.objects.exceptions import UnknownInterface
from netman.core.objects.interface_states import ON
from tests import has_message
from tests.adapters.compliance_test_case import ComplianceTestCase
class GetInterfaceTest(ComplianceTestCase):
    """Compliance tests for get_interface across switch adapter backends."""
    # Device fixture used when running this compliance suite.
    _dev_sample = "juniper"

    def setUp(self):
        super(GetInterfaceTest, self).setUp()

    def test_returns_an_interface(self):
        # The returned object must at least echo back the requested port name.
        interface = self.client.get_interface(self.test_ports[0].name)
        assert_that(interface.name, is_(self.test_ports[0].name))

    def test_get_interface_and_get_interfaces_are_same(self):
        # Configure one port in every supported way, then verify that the
        # single-port view agrees field by field with the bulk listing.
        self.client.add_vlan(1000, name="vlan1000")
        self.client.add_vlan(2000, name="vlan2000")
        expected = self.test_ports[0]
        self.try_to.set_access_vlan(expected.name, 1000)
        self.try_to.set_trunk_mode(expected.name)
        self.try_to.set_interface_state(expected.name, ON)
        self.try_to.set_interface_native_vlan(expected.name, 2000)
        self.try_to.set_interface_auto_negotiation_state(expected.name, ON)
        self.try_to.set_interface_mtu(expected.name, 5000)
        interface_from_single = self.client.get_interface(expected.name)
        interfaces = [inte for inte in self.client.get_interfaces() if inte.name == expected.name]
        interface_from_multiple = interfaces[0]
        assert_that(interface_from_single.name, is_(interface_from_multiple.name))
        assert_that(interface_from_single.port_mode, is_(interface_from_multiple.port_mode))
        assert_that(interface_from_single.shutdown, is_(interface_from_multiple.shutdown))
        assert_that(interface_from_single.trunk_native_vlan, is_(interface_from_multiple.trunk_native_vlan))
        assert_that(interface_from_single.trunk_vlans, is_(interface_from_multiple.trunk_vlans))
        assert_that(interface_from_single.auto_negotiation, is_(interface_from_multiple.auto_negotiation))
        assert_that(interface_from_single.mtu, is_(interface_from_multiple.mtu))

    def test_getinterface_nonexistent_raises(self):
        # Asking for an unknown port must raise with a descriptive message.
        with self.assertRaises(UnknownInterface)as expect:
            self.client.get_interface('ethernet 1/nonexistent2000')
        assert_that(expect.exception, has_message("Unknown interface ethernet 1/nonexistent2000"))

    def tearDown(self):
        # Undo everything the big round-trip test configured.
        self.janitor.unset_interface_access_vlan(self.test_ports[0].name)
        self.janitor.unset_interface_native_vlan(self.test_ports[0].name)
        self.janitor.set_access_mode(self.test_ports[0].name)
        self.janitor.remove_vlan(1000)
        self.janitor.remove_vlan(2000)
        super(GetInterfaceTest, self).tearDown()
|
yitian134/chromium | refs/heads/master | third_party/tlslite/tlslite/HandshakeSettings.py | 359 | """Class for setting handshake parameters."""
from constants import CertificateType
from utils import cryptomath
from utils import cipherfactory
class HandshakeSettings:
    """This class encapsulates various parameters that can be used with
    a TLS handshake.
    @sort: minKeySize, maxKeySize, cipherNames, certificateTypes,
    minVersion, maxVersion

    @type minKeySize: int
    @ivar minKeySize: The minimum bit length for asymmetric keys.

    If the other party tries to use SRP, RSA, or Diffie-Hellman
    parameters smaller than this length, an alert will be
    signalled. The default is 1023.

    @type maxKeySize: int
    @ivar maxKeySize: The maximum bit length for asymmetric keys.

    If the other party tries to use SRP, RSA, or Diffie-Hellman
    parameters larger than this length, an alert will be signalled.
    The default is 8193.

    @type cipherNames: list
    @ivar cipherNames: The allowed ciphers, in order of preference.

    The allowed values in this list are 'aes256', 'aes128', '3des', and
    'rc4'. If these settings are used with a client handshake, they
    determine the order of the ciphersuites offered in the ClientHello
    message.

    If these settings are used with a server handshake, the server will
    choose whichever ciphersuite matches the earliest entry in this
    list.

    NOTE: If '3des' is used in this list, but TLS Lite can't find an
    add-on library that supports 3DES, then '3des' will be silently
    removed.

    The default value is ['aes256', 'aes128', '3des', 'rc4'].

    @type certificateTypes: list
    @ivar certificateTypes: The allowed certificate types, in order of
    preference.

    The allowed values in this list are 'x509' and 'cryptoID'. This
    list is only used with a client handshake. The client will
    advertise to the server which certificate types are supported, and
    will check that the server uses one of the appropriate types.

    NOTE: If 'cryptoID' is used in this list, but cryptoIDlib is not
    installed, then 'cryptoID' will be silently removed.

    @type minVersion: tuple
    @ivar minVersion: The minimum allowed SSL/TLS version.

    This variable can be set to (3,0) for SSL 3.0, (3,1) for
    TLS 1.0, or (3,2) for TLS 1.1. If the other party wishes to
    use a lower version, a protocol_version alert will be signalled.
    The default is (3,0).

    @type maxVersion: tuple
    @ivar maxVersion: The maximum allowed SSL/TLS version.

    This variable can be set to (3,0) for SSL 3.0, (3,1) for
    TLS 1.0, or (3,2) for TLS 1.1. If the other party wishes to
    use a higher version, a protocol_version alert will be signalled.
    The default is (3,2). (WARNING: Some servers may (improperly)
    reject clients which offer support for TLS 1.1. In this case,
    try lowering maxVersion to (3,1)).
    """
    def __init__(self):
        """Populates every setting with its documented default value."""
        self.minKeySize = 1023
        self.maxKeySize = 8193
        self.cipherNames = ["aes256", "aes128", "3des", "rc4"]
        self.cipherImplementations = ["cryptlib", "openssl", "pycrypto",
                                      "python"]
        self.certificateTypes = ["x509", "cryptoID"]
        self.minVersion = (3, 0)
        self.maxVersion = (3, 2)

    # Filters out options that are not supported
    def _filter(self):
        """Returns a copy of these settings with unsupported options removed.

        Raises ValueError if a filtered list would be empty or any setting
        is out of range or inconsistent.
        """
        other = HandshakeSettings()
        other.minKeySize = self.minKeySize
        other.maxKeySize = self.maxKeySize
        other.cipherNames = self.cipherNames
        other.cipherImplementations = self.cipherImplementations
        other.certificateTypes = self.certificateTypes
        other.minVersion = self.minVersion
        other.maxVersion = self.maxVersion

        # Silently drop 3DES when no add-on library provides it.
        if not cipherfactory.tripleDESPresent:
            other.cipherNames = [e for e in self.cipherNames if e != "3des"]
        if not other.cipherNames:
            raise ValueError("No supported ciphers")

        # Silently drop cryptoID certificates when cryptoIDlib is missing.
        try:
            import cryptoIDlib
        except ImportError:
            other.certificateTypes = [e for e in self.certificateTypes
                                      if e != "cryptoID"]
        if not other.certificateTypes:
            raise ValueError("No supported certificate types")

        # Drop cipher implementations whose backing library failed to load.
        if not cryptomath.cryptlibpyLoaded:
            other.cipherImplementations = [e for e in
                self.cipherImplementations if e != "cryptlib"]
        if not cryptomath.m2cryptoLoaded:
            other.cipherImplementations = [e for e in
                other.cipherImplementations if e != "openssl"]
        if not cryptomath.pycryptoLoaded:
            other.cipherImplementations = [e for e in
                other.cipherImplementations if e != "pycrypto"]
        if not other.cipherImplementations:
            raise ValueError("No supported cipher implementations")

        # Sanity-check the numeric and enumerated settings.
        if other.minKeySize < 512:
            raise ValueError("minKeySize too small")
        if other.minKeySize > 16384:
            raise ValueError("minKeySize too large")
        if other.maxKeySize < 512:
            raise ValueError("maxKeySize too small")
        if other.maxKeySize > 16384:
            raise ValueError("maxKeySize too large")
        for s in other.cipherNames:
            if s not in ("aes256", "aes128", "rc4", "3des"):
                raise ValueError("Unknown cipher name: '%s'" % s)
        for s in other.cipherImplementations:
            if s not in ("cryptlib", "openssl", "python", "pycrypto"):
                raise ValueError("Unknown cipher implementation: '%s'" % s)
        for s in other.certificateTypes:
            if s not in ("x509", "cryptoID"):
                raise ValueError("Unknown certificate type: '%s'" % s)
        if other.minVersion > other.maxVersion:
            raise ValueError("Versions set incorrectly")
        if other.minVersion not in ((3, 0), (3, 1), (3, 2)):
            raise ValueError("minVersion set incorrectly")
        if other.maxVersion not in ((3, 0), (3, 1), (3, 2)):
            raise ValueError("maxVersion set incorrectly")
        return other

    def _getCertificateTypes(self):
        """Maps the textual certificateTypes to CertificateType constants."""
        l = []
        for ct in self.certificateTypes:
            if ct == "x509":
                l.append(CertificateType.x509)
            elif ct == "cryptoID":
                l.append(CertificateType.cryptoID)
            else:
                # _filter() rejects anything else, so this is unreachable
                # for validated settings.
                raise AssertionError()
        return l
|
autokey-py3/autokey | refs/heads/master | test/configurationmanagertest.py | 53 | import unittest
import lib.configurationmanager as conf
from lib.phrase import *
CONFIG_FILE = "../../config/abbr.ini"  # legacy sample config parsed by the tests below
class LegacyImporterTest(unittest.TestCase):
    """Exercises LegacyImporter against the legacy abbr.ini sample config."""
    def setUp(self):
        # Each test gets a freshly parsed copy of the legacy config file.
        self.importer = conf.LegacyImporter()
        self.importer.load_config(CONFIG_FILE)

    def testGlobalSettings(self):
        # Test old global defaults using a phrase that has no custom options defined
        # Locate otoh phrase
        otohPhrase = None
        for phrase in self.importer.phrases:
            if phrase.abbreviation == "otoh":
                otohPhrase = phrase
                break
        self.assert_(otohPhrase is not None)
        # All flags must come out as the legacy global defaults.
        self.assertEqual(otohPhrase.immediate, False)
        self.assertEqual(otohPhrase.ignoreCase, False)
        self.assertEqual(otohPhrase.matchCase, False)
        self.assertEqual(otohPhrase.backspace, True)
        self.assertEqual(otohPhrase.omitTrigger, False)
        self.assertEqual(otohPhrase.triggerInside, False)

    def testPhraseCount(self):
        # The sample config contains exactly 23 phrases.
        self.assertEqual(len(self.importer.phrases), 23)

    def testPhrase(self):
        # Locate brb phrase
        brbPhrase = None
        for phrase in self.importer.phrases:
            if phrase.abbreviation == "brb":
                brbPhrase = phrase
                break
        self.assert_(brbPhrase is not None)
        # All per-phrase fields must survive the import unchanged.
        self.assertEqual(brbPhrase.phrase, "be right back")
        self.assertEqual(brbPhrase.description, "be right back")
        self.assertEqual(brbPhrase.mode, PhraseMode.ABBREVIATION)
        self.assertEqual(brbPhrase.immediate, True)
|
afronski/grammar-generator | refs/heads/master | grammar-generator/Environment/__init__.py | 12133432 | |
lmorchard/django | refs/heads/master | django/contrib/sitemaps/management/commands/__init__.py | 12133432 | |
gengue/django | refs/heads/master | tests/proxy_models/__init__.py | 12133432 | |
nburn42/tensorflow | refs/heads/master | tensorflow/python/keras/initializers_test.py | 1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras initializers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python import keras
from tensorflow.python.ops import init_ops
from tensorflow.python.platform import test
class KerasInitializersTest(test.TestCase):
  """Statistical and serialization checks for the Keras initializers."""

  def _runner(self, init, shape, target_mean=None, target_std=None,
              target_max=None, target_min=None):
    """Samples `init` once and checks its statistics and serializability.

    Each target_* argument, when given, must be within 3e-2 of the
    corresponding statistic of the generated tensor.
    """
    variable = keras.backend.variable(init(shape))
    output = keras.backend.get_value(variable)
    # Tolerance for all statistic comparisons below.
    lim = 3e-2
    if target_std is not None:
      self.assertGreater(lim, abs(output.std() - target_std))
    if target_mean is not None:
      self.assertGreater(lim, abs(output.mean() - target_mean))
    if target_max is not None:
      self.assertGreater(lim, abs(output.max() - target_max))
    if target_min is not None:
      self.assertGreater(lim, abs(output.min() - target_min))
    # Test serialization (assumes deterministic behavior).
    config = init.get_config()
    reconstructed_init = init.__class__.from_config(config)
    variable = keras.backend.variable(reconstructed_init(shape))
    output_2 = keras.backend.get_value(variable)
    self.assertAllClose(output, output_2, atol=1e-4)

  def test_uniform(self):
    tensor_shape = (9, 6, 7)
    with self.test_session():
      self._runner(keras.initializers.RandomUniform(minval=-1,
                                                    maxval=1,
                                                    seed=124),
                   tensor_shape,
                   target_mean=0., target_max=1, target_min=-1)

  def test_normal(self):
    tensor_shape = (8, 12, 99)
    with self.test_session():
      self._runner(keras.initializers.RandomNormal(mean=0, stddev=1, seed=153),
                   tensor_shape,
                   target_mean=0., target_std=1)

  def test_truncated_normal(self):
    tensor_shape = (12, 99, 7)
    with self.test_session():
      self._runner(keras.initializers.TruncatedNormal(mean=0,
                                                      stddev=1,
                                                      seed=126),
                   tensor_shape,
                   target_mean=0., target_std=None, target_max=2)

  def test_constant(self):
    tensor_shape = (5, 6, 4)
    with self.test_session():
      self._runner(keras.initializers.Constant(2), tensor_shape,
                   target_mean=2, target_max=2, target_min=2)

  def test_lecun_uniform(self):
    tensor_shape = (5, 6, 4, 2)
    with self.test_session():
      # Expected bound follows the LeCun uniform scaling rule sqrt(3/fan_in).
      fan_in, _ = init_ops._compute_fans(tensor_shape)
      scale = np.sqrt(3. / fan_in)
      self._runner(keras.initializers.lecun_uniform(seed=123), tensor_shape,
                   target_mean=0., target_max=scale, target_min=-scale)

  def test_glorot_uniform(self):
    tensor_shape = (5, 6, 4, 2)
    with self.test_session():
      # Glorot bound: sqrt(6 / (fan_in + fan_out)).
      fan_in, fan_out = init_ops._compute_fans(tensor_shape)
      scale = np.sqrt(6. / (fan_in + fan_out))
      self._runner(keras.initializers.glorot_uniform(seed=123), tensor_shape,
                   target_mean=0., target_max=scale, target_min=-scale)

  def test_he_uniform(self):
    tensor_shape = (5, 6, 4, 2)
    with self.test_session():
      # He bound: sqrt(6 / fan_in).
      fan_in, _ = init_ops._compute_fans(tensor_shape)
      scale = np.sqrt(6. / fan_in)
      self._runner(keras.initializers.he_uniform(seed=123), tensor_shape,
                   target_mean=0., target_max=scale, target_min=-scale)

  def test_lecun_normal(self):
    tensor_shape = (5, 6, 4, 2)
    with self.test_session():
      fan_in, _ = init_ops._compute_fans(tensor_shape)
      scale = np.sqrt(1. / fan_in)
      self._runner(keras.initializers.lecun_normal(seed=123), tensor_shape,
                   target_mean=0., target_std=None, target_max=2 * scale)

  def test_glorot_normal(self):
    tensor_shape = (5, 6, 4, 2)
    with self.test_session():
      fan_in, fan_out = init_ops._compute_fans(tensor_shape)
      scale = np.sqrt(2. / (fan_in + fan_out))
      self._runner(keras.initializers.glorot_normal(seed=123), tensor_shape,
                   target_mean=0., target_std=None, target_max=2 * scale)

  def test_he_normal(self):
    tensor_shape = (5, 6, 4, 2)
    with self.test_session():
      fan_in, _ = init_ops._compute_fans(tensor_shape)
      scale = np.sqrt(2. / fan_in)
      self._runner(keras.initializers.he_normal(seed=123), tensor_shape,
                   target_mean=0., target_std=None, target_max=2 * scale)

  def test_orthogonal(self):
    tensor_shape = (20, 20)
    with self.test_session():
      self._runner(keras.initializers.orthogonal(seed=123), tensor_shape,
                   target_mean=0.)

  def test_identity(self):
    with self.test_session():
      # Identity init only supports 2D shapes; 3D must raise.
      tensor_shape = (3, 4, 5)
      with self.assertRaises(ValueError):
        self._runner(keras.initializers.identity(), tensor_shape,
                     target_mean=1. / tensor_shape[0], target_max=1.)
      tensor_shape = (3, 3)
      self._runner(keras.initializers.identity(), tensor_shape,
                   target_mean=1. / tensor_shape[0], target_max=1.)

  def test_zero(self):
    tensor_shape = (4, 5)
    with self.test_session():
      self._runner(keras.initializers.zeros(), tensor_shape,
                   target_mean=0., target_max=0.)

  def test_one(self):
    tensor_shape = (4, 5)
    with self.test_session():
      self._runner(keras.initializers.ones(), tensor_shape,
                   target_mean=1., target_max=1.)
if __name__ == '__main__':
  # Run every test case in this module via the TensorFlow test runner.
  test.main()
|
megaumi/django | refs/heads/master | tests/middleware/cond_get_urls.py | 499 | from django.conf.urls import url
from django.http import HttpResponse
urlpatterns = [
    # Single root view returning a fixed HttpResponse (middleware test fixture).
    url(r'^$', lambda request: HttpResponse('root is here')),
]
|
r0balo/pelisalacarta | refs/heads/develop | python/main-classic/channels/unsoloclic.py | 2 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Canal para unsoloclic
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import re
import urlparse
from core import config
from core import logger
from core import scrapertools
from core.item import Item
DEBUG = config.get_setting("debug")
def mainlist(item):
    """Channel entry point: seed the site root URL and delegate to novedades()."""
    logger.info("[unsoloclic.py] mainlist")
    item.url = "http://unsoloclic.info"
    return novedades(item)
def novedades(item):
    """List the latest movies from the site's index page.

    Scrapes each post block for url/title/thumbnail, and appends a
    "next page" entry when a pagination link is found.
    """
    logger.info("[unsoloclic.py] novedades")
    itemlist = []

    # Download the page
    data = scrapertools.cachePage(item.url)

    '''
    <div class="post-45732 post type-post status-publish format-standard hentry category-2012 category-blu-ray category-mkv-hd720p" id="post-45732">
    <h2 class="title"><a href="http://unsoloclic.info/2012/11/ek-tha-tiger-2012-blu-ray-720p-hd/" rel="bookmark" title="Permanent Link to Pelicula Ek Tha Tiger (2012) BLU-RAY 720p HD">Pelicula Ek Tha Tiger (2012) BLU-RAY 720p HD</a></h2>
    <div class="postdate"><img src="http://unsoloclic.info/wp-content/themes/TinyWeb/images/date.png" /> noviembre 5th, 2012
    <!--
    <img src="http://unsoloclic.info/wp-content/themes/TinyWeb/images/user.png" /> unsoloclic
    -->
    </div>
    <div class="entry">
    <p><a href="http://unsoloclic.info/2012/11/ek-tha-tiger-2012-blu-ray-720p-hd/" rel="attachment wp-att-45737"><img src="http://unsoloclic.info/wp-content/uploads/2012/11/Ek-Tha-Tiger-2012.jpg" alt="" title="Ek Tha Tiger (2012)" width="500" height="629" class="aligncenter size-full wp-image-45737" /></a></p>
    <h2 style="text-align: center;"></h2>
    <div class="readmorecontent">
    <a class="readmore" href="http://unsoloclic.info/2012/11/ek-tha-tiger-2012-blu-ray-720p-hd/" rel="bookmark" title="Permanent Link to Pelicula Ek Tha Tiger (2012) BLU-RAY 720p HD">Seguir Leyendo</a>
    </div>
    </div>
    </div><!--/post-45732-->
    '''
    '''
    <div class="post-45923 post type-post status-publish format-standard hentry category-2012 category-blu-ray category-comedia category-drama category-mkv category-mkv-hd720p category-romance tag-chris-messina tag-jenna-fischer tag-lee-kirk tag-the-giant-mechanical-man-pelicula tag-topher-grace" id="post-45923">
    <h2 class="title"><a href="http://unsoloclic.info/2012/12/the-giant-mechanical-man-2012-bluray-720p-hd/" rel="bookmark" title="Permanent Link to The Giant Mechanical Man (2012) BluRay 720p HD">The Giant Mechanical Man (2012) BluRay 720p HD</a></h2>
    <div class="postdate"><img src="http://unsoloclic.info/wp-content/themes/TinyWeb/images/date.png" /> diciembre 24th, 2012
    <!--
    <img src="http://unsoloclic.info/wp-content/themes/TinyWeb/images/user.png" /> deportv
    -->
    </div>
    <div class="entry">
    <p style="text-align: center;"><a href="http://unsoloclic.info/2012/12/the-giant-mechanical-man-2012-bluray-720p-hd/"><img class="aligncenter size-full wp-image-45924" title="Giant Michanical Man Pelicula Descargar" src="http://unsoloclic.info/wp-content/uploads/2012/12/Giant-Michanical-Man-Pelicula-Descargar.jpg" alt="" width="380" height="500" /></a></p>
    <p style="text-align: center;">
    <div class="readmorecontent">
    <a class="readmore" href="http://unsoloclic.info/2012/12/the-giant-mechanical-man-2012-bluray-720p-hd/" rel="bookmark" title="Permanent Link to The Giant Mechanical Man (2012) BluRay 720p HD">Seguir Leyendo</a>
    </div>
    </div>
    </div><!--/post-45923-->
    '''
    # One post block: capture (url, title, thumbnail)
    patron = '<div class="post[^"]+" id="post-\d+">[^<]+'
    patron += '<h2 class="title"><a href="([^"]+)" rel="bookmark" title="[^"]+">([^<]+)</a></h2>[^<]+'
    patron += '<div class="postdate">.*?</div>[^<]+'
    patron += '<div class="entry">[^<]+'
    patron += '<p[^<]+<a[^<]+<img.*?src="([^"]+)"'
    matches = re.compile(patron,re.DOTALL).findall(data)
    if DEBUG: scrapertools.printMatches(matches)

    for scrapedurl,scrapedtitle,scrapedthumbnail in matches:
        scrapedplot = ""
        if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        itemlist.append( Item(channel=item.channel, action="findvideos", title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail , plot=scrapedplot , folder=True) )

    # Pagination link ("older movies" -> next page of results)
    '''
    <a href="http://unsoloclic.info/page/2/" >« Peliculas anteriores</a>
    '''
    patron = '<a href="([^"]+)" >\«\; Peliculas anteriores</a>'
    matches = re.compile(patron,re.DOTALL).findall(data)
    if DEBUG: scrapertools.printMatches(matches)

    for match in matches:
        scrapedtitle = ">> Página siguiente"
        scrapedplot = ""
        scrapedurl = urlparse.urljoin(item.url,match)
        scrapedthumbnail = ""
        if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        itemlist.append( Item(channel=item.channel, action="novedades", title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail , plot=scrapedplot , folder=True) )

    return itemlist
def findvideos(item):
    """Find playable video links on a movie page.

    Linkbucks redirect links are matched explicitly; any other servers are
    detected generically via servertools.
    """
    logger.info("[unsoloclic.py] findvideos")

    data = scrapertools.cache_page(item.url)
    itemlist=[]

    # Sample markup matched by the pattern below:
    #<a href="http://67cfb0db.linkbucks.com"><img title="billionuploads" src="http://unsoloclic.info/wp-content/uploads/2012/11/billonuploads2.png" alt="" width="380" height="50" /></a></p>
    #<a href="http://1bd02d49.linkbucks.com"><img class="colorbox-57103" title="Freakeshare" alt="" src="http://unsoloclic.info/wp-content/uploads/2013/01/freakshare.png" width="390" height="55" /></a></p>
    patron = '<a href="(http.//[a-z0-9]+.linkbucks.c[^"]+)[^>]+><img.*?title="([^"]+)".*?src="([^"]+)"'
    matches = re.compile(patron,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)
    for url,servertag,serverthumb in matches:
        itemlist.append( Item(channel=item.channel, action="play", server="linkbucks", title=servertag+" [linkbucks]" , url=url , thumbnail=serverthumb , plot=item.plot , folder=False) )

    # Generic server detection for everything that is not linkbucks
    from core import servertools
    itemlist.extend(servertools.find_video_items(data=data))
    for videoitem in itemlist:
        if videoitem.server!="linkbucks":
            videoitem.channel=item.channel
            videoitem.action="play"
            videoitem.folder=False
            videoitem.title = "["+videoitem.server+"]"

    return itemlist
def play(item):
    """Resolve the final video URL for *item* and return playable items.

    Linkbucks (and a possibly nested adf.ly) redirects are unwrapped first;
    items for any other server are passed through unchanged.
    """
    logger.info("[unsoloclic.py] play")
    itemlist=[]

    if item.server=="linkbucks":
        logger.info("Es linkbucks")

        # Resolve the real link behind the linkbucks redirect
        from servers.decrypters import linkbucks
        location = linkbucks.get_long_url(item.url)
        logger.info("location="+location)

        # Extract the skip-ad URL when the target is an adf.ly interstitial
        if location.startswith("http://adf"):
            # Resolve the real link
            from servers.decrypters import adfly
            location = adfly.get_long_url(location)
            logger.info("location="+location)

        from core import servertools
        itemlist=servertools.find_video_items(data=location)
        for videoitem in itemlist:
            videoitem.channel=item.channel
            videoitem.folder=False
    else:
        itemlist.append(item)

    return itemlist
|
artwr/airflow | refs/heads/master | tests/contrib/operators/test_file_to_gcs.py | 6 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import datetime
import unittest
from airflow import DAG, configuration
from airflow.contrib.operators.file_to_gcs import FileToGoogleCloudStorageOperator
try:
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
mock = None
class TestFileToGcsOperator(unittest.TestCase):
    """Unit tests for FileToGoogleCloudStorageOperator."""

    # Operator kwargs shared by every test case below.
    _config = {
        'src': '/tmp/fake.csv',
        'dst': 'fake.csv',
        'bucket': 'dummy',
        'mime_type': 'application/octet-stream',
        'gzip': False
    }

    def setUp(self):
        """Build a minimal DAG to attach the operator under test to."""
        configuration.load_test_config()
        args = {
            'owner': 'airflow',
            'start_date': datetime.datetime(2017, 1, 1)
        }
        self.dag = DAG('test_dag_id', default_args=args)

    def test_init(self):
        """The constructor should copy every config value onto the operator."""
        operator = FileToGoogleCloudStorageOperator(
            task_id='file_to_gcs_operator',
            dag=self.dag,
            **self._config
        )
        self.assertEqual(operator.src, self._config['src'])
        self.assertEqual(operator.dst, self._config['dst'])
        self.assertEqual(operator.bucket, self._config['bucket'])
        self.assertEqual(operator.mime_type, self._config['mime_type'])
        self.assertEqual(operator.gzip, self._config['gzip'])

    # NOTE(review): if neither unittest.mock nor mock imported above, `mock`
    # is None and this decorator fails at class-definition time — confirm the
    # test environment always provides one of them.
    @mock.patch('airflow.contrib.operators.file_to_gcs.GoogleCloudStorageHook',
                autospec=True)
    def test_execute(self, mock_hook):
        """execute() should upload the source file once with the configured args."""
        mock_instance = mock_hook.return_value
        operator = FileToGoogleCloudStorageOperator(
            task_id='gcs_to_file_sensor',
            dag=self.dag,
            **self._config
        )
        operator.execute(None)
        mock_instance.upload.assert_called_once_with(
            bucket=self._config['bucket'],
            filename=self._config['src'],
            gzip=self._config['gzip'],
            mime_type=self._config['mime_type'],
            object=self._config['dst']
        )
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
PhilLidar-DAD/geonode | refs/heads/master | announcements/signals.py | 10 | import django.dispatch
# Signals emitted after an announcement is created/updated/deleted.
# Receivers get the announcement instance and the triggering HTTP request.
announcement_created = django.dispatch.Signal(providing_args=["announcement", "request"])
announcement_updated = django.dispatch.Signal(providing_args=["announcement", "request"])
announcement_deleted = django.dispatch.Signal(providing_args=["announcement", "request"])
|
huanpc/IoT-1 | refs/heads/master | gui/controller/.venv/lib/python3.5/site-packages/pip/_vendor/distlib/_backport/misc.py | 1428 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Backports for individual classes and functions."""
import os
import sys
__all__ = ['cache_from_source', 'callable', 'fsencode']
try:
from imp import cache_from_source
except ImportError:
def cache_from_source(py_file, debug=__debug__):
ext = debug and 'c' or 'o'
return py_file + ext
try:
    callable = callable
except NameError:
    # Python 3.0/3.1 removed the callable() builtin; emulate it with the
    # collections.Callable ABC.
    from collections import Callable

    def callable(obj):
        return isinstance(obj, Callable)
try:
    fsencode = os.fsencode
except AttributeError:
    def fsencode(filename):
        """Fallback: encode *filename* to bytes with the filesystem encoding."""
        if isinstance(filename, str):
            return filename.encode(sys.getfilesystemencoding())
        if isinstance(filename, bytes):
            return filename
        raise TypeError("expect bytes or str, not %s" %
                        type(filename).__name__)
|
Dino0631/RedRain-Bot | refs/heads/develop | cogs/lib/aiohttp/client_reqrep.py | 17 | import asyncio
import collections
import http.cookies
import io
import json
import mimetypes
import os
import sys
import traceback
import urllib.parse
import warnings
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
import aiohttp
from . import hdrs, helpers, streams
from .helpers import Timeout
from .log import client_logger
from .multipart import MultipartWriter
from .protocol import HttpMessage
from .streams import EOF_MARKER, FlowControlStreamReader
try:
import cchardet as chardet
except ImportError:
import chardet
__all__ = ('ClientRequest', 'ClientResponse')
# True on Python 3.5+, where the async context-manager protocol is available.
PY_35 = sys.version_info >= (3, 5)

# Default ports used when the URL does not specify one.
HTTP_PORT = 80
HTTPS_PORT = 443
class ClientRequest:
    """An outgoing HTTP request.

    Builds the request line, headers and body from the constructor
    arguments, then serializes them to a transport via :meth:`send`.
    """

    GET_METHODS = {hdrs.METH_GET, hdrs.METH_HEAD, hdrs.METH_OPTIONS}
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union(
        {hdrs.METH_DELETE, hdrs.METH_TRACE})

    DEFAULT_HEADERS = {
        hdrs.ACCEPT: '*/*',
        hdrs.ACCEPT_ENCODING: 'gzip, deflate',
    }

    SERVER_SOFTWARE = HttpMessage.SERVER_SOFTWARE

    body = b''
    auth = None
    response = None
    response_class = None

    _writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    # N.B.
    # Adding __del__ method with self._writer closing doesn't make sense
    # because _writer is instance method, thus it keeps a reference to self.
    # Until writer has finished finalizer will not be called.

    def __init__(self, method, url, *,
                 params=None, headers=None, skip_auto_headers=frozenset(),
                 data=None, cookies=None,
                 auth=None, encoding='utf-8',
                 version=aiohttp.HttpVersion11, compress=None,
                 chunked=None, expect100=False,
                 loop=None, response_class=None,
                 proxy=None, proxy_auth=None,
                 timeout=5*60):
        # NOTE: the update_* calls below are order-dependent (e.g. headers
        # must exist before cookies/auth/body information is merged in).
        if loop is None:
            loop = asyncio.get_event_loop()

        self.url = url
        self.method = method.upper()
        self.encoding = encoding
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.response_class = response_class or ClientResponse
        self._timeout = timeout

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        self.update_version(version)
        self.update_host(url)
        self.update_path(params)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth)

        self.update_body_from_data(data, skip_auto_headers)
        self.update_transfer_encoding()
        self.update_expect_continue(expect100)

    def update_host(self, url):
        """Update destination host, port and connection type (ssl)."""
        url_parsed = urllib.parse.urlsplit(url)

        # check for network location part
        netloc = url_parsed.netloc
        if not netloc:
            raise ValueError('Host could not be detected.')

        # get host/port
        host = url_parsed.hostname
        if not host:
            raise ValueError('Host could not be detected.')

        try:
            port = url_parsed.port
        except ValueError:
            raise ValueError(
                'Port number could not be converted.') from None

        # check domain idna encoding
        try:
            host = host.encode('idna').decode('utf-8')
            netloc = self.make_netloc(host, url_parsed.port)
        except UnicodeError:
            raise ValueError('URL has an invalid label.')

        # basic auth info
        username, password = url_parsed.username, url_parsed.password
        if username:
            self.auth = helpers.BasicAuth(username, password or '')

        # Record entire netloc for usage in host header
        self.netloc = netloc

        scheme = url_parsed.scheme
        self.ssl = scheme in ('https', 'wss')

        # set port number if it isn't already set
        if not port:
            if self.ssl:
                port = HTTPS_PORT
            else:
                port = HTTP_PORT

        self.host, self.port, self.scheme = host, port, scheme

    def make_netloc(self, host, port):
        """Return ``host`` or ``host:port`` for use in the Host header."""
        ret = host
        if port:
            ret = ret + ':' + str(port)
        return ret

    def update_version(self, version):
        """Convert request version to two elements tuple.

        parser HTTP version '1.1' => (1, 1)
        """
        if isinstance(version, str):
            v = [l.strip() for l in version.split('.', 1)]
            try:
                version = int(v[0]), int(v[1])
            except ValueError:
                raise ValueError(
                    'Can not parse http version number: {}'
                    .format(version)) from None
        self.version = version

    def update_path(self, params):
        """Build path."""
        # extract path
        scheme, netloc, path, query, fragment = urllib.parse.urlsplit(self.url)
        if not path:
            path = '/'

        if isinstance(params, collections.Mapping):
            params = list(params.items())

        if params:
            if not isinstance(params, str):
                params = urllib.parse.urlencode(params)
            if query:
                query = '%s&%s' % (query, params)
            else:
                query = params

        self.path = urllib.parse.urlunsplit(('', '', helpers.requote_uri(path),
                                             query, ''))
        self.url = urllib.parse.urlunsplit(
            (scheme, netloc, self.path, '', fragment))

    def update_headers(self, headers):
        """Update request headers."""
        self.headers = CIMultiDict()
        if headers:
            if isinstance(headers, dict):
                headers = headers.items()
            elif isinstance(headers, (MultiDictProxy, MultiDict)):
                headers = headers.items()

            for key, value in headers:
                self.headers.add(key, value)

    def update_auto_headers(self, skip_auto_headers):
        """Fill in default headers (Accept, Host, User-Agent) unless set or skipped."""
        self.skip_auto_headers = skip_auto_headers
        used_headers = set(self.headers) | skip_auto_headers

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers.add(hdr, val)

        # add host
        if hdrs.HOST not in used_headers:
            self.headers[hdrs.HOST] = self.netloc

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = self.SERVER_SOFTWARE

    def update_cookies(self, cookies):
        """Update request cookies header."""
        if not cookies:
            return

        c = http.cookies.SimpleCookie()
        if hdrs.COOKIE in self.headers:
            c.load(self.headers.get(hdrs.COOKIE, ''))
            del self.headers[hdrs.COOKIE]

        if isinstance(cookies, dict):
            cookies = cookies.items()

        for name, value in cookies:
            if isinstance(value, http.cookies.Morsel):
                c[value.key] = value.value
            else:
                c[name] = value

        self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()

    def update_content_encoding(self, data):
        """Set request content encoding."""
        if not data:
            return

        enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower()
        if enc:
            if self.compress is not False:
                self.compress = enc
                # enable chunked, no need to deal with length
                self.chunked = True
        elif self.compress:
            if not isinstance(self.compress, str):
                self.compress = 'deflate'
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length

    def update_auth(self, auth):
        """Set basic auth."""
        if auth is None:
            auth = self.auth
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            raise TypeError('BasicAuth() tuple is required instead')

        self.headers[hdrs.AUTHORIZATION] = auth.encode()

    def update_body_from_data(self, data, skip_auto_headers):
        """Set the request body and derive Content-Type/Length/chunking from it."""
        if not data:
            return

        if isinstance(data, str):
            data = data.encode(self.encoding)

        if isinstance(data, (bytes, bytearray)):
            self.body = data
            if (hdrs.CONTENT_TYPE not in self.headers and
                    hdrs.CONTENT_TYPE not in skip_auto_headers):
                self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream'
            if hdrs.CONTENT_LENGTH not in self.headers and not self.chunked:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

        elif isinstance(data, (asyncio.StreamReader, streams.StreamReader,
                               streams.DataQueue)):
            self.body = data

        elif asyncio.iscoroutine(data):
            self.body = data
            if (hdrs.CONTENT_LENGTH not in self.headers and
                    self.chunked is None):
                self.chunked = True

        elif isinstance(data, io.IOBase):
            assert not isinstance(data, io.StringIO), \
                'attempt to send text data instead of binary'
            self.body = data
            if not self.chunked and isinstance(data, io.BytesIO):
                # Not chunking if content-length can be determined
                size = len(data.getbuffer())
                self.headers[hdrs.CONTENT_LENGTH] = str(size)
                self.chunked = False
            elif not self.chunked and isinstance(data, io.BufferedReader):
                # Not chunking if content-length can be determined
                try:
                    size = os.fstat(data.fileno()).st_size - data.tell()
                    self.headers[hdrs.CONTENT_LENGTH] = str(size)
                    self.chunked = False
                except OSError:
                    # data.fileno() is not supported, e.g.
                    # io.BufferedReader(io.BytesIO(b'data'))
                    self.chunked = True
            else:
                self.chunked = True

            if hasattr(data, 'mode'):
                if data.mode == 'r':
                    raise ValueError('file {!r} should be open in binary mode'
                                     ''.format(data))
            if (hdrs.CONTENT_TYPE not in self.headers and
                    hdrs.CONTENT_TYPE not in skip_auto_headers and
                    hasattr(data, 'name')):
                mime = mimetypes.guess_type(data.name)[0]
                mime = 'application/octet-stream' if mime is None else mime
                self.headers[hdrs.CONTENT_TYPE] = mime

        elif isinstance(data, MultipartWriter):
            self.body = data.serialize()
            self.headers.update(data.headers)
            self.chunked = self.chunked or 8192

        else:
            if not isinstance(data, helpers.FormData):
                data = helpers.FormData(data)

            self.body = data(self.encoding)

            if (hdrs.CONTENT_TYPE not in self.headers and
                    hdrs.CONTENT_TYPE not in skip_auto_headers):
                self.headers[hdrs.CONTENT_TYPE] = data.content_type

            if data.is_multipart:
                self.chunked = self.chunked or 8192
            else:
                if (hdrs.CONTENT_LENGTH not in self.headers and
                        not self.chunked):
                    self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_transfer_encoding(self):
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower()

        if self.chunked:
            if hdrs.CONTENT_LENGTH in self.headers:
                del self.headers[hdrs.CONTENT_LENGTH]
            if 'chunked' not in te:
                self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'

            self.chunked = self.chunked if type(self.chunked) is int else 8192
        else:
            if 'chunked' in te:
                self.chunked = 8192
            else:
                self.chunked = None
                if hdrs.CONTENT_LENGTH not in self.headers:
                    self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_expect_continue(self, expect=False):
        """Arrange a waiter future when an 'Expect: 100-continue' handshake is used."""
        if expect:
            self.headers[hdrs.EXPECT] = '100-continue'
        elif self.headers.get(hdrs.EXPECT, '').lower() == '100-continue':
            expect = True

        if expect:
            self._continue = helpers.create_future(self.loop)

    def update_proxy(self, proxy, proxy_auth):
        """Validate and store proxy URL and credentials."""
        if proxy and not proxy.startswith('http://'):
            raise ValueError("Only http proxies are supported")
        if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
        self.proxy = proxy
        self.proxy_auth = proxy_auth

    @asyncio.coroutine
    def write_bytes(self, request, reader):
        """Support coroutines that yields bytes objects."""
        # 100 response
        if self._continue is not None:
            yield from self._continue

        try:
            if asyncio.iscoroutine(self.body):
                request.transport.set_tcp_nodelay(True)
                exc = None
                value = None
                stream = self.body

                while True:
                    try:
                        if exc is not None:
                            result = stream.throw(exc)
                        else:
                            result = stream.send(value)
                    except StopIteration as exc:
                        if isinstance(exc.value, bytes):
                            yield from request.write(exc.value, drain=True)
                        break
                    except:
                        self.response.close()
                        raise

                    if isinstance(result, asyncio.Future):
                        exc = None
                        value = None
                        try:
                            value = yield result
                        except Exception as err:
                            exc = err
                    elif isinstance(result, (bytes, bytearray)):
                        yield from request.write(result, drain=True)
                        value = None
                    else:
                        raise ValueError(
                            'Bytes object is expected, got: %s.' %
                            type(result))

            elif isinstance(self.body, (asyncio.StreamReader,
                                        streams.StreamReader)):
                request.transport.set_tcp_nodelay(True)
                chunk = yield from self.body.read(streams.DEFAULT_LIMIT)
                while chunk:
                    yield from request.write(chunk, drain=True)
                    chunk = yield from self.body.read(streams.DEFAULT_LIMIT)

            elif isinstance(self.body, streams.DataQueue):
                request.transport.set_tcp_nodelay(True)
                while True:
                    try:
                        chunk = yield from self.body.read()
                        if chunk is EOF_MARKER:
                            break
                        yield from request.write(chunk, drain=True)
                    except streams.EofStream:
                        break

            elif isinstance(self.body, io.IOBase):
                chunk = self.body.read(self.chunked)
                while chunk:
                    request.write(chunk)
                    chunk = self.body.read(self.chunked)
                request.transport.set_tcp_nodelay(True)

            else:
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body,)

                for chunk in self.body:
                    request.write(chunk)
                request.transport.set_tcp_nodelay(True)

        except Exception as exc:
            new_exc = aiohttp.ClientRequestError(
                'Can not write request body for %s' % self.url)
            new_exc.__context__ = exc
            new_exc.__cause__ = exc
            reader.set_exception(new_exc)
        else:
            assert request.transport.tcp_nodelay
            try:
                ret = request.write_eof()
                # NB: in asyncio 3.4.1+ StreamWriter.drain() is coroutine
                # see bug #170
                if (asyncio.iscoroutine(ret) or
                        isinstance(ret, asyncio.Future)):
                    yield from ret
            except Exception as exc:
                new_exc = aiohttp.ClientRequestError(
                    'Can not write request body for %s' % self.url)
                new_exc.__context__ = exc
                new_exc.__cause__ = exc
                reader.set_exception(new_exc)

        self._writer = None

    def send(self, writer, reader):
        """Write request line and headers, start the body-writer task.

        Returns the not-yet-started ClientResponse for this request.
        """
        writer.set_tcp_cork(True)
        request = aiohttp.Request(writer, self.method, self.path, self.version)

        if self.compress:
            request.add_compression_filter(self.compress)

        if self.chunked is not None:
            request.enable_chunked_encoding()
            request.add_chunking_filter(self.chunked)

        # set default content-type
        if (self.method in self.POST_METHODS and
                hdrs.CONTENT_TYPE not in self.skip_auto_headers and
                hdrs.CONTENT_TYPE not in self.headers):
            self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream'

        for k, value in self.headers.items():
            request.add_header(k, value)
        request.send_headers()

        self._writer = helpers.ensure_future(
            self.write_bytes(request, reader), loop=self.loop)

        self.response = self.response_class(
            self.method, self.url, self.host,
            writer=self._writer, continue100=self._continue,
            timeout=self._timeout)
        self.response._post_init(self.loop)
        return self.response

    @asyncio.coroutine
    def close(self):
        """Wait for the body-writer task to finish."""
        if self._writer is not None:
            try:
                yield from self._writer
            finally:
                self._writer = None

    def terminate(self):
        """Cancel the body-writer task immediately (no waiting)."""
        if self._writer is not None:
            if not self.loop.is_closed():
                self._writer.cancel()
            self._writer = None
class ClientResponse:
    """An incoming HTTP response.

    Parses the status line, headers and payload from a connection and
    exposes the payload via :meth:`read`, :meth:`text` and :meth:`json`.
    """

    # from the Status-Line of the response
    version = None  # HTTP-Version
    status = None  # Status-Code
    reason = None  # Reason-Phrase

    cookies = None  # Response cookies (Set-Cookie)
    content = None  # Payload stream

    headers = None  # Response headers, CIMultiDictProxy
    raw_headers = None  # Response raw headers, a sequence of pairs

    _connection = None  # current connection
    flow_control_class = FlowControlStreamReader  # reader flow control
    _reader = None  # input stream
    _response_parser = aiohttp.HttpResponseParser()
    _source_traceback = None
    # set up by ClientRequest after ClientResponse object creation
    # post-init stage allows to not change ctor signature
    _loop = None
    _closed = True  # to allow __del__ for non-initialized properly response

    def __init__(self, method, url, host='', *, writer=None, continue100=None,
                 timeout=5*60):
        super().__init__()

        self.method = method
        self.url = url
        self.host = host
        self._content = None
        self._writer = writer
        self._continue = continue100
        self._closed = False
        self._should_close = True  # override by message.should_close later
        self._history = ()
        self._timeout = timeout

    def _post_init(self, loop):
        """Attach the event loop (deferred so the ctor signature stays stable)."""
        self._loop = loop
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __del__(self, _warnings=warnings):
        # Best-effort cleanup; warns about responses never closed/released.
        if self._loop is None:
            return  # not started
        if self._closed:
            return
        self.close()

        _warnings.warn("Unclosed response {!r}".format(self),
                       ResourceWarning)
        context = {'client_response': self,
                   'message': 'Unclosed response'}
        if self._source_traceback:
            context['source_traceback'] = self._source_traceback
        self._loop.call_exception_handler(context)

    def __repr__(self):
        out = io.StringIO()
        ascii_encodable_url = self.url.encode('ascii', 'backslashreplace') \
            .decode('ascii')
        if self.reason:
            ascii_encodable_reason = self.reason.encode('ascii',
                                                        'backslashreplace') \
                .decode('ascii')
        else:
            ascii_encodable_reason = self.reason
        print('<ClientResponse({}) [{} {}]>'.format(
            ascii_encodable_url, self.status, ascii_encodable_reason),
            file=out)
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self):
        return self._connection

    @property
    def history(self):
        """A sequence of responses, if redirects occurred."""
        return self._history

    def waiting_for_continue(self):
        """True while a '100 Continue' interim response is still expected."""
        return self._continue is not None

    def _setup_connection(self, connection):
        # Wire the connection's reader into a flow-controlled payload stream.
        self._reader = connection.reader
        self._connection = connection
        self.content = self.flow_control_class(
            connection.reader, loop=connection.loop, timeout=self._timeout)

    def _need_parse_response_body(self):
        # HEAD responses and 204/304 statuses carry no body.
        return (self.method.lower() != 'head' and
                self.status not in [204, 304])

    @asyncio.coroutine
    def start(self, connection, read_until_eof=False):
        """Start response processing."""
        self._setup_connection(connection)

        while True:
            httpstream = self._reader.set_parser(self._response_parser)

            # read response
            with Timeout(self._timeout, loop=self._loop):
                message = yield from httpstream.read()
            if message.code != 100:
                break

            if self._continue is not None and not self._continue.done():
                self._continue.set_result(True)
                self._continue = None

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason
        self._should_close = message.should_close

        # headers
        self.headers = CIMultiDictProxy(message.headers)
        self.raw_headers = tuple(message.raw_headers)

        # payload
        rwb = self._need_parse_response_body()
        self._reader.set_parser(
            aiohttp.HttpPayloadParser(message,
                                      readall=read_until_eof,
                                      response_with_body=rwb),
            self.content)

        # cookies
        self.cookies = http.cookies.SimpleCookie()
        if hdrs.SET_COOKIE in self.headers:
            for hdr in self.headers.getall(hdrs.SET_COOKIE):
                try:
                    self.cookies.load(hdr)
                except http.cookies.CookieError as exc:
                    client_logger.warning(
                        'Can not load response cookies: %s', exc)
        return self

    def close(self):
        """Hard-close: drop the connection without draining the payload."""
        if self._closed:
            return

        self._closed = True

        if self._loop is None or self._loop.is_closed():
            return

        if self._connection is not None:
            self._connection.close()
            self._connection = None
        self._cleanup_writer()

    @asyncio.coroutine
    def release(self):
        """Drain the remaining payload and return the connection to the pool."""
        if self._closed:
            return
        try:
            content = self.content
            if content is not None and not content.at_eof():
                chunk = yield from content.readany()
                while chunk is not EOF_MARKER or chunk:
                    chunk = yield from content.readany()
        except Exception:
            self._connection.close()
            self._connection = None
            raise
        finally:
            self._closed = True
            if self._connection is not None:
                self._connection.release()
                if self._reader is not None:
                    self._reader.unset_parser()
                self._connection = None
            self._cleanup_writer()

    def raise_for_status(self):
        """Raise HttpProcessingError for 4xx/5xx statuses."""
        if 400 <= self.status:
            raise aiohttp.HttpProcessingError(
                code=self.status,
                message=self.reason)

    def _cleanup_writer(self):
        # Cancel the request's body-writer task if it is still running.
        if self._writer is not None and not self._writer.done():
            self._writer.cancel()
        self._writer = None

    @asyncio.coroutine
    def wait_for_close(self):
        """Wait for the request writer to finish, then release the response."""
        if self._writer is not None:
            try:
                yield from self._writer
            finally:
                self._writer = None
        yield from self.release()

    @asyncio.coroutine
    def read(self):
        """Read response payload."""
        if self._content is None:
            try:
                self._content = yield from self.content.read()
            except:
                self.close()
                raise
            else:
                yield from self.release()

        return self._content

    def _get_encoding(self):
        # Prefer the Content-Type charset; fall back to chardet, then utf-8.
        ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
        mtype, stype, _, params = helpers.parse_mimetype(ctype)

        encoding = params.get('charset')
        if not encoding:
            encoding = chardet.detect(self._content)['encoding']
        if not encoding:
            encoding = 'utf-8'

        return encoding

    @asyncio.coroutine
    def text(self, encoding=None):
        """Read response payload and decode."""
        if self._content is None:
            yield from self.read()

        if encoding is None:
            encoding = self._get_encoding()

        return self._content.decode(encoding)

    @asyncio.coroutine
    def json(self, *, encoding=None, loads=json.loads):
        """Read and decode JSON response."""
        if self._content is None:
            yield from self.read()

        ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
        if 'json' not in ctype:
            client_logger.warning(
                'Attempt to decode JSON with unexpected mimetype: %s', ctype)

        stripped = self._content.strip()
        if not stripped:
            return None

        if encoding is None:
            encoding = self._get_encoding()

        return loads(stripped.decode(encoding))

    if PY_35:
        @asyncio.coroutine
        def __aenter__(self):
            return self

        @asyncio.coroutine
        def __aexit__(self, exc_type, exc_val, exc_tb):
            # Release cleanly on success; hard-close on error.
            if exc_type is None:
                yield from self.release()
            else:
                self.close()
|
gkawamoto/yowsup | refs/heads/master | yowsup/layers/protocol_contacts/protocolentities/test_notification_contact_update.py | 68 | from yowsup.layers.protocol_contacts.protocolentities import UpdateContactNotificationProtocolEntity
from yowsup.structs.protocolentity import ProtocolEntityTest
import time
import unittest
# Sample notification entity round-tripped through the protocol-tree encoder.
entity = UpdateContactNotificationProtocolEntity("1234", "jid@s.whatsapp.net",
                                                 int(time.time()), "notify", False,"contactjid@s.whatsapp.net")
class UpdateContactNotificationProtocolEntityTest(ProtocolEntityTest, unittest.TestCase):
    """Round-trip test: entity -> protocol tree node -> entity."""

    def setUp(self):
        # ProtocolEntityTest drives the assertions from these two attributes.
        self.ProtocolEntity = UpdateContactNotificationProtocolEntity
        self.node = entity.toProtocolTreeNode()
|
edx/lettuce | refs/heads/master | tests/integration/django/celeries/terrain.py | 18 | # -*- coding: utf-8 -*-
# <Lettuce - Behaviour Driven Development for python>
# Copyright (C) <2010-2012> Gabriel Falcão <gabriel@nacaolivre.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lettuce import before, after
@before.all
def celeries_before_all():
    """Hook: runs once before the whole test run."""
    print "Celeries before all"
@after.all
def celeries_after_all(total):
    """Hook: runs once after the whole test run; *total* holds the results."""
    print "Celeries after all"
@before.harvest
def celeries_before_harvest(variables):
    """Hook: runs before lettuce starts harvesting features."""
    print "Celeries before harvest"
@after.harvest
def celeries_after_harvest(results):
    """Hook: runs after harvesting finishes; *results* holds the outcomes."""
    print "Celeries after harvest"
@before.each_feature
def celeries_before_feature(feature):
    """lettuce @before.each_feature hook: prints the feature name."""
    print("Celeries before feature '%s'" % feature.name)
@after.each_feature
def celeries_after_feature(feature):
    """lettuce @after.each_feature hook: prints the feature name."""
    print("Celeries after feature '%s'" % feature.name)
@before.each_scenario
def celeries_before_scenario(scenario):
    """lettuce @before.each_scenario hook: prints the scenario name."""
    print("Celeries before scenario '%s'" % scenario.name)
@after.each_scenario
def celeries_after_scenario(scenario):
    """lettuce @after.each_scenario hook: prints the scenario name."""
    print("Celeries after scenario '%s'" % scenario.name)
@before.each_step
def celeries_before_step(step):
    """lettuce @before.each_step hook: prints the step sentence."""
    print("Celeries before step '%s'" % step.sentence)
@after.each_step
def celeries_after_step(step):
    """lettuce @after.each_step hook: prints the step sentence."""
    print("Celeries after step '%s'" % step.sentence)
|
camilonos77/bootstrap-form-python-generator | refs/heads/master | enviroment/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/_collections.py | 327 | # urllib3/_collections.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from collections import Mapping, MutableMapping
try:
    from threading import RLock
except ImportError: # Platform-specific: No threads available
    class RLock:
        # No-op lock stand-in so ``with self.lock:`` still works on
        # platforms without threading support.
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
try: # Python 2.7+
    from collections import OrderedDict
except ImportError:
    # Vendored backport for Python < 2.7.
    from .packages.ordered_dict import OrderedDict
from .packages.six import itervalues
__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
# Sentinel used to tell "no value stored" apart from a stored None.
_Null = object()
class RecentlyUsedContainer(MutableMapping):
    """
    Provides a thread-safe dict-like container which maintains up to
    ``maxsize`` keys while throwing away the least-recently-used keys beyond
    ``maxsize``.
    :param maxsize:
        Maximum number of recent elements to retain.
    :param dispose_func:
        Callback which will get called every time an item is evicted from
        the container: ``dispose_func(value)``.
    """
    # Underlying ordered mapping; overridable by subclasses.
    ContainerCls = OrderedDict
    def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func
        self._container = self.ContainerCls()
        self.lock = RLock()
    def __getitem__(self, key):
        # Re-insert the item, moving it to the end of the eviction line.
        with self.lock:
            item = self._container.pop(key)
            self._container[key] = item
            return item
    def __setitem__(self, key, value):
        evicted_value = _Null
        with self.lock:
            # Possibly evict the existing value of 'key'
            evicted_value = self._container.get(key, _Null)
            self._container[key] = value
            # If we didn't evict an existing value, we might have to evict the
            # least recently used item from the beginning of the container.
            if len(self._container) > self._maxsize:
                _key, evicted_value = self._container.popitem(last=False)
        # Note: the dispose callback runs after the lock has been released.
        if self.dispose_func and evicted_value is not _Null:
            self.dispose_func(evicted_value)
    def __delitem__(self, key):
        with self.lock:
            value = self._container.pop(key)
        # Dispose outside the lock, as in __setitem__.
        if self.dispose_func:
            self.dispose_func(value)
    def __len__(self):
        with self.lock:
            return len(self._container)
    def __iter__(self):
        raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')
    def clear(self):
        with self.lock:
            # Copy pointers to all values, then wipe the mapping
            # under Python 2, this copies the list of values twice :-|
            values = list(self._container.values())
            self._container.clear()
        if self.dispose_func:
            for value in values:
                self.dispose_func(value)
    def keys(self):
        with self.lock:
            return self._container.keys()
class HTTPHeaderDict(MutableMapping):
    """
    A ``dict`` like container for storing HTTP Headers.
    Field names are stored and compared case-insensitively in compliance with
    RFC 2616. Iteration provides the first case-sensitive key seen for each
    case-insensitive pair.
    :param headers:
        An iterable of field-value pairs. Must not contain multiple field names
        when compared case-insensitively.
    :param kwargs:
        Additional field-value pairs to pass in to ``dict.update``.
    Using ``__setitem__`` syntax overwrites fields that compare equal
    case-insensitively in order to maintain ``dict``'s api. For fields that
    compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
    in a loop.
    If multiple fields that are equal case-insensitively are passed to the
    constructor or ``.update``, the behavior is undefined and some will be
    lost.
    >>> headers = HTTPHeaderDict()
    >>> headers.add('Set-Cookie', 'foo=bar')
    >>> headers.add('set-cookie', 'baz=quxx')
    >>> headers['content-length'] = '7'
    >>> headers['SET-cookie']
    'foo=bar, baz=quxx'
    >>> headers['Content-Length']
    '7'
    If you want to access the raw headers with their original casing
    for debugging purposes you can access the private ``._data`` attribute
    which is a normal python ``dict`` that maps the case-insensitive key to a
    list of tuples stored as (case-sensitive-original-name, value). Using the
    structure from above as our example:
    >>> headers._data
    {'set-cookie': [('Set-Cookie', 'foo=bar'), ('set-cookie', 'baz=quxx')],
    'content-length': [('content-length', '7')]}
    """
    def __init__(self, headers=None, **kwargs):
        # Maps lowercased field name -> list of (original-name, value).
        self._data = {}
        if headers is None:
            headers = {}
        self.update(headers, **kwargs)
    def add(self, key, value):
        """Adds a (name, value) pair, doesn't overwrite the value if it already
        exists.
        >>> headers = HTTPHeaderDict(foo='bar')
        >>> headers.add('Foo', 'baz')
        >>> headers['foo']
        'bar, baz'
        """
        self._data.setdefault(key.lower(), []).append((key, value))
    def getlist(self, key):
        """Returns a list of all the values for the named field. Returns an
        empty list if the key doesn't exist.
        NOTE(review): values that themselves contain ', ' cannot be
        round-tripped, because __getitem__ joins multiple values with ', '."""
        return self[key].split(', ') if key in self else []
    def copy(self):
        """Return a new HTTPHeaderDict with the same fields, preserving each
        field's original casing and multiple values."""
        h = HTTPHeaderDict()
        for key in self._data:
            for rawkey, value in self._data[key]:
                h.add(rawkey, value)
        return h
    def __eq__(self, other):
        # Equal to any Mapping whose case-folded, comma-joined view matches
        # ours; never equal to a non-Mapping.
        if not isinstance(other, Mapping):
            return False
        other = HTTPHeaderDict(other)
        return dict((k1, self[k1]) for k1 in self._data) == \
            dict((k2, other[k2]) for k2 in other._data)
    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__, so without this,
        # ``!=`` would fall back to identity and disagree with ``==``.
        return not self.__eq__(other)
    def __getitem__(self, key):
        values = self._data[key.lower()]
        # Multiple values for one field are joined per RFC 2616 section 4.2.
        return ', '.join(value[1] for value in values)
    def __setitem__(self, key, value):
        self._data[key.lower()] = [(key, value)]
    def __delitem__(self, key):
        del self._data[key.lower()]
    def __len__(self):
        return len(self._data)
    def __iter__(self):
        # Yield the first-seen original casing for each field.
        for headers in itervalues(self._data):
            yield headers[0][0]
    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
|
eneldoserrata/marcos_openerp | refs/heads/master | addons/subscription/__init__.py | 441 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import subscription
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
thatchristoph/namebench | refs/heads/master | nb_third_party/dns/rdtypes/keybase.py | 248 | # Copyright (C) 2004-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.dnssec
import dns.rdata
# Textual mnemonics for the KEY flags field, mapped to (bits, mask) pairs;
# from_text() clears each mnemonic's mask bits, then ORs in its value bits.
_flags_from_text = {
    'NOCONF': (0x4000, 0xC000),
    'NOAUTH': (0x8000, 0xC000),
    'NOKEY': (0xC000, 0xC000),
    'FLAG2': (0x2000, 0x2000),
    'EXTEND': (0x1000, 0x1000),
    'FLAG4': (0x0800, 0x0800),
    'FLAG5': (0x0400, 0x0400),
    'USER': (0x0000, 0x0300),
    'ZONE': (0x0100, 0x0300),
    'HOST': (0x0200, 0x0300),
    'NTYP3': (0x0300, 0x0300),
    'FLAG8': (0x0080, 0x0080),
    'FLAG9': (0x0040, 0x0040),
    'FLAG10': (0x0020, 0x0020),
    'FLAG11': (0x0010, 0x0010),
    'SIG0': (0x0000, 0x000f),
    'SIG1': (0x0001, 0x000f),
    'SIG2': (0x0002, 0x000f),
    'SIG3': (0x0003, 0x000f),
    'SIG4': (0x0004, 0x000f),
    'SIG5': (0x0005, 0x000f),
    'SIG6': (0x0006, 0x000f),
    'SIG7': (0x0007, 0x000f),
    'SIG8': (0x0008, 0x000f),
    'SIG9': (0x0009, 0x000f),
    'SIG10': (0x000a, 0x000f),
    'SIG11': (0x000b, 0x000f),
    'SIG12': (0x000c, 0x000f),
    'SIG13': (0x000d, 0x000f),
    'SIG14': (0x000e, 0x000f),
    'SIG15': (0x000f, 0x000f),
    }
# Textual mnemonics for the KEY protocol octet, mapped to their numeric
# values; from_text() accepts either form.
_protocol_from_text = {
    'NONE' : 0,
    'TLS' : 1,
    'EMAIL' : 2,
    'DNSSEC' : 3,
    'IPSEC' : 4,
    'ALL' : 255,
    }
class KEYBase(dns.rdata.Rdata):
    """KEY-like record base
    @ivar flags: the key flags
    @type flags: int
    @ivar protocol: the protocol for which this key may be used
    @type protocol: int
    @ivar algorithm: the algorithm used for the key
    @type algorithm: int
    @ivar key: the public key
    @type key: string"""
    __slots__ = ['flags', 'protocol', 'algorithm', 'key']
    def __init__(self, rdclass, rdtype, flags, protocol, algorithm, key):
        super(KEYBase, self).__init__(rdclass, rdtype)
        self.flags = flags
        self.protocol = protocol
        self.algorithm = algorithm
        self.key = key
    def to_text(self, origin=None, relativize=True, **kw):
        # Render as "<flags> <protocol> <algorithm> <base64 key>".
        return '%d %d %d %s' % (self.flags, self.protocol, self.algorithm,
                                dns.rdata._base64ify(self.key))
    def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
        # Flags may be numeric, or '|'-joined mnemonics (_flags_from_text):
        # each mnemonic clears its mask bits, then ORs in its value bits.
        flags = tok.get_string()
        if flags.isdigit():
            flags = int(flags)
        else:
            flag_names = flags.split('|')
            flags = 0
            for flag in flag_names:
                v = _flags_from_text.get(flag)
                if v is None:
                    raise dns.exception.SyntaxError('unknown flag %s' % flag)
                flags &= ~v[1]
                flags |= v[0]
        # Protocol is likewise numeric or mnemonic (_protocol_from_text).
        protocol = tok.get_string()
        if protocol.isdigit():
            protocol = int(protocol)
        else:
            protocol = _protocol_from_text.get(protocol)
            if protocol is None:
                # NOTE(review): 'protocol' has already been replaced by None
                # here, so the message reports "None" rather than the
                # offending token.
                raise dns.exception.SyntaxError('unknown protocol %s' % protocol)
        algorithm = dns.dnssec.algorithm_from_text(tok.get_string())
        # The rest of the rdata is base64 key material, possibly split
        # across several identifier tokens.
        chunks = []
        while 1:
            t = tok.get().unescape()
            if t.is_eol_or_eof():
                break
            if not t.is_identifier():
                raise dns.exception.SyntaxError
            chunks.append(t.value)
        b64 = ''.join(chunks)
        # Python 2 only: str.decode('base64_codec') does not exist on Python 3.
        key = b64.decode('base64_codec')
        return cls(rdclass, rdtype, flags, protocol, algorithm, key)
    from_text = classmethod(from_text)
    def to_wire(self, file, compress = None, origin = None):
        # Fixed 4-byte header (flags, protocol, algorithm), then raw key bytes.
        header = struct.pack("!HBB", self.flags, self.protocol, self.algorithm)
        file.write(header)
        file.write(self.key)
    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
        if rdlen < 4:
            raise dns.exception.FormError
        header = struct.unpack('!HBB', wire[current : current + 4])
        current += 4
        rdlen -= 4
        key = wire[current : current + rdlen]
        return cls(rdclass, rdtype, header[0], header[1], header[2],
                   key)
    from_wire = classmethod(from_wire)
    def _cmp(self, other):
        # Compare the packed fixed header first, then the key material.
        # Relies on the Python 2 builtin cmp().
        hs = struct.pack("!HBB", self.flags, self.protocol, self.algorithm)
        ho = struct.pack("!HBB", other.flags, other.protocol, other.algorithm)
        v = cmp(hs, ho)
        if v == 0:
            v = cmp(self.key, other.key)
        return v
|
Javex/mixminion | refs/heads/master | lib/mixminion/server/PacketHandler.py | 5 | # Copyright 2002-2011 Nick Mathewson. See LICENSE for licensing information.
"""mixminion.server.PacketHandler: Code to process mixminion packets"""
import binascii
import threading
import types
from mixminion.Common import encodeBase64, formatBase64, LOG
import mixminion.Crypto as Crypto
import mixminion.Packet as Packet
import mixminion.BuildMessage
from mixminion.ServerInfo import PACKET_KEY_BYTES
from mixminion.Common import MixError, MixFatalError, isPrintingAscii
__all__ = [ 'PacketHandler', 'ContentError', 'DeliveryPacket', 'RelayedPacket']
class ContentError(MixError):
    """Exception raised when a packet is malformatted or unacceptable."""
    pass
class PacketHandler:
    """Class to handle processing packets.  Given an incoming packet,
       it removes one layer of encryption, does all necessary integrity
       checks, swaps headers if necessary, re-pads, and decides whether
       to drop the packet, relay the packet, or send the packet to
       an exit handler."""
    ## Fields:
    # privatekeys: a list of 2-tuples of
    #      (1) a RSA private key that we accept
    #      (2) a HashLog objects corresponding to the given key
    def __init__(self, privatekeys=(), hashlogs=()):
        """Constructs a new packet handler, given a sequence of
           private key object for header encryption, and a sequence of
           corresponding hashlog object to prevent replays.
           The lists must be equally long.  When a new packet is
           processed, we try each of the private keys in sequence.  If
           the packet is decodeable with one of the keys, we log it in
           the corresponding entry of the hashlog list.
        """
        self.privatekeys = []
        # Serializes access to self.privatekeys across threads.
        self.lock = threading.Lock()
        assert type(privatekeys) in (types.ListType, types.TupleType)
        assert type(hashlogs) in (types.ListType, types.TupleType)
        self.setKeys(privatekeys, hashlogs)
    def setKeys(self, keys, hashlogs):
        """Change the keys and hashlogs used by this PacketHandler.
           Arguments are as to PacketHandler.__init__
        """
        self.lock.acquire()
        newKeys = {}
        try:
            # Build a set of asn.1-encoded public keys in *new* set.
            for k in keys:
                newKeys[k.encode_key(1)] = 1
                if k.get_modulus_bytes() != PACKET_KEY_BYTES:
                    raise MixFatalError("Incorrect packet key length")
            # For all old public keys, if they aren't in the new set, close
            # their hashlogs.
            for k, h in self.privatekeys:
                if not newKeys.get(k.encode_key(1)):
                    h.close()
            # Now, set the keys.
            self.privatekeys = zip(keys, hashlogs)
        finally:
            self.lock.release()
    def syncLogs(self):
        """Sync all this PacketHandler's hashlogs."""
        try:
            self.lock.acquire()
            for _, h in self.privatekeys:
                h.sync()
        finally:
            self.lock.release()
    def close(self):
        """Close all this PacketHandler's hashlogs."""
        try:
            self.lock.acquire()
            for _, h in self.privatekeys:
                h.close()
        finally:
            self.lock.release()
    def processPacket(self, msg):
        """Given a 32K mixminion packet, processes it completely.
           Return one of:
                    None [if the packet should be dropped.]
                    a DeliveryPacket object
                    a RelayedPacket object
           May raise CryptoError, ParseError, or ContentError if the packet
           is malformatted, misencrypted, unparseable, repeated, or otherwise
           unhandleable.
           WARNING: This implementation does nothing to prevent timing
           attacks: dropped packets, packets with bad digests, replayed
           packets, and exit packets are all processed faster than
           forwarded packets.  You must prevent timing attacks elsewhere."""
        # Break into headers and payload
        pkt = Packet.parsePacket(msg)
        header1 = Packet.parseHeader(pkt.header1)
        encSubh = header1[:Packet.ENC_SUBHEADER_LEN]
        header1 = header1[Packet.ENC_SUBHEADER_LEN:]
        assert len(header1) == Packet.HEADER_LEN - Packet.ENC_SUBHEADER_LEN
        assert len(header1) == (128*16) - 256 == 1792
        # Try to decrypt the first subheader.  Try each private key in
        # order.  Only fail if all private keys fail.
        subh = None
        e = None
        self.lock.acquire()
        try:
            for pk, hashlog in self.privatekeys:
                try:
                    subh = Crypto.pk_decrypt(encSubh, pk)
                    break
                except Crypto.CryptoError, err:
                    e = err
        finally:
            self.lock.release()
        if not subh:
            # Nobody managed to get us the first subheader.  Raise the
            # most-recently-received error.
            raise e
        # Note: from here on, 'hashlog' is the log paired with whichever key
        # successfully decrypted the subheader (the loop variable survives).
        if len(subh) != Packet.MAX_SUBHEADER_LEN:
            raise ContentError("Bad length in RSA-encrypted part of subheader")
        subh = Packet.parseSubheader(subh) #may raise ParseError
        # Check the version: can we read it?
        if subh.major != Packet.MAJOR_NO or subh.minor != Packet.MINOR_NO:
            raise ContentError("Invalid protocol version")
        # Check the digest of all of header1 but the first subheader.
        if subh.digest != Crypto.sha1(header1):
            raise ContentError("Invalid digest")
        # Get ready to generate packet keys.
        keys = Crypto.Keyset(subh.secret)
        # Replay prevention
        replayhash = keys.get(Crypto.REPLAY_PREVENTION_MODE, Crypto.DIGEST_LEN)
        if hashlog.seenHash(replayhash):
            raise ContentError("Duplicate packet detected.")
        else:
            hashlog.logHash(replayhash)
        # If we're meant to drop, drop now.
        rt = subh.routingtype
        if rt == Packet.DROP_TYPE:
            return None
        # Prepare the key to decrypt the header in counter mode.  We'll be
        # using this more than once.
        header_sec_key = Crypto.aes_key(keys.get(Crypto.HEADER_SECRET_MODE))
        # Prepare key to generate padding
        junk_key = Crypto.aes_key(keys.get(Crypto.RANDOM_JUNK_MODE))
        # Pad the rest of header 1
        header1 += Crypto.prng(junk_key,
                               Packet.OAEP_OVERHEAD + Packet.MIN_SUBHEADER_LEN
                               + subh.routinglen)
        assert len(header1) == (Packet.HEADER_LEN - Packet.ENC_SUBHEADER_LEN
                                + Packet.OAEP_OVERHEAD+Packet.MIN_SUBHEADER_LEN
                                + subh.routinglen)
        assert len(header1) == 1792 + 42 + 42 + subh.routinglen == \
               1876 + subh.routinglen
        # Decrypt the rest of header 1, encrypting the padding.
        header1 = Crypto.ctr_crypt(header1, header_sec_key)
        # If the subheader says that we have extra routing info that didn't
        # fit in the RSA-encrypted part, get it now.
        overflowLength = subh.getOverflowLength()
        if overflowLength:
            subh.appendOverflow(header1[:overflowLength])
            header1 = header1[overflowLength:]
        assert len(header1) == (
            1876 + subh.routinglen
            - max(0,subh.routinglen-Packet.MAX_ROUTING_INFO_LEN))
        header1 = subh.underflow + header1
        assert len(header1) == Packet.HEADER_LEN
        # Decrypt the payload.
        payload = Crypto.lioness_decrypt(pkt.payload,
                              keys.getLionessKeys(Crypto.PAYLOAD_ENCRYPT_MODE))
        # If we're an exit node, there's no need to process the headers
        # further.
        if rt >= Packet.MIN_EXIT_TYPE:
            return DeliveryPacket(rt, subh.getExitAddress(0),
                                  keys.get(Crypto.APPLICATION_KEY_MODE),
                                  payload)
        # If we're not an exit node, make sure that what we recognize our
        # routing type.
        if rt not in (Packet.SWAP_FWD_IPV4_TYPE, Packet.FWD_IPV4_TYPE,
                      Packet.SWAP_FWD_HOST_TYPE, Packet.FWD_HOST_TYPE):
            raise ContentError("Unrecognized Mixminion routing type")
        # Decrypt header 2.
        header2 = Crypto.lioness_decrypt(pkt.header2,
                           keys.getLionessKeys(Crypto.HEADER_ENCRYPT_MODE))
        # If we're the swap node, (1) decrypt the payload with a hash of
        # header2... (2) decrypt header2 with a hash of the payload...
        # (3) and swap the headers.
        if Packet.typeIsSwap(rt):
            hkey = Crypto.lioness_keys_from_header(header2)
            payload = Crypto.lioness_decrypt(payload, hkey)
            hkey = Crypto.lioness_keys_from_payload(payload)
            header2 = Crypto.lioness_decrypt(header2, hkey)
            header1, header2 = header2, header1
        # Build the address object for the next hop
        address = Packet.parseRelayInfoByType(rt, subh.routinginfo)
        # Construct the packet for the next hop.
        pkt = Packet.Packet(header1, header2, payload).pack()
        return RelayedPacket(address, pkt)
class RelayedPacket:
    """A packet on its way to another mix server; produced by
       PacketHandler.processPacket."""
    ## Fields:
    # address -- where the packet goes next (IPV4Info or MMTPHostInfo)
    # msg -- the re-encrypted 32KB packet body
    def __init__(self, address, msg):
        """Create a new packet, given an instance of IPV4Info or
           MMTPHostInfo and a 32K packet."""
        assert isinstance(address, (Packet.IPV4Info, Packet.MMTPHostInfo))
        assert len(msg) == 32768
        self.address = address
        self.msg = msg
    def isDelivery(self):
        """Return true iff this packet is a delivery (non-relay) packet."""
        return 0
    def getAddress(self):
        """Return the IPV4Info or MMTPHostInfo destination of this packet."""
        return self.address
    def getPacket(self):
        """Return the 32K contents of this packet."""
        return self.msg
class DeliveryPacket:
    """A packet that is to be delivered via some exit module; returned by
       PacketHandler.processPacket"""
    ##Fields:
    # exitType -- a 2-byte integer indicating which exit module to use.
    # address -- a string encoding the address to deliver to.
    # key -- the 16-byte application key
    # tag -- the 20-byte delivery handle
    # payload -- the unencoded 28K payload
    # contents -- until decode is called, None.  After decode is called,
    #     the actual contents of this message as delivered.
    # type -- until decode is called, None.  After decode is called,
    #     one of 'plain' (plaintext message), 'long' (overcompressed message),
    #     'enc' (encrypted message), or 'err' (malformed message).
    # headers -- a map from key to value for the delivery headers in
    #     this message's payload.  In the case of a fragment, or a
    #     non-plaintext message, the map is empty.
    # isfrag -- Is this packet a fragment of a complete message?  If so, the
    #     type must be 'plain'.
    # dPayload -- An instance of mixminion.Packet.Payload for this object.
    # error -- None, or a string containing an error encountered while trying
    #     to decode the payload.
    def __init__(self, routingType, routingInfo, applicationKey, payload):
        """Construct a new DeliveryPacket."""
        assert 0 <= routingType <= 0xFFFF
        assert len(applicationKey) == 16
        assert len(payload) == 28*1024
        self.exitType = routingType
        self.address = routingInfo
        self.key = applicationKey
        self.tag = ""
        self.payload = payload
        # Decode state is computed lazily by decode().
        self.contents = None
        self.type = None
        self.headers = None
        self.isfrag = 0
        self.dPayload = None
        self.error = None
    def setTagged(self,tagged=1):
        """Re-frame the routingInfo in this packet.  If 'tagged' is true,
           then the routingInfo starts with TAG_LEN bytes of decoding
           handle, and the rest is address.  If 'tagged' is false, then
           it's all address.
        """
        x = self.tag+self.address
        if tagged:
            if len(x)<Packet.TAG_LEN:
                raise Packet.ParseError("Missing decoding handle for exit type")
            self.tag = x[:Packet.TAG_LEN]
            self.address = x[Packet.TAG_LEN:]
        else:
            self.tag = ""
            self.address = x
    def __getstate__(self):
        # Versioned pickle state, checked by __setstate__.
        return "V0", self.__dict__
    def __setstate__(self, state):
        if type(state) == types.TupleType:
            if state[0] == 'V0':
                self.__dict__.update(state[1])
            else:
                raise MixError("Unrecognized state version %s" % state[0])
        else:
            raise MixError("Unrecognized state type %s"% type(state))
    def isDelivery(self):
        """Return true iff this packet is a delivery (non-relay) packet."""
        return 1
    def getExitType(self): return self.exitType
    def getAddress(self): return self.address
    def getTag(self): return self.tag
    def getApplicationKey(self): return self.key
    def getPayload(self): return self.payload
    def getContents(self):
        """Return the decoded contents of this packet."""
        if self.type is None: self.decode()
        return self.contents
    def getDecodedPayload(self):
        """Return an instance of mixminion.Packet.Payload for this packet."""
        if self.type is None: self.decode()
        return self.dPayload
    def isPlaintext(self):
        """Return true iff this packet is a plaintext, forward packet."""
        if self.type is None: self.decode()
        return self.type == 'plain'
    def isOvercompressed(self):
        """Return true iff this packet is an overcompressed, plaintext, forward
           packet."""
        if self.type is None: self.decode()
        return self.type == 'long'
    def isFragment(self):
        """Return true iff this packet is part of a fragmented message."""
        if self.type is None: self.decode()
        return self.isfrag
    def isEncrypted(self):
        """Return true iff this packet may be an encrypted forward or
           reply packet."""
        if self.type is None: self.decode()
        return self.type == 'enc'
    def isPrintingAscii(self):
        """Return true iff this packets contents are printing characters
           suitable for inclusion in a text transport medium."""
        if self.type is None: self.decode()
        return isPrintingAscii(self.contents, allowISO=1)
    def isError(self):
        """Return true iff this packet is malformed."""
        if self.type is None: self.decode()
        return self.type == 'err'
    def decode(self):
        """Helper method: Determines this message's type and contents."""
        # No-op if decode() has already consumed the payload.
        if self.payload is None:
            return
        message = self.payload
        self.contents = None
        try:
            self.dPayload = mixminion.BuildMessage.decodePayload(message, "")
            if self.dPayload is None:
                # encrypted message
                self.type = 'enc'
                self.contents = message
                self.headers = {}
            elif self.dPayload.isSingleton():
                # forward message, singleton.
                self.type = 'plain'
                body = self.dPayload.getUncompressedContents()
                self.contents, self.headers = \
                               Packet.parseMessageAndHeaders(body)
            else:
                # forward message, fragment.
                self.isfrag = 1
                self.type = 'plain'
                self.contents = message
                self.headers = {}
        except Packet.CompressedDataTooLong, _:
            self.contents = Packet.parsePayload(message).getContents()
            self.type = 'long'
            self.headers = {}
        except MixError, e:
            # Anything else unhandleable: record the error and keep the raw
            # message bytes as the contents.
            self.contents = message
            self.error = str(e)
            self.type = 'err'
            self.headers = {}
        # Mark the payload as consumed so decode() runs at most once.
        self.payload = None
    def getAsciiContents(self):
        """Return the contents of this message, encoded in base64 if they are
           not already printable."""
        if self.type is None:
            self.decode()
        if self.type == 'plain' and isPrintingAscii(self.contents, allowISO=1):
            return self.contents
        else:
            return encodeBase64(self.contents)
    def getHeaders(self):
        """Return a dict containing the headers for this message."""
        if self.type is None:
            self.decode()
        if self.headers is None:
            LOG.warn("getHeaders found no decoded headers")
            return {}
        return self.headers
    def getAsciiTag(self):
        """Return a base64-representation of this message's decoding handle."""
        return formatBase64(self.tag)
    def getTextEncodedMessage(self):
        """Return a Packet.TextEncodedMessage object for this packet."""
        tag = None
        if self.isOvercompressed():
            tp = 'LONG'
        elif self.isEncrypted():
            tp = 'ENC'
            tag = self.tag
        elif self.isPrintingAscii():
            assert self.isPlaintext()
            tp = 'TXT'
        elif self.isFragment():
            assert self.isPlaintext()
            tp = 'FRAG'
        else:
            assert self.isPlaintext()
            tp = 'BIN'
        return Packet.TextEncodedMessage(self.contents, tp, tag)
|
bob-white/UnityIronPythonConsole | refs/heads/master | Assets/IronPythonConsole/Plugins/Lib/smtplib.py | 74 | #! /usr/bin/env python
'''SMTP/ESMTP client class.
This should follow RFC 821 (SMTP), RFC 1869 (ESMTP), RFC 2554 (SMTP
Authentication) and RFC 2487 (Secure SMTP over TLS).
Notes:
Please remember, when doing ESMTP, that the names of the SMTP service
extensions are NOT the same thing as the option keywords for the RCPT
and MAIL commands!
Example:
>>> import smtplib
>>> s=smtplib.SMTP("localhost")
>>> print s.help()
This is Sendmail version 8.8.4
Topics:
HELO EHLO MAIL RCPT DATA
RSET NOOP QUIT HELP VRFY
EXPN VERB ETRN DSN
For more info use "HELP <topic>".
To report bugs in the implementation send email to
sendmail-bugs@sendmail.org.
For local information send email to Postmaster at your site.
End of HELP info
>>> s.putcmd("vrfy","someone@here")
>>> s.getreply()
(250, "Somebody OverHere <somebody@here.my.org>")
>>> s.quit()
'''
# Author: The Dragon De Monsyne <dragondm@integral.org>
# ESMTP support, test code and doc fixes added by
# Eric S. Raymond <esr@thyrsus.com>
# Better RFC 821 compliance (MAIL and RCPT, and CRLF in data)
# by Carey Evans <c.evans@clear.net.nz>, for picky mail servers.
# RFC 2554 (authentication) support by Gerhard Haering <gerhard@bigfoot.de>.
#
# This was modified from the Python 1.5 library HTTP lib.
import socket
import re
import email.utils
import base64
import hmac
from email.base64mime import encode as encode_base64
from sys import stderr
# Public API of this module.
__all__ = ["SMTPException", "SMTPServerDisconnected", "SMTPResponseException",
           "SMTPSenderRefused", "SMTPRecipientsRefused", "SMTPDataError",
           "SMTPConnectError", "SMTPHeloError", "SMTPAuthenticationError",
           "quoteaddr", "quotedata", "SMTP"]
# Default ports: 25 for plain SMTP, 465 for SMTP over SSL.
SMTP_PORT = 25
SMTP_SSL_PORT = 465
# Canonical Internet line terminator used on the wire.
CRLF = "\r\n"
# Matches old-style "auth=..." capability lines (case-insensitive).
OLDSTYLE_AUTH = re.compile(r"auth=(.*)", re.I)
# Exception classes used by this module.
# Root of the smtplib exception hierarchy; catch this to handle any error
# raised by this module.
class SMTPException(Exception):
    """Base class for all exceptions raised by this module."""
class SMTPServerDisconnected(SMTPException):
    """Not connected to any SMTP server.
    This exception is raised when the server unexpectedly disconnects,
    or when an attempt is made to use the SMTP instance before
    connecting it to a server.
    """
    # Carries no SMTP status code: there is no server reply to report.
class SMTPResponseException(SMTPException):
    """Base class for exceptions that carry an SMTP status code.

    Raised in some instances when the server returns an error code.  The
    numeric code is stored in the `smtp_code' attribute, and the server's
    message text in the `smtp_error' attribute.
    """
    def __init__(self, code, msg):
        self.args = (code, msg)
        self.smtp_code = code
        self.smtp_error = msg
class SMTPSenderRefused(SMTPResponseException):
    """Sender address refused.

    On top of the attributes every SMTPResponseException carries, this
    records the refused sender string in the `sender' attribute.
    """
    def __init__(self, code, msg, sender):
        self.args = (code, msg, sender)
        self.smtp_code = code
        self.smtp_error = msg
        self.sender = sender
class SMTPRecipientsRefused(SMTPException):
    """All recipient addresses refused.

    The per-recipient errors are available through the `recipients'
    attribute, a dictionary of exactly the same sort as SMTP.sendmail()
    returns.
    """
    def __init__(self, recipients):
        self.args = (recipients,)
        self.recipients = recipients
# Raised with the server's status code and message (see
# SMTPResponseException for the attributes).
class SMTPDataError(SMTPResponseException):
    """The SMTP server didn't accept the data."""
# Raised when the initial connection greeting is not a 220 reply.
class SMTPConnectError(SMTPResponseException):
    """Error during connection establishment."""
# Raised when the server rejects our HELO/EHLO greeting.
class SMTPHeloError(SMTPResponseException):
    """The server refused our HELO reply."""
class SMTPAuthenticationError(SMTPResponseException):
    """Authentication error.
    Most probably the server didn't accept the username/password
    combination provided.
    """
def quoteaddr(addr):
    """Quote a subset of the email addresses defined by RFC 821.

    Should be able to handle anything rfc822.parseaddr can handle.
    """
    parsed = (None, None)
    try:
        parsed = email.utils.parseaddr(addr)[1]
    except AttributeError:
        pass
    if parsed == (None, None):  # Indicates parse failure or AttributeError
        # something weird here.. punt -ddm
        return "<%s>" % addr
    if parsed is None:
        # the sender wants an empty return address
        return "<>"
    return "<%s>" % parsed
def quotedata(data):
    """Quote data for email.

    Double leading '.', and change Unix newline '\\n', or Mac '\\r' into
    Internet CRLF end-of-line.
    """
    # First normalize every line ending (CRLF, bare LF, bare CR) to CRLF,
    # then dot-stuff lines that start with '.' per RFC 821.
    normalized = re.sub(r'(?:\r\n|\n|\r(?!\n))', CRLF, data)
    return re.sub(r'(?m)^\.', '..', normalized)
try:
    import ssl
except ImportError:
    # TLS support is optional; STARTTLS/SMTP_SSL need _have_ssl to be true.
    _have_ssl = False
else:
    class SSLFakeFile:
        """A fake file like object that really wraps a SSLObject.
        It only supports what is needed in smtplib.
        """
        def __init__(self, sslobj):
            self.sslobj = sslobj
        def readline(self):
            # Read one byte at a time until a newline or EOF.
            # NOTE(review): the locals 'str' and 'chr' shadow builtins of the
            # same names; harmless here, but rename if this code is touched.
            str = ""
            chr = None
            while chr != "\n":
                chr = self.sslobj.read(1)
                if not chr:
                    break
                str += chr
            return str
        def close(self):
            # Nothing of our own to release; only the SSL object is held.
            pass
    _have_ssl = True
class SMTP:
    """This class manages a connection to an SMTP or ESMTP server.

    SMTP Objects:
        SMTP objects have the following attributes:

        helo_resp
            This is the message given by the server in response to the
            most recent HELO command.

        ehlo_resp
            This is the message given by the server in response to the
            most recent EHLO command. This is usually multiline.

        does_esmtp
            This is a True value _after you do an EHLO command_, if the
            server supports ESMTP.

        esmtp_features
            This is a dictionary, which, if the server supports ESMTP,
            will _after you do an EHLO command_, contain the names of the
            SMTP service extensions this server supports, and their
            parameters (if any).

            Note, all extension names are mapped to lower case in the
            dictionary.

    See each method's docstrings for details. In general, there is a
    method of the same name to perform each SMTP command. There is also a
    method called 'sendmail' that will do an entire mail transaction.
    """
    debuglevel = 0            # non-zero => protocol trace printed to stderr
    file = None               # file-like wrapper over the socket, used by getreply()
    helo_resp = None          # text of the last HELO reply, or None
    ehlo_msg = "ehlo"         # verb sent by ehlo(); LMTP overrides this to "lhlo"
    ehlo_resp = None          # text of the last EHLO reply, or None
    does_esmtp = 0            # becomes 1 once the server answers EHLO with 250
    default_port = SMTP_PORT  # used by connect() when no port is given

    def __init__(self, host='', port=0, local_hostname=None,
                 timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        """Initialize a new instance.

        If specified, `host' is the name of the remote host to which to
        connect. If specified, `port' specifies the port to which to connect.
        By default, smtplib.SMTP_PORT is used. An SMTPConnectError is raised
        if the specified `host' doesn't respond correctly. If specified,
        `local_hostname` is used as the FQDN of the local host. By default,
        the local hostname is found using socket.getfqdn().
        """
        self.timeout = timeout
        self.esmtp_features = {}
        if host:
            # Connecting in the constructor means a bad host raises here.
            (code, msg) = self.connect(host, port)
            if code != 220:
                raise SMTPConnectError(code, msg)
        if local_hostname is not None:
            self.local_hostname = local_hostname
        else:
            # RFC 2821 says we should use the fqdn in the EHLO/HELO verb, and
            # if that can't be calculated, that we should use a domain literal
            # instead (essentially an encoded IP address like [A.B.C.D]).
            fqdn = socket.getfqdn()
            if '.' in fqdn:
                self.local_hostname = fqdn
            else:
                # We can't find an fqdn hostname, so use a domain literal
                addr = '127.0.0.1'
                try:
                    addr = socket.gethostbyname(socket.gethostname())
                except socket.gaierror:
                    pass
                self.local_hostname = '[%s]' % addr

    def set_debuglevel(self, debuglevel):
        """Set the debug output level.

        A non-false value results in debug messages for connection and for all
        messages sent to and received from the server.
        """
        self.debuglevel = debuglevel

    def _get_socket(self, port, host, timeout):
        # This makes it simpler for SMTP_SSL to use the SMTP connect code
        # and just alter the socket connection bit.
        # NOTE(review): the parameter names here are swapped relative to the
        # call site in connect(), which passes (host, port, timeout).  The
        # values land in the right order again in create_connection((port,
        # host)), so behaviour is correct, but `port` actually holds the
        # host value (and vice versa) -- which also makes the debug print
        # below show the pair reversed.
        if self.debuglevel > 0:
            print>>stderr, 'connect:', (host, port)
        return socket.create_connection((port, host), timeout)

    def connect(self, host='localhost', port=0):
        """Connect to a host on a given port.

        If the hostname ends with a colon (`:') followed by a number, and
        there is no port specified, that suffix will be stripped off and the
        number interpreted as the port number to use.

        Note: This method is automatically invoked by __init__, if a host is
        specified during instantiation.
        """
        # Only split on ':' when there is exactly one (an IPv6 literal
        # would contain several).
        if not port and (host.find(':') == host.rfind(':')):
            i = host.rfind(':')
            if i >= 0:
                host, port = host[:i], host[i + 1:]
                try:
                    port = int(port)
                except ValueError:
                    raise socket.error, "nonnumeric port"
        if not port:
            port = self.default_port
        if self.debuglevel > 0:
            print>>stderr, 'connect:', (host, port)
        self.sock = self._get_socket(host, port, self.timeout)
        # The server greets us with a 220 banner on connect.
        (code, msg) = self.getreply()
        if self.debuglevel > 0:
            print>>stderr, "connect:", msg
        return (code, msg)

    def send(self, str):
        """Send `str' to the server."""
        if self.debuglevel > 0:
            print>>stderr, 'send:', repr(str)
        if hasattr(self, 'sock') and self.sock:
            try:
                self.sock.sendall(str)
            except socket.error:
                # A failed send leaves the connection unusable.
                self.close()
                raise SMTPServerDisconnected('Server not connected')
        else:
            raise SMTPServerDisconnected('please run connect() first')

    def putcmd(self, cmd, args=""):
        """Send a command to the server."""
        if args == "":
            str = '%s%s' % (cmd, CRLF)
        else:
            str = '%s %s%s' % (cmd, args, CRLF)
        self.send(str)

    def getreply(self):
        """Get a reply from the server.

        Returns a tuple consisting of:

          - server response code (e.g. '250', or such, if all goes well)
            Note: returns -1 if it can't read response code.

          - server response string corresponding to response code (multiline
            responses are converted to a single, multiline string).

        Raises SMTPServerDisconnected if end-of-file is reached.
        """
        resp = []
        if self.file is None:
            self.file = self.sock.makefile('rb')
        while 1:
            try:
                line = self.file.readline()
            except socket.error:
                line = ''
            if line == '':
                self.close()
                raise SMTPServerDisconnected("Connection unexpectedly closed")
            if self.debuglevel > 0:
                print>>stderr, 'reply:', repr(line)
            # Reply text starts after "NNN " or "NNN-".
            resp.append(line[4:].strip())
            code = line[:3]
            # Check that the error code is syntactically correct.
            # Don't attempt to read a continuation line if it is broken.
            try:
                errcode = int(code)
            except ValueError:
                errcode = -1
                break
            # Check if multiline response.
            if line[3:4] != "-":
                break
        errmsg = "\n".join(resp)
        if self.debuglevel > 0:
            print>>stderr, 'reply: retcode (%s); Msg: %s' % (errcode, errmsg)
        return errcode, errmsg

    def docmd(self, cmd, args=""):
        """Send a command, and return its response code."""
        self.putcmd(cmd, args)
        return self.getreply()

    # std smtp commands

    def helo(self, name=''):
        """SMTP 'helo' command.

        Hostname to send for this command defaults to the FQDN of the local
        host.
        """
        self.putcmd("helo", name or self.local_hostname)
        (code, msg) = self.getreply()
        self.helo_resp = msg
        return (code, msg)

    def ehlo(self, name=''):
        """SMTP 'ehlo' command.

        Hostname to send for this command defaults to the FQDN of the local
        host.  On success, parses the advertised service extensions into
        self.esmtp_features.
        """
        self.esmtp_features = {}
        self.putcmd(self.ehlo_msg, name or self.local_hostname)
        (code, msg) = self.getreply()
        # According to RFC1869 some (badly written)
        # MTA's will disconnect on an ehlo. Toss an exception if
        # that happens -ddm
        if code == -1 and len(msg) == 0:
            self.close()
            raise SMTPServerDisconnected("Server not connected")
        self.ehlo_resp = msg
        if code != 250:
            return (code, msg)
        self.does_esmtp = 1
        #parse the ehlo response -ddm
        resp = self.ehlo_resp.split('\n')
        del resp[0]
        for each in resp:
            # To be able to communicate with as many SMTP servers as possible,
            # we have to take the old-style auth advertisement into account,
            # because:
            # 1) Else our SMTP feature parser gets confused.
            # 2) There are some servers that only advertise the auth methods we
            #    support using the old style.
            auth_match = OLDSTYLE_AUTH.match(each)
            if auth_match:
                # This doesn't remove duplicates, but that's no problem
                self.esmtp_features["auth"] = self.esmtp_features.get("auth", "") \
                    + " " + auth_match.groups(0)[0]
                continue
            # RFC 1869 requires a space between ehlo keyword and parameters.
            # It's actually stricter, in that only spaces are allowed between
            # parameters, but were not going to check for that here.  Note
            # that the space isn't present if there are no parameters.
            m = re.match(r'(?P<feature>[A-Za-z0-9][A-Za-z0-9\-]*) ?', each)
            if m:
                feature = m.group("feature").lower()
                params = m.string[m.end("feature"):].strip()
                if feature == "auth":
                    self.esmtp_features[feature] = self.esmtp_features.get(feature, "") \
                        + " " + params
                else:
                    self.esmtp_features[feature] = params
        return (code, msg)

    def has_extn(self, opt):
        """Does the server support a given SMTP service extension?"""
        return opt.lower() in self.esmtp_features

    def help(self, args=''):
        """SMTP 'help' command.
        Returns help text from server."""
        self.putcmd("help", args)
        return self.getreply()[1]

    def rset(self):
        """SMTP 'rset' command -- resets session."""
        return self.docmd("rset")

    def noop(self):
        """SMTP 'noop' command -- doesn't do anything :>"""
        return self.docmd("noop")

    def mail(self, sender, options=[]):
        """SMTP 'mail' command -- begins mail xfer session."""
        # NOTE: `options` is a mutable default, but it is only read here,
        # never mutated, so the shared-default pitfall does not bite.
        optionlist = ''
        if options and self.does_esmtp:
            optionlist = ' ' + ' '.join(options)
        self.putcmd("mail", "FROM:%s%s" % (quoteaddr(sender), optionlist))
        return self.getreply()

    def rcpt(self, recip, options=[]):
        """SMTP 'rcpt' command -- indicates 1 recipient for this mail."""
        # `options` is read-only here (see mail()).
        optionlist = ''
        if options and self.does_esmtp:
            optionlist = ' ' + ' '.join(options)
        self.putcmd("rcpt", "TO:%s%s" % (quoteaddr(recip), optionlist))
        return self.getreply()

    def data(self, msg):
        """SMTP 'DATA' command -- sends message data to server.

        Automatically quotes lines beginning with a period per rfc821.
        Raises SMTPDataError if there is an unexpected reply to the
        DATA command; the return value from this method is the final
        response code received when the all data is sent.
        """
        self.putcmd("data")
        (code, repl) = self.getreply()
        if self.debuglevel > 0:
            print>>stderr, "data:", (code, repl)
        if code != 354:
            raise SMTPDataError(code, repl)
        else:
            # Dot-stuff the payload, terminate with CRLF "." CRLF.
            q = quotedata(msg)
            if q[-2:] != CRLF:
                q = q + CRLF
            q = q + "." + CRLF
            self.send(q)
            (code, msg) = self.getreply()
            if self.debuglevel > 0:
                print>>stderr, "data:", (code, msg)
            return (code, msg)

    def verify(self, address):
        """SMTP 'verify' command -- checks for address validity."""
        self.putcmd("vrfy", quoteaddr(address))
        return self.getreply()
    # a.k.a.
    vrfy = verify

    def expn(self, address):
        """SMTP 'expn' command -- expands a mailing list."""
        self.putcmd("expn", quoteaddr(address))
        return self.getreply()

    # some useful methods

    def ehlo_or_helo_if_needed(self):
        """Call self.ehlo() and/or self.helo() if needed.

        If there has been no previous EHLO or HELO command this session, this
        method tries ESMTP EHLO first.

        This method may raise the following exceptions:

         SMTPHeloError            The server didn't reply properly to
                                  the helo greeting.
        """
        if self.helo_resp is None and self.ehlo_resp is None:
            if not (200 <= self.ehlo()[0] <= 299):
                # EHLO failed; fall back to plain HELO.
                (code, resp) = self.helo()
                if not (200 <= code <= 299):
                    raise SMTPHeloError(code, resp)

    def login(self, user, password):
        """Log in on an SMTP server that requires authentication.

        The arguments are:
            - user:     The user name to authenticate with.
            - password: The password for the authentication.

        If there has been no previous EHLO or HELO command this session, this
        method tries ESMTP EHLO first.

        This method will return normally if the authentication was successful.

        This method may raise the following exceptions:

         SMTPHeloError            The server didn't reply properly to
                                  the helo greeting.
         SMTPAuthenticationError  The server didn't accept the username/
                                  password combination.
         SMTPException            No suitable authentication method was
                                  found.
        """
        def encode_cram_md5(challenge, user, password):
            # Challenge arrives base64-encoded; reply is "user hex-hmac".
            challenge = base64.decodestring(challenge)
            response = user + " " + hmac.HMAC(password, challenge).hexdigest()
            return encode_base64(response, eol="")

        def encode_plain(user, password):
            # AUTH PLAIN: NUL authzid NUL authcid NUL passwd, base64'd.
            return encode_base64("\0%s\0%s" % (user, password), eol="")

        AUTH_PLAIN = "PLAIN"
        AUTH_CRAM_MD5 = "CRAM-MD5"
        AUTH_LOGIN = "LOGIN"

        self.ehlo_or_helo_if_needed()

        if not self.has_extn("auth"):
            raise SMTPException("SMTP AUTH extension not supported by server.")

        # Authentication methods the server supports:
        authlist = self.esmtp_features["auth"].split()

        # List of authentication methods we support: from preferred to
        # less preferred methods. Except for the purpose of testing the weaker
        # ones, we prefer stronger methods like CRAM-MD5:
        preferred_auths = [AUTH_CRAM_MD5, AUTH_PLAIN, AUTH_LOGIN]

        # Determine the authentication method we'll use
        authmethod = None
        for method in preferred_auths:
            if method in authlist:
                authmethod = method
                break

        if authmethod == AUTH_CRAM_MD5:
            (code, resp) = self.docmd("AUTH", AUTH_CRAM_MD5)
            if code == 503:
                # 503 == 'Error: already authenticated'
                return (code, resp)
            (code, resp) = self.docmd(encode_cram_md5(resp, user, password))
        elif authmethod == AUTH_PLAIN:
            (code, resp) = self.docmd("AUTH",
                AUTH_PLAIN + " " + encode_plain(user, password))
        elif authmethod == AUTH_LOGIN:
            # LOGIN is a two-step exchange: username, then password.
            (code, resp) = self.docmd("AUTH",
                "%s %s" % (AUTH_LOGIN, encode_base64(user, eol="")))
            if code != 334:
                raise SMTPAuthenticationError(code, resp)
            (code, resp) = self.docmd(encode_base64(password, eol=""))
        elif authmethod is None:
            raise SMTPException("No suitable authentication method found.")
        if code not in (235, 503):
            # 235 == 'Authentication successful'
            # 503 == 'Error: already authenticated'
            raise SMTPAuthenticationError(code, resp)
        return (code, resp)

    def starttls(self, keyfile=None, certfile=None):
        """Puts the connection to the SMTP server into TLS mode.

        If there has been no previous EHLO or HELO command this session, this
        method tries ESMTP EHLO first.

        If the server supports TLS, this will encrypt the rest of the SMTP
        session. If you provide the keyfile and certfile parameters,
        the identity of the SMTP server and client can be checked. This,
        however, depends on whether the socket module really checks the
        certificates.

        This method may raise the following exceptions:

         SMTPHeloError            The server didn't reply properly to
                                  the helo greeting.
        """
        self.ehlo_or_helo_if_needed()
        if not self.has_extn("starttls"):
            raise SMTPException("STARTTLS extension not supported by server.")
        (resp, reply) = self.docmd("STARTTLS")
        if resp == 220:
            if not _have_ssl:
                raise RuntimeError("No SSL support included in this Python")
            self.sock = ssl.wrap_socket(self.sock, keyfile, certfile)
            self.file = SSLFakeFile(self.sock)
            # RFC 3207:
            # The client MUST discard any knowledge obtained from
            # the server, such as the list of SMTP service extensions,
            # which was not obtained from the TLS negotiation itself.
            self.helo_resp = None
            self.ehlo_resp = None
            self.esmtp_features = {}
            self.does_esmtp = 0
        return (resp, reply)

    def sendmail(self, from_addr, to_addrs, msg, mail_options=[],
                 rcpt_options=[]):
        """This command performs an entire mail transaction.

        The arguments are:
            - from_addr    : The address sending this mail.
            - to_addrs     : A list of addresses to send this mail to.  A bare
                             string will be treated as a list with 1 address.
            - msg          : The message to send.
            - mail_options : List of ESMTP options (such as 8bitmime) for the
                             mail command.
            - rcpt_options : List of ESMTP options (such as DSN commands) for
                             all the rcpt commands.

        If there has been no previous EHLO or HELO command this session, this
        method tries ESMTP EHLO first.  If the server does ESMTP, message size
        and each of the specified options will be passed to it.  If EHLO
        fails, HELO will be tried and ESMTP options suppressed.

        This method will return normally if the mail is accepted for at least
        one recipient.  It returns a dictionary, with one entry for each
        recipient that was refused.  Each entry contains a tuple of the SMTP
        error code and the accompanying error message sent by the server.

        This method may raise the following exceptions:

         SMTPHeloError          The server didn't reply properly to
                                the helo greeting.
         SMTPRecipientsRefused  The server rejected ALL recipients
                                (no mail was sent).
         SMTPSenderRefused      The server didn't accept the from_addr.
         SMTPDataError          The server replied with an unexpected
                                error code (other than a refusal of
                                a recipient).

        Note: the connection will be open even after an exception is raised.

        Example:

         >>> import smtplib
         >>> s=smtplib.SMTP("localhost")
         >>> tolist=["one@one.org","two@two.org","three@three.org","four@four.org"]
         >>> msg = '''\\
         ... From: Me@my.org
         ... Subject: testin'...
         ...
         ... This is a test '''
         >>> s.sendmail("me@my.org",tolist,msg)
         { "three@three.org" : ( 550 ,"User unknown" ) }
         >>> s.quit()

        In the above example, the message was accepted for delivery to three
        of the four addresses, and one was rejected, with the error code
        550.  If all addresses are accepted, then the method will return an
        empty dictionary.
        """
        self.ehlo_or_helo_if_needed()
        esmtp_opts = []
        if self.does_esmtp:
            # Hmmm? what's this? -ddm
            # self.esmtp_features['7bit']=""
            if self.has_extn('size'):
                esmtp_opts.append("size=%d" % len(msg))
            for option in mail_options:
                esmtp_opts.append(option)
        (code, resp) = self.mail(from_addr, esmtp_opts)
        if code != 250:
            # Reset the session so the connection stays usable.
            self.rset()
            raise SMTPSenderRefused(code, resp, from_addr)
        senderrs = {}
        if isinstance(to_addrs, basestring):
            to_addrs = [to_addrs]
        for each in to_addrs:
            (code, resp) = self.rcpt(each, rcpt_options)
            if (code != 250) and (code != 251):
                senderrs[each] = (code, resp)
        if len(senderrs) == len(to_addrs):
            # the server refused all our recipients
            self.rset()
            raise SMTPRecipientsRefused(senderrs)
        (code, resp) = self.data(msg)
        if code != 250:
            self.rset()
            raise SMTPDataError(code, resp)
        #if we got here then somebody got our mail
        return senderrs

    def close(self):
        """Close the connection to the SMTP server."""
        if self.file:
            self.file.close()
        self.file = None
        if self.sock:
            self.sock.close()
        self.sock = None

    def quit(self):
        """Terminate the SMTP session."""
        res = self.docmd("quit")
        self.close()
        return res
if _have_ssl:

    class SMTP_SSL(SMTP):
        """This is a subclass derived from SMTP that connects over an SSL
        encrypted socket (to use this class you need a socket module that was
        compiled with SSL support). If host is not specified, '' (the local
        host) is used. If port is omitted, the standard SMTP-over-SSL port
        (465) is used. keyfile and certfile are also optional - they can
        contain a PEM formatted private key and certificate chain file for
        the SSL connection.
        """
        default_port = SMTP_SSL_PORT

        def __init__(self, host='', port=0, local_hostname=None,
                     keyfile=None, certfile=None,
                     timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
            # Stash the SSL credentials before SMTP.__init__, which may
            # connect immediately (and thus call _get_socket below).
            self.keyfile = keyfile
            self.certfile = certfile
            SMTP.__init__(self, host, port, local_hostname, timeout)

        def _get_socket(self, host, port, timeout):
            # NOTE(review): this override names its parameters (host, port)
            # while the base class declares (port, host); the call site in
            # SMTP.connect() passes (host, port, timeout), so the values are
            # correct here.
            if self.debuglevel > 0:
                print>>stderr, 'connect:', (host, port)
            new_socket = socket.create_connection((host, port), timeout)
            # Wrap the plain TCP socket in SSL, and pre-create the reply
            # reader so getreply() never calls makefile() on the SSL socket.
            new_socket = ssl.wrap_socket(new_socket, self.keyfile, self.certfile)
            self.file = SSLFakeFile(new_socket)
            return new_socket

    __all__.append("SMTP_SSL")
#
# LMTP extension
#
LMTP_PORT = 2003  # default TCP port for LMTP (RFC 2033 servers vary)

class LMTP(SMTP):
    """LMTP - Local Mail Transfer Protocol

    The LMTP protocol, which is very similar to ESMTP, is heavily based
    on the standard SMTP client. It's common to use Unix sockets for LMTP,
    so our connect() method must support that as well as a regular
    host:port server. To specify a Unix socket, you must use an absolute
    path as the host, starting with a '/'.

    Authentication is supported, using the regular SMTP mechanism. When
    using a Unix socket, LMTP generally don't support or require any
    authentication, but your mileage might vary."""

    # LMTP greets with LHLO instead of EHLO (consumed by SMTP.ehlo()).
    ehlo_msg = "lhlo"

    def __init__(self, host='', port=LMTP_PORT, local_hostname=None):
        """Initialize a new instance."""
        SMTP.__init__(self, host, port, local_hostname)

    def connect(self, host='localhost', port=0):
        """Connect to the LMTP daemon, on either a Unix or a TCP socket."""
        # NOTE(review): host[0] raises IndexError for an empty host string;
        # callers are expected to pass a non-empty host.
        if host[0] != '/':
            return SMTP.connect(self, host, port)

        # Handle Unix-domain sockets.
        try:
            self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            self.sock.connect(host)
        except socket.error, msg:
            if self.debuglevel > 0:
                print>>stderr, 'connect fail:', host
            if self.sock:
                self.sock.close()
            self.sock = None
            raise socket.error, msg
        # Read the server's greeting banner.
        (code, msg) = self.getreply()
        if self.debuglevel > 0:
            print>>stderr, "connect:", msg
        return (code, msg)
# Test the sendmail method, which tests most of the others.
# Note: This always sends to localhost.
if __name__ == '__main__':
    import sys

    def prompt(prompt):
        # Read one line from stdin, echoing the given label first.
        sys.stdout.write(prompt + ": ")
        return sys.stdin.readline().strip()

    fromaddr = prompt("From")
    toaddrs = prompt("To").split(',')
    print "Enter message, end with ^D:"
    # Accumulate the message body until EOF on stdin.
    msg = ''
    while 1:
        line = sys.stdin.readline()
        if not line:
            break
        msg = msg + line
    print "Message length is %d" % len(msg)

    server = SMTP('localhost')
    server.set_debuglevel(1)  # trace the full SMTP dialogue to stderr
    server.sendmail(fromaddr, toaddrs, msg)
    server.quit()
|
jamespcole/home-assistant | refs/heads/master | tests/components/mqtt/test_vacuum.py | 4 | """The tests for the Mqtt vacuum platform."""
import json
import pytest
from homeassistant.components import mqtt, vacuum
from homeassistant.components.mqtt import (
CONF_COMMAND_TOPIC, vacuum as mqttvacuum)
from homeassistant.components.mqtt.discovery import async_start
from homeassistant.components.vacuum import (
ATTR_BATTERY_ICON, ATTR_BATTERY_LEVEL, ATTR_FAN_SPEED, ATTR_STATUS)
from homeassistant.const import (
CONF_NAME, CONF_PLATFORM, STATE_OFF, STATE_ON, STATE_UNAVAILABLE)
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry, async_fire_mqtt_message, async_mock_mqtt_component)
from tests.components.vacuum import common
# Baseline configuration for the MQTT vacuum under test.
# NOTE(review): several tests below mutate this module-level dict in place
# (adding supported_features / availability keys), so results can depend on
# test execution order -- worth confirming and isolating.
default_config = {
    CONF_PLATFORM: 'mqtt',
    CONF_NAME: 'mqtttest',
    CONF_COMMAND_TOPIC: 'vacuum/command',
    mqttvacuum.CONF_SEND_COMMAND_TOPIC: 'vacuum/send_command',
    mqttvacuum.CONF_BATTERY_LEVEL_TOPIC: 'vacuum/state',
    mqttvacuum.CONF_BATTERY_LEVEL_TEMPLATE:
        '{{ value_json.battery_level }}',
    mqttvacuum.CONF_CHARGING_TOPIC: 'vacuum/state',
    mqttvacuum.CONF_CHARGING_TEMPLATE: '{{ value_json.charging }}',
    mqttvacuum.CONF_CLEANING_TOPIC: 'vacuum/state',
    mqttvacuum.CONF_CLEANING_TEMPLATE: '{{ value_json.cleaning }}',
    mqttvacuum.CONF_DOCKED_TOPIC: 'vacuum/state',
    mqttvacuum.CONF_DOCKED_TEMPLATE: '{{ value_json.docked }}',
    mqttvacuum.CONF_STATE_TOPIC: 'vacuum/state',
    mqttvacuum.CONF_STATE_TEMPLATE: '{{ value_json.state }}',
    mqttvacuum.CONF_FAN_SPEED_TOPIC: 'vacuum/state',
    mqttvacuum.CONF_FAN_SPEED_TEMPLATE: '{{ value_json.fan_speed }}',
    mqttvacuum.CONF_SET_FAN_SPEED_TOPIC: 'vacuum/set_fan_speed',
    mqttvacuum.CONF_FAN_SPEED_LIST: ['min', 'medium', 'high', 'max'],
}
@pytest.fixture
def mock_publish(hass):
    """Set up the mocked MQTT component and yield it to the test."""
    # `yield` (rather than `return`) keeps the mocked component alive for
    # the duration of the test that uses this fixture.
    yield hass.loop.run_until_complete(async_mock_mqtt_component(hass))
async def test_default_supported_features(hass, mock_publish):
    """Test that the correct supported features."""
    assert await async_setup_component(
        hass, vacuum.DOMAIN, {vacuum.DOMAIN: default_config})

    state = hass.states.get('vacuum.mqtttest')
    features = state.attributes.get(mqttvacuum.CONF_SUPPORTED_FEATURES, 0)
    # Alphabetically sorted default feature strings.
    expected = ['battery', 'clean_spot', 'return_home', 'status',
                'stop', 'turn_off', 'turn_on']
    assert sorted(mqttvacuum.services_to_strings(features)) == expected
async def test_all_commands(hass, mock_publish):
    """Test simple commands to the vacuum."""
    # Use a per-test copy instead of mutating the shared module-level
    # default_config in place, which leaked state into later tests.
    config = dict(default_config)
    config[mqttvacuum.CONF_SUPPORTED_FEATURES] = \
        mqttvacuum.services_to_strings(mqttvacuum.ALL_SERVICES)

    assert await async_setup_component(hass, vacuum.DOMAIN, {
        vacuum.DOMAIN: config,
    })

    common.turn_on(hass, 'vacuum.mqtttest')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    mock_publish.async_publish.assert_called_once_with(
        'vacuum/command', 'turn_on', 0, False)
    mock_publish.async_publish.reset_mock()

    common.turn_off(hass, 'vacuum.mqtttest')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    mock_publish.async_publish.assert_called_once_with(
        'vacuum/command', 'turn_off', 0, False)
    mock_publish.async_publish.reset_mock()

    common.stop(hass, 'vacuum.mqtttest')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    mock_publish.async_publish.assert_called_once_with(
        'vacuum/command', 'stop', 0, False)
    mock_publish.async_publish.reset_mock()

    common.clean_spot(hass, 'vacuum.mqtttest')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    mock_publish.async_publish.assert_called_once_with(
        'vacuum/command', 'clean_spot', 0, False)
    mock_publish.async_publish.reset_mock()

    common.locate(hass, 'vacuum.mqtttest')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    mock_publish.async_publish.assert_called_once_with(
        'vacuum/command', 'locate', 0, False)
    mock_publish.async_publish.reset_mock()

    common.start_pause(hass, 'vacuum.mqtttest')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    mock_publish.async_publish.assert_called_once_with(
        'vacuum/command', 'start_pause', 0, False)
    mock_publish.async_publish.reset_mock()

    common.return_to_base(hass, 'vacuum.mqtttest')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    mock_publish.async_publish.assert_called_once_with(
        'vacuum/command', 'return_to_base', 0, False)
    mock_publish.async_publish.reset_mock()

    common.set_fan_speed(hass, 'high', 'vacuum.mqtttest')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    mock_publish.async_publish.assert_called_once_with(
        'vacuum/set_fan_speed', 'high', 0, False)
    mock_publish.async_publish.reset_mock()

    common.send_command(hass, '44 FE 93', entity_id='vacuum.mqtttest')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    mock_publish.async_publish.assert_called_once_with(
        'vacuum/send_command', '44 FE 93', 0, False)
async def test_status(hass, mock_publish):
    """Test status updates from the vacuum."""
    # Use a per-test copy instead of mutating the shared module-level
    # default_config in place, which leaked state into later tests.
    config = dict(default_config)
    config[mqttvacuum.CONF_SUPPORTED_FEATURES] = \
        mqttvacuum.services_to_strings(mqttvacuum.ALL_SERVICES)

    assert await async_setup_component(hass, vacuum.DOMAIN, {
        vacuum.DOMAIN: config,
    })

    message = """{
        "battery_level": 54,
        "cleaning": true,
        "docked": false,
        "charging": false,
        "fan_speed": "max"
    }"""
    async_fire_mqtt_message(hass, 'vacuum/state', message)
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('vacuum.mqtttest')
    assert STATE_ON == state.state
    assert 'mdi:battery-50' == \
        state.attributes.get(ATTR_BATTERY_ICON)
    assert 54 == state.attributes.get(ATTR_BATTERY_LEVEL)
    assert 'max' == state.attributes.get(ATTR_FAN_SPEED)

    message = """{
        "battery_level": 61,
        "docked": true,
        "cleaning": false,
        "charging": true,
        "fan_speed": "min"
    }"""
    async_fire_mqtt_message(hass, 'vacuum/state', message)
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('vacuum.mqtttest')
    assert STATE_OFF == state.state
    assert 'mdi:battery-charging-60' == \
        state.attributes.get(ATTR_BATTERY_ICON)
    assert 61 == state.attributes.get(ATTR_BATTERY_LEVEL)
    assert 'min' == state.attributes.get(ATTR_FAN_SPEED)
async def test_battery_template(hass, mock_publish):
    """Test that you can use non-default templates for battery_level."""
    # Build a per-test config instead of calling default_config.update(),
    # which mutated the shared module-level dict and leaked into other tests.
    config = dict(default_config)
    config.update({
        mqttvacuum.CONF_SUPPORTED_FEATURES:
            mqttvacuum.services_to_strings(mqttvacuum.ALL_SERVICES),
        mqttvacuum.CONF_BATTERY_LEVEL_TOPIC: "retroroomba/battery_level",
        mqttvacuum.CONF_BATTERY_LEVEL_TEMPLATE: "{{ value }}"
    })

    assert await async_setup_component(hass, vacuum.DOMAIN, {
        vacuum.DOMAIN: config,
    })

    async_fire_mqtt_message(hass, 'retroroomba/battery_level', '54')
    await hass.async_block_till_done()
    state = hass.states.get('vacuum.mqtttest')
    assert 54 == state.attributes.get(ATTR_BATTERY_LEVEL)
    assert state.attributes.get(ATTR_BATTERY_ICON) == \
        'mdi:battery-50'
async def test_status_invalid_json(hass, mock_publish):
    """Test to make sure nothing breaks if the vacuum sends bad JSON."""
    # Use a per-test copy instead of mutating the shared module-level
    # default_config in place, which leaked state into later tests.
    config = dict(default_config)
    config[mqttvacuum.CONF_SUPPORTED_FEATURES] = \
        mqttvacuum.services_to_strings(mqttvacuum.ALL_SERVICES)

    assert await async_setup_component(hass, vacuum.DOMAIN, {
        vacuum.DOMAIN: config,
    })

    # Malformed payload must be ignored, leaving the entity in its
    # initial state.
    async_fire_mqtt_message(hass, 'vacuum/state', '{"asdfasas false}')
    await hass.async_block_till_done()
    state = hass.states.get('vacuum.mqtttest')
    assert STATE_OFF == state.state
    assert "Stopped" == state.attributes.get(ATTR_STATUS)
async def test_default_availability_payload(hass, mock_publish):
    """Test availability by default payload with defined topic."""
    # Build a per-test config instead of calling default_config.update(),
    # which mutated the shared module-level dict and leaked into other tests.
    config = dict(default_config)
    config.update({
        'availability_topic': 'availability-topic'
    })

    assert await async_setup_component(hass, vacuum.DOMAIN, {
        vacuum.DOMAIN: config,
    })

    state = hass.states.get('vacuum.mqtttest')
    assert STATE_UNAVAILABLE == state.state

    async_fire_mqtt_message(hass, 'availability-topic', 'online')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('vacuum.mqtttest')
    assert STATE_UNAVAILABLE != state.state

    async_fire_mqtt_message(hass, 'availability-topic', 'offline')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('vacuum.mqtttest')
    assert STATE_UNAVAILABLE == state.state
async def test_custom_availability_payload(hass, mock_publish):
    """Test availability by custom payload with defined topic."""
    # Build a per-test config instead of calling default_config.update(),
    # which mutated the shared module-level dict and leaked into other tests.
    config = dict(default_config)
    config.update({
        'availability_topic': 'availability-topic',
        'payload_available': 'good',
        'payload_not_available': 'nogood'
    })

    assert await async_setup_component(hass, vacuum.DOMAIN, {
        vacuum.DOMAIN: config,
    })

    state = hass.states.get('vacuum.mqtttest')
    assert STATE_UNAVAILABLE == state.state

    async_fire_mqtt_message(hass, 'availability-topic', 'good')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('vacuum.mqtttest')
    assert STATE_UNAVAILABLE != state.state

    async_fire_mqtt_message(hass, 'availability-topic', 'nogood')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('vacuum.mqtttest')
    assert STATE_UNAVAILABLE == state.state
async def test_discovery_removal_vacuum(hass, mock_publish):
    """Test removal of discovered vacuum."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, 'homeassistant', {}, entry)

    payload = '{ "name": "Beer", "command_topic": "test_topic" }'
    async_fire_mqtt_message(
        hass, 'homeassistant/vacuum/bla/config', payload)
    await hass.async_block_till_done()
    await hass.async_block_till_done()

    beer = hass.states.get('vacuum.beer')
    assert beer is not None
    assert beer.name == 'Beer'

    # An empty config payload removes the discovered entity.
    async_fire_mqtt_message(hass, 'homeassistant/vacuum/bla/config', '')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert hass.states.get('vacuum.beer') is None
async def test_discovery_broken(hass, mqtt_mock, caplog):
    """Test handling of bad discovery message."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, 'homeassistant', {}, entry)

    bad_payload = '{ "name": "Beer", "command_topic": "test_topic#" }'
    good_payload = '{ "name": "Milk", "command_topic": "test_topic" }'

    # An invalid command topic ('#' wildcard) must not create an entity.
    async_fire_mqtt_message(
        hass, 'homeassistant/vacuum/bla/config', bad_payload)
    await hass.async_block_till_done()
    assert hass.states.get('vacuum.beer') is None

    # A subsequent valid config on the same discovery topic works.
    async_fire_mqtt_message(
        hass, 'homeassistant/vacuum/bla/config', good_payload)
    await hass.async_block_till_done()
    await hass.async_block_till_done()

    milk = hass.states.get('vacuum.milk')
    assert milk is not None
    assert milk.name == 'Milk'
    assert hass.states.get('vacuum.beer') is None
async def test_discovery_update_vacuum(hass, mock_publish):
    """Test update of discovered vacuum."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, 'homeassistant', {}, entry)

    first_payload = '{ "name": "Beer", "command_topic": "test_topic" }'
    second_payload = '{ "name": "Milk", "command_topic": "test_topic" }'

    async_fire_mqtt_message(
        hass, 'homeassistant/vacuum/bla/config', first_payload)
    await hass.async_block_till_done()

    beer = hass.states.get('vacuum.beer')
    assert beer is not None
    assert beer.name == 'Beer'

    # Re-publishing on the same discovery topic renames the existing
    # entity in place rather than creating a second one.
    async_fire_mqtt_message(
        hass, 'homeassistant/vacuum/bla/config', second_payload)
    await hass.async_block_till_done()
    await hass.async_block_till_done()

    beer = hass.states.get('vacuum.beer')
    assert beer is not None
    assert beer.name == 'Milk'
    assert hass.states.get('vacuum.milk') is None
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
    """Test the setting of attribute via MQTT with JSON payload."""
    config = {
        'platform': 'mqtt',
        'name': 'test',
        'state_topic': 'test-topic',
        'json_attributes_topic': 'attr-topic',
    }
    assert await async_setup_component(
        hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})

    async_fire_mqtt_message(hass, 'attr-topic', '{ "val": "100" }')
    await hass.async_block_till_done()

    # The JSON keys become entity attributes.
    assert hass.states.get('vacuum.test').attributes.get('val') == '100'
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
    """Test attributes get extracted from a JSON result."""
    config = {
        'platform': 'mqtt',
        'name': 'test',
        'state_topic': 'test-topic',
        'json_attributes_topic': 'attr-topic',
    }
    assert await async_setup_component(
        hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})

    # A JSON list (valid JSON, wrong shape) is rejected with a log message.
    async_fire_mqtt_message(hass, 'attr-topic', '[ "list", "of", "things"]')
    await hass.async_block_till_done()

    assert hass.states.get('vacuum.test').attributes.get('val') is None
    assert 'JSON result was not a dictionary' in caplog.text
async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog):
    """Test attributes get extracted from a JSON result."""
    config = {
        'platform': 'mqtt',
        'name': 'test',
        'state_topic': 'test-topic',
        'json_attributes_topic': 'attr-topic',
    }
    assert await async_setup_component(
        hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})

    # An unparsable payload is rejected with a log message.
    async_fire_mqtt_message(hass, 'attr-topic', 'This is not JSON')
    await hass.async_block_till_done()

    assert hass.states.get('vacuum.test').attributes.get('val') is None
    assert 'Erroneous JSON: This is not JSON' in caplog.text
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
    """Test update of discovered MQTTAttributes.

    Re-discovering the entity with a new ``json_attributes_topic`` must
    move the attributes subscription from the old topic to the new one.
    """
    # Start MQTT discovery on the default 'homeassistant' prefix.
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, 'homeassistant', {}, entry)
    # Two discovery payloads differing only in the attributes topic.
    data1 = (
        '{ "name": "Beer",'
        ' "command_topic": "test_topic",'
        ' "json_attributes_topic": "attr-topic1" }'
    )
    data2 = (
        '{ "name": "Beer",'
        ' "command_topic": "test_topic",'
        ' "json_attributes_topic": "attr-topic2" }'
    )
    async_fire_mqtt_message(hass, 'homeassistant/vacuum/bla/config',
                            data1)
    await hass.async_block_till_done()
    async_fire_mqtt_message(hass, 'attr-topic1', '{ "val": "100" }')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('vacuum.beer')
    assert '100' == state.attributes.get('val')
    # Change json_attributes_topic
    async_fire_mqtt_message(hass, 'homeassistant/vacuum/bla/config',
                            data2)
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # Verify we are no longer subscribing to the old topic
    async_fire_mqtt_message(hass, 'attr-topic1', '{ "val": "50" }')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('vacuum.beer')
    assert '100' == state.attributes.get('val')
    # Verify we are subscribing to the new topic
    async_fire_mqtt_message(hass, 'attr-topic2', '{ "val": "75" }')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('vacuum.beer')
    assert '75' == state.attributes.get('val')
async def test_unique_id(hass, mock_publish):
    """Test unique id option only creates one vacuum per unique_id."""
    await async_mock_mqtt_component(hass)
    # Two platform configs sharing the same unique_id: only one vacuum
    # entity should actually be created for the pair.
    assert await async_setup_component(hass, vacuum.DOMAIN, {
        vacuum.DOMAIN: [{
            'platform': 'mqtt',
            'name': 'Test 1',
            'command_topic': 'command-topic',
            'unique_id': 'TOTALLY_UNIQUE'
        }, {
            'platform': 'mqtt',
            'name': 'Test 2',
            'command_topic': 'command-topic',
            'unique_id': 'TOTALLY_UNIQUE'
        }]
    })
    # NOTE(review): 'test-topic' is not one of the topics configured
    # above — presumably only serves to flush pending work; confirm.
    async_fire_mqtt_message(hass, 'test-topic', 'payload')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # Two states: the all-vacuums group entity plus the single
    # deduplicated vacuum.
    assert len(hass.states.async_entity_ids()) == 2
    # all vacuums group is 1, unique id created is 1
async def test_entity_device_info_with_identifier(hass, mock_publish):
    """Test MQTT vacuum device registry integration.

    A discovery payload carrying a ``device`` block must create a matching
    entry in the device registry.
    """
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    entry.add_to_hass(hass)
    await async_start(hass, 'homeassistant', {}, entry)
    registry = await hass.helpers.device_registry.async_get_registry()
    # Discovery payload with full device metadata.
    data = json.dumps({
        'platform': 'mqtt',
        'name': 'Test 1',
        'command_topic': 'test-command-topic',
        'device': {
            'identifiers': ['helloworld'],
            'connections': [
                ["mac", "02:5b:26:a8:dc:12"],
            ],
            'manufacturer': 'Whatever',
            'name': 'Beer',
            'model': 'Glass',
            'sw_version': '0.1-beta',
        },
        'unique_id': 'veryunique'
    })
    async_fire_mqtt_message(hass, 'homeassistant/vacuum/bla/config',
                            data)
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # Every field from the payload must be reflected in the registry.
    device = registry.async_get_device({('mqtt', 'helloworld')}, set())
    assert device is not None
    assert device.identifiers == {('mqtt', 'helloworld')}
    assert device.connections == {('mac', "02:5b:26:a8:dc:12")}
    assert device.manufacturer == 'Whatever'
    assert device.name == 'Beer'
    assert device.model == 'Glass'
    assert device.sw_version == '0.1-beta'
async def test_entity_device_info_update(hass, mqtt_mock):
    """Test device registry update.

    Re-sending the discovery payload with changed device info must update
    the existing registry entry rather than create a new one.
    """
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    entry.add_to_hass(hass)
    await async_start(hass, 'homeassistant', {}, entry)
    registry = await hass.helpers.device_registry.async_get_registry()
    config = {
        'platform': 'mqtt',
        'name': 'Test 1',
        'state_topic': 'test-topic',
        'command_topic': 'test-command-topic',
        'device': {
            'identifiers': ['helloworld'],
            'connections': [
                ["mac", "02:5b:26:a8:dc:12"],
            ],
            'manufacturer': 'Whatever',
            'name': 'Beer',
            'model': 'Glass',
            'sw_version': '0.1-beta',
        },
        'unique_id': 'veryunique'
    }
    data = json.dumps(config)
    async_fire_mqtt_message(hass, 'homeassistant/vacuum/bla/config',
                            data)
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    device = registry.async_get_device({('mqtt', 'helloworld')}, set())
    assert device is not None
    assert device.name == 'Beer'
    # Rediscover with an updated device name; same identifiers.
    config['device']['name'] = 'Milk'
    data = json.dumps(config)
    async_fire_mqtt_message(hass, 'homeassistant/vacuum/bla/config',
                            data)
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    device = registry.async_get_device({('mqtt', 'helloworld')}, set())
    assert device is not None
    assert device.name == 'Milk'
|
mricharleon/UnitedSociety | refs/heads/master | apps/society/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
socialsweethearts/django-allauth | refs/heads/master | allauth/socialaccount/providers/hubic/urls.py | 69 | from allauth.socialaccount.providers.oauth2.urls import default_urlpatterns
from .provider import HubicProvider
# Hook the Hubic provider into allauth's standard OAuth2 login/callback routes.
urlpatterns = default_urlpatterns(HubicProvider)
|
pmghalvorsen/gramps_branch | refs/heads/master | gramps/gui/filters/sidebar/_mediasidebarfilter.py | 1 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
# Copyright (C) 2010 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Python modules
#
#-------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# gtk
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from ... import widgets
from .. import build_filter_model
from . import SidebarFilter
from gramps.gen.constfunc import cuni
from gramps.gen.filters import GenericFilterFactory, rules
from gramps.gen.filters.rules.media import (RegExpIdOf, HasMedia, HasTag,
HasNoteRegexp, MatchesFilter)
GenericMediaFilter = GenericFilterFactory('Media')
#-------------------------------------------------------------------------
#
# MediaSidebarFilter class
#
#-------------------------------------------------------------------------
class MediaSidebarFilter(SidebarFilter):
    """Sidebar filter panel for the Media view.

    Builds one entry widget per filterable media attribute, and translates
    the entered values into a GenericMediaFilter made of media rules.
    """

    def __init__(self, dbstate, uistate, clicked):
        self.clicked_func = clicked
        # One entry widget per filterable media attribute.
        self.filter_id = widgets.BasicEntry()
        self.filter_title = widgets.BasicEntry()
        self.filter_type = widgets.BasicEntry()
        self.filter_path = widgets.BasicEntry()
        self.filter_date = widgets.DateEntry(uistate, [])
        self.filter_note = widgets.BasicEntry()
        self.filter_regex = Gtk.CheckButton(label=_('Use regular expressions'))
        # Combo boxes for tag selection and custom (generic) filters;
        # their models are populated in on_tags_changed/on_filters_changed.
        self.tag = Gtk.ComboBox()
        self.generic = Gtk.ComboBox()
        SidebarFilter.__init__(self, dbstate, uistate, "Media")

    def create_widget(self):
        """Create and lay out the widgets of the sidebar panel."""
        # Text renderer for the custom-filter combo.
        cell = Gtk.CellRendererText()
        cell.set_property('width', self._FILTER_WIDTH)
        cell.set_property('ellipsize', self._FILTER_ELLIPSIZE)
        self.generic.pack_start(cell, True)
        self.generic.add_attribute(cell, 'text', 0)
        self.on_filters_changed('Media')

        # Text renderer for the tag combo.
        cell = Gtk.CellRendererText()
        cell.set_property('width', self._FILTER_WIDTH)
        cell.set_property('ellipsize', self._FILTER_ELLIPSIZE)
        self.tag.pack_start(cell, True)
        self.tag.add_attribute(cell, 'text', 0)

        self.add_text_entry(_('ID'), self.filter_id)
        self.add_text_entry(_('Title'), self.filter_title)
        self.add_text_entry(_('Type'), self.filter_type)
        self.add_text_entry(_('Path'), self.filter_path)
        self.add_text_entry(_('Date'), self.filter_date)
        self.add_text_entry(_('Note'), self.filter_note)
        self.add_entry(_('Tag'), self.tag)
        self.add_filter_entry(_('Custom filter'), self.generic)
        self.add_regex_entry(self.filter_regex)

    def clear(self, obj):
        """Reset all filter widgets to their empty/default state."""
        self.filter_id.set_text('')
        self.filter_title.set_text('')
        self.filter_type.set_text('')
        self.filter_path.set_text('')
        self.filter_date.set_text('')
        self.filter_note.set_text('')
        self.tag.set_active(0)
        self.generic.set_active(0)

    def get_filter(self):
        """Build a GenericMediaFilter from the current widget values.

        Returns None when no filter option is set at all.
        """
        gid = cuni(self.filter_id.get_text()).strip()
        title = cuni(self.filter_title.get_text()).strip()
        mime = cuni(self.filter_type.get_text()).strip()
        path = cuni(self.filter_path.get_text()).strip()
        date = cuni(self.filter_date.get_text()).strip()
        note = cuni(self.filter_note.get_text()).strip()
        regex = self.filter_regex.get_active()
        tag = self.tag.get_active() > 0
        gen = self.generic.get_active() > 0

        empty = not (gid or title or mime or path or date
                     or note or regex or tag or gen)
        if empty:
            generic_filter = None
        else:
            generic_filter = GenericMediaFilter()
            if gid:
                rule = RegExpIdOf([gid], use_regex=regex)
                generic_filter.add_rule(rule)
            # HasMedia is always added; empty strings match everything.
            rule = HasMedia([title, mime, path, date], use_regex=regex)
            generic_filter.add_rule(rule)
            if note:
                rule = HasNoteRegexp([note], use_regex=regex)
                generic_filter.add_rule(rule)

            # check the Tag
            if tag:
                model = self.tag.get_model()
                node = self.tag.get_active_iter()
                attr = model.get_value(node, 0)
                rule = HasTag([attr])
                generic_filter.add_rule(rule)

            if self.generic.get_active() != 0:
                model = self.generic.get_model()
                node = self.generic.get_active_iter()
                obj = cuni(model.get_value(node, 0))
                rule = MatchesFilter([obj])
                generic_filter.add_rule(rule)

        return generic_filter

    def on_filters_changed(self, name_space):
        """Reload the custom-filter combo when the Media filters change."""
        if name_space == 'Media':
            all_filter = GenericMediaFilter()
            all_filter.set_name(_("None"))
            all_filter.add_rule(rules.media.AllMedia([]))
            self.generic.set_model(build_filter_model('Media', [all_filter]))
            self.generic.set_active(0)

    def on_tags_changed(self, tag_list):
        """
        Update the list of tags in the tag filter.
        """
        model = Gtk.ListStore(str)
        # First (empty) row means "no tag filter".
        model.append(('',))
        for tag_name in tag_list:
            model.append((tag_name,))
        self.tag.set_model(model)
        self.tag.set_active(0)
|
girving/tensorflow | refs/heads/master | tensorflow/python/debug/cli/curses_widgets.py | 156 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Widgets for Curses-based CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.debug.cli import debugger_cli_common
RL = debugger_cli_common.RichLine
class NavigationHistoryItem(object):
  """Individual item in navigation history."""

  def __init__(self, command, screen_output, scroll_position):
    """Constructor of NavigationHistoryItem.

    Args:
      command: (`str`) the command line text.
      screen_output: the screen output of the command.
      scroll_position: (`int`) scroll position in the screen output.
    """
    self.command = command
    self.screen_output = screen_output
    # Mutable: updated in place when the user scrolls this output.
    self.scroll_position = scroll_position
class CursesNavigationHistory(object):
  """Navigation history containing commands, outputs and scroll info."""

  BACK_ARROW_TEXT = "<--"
  FORWARD_ARROW_TEXT = "-->"

  def __init__(self, capacity):
    """Constructor of CursesNavigationHistory.

    Args:
      capacity: (`int`) How many items this object can hold. Each item consists
        of a command string, an output RichTextLines object and a scroll
        position.

    Raises:
      ValueError: If capacity is not a positive number.
    """
    if capacity <= 0:
      # Fixed typo in the error message ("In valid" -> "Invalid").
      raise ValueError("Invalid capacity value: %d" % capacity)

    self._capacity = capacity
    self._items = []
    self._pointer = -1  # -1 means the history is empty.

  def add_item(self, command, screen_output, scroll_position):
    """Add an item to the navigation history.

    Any items forward of the current pointer are discarded, then the new
    item is appended; the oldest items are evicted if capacity is exceeded.

    Args:
      command: command line text.
      screen_output: screen output produced for the command.
      scroll_position: (`int`) scroll position in the screen output.
    """
    if self._pointer + 1 < len(self._items):
      self._items = self._items[:self._pointer + 1]
    self._items.append(
        NavigationHistoryItem(command, screen_output, scroll_position))
    if len(self._items) > self._capacity:
      self._items = self._items[-self._capacity:]
    self._pointer = len(self._items) - 1

  def update_scroll_position(self, new_scroll_position):
    """Update the scroll position of the currently-pointed-to history item.

    Args:
      new_scroll_position: (`int`) new scroll-position value.

    Raises:
      ValueError: If the history is empty.
    """
    if not self._items:
      raise ValueError("Empty navigation history")
    self._items[self._pointer].scroll_position = new_scroll_position

  def size(self):
    """Return the number of items currently held."""
    return len(self._items)

  def pointer(self):
    """Return the index of the currently-pointed-to item (-1 if empty)."""
    return self._pointer

  def go_back(self):
    """Go back one place in the history, if possible.

    Decrease the pointer value by 1, if possible. Otherwise, the pointer value
    will be unchanged.

    Returns:
      The `NavigationHistoryItem` at the updated pointer position.

    Raises:
      ValueError: If history is empty.
    """
    if not self._items:
      raise ValueError("Empty navigation history")

    if self.can_go_back():
      self._pointer -= 1
    return self._items[self._pointer]

  def go_forward(self):
    """Go forward one place in the history, if possible.

    Increase the pointer value by 1, if possible. Otherwise, the pointer value
    will be unchanged.

    Returns:
      The `NavigationHistoryItem` at the updated pointer position.

    Raises:
      ValueError: If history is empty.
    """
    if not self._items:
      raise ValueError("Empty navigation history")

    if self.can_go_forward():
      self._pointer += 1
    return self._items[self._pointer]

  def can_go_back(self):
    """Test whether client can go back one place.

    Returns:
      (`bool`) Whether going back one place is possible.
    """
    return self._pointer >= 1

  def can_go_forward(self):
    """Test whether client can go forward one place.

    Returns:
      (`bool`) Whether going forward one place is possible.
    """
    return self._pointer + 1 < len(self._items)

  def render(self,
             max_length,
             backward_command,
             forward_command,
             latest_command_attribute="black_on_white",
             old_command_attribute="magenta_on_white"):
    """Render the rich text content of the single-line navigation bar.

    Args:
      max_length: (`int`) Maximum length of the navigation bar, in characters.
      backward_command: (`str`) command for going backward. Used to construct
        the shortcut menu item.
      forward_command: (`str`) command for going forward. Used to construct the
        shortcut menu item.
      latest_command_attribute: font attribute for latest command.
      old_command_attribute: font attribute for old (non-latest) command.

    Returns:
      (`debugger_cli_common.RichTextLines`) the navigation bar text with
        attributes.
    """
    output = RL("| ")
    output += RL(
        self.BACK_ARROW_TEXT,
        (debugger_cli_common.MenuItem(None, backward_command)
         if self.can_go_back() else None))
    output += RL(" ")
    output += RL(
        self.FORWARD_ARROW_TEXT,
        (debugger_cli_common.MenuItem(None, forward_command)
         if self.can_go_forward() else None))

    if self._items:
      command_attribute = (latest_command_attribute
                           if (self._pointer == (len(self._items) - 1))
                           else old_command_attribute)
      output += RL(" | ")
      if self._pointer != len(self._items) - 1:
        output += RL("(-%d) " % (len(self._items) - 1 - self._pointer),
                     command_attribute)

      if len(output) < max_length:
        maybe_truncated_command = self._items[self._pointer].command[
            :(max_length - len(output))]
        output += RL(maybe_truncated_command, command_attribute)

    return debugger_cli_common.rich_text_lines_from_rich_line_list([output])
|
fsherratt/custom_pixhawk | refs/heads/Drag_Hack | Tools/LogAnalyzer/tests/TestThrust.py | 261 | from LogAnalyzer import Test,TestResult
import DataflashLog
class TestThrust(Test):
    '''test for sufficient thrust (copter only for now)'''

    def __init__(self):
        Test.__init__(self)
        self.name = "Thrust"

    def run(self, logdata, verbose):
        '''Flag flights where sustained high throttle produced little climb.

        Finds contiguous near-level, high-throttle periods in CTUN.ThrOut and
        fails/warns when the average climb rate over such a period is low.
        '''
        self.result = TestResult()
        self.result.status = TestResult.StatusType.GOOD

        # Thrust analysis only applies to multirotors.
        if logdata.vehicleType != "ArduCopter":
            self.result.status = TestResult.StatusType.NA
            return

        # Membership tests use the idiomatic "not in" (was: not "X" in ...).
        if "CTUN" not in logdata.channels:
            self.result.status = TestResult.StatusType.UNKNOWN
            self.result.statusMessage = "No CTUN log data"
            return
        if "ATT" not in logdata.channels:
            self.result.status = TestResult.StatusType.UNKNOWN
            self.result.statusMessage = "No ATT log data"
            return

        # check for throttle (CTUN.ThrOut) above 700 for a chunk of time with copter not rising
        highThrottleThreshold = 700
        tiltThreshold = 20  # ignore high throttle when roll or tilt is above this value
        climbThresholdWARN = 100
        climbThresholdFAIL = 50
        minSampleLength = 50

        highThrottleSegments = []

        # find any contiguous chunks where CTUN.ThrOut > highThrottleThreshold,
        # ignore high throttle if tilt > tiltThreshold, and discard any
        # segments shorter than minSampleLength
        start = None
        data = logdata.channels["CTUN"]["ThrOut"].listData
        for i, (lineNumber, value) in enumerate(data):
            isBelowTiltThreshold = True
            if value > highThrottleThreshold:
                (roll, meh) = logdata.channels["ATT"]["Roll"].getNearestValue(lineNumber)
                (pitch, meh) = logdata.channels["ATT"]["Pitch"].getNearestValue(lineNumber)
                if (abs(roll) > tiltThreshold) or (abs(pitch) > tiltThreshold):
                    isBelowTiltThreshold = False
            if (value > highThrottleThreshold) and isBelowTiltThreshold:
                # None comparisons use identity (was: == None / != None).
                if start is None:
                    start = i
            elif start is not None:
                if (i - start) > minSampleLength:
                    #print "Found high throttle chunk from line %d to %d (%d samples)" % (data[start][0],data[i][0],i-start+1)
                    highThrottleSegments.append((start, i))
                start = None
        # NOTE(review): a high-throttle chunk still open at the end of the log
        # is never appended; behavior preserved as-is — confirm intent.

        # older logs call the climb-rate field CRate, newer ones CRt
        climbRate = "CRate"
        if "CRate" not in logdata.channels["CTUN"]:
            climbRate = "CRt"

        # loop through each checking climbRate, if < 50 FAIL, if < 100 WARN
        # TODO: we should filter climbRate and use its slope rather than value for this test
        for seg in highThrottleSegments:
            (startLine, endLine) = (data[seg[0]][0], data[seg[1]][0])
            avgClimbRate = logdata.channels["CTUN"][climbRate].getSegment(startLine, endLine).avg()
            avgThrOut = logdata.channels["CTUN"]["ThrOut"].getSegment(startLine, endLine).avg()
            if avgClimbRate < climbThresholdFAIL:
                self.result.status = TestResult.StatusType.FAIL
                self.result.statusMessage = "Avg climb rate %.2f cm/s for throttle avg %d" % (avgClimbRate, avgThrOut)
                return
            if avgClimbRate < climbThresholdWARN:
                self.result.status = TestResult.StatusType.WARN
                self.result.statusMessage = "Avg climb rate %.2f cm/s for throttle avg %d" % (avgClimbRate, avgThrOut)
Gateworks/platform-external-chromium_org | refs/heads/imx_kk4.4.3_2.0.0-beta | tools/telemetry/telemetry/page/page_measurement.py | 24 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page_test
class MeasurementFailure(page_test.Failure):
  """Exception that can be thrown from MeasurePage to indicate an undesired
  but designed-for problem."""
  pass
class PageMeasurement(page_test.PageTest):
  """Glue code for running a measurement across a set of pages.

  To use this, subclass from the measurement and override MeasurePage. For
  example:

     class BodyChildElementMeasurement(PageMeasurement):
        def MeasurePage(self, page, tab, results):
           body_child_count = tab.EvaluateJavaScript(
               'document.body.children.length')
           results.Add('body_children', 'count', body_child_count)

     if __name__ == '__main__':
         page_measurement.Main(BodyChildElementMeasurement())

  To add test-specific options:

     class BodyChildElementMeasurement(PageMeasurement):
        def AddCommandLineOptions(parser):
           parser.add_option('--element', action='store', default='body')

        def MeasurePage(self, page, tab, results):
           body_child_count = tab.EvaluateJavaScript(
               'document.querySelector('%s').children.length')
           results.Add('children', 'count', child_count)
  """

  def __init__(self,
               action_name_to_run='',
               needs_browser_restart_after_each_run=False,
               discard_first_result=False,
               clear_cache_before_each_run=False):
    # '_RunTest' names the method the base class invokes for each page.
    super(PageMeasurement, self).__init__(
        '_RunTest',
        action_name_to_run,
        needs_browser_restart_after_each_run,
        discard_first_result,
        clear_cache_before_each_run)

  def _RunTest(self, page, tab, results):
    # Bracket the measurement so results are attributed to this page.
    results.WillMeasurePage(page)
    self.MeasurePage(page, tab, results)
    results.DidMeasurePage()

  @property
  def results_are_the_same_on_every_page(self):
    """By default, measurements are assumed to output the same values for every
    page. This allows incremental output, for example in CSV. If, however, the
    measurement discovers what values it can report as it goes, and those values
    may vary from page to page, you need to override this function and return
    False. Output will not appear in this mode until the entire pageset has
    run."""
    return True

  def MeasurePage(self, page, tab, results):
    """Override to actually measure the page's performance.

    page is a page_set.Page
    tab is an instance of telemetry.core.Tab

    Should call results.Add(name, units, value) for each result, or raise an
    exception on failure. The name and units of each Add() call must be
    the same across all iterations. The name 'url' must not be used.

    Prefer field names that are in accordance with python variable style. E.g.
    field_name.

    Put together:

       def MeasurePage(self, page, tab, results):
          res = tab.EvaluateJavaScript('2+2')
          if res != 4:
             raise Exception('Oh, wow.')
          results.Add('two_plus_two', 'count', res)
    """
    raise NotImplementedError()
|
cl4rke/scikit-learn | refs/heads/master | examples/model_selection/plot_train_error_vs_test_error.py | 349 | """
=========================
Train error vs Test error
=========================
Illustration of how the performance of an estimator on unseen data (test data)
is not the same as the performance on training data. As the regularization
increases the performance on train decreases while the performance on test
is optimal within a range of values of the regularization parameter.
The example with an Elastic-Net regression model and the performance is
measured using the explained variance a.k.a. R^2.
"""
print(__doc__)

# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# License: BSD 3 clause

import numpy as np
from sklearn import linear_model

###############################################################################
# Generate sample data
n_samples_train, n_samples_test, n_features = 75, 150, 500
np.random.seed(0)
coef = np.random.randn(n_features)
coef[50:] = 0.0  # only the top 50 features are impacting the model
X = np.random.randn(n_samples_train + n_samples_test, n_features)
y = np.dot(X, coef)

# Split train and test data
X_train, X_test = X[:n_samples_train], X[n_samples_train:]
y_train, y_test = y[:n_samples_train], y[n_samples_train:]

###############################################################################
# Compute train and test errors
# Note: despite the names, the lists hold R^2 scores (higher is better),
# since ElasticNet.score returns the coefficient of determination.
alphas = np.logspace(-5, 1, 60)
enet = linear_model.ElasticNet(l1_ratio=0.7)
train_errors = list()
test_errors = list()
for alpha in alphas:
    enet.set_params(alpha=alpha)
    enet.fit(X_train, y_train)
    train_errors.append(enet.score(X_train, y_train))
    test_errors.append(enet.score(X_test, y_test))

# Pick the alpha maximizing the test-set score.
i_alpha_optim = np.argmax(test_errors)
alpha_optim = alphas[i_alpha_optim]
print("Optimal regularization parameter : %s" % alpha_optim)

# Estimate the coef_ on full data with optimal regularization parameter
enet.set_params(alpha=alpha_optim)
coef_ = enet.fit(X, y).coef_

###############################################################################
# Plot results functions
import matplotlib.pyplot as plt

plt.subplot(2, 1, 1)
plt.semilogx(alphas, train_errors, label='Train')
plt.semilogx(alphas, test_errors, label='Test')
plt.vlines(alpha_optim, plt.ylim()[0], np.max(test_errors), color='k',
           linewidth=3, label='Optimum on test')
plt.legend(loc='lower left')
plt.ylim([0, 1.2])
plt.xlabel('Regularization parameter')
plt.ylabel('Performance')

# Show estimated coef_ vs true coef
plt.subplot(2, 1, 2)
plt.plot(coef, label='True coef')
plt.plot(coef_, label='Estimated coef')
plt.legend()
plt.subplots_adjust(0.09, 0.04, 0.94, 0.94, 0.26, 0.26)
plt.show()
csherwood-usgs/landlab | refs/heads/master | landlab/grid/tests/test_raster_funcs/test_gradients_across_adjacent_max.py | 6 | import numpy as np
from numpy.testing import assert_array_equal
from nose import with_setup
try:
from nose.tools import assert_is
except ImportError:
from landlab.testing.tools import assert_is
from nose.tools import assert_equal
from landlab.grid.raster_steepest_descent import (
_calc_steepest_descent_across_adjacent_cells)
def setup_unit_grid():
    """Set up a test grid with unit spacing."""
    from landlab import RasterModelGrid
    # Install the fixtures as module globals for the nose tests below.
    global rmg, values_at_nodes
    rmg = RasterModelGrid(4, 5)
    values_at_nodes = np.arange(20, dtype=float)
def setup_non_unit_grid():
    """Set up a test grid with non-unit spacing."""
    from landlab import RasterModelGrid
    # 4x5 grid with node spacing of 2, exposed as module globals.
    global rmg, values_at_nodes
    rmg = RasterModelGrid(4, 5, 2)
    values_at_nodes = np.arange(20, dtype=float)
def setup_3x3_grid():
    """Set up a grid of 3 rows and 3 columns."""
    from landlab import RasterModelGrid
    # Node values increase upward; flipud puts row 0 at the grid bottom.
    global rmg_3x3, values_at_nodes
    rmg_3x3 = RasterModelGrid(3, 3)
    values_at_nodes = np.flipud(np.array([6, 7, 8,
                                          3, 4, 5,
                                          0, 1, 2], dtype=float))
@with_setup(setup_3x3_grid)
def test_scalar_arg():
    """Test scalar arg for nodes."""
    # Steepest descent from cell 0 (the center node of the 3x3 grid).
    grad = _calc_steepest_descent_across_adjacent_cells(
        rmg_3x3, values_at_nodes, 0)
    assert_equal(grad, -3.)

    grad = _calc_steepest_descent_across_adjacent_cells(
        rmg_3x3, values_at_nodes, 0, method='d8')
    assert_equal(grad, -3.)

    # Lower a diagonal neighbor so the d8 descent runs along the diagonal,
    # whose length is sqrt(2) times the node spacing.
    values_at_nodes[2] = -10
    grad = _calc_steepest_descent_across_adjacent_cells(
        rmg_3x3, values_at_nodes, 0, method='d8')
    assert_equal(grad, - (4 + 10) / np.sqrt(2.))
@with_setup(setup_unit_grid)
def test_iterable():
    """Test iterable arg for nodes."""
    # A list of cell ids returns one gradient per cell.
    grad = _calc_steepest_descent_across_adjacent_cells(
        rmg, values_at_nodes, [0, 4])
    assert_array_equal(grad, [-5., -5.])
@with_setup(setup_unit_grid)
def test_scalar_arg_with_links():
    """Test that return_node=True also reports the downhill node id."""
    values = np.array([0, 1, 3, 6, 10,
                       0, 1, 3, 6, 10,
                       0, 1, 3, 5, 10,
                       0, 1, -3, 6, 10, ], dtype=float)
    (grad, node) = _calc_steepest_descent_across_adjacent_cells(
        rmg, values, (0, 4), return_node=True)
    assert_array_equal(grad, [-1, -6])
    assert_array_equal(node, [5, 17])

    # With a lowered diagonal neighbor, d8 picks the diagonal node and the
    # gradient is scaled by the sqrt(2) diagonal length.
    values_at_nodes[2] = -10
    (grad, node) = _calc_steepest_descent_across_adjacent_cells(
        rmg, values_at_nodes, 0, method='d8', return_node=True)
    assert_equal(grad, - (6 + 10) / np.sqrt(2.))
    assert_equal(node, 2)
@with_setup(setup_unit_grid)
def test_node_id_in_direction_of_max():
    """Test the node ids returned for d4 versus d8 descent directions."""
    values = np.array([-1, 1, 3, 6, 10,
                       0, 1, 3, 6, 10,
                       0, 1, 3, 5, 10,
                       0, 1, -3, 6, 10, ], dtype=float)
    (_, node_ids) = _calc_steepest_descent_across_adjacent_cells(
        rmg, values, (0, 4), return_node=True)
    assert_array_equal(node_ids, [5, 17])

    # d8 considers the diagonal node 0 as well, which wins for cell 0.
    (grads,
     node_ids) = _calc_steepest_descent_across_adjacent_cells(
         rmg, values, (0, 4), method='d8', return_node=True)
    assert_array_equal(node_ids, [0, 17])
@with_setup(setup_3x3_grid)
def test_node_in_direction_of_max():
    """Lowering each neighbor in turn must make it the downhill node."""
    for node_id in [0, 1, 2, 3, 5, 6, 7, 8]:
        values = np.zeros(9)
        values[node_id] = -1
        (_, node) = _calc_steepest_descent_across_adjacent_cells(
            rmg_3x3, values, 0, return_node=True, method='d8')
        assert_array_equal(node, node_id)
@with_setup(setup_3x3_grid)
def test_node_in_direction_of_max_with_ties():
    """Test the tie-breaking order when all neighbors have equal values."""
    values = np.zeros(9)
    (_, node) = _calc_steepest_descent_across_adjacent_cells(
        rmg_3x3, values, 0, return_node=True, method='d8')
    assert_array_equal(node, 5)

    # Raise the current winner one at a time and check who wins next.
    for (node_id, expected) in zip([5, 7, 3, 1, 8, 6, 0],
                                   [7, 3, 1, 8, 6, 0, 2]):
        values[node_id] = 1
        (_, node) = _calc_steepest_descent_across_adjacent_cells(
            rmg_3x3, values, 0, return_node=True, method='d8')
        assert_array_equal(node, expected)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.