Dataset columns: repo_name | ref | path | copies | content
rmfitzpatrick/ansible | refs/heads/devel | lib/ansible/module_utils/vyos.py | copies: 1

# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2016 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback, return_values
from ansible.module_utils.network_common import to_list
from ansible.module_utils.connection import exec_command
_DEVICE_CONFIGS = {}
vyos_provider_spec = {
'host': dict(),
'port': dict(type='int'),
'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
'timeout': dict(type='int'),
}
vyos_argument_spec = {
'provider': dict(type='dict', options=vyos_provider_spec),
}
vyos_top_spec = {
'host': dict(removed_in_version=2.9),
'port': dict(removed_in_version=2.9, type='int'),
'username': dict(removed_in_version=2.9),
'password': dict(removed_in_version=2.9, no_log=True),
'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
'timeout': dict(removed_in_version=2.9, type='int'),
}
vyos_argument_spec.update(vyos_top_spec)
def get_provider_argspec():
return vyos_provider_spec
def check_args(module, warnings):
for key in vyos_argument_spec:
if module._name == 'vyos_user':
if key not in ['password', 'provider'] and module.params[key]:
                warnings.append('argument %s has been deprecated and will be removed in a future version' % key)
else:
if key != 'provider' and module.params[key]:
warnings.append('argument %s has been deprecated and will be removed in a future version' % key)
def get_config(module, target='commands'):
cmd = ' '.join(['show configuration', target])
try:
return _DEVICE_CONFIGS[cmd]
except KeyError:
rc, out, err = exec_command(module, cmd)
if rc != 0:
module.fail_json(msg='unable to retrieve current config', stderr=to_text(err, errors='surrogate_or_strict'))
cfg = to_text(out, errors='surrogate_or_strict').strip()
_DEVICE_CONFIGS[cmd] = cfg
return cfg
def run_commands(module, commands, check_rc=True):
responses = list()
for cmd in to_list(commands):
rc, out, err = exec_command(module, cmd)
if check_rc and rc != 0:
module.fail_json(msg=to_text(err, errors='surrogate_or_strict'), rc=rc)
responses.append(to_text(out, errors='surrogate_or_strict'))
return responses
def load_config(module, commands, commit=False, comment=None):
rc, out, err = exec_command(module, 'configure')
if rc != 0:
module.fail_json(msg='unable to enter configuration mode', output=to_text(err, errors='surrogate_or_strict'))
for cmd in to_list(commands):
rc, out, err = exec_command(module, cmd)
if rc != 0:
# discard any changes in case of failure
exec_command(module, 'exit discard')
module.fail_json(msg='configuration failed')
diff = None
if module._diff:
rc, out, err = exec_command(module, 'compare')
out = to_text(out, errors='surrogate_or_strict')
if not out.startswith('No changes'):
rc, out, err = exec_command(module, 'show')
diff = to_text(out, errors='surrogate_or_strict').strip()
if commit:
cmd = 'commit'
if comment:
cmd += ' comment "%s"' % comment
rc, out, err = exec_command(module, cmd)
if rc != 0:
# discard any changes in case of failure
exec_command(module, 'exit discard')
module.fail_json(msg='commit failed: %s' % err)
if not commit:
exec_command(module, 'exit discard')
else:
exec_command(module, 'exit')
if diff:
return diff
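# A minimal usage sketch (not part of the upstream file): how a hypothetical
# module task might drive the helpers above, assuming `module` is an
# AnsibleModule built with vyos_argument_spec over a network_cli connection.
def _example_set_hostname(module, hostname):
    """Hypothetical helper: push a hostname change and return the diff."""
    warnings = []
    check_args(module, warnings)
    commands = ['set system host-name %s' % hostname]
    return load_config(module, commands, commit=True, comment='set hostname')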

tpo/ansible | refs/heads/devel | test/units/modules/conftest.py | copies: 35

# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import pytest
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes
from ansible.module_utils.common._collections_compat import MutableMapping
@pytest.fixture
def patch_ansible_module(request, mocker):
if isinstance(request.param, string_types):
args = request.param
elif isinstance(request.param, MutableMapping):
if 'ANSIBLE_MODULE_ARGS' not in request.param:
request.param = {'ANSIBLE_MODULE_ARGS': request.param}
if '_ansible_remote_tmp' not in request.param['ANSIBLE_MODULE_ARGS']:
request.param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp'
if '_ansible_keep_remote_files' not in request.param['ANSIBLE_MODULE_ARGS']:
request.param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False
args = json.dumps(request.param)
else:
raise Exception('Malformed data to the patch_ansible_module pytest fixture')
mocker.patch('ansible.module_utils.basic._ANSIBLE_ARGS', to_bytes(args))
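# Hedged usage sketch (not in the original conftest): a test opts in via
# indirect parametrization, passing module args through request.param, e.g.:
#
#   @pytest.mark.parametrize('patch_ansible_module',
#                            [{'name': 'eth0', 'state': 'present'}],
#                            indirect=['patch_ansible_module'])
#   @pytest.mark.usefixtures('patch_ansible_module')
#   def test_module_args():
#       ...  # AnsibleModule now parses the mocked _ANSIBLE_ARGS payload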

MoamerEncsConcordiaCa/tensorflow | refs/heads/master | tensorflow/contrib/distributions/python/ops/kullback_leibler.py | copies: 24

# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Registration and usage mechanisms for KL-divergences."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import inspect
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
_DIVERGENCES = {}
def _registered_kl(type_a, type_b):
"""Get the KL function registered for classes a and b."""
hierarchy_a = inspect.getmro(type_a)
hierarchy_b = inspect.getmro(type_b)
dist_to_children = None
kl_fn = None
for mro_to_a, parent_a in enumerate(hierarchy_a):
for mro_to_b, parent_b in enumerate(hierarchy_b):
candidate_dist = mro_to_a + mro_to_b
candidate_kl_fn = _DIVERGENCES.get((parent_a, parent_b), None)
      # Prefer the registered pair with the smallest summed MRO distance.
      if candidate_kl_fn and (kl_fn is None
                              or candidate_dist < dist_to_children):
dist_to_children = candidate_dist
kl_fn = candidate_kl_fn
return kl_fn
def kl(dist_a, dist_b, allow_nan_stats=True, name=None):
"""Get the KL-divergence KL(dist_a || dist_b).
If there is no KL method registered specifically for `type(dist_a)` and
`type(dist_b)`, then the class hierarchies of these types are searched.
If one KL method is registered between any pairs of classes in these two
parent hierarchies, it is used.
If more than one such registered method exists, the method whose registered
classes have the shortest sum MRO paths to the input types is used.
If more than one such shortest path exists, the first method
identified in the search is used (favoring a shorter MRO distance to
`type(dist_a)`).
Args:
dist_a: The first distribution.
dist_b: The second distribution.
allow_nan_stats: Python `bool`, default `True`. When `True`,
statistics (e.g., mean, mode, variance) use the value "`NaN`" to
indicate the result is undefined. When `False`, an exception is raised
if one or more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
Returns:
A Tensor with the batchwise KL-divergence between dist_a and dist_b.
Raises:
NotImplementedError: If no KL method is defined for distribution types
of dist_a and dist_b.
"""
kl_fn = _registered_kl(type(dist_a), type(dist_b))
if kl_fn is None:
raise NotImplementedError(
"No KL(dist_a || dist_b) registered for dist_a type %s and dist_b "
"type %s" % (type(dist_a).__name__, type(dist_b).__name__))
with ops.name_scope("KullbackLeibler"):
kl_t = kl_fn(dist_a, dist_b, name=name)
if allow_nan_stats:
return kl_t
# Check KL for NaNs
kl_t = array_ops.identity(kl_t, name="kl")
with ops.control_dependencies([
control_flow_ops.Assert(
math_ops.logical_not(
math_ops.reduce_any(math_ops.is_nan(kl_t))),
["KL calculation between %s and %s returned NaN values "
"(and was called with allow_nan_stats=False). Values:"
% (dist_a.name, dist_b.name), kl_t])]):
return array_ops.identity(kl_t, name="checked_kl")
class RegisterKL(object):
"""Decorator to register a KL divergence implementation function.
Usage:
@distributions.RegisterKL(distributions.Normal, distributions.Normal)
def _kl_normal_mvn(norm_a, norm_b):
# Return KL(norm_a || norm_b)
"""
def __init__(self, dist_cls_a, dist_cls_b):
"""Initialize the KL registrar.
Args:
dist_cls_a: the class of the first argument of the KL divergence.
dist_cls_b: the class of the second argument of the KL divergence.
"""
self._key = (dist_cls_a, dist_cls_b)
def __call__(self, kl_fn):
"""Perform the KL registration.
Args:
kl_fn: The function to use for the KL divergence.
Returns:
kl_fn
Raises:
TypeError: if kl_fn is not a callable.
ValueError: if a KL divergence function has already been registered for
the given argument classes.
"""
if not callable(kl_fn):
raise TypeError("kl_fn must be callable, received: %s" % kl_fn)
if self._key in _DIVERGENCES:
raise ValueError("KL(%s || %s) has already been registered to: %s"
% (self._key[0].__name__, self._key[1].__name__,
_DIVERGENCES[self._key]))
_DIVERGENCES[self._key] = kl_fn
return kl_fn
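# An illustrative registration (not part of the upstream file), assuming a
# hypothetical `Normal` distribution class exposing `loc`/`scale` tensors; it
# shows the shape of a function one would register with the decorator above,
# using the closed-form KL between two univariate normals:
#
#   @RegisterKL(Normal, Normal)
#   def _kl_normal_normal(a, b, name=None):
#     with ops.name_scope(name, "kl_normal_normal", [a.loc, b.loc]):
#       return (0.5 * math_ops.square(a.scale / b.scale) +
#               0.5 * math_ops.square((a.loc - b.loc) / b.scale) +
#               math_ops.log(b.scale / a.scale) - 0.5)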

ozamiatin/glance | refs/heads/master | glance/api/common.py | copies: 9

# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
from glance.common import exception
from glance.common import wsgi
from glance import i18n
LOG = logging.getLogger(__name__)
_ = i18n._
_LE = i18n._LE
_LW = i18n._LW
CONF = cfg.CONF
_CACHED_THREAD_POOL = {}
def size_checked_iter(response, image_meta, expected_size, image_iter,
notifier):
image_id = image_meta['id']
bytes_written = 0
def notify_image_sent_hook(env):
image_send_notification(bytes_written, expected_size,
image_meta, response.request, notifier)
# Add hook to process after response is fully sent
if 'eventlet.posthooks' in response.request.environ:
response.request.environ['eventlet.posthooks'].append(
(notify_image_sent_hook, (), {}))
try:
for chunk in image_iter:
yield chunk
bytes_written += len(chunk)
except Exception as err:
with excutils.save_and_reraise_exception():
msg = (_LE("An error occurred reading from backend storage for "
"image %(image_id)s: %(err)s") % {'image_id': image_id,
'err': err})
LOG.error(msg)
if expected_size != bytes_written:
msg = (_LE("Backend storage for image %(image_id)s "
"disconnected after writing only %(bytes_written)d "
"bytes") % {'image_id': image_id,
'bytes_written': bytes_written})
LOG.error(msg)
raise exception.GlanceException(_("Corrupt image download for "
"image %(image_id)s") %
{'image_id': image_id})
def image_send_notification(bytes_written, expected_size, image_meta, request,
notifier):
"""Send an image.send message to the notifier."""
try:
context = request.context
payload = {
'bytes_sent': bytes_written,
'image_id': image_meta['id'],
'owner_id': image_meta['owner'],
'receiver_tenant_id': context.tenant,
'receiver_user_id': context.user,
'destination_ip': request.remote_addr,
}
if bytes_written != expected_size:
notify = notifier.error
else:
notify = notifier.info
notify('image.send', payload)
except Exception as err:
msg = (_LE("An error occurred during image.send"
" notification: %(err)s") % {'err': err})
LOG.error(msg)
def get_remaining_quota(context, db_api, image_id=None):
"""Method called to see if the user is allowed to store an image.
Checks if it is allowed based on the given size in glance based on their
quota and current usage.
:param context:
:param db_api: The db_api in use for this configuration
:param image_id: The image that will be replaced with this new data size
:return: The number of bytes the user has remaining under their quota.
None means infinity
"""
# NOTE(jbresnah) in the future this value will come from a call to
# keystone.
users_quota = CONF.user_storage_quota
# set quota must have a number optionally followed by B, KB, MB,
# GB or TB without any spaces in between
    pattern = re.compile(r'^(\d+)((K|M|G|T)?B)?$')
match = pattern.match(users_quota)
if not match:
LOG.error(_LE("Invalid value for option user_storage_quota: "
"%(users_quota)s")
% {'users_quota': users_quota})
raise exception.InvalidOptionValue(option='user_storage_quota',
value=users_quota)
quota_value, quota_unit = (match.groups())[0:2]
# fall back to Bytes if user specified anything other than
# permitted values
quota_unit = quota_unit or "B"
factor = getattr(units, quota_unit.replace('B', 'i'), 1)
users_quota = int(quota_value) * factor
if users_quota <= 0:
return
usage = db_api.user_get_storage_usage(context,
context.owner,
image_id=image_id)
return users_quota - usage
def check_quota(context, image_size, db_api, image_id=None):
"""Method called to see if the user is allowed to store an image.
Checks if it is allowed based on the given size in glance based on their
quota and current usage.
:param context:
:param image_size: The size of the image we hope to store
:param db_api: The db_api in use for this configuration
:param image_id: The image that will be replaced with this new data size
:return:
"""
remaining = get_remaining_quota(context, db_api, image_id=image_id)
if remaining is None:
return
user = getattr(context, 'user', '<unknown>')
if image_size is None:
# NOTE(jbresnah) When the image size is None it means that it is
# not known. In this case the only time we will raise an
# exception is when there is no room left at all, thus we know
# it will not fit
if remaining <= 0:
LOG.warn(_LW("User %(user)s attempted to upload an image of"
" unknown size that will exceed the quota."
" %(remaining)d bytes remaining.")
% {'user': user, 'remaining': remaining})
raise exception.StorageQuotaFull(image_size=image_size,
remaining=remaining)
return
if image_size > remaining:
LOG.warn(_LW("User %(user)s attempted to upload an image of size"
" %(size)d that will exceed the quota. %(remaining)d"
" bytes remaining.")
% {'user': user, 'size': image_size, 'remaining': remaining})
raise exception.StorageQuotaFull(image_size=image_size,
remaining=remaining)
return remaining
def memoize(lock_name):
    def memoizer_wrapper(func):
        @lockutils.synchronized(lock_name)
        def memoizer(lock_name):
            if lock_name not in _CACHED_THREAD_POOL:
                _CACHED_THREAD_POOL[lock_name] = func()
            return _CACHED_THREAD_POOL[lock_name]
        # Note: memoizer is called here rather than returned, so the
        # decorated name is bound to the cached value itself.
        return memoizer(lock_name)
    return memoizer_wrapper
def get_thread_pool(lock_name, size=1024):
"""Initializes eventlet thread pool.
If thread pool is present in cache, then returns it from cache
else create new pool, stores it in cache and return newly created
pool.
@param lock_name: Name of the lock.
@param size: Size of eventlet pool.
@return: eventlet pool
"""
@memoize(lock_name)
def _get_thread_pool():
return wsgi.get_asynchronous_eventlet_pool(size=size)
return _get_thread_pool
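# A quick sketch (not in the upstream file) of how the quota-string parsing
# above behaves, given the oslo_utils.units constants (Ki, Mi, Gi, Ti):
#
#   '10GB' -> quota_value='10', quota_unit='GB' -> 10 * units.Gi bytes
#   '512'  -> quota_value='512', unit defaults to 'B' -> 512 bytes
#   '5 MB' -> no match (spaces are rejected) -> InvalidOptionValue raised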

liqd/adhocracy | refs/heads/develop | src/adhocracy/migration/versions/050_instance_require_valid_email.py | copies: 6

from sqlalchemy import Column, ForeignKey, MetaData, Table, Float
from sqlalchemy import Boolean, DateTime, Integer, Unicode, UnicodeText
from sqlalchemy import func
metadata = MetaData()
instance_table = Table(
'instance', metadata,
Column('id', Integer, primary_key=True),
Column('key', Unicode(20), nullable=False, unique=True),
Column('label', Unicode(255), nullable=False),
Column('description', UnicodeText(), nullable=True),
Column('required_majority', Float, nullable=False),
Column('activation_delay', Integer, nullable=False),
Column('create_time', DateTime, default=func.now()),
Column('access_time', DateTime, default=func.now(),
onupdate=func.now()),
Column('delete_time', DateTime, nullable=True),
Column('creator_id', Integer, ForeignKey('user.id'),
nullable=False),
Column('default_group_id', Integer, ForeignKey('group.id'),
nullable=True),
Column('allow_adopt', Boolean, default=True),
Column('allow_delegate', Boolean, default=True),
Column('allow_propose', Boolean, default=True),
Column('allow_index', Boolean, default=True),
Column('hidden', Boolean, default=False),
Column('locale', Unicode(7), nullable=True),
Column('css', UnicodeText(), nullable=True),
Column('frozen', Boolean, default=False),
Column('milestones', Boolean, default=False),
Column('use_norms', Boolean, nullable=True, default=True),
Column('require_selection', Boolean, nullable=True, default=False),
Column('is_authenticated', Boolean, nullable=True, default=False),
Column('hide_global_categories', Boolean, nullable=True, default=False),
Column('editable_comments_default', Boolean, nullable=True, default=True),
)
def upgrade(migrate_engine):
metadata.bind = migrate_engine
require_valid_email = Column('require_valid_email',
Boolean,
nullable=True,
default=True)
require_valid_email.create(instance_table)
u = instance_table.update(values={'require_valid_email': True})
migrate_engine.execute(u)
def downgrade(migrate_engine):
raise NotImplementedError()
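# A hedged note (not part of the migration): with sqlalchemy-migrate, a
# version script like this is typically applied through the repository API;
# the database URL and repository path below are hypothetical.
#
#   from migrate.versioning.api import upgrade
#   upgrade('sqlite:///adhocracy.db', 'src/adhocracy/migration')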

ondrik/pubman | refs/heads/master | pubman/models.py | copies: 1

from django.db import models
###############################################################################
class Author(models.Model):
first_name = models.CharField(max_length=128, null=True)
last_name = models.CharField(max_length=128)
    def __str__(self):
        '''
        Conversion to a string.
        '''
        if self.first_name:
            return self.first_name + " " + self.last_name
        return self.last_name
###############################################################################
class Publisher(models.Model):
name = models.CharField(max_length=128)
def __str__(self):
'''
Conversion to a string.
'''
return self.name
###############################################################################
class ProceedingsSeries(models.Model):
name = models.CharField(max_length=256)
abbr = models.CharField(max_length=128, null=True)
# published by some publisher
published_by = models.ForeignKey(Publisher, on_delete=models.SET_NULL, null=True)
    def __str__(self):
        '''
        Conversion to a string.
        '''
        if self.abbr:
            return self.name + "---" + self.abbr
        return self.name
###############################################################################
class ConferenceSeries(models.Model):
# name of the conference series
name = models.TextField()
# abbreviation
abbr = models.CharField(max_length=128, null=True)
    def __str__(self):
        '''
        Conversion to a string.
        '''
        if self.abbr:
            return self.abbr + "---" + self.name
        return self.name
###############################################################################
class Conference(models.Model):
# series of the conference
conf_series = models.ForeignKey(ConferenceSeries, on_delete=models.SET_NULL, null=True)
# year of the conference
year = models.IntegerField(null=True)
# the run number of the conference
run_num = models.IntegerField(null=True)
# proceedings were published in series ...
published_in = models.ForeignKey(ProceedingsSeries, on_delete=models.SET_NULL, null=True)
# the volume of a notes series proceedings were published in
published_in_vol = models.IntegerField(null=True)
    def __str__(self):
        '''
        Conversion to a string.
        '''
        abbr = self.conf_series.abbr if self.conf_series else None
        return (abbr or "?") + "'" + str(self.year)
###############################################################################
class Publication(models.Model):
# title of the contribution
title = models.TextField()
# appeared in conference?
conference = models.ForeignKey(Conference, on_delete=models.SET_NULL, null=True)
# authors
authors = models.ManyToManyField(Author, through="AuthorOf")
###############################################################################
class AuthorOf(models.Model):
# author
author = models.ForeignKey(Author, on_delete=models.CASCADE)
# publication
publication = models.ForeignKey(Publication, on_delete=models.CASCADE)
# order of the author
order = models.IntegerField(null=True)
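# An illustrative usage sketch (not part of the original models file) showing
# how the AuthorOf through model orders authors on a publication; all names
# and values below are hypothetical.
def example_add_publication():
    """Hypothetical helper: create a publication with two ordered authors."""
    alice = Author.objects.create(first_name="Alice", last_name="Smith")
    bob = Author.objects.create(first_name="Bob", last_name="Jones")
    pub = Publication.objects.create(title="On Finite Automata")
    AuthorOf.objects.create(author=alice, publication=pub, order=1)
    AuthorOf.objects.create(author=bob, publication=pub, order=2)
    return pub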

R4stl1n/allianceauth | refs/heads/master | allianceauth/services/modules/teamspeak3/__init__.py | copies: 5

default_app_config = 'allianceauth.services.modules.teamspeak3.apps.Teamspeak3ServiceConfig'

vicky2135/lucious | refs/heads/master | lucious/lib/python2.7/site-packages/setuptools/glob.py | copies: 130

"""
Filename globbing utility. Mostly a copy of `glob` from Python 3.5.
Changes include:
* `yield from` and PEP3102 `*` removed.
* `bytes` changed to `six.binary_type`.
* Hidden files are not ignored.
"""
import os
import re
import fnmatch
from six import binary_type
__all__ = ["glob", "iglob", "escape"]
def glob(pathname, recursive=False):
"""Return a list of paths matching a pathname pattern.
The pattern may contain simple shell-style wildcards a la
fnmatch. However, unlike fnmatch, filenames starting with a
dot are special cases that are not matched by '*' and '?'
patterns.
If recursive is true, the pattern '**' will match any files and
zero or more directories and subdirectories.
"""
return list(iglob(pathname, recursive=recursive))
def iglob(pathname, recursive=False):
"""Return an iterator which yields the paths matching a pathname pattern.
The pattern may contain simple shell-style wildcards a la
fnmatch. However, unlike fnmatch, filenames starting with a
dot are special cases that are not matched by '*' and '?'
patterns.
If recursive is true, the pattern '**' will match any files and
zero or more directories and subdirectories.
"""
it = _iglob(pathname, recursive)
if recursive and _isrecursive(pathname):
s = next(it) # skip empty string
assert not s
return it
def _iglob(pathname, recursive):
dirname, basename = os.path.split(pathname)
if not has_magic(pathname):
if basename:
if os.path.lexists(pathname):
yield pathname
else:
# Patterns ending with a slash should match only directories
if os.path.isdir(dirname):
yield pathname
return
if not dirname:
if recursive and _isrecursive(basename):
for x in glob2(dirname, basename):
yield x
else:
for x in glob1(dirname, basename):
yield x
return
# `os.path.split()` returns the argument itself as a dirname if it is a
# drive or UNC path. Prevent an infinite recursion if a drive or UNC path
# contains magic characters (i.e. r'\\?\C:').
if dirname != pathname and has_magic(dirname):
dirs = _iglob(dirname, recursive)
else:
dirs = [dirname]
if has_magic(basename):
if recursive and _isrecursive(basename):
glob_in_dir = glob2
else:
glob_in_dir = glob1
else:
glob_in_dir = glob0
for dirname in dirs:
for name in glob_in_dir(dirname, basename):
yield os.path.join(dirname, name)
# These 2 helper functions non-recursively glob inside a literal directory.
# They return a list of basenames. `glob1` accepts a pattern while `glob0`
# takes a literal basename (so it only has to check for its existence).
def glob1(dirname, pattern):
if not dirname:
if isinstance(pattern, binary_type):
dirname = os.curdir.encode('ASCII')
else:
dirname = os.curdir
try:
names = os.listdir(dirname)
except OSError:
return []
return fnmatch.filter(names, pattern)
def glob0(dirname, basename):
if not basename:
# `os.path.split()` returns an empty basename for paths ending with a
# directory separator. 'q*x/' should match only directories.
if os.path.isdir(dirname):
return [basename]
else:
if os.path.lexists(os.path.join(dirname, basename)):
return [basename]
return []
# This helper function recursively yields relative pathnames inside a literal
# directory.
def glob2(dirname, pattern):
assert _isrecursive(pattern)
yield pattern[:0]
for x in _rlistdir(dirname):
yield x
# Recursively yields relative pathnames inside a literal directory.
def _rlistdir(dirname):
if not dirname:
if isinstance(dirname, binary_type):
dirname = binary_type(os.curdir, 'ASCII')
else:
dirname = os.curdir
try:
names = os.listdir(dirname)
except os.error:
return
for x in names:
yield x
path = os.path.join(dirname, x) if dirname else x
for y in _rlistdir(path):
yield os.path.join(x, y)
magic_check = re.compile('([*?[])')
magic_check_bytes = re.compile(b'([*?[])')
def has_magic(s):
if isinstance(s, binary_type):
match = magic_check_bytes.search(s)
else:
match = magic_check.search(s)
return match is not None
def _isrecursive(pattern):
if isinstance(pattern, binary_type):
return pattern == b'**'
else:
return pattern == '**'
def escape(pathname):
"""Escape all special characters.
"""
# Escaping is done by wrapping any of "*?[" between square brackets.
# Metacharacters do not work in the drive part and shouldn't be escaped.
drive, pathname = os.path.splitdrive(pathname)
if isinstance(pathname, binary_type):
pathname = magic_check_bytes.sub(br'[\1]', pathname)
else:
pathname = magic_check.sub(r'[\1]', pathname)
return drive + pathname
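# A brief usage sketch (not part of the vendored file), with hypothetical
# paths, exercising the three public entry points:
#
#   glob('src/*.py')                             # hidden files included here
#   list(iglob('src/**/*.py', recursive=True))   # '**' spans subdirectories
#   escape('dir/[special]?.txt')                 # -> 'dir/[[]special][?].txt'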

jakobworldpeace/scikit-learn | refs/heads/master | sklearn/cluster/mean_shift_.py | copies: 42

"""Mean shift clustering algorithm.
Mean shift clustering aims to discover *blobs* in a smooth density of
samples. It is a centroid based algorithm, which works by updating candidates
for centroids to be the mean of the points within a given region. These
candidates are then filtered in a post-processing stage to eliminate
near-duplicates to form the final set of centroids.
Seeding is performed using a binning technique for scalability.
"""
# Authors: Conrad Lee <conradlee@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Martino Sorbaro <martino.sorbaro@ed.ac.uk>
import numpy as np
import warnings
from collections import defaultdict
from ..externals import six
from ..utils.validation import check_is_fitted
from ..utils import extmath, check_random_state, gen_batches, check_array
from ..base import BaseEstimator, ClusterMixin
from ..neighbors import NearestNeighbors
from ..metrics.pairwise import pairwise_distances_argmin
from ..externals.joblib import Parallel
from ..externals.joblib import delayed
def estimate_bandwidth(X, quantile=0.3, n_samples=None, random_state=0,
n_jobs=1):
"""Estimate the bandwidth to use with the mean-shift algorithm.
    Note that this function takes time at least quadratic in n_samples. For
    large datasets, it's wise to set the n_samples parameter to a small value.
Parameters
----------
X : array-like, shape=[n_samples, n_features]
Input points.
    quantile : float, default 0.3
        Should be in the interval [0, 1]; 0.5 means that the median of all
        pairwise distances is used.
n_samples : int, optional
The number of samples to use. If not given, all samples are used.
random_state : int or RandomState
Pseudo-random number generator state used for random sampling.
n_jobs : int, optional (default = 1)
The number of parallel jobs to run for neighbors search.
If ``-1``, then the number of jobs is set to the number of CPU cores.
Returns
-------
bandwidth : float
The bandwidth parameter.
"""
random_state = check_random_state(random_state)
if n_samples is not None:
idx = random_state.permutation(X.shape[0])[:n_samples]
X = X[idx]
nbrs = NearestNeighbors(n_neighbors=int(X.shape[0] * quantile),
n_jobs=n_jobs)
nbrs.fit(X)
bandwidth = 0.
for batch in gen_batches(len(X), 500):
d, _ = nbrs.kneighbors(X[batch, :], return_distance=True)
bandwidth += np.max(d, axis=1).sum()
return bandwidth / X.shape[0]
# separate function for each seed's iterative loop
def _mean_shift_single_seed(my_mean, X, nbrs, max_iter):
# For each seed, climb gradient until convergence or max_iter
bandwidth = nbrs.get_params()['radius']
stop_thresh = 1e-3 * bandwidth # when mean has converged
completed_iterations = 0
while True:
# Find mean of points within bandwidth
i_nbrs = nbrs.radius_neighbors([my_mean], bandwidth,
return_distance=False)[0]
points_within = X[i_nbrs]
if len(points_within) == 0:
break # Depending on seeding strategy this condition may occur
my_old_mean = my_mean # save the old mean
my_mean = np.mean(points_within, axis=0)
# If converged or at max_iter, adds the cluster
if (extmath.norm(my_mean - my_old_mean) < stop_thresh or
completed_iterations == max_iter):
return tuple(my_mean), len(points_within)
completed_iterations += 1
def mean_shift(X, bandwidth=None, seeds=None, bin_seeding=False,
min_bin_freq=1, cluster_all=True, max_iter=300,
n_jobs=1):
"""Perform mean shift clustering of data using a flat kernel.
Read more in the :ref:`User Guide <mean_shift>`.
Parameters
----------
X : array-like, shape=[n_samples, n_features]
Input data.
bandwidth : float, optional
Kernel bandwidth.
If bandwidth is not given, it is determined using a heuristic based on
the median of all pairwise distances. This will take quadratic time in
the number of samples. The sklearn.cluster.estimate_bandwidth function
can be used to do this more efficiently.
seeds : array-like, shape=[n_seeds, n_features] or None
Point used as initial kernel locations. If None and bin_seeding=False,
each data point is used as a seed. If None and bin_seeding=True,
see bin_seeding.
bin_seeding : boolean, default=False
If true, initial kernel locations are not locations of all
points, but rather the location of the discretized version of
points, where points are binned onto a grid whose coarseness
corresponds to the bandwidth. Setting this option to True will speed
up the algorithm because fewer seeds will be initialized.
Ignored if seeds argument is not None.
min_bin_freq : int, default=1
To speed up the algorithm, accept only those bins with at least
min_bin_freq points as seeds.
cluster_all : boolean, default True
If true, then all points are clustered, even those orphans that are
not within any kernel. Orphans are assigned to the nearest kernel.
If false, then orphans are given cluster label -1.
max_iter : int, default 300
Maximum number of iterations, per seed point before the clustering
operation terminates (for that seed point), if has not converged yet.
n_jobs : int
The number of jobs to use for the computation. This works by computing
each of the n_init runs in parallel.
If -1 all CPUs are used. If 1 is given, no parallel computing code is
used at all, which is useful for debugging. For n_jobs below -1,
(n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
are used.
.. versionadded:: 0.17
Parallel Execution using *n_jobs*.
Returns
-------
cluster_centers : array, shape=[n_clusters, n_features]
Coordinates of cluster centers.
labels : array, shape=[n_samples]
Cluster labels for each point.
Notes
-----
See examples/cluster/plot_mean_shift.py for an example.
"""
if bandwidth is None:
bandwidth = estimate_bandwidth(X, n_jobs=n_jobs)
elif bandwidth <= 0:
raise ValueError("bandwidth needs to be greater than zero or None,\
got %f" % bandwidth)
if seeds is None:
if bin_seeding:
seeds = get_bin_seeds(X, bandwidth, min_bin_freq)
else:
seeds = X
n_samples, n_features = X.shape
center_intensity_dict = {}
nbrs = NearestNeighbors(radius=bandwidth, n_jobs=n_jobs).fit(X)
# execute iterations on all seeds in parallel
all_res = Parallel(n_jobs=n_jobs)(
delayed(_mean_shift_single_seed)
(seed, X, nbrs, max_iter) for seed in seeds)
# copy results in a dictionary
for i in range(len(seeds)):
if all_res[i] is not None:
center_intensity_dict[all_res[i][0]] = all_res[i][1]
if not center_intensity_dict:
# nothing near seeds
raise ValueError("No point was within bandwidth=%f of any seed."
" Try a different seeding strategy \
or increase the bandwidth."
% bandwidth)
# POST PROCESSING: remove near duplicate points
# If the distance between two kernels is less than the bandwidth,
# then we have to remove one because it is a duplicate. Remove the
# one with fewer points.
sorted_by_intensity = sorted(center_intensity_dict.items(),
key=lambda tup: tup[1], reverse=True)
sorted_centers = np.array([tup[0] for tup in sorted_by_intensity])
    unique = np.ones(len(sorted_centers), dtype=bool)
nbrs = NearestNeighbors(radius=bandwidth,
n_jobs=n_jobs).fit(sorted_centers)
for i, center in enumerate(sorted_centers):
if unique[i]:
neighbor_idxs = nbrs.radius_neighbors([center],
return_distance=False)[0]
unique[neighbor_idxs] = 0
unique[i] = 1 # leave the current point as unique
cluster_centers = sorted_centers[unique]
# ASSIGN LABELS: a point belongs to the cluster that it is closest to
nbrs = NearestNeighbors(n_neighbors=1, n_jobs=n_jobs).fit(cluster_centers)
    labels = np.zeros(n_samples, dtype=int)
distances, idxs = nbrs.kneighbors(X)
if cluster_all:
labels = idxs.flatten()
else:
labels.fill(-1)
bool_selector = distances.flatten() <= bandwidth
labels[bool_selector] = idxs.flatten()[bool_selector]
return cluster_centers, labels
def get_bin_seeds(X, bin_size, min_bin_freq=1):
"""Finds seeds for mean_shift.
Finds seeds by first binning data onto a grid whose lines are
spaced bin_size apart, and then choosing those bins with at least
min_bin_freq points.
Parameters
----------
X : array-like, shape=[n_samples, n_features]
Input points, the same points that will be used in mean_shift.
bin_size : float
Controls the coarseness of the binning. Smaller values lead
to more seeding (which is computationally more expensive). If you're
not sure how to set this, set it to the value of the bandwidth used
in clustering.mean_shift.
min_bin_freq : integer, optional
Only bins with at least min_bin_freq will be selected as seeds.
Raising this value decreases the number of seeds found, which
makes mean_shift computationally cheaper.
Returns
-------
bin_seeds : array-like, shape=[n_samples, n_features]
Points used as initial kernel positions in clustering.mean_shift.
"""
# Bin points
bin_sizes = defaultdict(int)
for point in X:
binned_point = np.round(point / bin_size)
bin_sizes[tuple(binned_point)] += 1
# Select only those bins as seeds which have enough members
bin_seeds = np.array([point for point, freq in six.iteritems(bin_sizes) if
freq >= min_bin_freq], dtype=np.float32)
if len(bin_seeds) == len(X):
warnings.warn("Binning data failed with provided bin_size=%f,"
" using data points as seeds." % bin_size)
return X
bin_seeds = bin_seeds * bin_size
return bin_seeds
class MeanShift(BaseEstimator, ClusterMixin):
"""Mean shift clustering using a flat kernel.
Mean shift clustering aims to discover "blobs" in a smooth density of
samples. It is a centroid-based algorithm, which works by updating
candidates for centroids to be the mean of the points within a given
region. These candidates are then filtered in a post-processing stage to
eliminate near-duplicates to form the final set of centroids.
Seeding is performed using a binning technique for scalability.
Read more in the :ref:`User Guide <mean_shift>`.
Parameters
----------
bandwidth : float, optional
Bandwidth used in the RBF kernel.
If not given, the bandwidth is estimated using
sklearn.cluster.estimate_bandwidth; see the documentation for that
function for hints on scalability (see also the Notes, below).
seeds : array, shape=[n_samples, n_features], optional
Seeds used to initialize kernels. If not set,
the seeds are calculated by clustering.get_bin_seeds
with bandwidth as the grid size and default values for
other parameters.
bin_seeding : boolean, optional
If true, initial kernel locations are not locations of all
points, but rather the location of the discretized version of
points, where points are binned onto a grid whose coarseness
corresponds to the bandwidth. Setting this option to True will speed
up the algorithm because fewer seeds will be initialized.
default value: False
Ignored if seeds argument is not None.
min_bin_freq : int, optional
To speed up the algorithm, accept only those bins with at least
min_bin_freq points as seeds. If not defined, set to 1.
cluster_all : boolean, default True
If true, then all points are clustered, even those orphans that are
not within any kernel. Orphans are assigned to the nearest kernel.
If false, then orphans are given cluster label -1.
n_jobs : int
The number of jobs to use for the computation. This works by computing
each of the n_init runs in parallel.
If -1 all CPUs are used. If 1 is given, no parallel computing code is
used at all, which is useful for debugging. For n_jobs below -1,
(n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
are used.
Attributes
----------
cluster_centers_ : array, [n_clusters, n_features]
Coordinates of cluster centers.
labels_ :
Labels of each point.
Notes
-----
Scalability:
Because this implementation uses a flat kernel and
a Ball Tree to look up members of each kernel, the complexity will tend
towards O(T*n*log(n)) in lower dimensions, with n the number of samples
and T the number of points. In higher dimensions the complexity will
tend towards O(T*n^2).
Scalability can be boosted by using fewer seeds, for example by using
a higher value of min_bin_freq in the get_bin_seeds function.
Note that the estimate_bandwidth function is much less scalable than the
mean shift algorithm and will be the bottleneck if it is used.
References
----------
Dorin Comaniciu and Peter Meer, "Mean Shift: A robust approach toward
feature space analysis". IEEE Transactions on Pattern Analysis and
Machine Intelligence. 2002. pp. 603-619.
"""
def __init__(self, bandwidth=None, seeds=None, bin_seeding=False,
min_bin_freq=1, cluster_all=True, n_jobs=1):
self.bandwidth = bandwidth
self.seeds = seeds
self.bin_seeding = bin_seeding
self.cluster_all = cluster_all
self.min_bin_freq = min_bin_freq
self.n_jobs = n_jobs
def fit(self, X, y=None):
"""Perform clustering.
Parameters
-----------
X : array-like, shape=[n_samples, n_features]
Samples to cluster.
"""
X = check_array(X)
self.cluster_centers_, self.labels_ = \
mean_shift(X, bandwidth=self.bandwidth, seeds=self.seeds,
min_bin_freq=self.min_bin_freq,
bin_seeding=self.bin_seeding,
cluster_all=self.cluster_all, n_jobs=self.n_jobs)
return self
def predict(self, X):
"""Predict the closest cluster each sample in X belongs to.
Parameters
----------
X : {array-like, sparse matrix}, shape=[n_samples, n_features]
New data to predict.
Returns
-------
labels : array, shape [n_samples,]
Index of the cluster each sample belongs to.
"""
check_is_fitted(self, "cluster_centers_")
return pairwise_distances_argmin(X, self.cluster_centers_)
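# A short, hedged usage sketch (not part of the sklearn source), with
# synthetic data, showing the usual estimate-then-fit pattern:
#
#   import numpy as np
#   from sklearn.cluster import MeanShift, estimate_bandwidth
#   X = np.vstack([np.random.randn(50, 2), np.random.randn(50, 2) + 5])
#   bw = estimate_bandwidth(X, quantile=0.2, n_samples=100)
#   ms = MeanShift(bandwidth=bw, bin_seeding=True).fit(X)
#   print(ms.cluster_centers_, ms.labels_[:10])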

achang97/YouTunes | refs/heads/master | lib/python2.7/site-packages/youtube_dl/extractor/veoh.py | copies: 18

from __future__ import unicode_literals
import re
import json
from .common import InfoExtractor
from ..utils import (
int_or_none,
ExtractorError,
sanitized_Request,
)
class VeohIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?veoh\.com/(?:watch|iphone/#_Watch)/(?P<id>(?:v|e|yapi-)[\da-zA-Z]+)'
_TESTS = [{
'url': 'http://www.veoh.com/watch/v56314296nk7Zdmz3',
'md5': '620e68e6a3cff80086df3348426c9ca3',
'info_dict': {
'id': '56314296',
'ext': 'mp4',
'title': 'Straight Backs Are Stronger',
'uploader': 'LUMOback',
'description': 'At LUMOback, we believe straight backs are stronger. The LUMOback Posture & Movement Sensor: It gently vibrates when you slouch, inspiring improved posture and mobility. Use the app to track your data and improve your posture over time. ',
},
}, {
'url': 'http://www.veoh.com/watch/v27701988pbTc4wzN?h1=Chile+workers+cover+up+to+avoid+skin+damage',
'md5': '4a6ff84b87d536a6a71e6aa6c0ad07fa',
'info_dict': {
'id': '27701988',
'ext': 'mp4',
'title': 'Chile workers cover up to avoid skin damage',
'description': 'md5:2bd151625a60a32822873efc246ba20d',
'uploader': 'afp-news',
'duration': 123,
},
'skip': 'This video has been deleted.',
}, {
'url': 'http://www.veoh.com/watch/v69525809F6Nc4frX',
'md5': '4fde7b9e33577bab2f2f8f260e30e979',
'note': 'Embedded ooyala video',
'info_dict': {
'id': '69525809',
'ext': 'mp4',
'title': 'Doctors Alter Plan For Preteen\'s Weight Loss Surgery',
'description': 'md5:f5a11c51f8fb51d2315bca0937526891',
'uploader': 'newsy-videos',
},
'skip': 'This video has been deleted.',
}, {
'url': 'http://www.veoh.com/watch/e152215AJxZktGS',
'only_matching': True,
}]
def _extract_formats(self, source):
formats = []
link = source.get('aowPermalink')
if link:
formats.append({
'url': link,
'ext': 'mp4',
'format_id': 'aow',
})
link = source.get('fullPreviewHashLowPath')
if link:
formats.append({
'url': link,
'format_id': 'low',
})
link = source.get('fullPreviewHashHighPath')
if link:
formats.append({
'url': link,
'format_id': 'high',
})
return formats
def _extract_video(self, source):
return {
'id': source.get('videoId'),
'title': source.get('title'),
'description': source.get('description'),
'thumbnail': source.get('highResImage') or source.get('medResImage'),
'uploader': source.get('username'),
'duration': int_or_none(source.get('length')),
'view_count': int_or_none(source.get('views')),
'age_limit': 18 if source.get('isMature') == 'true' or source.get('isSexy') == 'true' else 0,
'formats': self._extract_formats(source),
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
if video_id.startswith('v'):
rsp = self._download_xml(
r'http://www.veoh.com/api/findByPermalink?permalink=%s' % video_id, video_id, 'Downloading video XML')
stat = rsp.get('stat')
if stat == 'ok':
return self._extract_video(rsp.find('./videoList/video'))
elif stat == 'fail':
raise ExtractorError(
'%s said: %s' % (self.IE_NAME, rsp.find('./errorList/error').get('errorMessage')), expected=True)
webpage = self._download_webpage(url, video_id)
age_limit = 0
if 'class="adultwarning-container"' in webpage:
self.report_age_confirmation()
age_limit = 18
request = sanitized_Request(url)
request.add_header('Cookie', 'confirmedAdult=true')
webpage = self._download_webpage(request, video_id)
m_youtube = re.search(r'http://www\.youtube\.com/v/(.*?)(\&|"|\?)', webpage)
if m_youtube is not None:
youtube_id = m_youtube.group(1)
self.to_screen('%s: detected Youtube video.' % video_id)
return self.url_result(youtube_id, 'Youtube')
info = json.loads(
self._search_regex(r'videoDetailsJSON = \'({.*?})\';', webpage, 'info').replace('\\\'', '\''))
video = self._extract_video(info)
video['age_limit'] = age_limit
return video
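# Hedged usage note (not part of the upstream file): extractors like this are
# exercised through the youtube-dl CLI or its Python API, e.g.
#
#   youtube-dl 'http://www.veoh.com/watch/v56314296nk7Zdmz3'
#
# or programmatically:
#
#   from youtube_dl import YoutubeDL
#   with YoutubeDL({'skip_download': True}) as ydl:
#       info = ydl.extract_info(
#           'http://www.veoh.com/watch/v56314296nk7Zdmz3', download=False)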

sarthakmeh03/django | refs/heads/master | django/contrib/sessions/backends/signed_cookies.py | copies: 82

from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase
from django.core import signing
class SessionStore(SessionBase):
def load(self):
"""
We load the data from the key itself instead of fetching from
some external data store. Opposite of _get_session_key(),
raises BadSignature if signature fails.
"""
try:
return signing.loads(
self.session_key,
serializer=self.serializer,
# This doesn't handle non-default expiry dates, see #19201
max_age=settings.SESSION_COOKIE_AGE,
salt='django.contrib.sessions.backends.signed_cookies',
)
except Exception:
# BadSignature, ValueError, or unpickling exceptions. If any of
# these happen, reset the session.
self.create()
return {}
def create(self):
"""
To create a new key, we simply make sure that the modified flag is set
so that the cookie is set on the client for the current request.
"""
self.modified = True
def save(self, must_create=False):
"""
To save, we get the session key as a securely signed string and then
set the modified flag so that the cookie is set on the client for the
current request.
"""
self._session_key = self._get_session_key()
self.modified = True
def exists(self, session_key=None):
"""
This method makes sense when you're talking to a shared resource, but
it doesn't matter when you're storing the information in the client's
cookie.
"""
return False
def delete(self, session_key=None):
"""
To delete, we clear the session key and the underlying data structure
and set the modified flag so that the cookie is set on the client for
the current request.
"""
self._session_key = ''
self._session_cache = {}
self.modified = True
def cycle_key(self):
"""
Keeps the same data but with a new key. To do this, we just have to
call ``save()`` and it will automatically save a cookie with a new key
at the end of the request.
"""
self.save()
def _get_session_key(self):
"""
Most session backends don't need to override this method, but we do,
because instead of generating a random string, we want to actually
generate a secure url-safe Base64-encoded string of data as our
session key.
"""
session_cache = getattr(self, '_session_cache', {})
return signing.dumps(
session_cache, compress=True,
salt='django.contrib.sessions.backends.signed_cookies',
serializer=self.serializer,
)
@classmethod
def clear_expired(cls):
pass
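# A configuration sketch (not part of the Django source): enabling this
# backend is a one-line settings change in a project's settings.py:
#
#   SESSION_ENGINE = 'django.contrib.sessions.backends.signed_cookies'
#
# Session data is signed but readable by the client, so never store secrets
# in the session when using this backend.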

Zhongqilong/kbengine | refs/heads/master | kbe/res/scripts/common/Lib/tkinter/test/test_ttk/test_widgets.py | copies: 59

import unittest
import tkinter
from tkinter import ttk
from test.support import requires
import sys
from tkinter.test.test_ttk.test_functions import MockTclObj
from tkinter.test.support import (AbstractTkTest, tcl_version, get_tk_patchlevel,
simulate_mouse_click)
from tkinter.test.widget_tests import (add_standard_options, noconv,
AbstractWidgetTest, StandardOptionsTests, IntegerSizeTests, PixelSizeTests,
setUpModule)
requires('gui')
class StandardTtkOptionsTests(StandardOptionsTests):
def test_class(self):
widget = self.create()
self.assertEqual(widget['class'], '')
        errmsg = 'attempt to change read-only option'
        if get_tk_patchlevel() < (8, 6, 0):  # actually this was changed in 8.6b3
            errmsg = 'Attempt to change read-only option'
self.checkInvalidParam(widget, 'class', 'Foo', errmsg=errmsg)
widget2 = self.create(class_='Foo')
self.assertEqual(widget2['class'], 'Foo')
def test_padding(self):
widget = self.create()
self.checkParam(widget, 'padding', 0, expected=('0',))
self.checkParam(widget, 'padding', 5, expected=('5',))
self.checkParam(widget, 'padding', (5, 6), expected=('5', '6'))
self.checkParam(widget, 'padding', (5, 6, 7),
expected=('5', '6', '7'))
self.checkParam(widget, 'padding', (5, 6, 7, 8),
expected=('5', '6', '7', '8'))
self.checkParam(widget, 'padding', ('5p', '6p', '7p', '8p'))
self.checkParam(widget, 'padding', (), expected='')
def test_style(self):
widget = self.create()
self.assertEqual(widget['style'], '')
errmsg = 'Layout Foo not found'
if hasattr(self, 'default_orient'):
errmsg = ('Layout %s.Foo not found' %
getattr(self, 'default_orient').title())
self.checkInvalidParam(widget, 'style', 'Foo',
errmsg=errmsg)
widget2 = self.create(class_='Foo')
self.assertEqual(widget2['class'], 'Foo')
# XXX
pass
class WidgetTest(AbstractTkTest, unittest.TestCase):
"""Tests methods available in every ttk widget."""
def setUp(self):
super().setUp()
self.widget = ttk.Button(self.root, width=0, text="Text")
self.widget.pack()
self.widget.wait_visibility()
def test_identify(self):
self.widget.update_idletasks()
self.assertEqual(self.widget.identify(
int(self.widget.winfo_width() / 2),
int(self.widget.winfo_height() / 2)
), "label")
self.assertEqual(self.widget.identify(-1, -1), "")
self.assertRaises(tkinter.TclError, self.widget.identify, None, 5)
self.assertRaises(tkinter.TclError, self.widget.identify, 5, None)
self.assertRaises(tkinter.TclError, self.widget.identify, 5, '')
def test_widget_state(self):
# XXX not sure about the portability of all these tests
self.assertEqual(self.widget.state(), ())
self.assertEqual(self.widget.instate(['!disabled']), True)
# changing from !disabled to disabled
self.assertEqual(self.widget.state(['disabled']), ('!disabled', ))
# no state change
self.assertEqual(self.widget.state(['disabled']), ())
# change back to !disable but also active
self.assertEqual(self.widget.state(['!disabled', 'active']),
('!active', 'disabled'))
# no state changes, again
self.assertEqual(self.widget.state(['!disabled', 'active']), ())
self.assertEqual(self.widget.state(['active', '!disabled']), ())
def test_cb(arg1, **kw):
return arg1, kw
self.assertEqual(self.widget.instate(['!disabled'],
test_cb, "hi", **{"msg": "there"}),
('hi', {'msg': 'there'}))
# attempt to set invalid statespec
currstate = self.widget.state()
self.assertRaises(tkinter.TclError, self.widget.instate,
['badstate'])
self.assertRaises(tkinter.TclError, self.widget.instate,
['disabled', 'badstate'])
# verify that widget didn't change its state
self.assertEqual(currstate, self.widget.state())
# ensuring that passing None as state doesn't modify current state
self.widget.state(['active', '!disabled'])
self.assertEqual(self.widget.state(), ('active', ))
class AbstractToplevelTest(AbstractWidgetTest, PixelSizeTests):
_conv_pixels = noconv
@add_standard_options(StandardTtkOptionsTests)
class FrameTest(AbstractToplevelTest, unittest.TestCase):
OPTIONS = (
'borderwidth', 'class', 'cursor', 'height',
'padding', 'relief', 'style', 'takefocus',
'width',
)
def create(self, **kwargs):
return ttk.Frame(self.root, **kwargs)
@add_standard_options(StandardTtkOptionsTests)
class LabelFrameTest(AbstractToplevelTest, unittest.TestCase):
OPTIONS = (
'borderwidth', 'class', 'cursor', 'height',
'labelanchor', 'labelwidget',
'padding', 'relief', 'style', 'takefocus',
'text', 'underline', 'width',
)
def create(self, **kwargs):
return ttk.LabelFrame(self.root, **kwargs)
def test_labelanchor(self):
widget = self.create()
self.checkEnumParam(widget, 'labelanchor',
'e', 'en', 'es', 'n', 'ne', 'nw', 's', 'se', 'sw', 'w', 'wn', 'ws',
errmsg='Bad label anchor specification {}')
self.checkInvalidParam(widget, 'labelanchor', 'center')
def test_labelwidget(self):
widget = self.create()
label = ttk.Label(self.root, text='Mupp', name='foo')
self.checkParam(widget, 'labelwidget', label, expected='.foo')
label.destroy()
class AbstractLabelTest(AbstractWidgetTest):
def checkImageParam(self, widget, name):
image = tkinter.PhotoImage(master=self.root, name='image1')
image2 = tkinter.PhotoImage(master=self.root, name='image2')
self.checkParam(widget, name, image, expected=('image1',))
self.checkParam(widget, name, 'image1', expected=('image1',))
self.checkParam(widget, name, (image,), expected=('image1',))
self.checkParam(widget, name, (image, 'active', image2),
expected=('image1', 'active', 'image2'))
self.checkParam(widget, name, 'image1 active image2',
expected=('image1', 'active', 'image2'))
self.checkInvalidParam(widget, name, 'spam',
errmsg='image "spam" doesn\'t exist')
def test_compound(self):
widget = self.create()
self.checkEnumParam(widget, 'compound',
'none', 'text', 'image', 'center',
'top', 'bottom', 'left', 'right')
def test_state(self):
widget = self.create()
self.checkParams(widget, 'state', 'active', 'disabled', 'normal')
def test_width(self):
widget = self.create()
self.checkParams(widget, 'width', 402, -402, 0)
@add_standard_options(StandardTtkOptionsTests)
class LabelTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'anchor', 'background',
'class', 'compound', 'cursor', 'font', 'foreground',
'image', 'justify', 'padding', 'relief', 'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'width', 'wraplength',
)
_conv_pixels = noconv
def create(self, **kwargs):
return ttk.Label(self.root, **kwargs)
def test_font(self):
widget = self.create()
self.checkParam(widget, 'font',
'-Adobe-Helvetica-Medium-R-Normal--*-120-*-*-*-*-*-*')
@add_standard_options(StandardTtkOptionsTests)
class ButtonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'compound', 'cursor', 'default',
'image', 'state', 'style', 'takefocus', 'text', 'textvariable',
'underline', 'width',
)
def create(self, **kwargs):
return ttk.Button(self.root, **kwargs)
def test_default(self):
widget = self.create()
self.checkEnumParam(widget, 'default', 'normal', 'active', 'disabled')
def test_invoke(self):
success = []
btn = ttk.Button(self.root, command=lambda: success.append(1))
btn.invoke()
self.assertTrue(success)
@add_standard_options(StandardTtkOptionsTests)
class CheckbuttonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'compound', 'cursor',
'image',
'offvalue', 'onvalue',
'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'variable', 'width',
)
def create(self, **kwargs):
return ttk.Checkbutton(self.root, **kwargs)
def test_offvalue(self):
widget = self.create()
self.checkParams(widget, 'offvalue', 1, 2.3, '', 'any string')
def test_onvalue(self):
widget = self.create()
self.checkParams(widget, 'onvalue', 1, 2.3, '', 'any string')
def test_invoke(self):
success = []
def cb_test():
success.append(1)
return "cb test called"
cbtn = ttk.Checkbutton(self.root, command=cb_test)
# the variable automatically created by ttk.Checkbutton is actually
# undefined till we invoke the Checkbutton
self.assertEqual(cbtn.state(), ('alternate', ))
self.assertRaises(tkinter.TclError, cbtn.tk.globalgetvar,
cbtn['variable'])
res = cbtn.invoke()
self.assertEqual(res, "cb test called")
self.assertEqual(cbtn['onvalue'],
cbtn.tk.globalgetvar(cbtn['variable']))
self.assertTrue(success)
cbtn['command'] = ''
res = cbtn.invoke()
self.assertFalse(str(res))
self.assertLessEqual(len(success), 1)
self.assertEqual(cbtn['offvalue'],
cbtn.tk.globalgetvar(cbtn['variable']))
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class ComboboxTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'exportselection', 'height',
'justify', 'postcommand', 'state', 'style',
'takefocus', 'textvariable', 'values', 'width',
)
def setUp(self):
super().setUp()
self.combo = self.create()
def create(self, **kwargs):
return ttk.Combobox(self.root, **kwargs)
def test_height(self):
widget = self.create()
self.checkParams(widget, 'height', 100, 101.2, 102.6, -100, 0, '1i')
def test_state(self):
widget = self.create()
self.checkParams(widget, 'state', 'active', 'disabled', 'normal')
def _show_drop_down_listbox(self):
width = self.combo.winfo_width()
self.combo.event_generate('<ButtonPress-1>', x=width - 5, y=5)
self.combo.event_generate('<ButtonRelease-1>', x=width - 5, y=5)
self.combo.update_idletasks()
def test_virtual_event(self):
success = []
self.combo['values'] = [1]
self.combo.bind('<<ComboboxSelected>>',
lambda evt: success.append(True))
self.combo.pack()
self.combo.wait_visibility()
height = self.combo.winfo_height()
self._show_drop_down_listbox()
self.combo.update()
self.combo.event_generate('<Return>')
self.combo.update()
self.assertTrue(success)
def test_postcommand(self):
success = []
self.combo['postcommand'] = lambda: success.append(True)
self.combo.pack()
self.combo.wait_visibility()
self._show_drop_down_listbox()
self.assertTrue(success)
# testing postcommand removal
self.combo['postcommand'] = ''
self._show_drop_down_listbox()
self.assertEqual(len(success), 1)
def test_values(self):
def check_get_current(getval, currval):
self.assertEqual(self.combo.get(), getval)
self.assertEqual(self.combo.current(), currval)
self.assertEqual(self.combo['values'],
() if tcl_version < (8, 5) else '')
check_get_current('', -1)
self.checkParam(self.combo, 'values', 'mon tue wed thur',
expected=('mon', 'tue', 'wed', 'thur'))
self.checkParam(self.combo, 'values', ('mon', 'tue', 'wed', 'thur'))
self.checkParam(self.combo, 'values', (42, 3.14, '', 'any string'))
self.checkParam(self.combo, 'values', '', expected=())
self.combo['values'] = ['a', 1, 'c']
self.combo.set('c')
check_get_current('c', 2)
self.combo.current(0)
check_get_current('a', 0)
self.combo.set('d')
check_get_current('d', -1)
# testing values with empty string
self.combo.set('')
self.combo['values'] = (1, 2, '', 3)
check_get_current('', 2)
# testing values with empty string set through configure
self.combo.configure(values=[1, '', 2])
self.assertEqual(self.combo['values'],
('1', '', '2') if self.wantobjects else
'1 {} 2')
# testing values with spaces
self.combo['values'] = ['a b', 'a\tb', 'a\nb']
self.assertEqual(self.combo['values'],
('a b', 'a\tb', 'a\nb') if self.wantobjects else
'{a b} {a\tb} {a\nb}')
# testing values with special characters
self.combo['values'] = [r'a\tb', '"a"', '} {']
self.assertEqual(self.combo['values'],
(r'a\tb', '"a"', '} {') if self.wantobjects else
r'a\\tb {"a"} \}\ \{')
# out of range
self.assertRaises(tkinter.TclError, self.combo.current,
len(self.combo['values']))
# it expects an integer (or something that can be converted to int)
self.assertRaises(tkinter.TclError, self.combo.current, '')
# testing creating combobox with empty string in values
combo2 = ttk.Combobox(self.root, values=[1, 2, ''])
self.assertEqual(combo2['values'],
('1', '2', '') if self.wantobjects else '1 2 {}')
combo2.destroy()
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class EntryTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'background', 'class', 'cursor',
'exportselection', 'font',
'invalidcommand', 'justify',
'show', 'state', 'style', 'takefocus', 'textvariable',
'validate', 'validatecommand', 'width', 'xscrollcommand',
)
def setUp(self):
super().setUp()
self.entry = self.create()
def create(self, **kwargs):
return ttk.Entry(self.root, **kwargs)
def test_invalidcommand(self):
widget = self.create()
self.checkCommandParam(widget, 'invalidcommand')
def test_show(self):
widget = self.create()
self.checkParam(widget, 'show', '*')
self.checkParam(widget, 'show', '')
self.checkParam(widget, 'show', ' ')
def test_state(self):
widget = self.create()
self.checkParams(widget, 'state',
'disabled', 'normal', 'readonly')
def test_validate(self):
widget = self.create()
self.checkEnumParam(widget, 'validate',
'all', 'key', 'focus', 'focusin', 'focusout', 'none')
def test_validatecommand(self):
widget = self.create()
self.checkCommandParam(widget, 'validatecommand')
def test_bbox(self):
self.assertIsBoundingBox(self.entry.bbox(0))
self.assertRaises(tkinter.TclError, self.entry.bbox, 'noindex')
self.assertRaises(tkinter.TclError, self.entry.bbox, None)
def test_identify(self):
self.entry.pack()
self.entry.wait_visibility()
self.entry.update_idletasks()
self.assertEqual(self.entry.identify(5, 5), "textarea")
self.assertEqual(self.entry.identify(-1, -1), "")
self.assertRaises(tkinter.TclError, self.entry.identify, None, 5)
self.assertRaises(tkinter.TclError, self.entry.identify, 5, None)
self.assertRaises(tkinter.TclError, self.entry.identify, 5, '')
def test_validation_options(self):
success = []
test_invalid = lambda: success.append(True)
self.entry['validate'] = 'none'
self.entry['validatecommand'] = lambda: False
self.entry['invalidcommand'] = test_invalid
self.entry.validate()
self.assertTrue(success)
self.entry['invalidcommand'] = ''
self.entry.validate()
self.assertEqual(len(success), 1)
self.entry['invalidcommand'] = test_invalid
self.entry['validatecommand'] = lambda: True
self.entry.validate()
self.assertEqual(len(success), 1)
self.entry['validatecommand'] = ''
self.entry.validate()
self.assertEqual(len(success), 1)
self.entry['validatecommand'] = True
self.assertRaises(tkinter.TclError, self.entry.validate)
def test_validation(self):
validation = []
def validate(to_insert):
if not 'a' <= to_insert.lower() <= 'z':
validation.append(False)
return False
validation.append(True)
return True
self.entry['validate'] = 'key'
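        # '%S' is the Tk percent substitution for the text being inserted or
        # deleted, so the validator sees each keystroke's string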
self.entry['validatecommand'] = self.entry.register(validate), '%S'
self.entry.insert('end', 1)
self.entry.insert('end', 'a')
self.assertEqual(validation, [False, True])
self.assertEqual(self.entry.get(), 'a')
def test_revalidation(self):
def validate(content):
for letter in content:
if not 'a' <= letter.lower() <= 'z':
return False
return True
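        # '%P' substitutes the prospective value of the entry if the edit is
        # allowed, so validate() re-checks the whole contents at once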
self.entry['validatecommand'] = self.entry.register(validate), '%P'
self.entry.insert('end', 'avocado')
self.assertEqual(self.entry.validate(), True)
self.assertEqual(self.entry.state(), ())
self.entry.delete(0, 'end')
self.assertEqual(self.entry.get(), '')
self.entry.insert('end', 'a1b')
self.assertEqual(self.entry.validate(), False)
self.assertEqual(self.entry.state(), ('invalid', ))
self.entry.delete(1)
self.assertEqual(self.entry.validate(), True)
self.assertEqual(self.entry.state(), ())
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class PanedWindowTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'height',
'orient', 'style', 'takefocus', 'width',
)
def setUp(self):
super().setUp()
self.paned = self.create()
def create(self, **kwargs):
return ttk.PanedWindow(self.root, **kwargs)
def test_orient(self):
widget = self.create()
self.assertEqual(str(widget['orient']), 'vertical')
        errmsg = 'attempt to change read-only option'
        if get_tk_patchlevel() < (8, 6, 0): # actually this was changed in 8.6b3
            errmsg = 'Attempt to change read-only option'
self.checkInvalidParam(widget, 'orient', 'horizontal',
errmsg=errmsg)
widget2 = self.create(orient='horizontal')
self.assertEqual(str(widget2['orient']), 'horizontal')
def test_add(self):
# attempt to add a child that is not a direct child of the paned window
label = ttk.Label(self.paned)
child = ttk.Label(label)
self.assertRaises(tkinter.TclError, self.paned.add, child)
label.destroy()
child.destroy()
# another attempt
label = ttk.Label(self.root)
child = ttk.Label(label)
self.assertRaises(tkinter.TclError, self.paned.add, child)
child.destroy()
label.destroy()
good_child = ttk.Label(self.root)
self.paned.add(good_child)
# re-adding a child is not accepted
self.assertRaises(tkinter.TclError, self.paned.add, good_child)
other_child = ttk.Label(self.paned)
self.paned.add(other_child)
self.assertEqual(self.paned.pane(0), self.paned.pane(1))
self.assertRaises(tkinter.TclError, self.paned.pane, 2)
good_child.destroy()
other_child.destroy()
self.assertRaises(tkinter.TclError, self.paned.pane, 0)
def test_forget(self):
self.assertRaises(tkinter.TclError, self.paned.forget, None)
self.assertRaises(tkinter.TclError, self.paned.forget, 0)
self.paned.add(ttk.Label(self.root))
self.paned.forget(0)
self.assertRaises(tkinter.TclError, self.paned.forget, 0)
def test_insert(self):
self.assertRaises(tkinter.TclError, self.paned.insert, None, 0)
self.assertRaises(tkinter.TclError, self.paned.insert, 0, None)
self.assertRaises(tkinter.TclError, self.paned.insert, 0, 0)
child = ttk.Label(self.root)
child2 = ttk.Label(self.root)
child3 = ttk.Label(self.root)
self.assertRaises(tkinter.TclError, self.paned.insert, 0, child)
self.paned.insert('end', child2)
self.paned.insert(0, child)
self.assertEqual(self.paned.panes(), (str(child), str(child2)))
self.paned.insert(0, child2)
self.assertEqual(self.paned.panes(), (str(child2), str(child)))
self.paned.insert('end', child3)
self.assertEqual(self.paned.panes(),
(str(child2), str(child), str(child3)))
# reinserting a child should move it to its current position
panes = self.paned.panes()
self.paned.insert('end', child3)
self.assertEqual(panes, self.paned.panes())
# moving child3 to child2 position should result in child2 ending up
# in previous child position and child ending up in previous child3
# position
self.paned.insert(child2, child3)
self.assertEqual(self.paned.panes(),
(str(child3), str(child2), str(child)))
def test_pane(self):
self.assertRaises(tkinter.TclError, self.paned.pane, 0)
child = ttk.Label(self.root)
self.paned.add(child)
self.assertIsInstance(self.paned.pane(0), dict)
self.assertEqual(self.paned.pane(0, weight=None),
0 if self.wantobjects else '0')
# newer form for querying a single option
self.assertEqual(self.paned.pane(0, 'weight'),
0 if self.wantobjects else '0')
self.assertEqual(self.paned.pane(0), self.paned.pane(str(child)))
self.assertRaises(tkinter.TclError, self.paned.pane, 0,
badoption='somevalue')
def test_sashpos(self):
self.assertRaises(tkinter.TclError, self.paned.sashpos, None)
self.assertRaises(tkinter.TclError, self.paned.sashpos, '')
self.assertRaises(tkinter.TclError, self.paned.sashpos, 0)
child = ttk.Label(self.paned, text='a')
self.paned.add(child, weight=1)
self.assertRaises(tkinter.TclError, self.paned.sashpos, 0)
child2 = ttk.Label(self.paned, text='b')
self.paned.add(child2)
self.assertRaises(tkinter.TclError, self.paned.sashpos, 1)
self.paned.pack(expand=True, fill='both')
self.paned.wait_visibility()
curr_pos = self.paned.sashpos(0)
self.paned.sashpos(0, 1000)
self.assertNotEqual(curr_pos, self.paned.sashpos(0))
self.assertIsInstance(self.paned.sashpos(0), int)
@add_standard_options(StandardTtkOptionsTests)
class RadiobuttonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'compound', 'cursor',
'image',
'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'value', 'variable', 'width',
)
def create(self, **kwargs):
return ttk.Radiobutton(self.root, **kwargs)
def test_value(self):
widget = self.create()
self.checkParams(widget, 'value', 1, 2.3, '', 'any string')
def test_invoke(self):
success = []
def cb_test():
success.append(1)
return "cb test called"
myvar = tkinter.IntVar(self.root)
cbtn = ttk.Radiobutton(self.root, command=cb_test,
variable=myvar, value=0)
cbtn2 = ttk.Radiobutton(self.root, command=cb_test,
variable=myvar, value=1)
if self.wantobjects:
conv = lambda x: x
else:
conv = int
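        # with wantobjects the Tcl result is already a native value; otherwise
        # it comes back as a string and must be converted before comparing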
res = cbtn.invoke()
self.assertEqual(res, "cb test called")
self.assertEqual(conv(cbtn['value']), myvar.get())
self.assertEqual(myvar.get(),
conv(cbtn.tk.globalgetvar(cbtn['variable'])))
self.assertTrue(success)
cbtn2['command'] = ''
res = cbtn2.invoke()
self.assertEqual(str(res), '')
self.assertLessEqual(len(success), 1)
self.assertEqual(conv(cbtn2['value']), myvar.get())
self.assertEqual(myvar.get(),
conv(cbtn.tk.globalgetvar(cbtn['variable'])))
self.assertEqual(str(cbtn['variable']), str(cbtn2['variable']))
class MenubuttonTest(AbstractLabelTest, unittest.TestCase):
OPTIONS = (
'class', 'compound', 'cursor', 'direction',
'image', 'menu', 'state', 'style',
'takefocus', 'text', 'textvariable',
'underline', 'width',
)
def create(self, **kwargs):
return ttk.Menubutton(self.root, **kwargs)
def test_direction(self):
widget = self.create()
self.checkEnumParam(widget, 'direction',
'above', 'below', 'left', 'right', 'flush')
def test_menu(self):
widget = self.create()
menu = tkinter.Menu(widget, name='menu')
self.checkParam(widget, 'menu', menu, conv=str)
menu.destroy()
@add_standard_options(StandardTtkOptionsTests)
class ScaleTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'cursor', 'from', 'length',
'orient', 'style', 'takefocus', 'to', 'value', 'variable',
)
_conv_pixels = noconv
default_orient = 'horizontal'
def setUp(self):
super().setUp()
self.scale = self.create()
self.scale.pack()
self.scale.update()
def create(self, **kwargs):
return ttk.Scale(self.root, **kwargs)
def test_from(self):
widget = self.create()
self.checkFloatParam(widget, 'from', 100, 14.9, 15.1, conv=False)
def test_length(self):
widget = self.create()
self.checkPixelsParam(widget, 'length', 130, 131.2, 135.6, '5i')
def test_to(self):
widget = self.create()
self.checkFloatParam(widget, 'to', 300, 14.9, 15.1, -10, conv=False)
def test_value(self):
widget = self.create()
self.checkFloatParam(widget, 'value', 300, 14.9, 15.1, -10, conv=False)
def test_custom_event(self):
failure = [1, 1, 1] # will need to be empty
funcid = self.scale.bind('<<RangeChanged>>', lambda evt: failure.pop())
self.scale['from'] = 10
self.scale['from_'] = 10
self.scale['to'] = 3
self.assertFalse(failure)
failure = [1, 1, 1]
self.scale.configure(from_=2, to=5)
self.scale.configure(from_=0, to=-2)
self.scale.configure(to=10)
self.assertFalse(failure)
def test_get(self):
if self.wantobjects:
conv = lambda x: x
else:
conv = float
scale_width = self.scale.winfo_width()
self.assertEqual(self.scale.get(scale_width, 0), self.scale['to'])
self.assertEqual(conv(self.scale.get(0, 0)), conv(self.scale['from']))
self.assertEqual(self.scale.get(), self.scale['value'])
self.scale['value'] = 30
self.assertEqual(self.scale.get(), self.scale['value'])
self.assertRaises(tkinter.TclError, self.scale.get, '', 0)
self.assertRaises(tkinter.TclError, self.scale.get, 0, '')
def test_set(self):
if self.wantobjects:
conv = lambda x: x
else:
conv = float
# set restricts the max/min values according to the current range
max = conv(self.scale['to'])
new_max = max + 10
self.scale.set(new_max)
self.assertEqual(conv(self.scale.get()), max)
min = conv(self.scale['from'])
self.scale.set(min - 1)
self.assertEqual(conv(self.scale.get()), min)
        # changing the variable directly doesn't impose this limitation, though
var = tkinter.DoubleVar(self.root)
self.scale['variable'] = var
var.set(max + 5)
self.assertEqual(conv(self.scale.get()), var.get())
self.assertEqual(conv(self.scale.get()), max + 5)
del var
# the same happens with the value option
self.scale['value'] = max + 10
self.assertEqual(conv(self.scale.get()), max + 10)
self.assertEqual(conv(self.scale.get()), conv(self.scale['value']))
# nevertheless, note that the max/min values we can get specifying
# x, y coords are the ones according to the current range
self.assertEqual(conv(self.scale.get(0, 0)), min)
self.assertEqual(conv(self.scale.get(self.scale.winfo_width(), 0)), max)
self.assertRaises(tkinter.TclError, self.scale.set, None)
@add_standard_options(StandardTtkOptionsTests)
class ProgressbarTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'orient', 'length',
'mode', 'maximum', 'phase',
'style', 'takefocus', 'value', 'variable',
)
_conv_pixels = noconv
default_orient = 'horizontal'
def create(self, **kwargs):
return ttk.Progressbar(self.root, **kwargs)
def test_length(self):
widget = self.create()
self.checkPixelsParam(widget, 'length', 100.1, 56.7, '2i')
def test_maximum(self):
widget = self.create()
self.checkFloatParam(widget, 'maximum', 150.2, 77.7, 0, -10, conv=False)
def test_mode(self):
widget = self.create()
self.checkEnumParam(widget, 'mode', 'determinate', 'indeterminate')
def test_phase(self):
# XXX
pass
def test_value(self):
widget = self.create()
self.checkFloatParam(widget, 'value', 150.2, 77.7, 0, -10,
conv=False)
@unittest.skipIf(sys.platform == 'darwin',
'ttk.Scrollbar is special on MacOSX')
@add_standard_options(StandardTtkOptionsTests)
class ScrollbarTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'command', 'cursor', 'orient', 'style', 'takefocus',
)
default_orient = 'vertical'
def create(self, **kwargs):
return ttk.Scrollbar(self.root, **kwargs)
@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests)
class NotebookTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'height', 'padding', 'style', 'takefocus',
)
def setUp(self):
super().setUp()
self.nb = self.create(padding=0)
self.child1 = ttk.Label(self.root)
self.child2 = ttk.Label(self.root)
self.nb.add(self.child1, text='a')
self.nb.add(self.child2, text='b')
def create(self, **kwargs):
return ttk.Notebook(self.root, **kwargs)
def test_tab_identifiers(self):
self.nb.forget(0)
self.nb.hide(self.child2)
self.assertRaises(tkinter.TclError, self.nb.tab, self.child1)
self.assertEqual(self.nb.index('end'), 1)
self.nb.add(self.child2)
self.assertEqual(self.nb.index('end'), 1)
self.nb.select(self.child2)
self.assertTrue(self.nb.tab('current'))
self.nb.add(self.child1, text='a')
self.nb.pack()
self.nb.wait_visibility()
if sys.platform == 'darwin':
tb_idx = "@20,5"
else:
tb_idx = "@5,5"
self.assertEqual(self.nb.tab(tb_idx), self.nb.tab('current'))
for i in range(5, 100, 5):
try:
if self.nb.tab('@%d, 5' % i, text=None) == 'a':
break
except tkinter.TclError:
pass
else:
self.fail("Tab with text 'a' not found")
def test_add_and_hidden(self):
self.assertRaises(tkinter.TclError, self.nb.hide, -1)
self.assertRaises(tkinter.TclError, self.nb.hide, 'hi')
self.assertRaises(tkinter.TclError, self.nb.hide, None)
self.assertRaises(tkinter.TclError, self.nb.add, None)
self.assertRaises(tkinter.TclError, self.nb.add, ttk.Label(self.root),
unknown='option')
tabs = self.nb.tabs()
self.nb.hide(self.child1)
self.nb.add(self.child1)
self.assertEqual(self.nb.tabs(), tabs)
child = ttk.Label(self.root)
self.nb.add(child, text='c')
tabs = self.nb.tabs()
curr = self.nb.index('current')
        # verify that the tab gets re-added at its previous position
child2_index = self.nb.index(self.child2)
self.nb.hide(self.child2)
self.nb.add(self.child2)
self.assertEqual(self.nb.tabs(), tabs)
self.assertEqual(self.nb.index(self.child2), child2_index)
self.assertEqual(str(self.child2), self.nb.tabs()[child2_index])
# but the tab next to it (not hidden) is the one selected now
self.assertEqual(self.nb.index('current'), curr + 1)
def test_forget(self):
self.assertRaises(tkinter.TclError, self.nb.forget, -1)
self.assertRaises(tkinter.TclError, self.nb.forget, 'hi')
self.assertRaises(tkinter.TclError, self.nb.forget, None)
tabs = self.nb.tabs()
child1_index = self.nb.index(self.child1)
self.nb.forget(self.child1)
self.assertNotIn(str(self.child1), self.nb.tabs())
self.assertEqual(len(tabs) - 1, len(self.nb.tabs()))
self.nb.add(self.child1)
self.assertEqual(self.nb.index(self.child1), 1)
self.assertNotEqual(child1_index, self.nb.index(self.child1))
def test_index(self):
self.assertRaises(tkinter.TclError, self.nb.index, -1)
self.assertRaises(tkinter.TclError, self.nb.index, None)
self.assertIsInstance(self.nb.index('end'), int)
self.assertEqual(self.nb.index(self.child1), 0)
self.assertEqual(self.nb.index(self.child2), 1)
self.assertEqual(self.nb.index('end'), 2)
def test_insert(self):
# moving tabs
tabs = self.nb.tabs()
self.nb.insert(1, tabs[0])
self.assertEqual(self.nb.tabs(), (tabs[1], tabs[0]))
self.nb.insert(self.child1, self.child2)
self.assertEqual(self.nb.tabs(), tabs)
self.nb.insert('end', self.child1)
self.assertEqual(self.nb.tabs(), (tabs[1], tabs[0]))
self.nb.insert('end', 0)
self.assertEqual(self.nb.tabs(), tabs)
# bad moves
self.assertRaises(tkinter.TclError, self.nb.insert, 2, tabs[0])
self.assertRaises(tkinter.TclError, self.nb.insert, -1, tabs[0])
# new tab
child3 = ttk.Label(self.root)
self.nb.insert(1, child3)
self.assertEqual(self.nb.tabs(), (tabs[0], str(child3), tabs[1]))
self.nb.forget(child3)
self.assertEqual(self.nb.tabs(), tabs)
self.nb.insert(self.child1, child3)
self.assertEqual(self.nb.tabs(), (str(child3), ) + tabs)
self.nb.forget(child3)
self.assertRaises(tkinter.TclError, self.nb.insert, 2, child3)
self.assertRaises(tkinter.TclError, self.nb.insert, -1, child3)
# bad inserts
self.assertRaises(tkinter.TclError, self.nb.insert, 'end', None)
self.assertRaises(tkinter.TclError, self.nb.insert, None, 0)
self.assertRaises(tkinter.TclError, self.nb.insert, None, None)
def test_select(self):
self.nb.pack()
self.nb.wait_visibility()
success = []
tab_changed = []
self.child1.bind('<Unmap>', lambda evt: success.append(True))
self.nb.bind('<<NotebookTabChanged>>',
lambda evt: tab_changed.append(True))
self.assertEqual(self.nb.select(), str(self.child1))
self.nb.select(self.child2)
self.assertTrue(success)
self.assertEqual(self.nb.select(), str(self.child2))
self.nb.update()
self.assertTrue(tab_changed)
def test_tab(self):
self.assertRaises(tkinter.TclError, self.nb.tab, -1)
self.assertRaises(tkinter.TclError, self.nb.tab, 'notab')
self.assertRaises(tkinter.TclError, self.nb.tab, None)
self.assertIsInstance(self.nb.tab(self.child1), dict)
self.assertEqual(self.nb.tab(self.child1, text=None), 'a')
# newer form for querying a single option
self.assertEqual(self.nb.tab(self.child1, 'text'), 'a')
self.nb.tab(self.child1, text='abc')
self.assertEqual(self.nb.tab(self.child1, text=None), 'abc')
self.assertEqual(self.nb.tab(self.child1, 'text'), 'abc')
def test_tabs(self):
self.assertEqual(len(self.nb.tabs()), 2)
self.nb.forget(self.child1)
self.nb.forget(self.child2)
self.assertEqual(self.nb.tabs(), ())
def test_traversal(self):
self.nb.pack()
self.nb.wait_visibility()
self.nb.select(0)
simulate_mouse_click(self.nb, 5, 5)
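        # <Control-Tab> and <Shift-Control-Tab> are default ttk.Notebook
        # bindings; they cycle tabs even before enable_traversal() is called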
self.nb.focus_force()
self.nb.event_generate('<Control-Tab>')
self.assertEqual(self.nb.select(), str(self.child2))
self.nb.focus_force()
self.nb.event_generate('<Shift-Control-Tab>')
self.assertEqual(self.nb.select(), str(self.child1))
self.nb.focus_force()
self.nb.event_generate('<Shift-Control-Tab>')
self.assertEqual(self.nb.select(), str(self.child2))
self.nb.tab(self.child1, text='a', underline=0)
self.nb.enable_traversal()
self.nb.focus_force()
simulate_mouse_click(self.nb, 5, 5)
if sys.platform == 'darwin':
self.nb.event_generate('<Option-a>')
else:
self.nb.event_generate('<Alt-a>')
self.assertEqual(self.nb.select(), str(self.child1))
@add_standard_options(StandardTtkOptionsTests)
class TreeviewTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'columns', 'cursor', 'displaycolumns',
'height', 'padding', 'selectmode', 'show',
'style', 'takefocus', 'xscrollcommand', 'yscrollcommand',
)
def setUp(self):
super().setUp()
self.tv = self.create(padding=0)
def create(self, **kwargs):
return ttk.Treeview(self.root, **kwargs)
def test_columns(self):
widget = self.create()
self.checkParam(widget, 'columns', 'a b c',
expected=('a', 'b', 'c'))
self.checkParam(widget, 'columns', ('a', 'b', 'c'))
self.checkParam(widget, 'columns', ())
def test_displaycolumns(self):
widget = self.create()
widget['columns'] = ('a', 'b', 'c')
self.checkParam(widget, 'displaycolumns', 'b a c',
expected=('b', 'a', 'c'))
self.checkParam(widget, 'displaycolumns', ('b', 'a', 'c'))
self.checkParam(widget, 'displaycolumns', '#all',
expected=('#all',))
self.checkParam(widget, 'displaycolumns', (2, 1, 0))
self.checkInvalidParam(widget, 'displaycolumns', ('a', 'b', 'd'),
errmsg='Invalid column index d')
self.checkInvalidParam(widget, 'displaycolumns', (1, 2, 3),
errmsg='Column index 3 out of bounds')
self.checkInvalidParam(widget, 'displaycolumns', (1, -2),
errmsg='Column index -2 out of bounds')
def test_height(self):
widget = self.create()
self.checkPixelsParam(widget, 'height', 100, -100, 0, '3c', conv=False)
self.checkPixelsParam(widget, 'height', 101.2, 102.6, conv=noconv)
def test_selectmode(self):
widget = self.create()
self.checkEnumParam(widget, 'selectmode',
'none', 'browse', 'extended')
def test_show(self):
widget = self.create()
self.checkParam(widget, 'show', 'tree headings',
expected=('tree', 'headings'))
self.checkParam(widget, 'show', ('tree', 'headings'))
self.checkParam(widget, 'show', ('headings', 'tree'))
self.checkParam(widget, 'show', 'tree', expected=('tree',))
self.checkParam(widget, 'show', 'headings', expected=('headings',))
def test_bbox(self):
self.tv.pack()
self.assertEqual(self.tv.bbox(''), '')
self.tv.wait_visibility()
self.tv.update()
item_id = self.tv.insert('', 'end')
children = self.tv.get_children()
self.assertTrue(children)
bbox = self.tv.bbox(children[0])
self.assertIsBoundingBox(bbox)
# compare width in bboxes
self.tv['columns'] = ['test']
self.tv.column('test', width=50)
bbox_column0 = self.tv.bbox(children[0], 0)
root_width = self.tv.column('#0', width=None)
if not self.wantobjects:
root_width = int(root_width)
self.assertEqual(bbox_column0[0], bbox[0] + root_width)
# verify that bbox of a closed item is the empty string
child1 = self.tv.insert(item_id, 'end')
self.assertEqual(self.tv.bbox(child1), '')
def test_children(self):
# no children yet, should get an empty tuple
self.assertEqual(self.tv.get_children(), ())
item_id = self.tv.insert('', 'end')
self.assertIsInstance(self.tv.get_children(), tuple)
self.assertEqual(self.tv.get_children()[0], item_id)
# add item_id and child3 as children of child2
child2 = self.tv.insert('', 'end')
child3 = self.tv.insert('', 'end')
self.tv.set_children(child2, item_id, child3)
self.assertEqual(self.tv.get_children(child2), (item_id, child3))
        # child3 has child2 as parent, thus trying to set child2 as a child
        # of child3 should result in an error
self.assertRaises(tkinter.TclError,
self.tv.set_children, child3, child2)
# remove child2 children
self.tv.set_children(child2)
self.assertEqual(self.tv.get_children(child2), ())
# remove root's children
self.tv.set_children('')
self.assertEqual(self.tv.get_children(), ())
def test_column(self):
# return a dict with all options/values
self.assertIsInstance(self.tv.column('#0'), dict)
# return a single value of the given option
if self.wantobjects:
self.assertIsInstance(self.tv.column('#0', width=None), int)
# set a new value for an option
self.tv.column('#0', width=10)
# testing new way to get option value
self.assertEqual(self.tv.column('#0', 'width'),
10 if self.wantobjects else '10')
self.assertEqual(self.tv.column('#0', width=None),
10 if self.wantobjects else '10')
# check read-only option
self.assertRaises(tkinter.TclError, self.tv.column, '#0', id='X')
self.assertRaises(tkinter.TclError, self.tv.column, 'invalid')
invalid_kws = [
{'unknown_option': 'some value'}, {'stretch': 'wrong'},
{'anchor': 'wrong'}, {'width': 'wrong'}, {'minwidth': 'wrong'}
]
for kw in invalid_kws:
self.assertRaises(tkinter.TclError, self.tv.column, '#0',
**kw)
def test_delete(self):
self.assertRaises(tkinter.TclError, self.tv.delete, '#0')
item_id = self.tv.insert('', 'end')
item2 = self.tv.insert(item_id, 'end')
self.assertEqual(self.tv.get_children(), (item_id, ))
self.assertEqual(self.tv.get_children(item_id), (item2, ))
self.tv.delete(item_id)
self.assertFalse(self.tv.get_children())
# reattach should fail
self.assertRaises(tkinter.TclError,
self.tv.reattach, item_id, '', 'end')
# test multiple item delete
item1 = self.tv.insert('', 'end')
item2 = self.tv.insert('', 'end')
self.assertEqual(self.tv.get_children(), (item1, item2))
self.tv.delete(item1, item2)
self.assertFalse(self.tv.get_children())
def test_detach_reattach(self):
item_id = self.tv.insert('', 'end')
item2 = self.tv.insert(item_id, 'end')
# calling detach without items is valid, although it does nothing
prev = self.tv.get_children()
self.tv.detach() # this should do nothing
self.assertEqual(prev, self.tv.get_children())
self.assertEqual(self.tv.get_children(), (item_id, ))
self.assertEqual(self.tv.get_children(item_id), (item2, ))
# detach item with children
self.tv.detach(item_id)
self.assertFalse(self.tv.get_children())
# reattach item with children
self.tv.reattach(item_id, '', 'end')
self.assertEqual(self.tv.get_children(), (item_id, ))
self.assertEqual(self.tv.get_children(item_id), (item2, ))
        # move a child to the root
self.tv.move(item2, '', 'end')
self.assertEqual(self.tv.get_children(), (item_id, item2))
self.assertEqual(self.tv.get_children(item_id), ())
# bad values
self.assertRaises(tkinter.TclError,
self.tv.reattach, 'nonexistent', '', 'end')
self.assertRaises(tkinter.TclError,
self.tv.detach, 'nonexistent')
self.assertRaises(tkinter.TclError,
self.tv.reattach, item2, 'otherparent', 'end')
self.assertRaises(tkinter.TclError,
self.tv.reattach, item2, '', 'invalid')
# multiple detach
self.tv.detach(item_id, item2)
self.assertEqual(self.tv.get_children(), ())
self.assertEqual(self.tv.get_children(item_id), ())
def test_exists(self):
self.assertEqual(self.tv.exists('something'), False)
self.assertEqual(self.tv.exists(''), True)
self.assertEqual(self.tv.exists({}), False)
# the following will make a tk.call equivalent to
# tk.call(treeview, "exists") which should result in an error
# in the tcl interpreter since tk requires an item.
self.assertRaises(tkinter.TclError, self.tv.exists, None)
def test_focus(self):
# nothing is focused right now
self.assertEqual(self.tv.focus(), '')
item1 = self.tv.insert('', 'end')
self.tv.focus(item1)
self.assertEqual(self.tv.focus(), item1)
self.tv.delete(item1)
self.assertEqual(self.tv.focus(), '')
        # try focusing a nonexistent item
self.assertRaises(tkinter.TclError, self.tv.focus, 'hi')
def test_heading(self):
# check a dict is returned
self.assertIsInstance(self.tv.heading('#0'), dict)
# check a value is returned
self.tv.heading('#0', text='hi')
self.assertEqual(self.tv.heading('#0', 'text'), 'hi')
self.assertEqual(self.tv.heading('#0', text=None), 'hi')
# invalid option
self.assertRaises(tkinter.TclError, self.tv.heading, '#0',
background=None)
# invalid value
self.assertRaises(tkinter.TclError, self.tv.heading, '#0',
anchor=1)
def test_heading_callback(self):
def simulate_heading_click(x, y):
simulate_mouse_click(self.tv, x, y)
self.tv.update()
success = [] # no success for now
self.tv.pack()
self.tv.wait_visibility()
self.tv.heading('#0', command=lambda: success.append(True))
self.tv.column('#0', width=100)
self.tv.update()
# assuming that the coords (5, 5) fall into heading #0
simulate_heading_click(5, 5)
if not success:
self.fail("The command associated to the treeview heading wasn't "
"invoked.")
success = []
commands = self.tv.master._tclCommands
self.tv.heading('#0', command=str(self.tv.heading('#0', command=None)))
self.assertEqual(commands, self.tv.master._tclCommands)
simulate_heading_click(5, 5)
if not success:
self.fail("The command associated to the treeview heading wasn't "
"invoked.")
# XXX The following raises an error in a tcl interpreter, but not in
# Python
#self.tv.heading('#0', command='I dont exist')
#simulate_heading_click(5, 5)
def test_index(self):
# item 'what' doesn't exist
self.assertRaises(tkinter.TclError, self.tv.index, 'what')
self.assertEqual(self.tv.index(''), 0)
item1 = self.tv.insert('', 'end')
item2 = self.tv.insert('', 'end')
c1 = self.tv.insert(item1, 'end')
c2 = self.tv.insert(item1, 'end')
self.assertEqual(self.tv.index(item1), 0)
self.assertEqual(self.tv.index(c1), 0)
self.assertEqual(self.tv.index(c2), 1)
self.assertEqual(self.tv.index(item2), 1)
self.tv.move(item2, '', 0)
self.assertEqual(self.tv.index(item2), 0)
self.assertEqual(self.tv.index(item1), 1)
# check that index still works even after its parent and siblings
# have been detached
self.tv.detach(item1)
self.assertEqual(self.tv.index(c2), 1)
self.tv.detach(c1)
self.assertEqual(self.tv.index(c2), 0)
# but it fails after item has been deleted
self.tv.delete(item1)
self.assertRaises(tkinter.TclError, self.tv.index, c2)
def test_insert_item(self):
# parent 'none' doesn't exist
self.assertRaises(tkinter.TclError, self.tv.insert, 'none', 'end')
# open values
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
open='')
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
open='please')
self.assertFalse(self.tv.delete(self.tv.insert('', 'end', open=True)))
self.assertFalse(self.tv.delete(self.tv.insert('', 'end', open=False)))
# invalid index
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'middle')
# trying to duplicate item id is invalid
itemid = self.tv.insert('', 'end', 'first-item')
self.assertEqual(itemid, 'first-item')
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
'first-item')
self.assertRaises(tkinter.TclError, self.tv.insert, '', 'end',
MockTclObj('first-item'))
# unicode values
value = '\xe1ba'
item = self.tv.insert('', 'end', values=(value, ))
self.assertEqual(self.tv.item(item, 'values'),
(value,) if self.wantobjects else value)
self.assertEqual(self.tv.item(item, values=None),
(value,) if self.wantobjects else value)
self.tv.item(item, values=self.root.splitlist(self.tv.item(item, values=None)))
self.assertEqual(self.tv.item(item, values=None),
(value,) if self.wantobjects else value)
self.assertIsInstance(self.tv.item(item), dict)
# erase item values
self.tv.item(item, values='')
self.assertFalse(self.tv.item(item, values=None))
# item tags
item = self.tv.insert('', 'end', tags=[1, 2, value])
self.assertEqual(self.tv.item(item, tags=None),
('1', '2', value) if self.wantobjects else
'1 2 %s' % value)
self.tv.item(item, tags=[])
self.assertFalse(self.tv.item(item, tags=None))
self.tv.item(item, tags=(1, 2))
self.assertEqual(self.tv.item(item, tags=None),
('1', '2') if self.wantobjects else '1 2')
# values with spaces
item = self.tv.insert('', 'end', values=('a b c',
'%s %s' % (value, value)))
self.assertEqual(self.tv.item(item, values=None),
('a b c', '%s %s' % (value, value)) if self.wantobjects else
'{a b c} {%s %s}' % (value, value))
# text
self.assertEqual(self.tv.item(
self.tv.insert('', 'end', text="Label here"), text=None),
"Label here")
self.assertEqual(self.tv.item(
self.tv.insert('', 'end', text=value), text=None),
value)
def test_set(self):
self.tv['columns'] = ['A', 'B']
item = self.tv.insert('', 'end', values=['a', 'b'])
self.assertEqual(self.tv.set(item), {'A': 'a', 'B': 'b'})
self.tv.set(item, 'B', 'a')
self.assertEqual(self.tv.item(item, values=None),
('a', 'a') if self.wantobjects else 'a a')
self.tv['columns'] = ['B']
self.assertEqual(self.tv.set(item), {'B': 'a'})
self.tv.set(item, 'B', 'b')
self.assertEqual(self.tv.set(item, column='B'), 'b')
self.assertEqual(self.tv.item(item, values=None),
('b', 'a') if self.wantobjects else 'b a')
self.tv.set(item, 'B', 123)
self.assertEqual(self.tv.set(item, 'B'),
123 if self.wantobjects else '123')
self.assertEqual(self.tv.item(item, values=None),
(123, 'a') if self.wantobjects else '123 a')
self.assertEqual(self.tv.set(item),
{'B': 123} if self.wantobjects else {'B': '123'})
        # nonexistent column
self.assertRaises(tkinter.TclError, self.tv.set, item, 'A')
self.assertRaises(tkinter.TclError, self.tv.set, item, 'A', 'b')
        # nonexistent item
self.assertRaises(tkinter.TclError, self.tv.set, 'notme')
def test_tag_bind(self):
events = []
item1 = self.tv.insert('', 'end', tags=['call'])
item2 = self.tv.insert('', 'end', tags=['call'])
self.tv.tag_bind('call', '<ButtonPress-1>',
lambda evt: events.append(1))
self.tv.tag_bind('call', '<ButtonRelease-1>',
lambda evt: events.append(2))
self.tv.pack()
self.tv.wait_visibility()
self.tv.update()
pos_y = set()
found = set()
for i in range(0, 100, 10):
if len(found) == 2: # item1 and item2 already found
break
item_id = self.tv.identify_row(i)
if item_id and item_id not in found:
pos_y.add(i)
found.add(item_id)
self.assertEqual(len(pos_y), 2) # item1 and item2 y pos
for y in pos_y:
simulate_mouse_click(self.tv, 0, y)
# by now there should be 4 things in the events list, since each
# item had a bind for two events that were simulated above
self.assertEqual(len(events), 4)
for evt in zip(events[::2], events[1::2]):
self.assertEqual(evt, (1, 2))
def test_tag_configure(self):
# Just testing parameter passing for now
self.assertRaises(TypeError, self.tv.tag_configure)
self.assertRaises(tkinter.TclError, self.tv.tag_configure,
'test', sky='blue')
self.tv.tag_configure('test', foreground='blue')
self.assertEqual(str(self.tv.tag_configure('test', 'foreground')),
'blue')
self.assertEqual(str(self.tv.tag_configure('test', foreground=None)),
'blue')
self.assertIsInstance(self.tv.tag_configure('test'), dict)
@add_standard_options(StandardTtkOptionsTests)
class SeparatorTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'orient', 'style', 'takefocus',
# 'state'?
)
default_orient = 'horizontal'
def create(self, **kwargs):
return ttk.Separator(self.root, **kwargs)
@add_standard_options(StandardTtkOptionsTests)
class SizegripTest(AbstractWidgetTest, unittest.TestCase):
OPTIONS = (
'class', 'cursor', 'style', 'takefocus',
# 'state'?
)
def create(self, **kwargs):
return ttk.Sizegrip(self.root, **kwargs)
tests_gui = (
ButtonTest, CheckbuttonTest, ComboboxTest, EntryTest,
FrameTest, LabelFrameTest, LabelTest, MenubuttonTest,
NotebookTest, PanedWindowTest, ProgressbarTest,
RadiobuttonTest, ScaleTest, ScrollbarTest, SeparatorTest,
SizegripTest, TreeviewTest, WidgetTest,
)
if __name__ == "__main__":
unittest.main()
|
jarble/Polyglot-code-generator
|
refs/heads/master
|
examples/grammars/englishToPython.py
|
1
|
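# Each entry below pairs a list of English patterns with an output template.
# Patterns are regex-like: "(a|b)" alternations, escaped literals such as
# "\+", and "<<name>>" placeholders that capture text for substitution into
# the output. A third element "final" marks outputs that need no further
# rewriting; outputs may also be dicts keyed by target language, and the
# "#indent"/"#unindent" markers in outputs drive Python block indentation.
#
# The sketch below is illustrative only: the real rule engine lives elsewhere
# in this repo, and apply_rule is a hypothetical helper, not used by the
# grammar itself. It also ignores dict-valued (per-language) outputs.
import re

_PLACEHOLDER = re.compile(r"<<(\w+)>>")

def apply_rule(patterns, output, text):
    """Try each English pattern; on a match, fill its captures into output."""
    for pattern in patterns:
        # turn each <<name>> into a named, non-greedy capture group
        regex = _PLACEHOLDER.sub(lambda m: "(?P<%s>.+?)" % m.group(1), pattern)
        match = re.match(regex + r"\Z", text)
        if match:
            return _PLACEHOLDER.sub(lambda m: match.group(m.group(1)), output)
    return None

# For example, apply_rule(["(delete|remove) index <<foo>> (in|of) <<bar>>"],
# "<<bar>>.pop(<<foo>>)", "remove index 3 in myList") returns "myList.pop(3)".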
englishToPython = [
#Start of final outputs
[["end (else|if|loop|for|while|def)"], "", "final"],
[["the output of this shell command : <<foo>>"], "subprocess.Popen(<<foo>>, stdout=subprocess.PIPE).communicate()[0]", "final"],
[["<<list>> (|(sorted|arranged) )(from longest to shortest|(in|by) (order of length|order of (ascend|increase)ing length|(ascend|increas)ing order of length))", "(sort|arrange) the (list|strings) <<list>> from longest to shortest", "<<list>> (arranged |sorted |)in ascending order of length"], "sorted(<<list>>, key=len, reverse=True)", "final"],
[["<<list>> (|(sorted|arranged) )(from shortest to longest|(in|by) order of decreasing length)", "(sort|arrange) the (list|strings) <<list>> from shortest to longest", "<<list>> (arranged |sorted |)in descending order of length"], "sorted(<<list>>, key=len, reverse=False)", "final"],
[["(the )(average|mean|arithmetic mean) of <<foo>>"], "float(sum(<<foo>>))/len(<<foo>>) if len(<<foo>>) > 0 else float('nan')", "final"],
[["everything in <<foo>> (that|which) is( also|) in <<bar>>"], "list(set(<<foo>>) ^ set(<<bar>>))", "final"],
[["<<foo>> contains ((|everything in|(all|each|every one) of )these( items| things|) :) <<bar>>", "everything in <<foo>> is in <<bar>>"], "all(x in <<foo>> for x in <<bar>>)", "final"],
[["(delete|remove) index <<foo>> (in|of|inside|within) <<bar>>"], "<<foo>>.pop(<<index>>)", "final"],
#Indented
[["for <<foo>> in <<bar>> : <<baz>>", "for <<foo>> in <<bar>> <<baz>>"],"for <<foo>> in <<bar>> :\n#indent\n <<baz>> \n#unindent\n", "final"],
[["<<dictionaryName>> = { <<foo>>"], "<<dictionaryName>> = {\n#indent\n <<foo>> \n#unindent\n", "final"],
[["<<arrayName>> = [ <<foo>>"], "<<arrayName>> = [\n#indent\n <<foo>> \n#unindent\n", "final"],
[["<<dictionaryName>> : { <<foo>>"], "<<dictionaryName>> : {\n#indent\n <<foo>> \n#unindent\n", "final"],
[["<<arrayName>> : [ <<foo>>"], "<<arrayName>> : [\n#indent\n <<foo>> \n#unindent\n", "final"],
[["[ <<foo>>"], "[\n#indent\n <<foo>> \n#unindent\n", "final"],
[["{ <<foo>>"], "{\n#indent\n <<foo>> \n#unindent\n", "final"],
[["(get|create|generate) a string from the file (call|nam)ed <<foo>>"], "pythonFunctions.stringFromTextFile(<<foo>>)", "final"],
[["save the string <<foo>> (as|to) a file (nam|call)ed <<bar>>", "transform the string <<foo>> into a file named <<bar>>", "(create|generate|produce) a file called <<bar>> from a string (call|nam)ed <<foo>>"], "pythonFunctions.writeStringToFile(<<bar>>, <<foo>>)", "final"],
[["import <<foo>> from <<bar>> as <<baz>>", "from <<bar>> import <<foo>> as <<baz>>", "import <<foo>> as <<baz>> from <<bar>>"], "from <<bar>> import <<foo>> as <<baz>>", "final"],
[["from <<foo>> import <<bar>>", "import <<bar>> from <<foo>>"], "from <<foo>> import <<bar>>", "final"],
[["<<foo>> {}"], "<<foo>>()", "final"],
[["<<foo>> is an anagram of <<bar>>"], "(sorted(<<foo>>) == sorted(<<bar>>))", "final"],
[["(pick|choose|select|get) random(|ly) (from|in) <<foo>>"], "random.choice(<<foo>>)", "final"],
#[["<<type>> <<varName>> (=) <<value>> (;)"], "<<type>> <<varName>> = <<value>>;", "final"],
[["(|the )(short|small)est string in <<foo>>"], dict(Python="min(<<foo>>, key=len)"), "final"],
[["(|the )((long|bigg|larg)est) string in <<foo>>"], dict(Python="max(<<foo>>, key=len)"), "final"],
[["(|the )((bigg|larg|great)est) number in <<foo>>"], dict(Python="max(<<foo>>)"), "final"],
[["(|the )((small)est) number in <<foo>>"], dict(Python="min(<<foo>>)"), "final"],
[["(|the )first letter of <<foo>>"], dict(Python="<<foo>>[0]"), "final"],
[["(|the )last letter of <<foo>>"], "<<foo>>[len(<<foo>>)-1]", "final"],
[["<<foo>> (is an integer)"], "pythonFunctions.representsInt(<<foo>>)", "final"],
[["(module) <<body>>"], dict(crosslanguage = "module(<<body>>)"), "final"],
[["(raise) <<foo>>"], "raise <<foo>>", "final"],
[["(import) <<module>>"], "import <<module>>", "final"],
[["<<foo>> (converted to type) <<bar>>", "<<foo>> (converted to) <<bar>> (type)"], dict(Python="<<bar>>(<<foo>>)"), "final"],
[["(the type of) <<foo>>"], dict(Python="type(<<foo>>)"), "final"],
[["<<foo>> (or) <<bar>>"], dict(Python= "<<foo>> or <<bar>>)", Java= "(<<foo>> || <<bar>>)", JavaScript="(<<foo>> || <<bar>>)"), "final"],
[["<<foo>> (\< \=) <<bar>>"], dict(Python="(<<foo>> <= <<bar>>)"), "final"],
[["<<foo>> (\> \=) <<bar>>"], dict(Python="(<<foo>> >= <<bar>>)"), "final"],
[["(((t|T)he |)substring of) <<foo>> (from|between) <<bar>> (to|and) <<baz>>", "(substring (from)) <<bar>> (to|between) <<baz>> (in) <<foo>>"],
dict(Python="<<foo>>[<<bar>>:<<baz>>+1]"),
"final"
],
[["((the |)length of) <<foo>>"], dict(Python="len(<<foo>>)", crosslanguage="arrayLength(<<foo>>)"), "final"],
[["<<foo>> (\{) <<bar>> (\})"], dict(Python="<<foo>>(<<bar>>)"), "final"],
[["((for)( |)(each|every|all)) <<foo>> (in) <<bar>> <<baz>>", "for every <<foo>> in <<bar>> : <<baz>>"], dict(Python="for <<foo>> in <<bar>>:\n#indent\n<<baz>>\n#unindent\n"), "final"],
[["<<foo>> (rounded up)", "(round) <<foo>> (up)"], dict(Python="math.ceil(<<foo>>)"), "final"],
[["<<foo>> (rounded down)", "(round) <<foo>> (down)"], dict(Python="math.floor(<<foo>>)"), "final"],
[["<<foo>> (rounded to the (nearest|closest) integer)", "(round) <<foo>> (to the (nearest|closest) integer)"], dict(Python="Math.round(<<foo>>)"), "final"],
[["((A |a |)random number (between|from)) <<min>> (and|to) <<max>>"], dict(Python="(math.random() * (<<max>> - <<min>>) + <<min>>)"), "final"],
[["(replace( each| every|)( occurrence of|)) <<stringToFind>> ((in|inside( of|)|within)(| the string)) <<containingString>> (with) <<replacementString>>"],dict(Python="<<replacementString>>.join(<<containingString>>.split(<<stringToFind>>))"), "final"],
[["((a |A )random number)"], "math.random()", "final"],
[["<<foo>> (and|&|&&) <<bar>>"], "(<<foo>> and <<bar>>)", "final"],
[["<<herp>> (\,) <<derp>>"], "<<herp>>, <<derp>>", "final"],
[["(class) <<name>> <<body>>", "class <<name>> : <<body>>"], dict(crosslanguage="getClass(<<name>>, [<<body>>])", Python="class <<name>>:\n#indent\n<<body>>\n#unindent\n"), "final"],
[["(default) <<foo>>"], dict(JavaScript="default: { <<foo>> }"), "final"],
[["(case) <<foo>> <<bar>>"], dict(JavaScript = "case <<foo>>: <<bar>>"), "final"],
[["(\[) <<foo>> (\])"], {"Python":"[<<foo>>]", "JavaScript":"[<<foo>>]"}, "final"],
[["(switch) <<condition>> <<statements>>"], dict(JavaScript="switch <<condition>>{ <<statements>> }", crosslanguage="Switch([<<condition>>, <<statements>>]),"), "final"],
[["(elif|else if|elsif|otherwise if) <<foo>> <<bar>>", "(elif|else if|elsif|otherwise if) <<foo>> (then) <<bar>>"], dict(Python="elif <<foo>>:\n#indent\n<<bar>>\n#unindent\n"), "final"],
[["((|(E|e)lse)(| :)) <<foo>>"], "else:\n#indent\n<<foo>>\n#unindent\n", "final"],
[["<<foo>> (\*|times|multiplied by) <<bar>>"], "(<<foo>> * <<bar>>)", "final"],
[["<<foo>> (>|is (more|greater) than) <<bar>>"], "(<<foo>> > <<bar>>)", "final"],
[["<<foo>> (<|is (less) than) <<bar>>"], "(<<foo>> < <<bar>>)", "final"],
[["<<foo>> (-|minus) <<bar>>", "<<bar>> subtracted from <<foo>>"], "(<<foo>> - <<bar>>)", "final"],
[["<<foo>> (is (inside|in|within)) <<bar>>", "<<bar>> (contains) <<foo>>", "<<foo>> (is in|in) <<bar>>"], dict(Python="(<<foo>> in <<bar>>)"), "final"],
[["<<foo>> (plus|\+) <<bar>>", "(the sum of) <<foo>> (and) <<bar>>"], "(<<foo>> + <<bar>>)", "final"],
[["<<foo>> (divided by|\/) <<bar>>"], "(<<foo>> / <<bar>>)", "final"],
[["<<foo>> (==|= =|equals|is equal to) <<bar>>"], "(<<foo>> == <<bar>>)", "final"],
[["<<foo>> (=) <<bar>>"], "<<foo>> = <<bar>>", "final"],
[["<<foo>> (to the power of|\*\*) <<bar>>"], "(<<foo>> ** <<bar>>)", "final"],
[["(if) <<foo>> (:) <<bar>>", "(if) <<foo>> (then) <<bar>>", "<<bar>> (if|if and only if) <<foo>>"], dict(Python="if <<foo>>:\n#indent\n<<bar>>\n#unindent\n", Java="if(foo){ <<bar>> }", JavaScript="if(foo){ <<bar>> }"), "final"],
[["((|do this |keep doing this )while) <<x>> (:) <<y>>", "<<y>> (while) <<x>>", "((|do this |keep doing this )while) <<x>> <<y>>"], dict(Python="while <<x>>:\n#indent\n<<y>>\n#unindent\n", Java="while(<<x>>){ <<y>> }"), "final"],
[["(not|\!) <<foo>>"], "(not <<foo>>)", "final"],
[["<<foo>> (%) <<bar>>"], "(<<foo>> % <<bar>>)", "final"],
[["the negation of <<foo>>"], "! <<foo>>"],
[["(function) <<static>> <<returnType>> <<functionName>> <<parameterNames>> <<parameterTypes>> <<body>>"],
"def <<functionName>> <<parameterNames>>:\n#indent\n<<body>>\n#unindent\n",
"final"],
[["(for) <<foo>> ; <<bar>> ; <<baz>> <<biff>>", "(for) <<foo>> (;) <<bar>> (;) <<baz>> <<biff>>"], dict(JavaScript="for(<<foo>>; <<bar>>; <<baz>>){ <<biff>> }"), "final"],
[["(convert|change) <<foo>> (from base(|s)) <<bar>> ((to|into)(| base)) <<baz>>",
"(convert|change) <<foo>> ((to|into) base) <<baz>> (from base) <<bar>>",
"<<foo>> (converted (to|into) base) <<baz>> (from base) <<bar>>",
"<<foo>> (converted from base) <<bar>> ((to|into) base) <<baz>>"],
"pythonFunctions.convertBases(<<foo>>, <<bar>>, <<baz>>)",
"final"],
#["for each foo in bar"]
[["<<foo>> (\!\=|\! \=) <<bar>>"], {"JavaScript":"(<<foo>> != <<bar>>)", "Python":"(<<foo>> != <<bar>>)"}, "final"],
[["<<foo>> ((recursively divided|divided recursively) into) <<bar>> (by) <<baz>> (arrays)", "(recursively divide) <<foo>> (into) <<bar>> (by) <<baz>> (arrays)", "(divide) <<foo>> (recursively into) <<bar>> (by) <<baz>> (arrays)"], dict(Python="(divide <<foo>> recursively into <<bar>> by <<baz>> arrays)"), "final"],
[["((all matches|(each|every) match) of the (regex|regular expression)) <<foo>> ((in|inside|within) the string) <<bar>>"], dict(Python="re.findall(re.compile(<<foo>>), <<bar>>)"), "final"],
[["(return) <<toReturn>>"],"return <<toReturn>>", "final"],
[["<<foo>> (;)"], dict(Python="<<foo>>\n"), "final"],
[["<<foo>> (;) <<bar>>"], "<<foo>>;\n<<bar>>", "final"],
[["(def|function) <<functionName>> <<parameterNames>> <<body>>"], dict(Python="def <<functionName>>(<<parameterNames>>):\n#indent\n<<body>>\n#unindent\n", JavaScript="function <<functionName>>(<<parameterNames>>){ <<body>> }"), "final"],
[["(var) <<foo>> (=) <<bar>>"], dict(JavaScript="var <<foo>> = <<bar>>", Python = "<<foo>> = <<bar>>"), "final"],
[["<<foo>> (,)"], "<<foo>>, ", "final"],
[["(def|function|defun) <<functionName>> <<functionBody>>"], dict(Python="def <<functionName>>():\n#indent\n<<functionBody>>\n#unindent\n", JavaScript="function <<functionName>>(){ <<functionBody>> }"), "final"],
[["(dictionary|associative array) <<foo>> (\=) <<bar>>", "(dictionary|associative array) <<foo>> <<bar>>"], "<<foo>> = { <<bar>> }", "final"],
[["(list|array) <<foo>> (\=) <<bar>>", "(array) <<foo>> <<bar>>"], "<<foo>> = [ <<bar>> ]", "final"],
[["(key) <<foo>> ((with|and) value) <<bar>>", "(value) <<bar>> ((with|and) key) <<foo>>", "<<foo>> (\:|\-\-\>) <<bar>>"], dict(Python = "<<foo>>: <<bar>>"), "final"],
[["((anonymous |)self-(evaluating|executing|invoking|calling) (anonymous |)function) <<body>>"], dict(JavaScript="{(function(){ <<body>> })()"), "final"],
[["((split|separate)(| the string)) <<foo>> ((with|using)(| the (separator|delimiter))) <<bar>>"], dict(Python="<<foo>>.split(<<bar>>)"), "final"],
[["((t|T)he (regex|regexp|regular expression)) <<regex>> (matches the string) <<string>>", "((t|T)he string) <<string>> (matches the (regex|regexp|regular expression)) <<regex>>", "<<string>> (matches the (regex|regexp|regular expression)) <<regex>>"], dict(Python="re.compile(<<regex>>).match(<<string>>)"), "final"],
[["((every|each|all) (occurrence|appearance|location|index|indice)(s|) of( the string|)) <<foo>> ((in|inside)( the string|)) <<bar>>"], dict(Python="[m.start() for m in re.finditer(<<foo>>, <<bar>>)]"), "final"],
[["((every|each|all) (occurrence|appearance|location|index|indice)(s|) of) <<foo>> ((in|inside)(| the (array|list))) <<bar>>"], dict(Python="[i for i, x in enumerate(<<bar>>) if x == <<foo>>]"), "final"],
[["((the |)dimensions of( the (array|list)|)) <<foo>>"], dict(Python="numpy.array(<<foo>>).shape"), "final"],
[["(insert|put) <<obj>> (after the index) <<index>> ((inside|in) the (list|array)) <<list>>"], dict(Python="insertIntoList(<<list>>, <<index>>+1, <<obj>>)"), "final"],
[["(insert|put) <<obj>> (before the index) <<index>> ((inside|in) the (list|array)) <<list>>"], dict(Python="insertIntoList(<<list>>, <<index>>, <<obj>>)"), "final"],
[["((every|each|all) integer(s|) (between|from)) <<foo>> (and|to) <<bar>>"], dict(Python="range(<<foo>>, <<bar>>)"), "final"],
[["((join|merge) the (array|strings)) <<array>> ((with|using) the separator) <<separator>>"], dict(Python="<<separator>>.join(<<array>>)"),"final"],
[["<<foo>> (written|spelled) backwards"], dict(Python="<<foo>>[::-1]"), "final"],
[["(wait) <<seconds>> (seconds)"], dict(Python="time.sleep(<<seconds>>)"), "final"],
[["((|(the|all|each|every) )factor(|s) of) <<foo>>"], dict(Python="pythonFunctions.factors(<<foo>>)"), "final"],
[["((remove|delete) (each|every|all) occurrence(s|) of( the value|)) <<val>> ((from|in) the (list|array)) <<the_list>>"], dict(Python="remove_values_from_list(<<the_list>>, <<val>>)"), "final"],
[["(absolute value of) <<foo>>"], dict(Python="math.fabs(<<foo>>)"), "final"],
[["(rotate the array) <<foo>> (90 degrees clockwise)"], dict(Python="zip(*<<foo>>[::-1])"), "final"],
[["(rotate the array) <<foo>> (90 degrees counterclockwise)"], dict(Python="zip(*<<foo>>)[::-1]"), "final"],
[["((understand|define)(| the (macro|syntax))) <<foo>> (as|as the (macro|syntax)) <<bar>>", "((define|create|declare|make) (a |the |)(macro|syntax)(| (named|called|with the input))) <<foo>> ((and|with|(that|which) (produces|generates|gives)) the (output|macro)) <<bar>>"], dict(Python="defMacro(<<foo>>, <<bar>>)\n"), "final"],
[["(the functions in) <<functions>> ((that|which) return) <<toReturn>> (for the inputs in) <<inputs>>"], dict(Python="pythonFunctions.functionsMatchingInputs(<<functions>>, <<inputs>>, <<toReturn>>)"), "final"],
[["<<foo>> ((start|begin)s with) <<bar>>"], dict(Python="<<foo>>.startswith(<<bar>>)"), "final"],
[["<<foo>> (ends with) <<bar>>"], dict(Python="<<foo>>.endswith(<<bar>>)"), "final"],
[["((search recursively|recursive(|ly) search) for) <<toFind>> (in the array) <<theArray>>", "(search for) <<toFind>> (recursively in the array) <<theArray>>"], dict(Python="pythonFunctions.recursiveArraySearch(<<toFind>>, <<theArray>>)"), "final"],
[["(every|each) <<item>> (in|inside|within) <<list>> ((that|which) (satisfie|meet|matche)s (the|this) condition(| :)) <<conditional>>"], dict(Python="[<<item>> for <<item>> in <<list>> if <<conditional>>]"), "final"],
[["<<foo>> ((|arranged |sorted )in alphabetical order)", "(arrange|sort) <<foo>> (in alphabetical order)"], "sorted(<<foo>>)", "final"],
#End of final outputs, and beginning of non-final outputs
[["<<foo>> is a prime number"], "pythonFunctions.is_prime{<<foo>>}"],
[["least common multiple of <<foo>> and <<bar>>"], "pythonFunctions.lcm{<<foo>>, <<bar>>}"],
[["(|the )greatest common factor of <<foo>> and <<bar>>"], "pythonFunctions.gcd{<<foo>>, <<bar>>}"],
[["ensure that <<foo>>", "ensure that <<foo>> ;", "<<foo>> (must|should|ought to) be true"], "if (<<foo>> == False) then (raise Exception{'Something is wrong!'})"],
[["the last index (of|in) the array <<foo>>"], "(the length of <<foo>>) - 1"],
#[["split the string <<foo>> at index <<bar>>"], "split the string <<foo>> from index <<bar>> to index <<baz>> , "],
[["<<foo>> ((starts|begins) with) <<bar>> (and ends with) <<baz>>"], "(<<foo>> starts with <<bar>>) and (<<foo>> ends with <<baz>>)"],
[["<<foo>> ((starts|begins) and ends with) <<bar>>"], "<<foo>> starts with <<bar>> and ends with <<bar>>"],
[["((remove|delete) (each|every|all) occurrence(s|) of( the value|)) <<val>> ((from|in) the (string)) <<the_string>>"], "replace every occurrence of <<val>> in the string <<the_string>> with ''"],
[["wait <<minutes>> minutes"], "wait (<<minutes>> * 60) seconds"],
[["wait <<hours>> hours"], "wait (<<hours>> * 60) minutes"],
[["((the )square root of) <<foo>>"], "(<<foo>> to the power of (1 / 2))"],
[["(function) <<hello>> ((always|only) returns) <<hi>>"], "def <<hello>> ((return <<hi>>) ;)"],
[["<<foo>> ((%|percent) of) <<bar>>"], "((<<bar>> / 100) * <<foo>>)"],
[["<<foo>> (\=) <<bar>> (\+) <<baz>>"], "<<foo>> = (<<bar>> + <<baz>>)"],
[["<<foo>> (\=) <<bar>> (\-) <<baz>>"], "<<foo>> = (<<bar>> - <<baz>>)"],
[["<<foo>> (\=) <<bar>> (\^) <<baz>>"], "<<foo>> = (<<bar>> to the power of <<baz>>)"],
[["<<foo>> (\=) <<bar>> (\*) <<baz>>"], "<<foo>> = (<<bar>> * <<baz>>)"],
[["<<foo>> (\=) <<bar>> (\%) <<baz>>"], "<<foo>> = (<<bar>> % <<baz>>)"],
[["<<foo>> (is between) <<bar>> (and) <<baz>>"], "(<<bar>> < <<foo>>) and (<<foo>> < <<baz>>)"],
[["<<foo>> (is greater than or equal to) <<bar>>"], "<<foo>> >= <<bar>>"],
[["<<foo>> (is less than or equal to) <<bar>>"], "<<foo>> <= <<bar>>"],
[["<<foo>> (\+ \=) <<bar>>"], "<<foo>> = <<foo>> + <<bar>>)"],
[["<<foo>> (\- =|\-\=) <<bar>>"], "<<foo>> = <<foo>> - <<bar>>"],
[["<<foo>> (\* =) <<bar>>"], "<<foo>> = <<foo>> * <<bar>>"],
[["<<foo>> (\^ =) <<bar>>"], "<<foo>> = <<foo>> ^ <<bar>>"],
[["<<foo>> += <<bar>>"], "<<foo>> + = <<bar>>"],
[["<<foo>> (\+ \+|\+\+)"], "<<foo>> += 1"],
[["<<foo>> (- -|\-\-)"], "<<foo>> -= 1"],
[["<<foo>> (unless) <<bar>>", "(unless) <<bar>> <<foo>>"], "<<foo>> if (not <<bar>>)"],
[["<<foo>> (is (divisible by|a multiple of)) <<bar>>", "<<bar>> (is a factor of) <<foo>>"], "(<<foo>> % <<bar>>) == 0"],
[["(until) <<x>> (:) <<y>>"], "while (not x) y"],
[["(the product of) <<bar>> (and) <<baz>>"], "<<bar>> multiplied by <<baz>>"],
[["(the quotient of) <<foo>> (and) <<bar>>"], "<<foo>> divided by <<bar>>"],
#[["<<foo>> (divided by) <<bar>>"], "<<foo>> / <<bar>>"]
[["(indices from) <<start>> (to) <<end>> (in|of) <<array>>"], "substring from <<start>> to <<end>> in <<array>>"],
[["<<array>> (is a) <<numberOfDimensions>> (dimensional array)"], "(the length of (the dimensions of <<array>>)) == <<numberOfDimensions>>"],
[["(the array) <<array>> (from) <<start>> (to) <<end>>"], "substring of <<array>> from <<start>> to <<end>>"],
[["<<foo>> (and) <<bar>> (have the same dimensions)"], "(the dimensions of <<foo>>) == (the dimensions of <<bar>>)"],
[["<<foo>> (and) <<bar>> (have the same length)"], "(the length of <<foo>>) == (the length of <<bar>>)"],
[["<<foo>> (and) <<bar>> (have the same type)"], "(the type of <<foo>>) == (the type of <<bar>>)"],
[["<<foo>> ((does not|doesn\'t) equal) <<bar>>", "<<foo>> ((is not|isn\'t) equal to) <<bar>>"], "(<<foo>> != <<bar>>)"],
[["<<foo>> (and) <<bar>> (are not equal)"], "<<foo>> does not equal <<bar>>"],
[["<<foo>> (is (identical to|the same as)) <<bar>>", "<<foo>> (and) <<bar>> (are (identical|the same))"], "<<foo>> == <<bar>>"],
[["<<foo>> (does not contain) <<bar>>"], "not (<<foo>> contains <<bar>>)"],
[["((|the )remainder of) <<foo>> (divided by) <<bar>>"], "<<foo>> % <<bar>>"],
[["(rotate the array) <<foo>> (180 degrees)"], "rotate the array (rotate the array <<foo>> 90 degrees clockwise) 90 degrees clockwise"],
[["(number of times (that|)( |)(the string|)) <<foo>> ((occur|appear)s in(| the string)) <<bar>>", "(number of occurrences of(| the string)) <<foo>> (in(| the string)) <<bar>>"], "length of (all occurrences of <<foo>> in the string <<bar>>)"],
[["(convert) <<foo>> (to) <<bar>> (type)"], "<<foo>> converted to <<bar>> type"],
[["(print) <<foo>>"], "print { <<foo>> }"],
[["(replace each) <<foo>> (with) <<bar>> (in) <<baz>>"], "replace each <<foo>> in <<baz>> with <<bar>>"],
[["<<foo>> ((, |)(but|(and|but) also|as well as|even though|although|(despite|in spite of) the fact that)) <<bar>>"], "<<foo>> and <<bar>>"],
[["(either) <<foo>> (or) <<bar>>"], "<<foo>> or <<bar>>"],
[["<<foo>> is (True|true|not (F|f)alse)"], "<<foo>> == True"],
[["<<foo>> is (False|false|(not|n\'t) (t|T)rue)"], "<<foo>> == False"],
[["(if) <<foo>> (\= \=|equals) <<bar>> (then) <<baz>>", "(if) <<foo>> (\= \=|equals) <<bar>> <<baz>>"], "if (<<foo>> == <<bar>>) then <<baz>>"],
[["(if) <<foo>> (\>) <<bar>> (then) <<baz>>", "(if) <<foo>> (\>) <<bar>> <<baz>>"], "if (<<foo>> > <<bar>>) then <<baz>>"],
[["(if) <<foo>> (\<) <<bar>> (then) <<baz>>", "(if) <<foo>> (\<) <<bar>> <<baz>>"], "if (<<foo>> < <<bar>>) then <<baz>>"],
[["(if) <<foo>> (\> \=) <<bar>> (then) <<baz>>", "(if) <<foo>> (\>\=) <<bar>> <<baz>>"], "if (<<foo>> >= <<bar>>) then <<baz>>"],
[["(if) <<foo>> (\< \=) <<bar>> (then) <<baz>>", "(if) <<foo>> (\<\=) <<bar>> <<baz>>"], "if (<<foo>> <= <<bar>>) then <<baz>>"],
[["(if) <<foo>> (\! \=) <<bar>> (then) <<baz>>", "(if) <<foo>> (\!\=) <<bar>> <<baz>>"], "if (<<foo>> != <<bar>>) then <<baz>>"],
[["<<foo>> (\: \=) <<bar>>"], "<<foo>> = <<bar>>"],
[["((with|using) the (delimiter|separator)) <<foo>> ((split|separate) the string) <<bar>>"], "split the string <<bar>> using the separator <<foo>>"],
[["((every|each|all) (item|number) in) <<array>> ((that|which) is greater than) <<number>>"], "every foo in <<array>> that meets the condition (foo > <<number>>)"],
[["((every|each|all) (item|number) in) <<array>> ((that|which) is less than) <<number>>"], "every foo in <<array>> that meets the condition (foo < <<number>>)"],
[["((every|each|all) (item|number) in) <<array>> ((that|which) is (divisible by|a multiple of)) <<number>>"], "every foo in <<array>> that meets the condition (foo is a multiple of <<number>>)"],
[["((every|each|all) (item|number) in) <<array>> ((that|which) is (a factor of)) <<number>>"], "every foo in <<array>> that meets the condition (foo is a factor of <<number>>)"],
[["((the|all) strings in) <<array>> ((that|which) match the (regex|regular expression)) <<regex>>", "((every|each|all) (item|string)(s|) in) <<array>> ((that|which) (match(es|) the (regex|regular expression))) <<regex>>"], "every foo in <<array>> that meets the condition (foo matches the regex <<regex>>)"],
[["((every|each|all) (item|string|list) in) <<array>> ((that|which) (contains)) <<regex>>"], "every foo in <<array>> that meets the condition (foo contains <<regex>>)"],
[["((every|each|all) (regular expression|regex) in) <<array>> ((that|which) (match(|es) the (string))) <<string>>"], "every foo in <<array>> that meets the condition (<<string>> matches the regex foo)"],
[["((every|each|all) match(es|) of the (regex|regular expression)) <<regex>> (in the (list|array)) <<array>>"], "each string in <<array>> which matches the regular expression <<regex>>"],
[["((|the )first) <<foo>> ((letter|element|item)s (of|in)) <<bar>>"], "substring of <<bar>> from 0 to (<<foo>> - 1)"],
[["((the |)(last|final)) <<foo>> ((letter|element|item)s (of|in)) <<bar>>"], "substring of <<bar>> from ((length of <<bar>>) - <<foo>>) to (length of <<bar>>)"],
[["(define the macro) <<herp>> ((that|which) means) <<derp>>"], "define the macro <<herp>> with the output <<derp>>"],
[["<<foo>> is a palindrome"], "(<<foo>> spelled backwards) equals <<foo>>"],
[["(every) <<foo>> (in) <<bar>> ((satisfies|meets) (the|this) condition(| :)) <<baz>>"], "<<bar>> == (every <<foo>> in <<bar>> that satisfies the condition <<baz>>)"],
[["(this condition is true for (every|each)) <<foo>> (in) <<bar>> : <<baz>>", "<<baz>> ((is true |)for (all|each|every)) <<foo>> (in) <<bar>>"], "every <<foo>> in <<bar>> satisfies the condition <<baz>>"],
[["<<foo>> (are divisible by) <<bar>>"], "(baz is divisible by <<bar>>) for every baz in <<foo>>)"],
[["def <<foo>>(\{\}) <<bar>>", "def <<foo>> (\{\})(:) <<bar>>"], "def <<foo>> <<bar>>"],
[["((the |)longest match of the (regular expression|regex|regexp)) <<foo>> (in the string) <<bar>>"], "the longest string in (every match of the regular expression <<foo>> in the string <<bar>>)"],
[["(print) <<foo>> (\.)"], "print <<foo>>"],
[["<<foo>> ."], "<<foo>>;"],
[["<<foo>> = <<bar>> ."], "<<foo>> = <<bar>>"],
[["<<foo>> is an even number"], "(<<foo>> % 2) == 0"],
[["<<foo>> is an odd number"], "not (<<foo>> is an even number)"],
[["<<foo>> is a positive number"], "<<foo>> is greater than 0"],
[["<<foo>> is a negative number"], "<<foo>> is less than 0"],
[["(split|divide|separate) <<foo>> into <<bar>> equal (parts|pieces)"], "list{pythonFunctions.chunks{<<foo>>, int{len{<<foo>>} / <<bar>>}}}"],
[["<<bar>> is the type of <<foo>>"], "<<foo>> == (the type of <<bar>>)"],
[["the type of <<bar>> is <<foo>>"], "(the type of <<bar>>) == <<foo>>"],
[["<<foo>> (is (greater|more) than) <<bar>> ((and|but) less than) <<baz>>", "<<foo>> (is less than) <<baz>> ((and|but) (greater|more) than) <<bar>>"], "(<<foo>> < <<baz>>) and (<<foo>> > <<bar>>)"],
#[["<<foo>> is no(t|) (greater|more) than <<bar>>"], "not (foo > bar)"],
[["it is true that <<foo>>"], "<<foo>> is true"],
[["it is (false|(not |un)true) that <<foo>>"], "<<foo>> is not true"],
[["(the |)greatest common (factor|denominator) of <<foo>>"], "pythonFunctions.gcm{<<foo>>}"],
[["(save|make|create|generate) a copy of (|the file (|called ))<<foo>> (called|named|and call it|and name it) <<bar>>"], "save the string (create a string from the file called <<foo>>) to a file named <<bar>>"],
["(|(a|the) )list of punctuation marks", "['!', '?', '.']"],
[["(shuffle|(re|)arrange) <<foo>> randomly", "randomly (shuffle|(re|)arrange) <<foo>>", "<<foo>> (shuffled|(re|)arranged|sorted) randomly"], "random.sample{<<foo>>, (the length of <<foo>>)}"],
[["((range of |all)integers|(every|each) integer) between <<foo>> and <<bar>>"], "range{<<foo>>, <<bar>>}"],
[["<<foo>> is <<bar>> less than <<baz>>"], "<<foo>> == (<<baz>> - <<bar>>)"],
[["<<foo>> is <<bar>> (more|greater) than <<baz>>"], "<<foo>> == (<<bar>> + <<baz>>)"],
[["the prime factors of <<foo>>"], "every bar in (the factors of <<foo>>) that meets the condition (bar is a factor of <<foo>>)"],
[["(set|initialize) <<foo>> to <<bar>>", "let <<foo>> (be|equal) <<bar>>"], "<<foo>> = <<bar>>"],
[["(the (location|position) of|find) <<foo>> in <<bar>>"], "search recursively for <<foo>> in the array <<bar>>"],
[["something (picked|chosen|selected|taken) (at random|randomly) from <<foo>>"], "pick random from <<foo>>"],
[["(create|make|generate) an empty file called <<foo>>"], "save the string '' as a file called <<foo>>"],
[["(a |)copy of <<foo>>"], "copy.deepcopy{<<foo>>}"],
["(each|every) <<x>> (in|inside|within) <<array>> (where|(such|so) that) <<y>>", "every <<x>> in <<array>> that matches the condition <<y>>"],
[["((do|repeat)(| this)) <<foo>> times : <<bar>>"], "for _ in (every integer from 0 to <<foo>>) : <<bar>>"],
[["add <<bar>> to <<foo>>"], "<<foo>> += <<bar>>"],
[["divide <<foo>> by <<bar>>"], "<<foo>> = (<<foo>> / <<bar>>)"],
[["subtract <<bar>> from <<foo>>"], "<<foo>> -= <<bar>>"],
[["multiply <<foo>> by <<bar>>"], "<<foo>> *= <<bar>>"],
[["the square of <<foo>>", "<<foo>> squared"], "<<foo>> to the power of 2"],
[["<<foo>> cubed"], "<<foo>> to the power of 3"],
[["(all|each|every one) of these are in <<foo>> : <<bar>>"], "<<bar>> contains all of these : <<foo>>"],
[["for each <<foo>> in <<bar>> : <<baz>>"], "for <<foo>> in <<bar>> : <<baz>>"],
[["for (all integers|(each|every) integer) <<foo>> from <<bar>> to <<baz>> <<derp>>", "for (all integers|(each|every) integer) <<foo>> between <<bar>> and <<baz>> <<derp>>", "for (all integers|(every|each) integer) <<foo>> between <<bar>> and <<baz>> : <<derp>>", "for (all integers|(every|each) integer) <<foo>> from <<bar>> to <<baz>> : <<derp>>"], "for each foo in (every integer between <<bar>> and <<baz>>) : <<derp>>"],
[["<<foo>> is not a factor of <<bar>>"], "not (<<foo>> is a factor of <<bar>>)"],
[["alphabetize <<foo>>", "<<foo>> alphabetized"], "<<foo>> arranged in alphabetical order"],
[["(the|all) command(-| )line arguments", "(each|every) command( |-)line argument"], "sys.argv"],
[["(|(the|all) )(parameter names|names of ((each|every) parameter|(the|all) parameters)|parameters) (of|for) <<foo>>"], "inspect . getargspec{<<foo>>}"],
[["convert the number <<foo>> from base <<bar>> to base <<baz>>", "the number <<foo>> converted from base <<bar>> to base <<baz>>"], "<<foo>> converted from base <<bar>> to base <<baz>>"],
[["do this with (every|each) <<foo>> in <<bar>> : <<baz>>"], "for every <<foo>> in <<bar>>: <<baz>>"],
[["convert <<foo>> from binary to base <<bar>>", "convert <<foo>> to base <<bar>> from binary"], "convert <<foo>> from base 2 to base <<bar>>"],
[["convert <<foo>> from base <<bar>> to binary", "convert <<foo>> to binary from base <<bar>>"], "convert <<foo>> from base <<bar>> to base 2"],
[["<<foo>> : <<bar>> ,"], "(<<foo>> : <<bar>>),"],
[["the ranges <<foo>> and <<bar>> overlap", "the range <<foo>> overlaps with <<bar>>"], "((<<foo>>[0]) <= (<<bar>>[(length of <<bar>>) - 1])) and ((<<foo>>[(length of <<foo>>) - 1]) >= (<<bar>>[0]))"],
[["every <<foo>> in <<bar>> for which <<baz>>"], "each <<foo>> in <<bar>> so that <<baz>>"],
[["(each|every|all) element(s|) in <<foo>> (of|with|(that|which) is of) type <<bar>>"], "every baz in <<foo>> for which (the type of baz is <<bar>>)"],
["(every|each|all) string(s|) in <<foo>>", "every element in <<foo>> of type str"],
["(every|each|all) list(s|) in <<foo>>", "every element in <<foo>> of type list"],
[["<<foo>> (arranged |shuffled |)in a (random|randomized) order"], "<<foo>> rearranged randomly"],
[["save the file <<bar>> from the URL <<foo>>", "(save|download) the (URL|file) <<foo>> (and save it |)as a file (named|called) <<bar>>"], "urllib.urlretrieve{<<foo>>, <<bar>>}"],
[["<<foo>> is a string"], "the type of <<foo>> is str"],
[["<<foo>> are strings"], "(bar is a string) is true for each bar in <<foo>>"],
[["<<foo>> are integers"], "(bar is an integer) is true for each bar in <<foo>>"],
[["this condition is true for every <<bar>> in <<foo>>: <<baz>>"], "<<baz>> is true for every <<bar>> in <<foo>>"],
[["<<foo>> are prime numbers"], "this condition is true for every bar in <<foo>>: (bar is a prime number)"],
[["(run|invoke|execute) (the|this) (shell|system|terminal) command(:|) <<foo>>"], "subprocess.call{<<foo>>}"],
[["save <<foo>> as a file (nam|call)ed <<bar>>"], "save the string <<foo>> as a file named <<bar>>"],
[["(delete|remove) the file(| (named|called)) <<foo>>"], "os.remove{<<foo>>}"],
[["(the |)number of (character|letter)s in the string <<foo>>"], "length of <<foo>>"],
[["the range <<foo>> overlaps with the range <<bar>>", "these ranges overlap : <<foo>> and <<bar>>"], "the ranges <<foo>> and <<bar>> overlap"],
[["loop for <<foo>> in <<bar>> <<baz>>", "loop for <<foo>> in <<bar>> : <<baz>>"], "for <<foo>> in <<bar>> <<baz>>"],
[["for <<foo>> from <<bar>> to <<baz>> do <<biff>>", "for <<foo>> from <<bar>> to <<baz>> <<biff>>"], "for <<foo>> in (all integers from <<bar>> to <<baz>>) <<biff>>"],
[["if <<foo>> <<bar>>"], "if <<foo>> then <<bar>>"],
[["(the|all) strings(| in) <<foo>> match the (regular expression|regex) <<bar>>"], "every bar in <<foo>> meets the condition (<<bar>> matches the regular expression bar)"],
[["(the|all) (regexe|regular expression)s(| in) <<foo>> match the string <<bar>>"], "every bar in <<foo>> meets the condition (bar matches the regular expression <<bar>>)"],
[["(|the string )<<bar>> matches the regular expressions(| in) <<foo>>", "(|(the|all) )(regular expression|regexe)s in <<foo>> (that|which) match(| the string) <<bar>>"], "every foo in <<foo>> that meets the condition (<<bar>> matches the regular expression foo)"],
[["the (regular expression|regex) <<bar>> matches the strings(| in) <<foo>>", "(|(the|all) )strings in <<foo>> (that|which) match(| the (regex|regular expression)) <<bar>>"], "every foo in <<foo>> that meets the condition (foo matches the regular expression <<bar>>)"],
[["twice <<foo>>"], "2*<<foo>>"],
[["thrice <<foo>>"], "3*<<foo>>"],
]
'''
# This is what the new version of this should look like.
output:
pick random from <<foo>>
input:
something taken randomly from <<foo>>
get something randomly from <<foo>>
(choose|pick|select) something from <<foo>>
'''
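# A minimal sketch (hypothetical helper, not part of the original table) of
# how an output/input block in the proposed format above could be folded back
# into the [[patterns], output] rule shape used throughout this list:
def _block_to_rule(output, inputs):
    # Every input pattern becomes an alternative way to say the same output.
    return [list(inputs), output]
# e.g. _block_to_rule("pick random from <<foo>>",
#                     ["something taken randomly from <<foo>>",
#                      "get something randomly from <<foo>>",
#                      "(choose|pick|select) something from <<foo>>"])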
|
ZubairLK/CI20_linux
|
refs/heads/wip-ci20-v4.0-rc1-myrc1
|
Documentation/target/tcm_mod_builder.py
|
200
|
#!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
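# Example invocation (illustrative module name), using the -m/--modulename
# and -p/--protoident options parsed at the bottom of this script:
#
#   ./tcm_mod_builder.py -m tcm_nab5000 -p iSCSI
#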
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var):
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (kstrtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .aborted_task = " + fabric_mod_name + "_aborted_task,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " fabric->tf_cit_tmpl.tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " fabric->tf_cit_tmpl.tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " fabric->tf_cit_tmpl.tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " fabric->tf_cit_tmpl.tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " fabric->tf_cit_tmpl.tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " fabric->tf_cit_tmpl.tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " fabric->tf_cit_tmpl.tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " fabric->tf_cit_tmpl.tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " fabric->tf_cit_tmpl.tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
process_fo = 0;
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
process_fo = 1;
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
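# Illustrative note: each entry collected in fabric_ops above is a raw
# function-pointer declaration line from struct target_core_fabric_ops,
# e.g. something of the form:
#   int (*shutdown_session)(struct se_session *);
# The re.search() calls in tcm_mod_dump_fabric_ops() below key off these
# member names to decide which stub functions to emit.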
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "void " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('aborted_task\)\(', fo):
buf += "void " + fabric_mod_name + "_aborted_task(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_aborted_task(struct se_cmd *);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
ret = pi.write(bufi)
if ret:
tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
tcm_dir = os.getcwd();
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + " to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + " to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
|
Work4Labs/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/tests/modeltests/lookup/__init__.py
|
12133432
| |
beni55/olympia
|
refs/heads/master
|
apps/amo/management/commands/__init__.py
|
12133432
| |
ProfessionalIT/maxigenios-website
|
refs/heads/master
|
sdk/google_appengine/lib/django-1.2/django/contrib/localflavor/fr/__init__.py
|
12133432
| |
hydroshare/hydroshare
|
refs/heads/develop
|
hs_geographic_feature_resource/tests/__init__.py
|
12133432
| |
SerCeMan/intellij-community
|
refs/heads/master
|
python/testData/refactoring/move/relativeImportSourceWithSpacesInsideMovedModule/after/src/pkg/__init__.py
|
12133432
| |
izgzhen/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/pytest/testing/python/raises.py
|
171
|
import pytest
class TestRaises:
def test_raises(self):
source = "int('qwe')"
excinfo = pytest.raises(ValueError, source)
code = excinfo.traceback[-1].frame.code
s = str(code.fullsource)
assert s == source
def test_raises_exec(self):
pytest.raises(ValueError, "a,x = []")
def test_raises_syntax_error(self):
pytest.raises(SyntaxError, "qwe qwe qwe")
def test_raises_function(self):
pytest.raises(ValueError, int, 'hello')
def test_raises_callable_no_exception(self):
class A:
def __call__(self):
pass
try:
pytest.raises(ValueError, A())
except pytest.raises.Exception:
pass
def test_raises_flip_builtin_AssertionError(self):
# we replace AssertionError on python level
# however c code might still raise the builtin one
from _pytest.assertion.util import BuiltinAssertionError # noqa
pytest.raises(AssertionError,"""
raise BuiltinAssertionError
""")
def test_raises_as_contextmanager(self, testdir):
testdir.makepyfile("""
from __future__ import with_statement
import py, pytest
import _pytest._code
def test_simple():
with pytest.raises(ZeroDivisionError) as excinfo:
assert isinstance(excinfo, _pytest._code.ExceptionInfo)
1/0
print (excinfo)
assert excinfo.type == ZeroDivisionError
assert isinstance(excinfo.value, ZeroDivisionError)
def test_noraise():
with pytest.raises(pytest.raises.Exception):
with pytest.raises(ValueError):
int()
def test_raise_wrong_exception_passes_by():
with pytest.raises(ZeroDivisionError):
with pytest.raises(ValueError):
1/0
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
'*3 passed*',
])
def test_noclass(self):
with pytest.raises(TypeError):
pytest.raises('wrong', lambda: None)
def test_tuple(self):
with pytest.raises((KeyError, ValueError)):
raise KeyError('oops')
def test_no_raise_message(self):
try:
pytest.raises(ValueError, int, '0')
except pytest.raises.Exception as e:
assert e.msg == "DID NOT RAISE {0}".format(repr(ValueError))
|
cgcgbcbc/django-xadmin
|
refs/heads/master
|
xadmin/plugins/multiselect.py
|
19
|
#coding:utf-8
from itertools import chain
import xadmin
from django import forms
from django.db.models import ManyToManyField
from django.forms.util import flatatt
from django.template import loader
from django.utils.encoding import force_unicode
from django.utils.html import escape, conditional_escape
from django.utils.safestring import mark_safe
from xadmin.util import vendor
from xadmin.views import BaseAdminPlugin, ModelFormAdminView
class SelectMultipleTransfer(forms.SelectMultiple):
@property
def media(self):
return vendor('xadmin.widget.select-transfer.js', 'xadmin.widget.select-transfer.css')
def __init__(self, verbose_name, is_stacked, attrs=None, choices=()):
self.verbose_name = verbose_name
self.is_stacked = is_stacked
super(SelectMultipleTransfer, self).__init__(attrs, choices)
def render_opt(self, selected_choices, option_value, option_label):
option_value = force_unicode(option_value)
return u'<option value="%s">%s</option>' % (
escape(option_value), conditional_escape(force_unicode(option_label))), bool(option_value in selected_choices)
def render(self, name, value, attrs=None, choices=()):
if attrs is None:
attrs = {}
attrs['class'] = ''
if self.is_stacked:
attrs['class'] += 'stacked'
if value is None:
value = []
final_attrs = self.build_attrs(attrs, name=name)
selected_choices = set(force_unicode(v) for v in value)
available_output = []
chosen_output = []
for option_value, option_label in chain(self.choices, choices):
if isinstance(option_label, (list, tuple)):
available_output.append(u'<optgroup label="%s">' %
escape(force_unicode(option_value)))
for option in option_label:
output, selected = self.render_opt(
selected_choices, *option)
if selected:
chosen_output.append(output)
else:
available_output.append(output)
available_output.append(u'</optgroup>')
else:
output, selected = self.render_opt(
selected_choices, option_value, option_label)
if selected:
chosen_output.append(output)
else:
available_output.append(output)
context = {
'verbose_name': self.verbose_name,
'attrs': attrs,
'field_id': attrs['id'],
'flatatts': flatatt(final_attrs),
'available_options': u'\n'.join(available_output),
'chosen_options': u'\n'.join(chosen_output),
}
return mark_safe(loader.render_to_string('xadmin/forms/transfer.html', context))
class SelectMultipleDropdown(forms.SelectMultiple):
@property
def media(self):
return vendor('multiselect.js', 'multiselect.css', 'xadmin.widget.multiselect.js')
def render(self, name, value, attrs=None, choices=()):
if attrs is None:
attrs = {}
attrs['class'] = 'selectmultiple selectdropdown'
return super(SelectMultipleDropdown, self).render(name, value, attrs, choices)
class M2MSelectPlugin(BaseAdminPlugin):
def init_request(self, *args, **kwargs):
return hasattr(self.admin_view, 'style_fields') and \
(
'm2m_transfer' in self.admin_view.style_fields.values() or
'm2m_dropdown' in self.admin_view.style_fields.values()
)
def get_field_style(self, attrs, db_field, style, **kwargs):
if style == 'm2m_transfer' and isinstance(db_field, ManyToManyField):
return {'widget': SelectMultipleTransfer(db_field.verbose_name, False), 'help_text': ''}
if style == 'm2m_dropdown' and isinstance(db_field, ManyToManyField):
return {'widget': SelectMultipleDropdown, 'help_text': ''}
return attrs
xadmin.site.register_plugin(M2MSelectPlugin, ModelFormAdminView)
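# A minimal usage sketch (hypothetical model and options class): an admin
# options class opts a ManyToManyField into one of these widgets through
# style_fields, which is exactly what M2MSelectPlugin.init_request() and
# get_field_style() check for:
#
# class BookAdmin(object):
#     style_fields = {'authors': 'm2m_transfer'}  # or 'm2m_dropdown'
#
# xadmin.site.register(Book, BookAdmin)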
|
kynikos/outspline
|
refs/heads/master
|
src/outspline/extensions/organism_basicrules/occur_regularly.py
|
1
|
# Outspline - A highly modular and extensible outliner.
# Copyright (C) 2011 Dario Giovannetti <dev@dariogiovannetti.net>
#
# This file is part of Outspline.
#
# Outspline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Outspline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Outspline. If not, see <http://www.gnu.org/licenses/>.
from exceptions import BadRuleError
_RULE_NAMES = {'local': 'occur_regularly_local',
'UTC': 'occur_regularly_UTC'}
def make_rule(refstart, interval, rend, ralarm, standard, guiconfig):
"""
@param refstart: A sample occurrence Unix start time.
@param interval: The interval in seconds between two consecutive occurrence
start times.
@param rend: The positive difference in seconds between the sample start
time and the sample end time.
@param ralarm: The difference in seconds between the sample start time and
the sample alarm time; it is negative if the alarm is set
later than the start time.
@param standard: The time standard to be used, either 'local' or 'UTC'.
@param guiconfig: A place to store any configuration needed only by the
interface.
"""
# Make sure this rule can only produce occurrences compliant with the
# requirements defined in organism_api.update_item_rules
# There's no need to check standard because it's imposed by the API
if isinstance(refstart, int) and refstart >= 0 and \
isinstance(interval, int) and interval > 0 and \
(rend is None or (isinstance(rend, int) and rend > 0)) and \
(ralarm is None or isinstance(ralarm, int)):
# Also take a possible negative (late) alarm time into account: the
# occurrence wouldn't be found if the search range included the alarm
# time but not the actual occurrence time span. Remember that it's
# normal for the occurrence not to be added to the results when the
# search range lies between (and doesn't include) the alarm time and
# the actual occurrence time span.
if ralarm is None:
rmax = max((rend, 0))
else:
rmax = max((rend, ralarm * -1, 0))
overlaps = rmax // interval
bgap = interval - rmax % interval
return {
'rule': _RULE_NAMES[standard],
'#': (
refstart,
interval,
overlaps,
bgap,
rend,
ralarm,
guiconfig,
)
}
else:
raise BadRuleError()
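# Editor's sketch (hypothetical values): a UTC rule occurring every hour,
# lasting 20 minutes, with an alarm 5 minutes before the start:
#
#     rule = make_rule(1388534400, 3600, 1200, 300, 'UTC', None)
#     # rule['rule'] == 'occur_regularly_UTC'
#     # rule['#'][2] == 0     (overlaps: rmax 1200 // interval 3600)
#     # rule['#'][3] == 2400  (bgap: 3600 - 1200 % 3600)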
"""
| search_time
* reference_start_time
< found_start_time
(() occurrence (rmin start rmax)
[[] target_occurrence (rmin start rmax)
( * )--------( ( )----|---[ < ]--------( ( )--------( ( )
( * )--------( ( )--------[ [ | ]--------( < )--------( ( )
( * )--------( ( )--------( ( |--------[ < ]--------( ( )
( ( )----|---[ < ]--------( ( )--------( ( )--------( * )
( ( )--------[ [ | ]--------( < )--------( ( )--------( * )
( ( )--------( ( |--------[ < ]--------( ( )--------( * )
( * ) |
[ [ |]
( ( | )
( ( | )
( ( | )
( (| )
( | < )
|( ( )
| ( ( )
| ( ( )
| ( ( )
| ( ( )
| ( ( )
( * ) |
( ( )
[ [ | ]
( ( | )
( ( | )
( ( | )
( | < )
( ( )
| ( ( )
| ( ( )
| ( ( )
| ( ( )
| ( ( )
( * )
( ( )|
[ [ | ]
( ( | )
( ( | )
( ( | )
( |< )
(| ( )
| ( ( )
| ( ( )
| ( ( )
| ( ( )
| ( ( )
( ( ) |
[ [ |]
( ( | )
( ( | )
( ( | )
( (| )
( | < )
|( ( )
| ( ( )
| ( ( )
| ( ( )
| ( ( )
| ( * )
( ( ) |
( ( )
[ [ | ]
( ( | )
( ( | )
( ( | )
( | < )
( ( )
| ( ( )
| ( ( )
| ( ( )
| ( ( )
| ( * )
( ( )
( ( )|
[ [ | ]
( ( | )
( ( | )
( ( | )
( |< )
(| ( )
| ( ( )
| ( ( )
| ( ( )
| ( ( )
| ( * )
overlaps = rmax // interval
fgap = rmax % interval
bgap = interval - fgap
found_start_time = search_time + (reference_start_time - search_time) % interval
if (found_start_time - search_time) > bgap:
target_start_time = found_start_time - (overlaps + 1) * interval
else:
target_start_time = found_start_time - overlaps * interval
===============================================================================
OLD IMPLEMENTATION
* reference occurrence
| reftime
[] target occurrence
A) mintime = reftime - ((reftime - refmin) % interval)
B) mintime = reftime - ((reftime - refmin) % interval) + interval
C) mintime = reftime - ((reftime - refmin) % interval) - ((refspan // interval) * interval)
D) mintime = reftime - ((reftime - refmin) % interval) - ((refspan // interval) * interval) + interval
G) mintime = reftime - ((reftime - refmax) % interval) - refspan
H) mintime = reftime - ((reftime - refmax) % interval) + interval - refspan
I) (!NOT VERIFIED!) mintime = reftime - ((reftime - refmax) % interval) - refspan + ((refspan // interval) * interval)
J) (!NOT VERIFIED!) mintime = reftime - ((reftime - refmax) % interval) - refspan + ((refspan // interval) * interval) + interval
M) mintime = reftime + ((refmin - reftime) % interval) - interval
N) mintime = reftime + ((refmin - reftime) % interval)
O) mintime = reftime + ((refmin - reftime) % interval) - ((refspan // interval) * interval) - interval
P) mintime = reftime + ((refmin - reftime) % interval) - ((refspan // interval) * interval)
S) mintime = reftime + ((refmax - reftime) % interval) - refspan
T) mintime = reftime + ((refmax - reftime) % interval) + interval - refspan
U) (!NOT VERIFIED!) mintime = reftime + ((refmax - reftime) % interval) - refspan + ((refspan // interval) * interval)
V) (!NOT VERIFIED!) mintime = reftime + ((refmax - reftime) % interval) - refspan + ((refspan // interval) * interval) + interval
--------( * )--------( )--------( )--------[ | ]--------( )-----
AHMS
--------( * )--------( )--------( )-----|--[ ]--------( )-----
BHNS
--------( )--------( )-----|--[ ]--------( )--------( * )-----
BHNS
--------( )--------[ | ]--------( )--------( )--------( * )-----
AHMS
--------( )--------( )--------[ |* ]--------( )--------( )-----
AHMS
--------( * )--------( )--------( )--------| ]--------( )-----
AHNS
--------( * )--------( )--------( |--------[ ]--------( )-----
AHNT
--------( )--------( )--------| ]--------( )--------( * )-----
AHNS
--------{ |--------[ ]--------( )--------( )--------( * )-----
BHNT
--------( )--------( )--------| * ]--------( )--------( )-----
AHNS
--------( )--------( * |--------[ ]--------( )--------( )-----
BHNT
--------:--------*--------:--------:--------:----|---[]-------:--------:--------
BHNS
--------:--------:--------:----|---[]-------:--------:--------*--------:--------
BHNS
--------:--------*--------:--------:--------:--------|--------:--------:--------
AGNS
--------:--------:--------|--------:--------:--------:--------*--------:--------
AGNS
--------:--------:--------:--------|*-------:--------:--------:--------:--------
AGNS
(-------)(---*---)(-------)(-------)(-------)(-------)[---|---](-------)(-------)
AHMS
(-------)[---|---](-------)(-------)(-------)(-------)(---*---)(-------)(-------)
AHMS
(-------)(-------)(-------)(-------)[--|-*--](-------)(-------)(-------)(-------)
AHMS
(-------)(---*---)(-------)(-------)(-------)(-------)|-------](-------)(-------)
AHNT
(-------)|-------](-------)(-------)(-------)(-------)(---*---)(-------)(-------)
AHNT
(-------)(-------)(-------)(-------)|---*---](-------)(-------)(-------)(-------)
AHNT
* |
( ( ( ) ( ) [ ) ( ) ( | ] ( ) ( ) ( ) ) )
0 1 2 0 3 1 4 2 5 3 6 | 4 7 5 8 6 9 7 8 9
( ) |
( * ) |
( ) |
( ) |
[ | ]
( | )
( | )
| ( )
| ( )
| ( )
CHOS
| *
( [ ( ) ( | ] ( ) ( ) ( ) ( ) ( ) ( ) ) )
0 1 2 0 3 | 1 4 2 5 3 6 4 7 5 8 6 9 7 8 9
( ) |
[ | ]
( | )
( | )
| ( )
| ( )
| ( )
| ( * )
| ( )
| ( )
CHOS
* |
( ( ( ) ( ) [ ) ( ) ( | ] ( ) ( ) ( ) ) )
0 1 2 0 3 1 4 2 5 3 6 | 4 7 5 8 6 9 7 8 9
( ) |
( ) |
( ) |
( ) |
[ | ]
( * | )
( | )
| ( )
| ( )
| ( )
CHOS
* |
( ( ( ) ( ) [ ) ( ) | ] ( ) ( ) ( ) ) )
0 1 2 0 3 1 4 2 5 3 6 4 7 5 8 6 9 7 8 9
( ) |
( * ) |
( ) |
( ) |
[ | ]
( | )
| )
| ( )
| ( )
| ( )
CHPS
* |
( ( ( ) ( ) ( ) [ ) ( | ( ] ( ) ( ) ) )
0 1 2 0 3 1 4 2 5 3 6 4 7 5 8 6 9 7 8 9
( ) |
( * ) |
( ) |
( ) |
( |
[ | ]
( | )
| ( )
| ( )
| ( )
DHPT
| *
( [ ( ) | ] ( ) ( ) ( ) ( ) ( ) ( ) ) )
0 1 2 0 3 1 4 2 5 3 6 4 7 5 8 6 9 7 8 9
( ) |
[ | ]
( | )
| )
| ( )
| ( )
| ( )
| ( * )
| ( )
| ( )
DHPS
| *
( { [ ) ( | ( ] ( ) ( ) ( ) ( ) ( ) ) )
0 1 2 0 3 1 4 2 5 3 6 4 7 5 8 6 9 7 8 9
( ) |
{ |
[ | ]
( | )
| ( )
| ( )
| ( )
| ( * )
| ( )
| ( )
DHPT
*
( ( ( ) ( ) [ ) ( ) | ] ( ) ( ) ( ) ) )
0 1 2 0 3 1 4 2 5 3 6 4 7 5 8 6 9 7 8 9
( ) |
( ) |
( ) |
( ) |
[ | ]
( | )
| )
| ( )
| ( )
| ( )
CHPS
* |
( ( ( ) ( ) ( ) [ ) ( | ( ] ( ) ( ) ) )
0 1 2 0 3 1 4 2 5 3 6 4 7 5 8 6 9 7 8 9
( ) |
( ) |
( ) |
( * ) |
( |
[ | ]
( | )
| ( )
| ( )
| ( )
DHPT
"""
def compute_min_time(reftime, refstart, interval, overlaps, bgap):
ftime = reftime + (refstart - reftime) % interval
if (ftime - reftime) > bgap:
return ftime - (overlaps + 1) * interval
else:
return ftime - overlaps * interval
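# Editor's worked example (hypothetical values) for compute_min_time: with
# refstart=100, interval=60 and rmax=140, make_rule stores overlaps=2 and
# bgap=40; searching from reftime=1010, the first start at or after it is
# 1010 + (100 - 1010) % 60 == 1060, and since 1060 - 1010 == 50 > bgap the
# earliest start whose span can still reach the search time is
# 1060 - (2 + 1) * 60 == 880 (it runs until 880 + 140 == 1020 >= 1010).
#
#     assert compute_min_time(1010, 100, 60, 2, 40) == 880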
def get_occurrences_range_local(mint, utcmint, maxt, utcoffset, filename, id_,
rule, occs):
interval = rule['#'][1]
# Use utcmint because in Western (negative) time zones (e.g.
# Pacific/Honolulu), the first occurrence to be found would otherwise be
# already too late; in Eastern (positive) time zones the problem would pass
# unnoticed because the first occurrence would be found too early, and
# simply several cycles would not produce occurrences in the search range
start = compute_min_time(utcmint, rule['#'][0], interval, rule['#'][2],
rule['#'][3])
rend = rule['#'][4]
ralarm = rule['#'][5]
while True:
        # Every timestamp can have a different UTC offset, depending on
        # whether it falls in a DST period or not
offset = utcoffset.compute(start)
sstart = start + offset
try:
send = sstart + rend
except TypeError:
send = None
try:
salarm = sstart - ralarm
except TypeError:
salarm = None
# Do compare sstart and salarm with maxt, *not* start and alarm
if sstart > maxt and (salarm is None or salarm > maxt):
break
# The rule is checked in make_rule, no need to use occs.add
occs.add_safe({'filename': filename,
'id_': id_,
'start': sstart,
'end': send,
'alarm': salarm})
start += interval
def get_occurrences_range_UTC(mint, utcmint, maxt, utcoffset, filename, id_,
rule, occs):
interval = rule['#'][1]
start = compute_min_time(mint, rule['#'][0], interval, rule['#'][2],
rule['#'][3])
rend = rule['#'][4]
ralarm = rule['#'][5]
while True:
try:
end = start + rend
except TypeError:
end = None
try:
alarm = start - ralarm
except TypeError:
alarm = None
if start > maxt and (alarm is None or alarm > maxt):
break
# The rule is checked in make_rule, no need to use occs.add
occs.add_safe({'filename': filename,
'id_': id_,
'start': start,
'end': end,
'alarm': alarm})
start += interval
def get_next_item_occurrences_local(base_time, utcbase, utcoffset, filename,
id_, rule, occs):
interval = rule['#'][1]
# Use utcbase because in Western (negative) time zones (e.g.
# Pacific/Honolulu), the first occurrence to be found would otherwise be
# already too late; in Eastern (positive) time zones the problem would pass
# unnoticed because the first occurrence would be found too early, and
# simply several cycles would not produce occurrences in the search range
start = compute_min_time(utcbase, rule['#'][0], interval, rule['#'][2],
rule['#'][3])
rend = rule['#'][4]
ralarm = rule['#'][5]
while True:
        # Every timestamp can have a different UTC offset, depending on
        # whether it falls in a DST period or not
offset = utcoffset.compute(start)
sstart = start + offset
try:
send = sstart + rend
except TypeError:
send = None
try:
salarm = sstart - ralarm
except TypeError:
salarm = None
occd = {'filename': filename,
'id_': id_,
'start': sstart,
'end': send,
'alarm': salarm}
next_occ = occs.get_next_occurrence_time()
# The rule is checked in make_rule, no need to use occs.add
# Do compare sstart and salarm with next_occ, *not* start and alarm
if occs.add_safe(base_time, occd) or (next_occ and \
sstart > next_occ and \
(salarm is None or salarm > next_occ)):
break
start += interval
def get_next_item_occurrences_UTC(base_time, utcbase, utcoffset, filename,
id_, rule, occs):
interval = rule['#'][1]
start = compute_min_time(base_time, rule['#'][0], interval, rule['#'][2],
rule['#'][3])
rend = rule['#'][4]
ralarm = rule['#'][5]
while True:
try:
end = start + rend
except TypeError:
end = None
try:
alarm = start - ralarm
except TypeError:
alarm = None
occd = {'filename': filename,
'id_': id_,
'start': start,
'end': end,
'alarm': alarm}
next_occ = occs.get_next_occurrence_time()
# The rule is checked in make_rule, no need to use occs.add
if occs.add_safe(base_time, occd) or (next_occ and \
start > next_occ and (alarm is None or alarm > next_occ)):
break
start += interval
|
trunca/enigma2
|
refs/heads/6.5
|
lib/python/Components/Converter/ChannelCryptoInfo.py
|
15
|
#
# ChannelCryptoInfo Converter by mcbain // v0.1 // 20111109
#
from enigma import iServiceInformation
from Components.Converter.Converter import Converter
from Components.Element import cached
from Poll import Poll
import os
ECM_INFO = '/tmp/ecm.info'
old_ecm_mtime = None
data = None
class ChannelCryptoInfo(Poll, Converter, object):
IRDCRYPT = 0
SECACRYPT = 1
NAGRACRYPT = 2
VIACRYPT = 3
CONAXCRYPT = 4
BETACRYPT = 5
CRWCRYPT = 6
NDSCRYPT = 7
IRDECM = 8
SECAECM = 9
NAGRAECM = 10
VIAECM = 11
CONAXECM = 12
BETAECM = 13
CRWECM = 14
NDSECM = 15
def __init__(self, type):
Converter.__init__(self, type)
Poll.__init__(self)
self.poll_interval = 2*1000
self.poll_enabled = True
if type == 'IrdCrypt':
self.type = self.IRDCRYPT
elif type == 'SecaCrypt':
self.type = self.SECACRYPT
elif type == 'NagraCrypt':
self.type = self.NAGRACRYPT
elif type == 'ViaCrypt':
self.type = self.VIACRYPT
elif type == 'ConaxCrypt':
self.type = self.CONAXCRYPT
elif type == 'BetaCrypt':
self.type = self.BETACRYPT
elif type == 'CrwCrypt':
self.type = self.CRWCRYPT
elif type == 'NdsCrypt':
self.type = self.NDSCRYPT
elif type == 'IrdEcm':
self.type = self.IRDECM
elif type == 'SecaEcm':
self.type = self.SECAECM
elif type == 'NagraEcm':
self.type = self.NAGRAECM
elif type == 'ViaEcm':
self.type = self.VIAECM
elif type == 'ConaxEcm':
self.type = self.CONAXECM
elif type == 'BetaEcm':
self.type = self.BETAECM
elif type == 'CrwEcm':
self.type = self.CRWECM
elif type == 'NdsEcm':
self.type = self.NDSECM
@cached
def getBoolean(self):
service = self.source.service
info = service and service.info()
if not info:
return False
if (info.getInfo(iServiceInformation.sIsCrypted) == 1):
currentcaid = self.getCaid()
searchcaids = info.getInfoObject(iServiceInformation.sCAIDs)
if (self.type == self.IRDCRYPT):
caemm = self.getCrypt('06', searchcaids)
return caemm
elif (self.type == self.SECACRYPT):
caemm = self.getCrypt('01', searchcaids)
return caemm
elif (self.type == self.NAGRACRYPT):
caemm = self.getCrypt('18', searchcaids)
return caemm
elif (self.type == self.VIACRYPT):
caemm = self.getCrypt('05', searchcaids)
return caemm
elif (self.type == self.CONAXCRYPT):
caemm = self.getCrypt('0B', searchcaids)
return caemm
elif (self.type == self.BETACRYPT):
caemm = self.getCrypt('17', searchcaids)
return caemm
elif (self.type == self.CRWCRYPT):
caemm = self.getCrypt('0D', searchcaids)
return caemm
elif (self.type == self.NDSCRYPT):
caemm = self.getCrypt('09', searchcaids)
return caemm
elif (self.type == self.IRDECM):
if currentcaid == '06':
return True
elif (self.type == self.SECAECM):
if currentcaid == '01':
return True
elif (self.type == self.NAGRAECM):
if currentcaid == '18':
return True
elif (self.type == self.VIAECM):
if currentcaid == '05':
return True
elif (self.type == self.CONAXECM):
if currentcaid == '0B':
return True
elif (self.type == self.BETAECM):
if currentcaid == '17':
return True
elif (self.type == self.CRWECM):
if currentcaid == '0D':
return True
elif (self.type == self.NDSECM):
if currentcaid == '09':
return True
else:
self.poll_enabled = False
return False
boolean = property(getBoolean)
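	# Editor's note: with a '*Crypt' type the converter is True whenever the
	# matching CA system id appears in the service's CAID list, while an
	# '*Ecm' type is True only when the CAID currently decoding, as read
	# from /tmp/ecm.info, belongs to that system.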
def getCrypt(self, iscaid, caids):
if caids and len(caids) > 0:
for caid in caids:
caid = self.int2hex(caid)
if (len(caid) == 3):
caid = ("0%s" % caid)
caid = caid[:2]
caid = caid.upper()
if (caid == iscaid):
return True
return False
def getCaid(self):
global old_ecm_mtime
global data
try:
ecm_mtime = os.stat(ECM_INFO).st_mtime
		except OSError:
			ecm_mtime = None
if ecm_mtime != old_ecm_mtime:
old_ecm_mtime = ecm_mtime
data = self.getCaidFromEcmInfo()
return data
def getCaidFromEcmInfo(self):
try:
ecm = open(ECM_INFO, 'rb').readlines()
info = {}
for line in ecm:
d = line.split(':', 1)
if len(d) > 1:
info[d[0].strip()] = d[1].strip()
caid = info.get('caid', '')
except:
caid = ''
		if caid:
			# strip everything up to and including a '0x'-style prefix;
			# find() returns -1 when there is no 'x', leaving caid intact
			idx = caid.find('x')
			caid = caid[(idx + 1):]
if (len(caid) == 3):
caid = ('0%s' % caid)
caid = caid[:2]
caid = caid.upper()
return caid
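	# Editor's sketch: a hypothetical /tmp/ecm.info line such as
	#     caid: 0x648
	# yields '648' after the 'x', is left-padded to '0648', and its first
	# byte '06' then matches the Irdeto branch in getBoolean above.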
	def int2hex(self, value):
		return ('%x' % value)
def changed(self, what):
Converter.changed(self, what)
|
MichaelNedzelsky/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyUnresolvedReferencesInspection/superclassAsLocal.py
|
83
|
class A(object):
def method(self):
pass
C = A
class B(C):
pass
b = B()
b.method() #Unresolved attribute reference 'method' for class 'B'
|
msrb/samba
|
refs/heads/master
|
third_party/dnspython/dns/rdtypes/ANY/DNSKEY.py
|
76
|
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.dnssec
import dns.rdata
# flag constants
SEP = 0x0001
REVOKE = 0x0080
ZONE = 0x0100
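# For reference (editor's note; the values follow from the constants
# above): a DNSSEC zone-signing key is published with flags == ZONE (256),
# while a key-signing key sets flags == ZONE | SEP (257).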
class DNSKEY(dns.rdata.Rdata):
"""DNSKEY record
@ivar flags: the key flags
@type flags: int
@ivar protocol: the protocol for which this key may be used
@type protocol: int
@ivar algorithm: the algorithm used for the key
@type algorithm: int
@ivar key: the public key
@type key: string"""
__slots__ = ['flags', 'protocol', 'algorithm', 'key']
def __init__(self, rdclass, rdtype, flags, protocol, algorithm, key):
super(DNSKEY, self).__init__(rdclass, rdtype)
self.flags = flags
self.protocol = protocol
self.algorithm = algorithm
self.key = key
def to_text(self, origin=None, relativize=True, **kw):
return '%d %d %d %s' % (self.flags, self.protocol, self.algorithm,
dns.rdata._base64ify(self.key))
def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
flags = tok.get_uint16()
protocol = tok.get_uint8()
algorithm = dns.dnssec.algorithm_from_text(tok.get_string())
chunks = []
while 1:
t = tok.get().unescape()
if t.is_eol_or_eof():
break
if not t.is_identifier():
raise dns.exception.SyntaxError
chunks.append(t.value)
b64 = ''.join(chunks)
key = b64.decode('base64_codec')
return cls(rdclass, rdtype, flags, protocol, algorithm, key)
from_text = classmethod(from_text)
def to_wire(self, file, compress = None, origin = None):
header = struct.pack("!HBB", self.flags, self.protocol, self.algorithm)
file.write(header)
file.write(self.key)
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
if rdlen < 4:
raise dns.exception.FormError
header = struct.unpack('!HBB', wire[current : current + 4])
current += 4
rdlen -= 4
key = wire[current : current + rdlen].unwrap()
return cls(rdclass, rdtype, header[0], header[1], header[2],
key)
from_wire = classmethod(from_wire)
def _cmp(self, other):
hs = struct.pack("!HBB", self.flags, self.protocol, self.algorithm)
ho = struct.pack("!HBB", other.flags, other.protocol, other.algorithm)
v = cmp(hs, ho)
if v == 0:
v = cmp(self.key, other.key)
return v
|
MSeifert04/astropy
|
refs/heads/master
|
astropy/timeseries/tests/test_binned.py
|
7
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import pytest
from numpy.testing import assert_equal, assert_allclose
from astropy import units as u
from astropy.time import Time, TimeDelta
from astropy.utils.data import get_pkg_data_filename
from astropy.timeseries.periodograms import BoxLeastSquares, LombScargle
from astropy.timeseries.binned import BinnedTimeSeries
from astropy.tests.helper import assert_quantity_allclose
CSV_FILE = get_pkg_data_filename('data/binned.csv')
def test_empty_initialization():
ts = BinnedTimeSeries()
ts['time_bin_start'] = Time([1, 2, 3], format='mjd')
def test_empty_initialization_invalid():
# Make sure things crash when the first column added is not a time column
ts = BinnedTimeSeries()
with pytest.raises(ValueError) as exc:
ts['flux'] = [1, 2, 3]
assert exc.value.args[0] == ("BinnedTimeSeries object is invalid - expected "
"'time_bin_start' as the first column but found 'flux'")
def test_initialization_time_bin_invalid():
# Make sure things crash when time_bin_* is passed incorrectly.
with pytest.raises(TypeError) as exc:
BinnedTimeSeries(data=[[1, 4, 3]])
assert exc.value.args[0] == ("'time_bin_start' has not been specified")
with pytest.raises(TypeError) as exc:
BinnedTimeSeries(time_bin_start='2016-03-22T12:30:31', data=[[1, 4, 3]])
assert exc.value.args[0] == ("Either 'time_bin_size' or 'time_bin_end' should be specified")
def test_initialization_time_bin_both():
# Make sure things crash when time_bin_* is passed twice.
with pytest.raises(TypeError) as exc:
BinnedTimeSeries(data={"time_bin_start": ["2016-03-22T12:30:31"]},
time_bin_start="2016-03-22T12:30:31")
assert exc.value.args[0] == ("'time_bin_start' has been given both in the table "
"and as a keyword argument")
with pytest.raises(TypeError) as exc:
BinnedTimeSeries(data={"time_bin_size": ["2016-03-22T12:30:31"]},
time_bin_size=[1]*u.s)
assert exc.value.args[0] == ("'time_bin_size' has been given both in the table "
"and as a keyword argument")
def test_initialization_time_bin_size():
# Make sure things crash when time_bin_size has no units
with pytest.raises(TypeError) as exc:
BinnedTimeSeries(data={"time": ["2016-03-22T12:30:31"]},
time_bin_start="2016-03-22T12:30:31",
time_bin_size=1)
assert exc.value.args[0] == ("'time_bin_size' should be a Quantity or a TimeDelta")
# TimeDelta for time_bin_size
ts = BinnedTimeSeries(data={"time": ["2016-03-22T12:30:31"]},
time_bin_start="2016-03-22T12:30:31",
time_bin_size=TimeDelta(1))
assert isinstance(ts.time_bin_size, u.quantity.Quantity)
def test_initialization_time_bin_start_scalar():
# Make sure things crash when time_bin_start is a scalar with no time_bin_size
with pytest.raises(TypeError) as exc:
BinnedTimeSeries(data={"time": ["2016-03-22T12:30:31"]},
time_bin_start=Time(1, format='mjd'),
time_bin_end=Time(1, format='mjd'))
assert exc.value.args[0] == ("'time_bin_start' is scalar, so 'time_bin_size' is required")
def test_initialization_n_bins():
# Make sure things crash with incorrect n_bins
with pytest.raises(TypeError) as exc:
BinnedTimeSeries(data={"time": ["2016-03-22T12:30:31"]},
time_bin_start=Time(1, format='mjd'),
time_bin_size=1*u.s,
time_bin_end=Time(1, format='mjd'),
n_bins=10)
assert exc.value.args[0] == ("'n_bins' has been given and it is not the "
"same length as the input data.")
def test_initialization_non_scalar_time():
# Make sure things crash with incorrect size of time_bin_start
with pytest.raises(ValueError) as exc:
BinnedTimeSeries(data={"time": ["2016-03-22T12:30:31"]},
time_bin_start=["2016-03-22T12:30:31", "2016-03-22T12:30:32"],
time_bin_size=1*u.s,
time_bin_end=Time(1, format='mjd'))
assert exc.value.args[0] == ("Length of 'time_bin_start' (2) should match table length (1)")
with pytest.raises(TypeError) as exc:
BinnedTimeSeries(data={"time": ["2016-03-22T12:30:31"]},
time_bin_start=["2016-03-22T12:30:31"],
time_bin_size=None,
time_bin_end=None)
assert exc.value.args[0] == ("Either 'time_bin_size' or 'time_bin_end' should be specified")
def test_even_contiguous():
# Initialize a ``BinnedTimeSeries`` with even contiguous bins by specifying
# the bin width:
ts = BinnedTimeSeries(time_bin_start='2016-03-22T12:30:31',
time_bin_size=3 * u.s, data=[[1, 4, 3]])
assert_equal(ts.time_bin_start.isot, ['2016-03-22T12:30:31.000',
'2016-03-22T12:30:34.000',
'2016-03-22T12:30:37.000'])
assert_equal(ts.time_bin_center.isot, ['2016-03-22T12:30:32.500',
'2016-03-22T12:30:35.500',
'2016-03-22T12:30:38.500'])
assert_equal(ts.time_bin_end.isot, ['2016-03-22T12:30:34.000',
'2016-03-22T12:30:37.000',
'2016-03-22T12:30:40.000'])
def test_uneven_contiguous():
# Initialize a ``BinnedTimeSeries`` with uneven contiguous bins by giving an
# end time:
ts = BinnedTimeSeries(time_bin_start=['2016-03-22T12:30:31',
'2016-03-22T12:30:32',
'2016-03-22T12:30:40'],
time_bin_end='2016-03-22T12:30:55',
data=[[1, 4, 3]])
assert_equal(ts.time_bin_start.isot, ['2016-03-22T12:30:31.000',
'2016-03-22T12:30:32.000',
'2016-03-22T12:30:40.000'])
assert_equal(ts.time_bin_center.isot, ['2016-03-22T12:30:31.500',
'2016-03-22T12:30:36.000',
'2016-03-22T12:30:47.500'])
assert_equal(ts.time_bin_end.isot, ['2016-03-22T12:30:32.000',
'2016-03-22T12:30:40.000',
'2016-03-22T12:30:55.000'])
def test_uneven_non_contiguous():
# Initialize a ``BinnedTimeSeries`` with uneven non-contiguous bins with
# lists of start times, bin sizes and data:
ts = BinnedTimeSeries(time_bin_start=['2016-03-22T12:30:31',
'2016-03-22T12:30:38',
'2016-03-22T12:34:40'],
time_bin_size=[5, 100, 2]*u.s,
data=[[1, 4, 3]])
assert_equal(ts.time_bin_start.isot, ['2016-03-22T12:30:31.000',
'2016-03-22T12:30:38.000',
'2016-03-22T12:34:40.000'])
assert_equal(ts.time_bin_center.isot, ['2016-03-22T12:30:33.500',
'2016-03-22T12:31:28.000',
'2016-03-22T12:34:41.000'])
assert_equal(ts.time_bin_end.isot, ['2016-03-22T12:30:36.000',
'2016-03-22T12:32:18.000',
'2016-03-22T12:34:42.000'])
def test_uneven_non_contiguous_full():
# Initialize a ``BinnedTimeSeries`` with uneven non-contiguous bins by
# specifying the start and end times for the bins:
ts = BinnedTimeSeries(time_bin_start=['2016-03-22T12:30:31',
'2016-03-22T12:30:33',
'2016-03-22T12:30:40'],
time_bin_end=['2016-03-22T12:30:32',
'2016-03-22T12:30:35',
'2016-03-22T12:30:41'],
data=[[1, 4, 3]])
assert_equal(ts.time_bin_start.isot, ['2016-03-22T12:30:31.000',
'2016-03-22T12:30:33.000',
'2016-03-22T12:30:40.000'])
assert_equal(ts.time_bin_center.isot, ['2016-03-22T12:30:31.500',
'2016-03-22T12:30:34.000',
'2016-03-22T12:30:40.500'])
assert_equal(ts.time_bin_end.isot, ['2016-03-22T12:30:32.000',
'2016-03-22T12:30:35.000',
'2016-03-22T12:30:41.000'])
def test_read_empty():
with pytest.raises(ValueError) as exc:
BinnedTimeSeries.read(CSV_FILE, format='csv')
assert exc.value.args[0] == '``time_bin_start_column`` should be provided since the default Table readers are being used.'
def test_read_no_size_end():
with pytest.raises(ValueError) as exc:
BinnedTimeSeries.read(CSV_FILE, time_bin_start_column='time_start', format='csv')
assert exc.value.args[0] == 'Either `time_bin_end_column` or `time_bin_size_column` should be provided.'
def test_read_both_extra_bins():
with pytest.raises(ValueError) as exc:
BinnedTimeSeries.read(CSV_FILE, time_bin_start_column='time_start', time_bin_end_column='END', time_bin_size_column='bin_size', format='csv')
assert exc.value.args[0] == "Cannot specify both `time_bin_end_column` and `time_bin_size_column`."
def test_read_size_no_unit():
with pytest.raises(ValueError) as exc:
BinnedTimeSeries.read(CSV_FILE, time_bin_start_column='time_start', time_bin_size_column='bin_size', format='csv')
assert exc.value.args[0] == "The bin size unit should be specified as an astropy Unit using ``time_bin_size_unit``."
def test_read_start_time_missing():
with pytest.raises(ValueError) as exc:
BinnedTimeSeries.read(CSV_FILE, time_bin_start_column='abc', time_bin_size_column='bin_size', time_bin_size_unit=u.second, format='csv')
assert exc.value.args[0] == "Bin start time column 'abc' not found in the input data."
def test_read_end_time_missing():
with pytest.raises(ValueError) as exc:
BinnedTimeSeries.read(CSV_FILE, time_bin_start_column='time_start', time_bin_end_column="missing", format='csv')
assert exc.value.args[0] == "Bin end time column 'missing' not found in the input data."
def test_read_size_missing():
with pytest.raises(ValueError) as exc:
BinnedTimeSeries.read(CSV_FILE, time_bin_start_column='time_start', time_bin_size_column="missing", time_bin_size_unit=u.second, format='csv')
assert exc.value.args[0] == "Bin size column 'missing' not found in the input data."
def test_read_time_unit_missing():
with pytest.raises(ValueError) as exc:
BinnedTimeSeries.read(CSV_FILE, time_bin_start_column='time_start', time_bin_size_column="bin_size", format='csv')
assert exc.value.args[0] == "The bin size unit should be specified as an astropy Unit using ``time_bin_size_unit``."
def test_read():
timeseries = BinnedTimeSeries.read(CSV_FILE, time_bin_start_column='time_start',
time_bin_end_column='time_end', format='csv')
assert timeseries.colnames == ['time_bin_start', 'time_bin_size', 'bin_size', 'A', 'B', 'C', 'D', 'E', 'F']
assert len(timeseries) == 10
assert timeseries['B'].sum() == 1151.54
timeseries = BinnedTimeSeries.read(CSV_FILE, time_bin_start_column='time_start',
time_bin_size_column='bin_size',
time_bin_size_unit=u.second, format='csv')
assert timeseries.colnames == ['time_bin_start', 'time_bin_size', 'time_end', 'A', 'B', 'C', 'D', 'E', 'F']
assert len(timeseries) == 10
assert timeseries['B'].sum() == 1151.54
@pytest.mark.parametrize('cls', [BoxLeastSquares, LombScargle])
def test_periodogram(cls):
# Note that we don't need to check the actual results from the periodogram
# classes here since these are tested extensively in
# astropy.timeseries.periodograms.
ts = BinnedTimeSeries(time_bin_start='2016-03-22T12:30:31',
time_bin_size=3 * u.s, data=[[1, 4, 3], [3, 4, 3]], names=['a', 'b'])
p1 = cls.from_timeseries(ts, 'a')
assert isinstance(p1, cls)
assert_allclose(p1.t.jd, ts.time_bin_center.jd)
assert_equal(p1.y, ts['a'])
assert p1.dy is None
p2 = cls.from_timeseries(ts, 'a', uncertainty='b')
assert_quantity_allclose(p2.dy, ts['b'])
p3 = cls.from_timeseries(ts, 'a', uncertainty=0.1)
assert_allclose(p3.dy, 0.1)
|
Reinaesaya/OUIRL-ChatBot
|
refs/heads/master
|
chatterbot/imgcaption/commu_server.py
|
1
|
import socket
import sys
import os
import time
import client_cv
def run_image_capturing_server(host='', port=8075, target_host='192.168.0.113', target_port=8092, resizefactor=1):
try:
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((host,port))
try:
serveropen = True
while serveropen:
s.listen(10)
print('Socket now listening on '+str(host)+' : '+str(port))
conn,addr = s.accept()
				print('Got a connection from ' + str(addr[0]) + ' : ' + str(addr[1]))
try:
while True:
data = ""
while len(data) == 0:
data += conn.recv(4096)
print(data)
if data.strip() == "/takepicture":
try:
ss = client_cv.connectSocket(target_host, target_port)
client_cv.sendImage(ss, resizefactor=resizefactor)
ss.close()
except Exception as e:
print(e)
except KeyboardInterrupt:
serveropen = False
except Exception as e:
print(e)
pass
finally:
print "Closing client connection"
conn.close()
except Exception as e:
print(e)
pass
finally:
print "Closing server socket"
s.shutdown(socket.SHUT_RDWR)
s.close()
except Exception as e:
print(e)
pass
if __name__ == "__main__":
run_image_capturing_server()
|
Soya93/Extract-Refactoring
|
refs/heads/master
|
python/lib/Lib/os.py
|
74
|
r"""OS routines for Mac, NT, or Posix depending on what system we're on.
This exports:
- all functions from posix, nt, os2, or ce, e.g. unlink, stat, etc.
- os.path is one of the modules posixpath, or ntpath
- os.name is 'posix', 'nt', 'os2', 'ce' or 'riscos'
- os.curdir is a string representing the current directory ('.' or ':')
- os.pardir is a string representing the parent directory ('..' or '::')
- os.sep is the (or a most common) pathname separator ('/' or ':' or '\\')
- os.extsep is the extension separator ('.' or '/')
- os.altsep is the alternate pathname separator (None or '/')
- os.pathsep is the component separator used in $PATH etc
- os.linesep is the line separator in text files ('\r' or '\n' or '\r\n')
- os.defpath is the default search path for executables
- os.devnull is the file path of the null device ('/dev/null', etc.)
Programs that import and use 'os' stand a better chance of being
portable between different platforms. Of course, they must then
only use functions that are defined by all platforms (e.g., unlink
and opendir), and leave all pathname manipulation to os.path
(e.g., split and join).
"""
#'
import sys, errno
_names = sys.builtin_module_names
# Note: more names are added to __all__ later.
__all__ = ["altsep", "curdir", "pardir", "sep", "extsep", "pathsep", "linesep",
"defpath", "name", "path", "devnull",
"SEEK_SET", "SEEK_CUR", "SEEK_END"]
def _get_exports_list(module):
try:
return list(module.__all__)
except AttributeError:
return [n for n in dir(module) if n[0] != '_']
name = 'java'
if 'posix' in _names:
_name = 'posix'
linesep = '\n'
from posix import *
try:
from posix import _exit
except ImportError:
pass
import posixpath as path
import posix
__all__.extend(_get_exports_list(posix))
del posix
elif 'nt' in _names:
_name = 'nt'
linesep = '\r\n'
from nt import *
try:
from nt import _exit
except ImportError:
pass
import ntpath as path
import nt
__all__.extend(_get_exports_list(nt))
del nt
elif 'os2' in _names:
_name = 'os2'
linesep = '\r\n'
from os2 import *
try:
from os2 import _exit
except ImportError:
pass
if sys.version.find('EMX GCC') == -1:
import ntpath as path
else:
import os2emxpath as path
from _emx_link import link
import os2
__all__.extend(_get_exports_list(os2))
del os2
elif 'ce' in _names:
_name = 'ce'
linesep = '\r\n'
from ce import *
try:
from ce import _exit
except ImportError:
pass
# We can use the standard Windows path.
import ntpath as path
import ce
__all__.extend(_get_exports_list(ce))
del ce
elif 'riscos' in _names:
_name = 'riscos'
linesep = '\n'
from riscos import *
try:
from riscos import _exit
except ImportError:
pass
import riscospath as path
import riscos
__all__.extend(_get_exports_list(riscos))
del riscos
else:
raise ImportError, 'no os specific module found'
sys.modules['os.path'] = path
from os.path import (curdir, pardir, sep, pathsep, defpath, extsep, altsep,
devnull)
del _names
# Python uses fixed values for the SEEK_ constants; they are mapped
# to native constants if necessary in posixmodule.c
SEEK_SET = 0
SEEK_CUR = 1
SEEK_END = 2
#'
# Super directory utilities.
# (Inspired by Eric Raymond; the doc strings are mostly his)
def makedirs(name, mode=0777):
"""makedirs(path [, mode=0777])
Super-mkdir; create a leaf directory and all intermediate ones.
Works like mkdir, except that any intermediate path segment (not
just the rightmost) will be created if it does not exist. This is
recursive.
"""
head, tail = path.split(name)
if not tail:
head, tail = path.split(head)
if head and tail and not path.exists(head):
try:
makedirs(head, mode)
except OSError, e:
# be happy if someone already created the path
if e.errno != errno.EEXIST:
raise
if tail == curdir: # xxx/newdir/. exists if xxx/newdir exists
return
mkdir(name, mode)
def removedirs(name):
"""removedirs(path)
Super-rmdir; remove a leaf directory and all empty intermediate
ones. Works like rmdir except that, if the leaf directory is
successfully removed, directories corresponding to rightmost path
segments will be pruned away until either the whole path is
consumed or an error occurs. Errors during this latter phase are
ignored -- they generally mean that a directory was not empty.
"""
rmdir(name)
head, tail = path.split(name)
if not tail:
head, tail = path.split(head)
while head and tail:
try:
rmdir(head)
except error:
break
head, tail = path.split(head)
def renames(old, new):
"""renames(old, new)
Super-rename; create directories as necessary and delete any left
empty. Works like rename, except creation of any intermediate
directories needed to make the new pathname good is attempted
first. After the rename, directories corresponding to rightmost
    path segments of the old name will be pruned away until either the
whole path is consumed or a nonempty directory is found.
Note: this function can fail with the new directory structure made
if you lack permissions needed to unlink the leaf directory or
file.
"""
head, tail = path.split(new)
if head and tail and not path.exists(head):
makedirs(head)
rename(old, new)
head, tail = path.split(old)
if head and tail:
try:
removedirs(head)
except error:
pass
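# Editor's illustration of the docstring above (hypothetical paths):
# renames('a/b/old.txt', 'x/y/new.txt') first creates 'x/y' via makedirs,
# renames the file, then prunes 'a/b' and 'a' with removedirs if they are
# left empty.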
__all__.extend(["makedirs", "removedirs", "renames"])
def walk(top, topdown=True, onerror=None, followlinks=False):
"""Directory tree generator.
For each directory in the directory tree rooted at top (including top
itself, but excluding '.' and '..'), yields a 3-tuple
dirpath, dirnames, filenames
dirpath is a string, the path to the directory. dirnames is a list of
the names of the subdirectories in dirpath (excluding '.' and '..').
filenames is a list of the names of the non-directory files in dirpath.
Note that the names in the lists are just names, with no path components.
To get a full path (which begins with top) to a file or directory in
dirpath, do os.path.join(dirpath, name).
If optional arg 'topdown' is true or not specified, the triple for a
directory is generated before the triples for any of its subdirectories
(directories are generated top down). If topdown is false, the triple
for a directory is generated after the triples for all of its
subdirectories (directories are generated bottom up).
When topdown is true, the caller can modify the dirnames list in-place
(e.g., via del or slice assignment), and walk will only recurse into the
subdirectories whose names remain in dirnames; this can be used to prune
the search, or to impose a specific order of visiting. Modifying
dirnames when topdown is false is ineffective, since the directories in
dirnames have already been generated by the time dirnames itself is
generated.
By default errors from the os.listdir() call are ignored. If
optional arg 'onerror' is specified, it should be a function; it
will be called with one argument, an os.error instance. It can
report the error to continue with the walk, or raise the exception
to abort the walk. Note that the filename is available as the
filename attribute of the exception object.
By default, os.walk does not follow symbolic links to subdirectories on
systems that support them. In order to get this functionality, set the
optional argument 'followlinks' to true.
Caution: if you pass a relative pathname for top, don't change the
current working directory between resumptions of walk. walk never
changes the current directory, and assumes that the client doesn't
either.
Example:
import os
from os.path import join, getsize
for root, dirs, files in os.walk('python/Lib/email'):
print root, "consumes",
print sum([getsize(join(root, name)) for name in files]),
print "bytes in", len(files), "non-directory files"
if 'CVS' in dirs:
dirs.remove('CVS') # don't visit CVS directories
"""
from os.path import join, isdir, islink
# We may not have read permission for top, in which case we can't
# get a list of the files the directory contains. os.path.walk
# always suppressed the exception then, rather than blow up for a
# minor reason when (say) a thousand readable directories are still
# left to visit. That logic is copied here.
try:
# Note that listdir and error are globals in this module due
# to earlier import-*.
names = listdir(top)
except error, err:
if onerror is not None:
onerror(err)
return
dirs, nondirs = [], []
for name in names:
if isdir(join(top, name)):
dirs.append(name)
else:
nondirs.append(name)
if topdown:
yield top, dirs, nondirs
for name in dirs:
path = join(top, name)
if followlinks or not islink(path):
for x in walk(path, topdown, onerror, followlinks):
yield x
if not topdown:
yield top, dirs, nondirs
__all__.append("walk")
# Make sure os.environ exists, at least
try:
environ
except NameError:
environ = {}
def _exists(name):
# CPython eval's the name, whereas looking in __all__ works for
# Jython and is much faster
return name in __all__
if _exists('execv'):
def execl(file, *args):
"""execl(file, *args)
Execute the executable file with argument list args, replacing the
current process. """
execv(file, args)
def execle(file, *args):
"""execle(file, *args, env)
Execute the executable file with argument list args and
environment env, replacing the current process. """
env = args[-1]
execve(file, args[:-1], env)
def execlp(file, *args):
"""execlp(file, *args)
Execute the executable file (which is searched for along $PATH)
with argument list args, replacing the current process. """
execvp(file, args)
def execlpe(file, *args):
"""execlpe(file, *args, env)
Execute the executable file (which is searched for along $PATH)
with argument list args and environment env, replacing the current
process. """
env = args[-1]
execvpe(file, args[:-1], env)
def execvp(file, args):
"""execp(file, args)
Execute the executable file (which is searched for along $PATH)
with argument list args, replacing the current process.
args may be a list or tuple of strings. """
_execvpe(file, args)
def execvpe(file, args, env):
"""execvpe(file, args, env)
Execute the executable file (which is searched for along $PATH)
with argument list args and environment env , replacing the
current process.
args may be a list or tuple of strings. """
_execvpe(file, args, env)
__all__.extend(["execl","execle","execlp","execlpe","execvp","execvpe"])
def _execvpe(file, args, env=None):
if env is not None:
func = execve
argrest = (args, env)
else:
func = execv
argrest = (args,)
env = environ
head, tail = path.split(file)
if head:
func(file, *argrest)
return
if 'PATH' in env:
envpath = env['PATH']
else:
envpath = defpath
PATH = envpath.split(pathsep)
saved_exc = None
saved_tb = None
for dir in PATH:
fullname = path.join(dir, file)
try:
func(fullname, *argrest)
except error, e:
tb = sys.exc_info()[2]
if (e.errno != errno.ENOENT and e.errno != errno.ENOTDIR
and saved_exc is None):
saved_exc = e
saved_tb = tb
if saved_exc:
raise error, saved_exc, saved_tb
raise error, e, tb
# Change environ to automatically call putenv() if it exists
try:
# This will fail if there's no putenv
putenv
except NameError:
pass
else:
# Fake unsetenv() for Windows
# not sure about os2 here but
# I'm guessing they are the same.
if name in ('os2', 'nt'):
def unsetenv(key):
putenv(key, "")
if _name == "riscos":
# On RISC OS, all env access goes through getenv and putenv
from riscosenviron import _Environ
elif _name in ('os2', 'nt'): # Where Env Var Names Must Be UPPERCASE
import UserDict
# But we store them as upper case
class _Environ(UserDict.IterableUserDict):
def __init__(self, environ):
UserDict.UserDict.__init__(self)
data = self.data
for k, v in environ.items():
data[k.upper()] = v
def __setitem__(self, key, item):
self.data[key.upper()] = item
def __getitem__(self, key):
return self.data[key.upper()]
def __delitem__(self, key):
del self.data[key.upper()]
def has_key(self, key):
return key.upper() in self.data
def __contains__(self, key):
return key.upper() in self.data
def get(self, key, failobj=None):
return self.data.get(key.upper(), failobj)
def update(self, dict=None, **kwargs):
if dict:
try:
keys = dict.keys()
except AttributeError:
# List of (key, value)
for k, v in dict:
self[k] = v
else:
# got keys
# cannot use items(), since mappings
# may not have them.
for k in keys:
self[k] = dict[k]
if kwargs:
self.update(kwargs)
def copy(self):
return dict(self)
environ = _Environ(environ)
def getenv(key, default=None):
"""Get an environment variable, return None if it doesn't exist.
The optional second argument can specify an alternate default."""
return environ.get(key, default)
__all__.append("getenv")
# Supply spawn*() (probably only for Unix)
if _exists("fork") and not _exists("spawnv") and _exists("execv"):
P_WAIT = 0
P_NOWAIT = P_NOWAITO = 1
# XXX Should we support P_DETACH? I suppose it could fork()**2
# and close the std I/O streams. Also, P_OVERLAY is the same
# as execv*()?
def _spawnvef(mode, file, args, env, func):
# Internal helper; func is the exec*() function to use
pid = fork()
if not pid:
# Child
try:
if env is None:
func(file, args)
else:
func(file, args, env)
except:
_exit(127)
else:
# Parent
if mode == P_NOWAIT:
return pid # Caller is responsible for waiting!
while 1:
wpid, sts = waitpid(pid, 0)
if WIFSTOPPED(sts):
continue
elif WIFSIGNALED(sts):
return -WTERMSIG(sts)
elif WIFEXITED(sts):
return WEXITSTATUS(sts)
else:
raise error, "Not stopped, signaled or exited???"
def spawnv(mode, file, args):
"""spawnv(mode, file, args) -> integer
Execute file with arguments from args in a subprocess.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return _spawnvef(mode, file, args, None, execv)
def spawnve(mode, file, args, env):
"""spawnve(mode, file, args, env) -> integer
Execute file with arguments from args in a subprocess with the
specified environment.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return _spawnvef(mode, file, args, env, execve)
    # Note: spawnvp[e] isn't currently supported on Windows
def spawnvp(mode, file, args):
"""spawnvp(mode, file, args) -> integer
Execute file (which is looked for along $PATH) with arguments from
args in a subprocess.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return _spawnvef(mode, file, args, None, execvp)
def spawnvpe(mode, file, args, env):
"""spawnvpe(mode, file, args, env) -> integer
Execute file (which is looked for along $PATH) with arguments from
args in a subprocess with the supplied environment.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return _spawnvef(mode, file, args, env, execvpe)
if _exists("spawnv"):
# These aren't supplied by the basic Windows code
# but can be easily implemented in Python
def spawnl(mode, file, *args):
"""spawnl(mode, file, *args) -> integer
Execute file with arguments from args in a subprocess.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return spawnv(mode, file, args)
def spawnle(mode, file, *args):
"""spawnle(mode, file, *args, env) -> integer
Execute file with arguments from args in a subprocess with the
supplied environment.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
env = args[-1]
return spawnve(mode, file, args[:-1], env)
__all__.extend(["spawnv", "spawnve", "spawnl", "spawnle",])
if _exists("spawnvp"):
# At the moment, Windows doesn't implement spawnvp[e],
# so it won't have spawnlp[e] either.
def spawnlp(mode, file, *args):
"""spawnlp(mode, file, *args) -> integer
Execute file (which is looked for along $PATH) with arguments from
args in a subprocess with the supplied environment.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return spawnvp(mode, file, args)
def spawnlpe(mode, file, *args):
"""spawnlpe(mode, file, *args, env) -> integer
Execute file (which is looked for along $PATH) with arguments from
args in a subprocess with the supplied environment.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
env = args[-1]
return spawnvpe(mode, file, args[:-1], env)
__all__.extend(["spawnvp", "spawnvpe", "spawnlp", "spawnlpe",])
# Supply popen2 etc. (for Unix)
if sys.platform.startswith('java') or _exists("fork"):
if not _exists("popen2"):
def popen2(cmd, mode="t", bufsize=-1):
"""Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd'
may be a sequence, in which case arguments will be passed directly to
the program without shell intervention (as with os.spawnv()). If 'cmd'
is a string it will be passed to the shell (as with os.system()). If
'bufsize' is specified, it sets the buffer size for the I/O pipes. The
file objects (child_stdin, child_stdout) are returned."""
import subprocess
PIPE = subprocess.PIPE
p = subprocess.Popen(cmd, shell=isinstance(cmd, basestring),
bufsize=bufsize, stdin=PIPE, stdout=PIPE,
close_fds=True)
return p.stdin, p.stdout
__all__.append("popen2")
if not _exists("popen3"):
def popen3(cmd, mode="t", bufsize=-1):
"""Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd'
may be a sequence, in which case arguments will be passed directly to
the program without shell intervention (as with os.spawnv()). If 'cmd'
is a string it will be passed to the shell (as with os.system()). If
'bufsize' is specified, it sets the buffer size for the I/O pipes. The
file objects (child_stdin, child_stdout, child_stderr) are returned."""
import subprocess
PIPE = subprocess.PIPE
p = subprocess.Popen(cmd, shell=isinstance(cmd, basestring),
bufsize=bufsize, stdin=PIPE, stdout=PIPE,
stderr=PIPE, close_fds=True)
return p.stdin, p.stdout, p.stderr
__all__.append("popen3")
if not _exists("popen4"):
def popen4(cmd, mode="t", bufsize=-1):
"""Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd'
may be a sequence, in which case arguments will be passed directly to
the program without shell intervention (as with os.spawnv()). If 'cmd'
is a string it will be passed to the shell (as with os.system()). If
'bufsize' is specified, it sets the buffer size for the I/O pipes. The
file objects (child_stdin, child_stdout_stderr) are returned."""
import subprocess
PIPE = subprocess.PIPE
p = subprocess.Popen(cmd, shell=isinstance(cmd, basestring),
bufsize=bufsize, stdin=PIPE, stdout=PIPE,
stderr=subprocess.STDOUT, close_fds=True)
return p.stdin, p.stdout
__all__.append("popen4")
if not _exists("urandom"):
def urandom(n):
"""urandom(n) -> str
Return a string of n random bytes suitable for cryptographic use.
"""
try:
_urandomfd = open("/dev/urandom", O_RDONLY)
except (OSError, IOError):
raise NotImplementedError("/dev/urandom (or equivalent) not found")
bytes = ""
while len(bytes) < n:
bytes += read(_urandomfd, n - len(bytes))
close(_urandomfd)
return bytes
# Supply os.popen()
def popen(cmd, mode='r', bufsize=-1):
"""popen(command [, mode='r' [, bufsize]]) -> pipe
Open a pipe to/from a command returning a file object.
"""
if not isinstance(cmd, (str, unicode)):
raise TypeError('invalid cmd type (%s, expected string)' % type(cmd))
if mode not in ('r', 'w'):
raise ValueError("invalid mode %r" % mode)
import subprocess
if mode == 'r':
proc = subprocess.Popen(cmd, bufsize=bufsize, shell=True,
stdout=subprocess.PIPE)
fp = proc.stdout
elif mode == 'w':
proc = subprocess.Popen(cmd, bufsize=bufsize, shell=True,
stdin=subprocess.PIPE)
fp = proc.stdin
# files from subprocess are in binary mode but popen needs text mode
fp = fdopen(fp.fileno(), mode, bufsize)
return _wrap_close(fp, proc)
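# Editor's sketch of typical usage (hypothetical command):
#     f = popen('echo hello')
#     f.read()   # -> 'hello\n'
#     f.close()  # waits for the child; returns None on a zero exit status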
# Helper for popen() -- a proxy for a file whose close waits for the process
class _wrap_close(object):
def __init__(self, stream, proc):
self._stream = stream
self._proc = proc
def close(self):
self._stream.close()
returncode = self._proc.wait()
if returncode == 0:
return None
        if _name == 'nt':
            return returncode
        else:
            # shift left to match the historical encoding of os.popen()
            # exit statuses on POSIX systems
            return returncode << 8
def __getattr__(self, name):
return getattr(self._stream, name)
def __iter__(self):
return iter(self._stream)
|
drxaero/calibre
|
refs/heads/master
|
src/calibre/ebooks/oeb/polish/tests/base.py
|
14
|
#!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import os, unittest, shutil
from calibre import CurrentDir
from calibre.ptempfile import TemporaryDirectory
from calibre.ptempfile import PersistentTemporaryDirectory
from calibre.utils.logging import DevNull
import calibre.ebooks.oeb.polish.container as pc
def get_cache():
from calibre.constants import cache_dir
cache = os.path.join(cache_dir(), 'polish-test')
if not os.path.exists(cache):
os.mkdir(cache)
return cache
def needs_recompile(obj, srcs):
if isinstance(srcs, type('')):
srcs = [srcs]
try:
obj_mtime = os.stat(obj).st_mtime
except OSError:
return True
for src in srcs:
if os.stat(src).st_mtime > obj_mtime:
return True
return False
def build_book(src, dest, args=()):
from calibre.ebooks.conversion.cli import main
main(['ebook-convert', src, dest] + list(args))
def add_resources(raw, rmap):
for placeholder, path in rmap.iteritems():
fname = os.path.basename(path)
shutil.copy2(path, '.')
raw = raw.replace(placeholder, fname)
return raw
def get_simple_book(fmt='epub'):
cache = get_cache()
ans = os.path.join(cache, 'simple.'+fmt)
src = os.path.join(os.path.dirname(__file__), 'simple.html')
if needs_recompile(ans, src):
with TemporaryDirectory('bpt') as tdir:
with CurrentDir(tdir):
raw = open(src, 'rb').read().decode('utf-8')
raw = add_resources(raw, {
'LMONOI': P('fonts/liberation/LiberationMono-Italic.ttf'),
'LMONOR': P('fonts/liberation/LiberationMono-Regular.ttf'),
'IMAGE1': I('marked.png'),
'IMAGE2': I('textures/light_wood.png'),
})
shutil.copy2(I('lt.png'), '.')
x = 'index.html'
with open(x, 'wb') as f:
f.write(raw.encode('utf-8'))
build_book(x, ans, args=[
'--level1-toc=//h:h2', '--language=en', '--authors=Kovid Goyal', '--cover=lt.png'])
return ans
def get_split_book(fmt='epub'):
cache = get_cache()
ans = os.path.join(cache, 'split.'+fmt)
src = os.path.join(os.path.dirname(__file__), 'split.html')
if needs_recompile(ans, src):
x = src.replace('split.html', 'index.html')
raw = open(src, 'rb').read().decode('utf-8')
try:
with open(x, 'wb') as f:
f.write(raw.encode('utf-8'))
build_book(x, ans, args=['--level1-toc=//h:h2', '--language=en', '--authors=Kovid Goyal',
'--cover=' + I('lt.png')])
finally:
os.remove(x)
return ans
devnull = DevNull()
class BaseTest(unittest.TestCase):
longMessage = True
maxDiff = None
def setUp(self):
pc.default_log = devnull
self.tdir = PersistentTemporaryDirectory(suffix='-polish-test')
def tearDown(self):
shutil.rmtree(self.tdir, ignore_errors=True)
del self.tdir
def check_links(self, container):
for name in container.name_path_map:
for link in container.iterlinks(name, get_line_numbers=False):
dest = container.href_to_name(link, name)
if dest:
self.assertTrue(container.exists(dest), 'The link %s in %s does not exist' % (link, name))
|
imsplitbit/nova
|
refs/heads/master
|
nova/openstack/common/rpc/service.py
|
13
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2011 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.openstack.common.gettextutils import _ # noqa
from nova.openstack.common import log as logging
from nova.openstack.common import rpc
from nova.openstack.common.rpc import dispatcher as rpc_dispatcher
from nova.openstack.common import service
LOG = logging.getLogger(__name__)
class Service(service.Service):
"""Service object for binaries running on hosts.
A service enables rpc by listening to queues based on topic and host.
"""
def __init__(self, host, topic, manager=None, serializer=None):
super(Service, self).__init__()
self.host = host
self.topic = topic
self.serializer = serializer
if manager is None:
self.manager = self
else:
self.manager = manager
def start(self):
super(Service, self).start()
self.conn = rpc.create_connection(new=True)
LOG.debug(_("Creating Consumer connection for Service %s") %
self.topic)
dispatcher = rpc_dispatcher.RpcDispatcher([self.manager],
self.serializer)
# Share this same connection for these Consumers
self.conn.create_consumer(self.topic, dispatcher, fanout=False)
node_topic = '%s.%s' % (self.topic, self.host)
self.conn.create_consumer(node_topic, dispatcher, fanout=False)
self.conn.create_consumer(self.topic, dispatcher, fanout=True)
# Hook to allow the manager to do other initializations after
# the rpc connection is created.
if callable(getattr(self.manager, 'initialize_service_hook', None)):
self.manager.initialize_service_hook(self)
# Consume from all consumers in a thread
self.conn.consume_in_thread()
def stop(self):
        # Try to shut the connection down, but if we get any sort of
        # errors, go ahead and ignore them, as we're shutting down anyway
try:
self.conn.close()
except Exception:
pass
super(Service, self).stop()
|
Widiot/simpleblog
|
refs/heads/master
|
venv/lib/python3.5/site-packages/alembic/autogenerate/rewriter.py
|
38
|
from alembic import util
from alembic.operations import ops
class Rewriter(object):
"""A helper object that allows easy 'rewriting' of ops streams.
The :class:`.Rewriter` object is intended to be passed along
to the
:paramref:`.EnvironmentContext.configure.process_revision_directives`
parameter in an ``env.py`` script. Once constructed, any number
of "rewrites" functions can be associated with it, which will be given
the opportunity to modify the structure without having to have explicit
knowledge of the overall structure.
The function is passed the :class:`.MigrationContext` object and
    ``revision`` tuple that are passed to the
    :paramref:`.EnvironmentContext.configure.process_revision_directives`
    function normally,
and the third argument is an individual directive of the type
noted in the decorator. The function has the choice of returning
a single op directive, which normally can be the directive that
was actually passed, or a new directive to replace it, or a list
of zero or more directives to replace it.
.. seealso::
:ref:`autogen_rewriter` - usage example
.. versionadded:: 0.8
"""
_traverse = util.Dispatcher()
_chained = None
def __init__(self):
self.dispatch = util.Dispatcher()
def chain(self, other):
"""Produce a "chain" of this :class:`.Rewriter` to another.
This allows two rewriters to operate serially on a stream,
e.g.::
writer1 = autogenerate.Rewriter()
writer2 = autogenerate.Rewriter()
@writer1.rewrites(ops.AddColumnOp)
def add_column_nullable(context, revision, op):
op.column.nullable = True
return op
@writer2.rewrites(ops.AddColumnOp)
def add_column_idx(context, revision, op):
idx_op = ops.CreateIndexOp(
'ixc', op.table_name, [op.column.name])
return [
op,
idx_op
]
writer = writer1.chain(writer2)
:param other: a :class:`.Rewriter` instance
:return: a new :class:`.Rewriter` that will run the operations
of this writer, then the "other" writer, in succession.
"""
wr = self.__class__.__new__(self.__class__)
wr.__dict__.update(self.__dict__)
wr._chained = other
return wr
def rewrites(self, operator):
"""Register a function as rewriter for a given type.
The function should receive three arguments, which are
the :class:`.MigrationContext`, a ``revision`` tuple, and
an op directive of the type indicated. E.g.::
@writer1.rewrites(ops.AddColumnOp)
def add_column_nullable(context, revision, op):
op.column.nullable = True
return op
"""
return self.dispatch.dispatch_for(operator)
def _rewrite(self, context, revision, directive):
try:
_rewriter = self.dispatch.dispatch(directive)
except ValueError:
_rewriter = None
yield directive
else:
for r_directive in util.to_list(
_rewriter(context, revision, directive)):
yield r_directive
def __call__(self, context, revision, directives):
self.process_revision_directives(context, revision, directives)
if self._chained:
self._chained(context, revision, directives)
@_traverse.dispatch_for(ops.MigrationScript)
def _traverse_script(self, context, revision, directive):
upgrade_ops_list = []
for upgrade_ops in directive.upgrade_ops_list:
            ret = self._traverse_for(context, revision, upgrade_ops)
if len(ret) != 1:
raise ValueError(
"Can only return single object for UpgradeOps traverse")
upgrade_ops_list.append(ret[0])
directive.upgrade_ops = upgrade_ops_list
downgrade_ops_list = []
for downgrade_ops in directive.downgrade_ops_list:
            ret = self._traverse_for(
                context, revision, downgrade_ops)
if len(ret) != 1:
raise ValueError(
"Can only return single object for DowngradeOps traverse")
downgrade_ops_list.append(ret[0])
directive.downgrade_ops = downgrade_ops_list
@_traverse.dispatch_for(ops.OpContainer)
def _traverse_op_container(self, context, revision, directive):
self._traverse_list(context, revision, directive.ops)
@_traverse.dispatch_for(ops.MigrateOperation)
def _traverse_any_directive(self, context, revision, directive):
pass
def _traverse_for(self, context, revision, directive):
directives = list(self._rewrite(context, revision, directive))
for directive in directives:
traverser = self._traverse.dispatch(directive)
traverser(self, context, revision, directive)
return directives
def _traverse_list(self, context, revision, directives):
dest = []
for directive in directives:
dest.extend(self._traverse_for(context, revision, directive))
directives[:] = dest
def process_revision_directives(self, context, revision, directives):
self._traverse_list(context, revision, directives)
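# Wiring sketch for an ``env.py`` (a hedged illustration, not part of
# the original module): the Rewriter instance is itself callable (see
# ``__call__`` above), so it can be passed directly as the hook.
#
#     from alembic import context
#
#     writer = Rewriter()
#
#     @writer.rewrites(ops.AddColumnOp)
#     def force_nullable(ctx, revision, op):
#         op.column.nullable = True
#         return op
#
#     context.configure(
#         # ... connection/url settings ...
#         process_revision_directives=writer,
#     )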
|
SAM-IT-SA/odoo
|
refs/heads/8.0
|
addons/l10n_ca/__init__.py
|
438
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
shubhdev/openedx
|
refs/heads/master
|
common/lib/xmodule/xmodule/partitions/tests/__init__.py
|
12133432
| |
praekelt/vumi-go
|
refs/heads/develop
|
go/conversation/templatetags/__init__.py
|
12133432
| |
mexeniz/django-oscar
|
refs/heads/master
|
tests/unit/dashboard/__init__.py
|
12133432
| |
richardcs/ansible
|
refs/heads/devel
|
lib/ansible/modules/net_tools/netbox/__init__.py
|
12133432
| |
kuba/letsencrypt
|
refs/heads/master
|
acme/acme/util.py
|
52
|
"""ACME utilities."""
import six
def map_keys(dikt, func):
"""Map dictionary keys."""
return dict((func(key), value) for key, value in six.iteritems(dikt))
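# Minimal usage sketch (hypothetical values): keys pass through ``func``,
# values are untouched.
#
#     >>> map_keys({'Content-Type': 'text/plain'}, str.lower)
#     {'content-type': 'text/plain'}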
|
openlabs/mongo-python-driver
|
refs/heads/master
|
distribute_setup.py
|
6
|
#!python
"""Bootstrap distribute installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory as it, and add this to the top of your setup.py::
from distribute_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import os
import sys
import time
import fnmatch
import tempfile
import tarfile
from distutils import log
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
try:
import subprocess
def _python_cmd(*args):
args = (sys.executable,) + args
return subprocess.call(args) == 0
except ImportError:
# will be used for python 2.3
def _python_cmd(*args):
args = (sys.executable,) + args
        # quote arguments on Windows
if sys.platform == 'win32':
def quote(arg):
if ' ' in arg:
return '"%s"' % arg
return arg
args = [quote(arg) for arg in args]
return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
DEFAULT_VERSION = "0.6.25"
DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
SETUPTOOLS_FAKED_VERSION = "0.6c11"
SETUPTOOLS_PKG_INFO = """\
Metadata-Version: 1.0
Name: setuptools
Version: %s
Summary: xxxx
Home-page: xxx
Author: xxx
Author-email: xxx
License: xxx
Description: xxx
""" % SETUPTOOLS_FAKED_VERSION
def _install(tarball, install_args=()):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# installing
log.warn('Installing Distribute')
if not _python_cmd('setup.py', 'install', *install_args):
log.warn('Something went wrong during the installation.')
log.warn('See the error message above.')
finally:
os.chdir(old_wd)
def _build_egg(egg, tarball, to_dir):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# building an egg
log.warn('Building a Distribute egg in %s', to_dir)
_python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
finally:
os.chdir(old_wd)
# returning the result
log.warn(egg)
if not os.path.exists(egg):
raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
% (version, sys.version_info[0], sys.version_info[1]))
if not os.path.exists(egg):
tarball = download_setuptools(version, download_base,
to_dir, download_delay)
_build_egg(egg, tarball, to_dir)
sys.path.insert(0, egg)
import setuptools
setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, download_delay=15, no_fake=True):
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
was_imported = 'pkg_resources' in sys.modules or \
'setuptools' in sys.modules
try:
try:
import pkg_resources
if not hasattr(pkg_resources, '_distribute'):
if not no_fake:
_fake_setuptools()
raise ImportError
except ImportError:
return _do_download(version, download_base, to_dir, download_delay)
try:
pkg_resources.require("distribute>="+version)
return
except pkg_resources.VersionConflict:
e = sys.exc_info()[1]
if was_imported:
sys.stderr.write(
"The required version of distribute (>=%s) is not available,\n"
"and can't be installed while this script is running. Please\n"
"install a more recent version first, using\n"
"'easy_install -U distribute'."
"\n\n(Currently using %r)\n" % (version, e.args[0]))
sys.exit(2)
else:
del pkg_resources, sys.modules['pkg_resources'] # reload ok
return _do_download(version, download_base, to_dir,
download_delay)
except pkg_resources.DistributionNotFound:
return _do_download(version, download_base, to_dir,
download_delay)
finally:
if not no_fake:
_create_fake_setuptools_pkg_info(to_dir)
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, delay=15):
"""Download distribute from a specified location and return its filename
`version` should be a valid distribute version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download
attempt.
"""
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
tgz_name = "distribute-%s.tar.gz" % version
url = download_base + tgz_name
saveto = os.path.join(to_dir, tgz_name)
src = dst = None
if not os.path.exists(saveto): # Avoid repeated downloads
try:
log.warn("Downloading %s", url)
src = urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = src.read()
dst = open(saveto, "wb")
dst.write(data)
finally:
if src:
src.close()
if dst:
dst.close()
return os.path.realpath(saveto)
def _no_sandbox(function):
def __no_sandbox(*args, **kw):
try:
from setuptools.sandbox import DirectorySandbox
if not hasattr(DirectorySandbox, '_old'):
def violation(*args):
pass
DirectorySandbox._old = DirectorySandbox._violation
DirectorySandbox._violation = violation
patched = True
else:
patched = False
except ImportError:
patched = False
try:
return function(*args, **kw)
finally:
if patched:
DirectorySandbox._violation = DirectorySandbox._old
del DirectorySandbox._old
return __no_sandbox
def _patch_file(path, content):
"""Will backup the file then patch it"""
existing_content = open(path).read()
if existing_content == content:
# already patched
log.warn('Already patched.')
return False
log.warn('Patching...')
_rename_path(path)
f = open(path, 'w')
try:
f.write(content)
finally:
f.close()
return True
_patch_file = _no_sandbox(_patch_file)
def _same_content(path, content):
return open(path).read() == content
def _rename_path(path):
new_name = path + '.OLD.%s' % time.time()
log.warn('Renaming %s into %s', path, new_name)
os.rename(path, new_name)
return new_name
def _remove_flat_installation(placeholder):
if not os.path.isdir(placeholder):
        log.warn('Unknown installation at %s', placeholder)
return False
found = False
for file in os.listdir(placeholder):
if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
found = True
break
if not found:
log.warn('Could not locate setuptools*.egg-info')
return
log.warn('Removing elements out of the way...')
pkg_info = os.path.join(placeholder, file)
if os.path.isdir(pkg_info):
patched = _patch_egg_dir(pkg_info)
else:
patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
if not patched:
log.warn('%s already patched.', pkg_info)
return False
# now let's move the files out of the way
for element in ('setuptools', 'pkg_resources.py', 'site.py'):
element = os.path.join(placeholder, element)
if os.path.exists(element):
_rename_path(element)
else:
log.warn('Could not find the %s element of the '
'Setuptools distribution', element)
return True
_remove_flat_installation = _no_sandbox(_remove_flat_installation)
def _after_install(dist):
log.warn('After install bootstrap.')
placeholder = dist.get_command_obj('install').install_purelib
_create_fake_setuptools_pkg_info(placeholder)
def _create_fake_setuptools_pkg_info(placeholder):
if not placeholder or not os.path.exists(placeholder):
log.warn('Could not find the install location')
return
pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
setuptools_file = 'setuptools-%s-py%s.egg-info' % \
(SETUPTOOLS_FAKED_VERSION, pyver)
pkg_info = os.path.join(placeholder, setuptools_file)
if os.path.exists(pkg_info):
log.warn('%s already exists', pkg_info)
return
log.warn('Creating %s', pkg_info)
f = open(pkg_info, 'w')
try:
f.write(SETUPTOOLS_PKG_INFO)
finally:
f.close()
pth_file = os.path.join(placeholder, 'setuptools.pth')
log.warn('Creating %s', pth_file)
f = open(pth_file, 'w')
try:
f.write(os.path.join(os.curdir, setuptools_file))
finally:
f.close()
_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info)
def _patch_egg_dir(path):
# let's check if it's already patched
pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
if os.path.exists(pkg_info):
if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
log.warn('%s already patched.', pkg_info)
return False
_rename_path(path)
os.mkdir(path)
os.mkdir(os.path.join(path, 'EGG-INFO'))
pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
f = open(pkg_info, 'w')
try:
f.write(SETUPTOOLS_PKG_INFO)
finally:
f.close()
return True
_patch_egg_dir = _no_sandbox(_patch_egg_dir)
def _before_install():
log.warn('Before install bootstrap.')
_fake_setuptools()
def _under_prefix(location):
if 'install' not in sys.argv:
return True
args = sys.argv[sys.argv.index('install')+1:]
for index, arg in enumerate(args):
for option in ('--root', '--prefix'):
if arg.startswith('%s=' % option):
                top_dir = arg.split('=', 1)[-1]  # handles both --root= and --prefix=
return location.startswith(top_dir)
elif arg == option:
if len(args) > index:
top_dir = args[index+1]
return location.startswith(top_dir)
if arg == '--user' and USER_SITE is not None:
return location.startswith(USER_SITE)
return True
def _fake_setuptools():
log.warn('Scanning installed packages')
try:
import pkg_resources
except ImportError:
# we're cool
log.warn('Setuptools or Distribute does not seem to be installed.')
return
ws = pkg_resources.working_set
try:
setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
replacement=False))
except TypeError:
# old distribute API
setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))
if setuptools_dist is None:
log.warn('No setuptools distribution found')
return
# detecting if it was already faked
setuptools_location = setuptools_dist.location
log.warn('Setuptools installation detected at %s', setuptools_location)
    # if --root or --prefix was provided, and if
# setuptools is not located in them, we don't patch it
if not _under_prefix(setuptools_location):
log.warn('Not patching, --root or --prefix is installing Distribute'
' in another location')
return
# let's see if its an egg
if not setuptools_location.endswith('.egg'):
log.warn('Non-egg installation')
res = _remove_flat_installation(setuptools_location)
if not res:
return
else:
log.warn('Egg installation')
pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
if (os.path.exists(pkg_info) and
_same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
log.warn('Already patched.')
return
log.warn('Patching...')
# let's create a fake egg replacing setuptools one
res = _patch_egg_dir(setuptools_location)
if not res:
return
    log.warn('Patching done.')
_relaunch()
def _relaunch():
log.warn('Relaunching...')
# we have to relaunch the process
# pip marker to avoid a relaunch bug
if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']:
sys.argv[0] = 'setup.py'
args = [sys.executable] + sys.argv
sys.exit(subprocess.call(args))
def _extractall(self, path=".", members=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers().
"""
import copy
import operator
from tarfile import ExtractError
directories = []
if members is None:
members = self
for tarinfo in members:
if tarinfo.isdir():
# Extract directories with a safe mode.
directories.append(tarinfo)
tarinfo = copy.copy(tarinfo)
tarinfo.mode = 448 # decimal for oct 0700
self.extract(tarinfo, path)
# Reverse sort directories.
if sys.version_info < (2, 4):
def sorter(dir1, dir2):
return cmp(dir1.name, dir2.name)
directories.sort(sorter)
directories.reverse()
else:
directories.sort(key=operator.attrgetter('name'), reverse=True)
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
dirpath = os.path.join(path, tarinfo.name)
try:
self.chown(tarinfo, dirpath)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError:
e = sys.exc_info()[1]
if self.errorlevel > 1:
raise
else:
self._dbg(1, "tarfile: %s" % e)
def _build_install_args(argv):
install_args = []
user_install = '--user' in argv
if user_install and sys.version_info < (2,6):
log.warn("--user requires Python 2.6 or later")
raise SystemExit(1)
if user_install:
install_args.append('--user')
return install_args
def main(argv, version=DEFAULT_VERSION):
"""Install or upgrade setuptools and EasyInstall"""
tarball = download_setuptools()
_install(tarball, _build_install_args(argv))
if __name__ == '__main__':
main(sys.argv[1:])
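# Bootstrap sketch for a project's setup.py (mirrors the module
# docstring; the project name below is a placeholder):
#
#     from distribute_setup import use_setuptools
#     use_setuptools()
#
#     from setuptools import setup
#     setup(name='example-project', version='0.1')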
|
samtx/whatsmyrankine
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/langthaimodel.py
|
2929
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# The following result for Thai was collected from a limited sample (1M).
# Character Mapping Table:
TIS620CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,
49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,
45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,
22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,
68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 92.6386%
# first 1024 sequences: 7.3177%
# rest sequences: 1.0230%
# negative sequences: 0.0436%
ThaiLangModel = (
0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)
TIS620ThaiModel = {
'charToOrderMap': TIS620CharToOrderMap,
'precedenceMatrix': ThaiLangModel,
'mTypicalPositiveRatio': 0.926386,
'keepEnglishLetter': False,
'charsetName': "TIS-620"
}
# flake8: noqa
|
viki9698/jizhanggroup
|
refs/heads/master
|
django/contrib/webdesign/lorem_ipsum.py
|
230
|
"""
Utility functions for generating "lorem ipsum" Latin text.
"""
from __future__ import unicode_literals
import random
COMMON_P = 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'
WORDS = ('exercitationem', 'perferendis', 'perspiciatis', 'laborum', 'eveniet',
'sunt', 'iure', 'nam', 'nobis', 'eum', 'cum', 'officiis', 'excepturi',
'odio', 'consectetur', 'quasi', 'aut', 'quisquam', 'vel', 'eligendi',
'itaque', 'non', 'odit', 'tempore', 'quaerat', 'dignissimos',
'facilis', 'neque', 'nihil', 'expedita', 'vitae', 'vero', 'ipsum',
'nisi', 'animi', 'cumque', 'pariatur', 'velit', 'modi', 'natus',
'iusto', 'eaque', 'sequi', 'illo', 'sed', 'ex', 'et', 'voluptatibus',
'tempora', 'veritatis', 'ratione', 'assumenda', 'incidunt', 'nostrum',
'placeat', 'aliquid', 'fuga', 'provident', 'praesentium', 'rem',
'necessitatibus', 'suscipit', 'adipisci', 'quidem', 'possimus',
'voluptas', 'debitis', 'sint', 'accusantium', 'unde', 'sapiente',
'voluptate', 'qui', 'aspernatur', 'laudantium', 'soluta', 'amet',
'quo', 'aliquam', 'saepe', 'culpa', 'libero', 'ipsa', 'dicta',
'reiciendis', 'nesciunt', 'doloribus', 'autem', 'impedit', 'minima',
'maiores', 'repudiandae', 'ipsam', 'obcaecati', 'ullam', 'enim',
'totam', 'delectus', 'ducimus', 'quis', 'voluptates', 'dolores',
'molestiae', 'harum', 'dolorem', 'quia', 'voluptatem', 'molestias',
'magni', 'distinctio', 'omnis', 'illum', 'dolorum', 'voluptatum', 'ea',
'quas', 'quam', 'corporis', 'quae', 'blanditiis', 'atque', 'deserunt',
'laboriosam', 'earum', 'consequuntur', 'hic', 'cupiditate',
'quibusdam', 'accusamus', 'ut', 'rerum', 'error', 'minus', 'eius',
'ab', 'ad', 'nemo', 'fugit', 'officia', 'at', 'in', 'id', 'quos',
'reprehenderit', 'numquam', 'iste', 'fugiat', 'sit', 'inventore',
'beatae', 'repellendus', 'magnam', 'recusandae', 'quod', 'explicabo',
'doloremque', 'aperiam', 'consequatur', 'asperiores', 'commodi',
'optio', 'dolor', 'labore', 'temporibus', 'repellat', 'veniam',
'architecto', 'est', 'esse', 'mollitia', 'nulla', 'a', 'similique',
'eos', 'alias', 'dolore', 'tenetur', 'deleniti', 'porro', 'facere',
'maxime', 'corrupti')
COMMON_WORDS = ('lorem', 'ipsum', 'dolor', 'sit', 'amet', 'consectetur',
'adipisicing', 'elit', 'sed', 'do', 'eiusmod', 'tempor', 'incididunt',
'ut', 'labore', 'et', 'dolore', 'magna', 'aliqua')
def sentence():
"""
Returns a randomly generated sentence of lorem ipsum text.
The first word is capitalized, and the sentence ends in either a period or
question mark. Commas are added at random.
"""
# Determine the number of comma-separated sections and number of words in
# each section for this sentence.
sections = [' '.join(random.sample(WORDS, random.randint(3, 12))) for i in range(random.randint(1, 5))]
s = ', '.join(sections)
# Convert to sentence case and add end punctuation.
return '%s%s%s' % (s[0].upper(), s[1:], random.choice('?.'))
def paragraph():
"""
Returns a randomly generated paragraph of lorem ipsum text.
The paragraph consists of between 1 and 4 sentences, inclusive.
"""
return ' '.join([sentence() for i in range(random.randint(1, 4))])
def paragraphs(count, common=True):
"""
Returns a list of paragraphs as returned by paragraph().
If `common` is True, then the first paragraph will be the standard
'lorem ipsum' paragraph. Otherwise, the first paragraph will be random
Latin text. Either way, subsequent paragraphs will be random Latin text.
"""
paras = []
for i in range(count):
if common and i == 0:
paras.append(COMMON_P)
else:
paras.append(paragraph())
return paras
def words(count, common=True):
"""
Returns a string of `count` lorem ipsum words separated by a single space.
If `common` is True, then the first 19 words will be the standard
'lorem ipsum' words. Otherwise, all words will be selected randomly.
"""
if common:
word_list = list(COMMON_WORDS)
else:
word_list = []
c = len(word_list)
if count > c:
count -= c
while count > 0:
c = min(count, len(WORDS))
count -= c
word_list += random.sample(WORDS, c)
else:
word_list = word_list[:count]
return ' '.join(word_list)
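if __name__ == '__main__':
    # Smoke-test sketch (added for illustration; apart from the leading
    # common words/paragraph, output is random Latin text).
    print(words(7))            # 'lorem ipsum dolor sit amet consectetur adipisicing'
    print(sentence())          # one random sentence
    print(len(paragraphs(3)))  # 3; paragraphs(3)[0] == COMMON_P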
|
venn177/heroesoflegend.py
|
refs/heads/master
|
setup.py
|
1
|
from distutils.core import setup
setup(
name = 'heroesoflegend.py',
packages = ['heroesoflegend.py'], # this must be the same as the name above
version = '0.1',
description = 'Converting Heroes of Legend to a python package',
author = 'venn177',
author_email = 'venn177@gmail.com',
url = 'https://github.com/venn177/heroesoflegend.py', # use the URL to the github repo
    download_url = 'https://github.com/venn177/heroesoflegend.py/tarball/0.1', # tarball for the tagged 0.1 release
keywords = ['testing', 'dnd'], # arbitrary keywords
classifiers = [],
)
|
Fsero/security_monkey
|
refs/heads/master
|
security_monkey/views/account.py
|
6
|
# Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from security_monkey.views import AuthenticatedService
from security_monkey.views import __check_auth__
from security_monkey.views import ACCOUNT_FIELDS
from security_monkey.datastore import Account
from security_monkey.datastore import User
from security_monkey import db
from flask.ext.restful import marshal, reqparse
class AccountGetPutDelete(AuthenticatedService):
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(AccountGetPutDelete, self).__init__()
def get(self, account_id):
"""
.. http:get:: /api/1/account/<int:id>
            Get the Account with the given ID
**Example Request**:
.. sourcecode:: http
GET /api/1/account/1 HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
third_party: false,
name: "example_name",
notes: null,
role_name: null,
number: "111111111111",
active: true,
id: 1,
s3_name: "example_name",
auth: {
authenticated: true,
user: "user@example.com"
}
}
:statuscode 200: no error
:statuscode 401: Authentication failure. Please login.
"""
auth, retval = __check_auth__(self.auth_dict)
if auth:
return retval
        result = Account.query.filter(Account.id == account_id).first()
        if not result:
            return {'status': 'error. Account ID not found.'}, 404
        account_marshaled = marshal(result.__dict__, ACCOUNT_FIELDS)
account_marshaled['auth'] = self.auth_dict
return account_marshaled, 200
def put(self, account_id):
"""
.. http:put:: /api/1/account/1
Edit an existing account.
**Example Request**:
.. sourcecode:: http
PUT /api/1/account/1 HTTP/1.1
Host: example.com
Accept: application/json
{
'name': 'edited_account'
's3_name': 'edited_account',
'number': '0123456789',
'notes': 'this account is for ...',
'role_name': 'CustomRole',
'active': true,
'third_party': false
}
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
'name': 'edited_account'
's3_name': 'edited_account',
'number': '0123456789',
'notes': 'this account is for ...',
'role_name': 'CustomRole',
'active': true,
'third_party': false
}
:statuscode 200: no error
:statuscode 401: Authentication Error. Please Login.
"""
auth, retval = __check_auth__(self.auth_dict)
if auth:
return retval
self.reqparse.add_argument('name', required=False, type=unicode, help='Must provide account name', location='json')
self.reqparse.add_argument('s3_name', required=False, type=unicode, help='Will use name if s3_name not provided.', location='json')
self.reqparse.add_argument('number', required=False, type=unicode, help='Add the account number if available.', location='json')
self.reqparse.add_argument('notes', required=False, type=unicode, help='Add context.', location='json')
self.reqparse.add_argument('role_name', required=False, type=unicode, help='Custom role name.', location='json')
self.reqparse.add_argument('active', required=False, type=bool, help='Determines whether this account should be interrogated by security monkey.', location='json')
self.reqparse.add_argument('third_party', required=False, type=bool, help='Determines whether this account is a known friendly third party account.', location='json')
args = self.reqparse.parse_args()
account = Account.query.filter(Account.id == account_id).first()
if not account:
return {'status': 'error. Account ID not found.'}, 404
account.name = args['name']
account.s3_name = args['s3_name']
account.number = args['number']
account.notes = args['notes']
account.role_name = args['role_name']
account.active = args['active']
account.third_party = args['third_party']
db.session.add(account)
db.session.commit()
db.session.refresh(account)
marshaled_account = marshal(account.__dict__, ACCOUNT_FIELDS)
marshaled_account['auth'] = self.auth_dict
return marshaled_account, 200
def delete(self, account_id):
"""
.. http:delete:: /api/1/account/1
Delete an account.
**Example Request**:
.. sourcecode:: http
DELETE /api/1/account/1 HTTP/1.1
Host: example.com
Accept: application/json
**Example Response**:
.. sourcecode:: http
HTTP/1.1 202 Accepted
Vary: Accept
Content-Type: application/json
{
'status': 'deleted'
}
:statuscode 202: accepted
:statuscode 401: Authentication Error. Please Login.
"""
auth, retval = __check_auth__(self.auth_dict)
if auth:
return retval
# Need to unsubscribe any users first:
users = User.query.filter(User.accounts.any(Account.id == account_id)).all()
for user in users:
user.accounts = [account for account in user.accounts if not account.id == account_id]
db.session.add(user)
db.session.commit()
account = Account.query.filter(Account.id == account_id).first()
db.session.delete(account)
db.session.commit()
return {'status': 'deleted'}, 202
class AccountPostList(AuthenticatedService):
def __init__(self):
super(AccountPostList, self).__init__()
self.reqparse = reqparse.RequestParser()
def post(self):
"""
.. http:post:: /api/1/account/
Create a new account.
**Example Request**:
.. sourcecode:: http
POST /api/1/account/ HTTP/1.1
Host: example.com
Accept: application/json
{
'name': 'new_account'
's3_name': 'new_account',
'number': '0123456789',
'notes': 'this account is for ...',
'role_name': 'CustomRole',
'active': true,
'third_party': false
}
**Example Response**:
.. sourcecode:: http
HTTP/1.1 201 Created
Vary: Accept
Content-Type: application/json
{
'name': 'new_account'
's3_name': 'new_account',
'number': '0123456789',
'notes': 'this account is for ...',
'role_name': 'CustomRole',
'active': true,
'third_party': false
}
:statuscode 201: created
:statuscode 401: Authentication Error. Please Login.
"""
auth, retval = __check_auth__(self.auth_dict)
if auth:
return retval
self.reqparse.add_argument('name', required=True, type=unicode, help='Must provide account name', location='json')
self.reqparse.add_argument('s3_name', required=False, type=unicode, help='Will use name if s3_name not provided.', location='json')
self.reqparse.add_argument('number', required=False, type=unicode, help='Add the account number if available.', location='json')
self.reqparse.add_argument('notes', required=False, type=unicode, help='Add context.', location='json')
self.reqparse.add_argument('role_name', required=False, type=unicode, help='Custom role name.', location='json')
self.reqparse.add_argument('active', required=False, type=bool, help='Determines whether this account should be interrogated by security monkey.', location='json')
self.reqparse.add_argument('third_party', required=False, type=bool, help='Determines whether this account is a known friendly third party account.', location='json')
args = self.reqparse.parse_args()
account = Account()
account.name = args['name']
account.s3_name = args.get('s3_name', args['name'])
account.number = args['number']
        account.notes = args['notes']
        account.role_name = args['role_name']
account.active = args['active']
account.third_party = args['third_party']
db.session.add(account)
db.session.commit()
db.session.refresh(account)
marshaled_account = marshal(account.__dict__, ACCOUNT_FIELDS)
marshaled_account['auth'] = self.auth_dict
return marshaled_account, 201
def get(self):
"""
.. http:get:: /api/1/accounts
Get a list of Accounts matching the given criteria
**Example Request**:
.. sourcecode:: http
GET /api/1/accounts HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
count: 1,
items: [
{
third_party: false,
name: "example_name",
notes: null,
role_name: null,
number: "111111111111",
active: true,
id: 1,
s3_name: "example_name"
},
],
total: 1,
page: 1,
auth: {
authenticated: true,
user: "user@example.com"
}
}
:statuscode 200: no error
:statuscode 401: Authentication failure. Please login.
"""
auth, retval = __check_auth__(self.auth_dict)
if auth:
return retval
self.reqparse.add_argument('count', type=int, default=30, location='args')
self.reqparse.add_argument('page', type=int, default=1, location='args')
args = self.reqparse.parse_args()
page = args.pop('page', None)
count = args.pop('count', None)
result = Account.query.order_by(Account.id).paginate(page, count, error_out=False)
items = []
for account in result.items:
account_marshaled = marshal(account.__dict__, ACCOUNT_FIELDS)
items.append(account_marshaled)
marshaled_dict = {
'total': result.total,
'count': len(items),
'page': result.page,
'items': items,
'auth': self.auth_dict
}
return marshaled_dict, 200
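# Client-side sketch (hypothetical host; assumes an authenticated
# session per the app's login flow) exercising the list endpoint
# documented above with the ``requests`` library:
#
#     import requests
#     resp = requests.get('https://example.com/api/1/accounts',
#                         params={'count': 10, 'page': 1})
#     accounts = resp.json()['items']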
|
ysekky/chainer
|
refs/heads/master
|
tests/chainer_tests/functions_tests/array_tests/test_dstack.py
|
6
|
import unittest
import numpy
import six
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.utils import type_check
@testing.parameterize(*testing.product_dict(
[
{'shape': (2, 3, 4), 'y_shape': (2, 3, 8), 'xs_length': 2},
{'shape': (3, 4), 'y_shape': (3, 4, 2), 'xs_length': 2},
{'shape': (3), 'y_shape': (1, 3, 2), 'xs_length': 2},
{'shape': (), 'y_shape': (1, 1, 2), 'xs_length': 2},
{'shape': (3, 4), 'y_shape': (3, 4, 1), 'xs_length': 1},
{'shape': (3), 'y_shape': (1, 3, 1), 'xs_length': 1},
{'shape': (), 'y_shape': (1, 1, 1), 'xs_length': 1},
],
[
{'dtype': numpy.float16},
{'dtype': numpy.float32},
{'dtype': numpy.float64},
]
))
class TestDstack(unittest.TestCase):
def setUp(self):
self.xs = [
numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
for i in six.moves.range(self.xs_length)
]
self.g = numpy.random.uniform(-1, 1, self.y_shape).astype(self.dtype)
def check_forward(self, xs_data):
xs = [chainer.Variable(x) for x in xs_data]
y = functions.dstack(xs)
expect = numpy.dstack(self.xs)
testing.assert_allclose(y.data, expect)
def test_forward_cpu(self):
self.check_forward(self.xs)
@attr.gpu
def test_forward_gpu(self):
self.check_forward([cuda.to_gpu(x) for x in self.xs])
def check_backward(self, xs_data, g_data):
def func(*xs):
return functions.dstack(xs)
gradient_check.check_backward(
func, xs_data, g_data, eps=2.0 ** -2, atol=1e-3, rtol=1e-3)
def test_backward_cpu(self):
self.check_backward(self.xs, self.g)
@attr.gpu
def test_backward_gpu(self):
self.check_backward(
[cuda.to_gpu(x) for x in self.xs], cuda.to_gpu(self.g))
@testing.parameterize(
{'a_shape': (2, 3, 4, 6), 'b_shape': (2, 3, 4, 5), 'valid': False},
{'a_shape': (2, 3, 5, 6), 'b_shape': (2, 3, 4, 6), 'valid': True},
{'a_shape': (2, 4, 5), 'b_shape': (3, 4, 5), 'valid': False},
{'a_shape': (3, 4, 6), 'b_shape': (3, 4, 5), 'valid': True},
{'a_shape': (3, 6, 5), 'b_shape': (3, 4, 5), 'valid': False},
{'a_shape': (3, 4), 'b_shape': (4, 4), 'valid': False},
{'a_shape': (3, 4), 'b_shape': (3, 3), 'valid': False},
{'a_shape': (3,), 'b_shape': (4,), 'valid': False},
{'a_shape': (3), 'b_shape': (3, 3), 'valid': False},
{'a_shape': (), 'b_shape': (1), 'valid': False},
)
class TestDstackTypeCheck(unittest.TestCase):
def setUp(self):
self.xs = [
numpy.random.uniform(-1, 1, self.a_shape).astype(numpy.float32),
numpy.random.uniform(-1, 1, self.b_shape).astype(numpy.float32),
]
def check_value_check(self):
if self.valid:
            # Check that no exception is raised
functions.dstack(self.xs)
else:
with self.assertRaises(type_check.InvalidType):
functions.dstack(self.xs)
def test_value_check_cpu(self):
self.check_value_check()
@attr.gpu
def test_value_check_gpu(self):
self.check_value_check()
testing.run_module(__name__, __file__)
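# Shape intuition for the parameterized cases above (a sketch mirroring
# numpy.dstack, which stacks along a third axis):
#   two (3, 4) inputs    -> new third axis    -> (3, 4, 2)
#   two (2, 3, 4) inputs -> concat on axis 2  -> (2, 3, 8)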
|
au9ustine/org.au9ustine.puzzles.codility
|
refs/heads/master
|
test/__init__.py
|
75
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
|
dluschan/school
|
refs/heads/master
|
olymp/divisible.py
|
1
|
# Build the largest number divisible by 3 from the input's digits: a
# number is divisible by 3 iff its digit sum is, so drop as few (and as
# small) digits as possible, then print the rest in descending order.
digits = [0 for i in range(10)]
for c in input():
    digits[int(c)] += 1
total = 0  # digit sum ('total' avoids shadowing the builtin sum)
for i in range(10):
    total += i * digits[i]
# First try dropping a single smallest digit whose removal makes the
# digit sum divisible by 3.
if total % 3 != 0:
    for i in range(10):
        if digits[i] > 0 and (total - i) % 3 == 0:
            digits[i] -= 1
            total -= i
            break
# If no single digit sufficed, drop up to two smallest digits that are
# not multiples of 3; two digits of the same nonzero residue always
# restore divisibility.
for step in range(2):
    if total % 3 != 0:
        for i in range(10):
            if digits[i] > 0 and i % 3 != 0:
                digits[i] -= 1
                total -= i
                break
# Print remaining digits from 9 down to 0 to maximize the value.
for i in range(9, -1, -1):
    print(str(i) * digits[i], end='')
print()
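# Worked example (assumed input '1238'): the digit sum is 14 and
# 14 % 3 == 2, so the smallest droppable digit i with (14 - i) % 3 == 0
# is 2; dropping it leaves 1, 3, 8 and the program prints 831.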
|
lulandco/SickRage
|
refs/heads/develop
|
lib/js2py/utils/__init__.py
|
12133432
| |
sudheerchintala/LearnEraPlatForm
|
refs/heads/master
|
common/test/acceptance/pages/__init__.py
|
12133432
| |
yencarnacion/jaikuengine
|
refs/heads/master
|
.google_appengine/lib/django-1.5/tests/modeltests/serializers/tests.py
|
51
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from datetime import datetime
from xml.dom import minidom
from django.conf import settings
from django.core import serializers
from django.db import transaction, connection
from django.test import TestCase, TransactionTestCase, Approximate
from django.utils import six
from django.utils.six import StringIO
from django.utils import unittest
from .models import (Category, Author, Article, AuthorProfile, Actor, Movie,
Score, Player, Team)
class SerializerRegistrationTests(unittest.TestCase):
def setUp(self):
self.old_SERIALIZATION_MODULES = getattr(settings, 'SERIALIZATION_MODULES', None)
self.old_serializers = serializers._serializers
serializers._serializers = {}
settings.SERIALIZATION_MODULES = {
"json2" : "django.core.serializers.json",
}
def tearDown(self):
serializers._serializers = self.old_serializers
if self.old_SERIALIZATION_MODULES:
settings.SERIALIZATION_MODULES = self.old_SERIALIZATION_MODULES
else:
delattr(settings, 'SERIALIZATION_MODULES')
def test_register(self):
"Registering a new serializer populates the full registry. Refs #14823"
serializers.register_serializer('json3', 'django.core.serializers.json')
public_formats = serializers.get_public_serializer_formats()
self.assertIn('json3', public_formats)
self.assertIn('json2', public_formats)
self.assertIn('xml', public_formats)
def test_unregister(self):
"Unregistering a serializer doesn't cause the registry to be repopulated. Refs #14823"
serializers.unregister_serializer('xml')
serializers.register_serializer('json3', 'django.core.serializers.json')
public_formats = serializers.get_public_serializer_formats()
self.assertNotIn('xml', public_formats)
self.assertIn('json3', public_formats)
def test_builtin_serializers(self):
"Requesting a list of serializer formats popuates the registry"
all_formats = set(serializers.get_serializer_formats())
public_formats = set(serializers.get_public_serializer_formats())
        self.assertIn('xml', all_formats)
self.assertIn('xml', public_formats)
self.assertIn('json2', all_formats)
self.assertIn('json2', public_formats)
self.assertIn('python', all_formats)
self.assertNotIn('python', public_formats)
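# Quick reference for the API exercised by the tests below (a sketch;
# Article is one of the test models imported above):
#
#     data = serializers.serialize("json", Article.objects.all())
#     for obj in serializers.deserialize("json", data):
#         obj.save()   # DeserializedObject.save() writes the instance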
class SerializersTestBase(object):
@staticmethod
def _comparison_value(value):
return value
def setUp(self):
sports = Category.objects.create(name="Sports")
music = Category.objects.create(name="Music")
op_ed = Category.objects.create(name="Op-Ed")
self.joe = Author.objects.create(name="Joe")
self.jane = Author.objects.create(name="Jane")
self.a1 = Article(
author=self.jane,
headline="Poker has no place on ESPN",
pub_date=datetime(2006, 6, 16, 11, 00)
)
self.a1.save()
self.a1.categories = [sports, op_ed]
self.a2 = Article(
author=self.joe,
headline="Time to reform copyright",
pub_date=datetime(2006, 6, 16, 13, 00, 11, 345)
)
self.a2.save()
self.a2.categories = [music, op_ed]
def test_serialize(self):
"""Tests that basic serialization works."""
serial_str = serializers.serialize(self.serializer_name,
Article.objects.all())
self.assertTrue(self._validate_output(serial_str))
def test_serializer_roundtrip(self):
"""Tests that serialized content can be deserialized."""
serial_str = serializers.serialize(self.serializer_name,
Article.objects.all())
models = list(serializers.deserialize(self.serializer_name, serial_str))
self.assertEqual(len(models), 2)
def test_altering_serialized_output(self):
"""
Tests the ability to create new objects by
modifying serialized content.
"""
old_headline = "Poker has no place on ESPN"
new_headline = "Poker has no place on television"
serial_str = serializers.serialize(self.serializer_name,
Article.objects.all())
serial_str = serial_str.replace(old_headline, new_headline)
models = list(serializers.deserialize(self.serializer_name, serial_str))
# Prior to saving, old headline is in place
self.assertTrue(Article.objects.filter(headline=old_headline))
self.assertFalse(Article.objects.filter(headline=new_headline))
for model in models:
model.save()
# After saving, new headline is in place
self.assertTrue(Article.objects.filter(headline=new_headline))
self.assertFalse(Article.objects.filter(headline=old_headline))
def test_one_to_one_as_pk(self):
"""
Tests that if you use your own primary key field
(such as a OneToOneField), it doesn't appear in the
serialized field list - it replaces the pk identifier.
"""
profile = AuthorProfile(author=self.joe,
date_of_birth=datetime(1970,1,1))
profile.save()
serial_str = serializers.serialize(self.serializer_name,
AuthorProfile.objects.all())
self.assertFalse(self._get_field_values(serial_str, 'author'))
for obj in serializers.deserialize(self.serializer_name, serial_str):
self.assertEqual(obj.object.pk, self._comparison_value(self.joe.pk))
def test_serialize_field_subset(self):
"""Tests that output can be restricted to a subset of fields"""
valid_fields = ('headline','pub_date')
invalid_fields = ("author", "categories")
serial_str = serializers.serialize(self.serializer_name,
Article.objects.all(),
fields=valid_fields)
for field_name in invalid_fields:
self.assertFalse(self._get_field_values(serial_str, field_name))
for field_name in valid_fields:
self.assertTrue(self._get_field_values(serial_str, field_name))
def test_serialize_unicode(self):
"""Tests that unicode makes the roundtrip intact"""
actor_name = "Za\u017c\u00f3\u0142\u0107"
movie_title = 'G\u0119\u015bl\u0105 ja\u017a\u0144'
ac = Actor(name=actor_name)
mv = Movie(title=movie_title, actor=ac)
ac.save()
mv.save()
serial_str = serializers.serialize(self.serializer_name, [mv])
self.assertEqual(self._get_field_values(serial_str, "title")[0], movie_title)
self.assertEqual(self._get_field_values(serial_str, "actor")[0], actor_name)
obj_list = list(serializers.deserialize(self.serializer_name, serial_str))
mv_obj = obj_list[0].object
self.assertEqual(mv_obj.title, movie_title)
def test_serialize_superfluous_queries(self):
"""Ensure no superfluous queries are made when serializing ForeignKeys
#17602
"""
ac = Actor(name='Actor name')
ac.save()
mv = Movie(title='Movie title', actor_id=ac.pk)
mv.save()
with self.assertNumQueries(0):
serial_str = serializers.serialize(self.serializer_name, [mv])
def test_serialize_with_null_pk(self):
"""
Tests that serialized data with no primary key results
in a model instance with no id
"""
category = Category(name="Reference")
serial_str = serializers.serialize(self.serializer_name, [category])
pk_value = self._get_pk_values(serial_str)[0]
self.assertFalse(pk_value)
cat_obj = list(serializers.deserialize(self.serializer_name,
serial_str))[0].object
self.assertEqual(cat_obj.id, None)
def test_float_serialization(self):
"""Tests that float values serialize and deserialize intact"""
sc = Score(score=3.4)
sc.save()
serial_str = serializers.serialize(self.serializer_name, [sc])
deserial_objs = list(serializers.deserialize(self.serializer_name,
serial_str))
self.assertEqual(deserial_objs[0].object.score, Approximate(3.4, places=1))
def test_custom_field_serialization(self):
"""Tests that custom fields serialize and deserialize intact"""
team_str = "Spartak Moskva"
player = Player()
player.name = "Soslan Djanaev"
player.rank = 1
player.team = Team(team_str)
player.save()
serial_str = serializers.serialize(self.serializer_name,
Player.objects.all())
team = self._get_field_values(serial_str, "team")
self.assertTrue(team)
self.assertEqual(team[0], team_str)
deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str))
self.assertEqual(deserial_objs[0].object.team.to_string(),
player.team.to_string())
def test_pre_1000ad_date(self):
"""Tests that year values before 1000AD are properly formatted"""
# Regression for #12524 -- dates before 1000AD get prefixed
# 0's on the year
a = Article.objects.create(
author = self.jane,
headline = "Nobody remembers the early years",
pub_date = datetime(1, 2, 3, 4, 5, 6))
serial_str = serializers.serialize(self.serializer_name, [a])
date_values = self._get_field_values(serial_str, "pub_date")
self.assertEqual(date_values[0].replace('T', ' '), "0001-02-03 04:05:06")
def test_pkless_serialized_strings(self):
"""
Tests that serialized strings without PKs
can be turned into models
"""
deserial_objs = list(serializers.deserialize(self.serializer_name,
self.pkless_str))
for obj in deserial_objs:
self.assertFalse(obj.object.id)
obj.save()
self.assertEqual(Category.objects.all().count(), 4)
class SerializersTransactionTestBase(object):
def test_forward_refs(self):
"""
Tests that objects ids can be referenced before they are
defined in the serialization data.
"""
# The deserialization process needs to be contained
# within a transaction in order to test forward reference
# handling.
transaction.enter_transaction_management()
transaction.managed(True)
objs = serializers.deserialize(self.serializer_name, self.fwd_ref_str)
with connection.constraint_checks_disabled():
for obj in objs:
obj.save()
transaction.commit()
transaction.leave_transaction_management()
for model_cls in (Category, Author, Article):
self.assertEqual(model_cls.objects.all().count(), 1)
art_obj = Article.objects.all()[0]
self.assertEqual(art_obj.categories.all().count(), 1)
self.assertEqual(art_obj.author.name, "Agnes")
class XmlSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "xml"
pkless_str = """<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object model="serializers.category">
<field type="CharField" name="name">Reference</field>
</object>
</django-objects>"""
@staticmethod
def _comparison_value(value):
# The XML serializer handles everything as strings, so comparisons
# need to be performed on the stringified value
return six.text_type(value)
@staticmethod
def _validate_output(serial_str):
try:
minidom.parseString(serial_str)
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
ret_list = []
dom = minidom.parseString(serial_str)
fields = dom.getElementsByTagName("object")
for field in fields:
ret_list.append(field.getAttribute("pk"))
return ret_list
@staticmethod
def _get_field_values(serial_str, field_name):
ret_list = []
dom = minidom.parseString(serial_str)
fields = dom.getElementsByTagName("field")
for field in fields:
if field.getAttribute("name") == field_name:
temp = []
for child in field.childNodes:
temp.append(child.nodeValue)
ret_list.append("".join(temp))
return ret_list
class XmlSerializerTransactionTestCase(SerializersTransactionTestBase, TransactionTestCase):
serializer_name = "xml"
fwd_ref_str = """<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="1" model="serializers.article">
<field to="serializers.author" name="author" rel="ManyToOneRel">1</field>
<field type="CharField" name="headline">Forward references pose no problem</field>
<field type="DateTimeField" name="pub_date">2006-06-16T15:00:00</field>
<field to="serializers.category" name="categories" rel="ManyToManyRel">
<object pk="1"></object>
</field>
</object>
<object pk="1" model="serializers.author">
<field type="CharField" name="name">Agnes</field>
</object>
<object pk="1" model="serializers.category">
<field type="CharField" name="name">Reference</field></object>
</django-objects>"""
class JsonSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "json"
pkless_str = """[{"pk": null, "model": "serializers.category", "fields": {"name": "Reference"}}]"""
@staticmethod
def _validate_output(serial_str):
try:
json.loads(serial_str)
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
ret_list = []
serial_list = json.loads(serial_str)
for obj_dict in serial_list:
ret_list.append(obj_dict["pk"])
return ret_list
@staticmethod
def _get_field_values(serial_str, field_name):
ret_list = []
serial_list = json.loads(serial_str)
for obj_dict in serial_list:
if field_name in obj_dict["fields"]:
ret_list.append(obj_dict["fields"][field_name])
return ret_list
class JsonSerializerTransactionTestCase(SerializersTransactionTestBase, TransactionTestCase):
serializer_name = "json"
fwd_ref_str = """[
{
"pk": 1,
"model": "serializers.article",
"fields": {
"headline": "Forward references pose no problem",
"pub_date": "2006-06-16T15:00:00",
"categories": [1],
"author": 1
}
},
{
"pk": 1,
"model": "serializers.category",
"fields": {
"name": "Reference"
}
},
{
"pk": 1,
"model": "serializers.author",
"fields": {
"name": "Agnes"
}
}]"""
try:
import yaml
except ImportError:
pass
else:
class YamlSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "yaml"
fwd_ref_str = """- fields:
headline: Forward references pose no problem
pub_date: 2006-06-16 15:00:00
categories: [1]
author: 1
pk: 1
model: serializers.article
- fields:
name: Reference
pk: 1
model: serializers.category
- fields:
name: Agnes
pk: 1
model: serializers.author"""
pkless_str = """- fields:
name: Reference
pk: null
model: serializers.category"""
@staticmethod
def _validate_output(serial_str):
try:
yaml.safe_load(StringIO(serial_str))
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
ret_list = []
stream = StringIO(serial_str)
for obj_dict in yaml.safe_load(stream):
ret_list.append(obj_dict["pk"])
return ret_list
@staticmethod
def _get_field_values(serial_str, field_name):
ret_list = []
stream = StringIO(serial_str)
for obj_dict in yaml.safe_load(stream):
if "fields" in obj_dict and field_name in obj_dict["fields"]:
field_value = obj_dict["fields"][field_name]
# yaml.safe_load will return non-string objects for some
# of the fields we are interested in, this ensures that
# everything comes back as a string
if isinstance(field_value, six.string_types):
ret_list.append(field_value)
else:
ret_list.append(str(field_value))
return ret_list
class YamlSerializerTransactionTestCase(SerializersTransactionTestBase, TransactionTestCase):
serializer_name = "yaml"
fwd_ref_str = """- fields:
headline: Forward references pose no problem
pub_date: 2006-06-16 15:00:00
categories: [1]
author: 1
pk: 1
model: serializers.article
- fields:
name: Reference
pk: 1
model: serializers.category
- fields:
name: Agnes
pk: 1
model: serializers.author"""
|
onitake/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/azure/azure_rm_trafficmanagerendpoint.py
|
15
|
#!/usr/bin/python
#
# Copyright (c) 2018 Hai Cao, <t-haicao@microsoft.com>, Yunge Zhu <yungez@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_trafficmanagerendpoint
version_added: "2.7"
short_description: Manage Azure Traffic Manager endpoint.
description:
- Create, update and delete Azure Traffic Manager endpoint.
options:
resource_group:
description:
- Name of a resource group where the Traffic Manager endpoint exists or will be created.
type: str
required: true
name:
description:
- The name of the endpoint.
type: str
required: true
profile_name:
        description: Name of the Traffic Manager profile to which this endpoint attaches.
type: str
required: true
type:
description:
- The type of the endpoint.
required: true
choices:
- azure_endpoints
- external_endpoints
- nested_endpoints
target_resource_id:
description:
            - The Azure Resource URI of the endpoint.
- Not applicable to endpoints of I(type) C(external_endpoints).
type: str
target:
description:
- The fully-qualified DNS name of the endpoint.
type: str
enabled:
description:
- The status of the endpoint.
type: bool
default: true
weight:
description:
            - The weight of this endpoint when the Traffic Manager profile has a routing_method of C(weighted).
- Possible values are from 1 to 1000.
type: int
priority:
description:
            - The priority of this endpoint when the Traffic Manager profile has a routing_method of C(priority).
- Possible values are from 1 to 1000, lower values represent higher priority.
- This is an optional parameter. If specified, it must be specified on all endpoints.
- No two endpoints can share the same priority value.
type: int
location:
description:
- Specifies the location of the external or nested endpoints when using the 'Performance' traffic routing method.
type: str
min_child_endpoints:
description:
- The minimum number of endpoints that must be available in the child profile in order for the parent profile to be considered available.
            - Only applicable to endpoints of I(type) C(nested_endpoints).
type: int
geo_mapping:
description:
            - The list of countries/regions mapped to this endpoint when the Traffic Manager profile has a routing_method of C(geographic).
        type: list
        elements: str
state:
description:
- Assert the state of the Traffic Manager endpoint. Use C(present) to create or update a Traffic Manager endpoint and C(absent) to delete it.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
author:
- "Hai Cao (@caohai) <t-haicao@microsoft.com>"
- "Yunge Zhu (@yungezz) <yungez@microsoft.com>"
'''
EXAMPLES = '''
- name: create an endpoint for a traffic manager profile
azure_rm_trafficmanagerendpoint:
resource_group: testresourcegroup
profile_name: myprofilename
name: testendpoint1
type: external_endpoints
location: westus
priority: 2
weight: 1
target: 1.2.3.4
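# A hedged companion example reusing the hypothetical names above: remove the
# endpoint by asserting C(absent), per the state option documented above.
- name: delete an endpoint from a traffic manager profile
  azure_rm_trafficmanagerendpoint:
    resource_group: testresourcegroup
    profile_name: myprofilename
    name: testendpoint1
    type: external_endpoints
    state: absent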
'''
RETURN = '''
id:
description: The ID of the traffic manager endpoint
returned: when traffic manager endpoint exists
type: str
example:
"/subscriptions/<subsid>/resourceGroups/testRg/providers/Microsoft.Network/trafficManagerProfiles/testProfile/externalEndpoints/testendpoint"
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase, normalize_location_name
from ansible.module_utils.common.dict_transformations import _snake_to_camel
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.trafficmanager.models import (
Endpoint, DnsConfig, MonitorConfig
)
except ImportError:
# This is handled in azure_rm_common
pass
def traffic_manager_endpoint_to_dict(endpoint):
return dict(
id=endpoint.id,
name=endpoint.name,
type=endpoint.type,
target_resource_id=endpoint.target_resource_id,
target=endpoint.target,
status=endpoint.endpoint_status,
weight=endpoint.weight,
priority=endpoint.priority,
location=endpoint.endpoint_location,
monitor_status=endpoint.endpoint_monitor_status,
min_child_endpoints=endpoint.min_child_endpoints,
geo_mapping=endpoint.geo_mapping
)
class Actions:
NoAction, CreateOrUpdate, Delete = range(3)
class AzureRMTrafficManagerEndpoint(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
profile_name=dict(
type='str',
required=True
),
type=dict(
type='str',
choices=['azure_endpoints', 'external_endpoints', 'nested_endpoints'],
required=True
),
target=dict(type='str'),
target_resource_id=dict(type='str'),
enabled=dict(type='bool', default=True),
weight=dict(type='int'),
priority=dict(type='int'),
location=dict(type='str'),
min_child_endpoints=dict(type='int'),
geo_mapping=dict(type='list', elements='str'),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
),
)
self.resource_group = None
self.name = None
self.state = None
self.profile_name = None
self.type = None
self.target_resource_id = None
self.enabled = None
self.weight = None
self.priority = None
self.location = None
self.min_child_endpoints = None
self.geo_mapping = None
self.endpoint_status = 'Enabled'
self.action = Actions.NoAction
self.results = dict(
changed=False
)
super(AzureRMTrafficManagerEndpoint, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=False)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()):
setattr(self, key, kwargs[key])
if self.type:
self.type = _snake_to_camel(self.type)
to_be_updated = False
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
self.location = resource_group.location
if self.enabled is not None and self.enabled is False:
self.endpoint_status = 'Disabled'
response = self.get_traffic_manager_endpoint()
if response:
self.log('Results : {0}'.format(response))
self.results['id'] = response['id']
if self.state == 'present':
# check update
                to_be_updated = self.check_update(response)
                if to_be_updated:
self.action = Actions.CreateOrUpdate
elif self.state == 'absent':
# delete
self.action = Actions.Delete
else:
if self.state == 'present':
self.action = Actions.CreateOrUpdate
elif self.state == 'absent':
                # delete when it does not exist
                self.fail("Traffic Manager endpoint {0} does not exist.".format(self.name))
if self.action == Actions.CreateOrUpdate:
self.results['changed'] = True
if self.check_mode:
return self.results
response = self.create_update_traffic_manager_endpoint()
self.results['id'] = response['id']
if self.action == Actions.Delete:
self.results['changed'] = True
if self.check_mode:
return self.results
response = self.delete_traffic_manager_endpoint()
return self.results
def get_traffic_manager_endpoint(self):
'''
Gets the properties of the specified Traffic Manager endpoint
:return: deserialized Traffic Manager endpoint dict
'''
self.log("Checking if Traffic Manager endpoint {0} is present".format(self.name))
try:
response = self.traffic_manager_management_client.endpoints.get(self.resource_group, self.profile_name, self.type, self.name)
self.log("Response : {0}".format(response))
return traffic_manager_endpoint_to_dict(response)
except CloudError:
self.log('Did not find the Traffic Manager endpoint.')
return False
def delete_traffic_manager_endpoint(self):
'''
Deletes the specified Traffic Manager endpoint.
:return: True
'''
self.log("Deleting the Traffic Manager endpoint {0}".format(self.name))
try:
operation_result = self.traffic_manager_management_client.endpoints.delete(self.resource_group, self.profile_name, self.type, self.name)
return True
except CloudError as exc:
request_id = exc.request_id if exc.request_id else ''
self.fail("Error deleting the Traffic Manager endpoint {0}, request id {1} - {2}".format(self.name, request_id, str(exc)))
return False
def create_update_traffic_manager_endpoint(self):
'''
Creates or updates a Traffic Manager endpoint.
:return: deserialized Traffic Manager endpoint state dictionary
'''
self.log("Creating / Updating the Traffic Manager endpoint {0}".format(self.name))
parameters = Endpoint(target_resource_id=self.target_resource_id,
target=self.target,
endpoint_status=self.endpoint_status,
weight=self.weight,
priority=self.priority,
endpoint_location=self.location,
min_child_endpoints=self.min_child_endpoints,
geo_mapping=self.geo_mapping)
try:
response = self.traffic_manager_management_client.endpoints.create_or_update(self.resource_group,
self.profile_name,
self.type,
self.name,
parameters)
return traffic_manager_endpoint_to_dict(response)
except CloudError as exc:
request_id = exc.request_id if exc.request_id else ''
self.fail("Error creating the Traffic Manager endpoint {0}, request id {1} - {2}".format(self.name, request_id, str(exc)))
def check_update(self, response):
if self.endpoint_status is not None and response['status'].lower() != self.endpoint_status.lower():
self.log("Status Diff - Origin {0} / Update {1}".format(response['status'], self.endpoint_status))
return True
if self.type and response['type'].lower() != "Microsoft.network/TrafficManagerProfiles/{0}".format(self.type).lower():
self.log("Type Diff - Origin {0} / Update {1}".format(response['type'], self.type))
return True
if self.target_resource_id and response['target_resource_id'] != self.target_resource_id:
self.log("target_resource_id Diff - Origin {0} / Update {1}".format(response['target_resource_id'], self.target_resource_id))
return True
if self.target and response['target'] != self.target:
self.log("target Diff - Origin {0} / Update {1}".format(response['target'], self.target))
return True
if self.weight and int(response['weight']) != self.weight:
self.log("weight Diff - Origin {0} / Update {1}".format(response['weight'], self.weight))
return True
if self.priority and int(response['priority']) != self.priority:
self.log("priority Diff - Origin {0} / Update {1}".format(response['priority'], self.priority))
return True
if self.min_child_endpoints and int(response['min_child_endpoints']) != self.min_child_endpoints:
self.log("min_child_endpoints Diff - Origin {0} / Update {1}".format(response['min_child_endpoints'], self.min_child_endpoints))
return True
if self.geo_mapping and response['geo_mapping'] != self.geo_mapping:
self.log("geo_mapping Diff - Origin {0} / Update {1}".format(response['geo_mapping'], self.geo_mapping))
return True
return False
def main():
"""Main execution"""
AzureRMTrafficManagerEndpoint()
if __name__ == '__main__':
main()
|
0x7678/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/bet.py
|
16
|
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import compat_urllib_parse
from ..utils import (
xpath_text,
xpath_with_ns,
int_or_none,
parse_iso8601,
)
class BetIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?bet\.com/(?:[^/]+/)+(?P<id>.+?)\.html'
_TESTS = [
{
'url': 'http://www.bet.com/news/politics/2014/12/08/in-bet-exclusive-obama-talks-race-and-racism.html',
'info_dict': {
'id': '417cd61c-c793-4e8e-b006-e445ecc45add',
'display_id': 'in-bet-exclusive-obama-talks-race-and-racism',
'ext': 'flv',
'title': 'BET News Presents: A Conversation With President Obama',
'description': 'md5:5a88d8ae912c1b33e090290af7ec33c6',
'duration': 1534,
'timestamp': 1418075340,
'upload_date': '20141208',
'uploader': 'admin',
'thumbnail': 're:(?i)^https?://.*\.jpg$',
},
'params': {
# rtmp download
'skip_download': True,
},
},
{
'url': 'http://www.bet.com/video/news/national/2014/justice-for-ferguson-a-community-reacts.html',
'info_dict': {
'id': '4160e53b-ad41-43b1-980f-8d85f63121f4',
'display_id': 'justice-for-ferguson-a-community-reacts',
'ext': 'flv',
'title': 'Justice for Ferguson: A Community Reacts',
'description': 'A BET News special.',
'duration': 1696,
'timestamp': 1416942360,
'upload_date': '20141125',
'uploader': 'admin',
'thumbnail': 're:(?i)^https?://.*\.jpg$',
},
'params': {
# rtmp download
'skip_download': True,
},
}
]
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
media_url = compat_urllib_parse.unquote(self._search_regex(
[r'mediaURL\s*:\s*"([^"]+)"', r"var\s+mrssMediaUrl\s*=\s*'([^']+)'"],
webpage, 'media URL'))
mrss = self._download_xml(media_url, display_id)
item = mrss.find('./channel/item')
NS_MAP = {
'dc': 'http://purl.org/dc/elements/1.1/',
'media': 'http://search.yahoo.com/mrss/',
'ka': 'http://kickapps.com/karss',
}
title = xpath_text(item, './title', 'title')
description = xpath_text(
item, './description', 'description', fatal=False)
video_id = xpath_text(item, './guid', 'video id', fatal=False)
timestamp = parse_iso8601(xpath_text(
item, xpath_with_ns('./dc:date', NS_MAP),
'upload date', fatal=False))
uploader = xpath_text(
item, xpath_with_ns('./dc:creator', NS_MAP),
'uploader', fatal=False)
media_content = item.find(
xpath_with_ns('./media:content', NS_MAP))
duration = int_or_none(media_content.get('duration'))
smil_url = media_content.get('url')
thumbnail = media_content.find(
xpath_with_ns('./media:thumbnail', NS_MAP)).get('url')
formats = self._extract_smil_formats(smil_url, display_id)
return {
'id': video_id,
'display_id': display_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'timestamp': timestamp,
'uploader': uploader,
'duration': duration,
'formats': formats,
}
|
jumpstarter-io/neutron
|
refs/heads/master
|
neutron/tests/unit/test_auth.py
|
9
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from neutron import auth
from neutron.openstack.common.middleware import request_id
from neutron.tests import base
class NeutronKeystoneContextTestCase(base.BaseTestCase):
def setUp(self):
super(NeutronKeystoneContextTestCase, self).setUp()
@webob.dec.wsgify
def fake_app(req):
self.context = req.environ['neutron.context']
return webob.Response()
self.context = None
self.middleware = auth.NeutronKeystoneContext(fake_app)
self.request = webob.Request.blank('/')
self.request.headers['X_AUTH_TOKEN'] = 'testauthtoken'
def test_no_user_id(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '401 Unauthorized')
def test_with_user_id(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.user_id, 'testuserid')
self.assertEqual(self.context.user, 'testuserid')
def test_with_tenant_id(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'test_user_id'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.tenant_id, 'testtenantid')
self.assertEqual(self.context.tenant, 'testtenantid')
def test_roles_no_admin(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.headers['X_ROLES'] = 'role1, role2 , role3,role4,role5'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.roles, ['role1', 'role2', 'role3',
'role4', 'role5'])
self.assertEqual(self.context.is_admin, False)
def test_roles_with_admin(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.headers['X_ROLES'] = ('role1, role2 , role3,role4,role5,'
'AdMiN')
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.roles, ['role1', 'role2', 'role3',
'role4', 'role5', 'AdMiN'])
self.assertEqual(self.context.is_admin, True)
def test_with_user_tenant_name(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.headers['X_PROJECT_NAME'] = 'testtenantname'
self.request.headers['X_USER_NAME'] = 'testusername'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.user_id, 'testuserid')
self.assertEqual(self.context.user_name, 'testusername')
self.assertEqual(self.context.tenant_id, 'testtenantid')
self.assertEqual(self.context.tenant_name, 'testtenantname')
def test_request_id_extracted_from_env(self):
req_id = 'dummy-request-id'
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.environ[request_id.ENV_REQUEST_ID] = req_id
self.request.get_response(self.middleware)
self.assertEqual(req_id, self.context.request_id)
def test_with_auth_token(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.auth_token, 'testauthtoken')
def test_without_auth_token(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
del self.request.headers['X_AUTH_TOKEN']
self.request.get_response(self.middleware)
self.assertIsNone(self.context.auth_token)
|
shosca/django-rest-witchcraft
|
refs/heads/master
|
rest_witchcraft/__init__.py
|
1
|
from .__version__ import __author__, __author_email__, __description__, __version__ # noqa
|
Gateworks/platform-external-chromium_org
|
refs/heads/imx_kk4.4.3_2.0.0-beta
|
tools/lsan/PRESUBMIT.py
|
24
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""
import re
def CheckChange(input_api, output_api):
errors = []
for f in input_api.AffectedFiles():
if not f.LocalPath().endswith('suppressions.txt'):
continue
for line_num, line in enumerate(f.NewContents()):
line = line.strip()
if line.startswith('#') or not line:
continue
if not line.startswith('leak:'):
errors.append('"%s" should be "leak:..." in %s line %d' %
(line, f.LocalPath(), line_num))
if errors:
return [output_api.PresubmitError('\n'.join(errors))]
return []
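# Hedged illustration (hypothetical symbol name): an entry that passes the
# check above must start with "leak:", e.g. a suppressions.txt line like
#   leak:base::HistogramRecorder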
def CheckChangeOnUpload(input_api, output_api):
return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CheckChange(input_api, output_api)
def GetPreferredTrySlaves():
return ['linux_asan']
|
p4datasystems/CarnotKE
|
refs/heads/master
|
jyhton/Lib/test/pickletester.py
|
10
|
import unittest
import pickle
import cPickle
import StringIO
import cStringIO
import pickletools
import copy_reg
from test.test_support import (TestFailed, have_unicode, TESTFN, _2G, _1M,
precisionbigmemtest, is_jython)
# Tests that try a number of pickle protocols should have a
# for proto in protocols:
# kind of outer loop.
assert pickle.HIGHEST_PROTOCOL == cPickle.HIGHEST_PROTOCOL == 2
protocols = range(pickle.HIGHEST_PROTOCOL + 1)
# Copy of test.test_support.run_with_locale. This is needed to support Python
# 2.4, which didn't include it. This is all to support test_xpickle, which
# bounces pickled objects through older Python versions to test backwards
# compatibility.
def run_with_locale(catstr, *locales):
def decorator(func):
def inner(*args, **kwds):
try:
import locale
category = getattr(locale, catstr)
orig_locale = locale.setlocale(category)
except AttributeError:
# if the test author gives us an invalid category string
raise
except:
# cannot retrieve original locale, so do nothing
locale = orig_locale = None
else:
for loc in locales:
try:
locale.setlocale(category, loc)
break
except:
pass
# now run the function, resetting the locale on exceptions
try:
return func(*args, **kwds)
finally:
if locale and orig_locale:
locale.setlocale(category, orig_locale)
inner.func_name = func.func_name
inner.__doc__ = func.__doc__
return inner
return decorator
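# Hedged usage sketch (mirrors the real use in AbstractPickleTests below):
# wrap a test so it runs under the first available of the named locales and
# restores the original locale afterwards.
#
#   @run_with_locale('LC_ALL', 'de_DE', 'fr_FR')
#   def test_float_format(self):
#       self.assertEqual(self.dumps(1.2)[0:3], 'F1.')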
# Return True if opcode code appears in the pickle, else False.
def opcode_in_pickle(code, pickle):
for op, dummy, dummy in pickletools.genops(pickle):
if op.code == code:
return True
return False
# Return the number of times opcode code appears in pickle.
def count_opcode(code, pickle):
n = 0
for op, dummy, dummy in pickletools.genops(pickle):
if op.code == code:
n += 1
return n
# We can't very well test the extension registry without putting known stuff
# in it, but we have to be careful to restore its original state. Code
# should do this:
#
# e = ExtensionSaver(extension_code)
# try:
# fiddle w/ the extension registry's stuff for extension_code
# finally:
# e.restore()
class ExtensionSaver:
# Remember current registration for code (if any), and remove it (if
# there is one).
def __init__(self, code):
self.code = code
if code in copy_reg._inverted_registry:
self.pair = copy_reg._inverted_registry[code]
copy_reg.remove_extension(self.pair[0], self.pair[1], code)
else:
self.pair = None
# Restore previous registration for code.
def restore(self):
code = self.code
curpair = copy_reg._inverted_registry.get(code)
if curpair is not None:
copy_reg.remove_extension(curpair[0], curpair[1], code)
pair = self.pair
if pair is not None:
copy_reg.add_extension(pair[0], pair[1], code)
class C:
def __cmp__(self, other):
return cmp(self.__dict__, other.__dict__)
import __main__
__main__.C = C
C.__module__ = "__main__"
class myint(int):
def __init__(self, x):
self.str = str(x)
class initarg(C):
def __init__(self, a, b):
self.a = a
self.b = b
def __getinitargs__(self):
return self.a, self.b
class metaclass(type):
pass
class use_metaclass(object):
__metaclass__ = metaclass
class pickling_metaclass(type):
def __eq__(self, other):
return (type(self) == type(other) and
self.reduce_args == other.reduce_args)
def __reduce__(self):
return (create_dynamic_class, self.reduce_args)
__hash__ = None
def create_dynamic_class(name, bases):
result = pickling_metaclass(name, bases, dict())
result.reduce_args = (name, bases)
return result
# DATA0 .. DATA2 are the pickles we expect under the various protocols, for
# the object returned by create_data().
# break into multiple strings to avoid confusing font-lock-mode
DATA0 = """(lp1
I0
aL1L
aF2
ac__builtin__
complex
p2
""" + \
"""(F3
F0
tRp3
aI1
aI-1
aI255
aI-255
aI-256
aI65535
aI-65535
aI-65536
aI2147483647
aI-2147483647
aI-2147483648
a""" + \
"""(S'abc'
p4
g4
""" + \
"""(i__main__
C
p5
""" + \
"""(dp6
S'foo'
p7
I1
sS'bar'
p8
I2
sbg5
tp9
ag9
aI5
a.
"""
# Disassembly of DATA0.
DATA0_DIS = """\
0: ( MARK
1: l LIST (MARK at 0)
2: p PUT 1
5: I INT 0
8: a APPEND
9: L LONG 1L
13: a APPEND
14: F FLOAT 2.0
17: a APPEND
18: c GLOBAL '__builtin__ complex'
39: p PUT 2
42: ( MARK
43: F FLOAT 3.0
46: F FLOAT 0.0
49: t TUPLE (MARK at 42)
50: R REDUCE
51: p PUT 3
54: a APPEND
55: I INT 1
58: a APPEND
59: I INT -1
63: a APPEND
64: I INT 255
69: a APPEND
70: I INT -255
76: a APPEND
77: I INT -256
83: a APPEND
84: I INT 65535
91: a APPEND
92: I INT -65535
100: a APPEND
101: I INT -65536
109: a APPEND
110: I INT 2147483647
122: a APPEND
123: I INT -2147483647
136: a APPEND
137: I INT -2147483648
150: a APPEND
151: ( MARK
152: S STRING 'abc'
159: p PUT 4
162: g GET 4
165: ( MARK
166: i INST '__main__ C' (MARK at 165)
178: p PUT 5
181: ( MARK
182: d DICT (MARK at 181)
183: p PUT 6
186: S STRING 'foo'
193: p PUT 7
196: I INT 1
199: s SETITEM
200: S STRING 'bar'
207: p PUT 8
210: I INT 2
213: s SETITEM
214: b BUILD
215: g GET 5
218: t TUPLE (MARK at 151)
219: p PUT 9
222: a APPEND
223: g GET 9
226: a APPEND
227: I INT 5
230: a APPEND
231: . STOP
highest protocol among opcodes = 0
"""
DATA1 = (']q\x01(K\x00L1L\nG@\x00\x00\x00\x00\x00\x00\x00'
'c__builtin__\ncomplex\nq\x02(G@\x08\x00\x00\x00\x00\x00'
'\x00G\x00\x00\x00\x00\x00\x00\x00\x00tRq\x03K\x01J\xff\xff'
'\xff\xffK\xffJ\x01\xff\xff\xffJ\x00\xff\xff\xffM\xff\xff'
'J\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff\xff\xff\x7fJ\x01\x00'
'\x00\x80J\x00\x00\x00\x80(U\x03abcq\x04h\x04(c__main__\n'
'C\nq\x05oq\x06}q\x07(U\x03fooq\x08K\x01U\x03barq\tK\x02ubh'
'\x06tq\nh\nK\x05e.'
)
# Disassembly of DATA1.
DATA1_DIS = """\
0: ] EMPTY_LIST
1: q BINPUT 1
3: ( MARK
4: K BININT1 0
6: L LONG 1L
10: G BINFLOAT 2.0
19: c GLOBAL '__builtin__ complex'
40: q BINPUT 2
42: ( MARK
43: G BINFLOAT 3.0
52: G BINFLOAT 0.0
61: t TUPLE (MARK at 42)
62: R REDUCE
63: q BINPUT 3
65: K BININT1 1
67: J BININT -1
72: K BININT1 255
74: J BININT -255
79: J BININT -256
84: M BININT2 65535
87: J BININT -65535
92: J BININT -65536
97: J BININT 2147483647
102: J BININT -2147483647
107: J BININT -2147483648
112: ( MARK
113: U SHORT_BINSTRING 'abc'
118: q BINPUT 4
120: h BINGET 4
122: ( MARK
123: c GLOBAL '__main__ C'
135: q BINPUT 5
137: o OBJ (MARK at 122)
138: q BINPUT 6
140: } EMPTY_DICT
141: q BINPUT 7
143: ( MARK
144: U SHORT_BINSTRING 'foo'
149: q BINPUT 8
151: K BININT1 1
153: U SHORT_BINSTRING 'bar'
158: q BINPUT 9
160: K BININT1 2
162: u SETITEMS (MARK at 143)
163: b BUILD
164: h BINGET 6
166: t TUPLE (MARK at 112)
167: q BINPUT 10
169: h BINGET 10
171: K BININT1 5
173: e APPENDS (MARK at 3)
174: . STOP
highest protocol among opcodes = 1
"""
DATA2 = ('\x80\x02]q\x01(K\x00\x8a\x01\x01G@\x00\x00\x00\x00\x00\x00\x00'
'c__builtin__\ncomplex\nq\x02G@\x08\x00\x00\x00\x00\x00\x00G\x00'
'\x00\x00\x00\x00\x00\x00\x00\x86Rq\x03K\x01J\xff\xff\xff\xffK'
'\xffJ\x01\xff\xff\xffJ\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xff'
'J\x00\x00\xff\xffJ\xff\xff\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00'
'\x80(U\x03abcq\x04h\x04(c__main__\nC\nq\x05oq\x06}q\x07(U\x03foo'
'q\x08K\x01U\x03barq\tK\x02ubh\x06tq\nh\nK\x05e.')
# Disassembly of DATA2.
DATA2_DIS = """\
0: \x80 PROTO 2
2: ] EMPTY_LIST
3: q BINPUT 1
5: ( MARK
6: K BININT1 0
8: \x8a LONG1 1L
11: G BINFLOAT 2.0
20: c GLOBAL '__builtin__ complex'
41: q BINPUT 2
43: G BINFLOAT 3.0
52: G BINFLOAT 0.0
61: \x86 TUPLE2
62: R REDUCE
63: q BINPUT 3
65: K BININT1 1
67: J BININT -1
72: K BININT1 255
74: J BININT -255
79: J BININT -256
84: M BININT2 65535
87: J BININT -65535
92: J BININT -65536
97: J BININT 2147483647
102: J BININT -2147483647
107: J BININT -2147483648
112: ( MARK
113: U SHORT_BINSTRING 'abc'
118: q BINPUT 4
120: h BINGET 4
122: ( MARK
123: c GLOBAL '__main__ C'
135: q BINPUT 5
137: o OBJ (MARK at 122)
138: q BINPUT 6
140: } EMPTY_DICT
141: q BINPUT 7
143: ( MARK
144: U SHORT_BINSTRING 'foo'
149: q BINPUT 8
151: K BININT1 1
153: U SHORT_BINSTRING 'bar'
158: q BINPUT 9
160: K BININT1 2
162: u SETITEMS (MARK at 143)
163: b BUILD
164: h BINGET 6
166: t TUPLE (MARK at 112)
167: q BINPUT 10
169: h BINGET 10
171: K BININT1 5
173: e APPENDS (MARK at 5)
174: . STOP
highest protocol among opcodes = 2
"""
def create_data():
c = C()
c.foo = 1
c.bar = 2
x = [0, 1L, 2.0, 3.0+0j]
# Append some integer test cases at cPickle.c's internal size
# cutoffs.
uint1max = 0xff
uint2max = 0xffff
int4max = 0x7fffffff
x.extend([1, -1,
uint1max, -uint1max, -uint1max-1,
uint2max, -uint2max, -uint2max-1,
int4max, -int4max, -int4max-1])
y = ('abc', 'abc', c, c)
x.append(y)
x.append(y)
x.append(5)
return x
class AbstractPickleTests(unittest.TestCase):
# Subclass must define self.dumps, self.loads, self.error.
_testdata = create_data()
def setUp(self):
pass
def test_misc(self):
# test various datatypes not tested by testdata
for proto in protocols:
x = myint(4)
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
x = (1, ())
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
x = initarg(1, x)
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
# XXX test __reduce__ protocol?
def test_roundtrip_equality(self):
expected = self._testdata
for proto in protocols:
s = self.dumps(expected, proto)
got = self.loads(s)
self.assertEqual(expected, got)
def test_load_from_canned_string(self):
expected = self._testdata
for canned in DATA0, DATA1, DATA2:
got = self.loads(canned)
self.assertEqual(expected, got)
# There are gratuitous differences between pickles produced by
# pickle and cPickle, largely because cPickle starts PUT indices at
# 1 and pickle starts them at 0. See XXX comment in cPickle's put2() --
# there's a comment with an exclamation point there whose meaning
# is a mystery. cPickle also suppresses PUT for objects with a refcount
# of 1.
def dont_test_disassembly(self):
from pickletools import dis
for proto, expected in (0, DATA0_DIS), (1, DATA1_DIS):
s = self.dumps(self._testdata, proto)
filelike = cStringIO.StringIO()
dis(s, out=filelike)
got = filelike.getvalue()
self.assertEqual(expected, got)
def test_recursive_list(self):
l = []
l.append(l)
for proto in protocols:
s = self.dumps(l, proto)
x = self.loads(s)
self.assertEqual(len(x), 1)
self.assertTrue(x is x[0])
def test_recursive_tuple(self):
t = ([],)
t[0].append(t)
for proto in protocols:
s = self.dumps(t, proto)
x = self.loads(s)
self.assertEqual(len(x), 1)
self.assertEqual(len(x[0]), 1)
self.assertTrue(x is x[0][0])
def test_recursive_dict(self):
d = {}
d[1] = d
for proto in protocols:
s = self.dumps(d, proto)
x = self.loads(s)
self.assertEqual(x.keys(), [1])
self.assertTrue(x[1] is x)
def test_recursive_inst(self):
i = C()
i.attr = i
for proto in protocols:
s = self.dumps(i, proto)
x = self.loads(s)
self.assertEqual(dir(x), dir(i))
self.assertIs(x.attr, x)
def test_recursive_multi(self):
l = []
d = {1:l}
i = C()
i.attr = d
l.append(i)
for proto in protocols:
s = self.dumps(l, proto)
x = self.loads(s)
self.assertEqual(len(x), 1)
self.assertEqual(dir(x[0]), dir(i))
self.assertEqual(x[0].attr.keys(), [1])
self.assertTrue(x[0].attr[1] is x)
def test_garyp(self):
self.assertRaises(self.error, self.loads, 'garyp')
def test_insecure_strings(self):
insecure = ["abc", "2 + 2", # not quoted
#"'abc' + 'def'", # not a single quoted string
"'abc", # quote is not closed
"'abc\"", # open quote and close quote don't match
"'abc' ?", # junk after close quote
"'\\'", # trailing backslash
# some tests of the quoting rules
#"'abc\"\''",
#"'\\\\a\'\'\'\\\'\\\\\''",
]
for s in insecure:
buf = "S" + s + "\012p0\012."
self.assertRaises(ValueError, self.loads, buf)
if have_unicode:
def test_unicode(self):
endcases = [u'', u'<\\u>', u'<\\\u1234>', u'<\n>',
u'<\\>', u'<\\\U00012345>']
for proto in protocols:
for u in endcases:
p = self.dumps(u, proto)
u2 = self.loads(p)
self.assertEqual(u2, u)
def test_unicode_high_plane(self):
t = u'\U00012345'
for proto in protocols:
p = self.dumps(t, proto)
t2 = self.loads(p)
self.assertEqual(t2, t)
def test_ints(self):
import sys
for proto in protocols:
n = sys.maxint
while n:
for expected in (-n, n):
s = self.dumps(expected, proto)
n2 = self.loads(s)
self.assertEqual(expected, n2)
n = n >> 1
def test_maxint64(self):
maxint64 = (1L << 63) - 1
data = 'I' + str(maxint64) + '\n.'
got = self.loads(data)
self.assertEqual(got, maxint64)
        # Also try with a bogus literal.
data = 'I' + str(maxint64) + 'JUNK\n.'
self.assertRaises(ValueError, self.loads, data)
def test_long(self):
for proto in protocols:
# 256 bytes is where LONG4 begins.
for nbits in 1, 8, 8*254, 8*255, 8*256, 8*257:
nbase = 1L << nbits
for npos in nbase-1, nbase, nbase+1:
for n in npos, -npos:
pickle = self.dumps(n, proto)
got = self.loads(pickle)
self.assertEqual(n, got)
# Try a monster. This is quadratic-time in protos 0 & 1, so don't
# bother with those.
nbase = long("deadbeeffeedface", 16)
nbase += nbase << 1000000
for n in nbase, -nbase:
p = self.dumps(n, 2)
got = self.loads(p)
self.assertEqual(n, got)
def test_float(self):
test_values = [0.0, 4.94e-324, 1e-310, 7e-308, 6.626e-34, 0.1, 0.5,
3.14, 263.44582062374053, 6.022e23, 1e30]
test_values = test_values + [-x for x in test_values]
for proto in protocols:
for value in test_values:
pickle = self.dumps(value, proto)
got = self.loads(pickle)
self.assertEqual(value, got)
@run_with_locale('LC_ALL', 'de_DE', 'fr_FR')
def test_float_format(self):
# make sure that floats are formatted locale independent
self.assertEqual(self.dumps(1.2)[0:3], 'F1.')
def test_reduce(self):
pass
def test_getinitargs(self):
pass
def test_metaclass(self):
a = use_metaclass()
for proto in protocols:
s = self.dumps(a, proto)
b = self.loads(s)
self.assertEqual(a.__class__, b.__class__)
def test_dynamic_class(self):
a = create_dynamic_class("my_dynamic_class", (object,))
copy_reg.pickle(pickling_metaclass, pickling_metaclass.__reduce__)
for proto in protocols:
s = self.dumps(a, proto)
b = self.loads(s)
self.assertEqual(a, b)
def test_structseq(self):
import time
import os
t = time.localtime()
for proto in protocols:
s = self.dumps(t, proto)
u = self.loads(s)
self.assertEqual(t, u)
if hasattr(os, "stat"):
t = os.stat(os.curdir)
s = self.dumps(t, proto)
u = self.loads(s)
self.assertEqual(t, u)
if hasattr(os, "statvfs"):
t = os.statvfs(os.curdir)
s = self.dumps(t, proto)
u = self.loads(s)
self.assertEqual(t, u)
# Tests for protocol 2
def test_proto(self):
build_none = pickle.NONE + pickle.STOP
for proto in protocols:
expected = build_none
if proto >= 2:
expected = pickle.PROTO + chr(proto) + expected
p = self.dumps(None, proto)
self.assertEqual(p, expected)
oob = protocols[-1] + 1 # a future protocol
badpickle = pickle.PROTO + chr(oob) + build_none
try:
self.loads(badpickle)
except ValueError, detail:
self.assertTrue(str(detail).startswith(
"unsupported pickle protocol"))
else:
self.fail("expected bad protocol number to raise ValueError")
def test_long1(self):
x = 12345678910111213141516178920L
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
self.assertEqual(opcode_in_pickle(pickle.LONG1, s), proto >= 2)
def test_long4(self):
x = 12345678910111213141516178920L << (256*8)
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
self.assertEqual(opcode_in_pickle(pickle.LONG4, s), proto >= 2)
def test_short_tuples(self):
# Map (proto, len(tuple)) to expected opcode.
expected_opcode = {(0, 0): pickle.TUPLE,
(0, 1): pickle.TUPLE,
(0, 2): pickle.TUPLE,
(0, 3): pickle.TUPLE,
(0, 4): pickle.TUPLE,
(1, 0): pickle.EMPTY_TUPLE,
(1, 1): pickle.TUPLE,
(1, 2): pickle.TUPLE,
(1, 3): pickle.TUPLE,
(1, 4): pickle.TUPLE,
(2, 0): pickle.EMPTY_TUPLE,
(2, 1): pickle.TUPLE1,
(2, 2): pickle.TUPLE2,
(2, 3): pickle.TUPLE3,
(2, 4): pickle.TUPLE,
}
a = ()
b = (1,)
c = (1, 2)
d = (1, 2, 3)
e = (1, 2, 3, 4)
for proto in protocols:
for x in a, b, c, d, e:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y, (proto, x, s, y))
expected = expected_opcode[proto, len(x)]
self.assertEqual(opcode_in_pickle(expected, s), True)
def test_singletons(self):
# Map (proto, singleton) to expected opcode.
expected_opcode = {(0, None): pickle.NONE,
(1, None): pickle.NONE,
(2, None): pickle.NONE,
(0, True): pickle.INT,
(1, True): pickle.INT,
(2, True): pickle.NEWTRUE,
(0, False): pickle.INT,
(1, False): pickle.INT,
(2, False): pickle.NEWFALSE,
}
for proto in protocols:
for x in None, False, True:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertTrue(x is y, (proto, x, s, y))
expected = expected_opcode[proto, x]
self.assertEqual(opcode_in_pickle(expected, s), True)
def test_newobj_tuple(self):
x = MyTuple([1, 2, 3])
x.foo = 42
x.bar = "hello"
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(tuple(x), tuple(y))
self.assertEqual(x.__dict__, y.__dict__)
def test_newobj_list(self):
x = MyList([1, 2, 3])
x.foo = 42
x.bar = "hello"
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
def test_newobj_generic(self):
for proto in protocols:
for C in myclasses:
B = C.__base__
x = C(C.sample)
x.foo = 42
s = self.dumps(x, proto)
y = self.loads(s)
detail = (proto, C, B, x, y, type(y))
self.assertEqual(B(x), B(y), detail)
self.assertEqual(x.__dict__, y.__dict__, detail)
# Register a type with copy_reg, with extension code extcode. Pickle
# an object of that type. Check that the resulting pickle uses opcode
# (EXT[124]) under proto 2, and not in proto 1.
def produce_global_ext(self, extcode, opcode):
e = ExtensionSaver(extcode)
try:
copy_reg.add_extension(__name__, "MyList", extcode)
x = MyList([1, 2, 3])
x.foo = 42
x.bar = "hello"
# Dump using protocol 1 for comparison.
s1 = self.dumps(x, 1)
self.assertIn(__name__, s1)
self.assertIn("MyList", s1)
self.assertEqual(opcode_in_pickle(opcode, s1), False)
y = self.loads(s1)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
# Dump using protocol 2 for test.
s2 = self.dumps(x, 2)
self.assertNotIn(__name__, s2)
self.assertNotIn("MyList", s2)
self.assertEqual(opcode_in_pickle(opcode, s2), True)
y = self.loads(s2)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
finally:
e.restore()
def test_global_ext1(self):
self.produce_global_ext(0x00000001, pickle.EXT1) # smallest EXT1 code
self.produce_global_ext(0x000000ff, pickle.EXT1) # largest EXT1 code
def test_global_ext2(self):
self.produce_global_ext(0x00000100, pickle.EXT2) # smallest EXT2 code
self.produce_global_ext(0x0000ffff, pickle.EXT2) # largest EXT2 code
self.produce_global_ext(0x0000abcd, pickle.EXT2) # check endianness
def test_global_ext4(self):
self.produce_global_ext(0x00010000, pickle.EXT4) # smallest EXT4 code
self.produce_global_ext(0x7fffffff, pickle.EXT4) # largest EXT4 code
self.produce_global_ext(0x12abcdef, pickle.EXT4) # check endianness
def test_list_chunking(self):
n = 10 # too small to chunk
x = range(n)
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
num_appends = count_opcode(pickle.APPENDS, s)
self.assertEqual(num_appends, proto > 0)
n = 2500 # expect at least two chunks when proto > 0
x = range(n)
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
num_appends = count_opcode(pickle.APPENDS, s)
if proto == 0:
self.assertEqual(num_appends, 0)
else:
self.assertTrue(num_appends >= 2)
def test_dict_chunking(self):
n = 10 # too small to chunk
x = dict.fromkeys(range(n))
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
num_setitems = count_opcode(pickle.SETITEMS, s)
self.assertEqual(num_setitems, proto > 0)
n = 2500 # expect at least two chunks when proto > 0
x = dict.fromkeys(range(n))
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
num_setitems = count_opcode(pickle.SETITEMS, s)
if proto == 0:
self.assertEqual(num_setitems, 0)
else:
self.assertTrue(num_setitems >= 2)
def test_simple_newobj(self):
x = object.__new__(SimpleNewObj) # avoid __init__
x.abc = 666
for proto in protocols:
s = self.dumps(x, proto)
self.assertEqual(opcode_in_pickle(pickle.NEWOBJ, s), proto >= 2)
y = self.loads(s) # will raise TypeError if __init__ called
self.assertEqual(y.abc, 666)
self.assertEqual(x.__dict__, y.__dict__)
def test_newobj_list_slots(self):
x = SlotList([1, 2, 3])
x.foo = 42
x.bar = "hello"
s = self.dumps(x, 2)
y = self.loads(s)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
self.assertEqual(x.foo, y.foo)
self.assertEqual(x.bar, y.bar)
def test_reduce_overrides_default_reduce_ex(self):
for proto in protocols:
x = REX_one()
self.assertEqual(x._reduce_called, 0)
s = self.dumps(x, proto)
self.assertEqual(x._reduce_called, 1)
y = self.loads(s)
self.assertEqual(y._reduce_called, 0)
def test_reduce_ex_called(self):
for proto in protocols:
x = REX_two()
self.assertEqual(x._proto, None)
s = self.dumps(x, proto)
self.assertEqual(x._proto, proto)
y = self.loads(s)
self.assertEqual(y._proto, None)
def test_reduce_ex_overrides_reduce(self):
for proto in protocols:
x = REX_three()
self.assertEqual(x._proto, None)
s = self.dumps(x, proto)
self.assertEqual(x._proto, proto)
y = self.loads(s)
self.assertEqual(y._proto, None)
def test_reduce_ex_calls_base(self):
for proto in protocols:
x = REX_four()
self.assertEqual(x._proto, None)
s = self.dumps(x, proto)
self.assertEqual(x._proto, proto)
y = self.loads(s)
self.assertEqual(y._proto, proto)
def test_reduce_calls_base(self):
for proto in protocols:
x = REX_five()
self.assertEqual(x._reduce_called, 0)
s = self.dumps(x, proto)
self.assertEqual(x._reduce_called, 1)
y = self.loads(s)
self.assertEqual(y._reduce_called, 1)
def test_reduce_bad_iterator(self):
# Issue4176: crash when 4th and 5th items of __reduce__()
# are not iterators
class C(object):
def __reduce__(self):
# 4th item is not an iterator
return list, (), None, [], None
class D(object):
def __reduce__(self):
# 5th item is not an iterator
return dict, (), None, None, []
        # Protocol 0 is less strict and also accepts iterables.
for proto in protocols:
try:
self.dumps(C(), proto)
except (AttributeError, pickle.PickleError, cPickle.PickleError):
pass
try:
self.dumps(D(), proto)
except (AttributeError, pickle.PickleError, cPickle.PickleError):
pass
def test_many_puts_and_gets(self):
# Test that internal data structures correctly deal with lots of
# puts/gets.
keys = ("aaa" + str(i) for i in xrange(100))
large_dict = dict((k, [4, 5, 6]) for k in keys)
obj = [dict(large_dict), dict(large_dict), dict(large_dict)]
for proto in protocols:
dumped = self.dumps(obj, proto)
loaded = self.loads(dumped)
self.assertEqual(loaded, obj,
"Failed protocol %d: %r != %r"
% (proto, obj, loaded))
@unittest.skipIf(is_jython, "FIXME: not working on Jython")
def test_attribute_name_interning(self):
# Test that attribute names of pickled objects are interned when
# unpickling.
for proto in protocols:
x = C()
x.foo = 42
x.bar = "hello"
s = self.dumps(x, proto)
y = self.loads(s)
x_keys = sorted(x.__dict__)
y_keys = sorted(y.__dict__)
for x_key, y_key in zip(x_keys, y_keys):
self.assertIs(x_key, y_key)
# Test classes for reduce_ex
class REX_one(object):
_reduce_called = 0
def __reduce__(self):
self._reduce_called = 1
return REX_one, ()
# No __reduce_ex__ here, but inheriting it from object
class REX_two(object):
_proto = None
def __reduce_ex__(self, proto):
self._proto = proto
return REX_two, ()
# No __reduce__ here, but inheriting it from object
class REX_three(object):
_proto = None
def __reduce_ex__(self, proto):
self._proto = proto
return REX_two, ()
def __reduce__(self):
raise TestFailed, "This __reduce__ shouldn't be called"
class REX_four(object):
_proto = None
def __reduce_ex__(self, proto):
self._proto = proto
return object.__reduce_ex__(self, proto)
# Calling base class method should succeed
class REX_five(object):
_reduce_called = 0
def __reduce__(self):
self._reduce_called = 1
return object.__reduce__(self)
# This one used to fail with infinite recursion
# Test classes for newobj
class MyInt(int):
sample = 1
class MyLong(long):
sample = 1L
class MyFloat(float):
sample = 1.0
class MyComplex(complex):
sample = 1.0 + 0.0j
class MyStr(str):
sample = "hello"
class MyUnicode(unicode):
sample = u"hello \u1234"
class MyTuple(tuple):
sample = (1, 2, 3)
class MyList(list):
sample = [1, 2, 3]
class MyDict(dict):
sample = {"a": 1, "b": 2}
myclasses = [MyInt, MyLong, MyFloat,
MyComplex,
MyStr, MyUnicode,
MyTuple, MyList, MyDict]
class SlotList(MyList):
__slots__ = ["foo"]
class SimpleNewObj(object):
def __init__(self, a, b, c):
# raise an error, to make sure this isn't called
raise TypeError("SimpleNewObj.__init__() didn't expect to get called")
class AbstractPickleModuleTests(unittest.TestCase):
def test_dump_closed_file(self):
import os
f = open(TESTFN, "w")
try:
f.close()
self.assertRaises(ValueError, self.module.dump, 123, f)
finally:
os.remove(TESTFN)
def test_load_closed_file(self):
import os
f = open(TESTFN, "w")
try:
f.close()
self.assertRaises(ValueError, self.module.dump, 123, f)
finally:
os.remove(TESTFN)
def test_load_from_and_dump_to_file(self):
stream = cStringIO.StringIO()
data = [123, {}, 124]
self.module.dump(data, stream)
stream.seek(0)
unpickled = self.module.load(stream)
self.assertEqual(unpickled, data)
def test_highest_protocol(self):
# Of course this needs to be changed when HIGHEST_PROTOCOL changes.
self.assertEqual(self.module.HIGHEST_PROTOCOL, 2)
def test_callapi(self):
f = cStringIO.StringIO()
# With and without keyword arguments
self.module.dump(123, f, -1)
self.module.dump(123, file=f, protocol=-1)
self.module.dumps(123, -1)
self.module.dumps(123, protocol=-1)
self.module.Pickler(f, -1)
self.module.Pickler(f, protocol=-1)
def test_incomplete_input(self):
s = StringIO.StringIO("X''.")
self.assertRaises(EOFError, self.module.load, s)
@unittest.skipIf(is_jython, "FIXME: not working on Jython, do similar patch for http://bugs.python.org/issue7128")
def test_restricted(self):
# issue7128: cPickle failed in restricted mode
builtins = {self.module.__name__: self.module,
'__import__': __import__}
d = {}
teststr = "def f(): {0}.dumps(0)".format(self.module.__name__)
exec teststr in {'__builtins__': builtins}, d
d['f']()
def test_bad_input(self):
# Test issue4298
s = '\x58\0\0\0\x54'
self.assertRaises(EOFError, self.module.loads, s)
# Test issue7455
s = '0'
# XXX Why doesn't pickle raise UnpicklingError?
self.assertRaises((IndexError, cPickle.UnpicklingError),
self.module.loads, s)
class AbstractPersistentPicklerTests(unittest.TestCase):
# This class defines persistent_id() and persistent_load()
# functions that should be used by the pickler. All even integers
# are pickled using persistent ids.
def persistent_id(self, object):
if isinstance(object, int) and object % 2 == 0:
self.id_count += 1
return str(object)
else:
return None
def persistent_load(self, oid):
self.load_count += 1
object = int(oid)
assert object % 2 == 0
return object
def test_persistence(self):
self.id_count = 0
self.load_count = 0
L = range(10)
self.assertEqual(self.loads(self.dumps(L)), L)
self.assertEqual(self.id_count, 5)
self.assertEqual(self.load_count, 5)
def test_bin_persistence(self):
self.id_count = 0
self.load_count = 0
L = range(10)
self.assertEqual(self.loads(self.dumps(L, 1)), L)
self.assertEqual(self.id_count, 5)
self.assertEqual(self.load_count, 5)
class AbstractPicklerUnpicklerObjectTests(unittest.TestCase):
pickler_class = None
unpickler_class = None
def setUp(self):
assert self.pickler_class
assert self.unpickler_class
def test_clear_pickler_memo(self):
# To test whether clear_memo() has any effect, we pickle an object,
# then pickle it again without clearing the memo; the two serialized
# forms should be different. If we clear_memo() and then pickle the
# object again, the third serialized form should be identical to the
# first one we obtained.
data = ["abcdefg", "abcdefg", 44]
f = cStringIO.StringIO()
pickler = self.pickler_class(f)
pickler.dump(data)
first_pickled = f.getvalue()
# Reset StringIO object.
f.seek(0)
f.truncate()
pickler.dump(data)
second_pickled = f.getvalue()
# Reset the Pickler and StringIO objects.
pickler.clear_memo()
f.seek(0)
f.truncate()
pickler.dump(data)
third_pickled = f.getvalue()
self.assertNotEqual(first_pickled, second_pickled)
self.assertEqual(first_pickled, third_pickled)
def test_priming_pickler_memo(self):
# Verify that we can set the Pickler's memo attribute.
data = ["abcdefg", "abcdefg", 44]
f = cStringIO.StringIO()
pickler = self.pickler_class(f)
pickler.dump(data)
first_pickled = f.getvalue()
f = cStringIO.StringIO()
primed = self.pickler_class(f)
primed.memo = pickler.memo
primed.dump(data)
primed_pickled = f.getvalue()
self.assertNotEqual(first_pickled, primed_pickled)
def test_priming_unpickler_memo(self):
# Verify that we can set the Unpickler's memo attribute.
data = ["abcdefg", "abcdefg", 44]
f = cStringIO.StringIO()
pickler = self.pickler_class(f)
pickler.dump(data)
first_pickled = f.getvalue()
f = cStringIO.StringIO()
primed = self.pickler_class(f)
primed.memo = pickler.memo
primed.dump(data)
primed_pickled = f.getvalue()
unpickler = self.unpickler_class(cStringIO.StringIO(first_pickled))
unpickled_data1 = unpickler.load()
self.assertEqual(unpickled_data1, data)
primed = self.unpickler_class(cStringIO.StringIO(primed_pickled))
primed.memo = unpickler.memo
unpickled_data2 = primed.load()
primed.memo.clear()
self.assertEqual(unpickled_data2, data)
self.assertTrue(unpickled_data2 is unpickled_data1)
def test_reusing_unpickler_objects(self):
data1 = ["abcdefg", "abcdefg", 44]
f = cStringIO.StringIO()
pickler = self.pickler_class(f)
pickler.dump(data1)
pickled1 = f.getvalue()
data2 = ["abcdefg", 44, 44]
f = cStringIO.StringIO()
pickler = self.pickler_class(f)
pickler.dump(data2)
pickled2 = f.getvalue()
f = cStringIO.StringIO()
f.write(pickled1)
f.seek(0)
unpickler = self.unpickler_class(f)
self.assertEqual(unpickler.load(), data1)
f.seek(0)
f.truncate()
f.write(pickled2)
f.seek(0)
self.assertEqual(unpickler.load(), data2)
class BigmemPickleTests(unittest.TestCase):
# Memory requirements: 1 byte per character for input strings, 1 byte
# for pickled data, 1 byte for unpickled strings, 1 byte for internal
# buffer and 1 byte of free space for resizing of internal buffer.
@precisionbigmemtest(size=_2G + 100*_1M, memuse=5)
def test_huge_strlist(self, size):
chunksize = 2**20
data = []
while size > chunksize:
data.append('x' * chunksize)
size -= chunksize
chunksize += 1
data.append('y' * size)
try:
for proto in protocols:
try:
pickled = self.dumps(data, proto)
res = self.loads(pickled)
self.assertEqual(res, data)
finally:
res = None
pickled = None
finally:
data = None
|
adammaikai/OmicsPipe2.0
|
refs/heads/master
|
omics_pipe/main.py
|
2
|
#!/usr/bin/env python
import os
import sys
import stat
import urllib
from optparse import OptionParser
import webbrowser as browser
import argparse
from omics_pipe.parameters.default_parameters import default_parameters
import yaml
from omics_pipe.utils import *
import runpy
import subprocess
import os.path
import csv
from raven import Client
def main():
    '''Build the command-line interface, read in the analysis type, and execute the matching analysis pipeline script.'''
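    # Hedged usage sketch (hypothetical paths), matching the arguments parsed
    # below:
    #   omics_pipe RNAseq_Tuxedo /path/to/parameters.yaml --compression gzip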
client = Client('http://44fd4bee8b9b4d6fa33e29d297c70cec:6e1f13b4911d4a5f915c25edc328c381@sentry.sulab.org/2')
parser = argparse.ArgumentParser(prog = 'omics_pipe', description = 'Run omics_pipe')
parser.add_argument('analysis_type', action = "store",
choices = ['RNAseq_Tuxedo', 'RNAseq_count_based', 'RNAseq_cancer_report', 'TCGA_download', 'Variant_annotation', 'RNAseq_TCGA', 'RNAseq_TCGA_counts',
'Tumorseq_MUTECT', 'miRNAseq_count_based', 'miRNAseq_tuxedo', 'WES_GATK_report', 'WES_GATK', 'WES_GATK_group_calling', 'WGS_GATK_optimized', 'WGS_GATK', 'WGS_GATK_group_calling',
'SomaticInDels', 'ChIPseq_MACS', 'ChIPseq_HOMER', 'test', 'custom'],
help = 'type of analysis to run: RNAseq_Tuxedo, RNAseq_count_based, RNAseq_cancer_report, TCGA_download, Variant_annotation, RNAseq_TCGA, WGS_GATK_optimized, RNAseq_TCGA_counts, Tumorseq_MUTECT, WES_GATK_report, miRNAseq_count_based, miRNAseq_tuxedo, WES_GATK, WGS_GATK, WES_GATK_group_calling, SomaticInDels, ChIPseq_MACS, ChIPseq_HOMER, custom')
parser.add_argument('parameter_file', action = "store", help = 'specify parameter file to use for analysis')
parser.add_argument('--custom_script_path', action = "store", help = 'specify custom script file with full path (/example/script.py) to use for analysis if you specify analysis type as custom')
    parser.add_argument('--custom_script_name', action = "store", help = 'specify custom script file name to use for analysis if you specify analysis type as custom')
parser.add_argument('--compression', action = "store", help = 'select bzip or gzip if your fastq files are compressed. Leave this option off if your files are uncompressed', choices = ['gzip', 'bzip'])
args = parser.parse_args()
print args
print args.analysis_type
print args.custom_script_path
print args.custom_script_name
print args.parameter_file
print args.compression
analysis = args.analysis_type
parameters = os.path.abspath(args.parameter_file)
path = args.custom_script_path
script = args.custom_script_name
compression = args.compression
    stream = open(parameters, 'r')
params = yaml.load(stream)
default_parameters.update(params) #Update default parameters to user defined parameter file
p = Bunch(default_parameters)
    if isinstance(p.SAMPLE_LIST, str): #Handles reading a list of files from a text file
sample_file = open(p.SAMPLE_LIST, 'r')
reader = csv.reader(sample_file)
sample_list = [row for row in reader]
sample_list2 = [item for sublist in sample_list for item in sublist]
        default_parameters.update(SAMPLE_LIST = sample_list2) #Replace the SAMPLE_LIST file path with the parsed sample names
p = Bunch(default_parameters)
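    # For illustration, a minimal parameter YAML might look like the sketch
    # below (only keys referenced in this function are shown; the paths are
    # hypothetical):
    #   SAMPLE_LIST: [sample1, sample2]
    #   OMICSPIPE:
    #     LOG_PATH: /data/logs
    #     FLAG_PATH: /data/flags
    #     WORKING_DIR: /data/working
    #     DRMAA_PATH: /usr/lib/libdrmaa.so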
check_create_dir(p.OMICSPIPE["LOG_PATH"])
check_create_dir(p.OMICSPIPE["FLAG_PATH"])
current_cwd = os.getcwd()
os.chdir(p.OMICSPIPE["WORKING_DIR"])
for x in os.listdir(p.OMICSPIPE["WORKING_DIR"]):
os.chmod(x,0755)
os.environ["DRMAA_LIBRARY_PATH"] = p.OMICSPIPE["DRMAA_PATH"]
start_time = time.time()
print start_time
#decompress(p.DATA["RAW_DATA"], args.compression) #Check if files zipped, if so, unzip them
# record, project = sumatra_start(p.SUMATRA["REPOSITORY"], p.SUMATRA["DB_PATH"], p.SUMATRA["RESULTS"], p.OMICSPIPE["WORKING_DIR"], p.SUMATRA["HG_USERNAME"], p.SUMATRA["RUN_NAME"], parameters) #Create repo and sumatra project, start recording
os.chdir(p.OMICSPIPE["WORKING_DIR"])
    # Every standard pipeline follows the same pattern: run the matching
    # omics_pipe module, recompress the raw data, close the Sumatra record,
    # and exit; the former per-pipeline if/elif chain is collapsed into one
    # lookup over the supported analysis types.
    standard_pipelines = ['RNAseq_Tuxedo', 'RNAseq_count_based', 'RNAseq_cancer_report',
                          'RNAseq_TCGA_counts', 'RNAseq_TCGA', 'Tumorseq_MUTECT',
                          'miRNAseq_count_based', 'miRNAseq_tuxedo', 'WES_GATK',
                          'WES_GATK_group_calling', 'WGS_GATK', 'WES_GATK_report',
                          'WGS_GATK_group_calling', 'SomaticInDels', 'ChIPseq_MACS',
                          'ChIPseq_HOMER', 'TCGA_download', 'Variant_annotation',
                          'WGS_GATK_optimized', 'test']
    if args.analysis_type in standard_pipelines:
        runpy.run_module('omics_pipe.%s' % args.analysis_type, run_name="__main__", alter_sys = True)
        compress(p.DATA["RAW_DATA"], args.compression)
        sumatra_end(start_time, record, project) #NOTE: record and project are produced by sumatra_start(), which is commented out above, so this call raises NameError unless that line is restored
        sys.exit(0)
    elif args.analysis_type == 'custom':
        os.chdir(path)
        sys.path.append(path)
        runpy.run_module(script, run_name="__main__", alter_sys = True)
        #compress(p.DATA["RAW_DATA"], args.compression)
        # sumatra_end(start_time, record, project)
        sys.exit(0)
    else:
        print 'Error: unsupported analysis type. Please try again.'
        return
if __name__ == '__main__':
main()
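# Example invocations (hypothetical paths; 'omics_pipe' is the argparse prog name):
#   omics_pipe RNAseq_Tuxedo /path/to/params.yaml --compression gzip
#   omics_pipe custom /path/to/params.yaml --custom_script_path /example --custom_script_name script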
|
Piasy/proxy-searcher
|
refs/heads/master
|
site-packages/django/contrib/localflavor/jp/__init__.py
|
12133432
| |
terotic/devheldev
|
refs/heads/master
|
users/migrations/__init__.py
|
12133432
| |
lilydjwg/tornado
|
refs/heads/master
|
tornado/platform/__init__.py
|
12133432
| |
bratsche/Neutron-Drive
|
refs/heads/master
|
google_appengine/lib/django_1_3/tests/regressiontests/settings_tests/models.py
|
12133432
| |
semonte/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyMissingTypeHintsInspection/noAnnotations.py
|
30
|
def <weak_warning descr="Type hinting is missing for function definition">foo</weak_warning>(x, y):
pass
|
jazkarta/edx-platform
|
refs/heads/master
|
lms/lib/comment_client/user.py
|
144
|
from .utils import merge_dict, perform_request, CommentClientRequestError
import models
import settings
class User(models.Model):
accessible_fields = [
'username', 'follower_ids', 'upvoted_ids', 'downvoted_ids',
'id', 'external_id', 'subscribed_user_ids', 'children', 'course_id',
'group_id', 'subscribed_thread_ids', 'subscribed_commentable_ids',
'subscribed_course_ids', 'threads_count', 'comments_count',
'default_sort_key'
]
updatable_fields = ['username', 'external_id', 'default_sort_key']
initializable_fields = updatable_fields
metric_tag_fields = ['course_id']
base_url = "{prefix}/users".format(prefix=settings.PREFIX)
default_retrieve_params = {'complete': True}
type = 'user'
@classmethod
def from_django_user(cls, user):
return cls(id=str(user.id),
external_id=str(user.id),
username=user.username)
def follow(self, source):
params = {'source_type': source.type, 'source_id': source.id}
response = perform_request(
'post',
_url_for_subscription(self.id),
params,
metric_action='user.follow',
metric_tags=self._metric_tags + ['target.type:{}'.format(source.type)],
)
def unfollow(self, source):
params = {'source_type': source.type, 'source_id': source.id}
response = perform_request(
'delete',
_url_for_subscription(self.id),
params,
metric_action='user.unfollow',
metric_tags=self._metric_tags + ['target.type:{}'.format(source.type)],
)
def vote(self, voteable, value):
if voteable.type == 'thread':
url = _url_for_vote_thread(voteable.id)
elif voteable.type == 'comment':
url = _url_for_vote_comment(voteable.id)
else:
raise CommentClientRequestError("Can only vote / unvote for threads or comments")
params = {'user_id': self.id, 'value': value}
response = perform_request(
'put',
url,
params,
metric_action='user.vote',
metric_tags=self._metric_tags + ['target.type:{}'.format(voteable.type)],
)
voteable._update_from_response(response)
def unvote(self, voteable):
if voteable.type == 'thread':
url = _url_for_vote_thread(voteable.id)
elif voteable.type == 'comment':
url = _url_for_vote_comment(voteable.id)
else:
raise CommentClientRequestError("Can only vote / unvote for threads or comments")
params = {'user_id': self.id}
response = perform_request(
'delete',
url,
params,
metric_action='user.unvote',
metric_tags=self._metric_tags + ['target.type:{}'.format(voteable.type)],
)
voteable._update_from_response(response)
    def active_threads(self, query_params=None):
        if not self.course_id:
            raise CommentClientRequestError("Must provide course_id when retrieving active threads for the user")
        url = _url_for_user_active_threads(self.id)
        params = {'course_id': self.course_id.to_deprecated_string()}
        params = merge_dict(params, query_params or {})  # avoid a shared mutable default argument
response = perform_request(
'get',
url,
params,
metric_action='user.active_threads',
metric_tags=self._metric_tags,
paged_results=True,
)
return response.get('collection', []), response.get('page', 1), response.get('num_pages', 1)
    def subscribed_threads(self, query_params=None):
        if not self.course_id:
            raise CommentClientRequestError("Must provide course_id when retrieving subscribed threads for the user")
        url = _url_for_user_subscribed_threads(self.id)
        params = {'course_id': self.course_id.to_deprecated_string()}
        params = merge_dict(params, query_params or {})  # avoid a shared mutable default argument
response = perform_request(
'get',
url,
params,
metric_action='user.subscribed_threads',
metric_tags=self._metric_tags,
paged_results=True
)
return response.get('collection', []), response.get('page', 1), response.get('num_pages', 1)
def _retrieve(self, *args, **kwargs):
url = self.url(action='get', params=self.attributes)
retrieve_params = self.default_retrieve_params.copy()
retrieve_params.update(kwargs)
if self.attributes.get('course_id'):
retrieve_params['course_id'] = self.course_id.to_deprecated_string()
if self.attributes.get('group_id'):
retrieve_params['group_id'] = self.group_id
try:
response = perform_request(
'get',
url,
retrieve_params,
metric_action='model.retrieve',
metric_tags=self._metric_tags,
)
except CommentClientRequestError as e:
if e.status_code == 404:
# attempt to gracefully recover from a previous failure
# to sync this user to the comments service.
self.save()
response = perform_request(
'get',
url,
retrieve_params,
metric_action='model.retrieve',
metric_tags=self._metric_tags,
)
else:
raise
self._update_from_response(response)
def _url_for_vote_comment(comment_id):
return "{prefix}/comments/{comment_id}/votes".format(prefix=settings.PREFIX, comment_id=comment_id)
def _url_for_vote_thread(thread_id):
return "{prefix}/threads/{thread_id}/votes".format(prefix=settings.PREFIX, thread_id=thread_id)
def _url_for_subscription(user_id):
return "{prefix}/users/{user_id}/subscriptions".format(prefix=settings.PREFIX, user_id=user_id)
def _url_for_user_active_threads(user_id):
return "{prefix}/users/{user_id}/active_threads".format(prefix=settings.PREFIX, user_id=user_id)
def _url_for_user_subscribed_threads(user_id):
return "{prefix}/users/{user_id}/subscribed_threads".format(prefix=settings.PREFIX, user_id=user_id)
|
scienceopen/robust-flow
|
refs/heads/master
|
BlackRobustFlow.py
|
1
|
#!/usr/bin/env python
import logging
import imageio
from pathlib import Path
import numpy as np
from robustflow import runblack,loadflow
from matplotlib.pyplot import figure,show
if __name__ == '__main__':
from argparse import ArgumentParser
p = ArgumentParser()
p.add_argument('pgmstem',help='stem of pgm files')
p.add_argument('-f','--frames',help='start stop frame indices',nargs=2,type=int,default=[0,1])
p.add_argument('--srcpath',help='path to C code and EXE',default='bin')
p.add_argument('-o','--outpath',default='results')
p = p.parse_args()
runblack(p.pgmstem, p.srcpath, p.frames, p.outpath)
u,v = loadflow(p.pgmstem, p.frames, p.outpath)
# %%
if (u[0,0] == u).all():
logging.error(f'all elements of U identical {u[0,0]}')
if (v[0,0] == v).all():
logging.error(f'all elements of V identical {v[0,0]}')
stem = Path(p.pgmstem).expanduser()
imgfn = stem.parent / (stem.name+f'{p.frames[1]}.pgm')
img = imageio.imread(imgfn)
y,x = img.shape
s = 10
X = np.arange(0,x,s)
Y = np.arange(0,y,s)
X,Y = np.meshgrid(X,Y)
ax = figure().gca()
ax.imshow(img,cmap='gray',origin='upper')
ax.quiver(X, Y,
u[::s,::s],v[::s,::s])
ax.set_title(f'{imgfn} robust optical flow')
show()
|
weave-lab/nw.js
|
refs/heads/nw13
|
tools/package_binaries.py
|
64
|
#!/usr/bin/env python
import argparse
import getnwisrelease
import getnwversion
import gzip
import os
import platform
import shutil
import sys
import tarfile
import zipfile
from subprocess import call
steps = ['nw', 'chromedriver', 'symbol', 'headers', 'others']
################################
# Parse command line args
parser = argparse.ArgumentParser(description='Package nw binaries.')
parser.add_argument('-p','--path', help='Where to find the binaries, like out/Release', required=False)
parser.add_argument('-a','--arch', help='target arch', required=False)
parser.add_argument('-m','--mode', help='package mode', required=False)
parser.add_argument('-i','--icudat', help='icudat override', required=False)
group = parser.add_mutually_exclusive_group()
group.add_argument('-s','--step', choices=steps, help='Execute specified step.', required=False)
group.add_argument('-n','--skip', choices=steps, help='Skip specified step.', required=False)
args = parser.parse_args()
################################
# Init variables.
binaries_location = None # .../out/Release
platform_name = None # win/linux/osx
arch = None # ia32/x64
step = None # nw/chromedriver/symbol
skip = None
nw_ver = None # x.xx
dist_dir = None # .../out/Release/dist
flavor = args.mode
is_headers_ok = False # record whether nw-headers generated
package_name = 'nwjs'
if flavor in ['sdk', 'nacl']:
package_name = 'nwjs-' + args.mode
step = args.step
skip = args.skip
binaries_location = args.path
# If the binaries location is not given, calculate it relative to this script's directory.
if binaries_location is None:
binaries_location = os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir, 'out', 'Release')
if not os.path.isabs(binaries_location):
binaries_location = os.path.join(os.getcwd(), binaries_location)
if not os.path.isdir(binaries_location):
print 'Invalid path: ' + binaries_location
exit(-1)
binaries_location = os.path.normpath(binaries_location)
dist_dir = os.path.join(binaries_location, 'dist')
print 'Working on ' + binaries_location
if args.icudat is not None:
#FIXME: for some reason they are the same file (hard link) and copy will fail
os.remove(os.path.join(binaries_location, 'icudtl.dat'))
shutil.copy(args.icudat, binaries_location)
if sys.platform.startswith('linux'):
platform_name = 'linux'
elif sys.platform in ('win32', 'cygwin'):
platform_name = 'win'
elif sys.platform == 'darwin':
platform_name = 'osx'
else:
print 'Unsupported platform: ' + sys.platform
exit(-1)
_arch = platform.architecture()[0]
if _arch == '64bit':
arch = 'x64'
elif _arch == '32bit':
arch = 'ia32'
else:
print 'Unsupported arch: ' + _arch
exit(-1)
if platform_name == 'win':
libfile = os.path.join(binaries_location, 'nw.lib')
expfile = os.path.join(binaries_location, 'nw.exp')
shutil.copy(os.path.join(binaries_location, 'nw.dll.lib'), libfile)
shutil.copy(os.path.join(binaries_location, 'nw.dll.exp'), expfile)
if platform_name == 'win':
arch = 'ia32'
if platform_name != 'osx':
try:
os.remove(os.path.join(binaries_location, 'en-US.pak'))
except OSError:
pass
shutil.copy(os.path.join(binaries_location, 'locales', 'en-US.pak'), binaries_location)
shutil.rmtree(os.path.join(binaries_location, 'locales'))
os.mkdir(os.path.join(binaries_location, 'locales'))
shutil.copy(os.path.join(binaries_location, 'en-US.pak'), os.path.join(binaries_location, 'locales'))
if platform_name == 'osx':
# detect output arch
nw_bin = binaries_location + '/nwjs.app/Contents/MacOS/nwjs'
import subprocess
if 'i386' in subprocess.check_output(['file',nw_bin]):
arch = 'ia32'
else: # should be 'x86_64'
arch = 'x64'
if args.arch is not None:
arch = args.arch
nw_ver = getnwversion.nw_version
if getnwisrelease.release == 0:
nw_ver += getnwisrelease.postfix
################################
# Generate targets
#
# target example:
# {
# 'input' : [ 'nw', 'nw.pak', ... ]
# 'output' : 'nwjs-v0.9.2-linux-x64'
# 'compress' : 'tar.gz'
#   'folder'   : True # Optional. More than one input file will be put into a separate folder
#                     # normally; if you want to do this for only 1 file, set this flag.
# }
def generate_target_nw(platform_name, arch, version):
target = {}
# Output
target['output'] = ''.join([
package_name, '-',
'v', version,
'-', platform_name,
'-', arch])
# Compress type
if platform_name == 'linux':
target['compress'] = 'tar.gz'
else:
target['compress'] = 'zip'
# Input
if platform_name == 'linux':
target['input'] = [
'credits.html',
'resources.pak',
'nw_100_percent.pak',
'nw',
'icudtl.dat',
'locales',
'snapshot_blob.bin',
'natives_blob.bin',
]
if flavor in ['nacl','sdk'] :
target['input'] += ['nacl_helper', 'nacl_helper_bootstrap', 'pnacl']
if arch == 'x64':
target['input'].append('nacl_irt_x86_64.nexe')
else:
target['input'].append('nacl_irt_x86_32.nexe')
elif platform_name == 'win':
target['input'] = [
'snapshot_blob.bin',
'natives_blob.bin',
'd3dcompiler_47.dll',
'libEGL.dll',
'libGLESv2.dll',
'nw.dll',
'nw_elf.dll',
'nw.exe',
'locales',
'icudtl.dat',
'credits.html',
'resources.pak',
'nw_100_percent.pak',
'nw_200_percent.pak',
]
if flavor in ['nacl','sdk'] :
target['input'].append('pnacl')
if arch == 'x64':
target['input'].append('nacl_irt_x86_64.nexe')
else:
target['input'].append('nacl_irt_x86_32.nexe')
elif platform_name == 'osx':
target['input'] = [
'nwjs.app',
'credits.html',
]
else:
print 'Unsupported platform: ' + platform_name
exit(-1)
return target
def generate_target_chromedriver(platform_name, arch, version):
if args.mode != 'sdk':
return generate_target_empty(platform_name, arch, version)
target = {}
# Output
target['output'] = ''.join([
'chromedriver-nw-',
'v', version,
'-', platform_name,
'-', arch])
# Compress type
if platform_name == 'linux':
target['compress'] = 'tar.gz'
else:
target['compress'] = 'zip'
# Input
if platform_name == 'win':
target['input'] = ['chromedriver.exe']
else:
target['input'] = ['chromedriver']
target['folder'] = True # always create a folder
return target
def generate_target_symbols(platform_name, arch, version):
target = {}
target['output'] = ''.join([package_name, '-symbol-',
'v', version,
'-', platform_name,
'-', arch])
if platform_name == 'linux':
target['compress'] = 'tar.gz'
target['input'] = ['nw.breakpad.' + arch]
target['folder'] = True
elif platform_name == 'win':
target['compress'] = None
target['input'] = ['nw.sym.7z']
target['output'] = ''.join([package_name, '-symbol-',
'v', version,
'-', platform_name,
'-', arch, '.7z'])
elif platform_name == 'osx':
target['compress'] = 'zip'
target['input'] = [
'nwjs.breakpad.tar'
]
target['folder'] = True
else:
print 'Unsupported platform: ' + platform_name
exit(-1)
return target
def generate_target_headers(platform_name, arch, version):
# here, call make_nw_header tool to generate headers
# then, move to binaries_location
target = {}
target['output'] = ''
target['compress'] = None
if platform_name == 'osx':
target['input'] = []
        # here, call make-nw-headers.py to generate nw headers
make_nw_header = os.path.join(os.path.dirname(__file__), \
'make-nw-headers.py')
print make_nw_header
res = call(['python', make_nw_header])
if res == 0:
print 'nw-headers generated'
nw_headers_name = 'nw-headers-v' + version + '.tar.gz'
nw_headers_path = os.path.join(os.path.dirname(__file__), \
os.pardir, 'tmp', nw_headers_name)
if os.path.isfile(os.path.join(binaries_location, nw_headers_name)):
os.remove(os.path.join(binaries_location, nw_headers_name))
shutil.move(nw_headers_path, binaries_location)
target['input'].append(nw_headers_name)
else:
            # TODO: handle the error case properly
            print 'nw-headers generation failed'
elif platform_name == 'win':
target['input'] = []
elif platform_name == 'linux':
target['input'] = []
else:
print 'Unsupported platform: ' + platform_name
exit(-1)
return target
def generate_target_empty(platform_name, arch, version):
target = {}
target['output'] = ''
target['compress'] = None
if platform_name == 'win':
target['input'] = []
elif platform_name == 'linux' :
target['input'] = []
else:
target['input'] = []
return target
def generate_target_others(platform_name, arch, version):
target = {}
target['output'] = ''
target['compress'] = None
if platform_name == 'win':
target['input'] = ['nw.exp', 'nw.lib']
elif platform_name == 'linux' :
target['input'] = []
else:
target['input'] = []
return target
################################
# Make packages
def compress(from_dir, to_dir, fname, compress):
from_dir = os.path.normpath(from_dir)
to_dir = os.path.normpath(to_dir)
_from = os.path.join(from_dir, fname)
_to = os.path.join(to_dir, fname)
if compress == 'zip':
z = zipfile.ZipFile(_to + '.zip', 'w', compression=zipfile.ZIP_DEFLATED)
if os.path.isdir(_from):
for root, dirs, files in os.walk(_from):
for f in files:
_path = os.path.join(root, f)
z.write(_path, _path.replace(from_dir+os.sep, ''))
else:
z.write(_from, fname)
z.close()
elif compress == 'tar.gz': # only for folders
if not os.path.isdir(_from):
print 'Will not create tar.gz for a single file: ' + _from
exit(-1)
with tarfile.open(_to + '.tar.gz', 'w:gz') as tar:
tar.add(_from, arcname=os.path.basename(_from))
elif compress == 'gz': # only for single file
if os.path.isdir(_from):
print 'Will not create gz for a folder: ' + _from
exit(-1)
f_in = open(_from, 'rb')
f_out = gzip.open(_to + '.gz', 'wb')
f_out.writelines(f_in)
f_out.close()
f_in.close()
else:
print 'Unsupported compression format: ' + compress
exit(-1)
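# A minimal usage sketch for compress() (hypothetical names and paths):
#   compress('out/Release/dist', 'out/Release/dist', 'nwjs-v0.12.0-linux-x64', 'tar.gz')
# would pack the folder out/Release/dist/nwjs-v0.12.0-linux-x64 into
# out/Release/dist/nwjs-v0.12.0-linux-x64.tar.gz, mirroring the folder branch
# of make_packages() below.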
def make_packages(targets):
    # check file existence
for t in targets:
for f in t['input']:
src = os.path.join(binaries_location, f)
if not os.path.exists(src):
print 'File does not exist: ', src
exit(-1)
# clear the output folder
if os.path.exists(dist_dir):
if not os.path.isdir(dist_dir):
print 'Invalid path: ' + dist_dir
exit(-1)
else:
shutil.rmtree(dist_dir)
# now let's do it
os.mkdir(dist_dir)
for t in targets:
if len(t['input']) == 0:
continue
        if t['compress'] is None:
for f in t['input']:
src = os.path.join(binaries_location, f)
if t['output'] != '':
dest = os.path.join(dist_dir, t['output'])
else:
dest = os.path.join(dist_dir, f)
print "Copying " + f
shutil.copy(src, dest)
        elif ('folder' in t and t['folder'] == True) or len(t['input']) > 1:
print 'Making "' + t['output'] + '.' + t['compress'] + '"'
# copy files into a folder then pack
folder = os.path.join(dist_dir, t['output'])
os.mkdir(folder)
for f in t['input']:
src = os.path.join(binaries_location, f)
dest = os.path.join(folder, f)
if os.path.isdir(src): # like nw.app
shutil.copytree(src, dest)
else:
shutil.copy(src, dest)
compress(dist_dir, dist_dir, t['output'], t['compress'])
# remove temp folders
shutil.rmtree(folder)
else:
# single file
print 'Making "' + t['output'] + '.' + t['compress'] + '"'
compress(binaries_location, dist_dir, t['input'][0], t['compress'])
# must be aligned with steps
generators = {}
generators['nw'] = generate_target_nw
generators['chromedriver'] = generate_target_chromedriver
generators['symbol'] = generate_target_symbols
generators['headers'] = generate_target_headers
generators['others'] = generate_target_others
################################
# Process targets
targets = []
for s in steps:
if (step != None) and (s != step):
continue
if (skip != None) and (s == skip):
continue
targets.append(generators[s](platform_name, arch, nw_ver))
print 'Creating packages...'
make_packages(targets)
# vim: et:ts=4:sw=4
|
albertjan/pypyjs-presentation
|
refs/heads/gh-pages
|
assets/js/pypy.js-0.3.1/lib/modules/encodings/mac_iceland.py
|
593
|
""" Python Character Mapping Codec mac_iceland generated from 'MAPPINGS/VENDORS/APPLE/ICELAND.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mac-iceland',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
u'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE
u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE
u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE
u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xdd' # 0xA0 -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\xb4' # 0xAB -> ACUTE ACCENT
u'\xa8' # 0xAC -> DIAERESIS
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\xc6' # 0xAE -> LATIN CAPITAL LETTER AE
u'\xd8' # 0xAF -> LATIN CAPITAL LETTER O WITH STROKE
u'\u221e' # 0xB0 -> INFINITY
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\xa5' # 0xB4 -> YEN SIGN
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL
u'\u2211' # 0xB7 -> N-ARY SUMMATION
u'\u220f' # 0xB8 -> N-ARY PRODUCT
u'\u03c0' # 0xB9 -> GREEK SMALL LETTER PI
u'\u222b' # 0xBA -> INTEGRAL
u'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR
u'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR
u'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA
u'\xe6' # 0xBE -> LATIN SMALL LETTER AE
u'\xf8' # 0xBF -> LATIN SMALL LETTER O WITH STROKE
u'\xbf' # 0xC0 -> INVERTED QUESTION MARK
u'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u2206' # 0xC6 -> INCREMENT
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE
u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE
u'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE
u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
u'\u2013' # 0xD0 -> EN DASH
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u25ca' # 0xD7 -> LOZENGE
u'\xff' # 0xD8 -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\u0178' # 0xD9 -> LATIN CAPITAL LETTER Y WITH DIAERESIS
u'\u2044' # 0xDA -> FRACTION SLASH
u'\u20ac' # 0xDB -> EURO SIGN
u'\xd0' # 0xDC -> LATIN CAPITAL LETTER ETH
u'\xf0' # 0xDD -> LATIN SMALL LETTER ETH
u'\xde' # 0xDE -> LATIN CAPITAL LETTER THORN
u'\xfe' # 0xDF -> LATIN SMALL LETTER THORN
u'\xfd' # 0xE0 -> LATIN SMALL LETTER Y WITH ACUTE
u'\xb7' # 0xE1 -> MIDDLE DOT
u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK
u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2030' # 0xE4 -> PER MILLE SIGN
u'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xca' # 0xE6 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xcb' # 0xE8 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\uf8ff' # 0xF0 -> Apple logo
u'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I
u'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u02dc' # 0xF7 -> SMALL TILDE
u'\xaf' # 0xF8 -> MACRON
u'\u02d8' # 0xF9 -> BREVE
u'\u02d9' # 0xFA -> DOT ABOVE
u'\u02da' # 0xFB -> RING ABOVE
u'\xb8' # 0xFC -> CEDILLA
u'\u02dd' # 0xFD -> DOUBLE ACUTE ACCENT
u'\u02db' # 0xFE -> OGONEK
u'\u02c7' # 0xFF -> CARON
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
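# A minimal usage sketch (assumes this file is importable as mac_iceland; on
# stock CPython the built-in 'mac-iceland' codec makes explicit registration
# unnecessary):
#   import codecs
#   import mac_iceland
#   codecs.register(lambda name: mac_iceland.getregentry() if name == 'mac-iceland' else None)
#   u'\xfeorn'.encode('mac-iceland')   # -> '\xdforn' (THORN maps to 0xDF per the table above)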
|
hickford/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/foxnews.py
|
50
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_iso8601,
int_or_none,
)
class FoxNewsIE(InfoExtractor):
IE_DESC = 'Fox News and Fox Business Video'
_VALID_URL = r'https?://(?P<host>video\.fox(?:news|business)\.com)/v/(?:video-embed\.html\?video_id=)?(?P<id>\d+)'
_TESTS = [
{
'url': 'http://video.foxnews.com/v/3937480/frozen-in-time/#sp=show-clips',
'md5': '32aaded6ba3ef0d1c04e238d01031e5e',
'info_dict': {
'id': '3937480',
'ext': 'flv',
'title': 'Frozen in Time',
'description': 'Doctors baffled by 16-year-old girl that is the size of a toddler',
'duration': 265,
'timestamp': 1304411491,
'upload_date': '20110503',
'thumbnail': 're:^https?://.*\.jpg$',
},
},
{
'url': 'http://video.foxnews.com/v/3922535568001/rep-luis-gutierrez-on-if-obamas-immigration-plan-is-legal/#sp=show-clips',
'md5': '5846c64a1ea05ec78175421b8323e2df',
'info_dict': {
'id': '3922535568001',
'ext': 'mp4',
'title': "Rep. Luis Gutierrez on if Obama's immigration plan is legal",
'description': "Congressman discusses the president's executive action",
'duration': 292,
'timestamp': 1417662047,
'upload_date': '20141204',
'thumbnail': 're:^https?://.*\.jpg$',
},
},
{
'url': 'http://video.foxnews.com/v/video-embed.html?video_id=3937480&d=video.foxnews.com',
'only_matching': True,
},
{
'url': 'http://video.foxbusiness.com/v/4442309889001',
'only_matching': True,
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
host = mobj.group('host')
video = self._download_json(
'http://%s/v/feed/video/%s.js?template=fox' % (host, video_id), video_id)
item = video['channel']['item']
title = item['title']
description = item['description']
timestamp = parse_iso8601(item['dc-date'])
media_group = item['media-group']
duration = None
formats = []
for media in media_group['media-content']:
attributes = media['@attributes']
video_url = attributes['url']
if video_url.endswith('.f4m'):
formats.extend(self._extract_f4m_formats(video_url + '?hdcore=3.4.0&plugin=aasp-3.4.0.132.124', video_id))
elif video_url.endswith('.m3u8'):
formats.extend(self._extract_m3u8_formats(video_url, video_id, 'flv'))
elif not video_url.endswith('.smil'):
duration = int_or_none(attributes.get('duration'))
formats.append({
'url': video_url,
'format_id': media['media-category']['@attributes']['label'],
'preference': 1,
'vbr': int_or_none(attributes.get('bitrate')),
'filesize': int_or_none(attributes.get('fileSize'))
})
self._sort_formats(formats)
media_thumbnail = media_group['media-thumbnail']['@attributes']
thumbnails = [{
'url': media_thumbnail['url'],
'width': int_or_none(media_thumbnail.get('width')),
'height': int_or_none(media_thumbnail.get('height')),
}] if media_thumbnail else []
return {
'id': video_id,
'title': title,
'description': description,
'duration': duration,
'timestamp': timestamp,
'formats': formats,
'thumbnails': thumbnails,
}
|
rationalAgent/edx-platform-custom
|
refs/heads/master
|
lms/envs/dev_with_worker.py
|
148
|
"""
This config file follows the dev environment, but adds the
requirement of a celery worker running in the background to process
celery tasks.
The worker can be executed using:
django-admin.py celery worker
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0614
from dev import *
################################# CELERY ######################################
# Requires a separate celery worker
CELERY_ALWAYS_EAGER = False
# Use django db as the broker and result store
BROKER_URL = 'django://'
INSTALLED_APPS += ('djcelery.transport', )
CELERY_RESULT_BACKEND = 'database'
DJKOMBU_POLLING_INTERVAL = 1.0
# Disable transaction management because we are using a worker. Views
# that request a task and wait for the result will deadlock otherwise.
MIDDLEWARE_CLASSES = tuple(
c for c in MIDDLEWARE_CLASSES
if c != 'django.middleware.transaction.TransactionMiddleware')
# Note: other alternatives for disabling transactions don't work in 1.4
# https://code.djangoproject.com/ticket/2304
# https://code.djangoproject.com/ticket/16039
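# With CELERY_ALWAYS_EAGER = False, tasks only run once a worker is polling the
# django broker. A quick smoke test (hypothetical task module and name):
#   from someapp.tasks import add
#   add.delay(2, 2).get(timeout=10)   # blocks until a worker picks the task up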
|
lpirl/ansible
|
refs/heads/devel
|
lib/ansible/plugins/action/win_template.py
|
113
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.plugins.action.template import ActionModule as TemplateActionModule
# Even though TemplateActionModule inherits from ActionBase, we still need to
# directly inherit from ActionBase to appease the plugin loader.
class ActionModule(TemplateActionModule, ActionBase):
pass
|
xiebinhqy/Dynamomysql
|
refs/heads/master
|
nodes/0.7.x/python/Document.ListBuiltInCategories.py
|
10
|
import clr
clr.AddReference('RevitAPI')
clr.AddReference('RevitAPIUI')
from Autodesk.Revit.DB import *
import Autodesk
OUT = dir(BuiltInCategory)
|
piffey/ansible
|
refs/heads/devel
|
lib/ansible/plugins/callback/debug.py
|
28
|
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
callback: debug
type: stdout
    short_description: formatted stdout/stderr display
    description:
        - Use this callback to sort through extensive debug output
version_added: "2.4"
extends_documentation_fragment:
- default_callback
requirements:
- set as stdout in configuration
'''
from ansible.plugins.callback.default import CallbackModule as CallbackModule_default
class CallbackModule(CallbackModule_default): # pylint: disable=too-few-public-methods,no-init
'''
Override for the default callback module.
Render std err/out outside of the rest of the result which it prints with
indentation.
'''
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'debug'
def _dump_results(self, result, indent=None, sort_keys=True, keep_invocation=False):
'''Return the text to output for a result.'''
        # Enable JSON indentation
result['_ansible_verbose_always'] = True
save = {}
for key in ['stdout', 'stdout_lines', 'stderr', 'stderr_lines', 'msg', 'module_stdout', 'module_stderr']:
if key in result:
save[key] = result.pop(key)
output = CallbackModule_default._dump_results(self, result)
for key in ['stdout', 'stderr', 'msg', 'module_stdout', 'module_stderr']:
if key in save and save[key]:
output += '\n\n%s:\n\n%s\n' % (key.upper(), save[key])
for key, value in save.items():
result[key] = value
return output
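# To activate this callback (see the 'requirements' note in DOCUMENTATION),
# point Ansible's stdout callback at it in ansible.cfg, for example:
#   [defaults]
#   stdout_callback = debug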
|
Vgr255/Wolfbot
|
refs/heads/master
|
modules/common.py
|
1
|
# The bot commands implemented in here are present no matter which module is loaded
import botconfig
from tools import decorators
import logging
import tools.moduleloader as ld
import traceback
from settings import common as var
from base64 import b64encode
import imp
import settings.wolfgame as settings
import sys
import os
settings.ERRORS = 0
def on_privmsg(cli, rawnick, chan, msg, notice = False):
currmod = ld.MODULES[ld.CURRENT_MODULE]
if botconfig.IGNORE_HIDDEN_COMMANDS and (chan.startswith("@#") or chan.startswith("+#")):
return
if (notice and ((chan != botconfig.NICK and not botconfig.ALLOW_NOTICE_COMMANDS) or
(chan == botconfig.NICK and not botconfig.ALLOW_PRIVATE_NOTICE_COMMANDS))
and "NickServ" not in rawnick):
return # not allowed in settings
if chan != botconfig.NICK: #not a PM
if currmod and "" in currmod.COMMANDS.keys():
for fn in currmod.COMMANDS[""]:
try:
fn(cli, rawnick, chan, msg)
except Exception as e:
if botconfig.DEBUG_MODE:
raise e
else:
logging.error(traceback.format_exc())
cli.msg(chan, "An error has occurred and has been logged.")
if botconfig.SPECIAL_CHAN != "":
cli.msg(botconfig.SPECIAL_CHAN, traceback.format_exc())
settings.ERRORS += 1
if settings.ERRORS == settings.MAX_ERRORS:
cli.quit("An error has been encountered")
# Now that is always called first.
for x in set(list(COMMANDS.keys()) + (list(currmod.COMMANDS.keys()) if currmod else list())):
if x and msg.lower().startswith(botconfig.CMD_CHAR+x) and x not in botconfig.DISABLED_COMMANDS:
h = msg[len(x)+1:]
if not h or h[0] == " " or not x:
for fn in COMMANDS.get(x,[])+(currmod.COMMANDS.get(x,[]) if currmod else []):
try:
fn(cli, rawnick, chan, h.lstrip())
except Exception as e:
if botconfig.DEBUG_MODE:
raise e
else:
logging.error(traceback.format_exc())
cli.msg(chan, "An error has occurred and has been logged.")
if botconfig.SPECIAL_CHAN != "":
cli.msg(botconfig.SPECIAL_CHAN, traceback.format_exc())
settings.ERRORS += 1
if settings.ERRORS == settings.MAX_ERRORS:
cli.quit("An error has been encountered")
else:
for x in set(list(PM_COMMANDS.keys()) + (list(currmod.PM_COMMANDS.keys()) if currmod else list())):
if msg.lower().startswith(botconfig.CMD_CHAR+x):
h = msg[len(x)+1:]
elif not x or msg.lower().startswith(x):
h = msg[len(x):]
else:
continue
if not h or h[0] == " " or not x:
for fn in PM_COMMANDS.get(x, [])+(currmod.PM_COMMANDS.get(x,[]) if currmod else []):
try:
fn(cli, rawnick, h.lstrip())
except Exception as e:
if botconfig.DEBUG_MODE:
raise e
else:
logging.error(traceback.format_exc())
cli.msg(chan, "An error has occurred and has been logged.")
if botconfig.SPECIAL_CHAN != "":
cli.msg(botconfig.SPECIAL_CHAN, traceback.format_exc())
settings.ERRORS += 1
if settings.ERRORS == settings.MAX_ERRORS:
cli.quit("An error has been encountered")
def __unhandled__(cli, prefix, cmd, *args):
currmod = ld.MODULES[ld.CURRENT_MODULE]
if cmd in set(list(HOOKS.keys())+(list(currmod.HOOKS.keys()) if currmod else list())):
largs = list(args)
for i,arg in enumerate(largs):
if isinstance(arg, bytes): largs[i] = arg.decode('ascii')
for fn in HOOKS.get(cmd, [])+(currmod.HOOKS.get(cmd, []) if currmod else []):
try:
fn(cli, prefix, *largs)
except Exception as e:
if botconfig.DEBUG_MODE:
raise e
else:
logging.error(traceback.format_exc())
cli.msg(botconfig.CHANNEL, "An error has occurred and has been logged.")
if botconfig.SPECIAL_CHAN != "":
cli.msg(botconfig.SPECIAL_CHAN, traceback.format_exc())
settings.ERRORS += 1
if settings.ERRORS == settings.MAX_ERRORS:
cli.quit("An error has been encountered")
else:
logging.debug('Unhandled command {0}({1})'.format(cmd, [arg.decode('utf_8')
for arg in args
if isinstance(arg, bytes)]))
COMMANDS = {}
PM_COMMANDS = {}
HOOKS = {}
cmd = decorators.generate(COMMANDS)
pmcmd = decorators.generate(PM_COMMANDS)
hook = decorators.generate(HOOKS, raw_nick=True, permissions=False)
@hook("error")
def restart_on_quit(cli, prefix, msg):
print("RESTARTING")
python = sys.executable
os.execl(python, python, *sys.argv)
@pmcmd("access")
def check_flags(cli, nick, rest):
if nick == "NickServ":
        for botconfig.CHANNEL in rest: # NB: rebinds botconfig.CHANNEL to each character of rest (kept from the original logic)
if 'O' in rest:
settings.AUTO_OP_FLAG = True
settings.AUTO_OP_FAIL = False
return
if 'o' in rest:
settings.AUTO_OP_FLAG = False
settings.AUTO_OP_FAIL = False
return
if 'O' not in rest and 'o' not in rest:
settings.AUTO_OP_FLAG = False
settings.AUTO_OP_FAIL = True
return
def connect_callback(cli):
def send_listchans(*args):
if botconfig.PASS:
cli.msg("NickServ", "listchans")
def prepare_stuff(*args):
cli.join(botconfig.CHANNEL)
if settings.AUTO_OP_FLAG == False:
cli.msg("ChanServ", "op "+botconfig.CHANNEL)
if settings.LOG_CHAN == True or settings.MINIMALIST_LOG == True:
cli.join(botconfig.ADMIN_CHAN)
chan = botconfig.ADMIN_CHAN
if settings.LOG_CHAN == False:
chan = botconfig.CHANNEL
if settings.AUTO_OP_FAIL == True and botconfig.OP_NEEDED == True:
cli.msg(chan, "\u0002Error\u0002: OP status is needed for the game to work.")
if settings.RAW_JOIN == True and botconfig.ALT_CHANS != "":
cli.join(botconfig.ALT_CHANS)
if botconfig.SPECIAL_CHAN != "":
cli.join(botconfig.SPECIAL_CHAN)
if botconfig.PERFORM != "":
cli.send(botconfig.PERFORM)
if botconfig.ADMIN_CHAN == "":
var.LOG_CHAN = False
if botconfig.DEV_CHAN != "" and settings.ALLOW_GIT == True:
cli.join(botconfig.DEV_CHAN)
cli.cap("REQ", "extended-join")
cli.cap("REQ", "account-notify")
try:
ld.MODULES[ld.CURRENT_MODULE].connect_callback(cli)
except AttributeError:
pass # no connect_callback for this one
cli.nick(botconfig.NICK) # very important (for regain/release)
prepare_stuff = hook("endofmotd", hookid=294)(prepare_stuff)
def mustregain(cli, *blah):
if not botconfig.PASS:
return
cli.ns_ghost()
cli.nick(botconfig.NICK)
def mustrelease(cli, *rest):
if not botconfig.PASS:
return
cli.ns_release()
cli.nick(botconfig.NICK)
@hook("unavailresource", hookid=239)
@hook("nicknameinuse", hookid=239)
def must_use_temp_nick(cli, *etc):
cli.nick(botconfig.NICK+"_")
cli.user(botconfig.NICK, "")
decorators.unhook(HOOKS, 239)
hook("unavailresource")(mustrelease)
hook("nicknameinuse")(mustregain)
if botconfig.SASL_AUTHENTICATION:
@hook("authenticate")
def auth_plus(cli, something, plus):
if plus == "+":
nick_b = bytes(botconfig.USERNAME if botconfig.USERNAME else botconfig.NICK, "utf-8")
pass_b = bytes(botconfig.PASS, "utf-8")
secrt_msg = b'\0'.join((nick_b, nick_b, pass_b))
cli.send("AUTHENTICATE " + b64encode(secrt_msg).decode("utf-8"))
@hook("cap")
def on_cap(cli, svr, mynick, ack, cap):
if ack.upper() == "ACK" and "sasl" in cap:
cli.send("AUTHENTICATE PLAIN")
@hook("903")
def on_successful_auth(cli, blah, blahh, blahhh):
cli.cap("END")
@hook("904")
@hook("905")
@hook("906")
@hook("907")
def on_failure_auth(cli, *etc):
cli.quit()
print("Authentication failed. Did you fill the account name "+
"in botconfig.USERNAME if it's different from the bot nick?")
@hook("ping")
def on_ping(cli, prefix, server):
cli.send('PONG', server)
if botconfig.DEBUG_MODE:
@cmd("module", admin_only = True)
def ch_module(cli, nick, chan, rest):
rest = rest.strip()
if rest in ld.MODULES.keys():
ld.CURRENT_MODULE = rest
ld.MODULES[rest].connect_callback(cli)
cli.msg(chan, "Module {0} is now active.".format(rest))
else:
cli.msg(chan, "Module {0} does not exist.".format(rest))
## Logging
@hook("nosuchnick")
def nosuchnick(cli, server, *rest): # cli, server, you, action, output
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "":
rest = list(rest)
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')nosuchnick({1})".format(server, rest))
@hook("cannotsendtochan")
def cannotsendtochan(cli, server, *rest): # cli, server, you, action, output
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "" and botconfig.ADMIN_CHAN not in rest:
rest = list(rest)
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')cannotsendtochan({1})".format(server, rest))
@hook("unknowncommand")
def unknowncommand(cli, server, *rest): # cli, server, you, action, output
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "":
rest = list(rest)
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')unknowncommand({1})".format(server, rest))
@hook("join")
def join(cli, nick, *chan):
chan = list(chan)
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "":
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')join({1})".format(nick, chan))
if nick in settings.IS_ADMIN and settings.IS_ADMIN[nick] == True and settings.AUTO_LOG_TOGGLE == True and settings.LOG_CHAN == False:
settings.LOG_CHAN = True
cli.msg(chan, "Auto-logging has been enabled.")
if nick == botconfig.NICK and chan == botconfig.ADMIN_CHAN:
settings.TOGGLE_ENABLED = False
cli.who(botconfig.ADMIN_CHAN, "%nuchaf")
@hook("whospcrpl", hookid=652)
def log_toggle_join(cli, server, me, chanj, ident, host, nick, status, account):
if nick in settings.IS_ADMIN and settings.IS_ADMIN[nick] == True:
settings.TOGGLE_ENABLED = True
@hook("endofwho", hookid=652)
def toggle_check_join(*stuff):
if settings.TOGGLE_ENABLED == False:
settings.LOG_CHAN = False
decorators.unhook(HOOKS, 652)
@hook("part")
def part(cli, nick, *chan):
chan = list(chan)
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "":
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')part({1})".format(nick, chan))
if settings.AUTO_LOG_TOGGLE == True:
settings.DISABLE_AUTO_LOG = True
cli.who(botconfig.ADMIN_CHAN, "%nuchaf")
@hook("whospcrpl", hookid=652)
def log_toggle_part(cli, server, me, chanp, ident, host, nick, status, account):
if nick in settings.IS_ADMIN and settings.IS_ADMIN[nick] == True:
settings.DISABLE_AUTO_LOG = False
@hook("endofwho", hookid=652)
def toggle_check_part(*stuff):
if settings.DISABLE_AUTO_LOG == True:
settings.LOG_CHAN = False
decorators.unhook(HOOKS, 652)
if nick in settings.IS_OP and chan == botconfig.CHANNEL:
settings.IS_OP.remove(nick)
if nick in settings.WAS_OP and chan == botconfig.CHANNEL:
settings.WAS_OP.remove(nick)
@hook("kick")
def kick(cli, nick, *rest): # cli, nick, chan, target, reason
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "":
rest = list(rest)
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')kick({1})".format(nick, rest))
if settings.AUTO_LOG_TOGGLE == True:
settings.DISABLE_AUTO_LOG = True
cli.who(botconfig.ADMIN_CHAN, "%nuchaf")
@hook("whospcrpl", hookid=652)
def log_toggle_kick(cli, server, me, chank, ident, host, nick, status, account):
if nick in settings.IS_ADMIN and settings.IS_ADMIN[nick] == True:
settings.DISABLE_AUTO_LOG = False
@hook("endofwho", hookid=652)
def toggle_check_kick(*stuff):
if settings.DISABLE_AUTO_LOG == True:
settings.LOG_CHAN = False
decorators.unhook(HOOKS, 652)
@hook("mode")
def mode(cli, nick, *rest): # nick, chan, mode, *params
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "":
params = list(rest)
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')mode({1})".format(nick, params))
@hook("quit")
def quit(cli, nick, *message):
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "" and botconfig.NICK != nick:
msg = list(message)
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')quit({1})".format(nick, msg))
if settings.AUTO_LOG_TOGGLE == True:
settings.DISABLE_AUTO_LOG = True
cli.who(botconfig.ADMIN_CHAN, "%nuchaf")
@hook("whospcrpl", hookid=652)
def log_toggle_quit(cli, server, me, chan, ident, host, nick, status, account):
if nick in settings.IS_ADMIN and settings.IS_ADMIN[nick] == True:
settings.DISABLE_AUTO_LOG = False
@hook("endofwho", hookid=652)
def toggle_check_quit(*stuff):
if settings.DISABLE_AUTO_LOG == True:
settings.LOG_CHAN = False
decorators.unhook(HOOKS, 652)
if nick in settings.IS_OP:
settings.IS_OP.remove(nick)
if nick in settings.WAS_OP:
settings.WAS_OP.remove(nick)
@hook("invite")
def notice(cli, nick, *rest): # cli, nick, target, chan
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "" and botconfig.NICK != nick:
rest = list(rest)
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')invite({1})".format(nick, rest))
@hook("account")
def account(cli, nick, *account):
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "":
account = list(account)
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')account({1})".format(nick, account))
@hook("nick")
def nick_change(cli, nick, *newnick):
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "":
newnick = list(newnick)
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')nick({1})".format(nick, newnick))
@hook("needmoreparams")
def params_missing(cli, server, *rest): # cli, server, you, command, output
if settings.LOG_CHAN == True and botconfig.ADMIN_CHAN != "":
rest = list(rest)
cli.msg(botconfig.ADMIN_CHAN, "processCommand (b'{0}')needmoreparams({1})".format(server, rest))
|
zimmerle/gnuradio
|
refs/heads/master
|
gr-uhd/examples/python/usrp_nbfm_ptt.py
|
8
|
#!/usr/bin/env python
#
# Copyright 2005,2007,2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import math
import sys
import wx
from optparse import OptionParser
from gnuradio import gr, audio, blks2, uhd
from gnuradio.eng_option import eng_option
from gnuradio.wxgui import stdgui2, fftsink2, scopesink2, slider, form
from numpy import convolve, array
#import os
#print "pid =", os.getpid()
#raw_input('Press Enter to continue: ')
# ////////////////////////////////////////////////////////////////////////
# Control Stuff
# ////////////////////////////////////////////////////////////////////////
class ptt_block(stdgui2.std_top_block):
def __init__(self, frame, panel, vbox, argv):
stdgui2.std_top_block.__init__ (self, frame, panel, vbox, argv)
self.frame = frame
self.space_bar_pressed = False
parser = OptionParser (option_class=eng_option)
parser.add_option("-a", "--args", type="string", default="",
help="UHD device address args [default=%default]")
parser.add_option("", "--spec", type="string", default=None,
help="Subdevice of UHD device where appropriate")
parser.add_option("-A", "--antenna", type="string", default=None,
help="select Rx Antenna where appropriate")
parser.add_option ("-f", "--freq", type="eng_float", default=442.1e6,
help="set Tx and Rx frequency to FREQ", metavar="FREQ")
parser.add_option ("-g", "--rx-gain", type="eng_float", default=None,
help="set rx gain [default=midpoint in dB]")
parser.add_option ("", "--tx-gain", type="eng_float", default=None,
help="set tx gain [default=midpoint in dB]")
parser.add_option("-I", "--audio-input", type="string", default="",
help="pcm input device name. E.g., hw:0,0 or /dev/dsp")
parser.add_option("-O", "--audio-output", type="string", default="",
help="pcm output device name. E.g., hw:0,0 or /dev/dsp")
parser.add_option ("-N", "--no-gui", action="store_true", default=False)
(options, args) = parser.parse_args ()
if len(args) != 0:
parser.print_help()
sys.exit(1)
if options.freq < 1e6:
options.freq *= 1e6
self.txpath = transmit_path(options.args, options.spec,
options.antenna, options.tx_gain,
options.audio_input)
self.rxpath = receive_path(options.args, options.spec,
options.antenna, options.rx_gain,
options.audio_output)
self.connect(self.txpath)
self.connect(self.rxpath)
self._build_gui(frame, panel, vbox, argv, options.no_gui)
self.set_transmit(False)
self.set_freq(options.freq)
self.set_rx_gain(self.rxpath.gain) # update gui
self.set_volume(self.rxpath.volume) # update gui
self.set_squelch(self.rxpath.threshold()) # update gui
def set_transmit(self, enabled):
self.txpath.set_enable(enabled)
self.rxpath.set_enable(not(enabled))
if enabled:
self.frame.SetStatusText ("Transmitter ON", 1)
else:
self.frame.SetStatusText ("Receiver ON", 1)
def set_rx_gain(self, gain):
self.myform['rx_gain'].set_value(gain) # update displayed value
self.rxpath.set_gain(gain)
def set_tx_gain(self, gain):
self.txpath.set_gain(gain)
def set_squelch(self, threshold):
self.rxpath.set_squelch(threshold)
self.myform['squelch'].set_value(self.rxpath.threshold())
def set_volume (self, vol):
self.rxpath.set_volume(vol)
self.myform['volume'].set_value(self.rxpath.volume)
#self.update_status_bar ()
def set_freq(self, freq):
r1 = self.txpath.set_freq(freq)
r2 = self.rxpath.set_freq(freq)
#print "txpath.set_freq =", r1
#print "rxpath.set_freq =", r2
if r1 and r2:
self.myform['freq'].set_value(freq) # update displayed value
return r1 and r2
def _build_gui(self, frame, panel, vbox, argv, no_gui):
def _form_set_freq(kv):
return self.set_freq(kv['freq'])
self.panel = panel
# FIXME This REALLY needs to be replaced with a hand-crafted button
# that sends both button down and button up events
hbox = wx.BoxSizer(wx.HORIZONTAL)
hbox.Add((10,0), 1)
self.status_msg = wx.StaticText(panel, -1, "Press Space Bar to Transmit")
of = self.status_msg.GetFont()
self.status_msg.SetFont(wx.Font(15, of.GetFamily(), of.GetStyle(), of.GetWeight()))
hbox.Add(self.status_msg, 0, wx.ALIGN_CENTER)
hbox.Add((10,0), 1)
vbox.Add(hbox, 0, wx.EXPAND | wx.ALIGN_CENTER)
panel.Bind(wx.EVT_KEY_DOWN, self._on_key_down)
panel.Bind(wx.EVT_KEY_UP, self._on_key_up)
panel.Bind(wx.EVT_KILL_FOCUS, self._on_kill_focus)
panel.SetFocus()
if 1 and not(no_gui):
rx_fft = fftsink2.fft_sink_c(panel, title="Rx Input", fft_size=512,
sample_rate=self.rxpath.if_rate,
ref_level=80, y_per_div=20)
self.connect (self.rxpath.u, rx_fft)
vbox.Add (rx_fft.win, 1, wx.EXPAND)
if 1 and not(no_gui):
rx_fft = fftsink2.fft_sink_c(panel, title="Post s/w Resampler",
fft_size=512, sample_rate=self.rxpath.quad_rate,
ref_level=80, y_per_div=20)
self.connect (self.rxpath.resamp, rx_fft)
vbox.Add (rx_fft.win, 1, wx.EXPAND)
if 0 and not(no_gui):
foo = scopesink2.scope_sink_f(panel, title="Squelch",
sample_rate=32000)
self.connect (self.rxpath.fmrx.div, (foo,0))
self.connect (self.rxpath.fmrx.gate, (foo,1))
self.connect (self.rxpath.fmrx.squelch_lpf, (foo,2))
vbox.Add (foo.win, 1, wx.EXPAND)
if 0 and not(no_gui):
tx_fft = fftsink2.fft_sink_c(panel, title="Tx Output",
fft_size=512, sample_rate=self.txpath.usrp_rate)
self.connect (self.txpath.amp, tx_fft)
vbox.Add (tx_fft.win, 1, wx.EXPAND)
# add control area at the bottom
self.myform = myform = form.form()
# first row
hbox = wx.BoxSizer(wx.HORIZONTAL)
hbox.Add((5,0), 0, 0)
myform['freq'] = form.float_field(
parent=panel, sizer=hbox, label="Freq", weight=1,
callback=myform.check_input_and_call(_form_set_freq, self._set_status_msg))
hbox.Add((5,0), 0, 0)
vbox.Add(hbox, 0, wx.EXPAND)
# second row
hbox = wx.BoxSizer(wx.HORIZONTAL)
myform['volume'] = \
form.quantized_slider_field(parent=self.panel, sizer=hbox, label="Volume",
weight=3, range=self.rxpath.volume_range(),
callback=self.set_volume)
hbox.Add((5,0), 0)
myform['squelch'] = \
form.quantized_slider_field(parent=self.panel, sizer=hbox, label="Squelch",
weight=3, range=self.rxpath.squelch_range(),
callback=self.set_squelch)
g = self.rxpath.u.get_gain_range()
hbox.Add((5,0), 0)
myform['rx_gain'] = \
form.quantized_slider_field(parent=self.panel, sizer=hbox, label="Rx Gain",
weight=3, range=(g.start(), g.stop(), g.step()),
callback=self.set_rx_gain)
hbox.Add((5,0), 0)
vbox.Add(hbox, 0, wx.EXPAND)
self._build_subpanel(vbox)
def _build_subpanel(self, vbox_arg):
# build a secondary information panel (sometimes hidden)
# FIXME figure out how to have this be a subpanel that is always
# created, but has its visibility controlled by foo.Show(True/False)
#if not(self.show_debug_info):
# return
panel = self.panel
vbox = vbox_arg
myform = self.myform
#panel = wx.Panel(self.panel, -1)
#vbox = wx.BoxSizer(wx.VERTICAL)
hbox = wx.BoxSizer(wx.HORIZONTAL)
hbox.Add((5,0), 0)
#myform['decim'] = form.static_float_field(
# parent=panel, sizer=hbox, label="Decim")
#hbox.Add((5,0), 1)
#myform['fs@usb'] = form.static_float_field(
# parent=panel, sizer=hbox, label="Fs@USB")
#hbox.Add((5,0), 1)
#myform['dbname'] = form.static_text_field(
# parent=panel, sizer=hbox)
hbox.Add((5,0), 0)
vbox.Add(hbox, 0, wx.EXPAND)
def _set_status_msg(self, msg, which=0):
self.frame.GetStatusBar().SetStatusText(msg, which)
def _on_key_down(self, evt):
# print "key_down:", evt.m_keyCode
if evt.m_keyCode == wx.WXK_SPACE and not(self.space_bar_pressed):
self.space_bar_pressed = True
self.set_transmit(True)
def _on_key_up(self, evt):
# print "key_up", evt.m_keyCode
if evt.m_keyCode == wx.WXK_SPACE:
self.space_bar_pressed = False
self.set_transmit(False)
def _on_kill_focus(self, evt):
# if we lose the keyboard focus, turn off the transmitter
self.space_bar_pressed = False
self.set_transmit(False)
# ////////////////////////////////////////////////////////////////////////
# Transmit Path
# ////////////////////////////////////////////////////////////////////////
class transmit_path(gr.hier_block2):
def __init__(self, args, spec, antenna, gain, audio_input):
gr.hier_block2.__init__(self, "transmit_path",
gr.io_signature(0, 0, 0), # Input signature
gr.io_signature(0, 0, 0)) # Output signature
self.u = uhd.usrp_sink(device_addr=args, stream_args=uhd.stream_args('fc32'))
# Set the subdevice spec
if(spec):
self.u.set_subdev_spec(spec, 0)
# Set the antenna
if(antenna):
self.u.set_antenna(antenna, 0)
self.if_rate = 320e3
self.audio_rate = 32e3
self.u.set_samp_rate(self.if_rate)
dev_rate = self.u.get_samp_rate()
self.audio_gain = 10
self.normal_gain = 32000
self.audio = audio.source(int(self.audio_rate), audio_input)
self.audio_amp = gr.multiply_const_ff(self.audio_gain)
lpf = gr.firdes.low_pass (1, # gain
self.audio_rate, # sampling rate
3800, # low pass cutoff freq
300, # width of trans. band
gr.firdes.WIN_HANN) # filter type
hpf = gr.firdes.high_pass (1, # gain
self.audio_rate, # sampling rate
325, # low pass cutoff freq
50, # width of trans. band
gr.firdes.WIN_HANN) # filter type
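        # Convolving the low-pass and high-pass tap sets cascades the two
        # filters into a single band-pass FIR (~325 Hz to 3.8 kHz voice band)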
audio_taps = convolve(array(lpf),array(hpf))
self.audio_filt = gr.fir_filter_fff(1,audio_taps)
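        # CTCSS ("PL" tone) generator: a 123.0 Hz sub-audible tone that gets
        # summed with the filtered voice audio before FM modulation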
self.pl = blks2.ctcss_gen_f(self.audio_rate,123.0)
self.add_pl = gr.add_ff()
self.connect(self.pl,(self.add_pl,1))
self.fmtx = blks2.nbfm_tx(self.audio_rate, self.if_rate)
self.amp = gr.multiply_const_cc (self.normal_gain)
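        # The hardware may not honor if_rate exactly, so resample by the
        # ratio of the rate the device actually settled on to the nominal IF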
rrate = dev_rate / self.if_rate
self.resamp = blks2.pfb_arb_resampler_ccf(rrate)
self.connect(self.audio, self.audio_amp, self.audio_filt,
(self.add_pl,0), self.fmtx, self.amp,
self.resamp, self.u)
if gain is None:
# if no gain was specified, use the mid-point in dB
g = self.u.get_gain_range()
gain = float(g.start() + g.stop())/2.0
self.set_gain(gain)
self.set_enable(False)
def set_freq(self, target_freq):
"""
Set the center frequency we're interested in.
@param target_freq: frequency in Hz
        @rtype: bool
"""
r = self.u.set_center_freq(target_freq)
if r:
return True
return False
def set_gain(self, gain):
self.gain = gain
self.u.set_gain(gain)
def set_enable(self, enable):
if enable:
self.amp.set_k (self.normal_gain)
else:
self.amp.set_k (0)
# ////////////////////////////////////////////////////////////////////////
# Receive Path
# ////////////////////////////////////////////////////////////////////////
class receive_path(gr.hier_block2):
    def __init__(self, args, spec, antenna, gain, audio_output):
gr.hier_block2.__init__(self, "receive_path",
gr.io_signature(0, 0, 0), # Input signature
gr.io_signature(0, 0, 0)) # Output signature
self.u = uhd.usrp_source(device_addr=args,
io_type=uhd.io_type.COMPLEX_FLOAT32,
num_channels=1)
self.if_rate = 256e3
self.quad_rate = 64e3
self.audio_rate = 32e3
self.u.set_samp_rate(self.if_rate)
dev_rate = self.u.get_samp_rate()
# Create filter to get actual channel we want
nfilts = 32
chan_coeffs = gr.firdes.low_pass (nfilts, # gain
nfilts*dev_rate, # sampling rate
13e3, # low pass cutoff freq
4e3, # width of trans. band
gr.firdes.WIN_HANN) # filter type
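        # Decimate from the device rate down to the 64 kS/s quadrature rate.
        # chan_coeffs is designed at nfilts times the device rate because the
        # polyphase filterbank spreads the taps across nfilts arms.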
rrate = self.quad_rate / dev_rate
self.resamp = blks2.pfb_arb_resampler_ccf(rrate, chan_coeffs, nfilts)
# instantiate the guts of the single channel receiver
self.fmrx = blks2.nbfm_rx(self.audio_rate, self.quad_rate)
# standard squelch block
self.squelch = blks2.standard_squelch(self.audio_rate)
# audio gain / mute block
self._audio_gain = gr.multiply_const_ff(1.0)
# sound card as final sink
audio_sink = audio.sink (int(self.audio_rate), audio_output)
# now wire it all together
self.connect (self.u, self.resamp, self.fmrx, self.squelch,
self._audio_gain, audio_sink)
if gain is None:
# if no gain was specified, use the mid-point in dB
g = self.u.get_gain_range()
gain = float(g.start() + g.stop())/2.0
self.enabled = True
self.set_gain(gain)
v = self.volume_range()
self.set_volume((v[0]+v[1])/2)
s = self.squelch_range()
self.set_squelch((s[0]+s[1])/2)
# Set the subdevice spec
if(spec):
self.u.set_subdev_spec(spec, 0)
# Set the antenna
if(antenna):
self.u.set_antenna(antenna, 0)
def volume_range(self):
return (-20.0, 0.0, 0.5)
def set_volume (self, vol):
g = self.volume_range()
self.volume = max(g[0], min(g[1], vol))
self._update_audio_gain()
def set_enable(self, enable):
self.enabled = enable
self._update_audio_gain()
def _update_audio_gain(self):
if self.enabled:
self._audio_gain.set_k(10**(self.volume/10))
else:
self._audio_gain.set_k(0)
def squelch_range(self):
return self.squelch.squelch_range()
def set_squelch(self, threshold):
print "SQL =", threshold
self.squelch.set_threshold(threshold)
def threshold(self):
return self.squelch.threshold()
def set_freq(self, target_freq):
"""
Set the center frequency we're interested in.
@param target_freq: frequency in Hz
        @rtype: bool
"""
r = self.u.set_center_freq(target_freq)
if r:
return True
return False
def set_gain(self, gain):
self.gain = gain
self.u.set_gain(gain)
# ////////////////////////////////////////////////////////////////////////
# Main
# ////////////////////////////////////////////////////////////////////////
def main():
app = stdgui2.stdapp(ptt_block, "NBFM Push to Talk")
app.MainLoop()
if __name__ == '__main__':
main()
|
dmpetrov/dataversioncontrol
|
refs/heads/master
|
tests/unit/scm/test_git.py
|
1
|
import os
from tests.basic_env import TestDvcGit
class TestGit(TestDvcGit):
def test_belongs_to_scm_true_on_gitignore(self):
path = os.path.join("path", "to", ".gitignore")
self.assertTrue(self.dvc.scm.belongs_to_scm(path))
def test_belongs_to_scm_true_on_git_internal(self):
path = os.path.join("path", "to", ".git", "internal", "file")
self.assertTrue(self.dvc.scm.belongs_to_scm(path))
def test_belongs_to_scm_false(self):
path = os.path.join("some", "non-.git", "file")
self.assertFalse(self.dvc.scm.belongs_to_scm(path))
def test_walk_with_submodules(tmp_dir, scm, git_dir):
git_dir.scm_gen(
{"foo": "foo", "bar": "bar", "dir": {"data": "data"}},
commit="add dir and files",
)
scm.repo.create_submodule("submodule", "submodule", url=os.fspath(git_dir))
scm.commit("added submodule")
files = []
dirs = []
tree = scm.get_tree("HEAD")
for _, dnames, fnames in tree.walk("."):
dirs.extend(dnames)
files.extend(fnames)
# currently we don't walk through submodules
assert not dirs
assert set(files) == {".gitmodules", "submodule"}
def test_is_tracked(tmp_dir, scm):
tmp_dir.scm_gen(
{
"tracked": "tracked",
"dir": {"data": "data", "subdir": {"subdata": "subdata"}},
},
commit="add dirs and files",
)
tmp_dir.gen({"untracked": "untracked", "dir": {"untracked": "untracked"}})
# sanity check
assert (tmp_dir / "untracked").exists()
assert (tmp_dir / "tracked").exists()
assert (tmp_dir / "dir" / "untracked").exists()
assert (tmp_dir / "dir" / "data").exists()
assert (tmp_dir / "dir" / "subdir" / "subdata").exists()
assert not scm.is_tracked("untracked")
assert not scm.is_tracked(os.path.join("dir", "untracked"))
assert scm.is_tracked("tracked")
assert scm.is_tracked("dir")
assert scm.is_tracked(os.path.join("dir", "data"))
assert scm.is_tracked(os.path.join("dir", "subdir"))
assert scm.is_tracked(os.path.join("dir", "subdir", "subdata"))
def test_is_tracked_unicode(tmp_dir, scm):
tmp_dir.scm_gen("ṭṝḁḉḵḗḋ", "tracked", commit="add unicode")
tmp_dir.gen("ṳṋṭṝḁḉḵḗḋ", "untracked")
assert scm.is_tracked("ṭṝḁḉḵḗḋ")
assert not scm.is_tracked("ṳṋṭṝḁḉḵḗḋ")
def test_no_commits(tmp_dir):
from tests.dir_helpers import git_init
from dvc.scm.git import Git
git_init(".")
assert Git().no_commits
tmp_dir.gen("foo", "foo")
Git().add(["foo"])
Git().commit("foo")
assert not Git().no_commits
|
pyoceans/gridded
|
refs/heads/master
|
setup.py
|
1
|
from setuptools import setup, find_packages
from gridded import __version__
reqs = [line.strip() for line in open('requirements.txt')]
def readme():
with open('README.md') as f:
return f.read()
setup(
name = "gridded",
version = __version__,
description = "API for interpolation on regular grid, curvilinear orthogonal grid, unstructured grid",
long_description = readme(),
license = "MIT License",
author = "Rob Hetland, Rich Signell, Kyle Wilcox",
author_email = "hetland@tamu.edu, rsignell@usgs.gov, kyle@axiomdatascience.com",
url = "https://github.com/pyoceans/gridded",
packages = find_packages(),
install_requires = reqs,
tests_require = ['pytest'],
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
entry_points = {
'gridded.dimension_adapters': [
'test_dimension_adapter = gridded.gridded:TestDimensionAdapter',
]
}
)
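# A sketch of how the 'gridded.dimension_adapters' entry point declared above
# could be discovered at runtime (pkg_resources-based lookup is an assumption,
# not something this setup.py mandates):
#
#     import pkg_resources
#     for ep in pkg_resources.iter_entry_points('gridded.dimension_adapters'):
#         adapter = ep.load()  # e.g. gridded.gridded:TestDimensionAdapter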
|
moonkun123/Bot-Kicker-1
|
refs/heads/master
|
LineAlpha/LineThrift/MessageService.py
|
4
|
#
# Autogenerated by Thrift Compiler (0.9.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import logging
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except ImportError:
fastbinary = None
class Iface:
def fetchMessageOperations(self, localRevision, lastOpTimestamp, count):
"""
Parameters:
- localRevision
- lastOpTimestamp
- count
"""
pass
def getLastReadMessageIds(self, chatId):
"""
Parameters:
- chatId
"""
pass
def multiGetLastReadMessageIds(self, chatIds):
"""
Parameters:
- chatIds
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def fetchMessageOperations(self, localRevision, lastOpTimestamp, count):
"""
Parameters:
- localRevision
- lastOpTimestamp
- count
"""
self.send_fetchMessageOperations(localRevision, lastOpTimestamp, count)
return self.recv_fetchMessageOperations()
def send_fetchMessageOperations(self, localRevision, lastOpTimestamp, count):
self._oprot.writeMessageBegin('fetchMessageOperations', TMessageType.CALL, self._seqid)
args = fetchMessageOperations_args()
args.localRevision = localRevision
args.lastOpTimestamp = lastOpTimestamp
args.count = count
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_fetchMessageOperations(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = fetchMessageOperations_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchMessageOperations failed: unknown result")
def getLastReadMessageIds(self, chatId):
"""
Parameters:
- chatId
"""
self.send_getLastReadMessageIds(chatId)
return self.recv_getLastReadMessageIds()
def send_getLastReadMessageIds(self, chatId):
self._oprot.writeMessageBegin('getLastReadMessageIds', TMessageType.CALL, self._seqid)
args = getLastReadMessageIds_args()
args.chatId = chatId
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getLastReadMessageIds(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getLastReadMessageIds_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getLastReadMessageIds failed: unknown result")
def multiGetLastReadMessageIds(self, chatIds):
"""
Parameters:
- chatIds
"""
self.send_multiGetLastReadMessageIds(chatIds)
return self.recv_multiGetLastReadMessageIds()
def send_multiGetLastReadMessageIds(self, chatIds):
self._oprot.writeMessageBegin('multiGetLastReadMessageIds', TMessageType.CALL, self._seqid)
args = multiGetLastReadMessageIds_args()
args.chatIds = chatIds
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_multiGetLastReadMessageIds(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = multiGetLastReadMessageIds_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "multiGetLastReadMessageIds failed: unknown result")
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["fetchMessageOperations"] = Processor.process_fetchMessageOperations
self._processMap["getLastReadMessageIds"] = Processor.process_getLastReadMessageIds
self._processMap["multiGetLastReadMessageIds"] = Processor.process_multiGetLastReadMessageIds
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_fetchMessageOperations(self, seqid, iprot, oprot):
args = fetchMessageOperations_args()
args.read(iprot)
iprot.readMessageEnd()
result = fetchMessageOperations_result()
try:
result.success = self._handler.fetchMessageOperations(args.localRevision, args.lastOpTimestamp, args.count)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TalkException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("fetchMessageOperations", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getLastReadMessageIds(self, seqid, iprot, oprot):
args = getLastReadMessageIds_args()
args.read(iprot)
iprot.readMessageEnd()
result = getLastReadMessageIds_result()
try:
result.success = self._handler.getLastReadMessageIds(args.chatId)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TalkException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getLastReadMessageIds", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_multiGetLastReadMessageIds(self, seqid, iprot, oprot):
args = multiGetLastReadMessageIds_args()
args.read(iprot)
iprot.readMessageEnd()
result = multiGetLastReadMessageIds_result()
try:
result.success = self._handler.multiGetLastReadMessageIds(args.chatIds)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TalkException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("multiGetLastReadMessageIds", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class fetchMessageOperations_args:
"""
Attributes:
- localRevision
- lastOpTimestamp
- count
"""
thrift_spec = (
None, # 0
None, # 1
(2, TType.I64, 'localRevision', None, None, ), # 2
(3, TType.I64, 'lastOpTimestamp', None, None, ), # 3
(4, TType.I32, 'count', None, None, ), # 4
)
def __init__(self, localRevision=None, lastOpTimestamp=None, count=None,):
self.localRevision = localRevision
self.lastOpTimestamp = lastOpTimestamp
self.count = count
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.I64:
self.localRevision = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.lastOpTimestamp = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.count = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('fetchMessageOperations_args')
if self.localRevision is not None:
oprot.writeFieldBegin('localRevision', TType.I64, 2)
oprot.writeI64(self.localRevision)
oprot.writeFieldEnd()
if self.lastOpTimestamp is not None:
oprot.writeFieldBegin('lastOpTimestamp', TType.I64, 3)
oprot.writeI64(self.lastOpTimestamp)
oprot.writeFieldEnd()
if self.count is not None:
oprot.writeFieldBegin('count', TType.I32, 4)
oprot.writeI32(self.count)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.localRevision)
value = (value * 31) ^ hash(self.lastOpTimestamp)
value = (value * 31) ^ hash(self.count)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class fetchMessageOperations_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (MessageOperations, MessageOperations.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (TalkException, TalkException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = MessageOperations()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = TalkException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('fetchMessageOperations_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getLastReadMessageIds_args:
"""
Attributes:
- chatId
"""
thrift_spec = (
None, # 0
None, # 1
(2, TType.STRING, 'chatId', None, None, ), # 2
)
def __init__(self, chatId=None,):
self.chatId = chatId
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.STRING:
self.chatId = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getLastReadMessageIds_args')
if self.chatId is not None:
oprot.writeFieldBegin('chatId', TType.STRING, 2)
oprot.writeString(self.chatId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.chatId)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getLastReadMessageIds_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (LastReadMessageIds, LastReadMessageIds.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (TalkException, TalkException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = LastReadMessageIds()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = TalkException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getLastReadMessageIds_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class multiGetLastReadMessageIds_args:
"""
Attributes:
- chatIds
"""
thrift_spec = (
None, # 0
None, # 1
(2, TType.LIST, 'chatIds', (TType.STRING,None), None, ), # 2
)
def __init__(self, chatIds=None,):
self.chatIds = chatIds
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.LIST:
self.chatIds = []
(_etype666, _size663) = iprot.readListBegin()
for _i667 in xrange(_size663):
_elem668 = iprot.readString()
self.chatIds.append(_elem668)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('multiGetLastReadMessageIds_args')
if self.chatIds is not None:
oprot.writeFieldBegin('chatIds', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.chatIds))
for iter669 in self.chatIds:
oprot.writeString(iter669)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.chatIds)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class multiGetLastReadMessageIds_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.LIST, 'success', (TType.STRUCT,(LastReadMessageIds, LastReadMessageIds.thrift_spec)), None, ), # 0
(1, TType.STRUCT, 'e', (TalkException, TalkException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype673, _size670) = iprot.readListBegin()
for _i674 in xrange(_size670):
_elem675 = LastReadMessageIds()
_elem675.read(iprot)
self.success.append(_elem675)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = TalkException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('multiGetLastReadMessageIds_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
for iter676 in self.success:
iter676.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
|
js0701/chromium-crosswalk
|
refs/heads/master
|
tools/telemetry/third_party/modulegraph/modulegraph_tests/testdata/syspath/mymodule3.py
|
26
|
""" fake module """
|
swift-lang/swift-e-lab
|
refs/heads/master
|
parsl/providers/cobalt/template.py
|
2
|
template_string = '''#!/bin/bash -e
${scheduler_options}
${worker_init}
echo "Starting Cobalt job script"
echo "----Cobalt Nodefile: -----"
cat $$COBALT_NODEFILE
echo "--------------------------"
export JOBNAME="${jobname}"
$user_script
echo "End of Cobalt job"
'''
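# A minimal sketch of how this template is presumably rendered: the ${...}
# placeholders and the escaped $$COBALT_NODEFILE match string.Template
# semantics (an assumption; the substitution values below are illustrative):
#
#     from string import Template
#     script = Template(template_string).substitute(
#         scheduler_options='#COBALT --attrs mcdram=cache',
#         worker_init='source activate parsl_env',
#         jobname='parsl.cobalt.job',
#         user_script='hostname')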
|
stscieisenhamer/glue
|
refs/heads/master
|
glue/main.py
|
1
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function
import sys
import optparse
from glue import __version__
from glue.logger import logger
try:
from glue.utils.qt.decorators import die_on_error
except ImportError:
from glue.utils.decorators import die_on_error
def parse(argv):
""" Parse argument list, check validity
:param argv: Arguments passed to program
*Returns*
    A tuple of options, positional arguments
"""
usage = """usage: %prog [options] [FILE FILE...]
# start a new session
%prog
# start a new session and load a file
%prog image.fits
#start a new session with multiple files
%prog image.fits catalog.csv
#restore a saved session
%prog saved_session.glu
or
%prog -g saved_session.glu
#run a script
%prog -x script.py
#run the test suite
%prog -t
"""
parser = optparse.OptionParser(usage=usage,
version=str(__version__))
parser.add_option('-x', '--execute', action='store_true', dest='script',
help="Execute FILE as a python script", default=False)
parser.add_option('-g', action='store_true', dest='restore',
help="Restore glue session from FILE", default=False)
parser.add_option('-t', '--test', action='store_true', dest='test',
help="Run test suite", default=False)
parser.add_option('-c', '--config', type='string', dest='config',
metavar='CONFIG',
help='use CONFIG as configuration file')
parser.add_option('-v', '--verbose', action='store_true',
                      help="Increase the verbosity level", default=False)
parser.add_option('--no-maximized', action='store_true', dest='nomax',
help="Do not start Glue maximized", default=False)
parser.add_option('--startup', dest='startup', type='string',
help="Startup actions to carry out", default='')
parser.add_option('--auto-merge', dest='auto_merge', action='store_true',
                      help="Automatically merge any data passed on the command-line", default=False)
err_msg = verify(parser, argv)
if err_msg:
sys.stderr.write('\n%s\n' % err_msg)
parser.print_help()
sys.exit(1)
return parser.parse_args(argv)
def verify(parser, argv):
""" Check for input errors
:param parser: OptionParser instance
:param argv: Argument list
:type argv: List of strings
*Returns*
An error message, or None
"""
opts, args = parser.parse_args(argv)
err_msg = None
if opts.script and opts.restore:
err_msg = "Cannot specify -g with -x"
elif opts.script and opts.config:
err_msg = "Cannot specify -c with -x"
elif opts.script and len(args) != 1:
err_msg = "Must provide a script\n"
elif opts.restore and len(args) != 1:
err_msg = "Must provide a .glu file\n"
return err_msg
@die_on_error("Error restoring Glue session")
def restore_session(gluefile):
"""Load a .glu file and return a DataCollection, Hub tuple"""
from glue.app.qt import GlueApplication
return GlueApplication.restore_session(gluefile)
@die_on_error("Error reading data file")
def load_data_files(datafiles):
"""Load data files and return a list of datasets"""
from glue.core.data_factories import auto_data, load_data
datasets = []
for df in datafiles:
datasets.append(load_data(df, auto_data))
return datasets
def run_tests():
from glue import test
test()
def start_glue(gluefile=None, config=None, datafiles=None, maximized=True,
startup_actions=None, auto_merge=False):
"""Run a glue session and exit
Parameters
----------
gluefile : str
An optional ``.glu`` file to restore.
config : str
An optional configuration file to use.
    datafiles : list of str
An optional list of data files to load.
maximized : bool
Maximize screen on startup. Otherwise, use default size.
auto_merge : bool, optional
Whether to automatically merge data passed in `datafiles` (default is `False`)
"""
import glue
from glue.utils.qt import get_qapp
app = get_qapp()
splash = get_splash()
splash.show()
# Start off by loading plugins. We need to do this before restoring
# the session or loading the configuration since these may use existing
# plugins.
load_plugins(splash=splash)
from glue.app.qt import GlueApplication
datafiles = datafiles or []
hub = None
from qtpy.QtCore import QTimer
timer = QTimer()
timer.setInterval(1000)
timer.setSingleShot(True)
timer.timeout.connect(splash.close)
timer.start()
if gluefile is not None:
app = restore_session(gluefile)
return app.start()
if config is not None:
glue.env = glue.config.load_configuration(search_path=[config])
data_collection = glue.core.DataCollection()
hub = data_collection.hub
splash.set_progress(100)
session = glue.core.Session(data_collection=data_collection, hub=hub)
ga = GlueApplication(session=session)
if datafiles:
datasets = load_data_files(datafiles)
ga.add_datasets(data_collection, datasets, auto_merge=auto_merge)
if startup_actions is not None:
for name in startup_actions:
ga.run_startup_action(name)
return ga.start(maximized=maximized)
@die_on_error("Error running script")
def execute_script(script):
""" Run a python script and exit.
Provides a way for people with pre-installed binaries to use
the glue library
"""
with open(script) as fin:
exec(fin.read())
sys.exit(0)
def get_splash():
"""Instantiate a splash screen"""
from glue.app.qt.splash_screen import QtSplashScreen
splash = QtSplashScreen()
return splash
def main(argv=sys.argv):
opt, args = parse(argv[1:])
if opt.verbose:
logger.setLevel("INFO")
logger.info("Input arguments: %s", sys.argv)
# Global keywords for Glue startup.
kwargs = {'config': opt.config,
'maximized': not opt.nomax,
'auto_merge': opt.auto_merge}
if opt.startup:
kwargs['startup_actions'] = opt.startup.split(',')
if opt.test:
return run_tests()
elif opt.restore:
start_glue(gluefile=args[0], **kwargs)
elif opt.script:
execute_script(args[0])
else:
has_file = len(args) == 1
has_files = len(args) > 1
has_py = has_file and args[0].endswith('.py')
has_glu = has_file and args[0].endswith('.glu')
if has_py:
execute_script(args[0])
elif has_glu:
start_glue(gluefile=args[0], **kwargs)
elif has_file or has_files:
start_glue(datafiles=args, **kwargs)
else:
start_glue(**kwargs)
_loaded_plugins = set()
_installed_plugins = set()
def load_plugins(splash=None):
# Search for plugins installed via entry_points. Basically, any package can
# define plugins for glue, and needs to define an entry point using the
# following format:
#
# entry_points = """
# [glue.plugins]
# webcam_importer=glue_exp.importers.webcam:setup
# vizier_importer=glue_exp.importers.vizier:setup
# dataverse_importer=glue_exp.importers.dataverse:setup
# """
#
# where ``setup`` is a function that does whatever is needed to set up the
# plugin, such as add items to various registries.
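    #
    # A hypothetical plugin-side setup function might then just register
    # items (the registry and names below are illustrative, not a confirmed
    # glue API):
    #
    #     def setup():
    #         from glue.config import importer
    #         importer.add('Webcam importer', import_from_webcam)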
import setuptools
logger.info("Loading external plugins using "
"setuptools=={0}".format(setuptools.__version__))
from glue._plugin_helpers import iter_plugin_entry_points, PluginConfig
config = PluginConfig.load()
n_plugins = len(list(iter_plugin_entry_points()))
for iplugin, item in enumerate(iter_plugin_entry_points()):
if item.module_name not in _installed_plugins:
_installed_plugins.add(item.name)
if item.module_name in _loaded_plugins:
logger.info("Plugin {0} already loaded".format(item.name))
continue
if not config.plugins[item.name]:
continue
try:
function = item.load()
function()
except Exception as exc:
logger.info("Loading plugin {0} failed "
"(Exception: {1})".format(item.name, exc))
else:
logger.info("Loading plugin {0} succeeded".format(item.name))
_loaded_plugins.add(item.module_name)
if splash is not None:
splash.set_progress(100. * iplugin / float(n_plugins))
try:
config.save()
except Exception as e:
logger.warn("Failed to load plugin configuration")
# Reload the settings now that we have loaded plugins, since some plugins
# may have added some settings. Note that this will not re-read settings
# that were previously read.
from glue._settings_helpers import load_settings
load_settings()
if __name__ == "__main__":
sys.exit(main(sys.argv)) # pragma: no cover
|
jansed26/tmg-server
|
refs/heads/master
|
extensions/ConfirmEdit/captcha.py
|
63
|
#!/usr/bin/python
#
# Script to generate distorted text images for a captcha system.
#
# Copyright (C) 2005 Neil Harris
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# http://www.gnu.org/copyleft/gpl.html
#
# Further tweaks by Brion Vibber <brion@pobox.com>:
# 2006-01-26: Add command-line options for the various parameters
# 2007-02-19: Add --dirs param for hash subdirectory splits
# Tweaks by Greg Sabino Mullane <greg@turnstep.com>:
# 2008-01-06: Add regex check to skip words containing other than a-z
import random
import math
import hashlib
from optparse import OptionParser
import os
import sys
import re
try:
import Image
import ImageFont
import ImageDraw
import ImageEnhance
import ImageOps
except ImportError:
sys.exit("This script requires the Python Imaging Library - http://www.pythonware.com/products/pil/")
nonalpha = re.compile('[^a-z]') # regex to test for suitability of words
# Does X-axis wobbly copy, sandwiched between two rotates
def wobbly_copy(src, wob, col, scale, ang):
x, y = src.size
f = random.uniform(4*scale, 5*scale)
p = random.uniform(0, math.pi*2)
rr = ang+random.uniform(-30, 30) # vary, but not too much
int_d = Image.new('RGB', src.size, 0) # a black rectangle
rot = src.rotate(rr, Image.BILINEAR)
# Do a cheap bounding-box op here to try to limit work below
bbx = rot.getbbox()
    if bbx is None:
        return src
    else:
        l, t, r, b = bbx
# and only do lines with content on
for i in range(t, b+1):
# Drop a scan line in
xoff = int(math.sin(p+(i*f/y))*wob)
xoff += int(random.uniform(-wob*0.5, wob*0.5))
int_d.paste(rot.crop((0, i, x, i+1)), (xoff, i))
# try to stop blurring from building up
int_d = int_d.rotate(-rr, Image.BILINEAR)
enh = ImageEnhance.Sharpness(int_d)
return enh.enhance(2)
def gen_captcha(text, fontname, fontsize, file_name):
"""Generate a captcha image"""
# white text on a black background
bgcolor = 0x0
fgcolor = 0xffffff
# create a font object
font = ImageFont.truetype(fontname,fontsize)
# determine dimensions of the text
dim = font.getsize(text)
    # create a new image significantly larger than the text
edge = max(dim[0], dim[1]) + 2*min(dim[0], dim[1])
im = Image.new('RGB', (edge, edge), bgcolor)
d = ImageDraw.Draw(im)
x, y = im.size
# add the text to the image
d.text((x/2-dim[0]/2, y/2-dim[1]/2), text, font=font, fill=fgcolor)
k = 3
wob = 0.20*dim[1]/k
rot = 45
# Apply lots of small stirring operations, rather than a few large ones
# in order to get some uniformity of treatment, whilst
# maintaining randomness
for i in range(k):
im = wobbly_copy(im, wob, bgcolor, i*2+3, rot+0)
im = wobbly_copy(im, wob, bgcolor, i*2+1, rot+45)
im = wobbly_copy(im, wob, bgcolor, i*2+2, rot+90)
rot += 30
# now get the bounding box of the nonzero parts of the image
bbox = im.getbbox()
bord = min(dim[0], dim[1])/4 # a bit of a border
im = im.crop((bbox[0]-bord, bbox[1]-bord, bbox[2]+bord, bbox[3]+bord))
# and turn into black on white
im = ImageOps.invert(im)
# save the image, in format determined from filename
im.save(file_name)
def gen_subdir(basedir, md5hash, levels):
"""Generate a subdirectory path out of the first _levels_
characters of _hash_, and ensure the directories exist
under _basedir_."""
subdir = None
for i in range(0, levels):
char = md5hash[i]
if subdir:
subdir = os.path.join(subdir, char)
else:
subdir = char
fulldir = os.path.join(basedir, subdir)
if not os.path.exists(fulldir):
os.mkdir(fulldir)
return subdir
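# For example, gen_subdir('captchas', '3fa4b2...', 2) ensures that captchas/3
# and captchas/3/f exist and returns os.path.join('3', 'f').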
def try_pick_word(words, blacklist, verbose, nwords, min_length, max_length):
if words is not None:
word = words[random.randint(0,len(words)-1)]
while nwords > 1:
word2 = words[random.randint(0,len(words)-1)]
word = word + word2
nwords = nwords - 1
else:
word = ''
max_length = max_length if max_length > 0 else 10
for i in range(0, random.randint(min_length, max_length)):
word = word + chr(97 + random.randint(0,25))
if verbose:
print "word is %s" % word
if len(word) < min_length:
if verbose:
print "skipping word pair '%s' because it has fewer than %d characters" % (word, min_length)
return None
if max_length > 0 and len(word) > max_length:
if verbose:
print "skipping word pair '%s' because it has more than %d characters" % (word, max_length)
return None
if nonalpha.search(word):
if verbose:
print "skipping word pair '%s' because it contains non-alphabetic characters" % word
return None
for naughty in blacklist:
if naughty in word:
if verbose:
print "skipping word pair '%s' because it contains blacklisted word '%s'" % (word, naughty)
return None
return word
def pick_word(words, blacklist, verbose, nwords, min_length, max_length):
for x in range(1000): # If we can't find a valid combination in 1000 tries, just give up
word = try_pick_word(words, blacklist, verbose, nwords, min_length, max_length)
if word:
return word
sys.exit("Unable to find valid word combinations")
def read_wordlist(filename):
f = open(filename)
words = [x.strip().lower() for x in f.readlines()]
f.close()
return words
if __name__ == '__main__':
"""This grabs random words from the dictionary 'words' (one
word per line) and generates a captcha image for each one,
with a keyed salted hash of the correct answer in the filename.
To check a reply, hash it in the same way with the same salt and
secret key, then compare with the hash value given.
"""
script_dir = os.path.dirname(os.path.realpath(__file__))
parser = OptionParser()
parser.add_option("--wordlist", help="A list of words (required)", metavar="WORDS.txt")
    parser.add_option("--random", help="Use random characters instead of a wordlist", action="store_true")
parser.add_option("--key", help="The passphrase set as $wgCaptchaSecret (required)", metavar="KEY")
parser.add_option("--output", help="The directory to put the images in - $wgCaptchaDirectory (required)", metavar="DIR")
parser.add_option("--font", help="The font to use (required)", metavar="FONT.ttf")
parser.add_option("--font-size", help="The font size (default 40)", metavar="N", type='int', default=40)
parser.add_option("--count", help="The maximum number of images to make (default 20)", metavar="N", type='int', default=20)
parser.add_option("--blacklist", help="A blacklist of words that should not be used", metavar="FILE", default=os.path.join(script_dir, "blacklist"))
parser.add_option("--fill", help="Fill the output directory to contain N files, overrides count, cannot be used with --dirs", metavar="N", type='int')
parser.add_option("--dirs", help="Put the images into subdirectories N levels deep - $wgCaptchaDirectoryLevels", metavar="N", type='int')
parser.add_option("--verbose", "-v", help="Show debugging information", action='store_true')
parser.add_option("--number-words", help="Number of words from the wordlist which make a captcha challenge (default 2)", type='int', default=2)
parser.add_option("--min-length", help="Minimum length for a captcha challenge", type='int', default=1)
parser.add_option("--max-length", help="Maximum length for a captcha challenge", type='int', default=-1)
opts, args = parser.parse_args()
if opts.wordlist:
wordlist = opts.wordlist
elif opts.random:
wordlist = None
else:
sys.exit("Need to specify a wordlist")
if opts.key:
key = opts.key
else:
sys.exit("Need to specify a key")
if opts.output:
output = opts.output
else:
sys.exit("Need to specify an output directory")
if opts.font and os.path.exists(opts.font):
font = opts.font
else:
sys.exit("Need to specify the location of a font")
blacklist = read_wordlist(opts.blacklist)
count = opts.count
fill = opts.fill
dirs = opts.dirs
verbose = opts.verbose
fontsize = opts.font_size
if fill:
count = max(0, fill - len(os.listdir(output)))
words = None
if wordlist:
words = read_wordlist(wordlist)
words = [x for x in words
if len(x) in (4,5) and x[0] != "f"
and x[0] != x[1] and x[-1] != x[-2]]
for i in range(count):
word = pick_word(words, blacklist, verbose, opts.number_words, opts.min_length, opts.max_length)
salt = "%08x" % random.randrange(2**32)
# 64 bits of hash is plenty for this purpose
md5hash = hashlib.md5(key+salt+word+key+salt).hexdigest()[:16]
filename = "image_%s_%s.png" % (salt, md5hash)
if dirs:
subdir = gen_subdir(output, md5hash, dirs)
filename = os.path.join(subdir, filename)
if verbose:
print filename
gen_captcha(word, font, fontsize, os.path.join(output, filename))
|
40223121/w17
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/browser/object_storage.py
|
627
|
import pickle
class _UnProvided:
    # sentinel type; the single leading underscore avoids the name mangling
    # that a "__"-prefixed name would undergo inside the ObjectStorage body
    pass
class ObjectStorage():
def __init__(self, storage):
self.storage = storage
def __delitem__(self, key):
del self.storage[pickle.dumps(key)]
def __getitem__(self, key):
return pickle.loads(self.storage[pickle.dumps(key)])
def __setitem__(self, key, value):
self.storage[pickle.dumps(key)] = pickle.dumps(value)
def __contains__(self, key):
return pickle.dumps(key) in self.storage
def get(self, key, default=None):
if pickle.dumps(key) in self.storage:
            return pickle.loads(self.storage[pickle.dumps(key)])
return default
    def pop(self, key, default=_UnProvided()):
        if type(default) is _UnProvided or pickle.dumps(key) in self.storage:
return pickle.loads(self.storage.pop(pickle.dumps(key)))
return default
def __iter__(self):
keys = self.keys()
return keys.__iter__()
def keys(self):
return [pickle.loads(key) for key in self.storage.keys()]
def values(self):
return [pickle.loads(val) for val in self.storage.values()]
def items(self):
return list(zip(self.keys(), self.values()))
def clear(self):
self.storage.clear()
def __len__(self):
return len(self.storage)
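# Usage sketch (assumes a dict-like backing store, e.g. Brython's
# browser.local_storage.storage):
#
#     from browser.local_storage import storage
#     store = ObjectStorage(storage)
#     store[('user', 42)] = {'name': 'Ada'}  # any picklable key and value
#     assert ('user', 42) in store
#     print(store.get(('user', 42)))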
|
frederick-masterton/django
|
refs/heads/master
|
tests/admin_changelist/admin.py
|
57
|
from django.contrib import admin
from django.core.paginator import Paginator
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from .models import Event, Child, Parent, Swallow
site = admin.AdminSite(name="admin")
site.register(User, UserAdmin)
class CustomPaginator(Paginator):
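    # Deliberately ignores the requested page_size: always paginates by 5
    # with 2 orphans, so tests can verify this paginator is actually used.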
def __init__(self, queryset, page_size, orphans=0, allow_empty_first_page=True):
super(CustomPaginator, self).__init__(queryset, 5, orphans=2,
allow_empty_first_page=allow_empty_first_page)
class EventAdmin(admin.ModelAdmin):
list_display = ['event_date_func']
def event_date_func(self, event):
return event.date
site.register(Event, EventAdmin)
class ParentAdmin(admin.ModelAdmin):
list_filter = ['child__name']
search_fields = ['child__name']
class ChildAdmin(admin.ModelAdmin):
list_display = ['name', 'parent']
list_per_page = 10
list_filter = ['parent', 'age']
def get_queryset(self, request):
return super(ChildAdmin, self).get_queryset(request).select_related("parent__name")
class CustomPaginationAdmin(ChildAdmin):
paginator = CustomPaginator
class FilteredChildAdmin(admin.ModelAdmin):
list_display = ['name', 'parent']
list_per_page = 10
def get_queryset(self, request):
return super(FilteredChildAdmin, self).get_queryset(request).filter(
name__contains='filtered')
class BandAdmin(admin.ModelAdmin):
list_filter = ['genres']
class GroupAdmin(admin.ModelAdmin):
list_filter = ['members']
class QuartetAdmin(admin.ModelAdmin):
list_filter = ['members']
class ChordsBandAdmin(admin.ModelAdmin):
list_filter = ['members']
class InvitationAdmin(admin.ModelAdmin):
list_display = ('band', 'player')
list_select_related = ('player',)
class DynamicListDisplayChildAdmin(admin.ModelAdmin):
list_display = ('parent', 'name', 'age')
def get_list_display(self, request):
my_list_display = super(DynamicListDisplayChildAdmin, self).get_list_display(request)
if request.user.username == 'noparents':
my_list_display = list(my_list_display)
my_list_display.remove('parent')
return my_list_display
class DynamicListDisplayLinksChildAdmin(admin.ModelAdmin):
list_display = ('parent', 'name', 'age')
list_display_links = ['parent', 'name']
def get_list_display_links(self, request, list_display):
return ['age']
site.register(Child, DynamicListDisplayChildAdmin)
class NoListDisplayLinksParentAdmin(admin.ModelAdmin):
list_display_links = None
site.register(Parent, NoListDisplayLinksParentAdmin)
class SwallowAdmin(admin.ModelAdmin):
actions = None # prevent ['action_checkbox'] + list(list_display)
list_display = ('origin', 'load', 'speed')
site.register(Swallow, SwallowAdmin)
class DynamicListFilterChildAdmin(admin.ModelAdmin):
list_filter = ('parent', 'name', 'age')
def get_list_filter(self, request):
my_list_filter = super(DynamicListFilterChildAdmin, self).get_list_filter(request)
if request.user.username == 'noparents':
my_list_filter = list(my_list_filter)
my_list_filter.remove('parent')
return my_list_filter
class DynamicSearchFieldsChildAdmin(admin.ModelAdmin):
search_fields = ('name',)
def get_search_fields(self, request):
search_fields = super(DynamicSearchFieldsChildAdmin, self).get_search_fields(request)
search_fields += ('age',)
return search_fields
|
mammique/django
|
refs/heads/tp_alpha
|
django/contrib/auth/tests/remote_user.py
|
91
|
from datetime import datetime
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.backends import RemoteUserBackend
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.test import TestCase
from django.utils import timezone
@skipIfCustomUser
class RemoteUserTest(TestCase):
urls = 'django.contrib.auth.tests.urls'
middleware = 'django.contrib.auth.middleware.RemoteUserMiddleware'
backend = 'django.contrib.auth.backends.RemoteUserBackend'
# Usernames to be passed in REMOTE_USER for the test_known_user test case.
known_user = 'knownuser'
known_user2 = 'knownuser2'
def setUp(self):
self.curr_middleware = settings.MIDDLEWARE_CLASSES
self.curr_auth = settings.AUTHENTICATION_BACKENDS
settings.MIDDLEWARE_CLASSES += (self.middleware,)
settings.AUTHENTICATION_BACKENDS += (self.backend,)
def test_no_remote_user(self):
"""
        Tests requests where no remote user is specified and ensures that no
        users get created.
"""
num_users = User.objects.count()
response = self.client.get('/remote_user/')
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
response = self.client.get('/remote_user/', REMOTE_USER=None)
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
response = self.client.get('/remote_user/', REMOTE_USER='')
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
def test_unknown_user(self):
"""
Tests the case where the username passed in the header does not exist
as a User.
"""
num_users = User.objects.count()
response = self.client.get('/remote_user/', REMOTE_USER='newuser')
self.assertEqual(response.context['user'].username, 'newuser')
self.assertEqual(User.objects.count(), num_users + 1)
User.objects.get(username='newuser')
# Another request with same user should not create any new users.
response = self.client.get('/remote_user/', REMOTE_USER='newuser')
self.assertEqual(User.objects.count(), num_users + 1)
def test_known_user(self):
"""
Tests the case where the username passed in the header is a valid User.
"""
User.objects.create(username='knownuser')
User.objects.create(username='knownuser2')
num_users = User.objects.count()
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
self.assertEqual(response.context['user'].username, 'knownuser')
self.assertEqual(User.objects.count(), num_users)
# Test that a different user passed in the headers causes the new user
# to be logged in.
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user2)
self.assertEqual(response.context['user'].username, 'knownuser2')
self.assertEqual(User.objects.count(), num_users)
def test_last_login(self):
"""
Tests that a user's last_login is set the first time they make a
request but not updated in subsequent requests with the same session.
"""
user = User.objects.create(username='knownuser')
# Set last_login to something so we can determine if it changes.
default_login = datetime(2000, 1, 1)
if settings.USE_TZ:
default_login = default_login.replace(tzinfo=timezone.utc)
user.last_login = default_login
user.save()
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
self.assertNotEqual(default_login, response.context['user'].last_login)
user = User.objects.get(username='knownuser')
user.last_login = default_login
user.save()
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
self.assertEqual(default_login, response.context['user'].last_login)
def test_header_disappears(self):
"""
Tests that a logged in user is logged out automatically when
the REMOTE_USER header disappears during the same browser session.
"""
User.objects.create(username='knownuser')
# Known user authenticates
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
self.assertEqual(response.context['user'].username, 'knownuser')
# During the session, the REMOTE_USER header disappears. Should trigger logout.
response = self.client.get('/remote_user/')
self.assertEqual(response.context['user'].is_anonymous(), True)
# verify the remoteuser middleware will not remove a user
# authenticated via another backend
User.objects.create_user(username='modeluser', password='foo')
self.client.login(username='modeluser', password='foo')
authenticate(username='modeluser', password='foo')
response = self.client.get('/remote_user/')
self.assertEqual(response.context['user'].username, 'modeluser')
def tearDown(self):
"""Restores settings to avoid breaking other tests."""
settings.MIDDLEWARE_CLASSES = self.curr_middleware
settings.AUTHENTICATION_BACKENDS = self.curr_auth
class RemoteUserNoCreateBackend(RemoteUserBackend):
"""Backend that doesn't create unknown users."""
create_unknown_user = False
@skipIfCustomUser
class RemoteUserNoCreateTest(RemoteUserTest):
"""
Contains the same tests as RemoteUserTest, but using a custom auth backend
class that doesn't create unknown users.
"""
backend =\
'django.contrib.auth.tests.remote_user.RemoteUserNoCreateBackend'
def test_unknown_user(self):
num_users = User.objects.count()
response = self.client.get('/remote_user/', REMOTE_USER='newuser')
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
class CustomRemoteUserBackend(RemoteUserBackend):
"""
Backend that overrides RemoteUserBackend methods.
"""
def clean_username(self, username):
"""
Grabs username before the @ character.
"""
return username.split('@')[0]
def configure_user(self, user):
"""
Sets user's email address.
"""
user.email = 'user@example.com'
user.save()
return user
@skipIfCustomUser
class RemoteUserCustomTest(RemoteUserTest):
"""
Tests a custom RemoteUserBackend subclass that overrides the clean_username
and configure_user methods.
"""
backend =\
'django.contrib.auth.tests.remote_user.CustomRemoteUserBackend'
# REMOTE_USER strings with email addresses for the custom backend to
# clean.
known_user = 'knownuser@example.com'
known_user2 = 'knownuser2@example.com'
def test_known_user(self):
"""
The strings passed in REMOTE_USER should be cleaned and the known users
should not have been configured with an email address.
"""
super(RemoteUserCustomTest, self).test_known_user()
self.assertEqual(User.objects.get(username='knownuser').email, '')
self.assertEqual(User.objects.get(username='knownuser2').email, '')
def test_unknown_user(self):
"""
The unknown user created should be configured with an email address.
"""
super(RemoteUserCustomTest, self).test_unknown_user()
newuser = User.objects.get(username='newuser')
self.assertEqual(newuser.email, 'user@example.com')
|
mhvk/astropy
|
refs/heads/placeholder
|
astropy/samp/hub_script.py
|
12
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import copy
import time
import sys
import argparse
from astropy import log, __version__
from .hub import SAMPHubServer
__all__ = ['hub_script']
def hub_script(timeout=0):
"""
This main function is executed by the ``samp_hub`` command line tool.
"""
parser = argparse.ArgumentParser(prog="samp_hub " + __version__)
parser.add_argument("-k", "--secret", dest="secret", metavar="CODE",
help="custom secret code.")
parser.add_argument("-d", "--addr", dest="addr", metavar="ADDR",
help="listening address (or IP).")
parser.add_argument("-p", "--port", dest="port", metavar="PORT", type=int,
help="listening port number.")
parser.add_argument("-f", "--lockfile", dest="lockfile", metavar="FILE",
help="custom lockfile.")
parser.add_argument("-w", "--no-web-profile", dest="web_profile", action="store_false",
help="run the Hub disabling the Web Profile.", default=True)
parser.add_argument("-P", "--pool-size", dest="pool_size", metavar="SIZE", type=int,
help="the socket connections pool size.", default=20)
    timeout_group = parser.add_argument_group(
        "Timeout group",
        "Special options to set up the Hub and client inactivity timeouts, "
        "that is, the inactivity time interval after which the Hub shuts "
        "down or unregisters a client. A notification of the "
        "samp.hub.disconnect MType is sent to clients that are forcibly "
        "unregistered on timeout expiration.")
timeout_group.add_argument("-t", "--timeout", dest="timeout", metavar="SECONDS",
help="set the Hub inactivity timeout in SECONDS. By default it "
"is set to 0, that is the Hub never expires.", type=int, default=0)
timeout_group.add_argument("-c", "--client-timeout", dest="client_timeout", metavar="SECONDS",
help="set the client inactivity timeout in SECONDS. By default it "
"is set to 0, that is the client never expires.", type=int, default=0)
    log_group = parser.add_argument_group(
        "Logging options",
        "Additional options to customize the logging output. By default the "
        "SAMP Hub prints INFO level logging messages to the standard output "
        "and standard error devices. The options below modify the logging "
        "level and redirect the logging messages to an output file.")
log_group.add_argument("-L", "--log-level", dest="loglevel", metavar="LEVEL",
help="set the Hub instance log level (OFF, ERROR, WARNING, INFO, DEBUG).",
type=str, choices=["OFF", "ERROR", "WARNING", "INFO", "DEBUG"], default='INFO')
log_group.add_argument("-O", "--log-output", dest="logout", metavar="FILE",
help="set the output file for the log messages.", default="")
    adv_group = parser.add_argument_group(
        "Advanced group",
        "Advanced options that facilitate administrative tasks and allow "
        "non-standard Hub behaviors. In particular, the --label option "
        "assigns a value to the hub.label token, giving the Hub instance a "
        "name. The --multi option starts the Hub in multi-instance mode, a "
        "non-standard behavior that enables multiple Hubs to run at the same "
        "time. Multi-instance Hubs place their non-standard lock files in "
        "the <home directory>/.samp-1 directory, named using the format "
        "samp-hub-<PID>-<ID>, where PID is the Hub process ID and ID is an "
        "internal (integer) ID.")
adv_group.add_argument("-l", "--label", dest="label", metavar="LABEL",
help="assign a LABEL to the Hub.", default="")
adv_group.add_argument("-m", "--multi", dest="mode",
help="run the Hub in multi-instance mode generating a custom "
"lockfile with a random name.",
action="store_const", const='multiple', default='single')
options = parser.parse_args()
try:
if options.loglevel in ("OFF", "ERROR", "WARNING", "DEBUG", "INFO"):
log.setLevel(options.loglevel)
if options.logout != "":
context = log.log_to_file(options.logout)
else:
class dummy_context:
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
pass
context = dummy_context()
with context:
args = copy.deepcopy(options.__dict__)
            del args["loglevel"]
            del args["logout"]
hub = SAMPHubServer(**args)
hub.start(False)
if not timeout:
while hub.is_running:
time.sleep(0.01)
else:
time.sleep(timeout)
hub.stop()
except KeyboardInterrupt:
try:
hub.stop()
except NameError:
pass
except OSError as e:
print(f"[SAMP] Error: I/O error({e.errno}): {e.strerror}")
sys.exit(1)
except SystemExit:
pass
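# Typical command lines handled by this function (illustrative, using only
# the flags defined above):
#
#     samp_hub -w -t 600 -c 120      # no Web Profile; Hub and client timeouts
#     samp_hub -L DEBUG -O hub.log   # DEBUG logging redirected to a file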
|
mdanielwork/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/contrib/localflavor/uk/forms.py
|
313
|
"""
UK-specific Form helpers
"""
import re
from django.forms.fields import CharField, Select
from django.forms import ValidationError
from django.utils.translation import ugettext_lazy as _
class UKPostcodeField(CharField):
"""
A form field that validates its input is a UK postcode.
The regular expression used is sourced from the schema for British Standard
BS7666 address types: http://www.govtalk.gov.uk/gdsc/schemas/bs7666-v2-0.xsd
The value is uppercased and a space added in the correct place, if required.
"""
default_error_messages = {
'invalid': _(u'Enter a valid postcode.'),
}
outcode_pattern = '[A-PR-UWYZ]([0-9]{1,2}|([A-HIK-Y][0-9](|[0-9]|[ABEHMNPRVWXY]))|[0-9][A-HJKSTUW])'
incode_pattern = '[0-9][ABD-HJLNP-UW-Z]{2}'
postcode_regex = re.compile(r'^(GIR 0AA|%s %s)$' % (outcode_pattern, incode_pattern))
space_regex = re.compile(r' *(%s)$' % incode_pattern)
def clean(self, value):
value = super(UKPostcodeField, self).clean(value)
if value == u'':
return value
postcode = value.upper().strip()
# Put a single space before the incode (second part).
postcode = self.space_regex.sub(r' \1', postcode)
if not self.postcode_regex.search(postcode):
raise ValidationError(self.error_messages['invalid'])
return postcode
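# A minimal sketch of the normalisation performed by clean() above
# (illustrative; assumes a configured Django settings module):
#
#     field = UKPostcodeField()
#     field.clean(u'wc2b4ab')    # -> u'WC2B 4AB' (uppercased, space inserted)
#     field.clean(u'not a pc')   # raises ValidationError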
class UKCountySelect(Select):
"""
A Select widget that uses a list of UK Counties/Regions as its choices.
"""
def __init__(self, attrs=None):
        from .uk_regions import UK_REGION_CHOICES
super(UKCountySelect, self).__init__(attrs, choices=UK_REGION_CHOICES)
class UKNationSelect(Select):
"""
A Select widget that uses a list of UK Nations as its choices.
"""
def __init__(self, attrs=None):
        from .uk_regions import UK_NATIONS_CHOICES
super(UKNationSelect, self).__init__(attrs, choices=UK_NATIONS_CHOICES)
|
Bogh/django-oscar
|
refs/heads/master
|
tests/integration/dashboard/__init__.py
|
1349
|
# -*- coding: utf-8 -*-
|
reubano/csvkit
|
refs/heads/master
|
tests/test_py2.py
|
21
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import six
try:
import unittest2 as unittest
except ImportError:
import unittest
import csvkit
@unittest.skipIf(six.PY3, "Not supported in Python 3.")
class TestCSVKitReader(unittest.TestCase):
def test_utf8(self):
with open('examples/test_utf8.csv') as f:
reader = csvkit.CSVKitReader(f, encoding='utf-8')
self.assertEqual(next(reader), ['a', 'b', 'c'])
self.assertEqual(next(reader), ['1', '2', '3'])
self.assertEqual(next(reader), ['4', '5', u'ʤ'])
def test_reader_alias(self):
with open('examples/test_utf8.csv') as f:
reader = csvkit.reader(f, encoding='utf-8')
self.assertEqual(next(reader), ['a', 'b', 'c'])
self.assertEqual(next(reader), ['1', '2', '3'])
self.assertEqual(next(reader), ['4', '5', u'ʤ'])
@unittest.skipIf(six.PY3, "Not supported in Python 3.")
class TestCSVKitWriter(unittest.TestCase):
def test_utf8(self):
output = six.StringIO()
writer = csvkit.CSVKitWriter(output, encoding='utf-8')
self.assertEqual(writer._eight_bit, True)
writer.writerow(['a', 'b', 'c'])
writer.writerow(['1', '2', '3'])
writer.writerow(['4', '5', u'ʤ'])
written = six.StringIO(output.getvalue())
reader = csvkit.CSVKitReader(written, encoding='utf-8')
self.assertEqual(next(reader), ['a', 'b', 'c'])
self.assertEqual(next(reader), ['1', '2', '3'])
self.assertEqual(next(reader), ['4', '5', u'ʤ'])
def test_writer_alias(self):
output = six.StringIO()
writer = csvkit.writer(output, encoding='utf-8')
self.assertEqual(writer._eight_bit, True)
writer.writerow(['a', 'b', 'c'])
writer.writerow(['1', '2', '3'])
writer.writerow(['4', '5', u'ʤ'])
written = six.StringIO(output.getvalue())
reader = csvkit.reader(written, encoding='utf-8')
self.assertEqual(next(reader), ['a', 'b', 'c'])
self.assertEqual(next(reader), ['1', '2', '3'])
self.assertEqual(next(reader), ['4', '5', u'ʤ'])
@unittest.skipIf(six.PY3, "Not supported in Python 3.")
class TestCSVKitDictReader(unittest.TestCase):
def setUp(self):
self.f = open('examples/dummy.csv')
def tearDown(self):
self.f.close()
def test_reader(self):
reader = csvkit.CSVKitDictReader(self.f)
self.assertEqual(next(reader), {
u'a': u'1',
u'b': u'2',
u'c': u'3'
})
def test_reader_alias(self):
reader = csvkit.DictReader(self.f)
self.assertEqual(next(reader), {
u'a': u'1',
u'b': u'2',
u'c': u'3'
})
@unittest.skipIf(six.PY3, "Not supported in Python 3.")
class TestCSVKitDictWriter(unittest.TestCase):
def setUp(self):
self.output = six.StringIO()
def tearDown(self):
self.output.close()
def test_writer(self):
writer = csvkit.CSVKitDictWriter(self.output, ['a', 'b', 'c'])
writer.writeheader()
writer.writerow({
u'a': u'1',
u'b': u'2',
u'c': u'☃'
})
result = self.output.getvalue()
self.assertEqual(result, 'a,b,c\n1,2,☃\n')
def test_writer_alias(self):
writer = csvkit.DictWriter(self.output, ['a', 'b', 'c'])
writer.writeheader()
writer.writerow({
u'a': u'1',
u'b': u'2',
u'c': u'☃'
})
result = self.output.getvalue()
self.assertEqual(result, 'a,b,c\n1,2,☃\n')
|
jumpstarter-io/nova
|
refs/heads/master
|
nova/virt/xenapi/__init__.py
|
126
|
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`xenapi` -- Nova support for XenServer and XCP through XenAPI
==================================================================
"""
from nova.virt.xenapi import driver
XenAPIDriver = driver.XenAPIDriver
|
andersk/zulip
|
refs/heads/master
|
zilencer/tests.py
|
126
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import ujson
from django.test import TestCase
class EndpointDiscoveryTest(TestCase):
def test_staging_user(self):
response = self.client.get("/api/v1/deployments/endpoints", {"email": "lfaraone@zulip.com"})
data = ujson.loads(response.content)
self.assertEqual(data["result"]["base_site_url"], "https://zulip.com/")
self.assertEqual(data["result"]["base_api_url"], "https://zulip.com/api/")
def test_prod_user(self):
response = self.client.get("/api/v1/deployments/endpoints", {"email": "lfaraone@mit.edu"})
data = ujson.loads(response.content)
self.assertEqual(data["result"]["base_site_url"], "https://zulip.com/")
self.assertEqual(data["result"]["base_api_url"], "https://api.zulip.com/")
|
kingsdigitallab/dprr-django
|
refs/heads/master
|
promrep/migrations/0055_add_highest_office_field.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('promrep', '0054_rebuild_provinces_tree'),
]
operations = [
migrations.AddField(
model_name='person',
name='highest_office',
field=models.CharField(max_length=1024, null=True, blank=True),
),
migrations.AddField(
model_name='person',
name='highest_office_edited',
field=models.BooleanField(default=False),
),
]
|
DataONEorg/d1_python
|
refs/heads/master
|
gmn/src/d1_gmn/tests/gmn_direct.py
|
1
|
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Call the GMN D1 APIs directly through the Django test client.
These methods provide a way to issue non-compliant requests to GMN that
(hopefully) cannot be created via d1_client. Examples of broken requests include
requests with incorrectly formatted URLs, multipart documents, and DataONE XML
types.
By issuing intentionally broken requests, unit tests can ensure that the error
paths in GMN work correctly.
These methods also allow testing handling of timezones in datetimes. Some such
tests cannot be issued via d1_client because PyXB, being based on the XML DOM,
automatically adjusts all non-naive datetimes to UTC.
"""
import io
import logging
import urllib.parse
import xml.etree.ElementTree
import d1_common.url
import d1_common.util
import d1_common.utils.ulog
import d1_common.wrap.simple_xml
import django.test
import d1_gmn.tests.gmn_mock
def create(version_tag, sciobj_bytes, sysmeta_xml):
"""Call MNStorage.create()"""
with d1_gmn.tests.gmn_mock.disable_sysmeta_sanity_checks():
with d1_common.wrap.simple_xml.wrap(sysmeta_xml) as xml_wrapper:
return _get_resp_dict(
django.test.Client().post(
d1_common.url.joinPathElements("/", version_tag, "object"),
{
"pid": xml_wrapper.get_element_text("identifier"),
"object": ("content.bin", io.BytesIO(sciobj_bytes)),
"sysmeta": ("sysmeta.xml", io.BytesIO(sysmeta_xml)),
},
)
)
def create_stream(version_tag, sciobj_bytestream, sysmeta_xml):
"""Call MNStorage.create()"""
with d1_gmn.tests.gmn_mock.disable_sysmeta_sanity_checks():
with d1_common.wrap.simple_xml.wrap(sysmeta_xml) as xml_wrapper:
return _get_resp_dict(
django.test.Client().post(
d1_common.url.joinPathElements("/", version_tag, "object"),
{
"pid": xml_wrapper.get_element_text("identifier"),
"object": ("content.bin", sciobj_bytestream),
"sysmeta": ("sysmeta.xml", io.StringIO(sysmeta_xml)),
},
)
)
def get(version_tag, pid):
"""Call MNRead.get()"""
return _get_resp_dict(
django.test.Client().get(
d1_common.url.joinPathElements(
"/", version_tag, "object", pid.encode("utf-8")
)
)
)
def get_system_metadata(version_tag, pid):
"""Call MNRead.getSystemMetadata()"""
return _get_resp_dict(
django.test.Client().get(
d1_common.url.joinPathElements("/", version_tag, "meta", pid)
)
)
def list_objects(version_tag, pid=None, start=None, count=None):
"""Call MNRead.listObjects()"""
url_path = d1_common.url.joinPathElements("/", version_tag, "object")
query_dict = {}
if pid is not None:
query_dict["identifier"] = pid
if start is not None:
query_dict["start"] = start
if count is not None:
query_dict["count"] = count
url_str = _add_query(query_dict, url_path)
return _get_resp_dict(django.test.Client().get(url_str))
def get_log_records(version_tag, pid=None, start=None, count=None):
"""Call MNCore.getLogRecords()"""
url_path = d1_common.url.joinPathElements("/", version_tag, "log")
query_dict = {}
if pid is not None:
query_dict["identifier"] = pid
if start is not None:
query_dict["start"] = start
if count is not None:
query_dict["count"] = count
url_str = _add_query(query_dict, url_path)
return _get_resp_dict(django.test.Client().get(url_str))
def _add_query(query_dict, url_path):
    if query_dict:
        # URL-encode the query parameters with the stdlib.
        url_str = "{}?{}".format(url_path, urllib.parse.urlencode(query_dict))
    else:
        url_str = url_path
    return url_str
def get_object_count(version_tag):
"""Get total number of objects for which one or more subj in ``session_subj_list``
have read access or better."""
url_path = d1_common.url.joinPathElements("/", version_tag, "object")
# url_path += "?identifier={}".format(d1_common.url.encodeQueryElement(pid))
resp_dict = _get_resp_dict(django.test.Client().get(url_path))
if resp_dict["is_ok"]:
return int(
xml.etree.ElementTree.fromstring(resp_dict["body_str"]).attrib["count"]
)
resp_dict.pop("response", None)
raise Exception(
"Unable to get object count. resp_dict={}".format(
d1_common.util.serialize_to_normalized_compact_json(resp_dict)
)
)
def _get_resp_dict(response):
"""Log return status of a django.http.response.HttpResponse and arrange the response
into a dict of items generally more convenient to work with from tests."""
    body_str = (
        b"".join(response.streaming_content) if response.streaming else response.content
    )
is_ok = response.status_code in (200,)
if not is_ok:
logging.warning(
'Request returned unexpected status code. status_code={} body="{}"'.format(
response.status_code, body_str
)
)
else:
logging.info("Request successful. status_code={}".format(response.status_code))
return {
"is_ok": is_ok,
"status_code_int": response.status_code,
"header_dict": dict(list(response.items())),
"body_str": body_str,
"response": response,
}
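# A minimal usage sketch (illustrative; assumes a configured GMN Django
# settings module, and "my-test-pid" is a made-up identifier):
#
#     resp_dict = get("v2", "my-test-pid")
#     if resp_dict["is_ok"]:
#         print(resp_dict["header_dict"].get("Content-Type"))
#     else:
#         print(resp_dict["status_code_int"], resp_dict["body_str"])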
|
CTSRD-SOAAP/chromium-42.0.2311.135
|
refs/heads/master
|
remoting/tools/zip2msi.py
|
89
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates .msi from a .zip archive or an unpacked directory.
The structure of the input archive or directory should look like this:
+- archive.zip
+- archive
+- parameters.json
The name of the archive and the top level directory in the archive must match.
When an unpacked directory is used as the input "archive.zip/archive" should
be passed via the command line.
'parameters.json' specifies the parameters to be passed to candle/light and
must have the following structure:
{
"defines": { "name": "value" },
"extensions": [ "WixFirewallExtension.dll" ],
"switches": [ '-nologo' ],
"source": "chromoting.wxs",
"bind_path": "files",
"sign": [ ... ],
"candle": { ... },
"light": { ... }
}
"source" specifies the name of the input .wxs relative to
"archive.zip/archive".
"bind_path" specifies the path where to look for binary files referenced by
.wxs relative to "archive.zip/archive".
This script is used both for building the Chromoting Host installation during
the Chromium build and for signing the Chromoting Host installation later.
There are two copies of this script because of that:
 - one in the Chromium tree at src/remoting/tools/zip2msi.py.
 - another one next to the signing scripts.
The copies of the script can get out of sync, so when updating the script make
sure that the newer version stays compatible with the older ones.
"""
import copy
import json
from optparse import OptionParser
import os
import re
import subprocess
import sys
import zipfile
def UnpackZip(target, source):
"""Unpacks |source| archive to |target| directory."""
target = os.path.normpath(target)
archive = zipfile.ZipFile(source, 'r')
for f in archive.namelist():
target_file = os.path.normpath(os.path.join(target, f))
# Sanity check to make sure .zip uses relative paths.
if os.path.commonprefix([target_file, target]) != target:
print "Failed to unpack '%s': '%s' is not under '%s'" % (
source, target_file, target)
return 1
# Create intermediate directories.
target_dir = os.path.dirname(target_file)
if not os.path.exists(target_dir):
os.makedirs(target_dir)
archive.extract(f, target)
return 0
def Merge(left, right):
"""Merges two values.
Raises:
TypeError: |left| and |right| cannot be merged.
Returns:
- if both |left| and |right| are dictionaries, they are merged recursively.
- if both |left| and |right| are lists, the result is a list containing
elements from both lists.
    - if both |left| and |right| are simple values, |right| is returned.
    - a |TypeError| exception is raised if a dictionary or a list is merged
      with a non-dictionary or non-list value respectively.
"""
if isinstance(left, dict):
if isinstance(right, dict):
retval = copy.copy(left)
for key, value in right.iteritems():
if key in retval:
retval[key] = Merge(retval[key], value)
else:
retval[key] = value
return retval
else:
raise TypeError('Error: merging a dictionary and non-dictionary value')
elif isinstance(left, list):
if isinstance(right, list):
return left + right
else:
raise TypeError('Error: merging a list and non-list value')
else:
if isinstance(right, dict):
raise TypeError('Error: merging a dictionary and non-dictionary value')
elif isinstance(right, list):
      raise TypeError('Error: merging a list and non-list value')
else:
return right
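# A small illustration of Merge() semantics (values here are made up):
#
#   Merge({'defines': {'a': '1'}, 'switches': ['-v']},
#         {'defines': {'b': '2'}, 'switches': ['-q']})
#   # -> {'defines': {'a': '1', 'b': '2'}, 'switches': ['-v', '-q']}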
quote_matcher_regex = re.compile(r'\s|"')
quote_replacer_regex = re.compile(r'(\\*)"')
def QuoteArgument(arg):
"""Escapes a Windows command-line argument.
So that the Win32 CommandLineToArgv function will turn the escaped result back
into the original string.
See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
("Parsing C++ Command-Line Arguments") to understand why we have to do
this.
Args:
arg: the string to be escaped.
Returns:
the escaped string.
"""
def _Replace(match):
# For a literal quote, CommandLineToArgv requires an odd number of
# backslashes preceding it, and it produces half as many literal backslashes
# (rounded down). So we need to produce 2n+1 backslashes.
return 2 * match.group(1) + '\\"'
if re.search(quote_matcher_regex, arg):
# Escape all quotes so that they are interpreted literally.
arg = quote_replacer_regex.sub(_Replace, arg)
# Now add unescaped quotes so that any whitespace is interpreted literally.
return '"' + arg + '"'
else:
return arg
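# For example (illustrative):
#
#   QuoteArgument('say "hi"')  # -> '"say \\"hi\\""' (quotes escaped, wrapped)
#   QuoteArgument('-nologo')   # -> '-nologo' (no whitespace or quotes, as-is)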
def GenerateCommandLine(tool, source, dest, parameters):
"""Generates the command line for |tool|."""
# Merge/apply tool-specific parameters
params = copy.copy(parameters)
if tool in parameters:
params = Merge(params, params[tool])
wix_path = os.path.normpath(params.get('wix_path', ''))
switches = [os.path.join(wix_path, tool), '-nologo']
# Append the list of defines and extensions to the command line switches.
for name, value in params.get('defines', {}).iteritems():
switches.append('-d%s=%s' % (name, value))
for ext in params.get('extensions', []):
switches += ('-ext', os.path.join(wix_path, ext))
# Append raw switches
switches += params.get('switches', [])
# Append the input and output files
switches += ('-out', dest, source)
# Generate the actual command line
#return ' '.join(map(QuoteArgument, switches))
return switches
def Run(args):
"""Runs a command interpreting the passed |args| as a command line."""
command = ' '.join(map(QuoteArgument, args))
popen = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
if popen.returncode:
print command
for line in out.splitlines():
print line
print '%s returned %d' % (args[0], popen.returncode)
return popen.returncode
def GenerateMsi(target, source, parameters):
"""Generates .msi from the installation files prepared by Chromium build."""
parameters['basename'] = os.path.splitext(os.path.basename(source))[0]
  # The script can handle both forms of input: a directory with unpacked files
  # or a ZIP archive with the same files. In the latter case the archive is
  # unpacked to the intermediate directory.
source_dir = None
if os.path.isdir(source):
# Just use unpacked files from the supplied directory.
source_dir = source
else:
# Unpack .zip
rc = UnpackZip(parameters['intermediate_dir'], source)
if rc != 0:
return rc
source_dir = '%(intermediate_dir)s\\%(basename)s' % parameters
# Read parameters from 'parameters.json'.
f = open(os.path.join(source_dir, 'parameters.json'))
parameters = Merge(json.load(f), parameters)
f.close()
if 'source' not in parameters:
print 'The source .wxs is not specified'
return 1
if 'bind_path' not in parameters:
print 'The binding path is not specified'
return 1
wxs = os.path.join(source_dir, parameters['source'])
# Add the binding path to the light-specific parameters.
bind_path = os.path.join(source_dir, parameters['bind_path'])
parameters = Merge(parameters, {'light': {'switches': ['-b', bind_path]}})
target_arch = parameters['target_arch']
if target_arch == 'ia32':
arch_param = 'x86'
elif target_arch == 'x64':
arch_param = 'x64'
else:
print 'Invalid target_arch parameter value'
return 1
# Add the architecture to candle-specific parameters.
parameters = Merge(
parameters, {'candle': {'switches': ['-arch', arch_param]}})
# Run candle and light to generate the installation.
wixobj = '%(intermediate_dir)s\\%(basename)s.wixobj' % parameters
args = GenerateCommandLine('candle', wxs, wixobj, parameters)
rc = Run(args)
if rc:
return rc
args = GenerateCommandLine('light', wixobj, target, parameters)
rc = Run(args)
if rc:
return rc
return 0
def main():
usage = 'Usage: zip2msi [options] <input.zip> <output.msi>'
parser = OptionParser(usage=usage)
parser.add_option('--intermediate_dir', dest='intermediate_dir', default='.')
parser.add_option('--wix_path', dest='wix_path', default='.')
parser.add_option('--target_arch', dest='target_arch', default='x86')
options, args = parser.parse_args()
if len(args) != 2:
parser.error('two positional arguments expected')
return GenerateMsi(args[1], args[0], dict(options.__dict__))
if __name__ == '__main__':
sys.exit(main())
|
thaines/rfam
|
refs/heads/master
|
performance/sim_asset_creation.py
|
1
|
#! /usr/bin/env python3
import time
import random
import json
from urllib.parse import urljoin
from browser import Browser
# Specify how many of each asset type to create...
create = {
'mat' : 50,
'prop' : 100,
'set' : 6,
'char' : 6,
'ani' : 12,
'shot' : 40,
'comp' : 40,
'video' : 2
}
# Directory to create each asset type in...
directory = {
'mat' : 'materials',
'prop' : 'props',
'set' : 'sets',
'char' : 'characters',
'ani' : 'animations',
'shot' : 'shots',
'comp' : 'composites',
'video' : 'final'
}
# Generate a random sequence of asset names, suitably long, to use above...
parts_a = ['giant', 'puny', 'ugly', 'purple', 'angry', 'hyper', 'spotted', 'lesser', 'tentacled', 'flying', 'dead', 'rainbow', 'spider', 'stone', 'antique']
parts_b = ['toad', 'unicorn', 'bear', 'shark', 'human', 'parrot', 'statue', 'tree', 'rock', 'horse', 'orc', 'butterfly', 'sheep', 'book', 'kettle', 'onion', 'cat', 'tree', 'waterfall', 'lake', 'hovercraft', 'zombie', 'park']
names = []
for pa in parts_a:
for pb in parts_b:
names.append(pa + ' ' + pb)
random.shuffle(names)
# Create a list of assets to create, and randomise the order...
to_create = []
for key, value in create.items():
for _ in range(value):
to_create.append(key)
random.shuffle(to_create)
# Load the target information and setup a simulated web browser...
f = open('target.json', 'r')
target = json.load(f)
f.close()
browser = Browser()
# Login...
user = browser.login(target)
print('Logged in with user %s' % user)
# Loop and process each asset in turn...
start = time.time()
times = []
for i, kind in enumerate(to_create):
inner_start = time.time()
# Open assets page...
browser.open(urljoin(target['url'], 'assets'))
# Open new asset page...
browser.open(urljoin(target['url'], 'new'))
# Create asset...
browser.getJSON(urljoin(target['url'], 'add/asset'), {'name':names[i], 'type':kind, 'filename':directory[kind] +'/' + names[i] + '.blend', 'description':''})
# Time stuff...
inner_end = time.time()
times.append(inner_end - inner_start)
end = time.time()
# Print out statistics...
print('Created %i assets' % len(to_create))
print('Added assets at %.1f per second.' % (len(to_create) / (end - start)))
print('Longest asset took %.2f seconds' % max(times))
print('First 64 assets took %.2f seconds' % sum(times[:64]))
print('Last 64 assets took %.2f seconds' % sum(times[-64:]))
|
pblottiere/QGIS
|
refs/heads/master
|
python/plugins/processing/algs/grass7/ext/r_li_padrange.py
|
45
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
r_li_padrange.py
----------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
from .r_li import checkMovingWindow, configFile
def checkParameterValuesBeforeExecuting(alg, parameters, context):
return checkMovingWindow(alg, parameters, context)
def processCommand(alg, parameters, context, feedback):
configFile(alg, parameters, context, feedback)
|
ChinaMassClouds/copenstack-server
|
refs/heads/master
|
openstack/src/nova-2014.2/nova/api/openstack/compute/schemas/v3/create_backup.py
|
14
|
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'createBackup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
'enum': ['daily', 'weekly'],
},
'rotation': {
'type': ['integer', 'string'],
'pattern': '^[0-9]+$',
'minimum': 0,
},
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['createBackup'],
'additionalProperties': False,
}
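# A request body that validates against the schema above (illustrative):
#
#     {"createBackup": {"name": "nightly",
#                       "backup_type": "daily",
#                       "rotation": "7",
#                       "metadata": {"owner": "ops"}}}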
|
cryptobanana/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/cloudengine/ce_vxlan_gateway.py
|
22
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: ce_vxlan_gateway
version_added: "2.4"
short_description: Manages gateway for the VXLAN network on HUAWEI CloudEngine devices.
description:
- Configuring Centralized All-Active Gateways or Distributed Gateway for
the VXLAN Network on HUAWEI CloudEngine devices.
author: QijunPan (@CloudEngine-Ansible)
notes:
    - Ensure that All-Active Gateways and Distributed Gateway for the VXLAN network are not configured at the same time.
options:
dfs_id:
description:
- Specifies the ID of a DFS group.
The value must be 1.
required: false
default: null
dfs_source_ip:
description:
- Specifies the IPv4 address bound to a DFS group.
The value is in dotted decimal notation.
required: false
default: null
dfs_source_vpn:
description:
- Specifies the name of a VPN instance bound to a DFS group.
The value is a string of 1 to 31 case-sensitive characters without spaces.
If the character string is quoted by double quotation marks, the character string can contain spaces.
The value C(_public_) is reserved and cannot be used as the VPN instance name.
required: false
default: null
dfs_udp_port:
description:
- Specifies the UDP port number of the DFS group.
The value is an integer that ranges from 1025 to 65535.
required: false
default: null
dfs_all_active:
description:
- Creates all-active gateways.
required: false
choices: ['enable', 'disable']
default: null
dfs_peer_ip:
description:
- Configure the IP address of an all-active gateway peer.
The value is in dotted decimal notation.
required: false
default: null
dfs_peer_vpn:
description:
- Specifies the name of the VPN instance that is associated with all-active gateway peer.
The value is a string of 1 to 31 case-sensitive characters, spaces not supported.
When double quotation marks are used around the string, spaces are allowed in the string.
The value C(_public_) is reserved and cannot be used as the VPN instance name.
required: false
default: null
vpn_instance:
description:
- Specifies the name of a VPN instance.
The value is a string of 1 to 31 case-sensitive characters, spaces not supported.
When double quotation marks are used around the string, spaces are allowed in the string.
The value C(_public_) is reserved and cannot be used as the VPN instance name.
required: false
default: null
vpn_vni:
description:
- Specifies a VNI ID.
Binds a VXLAN network identifier (VNI) to a virtual private network (VPN) instance.
The value is an integer ranging from 1 to 16000000.
required: false
default: null
vbdif_name:
description:
            - Full name of VBDIF interface, e.g. Vbdif100.
required: false
default: null
vbdif_bind_vpn:
description:
- Specifies the name of the VPN instance that is associated with the interface.
The value is a string of 1 to 31 case-sensitive characters, spaces not supported.
When double quotation marks are used around the string, spaces are allowed in the string.
The value C(_public_) is reserved and cannot be used as the VPN instance name.
required: false
default: null
vbdif_mac:
description:
- Specifies a MAC address for a VBDIF interface.
The value is in the format of H-H-H. Each H is a 4-digit hexadecimal number, such as C(00e0) or C(fc01).
If an H contains less than four digits, 0s are added ahead. For example, C(e0) is equal to C(00e0).
A MAC address cannot be all 0s or 1s or a multicast MAC address.
required: false
default: null
arp_distribute_gateway:
description:
- Enable the distributed gateway function on VBDIF interface.
required: false
choices: ['enable','disable']
default: null
arp_direct_route:
description:
- Enable VLINK direct route on VBDIF interface.
required: false
choices: ['enable','disable']
default: null
state:
description:
- Determines whether the config should be present or not
on the device.
required: false
default: present
choices: ['present', 'absent']
"""
EXAMPLES = '''
- name: vxlan gateway module test
hosts: ce128
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Configuring Centralized All-Active Gateways for the VXLAN Network
ce_vxlan_gateway:
dfs_id: 1
dfs_source_ip: 6.6.6.6
dfs_all_active: enable
dfs_peer_ip: 7.7.7.7
provider: "{{ cli }}"
- name: Bind the VPN instance to a Layer 3 gateway, enable distributed gateway, and configure host route advertisement.
ce_vxlan_gateway:
vbdif_name: Vbdif100
vbdif_bind_vpn: vpn1
arp_distribute_gateway: enable
arp_direct_route: enable
provider: "{{ cli }}"
- name: Assign a VNI to a VPN instance.
ce_vxlan_gateway:
vpn_instance: vpn1
vpn_vni: 100
provider: "{{ cli }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"dfs_id": "1", "dfs_source_ip": "6.6.6.6", "dfs_all_active":"enable", "dfs_peer_ip": "7.7.7.7"}
existing:
description: k/v pairs of existing configuration
returned: verbose mode
type: dict
sample: {"dfs_id": "1", "dfs_source_ip": null, "evn_peer_ip": [], "dfs_all_active": "disable"}
end_state:
description: k/v pairs of configuration after module execution
returned: verbose mode
type: dict
sample: {"dfs_id": "1", "evn_source_ip": "6.6.6.6", "evn_source_vpn": null,
"evn_peers": [{"ip": "7.7.7.7", "vpn": ""}], "dfs_all_active": "enable"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["dfs-group 1",
"source ip 6.6.6.6",
"active-active-gateway",
"peer 7.7.7.7"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_config, load_config
from ansible.module_utils.network.cloudengine.ce import ce_argument_spec
def is_config_exist(cmp_cfg, test_cfg):
"""is configuration exist?"""
if not cmp_cfg or not test_cfg:
return False
return bool(test_cfg in cmp_cfg)
def is_valid_v4addr(addr):
"""check is ipv4 addr"""
if not addr:
return False
if addr.count('.') == 3:
addr_list = addr.split('.')
if len(addr_list) != 4:
return False
for each_num in addr_list:
if not each_num.isdigit():
return False
if int(each_num) > 255:
return False
return True
return False
def mac_format(mac):
"""convert mac format to xxxx-xxxx-xxxx"""
if not mac:
return None
if mac.count("-") != 2:
return None
addrs = mac.split("-")
for i in range(3):
if not addrs[i] or not addrs[i].isalnum():
return None
if len(addrs[i]) < 1 or len(addrs[i]) > 4:
return None
try:
addrs[i] = int(addrs[i], 16)
except ValueError:
return None
try:
return "%04x-%04x-%04x" % (addrs[0], addrs[1], addrs[2])
except ValueError:
return None
except TypeError:
return None
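# For example (illustrative): mac_format("e0-fc01-1") returns "00e0-fc01-0001",
# while input not in H-H-H format, e.g. mac_format("00e0fc011"), returns None.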
def get_dfs_source_ip(config):
"""get dfs source ip address"""
get = re.findall(r"source ip ([0-9]+.[0-9]+.[0-9]+.[0-9]+)", config)
if not get:
return None
else:
return get[0]
def get_dfs_source_vpn(config):
"""get dfs source ip vpn instance name"""
    get = re.findall(
        r"source ip [0-9]+\.[0-9]+\.[0-9]+\.[0-9]+ vpn-instance (\S+)", config)
if not get:
return None
else:
return get[0]
def get_dfs_udp_port(config):
"""get dfs udp port"""
get = re.findall(r"udp port (\d+)", config)
if not get:
return None
else:
return get[0]
def get_dfs_peers(config):
"""get evn peer ip list"""
get = re.findall(
r"peer ([0-9]+.[0-9]+.[0-9]+.[0-9]+)\s?(vpn-instance)?\s?(\S*)", config)
if not get:
return None
else:
peers = list()
for item in get:
peers.append(dict(ip=item[0], vpn=item[2]))
return peers
def get_ip_vpn(config):
"""get ip vpn instance"""
get = re.findall(r"ip vpn-instance (\S+)", config)
if not get:
return None
else:
return get[0]
def get_ip_vpn_vni(config):
"""get ip vpn vxlan vni"""
get = re.findall(r"vxlan vni (\d+)", config)
if not get:
return None
else:
return get[0]
def get_vbdif_vpn(config):
"""get ip vpn name of interface vbdif"""
get = re.findall(r"ip binding vpn-instance (\S+)", config)
if not get:
return None
else:
return get[0]
def get_vbdif_mac(config):
"""get mac address of interface vbdif"""
get = re.findall(
r" mac-address ([0-9a-fA-F]{1,4}-[0-9a-fA-F]{1,4}-[0-9a-fA-F]{1,4})", config)
if not get:
return None
else:
return get[0]
class VxlanGateway(object):
"""
Manages Gateway for the VXLAN Network.
"""
def __init__(self, argument_spec):
self.spec = argument_spec
self.module = None
self.init_module()
# module input info
self.dfs_id = self.module.params['dfs_id']
self.dfs_source_ip = self.module.params['dfs_source_ip']
self.dfs_source_vpn = self.module.params['dfs_source_vpn']
self.dfs_udp_port = self.module.params['dfs_udp_port']
self.dfs_all_active = self.module.params['dfs_all_active']
self.dfs_peer_ip = self.module.params['dfs_peer_ip']
self.dfs_peer_vpn = self.module.params['dfs_peer_vpn']
self.vpn_instance = self.module.params['vpn_instance']
self.vpn_vni = self.module.params['vpn_vni']
self.vbdif_name = self.module.params['vbdif_name']
self.vbdif_mac = self.module.params['vbdif_mac']
self.vbdif_bind_vpn = self.module.params['vbdif_bind_vpn']
self.arp_distribute_gateway = self.module.params['arp_distribute_gateway']
self.arp_direct_route = self.module.params['arp_direct_route']
self.state = self.module.params['state']
# host info
self.host = self.module.params['host']
self.username = self.module.params['username']
self.port = self.module.params['port']
# state
self.config = "" # current config
self.changed = False
self.updates_cmd = list()
self.commands = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
def init_module(self):
"""init module"""
self.module = AnsibleModule(
argument_spec=self.spec, supports_check_mode=True)
def cli_load_config(self, commands):
"""load config by cli"""
if not self.module.check_mode:
load_config(self.module, commands)
def get_current_config(self):
"""get current configuration"""
flags = list()
exp = " | ignore-case section include dfs-group"
if self.vpn_instance:
exp += "|^ip vpn-instance %s$" % self.vpn_instance
if self.vbdif_name:
exp += "|^interface %s$" % self.vbdif_name
flags.append(exp)
return get_config(self.module, flags)
def cli_add_command(self, command, undo=False):
"""add command to self.update_cmd and self.commands"""
if undo and command.lower() not in ["quit", "return"]:
cmd = "undo " + command
else:
cmd = command
self.commands.append(cmd) # set to device
if command.lower() not in ["quit", "return"]:
self.updates_cmd.append(cmd) # show updates result
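    # For example (illustrative): cli_add_command("dfs-group 1", undo=True)
    # appends "undo dfs-group 1" to both self.commands and self.updates_cmd;
    # "quit" and "return" are sent to the device but kept out of updates_cmd.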
def config_dfs_group(self):
"""manage Dynamic Fabric Service (DFS) group configuration"""
if not self.dfs_id:
return
dfs_view = False
view_cmd = "dfs-group %s" % self.dfs_id
exist = is_config_exist(self.config, view_cmd)
if self.state == "present" and not exist:
self.cli_add_command(view_cmd)
dfs_view = True
# undo dfs-group dfs-group-id
if self.state == "absent" and exist:
if not self.dfs_source_ip and not self.dfs_udp_port and not self.dfs_all_active and not self.dfs_peer_ip:
self.cli_add_command(view_cmd, undo=True)
return
# [undo] source ip ip-address [ vpn-instance vpn-instance-name ]
if self.dfs_source_ip:
cmd = "source ip %s" % self.dfs_source_ip
if self.dfs_source_vpn:
cmd += " vpn-instance %s" % self.dfs_source_vpn
exist = is_config_exist(self.config, cmd)
if self.state == "present" and not exist:
if not dfs_view:
self.cli_add_command(view_cmd)
dfs_view = True
self.cli_add_command(cmd)
if self.state == "absent" and exist:
if not dfs_view:
self.cli_add_command(view_cmd)
dfs_view = True
self.cli_add_command(cmd, undo=True)
# [undo] udp port port-number
if self.dfs_udp_port:
cmd = "udp port %s" % self.dfs_udp_port
exist = is_config_exist(self.config, cmd)
if self.state == "present" and not exist:
if not dfs_view:
self.cli_add_command(view_cmd)
dfs_view = True
self.cli_add_command(cmd)
elif self.state == "absent" and exist:
if not dfs_view:
self.cli_add_command(view_cmd)
dfs_view = True
self.cli_add_command(cmd, undo=True)
# [undo] active-active-gateway
        # [undo] peer peer-ip [ vpn-instance vpn-instance-name ]
aa_cmd = "active-active-gateway"
aa_exist = is_config_exist(self.config, aa_cmd)
aa_view = False
if self.dfs_all_active == "disable":
if aa_exist:
cmd = "peer %s" % self.dfs_peer_ip
if self.dfs_source_vpn:
cmd += " vpn-instance %s" % self.dfs_peer_vpn
exist = is_config_exist(self.config, cmd)
if self.state == "absent" and exist:
if not dfs_view:
self.cli_add_command(view_cmd)
dfs_view = True
self.cli_add_command(aa_cmd)
self.cli_add_command(cmd, undo=True)
self.cli_add_command("quit")
if not dfs_view:
self.cli_add_command(view_cmd)
dfs_view = True
self.cli_add_command(aa_cmd, undo=True)
elif self.dfs_all_active == "enable":
if not aa_exist:
if not dfs_view:
self.cli_add_command(view_cmd)
dfs_view = True
self.cli_add_command(aa_cmd)
aa_view = True
if self.dfs_peer_ip:
cmd = "peer %s" % self.dfs_peer_ip
if self.dfs_peer_vpn:
cmd += " vpn-instance %s" % self.dfs_peer_vpn
exist = is_config_exist(self.config, cmd)
if self.state == "present" and not exist:
if not dfs_view:
self.cli_add_command(view_cmd)
dfs_view = True
if not aa_view:
self.cli_add_command(aa_cmd)
self.cli_add_command(cmd)
self.cli_add_command("quit")
elif self.state == "absent" and exist:
if not dfs_view:
self.cli_add_command(view_cmd)
dfs_view = True
if not aa_view:
self.cli_add_command(aa_cmd)
self.cli_add_command(cmd, undo=True)
self.cli_add_command("quit")
else: # not input dfs_all_active
if aa_exist and self.dfs_peer_ip:
cmd = "peer %s" % self.dfs_peer_ip
if self.dfs_peer_vpn:
cmd += " vpn-instance %s" % self.dfs_peer_vpn
exist = is_config_exist(self.config, cmd)
if self.state == "present" and not exist:
if not dfs_view:
self.cli_add_command(view_cmd)
dfs_view = True
self.cli_add_command(aa_cmd)
self.cli_add_command(cmd)
self.cli_add_command("quit")
elif self.state == "absent" and exist:
if not dfs_view:
self.cli_add_command(view_cmd)
dfs_view = True
self.cli_add_command(aa_cmd)
self.cli_add_command(cmd, undo=True)
self.cli_add_command("quit")
else:
pass
elif not aa_exist and self.dfs_peer_ip and self.state == "present":
self.module.fail_json(
msg="Error: All-active gateways is not enable.")
else:
pass
if dfs_view:
self.cli_add_command("quit")
def config_ip_vpn(self):
"""configure command at the ip vpn view"""
if not self.vpn_instance or not self.vpn_vni:
return
# ip vpn-instance vpn-instance-name
view_cmd = "ip vpn-instance %s" % self.vpn_instance
exist = is_config_exist(self.config, view_cmd)
if not exist:
self.module.fail_json(
msg="Error: ip vpn instance %s is not exist." % self.vpn_instance)
# [undo] vxlan vni vni-id
cmd = "vxlan vni %s" % self.vpn_vni
exist = is_config_exist(self.config, cmd)
if self.state == "present" and not exist:
self.cli_add_command(view_cmd)
self.cli_add_command(cmd)
self.cli_add_command("quit")
elif self.state == "absent" and exist:
self.cli_add_command(view_cmd)
self.cli_add_command(cmd, undo=True)
self.cli_add_command("quit")
def config_vbdif(self):
"""configure command at the VBDIF interface view"""
if not self.vbdif_name:
return
vbdif_cmd = "interface %s" % self.vbdif_name.lower().capitalize()
exist = is_config_exist(self.config, vbdif_cmd)
if not exist:
self.module.fail_json(
msg="Error: Interface %s is not exist." % self.vbdif_name)
# interface vbdif bd-id
# [undo] ip binding vpn-instance vpn-instance-name
vbdif_view = False
if self.vbdif_bind_vpn:
cmd = "ip binding vpn-instance %s" % self.vbdif_bind_vpn
exist = is_config_exist(self.config, cmd)
if self.state == "present" and not exist:
if not vbdif_view:
self.cli_add_command(vbdif_cmd)
vbdif_view = True
self.cli_add_command(cmd)
elif self.state == "absent" and exist:
if not vbdif_view:
self.cli_add_command(vbdif_cmd)
vbdif_view = True
self.cli_add_command(cmd, undo=True)
# [undo] arp distribute-gateway enable
if self.arp_distribute_gateway:
cmd = "arp distribute-gateway enable"
exist = is_config_exist(self.config, cmd)
if self.arp_distribute_gateway == "enable" and not exist:
if not vbdif_view:
self.cli_add_command(vbdif_cmd)
vbdif_view = True
self.cli_add_command(cmd)
elif self.arp_distribute_gateway == "disable" and exist:
if not vbdif_view:
self.cli_add_command(vbdif_cmd)
vbdif_view = True
self.cli_add_command(cmd, undo=True)
# [undo] arp direct-route enable
if self.arp_direct_route:
cmd = "arp direct-route enable"
exist = is_config_exist(self.config, cmd)
if self.arp_direct_route == "enable" and not exist:
if not vbdif_view:
self.cli_add_command(vbdif_cmd)
vbdif_view = True
self.cli_add_command(cmd)
elif self.arp_direct_route == "disable" and exist:
if not vbdif_view:
self.cli_add_command(vbdif_cmd)
vbdif_view = True
self.cli_add_command(cmd, undo=True)
# mac-address mac-address
# undo mac-address
if self.vbdif_mac:
cmd = "mac-address %s" % self.vbdif_mac
exist = is_config_exist(self.config, cmd)
if self.state == "present" and not exist:
if not vbdif_view:
self.cli_add_command(vbdif_cmd)
vbdif_view = True
self.cli_add_command(cmd)
elif self.state == "absent" and exist:
if not vbdif_view:
self.cli_add_command(vbdif_cmd)
vbdif_view = True
self.cli_add_command("undo mac-address")
# quit
if vbdif_view:
self.cli_add_command("quit")
def is_valid_vbdif(self, ifname):
"""check is interface vbdif"""
if not ifname.upper().startswith('VBDIF'):
return False
bdid = self.vbdif_name.replace(" ", "").upper().replace("VBDIF", "")
if not bdid.isdigit():
return False
if int(bdid) < 1 or int(bdid) > 16777215:
return False
return True
def is_valid_ip_vpn(self, vpname):
"""check ip vpn"""
if not vpname:
return False
if vpname == "_public_":
self.module.fail_json(
msg="Error: The value C(_public_) is reserved and cannot be used as the VPN instance name.")
if len(vpname) < 1 or len(vpname) > 31:
self.module.fail_json(
msg="Error: IP vpn name length is not in the range from 1 to 31.")
return True
def check_params(self):
"""Check all input params"""
# dfs id check
if self.dfs_id:
            if not self.dfs_id.isdigit():
                self.module.fail_json(msg="Error: DFS id is not a digit.")
            if int(self.dfs_id) != 1:
                self.module.fail_json(msg="Error: DFS id is not 1.")
# dfs_source_ip check
if self.dfs_source_ip:
if not is_valid_v4addr(self.dfs_source_ip):
self.module.fail_json(msg="Error: dfs_source_ip is invalid.")
# dfs_source_vpn check
if self.dfs_source_vpn and not self.is_valid_ip_vpn(self.dfs_source_vpn):
self.module.fail_json(msg="Error: dfs_source_vpn is invalid.")
        # dfs_source_vpn and dfs_source_ip must be set at the same time
if self.dfs_source_vpn and not self.dfs_source_ip:
self.module.fail_json(
msg="Error: dfs_source_vpn and dfs_source_ip must set at the same time.")
# dfs_udp_port check
if self.dfs_udp_port:
if not self.dfs_udp_port.isdigit():
self.module.fail_json(
msg="Error: dfs_udp_port id is not digit.")
if int(self.dfs_udp_port) < 1025 or int(self.dfs_udp_port) > 65535:
self.module.fail_json(
msg="dfs_udp_port is not ranges from 1025 to 65535.")
# dfs_peer_ip check
if self.dfs_peer_ip:
if not is_valid_v4addr(self.dfs_peer_ip):
self.module.fail_json(msg="Error: dfs_peer_ip is invalid.")
# dfs_peer_vpn check
if self.dfs_peer_vpn and not self.is_valid_ip_vpn(self.dfs_peer_vpn):
self.module.fail_json(msg="Error: dfs_peer_vpn is invalid.")
        # dfs_peer_vpn and dfs_peer_ip must be set at the same time
if self.dfs_peer_vpn and not self.dfs_peer_ip:
self.module.fail_json(
msg="Error: dfs_peer_vpn and dfs_peer_ip must set at the same time.")
# vpn_instance check
if self.vpn_instance and not self.is_valid_ip_vpn(self.vpn_instance):
self.module.fail_json(msg="Error: vpn_instance is invalid.")
# vpn_vni check
if self.vpn_vni:
if not self.vpn_vni.isdigit():
self.module.fail_json(msg="Error: vpn_vni id is not digit.")
if int(self.vpn_vni) < 1 or int(self.vpn_vni) > 16000000:
self.module.fail_json(
msg="vpn_vni is not ranges from 1 to 16000000.")
        # vpn_instance and vpn_vni must be set at the same time
if bool(self.vpn_instance) != bool(self.vpn_vni):
self.module.fail_json(
msg="Error: vpn_instance and vpn_vni must set at the same time.")
# vbdif_name check
if self.vbdif_name:
self.vbdif_name = self.vbdif_name.replace(" ", "").lower().capitalize()
if not self.is_valid_vbdif(self.vbdif_name):
self.module.fail_json(msg="Error: vbdif_name is invalid.")
# vbdif_mac check
if self.vbdif_mac:
mac = mac_format(self.vbdif_mac)
if not mac:
self.module.fail_json(msg="Error: vbdif_mac is invalid.")
self.vbdif_mac = mac
# vbdif_bind_vpn check
if self.vbdif_bind_vpn and not self.is_valid_ip_vpn(self.vbdif_bind_vpn):
self.module.fail_json(msg="Error: vbdif_bind_vpn is invalid.")
        # All-Active Gateway and Distributed Gateway configurations cannot
        # be set at the same time.
        if self.dfs_id:
            if self.vpn_vni or self.arp_distribute_gateway == "enable":
                self.module.fail_json(msg="Error: All-Active Gateway and Distributed Gateway "
                                          "configurations cannot be set at the same time.")
def get_proposed(self):
"""get proposed info"""
if self.dfs_id:
self.proposed["dfs_id"] = self.dfs_id
self.proposed["dfs_source_ip"] = self.dfs_source_ip
self.proposed["dfs_source_vpn"] = self.dfs_source_vpn
self.proposed["dfs_udp_port"] = self.dfs_udp_port
self.proposed["dfs_all_active"] = self.dfs_all_active
self.proposed["dfs_peer_ip"] = self.dfs_peer_ip
self.proposed["dfs_peer_vpn"] = self.dfs_peer_vpn
if self.vpn_instance:
self.proposed["vpn_instance"] = self.vpn_instance
self.proposed["vpn_vni"] = self.vpn_vni
if self.vbdif_name:
self.proposed["vbdif_name"] = self.vbdif_name
self.proposed["vbdif_mac"] = self.vbdif_mac
self.proposed["vbdif_bind_vpn"] = self.vbdif_bind_vpn
            self.proposed["arp_distribute_gateway"] = self.arp_distribute_gateway
self.proposed["arp_direct_route"] = self.arp_direct_route
self.proposed["state"] = self.state
def get_existing(self):
"""get existing info"""
if not self.config:
return
if is_config_exist(self.config, "dfs-group 1"):
self.existing["dfs_id"] = "1"
self.existing["dfs_source_ip"] = get_dfs_source_ip(self.config)
self.existing["dfs_source_vpn"] = get_dfs_source_vpn(self.config)
self.existing["dfs_udp_port"] = get_dfs_udp_port(self.config)
if is_config_exist(self.config, "active-active-gateway"):
self.existing["dfs_all_active"] = "enable"
self.existing["dfs_peers"] = get_dfs_peers(self.config)
else:
self.existing["dfs_all_active"] = "disable"
if self.vpn_instance:
self.existing["vpn_instance"] = get_ip_vpn(self.config)
self.existing["vpn_vni"] = get_ip_vpn_vni(self.config)
if self.vbdif_name:
self.existing["vbdif_name"] = self.vbdif_name
self.existing["vbdif_mac"] = get_vbdif_mac(self.config)
self.existing["vbdif_bind_vpn"] = get_vbdif_vpn(self.config)
if is_config_exist(self.config, "arp distribute-gateway enable"):
self.existing["arp_distribute_gateway"] = "enable"
else:
self.existing["arp_distribute_gateway"] = "disable"
if is_config_exist(self.config, "arp direct-route enable"):
self.existing["arp_direct_route"] = "enable"
else:
self.existing["arp_direct_route"] = "disable"
def get_end_state(self):
"""get end state info"""
config = self.get_current_config()
if not config:
return
if is_config_exist(config, "dfs-group 1"):
self.end_state["dfs_id"] = "1"
self.end_state["dfs_source_ip"] = get_dfs_source_ip(config)
self.end_state["dfs_source_vpn"] = get_dfs_source_vpn(config)
self.end_state["dfs_udp_port"] = get_dfs_udp_port(config)
if is_config_exist(config, "active-active-gateway"):
self.end_state["dfs_all_active"] = "enable"
self.end_state["dfs_peers"] = get_dfs_peers(config)
else:
self.end_state["dfs_all_active"] = "disable"
if self.vpn_instance:
self.end_state["vpn_instance"] = get_ip_vpn(config)
self.end_state["vpn_vni"] = get_ip_vpn_vni(config)
if self.vbdif_name:
self.end_state["vbdif_name"] = self.vbdif_name
self.end_state["vbdif_mac"] = get_vbdif_mac(config)
self.end_state["vbdif_bind_vpn"] = get_vbdif_vpn(config)
if is_config_exist(config, "arp distribute-gateway enable"):
self.end_state["arp_distribute_gateway"] = "enable"
else:
self.end_state["arp_distribute_gateway"] = "disable"
if is_config_exist(config, "arp direct-route enable"):
self.end_state["arp_direct_route"] = "enable"
else:
self.end_state["arp_direct_route"] = "disable"
def work(self):
"""worker"""
self.check_params()
self.config = self.get_current_config()
self.get_existing()
self.get_proposed()
# deal present or absent
if self.dfs_id:
self.config_dfs_group()
if self.vpn_instance:
self.config_ip_vpn()
if self.vbdif_name:
self.config_vbdif()
if self.commands:
self.cli_load_config(self.commands)
self.changed = True
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
if self.changed:
self.results['updates'] = self.updates_cmd
else:
self.results['updates'] = list()
self.module.exit_json(**self.results)
def main():
"""Module main"""
argument_spec = dict(
dfs_id=dict(required=False, type='str'),
dfs_source_ip=dict(required=False, type='str'),
dfs_source_vpn=dict(required=False, type='str'),
dfs_udp_port=dict(required=False, type='str'),
dfs_all_active=dict(required=False, type='str',
choices=['enable', 'disable']),
dfs_peer_ip=dict(required=False, type='str'),
dfs_peer_vpn=dict(required=False, type='str'),
vpn_instance=dict(required=False, type='str'),
vpn_vni=dict(required=False, type='str'),
vbdif_name=dict(required=False, type='str'),
vbdif_mac=dict(required=False, type='str'),
vbdif_bind_vpn=dict(required=False, type='str'),
arp_distribute_gateway=dict(
required=False, type='str', choices=['enable', 'disable']),
arp_direct_route=dict(required=False, type='str',
choices=['enable', 'disable']),
state=dict(required=False, default='present',
choices=['present', 'absent'])
)
argument_spec.update(ce_argument_spec)
module = VxlanGateway(argument_spec)
module.work()
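
# Example task for this module (illustrative only: the module name
# "ce_vxlan_gateway" and the values below are assumptions, not taken
# from this file):
#
#   - name: Configure an all-active VXLAN gateway DFS group
#     ce_vxlan_gateway:
#       dfs_id: 1
#       dfs_source_ip: 6.5.4.3
#       dfs_all_active: enable
#       state: present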
if __name__ == '__main__':
main()
|
jmachuca77/ardupilot
|
refs/heads/master
|
Tools/scripts/decode_watchdog.py
|
2
|
#!/usr/bin/env python
'''
decode a watchdog message
/Tools/scripts/decode_watchdog.py "WDOG, 2641424, -3, 0, 0, 0, 0, 0, 0, 122, 3, 0, 181, 4196355, 135203219, SPI1"
'''
import re
import sys
import optparse
from collections import OrderedDict
import decode_ICSR
class DecodeWatchDog(object):
class Component(object):
def __init__(self, value):
self.value = value
def prefix(self):
m = re.match(".*Component([A-Z]+)", str(type(self)))
return m.group(1)
def decode(self):
return "?????"
def string_value(self):
return str(self.value)
def print_decoded(self):
print("%5s %25s: %12s: %s" % (
self.prefix(),
self.expansion(),
self.string_value(),
self.decode()
))
class ComponentT(Component):
def expansion(self):
return "Scheduler Task"
def decode(self):
if int(self.value) == -3:
return "Waiting for sample"
if int(self.value) == -1:
return "Pre-loop"
if int(self.value) == -2:
return "Fast loop"
return self.value
class ComponentSL(Component):
def expansion(self):
return "Semaphore Line"
def decode(self):
if int(self.value) == 0:
return "Not waiting on semaphore"
return self.value
class ComponentFL(Component):
def expansion(self):
return "Fault Line"
class ComponentFT(Component):
def expansion(self):
return "Fault Type"
def decode(self):
x = int(self.value)
# this list taken from AP_HAL_ChibiOS/system.cpp
fault_types = {
1: "Reset",
2: "NMI",
3: "HardFault",
4: "MemManage",
5: "BusFault",
6: "UsageFault",
}
if x in fault_types:
return fault_types[x]
return super(DecodeWatchDog.ComponentFT, self).decode()
class ComponentFA(Component):
def expansion(self):
return "Fault Address"
def string_value(self):
return hex(int(self.value, 16))
class ComponentFTP(Component):
def expansion(self):
return "Fault Thread Priority"
class ComponentFLR(Component):
def expansion(self):
return "Fault Long Return Address" # ?? FIXME: is this really what LR stands for?
def string_value(self):
return "0x" + self.value
class ComponentFICSR(Component):
def expansion(self):
return "Fault ICS Register" # ?? FIXME: expand further
def string_value(self):
return hex(int(self.value, 16))
def decode(self):
return "[Below]"
def print_decoded(self):
super(DecodeWatchDog.ComponentFICSR, self).print_decoded()
decoder = decode_ICSR.DecodeICSR()
            # string_value() above parses the value as hex, so decode it consistently:
            text = decoder.string(int(self.value, 16))
sys.stdout.write(re.sub("^", " ", text, flags=re.M))
class ComponentMM(Component):
def expansion(self):
return "MAVLink Message"
def decode(self):
if int(self.value) == 0:
return "[None]"
return super(DecodeWatchDog.ComponentMM, self).decode()
class ComponentMC(Component):
def expansion(self):
return "MAVLink Command"
def decode(self):
if int(self.value) == 0:
return "[None]"
return super(DecodeWatchDog.ComponentMC, self).decode()
class ComponentIE(Component):
def expansion(self):
return "Internal Error Mask"
class ComponentIEHex(ComponentIE):
def expansion(self):
return "Internal Error Mask"
def string_value(self):
return hex(int(self.value, 16))
class ComponentIEC(Component):
def expansion(self):
return "Internal Error Count"
def decode(self):
return self.value
class ComponentIEL(Component):
def expansion(self):
return "Internal Error Line"
def decode(self):
return self.value
class ComponentTN(Component):
def expansion(self):
return "Thread name"
def __init__(self):
self.components = OrderedDict()
self.components["T"] = DecodeWatchDog.ComponentT
self.components["SL"] = DecodeWatchDog.ComponentSL
self.components["FL"] = DecodeWatchDog.ComponentFL
self.components["FT"] = DecodeWatchDog.ComponentFT
self.components["FA"] = DecodeWatchDog.ComponentFA
self.components["FTP"] = DecodeWatchDog.ComponentFTP
self.components["FLR"] = DecodeWatchDog.ComponentFLR
self.components["FICSR"] = DecodeWatchDog.ComponentFICSR
self.components["MM"] = DecodeWatchDog.ComponentMM
self.components["MC"] = DecodeWatchDog.ComponentMC
self.components["IE"] = DecodeWatchDog.ComponentIEHex
self.components["IEC"] = DecodeWatchDog.ComponentIEC
self.components["TN"] = DecodeWatchDog.ComponentTN
self.df_components = {}
self.df_components["Task"] = DecodeWatchDog.ComponentT
self.df_components["Tsk"] = DecodeWatchDog.ComponentT
self.df_components["IErr"] = DecodeWatchDog.ComponentIE
self.df_components["IE"] = DecodeWatchDog.ComponentIE
self.df_components["IEC"] = DecodeWatchDog.ComponentIEC
self.df_components["IEL"] = DecodeWatchDog.ComponentIEL
self.df_components["MavMsg"] = DecodeWatchDog.ComponentMM
self.df_components["MvMsg"] = DecodeWatchDog.ComponentMM
self.df_components["MvCmd"] = DecodeWatchDog.ComponentMC
self.df_components["SemLine"] = DecodeWatchDog.ComponentSL
self.df_components["SmLn"] = DecodeWatchDog.ComponentSL
self.df_components["FL"] = DecodeWatchDog.ComponentFL
self.df_components["FT"] = DecodeWatchDog.ComponentFT
self.df_components["FA"] = DecodeWatchDog.ComponentFA
self.df_components["FP"] = DecodeWatchDog.ComponentFTP
self.df_components["LR"] = DecodeWatchDog.ComponentFLR
self.df_components["ICSR"] = DecodeWatchDog.ComponentFICSR
self.df_components["TN"] = DecodeWatchDog.ComponentTN
def run(self, text):
# see if the supplied string is a statustext message:
re_string = "(?:APM: )?WDG:"
for component in self.components.keys():
re_string += " %s(?P<%s>[^ ]+)" % (component, component)
# print("string: %s" % text)
# print("re_string: %s" % re_string)
wdg_re = re.compile(re_string)
m = wdg_re.match(text)
if m is not None:
comp = []
for group in m.groupdict():
comp.append(self.components[group](m.group(group)))
for c in comp:
c.print_decoded()
return
        # not a statustext message; see if it is a WDOG dataflash message
df_re = re.compile("WDOG {(.*)}")
m = df_re.match(text)
if m is not None:
pairs = m.group(1).split(",")
for pair in pairs:
(name, value) = pair.split(":")
name = name.strip()
if name == "TimeUS":
continue
value = value.strip()
# print("(%s)=(%s)" % (name, value))
if name not in self.df_components:
raise KeyError(name)
self.df_components[name](value).print_decoded()
return
# not a statustext message and not a mavlogdump dump of a WDOG
# dataflash message. See if it is a .log-style CSV line
log_re = re.compile("WDOG, (\d+), ([-\d]+), ([-\d]+), ([-\d]+), ([-\d]+), ([-\d]+), ([-\d]+), ([-\d]+), ([-\d]+), ([-\d]+), ([-\d]+), ([-\d]+), ([-\d]+), ([-\d]+), (\w+)")
column_names = "TimeUS,Tsk,IE,IEC,IEL,MvMsg,MvCmd,SmLn,FL,FT,FA,FP,ICSR,LR,TN"
cols = column_names.split(",")
m = log_re.match(text)
if m is not None:
            for i in range(len(cols)):
name = cols[i]
if name == 'TimeUS':
continue
value = m.group(i+1)
# convert some things from base10 to hex:
if name in ["LR", "FICSR"]:
value = int(value, 10)
value = hex(value)
value = value[2:]
if name not in self.df_components:
raise KeyError(name)
self.df_components[name](value).print_decoded()
return
raise ValueError("Text not recognised")
# 2020-06-10 17:20:08.45: WDOG {TimeUS : 949568, Task : -2, IErr : 0, IErrCnt : 0, MavMsg : 0, MavCmd : 0, SemLine : 0, FL : 100, FT : 3, FA : 404947019, FP : 183, ICSR : 4196355}
# APM: WDG: T-3 SL0 FL122 FT3 FA0 FTP177 FLR80CBB35 FICSR4196355 MM0 MC0 IE67108864 IEC12353 TN:rcin
# FMT, 254, 47, WDOG, QbIHHHHHHHIBIIn, TimeUS,Tsk,IE,IEC,IEL,MvMsg,MvCmd,SmLn,FL,FT,FA,FP,ICSR,LR,TN
# WDOG, 2641424, -3, 0, 0, 0, 0, 0, 0, 122, 3, 0, 181, 4196355, 135203219, SPI1
if __name__ == '__main__':
parser = optparse.OptionParser(__file__)
opts, args = parser.parse_args()
if len(args) == 0:
print("Usage: %s" % parser.usage)
sys.exit(0)
text = args[0]
decoder = DecodeWatchDog()
decoder.run(text)
|
lecaoquochung/ddnb.django
|
refs/heads/master
|
tests/resolve_url/__init__.py
|
12133432
| |
atul-bhouraskar/django
|
refs/heads/master
|
django/contrib/postgres/aggregates/statistics.py
|
493
|
from django.db.models import FloatField, IntegerField
from django.db.models.aggregates import Aggregate
__all__ = [
'CovarPop', 'Corr', 'RegrAvgX', 'RegrAvgY', 'RegrCount', 'RegrIntercept',
'RegrR2', 'RegrSlope', 'RegrSXX', 'RegrSXY', 'RegrSYY', 'StatAggregate',
]
class StatAggregate(Aggregate):
def __init__(self, y, x, output_field=FloatField()):
if not x or not y:
raise ValueError('Both y and x must be provided.')
super(StatAggregate, self).__init__(y=y, x=x, output_field=output_field)
self.x = x
self.y = y
self.source_expressions = self._parse_expressions(self.y, self.x)
def get_source_expressions(self):
return self.y, self.x
def set_source_expressions(self, exprs):
self.y, self.x = exprs
def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
return super(Aggregate, self).resolve_expression(query, allow_joins, reuse, summarize)
class Corr(StatAggregate):
function = 'CORR'
class CovarPop(StatAggregate):
def __init__(self, y, x, sample=False):
self.function = 'COVAR_SAMP' if sample else 'COVAR_POP'
super(CovarPop, self).__init__(y, x)
class RegrAvgX(StatAggregate):
function = 'REGR_AVGX'
class RegrAvgY(StatAggregate):
function = 'REGR_AVGY'
class RegrCount(StatAggregate):
function = 'REGR_COUNT'
def __init__(self, y, x):
super(RegrCount, self).__init__(y=y, x=x, output_field=IntegerField())
def convert_value(self, value, expression, connection, context):
if value is None:
return 0
return int(value)
class RegrIntercept(StatAggregate):
function = 'REGR_INTERCEPT'
class RegrR2(StatAggregate):
function = 'REGR_R2'
class RegrSlope(StatAggregate):
function = 'REGR_SLOPE'
class RegrSXX(StatAggregate):
function = 'REGR_SXX'
class RegrSXY(StatAggregate):
function = 'REGR_SXY'
class RegrSYY(StatAggregate):
function = 'REGR_SYY'
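
# Illustrative usage (a sketch, not part of this module; assumes a model
# named Book with numeric fields "price" and "pages", and the PostgreSQL
# database backend):
#
#   from django.contrib.postgres.aggregates import Corr, RegrCount
#   Book.objects.aggregate(correlation=Corr(y='price', x='pages'))
#   Book.objects.aggregate(n=RegrCount(y='price', x='pages'))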
|
dantebarba/docker-media-server
|
refs/heads/master
|
plex/Subliminal.bundle/Contents/Libraries/Shared/stevedore/tests/test_enabled.py
|
8
|
from stevedore import enabled
def test_enabled():
def check_enabled(ep):
return ep.name == 't2'
em = enabled.EnabledExtensionManager(
'stevedore.test.extension',
check_enabled,
invoke_on_load=True,
invoke_args=('a',),
invoke_kwds={'b': 'B'},
)
assert len(em.extensions) == 1
assert em.names() == ['t2']
def test_enabled_after_load():
def check_enabled(ext):
return ext.obj and ext.name == 't2'
em = enabled.EnabledExtensionManager(
'stevedore.test.extension',
check_enabled,
invoke_on_load=True,
invoke_args=('a',),
invoke_kwds={'b': 'B'},
)
assert len(em.extensions) == 1
assert em.names() == ['t2']
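
# These tests rely on the 'stevedore.test.extension' entry point group
# that stevedore registers in its own packaging; roughly (illustrative,
# names assumed from the assertions above):
#
#   entry_points={
#       'stevedore.test.extension': [
#           't1 = stevedore.tests.test_extension:FauxExtension',
#           't2 = stevedore.tests.test_extension:FauxExtension',
#       ],
#   }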
|
shaggytwodope/qutebrowser
|
refs/heads/master
|
qutebrowser/utils/version.py
|
2
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2016 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Utilities to show various version informations."""
import re
import sys
import glob
import os.path
import platform
import subprocess
import importlib
import collections
from PyQt5.QtCore import QT_VERSION_STR, PYQT_VERSION_STR, qVersion
from PyQt5.QtNetwork import QSslSocket
from PyQt5.QtWidgets import QApplication
try:
from PyQt5.QtWebKit import qWebKitVersion
except ImportError: # pragma: no cover
qWebKitVersion = None
import qutebrowser
from qutebrowser.utils import log, utils, standarddir
from qutebrowser.browser import pdfjs
def _git_str():
"""Try to find out git version.
Return:
string containing the git commit ID.
None if there was an error or we're not in a git repo.
"""
# First try via subprocess if possible
commit = None
if not hasattr(sys, "frozen"):
try:
gitpath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
os.path.pardir, os.path.pardir)
except (NameError, OSError):
log.misc.exception("Error while getting git path")
else:
commit = _git_str_subprocess(gitpath)
if commit is not None:
return commit
# If that fails, check the git-commit-id file.
try:
return utils.read_file('git-commit-id')
except (OSError, ImportError):
return None
def _git_str_subprocess(gitpath):
"""Try to get the git commit ID and timestamp by calling git.
Args:
gitpath: The path where the .git folder is.
Return:
The ID/timestamp on success, None on failure.
"""
if not os.path.isdir(os.path.join(gitpath, ".git")):
return None
try:
cid = subprocess.check_output(
['git', 'describe', '--tags', '--dirty', '--always'],
cwd=gitpath).decode('UTF-8').strip()
date = subprocess.check_output(
['git', 'show', '-s', '--format=%ci', 'HEAD'],
cwd=gitpath).decode('UTF-8').strip()
return '{} ({})'.format(cid, date)
except (subprocess.CalledProcessError, OSError):
return None
def _release_info():
"""Try to gather distribution release informations.
Return:
list of (filename, content) tuples.
"""
blacklisted = ['ANSI_COLOR=', 'HOME_URL=', 'SUPPORT_URL=',
'BUG_REPORT_URL=']
data = []
for fn in glob.glob("/etc/*-release"):
lines = []
try:
with open(fn, 'r', encoding='utf-8') as f:
for line in f.read().strip().splitlines():
if not any(line.startswith(bl) for bl in blacklisted):
lines.append(line)
if lines:
data.append((fn, '\n'.join(lines)))
except OSError:
log.misc.exception("Error while reading {}.".format(fn))
return data
def _module_versions():
"""Get versions of optional modules.
Return:
A list of lines with version info.
"""
lines = []
modules = collections.OrderedDict([
('sip', ['SIP_VERSION_STR']),
('colorama', ['VERSION', '__version__']),
('pypeg2', ['__version__']),
('jinja2', ['__version__']),
('pygments', ['__version__']),
('yaml', ['__version__']),
('cssutils', ['__version__']),
('typing', []),
('PyQt5.QtWebEngineWidgets', []),
])
for name, attributes in modules.items():
try:
module = importlib.import_module(name)
except ImportError:
text = '{}: no'.format(name)
else:
for attr in attributes:
try:
text = '{}: {}'.format(name, getattr(module, attr))
except AttributeError:
pass
else:
break
else:
text = '{}: yes'.format(name)
lines.append(text)
return lines
def _path_info():
"""Get info about important path names.
Return:
A dictionary of descriptive to actual path names.
"""
return {
'config': standarddir.config(),
'data': standarddir.data(),
'system_data': standarddir.system_data(),
'cache': standarddir.cache(),
'download': standarddir.download(),
'runtime': standarddir.runtime(),
}
def _os_info():
"""Get operating system info.
Return:
A list of lines with version info.
"""
lines = []
releaseinfo = None
if sys.platform == 'linux':
osver = ''
releaseinfo = _release_info()
elif sys.platform == 'win32':
osver = ', '.join(platform.win32_ver())
elif sys.platform == 'darwin':
release, versioninfo, machine = platform.mac_ver()
if all(not e for e in versioninfo):
versioninfo = ''
else:
versioninfo = '.'.join(versioninfo)
osver = ', '.join([e for e in [release, versioninfo, machine] if e])
else:
osver = '?'
lines.append('OS Version: {}'.format(osver))
if releaseinfo is not None:
for (fn, data) in releaseinfo:
lines += ['', '--- {} ---'.format(fn), data]
return lines
def _pdfjs_version():
"""Get the pdf.js version.
Return:
A string with the version number.
"""
try:
pdfjs_file, file_path = pdfjs.get_pdfjs_res_and_path('build/pdf.js')
except pdfjs.PDFJSNotFound:
return 'no'
else:
pdfjs_file = pdfjs_file.decode('utf-8')
version_re = re.compile(
r"^(PDFJS\.version|var pdfjsVersion) = '([^']+)';$", re.MULTILINE)
match = version_re.search(pdfjs_file)
if not match:
pdfjs_version = 'unknown'
else:
pdfjs_version = match.group(2)
if file_path is None:
file_path = 'bundled'
return '{} ({})'.format(pdfjs_version, file_path)
def version():
"""Return a string with various version informations."""
lines = ["qutebrowser v{}".format(qutebrowser.__version__)]
gitver = _git_str()
if gitver is not None:
lines.append("Git commit: {}".format(gitver))
if qVersion() != QT_VERSION_STR:
qt_version = 'Qt: {} (compiled {})'.format(qVersion(), QT_VERSION_STR)
else:
qt_version = 'Qt: {}'.format(qVersion())
lines += [
'',
'{}: {}'.format(platform.python_implementation(),
platform.python_version()),
qt_version,
'PyQt: {}'.format(PYQT_VERSION_STR),
'',
]
lines += _module_versions()
lines += ['pdf.js: {}'.format(_pdfjs_version())]
if qWebKitVersion is None:
lines.append('Webkit: no')
else:
lines.append('Webkit: {}'.format(qWebKitVersion()))
lines += [
'SSL: {}'.format(QSslSocket.sslLibraryVersionString()),
'',
]
qapp = QApplication.instance()
if qapp:
style = qapp.style()
lines.append('Style: {}'.format(style.metaObject().className()))
importpath = os.path.dirname(os.path.abspath(qutebrowser.__file__))
lines += [
'Platform: {}, {}'.format(platform.platform(),
platform.architecture()[0]),
'Frozen: {}'.format(hasattr(sys, 'frozen')),
"Imported from {}".format(importpath),
]
lines += _os_info()
lines += [
'',
'Paths:',
]
for name, path in _path_info().items():
lines += ['{}: {}'.format(name, path)]
return '\n'.join(lines)
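
# Illustrative use (an assumption, not shown in this file): the report is a
# plain string, so a --version handler can simply do:
#
#   print(version())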
|
msherry/PyXB-1.1.4
|
refs/heads/master
|
pyxb_114/bundles/wssplat/wsdl11.py
|
1
|
# Copyright 2009, Peter A. Bigot
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pyxb_114.bundles.wssplat.raw.wsdl11 import *
import pyxb_114.bundles.wssplat.raw.wsdl11 as raw_wsdl11
import pyxb_114.namespace
import pyxb_114.utils.domutils as domutils
import xml.dom
def ImportRelatedNamespaces ():
"""Import modules for related namespaces so they are available to
create binding instances from the WSDL sources."""
try:
import pyxb_114.bundles.wssplat.soapbind11
except ImportError:
pass
try:
import pyxb_114.bundles.wssplat.soapbind12
except ImportError:
pass
try:
import pyxb_114.bundles.wssplat.soap11
except ImportError:
pass
try:
import pyxb_114.bundles.wssplat.soap12
except ImportError:
pass
try:
import pyxb_114.bundles.wssplat.soapenv
except ImportError:
pass
try:
import pyxb_114.bundles.wssplat.httpbind
except ImportError:
pass
try:
import pyxb_114.bundles.wssplat.mimebind
except ImportError:
pass
class _WSDL_binding_mixin (object):
"""Mix-in class to mark element Python bindings that are expected
to be wildcard matches in WSDL binding elements."""
pass
class _WSDL_port_mixin (object):
"""Mix-in class to mark element Python bindings that are expected
to be wildcard matches in WSDL port elements."""
pass
class _WSDL_operation_mixin (object):
"""Mix-in class to mark element Python bindings that are expected
to be wildcard matches in WSDL (binding) operation elements."""
pass
class tPort (raw_wsdl11.tPort):
def __getBindingReference (self):
return self.__bindingReference
def _setBindingReference (self, binding_reference):
self.__bindingReference = binding_reference
__bindingReference = None
bindingReference = property(__getBindingReference)
def __getAddressReference (self):
return self.__addressReference
def _setAddressReference (self, address_reference):
self.__addressReference = address_reference
__addressReference = None
addressReference = property(__getAddressReference)
raw_wsdl11.tPort._SetSupersedingClass(tPort)
class tBinding (raw_wsdl11.tBinding):
def __getPortTypeReference (self):
return self.__portTypeReference
def setPortTypeReference (self, port_type_reference):
self.__portTypeReference = port_type_reference
__portTypeReference = None
portTypeReference = property(__getPortTypeReference)
def __getProtocolBinding (self):
"""Return the protocol-specific binding information."""
return self.__protocolBinding
def _setProtocolBinding (self, protocol_binding):
self.__protocolBinding = protocol_binding
__protocolBinding = None
protocolBinding = property(__getProtocolBinding)
def operationMap (self):
return self.__operationMap
__operationMap = None
def __init__ (self, *args, **kw):
super(tBinding, self).__init__(*args, **kw)
self.__operationMap = { }
raw_wsdl11.tBinding._SetSupersedingClass(tBinding)
class tPortType (raw_wsdl11.tPortType):
def operationMap (self):
return self.__operationMap
__operationMap = None
def __init__ (self, *args, **kw):
super(tPortType, self).__init__(*args, **kw)
self.__operationMap = { }
raw_wsdl11.tPortType._SetSupersedingClass(tPortType)
class tParam (raw_wsdl11.tParam):
def __getMessageReference (self):
return self.__messageReference
def _setMessageReference (self, message_reference):
self.__messageReference = message_reference
__messageReference = None
messageReference = property(__getMessageReference)
raw_wsdl11.tParam._SetSupersedingClass(tParam)
class tFault (raw_wsdl11.tFault):
def __getMessageReference (self):
return self.__messageReference
def _setMessageReference (self, message_reference):
self.__messageReference = message_reference
__messageReference = None
messageReference = property(__getMessageReference)
raw_wsdl11.tFault._SetSupersedingClass(tFault)
class tPart (raw_wsdl11.tPart):
def __getElementReference (self):
return self.__elementReference
def _setElementReference (self, element_reference):
self.__elementReference = element_reference
__elementReference = None
elementReference = property(__getElementReference)
def __getTypeReference (self):
return self.__typeReference
def _setTypeReference (self, type_reference):
self.__typeReference = type_reference
__typeReference = None
typeReference = property(__getTypeReference)
raw_wsdl11.tPart._SetSupersedingClass(tPart)
class tBindingOperation (raw_wsdl11.tBindingOperation):
def __getOperationReference (self):
return self.__operationReference
def _setOperationReference (self, operation_reference):
self.__operationReference = operation_reference
__operationReference = None
operationReference = property(__getOperationReference)
raw_wsdl11.tBindingOperation._SetSupersedingClass(tBindingOperation)
class tDefinitions (raw_wsdl11.tDefinitions):
def messageMap (self):
return self.targetNamespace().messages()
def namespaceContext (self):
return self.__namespaceContext
__namespaceContext = None
def bindingMap (self):
return self.__bindingMap
__bindingMap = None
def targetNamespace (self):
return self.namespaceContext().targetNamespace()
def namespace (self):
return self.__namespace
__namespace = None
def _addToMap (self, map, qname, value):
map[qname] = value
(ns, ln) = qname
if (ns == self.targetNamespace()):
map[(None, ln)] = value
elif (ns is None):
map[(self.targetNamespace(), ln)] = value
return map
def schema (self):
return self.__schema
__schema = None
@classmethod
def _PreFactory_vx (self, args, kw):
# Import standard bindings. If we do this, then wildcard
# binding, port, and operation elements will be recognized and
# converted into bindings.
import pyxb_114.bundles.wssplat.soapbind11
import pyxb_114.bundles.wssplat.soapbind12
import pyxb_114.bundles.wssplat.httpbind
# Ensure we have definitions for any externally-referenced
# things we might need. @todo: This might have to
# chronologically precede the import above.
pyxb_114.namespace.archive.NamespaceArchive.PreLoadArchives()
raw_wsdl11.Namespace.validateComponentModel()
state = ( kw.pop('process_schema', False),
kw.pop('generation_uid', None),
kw.get('_dom_node', None) )
return state
def _postFactory_vx (self, state):
(process_schema, generation_uid, dom_node) = state
assert isinstance(dom_node, xml.dom.Node)
node_en = pyxb_114.namespace.ExpandedName(dom_node)
self.__namespaceContext = pyxb_114.namespace.resolution.NamespaceContext.GetNodeContext(dom_node)
self.__buildMaps()
if process_schema:
self.__processSchema(generation_uid)
self.__finalizeReferences()
return self
__WSDLCategories = ( 'service', 'port', 'message', 'binding', 'portType' )
def __buildMaps (self):
tns = self.namespaceContext().targetNamespace()
tns.configureCategories(self.__WSDLCategories)
for m in self.message:
tns.messages()[m.name] = m
for pt in self.portType:
tns.portTypes()[pt.name] = pt
for op in pt.operation:
pt.operationMap()[op.name] = op
params = op.fault[:]
if op.input is not None:
params.append(op.input)
if op.output is not None:
params.append(op.output)
for p in params:
msg_en = m._namespaceContext().interpretQName(p.message)
p._setMessageReference(msg_en.message())
for b in self.binding:
tns.bindings()[b.name] = b
port_type_en = b._namespaceContext().interpretQName(b.type)
b.setPortTypeReference(port_type_en.portType())
for wc in b.wildcardElements():
if isinstance(wc, _WSDL_binding_mixin):
b._setProtocolBinding(wc)
break
for op in b.operation:
b.operationMap()[op.name] = op
for wc in op.wildcardElements():
if isinstance(wc, _WSDL_operation_mixin):
op._setOperationReference(wc)
break
for s in self.service:
tns.services()[s.name] = s
for p in s.port:
binding_en = p._namespaceContext().interpretQName(p.binding)
p._setBindingReference(binding_en.binding())
for wc in p.wildcardElements():
if isinstance(wc, _WSDL_port_mixin):
p._setAddressReference(wc)
break
def __processSchema (self, generation_uid):
global pyxb_114
import pyxb_114.xmlschema
print 'PS %s' % (generation_uid,)
if self.__schema is not None:
print 'Already have schema'
return self.__schema
for t in self.types:
for wc in t.wildcardElements():
if isinstance(wc, xml.dom.Node) and pyxb_114.namespace.XMLSchema.nodeIsNamed(wc, 'schema'):
# Try to load component models for any namespace referenced by this.
# Probably shouldn't need to do this except for imported ones.
for ns in self.namespaceContext().inScopeNamespaces().values():
try:
ns.validateComponentModel()
except Exception, e:
print 'Error validating component model for %s: %s' % (ns.uri(), e)
self.__schema = pyxb_114.xmlschema.schema.CreateFromDOM(wc, namespace_context=self.namespaceContext(), generation_uid=generation_uid)
elif isinstance(wc, pyxb_114.xmlschema.schema):
self.__schema = wc
else:
                    print 'No match: %s %s' % (wc.namespaceURI, wc.localName)
if self.__schema is not None:
return self.__schema
return None
def __finalizeReferences (self):
tns = self.namespaceContext().targetNamespace()
for m in tns.messages().values():
for p in m.part:
if (p.element is not None) and (p.elementReference is None):
elt_en = p._namespaceContext().interpretQName(p.element)
p._setElementReference(elt_en.elementDeclaration())
if (p.type is not None) and (p.typeReference is None):
type_en = p._namespaceContext().interpretQName(p.type)
p._setTypeReference(type_en.typeDefinition())
raw_wsdl11.tDefinitions._SetSupersedingClass(tDefinitions)
pyxb_114.namespace.resolution.NamespaceContext._AddTargetNamespaceAttribute(raw_wsdl11.Namespace.createExpandedName('definitions'), pyxb_114.namespace.ExpandedName('targetNamespace'))
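
# Illustrative use of the factory hooks above (a sketch: the file name is a
# placeholder, and CreateFromDOM is re-exported from the raw bindings
# imported at the top of this module):
#
#   import xml.dom.minidom
#   dom = xml.dom.minidom.parse('service.wsdl')
#   defs = CreateFromDOM(dom.documentElement, process_schema=True)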
|