repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
blaze225/zulip | refs/heads/master | zerver/webhooks/sentry/tests.py | 43 | # -*- coding: utf-8 -*-
from zerver.lib.test_classes import WebhookTestCase
class SentryHookTests(WebhookTestCase):
    """Webhook tests for the Sentry integration."""

    STREAM_NAME = 'sentry'
    URL_TEMPLATE = "/api/v1/external/sentry?&api_key={api_key}"
    FIXTURE_DIR_NAME = 'sentry'

    def test_error_issue_message(self):
        # type: () -> None
        """An exception event produces a message linking to the Sentry issue."""
        subject = u"zulip"
        message = (u"New ERROR [issue](https://sentry.io/zulip/zulip/issues/"
                   u"156699934/): This is an example python exception.")
        self.send_and_test_stream_message(
            'exception_message',
            subject,
            message
        )
|
ammarkhann/FinalSeniorCode | refs/heads/master | lib/python2.7/site-packages/cffi/setuptools_ext.py | 3 | import os
# Provide a Python 2/3 compatible ``basestring``: on Python 3 the name does
# not exist, so fall back to ``str``.  Used below to validate the
# 'cffi_modules' setup() keyword values.
try:
    basestring
except NameError:
    # Python 3.x
    basestring = str
def error(msg):
    """Abort setup by raising a DistutilsSetupError carrying *msg*."""
    from distutils.errors import DistutilsSetupError
    exc = DistutilsSetupError(msg)
    raise exc
def execfile(filename, glob):
    """Execute *filename* in the namespace dict *glob*.

    Python-3-friendly replacement for the Python 2 builtin.  A plain
    ``__import__`` is avoided on purpose: in some packages the intermediate
    ``__init__.py`` files may already try to import the very file that we
    are generating.
    """
    with open(filename) as f:
        source = f.read() + '\n'   # trailing newline: Python 2.6 compatibility
    exec(compile(source, filename, 'exec'), glob, glob)
def add_cffi_module(dist, mod_spec):
    # Resolve one 'cffi_modules' entry of the form 'path/build.py:ffi_var',
    # execute the build script, and register the resulting FFI object on the
    # distutils *dist* as either a C extension or a pure-python module.
    from cffi.api import FFI

    if not isinstance(mod_spec, basestring):
        error("argument to 'cffi_modules=...' must be a str or a list of str,"
              " not %r" % (type(mod_spec).__name__,))
    mod_spec = str(mod_spec)
    try:
        build_file_name, ffi_var_name = mod_spec.split(':')
    except ValueError:
        error("%r must be of the form 'path/build.py:ffi_variable'" %
              (mod_spec,))
    if not os.path.exists(build_file_name):
        ext = ''
        # the user probably wrote a dotted module path by mistake;
        # suggest the equivalent file path in the error message
        rewritten = build_file_name.replace('.', '/') + '.py'
        if os.path.exists(rewritten):
            ext = ' (rewrite cffi_modules to [%r])' % (
                rewritten + ':' + ffi_var_name,)
        error("%r does not name an existing file%s" % (build_file_name, ext))
    # run the build script in a fresh namespace named '__cffi__'
    mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
    execfile(build_file_name, mod_vars)
    try:
        ffi = mod_vars[ffi_var_name]
    except KeyError:
        error("%r: object %r not found in module" % (mod_spec,
                                                     ffi_var_name))
    if not isinstance(ffi, FFI):
        ffi = ffi()  # maybe it's a function instead of directly an ffi
    if not isinstance(ffi, FFI):
        error("%r is not an FFI instance (got %r)" % (mod_spec,
                                                      type(ffi).__name__))
    if not hasattr(ffi, '_assigned_source'):
        error("%r: the set_source() method was not called" % (mod_spec,))
    module_name, source, source_extension, kwds = ffi._assigned_source
    if ffi._windows_unicode:
        # copy before mutating: the kwds dict belongs to the user's FFI
        kwds = kwds.copy()
        ffi._apply_windows_unicode(kwds)
    # source is None for pure-python (ABI mode) modules
    if source is None:
        _add_py_module(dist, ffi, module_name)
    else:
        _add_c_module(dist, ffi, module_name, source, source_extension, kwds)
def _set_py_limited_api(Extension, kwds):
"""
Add py_limited_api to kwds if setuptools >= 26 is in use.
Do not alter the setting if it already exists.
Setuptools takes care of ignoring the flag on Python 2 and PyPy.
"""
if 'py_limited_api' not in kwds:
import setuptools
try:
setuptools_major_version = int(setuptools.__version__.partition('.')[0])
if setuptools_major_version >= 26:
kwds['py_limited_api'] = True
except ValueError: # certain development versions of setuptools
# If we don't know the version number of setuptools, we
# try to set 'py_limited_api' anyway. At worst, we get a
# warning.
kwds['py_limited_api'] = True
return kwds
def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
    # Register *ffi* as a C extension module on *dist*; the C source itself
    # is generated lazily, when the 'build_ext' command actually runs.
    from distutils.core import Extension
    # We are a setuptools extension. Need this build_ext for py_limited_api.
    from setuptools.command.build_ext import build_ext
    from distutils.dir_util import mkpath
    from distutils import log
    from cffi import recompiler

    # '$PLACEHOLDER' is patched to the real generated .c path at build time
    allsources = ['$PLACEHOLDER']
    allsources.extend(kwds.pop('sources', []))
    kwds = _set_py_limited_api(Extension, kwds)
    ext = Extension(name=module_name, sources=allsources, **kwds)

    def make_mod(tmpdir, pre_run=None):
        # Emit the C source for the module into *tmpdir*; return its path.
        c_file = os.path.join(tmpdir, module_name + source_extension)
        log.info("generating cffi module %r" % c_file)
        mkpath(tmpdir)
        # a setuptools-only, API-only hook: called with the "ext" and "ffi"
        # arguments just before we turn the ffi into C code. To use it,
        # subclass the 'distutils.command.build_ext.build_ext' class and
        # add a method 'def pre_run(self, ext, ffi)'.
        if pre_run is not None:
            pre_run(ext, ffi)
        updated = recompiler.make_c_source(ffi, module_name, source, c_file)
        if not updated:
            log.info("already up-to-date")
        return c_file

    if dist.ext_modules is None:
        dist.ext_modules = []
    dist.ext_modules.append(ext)

    # chain on top of whatever build_ext command the project already uses
    base_class = dist.cmdclass.get('build_ext', build_ext)

    class build_ext_make_mod(base_class):
        def run(self):
            if ext.sources[0] == '$PLACEHOLDER':
                pre_run = getattr(self, 'pre_run', None)
                ext.sources[0] = make_mod(self.build_temp, pre_run)
            base_class.run(self)
    dist.cmdclass['build_ext'] = build_ext_make_mod
    # NB. multiple runs here will create multiple 'build_ext_make_mod'
    # classes. Even in this case the 'build_ext' command should be
    # run once; but just in case, the logic above does nothing if
    # called again.
def _add_py_module(dist, ffi, module_name):
    # Register *ffi* as a generated pure-python module: hook both
    # 'build_py' (normal builds) and 'build_ext -i' (inplace builds).
    from distutils.dir_util import mkpath
    from distutils.command.build_py import build_py
    from distutils.command.build_ext import build_ext
    from distutils import log
    from cffi import recompiler

    def generate_mod(py_file):
        # Write the generated python source for the ffi module to *py_file*.
        log.info("generating cffi module %r" % py_file)
        mkpath(os.path.dirname(py_file))
        updated = recompiler.make_py_source(ffi, module_name, py_file)
        if not updated:
            log.info("already up-to-date")

    # chain on top of whatever build_py command the project already uses
    base_class = dist.cmdclass.get('build_py', build_py)

    class build_py_make_mod(base_class):
        def run(self):
            base_class.run(self)
            module_path = module_name.split('.')
            module_path[-1] += '.py'
            generate_mod(os.path.join(self.build_lib, *module_path))
    dist.cmdclass['build_py'] = build_py_make_mod

    # the following is only for "build_ext -i"
    base_class_2 = dist.cmdclass.get('build_ext', build_ext)

    class build_ext_make_mod(base_class_2):
        def run(self):
            base_class_2.run(self)
            if self.inplace:
                # from get_ext_fullpath() in distutils/command/build_ext.py
                module_path = module_name.split('.')
                package = '.'.join(module_path[:-1])
                build_py = self.get_finalized_command('build_py')
                package_dir = build_py.get_package_dir(package)
                file_name = module_path[-1] + '.py'
                generate_mod(os.path.join(package_dir, file_name))
    dist.cmdclass['build_ext'] = build_ext_make_mod
def cffi_modules(dist, attr, value):
    """setuptools entry point for the 'cffi_modules' setup() keyword.

    *value* may be a single 'path/build.py:ffi_var' string or a list of
    such strings; each one is registered on *dist*.
    """
    assert attr == 'cffi_modules'
    specs = [value] if isinstance(value, basestring) else value
    for spec in specs:
        add_cffi_module(dist, spec)
|
leungmanhin/opencog | refs/heads/master | opencog/python/attic/freebase/quad_mapper.py | 34 | __author__ = 'keyvan&ramin'
from opencog.atomspace import AtomSpace, TruthValue
import quad_reader
DEFAULT_TV = TruthValue(1,1)
def map_from_path(quad_dump_path, atomspace):
    """Read freebase quads from *quad_dump_path* and map each one into
    *atomspace* as EvaluationLink structures.

    Values that parse as integer literals become NumberNodes; everything
    else becomes a ConceptNode.
    """
    quads = quad_reader.extract_quads(quad_dump_path)
    for quad in quads:
        # Pick the node type for quad.value.  int(None) raises TypeError
        # and int('abc') raises ValueError; catching only those (instead
        # of the original bare 'except') lets real errors propagate.
        try:
            int(quad.value)
            value_node_type = 'NumberNode'
        except (TypeError, ValueError):
            value_node_type = 'ConceptNode'
        if quad.value is None:
            # subject --predicate--> destination
            atomspace.add_link('EvaluationLink', [
                atomspace.add_node('PredicateNode', quad.predicate, DEFAULT_TV),
                atomspace.add_link('ListLink', [
                    atomspace.add_node('ObjectEntityNode', quad.subject, DEFAULT_TV),
                    atomspace.add_node('ObjectEntityNode', quad.destination, DEFAULT_TV),
                ]),
            ])
        elif quad.destination is None:
            # subject --predicate--> value
            atomspace.add_link('EvaluationLink', [
                atomspace.add_node('PredicateNode', quad.predicate, DEFAULT_TV),
                atomspace.add_link('ListLink', [
                    atomspace.add_node('ObjectEntityNode', quad.subject, DEFAULT_TV),
                    atomspace.add_node(value_node_type, quad.value, DEFAULT_TV),
                ]),
            ])
        else:
            # subject --predicate--> (value is_key destination)
            atomspace.add_link('EvaluationLink', [
                atomspace.add_node('PredicateNode', quad.predicate, DEFAULT_TV),
                atomspace.add_link('ListLink', [
                    atomspace.add_node('ObjectEntityNode', quad.subject, DEFAULT_TV),
                    atomspace.add_link('EvaluationLink', [
                        atomspace.add_node('PredicateNode', 'is_key', DEFAULT_TV),
                        atomspace.add_link('ListLink', [
                            atomspace.add_node(value_node_type, quad.value, DEFAULT_TV),
                            atomspace.add_node('ObjectEntityNode', quad.destination, DEFAULT_TV),
                        ]),
                    ]),
                ]),
            ])
if __name__ == '__main__':
    # smoke-test: map a public sample quad dump and dump the atomspace
    # NOTE(review): the path is a URL -- extract_quads presumably handles
    # remote paths; verify before relying on this entry point
    atomspace = AtomSpace()
    map_from_path('http://wiki.freebase.com/images/e/eb/Steve-martin-quad-sample.txt', atomspace)
    atomspace.print_list()
|
nectR-Tutoring/nectr | refs/heads/new_development | config/urls.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
# URL routing for the nectr project.  Static media serving is appended for
# development use; see the DEBUG block below for error-page previews.
urlpatterns = [
    url(r'^$', TemplateView.as_view(template_name='homepage.html'), name='home'),
    url(r'^about/$', TemplateView.as_view(template_name='about_nectr.html'), name='about'),

    # Django Admin, use {% url 'admin:index' %}
    url(settings.ADMIN_URL, admin.site.urls),

    # User management
    url(r'^users/', include('nectr.users.urls', namespace='users')),
    url(r'^accounts/', include('allauth.urls')),
    url(r'^tutor/', include('nectr.tutor.urls', namespace='tutors')),

    # Dashboard Management
    url(r'^dashboard/', include('nectr.dashboard.urls', namespace='dashboard')),

    # Your stuff: custom urls includes go here
    url(r'^search/', include('nectr.search.urls')),

    # Search the Hive
    # url(r'^search_the_hive', TemplateView.as_view(template_name='look_nectr.html')),

    # Join the Hive
    url(r'^join_the_hive', TemplateView.as_view(template_name='joinpage_nectr.html'), name='join'),

    # How it Works
    url(r'^how_it_works', TemplateView.as_view(template_name='how_nectr.html'), name='how_it_works'),
    url(r'^test_profile', TemplateView.as_view(template_name='profile/base_profile.html'), name='base_profile'),
    # url(r'^test_joinpage_nectr', TemplateView.as_view(template_name='joinpage_nectr.html'), name='test5'),

    # Messaging Include
    url(r'^messages/', include('postman.urls', namespace='postman', app_name='postman')),
    url(r'^chat/', include('nectr.chat.urls'))
    # serve user-uploaded media in development
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

if settings.DEBUG:
    # This allows the error pages to be debugged during development, just visit
    # these url in browser to see how these error pages look like.
    urlpatterns += [
        url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
        url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
        url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
        url(r'^500/$', default_views.server_error),
    ]
    if 'debug_toolbar' in settings.INSTALLED_APPS:
        import debug_toolbar
        urlpatterns += [
            url(r'^__debug__/', include(debug_toolbar.urls)),
        ]
|
vincentlooi/FCIS | refs/heads/master | lib/utils/symbol.py | 2 | # --------------------------------------------------------
# Fully Convolutional Instance-aware Semantic Segmentation
# Copyright (c) 2017 Microsoft
# Licensed under The Apache-2.0 License [see LICENSE for details]
# Written by Yuwen Xiong
# --------------------------------------------------------
import numpy as np
class Symbol:
    """Base class for network symbol builders.

    Subclasses implement get_symbol()/init_weights(); this base class keeps
    the built symbol in ``self.sym`` and provides shape-inference and
    parameter-shape-checking helpers.
    """

    def __init__(self):
        # filled in by infer_shape()
        self.arg_shape_dict = None
        self.out_shape_dict = None
        self.aux_shape_dict = None
        # the generated symbol; must be assigned by get_symbol()
        self.sym = None

    @property
    def symbol(self):
        return self.sym

    def get_symbol(self, cfg, is_train=True):
        """
        return a generated symbol, it also need to be assigned to self.sym
        """
        raise NotImplementedError()

    def init_weights(self, cfg, arg_params, aux_params):
        raise NotImplementedError()

    def get_msra_std(self, shape):
        """Return the MSRA (He) initialization std-dev for *shape*:
        sqrt(2 / fan_in) with fan_in = shape[1] * prod(shape[2:]).

        (Fixed: removed a stray debug print() that recomputed and dumped
        the value on every call.)
        """
        fan_in = float(shape[1])
        if len(shape) > 2:
            fan_in *= np.prod(shape[2:])
        return np.sqrt(2 / fan_in)

    def infer_shape(self, data_shape_dict):
        # infer shape of every argument/output/aux state of self.sym
        arg_shape, out_shape, aux_shape = self.sym.infer_shape(**data_shape_dict)
        self.arg_shape_dict = dict(zip(self.sym.list_arguments(), arg_shape))
        self.out_shape_dict = dict(zip(self.sym.list_outputs(), out_shape))
        self.aux_shape_dict = dict(zip(self.sym.list_auxiliary_states(), aux_shape))

    def check_parameter_shapes(self, arg_params, aux_params, data_shape_dict, is_train=True):
        # Verify every provided parameter matches the inferred shape; data
        # inputs (and, at inference time, label arguments) are skipped.
        for k in self.sym.list_arguments():
            if k in data_shape_dict or (False if is_train else 'label' in k):
                continue
            assert k in arg_params, k + ' not initialized'
            assert arg_params[k].shape == self.arg_shape_dict[k], \
                'shape inconsistent for ' + k + ' inferred ' + str(self.arg_shape_dict[k]) + ' provided ' + str(
                    arg_params[k].shape)
        for k in self.sym.list_auxiliary_states():
            assert k in aux_params, k + ' not initialized'
            assert aux_params[k].shape == self.aux_shape_dict[k], \
                'shape inconsistent for ' + k + ' inferred ' + str(self.aux_shape_dict[k]) + ' provided ' + str(
                    aux_params[k].shape)
|
prakashwaghwani/rails-vs-mean-stack | refs/heads/gh-pages | node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py | 1869 | # Copyright 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A clone of the default copy.deepcopy that doesn't handle cyclic
structures or complex types except for dicts and lists. This is
because gyp copies so large structure that small copy overhead ends up
taking seconds in a project the size of Chromium."""
class Error(Exception):
    """Raised when deepcopy() meets a type it does not support."""
    pass


__all__ = ["Error", "deepcopy"]
def deepcopy(x):
    """Deep copy operation on gyp objects such as strings, ints, dicts
    and lists. More than twice as fast as copy.deepcopy but much less
    generic."""
    try:
        return _deepcopy_dispatch[type(x)](x)
    except KeyError:
        # Fixed: '%' binds tighter than '+', so the original applied the
        # format only to the second string fragment (which has no '%s'),
        # raising a TypeError instead of this Error.  Use implicit string
        # concatenation so '%' formats the whole message.
        raise Error('Unsupported type %s for deepcopy. Use copy.deepcopy '
                    'or expand simple_copy support.' % type(x))
# Dispatch table mapping type -> copier; consulted by deepcopy() above.
# (Python 2 only: relies on long/unicode and dict.iteritems.)
_deepcopy_dispatch = d = {}


def _deepcopy_atomic(x):
    # immutable atoms are returned as-is
    return x

# register every atomic (immutable) type
for x in (type(None), int, long, float,
          bool, str, unicode, type):
    d[x] = _deepcopy_atomic


def _deepcopy_list(x):
    return [deepcopy(a) for a in x]
d[list] = _deepcopy_list


def _deepcopy_dict(x):
    # both keys and values are copied
    y = {}
    for key, value in x.iteritems():
        y[deepcopy(key)] = deepcopy(value)
    return y
d[dict] = _deepcopy_dict

# drop the short alias; only _deepcopy_dispatch remains public
del d
|
doheekim/chuizonetest | refs/heads/master | lib/wtforms/ext/sqlalchemy/validators.py | 40 | from __future__ import unicode_literals
import warnings
from wtforms import ValidationError
from sqlalchemy.orm.exc import NoResultFound
class Unique(object):
    """Checks field value unicity against specified table field.

    :param get_session:
        A function that return a SQAlchemy Session.
    :param model:
        The model to check unicity against.
    :param column:
        The unique column.
    :param message:
        The error message.
    """
    field_flags = ('unique', )

    def __init__(self, get_session, model, column, message=None):
        # validator is deprecated upstream; warn at construction time
        warnings.warn('The Unique validator will be removed in WTForms 1.1', DeprecationWarning)
        self.get_session = get_session
        self.model = model
        self.column = column
        self.message = message

    def __call__(self, form, field):
        try:
            obj = self.get_session().query(self.model)\
                .filter(self.column == field.data).one()
            # A row with this value exists; that is only acceptable when
            # the form is editing that very object (form._obj).
            if not hasattr(form, '_obj') or not form._obj == obj:
                if self.message is None:
                    # NOTE(review): caches the translated message on self,
                    # so the first caller's locale sticks for later calls
                    self.message = field.gettext('Already exists.')
                raise ValidationError(self.message)
        except NoResultFound:
            # no matching row: the value is unique
            pass
|
ujenmr/ansible | refs/heads/devel | lib/ansible/module_utils/facts/hardware/netbsd.py | 223 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
from ansible.module_utils.six.moves import reduce
from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
from ansible.module_utils.facts.timeout import TimeoutError, timeout
from ansible.module_utils.facts.utils import get_file_content, get_file_lines, get_mount_size
from ansible.module_utils.facts.sysctl import get_sysctl
class NetBSDHardware(Hardware):
    """
    NetBSD-specific subclass of Hardware.  Defines memory and CPU facts:
    - memfree_mb
    - memtotal_mb
    - swapfree_mb
    - swaptotal_mb
    - processor (a list)
    - processor_cores
    - processor_count
    - devices
    """
    platform = 'NetBSD'
    # /proc/meminfo keys converted into *_mb facts
    MEMORY_FACTS = ['MemTotal', 'SwapTotal', 'MemFree', 'SwapFree']

    def populate(self, collected_facts=None):
        # Gather all hardware facts; mount facts are collected under a
        # timeout and silently skipped if they take too long.
        hardware_facts = {}
        self.sysctl = get_sysctl(self.module, ['machdep'])
        cpu_facts = self.get_cpu_facts()
        memory_facts = self.get_memory_facts()
        mount_facts = {}
        try:
            mount_facts = self.get_mount_facts()
        except TimeoutError:
            pass
        dmi_facts = self.get_dmi_facts()
        hardware_facts.update(cpu_facts)
        hardware_facts.update(memory_facts)
        hardware_facts.update(mount_facts)
        hardware_facts.update(dmi_facts)
        return hardware_facts

    def get_cpu_facts(self):
        # Parse /proc/cpuinfo; returns an empty dict when procfs is not
        # mounted or not readable.
        cpu_facts = {}
        i = 0            # number of 'model name'/'Processor' lines seen
        physid = 0
        sockets = {}     # physical id -> core count
        if not os.access("/proc/cpuinfo", os.R_OK):
            return cpu_facts
        cpu_facts['processor'] = []
        for line in get_file_lines("/proc/cpuinfo"):
            data = line.split(":", 1)
            key = data[0].strip()
            # model name is for Intel arch, Processor (mind the uppercase P)
            # works for some ARM devices, like the Sheevaplug.
            if key == 'model name' or key == 'Processor':
                if 'processor' not in cpu_facts:
                    cpu_facts['processor'] = []
                cpu_facts['processor'].append(data[1].strip())
                i += 1
            elif key == 'physical id':
                physid = data[1].strip()
                if physid not in sockets:
                    sockets[physid] = 1
            elif key == 'cpu cores':
                sockets[physid] = int(data[1].strip())
        if len(sockets) > 0:
            cpu_facts['processor_count'] = len(sockets)
            cpu_facts['processor_cores'] = reduce(lambda x, y: x + y, sockets.values())
        else:
            # no 'physical id' info: fall back to counting model lines
            cpu_facts['processor_count'] = i
            cpu_facts['processor_cores'] = 'NA'
        return cpu_facts

    def get_memory_facts(self):
        # Convert the MEMORY_FACTS entries of /proc/meminfo (kB) to MB.
        memory_facts = {}
        if not os.access("/proc/meminfo", os.R_OK):
            return memory_facts
        for line in get_file_lines("/proc/meminfo"):
            data = line.split(":", 1)
            key = data[0]
            if key in NetBSDHardware.MEMORY_FACTS:
                val = data[1].strip().split(' ')[0]
                memory_facts["%s_mb" % key.lower()] = int(val) // 1024
        return memory_facts

    @timeout()
    def get_mount_facts(self):
        # One entry per /etc/fstab line, augmented with live size/usage
        # data from statvfs via get_mount_size().
        mount_facts = {}
        mount_facts['mounts'] = []
        fstab = get_file_content('/etc/fstab')
        if not fstab:
            return mount_facts
        for line in fstab.splitlines():
            if line.startswith('#') or line.strip() == '':
                continue
            fields = re.sub(r'\s+', ' ', line).split()
            mount_statvfs_info = get_mount_size(fields[1])
            mount_info = {'mount': fields[1],
                          'device': fields[0],
                          'fstype': fields[2],
                          'options': fields[3]}
            mount_info.update(mount_statvfs_info)
            mount_facts['mounts'].append(mount_info)
        return mount_facts

    def get_dmi_facts(self):
        dmi_facts = {}
        # We don't use dmidecode(8) here because:
        # - it would add dependency on an external package
        # - dmidecode(8) can only be ran as root
        # So instead we rely on sysctl(8) to provide us the information on a
        # best-effort basis. As a bonus we also get facts on non-amd64/i386
        # platforms this way.
        sysctl_to_dmi = {
            'machdep.dmi.system-product': 'product_name',
            'machdep.dmi.system-version': 'product_version',
            'machdep.dmi.system-uuid': 'product_uuid',
            'machdep.dmi.system-serial': 'product_serial',
            'machdep.dmi.system-vendor': 'system_vendor',
        }
        for mib in sysctl_to_dmi:
            if mib in self.sysctl:
                dmi_facts[sysctl_to_dmi[mib]] = self.sysctl[mib]
        return dmi_facts
class NetBSDHardwareCollector(HardwareCollector):
    # collector glue: exposes NetBSDHardware for the 'NetBSD' platform
    _fact_class = NetBSDHardware
    _platform = 'NetBSD'
|
klmitch/nova | refs/heads/master | nova/tests/functional/notification_sample_tests/test_exception_notification.py | 4 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api import client as api_client
from nova.tests.functional.notification_sample_tests \
import notification_sample_base
from nova.tests.unit import fake_notifier
class TestExceptionNotificationSample(
        notification_sample_base.NotificationSampleTestBase):

    def test_versioned_exception_notification_with_correct_params(
            self):
        # Creating the same aggregate twice fails; verify the failure emits
        # a versioned 'compute-exception' notification whose payload
        # carries the traceback of the AggregateNameExists error.
        post = {
            "aggregate": {
                "name": "versioned_exc_aggregate",
                "availability_zone": "nova"
            }
        }
        self.admin_api.api_post('os-aggregates', post)
        # recreating the aggregate raises exception
        self.assertRaises(api_client.OpenStackApiException,
                          self.admin_api.api_post, 'os-aggregates', post)
        # NOTE(review): 4 notifications expected -- presumably start/end of
        # the first create plus start/error of the second; confirm against
        # the notification fixtures
        self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
        traceback = fake_notifier.VERSIONED_NOTIFICATIONS[3][
            'payload']['nova_object.data']['traceback']
        self.assertIn('AggregateNameExists', traceback)
        self._verify_notification(
            'compute-exception',
            replacements={
                'traceback': self.ANY},
            actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
|
rahul67/hue | refs/heads/master | desktop/core/ext-py/Django-1.6.10/tests/fixtures_model_package/__init__.py | 45382 | |
Trixter69/BookMarka | refs/heads/master | lib/unidecode/x09e.py | 252 | data = (
'Shu ', # 0x00
'Luo ', # 0x01
'Qi ', # 0x02
'Yi ', # 0x03
'Ji ', # 0x04
'Zhe ', # 0x05
'Yu ', # 0x06
'Zhan ', # 0x07
'Ye ', # 0x08
'Yang ', # 0x09
'Pi ', # 0x0a
'Ning ', # 0x0b
'Huo ', # 0x0c
'Mi ', # 0x0d
'Ying ', # 0x0e
'Meng ', # 0x0f
'Di ', # 0x10
'Yue ', # 0x11
'Yu ', # 0x12
'Lei ', # 0x13
'Bao ', # 0x14
'Lu ', # 0x15
'He ', # 0x16
'Long ', # 0x17
'Shuang ', # 0x18
'Yue ', # 0x19
'Ying ', # 0x1a
'Guan ', # 0x1b
'Qu ', # 0x1c
'Li ', # 0x1d
'Luan ', # 0x1e
'Niao ', # 0x1f
'Jiu ', # 0x20
'Ji ', # 0x21
'Yuan ', # 0x22
'Ming ', # 0x23
'Shi ', # 0x24
'Ou ', # 0x25
'Ya ', # 0x26
'Cang ', # 0x27
'Bao ', # 0x28
'Zhen ', # 0x29
'Gu ', # 0x2a
'Dong ', # 0x2b
'Lu ', # 0x2c
'Ya ', # 0x2d
'Xiao ', # 0x2e
'Yang ', # 0x2f
'Ling ', # 0x30
'Zhi ', # 0x31
'Qu ', # 0x32
'Yuan ', # 0x33
'Xue ', # 0x34
'Tuo ', # 0x35
'Si ', # 0x36
'Zhi ', # 0x37
'Er ', # 0x38
'Gua ', # 0x39
'Xiu ', # 0x3a
'Heng ', # 0x3b
'Zhou ', # 0x3c
'Ge ', # 0x3d
'Luan ', # 0x3e
'Hong ', # 0x3f
'Wu ', # 0x40
'Bo ', # 0x41
'Li ', # 0x42
'Juan ', # 0x43
'Hu ', # 0x44
'E ', # 0x45
'Yu ', # 0x46
'Xian ', # 0x47
'Ti ', # 0x48
'Wu ', # 0x49
'Que ', # 0x4a
'Miao ', # 0x4b
'An ', # 0x4c
'Kun ', # 0x4d
'Bei ', # 0x4e
'Peng ', # 0x4f
'Qian ', # 0x50
'Chun ', # 0x51
'Geng ', # 0x52
'Yuan ', # 0x53
'Su ', # 0x54
'Hu ', # 0x55
'He ', # 0x56
'E ', # 0x57
'Gu ', # 0x58
'Qiu ', # 0x59
'Zi ', # 0x5a
'Mei ', # 0x5b
'Mu ', # 0x5c
'Ni ', # 0x5d
'Yao ', # 0x5e
'Weng ', # 0x5f
'Liu ', # 0x60
'Ji ', # 0x61
'Ni ', # 0x62
'Jian ', # 0x63
'He ', # 0x64
'Yi ', # 0x65
'Ying ', # 0x66
'Zhe ', # 0x67
'Liao ', # 0x68
'Liao ', # 0x69
'Jiao ', # 0x6a
'Jiu ', # 0x6b
'Yu ', # 0x6c
'Lu ', # 0x6d
'Xuan ', # 0x6e
'Zhan ', # 0x6f
'Ying ', # 0x70
'Huo ', # 0x71
'Meng ', # 0x72
'Guan ', # 0x73
'Shuang ', # 0x74
'Lu ', # 0x75
'Jin ', # 0x76
'Ling ', # 0x77
'Jian ', # 0x78
'Xian ', # 0x79
'Cuo ', # 0x7a
'Jian ', # 0x7b
'Jian ', # 0x7c
'Yan ', # 0x7d
'Cuo ', # 0x7e
'Lu ', # 0x7f
'You ', # 0x80
'Cu ', # 0x81
'Ji ', # 0x82
'Biao ', # 0x83
'Cu ', # 0x84
'Biao ', # 0x85
'Zhu ', # 0x86
'Jun ', # 0x87
'Zhu ', # 0x88
'Jian ', # 0x89
'Mi ', # 0x8a
'Mi ', # 0x8b
'Wu ', # 0x8c
'Liu ', # 0x8d
'Chen ', # 0x8e
'Jun ', # 0x8f
'Lin ', # 0x90
'Ni ', # 0x91
'Qi ', # 0x92
'Lu ', # 0x93
'Jiu ', # 0x94
'Jun ', # 0x95
'Jing ', # 0x96
'Li ', # 0x97
'Xiang ', # 0x98
'Yan ', # 0x99
'Jia ', # 0x9a
'Mi ', # 0x9b
'Li ', # 0x9c
'She ', # 0x9d
'Zhang ', # 0x9e
'Lin ', # 0x9f
'Jing ', # 0xa0
'Ji ', # 0xa1
'Ling ', # 0xa2
'Yan ', # 0xa3
'Cu ', # 0xa4
'Mai ', # 0xa5
'Mai ', # 0xa6
'Ge ', # 0xa7
'Chao ', # 0xa8
'Fu ', # 0xa9
'Mian ', # 0xaa
'Mian ', # 0xab
'Fu ', # 0xac
'Pao ', # 0xad
'Qu ', # 0xae
'Qu ', # 0xaf
'Mou ', # 0xb0
'Fu ', # 0xb1
'Xian ', # 0xb2
'Lai ', # 0xb3
'Qu ', # 0xb4
'Mian ', # 0xb5
'[?] ', # 0xb6
'Feng ', # 0xb7
'Fu ', # 0xb8
'Qu ', # 0xb9
'Mian ', # 0xba
'Ma ', # 0xbb
'Mo ', # 0xbc
'Mo ', # 0xbd
'Hui ', # 0xbe
'Ma ', # 0xbf
'Zou ', # 0xc0
'Nen ', # 0xc1
'Fen ', # 0xc2
'Huang ', # 0xc3
'Huang ', # 0xc4
'Jin ', # 0xc5
'Guang ', # 0xc6
'Tian ', # 0xc7
'Tou ', # 0xc8
'Heng ', # 0xc9
'Xi ', # 0xca
'Kuang ', # 0xcb
'Heng ', # 0xcc
'Shu ', # 0xcd
'Li ', # 0xce
'Nian ', # 0xcf
'Chi ', # 0xd0
'Hei ', # 0xd1
'Hei ', # 0xd2
'Yi ', # 0xd3
'Qian ', # 0xd4
'Dan ', # 0xd5
'Xi ', # 0xd6
'Tuan ', # 0xd7
'Mo ', # 0xd8
'Mo ', # 0xd9
'Qian ', # 0xda
'Dai ', # 0xdb
'Chu ', # 0xdc
'You ', # 0xdd
'Dian ', # 0xde
'Yi ', # 0xdf
'Xia ', # 0xe0
'Yan ', # 0xe1
'Qu ', # 0xe2
'Mei ', # 0xe3
'Yan ', # 0xe4
'Jing ', # 0xe5
'Yu ', # 0xe6
'Li ', # 0xe7
'Dang ', # 0xe8
'Du ', # 0xe9
'Can ', # 0xea
'Yin ', # 0xeb
'An ', # 0xec
'Yan ', # 0xed
'Tan ', # 0xee
'An ', # 0xef
'Zhen ', # 0xf0
'Dai ', # 0xf1
'Can ', # 0xf2
'Yi ', # 0xf3
'Mei ', # 0xf4
'Dan ', # 0xf5
'Yan ', # 0xf6
'Du ', # 0xf7
'Lu ', # 0xf8
'Zhi ', # 0xf9
'Fen ', # 0xfa
'Fu ', # 0xfb
'Fu ', # 0xfc
'Min ', # 0xfd
'Min ', # 0xfe
'Yuan ', # 0xff
)
|
badock/nova | refs/heads/master | nova/tests/virt/test_driver.py | 26 | # Copyright (c) 2013 Citrix Systems, Inc.
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import test
from nova.virt import driver
class FakeDriver(object):
    """Test double that simply records its constructor arguments."""

    def __init__(self, *args, **kwargs):
        self.args, self.kwargs = args, kwargs
class FakeDriver2(FakeDriver):
    # distinct subclass so tests can tell two driver entries apart
    pass
class ToDriverRegistryTestCase(test.NoDBTestCase):

    def assertDriverInstance(self, inst, class_, *args, **kwargs):
        # helper: *inst* is exactly *class_* and was constructed with
        # *args*/*kwargs* (FakeDriver records them)
        self.assertEqual(class_, inst.__class__)
        self.assertEqual(args, inst.args)
        self.assertEqual(kwargs, inst.kwargs)

    def test_driver_dict_from_config(self):
        # 'key=import.path' strings become {key: instantiated driver},
        # with extra positional/keyword args forwarded to each constructor
        drvs = driver.driver_dict_from_config(
            [
                'key1=nova.tests.virt.test_driver.FakeDriver',
                'key2=nova.tests.virt.test_driver.FakeDriver2',
            ], 'arg1', 'arg2', param1='value1', param2='value2'
        )
        self.assertEqual(
            sorted(['key1', 'key2']),
            sorted(drvs.keys())
        )
        self.assertDriverInstance(
            drvs['key1'],
            FakeDriver, 'arg1', 'arg2', param1='value1',
            param2='value2')
        self.assertDriverInstance(
            drvs['key2'],
            FakeDriver2, 'arg1', 'arg2', param1='value1',
            param2='value2')
|
DataDog/kafka-python | refs/heads/master | kafka/protocol/group.py | 8 | from __future__ import absolute_import
from kafka.protocol.api import Request, Response
from kafka.protocol.struct import Struct
from kafka.protocol.types import Array, Bytes, Int16, Int32, Schema, String
class JoinGroupResponse_v0(Response):
    # JoinGroup (API key 11) response, v0
    API_KEY = 11
    API_VERSION = 0
    SCHEMA = Schema(
        ('error_code', Int16),
        ('generation_id', Int32),
        ('group_protocol', String('utf-8')),
        ('leader_id', String('utf-8')),
        ('member_id', String('utf-8')),
        ('members', Array(
            ('member_id', String('utf-8')),
            ('member_metadata', Bytes)))
    )


class JoinGroupResponse_v1(Response):
    # v1: wire format unchanged from v0
    API_KEY = 11
    API_VERSION = 1
    SCHEMA = JoinGroupResponse_v0.SCHEMA


class JoinGroupResponse_v2(Response):
    # v2 adds throttle_time_ms
    API_KEY = 11
    API_VERSION = 2
    SCHEMA = Schema(
        ('throttle_time_ms', Int32),
        ('error_code', Int16),
        ('generation_id', Int32),
        ('group_protocol', String('utf-8')),
        ('leader_id', String('utf-8')),
        ('member_id', String('utf-8')),
        ('members', Array(
            ('member_id', String('utf-8')),
            ('member_metadata', Bytes)))
    )


class JoinGroupRequest_v0(Request):
    # JoinGroup (API key 11) request, v0
    API_KEY = 11
    API_VERSION = 0
    RESPONSE_TYPE = JoinGroupResponse_v0
    SCHEMA = Schema(
        ('group', String('utf-8')),
        ('session_timeout', Int32),
        ('member_id', String('utf-8')),
        ('protocol_type', String('utf-8')),
        ('group_protocols', Array(
            ('protocol_name', String('utf-8')),
            ('protocol_metadata', Bytes)))
    )
    UNKNOWN_MEMBER_ID = ''


class JoinGroupRequest_v1(Request):
    # v1 adds rebalance_timeout
    API_KEY = 11
    API_VERSION = 1
    RESPONSE_TYPE = JoinGroupResponse_v1
    SCHEMA = Schema(
        ('group', String('utf-8')),
        ('session_timeout', Int32),
        ('rebalance_timeout', Int32),
        ('member_id', String('utf-8')),
        ('protocol_type', String('utf-8')),
        ('group_protocols', Array(
            ('protocol_name', String('utf-8')),
            ('protocol_metadata', Bytes)))
    )
    UNKNOWN_MEMBER_ID = ''


class JoinGroupRequest_v2(Request):
    # v2: request format unchanged from v1
    API_KEY = 11
    API_VERSION = 2
    RESPONSE_TYPE = JoinGroupResponse_v2
    SCHEMA = JoinGroupRequest_v1.SCHEMA
    UNKNOWN_MEMBER_ID = ''


# index in these lists == API version
JoinGroupRequest = [
    JoinGroupRequest_v0, JoinGroupRequest_v1, JoinGroupRequest_v2
]
JoinGroupResponse = [
    JoinGroupResponse_v0, JoinGroupResponse_v1, JoinGroupResponse_v2
]
class ProtocolMetadata(Struct):
    # consumer protocol metadata carried inside JoinGroup member_metadata
    SCHEMA = Schema(
        ('version', Int16),
        ('subscription', Array(String('utf-8'))),  # topics list
        ('user_data', Bytes)
    )
class SyncGroupResponse_v0(Response):
    # SyncGroup (API key 14) response, v0
    API_KEY = 14
    API_VERSION = 0
    SCHEMA = Schema(
        ('error_code', Int16),
        ('member_assignment', Bytes)
    )


class SyncGroupResponse_v1(Response):
    # v1 adds throttle_time_ms
    API_KEY = 14
    API_VERSION = 1
    SCHEMA = Schema(
        ('throttle_time_ms', Int32),
        ('error_code', Int16),
        ('member_assignment', Bytes)
    )


class SyncGroupRequest_v0(Request):
    # SyncGroup (API key 14) request, v0
    API_KEY = 14
    API_VERSION = 0
    RESPONSE_TYPE = SyncGroupResponse_v0
    SCHEMA = Schema(
        ('group', String('utf-8')),
        ('generation_id', Int32),
        ('member_id', String('utf-8')),
        ('group_assignment', Array(
            ('member_id', String('utf-8')),
            ('member_metadata', Bytes)))
    )


class SyncGroupRequest_v1(Request):
    # v1: request format unchanged from v0
    API_KEY = 14
    API_VERSION = 1
    RESPONSE_TYPE = SyncGroupResponse_v1
    SCHEMA = SyncGroupRequest_v0.SCHEMA


# index in these lists == API version
SyncGroupRequest = [SyncGroupRequest_v0, SyncGroupRequest_v1]
SyncGroupResponse = [SyncGroupResponse_v0, SyncGroupResponse_v1]
class MemberAssignment(Struct):
    # partition assignment carried inside SyncGroup member_assignment
    SCHEMA = Schema(
        ('version', Int16),
        ('assignment', Array(
            ('topic', String('utf-8')),
            ('partitions', Array(Int32)))),
        ('user_data', Bytes)
    )
class HeartbeatResponse_v0(Response):
    # Heartbeat (API key 12) response, v0
    API_KEY = 12
    API_VERSION = 0
    SCHEMA = Schema(
        ('error_code', Int16)
    )


class HeartbeatResponse_v1(Response):
    # v1 adds throttle_time_ms
    API_KEY = 12
    API_VERSION = 1
    SCHEMA = Schema(
        ('throttle_time_ms', Int32),
        ('error_code', Int16)
    )


class HeartbeatRequest_v0(Request):
    # Heartbeat (API key 12) request, v0
    API_KEY = 12
    API_VERSION = 0
    RESPONSE_TYPE = HeartbeatResponse_v0
    SCHEMA = Schema(
        ('group', String('utf-8')),
        ('generation_id', Int32),
        ('member_id', String('utf-8'))
    )


class HeartbeatRequest_v1(Request):
    # v1: request format unchanged from v0
    API_KEY = 12
    API_VERSION = 1
    RESPONSE_TYPE = HeartbeatResponse_v1
    SCHEMA = HeartbeatRequest_v0.SCHEMA


# index in these lists == API version
HeartbeatRequest = [HeartbeatRequest_v0, HeartbeatRequest_v1]
HeartbeatResponse = [HeartbeatResponse_v0, HeartbeatResponse_v1]
class LeaveGroupResponse_v0(Response):
    # LeaveGroup (API key 13) response, v0
    API_KEY = 13
    API_VERSION = 0
    SCHEMA = Schema(
        ('error_code', Int16)
    )


class LeaveGroupResponse_v1(Response):
    # v1 adds throttle_time_ms
    API_KEY = 13
    API_VERSION = 1
    SCHEMA = Schema(
        ('throttle_time_ms', Int32),
        ('error_code', Int16)
    )


class LeaveGroupRequest_v0(Request):
    # LeaveGroup (API key 13) request, v0
    API_KEY = 13
    API_VERSION = 0
    RESPONSE_TYPE = LeaveGroupResponse_v0
    SCHEMA = Schema(
        ('group', String('utf-8')),
        ('member_id', String('utf-8'))
    )


class LeaveGroupRequest_v1(Request):
    # v1: request format unchanged from v0
    API_KEY = 13
    API_VERSION = 1
    RESPONSE_TYPE = LeaveGroupResponse_v1
    SCHEMA = LeaveGroupRequest_v0.SCHEMA


# index in these lists == API version
LeaveGroupRequest = [LeaveGroupRequest_v0, LeaveGroupRequest_v1]
LeaveGroupResponse = [LeaveGroupResponse_v0, LeaveGroupResponse_v1]
|
mhbu50/erpnext | refs/heads/develop | erpnext/non_profit/doctype/member/member_dashboard.py | 7 | from frappe import _
def get_data():
    """Return the dashboard configuration for the Member doctype.

    The heatmap tracks member activity; linked transactions are looked
    up through the 'member' fieldname on the target doctypes.
    """
    membership_section = {
        'label': _('Membership Details'),
        'items': ['Membership'],
    }
    return {
        'heatmap': True,
        'heatmap_message': _('Member Activity'),
        'fieldname': 'member',
        'transactions': [membership_section],
    }
devs1991/test_edx_docmode | refs/heads/master | venv/lib/python2.7/site-packages/mako/parsetree.py | 39 | # mako/parsetree.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""defines the parse tree components for Mako templates."""
from mako import exceptions, ast, util, filters, compat
import re
class Node(object):
    """Abstract base for parse-tree nodes; records the node's source
    location (source text, line number, character position, filename)."""

    def __init__(self, source, lineno, pos, filename):
        self.source = source
        self.lineno = lineno
        self.pos = pos
        self.filename = filename

    @property
    def exception_kwargs(self):
        # keyword bundle handed to CompileException and the ast helpers
        return dict(source=self.source, lineno=self.lineno,
                    pos=self.pos, filename=self.filename)

    def get_children(self):
        # leaf by default; container nodes override this
        return []

    def accept_visitor(self, visitor):
        def traverse(node):
            for child in node.get_children():
                child.accept_visitor(visitor)
        # dispatch to visit<ClassName> if the visitor defines it,
        # otherwise just recurse into the children
        handler = getattr(visitor, "visit" + type(self).__name__, traverse)
        handler(self)
class TemplateNode(Node):
    """a 'container' node that stores the overall collection of nodes."""
    def __init__(self, filename):
        # the template root has no source span of its own: empty source,
        # line 0, pos 0
        super(TemplateNode, self).__init__('', 0, 0, filename)
        self.nodes = []  # top-level child nodes, in document order
        self.page_attributes = {}  # attributes collected from the <%page> tag
    def get_children(self):
        return self.nodes
    def __repr__(self):
        return "TemplateNode(%s, %r)" % (
            util.sorted_dict_repr(self.page_attributes),
            self.nodes)
class ControlLine(Node):
    """defines a control line, a line-oriented python line or end tag.

    e.g.::

        % if foo:
            (markup)
        % endif

    """
    # may be set to True by later compilation stages; False by default
    has_loop_context = False
    def __init__(self, keyword, isend, text, **kwargs):
        super(ControlLine, self).__init__(**kwargs)
        self.text = text        # full python text of the line
        self.keyword = keyword  # leading keyword, e.g. 'if', 'for', 'endif'
        self.isend = isend      # True for '% end<keyword>' lines
        # primary keywords open a new block, as opposed to ternary
        # continuations such as 'else' / 'except' / 'finally'
        self.is_primary = keyword in ['for', 'if', 'while', 'try', 'with']
        self.nodes = []
        if self.isend:
            # end tags contain no executable code
            self._declared_identifiers = []
            self._undeclared_identifiers = []
        else:
            # parse the fragment to find which names it declares and
            # which it references from enclosing scope
            code = ast.PythonFragment(text, **self.exception_kwargs)
            self._declared_identifiers = code.declared_identifiers
            self._undeclared_identifiers = code.undeclared_identifiers
    def get_children(self):
        return self.nodes
    def declared_identifiers(self):
        return self._declared_identifiers
    def undeclared_identifiers(self):
        return self._undeclared_identifiers
    def is_ternary(self, keyword):
        """return true if the given keyword is a ternary keyword
        for this ControlLine"""
        return keyword in {
            'if': set(['else', 'elif']),
            'try': set(['except', 'finally']),
            'for': set(['else'])
        }.get(self.keyword, [])
    def __repr__(self):
        return "ControlLine(%r, %r, %r, %r)" % (
            self.keyword,
            self.text,
            self.isend,
            (self.lineno, self.pos)
        )
class Text(Node):
    """A literal run of template text, emitted verbatim."""

    def __init__(self, content, **kwargs):
        super(Text, self).__init__(**kwargs)
        self.content = content

    def __repr__(self):
        location = (self.lineno, self.pos)
        return "Text(%r, %r)" % (self.content, location)
class Code(Node):
    """defines a Python code block, either inline or module level.

    e.g.::

        inline:
        <%
            x = 12
        %>

        module level:
        <%!
            import logger
        %>

    """
    def __init__(self, text, ismodule, **kwargs):
        super(Code, self).__init__(**kwargs)
        self.text = text
        # True for module-level '<%!' blocks, False for inline '<%' blocks
        self.ismodule = ismodule
        # parsed representation used for identifier analysis
        self.code = ast.PythonCode(text, **self.exception_kwargs)
    def declared_identifiers(self):
        return self.code.declared_identifiers
    def undeclared_identifiers(self):
        return self.code.undeclared_identifiers
    def __repr__(self):
        return "Code(%r, %r, %r)" % (
            self.text,
            self.ismodule,
            (self.lineno, self.pos)
        )
class Comment(Node):
    """defines a comment line.

    # this is a comment

    """
    def __init__(self, text, **kwargs):
        super(Comment, self).__init__(**kwargs)
        self.text = text  # the comment's text content
    def __repr__(self):
        return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos))
class Expression(Node):
    """defines an inline expression.

    ${x+y}

    """
    def __init__(self, text, escapes, **kwargs):
        super(Expression, self).__init__(**kwargs)
        self.text = text
        # escape/filter specification applied to the expression result
        self.escapes = escapes
        self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs)
        self.code = ast.PythonCode(text, **self.exception_kwargs)
    def declared_identifiers(self):
        return []
    def undeclared_identifiers(self):
        # TODO: make the "filter" shortcut list configurable at parse/gen time
        # names referenced by the expression or its filters, minus the
        # built-in escape shortcuts and names the expression itself declares
        return self.code.undeclared_identifiers.union(
            self.escapes_code.undeclared_identifiers.difference(
                set(filters.DEFAULT_ESCAPES.keys())
            )
        ).difference(self.code.declared_identifiers)
    def __repr__(self):
        return "Expression(%r, %r, %r)" % (
            self.text,
            self.escapes_code.args,
            (self.lineno, self.pos)
        )
class _TagMeta(type):
    """metaclass to allow Tag to produce a subclass according to
    its keyword"""
    # maps tag keyword (e.g. 'include', 'def') -> Tag subclass
    _classmap = {}
    def __init__(cls, clsname, bases, dict):
        # auto-register every subclass that declares a __keyword__
        if getattr(cls, '__keyword__', None) is not None:
            cls._classmap[cls.__keyword__] = cls
        super(_TagMeta, cls).__init__(clsname, bases, dict)
    def __call__(cls, keyword, attributes, **kwargs):
        # 'ns:defname' style tags dispatch directly to CallNamespaceTag
        if ":" in keyword:
            ns, defname = keyword.split(':')
            return type.__call__(CallNamespaceTag, ns, defname,
                                 attributes, **kwargs)
        try:
            cls = _TagMeta._classmap[keyword]
        except KeyError:
            raise exceptions.CompileException(
                "No such tag: '%s'" % keyword,
                source=kwargs['source'],
                lineno=kwargs['lineno'],
                pos=kwargs['pos'],
                filename=kwargs['filename']
            )
        return type.__call__(cls, keyword, attributes, **kwargs)
class Tag(compat.with_metaclass(_TagMeta, Node)):
    """abstract base class for tags.

    ::

        <%sometag/>

        <%someothertag>
            stuff
        </%someothertag>

    """
    __keyword__ = None

    def __init__(self, keyword, attributes, expressions,
                 nonexpressions, required, **kwargs):
        """construct a new Tag instance.

        this constructor not called directly, and is only called
        by subclasses.

        :param keyword: the tag keyword

        :param attributes: raw dictionary of attribute key/value pairs

        :param expressions: a set of identifiers that are legal attributes,
         which can also contain embedded expressions

        :param nonexpressions: a set of identifiers that are legal
         attributes, which cannot contain embedded expressions

        :param required: a sequence of attribute names that must be present

        :param \**kwargs:
         other arguments passed to the Node superclass (lineno, pos)

        """
        super(Tag, self).__init__(**kwargs)
        self.keyword = keyword
        self.attributes = attributes
        self._parse_attributes(expressions, nonexpressions)
        # fail early if any required attribute is absent
        missing = [r for r in required if r not in self.parsed_attributes]
        if len(missing):
            raise exceptions.CompileException(
                "Missing attribute(s): %s" %
                ",".join([repr(m) for m in missing]),
                **self.exception_kwargs)
        self.parent = None
        self.nodes = []

    def is_root(self):
        return self.parent is None

    def get_children(self):
        return self.nodes

    def _parse_attributes(self, expressions, nonexpressions):
        """validate self.attributes and build self.parsed_attributes,
        a map of attribute name -> python expression source string."""
        undeclared_identifiers = set()
        self.parsed_attributes = {}
        # hoisted out of the loop: these patterns are loop-invariant
        interpolation_split = re.compile(r'(\${.+?})', re.S)
        interpolation_match = re.compile(r'^\${(.+?)}$', re.S)
        for key in self.attributes:
            if key in expressions:
                expr = []
                for x in interpolation_split.split(self.attributes[key]):
                    m = interpolation_match.match(x)
                    if m:
                        code = ast.PythonCode(m.group(1).rstrip(),
                                              **self.exception_kwargs)
                        # we aren't discarding "declared_identifiers" here,
                        # which we do so that list comprehension-declared
                        # variables aren't counted. As yet can't find a
                        # condition that requires it here.
                        undeclared_identifiers = \
                            undeclared_identifiers.union(
                                code.undeclared_identifiers)
                        expr.append('(%s)' % m.group(1))
                    else:
                        if x:
                            expr.append(repr(x))
                self.parsed_attributes[key] = " + ".join(expr) or repr('')
            elif key in nonexpressions:
                if re.search(r'\${.+?}', self.attributes[key]):
                    # message typo fixed: was "Attibute"
                    raise exceptions.CompileException(
                        "Attribute '%s' in tag '%s' does not allow embedded "
                        "expressions" % (key, self.keyword),
                        **self.exception_kwargs)
                self.parsed_attributes[key] = repr(self.attributes[key])
            else:
                raise exceptions.CompileException(
                    "Invalid attribute for tag '%s': '%s'" %
                    (self.keyword, key),
                    **self.exception_kwargs)
        self.expression_undeclared_identifiers = undeclared_identifiers

    def declared_identifiers(self):
        return []

    def undeclared_identifiers(self):
        return self.expression_undeclared_identifiers

    def __repr__(self):
        return "%s(%r, %s, %r, %r)" % (self.__class__.__name__,
                                       self.keyword,
                                       util.sorted_dict_repr(self.attributes),
                                       (self.lineno, self.pos),
                                       self.nodes
                                       )
class IncludeTag(Tag):
    """the <%include> tag: renders another template file in place."""
    __keyword__ = 'include'
    def __init__(self, keyword, attributes, **kwargs):
        super(IncludeTag, self).__init__(
            keyword,
            attributes,
            ('file', 'import', 'args'),
            (), ('file',), **kwargs)
        # wrap the 'args' attribute in a dummy call so PythonCode can
        # analyze the argument expressions
        self.page_args = ast.PythonCode(
            "__DUMMY(%s)" % attributes.get('args', ''),
            **self.exception_kwargs)
    def declared_identifiers(self):
        return []
    def undeclared_identifiers(self):
        # names referenced by the args expressions, excluding the dummy
        # wrapper name and anything the args themselves declare
        identifiers = self.page_args.undeclared_identifiers.\
            difference(set(["__DUMMY"])).\
            difference(self.page_args.declared_identifiers)
        return identifiers.union(super(IncludeTag, self).
                                 undeclared_identifiers())
class NamespaceTag(Tag):
    """the <%namespace> tag: makes defs from another template or module
    available under a name."""
    __keyword__ = 'namespace'
    def __init__(self, keyword, attributes, **kwargs):
        super(NamespaceTag, self).__init__(
            keyword, attributes,
            ('file',),
            ('name', 'inheritable',
             'import', 'module'),
            (), **kwargs)
        # anonymous namespaces get a unique generated name
        self.name = attributes.get('name', '__anon_%s' % hex(abs(id(self))))
        if 'name' not in attributes and 'import' not in attributes:
            raise exceptions.CompileException(
                "'name' and/or 'import' attributes are required "
                "for <%namespace>",
                **self.exception_kwargs)
        if 'file' in attributes and 'module' in attributes:
            raise exceptions.CompileException(
                "<%namespace> may only have one of 'file' or 'module'",
                **self.exception_kwargs
            )
    def declared_identifiers(self):
        return []
class TextTag(Tag):
    """the <%text> tag: contents are passed through without template
    processing; only a 'filter' attribute is accepted."""
    __keyword__ = 'text'

    def __init__(self, keyword, attributes, **kwargs):
        # BUGFIX: nonexpressions was the bare string ('filter') rather
        # than the tuple ('filter',).  With a string, the membership
        # test in Tag._parse_attributes becomes a substring check, so a
        # mistyped attribute such as 'fil' was silently accepted instead
        # of raising "Invalid attribute".
        super(TextTag, self).__init__(
            keyword,
            attributes, (),
            ('filter',), (), **kwargs)
        self.filter_args = ast.ArgumentList(
            attributes.get('filter', ''),
            **self.exception_kwargs)

    def undeclared_identifiers(self):
        # filter names, minus the built-in escape shortcuts, plus any
        # identifiers found inside attribute expressions
        return self.filter_args.\
            undeclared_identifiers.\
            difference(filters.DEFAULT_ESCAPES.keys()).union(
                self.expression_undeclared_identifiers
            )
class DefTag(Tag):
    """the <%def> tag: defines a callable template function."""
    __keyword__ = 'def'
    def __init__(self, keyword, attributes, **kwargs):
        # cache_* attributes pass through as expression attributes too
        expressions = ['buffered', 'cached'] + [
            c for c in attributes if c.startswith('cache_')]
        super(DefTag, self).__init__(
            keyword,
            attributes,
            expressions,
            ('name', 'filter', 'decorator'),
            ('name',),
            **kwargs)
        name = attributes['name']
        # a bare identifier means the user omitted the argument list;
        # a def name must include parens, e.g. "myfunc(x)"
        if re.match(r'^[\w_]+$', name):
            raise exceptions.CompileException(
                "Missing parenthesis in %def",
                **self.exception_kwargs)
        self.function_decl = ast.FunctionDecl("def " + name + ":pass",
                                              **self.exception_kwargs)
        self.name = self.function_decl.funcname
        self.decorator = attributes.get('decorator', '')
        self.filter_args = ast.ArgumentList(
            attributes.get('filter', ''),
            **self.exception_kwargs)
    # defs are always named and are not blocks
    is_anonymous = False
    is_block = False
    @property
    def funcname(self):
        return self.function_decl.funcname
    def get_argument_expressions(self, **kw):
        return self.function_decl.get_argument_expressions(**kw)
    def declared_identifiers(self):
        return self.function_decl.allargnames
    def undeclared_identifiers(self):
        # names referenced by argument defaults and filters, minus the
        # def's own argument names and built-in escape shortcuts
        res = []
        for c in self.function_decl.defaults:
            res += list(ast.PythonCode(c, **self.exception_kwargs).
                        undeclared_identifiers)
        return set(res).union(
            self.filter_args.
            undeclared_identifiers.
            difference(filters.DEFAULT_ESCAPES.keys())
        ).union(
            self.expression_undeclared_identifiers
        ).difference(
            self.function_decl.allargnames
        )
class BlockTag(Tag):
    """the <%block> tag: an inline, optionally named, overridable
    content area."""
    __keyword__ = 'block'
    def __init__(self, keyword, attributes, **kwargs):
        # cache_* attributes pass through as expression attributes too
        expressions = ['buffered', 'cached', 'args'] + [
            c for c in attributes if c.startswith('cache_')]
        super(BlockTag, self).__init__(
            keyword,
            attributes,
            expressions,
            ('name', 'filter', 'decorator'),
            (),
            **kwargs)
        name = attributes.get('name')
        # unlike %def, a block name is a plain identifier; arguments are
        # supplied via the separate 'args' attribute instead
        if name and not re.match(r'^[\w_]+$', name):
            raise exceptions.CompileException(
                "%block may not specify an argument signature",
                **self.exception_kwargs)
        if not name and attributes.get('args', None):
            raise exceptions.CompileException(
                "Only named %blocks may specify args",
                **self.exception_kwargs
            )
        self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
                                          **self.exception_kwargs)
        self.name = name
        self.decorator = attributes.get('decorator', '')
        self.filter_args = ast.ArgumentList(
            attributes.get('filter', ''),
            **self.exception_kwargs)
    is_block = True
    @property
    def is_anonymous(self):
        return self.name is None
    @property
    def funcname(self):
        # anonymous blocks get a generated name keyed on the line number
        return self.name or "__M_anon_%d" % (self.lineno, )
    def get_argument_expressions(self, **kw):
        return self.body_decl.get_argument_expressions(**kw)
    def declared_identifiers(self):
        return self.body_decl.allargnames
    def undeclared_identifiers(self):
        return (self.filter_args.
                undeclared_identifiers.
                difference(filters.DEFAULT_ESCAPES.keys())
                ).union(self.expression_undeclared_identifiers)
class CallTag(Tag):
    """the <%call> tag: invokes a def, passing the tag body as content."""
    __keyword__ = 'call'

    def __init__(self, keyword, attributes, **kwargs):
        # BUGFIX: expressions was the bare string ('args') rather than
        # the tuple ('args',).  With a string, the membership test in
        # Tag._parse_attributes becomes a substring check, so mistyped
        # attributes such as 'ar' were treated as valid expression
        # attributes instead of raising "Invalid attribute".
        super(CallTag, self).__init__(keyword, attributes,
                                      ('args',), ('expr',), ('expr',), **kwargs)
        self.expression = attributes['expr']
        self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
        self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
                                          **self.exception_kwargs)

    def declared_identifiers(self):
        return self.code.declared_identifiers.union(self.body_decl.allargnames)

    def undeclared_identifiers(self):
        return self.code.undeclared_identifiers.\
            difference(self.code.declared_identifiers)
class CallNamespaceTag(Tag):
    """tag produced for '<%ns:defname .../>' syntax; has no __keyword__
    and is instantiated directly by _TagMeta.__call__."""
    def __init__(self, namespace, defname, attributes, **kwargs):
        # every attribute is a legal expression attribute here, plus 'args'
        super(CallNamespaceTag, self).__init__(
            namespace + ":" + defname,
            attributes,
            tuple(attributes.keys()) + ('args', ),
            (),
            (),
            **kwargs)
        # build the equivalent 'ns.defname(attr=value, ...)' call expression
        self.expression = "%s.%s(%s)" % (
            namespace,
            defname,
            ",".join(["%s=%s" % (k, v) for k, v in
                      self.parsed_attributes.items()
                      if k != 'args'])
        )
        self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
        self.body_decl = ast.FunctionArgs(
            attributes.get('args', ''),
            **self.exception_kwargs)
    def declared_identifiers(self):
        return self.code.declared_identifiers.union(self.body_decl.allargnames)
    def undeclared_identifiers(self):
        return self.code.undeclared_identifiers.\
            difference(self.code.declared_identifiers)
class InheritTag(Tag):
    """the <%inherit> tag: establishes the template's parent template
    via the required 'file' attribute."""
    __keyword__ = 'inherit'
    def __init__(self, keyword, attributes, **kwargs):
        super(InheritTag, self).__init__(
            keyword, attributes,
            ('file',), (), ('file',), **kwargs)
class PageTag(Tag):
    """the <%page> tag: declares template-wide arguments and options."""
    __keyword__ = 'page'
    def __init__(self, keyword, attributes, **kwargs):
        # cache_* attributes pass through as expression attributes too
        expressions = \
            ['cached', 'args', 'expression_filter', 'enable_loop'] + \
            [c for c in attributes if c.startswith('cache_')]
        super(PageTag, self).__init__(
            keyword,
            attributes,
            expressions,
            (),
            (),
            **kwargs)
        self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
                                          **self.exception_kwargs)
        self.filter_args = ast.ArgumentList(
            attributes.get('expression_filter', ''),
            **self.exception_kwargs)
    def declared_identifiers(self):
        return self.body_decl.allargnames
|
opensanca/trilha-python | refs/heads/master | 03-django/ecommerce/ecom/carrinho/admin.py | 1 | from django.contrib import admin
from carrinho.models import Product
# Expose the Product model in the Django admin using the default ModelAdmin.
admin.site.register(Product)
|
yamahata/neutron | refs/heads/master | neutron/plugins/vmware/api_client/eventlet_client.py | 9 | # Copyright 2012 VMware, Inc.
#
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import eventlet
import time
from neutron.openstack.common import log as logging
from neutron.plugins.vmware.api_client import base
from neutron.plugins.vmware.api_client import eventlet_request
# Patch blocking stdlib calls so they cooperate with eventlet greenthreads.
eventlet.monkey_patch()
LOG = logging.getLogger(__name__)
class EventletApiClient(base.ApiClientBase):
    """Eventlet-based implementation of NSX ApiClient ABC."""

    def __init__(self, api_providers, user, password,
                 concurrent_connections=base.DEFAULT_CONCURRENT_CONNECTIONS,
                 gen_timeout=base.GENERATION_ID_TIMEOUT,
                 use_https=True,
                 connect_timeout=base.DEFAULT_CONNECT_TIMEOUT):
        '''Constructor

        :param api_providers: a list of tuples of the form: (host, port,
            is_ssl).
        :param user: login username.
        :param password: login password.
        :param concurrent_connections: total number of concurrent
            connections created per provider.
        :param use_https: whether or not to use https for requests.
        :param connect_timeout: connection timeout in seconds.
        :param gen_timeout: controls how long the generation id is kept;
            if set to -1 the generation id is never timed out.
        '''
        if not api_providers:
            api_providers = []
        self._api_providers = set([tuple(p) for p in api_providers])
        self._api_provider_data = {}  # tuple(semaphore, session_cookie)
        for p in self._api_providers:
            # one semaphore per provider, no session cookie yet
            self._set_provider_data(p, (eventlet.semaphore.Semaphore(1), None))
        self._user = user
        self._password = password
        self._concurrent_connections = concurrent_connections
        self._use_https = use_https
        self._connect_timeout = connect_timeout
        self._config_gen = None
        self._config_gen_ts = None
        self._gen_timeout = gen_timeout
        # Connection pool is a list of queues.
        # Pre-create `concurrent_connections` connections per provider;
        # providers are prioritized in list order (lower number = higher
        # priority).
        self._conn_pool = eventlet.queue.PriorityQueue()
        self._next_conn_priority = 1
        for host, port, is_ssl in api_providers:
            for _ in range(concurrent_connections):
                conn = self._create_connection(host, port, is_ssl)
                self._conn_pool.put((self._next_conn_priority, conn))
            self._next_conn_priority += 1

    def acquire_redirect_connection(self, conn_params, auto_login=True,
                                    headers=None):
        """Check out or create connection to redirected NSX API server.

        Args:
            conn_params: tuple specifying target of redirect, see
                self._conn_params()
            auto_login: returned connection should have valid session cookie
            headers: headers to pass on if auto_login

        Returns: An available HTTPConnection instance corresponding to the
                 specified conn_params. If a connection did not previously
                 exist, new connections are created with the highest priority
                 in the connection pool and one of these new connections
                 returned.
        """
        result_conn = None
        data = self._get_provider_data(conn_params)
        if data:
            # redirect target already exists in provider data and connections
            # to the provider have been added to the connection pool. Try to
            # obtain a connection from the pool, note that it's possible that
            # all connection to the provider are currently in use.
            conns = []
            while not self._conn_pool.empty():
                priority, conn = self._conn_pool.get_nowait()
                if not result_conn and self._conn_params(conn) == conn_params:
                    conn.priority = priority
                    result_conn = conn
                else:
                    # not a match (or already found one): remember it so it
                    # can be returned to the pool afterwards
                    conns.append((priority, conn))
            for priority, conn in conns:
                self._conn_pool.put((priority, conn))
            # hack: if no free connections available, create new connection
            # and stash "no_release" attribute (so that we only exceed
            # self._concurrent_connections temporarily)
            if not result_conn:
                conn = self._create_connection(*conn_params)
                conn.priority = 0  # redirect connections have highest priority
                conn.no_release = True
                result_conn = conn
        else:
            #redirect target not already known, setup provider lists
            self._api_providers.update([conn_params])
            self._set_provider_data(conn_params,
                                    (eventlet.semaphore.Semaphore(1), None))
            # redirects occur during cluster upgrades, i.e. results to old
            # redirects to new, so give redirect targets highest priority
            priority = 0
            for i in range(self._concurrent_connections):
                conn = self._create_connection(*conn_params)
                conn.priority = priority
                if i == self._concurrent_connections - 1:
                    # NOTE(review): the final connection is deliberately not
                    # placed in the pool; it is handed to the caller below.
                    break
                self._conn_pool.put((priority, conn))
            result_conn = conn
        if result_conn:
            result_conn.last_used = time.time()
            if auto_login and self.auth_cookie(conn) is None:
                # NOTE(review): this checks auth_cookie(conn) — the loop
                # variable — not result_conn; in the pooled branch the two
                # can refer to different connections. Verify intended.
                self._wait_for_login(result_conn, headers)
        return result_conn

    def _login(self, conn=None, headers=None):
        '''Issue login request and update authentication cookie.'''
        cookie = None
        g = eventlet_request.LoginRequestEventlet(
            self, self._user, self._password, conn, headers)
        g.start()
        ret = g.join()
        if ret:
            if isinstance(ret, Exception):
                LOG.error(_('Login error "%s"'), ret)
                raise ret
            # successful login: remember the session cookie for reuse
            cookie = ret.getheader("Set-Cookie")
            if cookie:
                LOG.debug(_("Saving new authentication cookie '%s'"), cookie)
        return cookie
# Register as subclass of the ApiClientBase ABC (virtual subclass
# registration; no inheritance relationship is created at runtime).
base.ApiClientBase.register(EventletApiClient)
|
PressLabs/cobalt | refs/heads/master | tests/unit/engine/test_engine.py | 1 | # Copyright 2016 Presslabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from etcd import Lock
from pytest import mark
from pytest_mock import mock_module
from utils import Service
from engine import Engine, Lease, Executor
class TestEngine:
    """Unit tests for Engine: service lifecycle, the lease-gated run loop
    and the machine-heartbeat loop. All collaborators (lease, executor,
    gevent.spawn, time.sleep, manager key listings) come in as fixtures
    or mocks, so no greenlets actually run."""

    def test_inherits_from_service(self, engine):
        assert isinstance(engine, Service)

    def test_start(self, mocker, engine, m_lease, p_gevent_spawn):
        # start() should spawn three greenlets (lease acquire, run loop,
        # heartbeat); a second start() is a no-op until stop() is called.
        mocked_greenlet = mocker.MagicMock()
        p_gevent_spawn.return_value = mocked_greenlet
        engine.lease = m_lease
        assert engine.start() == [mocked_greenlet, mocked_greenlet, mocked_greenlet]
        assert not engine.start()
        call = mock_module.call
        p_gevent_spawn.assert_has_calls([call(m_lease.acquire), call(engine._run), call(engine._machine_heartbeat)],
                                        any_order=True)
        assert engine.stop()

    @mark.usefixtures('p_gevent_spawn')
    def test_stop(self, engine):
        # stop() only succeeds after a start(), and only once
        assert not engine.stop()
        engine.start()
        assert engine.stop()
        assert engine._quit
        assert not engine.stop()

    @mark.parametrize('lease_held', [False, True])
    def test_run_with_lease(self, mocker, p_time_sleep, lease_held, engine, m_lease, m_executor):
        # one loop iteration: tick when the lease is held, otherwise
        # time out and reset the executor
        engine._started = True
        engine.lease = m_lease
        engine.executor = m_executor
        type(engine)._quit = mocker.PropertyMock(side_effect=[False, True])
        type(m_lease).is_held = mocker.PropertyMock(return_value=lease_held)
        engine._run()
        if lease_held:
            m_executor.tick.assert_called_once_with()
            p_time_sleep.assert_called_once_with(0)
        else:
            m_executor.timeout.assert_called_once_with()
            m_executor.reset.assert_called_once_with()

    def test_machine_heartbeat_quit(self, engine, mocker):
        # with _quit already True the heartbeat loop body never runs
        type(engine)._quit = mocker.PropertyMock(return_value=True)
        timeout = mocker.patch.object(engine.executor, 'timeout')
        reset = mocker.patch.object(engine.executor, 'reset')
        engine._machine_heartbeat()
        assert not timeout.called
        assert not reset.called

    @mark.usefixtures('p_engine_executor_timeout', 'p_engine_executor_reset')
    def test_machine_heartbeat_no_lease(self, engine, mocker, m_lease):
        # without the lease: timeout only, no reset
        engine.lease = m_lease
        type(m_lease).is_held = mocker.PropertyMock(return_value=False)
        type(engine)._quit = mocker.PropertyMock(side_effect=[False, True])
        engine._machine_heartbeat()
        executor = engine.executor
        executor.timeout.assert_called_once_with()
        assert not executor.reset.called

    @mark.usefixtures('p_engine_executor_timeout', 'p_engine_executor_reset')
    def test_machine_heartbeat_with_lease_once(self, engine, mocker, m_lease, p_machine_manager_all_keys):
        # first iteration with the lease: machines are listed but no
        # change can be detected yet, so no reset
        engine.lease = m_lease
        type(m_lease).is_held = mocker.PropertyMock(return_value=True)
        type(engine)._quit = mocker.PropertyMock(side_effect=[False, True])
        p_machine_manager_all_keys.return_value = ['1', '2']
        executor = engine.executor
        engine._machine_heartbeat()
        executor.timeout.assert_called_once_with()
        assert p_machine_manager_all_keys.called
        assert not executor.reset.called

    @mark.usefixtures('p_engine_executor_timeout', 'p_engine_executor_reset')
    def test_machine_heartbeat_with_lease_twice_no_change(self, engine, mocker, m_lease,
                                                          p_machine_manager_all_keys):
        # unchanged machine set across iterations: never reset
        engine.lease = m_lease
        type(m_lease).is_held = mocker.PropertyMock(return_value=True)
        type(engine)._quit = mocker.PropertyMock(side_effect=[False, False, True])
        p_machine_manager_all_keys.return_value = ['1', '2']
        executor = engine.executor
        engine._machine_heartbeat()
        call = mock_module.call
        # NOTE(review): Mock has no 'has_calls' method — these lines are
        # attribute accesses on the mock and assert nothing; they likely
        # should be 'assert_has_calls'. Same in the two tests below.
        executor.timeout.has_calls([call(), call()])
        p_machine_manager_all_keys.has_calls([call(), call()])
        assert not executor.reset.called

    @mark.usefixtures('p_engine_executor_timeout', 'p_engine_executor_reset')
    def test_machine_heartbeat_with_lease_twice_with_change(self, engine, mocker, m_lease,
                                                            p_machine_manager_all_keys):
        # machine set changes between iterations: executor is reset
        engine.lease = m_lease
        type(m_lease).is_held = mocker.PropertyMock(return_value=True)
        type(engine)._quit = mocker.PropertyMock(side_effect=[False, False, True])
        p_machine_manager_all_keys.side_effect = [['1', '2'], []]
        executor = engine.executor
        engine._machine_heartbeat()
        call = mock_module.call
        executor.timeout.has_calls([call(), call()])
        p_machine_manager_all_keys.has_calls([call(), call()])
        assert executor.reset.called

    @mark.usefixtures('p_engine_executor_timeout', 'p_engine_executor_reset')
    def test_machine_heartbeat_with_lease_thrice_no_change(self, engine, mocker, m_lease,
                                                           p_machine_manager_all_keys):
        # a single change followed by stability: exactly one reset
        engine.lease = m_lease
        type(m_lease).is_held = mocker.PropertyMock(return_value=True)
        type(engine)._quit = mocker.PropertyMock(side_effect=[False, False, False, True])
        p_machine_manager_all_keys.side_effect = [['1', '2'], [], []]
        executor = engine.executor
        engine._machine_heartbeat()
        call = mock_module.call
        executor.timeout.has_calls([call(), call(), call()])
        p_machine_manager_all_keys.has_calls([call(), call(), call()])
        executor.reset.assert_called_once_with()

    def test_create_lock(self, m_etcd_client):
        actual = Engine._create_lock(m_etcd_client)
        assert isinstance(actual, Lock)

    def test_create_leaser(self, m_lock):
        actual = Engine._create_leaser(m_lock, {'refresh_ttl': 10, 'lease_ttl': 9})
        assert isinstance(actual, Lease)

    def test_create_executor(self, volume_manager, machine_manager):
        actual = Engine._create_executor(volume_manager, machine_manager, {'timeout': 0})
        assert isinstance(actual, Executor)
|
ddy88958620/lib | refs/heads/master | Python/scrapy/bosch_russian_professional/__init__.py | 2 | ACCOUNT_NAME = 'Bosch Russian Professional'
|
justinsalamon/scaper | refs/heads/master | scaper/__init__.py | 1 | #!/usr/bin/env python
"""Top-level module for scaper"""
from .core import Scaper
from .core import generate_from_jams
from .core import trim
from .version import version as __version__
|
halberom/ansible | refs/heads/devel | test/units/modules/network/vyos/test_vyos_system.py | 113 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.vyos import vyos_system
from .vyos_module import TestVyosModule, load_fixture, set_module_args
class TestVyosSystemModule(TestVyosModule):
    """Unit tests for the vyos_system module: verifies the CLI commands
    generated from module arguments against a canned running config."""

    module = vyos_system

    def setUp(self):
        # patch config read/write so no real device connection is made
        self.mock_get_config = patch('ansible.modules.network.vyos.vyos_system.get_config')
        self.get_config = self.mock_get_config.start()
        self.mock_load_config = patch('ansible.modules.network.vyos.vyos_system.load_config')
        self.load_config = self.mock_load_config.start()

    def tearDown(self):
        self.mock_get_config.stop()
        self.mock_load_config.stop()

    def load_fixtures(self, commands=None):
        # every test starts from the same running-config fixture
        self.get_config.return_value = load_fixture('vyos_config_config.cfg')

    def test_vyos_system_hostname(self):
        set_module_args(dict(host_name='foo'))
        commands = ["set system host-name 'foo'"]
        self.execute_module(changed=True, commands=commands)

    def test_vyos_system_clear_hostname(self):
        set_module_args(dict(host_name='foo', state='absent'))
        commands = ["delete system host-name"]
        self.execute_module(changed=True, commands=commands)

    def test_vyos_remove_single_name_server(self):
        set_module_args(dict(name_server=['8.8.4.4'], state='absent'))
        commands = ["delete system name-server '8.8.4.4'"]
        self.execute_module(changed=True, commands=commands)

    def test_vyos_system_domain_name(self):
        set_module_args(dict(domain_name='example2.com'))
        commands = ["set system domain-name 'example2.com'"]
        self.execute_module(changed=True, commands=commands)

    def test_vyos_system_clear_domain_name(self):
        set_module_args(dict(domain_name='example.com', state='absent'))
        commands = ['delete system domain-name']
        self.execute_module(changed=True, commands=commands)

    def test_vyos_system_domain_search(self):
        set_module_args(dict(domain_search=['foo.example.com', 'bar.example.com']))
        commands = ["set system domain-search domain 'foo.example.com'",
                    "set system domain-search domain 'bar.example.com'"]
        self.execute_module(changed=True, commands=commands)

    def test_vyos_system_clear_domain_search(self):
        set_module_args(dict(domain_search=[]))
        commands = ['delete system domain-search domain']
        self.execute_module(changed=True, commands=commands)

    def test_vyos_system_no_change(self):
        # arguments matching the fixture config produce no commands
        set_module_args(dict(host_name='router', domain_name='example.com', name_server=['8.8.8.8', '8.8.4.4']))
        result = self.execute_module()
        self.assertEqual([], result['commands'])

    def test_vyos_system_clear_all(self):
        set_module_args(dict(state='absent'))
        commands = ['delete system host-name',
                    'delete system domain-search domain',
                    'delete system domain-name',
                    'delete system name-server']
        self.execute_module(changed=True, commands=commands)
|
nicememory/pie | refs/heads/master | pyglet/pyglet/window/key.py | 3 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
"""Key constants and utilities for pyglet.window.
Usage::
from pyglet.window import Window
from pyglet.window import key
window = Window()
@window.event
def on_key_press(symbol, modifiers):
# Symbolic names:
if symbol == key.RETURN:
# Alphabet keys:
elif symbol == key.Z:
# Number keys:
elif symbol == key._1:
# Number keypad keys:
elif symbol == key.NUM_1:
# Modifiers:
if modifiers & key.MOD_CTRL:
"""
from builtins import str
from pyglet import compat_platform
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
class KeyStateHandler(dict):
    """Track which keyboard keys are currently held down.

    Push an instance onto a window's event stack and it records presses
    and releases.  Indexing the handler with a key symbol yields ``True``
    while that key is held and ``False`` otherwise; keys that were never
    seen are reported as not pressed instead of raising.

    For example::

        >>> win = window.Window
        >>> keyboard = key.KeyStateHandler()
        >>> win.push_handlers(keyboard)

        # Hold down the "up" arrow...

        >>> keyboard[key.UP]
        True
        >>> keyboard[key.DOWN]
        False
    """
    def on_key_press(self, symbol, modifiers):
        # Record the key as held.
        self[symbol] = True
    def on_key_release(self, symbol, modifiers):
        # Keep the entry (set to False) so later lookups stay cheap.
        self[symbol] = False
    def __getitem__(self, key):
        # Unknown symbols default to "not pressed" rather than KeyError.
        return self.get(key, False)
def modifiers_string(modifiers):
    """Return a string describing a set of modifiers.

    Example::

        >>> modifiers_string(MOD_SHIFT | MOD_CTRL)
        'MOD_SHIFT|MOD_CTRL'

    :Parameters:
        `modifiers` : int
            Bitwise combination of modifier constants.

    :rtype: str
    """
    mod_names = []
    if modifiers & MOD_SHIFT:
        mod_names.append('MOD_SHIFT')
    if modifiers & MOD_CTRL:
        mod_names.append('MOD_CTRL')
    if modifiers & MOD_ALT:
        mod_names.append('MOD_ALT')
    if modifiers & MOD_CAPSLOCK:
        mod_names.append('MOD_CAPSLOCK')
    if modifiers & MOD_NUMLOCK:
        mod_names.append('MOD_NUMLOCK')
    # Fix: MOD_WINDOWS (1 << 5) was the only declared modifier mask missing
    # from this chain, so a held Windows key was silently dropped from the
    # description.
    if modifiers & MOD_WINDOWS:
        mod_names.append('MOD_WINDOWS')
    if modifiers & MOD_SCROLLLOCK:
        mod_names.append('MOD_SCROLLLOCK')
    if modifiers & MOD_COMMAND:
        mod_names.append('MOD_COMMAND')
    if modifiers & MOD_OPTION:
        mod_names.append('MOD_OPTION')
    if modifiers & MOD_FUNCTION:
        mod_names.append('MOD_FUNCTION')
    return '|'.join(mod_names)
def symbol_string(symbol):
    """Return a string describing a key symbol.

    Example::

        >>> symbol_string(BACKSPACE)
        'BACKSPACE'

    :Parameters:
        `symbol` : int
            Symbolic key constant.

    :rtype: str
    """
    # Symbols at or above 1 << 32 were produced by user_key() and carry the
    # original scancode in their upper bits.
    if symbol >= 1 << 32:
        return 'user_key(%x)' % (symbol >> 32)
    return _key_names.get(symbol, str(symbol))
def motion_string(motion):
    """Return a string describing a text motion.

    Example::

        >>> motion_string(MOTION_NEXT_WORD)
        'MOTION_NEXT_WORD'

    :Parameters:
        `motion` : int
            Text motion constant.

    :rtype: str
    """
    try:
        return _motion_names[motion]
    except KeyError:
        # Unknown motions are rendered as their numeric value.
        return str(motion)
def user_key(scancode):
    """Return a key symbol for a key not supported by pyglet.

    Maps a virtual key or scancode from an unsupported keyboard layout
    into a machine-specific symbol.  The resulting symbol is meaningless
    on any other machine or under a different layout, so applications
    should rely on user-keys only when the user explicitly binds them
    (for example, in a game's key-mapping options screen).
    """
    assert scancode > 0
    # Shift into the high bits so user keys never collide with the
    # predefined symbol constants (all below 1 << 32).
    return scancode << 32
# Modifier mask constants
MOD_SHIFT = 1 << 0
MOD_CTRL = 1 << 1
MOD_ALT = 1 << 2
MOD_CAPSLOCK = 1 << 3
MOD_NUMLOCK = 1 << 4
MOD_WINDOWS = 1 << 5
MOD_COMMAND = 1 << 6
MOD_OPTION = 1 << 7
MOD_SCROLLLOCK = 1 << 8
MOD_FUNCTION = 1 << 9
#: Accelerator modifier. On Windows and Linux, this is ``MOD_CTRL``, on
#: Mac OS X it's ``MOD_COMMAND``.
MOD_ACCEL = MOD_CTRL
if compat_platform == 'darwin':
MOD_ACCEL = MOD_COMMAND
# Key symbol constants
# ASCII commands
BACKSPACE = 0xff08
TAB = 0xff09
LINEFEED = 0xff0a
CLEAR = 0xff0b
RETURN = 0xff0d
ENTER = 0xff0d # synonym
PAUSE = 0xff13
SCROLLLOCK = 0xff14
SYSREQ = 0xff15
ESCAPE = 0xff1b
SPACE = 0xff20
# Cursor control and motion
HOME = 0xff50
LEFT = 0xff51
UP = 0xff52
RIGHT = 0xff53
DOWN = 0xff54
PAGEUP = 0xff55
PAGEDOWN = 0xff56
END = 0xff57
BEGIN = 0xff58
# Misc functions
DELETE = 0xffff
SELECT = 0xff60
PRINT = 0xff61
EXECUTE = 0xff62
INSERT = 0xff63
UNDO = 0xff65
REDO = 0xff66
MENU = 0xff67
FIND = 0xff68
CANCEL = 0xff69
HELP = 0xff6a
BREAK = 0xff6b
MODESWITCH = 0xff7e
SCRIPTSWITCH = 0xff7e
FUNCTION = 0xffd2
# Text motion constants: these are allowed to clash with key constants
MOTION_UP = UP
MOTION_RIGHT = RIGHT
MOTION_DOWN = DOWN
MOTION_LEFT = LEFT
MOTION_NEXT_WORD = 1
MOTION_PREVIOUS_WORD = 2
MOTION_BEGINNING_OF_LINE = 3
MOTION_END_OF_LINE = 4
MOTION_NEXT_PAGE = PAGEDOWN
MOTION_PREVIOUS_PAGE = PAGEUP
MOTION_BEGINNING_OF_FILE = 5
MOTION_END_OF_FILE = 6
MOTION_BACKSPACE = BACKSPACE
MOTION_DELETE = DELETE
# Number pad
NUMLOCK = 0xff7f
NUM_SPACE = 0xff80
NUM_TAB = 0xff89
NUM_ENTER = 0xff8d
NUM_F1 = 0xff91
NUM_F2 = 0xff92
NUM_F3 = 0xff93
NUM_F4 = 0xff94
NUM_HOME = 0xff95
NUM_LEFT = 0xff96
NUM_UP = 0xff97
NUM_RIGHT = 0xff98
NUM_DOWN = 0xff99
NUM_PRIOR = 0xff9a
NUM_PAGE_UP = 0xff9a
NUM_NEXT = 0xff9b
NUM_PAGE_DOWN = 0xff9b
NUM_END = 0xff9c
NUM_BEGIN = 0xff9d
NUM_INSERT = 0xff9e
NUM_DELETE = 0xff9f
NUM_EQUAL = 0xffbd
NUM_MULTIPLY = 0xffaa
NUM_ADD = 0xffab
NUM_SEPARATOR = 0xffac
NUM_SUBTRACT = 0xffad
NUM_DECIMAL = 0xffae
NUM_DIVIDE = 0xffaf
NUM_0 = 0xffb0
NUM_1 = 0xffb1
NUM_2 = 0xffb2
NUM_3 = 0xffb3
NUM_4 = 0xffb4
NUM_5 = 0xffb5
NUM_6 = 0xffb6
NUM_7 = 0xffb7
NUM_8 = 0xffb8
NUM_9 = 0xffb9
# Function keys
F1 = 0xffbe
F2 = 0xffbf
F3 = 0xffc0
F4 = 0xffc1
F5 = 0xffc2
F6 = 0xffc3
F7 = 0xffc4
F8 = 0xffc5
F9 = 0xffc6
F10 = 0xffc7
F11 = 0xffc8
F12 = 0xffc9
F13 = 0xffca
F14 = 0xffcb
F15 = 0xffcc
F16 = 0xffcd
F17 = 0xffce
F18 = 0xffcf
F19 = 0xffd0
F20 = 0xffd1
# Modifiers
LSHIFT = 0xffe1
RSHIFT = 0xffe2
LCTRL = 0xffe3
RCTRL = 0xffe4
CAPSLOCK = 0xffe5
LMETA = 0xffe7
RMETA = 0xffe8
LALT = 0xffe9
RALT = 0xffea
LWINDOWS = 0xffeb
RWINDOWS = 0xffec
LCOMMAND = 0xffed
RCOMMAND = 0xffee
LOPTION = 0xffef
ROPTION = 0xfff0
# Latin-1
SPACE = 0x020
EXCLAMATION = 0x021
DOUBLEQUOTE = 0x022
HASH = 0x023
POUND = 0x023 # synonym
DOLLAR = 0x024
PERCENT = 0x025
AMPERSAND = 0x026
APOSTROPHE = 0x027
PARENLEFT = 0x028
PARENRIGHT = 0x029
ASTERISK = 0x02a
PLUS = 0x02b
COMMA = 0x02c
MINUS = 0x02d
PERIOD = 0x02e
SLASH = 0x02f
_0 = 0x030
_1 = 0x031
_2 = 0x032
_3 = 0x033
_4 = 0x034
_5 = 0x035
_6 = 0x036
_7 = 0x037
_8 = 0x038
_9 = 0x039
COLON = 0x03a
SEMICOLON = 0x03b
LESS = 0x03c
EQUAL = 0x03d
GREATER = 0x03e
QUESTION = 0x03f
AT = 0x040
BRACKETLEFT = 0x05b
BACKSLASH = 0x05c
BRACKETRIGHT = 0x05d
ASCIICIRCUM = 0x05e
UNDERSCORE = 0x05f
GRAVE = 0x060
QUOTELEFT = 0x060
A = 0x061
B = 0x062
C = 0x063
D = 0x064
E = 0x065
F = 0x066
G = 0x067
H = 0x068
I = 0x069
J = 0x06a
K = 0x06b
L = 0x06c
M = 0x06d
N = 0x06e
O = 0x06f
P = 0x070
Q = 0x071
R = 0x072
S = 0x073
T = 0x074
U = 0x075
V = 0x076
W = 0x077
X = 0x078
Y = 0x079
Z = 0x07a
BRACELEFT = 0x07b
BAR = 0x07c
BRACERIGHT = 0x07d
ASCIITILDE = 0x07e
# Reverse-lookup tables mapping constant values back to their names; used by
# symbol_string() and motion_string().  Built by scanning this module's
# namespace for ALL_CAPS names, skipping dunders and the MOD_* bitmasks.
# MOTION_* names go into their own table; everything else is a key symbol.
# NOTE(review): several names alias the same value (e.g. RETURN/ENTER,
# MOTION_UP = UP), so whichever name locals() yields last wins in the table.
_key_names = {}
_motion_names = {}
for _name, _value in locals().copy().items():
    if _name[:2] != '__' and _name.upper() == _name and \
       not _name.startswith('MOD_'):
        if _name.startswith('MOTION_'):
            _motion_names[_value] = _name
        else:
            _key_names[_value] = _name
|
wozio/home-system | refs/heads/dev | io-control/rules.py | 1 | #!/usr/bin/env python
import logging
import rule
import configuration
rules = {}
def init():
    """Build a rule object for every rule entry in the configuration."""
    logging.debug("Rules init")
    for entry in configuration.rules:
        name = entry["name"]
        rules[name] = rule.rule(name, entry["rule"], entry["inputs"],
                                entry["outputs"])
def exit():
    # Placeholder for symmetry with init(); nothing to tear down yet.
    # NOTE(review): shadows the builtin exit() inside this module.
    pass
|
ka2le/pepperflask-1485946909371 | refs/heads/master | naoqipythonlib/qi/path.py | 5 | from _qi import findBin, findLib, findConf, findData, listData, confPaths, \
dataPaths, binPaths, libPaths, setWritablePath, \
userWritableDataPath, userWritableConfPath, sdkPrefix
__all__ = [ "findBin",
"findLib",
"findConf",
"findData",
"listData",
"confPaths",
"dataPaths",
"binPaths",
"libPaths",
"setWritablePath",
"userWritableDataPath",
"userWritableConfPath",
"sdkPrefix" ]
|
bdh1011/wau | refs/heads/master | venv/lib/python2.7/site-packages/twisted/conch/ssh/agent.py | 10 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Implements the SSH v2 key agent protocol. This protocol is documented in the
SSH source code, in the file
U{PROTOCOL.agent<http://www.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL.agent>}.
Maintainer: Paul Swartz
"""
import struct
from twisted.conch.ssh.common import NS, getNS, getMP
from twisted.conch.error import ConchError, MissingKeyStoreError
from twisted.conch.ssh import keys
from twisted.internet import defer, protocol
class SSHAgentClient(protocol.Protocol):
    """
    The client side of the SSH agent protocol.  This is equivalent to
    ssh-add(1) and can be used with either ssh-agent(1) or the SSHAgentServer
    protocol, also in this package.

    Each packet on the wire is a 4-byte big-endian length, a 1-byte message
    type, and a type-specific payload.  Responses are matched to requests
    strictly in FIFO order via a queue of Deferreds.
    """
    def __init__(self):
        # Accumulates raw bytes until at least one complete packet is present.
        self.buf = ''
        # Outstanding response Deferreds, oldest first.
        self.deferreds = []
    def dataReceived(self, data):
        self.buf += data
        while 1:
            if len(self.buf) <= 4:
                # Not even a full length prefix yet.
                return
            # First 4 bytes: big-endian length of the remainder of the packet.
            packLen = struct.unpack('!L', self.buf[:4])[0]
            if len(self.buf) < 4 + packLen:
                # Packet incomplete; wait for more data.
                return
            packet, self.buf = self.buf[4:4 + packLen], self.buf[4 + packLen:]
            # First payload byte is the response type code.
            reqType = ord(packet[0])
            # Responses arrive in the same order requests were sent.
            d = self.deferreds.pop(0)
            if reqType == AGENT_FAILURE:
                d.errback(ConchError('agent failure'))
            elif reqType == AGENT_SUCCESS:
                d.callback('')
            else:
                # Typed answer (e.g. identities list, signature); the callback
                # is responsible for decoding the payload.
                d.callback(packet)
    def sendRequest(self, reqType, data):
        """
        Frame and transmit one request, returning a Deferred that fires with
        the agent's eventual response.
        """
        pack = struct.pack('!LB',len(data) + 1, reqType) + data
        self.transport.write(pack)
        d = defer.Deferred()
        self.deferreds.append(d)
        return d
    def requestIdentities(self):
        """
        @return: A L{Deferred} which will fire with a list of all keys found in
            the SSH agent. The list of keys is comprised of (public key blob,
            comment) tuples.
        """
        d = self.sendRequest(AGENTC_REQUEST_IDENTITIES, '')
        d.addCallback(self._cbRequestIdentities)
        return d
    def _cbRequestIdentities(self, data):
        """
        Unpack a collection of identities into a list of tuples comprised of
        public key blobs and comments.
        """
        if ord(data[0]) != AGENT_IDENTITIES_ANSWER:
            raise ConchError('unexpected response: %i' % ord(data[0]))
        # A 4-byte key count follows the response type byte.
        numKeys = struct.unpack('!L', data[1:5])[0]
        result = []
        data = data[5:]
        for i in range(numKeys):
            # Each identity is two length-prefixed strings: blob, then comment.
            blob, data = getNS(data)
            comment, data = getNS(data)
            result.append((blob, comment))
        return result
    def addIdentity(self, blob, comment = ''):
        """
        Add a private key blob to the agent's collection of keys.
        """
        req = blob
        req += NS(comment)
        return self.sendRequest(AGENTC_ADD_IDENTITY, req)
    def signData(self, blob, data):
        """
        Request that the agent sign the given C{data} with the private key
        which corresponds to the public key given by C{blob}.  The private
        key should have been added to the agent already.

        @type blob: C{str}
        @type data: C{str}
        @return: A L{Deferred} which fires with a signature for given data
            created with the given key.
        """
        req = NS(blob)
        req += NS(data)
        req += '\000\000\000\000' # flags
        return self.sendRequest(AGENTC_SIGN_REQUEST, req).addCallback(self._cbSignData)
    def _cbSignData(self, data):
        """
        Decode a SIGN_RESPONSE packet into the raw signature string.
        """
        if ord(data[0]) != AGENT_SIGN_RESPONSE:
            raise ConchError('unexpected data: %i' % ord(data[0]))
        # The signature is a single length-prefixed string after the type byte.
        signature = getNS(data[1:])[0]
        return signature
    def removeIdentity(self, blob):
        """
        Remove the private key corresponding to the public key in blob from the
        running agent.
        """
        req = NS(blob)
        return self.sendRequest(AGENTC_REMOVE_IDENTITY, req)
    def removeAllIdentities(self):
        """
        Remove all keys from the running agent.
        """
        return self.sendRequest(AGENTC_REMOVE_ALL_IDENTITIES, '')
class SSHAgentServer(protocol.Protocol):
    """
    The server side of the SSH agent protocol.  This is equivalent to
    ssh-agent(1) and can be used with either ssh-add(1) or the SSHAgentClient
    protocol, also in this package.

    The key store lives on the factory as C{self.factory.keys}, a dict
    mapping public key blob -> (key object, comment).
    """
    def __init__(self):
        # Accumulates raw bytes until a complete request packet is present.
        self.buf = ''
    def dataReceived(self, data):
        self.buf += data
        while 1:
            if len(self.buf) <= 4:
                return
            # Same framing as the client: 4-byte length, 1-byte type, payload.
            packLen = struct.unpack('!L', self.buf[:4])[0]
            if len(self.buf) < 4 + packLen:
                return
            packet, self.buf = self.buf[4:4 + packLen], self.buf[4 + packLen:]
            reqType = ord(packet[0])
            # Dispatch by name through the module-level `messages` table.
            reqName = messages.get(reqType, None)
            if not reqName:
                # Unknown request type: report failure but keep the
                # connection alive.
                self.sendResponse(AGENT_FAILURE, '')
            else:
                f = getattr(self, 'agentc_%s' % reqName)
                if getattr(self.factory, 'keys', None) is None:
                    # Factory was never given a key store; fail the request
                    # and surface a programming error locally.
                    self.sendResponse(AGENT_FAILURE, '')
                    raise MissingKeyStoreError()
                f(packet[1:])
    def sendResponse(self, reqType, data):
        """
        Frame and transmit one response packet of the given type.
        """
        pack = struct.pack('!LB', len(data) + 1, reqType) + data
        self.transport.write(pack)
    def agentc_REQUEST_IDENTITIES(self, data):
        """
        Return all of the identities that have been added to the server
        """
        assert data == ''
        numKeys = len(self.factory.keys)
        resp = []
        # Response body: 4-byte key count, then (NS(blob), NS(comment)) pairs.
        resp.append(struct.pack('!L', numKeys))
        for key, comment in self.factory.keys.itervalues():
            resp.append(NS(key.blob())) # yes, wrapped in an NS
            resp.append(NS(comment))
        self.sendResponse(AGENT_IDENTITIES_ANSWER, ''.join(resp))
    def agentc_SIGN_REQUEST(self, data):
        """
        Data is a structure with a reference to an already added key object and
        some data that the clients wants signed with that key.  If the key
        object wasn't loaded, return AGENT_FAILURE, else return the signature.
        """
        blob, data = getNS(data)
        if blob not in self.factory.keys:
            return self.sendResponse(AGENT_FAILURE, '')
        signData, data = getNS(data)
        # Trailing 4 bytes are the (currently unused) flags field.
        assert data == '\000\000\000\000'
        self.sendResponse(AGENT_SIGN_RESPONSE, NS(self.factory.keys[blob][0].sign(signData)))
    def agentc_ADD_IDENTITY(self, data):
        """
        Adds a private key to the agent's collection of identities.  On
        subsequent interactions, the private key can be accessed using only the
        corresponding public key.
        """
        # need to pre-read the key data so we can get past it to the comment string
        keyType, rest = getNS(data)
        if keyType == 'ssh-rsa':
            nmp = 6
        elif keyType == 'ssh-dss':
            nmp = 5
        else:
            raise keys.BadKeyError('unknown blob type: %s' % keyType)
        # nmp = number of multi-precision integers composing the key blob.
        rest = getMP(rest, nmp)[-1] # ignore the key data for now, we just want the comment
        comment, rest = getNS(rest) # the comment, tacked onto the end of the key blob
        k = keys.Key.fromString(data, type='private_blob') # not wrapped in NS here
        self.factory.keys[k.blob()] = (k, comment)
        self.sendResponse(AGENT_SUCCESS, '')
    def agentc_REMOVE_IDENTITY(self, data):
        """
        Remove a specific key from the agent's collection of identities.
        """
        blob, _ = getNS(data)
        # Round-trip through Key to normalize the blob before lookup.
        k = keys.Key.fromString(blob, type='blob')
        del self.factory.keys[k.blob()]
        self.sendResponse(AGENT_SUCCESS, '')
    def agentc_REMOVE_ALL_IDENTITIES(self, data):
        """
        Remove all keys from the agent's collection of identities.
        """
        assert data == ''
        self.factory.keys = {}
        self.sendResponse(AGENT_SUCCESS, '')
    # v1 messages that we ignore because we don't keep v1 keys
    # open-ssh sends both v1 and v2 commands, so we have to
    # do no-ops for v1 commands or we'll get "bad request" errors
    def agentc_REQUEST_RSA_IDENTITIES(self, data):
        """
        v1 message for listing RSA1 keys; superseded by
        agentc_REQUEST_IDENTITIES, which handles different key types.
        """
        # Always answer "zero keys".
        self.sendResponse(AGENT_RSA_IDENTITIES_ANSWER, struct.pack('!L', 0))
    def agentc_REMOVE_RSA_IDENTITY(self, data):
        """
        v1 message for removing RSA1 keys; superseded by
        agentc_REMOVE_IDENTITY, which handles different key types.
        """
        self.sendResponse(AGENT_SUCCESS, '')
    def agentc_REMOVE_ALL_RSA_IDENTITIES(self, data):
        """
        v1 message for removing all RSA1 keys; superseded by
        agentc_REMOVE_ALL_IDENTITIES, which handles different key types.
        """
        self.sendResponse(AGENT_SUCCESS, '')
# Protocol message numbers from the OpenSSH PROTOCOL.agent specification.
# AGENTC_* are client-to-agent requests; AGENT_* are agent-to-client replies.
AGENTC_REQUEST_RSA_IDENTITIES = 1
AGENT_RSA_IDENTITIES_ANSWER = 2
AGENT_FAILURE = 5
AGENT_SUCCESS = 6
AGENTC_REMOVE_RSA_IDENTITY = 8
AGENTC_REMOVE_ALL_RSA_IDENTITIES = 9
AGENTC_REQUEST_IDENTITIES = 11
AGENT_IDENTITIES_ANSWER = 12
AGENTC_SIGN_REQUEST = 13
AGENT_SIGN_RESPONSE = 14
AGENTC_ADD_IDENTITY = 17
AGENTC_REMOVE_IDENTITY = 18
AGENTC_REMOVE_ALL_IDENTITIES = 19
# Dispatch table used by SSHAgentServer.dataReceived: request number ->
# suffix of the agentc_* handler method name.
messages = {}
for name, value in locals().copy().items():
    if name[:7] == 'AGENTC_':
        messages[value] = name[7:] # doesn't handle doubles
|
mxamin/youtube-dl | refs/heads/master | youtube_dl/extractor/ministrygrid.py | 9 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
ExtractorError,
smuggle_url,
)
class MinistryGridIE(InfoExtractor):
    # Extracts training videos from ministrygrid.com pages, which embed the
    # player inside a Liferay portlet rendered separately from the main page.
    _VALID_URL = r'https?://(?:www\.)?ministrygrid\.com/([^/?#]*/)*(?P<id>[^/#?]+)/?(?:$|[?#])'
    _TEST = {
        'url': 'http://www.ministrygrid.com/training-viewer/-/training/t4g-2014-conference/the-gospel-by-numbers-4/the-gospel-by-numbers',
        'md5': '844be0d2a1340422759c2a9101bab017',
        'info_dict': {
            'id': '3453494717001',
            'ext': 'mp4',
            'title': 'The Gospel by Numbers',
            'thumbnail': 're:^https?://.*\.jpg',
            'upload_date': '20140410',
            'description': 'Coming soon from T4G 2014!',
            'uploader_id': '2034960640001',
            'timestamp': 1397145591,
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
        'add_ie': ['TDSLifeway'],
    }
    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The page registers its Liferay portlet ids in a JS array; each
        # portlet must be rendered individually to find the one holding the
        # video iframe.
        portlets = self._parse_json(self._search_regex(
            r'Liferay\.Portlet\.list=(\[.+?\])', webpage, 'portlet list'),
            video_id)
        # Liferay "plid" (page layout id) needed to render a portlet.
        pl_id = self._search_regex(
            r'getPlid:function\(\){return"(\d+)"}', webpage, 'p_l_id')
        for i, portlet in enumerate(portlets):
            portlet_url = 'http://www.ministrygrid.com/c/portal/render_portlet?p_l_id=%s&p_p_id=%s' % (pl_id, portlet)
            # fatal=False: a portlet that fails to render is skipped, not fatal.
            portlet_code = self._download_webpage(
                portlet_url, video_id,
                note='Looking in portlet %s (%d/%d)' % (portlet, i + 1, len(portlets)),
                fatal=False)
            video_iframe_url = self._search_regex(
                r'<iframe.*?src="([^"]+)"', portlet_code, 'video iframe',
                default=None)
            if video_iframe_url:
                # Hand off to the iframe's own extractor, smuggling the video
                # id so the result keeps this page's identifier.
                return self.url_result(
                    smuggle_url(video_iframe_url, {'force_videoid': video_id}),
                    video_id=video_id)
        raise ExtractorError('Could not find video iframe in any portlets')
|
lamastex/scalable-data-science | refs/heads/master | dbcArchives/2021/000_0-sds-3-x-projects/student-project-08_group-DistributedEnsemble/development/model_1.py | 1 | # Databricks notebook source
import torch
import torch.nn as nn
import torch.optim as optim
import random
#Feedforward network for classification
class MLP(nn.Module):
    """Fully connected feed-forward network for classification.

    Hidden layers use tanh activations; the last layer emits a softmax
    distribution over the classes.
    """
    def __init__(self, shape):
        # shape: neuron count per layer, including input and output layers,
        # e.g. [10, 8, 2].
        super(MLP, self).__init__()
        self.units = nn.ModuleList(
            nn.Linear(n_in, n_out)
            for n_in, n_out in zip(shape[:-1], shape[1:]))
        self._shape = shape
        self._nlayers = len(shape)

    def forward(self, x):
        # NOTE(review): the output is already softmax-normalized, yet the
        # training step feeds it to nn.CrossEntropyLoss, which expects raw
        # logits -- confirm this double-normalization is intended.
        out = x
        last_hidden = self._nlayers - 2
        for idx, layer in enumerate(self.units):
            if idx < last_hidden:
                out = nn.functional.tanh(layer(out))
            else:
                out = nn.functional.softmax(layer(out), dim=1)
        return out
# COMMAND ----------
#Updates the model parameters with one step of stochastic gradient descent given a batch of labeled data
def SGDStep(net_params, net_shape, x, y, lr=0.1):
    """Apply one stochastic-gradient-descent update to an MLP.

    Rebuilds an MLP of the given shape, loads ``net_params`` into it,
    and takes a single SGD step on the batch ``(x, y)`` using
    cross-entropy loss.

    Returns a tuple ``(updated state_dict, net_shape, loss value)``.
    """
    model = MLP(net_shape)
    model.load_state_dict(net_params)
    optimizer = optim.SGD(model.parameters(), lr)
    optimizer.zero_grad()
    criterion = nn.CrossEntropyLoss()
    err = criterion(model(x), y)
    err.backward()
    optimizer.step()
    lossval = float(err.detach().numpy())
    return (model.state_dict(), net_shape, lossval)
# COMMAND ----------
n_models=5 #ensemble size
model_data=[] #pairs of model parameters and their training data
shapes=[] #shape of networks
inputdims=10 #features dimensions
nclasses=2 #number of classes
#initialization
for i in range(n_models):
#pick random number of hidden layers and neurons for each network
nhidden=random.randint(1,4)
shape=[inputdims]
for k in range(nhidden):
shape.append(random.randint(5,15))
shape.append(nclasses)
net=MLP(shape)
shapes.append(shape)
#-to be replaced with batch loader
x=torch.rand([10,inputdims])
y=torch.ones([10,]).long()
#-
model_data.append((net.state_dict(),shape,x,y))
#main training loop
numepochs=6
for epoch in range(numepochs):
model_data_par=sc.parallelize(model_data)
updated_models= model_data_par.map(lambda t: SGDStep(*t))
updated_models=updated_models.collect()
print("loss:")
print([u[2] for u in updated_models])
#loading batches of data, and reconstructing the model-data array
model_data=[]
for i in range(n_models):
#-to be replaced with batch loader
x=torch.rand([10,inputdims])
y=torch.ones([10,]).long()
#-
model_data.append((updated_models[i][0],shapes[i],x,y)) |
numpy/numpy | refs/heads/main | numpy/typing/tests/data/reveal/mod.py | 6 | from typing import Any
import numpy as np
f8 = np.float64()
i8 = np.int64()
u8 = np.uint64()
f4 = np.float32()
i4 = np.int32()
u4 = np.uint32()
td = np.timedelta64(0, "D")
b_ = np.bool_()
b = bool()
f = float()
i = int()
AR_b: np.ndarray[Any, np.dtype[np.bool_]]
AR_m: np.ndarray[Any, np.dtype[np.timedelta64]]
# Time structures
reveal_type(td % td) # E: numpy.timedelta64
reveal_type(AR_m % td) # E: Any
reveal_type(td % AR_m) # E: Any
reveal_type(divmod(td, td)) # E: Tuple[{int64}, numpy.timedelta64]
reveal_type(divmod(AR_m, td)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[numpy.typing._64Bit]]], numpy.ndarray[Any, numpy.dtype[numpy.timedelta64]]]
reveal_type(divmod(td, AR_m)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[numpy.typing._64Bit]]], numpy.ndarray[Any, numpy.dtype[numpy.timedelta64]]]
# Bool
reveal_type(b_ % b) # E: {int8}
reveal_type(b_ % i) # E: {int_}
reveal_type(b_ % f) # E: {float64}
reveal_type(b_ % b_) # E: {int8}
reveal_type(b_ % i8) # E: {int64}
reveal_type(b_ % u8) # E: {uint64}
reveal_type(b_ % f8) # E: {float64}
reveal_type(b_ % AR_b) # E: numpy.ndarray[Any, numpy.dtype[{int8}]]
reveal_type(divmod(b_, b)) # E: Tuple[{int8}, {int8}]
reveal_type(divmod(b_, i)) # E: Tuple[{int_}, {int_}]
reveal_type(divmod(b_, f)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(b_, b_)) # E: Tuple[{int8}, {int8}]
reveal_type(divmod(b_, i8)) # E: Tuple[{int64}, {int64}]
reveal_type(divmod(b_, u8)) # E: Tuple[{uint64}, {uint64}]
reveal_type(divmod(b_, f8)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(b_, AR_b)) # E: numpy.ndarray[Any, numpy.dtype[{int8}]], numpy.ndarray[Any, numpy.dtype[{int8}]]]
reveal_type(b % b_) # E: {int8}
reveal_type(i % b_) # E: {int_}
reveal_type(f % b_) # E: {float64}
reveal_type(b_ % b_) # E: {int8}
reveal_type(i8 % b_) # E: {int64}
reveal_type(u8 % b_) # E: {uint64}
reveal_type(f8 % b_) # E: {float64}
reveal_type(AR_b % b_) # E: numpy.ndarray[Any, numpy.dtype[{int8}]]
reveal_type(divmod(b, b_)) # E: Tuple[{int8}, {int8}]
reveal_type(divmod(i, b_)) # E: Tuple[{int_}, {int_}]
reveal_type(divmod(f, b_)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(b_, b_)) # E: Tuple[{int8}, {int8}]
reveal_type(divmod(i8, b_)) # E: Tuple[{int64}, {int64}]
reveal_type(divmod(u8, b_)) # E: Tuple[{uint64}, {uint64}]
reveal_type(divmod(f8, b_)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(AR_b, b_)) # E: numpy.ndarray[Any, numpy.dtype[{int8}]], numpy.ndarray[Any, numpy.dtype[{int8}]]]
# int
reveal_type(i8 % b) # E: {int64}
reveal_type(i8 % i) # E: {int64}
reveal_type(i8 % f) # E: {float64}
reveal_type(i8 % i8) # E: {int64}
reveal_type(i8 % f8) # E: {float64}
reveal_type(i4 % i8) # E: {int64}
reveal_type(i4 % f8) # E: {float64}
reveal_type(i4 % i4) # E: {int32}
reveal_type(i4 % f4) # E: {float32}
reveal_type(i8 % AR_b) # E: numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[Any]]]
reveal_type(divmod(i8, b)) # E: Tuple[{int64}, {int64}]
reveal_type(divmod(i8, i)) # E: Tuple[{int64}, {int64}]
reveal_type(divmod(i8, f)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(i8, i8)) # E: Tuple[{int64}, {int64}]
reveal_type(divmod(i8, f8)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(i8, i4)) # E: Tuple[{int64}, {int64}]
reveal_type(divmod(i8, f4)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(i4, i4)) # E: Tuple[{int32}, {int32}]
reveal_type(divmod(i4, f4)) # E: Tuple[{float32}, {float32}]
reveal_type(divmod(i8, AR_b)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[Any]]], numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[Any]]]]
reveal_type(b % i8) # E: {int64}
reveal_type(i % i8) # E: {int64}
reveal_type(f % i8) # E: {float64}
reveal_type(i8 % i8) # E: {int64}
reveal_type(f8 % i8) # E: {float64}
reveal_type(i8 % i4) # E: {int64}
reveal_type(f8 % i4) # E: {float64}
reveal_type(i4 % i4) # E: {int32}
reveal_type(f4 % i4) # E: {float32}
reveal_type(AR_b % i8) # E: numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[Any]]]
reveal_type(divmod(b, i8)) # E: Tuple[{int64}, {int64}]
reveal_type(divmod(i, i8)) # E: Tuple[{int64}, {int64}]
reveal_type(divmod(f, i8)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(i8, i8)) # E: Tuple[{int64}, {int64}]
reveal_type(divmod(f8, i8)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(i4, i8)) # E: Tuple[{int64}, {int64}]
reveal_type(divmod(f4, i8)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(i4, i4)) # E: Tuple[{int32}, {int32}]
reveal_type(divmod(f4, i4)) # E: Tuple[{float32}, {float32}]
reveal_type(divmod(AR_b, i8)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[Any]]], numpy.ndarray[Any, numpy.dtype[numpy.signedinteger[Any]]]]
# float
reveal_type(f8 % b) # E: {float64}
reveal_type(f8 % i) # E: {float64}
reveal_type(f8 % f) # E: {float64}
reveal_type(i8 % f4) # E: {float64}
reveal_type(f4 % f4) # E: {float32}
reveal_type(f8 % AR_b) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]]
reveal_type(divmod(f8, b)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(f8, i)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(f8, f)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(f8, f8)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(f8, f4)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(f4, f4)) # E: Tuple[{float32}, {float32}]
reveal_type(divmod(f8, AR_b)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]], numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]]]
reveal_type(b % f8) # E: {float64}
reveal_type(i % f8) # E: {float64}
reveal_type(f % f8) # E: {float64}
reveal_type(f8 % f8) # E: {float64}
reveal_type(f8 % f8) # E: {float64}
reveal_type(f4 % f4) # E: {float32}
reveal_type(AR_b % f8) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]]
reveal_type(divmod(b, f8)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(i, f8)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(f, f8)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(f8, f8)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(f4, f8)) # E: Tuple[{float64}, {float64}]
reveal_type(divmod(f4, f4)) # E: Tuple[{float32}, {float32}]
reveal_type(divmod(AR_b, f8)) # E: Tuple[numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]], numpy.ndarray[Any, numpy.dtype[numpy.floating[Any]]]]
|
richardotis/scipy | refs/heads/master | scipy/weave/examples/object.py | 100 | """ Attribute and method access on Python objects from C++.
Note: std::cout type operations currently crash python...
Not sure what is up with this...
"""
from __future__ import absolute_import, print_function
import scipy.weave as weave
#----------------------------------------------------------------------------
# get/set attribute and call methods example
#----------------------------------------------------------------------------
class Foo(object):
    """Minimal example class whose attribute and method are driven from C++."""
    def __init__(self):
        # Initial value read, mutated, and rewritten by the inline C++ below.
        self.val = 1
    def inc(self, amount):
        """Add *amount* to ``val`` and return the new value."""
        self.val += amount
        return self.val
obj = Foo()
code = """
py::tuple result(3);
int i = obj.attr("val");
result[0] = i;
py::tuple args(1);
args[0] = 2;
i = obj.mcall("inc",args);
result[1] = i;
obj.set_attr("val",5);
i = obj.attr("val");
result[2] = i;
return_val = result;
"""
print('initial, inc(2), set(5)/get:', weave.inline(code,['obj']))
#----------------------------------------------------------------------------
# indexing of values.
#----------------------------------------------------------------------------
from UserList import UserList
obj = UserList([1,[1,2],"hello"])
code = """
int i;
// find obj length and access each of its items
//std::cout << "UserList items: ";
//for(i = 0; i < obj.length(); i++)
// std::cout << obj[i].str() << " ";
//std::cout << std::endl;
// assign new values to each of its items
for(i = 0; i < obj.length(); i++)
obj[i] = "goodbye";
"""
weave.inline(code,['obj'])
print("obj with new values:", obj)
|
netroby/vitess | refs/heads/master | test/mysqlctl.py | 8 | #!/usr/bin/env python
import warnings
# Dropping a table inexplicably produces a warning despite
# the "IF EXISTS" clause. Squelch these warnings.
warnings.simplefilter("ignore")
import os
import logging
import unittest
import environment
import utils
import tablet
master_tablet = tablet.Tablet()
replica_tablet = tablet.Tablet()
def setUpModule():
  # Bring up everything the tests need: topology server, vtctld, two mysqld
  # instances, and an initialized test_keyspace with a master and a replica
  # tablet.  Order matters: topo before vtctl commands, mysqld before
  # tablet/db initialization.
  try:
    environment.topo_server().setup()
    utils.Vtctld().start()
    setup_procs = [
        master_tablet.init_mysql(),
        replica_tablet.init_mysql(),
        ]
    utils.wait_procs(setup_procs)
    utils.run_vtctl(['CreateKeyspace', 'test_keyspace'])
    master_tablet.init_tablet('master', 'test_keyspace', '0')
    replica_tablet.init_tablet('replica', 'test_keyspace', '0')
    utils.run_vtctl(['RebuildShardGraph', 'test_keyspace/0'])
    utils.run_vtctl(['RebuildKeyspaceGraph', 'test_keyspace'], auto_log=True)
    master_tablet.create_db('vt_test_keyspace')
    replica_tablet.create_db('vt_test_keyspace')
  except:
    # Any setup failure: tear down whatever came up, then re-raise so the
    # test run reports the original error.
    tearDownModule()
    raise
def tearDownModule():
  # Reverse of setUpModule: kill tablets, shut down mysqld (best effort),
  # stop the topo server, and clean up temp state on disk.
  if utils.options.skip_teardown:
    # Useful for post-mortem debugging of a failed run.
    return
  tablet.kill_tablets([master_tablet, replica_tablet])
  teardown_procs = [
      master_tablet.teardown_mysql(),
      replica_tablet.teardown_mysql(),
  ]
  # raise_on_error=False: teardown should not mask the real test failure.
  utils.wait_procs(teardown_procs, raise_on_error=False)
  environment.topo_server().teardown()
  utils.kill_sub_processes()
  utils.remove_tmp_files()
  master_tablet.remove_tree()
  replica_tablet.remove_tree()
class TestMysqlctl(unittest.TestCase):
  # Integration tests for mysqlctl-managed mysqld lifecycle and for
  # vttablet's MySQL-flavor auto-detection.
  def tearDown(self):
    # Sanity-check no stray vttablets survive, then reset each tablet so the
    # next test starts from a clean replication/db state.
    tablet.Tablet.check_vttablet_count()
    for t in [master_tablet, replica_tablet]:
      t.reset_replication()
      t.clean_dbs()
  def test_mysqlctl_restart(self):
    # mysqld should survive a clean stop/start cycle through mysqlctl.
    utils.pause('mysqld initialized')
    utils.wait_procs([master_tablet.shutdown_mysql()])
    utils.wait_procs([master_tablet.start_mysql()])
  def test_auto_detect(self):
    # start up tablets with an empty MYSQL_FLAVOR, which means auto-detect
    master_tablet.start_vttablet(wait_for_state=None,
                                 extra_env={'MYSQL_FLAVOR': ''})
    replica_tablet.start_vttablet(wait_for_state=None,
                                  extra_env={'MYSQL_FLAVOR': ''})
    master_tablet.wait_for_vttablet_state('SERVING')
    replica_tablet.wait_for_vttablet_state('SERVING')
    # reparent tablets, which requires flavor detection
    utils.run_vtctl(['InitShardMaster', 'test_keyspace/0',
                     master_tablet.tablet_alias], auto_log=True)
    master_tablet.kill_vttablet()
    replica_tablet.kill_vttablet()
if __name__ == '__main__':
utils.main()
|
inovtec-solutions/OpenERP | refs/heads/branch_openerp | openerp/addons/base_report_designer/wizard/__init__.py | 421 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base_report_designer_modify
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
arrabito/DIRAC | refs/heads/integration | Resources/MessageQueue/MQCommunication.py | 3 | """ General Message Queue Interface to create Consumers and Producers
"""
__RCSID__ = "$Id$"
from DIRAC import gLogger, S_OK
from DIRAC.Resources.MessageQueue.MQProducer import MQProducer
from DIRAC.Resources.MessageQueue.MQConsumer import MQConsumer
from DIRAC.Resources.MessageQueue.MQConnectionManager import MQConnectionManager
from DIRAC.Resources.MessageQueue.Utilities import getMQParamsFromCS
from DIRAC.Resources.MessageQueue.Utilities import generateDefaultCallback
connectionManager = MQConnectionManager() #To manage the active MQ connections.
def createConsumer( mqURI, callback = None ):
  """
  Function creates MQConsumer. All parameters are taken from the
  Configuration Service based on the mqURI value.

  Args:
    mqURI(str):Pseudo URI identifing MQ service. It has the following format
              mqConnection::DestinationType::DestinationName
              e.g. blabla.cern.ch::Queue::MyQueue1
    callback: callback function that can be used to process the incoming
              messages; if None, a fresh default callback is created.
  Returns:
    S_OK/S_ERROR: with the consumer object in S_OK.
  """
  # Fix: the previous default "callback = generateDefaultCallback()" was
  # evaluated ONCE at import time, so every consumer created without an
  # explicit callback silently shared the same callback object.  Create a
  # fresh one per call instead.
  if callback is None:
    callback = generateDefaultCallback()
  result = _setupConnection( mqURI = mqURI, mType = "consumer" )
  if not result['OK']:
    gLogger.error( 'Failed to createConsumer:', result['Message'] )
    return result
  return S_OK( MQConsumer( mqManager = connectionManager,
                           mqURI = mqURI,
                           consumerId = result['Value'],
                           callback = callback ) )
def createProducer( mqURI ):
  """
  Create an MQProducer for the MQ service identified by mqURI. All
  connection parameters are read from the Configuration Service.

  Args:
    mqURI(str):Pseudo URI identifing MQ service. It has the following format
              mqConnection::DestinationType::DestinationName
              e.g. blabla.cern.ch::Queue::MyQueue1
  Returns:
    S_OK/S_ERROR: with the producer object in S_OK.
  """
  setupResult = _setupConnection( mqURI = mqURI, mType = "producer" )
  if setupResult['OK']:
    producer = MQProducer( mqManager = connectionManager,
                           mqURI = mqURI,
                           producerId = setupResult['Value'] )
    return S_OK( producer )
  gLogger.error( 'Failed to createProducer:', setupResult['Message'] )
  return setupResult
def _setupConnection( mqURI, mType ):
  """
  Set up the active MQ connection for the given messenger type. All
  connection parameters are read from the Configuration Service based on
  the mqURI value.

  Args:
    mqURI(str):Pseudo URI identifing the MQ service. It has the following format:
              mqConnection::DestinationType::DestinationName
              e.g. blabla.cern.ch::Queue::MyQueue1
    mType(str): 'consumer' or 'producer'
  Returns:
    S_OK/S_ERROR: with the value of the messenger Id ( e.g. 'consumer4' ) in S_OK.
  """
  paramsResult = getMQParamsFromCS( mqURI = mqURI )
  if paramsResult['OK']:
    return connectionManager.startConnection( mqURI, paramsResult['Value'], mType )
  gLogger.error( 'Failed to setupConnection:', '%s' % ( paramsResult['Message'] ) )
  return paramsResult
|
krieger-od/nwjs_chromium.src | refs/heads/master | build/android/pylib/gtest/gtest_test_instance.py | 9 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import re
import shutil
import sys
from pylib import constants
from pylib.base import base_test_result
from pylib.base import test_instance
sys.path.append(os.path.join(
constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common'))
import unittest_util
# Used for filtering large data deps at a finer grain than what's allowed in
# isolate files since pushing deps to devices is expensive.
# Wildcards are allowed.
_DEPS_EXCLUSION_LIST = [
    'chrome/test/data/extensions/api_test',
    'chrome/test/data/extensions/secure_shell',
    'chrome/test/data/firefox*',
    'chrome/test/data/gpu',
    'chrome/test/data/image_decoding',
    'chrome/test/data/import',
    'chrome/test/data/page_cycler',
    'chrome/test/data/perf',
    'chrome/test/data/pyauto_private',
    'chrome/test/data/safari_import',
    'chrome/test/data/scroll',
    'chrome/test/data/third_party',
    'third_party/hunspell_dictionaries/*.dic',
    # crbug.com/258690
    'webkit/data/bmp_decoder',
    'webkit/data/ico_decoder',
]

# TODO(jbudorick): Remove these once we're no longer parsing stdout to generate
# results.
# Matches per-test status lines such as "[ RUN      ] Suite.Test" and
# "[       OK ] Suite.Test (12 ms)". Groups: 1=status, 2=test name,
# 3=duration in ms (optional).
_RE_TEST_STATUS = re.compile(
    r'\[ +((?:RUN)|(?:FAILED)|(?:OK)) +\] ?([^ ]+)(?: \((\d+) ms\))?$')
# Matches the overall run status line emitted at the end of a gtest run.
_RE_TEST_RUN_STATUS = re.compile(
    r'\[ +(PASSED|RUNNER_FAILED|CRASHED) \] ?[^ ]+')
# TODO(jbudorick): Make this a class method of GtestTestInstance once
# test_package_apk and test_package_exe are gone.
def ParseGTestListTests(raw_list):
  """Parses a raw test list as provided by --gtest_list_tests.

  Args:
    raw_list: The raw test listing with the following format:

    IPCChannelTest.
      SendMessageInChannelConnected
    IPCSyncChannelTest.
      Simple
      DISABLED_SendWithTimeoutMixedOKAndTimeout

  Returns:
    A list of all tests. For the above raw listing:

    [IPCChannelTest.SendMessageInChannelConnected, IPCSyncChannelTest.Simple,
     IPCSyncChannelTest.DISABLED_SendWithTimeoutMixedOKAndTimeout]
  """
  tests = []
  current_case = ''
  for line in raw_list:
    if not line.strip():
      # Skip empty and whitespace-only lines; the latter previously crashed
      # on line.split()[0].
      continue
    if line[0] != ' ':
      # An unindented line names a test case, e.g. "IPCChannelTest."
      test_case = line.split()[0]
      if test_case.endswith('.'):
        current_case = test_case
    elif 'YOU HAVE' not in line:
      # An indented line names a test within the current case. Ignore the
      # "YOU HAVE n DISABLED TESTS" summary gtest appends to the listing.
      tests.append(current_case + line.split()[0])
  return tests
class GtestTestInstance(test_instance.TestInstance):
  """A single native (gtest) test suite to run in platform mode.

  Locates the suite's APK and/or host-built executable, maps its
  isolate-declared data dependencies, and provides test-name filtering and
  stdout parsing for results.
  """

  def __init__(self, args, isolate_delegate, error_func):
    """Initializes the instance from parsed command-line args.

    Args:
      args: argparse namespace; uses suite_name, test_filter and
          isolate_file_path.
      isolate_delegate: delegate used to remap isolate data dependencies.
      error_func: called with a message on unrecoverable setup errors.
    """
    super(GtestTestInstance, self).__init__()
    # TODO(jbudorick): Support multiple test suites.
    if len(args.suite_name) > 1:
      raise ValueError('Platform mode currently supports only 1 gtest suite')
    self._suite = args.suite_name[0]

    if self._suite == 'content_browsertests':
      error_func('content_browsertests are not currently supported '
                 'in platform mode.')
      self._apk_path = os.path.join(
          constants.GetOutDirectory(), 'apks', '%s.apk' % self._suite)
    else:
      self._apk_path = os.path.join(
          constants.GetOutDirectory(), '%s_apk' % self._suite,
          '%s-debug.apk' % self._suite)
    self._exe_path = os.path.join(constants.GetOutDirectory(),
                                  self._suite)
    # Keep only the artifacts that actually exist; at least one is required.
    if not os.path.exists(self._apk_path):
      self._apk_path = None
    if not os.path.exists(self._exe_path):
      self._exe_path = None
    if not self._apk_path and not self._exe_path:
      error_func('Could not find apk or executable for %s' % self._suite)

    self._data_deps = []
    self._gtest_filter = args.test_filter
    if args.isolate_file_path:
      self._isolate_abs_path = os.path.abspath(args.isolate_file_path)
      self._isolate_delegate = isolate_delegate
      self._isolated_abs_path = os.path.join(
          constants.GetOutDirectory(), '%s.isolated' % self._suite)
    else:
      # (fixed: dropped a stray trailing semicolon on this line)
      logging.warning('No isolate file provided. No data deps will be pushed.')
      self._isolate_delegate = None

  #override
  def TestType(self):
    """Returns the platform-mode test type tag."""
    return 'gtest'

  #override
  def SetUp(self):
    """Map data dependencies via isolate."""
    if self._isolate_delegate:
      self._isolate_delegate.Remap(
          self._isolate_abs_path, self._isolated_abs_path)
      self._isolate_delegate.PurgeExcluded(_DEPS_EXCLUSION_LIST)
      self._isolate_delegate.MoveOutputDeps()
      dest_dir = None
      # breakpad_unittests historically requires its deps in /data/local/tmp.
      if self._suite == 'breakpad_unittests':
        dest_dir = '/data/local/tmp/'
      self._data_deps.extend([(constants.ISOLATE_DEPS_DIR, dest_dir)])

  def GetDataDependencies(self):
    """Returns the test suite's data dependencies.

    Returns:
      A list of (host_path, device_path) tuples to push. If device_path is
      None, the client is responsible for determining where to push the file.
    """
    return self._data_deps

  def FilterTests(self, test_list, disabled_prefixes=None):
    """Filters |test_list| based on prefixes and, if present, a filter string.

    Args:
      test_list: The list of tests to filter.
      disabled_prefixes: A list of test prefixes to filter. Defaults to
        DISABLED_, FLAKY_, FAILS_, PRE_, and MANUAL_
    Returns:
      A filtered list of tests to run.
    """
    gtest_filter_strings = [
        self._GenerateDisabledFilterString(disabled_prefixes)]
    if self._gtest_filter:
      gtest_filter_strings.append(self._gtest_filter)

    filtered_test_list = test_list
    for gtest_filter_string in gtest_filter_strings:
      filtered_test_list = unittest_util.FilterTestNames(
          filtered_test_list, gtest_filter_string)
    return filtered_test_list

  def _GenerateDisabledFilterString(self, disabled_prefixes):
    """Builds a negative gtest_filter string excluding disabled tests.

    Combines the given (or default) disabled-name prefixes with the
    per-suite "<suite>_disabled" filter file, if one exists.
    """
    disabled_filter_items = []

    if disabled_prefixes is None:
      disabled_prefixes = ['DISABLED_', 'FLAKY_', 'FAILS_', 'PRE_', 'MANUAL_']
    disabled_filter_items += ['%s*' % dp for dp in disabled_prefixes]
    disabled_filter_items += ['*.%s*' % dp for dp in disabled_prefixes]

    disabled_tests_file_path = os.path.join(
        constants.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'gtest',
        'filter', '%s_disabled' % self._suite)
    # (fixed: dropped an always-true "disabled_tests_file_path and" guard)
    if os.path.exists(disabled_tests_file_path):
      with open(disabled_tests_file_path) as disabled_tests_file:
        # Non-empty, non-comment lines name additional disabled tests.
        # (fixed: dropped a redundant "'%s' % l" identity format)
        disabled_filter_items += [
            l for l in (line.strip() for line in disabled_tests_file)
            if l and not l.startswith('#')]

    return '*-%s' % ':'.join(disabled_filter_items)

  def ParseGTestOutput(self, output):
    """Parses raw gtest output and returns a list of results.

    Args:
      output: A list of output lines.
    Returns:
      A list of base_test_result.BaseTestResults.
    """
    results = []
    for l in output:
      matcher = _RE_TEST_STATUS.match(l)
      if matcher:
        result_type = None
        if matcher.group(1) == 'OK':
          result_type = base_test_result.ResultType.PASS
        elif matcher.group(1) == 'FAILED':
          result_type = base_test_result.ResultType.FAIL

        if result_type:
          test_name = matcher.group(2)
          # (fixed: the matched duration is a digit string; convert it so the
          # duration is consistently an integer number of ms, matching the 0
          # default used when gtest prints no timing.)
          duration = int(matcher.group(3)) if matcher.group(3) else 0
          results.append(base_test_result.BaseTestResult(
              test_name, result_type, duration))
      logging.info(l)
    return results

  #override
  def TearDown(self):
    """Clear the mappings created by SetUp."""
    if self._isolate_delegate:
      self._isolate_delegate.Clear()

  @property
  def apk(self):
    # Path to the suite's APK, or None if it does not exist.
    return self._apk_path

  @property
  def exe(self):
    # Path to the suite's host-built executable, or None if it does not exist.
    return self._exe_path

  @property
  def suite(self):
    # The suite name, e.g. 'base_unittests'.
    return self._suite
|
sassoftware/conary | refs/heads/master | conary/cmds/updatecmd.py | 1 | #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
import functools
import itertools
import json
import os
import sys
import threading
import urllib2
from conary import callbacks
from conary import conaryclient
from conary import display
from conary import errors
from conary import trove
from conary import trovetup
from conary import versions
from conary.deps import deps
from conary.lib import api
from conary.lib import log
from conary.lib import util
from conary.local import database
from conary.repository import changeset, filecontainer
from conary.conaryclient import cmdline, modelupdate
from conary.conaryclient.cmdline import parseTroveSpec
# FIXME client should instantiated once per execution of the command line
# conary client
class CriticalUpdateInfo(conaryclient.CriticalUpdateInfo):
    # Troves matching these regular expressions (here, the conary client's
    # own components) are treated as critical and may be applied first.
    criticalTroveRegexps = ['conary:.*']
def locked(method):
    """Decorator that serializes calls to *method* under ``self.lock``.

    The decorated method's ``self`` must provide a ``lock`` attribute with
    ``acquire``/``release`` (e.g. ``threading.RLock``); the lock is always
    released, even when the wrapped method raises.

    This decorator used to be defined in UpdateCallback. The problem is you
    cannot subclass UpdateCallback and use the decorator because python
    complains it is an unbound function, and @staticmethod would break the
    decorated functions, so it lives at module level instead.
    """
    # functools.wraps copies __doc__ and __name__ (and more) from the wrapped
    # method, replacing the previous manual __doc__/func_name copying, which
    # was Python-2-only (func_name) and incomplete.
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        self.lock.acquire()
        try:
            return method(self, *args, **kwargs)
        finally:
            self.lock.release()
    return wrapper
class UpdateCallback(callbacks.LineOutput, callbacks.UpdateCallback):
    """Command-line update callback.

    Renders download and update progress as single-line status messages on
    stdout (via callbacks.LineOutput). Status text arrives from two sources
    - the update thread (updateText) and the download thread (csText) - so
    state-mutating methods are serialized with the @locked decorator, which
    uses self.lock (an RLock).
    """

    def done(self):
        """
        @see: callbacks.UpdateCallback.done
        """
        self._message('')

    def _message(self, text):
        """
        Called when this callback object needs to output progress information.
        The information is written to stdout.

        @return: None
        """
        callbacks.LineOutput._message(self, text)

    def update(self):
        """
        Called by this callback object to update the status. This method
        sanitizes text. This method is not thread safe - obtain a lock before
        calling.

        @return: None
        """
        t = ""
        if self.updateText:
            t += self.updateText

        # Download status, when present, takes precedence over update status.
        if self.csText:
            t = self.csText + ' '

        # Append a "..." progress suffix when the line fits in the terminal.
        if t and len(t) < 76:
            t = t[:76]
            t += '...'

        self._message(t)

    @locked
    def updateMsg(self, text):
        """
        Called when the update thread has status updates.

        @param text: new status text
        @type text: string
        @return: None
        """
        self.updateText = text
        self.update()

    @locked
    def csMsg(self, text):
        """
        Called when the download thread has status updates.

        @param text: new status text
        @type text: string
        @return: None
        """
        self.csText = text
        self.update()

    def executingSystemModel(self):
        """Status hook: the system model is being processed."""
        self.updateMsg("Processing system model")

    def loadingModelCache(self):
        """Status hook: the system model cache is being loaded."""
        self.updateMsg("Loading system model cache")

    def savingModelCache(self):
        """Status hook: the system model cache is being saved."""
        self.updateMsg("Saving system model cache")

    def preparingChangeSet(self):
        """
        @see: callbacks.ChangesetCallback.preparingChangeSet
        """
        self.updateMsg("Preparing changeset request")

    def resolvingDependencies(self):
        """
        @see: callbacks.UpdateCallback.resolvingDependencies
        """
        self.updateMsg("Resolving dependencies")

    @locked
    def updateDone(self):
        """
        @see: callbacks.UpdateCallback.updateDone
        """
        self._message('')
        self.updateText = None

    @locked
    def _downloading(self, msg, got, rate, need):
        """
        Called by this callback object to handle different kinds of
        download-related progress information. This method puts together
        download rate information.

        @param msg: status message
        @type msg: string
        @param got: number of bytes retrieved so far
        @type got: integer
        @param rate: bytes per second
        @type rate: integer
        @param need: number of bytes total to be retrieved
        @type need: integer
        @return: None
        """
        # This function acquires a lock just because it looks at self.csHunk
        # and self.updateText directly. Otherwise, self.csMsg will acquire the
        # lock (which is now reentrant)
        if got == need:
            self.csMsg(None)
        elif need != 0:
            if self.csHunk[1] < 2 or not self.updateText:
                self.csMsg("%s %dKB (%d%%) of %dKB at %dKB/sec"
                           % (msg, got/1024, (got*100)/need, need/1024,
                              rate/1024))
            else:
                self.csMsg("%s %d of %d: %dKB (%d%%) of %dKB at %dKB/sec"
                           % ((msg,) + self.csHunk + \
                              (got/1024, (got*100)/need, need/1024,
                               rate/1024)))
        else: # no idea how much we need, just keep on counting...
            self.csMsg("%s (got %dKB at %dKB/s so far)" % (msg, got/1024,
                                                           rate/1024))

    def downloadingFileContents(self, got, need):
        """
        @see: callbacks.ChangesetCallback.downloadingFileContents
        """
        self._downloading('Downloading files for changeset', got, self.rate,
                          need)

    def downloadingChangeSet(self, got, need):
        """
        @see: callbacks.ChangesetCallback.downloadingChangeSet
        """
        self._downloading('Downloading', got, self.rate, need)

    def requestingFileContents(self):
        """
        @see: callbacks.ChangesetCallback.requestingFileContents
        """
        if self.csHunk[1] < 2:
            self.csMsg("Requesting file contents")
        else:
            self.csMsg("Requesting file contents for changeset %d of %d"
                       % self.csHunk)

    def requestingChangeSet(self):
        """
        @see: callbacks.ChangesetCallback.requestingChangeSet
        """
        if self.csHunk[1] < 2:
            self.csMsg("Requesting changeset")
        else:
            self.csMsg("Requesting changeset %d of %d" % self.csHunk)

    def creatingRollback(self):
        """
        @see: callbacks.UpdateCallback.creatingRollback
        """
        self.updateMsg("Creating rollback")

    def preparingUpdate(self, troveNum, troveCount):
        """
        @see: callbacks.UpdateCallback.preparingUpdate
        """
        self.updateMsg("Preparing update (%d of %d)" %
                       (troveNum, troveCount))

    @locked
    def restoreFiles(self, size, totalSize):
        """
        @see: callbacks.UpdateCallback.restoreFiles
        """
        # Locked, because we modify self.restored
        if totalSize != 0:
            self.restored += size
            self.updateMsg("Writing %dk of %dk (%d%%)"
                           % (self.restored / 1024, totalSize / 1024,
                              (self.restored * 100) / totalSize))

    def removeFiles(self, fileNum, total):
        """
        @see: callbacks.UpdateCallback.removeFiles
        """
        if total != 0:
            self.updateMsg("Removing %d of %d (%d%%)"
                           % (fileNum, total, (fileNum * 100) / total))

    def creatingDatabaseTransaction(self, troveNum, troveCount):
        """
        @see: callbacks.UpdateCallback.creatingDatabaseTransaction
        """
        self.updateMsg("Creating database transaction (%d of %d)" %
                       (troveNum, troveCount))

    def updatingDatabase(self, step, stepNum, stepCount):
        # 'latest' is a distinct step that refreshes the latest-version table.
        if step == 'latest':
            self.updateMsg('Updating list of latest versions: (%d of %d)' %
                           (stepNum, stepCount))
        else:
            self.updateMsg('Updating database: (%d of %d)' %
                           (stepNum, stepCount))

    def runningPreTagHandlers(self):
        """
        @see: callbacks.UpdateCallback.runningPreTagHandlers
        """
        self.updateMsg("Running tag prescripts")

    def runningPostTagHandlers(self):
        """
        @see: callbacks.UpdateCallback.runningPostTagHandlers
        """
        self.updateMsg("Running tag post-scripts")

    def committingTransaction(self):
        """
        @see: callbacks.UpdateCallback.committingTransaction
        """
        self.updateMsg("Committing database transaction")

    @locked
    def setChangesetHunk(self, num, total):
        """
        @see: callbacks.ChangesetCallback.setChangesetHunk
        """
        self.csHunk = (num, total)

    @locked
    def setUpdateHunk(self, num, total):
        """
        @see: callbacks.UpdateCallback.setUpdateHunk
        """
        self.restored = 0
        self.updateHunk = (num, total)

    @locked
    def setUpdateJob(self, jobs):
        """
        @see: callbacks.UpdateCallback.setUpdateJob
        """
        self._message('')
        if self.updateHunk[1] < 2:
            self.out.write('Applying update job:\n')
        else:
            self.out.write('Applying update job %d of %d:\n' % self.updateHunk)
        # erase anything that is currently displayed
        self._message('')
        self.formatter.prepareJobs(jobs)
        for line in self.formatter.formatJobTups(jobs, indent=' '):
            self.out.write(line + '\n')

    @locked
    def tagHandlerOutput(self, tag, msg, stderr = False):
        """
        @see: callbacks.UpdateCallback.tagHandlerOutput
        """
        self._message('')
        self.out.write('[%s] %s\n' % (tag, msg))

    @locked
    def troveScriptOutput(self, typ, msg):
        """
        @see: callbacks.UpdateCallback.troveScriptOutput
        """
        self._message('')
        self.out.write("[%s] %s" % (typ, msg))

    @locked
    def troveScriptFailure(self, typ, errcode):
        """
        @see: callbacks.UpdateCallback.troveScriptFailure
        """
        self._message('')
        self.out.write("[%s] %s" % (typ, errcode))

    def capsuleSyncScan(self, capsuleType):
        """Status hook: scanning the capsule database of the given type."""
        self.updateMsg("Scanning for %s capsule changes" % capsuleType)

    def capsuleSyncCreate(self, capsuleType, name, num, total):
        """Status hook: collecting capsule database modifications."""
        self.updateMsg("Collecting modifications to %s database (%d of %d)" %
                       (capsuleType, num, total))

    def capsuleSyncApply(self, added, removed):
        """Status hook: applying collected capsule changes to the database."""
        self._message('')
        self.out.write('Synchronizing database with capsule changes\n')

    def __init__(self, cfg=None, modelFile=None):
        """
        Initialize this callback object.

        @param cfg: Conary configuration
        @type cfg: A ConaryConfiguration object.
        @return: None
        """
        callbacks.UpdateCallback.__init__(self)
        if cfg:
            self.setTrustThreshold(cfg.trustThreshold)
        callbacks.LineOutput.__init__(self)
        self.restored = 0
        self.csHunk = (0, 0)
        self.updateHunk = (0, 0)
        self.csText = None
        self.updateText = None
        # Reentrant lock used by the @locked decorator to serialize status
        # updates arriving from the download and update threads.
        self.lock = threading.RLock()

        if cfg:
            fullVersions = cfg.fullVersions
            showFlavors = cfg.fullFlavors
            showLabels = cfg.showLabels
            baseFlavors = cfg.flavor
            showComponents = cfg.showComponents
            db = conaryclient.ConaryClient(cfg, modelFile=modelFile).db
        else:
            fullVersions = showFlavors = showLabels = db = baseFlavors = None
            showComponents = None

        self.formatter = display.JobTupFormatter(affinityDb=db)
        self.formatter.dcfg.setTroveDisplay(fullVersions=fullVersions,
                                            fullFlavors=showFlavors,
                                            showLabels=showLabels,
                                            baseFlavors=baseFlavors,
                                            showComponents=showComponents)
        self.formatter.dcfg.setJobDisplay(compressJobs=not showComponents)
class JsonUpdateCallback(UpdateCallback):
    """Update callback that emits progress as JSON objects, one per line.

    Progress is modeled as three phases (phase_total == 3): 1 "Capsule sync",
    2 "Calculate update", 3 "Apply update", each with its own step counter.
    updateText/csText hold dictionaries here (not strings as in the parent),
    and update() serializes them with json.dumps.
    """

    def __del__(self):
        # NOTE(review): intentionally a no-op, presumably to disable
        # destructor behavior inherited from LineOutput (e.g. terminal
        # cleanup) since output here is machine-readable JSON — confirm.
        pass

    def _message(self, msg):
        # Each status update is written as a complete line (one JSON object).
        self.out.write('%s\n' % msg)

    def _capsuleSync(self, name, step, done=None, total=None, rate=None):
        # Phase 1 ("Capsule sync") has 3 steps.
        step = max(step, 1)
        self.updateMsg(
            step_name=name, step=step, step_total=3, phase=1,
            phase_name="Capsule sync", done=done, total=total, rate=rate)

    def _calculateUpdate(self, name, step, done=None, total=None, rate=None):
        # Phase 2 ("Calculate update") has 4 steps.
        step = max(step, 1)
        self.updateMsg(
            step_name=name, step=step, step_total=4, phase=2,
            phase_name="Calculate update", done=done, total=total, rate=rate)

    def _applyUpdate(self, name, done=None, total=None, rate=None, jobs=None):
        # Phase 3 ("Apply update"): step counters come from the update hunk.
        step, step_total = self.updateHunk
        step = max(step, 1)
        step_total = max(step_total, 1)
        if jobs:
            self.updateMsg(
                step_name=name, step=step, step_total=step_total,
                phase=3, phase_name="Apply update", done=done, total=total,
                rate=rate, jobs=jobs)
        else:
            self.updateMsg(
                step_name=name, step=step, step_total=step_total,
                phase=3, phase_name="Apply update", done=done, total=total,
                rate=rate)

    def _applyUpdateCS(self, name, done=None, total=None, rate=None):
        # Phase 3 status originating from the changeset (download) side.
        step, step_total = self.updateHunk
        step = max(step, 1)
        step_total = max(step_total, 1)
        self.updateMsg(
            step_name=name, step=step, step_total=step_total,
            phase=3, phase_name="Apply update", done=done, total=total,
            rate=rate)

    def update(self):
        """
        Called by this callback object to update the status. This method
        converts dictionaries into json strings. This method is not thread
        safe - obtain a lock before calling.

        @return None
        """
        # NOTE(review): if both self.updateText and self.csText are empty
        # dicts, 't' is never bound and this raises NameError; in practice
        # updateMsg/csMsg always populate one of them before calling update().
        if self.updateText:
            t = self.updateText
        if self.csText:
            t = self.csText
        t['percent'] = None
        if t.get('done') is not None and t.get('total'):
            t['percent'] = (t['done'] * 100) / t['total']
        if t:
            self._message(json.dumps(t))

    @locked
    def updateMsg(self, *args, **kwargs):
        # Status fields arrive as keyword arguments; an optional positional
        # argument becomes the 'msg' field.
        self.updateText = kwargs
        self.updateText['phase_total'] = 3
        if args:
            self.updateText['msg'] = args[0]
        self.update()

    @locked
    def csMsg(self, *args, **kwargs):
        # Same contract as updateMsg; csMsg(None) clears the download status.
        self.csText = kwargs
        self.csText['phase_total'] = 3
        if args:
            if args[0] is None:
                self.csText = dict()
            else:
                self.csText['msg'] = args[0]
        self.update()

    def executingSystemModel(self):
        self._calculateUpdate("Processing system model", step=2)

    def loadingModelCache(self):
        self._calculateUpdate("Loading system model cache", step=1)

    def savingModelCache(self):
        self._calculateUpdate("Saving system model cache", step=4)

    def preparingChangeSet(self):
        self._applyUpdate("Preparing changeset request")

    def resolvingDependencies(self):
        self._calculateUpdate("Resolving dependencies", step=3)

    def creatingRollback(self):
        """
        @see: callbacks.UpdateCallback.creatingRollback
        """
        self._applyUpdate("Creating rollback")

    def preparingUpdate(self, troveNum, troveCount):
        """
        @see: callbacks.UpdateCallback.preparingUpdate
        """
        self._applyUpdate("Preparing update", done=troveNum, total=troveCount)

    @locked
    def restoreFiles(self, size, totalSize):
        """
        @see: callbacks.UpdateCallback.restoreFiles
        """
        # Locked, because we modify self.restored
        if totalSize != 0:
            self.restored += size
            self._applyUpdate("Restoring Files", done=self.restored / 1024,
                              total=totalSize / 1024)

    def removeFiles(self, fileNum, total):
        """
        @see: callbacks.UpdateCallback.removeFiles
        """
        if total != 0:
            self._applyUpdate("Removing Files", done=fileNum, total=total)

    def creatingDatabaseTransaction(self, troveNum, troveCount):
        """
        @see: callbacks.UpdateCallback.creatingDatabaseTransaction
        """
        self._applyUpdate("Creating database transaction", done=troveNum,
                          total=troveCount)

    def updatingDatabase(self, step, stepNum, stepCount):
        if step == 'latest':
            self._applyUpdate(
                'Updating list of latest versions',
                done=stepNum,
                total=stepCount,
            )
        else:
            self._applyUpdate(
                'Updating database', done=stepNum, total=stepCount)

    def runningPreTagHandlers(self):
        """
        @see: callbacks.UpdateCallback.runningPreTagHandlers
        """
        self._applyUpdate("Running tag prescripts")

    def runningPostTagHandlers(self):
        """
        @see: callbacks.UpdateCallback.runningPostTagHandlers
        """
        self._applyUpdate("Running tag post-scripts")

    def committingTransaction(self):
        """
        @see: callbacks.UpdateCallback.committingTransaction
        """
        self._applyUpdate("Committing database transaction")

    @locked
    def setUpdateJob(self, jobs):
        """
        @see: callbacks.UpdateCallback.setUpdateJob
        """
        # Report the job list as structured data rather than formatted lines.
        jobs_collection = []
        self.formatter.prepareJobs(jobs)
        for line in self.formatter.formatJobTups(jobs):
            action, trove_spec = line.split(None, 1)
            jobs_collection.append(dict(action=action, trove=trove_spec))
        self._applyUpdate(
            'Applying update job',
            jobs=jobs_collection,
        )

    def capsuleSyncScan(self, capsuleType):
        self._capsuleSync(
            "Scanning for %s capsule changes" % capsuleType, step=1)

    def capsuleSyncCreate(self, capsuleType, name, num, total):
        self._capsuleSync(
            "Collecting modifications to %s database" % capsuleType,
            step=2, done=num, total=total)

    @locked
    def _downloading(self, msg, got, rate, need):
        """
        Called by this callback object to handle different kinds of
        download-related progress information. This method puts together
        download rate information.

        @param msg: status message
        @type msg: string
        @param got: number of bytes retrieved so far
        @type got: integer
        @param rate: bytes per second
        @type rate: integer
        @param need: number of bytes total to be retrieved
        @type need: integer
        @return: None
        """
        # This function acquires a lock just because it looks at self.csHunk
        # and self.updateText directly. Otherwise, self.csMsg will acquire the
        # lock (which is now reentrant)
        if got == need:
            self.csMsg(None)
        elif need != 0:
            if self.csHunk[1] < 2 or not self.updateText:
                self._applyUpdateCS(msg, done=got / 1024, total=need / 1024,
                                    rate=rate / 1024)
            else:
                self._applyUpdateCS("%s %d of %d" % ((msg,) + self.csHunk),
                                    done=got / 1024, total=need / 1024,
                                    rate=rate / 1024)
        else:
            # no idea how much we need, just keep on counting...
            self._applyUpdateCS(msg, done=got / 1024, rate=rate / 1024)

    def downloadingFileContents(self, got, need):
        """
        @see: callbacks.ChangesetCallback.downloadingFileContents
        """
        self._applyUpdateCS('Downloading files for changeset', done=got,
                            rate=self.rate, total=need)

    def downloadingChangeSet(self, got, need):
        """
        @see: callbacks.ChangesetCallback.downloadingChangeSet
        """
        self._applyUpdateCS('Downloading', done=got, rate=self.rate,
                            total=need)

    def requestingFileContents(self):
        """
        @see: callbacks.ChangesetCallback.requestingFileContents
        """
        self._applyUpdateCS(
            "Requesting file contents for changeset",
            done=max(self.csHunk[0], 1),
            total=max(self.csHunk[1], 1),
        )

    def requestingChangeSet(self):
        """
        @see: callbacks.ChangesetCallback.requestingChangeSet
        """
        self._applyUpdateCS(
            "Requesting changeset",
            done=max(self.csHunk[0], 1),
            total=max(self.csHunk[1], 1),
        )

    @locked
    def troveScriptOutput(self, typ, msg):
        """
        @see: callbacks.UpdateCallback.troveScriptOutput
        """
        self._applyUpdate("[%s] %s" % (typ, msg))

    @locked
    def troveScriptFailure(self, typ, errcode):
        """
        @see: callbacks.UpdateCallback.troveScriptFailure
        """
        self._applyUpdate("[%s] %s" % (typ, errcode))

    def capsuleSyncApply(self, added, removed):
        self._capsuleSync('Synchronizing database with capsule changes',
                          step=3)

    def __init__(self, *args, **kwargs):
        UpdateCallback.__init__(self, *args, **kwargs)
        # Dictionaries (serialized to JSON) instead of the parent's strings.
        self.updateText = {}
        self.csText = {}
def displayChangedJobs(addedJobs, removedJobs, cfg):
    """Print which job tuples were added to / removed from an update job.

    Formatting (full versions, flavors, labels, components) follows the
    display settings in *cfg*; an affinity database is used so versions can
    be shown relative to what is installed.
    """
    db = conaryclient.ConaryClient(cfg).db
    formatter = display.JobTupFormatter(affinityDb=db)
    formatter.dcfg.setTroveDisplay(fullVersions=cfg.fullVersions,
                                   fullFlavors=cfg.fullFlavors,
                                   showLabels=cfg.showLabels,
                                   baseFlavors=cfg.flavor,
                                   showComponents=cfg.showComponents)
    formatter.dcfg.setJobDisplay(compressJobs=not cfg.showComponents)
    # Prepare both sets at once so display widths/version context agree.
    formatter.prepareJobLists([removedJobs | addedJobs])

    if removedJobs:
        print 'No longer part of job:'
        for line in formatter.formatJobTups(removedJobs, indent=' '):
            print line
    if addedJobs:
        print 'Added to job:'
        for line in formatter.formatJobTups(addedJobs, indent=' '):
            print line
def displayUpdateInfo(updJob, cfg, noRestart=False):
    """Print the contents of an update job, one section per sub-job.

    Critical jobs (those that force a restart of the update, e.g. conary
    itself) are flagged with '**', and a trailing note explains the restart
    unless *noRestart* is set.
    """
    jobLists = updJob.getJobs()
    db = conaryclient.ConaryClient(cfg).db

    formatter = display.JobTupFormatter(affinityDb=db)
    formatter.dcfg.setTroveDisplay(fullVersions=cfg.fullVersions,
                                   fullFlavors=cfg.fullFlavors,
                                   showLabels=cfg.showLabels,
                                   baseFlavors=cfg.flavor,
                                   showComponents=cfg.showComponents)
    formatter.dcfg.setJobDisplay(compressJobs=not cfg.showComponents)
    formatter.prepareJobLists(jobLists)

    totalJobs = len(jobLists)
    for num, job in enumerate(jobLists):
        if totalJobs > 1:
            # Mark critical jobs; job numbers are displayed 1-based.
            if num in updJob.getCriticalJobs():
                print '** ',
            print 'Job %d of %d:' % (num + 1, totalJobs)
        for line in formatter.formatJobTups(job, indent=' '):
            print line
    if updJob.getCriticalJobs() and not noRestart:
        criticalJobs = updJob.getCriticalJobs()
        if len(criticalJobs) > 1:
            jobPlural = 's'
        else:
            jobPlural = ''
        jobList = ', '.join([str(x + 1) for x in criticalJobs])
        print
        print '** The update will restart itself after job%s %s and continue updating' % (jobPlural, jobList)
    return
@api.developerApi
def doUpdate(cfg, changeSpecs, **kwargs):
    """Apply an update described by *changeSpecs* (trove specs and/or
    changeset file paths).

    Readable files among *changeSpecs* are loaded as changesets and replaced
    by their primary trove tuples; remaining specs are parsed into an apply
    list, then handed to _updateTroves. Cleans up the restart directory on
    success when resuming a restarted update (restartInfo).
    """
    callback = kwargs.get('callback', None)
    if not callback:
        callback = callbacks.UpdateCallback(trustThreshold=cfg.trustThreshold)
        kwargs['callback'] = callback
    else:
        callback.setTrustThreshold(cfg.trustThreshold)

    syncChildren = kwargs.get('syncChildren', False)
    syncUpdate = kwargs.pop('syncUpdate', False)
    restartInfo = kwargs.get('restartInfo', None)

    # Sync operations imply installing troves that are not already present.
    if syncChildren or syncUpdate:
        installMissing = True
    else:
        installMissing = False

    kwargs['installMissing'] = installMissing

    fromChangesets = []
    for path in kwargs.pop('fromFiles', []):
        cs = changeset.ChangeSetFromFile(path)
        fromChangesets.append(cs)

    kwargs['fromChangesets'] = fromChangesets

    # Look for items which look like files in the applyList and convert
    # them into fromChangesets w/ the primary sets
    for item in changeSpecs[:]:
        if os.access(item, os.R_OK):
            try:
                cs = changeset.ChangeSetFromFile(item)
            # NOTE(review): bare except deliberately treats any load failure
            # as "not a changeset file" and falls through to spec parsing;
            # it also swallows KeyboardInterrupt — consider except Exception.
            except:
                continue

            fromChangesets.append(cs)
            changeSpecs.remove(item)
            for troveTuple in cs.getPrimaryTroveList():
                changeSpecs.append(trovetup.TroveTuple(*troveTuple).asString())

    if kwargs.get('restartInfo', None):
        # We don't care about applyList, we will set it later
        applyList = None
    else:
        keepExisting = kwargs.get('keepExisting')
        updateByDefault = kwargs.get('updateByDefault', True)
        applyList = cmdline.parseChangeList(changeSpecs, keepExisting,
                                            updateByDefault,
                                            allowChangeSets=True)

    _updateTroves(cfg, applyList, **kwargs)
    # Clean up after ourselves
    if restartInfo:
        util.rmtree(restartInfo, ignore_errors=True)
def doModelUpdate(cfg, sysmodel, modelFile, otherArgs, **kwargs):
    """Modify the system model per *otherArgs* and sync the system to it.

    Arguments prefixed '+' are installs/updates, '-' are erases; unprefixed
    arguments follow updateByDefault. Readable changeset files are absorbed
    into the model (groups/redirects on a local label are rejected). With
    kwargs['model'] true, the updated model is printed instead of applied.
    On a restart (restartInfo set), the model is left alone and an empty
    applyList means "sync to model".
    """
    kwargs['systemModel'] = sysmodel
    kwargs['systemModelFile'] = modelFile
    kwargs['loadTroveCache'] = True
    kwargs.setdefault('updateByDefault', True) # erase is not default case
    kwargs.setdefault('model', False)
    kwargs.setdefault('keepExisting', True) # prefer "install" to "update"
    restartInfo = kwargs.get('restartInfo', None)
    patchArgs = kwargs.pop('patchSpec', None)
    fromChangesets = []
    applyList = []

    callback = kwargs.get('callback', None)
    if not callback:
        callback = callbacks.UpdateCallback(trustThreshold=cfg.trustThreshold)
        kwargs['callback'] = callback
    else:
        callback.setTrustThreshold(cfg.trustThreshold)

    if restartInfo is None:
        addArgs = [x[1:] for x in otherArgs if x.startswith('+')]
        rmArgs = [x[1:] for x in otherArgs if x.startswith('-')]
        defArgs = [x for x in otherArgs
                   if not (x.startswith('+') or x.startswith('-'))]

        # find any default arguments that represent changesets to
        # install/update
        for defArg in list(defArgs):
            if kwargs['updateByDefault'] and os.path.isfile(defArg):
                try:
                    cs = changeset.ChangeSetFromFile(defArg)
                    fromChangesets.append((cs, defArg))
                    defArgs.remove(defArg)
                except filecontainer.BadContainer:
                    # not a changeset, must be a trove name
                    pass

        if kwargs['updateByDefault']:
            addArgs += defArgs
        else:
            rmArgs += defArgs

        if rmArgs:
            sysmodel.appendOpByName('erase', text=rmArgs)

        updateName = { False: 'update',
                       True: 'install' }[kwargs['keepExisting']]

        # Specs whose version part is a bare label are pinned to the exact
        # label/revision currently found in the repository.
        branchArgs = {}
        for index, spec in enumerate(addArgs):
            try:
                troveSpec = trovetup.TroveSpec(spec)
                version = versions.Label(troveSpec.version)
                branchArgs[troveSpec] = index
            except:
                # Any exception is a parse failure in one of the
                # two steps, and so we do not convert that argument
                pass

        if branchArgs:
            client = conaryclient.ConaryClient(cfg)
            repos = client.getRepos()
            foundTroves = repos.findTroves(cfg.installLabelPath,
                                           branchArgs.keys(),
                                           defaultFlavor = cfg.flavor)
            for troveSpec in foundTroves:
                index = branchArgs[troveSpec]
                foundTrove = foundTroves[troveSpec][0]
                addArgs[index] = addArgs[index].replace(
                    troveSpec.version,
                    '%s/%s' %(foundTrove[1].trailingLabel(),
                              foundTrove[1].trailingRevision()))

        disallowedChangesets = []
        for cs, argName in fromChangesets:
            for troveTuple in cs.getPrimaryTroveList():
                # group and redirect changesets will break the model the
                # next time it is run, so prevent them from getting in
                # the model in the first place
                if troveTuple[1].isOnLocalHost():
                    if troveTuple[0].startswith('group-'):
                        disallowedChangesets.append((argName, 'group',
                            trovetup.TroveTuple(*troveTuple).asString()))
                        continue
                    trvCs = cs.getNewTroveVersion(*troveTuple)
                    if trvCs.getType() == trove.TROVE_TYPE_REDIRECT:
                        disallowedChangesets.append((argName, 'redirect',
                            trovetup.TroveTuple(*troveTuple).asString()))
                        continue
                addArgs.append(
                    trovetup.TroveTuple(*troveTuple).asString())

        if disallowedChangesets:
            raise errors.ConaryError(
                'group and redirect changesets on a local label'
                ' cannot be installed:\n ' + '\n '.join(
                    '%s contains local %s: %s' % x
                    for x in disallowedChangesets))

        if addArgs:
            sysmodel.appendOpByName(updateName, text=addArgs)
        if patchArgs:
            sysmodel.appendOpByName('patch', text=patchArgs)

        kwargs['fromChangesets'] = [x[0] for x in fromChangesets]

        # Print-only mode: show the updated model without applying it.
        if kwargs.pop('model'):
            sysmodel.write(sys.stdout)
            sys.stdout.flush()
            return None

        keepExisting = kwargs.get('keepExisting')
        updateByDefault = kwargs.get('updateByDefault', True)
        applyList = cmdline.parseChangeList([], keepExisting,
                                            updateByDefault,
                                            allowChangeSets=True)
    else:
        # In the restart case, applyList == [] which says "sync to model"
        pass

    _updateTroves(cfg, applyList, **kwargs)
    # Clean up after ourselves
    if restartInfo:
        util.rmtree(restartInfo, ignore_errors=True)
def _updateTroves(cfg, applyList, **kwargs):
# Take out the apply-related keyword arguments
applyDefaults = dict(
replaceFiles = False,
replaceManagedFiles = False,
replaceUnmanagedFiles = False,
replaceModifiedFiles = False,
replaceModifiedConfigFiles = False,
tagScript = None,
justDatabase = False,
skipCapsuleOps = False,
info = False,
keepJournal = False,
noRestart = False,
noScripts = False,
)
applyKwargs = {}
for k in applyDefaults:
if k in kwargs:
applyKwargs[k] = kwargs.pop(k)
callback = kwargs.pop('callback')
loadTroveCache = kwargs.pop('loadTroveCache', False)
applyKwargs['test'] = kwargs.get('test', False)
applyKwargs['localRollbacks'] = cfg.localRollbacks
applyKwargs['autoPinList'] = cfg.pinTroves
model = kwargs.pop('systemModel', None)
modelFile = kwargs.pop('systemModelFile', None)
modelGraph = kwargs.pop('modelGraph', None)
modelTrace = kwargs.pop('modelTrace', None)
noRestart = applyKwargs.get('noRestart', False)
client = conaryclient.ConaryClient(cfg, modelFile=modelFile)
client.setUpdateCallback(callback)
if kwargs.pop('disconnected', False):
client.disconnectRepos()
migrate = kwargs.get('migrate', False)
# even though we no longer differentiate forceMigrate, we still
# remove it from kwargs to avoid confusing prepareUpdateJob
kwargs.pop('forceMigrate', False)
restartInfo = kwargs.get('restartInfo', None)
# Initialize the critical update set
applyCriticalOnly = kwargs.get('applyCriticalOnly', False)
if kwargs.get('criticalUpdateInfo') is not None:
kwargs['criticalUpdateInfo'].criticalOnly = applyCriticalOnly
else:
kwargs['criticalUpdateInfo'] = CriticalUpdateInfo(applyCriticalOnly)
info = applyKwargs.pop('info', False)
# Rename depCheck to resolveDeps
depCheck = kwargs.pop('depCheck', True)
kwargs['resolveDeps'] = depCheck
if not info:
client.checkWriteableRoot()
# Unfortunately there's no easy way to make 'test' or 'info' mode work
# with capsule sync, doubly so because it influences the decisions made
# later on about what troves to update. So this will always really
# apply, but the good news is that it never modifies the system outside
# of the Conary DB.
client.syncCapsuleDatabase(callback, makePins=True)
updJob = client.newUpdateJob()
try:
if model:
changeSetList = kwargs.get('fromChangesets', [])
criticalUpdates = kwargs.get('criticalUpdateInfo', None)
tc = modelupdate.CMLTroveCache(client.getDatabase(),
client.getRepos(),
callback = callback,
changeSetList =
changeSetList)
tcPath = cfg.root + cfg.dbPath + '/modelcache'
if loadTroveCache:
if os.path.exists(tcPath):
log.info("loading %s", tcPath)
callback.loadingModelCache()
tc.load(tcPath)
ts = client.cmlGraph(model, changeSetList = changeSetList)
if modelGraph is not None:
ts.g.generateDotFile(modelGraph)
suggMap = client._updateFromTroveSetGraph(updJob, ts, tc,
fromChangesets = changeSetList,
criticalUpdateInfo = criticalUpdates,
callback = callback)
if modelTrace is not None:
ts.g.trace([ parseTroveSpec(x) for x in modelTrace ] )
finalModel = copy.deepcopy(model)
if model.suggestSimplifications(tc, ts.g):
log.info("possible system model simplifications found")
ts2 = client.cmlGraph(model, changeSetList = changeSetList)
updJob2 = client.newUpdateJob()
try:
suggMap2 = client._updateFromTroveSetGraph(updJob2, ts2,
tc,
fromChangesets = changeSetList,
criticalUpdateInfo = criticalUpdates)
except errors.TroveNotFound:
log.info("bad model generated; bailing")
else:
if (suggMap == suggMap2 and
updJob.getJobs() == updJob2.getJobs()):
log.info("simplified model verfied; using it instead")
ts = ts2
finalModel = model
updJob = updJob2
suggMap = suggMap2
else:
log.info("simplified model changed result; ignoring")
model = finalModel
modelFile.model = finalModel
if tc.cacheModified():
log.info("saving %s", tcPath)
callback.savingModelCache()
tc.save(tcPath)
callback.done()
else:
suggMap = client.prepareUpdateJob(updJob, applyList, **kwargs)
except:
callback.done()
client.close()
raise
if info:
callback.done()
displayUpdateInfo(updJob, cfg, noRestart=noRestart)
if restartInfo and not model:
callback.done()
newJobs = set(itertools.chain(*updJob.getJobs()))
oldJobs = set(updJob.getItemList())
addedJobs = newJobs - oldJobs
removedJobs = oldJobs - newJobs
if addedJobs or removedJobs:
print
print 'NOTE: after critical updates were applied, the contents of the update were recalculated:'
print
displayChangedJobs(addedJobs, removedJobs, cfg)
updJob.close()
client.close()
return
if model:
missingLocalTroves = model.getMissingLocalTroves(tc, ts)
if missingLocalTroves:
print 'Update would leave references to missing local troves:'
for troveTup in missingLocalTroves:
if not isinstance(troveTup, trovetup.TroveTuple):
troveTup = trovetup.TroveTuple(troveTup)
print "\t" + str(troveTup)
client.close()
return
if suggMap:
callback.done()
dcfg = display.DisplayConfig()
dcfg.setTroveDisplay(fullFlavors = cfg.fullFlavors,
fullVersions = cfg.fullVersions,
showLabels = cfg.showLabels)
formatter = display.TroveTupFormatter(dcfg)
print "Including extra troves to resolve dependencies:"
print " ",
items = sorted(set(formatter.formatNVF(*x)
for x in itertools.chain(*suggMap.itervalues())))
print " ".join(items)
askInteractive = cfg.interactive
if restartInfo:
callback.done()
newJobs = set(itertools.chain(*updJob.getJobs()))
oldJobs = set(updJob.getItemList())
addedJobs = newJobs - oldJobs
removedJobs = oldJobs - newJobs
if not model and addedJobs or removedJobs:
print 'NOTE: after critical updates were applied, the contents of the update were recalculated:'
displayChangedJobs(addedJobs, removedJobs, cfg)
else:
askInteractive = False
if not updJob.jobs:
# Nothing to do
print 'Update would not modify system'
if model and not kwargs.get('test'):
# Make sure 'conary sync' clears model.next even if nothing needs
# to be done.
modelFile.closeSnapshot()
updJob.close()
client.close()
return
elif askInteractive:
print 'The following updates will be performed:'
displayUpdateInfo(updJob, cfg, noRestart=noRestart)
if migrate and cfg.interactive:
print ('Migrate erases all troves not referenced in the groups'
' specified.')
if askInteractive:
if migrate:
style = 'migrate'
else:
style = 'update'
okay = cmdline.askYn('continue with %s? [Y/n]' % style, default=True)
if not okay:
updJob.close()
client.close()
return
if not noRestart and updJob.getCriticalJobs():
print "Performing critical system updates, will then restart update."
try:
restartDir = client.applyUpdateJob(updJob, **applyKwargs)
finally:
updJob.close()
client.close()
if restartDir:
params = sys.argv
# Write command line to disk
import xmlrpclib
cmdlinefile = open(os.path.join(restartDir, 'cmdline'), "w")
cmdlinefile.write(xmlrpclib.dumps((params, ), methodresponse = True))
cmdlinefile.close()
# CNY-980: we should have the whole script of changes to perform in
# the restart directory (in the job list); if in migrate mode, re-exec
# as regular update
if migrate and 'migrate' in params:
params[params.index('migrate')] = 'update'
params.extend(['--restart-info=%s' % restartDir])
client.close()
raise errors.ReexecRequired(
'Critical update completed, rerunning command...', params,
restartDir)
else:
if (not kwargs.get('test', False)) and model:
modelFile.closeSnapshot()
class UpdateAllFormatter(object):
    """Format (name, version, flavor) tuples for the plain updateall listing.

    Specs containing a flavor are single-quoted so they survive shell
    copy/paste; name-only and name=version forms are emitted bare.
    """
    def formatNVF(self, name, version, flavor):
        hasFlavor = (flavor is not None) and not flavor.isEmpty()
        if version and hasFlavor:
            return "'%s=%s[%s]'" % (name, version.asString(),
                                    deps.formatFlavor(flavor))
        if hasFlavor:
            return "'%s[%s]'" % (name, deps.formatFlavor(flavor))
        if version:
            return "%s=%s" % (name, version.asString())
        return name
def updateAll(cfg, **kwargs):
    """Update every trove on the system ('conary updateall').

    In system-model mode (when a model file exists and this is not a
    restart), refreshes the model's version snapshots and delegates job
    construction to _updateTroves; otherwise builds an explicit applyList
    from the full update item list.  With showItems set, only prints what
    would be updated and returns.
    """
    showItems = kwargs.pop('showItems', False)
    restartInfo = kwargs.get('restartInfo', None)
    migrate = kwargs.pop('migrate', False)
    modelArg = kwargs.pop('model', False)
    modelFile = kwargs.get('systemModelFile', None)
    model = kwargs.get('systemModel', None)
    infoArg = kwargs.get('info', False)
    if model and modelFile and modelFile.exists() and restartInfo is None:
        model.refreshVersionSnapshots()
        # --model: just print the (refreshed) model instead of updating
        if modelArg:
            model.write(sys.stdout)
            sys.stdout.flush()
            return None
    # migrate mode installs byDefault-missing troves and erases the rest
    kwargs['installMissing'] = kwargs['removeNotByDefault'] = migrate
    if 'callback' not in kwargs or not kwargs.get('callback'):
        kwargs['callback'] = UpdateCallback(cfg)
    # load trove cache only if --info provided
    kwargs['loadTroveCache'] = infoArg
    client = conaryclient.ConaryClient(cfg)
    # We want to be careful not to break the old style display, for whoever
    # might have a parser for that output.
    withLongDisplay = (cfg.fullFlavors or cfg.fullVersions or cfg.showLabels)
    formatter = UpdateAllFormatter()
    if restartInfo or (model and modelFile and modelFile.exists()):
        # applyList=None tells _updateTroves to sync to the model
        updateItems = []
        applyList = None
    else:
        if showItems and withLongDisplay:
            updateItems = client.getUpdateItemList()
            dcfg = display.DisplayConfig()
            dcfg.setTroveDisplay(fullFlavors = cfg.fullFlavors,
                                 fullVersions = cfg.fullVersions,
                                 showLabels = cfg.showLabels)
            formatter = display.TroveTupFormatter(dcfg)
        else:
            updateItems = client.fullUpdateItemList()
            applyList = [ (x[0], (None, None), x[1:], True) for x in updateItems ]
    if showItems:
        # Listing mode: print sorted by trove name, do not update
        for (name, version, flavor) in sorted(updateItems, key=lambda x:x[0]):
            print formatter.formatNVF(name, version, flavor)
        return
    _updateTroves(cfg, applyList, **kwargs)
    # Clean up after ourselves
    if restartInfo:
        util.rmtree(restartInfo, ignore_errors=True)
def changePins(cfg, troveStrList, pin = True,
               systemModel = None, systemModelFile = None,
               callback = None):
    """Pin (or unpin, when pin=False) the troves named in troveStrList.

    After unpinning in system-model mode, re-runs the model update so the
    model and the database agree again.
    """
    client = conaryclient.ConaryClient(cfg)
    client.checkWriteableRoot()
    troveList = []
    for spec in troveStrList:
        name, ver, flv = parseTroveSpec(spec)
        troveList.extend(client.db.findTrove(None, (name, ver, flv)))
    client.pinTroves(troveList, pin = pin)
    if systemModel and systemModelFile and not pin:
        doModelUpdate(cfg, systemModel, systemModelFile, [], callback=callback)
def revert(cfg):
    """Revert the changes recorded in the rollback journal via the client."""
    conaryclient.ConaryClient.revertJournal(cfg)
|
HousekeepLtd/django | refs/heads/master | django/conf/locale/de_CH/formats.py | 504 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
from __future__ import unicode_literals
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1  # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    '%d.%m.%Y', '%d.%m.%y',  # '25.10.2006', '25.10.06'
    # '%d. %B %Y', '%d. %b. %Y',  # '25. October 2006', '25. Oct. 2006'
]
DATETIME_INPUT_FORMATS = [
    '%d.%m.%Y %H:%M:%S',     # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f',  # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M',        # '25.10.2006 14:30'
    '%d.%m.%Y',              # '25.10.2006'
]
# These are the separators for non-monetary numbers. For monetary numbers,
# the DECIMAL_SEPARATOR is a . (decimal point) and the THOUSAND_SEPARATOR is a
# ' (single quote).
# For details, please refer to http://www.bk.admin.ch/dokumentation/sprachen/04915/05016/index.html?lang=de
# (in German), the Swiss Federal Chancellery's writing guidelines.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0'  # non-breaking space
NUMBER_GROUPING = 3
|
botswana-harvard/edc-map | refs/heads/develop | edc_map/admin_site.py | 1 | from django.contrib.admin import AdminSite
class EdcMapAdminSite(AdminSite):
    """Dedicated Django admin site for the edc_map app."""
    site_title = 'Edc Map'    # browser title suffix
    site_header = 'Edc Map'   # header shown on each admin page
    index_title = 'Edc Map'   # title of the admin index page
    site_url = '/'            # target of the "View site" link

# Module-level instance registered under the 'edc_map_admin' namespace.
edc_map_admin = EdcMapAdminSite(name='edc_map_admin')
|
qilicun/python | refs/heads/master | python2/PyMOTW-1.132/PyMOTW/optparse/optparse_version.py | 1 | #!/usr/bin/env python
#
# Copyright 2007 Doug Hellmann.
"""Explicit usage message
"""
#end_pymotw_header
import optparse
# Demo: an explicit usage string plus a version, which enables --version.
parser = optparse.OptionParser(usage='%prog [options] <arg1> <arg2> [<arg3>...]',
                               version='1.0',
                               )
parser.parse_args()
|
sgerhart/ansible | refs/heads/maintenance_policy_module | contrib/inventory/openshift.py | 42 | #!/usr/bin/env python
# (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
inventory: openshift
short_description: Openshift gears external inventory script
description:
- Generates inventory of Openshift gears using the REST interface
- this permit to reuse playbook to setup an Openshift gear
version_added: None
author: Michael Scherer
'''
import json
import os
import os.path
import sys
import ConfigParser
import StringIO
from ansible.module_utils.urls import open_url
configparser = None
def get_from_rhc_config(variable):
    """Return *variable* from ~/.openshift/express.conf, or None.

    Parses the config at most once and caches the parser in the module-level
    ``configparser`` global.  Implicitly returns None when the config file
    does not exist or lacks the requested option.
    """
    global configparser
    CONF_FILE = os.path.expanduser('~/.openshift/express.conf')
    if os.path.exists(CONF_FILE):
        if not configparser:
            # Prepend a dummy section header: express.conf is a bare
            # key=value file and ConfigParser requires a section.  Use a
            # context manager so the file handle is closed deterministically
            # (the old open(...).read() leaked it until GC).
            with open(CONF_FILE, 'r') as conf_fp:
                ini_str = '[root]\n' + conf_fp.read()
            configparser = ConfigParser.SafeConfigParser()
            configparser.readfp(StringIO.StringIO(ini_str))
        try:
            return configparser.get('root', variable)
        except ConfigParser.NoOptionError:
            return None
def get_config(env_var, config_var):
    """Look up a setting from the environment, falling back to the rhc
    express.conf value; exit with an Ansible-style error if neither is set."""
    value = os.getenv(env_var)
    if value:
        return value
    value = get_from_rhc_config(config_var)
    if value:
        return value
    sys.exit("failed=True msg='missing %s'" % env_var)
def get_json_from_api(url, username, password):
    """GET *url* with basic auth, requesting the v1.5 JSON API, and return
    the 'data' member of the decoded response."""
    headers = {'Accept': 'application/json; version=1.5'}
    raw = open_url(url, headers=headers, url_username=username,
                   url_password=password).read()
    return json.loads(raw)['data']
# Resolve broker credentials from the environment or ~/.openshift/express.conf.
username = get_config('ANSIBLE_OPENSHIFT_USERNAME', 'default_rhlogin')
password = get_config('ANSIBLE_OPENSHIFT_PASSWORD', 'password')
broker_url = 'https://%s/broker/rest/' % get_config('ANSIBLE_OPENSHIFT_BROKER', 'libra_server')
# First call lists domains; the second lists the applications of the
# first domain returned.
response = get_json_from_api(broker_url + '/domains', username, password)
response = get_json_from_api("%s/domains/%s/applications" %
                             (broker_url, response[0]['id']), username, password)
# Build one inventory group per application, keyed by the app name.
result = {}
for app in response:
    # ssh://520311404832ce3e570000ff@blog-johndoe.example.org
    (user, host) = app['ssh_url'][6:].split('@')  # [6:] strips 'ssh://'
    app_name = host.split('-')[0]
    result[app_name] = {}
    result[app_name]['hosts'] = []
    result[app_name]['hosts'].append(host)
    result[app_name]['vars'] = {}
    result[app_name]['vars']['ansible_ssh_user'] = user
# Standard Ansible dynamic-inventory protocol: --list or --host <host>.
if len(sys.argv) == 2 and sys.argv[1] == '--list':
    print(json.dumps(result))
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
    print(json.dumps({}))
else:
    print("Need an argument, either --list or --host <host>")
|
rayantony/three.js | refs/heads/master | utils/converters/msgpack/msgpack/_version.py | 648 | version = (0, 4, 2)
|
cccfran/sympy | refs/heads/master | sympy/physics/optics/tests/test_utils.py | 15 | from __future__ import division
from sympy.physics.optics.utils import (refraction_angle, deviation,
lens_makers_formula, mirror_formula, lens_formula)
from sympy.physics.optics.medium import Medium
from sympy.physics.units import e0
from sympy import symbols, sqrt, Matrix, oo
from sympy.geometry.point3d import Point3D
from sympy.geometry.line3d import Ray3D
from sympy.geometry.plane import Plane
from sympy.utilities.pytest import XFAIL
def test_refraction_angle():
    """refraction_angle() accepts the incident ray and the surface normal in
    several forms (Matrix, list, tuple, Ray3D, Plane); exact expression
    equality is asserted, so the symbol/variable names here are load-bearing.
    """
    n1, n2 = symbols('n1, n2')
    m1 = Medium('m1')
    m2 = Medium('m2')
    r1 = Ray3D(Point3D(-1, -1, 1), Point3D(0, 0, 0))
    i = Matrix([1, 1, 1])
    n = Matrix([0, 0, 1])
    normal_ray = Ray3D(Point3D(0, 0, 0), Point3D(0, 0, 1))
    P = Plane(Point3D(0, 0, 0), normal_vector=[0, 0, 1])
    # Equal media: ray passes straight through (z-component flips sign
    # because the returned vector points into the second medium).
    assert refraction_angle(r1, 1, 1, n) == Matrix([
                                            [ 1],
                                            [ 1],
                                            [-1]])
    assert refraction_angle([1, 1, 1], 1, 1, n) == Matrix([
                                            [ 1],
                                            [ 1],
                                            [-1]])
    assert refraction_angle((1, 1, 1), 1, 1, n) == Matrix([
                                            [ 1],
                                            [ 1],
                                            [-1]])
    assert refraction_angle(i, 1, 1, [0, 0, 1]) == Matrix([
                                            [ 1],
                                            [ 1],
                                            [-1]])
    assert refraction_angle(i, 1, 1, (0, 0, 1)) == Matrix([
                                            [ 1],
                                            [ 1],
                                            [-1]])
    assert refraction_angle(i, 1, 1, normal_ray) == Matrix([
                                            [ 1],
                                            [ 1],
                                            [-1]])
    assert refraction_angle(i, 1, 1, plane=P) == Matrix([
                                            [ 1],
                                            [ 1],
                                            [-1]])
    assert refraction_angle(r1, 1, 1, plane=P) == \
        Ray3D(Point3D(0, 0, 0), Point3D(1, 1, -1))
    assert refraction_angle(r1, m1, 1.33, plane=P) == \
        Ray3D(Point3D(0, 0, 0), Point3D(100/133, 100/133, -789378201649271*sqrt(3)/1000000000000000))
    assert refraction_angle(r1, 1, m2, plane=P) == \
        Ray3D(Point3D(0, 0, 0), Point3D(1, 1, -1))
    assert refraction_angle(r1, n1, n2, plane=P) == \
        Ray3D(Point3D(0, 0, 0), Point3D(n1/n2, n1/n2, -sqrt(3)*sqrt(-2*n1**2/(3*n2**2) + 1)))
    assert refraction_angle(r1, 1.33, 1, plane=P) == 0  # TIR
    assert refraction_angle(r1, 1, 1, normal_ray) == \
        Ray3D(Point3D(0, 0, 0), direction_ratio=[1, 1, -1])
def test_deviation():
    """deviation() for equal media (zero), a denser medium (~-0.119 rad),
    and total internal reflection (None)."""
    r1 = Ray3D(Point3D(-1, -1, 1), Point3D(0, 0, 0))
    n = Matrix([0, 0, 1])
    i = Matrix([-1, -1, -1])
    normal_ray = Ray3D(Point3D(0, 0, 0), Point3D(0, 0, 1))
    P = Plane(Point3D(0, 0, 0), normal_vector=[0, 0, 1])
    assert deviation(r1, 1, 1, normal=n) == 0
    assert deviation(r1, 1, 1, plane=P) == 0
    # abs() so the check also fails for values far below -0.119; the
    # original 'x + 0.119 < 1e-3' accepted any sufficiently negative x.
    assert abs(deviation(r1, 1, 1.1, plane=P).evalf(3) + 0.119) < 1e-3
    assert abs(deviation(i, 1, 1.1, normal=normal_ray).evalf(3) + 0.119) < 1e-3
    assert deviation(r1, 1.33, 1, plane=P) is None  # TIR
    assert deviation(r1, 1, 1, normal=[0, 0, 1]) == 0
    assert deviation([-1, -1, -1], 1, 1, normal=[0, 0, 1]) == 0
def test_lens_makers_formula():
    """lens_makers_formula() with symbolic indices, Medium objects, and
    plain floats; exact symbolic equality depends on the symbol names."""
    n1, n2 = symbols('n1, n2')
    m1 = Medium('m1', permittivity=e0, n=1)
    m2 = Medium('m2', permittivity=e0, n=1.33)
    assert lens_makers_formula(n1, n2, 10, -10) == 5*n2/(n1 - n2)
    assert round(lens_makers_formula(m1, m2, 10, -10), 2) == -20.15
    assert round(lens_makers_formula(1.33, 1, 10, -10), 2) == 15.15
def test_mirror_formula():
    """mirror_formula() solves 1/v + 1/u = 1/f for the missing quantity,
    including the limits at infinity."""
    u, v, f = symbols('u, v, f')
    assert mirror_formula(focal_length=f, u=u) == f*u/(-f + u)
    assert mirror_formula(focal_length=f, v=v) == f*v/(-f + v)
    assert mirror_formula(u=u, v=v) == u*v/(u + v)
    assert mirror_formula(u=oo, v=v) == v
    assert mirror_formula(u=oo, v=oo) == oo
def test_lens_formula():
    """lens_formula() solves 1/v - 1/u = 1/f for the missing quantity,
    including the limits at infinity."""
    u, v, f = symbols('u, v, f')
    assert lens_formula(focal_length=f, u=u) == f*u/(f + u)
    assert lens_formula(focal_length=f, v=v) == f*v/(f - v)
    assert lens_formula(u=u, v=v) == u*v/(u - v)
    assert lens_formula(u=oo, v=v) == v
    assert lens_formula(u=oo, v=oo) == oo
|
wzb56/Sparkngin | refs/heads/master | adapters/zmq2kafka-streamer/test/zmq-push.py | 2 | #!/usr/bin/python
import zmq
import time
if __name__ == "__main__":
    # Bind a PUSH socket on all interfaces, port 5555, and push 100 messages.
    context = zmq.Context()
    push = context.socket(zmq.PUSH)
    push.bind("tcp://0:5555")
    for n in xrange(100):
        msg = "Pushmsg %d" % n
        push.send(msg)
        #time.sleep(0.5)
    time.sleep(1) # Time to flush
|
filias/django | refs/heads/master | tests/admin_changelist/urls.py | 810 | from django.conf.urls import url
from . import admin
# Route the admin site defined in this test app's admin module.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
]
|
rickmendes/ansible-modules-extras | refs/heads/devel | cloud/openstack/os_port_facts.py | 31 | #!/usr/bin/python
# Copyright (c) 2016 IBM
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
DOCUMENTATION = '''
module: os_port_facts
short_description: Retrieve facts about ports within OpenStack.
version_added: "2.1"
author: "David Shrewsbury (@Shrews)"
description:
- Retrieve facts about ports from OpenStack.
notes:
- Facts are placed in the C(openstack_ports) variable.
requirements:
- "python >= 2.6"
- "shade"
options:
port:
description:
- Unique name or ID of a port.
required: false
default: null
filters:
description:
- A dictionary of meta data to use for further filtering. Elements
of this dictionary will be matched against the returned port
dictionaries. Matching is currently limited to strings within
the port dictionary, or strings within nested dictionaries.
required: false
default: null
extends_documentation_fragment: openstack
'''
EXAMPLES = '''
# Gather facts about all ports
- os_port_facts:
cloud: mycloud
# Gather facts about a single port
- os_port_facts:
cloud: mycloud
port: 6140317d-e676-31e1-8a4a-b1913814a471
# Gather facts about all ports that have device_id set to a specific value
# and with a status of ACTIVE.
- os_port_facts:
cloud: mycloud
filters:
device_id: 1038a010-3a37-4a9d-82ea-652f1da36597
status: ACTIVE
'''
RETURN = '''
openstack_ports:
description: List of port dictionaries. A subset of the dictionary keys
listed below may be returned, depending on your cloud provider.
returned: always, but can be null
type: complex
contains:
admin_state_up:
description: The administrative state of the router, which is
up (true) or down (false).
returned: success
type: boolean
sample: true
allowed_address_pairs:
description: A set of zero or more allowed address pairs. An
address pair consists of an IP address and MAC address.
returned: success
type: list
sample: []
"binding:host_id":
description: The UUID of the host where the port is allocated.
returned: success
type: string
sample: "b4bd682d-234a-4091-aa5b-4b025a6a7759"
"binding:profile":
description: A dictionary the enables the application running on
the host to pass and receive VIF port-specific
information to the plug-in.
returned: success
type: dict
sample: {}
"binding:vif_details":
description: A dictionary that enables the application to pass
information about functions that the Networking API
provides.
returned: success
type: dict
sample: {"port_filter": true}
"binding:vif_type":
description: The VIF type for the port.
returned: success
type: dict
sample: "ovs"
"binding:vnic_type":
description: The virtual network interface card (vNIC) type that is
bound to the neutron port.
returned: success
type: string
sample: "normal"
device_id:
description: The UUID of the device that uses this port.
returned: success
type: string
sample: "b4bd682d-234a-4091-aa5b-4b025a6a7759"
device_owner:
description: The UUID of the entity that uses this port.
returned: success
type: string
sample: "network:router_interface"
dns_assignment:
description: DNS assignment information.
returned: success
type: list
dns_name:
description: DNS name
returned: success
type: string
sample: ""
extra_dhcp_opts:
description: A set of zero or more extra DHCP option pairs.
An option pair consists of an option value and name.
returned: success
type: list
sample: []
fixed_ips:
description: The IP addresses for the port. Includes the IP address
and UUID of the subnet.
returned: success
type: list
id:
description: The UUID of the port.
returned: success
type: string
sample: "3ec25c97-7052-4ab8-a8ba-92faf84148de"
ip_address:
description: The IP address.
returned: success
type: string
sample: "127.0.0.1"
mac_address:
description: The MAC address.
returned: success
type: string
sample: "fa:16:30:5f:10:f1"
name:
description: The port name.
returned: success
type: string
sample: "port_name"
network_id:
description: The UUID of the attached network.
returned: success
type: string
sample: "dd1ede4f-3952-4131-aab6-3b8902268c7d"
port_security_enabled:
description: The port security status. The status is enabled (true) or disabled (false).
returned: success
type: boolean
sample: false
security_groups:
description: The UUIDs of any attached security groups.
returned: success
type: list
status:
description: The port status.
returned: success
type: string
sample: "ACTIVE"
tenant_id:
description: The UUID of the tenant who owns the network.
returned: success
type: string
sample: "51fce036d7984ba6af4f6c849f65ef00"
'''
def main():
    """Ansible module entry point: look up OpenStack ports via shade and
    exit with the result in the 'openstack_ports' fact."""
    argument_spec = openstack_full_argument_spec(
        port=dict(required=False),
        filters=dict(type='dict', required=False),
    )
    module = AnsibleModule(argument_spec, **openstack_module_kwargs())
    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')
    # Pop the module-specific parameters so only cloud-auth options remain.
    port_name = module.params.pop('port')
    port_filters = module.params.pop('filters')
    try:
        cloud = shade.openstack_cloud(**module.params)
        found = cloud.search_ports(port_name, port_filters)
        module.exit_json(changed=False,
                         ansible_facts=dict(openstack_ports=found))
    except shade.OpenStackCloudException as e:
        module.fail_json(msg=str(e))
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
|
e1ven/Waymoot | refs/heads/master | libs/tornado-2.2/build/lib/tornado/platform/posix.py | 11 | #!/usr/bin/env python
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Posix implementations of platform-specific functionality."""
import fcntl
import os
from tornado.platform import interface
from tornado.util import b
def set_close_exec(fd):
    """Mark *fd* close-on-exec so it is not inherited by exec'd children."""
    old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
    fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
def _set_nonblocking(fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
class Waker(interface.Waker):
    """Self-pipe waker: a non-blocking pipe pair used to interrupt a
    select/poll loop.  wake() writes one byte; consume() drains the pipe."""

    def __init__(self):
        read_fd, write_fd = os.pipe()
        # Both ends are non-blocking (so wake/consume never stall) and
        # close-on-exec (so children do not inherit them).
        for fd in (read_fd, write_fd):
            _set_nonblocking(fd)
            set_close_exec(fd)
        self.reader = os.fdopen(read_fd, "rb", 0)
        self.writer = os.fdopen(write_fd, "wb", 0)

    def fileno(self):
        # The loop selects on the read end.
        return self.reader.fileno()

    def wake(self):
        try:
            self.writer.write(b("x"))
        except IOError:
            # Pipe full or closed: a pending wakeup already exists.
            pass

    def consume(self):
        try:
            # Drain until read() reports no more data.
            while self.reader.read():
                pass
        except IOError:
            pass

    def close(self):
        self.reader.close()
        self.writer.close()
|
Chilledheart/chromium | refs/heads/master | tools/telemetry/third_party/gsutilz/third_party/oauth2client/oauth2client/old_run.py | 52 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds the old run() function which is deprecated, the
tools.run_flow() function should be used in its place."""
from __future__ import print_function
import logging
import socket
import sys
import webbrowser
import gflags
from six.moves import input
from oauth2client import client
from oauth2client import util
from oauth2client.tools import ClientRedirectHandler
from oauth2client.tools import ClientRedirectServer
# Command-line flags controlling the local-redirect webserver used during
# the OAuth dance (consumed by run() below via FLAGS).
FLAGS = gflags.FLAGS

gflags.DEFINE_boolean('auth_local_webserver', True,
                      ('Run a local web server to handle redirects during '
                       'OAuth authorization.'))

gflags.DEFINE_string('auth_host_name', 'localhost',
                     ('Host name to use when running a local web server to '
                      'handle redirects during OAuth authorization.'))

gflags.DEFINE_multi_int('auth_host_port', [8080, 8090],
                        ('Port to use when running a local web server to '
                         'handle redirects during OAuth authorization.'))
@util.positional(2)
def run(flow, storage, http=None):
  """Core code for a command-line application.

  The ``run()`` function is called from your application and runs
  through all the steps to obtain credentials. It takes a ``Flow``
  argument and attempts to open an authorization server page in the
  user's default web browser. The server asks the user to grant your
  application access to the user's data. If the user grants access,
  the ``run()`` function returns new credentials. The new credentials
  are also stored in the ``storage`` argument, which updates the file
  associated with the ``Storage`` object.

  It presumes it is run from a command-line application and supports the
  following flags:

    ``--auth_host_name`` (string, default: ``localhost``)
       Host name to use when running a local web server to handle
       redirects during OAuth authorization.

    ``--auth_host_port`` (integer, default: ``[8080, 8090]``)
       Port to use when running a local web server to handle redirects
       during OAuth authorization. Repeat this option to specify a list
       of values.

    ``--[no]auth_local_webserver`` (boolean, default: ``True``)
       Run a local web server to handle redirects during OAuth authorization.

  Since it uses flags make sure to initialize the ``gflags`` module before
  calling ``run()``.

  Args:
    flow: Flow, an OAuth 2.0 Flow to step through.
    storage: Storage, a ``Storage`` to store the credential in.
    http: An instance of ``httplib2.Http.request`` or something that acts
      like it.

  Returns:
    Credentials, the obtained credential.
  """
  logging.warning('This function, oauth2client.tools.run(), and the use of '
      'the gflags library are deprecated and will be removed in a future '
      'version of the library.')
  if FLAGS.auth_local_webserver:
    # Try each configured port in turn until one can be bound.
    success = False
    port_number = 0
    for port in FLAGS.auth_host_port:
      port_number = port
      try:
        httpd = ClientRedirectServer((FLAGS.auth_host_name, port),
                                     ClientRedirectHandler)
      except socket.error:
        pass
      else:
        success = True
        break
    FLAGS.auth_local_webserver = success
    if not success:
      # Message fix: the second default port is 8090, not 9090.
      print('Failed to start a local webserver listening on either port 8080')
      print('or port 8090. Please check your firewall settings and locally')
      print('running programs that may be blocking or using those ports.')
      print()
      print('Falling back to --noauth_local_webserver and continuing with')
      print('authorization.')
      print()

  if FLAGS.auth_local_webserver:
    oauth_callback = 'http://%s:%s/' % (FLAGS.auth_host_name, port_number)
  else:
    # Out-of-band flow: the user pastes the verification code manually.
    oauth_callback = client.OOB_CALLBACK_URN
  flow.redirect_uri = oauth_callback

  # Step 1: send the user to the authorization URL.
  authorize_url = flow.step1_get_authorize_url()

  if FLAGS.auth_local_webserver:
    webbrowser.open(authorize_url, new=1, autoraise=True)
    print('Your browser has been opened to visit:')
    print()
    print('    ' + authorize_url)
    print()
    print('If your browser is on a different machine then exit and re-run')
    print('this application with the command-line parameter ')
    print()
    print('  --noauth_local_webserver')
    print()
  else:
    print('Go to the following link in your browser:')
    print()
    print('    ' + authorize_url)
    print()

  # Step 2: collect the authorization code from the redirect or the user.
  code = None
  if FLAGS.auth_local_webserver:
    httpd.handle_request()
    if 'error' in httpd.query_params:
      sys.exit('Authentication request was rejected.')
    if 'code' in httpd.query_params:
      code = httpd.query_params['code']
    else:
      print('Failed to find "code" in the query parameters of the redirect.')
      sys.exit('Try running with --noauth_local_webserver.')
  else:
    code = input('Enter verification code: ').strip()

  # Exchange the code for credentials and persist them.
  try:
    credential = flow.step2_exchange(code, http=http)
  except client.FlowExchangeError as e:
    sys.exit('Authentication has failed: %s' % e)

  storage.put(credential)
  credential.set_store(storage)
  print('Authentication successful.')

  return credential
|
tonningp/spritewalker | refs/heads/master | assets/space/ch09/carVec.py | 1 | """ carVec.py
Use vector projection to create a smoothly-turning
car sprite.
"""
import pygame, math
pygame.init()
class Car(pygame.sprite.Sprite):
    """Arrow-key driven car sprite.

    Left/Right rotate the heading, Up/Down accelerate and brake.  Each
    frame the heading is projected onto (dx, dy) movement and the sprite
    wraps around the screen edges.
    """

    def __init__(self, screen):
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        # Keep an unrotated master copy so repeated rotations do not
        # progressively degrade the image.
        self.imageMaster = pygame.image.load("car.gif")
        self.imageMaster = self.imageMaster.convert()
        self.imageMaster = pygame.transform.scale(self.imageMaster, (50, 35))
        self.rect = self.imageMaster.get_rect()
        self.dir = 0        # heading in degrees
        self.turnRate = 3   # degrees turned per frame
        self.accel = .1     # speed change per frame
        # Position kept as floats; rect.center truncates to ints, so
        # sub-pixel movement would otherwise be lost.
        self.x = 320.0
        self.y = 240.0
        self.speed = 0
        self.rect.center = (self.x, self.y)

    def update(self):
        """Per-frame pipeline: input -> rotate image -> move -> wrap."""
        self.checkKeys()
        self.rotate()
        self.calcVector()
        self.checkBounds()
        self.rect.center = (self.x, self.y)

    def checkKeys(self):
        """Read the keyboard and adjust heading and speed."""
        keys = pygame.key.get_pressed()
        if keys[pygame.K_RIGHT]:
            # Modular arithmetic keeps the heading in [0, 360) for any
            # turnRate.  The previous code reset to the fixed values
            # ``360 - turnRate`` / ``turnRate`` on over/underflow, which
            # was only correct when turnRate divided evenly into 360.
            self.dir = (self.dir - self.turnRate) % 360
        if keys[pygame.K_LEFT]:
            self.dir = (self.dir + self.turnRate) % 360
        if keys[pygame.K_UP]:
            self.speed += self.accel
            if self.speed > 10:
                self.speed = 10
        if keys[pygame.K_DOWN]:
            self.speed -= self.accel
            if self.speed < -3:
                self.speed = -3

    def rotate(self):
        """Re-rotate the master image, preserving the sprite's center."""
        oldCenter = self.rect.center
        self.image = pygame.transform.rotate(self.imageMaster, self.dir)
        self.rect = self.image.get_rect()
        self.rect.center = oldCenter

    def calcVector(self):
        """Project the heading onto x/y movement scaled by speed."""
        radians = self.dir * math.pi / 180
        self.dx = math.cos(radians)
        self.dy = math.sin(radians)
        self.dx *= self.speed
        self.dy *= self.speed
        # Screen y grows downward, so invert dy.
        self.dy *= -1
        self.x += self.dx
        self.y += self.dy

    def checkBounds(self):
        """Wrap the car around the screen edges."""
        if self.x > self.screen.get_width():
            self.x = 0
        if self.x < 0:
            self.x = self.screen.get_width()
        if self.y > self.screen.get_height():
            self.y = 0
        if self.y < 0:
            self.y = self.screen.get_height()
def main():
    """Create the demo window and run the event loop until quit."""
    screen = pygame.display.set_mode((640, 480))
    pygame.display.set_caption("vector projection")

    backdrop = pygame.Surface(screen.get_size())
    backdrop.fill((0xcc, 0xcc, 0xcc))
    screen.blit(backdrop, (0, 0))

    sprites = pygame.sprite.Group(Car(screen))
    clock = pygame.time.Clock()

    running = True
    while running:
        clock.tick(30)  # cap at 30 FPS
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
        # Erase, advance and redraw all sprites, then flip the buffer.
        sprites.clear(screen, backdrop)
        sprites.update()
        sprites.draw(screen)
        pygame.display.flip()


if __name__ == "__main__":
    main()
|
Frostman/eho-horizon | refs/heads/master | openstack_dashboard/dashboards/admin/instances/panel.py | 19 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class Instances(horizon.Panel):
    """Admin dashboard panel listing Nova instances.

    Visible only to users holding the OpenStack admin role.
    """
    name = _("Instances")
    slug = 'instances'
    permissions = ('openstack.roles.admin',)


# Make the panel available by registering it with the Admin dashboard.
dashboard.Admin.register(Instances)
|
elkingtonmcb/django | refs/heads/master | tests/user_commands/management/commands/dance.py | 314 | from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
    """Management command exercised by the user_commands test suite."""

    help = "Dance around like a madman."
    args = ''
    requires_system_checks = True

    def add_arguments(self, parser):
        # One option with a default, one plain value, one boolean flag.
        parser.add_argument("-s", "--style", default="Rock'n'Roll")
        parser.add_argument("-x", "--example")
        parser.add_argument("--opt-3", action='store_true', dest='option3')

    def handle(self, *args, **options):
        # The tests trigger this path to check error propagation.
        if options["example"] == "raise":
            raise CommandError()
        if options['verbosity'] > 0:
            self.stdout.write("I don't feel like dancing %s." % options["style"])
        self.stdout.write(','.join(options.keys()))
|
kuiwei/kuiwei | refs/heads/master | lms/djangoapps/instructor_analytics/tests/test_basic.py | 12 | """
Tests for instructor.basic
"""
from django.test import TestCase
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from shoppingcart.models import CourseRegistrationCode, RegistrationCodeRedemption, Order
from instructor_analytics.basic import enrolled_students_features, course_registration_features, AVAILABLE_FEATURES, STUDENT_FEATURES, PROFILE_FEATURES
class TestAnalyticsBasic(TestCase):
    """ Test basic analytics functions. """

    def setUp(self):
        # One throw-away course with 30 factory users enrolled; the
        # enrollment objects are kept alive via ``self.ces``.
        self.course_key = SlashSeparatedCourseKey('robot', 'course', 'id')
        self.users = tuple(UserFactory() for _ in xrange(30))
        self.ces = tuple(CourseEnrollment.enroll(user, self.course_key)
                         for user in self.users)

    def test_enrolled_students_features_username(self):
        # Requesting a single feature must yield exactly that key for
        # every enrolled user.
        self.assertIn('username', AVAILABLE_FEATURES)
        userreports = enrolled_students_features(self.course_key, ['username'])
        self.assertEqual(len(userreports), len(self.users))
        for userreport in userreports:
            self.assertEqual(userreport.keys(), ['username'])
            self.assertIn(userreport['username'], [user.username for user in self.users])

    def test_enrolled_students_features_keys(self):
        # Multiple requested features: each report must carry exactly the
        # requested keys, with values drawn from the enrolled users.
        query_features = ('username', 'name', 'email')
        for feature in query_features:
            self.assertIn(feature, AVAILABLE_FEATURES)
        userreports = enrolled_students_features(self.course_key, query_features)
        self.assertEqual(len(userreports), len(self.users))
        for userreport in userreports:
            self.assertEqual(set(userreport.keys()), set(query_features))
            self.assertIn(userreport['username'], [user.username for user in self.users])
            self.assertIn(userreport['email'], [user.email for user in self.users])
            self.assertIn(userreport['name'], [user.profile.name for user in self.users])

    def test_available_features(self):
        # AVAILABLE_FEATURES must be exactly the union of the student and
        # profile feature lists.
        self.assertEqual(len(AVAILABLE_FEATURES), len(STUDENT_FEATURES + PROFILE_FEATURES))
        self.assertEqual(set(AVAILABLE_FEATURES), set(STUDENT_FEATURES + PROFILE_FEATURES))

    def test_course_registration_features(self):
        # Five registration codes, one purchased order, one redemption.
        # NOTE(review): ``registration_code_id=1`` assumes the first
        # CourseRegistrationCode row created gets primary key 1 — verify
        # this holds for the test database backend in use.
        query_features = ['code', 'course_id', 'transaction_group_name', 'created_by', 'redeemed_by']
        for i in range(5):
            course_code = CourseRegistrationCode(
                code="test_code{}".format(i), course_id=self.course_key.to_deprecated_string(),
                transaction_group_name='TestName', created_by=self.users[0]
            )
            course_code.save()
        order = Order(user=self.users[0], status='purchased')
        order.save()
        registration_code_redemption = RegistrationCodeRedemption(
            order=order, registration_code_id=1, redeemed_by=self.users[0]
        )
        registration_code_redemption.save()
        registration_codes = CourseRegistrationCode.objects.all()
        course_registration_list = course_registration_features(query_features, registration_codes, csv_type='download')
        self.assertEqual(len(course_registration_list), len(registration_codes))
        for course_registration in course_registration_list:
            self.assertEqual(set(course_registration.keys()), set(query_features))
            self.assertIn(course_registration['code'], [registration_code.code for registration_code in registration_codes])
            self.assertIn(
                course_registration['course_id'],
                [registration_code.course_id.to_deprecated_string() for registration_code in registration_codes]
            )
            self.assertIn(
                course_registration['transaction_group_name'],
                [registration_code.transaction_group_name for registration_code in registration_codes]
            )
|
Lekanich/intellij-community | refs/heads/master | python/testData/mover/commentOut_afterDown.py | 83 | if True:
a = 1
else:
a = 2
#comment <caret>
|
kchodorow/bazel-1 | refs/heads/master | third_party/py/mock/tests/testsentinel.py | 111 | # Copyright (C) 2007-2012 Michael Foord & the mock team
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# http://www.voidspace.org.uk/python/mock/
from tests.support import unittest2
from mock import sentinel, DEFAULT
class SentinelTest(unittest2.TestCase):
    """Behavioural checks for mock.sentinel and mock.DEFAULT."""

    def testSentinels(self):
        # Reading the same attribute twice must return one shared object,
        # while different attributes must return distinct objects.
        self.assertEqual(sentinel.whatever, sentinel.whatever,
                         'sentinel not stored')
        self.assertNotEqual(sentinel.whatever, sentinel.whateverelse,
                            'sentinel should be unique')

    def testSentinelName(self):
        self.assertEqual(str(sentinel.whatever), 'sentinel.whatever',
                         'sentinel name incorrect')

    def testDEFAULT(self):
        self.assertTrue(DEFAULT is sentinel.DEFAULT)

    def testBases(self):
        # If this doesn't raise an AttributeError then help(mock) is broken
        self.assertRaises(AttributeError, getattr, sentinel, '__bases__')
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest2.main()
|
iamhuy/rumour-veracity-verification | refs/heads/master | settings.py | 1 | import os
# Absolute path of the directory containing this settings module.
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))


def _data_path(*parts):
    # Helper: resolve a path under the project-level ``data`` directory.
    return os.path.join(PROJECT_ROOT, 'data', *parts)


# Standard data-science layout: raw -> interim -> processed (+ external).
DATA_RAW_ROOT = _data_path('raw')
DATA_INTERIM_ROOT = _data_path('interim')
DATA_PROCESSED_ROOT = _data_path('processed')
DATA_EXTERNAL_ROOT = _data_path('external')

# Trained model artifacts live outside the data tree.
MODELS_ROOT = os.path.join(PROJECT_ROOT, 'models')

# Classifier identifiers accepted by the training entry points.
TRAINING_OPTIONS = ['instance-based', 'svm', 'j48', 'bayes', 'random-forest']
danny1173/TriCoin | refs/heads/master | share/qt/make_spinner.py | 4415 | #!/usr/bin/env python
# W.J. van der Laan, 2011
# Make spinning .mng animation from a .png
# Requires imagemagick 6.7+
#
# NB: this is a Python 2 script (it uses ``xrange``).
from __future__ import division
from os import path
from PIL import Image
from subprocess import Popen

# Input/output locations and animation parameters.
SRC='img/reload_scaled.png'
DST='../../src/qt/res/movies/update_spinner.mng'
TMPDIR='/tmp'
TMPNAME='tmp-%03i.png'   # per-frame temp file pattern
NUMFRAMES=35             # number of rotation steps in one revolution
FRAMERATE=10.0           # delay value passed to ImageMagick's convert
CONVERT='convert'        # ImageMagick binary
CLOCKWISE=True
DSIZE=(16,16)            # final thumbnail size

im_src = Image.open(SRC)

# When CLOCKWISE is set, the source is mirrored here and the rotation
# angle is negated in the loop below.
if CLOCKWISE:
    im_src = im_src.transpose(Image.FLIP_LEFT_RIGHT)

def frame_to_filename(frame):
    # Temp file name for a given frame index.
    return path.join(TMPDIR, TMPNAME % frame)

frame_files = []
for frame in xrange(NUMFRAMES):
    # Sample the rotation at the centre of each frame's angular slice.
    rotation = (frame + 0.5) / NUMFRAMES * 360.0
    if CLOCKWISE:
        rotation = -rotation
    im_new = im_src.rotate(rotation, Image.BICUBIC)
    im_new.thumbnail(DSIZE, Image.ANTIALIAS)
    outfile = frame_to_filename(frame)
    im_new.save(outfile, 'png')
    frame_files.append(outfile)

# Assemble all frames into the final .mng with ImageMagick.
p = Popen([CONVERT, "-delay", str(FRAMERATE), "-dispose", "2"] + frame_files + [DST])
p.communicate()
|
diegoguimaraes/django | refs/heads/master | tests/admin_scripts/app_raising_warning/models.py | 391 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core import checks
from django.db import models
class ModelRaisingMessages(models.Model):
@classmethod
def check(self, **kwargs):
return [
checks.Warning(
'A warning',
hint=None,
),
]
|
mmbtba/odoo | refs/heads/8.0 | addons/portal_project/__init__.py | 438 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import project
|
lino-framework/xl | refs/heads/master | lino_xl/lib/excerpts/__init__.py | 1 | # Copyright 2013-2019 Rumma & Ko Ltd
# License: GNU Affero General Public License v3 (see file COPYING for details)
"""
Provides a framework for configuring and generating printable
documents called "database excerpts".
See also :doc:`/specs/excerpts`.
.. autosummary::
:toctree:
roles
doctools
fixtures.std
fixtures.demo2
"""
from lino import ad, _
class Plugin(ad.Plugin):
    "See :class:`lino.core.Plugin`."

    verbose_name = _("Excerpts")

    needs_plugins = [
        'lino.modlib.gfks', 'lino.modlib.printing',
        'lino.modlib.office', 'lino_xl.lib.xl']

    responsible_user = None
    """
    The username of the user responsible for monitoring the excerpts
    system.  This is currently used only by
    :mod:`lino_xl.lib.excerpts.fixtures.demo2`.
    """

    def setup_main_menu(self, site, user_type, m):
        # All excerpt actions live under the "office" menu group.
        office = site.plugins.office
        menu = m.add_menu(office.app_label, office.verbose_name)
        menu.add_action('excerpts.MyExcerpts')

    def setup_config_menu(self, site, user_type, m):
        office = site.plugins.office
        menu = m.add_menu(office.app_label, office.verbose_name)
        menu.add_action('excerpts.ExcerptTypes')

    def setup_explorer_menu(self, site, user_type, m):
        office = site.plugins.office
        menu = m.add_menu(office.app_label, office.verbose_name)
        menu.add_action('excerpts.AllExcerpts')
|
wyq200704/ardupilot | refs/heads/master | Tools/LogAnalyzer/tests/TestDualGyroDrift.py | 273 | from LogAnalyzer import Test,TestResult
import DataflashLog
# import scipy
# import pylab #### TEMP!!! only for dev
# from scipy import signal
class TestDualGyroDrift(Test):
    '''test for gyro drift between dual IMU data'''

    def __init__(self):
        Test.__init__(self)
        self.name = "Gyro Drift"
        # Disabled: the dual-IMU comparison was never finished.  A large
        # commented-out prototype (segment-averaged IMU/IMU2 gyro diffs
        # with WARN/FAIL thresholds) used to live in run(); it has been
        # removed — see version control history if it is ever resumed.
        self.enable = False

    def run(self, logdata, verbose):
        # Placeholder implementation: always reports GOOD because the
        # actual analysis is not implemented (and the test is disabled).
        self.result = TestResult()
        self.result.status = TestResult.StatusType.GOOD
devendermishrajio/nova_test_latest | refs/heads/master | nova/tests/unit/api/openstack/compute/contrib/test_image_size.py | 63 | # Copyright 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
import webob
from nova.image import glance
from nova import test
from nova.tests.unit.api.openstack import fakes
NOW_API_FORMAT = "2010-10-11T10:30:22Z"

# Canned glance image records returned by the fake_show/fake_detail stubs
# below; the ``size`` values are what the extension under test must expose.
IMAGES = [{
    'id': '123',
    'name': 'public image',
    'metadata': {'key1': 'value1'},
    'updated': NOW_API_FORMAT,
    'created': NOW_API_FORMAT,
    'status': 'ACTIVE',
    'progress': 100,
    'minDisk': 10,
    'minRam': 128,
    'size': 12345678,
    "links": [{
        "rel": "self",
        "href": "http://localhost/v2/fake/images/123",
    },
    {
        "rel": "bookmark",
        "href": "http://localhost/fake/images/123",
    }],
},
{
    'id': '124',
    'name': 'queued snapshot',
    'updated': NOW_API_FORMAT,
    'created': NOW_API_FORMAT,
    'status': 'SAVING',
    'progress': 25,
    'minDisk': 0,
    'minRam': 0,
    'size': 87654321,
    "links": [{
        "rel": "self",
        "href": "http://localhost/v2/fake/images/124",
    },
    {
        "rel": "bookmark",
        "href": "http://localhost/fake/images/124",
    }],
}]
def fake_show(*args, **kwargs):
    """Stub for GlanceImageService.show: always the first fixture image."""
    return IMAGES[0]

def fake_detail(*args, **kwargs):
    """Stub for GlanceImageService.detail: all fixture images."""
    return IMAGES
class ImageSizeTestV21(test.NoDBTestCase):
    """Verify the OS-EXT-IMG-SIZE extension adds image sizes (v2.1 API)."""
    content_type = 'application/json'
    # Namespace prefixed to the ``size`` attribute in API responses.
    prefix = 'OS-EXT-IMG-SIZE'

    def setUp(self):
        super(ImageSizeTestV21, self).setUp()
        # Replace the glance client with canned fixtures so no real
        # image-service calls are made.
        self.stubs.Set(glance.GlanceImageService, 'show', fake_show)
        self.stubs.Set(glance.GlanceImageService, 'detail', fake_detail)
        self.flags(osapi_compute_extension=['nova.api.openstack.compute'
                   '.contrib.image_size.Image_size'])

    def _make_request(self, url):
        # Issue a GET against the in-process WSGI app and return the response.
        req = webob.Request.blank(url)
        req.headers['Accept'] = self.content_type
        res = req.get_response(self._get_app())
        return res

    def _get_app(self):
        # Overridden in ImageSizeTestV2 to exercise the v2 app instead.
        return fakes.wsgi_app_v21()

    def _get_image(self, body):
        return jsonutils.loads(body).get('image')

    def _get_images(self, body):
        return jsonutils.loads(body).get('images')

    def assertImageSize(self, image, size):
        # The extension exposes size as "<prefix>:size".
        self.assertEqual(image.get('%s:size' % self.prefix), size)

    def test_show(self):
        url = '/v2/fake/images/1'
        res = self._make_request(url)
        self.assertEqual(res.status_int, 200)
        image = self._get_image(res.body)
        self.assertImageSize(image, 12345678)

    def test_detail(self):
        url = '/v2/fake/images/detail'
        res = self._make_request(url)
        self.assertEqual(res.status_int, 200)
        images = self._get_images(res.body)
        self.assertImageSize(images[0], 12345678)
        self.assertImageSize(images[1], 87654321)
class ImageSizeTestV2(ImageSizeTestV21):
    """Run the same checks against the legacy v2 WSGI application."""
    def _get_app(self):
        return fakes.wsgi_app()
|
QinerTech/QinerApps | refs/heads/master | openerp/addons/website_forum_doc/controllers/__init__.py | 4497 | # -*- coding: utf-8 -*-
import main
|
zhangfangyan/devide | refs/heads/master | modules/viewers/SkeletonAUIViewerFrame.py | 7 | # Copyright (c) Charl P. Botha, TU Delft.
# All rights reserved.
# See COPYRIGHT for details.
import cStringIO
from vtk.wx.wxVTKRenderWindowInteractor import wxVTKRenderWindowInteractor
import wx
# wxPython 2.8.8.1 wx.aui bugs severely on GTK. See:
# http://trac.wxwidgets.org/ticket/9716
# Until this is fixed, use this PyAUI to which I've added a
# wx.aui compatibility layer.
if wx.Platform == "__WXGTK__":
from external import PyAUI
wx.aui = PyAUI
else:
import wx.aui
# one could have loaded a wxGlade created resource like this:
#from resources.python import DICOMBrowserPanels
#reload(DICOMBrowserPanels)
class SkeletonAUIViewerFrame(wx.Frame):
    """wx.Frame child class used by SkeletonAUIViewer for its
    interface.

    This is an AUI-managed window, so we create the top-level frame,
    and then populate it with AUI panes.
    """

    def __init__(self, parent, id=-1, title="", name=""):
        wx.Frame.__init__(self, parent, id=id, title=title,
                          pos=wx.DefaultPosition, size=(800,800), name=name)

        self.menubar = wx.MenuBar()
        self.SetMenuBar(self.menubar)

        file_menu = wx.Menu()
        self.id_file_open = wx.NewId()
        # NOTE(review): no handler for id_file_open is bound in this class;
        # presumably the owning module binds it — confirm.
        file_menu.Append(self.id_file_open, "&Open\tCtrl-O",
                         "Open a file", wx.ITEM_NORMAL)
        self.menubar.Append(file_menu, "&File")

        views_menu = wx.Menu()
        views_default_id = wx.NewId()
        views_menu.Append(views_default_id, "&Default\tCtrl-0",
                          "Activate default view layout.", wx.ITEM_NORMAL)
        views_max_image_id = wx.NewId()
        views_menu.Append(views_max_image_id, "&Maximum image size\tCtrl-1",
                          "Activate maximum image view size layout.",
                          wx.ITEM_NORMAL)
        self.menubar.Append(views_menu, "&Views")

        # tell FrameManager to manage this frame
        self._mgr = wx.aui.AuiManager()
        self._mgr.SetManagedWindow(self)

        # Four panes: series list (top), file list and metadata (left),
        # VTK render window (center).
        self._mgr.AddPane(self._create_series_pane(), wx.aui.AuiPaneInfo().
                          Name("series").Caption("Series").Top().
                          BestSize(wx.Size(600, 100)).
                          CloseButton(False).MaximizeButton(True))

        self._mgr.AddPane(self._create_files_pane(), wx.aui.AuiPaneInfo().
                          Name("files").Caption("Image Files").
                          Left().
                          BestSize(wx.Size(200,400)).
                          CloseButton(False).MaximizeButton(True))

        self._mgr.AddPane(self._create_meta_pane(), wx.aui.AuiPaneInfo().
                          Name("meta").Caption("Image Metadata").
                          Left().
                          BestSize(wx.Size(200,400)).
                          CloseButton(False).MaximizeButton(True))

        self._mgr.AddPane(self._create_rwi_pane(), wx.aui.AuiPaneInfo().
                          Name("rwi").Caption("3D Renderer").
                          Center().
                          BestSize(wx.Size(400,400)).
                          CloseButton(False).MaximizeButton(True))

        self.SetMinSize(wx.Size(400, 300))

        # first we save this default perspective with all panes
        # visible
        self._perspectives = {}
        self._perspectives['default'] = self._mgr.SavePerspective()

        # then we hide all of the panes except the renderer
        self._mgr.GetPane("series").Hide()
        self._mgr.GetPane("files").Hide()
        self._mgr.GetPane("meta").Hide()

        # save the perspective again
        self._perspectives['max_image'] = self._mgr.SavePerspective()

        # and put back the default perspective / view
        self._mgr.LoadPerspective(self._perspectives['default'])

        # finally tell the AUI manager to do everything that we've
        # asked
        self._mgr.Update()

        # we bind the views events here, because the functionality is
        # completely encapsulated in the frame and does not need to
        # round-trip to the DICOMBrowser main module.
        self.Bind(wx.EVT_MENU, self._handler_default_view,
                  id=views_default_id)
        self.Bind(wx.EVT_MENU, self._handler_max_image_view,
                  id=views_max_image_id)

    def close(self):
        """Destroy the frame (called by the owning viewer on shutdown)."""
        self.Destroy()

    def _create_files_pane(self):
        """Build the single-column list control for image file names."""
        sl = wx.ListCtrl(self, -1,
                         style=wx.LC_REPORT)
        sl.InsertColumn(0, "Full name")
        # we'll autosize this column later
        sl.SetColumnWidth(0, 300)
        #sl.InsertColumn(SeriesColumns.modality, "Modality")

        self.files_lc = sl

        return sl

    def _create_rwi_pane(self):
        """Build the central pane: VTK render window plus demo buttons."""
        panel = wx.Panel(self, -1)

        self.rwi = wxVTKRenderWindowInteractor(panel, -1, (400,400))
        self.button1 = wx.Button(panel, -1, "Add Superquadric")
        self.button2 = wx.Button(panel, -1, "Reset Camera")
        self.button3 = wx.Button(panel, -1, "Start Timer Event")

        button_sizer = wx.BoxSizer(wx.HORIZONTAL)
        button_sizer.Add(self.button1)
        button_sizer.Add(self.button2)
        button_sizer.Add(self.button3)

        sizer1 = wx.BoxSizer(wx.VERTICAL)
        sizer1.Add(self.rwi, 1, wx.EXPAND|wx.BOTTOM, 7)
        sizer1.Add(button_sizer)

        tl_sizer = wx.BoxSizer(wx.VERTICAL)
        tl_sizer.Add(sizer1, 1, wx.ALL|wx.EXPAND, 7)

        panel.SetSizer(tl_sizer)
        tl_sizer.Fit(panel)

        return panel

    def _create_meta_pane(self):
        """Build the two-column key/value list for image metadata."""
        ml = wx.ListCtrl(self, -1,
                         style=wx.LC_REPORT |
                         wx.LC_HRULES | wx.LC_VRULES |
                         wx.LC_SINGLE_SEL)
        ml.InsertColumn(0, "Key")
        ml.SetColumnWidth(0, 70)

        ml.InsertColumn(1, "Value")
        ml.SetColumnWidth(1, 70)

        self.meta_lc = ml

        return ml

    def _create_series_pane(self):
        """Build the series list control shown in the top pane."""
        sl = wx.ListCtrl(self, -1,
                         style=wx.LC_REPORT | wx.LC_HRULES | wx.LC_SINGLE_SEL,
                         size=(600,120))
        sl.InsertColumn(0, "Description")
        # NOTE(review): column indices look inconsistent here — width is
        # set for column 1 which was never inserted, and the inserts skip
        # index 1 (0, 2, 3, 4).  Probably copied from a larger column
        # enum; confirm intended layout.
        sl.SetColumnWidth(1, 170)
        sl.InsertColumn(2, "Modality")
        sl.InsertColumn(3, "# Images")
        sl.InsertColumn(4, "Size")

        self.series_lc = sl

        return sl

    def render(self):
        """Update embedded RWI, i.e. update the image.
        """
        self.rwi.Render()

    def _handler_default_view(self, event):
        """Event handler for when the user selects View | Default from
        the main menu.
        """
        self._mgr.LoadPerspective(
            self._perspectives['default'])

    def _handler_max_image_view(self, event):
        """Event handler for when the user selects View | Max Image
        from the main menu.
        """
        self._mgr.LoadPerspective(
            self._perspectives['max_image'])
|
Djabbz/scikit-learn | refs/heads/master | examples/svm/plot_oneclass.py | 249 | """
==========================================
One-class SVM with non-linear kernel (RBF)
==========================================
An example using a one-class SVM for novelty detection.
:ref:`One-class SVM <svm_outlier_detection>` is an unsupervised
algorithm that learns a decision function for novelty detection:
classifying new data as similar or different to the training set.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.font_manager
from sklearn import svm
# Grid over which the learned decision function is evaluated for plotting.
xx, yy = np.meshgrid(np.linspace(-5, 5, 500), np.linspace(-5, 5, 500))

# Generate train data: two Gaussian blobs centred at (2,2) and (-2,-2).
X = 0.3 * np.random.randn(100, 2)
X_train = np.r_[X + 2, X - 2]
# Generate some regular novel observations
X = 0.3 * np.random.randn(20, 2)
X_test = np.r_[X + 2, X - 2]
# Generate some abnormal novel observations
X_outliers = np.random.uniform(low=-4, high=4, size=(20, 2))

# fit the model
clf = svm.OneClassSVM(nu=0.1, kernel="rbf", gamma=0.1)
clf.fit(X_train)
y_pred_train = clf.predict(X_train)
y_pred_test = clf.predict(X_test)
y_pred_outliers = clf.predict(X_outliers)
# predict() returns +1 for inliers, -1 for outliers; count the mistakes.
n_error_train = y_pred_train[y_pred_train == -1].size
n_error_test = y_pred_test[y_pred_test == -1].size
n_error_outliers = y_pred_outliers[y_pred_outliers == 1].size

# plot the line, the points, and the nearest vectors to the plane
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)

plt.title("Novelty Detection")
# Filled contours below the frontier, the frontier itself in red, and the
# learned inlier region in orange.
plt.contourf(xx, yy, Z, levels=np.linspace(Z.min(), 0, 7), cmap=plt.cm.Blues_r)
a = plt.contour(xx, yy, Z, levels=[0], linewidths=2, colors='red')
plt.contourf(xx, yy, Z, levels=[0, Z.max()], colors='orange')

b1 = plt.scatter(X_train[:, 0], X_train[:, 1], c='white')
b2 = plt.scatter(X_test[:, 0], X_test[:, 1], c='green')
c = plt.scatter(X_outliers[:, 0], X_outliers[:, 1], c='red')
plt.axis('tight')
plt.xlim((-5, 5))
plt.ylim((-5, 5))
plt.legend([a.collections[0], b1, b2, c],
           ["learned frontier", "training observations",
            "new regular observations", "new abnormal observations"],
           loc="upper left",
           prop=matplotlib.font_manager.FontProperties(size=11))
plt.xlabel(
    "error train: %d/200 ; errors novel regular: %d/40 ; "
    "errors novel abnormal: %d/40"
    % (n_error_train, n_error_test, n_error_outliers))
plt.show()
|
Rafeh01/data-science-toolbox | refs/heads/master | manager/dst/dst.py | 6 | #!/usr/bin/env python
"""usage: dst [--version] [--help]
<command> [<args>...]
options:
-v, --version Print the version and exit
-h, --help Print this help
Available commands:
add Add a bundle
info Print bundle information
list List all available bundles
setup Set up a bundle
For help on any individual command run `dst COMMAND -h`
"""
import yaml
import inspect
import logging
import os
import sys
from os import listdir
from os.path import join, isdir, realpath, dirname
from docopt import docopt
from schema import Schema
def run_ansible_playbook(filename):
    """Run the given playbook locally via ansible-playbook.

    NOTE: the command is deliberately passed through the shell so that
    the $(whoami) in the extra-vars argument is expanded at run time.
    """
    argv = ["/usr/local/bin/ansible-playbook", filename, "-c", "local",
            "-i", "'127.0.0.1,'", "--extra-vars=\"dst_username=$(whoami)\""]
    os.system(' '.join(argv))
class DataScienceToolbox(object):
log_format = '%(asctime)-15s [%(levelname)s] - %(name)s: %(message)s'
logging.basicConfig(format=log_format, level=logging.DEBUG)
log = logging.getLogger('data-science-toolbox')
bundle_dir = '/usr/lib/data-science-toolbox/bundles'
def __init__(self):
pass
def add(self, bundle_id):
"""usage: dst add [options] <name>
options:
-h, --help
-v, --verbose be verbose
"""
run_ansible_playbook(join(self.bundle_dir, bundle_id, 'install.yml'))
def list(self):
"""usage: dst list [options]
options:
-h, --help
-v, --verbose be verbose
"""
bundles = [f for f in listdir(self.bundle_dir) if isdir(join(self.bundle_dir,f))]
print "The following Data Science Toolbox bundles are available:"
print
for bundle in sorted(bundles):
try:
with open(join(bundle_dir, bundle, 'info.yml')) as f:
info = yaml.load(f.read())
except:
continue
print "%-10s - %s" % (bundle, info['title'])
print
print "For more information about a bundle, run `dst info BUNDLE`"
def update(self):
"""usage: dst update [options]
options:
-h, --help
-v, --verbose be verbose
"""
os.system('cd /usr/lib/data-science-toolbox && sudo git pull')
def info(self, bundle_id):
"""usage: dst info [options] <name>
options:
-h, --help
-v, --verbose be verbose
"""
try:
with open(join(self.bundle_dir, bundle_id, 'info.yml')) as f:
info = yaml.load(f.read())
for k, v in sorted(info.iteritems()):
print "%-10s: %s" % (k.capitalize(), v)
except:
print "Cannot get information of bundle %s" % bundle_id
print
def setup(self, bundle_id):
"""usage: dst setup [options] <name>
options:
-h, --help
-v, --verbose be verbose
"""
pb = join(self.bundle_dir, bundle_id, 'setup.yml')
run_ansible_playbook(pb)
def main():
    """Parse the top-level command line and dispatch to a sub-command."""
    # options_first=True stops parsing at the first positional, so the
    # sub-command's own arguments pass through untouched in <args>.
    args = docopt(__doc__, version='Data Science Toolbox version 0.1.5', options_first=True)
    # Map each public method name to its docopt usage string (the
    # method's docstring). NOTE(review): `help` shadows the builtin;
    # inspect.ismethod picks up unbound methods, i.e. Python 2 semantics.
    help = {m[0]: inspect.getdoc(m[1]) for m in \
    inspect.getmembers(DataScienceToolbox, predicate=inspect.ismethod) \
    if not m[0].startswith('_')}
    argv = [args['<command>']] + args['<args>']
    if args['<command>'] not in help:
        exit("%r is not a dst command. See 'dst --help'." % args['<command>'])
    else:
        # Re-parse with the sub-command's own usage string.
        args = docopt(help[args['<command>']], argv=argv)
        dst = DataScienceToolbox()
        # Independent `if`s (not elif): assumes docopt exposes only the
        # parsed usage's own command word as a key, so at most one fires.
        if 'add' in args:
            dst.add(args['<name>'])
        if 'setup' in args:
            dst.setup(args['<name>'])
        if 'info' in args:
            dst.info(args['<name>'])
        if 'list' in args:
            dst.list()
        if 'update' in args:
            dst.update()
    return 0
if __name__ == "__main__":
exit(main())
|
Hateman31/wotreplays-RD | refs/heads/slides | spider.py | 2 | import os
import requests as r
from bs4 import BeautifulSoup as bs
class ConnectionError(r.exceptions.ConnectionError):
    """Module-level alias of requests' ConnectionError.

    NOTE(review): shadows Python 3's builtin ConnectionError inside this
    module and is unused within this file -- presumably intended for
    importers to catch.
    """
    pass
class Site:
    """A paginated replay listing: fetches pages and tracks paging state."""

    def __init__(self, url):
        self.url = url
        self.page = 1
        self.max_page_number = 0
        self.html = None
        self.openPage()

    def openPage(self):
        """Download the current URL and advance the paging state."""
        # If the download failed, report it and re-raise so the caller
        # can retry later.
        try:
            self.html = bs(r.get(self.url, timeout=30).content, "html5lib")
        except:
            print('Loading crash! Try later')
            raise
        if self.page == 1:
            # The total page count is only read from the first page.
            self.max_page_number = self.last_page_number()
        self.page += 1
        self.prepare_next_URL()

    def notLastPage(self):
        """Return True while there are more listing pages to fetch."""
        return self.page < self.max_page_number

    def prepare_next_URL(self):
        """Rewrite self.url so it points at the next listing page.

        BUG FIX: the previous code incremented only the last digit of
        the URL (int(self.url[-2]) + 1), which produced wrong URLs once
        the page number reached two digits (e.g. page 19 became 110).
        Now the whole trailing number is parsed and incremented.
        """
        if 'page' in self.url:
            base, number = self.url.rstrip('/').rsplit('/', 1)
            self.url = base + '/' + str(int(number) + 1) + '/'
        else:
            # First transition: append the page/2/ suffix.
            self.url = self.url + 'page/2/'

    def last_page_number(self):
        """Extract the total page count from the page's inline JS config."""
        css = 'script[type="text/javascript"]'
        text = self.html.select(css)[-1].text
        text = text.split('total:')[-1]
        return int(text.split(',')[0])
if __name__ == "__main__":
test_url = 'https://wotreplays.ru/site/index/version/43/tank/837/map/5/battle_type/1/sort/uploaded_at.desc/'
test = Site(test_url)
test.openPage()
print(test.last_page_number())
|
BellScurry/gem5-fault-injection | refs/heads/master | src/arch/x86/isa/insts/simd64/integer/data_transfer/move_non_temporal.py | 88 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
# MOVNTQ
def macroop MASKMOVQ_MMX_MMX {
ldfp ufp1, ds, [1, t0, rdi], dataSize=8
maskmov ufp1, mmx, mmxm, size=1
stfp ufp1, ds, [1, t0, rdi], dataSize=8
};
'''
|
rysson/filmkodi | refs/heads/master | plugin.video.fanfilm/resources/lib/resolvers/promptfile.py | 2 | # -*- coding: utf-8 -*-
'''
FanFilm Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib
from resources.lib.libraries import client
def resolve(url):
    # Resolve a promptfile.com page into a direct download URL, or None
    # on any failure. The bare except makes this deliberately
    # best-effort: callers treat None as "could not resolve".
    try:
        # Fetch the page and re-submit its hidden form fields to get past
        # the confirmation step.
        result = client.request(url)
        post = {}
        f = client.parseDOM(result, 'form', attrs = {'method': 'post'})[0]
        k = client.parseDOM(f, 'input', ret='name', attrs = {'type': 'hidden'})
        for i in k: post.update({i: client.parseDOM(f, 'input', ret='value', attrs = {'name': i})[0]})
        result = client.request(url, post=post)
        # The confirmed page carries the direct link; follow it and return
        # the final (post-redirect) URL.
        url = client.parseDOM(result, 'a', ret='href', attrs = {'class': 'view_dl_link'})[0]
        url = client.request(url, output='geturl', post=post)
        # NOTE(review): debug print left in; consider removing or routing
        # through a logger.
        print("URL",url)
        return url
    except:
        return
|
nugget/home-assistant | refs/heads/dev | homeassistant/components/google/calendar.py | 2 | """Support for Google Calendar Search binary sensors."""
import logging
from datetime import timedelta
from homeassistant.components.calendar import CalendarEventDevice
from homeassistant.components.google import (
CONF_CAL_ID, CONF_ENTITIES, CONF_TRACK, TOKEN_FILE,
CONF_IGNORE_AVAILABILITY, CONF_SEARCH,
GoogleCalendarService)
from homeassistant.util import Throttle, dt
_LOGGER = logging.getLogger(__name__)
DEFAULT_GOOGLE_SEARCH_PARAMS = {
'orderBy': 'startTime',
'maxResults': 5,
'singleEvents': True,
}
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15)
def setup_platform(hass, config, add_entities, disc_info=None):
    """Set up the calendar platform for event devices."""
    # Only discovery-based setup is supported.
    if disc_info is None:
        return
    # Nothing to do unless at least one discovered entity is tracked.
    if not any(data[CONF_TRACK] for data in disc_info[CONF_ENTITIES]):
        return
    calendar_service = GoogleCalendarService(hass.config.path(TOKEN_FILE))
    # One event device per tracked entity; all share one service client.
    add_entities([GoogleCalendarEventDevice(hass, calendar_service,
                                            disc_info[CONF_CAL_ID], data)
                  for data in disc_info[CONF_ENTITIES] if data[CONF_TRACK]])
class GoogleCalendarEventDevice(CalendarEventDevice):
    """A calendar event device."""
    def __init__(self, hass, calendar_service, calendar, data):
        """Create the Calendar event device."""
        # Each device owns its own data helper; the search term and the
        # availability flag come from the per-entity config dict.
        self.data = GoogleCalendarData(calendar_service, calendar,
                                       data.get(CONF_SEARCH),
                                       data.get(CONF_IGNORE_AVAILABILITY))
        super().__init__(hass, data)
    async def async_get_events(self, hass, start_date, end_date):
        """Get all events in a specific time frame."""
        # Delegates to the data helper (runs the blocking query in the
        # executor).
        return await self.data.async_get_events(hass, start_date, end_date)
class GoogleCalendarData:
    """Class to utilize calendar service object to get next event."""

    def __init__(self, calendar_service, calendar_id, search,
                 ignore_availability):
        """Set up how we are going to search the google calendar."""
        self.calendar_service = calendar_service
        self.calendar_id = calendar_id
        self.search = search
        self.ignore_availability = ignore_availability
        self.event = None

    def _prepare_query(self):
        """Return a (service, params) pair, or False when offline.

        BUG FIX: callers previously unpacked the return value directly,
        so the documented False return (connection failure) crashed with
        a TypeError instead of falling back to cached data.
        """
        # pylint: disable=import-error
        from httplib2 import ServerNotFoundError
        try:
            service = self.calendar_service.get()
        except ServerNotFoundError:
            _LOGGER.warning("Unable to connect to Google, using cached data")
            return False
        params = dict(DEFAULT_GOOGLE_SEARCH_PARAMS)
        params['calendarId'] = self.calendar_id
        if self.search:
            params['q'] = self.search
        return service, params

    def _keep_event(self, item):
        """Return True if the event should be reported.

        Unless availability is ignored, events explicitly marked
        'transparent' (free) are dropped; 'opaque' (busy) and unmarked
        events are kept.
        """
        if self.ignore_availability or 'transparency' not in item.keys():
            return True
        return item['transparency'] == 'opaque'

    async def async_get_events(self, hass, start_date, end_date):
        """Get all events in a specific time frame."""
        query = await hass.async_add_job(self._prepare_query)
        if query is False:
            # Offline (already logged): report no events rather than
            # crashing on tuple unpacking.
            return []
        service, params = query
        params['timeMin'] = start_date.isoformat('T')
        params['timeMax'] = end_date.isoformat('T')
        events = await hass.async_add_job(service.events)
        result = await hass.async_add_job(events.list(**params).execute)
        items = result.get('items', [])
        return [item for item in items if self._keep_event(item)]

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Get the latest data."""
        query = self._prepare_query()
        if query is False:
            # Keep the previously cached self.event and report failure.
            return False
        service, params = query
        params['timeMin'] = dt.now().isoformat('T')
        events = service.events()
        result = events.list(**params).execute()
        items = result.get('items', [])
        # Results are ordered by start time, so the first kept item is
        # the next relevant event.
        self.event = next(
            (item for item in items if self._keep_event(item)), None)
        return True
|
Andrew-Katcha/storyteller | refs/heads/master | env/lib/python3.4/site-packages/pip/exceptions.py | 344 | """Exceptions used throughout package"""
from __future__ import absolute_import
from itertools import chain, groupby, repeat
from pip._vendor.six import iteritems
# --- Simple pip error hierarchy --------------------------------------------
# Leaf classes carry no behavior; callers distinguish failures by type.
class PipError(Exception):
    """Base pip exception"""
class InstallationError(PipError):
    """General exception during installation"""
class UninstallationError(PipError):
    """General exception during uninstallation"""
class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""
class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""
class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""
class BadCommand(PipError):
    """Raised when virtualenv or a command is not found"""
class CommandError(PipError):
    """Raised when there is an error in command-line arguments"""
class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory"""
class InvalidWheelFilename(InstallationError):
    """Invalid wheel filename."""
class UnsupportedWheel(InstallationError):
    """Unsupported wheel."""
class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self):
        self.errors = []

    def append(self, error):
        """Collect one HashError for later aggregated reporting."""
        self.errors.append(error)

    def __str__(self):
        # Group errors of the same class under a single heading; sorting
        # by `order` first keeps the report deterministic and puts the
        # hardest-to-recover problems first (lower order = harder).
        lines = []
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        # BUG FIX: previously fell off the end (returning None) when
        # there were no errors, which made str() raise TypeError;
        # __str__ must always return a string.
        return '\n'.join(lines)

    def __nonzero__(self):
        # Python 2 truth protocol: truthy iff any errors were collected.
        return bool(self.errors)

    def __bool__(self):
        # Python 3 truth protocol delegates to the Python 2 spelling.
        return self.__nonzero__()
class HashError(InstallationError):
    """Base class for failures to verify a package against known-good hashes.

    :cvar order: Sort key ranking hash exception classes by difficulty
        of recovery (lower is harder), so reports surface the deepest
        problems (e.g. VCS dependencies) before unpinned packages, and
        stay in a deterministic order.
    :cvar head: Section heading displayed once above potentially many
        exceptions of the same class.
    :ivar req: The InstallRequirement that triggered this error; pasted
        on after the exception is instantiated, because it's not
        typically available earlier.
    """

    req = None
    head = ''

    def body(self):
        """Return a summary of me for display under the heading.

        The default is simply a description of the triggering
        requirement (populate_link() having already been called on it).
        """
        return ' %s' % self._requirement_name()

    def __str__(self):
        return '%s\n%s' % (self.head, self.body())

    def _requirement_name(self):
        """Describe the requirement that triggered this error."""
        if self.req:
            return str(self.req)
        return 'unknown package'
class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""
    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")
class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a file:// requirement that points to a
    directory, but we don't have a method for hashing directories."""
    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")
class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""
    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')
    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash
    def body(self):
        """Return a requirements-file-ready line with the observed hash."""
        from pip.utils.hashes import FAVORITE_HASH # Dodge circular import.
        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            package = (self.req.original_link if self.req.original_link
                       # In case someone feeds something downright stupid
                       # to InstallRequirement's constructor.
                       else getattr(self.req, 'req', None))
        return ' %s --hash=%s:%s' % (package or 'unknown package',
                                     FAVORITE_HASH,
                                     self.gotten_hash)
class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""
    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')
class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raised
        to improve its error message.
    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        """Return the requirement name plus the hash comparison table."""
        return ' %s:\n%s' % (self._requirement_name(),
                             self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::
            Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
            or 123451234512345123451234512345123451234512345
            Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat(' or'))

        lines = []
        for hash_name, expecteds in iteritems(self.allowed):
            # The algorithm name labels the first expected digest; later
            # ones get an 'or' continuation from the infinite iterator.
            prefix = hash_then_or(hash_name)
            lines.extend((' Expected %s %s' % (next(prefix), e))
                         for e in expecteds)
            lines.append(' Got %s\n' %
                         self.gots[hash_name].hexdigest())
            # (A dead `prefix = ' or'` reassignment that followed here was
            # removed: prefix is rebound at the top of each iteration and
            # never read after the loop.)
        return '\n'.join(lines)
# NOTE(review): presumably raised when a candidate's Requires-Python
# metadata excludes the running interpreter -- raising sites are outside
# this file.
class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata."""
|
DaniilLeksin/theblog | refs/heads/master | env/lib/python2.7/site-packages/django/utils/dates.py | 115 | "Commonly-used date structures"
from django.utils.translation import ugettext_lazy as _, pgettext_lazy
WEEKDAYS = {
0: _('Monday'), 1: _('Tuesday'), 2: _('Wednesday'), 3: _('Thursday'), 4: _('Friday'),
5: _('Saturday'), 6: _('Sunday')
}
WEEKDAYS_ABBR = {
0: _('Mon'), 1: _('Tue'), 2: _('Wed'), 3: _('Thu'), 4: _('Fri'),
5: _('Sat'), 6: _('Sun')
}
WEEKDAYS_REV = {
'monday': 0, 'tuesday': 1, 'wednesday': 2, 'thursday': 3, 'friday': 4,
'saturday': 5, 'sunday': 6
}
MONTHS = {
1: _('January'), 2: _('February'), 3: _('March'), 4: _('April'), 5: _('May'), 6: _('June'),
7: _('July'), 8: _('August'), 9: _('September'), 10: _('October'), 11: _('November'),
12: _('December')
}
MONTHS_3 = {
1: _('jan'), 2: _('feb'), 3: _('mar'), 4: _('apr'), 5: _('may'), 6: _('jun'),
7: _('jul'), 8: _('aug'), 9: _('sep'), 10: _('oct'), 11: _('nov'), 12: _('dec')
}
MONTHS_3_REV = {
'jan': 1, 'feb': 2, 'mar': 3, 'apr': 4, 'may': 5, 'jun': 6, 'jul': 7, 'aug': 8,
'sep': 9, 'oct': 10, 'nov': 11, 'dec': 12
}
MONTHS_AP = { # month names in Associated Press style
1: pgettext_lazy('abbrev. month', 'Jan.'),
2: pgettext_lazy('abbrev. month', 'Feb.'),
3: pgettext_lazy('abbrev. month', 'March'),
4: pgettext_lazy('abbrev. month', 'April'),
5: pgettext_lazy('abbrev. month', 'May'),
6: pgettext_lazy('abbrev. month', 'June'),
7: pgettext_lazy('abbrev. month', 'July'),
8: pgettext_lazy('abbrev. month', 'Aug.'),
9: pgettext_lazy('abbrev. month', 'Sept.'),
10: pgettext_lazy('abbrev. month', 'Oct.'),
11: pgettext_lazy('abbrev. month', 'Nov.'),
12: pgettext_lazy('abbrev. month', 'Dec.')
}
MONTHS_ALT = { # required for long date representation by some locales
1: pgettext_lazy('alt. month', 'January'),
2: pgettext_lazy('alt. month', 'February'),
3: pgettext_lazy('alt. month', 'March'),
4: pgettext_lazy('alt. month', 'April'),
5: pgettext_lazy('alt. month', 'May'),
6: pgettext_lazy('alt. month', 'June'),
7: pgettext_lazy('alt. month', 'July'),
8: pgettext_lazy('alt. month', 'August'),
9: pgettext_lazy('alt. month', 'September'),
10: pgettext_lazy('alt. month', 'October'),
11: pgettext_lazy('alt. month', 'November'),
12: pgettext_lazy('alt. month', 'December')
}
|
tiborsimko/invenio-testing | refs/heads/master | docs/_ext/ultramock.py | 164 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Hijacks `mock` to fake as many non-available modules as possible."""
import sys
import types
try:
import unittest.mock as mock
except ImportError:
import mock
# skip `_is_magic` check.
orig_is_magic = mock._is_magic # saved so deactivate() can restore it
def always_false(*args, **kwargs):
    """Stand-in for mock._is_magic that rejects every candidate name."""
    return False
# avoid spec configuration for mocked classes with super classes.
# honestly this does not happen very often and is kind of a tricky case.
orig_mock_add_spec = mock.NonCallableMock._mock_add_spec
def mock_add_spec_fake(self, spec, spec_set):
    """Replacement for _mock_add_spec that discards the spec arguments."""
    orig_mock_add_spec(self, None, None)
# special MagicMock with empty docs
class MyMagicMock(mock.MagicMock):
    # The empty docstring keeps mocked objects from contributing
    # MagicMock's own inherited documentation (this module is a docs
    # build helper).
    """"""
# set up a fake class-metaclass hierarchy
# NOTE(review): `__metaclass__` is Python 2 syntax; under Python 3 it is
# just an ordinary class attribute with no metaclass effect.
class SuperMockMetaMeta(MyMagicMock):
    __metaclass__ = MyMagicMock()
class SuperMockMeta(MyMagicMock):
    __metaclass__ = SuperMockMetaMeta
class SuperMock(MyMagicMock):
    # The object handed out for every attribute of a MockedModule.
    __metaclass__ = SuperMockMeta
class MockedModule(types.ModuleType):
    """A fake module that hands out SuperMock for any missing attribute.

    Instances register themselves in sys.modules so later imports of
    the same name resolve to the mock.
    """

    def __init__(self, name):
        # BUG FIX: the original called super(types.ModuleType, ...)
        # (skipping ModuleType.__init__) and then set __name__ to
        # `super.__name__`, i.e. the literal string 'super' -- so every
        # mocked module was named and registered in sys.modules as
        # 'super'. Initialize the module properly and register it under
        # its real name instead.
        super(MockedModule, self).__init__(name)
        self.__file__ = name.replace('.', '/') + '.py'
        sys.modules[name] = self

    def __getattr__(self, key):
        # A missing attribute materializes as SuperMock and is cached on
        # the module so subsequent lookups are plain attribute hits.
        obj = SuperMock
        setattr(self, key, obj)
        return obj
# overwrite imports
orig_import = __import__
def import_mock(name, *args, **kwargs):
    """__import__ replacement: fall back to a MockedModule when the real
    import fails."""
    try:
        return orig_import(name, *args, **kwargs)
    except ImportError:
        return MockedModule(name)
# The patch is started/stopped by activate()/deactivate().
# NOTE(review): '__builtin__' targets Python 2; a Python 3 port would
# need to patch 'builtins.__import__' instead.
import_patch = mock.patch('__builtin__.__import__', side_effect=import_mock)
# public methods
def activate():
    """Install the monkey-patches: magic-name check, spec adding, import."""
    mock._is_magic = always_false
    mock.NonCallableMock._mock_add_spec = mock_add_spec_fake
    import_patch.start()
def deactivate():
    """Undo activate(), restoring the saved originals in reverse order."""
    import_patch.stop()
    mock.NonCallableMock._mock_add_spec = orig_mock_add_spec
    mock._is_magic = orig_is_magic
|
Harunx9/2DXngine | refs/heads/master | thirdparty/googletest/googlemock/scripts/gmock_doctor.py | 346 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Converts compiler's errors in code using Google Mock to plain English."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import re
import sys
_VERSION = '1.0.3'
_EMAIL = 'googlemock@googlegroups.com'
_COMMON_GMOCK_SYMBOLS = [
# Matchers
'_',
'A',
'AddressSatisfies',
'AllOf',
'An',
'AnyOf',
'ContainerEq',
'Contains',
'ContainsRegex',
'DoubleEq',
'ElementsAre',
'ElementsAreArray',
'EndsWith',
'Eq',
'Field',
'FloatEq',
'Ge',
'Gt',
'HasSubstr',
'IsInitializedProto',
'Le',
'Lt',
'MatcherCast',
'Matches',
'MatchesRegex',
'NanSensitiveDoubleEq',
'NanSensitiveFloatEq',
'Ne',
'Not',
'NotNull',
'Pointee',
'Property',
'Ref',
'ResultOf',
'SafeMatcherCast',
'StartsWith',
'StrCaseEq',
'StrCaseNe',
'StrEq',
'StrNe',
'Truly',
'TypedEq',
'Value',
# Actions
'Assign',
'ByRef',
'DeleteArg',
'DoAll',
'DoDefault',
'IgnoreResult',
'Invoke',
'InvokeArgument',
'InvokeWithoutArgs',
'Return',
'ReturnNew',
'ReturnNull',
'ReturnRef',
'SaveArg',
'SetArgReferee',
'SetArgPointee',
'SetArgumentPointee',
'SetArrayArgument',
'SetErrnoAndReturn',
'Throw',
'WithArg',
'WithArgs',
'WithoutArgs',
# Cardinalities
'AnyNumber',
'AtLeast',
'AtMost',
'Between',
'Exactly',
# Sequences
'InSequence',
'Sequence',
# Misc
'DefaultValue',
'Mock',
]
# Regex for matching source file path and line number in the compiler's errors.
_GCC_FILE_LINE_RE = r'(?P<file>.*):(?P<line>\d+):(\d+:)?\s+'
_CLANG_FILE_LINE_RE = r'(?P<file>.*):(?P<line>\d+):(?P<column>\d+):\s+'
_CLANG_NON_GMOCK_FILE_LINE_RE = (
r'(?P<file>.*[/\\^](?!gmock-)[^/\\]+):(?P<line>\d+):(?P<column>\d+):\s+')
def _FindAllMatches(regex, s):
  """Returns an iterator over all matches of regex in string s."""
  # re.finditer compiles (and caches) the pattern internally, so this is
  # equivalent to compiling first and calling finditer on the result.
  return re.finditer(regex, s)
def _GenericDiagnoser(short_name, long_name, diagnoses, msg):
  """Diagnoses the given disease by pattern matching.

  Can provide different diagnoses for different patterns.

  Args:
    short_name: Short name of the disease.
    long_name: Long name of the disease.
    diagnoses: A list of pairs (regex, pattern for formatting the diagnosis
      for matching regex).
    msg: Compiler's error messages.
  Yields:
    Tuples of the form
    (short name of disease, long name of disease, diagnosis).
  """
  for regex, diagnosis in diagnoses:
    if not re.search(regex, msg):
      continue
    # Prefix every diagnosis with the source location captured by the
    # regex's named groups, then fill it in once per match.
    template = '%(file)s:%(line)s:' + diagnosis
    for match in _FindAllMatches(regex, msg):
      yield (short_name, long_name, template % match.groupdict())
def _NeedToReturnReferenceDiagnoser(msg):
  """Diagnoses the NRR disease, given the error messages by the compiler."""
  # Three variants: gcc, older clang, and a newer clang diagnostic whose
  # static-assert message names the fix directly (clang11_re).
  gcc_regex = (r'In member function \'testing::internal::ReturnAction<R>.*\n'
               + _GCC_FILE_LINE_RE + r'instantiated from here\n'
               r'.*gmock-actions\.h.*error: creating array with negative size')
  clang_regex = (r'error:.*array.*negative.*\r?\n'
                 r'(.*\n)*?' +
                 _CLANG_NON_GMOCK_FILE_LINE_RE +
                 r'note: in instantiation of function template specialization '
                 r'\'testing::internal::ReturnAction<(?P<type>.*)>'
                 r'::operator Action<.*>\' requested here')
  clang11_re = (r'use_ReturnRef_instead_of_Return_to_return_a_reference.*'
                r'(.*\n)*?' + _CLANG_NON_GMOCK_FILE_LINE_RE)
  diagnosis = """
You are using a Return() action in a function that returns a reference to
%(type)s. Please use ReturnRef() instead."""
  # Only the plain clang pattern captures the concrete type; the other
  # two substitute a generic placeholder up front.
  return _GenericDiagnoser('NRR', 'Need to Return Reference',
                           [(clang_regex, diagnosis),
                            (clang11_re, diagnosis % {'type': 'a type'}),
                            (gcc_regex, diagnosis % {'type': 'a type'})],
                           msg)
def _NeedToReturnSomethingDiagnoser(msg):
  """Diagnoses the NRS disease, given the error messages by the compiler."""
  gcc_regex = (_GCC_FILE_LINE_RE + r'(instantiated from here\n.'
               r'*gmock.*actions\.h.*error: void value not ignored)'
               r'|(error: control reaches end of non-void function)')
  # clang phrases the error two ways: with and without the 'Result' alias.
  clang_regex1 = (_CLANG_FILE_LINE_RE +
                  r'error: cannot initialize return object '
                  r'of type \'Result\' \(aka \'(?P<return_type>.*)\'\) '
                  r'with an rvalue of type \'void\'')
  clang_regex2 = (_CLANG_FILE_LINE_RE +
                  r'error: cannot initialize return object '
                  r'of type \'(?P<return_type>.*)\' '
                  r'with an rvalue of type \'void\'')
  diagnosis = """
You are using an action that returns void, but it needs to return
%(return_type)s. Please tell it *what* to return. Perhaps you can use
the pattern DoAll(some_action, Return(some_value))?"""
  return _GenericDiagnoser(
      'NRS',
      'Need to Return Something',
      [(gcc_regex, diagnosis % {'return_type': '*something*'}),
       (clang_regex1, diagnosis),
       (clang_regex2, diagnosis)],
      msg)
def _NeedToReturnNothingDiagnoser(msg):
  """Diagnoses the NRN disease, given the error messages by the compiler."""
  gcc_regex = (_GCC_FILE_LINE_RE + r'instantiated from here\n'
               r'.*gmock-actions\.h.*error: instantiation of '
               r'\'testing::internal::ReturnAction<R>::Impl<F>::value_\' '
               r'as type \'void\'')
  # clang_regex1 matches a plain Return(); clang_regex2 matches the
  # DoAll()/DoBothAction case.
  clang_regex1 = (r'error: field has incomplete type '
                  r'\'Result\' \(aka \'void\'\)(\r)?\n'
                  r'(.*\n)*?' +
                  _CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
                  r'of function template specialization '
                  r'\'testing::internal::ReturnAction<(?P<return_type>.*)>'
                  r'::operator Action<void \(.*\)>\' requested here')
  clang_regex2 = (r'error: field has incomplete type '
                  r'\'Result\' \(aka \'void\'\)(\r)?\n'
                  r'(.*\n)*?' +
                  _CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
                  r'of function template specialization '
                  r'\'testing::internal::DoBothAction<.*>'
                  r'::operator Action<(?P<return_type>.*) \(.*\)>\' '
                  r'requested here')
  diagnosis = """
You are using an action that returns %(return_type)s, but it needs to return
void. Please use a void-returning action instead.
All actions but the last in DoAll(...) must return void. Perhaps you need
to re-arrange the order of actions in a DoAll(), if you are using one?"""
  return _GenericDiagnoser(
      'NRN',
      'Need to Return Nothing',
      [(gcc_regex, diagnosis % {'return_type': '*something*'}),
       (clang_regex1, diagnosis),
       (clang_regex2, diagnosis)],
      msg)
def _IncompleteByReferenceArgumentDiagnoser(msg):
  """Diagnoses the IBRA disease, given the error messages by the compiler."""
  # The printer's sizeof() applied to an incomplete type is what actually
  # trips the compile; both regexes anchor on gtest-printers.h.
  gcc_regex = (_GCC_FILE_LINE_RE + r'instantiated from here\n'
               r'.*gtest-printers\.h.*error: invalid application of '
               r'\'sizeof\' to incomplete type \'(?P<type>.*)\'')
  clang_regex = (r'.*gtest-printers\.h.*error: invalid application of '
                 r'\'sizeof\' to an incomplete type '
                 r'\'(?P<type>.*)( const)?\'\r?\n'
                 r'(.*\n)*?' +
                 _CLANG_NON_GMOCK_FILE_LINE_RE +
                 r'note: in instantiation of member function '
                 r'\'testing::internal2::TypeWithoutFormatter<.*>::'
                 r'PrintValue\' requested here')
  diagnosis = """
In order to mock this function, Google Mock needs to see the definition
of type "%(type)s" - declaration alone is not enough. Either #include
the header that defines it, or change the argument to be passed
by pointer."""
  return _GenericDiagnoser('IBRA', 'Incomplete By-Reference Argument Type',
                           [(gcc_regex, diagnosis),
                            (clang_regex, diagnosis)],
                           msg)
def _OverloadedFunctionMatcherDiagnoser(msg):
  """Diagnoses the OFM disease, given the error messages by the compiler."""
  # gcc names the unresolved overload explicitly; clang's shorter message
  # is matched loosely on the Truly call.
  gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for '
               r'call to \'Truly\(<unresolved overloaded function type>\)')
  clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching function for '
                 r'call to \'Truly')
  diagnosis = """
The argument you gave to Truly() is an overloaded function. Please tell
your compiler which overloaded version you want to use.
For example, if you want to use the version whose signature is
  bool Foo(int n);
you should write
  Truly(static_cast<bool (*)(int n)>(Foo))"""
  return _GenericDiagnoser('OFM', 'Overloaded Function Matcher',
                           [(gcc_regex, diagnosis),
                            (clang_regex, diagnosis)],
                           msg)
def _OverloadedFunctionActionDiagnoser(msg):
  """Diagnoses the OFA disease, given the error messages by the compiler."""
  gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for call to '
               r'\'Invoke\(<unresolved overloaded function type>')
  # clang is disambiguated from the OMA case below by the 'FunctionImpl'
  # template-argument note.
  clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching '
                 r'function for call to \'Invoke\'\r?\n'
                 r'(.*\n)*?'
                 r'.*\bgmock-generated-actions\.h:\d+:\d+:\s+'
                 r'note: candidate template ignored:\s+'
                 r'couldn\'t infer template argument \'FunctionImpl\'')
  diagnosis = """
Function you are passing to Invoke is overloaded. Please tell your compiler
which overloaded version you want to use.
For example, if you want to use the version whose signature is
  bool MyFunction(int n, double x);
you should write something like
  Invoke(static_cast<bool (*)(int n, double x)>(MyFunction))"""
  return _GenericDiagnoser('OFA', 'Overloaded Function Action',
                           [(gcc_regex, diagnosis),
                            (clang_regex, diagnosis)],
                           msg)
def _OverloadedMethodActionDiagnoser(msg):
  """Diagnoses the OMA disease, given the error messages by the compiler."""
  # Two-argument Invoke(obj, method): clang's "but 2 were provided" note
  # distinguishes this from the free-function OFA case above.
  gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for '
               r'call to \'Invoke\(.+, <unresolved overloaded function '
               r'type>\)')
  clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching function '
                 r'for call to \'Invoke\'\r?\n'
                 r'(.*\n)*?'
                 r'.*\bgmock-generated-actions\.h:\d+:\d+: '
                 r'note: candidate function template not viable: '
                 r'requires .*, but 2 (arguments )?were provided')
  diagnosis = """
The second argument you gave to Invoke() is an overloaded method. Please
tell your compiler which overloaded version you want to use.
For example, if you want to use the version whose signature is
  class Foo {
    ...
    bool Bar(int n, double x);
  };
you should write something like
  Invoke(foo, static_cast<bool (Foo::*)(int n, double x)>(&Foo::Bar))"""
  return _GenericDiagnoser('OMA', 'Overloaded Method Action',
                           [(gcc_regex, diagnosis),
                            (clang_regex, diagnosis)],
                           msg)
def _MockObjectPointerDiagnoser(msg):
  """Diagnoses the MOP disease, given the error messages by the compiler.

  MOP: a *pointer* to a mock object was passed to ON_CALL()/EXPECT_CALL()
  where the mock object itself is required.
  """

  gcc_regex = (_GCC_FILE_LINE_RE + r'error: request for member '
               r'\'gmock_(?P<method>.+)\' in \'(?P<mock_object>.+)\', '
               r'which is of non-class type \'(.*::)*(?P<class_name>.+)\*\'')
  clang_regex = (_CLANG_FILE_LINE_RE + r'error: member reference type '
                 r'\'(?P<class_name>.*?) *\' is a pointer; '
                 r'(did you mean|maybe you meant) to use \'->\'\?')
  diagnosis = """
The first argument to ON_CALL() and EXPECT_CALL() must be a mock *object*,
not a *pointer* to it. Please write '*(%(mock_object)s)' instead of
'%(mock_object)s' as your first argument.
For example, given the mock class:
  class %(class_name)s : public ... {
    ...
    MOCK_METHOD0(%(method)s, ...);
  };
and the following mock instance:
  %(class_name)s* mock_ptr = ...
you should use the EXPECT_CALL like this:
  EXPECT_CALL(*mock_ptr, %(method)s(...));"""

  # Clang's message only captures the class name, so generic placeholders are
  # substituted for the mock object and method here; %(class_name)s is left in
  # place for _GenericDiagnoser to fill from the actual match.
  return _GenericDiagnoser(
      'MOP',
      'Mock Object Pointer',
      [(gcc_regex, diagnosis),
       (clang_regex, diagnosis % {'mock_object': 'mock_object',
                                  'method': 'method',
                                  'class_name': '%(class_name)s'})],
      msg)
def _NeedToUseSymbolDiagnoser(msg):
  """Diagnoses the NUS disease, given the error messages by the compiler.

  NUS: a Google Mock symbol is referenced without a
  'using testing::<symbol>;' declaration bringing it into scope.
  """
  gcc_pattern = (_GCC_FILE_LINE_RE + r'error: \'(?P<symbol>.+)\' '
                 r'(was not declared in this scope|has not been declared)')
  clang_pattern = (_CLANG_FILE_LINE_RE +
                   r'error: (use of undeclared identifier|unknown type name|'
                   r'no template named) \'(?P<symbol>[^\']+)\'')
  diagnosis = """
'%(symbol)s' is defined by Google Mock in the testing namespace.
Did you forget to write
  using testing::%(symbol)s;
?"""
  # Collect matches from both compilers, then keep only undeclared names that
  # are actually known Google Mock symbols.
  matches = list(_FindAllMatches(gcc_pattern, msg))
  matches.extend(_FindAllMatches(clang_pattern, msg))
  for match in matches:
    if match.groupdict()['symbol'] in _COMMON_GMOCK_SYMBOLS:
      yield ('NUS', 'Need to Use Symbol', diagnosis % match.groupdict())
def _NeedToUseReturnNullDiagnoser(msg):
  """Diagnoses the NRNULL disease, given the error messages by the compiler.

  NRNULL: Return(NULL) was used where the integral NULL cannot be
  converted to the mocked function's pointer return type.
  """

  gcc_regex = ('instantiated from \'testing::internal::ReturnAction<R>'
               '::operator testing::Action<Func>\(\) const.*\n' +
               _GCC_FILE_LINE_RE + r'instantiated from here\n'
               r'.*error: no matching function for call to \'ImplicitCast_\('
               r'(:?long )?int&\)')
  clang_regex = (r'\bgmock-actions.h:.* error: no matching function for '
                 r'call to \'ImplicitCast_\'\r?\n'
                 r'(.*\n)*?' +
                 _CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
                 r'of function template specialization '
                 r'\'testing::internal::ReturnAction<(int|long)>::operator '
                 r'Action<(?P<type>.*)\(\)>\' requested here')
  diagnosis = """
You are probably calling Return(NULL) and the compiler isn't sure how to turn
NULL into %(type)s. Use ReturnNull() instead.
Note: the line number may be off; please fix all instances of Return(NULL)."""

  # GCC's message does not reveal the target type, so a generic phrase is
  # substituted for %(type)s in that case.
  return _GenericDiagnoser(
      'NRNULL', 'Need to use ReturnNull',
      [(clang_regex, diagnosis),
       (gcc_regex, diagnosis % {'type': 'the right type'})],
      msg)
def _TypeInTemplatedBaseDiagnoser(msg):
  """Diagnoses the TTB disease, given the error messages by the compiler.

  TTB: a type or typedef defined in a templated base class is used without
  qualification, so C++ two-phase name lookup cannot find it.
  """

  # This version works when the type is used as the mock function's return
  # type.
  gcc_4_3_1_regex_type_in_retval = (
      r'In member function \'int .*\n' + _GCC_FILE_LINE_RE +
      r'error: a function call cannot appear in a constant-expression')
  gcc_4_4_0_regex_type_in_retval = (
      r'error: a function call cannot appear in a constant-expression'
      + _GCC_FILE_LINE_RE + r'error: template argument 1 is invalid\n')
  # This version works when the type is used as the mock function's sole
  # parameter type.
  gcc_regex_type_of_sole_param = (
      _GCC_FILE_LINE_RE +
      r'error: \'(?P<type>.+)\' was not declared in this scope\n'
      r'.*error: template argument 1 is invalid\n')
  # This version works when the type is used as a parameter of a mock
  # function that has multiple parameters.
  gcc_regex_type_of_a_param = (
      r'error: expected `;\' before \'::\' token\n'
      + _GCC_FILE_LINE_RE +
      r'error: \'(?P<type>.+)\' was not declared in this scope\n'
      r'.*error: template argument 1 is invalid\n'
      r'.*error: \'.+\' was not declared in this scope')
  clang_regex_type_of_retval_or_sole_param = (
      _CLANG_FILE_LINE_RE +
      r'error: use of undeclared identifier \'(?P<type>.*)\'\n'
      r'(.*\n)*?'
      r'(?P=file):(?P=line):\d+: error: '
      r'non-friend class member \'Result\' cannot have a qualified name'
      )
  clang_regex_type_of_a_param = (
      _CLANG_FILE_LINE_RE +
      r'error: C\+\+ requires a type specifier for all declarations\n'
      r'(.*\n)*?'
      r'(?P=file):(?P=line):(?P=column): error: '
      r'C\+\+ requires a type specifier for all declarations'
      )
  clang_regex_unknown_type = (
      _CLANG_FILE_LINE_RE +
      r'error: unknown type name \'(?P<type>[^\']+)\''
      )
  diagnosis = """
In a mock class template, types or typedefs defined in the base class
template are *not* automatically visible. This is how C++ works. Before
you can use a type or typedef named %(type)s defined in base class Base<T>, you
need to make it visible. One way to do it is:
  typedef typename Base<T>::%(type)s %(type)s;"""

  # Some patterns cannot capture the offending type name, so a placeholder
  # ('Foo') is substituted into the diagnosis up front for those.
  for diag in _GenericDiagnoser(
      'TTB', 'Type in Template Base',
      [(gcc_4_3_1_regex_type_in_retval, diagnosis % {'type': 'Foo'}),
       (gcc_4_4_0_regex_type_in_retval, diagnosis % {'type': 'Foo'}),
       (gcc_regex_type_of_sole_param, diagnosis),
       (gcc_regex_type_of_a_param, diagnosis),
       (clang_regex_type_of_retval_or_sole_param, diagnosis),
       (clang_regex_type_of_a_param, diagnosis % {'type': 'Foo'})],
      msg):
    yield diag
  # Avoid overlap with the NUS pattern.
  for m in _FindAllMatches(clang_regex_unknown_type, msg):
    type_ = m.groupdict()['type']
    if type_ not in _COMMON_GMOCK_SYMBOLS:
      yield ('TTB', 'Type in Template Base', diagnosis % m.groupdict())
def _WrongMockMethodMacroDiagnoser(msg):
  """Diagnoses the WMM disease, given the error messages by the compiler.

  WMM: MOCK_METHODn was used for a method that actually takes m != n
  arguments.
  """

  gcc_regex = (_GCC_FILE_LINE_RE +
               r'.*this_method_does_not_take_(?P<wrong_args>\d+)_argument.*\n'
               r'.*\n'
               r'.*candidates are.*FunctionMocker<[^>]+A(?P<args>\d+)\)>')
  clang_regex = (_CLANG_NON_GMOCK_FILE_LINE_RE +
                 r'error:.*array.*negative.*r?\n'
                 r'(.*\n)*?'
                 r'(?P=file):(?P=line):(?P=column): error: too few arguments '
                 r'to function call, expected (?P<args>\d+), '
                 r'have (?P<wrong_args>\d+)')
  # Clang 11 only emits the static-assert-style marker without the correct
  # argument count, so generic 'm'/'n' placeholders are substituted below.
  clang11_re = (_CLANG_NON_GMOCK_FILE_LINE_RE +
                r'.*this_method_does_not_take_'
                r'(?P<wrong_args>\d+)_argument.*')
  diagnosis = """
You are using MOCK_METHOD%(wrong_args)s to define a mock method that has
%(args)s arguments. Use MOCK_METHOD%(args)s (or MOCK_CONST_METHOD%(args)s,
MOCK_METHOD%(args)s_T, MOCK_CONST_METHOD%(args)s_T as appropriate) instead."""
  return _GenericDiagnoser('WMM', 'Wrong MOCK_METHODn Macro',
                           [(gcc_regex, diagnosis),
                            (clang11_re, diagnosis % {'wrong_args': 'm',
                                                      'args': 'n'}),
                            (clang_regex, diagnosis)],
                           msg)
def _WrongParenPositionDiagnoser(msg):
  """Diagnoses the WPP disease, given the error messages by the compiler.

  WPP: the closing parenthesis of ON_CALL/EXPECT_CALL was placed after a
  clause like .WillOnce(...), so the clause is parsed as a member of
  MockSpec's argument instead.
  """

  gcc_regex = (_GCC_FILE_LINE_RE +
               r'error:.*testing::internal::MockSpec<.* has no member named \''
               r'(?P<method>\w+)\'')
  clang_regex = (_CLANG_NON_GMOCK_FILE_LINE_RE +
                 r'error: no member named \'(?P<method>\w+)\' in '
                 r'\'testing::internal::MockSpec<.*>\'')
  diagnosis = """
The closing parenthesis of ON_CALL or EXPECT_CALL should be *before*
".%(method)s". For example, you should write:
  EXPECT_CALL(my_mock, Foo(_)).%(method)s(...);
instead of:
  EXPECT_CALL(my_mock, Foo(_).%(method)s(...));"""
  return _GenericDiagnoser('WPP', 'Wrong Parenthesis Position',
                           [(gcc_regex, diagnosis),
                            (clang_regex, diagnosis)],
                           msg)
# All known disease diagnosers, tried in this order by Diagnose().
_DIAGNOSERS = [
    _IncompleteByReferenceArgumentDiagnoser,
    _MockObjectPointerDiagnoser,
    _NeedToReturnNothingDiagnoser,
    _NeedToReturnReferenceDiagnoser,
    _NeedToReturnSomethingDiagnoser,
    _NeedToUseReturnNullDiagnoser,
    _NeedToUseSymbolDiagnoser,
    _OverloadedFunctionActionDiagnoser,
    _OverloadedFunctionMatcherDiagnoser,
    _OverloadedMethodActionDiagnoser,
    _TypeInTemplatedBaseDiagnoser,
    _WrongMockMethodMacroDiagnoser,
    _WrongParenPositionDiagnoser,
    ]
def Diagnose(msg):
  """Generates all possible diagnoses given the compiler error message."""
  # Strip ANSI color escape sequences so the regexes see plain text.
  msg = re.sub(r'\x1b\[[^m]*m', '', msg)
  # Assuming the string is using the UTF-8 encoding, replaces the left and
  # the right single quote characters with apostrophes.
  msg = re.sub(r'(\xe2\x80\x98|\xe2\x80\x99)', "'", msg)
  diagnoses = []
  for diagnoser in _DIAGNOSERS:
    for diag in diagnoser(msg):
      formatted = '[%s - %s]\n%s' % diag
      # De-duplicate while preserving the order diagnoses were found in.
      if formatted not in diagnoses:
        diagnoses.append(formatted)
  return diagnoses
def main():
  """Reads compiler errors from stdin, prints diagnoses and advice."""
  print ('Google Mock Doctor v%s - '
         'diagnoses problems in code using Google Mock.' % _VERSION)

  if sys.stdin.isatty():
    print ('Please copy and paste the compiler errors here. Press c-D when '
           'you are done:')
  else:
    print ('Waiting for compiler errors on stdin . . .')

  msg = sys.stdin.read().strip()
  diagnoses = Diagnose(msg)
  count = len(diagnoses)
  if not count:
    print ("""
Your compiler complained:
8<------------------------------------------------------------
%s
------------------------------------------------------------>8
Uh-oh, I'm not smart enough to figure out what the problem is. :-(
However...
If you send your source code and the compiler's error messages to
%s, you can be helped and I can get smarter --
win-win for us!""" % (msg, _EMAIL))
  else:
    print ('------------------------------------------------------------')
    # NOTE(review): the trailing comma looks like the Python 2 "suppress
    # newline" idiom; under a print *function* it has no effect -- confirm
    # which Python this script targets.
    print ('Your code appears to have the following',)
    if count > 1:
      print ('%s diseases:' % (count,))
    else:
      print ('disease:')
    i = 0
    for d in diagnoses:
      i += 1
      if count > 1:
        print ('\n#%s:' % (i,))
      print (d)
    print ("""
How did I do? If you think I'm wrong or unhelpful, please send your
source code and the compiler's error messages to %s.
Then you can be helped and I can get smarter -- I promise I won't be upset!""" %
           _EMAIL)


if __name__ == '__main__':
  main()
|
felipebetancur/metagoofil | refs/heads/master | hachoir_core/field/character.py | 74 | """
Character field class: a 8-bit character
"""
from hachoir_core.field import Bits
from hachoir_core.endian import BIG_ENDIAN
from hachoir_core.tools import makePrintable
class Character(Bits):
    """
    A 8-bit character using ASCII charset for display attribute.
    """
    # Every Character field occupies exactly one byte.
    static_size = 8

    def __init__(self, parent, name, description=None):
        # Delegate to Bits with a hard-coded size of 8 bits.
        Bits.__init__(self, parent, name, 8, description=description)

    def createValue(self):
        # Read one byte from the parent's stream at this field's absolute
        # position.  Endianness is irrelevant for a single byte; BIG_ENDIAN
        # is passed by convention.
        return chr(self._parent.stream.readBits(
            self.absolute_address, 8, BIG_ENDIAN))

    def createRawDisplay(self):
        # Raw display: the numeric byte value as text (Python 2 `unicode`).
        return unicode(Bits.createValue(self))

    def createDisplay(self):
        # Printable display: quoted ASCII representation of the character.
        return makePrintable(self.value, "ASCII", quote="'", to_unicode=True)
|
mhnatiuk/phd_sociology_of_religion | refs/heads/master | scrapper/build/Twisted/twisted/manhole/test/__init__.py | 52 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.manhole}.
"""
|
eleme/ruskit | refs/heads/master | ruskit/cmds/manage.py | 1 | # -*- coding: utf-8 -*-
import datetime
import redis
import pprint
from ruskit import cli
from ..cluster import Cluster, ClusterNode
from ..utils import echo
from ..distribute import print_cluster, gen_distribution
from ..utils import timeout_argument
from ..health import HealthCheckManager
@cli.command
@cli.argument("cluster")
@timeout_argument
def info(args):
    """Print a summary of the cluster: each master with its slot count and
    slaves, followed by global totals.
    """
    cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster))
    lines = []
    for master in cluster.masters:
        slave_addrs = ','.join([s["addr"] for s in master.slaves(master.name)])
        lines.append("{} {}:{} {} {}".format(
            master.name, master.host, master.port,
            len(master.slots), slave_addrs))
    echo("\n".join(lines))
    echo("Masters:", len(cluster.masters))
    echo("Instances:", len(cluster.nodes))
    echo("Slots:", sum(len(m.slots) for m in cluster.masters))
@cli.command
@cli.argument("cluster")
@timeout_argument
def slowlog(args):
    """Print the slow-log entries of every master in the cluster."""
    cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster))
    slow_logs = cluster.get_slow_logs()
    # NOTE: `iteritems` is Python 2 only; this module targets Python 2.
    for master, logs in slow_logs.iteritems():
        echo("Node: ", "%s:%s" % (master.host, master.port))
        for log in logs:
            # `start_time` is a unix timestamp; `duration` is in microseconds.
            time = datetime.datetime.fromtimestamp(log['start_time'])
            echo(
                "\t",
                time,
                "%s%s" % (log['duration'], "μs"),
                repr(log['command'])
            )
@cli.command
@cli.argument("cluster")
@timeout_argument
def fix(args):
    """Repair the cluster: resolve open slots, then assign unbound slots."""
    target = Cluster.from_node(ClusterNode.from_uri(args.cluster))
    target.fix_open_slots()
    target.fill_slots()
@cli.command
@cli.argument("cluster")
@cli.argument("nodes", nargs='+')
@timeout_argument
def delete(args):
    """Delete nodes from the cluster
    """
    victims = [ClusterNode.from_uri(uri) for uri in args.nodes]
    cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster))
    echo("Deleting...")
    for victim in victims:
        cluster.delete_node(victim)
    # Block until the cluster has converged on the new topology.
    cluster.wait()
@cli.command
@cli.argument("src")
@cli.argument("-d", "--dst")
@cli.argument("-s", "--slot", type=int)
@cli.argument("-c", "--count", type=int)
@cli.argument("-i", "--income", action="store_true")
@timeout_argument
@cli.pass_ctx
def migrate(ctx, args):
    """Move slots between nodes.

    Three modes, selected by the flags given:
      * --dst and --slot: move that single slot from src to dst.
      * --dst only: move `count` slots (default: all of src's) to dst.
      * neither: redistribute slots between src and the rest of the
        cluster; --income reverses the direction (src receives slots).
    """
    src = ClusterNode.from_uri(args.src)
    cluster = Cluster.from_node(src)
    if args.dst:
        dst = ClusterNode.from_uri(args.dst)
    if args.dst and args.slot is not None:
        try:
            cluster.migrate_slot(src, dst, args.slot, verbose=True)
        except redis.ResponseError as e:
            ctx.abort(str(e))
    elif args.dst:
        # Default to moving every slot currently owned by src.
        count = len(src.slots) if args.count is None else args.count
        cluster.migrate(src, dst, count)
    else:
        cluster.migrate_node(src, args.count, income=args.income)
    cluster.wait()
@cli.command
@cli.argument("cluster")
@timeout_argument
def reshard(args):
    """Balance slots in the cluster.
    This command will try its best to distribute slots equally.
    """
    entry = ClusterNode.from_uri(args.cluster)
    Cluster.from_node(entry).reshard()
@cli.command
@cli.argument("node")
@cli.argument("master")
@timeout_argument
@cli.pass_ctx
def replicate(ctx, args):
    """Make node to be the slave of a master.
    """
    slave = ClusterNode.from_uri(args.node)
    master = ClusterNode.from_uri(args.master)
    # Refuse to replicate from anything that is not currently a master.
    if not master.is_master():
        ctx.abort("Node {!r} is not a master.".format(args.master))
    try:
        slave.replicate(master.name)
    except redis.ResponseError as e:
        ctx.abort(str(e))
    # Wait for the new replication topology to settle.
    Cluster.from_node(master).wait()
@cli.command
@cli.argument("cluster")
@timeout_argument
def destroy(args):
    """Flush every master, then hard-reset every node in the cluster."""
    cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster))
    for master in cluster.masters:
        master.flushall()
    for member in cluster.nodes:
        member.reset(hard=True)
@cli.command
@cli.argument("cluster")
@timeout_argument
def flushall(args):
    """Execute flushall in all cluster nodes.
    """
    # Only masters are flushed directly; presumably slaves pick the flush up
    # via replication -- confirm against Cluster semantics.
    cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster))
    for master in cluster.masters:
        master.flushall()
@cli.command
@cli.argument("cluster")
@cli.argument("name")
@cli.argument("value")
@cli.argument("--config-command", default="config")
@cli.argument("--rewrite", action="store_true")
@timeout_argument
@cli.pass_ctx
def reconfigure(ctx, args):
    """Set a configuration parameter on every node of the cluster.

    --config-command supports renamed CONFIG commands; --rewrite persists
    the change to each node's config file (CONFIG REWRITE).
    """
    cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster))
    if not cluster:
        ctx.abort("Cluster not exists")
    for node in cluster.nodes:
        echo("Setting `%s` of `%s` to `%s`" % (args.name, node, args.value))
        node.execute_command(args.config_command + " SET",
                             args.name, args.value)
        if args.rewrite:
            node.execute_command(args.config_command + " REWRITE")
@cli.command
@cli.argument("cluster")
@cli.argument("command", nargs='+')
@timeout_argument
@cli.pass_ctx
def cmd(ctx, args):
    """Run an arbitrary command on every node and print each node's reply."""
    cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster))
    command = args.command
    for n in cluster.nodes:
        # Python 2 print statement; this module is Python 2 only.
        print n.execute_command(*command)
@cli.command
@cli.argument("cluster")
@timeout_argument
@cli.pass_ctx
def peek(ctx, args):
    """Print the slot distribution of a consistent cluster."""
    cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster))
    # Refuse to render a distribution while nodes disagree about slots.
    if not cluster.consistent():
        ctx.abort("Cluster not consistent.")
    print_cluster(gen_distribution(cluster.nodes, []))
@cli.command
@cli.argument("nodes", nargs='+')
@timeout_argument
@cli.pass_ctx
def check(ctx, args):
    """Run health checks on the given nodes and print any differences found."""
    nodes = [ClusterNode.from_uri(n) for n in args.nodes]
    report = HealthCheckManager(nodes).check()
    # A report of None means every check passed.
    if report is None:
        # Python 2 print statements; this module is Python 2 only.
        print 'cluster is healthy'
        return
    for check_name, diff in report.iteritems():
        print '#' * 30
        print check_name
        print '#' * 30
        pprint.pprint(diff)
|
mozilla/verbatim | refs/heads/master | vendor/lib/python/translate/lang/hy.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2007-2008, 2011 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""This module represents the Armenian language.
.. seealso:: http://en.wikipedia.org/wiki/Armenian_language
"""
import re
from translate.lang import common
class hy(common.Common):
    """This class represents Armenian."""

    # Armenian-specific punctuation characters.
    armenianpunc = u"։՝՜՞"

    # Full punctuation set: common + quotes + misc + Armenian marks.
    punctuation = u"".join([common.Common.commonpunc, common.Common.quotes,
                            common.Common.miscpunc, armenianpunc])

    # Characters that may terminate a sentence.
    sentenceend = u"։՝՜…"

    # NOTE: `ur"..."` is a Python 2 raw-unicode literal (invalid in Python 3).
    sentencere = re.compile(ur"""
        (?s)        # make . also match newlines
        .*?         # anything, but match non-greedy
        [%s]        # the puntuation for sentence ending
        \s+         # the spacing after the puntuation
        (?=[^a-zա-ֆ\d]) # lookahead that next part starts with caps
        """ % sentenceend, re.VERBOSE | re.UNICODE
    )

    # Mapping from Latin punctuation to the Armenian equivalents, used when
    # converting punctuation during translation.
    puncdict = {
        u".": u"։",
        u":": u"՝",
        u"!": u"՜",
        u"?": u"՞",
    }

    # Capitalisation checks do not apply to Armenian.
    ignoretests = ["startcaps", "simplecaps"]
|
douggeiger/gnuradio | refs/heads/master | gr-blocks/python/blocks/qa_udp_source_sink.py | 40 | #!/usr/bin/env python
#
# Copyright 2008,2010,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, blocks
import os
from threading import Timer
class test_udp_sink_source(gr_unittest.TestCase):
    """QA tests for blocks.udp_sink / blocks.udp_source round trips."""

    def setUp(self):
        # Disable ControlPort so the QA run opens no extra sockets.
        os.environ['GR_CONF_CONTROLPORT_ON'] = 'False'
        # Independent flowgraphs for the sending and receiving sides.
        self.tb_snd = gr.top_block()
        self.tb_rcv = gr.top_block()

    def tearDown(self):
        self.tb_rcv = None
        self.tb_snd = None

    def test_001(self):
        # Tests calling disconnect/reconnect.
        port = 65510
        n_data = 16
        src_data = [x for x in range(n_data)]
        expected_result = tuple(src_data)
        src = blocks.vector_source_s(src_data, False)
        udp_snd = blocks.udp_sink(gr.sizeof_short, 'localhost', port)
        self.tb_snd.connect(src, udp_snd)
        self.tb_snd.run()
        # Re-point the sink at a different port and send the data again.
        udp_snd.disconnect()
        udp_snd.connect('localhost', port+1)
        src.rewind()
        self.tb_snd.run()

    def test_002(self):
        # Float round trip: sink on one flowgraph, source on another.
        port = 65520
        n_data = 100
        src_data = [float(x) for x in range(n_data)]
        expected_result = tuple(src_data)
        src = blocks.vector_source_f(src_data, False)
        udp_snd = blocks.udp_sink(gr.sizeof_float, 'localhost', port)
        self.tb_snd.connect(src, udp_snd)
        udp_rcv = blocks.udp_source(gr.sizeof_float, 'localhost', port)
        dst = blocks.vector_sink_f()
        self.tb_rcv.connect(udp_rcv, dst)
        self.tb_rcv.start()
        self.tb_snd.run()
        udp_snd.disconnect()
        # Watchdog: stop the receiver if it has not finished within 2 s.
        self.timeout = False
        q = Timer(2.0,self.stop_rcv)
        q.start()
        self.tb_rcv.wait()
        q.cancel()
        result_data = dst.data()
        self.assertEqual(expected_result, result_data)
        # The receiver must have stopped on its own, not via the watchdog.
        self.assert_(not self.timeout)

    def test_003(self):
        # Let the source pick its own port (bind to 0) and connect to it.
        port = 65530
        udp_rcv = blocks.udp_source(gr.sizeof_float, '0.0.0.0', 0, eof=False)
        rcv_port = udp_rcv.get_port()
        udp_snd = blocks.udp_sink(gr.sizeof_float, '127.0.0.1', port)
        udp_snd.connect('127.0.0.1', rcv_port)
        n_data = 16
        src_data = [float(x) for x in range(n_data)]
        expected_result = tuple(src_data)
        src = blocks.vector_source_f(src_data)
        dst = blocks.vector_sink_f()
        self.tb_snd.connect(src, udp_snd)
        self.tb_rcv.connect(udp_rcv, dst)
        self.tb_rcv.start()
        self.tb_snd.run()
        udp_snd.disconnect()
        self.timeout = False
        q = Timer(2.0,self.stop_rcv)
        q.start()
        self.tb_rcv.wait()
        q.cancel()
        result_data = dst.data()
        self.assertEqual(expected_result, result_data)
        # With eof=False the source never stops by itself, so here the
        # watchdog timeout is expected to fire.
        self.assert_(self.timeout) # source ignores EOF?

    def stop_rcv(self):
        # Timer callback: record the timeout and force the receiver to stop.
        self.timeout = True
        self.tb_rcv.stop()
        #print "tb_rcv stopped by Timer"


if __name__ == '__main__':
    gr_unittest.run(test_udp_sink_source, "test_udp_sink_source.xml")
|
40223249-1/2015cd_midterm | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/http/__init__.py | 1383 | # This directory is a Python package.
|
crepererum/invenio | refs/heads/master | invenio/modules/indexer/tokenizers/BibIndexDOITokenizer.py | 12 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.modules.indexer.tokenizers.BibIndexFilteringTokenizer import BibIndexFilteringTokenizer
class BibIndexDOITokenizer(BibIndexFilteringTokenizer):
    """
    Filtering tokenizer which tokenizes DOI tag (0247_a)
    only if "0247_2" tag is present and its value equals "DOI"
    and 909C4a tag without any constraints.
    """

    def __init__(self, stemming_language=None, remove_stopwords=False, remove_html_markup=False, remove_latex_markup=False):
        # NOTE(review): the base-class __init__ is not called and all keyword
        # arguments are ignored; only the filtering rules are set.  Each rule
        # is (tag, subfield-code, required-value); empty code/value means
        # "no constraint".
        self.rules = (('0247_a', '2', 'DOI'), ('909C4a', '', ''))

    def get_tokenizing_function(self, wordtable_type):
        """Returns proper tokenizing function"""
        # `tokenize` is inherited from BibIndexFilteringTokenizer.
        return self.tokenize

    def tokenize_via_recjson(self, recID):
        """
        Nonmarc version of tokenize function for DOI.
        Note: with nonmarc we don't need to filter anymore.
        We just need to take value from record because we
        use bibfield here.
        """
        # NOTE(review): `get_record` is not imported in this module as shown;
        # presumably supplied by an import outside this view -- confirm,
        # otherwise this raises NameError at runtime.
        rec = get_record(recID)
        values = rec.get('doi', [])
        return values

    def get_nonmarc_tokenizing_function(self, table_type):
        """
        Returns proper tokenizing function for non-marc records.
        """
        return self.tokenize_via_recjson
|
amunozf/legibilidad | refs/heads/master | legibilidad.py | 1 | # Legibilidad 2 (beta)
# Averigua la legibilidad de un texto
# Spanish readability calculations
# © 2016 Alejandro Muñoz Fernández
#This program is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import statistics
def count_letters(text):
    '''
    Text letter count.
    Returns 1 (not 0) for letterless input so callers can divide safely.
    '''
    total = sum(1 for char in text if char.isalpha())
    return total if total else 1
def letter_dict(text):
    '''
    Letter count dictionary: maps each (lower-cased, de-accented) letter
    to its number of occurrences.
    '''
    lowered = text.lower()
    # Fold Spanish accented vowels onto their plain counterparts.
    for accented, plain in {'á': 'a', 'é': 'e', 'í': 'i',
                            'ó': 'o', 'ú': 'u', 'ü': 'u'}.items():
        lowered = lowered.replace(accented, plain)
    counts = {}
    for ch in lowered:
        if ch.isalpha():
            counts[ch] = counts.get(ch, 0) + 1
    return counts
def count_words(text):
    '''
    Text word count.
    Digits are dropped first; returns 1 for wordless input to avoid
    zero division in the score formulas.
    '''
    stripped = ''.join(ch for ch in text if not ch.isdigit())
    stripped = re.sub(r'\W+', ' ', stripped).strip()
    words = stripped.split()
    return len(words) if words else 1
def textdict(wordlist):
    '''
    Dictionary of word counts, keyed by the lower-cased word.

    Bug fix: the original looked up the count with the *original-case*
    word (`worddict.get(word, 0)`) while storing under the lower-cased
    key, so occurrences were lost whenever a capitalized form appeared
    after a lower-case one (e.g. "hola Hola" counted as 1, not 2).
    '''
    # Drop digits, then collapse every non-word run into a single space.
    wordlist = ''.join(filter(lambda x: not x.isdigit(), wordlist))
    clean = re.compile('\W+')
    words = clean.sub(' ', wordlist).strip().split()
    worddict = dict()
    for word in words:
        key = word.lower()
        worddict[key] = worddict.get(key, 0) + 1
    return worddict
def count_sentences(text):
    '''
    Sentence count: splits on sentence-ending punctuation and parentheses.
    Returns 1 for sentence-less input to avoid zero division.
    '''
    flat = text.replace("\n", "")
    parts = [p for p in re.split('[.:;!?\)\()]', flat) if p]
    return len(parts) if parts else 1
def count_paragraphs(text):
    '''
    Paragraph count: strips HTML tags, then counts non-empty lines.
    Returns 1 for paragraph-less input to avoid zero division.
    '''
    stripped = re.sub('<[^>]*>', '', text)
    paragraphs = [p for p in stripped.split('\n') if p]
    return len(paragraphs) if paragraphs else 1
def numbers2words(text):
    '''
    Converts figures into words (e.g. 2 to two) and lower-cases every word.

    Bug fix: the original tested `type(word) == "int"`, which compares a
    type object against the *string* "int" and is therefore never true,
    so every number was converted with float() — integers included.
    Numbers without a decimal point are now converted with int().
    The regex is also compiled once, outside the loop.
    '''
    import nal
    numeric = re.compile("^[\-]?[1-9][0-9]*\.?[0-9]+$")
    new_text = []
    for word in text.split():
        if numeric.match(word):
            # Integers go through int() so nal receives the right type.
            value = float(word) if '.' in word else int(word)
            word = nal.to_word(value)
        new_text.append(word.lower())
    return ' '.join(new_text)
def count_syllables(word):
    '''
    Word syllable count, delegated to the separasilabas hyphenator.
    '''
    import separasilabas
    cleaned = re.sub(r'\W+', '', word)
    return len(separasilabas.silabizer()(cleaned))
def count_all_syllables(text):
    '''
    Syllable count for the whole text.
    Returns 1 for empty input to avoid zero division downstream.
    '''
    stripped = ''.join(ch for ch in text if not ch.isdigit())
    words = re.sub(r'\W+', ' ', stripped).strip().split()
    total = sum(count_syllables(word) for word in words)
    return total if total else 1
def Pval(text):
    '''
    Syllables-per-word mean (P value)
    '''
    converted = numbers2words(text)
    return round(count_all_syllables(converted) / count_words(converted), 2)
def Fval(text):
    '''
    Words-per-sentence mean (F value)
    '''
    word_total = count_words(numbers2words(text))
    return round(word_total / count_sentences(text), 2)
def fernandez_huerta(text):
    '''
    Fernández Huerta readability score
    '''
    score = 206.84 - 60 * Pval(text) - 1.02 * Fval(text)
    return round(score, 2)
def szigriszt_pazos(text):
    '''
    Szigriszt Pazos readability score (1992)
    '''
    converted = numbers2words(text)
    syllables_per_word = count_all_syllables(converted) / count_words(converted)
    words_per_sentence = count_words(converted) / count_sentences(text)
    return round(206.835 - 62.3 * syllables_per_word - words_per_sentence, 2)
def gutierrez(text):
    '''
    Gutiérrez de Polini's readability score (1972)
    '''
    letters_per_word = count_letters(text) / count_words(text)
    words_per_sentence = count_words(text) / count_sentences(text)
    return round(95.2 - 9.7 * letters_per_word - 0.35 * words_per_sentence, 2)
def mu(text):
    '''
    Muñoz Baquedano and Muñoz Urra's readability score (2006).

    Returns 0 when the score is undefined (fewer than two words, all
    words the same length, or a single-word text).

    Bug fix: the original used a bare `except:` that silently swallowed
    *every* exception, hiding real bugs.  Only the failures that can
    legitimately occur are caught now: statistics.StatisticsError
    (mean needs >= 1 value, variance >= 2) and ZeroDivisionError
    (variance == 0, or n == 1 making n - 1 zero).
    '''
    n = count_words(text)
    # Delete all digits, then collapse non-word runs into single spaces.
    stripped = ''.join(ch for ch in text if not ch.isdigit())
    words = re.sub(r'\W+', ' ', stripped).strip().split()
    word_lengths = [len(word) for word in words]
    try:
        mean = statistics.mean(word_lengths)
        variance = statistics.variance(word_lengths)
        return round((n / (n - 1)) * (mean / variance) * 100, 2)
    except (statistics.StatisticsError, ZeroDivisionError):
        return 0
def crawford(text):
    '''
    Crawford's readability formula.
    Returns the estimated school years needed to understand the text.
    '''
    sentences = count_sentences(text)
    converted = numbers2words(text)
    words = count_words(converted)
    syllables = count_all_syllables(converted)
    sentences_per_100 = 100 * sentences / words  # mean per 100 words
    syllables_per_100 = 100 * syllables / words  # mean per 100 words
    years = -0.205 * sentences_per_100 + 0.049 * syllables_per_100 - 3.407
    return round(years, 1)
def interpretaP(P):
    '''
    Szigriszt-Pazos score interpretation: maps a numeric score onto a
    Spanish difficulty label using an ascending threshold table.
    '''
    scale = ((15, "muy difícil"),
             (35, "árido"),
             (50, "bastante difícil"),
             (65, "normal"),
             (75, "bastante fácil"),
             (85, "fácil"))
    for limit, label in scale:
        if P <= limit:
            return label
    return "muy fácil"
def interpretaL(L):
    '''
    Fernández Huerta score interpretation: maps a numeric score onto a
    Spanish difficulty label (thresholds are exclusive upper bounds).
    '''
    scale = ((30, "muy difícil"),
             (50, "difícil"),
             (60, "bastante difícil"),
             (70, "normal"),
             (80, "bastante fácil"),
             (90, "fácil"))
    for limit, label in scale:
        if L < limit:
            return label
    return "muy fácil"
def inflesz(P):
    '''
    Inflesz scale interpretation of a Szigriszt-Pazos score.
    '''
    scale = ((40, "muy difícil"),
             (55, "algo difícil"),
             (65, "normal"),
             (80, "bastante fácil"))
    for limit, label in scale:
        if P <= limit:
            return label
    return "muy fácil"
def gutierrez_interpret(G):
    '''
    Gutiérrez de Polini score interpretation.
    '''
    if G <= 33.33:
        return "difícil"
    return "normal" if G < 66.66 else "fácil"
def mu_interpret(M):
    '''
    Mu score interpretation (Muñoz Baquedano & Muñoz Urra scale).
    '''
    if M < 31:
        return "muy difícil"
    if M <= 51:
        return "difícil"
    # Remaining bands use exclusive upper bounds.
    for limit, label in ((61, "un poco difícil"),
                         (71, "adecuado"),
                         (81, "un poco fácil"),
                         (91, "fácil")):
        if M < limit:
            return label
    return "muy fácil"
# See ejemplo.py to see how it works!
|
graik/biskit | refs/heads/master | biskit/core/scientificIO/__init__.py | 1 | """
This sub-package has been ripped out of the Scientific Python Package written
by Konrad Hinsen. Its sole purpose is low-level PDB parsing and writing. This
allows us to remove all dependencies on numpy-oldnumeric.
"""
|
sanguinariojoe/FreeCAD | refs/heads/master | src/Mod/Fem/femsolver/writerbase.py | 12 | # ***************************************************************************
# * Copyright (c) 2016 Bernd Hahnebach <bernd@bimstatik.org> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "FreeCAD FEM solver writer base object"
__author__ = "Bernd Hahnebach"
__url__ = "https://www.freecadweb.org"
## \addtogroup FEM
# @{
import os
import FreeCAD
from femmesh import meshtools
from femtools.femutils import type_of_obj
class FemInputWriter():
    """Base class for FEM solver input file writers.

    Stores the analysis, solver and mesh document objects together with all
    analysis member objects (materials, element geometries, constraints) and
    provides methods which resolve the finite element mesh entities (nodes,
    element faces, element edges, element volumes) referenced by those member
    objects. Concrete solver writers subclass this and use the resolved data
    to write their input files.
    """

    def __init__(
        self,
        analysis_obj,
        solver_obj,
        mesh_obj,
        member,
        dir_name=None
    ):
        # class attributes from parameter values
        self.analysis = analysis_obj
        self.solver_obj = solver_obj
        self.analysis_type = self.solver_obj.AnalysisType
        self.mesh_object = mesh_obj
        self.document = self.analysis.Document
        # materials
        self.material_objects = member.mats_linear
        self.material_nonlinear_objects = member.mats_nonlinear
        # geometries
        self.beamsection_objects = member.geos_beamsection
        self.beamrotation_objects = member.geos_beamrotation
        self.fluidsection_objects = member.geos_fluidsection
        self.shellthickness_objects = member.geos_shellthickness
        # constraints
        self.contact_objects = member.cons_contact
        self.displacement_objects = member.cons_displacement
        self.fixed_objects = member.cons_fixed
        self.force_objects = member.cons_force
        self.heatflux_objects = member.cons_heatflux
        self.initialtemperature_objects = member.cons_initialtemperature
        self.planerotation_objects = member.cons_planerotation
        self.pressure_objects = member.cons_pressure
        self.sectionprint_objects = member.cons_sectionprint
        self.selfweight_objects = member.cons_selfweight
        self.temperature_objects = member.cons_temperature
        self.tie_objects = member.cons_tie
        self.transform_objects = member.cons_transform
        # working dir
        self.dir_name = dir_name
        # if dir_name was not given: create a temporary dir
        # Purpose: makes sure the analysis can be run even on wired situation
        # NOTE(review): the original comment also promised a fresh dir for an
        # existing non-empty dir, but only a missing dir_name is handled here
        if not dir_name:
            FreeCAD.Console.PrintWarning(
                "Error: FemInputWriter has no working_dir --> "
                "we are going to make a temporary one!\n"
            )
            self.dir_name = self.document.TransientDir.replace(
                "\\", "/"
            ) + "/FemAnl_" + analysis_obj.Uid[-4:]
        if not os.path.isdir(self.dir_name):
            os.mkdir(self.dir_name)
        # new class attributes
        self.fc_ver = FreeCAD.Version()
        self.ccx_nall = "Nall"
        self.ccx_eall = "Eall"
        self.ccx_evolumes = "Evolumes"
        self.ccx_efaces = "Efaces"
        self.ccx_eedges = "Eedges"
        self.ccx_elsets = []
        if self.mesh_object:
            if hasattr(self.mesh_object, "Shape"):
                self.theshape = self.mesh_object.Shape
            elif hasattr(self.mesh_object, "Part"):
                self.theshape = self.mesh_object.Part
            else:
                FreeCAD.Console.PrintWarning(
                    "A finite mesh without a link to a Shape was given. "
                    "Happen on pure mesh objects. "
                    "Not all methods do work without this link.\n"
                )
            # ATM only used in meshtools.get_femelement_direction1D_set
            # TODO somehow this is not smart, rare meshes might be used often
            self.femmesh = self.mesh_object.FemMesh
        else:
            FreeCAD.Console.PrintWarning(
                "No finite element mesh object was given to the writer class. "
                "In rare cases this might not be an error. "
            )
        # lazily filled lookup tables, see _ensure_node_element_tables()
        self.femnodes_mesh = {}
        self.femelement_table = {}
        self.constraint_conflict_nodes = []
        self.femnodes_ele_table = {}
        self.femelements_edges_only = []
        self.femelements_faces_only = []
        self.femelement_volumes_table = {}
        self.femelement_faces_table = {}
        self.femelement_edges_table = {}
        self.femelement_count_test = True

    # use set for node sets to be sure all nodes are unique
    # use sorted to be sure the order is the same on different runs
    # be aware a sorted set returns a list, because set are not sorted by default
    # - done in return value of meshtools.get_femnodes_by_femobj_with_references
    # might be appropriate for element sets too

    def _get_constraint_nodes(self, femobjs, track_conflicts=False):
        # resolve the mesh nodes for every constraint object in femobjs
        # femobj --> dict, FreeCAD document object is femobj["Object"]
        # with track_conflicts the nodes are also registered in
        # constraint_conflict_nodes (needed by constraint plane rotation)
        for femobj in femobjs:
            print_obj_info(femobj["Object"])
            femobj["Nodes"] = meshtools.get_femnodes_by_femobj_with_references(
                self.femmesh,
                femobj
            )
            if track_conflicts:
                for node in femobj["Nodes"]:
                    self.constraint_conflict_nodes.append(node)

    def _ensure_node_element_tables(self):
        # lazily build the node and element lookup tables which are
        # shared by the element face based constraint getters
        if not self.femnodes_mesh:
            self.femnodes_mesh = self.femmesh.Nodes
        if not self.femelement_table:
            self.femelement_table = meshtools.get_femelement_table(self.femmesh)
        if not self.femnodes_ele_table:
            self.femnodes_ele_table = meshtools.get_femnodes_ele_table(
                self.femnodes_mesh,
                self.femelement_table
            )

    def get_constraints_fixed_nodes(self):
        # get nodes, they might conflict with constraint plane rotation
        self._get_constraint_nodes(self.fixed_objects, track_conflicts=True)
        # if mixed mesh with solids the node set needs to be split
        # because solid nodes do not have rotational degree of freedom
        if self.femmesh.Volumes \
                and (len(self.shellthickness_objects) > 0 or len(self.beamsection_objects) > 0):
            FreeCAD.Console.PrintMessage("We need to find the solid nodes.\n")
            if not self.femelement_volumes_table:
                self.femelement_volumes_table = meshtools.get_femelement_volumes_table(
                    self.femmesh
                )
            for femobj in self.fixed_objects:
                # femobj --> dict, FreeCAD document object is femobj["Object"]
                nds_solid = []
                nds_faceedge = []
                for n in femobj["Nodes"]:
                    solid_node = False
                    for ve in self.femelement_volumes_table:
                        if n in self.femelement_volumes_table[ve]:
                            solid_node = True
                            nds_solid.append(n)
                            break
                    if not solid_node:
                        nds_faceedge.append(n)
                femobj["NodesSolid"] = set(nds_solid)
                femobj["NodesFaceEdge"] = set(nds_faceedge)

    def get_constraints_displacement_nodes(self):
        # get nodes, they might conflict with constraint plane rotation
        self._get_constraint_nodes(self.displacement_objects, track_conflicts=True)

    def get_constraints_planerotation_nodes(self):
        # get nodes
        self._get_constraint_nodes(self.planerotation_objects)

    def get_constraints_transform_nodes(self):
        # get nodes
        self._get_constraint_nodes(self.transform_objects)

    def get_constraints_temperature_nodes(self):
        # get nodes
        self._get_constraint_nodes(self.temperature_objects)

    def get_constraints_fluidsection_nodes(self):
        # get nodes
        self._get_constraint_nodes(self.fluidsection_objects)

    def get_constraints_force_nodeloads(self):
        # check shape type of reference shape, build only the lookup
        # tables actually required for the node load calculation
        for femobj in self.force_objects:
            # femobj --> dict, FreeCAD document object is femobj["Object"]
            print_obj_info(femobj["Object"], log=True)
            if femobj["RefShapeType"] == "Vertex":
                FreeCAD.Console.PrintLog(
                    "    load on vertices --> The femelement_table "
                    "and femnodes_mesh are not needed for node load calculation.\n"
                )
            elif femobj["RefShapeType"] == "Face" \
                    and meshtools.is_solid_femmesh(self.femmesh) \
                    and not meshtools.has_no_face_data(self.femmesh):
                FreeCAD.Console.PrintLog(
                    "    solid_mesh with face data --> The femelement_table is not "
                    "needed but the femnodes_mesh is needed for node load calculation.\n"
                )
                if not self.femnodes_mesh:
                    self.femnodes_mesh = self.femmesh.Nodes
            else:
                # original message claimed the tables "are not needed" which
                # contradicted the code right below that builds both of them
                FreeCAD.Console.PrintLog(
                    "    mesh without needed data --> The femelement_table "
                    "and femnodes_mesh are needed for node load calculation.\n"
                )
                if not self.femnodes_mesh:
                    self.femnodes_mesh = self.femmesh.Nodes
                if not self.femelement_table:
                    self.femelement_table = meshtools.get_femelement_table(
                        self.femmesh
                    )
        # get node loads
        FreeCAD.Console.PrintLog(
            "    Finite element mesh nodes will be retrieved by searching "
            "the appropriate nodes in the finite element mesh.\n"
        )
        FreeCAD.Console.PrintLog(
            "    The appropriate finite element mesh node load values will "
            "be calculated according to the finite element definition.\n"
        )
        for femobj in self.force_objects:
            # femobj --> dict, FreeCAD document object is femobj["Object"]
            frc_obj = femobj["Object"]
            print_obj_info(frc_obj)
            if frc_obj.Force == 0:
                FreeCAD.Console.PrintMessage("    Warning --> Force = 0\n")
            if femobj["RefShapeType"] == "Vertex":  # point load on vertices
                femobj["NodeLoadTable"] = meshtools.get_force_obj_vertex_nodeload_table(
                    self.femmesh,
                    frc_obj
                )
            elif femobj["RefShapeType"] == "Edge":  # line load on edges
                femobj["NodeLoadTable"] = meshtools.get_force_obj_edge_nodeload_table(
                    self.femmesh,
                    self.femelement_table,
                    self.femnodes_mesh, frc_obj
                )
            elif femobj["RefShapeType"] == "Face":  # area load on faces
                femobj["NodeLoadTable"] = meshtools.get_force_obj_face_nodeload_table(
                    self.femmesh,
                    self.femelement_table,
                    self.femnodes_mesh, frc_obj
                )

    def get_constraints_pressure_faces(self):
        # TODO see comments in get_constraints_force_nodeloads()
        # it applies here too. Mhh it applies to all constraints ...
        self._ensure_node_element_tables()
        for femobj in self.pressure_objects:
            # femobj --> dict, FreeCAD document object is femobj["Object"]
            print_obj_info(femobj["Object"])
            pressure_faces = meshtools.get_pressure_obj_faces(
                self.femmesh,
                self.femelement_table,
                self.femnodes_ele_table, femobj
            )
            # the data model is for compatibility reason with deprecated version
            # get_pressure_obj_faces_depreciated returns the face ids in a tuple per ref_shape
            # some_string was the reference_shape_element_string in deprecated method
            # [(some_string, [ele_id, ele_face_id], [ele_id, ele_face_id], ...])]
            some_string = "{}: face load".format(femobj["Object"].Name)
            femobj["PressureFaces"] = [(some_string, pressure_faces)]
            FreeCAD.Console.PrintLog("{}\n".format(femobj["PressureFaces"]))

    def get_constraints_contact_faces(self):
        self._ensure_node_element_tables()
        for femobj in self.contact_objects:
            # femobj --> dict, FreeCAD document object is femobj["Object"]
            print_obj_info(femobj["Object"])
            contact_slave_faces, contact_master_faces = meshtools.get_contact_obj_faces(
                self.femmesh,
                self.femelement_table,
                self.femnodes_ele_table, femobj
            )
            # [ele_id, ele_face_id], [ele_id, ele_face_id], ...]
            # whereas the ele_face_id might be ccx specific
            femobj["ContactSlaveFaces"] = contact_slave_faces
            femobj["ContactMasterFaces"] = contact_master_faces

    # information in the regard of element faces constraints
    # forum post: https://forum.freecadweb.org/viewtopic.php?f=18&t=42783&p=370286#p366723
    # contact: master and slave could be the same face: rubber of a damper
    # tie: master and slave have to be separate faces AFA UR_ K
    # section print: only the element faces of solid elements
    # from one side of the geometric face are needed

    def get_constraints_tie_faces(self):
        self._ensure_node_element_tables()
        for femobj in self.tie_objects:
            # femobj --> dict, FreeCAD document object is femobj["Object"]
            print_obj_info(femobj["Object"])
            slave_faces, master_faces = meshtools.get_tie_obj_faces(
                self.femmesh,
                self.femelement_table,
                self.femnodes_ele_table, femobj
            )
            # [ele_id, ele_face_id], [ele_id, ele_face_id], ...]
            # whereas the ele_face_id might be ccx specific
            femobj["TieSlaveFaces"] = slave_faces
            femobj["TieMasterFaces"] = master_faces

    def get_element_geometry2D_elements(self):
        # get element ids and write them into the objects
        FreeCAD.Console.PrintMessage("Shell thicknesses\n")
        if not self.femelement_faces_table:
            self.femelement_faces_table = meshtools.get_femelement_faces_table(
                self.femmesh
            )
        meshtools.get_femelement_sets(
            self.femmesh,
            self.femelement_faces_table,
            self.shellthickness_objects
        )

    def get_element_geometry1D_elements(self):
        # get element ids and write them into the objects
        FreeCAD.Console.PrintMessage("Beam sections\n")
        if not self.femelement_edges_table:
            self.femelement_edges_table = meshtools.get_femelement_edges_table(
                self.femmesh
            )
        meshtools.get_femelement_sets(
            self.femmesh,
            self.femelement_edges_table,
            self.beamsection_objects
        )

    def get_element_rotation1D_elements(self):
        # get for each geometry edge direction the element ids and rotation norma
        FreeCAD.Console.PrintMessage("Beam rotations\n")
        if not self.femelement_edges_table:
            self.femelement_edges_table = meshtools.get_femelement_edges_table(
                self.femmesh
            )
        meshtools.get_femelement_direction1D_set(
            self.femmesh,
            self.femelement_edges_table,
            self.beamrotation_objects,
            self.theshape
        )

    def get_element_fluid1D_elements(self):
        # get element ids and write them into the objects
        FreeCAD.Console.PrintMessage("Fluid sections\n")
        if not self.femelement_edges_table:
            self.femelement_edges_table = meshtools.get_femelement_edges_table(
                self.femmesh
            )
        meshtools.get_femelement_sets(
            self.femmesh,
            self.femelement_edges_table,
            self.fluidsection_objects
        )

    def get_material_elements(self):
        # it only works if either Volumes or Shellthicknesses or Beamsections
        # are in the material objects, it means it does not work
        # for mixed meshes and multiple materials, this is checked in check_prerequisites
        # the femelement_table is only calculated for
        # the highest dimension in get_femelement_table
        FreeCAD.Console.PrintMessage("Materials\n")
        if self.femmesh.Volumes:
            # we only could do this for volumes, if a mesh contains volumes
            # we're going to use them in the analysis
            # but a mesh could contain the element faces of the volumes as faces
            # and the edges of the faces as edges
            # there we have to check of some geometric objects
            all_found = False
            if self.femmesh.GroupCount:
                all_found = meshtools.get_femelement_sets_from_group_data(
                    self.femmesh,
                    self.material_objects
                )
            # format explicitly: PrintMessage expects a string, not a bool
            FreeCAD.Console.PrintMessage("{}\n".format(all_found))
            if all_found is False:
                # we're going to use the binary search for get_femelements_by_femnodes()
                # thus we need the parameter values self.femnodes_ele_table
                self._ensure_node_element_tables()
                control = meshtools.get_femelement_sets(
                    self.femmesh,
                    self.femelement_table,
                    self.material_objects,
                    self.femnodes_ele_table
                )
                # we only need to set it, if it is still True
                if (self.femelement_count_test is True) and (control is False):
                    self.femelement_count_test = False
        if self.shellthickness_objects:
            if not self.femelement_faces_table:
                self.femelement_faces_table = meshtools.get_femelement_faces_table(
                    self.femmesh
                )
            meshtools.get_femelement_sets(
                self.femmesh,
                self.femelement_faces_table,
                self.material_objects
            )
        if self.beamsection_objects or self.fluidsection_objects:
            if not self.femelement_edges_table:
                self.femelement_edges_table = meshtools.get_femelement_edges_table(
                    self.femmesh
                )
            meshtools.get_femelement_sets(
                self.femmesh,
                self.femelement_edges_table,
                self.material_objects
            )
# helper
def print_obj_info(obj, log=False):
    """Print label, type and name of obj to the FreeCAD console.

    With log=False the text goes to the message channel, otherwise to the
    (less prominent) log channel.
    """
    if log is False:
        emit = FreeCAD.Console.PrintMessage
    else:
        emit = FreeCAD.Console.PrintLog
    emit("{}:\n".format(obj.Label))
    emit("    Type: {}, Name: {}\n".format(type_of_obj(obj), obj.Name))
## @}
|
mrquim/repository.mrquim | refs/heads/master | repo/plugin.video.mrpiracy/resources/lib/js2py/legecy_translators/tokenize.py | 101 | from jsparser import *
from utils import *
# Maybe I will try rewriting my parser in the future... A tokenizer makes things much easier and faster.
# Unfortunately, I did not know anything about parsers when I was starting this project, so I invented my own.
|
jwren/intellij-community | refs/heads/master | python/testData/inspections/PyAbstractClassInspection/quickFix/SetImportedABCMetaAsMetaclassPy3/main.py | 17 | from abc import ABCMeta, abstractmethod
class A1(metaclass=ABCMeta):
@abstractmethod
def m1(self):
pass
class A<caret>2(A1):
pass |
microcosm-cc/microweb | refs/heads/master | fabfile.py | 1 | import os
from fabric.api import env
from fabric.api import sudo
from fabric.api import prefix
from fabric.contrib.project import rsync_project
from fabric.context_managers import settings
from contextlib import contextmanager
# deployment configuration shared by all tasks below
env.hosts = []  # filled in by one of the *_env tasks
env.serve_root = '/srv/www/django'
env.project_name = 'microweb'
env.virtualenv_name = 'microwebenv'
env.project_root = os.path.join(env.serve_root, env.project_name)
env.virtualenv_root = os.path.join(env.serve_root, env.virtualenv_name)
env.requirements_path = os.path.join(env.project_root, 'requirements.txt')
env.activate = 'source %s' % os.path.join(env.virtualenv_root, 'bin/activate')
@contextmanager
def activate_virtualenv():
    """Run the wrapped commands with the project virtualenv activated."""
    activation = prefix(env.activate)
    with activation:
        yield
def dev_env():
    """Target the primary development host."""
    env.hosts += ['wpy01.dev.microcosm.cc']
def dev2_env():
    """Target the secondary development host."""
    env.hosts += ['wpyrapha.dev.microcosm.cc']
def prod_env():
    """Target the production host (non-standard SSH port)."""
    env.hosts += ['wpy01.microcosm.cc:2020']
def test_env():
    """Target the test host as the deployment user."""
    env.hosts += ['deployment@dev.microco.sm']
def destroy_virtualenv():
    """Remove the remote virtualenv directory entirely."""
    command = 'rm -rf %s' % env.virtualenv_root
    sudo(command, user='django')
def create_virtualenv():
    """Create the remote virtualenv owned by the django user."""
    command = 'virtualenv %s' % env.virtualenv_root
    sudo(command, user='django')
def install_requirements():
    """Install the Python dependencies into the virtualenv."""
    with activate_virtualenv():
        command = 'pip install -r %s' % env.requirements_path
        sudo(command, user='django')
def collectstatic():
    """Collect the static assets via manage.py inside the virtualenv."""
    manage_py = os.path.join(env.project_root, 'manage.py')
    with activate_virtualenv():
        sudo('python %s collectstatic --noinput' % manage_py, user='django')
def rsync():
    """Sync the project tree to the server, writing files as the django user."""
    exclude_opts = '--exclude .git/ --exclude ENV/ --delete --rsync-path="sudo -u django rsync"'
    rsync_project(env.serve_root, extra_opts=exclude_opts)
def start_service():
    """Start the microweb service on the remote host."""
    command = 'service microweb start'
    sudo(command, user='root')
def stop_service():
    """Stop the microweb service on the remote host."""
    command = 'service microweb stop'
    sudo(command, user='root')
def restart_service():
    """Restart the microweb service on the remote host."""
    command = 'service microweb restart'
    sudo(command, user='root')
def restart_memcached():
    """Restart memcached on the remote host (flushes the cache)."""
    command = 'service memcached restart'
    sudo(command, user='root')
def first_deploy():
    """Provision and launch the project for the first time."""
    steps = (
        create_virtualenv,
        rsync,
        install_requirements,
        collectstatic,
        start_service,
    )
    for step in steps:
        step()
def redeploy():
    """Ship the latest code and restart the running service."""
    steps = (
        rsync,
        install_requirements,
        collectstatic,
        restart_service,
    )
    for step in steps:
        step()
|
DavidAndreev/indico | refs/heads/devel | indico/modules/attachments/api/hooks.py | 2 | # This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from indico.modules.events import Event
from indico.modules.attachments.api.util import build_folders_api_data
from indico.modules.events.contributions.models.contributions import Contribution
from indico.modules.events.contributions.models.subcontributions import SubContribution
from indico.modules.events.sessions import Session
from indico.web.http_api import HTTPAPIHook
from indico.web.http_api.responses import HTTPAPIError
@HTTPAPIHook.register
class AttachmentsExportHook(HTTPAPIHook):
    """HTTP API hook that exports the attachment folders of an event or of
    one of its sessions, contributions or subcontributions.

    URL shape: /attachments/<event_id>[/session/<id> |
    /contribution/<id>[/<subcontribution_id>]]
    """

    # URL type prefix handled by this hook
    TYPES = ('attachments',)
    # event id, optionally followed by a session or a (sub)contribution path
    RE = (r'(?P<event_id>\d+)'
          r'((/session/(?P<session_id>\d+)|(/contribution/(?P<contribution_id>\d+)(/(?P<subcontribution_id>\d+))?))?)?')
    MAX_RECORDS = {}  # no record limit for any detail level
    GUEST_ALLOWED = True  # anonymous requests allowed; ACL still checked in _hasAccess
    VALID_FORMATS = ('json', 'jsonp', 'xml')

    def _getParams(self):
        """Resolve the export target (event, session, contribution or
        subcontribution) from the path parameters into ``self._obj``.

        Raises:
            HTTPAPIError: with status 404 if any id in the path is unknown.
        """
        super(AttachmentsExportHook, self)._getParams()
        event = self._obj = Event.get(self._pathParams['event_id'], is_deleted=False)
        if event is None:
            raise HTTPAPIError('No such event', 404)
        session_id = self._pathParams.get('session_id')
        if session_id:
            self._obj = Session.query.with_parent(event).filter_by(id=session_id).first()
            if self._obj is None:
                raise HTTPAPIError("No such session", 404)
        contribution_id = self._pathParams.get('contribution_id')
        if contribution_id:
            contribution = self._obj = Contribution.query.with_parent(event).filter_by(id=contribution_id).first()
            if contribution is None:
                raise HTTPAPIError("No such contribution", 404)
            subcontribution_id = self._pathParams.get('subcontribution_id')
            if subcontribution_id:
                self._obj = SubContribution.query.with_parent(contribution).filter_by(id=subcontribution_id).first()
                if self._obj is None:
                    raise HTTPAPIError("No such subcontribution", 404)

    def _hasAccess(self, aw):
        # delegate the access decision to the resolved object's ACL
        user = aw.getUser().user if aw.getUser() else None
        return self._obj.can_access(user)

    def export_attachments(self, aw):
        """Return the serialized attachment folder tree of the target."""
        return {'folders': build_folders_api_data(self._obj)}
|
torquemad/pixelated-user-agent | refs/heads/master | service/pixelated/bitmask_libraries/certs.py | 8 | #
# Copyright (c) 2014 ThoughtWorks, Inc.
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
import os
class LeapCertificate(object):
    """Access to the LEAP provider certificates.

    Holds the class-wide web certificate / fingerprint used when talking to
    the provider, and knows where this provider's API certificate is stored
    on disk.
    """

    # class-wide verification settings shared by all instances:
    # LEAP_CERT is a CA bundle path (str), True (system default CAs) or
    # False (verification disabled, fingerprint pinning used instead)
    LEAP_CERT = None
    LEAP_FINGERPRINT = None

    def __init__(self, provider):
        self._config = provider.config
        self._server_name = provider.server_name
        self._provider = provider

    @staticmethod
    def set_cert_and_fingerprint(cert_file=None, cert_fingerprint=None):
        """Configure class-wide certificate verification.

        A fingerprint takes precedence: when given, certificate verification
        is disabled (LEAP_CERT = False). Otherwise the given CA file path is
        used, falling back to the system defaults (True).
        """
        if cert_fingerprint is None:
            LeapCertificate.LEAP_CERT = str(cert_file) if cert_file else True
            LeapCertificate.LEAP_FINGERPRINT = None
        else:
            LeapCertificate.LEAP_FINGERPRINT = cert_fingerprint
            LeapCertificate.LEAP_CERT = False

    @property
    def provider_web_cert(self):
        # verification setting for the provider web endpoints
        return self.LEAP_CERT

    @property
    def provider_api_cert(self):
        # path of the downloaded API certificate for this provider
        return str(os.path.join(self._provider.config.leap_home, 'providers', self._server_name, 'keys', 'client', 'api.pem'))

    def setup_ca_bundle(self):
        """Create the client key directory if needed and download the
        provider API certificate into it."""
        path = os.path.join(self._provider.config.leap_home, 'providers', self._server_name, 'keys', 'client')
        if not os.path.isdir(path):
            # owner-only access; 0o700 is valid on Python 2.6+ and 3
            # (the original 0700 literal is Python-2-only syntax)
            os.makedirs(path, 0o700)
        self._download_cert(self.provider_api_cert)

    def _download_cert(self, cert_file_name):
        """Fetch the provider certificate and write it to cert_file_name."""
        cert = self._provider.fetch_valid_certificate()
        # renamed from 'file' to avoid shadowing the Python 2 builtin
        with open(cert_file_name, 'w') as cert_file:
            cert_file.write(cert)
jeremiahyan/odoo | refs/heads/master | addons/hr_expense/models/hr_department.py | 66 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class HrDepartment(models.Model):
    """Extend hr.department with a counter of expense reports to approve."""

    _inherit = 'hr.department'

    def _compute_expense_sheets_to_approve(self):
        # count submitted expense sheets grouped per department
        grouped = self.env['hr.expense.sheet'].read_group(
            [('department_id', 'in', self.ids), ('state', '=', 'submit')],
            ['department_id'],
            ['department_id'],
        )
        counts = {entry['department_id'][0]: entry['department_id_count'] for entry in grouped}
        for department in self:
            department.expense_sheets_to_approve_count = counts.get(department.id, 0)

    expense_sheets_to_approve_count = fields.Integer(
        compute='_compute_expense_sheets_to_approve',
        string='Expenses Reports to Approve',
    )
|
docker-infra/ansible-modules-core | refs/heads/devel | system/sysctl.py | 105 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, David "DaviXX" CHANIAL <david.chanial@gmail.com>
# (c) 2014, James Tanner <tanner.jc@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: sysctl
short_description: Manage entries in sysctl.conf.
description:
- This module manipulates sysctl entries and optionally performs a C(/sbin/sysctl -p) after changing them.
version_added: "1.0"
options:
name:
description:
- The dot-separated path (aka I(key)) specifying the sysctl variable.
required: true
default: null
aliases: [ 'key' ]
value:
description:
- Desired value of the sysctl key.
required: false
default: null
aliases: [ 'val' ]
state:
description:
- Whether the entry should be present or absent in the sysctl file.
choices: [ "present", "absent" ]
default: present
ignoreerrors:
description:
- Use this option to ignore errors about unknown keys.
choices: [ "yes", "no" ]
default: no
reload:
description:
- If C(yes), performs a I(/sbin/sysctl -p) if the C(sysctl_file) is
updated. If C(no), does not reload I(sysctl) even if the
C(sysctl_file) is updated.
choices: [ "yes", "no" ]
default: "yes"
sysctl_file:
description:
- Specifies the absolute path to C(sysctl.conf), if not C(/etc/sysctl.conf).
required: false
default: /etc/sysctl.conf
sysctl_set:
description:
- Verify token value with the sysctl command and set with -w if necessary
choices: [ "yes", "no" ]
required: false
version_added: 1.5
default: False
notes: []
requirements: []
author: "David CHANIAL (@davixx) <david.chanial@gmail.com>"
'''
EXAMPLES = '''
# Set vm.swappiness to 5 in /etc/sysctl.conf
- sysctl: name=vm.swappiness value=5 state=present
# Remove kernel.panic entry from /etc/sysctl.conf
- sysctl: name=kernel.panic state=absent sysctl_file=/etc/sysctl.conf
# Set kernel.panic to 3 in /tmp/test_sysctl.conf
- sysctl: name=kernel.panic value=3 sysctl_file=/tmp/test_sysctl.conf reload=no
# Set ip forwarding on in /proc and do not reload the sysctl file
- sysctl: name="net.ipv4.ip_forward" value=1 sysctl_set=yes
# Set ip forwarding on in /proc and in the sysctl file and reload if necessary
- sysctl: name="net.ipv4.ip_forward" value=1 sysctl_set=yes state=present reload=yes
'''
# ==============================================================
import os
import tempfile
import re
class SysctlModule(object):
    """Manage one sysctl token.

    Reconciles the requested value with both the sysctl configuration file
    and (optionally, via the ``sysctl`` binary) the running kernel. The
    whole workflow runs from the constructor via ``process()``; the outcome
    is exposed through the ``changed`` attribute.
    """

    def __init__(self, module):
        # module: the AnsibleModule wrapper (params, run_command, fail_json)
        self.module = module
        self.args = self.module.params

        self.sysctl_cmd = self.module.get_bin_path('sysctl', required=True)
        self.sysctl_file = self.args['sysctl_file']

        self.proc_value = None # current token value in proc fs
        self.file_value = None # current token value in file
        self.file_lines = [] # all lines in the file
        self.file_values = {} # dict of token values

        self.changed = False # will change occur
        self.set_proc = False # does sysctl need to set value
        self.write_file = False # does the sysctl file need to be reloaded

        self.process()

    # ==============================================================
    # LOGIC
    # ==============================================================

    def process(self):
        """Determine what (if anything) must change, then apply it.

        Normalizes the requested value, reads the current proc and file
        values, rebuilds the file contents in memory and — outside of
        check mode — writes the file, reloads sysctl and/or sets the
        running value as required.
        """
        # Whitespace is bad
        self.args['name'] = self.args['name'].strip()
        self.args['value'] = self._parse_value(self.args['value'])

        thisname = self.args['name']

        # get the current proc fs value
        self.proc_value = self.get_token_curr_value(thisname)

        # get the current sysctl file value
        self.read_sysctl_file()
        if thisname not in self.file_values:
            self.file_values[thisname] = None

        # update file contents with desired token/value
        self.fix_lines()

        # what do we need to do now?
        if self.file_values[thisname] is None and self.args['state'] == "present":
            self.changed = True
            self.write_file = True
        elif self.file_values[thisname] is None and self.args['state'] == "absent":
            self.changed = False
        elif self.file_values[thisname] != self.args['value']:
            self.changed = True
            self.write_file = True

        # use the sysctl command or not?
        if self.args['sysctl_set']:
            if self.proc_value is None:
                self.changed = True
            elif not self._values_is_equal(self.proc_value, self.args['value']):
                self.changed = True
                self.set_proc = True

        # Do the work
        if not self.module.check_mode:
            if self.write_file:
                self.write_sysctl()
            if self.write_file and self.args['reload']:
                self.reload_sysctl()
            if self.set_proc:
                self.set_token_value(self.args['name'], self.args['value'])

    def _values_is_equal(self, a, b):
        """Expects two string values. It will split the string by whitespace
        and compare each value. It will return True if both lists are the same,
        contain the same elements and the same order."""
        if a is None or b is None:
            return False

        a = a.split()
        b = b.split()

        if len(a) != len(b):
            return False

        return len([i for i, j in zip(a, b) if i == j]) == len(a)

    def _parse_value(self, value):
        """Normalize *value* to its canonical string form.

        Booleans (and boolean-looking strings) become '1'/'0', None
        becomes '', any other string is stripped of whitespace.
        """
        if value is None:
            return ''
        elif isinstance(value, bool):
            if value:
                return '1'
            else:
                return '0'
        elif isinstance(value, basestring):
            if value.lower() in BOOLEANS_TRUE:
                return '1'
            elif value.lower() in BOOLEANS_FALSE:
                return '0'
            else:
                return value.strip()
        else:
            return value

    # ==============================================================
    # SYSCTL COMMAND MANAGEMENT
    # ==============================================================

    # Use the sysctl command to find the current value
    def get_token_curr_value(self, token):
        """Return the running kernel's value for *token*, or None if the
        token is unknown (non-zero exit from sysctl)."""
        thiscmd = "%s -e -n %s" % (self.sysctl_cmd, token)
        rc,out,err = self.module.run_command(thiscmd)
        if rc != 0:
            return None
        else:
            return out

    # Use the sysctl command to set the current value
    def set_token_value(self, token, value):
        """Set *token* to *value* in the running kernel with ``sysctl -w``;
        fail the module if the command errors."""
        if len(value.split()) > 0:
            value = '"' + value + '"'
        thiscmd = "%s -w %s=%s" % (self.sysctl_cmd, token, value)
        rc,out,err = self.module.run_command(thiscmd)
        if rc != 0:
            self.module.fail_json(msg='setting %s failed: %s' % (token, out + err))
        else:
            return rc

    # Run sysctl -p
    def reload_sysctl(self):
        """Reload kernel settings from the sysctl configuration file."""
        # do it
        if get_platform().lower() == 'freebsd':
            # freebsd doesn't support -p, so reload the sysctl service
            rc,out,err = self.module.run_command('/etc/rc.d/sysctl reload')
        else:
            # system supports reloading via the -p flag to sysctl, so we'll use that
            sysctl_args = [self.sysctl_cmd, '-p', self.sysctl_file]
            if self.args['ignoreerrors']:
                sysctl_args.insert(1, '-e')

            rc,out,err = self.module.run_command(sysctl_args)

        if rc != 0:
            self.module.fail_json(msg="Failed to reload sysctl: %s" % str(out) + str(err))

    # ==============================================================
    # SYSCTL FILE MANAGEMENT
    # ==============================================================

    # Get the token value from the sysctl file
    def read_sysctl_file(self):
        """Read the sysctl file into self.file_lines (raw) and
        self.file_values (token -> value)."""

        lines = []
        if os.path.isfile(self.sysctl_file):
            try:
                f = open(self.sysctl_file, "r")
                lines = f.readlines()
                f.close()
            except IOError, e:
                self.module.fail_json(msg="Failed to open %s: %s" % (self.sysctl_file, str(e)))

        for line in lines:
            line = line.strip()
            self.file_lines.append(line)

            # don't split empty lines or comments
            if not line or line.startswith("#"):
                continue

            k, v = line.split('=',1)
            k = k.strip()
            v = v.strip()
            self.file_values[k] = v.strip()

    # Fix the value in the sysctl file content
    def fix_lines(self):
        """Rebuild the file contents into self.fixed_lines with the desired
        token state applied; comments, blanks and unrelated entries are
        kept, duplicate keys are collapsed to their first occurrence."""
        checked = []
        self.fixed_lines = []
        for line in self.file_lines:
            if not line.strip() or line.strip().startswith("#"):
                self.fixed_lines.append(line)
                continue
            tmpline = line.strip()
            k, v = line.split('=',1)
            k = k.strip()
            v = v.strip()
            if k not in checked:
                checked.append(k)
                if k == self.args['name']:
                    if self.args['state'] == "present":
                        new_line = "%s=%s\n" % (k, self.args['value'])
                        self.fixed_lines.append(new_line)
                else:
                    new_line = "%s=%s\n" % (k, v)
                    self.fixed_lines.append(new_line)

        if self.args['name'] not in checked and self.args['state'] == "present":
            new_line = "%s=%s\n" % (self.args['name'], self.args['value'])
            self.fixed_lines.append(new_line)

    # Completely rewrite the sysctl file
    def write_sysctl(self):
        """Atomically replace the sysctl file with self.fixed_lines."""
        # open a tmp file
        fd, tmp_path = tempfile.mkstemp('.conf', '.ansible_m_sysctl_', os.path.dirname(self.sysctl_file))
        f = open(tmp_path,"w")
        try:
            for l in self.fixed_lines:
                f.write(l.strip() + "\n")
        except IOError, e:
            self.module.fail_json(msg="Failed to write to file %s: %s" % (tmp_path, str(e)))
        f.flush()
        f.close()

        # replace the real one
        self.module.atomic_move(tmp_path, self.sysctl_file)
# ==============================================================
# main
def main():
    """Module entry point: declare the argument spec, run SysctlModule
    (which performs all the work in its constructor), and report whether
    anything changed."""
    # defining module
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(aliases=['key'], required=True),
            value = dict(aliases=['val'], required=False, type='str'),
            state = dict(default='present', choices=['present', 'absent']),
            reload = dict(default=True, type='bool'),
            sysctl_set = dict(default=False, type='bool'),
            ignoreerrors = dict(default=False, type='bool'),
            sysctl_file = dict(default='/etc/sysctl.conf')
        ),
        supports_check_mode=True
    )
    result = SysctlModule(module)
    module.exit_json(changed=result.changed)
    # exit_json() normally raises SystemExit itself; this is a defensive no-op
    sys.exit(0)
# import module snippets
from ansible.module_utils.basic import *
main()
|
Caylo/easybuild-framework | refs/heads/master | test/framework/asyncprocess.py | 3 | ##
# Copyright 2012-2016 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Unit tests for asyncprocess.py.
@author: Toon Willems (Ghent University)
"""
import os
import sys
import time
from test.framework.utilities import EnhancedTestCase
from unittest import TextTestRunner, TestSuite
import easybuild.tools.asyncprocess as p
from easybuild.tools.asyncprocess import Popen
class AsyncProcessTest(EnhancedTestCase):
    """ Testcase for asyncprocess """
    def setUp(self):
        """ setup a basic shell """
        super(AsyncProcessTest, self).setUp()
        # long-lived interactive shell that the test writes to / reads from
        self.shell = Popen('sh', stdin=p.PIPE, stdout=p.PIPE, shell=True, executable='/bin/bash')
    def runTest(self):
        """ try echoing some text and see if it comes back out """
        p.send_all(self.shell, "echo hello\n")
        # brief pause to let the shell produce its output before reading
        time.sleep(0.1)
        self.assertEqual(p.recv_some(self.shell), "hello\n")
        p.send_all(self.shell, "echo hello world\n")
        time.sleep(0.1)
        self.assertEqual(p.recv_some(self.shell), "hello world\n")
        # ask the shell to exit, then drain the pipe
        p.send_all(self.shell, "exit\n")
        time.sleep(0.1)
        # e=0 disables the end-of-stream exception, so an empty read is returned
        self.assertEqual("", p.recv_some(self.shell, e=0))
        # with the default error behaviour, reading from the dead shell raises
        self.assertRaises(Exception, p.recv_some, self.shell)
    def tearDown(self):
        """cleanup"""
        super(AsyncProcessTest, self).tearDown()
def suite():
    """ returns all the testcases in this module """
    tests = [AsyncProcessTest()]
    return TestSuite(tests)
if __name__ == '__main__':
TextTestRunner(verbosity=1).run(suite())
|
rcook/DesignLab | refs/heads/master | app/src/processing/app/i18n/python/requests/packages/__init__.py | 1849 | from __future__ import absolute_import
from . import urllib3
|
Yuriy-Leonov/nova | refs/heads/master | nova/virt/hyperv/vmutilsv2.py | 8 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility class for VM related operations.
Based on the "root/virtualization/v2" namespace available starting with
Hyper-V Server / Windows Server 2012.
"""
import sys
import uuid
if sys.platform == 'win32':
import wmi
from oslo.config import cfg
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.virt.hyperv import constants
from nova.virt.hyperv import vmutils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class VMUtilsV2(vmutils.VMUtils):
    """VM utility operations implemented against the WMI
    root/virtualization/v2 namespace (Hyper-V 2012+)."""

    # WMI ResourceSubType identifiers for the virtual devices we create
    _PHYS_DISK_RES_SUB_TYPE = 'Microsoft:Hyper-V:Physical Disk Drive'
    _DISK_RES_SUB_TYPE = 'Microsoft:Hyper-V:Synthetic Disk Drive'
    _DVD_RES_SUB_TYPE = 'Microsoft:Hyper-V:Synthetic DVD Drive'
    _SCSI_RES_SUBTYPE = 'Microsoft:Hyper-V:Synthetic SCSI Controller'
    _IDE_DISK_RES_SUB_TYPE = 'Microsoft:Hyper-V:Virtual Hard Disk'
    _IDE_DVD_RES_SUB_TYPE = 'Microsoft:Hyper-V:Virtual CD/DVD Disk'
    _IDE_CTRL_RES_SUB_TYPE = 'Microsoft:Hyper-V:Emulated IDE Controller'
    _SCSI_CTRL_RES_SUB_TYPE = 'Microsoft:Hyper-V:Synthetic SCSI Controller'
    _VIRTUAL_SYSTEM_TYPE_REALIZED = 'Microsoft:Hyper-V:System:Realized'
    _SNAPSHOT_FULL = 2
    _METRIC_AGGR_CPU_AVG = 'Aggregated Average CPU Utilization'
    _METRIC_AGGR_DISK_R = 'Aggregated Disk Data Read'
    _METRIC_AGGR_DISK_W = 'Aggregated Disk Data Written'
    _METRIC_ENABLED = 2
    # maps nova power states to the v2 namespace RequestedState values
    _vm_power_states_map = {constants.HYPERV_VM_STATE_ENABLED: 2,
                            constants.HYPERV_VM_STATE_DISABLED: 3,
                            constants.HYPERV_VM_STATE_REBOOT: 11,
                            constants.HYPERV_VM_STATE_PAUSED: 9,
                            constants.HYPERV_VM_STATE_SUSPENDED: 6}
    def __init__(self, host='.'):
        super(VMUtilsV2, self).__init__(host)
    def _init_hyperv_wmi_conn(self, host):
        # v2 namespace instead of the legacy root/virtualization used by the base class
        self._conn = wmi.WMI(moniker='//%s/root/virtualization/v2' % host)
    def _create_vm_obj(self, vs_man_svc, vm_name):
        """Define a new VM named vm_name and return its WMI object."""
        vs_data = self._conn.Msvm_VirtualSystemSettingData.new()
        vs_data.ElementName = vm_name
        (job_path,
         vm_path,
         ret_val) = vs_man_svc.DefineSystem(ResourceSettings=[],
                                            ReferenceConfiguration=None,
                                            SystemSettings=vs_data.GetText_(1))
        job = self.check_ret_val(ret_val, job_path)
        # when DefineSystem runs asynchronously the VM path is only
        # available through the job's affected element
        if not vm_path and job:
            vm_path = job.associators("Msvm_AffectedJobElement")[0]
        return self._get_wmi_obj(vm_path)
    def _get_vm_setting_data(self, vm):
        """Return the realized (non-snapshot) setting data of the VM."""
        vmsettings = vm.associators(
            wmi_result_class='Msvm_VirtualSystemSettingData')
        # Avoid snapshots
        return [s for s in vmsettings if
                s.VirtualSystemType == self._VIRTUAL_SYSTEM_TYPE_REALIZED][0]
    def attach_ide_drive(self, vm_name, path, ctrller_addr, drive_addr,
                         drive_type=constants.IDE_DISK):
        """Create an IDE drive and attach it to the vm."""
        # NOTE(review): a drive_type other than IDE_DISK/IDE_DVD would leave
        # res_sub_type unbound and raise NameError — callers only pass these two.
        vm = self._lookup_vm_check(vm_name)
        ctrller_path = self._get_vm_ide_controller(vm, ctrller_addr)
        if drive_type == constants.IDE_DISK:
            res_sub_type = self._DISK_RES_SUB_TYPE
        elif drive_type == constants.IDE_DVD:
            res_sub_type = self._DVD_RES_SUB_TYPE
        drive = self._get_new_resource_setting_data(res_sub_type)
        #Set the IDE ctrller as parent.
        drive.Parent = ctrller_path
        drive.Address = drive_addr
        drive.AddressOnParent = drive_addr
        #Add the cloned disk drive object to the vm.
        new_resources = self._add_virt_resource(drive, vm.path_())
        drive_path = new_resources[0]
        if drive_type == constants.IDE_DISK:
            res_sub_type = self._IDE_DISK_RES_SUB_TYPE
        elif drive_type == constants.IDE_DVD:
            res_sub_type = self._IDE_DVD_RES_SUB_TYPE
        # second resource: the virtual disk/ISO backing the drive created above
        res = self._get_new_resource_setting_data(
            res_sub_type, 'Msvm_StorageAllocationSettingData')
        res.Parent = drive_path
        res.HostResource = [path]
        self._add_virt_resource(res, vm.path_())
    def attach_volume_to_controller(self, vm_name, controller_path, address,
                                    mounted_disk_path):
        """Attach a volume to a controller."""
        vm = self._lookup_vm_check(vm_name)
        diskdrive = self._get_new_resource_setting_data(
            self._PHYS_DISK_RES_SUB_TYPE)
        diskdrive.AddressOnParent = address
        diskdrive.Parent = controller_path
        diskdrive.HostResource = [mounted_disk_path]
        self._add_virt_resource(diskdrive, vm.path_())
    def create_scsi_controller(self, vm_name):
        """Create an iscsi controller ready to mount volumes."""
        scsicontrl = self._get_new_resource_setting_data(
            self._SCSI_RES_SUBTYPE)
        scsicontrl.VirtualSystemIdentifiers = ['{' + str(uuid.uuid4()) + '}']
        vm = self._lookup_vm_check(vm_name)
        self._add_virt_resource(scsicontrl, vm.path_())
    def destroy_vm(self, vm_name):
        """Remove the VM definition from the host."""
        vm = self._lookup_vm_check(vm_name)
        vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
        #Remove the VM. It does not destroy any associated virtual disk.
        (job_path, ret_val) = vs_man_svc.DestroySystem(vm.path_())
        self.check_ret_val(ret_val, job_path)
    def _add_virt_resource(self, res_setting_data, vm_path):
        """Adds a new resource to the VM."""
        vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
        res_xml = [res_setting_data.GetText_(1)]
        (job_path,
         new_resources,
         ret_val) = vs_man_svc.AddResourceSettings(vm_path, res_xml)
        self.check_ret_val(ret_val, job_path)
        return new_resources
    def _modify_virt_resource(self, res_setting_data, vm_path):
        """Updates a VM resource."""
        # NOTE(review): vm_path is unused here — ModifyResourceSettings in the
        # v2 API identifies the resource from the setting data itself; the
        # parameter is kept for signature compatibility with the v1 base class.
        vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
        (job_path,
         out_res_setting_data,
         ret_val) = vs_man_svc.ModifyResourceSettings(
            ResourceSettings=[res_setting_data.GetText_(1)])
        self.check_ret_val(ret_val, job_path)
    def _remove_virt_resource(self, res_setting_data, vm_path):
        """Removes a VM resource."""
        vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
        res_path = [res_setting_data.path_()]
        (job_path, ret_val) = vs_man_svc.RemoveResourceSettings(res_path)
        self.check_ret_val(ret_val, job_path)
    def get_vm_state(self, vm_name):
        """Return the EnabledState value from the VM's summary info."""
        settings = self.get_vm_summary_info(vm_name)
        return settings['EnabledState']
    def take_vm_snapshot(self, vm_name):
        """Create a full snapshot of the VM and return its setting-data path."""
        vm = self._lookup_vm_check(vm_name)
        vs_snap_svc = self._conn.Msvm_VirtualSystemSnapshotService()[0]
        (job_path, snp_setting_data, ret_val) = vs_snap_svc.CreateSnapshot(
            AffectedSystem=vm.path_(),
            SnapshotType=self._SNAPSHOT_FULL)
        self.check_ret_val(ret_val, job_path)
        # normalize the WMI path separators so it can be used as a moniker
        job_wmi_path = job_path.replace('\\', '/')
        job = wmi.WMI(moniker=job_wmi_path)
        snp_setting_data = job.associators(
            wmi_result_class='Msvm_VirtualSystemSettingData')[0]
        return snp_setting_data.path_()
    def remove_vm_snapshot(self, snapshot_path):
        """Destroy the snapshot identified by snapshot_path."""
        vs_snap_svc = self._conn.Msvm_VirtualSystemSnapshotService()[0]
        (job_path, ret_val) = vs_snap_svc.DestroySnapshot(snapshot_path)
        self.check_ret_val(ret_val, job_path)
    def set_nic_connection(self, vm_name, nic_name, vswitch_conn_data):
        """Connect the named NIC to the given virtual switch port."""
        nic_data = self._get_nic_data_by_name(nic_name)
        eth_port_data = self._get_new_setting_data(
            'Msvm_EthernetPortAllocationSettingData')
        eth_port_data.HostResource = [vswitch_conn_data]
        eth_port_data.Parent = nic_data.path_()
        vm = self._lookup_vm_check(vm_name)
        self._add_virt_resource(eth_port_data, vm.path_())
    def enable_vm_metrics_collection(self, vm_name):
        """Enable CPU and disk metric collection for the VM, skipping
        metric definitions not present on this host."""
        metric_names = [self._METRIC_AGGR_CPU_AVG,
                        self._METRIC_AGGR_DISK_R,
                        self._METRIC_AGGR_DISK_W]
        vm = self._lookup_vm_check(vm_name)
        metric_svc = self._conn.Msvm_MetricService()[0]
        for metric_name in metric_names:
            metric_def = self._conn.CIM_BaseMetricDefinition(Name=metric_name)
            if not metric_def:
                LOG.debug(_("Metric not found: %s") % metric_name)
            else:
                metric_svc.ControlMetrics(
                    Subject=vm.path_(),
                    Definition=metric_def[0].path_(),
                    MetricCollectionEnabled=self._METRIC_ENABLED)
|
alex/pinax | refs/heads/master | pinax/fixtures/generate/gen_auth.py | 4 | import random
from django.contrib.auth.models import User
names = """Jacob Emily Michael Emma Joshua Madison Ethan Isabella Matthew Ava
Daniel Abigail Christopher Olivia Andrew Hannah Anthony Sophia William
Samantha Joseph Elizabeth Alexander Ashley David Mia Ryan Alexis Noah
Sarah James Natalie Nicholas Grace Tyler Chloe Logan Alyssa John Brianna
Christian Ella Jonathan Taylor Nathan Anna Benjamin Lauren Samuel Hailey
Dylan Kayla Brandon Addison Gabriel Victoria Elijah Jasmine Aiden Savannah
Angel Julia Jose Jessica Zachary Lily Caleb Sydney Jack Morgan Jackson
Katherine Kevin Destiny Gavin Lillian Mason Alexa Isaiah Alexandra Austin
Kaitlyn Evan Kaylee Luke Nevaeh Aidan Brooke Justin Makayla Jordan Allison
Robert Maria Isaac Angelina Landon Rachel Jayden Gabriella
"""
surnames = """Smith Johnson Williams Brown Jones Miller Davis Garcia
Rodriguez Wilson Martinez Anderson Taylor Thomas Hernandez Moore Martin
Jackson Thompson White Lopez Le Gonzalez Harris Clark Lewis Robinson Walker
Perez Hall Young Allen Sanchez Wright King Scott Green Baker Adams Nelson
Hill Ramirez Campbell Mitchell Roberts Carter Phillips Evans Turner Torres
Parker Collins Edwards Stewart Flores Morris Nguyen Murphy Rivera Cook Rogers
Morgan Peterson Cooper Reed Bailey Bell Gomez Kelly Howard Ward Cox Diaz
Richardson Wood Watson Brooks Bennett Gray James Reyes Cruz Hughes Price
Myers Long Foster Sanders Ross Morales Powell Sullivan Russell Ortiz
Jenkins Gutierrez Perry Butler Barnes Fisher
"""
# split the blobs above into word lists and shuffle so each run pairs
# first names with surnames differently
names = names.split()
random.shuffle(names)
surnames = surnames.split()
random.shuffle(surnames)
def generate():
    """Create one demo User per (first name, surname) pair, plus a
    superuser 'admin'; every account shares the same known password hash."""
    for name, surname in zip(names, surnames):
        username = '%s_%s' % (name.lower(), surname.lower())
        u = User.objects.create(
            username=username,
            first_name=name,
            last_name=surname,
            is_active=True,
            is_superuser=False,
            is_staff=False,
            email='%s@example.com' % (username,),
            password='sha1$58ab4$c80250ca3c0e27ab651ab1f76411ce1418742d25' #password=123
        )
        print "Created User %s" % unicode(u)
    # single superuser with the same well-known password
    u = User.objects.create(
        username='admin',
        first_name='Admin',
        last_name='Admin',
        is_active=True,
        is_superuser=True,
        is_staff=True,
        email='admin@example.com',
        password='sha1$58ab4$c80250ca3c0e27ab651ab1f76411ce1418742d25' #password=123
    )
    print "Created Admin User"
if __name__ == "__main__":
generate() |
bwrsandman/OpenUpgrade | refs/heads/8.0 | openerp/addons/test_new_api/tests/test_new_fields.py | 69 | #
# test cases for new-style fields
#
from datetime import date, datetime
from collections import defaultdict
from openerp.tests import common
from openerp.exceptions import except_orm
class TestNewFields(common.TransactionCase):
    """Exercise the new-style ORM field API: computed, related, inherited,
    reference and relational fields, plus constraints and defaults."""
    def test_00_basics(self):
        """ test accessing new fields """
        # find a discussion
        discussion = self.env.ref('test_new_api.discussion_0')
        # read field as a record attribute or as a record item
        self.assertIsInstance(discussion.name, basestring)
        self.assertIsInstance(discussion['name'], basestring)
        self.assertEqual(discussion['name'], discussion.name)
        # read it with method read()
        values = discussion.read(['name'])[0]
        self.assertEqual(values['name'], discussion.name)
    def test_01_basic_get_assertion(self):
        """ test item getter """
        # field access works on single record
        record = self.env.ref('test_new_api.message_0_0')
        self.assertEqual(len(record), 1)
        ok = record.body
        # field access fails on multiple records
        records = self.env['test_new_api.message'].search([])
        assert len(records) > 1
        with self.assertRaises(except_orm):
            faulty = records.body
    def test_01_basic_set_assertion(self):
        """ test item setter """
        # field assignment works on single record
        record = self.env.ref('test_new_api.message_0_0')
        self.assertEqual(len(record), 1)
        record.body = 'OK'
        # field assignment fails on multiple records
        records = self.env['test_new_api.message'].search([])
        assert len(records) > 1
        with self.assertRaises(except_orm):
            records.body = 'Faulty'
    def test_10_computed(self):
        """ check definition of computed fields """
        # by default function fields are not stored and readonly
        field = self.env['test_new_api.message']._fields['size']
        self.assertFalse(field.store)
        self.assertTrue(field.readonly)
        # 'name' is a computed field declared with store=True
        field = self.env['test_new_api.message']._fields['name']
        self.assertTrue(field.store)
        self.assertTrue(field.readonly)
    def test_10_non_stored(self):
        """ test non-stored fields """
        # find messages
        for message in self.env['test_new_api.message'].search([]):
            # check definition of field
            self.assertEqual(message.size, len(message.body or ''))
            # check recomputation after record is modified
            size = message.size
            message.write({'body': (message.body or '') + "!!!"})
            self.assertEqual(message.size, size + 3)
        # special case: computed field without dependency must be computed
        record = self.env['test_new_api.mixed'].create({})
        self.assertTrue(record.now)
    def test_11_stored(self):
        """ test stored fields """
        # find the demo discussion
        discussion = self.env.ref('test_new_api.discussion_0')
        self.assertTrue(len(discussion.messages) > 0)
        # check messages
        name0 = discussion.name or ""
        for message in discussion.messages:
            self.assertEqual(message.name, "[%s] %s" % (name0, message.author.name))
        # modify discussion name, and check again messages
        discussion.name = name1 = 'Talking about stuff...'
        for message in discussion.messages:
            self.assertEqual(message.name, "[%s] %s" % (name1, message.author.name))
        # switch message from discussion, and check again
        name2 = 'Another discussion'
        discussion2 = discussion.copy({'name': name2})
        message2 = discussion.messages[0]
        message2.discussion = discussion2
        for message in discussion2.messages:
            self.assertEqual(message.name, "[%s] %s" % (name2, message.author.name))
    def test_12_recursive(self):
        """ test recursively dependent fields """
        Category = self.env['test_new_api.category']
        abel = Category.create({'name': 'Abel'})
        beth = Category.create({'name': 'Bethany'})
        cath = Category.create({'name': 'Catherine'})
        dean = Category.create({'name': 'Dean'})
        ewan = Category.create({'name': 'Ewan'})
        finn = Category.create({'name': 'Finnley'})
        gabe = Category.create({'name': 'Gabriel'})
        # build the tree: gabe -> (cath -> (abel, beth), finn -> (dean, ewan))
        cath.parent = finn.parent = gabe
        abel.parent = beth.parent = cath
        dean.parent = ewan.parent = finn
        self.assertEqual(abel.display_name, "Gabriel / Catherine / Abel")
        self.assertEqual(beth.display_name, "Gabriel / Catherine / Bethany")
        self.assertEqual(cath.display_name, "Gabriel / Catherine")
        self.assertEqual(dean.display_name, "Gabriel / Finnley / Dean")
        self.assertEqual(ewan.display_name, "Gabriel / Finnley / Ewan")
        self.assertEqual(finn.display_name, "Gabriel / Finnley")
        self.assertEqual(gabe.display_name, "Gabriel")
        # moving a node must recompute its own display_name...
        ewan.parent = cath
        self.assertEqual(ewan.display_name, "Gabriel / Catherine / Ewan")
        # ...and moving an ancestor must recompute the descendants too
        cath.parent = finn
        self.assertEqual(ewan.display_name, "Gabriel / Finnley / Catherine / Ewan")
    def test_12_cascade(self):
        """ test computed field depending on computed field """
        message = self.env.ref('test_new_api.message_0_0')
        message.invalidate_cache()
        double_size = message.double_size
        self.assertEqual(double_size, message.size)
    def test_13_inverse(self):
        """ test inverse computation of fields """
        Category = self.env['test_new_api.category']
        abel = Category.create({'name': 'Abel'})
        beth = Category.create({'name': 'Bethany'})
        cath = Category.create({'name': 'Catherine'})
        dean = Category.create({'name': 'Dean'})
        ewan = Category.create({'name': 'Ewan'})
        finn = Category.create({'name': 'Finnley'})
        gabe = Category.create({'name': 'Gabriel'})
        self.assertEqual(ewan.display_name, "Ewan")
        # writing a path through display_name must rebuild the parent chain
        ewan.display_name = "Abel / Bethany / Catherine / Erwan"
        self.assertEqual(beth.parent, abel)
        self.assertEqual(cath.parent, beth)
        self.assertEqual(ewan.parent, cath)
        self.assertEqual(ewan.name, "Erwan")
    def test_14_search(self):
        """ test search on computed fields """
        discussion = self.env.ref('test_new_api.discussion_0')
        # determine message sizes
        sizes = set(message.size for message in discussion.messages)
        # search for messages based on their size
        for size in sizes:
            messages0 = self.env['test_new_api.message'].search(
                [('discussion', '=', discussion.id), ('size', '<=', size)])
            messages1 = self.env['test_new_api.message'].browse()
            for message in discussion.messages:
                if message.size <= size:
                    messages1 += message
            self.assertEqual(messages0, messages1)
    def test_15_constraint(self):
        """ test new-style Python constraints """
        discussion = self.env.ref('test_new_api.discussion_0')
        # remove oneself from discussion participants: we can no longer create
        # messages in discussion
        discussion.participants -= self.env.user
        with self.assertRaises(Exception):
            self.env['test_new_api.message'].create({'discussion': discussion.id, 'body': 'Whatever'})
        # make sure that assertRaises() does not leave fields to recompute
        self.assertFalse(self.env.has_todo())
        # put back oneself into discussion participants: now we can create
        # messages in discussion
        discussion.participants += self.env.user
        self.env['test_new_api.message'].create({'discussion': discussion.id, 'body': 'Whatever'})
    def test_20_float(self):
        """ test float fields """
        record = self.env['test_new_api.mixed'].create({})
        # assign value, and expect rounding
        record.write({'number': 2.4999999999999996})
        self.assertEqual(record.number, 2.50)
        # same with field setter
        record.number = 2.4999999999999996
        self.assertEqual(record.number, 2.50)
    def test_21_date(self):
        """ test date fields """
        record = self.env['test_new_api.mixed'].create({})
        # one may assign False or None
        record.date = None
        self.assertFalse(record.date)
        # one may assign date and datetime objects
        record.date = date(2012, 05, 01)
        self.assertEqual(record.date, '2012-05-01')
        record.date = datetime(2012, 05, 01, 10, 45, 00)
        self.assertEqual(record.date, '2012-05-01')
        # one may assign dates in the default format, and it must be checked
        record.date = '2012-05-01'
        self.assertEqual(record.date, '2012-05-01')
        with self.assertRaises(ValueError):
            record.date = '12-5-1'
    def test_22_selection(self):
        """ test selection fields """
        record = self.env['test_new_api.mixed'].create({})
        # one may assign False or None
        record.lang = None
        self.assertFalse(record.lang)
        # one may assign a value, and it must be checked
        for language in self.env['res.lang'].search([]):
            record.lang = language.code
        with self.assertRaises(ValueError):
            record.lang = 'zz_ZZ'
    def test_23_relation(self):
        """ test relation fields """
        demo = self.env.ref('base.user_demo')
        message = self.env.ref('test_new_api.message_0_0')
        # check environment of record and related records
        self.assertEqual(message.env, self.env)
        self.assertEqual(message.discussion.env, self.env)
        demo_env = self.env(user=demo)
        self.assertNotEqual(demo_env, self.env)
        # check environment of record and related records
        self.assertEqual(message.env, self.env)
        self.assertEqual(message.discussion.env, self.env)
        # "migrate" message into demo_env, and check again
        demo_message = message.sudo(demo)
        self.assertEqual(demo_message.env, demo_env)
        self.assertEqual(demo_message.discussion.env, demo_env)
        # assign record's parent to a record in demo_env
        message.discussion = message.discussion.copy({'name': 'Copy'})
        # both message and its parent field must be in self.env
        self.assertEqual(message.env, self.env)
        self.assertEqual(message.discussion.env, self.env)
    def test_24_reference(self):
        """ test reference fields. """
        record = self.env['test_new_api.mixed'].create({})
        # one may assign False or None
        record.reference = None
        self.assertFalse(record.reference)
        # one may assign a user or a partner...
        record.reference = self.env.user
        self.assertEqual(record.reference, self.env.user)
        record.reference = self.env.user.partner_id
        self.assertEqual(record.reference, self.env.user.partner_id)
        # ... but no record from a model that starts with 'ir.'
        with self.assertRaises(ValueError):
            record.reference = self.env['ir.model'].search([], limit=1)
    def test_25_related(self):
        """ test related fields. """
        message = self.env.ref('test_new_api.message_0_0')
        discussion = message.discussion
        # by default related fields are not stored
        field = message._fields['discussion_name']
        self.assertFalse(field.store)
        self.assertFalse(field.readonly)
        # check value of related field
        self.assertEqual(message.discussion_name, discussion.name)
        # change discussion name, and check result
        discussion.name = 'Foo'
        self.assertEqual(message.discussion_name, 'Foo')
        # change discussion name via related field, and check result
        message.discussion_name = 'Bar'
        self.assertEqual(discussion.name, 'Bar')
        self.assertEqual(message.discussion_name, 'Bar')
        # search on related field, and check result
        search_on_related = self.env['test_new_api.message'].search([('discussion_name', '=', 'Bar')])
        search_on_regular = self.env['test_new_api.message'].search([('discussion.name', '=', 'Bar')])
        self.assertEqual(search_on_related, search_on_regular)
        # check that field attributes are copied
        message_field = message.fields_get(['discussion_name'])['discussion_name']
        discussion_field = discussion.fields_get(['name'])['name']
        self.assertEqual(message_field['help'], discussion_field['help'])
    def test_26_inherited(self):
        """ test inherited fields. """
        # a bunch of fields are inherited from res_partner
        for user in self.env['res.users'].search([]):
            partner = user.partner_id
            for field in ('is_company', 'name', 'email', 'country_id'):
                self.assertEqual(getattr(user, field), getattr(partner, field))
                self.assertEqual(user[field], partner[field])
    def test_30_read(self):
        """ test computed fields as returned by read(). """
        discussion = self.env.ref('test_new_api.discussion_0')
        for message in discussion.messages:
            display_name = message.display_name
            size = message.size
            data = message.read(['display_name', 'size'])[0]
            self.assertEqual(data['display_name'], display_name)
            self.assertEqual(data['size'], size)
    def test_40_new(self):
        """ test new records. """
        discussion = self.env.ref('test_new_api.discussion_0')
        # create a new message
        message = self.env['test_new_api.message'].new()
        self.assertFalse(message.id)
        # assign some fields; should have no side effect
        message.discussion = discussion
        message.body = BODY = "May the Force be with you."
        self.assertEqual(message.discussion, discussion)
        self.assertEqual(message.body, BODY)
        self.assertFalse(message.author)
        self.assertNotIn(message, discussion.messages)
        # check computed values of fields
        self.assertEqual(message.name, "[%s] %s" % (discussion.name, ''))
        self.assertEqual(message.size, len(BODY))
    def test_41_defaults(self):
        """ test default values. """
        fields = ['discussion', 'body', 'author', 'size']
        defaults = self.env['test_new_api.message'].default_get(fields)
        self.assertEqual(defaults, {'author': self.env.uid})
        defaults = self.env['test_new_api.mixed'].default_get(['number'])
        self.assertEqual(defaults, {'number': 3.14})
class TestMagicFields(common.TransactionCase):
    """Check the automatically-maintained audit fields on records."""
    def test_write_date(self):
        # right after create(), creator and last writer are both the current user
        record = self.env['test_new_api.discussion'].create({'name': 'Booba'})
        self.assertEqual(record.create_uid, self.env.user)
        self.assertEqual(record.write_uid, self.env.user)
|
acsone/connector | refs/heads/8.0 | connector/deprecate.py | 18 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import inspect
import logging
def log_deprecate(message):
    """Emit a deprecation warning attributed to the caller of the
    deprecated function (two frames up the stack)."""
    caller = inspect.stack()[2]
    module = inspect.getmodule(caller[0])
    logger = logging.getLogger(module.__name__)
    # caller[3] is the function name, caller[2] its line number
    logger.warning('Deprecated: %s at line %r: %s', caller[3], caller[2], message)
class DeprecatedClass(object):
    """Callable stand-in that forwards everything to *replacement*,
    logging a deprecation warning each time *oldname* is instantiated."""

    def __init__(self, oldname, replacement):
        self.oldname = oldname
        self.replacement = replacement

    def _warning(self):
        # two frames up: past _warning and __call__, to the user's code
        caller = inspect.stack()[2]
        module = inspect.getmodule(caller[0])
        logger = logging.getLogger(module.__name__)
        logger.warning('Deprecated: class %s must be replaced by %s '
                       'at line %r',
                       self.oldname,
                       self.replacement.__name__,
                       caller[2])

    def __call__(self, *args, **kwargs):
        self._warning()
        return self.replacement(*args, **kwargs)

    def __getattr__(self, *args, **kwargs):
        # attribute lookups fall through to the replacement class
        return getattr(self.replacement, *args, **kwargs)
|
vivekanand1101/anitya | refs/heads/master | anitya/lib/backends/github.py | 2 | # -*- coding: utf-8 -*-
"""
(c) 2014 - Copyright Red Hat Inc
Authors:
Pierre-Yves Chibon <pingou@pingoured.fr>
"""
from anitya.lib.backends import BaseBackend, get_versions_by_regex
from anitya.lib.exceptions import AnityaPluginException
REGEX = b'class="tag-name">([^<]*)</span'
class GithubBackend(BaseBackend):
    ''' The custom class for projects hosted on github.com.
    This backend allows to specify a version_url and a regex that will
    be used to retrieve the version information.
    '''
    name = 'GitHub'
    examples = [
        'https://github.com/fedora-infra/fedocal',
        'https://github.com/fedora-infra/pkgdb2',
    ]
    @classmethod
    def get_version(cls, project):
        ''' Method called to retrieve the latest version of the projects
        provided, project that relies on the backend of this plugin.
        :arg Project project: a :class:`model.Project` object whose backend
            corresponds to the current plugin.
        :return: the latest version found upstream
        :return type: str
        :raise AnityaPluginException: a
            :class:`anitya.lib.exceptions.AnityaPluginException` exception
            when the version cannot be retrieved correctly
        '''
        return cls.get_ordered_versions(project)[-1]
    @classmethod
    def get_versions(cls, project):
        ''' Method called to retrieve all the versions (that can be found)
        of the projects provided, project that relies on the backend of
        this plugin.
        :arg Project project: a :class:`model.Project` object whose backend
            corresponds to the current plugin.
        :return: a list of all the possible releases found
        :return type: list
        :raise AnityaPluginException: a
            :class:`anitya.lib.exceptions.AnityaPluginException` exception
            when the versions cannot be retrieved correctly
        '''
        if project.version_url:
            # version_url may be either 'owner/repo' or a full github URL
            url_template = 'https://github.com/%(version_url)s/tags'
            version_url = project.version_url.replace('https://github.com/', '')
            url = url_template % {'version_url': version_url}
        elif project.homepage.startswith('https://github.com'):
            url = project.homepage
            if url.endswith('/'):
                # BUGFIX: strip the trailing slash; the previous code sliced
                # project.homepage[:1], keeping only the first character and
                # producing an invalid URL such as 'h/tags'
                url = url[:-1]
            url += '/tags'
        else:
            raise AnityaPluginException(
                'Project %s was incorrectly set-up' % project.name)
        return get_versions_by_regex(url, REGEX, project)
|
frankiecjunle/yunblog | refs/heads/master | venv/lib/python2.7/site-packages/werkzeug/contrib/wrappers.py | 77 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.wrappers
~~~~~~~~~~~~~~~~~~~~~~~~~
Extra wrappers or mixins contributed by the community. These wrappers can
be mixed in into request objects to add extra functionality.
Example::
from werkzeug.wrappers import Request as RequestBase
from werkzeug.contrib.wrappers import JSONRequestMixin
class Request(RequestBase, JSONRequestMixin):
pass
Afterwards this request object provides the extra functionality of the
:class:`JSONRequestMixin`.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import codecs
try:
from simplejson import loads
except ImportError:
from json import loads
from werkzeug.exceptions import BadRequest
from werkzeug.utils import cached_property
from werkzeug.http import dump_options_header, parse_options_header
from werkzeug._compat import wsgi_decoding_dance
def is_known_charset(charset):
    """Checks if the given charset is known to Python."""
    try:
        codecs.lookup(charset)
        return True
    except LookupError:
        return False
class JSONRequestMixin(object):
    """Request mixin exposing the body parsed as JSON via :attr:`json`.

    :exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type
    is not json or if the data itself cannot be parsed as json.
    """

    @cached_property
    def json(self):
        """Get the result of simplejson.loads if possible."""
        content_type = self.environ.get('CONTENT_TYPE', '')
        if 'json' not in content_type:
            raise BadRequest('Not a JSON request')
        try:
            text = self.data.decode(self.charset, self.encoding_errors)
            return loads(text)
        except Exception:
            raise BadRequest('Unable to read JSON request')
class ProtobufRequestMixin(object):
    """Request mixin that parses the body through `protobuf`_ via
    :meth:`parse_protobuf`.

    :exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type
    is not protobuf or if the data itself cannot be parsed property.

    .. _protobuf: http://code.google.com/p/protobuf/
    """

    #: by default the :class:`ProtobufRequestMixin` will raise a
    #: :exc:`~werkzeug.exceptions.BadRequest` if the object is not
    #: initialized.  You can bypass that check by setting this
    #: attribute to `False`.
    protobuf_check_initialization = True

    def parse_protobuf(self, proto_type):
        """Parse the data into an instance of proto_type."""
        content_type = self.environ.get('CONTENT_TYPE', '')
        if 'protobuf' not in content_type:
            raise BadRequest('Not a Protobuf request')

        message = proto_type()
        try:
            message.ParseFromString(self.data)
        except Exception:
            raise BadRequest("Unable to parse Protobuf request")

        # Optionally reject messages whose required fields are unset.
        if self.protobuf_check_initialization and not message.IsInitialized():
            raise BadRequest("Partial Protobuf request")

        return message
class RoutingArgsRequestMixin(object):
    """This request mixin adds support for the wsgiorg routing args
    `specification`_.

    .. _specification: http://www.wsgi.org/wsgi/Specifications/routing_args
    """

    def _get_routing_args(self):
        # BUG FIX: the fallback used to be `(())`, which is just the empty
        # tuple, so `[0]` raised IndexError on any request that did not have
        # 'wsgiorg.routing_args' set (including indirectly from the
        # routing_args setter below).  Default to an empty (args, vars) pair.
        return self.environ.get('wsgiorg.routing_args', ((), {}))[0]

    def _set_routing_args(self, value):
        if self.shallow:
            raise RuntimeError('A shallow request tried to modify the WSGI '
                               'environment. If you really want to do that, '
                               'set `shallow` to False.')
        self.environ['wsgiorg.routing_args'] = (value, self.routing_vars)

    routing_args = property(_get_routing_args, _set_routing_args, doc='''
        The positional URL arguments as `tuple`.''')
    del _get_routing_args, _set_routing_args

    def _get_routing_vars(self):
        rv = self.environ.get('wsgiorg.routing_args')
        if rv is not None:
            return rv[1]
        rv = {}
        if not self.shallow:
            # Store the fresh dict back into the environ so later mutations
            # of the returned mapping are visible there as well.
            self.routing_vars = rv
        return rv

    def _set_routing_vars(self, value):
        if self.shallow:
            raise RuntimeError('A shallow request tried to modify the WSGI '
                               'environment. If you really want to do that, '
                               'set `shallow` to False.')
        self.environ['wsgiorg.routing_args'] = (self.routing_args, value)

    routing_vars = property(_get_routing_vars, _set_routing_vars, doc='''
        The keyword URL arguments as `dict`.''')
    del _get_routing_vars, _set_routing_vars
class ReverseSlashBehaviorRequestMixin(object):
    """This mixin reverses the trailing slash behavior of :attr:`script_root`
    and :attr:`path`.  This makes it possible to use :func:`~urlparse.urljoin`
    directly on the paths.

    Because it changes the behavior of :class:`Request` this class has to be
    mixed in *before* the actual request class::

        class MyRequest(ReverseSlashBehaviorRequestMixin, Request):
            pass

    This example shows the differences (for an application mounted on
    `/application` and the request going to `/application/foo/bar`):

    +---------------+-------------------+---------------------+
    |               | normal behavior   | reverse behavior    |
    +===============+===================+=====================+
    | `script_root` | ``/application``  | ``/application/``   |
    +---------------+-------------------+---------------------+
    | `path`        | ``/foo/bar``      | ``foo/bar``         |
    +---------------+-------------------+---------------------+
    """

    @cached_property
    def path(self):
        """Requested path as unicode.  Works like the regular WSGI path
        info but without a leading slash.
        """
        raw = self.environ.get('PATH_INFO') or ''
        decoded = wsgi_decoding_dance(raw, self.charset, self.encoding_errors)
        return decoded.lstrip('/')

    @cached_property
    def script_root(self):
        """The root path of the script, including a trailing slash."""
        raw = self.environ.get('SCRIPT_NAME') or ''
        decoded = wsgi_decoding_dance(raw, self.charset, self.encoding_errors)
        return decoded.rstrip('/') + '/'
class DynamicCharsetRequestMixin(object):
    """If this mixin is mixed into a request class it will provide a
    dynamic `charset` attribute: when a charset is transmitted in the
    content type header, it is used from there.

    Because it changes the behavior of :class:`Request` this class has
    to be mixed in *before* the actual request class::

        class MyRequest(DynamicCharsetRequestMixin, Request):
            pass

    By default the request object assumes that the URL charset is the
    same as the data charset.  If the charset varies on each request
    based on the transmitted data it's not a good idea to let the URLs
    change based on that.  Most browsers assume either utf-8 or latin1
    for the URLs if they have troubles figuring out.  It's strongly
    recommended to set the URL charset to utf-8::

        class MyRequest(DynamicCharsetRequestMixin, Request):
            url_charset = 'utf-8'

    .. versionadded:: 0.6
    """

    #: the default charset that is assumed if the content type header
    #: is missing or does not contain a charset parameter.  The default
    #: is latin1 which is what HTTP specifies as default charset.
    #: You may however want to set this to utf-8 to better support
    #: browsers that do not transmit a charset for incoming data.
    default_charset = 'latin1'

    def unknown_charset(self, charset):
        """Called if a charset was provided but is not supported by
        the Python codecs module.  By default latin1 is assumed then
        to not lose any information, you may override this method to
        change the behavior.

        :param charset: the charset that was not found.
        :return: the replacement charset.
        """
        return 'latin1'

    @cached_property
    def charset(self):
        """The charset from the content type."""
        header = self.environ.get('CONTENT_TYPE')
        if not header:
            return self.default_charset
        _, options = parse_options_header(header)
        declared = options.get('charset')
        if not declared:
            return self.default_charset
        if is_known_charset(declared):
            return declared
        return self.unknown_charset(declared)
class DynamicCharsetResponseMixin(object):
    """If this mixin is mixed into a response class it will provide a
    dynamic `charset` attribute: the charset is looked up in, and stored
    into, the `Content-Type` header and updates itself automatically.
    This also means a small performance hit but can be useful if you're
    working with different charsets on responses.

    Because the charset attribute is not a property at class-level, the
    default value is stored in `default_charset`.

    Because it changes the behavior of :class:`Response` this class has
    to be mixed in *before* the actual response class::

        class MyResponse(DynamicCharsetResponseMixin, Response):
            pass

    .. versionadded:: 0.6
    """

    #: the default charset.
    default_charset = 'utf-8'

    def _get_charset(self):
        header = self.headers.get('content-type')
        if header:
            declared = parse_options_header(header)[1].get('charset')
            if declared:
                return declared
        return self.default_charset

    def _set_charset(self, charset):
        header = self.headers.get('content-type')
        ct, options = parse_options_header(header)
        if not ct:
            raise TypeError('Cannot set charset if Content-Type '
                            'header is missing.')
        options['charset'] = charset
        self.headers['Content-Type'] = dump_options_header(ct, options)

    charset = property(_get_charset, _set_charset, doc="""
        The charset for the response.  It's stored inside the
        Content-Type header as a parameter.""")
    del _get_charset, _set_charset
|
hoh/reloadr | refs/heads/master | examples/03_function.py | 1 |
from time import sleep
from reloadr import reloadr
@reloadr
def move(car, dx=0, dy=0):
    # Translate the car in-place.  Wrapped by @reloadr, so edits to this
    # function's source can be re-applied at runtime (see move._reload()).
    car.x = car.x + dx
    car.y = car.y + dy
class Car:
    """A toy vehicle tracked by its (x, y) coordinates."""

    # Class-level fallbacks; instances override these in __init__.
    x = 0
    y = 0

    def __init__(self, x=0, y=0):
        self.x, self.y = x, y

    def position(self):
        """Return a human-readable description of the current position."""
        return 'Car on {} {}'.format(self.x, self.y)
# Demo driver: run forever so that live edits to `move` can be observed.
car = Car(1000, 3000)

while True:
    move(car, 1, 1)
    print(car.position())
    sleep(0.5)
    # Re-read `move`'s source from disk so edits take effect mid-run.
    move._reload()
|
edx/edx-analytics-data-api-client | refs/heads/master | analyticsclient/engagement_timeline.py | 1 | from urllib.parse import urlencode
from analyticsclient.base import PostableCourseIDsEndpoint
from analyticsclient.constants import data_formats
class EngagementTimeline(PostableCourseIDsEndpoint):
    """API client for a single learner's per-course engagement timeline."""

    def __init__(self, client, username, course_id):
        """
        Initialize the EngagementTimeline client.

        Arguments:
            client (analyticsclient.client.Client): The client to use to access remote resources.
            username (str): String identifying the user (e.g. jbradley)
            course_id (str): String identifying the course (e.g. edX/DemoX/Demo_Course)
        """
        super().__init__(client)
        self.username = str(username)
        self.course_id = str(course_id)

    def get(self):
        """Get a particular learner's engagement timeline for a particular course."""
        course_filter = urlencode({'course_id': self.course_id})
        endpoint = 'engagement_timelines/{}/?{}'.format(self.username, course_filter)
        return self.client.get(endpoint, data_format=data_formats.JSON)
|
NetApp/cinder | refs/heads/master | cinder/api/views/manageable_volumes.py | 13 | # Copyright (c) 2016 Stratoscale, Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder.api import common
class ViewBuilder(common.ViewBuilder):
    """Model manageable volume responses as a python dictionary."""

    _collection_name = "os-volume-manage"

    # Fields exposed by the non-detailed and detailed views respectively.
    _SUMMARY_KEYS = ('reference', 'size', 'safe_to_manage')
    _DETAIL_KEYS = _SUMMARY_KEYS + ('reason_not_safe', 'cinder_id',
                                    'extra_info')

    def summary_list(self, request, volumes, count):
        """Show a list of manageable volumes without many details."""
        return self._list_view(self.summary, request, volumes, count)

    def detail_list(self, request, volumes, count):
        """Detailed view of a list of manageable volumes."""
        return self._list_view(self.detail, request, volumes, count)

    def summary(self, request, volume):
        """Generic, non-detailed view of a manageable volume description."""
        return {key: volume[key] for key in self._SUMMARY_KEYS}

    def detail(self, request, volume):
        """Detailed view of a manageable volume description."""
        return {key: volume[key] for key in self._DETAIL_KEYS}

    def _list_view(self, func, request, volumes, count):
        """Provide a view for a list of manageable volumes.

        ``count`` is accepted for interface parity with other view
        builders but is not included in the response here.
        """
        rendered = [func(request, volume) for volume in volumes]
        return {"manageable-volumes": rendered}
|
prabodhprakash/problemsolving | refs/heads/master | spoj/MMAXPER.py | 1 | import math
import sys
sys.stdin = open('input.txt', 'r')
# Shared state for calculate_maximum_area: `w` and `h` hold each bar's
# width/height (filled by the input loop), `dp` memoizes results.
# BUG FIX: `w` and `h` were never initialized anywhere in this script, so
# the input loop's `h.append(...)` raised NameError.
w = []
h = []
dp = {}


def calculate_maximum_area(index, old_width, old_height):
    """Return the best achievable value for bars 0..index, given the
    previously placed bar's height ``old_height``.

    ``old_width`` is unused by the recurrence but kept for interface
    compatibility with existing callers.
    """
    # Base case: a single bar contributes its larger dimension.
    if index == 0:
        return max(w[0], h[0])

    # Memoize on the values the result actually depends on.
    # BUG FIX: the old key str(index)+str(height)+str(width) was ambiguous
    # (e.g. index=1,h=23,w=4 collided with index=12,h=3,w=4), ignored
    # old_height (which the value depends on), and results were stored
    # under index-1 instead of index, corrupting the cache.
    key = (index, old_height)
    if key in dp:
        return dp[key]

    width = w[index]
    height = h[index]

    # Try both orientations of the current bar.
    val_a = calculate_maximum_area(index - 1, width, height) \
        + width + abs(old_height - height)
    val_b = calculate_maximum_area(index - 1, height, width) \
        + height + abs(old_height - width)
    val = max(val_a, val_b)

    dp[key] = val
    return val
no_bars = int(raw_input())
bars_mat = [[0 for x in range(no_bars)] for y in range(no_bars)]
bars_mat = [[0 for x in range(no_bars)] for y in range(2)]
for i in range (0, no_bars):
temph, tempw = map(int, raw_input().split())
h.append(temph)
w.append(tempw)
print calculate_maximum_area(no_bars-1, 0, 0) |
toshywoshy/ansible | refs/heads/devel | test/units/modules/network/fortios/test_fortios_switch_controller_storm_control.py | 21 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_switch_controller_storm_control
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Autouse fixture that patches the module's Connection with a mock."""
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_switch_controller_storm_control.Connection')
    return connection_class_mock


# NOTE(review): this passes the fixture *function* object (not a patched
# connection instance) to FortiOSHandler; the tests below appear to work
# regardless since the handler methods are mocked -- confirm intentional.
fos_instance = FortiOSHandler(connection_mock)
def test_switch_controller_storm_control_creation(mocker):
    """A present-state call with valid data issues one set() and succeeds."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value=set_result)

    module_args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_storm_control': {
            'broadcast': 'enable',
            'rate': '4',
            'unknown_multicast': 'enable',
            'unknown_unicast': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_storm_control.fortios_switch_controller(module_args, fos_instance)

    # Underscored option names are converted to dashed FortiOS field names.
    expected_payload = {
        'broadcast': 'enable',
        'rate': '4',
        'unknown-multicast': 'enable',
        'unknown-unicast': 'enable'
    }
    set_mock.assert_called_with('switch-controller', 'storm-control',
                                data=expected_payload, vdom='root')
    schema_mock.assert_not_called()

    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_switch_controller_storm_control_creation_fails(mocker):
    """A failing set() (HTTP 500) is reported as an error with no change."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value=set_result)

    module_args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_storm_control': {
            'broadcast': 'enable',
            'rate': '4',
            'unknown_multicast': 'enable',
            'unknown_unicast': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_storm_control.fortios_switch_controller(module_args, fos_instance)

    expected_payload = {
        'broadcast': 'enable',
        'rate': '4',
        'unknown-multicast': 'enable',
        'unknown-unicast': 'enable'
    }
    set_mock.assert_called_with('switch-controller', 'storm-control',
                                data=expected_payload, vdom='root')
    schema_mock.assert_not_called()

    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_switch_controller_storm_control_idempotent(mocker):
    """A 404 on DELETE is treated as idempotent: no error, no change."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value=set_result)

    module_args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_storm_control': {
            'broadcast': 'enable',
            'rate': '4',
            'unknown_multicast': 'enable',
            'unknown_unicast': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_storm_control.fortios_switch_controller(module_args, fos_instance)

    expected_payload = {
        'broadcast': 'enable',
        'rate': '4',
        'unknown-multicast': 'enable',
        'unknown-unicast': 'enable'
    }
    set_mock.assert_called_with('switch-controller', 'storm-control',
                                data=expected_payload, vdom='root')
    schema_mock.assert_not_called()

    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_switch_controller_storm_control_filter_foreign_attributes(mocker):
    """Attributes not in the module schema are dropped from the payload."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value=set_result)

    module_args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_storm_control': {
            'random_attribute_not_valid': 'tag',
            'broadcast': 'enable',
            'rate': '4',
            'unknown_multicast': 'enable',
            'unknown_unicast': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_storm_control.fortios_switch_controller(module_args, fos_instance)

    # The foreign 'random_attribute_not_valid' key must not be forwarded.
    expected_payload = {
        'broadcast': 'enable',
        'rate': '4',
        'unknown-multicast': 'enable',
        'unknown-unicast': 'enable'
    }
    set_mock.assert_called_with('switch-controller', 'storm-control',
                                data=expected_payload, vdom='root')
    schema_mock.assert_not_called()

    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.