repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
daemonmaker/pylearn2 | refs/heads/master | pylearn2/scripts/tests/test_dbm.py | 37 | """
Tests scripts in the DBM folder
"""
import os
import pylearn2.scripts.dbm.show_negative_chains as negative_chains
import pylearn2.scripts.dbm.show_reconstructions as show_reconstruct
import pylearn2.scripts.dbm.show_samples as show_samples
import pylearn2.scripts.dbm.top_filters as top_filters
from pylearn2.config import yaml_parse
from pylearn2.models.dbm.layer import BinaryVector, BinaryVectorMaxPool
from pylearn2.datasets.mnist import MNIST
from pylearn2.models.dbm.dbm import DBM
from nose.tools import with_setup
from pylearn2.datasets import control
from pylearn2.utils import serial
from theano import function
from theano.compat.six.moves import cPickle
def setup():
    """Build a tiny two-layer DBM on a 100-example MNIST slice and pickle it.

    The pickle ('dbm.pkl') is consumed by the viewer tests below; teardown()
    removes it and restores the dataset-loading flag.
    """
    # teardown is guaranteed to run pop_load_data, so the push here is safe.
    control.push_load_data(False)
    train_data = MNIST(which_set='train', start=0, stop=100, binarize=True)
    visible = BinaryVector(nvis=784, bias_from_marginals=train_data)
    first_hidden = BinaryVectorMaxPool(layer_name='h1', pool_size=1,
                                       irange=.05, init_bias=-2.,
                                       detector_layer_dim=50)
    second_hidden = BinaryVectorMaxPool(layer_name='h2', pool_size=1,
                                        irange=.05, init_bias=-2.,
                                        detector_layer_dim=10)
    dbm_model = DBM(batch_size=20, niter=2, visible_layer=visible,
                    hidden_layers=[first_hidden, second_hidden])
    dbm_model.dataset_yaml_src = """
!obj:pylearn2.datasets.binarizer.Binarizer {
    raw: !obj:pylearn2.datasets.mnist.MNIST {
        which_set: "train",
        start: 0,
        stop: 100
    }
}
"""
    dbm_model.layer_to_chains = dbm_model.make_layer_to_state(1)
    with open('dbm.pkl', 'wb') as f:
        cPickle.dump(dbm_model, f, protocol=cPickle.HIGHEST_PROTOCOL)
def teardown():
    """Delete the pickle written by setup(), then restore the load-data flag."""
    pickle_path = 'dbm.pkl'
    if os.path.isfile(pickle_path):
        os.remove(pickle_path)
    control.pop_load_data()
@with_setup(setup, teardown)
def test_show_negative_chains():
    """Smoke-test show_negative_chains() against the pickled DBM fixture."""
    negative_chains.show_negative_chains('dbm.pkl')
@with_setup(setup, teardown)
def test_show_reconstructions():
    """Exercise the reconstruction viewer pipeline end to end."""
    n_rows, n_cols = 5, 10
    n_examples = n_rows * n_cols
    model = show_reconstruct.load_model('dbm.pkl', n_examples)
    dataset = show_reconstruct.load_dataset(model.dataset_yaml_src,
                                            use_test_set='n')
    sym_batch = model.visible_layer.space.make_theano_batch()
    recons_func = function([sym_batch], model.reconstruct(sym_batch))
    vis_batch = dataset.get_batch_topo(n_examples)
    viewer = show_reconstruct.init_viewer(dataset, n_rows, n_cols, vis_batch)
    show_reconstruct.update_viewer(dataset, sym_batch, n_rows, n_cols, viewer,
                                   recons_func, vis_batch)
@with_setup(setup, teardown)
def test_show_samples():
    """Exercise the sample viewer update on the pickled DBM."""
    n_rows, n_cols = 10, 10
    n_samples = n_rows * n_cols
    model = show_samples.load_model('dbm.pkl', n_samples)
    dataset = yaml_parse.load(model.dataset_yaml_src)
    viewer = show_samples.init_viewer(dataset, n_rows, n_cols)
    vis_batch = dataset.get_batch_topo(n_samples)
    show_samples.update_viewer(dataset, viewer, vis_batch, n_rows, n_cols)
@with_setup(setup, teardown)
def test_top_filters():
    """Exercise both top_filters viewer constructors on the pickled DBM."""
    model = serial.load('dbm.pkl')
    first_layer, second_layer = model.hidden_layers[:2]
    weights1 = first_layer.get_weights()
    weights2 = second_layer.get_weights()
    top_filters.get_mat_product_viewer(weights1, weights2)
    dataset = yaml_parse.load(model.dataset_yaml_src)
    imgs = dataset.get_weights_view(weights1.T)
    top_filters.get_connections_viewer(imgs, weights1, weights2)
|
ntt-sic/nova | refs/heads/master | nova/tests/fake_loadables/__init__.py | 151 | # Copyright 2012 OpenStack Foundation # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For Loadable class handling.
"""
from nova import loadables
class FakeLoadable(object):
    """Marker base class; FakeLoader below is configured to load subclasses of it."""
    pass
class FakeLoader(loadables.BaseLoader):
    """BaseLoader specialized to discover FakeLoadable classes, for tests."""
    def __init__(self):
        super(FakeLoader, self).__init__(FakeLoadable)
|
clumsy/intellij-community | refs/heads/master | python/testData/inspections/PyCallingNonCallableInspection/callableClassDecorator.py | 83 | class D(object):
def __init__(self, attribute, value):
pass
def __call__(self, cls):
return cls
# Decorating with a callable D *instance*: the inspection should accept this.
@D("value", 42)
class C(object):
    pass
a = C()
print(a.value)
eharney/cinder | refs/heads/master | cinder/api/v3/volume_manage.py | 3 | # Copyright (c) 2016 Stratoscale, Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six.moves import http_client
from cinder.api.contrib import volume_manage as volume_manage_v2
from cinder.api import microversions as mv
from cinder.api.openstack import wsgi
from cinder.api.v3 import resource_common_manage as common
class VolumeManageController(common.ManageResource,
                             volume_manage_v2.VolumeManageController):
    """v3 volume-manage controller layered over the v2 implementation."""
    def __init__(self, *args, **kwargs):
        super(VolumeManageController, self).__init__(*args, **kwargs)
        # ManageResource mixin behavior is keyed off the resource type.
        self._set_resource_type('volume')

    @wsgi.response(http_client.ACCEPTED)
    def create(self, req, body):
        # Require at least the MANAGE_EXISTING_LIST microversion before
        # delegating to the v2 create logic.
        self._ensure_min_version(req, mv.MANAGE_EXISTING_LIST)
        return super(VolumeManageController, self).create(req, body)
def create_resource():
    """Build the WSGI resource wrapping VolumeManageController."""
    return wsgi.Resource(VolumeManageController())
|
esse-io/zen-common | refs/heads/master | oslo-modules/oslo_utils/strutils.py | 1 | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
System-level utilities and helper functions.
"""
import math
import re
import unicodedata
import six
from oslo_utils._i18n import _
from oslo_utils import encodeutils
# Exponent applied to the unit-system base (1024 or 1000) for each size
# prefix accepted by string_to_bytes().
UNIT_PREFIX_EXPONENT = {
    'k': 1,
    'K': 1,
    'Ki': 1,
    'M': 2,
    'Mi': 2,
    'G': 3,
    'Gi': 3,
    'T': 4,
    'Ti': 4,
}
# Unit system name -> (base, regex matching "<number><prefix><b|bit|B>").
UNIT_SYSTEM_INFO = {
    'IEC': (1024, re.compile(r'(^[-+]?\d*\.?\d+)([KMGT]i?)?(b|bit|B)$')),
    'SI': (1000, re.compile(r'(^[-+]?\d*\.?\d+)([kMGT])?(b|bit|B)$')),
}
# Lowercased values that bool_from_string() recognizes as True / False.
TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')
# to_slug(): strip non-word characters, then collapse runs of hyphens/spaces.
SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]")
SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+")
# NOTE(flaper87): The following globals are used by `mask_password`
_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password',
                  'auth_token', 'new_pass', 'auth_password', 'secret_uuid',
                  'sys_pswd', 'token']
# NOTE(ldbragst): Let's build a list of regex objects using the list of
# _SANITIZE_KEYS we already have. This way, we only have to add the new key
# to the list of _SANITIZE_KEYS and we can generate regular expressions
# for XML and JSON automatically.
_SANITIZE_PATTERNS_2 = {}
_SANITIZE_PATTERNS_1 = {}
# NOTE(amrith): Some regular expressions have only one parameter, some
# have two parameters. Use different lists of patterns here.
_FORMAT_PATTERNS_1 = [r'(%(key)s\s*[=]\s*)[^\s^\'^\"]+']
_FORMAT_PATTERNS_2 = [r'(%(key)s\s*[=]\s*[\"\'])[^\"\']*([\"\'])',
                      r'(%(key)s\s+[\"\'])[^\"\']*([\"\'])',
                      r'([-]{2}%(key)s\s+)[^\'^\"^=^\s]+([\s]*)',
                      r'(<%(key)s>)[^<]*(</%(key)s>)',
                      r'([\"\']%(key)s[\"\']\s*:\s*[\"\'])[^\"\']*([\"\'])',
                      r'([\'"][^"\']*%(key)s[\'"]\s*:\s*u?[\'"])[^\"\']*'
                      '([\'"])',
                      r'([\'"][^\'"]*%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?'
                      '[\'"])[^\"\']*([\'"])',
                      r'(%(key)s\s*--?[A-z]+\s*)\S+(\s*)']
# NOTE(dhellmann): Keep a separate list of patterns by key so we only
# need to apply the substitutions for keys we find using a quick "in"
# test.
for key in _SANITIZE_KEYS:
    _SANITIZE_PATTERNS_1[key] = []
    _SANITIZE_PATTERNS_2[key] = []
    # Compile every format pattern once per key, at import time.
    for pattern in _FORMAT_PATTERNS_2:
        reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
        _SANITIZE_PATTERNS_2[key].append(reg_ex)
    for pattern in _FORMAT_PATTERNS_1:
        reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
        _SANITIZE_PATTERNS_1[key].append(reg_ex)
def int_from_bool_as_string(subject):
    """Interpret a string as a boolean and return either 1 or 0.

    Any string value in ('True', 'true', 'On', 'on', '1') is interpreted as
    a boolean True (see bool_from_string for the full matching rules).

    Useful for JSON-decoded stuff and config file parsing.

    :param subject: value to interpret (coerced to text first)
    :returns: 1 for truthy strings, 0 otherwise
    """
    return int(bool_from_string(subject))
def bool_from_string(subject, strict=False, default=False):
    """Interpret a string as a boolean.

    Matching is case-insensitive: 't', 'true', 'on', 'y', 'yes' and '1'
    yield True; 'f', 'false', 'off', 'n', 'no' and '0' yield False.  With
    `strict=False` any other value returns `default`; with `strict=True`
    an unrecognized value (including None) raises ValueError, which is
    useful when validating API input.

    :param subject: value to interpret (coerced to text first)
    :param strict: raise on unrecognized values instead of defaulting
    :param default: value returned for unrecognized input when not strict
    :returns: bool
    :raises ValueError: if strict and the value is unrecognized
    """
    if not isinstance(subject, six.string_types):
        subject = six.text_type(subject)
    lowered = subject.strip().lower()
    if lowered in TRUE_STRINGS:
        return True
    if lowered in FALSE_STRINGS:
        return False
    if not strict:
        return default
    acceptable = ', '.join(
        "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
    msg = _("Unrecognized value '%(val)s', acceptable values are:"
            " %(acceptable)s") % {'val': subject,
                                  'acceptable': acceptable}
    raise ValueError(msg)
def string_to_bytes(text, unit_system='IEC', return_int=False):
    """Convert a size string into a float (or int) number of bytes.

    IEC units: Kb(it)/KB, Kib(it)/KiB, Mb/MB, Mib/MiB, Gb/GB, Gib/GiB,
    Tb/TB, Tib/TiB.  SI units: kb/kB, Mb/MB, Gb/GB, Tb/TB (SI does not
    accept a capital 'K').  A 'b'/'bit' suffix divides the magnitude by 8.

    :param text: string to convert, e.g. '1.5GiB'
    :param unit_system: 'IEC' (base 1024) or 'SI' (base 1000)
    :param return_int: return the ceiling as an int instead of a float
    :returns: numerical representation of text in bytes
    :raises ValueError: if text or unit_system is invalid
    """
    if unit_system not in UNIT_SYSTEM_INFO:
        raise ValueError(_('Invalid unit system: "%s"') % unit_system)
    base, reg_ex = UNIT_SYSTEM_INFO[unit_system]
    match = reg_ex.match(text)
    if not match:
        raise ValueError(_('Invalid string format: %s') % text)
    magnitude = float(match.group(1))
    unit_prefix = match.group(2)
    if match.group(3) in ('b', 'bit'):
        # Bits, not bytes.
        magnitude /= 8
    if unit_prefix:
        result = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix])
    else:
        result = magnitude
    return int(math.ceil(result)) if return_int else result
def to_slug(value, incoming=None, errors="strict"):
    """Normalize a string into a slug.

    Lowercases, strips non-word characters and converts whitespace/hyphen
    runs to single hyphens.  Inspired by Django's `slugify` filter.

    :param value: text to slugify
    :param incoming: text's current encoding
    :param errors: codec error policy (see the codecs documentation)
    :returns: slugified unicode representation of `value`
    :raises TypeError: if text is not an instance of str
    """
    decoded = encodeutils.safe_decode(value, incoming, errors)
    # NOTE(aababilov): no need to use safe_(encode|decode) here:
    # encodings are always "ascii", error handling is always "ignore"
    # and types are always known (first: unicode; second: str)
    ascii_text = unicodedata.normalize("NFKD", decoded).encode(
        "ascii", "ignore").decode("ascii")
    stripped = SLUGIFY_STRIP_RE.sub("", ascii_text).strip().lower()
    return SLUGIFY_HYPHENATE_RE.sub("-", stripped)
def mask_password(message, secret="***"):
    """Replace password with 'secret' in message.

    :param message: The string which includes security information.
    :param secret: value with which to replace passwords.
    :returns: The unicode value of message with the password fields masked.

    For example:

    >>> mask_password("'adminPass' : 'aaaaa'")
    "'adminPass' : '***'"
    >>> mask_password("'admin_pass' : 'aaaaa'")
    "'admin_pass' : '***'"
    >>> mask_password('"password" : "aaaaa"')
    '"password" : "***"'
    >>> mask_password("'original_password' : 'aaaaa'")
    "'original_password' : '***'"
    >>> mask_password("u'original_password' : u'aaaaa'")
    "u'original_password' : u'***'"
    """
    try:
        message = six.text_type(message)
    except UnicodeDecodeError:
        # NOTE(jecarey): Temporary fix to handle cases where message is a
        # byte string. A better solution will be provided in Kilo.
        pass
    # Replacement templates: group 1 is the prefix to keep; two-group
    # patterns also keep a trailing delimiter via group 2.
    substitute1 = r'\g<1>' + secret
    substitute2 = r'\g<1>' + secret + r'\g<2>'
    # NOTE(ldbragst): Check to see if anything in message contains any key
    # specified in _SANITIZE_KEYS, if not then just return the message since
    # we don't have to mask any passwords.
    for key in _SANITIZE_KEYS:
        if key in message:
            for pattern in _SANITIZE_PATTERNS_2[key]:
                message = re.sub(pattern, substitute2, message)
            for pattern in _SANITIZE_PATTERNS_1[key]:
                message = re.sub(pattern, substitute1, message)
    return message
def is_int_like(val):
    """Check if a value looks like an integer with base 10.

    True only when the text form of int(val) equals the text form of val,
    so '10' passes but '10.0' and True do not round-trip identically.

    :param val: Value to verify
    :type val: string
    :returns: bool
    """
    try:
        return six.text_type(int(val)) == six.text_type(val)
    except (TypeError, ValueError):
        # Not convertible to int at all.
        return False
|
jjscarafia/odoo | refs/heads/master | addons/analytic/models/__init__.py | 78 | # -*- coding: utf-8 -*-
import analytic
|
jshiv/turntable | refs/heads/master | test/lib/python2.7/site-packages/scipy/interpolate/setup.py | 106 | #!/usr/bin/env python
from __future__ import division, print_function, absolute_import
from os.path import join
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils Configuration for scipy.interpolate.

    Declares the Fortran 'fitpack' helper library and the C/C++/f2py
    extension modules that link against it (and LAPACK where needed).
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info
    # notfound_action=2 makes a missing LAPACK a hard error.
    lapack_opt = get_info('lapack_opt', notfound_action=2)
    config = Configuration('interpolate', parent_package, top_path)
    fitpack_src = [join('fitpack', '*.f')]
    config.add_library('fitpack', sources=fitpack_src)
    config.add_extension('interpnd',
                         sources=['interpnd.c'])
    # _ppoly needs LAPACK (hence the **lapack_opt build flags).
    config.add_extension('_ppoly',
                         sources=['_ppoly.c'],
                         **lapack_opt)
    config.add_extension('_fitpack',
                         sources=['src/_fitpackmodule.c'],
                         libraries=['fitpack'],
                         depends=(['src/__fitpack.h', 'src/multipack.h']
                                  + fitpack_src)
                         )
    config.add_extension('dfitpack',
                         sources=['src/fitpack.pyf'],
                         libraries=['fitpack'],
                         depends=fitpack_src,
                         )
    config.add_extension('_interpolate',
                         sources=['src/_interpolate.cpp'],
                         include_dirs=['src'],
                         depends=['src/interpolate.h'])
    config.add_data_dir('tests')
    return config
# Allow building this subpackage standalone: python setup.py build
if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
|
Ravenm/2143-OOP-NASH | refs/heads/master | python3env/Lib/site-packages/pip/_vendor/html5lib/__init__.py | 154 | """
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage:
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "1.0b8"
|
Alexander-M-Waldman/local_currency_site | refs/heads/master | lib/python2.7/site-packages/django/contrib/auth/handlers/modwsgi.py | 537 | from django import db
from django.contrib import auth
from django.utils.encoding import force_bytes
def check_password(environ, username, password):
    """
    Authenticate against Django's auth database.

    mod_wsgi docs specify None, True, False as the return value depending
    on whether the user exists and authenticates: None when the user is
    unknown or inactive, otherwise the boolean password-check result.
    """
    UserModel = auth.get_user_model()
    # db connection state is managed similarly to the wsgi handler
    # as mod_wsgi may call these functions outside of a request/response cycle
    db.reset_queries()
    try:
        try:
            candidate = UserModel._default_manager.get_by_natural_key(username)
        except UserModel.DoesNotExist:
            return None
        else:
            if candidate.is_active:
                return candidate.check_password(password)
            return None
    finally:
        db.close_old_connections()
def groups_for_user(environ, username):
    """
    Authorize a user based on groups.

    Returns the user's group names as bytes (mod_wsgi expects bytes), or an
    empty list for unknown/inactive users.
    """
    UserModel = auth.get_user_model()
    # Same connection-state management as check_password above.
    db.reset_queries()
    try:
        try:
            member = UserModel._default_manager.get_by_natural_key(username)
        except UserModel.DoesNotExist:
            return []
        if not member.is_active:
            return []
        return [force_bytes(group.name) for group in member.groups.all()]
    finally:
        db.close_old_connections()
|
jonfoster/pyxb2 | refs/heads/master | tests/drivers/test-ctd-simple.py | 3 | # -*- coding: utf-8 -*-
import logging
if __name__ == '__main__':
logging.basicConfig()
_log = logging.getLogger(__name__)
import pyxb.binding.generate
import pyxb.binding.datatypes as xsd
import pyxb.utils.domutils
from xml.dom import Node
import os.path
# Generate Python bindings for the test schema and exec them into this
# module's namespace (the tests below reference the generated clause*
# classes and ctype — presumably defined by the schema; verify there).
schema_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../schemas/test-ctd-simple.xsd'))
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestCTDSimple (unittest.TestCase):
    """Checks simple-type content and facet values on the generated bindings."""

    def testClause4 (self):
        # Base case: plain xsd:string content, no length facet.
        self.assertTrue(clause4._IsSimpleTypeContent())
        self.assertTrue(clause4._TypeDefinition == xsd.string)
        self.assertEqual(None, clause4._TypeDefinition._CF_length.value())

    def testClause3 (self):
        # Derived from clause4, same string type definition.
        self.assertTrue(clause3._IsSimpleTypeContent())
        self.assertTrue(issubclass(clause3, clause4))
        self.assertTrue(clause3._TypeDefinition == xsd.string)

    def testClause2 (self):
        # String restriction with an exact length facet of 6.
        self.assertTrue(clause2._IsSimpleTypeContent())
        self.assertTrue(issubclass(clause2, ctype))
        self.assertTrue(issubclass(clause2._TypeDefinition, xsd.string))
        self.assertEqual(6, clause2._TypeDefinition._CF_length.value())

    def testClause1_1 (self):
        # String restriction bounded by minLength=2 / maxLength=4.
        self.assertTrue(clause1_1._IsSimpleTypeContent())
        self.assertTrue(issubclass(clause1_1, clause4))
        self.assertTrue(issubclass(clause1_1._TypeDefinition, xsd.string))
        self.assertEqual(2, clause1_1._TypeDefinition._CF_minLength.value())
        self.assertEqual(4, clause1_1._TypeDefinition._CF_maxLength.value())

    def testClause1_2 (self):
        # String restriction with an exact length facet of 6.
        self.assertTrue(clause1_2._IsSimpleTypeContent())
        self.assertTrue(issubclass(clause1_2, clause4))
        self.assertTrue(issubclass(clause1_2._TypeDefinition, xsd.string))
        self.assertEqual(6, clause1_2._TypeDefinition._CF_length.value())
if __name__ == '__main__':
unittest.main()
|
tensorflow/models | refs/heads/master | official/vision/beta/modeling/layers/mask_sampler.py | 1 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains definitions of mask sampler."""
# Import libraries
import tensorflow as tf
from official.vision.beta.ops import spatial_transform_ops
def _sample_and_crop_foreground_masks(candidate_rois: tf.Tensor,
                                      candidate_gt_boxes: tf.Tensor,
                                      candidate_gt_classes: tf.Tensor,
                                      candidate_gt_indices: tf.Tensor,
                                      gt_masks: tf.Tensor,
                                      num_sampled_masks: int = 128,
                                      mask_target_size: int = 28):
  """Samples and creates cropped foreground masks for training.

  Args:
    candidate_rois: A `tf.Tensor` of shape of [batch_size, N, 4], where N is
      the number of candidate RoIs to be considered for mask sampling. It
      includes both positive and negative RoIs. The
      `num_mask_samples_per_image` positive RoIs will be sampled to create
      mask training targets.
    candidate_gt_boxes: A `tf.Tensor` of shape of [batch_size, N, 4], storing
      the corresponding groundtruth boxes to the `candidate_rois`.
    candidate_gt_classes: A `tf.Tensor` of shape of [batch_size, N], storing
      the corresponding groundtruth classes to the `candidate_rois`. 0 in the
      tensor corresponds to the background class, i.e. negative RoIs.
    candidate_gt_indices: A `tf.Tensor` of shape [batch_size, N], storing the
      corresponding groundtruth instance indices to the `candidate_gt_boxes`,
      i.e. gt_boxes[candidate_gt_indices[:, i]] = candidate_gt_boxes[:, i] and
      gt_boxes which is of shape [batch_size, MAX_INSTANCES, 4], M >= N, is
      the superset of candidate_gt_boxes.
    gt_masks: A `tf.Tensor` of [batch_size, MAX_INSTANCES, mask_height,
      mask_width] containing all the groundtruth masks which sample masks are
      drawn from.
    num_sampled_masks: An `int` that specifies the number of masks to sample.
    mask_target_size: An `int` that specifies the final cropped mask size
      after sampling. The output masks are resized w.r.t the sampled RoIs.

  Returns:
    foreground_rois: A `tf.Tensor` of shape of [batch_size, K, 4] storing the
      RoI that corresponds to the sampled foreground masks, where
      K = num_mask_samples_per_image.
    foreground_classes: A `tf.Tensor` of shape of [batch_size, K] storing the
      classes corresponding to the sampled foreground masks.
    cropped_foreground_masks: A `tf.Tensor` of shape of
      [batch_size, K, mask_target_size, mask_target_size] storing the cropped
      foreground masks used for training.
  """
  # top_k over the 0/1 foreground indicator picks the indices of the first
  # num_sampled_masks positive RoIs (ties broken by position); if there are
  # fewer positives than requested, background indices are included too.
  _, fg_instance_indices = tf.nn.top_k(
      tf.cast(tf.greater(candidate_gt_classes, 0), dtype=tf.int32),
      k=num_sampled_masks)
  # Pair each sampled index with its batch row for gather_nd.
  fg_instance_indices_shape = tf.shape(fg_instance_indices)
  batch_indices = (
      tf.expand_dims(tf.range(fg_instance_indices_shape[0]), axis=-1) *
      tf.ones([1, fg_instance_indices_shape[-1]], dtype=tf.int32))
  gather_nd_instance_indices = tf.stack(
      [batch_indices, fg_instance_indices], axis=-1)
  foreground_rois = tf.gather_nd(
      candidate_rois, gather_nd_instance_indices)
  foreground_boxes = tf.gather_nd(
      candidate_gt_boxes, gather_nd_instance_indices)
  foreground_classes = tf.gather_nd(
      candidate_gt_classes, gather_nd_instance_indices)
  foreground_gt_indices = tf.gather_nd(
      candidate_gt_indices, gather_nd_instance_indices)
  # Map the -1 "no groundtruth" sentinel to index 0 so gather_nd below stays
  # in bounds (those entries correspond to background samples).
  foreground_gt_indices = tf.where(
      tf.equal(foreground_gt_indices, -1),
      tf.zeros_like(foreground_gt_indices),
      foreground_gt_indices)
  # Same batch-index pairing trick to gather the matching groundtruth masks.
  foreground_gt_indices_shape = tf.shape(foreground_gt_indices)
  batch_indices = (
      tf.expand_dims(tf.range(foreground_gt_indices_shape[0]), axis=-1) *
      tf.ones([1, foreground_gt_indices_shape[-1]], dtype=tf.int32))
  gather_nd_gt_indices = tf.stack(
      [batch_indices, foreground_gt_indices], axis=-1)
  foreground_masks = tf.gather_nd(gt_masks, gather_nd_gt_indices)
  # Re-crop each groundtruth mask into its sampled RoI coordinate frame.
  cropped_foreground_masks = spatial_transform_ops.crop_mask_in_target_box(
      foreground_masks, foreground_boxes, foreground_rois, mask_target_size,
      sample_offset=0.5)
  return foreground_rois, foreground_classes, cropped_foreground_masks
@tf.keras.utils.register_keras_serializable(package='Vision')
class MaskSampler(tf.keras.layers.Layer):
  """Samples and creates mask training targets.

  Thin Keras-layer wrapper around _sample_and_crop_foreground_masks; the
  two constructor arguments are stored in a config dict for serialization.
  """

  def __init__(self, mask_target_size: int, num_sampled_masks: int, **kwargs):
    self._config_dict = {
        'mask_target_size': mask_target_size,
        'num_sampled_masks': num_sampled_masks,
    }
    super(MaskSampler, self).__init__(**kwargs)

  def call(self, candidate_rois: tf.Tensor, candidate_gt_boxes: tf.Tensor,
           candidate_gt_classes: tf.Tensor, candidate_gt_indices: tf.Tensor,
           gt_masks: tf.Tensor):
    """Samples and creates mask targets for training.

    Args:
      candidate_rois: A `tf.Tensor` of shape of [batch_size, N, 4], where N is
        the number of candidate RoIs to be considered for mask sampling. It
        includes both positive and negative RoIs. The
        `num_mask_samples_per_image` positive RoIs will be sampled to create
        mask training targets.
      candidate_gt_boxes: A `tf.Tensor` of shape of [batch_size, N, 4],
        storing the corresponding groundtruth boxes to the `candidate_rois`.
      candidate_gt_classes: A `tf.Tensor` of shape of [batch_size, N], storing
        the corresponding groundtruth classes to the `candidate_rois`. 0 in
        the tensor corresponds to the background class, i.e. negative RoIs.
      candidate_gt_indices: A `tf.Tensor` of shape [batch_size, N], storing
        the corresponding groundtruth instance indices to the
        `candidate_gt_boxes`, i.e.
        gt_boxes[candidate_gt_indices[:, i]] = candidate_gt_boxes[:, i],
        where gt_boxes which is of shape [batch_size, MAX_INSTANCES, 4],
        M >= N, is the superset of candidate_gt_boxes.
      gt_masks: A `tf.Tensor` of [batch_size, MAX_INSTANCES, mask_height,
        mask_width] containing all the groundtruth masks which sample masks
        are drawn from. after sampling. The output masks are resized w.r.t
        the sampled RoIs.

    Returns:
      foreground_rois: A `tf.Tensor` of shape of [batch_size, K, 4] storing
        the RoI that corresponds to the sampled foreground masks, where
        K = num_mask_samples_per_image.
      foreground_classes: A `tf.Tensor` of shape of [batch_size, K] storing
        the classes corresponding to the sampled foreground masks.
      cropped_foreground_masks: A `tf.Tensor` of shape of
        [batch_size, K, mask_target_size, mask_target_size] storing the
        cropped foreground masks used for training.
    """
    foreground_rois, foreground_classes, cropped_foreground_masks = (
        _sample_and_crop_foreground_masks(
            candidate_rois,
            candidate_gt_boxes,
            candidate_gt_classes,
            candidate_gt_indices,
            gt_masks,
            self._config_dict['num_sampled_masks'],
            self._config_dict['mask_target_size']))
    return foreground_rois, foreground_classes, cropped_foreground_masks

  def get_config(self):
    # Layer config is exactly the constructor arguments.
    return self._config_dict

  @classmethod
  def from_config(cls, config):
    return cls(**config)
|
jaffe59/vp-cnn | refs/heads/master | cnn_classifier/model.py | 1 | import torch
import torch.nn as nn
from torch.nn import init
import torch.nn.functional as F
from torch import autograd
class CNN_Text(nn.Module):
    """Kim-style CNN text classifier.

    Embeds token ids, applies parallel Conv2d filters of several widths,
    max-pools over time, concatenates the pooled features and classifies
    with a single linear layer.

    Args:
        args: namespace with embed_num, char_embed_dim / word_embed_dim,
            class_num, kernel_num, char_kernel_sizes / word_kernel_sizes,
            dropout, ortho_init, static, word_vector.
        char_or_word: 'char' or 'word'; selects embedding dim + kernel sizes.
        vectors: optional pretrained embedding weight tensor.
    """

    def __init__(self, args, char_or_word, vectors=None):
        super(CNN_Text, self).__init__()
        self.args = args
        V = args.embed_num
        if char_or_word == 'char':
            D = args.char_embed_dim
            Ks = args.char_kernel_sizes
        else:
            D = args.word_embed_dim
            Ks = args.word_kernel_sizes
        C = args.class_num
        Ci = 1                      # single input channel
        Co = args.kernel_num        # feature maps per kernel width
        # padding_idx=1 matches torchtext's default <pad> index.
        self.embed = nn.Embedding(V, D, padding_idx=1)
        if vectors is not None:
            self.embed.weight.data = vectors
        self.convs1 = nn.ModuleList([nn.Conv2d(Ci, Co, (K, D)) for K in Ks])
        # NOTE: the original guarded this on char_or_word in ('word', 'char'),
        # which is always true for the two supported modes.
        for layer in self.convs1:
            if args.ortho_init:
                # init.orthogonal_ replaces the deprecated init.orthogonal;
                # identical semantics (in-place, gain=1).
                init.orthogonal_(layer.weight)
            else:
                layer.weight.data.uniform_(-0.01, 0.01)
            layer.bias.data.zero_()
        self.dropout = nn.Dropout(args.dropout)
        self.fc1 = nn.Linear(len(Ks) * Co, C)
        if args.ortho_init:
            init.orthogonal_(self.fc1.weight)
        else:
            # init.normal_ replaces the deprecated init.normal (mean 0, std 1),
            # then scale down as before.
            init.normal_(self.fc1.weight)
            self.fc1.weight.data.mul_(0.01)
        self.fc1.bias.data.zero_()

    def conv_and_pool(self, x, conv):
        """ReLU-conv then max-pool over the time dimension: (N,Ci,W,D) -> (N,Co)."""
        x = F.relu(conv(x)).squeeze(3)          # (N, Co, W')
        x = F.max_pool1d(x, x.size(2)).squeeze(2)  # (N, Co)
        return x

    def forward(self, x):
        """Return per-class log-probabilities of shape (N, C)."""
        # dim=1 makes explicit the deprecated implicit choice for 2-D input.
        return F.log_softmax(self.confidence(x), dim=1)

    def confidence(self, x):
        """Return unnormalized class scores (logits) of shape (N, C)."""
        x = self.embed(x)                       # (N, W, D)
        if self.args.static and self.args.word_vector:
            # Detach so pretrained embeddings are not updated.
            x = autograd.Variable(x.data)
        x = x.unsqueeze(1)                      # (N, Ci, W, D)
        feats = [F.relu(conv(x)).squeeze(3) for conv in self.convs1]
        feats = [F.max_pool1d(t, t.size(2)).squeeze(2) for t in feats]
        x = self.dropout(torch.cat(feats, 1))   # (N, len(Ks)*Co)
        return self.fc1(x)
class SimpleLogistic(nn.Module):
    """Stack of linear layers applied to the concatenation of two score vectors.

    The first layer_num - 1 layers map (2*class_num -> 2*class_num); the last
    maps down to class_num.  No nonlinearity is applied between layers.
    """

    def __init__(self, args):
        super(SimpleLogistic, self).__init__()
        self.args = args
        self.input_size = self.args.class_num * 2
        self.output_size = self.args.class_num
        self.layer_num = self.args.layer_num
        stack = []
        for idx in range(self.layer_num):
            is_last = (idx == self.layer_num - 1)
            out_dim = self.output_size if is_last else self.input_size
            stack.append(nn.Linear(self.input_size, out_dim))
        self.layers = nn.ModuleList(stack)

    def forward(self, x1, x2):
        """Concatenate the two inputs on the last dim and run the stack."""
        hidden = torch.cat((x1, x2), dim=-1)
        for layer in self.layers:
            hidden = layer(hidden)
        return hidden
class StackingNet(nn.Module):
    """Ensemble stacker: one learned scalar weight per input model.

    forward() takes a sequence of (two) score tensors, forms their weighted
    sum and returns its log-softmax.
    """

    def __init__(self, args):
        super(StackingNet, self).__init__()
        self.args = args
        # One scalar mixing weight per stacked model (fixed at two here).
        self.params = nn.ParameterList([nn.Parameter(torch.rand(1))
                                        for i in range(2)])

    def forward(self, inputs):
        """Return log-softmax of the weighted sum of `inputs`."""
        output = 0
        for index, scores in enumerate(inputs):
            output = output + scores * self.params[index].expand(scores.size())
        # Explicit dim replaces the deprecated implicit log_softmax choice.
        # Assumes (batch, classes) inputs — for 2-D tensors dim=-1 matches
        # the old implicit behaviour exactly.
        return F.log_softmax(output, dim=-1)
yoer/hue | refs/heads/master | desktop/core/ext-py/django-nose-1.3/testapp/settings.py | 35 | DATABASES = {
'default': {
'NAME': 'django_master',
'ENGINE': 'django.db.backends.sqlite3',
}
}
MIDDLEWARE_CLASSES = ()
INSTALLED_APPS = (
'django_nose',
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
SECRET_KEY = 'ssshhhh'
|
thresholdsoftware/asylum | refs/heads/master | openerp/addons/l10n_fr_hr_payroll/l10n_fr_hr_payroll.py | 51 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP SA (<http://openerp.com>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
class res_company(osv.osv):
    """French payroll fields added to res.company (social-security data)."""
    _inherit = 'res.company'
    _columns = {
        'plafond_secu': fields.float('Plafond de la Securite Sociale', digits_compute=dp.get_precision('Payroll')),
        'nombre_employes': fields.integer('Nombre d\'employes'),
        'cotisation_prevoyance': fields.float('Cotisation Patronale Prevoyance', digits_compute=dp.get_precision('Payroll')),
        'org_ss': fields.char('Organisme de securite sociale', size=64),
        'conv_coll': fields.char('Convention collective', size=64),
    }
# Old-style osv registration (pre-v8 OpenERP convention).
res_company()
class hr_contract(osv.osv):
    """French payroll extensions to the employee contract."""
    _inherit = 'hr.contract'
    _columns = {
        # Qualification, level and coefficient as defined by the
        # applicable collective agreement.
        'qualif': fields.char('Qualification', size=64),
        'niveau': fields.char('Niveau', size=64),
        'coef': fields.char('Coefficient', size=64),
    }
# Instantiation registers the model with the old OpenERP ORM.
hr_contract()
class hr_payslip(osv.osv):
    """French payroll extensions to the payslip."""
    _inherit = 'hr.payslip'
    _columns = {
        # Payment method (free text, e.g. bank transfer or cheque).
        'payment_mode': fields.char('Mode de paiement', size=64),
    }
# Instantiation registers the model with the old OpenERP ORM.
hr_payslip()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
coddingtonbear/d-rats | refs/heads/master | d_rats/session_coordinator.py | 4 | #!/usr/bin/python
#
# Copyright 2008 Dan Smith <dsmith@danplanet.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import socket
import threading
import time
import os
import gobject
import formgui
import emailgw
import signals
import msgrouting
from utils import run_safe, run_gtk_locked
from d_rats.sessions import base, file, form, sock
class SessionThread(object):
OUTGOING = False
def __init__(self, coord, session, data):
self.enabled = True
self.coord = coord
self.session = session
self.arg = data
self.thread = threading.Thread(target=self.worker, args=(data,))
self.thread.setDaemon(True)
self.thread.start()
def stop(self):
self.enabled = False
self.thread.join()
def worker(self, **args):
print "**** EMPTY SESSION THREAD ****"
class FileBaseThread(SessionThread):
    """Common progress/status reporting for file-based transfer threads.

    Subclasses set progress_key to the session stats entry ("recv_size"
    or "sent_size") that measures their direction of transfer.
    """

    progress_key = "recv_size"

    @run_safe
    def status_cb(self, vals):
        """Translate raw session stats into a human-readable status line."""
        #print "GUI Status:"
        #for k, v in vals.items():
        #    print "  -> %s: %s" % (k, v)

        if vals["total_size"]:
            pct = (float(vals[self.progress_key]) / vals["total_size"]) * 100.0
        else:
            # Unknown total size: report 0% rather than dividing by zero.
            pct = 0.0

        if vals["retries"] > 0:
            retries = " (%i retries)" % vals["retries"]
        else:
            retries = ""

        if vals.has_key("start_time"):
            elapsed = time.time() - vals["start_time"]
            kbytes = vals[self.progress_key]
            speed = " %2.2f B/s" % (kbytes / elapsed)
        else:
            speed = ""

        if vals["sent_wire"]:
            amt = vals["sent_wire"]
            if amt > 1024:
                sent = " (%s %.1f KB)" % (_("Total"), amt >> 10)
            else:
                sent = " (%s %i B)" % (_("Total"), amt)
        else:
            sent = ""

        msg = "%s [%02.0f%%]%s%s%s" % (vals["msg"], pct, speed, sent, retries)

        self.pct_complete = pct

        self.coord.session_status(self.session, msg)

    def completed(self, objname=None):
        """Report a finished transfer to the coordinator."""
        self.coord.session_status(self.session, _("Transfer Completed"))

        if objname:
            msg = " of %s" % objname
        else:
            msg = ""

        size = self.session.stats["total_size"]
        if size > 1024:
            size >>= 10
            units = "KB"
        else:
            units = "B"

        if self.session.stats.has_key("start_time"):
            start = self.session.stats["start_time"]
            exmsg = " (%i%s @ %2.2f B/s)" % (\
                size, units,
                self.session.stats["total_size"] /
                (time.time() - start))
        else:
            exmsg = ""
        # NOTE(review): msg and exmsg are built here but never used, so
        # the detailed completion message is never reported. Looks like a
        # latent bug -- confirm the intended status text before removing.

    def failed(self, restart_info=None):
        """Report an interrupted transfer, with optional restart info."""
        s = _("Transfer Interrupted") + \
            " (%.0f%% complete)" % self.pct_complete

        self.coord.session_failed(self.session, s, restart_info)

    def __init__(self, *args):
        SessionThread.__init__(self, *args)

        # Updated by status_cb; used by failed() for the final message.
        self.pct_complete = 0.0

        self.session.status_cb = self.status_cb
class FileRecvThread(FileBaseThread):
    """Worker thread that receives a single file over a session."""

    progress_key = "recv_size"

    def worker(self, path):
        received = self.session.recv_file(path)
        if not received:
            self.failed()
            return
        self.completed("file %s" % os.path.basename(received))
        self.coord.session_newfile(self.session, received)
class FileSendThread(FileBaseThread):
    """Worker thread that sends a single file over a session."""

    OUTGOING = True
    progress_key = "sent_size"

    def worker(self, path):
        if not self.session.send_file(path):
            self.failed((self.session.get_station(), path))
            return
        self.completed("file %s" % os.path.basename(path))
        self.coord.session_file_sent(self.session, path)
class FormRecvThread(FileBaseThread):
    """Worker thread that receives a message form into the Inbox."""

    progress_key = "recv_size"

    def worker(self, path):
        # Receive into a timestamped file in the Inbox folder.
        md = os.path.join(self.coord.config.form_store_dir(), _("Inbox"))
        newfn = time.strftime(os.path.join(md, "form_%m%d%Y_%H%M%S.xml"))

        if not msgrouting.msg_lock(newfn):
            print "AIEE! Unable to lock incoming new message file!"

        fn = self.session.recv_file(newfn)

        # NOTE(review): name is computed but never used below.
        name = "%s %s %s" % (self.session.name,
                             _("from"),
                             self.session.get_station())

        if fn == newfn:
            # Stamp our callsign onto the form's routing path and re-save.
            form = formgui.FormFile(fn)
            form.add_path_element(self.coord.config.get("user", "callsign"))
            form.save_to(fn)

            self.completed("form")
            self.coord.session_newform(self.session, fn)
        else:
            self.failed()
            print "<--- Form transfer failed -->"
class FormSendThread(FileBaseThread):
    """Worker thread that sends a message form over a session."""

    OUTGOING = True
    progress_key = "sent_size"

    def worker(self, path):
        if not self.session.send_file(path):
            self.failed((self.session.get_station(), path))
            return
        self.completed()
        self.coord.session_form_sent(self.session, path)
class SocketThread(SessionThread):
def status(self):
vals = self.session.stats
if vals["retries"] > 0:
retries = " (%i %s)" % (vals["retries"], _("retries"))
else:
retries = ""
msg = "%i %s %s %i %s %s%s" % (vals["sent_size"],
_("bytes"), _("sent"),
vals["recv_size"],
_("bytes"), _("received"),
retries)
self.coord.session_status(self.session, msg)
def socket_read(self, sock, length, to=5):
data = ""
t = time.time()
while (time.time() - t) < to :
d = ""
try:
d = sock.recv(length - len(d))
except socket.timeout:
continue
if not d and not data:
raise Exception("Socket is closed")
data += d
return data
def worker(self, data):
(sock, timeout) = data
print "*** Socket thread alive (%i timeout)" % timeout
sock.settimeout(timeout)
while self.enabled:
t = time.time()
try:
sd = self.socket_read(sock, 512, timeout)
except Exception, e:
print str(e)
break
print "Waited %f sec for socket" % (time.time() - t)
try:
rd = self.session.read(512)
except base.SessionClosedError, e:
print "Session closed"
self.enabled = False
break
self.status()
if sd:
print "Sending socket data (%i)" % len(sd)
self.session.write(sd)
if rd:
print "Sending radio data (%i)" % len(rd)
sock.sendall(rd)
print "Closing session"
self.session.close()
try:
sock.close()
except:
pass
print "*** Socket thread exiting"
class SessionCoordinator(gobject.GObject):
    """Glue between the session manager and the GUI.

    Reacts to session lifecycle callbacks by spawning the appropriate
    worker thread (file, form or socket transfer) and relays per-session
    status to the UI via GObject signals, emitted from the main loop.
    """

    __gsignals__ = {
        "session-status-update" : signals.SESSION_STATUS_UPDATE,
        "session-started" : signals.SESSION_STARTED,
        "session-ended" : signals.SESSION_ENDED,
        "file-received" : signals.FILE_RECEIVED,
        "form-received" : signals.FORM_RECEIVED,
        "file-sent" : signals.FILE_SENT,
        "form-sent" : signals.FORM_SENT,
        }
    _signals = __gsignals__

    def _emit(self, signal, *args):
        # Workers run on their own threads; marshal the emit onto the
        # GTK main loop.
        gobject.idle_add(self.emit, signal, *args)

    # Thin wrappers used by the worker threads to report events.
    def session_status(self, session, msg):
        self._emit("session-status-update", session._id, msg)

    def session_newform(self, session, path):
        self._emit("form-received", session._id, path)

    def session_newfile(self, session, path):
        self._emit("file-received", session._id, path)

    def session_form_sent(self, session, path):
        self._emit("form-sent", session._id, path)

    def session_file_sent(self, session, path):
        self._emit("file-sent", session._id, path)

    def session_failed(self, session, msg, restart_info=None):
        self._emit("session-ended", session._id, msg, restart_info)

    def cancel_session(self, id, force=False):
        """Close session @id and drop its worker thread, if any."""
        if id < 2:
            # Don't let them cancel Control or Chat
            return

        try:
            session = self.sm.sessions[id]
        except Exception, e:
            print "Session `%i' not found: %s" % (id, e)
            return

        if self.sthreads.has_key(id):
            del self.sthreads[id]
        session.close(force)

    def create_socket_listener(self, sport, dport, dest):
        """Forward local TCP port @sport to @dport on station @dest."""
        if dport not in self.socket_listeners.keys():
            print "Starting a listener for port %i->%s:%i" % (sport,
                                                              dest,
                                                              dport)
            self.socket_listeners[dport] = \
                sock.SocketListener(self.sm, dest, sport, dport)
            print "Started"
        else:
            raise Exception("Listener for %i already active" % dport)

    def new_file_xfer(self, session, direction):
        """Spawn a file transfer worker ("in" = receive, "out" = send)."""
        msg = _("File transfer of %s started with %s") % (session.name,
                                                          session._st)
        self.emit("session-status-update", session._id, msg)

        if direction == "in":
            dd = self.config.get("prefs", "download_dir")
            self.sthreads[session._id] = FileRecvThread(self, session, dd)
        elif direction == "out":
            of = self.outgoing_files.pop()
            self.sthreads[session._id] = FileSendThread(self, session, of)

    def new_form_xfer(self, session, direction):
        """Spawn a form (message) transfer worker."""
        msg = _("Message transfer of %s started with %s") % (session.name,
                                                             session._st)
        self.emit("session-status-update", session._id, msg)

        if direction == "in":
            dd = self.config.form_store_dir()
            self.sthreads[session._id] = FormRecvThread(self, session, dd)
        elif direction == "out":
            of = self.outgoing_forms.pop()
            self.sthreads[session._id] = FormSendThread(self, session, of)

    def new_socket(self, session, direction):
        """Connect a new socket session to its local TCP endpoint."""
        msg = _("Socket session %s started with %s") % (session.name,
                                                        session._st)
        self.emit("session-status-update", session._id, msg)

        to = float(self.config.get("settings", "sockflush"))

        try:
            # Session names look like "<something>:<port>".
            foo, port = session.name.split(":", 2)
            port = int(port)
        except Exception, e:
            print "Invalid socket session name %s: %s" % (session.name, e)
            session.close()
            return

        if direction == "in":
            try:
                # Find the configured host for this inbound port and
                # connect a local socket to it.
                ports = self.config.options("tcp_in")
                for _portspec in ports:
                    portspec = self.config.get("tcp_in", _portspec)
                    p, h = portspec.split(",")
                    p = int(p)
                    if p == port:
                        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                        sock.connect((h, port))
                        self.sthreads[session._id] = SocketThread(self,
                                                                  session,
                                                                  (sock, to))
                        return

                raise Exception("Port %i not configured" % port)
            except Exception, e:
                msg = _("Error starting socket session: %s") % e
                self.emit("session-status-update", session._id, msg)
                session.close()

        elif direction == "out":
            sock = self.socket_listeners[port].dsock
            self.sthreads[session._id] = SocketThread(self, session, (sock, to))

    @run_gtk_locked
    def _new_session(self, type, session, direction):
        if session._id <= 3:
            return # Skip control, chat, sniff, rpc

        print "New session (%s) of type: %s" % (direction, session.__class__)
        self.emit("session-started", session._id, type)

        # Dispatch on the concrete session class.
        if isinstance(session, form.FormTransferSession):
            self.new_form_xfer(session, direction)
        elif isinstance(session, file.FileTransferSession):
            self.new_file_xfer(session, direction)
        elif isinstance(session, sock.SocketSession):
            self.new_socket(session, direction)
        else:
            print "*** Unknown session type: %s" % session.__class__.__name__

    def new_session(self, type, session, direction):
        # Defer to the main loop so GUI updates stay on one thread.
        gobject.idle_add(self._new_session, type, session, direction)

    def end_session(self, id):
        thread = self.sthreads.get(id, None)
        if isinstance(thread, SessionThread):
            # The worker thread reports its own completion/failure.
            del self.sthreads[id]
        else:
            self._emit("session-ended", id, "Ended", None)

    def session_cb(self, data, reason, session):
        """Session manager callback: dispatch new/end notifications."""
        t = str(session.__class__.__name__).replace("Session", "")
        if "." in t:
            t = t.split(".")[2]

        if reason.startswith("new,"):
            self.new_session(t, session, reason.split(",", 2)[1])
        elif reason == "end":
            self.end_session(session._id)

    def send_file(self, dest, filename, name=None):
        """Queue @filename and start an outgoing file session to @dest."""
        if name is None:
            name = os.path.basename(filename)

        # new_file_xfer() pops this queue when the session starts.
        self.outgoing_files.insert(0, filename)
        print "Outgoing files: %s" % self.outgoing_files

        xfer = file.FileTransferSession
        bs = self.config.getint("settings", "ddt_block_size")
        ol = self.config.getint("settings", "ddt_block_outlimit")

        # start_session blocks, so run it off the GUI thread.
        t = threading.Thread(target=self.sm.start_session,
                             kwargs={"name" : name,
                                     "dest" : dest,
                                     "cls" : xfer,
                                     "blocksize" : bs,
                                     "outlimit" : ol})
        t.setDaemon(True)
        t.start()
        print "Started Session"

    def send_form(self, dest, filename, name="Form"):
        """Queue @filename and start an outgoing form session to @dest."""
        self.outgoing_forms.insert(0, filename)
        print "Outgoing forms: %s" % self.outgoing_forms

        xfer = form.FormTransferSession

        t = threading.Thread(target=self.sm.start_session,
                             kwargs={"name" : name,
                                     "dest" : dest,
                                     "cls" : xfer})
        t.setDaemon(True)
        t.start()
        print "Started form session"

    def __init__(self, config, sm):
        gobject.GObject.__init__(self)

        self.sm = sm
        self.config = config

        # Session id -> SessionThread for active transfers.
        self.sthreads = {}

        # Pending outgoing paths, consumed by new_*_xfer().
        self.outgoing_files = []
        self.outgoing_forms = []

        # dport -> SocketListener for active TCP forwards.
        self.socket_listeners = {}

    def shutdown(self):
        """Stop all TCP listeners before exit."""
        for dport, listener in self.socket_listeners.items():
            print "Stopping TCP:%i" % dport
            listener.stop()
|
marqueedev/django | refs/heads/master | django/contrib/gis/geos/polygon.py | 450 | from ctypes import byref, c_uint
from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.libgeos import GEOM_PTR, get_pointer_arr
from django.contrib.gis.geos.linestring import LinearRing
from django.utils import six
from django.utils.six.moves import range
class Polygon(GEOSGeometry):
    # A polygon must have at least its exterior ring.
    _minlength = 1

    def __init__(self, *args, **kwargs):
        """
        Initializes on an exterior ring and a sequence of holes (both
        instances may be either LinearRing instances, or a tuple/list
        that may be constructed into a LinearRing).

        Examples of initialization, where shell, hole1, and hole2 are
        valid LinearRing geometries:
        >>> from django.contrib.gis.geos import LinearRing, Polygon
        >>> shell = hole1 = hole2 = LinearRing()
        >>> poly = Polygon(shell, hole1, hole2)
        >>> poly = Polygon(shell, (hole1, hole2))

        >>> # Example where a tuple parameters are used:
        >>> poly = Polygon(((0, 0), (0, 10), (10, 10), (10, 0), (0, 0)),
        ...                ((4, 4), (4, 6), (6, 6), (6, 4), (4, 4)))
        """
        if not args:
            raise TypeError('Must provide at least one LinearRing, or a tuple, to initialize a Polygon.')

        # Getting the ext_ring and init_holes parameters from the argument list
        ext_ring = args[0]
        init_holes = args[1:]
        n_holes = len(init_holes)

        # If initialized as Polygon(shell, (LinearRing, LinearRing)) [for backward-compatibility]
        if n_holes == 1 and isinstance(init_holes[0], (tuple, list)):
            if len(init_holes[0]) == 0:
                init_holes = ()
                n_holes = 0
            elif isinstance(init_holes[0][0], LinearRing):
                init_holes = init_holes[0]
                n_holes = len(init_holes)

        polygon = self._create_polygon(n_holes + 1, (ext_ring,) + init_holes)
        super(Polygon, self).__init__(polygon, **kwargs)

    def __iter__(self):
        "Iterates over each ring in the polygon."
        for i in range(len(self)):
            yield self[i]

    def __len__(self):
        "Returns the number of rings in this Polygon."
        return self.num_interior_rings + 1

    @classmethod
    def from_bbox(cls, bbox):
        "Constructs a Polygon from a bounding box (4-tuple)."
        x0, y0, x1, y1 = bbox
        # Non-numeric coordinates (e.g. Decimal) can't go through the
        # C constructor, so fall back to building the polygon from WKT.
        for z in bbox:
            if not isinstance(z, six.integer_types + (float,)):
                return GEOSGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' %
                                    (x0, y0, x0, y1, x1, y1, x1, y0, x0, y0))
        return Polygon(((x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0)))

    # ### These routines are needed for list-like operation w/ListMixin ###
    def _create_polygon(self, length, items):
        # Instantiate LinearRing objects if necessary, but don't clone them yet
        # _construct_ring will throw a TypeError if a parameter isn't a valid ring
        # If we cloned the pointers here, we wouldn't be able to clean up
        # in case of error.
        rings = []
        for r in items:
            if isinstance(r, GEOM_PTR):
                rings.append(r)
            else:
                rings.append(self._construct_ring(r))

        # Clone only now that every ring has validated; GEOS takes
        # ownership of the cloned pointers.
        shell = self._clone(rings.pop(0))

        n_holes = length - 1
        if n_holes:
            holes = get_pointer_arr(n_holes)
            for i, r in enumerate(rings):
                holes[i] = self._clone(r)
            holes_param = byref(holes)
        else:
            holes_param = None

        return capi.create_polygon(shell, holes_param, c_uint(n_holes))

    def _clone(self, g):
        # Accept either a raw GEOS pointer or a GEOSGeometry wrapper.
        if isinstance(g, GEOM_PTR):
            return capi.geom_clone(g)
        else:
            return capi.geom_clone(g.ptr)

    def _construct_ring(self, param, msg=(
            'Parameter must be a sequence of LinearRings or objects that can initialize to LinearRings')):
        "Helper routine for trying to construct a ring from the given parameter."
        if isinstance(param, LinearRing):
            return param
        try:
            ring = LinearRing(param)
            return ring
        except TypeError:
            raise TypeError(msg)

    def _set_list(self, length, items):
        # Getting the current pointer, replacing with the newly constructed
        # geometry, and destroying the old geometry.
        prev_ptr = self.ptr
        srid = self.srid
        self.ptr = self._create_polygon(length, items)
        if srid:
            self.srid = srid
        capi.destroy_geom(prev_ptr)

    def _get_single_internal(self, index):
        """
        Returns the ring at the specified index. The first index, 0, will
        always return the exterior ring. Indices > 0 will return the
        interior ring at the given index (e.g., poly[1] and poly[2] would
        return the first and second interior ring, respectively).

        CAREFUL: Internal/External are not the same as Interior/Exterior!
        _get_single_internal returns a pointer from the existing geometries for use
        internally by the object's methods. _get_single_external returns a clone
        of the same geometry for use by external code.
        """
        if index == 0:
            return capi.get_extring(self.ptr)
        else:
            # Getting the interior ring, have to subtract 1 from the index.
            return capi.get_intring(self.ptr, index - 1)

    def _get_single_external(self, index):
        return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid)

    _set_single = GEOSGeometry._set_single_rebuild
    _assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild

    # #### Polygon Properties ####
    @property
    def num_interior_rings(self):
        "Returns the number of interior rings."
        # Getting the number of rings
        return capi.get_nrings(self.ptr)

    def _get_ext_ring(self):
        "Gets the exterior ring of the Polygon."
        return self[0]

    def _set_ext_ring(self, ring):
        "Sets the exterior ring of the Polygon."
        self[0] = ring

    # Properties for the exterior ring/shell.
    exterior_ring = property(_get_ext_ring, _set_ext_ring)
    shell = exterior_ring

    @property
    def tuple(self):
        "Gets the tuple for each ring in this Polygon."
        return tuple(self[i].tuple for i in range(len(self)))
    coords = tuple

    @property
    def kml(self):
        "Returns the KML representation of this Polygon."
        inner_kml = ''.join("<innerBoundaryIs>%s</innerBoundaryIs>" % self[i + 1].kml
                            for i in range(self.num_interior_rings))
        return "<Polygon><outerBoundaryIs>%s</outerBoundaryIs>%s</Polygon>" % (self[0].kml, inner_kml)
|
aevri/mel | refs/heads/master | mel/lib/moleimaging.py | 1 | """Routines for analysing images of moles."""
import math
import cv2
import mel.lib.image
import mel.lib.math
def find_mole(frame):
    """Locate a mole in ``frame`` and return ``(annotated_image, stats)``."""
    # Moles stand out in the saturation channel: blur heavily, convert to
    # HSV, and threshold the saturation plane to get candidate regions.
    work = cv2.blur(frame.copy(), (40, 40))
    work = cv2.cvtColor(work, cv2.COLOR_BGR2HSV)
    saturation = cv2.split(work)[1]
    _, mask = cv2.threshold(saturation, 30, 255, cv2.THRESH_BINARY)
    ringed, stats, _ = mel.lib.moleimaging.process_contours(mask, frame)
    return ringed, stats
def calc_hist(image, channel, mask):
    """Return an 8-bin percentage histogram of ``channel`` in ``image``."""
    raw = cv2.calcHist([image], [channel], mask, [8], [0, 256])
    counts = [int(value) for value in raw]
    total = sum(counts)
    # Normalize the counts so bins sum to 100 (percentages).
    return [100 * count / total for count in counts]
def log10_zero(x):
    """Return the log10 of x, map log10(0) -> 0."""
    return math.log10(x) if x else 0
def biggest_contour(image):
    """Return the largest-area contour in ``image`` with more than 5 points."""
    contours, _ = cv2.findContours(
        image, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE
    )
    if not contours:
        raise Exception("No contours found.")

    best = None
    best_area = 0
    num_non_none = 0
    for candidate in contours:
        if candidate is None:
            continue
        num_non_none += 1
        # fitEllipse and similar downstream uses need > 5 points.
        if len(candidate) <= 5:
            continue
        area = cv2.contourArea(candidate)
        if best is None or area > best_area:
            best = candidate
            best_area = area

    if best is None:
        if num_non_none:
            raise Exception(
                "No suitable contours found. Some contours were found, "
                "but were too short."
            )
        else:
            raise Exception("No non-None contours found.")

    return best
def process_contours(mole_regions, original):
    """Fit an ellipse to the most likely mole contour in ``mole_regions``.

    Returns ``(annotated_image, stats, ellipse)``; ``stats`` and
    ``ellipse`` are None when no usable contour was found.
    """
    final = original.copy()
    stats = None

    contours, _ = cv2.findContours(
        mole_regions.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE
    )

    mole_contour, mole_area = find_mole_contour(
        contours, mole_regions.shape[0:2]
    )

    ellipse = None
    if mole_contour is not None:
        # cv2.fitEllipse requires more than 5 points.
        if len(mole_contour) > 5:
            ellipse = cv2.fitEllipse(mole_contour)
            blue = (255, 0, 0)

            moments = cv2.moments(mole_contour)
            hu_moments = cv2.HuMoments(moments)
            # Log-scale the Hu moments so their magnitudes are comparable.
            hu_moments = [log10_zero(abs(float(x))) for x in hu_moments]
            hsv = cv2.cvtColor(original, cv2.COLOR_BGR2HSV)
            # Saturation histograms: inside the mole mask vs whole image.
            hist = calc_hist(hsv, 1, mole_regions)
            hist_surrounding = calc_hist(hsv, 1, None)
            cv2.ellipse(final, ellipse, blue, 5)

            sqrt_area = math.sqrt(mole_area)
            aspect_ratio = (ellipse[1][0] / ellipse[1][1]) * 100
            # Ellipse area: pi*a*b where ellipse[1] holds the axis lengths
            # as diameters, hence the 0.25 factor.
            ellipse_area = math.pi * ellipse[1][0] * ellipse[1][1] * 0.25
            if ellipse_area:
                coverage_percent = (mole_area / ellipse_area) * 100
            else:
                coverage_percent = 0

            # Flat stats tuple: (sqrt_area, aspect, coverage) + 8-bin
            # mole histogram + 8-bin image histogram + 7 Hu moments.
            stats = (sqrt_area, aspect_ratio, coverage_percent)
            stats += tuple(hist)
            stats += tuple(hist_surrounding)
            stats += tuple(hu_moments)

    return final, stats, ellipse
def find_mole_contour(contours, width_height):
    """Pick the contour most likely to be the mole.

    Prefers the contour whose centroid is closest to the image centre,
    except that a contour more than 10x larger than the current pick
    wins regardless of distance.
    """
    # NOTE(review): width_height comes from image.shape[0:2], which is
    # (rows, cols); using [0] as x and [1] as y here looks transposed --
    # confirm against the callers.
    centre = (
        width_height[0] // 2,
        width_height[1] // 2,
    )

    mole_contour = None
    mole_area = None
    mole_distance = None
    for contour in contours:
        if contour is not None and len(contour) > 5:
            area = cv2.contourArea(contour)
            moments = cv2.moments(contour)
            if moments["m00"] != 0:
                # Contour centroid from the spatial moments.
                cx = int(moments["m10"] / moments["m00"])
                cy = int(moments["m01"] / moments["m00"])
                dx = centre[0] - cx
                dy = centre[1] - cy
                distance = (dx ** 2 + dy ** 2) ** 0.5
                if mole_area and area > 10 * mole_area:
                    # Overwhelmingly bigger: take it regardless of distance.
                    mole_contour = contour
                    mole_area = area
                    mole_distance = distance
                elif mole_distance is None or distance < mole_distance:
                    mole_contour = contour
                    mole_area = area
                    mole_distance = distance
    return mole_contour, mole_area
class MoleAcquirer(object):
    """Tracks successive mole stats and decides when they have settled.

    The acquirer becomes "locked" once the smoothed frame-to-frame stat
    differences all round to zero, and unlocks again if any smoothed
    difference grows past 1.
    """

    def __init__(self):
        super(MoleAcquirer, self).__init__()
        self._is_locked = False
        self._last_stats = None
        self._last_stats_diff = None

    def update(self, stats):
        """Fold a new stats tuple (or a falsy value) into the state."""
        if stats and self._last_stats:
            stats_diff = list(map(lambda x, y: x - y, self._last_stats, stats))
            # Exponentially smooth the stats ...
            self._last_stats = list(
                map(
                    lambda x, y: mel.lib.math.lerp(x, y, 0.5),
                    self._last_stats,
                    stats,
                )
            )
            if self._last_stats_diff:
                # ... and their frame-to-frame differences.
                self._last_stats_diff = list(
                    map(
                        lambda x, y: mel.lib.math.lerp(x, y, 0.5),
                        self._last_stats_diff,
                        stats_diff,
                    )
                )
                should_lock = all([int(x) == 0 for x in self._last_stats_diff])
                should_unlock = any(
                    [abs(int(x)) > 1 for x in self._last_stats_diff]
                )
                if not self._is_locked and should_lock:
                    self._is_locked = True
                elif self._is_locked and should_unlock:
                    self._is_locked = False
            else:
                # Second reading: seed the smoothed differences.
                self._last_stats_diff = stats_diff
                self._is_locked = False
        elif stats:
            # First reading: remember it but don't lock yet.
            self._last_stats = stats
            self._is_locked = False
        else:
            # No mole found this frame.
            self._is_locked = False

    @property
    def is_locked(self):
        # True while the smoothed stats are considered stable.
        return self._is_locked
def point_to_int_point(point):
    """Truncate the first two coordinates of ``point`` to ints."""
    x, y = point[0], point[1]
    return int(x), int(y)
def rotate_point_around_pivot(point, pivot, degrees):
    """Return ``point`` rotated about ``pivot`` by ``degrees``."""
    # Translate so the pivot is at the origin, apply the standard 2D
    # rotation matrix, then translate back.
    dx = point[0] - pivot[0]
    dy = point[1] - pivot[1]
    theta = degrees * (math.pi / 180.0)
    cos_t = math.cos(theta)
    sin_t = math.sin(theta)
    return (
        dx * cos_t - dy * sin_t + pivot[0],
        dx * sin_t + dy * cos_t + pivot[1],
    )
def draw_vertical_lines(image, left, top, right, bottom, color, width):
    """Draw the left and right edges of the given rectangle onto image."""
    for x in (left, right):
        cv2.line(image, (x, top), (x, bottom), color, width)
def draw_horizontal_lines(image, left, top, right, bottom, color, width):
    """Draw the top and bottom edges of the given rectangle onto image."""
    for y in (top, bottom):
        cv2.line(image, (left, y), (right, y), color, width)
def annotate_image(original, is_rot_sensitive):
    """Draw mole alignment guides onto ``original`` (modified in place).

    Returns ``(is_aligned, center_xy, angle_degs)``; the last two remain
    None when no usable mole contour was found.
    """
    is_aligned = False
    center_xy = None
    angle_degs = None

    original_width = original.shape[1]
    original_height = original.shape[0]

    final = original.copy()
    img = original.copy()

    # Same saturation-threshold pipeline as find_mole().
    img = cv2.blur(img, (40, 40))
    img = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
    img = cv2.split(img)[1]
    _, img = cv2.threshold(img, 30, 255, cv2.THRESH_BINARY)

    contours, _ = cv2.findContours(img, cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE)

    mole_contour, _ = find_mole_contour(contours, img.shape[0:2])

    if mole_contour is not None:
        # fitEllipse requires more than 5 points.
        if len(mole_contour) > 5:
            ellipse = cv2.fitEllipse(mole_contour)
            # Reference ellipse at 0 degrees, used as the rotation guide.
            guide_ellipse = (ellipse[0], ellipse[1], 0)
            center_xy = ellipse[0]
            center_xy = point_to_int_point(center_xy)
            angle_degs = ellipse[2]
            # The point that would be "up" for an unrotated ellipse, and
            # where it actually lands after rotating by the fitted angle.
            top_xy = (center_xy[0], center_xy[1] - int(ellipse[1][1] / 2))
            ellipse_top_xy = rotate_point_around_pivot(
                top_xy, center_xy, angle_degs
            )
            ellipse_top_xy = point_to_int_point(ellipse_top_xy)

            yellow = (0, 255, 255)
            green = (0, 255, 0)
            red = (0, 0, 255)
            blue = (255, 0, 0)

            is_rotation_aligned = not is_rot_sensitive

            color = yellow
            thickness = 10
            if is_rot_sensitive:
                # fitEllipse reports 0..180 degrees; near 0 or 180 means
                # the ellipse is close to upright.
                if angle_degs <= 20 or angle_degs >= 160:
                    is_rotation_aligned = True
                    color = green
                    thickness = 2
                else:
                    if angle_degs >= 45 and angle_degs <= 135:
                        color = red
                    # Show the target orientation vs the actual one.
                    cv2.ellipse(final, guide_ellipse, green, 10)
                    cv2.line(final, center_xy, top_xy, green, 10)
                    cv2.line(final, center_xy, ellipse_top_xy, color, 10)
            else:
                color = green
                thickness = 2
            cv2.ellipse(final, ellipse, color, thickness)

            # Central tolerance box: the mole centre must fall within
            # +/- 10% of the image centre along each axis.
            bounds_half_width = original_width // 10
            bounds_half_height = original_height // 10
            bounds_center_x = original_width // 2
            bounds_center_y = original_height // 2
            bounds_left = bounds_center_x - bounds_half_width
            bounds_right = bounds_center_x + bounds_half_width
            bounds_top = bounds_center_y - bounds_half_height
            bounds_bottom = bounds_center_y + bounds_half_height

            is_position_aligned = True

            if center_xy[0] < bounds_left or center_xy[0] > bounds_right:
                is_position_aligned = False
                # show x guide
                draw_vertical_lines(
                    final,
                    bounds_left,
                    bounds_top,
                    bounds_right,
                    bounds_bottom,
                    blue,
                    10,
                )
                cv2.rectangle(final, center_xy, center_xy, blue, 10)

            if center_xy[1] < bounds_top or center_xy[1] > bounds_bottom:
                is_position_aligned = False
                # show y guide
                draw_horizontal_lines(
                    final,
                    bounds_left,
                    bounds_top,
                    bounds_right,
                    bounds_bottom,
                    blue,
                    10,
                )
                cv2.rectangle(final, center_xy, center_xy, blue, 10)

            if is_rotation_aligned and is_position_aligned:
                is_aligned = True

    # Copy the annotations back into the caller's buffer.
    original[:, :] = final[:, :]

    return is_aligned, center_xy, angle_degs
def find_mole_ellipse(original, centre, radius):
    """Fit an ellipse to the mole near ``centre``, within ``radius`` pixels.

    Returns the ellipse in full-image coordinates, or None when the
    region falls outside the image or no ellipse could be fitted.
    """
    # ``centre`` must support vector arithmetic (presumably a numpy
    # array) -- TODO confirm against callers.
    lefttop = centre - (radius, radius)
    rightbottom = centre + (radius + 1, radius + 1)

    original = mel.lib.image.slice_square_or_none(
        original, lefttop, rightbottom
    )

    if original is None:
        return None

    # Threshold the equalized saturation channel to isolate the mole.
    image = original[:]
    image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    image = cv2.split(image)[1]
    image = cv2.equalizeHist(image)
    image = cv2.threshold(image, 252, 255, cv2.THRESH_BINARY)[1]
    image, _, ellipse = mel.lib.moleimaging.process_contours(image, original)

    if ellipse:
        # Translate from cropped-slice back to full-image coordinates.
        ellipse = (
            (ellipse[0][0] + lefttop[0], ellipse[0][1] + lefttop[1]),
            ellipse[1],
            ellipse[2],
        )

    return ellipse
# -----------------------------------------------------------------------------
# Copyright (C) 2015-2018 Angelos Evripiotis.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
|
dbrattli/python-gearshift | refs/heads/master | gearshift/identity/tests/test_identity.py | 1 | # coding=UTF-8
import re
import unittest
import urllib
import formencode
import cherrypy
from gearshift import testutil, database, identity, config, startup, expose
from gearshift.controllers import Controller, RootController
from gearshift.identity import SecureResource, \
has_permission, in_group, not_anonymous
from gearshift.identity.soprovider import TG_User, TG_Group, TG_Permission
# In-memory SQLite hub shared by every test in this module.
hub = database.AutoConnectHub("sqlite:///:memory:")
#hub = database.PackageHub("gearshift.identity")
def mycustomencrypt(password):
    """Toy "encryption" used to exercise custom encryption hooks."""
    return '%s_custom' % password
class RestrictedArea(Controller, SecureResource):
    """Controller subtree that only members of the 'peon' group may enter."""

    require = in_group('peon')

    @expose()
    def index(self):
        return "restricted_index"

    @expose()
    @identity.require(in_group('admin'))
    def in_admin_group(self):
        return 'in_admin_group'

    @expose()
    @identity.require(in_group('other'))
    def in_other_group(self):
        return 'in_other_group'

    @expose()
    def in_admin_group_explicit_check(self):
        # Manual group check instead of the decorator, to exercise an
        # IdentityFailure raised from inside a handler.
        if 'admin' in identity.current.groups:
            return 'in_admin_group'
        raise identity.IdentityFailure("You need to be an Admin")

    @expose()
    def in_other_group_explicit_check(self):
        if 'other' in identity.current.groups:
            return 'in_other_group'
        raise identity.IdentityException
class IdentityRoot(RootController):
    """Root controller exercising the identity framework under test."""

    _cp_config = {
        'tools.decode.on' : True
    }

    @expose()
    def index(self):
        return dict()

    @expose()
    def identity_failed(self, **kw):
        # Identity failures are redirected here (the configured
        # failure_url); answer with HTTP 401.
        cherrypy.response.status = 401
        return 'identity_failed_answer'

    @expose()
    @identity.require(not_anonymous())
    def logged_in_only(self):
        return 'logged_in_only'

    @expose()
    @identity.require(in_group('peon'))
    def in_peon_group(self):
        # Sanity-check the whole identity.current API for the known user.
        user = TG_User.by_user_name("samIam")
        group_ids = set((TG_Group.by_group_name("peon").id,
                         TG_Group.by_group_name("other").id))
        assert identity.current.user == user
        assert identity.current.user_name == user.user_name
        assert identity.current.user_id == user.id
        assert identity.current.groups == set(('peon', 'other'))
        assert identity.current.group_ids == group_ids
        assert "samIam" == cherrypy.serving.request.identity.user_name
        return 'in_peon_group'

    @expose()
    def test_exposed_require(self):
        # identity.require should record its predicate in the handler's
        # CherryPy config so tools can find it.
        print self.in_peon_group._cp_config
        if not self.in_peon_group._cp_config.has_key('tools.identity.require'):
            return 'no require config'
        if not isinstance(self.in_peon_group._cp_config['tools.identity.require'], in_group):
            return 'not correct class'
        if 'peon' != self.in_peon_group._cp_config['tools.identity.require'].group_name:
            return 'not correct group name'
        return 'require is exposed'

    @expose()
    @identity.require(in_group('admin'))
    def in_admin_group(self):
        return 'in_admin_group'

    @expose()
    @identity.require(has_permission('chops_wood'))
    def has_chopper_permission(self):
        return 'has_chopper_permission'

    @expose()
    @identity.require(has_permission('bosses_people'))
    def has_boss_permission(self):
        return "has_boss_permission"

    @expose()
    def logout(self):
        identity.current.logout()
        return "logged out"

    @expose()
    @identity.require(not_anonymous())
    def user_email(self):
        return identity.current.user.email_address

    # Secured subtree; see RestrictedArea above.
    peon_area = RestrictedArea()

    @expose()
    def new_user_setup(self, user_name, password):
        return '%s %s' % (user_name, password)

    # UTF-8-encoded query string checked by test_params below.
    _test_encoded_params = ('b=krümel&d.a=klöße1')

    @expose()
    @identity.require(not_anonymous())
    def test_params(self, **kwargs):
        params = self._test_encoded_params
        # formencode's variable_decode create a datastructure
        # but does not "decode" anything
        to_datastruct = formencode.variabledecode.variable_decode
        expected_params = to_datastruct(
            dict([p.split('=') for p in params.split('&')]))
        params_ok = True
        print expected_params
        print cherrypy.request.params
        # CherryPy's decode tool should have produced unicode values.
        if not expected_params['b'].decode(
                'utf8') == cherrypy.request.params['b']:
            params_ok = False
        if not expected_params['d']['a'].decode(
                'utf8') == cherrypy.request.params['d.a']:
            params_ok = False
        if params_ok:
            return 'params ok'
        else:
            return 'wrong params: %s\nexpected unicode objects' \
                   ' for all strings' % cherrypy.request.params

    @expose()
    def is_anonymous(self):
        # Without a login, the request identity must be anonymous.
        assert cherrypy.serving.request.identity.user_name == None
        assert cherrypy.serving.request.identity.anonymous
        assert identity.current.anonymous
        return 'is_anonymous'
class TestIdentity(testutil.TGTest):
    """Functional tests for the TurboGears identity framework.

    Each test drives the ``IdentityRoot`` controller through the WSGI
    test app and checks authentication, authorization and password
    hashing behaviour of the SQLObject identity provider.
    """

    def setUp(self):
        # identity requires visit and a failure_url
        test_config = {'global': {
            'tools.visit.on': True, 'tools.identity.on': True,
            'tools.identity.failure_url': '/identity_failed',
            'tools.identity.soprovider.encryption_algorithm': None}}
        # Remember the original settings so tearDown can restore them.
        original_config = dict()
        for key in test_config['global']:
            original_config[key] = config.get(key)
        self._original_config = original_config
        #config.configure_loggers(test_config)
        config.update(test_config['global'])
        self.root = IdentityRoot
        testutil.TGTest.setUp(self)
        self.init_model()

    def tearDown(self):
        testutil.TGTest.tearDown(self)
        config.update(self._original_config)

    def init_model(self):
        """Create the test user, groups and permissions exactly once."""
        if TG_User.select().count() == 0:
            user = TG_User(user_name='samIam',
                email_address='samiam@example.com',
                display_name='Samuel Amicus', password='secret')
            admin_group = TG_Group(group_name="admin",
                display_name="Administrator")
            peon_group = TG_Group(group_name="peon",
                display_name="Regular Peon")
            other_group = TG_Group(group_name="other",
                display_name="Another Group")
            chopper_perm = TG_Permission(permission_name='chops_wood',
                description="Wood Chopper")
            boss_perm = TG_Permission(permission_name='bosses_people',
                description="Benevolent Dictator")
            peon_group.addTG_Permission(chopper_perm)
            admin_group.addTG_Permission(boss_perm)
            # samIam belongs to 'peon' and 'other', but not 'admin'.
            user.addTG_Group(peon_group)
            user.addTG_Group(other_group)

    def test_user_password_parameters(self):
        """Controller can receive user_name and password parameters."""
        response = self.app.get('/new_user_setup?user_name=test&password=pw')
        assert 'test pw' in response

    def test_user_exists(self):
        u = TG_User.by_user_name('samIam')
        assert u.email_address == 'samiam@example.com'

    def test_user_password(self):
        """Test if we can set a user password (no encryption algorithm)."""
        hub.begin()
        u = TG_User.by_user_name('samIam')
        u.password = 'password'
        u.sync()
        assert u.password == 'password'
        hub.rollback()
        hub.end()

    def test_user_password_unicode(self):
        """Test if we can set a user password which is encoded as unicode
        (no encryption algorithm)."""
        config.update({'tools.identity.soprovider.encryption_algorithm': None})
        # force new config values to load
        startup.startTurboGears()
        self.app.get('/')
        hub.begin()
        u = TG_User.by_user_name('samIam')
        u.password = u'garçon'
        u.sync()
        assert u.password == u'garçon'
        hub.rollback()
        hub.end()

    def test_user_password_hashed_sha(self):
        """Test if a sha hashed password is stored in the database."""
        config.update({'tools.identity.soprovider.encryption_algorithm': 'sha1'})
        # force new config values to load
        startup.startTurboGears()
        self.app.get('/')
        hub.begin()
        u = TG_User.by_user_name('samIam')
        u.password = 'password'
        u.sync()
        assert u.password == '5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8'
        hub.rollback()
        hub.end()

    def test_user_password_hashed_sha_unicode(self):
        """Test if a sha hashed password with unicode characters is stored
        in the database."""
        config.update({'tools.identity.soprovider.encryption_algorithm': 'sha1'})
        # force new config values to load
        startup.startTurboGears()
        self.app.get('/')
        hub.begin()
        u = TG_User.by_user_name('samIam')
        u.password = u'garçon'
        u.sync()
        assert u.password == '442edb21c491a6e6f502eb79e98614f3c7edf43e'
        hub.rollback()
        hub.end()

    def test_user_password_hashed_md5(self):
        """Test if a md5 hashed password is stored in the database."""
        config.update({'tools.identity.soprovider.encryption_algorithm': 'md5'})
        # force new config values to load
        startup.startTurboGears()
        self.app.get('/')
        hub.begin()
        u = TG_User.by_user_name('samIam')
        u.password = 'password'
        u.sync()
        assert u.password == '5f4dcc3b5aa765d61d8327deb882cf99'
        hub.rollback()
        hub.end()

    def test_user_password_hashed_md5_unicode(self):
        """Test if a md5 hashed password with unicode characters is stored
        in the database."""
        config.update({'tools.identity.soprovider.encryption_algorithm': 'md5'})
        # force new config values to load
        startup.startTurboGears()
        self.app.get('/')
        hub.begin()
        u = TG_User.by_user_name('samIam')
        u.password = u'garçon'
        u.sync()
        assert u.password == 'c295c4bb2672ca8c432effc53b40bb1e'
        hub.rollback()
        hub.end()

    def test_user_password_hashed_md5_utf8string(self):
        """Test if a md5 hashed password with unicode characters is stored in
        the database if the password is entered as str (encoded in UTF-8). This
        test ensures that the encryption algorithm does handle non-unicode
        parameters gracefully."""
        config.update({'tools.identity.soprovider.encryption_algorithm': 'md5'})
        # force new config values to load
        startup.startTurboGears()
        self.app.get('/')
        hub.begin()
        u = TG_User.by_user_name('samIam')
        u.password = u'garçon'.encode('UTF-8')
        u.sync()
        # Same hash as the unicode variant above.
        assert u.password == 'c295c4bb2672ca8c432effc53b40bb1e'
        hub.rollback()
        hub.end()

    def test_user_password_raw(self):
        """Test that we can store raw values in the password field
        (without being hashed)."""
        config.update({'tools.identity.soprovider.encryption_algorithm': 'sha1'})
        # force new config values to load
        startup.startTurboGears()
        self.app.get('/')
        hub.begin()
        u = TG_User.by_user_name('samIam')
        u.set_password_raw('password')
        u.sync()
        assert u.password == 'password'
        hub.rollback()
        hub.end()

    def test_user_password_raw_unicode(self):
        """Test that a raw unicode password bypasses the hash algorithm."""
        config.update({'tools.identity.soprovider.encryption_algorithm': 'sha1'})
        # force new config values to load
        startup.startTurboGears()
        self.app.get('/')
        hub.begin()
        u = TG_User.by_user_name('samIam')
        u.set_password_raw(u'garçon')
        u.sync()
        assert u.password == u'garçon'
        hub.rollback()
        hub.end()

    def test_user_password_hashed_custom(self):
        """Test if a custom hashed password is stored in the database."""
        config.update({'tools.identity.soprovider.encryption_algorithm': 'custom',
            'tools.identity.custom_encryption':
                'identity.tests.test_identity.mycustomencrypt'})
        # force new config values to load
        startup.startTurboGears()
        self.app.get('/')
        hub.begin()
        u = TG_User.by_user_name('samIam')
        u.password = 'password'
        u.sync()
        assert u.password == 'password_custom'
        hub.rollback()
        hub.end()

    def test_anonymous_browsing(self):
        """Test if we can show up anonymously."""
        response = self.app.get('/is_anonymous')
        assert 'is_anonymous' in response

    def test_deny_anonymous(self):
        """Test that we have secured an url from anonymous users."""
        response = self.app.get('/logged_in_only', status=401)
        assert 'identity_failed_answer' in response

    def test_deny_anonymous_viewable(self):
        """Test that a logged in user can see an resource blocked
        from anonymous users."""
        response = self.app.get('/logged_in_only?'
            'user_name=samIam&password=secret&login=Login')
        assert 'logged_in_only' in response

    def test_logout(self):
        """Test that logout works and session is gets invalid afterwards."""
        response = self.app.get('/in_peon_group?'
            'user_name=samIam&password=secret&login=Login')
        # Re-use the session cookie so the same visit is logged out.
        session_id = response.headers['Set-Cookie']
        response = self.app.get('/logout', headers={'Cookie': session_id})
        response = self.app.get('/is_anonymous', headers={'Cookie': session_id})
        assert response.body == 'is_anonymous'

    def test_require_group(self):
        """Test that an anonymous user is denied group-restricted access."""
        response = self.app.get('/in_peon_group', status=401)
        assert 'identity_failed_answer' in response, response

    def test_require_expose_required_permission(self):
        """Test that the decorator exposes the correct permissions via _require
        attribute on the actual method."""
        response = self.app.get('/test_exposed_require')
        assert 'require is exposed' in response, response

    def test_require_group_viewable(self):
        """Test that a user with proper group membership can see a restricted url."""
        response = self.app.get('/in_peon_group?'
            'user_name=samIam&password=secret&login=Login')
        assert 'in_peon_group' in response, response

    # Fix: this test was also named 'test_require_group_viewable', which
    # silently shadowed the method above so it never ran.  Renamed so
    # both tests are collected.
    def test_current_user_groups(self):
        """Test that the current user and group properties are set correctly."""
        response = self.app.get('/in_peon_group?'
            'user_name=samIam&password=secret&login=Login')
        assert 'in_peon_group' in response, response
        # NOTE(review): the original computed these values without
        # asserting on them -- presumably they were meant to be compared
        # against the server-side identity; confirm intended assertions.
        user = TG_User.by_user_name("samIam")
        group_ids = set((TG_Group.by_group_name("peon").id,
            TG_Group.by_group_name("other").id))

    def test_user_not_in_right_group(self):
        """Test that a user is denied access if they aren't in the right group.
        """
        response = self.app.get('/in_admin_group?'
            'user_name=samIam&password=secret&login=Login', status=401)
        assert 'identity_failed_answer' in response, response

    def test_require_permission(self):
        """Test that an anonymous user is denied access to a permission
        restricted url.
        """
        response = self.app.get('/has_chopper_permission', status=401)
        assert 'identity_failed_answer' in response, response

    def test_require_permission_viewable(self):
        """Test that a user with proper permissions can see a restricted url."""
        response = self.app.get('/has_chopper_permission?'
            'user_name=samIam&password=secret&login=Login')
        assert 'has_chopper_permission' in response

    def test_user_lacks_permission(self):
        """Test that a user is denied acces if they don't have the proper
        permission.
        """
        response = self.app.get('/has_boss_permission?'
            'user_name=samIam&password=secret&login=Login', status=401)
        assert 'identity_failed_answer' in response, response

    def test_user_info_available(self):
        """Test that we can see user information inside our controller."""
        response = self.app.get('/user_email?'
            'user_name=samIam&password=secret&login=Login')
        assert 'samiam@example.com' in response, response

    def test_bad_login(self):
        """Test that we are denied access if we provide a bad login."""
        response = self.app.get('/logged_in_only?'
            'user_name=samIam&password=wrong&login=Login', status=401)
        assert 'identity_failed_answer' in response, response

    def test_restricted_subdirectory(self):
        """Test that we can restrict access to a whole subdirectory."""
        response = self.app.get('/peon_area/index', status=401)
        assert 'identity_failed_answer' in response, response

    def test_restricted_subdirectory_viewable(self):
        """Test that we can access a restricted subdirectory
        if we have proper credentials."""
        response = self.app.get('/peon_area/index?'
            'user_name=samIam&password=secret&login=Login')
        assert 'restricted_index' in response, response

    def test_decoratator_in_restricted_subdirectory(self):
        """Test that we can require a different permission
        in a protected subdirectory."""
        response = self.app.get('/peon_area/in_other_group?'
            'user_name=samIam&password=secret&login=Login')
        assert 'in_other_group' in response, response

    def test_decoratator_failure_in_restricted_subdirectory(self):
        """Test that we can get an identity failure from a decorator
        in a restricted subdirectory"""
        response = self.app.get('/peon_area/in_admin_group?'
            'user_name=samIam&password=secret&login=Login', status=401)
        assert 'identity_failed_answer' in response, response

    def test_explicit_checks_in_restricted_subdirectory(self):
        """Test that explicit permission checks in a protected
        directory is handled as expected"""
        response = self.app.get('/peon_area/in_other_group_explicit_check?'
            'user_name=samIam&password=secret&login=Login')
        assert 'in_other_group' in response, response

    def test_throwing_identity_exception_in_restricted_subdirectory(self):
        """Test that throwing an IdentityException in a protected
        directory is handled as expected"""
        response = self.app.get('/peon_area/in_admin_group_explicit_check?'
            'user_name=samIam&password=secret&login=Login', status=401)
        assert 'identity_failed' in response, response

    def test_decode_filter_whenidfails(self):
        """Test that the decode filter doesn't break with nested
        variables and Identity"""
        params = urllib.quote(IdentityRoot._test_encoded_params.decode(
            'utf-8').encode('latin-1'), '=&')
        response = self.app.get('/test_params?' + params, status=401)
        assert 'identity_failed_answer' in response, response

    def test_decode_filter_whenidworks(self):
        """Test that the decode filter doesn't break with nested
        variables and Identity"""
        params = urllib.quote(IdentityRoot._test_encoded_params.decode(
            'utf-8').encode('latin-1'), '=&')
        params += '&user_name=samIam&password=secret&login=Login'
        response = self.app.get('/test_params?' + params)
        assert 'params ok' in response, response
class TestTGUser(testutil.DBTest):
    """Tests for the TG_User model outside of an identity provider."""

    # Model under test; testutil.DBTest creates and drops its table.
    model = TG_User

    def setUp(self):
        # Identity must be switched off so TG_User can be used without a
        # running identity provider; remember the previous setting so
        # tearDown can restore it.
        self._identity_on = config.get('tools.identity.on', False)
        config.update({'tools.identity.on': False})
        startup.startTurboGears()
        testutil.DBTest.setUp(self)

    def tearDown(self):
        testutil.DBTest.tearDown(self)
        startup.stopTurboGears()
        config.update({'tools.identity.on': self._identity_on})

    def test_create_user(self):
        """Check that User can be created outside of a running identity provider."""
        u = TG_User(user_name='testcase',
            email_address='testcase@example.com',
            display_name='Test Me', password='test')
        # No encryption algorithm is configured here, so the password is
        # stored verbatim.
        assert u.password=='test', u.password
|
thedep2/CouchPotatoServer | refs/heads/develop | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/vulture.py | 44 | from __future__ import unicode_literals
import json
import os.path
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
parse_iso8601,
)
class VultureIE(InfoExtractor):
    """Extractor for videos hosted on video.vulture.com."""

    IE_NAME = 'vulture.com'
    _VALID_URL = r'https?://video\.vulture\.com/video/(?P<display_id>[^/]+)/'
    _TEST = {
        'url': 'http://video.vulture.com/video/Mindy-Kaling-s-Harvard-Speech/player?layout=compact&read_more=1',
        'md5': '8d997845642a2b5152820f7257871bc8',
        'info_dict': {
            'id': '6GHRQL3RV7MSD1H4',
            'ext': 'mp4',
            'title': 'kaling-speech-2-MAGNIFY STANDARD CONTAINER REVISED',
            'uploader_id': 'Sarah',
            'thumbnail': 're:^http://.*\.jpg$',
            'timestamp': 1401288564,
            'upload_date': '20140528',
            'description': 'Uplifting and witty, as predicted.',
            'duration': 1015,
        }
    }

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        display_id = mobj.group('display_id')

        webpage = self._download_webpage(url, display_id)
        # The page embeds a player through a query string whose
        # 'content' parameter carries the real video ID.
        query_string = self._search_regex(
            r"queryString\s*=\s*'([^']+)'", webpage, 'query string')
        video_id = self._search_regex(
            r'content=([^&]+)', query_string, 'video ID')
        query_url = 'http://video.vulture.com/embed/player/container/1000/1000/?%s' % query_string
        query_webpage = self._download_webpage(
            query_url, display_id, note='Downloading query page')
        # The player parameters are a JSON blob inlined into the embed
        # page's MagnifyEmbeddablePlayer constructor call.
        params_json = self._search_regex(
            r'(?sm)new MagnifyEmbeddablePlayer\({.*?contentItem:\s*(\{.*?\})\n,\n',
            query_webpage,
            'player params')
        params = json.loads(params_json)

        # 'posted' is almost ISO 8601 -- only the date/time separator is
        # a space instead of 'T'.
        upload_timestamp = parse_iso8601(params['posted'].replace(' ', 'T'))
        uploader_id = params.get('user', {}).get('handle')
        media_item = params['media_item']
        # Drop the file extension from the raw media title.
        title = os.path.splitext(media_item['title'])[0]
        duration = int_or_none(media_item.get('duration_seconds'))

        return {
            'id': video_id,
            'display_id': display_id,
            'url': media_item['pipeline_xid'],
            'title': title,
            'timestamp': upload_timestamp,
            'thumbnail': params.get('thumbnail_url'),
            'uploader_id': uploader_id,
            'description': params.get('description'),
            'duration': duration,
        }
|
mindriot101/bokeh | refs/heads/master | examples/integration/glyphs/frame_clipping_multi_backend.py | 7 | from bokeh.plotting import figure
from bokeh.layouts import row
from bokeh.io import save
def make_figure(output_backend):
    """Build a small demo figure rendered with *output_backend*.

    The same glyphs are drawn for every backend so that the saved row
    of plots makes frame clipping easy to compare across backends.
    """
    p = figure(plot_width=400,
               plot_height=400,
               output_backend=output_backend,
               title="Backend: %s" % output_backend)

    # The glyphs deliberately extend past the frame edges to exercise
    # clipping behaviour.
    p.circle(x=[1, 2, 3], y=[1, 2, 3], radius=0.25, color="blue", alpha=0.5)
    p.annulus(x=[1, 2, 3], y=[1, 2, 3], inner_radius=0.1, outer_radius=0.20, color="orange")

    return p

# Render the identical figure once per supported backend and save them
# side by side for visual comparison.
canvas = make_figure("canvas")
webgl = make_figure("webgl")
svg = make_figure("svg")

save(row(canvas, webgl, svg))
|
legalsylvain/OpenUpgrade | refs/heads/master | addons/hr_expense/__init__.py | 436 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_expense
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
odlgroup/odl | refs/heads/master | odl/util/vectorization.py | 2 | # Copyright 2014-2019 The ODL contributors
#
# This file is part of ODL.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
"""Utilities for internal functionality connected to vectorization."""
from __future__ import print_function, division, absolute_import
from builtins import object
from functools import wraps
import numpy as np
__all__ = ('is_valid_input_array', 'is_valid_input_meshgrid',
'out_shape_from_meshgrid', 'out_shape_from_array',
'OptionalArgDecorator', 'vectorize')
def is_valid_input_array(x, ndim=None):
    """Test if ``x`` is a correctly shaped point array in R^d."""
    arr = np.asarray(x)
    if ndim not in (None, 1):
        # Points in R^d (d > 1) must come as a (d, N) array.
        return arr.ndim == 2 and arr.shape[0] == ndim
    # 1-dimensional points: either a flat array with more than one
    # entry, or a single-row 2d array.
    if arr.ndim == 1:
        return arr.size > 1
    return arr.ndim == 2 and arr.shape[0] == 1
def is_valid_input_meshgrid(x, ndim):
    """Test if ``x`` is a `meshgrid` sequence for points in R^d."""
    # ``ndim is None`` happens in FunctionSpaceElement.__call__ when the
    # domain has no 'ndim' attribute; treat that as "not a meshgrid".
    if ndim is None or not isinstance(x, tuple):
        return False

    if ndim > 1:
        # All meshgrid components must be mutually broadcastable.
        try:
            np.broadcast(*x)
        except (ValueError, TypeError):  # cannot be broadcast
            return False

    if len(x) != ndim:
        return False
    return all(isinstance(comp, np.ndarray) and comp.ndim == ndim
               for comp in x)
def out_shape_from_meshgrid(mesh):
    """Get the broadcast output shape from a `meshgrid`."""
    # A 1d "meshgrid" is a single flat component; for higher dimensions
    # the shape results from broadcasting all components together.
    if len(mesh) > 1:
        return np.broadcast(*mesh).shape
    return (len(mesh[0]),)
def out_shape_from_array(arr):
    """Get the output shape from an array."""
    pts = np.asarray(arr)
    # A flat array already is the output shape; a (d, N) point array
    # yields one output value per column.
    return pts.shape if pts.ndim == 1 else (pts.shape[1],)
class OptionalArgDecorator(object):

    """Abstract base class for decorators usable with or without arguments.

    Subclasses implement the static ``_wrapper`` method, which receives
    the function to be decorated plus any decorator arguments and
    returns the wrapped function.  The class then supports both ::

        @decorator
        def myfunc(x, *args, **kwargs):
            pass

    and ::

        @decorator(param, **dec_kwargs)
        def myfunc(x, *args, **kwargs):
            pass

    where the decorator arguments are forwarded to ``_wrapper``.
    """

    def __new__(cls, *args, **kwargs):
        """Return either the wrapped function or a decorator instance.

        Used without arguments, ``@decorator`` calls this with the
        decorated function as the only positional argument; the wrapped
        function is returned immediately, equivalent to ::

            myfunc = decorator(myfunc)

        Used with arguments, ``@decorator(*a, **kw)`` stores the
        arguments on a new instance whose ``__call__`` performs the
        wrapping later, equivalent to ::

            myfunc = decorator(*a, **kw)(myfunc)
        """
        obj = super(OptionalArgDecorator, cls).__new__(cls)

        bare_usage = not kwargs and len(args) == 1 and callable(args[0])
        if bare_usage:
            # ``@decorator`` without parentheses: wrap right away.
            return obj._wrapper(args[0])

        # ``@decorator(...)``: remember the arguments; wrapping happens
        # when the instance is called with the function.
        obj.wrapper_args = args
        obj.wrapper_kwargs = kwargs
        return obj

    def __call__(self, func):
        """Wrap ``func`` using the stored decorator arguments.

        Parameters
        ----------
        func : callable
            Original function to be wrapped.

        Returns
        -------
        wrapped : callable
            The wrapped function.
        """
        return self._wrapper(func, *self.wrapper_args, **self.wrapper_kwargs)

    @staticmethod
    def _wrapper(func, *wrapper_args, **wrapper_kwargs):
        """Return the wrapper for ``func``.

        This default implementation ignores the extra arguments and
        returns ``func`` unchanged, i.e. the identity decorator.
        """
        return func
class vectorize(OptionalArgDecorator):

    """Decorator class for function vectorization.

    The decorated function must take exactly one positional argument
    (the input) and may take arbitrary keyword arguments.  The
    vectorized variant additionally accepts an optional ``out``
    parameter for in-place evaluation.

    Examples
    --------
    Use the decorator witout arguments:

    >>> @vectorize
    ... def f(x):
    ...     return x[0] + x[1] if x[0] < x[1] else x[0] - x[1]
    >>>
    >>> f([0, 1])  # np.vectorize'd functions always return an array
    array(1)
    >>> f([[0, -2], [1, 4]])  # corresponds to points [0, 1], [-2, 4]
    array([1, 2])

    The function may have ``kwargs``:

    >>> @vectorize
    ... def f(x, param=1.0):
    ...     return x[0] + x[1] if x[0] < param else x[0] - x[1]
    >>>
    >>> f([[0, -2], [1, 4]])
    array([1, 2])
    >>> f([[0, -2], [1, 4]], param=-1.0)
    array([-1, 2])

    You can pass arguments to the vectorizer, too:

    >>> @vectorize(otypes=['float32'])
    ... def f(x):
    ...     return x[0] + x[1] if x[0] < x[1] else x[0] - x[1]
    >>> f([[0, -2], [1, 4]])
    array([ 1.,  2.], dtype=float32)
    """

    @staticmethod
    def _wrapper(func, *vect_args, **vect_kwargs):
        """Return the vectorized wrapper function."""
        if not hasattr(func, '__name__'):
            # Callable objects may lack ``__name__``; supply one so that
            # ``functools.wraps`` can copy the metadata.
            func.__name__ = '{}.__call__'.format(func.__class__.__name__)
        vectorized = _NumpyVectorizeWrapper(func, *vect_args, **vect_kwargs)
        return wraps(func)(vectorized)
class _NumpyVectorizeWrapper(object):
"""Class for vectorization wrapping using `numpy.vectorize`.
The purpose of this class is to store the vectorized version of
a function when it is called for the first time.
"""
def __init__(self, func, *vect_args, **vect_kwargs):
"""Initialize a new instance.
Parameters
----------
func : callable
Python function or method to be wrapped
vect_args :
positional arguments for `numpy.vectorize`
vect_kwargs :
keyword arguments for `numpy.vectorize`
"""
super(_NumpyVectorizeWrapper, self).__init__()
self.func = func
self.vfunc = None
self.vect_args = vect_args
self.vect_kwargs = vect_kwargs
def __call__(self, x, out=None, **kwargs):
"""Vectorized function call.
Parameters
----------
x : `array-like` or sequence of `array-like`'s
Input argument(s) to the wrapped function
out : `numpy.ndarray`, optional
Appropriately sized array to write to
Returns
-------
out : `numpy.ndarray`
Result of the vectorized function evaluation. If ``out``
was given, the returned object is a reference to it.
"""
if np.isscalar(x):
x = np.array([x])
elif isinstance(x, np.ndarray) and x.ndim == 1:
x = x[None, :]
if self.vfunc is None:
# Not yet vectorized
def _func(*x, **kw):
return self.func(np.array(x), **kw)
self.vfunc = np.vectorize(_func, *self.vect_args,
**self.vect_kwargs)
if out is None:
return self.vfunc(*x, **kwargs)
else:
out[:] = self.vfunc(*x, **kwargs)
if __name__ == '__main__':
    # Run this module's doctests when executed as a script.
    from odl.util.testutils import run_doctests
    run_doctests()
|
mdanielwork/intellij-community | refs/heads/master | python/lib/Lib/distutils/command/install_egg_info.py | 438 | """distutils.command.install_egg_info
Implements the Distutils 'install_egg_info' command, for installing
a package's PKG-INFO metadata."""
from distutils.cmd import Command
from distutils import log, dir_util
import os, sys, re
class install_egg_info(Command):
    """Install an .egg-info file for the package"""

    description = "Install package's PKG-INFO metadata as an .egg-info file"
    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        # Filled in by finalize_options, possibly from 'install_lib'.
        self.install_dir = None

    def finalize_options(self):
        # Inherit the installation directory from the 'install_lib' command.
        self.set_undefined_options('install_lib',('install_dir','install_dir'))
        # Target file name: <name>-<version>-py<X.Y>.egg-info
        # NOTE(review): sys.version[:3] yields '3.1' for Python 3.10+;
        # fine for the Python versions this file targets, but fragile.
        basename = "%s-%s-py%s.egg-info" % (
            to_filename(safe_name(self.distribution.get_name())),
            to_filename(safe_version(self.distribution.get_version())),
            sys.version[:3]
        )
        self.target = os.path.join(self.install_dir, basename)
        self.outputs = [self.target]

    def run(self):
        target = self.target
        # Remove any stale file or directory at the target path first.
        if os.path.isdir(target) and not os.path.islink(target):
            dir_util.remove_tree(target, dry_run=self.dry_run)
        elif os.path.exists(target):
            self.execute(os.unlink,(self.target,),"Removing "+target)
        elif not os.path.isdir(self.install_dir):
            self.execute(os.makedirs, (self.install_dir,),
                         "Creating "+self.install_dir)
        log.info("Writing %s", target)
        if not self.dry_run:
            f = open(target, 'w')
            self.distribution.metadata.write_pkg_file(f)
            f.close()

    def get_outputs(self):
        # Files created by this command (used by 'install --record').
        return self.outputs
# The following routines are taken from setuptools' pkg_resources module and
# can be replaced by importing them from pkg_resources once it is included
# in the stdlib.
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name.

    Any run of characters other than ASCII letters, digits and '.' is
    collapsed into a single '-'.
    """
    return re.sub(r'[^A-Za-z0-9.]+', '-', name)
def safe_version(version):
    """Convert an arbitrary string to a standard version string.

    Spaces become dots; every other run of characters outside ASCII
    letters, digits and '.' collapses into a single dash.
    """
    return re.sub(r'[^A-Za-z0-9.]+', '-', version.replace(' ', '.'))
def to_filename(name):
    """Convert a project or version name to its filename-escaped form.

    Currently this simply turns every '-' into '_'.
    """
    return name.replace('-', '_')
|
jborean93/ansible | refs/heads/devel | test/units/parsing/test_unquote.py | 298 | # coding: utf-8
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.parsing.quoting import unquote
import pytest
# Pairs of (quoted input, expected unquoted output) fed to unquote().
# Covers unquoted strings, matching and mismatched quote styles,
# escaped quotes, and lone quote characters.
UNQUOTE_DATA = (
    (u'1', u'1'),
    (u'\'1\'', u'1'),
    (u'"1"', u'1'),
    (u'"1 \'2\'"', u'1 \'2\''),
    (u'\'1 "2"\'', u'1 "2"'),
    (u'\'1 \'2\'\'', u'1 \'2\''),
    (u'"1\\"', u'"1\\"'),
    (u'\'1\\\'', u'\'1\\\''),
    (u'"1 \\"2\\" 3"', u'1 \\"2\\" 3'),
    (u'\'1 \\\'2\\\' 3\'', u'1 \\\'2\\\' 3'),
    (u'"', u'"'),
    (u'\'', u'\''),
    # Not entirely sure these are good but they match the current
    # behaviour
    (u'"1""2"', u'1""2'),
    (u'\'1\'\'2\'', u'1\'\'2'),
    (u'"1" 2 "3"', u'1" 2 "3'),
    (u'"1"\'2\'"3"', u'1"\'2\'"3'),
)
@pytest.mark.parametrize("quoted, expected", UNQUOTE_DATA)
def test_unquote(quoted, expected):
    """Each quoted sample must unquote to its expected form."""
    assert unquote(quoted) == expected
|
aspose-cells/Aspose.Cells-for-Cloud | refs/heads/master | Examples/Python/Examples/ProtectExcelWorkbooks.py | 2 | import asposecellscloud
from asposecellscloud.CellsApi import CellsApi
from asposecellscloud.CellsApi import ApiException
from asposecellscloud.models import WorkbookProtectionRequest
import asposestoragecloud
from asposestoragecloud.StorageApi import StorageApi
# Aspose Cloud credentials -- replace with real values before running.
apiKey = "XXXXX"  # specify App Key
appSid = "XXXXX"  # specify App SID
apiServer = "http://api.aspose.com/v1.1"
data_folder = "../../data/"

# Instantiate Aspose Storage API SDK
storage_apiClient = asposestoragecloud.ApiClient.ApiClient(apiKey, appSid, True)
storageApi = StorageApi(storage_apiClient)

# Instantiate Aspose Cells API SDK
api_client = asposecellscloud.ApiClient.ApiClient(apiKey, appSid, True)
cellsApi = CellsApi(api_client);

# set input file name
filename = "Sample_Test_Book.xls"

# Request protection of the entire workbook with a password.
body = WorkbookProtectionRequest.WorkbookProtectionRequest()
body.Password = "aspose"
body.ProtectionType = "All"

# upload file to aspose cloud storage
storageApi.PutCreate(Path=filename, file=data_folder + filename)

try:
    # invoke Aspose.Cells Cloud SDK API to protect a workbook
    response = cellsApi.PostProtectDocument(name=filename, body=body)
    if response.Status == "OK":
        # download protected document from cloud storage
        response = storageApi.GetDownload(Path=filename)
        outfilename = "c:/temp/" + filename
        # Stream the download to disk chunk by chunk.
        with open(outfilename, 'wb') as f:
            for chunk in response.InputStream:
                f.write(chunk)
except ApiException as ex:
    print "ApiException:"
    print "Code:" + str(ex.code)
    print "Message:" + ex.message
|
shumik/skencil-c | refs/heads/master | Sketch/UI/mainwindow.py | 1 | # Sketch - A Python-based interactive drawing program
# Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 by Bernhard Herzog
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os, sys, string
from types import TupleType, ListType
from Sketch.Lib import util
from Sketch.warn import warn, warn_tb, INTERNAL, USER
from Sketch import _, config, load, plugins, SketchVersion
from Sketch import Publisher, Point, EmptyFillStyle, EmptyLineStyle, \
EmptyPattern, Document, GuideLine, PostScriptDevice, SketchError
import Sketch
from Sketch.Graphics import image, eps
import Sketch.Scripting
from Sketch.const import DOCUMENT, CLIPBOARD, CLOSED, COLOR1, COLOR2
from Sketch.const import STATE, VIEW, MODE, SELECTION, POSITION, UNDO, EDITED,\
CURRENTINFO
from Tkinter import TclVersion, TkVersion, Frame, Scrollbar
from Tkinter import X, BOTTOM, BOTH, TOP, HORIZONTAL, LEFT, Y
import Tkinter
from tkext import AppendMenu, UpdatedLabel, UpdatedButton, CommandButton, \
CommandCheckbutton, MakeCommand, MultiButton
import tkext
from command import CommandClass, Keymap, Commands
from canvas import SketchCanvas
import ruler
from poslabel import PositionLabel
import palette, tooltips
import skpixmaps
pixmaps = skpixmaps.PixmapTk
import skapp
import time
command_list = []
def AddCmd(name, menu_name, method_name = None, **kw):
    """Create a CommandClass instance and register it in command_list.

    Parameters:
        name        -- command identifier, also the default method name
        menu_name   -- label shown in menus (stored in kw['menu_name'])
        method_name -- callback method name; defaults to ``name``
        kw          -- additional CommandClass keyword arguments
    """
    kw['menu_name'] = menu_name
    if not method_name:
        method_name = name
    # Direct call instead of the deprecated apply() builtin -- same
    # semantics on Python 2 and also valid on Python 3.
    cmd = CommandClass(name, method_name, **kw)
    command_list.append(cmd)
def AddDocCmd(name, menu_name, method_name = None, **kw):
    """Create a document command and register it in command_list.

    Like AddCmd, but the callback (and any string-valued callable
    attribute) is looked up on the document object, and defaults for
    selection handling are supplied.
    """
    kw['menu_name'] = menu_name
    if not method_name:
        method_name = name
    # Dispatch the callback through the document object.
    method_name = ('document', method_name)
    # Any plain-string callable attribute also names a document method.
    for key in CommandClass.callable_attributes:
        if key in kw:
            value = kw[key]
            if isinstance(value, str):
                kw[key] = ('document', value)
    # Document commands react to selection changes by default and are
    # only sensitive while something is selected.
    if 'subscribe_to' not in kw:
        kw['subscribe_to'] = SELECTION
    if 'sensitive_cb' not in kw:
        kw['sensitive_cb'] = ('document', 'HasSelection')
    # Direct call instead of the deprecated apply() builtin.
    cmd = CommandClass(name, method_name, **kw)
    command_list.append(cmd)
class SketchMainWindow(Publisher):

    tk_basename = 'sketch'
    tk_class_name = 'Sketch'

    # Logging state (added for logging by shumon, June 4, 2009).
    log_file = None
    start_time = 0

    # Experiment statistics (added by shumon, June 5, 2009).  The *_time
    # attributes hold wall-clock timestamps; the total_* attributes
    # count user events during a trial and are written out by
    # log_stats() at the end of a session.
    last_user_action_time = 0
    first_user_action_time = 0
    total_undos = 0
    # Clone creation counters.
    total_direct_clones_created = 0
    total_tiled_clones_created = 0
    # Direct-cloning drag moves, split by constraint axis.
    total_direct_cloning_free_moves = 0
    total_direct_cloning_x_moves = 0
    total_direct_cloning_y_moves = 0
    # Raw input event counters.
    total_mouse_moves = 0
    total_button_downs = 0
    total_button_ups = 0
    # Clone-structure operations.
    total_interpolate_to_root_calls = 0
    total_unlink_clone_calls = 0
    total_break_clones_calls = 0
    total_clone_deleted_calls = 0
    # Clone-editing dialog interactions.
    total_call_editing_box_displays = 0
    first_editing_box_displayed_time = 0
    last_up_on_272 = 0
    total_call_editing_cancel_presses = 0
    total_clone_editing_all_presses = 0
    total_clone_editing_skip_presses = 0
    total_clone_editing_all_following_presses = 0
    total_clone_editing_selected_presses = 0
    # Tiled-clones dialog interactions.
    total_create_tiled_clones_button_presses = 0
    total_remove_tiled_clones_button_presses = 0
    total_reset_tiled_clones_button_presses = 0
    total_tiled_clones_dialog_closes = 0
    total_tiled_clones_sucessfully_created_attempts = 0
    total_tiled_clones_sucessfullly_removes = 0
    # Document lifecycle counters.
    total_new_document_creations = 0
    total_document_openings = 0
    total_tiled_clones_dialog_openings = 0
def log_stats(self):
participant = self.application.participant
f1 = self.application.f1
f2 = self.application.f2
f3 = self.application.f3
f4 = self.application.f4
if participant == None or f1 == None or f2 == None or f3 == None or f3 == None:
return
#log_stats_file_name = str('stats_'+'p'+str(participant)+'c'+str(condition)+'s'+str(trial)+'.csv')
log_stats_file_name = str('experiment_stats.csv')
#see if it exists
log_stats_file = None
try:
log_stats_file = open(log_stats_file_name)
except IOError:
#file doesn't exist, add the col info
log_stats_file = open(log_stats_file_name, 'a')
string =_('p')+','+\
_('f1')+','+\
_('f2')+','+\
_('f3')+','+\
_('f4')+','+\
_('task_time')+','+\
_('last_user_action_time')+','+\
_('end_of_trial_time')+','+\
_('first_editing_box_displayed_time')+','+\
_('last_up_on_272')+','+\
_('total_call_editing_box_displays')+','+\
_('total_undos')+','+\
_('total_direct_clones_created')+','+\
_('total_tiled_clones_created')+','+\
\
_('total_direct_cloning_free_moves ')+','+\
_('total_direct_cloning_x_moves ')+','+\
_('total_direct_cloning_y_moves ')+','+\
_('total_mouse_moves ')+','+\
_('total_button_downs ')+','+\
_('total_button_ups ')+','+\
\
_('total_interpolate_to_root_calls ')+','+\
_('total_unlink_clone_calls ')+','+\
_('total_break_clones_calls ')+','+\
_('total_clone_deleted_calls ')+','+\
\
_('total_call_editing_cancel_presses ')+','+\
_('total_clone_editing_all_presses ')+','+\
_('total_clone_editing_skip_presses ')+','+\
_('total_clone_editing_all_following_presses ')+','+\
_('total_clone_editing_selected_presses ')+','+\
\
_('total_create_tiled_clones_button_presses ')+','+\
_('total_remove_tiled_clones_button_presses ')+','+\
_('total_reset_tiled_clones_button_presses ')+','+\
_('total_tiled_clones_dialog_closes ')+','+\
\
_('total_tiled_clones_sucessfully_created_attempts ')+','+\
_('total_tiled_clones_sucessfullly_removes ')+','+\
\
_('total_new_document_creations ')+','+\
_('total_document_openings ')+','+\
\
_('total_tiled_clones_dialog_openings ')+','+\
_('reserved ')+','+\
'\n'
log_stats_file.write(string)
print "DOESNT EXIST"
reserved = 0
if f1 == 1: reserved = participant % 12
elif f1 == 2: reserved = participant % 6
elif f1 == 3: reserved = participant % 20
log_stats_file = open(log_stats_file_name, 'a')
string = str(participant)+','+\
str(f1)+','+\
str(f2)+','+\
str(f3)+','+\
str(f4)+','+\
str(self.last_user_action_time-self.first_user_action_time)+','+\
str(self.last_user_action_time)+','+\
str(time.time())+','+\
str(self.first_editing_box_displayed_time )+','+\
str(self.last_up_on_272)+','+\
str(self.total_call_editing_box_displays )+','+\
str(self.total_undos)+','+\
str(self.total_direct_clones_created)+','+\
str(self.total_tiled_clones_created)+','+\
\
str(self.total_direct_cloning_free_moves )+','+\
str(self.total_direct_cloning_x_moves )+','+\
str(self.total_direct_cloning_y_moves )+','+\
str(self.total_mouse_moves )+','+\
str(self.total_button_downs )+','+\
str(self.total_button_ups )+','+\
\
str(self.total_interpolate_to_root_calls )+','+\
str(self.total_unlink_clone_calls )+','+\
str(self.total_break_clones_calls )+','+\
str(self.total_clone_deleted_calls )+','+\
\
str(self.total_call_editing_cancel_presses )+','+\
str(self.total_clone_editing_all_presses )+','+\
str(self.total_clone_editing_skip_presses )+','+\
str(self.total_clone_editing_all_following_presses )+','+\
str(self.total_clone_editing_selected_presses )+','+\
\
str(self.total_create_tiled_clones_button_presses )+','+\
str(self.total_remove_tiled_clones_button_presses )+','+\
str(self.total_reset_tiled_clones_button_presses )+','+\
str(self.total_tiled_clones_dialog_closes )+','+\
\
str(self.total_tiled_clones_sucessfully_created_attempts )+','+\
str(self.total_tiled_clones_sucessfullly_removes )+','+\
\
str(self.total_new_document_creations )+','+\
str(self.total_document_openings )+','+\
\
str(self.total_tiled_clones_dialog_openings )+','+\
str(reserved )+','+\
'\n'
log_stats_file.write(string)
#####################################################################3
#in case user screws up and starts direct cloning instead of tiled cloning
log_file_name = None  # full path of the per-participant action log
def reopen_log_file(self):
    """Restart the action log: close and reopen it in 'w' mode
    (truncating it) and reset the trial start time."""
    if self.log_file != None and self.log_file_name != None:
        self.log_file.close()
        self.log_file = open(self.log_file_name, 'w')
        self.start_time = time.time()
        self.document.first_time_logging = True
        print "reopened!"
def setup_experiment_logging(self):
    """Open the per-participant action log (added June 4, 2009).

    Only active when participant and all four factors are set on the
    application; the log path encodes all five ids.
    """
    app = self.application
    ids = (app.participant, app.f1, app.f2, app.f3, app.f4)
    if None in ids:
        return
    participant, f1, f2, f3, f4 = ids
    self.log_file_name = ('./participant_save/%s/p%sf1%sf2%sf3%sf4%s.csv'
                          % (participant, participant, f1, f2, f3, f4))
    self.log_file = open(self.log_file_name, 'w')
    self.start_time = time.time()
######################################################
#save user's trial
#has to be called after MetaInfo() is initialized
def set_experiment_save_filename(self):
    """Stub: per-trial save-filename logic is currently disabled."""
    return
    #participant = self.application.participant
    #condition = self.application.condition
    #trial = self.application.trial
    #if participant != None and condition != None and trial != None:
    #    save_filename = 'p' + str(participant) + 'c' + str(condition)+'t'+ str(trial) + '.sk'
    #    self.document.meta.filename = save_filename
def display_technique_instruction(self):
    """Show a modal box telling the participant which technique to use
    for the current task.

    Reads technique/task codes from the application.  For the practice
    codes (p/q/x/v/y/w) it also rewrites the application's technique,
    session, task_size and task attributes before showing the box.
    """
    technique = self.application.technique
    task = self.application.task
    if technique != 0:
        # stays None if no branch matches; MessageBox is then called
        # with message=None -- presumably tolerated, TODO confirm
        message = None
        direct_message = _("Use Direct Cloning to do the task")
        tiled_message = _("Use Tiled Clones to do the task")
        e_message = _("Use Smart Duplication to do the task")
        clone_edit_message = _("Use the Clone Editing to do the task")
        sel_edit_message = _("Use Selection to do the task.")
        #rect_sel_edit_message = _("Use Rectangular Selection to do the task")
        practice_creation_message1 = _("Training session type 1 of 2.\nYour actions will not be logged\n(Practise Direct and Tiled Cloning)")
        practice_creation_message2 = _("Training session type 2 of 2.\nYour actions will not be logged\n(Practise Smart Duplication)")
        practice_editing_message1 = _("Training session type 1 of 2.\nYour actions will not be logged\n(Practise Clone Editing)")
        practice_editing_message2 = _("Training session type 2 of 2.\nYour actions will not be logged\n(Practise Selection Editing)")
        #experiment two
        creation_tasks = ["a","b","c","d"]
        if technique == _("t") and task in creation_tasks:
            message = tiled_message
        elif technique == _("d") and task in creation_tasks:
            message = direct_message
        elif technique == _("e") and task in creation_tasks:
            message = e_message
        elif technique == _("c") and task not in creation_tasks:
            message = clone_edit_message
        elif technique == _("s") and task not in creation_tasks:
            message = sel_edit_message
        elif technique == _("p"):
            message = practice_creation_message1
            self.application.technique = None #reset technique so it doesnt intervene with practise session
            self.application.session = _("p")
        elif technique == _("q"):
            message = practice_creation_message2
            self.application.technique = None #reset technique so it doesnt intervene with practise session
            self.application.session = _("q")
            self.application.technique = _("e")
            self.application.task_size = 1
        # NOTE(review): ("v") and ("w") below are plain strings, unlike
        # the _(...) wrapped codes elsewhere -- confirm this is intended.
        elif technique == _("x") or technique == ("v"):
            message = practice_editing_message1
            self.application.technique = None #reset technique so it doesnt intervene with practise session
            self.application.session = _("r")
            self.application.technique = _("c")
            self.application.task_size = 1
            self.application.task = _("e") if technique == _("x") else _("g")
        elif technique == _("y") or technique == ("w"):
            message = practice_editing_message2
            self.application.technique = None #reset technique so it doesnt intervene with practise session
            self.application.session = _("u")
            self.application.technique = _("s")
            self.application.task_size = 1
            self.application.task = _("e") if technique == _("y") else _("g")
        buttons = _("OK")
        print "about to display"
        self.application.MessageBox(title = _("Instruction"),message= message, buttons = buttons)
        print "displayed"
# NOTE(review): dead code below is kept verbatim as a bare string
# statement; it is never executed and is a candidate for deletion.
#old implementation of display technique instruction for the first experiment
'''def display_technique_instruction(self):
technique = self.application.technique
task = self.application.task
if technique != 0:
message = None
direct_message = _("Use Direct Cloning to complete the task")
tiled_message = _("Use Tiled Clones to complete the task")
freeform_message = _("Use Duplication to complete the task")
practice_message = _("This is your training session.Your actions will not be logged.\nUse Direct Cloning, Tiled Cloning, or Duplication (if applicable) to practice this task")
#for the first session a it's freeform, the rest - tiles
if technique == _("t") and task == _("a"):
message = freeform_message
elif technique == _("d"):
message = direct_message
elif technique == _("t"):
message = tiled_message
elif technique == _("p"):
message = practice_message
self.application.technique = None #reset technique so it doesnt intervene with practise session
buttons = _("OK")
print "about to display"
self.application.MessageBox(title = _("Instruction"),message= message, buttons = buttons)
print "displayed"
'''
def __init__(self, application, filename, run_script = None):
    """Create the main window and all of its widgets.

    application -- the global application object (supplies root, etc.)
    filename    -- file or directory to open on startup; may be empty
    run_script  -- optional python script path executed by Run()
    """
    self.application = application
    self.root = application.root # XXX
    self.filename = filename
    self.run_script = run_script
    self.canvas = None
    self.document = None
    self.commands = None
    # presumably order-sensitive: widgets and menus reference
    # self.document and self.commands -- confirm before reordering
    self.NewDocument()
    self.create_commands()
    self.build_window()
    self.build_menu()
    self.build_toolbar()
    self.build_status_bar()
    self.__init_dlgs()
    #added by shumon June 4, 2009
    self.setup_experiment_logging()
    self.set_experiment_save_filename()
def issue_document(self):
    """Broadcast the DOCUMENT message with the current document."""
    self.issue(DOCUMENT, self.document)
def create_commands(self):
    """Instantiate every registered command for this window and build
    the keymap from their key strokes."""
    commands = Commands()
    key_bindings = Keymap()
    for command_class in command_list:
        instance = command_class.InstantiateFor(self)
        setattr(commands, instance.name, instance)
        key_bindings.AddCommand(instance)
    self.commands = commands
    self.commands.Update()
    self.keymap = key_bindings
def MapKeystroke(self, stroke):
    """Return whatever the keymap maps this keystroke to."""
    return self.keymap.MapKeystroke(stroke)
def save_doc_if_edited(self, title = _("Save Document")):
    """If the document has unsaved changes, ask whether to save it.

    Returns tkext.Yes / tkext.No / tkext.Cancel.  During the practice
    sessions (p/q/r/u) the prompt is skipped and tkext.No is returned.
    (A dead commented-out early-save branch was removed; the bare
    `except: pass' was narrowed to AttributeError, which is what the
    missing `session' attribute actually raises.)
    """
    if self.document is None or not self.document.WasEdited():
        return tkext.No
    try:
        # practice sessions are never saved
        if self.application.session in (_("p"), _("q"), _("r"), _("u")):
            return tkext.No
    except AttributeError:
        # application has no `session' outside the experiment
        pass
    message = _("%s has been changed.\nDo you want to save it?") \
              % self.document.meta.filename
    result = self.application.MessageBox(title = title,
                                         message = message,
                                         buttons = tkext.YesNoCancel)
    if result == tkext.Yes:
        self.SaveToFileInteractive()
    return result
def Document(self):
    """Return the document currently shown in this window."""
    return self.document
def SetDocument(self, document):
    """Install `document' as the current document.

    Moves the SELECTION/UNDO/MODE subscriptions from the old document
    to the new one, points the canvas at it, broadcasts DOCUMENT and
    only then destroys the old document (see comment below for why the
    order matters).
    """
    channels = (SELECTION, UNDO, MODE)
    old_doc = self.document
    if old_doc is not None:
        for channel in channels:
            old_doc.Unsubscribe(channel, self.issue, channel)
    self.document = document
    for channel in channels:
        self.document.Subscribe(channel, self.issue, channel)
    if self.canvas is not None:
        self.canvas.SetDocument(document)
    self.issue_document()
    # issue_document has to be called before old_doc is destroyed,
    # because destroying it causes all connections to be deleted and
    # some dialogs (derived from SketchDlg) try to unsubscribe in
    # response to our DOCUMENT message. The connector currently
    # raises an exception in this case. Perhaps it should silently
    # ignore Unsubscribe() calls with methods that are actually not
    # subscribers (any more)
    if old_doc is not None:
        old_doc.Destroy()
    self.set_window_title()
    if self.commands:
        self.commands.Update()
    self.document.main_window = self
    #if self.application.task == 'a':
    #    print "clones are NOT aligned permanently"
    #    self.document.clones_aligned_permanently = 0 #for the first task only
AddCmd('NewDocument', _("New"), bitmap = pixmaps.NewDocument)
def NewDocument(self):
    """Replace the current document with a fresh empty one (offering to
    save pending edits first)."""
    if self.save_doc_if_edited(_("New Document")) == tkext.Cancel:
        return
    if self.document != None:
        self.document.DestroyTiledClonesDialog() #Added by Shumon June 1, 2009
    self.SetDocument(Document(create_layer = 1))
    self.set_experiment_save_filename() #added by shumon June 5,2006
    string = 'new_document'
    self.document.log(string)
    self.total_new_document_creations += 1
AddCmd('LoadFromFile', _("Open..."), bitmap = pixmaps.Open,
       key_stroke = 'C-o')
# The four most-recently-used entries reuse LoadFromFile with an
# integer argument that indexes config.preferences.mru_files.
AddCmd('LoadMRU0', '', 'LoadFromFile', args = 0, key_stroke = 'M-1',
       name_cb = lambda: os.path.split(config.preferences.mru_files[0])[1])
AddCmd('LoadMRU1', '', 'LoadFromFile', args = 1, key_stroke = 'M-2',
       name_cb = lambda: os.path.split(config.preferences.mru_files[1])[1])
AddCmd('LoadMRU2', '', 'LoadFromFile', args = 2, key_stroke = 'M-3',
       name_cb = lambda: os.path.split(config.preferences.mru_files[2])[1])
AddCmd('LoadMRU3', '', 'LoadFromFile', args = 3, key_stroke = 'M-4',
       name_cb = lambda: os.path.split(config.preferences.mru_files[3])[1])
def LoadFromFile(self, filename = None, directory = None):
    """Open a drawing and make it the current document.

    filename may be a path, an int (MRU index) or None (ask the user).
    Updates the MRU list, shows import-filter warnings, and triggers
    the experiment hooks (instruction box, page-outline toggling).
    """
    app = self.application
    if self.save_doc_if_edited(_("Open Document")) == tkext.Cancel:
        return
    if type(filename) == type(0):
        # an integer means: index into the MRU list
        filename = config.preferences.mru_files[filename]
    if not filename:
        if not directory:
            directory = self.document.meta.directory
        if not directory:
            directory = os.getcwd()
        name = ''
        filename = app.GetOpenFilename(filetypes = skapp.openfiletypes(),
                                       initialdir = directory,
                                       initialfile = name)
        if not filename:
            return
    try:
        if not os.path.isabs(filename):
            filename = os.path.join(os.getcwd(), filename)
        doc = load.load_drawing(filename)
        self.SetDocument(doc)
        self.add_mru_file(filename)
        self.set_experiment_save_filename() #Added by shumon June 2009
        #string = 'document_opened'
        #self.document.log(string)
        self.total_document_openings += 1 #Not counting this anymore
        self.display_technique_instruction()
        if self.application.task > 'd': self.document.on_document_open()
        if self.application.task != None : self.canvas.TogglePageOutlineMode()
    except SketchError, value:
        app.MessageBox(title = _("Open"),
                       message = _("An error occurred:\n") + str(value))
        #warn_tb(USER, "An error occurred:\n", str(value))
        self.remove_mru_file(filename)
    else:
        messages = doc.meta.load_messages
        if messages:
            app.MessageBox(title = _("Open"),
                           message=_("Warnings from the import filter:\n")
                           + messages)
        doc.meta.load_messages = ''
AddCmd('SaveToFile', _("Save"), 'SaveToFileInteractive',
       bitmap = pixmaps.Save, key_stroke = ('C-s', 'F2'))
AddCmd('SaveToFileAs', _("Save As..."), 'SaveToFileInteractive', args = 1,
       key_stroke = ('C-w', 'F3'))
def SaveToFileInteractive(self, use_dialog = 0):
    """Save the document, prompting for a filename when needed.

    A dialog is shown if use_dialog is true ("Save As"), if there is no
    filename yet, or if the document was loaded from a non-native
    format.  The export format is guessed from the chosen extension.
    """
    filename = self.document.meta.fullpathname
    native_format = self.document.meta.native_format
    compressed_file = self.document.meta.compressed_file
    compressed = self.document.meta.compressed
    app = self.application
    if use_dialog or not filename or not native_format:
        dir = self.document.meta.directory
        if not dir:
            dir = os.getcwd()
        name = self.document.meta.filename
        basename, ext = os.path.splitext(name)
        if not native_format:
            # suggest the native .sk extension for imported documents
            name = basename + '.sk'
        filename = app.GetSaveFilename(filetypes = skapp.savefiletypes(),
                                       initialdir = dir,
                                       initialfile = name)
        if not filename:
            return
        extension = os.path.splitext(filename)[1]
        fileformat = plugins.guess_export_plugin(extension)
        if not fileformat:
            fileformat = plugins.NativeFormat
        compressed_file = '' # guess compression from filename
        compressed = ''
    else:
        fileformat = plugins.NativeFormat
    self.SaveToFile(filename, fileformat, compressed, compressed_file)
def SaveToFile(self, filename, fileformat = None, compressed = '',
               compressed_file = ''):
    """Write the document to `filename' via an export plugin.

    fileformat defaults to the native format.  compressed may be
    'gzip' or 'bzip2', in which case output is piped through the
    external compressor into compressed_file.  A backup of the target
    is made once per document; on IOError the user is notified and the
    file is dropped from the MRU list.
    """
    app = self.application
    try:
        if not self.document.meta.backup_created:
            try:
                if compressed_file:
                    util.make_backup(compressed_file)
                else:
                    util.make_backup(filename)
            except util.BackupError, value:
                backupfile = value.filename
                strerror = value.strerror
                msg = (_("Cannot create backup file %(filename)s:\n"
                         "%(message)s\n"
                         "Choose `continue' to try saving anyway,\n"
                         "or `cancel' to cancel.")
                       % {'filename':`backupfile`, 'message':strerror})
                cancel = _("Cancel")
                result = app.MessageBox(title = _("Save To File"),
                                        message = msg, icon = 'warning',
                                        buttons = (_("Continue"), cancel))
                if result == cancel:
                    return
            self.document.meta.backup_created = 1
        if fileformat is None:
            fileformat = plugins.NativeFormat
        try:
            saver = plugins.find_export_plugin(fileformat)
            if compressed:
                # XXX there should be a plugin interface for this kind
                # of post-processing
                if compressed == "gzip":
                    cmd = 'gzip -c -9 > ' + util.sh_quote(compressed_file)
                elif compressed == "bzip2":
                    cmd = 'bzip2 > ' + util.sh_quote(compressed_file)
                file = os.popen(cmd, 'w')
                saver(self.document, filename, file = file)
            else:
                saver(self.document, filename)
        finally:
            # always release the plugin, even if saving failed
            saver.UnloadPlugin()
    except IOError, value:
        if type(value) == type(()):
            value = value[1]
        app.MessageBox(title = _("Save To File"),
                       message = _("Cannot save %(filename)s:\n"
                                   "%(message)s") \
                       % {'filename':`os.path.split(filename)[1]`,
                          'message':value},
                       icon = 'warning')
        self.remove_mru_file(filename)
        return
    if fileformat == plugins.NativeFormat:
        dir, name = os.path.split(filename)
        # XXX should meta.directory be set for non-native formats as well
        self.document.meta.directory = dir
        self.document.meta.filename = name
        self.document.meta.fullpathname = filename
        self.document.meta.file_type = plugins.NativeFormat
        self.document.meta.native_format = 1
    if not compressed_file:
        self.document.meta.compressed_file = ''
        self.document.meta.compressed = ''
    if compressed_file:
        self.add_mru_file(compressed_file)
    else:
        self.add_mru_file(filename)
    self.set_window_title()
AddCmd('SavePS', _("Save as PostScript..."), key_stroke = 'M-p')
def SavePS(self, filename = None):
    """Export the printable layers as an EPS file.

    Refuses (with a message box) when the document has no printable
    layers; otherwise asks for a filename if none is given and
    remembers the chosen directory in meta.ps_directory.
    """
    app = self.application
    bbox = self.document.BoundingRect(visible = 0, printable = 1)
    if bbox is None:
        app.MessageBox(title = _("Save As PostScript"),
                       message = _("The document doesn't have "
                                   "any printable layers."),
                       icon = "warning")
        return
    if not filename:
        dir = self.document.meta.ps_directory
        if not dir:
            dir = self.document.meta.directory
        if not dir:
            dir = os.getcwd()
        name = self.document.meta.filename
        name, ext = os.path.splitext(name)
        name = name + '.ps'
        filename = app.GetSaveFilename(title = _("Save As PostScript"),
                                       filetypes = skapp.psfiletypes,
                                       initialdir = dir,
                                       initialfile = name)
        if not filename:
            return
    try:
        ps_dev = PostScriptDevice(filename, as_eps = 1,
                                  bounding_box = tuple(bbox),
                                  For = util.get_real_username(),
                                  CreationDate = util.current_date(),
                                  Title = os.path.basename(filename),
                                  document = self.document)
        self.document.Draw(ps_dev)
        ps_dev.Close()
        self.document.meta.ps_directory = os.path.split(filename)[0]
    except IOError, value:
        app.MessageBox(title = _("Save As PostScript"),
                       message = _("Cannot save %(filename)s:\n"
                                   "%(message)s") \
                       % {'filename':`os.path.split(filename)[1]`,
                          'message':value[1]},
                       icon = 'warning')
def add_mru_file(self, filename):
    """Record `filename' in the most-recently-used list and refresh
    the MRU menu entries.  Empty filenames are ignored."""
    if not filename:
        return
    config.add_mru_file(filename)
    self.update_mru_files()
def remove_mru_file(self, filename):
    """Drop `filename' from the most-recently-used list and refresh
    the MRU menu entries.  Empty filenames are ignored."""
    if not filename:
        return
    config.remove_mru_file(filename)
    self.update_mru_files()
def update_mru_files(self):
    """Refresh the four MRU command entries and rebuild the File menu."""
    mru_commands = (self.commands.LoadMRU0, self.commands.LoadMRU1,
                    self.commands.LoadMRU2, self.commands.LoadMRU3)
    for command in mru_commands:
        command.Update()
    self.file_menu.RebuildMenu()
AddCmd('InsertFile', _("Insert Document..."))
def InsertFile(self, filename = None):
    """Load another drawing and insert it into the current document as
    a single group, letting the user place it on the canvas."""
    app = self.application
    if not filename:
        dir = self.document.meta.directory
        if not dir:
            dir = os.getcwd()
        name = ''
        filename = app.GetOpenFilename(filetypes = skapp.openfiletypes(),
                                       initialdir = dir,
                                       initialfile = name)
        if not filename:
            return
    try:
        if not os.path.isabs(filename):
            filename = os.path.join(os.getcwd(), filename)
        doc = load.load_drawing(filename)
        group = doc.as_group()
    except SketchError, value:
        app.MessageBox(title = _("Insert Document"),
                       message = _("An error occurred:\n") + str(value))
        #warn_tb(USER, "An error occurred:\n", str(value))
        self.remove_mru_file(filename)
    else:
        messages = doc.meta.load_messages
        if messages:
            app.MessageBox(title = _("Insert Document"),
                           message=_("Warnings from the import filter:\n")
                           + messages)
        doc.meta.load_messages = ''
        #
        if group is not None:
            self.canvas.PlaceObject(group)
        else:
            app.MessageBox(title = _("Insert Document"),
                           message=_("The document is empty"))
AddCmd('LoadPalette', _("Load Palette..."))
def LoadPalette(self, filename = None):
    """Load a color palette file and install it in the palette widget,
    remembering the choice in the preferences."""
    if not filename:
        dir = config.std_res_dir
        if not dir:
            dir = os.getcwd()
        name = ''
        filename = self.application.GetOpenFilename(
            filetypes = palette.file_types,
            initialdir = dir,
            initialfile = name)
        if not filename:
            return
    pal = palette.LoadPalette(filename)
    if not pal:
        self.application.MessageBox(title = _("Load Palette"),
                                    message = _("Cannot load palette %(filename)s")
                                    % {'filename': filename})
    else:
        self.palette.SetPalette(pal)
        # XXX Should we just store the basename if the palette file
        # is located in the resource_dir?
        config.preferences.palette = filename
def __init_dlgs(self):
    """Initialize the registry of open dialogs (name -> dialog)."""
    self.dialogs = {}
def CreateDialog(self, module, dlgname):
    """Show the dialog class `dlgname' from `module'.

    Reuses (and raises) an already open instance; otherwise imports
    the module lazily, instantiates the class, and registers it so
    __dlg_closed can remove it when it closes.

    (Replaces the original `has_key' test and the fragile
    exec/locals() import with `in' and __import__/getattr.)
    """
    if dlgname in self.dialogs:
        dialog = self.dialogs[dlgname]
        dialog.deiconify_and_raise()
    else:
        # lazy import; fromlist makes __import__ return the module itself
        mod = __import__(module, globals(), locals(), [dlgname])
        dlgclass = getattr(mod, dlgname)
        dialog = dlgclass(self.root, self, self.document)
        dialog.Subscribe(CLOSED, self.__dlg_closed, dlgname)
        self.dialogs[dlgname] = dialog
def HideDialogs(self):
    """Withdraw (hide) every currently open dialog window."""
    for open_dialog in self.dialogs.values():
        open_dialog.withdraw()
AddCmd('HideDialogs', _("Hide Dialogs"))
def ShowDialogs(self):
    """Deiconify and raise every currently open dialog window."""
    for open_dialog in self.dialogs.values():
        open_dialog.deiconify_and_raise()
AddCmd('ShowDialogs', _("Show Dialogs"))
def __dlg_closed(self, dialog, name):
    """CLOSED callback: drop the dialog from the open-dialog registry.

    (The original used a bare `except:'; only a missing key can occur
    here, so catch KeyError explicitly.  Also fixed the 'alread' typo
    in the warning text.)
    """
    try:
        del self.dialogs[name]
    except KeyError:
        # This might happen if the dialog is buggy...
        warn(INTERNAL, 'dialog %s already removed from dialog list', name)
# Dialog-opening commands: each maps a menu entry to CreateDialog with
# the (module name, panel class name) pair to import lazily.
AddCmd('CreateLayerDialog', _("Layers..."),
       'CreateDialog', args = ('layerdlg', 'LayerPanel'),
       key_stroke = 'F5')
AddCmd('CreateAlignDialog', _("Align..."),
       'CreateDialog', args = ('aligndlg', 'AlignPanel'))
AddCmd('CreateGridDialog', _("Grid..."),
       'CreateDialog', args = ('griddlg', 'GridPanel'))
AddCmd('CreateLineStyleDialog', _("Line..."),
       'CreateDialog', args = ('linedlg', 'LinePanel'),
       key_stroke = 'F7')
AddCmd('CreateFillStyleDialog', _("Fill..."),
       'CreateDialog', args = ('filldlg', 'FillPanel'),
       key_stroke = 'F6')
AddCmd('CreateFontDialog', _("Font..."),
       'CreateDialog', args = ('fontdlg', 'FontPanel'))
AddCmd('CreateStyleDialog', _("Styles..."),
       'CreateDialog', args = ('styledlg', 'StylePanel'))
AddCmd('CreateBlendDialog', _("Blend..."),
       'CreateDialog', args = ('blenddlg', 'BlendPanel'),
       key_stroke = 'C-b')
AddCmd('CreateLayoutDialog', _("Page Layout..."),
       'CreateDialog', args = ('layoutdlg', 'LayoutPanel'))
#AddCmd('CreateExportDialog', 'Export...',
#       'CreateDialog', args = ('export', 'ExportPanel'))
AddCmd('CreateCurveDialog', _("Curve Commands..."),
       'CreateDialog', args = ('curvedlg', 'CurvePanel'))
AddCmd('CreateGuideDialog', _("Guide Lines..."),
       'CreateDialog', args = ('guidedlg', 'GuidePanel'))
AddCmd('CreatePrintDialog', _("Print..."),
       'CreateDialog', args = ('printdlg', 'PrintPanel'),
       key_stroke = "C-p")
AddCmd('CreateReloadPanel', _("Reload Module..."),
       'CreateDialog', args = ('reloaddlg', 'ReloadPanel'))
def CreatePluginDialog(self, info):
    """Open the parameter dialog for a plugin: either its custom dialog
    or the generic PluginPanel, registered under info.class_name so
    __dlg_closed can clean it up."""
    if info.HasCustomDialog():
        dialog = info.CreateCustomDialog(self.root, self, self.document)
    else:
        from plugindlg import PluginPanel
        dialog = PluginPanel(self.root, self, self.document, info)
    dialog.Subscribe(CLOSED, self.__dlg_closed, info.class_name)
    self.dialogs[info.class_name] = dialog
AddCmd('SetOptions', _("Options..."))
def SetOptions(self):
    """Open the global options dialog (module imported lazily)."""
    import optiondlg
    optiondlg.OptionDialog(self.root, self.canvas)
def set_window_title(self):
    """Update the WM title and command properties from the document.

    NOTE: the title template is %-formatted with locals(), so the
    local variable names `appname' and `docname' are part of the
    template's contract -- do not rename them.
    """
    self.root.client(util.gethostname())
    if self.document:
        appname = config.name
        meta = self.document.meta
        if meta.compressed:
            # show the compressed file's basename without its extension
            docname = os.path.split(meta.compressed_file)[1]
            docname = os.path.splitext(docname)[0]
        else:
            docname = self.document.meta.filename
        title = config.preferences.window_title_template % locals()
        command = (config.sketch_command, self.document.meta.fullpathname)
    else:
        title = config.name
        command = (config.sketch_command, )
    self.root.title(title)
    self.root.command(command)
def UpdateCommands(self):
    """Delegate command updating to the canvas."""
    self.canvas.UpdateCommands()
def Run(self):
    """Open the startup file (if any), run the optional startup script,
    then enter the application main loop."""
    #self.root.wait_visibility(self.canvas)
    if self.filename:
        if os.path.isdir(self.filename):
            # a directory: open the file dialog there instead
            filename = ''
            directory = self.filename
        else:
            filename = self.filename
            directory = ''
        self.LoadFromFile(filename, directory = directory)
        self.filename = ''
    if self.run_script:
        from Sketch.Scripting.script import Context
        dict = {'context': Context()}
        try:
            execfile(self.run_script, dict)
        except:
            warn_tb(USER, _("Error running script `%s'"), self.run_script)
    self.application.Mainloop()
AddCmd('Exit', _("Exit"), key_stroke = ('M-Q', 'M-F4'))
#added by shumon June 5, 2009
def Exit(self):
    """Quit the application unless the user cancels the save prompt;
    flushes experiment statistics before exiting."""
    if self.save_doc_if_edited(_("Exit")) != tkext.Cancel:
        self.commands = None
        self.document.DestroyTiledClonesDialog() #added by Shumon June 1, 2009
        self.log_stats() #added by shumon June 5, 2009
        self.application.Exit()
def build_window(self):
    """Create and lay out all widgets: menu bar, toolbar, status bar,
    the scrolled/ruled canvas grid, and the color palette strip."""
    root = self.application.root
    self.mbar = Frame(root, name = 'menubar')
    self.mbar.pack(fill=X)
    self.tbar = Frame(root, name = 'toolbar')
    self.tbar.pack(fill=X)
    self.status_bar = Frame(root, name = 'statusbar')
    self.status_bar.pack(side = BOTTOM, fill=X)
    palette_frame = Frame(root, name = 'palette_frame')
    palette_frame.pack(side = BOTTOM, fill = X)
    frame = Frame(root, name = 'canvas_frame')
    frame.pack(side = TOP, fill = BOTH, expand = 1)
    # 3x3 grid: rulers in row/column 0, canvas in the middle,
    # scrollbars in row/column 2
    vbar = Scrollbar(frame)
    vbar.grid(in_ = frame, column = 2, row = 1, sticky = 'ns')
    hbar = Scrollbar(frame, orient = HORIZONTAL)
    hbar.grid(in_ = frame, column = 1, row = 2, sticky = 'ew')
    hrule = ruler.Ruler(root, orient = ruler.HORIZONTAL)
    hrule.grid(in_ = frame, column = 1, row = 0, sticky = 'ew',
               columnspan = 2)
    vrule = ruler.Ruler(root, orient = ruler.VERTICAL)
    vrule.grid(in_ = frame, column = 0, row = 1, sticky = 'ns',
               rowspan = 2)
    tmp = Frame(frame, name = 'rulercorner')
    tmp.grid(column = 0, row = 0, sticky = 'news')
    resolution = config.preferences.screen_resolution
    self.canvas = SketchCanvas(root, toplevel = root,
                               background = 'white', name = 'canvas',
                               resolution = resolution, main_window = self,
                               document = self.document)
    self.canvas.grid(in_ = frame, column = 1, row = 1, sticky = 'news')
    self.canvas.focus()
    self.canvas.SetScrollbars(hbar, vbar)
    self.canvas.SetRulers(hrule, vrule)
    hrule.SetCanvas(self.canvas)
    vrule.SetCanvas(self.canvas)
    # only the canvas row/column stretches
    frame.columnconfigure(0, weight = 0)
    frame.columnconfigure(1, weight = 1)
    frame.columnconfigure(2, weight = 0)
    frame.rowconfigure(0, weight = 0)
    frame.rowconfigure(1, weight = 1)
    frame.rowconfigure(2, weight = 0)
    hbar['command'] = self.canvas._w + ' xview'
    vbar['command'] = self.canvas._w + ' yview'
    # the palette
    button = MultiButton(palette_frame, bitmap = pixmaps.NoPattern,
                         command = self.no_pattern, args = 'fill')
    button.pack(side = LEFT)
    button.Subscribe('COMMAND2', self.no_pattern, 'line')
    pal = palette.GetStandardPalette()
    self.palette = palette.PaletteWidget(palette_frame, pal)
    ScrollXUnits = self.palette.ScrollXUnits
    ScrollXPages = self.palette.ScrollXPages
    CanScrollLeft = self.palette.CanScrollLeft
    CanScrollRight = self.palette.CanScrollRight
    for bitmap, command, args, sensitivecb in [
        (pixmaps.ArrArrLeft, ScrollXPages, -1, CanScrollLeft),
        (pixmaps.ArrLeft, ScrollXUnits, -1, CanScrollLeft),
        (pixmaps.ArrRight, ScrollXUnits, +1, CanScrollRight),
        (pixmaps.ArrArrRight, ScrollXPages, +1, CanScrollRight)]:
        button = UpdatedButton(palette_frame, bitmap = bitmap,
                               command = command, args = args,
                               sensitivecb = sensitivecb)
        button.pack(side = LEFT)
        self.palette.Subscribe(VIEW, button.Update)
        if bitmap == pixmaps.ArrLeft:
            # pack the palette right after the two left-arrow buttons so
            # the right-arrow buttons end up on its right side
            self.palette.pack(side = LEFT, fill = X, expand = 1)
    self.palette.Subscribe(COLOR1, self.canvas.FillSolid)
    self.palette.Subscribe(COLOR2, self.canvas.LineColor)
    root.protocol('WM_DELETE_WINDOW', tkext.MakeMethodCommand(self.Exit))
def make_file_menu(self):
    """Entries for the File menu; None marks a separator."""
    c = self.commands
    entries = [c.NewDocument,
               c.LoadFromFile,
               c.SaveToFile,
               c.SaveToFileAs,
               c.SavePS,
               c.CreatePrintDialog,
               None,
               c.InsertFile,
               #c.CreateExportDialog,
               None,
               c.SetOptions,
               c.AboutBox,
               None,
               c.LoadMRU0, c.LoadMRU1, c.LoadMRU2, c.LoadMRU3,
               None,
               c.Exit]
    return map(MakeCommand, entries)
def make_edit_menu(self):
    """Entries for the Edit menu; None marks a separator."""
    own = self.commands
    canvas_cmds = self.canvas.commands
    entries = [own.Undo,
               own.Redo,
               own.ResetUndo,
               None,
               own.CopySelected,
               own.CutSelected,
               own.PasteClipboard,
               own.RemoveSelected,
               None,
               own.SelectAll,
               None,
               own.DuplicateSelected,
               None,
               # Create submenu, rebuilt on demand from the plugins
               [(_("Create"), {'auto_rebuild': self.creation_entries}),
                []],
               None,
               canvas_cmds.SelectionMode, canvas_cmds.EditMode]
    return map(MakeCommand, entries)
def creation_entries(self):
    """Build the entries of the Edit>Create submenu: the builtin shape
    tools followed by one entry per registered object plugin, sorted by
    name.  Invoked via 'auto_rebuild', so it runs each time the
    submenu is shown."""
    cmds = self.canvas.commands
    entries = [cmds.CreateRectangle,
               cmds.CreateEllipse,
               cmds.CreatePolyBezier,
               cmds.CreatePolyLine,
               cmds.CreateSimpleText,
               self.commands.CreateImage,
               None]
    items = plugins.object_plugins.items()
    items.sort()
    place = self.place_plugin_object
    dialog = self.CreatePluginDialog
    group = self.create_plugin_group
    for name, plugin in items:
        if plugin.UsesSelection():
            entries.append((plugin.menu_text, group, plugin))
        elif plugin.HasParameters() or plugin.HasCustomDialog():
            # needs user input -> dialog entry, marked with '...'
            entries.append((plugin.menu_text + '...', dialog, plugin))
        else:
            entries.append((plugin.menu_text, place, plugin))
    return map(MakeCommand, entries)
def place_plugin_object(self, info):
    """Instantiate a parameterless plugin object and hand it to the
    canvas for placement."""
    self.canvas.PlaceObject(info())
def create_plugin_group(self, info):
    """Apply a selection-based plugin by grouping the current selection
    through the plugin's factory."""
    self.document.group_selected(info.menu_text, info.CallFactory)
def PlaceObject(self, object):
    """Hand `object' to the canvas for interactive placement."""
    self.canvas.PlaceObject(object)
def make_effects_menu(self):
    """Entries for the Effects menu; None marks a separator."""
    own = self.commands
    entries = [own.FlipHorizontal,
               own.FlipVertical,
               None,
               own.RemoveTransformation,
               None,
               own.CreateBlendDialog,
               own.CancelBlend,
               None,
               own.CreateMaskGroup,
               own.CreatePathText]
    return map(MakeCommand, entries)
def make_curve_menu(self):
    """Entries for the Curve menu; None marks a separator.

    (Removed an unused local binding of self.canvas present in the
    original.)
    """
    cmds = self.canvas.commands.PolyBezierEditor
    return map(MakeCommand,
               [cmds.ContAngle,
                cmds.ContSmooth,
                cmds.ContSymmetrical,
                cmds.SegmentsToLines,
                cmds.SegmentsToCurve,
                cmds.SelectAllNodes,
                None,
                cmds.DeleteNodes,
                cmds.InsertNodes,
                None,
                cmds.CloseNodes,
                cmds.OpenNodes,
                None,
                self.commands.CombineBeziers,
                self.commands.SplitBeziers,
                None,
                self.commands.ConvertToCurve])
def make_view_menu(self):
    """Entries for the View menu: zoom presets plus canvas view
    commands; None marks a separator."""
    # call is bound as a default argument so each entry captures
    # self.canvas.SetScale at definition time
    def MakeEntry(scale, call = self.canvas.SetScale):
        percent = int(100 * scale)
        return (('%3d%%' % percent), call, scale)
    cmds = self.canvas.commands
    scale = map(MakeEntry, [ 0.125, 0.25, 0.5, 1, 2, 4, 8])
    return map(MakeCommand,
               [MakeEntry(1),
                [_("Zoom")] + scale,
                cmds.ZoomIn,
                cmds.ZoomOut,
                cmds.ZoomMode,
                None,
                cmds.FitToWindow,
                cmds.FitSelectedToWindow,
                cmds.FitPageToWindow,
                cmds.RestoreViewport,
                None,
                cmds.ForceRedraw,
                None,
                cmds.ToggleOutlineMode,
                cmds.TogglePageOutlineMode,
                None,
                cmds.ToggleCrosshairs,
                None,
                self.commands.LoadPalette
                ])
def make_arrange_menu(self):
    """Entries for the Arrange menu; None marks a separator.  Advanced
    snap toggles are appended only when enabled in the preferences."""
    own = self.commands
    canvas_cmds = self.canvas.commands
    entries = [own.CreateAlignDialog,
               None,
               own.MoveSelectedToTop,
               own.MoveSelectedToBottom,
               own.MoveSelectionUp,
               own.MoveSelectionDown,
               None,
               own.AbutHorizontal,
               own.AbutVertical,
               None,
               own.GroupSelected,
               own.UngroupSelected,
               None,
               canvas_cmds.ToggleSnapToObjects,
               canvas_cmds.ToggleSnapToGrid,
               own.CreateGridDialog,
               None,
               canvas_cmds.ToggleSnapToGuides,
               own.AddHorizGuideLine,
               own.AddVertGuideLine,
               own.CreateGuideDialog]
    if config.preferences.show_advanced_snap_commands:
        entries.append(None)
        entries.append(canvas_cmds.ToggleSnapMoveRelative)
        entries.append(canvas_cmds.ToggleSnapBoundingRect)
    entries.extend([None, own.CreateLayoutDialog])
    return map(MakeCommand, entries)
def make_style_menu(self):
    """Entries for the Style menu; None marks a separator."""
    own = self.commands
    entries = [own.FillNone,
               own.CreateFillStyleDialog,
               self.canvas.commands.FillSolid,
               None,
               own.LineNone,
               own.CreateLineStyleDialog,
               None,
               own.CreateStyleFromSelection,
               own.CreateStyleDialog,
               own.UpdateStyle,
               None,
               own.CreateFontDialog]
    return map(MakeCommand, entries)
def make_window_menu(self):
    """Entries for the Windows menu; None marks a separator."""
    c = self.commands
    entries = [c.HideDialogs,
               c.ShowDialogs,
               None,
               c.CreateLayerDialog,
               c.CreateAlignDialog,
               c.CreateGridDialog,
               None,
               c.CreateLineStyleDialog,
               c.CreateFillStyleDialog,
               c.CreateFontDialog,
               c.CreateStyleDialog,
               None,
               c.CreateLayoutDialog,
               None,
               c.CreateBlendDialog,
               c.CreateCurveDialog]
    return map(MakeCommand, entries)
    def make_special_menu(self):
        """Build the debugging/"Special" menu.

        Plugins can extend the command list in place by subscribing to
        the ADD_TO_SPECIAL_MENU message issued below.
        """
        cmdlist = [self.commands.python_prompt,
                   self.commands.CreateReloadPanel,
                   self.commands.DocumentInfo,
                   None,
                   self.commands.DumpXImage,
                   self.commands.CreateClone,
                   #self.commands.export_bitmap,
                   ]
        Sketch.Issue(None, Sketch.const.ADD_TO_SPECIAL_MENU, cmdlist)
        return map(MakeCommand, cmdlist)
    def make_script_menu(self):
        """Build the Script menu from the scripting registry's menu tree."""
        tree = Sketch.Scripting.Registry.MenuTree()
        cmdlist = self.convert_menu_tree(tree)
        return map(MakeCommand, cmdlist)
def convert_menu_tree(self, tree):
result = []
for title, item in tree:
if type(item) == ListType:
result.append([title] + self.convert_menu_tree(item))
else:
result.append((title, item.Execute))
return result
    def build_menu(self):
        """Create all pulldown menus and attach them to the menu bar."""
        mbar = self.mbar
        # Keep a reference to the file menu: it is rebuilt below and
        # again whenever the most-recently-used file list changes.
        self.file_menu = AppendMenu(mbar, _("File"), self.make_file_menu())
        AppendMenu(mbar, _("Edit"), self.make_edit_menu())
        AppendMenu(mbar, _("Effects"), self.make_effects_menu())
        AppendMenu(mbar, _("Curve"), self.make_curve_menu())
        AppendMenu(mbar, _("Arrange"), self.make_arrange_menu())
        AppendMenu(mbar, _("Style"), self.make_style_menu())
        AppendMenu(mbar, _("View"), self.make_view_menu())
        AppendMenu(mbar, _("Script"), self.make_script_menu())
        AppendMenu(mbar, _("Windows"), self.make_window_menu())
        # The Special menu is only shown when enabled in the preferences.
        if config.preferences.show_special_menu:
            AppendMenu(mbar, _("Special"), self.make_special_menu())
        self.update_mru_files()
        self.file_menu.RebuildMenu()
def build_toolbar(self):
tbar = self.tbar
canvas = self.canvas
cmds = [self.commands.NewDocument,
self.commands.LoadFromFile,
self.commands.SaveToFile,
None,
canvas.commands.EditMode,
canvas.commands.SelectionMode,
None,
self.commands.Undo,
self.commands.Redo,
self.commands.RemoveSelected,
None,
self.commands.DuplicateSelected,
self.commands.FlipHorizontal,
self.commands.FlipVertical,
None,
self.commands.MoveSelectedToTop,
self.commands.MoveSelectionUp,
self.commands.MoveSelectionDown,
self.commands.MoveSelectedToBottom,
None,
self.commands.GroupSelected,
self.commands.UngroupSelected,
None,
canvas.commands.ZoomMode,
canvas.commands.ToggleSnapToGrid,
None,
canvas.commands.CreateRectangle,
canvas.commands.CreateEllipse,
canvas.commands.CreatePolyBezier,
canvas.commands.CreatePolyLine,
canvas.commands.CreateSimpleText,
self.commands.CreateImage,
]
buttons = []
for cmd in cmds:
if cmd is None:
b = Frame(tbar, class_ = 'TBSeparator')
b.pack(side = LEFT, fill = Y)
else:
if cmd.is_check:
b = CommandCheckbutton(tbar, cmd,
highlightthickness = 0,
takefocus = 0)
else:
b = CommandButton(tbar, cmd, highlightthickness = 0,
takefocus = 0)
tooltips.AddDescription(b, cmd.menu_name)
b.pack(side = LEFT, fill = Y)
def state_changed(buttons = buttons):
for button in buttons:
button.Update()
canvas.Subscribe(STATE, state_changed)
    def build_status_bar(self):
        """Create the status bar labels and wire them to canvas events.

        Each label is an UpdatedLabel whose updatecb supplies the text;
        subscribing to canvas/window messages keeps them current.
        """
        status_bar = self.status_bar
        canvas = self.canvas
        # Current canvas mode (edit/selection/...).
        stat_mode = UpdatedLabel(status_bar, name = 'mode', text = '',
                                 updatecb = canvas.ModeInfoText)
        stat_mode.pack(side = 'left')
        stat_mode.Update()
        canvas.Subscribe(MODE, stat_mode.Update)
        # Document modified/unmodified indicator.
        stat_edited = UpdatedLabel(status_bar, name = 'edited', text = '',
                                   updatecb = self.EditedInfoText)
        stat_edited.pack(side = 'left')
        stat_edited.Update()
        self.Subscribe(UNDO, stat_edited.Update)
        # Current zoom factor.
        stat_zoom = UpdatedLabel(status_bar, name = 'zoom', text = '',
                                 updatecb = canvas.ZoomInfoText)
        stat_zoom.pack(side = 'left')
        stat_zoom.Update()
        canvas.Subscribe(VIEW, stat_zoom.Update)
        # Pointer position in document coordinates.
        stat_pos = PositionLabel(status_bar, name = 'position', text = '',
                                 updatecb = canvas.GetCurrentPos)
        stat_pos.pack(side = 'left')
        stat_pos.Update()
        canvas.Subscribe(POSITION, stat_pos.Update)
        # Description of the current selection; expands to fill the bar.
        stat_sel = UpdatedLabel(status_bar, name = 'selection', text = '',
                                updatecb = canvas.CurrentInfoText)
        stat_sel.pack(side = 'left', fill = X, expand = 1)
        stat_sel.Update()
        update = stat_sel.Update
        canvas.Subscribe(SELECTION, update)
        canvas.Subscribe(CURRENTINFO, update)
        canvas.Subscribe(EDITED, update)
def EditedInfoText(self):
if self.document.WasEdited():
return _("modified")
return _("unmodified")
    AddCmd('AboutBox', _("About..."))
    def AboutBox(self):
        """Show the About dialog with Skencil and toolkit version info."""
        abouttext = _("Skencil (%(version)s)\n"
                      "(c) 1996-2005 by Bernhard Herzog\n\n"
                      "Versions:\n"
                      "Python:\t%(py)s\tTcl:\t%(tcl)s\n"
                      "Tkinter:\t%(tkinter)s\tTk:\t%(tk)s") \
                      % {'version':SketchVersion,
                         'py':string.split(sys.version)[0],
                         'tcl':TclVersion,
                         'tkinter':string.split(Tkinter.__version__)[1],
                         'tk':TkVersion}
        self.application.MessageBox(title = _("About Skencil"),
                                    message = abouttext,
                                    icon = pixmaps.IconLarge)
    #
    # Special methods. Mainly interesting for debugging
    #
    AddCmd('DocumentInfo', "Document Info...", key_stroke = 'F12')
    def DocumentInfo(self):
        """Show a dialog with document statistics and _sketch allocation counts."""
        text = self.document.DocumentInfo()
        from Sketch import _sketch
        # NOTE(review): 'trafos_allocted' matches the C-module spelling
        # used here; confirm against _sketch that it is not a typo.
        meminfo = 'Memory:\n'\
                '# Bezier Paths:\t%d\n'\
                '# RGBColors:\t%d\n' \
                '# Rects:\t\t%d\n'\
                '# Trafos:\t\t%d\n'\
                '# Points:\t\t%d' % (_sketch.num_allocated(),
                                     _sketch.colors_allocated(),
                                     _sketch.rects_allocated(),
                                     _sketch.trafos_allocted(),
                                     _sketch.points_allocated())
        text = text + '\n\n' + meminfo
        self.application.MessageBox(title = 'Document Info', message = text,
                                    icon = 'info')
    AddCmd('DumpXImage', 'Dump XImage')
    def DumpXImage(self):
        """Debug helper: dump the canvas's XImage buffer to /tmp/ximage.dat."""
        gc = self.canvas.gc
        if gc.ximage:
            gc.ximage.dump_data("/tmp/ximage.dat")
# AddCmd('export_bitmap', 'Export Bitmap')
# def export_bitmap(self):
# import export
# export.export_bitmap(self.document)
    AddCmd('python_prompt', 'Python Prompt', key_stroke = 'F11')
    def python_prompt(self):
        """Open an interactive Python prompt (only when the special menu is enabled)."""
        if config.preferences.show_special_menu:
            import prompt
            prompt.PythonPrompt()
#
# Insert Special Objects
#
#
# Create Image
#
def GetOpenImageFilename(self, title = None, initialdir = '',
initialfile = '', no_eps = 0):
if title is None:
title = _("Load Image")
if no_eps:
filetypes = skapp.bitmapfiletypes
else:
filetypes = skapp.imagefiletypes
filename = self.application.GetOpenFilename(title = title,
filetypes = filetypes,
initialdir = initialdir,
initialfile = initialfile)
return filename
    AddCmd('CreateImage', _("Load Raster/EPS Image..."),
           bitmap = pixmaps.Image, subscribe_to = None)
    def CreateImage(self, filename = None):
        """Load a raster or EPS image and place it on the canvas.

        Prompts for a filename when none is given.  The file's first
        256 bytes decide whether it is treated as EPS or as a raster
        image.  IOErrors are reported via a message box.
        """
        if not filename:
            filename = self.GetOpenImageFilename(title = _("Load Image"),
                                                 initialdir = config.preferences.image_dir,
                                                 initialfile = '')
        if filename:
            try:
                file = open(filename, 'r')
                is_eps = eps.IsEpsFileStart(file.read(256))
                file.close()
                # Remember the directory for the next open dialog.
                dir, name = os.path.split(filename)
                config.preferences.image_dir = dir
                if is_eps:
                    imageobj = eps.EpsImage(filename = filename)
                else:
                    imageobj = image.Image(imagefile = filename)
                self.canvas.PlaceObject(imageobj)
            except IOError, value:
                if type(value) == TupleType:
                    value = value[1]
                self.application.MessageBox(title = _("Load Image"),
                                            message = _("Cannot load %(filename)s:\n"
                                                        "%(message)s") \
                                            % {'filename':`os.path.split(filename)[1]`,
                                               'message':value},
                                            icon = 'warning')
    AddCmd('AddHorizGuideLine', _("Add Horizontal Guide Line"), 'AddGuideLine',
           args = 1)
    AddCmd('AddVertGuideLine', _("Add Vertical Guide Line"), 'AddGuideLine',
           args = 0)
    def AddGuideLine(self, horizontal = 1):
        """Place a new guide line; horizontal=1 for horizontal, 0 for vertical."""
        self.canvas.PlaceObject(GuideLine(Point(0, 0), horizontal))
#
#
#
    AddCmd('CreateStyleFromSelection', _("Name Style..."),
           sensitive_cb = ('document', 'CanCreateStyle'),
           subscribe_to = SELECTION)
    def CreateStyleFromSelection(self):
        """Create a named style from the current object's properties.

        Prompts for a style name and which properties to include, then
        delegates the actual style creation to the document.
        """
        import styledlg
        doc = self.document
        object = doc.CurrentObject()
        style_names = doc.GetStyleNames()
        if object:
            name = styledlg.GetStyleName(self.root, object, style_names)
            if name:
                # GetStyleName returns a (name, properties) pair.
                name, which_properties = name
                doc.CreateStyleFromSelection(name, which_properties)
def no_pattern(self, category):
import styledlg
if category == 'fill':
title = _("No Fill")
prop = 'fill_pattern'
else:
title = _("No Line")
prop = 'line_pattern'
styledlg.set_properties(self.root, self.document, title, category,
{prop: EmptyPattern})
#
# Document commands
#
AddDocCmd('SelectAll', _("Select All"), sensitive_cb = 'IsSelectionMode',
subscribe_to = MODE)
AddDocCmd('SelectNextObject', _("Select Next"), key_stroke = 'M-Right')
AddDocCmd('SelectPreviousObject', _("Select Previous"),
key_stroke = 'M-Left')
AddDocCmd('SelectFirstChild', _("Select First Child"),
key_stroke = 'M-Down')
AddDocCmd('SelectParent', _("Select Parent"), key_stroke = 'M-Up')
# rearrange object
AddDocCmd('RemoveSelected', _("Delete"), key_stroke = 'Delete',
bitmap = pixmaps.Delete)
AddDocCmd('MoveSelectedToTop', _("Move to Top"),
bitmap = pixmaps.MoveToTop, key_stroke = 'Home')
AddDocCmd('MoveSelectedToBottom', _("Move to Bottom"),
bitmap = pixmaps.MoveToBottom, key_stroke = 'End')
AddDocCmd('MoveSelectionUp', _("Move One Up"), bitmap = pixmaps.MoveOneUp,
key_stroke = 'S-Prior')
AddDocCmd('MoveSelectionDown', _("Move One Down"),
bitmap = pixmaps.MoveOneDown, key_stroke = 'S-Next')
AddDocCmd('DuplicateSelected', _("Duplicate"), bitmap = pixmaps.Duplicate,
key_stroke = 'C-d')
#
AddDocCmd('GroupSelected', _("Group"), sensitive_cb = 'CanGroup',
key_stroke = 'C-g', bitmap = pixmaps.Group)
AddDocCmd('UngroupSelected', _("Ungroup"), sensitive_cb = 'CanUngroup',
key_stroke = 'C-u', bitmap = pixmaps.Ungroup)
#
AddDocCmd('ConvertToCurve', _("Convert To Curve"),
sensitive_cb = 'CanConvertToCurve')
AddDocCmd('CombineBeziers', _("Combine Beziers"),
sensitive_cb = 'CanCombineBeziers')
AddDocCmd('SplitBeziers', _("Split Beziers"),
sensitive_cb = 'CanSplitBeziers')
#
# Align
#
AddDocCmd('AbutHorizontal', _("Abut Horizontal"))
AddDocCmd('AbutVertical', _("Abut Vertical"))
AddDocCmd('FlipHorizontal', _("Flip Horizontal"), 'FlipSelected',
args = (1, 0), bitmap = pixmaps.FlipHorizontal)
AddDocCmd('FlipVertical', _("Flip Vertical"), 'FlipSelected',
args = (0, 1), bitmap = pixmaps.FlipVertical)
# effects
AddDocCmd('CancelBlend', _("Cancel Blend"),
sensitive_cb = 'CanCancelBlend')
AddDocCmd('RemoveTransformation', _("Remove Transformation"))
AddDocCmd('CreateMaskGroup', _("Create Mask Group"),
sensitive_cb = 'CanCreateMaskGroup')
AddDocCmd('CreatePathText', _("Create Path Text"),
sensitive_cb = 'CanCreatePathText')
AddDocCmd('CreateClone', _("Create Clone"),
sensitive_cb = 'CanCreateClone')
#
# Cut/Paste
#
def CutCopySelected(self, method):
objects = getattr(self.document, method)()
if objects is not None:
self.application.SetClipboard(objects)
    AddCmd('CopySelected', _("Copy"), 'CutCopySelected',
           args= ('CopyForClipboard',), subscribe_to = SELECTION,
           sensitive_cb = ('document', 'HasSelection'))
    AddCmd('CutSelected', _("Cut"), 'CutCopySelected',
           args= ('CutForClipboard',), subscribe_to = SELECTION,
           sensitive_cb = ('document', 'HasSelection'))
    AddCmd('PasteClipboard', _("Paste"),
           subscribe_to = ('application', CLIPBOARD),
           sensitive_cb = ('application', 'ClipboardContainsData'))
    def PasteClipboard(self):
        """Place a duplicate of the clipboard object onto the canvas."""
        if self.application.ClipboardContainsData():
            obj = self.application.GetClipboard().Object()
            # Duplicate so the clipboard keeps its own copy.
            obj = obj.Duplicate()
            self.canvas.PlaceObject(obj)
#
# Undo/Redo
#
AddDocCmd('Undo', _("Undo"), subscribe_to = UNDO,
sensitive_cb = 'CanUndo', name_cb = 'UndoMenuText',
key_stroke = 'C-z', bitmap = pixmaps.Undo)
AddDocCmd('Redo', _("Redo"), subscribe_to = UNDO,
sensitive_cb = 'CanRedo', name_cb = 'RedoMenuText',
key_stroke = 'C-r', bitmap = pixmaps.Redo)
AddDocCmd('ResetUndo', _("Discard Undo History"), subscribe_to = None,
sensitive_cb = None)
#
# Styles
#
AddDocCmd('FillNone', _("No Fill"), 'AddStyle', args = EmptyFillStyle)
AddDocCmd('LineNone', _("No Line"), 'AddStyle', args = EmptyLineStyle)
AddDocCmd('UpdateStyle', _("Update Style"), 'UpdateDynamicStyleSel')
|
awkspace/ansible | refs/heads/devel | lib/ansible/modules/cloud/misc/virt_net.py | 8 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Maciej Delmanowski <drybjed@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: virt_net
author: "Maciej Delmanowski (@drybjed)"
version_added: "2.0"
short_description: Manage libvirt network configuration
description:
- Manage I(libvirt) networks.
options:
name:
required: true
aliases: ['network']
description:
- name of the network being managed. Note that network must be previously
defined with xml.
state:
required: false
choices: [ "active", "inactive", "present", "absent" ]
description:
- specify which state you want a network to be in.
If 'active', network will be started.
If 'present', ensure that network is present but do not change its
state; if it's missing, you need to specify xml argument.
If 'inactive', network will be stopped.
If 'undefined' or 'absent', network will be removed from I(libvirt) configuration.
command:
required: false
choices: [ "define", "create", "start", "stop", "destroy",
"undefine", "get_xml", "list_nets", "facts",
"info", "status", "modify"]
description:
- in addition to state management, various non-idempotent commands are available.
See examples.
Modify was added in version 2.1
autostart:
required: false
type: bool
description:
- Specify if a given network should be started automatically on system boot.
uri:
required: false
default: "qemu:///system"
description:
- libvirt connection uri.
xml:
required: false
description:
- XML document used with the define command.
requirements:
- "python >= 2.6"
- "python-libvirt"
- "python-lxml"
'''
EXAMPLES = '''
# Define a new network
- virt_net:
command: define
name: br_nat
xml: '{{ lookup("template", "network/bridge.xml.j2") }}'
# Start a network
- virt_net:
command: create
name: br_nat
# List available networks
- virt_net:
command: list_nets
# Get XML data of a specified network
- virt_net:
command: get_xml
name: br_nat
# Stop a network
- virt_net:
command: destroy
name: br_nat
# Undefine a network
- virt_net:
command: undefine
name: br_nat
# Gather facts about networks
# Facts will be available as 'ansible_libvirt_networks'
- virt_net:
command: facts
# Gather information about network managed by 'libvirt' remotely using uri
- virt_net:
command: info
uri: '{{ item }}'
with_items: '{{ libvirt_uris }}'
register: networks
# Ensure that a network is active (needs to be defined and built first)
- virt_net:
state: active
name: br_nat
# Ensure that a network is inactive
- virt_net:
state: inactive
name: br_nat
# Ensure that a given network will be started at boot
- virt_net:
autostart: yes
name: br_nat
# Disable autostart for a given network
- virt_net:
autostart: no
name: br_nat
# Add a new host in the dhcp pool
- virt_net:
name: br_nat
xml: "<host mac='FC:C2:33:00:6c:3c' name='my_vm' ip='192.168.122.30'/>"
'''
try:
import libvirt
except ImportError:
HAS_VIRT = False
else:
HAS_VIRT = True
try:
from lxml import etree
except ImportError:
HAS_XML = False
else:
HAS_XML = True
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
# Module return codes.
VIRT_FAILED = 1
VIRT_SUCCESS = 0
VIRT_UNAVAILABLE = 2
ALL_COMMANDS = []
# Commands that operate on a single named network.
ENTRY_COMMANDS = ['create', 'status', 'start', 'stop',
                  'undefine', 'destroy', 'get_xml', 'define',
                  'modify']
# Commands that operate on the whole host.
HOST_COMMANDS = ['list_nets', 'facts', 'info']
ALL_COMMANDS.extend(ENTRY_COMMANDS)
ALL_COMMANDS.extend(HOST_COMMANDS)
# Maps from libvirt's integer flags to the strings reported to users.
ENTRY_STATE_ACTIVE_MAP = {
    0: "inactive",
    1: "active"
}
ENTRY_STATE_AUTOSTART_MAP = {
    0: "no",
    1: "yes"
}
ENTRY_STATE_PERSISTENT_MAP = {
    0: "no",
    1: "yes"
}
class EntryNotFound(Exception):
    """Raised when a libvirt network with the requested name does not exist."""
    pass
class LibvirtConnection(object):
    """Thin wrapper around a libvirt connection for network operations.

    Most mutating methods honour Ansible's check mode: instead of
    performing the change they predict whether one would occur and
    exit the module early with changed=True.
    """
    def __init__(self, uri, module):
        self.module = module
        conn = libvirt.open(uri)
        if not conn:
            raise Exception("hypervisor connection failure")
        self.conn = conn
    def find_entry(self, entryid):
        """Return the network named entryid, or all networks for entryid == -1.

        Raises EntryNotFound if no network matches.
        """
        # entryid = -1 returns a list of everything
        results = []
        # Get active entries
        for name in self.conn.listNetworks():
            entry = self.conn.networkLookupByName(name)
            results.append(entry)
        # Get inactive entries
        for name in self.conn.listDefinedNetworks():
            entry = self.conn.networkLookupByName(name)
            results.append(entry)
        if entryid == -1:
            return results
        for entry in results:
            if entry.name() == entryid:
                return entry
        raise EntryNotFound("network %s not found" % entryid)
    def create(self, entryid):
        """Start the network; in check mode report whether a start would occur."""
        if not self.module.check_mode:
            return self.find_entry(entryid).create()
        else:
            try:
                state = self.find_entry(entryid).isActive()
            except Exception:
                return self.module.exit_json(changed=True)
            if not state:
                return self.module.exit_json(changed=True)
    def modify(self, entryid, xml):
        """Apply an XML fragment to the network; currently only <host> DHCP entries.

        Returns True when a change was (or would be) made, False when the
        host entry already matches; fails the module for unsupported XML.
        """
        network = self.find_entry(entryid)
        # identify what type of entry is given in the xml
        new_data = etree.fromstring(xml)
        old_data = etree.fromstring(network.XMLDesc(0))
        if new_data.tag == 'host':
            mac_addr = new_data.get('mac')
            hosts = old_data.xpath('/network/ip/dhcp/host')
            # find the one mac we're looking for
            host = None
            for h in hosts:
                if h.get('mac') == mac_addr:
                    host = h
                    break
            if host is None:
                # add the host
                if not self.module.check_mode:
                    res = network.update(libvirt.VIR_NETWORK_UPDATE_COMMAND_ADD_LAST,
                                         libvirt.VIR_NETWORK_SECTION_IP_DHCP_HOST,
                                         -1, xml, libvirt.VIR_NETWORK_UPDATE_AFFECT_CURRENT)
                else:
                    # pretend there was a change
                    res = 0
                if res == 0:
                    return True
            else:
                # change the host
                if host.get('name') == new_data.get('name') and host.get('ip') == new_data.get('ip'):
                    return False
                else:
                    if not self.module.check_mode:
                        res = network.update(libvirt.VIR_NETWORK_UPDATE_COMMAND_MODIFY,
                                             libvirt.VIR_NETWORK_SECTION_IP_DHCP_HOST,
                                             -1, xml, libvirt.VIR_NETWORK_UPDATE_AFFECT_CURRENT)
                    else:
                        # pretend there was a change
                        res = 0
                    if res == 0:
                        return True
        # command, section, parentIndex, xml, flags=0
        self.module.fail_json(msg='updating this is not supported yet %s' % to_native(xml))
    def destroy(self, entryid):
        """Stop the network; in check mode report whether a stop would occur."""
        if not self.module.check_mode:
            return self.find_entry(entryid).destroy()
        else:
            if self.find_entry(entryid).isActive():
                return self.module.exit_json(changed=True)
    def undefine(self, entryid):
        """Remove the network definition from libvirt."""
        if not self.module.check_mode:
            return self.find_entry(entryid).undefine()
        else:
            if not self.find_entry(entryid):
                return self.module.exit_json(changed=True)
    def get_status2(self, entry):
        """Map an already-resolved network object's state to 'active'/'inactive'."""
        state = entry.isActive()
        return ENTRY_STATE_ACTIVE_MAP.get(state, "unknown")
    def get_status(self, entryid):
        """Return 'active'/'inactive' for the named network.

        In check mode a missing network is reported as 'inactive'.
        """
        if not self.module.check_mode:
            state = self.find_entry(entryid).isActive()
            return ENTRY_STATE_ACTIVE_MAP.get(state, "unknown")
        else:
            try:
                state = self.find_entry(entryid).isActive()
                return ENTRY_STATE_ACTIVE_MAP.get(state, "unknown")
            except Exception:
                return ENTRY_STATE_ACTIVE_MAP.get("inactive", "unknown")
    def get_uuid(self, entryid):
        """Return the network's UUID string."""
        return self.find_entry(entryid).UUIDString()
    def get_xml(self, entryid):
        """Return the network's XML description."""
        return self.find_entry(entryid).XMLDesc(0)
    def get_forward(self, entryid):
        """Return the forward mode from the network XML; raise ValueError if absent."""
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        try:
            result = xml.xpath('/network/forward')[0].get('mode')
        except Exception:
            raise ValueError('Forward mode not specified')
        return result
    def get_domain(self, entryid):
        """Return the DNS domain from the network XML; raise ValueError if absent."""
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        try:
            result = xml.xpath('/network/domain')[0].get('name')
        except Exception:
            raise ValueError('Domain not specified')
        return result
    def get_macaddress(self, entryid):
        """Return the bridge MAC address from the network XML; raise ValueError if absent."""
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        try:
            result = xml.xpath('/network/mac')[0].get('address')
        except Exception:
            raise ValueError('MAC address not specified')
        return result
    def get_autostart(self, entryid):
        """Return the autostart flag as 'yes'/'no'."""
        state = self.find_entry(entryid).autostart()
        return ENTRY_STATE_AUTOSTART_MAP.get(state, "unknown")
    def get_autostart2(self, entryid):
        """Return the raw autostart flag; check mode treats a lookup failure as a change."""
        if not self.module.check_mode:
            return self.find_entry(entryid).autostart()
        else:
            try:
                return self.find_entry(entryid).autostart()
            except Exception:
                return self.module.exit_json(changed=True)
    def set_autostart(self, entryid, val):
        """Set the autostart flag; in check mode report whether it would change."""
        if not self.module.check_mode:
            return self.find_entry(entryid).setAutostart(val)
        else:
            try:
                state = self.find_entry(entryid).autostart()
            except Exception:
                return self.module.exit_json(changed=True)
            if bool(state) != val:
                return self.module.exit_json(changed=True)
    def get_bridge(self, entryid):
        """Return the name of the bridge device backing the network."""
        return self.find_entry(entryid).bridgeName()
    def get_persistent(self, entryid):
        """Return 'yes'/'no' depending on whether the network is persistent."""
        state = self.find_entry(entryid).isPersistent()
        return ENTRY_STATE_PERSISTENT_MAP.get(state, "unknown")
    def get_dhcp_leases(self, entryid):
        """Return the network's current DHCP leases."""
        network = self.find_entry(entryid)
        return network.DHCPLeases()
    def define_from_xml(self, entryid, xml):
        """Define a new network from XML; check mode reports a change when absent."""
        if not self.module.check_mode:
            return self.conn.networkDefineXML(xml)
        else:
            try:
                self.find_entry(entryid)
            except Exception:
                return self.module.exit_json(changed=True)
class VirtNetwork(object):
    """High-level network operations used by core(); delegates to LibvirtConnection."""
    def __init__(self, uri, module):
        self.module = module
        self.uri = uri
        self.conn = LibvirtConnection(self.uri, self.module)
    def get_net(self, entryid):
        """Return the libvirt network object; raises EntryNotFound when missing."""
        return self.conn.find_entry(entryid)
    def list_nets(self, state=None):
        """Return the names of all networks, optionally filtered by state string."""
        results = []
        for entry in self.conn.find_entry(-1):
            if state:
                if state == self.conn.get_status2(entry):
                    results.append(entry.name())
            else:
                results.append(entry.name())
        return results
    def state(self):
        """Return "name state" strings for every network."""
        results = []
        for entry in self.list_nets():
            state_blurb = self.conn.get_status(entry)
            results.append("%s %s" % (entry, state_blurb))
        return results
    def autostart(self, entryid):
        # Enables autostart unconditionally (the 'autostart' command).
        return self.conn.set_autostart(entryid, True)
    def get_autostart(self, entryid):
        """Return the raw autostart flag for the network."""
        return self.conn.get_autostart2(entryid)
    def set_autostart(self, entryid, state):
        """Set the autostart flag to the given boolean."""
        return self.conn.set_autostart(entryid, state)
    def create(self, entryid):
        """Start the network ('create'/'start' commands are synonyms)."""
        return self.conn.create(entryid)
    def modify(self, entryid, xml):
        """Apply an XML fragment to the network; returns True on change."""
        return self.conn.modify(entryid, xml)
    def start(self, entryid):
        return self.conn.create(entryid)
    def stop(self, entryid):
        """Stop the network ('stop'/'destroy' commands are synonyms)."""
        return self.conn.destroy(entryid)
    def destroy(self, entryid):
        return self.conn.destroy(entryid)
    def undefine(self, entryid):
        """Remove the network definition."""
        return self.conn.undefine(entryid)
    def status(self, entryid):
        """Return 'active'/'inactive' for the network."""
        return self.conn.get_status(entryid)
    def get_xml(self, entryid):
        """Return the network's XML description."""
        return self.conn.get_xml(entryid)
    def define(self, entryid, xml):
        """Define a new network from XML."""
        return self.conn.define_from_xml(entryid, xml)
    def info(self):
        """Like facts(), but returns the data under the 'networks' key."""
        return self.facts(facts_mode='info')
    def facts(self, facts_mode='facts'):
        """Collect per-network details; optional fields are skipped when absent."""
        results = dict()
        for entry in self.list_nets():
            results[entry] = dict()
            results[entry]["autostart"] = self.conn.get_autostart(entry)
            results[entry]["persistent"] = self.conn.get_persistent(entry)
            results[entry]["state"] = self.conn.get_status(entry)
            results[entry]["bridge"] = self.conn.get_bridge(entry)
            results[entry]["uuid"] = self.conn.get_uuid(entry)
            try:
                results[entry]["dhcp_leases"] = self.conn.get_dhcp_leases(entry)
            # not supported on RHEL 6
            except AttributeError:
                pass
            try:
                results[entry]["forward_mode"] = self.conn.get_forward(entry)
            except ValueError:
                pass
            try:
                results[entry]["domain"] = self.conn.get_domain(entry)
            except ValueError:
                pass
            try:
                results[entry]["macaddress"] = self.conn.get_macaddress(entry)
            except ValueError:
                pass
        facts = dict()
        if facts_mode == 'facts':
            facts["ansible_facts"] = dict()
            facts["ansible_facts"]["ansible_libvirt_networks"] = results
        elif facts_mode == 'info':
            facts['networks'] = results
        return facts
def core(module):
    """Dispatch on the module parameters and return (rc, result dict).

    Handles, in order: state filtering for list_nets, declarative
    state management, imperative commands, and autostart toggling.
    Fails the module when neither state, command nor autostart is given.
    """
    state = module.params.get('state', None)
    name = module.params.get('name', None)
    command = module.params.get('command', None)
    uri = module.params.get('uri', None)
    xml = module.params.get('xml', None)
    autostart = module.params.get('autostart', None)
    v = VirtNetwork(uri, module)
    res = {}
    # Special case: list_nets filtered by state.
    if state and command == 'list_nets':
        res = v.list_nets(state=state)
        if not isinstance(res, dict):
            res = {command: res}
        return VIRT_SUCCESS, res
    if state:
        if not name:
            module.fail_json(msg="state change requires a specified name")
        res['changed'] = False
        if state in ['active']:
            if v.status(name) != 'active':
                res['changed'] = True
                res['msg'] = v.start(name)
        elif state in ['present']:
            try:
                v.get_net(name)
            except EntryNotFound:
                if not xml:
                    module.fail_json(msg="network '" + name + "' not present, but xml not specified")
                v.define(name, xml)
                res = {'changed': True, 'created': name}
        elif state in ['inactive']:
            entries = v.list_nets()
            if name in entries:
                if v.status(name) != 'inactive':
                    res['changed'] = True
                    res['msg'] = v.destroy(name)
        elif state in ['undefined', 'absent']:
            entries = v.list_nets()
            if name in entries:
                # A running network must be stopped before it can be undefined.
                if v.status(name) != 'inactive':
                    v.destroy(name)
                res['changed'] = True
                res['msg'] = v.undefine(name)
        else:
            module.fail_json(msg="unexpected state")
        return VIRT_SUCCESS, res
    if command:
        if command in ENTRY_COMMANDS:
            if not name:
                module.fail_json(msg="%s requires 1 argument: name" % command)
            if command in ('define', 'modify'):
                if not xml:
                    module.fail_json(msg=command + " requires xml argument")
                try:
                    v.get_net(name)
                except EntryNotFound:
                    # Both define and modify create the network when absent.
                    v.define(name, xml)
                    res = {'changed': True, 'created': name}
                else:
                    if command == 'modify':
                        mod = v.modify(name, xml)
                        res = {'changed': mod, 'modified': name}
                return VIRT_SUCCESS, res
            res = getattr(v, command)(name)
            if not isinstance(res, dict):
                res = {command: res}
            return VIRT_SUCCESS, res
        elif hasattr(v, command):
            # Host-wide commands take no name argument.
            res = getattr(v, command)()
            if not isinstance(res, dict):
                res = {command: res}
            return VIRT_SUCCESS, res
        else:
            module.fail_json(msg="Command %s not recognized" % command)
    if autostart is not None:
        if not name:
            module.fail_json(msg="state change requires a specified name")
        res['changed'] = False
        if autostart:
            if not v.get_autostart(name):
                res['changed'] = True
                res['msg'] = v.set_autostart(name, True)
        else:
            if v.get_autostart(name):
                res['changed'] = True
                res['msg'] = v.set_autostart(name, False)
        return VIRT_SUCCESS, res
    module.fail_json(msg="expected state or command parameter to be specified")
def main():
    """Module entry point: build the AnsibleModule, validate deps, run core()."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(aliases=['network']),
            state=dict(choices=['active', 'inactive', 'present', 'absent']),
            command=dict(choices=ALL_COMMANDS),
            uri=dict(default='qemu:///system'),
            xml=dict(),
            autostart=dict(type='bool')
        ),
        supports_check_mode=True
    )
    if not HAS_VIRT:
        module.fail_json(
            msg='The `libvirt` module is not importable. Check the requirements.'
        )
    if not HAS_XML:
        module.fail_json(
            msg='The `lxml` module is not importable. Check the requirements.'
        )
    rc = VIRT_SUCCESS
    try:
        rc, result = core(module)
    except Exception as e:
        module.fail_json(msg=str(e))
    if rc != 0:  # something went wrong emit the msg
        module.fail_json(rc=rc, msg=result)
    else:
        module.exit_json(**result)
if __name__ == '__main__':
    main()
|
EDUlib/edx-platform | refs/heads/master | lms/djangoapps/support/urls.py | 5 | """
URLs for the student support app.
"""
from django.conf.urls import url
from .views.certificate import CertificatesSupportView
from .views.contact_us import ContactUsView
from .views.course_entitlements import EntitlementSupportView
from .views.enrollments import EnrollmentSupportListView, EnrollmentSupportView
from .views.feature_based_enrollments import FeatureBasedEnrollmentsSupportView
from .views.index import index
from .views.manage_user import ManageUserDetailView, ManageUserSupportView
from .views.program_enrollments import LinkProgramEnrollmentSupportView, ProgramEnrollmentsInspectorView
from .views.sso_records import SsoView
COURSE_ENTITLEMENTS_VIEW = EntitlementSupportView.as_view()
app_name = 'support'
# URL routes for the support tools; the trailing '/?' makes the final
# slash optional on most endpoints.
urlpatterns = [
    url(r'^$', index, name="index"),
    url(r'^certificates/?$', CertificatesSupportView.as_view(), name="certificates"),
    url(r'^enrollment/?$', EnrollmentSupportView.as_view(), name="enrollment"),
    url(r'^course_entitlement/?$', COURSE_ENTITLEMENTS_VIEW, name="course_entitlement"),
    url(r'^contact_us/?$', ContactUsView.as_view(), name="contact_us"),
    url(
        r'^enrollment/(?P<username_or_email>[\w.@+-]+)?$',
        EnrollmentSupportListView.as_view(),
        name="enrollment_list"
    ),
    url(r'^manage_user/?$', ManageUserSupportView.as_view(), name="manage_user"),
    url(
        r'^manage_user/(?P<username_or_email>[\w.@+-]+)?$',
        ManageUserDetailView.as_view(),
        name="manage_user_detail"
    ),
    url(
        r'^feature_based_enrollments/?$',
        FeatureBasedEnrollmentsSupportView.as_view(),
        name="feature_based_enrollments"
    ),
    url(r'link_program_enrollments/?$', LinkProgramEnrollmentSupportView.as_view(), name='link_program_enrollments'),
    url(
        r'program_enrollments_inspector/?$',
        ProgramEnrollmentsInspectorView.as_view(),
        name='program_enrollments_inspector'
    ),
    url(r'sso_records/(?P<username_or_email>[\w.@+-]+)?$', SsoView.as_view(), name='sso_records'),
]
|
rishilification/phantomjs | refs/heads/master | src/qt/qtwebkit/Tools/BuildSlaveSupport/wait-for-SVN-server.py | 118 | #!/usr/bin/env python
#
# Copyright (C) 2006 John Pye
# Copyright (C) 2012 University of Szeged
#
# This script is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
from optparse import OptionParser
import exceptions
import sys
import time
import xml.dom.minidom
import os
import subprocess
def getLatestSVNRevision(SVNServer):
    """Return the latest revision number (as a string) of the given SVN server.

    Runs 'svn log --xml --limit=1' and parses the revision attribute;
    exits the process with status 1 when the XML cannot be parsed.
    """
    try:
        p = subprocess.Popen(["svn", "log", "--non-interactive", "--verbose", "--xml", "--limit=1", SVNServer], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        response = p.communicate()[0]
        doc = xml.dom.minidom.parseString(response)
        el = doc.getElementsByTagName("logentry")[0]
        return el.getAttribute("revision")
    except xml.parsers.expat.ExpatError, e:
        print "FAILED TO PARSE 'svn log' XML:"
        print str(e)
        print "----"
        print "RECEIVED TEXT:"
        print response
        sys.exit(1)
def waitForSVNRevision(SVNServer, revision):
    """Poll the SVN server every 5 seconds until it has the given revision.

    A missing or non-numeric revision argument only logs the server's
    latest revision and returns immediately.
    """
    if not revision or not revision.isdigit():
        latestRevision = int(getLatestSVNRevision(SVNServer))
        print "Latest SVN revision on %s is r%d. Don't wait, because revision argument isn't a valid SVN revision." % (SVNServer, latestRevision)
        return
    revision = int(revision)
    while True:
        latestRevision = int(getLatestSVNRevision(SVNServer))
        if latestRevision < revision:
            print "Latest SVN revision on %s is r%d, but we are waiting for r%d. Sleeping for 5 seconds." % (SVNServer, latestRevision, revision)
            time.sleep(5)
        else:
            print "Latest SVN revision on %s is r%d, which is newer or equal than r%d." % (SVNServer, latestRevision, revision)
            break
if __name__ == '__main__':
    # Command line: -s/--svn-server <url> [-r/--revision <number>]
    parser = OptionParser()
    parser.add_option("-r", "--revision", dest="revision", help="SVN revision number")
    parser.add_option("-s", "--svn-server", dest="SVNServer", help="SVN server")
    options, args = parser.parse_args()
    waitForSVNRevision(options.SVNServer, options.revision)
|
carlmw/oscar-wager | refs/heads/master | django/core/serializers/__init__.py | 36 | """
Interfaces for serializing Django objects.
Usage::
from django.core import serializers
json = serializers.serialize("json", some_query_set)
objects = list(serializers.deserialize("json", json))
To add your own serializers, use the SERIALIZATION_MODULES setting::
SERIALIZATION_MODULES = {
"csv" : "path.to.csv.serializer",
"txt" : "path.to.txt.serializer",
}
"""
from django.conf import settings
from django.utils import importlib
# Built-in serializers
# Built-in serializers
BUILTIN_SERIALIZERS = {
    "xml" : "django.core.serializers.xml_serializer",
    "python" : "django.core.serializers.python",
    "json" : "django.core.serializers.json",
}
# Check for PyYaml and register the serializer if it's available.
try:
    import yaml
    BUILTIN_SERIALIZERS["yaml"] = "django.core.serializers.pyyaml"
except ImportError:
    pass
# Global format -> serializer-module registry, populated lazily by
# _load_serializers().
_serializers = {}
def register_serializer(format, serializer_module, serializers=None):
    """Register a new serializer.

    ``serializer_module`` should be the fully qualified module name
    for the serializer.

    If ``serializers`` is provided, the registration will be added
    to the provided dictionary.

    If ``serializers`` is not provided, the registration will be made
    directly into the global register of serializers. Adding serializers
    directly is not a thread-safe operation.
    """
    module = importlib.import_module(serializer_module)
    if serializers is None:
        _serializers[format] = module
    else:
        serializers[format] = module
def unregister_serializer(format):
    """Unregister a given serializer. This is not a thread-safe operation.

    Raises KeyError if the format was never registered.
    """
    del _serializers[format]
def get_serializer(format):
    """Return the Serializer class registered for *format*."""
    if not _serializers:
        _load_serializers()
    serializer_module = _serializers[format]
    return serializer_module.Serializer
def get_serializer_formats():
    """Return every format name that currently has a registered serializer."""
    if not _serializers:
        _load_serializers()
    return _serializers.keys()
def get_public_serializer_formats():
    """Return the formats whose serializers are not flagged internal_use_only."""
    if not _serializers:
        _load_serializers()
    # items() (rather than the Python-2-only iteritems()) keeps this working
    # under both Python 2 and 3; the registry is tiny so the copy is cheap.
    return [k for k, v in _serializers.items() if not v.Serializer.internal_use_only]
def get_deserializer(format):
    """Return the Deserializer class registered for *format*."""
    if not _serializers:
        _load_serializers()
    serializer_module = _serializers[format]
    return serializer_module.Deserializer
def serialize(format, queryset, **options):
    """
    Serialize a queryset (or any iterator that returns database objects)
    using the serializer registered for *format* and return its output.
    """
    serializer = get_serializer(format)()
    serializer.serialize(queryset, **options)
    return serializer.getvalue()
def deserialize(format, stream_or_string, **options):
    """
    Deserialize a stream or a string.

    Returns an iterator yielding ``(obj, m2m_relation_dict)`` pairs, where
    ``obj`` is an instantiated -- but *unsaved* -- object and
    ``m2m_relation_dict`` maps ``m2m_field_name`` to a list of related objects.
    """
    deserializer_class = get_deserializer(format)
    return deserializer_class(stream_or_string, **options)
def _load_serializers():
    """
    Register built-in and settings-defined serializers. Done lazily so that
    user code has a chance to (e.g.) set up custom settings without needing
    to be careful of import order.
    """
    global _serializers
    registered = {}
    for fmt in BUILTIN_SERIALIZERS:
        register_serializer(fmt, BUILTIN_SERIALIZERS[fmt], registered)
    # User-supplied modules override/extend the built-ins.
    if hasattr(settings, "SERIALIZATION_MODULES"):
        user_modules = settings.SERIALIZATION_MODULES
        for fmt in user_modules:
            register_serializer(fmt, user_modules[fmt], registered)
    _serializers = registered
|
nirb/whatsapp | refs/heads/master | build/lib/yowsup/layers/protocol_receipts/protocolentities/test_receipt_incoming.py | 68 | from yowsup.layers.protocol_receipts.protocolentities import IncomingReceiptProtocolEntity
from yowsup.structs.protocolentity import ProtocolEntityTest
import unittest
import time
class IncomingReceiptProtocolEntityTest(ProtocolEntityTest, unittest.TestCase):
    def setUp(self):
        # Build a reference node from a freshly created incoming receipt;
        # ProtocolEntityTest round-trips self.node through self.ProtocolEntity.
        entity = IncomingReceiptProtocolEntity("123", "sender", int(time.time()))
        self.node = entity.toProtocolTreeNode()
        self.ProtocolEntity = IncomingReceiptProtocolEntity
|
CristianBB/SickRage | refs/heads/develop | lib/libtrakt/exceptions.py | 55 | class traktException(Exception):
pass
class traktAuthException(traktException):
    """Raised when trakt rejects the client's credentials/token."""
class traktServerBusy(traktException):
    """Raised when the trakt server reports it is busy/overloaded."""
|
Changaco/oh-mainline | refs/heads/master | vendor/packages/python-social-auth/social/backends/changetip.py | 68 | from social.backends.oauth import BaseOAuth2
class ChangeTipOAuth2(BaseOAuth2):
    """ChangeTip OAuth authentication backend

    https://www.changetip.com/api
    """
    name = 'changetip'
    AUTHORIZATION_URL = 'https://www.changetip.com/o/authorize/'
    ACCESS_TOKEN_URL = 'https://www.changetip.com/o/token/'
    ACCESS_TOKEN_METHOD = 'POST'
    SCOPE_SEPARATOR = ' '
    def get_user_details(self, response):
        """Return user details from ChangeTip account"""
        # ChangeTip does not expose real names, so those stay empty.
        details = {
            'first_name': '',
            'last_name': '',
        }
        details['username'] = response['username']
        details['email'] = response.get('email', '')
        return details
    def user_data(self, access_token, *args, **kwargs):
        """Loads user data from service"""
        endpoint = 'https://api.changetip.com/v2/me/'
        return self.get_json(endpoint, params={'access_token': access_token})
|
gfyoung/numpy | refs/heads/master | numpy/distutils/fcompiler/intel.py | 25 | # http://developer.intel.com/software/products/compilers/flin/
from __future__ import division, absolute_import, print_function
import sys
from numpy.distutils.ccompiler import simple_version_match
from numpy.distutils.fcompiler import FCompiler, dummy_fortran_file
# Names of the FCompiler subclasses this module exports; consumed by
# numpy.distutils.fcompiler when enumerating available Fortran compilers.
compilers = ['IntelFCompiler', 'IntelVisualFCompiler',
             'IntelItaniumFCompiler', 'IntelItaniumVisualFCompiler',
             'IntelEM64VisualFCompiler', 'IntelEM64TFCompiler']
def intel_version_match(type):
    """Build a version matcher keyed on the given architecture pattern."""
    # Match against the important stuff in the version string.
    pattern = r'Intel.*?Fortran.*?(?:%s).*?Version' % (type,)
    return simple_version_match(start=pattern)
class BaseIntelFCompiler(FCompiler):
    """Common behaviour shared by all Intel Fortran compiler flavours."""
    def update_executables(self):
        # Version probing compiles a small throwaway Fortran file.
        probe = dummy_fortran_file()
        self.executables['version_cmd'] = ['<F77>', '-FI', '-V', '-c',
                                           probe + '.f', '-o', probe + '.o']
    def runtime_library_dir_option(self, dir):
        # Embed an rpath so built shared objects locate their runtime libs.
        return '-Wl,-rpath="' + dir + '"'
class IntelFCompiler(BaseIntelFCompiler):
    # Driver for the classic 32-bit Intel Fortran compiler (ifort/ifc).
    compiler_type = 'intel'
    compiler_aliases = ('ifort',)
    description = 'Intel Fortran Compiler for 32-bit apps'
    version_match = intel_version_match('32-bit|IA-32')
    possible_executables = ['ifort', 'ifc']
    executables = {
        'version_cmd'  : None,          # set by update_executables
        'compiler_f77' : [None, "-72", "-w90", "-w95"],
        'compiler_f90' : [None],
        'compiler_fix' : [None, "-FI"],
        'linker_so'    : ["<F90>", "-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
    pic_flags = ['-fPIC']
    module_dir_switch = '-module '  # Don't remove ending space!
    module_include_switch = '-I'
    def get_flags_free(self):
        # Flag selecting free-form Fortran source.
        return ['-FR']
    def get_flags(self):
        return ['-fPIC']
    def get_flags_opt(self):  # Scipy test failures with -O2
        # ifort renamed -openmp to -qopenmp starting with version 15;
        # the comparison is lexicographic on the version string.
        v = self.get_version()
        mpopt = 'openmp' if v and v < '15' else 'qopenmp'
        return ['-fp-model strict -O1 -{}'.format(mpopt)]
    def get_flags_arch(self):
        return []
    def get_flags_linker_so(self):
        # Start from the generic link flags, then adjust for ifort and macOS.
        opt = FCompiler.get_flags_linker_so(self)
        v = self.get_version()
        if v and v >= '8.0':
            # ifort >= 8.0 needs -nofor_main when the shared object's entry
            # point is not a Fortran main program.
            opt.append('-nofor_main')
        if sys.platform == 'darwin':
            # Here, it's -dynamiclib: replace -shared in place (or prepend
            # if -shared is absent) and allow undefined dynamic lookup.
            try:
                idx = opt.index('-shared')
                opt.remove('-shared')
            except ValueError:
                idx = 0
            opt[idx:idx] = ['-dynamiclib', '-Wl,-undefined,dynamic_lookup']
        return opt
class IntelItaniumFCompiler(IntelFCompiler):
    # Same behaviour as IntelFCompiler but matched against Itanium/IA-64
    # version strings and with the IA-64 executable names added.
    compiler_type = 'intele'
    compiler_aliases = ()
    description = 'Intel Fortran Compiler for Itanium apps'
    version_match = intel_version_match('Itanium|IA-64')
    possible_executables = ['ifort', 'efort', 'efc']
    executables = {
        'version_cmd'  : None,
        'compiler_f77' : [None, "-FI", "-w90", "-w95"],
        'compiler_fix' : [None, "-FI"],
        'compiler_f90' : [None],
        'linker_so'    : ['<F90>', "-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
class IntelEM64TFCompiler(IntelFCompiler):
    # 64-bit (EM64T / Intel 64) variant of the Unix-style ifort driver.
    compiler_type = 'intelem'
    compiler_aliases = ()
    description = 'Intel Fortran Compiler for 64-bit apps'
    version_match = intel_version_match('EM64T-based|Intel\\(R\\) 64|64|IA-64|64-bit')
    possible_executables = ['ifort', 'efort', 'efc']
    executables = {
        'version_cmd'  : None,
        'compiler_f77' : [None, "-FI"],
        'compiler_fix' : [None, "-FI"],
        'compiler_f90' : [None],
        'linker_so'    : ['<F90>', "-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
    def get_flags(self):
        return ['-fPIC']
    def get_flags_opt(self):  # Scipy test failures with -O2
        # Same version-dependent OpenMP spelling as IntelFCompiler.
        v = self.get_version()
        mpopt = 'openmp' if v and v < '15' else 'qopenmp'
        return ['-fp-model strict -O1 -{}'.format(mpopt)]
    def get_flags_arch(self):
        # NOTE(review): returns [''] (a single empty-string flag) rather
        # than [] as the base class does — presumably harmless on the
        # command line, but confirm before normalizing.
        return ['']
# Is there no difference in the version string between the above compilers
# and the Visual compilers?
class IntelVisualFCompiler(BaseIntelFCompiler):
    # Windows (MSVC-style, slash-prefixed flags) variant of the 32-bit driver.
    compiler_type = 'intelv'
    description = 'Intel Visual Fortran Compiler for 32-bit apps'
    version_match = intel_version_match('32-bit|IA-32')
    def update_executables(self):
        # Windows spelling of the version probe (/FI, /c, /o).
        f = dummy_fortran_file()
        self.executables['version_cmd'] = ['<F77>', '/FI', '/c',
                                           f + '.f', '/o', f + '.o']
    ar_exe = 'lib.exe'
    possible_executables = ['ifort', 'ifl']
    executables = {
        'version_cmd'  : None,
        'compiler_f77' : [None],
        'compiler_fix' : [None],
        'compiler_f90' : [None],
        'linker_so'    : [None],
        'archiver'     : [ar_exe, "/verbose", "/OUT:"],
        'ranlib'       : None
        }
    compile_switch = '/c '
    object_switch = '/Fo'     # No space after /Fo!
    library_switch = '/OUT:'  # No space after /OUT:!
    module_dir_switch = '/module:'  # No space after /module:
    module_include_switch = '/I'
    def get_flags(self):
        opt = ['/nologo', '/MD', '/nbs', '/names:lowercase', '/assume:underscore']
        return opt
    def get_flags_free(self):
        return []
    def get_flags_debug(self):
        return ['/4Yb', '/d2']
    def get_flags_opt(self):
        return ['/O1']  # Scipy test failures with /O2
    def get_flags_arch(self):
        return ["/arch:IA32", "/QaxSSE3"]
    def runtime_library_dir_option(self, dir):
        # rpath has no equivalent on Windows.
        raise NotImplementedError
class IntelItaniumVisualFCompiler(IntelVisualFCompiler):
    # Itanium variant of the Windows driver.
    compiler_type = 'intelev'
    description = 'Intel Visual Fortran Compiler for Itanium apps'
    version_match = intel_version_match('Itanium')
    possible_executables = ['efl']  # XXX this is a wild guess
    ar_exe = IntelVisualFCompiler.ar_exe
    executables = {
        'version_cmd'  : None,
        'compiler_f77' : [None, "-FI", "-w90", "-w95"],
        'compiler_fix' : [None, "-FI", "-4L72", "-w"],
        'compiler_f90' : [None],
        'linker_so'    : ['<F90>', "-shared"],
        'archiver'     : [ar_exe, "/verbose", "/OUT:"],
        'ranlib'       : None
        }
class IntelEM64VisualFCompiler(IntelVisualFCompiler):
    # 64-bit variant of the Windows driver.
    compiler_type = 'intelvem'
    description = 'Intel Visual Fortran Compiler for 64-bit apps'
    version_match = simple_version_match(start=r'Intel\(R\).*?64,')
    def get_flags_arch(self):
        # NOTE(review): returns [''] rather than [] — see IntelEM64TFCompiler.
        return ['']
if __name__ == '__main__':
    # Manual smoke test: report the detected Intel Fortran compiler version.
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils import customized_fcompiler
    print(customized_fcompiler(compiler='intel').get_version())
|
rohitwaghchaure/alec_frappe5_erpnext | refs/heads/develop | erpnext/accounts/doctype/journal_voucher/test_journal_voucher.py | 24 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import unittest, frappe
from frappe.utils import flt
class TestJournalVoucher(unittest.TestCase):
	"""Integration tests for Journal Voucher linkage ('against_*' fields),
	advance-paid tracking, stock-account validation, and budget checks.

	Relies on a live frappe database and the module-level ``test_records``
	fixture loaded at the bottom of this file.
	"""
	def test_journal_voucher_with_against_jv(self):
		# A JV can be linked against another (submitted) JV.
		jv_invoice = frappe.copy_doc(test_records[2])
		base_jv = frappe.copy_doc(test_records[0])
		self.jv_against_voucher_testcase(base_jv, jv_invoice)
	def test_jv_against_sales_order(self):
		# A JV advance can be linked against a Sales Order.
		from erpnext.selling.doctype.sales_order.test_sales_order \
			import test_records as so_test_records
		sales_order = frappe.copy_doc(so_test_records[0])
		base_jv = frappe.copy_doc(test_records[0])
		self.jv_against_voucher_testcase(base_jv, sales_order)
	def test_jv_against_purchase_order(self):
		# A JV advance can be linked against a Purchase Order.
		from erpnext.buying.doctype.purchase_order.test_purchase_order \
			import test_records as po_test_records
		purchase_order = frappe.copy_doc(po_test_records[0])
		base_jv = frappe.copy_doc(test_records[1])
		self.jv_against_voucher_testcase(base_jv, purchase_order)
	def jv_against_voucher_testcase(self, base_jv, test_voucher):
		# Shared scenario: submit test_voucher, link base_jv against it,
		# verify the link (and advance paid, if applicable), then cancel.
		# Credit side is used when linking against SO/JV, debit against PO.
		dr_or_cr = "credit" if test_voucher.doctype in ["Sales Order", "Journal Voucher"] else "debit"
		field_dict = {'Journal Voucher': "against_jv",
			'Sales Order': "against_sales_order",
			'Purchase Order': "against_purchase_order"
			}
		self.clear_account_balance()
		test_voucher.insert()
		test_voucher.submit()
		if test_voucher.doctype == "Journal Voucher":
			self.assertTrue(frappe.db.sql("""select name from `tabJournal Voucher Detail`
				where account = %s and docstatus = 1 and parent = %s""",
				("_Test Customer - _TC", test_voucher.name)))
		# No link rows should exist before base_jv is submitted.
		self.assertTrue(not frappe.db.sql("""select name from `tabJournal Voucher Detail`
			where %s=%s""" % (field_dict.get(test_voucher.doctype), '%s'), (test_voucher.name)))
		base_jv.get("entries")[0].is_advance = "Yes" if (test_voucher.doctype in ["Sales Order", "Purchase Order"]) else "No"
		base_jv.get("entries")[0].set(field_dict.get(test_voucher.doctype), test_voucher.name)
		base_jv.insert()
		base_jv.submit()
		submitted_voucher = frappe.get_doc(test_voucher.doctype, test_voucher.name)
		self.assertTrue(frappe.db.sql("""select name from `tabJournal Voucher Detail`
			where %s=%s""" % (field_dict.get(test_voucher.doctype), '%s'), (submitted_voucher.name)))
		# The linked row must carry the expected 400 amount on dr_or_cr.
		self.assertTrue(frappe.db.sql("""select name from `tabJournal Voucher Detail`
			where %s=%s and %s=400""" % (field_dict.get(submitted_voucher.doctype), '%s', dr_or_cr), (submitted_voucher.name)))
		if base_jv.get("entries")[0].is_advance == "Yes":
			self.advance_paid_testcase(base_jv, submitted_voucher, dr_or_cr)
		self.cancel_against_voucher_testcase(submitted_voucher)
	def advance_paid_testcase(self, base_jv, test_voucher, dr_or_cr):
		#Test advance paid field
		advance_paid = frappe.db.sql("""select advance_paid from `tab%s`
			where name=%s""" % (test_voucher.doctype, '%s'), (test_voucher.name))
		payment_against_order = base_jv.get("entries")[0].get(dr_or_cr)
		self.assertTrue(flt(advance_paid[0][0]) == flt(payment_against_order))
	def cancel_against_voucher_testcase(self, test_voucher):
		if test_voucher.doctype == "Journal Voucher":
			# if test_voucher is a Journal Voucher, test cancellation of test_voucher
			test_voucher.cancel()
			self.assertTrue(not frappe.db.sql("""select name from `tabJournal Voucher Detail`
				where against_jv=%s""", test_voucher.name))
		elif test_voucher.doctype in ["Sales Order", "Purchase Order"]:
			# if test_voucher is a Sales Order/Purchase Order, test error on cancellation of test_voucher
			submitted_voucher = frappe.get_doc(test_voucher.doctype, test_voucher.name)
			self.assertRaises(frappe.LinkExistsError, submitted_voucher.cancel)
	def test_jv_against_stock_account(self):
		# Posting a JV directly to a warehouse account must be rejected
		# when perpetual inventory is on.
		from erpnext.stock.doctype.purchase_receipt.test_purchase_receipt import set_perpetual_inventory
		set_perpetual_inventory()
		jv = frappe.copy_doc(test_records[0])
		jv.get("entries")[0].account = "_Test Warehouse - _TC"
		jv.insert()
		from erpnext.accounts.general_ledger import StockAccountInvalidTransaction
		self.assertRaises(StockAccountInvalidTransaction, jv.submit)
		set_perpetual_inventory(0)
	def test_monthly_budget_crossed_ignore(self):
		# With the budget flag set to Ignore, an over-budget JV still posts.
		frappe.db.set_value("Company", "_Test Company", "monthly_bgt_flag", "Ignore")
		self.clear_account_balance()
		jv = frappe.copy_doc(test_records[0])
		jv.get("entries")[1].account = "_Test Account Cost for Goods Sold - _TC"
		jv.get("entries")[1].cost_center = "_Test Cost Center - _TC"
		jv.get("entries")[1].debit = 20000.0
		jv.get("entries")[0].credit = 20000.0
		jv.insert()
		jv.submit()
		self.assertTrue(frappe.db.get_value("GL Entry",
			{"voucher_type": "Journal Voucher", "voucher_no": jv.name}))
	def test_monthly_budget_crossed_stop(self):
		# With the flag set to Stop, submission must raise BudgetError.
		from erpnext.accounts.utils import BudgetError
		frappe.db.set_value("Company", "_Test Company", "monthly_bgt_flag", "Stop")
		self.clear_account_balance()
		jv = frappe.copy_doc(test_records[0])
		jv.get("entries")[1].account = "_Test Account Cost for Goods Sold - _TC"
		jv.get("entries")[1].cost_center = "_Test Cost Center - _TC"
		jv.get("entries")[1].debit = 20000.0
		jv.get("entries")[0].credit = 20000.0
		jv.insert()
		self.assertRaises(BudgetError, jv.submit)
		frappe.db.set_value("Company", "_Test Company", "monthly_bgt_flag", "Ignore")
	def test_yearly_budget_crossed_stop(self):
		# Reuses the monthly-Ignore case to consume budget, then checks the
		# yearly Stop flag on a later-dated voucher.
		from erpnext.accounts.utils import BudgetError
		self.clear_account_balance()
		self.test_monthly_budget_crossed_ignore()
		frappe.db.set_value("Company", "_Test Company", "yearly_bgt_flag", "Stop")
		jv = frappe.copy_doc(test_records[0])
		jv.posting_date = "2013-08-12"
		jv.get("entries")[1].account = "_Test Account Cost for Goods Sold - _TC"
		jv.get("entries")[1].cost_center = "_Test Cost Center - _TC"
		jv.get("entries")[1].debit = 150000.0
		jv.get("entries")[0].credit = 150000.0
		jv.insert()
		self.assertRaises(BudgetError, jv.submit)
		frappe.db.set_value("Company", "_Test Company", "yearly_bgt_flag", "Ignore")
	def test_monthly_budget_on_cancellation(self):
		# Cancelling a credit JV may push the month back over budget.
		from erpnext.accounts.utils import BudgetError
		frappe.db.set_value("Company", "_Test Company", "monthly_bgt_flag", "Stop")
		self.clear_account_balance()
		jv = frappe.copy_doc(test_records[0])
		jv.get("entries")[0].account = "_Test Account Cost for Goods Sold - _TC"
		jv.get("entries")[0].cost_center = "_Test Cost Center - _TC"
		jv.get("entries")[0].credit = 30000.0
		jv.get("entries")[1].debit = 30000.0
		# NOTE(review): jv is submitted without an explicit insert() here —
		# presumably submit() saves a new doc; confirm this is intentional.
		jv.submit()
		self.assertTrue(frappe.db.get_value("GL Entry",
			{"voucher_type": "Journal Voucher", "voucher_no": jv.name}))
		jv1 = frappe.copy_doc(test_records[0])
		jv1.get("entries")[1].account = "_Test Account Cost for Goods Sold - _TC"
		jv1.get("entries")[1].cost_center = "_Test Cost Center - _TC"
		jv1.get("entries")[1].debit = 40000.0
		jv1.get("entries")[0].credit = 40000.0
		jv1.submit()
		self.assertTrue(frappe.db.get_value("GL Entry",
			{"voucher_type": "Journal Voucher", "voucher_no": jv1.name}))
		self.assertRaises(BudgetError, jv.cancel)
		frappe.db.set_value("Company", "_Test Company", "monthly_bgt_flag", "Ignore")
	def clear_account_balance(self):
		# Wipe the GL so each scenario starts from a zero balance.
		frappe.db.sql("""delete from `tabGL Entry`""")
# Shared fixture records loaded from the doctype's test JSON; referenced by
# the test methods above at call time, so the late binding is safe.
test_records = frappe.get_test_records('Journal Voucher')
|
pytest-dev/pytest | refs/heads/main | testing/plugins_integration/bdd_wallet.py | 14 | from pytest_bdd import given
from pytest_bdd import scenario
from pytest_bdd import then
from pytest_bdd import when
import pytest
@scenario("bdd_wallet.feature", "Buy fruits")
def test_publish():
    # Body intentionally empty: pytest-bdd drives the given/when/then steps
    # defined below from the feature file.
    pass
@pytest.fixture
def wallet():
    """Provide a fresh wallet object whose balance starts at zero."""
    class Wallet:
        def __init__(self):
            self.amount = 0
    return Wallet()
@given("A wallet with 50")
def fill_wallet(wallet):
    """Step: the scenario starts with a wallet balance of 50."""
    wallet.amount = 50
@when("I buy some apples for 1")
def buy_apples(wallet):
    """Step: spend 1 on apples."""
    wallet.amount = wallet.amount - 1
@when("I buy some bananas for 2")
def buy_bananas(wallet):
    """Step: spend 2 on bananas."""
    wallet.amount = wallet.amount - 2
@then("I have 47 left")
def check(wallet):
    """Step: the remaining balance must be exactly 47."""
    assert wallet.amount == 47
|
opencord/voltha | refs/heads/master | voltha/extensions/omci/onu_configuration.py | 1 | #
# Copyright 2018 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import structlog
from voltha.protos.device_pb2 import Image
from omci_entities import *
from database.mib_db_api import *
from enum import IntEnum
class OMCCVersion(IntEnum):
Unknown = 0 # Unknown or unsupported version
G_984_4 = 0x80 # (06/04)
G_984_4_2005_Amd_1 = 0x81 # Amd.1 (06/05)
G_984_4_2006_Amd_2 = 0x82 # Amd.2 (03/06)
G_984_4_2006_Amd_3 = 0x83 # Amd.3 (12/06)
G_984_4_2008 = 0x84 # (02/08)
G_984_4_2009_Amd_1 = 0x85 # Amd.1 (06/09)
G_984_4_2009_Amd_2_Base = 0x86 # Amd.2 (2009) Baseline message set only, w/o the extended message set option
G_984_4_2009_Amd_2 = 0x96 # Amd.2 (2009) Extended message set option + baseline message set.
G_988_2010_Base = 0xA0 # (2010) Baseline message set only, w/o the extended message set option
G_988_2011_Amd_1_Base = 0xA1 # Amd.1 (2011) Baseline message set only
G_988_2012_Amd_2_Base = 0xA2 # Amd.2 (2012) Baseline message set only
G_988_2012_Base = 0xA3 # (2012) Baseline message set only
G_988_2010 = 0xB0 # (2010) Baseline and extended message set
G_988_2011_Amd_1 = 0xB1 # Amd.1 (2011) Baseline and extended message set
G_988_2012_Amd_2 = 0xB2 # Amd.2 (2012) Baseline and extended message set
G_988_2012 = 0xB3 # (2012)Baseline and extended message set
@staticmethod
def values():
return {OMCCVersion[member].value for member in OMCCVersion.__members__.keys()}
@staticmethod
def to_enum(value):
return next((v for k, v in OMCCVersion.__members__.items()
if v.value == value), OMCCVersion.Unknown)
class OnuConfiguration(object):
"""
Utility class to query OMCI MIB Database for various ONU/OMCI Configuration
and capabilties. These capabilities revolve around read-only MEs discovered
during the MIB Upload process.
There is also a 'omci_onu_capabilities' State Machine and an
'onu_capabilities_task.py' OMCI Task that will query the ONU, via the
OMCI (ME#287) Managed entity to get the full list of supported OMCI ME's
and available actions/message-types supported.
NOTE: Currently this class is optimized/tested for ONUs that support the
OpenOMCI implementation.
"""
    def __init__(self, omci_agent, device_id):
        """
        Initialize this instance of the OnuConfiguration class

        :param omci_agent: (OpenOMCIAgent) agent reference
        :param device_id: (str) ONU Device ID

        :raises KeyError: If ONU Device is not registered with OpenOMCI
        """
        self.log = structlog.get_logger(device_id=device_id)
        self._device_id = device_id
        # get_device raises KeyError when the device is unknown to OpenOMCI.
        self._onu_device = omci_agent.get_device(device_id)
        # The capabilities
        # Lazily-populated cache of MIB query results; reset() (re)creates it.
        self._attributes = None
        self.reset()
    def _get_capability(self, attr, class_id, instance_id=None):
        """
        Get the OMCI capabilities for this device

        :param attr: (str) OnuConfiguration attribute field
        :param class_id: (int) ME Class ID
        :param instance_id: (int) Instance ID. If not provided, all instances of the
                            specified class ID are returned if present in the DB.

        :return: (dict) Class and/or Instances. None is returned if the CLASS is not present
        """
        try:
            assert self._onu_device.mib_synchronizer.last_mib_db_sync is not None, \
                'MIB Database for ONU {} has never been synchronized'.format(self._device_id)
            # Get the requested information
            # Cache miss: query the MIB DB; only non-empty dict results are
            # cached so a later call can retry once more of the MIB is known.
            if self._attributes[attr] is None:
                value = self._onu_device.query_mib(class_id, instance_id=instance_id)
                if isinstance(value, dict) and len(value) > 0:
                    self._attributes[attr] = value
            return self._attributes[attr]
        except Exception as e:
            self.log.exception('onu-capabilities', e=e, class_id=class_id,
                               instance_id=instance_id)
            raise
def reset(self):
"""
Reset the cached database entries to None. This method should be
called after any communications loss to the ONU (reboot, PON down, ...)
in case a new software load with different capabilities is available.
"""
self._attributes = {
'_ont_g': None,
'_ont_2g': None,
'_ani_g': None,
'_uni_g': None,
'_cardholder': None,
'_circuit_pack': None,
'_software': None,
'_pptp': None,
'_veip': None
}
@property
def version(self):
"""
This attribute identifies the version of the ONU as defined by the vendor
"""
ontg = self._get_capability('_ont_g', OntG.class_id, 0)
if ontg is None or ATTRIBUTES_KEY not in ontg:
return None
return ontg[ATTRIBUTES_KEY].get('version')
@property
def serial_number(self):
"""
The serial number is unique for each ONU
"""
ontg = self._get_capability('_ont_g', OntG.class_id, 0)
if ontg is None or ATTRIBUTES_KEY not in ontg:
return None
return ontg[ATTRIBUTES_KEY].get('serial_number')
@property
def traffic_management_option(self):
"""
This attribute identifies the upstream traffic management function
implemented in the ONU. There are three options:
0 Priority controlled and flexibly scheduled upstream traffic. The traffic
scheduler and priority queue mechanism are used for upstream traffic.
1 Rate controlled upstream traffic. The maximum upstream traffic of each
individual connection is guaranteed by shaping.
2 Priority and rate controlled. The traffic scheduler and priority queue
mechanism are used for upstream traffic. The maximum upstream traffic
of each individual connection is guaranteed by shaping.
"""
ontg = self._get_capability('_ont_g', OntG.class_id, 0)
if ontg is None or ATTRIBUTES_KEY not in ontg:
return None
return ontg[ATTRIBUTES_KEY].get('traffic_management_options')
@property
def onu_survival_time(self):
"""
This attribute indicates the minimum guaranteed time in milliseconds
between the loss of external power and the silence of the ONU. This does not
include survival time attributable to a backup battery. The value zero implies that
the actual time is not known.
Optional
"""
ontg = self._get_capability('_ont_g', OntG.class_id, 0)
if ontg is None or ATTRIBUTES_KEY not in ontg:
return None
return ontg[ATTRIBUTES_KEY].get('ont_survival_time', 0)
@property
def equipment_id(self):
"""
This attribute may be used to identify the specific type of ONU. In some
environments, this attribute may include the equipment CLEI code.
"""
ont2g = self._get_capability('_ont_2g', Ont2G.class_id, 0)
if ont2g is None or ATTRIBUTES_KEY not in ont2g:
return None
return ont2g[ATTRIBUTES_KEY].get('equipment_id')
@property
def omcc_version(self):
"""
This attribute identifies the version of the OMCC protocol being used by the
ONU. This allows the OLT to manage a network with ONUs that support different
OMCC versions. Release levels of [ITU-T G.984.4] are supported with code
points of the form 0x8y and 0x9y, where y is a hexadecimal digit in the range
0..F. Support for continuing revisions of this Recommendation is defined in
the 0xAy range.
"""
ont2g = self._get_capability('_ont_2g', Ont2G.class_id, 0)
if ont2g is None or ATTRIBUTES_KEY not in ont2g:
return None
return OMCCVersion.to_enum(ont2g[ATTRIBUTES_KEY].get('omcc_version', 0))
@property
def vendor_product_code(self):
"""
This attribute contains a vendor-specific product code for the ONU
"""
ont2g = self._get_capability('_ont_2g', Ont2G.class_id, 0)
if ont2g is None or ATTRIBUTES_KEY not in ont2g:
return None
return ont2g[ATTRIBUTES_KEY].get('vendor_product_code')
@property
def total_priority_queues(self):
"""
This attribute reports the total number of upstream priority queues
that are not associated with a circuit pack, but with the ONU in its entirety
"""
ont2g = self._get_capability('_ont_2g', Ont2G.class_id, 0)
if ont2g is None or ATTRIBUTES_KEY not in ont2g:
return None
return ont2g[ATTRIBUTES_KEY].get('total_priority_queue_number')
@property
def total_traffic_schedulers(self):
"""
This attribute reports the total number of traffic schedulers that
are not associated with a circuit pack, but with the ONU in its entirety.
"""
ont2g = self._get_capability('_ont_2g', Ont2G.class_id, 0)
if ont2g is None or ATTRIBUTES_KEY not in ont2g:
return None
return ont2g[ATTRIBUTES_KEY].get('total_traffic_scheduler_number')
@property
def total_gem_ports(self):
"""
This attribute reports the total number of GEM port-IDs supported
by the ONU.
"""
ont2g = self._get_capability('_ont_2g', Ont2G.class_id, 0)
if ont2g is None or ATTRIBUTES_KEY not in ont2g:
return None
return ont2g[ATTRIBUTES_KEY].get('total_gem_port_id_number')
@property
def uptime(self):
"""
This attribute counts 10 ms intervals since the ONU was last initialized.
It rolls over to 0 when full
"""
ont2g = self._get_capability('_ont_2g', Ont2G.class_id, 0)
if ont2g is None or ATTRIBUTES_KEY not in ont2g:
return None
return ont2g[ATTRIBUTES_KEY].get('sys_uptime')
@property
def connectivity_capability(self):
"""
This attribute indicates the Ethernet connectivity models that the ONU
can support. The value 0 indicates that the capability is not supported; 1 signifies
support.
Bit Model [Figure reference ITU-T 988]
1 (LSB) N:1 bridging, Figure 8.2.2-3
2 1:M mapping, Figure 8.2.2-4
3 1:P filtering, Figure 8.2.2-5
4 N:M bridge-mapping, Figure 8.2.2-6
5 1:MP map-filtering, Figure 8.2.2-7
6 N:P bridge-filtering, Figure 8.2.2-8
7 to refer to N:MP bridge-map-filtering, Figure 8.2.2-9
8...16 Reserved
"""
ont2g = self._get_capability('_ont_2g', Ont2G.class_id, 0)
if ont2g is None or ATTRIBUTES_KEY not in ont2g:
return None
return ont2g[ATTRIBUTES_KEY].get('connectivity_capability')
@property
def qos_configuration_flexibility(self):
"""
This attribute reports whether various managed entities in the
ONU are fixed by the ONU's architecture or whether they are configurable. For
backward compatibility, and if the ONU does not support this attribute, all such
attributes are understood to be hard-wired.
Bit Interpretation when bit value = 1
1 (LSB) Priority queue ME: Port field of related port attribute is
read-write and can point to any T-CONT or UNI port in the
same slot
2 Priority queue ME: The traffic scheduler pointer is permitted
to refer to any other traffic scheduler in the same slot
3 Traffic scheduler ME: T-CONT pointer is read-write
4 Traffic scheduler ME: Policy attribute is read-write
5 T-CONT ME: Policy attribute is read-write
6 Priority queue ME: Priority field of related port attribute is
read-write
7..16 Reserved
"""
ont2g = self._get_capability('_ont_2g', Ont2G.class_id, 0)
if ont2g is None or ATTRIBUTES_KEY not in ont2g:
return None
return ont2g[ATTRIBUTES_KEY].get('qos_configuration_flexibility')
@property
def priority_queue_scale_factor(self):
"""
This specifies the scale factor of several attributes of the priority
queue managed entity of section 5.2.8
"""
ont2g = self._get_capability('_ont_2g', Ont2G.class_id, 0)
if ont2g is None or ATTRIBUTES_KEY not in ont2g:
return None
return ont2g[ATTRIBUTES_KEY].get('priority_queue_scale_factor', 1)
@property
def cardholder_entities(self):
"""
Return a dictionary containing some overall information on the CardHolder
instances for this ONU.
"""
ch = self._get_capability('_cardholder', Cardholder.class_id)
results = dict()
if ch is not None:
for inst, inst_data in ch.items():
if isinstance(inst, int):
results[inst] = {
'entity-id': inst,
'is-single-piece': inst >= 256,
'slot-number': inst & 0xff,
'actual-plug-in-type': inst_data[ATTRIBUTES_KEY].get('actual_plugin_unit_type', 0),
'actual-equipment-id': inst_data[ATTRIBUTES_KEY].get('actual_equipment_id', 0),
'protection-profile-ptr': inst_data[ATTRIBUTES_KEY].get('protection_profile_pointer', 0),
}
return results if len(results) else None
@property
def circuitpack_entities(self):
"""
This specifies the scale factor of several attributes of the priority
queue managed entity of section 5.2.8
"""
cp = self._get_capability('_circuit_pack', CircuitPack.class_id)
results = dict()
if cp is not None:
for inst, inst_data in cp.items():
if isinstance(inst, int):
results[inst] = {
'entity-id': inst,
'number-of-ports': inst_data[ATTRIBUTES_KEY].get('number_of_ports', 0),
'serial-number': inst_data[ATTRIBUTES_KEY].get('serial_number', 0),
'version': inst_data[ATTRIBUTES_KEY].get('version', 0),
'vendor-id': inst_data[ATTRIBUTES_KEY].get('vendor_id', 0),
'total-tcont-count': inst_data[ATTRIBUTES_KEY].get('total_tcont_buffer_number', 0),
'total-priority-queue-count': inst_data[ATTRIBUTES_KEY].get('total_priority_queue_number', 0),
'total-traffic-sched-count': inst_data[ATTRIBUTES_KEY].get('total_traffic_scheduler_number', 0),
}
return results if len(results) else None
@property
def software_images(self):
"""
Get a list of software image information for the ONU. The information is provided
so that it may be directly added to the protobuf Device information software list.
"""
sw = self._get_capability('_software', SoftwareImage.class_id)
images = list()
if sw is not None:
for inst, inst_data in sw.items():
if isinstance(inst, int):
is_active = inst_data[ATTRIBUTES_KEY].get('is_active', False)
images.append(Image(name='running-revision' if is_active else 'candidate-revision',
version=str(inst_data[ATTRIBUTES_KEY].get('version',
'Not Available').rstrip('\0')),
is_active=is_active,
is_committed=inst_data[ATTRIBUTES_KEY].get('is_committed',
False),
is_valid=inst_data[ATTRIBUTES_KEY].get('is_valid',
False),
install_datetime='Not Available',
hash=str(inst_data[ATTRIBUTES_KEY].get('image_hash',
'Not Available').rstrip('\0'))))
return images if len(images) else None
@property
def ani_g_entities(self):
"""
This managed entity organizes data associated with each access network
interface supported by a G-PON ONU. The ONU automatically creates one
instance of this managed entity for each PON physical port.
"""
ag = self._get_capability('_ani_g', AniG.class_id)
results = dict()
if ag is not None:
for inst, inst_data in ag.items():
if isinstance(inst, int):
results[inst] = {
'entity-id': inst,
'slot-number': (inst >> 8) & 0xff,
'port-number': inst & 0xff,
'total-tcont-count': inst_data[ATTRIBUTES_KEY].get('total_tcont_number', 0),
'piggyback-dba-reporting': inst_data[ATTRIBUTES_KEY].get('piggyback_dba_reporting', 0),
}
return results if len(results) else None
@property
def uni_g_entities(self):
"""
This managed entity organizes data associated with user network interfaces
(UNIs) supported by GEM. One instance of the UNI-G managed entity exists
for each UNI supported by the ONU.
The ONU automatically creates or deletes instances of this managed entity
upon the creation or deletion of a real or virtual circuit pack managed
entity, one per port.
"""
ug = self._get_capability('_uni_g', UniG.class_id)
results = dict()
if ug is not None:
for inst, inst_data in ug.items():
if isinstance(inst, int):
results[inst] = {
'entity-id': inst,
'management-capability': inst_data[ATTRIBUTES_KEY].get('management_capability', 0)
}
return results if len(results) else None
@property
def pptp_entities(self):
"""
Returns discovered PPTP Ethernet entities. TODO more detail here
"""
pptp = self._get_capability('_pptp', PptpEthernetUni.class_id)
results = dict()
if pptp is not None:
for inst, inst_data in pptp.items():
if isinstance(inst, int):
results[inst] = {
'entity-id': inst,
'expected-type': inst_data[ATTRIBUTES_KEY].get('expected_type', 0),
'sensed-type': inst_data[ATTRIBUTES_KEY].get('sensed_type', 0),
'autodetection-config': inst_data[ATTRIBUTES_KEY].get('autodetection_config', 0),
'ethernet-loopback-config': inst_data[ATTRIBUTES_KEY].get('ethernet_loopback_config', 0),
'administrative-state': inst_data[ATTRIBUTES_KEY].get('administrative_state', 0),
'operational-state': inst_data[ATTRIBUTES_KEY].get('operational_state', 0),
'config-ind': inst_data[ATTRIBUTES_KEY].get('config_ind', 0),
'max-frame-size': inst_data[ATTRIBUTES_KEY].get('max_frame_size', 0),
'dte-dce-ind': inst_data[ATTRIBUTES_KEY].get('dte_dce_ind', 0),
'pause-time': inst_data[ATTRIBUTES_KEY].get('pause_time', 0),
'bridged-ip-ind': inst_data[ATTRIBUTES_KEY].get('bridged_ip_ind', 0),
'arc': inst_data[ATTRIBUTES_KEY].get('arc', 0),
'arc-interval': inst_data[ATTRIBUTES_KEY].get('arc_interval', 0),
'pppoe-filter': inst_data[ATTRIBUTES_KEY].get('ppoe_filter', 0),
'power-control': inst_data[ATTRIBUTES_KEY].get('power_control', 0)
}
return results if len(results) else None
@property
def veip_entities(self):
"""
Returns discovered VEIP entities. TODO more detail here
"""
veip = self._get_capability('_veip', VeipUni.class_id)
results = dict()
if veip is not None:
for inst, inst_data in veip.items():
if isinstance(inst, int):
results[inst] = {
'entity-id': inst,
'administrative-state': inst_data[ATTRIBUTES_KEY].get('administrative_state', 0),
'operational-state': inst_data[ATTRIBUTES_KEY].get('operational_state', 0),
'interdomain-name': inst_data[ATTRIBUTES_KEY].get('interdomain_name', ""),
'tcp-udp-pointer': inst_data[ATTRIBUTES_KEY].get('tcp_udp_pointer', 0),
'iana-assigned-port': inst_data[ATTRIBUTES_KEY].get('iana_assigned_port', 0)
}
return results if len(results) else None
|
eunchong/build | refs/heads/master | scripts/common/master_cfg_utils.py | 2 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Routines to list, select, and load masters and builders in master.cfg.
These routines help to load up master.cfgs in all directories, then locate
masters and builders among those loaded. This is intended to simplify master
selection and processing in frontend and build analysis tools, especially the
buildrunner.
When run standalone, the script acts as example usage which lists masters
and builders of a selected master.
"""
# pylint: disable=C0323
import contextlib
import os
import optparse
import sys
import traceback
BASE_DIR = os.path.abspath(os.path.join(
os.path.dirname(os.path.abspath(__file__)), os.pardir, os.pardir))
# Install the common Infra environment (main only).
if __name__ == '__main__':
sys.path.insert(0, os.path.join(BASE_DIR, 'scripts'))
print sys.path[0]
import common.env
common.env.Install()
from common import chromium_utils
@contextlib.contextmanager
def TemporaryMasterPasswords():
  """Context manager that fakes password files while master.cfgs load.

  Some master.cfg files refuse to load without a .bot_password /
  .apply_issue_password file present.  Any file this creates is removed
  again on exit; files that already existed are left untouched.
  """
  all_paths = [os.path.join(BASE_DIR, 'site_config', '.bot_password')]
  all_paths.extend(os.path.join(path, '.apply_issue_password')
                   for path in chromium_utils.ListMasters())
  # Create a dummy password everywhere one is missing, remembering which
  # files we created so only those get cleaned up afterwards.
  created_paths = []
  for path in all_paths:
    if not os.path.exists(path):
      try:
        with open(path, 'w') as f:
          f.write('reindeer flotilla\n')
        created_paths.append(path)
      except OSError:
        # NOTE(review): on Python 2 a failed open() raises IOError, not
        # OSError, so a permission error here would propagate -- confirm.
        pass
  try:
    yield
  finally:
    for path in created_paths:
      try:
        os.remove(path)
      except OSError:
        print 'WARNING: Could not remove %s!' % path
def ExecuteConfig(canonical_config):
  """Execute a master.cfg file and return its resulting global dictionary.

  The config is exec'd with cwd and sys.path temporarily pointed at its
  own directory, since master.cfgs import modules relative to themselves;
  both are restored afterwards.

  WARNING: executing a master.cfg loads modules into the python process.
  Attempting to load another master.cfg with similar module names will
  cause subtle (and not-so-subtle) errors. It is recommended to only call
  this once per process.
  """
  # 'basedir' and '__file__' are globals master.cfg expects to find.
  localDict = {'basedir': os.path.dirname(canonical_config),
               '__file__': canonical_config}
  f = open(canonical_config, 'r')
  mycwd = os.getcwd()
  os.chdir(localDict['basedir'])
  beforepath = list(sys.path)  # snapshot so additions can be rolled back
  sys.path.append(localDict['basedir'])
  try:
    exec f in localDict
    return localDict
  finally:
    # Restore process-global state regardless of how the exec went.
    sys.path = beforepath
    os.chdir(mycwd)
    f.close()
def LoadConfig(basedir, config_file='master.cfg', suppress=False):
  """Load and execute a master.cfg file from a directory.

  This is a nicer wrapper around ExecuteConfig which will trap IO or execution
  errors and provide an informative message if one occurs.

  Args:
    basedir: directory (may contain ~) holding the config file.
    config_file: file name within basedir; 'master.cfg' by default.
    suppress: if True, don't print generic parse errors (IO errors and
        their tracebacks are still printed).

  Returns:
    The config's global dict on success, or None on any error.

  WARNING: executing a master.cfg loads modules into the python process.
  Attempting to load another master.cfg with similar module names will
  cause subtle (and not-so-subtle) errors. It is recommended to only call
  this once per process.
  """
  canonical_basedir = os.path.abspath(os.path.expanduser(basedir))
  canonical_config = os.path.join(canonical_basedir, config_file)
  # Master configs insist on password files being present; fake them.
  with TemporaryMasterPasswords():
    try:
      localdict = ExecuteConfig(canonical_config)
    except IOError as err:
      # NOTE(review): tuple-unpacking an IOError works only on Python 2 and
      # only when both errno and strerror were supplied -- confirm.
      errno, strerror = err
      filename = err.filename
      print >>sys.stderr, 'error %d executing %s: %s: %s' % (errno,
          canonical_config, strerror, filename)
      print >>sys.stderr, traceback.format_exc()
      return None
    except Exception:
      if not suppress:
        print >>sys.stderr, ('error while parsing %s: ' % canonical_config)
        print >>sys.stderr, traceback.format_exc()
      return None
  return localdict
def PrettyPrintInternal(items, columns, title, notfound, spacing=4):
  """Display column-based information from an array of hashes.

  Args:
    items: list of dicts; each dict becomes one table row.
    columns: keys to print, in column order.  Missing keys show as 'n/a'.
    title: heading printed above the table.
    notfound: message printed instead of a table when items is empty.
    spacing: number of blank characters between adjacent columns.
  """
  if not items:
    print
    print notfound
    return
  # Gather each column's values, substituting 'n/a' for missing keys.
  itemdata = {}
  for col in columns:
    itemdata[col] = [s[col] if col in s else 'n/a' for s in items]
  # Each column is as wide as its widest value or its header.
  lengths = {}
  for col in columns:
    datalen = max([len(x) for x in itemdata[col]])
    lengths[col] = max(len(col), datalen)
  maxwidth = sum([lengths[col] for col in columns]) + (
      spacing * (len(columns) - 1))
  spac = ' ' * spacing
  print
  print title
  print
  print spac.join([col.rjust(lengths[col]) for col in columns])
  print '-' * maxwidth
  for i in range(len(items)):
    print spac.join([itemdata[col][i].rjust(lengths[col]) for col in columns])
def PrettyPrintBuilders(builders, master):
  """Print a builder table (one row per builder/slave pair) for a master."""
  wanted_columns = ['name', 'slavename', 'category']
  # One-to-many 'slavenames' lists are flattened so each printed row
  # carries a single 'slavename'.
  flattened = Denormalize(builders, 'slavenames', 'slavename', wanted_columns)
  PrettyPrintInternal(flattened, wanted_columns,
                      'outputting builders for: %s' % master,
                      'no builders found.')
def PrettyPrintMasters(masterpairs):
  """Print a table of (mastername, path) pairs, using each path's shorter form."""
  rows = []
  for mastername, path in masterpairs:
    abspath = os.path.abspath(path)
    relpath = os.path.relpath(path)
    # Show whichever spelling of the path is more compact.
    best_path = relpath if len(relpath) <= len(abspath) else abspath
    rows.append({'mastername': mastername, 'path': best_path})
  PrettyPrintInternal(rows, ['mastername', 'path'],
                      'listing available masters:',
                      'no masters found.')
def Denormalize(items, over, newcol, wanted):
  """Expand a one-to-many column into many one-to-one rows.

  Every input row is emitted once per value found in its 'over' column,
  with that single value stored under 'newcol'.  Only the keys named in
  'wanted' (minus 'newcol' itself) are carried over from the source row.

  Args:
    items: list of dictionaries to expand.
    over: key whose string-or-list value drives the expansion.
    newcol: output key holding one element of row[over].
    wanted: keys desired in each output row.

  Example: diners with toast options expanded over 'toasts' with
  newcol='toast' and wanted=['name', 'toast'] yield one
  {'name': ..., 'toast': ...} row per (diner, toast) combination.
  """
  def as_list(value):
    # A bare string counts as a single-element list; real lists pass
    # through.  (basestring is the Python 2 base of str and unicode.)
    if isinstance(value, basestring):
      return [value]
    return value

  keep = set(wanted)
  keep.discard(newcol)
  expanded = []
  for row in items:
    for value in as_list(row[over]):
      # Only bring over the requested columns, instead of all.
      new_row = {col: row[col] for col in keep if col in row}
      new_row[newcol] = value
      expanded.append(new_row)
  return expanded
def OnlyGetOne(seq, key, source):
  """Confirm a sequence only contains one unique value and return it.

  This is used when searching for a specific builder. If a match turns up
  multiple results that all share the same builder, then select that builder.

  Args:
    seq: list of dicts to inspect.
    key: dict key whose values must all agree.
    source: description of the query, used in error messages.

  Returns:
    The single shared value, or None (with a printed error) when the
    values are ambiguous or absent.
  """
  def uniquify(seq):
    # frozenset drops duplicates; result order is irrelevant here.
    return list(frozenset(seq))
  res = uniquify([s[key] for s in seq])
  if len(res) > 1:
    print >>sys.stderr, 'error: %s too many %ss:' % (source, key)
    for r in res:
      print ' ', r
    return None
  elif not res:
    print 'error: %s zero %ss' % (source, key)
    return None
  else:
    return res[0]
def GetMasters(include_public=True, include_internal=True):
  """Return a pair of (mastername, path) for all masters found."""
  # note: ListMasters uses master.cfg hardcoded as part of its search path
  def name_from_path(masterpath):
    """Derive 'foo.bar' from a directory basename of the form 'master.foo.bar'."""
    _, basename = os.path.split(masterpath)
    pieces = basename.split('.')
    if not pieces or pieces[0] != 'master' or len(pieces) < 2:
      raise ValueError('unable to parse mastername from path! (%s)' % basename)
    return '.'.join(pieces[1:])

  paths = chromium_utils.ListMasters(include_public=include_public,
                                     include_internal=include_internal)
  return [(name_from_path(p), p) for p in paths]
def ChooseMaster(searchname):
  """Find the unique master whose name equals searchname; return its path."""
  path_by_name = {}
  name_rows = []
  for name, path in GetMasters():
    path_by_name[name] = path
    name_rows.append({'mastername': name})
  matches = [row for row in name_rows if row['mastername'] == searchname]
  # OnlyGetOne prints an error and returns None unless exactly one
  # distinct mastername matched.
  chosen = OnlyGetOne(matches, 'mastername',
                      'string \'%s\' matches' % searchname)
  if not chosen:
    return None
  return path_by_name[chosen]
def SearchBuilders(builders, spec):
  """Return the builders that match what is specified in 'spec'.

  'spec' is a single-entry dict keyed 'builder' (match by builder name),
  'hostname' (match by slave name), or 'either' (match on either field).
  This allows for flexibility in how a frontend gets information from the
  user.
  """
  if 'builder' in spec:
    target = spec['builder']
    return [b for b in builders if b['name'] == target]
  if 'hostname' in spec:
    target = spec['hostname']
    return [b for b in builders if b['slavename'] == target]
  target = spec['either']
  return [b for b in builders if target in (b['name'], b['slavename'])]
def GetBuilderName(builders, keyval):
  """Return the single unique 'name' among builders, or None on ambiguity."""
  return OnlyGetOne(builders, 'name', 'string \'%s\' matches' % keyval)
def ChooseBuilder(builders, spec):
  """Search through builders matching 'spec' and return it.

  spec is a single-entry dict keyed 'builder', 'hostname', or 'either'
  (see SearchBuilders).  Returns the matching builder dict annotated in
  place with a 'slavename' key naming the chosen slave, or None when the
  spec did not resolve to exactly one builder.
  """
  denormedbuilders = Denormalize(builders, 'slavenames', 'slavename', ['name'])
  candidates = SearchBuilders(denormedbuilders, spec)
  # spec carries exactly one entry; next(iter(...)) extracts its value
  # portably -- the original spec.values()[0] breaks on Python 3, where
  # dict views are not indexable.
  buildername = GetBuilderName(candidates, next(iter(spec.values())))
  if not buildername:
    return None
  builder = [b for b in builders if b['name'] == buildername][0]
  if 'hostname' in spec:
    builder['slavename'] = spec['hostname']
  elif 'either' in spec and spec['either'] in builder['slavenames']:
    builder['slavename'] = spec['either']
  else:
    # User selected builder instead of slavename, so just pick the first
    # slave the builder has.
    builder['slavename'] = builder['slavenames'][0]
  return builder
def main():
  """List masters, a master's builders, or match one builder by name."""
  prog_desc = 'List all masters or builders within a master.'
  usage = '%prog [master] [builder or slave]'
  parser = optparse.OptionParser(usage=(usage + '\n\n' + prog_desc))
  (_, args) = parser.parse_args()
  if len(args) > 2:
    parser.error("Too many arguments specified!")
  masterpairs = GetMasters()
  # No arguments: enumerate every master we can find.
  if len(args) < 1:
    PrettyPrintMasters(masterpairs)
    return 0
  master_path = ChooseMaster(args[0])
  if not master_path:
    return 2
  config = LoadConfig(master_path)
  if not config:
    return 2
  mastername = config['BuildmasterConfig']['properties']['mastername']
  builders = config['BuildmasterConfig']['builders']
  # One argument: list that master's builders.
  if len(args) < 2:
    PrettyPrintBuilders(builders, mastername)
    return 0
  # Two arguments: resolve the builder-or-slave name to a single builder.
  my_builder = ChooseBuilder(builders, {'either': args[1]})
  if not my_builder:
    return 2
  print "Matched %s/%s." % (mastername, my_builder['name'])
  return 0
if __name__ == '__main__':
sys.exit(main())
|
TrimBiggs/calico | refs/heads/master | calico/logutils.py | 4 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Metaswitch Networks
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
_log = logging.getLogger(__name__)
def logging_exceptions(logger):
    """Return a decorator that logs, then re-raises, exceptions from a function.

    The traceback is recorded via logger.exception(); the exception itself
    always propagates unchanged to the caller.
    """
    import functools

    def decorator(fn):
        @functools.wraps(fn)  # preserve the wrapped function's name/docstring
        def wrapped(*args, **kwargs):
            try:
                return fn(*args, **kwargs)
            except BaseException:
                # Bug fix: the original passed no argument for the %s
                # placeholder, so a literal "%s" was logged instead of the
                # function name.
                logger.exception("Exception in function %s", fn.__name__)
                raise
        return wrapped
    return decorator
|
Muxi-Studio/muxi_site | refs/heads/develop | muxiwebsite/api/comments.py | 2 | # coding: utf-8
"""
comments.py
~~~~~~~~~~
木犀官网评论API
"""
from flask import jsonify, g, request
from . import api
from muxiwebsite.models import Share
from muxiwebsite.models import Comment
from .authentication import auth
from muxiwebsite import db
@api.route('/shares/<int:id>/comments/')
def get_shares_id_comments(id):
    """Return every comment attached to share `id` as JSON (404 if absent)."""
    share = Share.query.get_or_404(id)
    serialized = [c.to_json() for c in share.comment.all()]
    return jsonify({'comments': serialized}), 200
@api.route('/shares/<int:id>/comments/', methods=['GET', 'POST'])
@auth.login_required
def new_shares_id_comments(id):
    """Post a new comment to the share with the given id."""
    import ast
    # Security fix: the request body is untrusted client input, so parse it
    # with ast.literal_eval (Python literals only) instead of eval(), which
    # would execute arbitrary expressions sent by the client.
    data_dict = ast.literal_eval(request.data)
    share = Share.query.get_or_404(id)
    if not hasattr(g.current_user, 'id'):
        return jsonify({
            'error' : 'plase login first'
        }), 403
    comment = Comment(
        comment = data_dict.get('comment'),
        author_id = g.current_user.id,
        share_id = share.id
    )
    db.session.add(comment)
    db.session.commit()
    return jsonify({
        'new comment' : comment.id
    }), 201
|
bitcraft/pyglet | refs/heads/master | pyglet/canvas/win32.py | 1 | from .base import Display, Screen, ScreenMode, Canvas
from pyglet.libs.win32 import kernel32, user32, types, constants
from pyglet.libs.win32.constants import *
from pyglet.libs.win32.types import *
class Win32Display(Display):
    """Display implementation backed by the Win32 monitor-enumeration API."""

    def get_screens(self):
        """Return a Win32Screen for every monitor attached to this display."""
        found = []

        def _on_monitor(hMonitor, hdcMonitor, lprcMonitor, dwData):
            # lprcMonitor points at the monitor's bounding RECT.
            rect = lprcMonitor.contents
            found.append(Win32Screen(self, hMonitor, rect.left, rect.top,
                                     rect.right - rect.left,
                                     rect.bottom - rect.top))
            return True  # keep enumerating

        callback = MONITORENUMPROC(_on_monitor)
        user32.EnumDisplayMonitors(None, None, callback, 0)
        return found
class Win32Screen(Screen):
    """A single monitor, addressed through its Win32 HMONITOR handle."""

    # Mode saved before the first set_mode(), so restore_mode() can revert.
    _initial_mode = None

    def __init__(self, display, handle, x, y, width, height):
        super().__init__(display, x, y, width, height)
        self._handle = handle

    def get_matching_configs(self, template):
        # Match against the desktop DC (hwnd 0).
        canvas = Win32Canvas(self.display, 0, user32.GetDC(0))
        matched = template.match(canvas)
        # TODO: deprecate config's being screen-specific
        for cfg in matched:
            cfg.screen = self
        return matched

    def get_device_name(self):
        """Return the GDI device name reported for this monitor."""
        info = MONITORINFOEX()
        info.cbSize = sizeof(MONITORINFOEX)
        user32.GetMonitorInfoW(self._handle, byref(info))
        return info.szDevice

    def get_modes(self):
        """Enumerate every display mode this device supports."""
        device = self.get_device_name()
        modes = []
        index = 0
        while True:
            settings = DEVMODE()
            settings.dmSize = sizeof(DEVMODE)
            if not user32.EnumDisplaySettingsW(device, index, byref(settings)):
                break  # no more modes
            modes.append(Win32ScreenMode(self, settings))
            index += 1
        return modes

    def get_mode(self):
        """Return the currently active mode of this screen."""
        settings = DEVMODE()
        settings.dmSize = sizeof(DEVMODE)
        user32.EnumDisplaySettingsW(self.get_device_name(),
                                    ENUM_CURRENT_SETTINGS,
                                    byref(settings))
        return Win32ScreenMode(self, settings)

    def set_mode(self, mode):
        """Switch to `mode`, remembering the original mode for restore_mode()."""
        assert mode.screen is self
        if not self._initial_mode:
            self._initial_mode = self.get_mode()
        result = user32.ChangeDisplaySettingsExW(self.get_device_name(),
                                                 byref(mode._mode),
                                                 None,
                                                 CDS_FULLSCREEN,
                                                 None)
        if result == DISP_CHANGE_SUCCESSFUL:
            self.width = mode.width
            self.height = mode.height

    def restore_mode(self):
        """Return to the mode that was active before the first set_mode()."""
        if self._initial_mode:
            self.set_mode(self._initial_mode)
class Win32ScreenMode(ScreenMode):
    def __init__(self, screen, mode):
        """Wrap a Win32 DEVMODE structure as a pyglet ScreenMode.

        :param screen: the Win32Screen this mode belongs to.
        :param mode: a populated DEVMODE; kept as-is so Win32Screen.set_mode
            can hand it back to ChangeDisplaySettingsExW.
        """
        super().__init__(screen)
        self._mode = mode
        self.width = mode.dmPelsWidth
        self.height = mode.dmPelsHeight
        self.depth = mode.dmBitsPerPel          # bits per pixel
        self.rate = mode.dmDisplayFrequency     # dmDisplayFrequency field
class Win32Canvas(Canvas):
    def __init__(self, display, hwnd, hdc):
        """Canvas bound to a native window handle and device context.

        :param display: owning Win32Display.
        :param hwnd: native window handle (0 is used for the desktop).
        :param hdc: GDI device context for the window.
        """
        super().__init__(display)
        self.hwnd = hwnd
        self.hdc = hdc
|
canvasnetworks/canvas | refs/heads/master | common/boto/emr/instance_group.py | 2 | #
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class InstanceGroup(object):
    """A single EMR instance group: role, count, instance type, and market.

    Note: the `type` parameter intentionally shadows the builtin -- it is
    part of the public constructor signature and cannot be renamed.
    """

    def __init__(self, num_instances, role, type, market, name):
        self.num_instances = num_instances
        self.role = role
        self.type = type
        self.market = market
        self.name = name

    def __repr__(self):
        cls = self.__class__
        return '%s.%s(name=%r, num_instances=%r, role=%r, type=%r, market = %r)' % (
            cls.__module__, cls.__name__,
            self.name, self.num_instances, self.role, self.type, self.market)
|
morphis/home-assistant | refs/heads/snap-support | homeassistant/components/light/yeelight.py | 4 | """
Support for Xiaomi Yeelight Wifi color bulb.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.yeelight/
"""
import logging
import colorsys
import voluptuous as vol
from homeassistant.util.color import (
color_temperature_mired_to_kelvin as mired_to_kelvin,
color_temperature_kelvin_to_mired as kelvin_to_mired,
color_temperature_to_rgb)
from homeassistant.const import CONF_DEVICES, CONF_NAME
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_RGB_COLOR, ATTR_TRANSITION, ATTR_COLOR_TEMP,
ATTR_FLASH, FLASH_SHORT, FLASH_LONG,
SUPPORT_BRIGHTNESS, SUPPORT_RGB_COLOR, SUPPORT_TRANSITION,
SUPPORT_COLOR_TEMP, SUPPORT_FLASH,
Light, PLATFORM_SCHEMA)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['yeelight==0.2.2']
_LOGGER = logging.getLogger(__name__)
CONF_TRANSITION = "transition"
DEFAULT_TRANSITION = 350
CONF_SAVE_ON_CHANGE = "save_on_change"
CONF_MODE_MUSIC = "use_music_mode"
DOMAIN = 'yeelight'
DEVICE_SCHEMA = vol.Schema({
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_TRANSITION, default=DEFAULT_TRANSITION): cv.positive_int,
vol.Optional(CONF_MODE_MUSIC, default=False): cv.boolean,
vol.Optional(CONF_SAVE_ON_CHANGE, default=True): cv.boolean,
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA}, })
SUPPORT_YEELIGHT_RGB = (SUPPORT_RGB_COLOR |
SUPPORT_COLOR_TEMP)
SUPPORT_YEELIGHT = (SUPPORT_BRIGHTNESS |
SUPPORT_TRANSITION |
SUPPORT_FLASH)
def _cmd(func):
    """Decorator that logs and swallows yeelight.BulbException from `func`.

    On a bulb error the exception is logged and the call returns None
    instead of raising.
    """
    import functools

    # Bug fix: without functools.wraps the decorated methods lose their
    # __name__/__doc__, which hurts logging and introspection.
    @functools.wraps(func)
    def _wrap(self, *args, **kwargs):
        import yeelight
        try:
            _LOGGER.debug("Calling %s with %s %s", func, args, kwargs)
            return func(self, *args, **kwargs)
        except yeelight.BulbException as ex:
            _LOGGER.error("Error when calling %s: %s", func, ex)
    return _wrap
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Yeelight bulbs."""
    bulbs = []
    if discovery_info is not None:
        _LOGGER.debug("Adding autodetected %s", discovery_info['hostname'])
        # The advertised hostname seems to vary between firmwares, so a
        # stable name is built from device type + MAC instead.
        name = "yeelight_%s_%s" % (discovery_info["device_type"],
                                   discovery_info["properties"]["mac"])
        bulbs.append(YeelightLight({'name': name,
                                    'ipaddr': discovery_info['host']},
                                   DEVICE_SCHEMA({})))
    else:
        for ipaddr, device_config in config[CONF_DEVICES].items():
            _LOGGER.debug("Adding configured %s", device_config[CONF_NAME])
            bulbs.append(YeelightLight({'name': device_config[CONF_NAME],
                                        'ipaddr': ipaddr},
                                       device_config))
    # True requests an update for each light before it is added.
    add_devices(bulbs, True)
class YeelightLight(Light):
    """Representation of a Yeelight light."""
    def __init__(self, device, config):
        """Initialize the light.

        device: dict with 'name' and 'ipaddr' keys.
        config: per-device options validated by DEVICE_SCHEMA.
        """
        self.config = config
        self._name = device['name']
        self._ipaddr = device['ipaddr']
        # RGB/color-temp support is ORed in later, once the bulb type is
        # known (see the _bulb property).
        self._supported_features = SUPPORT_YEELIGHT
        self._available = False
        # Lazily created yeelight.Bulb; cached state below is refreshed by
        # update().
        self._bulb_device = None
        self._brightness = None
        self._color_temp = None
        self._is_on = None
        self._rgb = None
    @property
    def available(self) -> bool:
        """Return if bulb is available."""
        return self._available
    @property
    def supported_features(self) -> int:
        """Flag supported features."""
        return self._supported_features
    @property
    def unique_id(self) -> str:
        """Return the ID of this light."""
        return "{}.{}".format(self.__class__, self._ipaddr)
    @property
    def color_temp(self) -> int:
        """Return the color temperature (mireds, see update())."""
        return self._color_temp
    @property
    def name(self) -> str:
        """Return the name of the device if any."""
        return self._name
    @property
    def is_on(self) -> bool:
        """Return true if device is on."""
        return self._is_on
    @property
    def brightness(self) -> int:
        """Return the brightness of this light between 1..255."""
        return self._brightness
    def _get_rgb_from_properties(self):
        """Derive an (r, g, b) tuple from the bulb's reported properties."""
        rgb = self._properties.get("rgb", None)
        color_mode = self._properties.get("color_mode", None)
        if not rgb or not color_mode:
            return rgb
        color_mode = int(color_mode)
        if color_mode == 2: # color temperature
            return color_temperature_to_rgb(self.color_temp)
        if color_mode == 3: # hsv
            # NOTE(review): colorsys expects h/s/v in 0..1 -- confirm the
            # bulb's hue/sat/bright properties are already in that range.
            hue = self._properties.get("hue")
            sat = self._properties.get("sat")
            val = self._properties.get("bright")
            return colorsys.hsv_to_rgb(hue, sat, val)
        # Remaining case: 'rgb' is a packed 24-bit integer, 0xRRGGBB.
        rgb = int(rgb)
        blue = rgb & 0xff
        green = (rgb >> 8) & 0xff
        red = (rgb >> 16) & 0xff
        return red, green, blue
    @property
    def rgb_color(self) -> tuple:
        """Return the color property."""
        return self._rgb
    @property
    def _properties(self) -> dict:
        # Last property map fetched from the bulb (refreshed by update()).
        return self._bulb.last_properties
    @property
    def _bulb(self) -> object:
        """Lazily connect to the bulb, detecting RGB support on first use."""
        import yeelight
        if self._bulb_device is None:
            try:
                self._bulb_device = yeelight.Bulb(self._ipaddr)
                self._bulb_device.get_properties() # force init for type
                btype = self._bulb_device.bulb_type
                if btype == yeelight.BulbType.Color:
                    self._supported_features |= SUPPORT_YEELIGHT_RGB
                self._available = True
            except yeelight.BulbException as ex:
                self._available = False
                _LOGGER.error("Failed to connect to bulb %s, %s: %s",
                              self._ipaddr, self._name, ex)
        return self._bulb_device
    def set_music_mode(self, mode) -> None:
        """Set the music mode on or off."""
        if mode:
            self._bulb.start_music()
        else:
            self._bulb.stop_music()
    def update(self) -> None:
        """Update properties from the bulb."""
        import yeelight
        try:
            self._bulb.get_properties()
            self._is_on = self._properties.get("power") == "on"
            bright = self._properties.get("bright", None)
            if bright:
                # Bulb reports percent; Home Assistant expects 0..255.
                self._brightness = 255 * (int(bright) / 100)
            temp_in_k = self._properties.get("ct", None)
            if temp_in_k:
                self._color_temp = kelvin_to_mired(int(temp_in_k))
            self._rgb = self._get_rgb_from_properties()
            self._available = True
        except yeelight.BulbException as ex:
            if self._available: # just inform once
                _LOGGER.error("Unable to update bulb status: %s", ex)
                self._available = False
    @_cmd
    def set_brightness(self, brightness, duration) -> None:
        """Set bulb brightness (input 0..255, bulb expects percent)."""
        if brightness:
            _LOGGER.debug("Setting brightness: %s", brightness)
            self._bulb.set_brightness(brightness / 255 * 100,
                                      duration=duration)
    @_cmd
    def set_rgb(self, rgb, duration) -> None:
        """Set bulb's color."""
        if rgb and self.supported_features & SUPPORT_RGB_COLOR:
            _LOGGER.debug("Setting RGB: %s", rgb)
            self._bulb.set_rgb(rgb[0], rgb[1], rgb[2], duration=duration)
    @_cmd
    def set_colortemp(self, colortemp, duration) -> None:
        """Set bulb's color temperature (input in mireds)."""
        if colortemp and self.supported_features & SUPPORT_COLOR_TEMP:
            temp_in_k = mired_to_kelvin(colortemp)
            _LOGGER.debug("Setting color temp: %s K", temp_in_k)
            self._bulb.set_color_temp(temp_in_k, duration=duration)
    @_cmd
    def set_default(self) -> None:
        """Set current options as default."""
        self._bulb.set_default()
    @_cmd
    def set_flash(self, flash) -> None:
        """Activate flash."""
        if flash:
            from yeelight import RGBTransition, SleepTransition, Flow
            if self._bulb.last_properties["color_mode"] != 1:
                _LOGGER.error("Flash supported currently only in RGB mode.")
                return
            transition = int(self.config[CONF_TRANSITION])
            # NOTE(review): if flash is neither FLASH_LONG nor FLASH_SHORT,
            # count/duration below are unbound -- confirm callers only ever
            # pass those two constants.
            if flash == FLASH_LONG:
                count = 1
                duration = transition * 5
            if flash == FLASH_SHORT:
                count = 1
                duration = transition * 2
            red, green, blue = self.rgb_color
            # Flash sequence: dim red, pause, then back to the prior color.
            transitions = list()
            transitions.append(
                RGBTransition(255, 0, 0, brightness=10, duration=duration))
            transitions.append(SleepTransition(
                duration=transition))
            transitions.append(
                RGBTransition(red, green, blue, brightness=self.brightness,
                              duration=duration))
            flow = Flow(count=count, transitions=transitions)
            self._bulb.start_flow(flow)
    def turn_on(self, **kwargs) -> None:
        """Turn the bulb on."""
        brightness = kwargs.get(ATTR_BRIGHTNESS)
        colortemp = kwargs.get(ATTR_COLOR_TEMP)
        rgb = kwargs.get(ATTR_RGB_COLOR)
        flash = kwargs.get(ATTR_FLASH)
        duration = int(self.config[CONF_TRANSITION])  # in ms
        if ATTR_TRANSITION in kwargs:  # passed kwarg overrides config
            duration = int(kwargs.get(ATTR_TRANSITION) * 1000)  # kwarg in s
        self._bulb.turn_on(duration=duration)
        if self.config[CONF_MODE_MUSIC] and not self._bulb.music_mode:
            self.set_music_mode(self.config[CONF_MODE_MUSIC])
        # values checked for none in methods
        self.set_rgb(rgb, duration)
        self.set_colortemp(colortemp, duration)
        self.set_brightness(brightness, duration)
        self.set_flash(flash)
        # save the current state if we had a manual change.
        if self.config[CONF_SAVE_ON_CHANGE]:
            if brightness or colortemp or rgb:
                self.set_default()
    def turn_off(self, **kwargs) -> None:
        """Turn off."""
        self._bulb.turn_off()
|
ukanga/SickRage | refs/heads/master | lib/rtorrent/lib/xmlrpc/scgi.py | 145 | #!/usr/bin/python
# rtorrent_xmlrpc
# (c) 2011 Roger Que <alerante@bellsouth.net>
#
# Modified portions:
# (c) 2013 Dean Gardiner <gardiner91@gmail.com>
#
# Python module for interacting with rtorrent's XML-RPC interface
# directly over SCGI, instead of through an HTTP server intermediary.
# Inspired by Glenn Washburn's xmlrpc2scgi.py [1], but subclasses the
# built-in xmlrpclib classes so that it is compatible with features
# such as MultiCall objects.
#
# [1] <http://libtorrent.rakshasa.no/wiki/UtilsXmlrpc2scgi>
#
# Usage: server = SCGIServerProxy('scgi://localhost:7000/')
# server = SCGIServerProxy('scgi:///path/to/scgi.sock')
# print server.system.listMethods()
# mc = xmlrpclib.MultiCall(server)
# mc.get_up_rate()
# mc.get_down_rate()
# print mc()
#
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the
# OpenSSL library under certain conditions as described in each
# individual source file, and distribute linked combinations
# including the two.
#
# You must obey the GNU General Public License in all respects for
# all of the code used other than OpenSSL. If you modify file(s)
# with this exception, you may extend this exception to your version
# of the file(s), but you are not obligated to do so. If you do not
# wish to do so, delete this exception statement from your version.
# If you delete this exception statement from all source files in the
# program, then also delete it here.
#
#
#
# Portions based on Python's xmlrpclib:
#
# Copyright (c) 1999-2002 by Secret Labs AB
# Copyright (c) 1999-2002 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
import httplib
import re
import socket
import urllib
import xmlrpclib
import errno
class SCGITransport(xmlrpclib.Transport):
    """xmlrpclib Transport that carries requests over SCGI instead of HTTP."""
    # Added request() from Python 2.7 xmlrpclib here to backport to Python 2.6
    def request(self, host, handler, request_body, verbose=0):
        # Retry the request once in case a cached connection has gone cold.
        for i in (0, 1):
            try:
                return self.single_request(host, handler, request_body, verbose)
            except socket.error, e:
                # Only retry (i == 0) on connection-reset style errors.
                if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE):
                    raise
            except httplib.BadStatusLine: #close after we sent request
                if i:
                    raise
    def single_request(self, host, handler, request_body, verbose=0):
        """Send one XML-RPC request over SCGI and return the parsed result.

        If 'host' is set it is a 'host:port' TCP endpoint; otherwise
        'handler' is taken to be a Unix domain socket path.
        """
        # Prepend SCGI headers: NUL-separated NAME\0VALUE\0 pairs wrapped in
        # netstring framing ("<len>:<payload>,") ahead of the XML payload.
        headers = {'CONTENT_LENGTH': str(len(request_body)), 'SCGI': '1'}
        header = '\x00'.join(('%s\x00%s' % item for item in headers.iteritems())) + '\x00'
        header = '%d:%s' % (len(header), header)
        request_body = '%s,%s' % (header, request_body)
        sock = None
        try:
            if host:
                # TCP endpoint.
                host, port = urllib.splitport(host)
                addrinfo = socket.getaddrinfo(host, int(port), socket.AF_INET,
                                              socket.SOCK_STREAM)
                sock = socket.socket(*addrinfo[0][:3])
                sock.connect(addrinfo[0][4])
            else:
                # Unix domain socket endpoint.
                sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
                sock.connect(handler)
            self.verbose = verbose
            sock.send(request_body)
            return self.parse_response(sock.makefile())
        finally:
            if sock:
                sock.close()
    def parse_response(self, response):
        """Read the full SCGI response, strip its headers, and parse the XML."""
        p, u = self.getparser()
        response_body = ''
        while True:
            data = response.read(1024)
            if not data:
                break
            response_body += data
        # Remove SCGI headers from the response (everything up to the first
        # blank line).
        response_header, response_body = re.split(r'\n\s*?\n', response_body,
                                                  maxsplit=1)
        if self.verbose:
            print 'body:', repr(response_body)
        p.feed(response_body)
        p.close()
        return u.close()
class SCGIServerProxy(xmlrpclib.ServerProxy):
    """ServerProxy variant for scgi:// URIs.

    Mirrors xmlrpclib.ServerProxy but routes calls through
    SCGITransport.  URI forms: "scgi://host:port/path" (TCP) or
    "scgi:///path/to/socket" (empty host -> UNIX domain socket).
    """
    def __init__(self, uri, transport=None, encoding=None, verbose=False,
                 allow_none=False, use_datetime=False):
        type, uri = urllib.splittype(uri)
        # BUG FIX: ('scgi') is just the string 'scgi', so the original
        # `type not in ('scgi')` was a substring test that also accepted
        # schemes like 's' or 'cg'.  A one-element tuple restricts the
        # accepted scheme to exactly 'scgi'.
        if type not in ('scgi',):
            raise IOError('unsupported XML-RPC protocol')
        self.__host, self.__handler = urllib.splithost(uri)
        if not self.__handler:
            self.__handler = '/'
        if transport is None:
            transport = SCGITransport(use_datetime=use_datetime)
        self.__transport = transport
        self.__encoding = encoding
        self.__verbose = verbose
        self.__allow_none = allow_none

    def __close(self):
        self.__transport.close()

    def __request(self, methodname, params):
        """Marshal *params*, send the call through the transport and
        return the unmarshalled result (1-tuples are unwrapped)."""
        request = xmlrpclib.dumps(params, methodname, encoding=self.__encoding,
                                  allow_none=self.__allow_none)
        response = self.__transport.request(
            self.__host,
            self.__handler,
            request,
            verbose=self.__verbose
        )
        if len(response) == 1:
            response = response[0]
        return response

    def __repr__(self):
        return (
            "<SCGIServerProxy for %s%s>" %
            (self.__host, self.__handler)
        )

    __str__ = __repr__

    def __getattr__(self, name):
        # magic method dispatcher: server.foo.bar(...) builds the method
        # name lazily and funnels the call into __request.
        return xmlrpclib._Method(self.__request, name)

    # note: to call a remote object with an non-standard name, use
    # result getattr(server, "strange-python-name")(args)

    def __call__(self, attr):
        """A workaround to get special attributes on the ServerProxy
        without interfering with the magic __getattr__
        """
        if attr == "close":
            return self.__close
        elif attr == "transport":
            return self.__transport
        raise AttributeError("Attribute %r not found" % (attr,))
|
Microsoft/PTVS | refs/heads/master | Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py | 9 | import hashlib
import os
from textwrap import dedent
from ..cache import BaseCache
from ..controller import CacheController
try:
FileNotFoundError
except NameError:
# py2.X
FileNotFoundError = OSError
def _secure_open_write(filename, fmode):
# We only want to write to this file, so open it in write only mode
flags = os.O_WRONLY
# os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
# will open *new* files.
# We specify this because we want to ensure that the mode we pass is the
# mode of the file.
flags |= os.O_CREAT | os.O_EXCL
# Do not follow symlinks to prevent someone from making a symlink that
# we follow and insecurely open a cache file.
if hasattr(os, "O_NOFOLLOW"):
flags |= os.O_NOFOLLOW
# On Windows we'll mark this file as binary
if hasattr(os, "O_BINARY"):
flags |= os.O_BINARY
# Before we open our file, we want to delete any existing file that is
# there
try:
os.remove(filename)
except (IOError, OSError):
# The file must not exist already, so we can just skip ahead to opening
pass
# Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
# race condition happens between the os.remove and this line, that an
# error will be raised. Because we utilize a lockfile this should only
# happen if someone is attempting to attack us.
fd = os.open(filename, flags, fmode)
try:
return os.fdopen(fd, "wb")
except:
# An error occurred wrapping our FD in a file object
os.close(fd)
raise
class FileCache(BaseCache):
    """BaseCache backend that stores each response as a file on disk,
    sharded by key hash and guarded by a lockfile while writing."""

    def __init__(self, directory, forever=False, filemode=0o0600,
                 dirmode=0o0700, use_dir_lock=None, lock_class=None):
        if use_dir_lock is not None and lock_class is not None:
            raise ValueError("Cannot use use_dir_lock and lock_class together")

        try:
            from pip._vendor.lockfile import LockFile
            from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile
        except ImportError:
            notice = dedent("""
            NOTE: In order to use the FileCache you must have
            lockfile installed. You can install it via pip:
              pip install lockfile
            """)
            raise ImportError(notice)
        else:
            # Resolve the lock strategy: explicit class wins, otherwise
            # a directory-based lock when requested, else the default.
            if use_dir_lock:
                lock_class = MkdirLockFile
            elif lock_class is None:
                lock_class = LockFile

        self.directory = directory
        self.forever = forever
        self.filemode = filemode
        self.dirmode = dirmode
        self.lock_class = lock_class

    @staticmethod
    def encode(x):
        """Hash a cache key into the stable hex digest used as filename."""
        return hashlib.sha224(x.encode()).hexdigest()

    def _fn(self, name):
        # NOTE: This method should not change as some may depend on it.
        # See: https://github.com/ionrock/cachecontrol/issues/63
        digest = self.encode(name)
        # Five one-character directory levels keep any single directory
        # from accumulating too many entries.
        return os.path.join(self.directory, *(list(digest[:5]) + [digest]))

    def get(self, key):
        """Return the cached bytes for *key*, or None when absent."""
        path = self._fn(key)
        if not os.path.exists(path):
            return None
        with open(path, 'rb') as handle:
            return handle.read()

    def set(self, key, value):
        """Write *value* under *key*, creating the shard directories and
        serializing concurrent writers through the lockfile."""
        path = self._fn(key)
        # Make sure the directory exists
        try:
            os.makedirs(os.path.dirname(path), self.dirmode)
        except (IOError, OSError):
            pass  # already exists (or the open below will surface the error)

        with self.lock_class(path) as lock:
            # Write our actual file
            with _secure_open_write(lock.path, self.filemode) as handle:
                handle.write(value)

    def delete(self, key):
        """Remove *key* from the cache unless this cache keeps entries forever."""
        if not self.forever:
            try:
                os.remove(self._fn(key))
            except FileNotFoundError:
                pass
def url_to_file_path(url, filecache):
    """Return the file cache path based on the URL.

    This does not ensure the file exists!
    """
    return filecache._fn(CacheController.cache_url(url))
|
dahlstrom-g/intellij-community | refs/heads/master | python/testData/intentions/convertingFStringQuotesSuggestedOnFragmentBraces_after.py | 10 | s = f"{42}"
|
aborodavkin/pyton_for_my_tasks | refs/heads/master | Poimai_sharik_cvet_imeet_znach 15_07_2016 17_50.py | 1 | from random import randrange as rnd
from tkinter import *
from random import choice, randint
import math
import winsound
import time
def tick():
    """Countdown callback, re-armed once per second via canv.after().

    Updates the on-screen timer and, when the clock reaches zero, sets
    the global ``flag_t`` (which stops scoring and ball movement) and
    pops up a window with the final score.
    """
    global time_finish, time_str, time_ost_text,flag_t,scores
    # Remaining whole seconds, split into minutes and seconds.
    time_test=round(time_finish-time.time())
    # print('*',time_test)
    m=time_test//60
    s=time_test%60
    # print('Min: ',m,' Sec: ',s)
    # canv.create_text(50, 5, text=time_test)
    if time_test>=0:
        change_timer(m,s)
        delay=1000
        canv.after(delay,tick)
    # End of the minutes/seconds block.
    if time_test == 0:
        flag_t=1
        # create a child/top window showing the final result
        win_top = Toplevel(bg='lightgreen')
        win_top.title('The end!')
        win_top.geometry("700x85+300+150")
        Label(win_top,
              text='Результат: '+str(scores),
              fg = "blue",
              bg = "yellow",
              font = "Verdana 40 bold").pack()
        # NOTE(review): nested mainloop() blocks here until the result
        # window is closed — confirm this is intentional.
        win_top.mainloop()
class ball():
    """A circle drawn on the module-global canvas ``canv``.

    Attributes are filled in by the creator after construction; call
    paint() to push the current geometry/colours onto the canvas item.
    """
    def __init__(self):
        # Everything starts zeroed/empty; the caller assigns real values.
        self.x = self.y = self.r = 0
        self.nap = 0        # initial direction code (1..4)
        self.nx = 0         # horizontal direction flag (+1 / -1)
        self.ny = 0         # vertical direction flag (+1 / -1)
        self.width = 0
        self.color = ''
        self.pen_color = ''
        self.id = canv.create_oval(
            self.x - self.r, self.y - self.r,
            self.x + self.r, self.y + self.r,
            width=self.width, fill=self.color, outline=self.pen_color)

    def paint(self):
        """Sync the canvas oval with the ball's current attributes."""
        left, top = self.x - self.r, self.y - self.r
        right, bottom = self.x + self.r, self.y + self.r
        canv.coords(self.id, left, top, right, bottom)
        canv.itemconfig(self.id, width=self.width, fill=self.color,
                        outline=self.pen_color)
def create_timer():
    """Create the countdown text item on the canvas (globals: canv,
    time_finish); stores the item id in ``time_ost_text``."""
    global time_str, time_ost_text, time_finish
    # Remaining whole seconds, split into minutes and seconds.
    time_test=round(time_finish-time.time())
    m=time_test//60
    s=time_test%60
    time_str='Мин: '+str(m)+' Сек: '+str(s)
    time_ost_text = canv.create_text(600, 30,
                          text="Времечко " + time_str,
                          font="Sans 18")
def change_timer(m,s):
    """Rewrite the countdown text item with *m* minutes / *s* seconds."""
    global time_ost_text
    time_str=str(m)+' мин. '+str(s)+' сек.'
    canv.itemconfigure(time_ost_text, text="Времечко: " + time_str)
def create_scores_text():
    """Create the score display on the canvas; stores the canvas item
    id in the global ``scores_text``."""
    global scores_text
    scores_text = canv.create_text(120, 30,
                          text="Ваши очёчки: " + str(scores),
                          font="Sans 18")
def change_scores_text():
    """Refresh the score display from the global ``scores`` counter."""
    canv.itemconfigure(scores_text, text="Ваши очёчки: " + str(scores))
def click_ball(event):
    """Mouse-click handler for the game canvas.

    If the click lands inside the nearest ball (and the timer has not
    expired, flag_t == 0) the ball is removed from the canvas and from
    ``balls``, points are awarded (size-based, with fixed bonuses for
    yellow/blue), and a replacement ball is spawned so the total count
    stays constant.
    """
    global scores, w, h, balls, flag_t
    obj = canv.find_closest(event.x, event.y)
    id_shara = obj[0]
    # Look up the clicked ball's colour (it affects scoring).
    # BUG FIX: initialize cvet so a click that matches no tracked ball
    # cannot raise NameError further down.
    cvet = ''
    for b in balls:
        if b.id == id_shara:
            cvet = b.color
    try:
        x1, y1, x2, y2 = canv.coords(obj)
        radius = math.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) / 2
        # Smaller balls are worth more points.
        delta_scores = int(100 / radius)
    except:
        x1, y1, x2, y2 = 0, 0, 0, 0
    if x1 <= event.x <= x2 and y1 <= event.y <= y2 and flag_t == 0:
        winsound.Beep(400, 50)
        # Fixed bonuses for special colours override the size-based score.
        if cvet == 'yellow':
            delta_scores = 10
        elif cvet == 'blue':
            delta_scores = 15
        scores += delta_scores
        change_scores_text()
        canv.delete(obj)
        # BUG FIX: the original did `del b` inside a loop, which only
        # unbound the loop variable and left the dead ball inside
        # ``balls`` forever.  Actually drop it from the list.
        balls = [b for b in balls if b.id != id_shara]
        # Spawn a replacement ball with random position/size/colour.
        new_ball = ball()
        new_ball.x = rnd(50, w - 50)
        new_ball.y = rnd(50, h - 50)
        new_ball.r = rnd(10, 50)
        new_ball.nap = rnd(1, 4)
        new_ball.nx = 1
        new_ball.ny = 1
        new_ball.color = choice(['lemonchiffon', 'olivedrab', 'ivory', 'powderblue', 'darkslategray', 'orange',
                                 'pink', 'gold', 'red', 'yellow', 'violet', 'indigo', 'chartreuse', 'lime'])
        balls += [new_ball]
def time_event():
    """Advance every ball one pixel along its direction flags and
    re-arm itself (every 10 ms) while the game clock is running."""
    global balls, w, h
    for b in balls:
        # Map the stored direction flags to a unit step.  The original
        # four-way branch is preserved exactly: the three listed flag
        # pairs map to themselves, anything else falls back to (-1, -1).
        if (b.nx, b.ny) in ((1, 1), (-1, 1), (1, -1)):
            dx, dy = b.nx, b.ny
        else:
            dx, dy = -1, -1
        # Move, then flip direction flags when touching a window edge.
        b.x = b.x + dx
        b.y = b.y + dy
        if b.x > w - b.r:
            b.nx = -1   # bounced off the right edge
        if b.x < b.r:
            b.nx = 1    # bounced off the left edge
        if b.y > h - b.r:
            b.ny = -1   # bounced off the bottom edge
        if b.y < b.r:
            b.ny = 1    # bounced off the top edge
        # Redraw at the new position.
        b.paint()
    if flag_t == 0:
        canv.after(10, time_event)  # keep animating while time remains
# ---- game setup (module level) -------------------------------------------
time_finish = time.time() + 10   # the round lasts 10 seconds
flag_t = 0                       # set to 1 by tick() when time runs out

root = Tk()
w = 800
h = 600
root.geometry("%dx%d" % (w, h))
canv = Canvas(root, bg = 'white')
canv.pack(fill = BOTH, expand = 1)

scores = 0
balls = []
colors= ['aqua', 'blue', 'fuchsia', 'green', 'maroon', 'orange', 'lawngreen', 'lightseagreen', 'deeppink', 'crimson',
         'pink', 'purple', 'red','yellow', 'violet', 'indigo', 'chartreuse', 'lime', 'hotpink', 'khaki' ]

# Create the initial set of balls, one colour each.
for z in range(20):
    new_ball = ball()
    new_ball.x = rnd(50, w - 50)
    new_ball.y = rnd(50, h - 50)
    new_ball.r = rnd(10, 50)
    new_ball.nap = rnd(1, 4)
    new_ball.color = colors[z]
    # BUG FIX: the original used independent `if` statements, so the
    # trailing `else` (attached only to the `nap == 3` test) overwrote
    # the direction already chosen for nap 1 and nap 2.  A proper
    # elif-chain maps each of the four `nap` codes to its own quadrant.
    if new_ball.nap == 1:
        new_ball.nx = 1
        new_ball.ny = 1
    elif new_ball.nap == 2:
        new_ball.nx = 1
        new_ball.ny = -1
    elif new_ball.nap == 3:
        new_ball.nx = -1
        new_ball.ny = -1
    else:
        new_ball.nx = -1
        new_ball.ny = 1
    balls += [new_ball]

# Draw everything once before the animation starts.
for b in balls:
    b.paint()

create_scores_text()
create_timer()
delay = 1000
canv.after(delay, tick)    # start the one-second countdown callback
time_event()               # start the 10 ms animation callback
canv.bind("<Button>", click_ball)
mainloop()
|
qedi-r/home-assistant | refs/heads/dev | tests/components/ness_alarm/__init__.py | 39 | """Tests for the ness_alarm component."""
|
tiddlyweb/tiddlywebplugins.logout | refs/heads/master | test/test_compile.py | 1 |
def test_compile():
    # Smoke test: the plugin package must be importable; an ImportError
    # is converted into a test failure carrying the original message.
    # (Python 2 except syntax — this file targets Python 2.)
    try:
        import tiddlywebplugins.logout
        assert True
    except ImportError, exc:
        assert False, exc
|
tobbad/micropython | refs/heads/master | tests/basics/string_istest.py | 117 | print("".isspace())
print(" \t\n\r\v\f".isspace())
print("a".isspace())
print(" \t\n\r\v\fa".isspace())
print("".isalpha())
print("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ".isalpha())
print("0abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ".isalpha())
print("this ".isalpha())
print("".isdigit())
print("0123456789".isdigit())
print("0123456789a".isdigit())
print("0123456789 ".isdigit())
print("".isupper())
print("CHEESE-CAKE WITH ... _FROSTING_*99".isupper())
print("aB".isupper())
print("".islower())
print("cheese-cake with ... _frosting_*99".islower())
print("aB".islower())
print("123".islower())
print("123a".islower())
|
Shaffi08/yowsup | refs/heads/master | yowsup/layers/auth/protocolentities/stream_error_ack.py | 50 | from yowsup.structs import ProtocolEntity, ProtocolTreeNode
class StreamErrorAckProtocolEntity(ProtocolEntity):
    """Stream-error acknowledgement stanza:

    <stream:error>
        <ack></ack>
    </stream:error>
    """

    def __init__(self):
        super(StreamErrorAckProtocolEntity, self).__init__("stream:error")

    def toProtocolTreeNode(self):
        """Serialize: the base node plus a single empty <ack> child."""
        tree = super(StreamErrorAckProtocolEntity, self).toProtocolTreeNode()
        tree.addChild(ProtocolTreeNode("ack"))
        return tree

    def __str__(self):
        return "Ack Stream Error\n"

    @staticmethod
    def fromProtocolTreeNode(node):
        """The stanza carries no payload, so *node* is ignored."""
        return StreamErrorAckProtocolEntity()
|
zaphodatreides/P8000-Kernel | refs/heads/master | tools/perf/scripts/python/failed-syscalls-by-pid.py | 11180 | # failed system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
# Optional single argument: a numeric pid, or a process name (comm).
for_comm = None
for_pid = None
if len(sys.argv) > 2:
    sys.exit(usage)
if len(sys.argv) > 1:
    try:
        for_pid = int(sys.argv[1])
    except:
        # Not an integer, so treat the argument as a comm filter.
        for_comm = sys.argv[1]
# Nested autodict: syscalls[comm][pid][syscall_id][errno] -> count
syscalls = autodict()
def trace_begin():
    # Called by perf once before event processing starts.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called by perf after the last event; emit the aggregated report.
    print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, ret):
    # Per-event hook: count only failed syscalls (negative return),
    # optionally filtered to a single comm or pid.
    if (for_comm and common_comm != for_comm) or \
        (for_pid and common_pid != for_pid ):
        return
    if ret < 0:
        try:
            syscalls[common_comm][common_pid][id][ret] += 1
        except TypeError:
            # First hit for this (comm, pid, id, ret): autodict yields an
            # empty dict, so += raises TypeError; initialize the counter.
            syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
    # Report the counts accumulated in `syscalls`, grouped per comm, per
    # pid, per syscall, then per errno (most frequent errno first).
    if for_comm is not None:
        print "\nsyscall errors for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall errors:\n\n",
    print "%-30s %10s\n" % ("comm [pid]", "count"),
    print "%-30s %10s\n" % ("------------------------------", \
        "----------"),
    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            for id in id_keys:
                print "  syscall: %-16s\n" % syscall_name(id),
                ret_keys = syscalls[comm][pid][id].keys()
                # Sort errnos by descending count (Python 2 tuple-lambda).
                for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k),  reverse = True):
                    print "    err = %-20s  %10d\n" % (strerror(ret), val),
|
LeeDroid-/LeeDrOiD-Hima-M9 | refs/heads/master | tools/perf/scripts/python/failed-syscalls-by-pid.py | 11180 | # failed system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
# Optional single argument: a numeric pid, or a process name (comm).
for_comm = None
for_pid = None
if len(sys.argv) > 2:
    sys.exit(usage)
if len(sys.argv) > 1:
    try:
        for_pid = int(sys.argv[1])
    except:
        # Not an integer, so treat the argument as a comm filter.
        for_comm = sys.argv[1]
# Nested autodict: syscalls[comm][pid][syscall_id][errno] -> count
syscalls = autodict()
def trace_begin():
    # Called by perf once before event processing starts.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called by perf after the last event; emit the aggregated report.
    print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, ret):
    # Per-event hook: count only failed syscalls (negative return),
    # optionally filtered to a single comm or pid.
    if (for_comm and common_comm != for_comm) or \
        (for_pid and common_pid != for_pid ):
        return
    if ret < 0:
        try:
            syscalls[common_comm][common_pid][id][ret] += 1
        except TypeError:
            # First hit for this (comm, pid, id, ret): autodict yields an
            # empty dict, so += raises TypeError; initialize the counter.
            syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
    # Report the counts accumulated in `syscalls`, grouped per comm, per
    # pid, per syscall, then per errno (most frequent errno first).
    if for_comm is not None:
        print "\nsyscall errors for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall errors:\n\n",
    print "%-30s %10s\n" % ("comm [pid]", "count"),
    print "%-30s %10s\n" % ("------------------------------", \
        "----------"),
    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            for id in id_keys:
                print "  syscall: %-16s\n" % syscall_name(id),
                ret_keys = syscalls[comm][pid][id].keys()
                # Sort errnos by descending count (Python 2 tuple-lambda).
                for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k),  reverse = True):
                    print "    err = %-20s  %10d\n" % (strerror(ret), val),
|
vityagi/azure-linux-extensions | refs/heads/master | AzureEnhancedMonitor/ext/installer.py | 8 | #!/usr/bin/env python
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import imp
import os
import shutil
from Utils.WAAgentUtil import waagent
import Utils.HandlerUtil as util
ExtensionShortName = 'AzureEnhancedMonitor'
def parse_context(operation):
    """Build a HandlerUtility wired to waagent logging and parse the
    extension handler context for *operation*; returns the utility."""
    hutil = util.HandlerUtility(waagent.Log, waagent.Error, ExtensionShortName)
    hutil.do_parse_context(operation)
    return hutil
def find_psutil_build(buildDir):
    """Return the first subdirectory of *buildDir* whose bundled
    ``_psutil_linux.so`` can be loaded on this platform.

    Raises Exception when no candidate build is loadable.
    """
    for entry in os.listdir(buildDir):
        candidate = os.path.join(buildDir, entry)
        shared_obj = os.path.join(candidate, '_psutil_linux.so')
        try:
            # Probe-load the extension; failure just means this build
            # was compiled for a different platform/ABI.
            imp.load_dynamic('_psutil_linux', shared_obj)
        except Exception:
            continue
        return candidate
    raise Exception("Available build of psutil not found.")
def main():
    """Entry point: locate the prebuilt psutil binaries that load on this
    platform and copy them next to the extension sources."""
    waagent.LoggerInit('/var/log/waagent.log','/dev/stdout')
    waagent.Log("{0} started to handle.".format(ExtensionShortName))
    hutil = parse_context("Install")
    try:
        root = os.path.dirname(os.path.abspath(__file__))
        buildDir = os.path.join(root, "libpsutil")
        build = find_psutil_build(buildDir)
        # Copy every entry of the selected build into the extension root,
        # replacing whatever is already there.
        for item in os.listdir(build):
            src = os.path.join(build, item)
            dest = os.path.join(root, item)
            if os.path.isfile(src):
                if os.path.isfile(dest):
                    os.remove(dest)
                shutil.copyfile(src, dest)
            else:
                if os.path.isdir(dest):
                    shutil.rmtree(dest)
                shutil.copytree(src, dest)
    except Exception as e:
        # Only needed on the failure path; the module never imported it.
        import traceback
        # BUG FIX: the original called .format() on the *return value* of
        # hutil.error() (hutil.error("{0}, {1}").format(...)) and used the
        # unimported `traceback` module, raising a NameError instead of
        # logging the real failure.
        hutil.error("{0}, {1}".format(e, traceback.format_exc()))
        hutil.do_exit(1, "Install", 'failed','0',
                      'Install failed: {0}'.format(e))
if __name__ == '__main__':
main()
|
Jgarcia-IAS/SAT | refs/heads/master | openerp/addons-extra/odoo-pruebas/odoo-server/addons/base_import_module/__openerp__.py | 322 | {
'name': 'Base import module',
'description': """
Import a custom data module
===========================
This module allows authorized users to import a custom data module (.xml files and static assests)
for customization purpose.
""",
'category': 'Uncategorized',
'website': 'https://www.odoo.com',
'author': 'OpenERP SA',
'depends': ['web'],
'installable': True,
'auto_install': False,
'data': ['views/base_import_module.xml'],
'qweb': [],
'test': [],
}
|
pdamodaran/yellowbrick | refs/heads/develop | yellowbrick/contrib/__init__.py | 1 | # yellowbrick.contrib
#
# Contains a variety of extra tools and experimental visualizers outside of
# core support or still in development.
#
#
# ID: __init__.py [] bilbro@gmail.com $
from .scatter import ScatterViz, ScatterVisualizer, scatterviz
|
fnp/pylucene | refs/heads/master | samples/LuceneInAction/lia/tools/BerkeleyDbSearcher.py | 1 | # ====================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ====================================================================
import os
from bsddb.db import DBEnv, DB
from bsddb.db import \
DB_INIT_MPOOL, DB_INIT_LOCK, DB_INIT_TXN, DB_THREAD, DB_BTREE
# missing from python interface at the moment
DB_LOG_INMEMORY = 0x00020000
from lucene import DbDirectory, IndexSearcher, Term, TermQuery
class BerkeleyDbSearcher(object):
    """Searches a Lucene index stored inside Berkeley DB (DbDirectory).

    Usage: BerkeleyDbSearcher <index dir>, where <index dir> is the
    Berkeley DB environment home containing the '__index__' and
    '__blocks__' databases.  (Python 2 / PyLucene sample code.)
    """

    def main(cls, argv):
        if len(argv) != 2:
            print "Usage: BerkeleyDbSearcher <index dir>"
            return

        dbHome = argv[1]

        env = DBEnv()
        env.set_flags(DB_LOG_INMEMORY, 1);
        # Use a 64 MB cache on platforms where it is known to help.
        if os.name == 'nt':
            env.set_cachesize(0, 0x4000000, 1)
        elif os.name == 'posix':
            from commands import getstatusoutput
            if getstatusoutput('uname') == (0, 'Linux'):
                env.set_cachesize(0, 0x4000000, 1)

        env.open(dbHome, (DB_THREAD |
                          DB_INIT_MPOOL | DB_INIT_LOCK | DB_INIT_TXN), 0)

        index = DB(env)
        blocks = DB(env)
        txn = None

        # Open both databases inside one transaction; abort on failure,
        # commit once both are open.
        try:
            txn = env.txn_begin(None)
            index.open(filename = '__index__', dbtype = DB_BTREE,
                       flags = DB_THREAD, txn = txn)
            blocks.open(filename = '__blocks__', dbtype = DB_BTREE,
                        flags = DB_THREAD, txn = txn)
        except:
            if txn is not None:
                txn.abort()
                txn = None
            raise
        else:
            txn.commit()
            txn = None

        # Run a sample query ("fox" in the contents field) inside a
        # read-only transaction; it is aborted afterwards since nothing
        # is modified.
        try:
            txn = env.txn_begin(None)
            directory = DbDirectory(txn, index, blocks, 0)
            searcher = IndexSearcher(directory, True)

            topDocs = searcher.search(TermQuery(Term("contents", "fox")), 50)
            print topDocs.totalHits, "document(s) found"
            searcher.close()
        except:
            if txn is not None:
                txn.abort()
                txn = None
            raise
        else:
            txn.abort()

        index.close()
        blocks.close()
        env.close()

    main = classmethod(main)
|
goofwear/raspberry_pwn | refs/heads/master | src/pentest/metagoofil/hachoir_core/__init__.py | 95 | from hachoir_core.version import VERSION as __version__, PACKAGE, WEBSITE, LICENSE
|
dkubiak789/OpenUpgrade | refs/heads/8.0 | addons/hr_holidays/hr_holidays.py | 22 | # -*- coding: utf-8 -*-
##################################################################################
#
# Copyright (c) 2005-2006 Axelor SARL. (http://www.axelor.com)
# and 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# $Id: hr.py 4656 2006-11-24 09:58:42Z Cyp $
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import datetime
import math
import time
from operator import attrgetter
from openerp.exceptions import Warning
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
class hr_holidays_status(osv.osv):
    """Leave type (legal leave, sick leave, ...) with per-employee
    counters computed from hr.holidays requests."""
    _name = "hr.holidays.status"
    _description = "Leave Type"

    def get_days(self, cr, uid, ids, employee_id, context=None):
        """Return {status_id: {'max_leaves', 'leaves_taken',
        'remaining_leaves', 'virtual_remaining_leaves'}} for *employee_id*.

        Aggregates allocation ('add') and leave ('remove') requests in
        states confirm/validate1/validate; 'virtual_*' also counts
        requests still awaiting approval, the others only validated ones.
        Note that 'remove' requests store a negative number_of_days.
        """
        result = dict((id, dict(max_leaves=0, leaves_taken=0, remaining_leaves=0,
                                virtual_remaining_leaves=0)) for id in ids)
        holiday_ids = self.pool['hr.holidays'].search(cr, uid, [('employee_id', '=', employee_id),
                                                                ('state', 'in', ['confirm', 'validate1', 'validate']),
                                                                ('holiday_status_id', 'in', ids)
                                                                ], context=context)
        for holiday in self.pool['hr.holidays'].browse(cr, uid, holiday_ids, context=context):
            status_dict = result[holiday.holiday_status_id.id]
            if holiday.type == 'add':
                status_dict['virtual_remaining_leaves'] += holiday.number_of_days
                if holiday.state == 'validate':
                    status_dict['max_leaves'] += holiday.number_of_days
                    status_dict['remaining_leaves'] += holiday.number_of_days
            elif holiday.type == 'remove':  # number of days is negative
                status_dict['virtual_remaining_leaves'] += holiday.number_of_days
                if holiday.state == 'validate':
                    status_dict['leaves_taken'] -= holiday.number_of_days
                    status_dict['remaining_leaves'] += holiday.number_of_days
        return result

    def _user_left_days(self, cr, uid, ids, name, args, context=None):
        # Functional-field compute: resolve the employee from the context
        # (form views pass 'employee_id') or fall back to the employee
        # linked to the current user; zeroed counters when none exists.
        employee_id = False
        if context and 'employee_id' in context:
            employee_id = context['employee_id']
        else:
            employee_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)], context=context)
            if employee_ids:
                employee_id = employee_ids[0]
        if employee_id:
            res = self.get_days(cr, uid, ids, employee_id, context=context)
        else:
            res = dict.fromkeys(ids, {'leaves_taken': 0, 'remaining_leaves': 0, 'max_leaves': 0})
        return res

    _columns = {
        'name': fields.char('Leave Type', size=64, required=True, translate=True),
        'categ_id': fields.many2one('calendar.event.type', 'Meeting Type',
            help='Once a leave is validated, OpenERP will create a corresponding meeting of this type in the calendar.'),
        'color_name': fields.selection([('red', 'Red'),('blue','Blue'), ('lightgreen', 'Light Green'), ('lightblue','Light Blue'), ('lightyellow', 'Light Yellow'), ('magenta', 'Magenta'),('lightcyan', 'Light Cyan'),('black', 'Black'),('lightpink', 'Light Pink'),('brown', 'Brown'),('violet', 'Violet'),('lightcoral', 'Light Coral'),('lightsalmon', 'Light Salmon'),('lavender', 'Lavender'),('wheat', 'Wheat'),('ivory', 'Ivory')],'Color in Report', required=True, help='This color will be used in the leaves summary located in Reporting\Leaves by Department.'),
        'limit': fields.boolean('Allow to Override Limit', help='If you select this check box, the system allows the employees to take more leaves than the available ones for this type and will not take them into account for the "Remaining Legal Leaves" defined on the employee form.'),
        'active': fields.boolean('Active', help="If the active field is set to false, it will allow you to hide the leave type without removing it."),
        'max_leaves': fields.function(_user_left_days, string='Maximum Allowed', help='This value is given by the sum of all holidays requests with a positive value.', multi='user_left_days'),
        'leaves_taken': fields.function(_user_left_days, string='Leaves Already Taken', help='This value is given by the sum of all holidays requests with a negative value.', multi='user_left_days'),
        'remaining_leaves': fields.function(_user_left_days, string='Remaining Leaves', help='Maximum Leaves Allowed - Leaves Already Taken', multi='user_left_days'),
        'virtual_remaining_leaves': fields.function(_user_left_days, string='Virtual Remaining Leaves', help='Maximum Leaves Allowed - Leaves Already Taken - Leaves Waiting Approval', multi='user_left_days'),
        'double_validation': fields.boolean('Apply Double Validation', help="When selected, the Allocation/Leave Requests for this type require a second validation to be approved."),
    }
    _defaults = {
        'color_name': 'red',
        'active': True,
    }

    def name_get(self, cr, uid, ids, context=None):
        # Append "(taken/max)" to the display name for limited leave
        # types, but only when the employee is known — otherwise the
        # counters would be computed for the wrong person.
        # NOTE(review): crashes with AttributeError if context is None —
        # callers appear to always pass a dict; confirm.
        if not context.get('employee_id',False):
            # leave counts is based on employee_id, would be inaccurate if not based on correct employee
            return super(hr_holidays_status, self).name_get(cr, uid, ids, context=context)
        res = []
        for record in self.browse(cr, uid, ids, context=context):
            name = record.name
            if not record.limit:
                name = name + (' (%d/%d)' % (record.leaves_taken or 0.0, record.max_leaves or 0.0))
            res.append((record.id, name))
        return res
class hr_holidays(osv.osv):
_name = "hr.holidays"
_description = "Leave"
_order = "type desc, date_from asc"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_track = {
'state': {
'hr_holidays.mt_holidays_approved': lambda self, cr, uid, obj, ctx=None: obj.state == 'validate',
'hr_holidays.mt_holidays_refused': lambda self, cr, uid, obj, ctx=None: obj.state == 'refuse',
'hr_holidays.mt_holidays_confirmed': lambda self, cr, uid, obj, ctx=None: obj.state == 'confirm',
},
}
    def _employee_get(self, cr, uid, context=None):
        # Default value for employee_id: an explicit default from the
        # context wins; otherwise the employee linked to the current user.
        # NOTE(review): raises AttributeError when context is None —
        # presumably the ORM always supplies a dict here; confirm.
        emp_id = context.get('default_employee_id', False)
        if emp_id:
            return emp_id
        ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)], context=context)
        if ids:
            return ids[0]
        return False
    def _compute_number_of_days(self, cr, uid, ids, name, args, context=None):
        # Signed day count for the stored functional field: leave
        # requests ('remove') are negative, allocations positive.
        result = {}
        for hol in self.browse(cr, uid, ids, context=context):
            if hol.type=='remove':
                result[hol.id] = -hol.number_of_days_temp
            else:
                result[hol.id] = hol.number_of_days_temp
        return result
    def _get_can_reset(self, cr, uid, ids, name, arg, context=None):
        """User can reset a leave request if it is its own leave request or if
        he is an Hr Manager. """
        user = self.pool['res.users'].browse(cr, uid, uid, context=context)
        group_hr_manager_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'group_hr_manager')[1]
        # HR managers can reset every request.
        if group_hr_manager_id in [g.id for g in user.groups_id]:
            return dict.fromkeys(ids, True)
        # Everyone else: only requests belonging to their own employee record.
        result = dict.fromkeys(ids, False)
        for holiday in self.browse(cr, uid, ids, context=context):
            if holiday.employee_id and holiday.employee_id.user_id and holiday.employee_id.user_id.id == uid:
                result[holiday.id] = True
        return result
    def _check_date(self, cr, uid, ids):
        # Constraint: an employee may not have two overlapping leave
        # requests unless one of them is cancelled or refused.
        for holiday in self.browse(cr, uid, ids):
            holiday_ids = self.search(cr, uid, [('date_from', '<=', holiday.date_to), ('date_to', '>=', holiday.date_from),
                                                ('employee_id', '=', holiday.employee_id.id), ('id', '<>', holiday.id),
                                                ('state', 'not in', ['cancel', 'refuse'])])
            if holiday_ids:
                return False
        return True
_check_holidays = lambda self, cr, uid, ids, context=None: self.check_holidays(cr, uid, ids, context=context)
_columns = {
'name': fields.char('Description', size=64),
'state': fields.selection([('draft', 'To Submit'), ('cancel', 'Cancelled'),('confirm', 'To Approve'), ('refuse', 'Refused'), ('validate1', 'Second Approval'), ('validate', 'Approved')],
'Status', readonly=True, track_visibility='onchange',
help='The status is set to \'To Submit\', when a holiday request is created.\
\nThe status is \'To Approve\', when holiday request is confirmed by user.\
\nThe status is \'Refused\', when holiday request is refused by manager.\
\nThe status is \'Approved\', when holiday request is approved by manager.'),
'user_id':fields.related('employee_id', 'user_id', type='many2one', relation='res.users', string='User', store=True),
'date_from': fields.datetime('Start Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, select=True),
'date_to': fields.datetime('End Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'holiday_status_id': fields.many2one("hr.holidays.status", "Leave Type", required=True,readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'employee_id': fields.many2one('hr.employee', "Employee", select=True, invisible=False, readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'manager_id': fields.many2one('hr.employee', 'First Approval', invisible=False, readonly=True, help='This area is automatically filled by the user who validate the leave'),
'notes': fields.text('Reasons',readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'number_of_days_temp': fields.float('Allocation', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'number_of_days': fields.function(_compute_number_of_days, string='Number of Days', store=True),
'meeting_id': fields.many2one('calendar.event', 'Meeting'),
'type': fields.selection([('remove','Leave Request'),('add','Allocation Request')], 'Request Type', required=True, readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, help="Choose 'Leave Request' if someone wants to take an off-day. \nChoose 'Allocation Request' if you want to increase the number of leaves available for someone", select=True),
'parent_id': fields.many2one('hr.holidays', 'Parent'),
'linked_request_ids': fields.one2many('hr.holidays', 'parent_id', 'Linked Requests',),
'department_id':fields.related('employee_id', 'department_id', string='Department', type='many2one', relation='hr.department', readonly=True, store=True),
'category_id': fields.many2one('hr.employee.category', "Employee Tag", help='Category of Employee', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'holiday_type': fields.selection([('employee','By Employee'),('category','By Employee Tag')], 'Allocation Mode', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, help='By Employee: Allocation/Request for individual Employee, By Employee Tag: Allocation/Request for group of employees in category', required=True),
'manager_id2': fields.many2one('hr.employee', 'Second Approval', readonly=True, help='This area is automaticly filled by the user who validate the leave with second level (If Leave type need second validation)'),
'double_validation': fields.related('holiday_status_id', 'double_validation', type='boolean', relation='hr.holidays.status', string='Apply Double Validation'),
'can_reset': fields.function(
_get_can_reset,
type='boolean'),
}
_defaults = {
'employee_id': _employee_get,
'state': 'confirm',
'type': 'remove',
'user_id': lambda obj, cr, uid, context: uid,
'holiday_type': 'employee'
}
_constraints = [
(_check_date, 'You can not have 2 leaves that overlaps on same day!', ['date_from','date_to']),
(_check_holidays, 'The number of remaining leaves is not sufficient for this leave type', ['state','number_of_days_temp'])
]
_sql_constraints = [
('type_value', "CHECK( (holiday_type='employee' AND employee_id IS NOT NULL) or (holiday_type='category' AND category_id IS NOT NULL))",
"The employee or employee category of this request is missing. Please make sure that your user login is linked to an employee."),
('date_check2', "CHECK ( (type='add') OR (date_from <= date_to))", "The start date must be anterior to the end date."),
('date_check', "CHECK ( number_of_days_temp >= 0 )", "The number of days must be greater than 0."),
]
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
if context is None:
context = {}
default = default.copy()
default['date_from'] = False
default['date_to'] = False
return super(hr_holidays, self).copy(cr, uid, id, default, context=context)
def _create_resource_leave(self, cr, uid, leaves, context=None):
'''This method will create entry in resource calendar leave object at the time of holidays validated '''
obj_res_leave = self.pool.get('resource.calendar.leaves')
for leave in leaves:
vals = {
'name': leave.name,
'date_from': leave.date_from,
'holiday_id': leave.id,
'date_to': leave.date_to,
'resource_id': leave.employee_id.resource_id.id,
'calendar_id': leave.employee_id.resource_id.calendar_id.id
}
obj_res_leave.create(cr, uid, vals, context=context)
return True
def _remove_resource_leave(self, cr, uid, ids, context=None):
'''This method will create entry in resource calendar leave object at the time of holidays cancel/removed'''
obj_res_leave = self.pool.get('resource.calendar.leaves')
leave_ids = obj_res_leave.search(cr, uid, [('holiday_id', 'in', ids)], context=context)
return obj_res_leave.unlink(cr, uid, leave_ids, context=context)
def onchange_type(self, cr, uid, ids, holiday_type, employee_id=False, context=None):
result = {}
if holiday_type == 'employee' and not employee_id:
ids_employee = self.pool.get('hr.employee').search(cr, uid, [('user_id','=', uid)])
if ids_employee:
result['value'] = {
'employee_id': ids_employee[0]
}
elif holiday_type != 'employee':
result['value'] = {
'employee_id': False
}
return result
def onchange_employee(self, cr, uid, ids, employee_id):
result = {'value': {'department_id': False}}
if employee_id:
employee = self.pool.get('hr.employee').browse(cr, uid, employee_id)
result['value'] = {'department_id': employee.department_id.id}
return result
# TODO: can be improved using resource calendar method
def _get_number_of_days(self, date_from, date_to):
"""Returns a float equals to the timedelta between two dates given as string."""
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
from_dt = datetime.datetime.strptime(date_from, DATETIME_FORMAT)
to_dt = datetime.datetime.strptime(date_to, DATETIME_FORMAT)
timedelta = to_dt - from_dt
diff_day = timedelta.days + float(timedelta.seconds) / 86400
return diff_day
def unlink(self, cr, uid, ids, context=None):
for rec in self.browse(cr, uid, ids, context=context):
if rec.state not in ['draft', 'cancel', 'confirm']:
raise osv.except_osv(_('Warning!'),_('You cannot delete a leave which is in %s state.')%(rec.state))
return super(hr_holidays, self).unlink(cr, uid, ids, context)
def onchange_date_from(self, cr, uid, ids, date_to, date_from):
"""
If there are no date set for date_to, automatically set one 8 hours later than
the date_from.
Also update the number_of_days.
"""
# date_to has to be greater than date_from
if (date_from and date_to) and (date_from > date_to):
raise osv.except_osv(_('Warning!'),_('The start date must be anterior to the end date.'))
result = {'value': {}}
# No date_to set so far: automatically compute one 8 hours later
if date_from and not date_to:
date_to_with_delta = datetime.datetime.strptime(date_from, tools.DEFAULT_SERVER_DATETIME_FORMAT) + datetime.timedelta(hours=8)
result['value']['date_to'] = str(date_to_with_delta)
# Compute and update the number of days
if (date_to and date_from) and (date_from <= date_to):
diff_day = self._get_number_of_days(date_from, date_to)
result['value']['number_of_days_temp'] = round(math.floor(diff_day))+1
else:
result['value']['number_of_days_temp'] = 0
return result
def onchange_date_to(self, cr, uid, ids, date_to, date_from):
"""
Update the number_of_days.
"""
# date_to has to be greater than date_from
if (date_from and date_to) and (date_from > date_to):
raise osv.except_osv(_('Warning!'),_('The start date must be anterior to the end date.'))
result = {'value': {}}
# Compute and update the number of days
if (date_to and date_from) and (date_from <= date_to):
diff_day = self._get_number_of_days(date_from, date_to)
result['value']['number_of_days_temp'] = round(math.floor(diff_day))+1
else:
result['value']['number_of_days_temp'] = 0
return result
def create(self, cr, uid, values, context=None):
""" Override to avoid automatic logging of creation """
if context is None:
context = {}
context = dict(context, mail_create_nolog=True)
hol_id = super(hr_holidays, self).create(cr, uid, values, context=context)
return hol_id
def holidays_reset(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {
'state': 'draft',
'manager_id': False,
'manager_id2': False,
})
to_unlink = []
for record in self.browse(cr, uid, ids, context=context):
for record2 in record.linked_request_ids:
self.holidays_reset(cr, uid, [record2.id], context=context)
to_unlink.append(record2.id)
if to_unlink:
self.unlink(cr, uid, to_unlink, context=context)
return True
def holidays_first_validate(self, cr, uid, ids, context=None):
obj_emp = self.pool.get('hr.employee')
ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)])
manager = ids2 and ids2[0] or False
self.holidays_first_validate_notificate(cr, uid, ids, context=context)
return self.write(cr, uid, ids, {'state':'validate1', 'manager_id': manager})
def holidays_validate(self, cr, uid, ids, context=None):
obj_emp = self.pool.get('hr.employee')
ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)])
manager = ids2 and ids2[0] or False
self.write(cr, uid, ids, {'state':'validate'})
data_holiday = self.browse(cr, uid, ids)
for record in data_holiday:
if record.double_validation:
self.write(cr, uid, [record.id], {'manager_id2': manager})
else:
self.write(cr, uid, [record.id], {'manager_id': manager})
if record.holiday_type == 'employee' and record.type == 'remove':
meeting_obj = self.pool.get('calendar.event')
meeting_vals = {
'name': record.name or _('Leave Request'),
'categ_ids': record.holiday_status_id.categ_id and [(6,0,[record.holiday_status_id.categ_id.id])] or [],
'duration': record.number_of_days_temp * 8,
'description': record.notes,
'user_id': record.user_id.id,
'start': record.date_from,
'stop': record.date_to,
'allday': False,
'state': 'open', # to block that meeting date in the calendar
'class': 'confidential'
}
#Add the partner_id (if exist) as an attendee
if record.user_id and record.user_id.partner_id:
meeting_vals['partner_ids'] = [(4,record.user_id.partner_id.id)]
meeting_id = meeting_obj.create(cr, uid, meeting_vals)
self._create_resource_leave(cr, uid, [record], context=context)
self.write(cr, uid, ids, {'meeting_id': meeting_id})
elif record.holiday_type == 'category':
emp_ids = obj_emp.search(cr, uid, [('category_ids', 'child_of', [record.category_id.id])])
leave_ids = []
for emp in obj_emp.browse(cr, uid, emp_ids):
vals = {
'name': record.name,
'type': record.type,
'holiday_type': 'employee',
'holiday_status_id': record.holiday_status_id.id,
'date_from': record.date_from,
'date_to': record.date_to,
'notes': record.notes,
'number_of_days_temp': record.number_of_days_temp,
'parent_id': record.id,
'employee_id': emp.id
}
leave_ids.append(self.create(cr, uid, vals, context=None))
for leave_id in leave_ids:
# TODO is it necessary to interleave the calls?
self.signal_confirm(cr, uid, [leave_id])
self.signal_validate(cr, uid, [leave_id])
self.signal_second_validate(cr, uid, [leave_id])
return True
def holidays_confirm(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids, context=context):
if record.employee_id and record.employee_id.parent_id and record.employee_id.parent_id.user_id:
self.message_subscribe_users(cr, uid, [record.id], user_ids=[record.employee_id.parent_id.user_id.id], context=context)
return self.write(cr, uid, ids, {'state': 'confirm'})
def holidays_refuse(self, cr, uid, ids, context=None):
obj_emp = self.pool.get('hr.employee')
ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)])
manager = ids2 and ids2[0] or False
for holiday in self.browse(cr, uid, ids, context=context):
if holiday.state == 'validate1':
self.write(cr, uid, [holiday.id], {'state': 'refuse', 'manager_id': manager})
else:
self.write(cr, uid, [holiday.id], {'state': 'refuse', 'manager_id2': manager})
self.holidays_cancel(cr, uid, ids, context=context)
return True
def holidays_cancel(self, cr, uid, ids, context=None):
meeting_obj = self.pool.get('calendar.event')
for record in self.browse(cr, uid, ids):
# Delete the meeting
if record.meeting_id:
meeting_obj.unlink(cr, uid, [record.meeting_id.id])
# If a category that created several holidays, cancel all related
self.signal_refuse(cr, uid, map(attrgetter('id'), record.linked_request_ids or []))
self._remove_resource_leave(cr, uid, ids, context=context)
return True
def check_holidays(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids, context=context):
if record.holiday_type != 'employee' or record.type != 'remove' or not record.employee_id or record.holiday_status_id.limit:
continue
leave_days = self.pool.get('hr.holidays.status').get_days(cr, uid, [record.holiday_status_id.id], record.employee_id.id, context=context)[record.holiday_status_id.id]
if leave_days['remaining_leaves'] < 0 or leave_days['virtual_remaining_leaves'] < 0:
# Raising a warning gives a more user-friendly feedback than the default constraint error
raise Warning(_('The number of remaining leaves is not sufficient for this leave type.\n'
'Please verify also the leaves waiting for validation.'))
return True
# -----------------------------
# OpenChatter and notifications
# -----------------------------
def _needaction_domain_get(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
empids = emp_obj.search(cr, uid, [('parent_id.user_id', '=', uid)], context=context)
dom = ['&', ('state', '=', 'confirm'), ('employee_id', 'in', empids)]
# if this user is a hr.manager, he should do second validations
if self.pool.get('res.users').has_group(cr, uid, 'base.group_hr_manager'):
dom = ['|'] + dom + [('state', '=', 'validate1')]
return dom
def holidays_first_validate_notificate(self, cr, uid, ids, context=None):
for obj in self.browse(cr, uid, ids, context=context):
self.message_post(cr, uid, [obj.id],
_("Request approved, waiting second validation."), context=context)
class resource_calendar_leaves(osv.osv):
    """Extend resource calendar leaves with a link back to the HR leave request."""
    _inherit = "resource.calendar.leaves"
    _description = "Leave Detail"
    _columns = {
        # The hr.holidays record that generated this calendar entry.
        'holiday_id': fields.many2one("hr.holidays", "Leave Request"),
    }
class hr_employee(osv.osv):
    """Extend hr.employee with leave-balance function fields and counters."""
    _inherit="hr.employee"

    def create(self, cr, uid, vals, context=None):
        # don't pass the value of remaining leave if it's 0 at the creation time, otherwise it will trigger the inverse
        # function _set_remaining_days and the system may not be configured for. Note that we don't have this problem on
        # the write because the clients only send the fields that have been modified.
        if 'remaining_leaves' in vals and not vals['remaining_leaves']:
            del(vals['remaining_leaves'])
        return super(hr_employee, self).create(cr, uid, vals, context=context)

    def _set_remaining_days(self, cr, uid, empl_id, name, value, arg, context=None):
        """Inverse of 'remaining_leaves': turn a balance change into a request.

        A positive difference creates an allocation request, a negative one a
        leave request, both on the single leave type that has 'limit'
        unchecked; the new request is then pushed through the full
        confirm/validate/second-validate workflow.
        """
        employee = self.browse(cr, uid, empl_id, context=context)
        diff = value - employee.remaining_leaves
        type_obj = self.pool.get('hr.holidays.status')
        holiday_obj = self.pool.get('hr.holidays')
        # Find for holidays status
        status_ids = type_obj.search(cr, uid, [('limit', '=', False)], context=context)
        # Exactly one limited leave type is required, otherwise the update is ambiguous.
        if len(status_ids) != 1 :
            raise osv.except_osv(_('Warning!'),_("The feature behind the field 'Remaining Legal Leaves' can only be used when there is only one leave type with the option 'Allow to Override Limit' unchecked. (%s Found). Otherwise, the update is ambiguous as we cannot decide on which leave type the update has to be done. \nYou may prefer to use the classic menus 'Leave Requests' and 'Allocation Requests' located in 'Human Resources \ Leaves' to manage the leave days of the employees if the configuration does not allow to use this field.") % (len(status_ids)))
        status_id = status_ids and status_ids[0] or False
        if not status_id:
            return False
        if diff > 0:
            leave_id = holiday_obj.create(cr, uid, {'name': _('Allocation for %s') % employee.name, 'employee_id': employee.id, 'holiday_status_id': status_id, 'type': 'add', 'holiday_type': 'employee', 'number_of_days_temp': diff}, context=context)
        elif diff < 0:
            leave_id = holiday_obj.create(cr, uid, {'name': _('Leave Request for %s') % employee.name, 'employee_id': employee.id, 'holiday_status_id': status_id, 'type': 'remove', 'holiday_type': 'employee', 'number_of_days_temp': abs(diff)}, context=context)
        else:
            # No change requested.
            return False
        holiday_obj.signal_confirm(cr, uid, [leave_id])
        holiday_obj.signal_validate(cr, uid, [leave_id])
        holiday_obj.signal_second_validate(cr, uid, [leave_id])
        return True

    def _get_remaining_days(self, cr, uid, ids, name, args, context=None):
        """Compute the legal leave balance (sum of validated hr_holidays days
        on non-limit leave types) per employee id.

        NOTE(review): ids are interpolated into the SQL string; they come from
        the ORM as integers, but a parameterized query
        (cr.execute(query, (tuple(ids),))) would be safer — consider changing.
        """
        cr.execute("""SELECT
                sum(h.number_of_days) as days,
                h.employee_id
            from
                hr_holidays h
                join hr_holidays_status s on (s.id=h.holiday_status_id)
            where
                h.state='validate' and
                s.limit=False and
                h.employee_id in (%s)
            group by h.employee_id"""% (','.join(map(str,ids)),) )
        res = cr.dictfetchall()
        remaining = {}
        for r in res:
            remaining[r['employee_id']] = r['days']
        # Employees with no matching holiday rows get a 0.0 balance.
        for employee_id in ids:
            if not remaining.get(employee_id):
                remaining[employee_id] = 0.0
        return remaining

    def _get_leave_status(self, cr, uid, ids, name, args, context=None):
        """Return, per employee, data about a 'remove' leave covering today.

        When several current leaves match, the last one browsed wins.
        """
        holidays_obj = self.pool.get('hr.holidays')
        holidays_id = holidays_obj.search(cr, uid,
            [('employee_id', 'in', ids), ('date_from','<=',time.strftime('%Y-%m-%d %H:%M:%S')),
            ('date_to','>=',time.strftime('%Y-%m-%d 23:59:59')),('type','=','remove'),('state','not in',('cancel','refuse'))],
            context=context)
        result = {}
        # Default all employees to "no current leave".
        for id in ids:
            result[id] = {
                'current_leave_state': False,
                'current_leave_id': False,
                'leave_date_from':False,
                'leave_date_to':False,
            }
        for holiday in self.pool.get('hr.holidays').browse(cr, uid, holidays_id, context=context):
            result[holiday.employee_id.id]['leave_date_from'] = holiday.date_from
            result[holiday.employee_id.id]['leave_date_to'] = holiday.date_to
            result[holiday.employee_id.id]['current_leave_state'] = holiday.state
            # Despite the key name, this holds the leave *type* id, matching
            # the field's relation to hr.holidays.status below.
            result[holiday.employee_id.id]['current_leave_id'] = holiday.holiday_status_id.id
        return result

    def _leaves_count(self, cr, uid, ids, field_name, arg, context=None):
        # Total number of hr.holidays records (any type/state) per employee.
        Holidays = self.pool['hr.holidays']
        return {
            employee_id: Holidays.search_count(cr,uid, [('employee_id', '=', employee_id)], context=context)
            for employee_id in ids
        }

    _columns = {
        'remaining_leaves': fields.function(_get_remaining_days, string='Remaining Legal Leaves', fnct_inv=_set_remaining_days, type="float", help='Total number of legal leaves allocated to this employee, change this value to create allocation/leave request. Total based on all the leave types without overriding limit.'),
        'current_leave_state': fields.function(_get_leave_status, multi="leave_status", string="Current Leave Status", type="selection",
            selection=[('draft', 'New'), ('confirm', 'Waiting Approval'), ('refuse', 'Refused'),
            ('validate1', 'Waiting Second Approval'), ('validate', 'Approved'), ('cancel', 'Cancelled')]),
        'current_leave_id': fields.function(_get_leave_status, multi="leave_status", string="Current Leave Type",type='many2one', relation='hr.holidays.status'),
        'leave_date_from': fields.function(_get_leave_status, multi='leave_status', type='date', string='From Date'),
        'leave_date_to': fields.function(_get_leave_status, multi='leave_status', type='date', string='To Date'),
        'leaves_count': fields.function(_leaves_count, type='integer', string='Leaves'),
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
aayushchugh07/qbo-browser | refs/heads/master | btp3/cgi_backup2/viewdata.py | 3 | #!/usr/bin/env python
#print("Location:http://newurl.com/foobar")
import cgi
import QBO
import backend
import sys
from PrintFunctions import *;
getvars = cgi.FieldStorage()
tablename=''
newname=''
try:
tablename=getvars["bagname"].value
except:
pass
x=None
if(tablename):
x=backend.viewData(tablename)
print "Content-type: text/html"
print
printHeader()
printNavbar()
printRecords(x,tablename)
printFooter()
|
mlperf/training_results_v0.6 | refs/heads/master | Google/benchmarks/transformer/implementations/tpu-v3-32-transformer/dataset_preproc/data_generators/lm1b_mnli.py | 7 | # coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data generators for LM1B and MNLI combined datasets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensor2tensor.data_generators import lm1b
from tensor2tensor.data_generators import multi_problem
from tensor2tensor.data_generators import multinli
from tensor2tensor.data_generators import text_problems
from tensor2tensor.utils import registry
@registry.register_problem
class LanguagemodelLm1bMultiNLISubwords(multi_problem.MultiProblem):
  """LM1b and MNLI mixed problem class for multitask learning."""

  def __init__(self, was_reversed=False, was_copy=False):
    # Task order matters to MultiProblem: the language model task comes
    # first, the MultiNLI classification task second.
    super(LanguagemodelLm1bMultiNLISubwords, self).__init__(
        was_reversed, was_copy)
    self.task_list.append(lm1b.LanguagemodelLm1b32k())
    self.task_list.append(multinli.MultiNLISharedVocab())

  @property
  def vocab_type(self):
    # Subword vocabulary variant (see the character-level class below).
    return text_problems.VocabType.SUBWORD
@registry.register_problem
class LanguagemodelLm1bMultiNLI(multi_problem.MultiProblem):
  """LM1b and MNLI mixed problem class for multitask learning."""

  def __init__(self, was_reversed=False, was_copy=False):
    # Character-level variants of both tasks, LM task first.
    super(LanguagemodelLm1bMultiNLI, self).__init__(was_reversed, was_copy)
    self.task_list.append(lm1b.LanguagemodelLm1bCharacters())
    self.task_list.append(multinli.MultiNLICharacters())

  @property
  def vocab_type(self):
    # Character vocabulary variant of the subword class above.
    return text_problems.VocabType.CHARACTER
|
maweigert/spimagine | refs/heads/master | spimagine/models/__init__.py | 1 |
# from .keyframe_model import TransformData |
pedrobaeza/website | refs/heads/8.0 | website_sale_vat_required/__init__.py | 66 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Agile Business Group sagl (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import controllers
|
proxysh/Safejumper-for-Mac | refs/heads/master | buildlinux/env32/lib/python2.7/site-packages/twisted/trial/test/novars.py | 162 | # fodder for test_script, which parses files for emacs local variable
# declarations. This one is supposed to have none.
# The class declaration is irrelevant
class Bar(object):
    """Deliberately empty class; its presence (and nothing else) is what the parser test needs."""
    pass
|
carlTLR/gyp | refs/heads/master | pylib/gyp/common_test.py | 2542 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the common.py file."""
import gyp.common
import unittest
import sys
class TestTopologicallySorted(unittest.TestCase):
  def test_Valid(self):
    """Test that sorting works on a valid graph with one possible order."""
    graph = {
        'a': ['b', 'c'],
        'b': [],
        'c': ['d'],
        'd': ['b'],
    }
    get_edge = lambda node: tuple(graph[node])
    self.assertEqual(
        gyp.common.TopologicallySorted(graph.keys(), get_edge),
        ['a', 'c', 'd', 'b'])

  def test_Cycle(self):
    """Test that an exception is thrown on a cyclic graph."""
    graph = {
        'a': ['b'],
        'b': ['c'],
        'c': ['d'],
        'd': ['a'],
    }
    get_edge = lambda node: tuple(graph[node])
    self.assertRaises(
        gyp.common.CycleError, gyp.common.TopologicallySorted,
        graph.keys(), get_edge)
class TestGetFlavor(unittest.TestCase):
  """Test that gyp.common.GetFlavor works as intended"""
  original_platform = ''

  def setUp(self):
    # Remember the real platform so each test can fake sys.platform safely.
    self.original_platform = sys.platform

  def tearDown(self):
    sys.platform = self.original_platform

  def assertFlavor(self, expected, argument, param):
    """Assert that faking sys.platform as `argument` yields flavor `expected`."""
    sys.platform = argument
    self.assertEqual(expected, gyp.common.GetFlavor(param))

  def test_platform_default(self):
    # Lint fix: removed the stray C-style semicolons and normalized spacing.
    self.assertFlavor('freebsd', 'freebsd9', {})
    self.assertFlavor('freebsd', 'freebsd10', {})
    self.assertFlavor('openbsd', 'openbsd5', {})
    self.assertFlavor('solaris', 'sunos5', {})
    self.assertFlavor('solaris', 'sunos', {})
    self.assertFlavor('linux', 'linux2', {})
    self.assertFlavor('linux', 'linux3', {})

  def test_param(self):
    self.assertFlavor('foobar', 'linux2', {'flavor': 'foobar'})
# Allow running this test file directly with `python common_test.py`.
if __name__ == '__main__':
  unittest.main()
|
brucx/uberpku | refs/heads/master | views/todos.py | 17 | # coding: utf-8
from leancloud import Object
from leancloud import Query
from leancloud import LeanCloudError
from flask import Blueprint
from flask import request
from flask import redirect
from flask import url_for
from flask import render_template
class Todo(Object):
    """LeanCloud object backing one todo item; 'content' is set at creation (see add())."""
    pass
todos_view = Blueprint('todos', __name__)
@todos_view.route('')
def show():
    """Render the todo list, newest first.

    LeanCloud error code 101 means the Todo class has not been created on
    the server yet (nothing saved so far); treat that as an empty list.
    """
    try:
        todos = Query(Todo).descending('createdAt').find()
    except LeanCloudError as e:
        # Modernized from the Python-2-only `except LeanCloudError, e:` form.
        if e.code == 101:  # the corresponding server-side Class does not exist yet
            todos = []
        else:
            # Bare `raise` preserves the original traceback (was `raise e`).
            raise
    return render_template('todos.html', todos=todos)
@todos_view.route('', methods=['POST'])
def add():
    """Create a todo from the posted 'content' field, then return to the list."""
    new_todo = Todo(content=request.form['content'])
    new_todo.save()
    return redirect(url_for('todos.show'))
|
meredith-digops/ansible | refs/heads/devel | lib/ansible/modules/monitoring/zabbix_host.py | 42 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013-2014, Epic Games, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: zabbix_host
short_description: Zabbix host creates/updates/deletes
description:
- This module allows you to create, modify and delete Zabbix host entries and associated group and template data.
version_added: "2.0"
author:
- "(@cove)"
- "Tony Minfei Ding"
- "Harrison Gu (@harrisongu)"
requirements:
- "python >= 2.6"
- zabbix-api
options:
server_url:
description:
- Url of Zabbix server, with protocol (http or https).
required: true
aliases: [ "url" ]
login_user:
description:
- Zabbix user name, used to authenticate against the server.
required: true
login_password:
description:
- Zabbix user password.
required: true
http_login_user:
description:
- Basic Auth login
required: false
default: None
version_added: "2.1"
http_login_password:
description:
- Basic Auth password
required: false
default: None
version_added: "2.1"
host_name:
description:
- Name of the host in Zabbix.
- host_name is the unique identifier used and cannot be updated using this module.
required: true
visible_name:
description:
- Visible name of the host in Zabbix.
required: false
version_added: '2.3'
host_groups:
description:
- List of host groups the host is part of.
required: false
link_templates:
description:
- List of templates linked to the host.
required: false
default: None
inventory_mode:
description:
- Configure the inventory mode.
choices: ['automatic', 'manual', 'disabled']
required: false
default: None
version_added: '2.1'
status:
description:
- Monitoring status of the host.
required: false
choices: ['enabled', 'disabled']
default: "enabled"
state:
description:
- State of the host.
- On C(present), it will create if host does not exist or update the host if the associated data is different.
- On C(absent) will remove a host if it exists.
required: false
choices: ['present', 'absent']
default: "present"
timeout:
description:
- The timeout of API request (seconds).
default: 10
proxy:
description:
- The name of the Zabbix Proxy to be used
default: None
interfaces:
description:
- List of interfaces to be created for the host (see example below).
- 'Available values are: dns, ip, main, port, type and useip.'
- Please review the interface documentation for more information on the supported properties
- 'https://www.zabbix.com/documentation/2.0/manual/appendix/api/hostinterface/definitions#host_interface'
required: false
default: []
force:
description:
- Overwrite the host configuration, even if already present
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "2.0"
'''
EXAMPLES = '''
- name: Create a new host or update an existing host's info
local_action:
module: zabbix_host
server_url: http://monitor.example.com
login_user: username
login_password: password
host_name: ExampleHost
visible_name: ExampleName
host_groups:
- Example group1
- Example group2
link_templates:
- Example template1
- Example template2
status: enabled
state: present
inventory_mode: automatic
interfaces:
- type: 1
main: 1
useip: 1
ip: 10.xx.xx.xx
dns: ""
port: 10050
- type: 4
main: 1
useip: 1
ip: 10.xx.xx.xx
dns: ""
port: 12345
proxy: a.zabbix.proxy
'''
import logging
import copy
try:
    from zabbix_api import ZabbixAPI, ZabbixAPISubClass
    # Extend the ZabbixAPI
    # Since the zabbix-api python module too old (version 1.0, no higher version so far),
    # it does not support the 'hostinterface' api calls,
    # so we have to inherit the ZabbixAPI class to add 'hostinterface' support.
    class ZabbixAPIExtends(ZabbixAPI):
        # Sub-client exposing the 'hostinterface.*' JSON-RPC calls; set in __init__.
        hostinterface = None

        def __init__(self, server, timeout, user, passwd, **kwargs):
            ZabbixAPI.__init__(self, server, timeout=timeout, user=user, passwd=passwd)
            self.hostinterface = ZabbixAPISubClass(self, dict({"prefix": "hostinterface"}, **kwargs))

    HAS_ZABBIX_API = True
except ImportError:
    # zabbix-api is not installed; record that fact instead of crashing at import.
    HAS_ZABBIX_API = False
class Host(object):
    """Thin wrapper around the Zabbix API for the host operations this module needs."""
    def __init__(self, module, zbx):
        # module: AnsibleModule instance, used for fail_json/exit_json reporting.
        # zbx: connected ZabbixAPI(Extends) client.
        self._module = module
        self._zapi = zbx
# exist host
def is_host_exist(self, host_name):
result = self._zapi.host.get({'filter': {'host': host_name}})
return result
# check if host group exists
def check_host_group_exist(self, group_names):
for group_name in group_names:
result = self._zapi.hostgroup.get({'filter': {'name': group_name}})
if not result:
self._module.fail_json(msg="Hostgroup not found: %s" % group_name)
return True
def get_template_ids(self, template_list):
template_ids = []
if template_list is None or len(template_list) == 0:
return template_ids
for template in template_list:
template_list = self._zapi.template.get({'output': 'extend', 'filter': {'host': template}})
if len(template_list) < 1:
self._module.fail_json(msg="Template not found: %s" % template)
else:
template_id = template_list[0]['templateid']
template_ids.append(template_id)
return template_ids
def add_host(self, host_name, group_ids, status, interfaces, proxy_id, visible_name):
try:
if self._module.check_mode:
self._module.exit_json(changed=True)
parameters = {'host': host_name, 'interfaces': interfaces, 'groups': group_ids, 'status': status}
if proxy_id:
parameters['proxy_hostid'] = proxy_id
if visible_name:
parameters['name'] = visible_name
host_list = self._zapi.host.create(parameters)
if len(host_list) >= 1:
return host_list['hostids'][0]
except Exception as e:
self._module.fail_json(msg="Failed to create host %s: %s" % (host_name, e))
    def update_host(self, host_name, group_ids, status, host_id, interfaces, exist_interface_list, proxy_id, visible_name):
        """Update a host's groups/status/proxy/name and reconcile its interfaces.

        Interfaces are matched by type: a same-type existing interface is
        updated in place, unmatched desired interfaces are created, and any
        existing interface left unmatched at the end is deleted.
        host_name is used only for error reporting.
        """
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            parameters = {'hostid': host_id, 'groups': group_ids, 'status': status}
            if proxy_id:
                parameters['proxy_hostid'] = proxy_id
            if visible_name:
                parameters['name'] = visible_name
            self._zapi.host.update(parameters)
            # Interfaces still in this list after matching will be deleted.
            # NOTE(review): despite the name, this is the same list object as
            # exist_interface_list, not a copy.
            interface_list_copy = exist_interface_list
            if interfaces:
                for interface in interfaces:
                    # flag: whether a same-type existing interface was found.
                    flag = False
                    interface_str = interface
                    for exist_interface in exist_interface_list:
                        interface_type = interface['type']
                        exist_interface_type = int(exist_interface['type'])
                        if interface_type == exist_interface_type:
                            # update
                            interface_str['interfaceid'] = exist_interface['interfaceid']
                            self._zapi.hostinterface.update(interface_str)
                            flag = True
                            interface_list_copy.remove(exist_interface)
                            break
                    if not flag:
                        # add
                        interface_str['hostid'] = host_id
                        self._zapi.hostinterface.create(interface_str)
            # remove
            remove_interface_ids = []
            for remove_interface in interface_list_copy:
                interface_id = remove_interface['interfaceid']
                remove_interface_ids.append(interface_id)
            if len(remove_interface_ids) > 0:
                self._zapi.hostinterface.delete(remove_interface_ids)
        except Exception as e:
            self._module.fail_json(msg="Failed to update host %s: %s" % (host_name, e))
def delete_host(self, host_id, host_name):
try:
if self._module.check_mode:
self._module.exit_json(changed=True)
self._zapi.host.delete([host_id])
except Exception as e:
self._module.fail_json(msg="Failed to delete host %s: %s" % (host_name, e))
# get host by host name
def get_host_by_host_name(self, host_name):
host_list = self._zapi.host.get({'output': 'extend', 'filter': {'host': [host_name]}})
if len(host_list) < 1:
self._module.fail_json(msg="Host not found: %s" % host_name)
else:
return host_list[0]
# get proxyid by proxy name
def get_proxyid_by_proxy_name(self, proxy_name):
proxy_list = self._zapi.proxy.get({'output': 'extend', 'filter': {'host': [proxy_name]}})
if len(proxy_list) < 1:
self._module.fail_json(msg="Proxy not found: %s" % proxy_name)
else:
return proxy_list[0]['proxyid']
# get group ids by group names
def get_group_ids_by_group_names(self, group_names):
group_ids = []
if self.check_host_group_exist(group_names):
group_list = self._zapi.hostgroup.get({'output': 'extend', 'filter': {'name': group_names}})
for group in group_list:
group_id = group['groupid']
group_ids.append({'groupid': group_id})
return group_ids
# get host templates by host id
def get_host_templates_by_host_id(self, host_id):
template_ids = []
template_list = self._zapi.template.get({'output': 'extend', 'hostids': host_id})
for template in template_list:
template_ids.append(template['templateid'])
return template_ids
# get host groups by host id
def get_host_groups_by_host_id(self, host_id):
exist_host_groups = []
host_groups_list = self._zapi.hostgroup.get({'output': 'extend', 'hostids': host_id})
if len(host_groups_list) >= 1:
for host_groups_name in host_groups_list:
exist_host_groups.append(host_groups_name['name'])
return exist_host_groups
# check the exist_interfaces whether it equals the interfaces or not
def check_interface_properties(self, exist_interface_list, interfaces):
    """Return True if the desired interfaces differ from the existing ones.

    Two checks: (1) the sets of ports must match exactly; (2) for interfaces
    that share a port, every property present in the desired interface must
    have the same (stringified) value in the existing one.
    """
    wanted_ports = [] if interfaces is None else [int(item['port']) for item in interfaces]
    current_ports = [int(item['port']) for item in exist_interface_list]
    # Any difference in the set of ports means an update is needed.
    if set(wanted_ports) != set(current_ports):
        return True
    # Ports match; compare every property of interfaces sharing a port.
    for current in exist_interface_list:
        current_port = int(current['port'])
        for wanted in interfaces:
            if int(wanted['port']) != current_port:
                continue
            for key in wanted:
                if str(current[key]) != str(wanted[key]):
                    return True
    return False
# get the status of host by host
def get_host_status_by_host(self, host):
    """Extract the 'status' field from a host record dict."""
    status = host['status']
    return status
# check all the properties before link or clear template
def check_all_properties(self, host_id, host_groups, status, interfaces, template_ids,
                         exist_interfaces, host, proxy_id, visible_name):
    """Return True when any tracked host property differs from the desired state."""
    # Group membership.
    if set(host_groups) != set(self.get_host_groups_by_host_id(host_id)):
        return True
    # Enabled/disabled status.
    if int(status) != int(self.get_host_status_by_host(host)):
        return True
    # Interface definitions.
    if self.check_interface_properties(exist_interfaces, interfaces):
        return True
    # Linked templates.
    if set(list(template_ids)) != set(self.get_host_templates_by_host_id(host_id)):
        return True
    # Monitoring proxy.
    if host['proxy_hostid'] != proxy_id:
        return True
    # Visible (display) name.
    if host['name'] != visible_name:
        return True
    return False
# link or clear template of the host
def link_or_clear_template(self, host_id, template_id_list):
    """Link the given templates to the host; unlink-and-clear any others."""
    desired = set(template_id_list)
    current = set(self.get_host_templates_by_host_id(host_id))
    # Templates linked now but no longer wanted must be cleared on update.
    request = {'hostid': host_id,
               'templates': list(desired),
               'templates_clear': list(current.difference(desired))}
    try:
        if self._module.check_mode:
            self._module.exit_json(changed=True)
        self._zapi.host.update(request)
    except Exception as e:
        self._module.fail_json(msg="Failed to link template to host: %s" % e)
# Update the host inventory_mode
def update_inventory_mode(self, host_id, inventory_mode):
    """Set the host's inventory_mode ('automatic'/'manual'/'disabled'); no-op when unset."""
    # nothing was set, do nothing
    if not inventory_mode:
        return
    # Map the module's choice strings onto the Zabbix API integer codes;
    # any other value is passed through unchanged.
    mode_codes = {'automatic': 1, 'manual': 0, 'disabled': -1}
    if inventory_mode in mode_codes:
        inventory_mode = mode_codes[inventory_mode]
    # watch for - https://support.zabbix.com/browse/ZBX-6033
    request = {'hostid': host_id, 'inventory_mode': inventory_mode}
    try:
        if self._module.check_mode:
            self._module.exit_json(changed=True)
        self._zapi.host.update(request)
    except Exception as e:
        self._module.fail_json(msg="Failed to set inventory_mode to host: %s" % e)
def main():
    """Ansible entry point: reconcile a Zabbix host with the requested state.

    Creates, updates or deletes the host named ``host_name`` according to
    ``state``, and synchronizes its groups, templates, interfaces, proxy,
    visible name and inventory mode. Exits via module.exit_json/fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            host_name=dict(type='str', required=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            host_groups=dict(type='list', required=False),
            link_templates=dict(type='list', required=False),
            status=dict(default="enabled", choices=['enabled', 'disabled']),
            state=dict(default="present", choices=['present', 'absent']),
            inventory_mode=dict(required=False, choices=['automatic', 'manual', 'disabled']),
            timeout=dict(type='int', default=10),
            interfaces=dict(type='list', required=False),
            force=dict(type='bool', default=True),
            proxy=dict(type='str', required=False),
            visible_name=dict(type='str', required=False)
        ),
        supports_check_mode=True
    )
    if not HAS_ZABBIX_API:
        # NOTE: fixed typo in the user-facing message ("requried" -> "required").
        module.fail_json(msg="Missing required zabbix-api module (check docs or install with: pip install zabbix-api)")
    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    host_name = module.params['host_name']
    visible_name = module.params['visible_name']
    host_groups = module.params['host_groups']
    link_templates = module.params['link_templates']
    inventory_mode = module.params['inventory_mode']
    status = module.params['status']
    state = module.params['state']
    timeout = module.params['timeout']
    interfaces = module.params['interfaces']
    force = module.params['force']
    proxy = module.params['proxy']
    # convert enabled to 0; disabled to 1 (Zabbix API status codes)
    status = 1 if status == "disabled" else 0
    zbx = None
    # login to zabbix
    try:
        zbx = ZabbixAPIExtends(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password)
        zbx.login(login_user, login_password)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)
    host = Host(module, zbx)
    # Resolve template names / group names to their Zabbix ids up front.
    template_ids = []
    if link_templates:
        template_ids = host.get_template_ids(link_templates)
    group_ids = []
    if host_groups:
        group_ids = host.get_group_ids_by_group_names(host_groups)
    # Remember the agent interface IP (type == 1) purely for the result message.
    ip = ""
    if interfaces:
        for interface in interfaces:
            if interface['type'] == 1:
                ip = interface['ip']
    # check if host exist
    is_host_exist = host.is_host_exist(host_name)
    if is_host_exist:
        # Use proxy specified, or set to None when updating host
        if proxy:
            proxy_id = host.get_proxyid_by_proxy_name(proxy)
        else:
            proxy_id = None
        # get host id by host name
        zabbix_host_obj = host.get_host_by_host_name(host_name)
        host_id = zabbix_host_obj['hostid']
        if state == "absent":
            # remove host
            host.delete_host(host_id, host_name)
            module.exit_json(changed=True, result="Successfully delete host %s" % host_name)
        else:
            if not group_ids:
                module.fail_json(msg="Specify at least one group for updating host '%s'." % host_name)
            if not force:
                module.fail_json(changed=False, result="Host present, Can't update configuration without force")
            # get exist host's interfaces
            exist_interfaces = host._zapi.hostinterface.get({'output': 'extend', 'hostids': host_id})
            exist_interfaces_copy = copy.deepcopy(exist_interfaces)
            # update host
            interfaces_len = len(interfaces) if interfaces else 0
            if len(exist_interfaces) > interfaces_len:
                # More interfaces exist than requested: some must be removed,
                # so compare against the live (mutable) interface list.
                if host.check_all_properties(host_id, host_groups, status, interfaces, template_ids,
                                             exist_interfaces, zabbix_host_obj, proxy_id, visible_name):
                    host.link_or_clear_template(host_id, template_ids)
                    host.update_host(host_name, group_ids, status, host_id,
                                     interfaces, exist_interfaces, proxy_id, visible_name)
                    module.exit_json(changed=True,
                                     result="Successfully update host %s (%s) and linked with template '%s'"
                                            % (host_name, ip, link_templates))
                else:
                    module.exit_json(changed=False)
            else:
                # Compare against an untouched deep copy so the check is not
                # affected by mutations made during the update itself.
                if host.check_all_properties(host_id, host_groups, status, interfaces, template_ids,
                                             exist_interfaces_copy, zabbix_host_obj, proxy_id, visible_name):
                    host.update_host(host_name, group_ids, status, host_id, interfaces, exist_interfaces, proxy_id, visible_name)
                    host.link_or_clear_template(host_id, template_ids)
                    host.update_inventory_mode(host_id, inventory_mode)
                    module.exit_json(changed=True,
                                     result="Successfully update host %s (%s) and linked with template '%s'"
                                            % (host_name, ip, link_templates))
                else:
                    module.exit_json(changed=False)
    else:
        if state == "absent":
            # the host is already deleted.
            module.exit_json(changed=False)
        # Use proxy specified, or set to 0 when adding new host
        if proxy:
            proxy_id = host.get_proxyid_by_proxy_name(proxy)
        else:
            proxy_id = 0
        if not group_ids:
            module.fail_json(msg="Specify at least one group for creating host '%s'." % host_name)
        if not interfaces or (interfaces and len(interfaces) == 0):
            module.fail_json(msg="Specify at least one interface for creating host '%s'." % host_name)
        # create host
        host_id = host.add_host(host_name, group_ids, status, interfaces, proxy_id, visible_name)
        host.link_or_clear_template(host_id, template_ids)
        host.update_inventory_mode(host_id, inventory_mode)
        module.exit_json(changed=True, result="Successfully added host %s (%s) and linked with template '%s'" % (
            host_name, ip, link_templates))
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
marckuz/django | refs/heads/master | tests/logging_tests/logconfig.py | 609 | import logging
from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
class MyHandler(logging.Handler):
    """Logging handler that snapshots ``settings.LOGGING`` at construction time."""
    def __init__(self):
        logging.Handler.__init__(self)
        # Capture the active logging configuration so it can be inspected later.
        self.config = settings.LOGGING
class MyEmailBackend(BaseEmailBackend):
    """Email backend that silently discards every message it is given."""
    def send_messages(self, email_messages):
        # Intentionally a no-op: messages are dropped, nothing is sent.
        pass
|
omererdem/honeything | refs/heads/master | src/cwmp/tr/x_gmoca_1_0.py | 1 | #!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# AUTO-GENERATED BY parse-schema.py
#
# DO NOT EDIT!!
#
#pylint: disable-msg=C6202
#pylint: disable-msg=C6409
#pylint: disable-msg=C6310
# These should not actually be necessary (bugs in gpylint?):
#pylint: disable-msg=E1101
#pylint: disable-msg=W0231
#
"""Auto-generated from spec: urn:google-com:x-gmoca-1-0."""
import core
class X_GOOGLE_COM_GMOCA_v1_0(core.Exporter):
  """Represents X_GOOGLE_COM_GMOCA_v1_0."""
  def __init__(self, **defaults):
    core.Exporter.__init__(self, defaults=defaults)
    # Expose the single DebugOutput parameter through the Exporter API.
    self.Export(params=['DebugOutput'])
if __name__ == '__main__':
print core.DumpSchema(X_GOOGLE_COM_GMOCA_v1_0)
|
beckastar/django | refs/heads/master | django/core/files/uploadedfile.py | 14 | """
Classes representing uploaded files.
"""
import errno
import os
from io import BytesIO
from django.conf import settings
from django.core.files.base import File
from django.core.files import temp as tempfile
from django.utils.encoding import force_str
__all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile',
'SimpleUploadedFile')
class UploadedFile(File):
    """
    An abstract uploaded file (``TemporaryUploadedFile`` and
    ``InMemoryUploadedFile`` are the built-in concrete subclasses).
    An ``UploadedFile`` object behaves somewhat like a file object and
    represents some file data that the user submitted with a form.
    """
    # Default read size (64 KiB) used when iterating the file in chunks.
    DEFAULT_CHUNK_SIZE = 64 * 2 ** 10
    def __init__(self, file=None, name=None, content_type=None, size=None, charset=None, content_type_extra=None):
        super(UploadedFile, self).__init__(file, name)
        self.size = size
        self.content_type = content_type
        self.charset = charset
        self.content_type_extra = content_type_extra
    def __repr__(self):
        return force_str("<%s: %s (%s)>" % (
            self.__class__.__name__, self.name, self.content_type))
    def _get_name(self):
        return self._name
    def _set_name(self, name):
        # Sanitize the file name so that it can't be dangerous.
        if name is not None:
            # Just use the basename of the file -- anything else is dangerous.
            name = os.path.basename(name)
            # File names longer than 255 characters can cause problems on older OSes.
            # Truncate the stem but keep the (possibly truncated) extension.
            if len(name) > 255:
                name, ext = os.path.splitext(name)
                ext = ext[:255]
                name = name[:255 - len(ext)] + ext
        self._name = name
    # ``name`` is sanitized on assignment via _set_name above.
    name = property(_get_name, _set_name)
class TemporaryUploadedFile(UploadedFile):
    """
    A file uploaded to a temporary location (i.e. stream-to-disk).
    """
    def __init__(self, name, content_type, size, charset, content_type_extra=None):
        # Stream to a NamedTemporaryFile; honor FILE_UPLOAD_TEMP_DIR when set.
        if settings.FILE_UPLOAD_TEMP_DIR:
            file = tempfile.NamedTemporaryFile(suffix='.upload',
                dir=settings.FILE_UPLOAD_TEMP_DIR)
        else:
            file = tempfile.NamedTemporaryFile(suffix='.upload')
        super(TemporaryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra)
    def temporary_file_path(self):
        """
        Returns the full path of this file.
        """
        return self.file.name
    def close(self):
        try:
            return self.file.close()
        except OSError as e:
            # ENOENT is tolerated: the temp file may already be gone.
            if e.errno != errno.ENOENT:
                # Means the file was moved or deleted before the tempfile
                # could unlink it. Still sets self.file.close_called and
                # calls self.file.file.close() before the exception
                raise
class InMemoryUploadedFile(UploadedFile):
    """
    A file uploaded into memory (i.e. stream-to-memory).
    """
    def __init__(self, file, field_name, name, content_type, size, charset, content_type_extra=None):
        super(InMemoryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra)
        # Name of the form field this file was submitted under.
        self.field_name = field_name
    def open(self, mode=None):
        # The stream is already in memory; just rewind it.
        self.file.seek(0)
    def close(self):
        # No underlying OS resource to release.
        pass
    def chunks(self, chunk_size=None):
        # The whole payload is in memory, so yield it as a single chunk;
        # chunk_size is accepted for interface compatibility but ignored.
        self.file.seek(0)
        yield self.read()
    def multiple_chunks(self, chunk_size=None):
        # Since it's in memory, we'll never have multiple chunks.
        return False
class SimpleUploadedFile(InMemoryUploadedFile):
    """
    A simple representation of a file, which just has content, size, and a name.
    """
    def __init__(self, name, content, content_type='text/plain'):
        # Treat a missing/empty payload as zero bytes.
        if not content:
            content = b''
        stream = BytesIO(content)
        super(SimpleUploadedFile, self).__init__(
            stream, None, name, content_type, len(content), None, None)

    @classmethod
    def from_dict(cls, file_dict):
        """
        Creates a SimpleUploadedFile object from
        a dictionary object with the following keys:
           - filename
           - content-type
           - content
        """
        content_type = file_dict.get('content-type', 'text/plain')
        return cls(file_dict['filename'], file_dict['content'], content_type)
|
radekosmulski/courses | refs/heads/master | deeplearning2/bcolz_array_iterator.py | 13 | import numpy as np
import bcolz
import threading
class BcolzArrayIterator(object):
    """
    Returns an iterator object into Bcolz carray files
    Original version by Thiago Ramon Gonçalves Montoya
    Docs (and discovery) by @MPJansen
    Refactoring, performance improvements, fixes by Jeremy Howard j@fast.ai
    :Example:
    X = bcolz.open('file_path/feature_file.bc', mode='r')
    y = bcolz.open('file_path/label_file.bc', mode='r')
    trn_batches = BcolzArrayIterator(X, y, batch_size=64, shuffle=True)
    model.fit_generator(generator=trn_batches, samples_per_epoch=trn_batches.N, nb_epoch=1)
    :param X: Input features
    :param y: (optional) Input labels
    :param w: (optional) Input feature weights
    :param batch_size: (optional) Batch size, defaults to 32
    :param shuffle: (optional) Shuffle batches, defaults to false
    :param seed: (optional) Provide a seed to shuffle, defaults to a random seed
    :rtype: BcolzArrayIterator
    >>> A = np.random.random((32*10 + 17, 10, 10))
    >>> c = bcolz.carray(A, rootdir='test.bc', mode='w', expectedlen=A.shape[0], chunklen=16)
    >>> c.flush()
    >>> Bc = bcolz.open('test.bc')
    >>> bc_it = BcolzArrayIterator(Bc, shuffle=True)
    >>> C_list = [next(bc_it) for i in range(11)]
    >>> C = np.concatenate(C_list)
    >>> np.allclose(sorted(A.flatten()), sorted(C.flatten()))
    True
    """
    def __init__(self, X, y=None, w=None, batch_size=32, shuffle=False, seed=None):
        # Fixed error messages: the original implicit string concatenation
        # produced "...same lengthFound: ..." with no separator.
        if y is not None and len(X) != len(y):
            raise ValueError('X (features) and y (labels) should have the same length. '
                             'Found: X.shape = %s, y.shape = %s' % (X.shape, y.shape))
        if w is not None and len(X) != len(w):
            raise ValueError('X (features) and w (weights) should have the same length. '
                             'Found: X.shape = %s, w.shape = %s' % (X.shape, w.shape))
        # Batches are assembled from whole carray chunks, so the batch size
        # must be an exact multiple of the chunk length.
        if batch_size % X.chunklen != 0:
            raise ValueError('batch_size needs to be a multiple of X.chunklen')
        self.chunks_per_batch = batch_size // X.chunklen
        self.X = X
        self.y = y  # simplified: `y if y is not None else None` was a no-op
        self.w = w  # likewise for w
        self.N = X.shape[0]
        self.batch_size = batch_size
        self.batch_index = 0
        self.total_batches_seen = 0
        self.lock = threading.Lock()  # guards batch_index for multi-threaded generators
        self.shuffle = shuffle
        self.seed = seed
    def reset(self):
        """Restart iteration from the first (or a freshly shuffled) chunk."""
        self.batch_index = 0
    def next(self):
        """Return the next batch, assembled from whole carray chunks.

        Returns batch_x, (batch_x, batch_y) or (batch_x, batch_y, batch_w)
        depending on which optional arrays were supplied.
        """
        with self.lock:
            if self.batch_index == 0:
                if self.seed is not None:
                    # Re-seed per epoch so shuffles are reproducible but distinct.
                    np.random.seed(self.seed + self.total_batches_seen)
                # One extra index (== nchunks) stands for the leftover partial chunk.
                self.index_array = (np.random.permutation(self.X.nchunks + 1) if self.shuffle
                                    else np.arange(self.X.nchunks + 1))
            batches_x, batches_y, batches_w = [], [], []
            for i in range(self.chunks_per_batch):
                current_index = self.index_array[self.batch_index]
                if current_index == self.X.nchunks:
                    # Trailing elements that don't fill a whole chunk.
                    batches_x.append(self.X.leftover_array[:self.X.leftover_elements])
                    current_batch_size = self.X.leftover_elements
                else:
                    batches_x.append(self.X.chunks[current_index][:])
                    current_batch_size = self.X.chunklen
                self.batch_index += 1
                self.total_batches_seen += 1
                idx = current_index * self.X.chunklen
                if self.y is not None: batches_y.append(self.y[idx: idx + current_batch_size])
                if self.w is not None: batches_w.append(self.w[idx: idx + current_batch_size])
                if self.batch_index >= len(self.index_array):
                    # End of epoch: wrap around, even mid-batch.
                    self.batch_index = 0
                    break
            batch_x = np.concatenate(batches_x)
            if self.y is None: return batch_x
            batch_y = np.concatenate(batches_y)
            if self.w is None: return batch_x, batch_y
            batch_w = np.concatenate(batches_w)
            return batch_x, batch_y, batch_w
    def __iter__(self): return self
    def __next__(self, *args, **kwargs): return self.next(*args, **kwargs)
|
Kazade/NeHe-Website | refs/heads/master | google_appengine/lib/django-1.2/tests/regressiontests/views/urls.py | 38 | # coding: utf-8
from os import path
from django.conf.urls.defaults import *
from models import *
import views
# Filesystem locations used by the static-serving view below.
base_dir = path.dirname(path.abspath(__file__))
media_dir = path.join(base_dir, 'media')
locale_dir = path.join(base_dir, 'locale')
# Keyword-argument dicts for the javascript_catalog i18n views.
js_info_dict = {
    'domain': 'djangojs',
    'packages': ('regressiontests.views',),
}
js_info_dict_english_translation = {
    'domain': 'djangojs',
    'packages': ('regressiontests.views.app0',),
}
js_info_dict_multi_packages1 = {
    'domain': 'djangojs',
    'packages': ('regressiontests.views.app1', 'regressiontests.views.app2'),
}
js_info_dict_multi_packages2 = {
    'domain': 'djangojs',
    'packages': ('regressiontests.views.app3', 'regressiontests.views.app4'),
}
# Shared keyword arguments for the date-based generic views.
date_based_info_dict = {
    'queryset': Article.objects.all(),
    'date_field': 'date_created',
    'month_format': '%m',
}
numeric_days_info_dict = dict(date_based_info_dict, day_format='%d')
date_based_datefield_info_dict = dict(date_based_info_dict, queryset=DateArticle.objects.all())
urlpatterns = patterns('',
    (r'^$', views.index_page),
    # Default views
    (r'^shortcut/(\d+)/(.*)/$', 'django.views.defaults.shortcut'),
    (r'^non_existing_url/', 'django.views.defaults.page_not_found'),
    (r'^server_error/', 'django.views.defaults.server_error'),
    # i18n views
    (r'^i18n/', include('django.conf.urls.i18n')),
    (r'^jsi18n/$', 'django.views.i18n.javascript_catalog', js_info_dict),
    (r'^jsi18n_english_translation/$', 'django.views.i18n.javascript_catalog', js_info_dict_english_translation),
    (r'^jsi18n_multi_packages1/$', 'django.views.i18n.javascript_catalog', js_info_dict_multi_packages1),
    (r'^jsi18n_multi_packages2/$', 'django.views.i18n.javascript_catalog', js_info_dict_multi_packages2),
    # Static views
    (r'^site_media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': media_dir}),
    # Special URLs for particular regression cases.
    url(u'^中文/$', 'regressiontests.views.views.redirect'),
    url(u'^中文/target/$', 'regressiontests.views.views.index_page'),
)
# Date-based generic views.
urlpatterns += patterns('django.views.generic.date_based',
    (r'^date_based/object_detail/(?P<year>\d{4})/(?P<month>\d{1,2})/(?P<day>\d{1,2})/(?P<slug>[-\w]+)/$',
        'object_detail',
        dict(slug_field='slug', **date_based_info_dict)),
    (r'^date_based/object_detail/(?P<year>\d{4})/(?P<month>\d{1,2})/(?P<day>\d{1,2})/(?P<slug>[-\w]+)/allow_future/$',
        'object_detail',
        dict(allow_future=True, slug_field='slug', **date_based_info_dict)),
    (r'^date_based/archive_day/(?P<year>\d{4})/(?P<month>\d{1,2})/(?P<day>\d{1,2})/$',
        'archive_day',
        numeric_days_info_dict),
    (r'^date_based/archive_month/(?P<year>\d{4})/(?P<month>\d{1,2})/$',
        'archive_month',
        date_based_info_dict),
    (r'^date_based/datefield/archive_month/(?P<year>\d{4})/(?P<month>\d{1,2})/$',
        'archive_month',
        date_based_datefield_info_dict),
)
# crud generic views.
urlpatterns += patterns('django.views.generic.create_update',
    (r'^create_update/member/create/article/$', 'create_object',
        dict(login_required=True, model=Article)),
    (r'^create_update/create/article/$', 'create_object',
        dict(post_save_redirect='/views/create_update/view/article/%(slug)s/',
             model=Article)),
    (r'^create_update/update/article/(?P<slug>[-\w]+)/$', 'update_object',
        dict(post_save_redirect='/views/create_update/view/article/%(slug)s/',
             slug_field='slug', model=Article)),
    (r'^create_update/create_custom/article/$', views.custom_create),
    (r'^create_update/delete/article/(?P<slug>[-\w]+)/$', 'delete_object',
        dict(post_delete_redirect='/views/create_update/', slug_field='slug',
             model=Article)),
    # No post_save_redirect and no get_absolute_url on model.
    (r'^create_update/no_redirect/create/article/$', 'create_object',
        dict(model=Article)),
    (r'^create_update/no_redirect/update/article/(?P<slug>[-\w]+)/$',
        'update_object', dict(slug_field='slug', model=Article)),
    # get_absolute_url on model, but no passed post_save_redirect.
    (r'^create_update/no_url/create/article/$', 'create_object',
        dict(model=UrlArticle)),
    (r'^create_update/no_url/update/article/(?P<slug>[-\w]+)/$',
        'update_object', dict(slug_field='slug', model=UrlArticle)),
)
# a view that raises an exception for the debug view
urlpatterns += patterns('',
    (r'^raises/$', views.raises),
    (r'^raises404/$', views.raises404),
)
# redirects, both temporary and permanent, with non-ASCII targets
urlpatterns += patterns('django.views.generic.simple',
    ('^nonascii_redirect/$', 'redirect_to',
        {'url': u'/views/中文/target/', 'permanent': False}),
    ('^permanent_nonascii_redirect/$', 'redirect_to',
        {'url': u'/views/中文/target/', 'permanent': True}),
)
urlpatterns += patterns('regressiontests.views.views',
    url(r'view_exception/(?P<n>\d+)/$', 'view_exception', name='view_exception'),
    url(r'template_exception/(?P<n>\d+)/$', 'template_exception', name='template_exception'),
    url(r'^raises_template_does_not_exist/$', 'raises_template_does_not_exist', name='raises_template_does_not_exist'),
)
|
mne-tools/mne-tools.github.io | refs/heads/main | 0.20/_downloads/4bc6b51e34f6266c5a9cb987ba687b13/plot_10_evoked_overview.py | 1 | """
.. _tut-evoked-class:
The Evoked data structure: evoked/averaged data
===============================================
This tutorial covers the basics of creating and working with :term:`evoked`
data. It introduces the :class:`~mne.Evoked` data structure in detail,
including how to load, query, subselect, export, and plot data from an
:class:`~mne.Evoked` object. For info on creating an :class:`~mne.Evoked`
object from (possibly simulated) data in a :class:`NumPy array
<numpy.ndarray>`, see :ref:`tut_creating_data_structures`.
.. contents:: Page contents
:local:
:depth: 2
As usual we'll start by importing the modules we need:
"""
import os
import mne
###############################################################################
# Creating ``Evoked`` objects from ``Epochs``
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# :class:`~mne.Evoked` objects typically store an EEG or MEG signal that has
# been *averaged* over multiple :term:`epochs`, which is a common technique for
# estimating stimulus-evoked activity. The data in an :class:`~mne.Evoked`
# object are stored in an :class:`array <numpy.ndarray>` of shape
# ``(n_channels, n_times)`` (in contrast to an :class:`~mne.Epochs` object,
# which stores data of shape ``(n_epochs, n_channels, n_times)``). Thus to
# create an :class:`~mne.Evoked` object, we'll start by epoching some raw data,
# and then averaging together all the epochs from one condition:
# Locate (downloading if necessary) the MNE sample dataset and load the raw file.
sample_data_folder = mne.datasets.sample.data_path()
sample_data_raw_file = os.path.join(sample_data_folder, 'MEG', 'sample',
                                    'sample_audvis_raw.fif')
raw = mne.io.read_raw_fif(sample_data_raw_file, verbose=False)
events = mne.find_events(raw, stim_channel='STI 014')
# we'll skip the "face" and "buttonpress" conditions, to save memory:
event_dict = {'auditory/left': 1, 'auditory/right': 2, 'visual/left': 3,
              'visual/right': 4}
epochs = mne.Epochs(raw, events, tmin=-0.3, tmax=0.7, event_id=event_dict,
                    preload=True)
# Average the left-auditory epochs into a single Evoked object.
evoked = epochs['auditory/left'].average()
del raw  # reduce memory usage
###############################################################################
# Basic visualization of ``Evoked`` objects
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# We can visualize the average evoked response for left-auditory stimuli using
# the :meth:`~mne.Evoked.plot` method, which yields a butterfly plot of each
# channel type:
evoked.plot()
###############################################################################
# Like the ``plot()`` methods for :meth:`Raw <mne.io.Raw.plot>` and
# :meth:`Epochs <mne.Epochs.plot>` objects,
# :meth:`evoked.plot() <mne.Evoked.plot>` has many parameters for customizing
# the plot output, such as color-coding channel traces by scalp location, or
# plotting the :term:`global field power <GFP>` alongside the channel traces.
# See :ref:`tut-visualize-evoked` for more information about visualizing
# :class:`~mne.Evoked` objects.
#
#
# Subselecting ``Evoked`` data
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# .. sidebar:: Evokeds are not memory-mapped
#
# :class:`~mne.Evoked` objects use a :attr:`~mne.Evoked.data` *attribute*
# rather than a :meth:`~mne.Epochs.get_data` *method*; this reflects the fact
# that the data in :class:`~mne.Evoked` objects are always loaded into
# memory, never `memory-mapped`_ from their location on disk (because they
# are typically *much* smaller than :class:`~mne.io.Raw` or
# :class:`~mne.Epochs` objects).
#
#
# Unlike :class:`~mne.io.Raw` and :class:`~mne.Epochs` objects,
# :class:`~mne.Evoked` objects do not support selection by square-bracket
# indexing. Instead, data can be subselected by indexing the
# :attr:`~mne.Evoked.data` attribute:
print(evoked.data[:2, :3]) # first 2 channels, first 3 timepoints
###############################################################################
# To select based on time in seconds, the :meth:`~mne.Evoked.time_as_index`
# method can be useful, although beware that depending on the sampling
# frequency, the number of samples in a span of given duration may not always
# be the same (see the :ref:`time-as-index` section of the
# :ref:`tutorial about Raw data <tut-raw-class>` for details).
#
#
# Selecting, dropping, and reordering channels
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# By default, when creating :class:`~mne.Evoked` data from an
# :class:`~mne.Epochs` object, only the "data" channels will be retained:
# ``eog``, ``ecg``, ``stim``, and ``misc`` channel types will be dropped. You
# can control which channel types are retained via the ``picks`` parameter of
# :meth:`epochs.average() <mne.Epochs.average>`, by passing ``'all'`` to
# retain all channels, or by passing a list of integers, channel names, or
# channel types. See the documentation of :meth:`~mne.Epochs.average` for
# details.
#
# If you've already created the :class:`~mne.Evoked` object, you can use the
# :meth:`~mne.Evoked.pick`, :meth:`~mne.Evoked.pick_channels`,
# :meth:`~mne.Evoked.pick_types`, and :meth:`~mne.Evoked.drop_channels` methods
# to modify which channels are included in an :class:`~mne.Evoked` object.
# You can also use :meth:`~mne.Evoked.reorder_channels` for this purpose; any
# channel names not provided to :meth:`~mne.Evoked.reorder_channels` will be
# dropped. Note that *channel* selection methods modify the object in-place, so
# in interactive/exploratory sessions you may want to create a
# :meth:`~mne.Evoked.copy` first.
# Keep only EEG channels (copy first: pick_types modifies in place).
evoked_eeg = evoked.copy().pick_types(meg=False, eeg=True)
print(evoked_eeg.ch_names)
# reorder_channels keeps only the listed channels, in the given order.
new_order = ['EEG 002', 'MEG 2521', 'EEG 003']
evoked_subset = evoked.copy().reorder_channels(new_order)
print(evoked_subset.ch_names)
###############################################################################
# Similarities among the core data structures
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# :class:`~mne.Evoked` objects have many similarities with :class:`~mne.io.Raw`
# and :class:`~mne.Epochs` objects, including:
#
# - They can be loaded from and saved to disk in ``.fif`` format, and their
# data can be exported to a :class:`NumPy array <numpy.ndarray>` (but through
# the :attr:`~mne.Evoked.data` attribute, not through a ``get_data()``
# method). :class:`Pandas DataFrame <pandas.DataFrame>` export is also
# available through the :meth:`~mne.Evoked.to_data_frame` method.
#
# - You can change the name or type of a channel using
# :meth:`evoked.rename_channels() <mne.Evoked.rename_channels>` or
# :meth:`evoked.set_channel_types() <mne.Evoked.set_channel_types>`.
# Both methods take :class:`dictionaries <dict>` where the keys are existing
# channel names, and the values are the new name (or type) for that channel.
# Existing channels that are not in the dictionary will be unchanged.
#
# - :term:`SSP projector <projector>` manipulation is possible through
# :meth:`~mne.Evoked.add_proj`, :meth:`~mne.Evoked.del_proj`, and
# :meth:`~mne.Evoked.plot_projs_topomap` methods, and the
# :attr:`~mne.Evoked.proj` attribute. See :ref:`tut-artifact-ssp` for more
# information on SSP.
#
# - Like :class:`~mne.io.Raw` and :class:`~mne.Epochs` objects,
# :class:`~mne.Evoked` objects have :meth:`~mne.Evoked.copy`,
# :meth:`~mne.Evoked.crop`, :meth:`~mne.Evoked.time_as_index`,
# :meth:`~mne.Evoked.filter`, and :meth:`~mne.Evoked.resample` methods.
#
# - Like :class:`~mne.io.Raw` and :class:`~mne.Epochs` objects,
# :class:`~mne.Evoked` objects have ``evoked.times``,
# :attr:`evoked.ch_names <mne.Evoked.ch_names>`, and :class:`info <mne.Info>`
# attributes.
#
#
# .. _tut-section-load-evk:
#
# Loading and saving ``Evoked`` data
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# Single :class:`~mne.Evoked` objects can be saved to disk with the
# :meth:`evoked.save() <mne.Evoked.save>` method. One difference between
# :class:`~mne.Evoked` objects and the other data structures is that multiple
# :class:`~mne.Evoked` objects can be saved into a single ``.fif`` file, using
# :func:`mne.write_evokeds`. The :ref:`example data <sample-dataset>`
# includes just such a ``.fif`` file: the data have already been epoched and
# averaged, and the file contains separate :class:`~mne.Evoked` objects for
# each experimental condition:
# A single .fif file can hold several Evoked objects (one per condition).
sample_data_evk_file = os.path.join(sample_data_folder, 'MEG', 'sample',
                                    'sample_audvis-ave.fif')
evokeds_list = mne.read_evokeds(sample_data_evk_file, verbose=False)
print(evokeds_list)
print(type(evokeds_list))
###############################################################################
# Notice that :func:`mne.read_evokeds` returned a :class:`list` of
# :class:`~mne.Evoked` objects, and each one has an ``evoked.comment``
# attribute describing the experimental condition that was averaged to
# generate the estimate:
# Each Evoked carries a comment naming the condition it averages.
for evok in evokeds_list:
    print(evok.comment)
###############################################################################
# If you want to load only some of the conditions present in a ``.fif`` file,
# :func:`~mne.read_evokeds` has a ``condition`` parameter, which takes either a
# string (matched against the comment attribute of the evoked objects on disk),
# or an integer selecting the :class:`~mne.Evoked` object based on the order
# it's stored in the file. Passing lists of integers or strings is also
# possible. If only one object is selected, the :class:`~mne.Evoked` object
# will be returned directly (rather than a length-one list containing it):
# Selecting a single condition returns the Evoked directly, not a list.
right_vis = mne.read_evokeds(sample_data_evk_file, condition='Right visual')
print(right_vis)
print(type(right_vis))
###############################################################################
# Above, when we created an :class:`~mne.Evoked` object by averaging epochs,
# baseline correction was applied by default when we extracted epochs from the
# :class:`~mne.io.Raw` object (the default baseline period is ``(None, 0)``,
# which assured zero mean for times before the stimulus event). In contrast, if
# we plot the first :class:`~mne.Evoked` object in the list that was loaded
# from disk, we'll see that the data have not been baseline-corrected:
evokeds_list[0].plot(picks='eeg')
###############################################################################
# This can be remedied by either passing a ``baseline`` parameter to
# :func:`mne.read_evokeds`, or by applying baseline correction after loading,
# as shown here:
evokeds_list[0].apply_baseline((None, 0))
evokeds_list[0].plot(picks='eeg')
###############################################################################
# Notice that :meth:`~mne.Evoked.apply_baseline` operated in-place. Similarly,
# :class:`~mne.Evoked` objects may have been saved to disk with or without
# :term:`projectors <projector>` applied; you can pass ``proj=True`` to the
# :func:`~mne.read_evokeds` function, or use the :meth:`~mne.Evoked.apply_proj`
# method after loading.
#
#
# Combining ``Evoked`` objects
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# One way to pool data across multiple conditions when estimating evoked
# responses is to do so *prior to averaging* (recall that MNE-Python can select
# based on partial matching of ``/``-separated epoch labels; see
# :ref:`tut-section-subselect-epochs` for more info):
left_right_aud = epochs['auditory'].average()
print(left_right_aud)
###############################################################################
# This approach will weight each epoch equally and create a single
# :class:`~mne.Evoked` object. Notice that the printed representation includes
# ``(average, N=145)``, indicating that the :class:`~mne.Evoked` object was
# created by averaging across 145 epochs. In this case, the event types were
# fairly close in number:
left_aud = epochs['auditory/left'].average()
right_aud = epochs['auditory/right'].average()
print([evok.nave for evok in (left_aud, right_aud)])
###############################################################################
# However, this may not always be the case; if for statistical reasons it is
# important to average *the same number* of epochs from different conditions,
# you can use :meth:`~mne.Epochs.equalize_event_counts` prior to averaging.
#
# Another approach to pooling across conditions is to create separate
# :class:`~mne.Evoked` objects for each condition, and combine them afterward.
# This can be accomplished by the function :func:`mne.combine_evoked`, which
# computes a weighted sum of the :class:`~mne.Evoked` objects given to it. The
# weights can be manually specified as a list or array of float values, or can
# be specified using the keyword ``'equal'`` (weight each :class:`~mne.Evoked`
# object by :math:`\frac{1}{N}`, where :math:`N` is the number of
# :class:`~mne.Evoked` objects given) or the keyword ``'nave'`` (weight each
# :class:`~mne.Evoked` object by the number of epochs that were averaged
# together to create it):
left_right_aud = mne.combine_evoked([left_aud, right_aud], weights='nave')
assert left_right_aud.nave == left_aud.nave + right_aud.nave
###############################################################################
# Keeping track of ``nave`` is important for inverse imaging, because it is
# used to scale the noise covariance estimate (which in turn affects the
# magnitude of estimated source activity). See :ref:`minimum_norm_estimates`
# for more information (especially the :ref:`whitening_and_scaling` section).
# For this reason, combining :class:`~mne.Evoked` objects with either
# ``weights='equal'`` or by providing custom numeric weights **should usually
# not be done** if you intend to perform inverse imaging on the resulting
# :class:`~mne.Evoked` object.
#
#
# Other uses of ``Evoked`` objects
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# Although the most common use of :class:`~mne.Evoked` objects is to store
# *averages* of epoched data, there are a couple other uses worth noting here.
# First, the method :meth:`epochs.standard_error() <mne.Epochs.standard_error>`
# will create an :class:`~mne.Evoked` object (just like
# :meth:`epochs.average() <mne.Epochs.average>` does), but the data in the
# :class:`~mne.Evoked` object will be the standard error across epochs instead
# of the average. To indicate this difference, :class:`~mne.Evoked` objects
# have a :attr:`~mne.Evoked.kind` attribute that takes values ``'average'`` or
# ``'standard error'`` as appropriate.
#
# Another use of :class:`~mne.Evoked` objects is to represent *a single trial
# or epoch* of data, usually when looping through epochs. This can be easily
# accomplished with the :meth:`epochs.iter_evoked() <mne.Epochs.iter_evoked>`
# method, and can be useful for applications where you want to do something
# that is only possible for :class:`~mne.Evoked` objects. For example, here
# we use the :meth:`~mne.Evoked.get_peak` method (which isn't available for
# :class:`~mne.Epochs` objects) to get the peak response in each trial:
# get_peak is an Evoked-only method, hence iter_evoked(); restrict to EEG
# channels and request the peak amplitude in addition to its location.
for ix, trial in enumerate(epochs[:3].iter_evoked()):
    channel, latency, value = trial.get_peak(ch_type='eeg',
                                             return_amplitude=True)
    latency = int(round(latency * 1e3))  # convert to milliseconds
    value = int(round(value * 1e6))      # convert to µV
    print('Trial {}: peak of {} µV at {} ms in channel {}'
          .format(ix, value, latency, channel))
###############################################################################
# .. REFERENCES
#
# .. _`memory-mapped`: https://en.wikipedia.org/wiki/Memory-mapped_file
|
gregmalcolm/python_koans | refs/heads/master | scent.py | 28 | from sniffer.api import *
import os
watch_paths = ['.', 'koans/']
@file_validator
def py_files(filename):
    """Watch Python sources, skipping hidden (dot-prefixed) files."""
    base = os.path.basename(filename)
    return filename.endswith('.py') and not base.startswith('.')
@runnable
def execute_koans(*args):
    """Sniffer runner: re-execute the koans whenever a watched file changes.

    ``-B`` prevents Python from writing ``.pyc`` bytecode files.
    """
    os.system('python3 -B contemplate_koans.py')
|
salguarnieri/intellij-community | refs/heads/master | python/testData/intentions/replaceOctalNumericLiteral_after.py | 83 | a = 0o1223 |
gurneyalex/odoo | refs/heads/13.0-improve_sale_coupon_perf | addons/pos_mercury/models/__init__.py | 43 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import pos_mercury
from . import pos_mercury_transaction
|
nkrinner/nova | refs/heads/master | nova/tests/policy_fixture.py | 13 | # Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import os
import fixtures
from oslo.config import cfg
from nova.openstack.common import policy as common_policy
import nova.policy
from nova.tests import fake_policy
CONF = cfg.CONF
class PolicyFixture(fixtures.Fixture):
    """Install the canned ``fake_policy`` rules into nova for a test run.

    Writes the fake policy data into a temporary ``policy.json``, points
    ``CONF.policy_file`` at it, and re-initialises nova's policy engine.
    The engine is reset again during fixture cleanup.
    """

    def setUp(self):
        super(PolicyFixture, self).setUp()
        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file_name = os.path.join(self.policy_dir.path,
                                             'policy.json')
        with open(self.policy_file_name, 'w') as policy_file:
            policy_file.write(fake_policy.policy_data)
        CONF.set_override('policy_file', self.policy_file_name)
        # Drop any rules cached from a previous test before re-reading.
        nova.policy.reset()
        nova.policy.init()
        self.addCleanup(nova.policy.reset)

    def set_rules(self, rules):
        """Replace the active rule set with *rules* (name -> rule string)."""
        common_policy.set_rules(common_policy.Rules(
            dict((k, common_policy.parse_rule(v))
                 for k, v in rules.items())))
class RoleBasedPolicyFixture(fixtures.Fixture):
    """Rewrite the live policy so every action requires a single role.

    Useful for tests that exercise authorization success/failure for a
    specific role (``admin`` by default) across the full set of actions.
    """

    def __init__(self, role="admin", *args, **kwargs):
        super(RoleBasedPolicyFixture, self).__init__(*args, **kwargs)
        self.role = role

    def setUp(self):
        """Copy live policy.json file and convert all actions to
        allow users of the specified role only
        """
        super(RoleBasedPolicyFixture, self).setUp()
        # Use a context manager so the policy file handle is closed
        # deterministically instead of leaking until garbage collection.
        with open(CONF.policy_file) as policy_fh:
            policy = json.load(policy_fh)

        # Convert all actions to require specified role.  Iterating the
        # keys (instead of py2-only iteritems()) works on both Python 2
        # and 3; only values are reassigned, so mutation is safe.
        for action in policy:
            policy[action] = 'role:%s' % self.role

        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file_name = os.path.join(self.policy_dir.path,
                                             'policy.json')
        with open(self.policy_file_name, 'w') as policy_file:
            json.dump(policy, policy_file)
        CONF.set_override('policy_file', self.policy_file_name)
        nova.policy.reset()
        nova.policy.init()
        self.addCleanup(nova.policy.reset)
|
nburn42/tensorflow | refs/heads/master | tensorflow/contrib/autograph/pyct/ast_util_test.py | 9 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ast_util module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ast
from tensorflow.contrib.autograph.pyct import ast_util
from tensorflow.contrib.autograph.pyct import compiler
from tensorflow.contrib.autograph.pyct import parser
from tensorflow.contrib.autograph.pyct import qual_names
from tensorflow.python.platform import test
class AstUtilTest(test.TestCase):
  """Unit tests for ast_util: symbol renaming, node copying, matching."""

  def test_rename_symbols(self):
    # Build a tuple (a, b, b.c, b.c.d) mixing Load/Store contexts so the
    # test covers both read and write occurrences of renamed symbols.
    node = ast.Tuple([
        ast.Name('a', ast.Load()),
        ast.Name('b', ast.Load()),
        ast.Attribute(ast.Name('b', None), 'c', ast.Store()),
        ast.Attribute(
            ast.Attribute(ast.Name('b', None), 'c', ast.Load()), 'd', None)
    ], None)
    node = qual_names.resolve(node)
    node = ast_util.rename_symbols(
        node, {
            qual_names.QN('a'):
                qual_names.QN('renamed_a'),
            qual_names.QN(qual_names.QN('b'), attr='c'):
                qual_names.QN('renamed_b_c'),
        })
    # 'a' renamed; bare 'b' untouched; the qualified 'b.c' collapses to a
    # single renamed name while preserving each occurrence's ctx.
    self.assertEqual(node.elts[0].id, 'renamed_a')
    self.assertTrue(isinstance(node.elts[0].ctx, ast.Load))
    self.assertEqual(node.elts[1].id, 'b')
    self.assertEqual(node.elts[2].id, 'renamed_b_c')
    self.assertTrue(isinstance(node.elts[2].ctx, ast.Store))
    self.assertEqual(node.elts[3].value.id, 'renamed_b_c')
    self.assertTrue(isinstance(node.elts[3].value.ctx, ast.Load))

  def test_copy_clean(self):
    # copy_clean must deep-copy the tree and drop annotations that were
    # attached out-of-band (here, the '__foo' attribute on the Return).
    ret = ast.Return(
        ast.BinOp(
            op=ast.Add(),
            left=ast.Name(id='a', ctx=ast.Load()),
            right=ast.Num(1)))
    setattr(ret, '__foo', 'bar')
    node = ast.FunctionDef(
        name='f',
        args=ast.arguments(
            args=[ast.Name(id='a', ctx=ast.Param())],
            vararg=None,
            kwarg=None,
            defaults=[]),
        body=[ret],
        decorator_list=[],
        returns=None)
    new_node = ast_util.copy_clean(node)
    self.assertFalse(node is new_node)
    self.assertFalse(ret is new_node.body[0])
    self.assertFalse(hasattr(new_node.body[0], '__foo'))

  def test_keywords_to_dict(self):
    keywords = parser.parse_expression('f(a=b, c=1, d=\'e\')').keywords
    d = ast_util.keywords_to_dict(keywords)
    # Make sure we generate a usable dict node by attaching it to a variable and
    # compiling everything.
    output = parser.parse_str('b = 3')
    output.body += (ast.Assign([ast.Name(id='d', ctx=ast.Store())], d),)
    result, _ = compiler.ast_to_object(output)
    self.assertDictEqual(result.d, {'a': 3, 'c': 1, 'd': 'e'})

  def assertMatch(self, target_str, pattern_str):
    # Helper: parse both sides and assert the pattern matches the target.
    node = parser.parse_expression(target_str)
    pattern = parser.parse_expression(pattern_str)
    self.assertTrue(ast_util.matches(node, pattern))

  def assertNoMatch(self, target_str, pattern_str):
    # Helper: parse both sides and assert the pattern does NOT match.
    node = parser.parse_expression(target_str)
    pattern = parser.parse_expression(pattern_str)
    self.assertFalse(ast_util.matches(node, pattern))

  def test_matches_symbols(self):
    # '_' is the wildcard: it matches a name but not a call expression.
    self.assertMatch('foo', '_')
    self.assertNoMatch('foo()', '_')
    self.assertMatch('foo + bar', 'foo + _')
    self.assertNoMatch('bar + bar', 'foo + _')
    self.assertNoMatch('foo - bar', 'foo + _')

  def test_matches_function_args(self):
    # A wildcard in argument position matches any (possibly empty) args.
    self.assertMatch('super(Foo, self).__init__(arg1, arg2)',
                     'super(_).__init__(_)')
    self.assertMatch('super().__init__()', 'super(_).__init__(_)')
    self.assertNoMatch('super(Foo, self).bar(arg1, arg2)',
                       'super(_).__init__(_)')
    self.assertMatch('super(Foo, self).__init__()', 'super(Foo, _).__init__(_)')
    self.assertNoMatch('super(Foo, self).__init__()',
                       'super(Bar, _).__init__(_)')
if __name__ == '__main__':
test.main()
|
Evfro/polara | refs/heads/master | polara/evaluation/pipelines.py | 1 | from __future__ import print_function
from operator import mul as mul_op
from functools import reduce
from random import choice
import pandas as pd
from collections import abc
def is_list_like(obj, allow_sets=False, allow_dict=False):
    """Return True for iterable containers, excluding strings and bytes.

    Sets and mappings only count as list-like when explicitly permitted
    via ``allow_sets`` / ``allow_dict``.
    """
    if not isinstance(obj, abc.Iterable):
        return False
    if isinstance(obj, (str, bytes)):
        return False
    if allow_sets is False and isinstance(obj, abc.Set):
        return False
    if allow_dict is False and isinstance(obj, abc.Mapping):
        return False
    return True
def random_chooser():
    """Coroutine-style random picker.

    Protocol: prime with ``next()``, then ``send()`` a sequence to get one
    randomly chosen element back; repeat for each pick.
    """
    while True:
        options = yield
        yield choice(options)
def random_grid(params, n=60, grid_cache=None, skip_config=None):
    """Sample up to *n* random hyper-parameter configurations.

    params : dict mapping parameter name -> list of candidate values
    n : number of configurations to draw; 0 means "take the full grid"
    grid_cache : previously drawn configurations to extend (deduplicated)
    skip_config : optional predicate; matching configurations are discarded

    Returns ``(grid, param_names)`` where *grid* is a set of value tuples
    ordered according to *param_names*.
    """
    if not isinstance(n, int):
        raise TypeError('n must be an integer, not {}'.format(type(n)))
    if n < 0:
        raise ValueError('n should be >= 0')
    # fix names and order of parameters
    param_names, param_values = zip(*params.items())
    grid = set(grid_cache) if grid_cache is not None else set()
    max_n = reduce(mul_op, [len(vals) for vals in param_values])
    # never request more configurations than the full grid contains
    n = min(n if n > 0 else max_n, max_n)
    skipped = set()
    if skip_config is None:
        def never_skip(config): return False
        skip_config = never_skip
    param_chooser = random_chooser()
    try:
        # every rejected configuration lowers the effective target size,
        # so the loop terminates even if skip_config rejects aggressively
        while len(grid) < (n-len(skipped)):
            level_choice = []
            for param_val in param_values:
                next(param_chooser)  # prime the coroutine before send()
                level_choice.append(param_chooser.send(param_val))
            level_choice = tuple(level_choice)
            if skip_config(level_choice):
                skipped.add(level_choice)
                continue
            grid.add(level_choice)
    except KeyboardInterrupt:
        # allow the user to stop a long sampling run and keep what we have
        print('Interrupted by user. Providing current results.')
    return grid, param_names
def set_config(model, config, convert_nan=True):
    """Assign every entry of ``config`` onto ``model`` as an attribute.

    When ``convert_nan`` is truthy, NaN values (detected via the ``x != x``
    property of NaN) are stored as ``None`` instead.
    """
    for attr, val in config.items():
        if convert_nan and val != val:
            val = None  # NaN -> None
        setattr(model, attr, val)
def evaluate_models(models, target_metric='precision', metric_type='all', **kwargs):
    """Evaluate one or several models and collect a score per model.

    ``target_metric`` is either a column name in the evaluation results or
    a callable applied row-wise.  Returns ``{model.method: score}``.
    """
    if not is_list_like(models, allow_sets=True):
        models = [models]
    model_scores = {}
    for model in models:
        raw = model.evaluate(metric_type, **kwargs)
        if not isinstance(raw, list):
            raw = [raw]
        # one single-row frame per result entry, concatenated column-wise
        frames = [pd.DataFrame([entry]) for entry in raw]
        scores_df = pd.concat(frames, axis=1)
        if isinstance(target_metric, str):
            score = scores_df[target_metric].squeeze()
        elif callable(target_metric):
            score = scores_df.apply(target_metric, axis=1).squeeze()
        else:
            raise NotImplementedError
        model_scores[model.method] = score
    return model_scores
def find_optimal_svd_rank(model, ranks, target_metric, return_scores=False,
                          protect_factors=True, config=None, verbose=False,
                          evaluator=None, iterator=lambda x: x, **kwargs):
    """Grid-search the SVD rank of *model* over *ranks*.

    The model is built once at the largest requested rank; lower ranks are
    then evaluated by truncation, which avoids refitting for every rank.

    model : SVD-based recommender exposing ``rank``, ``factors``, ``build()``
    ranks : iterable of integer ranks to evaluate
    target_metric : metric name (or callable) forwarded to the evaluator
    return_scores : also return a pandas Series of scores indexed by rank
    protect_factors : restore the model's original factors afterwards
    config : optional attribute overrides applied before building
    evaluator : scoring function, defaults to ``evaluate_models``
    iterator : wrapper around the rank loop (e.g. tqdm for progress bars)
    """
    evaluator = evaluator or evaluate_models
    model_verbose = model.verbose
    if config:
        set_config(model, config)
    # build once with the largest rank; smaller ranks reuse these factors
    model.rank = svd_rank = max(max(ranks), model.rank)
    if not model._is_ready:
        model.verbose = verbose
        model.build()
    if protect_factors:
        svd_factors = dict(**model.factors)  # avoid accidental overwrites
    res = {}
    try:
        # evaluate in decreasing rank order so truncation is always valid
        for rank in iterator(sorted(ranks, key=lambda x: -x)):
            model.rank = rank
            res[rank] = evaluator(model, target_metric, **kwargs)[model.method]
            # prevent previous scores caching when assigning svd_rank
            model._recommendations = None
    finally:
        # restore original state even if evaluation raised
        if protect_factors:
            model._rank = svd_rank
            model.factors = svd_factors
        model.verbose = model_verbose
    scores = pd.Series(res)
    best_rank = scores.idxmax()
    if return_scores:
        scores.index.name = 'rank'
        scores.name = model.method
        # .loc reorders the scores into the caller-supplied rank order
        return best_rank, scores.loc[ranks]
    return best_rank
def find_optimal_tucker_ranks(model, tucker_ranks, target_metric, return_scores=False,
                              config=None, verbose=False, same_space=False,
                              evaluator=None, iterator=lambda x: x, **kwargs):
    """Grid-search the multilinear (Tucker) rank triple of *model*.

    The model is built once at the largest rank per mode; smaller rank
    combinations reuse (truncate) those factors.

    tucker_ranks : three iterables of candidate ranks, one per tensor mode
    same_space : if True, only evaluate triples with equal first two ranks
    iterator : wrapper around the outermost loop (e.g. tqdm)
    Other parameters are as in ``find_optimal_svd_rank``.
    """
    evaluator = evaluator or evaluate_models
    model_verbose = model.verbose
    if config:
        set_config(model, config)
    # build once at the maximal rank in every mode
    model.mlrank = tuple([max(mode_ranks) for mode_ranks in tucker_ranks])
    if not model._is_ready:
        model.verbose = verbose
        model.build()
    factors = dict(**model.factors)  # snapshot to restore after each trial
    tucker_rank = model.mlrank
    res_score = {}
    for r1 in iterator(tucker_ranks[0]):
        for r2 in tucker_ranks[1]:
            if same_space and (r2 != r1):
                continue
            for r3 in tucker_ranks[2]:
                # skip infeasible triples: each mode rank cannot exceed the
                # product of the other two
                if (r1*r2 < r3) or (r1*r3 < r2) or (r2*r3 < r1):
                    continue
                try:
                    model.mlrank = mlrank = (r1, r2, r3)
                    res_score[mlrank] = evaluator(model, target_metric, **kwargs)[model.method]
                    # prevent previous scores caching when assigning tucker_rank
                    model._recommendations = None
                finally:
                    # restore the full-rank state before the next trial
                    model._mlrank = tucker_rank
                    model.factors = dict(**factors)
    model.verbose = model_verbose
    scores = pd.Series(res_score).sort_index()
    best_mlrank = scores.idxmax()
    if return_scores:
        scores.index.names = ['r1', 'r2', 'r3']
        scores.name = model.method
        return best_mlrank, scores
    return best_mlrank
def params_to_dict(names, params):
    """Pair parameter names with values into a dict.

    Falls back to a single-entry dict when either argument is a scalar
    (non-iterable), e.g. one parameter name with one value.
    """
    try:
        return dict(zip(names, params))
    except TypeError:  # encountered single value
        return {names: params}
def find_optimal_config(model, param_grid, param_names, target_metric, return_scores=False,
                        init_config=None, reset_config=None, verbose=False, force_build=True,
                        evaluator=None, iterator=lambda x: x, **kwargs):
    """Evaluate *model* over every configuration in *param_grid*.

    param_grid : iterable of parameter-value tuples (see ``random_grid``)
    param_names : names matching each position in a configuration tuple
    init_config : dict (or list of dicts) applied once before the search
    reset_config : dict to re-apply, or callable to invoke, after each trial
    force_build : rebuild the model for every configuration even if ready
    Returns the best configuration dict, plus a scores Series when
    ``return_scores`` is True.
    """
    evaluator = evaluator or evaluate_models
    model_verbose = model.verbose
    if init_config:
        if not is_list_like(init_config):
            init_config = [init_config]
        for config in init_config:
            set_config(model, config)
    model.verbose = verbose
    grid_results = {}
    for params in iterator(param_grid):
        param_config = params_to_dict(param_names, params)
        try:
            set_config(model, param_config)
            if not model._is_ready or force_build:
                model.build()
            grid_results[params] = evaluator(model, target_metric, **kwargs)[model.method]
        finally:
            # undo per-trial state even when evaluation raises
            if reset_config is not None:
                if isinstance(reset_config, dict):
                    set_config(model, reset_config)
                elif callable(reset_config):
                    reset_config(model)
                else:
                    raise NotImplementedError
    model.verbose = model_verbose
    # workaround non-orderable configs (otherwise pandas raises error)
    scores = pd.Series(**dict(zip(('index', 'data'),
                                  (zip(*grid_results.items())))))
    best_params = scores.idxmax()
    best_config = params_to_dict(param_names, best_params)
    if return_scores:
        try:
            scores.index.names = param_names
        except ValueError:  # not list-like
            scores.index.name = param_names
        scores.name = model.method
        return best_config, scores
    return best_config
|
40223113/0623-w17 | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/browser/indexed_db.py | 632 | class EventListener:
def __init__(self, events=[]):
self._events=events
def append(self, event):
self._events.append(event)
def fire(self, e):
for _event in self._events:
_event(e)
class IndexedDB:
    """Thin Pythonic wrapper over the browser's IndexedDB API (Brython)."""

    def __init__(self):
        # Unreachable `return` that followed this raise has been removed.
        if not __BRYTHON__.has_indexedDB:
            raise NotImplementedError("Your browser doesn't support indexedDB")
        self._indexedDB = __BRYTHON__.indexedDB()
        self._db = None
        self._version = None

    def _onsuccess(self, event):
        # Keep a handle on the opened database for later transactions.
        self._db = event.target.result

    def open(self, name, onsuccess, version=1.0, onerror=None,
             onupgradeneeded=None):
        """Open (or create) database *name*; *onsuccess* fires when ready.

        NOTE(review): the ``onerror`` parameter is shadowed by the local
        ``onerror`` function below, so a caller-supplied error handler is
        silently ignored — preserved as-is pending confirmation of intent.
        """
        self._version = version
        _result = self._indexedDB.open(name, version)
        # chain our bookkeeping handler before the caller's success handler
        _success = EventListener([self._onsuccess, onsuccess])
        _result.onsuccess = _success.fire
        _result.onupgradeneeded = onupgradeneeded

        #if onerror is None:
        def onerror(e):
            print("onerror: %s:%s" % (e.type, e.target.result))

        def onblocked(e):
            print("blocked: %s:%s" % (e.type, e.result))

        _result.onerror = onerror
        _result.onblocked = onblocked

    def transaction(self, entities, mode='read'):
        """Start a transaction over *entities*, wrapped in Transaction."""
        return Transaction(self._db.transaction(entities, mode))
class Transaction:
    """Wrapper around a native IndexedDB transaction handle."""

    def __init__(self, transaction):
        # raw JS transaction object returned by IDBDatabase.transaction()
        self._transaction = transaction

    def objectStore(self, name):
        """Return the named object store, wrapped in ObjectStore."""
        return ObjectStore(self._transaction.objectStore(name))
class ObjectStore:
    """Wrapper around a native IndexedDB object store.

    All operations are asynchronous: results/errors are delivered through
    the ``onsuccess`` / ``onerror`` callbacks attached to the request.
    """

    def __init__(self, objectStore):
        self._objectStore = objectStore  # raw JS IDBObjectStore handle
        self._data = []                  # accumulator filled by query()

    def clear(self, onsuccess=None, onerror=None):
        """Asynchronously remove every record from the store."""
        _result = self._objectStore.clear()
        if onsuccess is not None:
            _result.onsuccess = onsuccess
        if onerror is not None:
            _result.onerror = onerror

    def _helper(self, func, object, onsuccess=None, onerror=None):
        # Shared plumbing: issue the request and wire optional callbacks.
        # (Parameter name `object` shadows the builtin; kept for
        # compatibility with existing keyword callers.)
        _result = func(object)
        if onsuccess is not None:
            _result.onsuccess = onsuccess
        if onerror is not None:
            _result.onerror = onerror

    def put(self, obj, key=None, onsuccess=None, onerror=None):
        """Insert or update *obj* (optionally under *key*)."""
        _r = self._objectStore.put(obj, key)
        _r.onsuccess = onsuccess
        _r.onerror = onerror

    def add(self, obj, key, onsuccess=None, onerror=None):
        """Insert *obj* under *key*; fails if the key already exists."""
        _r = self._objectStore.add(obj, key)
        _r.onsuccess = onsuccess
        _r.onerror = onerror
        #self._helper(self._objectStore.add, object, onsuccess, onerror)

    def delete(self, index, onsuccess=None, onerror=None):
        """Asynchronously delete the record stored under *index*."""
        self._helper(self._objectStore.delete, index, onsuccess, onerror)

    def query(self, *args):
        """Open a cursor over *args* and accumulate values into _data.

        Results arrive asynchronously; read them later via fetchall().
        """
        self._data = []
        def onsuccess(event):
            cursor = event.target.result
            if cursor is not None:
                self._data.append(cursor.value)
                getattr(cursor, "continue")()  # cursor.continue() is illegal
        self._objectStore.openCursor(args).onsuccess = onsuccess

    def fetchall(self):
        # NOTE(review): yields the whole accumulated list as ONE item
        # (not element-by-element); callers apparently iterate once.
        yield self._data

    def get(self, key, onsuccess=None, onerror=None):
        """Asynchronously fetch the record stored under *key*."""
        self._helper(self._objectStore.get, key, onsuccess, onerror)
|
Glasgow2015/team-10 | refs/heads/master | env/lib/python2.7/site-packages/django/views/generic/base.py | 82 | from __future__ import unicode_literals
import logging
import warnings
from functools import update_wrapper
from django import http
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import NoReverseMatch, reverse
from django.template.response import TemplateResponse
from django.utils import six
from django.utils.decorators import classonlymethod
from django.utils.deprecation import RemovedInDjango19Warning
_sentinel = object()
logger = logging.getLogger('django.request')
class ContextMixin(object):
    """
    Default context mixin: the keyword arguments received by
    get_context_data become the template context, with the view instance
    made available under the 'view' key unless the caller supplied one.
    """

    def get_context_data(self, **kwargs):
        kwargs.setdefault('view', self)
        return kwargs
class View(object):
    """
    Intentionally simple parent class for all views. Only implements
    dispatch-by-method and simple sanity checking.
    """

    # HTTP verbs a view may implement; dispatch() only routes these.
    http_method_names = ['get', 'post', 'put', 'patch', 'delete', 'head', 'options', 'trace']

    def __init__(self, **kwargs):
        """
        Constructor. Called in the URLconf; can contain helpful extra
        keyword arguments, and other things.
        """
        # Go through keyword arguments, and either save their values to our
        # instance, or raise an error.
        for key, value in six.iteritems(kwargs):
            setattr(self, key, value)

    @classonlymethod
    def as_view(cls, **initkwargs):
        """
        Main entry point for a request-response process.
        """
        # Sanity-check initkwargs: they may not shadow HTTP handler names
        # and must correspond to attributes that already exist on the class.
        for key in initkwargs:
            if key in cls.http_method_names:
                raise TypeError("You tried to pass in the %s method name as a "
                                "keyword argument to %s(). Don't do that."
                                % (key, cls.__name__))
            if not hasattr(cls, key):
                raise TypeError("%s() received an invalid keyword %r. as_view "
                                "only accepts arguments that are already "
                                "attributes of the class." % (cls.__name__, key))

        def view(request, *args, **kwargs):
            # A fresh instance per request keeps per-request state
            # (request/args/kwargs) from leaking between requests.
            self = cls(**initkwargs)
            if hasattr(self, 'get') and not hasattr(self, 'head'):
                # HEAD falls back to GET when not explicitly implemented.
                self.head = self.get
            self.request = request
            self.args = args
            self.kwargs = kwargs
            return self.dispatch(request, *args, **kwargs)

        # take name and docstring from class
        update_wrapper(view, cls, updated=())

        # and possible attributes set by decorators
        # like csrf_exempt from dispatch
        update_wrapper(view, cls.dispatch, assigned=())
        return view

    def dispatch(self, request, *args, **kwargs):
        # Try to dispatch to the right method; if a method doesn't exist,
        # defer to the error handler. Also defer to the error handler if the
        # request method isn't on the approved list.
        if request.method.lower() in self.http_method_names:
            handler = getattr(self, request.method.lower(), self.http_method_not_allowed)
        else:
            handler = self.http_method_not_allowed
        return handler(request, *args, **kwargs)

    def http_method_not_allowed(self, request, *args, **kwargs):
        """Return 405, advertising the implemented verbs via Allow."""
        logger.warning('Method Not Allowed (%s): %s', request.method, request.path,
            extra={
                'status_code': 405,
                'request': request
            }
        )
        return http.HttpResponseNotAllowed(self._allowed_methods())

    def options(self, request, *args, **kwargs):
        """
        Handles responding to requests for the OPTIONS HTTP verb.
        """
        response = http.HttpResponse()
        response['Allow'] = ', '.join(self._allowed_methods())
        response['Content-Length'] = '0'
        return response

    def _allowed_methods(self):
        # A verb is "allowed" when a handler method of that name exists.
        return [m.upper() for m in self.http_method_names if hasattr(self, m)]
class TemplateResponseMixin(object):
    """
    A mixin that can be used to render a template.
    """
    # Template path; subclasses must set it or override get_template_names().
    template_name = None
    # Optional template engine name (None = default engine resolution).
    template_engine = None
    response_class = TemplateResponse
    # MIME type for the response (None = response class default).
    content_type = None

    def render_to_response(self, context, **response_kwargs):
        """
        Returns a response, using the `response_class` for this
        view, with a template rendered with the given context.

        If any keyword arguments are provided, they will be
        passed to the constructor of the response class.
        """
        response_kwargs.setdefault('content_type', self.content_type)
        return self.response_class(
            request=self.request,
            template=self.get_template_names(),
            context=context,
            using=self.template_engine,
            **response_kwargs
        )

    def get_template_names(self):
        """
        Returns a list of template names to be used for the request. Must return
        a list. May not be called if render_to_response is overridden.
        """
        if self.template_name is None:
            raise ImproperlyConfigured(
                "TemplateResponseMixin requires either a definition of "
                "'template_name' or an implementation of 'get_template_names()'")
        else:
            return [self.template_name]
class TemplateView(TemplateResponseMixin, ContextMixin, View):
    """
    A view that renders a template. This view will also pass into the context
    any keyword arguments passed by the url conf.
    """

    def get(self, request, *args, **kwargs):
        # URLconf kwargs become the template context via ContextMixin.
        context = self.get_context_data(**kwargs)
        return self.render_to_response(context)
class RedirectView(View):
    """
    A view that provides a redirect on any GET request.
    """
    # _sentinel distinguishes "unset" from an explicit True/False so the
    # Django 1.9 default-change deprecation warning fires only when unset.
    permanent = _sentinel
    # Explicit URL template, %-interpolated with the URLconf kwargs.
    url = None
    # Alternatively, a URL pattern name to reverse() into the target URL.
    pattern_name = None
    # Whether to append the incoming query string to the target URL.
    query_string = False

    def __init__(self, *args, **kwargs):
        if 'permanent' not in kwargs and self.permanent is _sentinel:
            warnings.warn(
                "Default value of 'RedirectView.permanent' will change "
                "from True to False in Django 1.9. Set an explicit value "
                "to silence this warning.",
                RemovedInDjango19Warning,
                stacklevel=2
            )
            self.permanent = True
        super(RedirectView, self).__init__(*args, **kwargs)

    @classonlymethod
    def as_view(cls, **initkwargs):
        # Same deprecation handling as __init__, but for as_view() usage
        # where 'permanent' may arrive as an init kwarg.
        if 'permanent' not in initkwargs and cls.permanent is _sentinel:
            warnings.warn(
                "Default value of 'RedirectView.permanent' will change "
                "from True to False in Django 1.9. Set an explicit value "
                "to silence this warning.",
                RemovedInDjango19Warning,
                stacklevel=2
            )
            initkwargs['permanent'] = True
        return super(RedirectView, cls).as_view(**initkwargs)

    def get_redirect_url(self, *args, **kwargs):
        """
        Return the URL redirect to. Keyword arguments from the
        URL pattern match generating the redirect request
        are provided as kwargs to this method.

        Returns None when no target can be determined (caller emits 410).
        """
        if self.url:
            url = self.url % kwargs
        elif self.pattern_name:
            try:
                url = reverse(self.pattern_name, args=args, kwargs=kwargs)
            except NoReverseMatch:
                return None
        else:
            return None

        # NOTE: rebinds `args` to the raw query string from here on.
        args = self.request.META.get('QUERY_STRING', '')
        if args and self.query_string:
            url = "%s?%s" % (url, args)
        return url

    def get(self, request, *args, **kwargs):
        url = self.get_redirect_url(*args, **kwargs)
        if url:
            if self.permanent:
                return http.HttpResponsePermanentRedirect(url)
            else:
                return http.HttpResponseRedirect(url)
        else:
            # No destination could be computed: respond 410 Gone.
            logger.warning('Gone: %s', request.path,
                        extra={
                            'status_code': 410,
                            'request': request
                        })
            return http.HttpResponseGone()

    # All other verbs redirect exactly like GET.
    def head(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)

    def options(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)

    def put(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)

    def patch(self, request, *args, **kwargs):
        return self.get(request, *args, **kwargs)
|
remitamine/youtube-dl | refs/heads/master | youtube_dl/extractor/jove.py | 50 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
unified_strdate
)
class JoveIE(InfoExtractor):
    """Extractor for jove.com (Journal of Visualized Experiments) videos."""
    _VALID_URL = r'https?://(?:www\.)?jove\.com/video/(?P<id>[0-9]+)'
    _CHAPTERS_URL = 'http://www.jove.com/video-chapters?videoid={video_id:}'
    _TESTS = [
        {
            'url': 'http://www.jove.com/video/2744/electrode-positioning-montage-transcranial-direct-current',
            'md5': '93723888d82dbd6ba8b3d7d0cd65dd2b',
            'info_dict': {
                'id': '2744',
                'ext': 'mp4',
                'title': 'Electrode Positioning and Montage in Transcranial Direct Current Stimulation',
                'description': 'md5:015dd4509649c0908bc27f049e0262c6',
                'thumbnail': r're:^https?://.*\.png$',
                'upload_date': '20110523',
            }
        },
        {
            'url': 'http://www.jove.com/video/51796/culturing-caenorhabditis-elegans-axenic-liquid-media-creation',
            'md5': '914aeb356f416811d911996434811beb',
            'info_dict': {
                'id': '51796',
                'ext': 'mp4',
                'title': 'Culturing Caenorhabditis elegans in Axenic Liquid Media and Creation of Transgenic Worms by Microparticle Bombardment',
                'description': 'md5:35ff029261900583970c4023b70f1dc9',
                'thumbnail': r're:^https?://.*\.png$',
                'upload_date': '20140802',
            }
        },
    ]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')

        webpage = self._download_webpage(url, video_id)

        # The actual media URL lives in a separate chapters XML document
        # whose id is embedded in the page markup.
        chapters_id = self._html_search_regex(
            r'/video-chapters\?videoid=([0-9]+)', webpage, 'chapters id')

        chapters_xml = self._download_xml(
            self._CHAPTERS_URL.format(video_id=chapters_id),
            video_id, note='Downloading chapters XML',
            errnote='Failed to download chapters XML')

        video_url = chapters_xml.attrib.get('video')
        if not video_url:
            raise ExtractorError('Failed to get the video URL')

        title = self._html_search_meta('citation_title', webpage, 'title')
        thumbnail = self._og_search_thumbnail(webpage)
        description = self._html_search_regex(
            r'<div id="section_body_summary"><p class="jove_content">(.+?)</p>',
            webpage, 'description', fatal=False)
        publish_date = unified_strdate(self._html_search_meta(
            'citation_publication_date', webpage, 'publish date', fatal=False))
        # With fatal=False the regex helper returns None when nothing
        # matches; int(None) would raise TypeError, so only cast when a
        # comment count was actually found.
        comment_count = self._html_search_regex(
            r'<meta name="num_comments" content="(\d+) Comments?"',
            webpage, 'comment count', fatal=False)
        if comment_count is not None:
            comment_count = int(comment_count)

        return {
            'id': video_id,
            'title': title,
            'url': video_url,
            'thumbnail': thumbnail,
            'description': description,
            'upload_date': publish_date,
            'comment_count': comment_count,
        }
|
frappe/frappe | refs/heads/develop | frappe/core/__init__.py | 75 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
|
qe-team/marmot | refs/heads/master | marmot/features/phrase/tests/test_num_translations_feature_extractor.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import os
from marmot.features.phrase.num_translations_feature_extractor import NumTranslationsFeatureExtractor
# test a class which extracts source and target token count features, and the source/target token count ratio
class NumTranslationsFeatureExtractorTests(unittest.TestCase):
    """Checks NumTranslationsFeatureExtractor's translation-count features.

    NOTE(review): relies on hard-coded absolute paths under /export/data,
    so it only runs on the machine that hosts the original experiment data.
    """

    def setUp(self):
        # Lexical translation table (f2e) plus the source-side corpus used
        # for frequency weighting.
        self.extractor = NumTranslationsFeatureExtractor('/export/data/varvara/europarl-sys/english_spanish/tiny/model/lex.1.f2e', '/export/data/varvara/europarl-sys/english_spanish/tiny/corpus/nc.clean.1.en')

    def test_get_features(self):
        # One aligned phrase pair: target token 'el' aligned to source 'the'
        # (indices are half-open spans into the token lists).
        obj = {'source':['a', 'boy', 'hits', 'the', 'dog'],
               'target':['uno', 'nino', 'abati', 'el', 'perro'],
               'token':['el'],
               'index': (3, 4),
               'source_token': ['the'],
               'source_index':(3, 4)}
        # Counts of candidate translations above probability thresholds
        # 0.01/0.05/0.1/0.2/0.5, plain and frequency-weighted ("_w").
        (f_001, f_005, f_01, f_02, f_05, f_001_w, f_005_w, f_01_w, f_02_w, f_05_w) = self.extractor.get_features(obj)
        self.assertEqual(f_001, 7)
        self.assertEqual(f_005, 6)
        self.assertEqual(f_01, 3)
        self.assertEqual(f_02, 2)
        self.assertEqual(f_05, 0)
        self.assertAlmostEqual(f_001_w, 0.52421775)
        self.assertAlmostEqual(f_005_w, 0.4493295)
        self.assertAlmostEqual(f_01_w, 0.22466475)
        self.assertAlmostEqual(f_02_w, 0.1497765)
        self.assertAlmostEqual(f_05_w, 0.0)
if __name__ == '__main__':
unittest.main()
|
fnouama/intellij-community | refs/heads/master | python/testData/refactoring/invertBoolean/my_file.py | 83 | BOOL = True |
hzlf/openbroadcast.org | refs/heads/master | website/apps/crispy_forms_vue/apps.py | 2 | from __future__ import unicode_literals
from django.apps import AppConfig
class CrispyFormsVueConfig(AppConfig):
    """Django application configuration for the crispy_forms_vue app."""
    name = "crispy_forms_vue"
|
lucashmorais/x-Bench | refs/heads/master | mozmill-env/python/Lib/site-packages/mozlog/structured/formatters/html/xmlgen.py | 44 | """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
This file is originally from: https://bitbucket.org/hpk42/py, specifically:
https://bitbucket.org/hpk42/py/src/980c8d526463958ee7cae678a7e4e9b054f36b94/py/_xmlgen.py?at=default
by holger krekel, holger at merlinux eu. 2009
"""
import sys, re
# Python 2/3 text-type compatibility shims used throughout this module.
if sys.version_info >= (3,0):
    def u(s):
        # On Python 3 every str is already unicode; return unchanged.
        return s
    def unicode(x):
        # Emulate Python 2's unicode(): honour a __unicode__ hook, else str().
        if hasattr(x, '__unicode__'):
            return x.__unicode__()
        return str(x)
else:
    def u(s):
        # On Python 2, promote byte strings to unicode text.
        return unicode(s)
    unicode = unicode
class NamespaceMetaclass(type):
    # Metaclass that manufactures Tag subclasses on attribute access, so
    # e.g. ``html.div`` yields (and caches) a "div" tag class on first use.
    def __getattr__(self, name):
        # Never synthesize private/dunder names.
        if name[:1] == '_':
            raise AttributeError(name)
        # The bare Namespace root may not be used to create tags.
        if self == Namespace:
            raise ValueError("Namespace class is abstract")
        tagspec = self.__tagspec__
        # Restrict to the declared tag vocabulary when one is given.
        if tagspec is not None and name not in tagspec:
            raise AttributeError(name)
        classattr = {}
        if self.__stickyname__:
            # Remember the attribute name as the XML tag name.
            classattr['xmlname'] = name
        cls = type(name, (self.__tagclass__,), classattr)
        # Cache on the namespace so later lookups skip __getattr__.
        setattr(self, name, cls)
        return cls
class Tag(list):
    # A tag is a list of its child nodes; attributes live on ``self.attr``.
    class Attr(object):
        # Simple attribute bag populated from keyword arguments.
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)
    def __init__(self, *args, **kwargs):
        # Positional args become children, keyword args become attributes.
        super(Tag, self).__init__(args)
        self.attr = self.Attr(**kwargs)
    def __unicode__(self):
        return self.unicode(indent=0)
    __str__ = __unicode__
    def unicode(self, indent=2):
        # Serialize this subtree to markup via the visitor.
        l = []
        SimpleUnicodeVisitor(l.append, indent).visit(self)
        return u("").join(l)
    def __repr__(self):
        name = self.__class__.__name__
        return "<%r tag object %d>" % (name, id(self))
# Abstract root namespace: subclasses override __tagspec__ (allowed tag
# names, or None for "any"), __tagclass__ (Tag subclass to instantiate)
# and __stickyname__ (whether the attribute name becomes the XML name).
Namespace = NamespaceMetaclass('Namespace', (object, ), {
    '__tagspec__': None,
    '__tagclass__': Tag,
    '__stickyname__': False,
})
class HtmlTag(Tag):
    # Tag variant rendered through HtmlVisitor (HTML singleton/inline rules).
    def unicode(self, indent=2):
        # Serialize this subtree to HTML text.
        l = []
        HtmlVisitor(l.append, indent, shortempty=False).visit(self)
        return u("").join(l)
# exported plain html namespace
class html(Namespace):
    """Namespace limited to the HTML tag vocabulary below; attribute
    access (e.g. ``html.div``) yields cached HtmlTag subclasses."""
    __tagclass__ = HtmlTag
    __stickyname__ = True
    # Allowed tag names, built as a membership dict from the CSV string.
    __tagspec__ = dict([(x,1) for x in (
        'a,abbr,acronym,address,applet,area,b,bdo,big,blink,'
        'blockquote,body,br,button,caption,center,cite,code,col,'
        'colgroup,comment,dd,del,dfn,dir,div,dl,dt,em,embed,'
        'fieldset,font,form,frameset,h1,h2,h3,h4,h5,h6,head,html,'
        'i,iframe,img,input,ins,kbd,label,legend,li,link,listing,'
        'map,marquee,menu,meta,multicol,nobr,noembed,noframes,'
        'noscript,object,ol,optgroup,option,p,pre,q,s,script,'
        'select,small,span,strike,strong,style,sub,sup,table,'
        'tbody,td,textarea,tfoot,th,thead,title,tr,tt,u,ul,xmp,'
        'base,basefont,frame,hr,isindex,param,samp,var'
    ).split(',') if x])
class Style(object):
    """Bag of CSS properties; underscores in keyword names map to hyphens
    (e.g. ``font_size`` becomes the ``font-size`` attribute)."""
    def __init__(self, **kw):
        for prop, value in kw.items():
            setattr(self, prop.replace('_', '-'), value)
class raw(object):
    """Wrapper marking a unicode string to be emitted verbatim in the
    output, bypassing all escaping."""
    def __init__(self, uniobj):
        self.uniobj = uniobj
class SimpleUnicodeVisitor(object):
    """ recursive visitor to write unicode. """
    def __init__(self, write, indent=0, curindent=0, shortempty=True):
        self.write = write  # sink callable receiving text fragments
        self.cache = {}  # maps node class -> resolved visit method
        self.visited = {}  # for detection of recursion
        self.indent = indent  # spaces added per nesting level
        self.curindent = curindent  # current indentation in spaces
        self.parents = []  # stack of open enclosing tags
        self.shortempty = shortempty  # short empty tags or not
    def visit(self, node):
        """ dispatcher on node's class/bases name. """
        cls = node.__class__
        try:
            visitmethod = self.cache[cls]
        except KeyError:
            # Walk the MRO so subclasses fall back to base-class handlers.
            for subclass in cls.__mro__:
                visitmethod = getattr(self, subclass.__name__, None)
                if visitmethod is not None:
                    break
            else:
                visitmethod = self.__object
            self.cache[cls] = visitmethod
        visitmethod(node)
    # the default fallback handler is marked private
    # to avoid clashes with the tag name object
    def __object(self, obj):
        # Any unrecognized node is stringified and escaped.
        self.write(escape(unicode(obj)))
    def raw(self, obj):
        # raw() content is emitted verbatim, without escaping.
        self.write(obj.uniobj)
    def list(self, obj):
        # A plain list is a sequence of sibling nodes.
        assert id(obj) not in self.visited
        self.visited[id(obj)] = 1
        for elem in obj:
            self.visit(elem)
    def Tag(self, tag):
        assert id(tag) not in self.visited
        try:
            tag.parent = self.parents[-1]
        except IndexError:
            tag.parent = None
        self.visited[id(tag)] = 1
        # Attribute-access tags carry their name in ``xmlname``;
        # otherwise the class name is the tag name.
        tagname = getattr(tag, 'xmlname', tag.__class__.__name__)
        # Break the line and indent unless the tag renders inline.
        if self.curindent and not self._isinline(tagname):
            self.write("\n" + u(' ') * self.curindent)
        if tag:
            # Tag with children: open, recurse, close.
            self.curindent += self.indent
            self.write(u('<%s%s>') % (tagname, self.attributes(tag)))
            self.parents.append(tag)
            for x in tag:
                self.visit(x)
            self.parents.pop()
            self.write(u('</%s>') % tagname)
            self.curindent -= self.indent
        else:
            # Empty tag: either <tag/> or <tag></tag> per _issingleton.
            nameattr = tagname+self.attributes(tag)
            if self._issingleton(tagname):
                self.write(u('<%s/>') % (nameattr,))
            else:
                self.write(u('<%s></%s>') % (nameattr, tagname))
    def attributes(self, tag):
        # serialize attributes, sorted for deterministic output
        attrlist = dir(tag.attr)
        attrlist.sort()
        l = []
        for name in attrlist:
            res = self.repr_attribute(tag.attr, name)
            if res is not None:
                l.append(res)
        l.extend(self.getstyle(tag))
        return u("").join(l)
    def repr_attribute(self, attrs, name):
        # Skip dunder entries picked up from dir(); strip the trailing
        # underscore used to dodge Python keywords (e.g. ``class_``).
        if name[:2] != '__':
            value = getattr(attrs, name)
            if name.endswith('_'):
                name = name[:-1]
            if isinstance(value, raw):
                insert = value.uniobj
            else:
                insert = escape(unicode(value))
            return ' %s="%s"' % (name, insert)
    def getstyle(self, tag):
        """ return attribute list suitable for styling. """
        try:
            styledict = tag.style.__dict__
        except AttributeError:
            # No ``style`` attribute on the tag: nothing to emit.
            return []
        else:
            stylelist = [x+': ' + y for x,y in styledict.items()]
            return [u(' style="%s"') % u('; ').join(stylelist)]
    def _issingleton(self, tagname):
        """can (and will) be overridden in subclasses"""
        return self.shortempty
    def _isinline(self, tagname):
        """can (and will) be overridden in subclasses"""
        return False
class HtmlVisitor(SimpleUnicodeVisitor):
    """Visitor with HTML-specific rules: void elements are rendered as
    singletons and inline elements stay on the current line."""

    single = dict.fromkeys(
        'br,img,area,param,col,hr,meta,link,base,input,frame'.split(','), 1)
    inline = dict.fromkeys(
        ('a abbr acronym b basefont bdo big br cite code dfn em font '
         'i img input kbd label q s samp select small span strike '
         'strong sub sup textarea tt u var').split(' '), 1)

    def repr_attribute(self, attrs, name):
        # A ``class_`` attribute explicitly set to None is dropped entirely.
        if name == 'class_' and getattr(attrs, name) is None:
            return None
        return super(HtmlVisitor, self).repr_attribute(attrs, name)

    def _issingleton(self, tagname):
        return tagname in self.single

    def _isinline(self, tagname):
        return tagname in self.inline
class _escape:
def __init__(self):
self.escape = {
u('"') : u('"'), u('<') : u('<'), u('>') : u('>'),
u('&') : u('&'), u("'") : u('''),
}
self.charef_rex = re.compile(u("|").join(self.escape.keys()))
def _replacer(self, match):
return self.escape[match.group(0)]
def __call__(self, ustring):
""" xml-escape the given unicode string. """
ustring = unicode(ustring)
return self.charef_rex.sub(self._replacer, ustring)
escape = _escape()
|
Nepherhotep/django | refs/heads/master | tests/test_client/auth_backends.py | 315 | from django.contrib.auth.backends import ModelBackend
class TestClientBackend(ModelBackend):
    """Authentication backend used by test-client tests; inherits all
    behaviour from ModelBackend unchanged."""
    pass
|
gweidner/incubator-systemml | refs/heads/master | scripts/perftest/python/utils_misc.py | 14 | #!/usr/bin/env python3
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
from os.path import join
import os
import json
import re
import sys
import getpass
from utils_exec import subprocess_exec
from utils_fs import check_hdfs_path
# This file contains all misc utility functions required by performance test module
def split_config_args(args):
    """
    Based on configuration parameters passed build configuration dictionary used by subprocess

    args: Dictionary
    All parameters passed in

    return: Dictionary, Dictionary, Dictionary
    3 dictionaries - one for perf tests, one for systemml specific args, one for backend options
    """

    # Perf-test bookkeeping options are copied through unchanged, in the
    # same order callers have historically relied on.
    perftest_keys = ['family', 'algo', 'exec_type', 'mat_type', 'mat_shape',
                     'config_dir', 'filename', 'mode', 'temp_dir',
                     'file_system_type']
    perftest_args_dict = {key: args[key] for key in perftest_keys}

    systemml_args_dict = {}
    # -stats / -explain always get an entry (empty string when unset) so
    # downstream command assembly can join them unconditionally.
    systemml_args_dict['-stats'] = args['stats'] if args['stats'] is not None else ''
    systemml_args_dict['-explain'] = args['explain'] if args['explain'] is not None else ''
    if args['config'] is not None:
        systemml_args_dict['-config'] = args['config']
    if args['gpu'] is not None:
        # 'no_option' means bare "-gpu" with no argument value.
        systemml_args_dict['-gpu'] = '' if args['gpu'] == 'no_option' else args['gpu']

    backend_args_dict = {}
    exec_type = args['exec_type']
    if exec_type == 'hybrid_spark':
        if args['master'] is not None:
            backend_args_dict['--master'] = args['master']
        if args['deploy_mode'] is not None:
            # Bug fix: this previously copied args['master'] into
            # --deploy-mode, so the user's deploy_mode was ignored.
            backend_args_dict['--deploy-mode'] = args['deploy_mode']
        if args['num_executors'] is not None:
            backend_args_dict['--num-executors'] = args['num_executors']
        if args['driver_memory'] is not None:
            backend_args_dict['--driver-memory'] = args['driver_memory']
        if args['executor_cores'] is not None:
            backend_args_dict['--executor-cores'] = args['executor_cores']
        if args['conf'] is not None:
            backend_args_dict['--conf'] = ''.join(args['conf'])
    elif exec_type == 'singlenode':
        if args['heapmem'] is not None:
            backend_args_dict['-heapmem'] = args['heapmem']

    return perftest_args_dict, systemml_args_dict, backend_args_dict
def args_dict_split(all_arguments):
    """
    This functions split the super set of arguments to smaller dictionaries

    all_arguments: Dictionary
    All input arguments parsed

    return: Dictionary, Dictionary, Dictionary, Dictionary
    We return four dictionaries for init, script, spark arguments, singlenode arguments
    """
    # Arguments arrive in a fixed insertion order; slice them by position.
    items = list(all_arguments.items())
    args_dict = dict(items[:9])
    config_dict = dict(items[9:13])
    spark_dict = dict(items[13:19])
    singlenode_dict = dict(items[19:])
    return args_dict, config_dict, spark_dict, singlenode_dict
def get_families(current_algo, ml_algo):
    """
    Given current algorithm we get its families.

    current_algo : String
    Input algorithm specified

    ml_algo : Dictionary
    key, value dictionary with family as key and algorithms as list of values

    return: List
    List of families containing the algorithm
    """
    # An algorithm may belong to more than one family.
    return [family for family, algos in ml_algo.items() if current_algo in algos]
def split_rowcol(matrix_dim):
    """
    Split the input matrix dimensions into row and columns

    matrix_dim: String
    Input concatenated string with row and column (e.g. "10k_1k")

    return: Tuple
    Row and column strings with 'k'/'M' suffixes expanded to zeros
    """
    # 'k' appends three zeros (thousand), 'M' appends six (million).
    expanded = matrix_dim.replace('M', '0' * 6).replace('k', '0' * 3)
    row, col = expanded.split('_')
    return row, col
def config_writer(write_path, config_obj):
    """
    Writes the dictionary as an configuration json file to the give path

    write_path: String
    Absolute path of file name to be written

    config_obj: List or Dictionary
    Can be a dictionary or a list based on the object passed
    """
    # Pretty-print with 4-space indent so configs stay human-editable.
    with open(write_path, 'w') as out_file:
        json.dump(config_obj, out_file, indent=4)
def config_reader(read_path):
    """
    Read json file given path

    read_path: String
    Path of the json file to load

    return: List or Dictionary
    Reading the json file can give us a list if we have positional args or
    key value args for a dictionary
    """
    with open(read_path, 'r') as in_file:
        return json.load(in_file)
def exec_dml_and_parse_time(exec_type, dml_file_name, args, backend_args_dict, systemml_args_dict, log_file_name=None):
    """
    This function is responsible of execution of input arguments via python sub process,
    We also extract time obtained from the output of this subprocess

    exec_type: String
    Contains the execution type singlenode / hybrid_spark

    dml_file_name: String
    DML file name to be used while processing the arguments give

    args: Dictionary
    Key values pairs depending on the arg type

    backend_args_dict: Dictionary
    Spark configuration arguments / singlenode config arguments

    systemml_args_dict: Dictionary
    Supplementary arguments required by the script

    log_file_name: String
    Path to write the logfile

    return: String
    The value of time parsed from the logs / error
    """
    algorithm = dml_file_name + '.dml'
    # Supplementary flags (-stats, -explain, ...) joined into one string.
    sup_args = ' '.join(['{} {}'.format(k, v) for k, v in systemml_args_dict.items()])
    if exec_type == 'singlenode':
        # Requires SYSTEMML_HOME to point at the SystemML install root.
        exec_script = join(os.environ.get('SYSTEMML_HOME'), 'bin', 'systemml-standalone.py')
        singlenode_pre_args = ' '.join(['{} {}'.format(k, v) for k, v in backend_args_dict.items()])
        args = ' '.join(['{} {}'.format(k, v) for k, v in args.items()])
        cmd = [exec_script, singlenode_pre_args, '-f', algorithm, args, sup_args]
        cmd_string = ' '.join(cmd)
    if exec_type == 'hybrid_spark':
        exec_script = join(os.environ.get('SYSTEMML_HOME'), 'bin', 'systemml-spark-submit.py')
        spark_pre_args = ' '.join([' {} {} '.format(k, v) for k, v in backend_args_dict.items()])
        args = ' '.join(['{} {}'.format(k, v) for k, v in args.items()])
        cmd = [exec_script, spark_pre_args, '-f', algorithm, args, sup_args]
        cmd_string = ' '.join(cmd)
    # NOTE(review): cmd_string is unbound if exec_type is neither
    # 'singlenode' nor 'hybrid_spark' -- callers appear to pass only these
    # two values; confirm.
    time = subprocess_exec(cmd_string, log_file_name, 'time')
    return time
def parse_time(raw_logs):
    """
    Parses raw input list and extracts time

    raw_logs : List
    Each line obtained from the standard output is in the list

    return: String
    Extracted time in seconds or time_not_found
    """
    marker = 'Total execution time'
    for entry in raw_logs:
        if entry.startswith(marker):
            # Join the digit groups, e.g. "12.345 sec" -> "12.345".
            return '.'.join(re.findall(r'\d+', entry))
    return 'time_not_found'
def exec_test_data(exec_type, backend_args_dict, systemml_args_dict, datagen_path, config):
    """
    Creates the test data split from the given input path

    exec_type : String
    Contains the execution type singlenode / hybrid_spark

    backend_args_dict: Dictionary
    Backend (spark / singlenode) options forwarded to the runner

    systemml_args_dict: Dictionary
    Supplementary SystemML arguments (-stats, -explain, ...)

    datagen_path : String
    Location of the input folder to pick X and Y

    config : String
    Current config path; its basename selects the data subfolder
    """
    # Requires SYSTEMML_HOME to point at the SystemML install root.
    systemml_home = os.environ.get('SYSTEMML_HOME')
    test_split_script = join(systemml_home, 'scripts', 'perftest', 'extractTestData')
    path = join(datagen_path, config.split('/')[-1])
    X = join(path, 'X.data')
    Y = join(path, 'Y.data')
    X_test = join(path, 'X_test.data')
    Y_test = join(path, 'Y_test.data')
    # Positional DML args: inputs X, Y and the output test splits, csv format.
    args = {'-args': ' '.join([X, Y, X_test, Y_test, 'csv'])}
    exec_dml_and_parse_time(exec_type, test_split_script, args, backend_args_dict, systemml_args_dict)
def check_predict(current_algo, ml_predict):
    """
    To check if the current algorithm requires to run the predict

    current_algo: String
    Algorithm being processed

    ml_predict: Dictionary
    Key value pairs of algorithm and predict file to process

    return: Boolean
    True when the algorithm has a predict script, False otherwise
    """
    # Previously returned None implicitly in the negative case; an explicit
    # boolean keeps the contract uniform (truthiness is unchanged for
    # existing callers).
    return current_algo in ml_predict
def get_folder_metrics(folder_name, action_mode):
    """
    Gets metrics from folder name for logging

    folder_name: String
    Folder from which we want to grab details

    action_mode: String
    One of 'data-gen', 'train', 'predict'

    return: List(3)
    A list with mat_type, mat_shape, intercept
    """
    # Defaults guard against unknown action modes and short folder names,
    # which previously raised NameError (unbound mat_type/mat_shape) or an
    # uncaught IndexError in the data-gen branch.
    mat_type = 'none'
    mat_shape = 'none'
    intercept = 'none'
    split_name = folder_name.split('.')
    try:
        if action_mode == 'data-gen':
            # data-gen folders: <algo>.<mat_type>.<mat_shape>...
            mat_type = split_name[1]
            mat_shape = split_name[2]
        elif action_mode in ('train', 'predict'):
            # train/predict folders: <..>.<..>.<mat_shape>.<mat_type>.<intercept>
            mat_type = split_name[3]
            mat_shape = split_name[2]
            intercept = split_name[4]
    except IndexError:
        intercept = 'none'
    return mat_type, mat_shape, intercept
def mat_type_check(current_family, matrix_types, dense_algos):
    """
    Some Algorithms support different matrix_types. This function give us the right matrix_type given
    an algorithm

    current_family: String
    Current family being processed in this function

    matrix_types: List
    Type of matrix to generate dense, sparse, all

    dense_algos: List
    Algorithms that support only dense matrix type

    return: List
    Return the list of right matrix types supported by the family
    """
    supported = []
    for requested in matrix_types:
        if requested == 'all':
            # 'all' expands to everything the family supports.
            if current_family in dense_algos:
                supported.append('dense')
            else:
                supported.extend(['dense', 'sparse'])
        elif requested == 'sparse':
            if current_family in dense_algos:
                # Hard failure: sparse was explicitly requested but unsupported.
                sys.exit('{} does not support {} matrix type'.format(current_family,
                                                                     requested))
            supported.append(requested)
        elif requested == 'dense':
            supported.append(requested)
    return supported
def get_default_dir(file_system_type, temp_dir, exec_mode, config_dir):
    """
    Resolve the base directory used for temp/output data.

    file_system_type: String
    'hdfs' or a local file system (only consulted for hybrid_spark)

    temp_dir: String
    User-supplied temp directory, or None

    exec_mode: String
    'singlenode' or 'hybrid_spark'

    config_dir: String
    Fallback directory used when no temp_dir was supplied

    return: String
    Local or HDFS home directory
    """
    if exec_mode == 'singlenode':
        # Local execution: user-specified temp dir wins, else config dir.
        if temp_dir is None:
            return config_dir
        if temp_dir is not None:
            return temp_dir
    if exec_mode == 'hybrid_spark':
        if file_system_type == 'hdfs':
            # Ask the cluster for its default FS URI (e.g. hdfs://host:port).
            cmd = ['hdfs', 'getconf', '-confKey', 'fs.default.name']
            hdfs_base = subprocess_exec(' '.join(cmd), extract='hdfs_base')
            if temp_dir is None:
                hdfs_home = join(hdfs_base, 'user', getpass.getuser())
                check_hdfs_path(hdfs_home)
                return hdfs_home
            if temp_dir is not None:
                if temp_dir.startswith('hdfs'):
                    # Already a fully-qualified HDFS URI; use as-is.
                    return temp_dir
                else:
                    # Relative temp dir is placed under the user's HDFS home.
                    hdfs_home = join(hdfs_base, 'user', getpass.getuser(), temp_dir)
                    return hdfs_home
        else:
            if temp_dir is None:
                return config_dir
            if temp_dir is not None:
                return temp_dir
|
lmprice/ansible | refs/heads/devel | lib/ansible/modules/network/f5/bigip_monitor_snmp_dca.py | 12 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_monitor_snmp_dca
short_description: Manages BIG-IP SNMP data collecting agent (DCA) monitors
description:
- The BIG-IP has an SNMP data collecting agent (DCA) that can query remote
SNMP agents of various types, including the UC Davis agent (UCD) and the
Windows 2000 Server agent (WIN2000).
version_added: 2.5
options:
name:
description:
- Monitor name.
required: True
description:
description:
- Specifies descriptive text that identifies the monitor.
parent:
description:
- The parent template of this monitor template. Once this value has
been set, it cannot be changed. By default, this value is the C(snmp_dca)
parent on the C(Common) partition.
default: "/Common/snmp_dca"
interval:
description:
- Specifies, in seconds, the frequency at which the system issues the
monitor check when either the resource is down or the status of the
resource is unknown. When creating a new monitor, the default is C(10).
timeout:
description:
- Specifies the number of seconds the target has in which to respond to
the monitor request. When creating a new monitor, the default is C(30)
seconds. If the target responds within the set time period, it is
considered 'up'. If the target does not respond within the set time
period, it is considered 'down'. When this value is set to 0 (zero),
the system uses the interval from the parent monitor. Note that
C(timeout) and C(time_until_up) combine to control when a resource is
set to up.
time_until_up:
description:
- Specifies the number of seconds to wait after a resource first responds
correctly to the monitor before setting the resource to 'up'. During the
interval, all responses from the resource must be correct. When the
interval expires, the resource is marked 'up'. A value of 0, means
that the resource is marked up immediately upon receipt of the first
correct response. When creating a new monitor, the default is C(0).
community:
description:
- Specifies the community name that the system must use to authenticate
with the host server through SNMP. When creating a new monitor, the
default value is C(public). Note that this value is case sensitive.
version:
description:
- Specifies the version of SNMP that the host server uses. When creating
a new monitor, the default is C(v1). When C(v1), specifies that the
host server uses SNMP version 1. When C(v2c), specifies that the host
server uses SNMP version 2c.
choices:
- v1
- v2c
agent_type:
description:
- Specifies the SNMP agent running on the monitored server. When creating
a new monitor, the default is C(UCD) (UC-Davis).
choices:
- UCD
- WIN2000
- GENERIC
cpu_coefficient:
description:
- Specifies the coefficient that the system uses to calculate the weight
of the CPU threshold in the dynamic ratio load balancing algorithm.
When creating a new monitor, the default is C(1.5).
cpu_threshold:
description:
- Specifies the maximum acceptable CPU usage on the target server. When
creating a new monitor, the default is C(80) percent.
memory_coefficient:
description:
- Specifies the coefficient that the system uses to calculate the weight
of the memory threshold in the dynamic ratio load balancing algorithm.
When creating a new monitor, the default is C(1.0).
memory_threshold:
description:
- Specifies the maximum acceptable memory usage on the target server.
When creating a new monitor, the default is C(70) percent.
disk_coefficient:
description:
- Specifies the coefficient that the system uses to calculate the weight
of the disk threshold in the dynamic ratio load balancing algorithm.
When creating a new monitor, the default is C(2.0).
disk_threshold:
description:
- Specifies the maximum acceptable disk usage on the target server. When
creating a new monitor, the default is C(90) percent.
partition:
description:
- Device partition to manage resources on.
default: Common
state:
description:
- When C(present), ensures that the monitor exists.
- When C(absent), ensures the monitor is removed.
default: present
choices:
- present
- absent
version_added: 2.5
notes:
- Requires BIG-IP software version >= 12
- This module does not support the C(variables) option because this option
is broken in the REST API and does not function correctly in C(tmsh); for
example you cannot remove user-defined params. Therefore, there is no way
to automatically configure it.
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create SNMP DCS monitor
bigip_monitor_snmp_dca:
state: present
server: lb.mydomain.com
user: admin
password: secret
name: my_monitor
delegate_to: localhost
- name: Remove TCP Echo Monitor
bigip_monitor_snmp_dca:
state: absent
server: lb.mydomain.com
user: admin
password: secret
name: my_monitor
delegate_to: localhost
'''
RETURN = r'''
parent:
description: New parent template of the monitor.
returned: changed
type: string
sample: snmp_dca
interval:
description: The new interval in which to run the monitor check.
returned: changed
type: int
sample: 2
timeout:
description: The new timeout in which the remote system must respond to the monitor.
returned: changed
type: int
sample: 10
time_until_up:
description: The new time in which to mark a system as up after first successful response.
returned: changed
type: int
sample: 2
community:
description: The new community for the monitor.
returned: changed
type: string
sample: foobar
version:
description: The new new SNMP version to be used by the monitor.
returned: changed
type: string
sample: v2c
agent_type:
description: The new agent type to be used by the monitor.
returned: changed
type: string
sample: UCD
cpu_coefficient:
description: The new CPU coefficient.
returned: changed
type: float
sample: 2.4
cpu_threshold:
description: The new CPU threshold.
returned: changed
type: int
sample: 85
memory_coefficient:
description: The new memory coefficient.
returned: changed
type: float
sample: 6.4
memory_threshold:
description: The new memory threshold.
returned: changed
type: int
sample: 50
disk_coefficient:
description: The new disk coefficient.
returned: changed
type: float
sample: 10.2
disk_threshold:
description: The new disk threshold.
returned: changed
type: int
sample: 34
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
except ImportError:
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class Parameters(AnsibleF5Parameters):
    """Maps between Ansible module parameters and BIG-IP REST API
    attributes for the snmp_dca monitor resource."""

    # REST attribute name -> module parameter name.
    api_map = {
        'timeUntilUp': 'time_until_up',
        'defaultsFrom': 'parent',
        'agentType': 'agent_type',
        'cpuCoefficient': 'cpu_coefficient',
        'cpuThreshold': 'cpu_threshold',
        'memoryCoefficient': 'memory_coefficient',
        'memoryThreshold': 'memory_threshold',
        'diskCoefficient': 'disk_coefficient',
        'diskThreshold': 'disk_threshold'
    }

    # Attributes sent to the API on create/update.
    api_attributes = [
        'timeUntilUp', 'defaultsFrom', 'interval', 'timeout', 'destination', 'community',
        'version', 'agentType', 'cpuCoefficient', 'cpuThreshold', 'memoryCoefficient',
        'memoryThreshold', 'diskCoefficient', 'diskThreshold'
    ]

    # Values reported back to the user in the module result.
    returnables = [
        'parent', 'ip', 'interval', 'timeout', 'time_until_up', 'description', 'community',
        'version', 'agent_type', 'cpu_coefficient', 'cpu_threshold', 'memory_coefficient',
        'memory_threshold', 'disk_coefficient', 'disk_threshold'
    ]

    # Values compared against the device to detect changes.
    updatables = [
        'ip', 'interval', 'timeout', 'time_until_up', 'description', 'community',
        'version', 'agent_type', 'cpu_coefficient', 'cpu_threshold', 'memory_coefficient',
        'memory_threshold', 'disk_coefficient', 'disk_threshold'
    ]

    def to_return(self):
        """Collect the returnable parameters, filtered of unset values."""
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
            return result
        except Exception:
            # Best effort: return whatever was gathered before the failure.
            return result

    @property
    def interval(self):
        """Monitor check interval in seconds (valid range 1..86400)."""
        if self._values['interval'] is None:
            return None
        interval = int(self._values['interval'])
        # Bug fix: the original guard ``1 > interval > 86400`` is always
        # False (a value cannot be both < 1 and > 86400 at once), so
        # out-of-range intervals were silently accepted.
        if interval < 1 or interval > 86400:
            raise F5ModuleError(
                "Interval value must be between 1 and 86400"
            )
        return interval

    @property
    def timeout(self):
        """Monitor timeout in seconds."""
        if self._values['timeout'] is None:
            return None
        return int(self._values['timeout'])

    @property
    def time_until_up(self):
        """Seconds of correct responses required before marking 'up'."""
        if self._values['time_until_up'] is None:
            return None
        return int(self._values['time_until_up'])

    @property
    def parent(self):
        """Fully-qualified (partition-prefixed) parent template name."""
        if self._values['parent'] is None:
            return None
        result = fq_name(self.partition, self._values['parent'])
        return result

    @property
    def cpu_coefficient(self):
        result = self._get_numeric_property('cpu_coefficient')
        return result

    @property
    def cpu_threshold(self):
        result = self._get_numeric_property('cpu_threshold')
        return result

    @property
    def memory_coefficient(self):
        result = self._get_numeric_property('memory_coefficient')
        return result

    @property
    def memory_threshold(self):
        result = self._get_numeric_property('memory_threshold')
        return result

    @property
    def disk_coefficient(self):
        result = self._get_numeric_property('disk_coefficient')
        return result

    @property
    def disk_threshold(self):
        result = self._get_numeric_property('disk_threshold')
        return result

    def _get_numeric_property(self, name):
        # Shared coercion for the coefficient/threshold options; raises a
        # module error rather than a bare ValueError for bad user input.
        if self._values[name] is None:
            return None
        try:
            fvar = float(self._values[name])
        except ValueError:
            raise F5ModuleError(
                "Provided {0} must be a valid number".format(name)
            )
        return fvar

    @property
    def type(self):
        # Fixed monitor type handled by this module.
        return 'snmp_dca'
class Changes(Parameters):
    """Parameters subclass holding the set of changed values reported
    back from a run; inherits all behaviour unchanged."""
    pass
class Difference(object):
    # Computes, per parameter, the value to push to the device (or None
    # when desired and current state already agree).
    def __init__(self, want, have=None):
        self.want = want
        self.have = have
    def compare(self, param):
        # Prefer a specialized comparison property when one exists;
        # otherwise fall back to plain inequality via __default.
        try:
            result = getattr(self, param)
            return result
        except AttributeError:
            result = self.__default(param)
            return result
    @property
    def parent(self):
        # The parent template is immutable on the BIG-IP; changing it is
        # an error rather than a diff.
        if self.want.parent != self.have.parent:
            raise F5ModuleError(
                "The parent monitor cannot be changed"
            )
    @property
    def interval(self):
        # Validate interval < timeout across every desired/current
        # combination before reporting the interval as changed.
        if self.want.timeout is not None and self.want.interval is not None:
            if self.want.interval >= self.want.timeout:
                raise F5ModuleError(
                    "Parameter 'interval' must be less than 'timeout'."
                )
        elif self.want.timeout is not None:
            if self.have.interval >= self.want.timeout:
                raise F5ModuleError(
                    "Parameter 'interval' must be less than 'timeout'."
                )
        elif self.want.interval is not None:
            if self.want.interval >= self.have.timeout:
                raise F5ModuleError(
                    "Parameter 'interval' must be less than 'timeout'."
                )
        if self.want.interval != self.have.interval:
            return self.want.interval
    def __default(self, param):
        attr1 = getattr(self.want, param)
        try:
            attr2 = getattr(self.have, param)
            if attr1 != attr2:
                return attr1
        except AttributeError:
            # Device did not report this parameter; report desired value.
            return attr1
class ModuleManager(object):
    def __init__(self, *args, **kwargs):
        # AnsibleModule and F5 REST client are injected by the caller.
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)
        self.have = None  # device state; populated lazily by update()
        self.want = Parameters(params=self.module.params)
        self.changes = Changes()
    def _set_changed_options(self):
        # On create: report every explicitly supplied parameter as changed.
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = Changes(params=changed)
    def _update_changed_options(self):
        # On update: diff desired vs device state per updatable parameter;
        # returns True when anything differs.
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                changed[k] = change
        if changed:
            self.changes = Changes(params=changed)
            return True
        return False
    def _announce_deprecations(self):
        # Surface deprecation warnings collected on the parameter objects.
        warnings = []
        if self.want:
            warnings += self.want._values.get('__warnings', [])
        if self.have:
            warnings += self.have._values.get('__warnings', [])
        for warning in warnings:
            self.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )
    def exec_module(self):
        """Entry point: apply the desired state and return the result dict."""
        changed = False
        result = dict()
        state = self.want.state
        try:
            if state == "present":
                changed = self.present()
            elif state == "absent":
                changed = self.absent()
        except iControlUnexpectedHTTPError as e:
            # Normalize unexpected REST errors into the module error type.
            raise F5ModuleError(str(e))
        changes = self.changes.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations()
        return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def create(self):
self._set_changed_options()
if self.want.timeout is None:
self.want.update({'timeout': 30})
if self.want.interval is None:
self.want.update({'interval': 10})
if self.want.time_until_up is None:
self.want.update({'time_until_up': 0})
if self.want.community is None:
self.want.update({'community': 'public'})
if self.want.version is None:
self.want.update({'version': 'v1'})
if self.want.agent_type is None:
self.want.update({'agent_type': 'UCD'})
if self.want.cpu_coefficient is None:
self.want.update({'cpu_coefficient': '1.5'})
if self.want.cpu_threshold is None:
self.want.update({'cpu_threshold': '80'})
if self.want.memory_coefficient is None:
self.want.update({'memory_coefficient': '1.0'})
if self.want.memory_threshold is None:
self.want.update({'memory_threshold': '70'})
if self.want.disk_coefficient is None:
self.want.update({'disk_coefficient': '2.0'})
if self.want.disk_threshold is None:
self.want.update({'disk_threshold': '90'})
if self.module.check_mode:
return True
self.create_on_device()
return True
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def absent(self):
if self.exists():
return self.remove()
return False
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the monitor.")
return True
def read_current_from_device(self):
resource = self.client.api.tm.ltm.monitor.snmp_dcas.snmp_dca.load(
name=self.want.name,
partition=self.want.partition
)
result = resource.attrs
return Parameters(params=result)
def exists(self):
result = self.client.api.tm.ltm.monitor.snmp_dcas.snmp_dca.exists(
name=self.want.name,
partition=self.want.partition
)
return result
def update_on_device(self):
params = self.want.api_params()
result = self.client.api.tm.ltm.monitor.snmp_dcas.snmp_dca.load(
name=self.want.name,
partition=self.want.partition
)
result.modify(**params)
def create_on_device(self):
params = self.want.api_params()
self.client.api.tm.ltm.monitor.snmp_dcas.snmp_dca.create(
name=self.want.name,
partition=self.want.partition,
**params
)
def remove_from_device(self):
result = self.client.api.tm.ltm.monitor.snmp_dcas.snmp_dca.load(
name=self.want.name,
partition=self.want.partition
)
if result:
result.delete()
class ArgumentSpec(object):
    """Declares the Ansible argument spec for the snmp_dca monitor module."""
    def __init__(self):
        self.supports_check_mode = True
        argument_spec = dict(
            name=dict(required=True),
            description=dict(),
            parent=dict(default='/Common/snmp_dca'),
            interval=dict(type='int'),
            timeout=dict(type='int'),
            time_until_up=dict(type='int'),
            community=dict(),
            version=dict(choices=['v1', 'v2c']),
            agent_type=dict(
                choices=['UCD', 'WIN2000', 'GENERIC']
            ),
            # NOTE(review): coefficients are untyped (strings) while the
            # matching thresholds are ints -- presumably intentional since
            # the API takes them as strings; confirm against the REST docs.
            cpu_coefficient=dict(),
            cpu_threshold=dict(type='int'),
            memory_coefficient=dict(),
            memory_threshold=dict(type='int'),
            disk_coefficient=dict(),
            disk_threshold=dict(type='int'),
            state=dict(
                default='present',
                choices=['present', 'absent']
            ),
            partition=dict(
                default='Common',
                fallback=(env_fallback, ['F5_PARTITION'])
            )
        )
        self.argument_spec = {}
        # Start from the shared F5 spec, then overlay module-specific args.
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
def main():
    """Module entry point: parse arguments, run the manager, report results."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode
    )
    if not HAS_F5SDK:
        module.fail_json(msg="The python f5-sdk module is required")
    # Initialize before the try block: F5Client() itself may raise
    # F5ModuleError, in which case the except clause would otherwise hit
    # an UnboundLocalError on ``cleanup_tokens(client)``.
    client = None
    try:
        client = F5Client(**module.params)
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        cleanup_tokens(client)
        module.exit_json(**results)
    except F5ModuleError as ex:
        if client is not None:
            cleanup_tokens(client)
        module.fail_json(msg=str(ex))


if __name__ == '__main__':
    main()
|
liqd/a4-meinberlin | refs/heads/renovate/pin-dependencies | meinberlin/apps/bplan/signals.py | 1 | from django.db.models.signals import post_save
from django.dispatch import receiver
from . import emails
from . import tasks
from .models import Bplan
from .models import Statement
@receiver(post_save, sender=Bplan)
def get_location(sender, instance, update_fields, **kwargs):
    """Queue a geolocation lookup for a saved Bplan.

    Skipped when the plan has no identifier, or when the save touched only
    the 'point'/'is_archived' fields (i.e. it was triggered by a previous
    lookup or an archiving pass).
    """
    if not instance.identifier:
        return
    if update_fields and ('point' in update_fields or
                          'is_archived' in update_fields):
        return
    tasks.get_location_information(instance.pk)
@receiver(post_save, sender=Statement)
def send_notification(sender, instance, created, **kwargs):
    """Notify office workers of a newly created statement, and confirm
    receipt to the submitter when an email address is available."""
    if not created:
        return
    emails.OfficeWorkerNotification.send(instance)
    if instance.email:
        emails.SubmitterConfirmation.send(instance)
@receiver(post_save, sender=Bplan)
def send_update(sender, instance, update_fields, **kwargs):
    """Confirm a Bplan update to office workers, except for saves that only
    touched the 'point'/'is_archived' fields."""
    internal_only = update_fields and ('point' in update_fields or
                                       'is_archived' in update_fields)
    if not internal_only:
        emails.OfficeWorkerUpdateConfirmation.send(instance)
|
fossoult/odoo | refs/heads/8.0 | addons/sale_mrp/__init__.py | 445 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale_mrp
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
OpenNetworkingFoundation/ONFOpenTransport | refs/heads/develop | RI/flask_server/tapi_server/models/tapi_photonic_media_media_channel_node_edge_point_spec.py | 4 | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from tapi_server.models.base_model_ import Model
from tapi_server.models.tapi_photonic_media_media_channel_pool_capability_pac import TapiPhotonicMediaMediaChannelPoolCapabilityPac # noqa: F401,E501
from tapi_server import util
class TapiPhotonicMediaMediaChannelNodeEdgePointSpec(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
    Do not edit the class manually.
    """
    def __init__(self, mc_pool=None):  # noqa: E501
        """TapiPhotonicMediaMediaChannelNodeEdgePointSpec - a model defined in OpenAPI
        :param mc_pool: The mc_pool of this TapiPhotonicMediaMediaChannelNodeEdgePointSpec.  # noqa: E501
        :type mc_pool: TapiPhotonicMediaMediaChannelPoolCapabilityPac
        """
        # Attribute name -> declared OpenAPI type (consumed by the
        # deserializer in tapi_server.util).
        self.openapi_types = {
            'mc_pool': TapiPhotonicMediaMediaChannelPoolCapabilityPac
        }
        # Python attribute name -> JSON (wire) key.
        self.attribute_map = {
            'mc_pool': 'mc-pool'
        }
        self._mc_pool = mc_pool
    @classmethod
    def from_dict(cls, dikt) -> 'TapiPhotonicMediaMediaChannelNodeEdgePointSpec':
        """Returns the dict as a model
        :param dikt: A dict.
        :type: dict
        :return: The tapi.photonic.media.MediaChannelNodeEdgePointSpec of this TapiPhotonicMediaMediaChannelNodeEdgePointSpec.  # noqa: E501
        :rtype: TapiPhotonicMediaMediaChannelNodeEdgePointSpec
        """
        return util.deserialize_model(dikt, cls)
    @property
    def mc_pool(self):
        """Gets the mc_pool of this TapiPhotonicMediaMediaChannelNodeEdgePointSpec.
        :return: The mc_pool of this TapiPhotonicMediaMediaChannelNodeEdgePointSpec.
        :rtype: TapiPhotonicMediaMediaChannelPoolCapabilityPac
        """
        return self._mc_pool
    @mc_pool.setter
    def mc_pool(self, mc_pool):
        """Sets the mc_pool of this TapiPhotonicMediaMediaChannelNodeEdgePointSpec.
        :param mc_pool: The mc_pool of this TapiPhotonicMediaMediaChannelNodeEdgePointSpec.
        :type mc_pool: TapiPhotonicMediaMediaChannelPoolCapabilityPac
        """
        self._mc_pool = mc_pool
|
andersonresende/django | refs/heads/master | tests/aggregation/tests.py | 14 | from __future__ import unicode_literals
import datetime
from decimal import Decimal
import re
from django.db import connection
from django.db.models import Avg, Sum, Count, Max, Min
from django.test import TestCase
from django.test.utils import Approximate
from django.test.utils import CaptureQueriesContext
from .models import Author, Publisher, Book, Store
class BaseAggregateTestCase(TestCase):
fixtures = ["aggregation.json"]
def test_empty_aggregate(self):
self.assertEqual(Author.objects.all().aggregate(), {})
def test_single_aggregate(self):
vals = Author.objects.aggregate(Avg("age"))
self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
def test_multiple_aggregates(self):
vals = Author.objects.aggregate(Sum("age"), Avg("age"))
self.assertEqual(vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)})
def test_filter_aggregate(self):
vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["age__sum"], 254)
def test_related_aggregate(self):
vals = Author.objects.aggregate(Avg("friends__age"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["friends__age__avg"], 34.07, places=2)
vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["authors__age__avg"], 38.2857, places=2)
vals = Author.objects.all().filter(name__contains="a").aggregate(Avg("book__rating"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__rating__avg"], 4.0)
vals = Book.objects.aggregate(Sum("publisher__num_awards"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["publisher__num_awards__sum"], 30)
vals = Publisher.objects.aggregate(Sum("book__price"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__price__sum"], Decimal("270.27"))
def test_aggregate_multi_join(self):
vals = Store.objects.aggregate(Max("books__authors__age"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["books__authors__age__max"], 57)
vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__publisher__num_awards__min"], 1)
def test_aggregate_alias(self):
vals = Store.objects.filter(name="Amazon.com").aggregate(amazon_mean=Avg("books__rating"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["amazon_mean"], 4.08, places=2)
def test_annotate_basic(self):
self.assertQuerysetEqual(
Book.objects.annotate().order_by('pk'), [
"The Definitive Guide to Django: Web Development Done Right",
"Sams Teach Yourself Django in 24 Hours",
"Practical Django Projects",
"Python Web Development with Django",
"Artificial Intelligence: A Modern Approach",
"Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp"
],
lambda b: b.name
)
books = Book.objects.annotate(mean_age=Avg("authors__age"))
b = books.get(pk=1)
self.assertEqual(
b.name,
'The Definitive Guide to Django: Web Development Done Right'
)
self.assertEqual(b.mean_age, 34.5)
def test_annotate_defer(self):
qs = Book.objects.annotate(
page_sum=Sum("pages")).defer('name').filter(pk=1)
rows = [
(1, "159059725", 447, "The Definitive Guide to Django: Web Development Done Right")
]
self.assertQuerysetEqual(
qs.order_by('pk'), rows,
lambda r: (r.id, r.isbn, r.page_sum, r.name)
)
def test_annotate_defer_select_related(self):
qs = Book.objects.select_related('contact').annotate(
page_sum=Sum("pages")).defer('name').filter(pk=1)
rows = [
(1, "159059725", 447, "Adrian Holovaty",
"The Definitive Guide to Django: Web Development Done Right")
]
self.assertQuerysetEqual(
qs.order_by('pk'), rows,
lambda r: (r.id, r.isbn, r.page_sum, r.contact.name, r.name)
)
def test_annotate_m2m(self):
books = Book.objects.filter(rating__lt=4.5).annotate(Avg("authors__age")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 51.5),
('Practical Django Projects', 29.0),
('Python Web Development with Django', Approximate(30.3, places=1)),
('Sams Teach Yourself Django in 24 Hours', 45.0)
],
lambda b: (b.name, b.authors__age__avg),
)
books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 2),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
('Practical Django Projects', 1),
('Python Web Development with Django', 3),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 2)
],
lambda b: (b.name, b.num_authors)
)
def test_backwards_m2m_annotate(self):
authors = Author.objects.filter(name__contains="a").annotate(Avg("book__rating")).order_by("name")
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 4.5),
('Brad Dayley', 3.0),
('Jacob Kaplan-Moss', 4.5),
('James Bennett', 4.0),
('Paul Bissex', 4.0),
('Stuart Russell', 4.0)
],
lambda a: (a.name, a.book__rating__avg)
)
authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 1),
('Brad Dayley', 1),
('Jacob Kaplan-Moss', 1),
('James Bennett', 1),
('Jeffrey Forcier', 1),
('Paul Bissex', 1),
('Peter Norvig', 2),
('Stuart Russell', 1),
('Wesley J. Chun', 1)
],
lambda a: (a.name, a.num_books)
)
def test_reverse_fkey_annotate(self):
books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 7),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 9),
('Practical Django Projects', 3),
('Python Web Development with Django', 7),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 3)
],
lambda b: (b.name, b.publisher__num_awards__sum)
)
publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
self.assertQuerysetEqual(
publishers, [
('Apress', Decimal("59.69")),
("Jonno's House of Books", None),
('Morgan Kaufmann', Decimal("75.00")),
('Prentice Hall', Decimal("112.49")),
('Sams', Decimal("23.09"))
],
lambda p: (p.name, p.book__price__sum)
)
def test_annotate_values(self):
books = list(Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values())
self.assertEqual(
books, [
{
"contact_id": 1,
"id": 1,
"isbn": "159059725",
"mean_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": 1,
"rating": 4.5,
}
]
)
books = Book.objects.filter(pk=1).annotate(mean_age=Avg('authors__age')).values('pk', 'isbn', 'mean_age')
self.assertEqual(
list(books), [
{
"pk": 1,
"isbn": "159059725",
"mean_age": 34.5,
}
]
)
books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values("name")
self.assertEqual(
list(books), [
{
"name": "The Definitive Guide to Django: Web Development Done Right"
}
]
)
books = Book.objects.filter(pk=1).values().annotate(mean_age=Avg('authors__age'))
self.assertEqual(
list(books), [
{
"contact_id": 1,
"id": 1,
"isbn": "159059725",
"mean_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": 1,
"rating": 4.5,
}
]
)
books = Book.objects.values("rating").annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age")).order_by("rating")
self.assertEqual(
list(books), [
{
"rating": 3.0,
"n_authors": 1,
"mean_age": 45.0,
},
{
"rating": 4.0,
"n_authors": 6,
"mean_age": Approximate(37.16, places=1)
},
{
"rating": 4.5,
"n_authors": 2,
"mean_age": 34.5,
},
{
"rating": 5.0,
"n_authors": 1,
"mean_age": 57.0,
}
]
)
authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
self.assertEqual(len(authors), 9)
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 32.0),
('Brad Dayley', None),
('Jacob Kaplan-Moss', 29.5),
('James Bennett', 34.0),
('Jeffrey Forcier', 27.0),
('Paul Bissex', 31.0),
('Peter Norvig', 46.0),
('Stuart Russell', 57.0),
('Wesley J. Chun', Approximate(33.66, places=1))
],
lambda a: (a.name, a.friends__age__avg)
)
def test_count(self):
vals = Book.objects.aggregate(Count("rating"))
self.assertEqual(vals, {"rating__count": 6})
vals = Book.objects.aggregate(Count("rating", distinct=True))
self.assertEqual(vals, {"rating__count": 4})
def test_fkey_aggregate(self):
explicit = list(Author.objects.annotate(Count('book__id')))
implicit = list(Author.objects.annotate(Count('book')))
self.assertEqual(explicit, implicit)
def test_annotate_ordering(self):
books = Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('oldest', 'rating')
self.assertEqual(
list(books), [
{
"rating": 4.5,
"oldest": 35,
},
{
"rating": 3.0,
"oldest": 45
},
{
"rating": 4.0,
"oldest": 57,
},
{
"rating": 5.0,
"oldest": 57,
}
]
)
books = Book.objects.values("rating").annotate(oldest=Max("authors__age")).order_by("-oldest", "-rating")
self.assertEqual(
list(books), [
{
"rating": 5.0,
"oldest": 57,
},
{
"rating": 4.0,
"oldest": 57,
},
{
"rating": 3.0,
"oldest": 45,
},
{
"rating": 4.5,
"oldest": 35,
}
]
)
def test_aggregate_annotation(self):
vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(Avg("num_authors"))
self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})
def test_filtering(self):
p = Publisher.objects.create(name='Expensive Publisher', num_awards=0)
Book.objects.create(
name='ExpensiveBook1',
pages=1,
isbn='111',
rating=3.5,
price=Decimal("1000"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008, 12, 1)
)
Book.objects.create(
name='ExpensiveBook2',
pages=1,
isbn='222',
rating=4.0,
price=Decimal("1000"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008, 12, 2)
)
Book.objects.create(
name='ExpensiveBook3',
pages=1,
isbn='333',
rating=4.5,
price=Decimal("35"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008, 12, 3)
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Apress",
"Sams",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1, book__price__lt=Decimal("40.0")).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 3]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 2]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__in=[1, 3]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Sams",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__isnull=True)
self.assertEqual(len(publishers), 0)
def test_annotation(self):
vals = Author.objects.filter(pk=1).aggregate(Count("friends__id"))
self.assertEqual(vals, {"friends__id__count": 2})
books = Book.objects.annotate(num_authors=Count("authors__name")).filter(num_authors__exact=2).order_by("pk")
self.assertQuerysetEqual(
books, [
"The Definitive Guide to Django: Web Development Done Right",
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name
)
authors = Author.objects.annotate(num_friends=Count("friends__id", distinct=True)).filter(num_friends=0).order_by("pk")
self.assertQuerysetEqual(
authors, [
"Brad Dayley",
],
lambda a: a.name
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
],
lambda p: p.name
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1)
self.assertQuerysetEqual(
publishers, [
"Apress",
],
lambda p: p.name
)
books = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1)
self.assertQuerysetEqual(
books, [
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name
)
def test_more_aggregation(self):
a = Author.objects.get(name__contains='Norvig')
b = Book.objects.get(name__contains='Done Right')
b.authors.add(a)
b.save()
vals = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1).aggregate(Avg("rating"))
self.assertEqual(vals, {"rating__avg": 4.25})
def test_even_more_aggregate(self):
publishers = Publisher.objects.annotate(earliest_book=Min("book__pubdate")).exclude(earliest_book=None).order_by("earliest_book").values()
self.assertEqual(
list(publishers), [
{
'earliest_book': datetime.date(1991, 10, 15),
'num_awards': 9,
'id': 4,
'name': 'Morgan Kaufmann'
},
{
'earliest_book': datetime.date(1995, 1, 15),
'num_awards': 7,
'id': 3,
'name': 'Prentice Hall'
},
{
'earliest_book': datetime.date(2007, 12, 6),
'num_awards': 3,
'id': 1,
'name': 'Apress'
},
{
'earliest_book': datetime.date(2008, 3, 3),
'num_awards': 1,
'id': 2,
'name': 'Sams'
}
]
)
vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening"))
self.assertEqual(
vals,
{
"friday_night_closing__max": datetime.time(23, 59, 59),
"original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
}
)
def test_annotate_values_list(self):
books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("pk", "isbn", "mean_age")
self.assertEqual(
list(books), [
(1, "159059725", 34.5),
]
)
books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("isbn")
self.assertEqual(
list(books), [
('159059725',)
]
)
books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("mean_age")
self.assertEqual(
list(books), [
(34.5,)
]
)
books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("mean_age", flat=True)
self.assertEqual(list(books), [34.5])
books = Book.objects.values_list("price").annotate(count=Count("price")).order_by("-count", "price")
self.assertEqual(
list(books), [
(Decimal("29.69"), 2),
(Decimal('23.09'), 1),
(Decimal('30'), 1),
(Decimal('75'), 1),
(Decimal('82.8'), 1),
]
)
def test_dates_with_aggregation(self):
"""
Test that .dates() returns a distinct set of dates when applied to a
QuerySet with aggregation.
Refs #18056. Previously, .dates() would return distinct (date_kind,
aggregation) sets, in this case (year, num_authors), so 2008 would be
returned twice because there are books from 2008 with a different
number of authors.
"""
dates = Book.objects.annotate(num_authors=Count("authors")).dates('pubdate', 'year')
self.assertQuerysetEqual(
dates, [
"datetime.date(1991, 1, 1)",
"datetime.date(1995, 1, 1)",
"datetime.date(2007, 1, 1)",
"datetime.date(2008, 1, 1)"
]
)
def test_values_aggregation(self):
# Refs #20782
max_rating = Book.objects.values('rating').aggregate(max_rating=Max('rating'))
self.assertEqual(max_rating['max_rating'], 5)
max_books_per_rating = Book.objects.values('rating').annotate(
books_per_rating=Count('id')
).aggregate(Max('books_per_rating'))
self.assertEqual(
max_books_per_rating,
{'books_per_rating__max': 3})
def test_ticket17424(self):
"""
Check that doing exclude() on a foreign model after annotate()
doesn't crash.
"""
all_books = list(Book.objects.values_list('pk', flat=True).order_by('pk'))
annotated_books = Book.objects.order_by('pk').annotate(one=Count("id"))
# The value doesn't matter, we just need any negative
# constraint on a related model that's a noop.
excluded_books = annotated_books.exclude(publisher__name="__UNLIKELY_VALUE__")
# Try to generate query tree
str(excluded_books.query)
self.assertQuerysetEqual(excluded_books, all_books, lambda x: x.pk)
# Check internal state
self.assertIsNone(annotated_books.query.alias_map["aggregation_book"].join_type)
self.assertIsNone(excluded_books.query.alias_map["aggregation_book"].join_type)
def test_ticket12886(self):
"""
Check that aggregation over sliced queryset works correctly.
"""
qs = Book.objects.all().order_by('-rating')[0:3]
vals = qs.aggregate(average_top3_rating=Avg('rating'))['average_top3_rating']
self.assertAlmostEqual(vals, 4.5, places=2)
    def test_ticket11881(self):
        """
        Check that subqueries do not needlessly contain ORDER BY, SELECT FOR UPDATE
        or select_related() stuff.
        """
        qs = Book.objects.all().select_for_update().order_by(
            'pk').select_related('publisher').annotate(max_pk=Max('pk'))
        # Aggregating over the annotated queryset forces it into a subquery;
        # capture the SQL so we can inspect what made it into that subquery.
        with CaptureQueriesContext(connection) as captured_queries:
            qs.aggregate(avg_pk=Avg('max_pk'))
        self.assertEqual(len(captured_queries), 1)
        qstr = captured_queries[0]['sql'].lower()
        self.assertNotIn('for update', qstr)
        forced_ordering = connection.ops.force_no_ordering()
        if forced_ordering:
            # If the backend needs to force an ordering we make sure it's
            # the only "ORDER BY" clause present in the query.
            self.assertEqual(
                re.findall(r'order by (\w+)', qstr),
                [', '.join(forced_ordering).lower()]
            )
        else:
            self.assertNotIn('order by', qstr)
        # No JOINs should survive either: select_related() must be dropped.
        self.assertEqual(qstr.count(' join '), 0)
|
quintel/etmoses | refs/heads/master | scripts/temperature/temperature_profile.py | 1 | import numpy as np
from numpy import genfromtxt
import os
import pylab as plt
import glob
# Function definitions
def degree_heating(threshold_temperature, temp_array):
    """Return, per sample, 1 when the temperature is below the threshold
    (a heating "degree hour"), else 0."""
    return [1 if x < threshold_temperature else 0 for x in temp_array]


def degree_cooling(threshold_temperature, temp_array):
    """Counterpart of degree_heating: return, per sample, 1 when the
    temperature is above the threshold (a cooling "degree hour"), else 0.

    Added because the main loop calls degree_cooling(), which was not
    defined anywhere in this file (NameError at runtime).
    """
    return [1 if x > threshold_temperature else 0 for x in temp_array]
# Importing the data
files = open(os.getcwd() + "../input_data/temperatures_de Bilt_2013_fifteen_minutes.csv")
# Main loop
for temperature_file in files:
print temperature_file
print temperature_file.split("/")[-1]
# Data for temperature (hourly)
data = genfromtxt(temperature_file, delimiter=",")
# Temperature data is given in tenths of degrees, converting to degrees
temperature_2012 = data / 10
# Creating degree hours (as the data is hourly)
hdh_2012 = np.array(degree_heating(18, temperature_2012))
cdh_2012 = np.array(degree_cooling(18, temperature_2012))
# Turning degree hours into degree days by summing degree hours per 24 and dividing by 24 (taking the average)
hdd_2012 = np.sum(hdh_2012.reshape(365, 24), axis = 1) / 24
cdd_2012 = np.sum(cdh_2012.reshape(365, 24), axis = 1) / 24
# As a check, also use the average daily temperature to calculate HDD and CDD
daily_averaged_temperature_2012 = np.mean(temperature_2012.reshape(365, 24), axis = 1)
ahdd_2012 = np.array(degree_heating(18, daily_averaged_temperature_2012))
acdd_2012 = np.array(degree_cooling(18, daily_averaged_temperature_2012))
plt.savetxt(os.getcwd() + "/degree_days_output/heating_degree_days_" + temperature_file.split('/')[-1], hdd_2012, fmt='%.3f')
plt.savetxt(os.getcwd() + "/degree_days_output/cooling_degree_days_" + temperature_file.split('/')[-1], cdd_2012, fmt='%.3f')
min_x = 0
max_x = 300
plt.figure(figsize=[10,7])
plt.plot(daily_averaged_temperature_2012[min_x: max_x], 'y', label='temp 2012')
plt.plot(([20]*365)[min_x: max_x],'r--')
plt.plot(([25]*365)[min_x: max_x],'k--')
plt.plot(hdd_2012[min_x: max_x], label='HDD')
plt.plot(cdd_2012[min_x: max_x], label='CDD')
#plt.plot(ahdd_2012[min_x: max_x], label='averaged HDD')
#plt.plot(acdd_2012[min_x: max_x], label='averaged CDD')
plt.xlabel('time (days)')
plt.ylabel('degree days (deg)')
plt.title('cooling- and heating degree days')
plt.legend()
plt.show()
plt.close()
# hdh = []
# cdh = []
# total = []
# for i in range(20):
# hdh.append( np.sum(degree_heating_hours(0 + i, temperature_2012)) )
# cdh.append( np.sum(degree_cooling_hours(0 + i, temperature_2012)) )
# total.append(hdh[-1]+cdh[-1])
# plt.figure(figsize=[10,7])
# plt.plot(hdh, label='HDH')
# plt.plot(cdh, label='CDH')
# plt.plot(total, label ='total')
# plt.legend()
# plt.show() |
paterson/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/wptserve/wptserve/constants.py | 326 | import utils
# MIME type -> file extensions table; utils.invert_dict presumably flips it
# into an extension -> MIME type lookup (verify against utils).
content_types = utils.invert_dict({"text/html": ["htm", "html"],
                                   "application/json": ["json"],
                                   "application/xhtml+xml": ["xht", "xhtm", "xhtml"],
                                   "application/xml": ["xml"],
                                   "application/x-xpinstall": ["xpi"],
                                   "text/javascript": ["js"],
                                   "text/css": ["css"],
                                   "text/plain": ["txt", "md"],
                                   "image/svg+xml": ["svg"],
                                   "image/gif": ["gif"],
                                   "image/jpeg": ["jpg", "jpeg"],
                                   "image/png": ["png"],
                                   "image/bmp": ["bmp"],
                                   "text/event-stream": ["event_stream"],
                                   "text/cache-manifest": ["manifest"],
                                   "video/mp4": ["mp4", "m4v"],
                                   "audio/mp4": ["m4a"],
                                   "audio/mpeg": ["mp3"],
                                   "video/webm": ["webm"],
                                   "audio/webm": ["weba"],
                                   "video/ogg": ["ogg", "ogv"],
                                   "audio/ogg": ["oga"],
                                   "audio/x-wav": ["wav"],
                                   "text/vtt": ["vtt"],})
# HTTP status code -> (reason phrase, long description) used when building
# response status lines.
response_codes = {
    100: ('Continue', 'Request received, please continue'),
    101: ('Switching Protocols',
          'Switching to new protocol; obey Upgrade header'),
    200: ('OK', 'Request fulfilled, document follows'),
    201: ('Created', 'Document created, URL follows'),
    202: ('Accepted',
          'Request accepted, processing continues off-line'),
    203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
    204: ('No Content', 'Request fulfilled, nothing follows'),
    205: ('Reset Content', 'Clear input form for further input.'),
    206: ('Partial Content', 'Partial content follows.'),
    300: ('Multiple Choices',
          'Object has several resources -- see URI list'),
    301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
    302: ('Found', 'Object moved temporarily -- see URI list'),
    303: ('See Other', 'Object moved -- see Method and URL list'),
    304: ('Not Modified',
          'Document has not changed since given time'),
    305: ('Use Proxy',
          'You must use proxy specified in Location to access this '
          'resource.'),
    307: ('Temporary Redirect',
          'Object moved temporarily -- see URI list'),
    400: ('Bad Request',
          'Bad request syntax or unsupported method'),
    401: ('Unauthorized',
          'No permission -- see authorization schemes'),
    402: ('Payment Required',
          'No payment -- see charging schemes'),
    403: ('Forbidden',
          'Request forbidden -- authorization will not help'),
    404: ('Not Found', 'Nothing matches the given URI'),
    405: ('Method Not Allowed',
          'Specified method is invalid for this resource.'),
    406: ('Not Acceptable', 'URI not available in preferred format.'),
    407: ('Proxy Authentication Required', 'You must authenticate with '
          'this proxy before proceeding.'),
    408: ('Request Timeout', 'Request timed out; try again later.'),
    409: ('Conflict', 'Request conflict.'),
    410: ('Gone',
          'URI no longer exists and has been permanently removed.'),
    411: ('Length Required', 'Client must specify Content-Length.'),
    412: ('Precondition Failed', 'Precondition in headers is false.'),
    413: ('Request Entity Too Large', 'Entity is too large.'),
    414: ('Request-URI Too Long', 'URI is too long.'),
    415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
    416: ('Requested Range Not Satisfiable',
          'Cannot satisfy request range.'),
    417: ('Expectation Failed',
          'Expect condition could not be satisfied.'),
    500: ('Internal Server Error', 'Server got itself in trouble'),
    501: ('Not Implemented',
          'Server does not support this operation'),
    502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
    503: ('Service Unavailable',
          'The server cannot process the request due to a high load'),
    504: ('Gateway Timeout',
          'The gateway server did not receive a timely response'),
    505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
    }
|
jmmease/pandas | refs/heads/master | pandas/compat/numpy/__init__.py | 6 | """ support numpy compatiblitiy across versions """
import re
import numpy as np
from distutils.version import LooseVersion
from pandas.compat import string_types, string_and_binary_types
# numpy versioning
_np_version = np.__version__
_nlv = LooseVersion(_np_version)
# Feature flags consumed throughout pandas: each is True when the installed
# numpy predates the named release, letting callers branch on capabilities.
_np_version_under1p10 = _nlv < '1.10'
_np_version_under1p11 = _nlv < '1.11'
_np_version_under1p12 = _nlv < '1.12'
_np_version_under1p13 = _nlv < '1.13'
_np_version_under1p14 = _nlv < '1.14'
_np_version_under1p15 = _nlv < '1.15'
# Fail fast at import time with an actionable message: this pandas
# requires numpy >= 1.9.0.
if _nlv < '1.9':
    raise ImportError('this version of pandas is incompatible with '
                      'numpy < 1.9.0\n'
                      'your numpy version is {0}.\n'
                      'Please upgrade numpy to >= 1.9.0 to use '
                      'this pandas version'.format(_np_version))
# Matches an explicit trailing UTC offset designator ('+0000' or '-0000').
_tz_regex = re.compile('[+-]0000$')


def tz_replacer(s):
    """Strip a trailing UTC designator ('Z', '+0000' or '-0000') from a
    datetime string; any non-string value passes through unchanged."""
    if not isinstance(s, string_types):
        return s
    if s.endswith('Z'):
        return s[:-1]
    if _tz_regex.search(s):
        return s[:-5]
    return s
def np_datetime64_compat(s, *args, **kwargs):
    """
    Construct a ``np.datetime64`` from a string compatibly across numpy
    versions: numpy 1.11+ deprecates explicit timezone designators such as
    '2015-01-01 09:00:00Z', so on those versions the designator is stripped
    ('2015-01-01 09:00:00') before construction.
    """
    value = s if _np_version_under1p11 else tz_replacer(s)
    return np.datetime64(value, *args, **kwargs)
def np_array_datetime64_compat(arr, *args, **kwargs):
    """
    Construct ``np.array(..., dtype=np.datetime64(..))`` from strings
    compatibly across numpy versions: numpy 1.11+ deprecates explicit
    timezone designators ('2015-01-01 09:00:00Z'), so on those versions the
    designator is stripped from every element before construction.
    """
    if not _np_version_under1p11:
        # Treat any non-string iterable as list-like; everything else is a
        # scalar datetime string.
        listlike = (hasattr(arr, '__iter__') and
                    not isinstance(arr, string_and_binary_types))
        if listlike:
            arr = [tz_replacer(element) for element in arr]
        else:
            arr = tz_replacer(arr)
    return np.array(arr, *args, **kwargs)
# Public names re-exported by ``pandas.compat.numpy``: the numpy module
# itself plus the version feature flags defined above.
__all__ = ['np',
           '_np_version_under1p10',
           '_np_version_under1p11',
           '_np_version_under1p12',
           '_np_version_under1p13',
           '_np_version_under1p14',
           '_np_version_under1p15'
           ]
|
ghostlines/ghostlines-robofont | refs/heads/master | src/lib/site-packages/requests/models.py | 59 | # -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import datetime
import sys
# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/kennethreitz/requests/issues/3578.
import encodings.idna
from io import BytesIO, UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
from .packages.urllib3.exceptions import (
DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, check_header_validity)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,              # 301
    codes.found,              # 302
    codes.other,              # 303
    codes.temporary_redirect, # 307
    codes.permanent_redirect, # 308
)
#: Maximum number of redirects followed before giving up.
DEFAULT_REDIRECT_LIMIT = 30
#: Default chunk size (bytes) used when reading response content.
CONTENT_CHUNK_SIZE = 10 * 1024
#: Default chunk size (bytes) used by ``Response.iter_lines()``.
ITER_CHUNK_SIZE = 512
class RequestEncodingMixin(object):
    """Mixin with helpers for encoding a request's path, parameters and
    multipart file uploads."""
    @property
    def path_url(self):
        """Build the path URL to use."""
        url = []
        p = urlsplit(self.url)
        path = p.path
        if not path:
            # A bare domain still needs a path component.
            path = '/'
        url.append(path)
        query = p.query
        if query:
            url.append('?')
            url.append(query)
        return ''.join(url)
    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.
        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """
        if isinstance(data, (str, bytes)):
            # Already an encoded body: pass through untouched.
            return data
        elif hasattr(data, 'read'):
            # File-like object: the caller streams it as-is.
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    # Wrap scalars so single values and sequences share the
                    # same code path below.
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data
    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.
        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
        or 4-tuples (filename, fileobj, contentype, custom_headers).
        """
        if (not files):
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")
        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})
        # First, fold the ordinary form fields into the multipart body.
        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)
                    new_fields.append(
                        (field.decode('utf-8') if isinstance(field, bytes) else field,
                         v.encode('utf-8') if isinstance(v, str) else v))
        # Then append each file part, unpacking the optional tuple forms.
        for (k, v) in files:
            # support for explicit filename
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                # (filename, fileobj[, content_type[, custom_headers]])
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v
            if isinstance(fp, (str, bytes, bytearray)):
                # Literal content supplied instead of a file object.
                fdata = fp
            else:
                fdata = fp.read()
            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)
        body, content_type = encode_multipart_formdata(new_fields)
        return body, content_type
class RequestHooksMixin(object):
    """Mixin providing event-hook registration for request objects.
    Expects ``self.hooks`` to be a dict mapping event names to lists of
    callables (see :func:`hooks.default_hooks`).
    """
    def register_hook(self, event, hook):
        """Properly register a hook.
        :param event: name of an event already present in ``self.hooks``.
        :param hook: a callable, or an iterable of callables (non-callables
            in the iterable are silently skipped).
        :raises ValueError: if ``event`` is not a known hook name.
        """
        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))
        # Bug fix: the previous ``isinstance(hook, collections.Callable)``
        # check relies on an alias that was deprecated in Python 3.3 and
        # removed in 3.10; the ``callable`` builtin is the portable,
        # behaviorally-equivalent test.
        if callable(hook):
            self.hooks[event].append(hook)
        elif hasattr(hook, '__iter__'):
            self.hooks[event].extend(h for h in hook if callable(h))
    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """
        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            # remove() raises ValueError when the hook is absent.
            return False
class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: dictionary of URL parameters to append to the URL.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """
    def __init__(self, method=None, url=None, headers=None, files=None,
                 data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
        # Substitute fresh empty containers for omitted collection
        # arguments (never mutable defaults in the signature).
        self.method = method
        self.url = url
        self.headers = {} if headers is None else headers
        self.files = [] if files is None else files
        self.data = [] if data is None else data
        self.json = json
        self.params = {} if params is None else params
        self.auth = auth
        self.cookies = cookies

        # Start from the default hook table, then fold in any
        # caller-supplied hooks.
        self.hooks = default_hooks()
        user_hooks = {} if hooks is None else hooks
        for event, hook in list(user_hooks.items()):
            self.register_hook(event=event, hook=hook)

    def __repr__(self):
        return '<Request [%s]>' % (self.method)

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        prepared = PreparedRequest()
        prepared.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return prepared
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.
    Generated from either a :class:`Request <Request>` object or manually.
    Usage::
      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> r = req.prepare()
      <PreparedRequest [GET]>
      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """
    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None
    def prepare(self, method=None, url=None, headers=None, files=None,
                data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
        """Prepares the entire request with the given parameters.
        The prepare_* methods run in dependency order; see the notes below
        about why prepare_auth and prepare_hooks come last.
        """
        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)
        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.
        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)
    def __repr__(self):
        return '<PreparedRequest [%s]>' % (self.method)
    def copy(self):
        # Shallow copy: headers and the cookie jar are copied, while the
        # body object and hook lists are shared with the original.
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p
    def prepare_method(self, method):
        """Prepares the given HTTP method."""
        self.method = method
        if self.method is not None:
            # HTTP method names are case-sensitive; normalize to the
            # conventional uppercase native-string form.
            self.method = to_native_string(self.method.upper())
    @staticmethod
    def _get_idna_encoded_host(host):
        # IDNA-encode a non-ASCII hostname; raises UnicodeError on failure
        # so the caller can turn it into an InvalidURL.
        try:
            from .packages import idna
        except ImportError:
            # tolerate the possibility of downstream repackagers unvendoring `requests`
            # For more information, read: packages/__init__.py
            import idna
            sys.modules['requests.packages.idna'] = idna
        try:
            host = idna.encode(host, uts46=True).decode('utf-8')
        except idna.IDNAError:
            raise UnicodeError
        return host
    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/kennethreitz/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode('utf8')
        else:
            url = unicode(url) if is_py2 else str(url)
        # Remove leading whitespaces from url
        url = url.lstrip()
        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ':' in url and not url.lower().startswith('http'):
            self.url = url
            return
        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)
        if not scheme:
            error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
            error = error.format(to_native_string(url, 'utf8'))
            raise MissingSchema(error)
        if not host:
            raise InvalidURL("Invalid URL %r: No host supplied" % url)
        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL('URL has an invalid label.')
        elif host.startswith(u'*'):
            raise InvalidURL('URL has an invalid label.')
        # Carefully reconstruct the network location
        netloc = auth or ''
        if netloc:
            netloc += '@'
        netloc += host
        if port:
            netloc += ':' + str(port)
        # Bare domains aren't valid URLs.
        if not path:
            path = '/'
        if is_py2:
            # Python 2's urlunparse needs byte strings, so encode each part.
            if isinstance(scheme, str):
                scheme = scheme.encode('utf-8')
            if isinstance(netloc, str):
                netloc = netloc.encode('utf-8')
            if isinstance(path, str):
                path = path.encode('utf-8')
            if isinstance(query, str):
                query = query.encode('utf-8')
            if isinstance(fragment, str):
                fragment = fragment.encode('utf-8')
        if isinstance(params, (str, bytes)):
            params = to_native_string(params)
        enc_params = self._encode_params(params)
        if enc_params:
            # Append the encoded extra params to any query already in the URL.
            if query:
                query = '%s&%s' % (query, enc_params)
            else:
                query = enc_params
        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url
    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""
        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value
    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data."""
        # Check if file, fo, generator, iterator.
        # If not, run through normal process.
        # Nottin' on you.
        body = None
        content_type = None
        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = 'application/json'
            body = complexjson.dumps(json)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')
        # A "stream" is any iterable that isn't a plain string, list,
        # tuple or mapping — e.g. a generator or open file object.
        is_stream = all([
            hasattr(data, '__iter__'),
            not isinstance(data, (basestring, list, tuple, collections.Mapping))
        ])
        try:
            length = super_len(data)
        except (TypeError, AttributeError, UnsupportedOperation):
            length = None
        if is_stream:
            body = data
            if getattr(body, 'tell', None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except (IOError, OSError):
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()
            if files:
                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
            if length:
                self.headers['Content-Length'] = builtin_str(length)
            else:
                # Length unknown: fall back to chunked transfer encoding.
                self.headers['Transfer-Encoding'] = 'chunked'
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, 'read'):
                        content_type = None
                    else:
                        content_type = 'application/x-www-form-urlencoded'
            self.prepare_content_length(body)
            # Add content-type if it wasn't explicitly provided.
            if content_type and ('content-type' not in self.headers):
                self.headers['Content-Type'] = content_type
        self.body = body
    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body"""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers['Content-Length'] = builtin_str(length)
        elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers['Content-Length'] = '0'
    def prepare_auth(self, auth, url=''):
        """Prepares the given HTTP auth data."""
        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None
        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)
            # Allow auth to make its changes.
            r = auth(self)
            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)
            # Recompute Content-Length
            self.prepare_content_length(self.body)
    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.
        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)
        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers['Cookie'] = cookie_header
    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])
class Response(object):
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """
    # Attribute names preserved by __getstate__/__setstate__ for pickling.
    __attrs__ = [
        '_content', 'status_code', 'headers', 'url', 'history',
        'encoding', 'reason', 'cookies', 'elapsed', 'request'
    ]
    def __init__(self):
        super(Response, self).__init__()
        # _content stays False until the body has been read; afterwards it
        # holds the raw bytes (or None when there was nothing to read).
        self._content = False
        self._content_consumed = False
        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None
        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()
        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        # This requirement does not apply for use internally to Requests.
        self.raw = None
        #: Final URL location of Response.
        self.url = None
        #: Encoding to decode with when accessing r.text.
        self.encoding = None
        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []
        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None
        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})
        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta).
        #: This property specifically measures the time taken between sending
        #: the first byte of the request and finishing parsing the headers. It
        #: is therefore unaffected by consuming the response content or the
        #: value of the ``stream`` keyword argument.
        self.elapsed = datetime.timedelta(0)
        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None
    def __getstate__(self):
        # Consume everything; accessing the content attribute makes
        # sure the content has been fully read.
        if not self._content_consumed:
            self.content
        return dict(
            (attr, getattr(self, attr, None))
            for attr in self.__attrs__
        )
    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)
        # pickled objects do not have .raw
        setattr(self, '_content_consumed', True)
        setattr(self, 'raw', None)
    def __repr__(self):
        return '<Response [%s]>' % (self.status_code)
    def __bool__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        return self.ok
    def __nonzero__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        # Python 2 truth-testing hook; __bool__ above is the Python 3 name.
        return self.ok
    def __iter__(self):
        """Allows you to use a response as an iterator."""
        return self.iter_content(128)
    @property
    def ok(self):
        """True if :meth:`raise_for_status` would not raise an
        :class:`HTTPError` for this response."""
        try:
            self.raise_for_status()
        except HTTPError:
            return False
        return True
    @property
    def is_redirect(self):
        """True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        """
        return ('location' in self.headers and self.status_code in REDIRECT_STATI)
    @property
    def is_permanent_redirect(self):
        """True if this Response one of the permanent versions of redirect"""
        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the chardet library"""
        return chardet.detect(self.content)['encoding']
    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data. When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses. The chunk size is the number of bytes it should
        read into memory. This is not necessarily the length of each item
        returned as decoding can take place.
        chunk_size must be of type int or None. A value of None will
        function differently depending on the value of `stream`.
        stream=True will read data as it arrives in whatever size the
        chunks are received. If stream=False, data is returned as
        a single chunk.
        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        """
        def generate():
            # Special case for urllib3.
            if hasattr(self.raw, 'stream'):
                try:
                    for chunk in self.raw.stream(chunk_size, decode_content=True):
                        yield chunk
                except ProtocolError as e:
                    raise ChunkedEncodingError(e)
                except DecodeError as e:
                    raise ContentDecodingError(e)
                except ReadTimeoutError as e:
                    raise ConnectionError(e)
            else:
                # Standard file-like object.
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk
            self._content_consumed = True
        # _content is a bool only while the body is still unread, so a
        # consumed response with a bool _content means the stream is gone.
        if self._content_consumed and isinstance(self._content, bool):
            raise StreamConsumedError()
        elif chunk_size is not None and not isinstance(chunk_size, int):
            raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
        # simulate reading small chunks of the content
        reused_chunks = iter_slices(self._content, chunk_size)
        stream_chunks = generate()
        chunks = reused_chunks if self._content_consumed else stream_chunks
        if decode_unicode:
            chunks = stream_decode_response_unicode(chunks, self)
        return chunks
    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
        """Iterates over the response data, one line at a time. When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.
        .. note:: This method is not reentrant safe.
        """
        pending = None
        for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
            if pending is not None:
                # A partial line was held over from the previous chunk.
                chunk = pending + chunk
            if delimiter:
                lines = chunk.split(delimiter)
            else:
                lines = chunk.splitlines()
            # If the chunk did not end exactly on a line boundary, hold the
            # trailing fragment back until the next chunk arrives.
            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None
            for line in lines:
                yield line
        if pending is not None:
            yield pending
    @property
    def content(self):
        """Content of the response, in bytes."""
        if self._content is False:
            # Read the contents.
            if self._content_consumed:
                raise RuntimeError(
                    'The content for this response was already consumed')
            if self.status_code == 0 or self.raw is None:
                self._content = None
            else:
                self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content
    @property
    def text(self):
        """Content of the response, in unicode.
        If Response.encoding is None, encoding will be guessed using
        ``chardet``.
        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        """
        # Try charset from content-type
        content = None
        encoding = self.encoding
        if not self.content:
            return str('')
        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding
        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors='replace')
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors='replace')
        return content
    def json(self, **kwargs):
        """Returns the json-encoded content of a response, if any.
        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises ValueError: If the response body does not contain valid json.
        """
        if not self.encoding and self.content and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using chardet to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return complexjson.loads(
                        self.content.decode(encoding), **kwargs
                    )
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec. This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
        return complexjson.loads(self.text, **kwargs)
    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""
        header = self.headers.get('link')
        # l = MultiDict()
        l = {}
        if header:
            links = parse_header_links(header)
            for link in links:
                # Key each link by its "rel" when present, else by its URL.
                key = link.get('rel') or link.get('url')
                l[key] = link
        return l
    def raise_for_status(self):
        """Raises stored :class:`HTTPError`, if one occurred."""
        http_error_msg = ''
        if isinstance(self.reason, bytes):
            # We attempt to decode utf-8 first because some servers
            # choose to localize their reason strings. If the string
            # isn't utf-8, we fall back to iso-8859-1 for all other
            # encodings. (See PR #3538)
            try:
                reason = self.reason.decode('utf-8')
            except UnicodeDecodeError:
                reason = self.reason.decode('iso-8859-1')
        else:
            reason = self.reason
        if 400 <= self.status_code < 500:
            http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)
        elif 500 <= self.status_code < 600:
            http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)
        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)
    def close(self):
        """Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.
        *Note: Should not normally need to be called explicitly.*
        """
        if not self._content_consumed:
            self.raw.close()
        release_conn = getattr(self.raw, 'release_conn', None)
        if release_conn is not None:
            release_conn()
|
google/google-ctf | refs/heads/master | third_party/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/test/test_funcattrs.py | 13 | from test import test_support
import types
import unittest
class FuncAttrsTest(unittest.TestCase):
    """Shared fixture: a class with one method, plus a free function."""

    def setUp(self):
        class F:
            def a(self):
                pass
        def b():
            return 3
        self.f = F
        self.fi = F()
        self.b = b

    def cannot_set_attr(self, obj, name, value, exceptions):
        # Helper for other tests: both setting and deleting the named
        # attribute must raise one of `exceptions`.
        self._expect_failure(setattr, (obj, name, value), exceptions,
                             "shouldn't be able to set %s to %r" % (name, value))
        self._expect_failure(delattr, (obj, name), exceptions,
                             "shouldn't be able to del %s" % name)

    def _expect_failure(self, op, args, exceptions, message):
        # Run op(*args); the call must raise, otherwise fail with `message`.
        try:
            op(*args)
        except exceptions:
            return
        self.fail(message)
class FunctionPropertiesTest(FuncAttrsTest):
    # Include the external setUp method that is common to all tests
    # NOTE: this suite exercises the Python 2 function attribute names
    # (func_code, func_globals, func_closure, func_name, func_defaults,
    # im_func); the module is Python-2-only (see `print a` / `exec ... in`).
    def test_module(self):
        self.assertEqual(self.b.__module__, __name__)
    def test_dir_includes_correct_attrs(self):
        self.b.known_attr = 7
        self.assertIn('known_attr', dir(self.b),
                      "set attributes not in dir listing of method")
        # Test on underlying function object of method
        self.f.a.im_func.known_attr = 7
        self.assertIn('known_attr', dir(self.f.a),
                      "set attribute on unbound method implementation in "
                      "class not in dir")
        self.assertIn('known_attr', dir(self.fi.a),
                      "set attribute on unbound method implementations, "
                      "should show up in next dir")
    def test_duplicate_function_equality(self):
        # Body of `duplicate' is the exact same as self.b
        def duplicate():
            'my docstring'
            return 3
        self.assertNotEqual(self.b, duplicate)
    def test_copying_func_code(self):
        def test(): pass
        self.assertEqual(test(), None)
        # Swapping in self.b's code object changes what test() computes.
        test.func_code = self.b.func_code
        self.assertEqual(test(), 3)  # self.b always returns 3, arbitrarily
    def test_func_globals(self):
        self.assertIs(self.b.func_globals, globals())
        self.cannot_set_attr(self.b, 'func_globals', 2, TypeError)
    def test_func_closure(self):
        a = 12
        def f(): print a
        c = f.func_closure
        self.assertIsInstance(c, tuple)
        self.assertEqual(len(c), 1)
        # don't have a type object handy
        self.assertEqual(c[0].__class__.__name__, "cell")
        self.cannot_set_attr(f, "func_closure", c, TypeError)
    def test_empty_cell(self):
        def f(): print a
        try:
            f.func_closure[0].cell_contents
        except ValueError:
            pass
        else:
            self.fail("shouldn't be able to read an empty cell")
        # `a` is bound only after the closure is inspected, so the cell
        # accessed above is still empty at that point.
        a = 12
    def test_func_name(self):
        self.assertEqual(self.b.__name__, 'b')
        self.assertEqual(self.b.func_name, 'b')
        # __name__ and func_name are two views of the same value: setting
        # either one is reflected in both.
        self.b.__name__ = 'c'
        self.assertEqual(self.b.__name__, 'c')
        self.assertEqual(self.b.func_name, 'c')
        self.b.func_name = 'd'
        self.assertEqual(self.b.__name__, 'd')
        self.assertEqual(self.b.func_name, 'd')
        # __name__ and func_name must be a string
        self.cannot_set_attr(self.b, '__name__', 7, TypeError)
        self.cannot_set_attr(self.b, 'func_name', 7, TypeError)
        # __name__ must be available when in restricted mode. Exec will raise
        # AttributeError if __name__ is not available on f.
        s = """def f(): pass\nf.__name__"""
        exec s in {'__builtins__': {}}
        # Test on methods, too
        self.assertEqual(self.f.a.__name__, 'a')
        self.assertEqual(self.fi.a.__name__, 'a')
        self.cannot_set_attr(self.f.a, "__name__", 'a', AttributeError)
        self.cannot_set_attr(self.fi.a, "__name__", 'a', AttributeError)
    def test_func_code(self):
        num_one, num_two = 7, 8
        def a(): pass
        def b(): return 12
        def c(): return num_one
        def d(): return num_two
        def e(): return num_one, num_two
        for func in [a, b, c, d, e]:
            self.assertEqual(type(func.func_code), types.CodeType)
        self.assertEqual(c(), 7)
        self.assertEqual(d(), 8)
        # c and d close over the same number of free vars, so their code
        # objects are interchangeable.
        d.func_code = c.func_code
        self.assertEqual(c.func_code, d.func_code)
        self.assertEqual(c(), 7)
        # self.assertEqual(d(), 7)
        try:
            b.func_code = c.func_code
        except ValueError:
            pass
        else:
            self.fail("func_code with different numbers of free vars should "
                      "not be possible")
        try:
            e.func_code = d.func_code
        except ValueError:
            pass
        else:
            self.fail("func_code with different numbers of free vars should "
                      "not be possible")
    def test_blank_func_defaults(self):
        self.assertEqual(self.b.func_defaults, None)
        # Deleting absent defaults is a no-op, not an error.
        del self.b.func_defaults
        self.assertEqual(self.b.func_defaults, None)
    def test_func_default_args(self):
        def first_func(a, b):
            return a+b
        def second_func(a=1, b=2):
            return a+b
        self.assertEqual(first_func.func_defaults, None)
        self.assertEqual(second_func.func_defaults, (1, 2))
        # Installing defaults after the fact makes 0-2 args callable.
        first_func.func_defaults = (1, 2)
        self.assertEqual(first_func.func_defaults, (1, 2))
        self.assertEqual(first_func(), 3)
        self.assertEqual(first_func(3), 5)
        self.assertEqual(first_func(3, 5), 8)
        del second_func.func_defaults
        self.assertEqual(second_func.func_defaults, None)
        try:
            second_func()
        except TypeError:
            pass
        else:
            self.fail("func_defaults does not update; deleting it does not "
                      "remove requirement")
class InstancemethodAttrTest(FuncAttrsTest):
    """Checks the read-only special attributes of bound and unbound methods."""

    def test_im_class(self):
        # im_class names the defining class and rejects reassignment.
        for meth in (self.f.a, self.fi.a):
            self.assertEqual(meth.im_class, self.f)
            self.cannot_set_attr(meth, "im_class", self.f, TypeError)

    def test_im_func(self):
        # A plain function attached to a class becomes its methods' im_func.
        self.f.b = self.b
        for meth in (self.f.b, self.fi.b):
            self.assertEqual(meth.im_func, self.b)
            self.cannot_set_attr(meth, "im_func", self.b, TypeError)

    def test_im_self(self):
        # Unbound methods have no im_self; bound methods carry the instance.
        self.assertEqual(self.f.a.im_self, None)
        self.assertEqual(self.fi.a.im_self, self.fi)
        self.cannot_set_attr(self.f.a, "im_self", None, TypeError)
        self.cannot_set_attr(self.fi.a, "im_self", self.fi, TypeError)

    def test_im_func_non_method(self):
        # Behavior should be the same when a method is added via an attr
        # assignment.
        self.f.id = types.MethodType(id, None, self.f)
        self.assertEqual(self.fi.id(), id(self.fi))
        self.assertNotEqual(self.fi.id(), id(self.f))
        # Unknown attributes must raise AttributeError on read ...
        try:
            self.f.id.unknown_attr
        except AttributeError:
            pass
        else:
            self.fail("using unknown attributes should raise AttributeError")
        # ... and on write/delete, for both unbound and bound forms.
        self.cannot_set_attr(self.f.id, 'unknown_attr', 2, AttributeError)
        self.cannot_set_attr(self.fi.id, 'unknown_attr', 2, AttributeError)

    def test_implicit_method_properties(self):
        # Attributes set on im_func are visible through the method wrappers.
        self.f.a.im_func.known_attr = 7
        self.assertEqual(self.f.a.known_attr, 7)
        self.assertEqual(self.fi.a.known_attr, 7)
class ArbitraryFunctionAttrTest(FuncAttrsTest):
    """Tests setting, deleting and reading arbitrary function attributes."""

    def test_set_attr(self):
        """Arbitrary attributes can be set on functions but not on methods."""
        # setting attributes only works on function objects
        self.b.known_attr = 7
        self.assertEqual(self.b.known_attr, 7)
        for func in [self.f.a, self.fi.a]:
            try:
                func.known_attr = 7
            except AttributeError:
                pass
            else:
                self.fail("setting attributes on methods should raise error")

    def test_delete_unknown_attr(self):
        """Deleting an attribute a function never had raises AttributeError."""
        try:
            del self.b.unknown_attr
        except AttributeError:
            pass
        else:
            # Bug fix: the expected exception is AttributeError (matching the
            # except clause above); the old message wrongly said TypeError.
            self.fail("deleting unknown attribute should raise AttributeError")

    def test_setting_attrs_duplicates(self):
        """Attributes must be set via im_func and then show through every
        wrapper of the same underlying function."""
        try:
            self.f.a.klass = self.f
        except AttributeError:
            pass
        else:
            self.fail("setting arbitrary attribute in unbound function "
                      " should raise AttributeError")
        self.f.a.im_func.klass = self.f
        for method in [self.f.a, self.fi.a, self.fi.a.im_func]:
            self.assertEqual(method.klass, self.f)

    def test_unset_attr(self):
        """Reading a missing attribute raises AttributeError on both
        functions and methods."""
        for func in [self.b, self.f.a, self.fi.a]:
            try:
                func.non_existent_attr
            except AttributeError:
                pass
            else:
                self.fail("using unknown attributes should raise "
                          "AttributeError")
class FunctionDictsTest(FuncAttrsTest):
    """Tests the __dict__ / func_dict attribute of functions and methods."""

    def test_setting_dict_to_invalid(self):
        # Only genuine dicts are accepted: not None, not dict-like objects.
        self.cannot_set_attr(self.b, '__dict__', None, TypeError)
        self.cannot_set_attr(self.b, 'func_dict', None, TypeError)
        from UserDict import UserDict
        d = UserDict({'known_attr': 7})
        self.cannot_set_attr(self.f.a.im_func, '__dict__', d, TypeError)
        self.cannot_set_attr(self.fi.a.im_func, '__dict__', d, TypeError)

    def test_setting_dict_to_valid(self):
        attrs = {'known_attr': 7}
        self.b.__dict__ = attrs
        # Setting dict is only possible on the underlying function objects.
        self.f.a.im_func.__dict__ = attrs
        # The assigned mapping must be the very same object under every
        # name that reaches the function's dict.
        for view in (self.b.__dict__, self.b.func_dict,
                     self.f.a.im_func.__dict__, self.f.a.__dict__,
                     self.fi.a.im_func.__dict__, self.fi.a.__dict__):
            self.assertIs(attrs, view)
        # Its contents must also be reachable as attributes.
        self.assertEqual(self.b.known_attr, 7)
        self.assertEqual(self.b.__dict__['known_attr'], 7)
        self.assertEqual(self.b.func_dict['known_attr'], 7)
        for owner in (self.f.a.im_func, self.f.a,
                      self.fi.a.im_func, self.fi.a):
            self.assertEqual(owner.known_attr, 7)

    def test_delete_func_dict(self):
        # The function dictionary cannot be deleted under either name.
        for alias in ('__dict__', 'func_dict'):
            try:
                delattr(self.b, alias)
            except TypeError:
                pass
            else:
                self.fail("deleting function dictionary should raise TypeError")

    def test_unassigned_dict(self):
        # A fresh function starts with an empty attribute dictionary.
        self.assertEqual(self.b.__dict__, {})

    def test_func_as_dict_key(self):
        # Functions are hashable and therefore usable as dictionary keys.
        value = "Some string"
        mapping = {}
        mapping[self.b] = value
        self.assertEqual(mapping[self.b], value)
class FunctionDocstringTest(FuncAttrsTest):
    """Tests the __doc__ / func_doc attribute of functions and methods."""

    def test_set_docstring_attr(self):
        # Both aliases start out None and stay in sync after assignment.
        self.assertEqual(self.b.__doc__, None)
        self.assertEqual(self.b.func_doc, None)
        docstr = "A test method that does nothing"
        self.b.__doc__ = self.f.a.im_func.__doc__ = docstr
        self.assertEqual(self.b.__doc__, docstr)
        self.assertEqual(self.b.func_doc, docstr)
        self.assertEqual(self.f.a.__doc__, docstr)
        self.assertEqual(self.fi.a.__doc__, docstr)
        # Method wrappers themselves reject docstring assignment.
        self.cannot_set_attr(self.f.a, "__doc__", docstr, AttributeError)
        self.cannot_set_attr(self.fi.a, "__doc__", docstr, AttributeError)

    def test_delete_docstring(self):
        # Deleting through either alias resets both views to None.
        for alias in ('__doc__', 'func_doc'):
            setattr(self.b, alias, "The docstring")
            delattr(self.b, alias)
            self.assertEqual(self.b.__doc__, None)
            self.assertEqual(self.b.func_doc, None)
class StaticMethodAttrsTest(unittest.TestCase):
    """Tests the __func__ attribute of classmethod/staticmethod objects."""

    def test_func_attribute(self):
        def f():
            pass
        # Both descriptor wrappers must expose the original function
        # unchanged via __func__.
        for wrapper in (classmethod, staticmethod):
            self.assertTrue(wrapper(f).__func__ is f)
def test_main():
    """Run every function-attribute test case via the regrtest driver."""
    all_cases = (FunctionPropertiesTest, InstancemethodAttrTest,
                 ArbitraryFunctionAttrTest, FunctionDictsTest,
                 FunctionDocstringTest, StaticMethodAttrsTest)
    test_support.run_unittest(*all_cases)

if __name__ == "__main__":
    test_main()
|
kookie424/googletest | refs/heads/master | test/gtest_catch_exceptions_test.py | 2139 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests Google Test's exception catching behavior.
This script invokes gtest_catch_exceptions_test_ and
gtest_catch_exceptions_ex_test_ (programs written with
Google Test) and verifies their output.
"""
__author__ = 'vladl@google.com (Vlad Losev)'
import os
import gtest_test_utils
# Constants.
FLAG_PREFIX = '--gtest_'
LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
NO_CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions=0'
FILTER_FLAG = FLAG_PREFIX + 'filter'
# Path to the gtest_catch_exceptions_ex_test_ binary, compiled with
# exceptions enabled.
EX_EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_catch_exceptions_ex_test_')
# Path to the gtest_catch_exceptions_test_ binary, compiled with
# exceptions disabled.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_catch_exceptions_no_ex_test_')
environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar
# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely. Therefore they are incompatible with
# the premature-exit-file protocol by design. Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
TEST_LIST = gtest_test_utils.Subprocess(
[EXE_PATH, LIST_TESTS_FLAG], env=environ).output
SUPPORTS_SEH_EXCEPTIONS = 'ThrowsSehException' in TEST_LIST
if SUPPORTS_SEH_EXCEPTIONS:
BINARY_OUTPUT = gtest_test_utils.Subprocess([EXE_PATH], env=environ).output
EX_BINARY_OUTPUT = gtest_test_utils.Subprocess(
[EX_EXE_PATH], env=environ).output
# The tests.
if SUPPORTS_SEH_EXCEPTIONS:
  # pylint:disable-msg=C6302
  class CatchSehExceptionsTest(gtest_test_utils.TestCase):
    """Tests exception-catching behavior."""

    def TestSehExceptions(self, test_output):
      """Asserts that an SEH exception was reported from every location."""
      locations = (
          'in the test fixture\'s constructor',
          'in the test fixture\'s destructor',
          'in SetUpTestCase()',
          'in TearDownTestCase()',
          'in SetUp()',
          'in TearDown()',
          'in the test body',
      )
      for location in locations:
        self.assert_('SEH exception with code 0x2a thrown ' + location
                     in test_output)

    def testCatchesSehExceptionsWithCxxExceptionsEnabled(self):
      self.TestSehExceptions(EX_BINARY_OUTPUT)

    def testCatchesSehExceptionsWithCxxExceptionsDisabled(self):
      self.TestSehExceptions(BINARY_OUTPUT)
class CatchCxxExceptionsTest(gtest_test_utils.TestCase):
  """Tests C++ exception-catching behavior.

  Tests in this test case verify that:
    * C++ exceptions are caught and logged as C++ (not SEH) exceptions
    * Exceptions thrown affect the remainder of the test work flow in the
      expected manner.
  """

  def testCatchesCxxExceptionsInFixtureConstructor(self):
    self.assert_('C++ exception with description '
                 '"Standard C++ exception" thrown '
                 'in the test fixture\'s constructor'
                 in EX_BINARY_OUTPUT)
    self.assert_('unexpected' not in EX_BINARY_OUTPUT,
                 'This failure belongs in this test only if '
                 '"CxxExceptionInConstructorTest" (no quotes) '
                 'appears on the same line as words "called unexpectedly"')

  # The destructor-throwing test only exists in some builds, so its
  # verification method is defined conditionally, based on whether the
  # binary reported running it.
  if ('CxxExceptionInDestructorTest.ThrowsExceptionInDestructor' in
      EX_BINARY_OUTPUT):

    def testCatchesCxxExceptionsInFixtureDestructor(self):
      self.assert_('C++ exception with description '
                   '"Standard C++ exception" thrown '
                   'in the test fixture\'s destructor'
                   in EX_BINARY_OUTPUT)
      self.assert_('CxxExceptionInDestructorTest::TearDownTestCase() '
                   'called as expected.'
                   in EX_BINARY_OUTPUT)

  def testCatchesCxxExceptionsInSetUpTestCase(self):
    self.assert_('C++ exception with description "Standard C++ exception"'
                 ' thrown in SetUpTestCase()'
                 in EX_BINARY_OUTPUT)
    # The exception must not prevent the rest of the fixture life cycle
    # from running.
    self.assert_('CxxExceptionInConstructorTest::TearDownTestCase() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTestCaseTest constructor '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTestCaseTest destructor '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTestCaseTest::SetUp() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTestCaseTest::TearDown() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTestCaseTest test body '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)

  def testCatchesCxxExceptionsInTearDownTestCase(self):
    self.assert_('C++ exception with description "Standard C++ exception"'
                 ' thrown in TearDownTestCase()'
                 in EX_BINARY_OUTPUT)

  def testCatchesCxxExceptionsInSetUp(self):
    self.assert_('C++ exception with description "Standard C++ exception"'
                 ' thrown in SetUp()'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTest::TearDownTestCase() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTest destructor '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTest::TearDown() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('unexpected' not in EX_BINARY_OUTPUT,
                 'This failure belongs in this test only if '
                 '"CxxExceptionInSetUpTest" (no quotes) '
                 'appears on the same line as words "called unexpectedly"')

  def testCatchesCxxExceptionsInTearDown(self):
    self.assert_('C++ exception with description "Standard C++ exception"'
                 ' thrown in TearDown()'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInTearDownTest::TearDownTestCase() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInTearDownTest destructor '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)

  def testCatchesCxxExceptionsInTestBody(self):
    self.assert_('C++ exception with description "Standard C++ exception"'
                 ' thrown in the test body'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInTestBodyTest::TearDownTestCase() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInTestBodyTest destructor '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInTestBodyTest::TearDown() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)

  def testCatchesNonStdCxxExceptions(self):
    self.assert_('Unknown C++ exception thrown in the test body'
                 in EX_BINARY_OUTPUT)

  def testUnhandledCxxExceptionsAbortTheProgram(self):
    # Filters out SEH exception tests on Windows. Unhandled SEH exceptions
    # cause tests to show pop-up windows there.
    # (Fix: this local constant was previously misspelled 'FITLER_...'.)
    FILTER_OUT_SEH_TESTS_FLAG = FILTER_FLAG + '=-*Seh*'
    # By default, Google Test doesn't catch the exceptions.
    uncaught_exceptions_ex_binary_output = gtest_test_utils.Subprocess(
        [EX_EXE_PATH,
         NO_CATCH_EXCEPTIONS_FLAG,
         FILTER_OUT_SEH_TESTS_FLAG],
        env=environ).output
    self.assert_('Unhandled C++ exception terminating the program'
                 in uncaught_exceptions_ex_binary_output)
    self.assert_('unexpected' not in uncaught_exceptions_ex_binary_output)
# Standard entry point for Google Test's Python test scripts.
if __name__ == '__main__':
  gtest_test_utils.Main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.