repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
RPGOne/Skynet | refs/heads/Miho | scikit-learn-c604ac39ad0e5b066d964df3e8f31ba7ebda1e0e/sklearn/datasets/tests/test_samples_generator.py | 4 | from __future__ import division
from collections import defaultdict
from functools import partial
import numpy as np
from sklearn.externals.six.moves import zip
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_warns
from sklearn.datasets import make_classification
from sklearn.datasets import make_multilabel_classification
from sklearn.datasets import make_hastie_10_2
from sklearn.datasets import make_regression
from sklearn.datasets import make_blobs
from sklearn.datasets import make_friedman1
from sklearn.datasets import make_friedman2
from sklearn.datasets import make_friedman3
from sklearn.datasets import make_low_rank_matrix
from sklearn.datasets import make_sparse_coded_signal
from sklearn.datasets import make_sparse_uncorrelated
from sklearn.datasets import make_spd_matrix
from sklearn.datasets import make_swiss_roll
from sklearn.datasets import make_s_curve
from sklearn.datasets import make_biclusters
from sklearn.datasets import make_checkerboard
from sklearn.utils.validation import assert_all_finite
def test_make_classification():
    """Smoke-test make_classification: output shapes and weighted class sizes."""
    X, y = make_classification(n_samples=100, n_features=20, n_informative=5,
                               n_redundant=1, n_repeated=1, n_classes=3,
                               n_clusters_per_class=1, hypercube=False,
                               shift=None, scale=None, weights=[0.1, 0.25],
                               random_state=0)
    assert_equal(X.shape, (100, 20), "X shape mismatch")
    assert_equal(y.shape, (100,), "y shape mismatch")
    assert_equal(np.unique(y).shape, (3,), "Unexpected number of classes")
    # weights=[0.1, 0.25] pins classes 0 and 1; class 2 absorbs the remainder.
    for label, expected in enumerate((10, 25, 65)):
        assert_equal(sum(y == label), expected,
                     "Unexpected number of samples in class #%d" % label)
def test_make_classification_informative_features():
    """Test the construction of informative features in make_classification

    Also tests `n_clusters_per_class`, `n_classes`, `hypercube` and
    fully-specified `weights`.
    """
    # Create very separate clusters; check that vertices are unique and
    # correspond to classes
    class_sep = 1e6
    # flip_y=0 and shuffle=False keep class assignment deterministic so the
    # per-class proportion check below can be exact.
    make = partial(make_classification, class_sep=class_sep, n_redundant=0,
                   n_repeated=0, flip_y=0, shift=0, scale=1, shuffle=False)
    for n_informative, weights, n_clusters_per_class in [(2, [1], 1),
                                                         (2, [1/3] * 3, 1),
                                                         (2, [1/4] * 4, 1),
                                                         (2, [1/2] * 2, 2),
                                                         (2, [3/4, 1/4], 2),
                                                         (10, [1/3] * 3, 10)
                                                         ]:
        n_classes = len(weights)
        n_clusters = n_classes * n_clusters_per_class
        n_samples = n_clusters * 50
        for hypercube in (False, True):
            X, y = make(n_samples=n_samples, n_classes=n_classes,
                        weights=weights, n_features=n_informative,
                        n_informative=n_informative,
                        n_clusters_per_class=n_clusters_per_class,
                        hypercube=hypercube, random_state=0)
            assert_equal(X.shape, (n_samples, n_informative))
            assert_equal(y.shape, (n_samples,))
            # Cluster by sign, viewed as strings to allow uniquing
            # The view() trick reinterprets each row's sign pattern as one
            # fixed-width byte string, so np.unique groups rows by quadrant.
            signs = np.sign(X)
            signs = signs.view(dtype='|S{0}'.format(signs.strides[0]))
            unique_signs, cluster_index = np.unique(signs,
                                                    return_inverse=True)
            assert_equal(len(unique_signs), n_clusters,
                         "Wrong number of clusters, or not in distinct "
                         "quadrants")
            clusters_by_class = defaultdict(set)
            for cluster, cls in zip(cluster_index, y):
                clusters_by_class[cls].add(cluster)
            for clusters in clusters_by_class.values():
                assert_equal(len(clusters), n_clusters_per_class,
                             "Wrong number of clusters per class")
            assert_equal(len(clusters_by_class), n_classes,
                         "Wrong number of classes")
            # Relies on the empirical proportions matching `weights` closely
            # enough that the floor-divided ratio is exactly 1 per class.
            assert_array_almost_equal(np.bincount(y) / len(y) // weights,
                                      [1] * n_classes,
                                      err_msg="Wrong number of samples "
                                              "per class")
            # Ensure on vertices of hypercube
            for cluster in range(len(unique_signs)):
                centroid = X[cluster_index == cluster].mean(axis=0)
                if hypercube:
                    assert_array_almost_equal(np.abs(centroid),
                                              [class_sep] * n_informative,
                                              decimal=0,
                                              err_msg="Clusters are not "
                                                      "centered on hypercube "
                                                      "vertices")
                else:
                    # Without hypercube=True the centroids must NOT all sit at
                    # +/- class_sep, so the same comparison has to fail.
                    assert_raises(AssertionError,
                                  assert_array_almost_equal,
                                  np.abs(centroid),
                                  [class_sep] * n_informative,
                                  decimal=0,
                                  err_msg="Clusters should not be cenetered "
                                          "on hypercube vertices")
    # Impossible configurations (too many classes/clusters for the number of
    # informative dimensions) must raise.
    assert_raises(ValueError, make, n_features=2, n_informative=2, n_classes=5,
                  n_clusters_per_class=1)
    assert_raises(ValueError, make, n_features=2, n_informative=2, n_classes=3,
                  n_clusters_per_class=2)
def test_make_multilabel_classification_return_sequences():
    """Sequence-of-labels output (deprecated) respects allow_unlabeled."""
    for allow_unlabeled, min_length in zip((True, False), (0, 1)):
        X, Y = assert_warns(DeprecationWarning, make_multilabel_classification,
                            n_samples=100, n_features=20, n_classes=3,
                            random_state=0, allow_unlabeled=allow_unlabeled)
        assert_equal(X.shape, (100, 20), "X shape mismatch")
        if not allow_unlabeled:
            # Every sample has at least one label, so max() is safe here.
            assert_equal(max(max(y) for y in Y), 2)
        lengths = [len(y) for y in Y]
        assert_equal(min(lengths), min_length)
        assert_true(max(lengths) <= 3)
def test_make_multilabel_classification_return_indicator():
    """Indicator-matrix output has the right shape and label frequencies."""
    for allow_unlabeled, min_length in zip((True, False), (0, 1)):
        X, Y = make_multilabel_classification(n_samples=25, n_features=20,
                                              n_classes=3, random_state=0,
                                              return_indicator=True,
                                              allow_unlabeled=allow_unlabeled)
        assert_equal(X.shape, (25, 20), "X shape mismatch")
        assert_equal(Y.shape, (25, 3), "Y shape mismatch")
        # Each label column must be used more than `min_length` times.
        label_counts = np.sum(Y, axis=0)
        assert_true(np.all(label_counts > min_length))
def test_make_hastie_10_2():
    """make_hastie_10_2 yields 10 features and a binary target."""
    n_samples = 100
    X, y = make_hastie_10_2(n_samples=n_samples, random_state=0)
    assert_equal(X.shape, (n_samples, 10), "X shape mismatch")
    assert_equal(y.shape, (n_samples,), "y shape mismatch")
    classes = np.unique(y)
    assert_equal(classes.shape, (2,), "Unexpected number of classes")
def test_make_regression():
    """make_regression: shapes, coefficient sparsity, and noise level."""
    X, y, c = make_regression(n_samples=100, n_features=10, n_informative=3,
                              effective_rank=5, coef=True, bias=0.0,
                              noise=1.0, random_state=0)
    assert_equal(X.shape, (100, 10), "X shape mismatch")
    assert_equal(y.shape, (100,), "y shape mismatch")
    assert_equal(c.shape, (10,), "coef shape mismatch")
    assert_equal(sum(c != 0.0), 3, "Unexpected number of informative features")
    # The residual y - X.c should be exactly the injected N(0, 1) noise.
    residual_std = np.std(y - np.dot(X, c))
    assert_almost_equal(residual_std, 1.0, decimal=1)
    # Test with small number of features.
    X, y = make_regression(n_samples=100, n_features=1)  # n_informative=3
    assert_equal(X.shape, (100, 1))
def test_make_regression_multitarget():
    """make_regression with n_targets=3 returns 2-D y and coef."""
    X, y, c = make_regression(n_samples=100, n_features=10, n_informative=3,
                              n_targets=3, coef=True, noise=1., random_state=0)
    assert_equal(X.shape, (100, 10), "X shape mismatch")
    assert_equal(y.shape, (100, 3), "y shape mismatch")
    assert_equal(c.shape, (10, 3), "coef shape mismatch")
    assert_array_equal(sum(c != 0.0), 3,
                       "Unexpected number of informative features")
    # Residuals across all targets should match the injected unit noise.
    residuals = y - np.dot(X, c)
    assert_almost_equal(np.std(residuals), 1.0, decimal=1)
def test_make_blobs():
    """make_blobs around three fixed centers labels every sample."""
    centers = [[0.0, 0.0], [1.0, 1.0], [0.0, 1.0]]
    X, y = make_blobs(n_samples=50, n_features=2, centers=centers,
                      random_state=0)
    assert_equal(X.shape, (50, 2), "X shape mismatch")
    assert_equal(y.shape, (50,), "y shape mismatch")
    assert_equal(np.unique(y).shape, (3,), "Unexpected number of blobs")
def test_make_friedman1():
    """Noise-free Friedman #1 values match the closed-form target."""
    X, y = make_friedman1(n_samples=5, n_features=10, noise=0.0,
                          random_state=0)
    assert_equal(X.shape, (5, 10), "X shape mismatch")
    assert_equal(y.shape, (5,), "y shape mismatch")
    expected = (10 * np.sin(np.pi * X[:, 0] * X[:, 1])
                + 20 * (X[:, 2] - 0.5) ** 2
                + 10 * X[:, 3] + 5 * X[:, 4])
    assert_array_almost_equal(y, expected)
def test_make_friedman2():
    """Noise-free Friedman #2 values match the closed-form target."""
    X, y = make_friedman2(n_samples=5, noise=0.0, random_state=0)
    assert_equal(X.shape, (5, 4), "X shape mismatch")
    assert_equal(y.shape, (5,), "y shape mismatch")
    expected = (X[:, 0] ** 2
                + (X[:, 1] * X[:, 2] - 1 / (X[:, 1] * X[:, 3])) ** 2) ** 0.5
    assert_array_almost_equal(y, expected)
def test_make_friedman3():
    """Noise-free Friedman #3 values match the closed-form target."""
    X, y = make_friedman3(n_samples=5, noise=0.0, random_state=0)
    assert_equal(X.shape, (5, 4), "X shape mismatch")
    assert_equal(y.shape, (5,), "y shape mismatch")
    expected = np.arctan((X[:, 1] * X[:, 2] - 1 / (X[:, 1] * X[:, 3]))
                         / X[:, 0])
    assert_array_almost_equal(y, expected)
def test_make_low_rank_matrix():
    """Singular-value mass of the generated matrix concentrates in rank 5."""
    X = make_low_rank_matrix(n_samples=50, n_features=25, effective_rank=5,
                             tail_strength=0.01, random_state=0)
    assert_equal(X.shape, (50, 25), "X shape mismatch")
    from numpy.linalg import svd
    # With a near-zero tail the singular values sum to ~effective_rank.
    singular_values = svd(X)[1]
    assert_less(sum(singular_values) - 5, 0.1, "X rank is not approximately 5")
def test_make_sparse_coded_signal():
    """Y = D.X holds exactly, atoms are unit-norm, columns are 3-sparse."""
    Y, D, X = make_sparse_coded_signal(n_samples=5, n_components=8,
                                       n_features=10, n_nonzero_coefs=3,
                                       random_state=0)
    assert_equal(Y.shape, (10, 5), "Y shape mismatch")
    assert_equal(D.shape, (10, 8), "D shape mismatch")
    assert_equal(X.shape, (8, 5), "X shape mismatch")
    for col in X.T:
        assert_equal(len(np.flatnonzero(col)), 3, 'Non-zero coefs mismatch')
    # The signal must factor exactly through the dictionary.
    assert_array_almost_equal(np.dot(D, X), Y)
    # Every dictionary atom (column of D) must have unit l2 norm.
    atom_norms = np.sqrt((D ** 2).sum(axis=0))
    assert_array_almost_equal(atom_norms, np.ones(D.shape[1]))
def test_make_sparse_uncorrelated():
    """Shape check for make_sparse_uncorrelated."""
    X, y = make_sparse_uncorrelated(n_samples=5, n_features=10, random_state=0)
    for arr, shape, label in ((X, (5, 10), "X"), (y, (5,), "y")):
        assert_equal(arr.shape, shape, "%s shape mismatch" % label)
def test_make_spd_matrix():
    """Generated matrix is symmetric positive definite."""
    X = make_spd_matrix(n_dim=5, random_state=0)
    assert_equal(X.shape, (5, 5), "X shape mismatch")
    # Symmetry.
    assert_array_almost_equal(X, X.T)
    from numpy.linalg import eig
    eigenvalues, _ = eig(X)
    # Positive definiteness <=> every eigenvalue strictly positive.
    assert_array_equal(eigenvalues > 0, np.array([True] * 5),
                       "X is not positive-definite")
def test_make_swiss_roll():
    """Swiss-roll coordinates follow (t*cos(t), ., t*sin(t))."""
    X, t = make_swiss_roll(n_samples=5, noise=0.0, random_state=0)
    assert_equal(X.shape, (5, 3), "X shape mismatch")
    assert_equal(t.shape, (5,), "t shape mismatch")
    for axis, expected in ((0, t * np.cos(t)), (2, t * np.sin(t))):
        assert_array_almost_equal(X[:, axis], expected)
def test_make_s_curve():
    """S-curve coordinates follow (sin(t), ., sign(t)*(cos(t)-1))."""
    X, t = make_s_curve(n_samples=5, noise=0.0, random_state=0)
    assert_equal(X.shape, (5, 3), "X shape mismatch")
    assert_equal(t.shape, (5,), "t shape mismatch")
    for axis, expected in ((0, np.sin(t)),
                           (2, np.sign(t) * (np.cos(t) - 1))):
        assert_array_almost_equal(X[:, axis], expected)
def test_make_biclusters():
    """Shapes, finiteness, and random_state reproducibility of biclusters."""
    X, rows, cols = make_biclusters(
        shape=(100, 100), n_clusters=4, shuffle=True, random_state=0)
    assert_equal(X.shape, (100, 100), "X shape mismatch")
    assert_equal(rows.shape, (4, 100), "rows shape mismatch")
    assert_equal(cols.shape, (4, 100,), "columns shape mismatch")
    for arr in (X, rows, cols):
        assert_all_finite(arr)
    # Same seed => identical data matrix.
    X2, _, _ = make_biclusters(shape=(100, 100), n_clusters=4,
                               shuffle=True, random_state=0)
    assert_array_almost_equal(X, X2)
def test_make_checkerboard():
    """Shapes, finiteness, and seed reproducibility of checkerboard data."""
    X, rows, cols = make_checkerboard(
        shape=(100, 100), n_clusters=(20, 5),
        shuffle=True, random_state=0)
    assert_equal(X.shape, (100, 100), "X shape mismatch")
    assert_equal(rows.shape, (100, 100), "rows shape mismatch")
    assert_equal(cols.shape, (100, 100,), "columns shape mismatch")
    X, rows, cols = make_checkerboard(
        shape=(100, 100), n_clusters=2, shuffle=True, random_state=0)
    for arr in (X, rows, cols):
        assert_all_finite(arr)
    # Two generators seeded identically must agree exactly.
    X1, _, _ = make_checkerboard(shape=(100, 100), n_clusters=2,
                                 shuffle=True, random_state=0)
    X2, _, _ = make_checkerboard(shape=(100, 100), n_clusters=2,
                                 shuffle=True, random_state=0)
    assert_array_equal(X1, X2)
|
idegtiarov/ceilometer | refs/heads/master | ceilometer/database/notifications.py | 1 | #
# Copyright 2015 Hewlett Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging
from oslo_utils import timeutils
from ceilometer.agent import plugin_base
from ceilometer import sample
SERVICE = 'trove'
cfg.CONF.import_opt('trove_control_exchange',
'ceilometer.notification')
class TroveMetricsNotificationBase(plugin_base.NotificationBase):
    """Base class for trove (dbaas) notifications."""
    def get_targets(self, conf):
        """Return a sequence of oslo.messaging.Target

        One target is built per notification topic, each bound to the
        trove control exchange, defining what this plugin connects to.
        """
        targets = []
        for topic in self.get_notification_topics(conf):
            target = oslo_messaging.Target(
                topic=topic, exchange=conf.trove_control_exchange)
            targets.append(target)
        return targets
class InstanceExists(TroveMetricsNotificationBase):
    """Handles Trove instance exists notification.

    Emits a sample for a measurable audit interval.
    """
    event_types = ['%s.instance.exists' % SERVICE]
    def process_notification(self, message):
        """Yield one cumulative sample covering the audit period length."""
        payload = message['payload']
        period_start = timeutils.normalize_time(
            timeutils.parse_isotime(payload['audit_period_beginning']))
        period_end = timeutils.normalize_time(
            timeutils.parse_isotime(payload['audit_period_ending']))
        # Audit interval length in seconds becomes the sample volume.
        audit_seconds = timeutils.delta_seconds(period_start, period_end)
        yield sample.Sample.from_notification(
            name=message['event_type'],
            type=sample.TYPE_CUMULATIVE,
            unit='s',
            volume=audit_seconds,
            resource_id=payload['instance_id'],
            user_id=payload['user_id'],
            project_id=payload['tenant_id'],
            message=message)
|
ntonjeta/iidea-Docker | refs/heads/master | examples/sobel/src/boost_1_63_0/libs/python/test/object.py | 20 | # Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
from __future__ import print_function
'''
>>> from object_ext import *
>>> type(ref_to_noncopyable())
<class 'object_ext.NotCopyable'>
>>> def print1(x):
... print(x)
>>> call_object_3(print1)
3
>>> message()
'hello, world!'
>>> number()
42
>>> test('hi')
1
>>> test(None)
0
>>> test_not('hi')
0
>>> test_not(0)
1
Attributes
>>> class X: pass
...
>>> x = X()
>>> try: obj_getattr(x, 'foo')
... except AttributeError: pass
... else: print('expected an exception')
>>> try: obj_objgetattr(x, 'objfoo')
... except AttributeError: pass
... else: print('expected an exception')
>>> obj_setattr(x, 'foo', 1)
>>> x.foo
1
>>> obj_objsetattr(x, 'objfoo', 1)
>>> try:obj_objsetattr(x, 1)
... except TypeError: pass
... else: print('expected an exception')
>>> x.objfoo
1
>>> obj_getattr(x, 'foo')
1
>>> obj_objgetattr(x, 'objfoo')
1
>>> try:obj_objgetattr(x, 1)
... except TypeError: pass
... else: print('expected an exception')
>>> obj_const_getattr(x, 'foo')
1
>>> obj_const_objgetattr(x, 'objfoo')
1
>>> obj_setattr42(x, 'foo')
>>> x.foo
42
>>> obj_objsetattr42(x, 'objfoo')
>>> x.objfoo
42
>>> obj_moveattr(x, 'foo', 'bar')
>>> x.bar
42
>>> obj_objmoveattr(x, 'objfoo', 'objbar')
>>> x.objbar
42
>>> test_attr(x, 'foo')
1
>>> test_objattr(x, 'objfoo')
1
>>> test_not_attr(x, 'foo')
0
>>> test_not_objattr(x, 'objfoo')
0
>>> x.foo = None
>>> test_attr(x, 'foo')
0
>>> x.objfoo = None
>>> test_objattr(x, 'objfoo')
0
>>> test_not_attr(x, 'foo')
1
>>> test_not_objattr(x, 'objfoo')
1
>>> obj_delattr(x, 'foo')
>>> obj_objdelattr(x, 'objfoo')
>>> try:obj_delattr(x, 'foo')
... except AttributeError: pass
... else: print('expected an exception')
>>> try:obj_objdelattr(x, 'objfoo')
... except AttributeError: pass
... else: print('expected an exception')
Items
>>> d = {}
>>> obj_setitem(d, 'foo', 1)
>>> d['foo']
1
>>> obj_getitem(d, 'foo')
1
>>> obj_const_getitem(d, 'foo')
1
>>> obj_setitem42(d, 'foo')
>>> obj_getitem(d, 'foo')
42
>>> d['foo']
42
>>> obj_moveitem(d, 'foo', 'bar')
>>> d['bar']
42
>>> obj_moveitem2(d, 'bar', d, 'baz')
>>> d['baz']
42
>>> test_item(d, 'foo')
1
>>> test_not_item(d, 'foo')
0
>>> d['foo'] = None
>>> test_item(d, 'foo')
0
>>> test_not_item(d, 'foo')
1
Slices
>>> assert check_string_slice()
Operators
>>> def print_args(*args, **kwds):
... print(args, kwds)
>>> test_call(print_args, (0, 1, 2, 3), {'a':'A'})
(0, 1, 2, 3) {'a': 'A'}
>>> assert check_binary_operators()
>>> class X: pass
...
>>> assert check_inplace(list(range(3)), X())
Now make sure that object is actually managing reference counts
>>> import weakref
>>> class Z: pass
...
>>> z = Z()
>>> def death(r): print('death')
...
>>> r = weakref.ref(z, death)
>>> z.foo = 1
>>> obj_getattr(z, 'foo')
1
>>> del z
death
'''
def run(args = None):
    """Run this module's doctests, optionally overriding sys.argv first."""
    import doctest
    import sys
    if args is not None:
        sys.argv = args
    module = sys.modules.get(__name__)
    return doctest.testmod(module)
if __name__ == '__main__':
    print("running...")
    import sys
    # Exit status mirrors the number of failed doctests (0 == success).
    status = run()[0]
    if (status == 0): print("Done.")
    sys.exit(status)
|
yaroslavvb/tensorflow | refs/heads/master | tensorflow/python/layers/base_test.py | 27 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.layers.base."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.layers import base as base_layers
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import test
class BaseLayerTest(test.TestCase):
  """Unit tests for the private `base_layers._Layer` base class."""
  def testLayerProperties(self):
    # A fresh layer exposes empty variable/update/loss collections and is
    # not yet built.
    layer = base_layers._Layer(name='my_layer')
    self.assertEqual(layer.name, 'my_layer')
    self.assertListEqual(layer.variables, [])
    self.assertListEqual(layer.trainable_variables, [])
    self.assertListEqual(layer.non_trainable_variables, [])
    self.assertListEqual(layer.updates, [])
    self.assertListEqual(layer.losses, [])
    self.assertEqual(layer.built, False)
    # trainable=False passed at construction must be preserved.
    layer = base_layers._Layer(name='my_layer', trainable=False)
    self.assertEqual(layer.trainable, False)
  def testAddWeight(self):
    with self.test_session():
      layer = base_layers._Layer(name='my_layer')
      # Test basic variable creation.
      variable = layer._add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
      self.assertEqual(variable.name, 'my_var:0')
      self.assertListEqual(layer.variables, [variable])
      self.assertListEqual(layer.trainable_variables, [variable])
      self.assertListEqual(layer.non_trainable_variables, [])
      # Trainable variables are also mirrored into the graph collection.
      self.assertListEqual(
          layer.variables,
          ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES))
      # Test non-trainable variable creation.
      # layer._add_variable should work even outside `build` and `call`.
      variable_2 = layer._add_variable(
          'non_trainable_var', [2, 2],
          initializer=init_ops.zeros_initializer(),
          trainable=False)
      self.assertListEqual(layer.variables, [variable, variable_2])
      self.assertListEqual(layer.trainable_variables, [variable])
      self.assertListEqual(layer.non_trainable_variables, [variable_2])
      # The non-trainable variable must NOT land in TRAINABLE_VARIABLES.
      self.assertEqual(
          len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 1)
      # Test with regularizer.
      regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
      variable = layer._add_variable(
          'reg_var', [2, 2],
          initializer=init_ops.zeros_initializer(),
          regularizer=regularizer)
      # The regularization term is recorded as a layer loss.
      self.assertEqual(len(layer.losses), 1)
  def testGetVariable(self):
    with self.test_session():
      # From inside `build` and `call` it should be possible to use
      # either tf.get_variable
      class MyLayer(base_layers._Layer):
        def build(self, input_shape):
          self.my_var = variable_scope.get_variable(
              'my_var', [2, 2], initializer=init_ops.zeros_initializer())
        def call(self, inputs):
          variable_scope.get_variable(
              'my_call_var', [2, 2], initializer=init_ops.zeros_initializer())
          return inputs
      layer = MyLayer(name='my_layer')
      inputs = random_ops.random_uniform((5,), seed=1)
      # Applying twice must reuse, not duplicate, the variables.
      layer.apply(inputs)
      layer.apply(inputs)
      self.assertListEqual([v.name for v in layer.variables],
                           ['my_layer/my_var:0', 'my_layer/my_call_var:0'])
  def testCall(self):
    class MyLayer(base_layers._Layer):
      def call(self, inputs):
        return math_ops.square(inputs)
    layer = MyLayer(name='my_layer')
    inputs = random_ops.random_uniform((5,), seed=1)
    outputs = layer.apply(inputs)
    # apply() builds the layer and names the op under the layer's scope.
    self.assertEqual(layer.built, True)
    self.assertEqual(outputs.op.name, 'my_layer/Square')
  def testNaming(self):
    # Unnamed layers get 'private__layer' with a per-graph uniquifying
    # counter; explicit names get the same counter on collision.
    default_layer = base_layers._Layer()
    self.assertEqual(default_layer.name, 'private__layer')
    default_layer1 = base_layers._Layer()
    self.assertEqual(default_layer1.name, 'private__layer_1')
    my_layer = base_layers._Layer(name='my_layer')
    self.assertEqual(my_layer.name, 'my_layer')
    my_layer1 = base_layers._Layer(name='my_layer')
    self.assertEqual(my_layer1.name, 'my_layer_1')
    # New graph has fully orthogonal names.
    with ops.Graph().as_default():
      my_layer_other_graph = base_layers._Layer(name='my_layer')
      self.assertEqual(my_layer_other_graph.name, 'my_layer')
    my_layer2 = base_layers._Layer(name='my_layer')
    self.assertEqual(my_layer2.name, 'my_layer_2')
    # Name scope shouldn't affect names.
    with ops.name_scope('some_name_scope'):
      default_layer2 = base_layers._Layer()
      self.assertEqual(default_layer2.name, 'private__layer_2')
      my_layer3 = base_layers._Layer(name='my_layer')
      self.assertEqual(my_layer3.name, 'my_layer_3')
      other_layer = base_layers._Layer(name='other_layer')
      self.assertEqual(other_layer.name, 'other_layer')
    # Variable scope gets added to names.
    with variable_scope.variable_scope('var_scope'):
      default_layer_scoped = base_layers._Layer()
      self.assertEqual(default_layer_scoped.name, 'var_scope/private__layer')
      my_layer_scoped = base_layers._Layer(name='my_layer')
      self.assertEqual(my_layer_scoped.name, 'var_scope/my_layer')
      my_layer_scoped1 = base_layers._Layer(name='my_layer')
      self.assertEqual(my_layer_scoped1.name, 'var_scope/my_layer_1')
# Standard TensorFlow test entry point.
if __name__ == '__main__':
  test.main()
|
cuongthai/cuongthai-s-blog | refs/heads/master | django/contrib/staticfiles/handlers.py | 160 | import urllib
from urlparse import urlparse
from django.conf import settings
from django.core.handlers.wsgi import WSGIHandler
from django.contrib.staticfiles import utils
from django.contrib.staticfiles.views import serve
class StaticFilesHandler(WSGIHandler):
    """
    WSGI middleware that intercepts calls to the static files directory, as
    defined by the STATIC_URL setting, and serves those files.
    """
    def __init__(self, application, base_dir=None):
        # The wrapped WSGI application that handles all non-static requests.
        self.application = application
        if base_dir:
            self.base_dir = base_dir
        else:
            self.base_dir = self.get_base_dir()
        # urlparse 6-tuple; index [1] is the netloc, [2] the path prefix.
        self.base_url = urlparse(self.get_base_url())
        super(StaticFilesHandler, self).__init__()
    def get_base_dir(self):
        # Filesystem root the static files live under.
        return settings.STATIC_ROOT
    def get_base_url(self):
        # Raises ImproperlyConfigured on bad STATIC_URL/STATIC_ROOT settings.
        utils.check_settings()
        return settings.STATIC_URL
    def _should_handle(self, path):
        """
        Checks if the path should be handled. Ignores the path if:
        * the host is provided as part of the base_url
        * the request's path isn't under the media path (or equal)
        """
        return (self.base_url[2] != path and
                path.startswith(self.base_url[2]) and not self.base_url[1])
    def file_path(self, url):
        """
        Returns the relative path to the media file on disk for the given URL.
        """
        relative_url = url[len(self.base_url[2]):]
        # Convert URL separators/escapes into OS-specific path form.
        return urllib.url2pathname(relative_url)
    def serve(self, request):
        """
        Actually serves the request path.
        """
        # insecure=True lets the static view serve even when DEBUG is False.
        return serve(request, self.file_path(request.path), insecure=True)
    def get_response(self, request):
        from django.http import Http404
        if self._should_handle(request.path):
            try:
                return self.serve(request)
            except Http404, e:
                # In DEBUG mode show the detailed technical 404 page;
                # otherwise fall through to the wrapped application.
                if settings.DEBUG:
                    from django.views import debug
                    return debug.technical_404_response(request, e)
        return super(StaticFilesHandler, self).get_response(request)
    def __call__(self, environ, start_response):
        # Fast path: hand non-static requests straight to the wrapped app.
        if not self._should_handle(environ['PATH_INFO']):
            return self.application(environ, start_response)
        return super(StaticFilesHandler, self).__call__(environ, start_response)
|
deanhiller/ormPerformanceTest | refs/heads/master | tools/ant/bin/runant.py | 124 | #!/usr/bin/python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
runant.py
This script is a translation of the runant.pl written by Steve Loughran.
It runs ant with/out arguments, it should be quite portable (thanks to
the python os library)
This script has been tested with Python2.0/Win2K
created: 2001-04-11
author: Pierre Dittgen pierre.dittgen@criltelecom.com
Assumptions:
- the "java" executable/script is on the command path
"""
import os, os.path, string, sys
# Change it to 1 to get extra debug information
debug = 0
#######################################################################
# If ANT_HOME is not set default to script's parent directory
if os.environ.has_key('ANT_HOME'):
    ANT_HOME = os.environ['ANT_HOME']
else:
    ANT_HOME = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
# set ANT_LIB location
ANT_LIB = os.path.join(ANT_HOME, 'lib')
# set JAVACMD (check variables JAVACMD and JAVA_HOME)
JAVACMD = None
if not os.environ.has_key('JAVACMD'):
    if os.environ.has_key('JAVA_HOME'):
        if not os.path.exists(os.environ['JAVA_HOME']):
            print "Warning: JAVA_HOME is not defined correctly."
        else:
            JAVACMD = os.path.join(os.environ['JAVA_HOME'], 'bin', 'java')
    else:
        print "Warning: JAVA_HOME not set."
else:
    JAVACMD = os.environ['JAVACMD']
# Last resort: rely on `java` being on the command PATH.
if not JAVACMD:
    JAVACMD = 'java'
# ant-launcher.jar bootstraps the full Ant classpath at runtime.
launcher_jar = os.path.join(ANT_LIB, 'ant-launcher.jar')
if not os.path.exists(launcher_jar):
    print 'Warning: Unable to locate ant-launcher.jar. Expected to find it in %s' % \
        ANT_LIB
# Build up standard classpath (LOCALCLASSPATH)
LOCALCLASSPATH = launcher_jar
if os.environ.has_key('LOCALCLASSPATH'):
    LOCALCLASSPATH += os.pathsep + os.environ['LOCALCLASSPATH']
# Optional environment passthroughs: ANT_OPTS (JVM flags), JIKESPATH
# (jikes compiler classpath), ANT_ARGS (extra Ant args), CLASSPATH
# (forwarded via Ant's -lib option).
ANT_OPTS = ""
if os.environ.has_key('ANT_OPTS'):
    ANT_OPTS = os.environ['ANT_OPTS']
OPTS = ""
if os.environ.has_key('JIKESPATH'):
    OPTS = '-Djikes.class.path=\"%s\"' % os.environ['JIKESPATH']
ANT_ARGS = ""
if os.environ.has_key('ANT_ARGS'):
    ANT_ARGS = os.environ['ANT_ARGS']
CLASSPATH = ""
if os.environ.has_key('CLASSPATH'):
    CLASSPATH = "-lib " + os.environ['CLASSPATH']
# Builds the commandline
# NOTE(review): the command line is handed to the shell unquoted, so paths
# containing spaces will break — confirm that is acceptable for this wrapper.
cmdline = ('%s %s -classpath %s -Dant.home=%s %s ' + \
    'org.apache.tools.ant.launch.Launcher %s %s %s') \
    % (JAVACMD, ANT_OPTS, LOCALCLASSPATH, ANT_HOME, OPTS, ANT_ARGS, \
        CLASSPATH, string.join(sys.argv[1:], ' '))
if debug:
    print '\n%s\n\n' % (cmdline)
    sys.stdout.flush()
# Run the biniou!
os.system(cmdline)
|
ThorbenJensen/wifi-locator | refs/heads/master | src/utils_wifi.py | 1 | """Module that provides all wifi sensor functionality (i.e. returning scan)."""
import os
import re
import subprocess
import pandas as pd
def get_signals():
    """Return a DataFrame of currently visible wifi signals.

    Dispatches to the platform-specific scanner based on ``os.name``.

    Returns:
        pd.DataFrame with 'bssid' and 'signal' columns.

    Raises:
        Exception: if the OS is neither Windows ('nt') nor POSIX.
    """
    operating_system = os.name
    if operating_system == 'nt':
        return get_signals_windows()
    if operating_system == 'posix':
        return get_signals_linux()
    # When here: no matching operating system. The message now reflects
    # that both Windows and Linux are actually supported.
    raise Exception('Your operating system ("%s") is not supported. '
                    'Currently, Windows and Linux are supported.'
                    % operating_system)
def get_signals_linux():
    """Get wifi signals on linux.

    Toggles wifi off and on via nmcli to force a rescan, then parses the
    BSSID and signal-strength columns out of ``nmcli dev wifi list``.

    Returns:
        pd.DataFrame with 'bssid' (lower-cased str) and 'signal' (str)
        columns; an empty frame if the parsed column lengths disagree.
    """
    # NOTE(review): the off/on toggle plus two 5s sleeps makes each scan
    # take >10s — presumably needed to refresh nmcli's cache; confirm.
    command = 'nmcli nm wifi off && sleep 5 && nmcli nm wifi on && \
sleep 5 && nmcli dev wifi list'
    a = subprocess.check_output(command, shell=True)
    b = str(a)
    # BSSIDs are colon-separated hex groups; normalize to lower case.
    bssids = re.findall('[0-9A-Z]{2}:[\wA-Z\:]+', b)
    bssids = [bssid.lower() for bssid in bssids]
    # Signal strength is the integer following the "MB/s" rate column.
    signals = re.findall('MB/s\s+([0-9]+)', b)
    if len(bssids) != len(signals):
        print('bssid & signals of different length!')
        return pd.DataFrame(columns=['bssid', 'signal'])
    df = pd.DataFrame({'bssid': bssids, 'signal': signals})
    return df
def get_signals_windows():
    """Get wifi signals on windows.

    Parses BSSIDs and signal percentages from `netsh wlan show networks`.
    """
    command = 'netsh wlan show networks mode=bssid'
    raw = str(subprocess.check_output(command.split(), shell=False))
    # BSSID: colon-separated hex groups; signal: the number before '%'.
    bssids = re.findall('[0-9a-z\:]+\:[0-9a-z\:]+', raw)
    signals = re.findall('(\w+)%', raw)
    return pd.DataFrame({'bssid': bssids, 'signal': signals})
# Manual smoke test: print one scan when the module is run directly.
if __name__ == '__main__':
    print(get_signals())
|
ravenac95/simpleyaml | refs/heads/master | tests/lib/test_all.py | 1 |
import sys, simpleyaml as yaml, test_appliance
def main(args=None):
    """Run the yaml test collection (plus libyaml ext tests if available)."""
    import test_yaml
    collections = [test_yaml]
    # Only exercise the C-extension tests when libyaml bindings are built.
    if yaml.__with_libyaml__:
        import test_yaml_ext
        collections.append(test_yaml_ext)
    test_appliance.run(collections, args)
# Script entry point: run all collected test suites.
if __name__ == '__main__':
    main()
|
TheGameCreators/AGKIDE | refs/heads/master | tests/ctags/py_constructor_arglist.py | 26 | """
Test arglist parsing of a class' __init__ method:
the __init__() method's argument list should be assigned to the class' argument list.
This is somewhat special to Python and the Python parser uses tm_source_file_set_tag_arglist()
to do this and tm_source_file_set_tag_arglist() uses tm_tags_find() which by default relies on
a sorted tags array. However, when the parser uses tm_source_file_set_tag_arglist() the tags
array is *not yet* sorted and so it might be that the tag for the class SomeClass can't be found,
hence this test.
"""
from bsddb import btopen
import sys
from django.contrib.auth.decorators import login_required, permission_required
from django.http import HttpResponse, HttpResponseBadRequest
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils import simplejson
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.csrf import csrf_exempt2
from django.views.decorators.http import require_POST
from vnstat.api.error import InterfaceDataValidationError
class SomeClass(object):
    # NOTE: ctags parser fixture — __init__ is intentionally empty; the test
    # checks that its arglist is copied onto the SomeClass tag (see module
    # docstring). Keep the code tokens unchanged.
    def __init__(self, filename, pathsep='', treegap=64):
        pass
|
unnikrishnankgs/va | refs/heads/master | venv/lib/python3.5/site-packages/tensorflow/models/object_detection/builders/model_builder_test.py | 21 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for object_detection.models.model_builder."""
import tensorflow as tf
from google.protobuf import text_format
from object_detection.builders import model_builder
from object_detection.meta_architectures import faster_rcnn_meta_arch
from object_detection.meta_architectures import rfcn_meta_arch
from object_detection.meta_architectures import ssd_meta_arch
from object_detection.models import faster_rcnn_inception_resnet_v2_feature_extractor as frcnn_inc_res
from object_detection.models import faster_rcnn_resnet_v1_feature_extractor as frcnn_resnet_v1
from object_detection.models.ssd_inception_v2_feature_extractor import SSDInceptionV2FeatureExtractor
from object_detection.models.ssd_mobilenet_v1_feature_extractor import SSDMobileNetV1FeatureExtractor
from object_detection.protos import model_pb2
# Maps the feature-extractor ``type`` string used in a faster_rcnn model
# proto to the ResNet-v1 feature extractor class that implements it.
FEATURE_EXTRACTOR_MAPS = {
    'faster_rcnn_resnet50':
        frcnn_resnet_v1.FasterRCNNResnet50FeatureExtractor,
    'faster_rcnn_resnet101':
        frcnn_resnet_v1.FasterRCNNResnet101FeatureExtractor,
    'faster_rcnn_resnet152':
        frcnn_resnet_v1.FasterRCNNResnet152FeatureExtractor
}
class ModelBuilderTest(tf.test.TestCase):
  """Checks that model_builder.build creates the right meta-architecture and
  feature extractor for each supported DetectionModel config."""

  def create_model(self, model_config):
    """Builds a DetectionModel based on the model config.

    Args:
      model_config: A model.proto object containing the config for the desired
        DetectionModel.

    Returns:
      DetectionModel based on the config.
    """
    return model_builder.build(model_config, is_training=True)

  def test_create_ssd_inception_v2_model_from_config(self):
    """An 'ssd_inception_v2' config must yield an SSDMetaArch with an
    Inception-v2 feature extractor."""
    model_text_proto = """
      ssd {
        feature_extractor {
          type: 'ssd_inception_v2'
          conv_hyperparams {
            regularizer {
              l2_regularizer {
              }
            }
            initializer {
              truncated_normal_initializer {
              }
            }
          }
        }
        box_coder {
          faster_rcnn_box_coder {
          }
        }
        matcher {
          argmax_matcher {
          }
        }
        similarity_calculator {
          iou_similarity {
          }
        }
        anchor_generator {
          ssd_anchor_generator {
            aspect_ratios: 1.0
          }
        }
        image_resizer {
          fixed_shape_resizer {
            height: 320
            width: 320
          }
        }
        box_predictor {
          convolutional_box_predictor {
            conv_hyperparams {
              regularizer {
                l2_regularizer {
                }
              }
              initializer {
                truncated_normal_initializer {
                }
              }
            }
          }
        }
        loss {
          classification_loss {
            weighted_softmax {
            }
          }
          localization_loss {
            weighted_smooth_l1 {
            }
          }
        }
      }"""
    model_proto = model_pb2.DetectionModel()
    text_format.Merge(model_text_proto, model_proto)
    model = self.create_model(model_proto)
    self.assertIsInstance(model, ssd_meta_arch.SSDMetaArch)
    self.assertIsInstance(model._feature_extractor,
                          SSDInceptionV2FeatureExtractor)

  def test_create_ssd_mobilenet_v1_model_from_config(self):
    """An 'ssd_mobilenet_v1' config must yield an SSDMetaArch with a
    MobileNet-v1 feature extractor."""
    model_text_proto = """
      ssd {
        feature_extractor {
          type: 'ssd_mobilenet_v1'
          conv_hyperparams {
            regularizer {
              l2_regularizer {
              }
            }
            initializer {
              truncated_normal_initializer {
              }
            }
          }
        }
        box_coder {
          faster_rcnn_box_coder {
          }
        }
        matcher {
          argmax_matcher {
          }
        }
        similarity_calculator {
          iou_similarity {
          }
        }
        anchor_generator {
          ssd_anchor_generator {
            aspect_ratios: 1.0
          }
        }
        image_resizer {
          fixed_shape_resizer {
            height: 320
            width: 320
          }
        }
        box_predictor {
          convolutional_box_predictor {
            conv_hyperparams {
              regularizer {
                l2_regularizer {
                }
              }
              initializer {
                truncated_normal_initializer {
                }
              }
            }
          }
        }
        loss {
          classification_loss {
            weighted_softmax {
            }
          }
          localization_loss {
            weighted_smooth_l1 {
            }
          }
        }
      }"""
    model_proto = model_pb2.DetectionModel()
    text_format.Merge(model_text_proto, model_proto)
    model = self.create_model(model_proto)
    self.assertIsInstance(model, ssd_meta_arch.SSDMetaArch)
    self.assertIsInstance(model._feature_extractor,
                          SSDMobileNetV1FeatureExtractor)

  def test_create_faster_rcnn_resnet_v1_models_from_config(self):
    """Every ResNet-v1 extractor type in FEATURE_EXTRACTOR_MAPS must build a
    FasterRCNNMetaArch with the matching feature extractor class."""
    model_text_proto = """
      faster_rcnn {
        num_classes: 3
        image_resizer {
          keep_aspect_ratio_resizer {
            min_dimension: 600
            max_dimension: 1024
          }
        }
        feature_extractor {
          type: 'faster_rcnn_resnet101'
        }
        first_stage_anchor_generator {
          grid_anchor_generator {
            scales: [0.25, 0.5, 1.0, 2.0]
            aspect_ratios: [0.5, 1.0, 2.0]
            height_stride: 16
            width_stride: 16
          }
        }
        first_stage_box_predictor_conv_hyperparams {
          regularizer {
            l2_regularizer {
            }
          }
          initializer {
            truncated_normal_initializer {
            }
          }
        }
        initial_crop_size: 14
        maxpool_kernel_size: 2
        maxpool_stride: 2
        second_stage_box_predictor {
          mask_rcnn_box_predictor {
            fc_hyperparams {
              op: FC
              regularizer {
                l2_regularizer {
                }
              }
              initializer {
                truncated_normal_initializer {
                }
              }
            }
          }
        }
        second_stage_post_processing {
          batch_non_max_suppression {
            score_threshold: 0.01
            iou_threshold: 0.6
            max_detections_per_class: 100
            max_total_detections: 300
          }
          score_converter: SOFTMAX
        }
      }"""
    model_proto = model_pb2.DetectionModel()
    text_format.Merge(model_text_proto, model_proto)
    # Reuse one proto, swapping only the extractor type per iteration.
    for extractor_type, extractor_class in FEATURE_EXTRACTOR_MAPS.items():
      model_proto.faster_rcnn.feature_extractor.type = extractor_type
      model = model_builder.build(model_proto, is_training=True)
      self.assertIsInstance(model, faster_rcnn_meta_arch.FasterRCNNMetaArch)
      self.assertIsInstance(model._feature_extractor, extractor_class)

  def test_create_faster_rcnn_inception_resnet_v2_model_from_config(self):
    """A 'faster_rcnn_inception_resnet_v2' config must build a
    FasterRCNNMetaArch with the Inception-ResNet-v2 feature extractor."""
    model_text_proto = """
      faster_rcnn {
        num_classes: 3
        image_resizer {
          keep_aspect_ratio_resizer {
            min_dimension: 600
            max_dimension: 1024
          }
        }
        feature_extractor {
          type: 'faster_rcnn_inception_resnet_v2'
        }
        first_stage_anchor_generator {
          grid_anchor_generator {
            scales: [0.25, 0.5, 1.0, 2.0]
            aspect_ratios: [0.5, 1.0, 2.0]
            height_stride: 16
            width_stride: 16
          }
        }
        first_stage_box_predictor_conv_hyperparams {
          regularizer {
            l2_regularizer {
            }
          }
          initializer {
            truncated_normal_initializer {
            }
          }
        }
        initial_crop_size: 17
        maxpool_kernel_size: 1
        maxpool_stride: 1
        second_stage_box_predictor {
          mask_rcnn_box_predictor {
            fc_hyperparams {
              op: FC
              regularizer {
                l2_regularizer {
                }
              }
              initializer {
                truncated_normal_initializer {
                }
              }
            }
          }
        }
        second_stage_post_processing {
          batch_non_max_suppression {
            score_threshold: 0.01
            iou_threshold: 0.6
            max_detections_per_class: 100
            max_total_detections: 300
          }
          score_converter: SOFTMAX
        }
      }"""
    model_proto = model_pb2.DetectionModel()
    text_format.Merge(model_text_proto, model_proto)
    model = model_builder.build(model_proto, is_training=True)
    self.assertIsInstance(model, faster_rcnn_meta_arch.FasterRCNNMetaArch)
    self.assertIsInstance(
        model._feature_extractor,
        frcnn_inc_res.FasterRCNNInceptionResnetV2FeatureExtractor)

  def test_create_faster_rcnn_model_from_config_with_example_miner(self):
    """A faster_rcnn config with a hard_example_miner section must attach a
    miner to the built model."""
    model_text_proto = """
      faster_rcnn {
        num_classes: 3
        feature_extractor {
          type: 'faster_rcnn_inception_resnet_v2'
        }
        image_resizer {
          keep_aspect_ratio_resizer {
            min_dimension: 600
            max_dimension: 1024
          }
        }
        first_stage_anchor_generator {
          grid_anchor_generator {
            scales: [0.25, 0.5, 1.0, 2.0]
            aspect_ratios: [0.5, 1.0, 2.0]
            height_stride: 16
            width_stride: 16
          }
        }
        first_stage_box_predictor_conv_hyperparams {
          regularizer {
            l2_regularizer {
            }
          }
          initializer {
            truncated_normal_initializer {
            }
          }
        }
        second_stage_box_predictor {
          mask_rcnn_box_predictor {
            fc_hyperparams {
              op: FC
              regularizer {
                l2_regularizer {
                }
              }
              initializer {
                truncated_normal_initializer {
                }
              }
            }
          }
        }
        hard_example_miner {
          num_hard_examples: 10
          iou_threshold: 0.99
        }
      }"""
    model_proto = model_pb2.DetectionModel()
    text_format.Merge(model_text_proto, model_proto)
    model = model_builder.build(model_proto, is_training=True)
    self.assertIsNotNone(model._hard_example_miner)

  def test_create_rfcn_resnet_v1_model_from_config(self):
    """Using an rfcn_box_predictor as second stage must switch the built
    model to RFCNMetaArch for every ResNet-v1 extractor type."""
    model_text_proto = """
      faster_rcnn {
        num_classes: 3
        image_resizer {
          keep_aspect_ratio_resizer {
            min_dimension: 600
            max_dimension: 1024
          }
        }
        feature_extractor {
          type: 'faster_rcnn_resnet101'
        }
        first_stage_anchor_generator {
          grid_anchor_generator {
            scales: [0.25, 0.5, 1.0, 2.0]
            aspect_ratios: [0.5, 1.0, 2.0]
            height_stride: 16
            width_stride: 16
          }
        }
        first_stage_box_predictor_conv_hyperparams {
          regularizer {
            l2_regularizer {
            }
          }
          initializer {
            truncated_normal_initializer {
            }
          }
        }
        initial_crop_size: 14
        maxpool_kernel_size: 2
        maxpool_stride: 2
        second_stage_box_predictor {
          rfcn_box_predictor {
            conv_hyperparams {
              op: CONV
              regularizer {
                l2_regularizer {
                }
              }
              initializer {
                truncated_normal_initializer {
                }
              }
            }
          }
        }
        second_stage_post_processing {
          batch_non_max_suppression {
            score_threshold: 0.01
            iou_threshold: 0.6
            max_detections_per_class: 100
            max_total_detections: 300
          }
          score_converter: SOFTMAX
        }
      }"""
    model_proto = model_pb2.DetectionModel()
    text_format.Merge(model_text_proto, model_proto)
    for extractor_type, extractor_class in FEATURE_EXTRACTOR_MAPS.items():
      model_proto.faster_rcnn.feature_extractor.type = extractor_type
      model = model_builder.build(model_proto, is_training=True)
      self.assertIsInstance(model, rfcn_meta_arch.RFCNMetaArch)
      self.assertIsInstance(model._feature_extractor, extractor_class)
# Allow running this test file directly.
if __name__ == '__main__':
  tf.test.main()
|
jean/sentry | refs/heads/master | tests/sentry/plugins/mail/activity/__init__.py | 2878 | from __future__ import absolute_import
|
Thraxis/SickRage | refs/heads/master | lib/chardet/charsetgroupprober.py | 53 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .enums import ProbingState
from .charsetprober import CharSetProber
class CharSetGroupProber(CharSetProber):
    """Composite prober: feeds input to a group of child probers and reports
    the charset of whichever child is the most confident."""

    def __init__(self, lang_filter=None):
        super(CharSetGroupProber, self).__init__(lang_filter=lang_filter)
        # Number of child probers that have not yet ruled themselves out.
        self._active_num = 0
        self.probers = []
        # Child with the best score; elected lazily by get_confidence().
        self._best_guess_prober = None

    def reset(self):
        super(CharSetGroupProber, self).reset()
        self._active_num = 0
        for prober in self.probers:
            if prober:
                prober.reset()
                prober.active = True
                self._active_num += 1
        self._best_guess_prober = None

    @property
    def charset_name(self):
        if not self._best_guess_prober:
            # get_confidence() has the side effect of electing a best guess.
            self.get_confidence()
            if not self._best_guess_prober:
                return None
        return self._best_guess_prober.charset_name

    def feed(self, byte_str):
        for prober in self.probers:
            if not prober:
                continue
            if not prober.active:
                continue
            state = prober.feed(byte_str)
            if not state:
                continue
            if state == ProbingState.found_it:
                # The first child that is certain wins outright.
                self._best_guess_prober = prober
                return self.state
            elif state == ProbingState.not_me:
                # Deactivate children that have ruled themselves out; when
                # none remain, the whole group gives up.
                prober.active = False
                self._active_num -= 1
                if self._active_num <= 0:
                    self._state = ProbingState.not_me
                    return self.state
        return self.state

    def get_confidence(self):
        state = self.state
        if state == ProbingState.found_it:
            return 0.99
        elif state == ProbingState.not_me:
            return 0.01
        # Still detecting: take the max confidence over active children and
        # remember which child produced it (used by charset_name).
        best_conf = 0.0
        self._best_guess_prober = None
        for prober in self.probers:
            if not prober:
                continue
            if not prober.active:
                self.logger.debug('%s not active', prober.charset_name)
                continue
            conf = prober.get_confidence()
            self.logger.debug('%s confidence = %s', prober.charset_name, conf)
            if best_conf < conf:
                best_conf = conf
                self._best_guess_prober = prober
        if not self._best_guess_prober:
            return 0.0
        return best_conf
|
andreabrambilla/libres | refs/heads/master | test-data/local/snake_oil_structure/snake_oil/jobs/snake_oil_simulator.py | 2 | #!/usr/bin/env python
from datetime import datetime
import os
import sys
from ecl.summary import EclSum, EclSumTStep
from ecl.test import ExtendedTestCase
from res.test.synthesizer import OilSimulator
def globalIndex(i, j, k, nx=10, ny=10, nz=10):
    """Map (i, j, k) grid coordinates to a flat global index.

    ``j`` and ``k`` are treated as 1-based and shifted down; ``i`` is used
    as-is (so globalIndex(5, 5, 5) == 445), matching how the block keys
    below ("5,5,5", "1,3,8") are addressed.  ``nz`` is accepted for symmetry
    but unused.
    """
    row_offset = j - 1
    plane_offset = ny * (k - 1)
    return i + nx * (row_offset + plane_offset)
def readParameters(filename):
    """Read a simple ``KEY: VALUE`` parameter file into a dict.

    Each non-blank line is split on the first ':'; the value keeps
    everything after it with surrounding whitespace stripped.  Blank or
    whitespace-only lines are skipped (the original implementation raised
    ValueError on them).

    Args:
        filename: path to the text file to parse.

    Returns:
        dict mapping key strings (left of ':') to stripped value strings.
    """
    params = {}
    with open(filename, "r") as f:
        for line in f:
            # Tolerate blank lines instead of crashing on the unpack below.
            if not line.strip():
                continue
            key, value = line.split(":", 1)
            params[key] = value.strip()
    return params
def runSimulator(simulator, history_simulator, time_step_count):
    """Run the synthetic simulators and collect their output in an EclSum.

    Args:
        simulator: OilSimulator providing the "simulated" response keys.
        history_simulator: OilSimulator providing the history (...H) keys.
        time_step_count: number of report steps; each report step is split
            into 10 mini steps, with sim_days advancing one day per mini
            step.

    Returns:
        (ecl_sum, time_map): the populated summary and a list of
        "dd/mm/YYYY" date strings, one per mini step.

    @rtype: EclSum
    """
    # 10x10x10 case starting 2010-01-01.
    ecl_sum = EclSum.writer("SNAKE_OIL_FIELD", datetime(2010, 1, 1), 10, 10, 10)
    # Field-level vectors and their history twins.
    ecl_sum.addVariable("FOPT")
    ecl_sum.addVariable("FOPR")
    ecl_sum.addVariable("FGPT")
    ecl_sum.addVariable("FGPR")
    ecl_sum.addVariable("FWPT")
    ecl_sum.addVariable("FWPR")
    ecl_sum.addVariable("FGOR")
    ecl_sum.addVariable("FWCT")
    ecl_sum.addVariable("FOPTH")
    ecl_sum.addVariable("FOPRH")
    ecl_sum.addVariable("FGPTH")
    ecl_sum.addVariable("FGPRH")
    ecl_sum.addVariable("FWPTH")
    ecl_sum.addVariable("FWPRH")
    ecl_sum.addVariable("FGORH")
    ecl_sum.addVariable("FWCTH")
    # Per-well vectors for the two producers OP1/OP2.
    ecl_sum.addVariable("WOPR", wgname="OP1")
    ecl_sum.addVariable("WOPR", wgname="OP2")
    ecl_sum.addVariable("WWPR", wgname="OP1")
    ecl_sum.addVariable("WWPR", wgname="OP2")
    ecl_sum.addVariable("WGPR", wgname="OP1")
    ecl_sum.addVariable("WGPR", wgname="OP2")
    ecl_sum.addVariable("WGOR", wgname="OP1")
    ecl_sum.addVariable("WGOR", wgname="OP2")
    ecl_sum.addVariable("WWCT", wgname="OP1")
    ecl_sum.addVariable("WWCT", wgname="OP2")
    ecl_sum.addVariable("WOPRH", wgname="OP1")
    ecl_sum.addVariable("WOPRH", wgname="OP2")
    ecl_sum.addVariable("WWPRH", wgname="OP1")
    ecl_sum.addVariable("WWPRH", wgname="OP2")
    ecl_sum.addVariable("WGPRH", wgname="OP1")
    ecl_sum.addVariable("WGPRH", wgname="OP2")
    ecl_sum.addVariable("WGORH", wgname="OP1")
    ecl_sum.addVariable("WGORH", wgname="OP2")
    ecl_sum.addVariable("WWCTH", wgname="OP1")
    ecl_sum.addVariable("WWCTH", wgname="OP2")
    # Block pressure in two cells (1-based i,j,k mapped to a flat index).
    ecl_sum.addVariable("BPR", num=globalIndex(5, 5, 5))
    ecl_sum.addVariable("BPR", num=globalIndex(1, 3, 8))
    time_map = []
    mini_step_count = 10
    total_step_count = time_step_count * mini_step_count
    for report_step in range(time_step_count):
        for mini_step in range(mini_step_count):
            t_step = ecl_sum.addTStep(report_step + 1, sim_days=report_step * mini_step_count + mini_step)
            time_map.append(t_step.getSimTime().datetime().strftime("%d/%m/%Y"))
            # Advance both simulators by one mini step; scale keeps the total
            # produced volume independent of the step count.
            simulator.step(scale=1.0 / total_step_count)
            history_simulator.step(scale=1.0 / total_step_count)
            t_step["FOPR"] = simulator.fopr()
            t_step["FOPT"] = simulator.fopt()
            t_step["FGPR"] = simulator.fgpr()
            t_step["FGPT"] = simulator.fgpt()
            t_step["FWPR"] = simulator.fwpr()
            t_step["FWPT"] = simulator.fwpt()
            t_step["FGOR"] = simulator.fgor()
            t_step["FWCT"] = simulator.fwct()
            t_step["WOPR:OP1"] = simulator.opr("OP1")
            t_step["WOPR:OP2"] = simulator.opr("OP2")
            t_step["WGPR:OP1"] = simulator.gpr("OP1")
            t_step["WGPR:OP2"] = simulator.gpr("OP2")
            t_step["WWPR:OP1"] = simulator.wpr("OP1")
            t_step["WWPR:OP2"] = simulator.wpr("OP2")
            t_step["WGOR:OP1"] = simulator.gor("OP1")
            t_step["WGOR:OP2"] = simulator.gor("OP2")
            t_step["WWCT:OP1"] = simulator.wct("OP1")
            t_step["WWCT:OP2"] = simulator.wct("OP2")
            t_step["BPR:5,5,5"] = simulator.bpr("5,5,5")
            t_step["BPR:1,3,8"] = simulator.bpr("1,3,8")
            t_step["FOPRH"] = history_simulator.fopr()
            t_step["FOPTH"] = history_simulator.fopt()
            t_step["FGPRH"] = history_simulator.fgpr()
            t_step["FGPTH"] = history_simulator.fgpt()
            t_step["FWPRH"] = history_simulator.fwpr()
            t_step["FWPTH"] = history_simulator.fwpt()
            t_step["FGORH"] = history_simulator.fgor()
            t_step["FWCTH"] = history_simulator.fwct()
            t_step["WOPRH:OP1"] = history_simulator.opr("OP1")
            t_step["WOPRH:OP2"] = history_simulator.opr("OP2")
            t_step["WGPRH:OP1"] = history_simulator.gpr("OP1")
            t_step["WGPRH:OP2"] = history_simulator.gpr("OP2")
            t_step["WWPRH:OP1"] = history_simulator.wpr("OP1")
            t_step["WWPRH:OP2"] = history_simulator.wpr("OP2")
            t_step["WGORH:OP1"] = history_simulator.gor("OP1")
            t_step["WGORH:OP2"] = history_simulator.gor("OP2")
            t_step["WWCTH:OP1"] = history_simulator.wct("OP1")
            t_step["WWCTH:OP2"] = history_simulator.wct("OP2")
    return ecl_sum, time_map
def roundedInt(value):
    """Parse *value* as a float and round it to the nearest integer.

    Accepts anything ``float()`` accepts (numbers or numeric strings).
    """
    as_float = float(value)
    return int(round(as_float))
if __name__ == '__main__':
    # Inputs are "KEY: VALUE" text files produced by the surrounding
    # workflow (forward-model job).
    seed = int(readParameters("seed.txt")["SEED"])
    parameters = readParameters("snake_oil_params.txt")
    op1_divergence_scale = float(parameters["OP1_DIVERGENCE_SCALE"])
    op2_divergence_scale = float(parameters["OP2_DIVERGENCE_SCALE"])
    op1_persistence = float(parameters["OP1_PERSISTENCE"])
    op2_persistence = float(parameters["OP2_PERSISTENCE"])
    op1_offset = float(parameters["OP1_OFFSET"])
    op2_offset = float(parameters["OP2_OFFSET"])
    bpr_138_persistence = float(parameters["BPR_138_PERSISTENCE"])
    bpr_555_persistence = float(parameters["BPR_555_PERSISTENCE"])
    op1_octaves = roundedInt(parameters["OP1_OCTAVES"])
    op2_octaves = roundedInt(parameters["OP2_OCTAVES"])
    # The "simulated" response: seeds are derived from the sampled SEED so
    # each realisation differs.
    simulator = OilSimulator()
    simulator.addWell("OP1", seed * 997, persistence=op1_persistence, octaves=op1_octaves, divergence_scale=op1_divergence_scale, offset=op1_offset)
    simulator.addWell("OP2", seed * 13, persistence=op2_persistence, octaves=op2_octaves, divergence_scale=op2_divergence_scale, offset=op2_offset)
    simulator.addBlock("5,5,5", seed * 37, persistence=bpr_555_persistence)
    simulator.addBlock("1,3,8", seed * 31, persistence=bpr_138_persistence)
    # The "history": fixed seeds so every realisation shares the same truth.
    history_simulator = OilSimulator()
    history_simulator.addWell("OP1", 222118781)
    history_simulator.addWell("OP2", 118116362)
    report_step_count = 200
    ecl_sum, time_map = runSimulator(simulator, history_simulator, report_step_count)
    # Write the summary files plus one date per mini step for later lookup.
    ecl_sum.fwrite()
    with open("time_map.txt", "w") as f:
        for t in time_map:
            f.write("%s\n" % t)
|
lebauce/artub | refs/heads/master | plugins/speculoos/poujolobjs.py | 1 | # Glumol - An adventure game creator
# Copyright (C) 1998-2008 Sylvain Baubeau & Alexis Contour
# This file is part of Glumol.
# Glumol is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# Glumol is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Glumol. If not, see <http://www.gnu.org/licenses/>.
import pypoujol
from OpenGL.GL import *
from box import POINT_WIDTH
class RegionPoint(pypoujol.Sprite):
    """A small square handle sprite used to mark one point of a region."""

    def __init__(self, parent = None, frames = 1, virtual_frames = -1):
        # NOTE(review): frames/virtual_frames are accepted but never used.
        pypoujol.Sprite.__init__(self, parent)
        self.x = 0
        self.y = 0
        # RGBA colour, each channel in [0.0, 1.0].
        self.color = (1.0, 1.0, 1.0, 1.0)
        self.visible = True

    def draw(self):
        # Draw a flat-coloured POINT_WIDTH x POINT_WIDTH quad centred on
        # (x, y), in untextured immediate mode with an identity modelview.
        if not self.visible: return
        glDisable(GL_TEXTURE_2D)
        glPushMatrix()
        glLoadIdentity()
        glBegin(GL_QUADS)
        x, y, w, h = self.x, self.y, POINT_WIDTH, POINT_WIDTH
        glColor4f(*self.color)
        glVertex2f(x - w / 2, y - h / 2)
        glColor4f(*self.color)
        glVertex2f(x + w / 2, y - h / 2)
        glColor4f(*self.color)
        glVertex2f(x + w / 2, y + h / 2)
        glColor4f(*self.color)
        glVertex2f(x - w / 2, y + h / 2)
        glEnd()
        glPopMatrix()
        glEnable(GL_TEXTURE_2D)
class Contour(pypoujol.Sprite):
    """Sprite that draws the outline of a box's point list as a line strip
    (or a closed loop when ``loop`` is set)."""

    def __init__(self, parent = None, frames = 1, virtual_frames = -1, loop = False, hotspot = (0, 0)):
        # NOTE(review): frames/virtual_frames are accepted but never used.
        pypoujol.Sprite.__init__(self, parent)
        self.hotspot = hotspot
        self.box = None
        # RGBA colour, each channel in [0.0, 1.0].
        self.color = (1.0, 1.0, 1.0, 1.0)
        self.visible = True
        self.gpoints = []
        self.loop = loop

    def draw(self):
        # Need at least two points to draw a line.
        if not self.box or not self.visible or len(self.box.points) < 2: return
        glDisable(GL_TEXTURE_2D)
        # NOTE(review): this glTranslatef is not wrapped in
        # glPushMatrix/glPopMatrix, so the hotspot offset accumulates into
        # the current matrix -- confirm the caller resets it.
        glTranslatef(self.hotspot[0], self.hotspot[1], 0)
        if self.loop:
            glBegin(GL_LINE_LOOP)
        else:
            glBegin(GL_LINE_STRIP)
        glColor4f(*self.color)
        if self.box:
            for i in self.box.points:
                glVertex2f(i[0], i[1])
        glEnd()
        glEnable(GL_TEXTURE_2D)

    def set_box(self, box):
        self.box = box
|
with-git/tensorflow | refs/heads/master | tensorflow/contrib/bayesflow/python/ops/custom_grad.py | 44 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions for specifying custom gradients.
See ${python/contrib.bayesflow.custom_gradient}.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.contrib.bayesflow.python.ops.custom_grad_impl import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
# Public API of this module: everything else pulled in by the wildcard
# import above is stripped from the docs/namespace below.
_allowed_symbols = [
    'custom_gradient',
]

remove_undocumented(__name__, _allowed_symbols)
|
md1024/rams | refs/heads/init_master | uber/tests/email/test_automated_email_categories.py | 1 | from uber.tests.email.email_fixtures import *
class FakeModel:
    # Minimal stand-in of an unexpected model type; used to exercise the
    # wrong-model path in _should_send() (see
    # test_should_send_incorrect_model_used).
    pass
@pytest.mark.usefixtures("email_subsystem_sane_setup")
class TestAutomatedEmailCategory:
def test_testing_environment(self, get_test_email_category):
assert len(AutomatedEmail.instances) == 1
assert len(AutomatedEmail.queries[Attendee](None)) == 3
assert not get_test_email_category.unapproved_emails_not_sent
def test_event_name(self, get_test_email_category):
assert get_test_email_category.subject == E.SUBJECT_TO_FIND
assert get_test_email_category.ident == E.IDENT_TO_FIND
def test_approval_needed_and_we_have_it(self, monkeypatch, set_test_approved_idents, get_test_email_category, log_unsent_because_unapproved):
job = SendAllAutomatedEmailsJob()
assert get_test_email_category.approved
assert job.log_unsent_because_unapproved.call_count == 0
def test_approval_needed_and_we_dont_have_it(self, monkeypatch, get_test_email_category, log_unsent_because_unapproved):
job = SendAllAutomatedEmailsJob()
assert not get_test_email_category.approved
assert job.log_unsent_because_unapproved.call_count == 1
# attempt to send the same email and we should see the unapproved count go up because it's still unapproved
assert not get_test_email_category.approved
assert job.log_unsent_because_unapproved.call_count == 2
def test_approval_not_needed(self, monkeypatch, get_test_email_category):
assert not get_test_email_category.approved
monkeypatch.setattr(get_test_email_category, 'needs_approval', False)
assert get_test_email_category.approved
# -------------- test should_send() -------------------
def test_should_send_goes_through(self, get_test_email_category, set_test_approved_idents, attendee1):
assert get_test_email_category._should_send(model_inst=attendee1)
def test_should_send_incorrect_model_used(self, monkeypatch, get_test_email_category, attendee1):
wrong_model = FakeModel()
assert not get_test_email_category._should_send(model_inst=wrong_model)
def test_should_send_no_email_present(self, monkeypatch, get_test_email_category, attendee1):
delattr(attendee1, 'email')
assert not get_test_email_category._should_send(model_inst=attendee1)
def test_should_send_blank_email_present(self, monkeypatch, get_test_email_category, attendee1):
attendee1.email = ''
assert not get_test_email_category._should_send(model_inst=attendee1)
def test_should_send_already_sent_this_email(self, get_test_email_category, set_test_approved_idents, set_previously_sent_emails_to_attendee1, attendee1):
assert not get_test_email_category._should_send(model_inst=attendee1)
def test_should_send_wrong_filter(self, get_test_email_category, set_test_approved_idents, attendee1):
get_test_email_category.filter = lambda a: a.paid == c.HAS_PAID
assert not get_test_email_category._should_send(model_inst=attendee1)
def test_should_send_not_approved(self, get_test_email_category, attendee1):
assert not get_test_email_category._should_send(model_inst=attendee1)
def test_should_send_at_con(self, at_con, get_test_email_category, set_test_approved_idents, attendee1):
assert not get_test_email_category._should_send(model_inst=attendee1)
get_test_email_category.allow_during_con = True
assert get_test_email_category._should_send(model_inst=attendee1)
# -----------
def test_send_doesnt_throw_exception(self, monkeypatch, get_test_email_category):
get_test_email_category.send_if_should(None, raise_errors=False)
def test_send_throws_exception(self, monkeypatch, get_test_email_category):
monkeypatch.setattr(get_test_email_category, '_should_send', Mock(side_effect=Exception('Boom!')))
with pytest.raises(Exception):
get_test_email_category.send_if_should(None, raise_errors=True)
def test_really_send_throws_exception(self, monkeypatch, get_test_email_category):
monkeypatch.setattr(get_test_email_category, 'computed_subject', Mock(side_effect=Exception('Boom!')))
with pytest.raises(Exception):
get_test_email_category.really_send(None)
valid_when = days_after(3, sept_15th - timedelta(days=5))
invalid_when = days_after(3, sept_15th)
@pytest.mark.parametrize("when, expected_result", [
([invalid_when], False),
([valid_when], True),
([invalid_when, valid_when], False),
([valid_when, invalid_when], False),
([invalid_when, invalid_when], False),
([valid_when, valid_when], True),
((), True)
])
def test_when_function(self, monkeypatch, get_test_email_category, set_datebase_now_to_sept_15th, attendee1, when, expected_result):
monkeypatch.setattr(get_test_email_category, 'when', when)
monkeypatch.setattr(AutomatedEmail, 'approved', True)
assert get_test_email_category.filters_run(attendee1) == expected_result
assert get_test_email_category._run_date_filters() == expected_result
assert get_test_email_category._should_send(model_inst=attendee1) == expected_result
@pytest.mark.parametrize("when, expected_text", [
([
days_after(3, sept_15th - timedelta(days=5)),
before(sept_15th - timedelta(days=3)),
days_before(3, sept_15th + timedelta(days=5), 1),
], [
'after 09/13',
'before 09/12',
'between 09/17 and 09/19'
]),
([days_after(3, sept_15th - timedelta(days=5))], ['after 09/13']),
])
def test_when_txt(self, monkeypatch, get_test_email_category, set_datebase_now_to_sept_15th, attendee1, when, expected_text):
monkeypatch.setattr(get_test_email_category, 'when', when)
assert get_test_email_category.when_txt == '\n'.join(expected_text)
@pytest.mark.parametrize("filter, expected_result", [
(lambda a: False, False),
(lambda a: True, True),
(lambda a: a.paid == c.NEED_NOT_PAY, True),
(lambda a: a.paid != c.NEED_NOT_PAY, False),
])
def test_filters(self, monkeypatch, get_test_email_category, attendee1, filter, expected_result):
monkeypatch.setattr(get_test_email_category, 'filter', filter)
monkeypatch.setattr(AutomatedEmail, 'approved', True)
assert get_test_email_category.filters_run(attendee1) == expected_result
assert get_test_email_category._should_send(model_inst=attendee1) == expected_result
def test_none_filter(self):
with pytest.raises(AssertionError):
AutomatedEmail(Attendee, '', '', None)
def test_no_filter(self):
# this is slightly silly but, if this ever changes, we should be explicit about what the expected result is
with pytest.raises(TypeError):
AutomatedEmail(Attendee, '', '')
|
wileeam/airflow | refs/heads/master | airflow/ti_deps/deps/dagrun_exists_dep.py | 4 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
from airflow.utils.session import provide_session
from airflow.utils.state import State
class DagrunRunningDep(BaseTIDep):
    """Dep that passes only when the task instance's DagRun exists and is in
    the RUNNING state."""
    NAME = "Dagrun Running"
    IGNOREABLE = True

    @provide_session
    def _get_dep_statuses(self, ti, session, dep_context):
        dag = ti.task.dag
        dagrun = ti.get_dagrun(session)
        if not dagrun:
            # The import is needed here to avoid a circular dependency
            from airflow.models import DagRun
            # Try to explain *why* there is no dagrun: most commonly the DAG
            # has hit its max_active_runs cap.
            running_dagruns = DagRun.find(
                dag_id=dag.dag_id,
                state=State.RUNNING,
                external_trigger=False,
                session=session
            )
            if len(running_dagruns) >= dag.max_active_runs:
                reason = ("The maximum number of active dag runs ({0}) for this task "
                          "instance's DAG '{1}' has been reached.".format(
                              dag.max_active_runs,
                              ti.dag_id))
            else:
                reason = "Unknown reason"
            yield self._failing_status(
                reason="Task instance's dagrun did not exist: {0}.".format(reason))
        else:
            if dagrun.state != State.RUNNING:
                yield self._failing_status(
                    reason="Task instance's dagrun was not in the 'running' state but in "
                           "the state '{}'.".format(dagrun.state))
|
qianyu668899/Django | refs/heads/master | depot/depotapp/views.py | 2 | #/usr/bin/python
#coding: utf8
# Create your views here.
from django import forms
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseRedirect
from django.template.loader import get_template
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
# app specific files
from models import *
from forms import *
import datetime
#from djangorestframework.views import View
from django.db import transaction
from django.contrib.auth import authenticate,login,logout
from django.contrib.auth.decorators import login_required
def login_view(request):
    """Authenticate from POSTed credentials; on success log in and show the
    product list, otherwise fall back to the store page."""
    # NOTE(review): request.POST['username'] / ['password'] raise KeyError on
    # GET requests or malformed POSTs -- confirm the form always posts both.
    user = authenticate(username=request.POST['username'], password=request.POST['password'])
    if user is not None:
        login(request, user)
        print request.user
        return list_product(request)
    else:
        # Authentication failed; deliberately not handled for now, just
        # re-render the store page.
        return store_view(request)
def return_home(request):
    """Alias view: rendering "home" is just the store front page."""
    home_response = store_view(request)
    return home_response
def logout_view(request):
    """Log the current user out, then render the store front page."""
    logout(request)
    return store_view(request)
def store_view(request):
    """Store front page: lists products filtered on date_available plus the
    session cart (may be None)."""
    # NOTE(review): date_available__gt=today selects products that only
    # become available *after* today; one would normally list products
    # already available (__lte) -- confirm the comparison is not inverted.
    products = Product.objects.filter(date_available__gt=datetime.datetime.now().date()).order_by("-date_available")
    #products=Product.objects.all()
    #print products
    cart = request.session.get("cart",None)
    # locals() exposes ``products`` and ``cart`` to the template context.
    t = get_template('depotapp/store.html')
    c = RequestContext(request,locals())
    return HttpResponse(t.render(c))
def view_cart(request):
    """Show the session cart, creating and storing an empty one if absent."""
    cart = request.session.get("cart",None)
    t = get_template('depotapp/view_cart.html')
    if not cart:
        cart = Cart()
        request.session["cart"] = cart
    c = RequestContext(request,locals())
    return HttpResponse(t.render(c))
def add_to_cart(request, id):
    """Add product ``id`` to the session cart (created on demand) and show
    the cart page."""
    product = Product.objects.get(id = id)
    cart = request.session.get("cart",None)
    if not cart:
        cart = Cart()
        request.session["cart"] = cart
    cart.add_product(product)
    # Re-assign so the session is marked modified and the mutated cart is
    # persisted.
    request.session['cart'] = cart
    return view_cart(request)
def clean_cart(request):
    """Replace the session cart with a brand-new empty one, then show it."""
    fresh_cart = Cart()
    request.session['cart'] = fresh_cart
    return view_cart(request)
def create_product(request):
    """Create a Product from a submitted ProductForm; on success also create
    a LineItem tying it to every selected order, then present a fresh form."""
    #print "press create"
    form = ProductForm(request.POST or None)
    if form.is_valid():
        p = form.save(commit=False)
        p.save()
        for orders_id in form.cleaned_data.get('orders'):
            # NOTE(review): unit_price is hard-coded to 10.00 and quantity
            # to 1 -- confirm these placeholders are intentional.
            litem = LineItem.objects.create(product=p, order=orders_id, unit_price=10.00, quantity=1)
            litem.save()
        form = ProductForm()
    t = get_template('depotapp/create_product.html')
    c = RequestContext(request,locals())
    return HttpResponse(t.render(c))
    #return list_product(request)
# NOTE(review): dead code -- a REST cart view disabled by wrapping it in a
# string literal.  Consider deleting it outright.
'''
class RESTforCart(View):
    def get(self, request, *args, **kwargs):
        return request.session['cart'].items
    def post(self, request, *args, **kwargs):
        print request.POST['product']
        product = Product.objects.get(id=request.POST['product'])
        cart = request.session['cart']
        cart.add_product(product)
        request.session['cart'] = cart
        return request.session['cart'].items
'''
@login_required
def list_product(request):
    """Render a paginated list of all products, three per page.

    The page number is taken from the ``page`` GET parameter; anything
    non-numeric falls back to page 1, and an out-of-range page falls back
    to the last page.
    """
    list_items = Product.objects.all()
    paginator = Paginator(list_items ,3)
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    try:
        list_items = paginator.page(page)
    except Exception:
        # Page out of range or otherwise invalid: show the last page.
        # (Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt.)
        list_items = paginator.page(paginator.num_pages)
    t = get_template('depotapp/list_product.html')
    c = RequestContext(request,locals())
    return HttpResponse(t.render(c))
def view_product(request, id):
    """Render the detail page for the product with the given primary key."""
    product_instance = Product.objects.get(id = id)
    t=get_template('depotapp/view_product.html')
    c=RequestContext(request,locals())
    return HttpResponse(t.render(c))
def edit_product(request, id):
    """Edit an existing product; re-create a line item per chosen order."""
    product_instance = Product.objects.get(id=id)
    form = ProductForm(request.POST or None, instance=product_instance)
    if form.is_valid():
        p = form.save(commit=False)
        p.save()
        for orders_id in form.cleaned_data.get('orders'):
            # objects.create() already persists the row; the original's
            # follow-up litem.save() issued a redundant UPDATE.
            LineItem.objects.create(product=p, order=orders_id,
                                    unit_price=10.00, quantity=1)
    t = get_template('depotapp/edit_product.html')
    c = RequestContext(request, locals())
    return HttpResponse(t.render(c))
#return list_product(request)
'''
p = form.save(commit=False)
p.save()
for orders_id in form.cleaned_data.get('orders'):
litem = LineItem.objects.create(product=p, order=orders_id, unit_price=10.00, quantity=1)
litem.save()
'''
def del_product(request, id):
    """Delete the product identified by `id`, then render the product list."""
    doomed = Product.objects.get(id=id)
    doomed.delete()
    return list_product(request)
@transaction.commit_on_success
def create_order(request):
    """Create an Order from the posted form and attach the cart's items.

    Runs inside a transaction: either the order and all of its line items
    are committed together, or nothing is.
    """
    form = OrderForm(request.POST or None)
    if form.is_valid():
        order = form.save()
        for item in request.session['cart'].items:
            item.order = order
            item.save()
        # Empty the cart directly. The original called clean_cart(), which
        # rendered a full cart template whose response was then discarded.
        request.session['cart'] = Cart()
        return store_view(request)
    t = get_template('depotapp/create_order.html')
    c = RequestContext(request, locals())
    return HttpResponse(t.render(c))
def list_order(request):
    """List all orders, paginated ten per page (?page=N)."""
    list_items = Order.objects.all()
    paginator = Paginator(list_items, 10)
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    try:
        list_items = paginator.page(page)
    except Exception:
        # Out-of-range/invalid page: fall back to the last page. A bare
        # `except:` here would also swallow KeyboardInterrupt/SystemExit.
        list_items = paginator.page(paginator.num_pages)
    t = get_template('depotapp/list_order.html')
    c = RequestContext(request, locals())
    return HttpResponse(t.render(c))
def view_order(request, id):
    """Render the detail page for a single order."""
    order_instance = Order.objects.get(id = id)
    t=get_template('depotapp/view_order.html')
    c=RequestContext(request,locals())
    return HttpResponse(t.render(c))
def edit_order(request, id):
    """Edit an existing order via OrderForm; always re-render the form."""
    order_instance = Order.objects.get(id=id)
    form = OrderForm(request.POST or None, instance = order_instance)
    if form.is_valid():
        form.save()
    # locals() is the template context: `form` and `order_instance` are
    # the names the template relies on.
    t=get_template('depotapp/edit_order.html')
    c=RequestContext(request,locals())
    return HttpResponse(t.render(c))
def create_lineitem(request):
    """Create a LineItem from POSTed form data; reset the form on success."""
    form = LineItemForm(request.POST or None)
    if form.is_valid():
        form.save()
        form = LineItemForm()
    t = get_template('depotapp/create_lineitem.html')
    c = RequestContext(request,locals())
    return HttpResponse(t.render(c))
def list_lineitem(request):
    """List all line items, paginated ten per page (?page=N)."""
    list_items = LineItem.objects.all()
    paginator = Paginator(list_items, 10)
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    try:
        list_items = paginator.page(page)
    except Exception:
        # Out-of-range/invalid page: fall back to the last page. A bare
        # `except:` here would also swallow KeyboardInterrupt/SystemExit.
        list_items = paginator.page(paginator.num_pages)
    t = get_template('depotapp/list_lineitem.html')
    c = RequestContext(request, locals())
    return HttpResponse(t.render(c))
def view_lineitem(request, id):
    """Render the detail page for a single line item."""
    lineitem_instance = LineItem.objects.get(id = id)
    t=get_template('depotapp/view_lineitem.html')
    c=RequestContext(request,locals())
    return HttpResponse(t.render(c))
def edit_lineitem(request, id):
    """Edit an existing line item via LineItemForm; always re-render."""
    lineitem_instance = LineItem.objects.get(id=id)
    form = LineItemForm(request.POST or None, instance = lineitem_instance)
    if form.is_valid():
        form.save()
    t=get_template('depotapp/edit_lineitem.html')
    c=RequestContext(request,locals())
    return HttpResponse(t.render(c))
def atom_of_order(request,id):
    """Render an Atom XML feed for the given id.

    NOTE(review): despite the name, this fetches a Product with the id --
    confirm whether the template really expects a product or an order.
    """
    product = Product.objects.get(id = id)
    t = get_template('depotapp/atom_order.xml')
    c=RequestContext(request,locals())
    return HttpResponse(t.render(c), mimetype='application/atom+xml')
|
xaviercobain88/framework-python | refs/heads/master | build/lib.linux-i686-2.7/openerp/addons/account/partner.py | 9 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from operator import itemgetter
import time
from openerp.osv import fields, osv
class account_fiscal_position(osv.osv):
    """Fiscal position: maps taxes and accounts to partner-specific ones."""
    _name = 'account.fiscal.position'
    _description = 'Fiscal Position'
    _columns = {
        'name': fields.char('Fiscal Position', size=64, required=True),
        'active': fields.boolean('Active', help="By unchecking the active field, you may hide a fiscal position without deleting it."),
        'company_id': fields.many2one('res.company', 'Company'),
        'account_ids': fields.one2many('account.fiscal.position.account', 'position_id', 'Account Mapping'),
        'tax_ids': fields.one2many('account.fiscal.position.tax', 'position_id', 'Tax Mapping'),
        'note': fields.text('Notes', translate=True),
    }
    _defaults = {
        'active': True,
    }

    def map_tax(self, cr, uid, fposition_id, taxes, context=None):
        """Return tax ids after applying this position's tax mapping.

        Taxes with no mapping entry are kept as-is; a mapping whose
        destination is empty removes the source tax entirely.
        """
        if not taxes:
            return []
        if not fposition_id:
            # No position: identity mapping (py2 map returns a list).
            return map(lambda x: x.id, taxes)
        result = set()
        for t in taxes:
            ok = False
            for tax in fposition_id.tax_ids:
                if tax.tax_src_id.id == t.id:
                    if tax.tax_dest_id:
                        result.add(tax.tax_dest_id.id)
                    # Mapped (possibly to nothing): don't keep the source tax.
                    ok=True
            if not ok:
                result.add(t.id)
        return list(result)

    def map_account(self, cr, uid, fposition_id, account_id, context=None):
        """Return the mapped account id for this position, or the input id."""
        if not fposition_id:
            return account_id
        for pos in fposition_id.account_ids:
            if pos.account_src_id.id == account_id:
                account_id = pos.account_dest_id.id
                break
        return account_id

account_fiscal_position()
class account_fiscal_position_tax(osv.osv):
    """One source -> replacement tax mapping line of a fiscal position."""
    _name = 'account.fiscal.position.tax'
    _description = 'Taxes Fiscal Position'
    _rec_name = 'position_id'
    _columns = {
        'position_id': fields.many2one('account.fiscal.position', 'Fiscal Position', required=True, ondelete='cascade'),
        'tax_src_id': fields.many2one('account.tax', 'Tax Source', required=True),
        # Optional: an empty replacement removes the source tax (see map_tax).
        'tax_dest_id': fields.many2one('account.tax', 'Replacement Tax')
    }
    _sql_constraints = [
        ('tax_src_dest_uniq',
         'unique (position_id,tax_src_id,tax_dest_id)',
         'A tax fiscal position could be defined only once time on same taxes.')
    ]

account_fiscal_position_tax()
class account_fiscal_position_account(osv.osv):
    """One source -> destination account mapping line of a fiscal position."""
    _name = 'account.fiscal.position.account'
    _description = 'Accounts Fiscal Position'
    _rec_name = 'position_id'
    _columns = {
        'position_id': fields.many2one('account.fiscal.position', 'Fiscal Position', required=True, ondelete='cascade'),
        'account_src_id': fields.many2one('account.account', 'Account Source', domain=[('type','<>','view')], required=True),
        'account_dest_id': fields.many2one('account.account', 'Account Destination', domain=[('type','<>','view')], required=True)
    }
    _sql_constraints = [
        ('account_src_dest_uniq',
         'unique (position_id,account_src_id,account_dest_id)',
         'An account fiscal position could be defined only once time on same accounts.')
    ]

account_fiscal_position_account()
class res_partner(osv.osv):
    """Extend res.partner with accounting data (credit/debit, properties)."""
    _name = 'res.partner'
    _inherit = 'res.partner'
    _description = 'Partner'

    def _credit_debit_get(self, cr, uid, ids, field_names, arg, context=None):
        """Function field: per-partner total receivable ('credit') and
        payable ('debit'), summed over unreconciled move lines."""
        query = self.pool.get('account.move.line')._query_get(cr, uid, context=context)
        cr.execute("""SELECT l.partner_id, a.type, SUM(l.debit-l.credit)
                      FROM account_move_line l
                      LEFT JOIN account_account a ON (l.account_id=a.id)
                      WHERE a.type IN ('receivable','payable')
                      AND l.partner_id IN %s
                      AND l.reconcile_id IS NULL
                      AND """ + query + """
                      GROUP BY l.partner_id, a.type
                      """,
                   (tuple(ids),))
        # Receivable balances feed the 'credit' field, payable feed 'debit'.
        maps = {'receivable':'credit', 'payable':'debit' }
        res = {}
        for id in ids:
            res[id] = {}.fromkeys(field_names, 0)
        for pid,type,val in cr.fetchall():
            if val is None: val=0
            # Payable sums are negated so both figures come out positive.
            res[pid][maps[type]] = (type=='receivable') and val or -val
        return res

    def _asset_difference_search(self, cr, uid, obj, name, type, args, context=None):
        """Search helper for the credit/debit function fields: return a
        domain matching partners whose summed balance on accounts of
        `type` satisfies every (field, operator, value) triple in args."""
        if not args:
            return []
        having_values = tuple(map(itemgetter(2), args))
        # One HAVING clause per search triple; operators are interpolated,
        # values are passed as query parameters.
        where = ' AND '.join(
            map(lambda x: '(SUM(bal2) %(operator)s %%s)' % {
                'operator':x[1]},args))
        query = self.pool.get('account.move.line')._query_get(cr, uid, context=context)
        cr.execute(('SELECT pid AS partner_id, SUM(bal2) FROM ' \
                    '(SELECT CASE WHEN bal IS NOT NULL THEN bal ' \
                    'ELSE 0.0 END AS bal2, p.id as pid FROM ' \
                    '(SELECT (debit-credit) AS bal, partner_id ' \
                    'FROM account_move_line l ' \
                    'WHERE account_id IN ' \
                    '(SELECT id FROM account_account '\
                    'WHERE type=%s AND active) ' \
                    'AND reconcile_id IS NULL ' \
                    'AND '+query+') AS l ' \
                    'RIGHT JOIN res_partner p ' \
                    'ON p.id = partner_id ) AS pl ' \
                    'GROUP BY pid HAVING ' + where),
                   (type,) + having_values)
        res = cr.fetchall()
        if not res:
            # No partner matches: return an impossible domain instead of [].
            return [('id','=','0')]
        return [('id','in',map(itemgetter(0), res))]

    def _credit_search(self, cr, uid, obj, name, args, context=None):
        # Search on the 'credit' function field -> receivable accounts.
        return self._asset_difference_search(cr, uid, obj, name, 'receivable', args, context=context)

    def _debit_search(self, cr, uid, obj, name, args, context=None):
        # Search on the 'debit' function field -> payable accounts.
        return self._asset_difference_search(cr, uid, obj, name, 'payable', args, context=context)

    def has_something_to_reconcile(self, cr, uid, partner_id, context=None):
        '''
        at least a debit, a credit and a line older than the last reconciliation date of the partner
        '''
        cr.execute('''
            SELECT l.partner_id, SUM(l.debit) AS debit, SUM(l.credit) AS credit
            FROM account_move_line l
            RIGHT JOIN account_account a ON (a.id = l.account_id)
            RIGHT JOIN res_partner p ON (l.partner_id = p.id)
            WHERE a.reconcile IS TRUE
            AND p.id = %s
            AND l.reconcile_id IS NULL
            AND (p.last_reconciliation_date IS NULL OR l.date > p.last_reconciliation_date)
            AND l.state <> 'draft'
            GROUP BY l.partner_id''', (partner_id,))
        res = cr.dictfetchone()
        if res:
            # Both sides must be non-zero for reconciliation to make sense.
            return bool(res['debit'] and res['credit'])
        return False

    def mark_as_reconciled(self, cr, uid, ids, context=None):
        """Stamp the partners' last full reconciliation date with 'now'."""
        return self.write(cr, uid, ids, {'last_reconciliation_date': time.strftime('%Y-%m-%d %H:%M:%S')}, context=context)

    # 'credit' and 'debit' are computed by _credit_debit_get and made
    # searchable through the *_search helpers above.
    _columns = {
        'credit': fields.function(_credit_debit_get,
            fnct_search=_credit_search, string='Total Receivable', multi='dc', help="Total amount this customer owes you."),
        'debit': fields.function(_credit_debit_get, fnct_search=_debit_search, string='Total Payable', multi='dc', help="Total amount you have to pay to this supplier."),
        'debit_limit': fields.float('Payable Limit'),
        'property_account_payable': fields.property(
            'account.account',
            type='many2one',
            relation='account.account',
            string="Account Payable",
            view_load=True,
            domain="[('type', '=', 'payable')]",
            help="This account will be used instead of the default one as the payable account for the current partner",
            required=True),
        'property_account_receivable': fields.property(
            'account.account',
            type='many2one',
            relation='account.account',
            string="Account Receivable",
            view_load=True,
            domain="[('type', '=', 'receivable')]",
            help="This account will be used instead of the default one as the receivable account for the current partner",
            required=True),
        'property_account_position': fields.property(
            'account.fiscal.position',
            type='many2one',
            relation='account.fiscal.position',
            string="Fiscal Position",
            view_load=True,
            help="The fiscal position will determine taxes and accounts used for the partner.",
        ),
        'property_payment_term': fields.property(
            'account.payment.term',
            type='many2one',
            relation='account.payment.term',
            string ='Customer Payment Term',
            view_load=True,
            help="This payment term will be used instead of the default one for sale orders and customer invoices"),
        'property_supplier_payment_term': fields.property(
            'account.payment.term',
            type='many2one',
            relation='account.payment.term',
            string ='Supplier Payment Term',
            view_load=True,
            help="This payment term will be used instead of the default one for purchase orders and supplier invoices"),
        'ref_companies': fields.one2many('res.company', 'partner_id',
            'Companies that refers to partner'),
        'last_reconciliation_date': fields.datetime('Latest Full Reconciliation Date', help='Date on which the partner accounting entries were fully reconciled last time. It differs from the last date where a reconciliation has been made for this partner, as here we depict the fact that nothing more was to be reconciled at this date. This can be achieved in 2 different ways: either the last unreconciled debit/credit entry of this partner was reconciled, either the user pressed the button "Nothing more to reconcile" during the manual reconciliation process.')
    }

    def _commercial_fields(self, cr, uid, context=None):
        """Accounting fields propagated from the commercial partner to its contacts."""
        return super(res_partner, self)._commercial_fields(cr, uid, context=context) + \
            ['debit_limit', 'property_account_payable', 'property_account_receivable', 'property_account_position',
             'property_payment_term', 'property_supplier_payment_term', 'last_reconciliation_date']

res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
hejuna/bite-project | refs/heads/master | deps/gdata-python-client/samples/apps/marketplace_sample/gdata/tlslite/utils/ASN1Parser.py | 281 | """Class for parsing ASN.1"""
from compat import *
from codec import *
#Takes a byte array which has a DER TLV field at its head
class ASN1Parser:
    """Minimal ASN.1 DER reader: splits the TLV at the head of a byte array
    into its decoded length and raw value."""

    def __init__(self, bytes):
        """Parse the DER TLV field at the head of *bytes*."""
        reader = Parser(bytes)
        reader.get(1)  # discard the Type octet
        self.length = self._getASN1Length(reader)   # decoded Length field
        self.value = reader.getFixBytes(self.length)  # raw Value payload

    def getChild(self, which):
        """Return child *which* of this value, treated as a SEQUENCE."""
        reader = Parser(self.value)
        for _ in range(which + 1):
            start = reader.index
            reader.get(1)  # discard the child's Type octet
            reader.getFixBytes(self._getASN1Length(reader))
        # `start`..`index` now brackets the complete TLV of child `which`.
        return ASN1Parser(reader.bytes[start:reader.index])

    def _getASN1Length(self, p):
        """Decode a DER length field: short form directly, long form as a
        big-endian integer of (first & 0x7F) octets."""
        first = p.get(1)
        if first <= 127:
            return first
        return p.get(first & 0x7F)
|
ericli1989/ali_kernel | refs/heads/master | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys

sys.path.append(os.environ['PERF_EXEC_PATH'] + \
    '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')

from perf_trace_context import *
from Core import *
from Util import syscall_name

usage = "perf script -s syscall-counts-by-pid.py [comm]\n";

for_comm = None
for_pid = None

if len(sys.argv) > 2:
    sys.exit(usage)

# A numeric argument filters by pid, anything else filters by command name.
if len(sys.argv) > 1:
    try:
        for_pid = int(sys.argv[1])
    except:
        for_comm = sys.argv[1]

# comm -> pid -> syscall id -> count; autodict comes from Core and
# presumably auto-creates nested levels (see the TypeError handler below).
syscalls = autodict()
def trace_begin():
    # Invoked by perf once before any events are processed.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Invoked by perf after the last event; dump the aggregated counts.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    id, args):
    # Skip events that don't match the optional comm/pid filter.
    if (for_comm and common_comm != for_comm) or \
       (for_pid and common_pid != for_pid ):
        return
    try:
        syscalls[common_comm][common_pid][id] += 1
    except TypeError:
        # First occurrence for this comm/pid/id: seed the counter.
        syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
    # Print per-comm/pid syscall counts, most frequent syscall first.
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events by comm/pid:\n\n",

    print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
                            "----------"),

    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            # Sort by (count, id) descending; py2-only tuple-unpacking lambda.
            for id, val in sorted(syscalls[comm][pid].iteritems(), \
                          key = lambda(k, v): (v, k), reverse = True):
                print "  %-38s  %10d\n" % (syscall_name(id), val),
|
qsnake/py2js | refs/heads/master | tests/modules/import_diamond.py | 6 |
import modules.diamond1
import modules.diamond2

# Exercise both arms of the diamond-shaped import graph.
modules.diamond1.run()
modules.diamond2.run()
|
UTAlan/HavePest.net | refs/heads/master | havepest/paste/util/UserDict24.py | 28 | """A more or less complete user-defined wrapper around dictionary objects."""
class UserDict:
    """Dictionary-like wrapper keeping its contents in the `data` attribute."""
    def __init__(self, dict=None, **kwargs):
        self.data = {}
        if dict is not None:
            if not hasattr(dict,'keys'):
                dict = type({})(dict) # make mapping from a sequence
            self.update(dict)
        if len(kwargs):
            self.update(kwargs)
    def __repr__(self): return repr(self.data)
    def __cmp__(self, dict):
        # Compare underlying dicts, whether `dict` is a UserDict or a mapping.
        if isinstance(dict, UserDict):
            return cmp(self.data, dict.data)
        else:
            return cmp(self.data, dict)
    def __len__(self): return len(self.data)
    def __getitem__(self, key): return self.data[key]
    def __setitem__(self, key, item): self.data[key] = item
    def __delitem__(self, key): del self.data[key]
    def clear(self): self.data.clear()
    def copy(self):
        # For the exact class, a plain dict copy suffices. For subclasses,
        # shallow-copy the instance with an empty `data` (so the copy does
        # not share the dict), then refill it from self.
        if self.__class__ is UserDict:
            return UserDict(self.data)
        import copy
        data = self.data
        try:
            self.data = {}
            c = copy.copy(self)
        finally:
            self.data = data
        c.update(self)
        return c
    def keys(self): return self.data.keys()
    def items(self): return self.data.items()
    def iteritems(self): return self.data.iteritems()
    def iterkeys(self): return self.data.iterkeys()
    def itervalues(self): return self.data.itervalues()
    def values(self): return self.data.values()
    def has_key(self, key): return self.data.has_key(key)
    def update(self, dict):
        if isinstance(dict, UserDict):
            self.data.update(dict.data)
        elif isinstance(dict, type(self.data)):
            self.data.update(dict)
        else:
            # Generic mapping: copy item by item through __setitem__.
            for k, v in dict.items():
                self[k] = v
    def get(self, key, failobj=None):
        if not self.has_key(key):
            return failobj
        return self[key]
    def setdefault(self, key, failobj=None):
        if not self.has_key(key):
            self[key] = failobj
        return self[key]
    def pop(self, key, *args):
        return self.data.pop(key, *args)
    def popitem(self):
        return self.data.popitem()
    def __contains__(self, key):
        return key in self.data
    def fromkeys(cls, iterable, value=None):
        # Alternate constructor: new instance with every key set to `value`.
        d = cls()
        for key in iterable:
            d[key] = value
        return d
    fromkeys = classmethod(fromkeys)
class IterableUserDict(UserDict):
    """UserDict that additionally supports iteration over its keys."""
    def __iter__(self):
        return iter(self.data)
class DictMixin:
    """Fill out the full mapping interface from a minimal subclass."""
    # Mixin defining all dictionary methods for classes that already have
    # a minimum dictionary interface including getitem, setitem, delitem,
    # and keys. Without knowledge of the subclass constructor, the mixin
    # does not define __init__() or copy(). In addition to the four base
    # methods, progressively more efficiency comes with defining
    # __contains__(), __iter__(), and iteritems().

    # second level definitions support higher levels
    def __iter__(self):
        for k in self.keys():
            yield k
    def has_key(self, key):
        try:
            value = self[key]
        except KeyError:
            return False
        return True
    def __contains__(self, key):
        return self.has_key(key)

    # third level takes advantage of second level definitions
    def iteritems(self):
        for k in self:
            yield (k, self[k])
    def iterkeys(self):
        return self.__iter__()

    # fourth level uses definitions from lower levels
    def itervalues(self):
        for _, v in self.iteritems():
            yield v
    def values(self):
        return [v for _, v in self.iteritems()]
    def items(self):
        return list(self.iteritems())
    def clear(self):
        for key in self.keys():
            del self[key]
    def setdefault(self, key, default):
        try:
            return self[key]
        except KeyError:
            self[key] = default
        return default
    def pop(self, key, *args):
        # Optional single extra arg is the default returned on a missing key.
        if len(args) > 1:
            raise TypeError, "pop expected at most 2 arguments, got "\
                              + repr(1 + len(args))
        try:
            value = self[key]
        except KeyError:
            if args:
                return args[0]
            raise
        del self[key]
        return value
    def popitem(self):
        try:
            k, v = self.iteritems().next()
        except StopIteration:
            raise KeyError, 'container is empty'
        del self[k]
        return (k, v)
    def update(self, other):
        # Make progressively weaker assumptions about "other"
        if hasattr(other, 'iteritems'):  # iteritems saves memory and lookups
            for k, v in other.iteritems():
                self[k] = v
        elif hasattr(other, '__iter__'): # iter saves memory
            for k in other:
                self[k] = other[k]
        else:
            for k in other.keys():
                self[k] = other[k]
    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default
    def __repr__(self):
        return repr(dict(self.iteritems()))
    def __cmp__(self, other):
        if other is None:
            return 1
        if isinstance(other, DictMixin):
            other = dict(other.iteritems())
        return cmp(dict(self.iteritems()), other)
    def __len__(self):
        return len(self.keys())
    def __nonzero__(self):
        return bool(self.iteritems())
|
MikkCZ/kitsune | refs/heads/master | kitsune/announcements/models.py | 6 | from datetime import datetime
from django.contrib.auth.models import User, Group
from django.db import models
from django.db.models import Q
from django.db.models.signals import post_save
from kitsune.sumo.templatetags.jinja_helpers import wiki_to_html
from kitsune.sumo.models import ModelBase
from kitsune.wiki.models import Locale
class Announcement(ModelBase):
    """A site-wide, per-group, or per-locale announcement banner."""
    created = models.DateTimeField(default=datetime.now)
    creator = models.ForeignKey(User)
    show_after = models.DateTimeField(
        default=datetime.now, db_index=True,
        verbose_name='Start displaying',
        help_text=('When this announcement will start appearing. '
                   '(US/Pacific)'))
    show_until = models.DateTimeField(
        db_index=True, null=True, blank=True,
        verbose_name='Stop displaying',
        help_text=('When this announcement will stop appearing. '
                   'Leave blank for indefinite. (US/Pacific)'))
    content = models.TextField(
        max_length=10000,
        help_text=("Use wiki syntax or HTML. It will display similar to a "
                   "document's content."))
    group = models.ForeignKey(Group, null=True, blank=True)
    locale = models.ForeignKey(Locale, null=True, blank=True)

    def __unicode__(self):
        """Short excerpt, prefixed with the group name when there is one."""
        excerpt = self.content[:50]
        if self.group:
            return u'[{group}] {excerpt}'.format(group=self.group,
                                                 excerpt=excerpt)
        return u'{excerpt}'.format(excerpt=excerpt)

    def is_visible(self):
        """True when now is past show_after and before show_until (if set)."""
        now = datetime.now()
        if now > self.show_after and (not self.show_until or
                                      now < self.show_until):
            return True
        return False

    @property
    def content_parsed(self):
        """Content rendered from wiki syntax to HTML."""
        return wiki_to_html(self.content.strip())

    @classmethod
    def get_site_wide(cls):
        """Returns visible announcements not tied to any group or locale."""
        return cls._visible_query(group=None, locale=None)

    @classmethod
    def get_for_group_id(cls, group_id):
        """Returns visible announcements for a given group id."""
        return cls._visible_query(group__id=group_id)

    @classmethod
    def get_for_locale_name(cls, locale_name):
        """Returns visible announcements for a given locale name."""
        return cls._visible_query(locale__locale=locale_name)

    @classmethod
    def _visible_query(cls, **query_kwargs):
        """Return visible announcements given a group query."""
        return Announcement.objects.filter(
            # Show if interval is specified and current or show_until is None
            Q(show_after__lt=datetime.now()) &
            (Q(show_until__gt=datetime.now()) | Q(show_until__isnull=True)),
            **query_kwargs)
def connector(sender, instance, created, **kw):
    """post_save hook: email group members about new group announcements."""
    # Only email new announcements in a group. We don't want to email everyone.
    if created and instance.group:
        from kitsune.announcements.tasks import send_group_email
        now = datetime.now()
        if instance.is_visible():
            send_group_email.delay(instance.pk)
        elif now < instance.show_after:
            # Not visible yet: schedule delivery for when it becomes visible.
            send_group_email.delay(instance.pk, eta=instance.show_after)

post_save.connect(connector, sender=Announcement,
                  dispatch_uid='email_announcement')
|
bobisjan/django-shanghai | refs/heads/master | shanghai/mixins/__init__.py | 1 | from .dispatcher import DispatcherMixin
from .fetcher import FetcherMixin, ModelFetcherMixin
from .fields import FieldsMixin
from .filter import FilterMixin, ModelFilterMixin
from .inclusion import InclusionMixin
from .linker import LinkerMixin
from .meta import MetaMixin
from .pagination import PaginationMixin, ModelPaginationMixin
from .responder import ResponderMixin
from .sort import SortMixin, ModelSortMixin
# Explicit public API of the mixins package.
__all__ = ['DispatcherMixin', 'FetcherMixin', 'ModelFetcherMixin',
           'InclusionMixin', 'FieldsMixin', 'LinkerMixin', 'MetaMixin',
           'PaginationMixin', 'ModelPaginationMixin', 'ResponderMixin',
           'SortMixin', 'ModelSortMixin', 'FilterMixin', 'ModelFilterMixin']
|
ryfeus/lambda-packs | refs/heads/master | Sklearn_scipy_numpy/source/pip/_vendor/requests/packages/chardet/mbcsgroupprober.py | 2768 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetgroupprober import CharSetGroupProber
from .utf8prober import UTF8Prober
from .sjisprober import SJISProber
from .eucjpprober import EUCJPProber
from .gb2312prober import GB2312Prober
from .euckrprober import EUCKRProber
from .cp949prober import CP949Prober
from .big5prober import Big5Prober
from .euctwprober import EUCTWProber
class MBCSGroupProber(CharSetGroupProber):
    """Group prober bundling all multi-byte character-set detectors."""
    def __init__(self):
        CharSetGroupProber.__init__(self)
        # One prober per supported multi-byte encoding. NOTE(review): the
        # list order presumably influences tie-breaking in the group
        # prober -- confirm against CharSetGroupProber before reordering.
        self._mProbers = [
            UTF8Prober(),
            SJISProber(),
            EUCJPProber(),
            GB2312Prober(),
            EUCKRProber(),
            CP949Prober(),
            Big5Prober(),
            EUCTWProber()
        ]
        self.reset()
|
Alwnikrotikz/marinemap | refs/heads/master | lingcod/common/templatetags/appname.py | 3 | from django.conf import settings
from django.template import Node, Library
register = Library()
@register.simple_tag
def appname():
    """Template tag: settings.APP_NAME, or a placeholder when unset."""
    fallback = '... settings.APP_NAME ...'
    return str(getattr(settings, 'APP_NAME', fallback))
@register.simple_tag
def help_email():
    """Template tag: settings.HELP_EMAIL, defaulting to the project address."""
    return str(getattr(settings, 'HELP_EMAIL', 'help@marinemap.org'))
|
rangadi/incubator-beam | refs/heads/master | sdks/python/apache_beam/examples/wordcount_it_test.py | 5 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""End-to-end test for the wordcount example."""
from __future__ import absolute_import
import logging
import time
import unittest
from hamcrest.core.core.allof import all_of
from nose.plugins.attrib import attr
from apache_beam.examples import wordcount
from apache_beam.testing.pipeline_verifiers import FileChecksumMatcher
from apache_beam.testing.pipeline_verifiers import PipelineStateMatcher
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.test_utils import delete_files
class WordCountIT(unittest.TestCase):
    """End-to-end wordcount pipeline tests (default and FnAPI variants)."""

    # Enable nose tests running in parallel
    _multiprocess_can_split_ = True

    # The default checksum is a SHA-1 hash generated from a sorted list of
    # lines read from expected output.
    DEFAULT_CHECKSUM = '33535a832b7db6d78389759577d4ff495980b9c0'

    @attr('IT')
    def test_wordcount_it(self):
        self._run_wordcount_it()

    @attr('IT', 'ValidatesContainer')
    def test_wordcount_fnapi_it(self):
        # Same pipeline, but running through the Fn API experiment.
        self._run_wordcount_it(experiment='beam_fn_api')

    def _run_wordcount_it(self, **opts):
        """Run the wordcount example and verify final state plus checksum."""
        test_pipeline = TestPipeline(is_integration_test=True)

        # Set extra options to the pipeline for test purpose
        output = '/'.join([test_pipeline.get_option('output'),
                           str(int(time.time())),
                           'results'])
        arg_sleep_secs = test_pipeline.get_option('sleep_secs')
        sleep_secs = int(arg_sleep_secs) if arg_sleep_secs is not None else None
        pipeline_verifiers = [PipelineStateMatcher(),
                              FileChecksumMatcher(output + '*-of-*',
                                                  self.DEFAULT_CHECKSUM,
                                                  sleep_secs)]
        extra_opts = {'output': output,
                      'on_success_matcher': all_of(*pipeline_verifiers)}
        extra_opts.update(opts)

        # Register clean up before pipeline execution
        self.addCleanup(delete_files, [output + '*'])

        # Get pipeline options from command argument: --test-pipeline-options,
        # and start pipeline job by calling pipeline main function.
        wordcount.run(test_pipeline.get_full_options_as_args(**extra_opts))
if __name__ == '__main__':
    # Verbose logging so remote pipeline failures are traceable.
    logging.getLogger().setLevel(logging.DEBUG)
    unittest.main()
|
LadyClaire/project-euler-solutions | refs/heads/master | 0001/pe0001.py | 1 | #! /usr/bin/env python3
# If we list all the natural numbers below 10 that are multiples of 3 or 5, we
# get 3, 5, 6 and 9. The sum of these multiples is 23.
#
# Find the sum of all the multiples of 3 or 5 below 1000.
def sumMultiples(upperLimit, *multiples):
    '''Finds the sum of all multiples of the specified numbers up to but not
    including upperLimit.'''
    total = 0
    for candidate in range(upperLimit):
        # Count the candidate once even if it divides several of the numbers.
        if any(candidate % divisor == 0 for divisor in multiples):
            total += candidate
    return total
if __name__ == "__main__":
print(sumMultiples(1000, 3, 5))
|
danielnyga/pracmln | refs/heads/master | python2/pracmln/utils/locs.py | 2 | import os
import appdirs
from pracmln._version import APPNAME, APPAUTHOR
# Repository root: two directory levels up from this file.
root = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', '..'))
user_data = appdirs.user_data_dir(APPNAME, APPAUTHOR)

# Running from a source checkout (directory name starts with 'python'):
# keep app data inside the repository; otherwise use the system data dir,
# falling back to the per-user dir when the system one does not exist.
if os.path.basename(root).startswith('python'):
    root = os.path.realpath(os.path.join(root, '..'))
    app_data = root
else:
    app_data = appdirs.site_data_dir(APPNAME, APPAUTHOR)
    if not os.path.exists(app_data):
        app_data = user_data

# Well-known locations inside the app data directory.
trdparty = os.path.join(app_data, '3rdparty')
examples = os.path.join(app_data, 'examples')
etc = os.path.join(app_data, 'etc')
|
jkrcma/passIon | refs/heads/master | index.py | 2 |
import logging
import pickle
from time import time
from hashlib import md5
from base64 import urlsafe_b64encode
from os import urandom
import redis
from flask import Flask, request, render_template
import config
logging.basicConfig(level=logging.DEBUG)

app = Flask(__name__,static_folder='public')

# Single shared Redis connection; credentials come from config.py.
r = redis.StrictRedis(
    host=config.REDIS_HOST,
    port=config.REDIS_PORT,
    db=config.REDIS_DB,
    password=config.REDIS_PASSWORD
)
@app.route('/set', methods=['post'])
def setPass():
    """Store an encrypted password under a random short id; return its URL.

    The client posts the ciphertext (`pass`) and its IV; the pair is kept
    in Redis under an 8-character random id that expires after config.TTL.
    """
    # Flask already restricts this route to POST via `methods`; the
    # original `assert request.method == 'POST'` duplicated that check and
    # would silently disappear under `python -O`.
    password = request.form['pass']
    iv = request.form['iv']
    uuid = urlsafe_b64encode(md5(urandom(128)).digest())[:8].decode('utf-8')
    with r.pipeline() as pipe:
        data = {'status': 'ok', 'iv': iv, 'pass': password}
        # SET + EXPIRE are sent together so the key never exists without a TTL.
        pipe.set(uuid, pickle.dumps(data))
        pipe.expire(uuid, config.TTL)
        pipe.execute()
    return '/get/{}'.format(uuid)
@app.route('/get/<uuid>', methods=['get'])
def getPass(uuid):
    """One-time retrieval: show the secret once, then mark it withdrawn."""
    # A plain GET is enough here; the original opened a pipeline context it
    # never used.
    raw_data = r.get(uuid)
    if not raw_data:
        # TTL elapsed, or the id never existed.
        return render_template('expired.html')
    data = pickle.loads(raw_data)
    if data['status'] == 'ok':
        # Record the withdrawal so a second visit cannot read the secret.
        # NOTE(review): this overwrite drops the key's TTL, so withdrawn
        # records never expire -- consider re-applying config.TTL.
        new_data = {'status': 'withdrawn', 'time': int(time()), 'ip': request.remote_addr}
        r.set(uuid, pickle.dumps(new_data))
        return render_template('get.html', data=data['iv'] + '|' + data['pass'])
    if data['status'] == 'withdrawn':
        return render_template('withdrawn.html')
    # Unknown status should be impossible; treat it like an expired link
    # instead of implicitly returning None (which would cause a 500).
    return render_template('expired.html')
@app.route('/', methods=['get'])
def index():
    """Landing page; shows the secret lifetime in whole minutes."""
    ttl = int(config.TTL/60)
    return render_template('index.html', ttl=ttl)
if __name__ == '__main__':
    # Fall back to safe development defaults when config omits host/port.
    serve_port = getattr(config, 'APP_PORT', 5000)
    serve_host = getattr(config, 'APP_HOST', '127.0.0.1')
    app.run(host=serve_host, port=serve_port)
|
sumedhasingla/VTK | refs/heads/master | ThirdParty/Twisted/twisted/internet/test/test_epollreactor.py | 39 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.internet.epollreactor}.
"""
from __future__ import division, absolute_import
from twisted.trial.unittest import TestCase
try:
from twisted.internet.epollreactor import _ContinuousPolling
except ImportError:
_ContinuousPolling = None
from twisted.internet.task import Clock
from twisted.internet.error import ConnectionDone
class Descriptor(object):
    """
    Records reads and writes, as if it were a C{FileDescriptor}.
    """
    def __init__(self):
        self.events = []
    def _record(self, event):
        # Single choke point so tests can assert the exact event order.
        self.events.append(event)
    def fileno(self):
        return 1
    def doRead(self):
        self._record("read")
    def doWrite(self):
        self._record("write")
    def connectionLost(self, reason):
        # Only a clean close is acceptable; anything else re-raises.
        reason.trap(ConnectionDone)
        self._record("lost")
class ContinuousPollingTests(TestCase):
    """
    L{_ContinuousPolling} can be used to read and write from C{FileDescriptor}
    objects.
    """
    def test_addReader(self):
        """
        Adding a reader when there was previously no reader starts up a
        C{LoopingCall}.
        """
        poller = _ContinuousPolling(Clock())
        # No descriptors yet, so no polling loop should exist.
        self.assertEqual(poller._loop, None)
        reader = object()
        self.assertFalse(poller.isReading(reader))
        poller.addReader(reader)
        self.assertNotEqual(poller._loop, None)
        self.assertTrue(poller._loop.running)
        # The loop must be driven by the reactor the poller was given.
        self.assertIs(poller._loop.clock, poller._reactor)
        self.assertTrue(poller.isReading(reader))
    def test_addWriter(self):
        """
        Adding a writer when there was previously no writer starts up a
        C{LoopingCall}.
        """
        poller = _ContinuousPolling(Clock())
        self.assertEqual(poller._loop, None)
        writer = object()
        self.assertFalse(poller.isWriting(writer))
        poller.addWriter(writer)
        self.assertNotEqual(poller._loop, None)
        self.assertTrue(poller._loop.running)
        self.assertIs(poller._loop.clock, poller._reactor)
        self.assertTrue(poller.isWriting(writer))
    def test_removeReader(self):
        """
        Removing a reader stops the C{LoopingCall}.
        """
        poller = _ContinuousPolling(Clock())
        reader = object()
        poller.addReader(reader)
        poller.removeReader(reader)
        self.assertEqual(poller._loop, None)
        # The loop's pending delayed call must be cancelled, not just stopped.
        self.assertEqual(poller._reactor.getDelayedCalls(), [])
        self.assertFalse(poller.isReading(reader))
    def test_removeWriter(self):
        """
        Removing a writer stops the C{LoopingCall}.
        """
        poller = _ContinuousPolling(Clock())
        writer = object()
        poller.addWriter(writer)
        poller.removeWriter(writer)
        self.assertEqual(poller._loop, None)
        self.assertEqual(poller._reactor.getDelayedCalls(), [])
        self.assertFalse(poller.isWriting(writer))
    def test_removeUnknown(self):
        """
        Removing unknown readers and writers silently does nothing.
        """
        # Passing if no exception is raised is the whole assertion here.
        poller = _ContinuousPolling(Clock())
        poller.removeWriter(object())
        poller.removeReader(object())
    def test_multipleReadersAndWriters(self):
        """
        Adding multiple readers and writers results in a single
        C{LoopingCall}.
        """
        poller = _ContinuousPolling(Clock())
        writer = object()
        poller.addWriter(writer)
        self.assertNotEqual(poller._loop, None)
        poller.addWriter(object())
        self.assertNotEqual(poller._loop, None)
        poller.addReader(object())
        self.assertNotEqual(poller._loop, None)
        poller.addReader(object())
        poller.removeWriter(writer)
        # Loop survives as long as at least one descriptor remains.
        self.assertNotEqual(poller._loop, None)
        self.assertTrue(poller._loop.running)
        self.assertEqual(len(poller._reactor.getDelayedCalls()), 1)
    def test_readerPolling(self):
        """
        Adding a reader causes its C{doRead} to be called every 1
        milliseconds.
        """
        reactor = Clock()
        poller = _ContinuousPolling(reactor)
        desc = Descriptor()
        poller.addReader(desc)
        self.assertEqual(desc.events, [])
        # NOTE(review): the docstring says "every 1 milliseconds" but these
        # advances are 10 microseconds; confirm _ContinuousPolling's actual
        # poll interval before relying on the period.
        reactor.advance(0.00001)
        self.assertEqual(desc.events, ["read"])
        reactor.advance(0.00001)
        self.assertEqual(desc.events, ["read", "read"])
        reactor.advance(0.00001)
        self.assertEqual(desc.events, ["read", "read", "read"])
    def test_writerPolling(self):
        """
        Adding a writer causes its C{doWrite} to be called every 1
        milliseconds.
        """
        reactor = Clock()
        poller = _ContinuousPolling(reactor)
        desc = Descriptor()
        poller.addWriter(desc)
        self.assertEqual(desc.events, [])
        reactor.advance(0.001)
        self.assertEqual(desc.events, ["write"])
        reactor.advance(0.001)
        self.assertEqual(desc.events, ["write", "write"])
        reactor.advance(0.001)
        self.assertEqual(desc.events, ["write", "write", "write"])
    def test_connectionLostOnRead(self):
        """
        If a C{doRead} returns a value indicating disconnection,
        C{connectionLost} is called on it.
        """
        reactor = Clock()
        poller = _ContinuousPolling(reactor)
        desc = Descriptor()
        # Returning a truthy value from doRead signals disconnection;
        # Descriptor.connectionLost then records "lost".
        desc.doRead = lambda: ConnectionDone()
        poller.addReader(desc)
        self.assertEqual(desc.events, [])
        reactor.advance(0.001)
        self.assertEqual(desc.events, ["lost"])
    def test_connectionLostOnWrite(self):
        """
        If a C{doWrite} returns a value indicating disconnection,
        C{connectionLost} is called on it.
        """
        reactor = Clock()
        poller = _ContinuousPolling(reactor)
        desc = Descriptor()
        desc.doWrite = lambda: ConnectionDone()
        poller.addWriter(desc)
        self.assertEqual(desc.events, [])
        reactor.advance(0.001)
        self.assertEqual(desc.events, ["lost"])
    def test_removeAll(self):
        """
        L{_ContinuousPolling.removeAll} removes all descriptors and returns
        the readers and writers.
        """
        poller = _ContinuousPolling(Clock())
        reader = object()
        writer = object()
        both = object()
        poller.addReader(reader)
        poller.addReader(both)
        poller.addWriter(writer)
        poller.addWriter(both)
        removed = poller.removeAll()
        self.assertEqual(poller.getReaders(), [])
        self.assertEqual(poller.getWriters(), [])
        # 'both' is registered twice but must be reported only once.
        self.assertEqual(len(removed), 3)
        self.assertEqual(set(removed), set([reader, writer, both]))
    def test_getReaders(self):
        """
        L{_ContinuousPolling.getReaders} returns a list of the read
        descriptors.
        """
        poller = _ContinuousPolling(Clock())
        reader = object()
        poller.addReader(reader)
        self.assertIn(reader, poller.getReaders())
    def test_getWriters(self):
        """
        L{_ContinuousPolling.getWriters} returns a list of the write
        descriptors.
        """
        poller = _ContinuousPolling(Clock())
        writer = object()
        poller.addWriter(writer)
        self.assertIn(writer, poller.getWriters())
    # Class-level skip: trial skips every test in this TestCase when the
    # epoll reactor (and therefore _ContinuousPolling) could not be imported.
    if _ContinuousPolling is None:
        skip = "epoll not supported in this environment."
|
bzero/bitex | refs/heads/master | apps/mailer/util.py | 10 | from smtplib import SMTP
from email.MIMEText import MIMEText
from email.Header import Header
from email.Utils import parseaddr, formataddr
def send_email(sender, recipient, subject, body, content_type='plain'):
    """Send an email.

    All arguments should be Unicode strings (plain ASCII works as well).

    Only the real name part of sender and recipient addresses may contain
    non-ASCII characters.

    The email will be properly MIME encoded and delivered though SMTP to
    localhost port 25. This is easy to change if you want something different.

    The charset of the email will be the first one out of US-ASCII, ISO-8859-1
    and UTF-8 that can represent all the characters occurring in the email.

    Delivery is deliberately best-effort: SMTP failures are swallowed, but
    the connection is now always closed.
    """
    # Header class is smart enough to try US-ASCII, then the charset we
    # provide, then fall back to UTF-8.
    header_charset = 'ISO-8859-1'

    # We must choose the body charset manually: first charset that can
    # encode the whole body wins.
    for body_charset in 'US-ASCII', 'ISO-8859-1', 'UTF-8':
        try:
            body.encode(body_charset)
        except UnicodeError:
            pass
        else:
            break

    # Split real name (which is optional) and email address parts
    sender_name, sender_addr = parseaddr(sender)
    recipient_name, recipient_addr = parseaddr(recipient)

    # We must always pass Unicode strings to Header, otherwise it will
    # use RFC 2047 encoding even on plain ASCII strings.
    sender_name = str(Header(unicode(sender_name), header_charset))
    recipient_name = str(Header(unicode(recipient_name), header_charset))

    # Make sure email addresses do not contain non-ASCII characters
    sender_addr = sender_addr.encode('ascii')
    recipient_addr = recipient_addr.encode('ascii')

    # Create the message ('plain' stands for Content-Type: text/plain)
    msg = MIMEText(body.encode(body_charset), content_type, body_charset)
    msg['From'] = formataddr((sender_name, sender_addr))
    msg['To'] = formataddr((recipient_name, recipient_addr))
    msg['Subject'] = Header(unicode(subject), header_charset)

    smtp = None
    try:
        # Send the message via SMTP to localhost:25
        smtp = SMTP("127.0.0.1")
        smtp.ehlo()
        # BUG FIX: the envelope recipient must be the bare address; the
        # original passed the full 'Name <addr>' string to sendmail.
        smtp.sendmail(sender_addr, [recipient_addr], msg.as_string())
    except Exception:
        # Best-effort by design (as before), but no longer leaks the
        # connection when sendmail fails part-way.
        pass
    finally:
        if smtp is not None:
            try:
                smtp.quit()
            except Exception:
                pass
|
asascience-open/ooi-ui-services | refs/heads/master | ooiservices/app/redmine/routes.py | 4 | #!/usr/bin/env python
'''
Redmine endpoints
'''
from flask import (jsonify, request, Response, current_app)
from ooiservices.app.redmine import redmine as api
from ooiservices.app.main.authentication import auth
from ooiservices.app.decorators import scope_required
from collections import OrderedDict
from redmine import Redmine
import json
# List the fields of interest in a redmine issue.
# These names are copied verbatim into the JSON responses below; when an
# OrderedDict is used, this order is the response order.
issue_fields = ['id', 'assigned_to', 'author', 'created_on', 'description', 'done_ratio',
'due_date', 'estimated_hours', 'priority', 'project', 'relations', 'children', 'journal',
'start_date', 'status', 'subject', 'time_entries', 'tracker', 'updated_on', 'watchers']
def redmine_login():
    '''
    Build a Redmine client authenticated with the configured API key.
    '''
    # NOTE(review): verify=False disables SSL certificate checks -- confirm
    # this is intentional for the target Redmine deployment.
    url = current_app.config['REDMINE_URL']
    key = current_app.config['REDMINE_KEY']
    return Redmine(url, key=key, requests={'verify': False})
@api.route('/ticket', methods=['POST'])
@auth.login_required
@scope_required('redmine')
def create_redmine_ticket():
    '''
    Create new redmine ticket.

    Expects a JSON body with at least 'project_id' and 'subject'; other
    recognized fields are passed through to the Redmine issue. Returns the
    request body with status 201 on success, or a JSON error with 400.
    '''
    # Get request data
    data = request.data
    # Check that there is actually data
    if not data:
        return Response(response='{"error":"Invalid request"}', status=400, mimetype="application/json")
    dataDict = json.loads(data)
    # Define required and recommended ticket fields
    required_fields = ['project_id', 'subject']
    recommended_fields = ['due_date', 'description', 'priority_id', 'assigned_to_id',
                          'start_date', 'estimated_hours', 'status_id', 'notes',
                          'tracker_id', 'parent_issue_id', 'done_ratio']
    # Check the required fields
    for field in required_fields:
        if field not in dataDict:
            return Response(response='{"error":"Invalid request: ' + field + ' not defined"}',
                            status=400, mimetype="application/json")
    # Keep only the recognized fields from the request payload.
    fields = dict()
    for field in required_fields + recommended_fields:
        if field in dataDict:
            fields[field] = dataDict[field]
    try:
        # Log into Redmine
        redmine = redmine_login()
        # Create new issue
        issue = redmine.issue.new()
        # Get the list of ticker Trackers
        trackers = list(redmine.tracker.all())
        # Find the REDMINE_TRACKER (like 'Support') and get the id
        # This make a difference for field validation and proper tracker assignment
        config_redmine_tracker = current_app.config['REDMINE_TRACKER']
        # Raises IndexError if the configured tracker name does not exist;
        # that is caught by the blanket handler below and reported as a 400.
        tracker_id = [tracker.id for tracker in trackers if tracker.name == config_redmine_tracker][0]
        issue.tracker_id = tracker_id
        # NOTE(review): if the payload itself contains 'tracker_id' it will
        # overwrite the configured tracker set just above -- confirm intended.
        for key, value in fields.iteritems():
            if value is not None:
                setattr(issue, key, value)
        issue.save()
        return data, 201
    except Exception as e:
        # e.message is Python-2-only, consistent with iteritems() above.
        current_app.logger.exception('[create_redmine_ticket] %s ' % e.message)
        return Response(response='{"error":"'+e.message+'"}', status=400, mimetype="application/json")
@api.route('/ticket/', methods=['GET'])
@auth.login_required
@scope_required('redmine')
def get_all_redmine_tickets():
    '''
    List all redmine tickets.

    Query args:
        project (required): redmine project identifier.
        limit (optional): maximum number of issues to return.
    '''
    limit = None
    if 'limit' in request.args:
        limit = int(request.args['limit'])
    if 'project' not in request.args:
        return Response(response="{error: Invalid request: project_id not defined}",
                        status=400, mimetype="application/json")
    proj = request.args['project']
    redmine = redmine_login()
    project = redmine.project.get(proj).refresh()
    issues = dict(issues=[])
    count = 0
    for issue in project.issues:
        details = OrderedDict()
        for field in issue_fields:
            if hasattr(issue, field):
                # Note following line will produce UnicodeEncodeError:
                # 'ascii' codec can't encode characters in position 251-252: ordinal not in range(128)
                details[field] = str(getattr(issue, field))
        issues['issues'].append(details)
        count += 1
        # BUG FIX: the original incremented and then tested `inx > limit`,
        # which returned limit + 1 issues; stop once exactly `limit` issues
        # have been collected.
        if limit is not None and count >= limit:
            break
    return jsonify(issues)
@api.route('/ticket/id', methods=['POST'])
@auth.login_required
@scope_required('redmine')
def update_redmine_ticket():
    '''
    Update a specific redmine ticket.

    Expects a JSON body with 'resource_id' (the issue id) plus any fields to
    change. Returns the request body with 201 on success; unlike the create
    endpoint there is no try/except here, so Redmine errors propagate.
    '''
    data = request.data
    if not data:
        return Response(response='{"error":"Invalid request"}', status=400, mimetype="application/json")
    # Save the request as a dictionary
    dataDict = json.loads(data)
    # Check the required field (resource_id)
    if 'resource_id' not in dataDict:
        return Response(response='{"error":"Invalid request: resource_id not defined"}',
                        status=400, mimetype="application/json")
    update_fields = ['project_id', 'subject', 'due_date', 'description', 'priority_id',
                     'assigned_to_id', 'start_date', 'estimated_hours', 'status_id', 'notes',
                     'tracker_id', 'parent_issue_id', 'done_ratio']
    # Get all the update fields from the request
    fields = dict()
    for field in update_fields:
        if field in dataDict:
            fields[field] = dataDict[field]
    # Log into Redmine
    redmine = redmine_login()
    # Get the issue, update all fields except the issue resource id
    issue = redmine.issue.get(dataDict['resource_id'])
    for key, value in fields.iteritems():
        if 'resource_id' != key:
            setattr(issue, key, value)
    issue.save()
    return data, 201
@api.route('/ticket/id/', methods=['GET'])
@auth.login_required
@scope_required('redmine')
def get_redmine_ticket():
    '''
    Fetch one ticket by its id (passed as the 'id' query argument) and
    return the fields listed in issue_fields as JSON.
    '''
    redmine = redmine_login()
    if 'id' not in request.args:
        return Response(response="{error: id not defined}", status=400, mimetype="application/json")
    issue = redmine.issue.get(request.args['id'], include='children,journals,watchers')
    details = OrderedDict(
        (field, str(getattr(issue, field)))
        for field in issue_fields
        if hasattr(issue, field)
    )
    return jsonify(details)
@api.route('/users', methods=['GET'])
@auth.login_required
def get_redmine_users():
    '''
    Get all the users in a project.

    Returns {'users': [[str(user), user_id], ...]} for up to 100 users.
    '''
    redmine = redmine_login()
    if 'project' not in request.args:
        return Response(response="{error: project not defined}", status=400, mimetype="application/json")
    all_users = redmine.user.all(offset=1, limit=100)
    users = dict(users=[])
    for idx in xrange(len(all_users)):
        record = all_users[idx]
        users['users'].append([str(record), int(record['id'])])
    return jsonify(users)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# redmine methods for alert and alarm notifications
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def create_redmine_ticket_for_notification(project_id, subject, description, priority, assigned_id):
    """ Create a redmine ticket for an alert notification.

    Returns the new ticket id on success, or None on any failure (errors
    are logged, never raised -- notifications are best-effort).
    """
    ticket_id = None
    # Define required and recommended ticket fields
    required_fields = ['project_id', 'subject']
    recommended_fields = ['due_date', 'description', 'priority_id', 'assigned_to_id',
                          'start_date', 'estimated_hours', 'status_id', 'notes',
                          'tracker_id', 'parent_issue_id', 'done_ratio']
    try:
        data = {
            'project_id': project_id,
            'subject': subject,
            'description': description,
            'priority_id': priority,
            'assigned_to_id': assigned_id,
            'category_id': 1
        }
        fields = dict()
        for field in required_fields + recommended_fields:
            if field in data:
                fields[field] = data[field]
        # Log into Redmine
        redmine = redmine_login()
        issue = redmine.issue.new()
        # Get the list of ticker Trackers
        trackers = list(redmine.tracker.all())
        # Find the REDMINE_TRACKER (like 'Support') and get the id
        # This make a difference for field validation and proper tracker assignment
        config_redmine_tracker = current_app.config['REDMINE_TRACKER']
        tracker_id = [tracker.id for tracker in trackers if tracker.name == config_redmine_tracker][0]
        issue.tracker_id = tracker_id  # support
        for key, value in fields.iteritems():
            # BUG FIX: consistent with create_redmine_ticket -- skip unset
            # values so a None priority/assignee is not pushed to the API.
            if value is not None:
                setattr(issue, key, value)
        if issue.save():
            ticket_id = issue.id
    except Exception as err:
        current_app.logger.exception('[create_redmine_ticket_for_notification] %s ' % err.message)
    # Plain return: the original returned from `finally`, which would also
    # swallow non-Exception errors such as KeyboardInterrupt.
    return ticket_id
def get_redmine_users_by_project(project_id):
    '''
    if project_id is None:
        return []
    all_projects = redmine.project.all()
    projects = []
    for n in xrange(len(all_projects)):
        project = str(all_projects[n])
        projects.append(project)
    if project_id not in projects:
        return []
    '''
    # NOTE(review): the docstring above is disabled validation code kept for
    # reference; project_id is currently passed to the API unvalidated.
    # Returns {'users': [[str(user), user_id], ...]} for up to 100 users of
    # the given project (same shape as the /users endpoint).
    redmine = redmine_login()
    all_users = redmine.user.all(offset=1, limit=100, project_id=project_id)
    users = dict(users=[])
    for n in xrange(len(all_users)):
        user = str(all_users[n])
        user_id = int(all_users[n]['id'])
        users['users'].append([user,user_id])
    return users
def get_redmine_ticket_for_notification(id):
    ''' Get a specific ticket by id for alert notification. Success return ticket_id; if error, return None.
    '''
    details = None
    try:
        redmine = redmine_login()
        ticket = redmine.issue.get(id, include='children,journals,watchers')
        details = {}
        # Copy only the fields of interest, stringified for JSON safety.
        for name in issue_fields:
            if hasattr(ticket, name):
                details[name] = str(getattr(ticket, name))
    except Exception as err:
        # Best-effort: log and fall through to return None.
        current_app.logger.exception('[get_redmine_ticket_for_notification] %s ' % err.message)
    finally:
        return details
# todo test case
def update_redmine_ticket_for_notification(resource_id, project_id, subject, description, priority, assigned_id):
    ''' Update a specific ticket for alert notification. Success return ticket_id, if error return None.
    '''
    ticket_id = None
    try:
        # BUG FIX: the data keys and the update_fields whitelist disagreed
        # ('project' vs 'project_id'; 'priority'/'assigned_to' vs the *_id
        # names used everywhere else in this module), so project, priority
        # and assignee were silently never applied. Use the same *_id names
        # as create_redmine_ticket_for_notification.
        data = {'resource_id': resource_id,
                'project_id': project_id,
                'subject': subject,
                'description': description,
                'priority_id': priority,
                'assigned_to_id': assigned_id,
                'category_id': 1
                }
        update_fields = ['project_id', 'subject', 'due_date', 'description', 'priority_id',
                         'assigned_to_id', 'start_date', 'estimated_hours', 'status_id', 'notes',
                         'tracker_id', 'parent_issue_id', 'done_ratio']
        # Get all the valid update fields from the request
        fields = dict()
        for field in update_fields:
            if field in data:
                fields[field] = data[field]
        # Log into Redmine
        redmine = redmine_login()
        # Get the issue; update all fields except the issue resource id
        issue = redmine.issue.get(data['resource_id'])
        for key, value in fields.iteritems():
            if 'resource_id' != key:
                setattr(issue, key, value)
        if issue.save():
            ticket_id = issue.id
    except Exception as err:
        current_app.logger.exception('[update_redmine_ticket_for_notification] %s ' % err.message)
    # Plain return: the original returned from `finally`, which would also
    # swallow non-Exception errors such as KeyboardInterrupt.
    return ticket_id
|
rfhk/awo-custom | refs/heads/8.0 | product_listprice_list_view/models/stock_quant.py | 1 | # -*- coding: utf-8 -*-
# Copyright 2017-2018 Quartile Limited
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import models, fields, api
class StockQuant(models.Model):
    _inherit = "stock.quant"

    @api.multi
    def write(self, vals):
        """Refresh each template's stock location when quant placement changes."""
        res = super(StockQuant, self).write(vals)
        if any(key in vals for key in ('location_id', 'product_id', 'usage')):
            for quant in self:
                quant.product_id.product_tmpl_id.sudo()._get_stock_location()
        return res

    @api.model
    def create(self, vals):
        """Refresh the template's stock location for newly created quants."""
        quant = super(StockQuant, self).create(vals)
        if any(key in vals for key in ('location_id', 'product_id', 'usage')):
            quant.product_id.product_tmpl_id.sudo()._get_stock_location()
        return quant

    @api.multi
    def unlink(self):
        """Refresh affected templates' stock locations after quant removal."""
        # Capture the templates first; the quants are gone after unlink.
        templates = [quant.product_id.product_tmpl_id for quant in self]
        res = super(StockQuant, self).unlink()
        for template in templates:
            template.sudo()._get_stock_location()
        return res
|
strands-project/mongodb_store | refs/heads/kinetic-devel | mongodb_store/scripts/mongodb_play.py | 2 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import rospy
import mongodb_store.util as mg_util
import sys
import time
import pymongo
from multiprocessing import Process
import calendar
import datetime
import threading
import multiprocessing
from rosgraph_msgs.msg import Clock
import signal
import Queue
from optparse import OptionParser
# Resolve the MongoClient class via the helper -- presumably papers over
# pymongo version differences; confirm in mongodb_store.util.
MongoClient = mg_util.import_MongoClient()
# Dotted document path used throughout to index/sort by insertion time.
TIME_KEY = '_meta.inserted_at'
def max_time(collection):
    """Return the newest '_meta.inserted_at' value stored in *collection*."""
    newest = collection.find_one(sort=[(TIME_KEY, pymongo.DESCENDING)])
    return newest['_meta']['inserted_at']
def min_time(collection):
    """Return the oldest '_meta.inserted_at' value stored in *collection*."""
    oldest = collection.find_one(sort=[(TIME_KEY, pymongo.ASCENDING)])
    return oldest['_meta']['inserted_at']
def to_ros_time(dt):
    """Convert a UTC datetime to rospy.Time (microseconds -> nanoseconds)."""
    secs = calendar.timegm(dt.utctimetuple())
    nsecs = dt.microsecond * 1000
    return rospy.Time(secs, nsecs)
def to_datetime(rt):
    """Convert a rospy.Time to a naive UTC datetime."""
    micros = rt.nsecs / 1000
    whole = datetime.datetime.utcfromtimestamp(rt.secs)
    return whole + datetime.timedelta(microseconds=micros)
def ros_time_strftime(rt, format):
    """ converts a ros time to a datetime and calls strftime on it with the given format """
    dt = to_datetime(rt)
    return dt.strftime(format)
def mkdatetime(date_string):
    """Parse a 'dd/mm/yy HH:MM' string into a datetime."""
    fmt = '%d/%m/%y %H:%M'
    return datetime.datetime.strptime(date_string, fmt)
class PlayerProcess(object):
    # Base class wrapping a multiprocessing.Process around a subclass-provided
    # run(running) method, with a shared boolean flag for cooperative shutdown.
    # 'event' is a multiprocessing.Event used by subclasses to synchronise start.
    def __init__(self, event, start_time, end_time):
        super(PlayerProcess, self).__init__()
        self.event = event
        self.start_time = start_time
        self.end_time = end_time
        # 'b' = C bool, shared across processes so stop() in the parent is
        # visible inside the child's run() loop.
        self.running = multiprocessing.Value('b', True)
        self.player_process = multiprocessing.Process(target=self.run, args=[self.running])
    def start(self):
        self.player_process.start()
    def stop(self):
        # Cooperative: the child polls running.value and exits on its own.
        self.running.value = False
    def join(self):
        self.player_process.join()
    def is_running(self):
        return self.running.value
class TopicPlayer(PlayerProcess):
    """ Replays one mongodb collection to its original ROS topic, timed
    against the simulated /clock published by ClockPlayer. """
    def __init__(self, mongodb_host, mongodb_port, db_name, collection_name, event, start_time, end_time):
        super(TopicPlayer, self).__init__(event, start_time, end_time)
        self.mongodb_host = mongodb_host
        self.mongodb_port = mongodb_port
        self.db_name = db_name
        self.collection_name = collection_name
    def init(self, running):
        """ Called in subprocess to do process-specific initialisation """
        rospy.init_node("mongodb_playback_%s" % self.collection_name)
        # clear signal handlers in this child process, rospy will handle signals for us
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        self.mongo_client=MongoClient(self.mongodb_host, self.mongodb_port)
        self.collection = self.mongo_client[self.db_name][self.collection_name]
        # two threads running here, the main one does the publishing
        # the second one populates the qeue of things to publish
        # how many to
        # Bounded queue: the db reader blocks once this many messages are
        # waiting, keeping memory use flat for large collections.
        buffer_size = 50
        self.to_publish = Queue.Queue(maxsize=buffer_size)
        self.queue_thread = threading.Thread(target=self.queue_from_db, args=[running])
        self.queue_thread.start()
    def queue_from_db(self, running):
        # make sure there's an index on time in the collection so the sort operation doesn't require the whole collection to be loaded
        self.collection.ensure_index(TIME_KEY)
        # get all documents within the time window, sorted ascending order by time
        documents = self.collection.find({TIME_KEY: { '$gte': to_datetime(self.start_time), '$lte': to_datetime(self.end_time)}}, sort=[(TIME_KEY, pymongo.ASCENDING)])
        if documents.count() == 0:
            rospy.logwarn('No messages to play back from topic %s' % self.collection_name)
            return
        else:
            rospy.logdebug('Playing back %d messages', documents.count())
        # load message class for this collection, they should all be the same
        msg_cls = mg_util.load_class(documents[0]["_meta"]["stored_class"])
        latch = False
        if "latch" in documents[0]["_meta"]:
            latch = documents[0]["_meta"]["latch"]
        # publisher won't be used until something is on the queue, so it's safe to construct it here
        self.publisher = rospy.Publisher(documents[0]["_meta"]["topic"], msg_cls, latch = latch, queue_size = 10)
        for document in documents:
            if running.value:
                # instantiate the ROS message object from the dictionary retrieved from the db
                message = mg_util.dictionary_to_message(document, msg_cls)
                # print (message, document["_meta"]["inserted_at"])
                # put will only work while there is space in the queue, if not it will block until another take is performed
                self.to_publish.put((message, to_ros_time(document["_meta"]["inserted_at"])))
            else:
                break
        rospy.logdebug('All messages queued for topic %s' % self.collection_name)
    def run(self, running):
        self.init(running)
        # wait until sim clock has initialised
        while rospy.get_rostime().secs == 0:
            # can't use rospy time here as if clock is 0 it will wait forever
            time.sleep(0.2)
        rospy.logdebug('Topic playback ready %s %s' % (self.collection.name, rospy.get_param('use_sim_time')))
        # wait for the signal to start
        self.event.wait()
        # Seconds to block on an empty queue before re-checking running.value,
        # so stop() is honoured even when no messages are pending.
        timeout = 1
        while running.value:
            try:
                msg_time_tuple = self.to_publish.get(timeout=timeout)
                publish_time = msg_time_tuple[1]
                msg = msg_time_tuple[0]
                now = rospy.get_rostime()
                # if we've missed our window
                if publish_time < now:
                    rospy.logwarn('Message out of sync by %f', (now - publish_time).to_sec())
                else:
                    delay = publish_time - now
                    rospy.sleep(delay)
                # rospy.loginfo('diff %f' % (publish_time - rospy.get_rostime()).to_sec())
                self.publisher.publish(msg)
            except Queue.Empty, e:
                pass
        self.queue_thread.join()
        self.mongo_client.close()
        rospy.loginfo('Topic playback finished %s' % self.collection.name)
class ClockPlayer(PlayerProcess):
    """ Plays a clock message in a separate thread"""
    # Drives /clock from start_time - pre_roll to end_time + post_roll;
    # sets running.value False when the window is exhausted, which the
    # parent uses to stop the topic players.
    def __init__(self, event, start_time, end_time, pre_roll = rospy.Duration(0), post_roll = rospy.Duration(0)):
        super(ClockPlayer, self).__init__(event, start_time, end_time)
        self.start_time = start_time
        self.end_time = end_time
        self.pre_roll = pre_roll
        self.post_roll = post_roll
    def init(self):
        # we handle shutdown for this process
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        # make sure this node doesn't use sim time
        rospy.set_param('use_sim_time', False)
        rospy.init_node('mongodb_playback_clock_player')
        # switch to simulated time, note that as this is after the init_node, this node DOES NOT use sim time
        rospy.set_param('use_sim_time', True)
        # topic to public clock on
        self.clock_pub = rospy.Publisher('/clock', Clock, queue_size=1)
        # send the first message to get time off 0
        self.clock_pub.publish(Clock(clock=(self.start_time - self.pre_roll)))
        # notify everyone else that they can move on
        self.event.set()
    def run(self, running):
        self.init()
        start = self.start_time - self.pre_roll
        end = self.end_time + self.post_roll
        # start value
        clock_msg = Clock(clock=start)
        # timing details, should be moved to constructor parameters
        updates_hz = 1000.0
        rate = rospy.Rate(updates_hz)
        # this assumes close to real-time playback
        update = rospy.Duration(1.0 / updates_hz)
        # wait for the signal to start
        self.event.wait()
        while running.value and clock_msg.clock <= end:
            # update time
            clock_msg.clock += update
            # publish time
            self.clock_pub.publish(clock_msg)
            rate.sleep()
        rospy.logdebug('Playback clock finished')
        # Signal completion so MongoPlayback.is_running() reports False.
        running.value = False
class MongoPlayback(object):
    """ Plays back stored topics from the mongodb_store """
    def __init__(self):
        super(MongoPlayback, self).__init__()
        # Connection details come from the ROS parameter server.
        self.mongodb_host = rospy.get_param("mongodb_host")
        self.mongodb_port = rospy.get_param("mongodb_port")
        self.mongo_client=MongoClient(self.mongodb_host, self.mongodb_port)
        self.stop_called = False
    def setup(self, database_name, req_topics, start_dt, end_dt):
        """ Read in details of requested playback collections. """
        if database_name not in self.mongo_client.database_names():
            raise Exception('Unknown database %s' % database_name)
        database = self.mongo_client[database_name]
        collection_names = database.collection_names(include_system_collections=False)
        req_topics = set(map(mg_util.topic_name_to_collection_name, req_topics))
        # Empty req_topics means "play everything in the database".
        if len(req_topics) > 0:
            topics = req_topics.intersection(collection_names)
            dropped = req_topics.difference(topics)
            if(len(dropped) > 0):
                print('WARNING Dropped non-existant requested topics for playback: %s' % dropped)
        else:
            topics = set(collection_names)
        print('Playing back topics %s' % topics)
        # create mongo collections
        collections = [database[collection_name] for collection_name in topics]
        # make sure they're easily accessible by time
        for collection in collections:
            collection.ensure_index(TIME_KEY)
        if len(start_dt)==0:
            # get the min and max time across all collections, conver to ros time
            start_time = to_ros_time(min(map(min_time, [collection for collection in collections if collection.count() > 0])))
        else:
            start_time = to_ros_time(mkdatetime(start_dt))
        if len(end_dt)==0:
            end_time = to_ros_time(max(map(max_time, [collection for collection in collections if collection.count() > 0])))
        else:
            end_time = to_ros_time(mkdatetime(end_dt))
        # we don't need a connection any more
        self.mongo_client.close()
        # rospy.loginfo('Playing back from %s' % to_datetime(start_time))
        # rospy.loginfo('.............. to %s' % to_datetime(end_time))
        # Shared start-gate: the clock player sets it once /clock is live.
        self.event = multiprocessing.Event()
        # create clock thread
        pre_roll = rospy.Duration(2)
        post_roll = rospy.Duration(0)
        self.clock_player = ClockPlayer(self.event, start_time, end_time, pre_roll, post_roll)
        # create playback objects
        self.players = map(lambda c: TopicPlayer(self.mongodb_host, self.mongodb_port, database_name, c, self.event, start_time - pre_roll, end_time + post_roll), topics)
    def start(self):
        self.clock_player.start()
        # wait until clock has set sim time
        self.event.wait()
        self.event.clear()
        # this creates new processes and publishers for each topic
        for player in self.players:
            player.start()
        # all players wait for this before starting --
        # todo: it could happen that his gets hit before all are constructed though
        self.event.set()
    def join(self):
        self.clock_player.join()
        # if clock runs out but we weren't killed then we need ot stop other processes
        if not self.stop_called:
            self.stop()
        for player in self.players:
            player.join()
    def stop(self):
        self.stop_called = True
        self.clock_player.stop()
        for player in self.players:
            player.stop()
    def is_running(self):
        # The clock player's flag doubles as the overall playback state.
        return self.clock_player.is_running()
def main(argv):
    # Entry point: parse CLI options, wire up signal handlers, run the
    # playback to completion and restore real time afterwards.
    myargv = rospy.myargv(argv=argv)
    parser = OptionParser()
    parser.usage += " [TOPICs...]"
    parser.add_option("--mongodb-name", dest="mongodb_name",
                      help="Name of DB from which to retrieve values",
                      metavar="NAME", default="roslog")
    parser.add_option("-s", "--start", dest="start", type="string", default="", metavar='S', help='start datetime of query, defaults to the earliest date stored in db, across all requested collections. Formatted "d/m/y H:M" e.g. "06/07/14 06:38"')
    parser.add_option("-e", "--end", dest="end", type="string", default="", metavar='E', help='end datetime of query, defaults to the latest date stored in db, across all requested collections. Formatted "d/m/y H:M" e.g. "06/07/14 06:38"')
    (options, args) = parser.parse_args(myargv)
    database_name = options.mongodb_name
    # Positional args (minus the script name) are the topics to replay.
    topics = set(args[1:])
    playback = MongoPlayback()
    def signal_handler(signal, frame):
        playback.stop()
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGINT, signal_handler)
    playback.setup(database_name, topics, options.start, options.end)
    playback.start()
    playback.join()
    # Leave the ROS master in real-time mode for subsequent nodes.
    rospy.set_param('use_sim_time', False)
# processes load main so move init_node out
if __name__ == "__main__":
    main(sys.argv)
|
DynastyDYN/Dynasty | refs/heads/master | contrib/bitrpc/bitrpc.py | 1 | from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:7875")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:7875")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Dynasty address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Dynasty address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
|
openshift/openshift-tools | refs/heads/prod | openshift_tools/monitoring/generic_metric_sender.py | 13 | #!/usr/bin/env python
# vim: expandtab:tabstop=4:shiftwidth=4
"""
Base class for sending metrics
"""
import os
import yaml
class GenericMetricSenderException(Exception):
    """Raised to propagate errors up from the metric-sender API."""
class GenericMetricSender(object):
"""
Abstract class that encapsulates all metric sending operations.
"""
# pylint: disable=unidiomatic-typecheck
def __init__(self):
""" raise exception - Abstract class shouldn't instantiate"""
if type(self) is GenericMetricSender:
raise TypeError("This is an abstract class and shouldn't be directly instantiated")
self.unique_metrics = []
self.config = None
self.config_file = None
def parse_config(self):
""" parse default config file """
if not self.config:
if not os.path.exists(self.config_file):
raise GenericMetricSenderException(self.config_file + " does not exist.")
self.config = yaml.load(file(self.config_file))
# Allow for 6 arguments (including 'self')
# pylint: disable=too-many-arguments
def add_dynamic_metric(self, discovery_key, macro_string, macro_array, host=None, synthetic=False):
""" empty implementation overridden by derived classes """
pass
def add_metric(self, metric, host=None, synthetic=False, key_tags=None):
""" empty implementation overridden by derived classes """
pass
def add_heartbeat(self, add_heartbeat, host=None):
""" empty implementation overridden by derived classes """
pass
def send_metrics(self):
""" empty implementation overridden by derived classes """
pass
def print_unique_metrics_key_value(self):
"""
This function prints the key/value pairs the UniqueMetrics that sender
currently has stored
"""
print "\n" + self.__class__.__name__ + " Key/Value pairs:"
print "=============================="
for unique_metric in self.unique_metrics:
print("%s: %s") % (unique_metric.key, unique_metric.value)
print "==============================\n"
def print_unique_metrics(self):
"""
This function prints all of the information of the UniqueMetrics that sender
currently has stored
"""
print "\n" + self.__class__.__name__ + " UniqueMetrics:"
print "=============================="
for unique_metric in self.unique_metrics:
print unique_metric
print "==============================\n"
|
chienlieu2017/it_management | refs/heads/master | odoo/addons/barcodes/models/__init__.py | 15 | # -*- coding: utf-8 -*-
import barcodes
import barcode_events_mixin
|
danicampora/micropython | refs/heads/master | tests/extmod/uctypes_ptr_le.py | 59 | import sys
import uctypes
if sys.byteorder != "little":
print("SKIP")
sys.exit()
# Pointer-flavoured field descriptors, all aliased to offset 0 of the
# same target buffer: a uint8 pointer, a uint16 pointer, and a pointer
# to a one-field struct.
desc = {
    "ptr": (uctypes.PTR | 0, uctypes.UINT8),
    "ptr16": (uctypes.PTR | 0, uctypes.UINT16),
    "ptr2": (uctypes.PTR | 0, {"b": uctypes.UINT8 | 0}),
}

# BUG FIX (idiom): the buffer was previously named `bytes`, shadowing the
# builtin type; renamed to `data`. Behavior is unchanged.
data = b"01"
addr = uctypes.addressof(data)
buf = addr.to_bytes(uctypes.sizeof(desc))
S = uctypes.struct(uctypes.addressof(buf), desc, uctypes.LITTLE_ENDIAN)

print(S.ptr[0])
assert S.ptr[0] == ord("0")
print(S.ptr[1])
assert S.ptr[1] == ord("1")
print(hex(S.ptr16[0]))
assert hex(S.ptr16[0]) == "0x3130"
# NOTE(review): the repeated prints below look redundant but this test's
# stdout is compared against an .exp file -- confirm before deduplicating.
print(S.ptr2[0].b, S.ptr2[1].b)
print (S.ptr2[0].b, S.ptr2[1].b)
print(hex(S.ptr16[0]))
assert (S.ptr2[0].b, S.ptr2[1].b) == (48, 49)
|
molobrakos/home-assistant | refs/heads/master | homeassistant/components/lovelace/__init__.py | 8 | """Support for the Lovelace UI."""
from functools import wraps
import logging
import os
import time
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.yaml import load_yaml
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'lovelace'
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
CONF_MODE = 'mode'
MODE_YAML = 'yaml'
MODE_STORAGE = 'storage'
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(CONF_MODE, default=MODE_STORAGE):
vol.All(vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])),
}),
}, extra=vol.ALLOW_EXTRA)
LOVELACE_CONFIG_FILE = 'ui-lovelace.yaml'
WS_TYPE_GET_LOVELACE_UI = 'lovelace/config'
WS_TYPE_SAVE_CONFIG = 'lovelace/config/save'
SCHEMA_GET_LOVELACE_UI = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({
vol.Required('type'): WS_TYPE_GET_LOVELACE_UI,
vol.Optional('force', default=False): bool,
})
SCHEMA_SAVE_CONFIG = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({
vol.Required('type'): WS_TYPE_SAVE_CONFIG,
vol.Required('config'): vol.Any(str, dict),
})
class ConfigNotFound(HomeAssistantError):
    """Raised when no Lovelace config is available."""
async def async_setup(hass, config):
    """Set up the Lovelace panel, config backend and WebSocket commands."""
    # `get` with defaults: when loaded as a dependency the DOMAIN key may
    # be absent from the validated config.
    mode = config.get(DOMAIN, {}).get(CONF_MODE, MODE_STORAGE)

    await hass.components.frontend.async_register_built_in_panel(
        DOMAIN, config={'mode': mode})

    # Pick the config backend matching the configured mode.
    if mode == MODE_YAML:
        hass.data[DOMAIN] = LovelaceYAML(hass)
    else:
        hass.data[DOMAIN] = LovelaceStorage(hass)

    hass.components.websocket_api.async_register_command(
        WS_TYPE_GET_LOVELACE_UI, websocket_lovelace_config,
        SCHEMA_GET_LOVELACE_UI)
    hass.components.websocket_api.async_register_command(
        WS_TYPE_SAVE_CONFIG, websocket_lovelace_save_config,
        SCHEMA_SAVE_CONFIG)

    hass.components.system_health.async_register_info(
        DOMAIN, system_health_info)
    return True
class LovelaceStorage:
    """Class to handle Storage based Lovelace config."""

    def __init__(self, hass):
        """Initialize Lovelace config based on storage helper."""
        self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
        # Lazily-loaded {'config': ...} dict; None until first access.
        self._data = None

    async def async_get_info(self):
        """Return a summary of the stored config for system health."""
        if self._data is None:
            await self._load()
        if self._data['config'] is None:
            return {'mode': 'auto-gen'}
        return _config_info('storage', self._data['config'])

    async def async_load(self, force):
        """Load config; raise ConfigNotFound when nothing is stored."""
        if self._data is None:
            await self._load()
        stored = self._data['config']
        if stored is None:
            raise ConfigNotFound
        return stored

    async def async_save(self, config):
        """Persist a new config through the storage helper."""
        if self._data is None:
            await self._load()
        self._data['config'] = config
        await self._store.async_save(self._data)

    async def _load(self):
        """Populate the in-memory cache from the storage helper."""
        data = await self._store.async_load()
        self._data = data if data else {'config': None}
class LovelaceYAML:
    """Class to handle YAML-based Lovelace config."""

    def __init__(self, hass):
        """Initialize the YAML config."""
        self.hass = hass
        # Cache of (config, load timestamp); None until first load.
        self._cache = None

    async def async_get_info(self):
        """Return a summary of the YAML config for system health."""
        try:
            config = await self.async_load(False)
        except ConfigNotFound:
            return {
                'mode': 'yaml',
                'error': '{} not found'.format(
                    self.hass.config.path(LOVELACE_CONFIG_FILE)),
            }
        return _config_info('yaml', config)

    async def async_load(self, force):
        """Load config, delegating file I/O to an executor thread."""
        return await self.hass.async_add_executor_job(self._load_config, force)

    def _load_config(self, force):
        """Read ui-lovelace.yaml, serving from cache while still fresh."""
        fname = self.hass.config.path(LOVELACE_CONFIG_FILE)
        # Serve the cached copy unless forced or the file changed on disk.
        if not force and self._cache is not None:
            cached_config, last_update = self._cache
            modtime = os.path.getmtime(fname)
            if cached_config and last_update > modtime:
                return cached_config
        try:
            config = load_yaml(fname)
        except FileNotFoundError:
            raise ConfigNotFound from None
        self._cache = (config, time.time())
        return config

    async def async_save(self, config):
        """Saving is not possible in YAML mode."""
        raise HomeAssistantError('Not supported')
def handle_yaml_errors(func):
    """Decorator converting config errors into WebSocket error messages."""
    @wraps(func)
    async def send_with_error_handling(hass, connection, msg):
        error = None
        try:
            result = await func(hass, connection, msg)
            message = websocket_api.result_message(msg['id'], result)
        except ConfigNotFound:
            error = 'config_not_found', 'No config found.'
        except HomeAssistantError as err:
            error = 'error', str(err)
        # Replace the success message when any handled error occurred.
        if error is not None:
            message = websocket_api.error_message(msg['id'], *error)
        connection.send_message(message)
    return send_with_error_handling
@websocket_api.async_response
@handle_yaml_errors
async def websocket_lovelace_config(hass, connection, msg):
    """Return the current Lovelace UI config over the WebSocket API."""
    backend = hass.data[DOMAIN]
    return await backend.async_load(msg['force'])
@websocket_api.async_response
@handle_yaml_errors
async def websocket_lovelace_save_config(hass, connection, msg):
    """Persist a new Lovelace UI configuration sent over the WebSocket."""
    backend = hass.data[DOMAIN]
    await backend.async_save(msg['config'])
async def system_health_info(hass):
    """Collect Lovelace status for Home Assistant's system-health page."""
    lovelace = hass.data[DOMAIN]
    return await lovelace.async_get_info()
def _config_info(mode, config):
"""Generate info about the config."""
return {
'mode': mode,
'resources': len(config.get('resources', [])),
'views': len(config.get('views', []))
}
|
randynobx/ansible | refs/heads/devel | lib/ansible/modules/notification/hipchat.py | 34 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: hipchat
version_added: "1.2"
short_description: Send a message to Hipchat.
description:
- Send a message to a Hipchat room, with options to control the formatting.
options:
token:
description:
- API token.
required: true
room:
description:
- ID or name of the room.
required: true
from:
description:
- Name the message will appear to be sent from. Max length is 15
characters - above this it will be truncated.
required: false
default: Ansible
msg:
description:
- The message body.
required: true
default: null
color:
description:
- Background color for the message.
required: false
default: yellow
choices: [ "yellow", "red", "green", "purple", "gray", "random" ]
msg_format:
description:
- Message format.
required: false
default: text
choices: [ "text", "html" ]
notify:
description:
- If true, a notification will be triggered for users in the room.
required: false
default: 'yes'
choices: [ "yes", "no" ]
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.5.1
api:
description:
- API url if using a self-hosted hipchat server. For Hipchat API version
2 use the default URI with C(/v2) instead of C(/v1).
required: false
default: 'https://api.hipchat.com/v1'
version_added: 1.6.0
requirements: [ ]
author: "WAKAYAMA Shirou (@shirou), BOURDEL Paul (@pb8226)"
'''
EXAMPLES = '''
- hipchat:
room: notif
msg: Ansible task finished
# Use Hipchat API version 2
- hipchat:
api: https://api.hipchat.com/v2/
token: OAUTH2_TOKEN
room: notify
msg: Ansible task finished
'''
# ===========================================
# HipChat module specific support methods.
#
try:
import json
except ImportError:
import simplejson as json
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils.six.moves.urllib.request import pathname2url
from ansible.module_utils.urls import fetch_url
DEFAULT_URI = "https://api.hipchat.com/v1"
MSG_URI_V1 = "/rooms/message"
NOTIFY_URI_V2 = "/room/{id_or_name}/notification"
def send_msg_v1(module, token, room, msg_from, msg, msg_format='text',
                color='yellow', notify=False, api=MSG_URI_V1):
    '''sending message to hipchat v1 server'''
    params = {
        'room_id': room,
        'from': msg_from[:15],  # v1 API caps the sender name at 15 chars
        'message': msg,
        'message_format': msg_format,
        'color': color,
        'api': api,
        'notify': int(notify),
    }
    url = api + MSG_URI_V1 + "?auth_token=%s" % (token)
    data = urlencode(params)

    if module.check_mode:
        # In check mode, exit before actually sending the message
        module.exit_json(changed=False)

    response, info = fetch_url(module, url, data=data)
    if info['status'] == 200:
        return response.read()
    else:
        module.fail_json(msg="failed to send message, return status=%s" % str(info['status']))
def send_msg_v2(module, token, room, msg_from, msg, msg_format='text',
                color='yellow', notify=False, api=NOTIFY_URI_V2):
    '''sending message to hipchat v2 server'''
    headers = {'Authorization': 'Bearer %s' % token, 'Content-Type': 'application/json'}

    body = {
        'message': msg,
        'color': color,
        'message_format': msg_format,
        'notify': notify,
    }

    url = (api + NOTIFY_URI_V2).replace('{id_or_name}', pathname2url(room))
    data = json.dumps(body)

    if module.check_mode:
        # In check mode, exit before actually sending the message
        module.exit_json(changed=False)

    response, info = fetch_url(module, url, data=data, headers=headers, method='POST')

    # https://www.hipchat.com/docs/apiv2/method/send_room_notification shows
    # 204 to be the expected result code.
    if info['status'] in [200, 204]:
        return response.read()
    module.fail_json(msg="failed to send message, return status=%s" % str(info['status']))
# ===========================================
# Module execution.
#
def main():
    """Module entry point: validate parameters, dispatch to the v1/v2 sender."""
    module = AnsibleModule(
        argument_spec=dict(
            token=dict(required=True, no_log=True),
            room=dict(required=True),
            msg=dict(required=True),
            msg_from=dict(default="Ansible", aliases=['from']),
            color=dict(default="yellow", choices=["yellow", "red", "green",
                                                  "purple", "gray", "random"]),
            msg_format=dict(default="text", choices=["text", "html"]),
            notify=dict(default=True, type='bool'),
            validate_certs=dict(default='yes', type='bool'),
            api=dict(default=DEFAULT_URI),
        ),
        supports_check_mode=True
    )

    params = module.params
    token = params["token"]
    room = str(params["room"])
    msg = params["msg"]
    msg_from = params["msg_from"]
    color = params["color"]
    msg_format = params["msg_format"]
    notify = params["notify"]
    api = params["api"]

    # A '/v2' path segment in the API url selects the OAuth2-based v2 API.
    sender = send_msg_v2 if api.find('/v2') != -1 else send_msg_v1
    try:
        sender(module, token, room, msg_from, msg, msg_format, color, notify, api)
    except Exception:
        e = get_exception()
        module.fail_json(msg="unable to send msg: %s" % e)

    module.exit_json(changed=True, room=room, msg_from=msg_from, msg=msg)
if __name__ == '__main__':
main()
|
bgris/ODL_bgris | refs/heads/master | lib/python3.5/distutils/unixccompiler.py | 7 | """distutils.unixccompiler
Contains the UnixCCompiler class, a subclass of CCompiler that handles
the "typical" Unix-style command-line C compiler:
* macros defined with -Dname[=value]
* macros undefined with -Uname
* include search directories specified with -Idir
* libraries specified with -lllib
* library search directories specified with -Ldir
* compile handled by 'cc' (or similar) executable with -c option:
compiles .c to .o
* link static library handled by 'ar' command (possibly with 'ranlib')
* link shared library handled by 'cc -shared'
"""
import os, sys, re
from distutils import sysconfig
from distutils.dep_util import newer
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils.errors import \
DistutilsExecError, CompileError, LibError, LinkError
from distutils import log
if sys.platform == 'darwin':
import _osx_support
# XXX Things not currently handled:
# * optimization/debug/warning flags; we just use whatever's in Python's
# Makefile and live with it. Is this adequate? If not, we might
# have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
# SunCCompiler, and I suspect down that road lies madness.
# * even if we don't know a warning flag from an optimization flag,
# we need some way for outsiders to feed preprocessor/compiler/linker
# flags in to us -- eg. a sysadmin might want to mandate certain flags
# via a site config file, or a user might want to set something for
# compiling this module distribution only via the setup.py command
# line, whatever. As long as these options come from something on the
# current system, they can be as system-dependent as they like, and we
# should just happily stuff them into the preprocessor/compiler/linker
# options and carry on.
class UnixCCompiler(CCompiler):
compiler_type = 'unix'
# These are used by CCompiler in two places: the constructor sets
# instance attributes 'preprocessor', 'compiler', etc. from them, and
# 'set_executable()' allows any of these to be set. The defaults here
# are pretty generic; they will probably have to be set by an outsider
# (eg. using information discovered by the sysconfig about building
# Python extensions).
executables = {'preprocessor' : None,
'compiler' : ["cc"],
'compiler_so' : ["cc"],
'compiler_cxx' : ["cc"],
'linker_so' : ["cc", "-shared"],
'linker_exe' : ["cc"],
'archiver' : ["ar", "-cr"],
'ranlib' : None,
}
if sys.platform[:6] == "darwin":
executables['ranlib'] = ["ranlib"]
# Needed for the filename generation methods provided by the base
# class, CCompiler. NB. whoever instantiates/uses a particular
# UnixCCompiler instance should set 'shared_lib_ext' -- we set a
# reasonable common default here, but it's not necessarily used on all
# Unices!
src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
obj_extension = ".o"
static_lib_extension = ".a"
shared_lib_extension = ".so"
dylib_lib_extension = ".dylib"
xcode_stub_lib_extension = ".tbd"
static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
xcode_stub_lib_format = dylib_lib_format
if sys.platform == "cygwin":
exe_extension = ".exe"
def preprocess(self, source, output_file=None, macros=None,
               include_dirs=None, extra_preargs=None, extra_postargs=None):
    """Run the preprocessor on *source*, honoring macros and include dirs."""
    ignore, macros, include_dirs = self._fix_compile_args(
        None, macros, include_dirs)
    pp_opts = gen_preprocess_options(macros, include_dirs)
    pp_args = self.preprocessor + pp_opts
    if output_file:
        pp_args.extend(['-o', output_file])
    if extra_preargs:
        pp_args[:0] = extra_preargs
    if extra_postargs:
        pp_args.extend(extra_postargs)
    pp_args.append(source)

    # Only actually preprocess when forced, writing to stdout, or the
    # target is missing/stale relative to the source.
    if self.force or output_file is None or newer(source, output_file):
        if output_file:
            self.mkpath(os.path.dirname(output_file))
        try:
            self.spawn(pp_args)
        except DistutilsExecError as msg:
            raise CompileError(msg)
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
    """Compile a single source file *src* into object file *obj*."""
    compiler_so = self.compiler_so
    if sys.platform == 'darwin':
        # Adjust flags the OSX toolchain needs rewritten (e.g. -arch).
        compiler_so = _osx_support.compiler_fixup(
            compiler_so, cc_args + extra_postargs)
    try:
        self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs)
    except DistutilsExecError as msg:
        raise CompileError(msg)
def create_static_lib(self, objects, output_libname,
                      output_dir=None, debug=0, target_lang=None):
    """Archive *objects* into a static library, running ranlib if configured."""
    objects, output_dir = self._fix_object_args(objects, output_dir)
    output_filename = self.library_filename(output_libname,
                                            output_dir=output_dir)

    if not self._need_link(objects, output_filename):
        log.debug("skipping %s (up-to-date)", output_filename)
        return

    self.mkpath(os.path.dirname(output_filename))
    self.spawn(self.archiver + [output_filename] + objects + self.objects)

    # Few Unices still require ranlib (SunOS 4.x being the notable one);
    # run it only when the executables table configured one.
    if self.ranlib:
        try:
            self.spawn(self.ranlib + [output_filename])
        except DistutilsExecError as msg:
            raise LibError(msg)
def link(self, target_desc, objects,
         output_filename, output_dir=None, libraries=None,
         library_dirs=None, runtime_library_dirs=None,
         export_symbols=None, debug=0, extra_preargs=None,
         extra_postargs=None, build_temp=None, target_lang=None):
    """Link *objects* into an executable or shared object at *output_filename*."""
    objects, output_dir = self._fix_object_args(objects, output_dir)
    libraries, library_dirs, runtime_library_dirs = self._fix_lib_args(
        libraries, library_dirs, runtime_library_dirs)
    lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
                               libraries)
    if not isinstance(output_dir, (str, type(None))):
        raise TypeError("'output_dir' must be a string or None")
    if output_dir is not None:
        output_filename = os.path.join(output_dir, output_filename)

    if not self._need_link(objects, output_filename):
        log.debug("skipping %s (up-to-date)", output_filename)
        return

    ld_args = objects + self.objects + lib_opts + ['-o', output_filename]
    if debug:
        ld_args[:0] = ['-g']
    if extra_preargs:
        ld_args[:0] = extra_preargs
    if extra_postargs:
        ld_args.extend(extra_postargs)
    self.mkpath(os.path.dirname(output_filename))
    try:
        if target_desc == CCompiler.EXECUTABLE:
            linker = self.linker_exe[:]
        else:
            linker = self.linker_so[:]
        if target_lang == "c++" and self.compiler_cxx:
            # Skip over environment variable settings when /usr/bin/env
            # launches the linker (needed on OSX); the normal and C++
            # compilers are assumed to share the same environment settings.
            i = 0
            if os.path.basename(linker[0]) == "env":
                i = 1
                while '=' in linker[i]:
                    i += 1
            linker[i] = self.compiler_cxx[i]
        if sys.platform == 'darwin':
            linker = _osx_support.compiler_fixup(linker, ld_args)
        self.spawn(linker + ld_args)
    except DistutilsExecError as msg:
        raise LinkError(msg)
# -- Miscellaneous methods -----------------------------------------
# These are all used by the 'gen_lib_options() function, in
# ccompiler.py.
def library_dir_option(self, dir):
    """Return the compiler flag adding *dir* to the library search path."""
    return "-L" + dir
def _is_gcc(self, compiler_name):
return "gcc" in compiler_name or "g++" in compiler_name
def runtime_library_dir_option(self, dir):
    """Return the linker option(s) that add *dir* to the runtime search path.

    Platforms (and linkers) disagree on how runtime search paths are
    specified, and Python's configuration data does not record it, so the
    answer is keyed off sys.platform and the configured CC (see Python bug
    #445902 for the history of this hack).
    """
    compiler = os.path.basename(sysconfig.get_config_var("CC"))
    if sys.platform[:6] == "darwin":
        # MacOSX's linker doesn't understand the -R flag at all
        return "-L" + dir
    if sys.platform[:7] == "freebsd":
        return "-Wl,-rpath=" + dir
    if sys.platform[:5] == "hp-ux":
        if self._is_gcc(compiler):
            return ["-Wl,+s", "-L" + dir]
        return ["+s", "-L" + dir]
    if sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
        return ["-rpath", dir]
    if self._is_gcc(compiler):
        # gcc on non-GNU systems does not need -Wl, but accepts it, and
        # distutils has always passed it, so keep doing so.
        if sysconfig.get_config_var("GNULD") == "yes":
            # GNU ld needs an extra option to get a RUNPATH
            # instead of just an RPATH.
            return "-Wl,--enable-new-dtags,-R" + dir
        return "-Wl,-R" + dir
    # Assume a non-GNU compiler whose ld understands a plain -R.
    return "-R" + dir
def library_option(self, lib):
    """Return the compiler flag linking against library *lib*."""
    return "-l" + lib
def find_library_file(self, dirs, lib, debug=0):
shared_f = self.library_filename(lib, lib_type='shared')
dylib_f = self.library_filename(lib, lib_type='dylib')
xcode_stub_f = self.library_filename(lib, lib_type='xcode_stub')
static_f = self.library_filename(lib, lib_type='static')
if sys.platform == 'darwin':
# On OSX users can specify an alternate SDK using
# '-isysroot', calculate the SDK root if it is specified
# (and use it further on)
#
# Note that, as of Xcode 7, Apple SDKs may contain textual stub
# libraries with .tbd extensions rather than the normal .dylib
# shared libraries installed in /. The Apple compiler tool
# chain handles this transparently but it can cause problems
# for programs that are being built with an SDK and searching
# for specific libraries. Callers of find_library_file need to
# keep in mind that the base filename of the returned SDK library
# file might have a different extension from that of the library
# file installed on the running system, for example:
# /Applications/Xcode.app/Contents/Developer/Platforms/
# MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/
# usr/lib/libedit.tbd
# vs
# /usr/lib/libedit.dylib
cflags = sysconfig.get_config_var('CFLAGS')
m = re.search(r'-isysroot\s+(\S+)', cflags)
if m is None:
sysroot = '/'
else:
sysroot = m.group(1)
for dir in dirs:
shared = os.path.join(dir, shared_f)
dylib = os.path.join(dir, dylib_f)
static = os.path.join(dir, static_f)
xcode_stub = os.path.join(dir, xcode_stub_f)
if sys.platform == 'darwin' and (
dir.startswith('/System/') or (
dir.startswith('/usr/') and not dir.startswith('/usr/local/'))):
shared = os.path.join(sysroot, dir[1:], shared_f)
dylib = os.path.join(sysroot, dir[1:], dylib_f)
static = os.path.join(sysroot, dir[1:], static_f)
xcode_stub = os.path.join(sysroot, dir[1:], xcode_stub_f)
# We're second-guessing the linker here, with not much hard
# data to go on: GCC seems to prefer the shared library, so I'm
# assuming that *all* Unix C compilers do. And of course I'm
# ignoring even GCC's "-static" option. So sue me.
if os.path.exists(dylib):
return dylib
elif os.path.exists(xcode_stub):
return xcode_stub
elif os.path.exists(shared):
return shared
elif os.path.exists(static):
return static
# Oops, didn't find it in *any* of 'dirs'
return None
|
joachimmetz/plaso | refs/heads/main | plaso/parsers/sqlite_plugins/chrome_history.py | 3 | # -*- coding: utf-8 -*-
"""SQLite parser plugin for Google Chrome history database files."""
from dfdatetime import posix_time as dfdatetime_posix_time
from dfdatetime import webkit_time as dfdatetime_webkit_time
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import definitions
from plaso.parsers import sqlite
from plaso.parsers.sqlite_plugins import interface
class ChromeHistoryFileDownloadedEventData(events.EventData):
  """Chrome History file downloaded event data.

  Attributes:
    danger_type (int): assessment by Safe Browsing of the danger of the
        downloaded content.
    full_path (str): full path where the file was downloaded to.
    interrupt_reason (int): indication why the download was interrupted.
    offset (str): identifier of the row, from which the event data was
        extracted.
    opened (int): value to indicate if the downloaded file was opened from
        the browser.
    query (str): SQL query that was used to obtain the event data.
    received_bytes (int): number of bytes received while downloading.
    state (int): state of the download, such as finished or cancelled.
    total_bytes (int): total number of bytes to download.
    url (str): URL of the downloaded file.
  """

  DATA_TYPE = 'chrome:history:file_downloaded'

  def __init__(self):
    """Initializes event data."""
    super(ChromeHistoryFileDownloadedEventData, self).__init__(
        data_type=self.DATA_TYPE)
    # All attributes start out unset; the parser fills them in per row of
    # the downloads table.
    for attribute_name in (
        'danger_type', 'full_path', 'interrupt_reason', 'offset', 'opened',
        'query', 'received_bytes', 'state', 'total_bytes', 'url'):
      setattr(self, attribute_name, None)
class ChromeHistoryPageVisitedEventData(events.EventData):
  """Chrome History page visited event data.

  Attributes:
    from_visit (str): URL where the visit originated from.
    offset (str): identifier of the row, from which the event data was
        extracted.
    page_transition_type (int): type of transitions between pages.
    query (str): SQL query that was used to obtain the event data.
    title (str): title of the visited page.
    typed_count (int): number of characters of the URL that were typed.
    url (str): URL of the visited page.
    url_hidden (bool): True if the URL is hidden.
    visit_source (int): source of the page visit.
  """

  DATA_TYPE = 'chrome:history:page_visited'

  def __init__(self):
    """Initializes event data."""
    super(ChromeHistoryPageVisitedEventData, self).__init__(
        data_type=self.DATA_TYPE)
    # All attributes start out unset; the parser fills them in per row of
    # the visits/urls join.
    for attribute_name in (
        'from_visit', 'offset', 'page_transition_type', 'query', 'title',
        'typed_count', 'url', 'url_hidden', 'visit_source'):
      setattr(self, attribute_name, None)
class BaseGoogleChromeHistoryPlugin(interface.SQLitePlugin):
  """SQLite parser plugin for Google Chrome history database files.

  The Google Chrome history database file is typically stored in:
  Archived History
  History

  Note that the Archived History database does not contain the downloads table.
  """

  _SYNC_CACHE_QUERY = 'SELECT id, source FROM visit_source'

  _URL_CACHE_QUERY = (
      'SELECT visits.id AS id, urls.url, urls.title FROM '
      'visits, urls WHERE urls.id = visits.url')

  # Only the lower 8 bits of a transition value encode the core transition
  # type; the remaining bits are qualifier flags.
  # https://cs.chromium.org/chromium/src/ui/base/page_transition_types.h?l=108
  _PAGE_TRANSITION_CORE_MASK = 0xff

  def _GetUrl(self, url, cache, database):
    """Retrieves an URL from a reference to an entry in the from_visit table.

    Args:
      url (str): URL.
      cache (SQLiteCache): cache.
      database (SQLiteDatabase): database.

    Returns:
      str: URL and title formatted as 'URL (title)' or an empty string if
          no URL was found.
    """
    if not url:
      return ''

    # Lazily populate the URL cache from the visits/urls join on first use.
    url_cache_results = cache.GetResults('url')
    if not url_cache_results:
      result_set = database.Query(self._URL_CACHE_QUERY)

      cache.CacheQueryResults(result_set, 'url', 'id', ('url', 'title'))
      url_cache_results = cache.GetResults('url')

    reference_url, reference_title = url_cache_results.get(url, ['', ''])

    if not reference_url:
      return ''

    return '{0:s} ({1:s})'.format(reference_url, reference_title)

  def _GetVisitSource(self, visit_identifier, cache, database):
    """Retrieves a visit source type based on the identifier.

    Args:
      visit_identifier (str): identifier from the visits table for the
          particular record.
      cache (SQLiteCache): cache which contains cached results from querying
          the visit_source table.
      database (SQLiteDatabase): database.

    Returns:
      int: visit source type or None if no visit source type was found for
          the identifier.
    """
    # Lazily populate the sync cache from the visit_source table on
    # first use.
    sync_cache_results = cache.GetResults('sync')
    if not sync_cache_results:
      result_set = database.Query(self._SYNC_CACHE_QUERY)

      cache.CacheQueryResults(result_set, 'sync', 'id', ('source',))
      sync_cache_results = cache.GetResults('sync')

    if sync_cache_results and visit_identifier:
      results = sync_cache_results.get(visit_identifier, None)
      if results:
        return results[0]

    return None

  def ParseLastVisitedRow(
      self, parser_mediator, query, row, cache=None, database=None,
      **unused_kwargs):
    """Parses a last visited row.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row.
      cache (SQLiteCache): cache which contains cached results from querying
          the visits and urls tables.
      database (Optional[SQLiteDatabase]): database.
    """
    query_hash = hash(query)

    hidden = self._GetRowValue(query_hash, row, 'hidden')
    transition = self._GetRowValue(query_hash, row, 'transition')

    visit_identifier = self._GetRowValue(query_hash, row, 'visit_id')
    from_visit = self._GetRowValue(query_hash, row, 'from_visit')

    event_data = ChromeHistoryPageVisitedEventData()
    event_data.from_visit = self._GetUrl(from_visit, cache, database)
    event_data.offset = self._GetRowValue(query_hash, row, 'id')
    event_data.query = query
    event_data.page_transition_type = (
        transition & self._PAGE_TRANSITION_CORE_MASK)
    event_data.title = self._GetRowValue(query_hash, row, 'title')
    event_data.typed_count = self._GetRowValue(query_hash, row, 'typed_count')
    event_data.url = self._GetRowValue(query_hash, row, 'url')
    # The hidden column is declared INTEGER, hence sqlite3 returns it as a
    # Python int for typed rows; comparing only against the string '1'
    # would always evaluate to False for such values. Compare against both
    # representations to also preserve behavior for TEXT-affinity values.
    event_data.url_hidden = hidden in (1, '1')
    event_data.visit_source = self._GetVisitSource(
        visit_identifier, cache, database)

    # visit_time is stored as a WebKit timestamp (micro seconds since
    # January 1, 1601).
    timestamp = self._GetRowValue(query_hash, row, 'visit_time')
    date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
class GoogleChrome8HistoryPlugin(BaseGoogleChromeHistoryPlugin):
  """SQLite parser plugin for Google Chrome 8 - 25 history database files."""

  NAME = 'chrome_8_history'
  DATA_FORMAT = 'Google Chrome 8 - 25 history SQLite database file'

  # Tables and columns the database must contain for this plugin to apply.
  # NOTE(review): presumably enforced by the SQLitePlugin interface —
  # confirm against plaso.parsers.sqlite_plugins.interface.
  REQUIRED_STRUCTURE = {
      'downloads': frozenset([
          'id', 'full_path', 'received_bytes', 'total_bytes', 'url',
          'start_time', 'state']),
      'urls': frozenset([
          'id', 'url', 'title', 'visit_count', 'typed_count',
          'last_visit_time', 'hidden']),
      'visits': frozenset([
          'visit_time', 'from_visit', 'transition', 'id'])}

  # Pairs of (SQL query, name of the callback method that parses each
  # resulting row).
  QUERIES = [
      (('SELECT urls.id, urls.url, urls.title, urls.visit_count, '
        'urls.typed_count, urls.last_visit_time, urls.hidden, visits.'
        'visit_time, visits.from_visit, visits.transition, visits.id '
        'AS visit_id FROM urls, visits WHERE urls.id = visits.url ORDER '
        'BY visits.visit_time'), 'ParseLastVisitedRow'),
      (('SELECT id, full_path, url, start_time, received_bytes, '
        'total_bytes, state FROM downloads'), 'ParseFileDownloadedRow')]

  # Known schema variants of the supported Chrome versions. The CREATE
  # TABLE statements below are matched verbatim against candidate database
  # files, so they must not be reformatted.
  _SCHEMA_8 = {
      'downloads': (
          'CREATE TABLE downloads (id INTEGER PRIMARY KEY,full_path '
          'LONGVARCHAR NOT NULL,url LONGVARCHAR NOT NULL,start_time INTEGER '
          'NOT NULL,received_bytes INTEGER NOT NULL,total_bytes INTEGER NOT '
          'NULL,state INTEGER NOT NULL)'),
      'keyword_search_terms': (
          'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
          'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
          'LONGVARCHAR NOT NULL)'),
      'meta': (
          'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY '
          'KEY,value LONGVARCHAR)'),
      'presentation': (
          'CREATE TABLE presentation(url_id INTEGER PRIMARY KEY,pres_index '
          'INTEGER NOT NULL)'),
      'segment_usage': (
          'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
          'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
          'DEFAULT 0 NOT NULL)'),
      'segments': (
          'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
          'INTEGER NON NULL,pres_index INTEGER DEFAULT -1 NOT NULL)'),
      'urls': (
          'CREATE TABLE urls(id INTEGER PRIMARY KEY,url LONGVARCHAR,title '
          'LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT NULL,typed_count '
          'INTEGER DEFAULT 0 NOT NULL,last_visit_time INTEGER NOT NULL,hidden '
          'INTEGER DEFAULT 0 NOT NULL,favicon_id INTEGER DEFAULT 0 NOT NULL)'),
      'visit_source': (
          'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
          'NOT NULL)'),
      'visits': (
          'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
          'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
          'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,is_indexed BOOLEAN)')}

  # Same as _SCHEMA_8 except the downloads table gained end_time and
  # opened columns.
  _SCHEMA_16 = {
      'downloads': (
          'CREATE TABLE downloads (id INTEGER PRIMARY KEY,full_path '
          'LONGVARCHAR NOT NULL,url LONGVARCHAR NOT NULL,start_time INTEGER '
          'NOT NULL,received_bytes INTEGER NOT NULL,total_bytes INTEGER NOT '
          'NULL,state INTEGER NOT NULL,end_time INTEGER NOT NULL,opened '
          'INTEGER NOT NULL)'),
      'keyword_search_terms': (
          'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
          'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
          'LONGVARCHAR NOT NULL)'),
      'meta': (
          'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY '
          'KEY,value LONGVARCHAR)'),
      'presentation': (
          'CREATE TABLE presentation(url_id INTEGER PRIMARY KEY,pres_index '
          'INTEGER NOT NULL)'),
      'segment_usage': (
          'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
          'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
          'DEFAULT 0 NOT NULL)'),
      'segments': (
          'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
          'INTEGER NON NULL,pres_index INTEGER DEFAULT -1 NOT NULL)'),
      'urls': (
          'CREATE TABLE urls(id INTEGER PRIMARY KEY,url LONGVARCHAR,title '
          'LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT NULL,typed_count '
          'INTEGER DEFAULT 0 NOT NULL,last_visit_time INTEGER NOT NULL,hidden '
          'INTEGER DEFAULT 0 NOT NULL,favicon_id INTEGER DEFAULT 0 NOT NULL)'),
      'visit_source': (
          'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
          'NOT NULL)'),
      'visits': (
          'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
          'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
          'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,is_indexed BOOLEAN)')}

  # Same as _SCHEMA_16 except for the whitespace after "PRIMARY KEY," in
  # the meta table definition.
  _SCHEMA_19 = {
      'downloads': (
          'CREATE TABLE downloads (id INTEGER PRIMARY KEY,full_path '
          'LONGVARCHAR NOT NULL,url LONGVARCHAR NOT NULL,start_time INTEGER '
          'NOT NULL,received_bytes INTEGER NOT NULL,total_bytes INTEGER NOT '
          'NULL,state INTEGER NOT NULL,end_time INTEGER NOT NULL,opened '
          'INTEGER NOT NULL)'),
      'keyword_search_terms': (
          'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
          'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
          'LONGVARCHAR NOT NULL)'),
      'meta': (
          'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
          'value LONGVARCHAR)'),
      'presentation': (
          'CREATE TABLE presentation(url_id INTEGER PRIMARY KEY,pres_index '
          'INTEGER NOT NULL)'),
      'segment_usage': (
          'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
          'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
          'DEFAULT 0 NOT NULL)'),
      'segments': (
          'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
          'INTEGER NON NULL,pres_index INTEGER DEFAULT -1 NOT NULL)'),
      'urls': (
          'CREATE TABLE urls(id INTEGER PRIMARY KEY,url LONGVARCHAR,title '
          'LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT NULL,typed_count '
          'INTEGER DEFAULT 0 NOT NULL,last_visit_time INTEGER NOT NULL,hidden '
          'INTEGER DEFAULT 0 NOT NULL,favicon_id INTEGER DEFAULT 0 NOT NULL)'),
      'visit_source': (
          'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
          'NOT NULL)'),
      'visits': (
          'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
          'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
          'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,is_indexed BOOLEAN)')}

  # Same as _SCHEMA_19 except the visits table gained a visit_duration
  # column.
  _SCHEMA_20 = {
      'downloads': (
          'CREATE TABLE downloads (id INTEGER PRIMARY KEY,full_path '
          'LONGVARCHAR NOT NULL,url LONGVARCHAR NOT NULL,start_time INTEGER '
          'NOT NULL,received_bytes INTEGER NOT NULL,total_bytes INTEGER NOT '
          'NULL,state INTEGER NOT NULL,end_time INTEGER NOT NULL,opened '
          'INTEGER NOT NULL)'),
      'keyword_search_terms': (
          'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
          'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
          'LONGVARCHAR NOT NULL)'),
      'meta': (
          'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
          'value LONGVARCHAR)'),
      'presentation': (
          'CREATE TABLE presentation(url_id INTEGER PRIMARY KEY,pres_index '
          'INTEGER NOT NULL)'),
      'segment_usage': (
          'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
          'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
          'DEFAULT 0 NOT NULL)'),
      'segments': (
          'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
          'INTEGER NON NULL,pres_index INTEGER DEFAULT -1 NOT NULL)'),
      'urls': (
          'CREATE TABLE urls(id INTEGER PRIMARY KEY,url LONGVARCHAR,title '
          'LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT NULL,typed_count '
          'INTEGER DEFAULT 0 NOT NULL,last_visit_time INTEGER NOT NULL,hidden '
          'INTEGER DEFAULT 0 NOT NULL,favicon_id INTEGER DEFAULT 0 NOT NULL)'),
      'visit_source': (
          'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
          'NOT NULL)'),
      'visits': (
          'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
          'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
          'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,is_indexed '
          'BOOLEAN,visit_duration INTEGER DEFAULT 0 NOT NULL)')}

  SCHEMAS = [_SCHEMA_8, _SCHEMA_16, _SCHEMA_19, _SCHEMA_20]

  def ParseFileDownloadedRow(
      self, parser_mediator, query, row, **unused_kwargs):
    """Parses a file downloaded row.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row.
    """
    query_hash = hash(query)

    event_data = ChromeHistoryFileDownloadedEventData()
    event_data.full_path = self._GetRowValue(query_hash, row, 'full_path')
    event_data.offset = self._GetRowValue(query_hash, row, 'id')
    event_data.query = query
    event_data.received_bytes = self._GetRowValue(
        query_hash, row, 'received_bytes')
    event_data.state = self._GetRowValue(query_hash, row, 'state')
    event_data.total_bytes = self._GetRowValue(query_hash, row, 'total_bytes')
    event_data.url = self._GetRowValue(query_hash, row, 'url')

    timestamp = self._GetRowValue(query_hash, row, 'start_time')
    # start_time is interpreted as a POSIX timestamp in these schema
    # versions, unlike the WebKit timestamps used for page visits.
    date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_FILE_DOWNLOADED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
class GoogleChrome27HistoryPlugin(BaseGoogleChromeHistoryPlugin):
"""SQLite parser plugin for Google Chrome 27+ history database files."""
NAME = 'chrome_27_history'
DATA_FORMAT = 'Google Chrome 27 and later history SQLite database file'
REQUIRED_STRUCTURE = {
'downloads': frozenset([
'id', 'target_path', 'received_bytes', 'total_bytes', 'start_time',
'end_time', 'state', 'danger_type', 'interrupt_reason', 'opened']),
'downloads_url_chains': frozenset([
'id', 'url']),
'urls': frozenset([
'id', 'url', 'title', 'visit_count', 'typed_count',
'last_visit_time', 'hidden']),
'visits': frozenset([
'visit_time', 'from_visit', 'transition', 'id'])}
QUERIES = [
(('SELECT urls.id, urls.url, urls.title, urls.visit_count, '
'urls.typed_count, urls.last_visit_time, urls.hidden, visits.'
'visit_time, visits.from_visit, visits.transition, visits.id '
'AS visit_id FROM urls, visits WHERE urls.id = visits.url ORDER '
'BY visits.visit_time'), 'ParseLastVisitedRow'),
(('SELECT downloads.id AS id, downloads.start_time,'
'downloads.target_path, downloads_url_chains.url, '
'downloads.received_bytes, downloads.total_bytes, '
'downloads.end_time, downloads.state, downloads.danger_type, '
'downloads.interrupt_reason, downloads.opened FROM downloads,'
' downloads_url_chains WHERE downloads.id = '
'downloads_url_chains.id'), 'ParseFileDownloadedRow')]
_SCHEMA_27 = {
'downloads': (
'CREATE TABLE downloads (id INTEGER PRIMARY KEY,current_path '
'LONGVARCHAR NOT NULL,target_path LONGVARCHAR NOT NULL,start_time '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL,total_bytes '
'INTEGER NOT NULL,state INTEGER NOT NULL,danger_type INTEGER NOT '
'NULL, interrupt_reason INTEGER NOT NULL,end_time INTEGER NOT '
'NULL,opened INTEGER NOT NULL)'),
'downloads_url_chains': (
'CREATE TABLE downloads_url_chains (id INTEGER NOT NULL,chain_index '
'INTEGER NOT NULL,url LONGVARCHAR NOT NULL, PRIMARY KEY (id, '
'chain_index) )'),
'keyword_search_terms': (
'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
'LONGVARCHAR NOT NULL)'),
'meta': (
'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
'value LONGVARCHAR)'),
'segment_usage': (
'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
'DEFAULT 0 NOT NULL)'),
'segments': (
'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
'INTEGER NON NULL)'),
'urls': (
'CREATE TABLE urls(id INTEGER PRIMARY KEY,url LONGVARCHAR,title '
'LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT NULL,typed_count '
'INTEGER DEFAULT 0 NOT NULL,last_visit_time INTEGER NOT NULL,hidden '
'INTEGER DEFAULT 0 NOT NULL,favicon_id INTEGER DEFAULT 0 NOT NULL)'),
'visit_source': (
'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
'NOT NULL)'),
'visits': (
'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,is_indexed '
'BOOLEAN,visit_duration INTEGER DEFAULT 0 NOT NULL)')}
_SCHEMA_31 = {
'downloads': (
'CREATE TABLE downloads (id INTEGER PRIMARY KEY,current_path '
'LONGVARCHAR NOT NULL,target_path LONGVARCHAR NOT NULL,start_time '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL,total_bytes '
'INTEGER NOT NULL,state INTEGER NOT NULL,danger_type INTEGER NOT '
'NULL, interrupt_reason INTEGER NOT NULL,end_time INTEGER NOT '
'NULL,opened INTEGER NOT NULL,referrer VARCHAR NOT NULL,by_ext_id '
'VARCHAR NOT NULL,by_ext_name VARCHAR NOT NULL,etag VARCHAR NOT '
'NULL,last_modified VARCHAR NOT NULL)'),
'downloads_url_chains': (
'CREATE TABLE downloads_url_chains (id INTEGER NOT NULL,chain_index '
'INTEGER NOT NULL,url LONGVARCHAR NOT NULL, PRIMARY KEY (id, '
'chain_index) )'),
'keyword_search_terms': (
'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
'LONGVARCHAR NOT NULL)'),
'meta': (
'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
'value LONGVARCHAR)'),
'segment_usage': (
'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
'DEFAULT 0 NOT NULL)'),
'segments': (
'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
'INTEGER NON NULL)'),
'urls': (
'CREATE TABLE urls(id INTEGER PRIMARY KEY,url LONGVARCHAR,title '
'LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT NULL,typed_count '
'INTEGER DEFAULT 0 NOT NULL,last_visit_time INTEGER NOT NULL,hidden '
'INTEGER DEFAULT 0 NOT NULL,favicon_id INTEGER DEFAULT 0 NOT NULL)'),
'visit_source': (
'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
'NOT NULL)'),
'visits': (
'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,visit_duration '
'INTEGER DEFAULT 0 NOT NULL)')}
_SCHEMA_37 = {
'downloads': (
'CREATE TABLE downloads (id INTEGER PRIMARY KEY,current_path '
'LONGVARCHAR NOT NULL,target_path LONGVARCHAR NOT NULL,start_time '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL,total_bytes '
'INTEGER NOT NULL,state INTEGER NOT NULL,danger_type INTEGER NOT '
'NULL,interrupt_reason INTEGER NOT NULL,end_time INTEGER NOT '
'NULL,opened INTEGER NOT NULL,referrer VARCHAR NOT NULL,by_ext_id '
'VARCHAR NOT NULL,by_ext_name VARCHAR NOT NULL,etag VARCHAR NOT '
'NULL,last_modified VARCHAR NOT NULL,mime_type VARCHAR(255) NOT '
'NULL,original_mime_type VARCHAR(255) NOT NULL)'),
'downloads_url_chains': (
'CREATE TABLE downloads_url_chains (id INTEGER NOT NULL,chain_index '
'INTEGER NOT NULL,url LONGVARCHAR NOT NULL, PRIMARY KEY (id, '
'chain_index) )'),
'keyword_search_terms': (
'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
'LONGVARCHAR NOT NULL)'),
'meta': (
'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
'value LONGVARCHAR)'),
'segment_usage': (
'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
'DEFAULT 0 NOT NULL)'),
'segments': (
'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
'INTEGER NON NULL)'),
'urls': (
'CREATE TABLE urls(id INTEGER PRIMARY KEY,url LONGVARCHAR,title '
'LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT NULL,typed_count '
'INTEGER DEFAULT 0 NOT NULL,last_visit_time INTEGER NOT NULL,hidden '
'INTEGER DEFAULT 0 NOT NULL,favicon_id INTEGER DEFAULT 0 NOT NULL)'),
'visit_source': (
'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
'NOT NULL)'),
'visits': (
'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,visit_duration '
'INTEGER DEFAULT 0 NOT NULL)')}
_SCHEMA_51 = {
'downloads': (
'CREATE TABLE downloads (id INTEGER PRIMARY KEY,guid VARCHAR NOT '
'NULL,current_path LONGVARCHAR NOT NULL,target_path LONGVARCHAR NOT '
'NULL,start_time INTEGER NOT NULL,received_bytes INTEGER NOT '
'NULL,total_bytes INTEGER NOT NULL,state INTEGER NOT '
'NULL,danger_type INTEGER NOT NULL,interrupt_reason INTEGER NOT '
'NULL,hash BLOB NOT NULL,end_time INTEGER NOT NULL,opened INTEGER '
'NOT NULL,referrer VARCHAR NOT NULL,site_url VARCHAR NOT '
'NULL,tab_url VARCHAR NOT NULL,tab_referrer_url VARCHAR NOT '
'NULL,http_method VARCHAR NOT NULL,by_ext_id VARCHAR NOT '
'NULL,by_ext_name VARCHAR NOT NULL,etag VARCHAR NOT '
'NULL,last_modified VARCHAR NOT NULL,mime_type VARCHAR(255) NOT '
'NULL,original_mime_type VARCHAR(255) NOT NULL)'),
'downloads_url_chains': (
'CREATE TABLE downloads_url_chains (id INTEGER NOT NULL,chain_index '
'INTEGER NOT NULL,url LONGVARCHAR NOT NULL, PRIMARY KEY (id, '
'chain_index) )'),
'keyword_search_terms': (
'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
'LONGVARCHAR NOT NULL)'),
'meta': (
'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
'value LONGVARCHAR)'),
'segment_usage': (
'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
'DEFAULT 0 NOT NULL)'),
'segments': (
'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
'INTEGER NON NULL)'),
'urls': (
'CREATE TABLE urls(id INTEGER PRIMARY KEY,url LONGVARCHAR,title '
'LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT NULL,typed_count '
'INTEGER DEFAULT 0 NOT NULL,last_visit_time INTEGER NOT NULL,hidden '
'INTEGER DEFAULT 0 NOT NULL,favicon_id INTEGER DEFAULT 0 NOT NULL)'),
'visit_source': (
'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
'NOT NULL)'),
'visits': (
'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,visit_duration '
'INTEGER DEFAULT 0 NOT NULL)')}
_SCHEMA_58 = {
'downloads': (
'CREATE TABLE downloads (id INTEGER PRIMARY KEY,guid VARCHAR NOT '
'NULL,current_path LONGVARCHAR NOT NULL,target_path LONGVARCHAR NOT '
'NULL,start_time INTEGER NOT NULL,received_bytes INTEGER NOT '
'NULL,total_bytes INTEGER NOT NULL,state INTEGER NOT '
'NULL,danger_type INTEGER NOT NULL,interrupt_reason INTEGER NOT '
'NULL,hash BLOB NOT NULL,end_time INTEGER NOT NULL,opened INTEGER '
'NOT NULL,referrer VARCHAR NOT NULL,site_url VARCHAR NOT '
'NULL,tab_url VARCHAR NOT NULL,tab_referrer_url VARCHAR NOT '
'NULL,http_method VARCHAR NOT NULL,by_ext_id VARCHAR NOT '
'NULL,by_ext_name VARCHAR NOT NULL,etag VARCHAR NOT '
'NULL,last_modified VARCHAR NOT NULL,mime_type VARCHAR(255) NOT '
'NULL,original_mime_type VARCHAR(255) NOT NULL)'),
'downloads_slices': (
'CREATE TABLE downloads_slices (download_id INTEGER NOT NULL,offset '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL,PRIMARY KEY '
'(download_id, offset) )'),
'downloads_url_chains': (
'CREATE TABLE downloads_url_chains (id INTEGER NOT NULL,chain_index '
'INTEGER NOT NULL,url LONGVARCHAR NOT NULL, PRIMARY KEY (id, '
'chain_index) )'),
'keyword_search_terms': (
'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
'LONGVARCHAR NOT NULL)'),
'meta': (
'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
'value LONGVARCHAR)'),
'segment_usage': (
'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
'DEFAULT 0 NOT NULL)'),
'segments': (
'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
'INTEGER NON NULL)'),
'urls': (
'CREATE TABLE urls(id INTEGER PRIMARY KEY,url LONGVARCHAR,title '
'LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT NULL,typed_count '
'INTEGER DEFAULT 0 NOT NULL,last_visit_time INTEGER NOT NULL,hidden '
'INTEGER DEFAULT 0 NOT NULL,favicon_id INTEGER DEFAULT 0 NOT NULL)'),
'visit_source': (
'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
'NOT NULL)'),
'visits': (
'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,visit_duration '
'INTEGER DEFAULT 0 NOT NULL)')}
_SCHEMA_59 = {
'downloads': (
'CREATE TABLE downloads (id INTEGER PRIMARY KEY,guid VARCHAR NOT '
'NULL,current_path LONGVARCHAR NOT NULL,target_path LONGVARCHAR NOT '
'NULL,start_time INTEGER NOT NULL,received_bytes INTEGER NOT '
'NULL,total_bytes INTEGER NOT NULL,state INTEGER NOT '
'NULL,danger_type INTEGER NOT NULL,interrupt_reason INTEGER NOT '
'NULL,hash BLOB NOT NULL,end_time INTEGER NOT NULL,opened INTEGER '
'NOT NULL,last_access_time INTEGER NOT NULL,transient INTEGER NOT '
'NULL,referrer VARCHAR NOT NULL,site_url VARCHAR NOT NULL,tab_url '
'VARCHAR NOT NULL,tab_referrer_url VARCHAR NOT NULL,http_method '
'VARCHAR NOT NULL,by_ext_id VARCHAR NOT NULL,by_ext_name VARCHAR '
'NOT NULL,etag VARCHAR NOT NULL,last_modified VARCHAR NOT '
'NULL,mime_type VARCHAR(255) NOT NULL,original_mime_type '
'VARCHAR(255) NOT NULL)'),
'downloads_slices': (
'CREATE TABLE downloads_slices (download_id INTEGER NOT NULL,offset '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL,PRIMARY KEY '
'(download_id, offset) )'),
'downloads_url_chains': (
'CREATE TABLE downloads_url_chains (id INTEGER NOT NULL,chain_index '
'INTEGER NOT NULL,url LONGVARCHAR NOT NULL, PRIMARY KEY (id, '
'chain_index) )'),
'keyword_search_terms': (
'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
'LONGVARCHAR NOT NULL)'),
'meta': (
'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
'value LONGVARCHAR)'),
'segment_usage': (
'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
'DEFAULT 0 NOT NULL)'),
'segments': (
'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
'INTEGER NON NULL)'),
'typed_url_sync_metadata': (
'CREATE TABLE typed_url_sync_metadata (storage_key INTEGER PRIMARY '
'KEY NOT NULL,value BLOB)'),
'urls': (
'CREATE TABLE urls(id INTEGER PRIMARY KEY AUTOINCREMENT,url '
'LONGVARCHAR,title LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT '
'NULL,typed_count INTEGER DEFAULT 0 NOT NULL,last_visit_time '
'INTEGER NOT NULL,hidden INTEGER DEFAULT 0 NOT NULL)'),
'visit_source': (
'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
'NOT NULL)'),
'visits': (
'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,visit_duration '
'INTEGER DEFAULT 0 NOT NULL)')}
# Observed in Chrome 63.0.3239.108 meta.version 37
_SCHEMA_63 = {
'downloads': (
'CREATE TABLE downloads (id INTEGER PRIMARY KEY,guid VARCHAR NOT '
'NULL,current_path LONGVARCHAR NOT NULL,target_path LONGVARCHAR NOT '
'NULL,start_time INTEGER NOT NULL,received_bytes INTEGER NOT '
'NULL,total_bytes INTEGER NOT NULL,state INTEGER NOT '
'NULL,danger_type INTEGER NOT NULL,interrupt_reason INTEGER NOT '
'NULL,hash BLOB NOT NULL,end_time INTEGER NOT NULL,opened INTEGER '
'NOT NULL,referrer VARCHAR NOT NULL,site_url VARCHAR NOT '
'NULL,tab_url VARCHAR NOT NULL,tab_referrer_url VARCHAR NOT '
'NULL,http_method VARCHAR NOT NULL,by_ext_id VARCHAR NOT '
'NULL,by_ext_name VARCHAR NOT NULL,etag VARCHAR NOT '
'NULL,last_modified VARCHAR NOT NULL,mime_type VARCHAR(255) NOT '
'NULL,original_mime_type VARCHAR(255) NOT NULL, last_access_time '
'INTEGER NOT NULL DEFAULT 0, transient INTEGER NOT NULL DEFAULT 0)'),
'downloads_slices': (
'CREATE TABLE downloads_slices (download_id INTEGER NOT NULL,offset '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL,PRIMARY KEY '
'(download_id, offset) )'),
'downloads_url_chains': (
'CREATE TABLE downloads_url_chains (id INTEGER NOT NULL,chain_index '
'INTEGER NOT NULL,url LONGVARCHAR NOT NULL, PRIMARY KEY (id, '
'chain_index) )'),
'keyword_search_terms': (
'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
'LONGVARCHAR NOT NULL)'),
'meta': (
'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
'value LONGVARCHAR)'),
'segment_usage': (
'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
'DEFAULT 0 NOT NULL)'),
'segments': (
'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
'INTEGER NON NULL)'),
'typed_url_sync_metadata': (
'CREATE TABLE typed_url_sync_metadata (storage_key INTEGER PRIMARY '
'KEY NOT NULL,value BLOB)'),
'urls': (
'CREATE TABLE "urls"(id INTEGER PRIMARY KEY AUTOINCREMENT,url '
'LONGVARCHAR,title LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT '
'NULL,typed_count INTEGER DEFAULT 0 NOT NULL,last_visit_time '
'INTEGER NOT NULL,hidden INTEGER DEFAULT 0 NOT NULL)'),
'visit_source': (
'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
'NOT NULL)'),
'visits': (
'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,visit_duration '
'INTEGER DEFAULT 0 NOT NULL)')}
# Observed in Chrome 65.0.3325.162
_SCHEMA_65 = {
'downloads': (
'CREATE TABLE downloads (id INTEGER PRIMARY KEY,current_path '
'LONGVARCHAR NOT NULL,target_path LONGVARCHAR NOT NULL,start_time '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL,total_bytes '
'INTEGER NOT NULL,state INTEGER NOT NULL,danger_type INTEGER NOT '
'NULL,interrupt_reason INTEGER NOT NULL,end_time INTEGER NOT '
'NULL,opened INTEGER NOT NULL,referrer VARCHAR NOT NULL,by_ext_id '
'VARCHAR NOT NULL,by_ext_name VARCHAR NOT NULL,etag VARCHAR NOT '
'NULL,last_modified VARCHAR NOT NULL,mime_type VARCHAR(255) NOT '
'NULL,original_mime_type VARCHAR(255) NOT NULL, guid VARCHAR NOT '
'NULL DEFAULT \'\', hash BLOB NOT NULL DEFAULT X\'\', http_method '
'VARCHAR NOT NULL DEFAULT \'\', tab_url VARCHAR NOT NULL '
'DEFAULT \'\', tab_referrer_url VARCHAR NOT NULL DEFAULT \'\', '
'site_url VARCHAR NOT NULL DEFAULT \'\', last_access_time INTEGER '
'NOT NULL DEFAULT 0, transient INTEGER NOT NULL DEFAULT 0)'),
'downloads_slices': (
'CREATE TABLE downloads_slices (download_id INTEGER NOT NULL,offset '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL,PRIMARY KEY '
'(download_id, offset) )'),
'downloads_url_chains': (
'CREATE TABLE downloads_url_chains (id INTEGER NOT NULL,chain_index '
'INTEGER NOT NULL,url LONGVARCHAR NOT NULL, PRIMARY KEY (id, '
'chain_index) )'),
'keyword_search_terms': (
'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
'LONGVARCHAR NOT NULL)'),
'meta': (
'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
'value LONGVARCHAR)'),
'segment_usage': (
'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
'DEFAULT 0 NOT NULL)'),
'segments': (
'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
'INTEGER NON NULL)'),
'typed_url_sync_metadata': (
'CREATE TABLE typed_url_sync_metadata (storage_key INTEGER PRIMARY '
'KEY NOT NULL,value BLOB)'),
'urls': (
'CREATE TABLE "urls"(id INTEGER PRIMARY KEY AUTOINCREMENT,url '
'LONGVARCHAR,title LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT '
'NULL,typed_count INTEGER DEFAULT 0 NOT NULL,last_visit_time '
'INTEGER NOT NULL,hidden INTEGER DEFAULT 0 NOT NULL)'),
'visit_source': (
'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
'NOT NULL)'),
'visits': (
'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,visit_duration '
'INTEGER DEFAULT 0 NOT NULL)')}
# Observed in Chrome 67.0.3396.62.
_SCHEMA_67 = {
'downloads': (
'CREATE TABLE downloads (id INTEGER PRIMARY KEY,current_path '
'LONGVARCHAR NOT NULL,target_path LONGVARCHAR NOT NULL,start_time '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL,total_bytes '
'INTEGER NOT NULL,state INTEGER NOT NULL,danger_type INTEGER NOT '
'NULL, interrupt_reason INTEGER NOT NULL,end_time INTEGER NOT '
'NULL,opened INTEGER NOT NULL,referrer VARCHAR NOT NULL,by_ext_id '
'VARCHAR NOT NULL,by_ext_name VARCHAR NOT NULL,etag VARCHAR NOT '
'NULL,last_modified VARCHAR NOT NULL, mime_type VARCHAR(255) NOT '
'NULL DEFAULT "", original_mime_type VARCHAR(255) NOT NULL DEFAULT '
'"", guid VARCHAR NOT NULL DEFAULT \'\', hash BLOB NOT NULL DEFAULT '
'X\'\', http_method VARCHAR NOT NULL DEFAULT \'\', tab_url VARCHAR '
'NOT NULL DEFAULT \'\', tab_referrer_url VARCHAR NOT NULL DEFAULT '
'\'\', site_url VARCHAR NOT NULL DEFAULT \'\', last_access_time '
'INTEGER NOT NULL DEFAULT 0, transient INTEGER NOT NULL DEFAULT 0)'),
'downloads_slices': (
'CREATE TABLE downloads_slices (download_id INTEGER NOT NULL,offset '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL, finished INTEGER '
'NOT NULL DEFAULT 0,PRIMARY KEY (download_id, offset) )'),
'downloads_url_chains': (
'CREATE TABLE downloads_url_chains (id INTEGER NOT NULL,chain_index '
'INTEGER NOT NULL,url LONGVARCHAR NOT NULL, PRIMARY KEY (id, '
'chain_index) )'),
'keyword_search_terms': (
'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
'LONGVARCHAR NOT NULL)'),
'meta': (
'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
'value LONGVARCHAR)'),
'segment_usage': (
'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
'DEFAULT 0 NOT NULL)'),
'segments': (
'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
'INTEGER NON NULL)'),
'typed_url_sync_metadata': (
'CREATE TABLE typed_url_sync_metadata (storage_key INTEGER PRIMARY '
'KEY NOT NULL,value BLOB)'),
'urls': (
'CREATE TABLE "urls"(id INTEGER PRIMARY KEY AUTOINCREMENT,url '
'LONGVARCHAR,title LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT '
'NULL,typed_count INTEGER DEFAULT 0 NOT NULL,last_visit_time '
'INTEGER NOT NULL,hidden INTEGER DEFAULT 0 NOT NULL)'),
'visit_source': (
'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
'NOT NULL)'),
'visits': (
'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,visit_duration '
'INTEGER DEFAULT 0 NOT NULL)')}
# Observed in Linux Chrome 67.0.3396.99 meta.version 39
_SCHEMA_67_2 = {
'downloads': (
'CREATE TABLE downloads (id INTEGER PRIMARY KEY,current_path '
'LONGVARCHAR NOT NULL,target_path LONGVARCHAR NOT NULL,start_time '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL,total_bytes '
'INTEGER NOT NULL,state INTEGER NOT NULL,danger_type INTEGER NOT '
'NULL,interrupt_reason INTEGER NOT NULL,end_time INTEGER NOT '
'NULL,opened INTEGER NOT NULL,referrer VARCHAR NOT NULL,by_ext_id '
'VARCHAR NOT NULL,by_ext_name VARCHAR NOT NULL,etag VARCHAR NOT '
'NULL,last_modified VARCHAR NOT NULL,mime_type VARCHAR(255) NOT '
'NULL,original_mime_type VARCHAR(255) NOT NULL, guid VARCHAR NOT '
'NULL DEFAULT \'\', hash BLOB NOT NULL DEFAULT X\'\', http_method '
'VARCHAR NOT NULL DEFAULT \'\', tab_url VARCHAR NOT NULL DEFAULT '
'\'\', tab_referrer_url VARCHAR NOT NULL DEFAULT \'\', site_url '
'VARCHAR NOT NULL DEFAULT \'\', last_access_time INTEGER NOT NULL '
'DEFAULT 0, transient INTEGER NOT NULL DEFAULT 0)'),
'downloads_slices': (
'CREATE TABLE downloads_slices (download_id INTEGER NOT NULL,offset '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL, finished INTEGER '
'NOT NULL DEFAULT 0,PRIMARY KEY (download_id, offset) )'),
'downloads_url_chains': (
'CREATE TABLE downloads_url_chains (id INTEGER NOT NULL,chain_index '
'INTEGER NOT NULL,url LONGVARCHAR NOT NULL, PRIMARY KEY (id, '
'chain_index) )'),
'keyword_search_terms': (
'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
'LONGVARCHAR NOT NULL)'),
'meta': (
'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
'value LONGVARCHAR)'),
'segment_usage': (
'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
'DEFAULT 0 NOT NULL)'),
'segments': (
'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
'INTEGER NON NULL)'),
'typed_url_sync_metadata': (
'CREATE TABLE typed_url_sync_metadata (storage_key INTEGER PRIMARY '
'KEY NOT NULL,value BLOB)'),
'urls': (
'CREATE TABLE "urls"(id INTEGER PRIMARY KEY AUTOINCREMENT,url '
'LONGVARCHAR,title LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT '
'NULL,typed_count INTEGER DEFAULT 0 NOT NULL,last_visit_time '
'INTEGER NOT NULL,hidden INTEGER DEFAULT 0 NOT NULL)'),
'visit_source': (
'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
'NOT NULL)'),
'visits': (
'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,visit_duration '
'INTEGER DEFAULT 0 NOT NULL)')}
# Observed in MacOS Chrome 67.0.3396.99 meta.version 39
_SCHEMA_67_3 = {
'downloads': (
'CREATE TABLE downloads (id INTEGER PRIMARY KEY,guid VARCHAR NOT '
'NULL,current_path LONGVARCHAR NOT NULL,target_path LONGVARCHAR NOT '
'NULL,start_time INTEGER NOT NULL,received_bytes INTEGER NOT '
'NULL,total_bytes INTEGER NOT NULL,state INTEGER NOT '
'NULL,danger_type INTEGER NOT NULL,interrupt_reason INTEGER NOT '
'NULL,hash BLOB NOT NULL,end_time INTEGER NOT NULL,opened INTEGER '
'NOT NULL,last_access_time INTEGER NOT NULL,transient INTEGER NOT '
'NULL,referrer VARCHAR NOT NULL,site_url VARCHAR NOT NULL,tab_url '
'VARCHAR NOT NULL,tab_referrer_url VARCHAR NOT NULL,http_method '
'VARCHAR NOT NULL,by_ext_id VARCHAR NOT NULL,by_ext_name VARCHAR '
'NOT NULL,etag VARCHAR NOT NULL,last_modified VARCHAR NOT '
'NULL,mime_type VARCHAR(255) NOT NULL,original_mime_type '
'VARCHAR(255) NOT NULL)'),
'downloads_slices': (
'CREATE TABLE downloads_slices (download_id INTEGER NOT NULL,offset '
'INTEGER NOT NULL,received_bytes INTEGER NOT NULL, finished INTEGER '
'NOT NULL DEFAULT 0,PRIMARY KEY (download_id, offset) )'),
'downloads_url_chains': (
'CREATE TABLE downloads_url_chains (id INTEGER NOT NULL,chain_index '
'INTEGER NOT NULL,url LONGVARCHAR NOT NULL, PRIMARY KEY (id, '
'chain_index) )'),
'keyword_search_terms': (
'CREATE TABLE keyword_search_terms (keyword_id INTEGER NOT '
'NULL,url_id INTEGER NOT NULL,lower_term LONGVARCHAR NOT NULL,term '
'LONGVARCHAR NOT NULL)'),
'meta': (
'CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, '
'value LONGVARCHAR)'),
'segment_usage': (
'CREATE TABLE segment_usage (id INTEGER PRIMARY KEY,segment_id '
'INTEGER NOT NULL,time_slot INTEGER NOT NULL,visit_count INTEGER '
'DEFAULT 0 NOT NULL)'),
'segments': (
'CREATE TABLE segments (id INTEGER PRIMARY KEY,name VARCHAR,url_id '
'INTEGER NON NULL)'),
'typed_url_sync_metadata': (
'CREATE TABLE typed_url_sync_metadata (storage_key INTEGER PRIMARY '
'KEY NOT NULL,value BLOB)'),
'urls': (
'CREATE TABLE urls(id INTEGER PRIMARY KEY AUTOINCREMENT,url '
'LONGVARCHAR,title LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT '
'NULL,typed_count INTEGER DEFAULT 0 NOT NULL,last_visit_time '
'INTEGER NOT NULL,hidden INTEGER DEFAULT 0 NOT NULL)'),
'visit_source': (
'CREATE TABLE visit_source(id INTEGER PRIMARY KEY,source INTEGER '
'NOT NULL)'),
'visits': (
'CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT '
'NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition '
'INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,visit_duration '
'INTEGER DEFAULT 0 NOT NULL)')}
# All known Chrome History database schemas, oldest first. Presumably used
# to match an on-disk database against a supported schema version — confirm
# against the plugin's CheckSchema/required-tables logic.
SCHEMAS = [
    _SCHEMA_27, _SCHEMA_31, _SCHEMA_37, _SCHEMA_51, _SCHEMA_58, _SCHEMA_59,
    _SCHEMA_63, _SCHEMA_65, _SCHEMA_67, _SCHEMA_67_2, _SCHEMA_67_3]
def ParseFileDownloadedRow(
    self, parser_mediator, query, row, **unused_kwargs):
  """Parses a file downloaded row.

  Produces up to two events sharing the same event data: a download start
  event when start_time is set, and a download end event when end_time is
  set.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    query (str): query that created the row.
    row (sqlite3.Row): row.
  """
  # The query hash is used as a cache key for per-query column lookups.
  query_hash = hash(query)

  event_data = ChromeHistoryFileDownloadedEventData()
  event_data.danger_type = self._GetRowValue(query_hash, row, 'danger_type')
  event_data.full_path = self._GetRowValue(query_hash, row, 'target_path')
  # NOTE(review): the downloads table row identifier is stored as the
  # event data offset.
  event_data.offset = self._GetRowValue(query_hash, row, 'id')
  event_data.interrupt_reason = self._GetRowValue(
      query_hash, row, 'interrupt_reason')
  event_data.opened = self._GetRowValue(query_hash, row, 'opened')
  event_data.query = query
  event_data.received_bytes = self._GetRowValue(
      query_hash, row, 'received_bytes')
  event_data.state = self._GetRowValue(query_hash, row, 'state')
  event_data.total_bytes = self._GetRowValue(query_hash, row, 'total_bytes')
  event_data.url = self._GetRowValue(query_hash, row, 'url')

  # A falsy (zero or missing) timestamp is treated as "not set" and the
  # corresponding event is not produced. Timestamps are interpreted by
  # dfdatetime's WebKitTime class.
  start_timestamp = self._GetRowValue(query_hash, row, 'start_time')
  if start_timestamp:
    start_date_time = dfdatetime_webkit_time.WebKitTime(
        timestamp=start_timestamp)
    start_event = time_events.DateTimeValuesEvent(
        start_date_time, definitions.TIME_DESCRIPTION_START)
    parser_mediator.ProduceEventWithEventData(start_event, event_data)

  end_timestamp = self._GetRowValue(query_hash, row, 'end_time')
  if end_timestamp:
    end_date_time = dfdatetime_webkit_time.WebKitTime(
        timestamp=end_timestamp)
    end_event = time_events.DateTimeValuesEvent(
        end_date_time, definitions.TIME_DESCRIPTION_END)
    parser_mediator.ProduceEventWithEventData(end_event, event_data)
# Register both supported Chrome History database plugin variants with the
# SQLite parser at import time.
sqlite.SQLiteParser.RegisterPlugins([
    GoogleChrome8HistoryPlugin, GoogleChrome27HistoryPlugin])
|
epfl-lts2/room_transfer_function_toolkit | refs/heads/master | room_transfer_function_toolkit_python/room_transfer_function_toolkit/io_methods.py | 1 | import datetime
import math
import numpy as np
import os
import pickle
import scipy.io
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
def write_load_parameters(data_path, file_name):
    '''
    Saves the parameters of the experiment into a pickle file and returns
    them. If needed, change the parameters here.

    Input:
        data_path - folder where the file with the parameters will be written
        file_name - name of the file where parameters will be written
    Output:
        experiment_parameters - parameters for the experiment organized in a
            dictionary
    '''
    # exist_ok avoids the check-then-create race of os.path.exists followed
    # by os.makedirs.
    os.makedirs(data_path, exist_ok=True)
    # SIGNAL_NUMBER: look at 'other_methods.get_test_signal()'
    LOWPASS_FILTER_BANDWIDTH = 50  # key parameter of the system
    # We want to reduce the frequency from the frequency of a microphone
    # (44.1 kHz) to a low frequency - for room mode tools.
    # Reduce frequency = filter + downsample; 2.1 x bandwidth keeps a small
    # margin above the Nyquist rate.
    adjusted_sampling_frequency = 2.1 * LOWPASS_FILTER_BANDWIDTH
    SAMPLING_FREQUENCY = 44100  # microphone sampling rate (44.1 kHz)
    STEP = math.floor(SAMPLING_FREQUENCY / adjusted_sampling_frequency)
    REVERBERATON_TIME_60 = 1.25
    # Number of retained time samples after downsampling by STEP.
    TIME_MOMENTS = math.ceil(
        REVERBERATON_TIME_60 * SAMPLING_FREQUENCY / STEP)
    NUMBER_OF_WALLS = 6
    experiment_parameters = {
        'Lx': 3.9, 'Ly': 8.15, 'Lz': 3.35,
        'REVERBERATON_TIME_60': REVERBERATON_TIME_60, 'TEMPERATURE': 25,
        'RECEIVER_NUMBER': 46,
        'SIGNAL_NUMBER': 0,
        'LOWPASS_FILTER_BANDWIDTH': LOWPASS_FILTER_BANDWIDTH,
        'N': 3, 'WALL_IMPEDANCES': 0.01 * np.ones((NUMBER_OF_WALLS, 1)),
        'SAMPLING_FREQUENCY': SAMPLING_FREQUENCY, 'STEP': STEP,
        'TIME_MOMENTS': TIME_MOMENTS,
        'TRAINING_DATA_FILE_PATH': '../data/measurements_training.pckl',
        'EVALUATION_DATA_FILE_PATH': '../data/measurements_evaluation.pckl',
        'UNLOCBOX_PATH': '../../unlocbox-1.7.3/unlocbox/init_unlocbox.m'}
    # Context manager guarantees the handle is closed even if dump() raises.
    with open(os.path.join(data_path, file_name), 'wb') as param_file:
        pickle.dump(experiment_parameters, param_file)
    return experiment_parameters
def draw_lattice(fig, Lx, Ly, Lz, SPATIAL_SAMPLING_STEP):
    '''
    Visualizes the sampling lattice on the given figure.

    Input:
        fig - 3D axes handler supporting plot(x, y, z)
        Lx, Ly, Lz - room dimensions (used as range() bounds, so they are
            assumed to be integers - TODO confirm callers always pass ints)
        SPATIAL_SAMPLING_STEP - sampling step over all three coordinates
    Output:
        fig - the same handler, with the lattice drawn on it
    '''
    # Axes.hold() was deprecated in matplotlib 2.0 and removed in 3.0;
    # repeated plot() calls accumulate by default, so no hold() is needed.
    for g in range(0, Lx, SPATIAL_SAMPLING_STEP):
        for i in range(0, Lz, SPATIAL_SAMPLING_STEP):
            # lines running along the y (depth) axis
            fig.plot([g, g], [0, Ly], [i, i], color='grey')
    for g in range(0, Ly, SPATIAL_SAMPLING_STEP):
        for i in range(0, Lz, SPATIAL_SAMPLING_STEP):
            # lines running along the x (width) axis
            fig.plot([0, Lx], [g, g], [i, i], color='grey')
    for g in range(0, Ly, SPATIAL_SAMPLING_STEP):
        for i in range(0, Lx, SPATIAL_SAMPLING_STEP):
            # vertical lines running along the z (height) axis
            fig.plot([i, i], [g, g], [0, Lz], color='grey')
    return fig
def draw_sensor_placement(Lx, Ly, Lz, pos_s, pos_mt, pos_me, SPATIAL_SAMPLING_STEP):
    '''
    Draws placement of sensors and sound sources across the given room.

    Input:
        Lx, Ly, Lz - room dimensions
        pos_s - positions of sound sources (object with get_coordinates())
        pos_mt - positions of microphones from the training set
        pos_me - positions of microphones from the evaluation set
            (assumed to have the same length as pos_mt - TODO confirm)
        SPATIAL_SAMPLING_STEP - sampling step over all three coordinates
            (currently unused; kept for interface compatibility)
    Output:
        interactive 3D scatter plot shown on screen via plt.show()
    '''
    # NOTE(review): the original version built wall/floor coordinate
    # matrices (x, y, z) and carried commented-out texture-decoration code,
    # but the matrices were overwritten before ever being used; that dead
    # code has been removed.
    # Sound source coordinates.
    [xs, ys, zs] = pos_s.get_coordinates()
    # Collect training (mt) and evaluation (me) microphone coordinates.
    xmt, ymt, zmt = [], [], []
    xme, yme, zme = [], [], []
    # Indexed loop (rather than zip) preserves the original IndexError if
    # pos_me is shorter than pos_mt.
    for idx in range(len(pos_mt)):
        [x, y, z] = (pos_mt[idx]).get_coordinates()
        xmt.append(x)
        ymt.append(y)
        zmt.append(z)
        [x, y, z] = (pos_me[idx]).get_coordinates()
        xme.append(x)
        yme.append(y)
        zme.append(z)
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    # Axes.hold() was deprecated in matplotlib 2.0 and removed in 3.0;
    # scatter() calls accumulate by default, so ax.hold(True) is gone.
    ax.scatter(xs, ys, zs, c='c', marker='o')     # sources (cyan)
    ax.scatter(xmt, ymt, zmt, c='r', marker='o')  # training mics (red)
    ax.scatter(xme, yme, zme, c='g', marker='o')  # evaluation mics (green)
    ax.set_xlabel('width')
    ax.set_ylabel('depth')
    ax.set_zlabel('height')
    ax.set_xlim(0, Lx)
    ax.set_ylim(0, Ly)
    ax.set_zlim(0, Lz)
    plt.show()
    return
ShridharSattur/WhereHows | refs/heads/master | wherehows-etl/src/main/resources/jython/requests/packages/chardet/constants.py | 3007 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Non-zero enables debug output in the probers.
_debug = 0

# Overall detection states shared by the universal detector and probers.
eDetecting = 0  # still consuming input, no conclusion yet
eFoundIt = 1    # a charset has been positively identified
eNotMe = 2      # this prober has ruled itself out

# Coding state machine states.
eStart = 0  # initial state / a complete character was just decoded
eError = 1  # byte sequence is illegal for this coding scheme
eItsMe = 2  # byte sequence is unique to this coding scheme

# Confidence level above which a prober's answer is accepted immediately.
SHORTCUT_THRESHOLD = 0.95
|
Neamar/django | refs/heads/master | tests/apps/tests.py | 126 | from __future__ import unicode_literals
import os
import warnings
from unittest import skipUnless
from django.apps import AppConfig, apps
from django.apps.registry import Apps
from django.contrib.admin.models import LogEntry
from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured
from django.db import models
from django.test import SimpleTestCase, override_settings
from django.test.utils import extend_sys_path
from django.utils import six
from django.utils._os import upath
from .default_config_app.apps import CustomConfig
from .models import SoAlternative, TotallyNormal, new_apps
# Small list with a variety of cases for tests that iterate on installed apps.
# Intentionally not in alphabetical order to check if the order is preserved.
SOME_INSTALLED_APPS = [
    'apps.apps.MyAdmin',
    'apps.apps.MyAuth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

# Dotted app names matching SOME_INSTALLED_APPS: the first two entries above
# are AppConfig classes, so their app names differ from the entry strings.
SOME_INSTALLED_APPS_NAMES = [
    'django.contrib.admin',
    'django.contrib.auth',
] + SOME_INSTALLED_APPS[2:]

# Directory containing this test module; used to build extra sys.path entries.
HERE = os.path.dirname(upath(__file__))
class AppsTests(SimpleTestCase):
    """Tests for the master application registry (``django.apps.apps``)."""

    def test_singleton_master(self):
        """
        Ensures that only one master registry can exist.
        """
        with self.assertRaises(RuntimeError):
            Apps(installed_apps=None)

    def test_ready(self):
        """
        Tests the ready property of the master registry.
        """
        # The master app registry is always ready when the tests run.
        self.assertTrue(apps.ready)
        # Non-master app registries are populated in __init__.
        self.assertTrue(Apps().ready)

    def test_bad_app_config(self):
        """
        Tests when INSTALLED_APPS contains an incorrect app config.
        """
        with self.assertRaises(ImproperlyConfigured):
            with self.settings(INSTALLED_APPS=['apps.apps.BadConfig']):
                pass

    def test_not_an_app_config(self):
        """
        Tests when INSTALLED_APPS contains a class that isn't an app config.
        """
        with self.assertRaises(ImproperlyConfigured):
            with self.settings(INSTALLED_APPS=['apps.apps.NotAConfig']):
                pass

    def test_no_such_app(self):
        """
        Tests when INSTALLED_APPS contains an app that doesn't exist, either
        directly or via an app config.
        """
        with self.assertRaises(ImportError):
            with self.settings(INSTALLED_APPS=['there is no such app']):
                pass
        with self.assertRaises(ImportError):
            with self.settings(INSTALLED_APPS=['apps.apps.NoSuchApp']):
                pass

    def test_no_such_app_config(self):
        """
        Tests when INSTALLED_APPS contains an entry that doesn't exist.
        """
        with self.assertRaises(ImportError):
            with self.settings(INSTALLED_APPS=['apps.apps.NoSuchConfig']):
                pass

    def test_default_app_config(self):
        """An app's default_app_config attribute selects its config class."""
        with self.settings(INSTALLED_APPS=['apps.default_config_app']):
            config = apps.get_app_config('default_config_app')
        self.assertIsInstance(config, CustomConfig)

    @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
    def test_get_app_configs(self):
        """
        Tests apps.get_app_configs().
        """
        app_configs = apps.get_app_configs()
        # INSTALLED_APPS order must be preserved.
        self.assertListEqual(
            [app_config.name for app_config in app_configs],
            SOME_INSTALLED_APPS_NAMES)

    @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
    def test_get_app_config(self):
        """
        Tests apps.get_app_config().
        """
        app_config = apps.get_app_config('admin')
        self.assertEqual(app_config.name, 'django.contrib.admin')
        app_config = apps.get_app_config('staticfiles')
        self.assertEqual(app_config.name, 'django.contrib.staticfiles')
        with self.assertRaises(LookupError):
            apps.get_app_config('webdesign')
        # Looking up by dotted name instead of label should hint at the label.
        msg = "No installed app with label 'django.contrib.auth'. Did you mean 'myauth'"
        with self.assertRaisesMessage(LookupError, msg):
            apps.get_app_config('django.contrib.auth')

    @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
    def test_is_installed(self):
        """
        Tests apps.is_installed().
        """
        self.assertTrue(apps.is_installed('django.contrib.admin'))
        self.assertTrue(apps.is_installed('django.contrib.auth'))
        self.assertTrue(apps.is_installed('django.contrib.staticfiles'))
        self.assertFalse(apps.is_installed('django.contrib.webdesign'))

    @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
    def test_get_model(self):
        """
        Tests apps.get_model().
        """
        self.assertEqual(apps.get_model('admin', 'LogEntry'), LogEntry)
        with self.assertRaises(LookupError):
            apps.get_model('admin', 'LogExit')
        # App label is case-sensitive, Model name is case-insensitive.
        self.assertEqual(apps.get_model('admin', 'loGentrY'), LogEntry)
        with self.assertRaises(LookupError):
            apps.get_model('Admin', 'LogEntry')
        # A single argument is accepted.
        self.assertEqual(apps.get_model('admin.LogEntry'), LogEntry)
        with self.assertRaises(LookupError):
            apps.get_model('admin.LogExit')
        with self.assertRaises(ValueError):
            apps.get_model('admin_LogEntry')

    @override_settings(INSTALLED_APPS=['apps.apps.RelabeledAppsConfig'])
    def test_relabeling(self):
        """An AppConfig with a custom label is looked up by that label."""
        self.assertEqual(apps.get_app_config('relabeled').name, 'apps')

    def test_duplicate_labels(self):
        """Two configs with the same label must raise ImproperlyConfigured."""
        with six.assertRaisesRegex(self, ImproperlyConfigured, "Application labels aren't unique"):
            with self.settings(INSTALLED_APPS=['apps.apps.PlainAppsConfig', 'apps']):
                pass

    def test_duplicate_names(self):
        """Two configs with the same app name must raise ImproperlyConfigured."""
        with six.assertRaisesRegex(self, ImproperlyConfigured, "Application names aren't unique"):
            with self.settings(INSTALLED_APPS=['apps.apps.RelabeledAppsConfig', 'apps']):
                pass

    def test_import_exception_is_not_masked(self):
        """
        App discovery should preserve stack traces. Regression test for #22920.
        """
        with six.assertRaisesRegex(self, ImportError, "Oops"):
            with self.settings(INSTALLED_APPS=['import_error_package']):
                pass

    def test_models_py(self):
        """
        Tests that the models in the models.py file were loaded correctly.
        """
        self.assertEqual(apps.get_model("apps", "TotallyNormal"), TotallyNormal)
        with self.assertRaises(LookupError):
            apps.get_model("apps", "SoAlternative")
        with self.assertRaises(LookupError):
            new_apps.get_model("apps", "TotallyNormal")
        self.assertEqual(new_apps.get_model("apps", "SoAlternative"), SoAlternative)

    def test_dynamic_load(self):
        """
        Makes a new model at runtime and ensures it goes into the right place.
        """
        old_models = list(apps.get_app_config("apps").get_models())
        # Construct a new model in a new app registry
        body = {}
        new_apps = Apps(["apps"])
        meta_contents = {
            'app_label': "apps",
            'apps': new_apps,
        }
        meta = type(str("Meta"), tuple(), meta_contents)
        body['Meta'] = meta
        body['__module__'] = TotallyNormal.__module__
        temp_model = type(str("SouthPonies"), (models.Model,), body)
        # Make sure it appeared in the right place!
        self.assertListEqual(list(apps.get_app_config("apps").get_models()), old_models)
        with self.assertRaises(LookupError):
            apps.get_model("apps", "SouthPonies")
        self.assertEqual(new_apps.get_model("apps", "SouthPonies"), temp_model)

    def test_model_clash(self):
        """
        Test for behavior when two models clash in the app registry.
        """
        new_apps = Apps(["apps"])
        meta_contents = {
            'app_label': "apps",
            'apps': new_apps,
        }
        body = {}
        body['Meta'] = type(str("Meta"), tuple(), meta_contents)
        body['__module__'] = TotallyNormal.__module__
        type(str("SouthPonies"), (models.Model,), body)
        # When __name__ and __module__ match we assume the module
        # was reloaded and issue a warning. This use-case is
        # useful for REPL. Refs #23621.
        body = {}
        body['Meta'] = type(str("Meta"), tuple(), meta_contents)
        body['__module__'] = TotallyNormal.__module__
        with warnings.catch_warnings(record=True) as w:
            type(str("SouthPonies"), (models.Model,), body)
            self.assertEqual(len(w), 1)
            self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
            self.assertEqual(str(w[-1].message),
                "Model 'apps.southponies' was already registered. "
                "Reloading models is not advised as it can lead to inconsistencies, "
                "most notably with related models.")
        # If it doesn't appear to be a reloaded module then we expect
        # a RuntimeError.
        body = {}
        body['Meta'] = type(str("Meta"), tuple(), meta_contents)
        body['__module__'] = TotallyNormal.__module__ + '.whatever'
        with six.assertRaisesRegex(self, RuntimeError,
                "Conflicting 'southponies' models in application 'apps':.*"):
            type(str("SouthPonies"), (models.Model,), body)

    def test_get_containing_app_config_apps_not_ready(self):
        """
        apps.get_containing_app_config() should raise an exception if
        apps.apps_ready isn't True.
        """
        apps.apps_ready = False
        try:
            with self.assertRaisesMessage(AppRegistryNotReady, "Apps aren't loaded yet"):
                apps.get_containing_app_config('foo')
        finally:
            # Always restore the registry state for the other tests.
            apps.apps_ready = True

    def test_lazy_model_operation(self):
        """
        Tests apps.lazy_model_operation().
        """
        model_classes = []
        initial_pending = set(apps._pending_operations)

        def test_func(*models):
            model_classes[:] = models

        class LazyA(models.Model):
            pass

        # Test models appearing twice, and models appearing consecutively
        model_keys = [('apps', model_name) for model_name in ['lazya', 'lazyb', 'lazyb', 'lazyc', 'lazya']]
        apps.lazy_model_operation(test_func, *model_keys)
        # LazyModelA shouldn't be waited on since it's already registered,
        # and LazyModelC shouldn't be waited on until LazyModelB exists.
        self.assertSetEqual(set(apps._pending_operations) - initial_pending, {('apps', 'lazyb')})
        # Test that multiple operations can wait on the same model
        apps.lazy_model_operation(test_func, ('apps', 'lazyb'))

        class LazyB(models.Model):
            pass

        self.assertListEqual(model_classes, [LazyB])
        # Now we are just waiting on LazyModelC.
        self.assertSetEqual(set(apps._pending_operations) - initial_pending, {('apps', 'lazyc')})

        class LazyC(models.Model):
            pass

        # Everything should be loaded - make sure the callback was executed properly.
        self.assertListEqual(model_classes, [LazyA, LazyB, LazyB, LazyC, LazyA])
class Stub(object):
    """Minimal stand-in object whose attributes are supplied at construction."""

    def __init__(self, **kwargs):
        # Each keyword argument becomes an instance attribute.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
class AppConfigTests(SimpleTestCase):
    """Unit tests for AppConfig class."""

    def test_path_set_explicitly(self):
        """If subclass sets path as class attr, no module attributes needed."""
        class MyAppConfig(AppConfig):
            path = 'foo'

        # The Stub module carries neither __path__ nor __file__.
        ac = MyAppConfig('label', Stub())
        self.assertEqual(ac.path, 'foo')

    def test_explicit_path_overrides(self):
        """If path set as class attr, overrides __path__ and __file__."""
        class MyAppConfig(AppConfig):
            path = 'foo'

        ac = MyAppConfig('label', Stub(__path__=['a'], __file__='b/__init__.py'))
        self.assertEqual(ac.path, 'foo')

    def test_dunder_path(self):
        """If single element in __path__, use it (in preference to __file__)."""
        ac = AppConfig('label', Stub(__path__=['a'], __file__='b/__init__.py'))
        self.assertEqual(ac.path, 'a')

    def test_no_dunder_path_fallback_to_dunder_file(self):
        """If there is no __path__ attr, use __file__."""
        ac = AppConfig('label', Stub(__file__='b/__init__.py'))
        self.assertEqual(ac.path, 'b')

    def test_empty_dunder_path_fallback_to_dunder_file(self):
        """If the __path__ attr is empty, use __file__ if set."""
        ac = AppConfig('label', Stub(__path__=[], __file__='b/__init__.py'))
        self.assertEqual(ac.path, 'b')

    def test_multiple_dunder_path_fallback_to_dunder_file(self):
        """If the __path__ attr is length>1, use __file__ if set."""
        ac = AppConfig('label', Stub(__path__=['a', 'b'], __file__='c/__init__.py'))
        self.assertEqual(ac.path, 'c')

    def test_no_dunder_path_or_dunder_file(self):
        """If there is no __path__ or __file__, raise ImproperlyConfigured."""
        with self.assertRaises(ImproperlyConfigured):
            AppConfig('label', Stub())

    def test_empty_dunder_path_no_dunder_file(self):
        """If the __path__ attr is empty and there is no __file__, raise."""
        with self.assertRaises(ImproperlyConfigured):
            AppConfig('label', Stub(__path__=[]))

    def test_multiple_dunder_path_no_dunder_file(self):
        """If the __path__ attr is length>1 and there is no __file__, raise."""
        with self.assertRaises(ImproperlyConfigured):
            AppConfig('label', Stub(__path__=['a', 'b']))
@skipUnless(six.PY3, "Namespace packages sans __init__.py were added in Python 3.3")
class NamespacePackageAppTests(SimpleTestCase):
    """Tests for namespace packages (no __init__.py) used as Django apps."""

    # We need nsapp to be top-level so our multiple-paths tests can add another
    # location for it (if its inside a normal package with an __init__.py that
    # isn't possible). In order to avoid cluttering the already-full tests/ dir
    # (which is on sys.path), we add these new entries to sys.path temporarily.
    base_location = os.path.join(HERE, 'namespace_package_base')
    other_location = os.path.join(HERE, 'namespace_package_other_base')
    app_path = os.path.join(base_location, 'nsapp')

    def test_single_path(self):
        """
        A Py3.3+ namespace package can be an app if it has only one path.
        """
        with extend_sys_path(self.base_location):
            with self.settings(INSTALLED_APPS=['nsapp']):
                app_config = apps.get_app_config('nsapp')
                self.assertEqual(app_config.path, upath(self.app_path))

    def test_multiple_paths(self):
        """
        A Py3.3+ namespace package with multiple locations cannot be an app.

        (Because then we wouldn't know where to load its templates, static
        assets, etc from.)
        """
        # Temporarily add two directories to sys.path that both contain
        # components of the "nsapp" package.
        with extend_sys_path(self.base_location, self.other_location):
            with self.assertRaises(ImproperlyConfigured):
                with self.settings(INSTALLED_APPS=['nsapp']):
                    pass

    def test_multiple_paths_explicit_path(self):
        """
        Multiple locations are ok only if app-config has explicit path.
        """
        # Temporarily add two directories to sys.path that both contain
        # components of the "nsapp" package.
        with extend_sys_path(self.base_location, self.other_location):
            with self.settings(INSTALLED_APPS=['nsapp.apps.NSAppConfig']):
                app_config = apps.get_app_config('nsapp')
                self.assertEqual(app_config.path, upath(self.app_path))
|
zzjkf2009/Midterm_Astar | refs/heads/master | opencv/modules/ts/misc/trace_profiler.py | 1 | from __future__ import print_function
import os
import sys
import csv
from pprint import pprint
from collections import deque
# trace.hpp
# Region implementation flags: bits 16..19 of a TraceLocation's flags word
# identify which backend (IPP / OpenCL) implemented the traced region.
# (Trailing C-style semicolons removed - they are noise in Python.)
REGION_FLAG_IMPL_MASK = 15 << 16
REGION_FLAG_IMPL_IPP = 1 << 16
REGION_FLAG_IMPL_OPENCL = 2 << 16
DEBUG = False

if DEBUG:
    dprint = print
    dpprint = pprint
else:
    # No-op stand-ins for print()/pprint() when debug tracing is disabled.
    # They must accept the same calling convention as the functions they
    # replace: the original signature `(args, **kwargs)` raised TypeError
    # as soon as a caller passed more than one positional argument.
    def dprint(*args, **kwargs):
        pass

    def dpprint(*args, **kwargs):
        pass
def tryNum(s):
    """Parse *s* as an integer when possible, otherwise return it unchanged.

    '0x'-prefixed strings are first tried as hexadecimal; on Python 2 a
    ``long()`` conversion is attempted as a last resort.
    """
    bases = (16, 10) if s.startswith('0x') else (10,)
    for base in bases:
        try:
            return int(s, base)
        except ValueError:
            pass
    if sys.version_info[0] < 3:
        try:
            return long(s)
        except ValueError:
            pass
    return s
def formatTimestamp(t):
    """Render a microsecond timestamp *t* as seconds with 3 decimal places."""
    return '{:.3f}'.format(t * 1e-6)
try:
    from statistics import median
except ImportError:
    # Fallback for Python < 3.4: median of a non-empty sequence.
    def median(lst):
        data = sorted(lst)
        n = len(data)
        mid = (n - 1) // 2
        if n % 2:
            return data[mid]
        # Even length: mean of the two middle elements.
        return (data[mid] + data[mid + 1]) * 0.5
def getCXXFunctionName(spec):
    """Reduce a full C++ signature string to a short display name.

    Strips the trailing parameter list and the return type; entries
    prefixed with 'IPP' or 'OpenCL' keep that prefix (plus an optional
    '#'/':' separator) followed by the first identifier after it.
    """
    def dropParams(spec):
        # Scan backwards and cut off the trailing parenthesized parameter
        # list; recurse once when the "name" itself is parenthesized
        # (preceded by '#' or ':' or at the start of the string).
        pos = len(spec) - 1
        depth = 0
        while pos >= 0:
            if spec[pos] == ')':
                depth = depth + 1
            elif spec[pos] == '(':
                depth = depth - 1
                if depth == 0:
                    if pos == 0 or spec[pos - 1] in ['#', ':']:
                        res = dropParams(spec[pos+1:-1])
                        return (spec[:pos] + res[0], res[1])
                    return (spec[:pos], spec[pos:])
            pos = pos - 1
        return (spec, '')

    def extractName(spec):
        # The name is the last space-separated alphanumeric token
        # (everything before it is the return type).
        pos = len(spec) - 1
        inName = False
        while pos >= 0:
            if spec[pos] == ' ':
                if inName:
                    return spec[pos+1:]
            elif spec[pos].isalnum():
                inName = True
            pos = pos - 1
        return spec

    if spec.startswith('IPP') or spec.startswith('OpenCL'):
        prefix_size = len('IPP') if spec.startswith('IPP') else len('OpenCL')
        prefix = spec[:prefix_size]
        # Keep a single '#' or ':' separator as part of the prefix.
        if prefix_size < len(spec) and spec[prefix_size] in ['#', ':']:
            prefix = prefix + spec[prefix_size]
            prefix_size = prefix_size + 1
        # Find the first identifier ([A-Za-z0-9_:]+) after the prefix.
        begin = prefix_size
        while begin < len(spec):
            if spec[begin].isalnum() or spec[begin] in ['_', ':']:
                break
            begin = begin + 1
        if begin == len(spec):
            return spec
        end = begin
        while end < len(spec):
            if not (spec[end].isalnum() or spec[end] in ['_', ':']):
                break
            end = end + 1
        return prefix + spec[begin:end]
    spec = spec.replace(') const', ')')  # const methods
    (ret_type_name, params) = dropParams(spec)
    name = extractName(ret_type_name)
    # Operators keep their parameter list so e.g. operator() stays readable.
    if 'operator' in name:
        return name + params
    if name.startswith('&'):
        return name[1:]
    return name
stack_size = 10
class Trace:
    """In-memory model of an OpenCV trace log.

    Parses the main trace file (plus any per-thread files it references),
    reconstructs the task tree, computes self/total time splits for the
    IPP and OpenCL backends, and prints aggregate per-call statistics.
    """

    def __init__(self, filename=None):
        self.tasks = {}           # (threadID, taskID) -> TraceTask
        self.tasks_list = []      # tasks in parse order; sorted by begin time in process()
        self.locations = {}       # locationID -> TraceLocation
        self.threads_stack = {}   # threadID -> deque of currently-open (threadID, taskID)
        self.pending_files = deque()
        if filename:
            self.load(filename)

    class TraceTask:
        """One region instance: begin/end timestamps plus IPP/OpenCL time split."""

        def __init__(self, threadID, taskID, locationID, beginTimestamp):
            self.threadID = threadID
            self.taskID = taskID
            self.locationID = locationID
            self.beginTimestamp = beginTimestamp
            self.endTimestamp = None
            self.parentTaskID = None
            self.parentThreadID = None
            self.childTask = []
            # self* = time spent directly in this task; total* includes children.
            self.selfTimeIPP = 0
            self.selfTimeOpenCL = 0
            self.totalTimeIPP = 0
            self.totalTimeOpenCL = 0

        def __repr__(self):
            return "TID={} ID={} loc={} parent={}:{} begin={} end={} IPP={}/{} OpenCL={}/{}".format(
                self.threadID, self.taskID, self.locationID, self.parentThreadID, self.parentTaskID,
                self.beginTimestamp, self.endTimestamp, self.totalTimeIPP, self.selfTimeIPP, self.totalTimeOpenCL, self.selfTimeOpenCL)

    class TraceLocation:
        """Static source location of a traced region (file:line, function name, flags)."""

        def __init__(self, locationID, filename, line, name, flags):
            self.locationID = locationID
            self.filename = os.path.split(filename)[1]
            self.line = line
            self.name = getCXXFunctionName(name)
            self.flags = flags

        def __str__(self):
            return "{}#{}:{}".format(self.name, self.filename, self.line)

        def __repr__(self):
            return "ID={} {}:{}:{}".format(self.locationID, self.filename, self.line, self.name)

    def parse_file(self, filename):
        """Parse one trace file; '#thread file:' directives queue extra files."""
        dprint("Process file: '{}'".format(filename))
        with open(filename) as infile:
            for line in infile:
                line = str(line).strip()
                # NOTE(review): a blank line would raise IndexError on the
                # next check; assumes the trace writer never emits empty lines.
                if line[0] == "#":
                    if line.startswith("#thread file:"):
                        # Secondary per-thread file, relative to this file's dir.
                        name = str(line.split(':', 1)[1]).strip()
                        self.pending_files.append(os.path.join(os.path.split(filename)[0], name))
                    continue
                self.parse_line(line)

    def parse_line(self, line):
        """Dispatch one CSV record: 'l' = location, 'b'/'e' = task begin/end."""
        opts = line.split(',')
        dpprint(opts)
        if opts[0] == 'l':
            opts = list(csv.reader([line]))[0]  # process quote more
            locationID = int(opts[1])
            filename = str(opts[2])
            line = int(opts[3])  # NOTE: rebinds the 'line' parameter (source line number)
            name = opts[4]
            flags = tryNum(opts[5])
            self.locations[locationID] = self.TraceLocation(locationID, filename, line, name, flags)
            return
        # Optional trailing 'key=value' fields (parent=, parentThread=, tIPP=, tOCL=).
        extra_opts = {}
        for e in opts[5:]:
            if not '=' in e:
                continue
            (k, v) = e.split('=')
            extra_opts[k] = tryNum(v)
        if extra_opts:
            dpprint(extra_opts)
        threadID = None
        taskID = None
        locationID = None
        ts = None
        if opts[0] in ['b', 'e']:
            threadID = int(opts[1])
            taskID = int(opts[4])
            locationID = int(opts[3])
            ts = tryNum(opts[2])
        thread_stack = None
        currentTask = (None, None)
        if threadID is not None:
            if not threadID in self.threads_stack:
                thread_stack = deque()
                self.threads_stack[threadID] = thread_stack
            else:
                thread_stack = self.threads_stack[threadID]
            currentTask = None if not thread_stack else thread_stack[-1]
        t = (threadID, taskID)
        if opts[0] == 'b':
            assert not t in self.tasks, "Duplicate task: " + str(t) + repr(self.tasks[t])
            task = self.TraceTask(threadID, taskID, locationID, ts)
            self.tasks[t] = task
            self.tasks_list.append(task)
            thread_stack.append((threadID, taskID))
            # Parent defaults to the enclosing task on this thread; explicit
            # parent/parentThread fields (cross-thread forks) take precedence.
            if currentTask:
                task.parentThreadID = currentTask[0]
                task.parentTaskID = currentTask[1]
            if 'parentThread' in extra_opts:
                task.parentThreadID = extra_opts['parentThread']
            if 'parent' in extra_opts:
                task.parentTaskID = extra_opts['parent']
        if opts[0] == 'e':
            task = self.tasks[t]
            task.endTimestamp = ts
            if 'tIPP' in extra_opts:
                task.selfTimeIPP = extra_opts['tIPP']
            if 'tOCL' in extra_opts:
                task.selfTimeOpenCL = extra_opts['tOCL']
            thread_stack.pop()

    def load(self, filename):
        """Load *filename* and every secondary file it (transitively) references."""
        self.pending_files.append(filename)
        if DEBUG:
            with open(filename, 'r') as f:
                print(f.read(), end='')
        while self.pending_files:
            self.parse_file(self.pending_files.pop())

    def getParentTask(self, task):
        """Return the parent TraceTask of *task*, or None if it has no parent."""
        return self.tasks.get((task.parentThreadID, task.parentTaskID), None)

    def process(self):
        """Compute durations and propagate IPP/OpenCL times up the task tree."""
        self.tasks_list.sort(key=lambda x: x.beginTimestamp)

        # Locate the special 'parallel_for' region, whose children run on
        # worker threads and need proportional (not additive) accounting.
        parallel_for_location = None
        for (id, l) in self.locations.items():
            if l.name == 'parallel_for':
                parallel_for_location = l.locationID
                break

        for task in self.tasks_list:
            try:
                task.duration = task.endTimestamp - task.beginTimestamp
                task.selfDuration = task.duration
            # NOTE(review): bare except - intended to cover tasks with no
            # end record (endTimestamp is None -> TypeError), but it also
            # swallows everything else, including KeyboardInterrupt.
            except:
                task.duration = None
                task.selfDuration = None
            task.totalTimeIPP = task.selfTimeIPP
            task.totalTimeOpenCL = task.selfTimeOpenCL

        dpprint(self.tasks)

        dprint("Calculate total times")

        for task in self.tasks_list:
            parentTask = self.getParentTask(task)
            if parentTask:
                # Child time is excluded from the parent's self duration.
                parentTask.selfDuration = parentTask.selfDuration - task.duration
                parentTask.childTask.append(task)
            timeIPP = task.selfTimeIPP
            timeOpenCL = task.selfTimeOpenCL
            while parentTask:
                if parentTask.locationID == parallel_for_location:  # TODO parallel_for
                    break
                parentLocation = self.locations[parentTask.locationID]
                # An IPP-implemented ancestor absorbs the IPP time (subtracted
                # from its own self time); otherwise accumulate into total.
                if (parentLocation.flags & REGION_FLAG_IMPL_MASK) == REGION_FLAG_IMPL_IPP:
                    parentTask.selfTimeIPP = parentTask.selfTimeIPP - timeIPP
                    timeIPP = 0
                else:
                    parentTask.totalTimeIPP = parentTask.totalTimeIPP + timeIPP
                if (parentLocation.flags & REGION_FLAG_IMPL_MASK) == REGION_FLAG_IMPL_OPENCL:
                    parentTask.selfTimeOpenCL = parentTask.selfTimeOpenCL - timeOpenCL
                    timeOpenCL = 0
                else:
                    parentTask.totalTimeOpenCL = parentTask.totalTimeOpenCL + timeOpenCL
                parentTask = self.getParentTask(parentTask)

        dpprint(self.tasks)

        dprint("Calculate total times (parallel_for)")

        for task in self.tasks_list:
            if task.locationID == parallel_for_location:
                task.selfDuration = 0
                childDuration = sum([t.duration for t in task.childTask])
                if task.duration == 0 or childDuration == 0:
                    continue
                # Scale summed child backend times by wall-time/child-time
                # ratio, since children overlap on worker threads.
                timeCoef = task.duration / float(childDuration)
                childTimeIPP = sum([t.totalTimeIPP for t in task.childTask])
                childTimeOpenCL = sum([t.totalTimeOpenCL for t in task.childTask])
                if childTimeIPP == 0 and childTimeOpenCL == 0:
                    continue
                timeIPP = childTimeIPP * timeCoef
                timeOpenCL = childTimeOpenCL * timeCoef
                parentTask = task
                while parentTask:
                    parentLocation = self.locations[parentTask.locationID]
                    if (parentLocation.flags & REGION_FLAG_IMPL_MASK) == REGION_FLAG_IMPL_IPP:
                        parentTask.selfTimeIPP = parentTask.selfTimeIPP - timeIPP
                        timeIPP = 0
                    else:
                        parentTask.totalTimeIPP = parentTask.totalTimeIPP + timeIPP
                    if (parentLocation.flags & REGION_FLAG_IMPL_MASK) == REGION_FLAG_IMPL_OPENCL:
                        parentTask.selfTimeOpenCL = parentTask.selfTimeOpenCL - timeOpenCL
                        timeOpenCL = 0
                    else:
                        parentTask.totalTimeOpenCL = parentTask.totalTimeOpenCL + timeOpenCL
                    parentTask = self.getParentTask(parentTask)

        dpprint(self.tasks)
        dprint("Done")

    def dump(self, max_entries):
        """Print a table of the top *max_entries* call paths (0/negative = all)."""
        assert isinstance(max_entries, int)

        class CallInfo():
            """Per-call-path accumulator of timing samples."""
            def __init__(self, callID):
                self.callID = callID
                self.totalTimes = []
                self.selfTimes = []
                self.threads = set()
                self.selfTimesIPP = []
                self.selfTimesOpenCL = []
                self.totalTimesIPP = []
                self.totalTimesOpenCL = []

        calls = {}

        # Key each task by the tuple of up to `stack_size` ancestor locations.
        for currentTask in self.tasks_list:
            task = currentTask
            callID = []
            for i in range(stack_size):
                callID.append(task.locationID)
                task = self.getParentTask(task)
                if not task:
                    break
            callID = tuple(callID)
            if not callID in calls:
                call = CallInfo(callID)
                calls[callID] = call
            else:
                call = calls[callID]
            call.totalTimes.append(currentTask.duration)
            call.selfTimes.append(currentTask.selfDuration)
            call.threads.add(currentTask.threadID)
            call.selfTimesIPP.append(currentTask.selfTimeIPP)
            call.selfTimesOpenCL.append(currentTask.selfTimeOpenCL)
            call.totalTimesIPP.append(currentTask.totalTimeIPP)
            call.totalTimesOpenCL.append(currentTask.totalTimeOpenCL)

        dpprint(self.tasks)
        dpprint(self.locations)
        dpprint(calls)

        calls_self_sum = {k: sum(v.selfTimes) for (k, v) in calls.items()}
        calls_total_sum = {k: sum(v.totalTimes) for (k, v) in calls.items()}
        calls_median = {k: median(v.selfTimes) for (k, v) in calls.items()}
        # Rank call paths by their summed self time.
        calls_sorted = sorted(calls.keys(), key=lambda x: calls_self_sum[x], reverse=True)

        calls_self_sum_IPP = {k: sum(v.selfTimesIPP) for (k, v) in calls.items()}
        calls_total_sum_IPP = {k: sum(v.totalTimesIPP) for (k, v) in calls.items()}

        calls_self_sum_OpenCL = {k: sum(v.selfTimesOpenCL) for (k, v) in calls.items()}
        calls_total_sum_OpenCL = {k: sum(v.totalTimesOpenCL) for (k, v) in calls.items()}

        if max_entries > 0 and len(calls_sorted) > max_entries:
            calls_sorted = calls_sorted[:max_entries]

        def formatPercents(p):
            if p is not None:
                return "{:>3d}".format(int(p*100))
            return ''

        name_width = 70
        timestamp_width = 12

        def fmtTS():
            return '{:>' + str(timestamp_width) + '}'

        # NOTE(review): fmt and fmt2 are built identically; fmt2 exists only
        # to label the second (totals) row of each entry.
        fmt = "{:>3} {:<"+str(name_width)+"} {:>8} {:>3}"+((' '+fmtTS())*5)+((' '+fmtTS()+' {:>3}')*2)
        fmt2 = "{:>3} {:<"+str(name_width)+"} {:>8} {:>3}"+((' '+fmtTS())*5)+((' '+fmtTS()+' {:>3}')*2)
        print(fmt.format("ID", "name", "count", "thr", "min", "max", "median", "avg", "*self*", "IPP", "%", "OpenCL", "%"))
        print(fmt2.format("", "", "", "", "t-min", "t-max", "t-median", "t-avg", "total", "t-IPP", "%", "t-OpenCL", "%"))

        for (index, callID) in enumerate(calls_sorted):
            call_self_times = calls[callID].selfTimes
            loc0 = self.locations[callID[0]]  # NOTE(review): unused
            loc_array = []  # [str(callID)]
            for (i, l) in enumerate(callID):
                loc = self.locations[l]
                # Leaf location is shown in full; ancestors by name only.
                loc_array.append(loc.name if i > 0 else str(loc))
            loc_str = '|'.join(loc_array)
            if len(loc_str) > name_width: loc_str = loc_str[:name_width-3]+'...'
            # First row: self-time statistics for this call path.
            print(fmt.format(index + 1, loc_str, len(call_self_times),
                             len(calls[callID].threads),
                             formatTimestamp(min(call_self_times)),
                             formatTimestamp(max(call_self_times)),
                             formatTimestamp(calls_median[callID]),
                             formatTimestamp(sum(call_self_times)/float(len(call_self_times))),
                             formatTimestamp(sum(call_self_times)),
                             formatTimestamp(calls_self_sum_IPP[callID]),
                             formatPercents(calls_self_sum_IPP[callID] / float(calls_self_sum[callID])) if calls_self_sum[callID] > 0 else formatPercents(None),
                             formatTimestamp(calls_self_sum_OpenCL[callID]),
                             formatPercents(calls_self_sum_OpenCL[callID] / float(calls_self_sum[callID])) if calls_self_sum[callID] > 0 else formatPercents(None),
                             ))
            call_total_times = calls[callID].totalTimes
            # Second row: total-time (self + children) statistics.
            print(fmt2.format("", "", "", "",
                              formatTimestamp(min(call_total_times)),
                              formatTimestamp(max(call_total_times)),
                              formatTimestamp(median(call_total_times)),
                              formatTimestamp(sum(call_total_times)/float(len(call_total_times))),
                              formatTimestamp(sum(call_total_times)),
                              formatTimestamp(calls_total_sum_IPP[callID]),
                              formatPercents(calls_total_sum_IPP[callID] / float(calls_total_sum[callID])) if calls_total_sum[callID] > 0 else formatPercents(None),
                              formatTimestamp(calls_total_sum_OpenCL[callID]),
                              formatPercents(calls_total_sum_OpenCL[callID] / float(calls_total_sum[callID])) if calls_total_sum[callID] > 0 else formatPercents(None),
                              ))
        print()
if __name__ == "__main__":
    # Usage: trace_profiler.py [trace_file] [max_entries]
    cli_args = sys.argv[1:]
    tracefile = cli_args[0] if cli_args else 'OpenCVTrace.txt'
    count = int(cli_args[1]) if len(cli_args) > 1 else 10
    trace = Trace(tracefile)
    trace.process()
    trace.dump(max_entries=count)
    print("OK")
|
Hazelsuko07/17WarmingUp | refs/heads/hy_try | py3.6/lib/python3.6/site-packages/wheel/metadata.py | 62 | """
Tools for converting old- to new-style metadata.
"""
import email.parser
import os.path
import re
import textwrap
from collections import namedtuple, OrderedDict
import pkg_resources
from . import __version__ as wheel_version
from .pkginfo import read_pkg_info
from .util import OrderedDefaultDict
# Metadata format version emitted by this converter (old-draft PEP 426).
METADATA_VERSION = "2.0"

# PKG-INFO field name -> pluralized metadata 2.0 key.
PLURAL_FIELDS = {"classifier": "classifiers",
                 "provides_dist": "provides",
                 "provides_extra": "extras"}

# Fields dropped entirely during conversion (currently none).
SKIP_FIELDS = set()

# ({pkg-info field -> contact dict key}, role name) pairs.
CONTACT_FIELDS = (({"email": "author_email", "name": "author"},
                   "author"),
                  ({"email": "maintainer_email", "name": "maintainer"},
                   "maintainer"))

# commonly filled out as "UNKNOWN" by distutils:
UNKNOWN_FIELDS = {"author", "author_email", "platform", "home_page", "license"}

# Wheel itself is probably the only program that uses non-extras markers
# in METADATA/PKG-INFO. Support its syntax with the extra at the end only.
# Raw strings so '\s' is a regex escape, not a Python invalid-escape
# sequence (DeprecationWarning on 3.6+, SyntaxWarning on newer versions).
EXTRA_RE = re.compile(
    r"""^(?P<package>.*?)(;\s*(?P<condition>.*?)(extra == '(?P<extra>.*?)')?)$""")
KEYWORDS_RE = re.compile(r"[\0-,]+")

MayRequiresKey = namedtuple('MayRequiresKey', ('condition', 'extra'))
def unique(iterable):
    """
    Yield unique values in iterable, preserving order.
    """
    seen = set()
    for value in iterable:
        if value in seen:
            continue
        seen.add(value)
        yield value
def handle_requires(metadata, pkg_info, key):
    """
    Place the runtime requirements from pkg_info into metadata.

    Requirements are grouped by their (environment condition, extra) pair
    into metadata['run_requires']; any extras encountered are appended to
    metadata['extras'].
    """
    may_requires = OrderedDefaultDict(list)
    for value in sorted(pkg_info.get_all(key)):
        extra_match = EXTRA_RE.search(value)
        if extra_match:
            groupdict = extra_match.groupdict()
            condition = groupdict['condition']
            extra = groupdict['extra']
            package = groupdict['package']
            # When both a condition and an extra were present, the regex
            # leaves a trailing " and " on the condition - strip it.
            if condition.endswith(' and '):
                condition = condition[:-5]
        else:
            condition, extra = None, None
            package = value
        # NOTE: rebinds the 'key' parameter from here on.
        key = MayRequiresKey(condition, extra)
        may_requires[key].append(package)
    if may_requires:
        metadata['run_requires'] = []

        def sort_key(item):
            # Both condition and extra could be None, which can't be compared
            # against strings in Python 3.
            key, value = item
            if key.condition is None:
                return ''
            return key.condition

        for key, value in sorted(may_requires.items(), key=sort_key):
            may_requirement = OrderedDict((('requires', value),))
            if key.extra:
                may_requirement['extra'] = key.extra
            if key.condition:
                may_requirement['environment'] = key.condition
            metadata['run_requires'].append(may_requirement)

        if 'extras' not in metadata:
            metadata['extras'] = []
        metadata['extras'].extend([key.extra for key in may_requires.keys() if key.extra])
def pkginfo_to_dict(path, distribution=None):
    """
    Convert PKG-INFO to a prototype Metadata 2.0 (PEP 426) dict.

    The description is included under the key ['description'] rather than
    being written to a separate file.

    path: path to PKG-INFO file
    distribution: optional distutils Distribution()
    """
    metadata = OrderedDefaultDict(
        lambda: OrderedDefaultDict(lambda: OrderedDefaultDict(OrderedDict)))
    metadata["generator"] = "bdist_wheel (" + wheel_version + ")"
    # Python 2/3 switch: on Python 3 the bare `unicode` raises NameError
    # before read_pkg_info runs, so the explicit UTF-8 branch is taken.
    try:
        unicode
        pkg_info = read_pkg_info(path)
    except NameError:
        with open(path, 'rb') as pkg_info_file:
            pkg_info = email.parser.Parser().parsestr(pkg_info_file.read().decode('utf-8'))
    description = None

    if pkg_info['Summary']:
        metadata['summary'] = pkginfo_unicode(pkg_info, 'Summary')
        del pkg_info['Summary']

    if pkg_info['Description']:
        description = dedent_description(pkg_info)
        del pkg_info['Description']
    else:
        # Newer PKG-INFO files carry the description in the message payload.
        payload = pkg_info.get_payload()
        if isinstance(payload, bytes):
            # Avoid a Python 2 Unicode error.
            # We still suffer ? glyphs on Python 3.
            payload = payload.decode('utf-8')
        if payload:
            description = payload

    if description:
        pkg_info['description'] = description

    # Translate each remaining header into the metadata dict, handling the
    # special-cased fields explicitly.
    for key in sorted(unique(k.lower() for k in pkg_info.keys())):
        low_key = key.replace('-', '_')

        if low_key in SKIP_FIELDS:
            continue

        if low_key in UNKNOWN_FIELDS and pkg_info.get(key) == 'UNKNOWN':
            continue

        if low_key in sorted(PLURAL_FIELDS):
            metadata[PLURAL_FIELDS[low_key]] = pkg_info.get_all(key)
        elif low_key == "requires_dist":
            handle_requires(metadata, pkg_info, key)
        elif low_key == 'provides_extra':
            if 'extras' not in metadata:
                metadata['extras'] = []
            metadata['extras'].extend(pkg_info.get_all(key))
        elif low_key == 'home_page':
            metadata['extensions']['python.details']['project_urls'] = {'Home': pkg_info[key]}
        elif low_key == 'keywords':
            metadata['keywords'] = KEYWORDS_RE.split(pkg_info[key])
        else:
            metadata[low_key] = pkg_info[key]

    metadata['metadata_version'] = METADATA_VERSION

    if 'extras' in metadata:
        metadata['extras'] = sorted(set(metadata['extras']))

    # include more information if distribution is available
    if distribution:
        for requires, attr in (('test_requires', 'tests_require'),):
            try:
                requirements = getattr(distribution, attr)
                if isinstance(requirements, list):
                    new_requirements = sorted(convert_requirements(requirements))
                    metadata[requires] = [{'requires': new_requirements}]
            except AttributeError:
                pass

    # handle contacts: pull author/maintainer name+email fields out of the
    # flat metadata and into PEP 426 'contacts' records.
    contacts = []
    for contact_type, role in CONTACT_FIELDS:
        contact = OrderedDict()
        for key in sorted(contact_type):
            if contact_type[key] in metadata:
                contact[key] = metadata.pop(contact_type[key])
        if contact:
            contact['role'] = role
            contacts.append(contact)
    if contacts:
        metadata['extensions']['python.details']['contacts'] = contacts

    # convert entry points to exports
    try:
        with open(os.path.join(os.path.dirname(path), "entry_points.txt"), "r") as ep_file:
            ep_map = pkg_resources.EntryPoint.parse_map(ep_file.read())
        exports = OrderedDict()
        for group, items in sorted(ep_map.items()):
            exports[group] = OrderedDict()
            for item in sorted(map(str, items.values())):
                name, export = item.split(' = ', 1)
                exports[group][name] = export
        if exports:
            metadata['extensions']['python.exports'] = exports
    except IOError:
        pass

    # copy console_scripts entry points to commands
    if 'python.exports' in metadata['extensions']:
        for (ep_script, wrap_script) in (('console_scripts', 'wrap_console'),
                                         ('gui_scripts', 'wrap_gui')):
            if ep_script in metadata['extensions']['python.exports']:
                metadata['extensions']['python.commands'][wrap_script] = \
                    metadata['extensions']['python.exports'][ep_script]
    return metadata
def requires_to_requires_dist(requirement):
    """Compose the version predicates for requirement in PEP 345 fashion."""
    predicates = sorted(op + version for op, version in requirement.specs)
    if not predicates:
        return ''
    return " (%s)" % ','.join(predicates)
def convert_requirements(requirements):
    """Yield Requires-Dist: strings for parsed requirements strings."""
    for req in requirements:
        parsed = pkg_resources.Requirement.parse(req)
        extras = ",".join(parsed.extras)
        if extras:
            extras = "[%s]" % extras
        yield parsed.project_name + extras + requires_to_requires_dist(parsed)
def generate_requirements(extras_require):
    """
    Convert requirements from a setup()-style dictionary to ('Requires-Dist', 'requirement')
    and ('Provides-Extra', 'extra') tuples.

    extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
    using the empty extra {'': [requirements]} to hold install_requires.
    """
    for raw_extra, reqs in extras_require.items():
        # setuptools "extra:condition" syntax; plain keys yield no condition.
        extra, _, condition = raw_extra.partition(':')
        extra = pkg_resources.safe_extra(extra)
        if extra:
            yield 'Provides-Extra', extra
            if condition:
                condition += " and "
            condition += "extra == '%s'" % extra
        if condition:
            condition = '; ' + condition
        for converted in convert_requirements(reqs):
            yield 'Requires-Dist', converted + condition
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 1.3 aka
    old-draft Metadata 2.0 format.

    Merges requirements from the egg-info's requires.txt (as Requires-Dist /
    Provides-Extra headers) and moves the Description header into the
    message payload.  Returns the modified email Message object.
    """
    pkg_info = read_pkg_info(pkginfo_path)
    pkg_info.replace_header('Metadata-Version', '2.0')
    requires_path = os.path.join(egg_info_path, 'requires.txt')
    if os.path.exists(requires_path):
        with open(requires_path) as requires_file:
            requires = requires_file.read()
        # Sections are (extra_name, [requirements]); the None/'' section
        # holds install_requires and sorts first.
        for extra, reqs in sorted(pkg_resources.split_sections(requires),
                                  key=lambda x: x[0] or ''):
            for item in generate_requirements({extra: reqs}):
                pkg_info[item[0]] = item[1]

    description = pkg_info['Description']
    if description:
        pkg_info.set_payload(dedent_description(pkg_info))
        del pkg_info['Description']

    return pkg_info
def pkginfo_unicode(pkg_info, field):
    """Hack to coax Unicode out of an email Message() - Python 3.3+"""
    text = pkg_info[field]
    if isinstance(text, str):
        return text
    field = field.lower()
    if not hasattr(pkg_info, 'raw_items'):  # Python 3.2
        return str(text)
    for name, raw_value in pkg_info.raw_items():
        if name.lower() == field:
            # Round-trip through surrogateescape to recover real UTF-8.
            return raw_value.encode('ascii', 'surrogateescape').decode('utf-8')
    return text
def dedent_description(pkg_info):
    """
    Dedent and convert pkg_info['Description'] to Unicode.
    """
    description = pkg_info['Description']

    # Python 3 Unicode handling, sorta.
    surrogates = not isinstance(description, str)
    if surrogates:
        description = pkginfo_unicode(pkg_info, 'Description')

    lines = description.splitlines()
    # The first line is only lstripped: if the first line of long_description
    # is blank, the remaining lines carry the uniform header indent, which
    # textwrap.dedent removes.
    cleaned = '\n'.join((lines[0].lstrip(),
                         textwrap.dedent('\n'.join(lines[1:])),
                         '\n'))

    if surrogates:
        cleaned = cleaned \
            .encode("utf8") \
            .decode("ascii", "surrogateescape")

    return cleaned
if __name__ == "__main__":
    # CLI: pretty-print the converted metadata for a given PKG-INFO path.
    import sys
    import pprint
    pprint.pprint(pkginfo_to_dict(sys.argv[1]))
|
diofant/diofant | refs/heads/master | diofant/polys/polytools.py | 1 | """User-friendly public interface to polynomial functions."""
import functools
import math
import operator
import mpmath
from ..core import (Add, Basic, E, Expr, Integer, Mul, Tuple, oo,
preorder_traversal)
from ..core.compatibility import iterable
from ..core.decorators import _sympifyit
from ..core.mul import _keep_coeff
from ..core.relational import Relational
from ..core.sympify import sympify
from ..domains import FF, QQ, ZZ
from ..domains.compositedomain import CompositeDomain
from ..logic.boolalg import BooleanAtom
from ..utilities import default_sort_key, group, sift
from .constructor import construct_domain
from .groebnertools import groebner as _groebner
from .groebnertools import matrix_fglm
from .monomials import Monomial
from .orderings import monomial_key
from .polyerrors import (CoercionFailed, ComputationFailed, DomainError,
ExactQuotientFailed, GeneratorsError,
GeneratorsNeeded, MultivariatePolynomialError,
PolificationFailed, PolynomialError,
UnificationFailed)
from .polyoptions import Modulus, Options, Order, allowed_flags, build_options
from .polyutils import _find_gens, _parallel_dict_from_expr, _sort_gens
from .rationaltools import together
from .rings import PolyElement
__all__ = ('Poly', 'PurePoly', 'parallel_poly_from_expr',
'degree', 'LC', 'LM', 'LT',
'div', 'rem', 'quo', 'exquo', 'half_gcdex', 'gcdex',
'invert', 'subresultants', 'resultant', 'discriminant', 'cofactors',
'gcd', 'lcm', 'terms_gcd', 'trunc',
'monic', 'content', 'primitive', 'compose', 'decompose',
'sqf_norm', 'sqf_part', 'sqf_list', 'sqf',
'factor_list', 'factor', 'count_roots',
'real_roots', 'nroots',
'cancel', 'reduced', 'groebner', 'GroebnerBasis', 'poly')
class Poly(Expr):
    """Generic class for representing polynomial expressions."""

    is_commutative = True  # polynomial arithmetic here is over commutative rings
    is_Poly = True         # fast type flag used throughout diofant
    # NOTE(review): presumably set above Expr's default so binary-op dispatch
    # prefers Poly's methods over the other operand's - confirm.
    _op_priority = 10.1
def __new__(cls, rep, *gens, **args):
    """Create a new polynomial instance out of something useful."""
    opt = build_options(gens, args)
    if iterable(rep, exclude=str):
        # Raw data: a {monomial: coeff} dict or a sequence of coefficients.
        if isinstance(rep, dict):
            return cls._from_dict(rep, opt)
        return cls._from_list(list(rep), opt)
    # Otherwise interpret rep as a Diofant expression (or an existing Poly).
    rep = sympify(rep)
    if rep.is_Poly:
        return cls._from_poly(rep, opt)
    return cls._from_expr(rep, opt)
@classmethod
def new(cls, rep, *gens):
    """Construct :class:`Poly` instance from raw representation."""
    # The raw representation must be a ring element whose ring has
    # exactly one generator per supplied symbol.
    if not isinstance(rep, PolyElement):
        raise PolynomialError(
            f'invalid polynomial representation: {rep}')
    if rep.ring.ngens != len(gens):
        raise PolynomialError(f'invalid arguments: {rep}, {gens}')
    obj = Expr.__new__(cls)
    obj.rep = rep
    obj.gens = gens
    return obj
@classmethod
def from_dict(cls, rep, *gens, **args):
    """Construct a polynomial from a :class:`dict`."""
    return cls._from_dict(rep, build_options(gens, args))
@classmethod
def from_list(cls, rep, *gens, **args):
    """Construct a polynomial from a :class:`list`."""
    return cls._from_list(rep, build_options(gens, args))
@classmethod
def from_poly(cls, rep, *gens, **args):
    """Construct a polynomial from a polynomial."""
    return cls._from_poly(rep, build_options(gens, args))
@classmethod
def from_expr(cls, rep, *gens, **args):
    """Construct a polynomial from an expression."""
    return cls._from_expr(rep, build_options(gens, args))
@classmethod
def _from_dict(cls, rep, opt):
    """Construct a polynomial from a :class:`dict`."""
    gens = opt.gens
    if not gens:
        raise GeneratorsNeeded(
            "can't initialize from 'dict' without generators")
    domain = opt.domain
    if domain is None:
        # Infer the smallest domain containing every coefficient.
        domain, rep = construct_domain(rep, opt=opt)
    else:
        # Coerce coefficients into the requested domain (in place).
        for monom in rep:
            rep[monom] = domain.convert(rep[monom])
    ring = domain.poly_ring(*gens, order=opt.order)
    return cls.new(ring.from_dict(rep), *gens)
@classmethod
def _from_list(cls, rep, opt):
    """Construct a polynomial from a :class:`list`."""
    gens = opt.gens
    if not gens:
        raise GeneratorsNeeded(
            "can't initialize from 'list' without generators")
    if len(gens) != 1:
        # Coefficient lists only make sense for univariate polynomials.
        raise MultivariatePolynomialError(
            "'list' representation not supported")
    domain = opt.domain
    if domain is None:
        # Infer the smallest domain containing every coefficient.
        domain, rep = construct_domain(rep, opt=opt)
    else:
        rep = [domain.convert(coeff) for coeff in rep]
    ring = domain.poly_ring(*gens)
    return cls.new(ring.from_list(rep), *gens)
@classmethod
def _from_poly(cls, rep, opt):
    """Construct a polynomial from a polynomial."""
    # Re-wrap when converting between Poly subclasses.
    if cls != rep.__class__:
        rep = cls.new(rep.rep, *rep.gens)

    gens = opt.gens
    # A composite request, or a genuinely different generator set, forces
    # a full rebuild through the expression form.
    if opt.composite or (gens and set(rep.gens) != set(gens)):
        return cls._from_expr(rep.as_expr(), opt)

    if gens and rep.gens != gens:
        # Same generators in a different order: just reorder.
        rep = rep.reorder(*gens)

    if opt.domain:
        rep = rep.set_domain(opt.domain)
    elif opt.field:
        rep = rep.to_field()

    return rep
@classmethod
def _from_expr(cls, rep, opt):
    """Construct a polynomial from an expression."""
    # _parallel_dict_from_expr returns one dict per input expression.
    reps, opt = _parallel_dict_from_expr([rep], opt)
    return cls._from_dict(reps[0], opt)
def _hashable_content(self):
    """Allow Diofant to hash Poly instances."""
    # The raw representation plus the generator tuple fully identify a Poly.
    return self.rep, self.gens
def __hash__(self):
    # Explicitly forward to the inherited implementation (which is built
    # from _hashable_content above).
    return super().__hash__()
@property
def free_symbols(self):
    """
    Free symbols of a polynomial expression.

    Examples
    ========

    >>> (x**2 + 1).as_poly().free_symbols
    {x}
    >>> (x**2 + y).as_poly().free_symbols
    {x, y}
    >>> (x**2 + y).as_poly(x).free_symbols
    {x, y}
    """
    # Union of the generators' symbols plus any symbols hiding in the
    # coefficient domain.
    symbols = set().union(*(gen.free_symbols for gen in self.gens))
    return symbols | self.free_symbols_in_domain
@property
def free_symbols_in_domain(self):
    """
    Free symbols of the domain of ``self``.

    Examples
    ========

    >>> (x**2 + 1).as_poly().free_symbols_in_domain
    set()
    >>> (x**2 + y).as_poly().free_symbols_in_domain
    set()
    >>> (x**2 + y).as_poly(x).free_symbols_in_domain
    {y}
    """
    domain = self.domain
    symbols = set()
    if isinstance(domain, CompositeDomain):
        # Symbols acting as ground-domain generators.
        for gen in domain.symbols:
            symbols.update(gen.free_symbols)
    elif domain.is_ExpressionDomain:
        # Arbitrary expressions as coefficients: scan them all.
        for coeff in self.coeffs():
            symbols.update(coeff.free_symbols)
    return symbols
@property
def args(self):
    """
    Don't mess up with the core.

    Examples
    ========

    >>> (x**2 + 1).as_poly().args
    (x**2 + 1, x)
    """
    # Expression form first, then the generators.
    return (self.as_expr(),) + self.gens
@property
def is_number(self):
    # Delegate to the expression form of the polynomial.
    return self.as_expr().is_number
@property
def gen(self):
    """
    Return the principal generator.

    Examples
    ========

    >>> (x**2 + 1).as_poly().gen
    x
    """
    # The principal generator is simply the first one.
    return self.gens[0]
@property
def domain(self):
    """Get the ground domain of ``self``."""
    # The domain lives on the ring of the raw representation.
    return self.rep.ring.domain
def unify(self, other):
    """
    Make ``self`` and ``other`` belong to the same domain.

    Examples
    ========

    >>> f, g = (x/2 + 1).as_poly(), (2*x + 1).as_poly()

    >>> f
    Poly(1/2*x + 1, x, domain='QQ')
    >>> g
    Poly(2*x + 1, x, domain='ZZ')

    >>> F, G = f.unify(g)

    >>> F
    Poly(1/2*x + 1, x, domain='QQ')
    >>> G
    Poly(2*x + 1, x, domain='QQ')
    """
    # _unify returns (domain, per, F, G); wrap both raw reps back into Polys.
    _, per, F, G = self._unify(other)
    return per(F), per(G)
def _unify(self, other):
    """Bring ``self`` and ``other`` to a common ring.

    Returns ``(domain, per, F, G)`` where ``F``/``G`` are raw ring
    elements and ``per`` wraps a raw element back into a Poly.
    """
    other = sympify(other)

    if not other.is_Poly:
        # Non-Poly operands must coerce to a ground element of our ring.
        try:
            return (self.domain, self.per, self.rep,
                    self.rep.ring(self.domain.convert(other)))
        except CoercionFailed:
            raise UnificationFailed(f"can't unify {self} with {other}")

    newring = self.rep.ring.unify(other.rep.ring)
    gens = newring.symbols
    F, G = self.rep.set_ring(newring), other.rep.set_ring(newring)

    cls = self.__class__
    dom = newring.domain

    def per(rep, dom=dom, gens=gens, remove=None):
        # dom/gens bound as defaults so `per` stays usable after this
        # frame returns; `remove` drops one generator (yielding a plain
        # expression when none remain).
        if remove is not None:
            gens = gens[:remove] + gens[remove + 1:]

            if not gens:
                return dom.to_expr(rep)

        return cls.new(rep, *gens)

    return dom, per, F, G
def per(self, rep, gens=None, remove=None):
    """
    Create a Poly out of the given representation.

    Examples
    ========

    >>> a = (x**2 + 1).as_poly()
    >>> R = ZZ.inject(x)

    >>> a.per(R.from_list([ZZ(1), ZZ(1)]), gens=[y])
    Poly(y + 1, y, domain='ZZ')
    """
    if gens is None:
        gens = self.gens

    # `remove` drops the generator at that index; when nothing remains
    # the result degenerates to a plain domain expression.
    if remove is not None:
        gens = gens[:remove] + gens[remove + 1:]

        if not gens:
            return self.domain.to_expr(rep)

    return self.__class__.new(rep, *gens)
def set_domain(self, domain):
    """Set the ground domain of ``self``."""
    # Normalize the domain specification through the options machinery
    # (accepts domain objects as well as string shorthands like 'QQ').
    opt = build_options(self.gens, {'domain': domain})
    newrep = self.rep.set_domain(opt.domain)
    return self.per(newrep)

def set_modulus(self, modulus):
    """
    Set the modulus of ``self``.

    Examples
    ========

    >>> (5*x**2 + 2*x - 1).as_poly().set_modulus(2)
    Poly(x**2 + 1, x, modulus=2)
    """
    # Validate the modulus, then switch to the finite field GF(modulus).
    modulus = Modulus.preprocess(modulus)
    return self.set_domain(FF(modulus))

def get_modulus(self):
    """
    Get the modulus of ``self``.

    Examples
    ========

    >>> (x**2 + 1).as_poly(modulus=2).get_modulus()
    2
    """
    domain = self.domain

    if domain.is_FiniteField:
        return Integer(domain.order)
    else:
        raise PolynomialError('not a polynomial over a Galois field')
def _eval_subs(self, old, new):
    """Internal implementation of :func:`~diofant.core.basic.Basic.subs`."""
    # Substituting something that is not a generator falls back to Expr.
    if old not in self.gens:
        return self.as_expr().subs({old: new})

    # A numeric value can be substituted by evaluation.
    if new.is_number:
        return self.eval(old, new)

    # Otherwise try a generator swap; fall back to Expr on failure.
    try:
        return self.replace(old, new)
    except PolynomialError:
        return self.as_expr().subs({old: new})
def exclude(self):
    """
    Remove unnecessary generators from ``self``.

    Examples
    ========

    >>> (a + x).as_poly(a, b, c, d, x).exclude()
    Poly(a + x, a, x, domain='ZZ')
    """
    result = self.rep

    # A ground polynomial keeps all of its generators untouched.
    if result.is_ground:
        return self

    for symbol in result.ring.symbols:
        try:
            result = result.drop(symbol)
        except ValueError:
            # The symbol actually occurs in the polynomial; keep it.
            pass

    return self.per(result, gens=result.ring.symbols)
def replace(self, x, y=None):
    """
    Replace ``x`` with ``y`` in generators list.

    Examples
    ========

    >>> (x**2 + 1).as_poly().replace(x, y)
    Poly(y**2 + 1, y, domain='ZZ')
    """
    if y is None:
        # Single-argument form: replace the only generator with ``x``.
        if self.is_univariate:
            x, y = self.gen, x
        else:
            raise PolynomialError(
                'syntax supported only in univariate case')

    if x == y:
        return self

    if x in self.gens and y not in self.gens:
        dom = self.domain

        # ``y`` must not clash with a symbol of a composite ground domain.
        if not isinstance(dom, CompositeDomain) or y not in dom.symbols:
            gens = list(self.gens)
            gens[gens.index(x)] = y
            # Rebuild the representation over a ring with the new symbol.
            rep = dom.poly_ring(*gens).from_dict(dict(self.rep))
            return self.per(rep, gens=gens)

    raise PolynomialError(f"can't replace {x} with {y} in {self}")
def reorder(self, *gens, **args):
    """
    Efficiently apply new order of generators.

    Examples
    ========

    >>> (x**2 + x*y**2).as_poly().reorder(y, x)
    Poly(y**2*x + x**2, y, x, domain='ZZ')
    """
    opt = Options((), args)

    if not gens:
        # No explicit order given: fall back to the canonical sort.
        gens = _sort_gens(self.gens, opt=opt)
    elif set(gens) != set(self.gens):
        raise PolynomialError(
            'generators list can differ only up to order of elements')

    # Move the representation onto a ring with the permuted symbols.
    reordered = self.rep.set_ring(self.rep.ring.clone(symbols=gens))
    return self.per(reordered, gens=gens)
def has_only_gens(self, *gens):
    """
    Return ``True`` if ``Poly(f, *gens)`` retains ground domain.

    Examples
    ========

    >>> (x*y + 1).as_poly(x, y, z).has_only_gens(x, y)
    True
    >>> (x*y + z).as_poly(x, y, z).has_only_gens(x, y)
    False
    """
    # Indices of the generators the caller wants to keep.
    allowed = set()

    for gen in gens:
        if gen not in self.gens:
            raise GeneratorsError(
                f"{self} doesn't have {gen} as generator")
        allowed.add(self.gens.index(gen))

    # Every non-zero exponent must belong to an allowed generator.
    return all(not exp or i in allowed
               for monom in self.monoms()
               for i, exp in enumerate(monom))
def to_ring(self):
    """
    Make the ground domain a ring.

    Examples
    ========

    >>> (x**2 + 1).as_poly(field=True).to_ring()
    Poly(x**2 + 1, x, domain='ZZ')
    """
    # e.g. QQ -> ZZ; delegates to the domain's associated ring.
    return self.set_domain(self.domain.ring)

def to_field(self):
    """
    Make the ground domain a field.

    Examples
    ========

    >>> (x**2 + 1).as_poly().to_field()
    Poly(x**2 + 1, x, domain='QQ')
    """
    # e.g. ZZ -> QQ; delegates to the domain's associated field.
    return self.set_domain(self.domain.field)

def to_exact(self):
    """
    Make the ground domain exact.

    Examples
    ========

    >>> (x**2 + 1.0).as_poly().to_exact()
    Poly(x**2 + 1, x, domain='QQ')
    """
    # e.g. RR (floats) -> QQ (exact rationals).
    return self.set_domain(self.domain.get_exact())
def retract(self, field=None):
    """
    Recalculate the ground domain of a polynomial.

    Examples
    ========

    >>> f = (x**2 + 1).as_poly(domain=QQ.inject(y))
    >>> f
    Poly(x**2 + 1, x, domain='QQ[y]')

    >>> f.retract()
    Poly(x**2 + 1, x, domain='ZZ')
    >>> f.retract(field=True)
    Poly(x**2 + 1, x, domain='QQ')
    """
    old_dom = self.domain

    # Preserve a composite domain structure if one is already in use;
    # skip algebraic extensions for expression domains.
    composite = isinstance(old_dom, CompositeDomain) or None
    extension = not old_dom.is_ExpressionDomain

    dom, rep = construct_domain(self.as_dict(), field=field,
                                composite=composite,
                                extension=extension)
    return self.from_dict(rep, *self.gens, domain=dom)
def slice(self, x, m, n=None):
    """Take a continuous subsequence of terms of ``self``."""
    if n is None:
        # Generator omitted: slice over the principal generator.
        j, m, n = 0, x, m
    else:
        j = self._gen_to_level(x)

    return self.per(self.rep.slice(int(m), int(n), j))
def coeffs(self, order=None):
    """
    Returns all non-zero coefficients from ``self`` in lex order.

    Examples
    ========

    >>> (x**3 + 2*x + 3).as_poly().coeffs()
    [1, 2, 3]

    See Also
    ========

    all_coeffs
    coeff_monomial
    """
    return [coeff for _, coeff in self.terms(order)]

def monoms(self, order=None):
    """
    Returns all non-zero monomials from ``self`` in lex order.

    Examples
    ========

    >>> (x**2 + 2*x*y**2 + x*y + 3*y).as_poly().monoms()
    [(2, 0), (1, 2), (1, 1), (0, 1)]
    """
    return [monom for monom, _ in self.terms(order)]

def terms(self, order=None):
    """
    Returns all non-zero terms from ``self`` in lex order.

    Examples
    ========

    >>> (x**2 + 2*x*y**2 + x*y + 3*y).as_poly().terms()
    [((2, 0), 1), ((1, 2), 2), ((1, 1), 1), ((0, 1), 3)]
    """
    rep = self.rep

    if order is None:
        # Default to the monomial order of the underlying ring.
        order = rep.ring.order
    else:
        order = Order.preprocess(order)

    # Sort descending by the chosen order; coefficients become Exprs.
    return [(m, self.domain.to_expr(c))
            for m, c in sorted(rep.items(),
                               key=lambda monom: order(monom[0]),
                               reverse=True)]
def all_coeffs(self):
    """
    Returns all coefficients from a univariate polynomial ``self``.

    Examples
    ========

    >>> (x**3 + 2*x - 1).as_poly().all_coeffs()
    [-1, 2, 0, 1]
    """
    # Convert each ground coefficient to an Expr, zeros included.
    return list(map(self.domain.to_expr, self.rep.all_coeffs()))
def termwise(self, func, *gens, **args):
    """
    Apply a function to all terms of ``self``.

    Examples
    ========

    >>> def func(k, coeff):
    ...     k = k[0]
    ...     return coeff//10**(2-k)

    >>> (x**2 + 20*x + 400).as_poly().termwise(func)
    Poly(x**2 + 2*x + 4, x, domain='ZZ')
    """
    new_terms = {}

    for monom, coeff in self.terms():
        out = func(monom, coeff)

        # ``func`` may return a new (monom, coeff) pair or just a coeff.
        if isinstance(out, tuple):
            monom, coeff = out
        else:
            coeff = out

        if not coeff:
            # Zero coefficients are dropped silently.
            continue

        if monom in new_terms:
            raise PolynomialError(f'{monom} monomial was generated twice')

        new_terms[monom] = coeff

    return self.from_dict(new_terms, *(gens or self.gens), **args)
def length(self):
    """
    Returns the number of non-zero terms in ``self``.

    Examples
    ========

    >>> (x**2 + 2*x - 1).as_poly().length()
    3
    """
    return len(self.as_dict())

def as_dict(self, native=False):
    """
    Switch to a :class:`dict` representation.

    Examples
    ========

    >>> (x**2 + 2*x*y**2 - y).as_poly().as_dict()
    {(0, 1): -1, (1, 2): 2, (2, 0): 1}
    """
    # ``native=True`` keeps ground-domain coefficients; otherwise they
    # are converted to Expr instances.
    if native:
        return dict(self.rep)
    else:
        return {k: self.domain.to_expr(v) for k, v in self.rep.items()}
def as_expr(self, *gens):
    """
    Convert a Poly instance to an Expr instance.

    Examples
    ========

    >>> f = (x**2 + 2*x*y**2 - y).as_poly()

    >>> f.as_expr()
    x**2 + 2*x*y**2 - y
    >>> f.as_expr({x: 5})
    10*y**2 - y + 25
    >>> f.as_expr(5, 6)
    379
    """
    if not gens:
        gens = self.gens
    elif len(gens) == 1 and isinstance(gens[0], dict):
        # Mapping form: substitute only the listed generators.
        mapping = gens[0]
        gens = list(self.gens)

        for gen, value in mapping.items():
            try:
                index = gens.index(gen)
            except ValueError:
                raise GeneratorsError(
                    f"{self} doesn't have {gen} as generator")
            else:
                gens[index] = value

    rep = self.rep
    # Build the raw expression, then substitute generator -> value pairs.
    return rep.ring.to_expr(rep).subs(zip(self.gens, gens))
def inject(self, front=False):
    """
    Inject ground domain generators into ``self``.

    Examples
    ========

    >>> f = (x**2*y + x*y**3 + x*y + 1).as_poly(x)

    >>> f.inject()
    Poly(x**2*y + x*y**3 + x*y + 1, x, y, domain='ZZ')
    >>> f.inject(front=True)
    Poly(y**3*x + y*x**2 + y*x + 1, y, x, domain='ZZ')
    """
    # ``front`` controls whether the injected symbols come before or
    # after the existing generators.
    result = self.rep.inject(front=front)
    return self.new(result, *result.ring.symbols)

def eject(self, *gens):
    """
    Eject selected generators into the ground domain.

    Examples
    ========

    >>> f = (x**2*y + x*y**3 + x*y + 1).as_poly()

    >>> f.eject(x)
    Poly(x*y**3 + (x**2 + x)*y + 1, y, domain='ZZ[x]')
    >>> f.eject(y)
    Poly(y*x**2 + (y**3 + y)*x + 1, x, domain='ZZ[y]')
    """
    dom = self.domain

    if not dom.is_Numerical:
        raise DomainError(f"can't eject generators over {dom}")

    # Work on a copy: eject mutates the low-level representation.
    result = self.rep.copy()
    result = result.eject(*gens)
    return self.new(result, *result.ring.symbols)
def terms_gcd(self):
    """
    Remove GCD of terms from the polynomial ``self``.

    Examples
    ========

    >>> (x**6*y**2 + x**3*y).as_poly().terms_gcd()
    ((3, 1), Poly(x**3*y + 1, x, y, domain='ZZ'))
    """
    # Returns the extracted monomial (as an exponent tuple) and the
    # deflated polynomial.
    J, result = self.rep.terms_gcd()
    return J, self.per(result)

def quo_ground(self, coeff):
    """
    Quotient of ``self`` by an element of the ground domain.

    Examples
    ========

    >>> (2*x + 4).as_poly().quo_ground(2)
    Poly(x + 2, x, domain='ZZ')

    >>> (2*x + 3).as_poly().quo_ground(2)
    Poly(x + 1, x, domain='ZZ')
    """
    # Floor-style division: inexact quotients are truncated (see example).
    result = self.rep.quo_ground(coeff)
    return self.per(result)

def exquo_ground(self, coeff):
    """
    Exact quotient of ``self`` by an element of the ground domain.

    Examples
    ========

    >>> (2*x + 4).as_poly().exquo_ground(2)
    Poly(x + 2, x, domain='ZZ')

    >>> (2*x + 3).as_poly().exquo_ground(2)
    Traceback (most recent call last):
    ...
    ExactQuotientFailed: 2 does not divide 3 in ZZ
    """
    result = self.rep.exquo_ground(coeff)
    return self.per(result)
def div(self, other, auto=True):
    """
    Polynomial division with remainder of ``self`` by ``other``.

    Examples
    ========

    >>> (x**2 + 1).as_poly().div((2*x - 4).as_poly())
    (Poly(1/2*x + 1, x, domain='QQ'), Poly(5, x, domain='QQ'))

    >>> (x**2 + 1).as_poly().div((2*x - 4).as_poly(), auto=False)
    (Poly(0, x, domain='ZZ'), Poly(x**2 + 1, x, domain='ZZ'))
    """
    dom, per, F, G = self._unify(other)
    retract = False

    # With ``auto`` on, lift a non-field ring (e.g. ZZ) to its fraction
    # field so the division can proceed; retract afterwards if possible.
    if auto and dom.is_Ring and not dom.is_Field:
        F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field)
        retract = True

    q, r = divmod(F, G)

    if retract:
        try:
            Q, R = q.set_domain(q.ring.domain.ring), r.set_domain(r.ring.domain.ring)
        except CoercionFailed:
            # Quotient/remainder have non-ring coefficients; keep field.
            pass
        else:
            q, r = Q, R

    return per(q), per(r)

def rem(self, other, auto=True):
    """
    Computes the polynomial remainder of ``self`` by ``other``.

    Examples
    ========

    >>> (x**2 + 1).as_poly().rem((2*x - 4).as_poly())
    Poly(5, x, domain='ZZ')

    >>> (x**2 + 1).as_poly().rem((2*x - 4).as_poly(), auto=False)
    Poly(x**2 + 1, x, domain='ZZ')
    """
    dom, per, F, G = self._unify(other)
    retract = False

    # Same lift-to-field / retract strategy as in :meth:`div`.
    if auto and dom.is_Ring and not dom.is_Field:
        F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field)
        retract = True

    r = F % G

    if retract:
        try:
            r = r.set_domain(r.ring.domain.ring)
        except CoercionFailed:
            pass

    return per(r)

def quo(self, other, auto=True):
    """
    Computes polynomial quotient of ``self`` by ``other``.

    Examples
    ========

    >>> (x**2 + 1).as_poly().quo((2*x - 4).as_poly())
    Poly(1/2*x + 1, x, domain='QQ')

    >>> (x**2 - 1).as_poly().quo((x - 1).as_poly())
    Poly(x + 1, x, domain='ZZ')
    """
    dom, per, F, G = self._unify(other)
    retract = False

    # Same lift-to-field / retract strategy as in :meth:`div`.
    if auto and dom.is_Ring and not dom.is_Field:
        F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field)
        retract = True

    q = F // G

    if retract:
        try:
            q = q.set_domain(q.ring.domain.ring)
        except CoercionFailed:
            pass

    return per(q)

def exquo(self, other, auto=True):
    """
    Computes polynomial exact quotient of ``self`` by ``other``.

    Examples
    ========

    >>> (x**2 - 1).as_poly().exquo((x - 1).as_poly())
    Poly(x + 1, x, domain='ZZ')

    >>> (x**2 + 1).as_poly().exquo((2*x - 4).as_poly())
    Traceback (most recent call last):
    ...
    ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1
    """
    dom, per, F, G = self._unify(other)
    retract = False

    # Same lift-to-field / retract strategy as in :meth:`div`.
    if auto and dom.is_Ring and not dom.is_Field:
        F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field)
        retract = True

    try:
        q = F.exquo(G)
    except ExactQuotientFailed as exc:
        # Re-raise in terms of the user-visible expressions.
        raise exc.new(self.as_expr(), other.as_expr())

    if retract:
        try:
            q = q.set_domain(q.ring.domain.ring)
        except CoercionFailed:
            pass

    return per(q)
def _gen_to_level(self, gen):
    """Returns level associated with the given generator."""
    # ``gen`` may be a symbol or an integer index; ring.index handles both.
    try:
        return self.rep.ring.index(gen)
    except ValueError:
        raise PolynomialError(f'a valid generator expected, got {gen}')

def degree(self, gen=0):
    """
    Returns degree of ``self`` in ``x_j``.

    The degree of 0 is negative floating-point infinity.

    Examples
    ========

    >>> (x**2 + y*x + 1).as_poly().degree()
    2
    >>> (x**2 + y*x + y).as_poly().degree(y)
    1
    >>> Integer(0).as_poly(x).degree()
    -inf
    """
    j = self._gen_to_level(gen)
    return self.rep.degree(j)

def total_degree(self):
    """
    Returns the total degree of ``self``.

    Examples
    ========

    >>> (x**2 + y*x + 1).as_poly().total_degree()
    2
    >>> (x + y**5).as_poly().total_degree()
    5
    """
    return self.rep.total_degree()
def LC(self, order=None):
    """
    Returns the leading coefficient of ``self``.

    Examples
    ========

    >>> (4*x**3 + 2*x**2 + 3*x).as_poly().LC()
    4
    """
    if order is not None:
        # Non-default order: the first coefficient in that ordering.
        return self.coeffs(order)[0]

    result = self.rep.LC
    return self.domain.to_expr(result)

def TC(self):
    """
    Returns the trailing coefficient of ``self``.

    Examples
    ========

    >>> (x**3 + 2*x**2 + 3*x).as_poly().TC()
    0
    """
    # Coefficient of the unit monomial (constant term); 0 when absent.
    result = self.rep[1]
    return self.domain.to_expr(result)

def EC(self, order=None):
    """
    Returns the last non-zero coefficient of ``self``.

    Examples
    ========

    >>> (x**3 + 2*x**2 + 3*x).as_poly().EC()
    3
    """
    EM = self.EM(order)
    return self.coeff_monomial(tuple(EM))
def coeff_monomial(self, monom):
    """
    Returns the coefficient of ``monom`` in ``self`` if there, else 0.

    Examples
    ========

    >>> p = (24*x*y*exp(8) + 23*x).as_poly(greedy=False)

    >>> p.coeff_monomial(x)
    23
    >>> p.coeff_monomial(y)
    0
    >>> p.coeff_monomial(x*y)
    24*E**8
    >>> p.coeff_monomial((1, 1))
    24*E**8

    Note that ``Expr.coeff()`` behaves differently, collecting terms
    if possible; the Poly must be converted to an Expr to use that
    method, however:

    >>> p.as_expr().coeff(x)
    24*E**8*y + 23
    >>> p.as_expr().coeff(y)
    24*E**8*x
    >>> p.as_expr().coeff(x*y)
    24*E**8
    """
    # Accepts a monomial expression or an exponent tuple; exponents for
    # every generator must be given.
    N = Monomial(monom, self.gens)

    if len(N) != len(self.gens):
        raise ValueError('exponent of each generator must be specified')

    result = self.rep[N]
    return self.domain.to_expr(result)

def coeff(self, x, n=1, right=False):
    # the semantics of coeff_monomial and Expr.coeff are different;
    # if someone is working with a Poly, they should be aware of the
    # differences and chose the method best suited for the query.
    # Alternatively, a pure-polys method could be written here but
    # at this time the ``right`` keyword would be ignored because Poly
    # doesn't work with non-commutatives.
    raise NotImplementedError(
        'Either convert to Expr with `as_expr` method '
        "to use Expr's coeff method or else use the "
        '`coeff_monomial` method of Polys.')
def LM(self, order=None):
    """
    Returns the leading monomial of ``self``.

    The leading monomial signifies the monomial having the highest
    power of the principal generator in the polynomial expression.

    Examples
    ========

    >>> (4*x**2 + 2*x*y**2 + x*y + 3*y).as_poly().LM()
    x**2*y**0
    """
    # The zero polynomial has no monomials; use the all-zeros exponent.
    LM = (0,)*len(self.gens) if self.is_zero else self.monoms(order)[0]
    return Monomial(LM, self.gens)

def EM(self, order=None):
    """
    Returns the last non-zero monomial of ``self``.

    Examples
    ========

    >>> (4*x**2 + 2*x*y**2 + x*y + 3*y).as_poly().EM()
    x**0*y**1
    """
    # Same zero-polynomial convention as :meth:`LM`.
    EM = (0,)*len(self.gens) if self.is_zero else self.monoms(order)[-1]
    return Monomial(EM, self.gens)

def LT(self, order=None):
    """
    Returns the leading term of ``self``.

    The leading term signifies the term having the highest power
    of the principal generator in the polynomial expression.

    Examples
    ========

    >>> (4*x**2 + 2*x*y**2 + x*y + 3*y).as_poly().LT()
    (x**2*y**0, 4)
    """
    LM = self.LM(order)
    return LM, self.coeff_monomial(tuple(LM))

def ET(self, order=None):
    """
    Returns the last non-zero term of ``self``.

    Examples
    ========

    >>> (4*x**2 + 2*x*y**2 + x*y + 3*y).as_poly().ET()
    (x**0*y**1, 3)
    """
    EM = self.EM(order)
    return EM, self.coeff_monomial(tuple(EM))
def clear_denoms(self, convert=False):
    """
    Clear denominators, but keep the ground domain.

    Examples
    ========

    >>> f = (x/2 + Rational(1, 3)).as_poly()

    >>> f.clear_denoms()
    (6, Poly(3*x + 2, x, domain='QQ'))
    >>> f.clear_denoms(convert=True)
    (6, Poly(3*x + 2, x, domain='ZZ'))
    """
    dom = self.domain

    if convert and dom.has_assoc_Ring:
        # After clearing, the result fits in the associated ring.
        dom = self.domain.ring

    coeff, result = self.rep.clear_denoms(convert=convert)
    f = self.per(result)

    if convert:
        f = f.set_domain(dom)

    # ``coeff`` is the common denominator the polynomial was scaled by.
    return dom.to_expr(coeff), f

def rat_clear_denoms(self, other):
    """
    Clear denominators in a rational function ``self/other``.

    Examples
    ========

    >>> f = (x**2/y + 1).as_poly(x)
    >>> g = (x**3 + y).as_poly(x)

    >>> p, q = f.rat_clear_denoms(g)

    >>> p
    Poly(x**2 + y, x, domain='ZZ[y]')
    >>> q
    Poly(y*x**3 + y**2, x, domain='ZZ[y]')
    """
    f, g = self, other

    dom, per, f, g = f._unify(g)

    f = per(f)
    g = per(g)

    # Only meaningful over a field with an associated ring (e.g. QQ/ZZ).
    if not (dom.is_Field and dom.has_assoc_Ring):
        return f, g

    a, f = f.clear_denoms(convert=True)
    b, g = g.clear_denoms(convert=True)

    # Cross-multiply so both parts are scaled consistently.
    f *= b
    g *= a

    return f, g
def integrate(self, *specs, **args):
    """
    Computes indefinite integral of ``self``.

    Examples
    ========

    >>> (x**2 + 2*x + 1).as_poly().integrate()
    Poly(1/3*x**3 + x**2 + x, x, domain='QQ')

    >>> (x*y**2 + x).as_poly().integrate((0, 1), (1, 0))
    Poly(1/2*x**2*y**2 + 1/2*x**2, x, y, domain='QQ')
    """
    f = self

    # Integration divides by integers; lift a ring (e.g. ZZ) to a field.
    if args.get('auto', True) and f.domain.is_Ring:
        f = f.to_field()

    if not specs:
        # No spec: integrate once over the principal generator.
        return f.per(f.rep.integrate(m=1))

    rep = f.rep

    for spec in specs:
        # Each spec is either ``gen`` or a ``(gen, multiplicity)`` pair.
        if type(spec) is tuple:
            gen, m = spec
        else:
            gen, m = spec, 1

        rep = rep.integrate(f._gen_to_level(gen), int(m))

    return f.per(rep)

def _eval_derivative(self, v):
    # Hook used by Expr.diff: differentiate with respect to generator v.
    rep = self.rep
    v = self._gen_to_level(v)
    rep = rep.diff(v)
    return self.per(rep)
def eval(self, x, a=None, auto=True):
    """
    Evaluate ``self`` at ``a`` in the given variable.

    Examples
    ========

    >>> (x**2 + 2*x + 3).as_poly().eval(2)
    11

    >>> (2*x*y + 3*x + y + 2).as_poly().eval(x, 2)
    Poly(5*y + 8, y, domain='ZZ')

    >>> f = (2*x*y + 3*x + y + 2*z).as_poly()

    >>> f.eval({x: 2})
    Poly(5*y + 2*z + 6, y, z, domain='ZZ')
    >>> f.eval({x: 2, y: 5})
    Poly(2*z + 31, z, domain='ZZ')
    >>> f.eval({x: 2, y: 5, z: 7})
    45

    >>> f.eval((2, 5))
    Poly(2*z + 31, z, domain='ZZ')
    >>> f(2, 5)
    Poly(2*z + 31, z, domain='ZZ')
    """
    f = self

    if a is None:
        # Single-argument forms: a mapping, a value sequence, or a value
        # for the principal generator.
        if isinstance(x, dict):
            mapping = x

            for gen, value in mapping.items():
                f = f.eval(gen, value)

            return f
        elif isinstance(x, (tuple, list)):
            values = x

            if len(values) > len(f.gens):
                raise ValueError('too many values provided')

            # Values pair up with generators positionally.
            for gen, value in zip(f.gens, values):
                f = f.eval(gen, value)

            return f
        else:
            j, a = 0, x
    else:
        j = f._gen_to_level(x)

    try:
        result = f.rep.eval(j, a)
    except CoercionFailed:
        if not auto:
            raise DomainError(f"can't evaluate at {a} in {f.domain}")
        else:
            # Enlarge the ground domain so it can hold ``a``, then retry.
            a_domain, [a] = construct_domain([a])
            new_domain = f.domain.unify(a_domain, f.gens)

            f = f.set_domain(new_domain)
            a = new_domain.convert(a, a_domain)

            result = f.rep.eval(j, a)

    # The evaluated generator drops out of the generators list.
    return f.per(result, remove=j)
def __call__(self, *values):
    """
    Evaluate ``self`` at the given values.

    Examples
    ========

    >>> f = (2*x*y + 3*x + y + 2*z).as_poly()

    >>> f(2)
    Poly(5*y + 2*z + 6, y, z, domain='ZZ')
    >>> f(2, 5)
    Poly(2*z + 31, z, domain='ZZ')
    >>> f(2, 5, 7)
    45
    """
    # Thin wrapper over the sequence form of :meth:`eval`.
    return self.eval(values)
def half_gcdex(self, other, auto=True):
    """
    Half extended Euclidean algorithm of ``self`` and ``other``.

    Returns ``(s, h)`` such that ``h = gcd(f, g)`` and ``s*f = h (mod g)``.

    Examples
    ========

    >>> f = (x**4 - 2*x**3 - 6*x**2 + 12*x + 15).as_poly()
    >>> g = (x**3 + x**2 - 4*x - 4).as_poly()

    >>> f.half_gcdex(g)
    (Poly(-1/5*x + 3/5, x, domain='QQ'), Poly(x + 1, x, domain='QQ'))
    """
    dom, per, F, G = self._unify(other)

    # The algorithm needs a field; lift a ring domain automatically.
    if auto and dom.is_Ring:
        F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field)

    s, h = F.half_gcdex(G)
    return per(s), per(h)

def gcdex(self, other, auto=True):
    """
    Extended Euclidean algorithm of ``self`` and ``other``.

    Returns ``(s, t, h)`` such that ``h = gcd(f, g)`` and ``s*f + t*g = h``.

    Examples
    ========

    >>> f = (x**4 - 2*x**3 - 6*x**2 + 12*x + 15).as_poly()
    >>> g = (x**3 + x**2 - 4*x - 4).as_poly()

    >>> f.gcdex(g)
    (Poly(-1/5*x + 3/5, x, domain='QQ'),
     Poly(1/5*x**2 - 6/5*x + 2, x, domain='QQ'),
     Poly(x + 1, x, domain='QQ'))
    """
    dom, per, F, G = self._unify(other)

    # The algorithm needs a field; lift a ring domain automatically.
    if auto and dom.is_Ring:
        F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field)

    s, t, h = F.gcdex(G)
    return per(s), per(t), per(h)

def invert(self, other, auto=True):
    """
    Invert ``self`` modulo ``other`` when possible.

    Examples
    ========

    >>> (x**2 - 1).as_poly().invert((2*x - 1).as_poly())
    Poly(-4/3, x, domain='QQ')

    >>> (x**2 - 1).as_poly().invert((x - 1).as_poly())
    Traceback (most recent call last):
    ...
    NotInvertible: zero divisor
    """
    dom, per, F, G = self._unify(other)

    # Inversion needs a field; lift a ring domain automatically.
    if auto and dom.is_Ring:
        F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field)

    result = F.ring.invert(F, G)
    return per(result)
def subresultants(self, other):
    """
    Computes the subresultant PRS of ``self`` and ``other``.

    Examples
    ========

    >>> (x**2 + 1).as_poly().subresultants((x**2 - 1).as_poly())
    [Poly(x**2 + 1, x, domain='ZZ'),
     Poly(x**2 - 1, x, domain='ZZ'),
     Poly(-2, x, domain='ZZ')]
    """
    _, per, F, G = self._unify(other)
    result = F.subresultants(G)
    # Wrap every element of the PRS back into Poly instances.
    return list(map(per, result))

def resultant(self, other, includePRS=False):
    """
    Computes the resultant of ``self`` and ``other`` via PRS.

    If includePRS=True, it includes the subresultant PRS in the result.
    Because the PRS is used to calculate the resultant, this is more
    efficient than calling :func:`subresultants` separately.

    Examples
    ========

    >>> f = (x**2 + 1).as_poly()

    >>> f.resultant((x**2 - 1).as_poly())
    4
    >>> f.resultant((x**2 - 1).as_poly(), includePRS=True)
    (4, [Poly(x**2 + 1, x, domain='ZZ'), Poly(x**2 - 1, x, domain='ZZ'),
         Poly(-2, x, domain='ZZ')])
    """
    _, per, F, G = self._unify(other)

    if includePRS:
        result, R = F.resultant(G, includePRS=includePRS)
        # ``remove=0``: the resultant no longer depends on the first gen.
        return per(result, remove=0), list(map(per, R))
    else:
        result = F.resultant(G)

    return per(result, remove=0)
def discriminant(self):
    """
    Computes the discriminant of ``self``.

    Examples
    ========

    >>> (x**2 + 2*x + 3).as_poly().discriminant()
    -8
    """
    result = self.rep.discriminant()
    # ``remove=0``: the discriminant does not depend on the first gen.
    return self.per(result, remove=0)

def dispersionset(self, other=None):
    r"""Compute the *dispersion set* of two polynomials.

    Examples
    ========

    >>> ((x - 3)*(x + 3)).as_poly().dispersionset()
    {0, 6}
    """
    f = self.rep
    ring = f.ring
    # ``other`` defaults to None, meaning the dispersion set of f alone.
    g = other.rep if other is not None else other
    return {ZZ.to_expr(i) for i in ring.dispersionset(f, g)}
def cofactors(self, other):
    """
    Returns the GCD of ``self`` and ``other`` and their cofactors.

    For two polynomials ``f`` and ``g`` it returns polynomials
    ``(h, cff, cfg)`` such that ``h = gcd(f, g)``, and ``cff = quo(f, h)``
    and ``cfg = quo(g, h)`` are, so called, cofactors of ``f`` and ``g``.

    Examples
    ========

    >>> (x**2 - 1).as_poly().cofactors((x**2 - 3*x + 2).as_poly())
    (Poly(x - 1, x, domain='ZZ'),
     Poly(x + 1, x, domain='ZZ'),
     Poly(x - 2, x, domain='ZZ'))
    """
    _, per, F, G = self._unify(other)
    h, cff, cfg = F.cofactors(G)
    return per(h), per(cff), per(cfg)

def gcd(self, other):
    """
    Returns the polynomial GCD of ``self`` and ``other``.

    Examples
    ========

    >>> (x**2 - 1).as_poly().gcd((x**2 - 3*x + 2).as_poly())
    Poly(x - 1, x, domain='ZZ')
    """
    _, per, F, G = self._unify(other)
    result = F.gcd(G)
    return per(result)

def lcm(self, other):
    """
    Returns polynomial LCM of ``self`` and ``other``.

    Examples
    ========

    >>> (x**2 - 1).as_poly().lcm((x**2 - 3*x + 2).as_poly())
    Poly(x**3 - 2*x**2 - x + 2, x, domain='ZZ')
    """
    _, per, F, G = self._unify(other)
    result = F.lcm(G)
    return per(result)
def trunc(self, p):
    """
    Reduce ``self`` modulo a constant ``p``.

    Examples
    ========

    >>> (2*x**3 + 3*x**2 + 5*x + 7).as_poly().trunc(3)
    Poly(-x**3 - x + 1, x, domain='ZZ')
    """
    # Coefficients are reduced into the symmetric range around zero
    # (see the example: 5 mod 3 -> -1).
    p = self.domain.convert(p)
    result = self.rep.trunc_ground(p)
    return self.per(result)

def monic(self, auto=True):
    """
    Divides all coefficients by ``LC(f)``.

    Examples
    ========

    >>> (3*x**2 + 6*x + 9).as_poly().monic()
    Poly(x**2 + 2*x + 3, x, domain='QQ')

    >>> (3*x**2 + 4*x + 2).as_poly().monic()
    Poly(x**2 + 4/3*x + 2/3, x, domain='QQ')
    """
    f = self

    # Division by the leading coefficient needs a field domain.
    if auto and f.domain.is_Ring:
        f = f.to_field()

    result = f.rep.monic()
    return f.per(result)
def content(self):
    """
    Returns the GCD of polynomial coefficients.

    Examples
    ========

    >>> (6*x**2 + 8*x + 12).as_poly().content()
    2
    """
    result = self.rep.content()
    return self.domain.to_expr(result)

def primitive(self):
    """
    Returns the content and a primitive form of ``self``.

    Examples
    ========

    >>> (2*x**2 + 8*x + 12).as_poly().primitive()
    (2, Poly(x**2 + 4*x + 6, x, domain='ZZ'))
    """
    # ``self == cont * result`` with ``result`` having content 1.
    cont, result = self.rep.primitive()
    return self.domain.to_expr(cont), self.per(result)
def compose(self, other):
    """
    Computes the functional composition of ``self`` and ``other``.

    Examples
    ========

    >>> (x**2 + x).as_poly().compose((x - 1).as_poly())
    Poly(x**2 - x, x, domain='ZZ')
    """
    _, per, F, G = self._unify(other)
    # Substitute G for the first generator of the unified ring.
    result = F.compose(G.ring.gens[0], G)
    return per(result)

def decompose(self):
    """
    Computes a functional decomposition of ``self``.

    Examples
    ========

    >>> (x**4 + 2*x**3 - x - 1).as_poly().decompose()
    [Poly(x**2 - x - 1, x, domain='ZZ'), Poly(x**2 + x, x, domain='ZZ')]
    """
    # The returned list composes (outermost first) back to ``self``.
    result = self.rep.decompose()
    return list(map(self.per, result))

def shift(self, a):
    """
    Efficiently compute Taylor shift ``f(x + a)``.

    Examples
    ========

    >>> (x**2 - 2*x + 1).as_poly().shift(2)
    Poly(x**2 + 2*x + 1, x, domain='ZZ')
    """
    result = self.rep.shift(a)
    return self.per(result)
def sqf_norm(self):
    """
    Computes square-free norm of ``self``.

    Returns ``s``, ``f``, ``r``, such that ``g(x) = f(x-sa)`` and
    ``r(x) = Norm(g(x))`` is a square-free polynomial over ``K``,
    where ``a`` is the algebraic extension of the ground domain.

    Examples
    ========

    >>> s, f, r = (x**2 + 1).as_poly(extension=[sqrt(3)]).sqf_norm()
    >>> s
    1
    >>> f
    Poly(x**2 - 2*sqrt(3)*x + 4, x, domain='QQ<sqrt(3)>')
    >>> r
    Poly(x**4 - 4*x**2 + 16, x, domain='QQ')
    """
    s, g, r = self.rep.sqf_norm()
    return s, self.per(g), self.per(r)

def sqf_part(self):
    """
    Computes square-free part of ``self``.

    Examples
    ========

    >>> (x**3 - 3*x - 2).as_poly().sqf_part()
    Poly(x**2 - x - 2, x, domain='ZZ')
    """
    result = self.rep.sqf_part()
    return self.per(result)

def sqf_list(self):
    """
    Returns a list of square-free factors of ``self``.

    Examples
    ========

    >>> f = (2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16).as_poly()

    >>> f.sqf_list()
    (2, [(Poly(x + 1, x, domain='ZZ'), 2),
         (Poly(x + 2, x, domain='ZZ'), 3)])
    """
    # Returns (leading content, [(factor, multiplicity), ...]).
    coeff, factors = self.rep.sqf_list()
    return (self.domain.to_expr(coeff),
            [(self.per(g), k) for g, k in factors])

def factor_list(self):
    """
    Returns a list of irreducible factors of ``self``.

    Examples
    ========

    >>> f = (2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y).as_poly()

    >>> f.factor_list()
    (2, [(Poly(x + y, x, y, domain='ZZ'), 1),
         (Poly(x**2 + 1, x, y, domain='ZZ'), 2)])
    """
    try:
        coeff, factors = self.rep.factor_list()
    except DomainError:
        # Factorization unsupported over this domain: treat as irreducible.
        return Integer(1), [(self, 1)]

    return (self.domain.to_expr(coeff),
            [(self.per(g), k) for g, k in factors])
def count_roots(self, inf=None, sup=None):
    """
    Return the number of roots of ``self`` in ``[inf, sup]`` interval.

    Examples
    ========

    >>> (x**4 - 4).as_poly().count_roots(-3, 3)
    2
    >>> (x**4 - 4).as_poly().count_roots(0, 1 + 3*I)
    1
    """
    # Normalize each bound: None/±oo means unbounded, a real bound is a
    # rational, a complex bound switches to complex-root counting.
    inf_real, sup_real = True, True

    if inf is not None:
        inf = sympify(inf)

        if inf == -oo:
            inf = None
        else:
            re, im = inf.as_real_imag()

            if not im:
                inf = QQ.convert(inf)
            else:
                inf, inf_real = tuple(map(QQ.convert, (re, im))), False

    if sup is not None:
        sup = sympify(sup)

        # Test with ``==`` for consistency with the ``inf`` branch above
        # (the original ``is oo`` relied on ``oo`` being a singleton).
        if sup == oo:
            sup = None
        else:
            re, im = sup.as_real_imag()

            if not im:
                sup = QQ.convert(sup)
            else:
                sup, sup_real = tuple(map(QQ.convert, (re, im))), False

    if inf_real and sup_real:
        count = self.rep.ring._count_real_roots(self.rep, inf=inf, sup=sup)
    else:
        # Promote purely-real bounds to complex points on the real axis.
        if inf_real and inf is not None:
            inf = (inf, QQ.zero)

        if sup_real and sup is not None:
            sup = (sup, QQ.zero)

        count = self.rep.ring._count_complex_roots(self.rep, inf=inf, sup=sup)

    return Integer(count)
def root(self, index, radicals=True):
    """
    Get an indexed root of a polynomial.

    Examples
    ========

    >>> f = (2*x**3 - 7*x**2 + 4*x + 4).as_poly()

    >>> f.root(0)
    -1/2
    >>> f.root(1)
    2
    >>> f.root(2)
    2
    >>> f.root(3)
    Traceback (most recent call last):
    ...
    IndexError: root index out of [-3, 2] range, got 3

    >>> (x**5 + x + 1).as_poly().root(0)
    RootOf(x**3 - x**2 + 1, 0)
    """
    # Imported locally to avoid a circular import with rootoftools.
    from .rootoftools import RootOf
    return RootOf(self, index, radicals=radicals)

def real_roots(self, multiple=True, radicals=True):
    """
    Return a list of real roots with multiplicities.

    Examples
    ========

    >>> (2*x**3 - 7*x**2 + 4*x + 4).as_poly().real_roots()
    [-1/2, 2, 2]
    >>> (x**3 + x + 1).as_poly().real_roots()
    [RootOf(x**3 + x + 1, 0)]
    """
    from .rootoftools import RootOf
    reals = RootOf.real_roots(self, radicals=radicals)

    if multiple:
        return reals
    else:
        # Collapse repeats into (root, multiplicity) pairs.
        return group(reals, multiple=False)

def all_roots(self, multiple=True, radicals=True):
    """
    Return a list of real and complex roots with multiplicities.

    Examples
    ========

    >>> (2*x**3 - 7*x**2 + 4*x + 4).as_poly().all_roots()
    [-1/2, 2, 2]
    >>> (x**3 + x + 1).as_poly().all_roots()
    [RootOf(x**3 + x + 1, 0), RootOf(x**3 + x + 1, 1),
     RootOf(x**3 + x + 1, 2)]
    """
    from .rootoftools import RootOf
    roots = RootOf.all_roots(self, radicals=radicals)

    if multiple:
        return roots
    else:
        # Collapse repeats into (root, multiplicity) pairs.
        return group(roots, multiple=False)
def nroots(self, n=15, maxsteps=50, cleanup=True):
    """
    Compute numerical approximations of roots of ``self``.

    Parameters
    ==========

    n ... the number of digits to calculate
    maxsteps ... the maximum number of iterations to do

    If the accuracy `n` cannot be reached in `maxsteps`, it will raise an
    exception. You need to rerun with higher maxsteps.

    Examples
    ========

    >>> (x**2 - 3).as_poly().nroots(n=15)
    [-1.73205080756888, 1.73205080756888]
    >>> (x**2 - 3).as_poly().nroots(n=30)
    [-1.73205080756887729352744634151, 1.73205080756887729352744634151]
    """
    if self.is_multivariate:
        raise MultivariatePolynomialError(
            f"can't compute numerical roots of {self}")

    # Constant polynomials have no roots.
    if self.degree() <= 0:
        return []

    # For integer and rational coefficients, convert them to integers only
    # (for accuracy). Otherwise just try to convert the coefficients to
    # mpmath.mpc and raise an exception if the conversion fails.
    if self.domain is ZZ:
        coeffs = [int(coeff) for coeff in self.all_coeffs()]
    elif self.domain is QQ:
        # Scale by the LCM of denominators to stay in exact integers.
        denoms = [coeff.denominator for coeff in self.all_coeffs()]
        fac = math.lcm(*denoms)
        coeffs = [int(coeff*fac) for coeff in self.all_coeffs()]
    else:
        coeffs = [coeff.evalf(n, strict=False).as_real_imag()
                  for coeff in self.all_coeffs()]
        try:
            coeffs = [mpmath.mpc(*coeff) for coeff in coeffs]
        except TypeError:
            raise DomainError(f'Numerical domain expected, got {self.domain}')

    # Temporarily raise mpmath's working precision; always restore it.
    dps = mpmath.mp.dps
    mpmath.mp.dps = n

    try:
        # We need to add extra precision to guard against losing accuracy.
        # 10 times the degree of the polynomial seems to work well.
        roots = mpmath.polyroots(list(reversed(coeffs)), maxsteps=maxsteps,
                                 cleanup=cleanup, error=False,
                                 extraprec=self.degree()*10)

        # Mpmath puts real roots first, then complex ones (as does all_roots)
        # so we make sure this convention holds here, too.
        roots = list(map(sympify,
                         sorted(roots, key=lambda r: (1 if r.imag else 0, r.real, r.imag))))
    except mpmath.libmp.NoConvergence:
        raise mpmath.libmp.NoConvergence(
            f'convergence to root failed; try n < {n} or maxsteps > {maxsteps}')
    finally:
        mpmath.mp.dps = dps

    return roots
def cancel(self, other, include=False):
    """
    Cancel common factors in a rational function ``self/other``.

    Examples
    ========

    >>> (2*x**2 - 2).as_poly().cancel((x**2 - 2*x + 1).as_poly())
    (1, Poly(2*x + 2, x, domain='ZZ'), Poly(x - 1, x, domain='ZZ'))

    >>> (2*x**2 - 2).as_poly().cancel((x**2 - 2*x + 1).as_poly(), include=True)
    (Poly(2*x + 2, x, domain='ZZ'), Poly(x - 1, x, domain='ZZ'))
    """
    dom, per, F, G = self._unify(other)

    result = F.cancel(G, include=include)

    if not include:
        # Separate form: (numerator coeff / denominator coeff, p, q).
        if dom.has_assoc_Ring:
            dom = dom.ring

        cp, cq, p, q = result

        cp = dom.to_expr(cp)
        cq = dom.to_expr(cq)

        return cp/cq, per(p), per(q)
    else:
        # Combined form: the coefficients are folded into p and q.
        return tuple(map(per, result))
@property
def is_zero(self):
"""
Returns ``True`` if ``self`` is a zero polynomial.
Examples
========
>>> Integer(0).as_poly(x).is_zero
True
>>> Integer(1).as_poly(x).is_zero
False
"""
return not self.rep
@property
def is_one(self):
"""
Returns ``True`` if ``self`` is a unit polynomial.
Examples
========
>>> Integer(0).as_poly(x).is_one
False
>>> Integer(1).as_poly(x).is_one
True
"""
return self.rep == 1
@property
def is_squarefree(self):
"""
Returns ``True`` if ``self`` is a square-free polynomial.
Examples
========
>>> (x**2 - 2*x + 1).as_poly().is_squarefree
False
>>> (x**2 - 1).as_poly().is_squarefree
True
"""
return self.rep.is_squarefree
@property
def is_ground(self):
    """Return ``True`` if ``self`` is an element of the ground domain.

    Examples
    ========

    >>> x.as_poly().is_ground
    False
    >>> Integer(2).as_poly(x).is_ground
    True
    >>> y.as_poly(x).is_ground
    True
    """
    # Delegated to the low-level representation.
    rep = self.rep
    return rep.is_ground
@property
def is_linear(self):
    """Return ``True`` if ``self`` is linear in all its variables.

    Examples
    ========

    >>> (x + y + 2).as_poly().is_linear
    True
    >>> (x*y + 2).as_poly().is_linear
    False
    """
    # Delegated to the low-level representation.
    rep = self.rep
    return rep.is_linear
@property
def is_quadratic(self):
    """Return ``True`` if ``self`` is quadratic in all its variables.

    Examples
    ========

    >>> (x*y + 2).as_poly().is_quadratic
    True
    >>> (x*y**2 + 2).as_poly().is_quadratic
    False
    """
    # Delegated to the low-level representation.
    rep = self.rep
    return rep.is_quadratic
@property
def is_term(self):
    """Return ``True`` if ``self`` is zero or has only one term.

    Examples
    ========

    >>> (3*x**2).as_poly().is_term
    True
    >>> (3*x**2 + 1).as_poly().is_term
    False
    """
    # Delegated to the low-level representation.
    rep = self.rep
    return rep.is_term
@property
def is_homogeneous(self):
    """Return ``True`` if ``self`` is a homogeneous polynomial.

    A homogeneous polynomial is one whose monomials with non-zero
    coefficients all share the same total degree.

    Examples
    ========

    >>> (x**2 + x*y).as_poly().is_homogeneous
    True
    >>> (x**3 + x*y).as_poly().is_homogeneous
    False
    """
    # Delegated to the low-level representation.
    rep = self.rep
    return rep.is_homogeneous
@property
def is_irreducible(self):
    """Return ``True`` if ``self`` has no factors over its domain.

    Examples
    ========

    >>> (x**2 + x + 1).as_poly(modulus=2).is_irreducible
    True
    >>> (x**2 + 1).as_poly(modulus=2).is_irreducible
    False
    """
    # Delegated to the low-level representation.
    rep = self.rep
    return rep.is_irreducible
@property
def is_univariate(self):
    """Return ``True`` if ``self`` is a univariate polynomial.

    Examples
    ========

    >>> (x**2 + x + 1).as_poly().is_univariate
    True
    >>> (x*y**2 + x*y + 1).as_poly().is_univariate
    False
    >>> (x*y**2 + x*y + 1).as_poly(x).is_univariate
    True
    >>> (x**2 + x + 1).as_poly(x, y).is_univariate
    False
    """
    gens = self.gens
    return len(gens) == 1
@property
def is_multivariate(self):
    """Return ``True`` if ``self`` is a multivariate polynomial.

    Examples
    ========

    >>> (x**2 + x + 1).as_poly().is_multivariate
    False
    >>> (x*y**2 + x*y + 1).as_poly().is_multivariate
    True
    >>> (x*y**2 + x*y + 1).as_poly(x).is_multivariate
    False
    >>> (x**2 + x + 1).as_poly(x, y).is_multivariate
    True
    """
    # Exactly the complement of is_univariate.
    gens_count = len(self.gens)
    return gens_count != 1
@property
def is_cyclotomic(self):
    """Return ``True`` if ``self`` is a cyclotomic polynomial.

    Examples
    ========

    >>> f = (x**16 + x**14 - x**10 + x**8 - x**6 + x**2 + 1).as_poly()
    >>> f.is_cyclotomic
    False
    >>> g = (x**16 + x**14 - x**10 - x**8 - x**6 + x**2 + 1).as_poly()
    >>> g.is_cyclotomic
    True
    """
    # Delegated to the low-level representation.
    rep = self.rep
    return rep.is_cyclotomic
def __abs__(self):
    """Make all coefficients in ``self`` positive.

    Examples
    ========

    >>> abs((x**2 - 1).as_poly())
    Poly(x**2 + 1, x, domain='ZZ')
    """
    return self.per(abs(self.rep))
def __neg__(self):
    """Negate all coefficients in ``self``.

    Examples
    ========

    >>> -(x**2 - 1).as_poly()
    Poly(-x**2 + 1, x, domain='ZZ')
    """
    return self.per(-self.rep)
@_sympifyit('other', NotImplemented)
def __add__(self, other):
    """Add ``other`` to ``self``, falling back to ``Expr`` addition
    when ``other`` cannot be converted to a polynomial in our gens."""
    if not other.is_Poly:
        try:
            other = self.__class__(other, *self.gens)
        except PolynomialError:
            # Not polynomial in our generators: degrade to Expr arithmetic.
            return self.as_expr() + other

    _, per, F, G = self._unify(other)
    return per(F + G)
@_sympifyit('other', NotImplemented)
def __radd__(self, other):
    """Reflected addition: ``other + self``."""
    try:
        converted = self.__class__(other, *self.gens)
    except PolynomialError:
        # Not polynomial in our generators: degrade to Expr arithmetic.
        return other + self.as_expr()
    return converted + self
@_sympifyit('other', NotImplemented)
def __sub__(self, other):
    """Subtract ``other`` from ``self``, falling back to ``Expr``
    subtraction when ``other`` cannot be converted to a polynomial."""
    if not other.is_Poly:
        try:
            other = self.__class__(other, *self.gens)
        except PolynomialError:
            # Not polynomial in our generators: degrade to Expr arithmetic.
            return self.as_expr() - other

    _, per, F, G = self._unify(other)
    return per(F - G)
@_sympifyit('other', NotImplemented)
def __rsub__(self, other):
    """Reflected subtraction: ``other - self``."""
    try:
        converted = self.__class__(other, *self.gens)
    except PolynomialError:
        # Not polynomial in our generators: degrade to Expr arithmetic.
        return other - self.as_expr()
    return converted - self
@_sympifyit('other', NotImplemented)
def __mul__(self, other):
    """Multiply ``self`` by ``other``, falling back to ``Expr``
    multiplication when ``other`` cannot be converted to a polynomial."""
    if not other.is_Poly:
        try:
            other = self.__class__(other, *self.gens)
        except PolynomialError:
            # Not polynomial in our generators: degrade to Expr arithmetic.
            return self.as_expr()*other

    _, per, F, G = self._unify(other)
    return per(F * G)
@_sympifyit('other', NotImplemented)
def __rmul__(self, other):
    """Reflected multiplication: ``other * self``."""
    try:
        converted = self.__class__(other, *self.gens)
    except PolynomialError:
        # Not polynomial in our generators: degrade to Expr arithmetic.
        return other*self.as_expr()
    return converted*self
def __pow__(self, n, mod=None):
    """Raise ``self`` to the power ``n``, optionally reduced modulo ``mod``.

    Non-negative integer exponents are handled on the low-level
    representation; anything else falls back to ``Expr`` exponentiation.
    NOTE(review): when given, ``mod`` appears to be expected to be a Poly
    (its ``.rep`` attribute is used) — confirm against callers.
    """
    if mod:
        mod = sympify(mod, strict=True)
    n = sympify(n)

    if not (n.is_Integer and n >= 0):
        # Symbolic/negative exponent: work at the expression level.
        expr = self.as_expr()**n
        return expr % mod if mod else expr

    result = pow(self.rep, int(n), mod.rep if mod else mod)
    return self.per(result)
@_sympifyit('other', NotImplemented)
def __divmod__(self, other):
    """Return ``(quotient, remainder)`` of polynomial division by ``other``."""
    if not other.is_Poly:
        other = self.__class__(other, *self.gens)
    return self.div(other)
@_sympifyit('other', NotImplemented)
def __rdivmod__(self, other):
    """Reflected ``divmod``: divide ``other`` by ``self``."""
    converted = self.__class__(other, *self.gens)
    return converted.div(self)
@_sympifyit('other', NotImplemented)
def __mod__(self, other):
    """Return the remainder of polynomial division by ``other``."""
    if not other.is_Poly:
        other = self.__class__(other, *self.gens)
    return self.rem(other)
@_sympifyit('other', NotImplemented)
def __rmod__(self, other):
    """Reflected ``%``: remainder of ``other`` divided by ``self``."""
    converted = self.__class__(other, *self.gens)
    return converted.rem(self)
@_sympifyit('other', NotImplemented)
def __floordiv__(self, other):
    """Return the quotient of polynomial division by ``other``."""
    if not other.is_Poly:
        other = self.__class__(other, *self.gens)
    return self.quo(other)
@_sympifyit('other', NotImplemented)
def __rfloordiv__(self, other):
    """Reflected ``//``: quotient of ``other`` divided by ``self``."""
    converted = self.__class__(other, *self.gens)
    return converted.quo(self)
@_sympifyit('other', NotImplemented)
def __truediv__(self, other):
    """True division falls back to expressions — polynomials are not
    closed under ``/``."""
    return self.as_expr()/other.as_expr()
@_sympifyit('other', NotImplemented)
def __eq__(self, other):
    """Structural equality: same generators and, after unifying the
    coefficient domains, the same internal representation."""
    f, g = self, other

    if not g.is_Poly:
        try:
            g = f.__class__(g, *f.gens, domain=f.domain)
        except (PolynomialError, DomainError, CoercionFailed):
            # Cannot even be expressed as a comparable polynomial.
            return False

    if f.gens != g.gens:
        return False

    if f.domain != g.domain:
        try:
            dom = f.domain.unify(g.domain, f.gens)
        except UnificationFailed:  # pragma: no cover
            return NotImplemented
        f, g = f.set_domain(dom), g.set_domain(dom)

    return f.rep == g.rep
def __bool__(self):
    """A polynomial is truthy iff it is not the zero polynomial."""
    return not self.is_zero
class PurePoly(Poly):
    """Class for representing pure polynomials.

    A pure polynomial abstracts away the names of its generators: hashing
    and equality depend only on the coefficient domain and the coefficient
    data, and unification requires only that the *number* of generators
    match (see ``__eq__`` and ``_unify`` below).
    """

    def _hashable_content(self):
        """Allow Diofant to hash Poly instances."""
        # Note: unlike a plain Poly, the generators are deliberately
        # excluded, so PurePoly(x**2 + 1) and PurePoly(y**2 + 1) hash alike.
        return self.domain, frozenset(self.rep.items())

    def __hash__(self):
        # __eq__ is overridden below, so __hash__ must be restated
        # explicitly (Python would otherwise set it to None).
        return super().__hash__()

    @property
    def free_symbols(self):
        """
        Free symbols of a polynomial.

        Examples
        ========

        >>> PurePoly(x**2 + 1).free_symbols
        set()
        >>> PurePoly(x**2 + y).free_symbols
        set()
        >>> PurePoly(x**2 + y, x).free_symbols
        {y}
        """
        # Generators never contribute free symbols; only symbols hidden
        # in the coefficient domain do.
        return self.free_symbols_in_domain

    @_sympifyit('other', NotImplemented)
    def __eq__(self, other):
        # Equality up to renaming of generators: only the generator count
        # and the coefficient data (rep.items()) are compared.
        f, g = self, other

        if not g.is_Poly:
            try:
                g = f.__class__(g, *f.gens, domain=f.domain)
            except (PolynomialError, DomainError, CoercionFailed):
                return False

        if len(f.gens) != len(g.gens):
            return False

        if f.domain != g.domain:
            try:
                dom = f.domain.unify(g.domain, f.gens)
            except UnificationFailed:  # pragma: no cover
                return NotImplemented

            f = f.set_domain(dom)
            g = g.set_domain(dom)

        return f.rep.items() == g.rep.items()

    def _unify(self, other):
        # Bring ``self`` and ``other`` into a common ring.  Generator
        # identity is irrelevant for pure polynomials, so only the number
        # of generators must agree.
        other = sympify(other)

        if not other.is_Poly:
            # Ground element: coerce it into our coefficient domain.
            try:
                return (self.domain, self.per, self.rep,
                        self.rep.ring(self.domain.convert(other)))
            except CoercionFailed:
                raise UnificationFailed(f"can't unify {self} with {other}")

        if len(self.gens) != len(other.gens):
            raise UnificationFailed(f"can't unify {self} with {other}")

        newring = self.rep.ring.unify(other.rep.ring)
        gens = newring.symbols
        F, G = self.rep.set_ring(newring), other.rep.set_ring(newring)
        cls = self.__class__
        dom = newring.domain

        def per(rep, dom=dom, gens=gens, remove=None):
            # Wrap a low-level result back into a PurePoly; ``remove``
            # drops one generator (used e.g. by eliminating operations),
            # collapsing to a ground expression when none remain.
            if remove is not None:
                gens = gens[:remove] + gens[remove + 1:]
                if not gens:
                    return dom.to_expr(rep)
            return cls.new(rep, *gens)

        return dom, per, F, G
def parallel_poly_from_expr(exprs, *gens, **args):
    """Construct polynomials from expressions.

    Converts every element of ``exprs`` into a polynomial over a single,
    shared set of generators and a single unified domain, returning
    ``(polys, opt)``.  Raises :exc:`PolificationFailed` when some input
    cannot be turned into a polynomial.
    """
    from ..functions import Piecewise

    opt = build_options(gens, args)

    # Bookkeeping: indices of plain expressions, ready-made Polys, and
    # expressions that failed for lack of generators (retried later).
    origs, exprs = list(exprs), []
    _exprs, _polys, _failed = [], [], []

    if not origs and not opt.gens:
        # Nothing to work with at all.
        raise PolificationFailed(opt, origs, exprs, True)

    for i, expr in enumerate(origs):
        expr = sympify(expr)
        if isinstance(expr, Basic):
            if expr.is_Poly:
                _polys.append(i)
                expr = expr.__class__._from_poly(expr, opt)
            else:
                if opt.expand:
                    expr = expr.expand()
                try:
                    expr = Poly._from_expr(expr, opt)
                    _exprs.append(i)
                except GeneratorsNeeded:
                    # No generators could be inferred yet; retry below
                    # once generators are known from the other inputs.
                    _failed.append(i)
        else:
            raise PolificationFailed(opt, origs, exprs, True)
        exprs.append(expr)

    if opt.polys is None:
        opt.polys = bool(_polys)

    _exprs += _polys
    # First unification pass: chain pairwise unify over all successes.
    for i, j in zip(_exprs, _exprs[1:]):
        exprs[i], exprs[j] = exprs[i].unify(exprs[j])

    if _exprs:
        i = _exprs[-1]
        opt.gens = exprs[i].gens

    # Retry the failures now that opt.gens may be populated.
    for i in _failed:
        try:
            exprs[i] = Poly._from_expr(exprs[i], opt)
        except GeneratorsNeeded:
            raise PolificationFailed(opt, origs, exprs, True)

    if opt.domain is None:
        opt.domain = ZZ

    _exprs += _failed
    if _exprs:
        # Second unification pass including the retried entries, then
        # rebuild everything over the final common domain/class.
        for i, j in zip(_exprs, _exprs[1:]):
            exprs[i], exprs[j] = exprs[i].unify(exprs[j])
        i = _exprs[-1]
        opt.domain = exprs[i].domain
        cls = exprs[i].func
        for i, expr in enumerate(exprs):
            exprs[i] = cls._from_poly(expr, opt)

    for k in opt.gens:
        if isinstance(k, Piecewise) and len(exprs) > 1:
            raise PolynomialError('Piecewise generators do not make sense')

    return exprs, opt
def degree(f, *gens, **args):
    """Return the degree of ``f`` in the given variable.

    The degree of 0 is negative infinity.

    Examples
    ========

    >>> degree(x**2 + y*x + 1, gen=x)
    2
    >>> degree(x**2 + y*x + 1, gen=y)
    1
    >>> degree(0, x)
    -inf
    """
    allowed_flags(args, ['gen', 'polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('degree', 1, exc)

    return sympify(poly.degree(opt.gen))
def LC(f, *gens, **args):
    """Return the leading coefficient of ``f``.

    Examples
    ========

    >>> LC(4*x**2 + 2*x*y**2 + x*y + 3*y)
    4
    """
    allowed_flags(args, ['polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('LC', 1, exc)

    return poly.LC(order=opt.order)
def LM(f, *gens, **args):
    """Return the leading monomial of ``f``.

    Examples
    ========

    >>> LM(4*x**2 + 2*x*y**2 + x*y + 3*y)
    x**2
    """
    allowed_flags(args, ['polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('LM', 1, exc)

    return poly.LM(order=opt.order).as_expr()
def LT(f, *gens, **args):
    """Return the leading term of ``f``.

    Examples
    ========

    >>> LT(4*x**2 + 2*x*y**2 + x*y + 3*y)
    4*x**2
    """
    allowed_flags(args, ['polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('LT', 1, exc)

    monom, coeff = poly.LT(order=opt.order)
    return coeff*monom.as_expr()
def div(f, g, *gens, **args):
    """Compute polynomial division of ``f`` and ``g``.

    Examples
    ========

    >>> div(x**2 + 1, 2*x - 4, field=False)
    (0, x**2 + 1)
    >>> div(x**2 + 1, 2*x - 4)
    (x/2 + 1, 5)
    """
    allowed_flags(args, ['auto', 'polys'])

    try:
        (numer, denom), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('div', 2, exc)

    q, r = numer.div(denom, auto=opt.auto)
    return (q, r) if opt.polys else (q.as_expr(), r.as_expr())
def rem(f, g, *gens, **args):
    """Compute polynomial remainder of ``f`` and ``g``.

    Examples
    ========

    >>> rem(x**2 + 1, 2*x - 4, field=False)
    x**2 + 1
    >>> rem(x**2 + 1, 2*x - 4)
    5
    """
    allowed_flags(args, ['auto', 'polys'])

    try:
        (numer, denom), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('rem', 2, exc)

    remainder = numer.rem(denom, auto=opt.auto)
    return remainder if opt.polys else remainder.as_expr()
def quo(f, g, *gens, **args):
    """Compute polynomial quotient of ``f`` and ``g``.

    Examples
    ========

    >>> quo(x**2 + 1, 2*x - 4)
    x/2 + 1
    >>> quo(x**2 - 1, x - 1)
    x + 1
    """
    allowed_flags(args, ['auto', 'polys'])

    try:
        (numer, denom), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('quo', 2, exc)

    quotient = numer.quo(denom, auto=opt.auto)
    return quotient if opt.polys else quotient.as_expr()
def exquo(f, g, *gens, **args):
    """Compute polynomial exact quotient of ``f`` and ``g``.

    Examples
    ========

    >>> exquo(x**2 - 1, x - 1)
    x + 1

    >>> exquo(x**2 + 1, 2*x - 4)
    Traceback (most recent call last):
    ...
    ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1
    """
    allowed_flags(args, ['auto', 'polys'])

    try:
        (numer, denom), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('exquo', 2, exc)

    quotient = numer.exquo(denom, auto=opt.auto)
    return quotient if opt.polys else quotient.as_expr()
def half_gcdex(f, g, *gens, **args):
    """Half extended Euclidean algorithm of ``f`` and ``g``.

    Returns ``(s, h)`` such that ``h = gcd(f, g)`` and ``s*f = h (mod g)``.

    Examples
    ========

    >>> half_gcdex(x**4 - 2*x**3 - 6*x**2 + 12*x + 15, x**3 + x**2 - 4*x - 4)
    (-x/5 + 3/5, x + 1)
    """
    allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Ground-domain inputs: run the algorithm on the domain directly.
        domain, (a, b) = construct_domain(exc.exprs)
        s, h = domain.half_gcdex(a, b)
        return domain.to_expr(s), domain.to_expr(h)

    s, h = F.half_gcdex(G, auto=opt.auto)
    return (s, h) if opt.polys else (s.as_expr(), h.as_expr())
def gcdex(f, g, *gens, **args):
    """Extended Euclidean algorithm of ``f`` and ``g``.

    Returns ``(s, t, h)`` such that ``h = gcd(f, g)`` and ``s*f + t*g = h``.

    Examples
    ========

    >>> gcdex(x**4 - 2*x**3 - 6*x**2 + 12*x + 15, x**3 + x**2 - 4*x - 4)
    (-x/5 + 3/5, x**2/5 - 6*x/5 + 2, x + 1)
    """
    allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Ground-domain inputs: run the algorithm on the domain directly.
        domain, (a, b) = construct_domain(exc.exprs)
        s, t, h = domain.gcdex(a, b)
        return domain.to_expr(s), domain.to_expr(t), domain.to_expr(h)

    s, t, h = F.gcdex(G, auto=opt.auto)
    return (s, t, h) if opt.polys else (s.as_expr(), t.as_expr(), h.as_expr())
def invert(f, g, *gens, **args):
    """Invert ``f`` modulo ``g`` when possible.

    Examples
    ========

    >>> invert(x**2 - 1, 2*x - 1)
    -4/3

    >>> invert(x**2 - 1, x - 1)
    Traceback (most recent call last):
    ...
    NotInvertible: zero divisor

    For more efficient inversion of Rationals,
    use the ``mod_inverse`` function:

    >>> mod_inverse(3, 5)
    2
    >>> (Integer(2)/5).invert(Integer(7)/3)
    5/2

    See Also
    ========

    diofant.core.numbers.mod_inverse
    """
    allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Ground-domain inputs: invert in the domain directly.
        domain, (a, b) = construct_domain(exc.exprs)
        return domain.to_expr(domain.invert(a, b))

    inverse = F.invert(G, auto=opt.auto)
    return inverse if opt.polys else inverse.as_expr()
def subresultants(f, g, *gens, **args):
    """Compute subresultant PRS of ``f`` and ``g``.

    Examples
    ========

    >>> subresultants(x**2 + 1, x**2 - 1)
    [x**2 + 1, x**2 - 1, -2]
    """
    allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('subresultants', 2, exc)

    prs = F.subresultants(G)
    return prs if opt.polys else [p.as_expr() for p in prs]
def resultant(f, g, *gens, **args):
    """Compute resultant of ``f`` and ``g``.

    With ``includePRS=True`` the subresultant PRS is returned alongside
    the resultant.

    Examples
    ========

    >>> resultant(x**2 + 1, x**2 - 1)
    4
    """
    includePRS = args.pop('includePRS', False)
    allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('resultant', 2, exc)

    if includePRS:
        result, prs = F.resultant(G, includePRS=includePRS)
        if opt.polys:
            return result, prs
        return result.as_expr(), [p.as_expr() for p in prs]

    result = F.resultant(G)
    return result if opt.polys else result.as_expr()
def discriminant(f, *gens, **args):
    """Compute discriminant of ``f``.

    Examples
    ========

    >>> discriminant(x**2 + 2*x + 3)
    -8
    """
    allowed_flags(args, ['polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('discriminant', 1, exc)

    disc = poly.discriminant()
    return disc if opt.polys else disc.as_expr()
def cofactors(f, g, *gens, **args):
    """Compute GCD and cofactors of ``f`` and ``g``.

    Returns polynomials ``(h, cff, cfg)`` such that ``h = gcd(f, g)``, and
    ``cff = quo(f, h)`` and ``cfg = quo(g, h)`` are, so called, cofactors
    of ``f`` and ``g``.

    Examples
    ========

    >>> cofactors(x**2 - 1, x**2 - 3*x + 2)
    (x - 1, x + 1, x - 2)
    """
    allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Ground-domain inputs: compute cofactors in the domain directly.
        domain, (a, b) = construct_domain(exc.exprs)
        h, cff, cfg = domain.cofactors(a, b)
        return tuple(map(domain.to_expr, (h, cff, cfg)))

    h, cff, cfg = F.cofactors(G)
    if opt.polys:
        return h, cff, cfg
    return h.as_expr(), cff.as_expr(), cfg.as_expr()
def gcd(f, g, *gens, **args):
    """Compute GCD of ``f`` and ``g``.

    Examples
    ========

    >>> gcd(x**2 - 1, x**2 - 3*x + 2)
    x - 1
    """
    allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Ground-domain inputs: compute the gcd in the domain directly.
        domain, (a, b) = construct_domain(exc.exprs)
        return domain.to_expr(domain.gcd(a, b))

    h = F.gcd(G)
    return h if opt.polys else h.as_expr()
def lcm(f, g, *gens, **args):
    """Compute LCM of ``f`` and ``g``.

    Examples
    ========

    >>> lcm(x**2 - 1, x**2 - 3*x + 2)
    x**3 - 2*x**2 - x + 2
    """
    allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Ground-domain inputs: compute the lcm in the domain directly.
        domain, (a, b) = construct_domain(exc.exprs)
        return domain.to_expr(domain.lcm(a, b))

    h = F.lcm(G)
    return h if opt.polys else h.as_expr()
def terms_gcd(f, *gens, **args):
    """
    Remove GCD of terms from ``f``.

    If the ``deep`` flag is True, then the arguments of ``f`` will have
    terms_gcd applied to them.

    If a fraction is factored out of ``f`` and ``f`` is an Add, then
    an unevaluated Mul will be returned so that automatic simplification
    does not redistribute it. The hint ``clear``, when set to False, can be
    used to prevent such factoring when all coefficients are not fractions.

    Examples
    ========

    >>> terms_gcd(x**6*y**2 + x**3*y)
    x**3*y*(x**3*y + 1)

    The default action of polys routines is to expand the expression
    given to them. terms_gcd follows this behavior:

    >>> terms_gcd((3+3*x)*(x+x*y))
    3*x*(x*y + x + y + 1)

    If this is not desired then the hint ``expand`` can be set to False.
    In this case the expression will be treated as though it were comprised
    of one or more terms:

    >>> terms_gcd((3+3*x)*(x+x*y), expand=False)
    (3*x + 3)*(x*y + x)

    In order to traverse factors of a Mul or the arguments of other
    functions, the ``deep`` hint can be used:

    >>> terms_gcd((3 + 3*x)*(x + x*y), expand=False, deep=True)
    3*x*(x + 1)*(y + 1)
    >>> terms_gcd(cos(x + x*y), deep=True)
    cos(x*(y + 1))

    Rationals are factored out by default:

    >>> terms_gcd(x + y/2)
    (2*x + y)/2

    Only the y-term had a coefficient that was a fraction; if one
    does not want to factor out the 1/2 in cases like this, the
    flag ``clear`` can be set to False:

    >>> terms_gcd(x + y/2, clear=False)
    x + y/2
    >>> terms_gcd(x*y/2 + y**2, clear=False)
    y*(x/2 + y)

    The ``clear`` flag is ignored if all coefficients are fractions:

    >>> terms_gcd(x/3 + y/2, clear=False)
    (2*x + 3*y)/6

    See Also
    ========

    diofant.core.exprtools.gcd_terms, diofant.core.exprtools.factor_terms
    """
    from ..core import Equality

    orig = sympify(f)
    # Atoms and non-Expr objects have no terms to extract from.
    if not isinstance(f, Expr) or f.is_Atom:
        return orig

    if args.get('deep', False):
        # Recurse into sub-arguments first, then process the rebuilt
        # expression non-deeply (and without re-expanding it).
        new = f.func(*[terms_gcd(a, *gens, **args) for a in f.args])
        args.pop('deep')
        args['expand'] = False
        return terms_gcd(new, *gens, **args)

    if isinstance(f, Equality):
        return f

    clear = args.pop('clear', True)
    allowed_flags(args, ['polys'])

    (F,), opt = parallel_poly_from_expr((f,), *gens, **args)
    # J holds the extracted monomial exponents, f the deflated polynomial.
    J, f = F.terms_gcd()

    if opt.domain.is_Field:
        denom, f = f.clear_denoms(convert=True)

    coeff, f = f.primitive()

    if opt.domain.is_Field:
        # Fold the cleared denominator back into the extracted content.
        coeff /= denom

    # Rebuild the extracted monomial from the exponent vector J.
    term = Mul(*[x**j for x, j in zip(f.gens, J)])
    if coeff == 1:
        coeff = Integer(1)
        if term == 1:
            # Nothing was extracted: return the (sympified) original.
            return orig

    if clear:
        return _keep_coeff(coeff, term*f.as_expr())
    # base the clearing on the form of the original expression, not
    # the (perhaps) Mul that we have now
    coeff, f = _keep_coeff(coeff, f.as_expr(), clear=False).as_coeff_Mul()
    return _keep_coeff(coeff, term*f, clear=False)
def trunc(f, p, *gens, **args):
    """Reduce ``f`` modulo a constant ``p``.

    Examples
    ========

    >>> trunc(2*x**3 + 3*x**2 + 5*x + 7, 3)
    -x**3 - x + 1
    """
    allowed_flags(args, ['auto', 'polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('trunc', 1, exc)

    reduced = poly.trunc(sympify(p))
    return reduced if opt.polys else reduced.as_expr()
def monic(f, *gens, **args):
    """Divide all coefficients of ``f`` by ``LC(f)``.

    Examples
    ========

    >>> monic(3*x**2 + 4*x + 2)
    x**2 + 4*x/3 + 2/3
    """
    allowed_flags(args, ['auto', 'polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('monic', 1, exc)

    normalized = poly.monic(auto=opt.auto)
    return normalized if opt.polys else normalized.as_expr()
def content(f, *gens, **args):
    """Compute GCD of coefficients of ``f``.

    Examples
    ========

    >>> content(6*x**2 + 8*x + 12)
    2
    """
    allowed_flags(args, ['polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('content', 1, exc)

    return poly.content()
def primitive(f, *gens, **args):
    """Compute content and the primitive form of ``f``.

    Examples
    ========

    >>> primitive(6*x**2 + 8*x + 12)
    (2, 3*x**2 + 4*x + 6)

    >>> eq = (2 + 2*x)*x + 2

    Expansion is performed by default:

    >>> primitive(eq)
    (2, x**2 + x + 1)

    Set ``expand`` to False to shut this off. Note that the
    extraction will not be recursive; use the as_content_primitive method
    for recursive, non-destructive Rational extraction.

    >>> primitive(eq, expand=False)
    (1, x*(2*x + 2) + 2)

    >>> eq.as_content_primitive()
    (2, x*(x + 1) + 1)
    """
    allowed_flags(args, ['polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('primitive', 1, exc)

    cont, prim = poly.primitive()
    return (cont, prim) if opt.polys else (cont, prim.as_expr())
def compose(f, g, *gens, **args):
    """Compute functional composition ``f(g)``.

    Examples
    ========

    >>> compose(x**2 + x, x - 1)
    x**2 - x
    """
    allowed_flags(args, ['polys'])

    try:
        (outer, inner), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('compose', 2, exc)

    composed = outer.compose(inner)
    return composed if opt.polys else composed.as_expr()
def decompose(f, *gens, **args):
    """Compute functional decomposition of ``f``.

    Examples
    ========

    >>> decompose(x**4 + 2*x**3 - x - 1)
    [x**2 - x - 1, x**2 + x]
    """
    allowed_flags(args, ['polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('decompose', 1, exc)

    parts = poly.decompose()
    return parts if opt.polys else [p.as_expr() for p in parts]
def sqf_norm(f, *gens, **args):
    """Compute square-free norm of ``f``.

    Returns ``s``, ``f``, ``r``, such that ``g(x) = f(x-sa)`` and
    ``r(x) = Norm(g(x))`` is a square-free polynomial over ``K``,
    where ``a`` is the algebraic extension of the ground domain.

    Examples
    ========

    >>> sqf_norm(x**2 + 1, extension=[sqrt(3)])
    (1, x**2 - 2*sqrt(3)*x + 4, x**4 - 4*x**2 + 16)
    """
    allowed_flags(args, ['polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('sqf_norm', 1, exc)

    s, g, r = poly.sqf_norm()
    if opt.polys:
        return Integer(s), g, r
    return Integer(s), g.as_expr(), r.as_expr()
def sqf_part(f, *gens, **args):
    """Compute square-free part of ``f``.

    Examples
    ========

    >>> sqf_part(x**3 - 3*x - 2)
    x**2 - x - 2
    """
    allowed_flags(args, ['polys'])

    try:
        (poly,), opt = parallel_poly_from_expr((f,), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('sqf_part', 1, exc)

    part = poly.sqf_part()
    return part if opt.polys else part.as_expr()
def _sorted_factors(factors, method):
    """Sort a list of ``(expr, exp)`` pairs.

    Square-free factorizations order primarily by multiplicity; generic
    factorizations order primarily by the size of the representation.
    """
    if method == 'sqf':
        def key(pair):
            poly, exp = pair
            rep = poly.rep
            return exp, len(rep), len(poly.gens), default_sort_key(rep)
    else:
        def key(pair):
            poly, exp = pair
            rep = poly.rep
            return len(rep), len(poly.gens), exp, default_sort_key(rep)

    return sorted(factors, key=key)
def _factors_product(factors):
    """Multiply a list of ``(expr, exp)`` pairs."""
    return Mul(*(base.as_expr()**exp for base, exp in factors))
def _symbolic_factor_list(expr, opt, method):
    """Helper function for :func:`_symbolic_factor`.

    Splits ``expr`` into a numeric coefficient and a list of
    ``(factor, exponent)`` pairs, applying the polynomial factorization
    ``method`` (``'sqf'`` or ``'factor'``) to each polynomial part.
    """
    coeff, factors = Integer(1), []

    args = [i._eval_factor() if hasattr(i, '_eval_factor') else i
            for i in Mul.make_args(expr)]
    for arg in args:
        if arg.is_Number:
            # Pure numbers accumulate into the leading coefficient.
            coeff *= arg
            continue
        elif arg.is_Pow and arg.base is not E:
            base, exp = arg.base, arg.exp
            if base.is_Number:
                factors.append((base, exp))
                continue
        else:
            base, exp = arg, Integer(1)

        try:
            if base.is_Poly:
                cls = base.func
            else:
                cls = Poly
            if opt.expand:
                base = base.expand()
            if opt.polys is None:
                opt.polys = base.is_Poly
            poly = cls._from_expr(base.as_expr(), opt)
        except GeneratorsNeeded:
            # Not a polynomial in any generator: keep it as-is.
            factors.append((base, exp))
        else:
            # Dispatch to Poly.sqf_list or Poly.factor_list.
            func = getattr(poly, method + '_list')
            _coeff, _factors = func()
            if _coeff != 1:
                if exp.is_Integer:
                    coeff *= _coeff**exp
                elif _coeff.is_positive:
                    factors.append((_coeff, exp))
                else:
                    # Negative non-integer-power content: keep it attached
                    # to the factors to avoid extracting a fractional sign.
                    _factors.append((_coeff, Integer(1)))

            if exp == 1:
                factors.extend(_factors)
            elif exp.is_integer:
                factors.extend([(f, k*exp) for f, k in _factors])
            else:
                # Non-integer outer exponent: only provably positive
                # factors may be raised to it individually; the rest stay
                # grouped under a single power.
                other = []
                for f, k in _factors:
                    if f.as_expr().is_positive:
                        factors.append((f, k*exp))
                    else:
                        other.append((f, k))
                factors.append((_factors_product(other), exp))

    if method == 'sqf':
        # Merge factors that share a multiplicity into one product each.
        factors = [(functools.reduce(operator.mul,
                                     (f for f, _ in factors if _ == k)), k)
                   for k in set(dict(factors).values())]

    return coeff, factors
def _symbolic_factor(expr, opt, method):
    """Helper function for :func:`_factor`.

    Factors ``expr`` itself when it is a non-relational expression, and
    otherwise recurses into its arguments (or iterable elements).
    """
    if isinstance(expr, Expr) and not expr.is_Relational:
        if hasattr(expr, '_eval_factor'):
            return expr._eval_factor()
        coeff, factors = _symbolic_factor_list(together(expr), opt, method)
        return _keep_coeff(coeff, _factors_product(factors))
    if hasattr(expr, 'args'):
        return expr.func(*[_symbolic_factor(a, opt, method) for a in expr.args])
    if hasattr(expr, '__iter__'):
        return expr.__class__([_symbolic_factor(a, opt, method) for a in expr])
    raise NotImplementedError
def _generic_factor_list(expr, gens, args, method):
    """Helper function for :func:`sqf_list` and :func:`factor_list`.

    Factors numerator and denominator separately; the denominator part is
    only allowed when the ``frac`` flag is set.
    """
    allowed_flags(args, ['frac', 'polys'])
    opt = build_options(gens, args)

    expr = sympify(expr)

    if isinstance(expr, Expr) and not expr.is_Relational:
        numer, denom = together(expr).as_numer_denom()

        cp, fp = _symbolic_factor_list(numer, opt, method)
        cq, fq = _symbolic_factor_list(denom, opt, method)

        if fq and not opt.frac:
            # A genuine denominator appeared but fractions were not asked for.
            raise PolynomialError(f'a polynomial expected, got {expr}')

        _opt = opt.clone({'expand': True})

        if not _opt.get('gens'):
            # Collect generators from the polynomial factors found so far,
            # so non-Poly factors below can be polified consistently.
            _opt['gens'] = set().union(*[set(f.gens)
                                         for f, _ in fp + fq if f.is_Poly])

        for factors in (fp, fq):
            for i, (f, k) in enumerate(factors):
                if not f.is_Poly:
                    f = Poly._from_expr(f, _opt)
                    factors[i] = (f, k)

        fp = _sorted_factors(fp, method)
        fq = _sorted_factors(fq, method)

        if not opt.polys:
            fp = [(f.as_expr(), k) for f, k in fp]
            fq = [(f.as_expr(), k) for f, k in fq]

        coeff = cp/cq

        if not opt.frac:
            return coeff, fp
        else:
            return coeff, fp, fq
    else:
        raise PolynomialError(f'a polynomial expected, got {expr}')
def _generic_factor(expr, gens, args, method):
    """Helper function for :func:`sqf` and :func:`factor`."""
    allowed_flags(args, [])
    options = build_options(gens, args)
    return _symbolic_factor(sympify(expr), options, method)
def to_rational_coeffs(f):
    """
    Try to transform a polynomial to have rational coefficients.

    try to find a transformation ``x = alpha*y``

    ``f(x) = lc*alpha**n * g(y)`` where ``g`` is a polynomial with
    rational coefficients, ``lc`` the leading coefficient.

    If this fails, try ``x = y + beta``
    ``f(x) = g(y)``

    Returns ``None`` if ``g`` not found;
    ``(lc, alpha, None, g)`` in case of rescaling
    ``(None, None, beta, g)`` in case of translation

    Notes
    =====

    Currently it transforms only polynomials without roots larger than 2.

    Examples
    ========

    >>> p = (((x**2-1)*(x-2)).subs({x: x*(1 + sqrt(2))})).as_poly(x, domain=EX)
    >>> lc, r, _, g = to_rational_coeffs(p)
    >>> lc, r
    (7 + 5*sqrt(2), -2*sqrt(2) + 2)
    >>> g
    Poly(x**3 + x**2 - 1/4*x - 1/4, x, domain='QQ')
    >>> r1 = simplify(1/r)
    >>> (lc*r**3*(g.as_expr()).subs({x: x*r1})).as_poly(x, domain=EX) == p
    True
    """
    from ..simplify import simplify

    def _try_rescale(f, f1=None):
        """
        Try rescaling ``x -> alpha*x`` to convert f to a polynomial
        with rational coefficients.

        Returns ``alpha, f``; if the rescaling is successful,
        ``alpha`` is the rescaling factor, and ``f`` is the rescaled
        polynomial; else ``alpha`` is ``None``.
        """
        from ..core import Add

        # Only univariate polynomials in an atomic generator are handled.
        if f.is_multivariate or not (f.gens[0]).is_Atom:
            return
        n = f.degree()
        lc = f.LC()
        # BUGFIX: was ``f1 = f1 or f1.monic()`` which raised
        # AttributeError on None when no precomputed monic poly was given.
        f1 = f1 or f.monic()
        coeffs = f1.all_coeffs()[:-1]
        coeffs = [simplify(coeffx) for coeffx in coeffs]
        if coeffs[1]:
            rescale1_x = simplify(coeffs[1]/coeffs[0])
            coeffs1 = []
            for i in range(len(coeffs)):
                coeffx = simplify(coeffs[n - i - 1]*rescale1_x**(i + 1))
                if not coeffx.is_rational:
                    # Some rescaled coefficient stays irrational: give up.
                    break
                coeffs1.append(coeffx)
            else:
                # All coefficients became rational: rebuild the polynomial.
                rescale_x = simplify(1/rescale1_x)
                x = f.gens[0]
                v = [x**n]
                for i in range(1, n + 1):
                    v.append(coeffs1[i - 1]*x**(n - i))
                f = Add(*v)
                f = Poly(f)
                return lc, rescale_x, f

    def _try_translate(f, f1=None):
        """
        Try translating ``x -> x + alpha`` to convert f to a polynomial
        with rational coefficients.

        Returns ``alpha, f``; if the translating is successful,
        ``alpha`` is the translating factor, and ``f`` is the shifted
        polynomial; else ``alpha`` is ``None``.
        """
        from ..core import Add

        # Only univariate polynomials in an atomic generator are handled.
        if f.is_multivariate or not (f.gens[0]).is_Atom:
            return
        n = f.degree()
        # BUGFIX: was ``f1 = f1 or f1.monic()`` (AttributeError on None).
        f1 = f1 or f.monic()
        coeffs = f1.all_coeffs()[:-1]
        c = simplify(coeffs[-1])
        if c and not c.is_rational:
            func = Add
            if c.is_Add:
                args = c.args
                func = c.func
            else:
                args = [c]
            # Shift by the mean of the irrational part of the coefficient.
            sifted = sift(args, lambda z: z.is_rational)
            c2 = sifted[False]
            alpha = -func(*c2)/n
            f2 = f1.shift(alpha)
            return alpha, f2

    def _has_square_roots(p):
        """Return True if ``f`` is a sum with square roots but no other root."""
        from ..core.exprtools import Factors

        coeffs = p.coeffs()
        has_sq = False
        for y in coeffs:
            for x in Add.make_args(y):
                f = Factors(x).factors
                r = [wx.denominator for b, wx in f.items() if
                     b.is_number and wx.is_Rational and wx.denominator >= 2]
                if not r:
                    continue
                if min(r) == 2:
                    has_sq = True
                if max(r) > 2:
                    # Higher-order roots present: not supported.
                    return False
        return has_sq

    if f.domain.is_ExpressionDomain and _has_square_roots(f):
        f1 = f.monic()
        r = _try_rescale(f, f1)
        if r:
            return r[0], r[1], None, r[2]
        else:
            r = _try_translate(f, f1)
            if r:
                return None, None, r[0], r[1]
def sqf_list(f, *gens, **args):
    """Compute a list of square-free factors of ``f``.

    Examples
    ========

    >>> sqf_list(2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16)
    (2, [(x + 1, 2), (x + 2, 3)])
    """
    # Thin wrapper: the shared machinery does all the work.
    return _generic_factor_list(f, gens, args, method='sqf')
def sqf(f, *gens, **args):
    """Compute square-free factorization of ``f``.

    Examples
    ========

    >>> sqf(2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16)
    2*(x + 1)**2*(x + 2)**3
    """
    # Thin wrapper: the shared machinery does all the work.
    return _generic_factor(f, gens, args, method='sqf')
def factor_list(f, *gens, **args):
    """Compute a list of irreducible factors of ``f``.

    Examples
    ========

    >>> factor_list(2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y)
    (2, [(x + y, 1), (x**2 + 1, 2)])
    """
    # Thin wrapper: the shared machinery does all the work.
    return _generic_factor_list(f, gens, args, method='factor')
def factor(f, *gens, **args):
    """Factor the expression ``f`` into irreducibles.

    (To factor an integer into primes, use ``factorint``.)

    Two modes are implemented: symbolic and formal.  If ``f`` is not an
    instance of :class:`Poly` and no generators are specified, the
    symbolic mode is used; otherwise the formal mode is used.

    In symbolic mode, :func:`factor` traverses the expression tree and
    factors its components without any prior expansion, unless it meets
    an instance of :class:`~diofant.core.add.Add` (then formal
    factorization is applied).  This lets :func:`factor` handle large or
    symbolic exponents.

    By default the factorization is computed over the rationals.  To
    factor over another domain, e.g. an algebraic or finite field, pass
    the appropriate option: ``extension``, ``modulus`` or ``domain``.

    Examples
    ========
    >>> factor(2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y)
    2*(x + y)*(x**2 + 1)**2
    >>> factor(x**2 + 1)
    x**2 + 1
    >>> factor(x**2 + 1, modulus=2)
    (x + 1)**2
    >>> factor(x**2 + 1, gaussian=True)
    (x - I)*(x + I)
    >>> factor(x**2 - 2, extension=sqrt(2))
    (x - sqrt(2))*(x + sqrt(2))
    >>> factor((x**2 - 1)/(x**2 + 4*x + 4))
    (x - 1)*(x + 1)/(x + 2)**2
    >>> factor((x**2 + 4*x + 4)**10000000*(x**2 + 1))
    (x + 2)**20000000*(x**2 + 1)

    By default, factor deals with an expression as a whole:

    >>> eq = 2**(x**2 + 2*x + 1)
    >>> factor(eq)
    2**(x**2 + 2*x + 1)

    If the ``deep`` flag is True then subexpressions will be factored:

    >>> factor(eq, deep=True)
    2**((x + 1)**2)

    See Also
    ========
    diofant.ntheory.factor_.factorint
    """
    expr = sympify(f)
    if args.pop('deep', False):
        # Deep mode: factor every Mul/Add subexpression independently and
        # substitute back whichever ones actually changed form.
        replacements = {}
        for sub in expr.atoms(Mul, Add):
            factored = factor(sub, *gens, **args)
            if factored != sub and (factored.is_Mul or factored.is_Pow):
                replacements[sub] = factored
        return expr.xreplace(replacements)
    try:
        return _generic_factor(expr, gens, args, method='factor')
    except PolynomialError as msg:
        if expr.is_commutative:
            raise PolynomialError(msg)
        # Noncommutative expressions get a dedicated factoring routine.
        from ..core.exprtools import factor_nc
        return factor_nc(expr)
def count_roots(f, inf=None, sup=None):
    """Count the roots of ``f`` in the ``[inf, sup]`` interval.

    If either ``inf`` or ``sup`` is complex, the number of roots inside
    the complex rectangle with corners ``inf`` and ``sup`` is returned.

    Examples
    ========
    >>> count_roots(x**4 - 4, -3, 3)
    2
    >>> count_roots(x**4 - 4, 0, 1 + 3*I)
    1
    """
    try:
        poly = Poly(f, greedy=False)
    except GeneratorsNeeded:
        raise PolynomialError(f"can't count roots of {f}, not a polynomial")
    return poly.count_roots(inf=inf, sup=sup)
def real_roots(f, multiple=True):
    """Return the real roots of ``f``, with multiplicities.

    Examples
    ========
    >>> real_roots(2*x**3 - 7*x**2 + 4*x + 4)
    [-1/2, 2, 2]
    """
    try:
        poly = Poly(f, greedy=False)
    except GeneratorsNeeded:
        raise PolynomialError(f"can't compute real roots of {f}, "
                              'not a polynomial')
    return poly.real_roots(multiple=multiple)
def nroots(f, n=15, maxsteps=50, cleanup=True):
    """Compute numerical approximations of the roots of ``f``.

    ``n`` is the number of significant digits of the result.

    Examples
    ========
    >>> nroots(x**2 - 3, n=15)
    [-1.73205080756888, 1.73205080756888]
    >>> nroots(x**2 - 3, n=30)
    [-1.73205080756887729352744634151, 1.73205080756887729352744634151]
    """
    try:
        poly = Poly(f, greedy=False)
    except GeneratorsNeeded:
        raise PolynomialError(
            f"can't compute numerical roots of {f}, not a polynomial")
    return poly.nroots(n=n, maxsteps=maxsteps, cleanup=cleanup)
def cancel(f, *gens, **args):
    """
    Cancel common factors in a rational function ``f``.

    ``f`` may be an expression or a ``(numerator, denominator)`` pair.
    For an expression the cancelled expression is returned; for a pair a
    triple ``(coeff, numer, denom)`` is returned.

    Examples
    ========
    >>> A = Symbol('A', commutative=False)
    >>> cancel((2*x**2 - 2)/(x**2 - 2*x + 1))
    (2*x + 2)/(x - 1)
    >>> cancel((sqrt(3) + sqrt(15)*A)/(sqrt(2) + sqrt(10)*A))
    sqrt(6)/2
    """
    from ..core.exprtools import factor_terms
    from ..functions import Piecewise
    allowed_flags(args, ['polys'])
    f = sympify(f)
    # Normalize input into a numerator/denominator pair (p, q).
    if not isinstance(f, Tuple):
        # Atoms, relational objects and non-Expr objects have nothing to cancel.
        if f.is_Atom or isinstance(f, Relational) or not isinstance(f, Expr):
            return f
        # radical=True pulls common radicals out before splitting.
        f = factor_terms(f, radical=True)
        p, q = f.as_numer_denom()
    elif len(f) == 2:
        # Explicit (numerator, denominator) pair.
        p, q = f
    else:
        # Any other Tuple: best effort, just factor common terms.
        return factor_terms(f)
    try:
        (F, G), opt = parallel_poly_from_expr((p, q), *gens, **args)
    except PolificationFailed:
        # Could not build polynomials (e.g. no generators) -- return input
        # unchanged, or a trivial triple for the pair form.
        if not isinstance(f, (tuple, Tuple)):
            return f
        else:
            return Integer(1), p, q
    except PolynomialError:
        assert not f.is_commutative or f.has(Piecewise)
        # Handling of noncommutative and/or piecewise expressions
        if f.is_Add or f.is_Mul:
            # Split args into commutative, Piecewise-free parts (cancelled as
            # one polynomial expression) and the rest (cancelled one by one).
            sifted = sift(f.args, lambda x: x.is_commutative is True and not x.has(Piecewise))
            c, nc = sifted[True], sifted[False]
            nc = [cancel(i) for i in nc]
            return f.func(cancel(f.func._from_args(c)), *nc)
        else:
            # Walk subexpressions top-down and cancel each one in place.
            reps = []
            pot = preorder_traversal(f)
            next(pot)
            for e in pot:
                # XXX: This should really skip anything that's not Expr.
                if isinstance(e, (tuple, Tuple, BooleanAtom)):
                    continue
                reps.append((e, cancel(e)))
                pot.skip()  # this was handled successfully
            return f.xreplace(dict(reps))
    # Common path: polynomial GCD cancellation of numerator and denominator.
    c, P, Q = F.cancel(G)
    if not isinstance(f, (tuple, Tuple)):
        return c*(P.as_expr()/Q.as_expr())
    else:
        if not opt.polys:
            return c, P.as_expr(), Q.as_expr()
        else:
            return c, P, Q
def reduced(f, G, *gens, **args):
    """
    Reduces a polynomial ``f`` modulo a set of polynomials ``G``.

    Given a polynomial ``f`` and a set of polynomials ``G = (g_1, ..., g_n)``,
    computes a set of quotients ``q = (q_1, ..., q_n)`` and the remainder ``r``
    such that ``f = q_1*g_1 + ... + q_n*g_n + r``, where ``r`` vanishes or ``r``
    is a completely reduced polynomial with respect to ``G``.

    Examples
    ========
    >>> reduced(2*x**4 + y**2 - x**2 + y**3, [x**3 - x, y**3 - y])
    ([2*x, 1], x**2 + y**2 + y)
    """
    allowed_flags(args, ['polys', 'auto'])
    try:
        # polys[0] is f, polys[1:] are the divisors G, all over a common domain.
        polys, opt = parallel_poly_from_expr([f] + list(G), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('reduced', 0, exc)
    domain = opt.domain
    retract = False
    # Division generally requires a field; temporarily lift a ring domain
    # (e.g. ZZ) to its field of fractions and remember to retract later.
    if opt.auto and domain.is_Ring and not domain.is_Field:
        opt = opt.clone({'domain': domain.field})
        retract = True
    _ring = opt.domain.poly_ring(*opt.gens, order=opt.order)
    # Convert every Poly into the low-level ring representation.
    for i, poly in enumerate(polys):
        poly = dict(poly.set_domain(opt.domain).rep)
        polys[i] = _ring.from_dict(poly)
    Q, r = polys[0].div(polys[1:])
    Q = [Poly._from_dict(dict(q), opt) for q in Q]
    r = Poly._from_dict(dict(r), opt)
    if retract:
        # Try to bring results back to the original ring; keep the field
        # versions if any coefficient fails to convert.
        try:
            _Q, _r = [q.to_ring() for q in Q], r.to_ring()
        except CoercionFailed:
            pass
        else:
            Q, r = _Q, _r
    if not opt.polys:
        return [q.as_expr() for q in Q], r.as_expr()
    else:
        return Q, r
def groebner(F, *gens, **args):
    r"""
    Compute the reduced Gröbner basis of a polynomial system.

    This is a thin convenience wrapper around :class:`GroebnerBasis`.

    Parameters
    ==========
    F : list
        a set of polynomials
    \*gens : tuple
        polynomial generators
    \**args : dict
        a dictionary of parameters, namely

        order : str, optional
            Monomial order, defaults to ``lex``.
        method : {'buchberger', 'f5b'}, optional
            Set algorithm to compute Gröbner basis. By default, an improved
            implementation of the Buchberger algorithm is used.
        field : bool, optional
            Force coefficients domain to be a field. Defaults to False.

    Examples
    ========
    >>> F = [x*y - 2*x, 2*x**2 - y**2]
    >>> groebner(F)
    GroebnerBasis([2*x**2 - y**2, x*y - 2*x, y**3 - 2*y**2],
                  x, y, domain='ZZ', order='lex')
    >>> groebner(F, order=grevlex)
    GroebnerBasis([y**3 - 2*y**2, 2*x**2 - y**2, x*y - 2*x],
                  x, y, domain='ZZ', order='grevlex')
    >>> groebner(F, field=True)
    GroebnerBasis([x**2 - y**2/2, x*y - 2*x, y**3 - 2*y**2],
                  x, y, domain='QQ', order='lex')

    References
    ==========
    * :cite:`Buchberger2001systems`
    * :cite:`Cox2015ideals`

    See Also
    ========
    diofant.solvers.polysys.solve_poly_system
    """
    return GroebnerBasis(F, *gens, **args)
class GroebnerBasis(Basic):
    """Represents a reduced Gröbner basis."""

    def __new__(cls, F, *gens, **args):
        """Compute a reduced Gröbner basis for a system of polynomials."""
        allowed_flags(args, ['polys', 'method'])
        try:
            polys, opt = parallel_poly_from_expr(F, *gens, **args)
        except PolificationFailed as exc:
            raise ComputationFailed('groebner', len(F), exc)
        ring = opt.domain.poly_ring(*opt.gens, order=opt.order)
        # Gröbner computations are meaningless over inexact (float) domains.
        if not ring.domain.is_Exact:
            raise ValueError(f'Domain must be exact, got {ring.domain}')
        # Drop zero polynomials and convert to the low-level ring representation.
        polys = [ring.from_dict(dict(_.rep)) for _ in polys if not _.is_zero]
        G = _groebner(polys, ring, method=opt.method)
        G = [Poly._from_dict(dict(g), opt) for g in G]
        return cls._new(G, opt)

    @classmethod
    def _new(cls, basis, options):
        """Construct an instance directly from an already computed basis."""
        obj = Basic.__new__(cls)
        obj._basis = tuple(basis)
        obj._options = options
        return obj

    @property
    def args(self):
        """Arguments for Basic machinery: the basis expressions plus gens."""
        return (Tuple(*self.exprs),) + self.gens

    @property
    def exprs(self):
        """Basis elements as plain expressions."""
        return [poly.as_expr() for poly in self._basis]

    @property
    def polys(self):
        """Basis elements as Poly instances."""
        return list(self._basis)

    @property
    def gens(self):
        """Generators of the polynomial ring."""
        return self._options.gens

    @property
    def domain(self):
        """Coefficient domain of the basis."""
        return self._options.domain

    @property
    def order(self):
        """Monomial order the basis was computed with."""
        return self._options.order

    def __len__(self):
        return len(self._basis)

    def __iter__(self):
        # The 'polys' flag chooses between Poly and expression views.
        if self._options.polys:
            return iter(self.polys)
        else:
            return iter(self.exprs)

    def __getitem__(self, item):
        if self._options.polys:
            basis = self.polys
        else:
            basis = self.exprs
        return basis[item]

    def __hash__(self):
        return hash((self._basis, tuple(sorted(self._options.items()))))

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self._basis == other._basis and self._options == other._options
        elif iterable(other):
            # Compare against a plain sequence in either representation.
            return self.polys == list(other) or self.exprs == list(other)
        else:
            return False

    @property
    def dimension(self):
        """Dimension of the ideal, generated by a Gröbner basis."""
        sets = self.independent_sets
        if sets is not None:
            return max(len(s) for s in sets)

    @property
    def independent_sets(self):
        """Compute independent sets for ideal, generated by a Gröbner basis.

        Returns ``None`` for the trivial ideal (the one containing 1).

        References
        ==========
        * :cite:`Kredel1988indep`
        """
        if self.contains(Integer(1)):
            return
        # Leading monomials of the basis under the basis' own order.
        HTG = [_.LM(order=self.order) for _ in self.polys]

        def dimrec(S, U, M):
            # Recursively extend the candidate set S with generators from U,
            # keeping only sets independent of every leading monomial in HTG.
            U1 = U.copy()
            while U1:
                x = U1.pop(0)
                S1 = S + [x]
                t = Monomial(Mul(*S1), self.gens)
                for ht in HTG:
                    # Reject S1 if some leading monomial uses only vars of S1.
                    if all(a and b or not a for a, b in zip(ht, t)):
                        break
                else:
                    M = dimrec(S1, U1, M)
            if any(all(_ in m for _ in S) for m in M):
                # S is subsumed by an already recorded independent set.
                return M
            else:
                return [S] + M
        return dimrec([], list(self.gens), [])

    def set_order(self, order):
        """
        Convert a Gröbner basis from one ordering to another.

        Notes
        =====
        The FGLM algorithm :cite:`Faugere1993groebner` used to convert reduced Gröbner bases
        of zero-dimensional ideals from one ordering to another. Sometimes it
        is infeasible to compute a Gröbner basis with respect to a particular
        ordering directly.

        Examples
        ========
        >>> F = [x**2 - 3*y - x + 1, y**2 - 2*x + y - 1]
        >>> G = groebner(F, order='grlex')
        >>> G.set_order('lex') == groebner(F, order='lex')
        True
        """
        src_order = self.order
        dst_order = monomial_key(order)
        if src_order == dst_order:
            return self
        # FGLM only works for zero-dimensional ideals.
        if self.dimension != 0:
            raise NotImplementedError("can't convert Gröbner bases of "
                                      'ideals with positive dimension')
        polys = self.polys
        domain = self.domain
        # FGLM needs a field; retract to the original domain afterwards.
        opt = self._options.clone({'domain': domain.field,
                                   'order': dst_order})
        _ring = opt.domain.poly_ring(*opt.gens, order=src_order)
        for i, poly in enumerate(polys):
            poly = dict(poly.set_domain(opt.domain).rep)
            polys[i] = _ring.from_dict(poly)
        G = matrix_fglm(polys, _ring, dst_order)
        G = [Poly._from_dict(dict(g), opt) for g in G]
        if not domain.is_Field:
            # Clear denominators to return to the original ring domain.
            G = [g.clear_denoms(convert=True)[1] for g in G]
            opt.domain = domain
        return self._new(G, opt)

    def reduce(self, expr, auto=True):
        """
        Reduces a polynomial modulo a Gröbner basis.

        Given a polynomial ``f`` and a set of polynomials ``G = (g_1, ..., g_n)``,
        computes a set of quotients ``q = (q_1, ..., q_n)`` and the remainder ``r``
        such that ``f = q_1*g_1 + ... + q_n*g_n + r``, where ``r`` vanishes or ``r``
        is a completely reduced polynomial with respect to ``G``.

        Examples
        ========
        >>> f = 2*x**4 - x**2 + y**3 + y**2
        >>> G = groebner([x**3 - x, y**3 - y])
        >>> G.reduce(f)
        ([2*x, 1], x**2 + y**2 + y)
        >>> Q, r = _
        >>> expand(sum(q*g for q, g in zip(Q, G)) + r)
        2*x**4 - x**2 + y**3 + y**2
        >>> _ == f
        True
        """
        poly = Poly._from_expr(expr, self._options)
        polys = [poly] + list(self._basis)
        opt = self._options
        domain = self.domain
        retract = False
        # Division needs a field domain; lift a ring and retract afterwards.
        if auto and domain.is_Ring and not domain.is_Field:
            opt = self._options.clone({'domain': domain.field})
            retract = True
        _ring = opt.domain.poly_ring(*opt.gens, order=opt.order)
        for i, poly in enumerate(polys):
            poly = dict(poly.set_domain(opt.domain).rep)
            polys[i] = _ring.from_dict(poly)
        Q, r = polys[0].div(polys[1:])
        Q = [Poly._from_dict(dict(q), opt) for q in Q]
        r = Poly._from_dict(dict(r), opt)
        if retract:
            try:
                _Q, _r = [q.to_ring() for q in Q], r.to_ring()
            except CoercionFailed:
                pass
            else:
                Q, r = _Q, _r
        if not opt.polys:
            return [q.as_expr() for q in Q], r.as_expr()
        else:
            return Q, r

    def contains(self, poly):
        """
        Check if ``poly`` belongs the ideal generated by ``self``.

        Examples
        ========
        >>> f = 2*x**3 + y**3 + 3*y
        >>> G = groebner([x**2 + y**2 - 1, x*y - 2])
        >>> G.contains(f)
        True
        >>> G.contains(f + 1)
        False
        """
        # A polynomial is in the ideal iff its remainder on division is zero.
        return self.reduce(poly)[1] == 0
def poly(expr, *gens, **args):
    """
    Efficiently transform an expression into a polynomial.

    Unlike ``Poly(expr, ...)``, sub-polynomials raised to integer powers
    are expanded via polynomial arithmetic instead of full symbolic
    expansion, which is much faster for large powers.

    Examples
    ========
    >>> poly(x*(x**2 + x - 1)**2)
    Poly(x**5 + 2*x**4 - x**3 - 2*x**2 + x, x, domain='ZZ')
    """
    allowed_flags(args, [])

    def _poly(expr, opt):
        # Recursively build a Poly, distributing over Add terms and
        # multiplying out Mul factors with polynomial arithmetic.
        terms, poly_terms = [], []
        for term in Add.make_args(expr):
            factors, poly_factors = [], []
            for factor in Mul.make_args(term):
                if factor.is_Add:
                    poly_factors.append(_poly(factor, opt))
                elif (factor.is_Pow and factor.base.is_Add and
                        factor.exp.is_Integer and factor.exp >= 0):
                    # (Add)**n with integer n >= 0: raise the Poly to the
                    # power instead of expanding symbolically.
                    poly_factors.append(_poly(factor.base,
                                              opt)**factor.exp)
                else:
                    factors.append(factor)
            if not poly_factors:
                # No polynomial structure found in this term; keep it raw.
                terms.append(term)
            else:
                product = poly_factors[0]
                for factor in poly_factors[1:]:
                    product *= factor
                if factors:
                    factor = Mul(*factors)
                    if factor.is_Number:
                        product *= factor
                    else:
                        product *= Poly._from_expr(factor, opt)
                poly_terms.append(product)
        if not poly_terms:
            result = Poly._from_expr(expr, opt)
        else:
            result = poly_terms[0]
            for term in poly_terms[1:]:
                result += term
            if terms:
                term = Add(*terms)
                if term.is_Number:
                    result += term
                else:
                    result += Poly._from_expr(term, opt)
        # Reorder generators to the user-requested order, if any.
        return result.reorder(*opt.get('gens', ()), **args)

    expr = sympify(expr)
    if expr.is_Poly:
        return Poly(expr, *gens, **args)
    opt = build_options(gens, args)
    no_gens = not opt.gens
    if no_gens:
        # No generators given: detect them from the expression.
        gens = _find_gens([expr], opt)
        opt = opt.clone({'gens': gens})
    if 'expand' not in args:
        # Avoid redundant expansion; _poly does its own arithmetic.
        opt = opt.clone({'expand': False})
    res = _poly(expr, opt)
    if no_gens:
        # Drop generators that ended up unused.
        res = res.exclude()
    return res
|
CSD-Public/stonix | refs/heads/master | src/stonix_resources/main_window_pyqt5.py | 1 | ###############################################################################
# #
# Copyright 2019. Triad National Security, LLC. All rights reserved. #
# This program was produced under U.S. Government contract 89233218CNA000001 #
# for Los Alamos National Laboratory (LANL), which is operated by Triad #
# National Security, LLC for the U.S. Department of Energy/National Nuclear #
# Security Administration. #
# #
# All rights in the program are reserved by Triad National Security, LLC, and #
# the U.S. Department of Energy/National Nuclear Security Administration. The #
# Government is granted for itself and others acting on its behalf a #
# nonexclusive, paid-up, irrevocable worldwide license in this material to #
# reproduce, prepare derivative works, distribute copies to the public, #
# perform publicly and display publicly, and to permit others to do so. #
# #
###############################################################################
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'dylan_main_listview.ui'
#
# Created: Mon Jun 18 17:03:45 2012
# by: PyQt4 UI code generator 4.6.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
    """Build the STONIX main window widget tree (PyQt4-generated, ported to PyQt5).

    Layout: a horizontal splitter with the rule list on the left and, on the
    right, a vertical splitter holding the rule details (name, action buttons,
    description, results) above the per-rule configuration-item area with its
    Save/Cancel buttons. Also creates the menu bar and its actions.
    """
    MainWindow.setObjectName("MainWindow")
    MainWindow.resize(1020, 720)
    MainWindow.setAutoFillBackground(False)
    self.centralwidget = QtWidgets.QWidget(MainWindow)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.centralwidget.sizePolicy().hasHeightForWidth())
    self.centralwidget.setSizePolicy(sizePolicy)
    self.centralwidget.setAutoFillBackground(True)
    self.centralwidget.setObjectName("centralwidget")
    self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
    self.gridLayout.setSizeConstraint(QtWidgets.QLayout.SetMaximumSize)
    self.gridLayout.setSpacing(1)
    self.gridLayout.setContentsMargins(2, 2, 3, 2)
    self.gridLayout.setObjectName("gridLayout")
    # Top-level horizontal splitter: rule list | rule details.
    self.splitter = QtWidgets.QSplitter(self.centralwidget)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.splitter.sizePolicy().hasHeightForWidth())
    self.splitter.setSizePolicy(sizePolicy)
    self.splitter.setFrameShape(QtWidgets.QFrame.NoFrame)
    self.splitter.setFrameShadow(QtWidgets.QFrame.Raised)
    self.splitter.setOrientation(QtCore.Qt.Horizontal)
    self.splitter.setChildrenCollapsible(True)
    self.splitter.setObjectName("splitter")
    # Left pane: the list of rules.
    self.rule_list_widget = QtWidgets.QListWidget(self.splitter)
    self.rule_list_widget.setMinimumSize(QtCore.QSize(250, 0))
    self.rule_list_widget.setMaximumSize(QtCore.QSize(16777215, 16777215))
    self.rule_list_widget.setFrameShape(QtWidgets.QFrame.Box)
    self.rule_list_widget.setFrameShadow(QtWidgets.QFrame.Plain)
    self.rule_list_widget.setDragEnabled(False)
    self.rule_list_widget.setAlternatingRowColors(True)
    self.rule_list_widget.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection)
    self.rule_list_widget.setLayoutMode(QtWidgets.QListView.SinglePass)
    self.rule_list_widget.setViewMode(QtWidgets.QListView.ListMode)
    self.rule_list_widget.setSelectionRectVisible(True)
    self.rule_list_widget.setObjectName("rule_list_widget")
    # Right pane: rule details, split vertically into info and config areas.
    self.frame_rule_details = QtWidgets.QFrame(self.splitter)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.frame_rule_details.sizePolicy().hasHeightForWidth())
    self.frame_rule_details.setSizePolicy(sizePolicy)
    self.frame_rule_details.setMinimumSize(QtCore.QSize(0, 0))
    self.frame_rule_details.setFrameShape(QtWidgets.QFrame.NoFrame)
    self.frame_rule_details.setFrameShadow(QtWidgets.QFrame.Raised)
    self.frame_rule_details.setObjectName("frame_rule_details")
    self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.frame_rule_details)
    self.verticalLayout_3.setSpacing(0)
    self.verticalLayout_3.setSizeConstraint(QtWidgets.QLayout.SetMaximumSize)
    #self.verticalLayout_3.setMargin(0)
    self.verticalLayout_3.setObjectName("verticalLayout_3")
    self.splitter_right_side = QtWidgets.QSplitter(self.frame_rule_details)
    self.splitter_right_side.setFrameShape(QtWidgets.QFrame.NoFrame)
    self.splitter_right_side.setFrameShadow(QtWidgets.QFrame.Plain)
    self.splitter_right_side.setOrientation(QtCore.Qt.Vertical)
    self.splitter_right_side.setOpaqueResize(True)
    self.splitter_right_side.setObjectName("splitter_right_side")
    # Upper-right area: rule name, action buttons, description and results.
    self.rule_info = QtWidgets.QFrame(self.splitter_right_side)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.rule_info.sizePolicy().hasHeightForWidth())
    self.rule_info.setSizePolicy(sizePolicy)
    self.rule_info.setMinimumSize(QtCore.QSize(0, 250))
    self.rule_info.setFrameShape(QtWidgets.QFrame.Box)
    self.rule_info.setFrameShadow(QtWidgets.QFrame.Plain)
    self.rule_info.setObjectName("rule_info")
    self.verticalLayout = QtWidgets.QVBoxLayout(self.rule_info)
    self.verticalLayout.setSpacing(0)
    self.verticalLayout.setSizeConstraint(QtWidgets.QLayout.SetMaximumSize)
    #self.verticalLayout.setMargin(0)
    self.verticalLayout.setObjectName("verticalLayout")
    # Rule name label plus Report / Fix / Revert buttons (disabled until a
    # rule is selected).
    self.rule_action_frame = QtWidgets.QFrame(self.rule_info)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.rule_action_frame.sizePolicy().hasHeightForWidth())
    self.rule_action_frame.setSizePolicy(sizePolicy)
    self.rule_action_frame.setLayoutDirection(QtCore.Qt.LeftToRight)
    self.rule_action_frame.setFrameShape(QtWidgets.QFrame.NoFrame)
    self.rule_action_frame.setFrameShadow(QtWidgets.QFrame.Sunken)
    self.rule_action_frame.setObjectName("rule_action_frame")
    self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.rule_action_frame)
    self.horizontalLayout_3.setSpacing(0)
    self.horizontalLayout_3.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
    self.horizontalLayout_3.setObjectName("horizontalLayout_3")
    self.rule_name_label = QtWidgets.QLabel(self.rule_action_frame)
    self.rule_name_label.setObjectName("rule_name_label")
    self.horizontalLayout_3.addWidget(self.rule_name_label)
    spacerItem = QtWidgets.QSpacerItem(200, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
    self.horizontalLayout_3.addItem(spacerItem)
    self.report_button = QtWidgets.QPushButton(self.rule_action_frame)
    self.report_button.setEnabled(False)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.report_button.sizePolicy().hasHeightForWidth())
    self.report_button.setSizePolicy(sizePolicy)
    self.report_button.setFlat(False)
    self.report_button.setObjectName("report_button")
    self.horizontalLayout_3.addWidget(self.report_button)
    self.fix_button = QtWidgets.QPushButton(self.rule_action_frame)
    self.fix_button.setEnabled(False)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.fix_button.sizePolicy().hasHeightForWidth())
    self.fix_button.setSizePolicy(sizePolicy)
    self.fix_button.setObjectName("fix_button")
    self.horizontalLayout_3.addWidget(self.fix_button)
    self.revert_button = QtWidgets.QPushButton(self.rule_action_frame)
    self.revert_button.setEnabled(False)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.revert_button.sizePolicy().hasHeightForWidth())
    self.revert_button.setSizePolicy(sizePolicy)
    self.revert_button.setObjectName("revert_button")
    self.horizontalLayout_3.addWidget(self.revert_button)
    self.verticalLayout.addWidget(self.rule_action_frame)
    # Scrollable area with the rule description and results text boxes.
    self.rule_desc_res_scrollarea = QtWidgets.QScrollArea(self.rule_info)
    self.rule_desc_res_scrollarea.setFrameShape(QtWidgets.QFrame.NoFrame)
    self.rule_desc_res_scrollarea.setWidgetResizable(True)
    self.rule_desc_res_scrollarea.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
    self.rule_desc_res_scrollarea.setObjectName("rule_desc_res_scrollarea")
    self.rule_desc_scrollAreaWidgetContents = QtWidgets.QWidget(self.rule_desc_res_scrollarea)
    self.rule_desc_scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 696, 404))
    self.rule_desc_scrollAreaWidgetContents.setObjectName("rule_desc_scrollAreaWidgetContents")
    self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.rule_desc_scrollAreaWidgetContents)
    self.verticalLayout_2.setObjectName("verticalLayout_2")
    self.rule_desc_label = QtWidgets.QLabel(self.rule_desc_scrollAreaWidgetContents)
    self.rule_desc_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
    self.rule_desc_label.setObjectName("rule_desc_label")
    self.verticalLayout_2.addWidget(self.rule_desc_label)
    self.rule_instructions_text = QtWidgets.QPlainTextEdit(self.rule_desc_scrollAreaWidgetContents)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.rule_instructions_text.sizePolicy().hasHeightForWidth())
    self.rule_instructions_text.setSizePolicy(sizePolicy)
    self.rule_instructions_text.setMinimumSize(QtCore.QSize(0, 100))
    self.rule_instructions_text.setFrameShape(QtWidgets.QFrame.NoFrame)
    self.rule_instructions_text.setFrameShadow(QtWidgets.QFrame.Plain)
    self.rule_instructions_text.setLineWidth(0)
    self.rule_instructions_text.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    self.rule_instructions_text.setTextInteractionFlags(QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse)
    self.rule_instructions_text.setObjectName("rule_instructions_text")
    self.verticalLayout_2.addWidget(self.rule_instructions_text)
    self.rule_results_label = QtWidgets.QLabel(self.rule_desc_scrollAreaWidgetContents)
    self.rule_results_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
    self.rule_results_label.setObjectName("rule_results_label")
    self.verticalLayout_2.addWidget(self.rule_results_label)
    self.rule_results_text = QtWidgets.QPlainTextEdit(self.rule_desc_scrollAreaWidgetContents)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.rule_results_text.sizePolicy().hasHeightForWidth())
    self.rule_results_text.setSizePolicy(sizePolicy)
    self.rule_results_text.setMinimumSize(QtCore.QSize(0, 100))
    self.rule_results_text.setFrameShape(QtWidgets.QFrame.NoFrame)
    self.rule_results_text.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
    self.rule_results_text.setTextInteractionFlags(QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse)
    self.rule_results_text.setObjectName("rule_results_text")
    self.verticalLayout_2.addWidget(self.rule_results_text)
    self.rule_desc_res_scrollarea.setWidget(self.rule_desc_scrollAreaWidgetContents)
    self.verticalLayout.addWidget(self.rule_desc_res_scrollarea)
    # Lower-right area: scrollable per-rule configuration items plus the
    # Save/Cancel button row.
    self.outer_conf_item_frame = QtWidgets.QFrame(self.splitter_right_side)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.outer_conf_item_frame.sizePolicy().hasHeightForWidth())
    self.outer_conf_item_frame.setSizePolicy(sizePolicy)
    self.outer_conf_item_frame.setMinimumSize(QtCore.QSize(0, 250))
    self.outer_conf_item_frame.setFrameShape(QtWidgets.QFrame.Box)
    self.outer_conf_item_frame.setFrameShadow(QtWidgets.QFrame.Plain)
    self.outer_conf_item_frame.setObjectName("outer_conf_item_frame")
    self.verticalLayout_8 = QtWidgets.QVBoxLayout(self.outer_conf_item_frame)
    self.verticalLayout_8.setSpacing(0)
    self.verticalLayout_8.setSizeConstraint(QtWidgets.QLayout.SetMaximumSize)
    #self.verticalLayout_8.setMargin(0)
    self.verticalLayout_8.setObjectName("verticalLayout_8")
    self.conf_items_scroll_area = QtWidgets.QScrollArea(self.outer_conf_item_frame)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.conf_items_scroll_area.sizePolicy().hasHeightForWidth())
    self.conf_items_scroll_area.setSizePolicy(sizePolicy)
    self.conf_items_scroll_area.setFrameShape(QtWidgets.QFrame.NoFrame)
    self.conf_items_scroll_area.setWidgetResizable(True)
    self.conf_items_scroll_area.setObjectName("conf_items_scroll_area")
    self.configitem_scrollarea = QtWidgets.QWidget(self.conf_items_scroll_area)
    self.configitem_scrollarea.setGeometry(QtCore.QRect(0, 0, 696, 203))
    self.configitem_scrollarea.setObjectName("configitem_scrollarea")
    self.conf_items_scroll_area.setWidget(self.configitem_scrollarea)
    self.verticalLayout_8.addWidget(self.conf_items_scroll_area)
    self.save_cancel_frame = QtWidgets.QFrame(self.outer_conf_item_frame)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.save_cancel_frame.sizePolicy().hasHeightForWidth())
    self.save_cancel_frame.setSizePolicy(sizePolicy)
    self.save_cancel_frame.setMinimumSize(QtCore.QSize(0, 45))
    self.save_cancel_frame.setFrameShape(QtWidgets.QFrame.NoFrame)
    self.save_cancel_frame.setFrameShadow(QtWidgets.QFrame.Raised)
    self.save_cancel_frame.setObjectName("save_cancel_frame")
    self.horizontalLayout_6 = QtWidgets.QHBoxLayout(self.save_cancel_frame)
    self.horizontalLayout_6.setObjectName("horizontalLayout_6")
    spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
    self.horizontalLayout_6.addItem(spacerItem1)
    self.save_button = QtWidgets.QPushButton(self.save_cancel_frame)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.save_button.sizePolicy().hasHeightForWidth())
    self.save_button.setSizePolicy(sizePolicy)
    self.save_button.setObjectName("save_button")
    self.horizontalLayout_6.addWidget(self.save_button)
    self.cancel_button = QtWidgets.QPushButton(self.save_cancel_frame)
    sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Preferred)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(self.cancel_button.sizePolicy().hasHeightForWidth())
    self.cancel_button.setSizePolicy(sizePolicy)
    self.cancel_button.setObjectName("cancel_button")
    self.horizontalLayout_6.addWidget(self.cancel_button)
    self.verticalLayout_8.addWidget(self.save_cancel_frame)
    self.verticalLayout_3.addWidget(self.splitter_right_side)
    self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)
    MainWindow.setCentralWidget(self.centralwidget)
    #####
    # start menu creation
    self.menubar = QtWidgets.QMenuBar(MainWindow)
    self.menubar.setGeometry(QtCore.QRect(0, 0, 1020, 23))
    self.menubar.setObjectName("menubar")
    self.menuFile = QtWidgets.QMenu(self.menubar)
    self.menuFile.setObjectName("menuFile")
    self.menuEdit = QtWidgets.QMenu(self.menubar)
    self.menuEdit.setObjectName("menuEdit")
    self.menuRule = QtWidgets.QMenu(self.menubar)
    self.menuRule.setObjectName("menuRule")
    self.menuHelp = QtWidgets.QMenu(self.menubar)
    self.menuHelp.setObjectName("menuHelp")
    MainWindow.setMenuBar(self.menubar)
    self.actionOpen = QtWidgets.QAction(MainWindow)
    self.actionOpen.setObjectName("actionOpen")
    self.actionLog = QtWidgets.QAction(MainWindow)
    self.actionLog.setObjectName("actionLog")
    self.actionLog.setShortcut('Ctrl+L')
    # Commented out setStatusTip calls to keep them from stomping on run
    # messages
    #self.actionLog.setStatusTip('View Run Log')
    self.actionQuit = QtWidgets.QAction(MainWindow)
    self.actionQuit.setMenuRole(QtWidgets.QAction.QuitRole)
    self.actionQuit.setObjectName("actionQuit")
    self.actionQuit.setShortcut('Ctrl+Q')
    #self.actionQuit.setStatusTip('Quit application')
    self.actionAbout = QtWidgets.QAction(MainWindow)
    self.actionAbout.setObjectName("actionAbout")
    self.actionRun_All = QtWidgets.QAction(MainWindow)
    self.actionRun_All.setObjectName("actionRun_All")
    self.actionRun_All.setShortcut('Ctrl+R')
    #self.actionRun_All.setStatusTip('Run Fix All')
    self.actionReport_All = QtWidgets.QAction(MainWindow)
    self.actionReport_All.setObjectName("actionReport_All")
    self.actionReport_All.setShortcut('Ctrl+G')
    #self.actionReport_All.setStatusTip('Run Report All')
    self.actionRevert_All = QtWidgets.QAction(MainWindow)
    self.actionRevert_All.setObjectName("actionRevert_All")
    #self.actionRevert_All.setStatusTip('Attempt to Revert All Changes')
    #self.actionEdit_Selected = QtGui.QAction(MainWindow)
    #self.actionEdit_Selected.setObjectName("actionEdit_Selected")
    self.actionStop = QtWidgets.QAction(MainWindow)
    self.actionStop.setObjectName("actionStop")
    self.actionStop.setShortcut('Ctrl+K')
    #self.actionStop.setStatusTip('Stop Current Run')
    self.menuFile.addAction(self.actionLog)
    self.actionGuiHelp = QtWidgets.QAction(MainWindow)
    self.actionGuiHelp.setObjectName("actionGuiHelp")
    self.actionGuiHelp.setShortcut('Ctrl+H')
    self.menuHelp.addAction(self.actionGuiHelp)
    self.menuFile.addSeparator()
    self.menuFile.addAction(self.actionQuit)
    self.menuRule.addAction(self.actionRun_All)
    self.menuRule.addAction(self.actionReport_All)
    self.menuRule.addAction(self.actionRevert_All)
    self.menuRule.addAction(self.actionStop)
    #self.menuRule.addAction(self.actionEdit_Selected)
    self.menuHelp.addAction(self.actionAbout)
    self.menubar.addAction(self.menuFile.menuAction())
    self.menubar.addAction(self.menuEdit.menuAction())
    self.menubar.addAction(self.menuRule.menuAction())
    self.menubar.addAction(self.menuHelp.menuAction())
    self.retranslateUi(MainWindow)
    QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Set every user-visible string on the widgets.

        Standard Qt Designer-generated hook: each literal is wrapped in
        QApplication.translate() so the UI can be localized. The 'sdf'
        plain-text bodies are designer placeholders replaced at runtime.
        """
        MainWindow.setWindowTitle(QtWidgets.QApplication.translate("MainWindow", "STONIX", None))
        # Rich-text placeholder; the actual rule name is filled in at runtime.
        self.rule_name_label.setText(QtWidgets.QApplication.translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
            "<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
            "p, li { white-space: pre-wrap; }\n"
            "</style></head><body style=\" font-family:\'Sans\'; font-size:12pt; font-weight:400; font-style:normal;\">\n"
            "<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:16pt; font-weight:600;\">[Rule Name]</span></p></body></html>", None))
        # Per-rule action buttons.
        self.report_button.setText(QtWidgets.QApplication.translate("MainWindow", "Report", None))
        self.revert_button.setText(QtWidgets.QApplication.translate("MainWindow", "Revert", None))
        self.fix_button.setText(QtWidgets.QApplication.translate("MainWindow", "Fix", None))
        self.rule_desc_label.setText(QtWidgets.QApplication.translate("MainWindow", "Rule Description:", None))
        self.rule_instructions_text.setPlainText(QtWidgets.QApplication.translate("MainWindow", "sdf\n"
            "sdf\n"
            "sdf\n"
            "sdf", None))
        self.rule_results_label.setText(QtWidgets.QApplication.translate("MainWindow", "Results:", None))
        self.rule_results_text.setPlainText(QtWidgets.QApplication.translate("MainWindow", "sdf\n"
            "sdf\n"
            "sdf\n"
            "sdf", None))
        self.save_button.setText(QtWidgets.QApplication.translate("MainWindow", "Save", None))
        self.cancel_button.setText(QtWidgets.QApplication.translate("MainWindow", "Cancel", None))
        # Menu bar titles.
        self.menuFile.setTitle(QtWidgets.QApplication.translate("MainWindow", "File", None))
        self.menuEdit.setTitle(QtWidgets.QApplication.translate("MainWindow", "Edit", None))
        self.menuRule.setTitle(QtWidgets.QApplication.translate("MainWindow", "Rule", None))
        self.menuHelp.setTitle(QtWidgets.QApplication.translate("MainWindow", "Help", None))
        # Menu action captions.
        self.actionOpen.setText(QtWidgets.QApplication.translate("MainWindow", "Open", None))
        self.actionLog.setText(QtWidgets.QApplication.translate("MainWindow", "Log", None))
        self.actionQuit.setText(QtWidgets.QApplication.translate("MainWindow", "Quit", None))
        self.actionAbout.setText(QtWidgets.QApplication.translate("MainWindow", "About", None))
        self.actionRun_All.setText(QtWidgets.QApplication.translate("MainWindow", "Fix All", None))
        self.actionReport_All.setText(QtWidgets.QApplication.translate("MainWindow", "Report All", None))
        self.actionRevert_All.setText(QtWidgets.QApplication.translate("MainWindow", "Revert All", None))
        self.actionStop.setText(QtWidgets.QApplication.translate("MainWindow", "Stop Run", None))
        self.actionGuiHelp.setText(QtWidgets.QApplication.translate("MainWindow", "Help", None))
        # self.actionEdit_Selected.setText(QtWidgets.QApplication.translate("MainWindow", "Edit Selected", None, QtWidgets.QApplication.UnicodeUTF8))
|
Bysmyyr/chromium-crosswalk | refs/heads/master | tools/telemetry/third_party/gsutilz/third_party/apitools/apitools/base/py/stream_slice.py | 23 | #!/usr/bin/env python
"""Small helper class to provide a small slice of a stream."""
from apitools.base.py import exceptions
class StreamSlice(object):
    """A read-only, fixed-length window onto an underlying stream."""

    def __init__(self, stream, max_bytes):
        # Track both the fixed total size and the bytes not yet consumed.
        self.__stream = stream
        self.__remaining_bytes = max_bytes
        self.__max_bytes = max_bytes

    def __str__(self):
        return 'Slice of stream %s with %s/%s bytes not yet read' % (
            self.__stream, self.__remaining_bytes, self.__max_bytes)

    def __len__(self):
        return self.__max_bytes

    def __nonzero__(self):
        # For 32-bit python2.x, len() cannot exceed a 32-bit number; avoid
        # accidental len() calls from httplib in the form of "if this_object:".
        return bool(self.__max_bytes)

    @property
    def length(self):
        """Total slice size; safe where 32-bit len() would overflow."""
        return self.__max_bytes

    def read(self, size=None):
        """Read at most *size* bytes from this slice.

        When *size* is None, read everything still owed to the slice.

        Raises:
          exceptions.StreamExhausted: the underlying stream returned no
            data while this slice still expected more bytes.
        """
        remaining = self.__remaining_bytes
        read_size = remaining if size is None else min(size, remaining)
        data = self.__stream.read(read_size)
        if not data and read_size > 0:
            raise exceptions.StreamExhausted(
                'Not enough bytes in stream; expected %d, exhausted '
                'after %d' % (
                    self.__max_bytes,
                    self.__max_bytes - self.__remaining_bytes))
        self.__remaining_bytes -= len(data)
        return data
|
roger-zhao/ardupilot-3.3 | refs/heads/master | mk/PX4/Tools/genmsg/test/test_genmsg_names.py | 216 | # Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
import sys
def test_normalize_package_context():
    """normalize isn't meant to be fancy; it only trims whitespace and
    trailing slashes."""
    from genmsg.names import normalize_package_context
    for raw in ('foo', ' foo ', 'foo/', 'foo//', 'foo///'):
        assert 'foo' == normalize_package_context(raw)
def test_resource_name():
    """resource_name qualifies 'bar' with 'foo' unless my_pkg matches it."""
    from genmsg import resource_name
    cases = (
        ({}, 'foo/bar'),
        ({'my_pkg': 'foo'}, 'bar'),
        ({'my_pkg': 'bar'}, 'foo/bar'),
        ({'my_pkg': ''}, 'foo/bar'),
        ({'my_pkg': None}, 'foo/bar'),
    )
    for kwargs, expected in cases:
        assert expected == resource_name('foo', 'bar', **kwargs)
def test_resource_name_base():
    """resource_name_base keeps only the part after the last separator."""
    from genmsg import resource_name_base
    for raw, expected in (('', ''),
                          ('bar', 'bar'),
                          ('foo/bar', 'bar'),
                          ('/bar', 'bar'),
                          ('foo/', '')):
        assert expected == resource_name_base(raw)
def test_resource_name_package():
    """resource_name_package yields the package half, or None if absent."""
    from genmsg import resource_name_package
    for raw, expected in (('', None),
                          ('foo', None),
                          ('foo/', 'foo'),
                          ('foo/bar', 'foo')):
        assert expected == resource_name_package(raw)
def test_package_resource_name():
    """package_resource_name splits 'pkg/name' into its two halves."""
    from genmsg import package_resource_name
    for raw, expected in (('', ('', '')),
                          ('foo', ('', 'foo')),
                          ('foo/bar', ('foo', 'bar')),
                          ('foo/', ('foo', ''))):
        assert expected == package_resource_name(raw)
    # only allowed single separator
    raised = False
    try:
        package_resource_name("foo/bar/baz")
    except ValueError:
        raised = True
    assert raised, "should have raised ValueError"
def test_is_legal_resource_name():
    """Resource names allow word characters and '/', reject everything else."""
    from genmsg import is_legal_resource_name
    illegal = (None, '', 'hello\n', '\t', 'foo++', 'foo-bar', '#foo',
               ' name', 'name ',
               '~name', '/name',
               '1name', 'foo\\')
    for candidate in illegal:
        assert not is_legal_resource_name(candidate), candidate
    legal = ('f', 'f1', 'f_', 'foo', 'foo_bar', 'foo/bar', 'roslib/Log')
    for candidate in legal:
        assert is_legal_resource_name(candidate), candidate
def test_is_legal_resource_base_name():
    """Base names are like resource names but must not contain separators."""
    from genmsg import is_legal_resource_base_name
    illegal = (None, '', 'hello\n', '\t', 'foo++', 'foo-bar', '#foo',
               'f/', 'foo/bar', '/', '/a',
               'f//b',
               '~f', '~a/b/c',
               '~/f',
               ' name', 'name ',
               '1name', 'foo\\')
    for candidate in illegal:
        assert not is_legal_resource_base_name(candidate), candidate
    legal = ('f', 'f1', 'f_', 'foo', 'foo_bar')
    for candidate in legal:
        assert is_legal_resource_base_name(candidate), "[%s]" % candidate
|
fnielsen/dasem | refs/heads/master | tests/test_gutenberg.py | 1 |
import pytest
from dasem.gutenberg import Word2Vec
@pytest.fixture
def w2v():
    """Fixture providing a dasem Gutenberg Word2Vec instance."""
    return Word2Vec()
def test_w2v(w2v):
    """most_similar with default arguments returns a 10-element list."""
    word_and_similarities = w2v.most_similar('dreng')
    assert len(word_and_similarities) == 10
|
abendleiter/Django-facebook | refs/heads/master | django_facebook/settings.py | 22 | from django.conf import settings
import logging
logger = logging.getLogger(__name__)

# NOTE: every value below is read with getattr(settings, NAME, default), so a
# project can override any of them from its own Django settings module.

# : Your facebook app id
FACEBOOK_APP_ID = getattr(settings, 'FACEBOOK_APP_ID', None)
# : Your facebook app secret
FACEBOOK_APP_SECRET = getattr(settings, 'FACEBOOK_APP_SECRET', None)
# : The default scope we should use, note that registration will break without email
FACEBOOK_DEFAULT_SCOPE = getattr(settings, 'FACEBOOK_DEFAULT_SCOPE', [
    'email', 'user_about_me', 'user_birthday', 'user_website'])
# : If we should store likes
FACEBOOK_STORE_LIKES = getattr(settings, 'FACEBOOK_STORE_LIKES', False)
# : If we should store friends
FACEBOOK_STORE_FRIENDS = getattr(settings, 'FACEBOOK_STORE_FRIENDS', False)
# : If we should be using celery to store friends and likes (recommended)
FACEBOOK_CELERY_STORE = getattr(settings, 'FACEBOOK_CELERY_STORE', False)
# : Use celery for updating tokens, recommended since it's quite slow
FACEBOOK_CELERY_TOKEN_EXTEND = getattr(
    settings, 'FACEBOOK_CELERY_TOKEN_EXTEND', False)

default_registration_backend = 'django_facebook.registration_backends.FacebookRegistrationBackend'
# : Allows you to overwrite the registration backend
# : Specify a full path to a class (defaults to django_facebook.registration_backends.FacebookRegistrationBackend)
FACEBOOK_REGISTRATION_BACKEND = getattr(
    settings, 'FACEBOOK_REGISTRATION_BACKEND', default_registration_backend)

# Absolute canvas page url as per facebook standard
FACEBOOK_CANVAS_PAGE = getattr(settings, 'FACEBOOK_CANVAS_PAGE',
                               'http://apps.facebook.com/django_facebook_test/')

# Disable this setting if you don't want to store a local image
FACEBOOK_STORE_LOCAL_IMAGE = getattr(
    settings, 'FACEBOOK_STORE_LOCAL_IMAGE', True)

# Track all raw data coming in from FB
FACEBOOK_TRACK_RAW_DATA = getattr(settings, 'FACEBOOK_TRACK_RAW_DATA', False)

FACEBOOK_DEBUG_REDIRECTS = getattr(settings, 'FACEBOOK_DEBUG_REDIRECTS', False)

# READ only mode, convenient when doing load testing etc.
FACEBOOK_READ_ONLY = getattr(settings, 'FACEBOOK_READ_ONLY', False)

# Allow custom registration template
default_registration_template = [
    'django_facebook/registration.html', 'registration/registration_form.html']
FACEBOOK_REGISTRATION_TEMPLATE = getattr(
    settings, 'FACEBOOK_REGISTRATION_TEMPLATE', default_registration_template)

# Allow custom signup form
FACEBOOK_REGISTRATION_FORM = getattr(
    settings, 'FACEBOOK_REGISTRATION_FORM', None)

# Fall back redirect location when no other location was found
FACEBOOK_LOGIN_DEFAULT_REDIRECT = getattr(
    settings, 'FACEBOOK_LOGIN_DEFAULT_REDIRECT', '/')

# Force profile update every login
# (fix: this assignment appeared twice in the original module; the redundant
# duplicate has been removed, final value is unchanged)
FACEBOOK_FORCE_PROFILE_UPDATE_ON_LOGIN = getattr(
    settings, 'FACEBOOK_FORCE_PROFILE_UPDATE_ON_LOGIN', False)

# Retry an open graph share 6 times (once every 15 minutes)
FACEBOOK_OG_SHARE_RETRIES = getattr(settings, 'FACEBOOK_OG_SHARE_RETRIES', 6)

# Retry a failed open graph share (when we have an updated token) for this
# number of days
FACEBOOK_OG_SHARE_RETRY_DAYS = getattr(
    settings, 'FACEBOOK_OG_SHARE_RETRY_DAYS', 7)

FACEBOOK_OG_SHARE_DB_TABLE = getattr(
    settings, 'FACEBOOK_OG_SHARE_DB_TABLE', None)

# Profile image location
FACEBOOK_PROFILE_IMAGE_PATH = getattr(
    settings, 'FACEBOOK_PROFILE_IMAGE_PATH', None)

# Ability to easily overwrite classes used for certain tasks
FACEBOOK_CLASS_MAPPING = getattr(
    settings, 'FACEBOOK_CLASS_MAPPING', None)

FACEBOOK_SKIP_VALIDATE = getattr(
    settings, 'FACEBOOK_SKIP_VALIDATE', False)
|
agaffney/ansible | refs/heads/devel | test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.py | 68 | #!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import sys
def main():
    """Report every use of `get_exception` in the files named on argv/stdin.

    lib/ansible/module_utils/basic.py is allowed a single occurrence (the
    backwards-compatibility import); every other hit is printed as
    path:line:col.
    """
    basic_allow_once = True

    paths = sys.argv[1:] or sys.stdin.read().splitlines()
    for path in paths:
        with open(path, 'r') as path_fd:
            for lineno, text in enumerate(path_fd.readlines()):
                match = re.search(r'([^a-zA-Z0-9_]get_exception[^a-zA-Z0-9_])', text)
                if not match:
                    continue
                if path == 'lib/ansible/module_utils/basic.py' and basic_allow_once:
                    # basic.py is allowed to import get_exception for backwards compatibility but should not call it anywhere
                    basic_allow_once = False
                    continue
                print('%s:%d:%d: do not use `get_exception`' % (
                    path, lineno + 1, match.start(1) + 1))


if __name__ == '__main__':
    main()
|
barjuegocreador93/pycab | refs/heads/master | resources/model.py | 1 | from sqlalchemy import *
from settings import DATABASE
def init_connection():
    """Return a SQLAlchemy engine for the configured DATABASE.

    Only a 'sqlite3' engine with an explicit 'path' entry is supported;
    any other configuration yields a bare object() placeholder.
    """
    if DATABASE.get('engine') == 'sqlite3' and 'path' in DATABASE:
        return create_engine('sqlite://' + DATABASE['path'])
    return object()
# Declarative base for any SQLAlchemy-mapped classes defined elsewhere.
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def typer(data):
    """Render *data* as a SQL literal fragment.

    Strings are single-quoted, with embedded single quotes doubled
    (standard SQL escaping) — the original left quotes unescaped, which
    broke any query containing one and invited SQL injection. Any other
    value is rendered with str().
    """
    if isinstance(data, str):
        return "'" + data.replace("'", "''") + "'"
    return str(data)
def Equals(key, data):
    """Render a SQL equality comparison of *key* against literal *data*."""
    return " %s = %s" % (key, typer(data))
def Max(key, data):
    """Render a SQL greater-than comparison of *key* against *data*."""
    return " %s > %s " % (key, typer(data))
def Min(key, data):
    """Render a SQL less-than comparison of *key* against *data*."""
    return " %s < %s " % (key, typer(data))
def MinE(key, data):
    """Render a SQL less-than-or-equal comparison of *key* against *data*."""
    return " %s <= %s " % (key, typer(data))
def MaxE(key, data):
    """Render a SQL greater-than-or-equal comparison of *key* against *data*."""
    return " %s >= %s " % (key, typer(data))
def And(*ops):
    """Join condition fragments with SQL AND.

    Bug fix: the original prefixed ' and ' before the FIRST fragment as
    well, producing invalid SQL such as "WHERE and x = 1". The separator
    now only appears between fragments.
    """
    return ' and '.join(ops)
def Or(ops):
    """Join an iterable of condition fragments with SQL OR.

    Bug fix: the original prefixed ' or ' before the FIRST fragment as
    well, producing invalid SQL such as "WHERE or x = 1". The separator
    now only appears between fragments. (Signature unchanged: *ops* is a
    single iterable, unlike And which is variadic.)
    """
    return ' or '.join(ops)
class Model(object):
    """Minimal ActiveRecord-style base class over SQLAlchemy reflection.

    Each subclass maps to the table named after the class, with an integer
    'id' primary key as the first column.

    NOTE(review): Where()/Update() build SQL by plain string concatenation,
    so values are not protected against SQL injection. Python 2 only:
    dict.has_key, dict.iteritems and a print statement are used.
    """
    @staticmethod
    def props():
        # Hook for subclasses to describe their properties; unused here.
        pass
    def __init__(self,*arg):
        # Rows are passed positionally; the first column is the primary key.
        self.id=int(arg[0])
    @classmethod
    def Create(cls,**args):
        # INSERT a new row; column values come from keyword arguments.
        e=init_connection()
        m=MetaData(reflect=True,bind=e)
        table=m.tables[cls.__name__]
        ins=table.insert().values(**args)
        e.execute(ins)
    @classmethod
    def Columns(cls):
        # Reflected column collection of the table backing this model.
        e = init_connection()
        m = MetaData(e, reflect=True)
        return m.tables[cls.__name__].c
    @classmethod
    def Where(cls, qry):
        # SELECT rows matching *qry* (a raw SQL fragment from the
        # Equals/Max/Min/And/Or helpers) and wrap each row in cls.
        e = init_connection()
        m = MetaData(bind=e, reflect=True)
        t=m.tables[cls.__name__]
        #sera=select([t]).where(qry)
        select_m = "SELECT * FROM '"+cls.__name__+"' WHERE "+(str(qry))+""
        return [(cls(*i))for i in e.execute(select_m)]
    def Update(self):
        # Persist every non-id attribute of this instance via a raw UPDATE.
        #update=select([sql[0]]).set(**self.__class__.__dict__).where()
        e = init_connection()
        m = MetaData(e, reflect=True)
        set=''  # NOTE(review): shadows the builtin 'set'
        i=0
        for k,v in self.__dict__.iteritems():  # Python 2 only
            if k != 'id' and isinstance(k,str):
                data=''
                # Quote string values; everything else is rendered as-is.
                if not isinstance(v,str):
                    data+= " {} = {} ".format(k,str(v))
                else:
                    data+= " {} = '{}' ".format(k, v)
                # Comma-separate every assignment after the first.
                if i>0 :
                    data=' , '+data
                set+=data
                i+=1
        update = "UPDATE '" + self.__class__.__name__ + "' SET " + set + " WHERE id = "+str(self.id)
        e.execute(update)
        pass
    @classmethod
    def Drop(cls):
        # DROP the table backing this model.
        e=init_connection()
        m=MetaData(bind=e,reflect=True)
        table=m.tables[cls.__name__]
        table.drop(e)
        pass
    @classmethod
    def create_table(cls,*args):
        # CREATE a table named after the class: an autoincrement 'id'
        # primary key plus any extra Column objects supplied by the caller.
        e=init_connection()
        m=MetaData(e)
        print args  # Python 2 print statement (debug output)
        t=Table(cls.__name__,m,Column('id', Integer, primary_key=True, autoincrement=True),*args)
        t.create(e)
    @classmethod
    def Create_Migration(cls):
        # Not implemented.
        pass
    #relational methods
    def addOne(self,item,fkey="defaultfk",ToOne=False):
        # Store item.id in this instance's foreign-key attribute and save.
        # When *fkey* is absent on the instance, fall back to the
        # '<ItemClass>_id' naming convention. With ToOne=True the reverse
        # link is stored on *item* as well.
        if isinstance(item,Model) and isinstance(fkey,str):
            if self.__dict__.has_key(fkey):
                self.__dict__.__setitem__(fkey,item.id)
                self.Update()
            else:
                fkey=item.__class__.__name__+"_id"
                if self.__dict__.has_key(fkey):
                    self.__dict__.__setitem__(fkey, item.id)
                    self.Update()
            if ToOne : item.addOne(self)
    def GetOne(self,typemodel,fkey="defaultfk"):
        # Fetch the single related instance referenced by this object's
        # foreign-key attribute; returns 0 when no link can be resolved.
        if isinstance(typemodel,Model):
            if self.__dict__.has_key(fkey):
                return typemodel.Where(Equals("id",self.__dict__.__getitem__(fkey)))[0]
            else:
                fkey = typemodel.__class__.__name__ + "_id"
                if self.__dict__.has_key(fkey):
                    return typemodel.Where(Equals("id", self.__dict__.__getitem__(fkey)))[0]
        return 0
|
def add_native_methods(clazz):
    """Attach stub implementations of the native methods to *clazz*.

    Each stub mirrors an unimplemented native entry point: it is installed
    on the class as a staticmethod and raises NotImplementedError when
    called.
    """
    def setNativeFocusOwner__java_awt_peer_ComponentPeer__(a0):
        raise NotImplementedError()

    def getNativeFocusOwner____():
        raise NotImplementedError()

    def getNativeFocusedWindow____():
        raise NotImplementedError()

    for stub in (setNativeFocusOwner__java_awt_peer_ComponentPeer__,
                 getNativeFocusOwner____,
                 getNativeFocusedWindow____):
        setattr(clazz, stub.__name__, staticmethod(stub))
|
invisiblek/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/wsgiref/util.py | 55 | """Miscellaneous WSGI-related Utilities"""
import posixpath
__all__ = [
'FileWrapper', 'guess_scheme', 'application_uri', 'request_uri',
'shift_path_info', 'setup_testing_defaults',
]
class FileWrapper:
    """Iterate a file-like object in fixed-size chunks (WSGI file wrapper)."""

    def __init__(self, filelike, blksize=8192):
        self.filelike = filelike
        self.blksize = blksize
        # Expose the underlying close() so servers can release the file.
        if hasattr(filelike, 'close'):
            self.close = filelike.close

    def __getitem__(self, key):
        # Legacy sequence-iteration protocol: the key is ignored, each
        # access simply yields the next chunk.
        chunk = self.filelike.read(self.blksize)
        if not chunk:
            raise IndexError
        return chunk

    def __iter__(self):
        return self

    def __next__(self):
        chunk = self.filelike.read(self.blksize)
        if not chunk:
            raise StopIteration
        return chunk
def guess_scheme(environ):
    """Return a guess for whether 'wsgi.url_scheme' should be 'http' or 'https'
    """
    # CGI convention: servers set HTTPS to one of these flags under TLS.
    return 'https' if environ.get("HTTPS") in ('yes', 'on', '1') else 'http'
def application_uri(environ):
    """Return the application's base URI (no PATH_INFO or QUERY_STRING)"""
    from urllib.parse import quote

    scheme = environ['wsgi.url_scheme']
    url = scheme + '://'
    host = environ.get('HTTP_HOST')
    if host:
        # HTTP_HOST already carries any non-default port.
        url += host
    else:
        url += environ['SERVER_NAME']
        # Append the port only when it differs from the scheme's default.
        default_port = '443' if scheme == 'https' else '80'
        port = environ['SERVER_PORT']
        if port != default_port:
            url += ':' + port
    url += quote(environ.get('SCRIPT_NAME') or '/')
    return url
def request_uri(environ, include_query=True):
    """Return the full request URI, optionally including the query string"""
    from urllib.parse import quote

    url = application_uri(environ)
    path_info = quote(environ.get('PATH_INFO', ''), safe='/;=,')
    # With an empty SCRIPT_NAME the application URI already ends in '/',
    # so drop PATH_INFO's leading slash to avoid doubling it.
    url += path_info[1:] if not environ.get('SCRIPT_NAME') else path_info
    query = environ.get('QUERY_STRING')
    if include_query and query:
        url += '?' + query
    return url
def shift_path_info(environ):
    """Shift a name from PATH_INFO to SCRIPT_NAME, returning it

    If there are no remaining path segments in PATH_INFO, return None.
    Note: 'environ' is modified in-place; use a copy if you need to keep
    the original PATH_INFO or SCRIPT_NAME.

    Note: when PATH_INFO is just a '/', this returns '' and appends a trailing
    '/' to SCRIPT_NAME, even though empty path segments are normally ignored,
    and SCRIPT_NAME doesn't normally end in a '/'. This is intentional
    behavior, to ensure that an application can tell the difference between
    '/x' and '/x/' when traversing to objects.
    """
    path_info = environ.get('PATH_INFO','')
    if not path_info:
        return None

    path_parts = path_info.split('/')
    # Drop empty and '.' segments, but leave the first and last elements
    # alone so leading/trailing slashes are preserved.
    path_parts[1:-1] = [p for p in path_parts[1:-1] if p and p != '.']
    # The segment being shifted is always the one right after the leading '/'.
    name = path_parts[1]
    del path_parts[1]

    script_name = environ.get('SCRIPT_NAME','')
    # normpath collapses any '..' the shifted name introduces.
    script_name = posixpath.normpath(script_name+'/'+name)
    if script_name.endswith('/'):
        script_name = script_name[:-1]
    if not name and not script_name.endswith('/'):
        script_name += '/'

    environ['SCRIPT_NAME'] = script_name
    environ['PATH_INFO'] = '/'.join(path_parts)

    # Special case: '/.' on PATH_INFO doesn't get stripped,
    # because we don't strip the last element of PATH_INFO
    # if there's only one path part left.  Instead of fixing this
    # above, we fix it here so that PATH_INFO gets normalized to
    # an empty string in the environ.
    if name=='.':
        name = None
    return name
def setup_testing_defaults(environ):
    """Update 'environ' with trivial defaults for testing purposes

    This adds various parameters required for WSGI, including HTTP_HOST,
    SERVER_NAME, SERVER_PORT, REQUEST_METHOD, SCRIPT_NAME, PATH_INFO,
    and all of the wsgi.* variables.  It only supplies default values,
    and does not replace any existing settings for these variables.

    This routine is intended to make it easier for unit tests of WSGI
    servers and applications to set up dummy environments.  It should *not*
    be used by actual WSGI servers or applications, since the data is fake!
    """
    environ.setdefault('SERVER_NAME', '127.0.0.1')
    environ.setdefault('SERVER_PROTOCOL', 'HTTP/1.0')
    # HTTP_HOST mirrors whatever SERVER_NAME ended up as.
    environ.setdefault('HTTP_HOST', environ['SERVER_NAME'])
    environ.setdefault('REQUEST_METHOD', 'GET')

    # Supply the path pair only when neither half is already present.
    if 'SCRIPT_NAME' not in environ and 'PATH_INFO' not in environ:
        environ.setdefault('SCRIPT_NAME', '')
        environ.setdefault('PATH_INFO', '/')

    for key, value in (('wsgi.version', (1, 0)),
                       ('wsgi.run_once', 0),
                       ('wsgi.multithread', 0),
                       ('wsgi.multiprocess', 0)):
        environ.setdefault(key, value)

    from io import StringIO, BytesIO
    environ.setdefault('wsgi.input', BytesIO())
    environ.setdefault('wsgi.errors', StringIO())

    # Same logic as guess_scheme(), inlined: an HTTPS flag means TLS.
    scheme = 'https' if environ.get('HTTPS') in ('yes', 'on', '1') else 'http'
    environ.setdefault('wsgi.url_scheme', scheme)
    if environ['wsgi.url_scheme'] == 'http':
        environ.setdefault('SERVER_PORT', '80')
    elif environ['wsgi.url_scheme'] == 'https':
        environ.setdefault('SERVER_PORT', '443')
# Membership test over the HTTP/1.1 hop-by-hop headers.
_hoppish = frozenset([
    'connection', 'keep-alive', 'proxy-authenticate',
    'proxy-authorization', 'te', 'trailers', 'transfer-encoding',
    'upgrade',
]).__contains__

def is_hop_by_hop(header_name):
    """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header"""
    return _hoppish(header_name.lower())
|
danielharbor/openerp | refs/heads/master | openerp/tools/amount_to_text.py | 393 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#-------------------------------------------------------------
# French
#-------------------------------------------------------------
# French number words 0-19; index == value.
to_19_fr = ( u'zéro', 'un', 'deux', 'trois', 'quatre', 'cinq', 'six',
             'sept', 'huit', 'neuf', 'dix', 'onze', 'douze', 'treize',
             'quatorze', 'quinze', 'seize', 'dix-sept', 'dix-huit', 'dix-neuf' )
# Tens words: index 0 -> 20, 1 -> 30, ... 7 -> 90.
tens_fr = ( 'vingt', 'trente', 'quarante', 'Cinquante', 'Soixante', 'Soixante-dix', 'Quatre-vingts', 'Quatre-vingt Dix')
# Scale words: denom_fr[i] names 1000**i (index 0 is the empty units scale).
denom_fr = ( '',
             'Mille', 'Millions', 'Milliards', 'Billions', 'Quadrillions',
             'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion',
             'Décillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
             'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Icosillion', 'Vigintillion' )
def _convert_nn_fr(val):
    """Spell out a value below 100 in French words."""
    if val < 20:
        return to_19_fr[val]
    # Locate the decade containing val (index 0 -> 20-29, 1 -> 30-39, ...).
    for index, tens_word in enumerate(tens_fr):
        decade = 20 + 10 * index
        if val < decade + 10:
            units = val % 10
            if units:
                return tens_word + '-' + to_19_fr[units]
            return tens_word
def _convert_nnn_fr(val):
    """Spell out a value below 1000 in French.

    Kept separate from french_number so the hundreds wording is handled in
    one place; may also be called directly for 'X Cent Y' style strings.
    """
    hundreds, remainder = divmod(val, 100)
    parts = []
    if hundreds > 0:
        parts.append(to_19_fr[hundreds] + ' Cent')
    if remainder > 0:
        parts.append(_convert_nn_fr(remainder))
    return ' '.join(parts)
def french_number(val):
    """Spell out a non-negative integer in French."""
    if val < 100:
        return _convert_nn_fr(val)
    if val < 1000:
        return _convert_nnn_fr(val)
    # Find the smallest power of 1000 exceeding val; the previous power is
    # the leading denomination (Mille, Millions, ...).
    for power in range(len(denom_fr)):
        if 1000 ** power > val:
            denom_index = power - 1
            unit = 1000 ** denom_index
            leading, rest = divmod(val, unit)
            ret = _convert_nnn_fr(leading) + ' ' + denom_fr[denom_index]
            if rest > 0:
                ret += ', ' + french_number(rest)
            return ret
def amount_to_text_fr(number, currency):
    """Spell out *number* (rounded to two decimals) in French, with the
    currency name between the whole part and the cents part."""
    units_name = currency
    whole_part, cents_part = ('%.2f' % number).split('.')
    start_word = french_number(abs(int(whole_part)))
    end_word = french_number(int(cents_part))
    cents_number = int(cents_part)
    cents_name = ' Cents' if cents_number > 1 else ' Cent'
    return start_word + ' ' + units_name + ' ' + end_word + ' ' + cents_name
#-------------------------------------------------------------
# Dutch
#-------------------------------------------------------------
# Dutch number words 0-19; index == value.
to_19_nl = ( 'Nul', 'Een', 'Twee', 'Drie', 'Vier', 'Vijf', 'Zes',
             'Zeven', 'Acht', 'Negen', 'Tien', 'Elf', 'Twaalf', 'Dertien',
             'Veertien', 'Vijftien', 'Zestien', 'Zeventien', 'Achttien', 'Negentien' )
# Tens words: index 0 -> 20, 1 -> 30, ... 7 -> 90.
tens_nl = ( 'Twintig', 'Dertig', 'Veertig', 'Vijftig', 'Zestig', 'Zeventig', 'Tachtig', 'Negentig')
# Scale words: denom_nl[i] names 1000**i (index 0 is the empty units scale).
denom_nl = ( '',
             'Duizend', 'Miljoen', 'Miljard', 'Triljoen', 'Quadriljoen',
             'Quintillion', 'Sextiljoen', 'Septillion', 'Octillion', 'Nonillion',
             'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
             'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' )
def _convert_nn_nl(val):
    """Spell out a value below 100 in Dutch words."""
    if val < 20:
        return to_19_nl[val]
    # Locate the decade containing val (index 0 -> 20-29, 1 -> 30-39, ...).
    for index, tens_word in enumerate(tens_nl):
        decade = 20 + 10 * index
        if val < decade + 10:
            units = val % 10
            if units:
                return tens_word + '-' + to_19_nl[units]
            return tens_word
def _convert_nnn_nl(val):
    """Spell out a value below 1000 in Dutch.

    Kept separate from dutch_number so the hundreds wording is handled in
    one place; may also be called directly for 'X Honderd Y' style strings.
    """
    hundreds, remainder = divmod(val, 100)
    parts = []
    if hundreds > 0:
        parts.append(to_19_nl[hundreds] + ' Honderd')
    if remainder > 0:
        parts.append(_convert_nn_nl(remainder))
    return ' '.join(parts)
def dutch_number(val):
    """Spell out a non-negative integer in Dutch."""
    if val < 100:
        return _convert_nn_nl(val)
    if val < 1000:
        return _convert_nnn_nl(val)
    # Find the smallest power of 1000 exceeding val; the previous power is
    # the leading denomination (Duizend, Miljoen, ...).
    for power in range(len(denom_nl)):
        if 1000 ** power > val:
            denom_index = power - 1
            unit = 1000 ** denom_index
            leading, rest = divmod(val, unit)
            ret = _convert_nnn_nl(leading) + ' ' + denom_nl[denom_index]
            if rest > 0:
                ret += ', ' + dutch_number(rest)
            return ret
def amount_to_text_nl(number, currency):
    """Spell out *number* (rounded to two decimals) in Dutch, with the
    currency name between the whole part and the cents part."""
    whole_part, cents_part = ('%.2f' % number).split('.')
    start_word = dutch_number(int(whole_part))
    end_word = dutch_number(int(cents_part))
    # Both branches of the original conditional yielded 'cent'.
    cents_name = 'cent'
    return start_word + ' ' + currency + ' ' + end_word + ' ' + cents_name
#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------
# Dispatch table: language code -> amount-to-text conversion function.
_translate_funcs = {'fr' : amount_to_text_fr, 'nl' : amount_to_text_nl}

def add_amount_to_text_function(lang, func):
    """Register *func* as the amount-to-text converter for language *lang*."""
    _translate_funcs[lang] = func
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='fr', currency='euro'):
    """ Converts an integer to its textual representation, using the language set in the context if any.

    Example::
        1654: mille six cent cinquante-quatre.

    Unknown languages fall back to French.
    """
    # if nbr > 1000000:
    ##TODO: use logger
    #    print "WARNING: number too large '%d', can't translate it!" % (nbr,)
    #    return str(nbr)
    # NOTE: dict.has_key and the bare print statement make this Python 2 only.
    if not _translate_funcs.has_key(lang):
        #TODO: use logger
        print "WARNING: no translation function found for lang: '%s'" % (lang,)
        #TODO: (default should be en) same as above
        lang = 'fr'
    return _translate_funcs[lang](abs(nbr), currency)
if __name__=='__main__':
    # Manual smoke test: spell out the number given on the command line, or
    # sweep a range of values when no argument is given (Python 2 print).
    from sys import argv
    lang = 'nl'
    if len(argv) < 2:
        for i in range(1,200):
            print i, ">>", amount_to_text(i, lang)
        for i in range(200,999999,139):
            print i, ">>", amount_to_text(i, lang)
    else:
        print amount_to_text(int(argv[1]), lang)

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
jabdoa2/FreeCAD | refs/heads/master | src/Mod/Draft/DraftTrackers.py | 8 | #***************************************************************************
#* *
#* Copyright (c) 2011 *
#* Yorik van Havre <yorik@uncreated.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
__title__="FreeCAD Draft Trackers"
__author__ = "Yorik van Havre"
__url__ = "http://www.freecadweb.org"
## \defgroup DRAFTTRACKERS DraftTrackers
# \ingroup DRAFT
#
# Custom Pivy-based objects used by the Draft workbench
#
# This module contains a collection of Coin3D (pivy)-based objects
# that are used by the Draft workbench to draw temporary geometry
# on the 3D view
import FreeCAD,FreeCADGui,math,Draft, DraftVecUtils
from FreeCAD import Vector
from pivy import coin
class Tracker:
    """A generic Draft Tracker, to be used by other specific trackers.

    Wraps a coin scenegraph branch (draw style + color + *children*) under
    an SoSwitch so the temporary geometry can be shown/hidden, and inserts/
    removes it through the DraftGui todo queue, i.e. outside scenegraph
    traversal.
    """

    def __init__(self, dotted=False, scolor=None, swidth=None, children=None,
                 ontop=False):
        """Build the switch/style/color nodes around *children*.

        dotted: draw with a dashed line pattern.
        scolor: line color; defaults to the Draft UI color.
        swidth: line width in pixels, when given.
        children: extra coin nodes to append under the switch. Fix: the
            original used a mutable default argument (children=[]); a None
            sentinel avoids the shared-mutable-default pitfall while
            remaining call-compatible.
        ontop: insert the switch at the top of the scenegraph so the
            tracker draws over other objects.
        """
        global Part, DraftGeomUtils
        import Part, DraftGeomUtils
        self.ontop = ontop
        color = coin.SoBaseColor()
        color.rgb = scolor or FreeCADGui.draftToolBar.getDefaultColor("ui")
        drawstyle = coin.SoDrawStyle()
        if swidth:
            drawstyle.lineWidth = swidth
        if dotted:
            drawstyle.style = coin.SoDrawStyle.LINES
            drawstyle.lineWeight = 3
            drawstyle.linePattern = 0x0f0f  # 0xaa
        node = coin.SoSeparator()
        for c in [drawstyle, color] + (children or []):
            node.addChild(c)
        self.switch = coin.SoSwitch()  # this is the on/off switch
        self.switch.addChild(node)
        self.switch.whichChild = -1
        self.Visible = False
        from DraftGui import todo
        todo.delay(self._insertSwitch, self.switch)

    def finalize(self):
        """Schedule removal of the tracker from the scenegraph and drop it."""
        from DraftGui import todo
        todo.delay(self._removeSwitch, self.switch)
        self.switch = None

    def _insertSwitch(self, switch):
        '''insert self.switch into the scene graph. Must not be called
        from an event handler (or other scene graph traversal).'''
        sg = Draft.get3DView().getSceneGraph()
        if self.ontop:
            sg.insertChild(switch, 0)
        else:
            sg.addChild(switch)

    def _removeSwitch(self, switch):
        '''remove self.switch from the scene graph. As with _insertSwitch,
        must not be called during scene graph traversal).'''
        sg = Draft.get3DView().getSceneGraph()
        sg.removeChild(switch)

    def on(self):
        """Make the tracker visible."""
        self.switch.whichChild = 0
        self.Visible = True

    def off(self):
        """Hide the tracker."""
        self.switch.whichChild = -1
        self.Visible = False

    def lowerTracker(self):
        '''lowers the tracker to the bottom of the scenegraph, so
        it doesn't obscure the other objects'''
        if self.switch:
            sg = Draft.get3DView().getSceneGraph()
            sg.removeChild(self.switch)
            sg.addChild(self.switch)

    def raiseTracker(self):
        '''raises the tracker to the top of the scenegraph, so
        it obscures the other objects'''
        if self.switch:
            sg = Draft.get3DView().getSceneGraph()
            sg.removeChild(self.switch)
            sg.insertChild(self.switch, 0)
class snapTracker(Tracker):
    """A snap-mark tracker, used by tools that support snapping."""
    def __init__(self):
        color = coin.SoBaseColor()
        color.rgb = FreeCADGui.draftToolBar.getDefaultColor("snap")
        self.marker = coin.SoMarkerSet()    # the marker glyph
        self.marker.markerIndex = coin.SoMarkerSet.CIRCLE_FILLED_9_9
        self.coords = coin.SoCoordinate3()  # the marker position
        self.coords.point.setValue((0,0,0))
        node = coin.SoAnnotation()
        for child in (self.coords, color, self.marker):
            node.addChild(child)
        Tracker.__init__(self,children=[node])
    def setMarker(self,style):
        """Select the marker glyph for the given snap style name."""
        # mapping of snap style names to Coin marker glyphs; unknown
        # styles fall back to the filled circle
        glyphs = {
            "square": coin.SoMarkerSet.DIAMOND_FILLED_9_9,
            "circle": coin.SoMarkerSet.CIRCLE_LINE_9_9,
            "quad":   coin.SoMarkerSet.SQUARE_FILLED_9_9,
            "empty":  coin.SoMarkerSet.SQUARE_LINE_9_9,
        }
        self.marker.markerIndex = glyphs.get(style,
                                             coin.SoMarkerSet.CIRCLE_FILLED_9_9)
    def setCoords(self,point):
        """Move the snap mark to the given point."""
        self.coords.point.setValue((point.x,point.y,point.z))
class lineTracker(Tracker):
    """A line tracker, used by tools that draw temporary straight lines."""
    def __init__(self,dotted=False,scolor=None,swidth=None,ontop=False):
        segment = coin.SoLineSet()
        segment.numVertices.setValue(2)
        self.coords = coin.SoCoordinate3()  # the two segment endpoints
        self.coords.point.setValues(0,2,[[0,0,0],[1,0,0]])
        Tracker.__init__(self,dotted,scolor,swidth,[self.coords,segment],ontop)
    def p1(self,point=None):
        "sets or gets the first point of the line"
        if not point:
            return Vector(self.coords.point.getValues()[0].getValue())
        self.coords.point.set1Value(0,point.x,point.y,point.z)
    def p2(self,point=None):
        "sets or gets the second point of the line"
        if not point:
            return Vector(self.coords.point.getValues()[-1].getValue())
        self.coords.point.set1Value(1,point.x,point.y,point.z)
    def getLength(self):
        "returns the length of the line"
        endpoints = self.coords.point.getValues()
        start = Vector(endpoints[0].getValue())
        end = Vector(endpoints[-1].getValue())
        return end.sub(start).Length
class rectangleTracker(Tracker):
    """A Rectangle tracker, used by the rectangle tool.

    The rectangle is a closed 5-point polyline lying in the plane spanned
    by the working plane's u/v axes; an optional translucent face can be
    drawn inside it.
    """
    def __init__(self,dotted=False,scolor=None,swidth=None,face=False):
        self.origin = Vector(0,0,0)
        line = coin.SoLineSet()
        line.numVertices.setValue(5)
        self.coords = coin.SoCoordinate3() # this is the coordinate
        self.coords.point.setValues(0,50,[[0,0,0],[2,0,0],[2,2,0],[0,2,0],[0,0,0]])
        if face:
            m1 = coin.SoMaterial()
            m1.transparency.setValue(0.5)
            m1.diffuseColor.setValue([0.5,0.5,1.0])
            f = coin.SoIndexedFaceSet()
            f.coordIndex.setValues([0,1,2,3])
            Tracker.__init__(self,dotted,scolor,swidth,[self.coords,line,m1,f])
        else:
            Tracker.__init__(self,dotted,scolor,swidth,[self.coords,line])
        # in-plane axes used to project the diagonal onto the sides
        self.u = FreeCAD.DraftWorkingPlane.u
        self.v = FreeCAD.DraftWorkingPlane.v
    def setorigin(self,point):
        "sets the base point of the rectangle"
        # first and last polyline points coincide (closed rectangle)
        self.coords.point.set1Value(0,point.x,point.y,point.z)
        self.coords.point.set1Value(4,point.x,point.y,point.z)
        self.origin = point
    def update(self,point):
        "sets the opposite (diagonal) point of the rectangle"
        diagonal = point.sub(self.origin)
        # corners 1 and 3 are the diagonal projected on the v and u axes
        inpoint1 = self.origin.add(DraftVecUtils.project(diagonal,self.v))
        inpoint2 = self.origin.add(DraftVecUtils.project(diagonal,self.u))
        self.coords.point.set1Value(1,inpoint1.x,inpoint1.y,inpoint1.z)
        self.coords.point.set1Value(2,point.x,point.y,point.z)
        self.coords.point.set1Value(3,inpoint2.x,inpoint2.y,inpoint2.z)
    def setPlane(self,u,v=None):
        '''sets given (u,v) vectors as working plane. You can give only u
        and v will be deduced automatically given current workplane'''
        self.u = u
        if v:
            self.v = v
        else:
            norm = FreeCAD.DraftWorkingPlane.u.cross(FreeCAD.DraftWorkingPlane.v)
            self.v = self.u.cross(norm)
    def p1(self,point=None):
        "sets or gets the base point of the rectangle"
        if point:
            self.setorigin(point)
        else:
            return Vector(self.coords.point.getValues()[0].getValue())
    def p2(self):
        "gets the second point (on u axis) of the rectangle"
        return Vector(self.coords.point.getValues()[3].getValue())
    def p3(self,point=None):
        "sets or gets the opposite (diagonal) point of the rectangle"
        if point:
            self.update(point)
        else:
            return Vector(self.coords.point.getValues()[2].getValue())
    def p4(self):
        "gets the fourth point (on v axis) of the rectangle"
        return Vector(self.coords.point.getValues()[1].getValue())
    def getSize(self):
        "returns (length,width) of the rectangle"
        p1 = Vector(self.coords.point.getValues()[0].getValue())
        p2 = Vector(self.coords.point.getValues()[2].getValue())
        diag = p2.sub(p1)
        # length measured along u, width along v
        return ((DraftVecUtils.project(diag,self.u)).Length,(DraftVecUtils.project(diag,self.v)).Length)
    def getNormal(self):
        "returns the normal of the rectangle"
        return (self.u.cross(self.v)).normalize()
class dimTracker(Tracker):
    """A Dimension tracker, used by the dimension tool.

    Keeps three control points: p1/p2 are the measured endpoints, and p3
    (optional) positions the dimension line relative to the p1-p2 base.
    """
    def __init__(self,dotted=False,scolor=None,swidth=None):
        line = coin.SoLineSet()
        line.numVertices.setValue(4)
        self.coords = coin.SoCoordinate3() # this is the coordinate
        self.coords.point.setValues(0,4,[[0,0,0],[0,0,0],[0,0,0],[0,0,0]])
        Tracker.__init__(self,dotted,scolor,swidth,[self.coords,line])
        self.p1 = self.p2 = self.p3 = None
    def update(self,pts):
        # one point updates only p3 (dimension line position); two or
        # more update the measured endpoints, and a third one p3 as well
        if not pts:
            return
        elif len(pts) == 1:
            self.p3 = pts[0]
        else:
            self.p1 = pts[0]
            self.p2 = pts[1]
            if len(pts) > 2:
                self.p3 = pts[2]
        self.calc()
    def calc(self):
        # recompute the 4 polyline points from p1/p2/p3
        import Part
        if (self.p1 != None) and (self.p2 != None):
            # default: a degenerate dimension drawn on the base line itself
            points = [DraftVecUtils.tup(self.p1,True),DraftVecUtils.tup(self.p2,True),\
                      DraftVecUtils.tup(self.p1,True),DraftVecUtils.tup(self.p2,True)]
            if self.p3 != None:
                p1 = self.p1
                p4 = self.p2
                if DraftVecUtils.equals(p1,p4):
                    proj = None
                else:
                    # offset vector from p3 to the p1-p4 base line
                    base = Part.Line(p1,p4).toShape()
                    proj = DraftGeomUtils.findDistance(self.p3,base)
                if not proj:
                    p2 = p1
                    p3 = p4
                else:
                    # shift the dimension line away from the base by -proj
                    p2 = p1.add(proj.negative())
                    p3 = p4.add(proj.negative())
                points = [DraftVecUtils.tup(p1),DraftVecUtils.tup(p2),DraftVecUtils.tup(p3),DraftVecUtils.tup(p4)]
            self.coords.point.setValues(0,4,points)
class bsplineTracker(Tracker):
    """A bspline tracker: shows a temporary B-spline through self.points."""
    def __init__(self,dotted=False,scolor=None,swidth=None,points = []):
        # NOTE(review): 'points=[]' is a shared mutable default argument;
        # callers pass a list in practice, but this deserves cleanup
        self.bspline = None
        self.points = points
        self.trans = coin.SoTransform()
        self.sep = coin.SoSeparator()
        self.recompute()
        Tracker.__init__(self,dotted,scolor,swidth,[self.trans,self.sep])
    def update(self, points):
        # set a new point list and redraw
        self.points = points
        self.recompute()
    def recompute(self):
        # rebuild the Coin geometry by round-tripping a Part B-spline
        # through its Inventor string representation
        if (len(self.points) >= 2):
            if self.bspline: self.sep.removeChild(self.bspline)
            self.bspline = None
            c = Part.BSplineCurve()
            # DNC: allows to close the curve by placing ends close to each other
            if ( len(self.points) >= 3 ) and ( (self.points[0] - self.points[-1]).Length < Draft.tolerance() ):
                # YVH: Added a try to bypass some hazardous situations
                try:
                    c.interpolate(self.points[:-1], True)
                except Part.OCCError:
                    pass
            elif self.points:
                try:
                    c.interpolate(self.points, False)
                except Part.OCCError:
                    pass
            c = c.toShape()
            buf=c.writeInventor(2,0.01)
            try:
                ivin = coin.SoInput()
                ivin.setBuffer(buf)
                ivob = coin.SoDB.readAll(ivin)
            except:
                # workaround for pivy SoInput.setBuffer() bug: parse the
                # Inventor string by hand and rebuild a plain line set
                import re
                buf = buf.replace("\n","")
                pts = re.findall("point \[(.*?)\]",buf)[0]
                pts = pts.split(",")
                pc = []
                for p in pts:
                    v = p.strip().split()
                    pc.append([float(v[0]),float(v[1]),float(v[2])])
                coords = coin.SoCoordinate3()
                coords.point.setValues(0,len(pc),pc)
                line = coin.SoLineSet()
                line.numVertices.setValue(-1)
                self.bspline = coin.SoSeparator()
                self.bspline.addChild(coords)
                self.bspline.addChild(line)
                self.sep.addChild(self.bspline)
            else:
                # try/except/else: this branch runs only when readAll succeeded
                if ivob and ivob.getNumChildren() > 1:
                    self.bspline = ivob.getChild(1).getChild(0)
                    # strip the first two children (presumably material/style
                    # nodes of the imported graph -- TODO confirm)
                    self.bspline.removeChild(self.bspline.getChild(0))
                    self.bspline.removeChild(self.bspline.getChild(0))
                    self.sep.addChild(self.bspline)
                else:
                    FreeCAD.Console.PrintWarning("bsplineTracker.recompute() failed to read-in Inventor string\n")
#######################################
class bezcurveTracker(Tracker):
    """A bezcurve tracker: shows a temporary Bezier curve over self.points.

    Same structure as bsplineTracker, but the points are used as Bezier
    poles (setPoles) instead of interpolation points.
    """
    def __init__(self,dotted=False,scolor=None,swidth=None,points = []):
        # NOTE(review): 'points=[]' is a shared mutable default argument
        self.bezcurve = None
        self.points = points
        self.trans = coin.SoTransform()
        self.sep = coin.SoSeparator()
        self.recompute()
        Tracker.__init__(self,dotted,scolor,swidth,[self.trans,self.sep])
    def update(self, points):
        # set a new pole list and redraw
        self.points = points
        self.recompute()
    def recompute(self):
        # rebuild the Coin geometry by round-tripping a Part Bezier curve
        # through its Inventor string representation
        if (len(self.points) >= 2):
            if self.bezcurve: self.sep.removeChild(self.bezcurve)
            self.bezcurve = None
            c = Part.BezierCurve()  # (bsplineTracker uses BSplineCurve here)
            # DNC: allows to close the curve by placing ends close to each other
            if ( len(self.points) >= 3 ) and ( (self.points[0] - self.points[-1]).Length < Draft.tolerance() ):
                # YVH: Added a try to bypass some hazardous situations
                try:
                    # poles, not interpolation points as in bsplineTracker
                    c.setPoles(self.points[:-1])
                except Part.OCCError:
                    pass
            elif self.points:
                try:
                    c.setPoles(self.points)
                except Part.OCCError:
                    pass
            c = c.toShape()
            buf=c.writeInventor(2,0.01)
            try:
                ivin = coin.SoInput()
                ivin.setBuffer(buf)
                ivob = coin.SoDB.readAll(ivin)
            except:
                # workaround for pivy SoInput.setBuffer() bug: parse the
                # Inventor string by hand and rebuild a plain line set
                import re
                buf = buf.replace("\n","")
                pts = re.findall("point \[(.*?)\]",buf)[0]
                pts = pts.split(",")
                pc = []
                for p in pts:
                    v = p.strip().split()
                    pc.append([float(v[0]),float(v[1]),float(v[2])])
                coords = coin.SoCoordinate3()
                coords.point.setValues(0,len(pc),pc)
                line = coin.SoLineSet()
                line.numVertices.setValue(-1)
                self.bezcurve = coin.SoSeparator()
                self.bezcurve.addChild(coords)
                self.bezcurve.addChild(line)
                self.sep.addChild(self.bezcurve)
            else:
                # try/except/else: this branch runs only when readAll succeeded
                if ivob and ivob.getNumChildren() > 1:
                    self.bezcurve = ivob.getChild(1).getChild(0)
                    self.bezcurve.removeChild(self.bezcurve.getChild(0))
                    self.bezcurve.removeChild(self.bezcurve.getChild(0))
                    self.sep.addChild(self.bezcurve)
                else:
                    FreeCAD.Console.PrintWarning("bezcurveTracker.recompute() failed to read-in Inventor string\n")
#######################################
class arcTracker(Tracker):
    """An arc tracker.

    Draws a unit circle/arc oriented along ``normal``; center and radius
    are applied through an SoTransform (translation + uniform scale), so
    only angle changes require regenerating the geometry.
    """
    def __init__(self,dotted=False,scolor=None,swidth=None,start=0,end=math.pi*2,normal=None):
        self.circle = None
        # angles are stored in degrees (Part.makeCircle takes degrees);
        # the setter/getter API below works in radians
        self.startangle = math.degrees(start)
        self.endangle = math.degrees(end)
        self.trans = coin.SoTransform()
        self.trans.translation.setValue([0,0,0])
        self.sep = coin.SoSeparator()
        if normal:
            self.normal = normal
        else:
            self.normal = FreeCAD.DraftWorkingPlane.axis
        self.basevector = self.getDeviation()
        self.recompute()
        Tracker.__init__(self,dotted,scolor,swidth,[self.trans, self.sep])
    def getDeviation(self):
        "returns a deviation vector that represents the base of the circle"
        import Part
        c = Part.makeCircle(1,Vector(0,0,0),self.normal)
        return c.Vertexes[0].Point
    def setCenter(self,cen):
        "sets the center point"
        self.trans.translation.setValue([cen.x,cen.y,cen.z])
    def setRadius(self,rad):
        "sets the radius"
        # uniform scaling of the unit circle acts as the radius
        self.trans.scaleFactor.setValue([rad,rad,rad])
    def getRadius(self):
        "returns the current radius"
        return self.trans.scaleFactor.getValue()[0]
    def setStartAngle(self,ang):
        "sets the start angle (radians)"
        self.startangle = math.degrees(ang)
        self.recompute()
    def setEndAngle(self,ang):
        "sets the end angle (radians)"
        self.endangle = math.degrees(ang)
        self.recompute()
    def getAngle(self,pt):
        "returns the angle (radians) of a given point, around the arc center"
        c = self.trans.translation.getValue()
        center = Vector(c[0],c[1],c[2])
        rad = pt.sub(center)
        a = DraftVecUtils.angle(rad,self.basevector,self.normal)
        #print(a)
        return(a)
    def getAngles(self):
        "returns the start and end angles, in degrees"
        return(self.startangle,self.endangle)
    def setStartPoint(self,pt):
        "sets the start angle from a point"
        self.setStartAngle(-self.getAngle(pt))
    def setEndPoint(self,pt):
        "sets the end angle from a point"
        self.setEndAngle(self.getAngle(pt))
    def setApertureAngle(self,ang):
        "sets the end angle by giving the aperture angle (radians)"
        ap = math.degrees(ang)
        self.endangle = self.startangle + ap
        self.recompute()
    def recompute(self):
        # regenerate the arc geometry by round-tripping a Part circle
        # through its Inventor string representation
        import Part,re
        if self.circle:
            self.sep.removeChild(self.circle)
        self.circle = None
        if (self.endangle < self.startangle):
            # makeCircle expects start <= end; swap if needed
            c = Part.makeCircle(1,Vector(0,0,0),self.normal,self.endangle,self.startangle)
        else:
            c = Part.makeCircle(1,Vector(0,0,0),self.normal,self.startangle,self.endangle)
        buf=c.writeInventor(2,0.01)
        try:
            ivin = coin.SoInput()
            ivin.setBuffer(buf)
            ivob = coin.SoDB.readAll(ivin)
        except:
            # workaround for pivy SoInput.setBuffer() bug: parse the
            # Inventor string by hand and rebuild a plain line set
            buf = buf.replace("\n","")
            pts = re.findall("point \[(.*?)\]",buf)[0]
            pts = pts.split(",")
            pc = []
            for p in pts:
                v = p.strip().split()
                pc.append([float(v[0]),float(v[1]),float(v[2])])
            coords = coin.SoCoordinate3()
            coords.point.setValues(0,len(pc),pc)
            line = coin.SoLineSet()
            line.numVertices.setValue(-1)
            self.circle = coin.SoSeparator()
            self.circle.addChild(coords)
            self.circle.addChild(line)
            self.sep.addChild(self.circle)
        else:
            # try/except/else: this branch runs only when readAll succeeded
            if ivob and ivob.getNumChildren() > 1:
                self.circle = ivob.getChild(1).getChild(0)
                self.circle.removeChild(self.circle.getChild(0))
                self.circle.removeChild(self.circle.getChild(0))
                self.sep.addChild(self.circle)
            else:
                FreeCAD.Console.PrintWarning("arcTracker.recompute() failed to read-in Inventor string\n")
class ghostTracker(Tracker):
    '''A Ghost tracker, that allows to copy whole object representations.
    You can pass it an object or a list of objects, or a shape.'''
    def __init__(self,sel):
        self.trans = coin.SoTransform()
        self.trans.translation.setValue([0,0,0])
        self.children = [self.trans]
        rootsep = coin.SoSeparator()
        if not isinstance(sel,list):
            sel = [sel]
        for obj in sel:
            rootsep.addChild(self.getNode(obj))
        self.children.append(rootsep)
        Tracker.__init__(self,children=self.children)
    def update(self,obj):
        "recreates the ghost from a new object"
        obj.ViewObject.show()
        self.finalize()
        # FIX: was 'sep = getNode(obj)' (NameError: unbound name) and
        # 'children=[self.sep]' (self.sep is never assigned)
        sep = self.getNode(obj)
        Tracker.__init__(self,children=[sep])
        self.on()
        obj.ViewObject.hide()
    def move(self,delta):
        "moves the ghost to a given position, relative from its start position"
        self.trans.translation.setValue([delta.x,delta.y,delta.z])
    def rotate(self,axis,angle):
        "rotates the ghost of a given angle"
        self.trans.rotation.setValue(coin.SbVec3f(DraftVecUtils.tup(axis)),angle)
    def center(self,point):
        "sets the rotation/scale center of the ghost"
        self.trans.center.setValue(point.x,point.y,point.z)
    def scale(self,delta):
        "scales the ghost by the given factor"
        self.trans.scaleFactor.setValue([delta.x,delta.y,delta.z])
    def getNode(self,obj):
        "returns a coin node representing the given object"
        if isinstance(obj,Part.Shape):
            return self.getNodeLight(obj)
        elif obj.isDerivedFrom("Part::Feature"):
            return self.getNodeFull(obj)
        else:
            return self.getNodeFull(obj)
    def getNodeFull(self,obj):
        """gets a coin node which is a full copy of the current representation

        FIX: this was originally defined as a second 'getNode' method,
        which silently replaced the dispatcher above and left the
        'getNodeFull' it calls undefined; renamed to restore the
        intended dispatch.
        """
        sep = coin.SoSeparator()
        try:
            sep.addChild(obj.ViewObject.RootNode.copy())
        except:
            # best-effort: objects without a usable RootNode yield an empty ghost
            pass
        return sep
    def getNodeLight(self,shape):
        "extract a lighter version directly from a shape"
        # very error-prone, will be obsoleted ASAP
        sep = coin.SoSeparator()
        try:
            inputstr = coin.SoInput()
            inputstr.setBuffer(shape.writeInventor())
            coinobj = coin.SoDB.readAll(inputstr)
            # only add wireframe or full node?
            sep.addChild(coinobj.getChildren()[1])
            # sep.addChild(coinobj)
        except:
            print("Error retrieving coin node")
        return sep
class editTracker(Tracker):
    """A node edit tracker: a pickable marker bound to an object's edit node."""
    def __init__(self,pos=Vector(0,0,0),name="None",idx=0,objcol=None,
                 marker=coin.SoMarkerSet.SQUARE_FILLED_9_9):
        color = coin.SoBaseColor()
        if objcol:
            color.rgb = objcol[:3]
        else:
            color.rgb = FreeCADGui.draftToolBar.getDefaultColor("snap")
        self.marker = coin.SoMarkerSet()    # the marker glyph
        self.marker.markerIndex = marker
        self.coords = coin.SoCoordinate3()  # the marker position
        self.coords.point.setValue((pos.x,pos.y,pos.z))
        # SoFCSelection makes the marker selectable as "EditNode<idx>"
        # of the named document object
        picknode = coin.SoType.fromName("SoFCSelection").createInstance()
        picknode.documentName.setValue(FreeCAD.ActiveDocument.Name)
        picknode.objectName.setValue(name)
        picknode.subElementName.setValue("EditNode"+str(idx))
        for child in (self.coords, color, self.marker):
            picknode.addChild(child)
        annot = coin.SoAnnotation()
        annot.addChild(picknode)
        Tracker.__init__(self,children=[annot],ontop=True)
        self.on()
    def set(self,pos):
        """Move the marker to the given position."""
        self.coords.point.setValue((pos.x,pos.y,pos.z))
    def get(self):
        """Return the marker position as a Vector."""
        values = self.coords.point.getValues()[0]
        return Vector(values[0],values[1],values[2])
    def move(self,delta):
        """Translate the marker by the given offset vector."""
        self.set(self.get().add(delta))
class PlaneTracker(Tracker):
    """A working plane tracker.

    Shows a translucent square with axis marks, sized relative to the
    current screen zoom (snapRange preference), made unpickable so it
    never interferes with snapping.
    """
    def __init__(self):
        # getting screen distance: model-space length of 10 screen pixels
        p1 = Draft.get3DView().getPoint((100,100))
        p2 = Draft.get3DView().getPoint((110,100))
        bl = (p2.sub(p1)).Length * (Draft.getParam("snapRange",5)/2)
        pick = coin.SoPickStyle()
        pick.style.setValue(coin.SoPickStyle.UNPICKABLE)
        self.trans = coin.SoTransform()
        self.trans.translation.setValue([0,0,0])
        # translucent square face
        m1 = coin.SoMaterial()
        m1.transparency.setValue(0.8)
        m1.diffuseColor.setValue([0.4,0.4,0.6])
        c1 = coin.SoCoordinate3()
        c1.point.setValues([[-bl,-bl,0],[bl,-bl,0],[bl,bl,0],[-bl,bl,0]])
        f = coin.SoIndexedFaceSet()
        f.coordIndex.setValues([0,1,2,3])
        # darker axis marks: three 3-point polylines
        m2 = coin.SoMaterial()
        m2.transparency.setValue(0.7)
        m2.diffuseColor.setValue([0.2,0.2,0.3])
        c2 = coin.SoCoordinate3()
        c2.point.setValues([[0,bl,0],[0,0,0],[bl,0,0],[-.05*bl,.95*bl,0],[0,bl,0],
                            [.05*bl,.95*bl,0],[.95*bl,.05*bl,0],[bl,0,0],[.95*bl,-.05*bl,0]])
        l = coin.SoLineSet()
        l.numVertices.setValues([3,3,3])
        s = coin.SoSeparator()
        s.addChild(pick)
        s.addChild(self.trans)
        s.addChild(m1)
        s.addChild(c1)
        s.addChild(f)
        s.addChild(m2)
        s.addChild(c2)
        s.addChild(l)
        Tracker.__init__(self,children=[s])
    def set(self,pos=None):
        """Place the tracker at *pos* (or at the working plane's own
        placement when no position is given) and show it."""
        if pos:
            Q = FreeCAD.DraftWorkingPlane.getRotation().Rotation.Q
        else:
            plm = FreeCAD.DraftWorkingPlane.getPlacement()
            Q = plm.Rotation.Q
            pos = plm.Base
        self.trans.translation.setValue([pos.x,pos.y,pos.z])
        self.trans.rotation.setValue([Q[0],Q[1],Q[2],Q[3]])
        self.on()
class wireTracker(Tracker):
    """A wire tracker that follows the vertexes of a given wire."""
    def __init__(self,wire):
        self.line = coin.SoLineSet()
        self.closed = DraftGeomUtils.isReallyClosed(wire)
        count = len(wire.Vertexes)
        # closed wires repeat the first vertex at the end
        self.line.numVertices.setValue(count + 1 if self.closed else count)
        self.coords = coin.SoCoordinate3()
        self.update(wire)
        Tracker.__init__(self,children=[self.coords,self.line])
    def update(self,wire,forceclosed=False):
        """Refresh the tracker's coordinates from the wire's vertexes."""
        if not wire:
            return
        count = len(wire.Vertexes)
        closing = self.closed or forceclosed
        self.line.numVertices.setValue(count + 1 if closing else count)
        for idx, vert in enumerate(wire.Vertexes):
            pos = vert.Point
            self.coords.point.set1Value(idx,[pos.x,pos.y,pos.z])
        if closing:
            first = wire.Vertexes[0].Point
            self.coords.point.set1Value(count,[first.x,first.y,first.z])
class gridTracker(Tracker):
    """A grid tracker: draws a square grid on the current working plane.

    Three line sets are used: regular grid lines, main lines (every
    ``mainlines``-th line, drawn less transparent) and the two axes
    (fully opaque).
    """
    def __init__(self):
        col = [0.2,0.2,0.3]
        pick = coin.SoPickStyle()
        pick.style.setValue(coin.SoPickStyle.UNPICKABLE)
        self.trans = coin.SoTransform()
        self.trans.translation.setValue([0,0,0])
        mat1 = coin.SoMaterial()
        mat1.transparency.setValue(0.7)
        mat1.diffuseColor.setValue(col)
        self.coords1 = coin.SoCoordinate3()
        self.lines1 = coin.SoLineSet()   # regular grid lines
        mat2 = coin.SoMaterial()
        mat2.transparency.setValue(0.3)
        mat2.diffuseColor.setValue(col)
        self.coords2 = coin.SoCoordinate3()
        self.lines2 = coin.SoLineSet()   # main lines
        mat3 = coin.SoMaterial()
        mat3.transparency.setValue(0)
        mat3.diffuseColor.setValue(col)
        self.coords3 = coin.SoCoordinate3()
        self.lines3 = coin.SoLineSet()   # the two axes
        s = coin.SoSeparator()
        s.addChild(pick)
        s.addChild(self.trans)
        s.addChild(mat1)
        s.addChild(self.coords1)
        s.addChild(self.lines1)
        s.addChild(mat2)
        s.addChild(self.coords2)
        s.addChild(self.lines2)
        s.addChild(mat3)
        s.addChild(self.coords3)
        s.addChild(self.lines3)
        Tracker.__init__(self,children=[s])
        self.reset()
    def update(self):
        "redraws the grid"
        # floor division keeps the bound an exact multiple of the spacing
        # (the original '/' relied on Python 2 integer division)
        bound = (self.numlines//2)*self.space
        pts = []
        mpts = []
        apts = []
        for i in range(self.numlines+1):
            curr = -bound + i*self.space
            z = 0
            # FIX: the original test 'i/float(ml) == i/ml' checked
            # divisibility only under Python 2 integer division (it is
            # always true on Python 3); 'i % ml == 0' is the equivalent,
            # portable check.
            if i % self.mainlines == 0:
                if round(curr,4) == 0:
                    # the two axis lines pass through the origin
                    apts.extend([[-bound,curr,z],[bound,curr,z]])
                    apts.extend([[curr,-bound,z],[curr,bound,z]])
                else:
                    mpts.extend([[-bound,curr,z],[bound,curr,z]])
                    mpts.extend([[curr,-bound,z],[curr,bound,z]])
            else:
                pts.extend([[-bound,curr,z],[bound,curr,z]])
                pts.extend([[curr,-bound,z],[curr,bound,z]])
        # each drawn segment consumes exactly 2 of the points above
        idx = [2]*(len(pts)//2)
        midx = [2]*(len(mpts)//2)
        aidx = [2]*(len(apts)//2)
        self.coords1.point.setValues(pts)
        self.lines1.numVertices.setValues(idx)
        self.coords2.point.setValues(mpts)
        self.lines2.numVertices.setValues(midx)
        self.coords3.point.setValues(apts)
        self.lines3.numVertices.setValues(aidx)
    def setSize(self,size):
        # number of grid lines per direction
        self.numlines = size
        self.update()
    def setSpacing(self,space):
        # distance between two adjacent grid lines
        self.space = space
        self.update()
    def setMainlines(self,ml):
        # interval between two main (highlighted) lines
        self.mainlines = ml
        self.update()
    def reset(self):
        "resets the grid according to preferences settings"
        self.space = Draft.getParam("gridSpacing",1)
        self.mainlines = Draft.getParam("gridEvery",10)
        self.numlines = Draft.getParam("gridSize",100)
        self.update()
    def set(self):
        "moves and rotates the grid according to the current WP"
        self.reset()
        Q = FreeCAD.DraftWorkingPlane.getRotation().Rotation.Q
        P = FreeCAD.DraftWorkingPlane.position
        self.trans.rotation.setValue([Q[0],Q[1],Q[2],Q[3]])
        self.trans.translation.setValue([P.x,P.y,P.z])
        self.on()
    def getClosestNode(self,point):
        "returns the closest grid node from the given point"
        # round in working-plane local (u,v) coordinates, then map back
        pt = FreeCAD.DraftWorkingPlane.getLocalCoords(point)
        pu = (round(pt.x/self.space,0))*self.space
        pv = (round(pt.y/self.space,0))*self.space
        pt = FreeCAD.DraftWorkingPlane.getGlobalCoords(Vector(pu,pv,0))
        return pt
class boxTracker(Tracker):
    """A box tracker, can be based on a line object.

    Displays an SoCube aligned with a base line.  Note the axis mapping:
    the tracker's *width* is stored in cube.height, its *length* in
    cube.width, and its *height* in cube.depth (see the accessors below).
    """
    def __init__(self,line=None,width=0.1,height=1):
        self.trans = coin.SoTransform()
        m = coin.SoMaterial()
        m.transparency.setValue(0.8)
        m.diffuseColor.setValue([0.4,0.4,0.6])
        self.cube = coin.SoCube()
        self.cube.height.setValue(width)
        self.cube.depth.setValue(height)
        self.baseline = None
        if line:
            self.baseline = line
            self.update()
        Tracker.__init__(self,children=[self.trans,m,self.cube])
    def update(self,line=None,normal=None):
        """Re-align the box on *line* (either a [p1,p2] point list or an
        object with a Shape) or, failing that, on the stored baseline."""
        import WorkingPlane, DraftGeomUtils
        if not normal:
            normal = FreeCAD.DraftWorkingPlane.axis
        if line:
            if isinstance(line,list):
                bp = line[0]
                lvec = line[1].sub(line[0])
            else:
                lvec = DraftGeomUtils.vec(line.Shape.Edges[0])
                bp = line.Shape.Edges[0].Vertexes[0].Point
        elif self.baseline:
            lvec = DraftGeomUtils.vec(self.baseline.Shape.Edges[0])
            bp = self.baseline.Shape.Edges[0].Vertexes[0].Point
        else:
            return
        right = lvec.cross(normal)
        self.cube.width.setValue(lvec.Length)
        # orient the cube along the base line
        p = WorkingPlane.getPlacementFromPoints([bp,bp.add(lvec),bp.add(right)])
        if p:
            self.trans.rotation.setValue(p.Rotation.Q)
        # center the cube on the line, offset by half its height along normal
        bp = bp.add(lvec.multiply(0.5))
        bp = bp.add(DraftVecUtils.scaleTo(normal,self.cube.depth.getValue()/2))
        self.pos(bp)
    def setRotation(self,rot):
        # rot is a rotation whose quaternion (.Q) is applied directly
        self.trans.rotation.setValue(rot.Q)
    def pos(self,p):
        # move the box to the given point
        self.trans.translation.setValue(DraftVecUtils.tup(p))
    def width(self,w=None):
        # sets or gets the width (stored in cube.height)
        if w:
            self.cube.height.setValue(w)
        else:
            return self.cube.height.getValue()
    def length(self,l=None):
        # sets or gets the length (stored in cube.width)
        if l:
            self.cube.width.setValue(l)
        else:
            return self.cube.width.getValue()
    def height(self,h=None):
        # sets or gets the height (cube.depth); setting re-aligns the box
        if h:
            self.cube.depth.setValue(h)
            self.update()
        else:
            return self.cube.depth.getValue()
class radiusTracker(Tracker):
    "A tracker that displays a transparent sphere to indicate a radius"
    def __init__(self,position=None,radius=1):
        # FIX: the original default was 'position=FreeCAD.Vector(0,0,0)';
        # default arguments are evaluated once at definition time, so a
        # single Vector object would be shared by all instances.  Also
        # fixed the docstring typo "inicate".
        if position is None:
            position = FreeCAD.Vector(0,0,0)
        self.trans = coin.SoTransform()
        self.trans.translation.setValue([position.x,position.y,position.z])
        m = coin.SoMaterial()
        m.transparency.setValue(0.9)
        m.diffuseColor.setValue([0,1,0])
        self.sphere = coin.SoSphere()
        self.sphere.radius.setValue(radius)
        self.baseline = None
        Tracker.__init__(self,children=[self.trans,m,self.sphere])
    def update(self,arg1,arg2=None):
        """Update the tracker: Vector arguments move the sphere, any
        other argument is taken as the new radius."""
        if isinstance(arg1,FreeCAD.Vector):
            self.trans.translation.setValue([arg1.x,arg1.y,arg1.z])
        else:
            self.sphere.radius.setValue(arg1)
        if arg2 != None:
            if isinstance(arg2,FreeCAD.Vector):
                self.trans.translation.setValue([arg2.x,arg2.y,arg2.z])
            else:
                self.sphere.radius.setValue(arg2)
class archDimTracker(Tracker):
    """A wrapper around a Sketcher SoDatumLabel dimension node."""
    def __init__(self,p1=FreeCAD.Vector(0,0,0),p2=FreeCAD.Vector(1,0,0),mode=1):
        import SketcherGui  # imported so the SoDatumLabel node type is registered
        self.dimnode = coin.SoType.fromName("SoDatumLabel").createInstance()
        p1node = coin.SbVec3f([p1.x,p1.y,p1.z])
        p2node = coin.SbVec3f([p2.x,p2.y,p2.z])
        self.dimnode.pnts.setValues([p1node,p2node])
        self.dimnode.lineWidth = 1
        color = FreeCADGui.draftToolBar.getDefaultColor("snap")
        self.dimnode.textColor.setValue(coin.SbVec3f(color))
        self.setString()
        self.setMode(mode)
        Tracker.__init__(self,children=[self.dimnode])
    def setString(self,text=None):
        "sets the dim string to the given value or auto value"
        self.dimnode.param1.setValue(.5)
        p1 = Vector(self.dimnode.pnts.getValues()[0].getValue())
        p2 = Vector(self.dimnode.pnts.getValues()[-1].getValue())
        m = self.dimnode.datumtype.getValue()
        # the measured distance depends on the mode: 2 = x distance,
        # 3 = y distance, otherwise the true distance between the points
        if m == 2:
            self.Distance = (DraftVecUtils.project(p2.sub(p1),Vector(1,0,0))).Length
        elif m == 3:
            self.Distance = (DraftVecUtils.project(p2.sub(p1),Vector(0,1,0))).Length
        else:
            self.Distance = (p2.sub(p1)).Length
        if not text:
            # no explicit text: format the distance using the configured precision
            text = Draft.getParam("dimPrecision",2)
            text = "%."+str(text)+"f"
            text = (text % self.Distance)
        self.dimnode.string.setValue(text)
    def setMode(self,mode=1):
        """sets the mode: 0 = without lines (a simple mark), 1 =
        aligned (default), 2 = horizontal, 3 = vertical."""
        self.dimnode.datumtype.setValue(mode)
    def p1(self,point=None):
        "sets or gets the first point of the dim"
        if point:
            self.dimnode.pnts.set1Value(0,point.x,point.y,point.z)
            self.setString()
        else:
            return Vector(self.dimnode.pnts.getValues()[0].getValue())
    def p2(self,point=None):
        "sets or gets the second point of the dim"
        if point:
            self.dimnode.pnts.set1Value(1,point.x,point.y,point.z)
            self.setString()
        else:
            return Vector(self.dimnode.pnts.getValues()[-1].getValue())
|
glaubitz/fs-uae-debian | refs/heads/master | launcher/OpenGL/GLES1/APPLE/copy_texture_levels.py | 8 | '''OpenGL extension APPLE.copy_texture_levels
This module customises the behaviour of the
OpenGL.raw.GLES1.APPLE.copy_texture_levels to provide a more
Python-friendly API
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/APPLE/copy_texture_levels.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLES1 import _types, _glgets
from OpenGL.raw.GLES1.APPLE.copy_texture_levels import *
from OpenGL.raw.GLES1.APPLE.copy_texture_levels import _EXTENSION_NAME
def glInitCopyTextureLevelsAPPLE():
    '''Return boolean indicating whether this extension is available'''
    # 'extensions' is already bound by the module-level
    # 'from OpenGL import extensions, wrapper' import; the original
    # re-imported it locally, which was redundant.
    return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION |
SCOAP3/invenio | refs/heads/master | invenio/legacy/bibcirculation/web/admin/bibcirculationadmin.py | 13 | # This file is part of Invenio.
# Copyright (C) 2008, 2009, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio BibCirculation Administrator (URLs) Interface."""
__revision__ = ""
import invenio.legacy.bibcirculation.adminlib as bal
from invenio.config import CFG_SITE_LANG
from invenio.utils.url import wash_url_argument
def index(req, ln=CFG_SITE_LANG):
    """
    Render the BibCirculation admin start page (delegates to adminlib).

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py
    """
    return bal.index(req, ln)
def borrower_search(req, empty_barcode=None, redirect_to_new_request=False,
                    ln=CFG_SITE_LANG):
    """
    Show the borrower search form (delegates to adminlib).

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/borrowers_search
    """
    return bal.borrower_search(req, empty_barcode,
                               redirect_to_new_request=redirect_to_new_request, ln=ln)
def item_search(req, ln=CFG_SITE_LANG):
    """
    Show the item (holdings) search form (delegates to adminlib).

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/holdings_search
    """
    return bal.item_search(req, [], ln)
def borrower_notification(req, borrower_id=None, template=None,
                          message=None, load_msg_template=None,
                          subject=None, send_message=None, ln=CFG_SITE_LANG):
    """
    Compose/send a notification message to a borrower (delegates to adminlib).

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/borrower_notification
    """
    return bal.borrower_notification(req, borrower_id, template,
                                     message, load_msg_template,
                                     subject, send_message, ln)
def get_pending_requests(req, request_id=None, print_data=None,
                         ln=CFG_SITE_LANG):
    """
    List pending hold requests (delegates to adminlib).

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_pending_requests
    """
    return bal.get_pending_requests(req, request_id, print_data, ln)
def item_search_result(req, p=None, f=None, ln=CFG_SITE_LANG):
    """
    Show the result of an item search for pattern *p* / field *f*
    (delegates to adminlib).

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/item_search_result
    """
    return bal.item_search_result(req, p, f, ln)
def loan_return(req, ln=CFG_SITE_LANG):
    """
    Show the loan return form (delegates to adminlib).

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/loan_return
    """
    return bal.loan_return(req, ln)
def loan_on_desk_step1(req, key=None, string=None, ln=CFG_SITE_LANG):
    """
    First step of the loan-on-desk wizard: borrower lookup
    (delegates to adminlib).

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/loan_on_desk_step1
    """
    return bal.loan_on_desk_step1(req, key, string, ln)
def loan_on_desk_step2(req, user_info=None, ln=CFG_SITE_LANG):
    """
    Second step of the loan-on-desk wizard: split the comma-joined
    *user_info* string back into a list and delegate to adminlib.

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/loan_on_desk_step2
    """
    user_info = user_info.split(',')
    return bal.loan_on_desk_step2(req, user_info, ln)
def loan_on_desk_step3(req, user_info=None, barcode=None, ln=CFG_SITE_LANG):
    """
    Third step of the loan-on-desk wizard: deserialize the borrower info
    carried through the form and delegate to adminlib.

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/loan_on_desk_step4
    """
    from ast import literal_eval
    # SECURITY FIX: 'user_info' comes from the request.  literal_eval only
    # parses Python literals (lists/tuples/strings/numbers), whereas the
    # original eval() executed arbitrary expressions from user input.
    user_info = literal_eval(user_info)
    return bal.loan_on_desk_step3(req, user_info, barcode, ln)
def loan_on_desk_step4(req, list_of_books=None, user_info=None, due_date=None,
                       note=None, ln=CFG_SITE_LANG):
    """
    Fourth step of the loan-on-desk wizard: deserialize the book list and
    borrower info carried through the form and delegate to adminlib.

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/loan_on_desk_step5
    """
    from ast import literal_eval
    # SECURITY FIX: both values come from the request.  literal_eval only
    # parses Python literals, whereas the original eval() executed
    # arbitrary expressions from user input.
    user_info = literal_eval(user_info)
    list_of_books = literal_eval(list_of_books)
    due_date = wash_url_argument(due_date, 'list')
    return bal.loan_on_desk_step4(req, list_of_books, user_info,
                                  due_date, note, ln)
def loan_on_desk_confirm(req, barcode=None, borrower_id=None, ln=CFG_SITE_LANG):
    """
    Confirmation page of the loan-on-desk wizard (delegates to adminlib).

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/loan_on_desk_confirm
    """
    return bal.loan_on_desk_confirm(req, barcode, borrower_id, ln)
def register_new_loan(req, barcode=None, borrower_id=None, request_id=None,
                      new_note=None, print_data=None, ln=CFG_SITE_LANG):
    """
    Register a new loan for the given item/borrower (delegates to adminlib).

    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/register_new_loan
    """
    return bal.register_new_loan(req, barcode, borrower_id, request_id,
                                 new_note, print_data, ln)
def loan_return_confirm(req, barcode=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/loan_return_confirm
"""
return bal.loan_return_confirm(req, barcode, ln)
def get_next_waiting_loan_request(req, recid=None, barcode=None, check_id=None,
ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_next_waiting_loan_request
"""
return bal.get_next_waiting_loan_request(req, recid, barcode, check_id, ln)
def make_new_loan_from_request(req, check_id=None, barcode=None,
ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/make_new_loan_from_request
"""
return bal.make_new_loan_from_request(req, check_id,
barcode, ln)
def all_requests(req, request_id=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/all_requests
"""
return bal.all_requests(req, request_id, ln)
def get_item_req_historical_overview(req, recid=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_item_req_historical_overview
"""
return bal.get_item_req_historical_overview(req, recid, ln)
def get_item_loans_historical_overview(req, recid=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_item_loans_historical_overview
"""
return bal.get_item_loans_historical_overview(req, recid, ln)
#def all_loans_test(req, ln=CFG_SITE_LANG):
# """
# http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/all_loans
# """
# return bal.all_loans_test(req, ln)
def all_loans(req, msg=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/all_loans
"""
return bal.all_loans(req, msg=msg, ln=ln)
def bor_loans_historical_overview(req, borrower_id=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/bor_loans_historical_overview
"""
return bal.bor_loans_historical_overview(req, borrower_id, ln)
def bor_requests_historical_overview(req, borrower_id=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/bor_requests_historical_overview
"""
return bal.bor_requests_historical_overview(req, borrower_id, ln)
def get_item_requests_details(req, recid=None, request_id=None,
ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/borrowers_search
"""
return bal.get_item_requests_details(req, recid, request_id, ln)
def get_item_loans_details(req, recid=None, barcode=None, loan_id=None,
force=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/borrowers_search
"""
return bal.get_item_loans_details(req, recid, barcode, loan_id, force, ln)
def get_borrower_details(req, borrower_id=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/borrowers_search
"""
return bal.get_borrower_details(req, borrower_id, ln)
def get_item_details(req, recid=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/borrowers_search
"""
return bal.get_item_details(req, recid, ln)
def get_library_details(req, library_id=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_library_details
"""
return bal.get_library_details(req, library_id, ln)
def get_borrower_requests_details(req, borrower_id=None, request_id=None,
ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_borrower_requests_details
"""
return bal.get_borrower_requests_details(req, borrower_id, request_id, ln)
def get_borrower_loans_details(req, recid=None, barcode=None, borrower_id=None,
renewall=None, force=None, loan_id=None,
ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_borrower_loans_details
"""
return bal.get_borrower_loans_details(req, recid, barcode, borrower_id,
renewall, force, loan_id, ln)
def borrower_search_result(req, column, string, redirect_to_new_request=False,
ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/borrower_search_result
"""
return bal.borrower_search_result(req, column, string,
redirect_to_new_request=redirect_to_new_request, ln=ln)
def associate_barcode(req, request_id=None, recid=None, borrower_id=None,
ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/associate_barcode
"""
return bal.associate_barcode(req, request_id, recid, borrower_id, ln)
def get_borrower_notes(req, borrower_id=None, delete_key=None,
library_notes=None, ln=CFG_SITE_LANG):
"""
http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_borrower_notes
"""
return bal.get_borrower_notes(req, borrower_id, delete_key,
library_notes, ln)
def get_loans_notes(req, loan_id=None, recid=None, delete_key=None,
                    library_notes=None, back="", ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_loans_notes
    """
    # NOTE(review): `recid` is accepted here but never forwarded below --
    # either the parameter is vestigial or an argument is missing.  Confirm
    # against bal.get_loans_notes' signature before changing the call.
    return bal.get_loans_notes(req, loan_id, delete_key,
                               library_notes, back, ln)
def get_item_loans_notes(req, loan_id=None, recid=None,
                         add_notes=None, new_note=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_item_loans_notes
    """
    return bal.get_item_loans_notes(req, loan_id, recid, add_notes,
                                    new_note, ln)
def new_item(req, isbn=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/new_item
    """
    return bal.new_item(req, isbn, ln)
def add_new_borrower_step1(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_borrower_step1
    """
    return bal.add_new_borrower_step1(req, ln)
def add_new_borrower_step2(req, name=None, email=None, phone=None, address=None,
                           mailbox=None, notes=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_borrower_step2
    """
    return bal.add_new_borrower_step2(req, name, email, phone, address,
                                      mailbox, notes, ln)
def add_new_borrower_step3(req, tup_infos=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_borrower_step3

    Final step of the add-new-borrower wizard: decode the repr()-encoded
    form tuple and delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: tup_infos arrives from the HTTP request.  ast.literal_eval
    # parses Python literals only; the previous eval() executed arbitrary
    # attacker-controlled expressions.
    tup_infos = ast.literal_eval(tup_infos)
    return bal.add_new_borrower_step3(req, tup_infos, ln)
def update_borrower_info_step1(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_borrower_info_step1
    """
    return bal.update_borrower_info_step1(req, ln)
def update_borrower_info_step2(req, column=None, string=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_borrower_info_step2
    """
    return bal.update_borrower_info_step2(req, column, string, ln)
# NOTE(review): the two definitions below REDEFINE the two handlers above.
# At import time Python keeps only the later pair, so the first pair is dead
# code and its signatures (ln-only / column+string) are unreachable.  Decide
# which signatures the templates actually post to, then delete the losers.
def update_borrower_info_step1(req, borrower_id=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_borrower_info_step1
    """
    return bal.update_borrower_info_step1(req, borrower_id, ln)
def update_borrower_info_step2(req, name=None, email=None, phone=None,
                               address=None, mailbox=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_borrower_info_step2
    """
    return bal.update_borrower_info_step2(req, name, email, phone, address,
                                          mailbox, ln)
def add_new_library_step1(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_library_step1
    """
    return bal.add_new_library_step1(req, ln)
def add_new_library_step2(req, name=None, email=None, phone=None, address=None,
                          type=None, notes=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_library_step2
    """
    # NOTE(review): parameter `type` shadows the builtin; renaming it would
    # change the form field name callers post, so it is left as-is.
    return bal.add_new_library_step2(req, name, email, phone, address,
                                     type, notes, ln)
def add_new_library_step3(req, tup_infos=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_library_step3

    Final step of the add-new-library wizard: decode the repr()-encoded
    form tuple and delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: ast.literal_eval replaces eval() on this request-supplied
    # string, so only Python literals are accepted, not arbitrary code.
    tup_infos = ast.literal_eval(tup_infos)
    return bal.add_new_library_step3(req, tup_infos, ln)
def update_library_info_step1(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_library_info_step1
    """
    return bal.update_library_info_step1(req, ln)
def update_library_info_step2(req, column=None, string=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_library_info_step2
    """
    return bal.update_library_info_step2(req, column, string, ln)
def update_library_info_step3(req, library_id=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_library_info_step3
    """
    return bal.update_library_info_step3(req, library_id, ln)
def update_library_info_step4(req, name=None, email=None, phone=None,
                              address=None, library_id=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_library_info_step4
    """
    return bal.update_library_info_step4(req, name, email, phone, address,
                                         library_id, ln)
def update_library_info_step5(req, tup_infos, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_library_info_step5

    Final step of the update-library wizard: decode the repr()-encoded
    form tuple and delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: ast.literal_eval replaces eval() on this request-supplied
    # string, so only Python literals are accepted, not arbitrary code.
    tup_infos = ast.literal_eval(tup_infos)
    return bal.update_library_info_step5(req, tup_infos, ln)
def new_book_step1(req,ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/new_book_step1
    """
    return bal.new_book_step1(req, ln)
def new_book_step2(req,ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/new_book_step2
    """
    return bal.new_book_step2(req, ln)
def add_new_copy_step1(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_copy_step1
    """
    # NOTE(review): `ln` is accepted but not forwarded, unlike every sibling
    # handler -- confirm whether bal.add_new_copy_step1 takes a language arg.
    return bal.add_new_copy_step1(req)
def add_new_copy_step2(req, p=None, f=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_copy_step2
    """
    return bal.add_new_copy_step2(req, p, f, ln)
def add_new_copy_step3(req, recid=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_copy_step3
    """
    return bal.add_new_copy_step3(req, recid, ln)
def add_new_copy_step4(req, barcode=None, library=None, location=None,
                       collection=None, description=None, loan_period=None,
                       status=None, recid=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_copy_step4
    """
    return bal.add_new_copy_step4(req, barcode, library, location, collection,
                                  description, loan_period, status, recid, ln)
def add_new_copy_step5(req, tup_infos=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_copy_step5

    Final step of the add-new-copy wizard: decode the repr()-encoded form
    tuple and delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: ast.literal_eval replaces eval() on this request-supplied
    # string, so only Python literals are accepted, not arbitrary code.
    tup_infos = ast.literal_eval(tup_infos)
    return bal.add_new_copy_step5(req, tup_infos, ln)
def update_item_info_step1(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_item_info_step1
    """
    return bal.update_item_info_step1(req, ln)
def update_item_info_step2(req, p, f, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_item_info_step2
    """
    return bal.update_item_info_step2(req, p, f, ln)
def update_item_info_step3(req, recid, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_item_info_step3
    """
    return bal.update_item_info_step3(req, recid, ln)
def update_item_info_step4(req, barcode, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_item_info_step4
    """
    return bal.update_item_info_step4(req, barcode, ln)
def update_item_info_step5(req, barcode, library, location, collection,
                           description, loan_period, status, recid,
                           ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_item_info_step5
    """
    return bal.update_item_info_step5(req, barcode, library, location,
                                      collection, description, loan_period,
                                      status, recid, ln)
def update_item_info_step6(req, tup_infos, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_item_info_step6

    Final step of the update-item wizard: decode the repr()-encoded form
    tuple and delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: ast.literal_eval replaces eval() on this request-supplied
    # string, so only Python literals are accepted, not arbitrary code.
    tup_infos = ast.literal_eval(tup_infos)
    return bal.update_item_info_step6(req, tup_infos, ln)
def search_library_step1(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/search_library_step1
    """
    return bal.search_library_step1(req=req, ln=ln)
def search_library_step2(req, column, string, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/search_library_step2
    """
    return bal.search_library_step2(req, column, string, ln)
def get_library_notes(req, library_id=None, delete_key=None, library_notes=None,
                      ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_library_notes
    """
    return bal.get_library_notes(req, library_id, delete_key, library_notes, ln)
def change_due_date_step1(req, loan_id=None, borrower_id=None,
                          ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/change_due_date_step1
    """
    return bal.change_due_date_step1(req, loan_id, borrower_id, ln)
def change_due_date_step2(req, due_date=None, loan_id=None, borrower_id=None,
                          ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/change_due_date_step2
    """
    return bal.change_due_date_step2(req, due_date, loan_id, borrower_id, ln)
def claim_book_return(req, borrower_id=None, recid=None, loan_id=None,
                      template=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/claim_book_return
    """
    return bal.claim_book_return(req, borrower_id, recid, loan_id, template, ln)
def all_expired_loans(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/all_expired_loans
    """
    return bal.all_expired_loans(req, ln)
def get_waiting_requests(req, request_id=None, print_data=None,
                         ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_waiting_requests
    """
    return bal.get_waiting_requests(req, request_id, print_data, ln)
def create_new_loan_step1(req, borrower_id=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/create_new_loan_step1
    """
    return bal.create_new_loan_step1(req, borrower_id, ln)
def create_new_loan_step2(req, borrower_id=None, barcode=None, notes=None,
                          ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/create_new_loan_step2
    """
    return bal.create_new_loan_step2(req, borrower_id, barcode, notes, ln)
def create_new_request_step1(req, borrower_id=None, p=None, f=None, search=None,
                             ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/create_new_request_step1
    """
    return bal.create_new_request_step1(req, borrower_id, p, f, search, ln)
def create_new_request_step2(req, recid=None, borrower_id=None,
                             ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/create_new_request_step2
    """
    return bal.create_new_request_step2(req, recid, borrower_id, ln)
def create_new_request_step3(req, borrower_id=None, barcode=None, recid=None,
                             ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/create_new_request_step3
    """
    return bal.create_new_request_step3(req, borrower_id, barcode, recid, ln)
def create_new_request_step4(req, period_from=None, period_to=None,
                             barcode=None, borrower_id=None, recid=None,
                             ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/create_new_request_step4
    """
    return bal.create_new_request_step4(req, period_from, period_to, barcode,
                                        borrower_id, recid, ln)
def place_new_request_step1(req, barcode=None, recid=None, key=None,
                            string=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/place_new_request_step1
    """
    return bal.place_new_request_step1(req, barcode, recid, key, string, ln)
def place_new_request_step2(req, barcode=None, recid=None, user_info=None,
                            ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/place_new_request_step2
    """
    # user_info is posted as a comma-joined string; None passes through as-is.
    if user_info is not None:
        user_info = user_info.split(',')
    return bal.place_new_request_step2(req, barcode, recid, user_info, ln)
def place_new_request_step3(req, barcode=None, recid=None, user_info=None,
                            period_from=None, period_to=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/place_new_request_step3

    Decode the repr()-encoded ``user_info`` value round-tripped through the
    previous step's form and delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: ast.literal_eval replaces eval() on this request-supplied
    # string, so only Python literals are accepted, not arbitrary code.
    user_info = ast.literal_eval(user_info)
    return bal.place_new_request_step3(req, barcode, recid, user_info,
                                       period_from, period_to, ln)
def place_new_loan_step1(req, barcode=None, recid=None, key=None,
                         string=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/place_new_loan_step1
    """
    return bal.place_new_loan_step1(req, barcode, recid, key, string, ln)
def place_new_loan_step2(req, barcode=None, recid=None, user_info=None,
                         ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/place_new_loan_step2
    """
    return bal.place_new_loan_step2(req, barcode, recid, user_info, ln)
def place_new_loan_step3(req, barcode=None, recid=None, ccid=None, name=None,
                         email=None, phone=None, address=None, mailbox=None,
                         due_date=None, notes=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/place_new_loan_step3
    """
    return bal.place_new_loan_step3(req, barcode, recid, ccid, name, email,
                                    phone, address, mailbox, due_date, notes,
                                    ln)
def order_new_copy_step1(req, recid=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/order_new_copy_step1
    """
    return bal.order_new_copy_step1(req, recid, ln)
def order_new_copy_step2 (req, recid=None, barcode=None, vendor_id=None,
                          cost=None, currency=None, status=None,
                          order_date=None, expected_date=None,
                          library_id=None, notes=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/order_new_copy_step2
    """
    return bal.order_new_copy_step2(req, recid, barcode, vendor_id, cost,
                                    currency, status, order_date, expected_date,
                                    library_id, notes, ln)
def order_new_copy_step3(req, order_info=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/order_new_copy_step3

    Final step of the order-new-copy wizard: decode the repr()-encoded
    form tuple and delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: ast.literal_eval replaces eval() on this request-supplied
    # string, so only Python literals are accepted, not arbitrary code.
    order_info = ast.literal_eval(order_info)
    return bal.order_new_copy_step3(req, order_info, ln)
def ordered_books(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/ordered_books
    """
    return bal.list_ordered_books(req, ln)
def get_purchase_notes(req, purchase_id=None, delete_key=None,
                       library_notes=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_purchase_notes
    """
    return bal.get_purchase_notes(req, purchase_id, delete_key,
                                  library_notes, ln)
def register_ill_request_step0(req, recid=None, key=None, string=None,
                               ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/register_ill_request_step0
    """
    return bal.register_ill_request_step0(req, recid, key, string, ln)
def register_ill_request_step1(req, recid=None, user_info=None,
                               ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/register_ill_request_step1
    """
    return bal.register_ill_request_step1(req, recid, user_info, ln)
def register_ill_request_step2(req, recid=None, user_info=None,
                               period_of_interest_from=None,
                               period_of_interest_to=None, notes=None,
                               only_edition=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/register_ill_request_step2
    """
    return bal.register_ill_request_step2(req, recid, user_info,
                                          period_of_interest_from,
                                          period_of_interest_to,
                                          notes, only_edition, ln)
def register_ill_request_step3(req, borrower_id=None, request_info=None,
                               ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/register_ill_request_step3

    Final step of the register-ILL-request wizard: decode the repr()-encoded
    form tuple and delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: ast.literal_eval replaces eval() on this request-supplied
    # string, so only Python literals are accepted, not arbitrary code.
    request_info = ast.literal_eval(request_info)
    return bal.register_ill_request_step3(req, borrower_id, request_info, ln)
def list_ill_request(req, status=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/list_ill_request
    """
    return bal.list_ill_request(req, status, ln)
def ill_request_details_step1(req, delete_key=None, ill_request_id=None,
                              new_status=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/ill_request_details_step1
    """
    return bal.ill_request_details_step1(req, delete_key, ill_request_id,
                                         new_status, ln)
def ill_request_details_step2(req, delete_key=None, ill_request_id=None,
                              new_status=None, library_id=None,
                              request_date=None, expected_date=None,
                              arrival_date=None, due_date=None,
                              return_date=None, cost=None,
                              currency=None, barcode=None, library_notes=None,
                              ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/ill_request_details_step2
    """
    return bal.ill_request_details_step2(req, delete_key, ill_request_id,
                                         new_status, library_id,
                                         request_date, expected_date,
                                         arrival_date, due_date,
                                         return_date, cost, currency,
                                         barcode, library_notes, ln)
def ill_request_details_step3(req, request_info=None, ill_status=None,
                              ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/ill_request_details_step3

    Final step of the ILL-request-details wizard: decode the repr()-encoded
    form tuple and delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: ast.literal_eval replaces eval() on this request-supplied
    # string, so only Python literals are accepted, not arbitrary code.
    request_info = ast.literal_eval(request_info)
    return bal.ill_request_details_step3(req, request_info, ill_status, ln)
def ordered_books_details_step1(req, purchase_id=None, delete_key=None,
                                ln=CFG_SITE_LANG):
    """
    Delegate to bal.ordered_books_details_step1.
    """
    return bal.ordered_books_details_step1(req, purchase_id, delete_key, ln)
def ordered_books_details_step2(req, purchase_id=None, recid=None,
                                vendor_id=None, cost=None, currency=None,
                                status=None, order_date=None,
                                expected_date=None, purchase_notes=None,
                                library_notes=None,
                                ln=CFG_SITE_LANG):
    """
    Delegate to bal.ordered_books_details_step2.
    """
    return bal.ordered_books_details_step2(req, purchase_id, recid, vendor_id,
                                           cost, currency, status, order_date,
                                           expected_date,
                                           purchase_notes, library_notes, ln)
def ordered_books_details_step3(req, purchase_id=None, recid=None,
                                vendor_id=None, cost=None, currency=None,
                                status=None, order_date=None, expected_date=None,
                                purchase_notes=None, library_notes=None,
                                ln=CFG_SITE_LANG):
    """
    Delegate to bal.ordered_books_details_step3.
    """
    return bal.ordered_books_details_step3(req, purchase_id, recid, vendor_id,
                                           cost, currency, status, order_date,
                                           expected_date, purchase_notes,
                                           library_notes, ln)
def add_new_vendor_step1(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_vendor_step1
    """
    return bal.add_new_vendor_step1(req, ln)
def add_new_vendor_step2(req, name=None, email=None, phone=None, address=None,
                         notes=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_vendor_step2
    """
    return bal.add_new_vendor_step2(req, name, email, phone, address,
                                    notes, ln)
def add_new_vendor_step3(req, tup_infos=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/add_new_vendor_step3

    Final step of the add-new-vendor wizard: decode the repr()-encoded
    form tuple and delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: ast.literal_eval replaces eval() on this request-supplied
    # string, so only Python literals are accepted, not arbitrary code.
    tup_infos = ast.literal_eval(tup_infos)
    return bal.add_new_vendor_step3(req, tup_infos, ln)
def update_vendor_info_step1(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_vendor_info_step1
    """
    return bal.update_vendor_info_step1(req, ln)
def update_vendor_info_step2(req, column=None, string=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_vendor_info_step2
    """
    return bal.update_vendor_info_step2(req, column, string, ln)
def update_vendor_info_step3(req, vendor_id=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_vendor_info_step3
    """
    return bal.update_vendor_info_step3(req, vendor_id, ln)
def update_vendor_info_step4(req, name=None, email=None, phone=None,
                             address=None, vendor_id=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_vendor_info_step4
    """
    return bal.update_vendor_info_step4(req, name, email, phone, address,
                                        vendor_id, ln)
def update_vendor_info_step5(req, tup_infos, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/update_vendor_info_step5

    Final step of the update-vendor wizard: decode the repr()-encoded
    form tuple and delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: ast.literal_eval replaces eval() on this request-supplied
    # string, so only Python literals are accepted, not arbitrary code.
    tup_infos = ast.literal_eval(tup_infos)
    return bal.update_vendor_info_step5(req, tup_infos, ln)
def search_vendor_step1(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/search_vendor_step1
    """
    return bal.search_vendor_step1(req, ln)
def search_vendor_step2(req, column, string, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/search_vendor_step2
    """
    return bal.search_vendor_step2(req, column, string, ln)
def get_vendor_details(req, vendor_id=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_vendor_details
    """
    return bal.get_vendor_details(req, vendor_id, ln)
def get_vendor_notes(req, vendor_id=None, add_notes=None, new_note=None,
                     ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/get_vendor_notes
    """
    return bal.get_vendor_notes(req, vendor_id, add_notes, new_note, ln)
def register_ill_request_with_no_recid_step1(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/register_ill_request_with_no_recid_step1
    """
    return bal.register_ill_request_with_no_recid_step1(req, ln)
def register_ill_request_with_no_recid_step2(req, title=None, authors=None,
                                             place=None, publisher=None, year=None,
                                             edition=None, isbn=None,
                                             period_of_interest_from=None,
                                             period_of_interest_to=None,
                                             additional_comments=None,
                                             only_edition=None, key=None, string=None,
                                             ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/register_ill_request_with_no_recid_step2
    """
    return bal.register_ill_request_with_no_recid_step2(req, title, authors,
                                                        place, publisher, year, edition, isbn,
                                                        period_of_interest_from,
                                                        period_of_interest_to,
                                                        additional_comments, only_edition,
                                                        key, string, ln)
def register_ill_request_with_no_recid_step3(req, book_info=None,
                                             user_info=None,
                                             request_details=None,
                                             ln=CFG_SITE_LANG):
    """
    Decode the form round-tripped ``book_info`` / ``request_details`` tuples
    and the comma-joined ``user_info`` string, then delegate to the
    business-logic layer.  Values that already arrive decoded pass through.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: these strings come from the HTTP request.  ast.literal_eval
    # parses Python literals only; the previous eval() executed arbitrary
    # attacker-controlled expressions.
    if type(book_info) is str:
        book_info = ast.literal_eval(book_info)
    if type(request_details) is str:
        request_details = ast.literal_eval(request_details)
    if type(user_info) is str:
        user_info = user_info.split(',')
    return bal.register_ill_request_with_no_recid_step3(req, book_info,
                                                        user_info,
                                                        request_details, ln)
def register_ill_request_with_no_recid_step4(req, book_info=None,
                                             user_info=None,
                                             request_details=None,
                                             ln=CFG_SITE_LANG):
    """
    Decode the form round-tripped ``book_info``, ``request_details`` and
    ``user_info`` values and delegate to the business-logic layer.  Values
    that already arrive decoded pass through unchanged.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: these strings come from the HTTP request.  ast.literal_eval
    # parses Python literals only; the previous eval() executed arbitrary
    # attacker-controlled expressions.
    if type(book_info) is str:
        book_info = ast.literal_eval(book_info)
    if type(request_details) is str:
        request_details = ast.literal_eval(request_details)
    if type(user_info) is str:
        user_info = ast.literal_eval(user_info)
    return bal.register_ill_request_with_no_recid_step4(req, book_info,
                                                        user_info,
                                                        request_details, ln)
def get_borrower_ill_details(req, borrower_id=None, ill_id=None,
                             ln=CFG_SITE_LANG):
    """
    Delegate to bal.get_borrower_ill_details.
    """
    return bal.get_borrower_ill_details(req, borrower_id, ill_id, ln)
def get_ill_library_notes(req, ill_id=None, delete_key=None, library_notes=None,
                          ln=CFG_SITE_LANG):
    """
    Delegate to bal.get_ill_library_notes.
    """
    return bal.get_ill_library_notes(req, ill_id, delete_key, library_notes, ln)
def get_expired_loans_with_requests(req, request_id=None, ln=CFG_SITE_LANG):
    """
    Delegate to bal.get_expired_loans_with_requests.
    """
    return bal.get_expired_loans_with_requests(req, request_id, ln)
def register_ill_book_request(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/holdings_search
    """
    return bal.register_ill_book_request(req, ln)
def register_ill_book_request_result(req, p=None, f=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/item_search_result
    """
    return bal.register_ill_book_request_result(req, p, f, ln)
def register_ill_book_request_from_borrower_page(req, borrower_id=None,
                                                 ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/holdings_search
    """
    return bal.register_ill_book_request_from_borrower_page(req, borrower_id,
                                                            ln)
def register_ill_book_request_from_borrower_page_result(req, borrower_id=None,
                                                        p=None, f=None,
                                                        ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/item_search_result
    """
    return bal.register_ill_book_request_from_borrower_page_result(req,
                                                                   borrower_id, p, f, ln)
def register_ill_request_from_borrower_page_step1(req, borrower_id=None,
                                                  ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/register_ill_request_with_no_recid_step1
    """
    return bal.register_ill_request_from_borrower_page_step1(req, borrower_id,
                                                             ln)
def register_ill_request_from_borrower_page_step2(req, borrower_id=None,
                                                  title=None, authors=None, place=None,
                                                  publisher=None, year=None, edition=None,
                                                  isbn=None, period_of_interest_from=None,
                                                  period_of_interest_to=None,
                                                  additional_comments=None,
                                                  only_edition=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/register_ill_request_with_no_recid_step2
    """
    return bal.register_ill_request_from_borrower_page_step2(req, borrower_id,
                                                             title, authors, place,
                                                             publisher, year, edition, isbn,
                                                             period_of_interest_from,
                                                             period_of_interest_to,
                                                             additional_comments,
                                                             only_edition, ln)
def register_ill_article_request_step1(req, ln=CFG_SITE_LANG):
    """
    Delegate to bal.register_ill_article_request_step1.
    """
    return bal.register_ill_article_request_step1(req, ln)
def register_ill_article_request_step2(req, periodical_title=None,
                                       article_title=None, author=None,
                                       report_number=None, volume=None,
                                       issue=None, page=None, year=None,
                                       issn=None, period_of_interest_from=None,
                                       period_of_interest_to=None,
                                       additional_comments=None,
                                       key=None, string=None, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/register_ill_request_with_no_recid_step2
    """
    return bal.register_ill_article_request_step2(req, periodical_title,
                                                  article_title, author, report_number,
                                                  volume, issue, page, year, issn,
                                                  period_of_interest_from,
                                                  period_of_interest_to,
                                                  additional_comments, key, string, ln)
def register_ill_article_request_step3(req, book_info, user_info,
                                       request_details, ln=CFG_SITE_LANG):
    """
    Final step of the ILL article-request wizard: decode the repr()-encoded
    ``book_info`` / ``request_details`` tuples and the comma-joined
    ``user_info`` string, then delegate to the business-logic layer.
    """
    import ast  # function-scope import; the file's import block is outside this view
    # SECURITY FIX: these strings come from the HTTP request.  ast.literal_eval
    # parses Python literals only; the previous eval() executed arbitrary
    # attacker-controlled expressions.
    book_info = ast.literal_eval(book_info)
    request_details = ast.literal_eval(request_details)
    user_info = user_info.split(',')
    return bal.register_ill_article_request_step3(req, book_info, user_info,
                                                  request_details, ln)
def ill_search(req, ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/holdings_search
    """
    return bal.ill_search(req, ln)
def ill_search_result(req, p=None, f=None, date_from=None, date_to=None,
                      ln=CFG_SITE_LANG):
    """
    http://cds.cern.ch/admin/bibcirculation/bibcirculationadmin.py/item_search_result
    """
    return bal.ill_search_result(req, p, f, date_from, date_to, ln)
|
junghans/lammps | refs/heads/master | tools/i-pi/ipi/tests/test_contraction.py | 41 | """Tests ring polymer contraction.
Copyright (C) 2013, Joshua More and Michele Ceriotti
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http.//www.gnu.org/licenses/>.
"""
import sys
sys.path.append("../")
sys.path.append("../../")
from ipi.utils import nmtransform
import numpy as np
from numpy.testing import assert_almost_equal as assert_equals
def check_up_and_down_scaling(n, q):
    """Check if q expanding and then contracting a ring polymer is a no-op.

    Args:
        n: The number of beads in the scaled ring polymer.
        q: The original position array.

    Returns:
        The up-scaled (n-bead) position array, so callers can reuse it.
    """
    rescale = nmtransform.nm_rescale(q.shape[0], n)
    print "Initial position of the beads:"
    print q, q.shape, (q.shape[0], n)
    # rescale up to the n beads
    beads_n = rescale.b1tob2(q)
    print "Upscaled to %d beads:"%n
    print beads_n, beads_n.shape
    # contract back down; the round trip must reproduce the input exactly
    beads_final = rescale.b2tob1(beads_n)
    print "Final position of the beads:"
    print beads_final
    assert_equals(q, beads_final)
    return beads_n
def check_rpc_consistency(n, q):
    """Check that contracting an expanded path gives the same answer
    regardless of which direction of the transform pair is used.

    Args:
        n: The number of beads in the scaled ring polymer.
        q: The original position array.
    """
    up = nmtransform.nm_rescale(q.shape[0], n)
    down = nmtransform.nm_rescale(n, q.shape[0])

    expanded = up.b1tob2(q)
    # Contract via the inverse of `up` and via the forward of `down`;
    # both routes must agree.
    via_up = up.b2tob1(expanded)
    via_down = down.b1tob2(expanded)
    assert_equals(via_up, via_down)
def check_centroid_pos(n, q):
    """Check if expanding and then contracting a ring polymer
    maintains the centroid.

    Args:
        n: The number of beads in the scaled ring polymer.
        q: The original position array.
    """
    expanded = check_up_and_down_scaling(n, q)

    # Projection matrices onto a single (centroid) bead.
    to_centroid_big = nmtransform.mk_rs_matrix(n, 1)
    to_centroid_orig = nmtransform.mk_rs_matrix(q.shape[0], 1)

    centroid_orig = np.dot(to_centroid_orig, q)
    centroid_big = np.dot(to_centroid_big, expanded)
    assert_equals(centroid_orig, centroid_big)
# Bead counts to scale up to in the generator tests: 10, 19, 28, 37, 46, 55.
numbers_to_check = range(10, 56, 9)
def test_1_to_n():
    """One bead tests."""
    checks = (check_up_and_down_scaling,
              check_rpc_consistency,
              check_centroid_pos)
    for n in numbers_to_check:
        # Fresh array per bead count so tests never share state.
        q = np.array([[0.0,0.0,0.0, 1.0,0.0,0.0]])
        for check in checks:
            yield check, n, q
def test_2_to_n():
    """Two bead tests."""
    checks = (check_up_and_down_scaling,
              check_rpc_consistency,
              check_centroid_pos)
    for n in numbers_to_check:
        # Fresh array per bead count so tests never share state.
        q = np.array([[0.0,0.0,0.0, 1.0,0.0,0.0],
                      [0.0,0.1,0.0, 1.0,0.1,0.0]])
        for check in checks:
            yield check, n, q
def test_3_to_n():
    """Three bead tests."""
    checks = (check_up_and_down_scaling,
              check_rpc_consistency,
              check_centroid_pos)
    for n in numbers_to_check:
        # Fresh array per bead count so tests never share state.
        q = np.array([[0.0, 0.0,0.0, 1.0, 0.0,0.0],
                      [0.0, 0.1,0.0, 1.0, 0.1,0.0],
                      [0.0,-0.1,0.0, 1.0,-0.1,0.0]])
        for check in checks:
            yield check, n, q
def test_4_to_n():
    """Four bead tests."""
    checks = (check_up_and_down_scaling,
              check_rpc_consistency,
              check_centroid_pos)
    for n in numbers_to_check:
        # Fresh array per bead count so tests never share state.
        q = np.array([[0.0, 0.0,0.0, 1.0, 0.0,0.0],
                      [0.0, 0.1,0.0, 1.0, 0.1,0.0],
                      [0.0, 0.2,0.0, 1.0, 0.2,0.0],
                      [0.0,-0.1,0.0, 1.0,-0.1,0.0]])
        for check in checks:
            yield check, n, q
|
ol-loginov/intellij-community | refs/heads/master | python/testData/findUsages/ReassignedClassAttribute.py | 83 | class A(object):
bacaba = 0
def __init__(self):
self.bacaba = 1
def foo(self, x):
self.bacaba = x
class B(A):
bacaba = 2
def __init__(self):
super(B, self).__init__()
self.bacaba = 3
def foo2(self):
print self.bac<caret>aba |
chrisy/vpp | refs/heads/master | test/test_l2tp.py | 2 | #!/usr/bin/env python3
import unittest
from scapy.layers.l2 import Ether
from scapy.layers.inet6 import IPv6
from framework import tag_fixme_vpp_workers
from framework import VppTestCase
@tag_fixme_vpp_workers
class TestL2tp(VppTestCase):
    """ L2TP Test Case """

    @classmethod
    def setUpClass(cls):
        # One pg interface, admin-up with IPv6 configured, shared by the tests.
        super(TestL2tp, cls).setUpClass()
        cls.create_pg_interfaces(range(1))
        cls.pg0.admin_up()
        cls.pg0.config_ip6()

    def test_l2tp_decap_local(self):
        """ L2TP don't accept packets unless configured """

        # IPv6 next-header 115 is L2TPv3.
        pkt = (Ether(dst=self.pg0.local_mac, src=self.pg0.remote_mac) /
               IPv6(src=self.pg0.remote_ip6, dst=self.pg0.local_ip6, nh=115))

        self.pg0.add_stream(pkt)
        self.pg_start()

        # Without any tunnel configured the decap node is not reached, so the
        # "session not found" counter must still read zero.
        err = self.statistics.get_counter(
            '/err/l2tp-decap-local/l2tpv3 session not found')[0]
        self.assertEqual(err, 0)

        self.vapi.l2tpv3_create_tunnel(client_address=self.pg0.local_ip6,
                                       our_address=self.pg0.remote_ip6)

        self.pg0.add_stream(pkt)
        self.pg_start()

        # With a tunnel configured the packet reaches decap lookup; the test
        # expects exactly one "session not found" hit for this packet.
        # (Fix: removed two dead `err_count = err` assignments that were
        # never read.)
        err = self.statistics.get_counter(
            '/err/l2tp-decap-local/l2tpv3 session not found')[0]
        self.assertEqual(err, 1)
|
jhd/spunout | refs/heads/master | flask/lib/python2.7/site-packages/pip/_vendor/requests/sessions.py | 220 | # -*- coding: utf-8 -*-
"""
requests.session
~~~~~~~~~~~~~~~~
This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
"""
import os
from collections import Mapping
from datetime import datetime
from .compat import cookielib, OrderedDict, urljoin, urlparse, builtin_str
from .cookies import (
cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest
from .hooks import default_hooks, dispatch_hook
from .utils import to_key_val_list, default_headers
from .exceptions import TooManyRedirects, InvalidSchema
from .structures import CaseInsensitiveDict
from .adapters import HTTPAdapter
from .utils import requote_uri, get_environ_proxies, get_netrc_auth
from .status_codes import codes
# 3xx status codes that Session.resolve_redirects will follow.
REDIRECT_STATI = (
    codes.moved,  # 301
    codes.found,  # 302
    codes.other,  # 303
    codes.temporary_moved,  # 307
)
# Default cap on the number of redirects followed per request; stored on
# each Session as `max_redirects`.
DEFAULT_REDIRECT_LIMIT = 30
def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
"""
Determines appropriate setting for a given request, taking into account the
explicit setting on that request, and the setting in the session. If a
setting is a dictionary, they will be merged together using `dict_class`
"""
if session_setting is None:
return request_setting
if request_setting is None:
return session_setting
# Bypass if not a dictionary (e.g. verify)
if not (
isinstance(session_setting, Mapping) and
isinstance(request_setting, Mapping)
):
return request_setting
merged_setting = dict_class(to_key_val_list(session_setting))
merged_setting.update(to_key_val_list(request_setting))
# Remove keys that are set to None.
for (k, v) in request_setting.items():
if v is None:
del merged_setting[k]
return merged_setting
def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
    """
    Properly merges both requests and session hooks.

    This is necessary because when request_hooks == {'response': []}, the
    merge breaks Session hooks entirely.
    """
    def _no_hooks(hooks):
        # Treat a missing dict or an empty 'response' hook list as "no hooks".
        return hooks is None or hooks.get('response') == []

    if _no_hooks(session_hooks):
        return request_hooks
    if _no_hooks(request_hooks):
        return session_hooks

    return merge_setting(request_hooks, session_hooks, dict_class)
class SessionRedirectMixin(object):
    """Mixin supplying redirect resolution for :class:`Session`."""

    def resolve_redirects(self, resp, req, stream=False, timeout=None,
                          verify=True, cert=None, proxies=None):
        """Receives a Response. Returns a generator of Responses.

        Each yielded Response is the result of following one redirect hop;
        iteration stops when the latest response is not a redirect.
        """

        i = 0

        # ((resp.status_code is codes.see_other))
        while ('location' in resp.headers and resp.status_code in REDIRECT_STATI):
            # Start each hop from a copy of the original request so earlier
            # hops cannot contaminate later ones.
            prepared_request = req.copy()

            resp.content  # Consume socket so it can be released

            if i >= self.max_redirects:
                raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)

            # Release the connection back into the pool.
            resp.close()

            url = resp.headers['location']
            method = req.method

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith('//'):
                parsed_rurl = urlparse(resp.url)
                url = '%s:%s' % (parsed_rurl.scheme, url)

            # The scheme should be lower case...
            parsed = urlparse(url)
            url = parsed.geturl()

            # Facilitate non-RFC2616-compliant 'location' headers
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not urlparse(url).netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = url

            # 303 See Other: switch to GET for anything but HEAD.
            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
            if (resp.status_code == codes.see_other and
                    method != 'HEAD'):
                method = 'GET'

            # Do what the browsers do, despite standards...

            # First, turn 302s into GETs.
            if resp.status_code == codes.found and method != 'HEAD':
                method = 'GET'

            # Second, if a POST is responded to with a 301, turn it into a GET.
            # This bizarre behaviour is explained in Issue 1704.
            if resp.status_code == codes.moved and method == 'POST':
                method = 'GET'

            prepared_request.method = method

            # Drop the body (and its Content-Length) unless the status code
            # explicitly asks for the request to be replayed as-is.
            # https://github.com/kennethreitz/requests/issues/1084
            if resp.status_code not in (codes.temporary, codes.resume):
                if 'Content-Length' in prepared_request.headers:
                    del prepared_request.headers['Content-Length']

                prepared_request.body = None

            # Strip the stale Cookie header; cookies are re-prepared below
            # from the jar so domain/path rules apply to the new URL.
            headers = prepared_request.headers
            try:
                del headers['Cookie']
            except KeyError:
                pass

            extract_cookies_to_jar(prepared_request._cookies,
                                   prepared_request, resp.raw)
            prepared_request._cookies.update(self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            resp = self.send(
                prepared_request,
                stream=stream,
                timeout=timeout,
                verify=verify,
                cert=cert,
                proxies=proxies,
                allow_redirects=False,
            )

            extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)

            i += 1
            yield resp
class Session(SessionRedirectMixin):
    """A Requests session.

    Provides cookie persistence, connection-pooling, and configuration.

    Basic Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> s.get('http://httpbin.org/get')
      200
    """

    # Attributes captured/restored by __getstate__/__setstate__ (pickling).
    __attrs__ = [
        'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks',
        'params', 'verify', 'cert', 'prefetch', 'adapters', 'stream',
        'trust_env', 'max_redirects']
    def __init__(self):
        """Initialize library-default state and mount the standard
        HTTP/HTTPS connection adapters."""

        #: A case-insensitive dictionary of headers to be sent on each
        #: :class:`Request <Request>` sent from this
        #: :class:`Session <Session>`.
        self.headers = default_headers()

        #: Default Authentication tuple or object to attach to
        #: :class:`Request <Request>`.
        self.auth = None

        #: Dictionary mapping protocol to the URL of the proxy (e.g.
        #: {'http': 'foo.bar:3128'}) to be used on each
        #: :class:`Request <Request>`.
        self.proxies = {}

        #: Event-handling hooks.
        self.hooks = default_hooks()

        #: Dictionary of querystring data to attach to each
        #: :class:`Request <Request>`. The dictionary values may be lists for
        #: representing multivalued query parameters.
        self.params = {}

        #: Stream response content default.
        self.stream = False

        #: SSL Verification default.
        self.verify = True

        #: SSL certificate default.
        self.cert = None

        #: Maximum number of redirects allowed. If the request exceeds this
        #: limit, a :class:`TooManyRedirects` exception is raised.
        self.max_redirects = DEFAULT_REDIRECT_LIMIT

        #: Should we trust the environment?
        self.trust_env = True

        #: A CookieJar containing all currently outstanding cookies set on this
        #: session. By default it is a
        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
        #: may be any other ``cookielib.CookieJar`` compatible object.
        self.cookies = cookiejar_from_dict({})

        # Default connection adapters; mount() keeps longer prefixes first so
        # get_adapter matches the most specific one.
        self.adapters = OrderedDict()
        self.mount('https://', HTTPAdapter())
        self.mount('http://', HTTPAdapter())
    def __enter__(self):
        # Context-manager entry: the session itself is the managed resource.
        return self

    def __exit__(self, *args):
        # Context-manager exit: close every mounted adapter.
        self.close()
    def prepare_request(self, request):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for
        transmission and returns it. The :class:`PreparedRequest` has settings
        merged from the :class:`Request <Request>` instance and those of the
        :class:`Session`.

        :param request: :class:`Request` instance to prepare with this
            session's settings.
        """
        cookies = request.cookies or {}

        # Bootstrap CookieJar.
        if not isinstance(cookies, cookielib.CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Merge with session cookies
        merged_cookies = merge_cookies(
            merge_cookies(RequestsCookieJar(), self.cookies), cookies)


        # Set environment's basic authentication if not explicitly set.
        auth = request.auth
        if self.trust_env and not auth and not self.auth:
            auth = get_netrc_auth(request.url)

        p = PreparedRequest()
        # Request-level values take precedence over session-level ones in
        # every merge_setting/merge_hooks call below.
        p.prepare(
            method=request.method.upper(),
            url=request.url,
            files=request.files,
            data=request.data,
            headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
            params=merge_setting(request.params, self.params),
            auth=merge_setting(auth, self.auth),
            cookies=merged_cookies,
            hooks=merge_hooks(request.hooks, self.hooks),
        )
        return p
    def request(self, method, url,
        params=None,
        data=None,
        headers=None,
        cookies=None,
        files=None,
        auth=None,
        timeout=None,
        allow_redirects=True,
        proxies=None,
        hooks=None,
        stream=None,
        verify=None,
        cert=None):
        """Constructs a :class:`Request <Request>`, prepares it and sends it.
        Returns :class:`Response <Response>` object.

        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param params: (optional) Dictionary or bytes to be sent in the query
            string for the :class:`Request`.
        :param data: (optional) Dictionary or bytes to send in the body of the
            :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the
            :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the
            :class:`Request`.
        :param files: (optional) Dictionary of 'filename': file-like-objects
            for multipart encoding upload.
        :param auth: (optional) Auth tuple or callable to enable
            Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) Float describing the timeout of the
            request.
        :param allow_redirects: (optional) Boolean. Set to True by default.
        :param proxies: (optional) Dictionary mapping protocol to the URL of
            the proxy.
        :param stream: (optional) whether to immediately download the response
            content. Defaults to ``False``.
        :param verify: (optional) if ``True``, the SSL cert will be verified.
            A CA_BUNDLE path can also be provided.
        :param cert: (optional) if String, path to ssl client cert file (.pem).
            If Tuple, ('cert', 'key') pair.
        """

        method = builtin_str(method)

        # Create the Request.
        req = Request(
            method = method.upper(),
            url = url,
            headers = headers,
            files = files,
            data = data or {},
            params = params or {},
            auth = auth,
            cookies = cookies,
            hooks = hooks,
        )
        prep = self.prepare_request(req)

        proxies = proxies or {}

        # Gather clues from the surrounding environment.
        if self.trust_env:
            # Set environment's proxies.
            env_proxies = get_environ_proxies(url) or {}
            for (k, v) in env_proxies.items():
                proxies.setdefault(k, v)

            # Look for configuration.
            if not verify and verify is not False:
                verify = os.environ.get('REQUESTS_CA_BUNDLE')

            # Curl compatibility.
            if not verify and verify is not False:
                verify = os.environ.get('CURL_CA_BUNDLE')

        # Merge all the kwargs: explicit call arguments win over
        # session-level defaults.
        proxies = merge_setting(proxies, self.proxies)
        stream = merge_setting(stream, self.stream)
        verify = merge_setting(verify, self.verify)
        cert = merge_setting(cert, self.cert)

        # Send the request.
        send_kwargs = {
            'stream': stream,
            'timeout': timeout,
            'verify': verify,
            'cert': cert,
            'proxies': proxies,
            'allow_redirects': allow_redirects,
        }
        resp = self.send(prep, **send_kwargs)

        return resp
def get(self, url, **kwargs):
"""Sends a GET request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return self.request('GET', url, **kwargs)
def options(self, url, **kwargs):
"""Sends a OPTIONS request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return self.request('OPTIONS', url, **kwargs)
def head(self, url, **kwargs):
"""Sends a HEAD request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', False)
return self.request('HEAD', url, **kwargs)
def post(self, url, data=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('POST', url, data=data, **kwargs)
def put(self, url, data=None, **kwargs):
"""Sends a PUT request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('PUT', url, data=data, **kwargs)
def patch(self, url, data=None, **kwargs):
"""Sends a PATCH request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('PATCH', url, data=data, **kwargs)
def delete(self, url, **kwargs):
"""Sends a DELETE request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('DELETE', url, **kwargs)
    def send(self, request, **kwargs):
        """Send a given PreparedRequest.

        Returns the final :class:`Response`, with any redirect history
        attached as ``r.history`` when redirects were followed.
        """
        # Set defaults that the hooks can utilize to ensure they always have
        # the correct parameters to reproduce the previous request.
        kwargs.setdefault('stream', self.stream)
        kwargs.setdefault('verify', self.verify)
        kwargs.setdefault('cert', self.cert)
        kwargs.setdefault('proxies', self.proxies)

        # It's possible that users might accidentally send a Request object.
        # Guard against that specific failure case.
        if not isinstance(request, PreparedRequest):
            raise ValueError('You can only send PreparedRequests.')

        # Set up variables needed for resolve_redirects and dispatching of
        # hooks
        allow_redirects = kwargs.pop('allow_redirects', True)
        stream = kwargs.get('stream')
        timeout = kwargs.get('timeout')
        verify = kwargs.get('verify')
        cert = kwargs.get('cert')
        proxies = kwargs.get('proxies')
        hooks = request.hooks

        # Get the appropriate adapter to use
        adapter = self.get_adapter(url=request.url)

        # Start time (approximately) of the request
        start = datetime.utcnow()

        # Send the request
        r = adapter.send(request, **kwargs)

        # Total elapsed time of the request (approximately)
        r.elapsed = datetime.utcnow() - start

        # Response manipulation hooks
        r = dispatch_hook('response', hooks, r, **kwargs)

        # Persist cookies
        if r.history:

            # If the hooks create history then we want those cookies too
            for resp in r.history:
                extract_cookies_to_jar(self.cookies, resp.request, resp.raw)

        extract_cookies_to_jar(self.cookies, request, r.raw)

        # Redirect resolving generator.
        gen = self.resolve_redirects(r, request, stream=stream,
                                     timeout=timeout, verify=verify, cert=cert,
                                     proxies=proxies)

        # Resolve redirects if allowed. The generator is lazy, so not
        # iterating it means no redirect is actually followed.
        history = [resp for resp in gen] if allow_redirects else []

        # Shuffle things around if there's history.
        if history:
            # Insert the first (original) request at the start
            history.insert(0, r)
            # Get the last request made
            r = history.pop()
            r.history = tuple(history)

        return r
def get_adapter(self, url):
"""Returns the appropriate connnection adapter for the given URL."""
for (prefix, adapter) in self.adapters.items():
if url.lower().startswith(prefix):
return adapter
# Nothing matches :-/
raise InvalidSchema("No connection adapters were found for '%s'" % url)
def close(self):
"""Closes all adapters and as such the session"""
for v in self.adapters.values():
v.close()
    def mount(self, prefix, adapter):
        """Registers a connection adapter to a prefix.

        Adapters are sorted in descending order by key length."""
        self.adapters[prefix] = adapter
        # Re-insert every shorter prefix so OrderedDict iteration keeps the
        # longer (more specific) prefixes first; get_adapter relies on this
        # to return the most specific match.
        keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
        for key in keys_to_move:
            self.adapters[key] = self.adapters.pop(key)
def __getstate__(self):
return dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
def __setstate__(self, state):
for attr, value in state.items():
setattr(self, attr, value)
def session():
    """Returns a :class:`Session` for context-management."""
    new_session = Session()
    return new_session
|
kcrt/electron | refs/heads/master | script/upload-node-headers.py | 141 | #!/usr/bin/env python
import argparse
import glob
import os
import shutil
import sys
import tarfile
from lib.config import PLATFORM, get_target_arch, s3_config
from lib.util import execute, safe_mkdir, scoped_cwd, s3put
# Repository layout: this script lives in <root>/script/, so two dirname()
# calls yield the repository root.
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
NODE_DIR = os.path.join(SOURCE_ROOT, 'vendor', 'node')
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')

# File extensions considered "headers" when walking the node tree.
HEADERS_SUFFIX = [
    '.h',
    '.gypi',
]
# Subdirectories of vendor/node to scan for header files.
HEADERS_DIRS = [
    'src',
    'deps/http_parser',
    'deps/zlib',
    'deps/uv',
    'deps/npm',
    'deps/mdb_v8',
]
# Individual top-level files always included in the headers package.
HEADERS_FILES = [
    'common.gypi',
    'config.gypi',
]
def main():
    """Build node/iojs header tarballs for the release and upload them to S3."""
    safe_mkdir(DIST_DIR)

    args = parse_args()

    # Three header layouts are published per release: node-style plus two
    # iojs-style directory names.
    header_dirs = [
        os.path.join(DIST_DIR, 'node-{0}'.format(args.version)),
        os.path.join(DIST_DIR, 'iojs-{0}'.format(args.version)),
        os.path.join(DIST_DIR, 'iojs-{0}-headers'.format(args.version)),
    ]
    for headers_dir in header_dirs:
        copy_headers(headers_dir)
        create_header_tarball(headers_dir)

    # Upload node's headers to S3.
    bucket, access_key, secret_key = s3_config()
    upload_node(bucket, access_key, secret_key, args.version)
def parse_args():
    """Parse command-line options; --version/-v is mandatory."""
    parser = argparse.ArgumentParser(description='upload sumsha file')
    parser.add_argument('-v', '--version', required=True,
                        help='Specify the version')
    return parser.parse_args()
def copy_headers(dist_headers_dir):
    """Populate dist_headers_dir with the node and V8 header files."""
    safe_mkdir(dist_headers_dir)

    def _copy_tree_headers(root, start, destination):
        # Walk `root` and copy every header-suffixed file, preserving its
        # path relative to `start`.
        for dirpath, _, filenames in os.walk(root):
            for filename in filenames:
                if os.path.splitext(filename)[1] in HEADERS_SUFFIX:
                    copy_source_file(os.path.join(dirpath, filename), start,
                                     destination)

    # Copy standard node headers from node. repository.
    for include_path in HEADERS_DIRS:
        _copy_tree_headers(os.path.join(NODE_DIR, include_path), NODE_DIR,
                           dist_headers_dir)
    for other_file in HEADERS_FILES:
        copy_source_file(os.path.join(NODE_DIR, other_file), NODE_DIR,
                         dist_headers_dir)

    # Copy V8 headers from chromium's repository.
    src = os.path.join(SOURCE_ROOT, 'vendor', 'brightray', 'vendor', 'download',
                       'libchromiumcontent', 'src')
    _copy_tree_headers(os.path.join(src, 'v8'), src,
                       os.path.join(dist_headers_dir, 'deps'))
def create_header_tarball(dist_headers_dir):
    """Compress dist_headers_dir into a sibling `<dir>.tar.gz` archive.

    The archive is built from inside DIST_DIR so entries are stored with
    paths relative to it (a single top-level directory).
    """
    target = dist_headers_dir + '.tar.gz'
    with scoped_cwd(DIST_DIR):
        # Fix: use a context manager so the tarball is always closed (and
        # its gzip stream flushed) even if adding files raises.
        with tarfile.open(name=target, mode='w:gz') as tarball:
            tarball.add(os.path.relpath(dist_headers_dir))
def copy_source_file(source, start, destination):
    """Copy `source` under `destination`, keeping its path relative to `start`."""
    relative = os.path.relpath(source, start=start)
    target = os.path.join(destination, relative)
    # Ensure the target directory hierarchy exists before copying.
    safe_mkdir(os.path.dirname(target))
    shutil.copy2(source, target)
def upload_node(bucket, access_key, secret_key, version):
    """Upload the header tarballs (and, on Windows, import libraries) to S3
    under atom-shell/dist/<version>/."""
    with scoped_cwd(DIST_DIR):
        s3put(bucket, access_key, secret_key, DIST_DIR,
              'atom-shell/dist/{0}'.format(version), glob.glob('node-*.tar.gz'))
        s3put(bucket, access_key, secret_key, DIST_DIR,
              'atom-shell/dist/{0}'.format(version), glob.glob('iojs-*.tar.gz'))

    if PLATFORM == 'win32':
        # Pick per-arch destinations for the import libraries.
        if get_target_arch() == 'ia32':
            node_lib = os.path.join(DIST_DIR, 'node.lib')
            iojs_lib = os.path.join(DIST_DIR, 'win-x86', 'iojs.lib')
        else:
            node_lib = os.path.join(DIST_DIR, 'x64', 'node.lib')
            iojs_lib = os.path.join(DIST_DIR, 'win-x64', 'iojs.lib')
        safe_mkdir(os.path.dirname(node_lib))
        safe_mkdir(os.path.dirname(iojs_lib))

        # Copy atom.lib to node.lib and iojs.lib.
        atom_lib = os.path.join(OUT_DIR, 'node.dll.lib')
        shutil.copy2(atom_lib, node_lib)
        shutil.copy2(atom_lib, iojs_lib)

        # Upload the node.lib.
        s3put(bucket, access_key, secret_key, DIST_DIR,
              'atom-shell/dist/{0}'.format(version), [node_lib])

        # Upload the iojs.lib.
        s3put(bucket, access_key, secret_key, DIST_DIR,
              'atom-shell/dist/{0}'.format(version), [iojs_lib])
# Script entry point: propagate main()'s return value as the exit code.
if __name__ == '__main__':
    sys.exit(main())
|
Jgarcia-IAS/SAT | refs/heads/master | openerp/addons/gamification/models/challenge.py | 25 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.tools import ustr, DEFAULT_SERVER_DATE_FORMAT as DF
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
from datetime import date, datetime, timedelta
import calendar
import logging
# Module-level logger for the gamification challenge model.
_logger = logging.getLogger(__name__)

# display top 3 in ranking, could be db variable
MAX_VISIBILITY_RANKING = 3
def start_end_date_for_period(period, default_start_date=False, default_end_date=False):
    """Return the start and end date for a goal period based on today

    :return: (start_date, end_date), datetime.date objects, False if the period is
    not defined or unknown"""
    today = date.today()
    if period == 'daily':
        start_date = end_date = today
    elif period == 'weekly':
        # Back up to Monday of the current week.
        start_date = today - timedelta(days=today.weekday())
        end_date = start_date + timedelta(days=7)
    elif period == 'monthly':
        last_day = calendar.monthrange(today.year, today.month)[1]
        start_date = today.replace(day=1)
        end_date = today.replace(day=last_day)
    elif period == 'yearly':
        start_date = today.replace(month=1, day=1)
        end_date = today.replace(month=12, day=31)
    else:  # period == 'once':
        # Manual/non-recurring goals keep whatever boundaries were passed in.
        start_date = default_start_date
        end_date = default_end_date

    if start_date and end_date:
        return (datetime.strftime(start_date, DF), datetime.strftime(end_date, DF))
    return (start_date, end_date)
class gamification_challenge(osv.Model):
"""Gamification challenge
Set of predifined objectives assigned to people with rules for recurrence and
rewards
If 'user_ids' is defined and 'period' is different than 'one', the set will
be assigned to the users for each period (eg: every 1st of each month if
'monthly' is selected)
"""
_name = 'gamification.challenge'
_description = 'Gamification challenge'
_inherit = 'mail.thread'
def _get_next_report_date(self, cr, uid, ids, field_name, arg, context=None):
"""Return the next report date based on the last report date and report
period.
:return: a string in DEFAULT_SERVER_DATE_FORMAT representing the date"""
res = {}
for challenge in self.browse(cr, uid, ids, context=context):
last = datetime.strptime(challenge.last_report_date, DF).date()
if challenge.report_message_frequency == 'daily':
next = last + timedelta(days=1)
res[challenge.id] = next.strftime(DF)
elif challenge.report_message_frequency == 'weekly':
next = last + timedelta(days=7)
res[challenge.id] = next.strftime(DF)
elif challenge.report_message_frequency == 'monthly':
month_range = calendar.monthrange(last.year, last.month)
next = last.replace(day=month_range[1]) + timedelta(days=1)
res[challenge.id] = next.strftime(DF)
elif challenge.report_message_frequency == 'yearly':
res[challenge.id] = last.replace(year=last.year + 1).strftime(DF)
# frequency == 'once', reported when closed only
else:
res[challenge.id] = False
return res
def _get_categories(self, cr, uid, context=None):
return [
('hr', 'Human Ressources / Engagement'),
('other', 'Settings / Gamification Tools'),
]
def _get_report_template(self, cr, uid, context=None):
try:
return self.pool.get('ir.model.data').get_object_reference(cr, uid, 'gamification', 'simple_report_template')[1]
except ValueError:
return False
_order = 'end_date, start_date, name, id'
_columns = {
'name': fields.char('Challenge Name', required=True, translate=True),
'description': fields.text('Description', translate=True),
'state': fields.selection([
('draft', 'Draft'),
('inprogress', 'In Progress'),
('done', 'Done'),
], copy=False,
string='State', required=True, track_visibility='onchange'),
'manager_id': fields.many2one('res.users',
string='Responsible', help="The user responsible for the challenge."),
'user_ids': fields.many2many('res.users', 'gamification_challenge_users_rel',
string='Users',
help="List of users participating to the challenge"),
'user_domain': fields.char('User domain', help="Alternative to a list of users"),
'period': fields.selection([
('once', 'Non recurring'),
('daily', 'Daily'),
('weekly', 'Weekly'),
('monthly', 'Monthly'),
('yearly', 'Yearly')
],
string='Periodicity',
help='Period of automatic goal assigment. If none is selected, should be launched manually.',
required=True),
'start_date': fields.date('Start Date',
help="The day a new challenge will be automatically started. If no periodicity is set, will use this date as the goal start date."),
'end_date': fields.date('End Date',
help="The day a new challenge will be automatically closed. If no periodicity is set, will use this date as the goal end date."),
'invited_user_ids': fields.many2many('res.users', 'gamification_invited_user_ids_rel',
string="Suggest to users"),
'line_ids': fields.one2many('gamification.challenge.line', 'challenge_id',
string='Lines',
help="List of goals that will be set",
required=True, copy=True),
'reward_id': fields.many2one('gamification.badge', string="For Every Succeding User"),
'reward_first_id': fields.many2one('gamification.badge', string="For 1st user"),
'reward_second_id': fields.many2one('gamification.badge', string="For 2nd user"),
'reward_third_id': fields.many2one('gamification.badge', string="For 3rd user"),
'reward_failure': fields.boolean('Reward Bests if not Succeeded?'),
'reward_realtime': fields.boolean('Reward as soon as every goal is reached',
help="With this option enabled, a user can receive a badge only once. The top 3 badges are still rewarded only at the end of the challenge."),
'visibility_mode': fields.selection([
('personal', 'Individual Goals'),
('ranking', 'Leader Board (Group Ranking)'),
],
string="Display Mode", required=True),
'report_message_frequency': fields.selection([
('never', 'Never'),
('onchange', 'On change'),
('daily', 'Daily'),
('weekly', 'Weekly'),
('monthly', 'Monthly'),
('yearly', 'Yearly')
],
string="Report Frequency", required=True),
'report_message_group_id': fields.many2one('mail.group',
string='Send a copy to',
help='Group that will receive a copy of the report in addition to the user'),
'report_template_id': fields.many2one('email.template', string="Report Template", required=True),
'remind_update_delay': fields.integer('Non-updated manual goals will be reminded after',
help="Never reminded if no value or zero is specified."),
'last_report_date': fields.date('Last Report Date'),
'next_report_date': fields.function(_get_next_report_date,
type='date', string='Next Report Date', store=True),
'category': fields.selection(lambda s, *a, **k: s._get_categories(*a, **k),
string="Appears in", help="Define the visibility of the challenge through menus", required=True),
}
_defaults = {
'period': 'once',
'state': 'draft',
'visibility_mode': 'personal',
'report_message_frequency': 'never',
'last_report_date': fields.date.today,
'manager_id': lambda s, cr, uid, c: uid,
'category': 'hr',
'reward_failure': False,
'report_template_id': lambda s, *a, **k: s._get_report_template(*a, **k),
'reward_realtime': True,
}
def create(self, cr, uid, vals, context=None):
"""Overwrite the create method to add the user of groups"""
if vals.get('user_domain'):
user_ids = self._get_challenger_users(cr, uid, vals.get('user_domain'), context=context)
if not vals.get('user_ids'):
vals['user_ids'] = []
vals['user_ids'] += [(4, user_id) for user_id in user_ids]
return super(gamification_challenge, self).create(cr, uid, vals, context=context)
def write(self, cr, uid, ids, vals, context=None):
    """Write challenge values, keeping users, subscriptions and goals in sync.

    - expands ``user_domain`` into (4, id) commands on ``user_ids``
    - subscribes participants when a report frequency is enabled
    - reacts to state transitions (inprogress / done / draft reset)
    """
    if isinstance(ids, (int, long)):
        ids = [ids]

    domain = vals.get('user_domain')
    if domain:
        matched_ids = self._get_challenger_users(cr, uid, domain, context=context)
        if not vals.get('user_ids'):
            vals['user_ids'] = []
        vals['user_ids'] += [(4, matched_id) for matched_id in matched_ids]

    result = super(gamification_challenge, self).write(cr, uid, ids, vals, context=context)

    if vals.get('report_message_frequency', 'never') != 'never':
        # _recompute_challenge_users do not set users for challenges with no reports, subscribing them now
        for challenge in self.browse(cr, uid, ids, context=context):
            participants = [user.partner_id.id for user in challenge.user_ids]
            self.message_subscribe(cr, uid, [challenge.id], participants, context=context)

    new_state = vals.get('state')
    if new_state == 'inprogress':
        self._recompute_challenge_users(cr, uid, ids, context=context)
        self._generate_goals_from_challenge(cr, uid, ids, context=context)
    elif new_state == 'done':
        self.check_challenge_reward(cr, uid, ids, force=True, context=context)
    elif new_state == 'draft':
        # resetting progress is forbidden while goals are still running
        unfinished = self.pool.get('gamification.goal').search(
            cr, uid,
            [('challenge_id', 'in', ids), ('state', '=', 'inprogress')],
            context=context)
        if unfinished:
            raise osv.except_osv("Error", "You can not reset a challenge with unfinished goals.")

    return result
##### Update #####
def _cron_update(self, cr, uid, context=None, ids=False):
    """Daily cron check.

    - Start planned challenges (in draft and with start_date <= today)
    - Close running challenges whose end date is passed
    - Update every running challenge (or only ``ids`` if given)

    :param ids: optional list of challenge ids to restrict the final
        update step to; if falsy, every challenge in progress is updated.
    """
    if context is None:
        context = {}

    # start scheduled challenges whose start date has been reached
    planned_challenge_ids = self.search(cr, uid, [
        ('state', '=', 'draft'),
        ('start_date', '<=', fields.date.today())])
    if planned_challenge_ids:
        self.write(cr, uid, planned_challenge_ids, {'state': 'inprogress'}, context=context)

    # close running challenges whose end date is passed
    # BUG FIX: the condition used to be ('end_date', '>=', today), which
    # closed challenges still running (ending today or later) and never
    # closed the ones that actually finished.
    planned_challenge_ids = self.search(cr, uid, [
        ('state', '=', 'inprogress'),
        ('end_date', '<', fields.date.today())])
    if planned_challenge_ids:
        self.write(cr, uid, planned_challenge_ids, {'state': 'done'}, context=context)

    if not ids:
        ids = self.search(cr, uid, [('state', '=', 'inprogress')], context=context)

    # in cron mode, will do intermediate commits
    # TODO in trunk: replace by parameter
    context = dict(context, commit_gamification=True)
    return self._update_all(cr, uid, ids, context=context)
def _update_all(self, cr, uid, ids, context=None):
    """Update the challenges and related goals

    Recomputes running/just-ended goals whose owner logged in since the
    goal was last written, refreshes the participant list, generates any
    missing goals, sends due progress reports and grants rewards.

    :param list(int) ids: the ids of the challenges to update, if False will
    update only challenges in progress."""
    if not ids:
        return True
    if isinstance(ids, (int,long)):
        ids = [ids]
    goal_obj = self.pool.get('gamification.goal')

    # include yesterday goals to update the goals that just ended
    # exclude goals for users that did not connect since the last update
    yesterday = date.today() - timedelta(days=1)
    # gg.write_date < ru.login_date keeps only goals whose owner logged in
    # after the goal was last recomputed (skip inactive users' goals)
    cr.execute("""SELECT gg.id
                    FROM gamification_goal as gg,
                         gamification_challenge as gc,
                         res_users as ru
                   WHERE gg.challenge_id = gc.id
                     AND gg.user_id = ru.id
                     AND gg.write_date < ru.login_date
                     AND gg.closed IS false
                     AND gc.id IN %s
                     AND (gg.state = 'inprogress'
                          OR (gg.state = 'reached'
                              AND (gg.end_date >= %s OR gg.end_date IS NULL)))
                """, (tuple(ids), yesterday.strftime(DF)))
    goal_ids = [res[0] for res in cr.fetchall()]
    # update every running goal already generated linked to selected challenges
    goal_obj.update(cr, uid, goal_ids, context=context)

    self._recompute_challenge_users(cr, uid, ids, context=context)
    self._generate_goals_from_challenge(cr, uid, ids, context=context)

    for challenge in self.browse(cr, uid, ids, context=context):
        if challenge.last_report_date != fields.date.today():
            # goals closed but still opened at the last report date
            # NOTE(review): this domain matches goals that START on/after AND
            # END on/before last_report_date, i.e. only goals fully contained
            # in that single day -- looks suspicious, confirm intent.
            closed_goals_to_report = goal_obj.search(cr, uid, [
                ('challenge_id', '=', challenge.id),
                ('start_date', '>=', challenge.last_report_date),
                ('end_date', '<=', challenge.last_report_date)
            ])

            if challenge.next_report_date and fields.date.today() >= challenge.next_report_date:
                # regular periodic report is due
                self.report_progress(cr, uid, challenge, context=context)
            elif len(closed_goals_to_report) > 0:
                # some goals need a final report
                self.report_progress(cr, uid, challenge, subset_goal_ids=closed_goals_to_report, context=context)

    self.check_challenge_reward(cr, uid, ids, context=context)
    return True
def quick_update(self, cr, uid, challenge_id, context=None):
    """Update all the goals of a specific challenge, no generation of new goals"""
    goal_pool = self.pool.get('gamification.goal')
    related_goal_ids = goal_pool.search(
        cr, uid, [('challenge_id', '=', challenge_id)], context=context)
    goal_pool.update(cr, uid, related_goal_ids, context=context)
    return True
def _get_challenger_users(self, cr, uid, domain, context=None):
    """Evaluate the challenge's textual user domain and return matching res.users ids.

    :param domain: search-domain string stored on the challenge (user_domain)
    :return: list of res.users ids matching the domain

    SECURITY NOTE(review): ``eval`` is executed on a stored string; if the
    domain field can be edited by non-admin users this allows arbitrary
    code execution -- consider ``openerp.tools.safe_eval`` instead.
    """
    user_domain = eval(ustr(domain))
    return self.pool['res.users'].search(cr, uid, user_domain, context=context)
def _recompute_challenge_users(self, cr, uid, challenge_ids, context=None):
    """Recompute the domain to add new users and remove the one no longer matching the domain"""
    for challenge in self.browse(cr, uid, challenge_ids, context=context):
        if not challenge.user_domain:
            continue
        current_ids = set(user.id for user in challenge.user_ids)
        matching_ids = set(self._get_challenger_users(cr, uid, challenge.user_domain, context=context))
        # unlink users that left the domain, then link newcomers
        commands = [(3, stale_id) for stale_id in current_ids - matching_ids]
        commands += [(4, fresh_id) for fresh_id in matching_ids - current_ids]
        if commands:
            self.write(cr, uid, [challenge.id], {'user_ids': commands}, context=context)
    return True
def action_start(self, cr, uid, ids, context=None):
    """Start a challenge"""
    new_values = {'state': 'inprogress'}
    return self.write(cr, uid, ids, new_values, context=context)
def action_check(self, cr, uid, ids, context=None):
    """Check a challenge

    Create goals that haven't been created yet (eg: if added users)
    Recompute the current value for each goal related"""
    # delegate to the shared routine also used by the daily cron
    return self._update_all(cr, uid, ids=ids, context=context)
def action_report_progress(self, cr, uid, ids, context=None):
    """Manual report of a goal, does not influence automatic report frequency"""
    record_ids = [ids] if isinstance(ids, (int, long)) else ids
    for challenge in self.browse(cr, uid, record_ids, context=context):
        self.report_progress(cr, uid, challenge, context=context)
    return True
##### Automatic actions #####
def _generate_goals_from_challenge(self, cr, uid, ids, context=None):
    """Generate the goals for each line and user.

    If goals already exist for this line and user, the line is skipped. This
    can be called after each change in the list of users or lines.

    :param list(int) ids: the list of challenge concerned"""
    goal_obj = self.pool.get('gamification.goal')
    for challenge in self.browse(cr, uid, ids, context=context):
        (start_date, end_date) = start_end_date_for_period(challenge.period)
        to_update = []

        # if no periodicity, use challenge dates
        if not start_date and challenge.start_date:
            start_date = challenge.start_date
        if not end_date and challenge.end_date:
            end_date = challenge.end_date

        for line in challenge.line_ids:
            # there is potentially a lot of users
            # detect the ones with no goal linked to this line
            # BUG FIX: the date clauses used to be concatenated with no
            # separator, producing "... = %sAND g.end_date = %s" (invalid
            # SQL) when both a start and an end date were set; they are now
            # collected in a list and joined with a space.
            date_clauses = []
            query_params = [line.id]
            if start_date:
                date_clauses.append("AND g.start_date = %s")
                query_params.append(start_date)
            if end_date:
                date_clauses.append("AND g.end_date = %s")
                query_params.append(end_date)
            query = """SELECT u.id AS user_id
                         FROM res_users u
                    LEFT JOIN gamification_goal g
                           ON (u.id = g.user_id)
                        WHERE line_id = %s
                          {date_clause}
                      """.format(date_clause=" ".join(date_clauses))
            cr.execute(query, query_params)
            user_with_goal_ids = cr.dictfetchall()

            participant_user_ids = [user.id for user in challenge.user_ids]
            user_without_goal_ids = list(set(participant_user_ids) - set([user['user_id'] for user in user_with_goal_ids]))
            user_squating_challenge_ids = list(set([user['user_id'] for user in user_with_goal_ids]) - set(participant_user_ids))
            if user_squating_challenge_ids:
                # users that used to match the challenge: drop their goals
                goal_to_remove_ids = goal_obj.search(cr, uid, [('challenge_id', '=', challenge.id), ('user_id', 'in', user_squating_challenge_ids)], context=context)
                goal_obj.unlink(cr, uid, goal_to_remove_ids, context=context)

            values = {
                'definition_id': line.definition_id.id,
                'line_id': line.id,
                'target_goal': line.target_goal,
                'state': 'inprogress',
            }
            if start_date:
                values['start_date'] = start_date
            if end_date:
                values['end_date'] = end_date

            # the goal is initialised over the limit to make sure we will compute it at least once
            if line.condition == 'higher':
                values['current'] = line.target_goal - 1
            else:
                values['current'] = line.target_goal + 1

            if challenge.remind_update_delay:
                values['remind_update_delay'] = challenge.remind_update_delay

            for user_id in user_without_goal_ids:
                values.update({'user_id': user_id})
                goal_id = goal_obj.create(cr, uid, values, context=context)
                to_update.append(goal_id)

        goal_obj.update(cr, uid, to_update, context=context)
    return True
##### JS utilities #####
def _get_serialized_challenge_lines(self, cr, uid, challenge, user_id=False, restrict_goal_ids=False, restrict_top=False, context=None):
    """Return a serialised version of the goals information if the user has not completed every goal

    :challenge: browse record of challenge to compute
    :user_id: res.users id of the user retrieving progress (False if no distinction, only for ranking challenges)
    :restrict_goal_ids: <list(int)> compute only the results for this subset of gamification.goal ids, if False retrieve every goal of current running challenge
    :restrict_top: <int> for challenge lines where visibility_mode == 'ranking', retrieve only these bests results and itself, if False retrieve all
    restrict_goal_ids has priority over restrict_top

    format list
    # if visibility_mode == 'ranking'
    {
        'name': <gamification.goal.description name>,
        'description': <gamification.goal.description description>,
        'condition': <reach condition {lower,higher}>,
        'computation_mode': <target computation {manually,count,sum,python}>,
        'monetary': <{True,False}>,
        'suffix': <value suffix>,
        'action': <{True,False}>,
        'display_mode': <{progress,boolean}>,
        'target': <challenge line target>,
        'own_goal_id': <gamification.goal id where user_id == uid>,
        'goals': [
            {
                'id': <gamification.goal id>,
                'rank': <user ranking>,
                'user_id': <res.users id>,
                'name': <res.users name>,
                'state': <gamification.goal state {draft,inprogress,reached,failed,canceled}>,
                'completeness': <percentage>,
                'current': <current value>,
            }
        ]
    },
    # if visibility_mode == 'personal'
    {
        'id': <gamification.goal id>,
        'name': <gamification.goal.description name>,
        'description': <gamification.goal.description description>,
        'condition': <reach condition {lower,higher}>,
        'computation_mode': <target computation {manually,count,sum,python}>,
        'monetary': <{True,False}>,
        'suffix': <value suffix>,
        'action': <{True,False}>,
        'display_mode': <{progress,boolean}>,
        'target': <challenge line target>,
        'state': <gamification.goal state {draft,inprogress,reached,failed,canceled}>,
        'completeness': <percentage>,
        'current': <current value>,
    }
    """
    goal_obj = self.pool.get('gamification.goal')
    (start_date, end_date) = start_end_date_for_period(challenge.period)

    res_lines = []
    all_reached = True
    for line in challenge.line_ids:
        line_data = {
            'name': line.definition_id.name,
            'description': line.definition_id.description,
            'condition': line.definition_id.condition,
            'computation_mode': line.definition_id.computation_mode,
            'monetary': line.definition_id.monetary,
            'suffix': line.definition_id.suffix,
            'action': True if line.definition_id.action_id else False,
            'display_mode': line.definition_id.display_mode,
            'target': line.target_goal,
        }
        domain = [
            ('line_id', '=', line.id),
            ('state', '!=', 'draft'),
        ]
        if restrict_goal_ids:
            # BUG FIX: the domain previously filtered on 'ids', which is not
            # a field of gamification.goal; 'id' is the correct leaf for a
            # record-id restriction, so the subset was silently ignored.
            domain.append(('id', 'in', restrict_goal_ids))
        else:
            # if no subset goals, use the dates for restriction
            if start_date:
                domain.append(('start_date', '=', start_date))
            if end_date:
                domain.append(('end_date', '=', end_date))

        if challenge.visibility_mode == 'personal':
            if not user_id:
                raise osv.except_osv(_('Error!'),_("Retrieving progress for personal challenge without user information"))
            domain.append(('user_id', '=', user_id))
            sorting = goal_obj._order
            limit = 1
        else:
            line_data.update({
                'own_goal_id': False,
                'goals': [],
            })
            sorting = "completeness desc, current desc"
            limit = False

        goal_ids = goal_obj.search(cr, uid, domain, order=sorting, limit=limit, context=context)
        ranking = 0
        for goal in goal_obj.browse(cr, uid, goal_ids, context=context):
            if challenge.visibility_mode == 'personal':
                # limit=1 so only one result
                line_data.update({
                    'id': goal.id,
                    'current': goal.current,
                    'completeness': goal.completeness,
                    'state': goal.state,
                })
                if goal.state != 'reached':
                    all_reached = False
            else:
                ranking += 1
                if user_id and goal.user_id.id == user_id:
                    line_data['own_goal_id'] = goal.id
                elif restrict_top and ranking > restrict_top:
                    # not own goal and too low to be in top
                    continue

                line_data['goals'].append({
                    'id': goal.id,
                    'user_id': goal.user_id.id,
                    'name': goal.user_id.name,
                    'rank': ranking,
                    'current': goal.current,
                    'completeness': goal.completeness,
                    'state': goal.state,
                })
                if goal.state != 'reached':
                    all_reached = False
        if goal_ids:
            res_lines.append(line_data)
    if all_reached:
        return []
    return res_lines
##### Reporting #####
def report_progress(self, cr, uid, challenge, context=None, users=False, subset_goal_ids=False):
    """Post report about the progress of the goals

    :param challenge: the challenge object that need to be reported
    :param users: the list(res.users) of users that are concerned by
      the report. If False, will send the report to every user concerned
      (goal users and group that receive a copy). Only used for challenge with
      a visibility mode set to 'personal'.
    :param subset_goal_ids: a list(int) of goal ids to restrict the report
      to (eg: previous periods); if False the current period's goals are used
    """
    if context is None:
        context = {}
    temp_obj = self.pool.get('email.template')
    ctx = context.copy()
    if challenge.visibility_mode == 'ranking':
        # one shared leaderboard report for everybody
        lines_boards = self._get_serialized_challenge_lines(cr, uid, challenge, user_id=False, restrict_goal_ids=subset_goal_ids, restrict_top=False, context=context)
        ctx.update({'challenge_lines': lines_boards})
        body_html = temp_obj.render_template(cr, uid, challenge.report_template_id.body_html, 'gamification.challenge', challenge.id, context=ctx)

        # send to every follower and participant of the challenge
        self.message_post(cr, uid, challenge.id,
            body=body_html,
            partner_ids=[user.partner_id.id for user in challenge.user_ids],
            context=context,
            subtype='mail.mt_comment')
        if challenge.report_message_group_id:
            self.pool.get('mail.group').message_post(cr, uid, challenge.report_message_group_id.id,
                body=body_html,
                context=context,
                subtype='mail.mt_comment')
    else:
        # generate individual reports
        for user in users or challenge.user_ids:
            goals = self._get_serialized_challenge_lines(cr, uid, challenge, user.id, restrict_goal_ids=subset_goal_ids, context=context)
            if not goals:
                # user finished every goal: nothing to report
                continue

            ctx.update({'challenge_lines': goals})
            body_html = temp_obj.render_template(cr, user.id, challenge.report_template_id.body_html, 'gamification.challenge', challenge.id, context=ctx)

            # send message only to users, not on the challenge
            # NOTE(review): res_id 0 posts a message not attached to any
            # record -- presumably intentional so only the user's inbox
            # receives it; confirm against mail.thread.message_post semantics
            self.message_post(cr, uid, 0,
                body=body_html,
                partner_ids=[(4, user.partner_id.id)],
                context=context,
                subtype='mail.mt_comment')
            if challenge.report_message_group_id:
                self.pool.get('mail.group').message_post(cr, uid, challenge.report_message_group_id.id,
                    body=body_html,
                    context=context,
                    subtype='mail.mt_comment')
    return self.write(cr, uid, challenge.id, {'last_report_date': fields.date.today()}, context=context)
##### Challenges #####
# TODO in trunk, remove unused parameter user_id
def accept_challenge(self, cr, uid, challenge_ids, context=None, user_id=None):
    """The user accept the suggested challenge"""
    # user_id is kept for backward compatibility only; the acting user is uid
    return self._accept_challenge(cr, uid, uid, challenge_ids, context=context)
def _accept_challenge(self, cr, uid, user_id, challenge_ids, context=None):
    """Announce the join, move the user from invited to participant and create his goals."""
    joining_user = self.pool.get('res.users').browse(cr, uid, user_id, context=context)
    self.message_post(cr, SUPERUSER_ID, challenge_ids,
                      body="%s has joined the challenge" % joining_user.name,
                      context=context)
    membership_change = {'invited_user_ids': [(3, user_id)], 'user_ids': [(4, user_id)]}
    self.write(cr, SUPERUSER_ID, challenge_ids, membership_change, context=context)
    return self._generate_goals_from_challenge(cr, SUPERUSER_ID, challenge_ids, context=context)
# TODO in trunk, remove unused parameter user_id
def discard_challenge(self, cr, uid, challenge_ids, context=None, user_id=None):
    """The user discard the suggested challenge"""
    # user_id is kept for backward compatibility only; the acting user is uid
    return self._discard_challenge(cr, uid, uid, challenge_ids, context=context)
def _discard_challenge(self, cr, uid, user_id, challenge_ids, context=None):
    """Announce the refusal and remove the user from the invitation list.

    :param user_id: id of the res.users refusing the challenge
    """
    user = self.pool.get('res.users').browse(cr, uid, user_id, context=context)
    message = "%s has refused the challenge" % user.name
    self.message_post(cr, SUPERUSER_ID, challenge_ids, body=message, context=context)
    # BUG FIX: one2many/many2many write commands must be a LIST of command
    # tuples; a bare (3, user_id) tuple was previously passed (inconsistent
    # with _accept_challenge) and is not a valid ORM command value.
    return self.write(cr, SUPERUSER_ID, challenge_ids, {'invited_user_ids': [(3, user_id)]}, context=context)
def reply_challenge_wizard(self, cr, uid, challenge_id, context=None):
    """Return the challenge wizard act_window, targeted at ``challenge_id``."""
    model_data_obj = self.pool.get('ir.model.data')
    reference = model_data_obj.get_object_reference(cr, uid, 'gamification', 'challenge_wizard')
    # reference is (model, res_id); avoid shadowing the builtin `id`
    action_id = reference and reference[1] or False
    action = self.pool.get('ir.actions.act_window').read(cr, uid, [action_id], context=context)[0]
    action['res_id'] = challenge_id
    return action
def check_challenge_reward(self, cr, uid, ids, force=False, context=None):
    """Actions for the end of a challenge

    If a reward was selected, grant it to the correct users.
    Rewards granted at:
        - the end date for a challenge with no periodicity
        - the end of a period for challenge with periodicity
        - when a challenge is manually closed
    (if no end date, a running challenge is never rewarded)

    :param force: consider the challenge ended even if the end date is not
        reached (used when a challenge is closed manually)
    """
    if isinstance(ids, (int, long)):
        ids = [ids]
    # BUG FIX: guard against a None context -- this public method used to
    # crash on context.get() when called without a context
    commit = context and context.get('commit_gamification') or False
    for challenge in self.browse(cr, uid, ids, context=context):
        (start_date, end_date) = start_end_date_for_period(challenge.period, challenge.start_date, challenge.end_date)
        yesterday = date.today() - timedelta(days=1)

        rewarded_users = []
        challenge_ended = end_date == yesterday.strftime(DF) or force
        if challenge.reward_id and (challenge_ended or challenge.reward_realtime):
            # not using start_date as intemportal goals have a start date but no end_date
            reached_goals = self.pool.get('gamification.goal').read_group(cr, uid, [
                ('challenge_id', '=', challenge.id),
                ('end_date', '=', end_date),
                ('state', '=', 'reached')
            ], fields=['user_id'], groupby=['user_id'], context=context)
            for reach_goals_user in reached_goals:
                if reach_goals_user['user_id_count'] == len(challenge.line_ids):
                    # the user has succeeded every assigned goal
                    user_id = reach_goals_user['user_id'][0]
                    if challenge.reward_realtime:
                        badges = self.pool['gamification.badge.user'].search(cr, uid, [
                            ('challenge_id', '=', challenge.id),
                            ('badge_id', '=', challenge.reward_id.id),
                            ('user_id', '=', user_id),
                        ], count=True, context=context)
                        if badges > 0:
                            # has already received the badge for this challenge
                            continue
                    self.reward_user(cr, uid, user_id, challenge.reward_id.id, challenge.id, context=context)
                    rewarded_users.append(user_id)
                    if commit:
                        cr.commit()

        if challenge_ended:
            # open chatter message
            message_body = _("The challenge %s is finished." % challenge.name)

            if rewarded_users:
                user_names = self.pool['res.users'].name_get(cr, uid, rewarded_users, context=context)
                message_body += _("<br/>Reward (badge %s) for every succeeding user was sent to %s." % (challenge.reward_id.name, ", ".join([name for (user_id, name) in user_names])))
            else:
                message_body += _("<br/>Nobody has succeeded to reach every goal, no badge is rewared for this challenge.")

            # reward bests
            if challenge.reward_first_id:
                (first_user, second_user, third_user) = self.get_top3_users(cr, uid, challenge, context=context)
                if first_user:
                    self.reward_user(cr, uid, first_user.id, challenge.reward_first_id.id, challenge.id, context=context)
                    message_body += _("<br/>Special rewards were sent to the top competing users. The ranking for this challenge is :")
                    message_body += "<br/> 1. %s - %s" % (first_user.name, challenge.reward_first_id.name)
                else:
                    message_body += _("Nobody reached the required conditions to receive special badges.")

                if second_user and challenge.reward_second_id:
                    self.reward_user(cr, uid, second_user.id, challenge.reward_second_id.id, challenge.id, context=context)
                    message_body += "<br/> 2. %s - %s" % (second_user.name, challenge.reward_second_id.name)
                    if third_user and challenge.reward_third_id:
                        # BUG FIX: the third-ranked user was previously
                        # granted reward_second_id instead of reward_third_id
                        self.reward_user(cr, uid, third_user.id, challenge.reward_third_id.id, challenge.id, context=context)
                        message_body += "<br/> 3. %s - %s" % (third_user.name, challenge.reward_third_id.name)

            self.message_post(cr, uid, challenge.id,
                partner_ids=[user.partner_id.id for user in challenge.user_ids],
                body=message_body,
                context=context)
            if commit:
                cr.commit()
    return True
def get_top3_users(self, cr, uid, challenge, context=None):
    """Get the top 3 users for a defined challenge

    Ranking criteria:
    1. succeed every goal of the challenge
    2. total completeness of each goal (can be over 100)

    Top 3 is computed only for users succeeding every goal of the challenge,
    except if reward_failure is True, in which case every user is
    considered.

    :return: ('first', 'second', 'third'), tuple containing the res.users
    objects of the top 3 users. If no user meets the criteria for a rank,
    it is set to False. Nobody can receive a rank if no one receives the
    higher one (eg: if 'second' == False, 'third' will be False)
    """
    goal_obj = self.pool.get('gamification.goal')
    (start_date, end_date) = start_end_date_for_period(challenge.period, challenge.start_date, challenge.end_date)
    challengers = []
    for user in challenge.user_ids:
        all_reached = True
        total_completness = 0
        # every goal of the user for the running period
        goal_ids = goal_obj.search(cr, uid, [
            ('challenge_id', '=', challenge.id),
            ('user_id', '=', user.id),
            ('start_date', '=', start_date),
            ('end_date', '=', end_date)
        ], context=context)
        for goal in goal_obj.browse(cr, uid, goal_ids, context=context):
            if goal.state != 'reached':
                all_reached = False
            if goal.definition_condition == 'higher':
                # can be over 100
                total_completness += 100.0 * goal.current / goal.target_goal
            elif goal.state == 'reached':
                # for lower goals, can not get percentage so 0 or 100
                total_completness += 100
        challengers.append({'user': user, 'all_reached': all_reached, 'total_completness': total_completness})
    # sort: users who reached all goals first, then by cumulated completeness
    sorted_challengers = sorted(challengers, key=lambda k: (k['all_reached'], k['total_completness']), reverse=True)

    if len(sorted_challengers) == 0 or (not challenge.reward_failure and not sorted_challengers[0]['all_reached']):
        # nobody succeeded
        return (False, False, False)
    if len(sorted_challengers) == 1 or (not challenge.reward_failure and not sorted_challengers[1]['all_reached']):
        # only one user succeeded
        return (sorted_challengers[0]['user'], False, False)
    if len(sorted_challengers) == 2 or (not challenge.reward_failure and not sorted_challengers[2]['all_reached']):
        # only two users succeeded
        return (sorted_challengers[0]['user'], sorted_challengers[1]['user'], False)
    return (sorted_challengers[0]['user'], sorted_challengers[1]['user'], sorted_challengers[2]['user'])
def reward_user(self, cr, uid, user_id, badge_id, challenge_id=False, context=None):
    """Create a badge user and send the badge to him

    :param user_id: the user to reward
    :param badge_id: the concerned badge
    :param challenge_id: optional challenge the badge was earned in
    """
    badge_user_pool = self.pool.get('gamification.badge.user')
    grant_values = {'user_id': user_id, 'badge_id': badge_id, 'challenge_id': challenge_id}
    grant_id = badge_user_pool.create(cr, uid, grant_values, context=context)
    return badge_user_pool._send_badge(cr, uid, [grant_id], context=context)
class gamification_challenge_line(osv.Model):
    """Gamification challenge line

    Predefined goal for 'gamification_challenge'
    These are generic list of goals with only the target goal defined
    Should only be created for the gamification_challenge object
    """
    _name = 'gamification.challenge.line'
    _description = 'Gamification generic goal for challenge'
    _order = "sequence, id"

    def on_change_definition_id(self, cr, uid, ids, definition_id=False, context=None):
        """Propagate condition and suffix from the selected goal definition."""
        if not definition_id:
            return {'value': {'definition_id': False}}
        # do not reuse one variable for both the pool and the record
        definition_obj = self.pool.get('gamification.goal.definition')
        goal_definition = definition_obj.browse(cr, uid, definition_id, context=context)
        return {
            'value': {
                'condition': goal_definition.condition,
                'definition_full_suffix': goal_definition.full_suffix
            }
        }

    _columns = {
        'name': fields.related('definition_id', 'name', string="Name", type="char"),
        'challenge_id': fields.many2one('gamification.challenge',
            string='Challenge',
            required=True,
            ondelete="cascade"),
        'definition_id': fields.many2one('gamification.goal.definition',
            string='Goal Definition',
            required=True,
            ondelete="cascade"),
        'target_goal': fields.float('Target Value to Reach',
            required=True),
        'sequence': fields.integer('Sequence',
            help='Sequence number for ordering'),
        'condition': fields.related('definition_id', 'condition', type="selection",
            readonly=True, string="Condition", selection=[('lower', '<='), ('higher', '>=')]),
        'definition_suffix': fields.related('definition_id', 'suffix', type="char", readonly=True, string="Unit"),
        'definition_monetary': fields.related('definition_id', 'monetary', type="boolean", readonly=True, string="Monetary"),
        'definition_full_suffix': fields.related('definition_id', 'full_suffix', type="char", readonly=True, string="Suffix"),
    }

    # BUG FIX: this dict was previously named ``_default`` and therefore
    # silently ignored by the osv framework (which reads ``_defaults``),
    # so new lines never got the intended sequence of 1.
    _defaults = {
        'sequence': 1,
    }
# ---------------------------------------------------------------------------
# Source: nashve/mythbox (refs/heads/master)
# Path: resources/lib/twisted/twisted/test/test_internet.py
# ---------------------------------------------------------------------------
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for lots of functionality provided by L{twisted.internet}.
"""
import os
import sys
import time
from twisted.trial import unittest
from twisted.internet import reactor, protocol, error, abstract, defer
from twisted.internet import interfaces, base
try:
from twisted.internet import ssl
except ImportError:
ssl = None
if ssl and not ssl.supported:
ssl = None
from twisted.internet.defer import Deferred, maybeDeferred
from twisted.python import util, runtime
class ThreePhaseEventTests(unittest.TestCase):
"""
Tests for the private implementation helpers for system event triggers.
"""
def setUp(self):
    """
    Create a trigger, an argument, and an event to be used by tests.
    """
    self.arg = object()
    def noOpTrigger(x):
        return None
    self.trigger = noOpTrigger
    self.event = base._ThreePhaseEvent()
def test_addInvalidPhase(self):
    """
    L{_ThreePhaseEvent.addTrigger} should raise L{KeyError} when called
    with an invalid phase.
    """
    addTrigger = self.event.addTrigger
    self.assertRaises(KeyError, addTrigger, 'xxx', self.trigger, self.arg)
def test_addBeforeTrigger(self):
    """
    Registering a callable for the C{'before'} phase should record it,
    together with its positional arguments, in the before list.
    """
    self.event.addTrigger('before', self.trigger, self.arg)
    expected = [(self.trigger, (self.arg,), {})]
    self.assertEqual(self.event.before, expected)
def test_addDuringTrigger(self):
    """
    Registering a callable for the C{'during'} phase should record it,
    together with its positional arguments, in the during list.
    """
    self.event.addTrigger('during', self.trigger, self.arg)
    expected = [(self.trigger, (self.arg,), {})]
    self.assertEqual(self.event.during, expected)
def test_addAfterTrigger(self):
    """
    Registering a callable for the C{'after'} phase should record it,
    together with its positional arguments, in the after list.
    """
    self.event.addTrigger('after', self.trigger, self.arg)
    expected = [(self.trigger, (self.arg,), {})]
    self.assertEqual(self.event.after, expected)
def test_removeTrigger(self):
    """
    L{_ThreePhaseEvent.removeTrigger} should accept an opaque object
    previously returned by L{_ThreePhaseEvent.addTrigger} and remove the
    associated trigger.
    """
    opaqueHandle = self.event.addTrigger('before', self.trigger, self.arg)
    self.event.removeTrigger(opaqueHandle)
    self.assertEqual(self.event.before, [])
def test_removeNonexistentTrigger(self):
    """
    L{_ThreePhaseEvent.removeTrigger} should raise L{ValueError} when given
    an object not previously returned by L{_ThreePhaseEvent.addTrigger}.
    """
    bogusHandle = object()
    self.assertRaises(ValueError, self.event.removeTrigger, bogusHandle)
def test_removeRemovedTrigger(self):
    """
    Removing the same trigger handle a second time should fail with
    L{ValueError}.
    """
    ticket = self.event.addTrigger('before', self.trigger, self.arg)
    self.event.removeTrigger(ticket)
    self.assertRaises(ValueError, self.event.removeTrigger, ticket)
def test_removeAlmostValidTrigger(self):
    """
    L{_ThreePhaseEvent.removeTrigger} should raise L{KeyError} if it is
    given a trigger handle which resembles a valid trigger handle aside
    from its phase being incorrect.
    """
    almostValid = ('xxx', self.trigger, (self.arg,), {})
    self.assertRaises(KeyError, self.event.removeTrigger, almostValid)
def test_fireEvent(self):
    """
    L{_ThreePhaseEvent.fireEvent} should call I{before}, I{during}, and
    I{after} phase triggers in that order.
    """
    observed = []
    # register two triggers per phase; per-phase registration order is
    # 'first' then 'second', so firing order is deterministic
    for ordinal in ['first', 'second']:
        for phase in ['before', 'during', 'after']:
            self.event.addTrigger(phase, observed.append, (ordinal, phase))

    self.assertEqual(observed, [])
    self.event.fireEvent()
    self.assertEqual(observed,
                     [('first', 'before'), ('second', 'before'),
                      ('first', 'during'), ('second', 'during'),
                      ('first', 'after'), ('second', 'after')])
def test_asynchronousBefore(self):
    """
    L{_ThreePhaseEvent.fireEvent} should wait for any L{Deferred} returned
    by a I{before} phase trigger before proceeding to I{during} events.
    """
    observed = []
    pendingBefore = Deferred()
    self.event.addTrigger('before', lambda: pendingBefore)
    self.event.addTrigger('during', observed.append, 'during')
    self.event.addTrigger('after', observed.append, 'after')

    self.assertEqual(observed, [])
    self.event.fireEvent()
    # still blocked on the before-phase Deferred
    self.assertEqual(observed, [])
    pendingBefore.callback(None)
    self.assertEqual(observed, ['during', 'after'])
def test_beforeTriggerException(self):
    """
    If a before-phase trigger raises a synchronous exception, it should be
    logged and the remaining triggers should be run.
    """
    class DeliberateFailure(Exception):
        pass

    def brokenTrigger():
        raise DeliberateFailure()

    observed = []
    self.event.addTrigger('before', brokenTrigger)
    self.event.addTrigger('before', observed.append, 'before')
    self.event.addTrigger('during', observed.append, 'during')
    self.event.fireEvent()
    self.assertEqual(observed, ['before', 'during'])
    self.assertEqual(len(self.flushLoggedErrors(DeliberateFailure)), 1)
def test_duringTriggerException(self):
    """
    If a during-phase trigger raises a synchronous exception, it should be
    logged and the remaining triggers should be run.
    """
    class DeliberateFailure(Exception):
        pass

    def brokenTrigger():
        raise DeliberateFailure()

    observed = []
    self.event.addTrigger('during', brokenTrigger)
    self.event.addTrigger('during', observed.append, 'during')
    self.event.addTrigger('after', observed.append, 'after')
    self.event.fireEvent()
    self.assertEqual(observed, ['during', 'after'])
    self.assertEqual(len(self.flushLoggedErrors(DeliberateFailure)), 1)
def test_synchronousRemoveAlreadyExecutedBefore(self):
    """
    If a before-phase trigger tries to remove another before-phase trigger
    which has already run, a warning should be emitted.
    """
    observed = []

    def removeFirstTrigger():
        self.event.removeTrigger(firstHandle)

    firstHandle = self.event.addTrigger('before', observed.append, ('first', 'before'))
    self.event.addTrigger('before', removeFirstTrigger)
    self.event.addTrigger('before', observed.append, ('second', 'before'))
    self.assertWarns(
        DeprecationWarning,
        "Removing already-fired system event triggers will raise an "
        "exception in a future version of Twisted.",
        __file__,
        self.event.fireEvent)
    self.assertEqual(observed, [('first', 'before'), ('second', 'before')])
def test_synchronousRemovePendingBefore(self):
"""
If a before-phase trigger removes another before-phase trigger which
has not yet run, the removed trigger should not be run.
"""
events = []
self.event.addTrigger(
'before', lambda: self.event.removeTrigger(beforeHandle))
beforeHandle = self.event.addTrigger(
'before', events.append, ('first', 'before'))
self.event.addTrigger('before', events.append, ('second', 'before'))
self.event.fireEvent()
self.assertEqual(events, [('second', 'before')])
def test_synchronousBeforeRemovesDuring(self):
"""
If a before-phase trigger removes a during-phase trigger, the
during-phase trigger should not be run.
"""
events = []
self.event.addTrigger(
'before', lambda: self.event.removeTrigger(duringHandle))
duringHandle = self.event.addTrigger('during', events.append, 'during')
self.event.addTrigger('after', events.append, 'after')
self.event.fireEvent()
self.assertEqual(events, ['after'])
    def test_asynchronousBeforeRemovesDuring(self):
        """
        If a before-phase trigger returns a L{Deferred} and later removes a
        during-phase trigger before the L{Deferred} fires, the during-phase
        trigger should not be run.
        """
        events = []
        beforeResult = Deferred()
        self.event.addTrigger('before', lambda: beforeResult)
        duringHandle = self.event.addTrigger('during', events.append, 'during')
        self.event.addTrigger('after', events.append, 'after')
        # Fire the event; it stalls waiting for beforeResult to fire.
        self.event.fireEvent()
        # While the event is stalled, remove the during-phase trigger.
        self.event.removeTrigger(duringHandle)
        beforeResult.callback(None)
        # Only the after-phase trigger should have run.
        self.assertEqual(events, ['after'])
    def test_synchronousBeforeRemovesConspicuouslySimilarDuring(self):
        """
        If a before-phase trigger removes a during-phase trigger which is
        identical to an already-executed before-phase trigger aside from their
        phases, no warning should be emitted and the during-phase trigger
        should not be run.
        """
        events = []
        def trigger():
            events.append('trigger')
        # The same callable is registered for 'before' and for 'during';
        # only the phase distinguishes the two registrations.
        self.event.addTrigger('before', trigger)
        self.event.addTrigger(
            'before', lambda: self.event.removeTrigger(duringTrigger))
        duringTrigger = self.event.addTrigger('during', trigger)
        self.event.fireEvent()
        # Only the before-phase registration actually ran.
        self.assertEqual(events, ['trigger'])
def test_synchronousRemovePendingDuring(self):
"""
If a during-phase trigger removes another during-phase trigger which
has not yet run, the removed trigger should not be run.
"""
events = []
self.event.addTrigger(
'during', lambda: self.event.removeTrigger(duringHandle))
duringHandle = self.event.addTrigger(
'during', events.append, ('first', 'during'))
self.event.addTrigger(
'during', events.append, ('second', 'during'))
self.event.fireEvent()
self.assertEqual(events, [('second', 'during')])
def test_triggersRunOnce(self):
"""
A trigger should only be called on the first call to
L{_ThreePhaseEvent.fireEvent}.
"""
events = []
self.event.addTrigger('before', events.append, 'before')
self.event.addTrigger('during', events.append, 'during')
self.event.addTrigger('after', events.append, 'after')
self.event.fireEvent()
self.event.fireEvent()
self.assertEqual(events, ['before', 'during', 'after'])
    def test_finishedBeforeTriggersCleared(self):
        """
        The temporary list L{_ThreePhaseEvent.finishedBefore} should be emptied
        and the state reset to C{'BASE'} before the first during-phase trigger
        executes.
        """
        events = []
        def duringTrigger():
            events.append('during')
            # Inspect the event's internal bookkeeping from inside a
            # during-phase trigger: the before-phase records must already
            # have been cleared by this point.
            self.assertEqual(self.event.finishedBefore, [])
            self.assertEqual(self.event.state, 'BASE')
        self.event.addTrigger('before', events.append, 'before')
        self.event.addTrigger('during', duringTrigger)
        self.event.fireEvent()
        self.assertEqual(events, ['before', 'during'])
class SystemEventTestCase(unittest.TestCase):
    """
    Tests for the reactor's implementation of the C{fireSystemEvent},
    C{addSystemEventTrigger}, and C{removeSystemEventTrigger} methods of the
    L{IReactorCore} interface.
    @ivar triggers: A list of the handles to triggers which have been added to
        the reactor.
    """
    def setUp(self):
        """
        Create an empty list in which to store trigger handles.
        """
        self.triggers = []
    def tearDown(self):
        """
        Remove all remaining triggers from the reactor.
        """
        while self.triggers:
            trigger = self.triggers.pop()
            try:
                reactor.removeSystemEventTrigger(trigger)
            except (ValueError, KeyError):
                # The trigger was already removed by the test (or consumed by
                # firing); either way there is nothing left to clean up.
                pass
    def addTrigger(self, event, phase, func):
        """
        Add a trigger to the reactor and remember it in C{self.triggers}.
        """
        # NOTE(review): the parameter names look swapped relative to use -
        # every caller in this class passes the phase first and the event
        # type second, matching reactor.addSystemEventTrigger(phase,
        # eventType, f).  Confirm and consider renaming for clarity.
        t = reactor.addSystemEventTrigger(event, phase, func)
        self.triggers.append(t)
        return t
    def removeTrigger(self, trigger):
        """
        Remove a trigger by its handle from the reactor and from
        C{self.triggers}.
        """
        reactor.removeSystemEventTrigger(trigger)
        self.triggers.remove(trigger)
    def _addSystemEventTriggerTest(self, phase):
        """
        Add a trigger for the given C{phase} of a test event and verify it is
        called only once the event is fired.
        """
        eventType = 'test'
        events = []
        def trigger():
            events.append(None)
        self.addTrigger(phase, eventType, trigger)
        self.assertEqual(events, [])
        reactor.fireSystemEvent(eventType)
        self.assertEqual(events, [None])
    def test_beforePhase(self):
        """
        L{IReactorCore.addSystemEventTrigger} should accept the C{'before'}
        phase and not call the given object until the right event is fired.
        """
        self._addSystemEventTriggerTest('before')
    def test_duringPhase(self):
        """
        L{IReactorCore.addSystemEventTrigger} should accept the C{'during'}
        phase and not call the given object until the right event is fired.
        """
        self._addSystemEventTriggerTest('during')
    def test_afterPhase(self):
        """
        L{IReactorCore.addSystemEventTrigger} should accept the C{'after'}
        phase and not call the given object until the right event is fired.
        """
        self._addSystemEventTriggerTest('after')
    def test_unknownPhase(self):
        """
        L{IReactorCore.addSystemEventTrigger} should reject phases other than
        C{'before'}, C{'during'}, or C{'after'}.
        """
        eventType = 'test'
        self.assertRaises(
            KeyError, self.addTrigger, 'xxx', eventType, lambda: None)
    def test_beforePreceedsDuring(self):
        """
        L{IReactorCore.addSystemEventTrigger} should call triggers added to the
        C{'before'} phase before it calls triggers added to the C{'during'}
        phase.
        """
        eventType = 'test'
        events = []
        def beforeTrigger():
            events.append('before')
        def duringTrigger():
            events.append('during')
        self.addTrigger('before', eventType, beforeTrigger)
        self.addTrigger('during', eventType, duringTrigger)
        self.assertEqual(events, [])
        reactor.fireSystemEvent(eventType)
        self.assertEqual(events, ['before', 'during'])
    def test_duringPreceedsAfter(self):
        """
        L{IReactorCore.addSystemEventTrigger} should call triggers added to the
        C{'during'} phase before it calls triggers added to the C{'after'}
        phase.
        """
        eventType = 'test'
        events = []
        def duringTrigger():
            events.append('during')
        def afterTrigger():
            events.append('after')
        self.addTrigger('during', eventType, duringTrigger)
        self.addTrigger('after', eventType, afterTrigger)
        self.assertEqual(events, [])
        reactor.fireSystemEvent(eventType)
        self.assertEqual(events, ['during', 'after'])
    def test_beforeReturnsDeferred(self):
        """
        If a trigger added to the C{'before'} phase of an event returns a
        L{Deferred}, the C{'during'} phase should be delayed until it is called
        back.
        """
        triggerDeferred = Deferred()
        eventType = 'test'
        events = []
        def beforeTrigger():
            return triggerDeferred
        def duringTrigger():
            events.append('during')
        self.addTrigger('before', eventType, beforeTrigger)
        self.addTrigger('during', eventType, duringTrigger)
        self.assertEqual(events, [])
        reactor.fireSystemEvent(eventType)
        # Nothing may happen until the Deferred fires.
        self.assertEqual(events, [])
        triggerDeferred.callback(None)
        self.assertEqual(events, ['during'])
    def test_multipleBeforeReturnDeferred(self):
        """
        If more than one trigger added to the C{'before'} phase of an event
        return L{Deferred}s, the C{'during'} phase should be delayed until they
        are all called back.
        """
        firstDeferred = Deferred()
        secondDeferred = Deferred()
        eventType = 'test'
        events = []
        def firstBeforeTrigger():
            return firstDeferred
        def secondBeforeTrigger():
            return secondDeferred
        def duringTrigger():
            events.append('during')
        self.addTrigger('before', eventType, firstBeforeTrigger)
        self.addTrigger('before', eventType, secondBeforeTrigger)
        self.addTrigger('during', eventType, duringTrigger)
        self.assertEqual(events, [])
        reactor.fireSystemEvent(eventType)
        self.assertEqual(events, [])
        # Firing only one of the two Deferreds is not enough.
        firstDeferred.callback(None)
        self.assertEqual(events, [])
        secondDeferred.callback(None)
        self.assertEqual(events, ['during'])
    def test_subsequentBeforeTriggerFiresPriorBeforeDeferred(self):
        """
        If a trigger added to the C{'before'} phase of an event calls back a
        L{Deferred} returned by an earlier trigger in the C{'before'} phase of
        the same event, the remaining C{'before'} triggers for that event
        should be run and any further L{Deferred}s waited on before proceeding
        to the C{'during'} events.
        """
        eventType = 'test'
        events = []
        firstDeferred = Deferred()
        secondDeferred = Deferred()
        def firstBeforeTrigger():
            return firstDeferred
        def secondBeforeTrigger():
            # Reentrantly completes the previous trigger's Deferred while the
            # before phase is still in progress.
            firstDeferred.callback(None)
        def thirdBeforeTrigger():
            events.append('before')
            return secondDeferred
        def duringTrigger():
            events.append('during')
        self.addTrigger('before', eventType, firstBeforeTrigger)
        self.addTrigger('before', eventType, secondBeforeTrigger)
        self.addTrigger('before', eventType, thirdBeforeTrigger)
        self.addTrigger('during', eventType, duringTrigger)
        self.assertEqual(events, [])
        reactor.fireSystemEvent(eventType)
        # The third before trigger ran, but its Deferred still gates 'during'.
        self.assertEqual(events, ['before'])
        secondDeferred.callback(None)
        self.assertEqual(events, ['before', 'during'])
    def test_removeSystemEventTrigger(self):
        """
        A trigger removed with L{IReactorCore.removeSystemEventTrigger} should
        not be called when the event fires.
        """
        eventType = 'test'
        events = []
        def firstBeforeTrigger():
            events.append('first')
        def secondBeforeTrigger():
            events.append('second')
        self.addTrigger('before', eventType, firstBeforeTrigger)
        self.removeTrigger(
            self.addTrigger('before', eventType, secondBeforeTrigger))
        self.assertEqual(events, [])
        reactor.fireSystemEvent(eventType)
        self.assertEqual(events, ['first'])
    def test_removeNonExistentSystemEventTrigger(self):
        """
        Passing an object to L{IReactorCore.removeSystemEventTrigger} which was
        not returned by a previous call to
        L{IReactorCore.addSystemEventTrigger} or which has already been passed
        to C{removeSystemEventTrigger} should result in L{TypeError},
        L{KeyError}, or L{ValueError} being raised.
        """
        b = self.addTrigger('during', 'test', lambda: None)
        self.removeTrigger(b)
        # Not a trigger handle at all.
        self.assertRaises(
            TypeError, reactor.removeSystemEventTrigger, None)
        # A handle which has already been removed.
        self.assertRaises(
            ValueError, reactor.removeSystemEventTrigger, b)
        # A handle-shaped object naming an event type never registered.
        self.assertRaises(
            KeyError,
            reactor.removeSystemEventTrigger,
            (b[0], ('xxx',) + b[1][1:]))
    def test_interactionBetweenDifferentEvents(self):
        """
        L{IReactorCore.fireSystemEvent} should behave the same way for a
        particular system event regardless of whether Deferreds are being
        waited on for a different system event.
        """
        events = []
        firstEvent = 'first-event'
        firstDeferred = Deferred()
        def beforeFirstEvent():
            events.append(('before', 'first'))
            return firstDeferred
        def afterFirstEvent():
            events.append(('after', 'first'))
        secondEvent = 'second-event'
        secondDeferred = Deferred()
        def beforeSecondEvent():
            events.append(('before', 'second'))
            return secondDeferred
        def afterSecondEvent():
            events.append(('after', 'second'))
        self.addTrigger('before', firstEvent, beforeFirstEvent)
        self.addTrigger('after', firstEvent, afterFirstEvent)
        self.addTrigger('before', secondEvent, beforeSecondEvent)
        self.addTrigger('after', secondEvent, afterSecondEvent)
        self.assertEqual(events, [])
        # After this, firstEvent should be stuck before 'during' waiting for
        # firstDeferred.
        reactor.fireSystemEvent(firstEvent)
        self.assertEqual(events, [('before', 'first')])
        # After this, secondEvent should be stuck before 'during' waiting for
        # secondDeferred.
        reactor.fireSystemEvent(secondEvent)
        self.assertEqual(events, [('before', 'first'), ('before', 'second')])
        # After this, firstEvent should have finished completely, but
        # secondEvent should be at the same place.
        firstDeferred.callback(None)
        self.assertEqual(events, [('before', 'first'), ('before', 'second'),
                                  ('after', 'first')])
        # After this, secondEvent should have finished completely.
        secondDeferred.callback(None)
        self.assertEqual(events, [('before', 'first'), ('before', 'second'),
                                  ('after', 'first'), ('after', 'second')])
class TimeTestCase(unittest.TestCase):
    """
    Tests for the IReactorTime part of the reactor.
    """
    def test_seconds(self):
        """
        L{twisted.internet.reactor.seconds} should return something
        like a number.
        1. This test specifically does not assert any relation to the
           "system time" as returned by L{time.time} or
           L{twisted.python.runtime.seconds}, because at some point we
           may find a better option for scheduling calls than
           wallclock-time.
        2. This test *also* does not assert anything about the type of
           the result, because operations may not return ints or
           floats: For example, datetime-datetime == timedelta(0).
        """
        now = reactor.seconds()
        # Only require that the value supports consistent arithmetic.
        self.assertEqual(now - now + now, now)

    def test_callLaterUsesReactorSecondsInDelayedCall(self):
        """
        L{reactor.callLater} should use the reactor's seconds factory
        to produce the time at which the DelayedCall will be called.
        """
        oseconds = reactor.seconds
        reactor.seconds = lambda: 100
        try:
            call = reactor.callLater(5, lambda: None)
            self.assertEqual(call.getTime(), 105)
        finally:
            # Always restore the real seconds factory, even on failure.
            reactor.seconds = oseconds

    def test_callLaterUsesReactorSecondsAsDelayedCallSecondsFactory(self):
        """
        L{reactor.callLater} should propagate its own seconds factory
        to the DelayedCall to use as its own seconds factory.
        """
        oseconds = reactor.seconds
        reactor.seconds = lambda: 100
        try:
            call = reactor.callLater(5, lambda: None)
            self.assertEqual(call.seconds(), 100)
        finally:
            reactor.seconds = oseconds

    def test_callLater(self):
        """
        Test that a DelayedCall really calls the function it is
        supposed to call.
        """
        d = Deferred()
        reactor.callLater(0, d.callback, None)
        d.addCallback(self.assertEqual, None)
        return d

    def test_cancelDelayedCall(self):
        """
        Test that when a DelayedCall is cancelled it does not run.
        """
        called = []
        def function():
            called.append(None)
        call = reactor.callLater(0, function)
        call.cancel()
        # Schedule a call in two "iterations" to check to make sure that the
        # above call never ran.
        d = Deferred()
        def check():
            try:
                self.assertEqual(called, [])
            except:
                # Route the failure into the Deferred so trial reports it.
                d.errback()
            else:
                d.callback(None)
        reactor.callLater(0, reactor.callLater, 0, check)
        return d

    def test_cancelCancelledDelayedCall(self):
        """
        Test that cancelling a DelayedCall which has already been cancelled
        raises the appropriate exception.
        """
        call = reactor.callLater(0, lambda: None)
        call.cancel()
        self.assertRaises(error.AlreadyCancelled, call.cancel)

    def test_cancelCalledDelayedCallSynchronous(self):
        """
        Test that cancelling a DelayedCall in the DelayedCall's function as
        that function is being invoked by the DelayedCall raises the
        appropriate exception.
        """
        d = Deferred()
        def later():
            try:
                self.assertRaises(error.AlreadyCalled, call.cancel)
            except:
                d.errback()
            else:
                d.callback(None)
        call = reactor.callLater(0, later)
        return d

    def test_cancelCalledDelayedCallAsynchronous(self):
        """
        Test that cancelling a DelayedCall after it has run its function
        raises the appropriate exception.
        """
        d = Deferred()
        def check():
            try:
                self.assertRaises(error.AlreadyCalled, call.cancel)
            except:
                d.errback()
            else:
                d.callback(None)
        def later():
            reactor.callLater(0, check)
        call = reactor.callLater(0, later)
        return d

    def testCallLaterTime(self):
        """
        L{IDelayedCall.getTime} should reflect the wallclock time at which
        the call is scheduled to run.
        """
        d = reactor.callLater(10, lambda: None)
        try:
            self.assertTrue(d.getTime() - (time.time() + 10) < 1)
        finally:
            d.cancel()

    def testCallLaterOrder(self):
        """
        Calls scheduled for the same (or earlier) times should be run in
        the order in which they were scheduled.
        """
        l = []
        l2 = []
        def f(x):
            l.append(x)
        def f2(x):
            l2.append(x)
        def done():
            self.assertEqual(l, range(20))
        def done2():
            self.assertEqual(l2, range(10))
        for n in range(10):
            reactor.callLater(0, f, n)
        for n in range(10):
            reactor.callLater(0, f, n + 10)
            reactor.callLater(0.1, f2, n)
        reactor.callLater(0, done)
        reactor.callLater(0.1, done2)
        d = Deferred()
        reactor.callLater(0.2, d.callback, None)
        return d
    testCallLaterOrder.todo = "See bug 1396"
    testCallLaterOrder.skip = "Trial bug, todo doesn't work! See bug 1397"

    def testCallLaterOrder2(self):
        """
        Like L{testCallLaterOrder}, but with the clock coarsened to whole
        seconds so ordering bugs show up even on fine-grained clocks.
        """
        # This time destroy the clock resolution so that it fails reliably
        # even on systems that don't have a crappy clock resolution.
        def seconds():
            return int(time.time())
        base_original = base.seconds
        runtime_original = runtime.seconds
        base.seconds = seconds
        runtime.seconds = seconds
        def cleanup(x):
            # Restore the patched clock sources whether or not the inner
            # test succeeded.
            runtime.seconds = runtime_original
            base.seconds = base_original
            return x
        return maybeDeferred(self.testCallLaterOrder).addBoth(cleanup)
    testCallLaterOrder2.todo = "See bug 1396"
    testCallLaterOrder2.skip = "Trial bug, todo doesn't work! See bug 1397"

    def testDelayedCallStringification(self):
        # Mostly just make sure str() isn't going to raise anything for
        # DelayedCalls within reason.
        dc = reactor.callLater(0, lambda x, y: None, 'x', y=10)
        str(dc)
        dc.reset(5)
        str(dc)
        dc.cancel()
        str(dc)
        dc = reactor.callLater(0, lambda: None, x=[({'hello': u'world'}, 10j), reactor], *range(10))
        str(dc)
        dc.cancel()
        str(dc)
        def calledBack(ignored):
            # str() must also work after the call has actually fired.
            str(dc)
        d = Deferred().addCallback(calledBack)
        dc = reactor.callLater(0, d.callback, None)
        str(dc)
        return d

    def testDelayedCallSecondsOverride(self):
        """
        Test that the C{seconds} argument to DelayedCall gets used instead of
        the default timing function, if it is not None.
        """
        def seconds():
            return 10
        dc = base.DelayedCall(5, lambda: None, (), {}, lambda dc: None,
                              lambda dc: None, seconds)
        self.assertEqual(dc.getTime(), 5)
        # reset() re-anchors the call relative to the injected clock (10),
        # not the real one.
        dc.reset(3)
        self.assertEqual(dc.getTime(), 13)
class CallFromThreadTests(unittest.TestCase):
    def testWakeUp(self):
        """
        A call scheduled from another thread should wake up the reactor so
        the Deferred fires without further prompting.
        """
        # Make sure other threads can wake up the reactor
        d = Deferred()
        def wake():
            time.sleep(0.1)
            # callFromThread will call wakeUp for us
            reactor.callFromThread(d.callback, None)
        reactor.callInThread(wake)
        return d
    if interfaces.IReactorThreads(reactor, None) is None:
        testWakeUp.skip = "Nothing to wake up for without thread support"

    def _stopCallFromThreadCallback(self):
        # Scheduled with callLater(0); if it runs, the reactor kept servicing
        # ordinary timed calls despite callFromThread callbacks re-queueing
        # themselves.
        self.stopped = True

    def _callFromThreadCallback(self, d):
        reactor.callFromThread(self._callFromThreadCallback2, d)
        reactor.callLater(0, self._stopCallFromThreadCallback)

    def _callFromThreadCallback2(self, d):
        try:
            # assertTrue replaces the deprecated assert_ alias.
            self.assertTrue(self.stopped)
        except:
            # Send the error to the deferred
            d.errback()
        else:
            d.callback(None)

    def testCallFromThreadStops(self):
        """
        Ensure that callFromThread from inside a callFromThread
        callback doesn't sit in an infinite loop and lets other
        things happen too.
        """
        self.stopped = False
        d = defer.Deferred()
        reactor.callFromThread(self._callFromThreadCallback, d)
        return d
class DelayedTestCase(unittest.TestCase):
    """
    Tests for L{IReactorTime.getDelayedCalls}, exercising a mixture of
    scheduled, fired, and cancelled calls.
    """
    def setUp(self):
        self.finished = 0
        self.counter = 0
        # Maps a tag (allocated from self.counter) to its live DelayedCall.
        self.timers = {}
        self.deferred = defer.Deferred()

    def tearDown(self):
        # Cancel anything still pending so it cannot fire after the test.
        for t in self.timers.values():
            t.cancel()

    def checkTimers(self):
        """
        Assert that every DelayedCall still tracked in C{self.timers} is
        reported by C{reactor.getDelayedCalls}.
        """
        l1 = self.timers.values()
        l2 = list(reactor.getDelayedCalls())
        # There should be at least the calls we put in. There may be other
        # calls that are none of our business and that we should ignore,
        # though.
        missing = []
        for dc in l1:
            if dc not in l2:
                missing.append(dc)
        if missing:
            self.finished = 1
        # assertFalse replaces the deprecated failIf alias.
        self.assertFalse(missing, "Should have been missing no calls, instead was missing " + repr(missing))

    def callback(self, tag):
        # A timer fired: stop tracking it and re-verify the remainder.
        del self.timers[tag]
        self.checkTimers()

    def addCallback(self, tag):
        # A timer callback which also schedules a further timer.
        self.callback(tag)
        self.addTimer(15, self.callback)

    def done(self, tag):
        self.finished = 1
        self.callback(tag)
        self.deferred.callback(None)

    def addTimer(self, when, callback):
        """
        Schedule C{callback} after C{when} hundredths of a second and track
        the resulting DelayedCall under a freshly allocated tag.
        """
        self.timers[self.counter] = reactor.callLater(when * 0.01, callback,
                                                      self.counter)
        self.counter += 1
        self.checkTimers()

    def testGetDelayedCalls(self):
        if not hasattr(reactor, "getDelayedCalls"):
            return
        # This is not a race because we don't do anything which might call
        # the reactor until we have all the timers set up. If we did, this
        # test might fail on slow systems.
        self.checkTimers()
        self.addTimer(35, self.done)
        self.addTimer(20, self.callback)
        self.addTimer(30, self.callback)
        which = self.counter
        self.addTimer(29, self.callback)
        self.addTimer(25, self.addCallback)
        self.addTimer(26, self.callback)
        # Cancel one timer directly and forget it; the remaining timers must
        # still all be reported.
        self.timers[which].cancel()
        del self.timers[which]
        self.checkTimers()
        self.deferred.addCallback(lambda x: self.checkTimers())
        return self.deferred

    def test_active(self):
        """
        L{IDelayedCall.active} returns False once the call has run.
        """
        dcall = reactor.callLater(0.01, self.deferred.callback, True)
        self.assertEqual(dcall.active(), True)
        def checkDeferredCall(success):
            self.assertEqual(dcall.active(), False)
            return success
        self.deferred.addCallback(checkDeferredCall)
        return self.deferred
resolve_helper = """
import %(reactor)s
%(reactor)s.install()
from twisted.internet import reactor
class Foo:
def __init__(self):
reactor.callWhenRunning(self.start)
self.timer = reactor.callLater(3, self.failed)
def start(self):
reactor.resolve('localhost').addBoth(self.done)
def done(self, res):
print 'done', res
reactor.stop()
def failed(self):
print 'failed'
self.timer = None
reactor.stop()
f = Foo()
reactor.run()
"""
class ChildResolveProtocol(protocol.ProcessProtocol):
    """
    Process protocol which buffers the child's stdout and stderr and fires
    C{onCompletion} with C{(reason, output, error)} when the process ends.
    """
    def __init__(self, onCompletion):
        self.onCompletion = onCompletion

    def connectionMade(self):
        # Fresh buffers for everything the child writes on each stream.
        self.output, self.error = [], []

    def outReceived(self, out):
        self.output.append(out)

    def errReceived(self, err):
        self.error.append(err)

    def processEnded(self, reason):
        result = (reason, self.output, self.error)
        self.onCompletion.callback(result)
        # Drop the reference so the Deferred cannot be fired twice.
        self.onCompletion = None
class Resolve(unittest.TestCase):
    def testChildResolve(self):
        """
        Spawn a child Python process (built from C{resolve_helper}) which
        resolves C{'localhost'} after its reactor starts, and assert that it
        prints the expected result.
        """
        # I've seen problems with reactor.run under gtk2reactor. Spawn a
        # child which just does reactor.resolve after the reactor has
        # started, fail if it does not complete in a timely fashion.
        helperPath = os.path.abspath(self.mktemp())
        helperFile = open(helperPath, 'w')
        # Eeueuuggg
        reactorName = reactor.__module__
        helperFile.write(resolve_helper % {'reactor': reactorName})
        helperFile.close()
        # The child needs the same import path as this process to find
        # Twisted.
        env = os.environ.copy()
        env['PYTHONPATH'] = os.pathsep.join(sys.path)
        helperDeferred = Deferred()
        helperProto = ChildResolveProtocol(helperDeferred)
        reactor.spawnProcess(helperProto, sys.executable, ("python", "-u", helperPath), env)
        # NOTE: the tuple-parameter unpacking in the signature below is
        # Python 2 only syntax.
        def cbFinished((reason, output, error)):
            # If the output is "done 127.0.0.1\n" we don't really care what
            # else happened.
            output = ''.join(output)
            if output != 'done 127.0.0.1\n':
                self.fail((
                    "The child process failed to produce the desired results:\n"
                    "   Reason for termination was: %r\n"
                    "   Output stream was: %r\n"
                    "   Error stream was: %r\n") % (reason.getErrorMessage(), output, ''.join(error)))
        helperDeferred.addCallback(cbFinished)
        return helperDeferred
# Without process support there is no way to spawn the child-process helper,
# so skip the whole suite on such reactors.
if not interfaces.IReactorProcess(reactor, None):
    Resolve.skip = "cannot run test: reactor doesn't support IReactorProcess"
class CallFromThreadTestCase(unittest.TestCase):
    """
    Task scheduling from threads tests.
    """
    if interfaces.IReactorThreads(reactor, None) is None:
        skip = "Nothing to test without thread support"

    def setUp(self):
        self.counter = 0
        self.deferred = Deferred()

    def schedule(self, *args, **kwargs):
        """
        Override in subclasses.
        """
        reactor.callFromThread(*args, **kwargs)

    def test_lotsOfThreadsAreScheduledCorrectly(self):
        """
        L{IReactorThreads.callFromThread} can be used to schedule a large
        number of calls in the reactor thread.
        """
        def addAndMaybeFinish():
            self.counter += 1
            if self.counter == 100:
                self.deferred.callback(True)
        for i in xrange(100):
            self.schedule(addAndMaybeFinish)
        return self.deferred

    def test_threadsAreRunInScheduledOrder(self):
        """
        Callbacks should be invoked in the order they were scheduled.
        """
        order = []
        def check(_):
            self.assertEqual(order, [1, 2, 3])
        self.deferred.addCallback(check)
        self.schedule(order.append, 1)
        self.schedule(order.append, 2)
        self.schedule(order.append, 3)
        # Fire the Deferred only after the three appends have run.
        self.schedule(reactor.callFromThread, self.deferred.callback, None)
        return self.deferred

    def test_scheduledThreadsNotRunUntilReactorRuns(self):
        """
        Scheduled tasks should not be run until the reactor starts running.
        """
        def incAndFinish():
            self.counter = 1
            self.deferred.callback(True)
        self.schedule(incAndFinish)
        # Callback shouldn't have fired yet.
        self.assertEqual(self.counter, 0)
        return self.deferred
class MyProtocol(protocol.Protocol):
    """
    Sample protocol.
    A do-nothing protocol; only its class identity is used by the tests.
    """
class MyFactory(protocol.Factory):
    """
    Sample factory.
    Builds L{MyProtocol} instances via the inherited C{buildProtocol}.
    """
    # Class the inherited buildProtocol instantiates.
    protocol = MyProtocol
class ProtocolTestCase(unittest.TestCase):
    def testFactory(self):
        """
        L{protocol.Factory.buildProtocol} should return an instance of the
        factory's C{protocol} class with its C{factory} attribute set to the
        factory which created it.
        """
        factory = MyFactory()
        # Named 'proto' rather than 'protocol': the original local shadowed
        # the twisted.internet.protocol module imported by this file.
        proto = factory.buildProtocol(None)
        self.assertEqual(proto.factory, factory)
        # assertTrue replaces the deprecated assert_ alias.
        self.assertTrue(isinstance(proto, factory.protocol))
class DummyProducer(object):
    """
    Minimal producer which merely records which IProducer methods have been
    invoked on it, so tests can verify how a consumer drives the producer
    registered with it.
    @type events: C{list} of C{str}
    @ivar events: The producer/consumer related events which have happened to
        this producer, in order of occurrence.  Each element is one of
        C{'resume'}, C{'stop'} or C{'pause'}.
    """
    def __init__(self):
        self.events = []

    def _record(self, name):
        # Shared implementation for the three IProducer callbacks.
        self.events.append(name)

    def resumeProducing(self):
        self._record('resume')

    def stopProducing(self):
        self._record('stop')

    def pauseProducing(self):
        self._record('pause')
class SillyDescriptor(abstract.FileDescriptor):
    """
    A descriptor whose data buffer gets filled very fast.
    Useful for testing FileDescriptor's IConsumer interface, since
    the data buffer fills as soon as at least four characters are
    written to it, and gets emptied in a single doWrite() cycle.
    """
    # Buffering more than bufferSize bytes is what makes the descriptor
    # consider itself "full".
    bufferSize = 3
    # Pretend the transport is already connected so writes are accepted.
    connected = True
    def writeSomeData(self, data):
        """
        Always write all data.
        """
        return len(data)
    def startWriting(self):
        """
        Do nothing: bypass the reactor.
        """
    # Stopping is the same no-op as starting; neither touches the reactor.
    stopWriting = startWriting
class ReentrantProducer(DummyProducer):
    """
    A L{DummyProducer} whose C{resumeProducing} re-enters the consumer it is
    registered with by invoking one of the consumer's own methods.
    @ivar consumer: The consumer with which this producer has been or will
        be registered.
    @ivar methodName: The name of the method to call on the consumer inside
        C{resumeProducing}.
    @ivar methodArgs: The arguments to pass to the consumer method invoked in
        C{resumeProducing}.
    """
    def __init__(self, consumer, methodName, *methodArgs):
        super(ReentrantProducer, self).__init__()
        self.consumer = consumer
        self.methodName = methodName
        self.methodArgs = methodArgs

    def resumeProducing(self):
        # Record the event exactly as DummyProducer does, then immediately
        # call back into the consumer, re-entering it from inside its own
        # resumeProducing call.
        super(ReentrantProducer, self).resumeProducing()
        method = getattr(self.consumer, self.methodName)
        method(*self.methodArgs)
class TestProducer(unittest.TestCase):
    """
    Test abstract.FileDescriptor's consumer interface.
    """
    def test_doubleProducer(self):
        """
        Verify that registering a non-streaming producer invokes its
        resumeProducing() method and that you can only register one producer
        at a time.
        """
        fd = abstract.FileDescriptor()
        fd.connected = 1
        dp = DummyProducer()
        fd.registerProducer(dp, 0)
        self.assertEqual(dp.events, ['resume'])
        # A second registration while the first is still active is an error.
        self.assertRaises(RuntimeError, fd.registerProducer, DummyProducer(), 0)

    def test_unconnectedFileDescriptor(self):
        """
        Verify that registering a producer when the connection has already
        been closed invokes its stopProducing() method.
        """
        fd = abstract.FileDescriptor()
        fd.disconnected = 1
        dp = DummyProducer()
        fd.registerProducer(dp, 0)
        self.assertEqual(dp.events, ['stop'])

    def _dontPausePullConsumerTest(self, methodName):
        """
        Assert that a non-streaming (pull) producer is not paused when the
        consumer method named by C{methodName} fills the write buffer.
        """
        descriptor = SillyDescriptor()
        producer = DummyProducer()
        descriptor.registerProducer(producer, streaming=False)
        self.assertEqual(producer.events, ['resume'])
        del producer.events[:]
        # Fill up the descriptor's write buffer so we can observe whether or
        # not it pauses its producer in that case.
        getattr(descriptor, methodName)('1234')
        # A pull producer must never be paused.
        self.assertEqual(producer.events, [])

    def test_dontPausePullConsumerOnWrite(self):
        """
        Verify that FileDescriptor does not call producer.pauseProducing() on a
        non-streaming pull producer in response to a L{IConsumer.write} call
        which results in a full write buffer. Issue #2286.
        """
        return self._dontPausePullConsumerTest('write')

    def test_dontPausePullConsumerOnWriteSequence(self):
        """
        Like L{test_dontPausePullConsumerOnWrite}, but for a call to
        C{writeSequence} rather than L{IConsumer.write}.
        C{writeSequence} is not part of L{IConsumer}, but
        L{abstract.FileDescriptor} has supported consumery behavior in response
        to calls to L{writeSequence} forever.
        """
        return self._dontPausePullConsumerTest('writeSequence')

    def _reentrantStreamingProducerTest(self, methodName):
        """
        Assert that a streaming producer which re-enters the consumer from
        C{resumeProducing} is paused and resumed correctly across repeated
        C{doWrite} cycles.
        """
        descriptor = SillyDescriptor()
        producer = ReentrantProducer(descriptor, methodName, 'spam')
        descriptor.registerProducer(producer, streaming=True)
        # Start things off by filling up the descriptor's buffer so it will
        # pause its producer.
        getattr(descriptor, methodName)('spam')
        # Sanity check - make sure that worked.
        self.assertEqual(producer.events, ['pause'])
        del producer.events[:]
        # After one call to doWrite, the buffer has been emptied so the
        # FileDescriptor should resume its producer.  That will result in an
        # immediate call to FileDescriptor.write which will again fill the
        # buffer and result in the producer being paused.
        descriptor.doWrite()
        self.assertEqual(producer.events, ['resume', 'pause'])
        del producer.events[:]
        # After a second call to doWrite, the exact same thing should have
        # happened.  Prior to the bugfix for which this test was written,
        # FileDescriptor would have incorrectly believed its producer was
        # already resumed (it was paused) and so not resume it again.
        descriptor.doWrite()
        self.assertEqual(producer.events, ['resume', 'pause'])

    def test_reentrantStreamingProducerUsingWrite(self):
        """
        Verify that FileDescriptor tracks producer's paused state correctly.
        Issue #811, fixed in revision r12857.
        """
        return self._reentrantStreamingProducerTest('write')

    def test_reentrantStreamingProducerUsingWriteSequence(self):
        """
        Like L{test_reentrantStreamingProducerUsingWrite}, but for calls to
        C{writeSequence}.
        C{writeSequence} is B{not} part of L{IConsumer}, however
        C{abstract.FileDescriptor} has supported consumery behavior in response
        to calls to C{writeSequence} forever.
        """
        return self._reentrantStreamingProducerTest('writeSequence')
class PortStringification(unittest.TestCase):
    """
    The str() of a listening port object should mention the port number the
    transport is actually bound to.  Each test method is only defined when
    the running reactor provides the corresponding transport interface.
    """
    if interfaces.IReactorTCP(reactor, None) is not None:
        def testTCP(self):
            # Port 0 asks the OS for any free port; read back the real one.
            p = reactor.listenTCP(0, protocol.ServerFactory())
            portNo = p.getHost().port
            self.assertNotEqual(str(p).find(str(portNo)), -1,
                                "%d not found in %s" % (portNo, p))
            # Returning stopListening's Deferred makes trial wait for the
            # port to be cleaned up.
            return p.stopListening()
    if interfaces.IReactorUDP(reactor, None) is not None:
        def testUDP(self):
            p = reactor.listenUDP(0, protocol.DatagramProtocol())
            portNo = p.getHost().port
            self.assertNotEqual(str(p).find(str(portNo)), -1,
                                "%d not found in %s" % (portNo, p))
            return p.stopListening()
    if interfaces.IReactorSSL(reactor, None) is not None and ssl:
        def testSSL(self, ssl=ssl):
            # ssl is bound as a default argument because the module-level
            # name is only conditionally available.
            pem = util.sibpath(__file__, 'server.pem')
            p = reactor.listenSSL(0, protocol.ServerFactory(), ssl.DefaultOpenSSLContextFactory(pem, pem))
            portNo = p.getHost().port
            self.assertNotEqual(str(p).find(str(portNo)), -1,
                                "%d not found in %s" % (portNo, p))
            return p.stopListening()
|
ojengwa/talk | refs/heads/master | venv/lib/python2.7/site-packages/django/test/__init__.py | 99 | """
Django Unit Test and Doctest framework.
"""
from django.test.client import Client, RequestFactory
from django.test.testcases import (
TestCase, TransactionTestCase,
SimpleTestCase, LiveServerTestCase, skipIfDBFeature,
skipUnlessDBFeature
)
from django.test.utils import modify_settings, override_settings, override_system_checks
# Names re-exported as the public API of the ``django.test`` package.
__all__ = [
    'Client', 'RequestFactory', 'TestCase', 'TransactionTestCase',
    'SimpleTestCase', 'LiveServerTestCase', 'skipIfDBFeature',
    'skipUnlessDBFeature', 'modify_settings', 'override_settings',
    'override_system_checks'
]
|
jotes/boto | refs/heads/develop | boto/rds/dbsnapshot.py | 168 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class DBSnapshot(object):
    """
    Represents a RDS DB Snapshot

    Properties reference available from the AWS documentation at
    http://docs.amazonwebservices.com/AmazonRDS/latest/APIReference/API_DBSnapshot.html

    :ivar engine_version: Specifies the version of the database engine
    :ivar license_model: License model information for the restored DB instance
    :ivar allocated_storage: Specifies the allocated storage size in gigabytes (GB)
    :ivar availability_zone: Specifies the name of the Availability Zone the DB Instance was located in at the time of the DB Snapshot
    :ivar connection: boto.rds.RDSConnection associated with the current object
    :ivar engine: Specifies the name of the database engine
    :ivar id: Specifies the identifier for the DB Snapshot (DBSnapshotIdentifier)
    :ivar instance_create_time: Specifies the time (UTC) when the snapshot was taken
    :ivar instance_id: Specifies the the DBInstanceIdentifier of the DB Instance this DB Snapshot was created from (DBInstanceIdentifier)
    :ivar master_username: Provides the master username for the DB Instance
    :ivar port: Specifies the port that the database engine was listening on at the time of the snapshot
    :ivar snapshot_create_time: Provides the time (UTC) when the snapshot was taken
    :ivar status: Specifies the status of this DB Snapshot. Possible values are [ available, backing-up, creating, deleted, deleting, failed, modifying, rebooting, resetting-master-credentials ]
    :ivar iops: Specifies the Provisioned IOPS (I/O operations per second) value of the DB instance at the time of the snapshot.
    :ivar option_group_name: Provides the option group name for the DB snapshot.
    :ivar percent_progress: The percentage of the estimated data that has been transferred.
    :ivar snapshot_type: Provides the type of the DB snapshot.
    :ivar source_region: The region that the DB snapshot was created in or copied from.
    :ivar vpc_id: Provides the Vpc Id associated with the DB snapshot.
    """

    def __init__(self, connection=None, id=None):
        """
        :param connection: the boto.rds.RDSConnection used for API calls
        :param id: the DBSnapshotIdentifier of this snapshot
        """
        self.connection = connection
        self.id = id
        self.engine = None
        self.engine_version = None
        self.snapshot_create_time = None
        self.instance_create_time = None
        self.port = None
        self.status = None
        self.availability_zone = None
        self.master_username = None
        self.allocated_storage = None
        self.instance_id = None
        # (fix) a duplicate `self.availability_zone = None` assignment that
        # appeared here in the original initializer was removed.
        self.license_model = None
        self.iops = None
        self.option_group_name = None
        self.percent_progress = None
        self.snapshot_type = None
        self.source_region = None
        self.vpc_id = None

    def __repr__(self):
        return 'DBSnapshot:%s' % self.id

    def startElement(self, name, attrs, connection):
        # SAX-style parse hook; this object has no nested elements to handle.
        pass

    def endElement(self, name, value, connection):
        # SAX-style parse hook: map each response XML element onto the
        # matching attribute, casting numeric fields to int.
        if name == 'Engine':
            self.engine = value
        elif name == 'EngineVersion':
            self.engine_version = value
        elif name == 'InstanceCreateTime':
            self.instance_create_time = value
        elif name == 'SnapshotCreateTime':
            self.snapshot_create_time = value
        elif name == 'DBInstanceIdentifier':
            self.instance_id = value
        elif name == 'DBSnapshotIdentifier':
            self.id = value
        elif name == 'Port':
            self.port = int(value)
        elif name == 'Status':
            self.status = value
        elif name == 'AvailabilityZone':
            self.availability_zone = value
        elif name == 'MasterUsername':
            self.master_username = value
        elif name == 'AllocatedStorage':
            self.allocated_storage = int(value)
        elif name == 'SnapshotTime':
            # NOTE(review): stored on `self.time`, an attribute never set up
            # in __init__; looks legacy -- confirm whether this should feed
            # snapshot_create_time instead.
            self.time = value
        elif name == 'LicenseModel':
            self.license_model = value
        elif name == 'Iops':
            self.iops = int(value)
        elif name == 'OptionGroupName':
            self.option_group_name = value
        elif name == 'PercentProgress':
            self.percent_progress = int(value)
        elif name == 'SnapshotType':
            self.snapshot_type = value
        elif name == 'SourceRegion':
            self.source_region = value
        elif name == 'VpcId':
            self.vpc_id = value
        else:
            # Unrecognized elements are kept verbatim as attributes.
            setattr(self, name, value)

    def update(self, validate=False):
        """
        Update the DB snapshot's status information by making a call to fetch
        the current snapshot attributes from the service.

        :type validate: bool
        :param validate: By default, if EC2 returns no data about the
                         instance the update method returns quietly. If
                         the validate param is True, however, it will
                         raise a ValueError exception if no data is
                         returned from EC2.
        """
        rs = self.connection.get_all_dbsnapshots(self.id)
        if len(rs) > 0:
            for i in rs:
                if i.id == self.id:
                    self.__dict__.update(i.__dict__)
        elif validate:
            raise ValueError('%s is not a valid Snapshot ID' % self.id)
        return self.status
|
jamesblunt/twine | refs/heads/master | tests/test_upload.py | 2 | # Copyright 2014 Ian Cordasco
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pretend
import pytest
from twine.commands import upload
def test_find_dists_expands_globs():
    # find_dists must expand shell-style globs into matching file paths.
    expected = ['twine/__init__.py', 'twine/__main__.py']
    found = sorted(upload.find_dists(['twine/__*.py']))
    assert expected == found
def test_find_dists_errors_on_invalid_globs():
    # A glob that matches no files must raise instead of silently
    # returning an empty list.
    with pytest.raises(ValueError):
        upload.find_dists(['twine/*.rb'])
def test_find_dists_handles_real_files():
    # Literal (non-glob) paths must pass through unchanged.
    paths = ['twine/__init__.py', 'twine/__main__.py', 'twine/cli.py',
             'twine/utils.py', 'twine/wheel.py']
    assert upload.find_dists(paths) == paths
def test_sign_file(monkeypatch):
    # Record the subprocess invocation instead of actually running gpg2.
    recorded = pretend.call_recorder(lambda args: None)
    monkeypatch.setattr(upload.subprocess, 'check_call', recorded)
    expected_args = ['gpg2', '--detach-sign', '-a', 'my_file.tar.gz']
    upload.sign_file('gpg2', 'my_file.tar.gz', None)
    assert recorded.calls == [pretend.call(expected_args)]
def test_sign_file_with_identity(monkeypatch):
    # With an identity, sign_file must add --local-user to the gpg call.
    recorded = pretend.call_recorder(lambda args: None)
    monkeypatch.setattr(upload.subprocess, 'check_call', recorded)
    expected_args = ['gpg', '--detach-sign', '--local-user', 'identity', '-a',
                     'my_file.tar.gz']
    upload.sign_file('gpg', 'my_file.tar.gz', 'identity')
    assert recorded.calls == [pretend.call(expected_args)]
|
rajpushkar83/cloudmesh | refs/heads/master | cloudmesh_common/tables.py | 1 | """Convenient methods and classes to print tables"""
from pytimeparse.timeparse import timeparse
from prettytable import PrettyTable
from datetime import datetime
from datetime import timedelta
import json
import yaml
import hostlist
def print_format_dict(d, header=None, kind='table'):
    """Render dict *d* in the requested format.

    :param d: the dictionary to render
    :param header: two-element list of column titles for the table form
        (two_column_table supplies defaults when falsy)
    :param kind: 'json', 'yaml', or anything else for the table form
    :return: a formatted string (json/yaml) or a PrettyTable (table)
    """
    if kind == "json":
        return json.dumps(d, indent=4)
    elif kind == "yaml":
        return yaml.dump(d, default_flow_style=False)
    else:
        # BUG FIX: pass the dict itself, not d.keys() -- two_column_table
        # expects a mapping and calls .keys()/.values() on its argument,
        # so the original call crashed for the default 'table' kind.
        return two_column_table(d, header)
def array_dict_table_printer(array, order=None, header=None, vertical=False):
    """prints a pretty table from an array of dicts

    :param array: A an array with dicts of the same type.
                  Each key will be a column
    :param order: The order in which the columns are printed.
                  The order is specified by the key names of the dict.
    :param header: The Header of each of the columns
    :param vertical: vertical printing table
    :return: a PrettyTable, or None when the input is None or empty
    """
    if array is None or array == []:
        return None
    # header
    if header is None:
        header = array[0].keys()
    if order is None:
        order = header
    # NOTE(review): header was just defaulted above, so it can no longer be
    # None here -- the first branch below is unreachable. Possibly the test
    # was meant to check the *caller-supplied* header; confirm intent.
    if header is None:
        if vertical:
            x = PrettyTable()
            x.add_column("Item", order)
        else:
            x = PrettyTable(order)
    else:
        if vertical:
            x = PrettyTable()
            x.add_column("Item", header)
        else:
            x = PrettyTable(header)
    for element in array:
        values = []
        for key in order:
            try:
                tmp = str(element[key])
            except:
                # Missing key: render a blank cell instead of failing.
                tmp = ' '
            values.append(tmp)
        if vertical:
            # Each record becomes one extra column in vertical mode.
            x.add_column(" ", values)
        else:
            x.add_row(values)
    x.align = "l"
    return x
def column_table(column_dict, order=None):
    """Build a PrettyTable with one column per key of *column_dict*.

    :param column_dict: a dict mapping column name -> list of values;
        all lists must have the same length. The keys are used as headers.
    :param order: iterable of key names giving the column order;
        defaults to the dict's own key order.
    """
    table = PrettyTable()
    column_names = column_dict.keys() if order is None else order
    for name in column_names:
        table.add_column(name, column_dict[name])
    table.align = "l"
    return table
def row_table(d, order=None, labels=None):
    """prints a pretty table from data in the dict.

    Each key becomes one row; list and dict values are spread over
    additional rows with a blank key cell.

    :param d: A dict to be printed
    :param order: The order in which the rows are printed.
        The order is specified by the key names of the dict.
    :param labels: optional column headers passed straight to PrettyTable
    """
    # header
    header = d.keys()
    x = PrettyTable(labels)
    if order is None:
        order = header
    for key in order:
        value = d[key]
        if type(value) == list:
            # First list element on the key's own row, the rest below it.
            x.add_row([key, value[0]])
            for element in value[1:]:
                x.add_row(["", element])
        elif type(value) == dict:
            # NOTE(review): value.keys()[0] requires Python 2 (list keys);
            # under Python 3 this raises TypeError -- confirm target version.
            value_keys = value.keys()
            first_key = value_keys[0]
            rest_keys = value_keys[1:]
            x.add_row([key, "{0} : {1}".format(first_key, value[first_key])])
            for element in rest_keys:
                x.add_row(["", "{0} : {1}".format(element, value[element])])
        else:
            x.add_row([key, value])
    x.align = "l"
    return x
def two_column_table(column_dict, header=None):
    """prints a table with two columns where the first column are the
    attributes, and the second column are the values.

    :param column_dict: the dictionary to be printed
    :param header: optional two-element list of column titles; any falsy
        value falls back to ['Default', 'Value']
    """
    # Fixed the mutable-default-argument anti-pattern: the old signature
    # used header=['Default', 'Value'] directly. Behavior is unchanged
    # because any falsy header (including None) hits the same fallback.
    if not header:
        header = ['Default', 'Value']
    x = PrettyTable()
    x.add_column(header[0], column_dict.keys())
    x.add_column(header[1], column_dict.values())
    x.align = "l"
    return x
def one_column_table(column, header='Value'):
    """Build a single-column PrettyTable from *column*.

    :param column: list of cell values
    :param header: title of the lone column
    """
    table = PrettyTable()
    table.add_column(header, column)
    table.align = "l"
    return table
def table_printer(the_dict, header_info=None):
    """
    Recursively renders a dict (or list of dicts) as nested HTML tables.

    :param the_dict: the dictionary (or list, or scalar) to be printed.
    :param header_info: an array of two values that are used in the header
    """
    # header_info ["attribute", "value"]
    # NOTE(review): the `or (header_info == "")` clause is unreachable ("" is
    # not None, so the first test already matches), and header_info == ""
    # would raise IndexError on header_info[0] below. The intent was probably
    # `and header_info != ""` -- confirm before changing behavior.
    if (header_info is not None) or (header_info == ""):
        result = '<tr><th>{0}</th><th>{1}</th></tr>' \
            .format(header_info[0], header_info[1])
    else:
        result = ''
    if isinstance(the_dict, dict):
        # Python 2 iteritems; each value is rendered recursively.
        for name, value in the_dict.iteritems():
            result = result + \
                '<tr><td>{0}</td><td>{1}</td></tr>' \
                .format(name.title(), str(table_printer(value)))
        result = '<table>' + result + '</table>'
        return result
    elif isinstance(the_dict, list):
        for element in the_dict:
            try:
                for name, value in element.iteritems():
                    result = result + \
                        '<tr><td>{0}</td><td>{1}</td></tr>' \
                        .format(name.title(), str(table_printer(value)))
            except:
                # If the element is not dict
                return str(element)
        result = '<table>' + result + '</table>'
        return result
    else:
        # Scalars are returned unchanged (the caller embeds them in a cell).
        return the_dict
def parse_time_interval(time_start, time_end):
    """Resolve a (start, end) time pair where the end may be a duration.

    ``time_start`` may be the literal "current_time" or "now", in which case
    it is replaced with the current timestamp. ``time_end`` may be a duration
    prefixed with "+" (e.g. "+1d", "+1w10s"; parsed by the intuitive
    timeparse module), which is added onto the start time.

    :param time_start: the start time, if the string 'current_time' is
                       passed it will be replaced by the current time
    :param time_end: either a time or a duration
    :return: tuple of (start, end) rendered as strings
    """
    t_start, t_end = time_start, time_end
    # Placeholder strings resolve to the current timestamp; None stays None
    # (None can never match the placeholder list).
    if t_start in ["current_time", "now"]:
        t_start = str(datetime.now())
    # A leading "+" marks t_end as a duration relative to t_start.
    if t_end is not None and t_end.startswith("+"):
        seconds = timeparse(t_end[1:])
        t_start = datetime.strptime(t_start, "%Y-%m-%d %H:%M:%S.%f")
        t_end = t_start + timedelta(seconds=seconds)
    return (str(t_start), str(t_end))
def dict_key_list_table_printer(d, indexed=False):
    '''
    accept a dict in the form:
    {key1: [list1],
     key2: [list2],
     .......
    =>
    | key1 | key2 |
    | l
    | i
    | s
    | t

    :param d: dict mapping column name -> list of column values
    :param indexed: when True, prepend a 1-based "index" column
    '''
    x = PrettyTable()
    # Find the longest value list; shorter columns are padded below so that
    # PrettyTable gets equal-length columns.
    temp = d.values()
    l = 0
    for item in temp:
        l0 = len(item)
        if l0 > l:
            l = l0
    if indexed:
        if l == 0:
            index_list = []
        else:
            # hostlist.expand_hostlist("[1-N]") yields ['1', ..., 'N'].
            index_list = hostlist.expand_hostlist("[1-{0}]".format(str(l)))
        x.add_column("index", index_list)
    # Python 2 iteritems; pad every column with blanks up to length l.
    for k, v in d.iteritems():
        v0 = v + [" "] * (l - len(v))
        x.add_column(k, v0)
    x.align = "l"
    return x
|
ddrager/python-bitcoinlib | refs/heads/master | doc/conf.py | 8 | # -*- coding: utf-8 -*-
#
# bitcoin documentation build configuration file, created by
# sphinx-quickstart on Thu May 28 20:40:55 2015.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# Project identity constants reused throughout this Sphinx configuration.
PROJECT = 'python-bitcoinlib'
DESCRIPTION = 'The Swiss Army Knife of the Bitcoin protocol.'
AUTHORS = 'The python-bitcoinlib developers'

import sphinx
import sys
import os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
from bitcoin import __version__

# Prevent loading openssl when generating API docs. Either the whole library or
# the necessary elliptic curve might not be available, causing import to fail.
try:
    from unittest.mock import MagicMock  # Python 3.3+
except ImportError:
    from mock import MagicMock  # Python 2 fallback (third-party 'mock' package)
# Stub out ctypes entirely so importing bitcoin never touches OpenSSL.
sys.modules['ctypes'] = MagicMock()
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
]
autodoc_default_flags = [
'members',
'undoc-members',
'show-inheritance',
]
# Include __init__ docstring in class level docs
autoclass_content = 'both'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = PROJECT
copyright = '2012-2015, ' + AUTHORS
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# 'classic' replaced 'default' as the built-in theme name in Sphinx 1.3;
# older Sphinx versions (or ones without version_info) keep the old name.
if getattr(sphinx, 'version_info', (0, 0)) >= (1, 3):
    html_theme = 'classic'
else:
    html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = PROJECT + 'doc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', PROJECT + '.tex', PROJECT + ' Documentation',
AUTHORS, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', PROJECT, PROJECT + ' Documentation',
[AUTHORS], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', PROJECT, PROJECT + ' Documentation',
AUTHORS, PROJECT, DESCRIPTION,
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = PROJECT
epub_author = AUTHORS
epub_publisher = AUTHORS
epub_copyright = copyright
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
|
h2oloopan/easymerge | refs/heads/master | EasyMerge/tests/scrapy/scrapy/contrib/pipeline/media.py | 28 | from __future__ import print_function
from collections import defaultdict
from twisted.internet.defer import Deferred, DeferredList
from twisted.python.failure import Failure
from scrapy.utils.defer import mustbe_deferred, defer_result
from scrapy import log
from scrapy.utils.request import request_fingerprint
from scrapy.utils.misc import arg_to_iter
class MediaPipeline(object):
    """Abstract item pipeline that downloads media referenced by items.

    Subclasses implement get_media_requests() to emit download requests per
    item, and media_downloaded()/media_failed() to post-process results.
    Results are cached per request fingerprint so the same media is only
    downloaded once per spider run; concurrent requests for the same media
    wait on the first download's deferred.
    """

    LOG_FAILED_RESULTS = True

    class SpiderInfo(object):
        """Per-spider bookkeeping for in-flight, cached and waiting downloads."""

        def __init__(self, spider):
            self.spider = spider
            self.downloading = set()          # fingerprints currently in flight
            self.downloaded = {}              # fingerprint -> cached result/Failure
            self.waiting = defaultdict(list)  # fingerprint -> deferreds to fire

    def __init__(self, download_func=None):
        # download_func is only used by tests (see _check_media_to_download).
        self.download_func = download_func

    @classmethod
    def from_crawler(cls, crawler):
        """Build the pipeline via from_settings() when the subclass defines it."""
        try:
            pipe = cls.from_settings(crawler.settings)
        except AttributeError:
            pipe = cls()
        pipe.crawler = crawler
        return pipe

    def open_spider(self, spider):
        self.spiderinfo = self.SpiderInfo(spider)

    def process_item(self, item, spider):
        """Schedule every media request for *item*; fire item_completed when done."""
        info = self.spiderinfo
        requests = arg_to_iter(self.get_media_requests(item, info))
        dlist = [self._process_request(r, info) for r in requests]
        dfd = DeferredList(dlist, consumeErrors=1)
        return dfd.addCallback(self.item_completed, item, info)

    def _process_request(self, request, info):
        fp = request_fingerprint(request)
        cb = request.callback or (lambda _: _)
        eb = request.errback
        request.callback = None
        request.errback = None

        # Return cached result if request was already seen
        if fp in info.downloaded:
            return defer_result(info.downloaded[fp]).addCallbacks(cb, eb)

        # Otherwise, wait for result
        wad = Deferred().addCallbacks(cb, eb)
        info.waiting[fp].append(wad)

        # Check if request is downloading right now to avoid doing it twice
        if fp in info.downloading:
            return wad

        # Download request checking media_to_download hook output first
        info.downloading.add(fp)
        dfd = mustbe_deferred(self.media_to_download, request, info)
        dfd.addCallback(self._check_media_to_download, request, info)
        dfd.addBoth(self._cache_result_and_execute_waiters, fp, info)
        dfd.addErrback(log.err, spider=info.spider)
        return dfd.addBoth(lambda _: wad)  # it must return wad at last

    def _check_media_to_download(self, result, request, info):
        # A non-None result from the media_to_download hook short-circuits
        # the download entirely.
        if result is not None:
            return result
        if self.download_func:
            # this ugly code was left only to support tests. TODO: remove
            dfd = mustbe_deferred(self.download_func, request, info.spider)
        else:
            request.meta['handle_httpstatus_all'] = True
            dfd = self.crawler.engine.download(request, info.spider)
        # Identical post-processing for both download paths (the original
        # duplicated this addCallbacks call in each branch).
        dfd.addCallbacks(
            callback=self.media_downloaded, callbackArgs=(request, info),
            errback=self.media_failed, errbackArgs=(request, info))
        return dfd

    def _cache_result_and_execute_waiters(self, result, fp, info):
        if isinstance(result, Failure):
            # minimize cached information for failure
            result.cleanFailure()
            result.frames = []
            result.stack = None
        info.downloading.remove(fp)
        info.downloaded[fp] = result  # cache result
        for wad in info.waiting.pop(fp):
            defer_result(result).chainDeferred(wad)

    ### Overridable Interface
    def media_to_download(self, request, info):
        """Check request before starting download"""
        pass

    def get_media_requests(self, item, info):
        """Returns the media requests to download"""
        pass

    def media_downloaded(self, response, request, info):
        """Handler for success downloads"""
        return response

    def media_failed(self, failure, request, info):
        """Handler for failed downloads"""
        return failure

    def item_completed(self, results, item, info):
        """Called per item when all media requests has been processed"""
        if self.LOG_FAILED_RESULTS:
            # (fix) corrected log-message typo: "proessing" -> "processing".
            msg = '%s found errors processing %s' % (self.__class__.__name__, item)
            for ok, value in results:
                if not ok:
                    log.err(value, msg, spider=info.spider)
        return item
|
drufat/sympy | refs/heads/master | sympy/physics/optics/tests/test_medium.py | 70 | from __future__ import division
from sympy import sqrt, simplify
from sympy.physics.optics import Medium
from sympy.abc import epsilon, mu
from sympy.physics.units import c, u0, e0, m, kg, s, A
def test_medium():
    # Default medium: vacuum permittivity/permeability (e0, u0).
    m1 = Medium('m1')
    assert m1.intrinsic_impedance == sqrt(u0/e0)
    assert m1.speed == 1/sqrt(e0*u0)
    assert m1.refractive_index == c*sqrt(e0*u0)
    assert m1.permittivity == e0
    assert m1.permeability == u0
    # Symbolic permittivity/permeability propagate through derived values.
    m2 = Medium('m2', epsilon, mu)
    assert m2.intrinsic_impedance == sqrt(mu/epsilon)
    assert m2.speed == 1/sqrt(epsilon*mu)
    assert m2.refractive_index == c*sqrt(epsilon*mu)
    assert m2.permittivity == epsilon
    assert m2.permeability == mu
    # Increasing electric permittivity and magnetic permeability
    # by small amount from its value in vacuum.
    m3 = Medium('m3', 9.0*10**(-12)*s**4*A**2/(m**3*kg), 1.45*10**(-6)*kg*m/(A**2*s**2))
    assert m3.refractive_index > m1.refractive_index
    assert m3 > m1
    # Decreasing electric permittivity and magnetic permeability
    # by small amount from its value in vacuum.
    m4 = Medium('m4', 7.0*10**(-12)*s**4*A**2/(m**3*kg), 1.15*10**(-6)*kg*m/(A**2*s**2))
    assert m4.refractive_index < m1.refractive_index
    assert m4 < m1
    # Given permittivity plus refractive index n, the remaining quantities
    # are determined; compare them numerically within tight tolerances.
    m5 = Medium('m5', permittivity=710*10**(-12)*s**4*A**2/(m**3*kg), n=1.33)
    assert abs(m5.intrinsic_impedance - 6.24845417765552*kg*m**2/(A**2*s**3)) \
        < 1e-12*kg*m**2/(A**2*s**3)
    assert abs(m5.speed - 225407863.157895*m/s) < 1e-6*m/s
    assert abs(m5.refractive_index - 1.33000000000000) < 1e-12
    assert abs(m5.permittivity - 7.1e-10*A**2*s**4/(kg*m**3)) \
        < 1e-20*A**2*s**4/(kg*m**3)
    assert abs(m5.permeability - 2.77206575232851e-8*kg*m/(A**2*s**2)) \
        < 1e-20*kg*m/(A**2*s**2)
|
unlessbamboo/grocery-shop | refs/heads/master | language/python/src/descripter/descripter-test.py | 1 | #!/usr/bin/env python
# coding:utf-8
##
# @file testDescipter.py
# @brief 测试descipter的某些简单功能
# 1,descripter是属性、实例方法、静态方法、类方法、super的实现机制
# @author unlessbamboo
# @version 0.1
# @date 2016-01-29
import types
import requests
class TestDescipter(object):
    """Demo class for descriptor machinery: plain methods, and the
    registration of property accessors via @property / @score.setter /
    @score.getter."""

    def __init__(self, name):
        """__init__

        :param name: Object's name
        """
        self.name = name
        self._score = 0

    def testMethod(self):
        """Instance method used below to show bound-method creation."""
        print 'testMeothd:{0}'.format(self.name)

    def classMethod(TestDescipter):
        # NOTE(review): not decorated with @classmethod -- this is a plain
        # instance method whose first parameter merely shadows the class name.
        print 'classMethod!'

    ##
    # @brief property: equivalent to declaring a ``score`` attribute; the
    # @score.getter and @score.setter / deleter decorators below re-register
    # individual accessors on the same property object.
    #
    @property
    def score(self):
        return self._score

    @score.setter
    def score(self, value):
        """Setter registered with @score.setter; only accepts ints.

        :param value: new score, must be an int
        """
        if not isinstance(value, int):
            # NOTE(review): bare `raise` with no active exception is a bug
            # (raises TypeError at runtime); likely intended something like
            # ValueError -- confirm.
            raise
        self._score = value

    @score.getter
    def score(self):
        """Getter registered with @score.getter; replaces the @property
        getter above. Increments _score on every read, which the demo in
        methodTest() uses to show changing values."""
        self._score += 1
        return self._score
def methodTest():
    """Show how methods are built on top of plain functions.

    Background:
    1. Every function stored in a class is a non-data descriptor that
       returns a bound or unbound method.
    2. The attribute access
           aObj.f
       is equivalent to
           aObj.__dict__['fun1'].__get__(aObj, type(aObj))
       where __get__ is defined roughly as
           class Function:
               def __get__(self, obj, objtype=None):
                   return types.MethodType(self, obj, objtype)
       with ``self`` being the function object; MethodType yields a method
       object such as
           <bound method dict.items of {'age': 3}>
    3. A function is itself a non-data descriptor.
    4. Method transformations:
           instance method: f(*args) ---> f(obj, *args)
           static method:   f(*args) ---> f(*args)
           class method:    f(class, *args) --> f(type(obj), *args)
    Verification steps:
    1. Print the method object itself, then print
       type(obj).__dict__['func'].__get__(obj, cls), and compare.
    2. Print the class-level method itself, then print
       Cls.__dict__['func'].__get__(None, cls), and compare.
    3. This demonstrates the essence of the descriptor protocol:
       both instance-method and class-level lookups go through __get__,
       differing only in the arguments passed.
    """
    test1 = TestDescipter('test1')
    print '对象调用(验证相等)-方法对象打印:{0}\n\t方法类型:{1}'.format(
        test1.testMethod,
        types.MethodType(test1.testMethod, test1, type(test1)))
    print '对象调用__dict__[...]输出:{0}'.format(
        TestDescipter.__dict__['testMethod'].__get__(test1, TestDescipter))
    print '类调用-方法对象打印:{0}'.format(
        TestDescipter.classMethod)
    print '类调用__dict__[...]输出:{0}'.format(
        TestDescipter.__dict__['classMethod'].__get__(None, TestDescipter))
    print '使用@property,逐渐递增score值:\n'
    print 'Score value:{0}, {1}, {2}, {3}'.format(
        test1.score, test1.score, test1.score, test1.score)
if __name__ == '__main__':
    """main"""
    # Script entry point: run the method/descriptor demonstration.
    methodTest()
|
box/ClusterRunner | refs/heads/master | app/util/url_builder.py | 2 | import re
from urllib.parse import urljoin
from app.util.conf.configuration import Configuration
class UrlBuilder(object):
    """
    Centralizes URL generation for a service: stores the host/port, the
    scheme taken from configuration, and the API version, and joins path
    segments onto the versioned base URL.
    """
    API_VERSION_1 = 'v1'

    def __init__(self, service_address, api_version=API_VERSION_1):
        """
        :param service_address: A host and port and optional scheme, like "http(s)://hostname.example.com:43000"
        :type service_address: str
        :type api_version: str
        """
        self._service_address = service_address
        self._api_version = api_version
        self._scheme = '{}://'.format(Configuration['protocol_scheme'])

    def url(self, *args):
        """
        Produce a url for the given path segments.

        :param args: path components to string together into a url path
        :type args: iterable [str|int]
        :rtype: str
        """
        # Drop any scheme the caller baked into the address, then impose
        # the configured one.
        bare_address = re.sub(r'^[a-z]+://', '', self._service_address)
        base = urljoin(self._scheme + bare_address, self._api_version)
        segments = [str(segment).strip('/') for segment in args]
        return '/'.join([base] + segments)
|
packenx/PythonInstaller | refs/heads/master | app/src/main/assets/arm/static/python/lib/python2.7/site-packages/dpkt/stp.py | 15 | # $Id: stp.py 23 2006-11-08 15:45:33Z dugsong $
"""Spanning Tree Protocol."""
import dpkt
class STP(dpkt.Packet):
    """Spanning Tree Protocol bridge PDU.

    Each ``__hdr__`` entry is (field name, struct format, default) as
    required by dpkt.Packet; the layout mirrors the on-wire BPDU fields.
    """
    __hdr__ = (
        ('proto_id', 'H', 0),    # protocol identifier (0 for STP)
        ('v', 'B', 0),           # protocol version
        ('type', 'B', 0),        # BPDU type
        ('flags', 'B', 0),
        ('root_id', '8s', ''),   # root bridge identifier (8 raw bytes)
        ('root_path', 'I', 0),   # cost of path to root
        ('bridge_id', '8s', ''), # sending bridge identifier (8 raw bytes)
        ('port_id', 'H', 0),
        ('age', 'H', 0),         # message age
        ('max_age', 'H', 0),
        ('hello', 'H', 0),       # hello time
        ('fd', 'H', 0)           # forward delay
    )
|
valexandersaulys/prudential_insurance_kaggle | refs/heads/master | venv/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.py | 527 | # Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import abc
import functools
import itertools
import re
from ._compat import string_types, with_metaclass
from .version import Version, LegacyVersion, parse
class InvalidSpecifier(ValueError):
    """
    An invalid specifier was found, users should refer to PEP 440.

    Raised when a specifier string does not match the expected
    ``<operator><version>`` grammar.
    """
class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
    """Abstract interface shared by individual specifiers and SpecifierSet."""

    @abc.abstractmethod
    def __str__(self):
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self):
        """
        Returns a hash value for this Specifier like object.
        """

    @abc.abstractmethod
    def __eq__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """

    @abc.abstractmethod
    def __ne__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        """

    @abc.abstractproperty
    def prereleases(self):
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @prereleases.setter
    def prereleases(self, value):
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @abc.abstractmethod
    def contains(self, item, prereleases=None):
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(self, iterable, prereleases=None):
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
class _IndividualSpecifier(BaseSpecifier):
    """Shared implementation for one ``<operator><version>`` clause.

    Subclasses provide ``_regex`` (parses the clause) and ``_operators``
    (maps operator text to a ``_compare_*`` method name suffix).
    """

    _operators = {}

    def __init__(self, spec="", prereleases=None):
        """Parse *spec* into an ``(operator, version)`` pair.

        :raises InvalidSpecifier: if *spec* does not match ``_regex``.
        """
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))

        self._spec = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<{0}({1!r}{2})>".format(
            self.__class__.__name__,
            str(self),
            pre,
        )

    def __str__(self):
        return "{0}{1}".format(*self._spec)

    def __hash__(self):
        return hash(self._spec)

    def __eq__(self, other):
        # Strings are parsed into a specifier of the same class before
        # comparison; unparseable strings and foreign types defer.
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec == other._spec

    def __ne__(self, other):
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec != other._spec

    def _get_operator(self, op):
        # Dispatch e.g. "==" -> self._compare_equal via the _operators table.
        return getattr(self, "_compare_{0}".format(self._operators[op]))

    def _coerce_version(self, version):
        # Normalize raw strings to Version/LegacyVersion instances.
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self):
        # The operator half of the clause, e.g. ">=".
        return self._spec[0]

    @property
    def version(self):
        # The version half of the clause, e.g. "1.0".
        return self._spec[1]

    @property
    def prereleases(self):
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        """Return True if *item* satisfies this specifier.

        A non-None *prereleases* overrides the instance-level policy for
        this one call.
        """
        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")``
        item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases then we can short circuit
        # logic if this version is a prerelease.
        if item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        return self._get_operator(self.operator)(item, self.version)

    def filter(self, iterable, prereleases=None):
        """Yield the items of *iterable* that satisfy this specifier.

        Pre-releases are normally withheld, but are yielded as a fallback
        when they were the only matches.
        """
        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if (parsed_version.is_prerelease
                        and not (prereleases or self.prereleases)):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
class LegacySpecifier(_IndividualSpecifier):
    """One specifier clause compared with pre-PEP 440 ("legacy") ordering.

    Both sides of every comparison are coerced to LegacyVersion, which
    compares by setuptools-style string rules rather than PEP 440.
    """

    _regex = re.compile(
        r"""
        ^
        \s*
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^\s]* # We just match everything, except for whitespace since this
                   # is a "legacy" specifier and the version string can be just
                   # about anything.
        )
        \s*
        $
        """,
        re.VERBOSE | re.IGNORECASE,
    )

    # Maps operator text to the _compare_* suffix used by _get_operator().
    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def _coerce_version(self, version):
        # Legacy comparisons always operate on LegacyVersion, even when the
        # input would parse as a valid PEP 440 Version.
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version

    def _compare_equal(self, prospective, spec):
        return prospective == self._coerce_version(spec)

    def _compare_not_equal(self, prospective, spec):
        return prospective != self._coerce_version(spec)

    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= self._coerce_version(spec)

    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= self._coerce_version(spec)

    def _compare_less_than(self, prospective, spec):
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective, spec):
        return prospective > self._coerce_version(spec)
def _require_version_compare(fn):
    """Decorator for ``Specifier._compare_*`` methods.

    PEP 440 comparisons are only meaningful against real Version objects;
    any other operand (e.g. a LegacyVersion) can never match, so the
    wrapped comparison short-circuits to False for them.
    """
    @functools.wraps(fn)
    def wrapped(self, prospective, spec):
        # ``and`` yields False for non-Version operands, otherwise the
        # comparison's own boolean result -- identical to an early return.
        return isinstance(prospective, Version) and fn(self, prospective, spec)
    return wrapped
class Specifier(_IndividualSpecifier):
    """One PEP 440 version specifier clause, e.g. ``~=1.4`` or ``==1.0.*``.

    The regular expression below encodes the PEP 440 specifier grammar,
    with per-operator restrictions on the allowed version syntax.
    """

    _regex = re.compile(
        r"""
        ^
        \s*
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====) # Only match for the identity operator
                \s*
                [^\s]* # We just match everything, except for whitespace
                       # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=) # Only match for equals and not equals
                \s*
                v?
                (?:[0-9]+!)? # epoch
                [0-9]+(?:\.[0-9]+)* # release
                (?: # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?: # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\* # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=) # Only match for the compatible operator
                \s*
                v?
                (?:[0-9]+!)? # epoch
                [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
                (?: # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?: # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=) # We have special cases for these
                              # operators so we want to make sure they
                              # don't match here.
                \s*
                v?
                (?:[0-9]+!)? # epoch
                [0-9]+(?:\.[0-9]+)* # release
                (?: # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?: # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
            )
        )
        \s*
        $
        """,
        re.VERBOSE | re.IGNORECASE,
    )

    # Maps operator text to the _compare_* suffix used by _get_operator().
    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective, spec):
        """``~=``: implemented as ``>= spec`` combined with ``== prefix.*``."""
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore post and dev releases and we want to treat the pre-release as
        # its own separate segment.
        prefix = ".".join(
            list(
                itertools.takewhile(
                    lambda x: (not x.startswith("post")
                               and not x.startswith("dev")),
                    _version_split(spec),
                )
            )[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return (self._get_operator(">=")(prospective, spec)
                and self._get_operator("==")(prospective, prefix))

    @_require_version_compare
    def _compare_equal(self, prospective, spec):
        """``==``: exact match, with support for ``.*`` prefix matching."""
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            prospective = prospective[:len(spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            spec, prospective = _pad_version(spec, prospective)
        else:
            # Convert our spec string into a Version
            spec = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec.local:
                prospective = Version(prospective.public)

        return prospective == spec

    @_require_version_compare
    def _compare_not_equal(self, prospective, spec):
        """``!=``: the negation of ``==``."""
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective, spec):
        """``<``: exclusive, and rejects pre-releases of the spec version."""
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes a pre-release version, we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective, spec):
        """``>``: exclusive; rejects post-releases and local versions of the
        spec version."""
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes a post-release version, we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective, spec):
        # ``===``: case-insensitive literal string comparison only.
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self):
        """Whether this clause implicitly allows pre-releases (i.e. its own
        version is a pre-release under an inclusive operator), unless an
        explicit override was set."""
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release then this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
def _version_split(version):
result = []
for item in version.split("."):
match = _prefix_regex.search(item)
if match:
result.extend(match.groups())
else:
result.append(item)
return result
def _pad_version(left, right):
left_split, right_split = [], []
# Get the release segment of our versions
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
# Get the rest of our versions
left_split.append(left[len(left_split):])
right_split.append(left[len(right_split):])
# Insert our padding
left_split.insert(
1,
["0"] * max(0, len(right_split[0]) - len(left_split[0])),
)
right_split.insert(
1,
["0"] * max(0, len(left_split[0]) - len(right_split[0])),
)
return (
list(itertools.chain(*left_split)),
list(itertools.chain(*right_split)),
)
class SpecifierSet(BaseSpecifier):
    """A comma-separated set of specifier clauses combined with logical AND,
    e.g. ``">=1.0,!=1.5,<2.0"``.
    """

    def __init__(self, specifiers="", prereleases=None):
        """Parse a comma-separated specifier string.

        Each clause is parsed as a PEP 440 Specifier where possible,
        falling back to LegacySpecifier otherwise.
        """
        # Split on , to break each individual specifier into its own item, and
        # strip each item to remove leading/trailing whitespace.
        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed = set()
        for specifier in specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)

    def __str__(self):
        # Sorted so that equal sets always stringify identically.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self):
        return hash(self._specs)

    def __and__(self, other):
        """Combine two sets into a stricter set (union of clauses).

        :raises ValueError: if the two sets carry conflicting explicit
            prerelease overrides (one True, one False).
        """
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # Propagate whichever explicit prerelease override exists; both may
        # agree, but True vs False is an unresolvable conflict.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __ne__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs != other._specs

    def __len__(self):
        return len(self._specs)

    def __iter__(self):
        return iter(self._specs)

    @property
    def prereleases(self):
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        """Return True if *item* satisfies every specifier in the set."""
        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        # like >=0.5, but it would be in >=1.0.dev1,>0.0.dev0 whose clauses
        # mention pre-releases explicitly.
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        # will always return True, this is an explicit design decision.
        return all(
            s.contains(item, prereleases=prereleases)
            for s in self._specs
        )

    def filter(self, iterable, prereleases=None):
        """Filter *iterable* down to the versions satisfying every clause.

        With no clauses at all, LegacyVersions are dropped and pre-releases
        are withheld unless nothing else survives.
        """
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered = []
            found_prereleases = []

            for item in iterable:
                # Ensure that we have some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
|
woogers/volatility | refs/heads/master | volatility/debug.py | 54 | # Volatility
#
# Authors:
# Michael Cohen <scudette@users.sourceforge.net>
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
""" General debugging framework """
import pdb
import sys
import inspect
import logging
import volatility.conf
# Shared global configuration object; options registered here appear on the
# volatility command line.
config = volatility.conf.ConfObject()
# -d/--debug is a counting flag: each repetition raises debug verbosity.
config.add_option("DEBUG", short_option = 'd', default = 0,
                  cache_invalidator = False,
                  action = 'count', help = "Debug volatility")
# Largest debug value used + 1
MAX_DEBUG = 3
def setup(level = 0):
    """Sets up the global logging environment.

    Installs the standard message format, lowers the root logger threshold
    so that higher *level* values expose progressively deeper debug output,
    and registers the custom DEBUG1..DEBUG8 level names below logging.DEBUG.
    """
    logging.basicConfig(format = "%(levelname)-8s: %(name)-20s: %(message)s")
    logging.getLogger('').setLevel(logging.DEBUG + 1 - level)
    # Names for the extra-verbose levels used by debug(msg, level).
    for offset in range(1, 9):
        logging.addLevelName(logging.DEBUG - offset, "DEBUG" + str(offset))
def debug(msg, level = 1):
    """Logs a message at the DEBUG level.

    Higher *level* values map to the sub-DEBUG levels (DEBUG1, DEBUG2, ...)
    registered by setup(), so the message only appears at higher verbosity.
    """
    log(msg, logging.DEBUG + 1 - level)
def info(msg):
    """Logs a message at the INFO level, attributed to the caller's module."""
    log(msg, logging.INFO)
def warning(msg):
    """Logs a message at the WARNING level, attributed to the caller's module."""
    log(msg, logging.WARNING)
def error(msg):
    """Logs a message at the ERROR level and terminates with exit status 1."""
    log(msg, logging.ERROR)
    sys.exit(1)
def critical(msg):
    """Logs a message at the CRITICAL level and terminates with exit status 1."""
    log(msg, logging.CRITICAL)
    sys.exit(1)
def log(msg, level):
    """Logs *msg* at *level*, attributed to the calling module.

    Walks up the call stack past any frames belonging to this module so the
    logger name reflects the original caller rather than volatility.debug.
    """
    modname = "volatility.py"
    try:
        frm = inspect.currentframe()
        modname = "volatility.debug"
        while modname == "volatility.debug":
            frm = frm.f_back
            mod = inspect.getmodule(frm)
            modname = mod.__name__
    except AttributeError:
        # Ran off the top of the stack or module lookup failed; fall back to
        # the last modname assigned above.
        pass
    finally:
        # Break the frame reference cycle, as recommended by the inspect docs.
        del frm
    _log(msg, modname, level)
def _log(msg, facility, loglevel):
"""Outputs a debugging message"""
logger = logging.getLogger(facility)
logger.log(loglevel, msg)
def b(level = 1):
    """Enters the debugger at the call point.

    Only activates when the -d/--debug count is at least *level*.
    """
    if config.DEBUG >= level:
        pdb.set_trace()

# Alias so callers can invoke debug.trace() for the same behaviour.
trace = b
def post_mortem(level = 1):
    """Provides a command line interface to python after an exception's occurred.

    Only activates when the -d/--debug count is at least *level*.
    """
    if config.DEBUG >= level:
        pdb.post_mortem()
|
cuboxi/android_external_chromium_org | refs/heads/kitkat | tools/json_schema_compiler/cpp_util.py | 23 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utilies and constants specific to Chromium C++ code.
"""
from code import Code
from datetime import datetime
from model import PropertyType
import os
import re
CHROMIUM_LICENSE = (
"""// Copyright (c) %d The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.""" % datetime.now().year
)
GENERATED_FILE_MESSAGE = """// GENERATED FROM THE API DEFINITION IN
// %s
// DO NOT EDIT.
"""
GENERATED_BUNDLE_FILE_MESSAGE = """// GENERATED FROM THE API DEFINITIONS IN
// %s
// DO NOT EDIT.
"""
GENERATED_FEATURE_MESSAGE = """// GENERATED FROM THE FEATURE DEFINITIONS IN
// %s
// DO NOT EDIT.
"""
def Classname(s):
  """Translates a namespace name or function name into something more
  suited to C++.

  eg experimental.downloads -> Experimental_Downloads
  updateAll -> UpdateAll.
  """
  # Use a raw string for the regex (a bare '\W' is a deprecated escape) and
  # drop empty segments so inputs with leading/trailing/consecutive non-word
  # characters don't raise IndexError on x[0].
  return '_'.join([x[0].upper() + x[1:] for x in re.split(r'\W', s) if x])
def GetAsFundamentalValue(type_, src, dst):
  """Returns the C++ code for retrieving a fundamental type from a
  Value into a variable.

  src: Value*
  dst: Property*
  """
  # One accessor template per fundamental property type; composite types
  # (arrays, objects, ...) are deliberately unsupported here.
  call_templates = {
      PropertyType.BOOLEAN: '%s->GetAsBoolean(%s)',
      PropertyType.DOUBLE: '%s->GetAsDouble(%s)',
      PropertyType.INTEGER: '%s->GetAsInteger(%s)',
      PropertyType.STRING: '%s->GetAsString(%s)',
  }
  return call_templates[type_.property_type] % (src, dst)
def GetValueType(type_):
  """Returns the Value::Type corresponding to the model.Type.

  Raises KeyError for property types with no single Value representation
  (e.g. CHOICES, ANY, REF).
  """
  return {
      PropertyType.ARRAY: 'base::Value::TYPE_LIST',
      PropertyType.BINARY: 'base::Value::TYPE_BINARY',
      PropertyType.BOOLEAN: 'base::Value::TYPE_BOOLEAN',
      # PropertyType.CHOICES can be any combination of types.
      PropertyType.DOUBLE: 'base::Value::TYPE_DOUBLE',
      PropertyType.ENUM: 'base::Value::TYPE_STRING',
      PropertyType.FUNCTION: 'base::Value::TYPE_DICTIONARY',
      PropertyType.INTEGER: 'base::Value::TYPE_INTEGER',
      PropertyType.OBJECT: 'base::Value::TYPE_DICTIONARY',
      PropertyType.STRING: 'base::Value::TYPE_STRING',
  }[type_.property_type]
def GetParameterDeclaration(param, type_):
  """Gets a parameter declaration of a given model.Property and its C++
  type.
  """
  # Heavyweight (object-like) types are passed by const reference; all other
  # types are passed by value.
  by_reference = param.type_.property_type in (PropertyType.ANY,
                                               PropertyType.ARRAY,
                                               PropertyType.CHOICES,
                                               PropertyType.OBJECT,
                                               PropertyType.REF,
                                               PropertyType.STRING)
  template = 'const %(type)s& %(name)s' if by_reference else '%(type)s %(name)s'
  return template % {
      'type': type_,
      'name': param.unix_name,
  }
def GenerateIfndefName(path, filename):
  """Formats a path and filename as a #define name.

  e.g chrome/extensions/gen, file.h becomes CHROME_EXTENSIONS_GEN_FILE_H__.
  """
  guard = '%s_%s_H__' % (path, filename)
  # Normalize both the platform separator and '/' so the result is a valid
  # macro name on any host OS.
  return guard.upper().replace(os.sep, '_').replace('/', '_')
def PadForGenerics(var):
  """Appends a space to |var| if it ends with a >, so that it can be compiled
  within generic types.
  """
  # Avoids the pre-C++11 '>>' token problem in nested template arguments.
  if var.endswith('>'):
    return '%s ' % var
  return var
def OpenNamespace(namespace):
  """Get opening root namespace declarations.

  |namespace| may be nested, e.g. 'extensions::api'.
  """
  code = Code()
  for component in namespace.split('::'):
    code.Append('namespace %s {' % component)
  return code
def CloseNamespace(namespace):
  """Get closing root namespace declarations.

  Emits the closing braces innermost-first, mirroring OpenNamespace.
  """
  code = Code()
  for component in reversed(namespace.split('::')):
    code.Append('}  // namespace %s' % component)
  return code
def ConstantName(feature_name):
  """Returns a kName for a feature's name.
  """
  # Dots separate words; each word keeps its interior case but gets an
  # upper-cased first letter.
  words = feature_name.replace('.', ' ').split()
  return 'k' + ''.join(word[0].upper() + word[1:] for word in words)
def CamelCase(unix_name):
  """Converts a unix_style name (e.g. 'tab_id') to CamelCase ('TabId')."""
  pieces = [piece.capitalize() for piece in unix_name.split('_')]
  return ''.join(pieces)
def ClassName(filepath):
  """Returns a CamelCase class name derived from the basename of |filepath|."""
  basename = os.path.split(filepath)[1]
  return CamelCase(basename)
|
macs03/demo-cms | refs/heads/master | cms/lib/python2.7/site-packages/django/conf/app_template/views.py | 6445 | from django.shortcuts import render
# Create your views here.
|
justincassidy/scikit-learn | refs/heads/master | examples/model_selection/plot_roc.py | 146 | """
=======================================
Receiver Operating Characteristic (ROC)
=======================================
Example of Receiver Operating Characteristic (ROC) metric to evaluate
classifier output quality.
ROC curves typically feature true positive rate on the Y axis, and false
positive rate on the X axis. This means that the top left corner of the plot is
the "ideal" point - a false positive rate of zero, and a true positive rate of
one. This is not very realistic, but it does mean that a larger area under the
curve (AUC) is usually better.
The "steepness" of ROC curves is also important, since it is ideal to maximize
the true positive rate while minimizing the false positive rate.
ROC curves are typically used in binary classification to study the output of
a classifier. In order to extend ROC curve and ROC area to multi-class
or multi-label classification, it is necessary to binarize the output. One ROC
curve can be drawn per label, but one can also draw a ROC curve by considering
each element of the label indicator matrix as a binary prediction
(micro-averaging).
.. note::
See also :func:`sklearn.metrics.roc_auc_score`,
:ref:`example_model_selection_plot_roc_crossval.py`.
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm, datasets
from sklearn.metrics import roc_curve, auc
# NOTE(review): sklearn.cross_validation is the pre-0.18 module name; newer
# releases moved train_test_split to sklearn.model_selection.
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import label_binarize
from sklearn.multiclass import OneVsRestClassifier

# Import some data to play with
iris = datasets.load_iris()
X = iris.data
y = iris.target

# Binarize the output (one indicator column per class) so per-class ROC
# curves can be computed.
y = label_binarize(y, classes=[0, 1, 2])
n_classes = y.shape[1]

# Add noisy features to make the problem harder
random_state = np.random.RandomState(0)
n_samples, n_features = X.shape
X = np.c_[X, random_state.randn(n_samples, 200 * n_features)]

# shuffle and split training and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.5,
                                                    random_state=0)

# Learn to predict each class against the other
classifier = OneVsRestClassifier(svm.SVC(kernel='linear', probability=True,
                                 random_state=random_state))
y_score = classifier.fit(X_train, y_train).decision_function(X_test)

# Compute ROC curve and ROC area for each class
fpr = dict()
tpr = dict()
roc_auc = dict()
for i in range(n_classes):
    fpr[i], tpr[i], _ = roc_curve(y_test[:, i], y_score[:, i])
    roc_auc[i] = auc(fpr[i], tpr[i])

# Compute micro-average ROC curve and ROC area: treat every element of the
# label indicator matrix as one binary prediction.
fpr["micro"], tpr["micro"], _ = roc_curve(y_test.ravel(), y_score.ravel())
roc_auc["micro"] = auc(fpr["micro"], tpr["micro"])

# Plot of a ROC curve for a specific class
plt.figure()
plt.plot(fpr[2], tpr[2], label='ROC curve (area = %0.2f)' % roc_auc[2])
plt.plot([0, 1], [0, 1], 'k--')  # diagonal = chance-level classifier
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver operating characteristic example')
plt.legend(loc="lower right")
plt.show()

# Plot ROC curve
plt.figure()
plt.plot(fpr["micro"], tpr["micro"],
         label='micro-average ROC curve (area = {0:0.2f})'
               ''.format(roc_auc["micro"]))
for i in range(n_classes):
    plt.plot(fpr[i], tpr[i], label='ROC curve of class {0} (area = {1:0.2f})'
                                   ''.format(i, roc_auc[i]))
plt.plot([0, 1], [0, 1], 'k--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Some extension of Receiver operating characteristic to multi-class')
plt.legend(loc="lower right")
plt.show()
|
agry/NGECore2 | refs/heads/master | scripts/object/tangible/deed/city_deed/garage_tatooine_deed.py | 85615 | import sys
def setup(core, object):
return |
import uuid
from builtins import str
from builtins import object
from django.db import models
from django.db.utils import OperationalError, ProgrammingError
from django.contrib.auth.models import User
from django.core import checks
# FW_DOCUMENT_VERSION:
# Also defined in frontend
# document/static/js/modules/schema/index.js
FW_DOCUMENT_VERSION = 3.3
class DocumentTemplate(models.Model):
    """A reusable template defining the structure/settings of documents."""
    title = models.CharField(max_length=255, default='', blank=True)
    import_id = models.CharField(max_length=255, default='', blank=True)
    content = models.JSONField(default=dict)
    doc_version = models.DecimalField(
        max_digits=3,
        decimal_places=1,
        default=FW_DOCUMENT_VERSION
    )
    user = models.ForeignKey(
        User,
        null=True,
        blank=True,
        on_delete=models.deletion.CASCADE
    )
    added = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    # Class-level flag read by cleanup code -- presumably marks templates as
    # auto-removable; TODO confirm against callers.
    auto_delete = True

    def __str__(self):
        return self.title

    def is_deletable(self):
        """Return True unless some reverse relation (other than document
        styles and export templates) still references this template."""
        reverse_relations = [
            f for f in self._meta.model._meta.get_fields()
            if (f.one_to_many or f.one_to_one) and
            f.auto_created and not f.concrete and
            f.name not in ['documentstyle', 'exporttemplate']
        ]
        for r in reverse_relations:
            if r.remote_field.model.objects.filter(
                **{r.field.name: self}
            ).exists():
                return False
        return True

    @classmethod
    def check(cls, **kwargs):
        """Extend Django's system checks with a document-version check."""
        errors = super().check(**kwargs)
        errors.extend(cls._check_doc_versions(**kwargs))
        return errors

    @classmethod
    def _check_doc_versions(cls, **kwargs):
        """Warn if any stored template uses an outdated document format."""
        try:
            # .exists() lets the database stop at the first outdated row
            # instead of fetching all of them just to take len().
            if cls.objects.filter(
                doc_version__lt=str(FW_DOCUMENT_VERSION)
            ).exists():
                return [
                    checks.Warning(
                        'Document templates need to be upgraded. Please '
                        'navigate to /admin/document/document/maintenance/ '
                        'with a browser as a superuser and upgrade all '
                        'document templates on this server.',
                        obj=cls
                    )
                ]
            else:
                return []
        except (ProgrammingError, OperationalError):
            # Database has not yet been initialized, so don't throw any error.
            return []
class Document(models.Model):
    """A single collaborative document and its serialized editor state."""
    title = models.CharField(max_length=255, default='', blank=True)
    content = models.JSONField(default=dict)
    doc_version = models.DecimalField(
        max_digits=3,
        decimal_places=1,
        default=FW_DOCUMENT_VERSION
    )
    # The doc_version is the version of the data format in the content field.
    # We upgrade the content field in JavaScript and not migrations so that
    # the same code can be used for migrations and for importing old fidus
    # files that are being uploaded. This field is only used for upgrading data
    # and is therefore not handed to the editor or document overview page.
    version = models.PositiveIntegerField(default=0)
    diffs = models.JSONField(default=list, blank=True)
    # The last few diffs that were received and approved. The number of stored
    # diffs should always be equivalent to or more than all the diffs since the
    # last full save of the document.
    owner = models.ForeignKey(
        User,
        related_name='owner',
        on_delete=models.deletion.CASCADE
    )
    added = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    comments = models.JSONField(default=dict, blank=True)
    bibliography = models.JSONField(default=dict, blank=True)
    # Whether or not document is listed on document overview list page.
    # True by default and for all normal documents. Can be set to False when
    # documents are added in plugins that list these documents somewhere else.
    listed = models.BooleanField(default=True)
    template = models.ForeignKey(
        DocumentTemplate,
        on_delete=models.deletion.CASCADE
    )

    def __str__(self):
        if len(self.title) > 0:
            return self.title + ' (' + str(self.id) + ')'
        else:
            return str(self.id)

    class Meta(object):
        ordering = ['-id']

    def clean(self, *args, **kwargs):
        """Normalize null comment/bibliography values before saving."""
        # NOTE(review): this assigns the JSON *string* "{}" although the
        # fields are JSONFields (default dict) -- presumably consumed as-is
        # by the frontend; confirm before changing to a dict.
        if self.comments is None:
            self.comments = "{}"
        if self.bibliography is None:
            self.bibliography = "{}"

    def save(self, *args, **kwargs):
        self.clean()
        super().save(*args, **kwargs)

    def get_absolute_url(self):
        return "/document/%i/" % self.id

    def is_deletable(self):
        """Return True unless some reverse relation other than access rights,
        invites, revisions or images still references this document."""
        reverse_relations = [
            f for f in self._meta.model._meta.get_fields()
            if (f.one_to_many or f.one_to_one) and
            f.auto_created and not f.concrete and
            f.name not in [
                'accessright',
                'accessrightinvite',
                'documentrevision',
                'documentimage'
            ]
        ]
        for r in reverse_relations:
            if r.remote_field.model.objects.filter(
                **{r.field.name: self}
            ).exists():
                return False
        return True

    @classmethod
    def check(cls, **kwargs):
        """Extend Django's system checks with a document-version check."""
        errors = super().check(**kwargs)
        errors.extend(cls._check_doc_versions(**kwargs))
        return errors

    @classmethod
    def _check_doc_versions(cls, **kwargs):
        """Warn if any stored document uses an outdated document format."""
        try:
            # .exists() lets the database stop at the first outdated row
            # instead of fetching all of them just to take len().
            if cls.objects.filter(
                doc_version__lt=str(FW_DOCUMENT_VERSION)
            ).exists():
                return [
                    checks.Warning(
                        'Documents need to be upgraded. Please navigate to '
                        '/admin/document/document/maintenance/ with a browser '
                        'as a superuser and upgrade all documents on this '
                        'server.',
                        obj=cls
                    )
                ]
            else:
                return []
        except (ProgrammingError, OperationalError):
            # Database has not yet been initialized, so don't throw any error.
            return []
# Access levels a collaborator can hold on a document.
RIGHTS_CHOICES = (
    ('write', 'Writer'),
    # Can write content and can read+write comments.
    # Can chat with collaborators.
    # Has read access to revisions.
    ('write-tracked', 'Write with tracked changes'),
    # Can write tracked content and can read/write comments.
    # Cannot turn off tracked changes.
    # Can chat with collaborators.
    # Has read access to revisions.
    ('comment', 'Commentator'),
    # Can read content and can read+write comments.
    # Can chat with collaborators.
    # Has read access to revisions.
    ('review', 'Reviewer'),
    # Can read the content and can read/write his own comments.
    # Comments by users with this access right only show the user's
    # numeric ID, not their username.
    # Cannot chat with collaborators nor see that they are connected.
    # Has no access to revisions.
    ('read', 'Reader'),
    # Can read content, including comments
    # Can chat with collaborators.
    # Has read access to revisions.
    ('read-without-comments', 'Reader without comment access'),
    # Can read content, but not the comments.
    # Cannot chat with collaborators.
    # Has no access to revisions.
)
# Reviewers and commentators may only comment on the document, not edit it.
COMMENT_ONLY = ('review', 'comment')
# Rights that allow sending document updates (content or comments) at all.
CAN_UPDATE_DOCUMENT = ['write', 'write-tracked', 'review', 'comment']
# Whether the collaborator is allowed to know about other collaborators
# and communicate with them.
CAN_COMMUNICATE = ['read', 'write', 'comment', 'write-tracked']
class AccessRight(models.Model):
    """Grants a single user one access level on a single document."""
    document = models.ForeignKey(Document, on_delete=models.deletion.CASCADE)
    user = models.ForeignKey(User, on_delete=models.deletion.CASCADE)
    rights = models.CharField(
        max_length=21,
        choices=RIGHTS_CHOICES,
        blank=False)

    class Meta(object):
        # One access-right row per (document, user) pair.
        unique_together = (("document", "user"),)

    def __str__(self):
        details = {
            'name': self.user.readable_name,
            'rights': self.rights,
            'doc_id': self.document.id,
        }
        return '%(name)s %(rights)s on %(doc_id)d' % details
class AccessRightInvite(models.Model):
    """An emailed invitation granting access rights on a document."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    email = models.EmailField() # The email where the invite was sent
    document = models.ForeignKey(Document, on_delete=models.deletion.CASCADE)
    rights = models.CharField(
        max_length=21,
        choices=RIGHTS_CHOICES,
        blank=False)

    def __str__(self):
        # The primary key is a uuid.UUID, so it must be rendered with %s;
        # the previous %(id)d raised TypeError whenever __str__ was called.
        return (
            '%(email)s %(rights)s on %(doc_id)d: %(id)s' %
            {
                'email': self.email,
                'rights': self.rights,
                'doc_id': self.document.id,
                'id': self.id
            }
        )

    def get_absolute_url(self):
        # %i on a uuid.UUID raises TypeError; %s renders the canonical
        # hyphenated form used in invite URLs.
        return "/invite/%s/" % self.id
def revision_filename(instance, filename):
    """Build the storage path for a revision's file, keyed by its pk.

    The uploaded filename is ignored; using the primary key guarantees a
    unique, stable name per revision.
    """
    return "document-revisions/%s.fidus" % instance.pk
class DocumentRevision(models.Model):
    """A point-in-time export (.fidus file) of a document."""
    document = models.ForeignKey(Document, on_delete=models.deletion.CASCADE)
    doc_version = models.DecimalField(
        max_digits=3,
        decimal_places=1,
        default=FW_DOCUMENT_VERSION
    )
    note = models.CharField(max_length=255, default='', blank=True)
    date = models.DateTimeField(auto_now=True)
    file_object = models.FileField(upload_to=revision_filename)
    file_name = models.CharField(max_length=255, default='', blank=True)

    def save(self, *args, **kwargs):
        """Save, writing the file only once a primary key exists.

        revision_filename() names the file after the pk, so on first insert
        the row is saved without the file, then saved again with it.
        """
        if self.pk is None:
            # We remove the file_object the first time so that we can use the
            # pk as the name of the saved revision file.
            file_object = self.file_object
            self.file_object = None
            super().save(*args, **kwargs)
            self.file_object = file_object
            # The row now exists, so a forced second INSERT would fail.
            kwargs.pop('force_insert', None)
        super(DocumentRevision, self).save(*args, **kwargs)

    def __str__(self):
        if len(self.note) > 0:
            return self.note + ' (' + str(self.id) + ') of ' + \
                str(self.document.id)
        else:
            return str(self.id) + ' of ' + str(self.document.id)

    @classmethod
    def check(cls, **kwargs):
        """Extend Django's system checks with a document-version check."""
        errors = super().check(**kwargs)
        errors.extend(cls._check_doc_versions(**kwargs))
        return errors

    @classmethod
    def _check_doc_versions(cls, **kwargs):
        """Warn if any stored revision uses an outdated document format."""
        try:
            # .exists() lets the database stop at the first outdated row
            # instead of fetching all of them just to take len().
            if cls.objects.filter(
                doc_version__lt=str(FW_DOCUMENT_VERSION)
            ).exists():
                return [
                    checks.Warning(
                        'Document revisions need to be upgraded. Please '
                        'navigate to /admin/document/document/maintenance/ '
                        'with a browser as a superuser and upgrade all '
                        'document revisions on this server.',
                        obj=cls
                    )
                ]
            else:
                return []
        except (ProgrammingError, OperationalError):
            # Database has not yet been initialized, so don't throw any error.
            return []
#!/usr/bin/env python
"""
This activity will calculate the ratio between CPU request and Memory request by (job ID, task index, event type).
These fields are optional and could be null.
"""
# It will connect to DataStoreClient
from sciwonc.dataflow.DataStoreClient import DataStoreClient
import math
import sys
##################################################################
import ConfigDB_Calc_StatsCPUMemory_0
client_statscpumemory = DataStoreClient("mongodb", ConfigDB_Calc_StatsCPUMemory_0)
data_statscpumemory = client_statscpumemory.getData()
if data_statscpumemory:
while True:
doc = data_statscpumemory.next()
if doc is None:
break;
sd_cpu = doc['standard deviation cpu']
avg_cpu = doc['average cpu']
sd_memory = doc['standard deviation memory']
avg_memory = doc['average memory']
sd_ratio = doc['standard deviation ratio']
avg_ratio = doc['average ratio']
##################################################################
# Stage 2: load the per-event-type statistics and index them in a dict
# keyed by event type for fast lookup in stage 3.
import ConfigDB_Calc_TEInfo_0
client_taskinfo = DataStoreClient("mongodb", ConfigDB_Calc_TEInfo_0)
# according to config
eventList = client_taskinfo.getData() # return an array of docs (like a csv reader)
eventInfo = {}
if(eventList):
    for index in eventList:
        event_type = index[ConfigDB_Calc_TEInfo_0.COLUMN]
        while True:
            doc = index['data'].next()
            if doc is None:
                break;
            info = {}
            info["standard deviation cpu"] = doc["standard deviation cpu"]
            info["average cpu"] = doc["average cpu"]
            info["standard deviation memory"] = doc["standard deviation memory"]
            info["average memory"] = doc["average memory"]
            info["standard deviation ratio"] = doc["standard deviation ratio"]
            info["average ratio"] = doc["average ratio"]
            eventInfo[event_type] = info
##################################################################
# Stage 3: stream task events, compute the CPU/memory request ratio and
# how many standard deviations each value lies from both the global and
# the per-event-type averages; results are persisted in batches.
import ConfigDB_Calc_TaskEvent_0
client_task = DataStoreClient("mongodb", ConfigDB_Calc_TaskEvent_0)
data_task = client_task.getData() # return an array of docs (like a csv reader)
output = []
count = 1
if(data_task):
    # processing
    while True:
        doc = data_task.next()
        if doc is None:
            print "================="
            print "finish"
            print output
            print len(output)
            print count
            # Flush whatever is left in the final partial batch.
            if len(output) > 0:
                print "done"
                client_task.saveData(output, numline=count)
            break;
        # NOTE(review): only event types "2","3","4","6" are processed --
        # confirm the intended subset against the trace schema.
        if doc['event type'] in ["2","3","4","6"]:
            # Skip events whose CPU or memory request is empty/null.
            if doc['CPU request'] and doc['memory request']:
                #print doc
                cpu = 0 if (not doc['CPU request']) else float(doc['CPU request'])
                memory = 0 if not doc['memory request'] else float(doc['memory request'])
                ratio = cpu/memory if (memory != 0) else 0
                event_type = doc['event type']
                event_avg_cpu = eventInfo[event_type]["average cpu"]
                event_sd_cpu = eventInfo[event_type]["standard deviation cpu"]
                event_avg_memory = eventInfo[event_type]["average memory"]
                event_sd_memory = eventInfo[event_type]["standard deviation memory"]
                event_avg_ratio = eventInfo[event_type]["average ratio"]
                event_sd_ratio = eventInfo[event_type]["standard deviation ratio"]
                newline = {}
                newline['job ID'] = doc['job ID']
                newline['task index'] = doc['task index']
                newline['event type'] = doc['event type']
                newline['time'] = doc['time']
                newline['ratio cpu memory'] = ratio
                # "sds from ... avg" = signed distance from the mean in units
                # of the standard deviation; None when the deviation is 0/null.
                newline['sds from all avg cpu'] = (cpu - avg_cpu)/sd_cpu if sd_cpu else None
                newline['sds from all avg memory'] = (memory - avg_memory)/sd_memory if sd_memory else None
                newline['sds from all avg ratio'] = (ratio - avg_ratio)/sd_ratio if sd_ratio else None
                newline['sds from event avg cpu'] = (cpu - event_avg_cpu)/event_sd_cpu if event_sd_cpu else None
                newline['sds from event avg memory'] = (memory - event_avg_memory)/event_sd_memory if event_sd_memory else None
                newline['sds from event avg ratio'] = (ratio - event_avg_ratio)/event_sd_ratio if event_sd_ratio else None
                output.append(newline)
                # Persist in batches of 3000 to bound memory use.
                if len(output) >= 3000:
                    print "================="
                    print "inside"
                    print count
                    client_task.saveData(output, numline=count)
                    count += 3000
                    output = []
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import decimal
import functools
import math
import os
import re
import sys
import tokenize
import unittest
import uuid
import custom_migration_operations.more_operations
import custom_migration_operations.operations
from django import get_version
from django.conf import settings
from django.core.validators import EmailValidator, RegexValidator
from django.db import migrations, models
from django.db.migrations.writer import (
MigrationWriter, OperationWriter, SettingsReference,
)
from django.test import SimpleTestCase, ignore_warnings, mock
from django.utils import datetime_safe, six
from django.utils._os import upath
from django.utils.deconstruct import deconstructible
from django.utils.encoding import force_str
from django.utils.functional import SimpleLazyObject
from django.utils.timezone import FixedOffset, get_default_timezone, utc
from django.utils.translation import ugettext_lazy as _
from .models import FoodManager, FoodQuerySet
# enum is in the stdlib on Python 3; on Python 2 it exists only via the
# third-party enum34 backport, so it may be missing.
try:
    import enum
except ImportError:
    enum = None
# Some expected serializer output differs on Python 3.6+ (e.g. re flags
# render as re.RegexFlag -- see test_serialize_class_based_validators).
PY36 = sys.version_info >= (3, 6)
class Money(decimal.Decimal):
    """Decimal subclass used to exercise serialization of custom
    deconstructible values."""

    def deconstruct(self):
        # Return the (dotted path, positional args, keyword args) triple
        # that Django's deconstruct protocol expects.
        klass = self.__class__
        path = '%s.%s' % (klass.__module__, klass.__name__)
        return path, [six.text_type(self)], {}
class TestModel1(object):
    # Fixture: upload_to is referenced directly from the class body, so the
    # FileField holds a plain function defined in class scope. Used to test
    # that such direct function references fail to serialize (see
    # test_serialize_direct_function_reference).
    def upload_to(self):
        return "somewhere dynamic"
    thing = models.FileField(upload_to=upload_to)
class OperationWriterTests(SimpleTestCase):
    """Check that OperationWriter renders custom migration operations as
    correctly indented, importable source snippets, covering positional
    args, keyword args, nesting and multi-line string values."""

    def test_empty_signature(self):
        operation = custom_migration_operations.operations.TestOperation()
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.TestOperation(\n'
            '),'
        )

    def test_args_signature(self):
        operation = custom_migration_operations.operations.ArgsOperation(1, 2)
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.ArgsOperation(\n'
            '    arg1=1,\n'
            '    arg2=2,\n'
            '),'
        )

    def test_kwargs_signature(self):
        operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1)
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.KwargsOperation(\n'
            '    kwarg1=1,\n'
            '),'
        )

    def test_args_kwargs_signature(self):
        operation = custom_migration_operations.operations.ArgsKwargsOperation(1, 2, kwarg2=4)
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.ArgsKwargsOperation(\n'
            '    arg1=1,\n'
            '    arg2=2,\n'
            '    kwarg2=4,\n'
            '),'
        )

    def test_nested_args_signature(self):
        # Operations passed as arguments must themselves be expanded.
        operation = custom_migration_operations.operations.ArgsOperation(
            custom_migration_operations.operations.ArgsOperation(1, 2),
            custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4)
        )
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.ArgsOperation(\n'
            '    arg1=custom_migration_operations.operations.ArgsOperation(\n'
            '        arg1=1,\n'
            '        arg2=2,\n'
            '    ),\n'
            '    arg2=custom_migration_operations.operations.KwargsOperation(\n'
            '        kwarg1=3,\n'
            '        kwarg2=4,\n'
            '    ),\n'
            '),'
        )

    def test_multiline_args_signature(self):
        # Embedded newlines must be escaped, not emitted literally.
        operation = custom_migration_operations.operations.ArgsOperation("test\n    arg1", "test\narg2")
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            "custom_migration_operations.operations.ArgsOperation(\n"
            "    arg1='test\\n    arg1',\n"
            "    arg2='test\\narg2',\n"
            "),"
        )

    def test_expand_args_signature(self):
        operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2])
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.ExpandArgsOperation(\n'
            '    arg=[\n'
            '        1,\n'
            '        2,\n'
            '    ],\n'
            '),'
        )

    def test_nested_operation_expand_args_signature(self):
        operation = custom_migration_operations.operations.ExpandArgsOperation(
            arg=[
                custom_migration_operations.operations.KwargsOperation(
                    kwarg1=1,
                    kwarg2=2,
                ),
            ]
        )
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.ExpandArgsOperation(\n'
            '    arg=[\n'
            '        custom_migration_operations.operations.KwargsOperation(\n'
            '            kwarg1=1,\n'
            '            kwarg2=2,\n'
            '        ),\n'
            '    ],\n'
            '),'
        )
class WriterTests(SimpleTestCase):
"""
Tests the migration writer (makes migration files from Migration instances)
"""
def safe_exec(self, string, value=None):
d = {}
try:
exec(force_str(string), globals(), d)
except Exception as e:
if value:
self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e))
else:
self.fail("Could not exec %r: %s" % (string.strip(), e))
return d
def serialize_round_trip(self, value):
string, imports = MigrationWriter.serialize(value)
return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result']
def assertSerializedEqual(self, value):
self.assertEqual(self.serialize_round_trip(value), value)
def assertSerializedResultEqual(self, value, target):
self.assertEqual(MigrationWriter.serialize(value), target)
def assertSerializedFieldEqual(self, value):
new_value = self.serialize_round_trip(value)
self.assertEqual(value.__class__, new_value.__class__)
self.assertEqual(value.max_length, new_value.max_length)
self.assertEqual(value.null, new_value.null)
self.assertEqual(value.unique, new_value.unique)
def test_serialize_numbers(self):
self.assertSerializedEqual(1)
self.assertSerializedEqual(1.2)
self.assertTrue(math.isinf(self.serialize_round_trip(float("inf"))))
self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf"))))
self.assertTrue(math.isnan(self.serialize_round_trip(float("nan"))))
self.assertSerializedEqual(decimal.Decimal('1.3'))
self.assertSerializedResultEqual(
decimal.Decimal('1.3'),
("Decimal('1.3')", {'from decimal import Decimal'})
)
self.assertSerializedEqual(Money('1.3'))
self.assertSerializedResultEqual(
Money('1.3'),
("migrations.test_writer.Money('1.3')", {'import migrations.test_writer'})
)
def test_serialize_constants(self):
self.assertSerializedEqual(None)
self.assertSerializedEqual(True)
self.assertSerializedEqual(False)
def test_serialize_strings(self):
self.assertSerializedEqual(b"foobar")
string, imports = MigrationWriter.serialize(b"foobar")
self.assertEqual(string, "b'foobar'")
self.assertSerializedEqual("föobár")
string, imports = MigrationWriter.serialize("foobar")
self.assertEqual(string, "'foobar'")
def test_serialize_multiline_strings(self):
self.assertSerializedEqual(b"foo\nbar")
string, imports = MigrationWriter.serialize(b"foo\nbar")
self.assertEqual(string, "b'foo\\nbar'")
self.assertSerializedEqual("föo\nbár")
string, imports = MigrationWriter.serialize("foo\nbar")
self.assertEqual(string, "'foo\\nbar'")
def test_serialize_collections(self):
self.assertSerializedEqual({1: 2})
self.assertSerializedEqual(["a", 2, True, None])
self.assertSerializedEqual({2, 3, "eighty"})
self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]})
self.assertSerializedEqual(_('Hello'))
def test_serialize_builtin_types(self):
self.assertSerializedEqual([list, tuple, dict, set, frozenset])
self.assertSerializedResultEqual(
[list, tuple, dict, set, frozenset],
("[list, tuple, dict, set, frozenset]", set())
)
def test_serialize_lazy_objects(self):
pattern = re.compile(r'^foo$', re.UNICODE)
lazy_pattern = SimpleLazyObject(lambda: pattern)
self.assertEqual(self.serialize_round_trip(lazy_pattern), pattern)
@unittest.skipUnless(enum, "enum34 is required on Python 2")
def test_serialize_enums(self):
class TextEnum(enum.Enum):
A = 'a-value'
B = 'value-b'
class BinaryEnum(enum.Enum):
A = b'a-value'
B = b'value-b'
class IntEnum(enum.IntEnum):
A = 1
B = 2
self.assertSerializedResultEqual(
TextEnum.A,
("migrations.test_writer.TextEnum('a-value')", {'import migrations.test_writer'})
)
self.assertSerializedResultEqual(
BinaryEnum.A,
("migrations.test_writer.BinaryEnum(b'a-value')", {'import migrations.test_writer'})
)
self.assertSerializedResultEqual(
IntEnum.B,
("migrations.test_writer.IntEnum(2)", {'import migrations.test_writer'})
)
field = models.CharField(default=TextEnum.B, choices=[(m.value, m) for m in TextEnum])
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.CharField(choices=["
"('a-value', migrations.test_writer.TextEnum('a-value')), "
"('value-b', migrations.test_writer.TextEnum('value-b'))], "
"default=migrations.test_writer.TextEnum('value-b'))"
)
field = models.CharField(default=BinaryEnum.B, choices=[(m.value, m) for m in BinaryEnum])
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.CharField(choices=["
"(b'a-value', migrations.test_writer.BinaryEnum(b'a-value')), "
"(b'value-b', migrations.test_writer.BinaryEnum(b'value-b'))], "
"default=migrations.test_writer.BinaryEnum(b'value-b'))"
)
field = models.IntegerField(default=IntEnum.A, choices=[(m.value, m) for m in IntEnum])
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.IntegerField(choices=["
"(1, migrations.test_writer.IntEnum(1)), "
"(2, migrations.test_writer.IntEnum(2))], "
"default=migrations.test_writer.IntEnum(1))"
)
def test_serialize_uuid(self):
self.assertSerializedEqual(uuid.uuid1())
self.assertSerializedEqual(uuid.uuid4())
uuid_a = uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')
uuid_b = uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')
self.assertSerializedResultEqual(
uuid_a,
("uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')", {'import uuid'})
)
self.assertSerializedResultEqual(
uuid_b,
("uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')", {'import uuid'})
)
field = models.UUIDField(choices=((uuid_a, 'UUID A'), (uuid_b, 'UUID B')), default=uuid_a)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.UUIDField(choices=["
"(uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'), 'UUID A'), "
"(uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2'), 'UUID B')], "
"default=uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'))"
)
def test_serialize_functions(self):
with self.assertRaisesMessage(ValueError, 'Cannot serialize function: lambda'):
self.assertSerializedEqual(lambda x: 42)
self.assertSerializedEqual(models.SET_NULL)
string, imports = MigrationWriter.serialize(models.SET(42))
self.assertEqual(string, 'models.SET(42)')
self.serialize_round_trip(models.SET(42))
def test_serialize_datetime(self):
self.assertSerializedEqual(datetime.datetime.utcnow())
self.assertSerializedEqual(datetime.datetime.utcnow)
self.assertSerializedEqual(datetime.datetime.today())
self.assertSerializedEqual(datetime.datetime.today)
self.assertSerializedEqual(datetime.date.today())
self.assertSerializedEqual(datetime.date.today)
self.assertSerializedEqual(datetime.datetime.now().time())
self.assertSerializedEqual(datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone()))
self.assertSerializedEqual(datetime.datetime(2013, 12, 31, 22, 1, tzinfo=FixedOffset(180)))
self.assertSerializedResultEqual(
datetime.datetime(2014, 1, 1, 1, 1),
("datetime.datetime(2014, 1, 1, 1, 1)", {'import datetime'})
)
self.assertSerializedResultEqual(
datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc),
(
"datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)",
{'import datetime', 'from django.utils.timezone import utc'},
)
)
def test_serialize_datetime_safe(self):
self.assertSerializedResultEqual(
datetime_safe.date(2014, 3, 31),
("datetime.date(2014, 3, 31)", {'import datetime'})
)
self.assertSerializedResultEqual(
datetime_safe.time(10, 25),
("datetime.time(10, 25)", {'import datetime'})
)
self.assertSerializedResultEqual(
datetime_safe.datetime(2014, 3, 31, 16, 4, 31),
("datetime.datetime(2014, 3, 31, 16, 4, 31)", {'import datetime'})
)
def test_serialize_fields(self):
self.assertSerializedFieldEqual(models.CharField(max_length=255))
self.assertSerializedResultEqual(
models.CharField(max_length=255),
("models.CharField(max_length=255)", {"from django.db import models"})
)
self.assertSerializedFieldEqual(models.TextField(null=True, blank=True))
self.assertSerializedResultEqual(
models.TextField(null=True, blank=True),
("models.TextField(blank=True, null=True)", {'from django.db import models'})
)
def test_serialize_settings(self):
self.assertSerializedEqual(SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL"))
self.assertSerializedResultEqual(
SettingsReference("someapp.model", "AUTH_USER_MODEL"),
("settings.AUTH_USER_MODEL", {"from django.conf import settings"})
)
def test_serialize_iterators(self):
self.assertSerializedResultEqual(
((x, x * x) for x in range(3)),
("((0, 0), (1, 1), (2, 4))", set())
)
def test_serialize_compiled_regex(self):
"""
Make sure compiled regex can be serialized.
"""
regex = re.compile(r'^\w+$', re.U)
self.assertSerializedEqual(regex)
def test_serialize_class_based_validators(self):
"""
Ticket #22943: Test serialization of class-based validators, including
compiled regexes.
"""
validator = RegexValidator(message="hello")
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(string, "django.core.validators.RegexValidator(message='hello')")
self.serialize_round_trip(validator)
# Test with a compiled regex.
validator = RegexValidator(regex=re.compile(r'^\w+$', re.U))
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(string, "django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$', 32))")
self.serialize_round_trip(validator)
# Test a string regex with flag
validator = RegexValidator(r'^[0-9]+$', flags=re.U)
string = MigrationWriter.serialize(validator)[0]
if PY36:
self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=re.RegexFlag(32))")
else:
self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=32)")
self.serialize_round_trip(validator)
# Test message and code
validator = RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(string, "django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')")
self.serialize_round_trip(validator)
# Test with a subclass.
validator = EmailValidator(message="hello")
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(string, "django.core.validators.EmailValidator(message='hello')")
self.serialize_round_trip(validator)
validator = deconstructible(path="migrations.test_writer.EmailValidator")(EmailValidator)(message="hello")
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(string, "migrations.test_writer.EmailValidator(message='hello')")
validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(message="hello")
with self.assertRaisesRegex(ImportError, "No module named '?custom'?"):
MigrationWriter.serialize(validator)
validator = deconstructible(path="django.core.validators.EmailValidator2")(EmailValidator)(message="hello")
with self.assertRaisesMessage(ValueError, "Could not find object EmailValidator2 in django.core.validators."):
MigrationWriter.serialize(validator)
def test_serialize_empty_nonempty_tuple(self):
"""
Ticket #22679: makemigrations generates invalid code for (an empty
tuple) default_permissions = ()
"""
empty_tuple = ()
one_item_tuple = ('a',)
many_items_tuple = ('a', 'b', 'c')
self.assertSerializedEqual(empty_tuple)
self.assertSerializedEqual(one_item_tuple)
self.assertSerializedEqual(many_items_tuple)
def test_serialize_builtins(self):
string, imports = MigrationWriter.serialize(range)
self.assertEqual(string, 'range')
self.assertEqual(imports, set())
@unittest.skipUnless(six.PY2, "Only applies on Python 2")
def test_serialize_direct_function_reference(self):
"""
Ticket #22436: You cannot use a function straight from its body
(e.g. define the method and use it in the same body)
"""
with self.assertRaises(ValueError):
self.serialize_round_trip(TestModel1.thing)
def test_serialize_local_function_reference(self):
"""
Neither py2 or py3 can serialize a reference in a local scope.
"""
class TestModel2(object):
def upload_to(self):
return "somewhere dynamic"
thing = models.FileField(upload_to=upload_to)
with self.assertRaises(ValueError):
self.serialize_round_trip(TestModel2.thing)
def test_serialize_local_function_reference_message(self):
"""
Make sure user is seeing which module/function is the issue
"""
class TestModel2(object):
def upload_to(self):
return "somewhere dynamic"
thing = models.FileField(upload_to=upload_to)
with self.assertRaisesMessage(ValueError, 'Could not find function upload_to in migrations.test_writer'):
self.serialize_round_trip(TestModel2.thing)
def test_serialize_managers(self):
self.assertSerializedEqual(models.Manager())
self.assertSerializedResultEqual(
FoodQuerySet.as_manager(),
('migrations.models.FoodQuerySet.as_manager()', {'import migrations.models'})
)
self.assertSerializedEqual(FoodManager('a', 'b'))
self.assertSerializedEqual(FoodManager('x', 'y', c=3, d=4))
def test_serialize_frozensets(self):
self.assertSerializedEqual(frozenset())
self.assertSerializedEqual(frozenset("let it go"))
def test_serialize_timedelta(self):
self.assertSerializedEqual(datetime.timedelta())
self.assertSerializedEqual(datetime.timedelta(minutes=42))
def test_serialize_functools_partial(self):
value = functools.partial(datetime.timedelta, 1, seconds=2)
result = self.serialize_round_trip(value)
self.assertEqual(result.func, value.func)
self.assertEqual(result.args, value.args)
self.assertEqual(result.keywords, value.keywords)
    def test_simple_migration(self):
        """
        Tests serializing a simple migration.
        """
        fields = {
            # NOTE(review): the 'charfield' key maps to a DateTimeField;
            # presumably intentional test data - confirm.
            'charfield': models.DateTimeField(default=datetime.datetime.utcnow),
            'datetimefield': models.DateTimeField(default=datetime.datetime.utcnow),
        }
        options = {
            'verbose_name': 'My model',
            'verbose_name_plural': 'My models',
        }
        # Exercise CreateModel with positional args, keyword args and a mix,
        # plus DeleteModel and AddField, all in one migration class.
        migration = type(str("Migration"), (migrations.Migration,), {
            "operations": [
                migrations.CreateModel("MyModel", tuple(fields.items()), options, (models.Model,)),
                migrations.CreateModel("MyModel2", tuple(fields.items()), bases=(models.Model,)),
                migrations.CreateModel(
                    name="MyModel3", fields=tuple(fields.items()), options=options, bases=(models.Model,)
                ),
                migrations.DeleteModel("MyModel"),
                migrations.AddField("OtherModel", "datetimefield", fields["datetimefield"]),
            ],
            "dependencies": [("testapp", "some_other_one")],
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        # We don't test the output formatting - that's too fragile.
        # Just make sure it runs for now, and that things look alright.
        result = self.safe_exec(output)
        self.assertIn("Migration", result)
        # In order to preserve compatibility with Python 3.2 unicode literals
        # prefix shouldn't be added to strings.
        tokens = tokenize.generate_tokens(six.StringIO(str(output)).readline)
        for token_type, token_source, (srow, scol), __, line in tokens:
            if token_type == tokenize.STRING:
                self.assertFalse(
                    token_source.startswith('u'),
                    "Unicode literal prefix found at %d:%d: %r" % (
                        srow, scol, line.strip()
                    )
                )
    # Silence warning on Python 2: Not importing directory
    # 'tests/migrations/migrations_test_apps/without_init_file/migrations':
    # missing __init__.py
    @ignore_warnings(category=ImportWarning)
    def test_migration_path(self):
        """Writer derives the on-disk migration path from the app's dotted path."""
        test_apps = [
            'migrations.migrations_test_apps.normal',
            'migrations.migrations_test_apps.with_package_model',
            'migrations.migrations_test_apps.without_init_file',
        ]
        base_dir = os.path.dirname(os.path.dirname(upath(__file__)))
        for app in test_apps:
            with self.modify_settings(INSTALLED_APPS={'append': app}):
                migration = migrations.Migration('0001_initial', app.split('.')[-1])
                # Expected: <app dir>/migrations/0001_initial.py
                expected_path = os.path.join(base_dir, *(app.split('.') + ['migrations', '0001_initial.py']))
                writer = MigrationWriter(migration)
                self.assertEqual(writer.path, expected_path)
    def test_custom_operation(self):
        """Migrations containing third-party operation classes still serialize."""
        migration = type(str("Migration"), (migrations.Migration,), {
            "operations": [
                custom_migration_operations.operations.TestOperation(),
                custom_migration_operations.operations.CreateModel(),
                migrations.CreateModel("MyModel", (), {}, (models.Model,)),
                custom_migration_operations.more_operations.TestOperation()
            ],
            "dependencies": []
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        result = self.safe_exec(output)
        self.assertIn("custom_migration_operations", result)
        # The same class name from two different modules must stay distinct
        # after the serialization round trip.
        self.assertNotEqual(
            result['custom_migration_operations'].operations.TestOperation,
            result['custom_migration_operations'].more_operations.TestOperation
        )
    def test_sorted_imports(self):
        """
        #24155 - Tests ordering of imports.
        """
        migration = type(str("Migration"), (migrations.Migration,), {
            "operations": [
                migrations.AddField("mymodel", "myfield", models.DateTimeField(
                    default=datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc),
                )),
            ]
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        # Plain "import x" lines come before "from x import y" lines.
        self.assertIn(
            "import datetime\n"
            "from django.db import migrations, models\n"
            "from django.utils.timezone import utc\n",
            output
        )
    def test_migration_file_header_comments(self):
        """
        Test comments at top of file.
        """
        migration = type(str("Migration"), (migrations.Migration,), {
            "operations": []
        })
        dt = datetime.datetime(2015, 7, 31, 4, 40, 0, 0, tzinfo=utc)
        # Freeze the writer's clock so the generated header is deterministic.
        with mock.patch('django.db.migrations.writer.now', lambda: dt):
            writer = MigrationWriter(migration)
            output = writer.as_string()
        self.assertTrue(
            output.startswith(
                "# -*- coding: utf-8 -*-\n"
                "# Generated by Django %(version)s on 2015-07-31 04:40\n" % {
                    'version': get_version(),
                }
            )
        )
    def test_models_import_omitted(self):
        """
        django.db.models shouldn't be imported if unused.
        """
        migration = type(str("Migration"), (migrations.Migration,), {
            "operations": [
                migrations.AlterModelOptions(
                    name='model',
                    options={'verbose_name': 'model', 'verbose_name_plural': 'models'},
                ),
            ]
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        # Only the bare migrations import should appear - no ", models".
        self.assertIn("from django.db import migrations\n", output)
    def test_deconstruct_class_arguments(self):
        """A class (not instance) used as a field argument serializes to a dotted path."""
        # Yes, it doesn't make sense to use a class as a default for a
        # CharField. It does make sense for custom fields though, for example
        # an enumfield that takes the enum class as an argument.
        class DeconstructibleInstances(object):
            def deconstruct(self):
                return ('DeconstructibleInstances', [], {})
        string = MigrationWriter.serialize(models.CharField(default=DeconstructibleInstances))[0]
        self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructibleInstances)")
|
detrout/pykolab | refs/heads/master | tests/functional/test_kolabd/__init__.py | 7 | import pykolab
def setup_package():
    """Load and finalize the pykolab configuration for this test package."""
    configuration = pykolab.getConf()
    configuration.finalize_conf(fatal=False)
|
dhavalmanjaria/dma-student-information-system | refs/heads/master | timetable/views.py | 1 | from django.shortcuts import render, redirect
from .models import TimeTable
from curriculum.models import Course, Semester, Subject
from django.contrib.auth.decorators import login_required, permission_required
import logging
from datetime import datetime
from collections import OrderedDict
from actions.views import SelectCourseSemester
from django.http import JsonResponse
import logging
LOG = logging.getLogger('app')
class SelectTimeTable(SelectCourseSemester):
    """
    Let the user pick a semester, then redirect to that semester's time
    table view.
    """

    def post(self, request):
        """Resolve the POSTed semester via the parent helper and redirect.

        The previous direct read of ``request.POST.get('semester')`` was
        dead code (immediately overwritten) and has been removed.
        """
        semester = super(SelectTimeTable, self).get_semester_from_post(
            request)
        LOG.debug(semester)
        return redirect('view-timetable', pk=semester.pk)

    def get(self, request):
        """Render the selection form; AJAX callers receive the options as JSON."""
        options = super(SelectTimeTable, self).get_options(request, all=True)
        if request.is_ajax():
            return JsonResponse(options)
        # Non-AJAX GET: the template drives its own option loading.
        return render(request, 'timetable/select-timetable.html')
def select_semester(request):
    """Render a page listing every semester so one can be picked."""
    context = {'semester': Semester.objects.all()}
    return render(request, 'timetable/select_semester.html', context)
def _get_weekday(d):
"""
Simple function to get the name of a weekday. Numbering based on
date.isoweek().
"""
weekdays = {
1: 'Monday',
2: 'Tuesday',
3: 'Wednesday',
4: 'Thursday',
5: 'Friday',
6: 'Saturday',
7: 'Sunday'
}
return weekdays[d]
def _get_sorted_times(semester):
    """
    Return the distinct lecture start times ("HHMM" strings) for the given
    semester, sorted in ascending numeric order.
    """
    entries = TimeTable.objects.filter(semester=semester)
    distinct_times = {entry.start_time for entry in entries}
    return sorted(distinct_times, key=int)
def _get_sorted_days(semester):
    """
    Return the distinct ISO weekday numbers with time table entries for the
    given semester, sorted Monday-first.

    The previous implementation built a ``set`` from an ``order_by``
    queryset, which discarded the ordering (set iteration order is not
    guaranteed); sorting explicitly makes the name honest. Callers only
    iterate the result, so returning a list is compatible.
    """
    days = {x.day_of_week for x in TimeTable.objects.filter(
        semester=semester)}
    return sorted(days)
def _get_timetable(semester):
    """
    Get subjects for each day (sorted), on each time (sorted) for the current
    semester as an OrderedDict(), i.e. a proper Time Table.

    Keys are weekday names; each value is a list with one subject entry per
    start time. Reuses _get_sorted_times() instead of duplicating the
    sort-distinct-times logic inline (the only change from the original).
    """
    days = _get_sorted_days(semester)
    times = _get_sorted_times(semester)
    timetable = OrderedDict()
    for d in days:
        row = timetable[_get_weekday(d)] = []
        for t in times:
            try:
                row += [x.subject for x in TimeTable.objects.filter(
                    day_of_week=d, start_time=t, semester=semester)]
            except AttributeError as at:
                LOG.debug("_get_timetable():" + str(at))
                # Subject name is None probably because that timeslot
                # has no subjects yet; pad with None so the table keeps
                # one cell per start time.
                row += [None for x in TimeTable.objects.filter(
                    day_of_week=d, start_time=t)]
    return timetable
def view_timetable(request, pk):
    """
    Display the time table for the semester with primary key ``pk``.

    Sets an 'edit' flag in the template context when the requesting user is
    the course HOD or holds the FacultyHOD permission.
    """
    context = {}
    semester = Semester.objects.get(pk=pk)
    LOG.debug(semester)
    context['semester'] = semester
    times = _get_sorted_times(semester)
    timetable = _get_timetable(semester)
    LOG.debug(timetable)
    context['timetable'] = timetable
    # Start times are stored as "HHMM" strings; parse for display.
    context['formatted_times'] = [datetime.strptime(t, "%H%M") for t in times]
    user = request.user
    if semester.course.hod == user or user.has_perm(
            'user_management.can_auth_FacultyHOD'):
        context['edit'] = True
    return render(request, 'timetable/view-timetable.html', context)
@login_required
@permission_required('user_management.can_write_time_table')
def edit_timetable(request, pk):
    """
    Edit a time table. Note: The success of this function relies heavily on
    the fact that the start_times are sorted in ascending order. The times
    must be strings representing a 24h format specifically to do this. This
    method may not be the most elegant way to do this, but it does work quite
    well.
    """
    semester = Semester.objects.get(pk=pk)
    days = _get_sorted_days(semester)
    times = _get_sorted_times(semester)
    LOG.debug("edit_TT: " + str(times))
    timetable = _get_timetable(semester)
    LOG.debug("edit_tt:" + str(timetable))
    # TODO: Write test to make sure order is preserved
    if request.method == "POST":
        for d in days:
            # The POSTed subject ids for a weekday line up positionally
            # with the sorted start times, hence the zip.
            for sub, t in zip(request.POST.getlist(_get_weekday(d)), times):
                LOG.debug("saving:" + _get_weekday(d) + ", " + t + ", "+ sub)
                tt = TimeTable.objects.get(
                    semester=semester, day_of_week=d, start_time=t)
                tt.subject = Subject.objects.get(pk=sub)
                tt.save()
        return redirect('view-timetable', pk=semester.pk)
    if request.method == "GET":
        # No entries yet: send the user to define lecture times first.
        if not timetable:
            return redirect('edit-times', pk=semester.pk)
        context = {}
        context['subjects'] = [s for s in semester.subject_set.all()]
        context['timetable'] = timetable
        context['times'] = times
        context['formatted_times'] = [datetime.strptime(t, "%H%M") for t in times]
        context['semester'] = semester
        return render(request, 'timetable/edit-timetable.html', context)
@login_required
@permission_required('user_management.can_write_time_table')
def edit_times(request, pk):
    """
    Add, rename or delete a lecture start time for a semester.

    POST parameters: 'hours', 'minutes', 'a' (AM/PM), optional
    'new_lecture' flag, 'submit' ("Save" or "Delete") and
    'lecture_number' (index into the sorted times list).
    """
    semester = Semester.objects.get(pk=pk)
    context = {}
    times = _get_sorted_times(semester)
    if request.method == "POST":
        hours = request.POST.get('hours')
        minutes = request.POST.get('minutes')
        ampm = request.POST.get('a')
        new = request.POST.get('new_lecture')
        # Convert "9 00 AM" style input into the stored "HHMM" 24h form.
        new_time = datetime.strptime(
            hours + " " + minutes + " " + ampm, "%I %M %p")
        new_time = datetime.strftime(new_time, "%H%M")
        # NOTE(review): 'submit' is only bound when the POST contains a
        # 'submit' key; a POST without it would raise NameError below -
        # confirm the form always sends it.
        if request.POST.get('submit'):
            submit = request.POST.get('submit')
        if new:
            LOG.debug(new)
            days = set(
                [x.day_of_week for x in TimeTable.objects.filter(
                    semester=semester)])
            # Empty table: seed Monday so the new time has a row at all.
            if len(days) < 1:
                tt = TimeTable(semester=semester, day_of_week=1,
                               start_time=new_time, subject=None)
                tt.save()
            for d in days:
                tt = TimeTable(semester=semester, day_of_week=d,
                               start_time=new_time, subject=None)
                LOG.debug(tt.get_day_of_week_display() + ":" + tt.start_time)
                tt.save()
        if submit == "Save":
            # NOTE(review): this try wraps only LOG.debug, which does not
            # raise ValueError - the except branch looks unreachable as
            # written; presumably the strptime call above was meant to be
            # guarded instead.
            try:
                LOG.debug(new_time)
            except ValueError as ve:
                LOG.debug(ve)
                context['errors'] = """Please check that the values entered are in
                the format HH MM AM/PM, eg: 9 00 AM"""
            #TODO: should sanitize this
            old_time = times[int(request.POST.get('lecture_number'))]
            tt_set = TimeTable.objects.filter(
                semester=semester, start_time=old_time)
            for entry in tt_set:
                entry.start_time = new_time
                entry.save()
        if submit == "Delete":
            old_time = times[int(request.POST.get('lecture_number'))]
            LOG.debug(old_time)
            # NOTE(review): deletes across ALL semesters sharing this start
            # time (no semester filter) - confirm this is intended.
            TimeTable.objects.filter(start_time=old_time).delete()
    times = _get_sorted_times(semester)  # new times
    context['semester'] = semester
    context['formatted_times'] = [datetime.strptime(t, "%H%M") for t in times]
    return render(request, 'timetable/edit-times.html', context)
@login_required
@permission_required('user_management.can_write_time_table')
def edit_days(request, pk):
    """
    Add or remove a weekday from a semester's time table.

    GET renders the form with all weekdays and the currently used ones;
    POST ('add_rem' = 'Add' or 'Delete', 'new_day' = ISO weekday number)
    creates or deletes one TimeTable row per existing start time.
    """
    semester = Semester.objects.get(pk=pk)
    context = {}
    context['semester'] = semester
    all_weekdays = {}
    for d in range(1, 8):
        all_weekdays[d] = _get_weekday(d)
    context['all_weekdays'] = all_weekdays
    current_days = {}
    for d in _get_sorted_days(semester):
        current_days[d] = _get_weekday(d)
    context['current_days'] = current_days
    if request.method == "POST":
        add_rem = request.POST['add_rem']
        new_day = request.POST['new_day']
        if add_rem == 'Add':
            # Mirror the existing schedule onto the new day: one (empty)
            # slot per known start time.
            times = _get_sorted_times(semester)
            for t in times:
                tt, created = TimeTable.objects.get_or_create(
                    semester=semester, day_of_week=new_day, start_time=t)
                if created:
                    tt.save()
            context['success'] = True
        if add_rem == 'Delete':
            tt = TimeTable.objects.filter(
                day_of_week=new_day, semester=semester)
            tt.delete()
            context['success'] = True
        return redirect('edit-days', pk=semester.pk)
    return render(request, 'timetable/edit-days.html', context)
|
idea4bsd/idea4bsd | refs/heads/idea4bsd-master | python/testData/resolve/multiFile/importOsPath2/yos.py | 83 | import sys
# Test fixture for import resolution: bind `path` to one of two candidate
# modules (the False branch is intentionally dead) and register it under
# the dotted name "yos.path" so that `import yos.path` resolves to it.
if True:
    import ypath as path
else:
    import zpath as path
sys.modules['yos.path'] = path
|
ProfessorX/Config | refs/heads/master | .PyCharm30/system/python_stubs/-1247972723/PyKDE4/kdecore/KLockFile.py | 1 | # encoding: utf-8
# module PyKDE4.kdecore
# from /usr/lib/python2.7/dist-packages/PyKDE4/kdecore.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtNetwork as __PyQt4_QtNetwork
class KLockFile(): # skipped bases: <type 'sip.wrapper'>
    """Generated stub for kdecore.KLockFile; real signatures are unknown
    to the stub generator, so every method body is a placeholder."""
    def getLockInfo(self, *args, **kwargs): # real signature unknown
        pass

    def isLocked(self, *args, **kwargs): # real signature unknown
        pass

    def lock(self, *args, **kwargs): # real signature unknown
        pass

    def setStaleTime(self, *args, **kwargs): # real signature unknown
        pass

    def staleTime(self, *args, **kwargs): # real signature unknown
        pass

    def unlock(self, *args, **kwargs): # real signature unknown
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    __weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """list of weak references to the object (if defined)"""

    # Enum-like constants mirrored from the C++ API; '' placeholders mark
    # values the generator could not resolve.
    ForceFlag = 2
    LockError = 2
    LockFail = 1
    LockFlag = None # (!) real value is ''
    LockFlags = None # (!) real value is ''
    LockOK = 0
    LockResult = None # (!) real value is ''
    LockStale = 3
    NoBlockFlag = 1
|
glycerine/goq | refs/heads/master | vendor/git.apache.org/thrift.git/tutorial/py.tornado/PythonServer.py | 34 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import glob
import sys
sys.path.append('gen-py.tornado')
sys.path.insert(0, glob.glob('../../lib/py/build/lib*')[0])
from tutorial import Calculator
from tutorial.ttypes import Operation, InvalidOperation
from shared.ttypes import SharedStruct
from thrift import TTornado
from thrift.protocol import TBinaryProtocol
from tornado import ioloop
class CalculatorHandler(object):
    """Implementation of the tutorial Calculator service handler."""

    def __init__(self):
        # Maps logid -> SharedStruct recording the last result for that id.
        self.log = {}

    def ping(self):
        print("ping()")

    def add(self, n1, n2):
        print("add({}, {})".format(n1, n2))
        return n1 + n2

    def calculate(self, logid, work):
        """Evaluate `work`, record the result under `logid`, return it."""
        print("calculate({}, {})".format(logid, work))

        op, lhs, rhs = work.op, work.num1, work.num2
        if op == Operation.DIVIDE and rhs == 0:
            error = InvalidOperation()
            error.whatOp = op
            error.why = "Cannot divide by 0"
            raise error

        if op == Operation.ADD:
            val = lhs + rhs
        elif op == Operation.SUBTRACT:
            val = lhs - rhs
        elif op == Operation.MULTIPLY:
            val = lhs * rhs
        elif op == Operation.DIVIDE:
            val = lhs / rhs
        else:
            error = InvalidOperation()
            error.whatOp = op
            error.why = "Invalid operation"
            raise error

        entry = SharedStruct()
        entry.key = logid
        entry.value = '%d' % (val)
        self.log[logid] = entry
        return val

    def getStruct(self, key):
        print("getStruct({})".format(key))
        return self.log[key]

    def zip(self):
        print("zip()")
def main():
    """Wire the handler into a TTornadoServer on port 9090 and run the IOLoop."""
    handler = CalculatorHandler()
    processor = Calculator.Processor(handler)
    pfactory = TBinaryProtocol.TBinaryProtocolFactory()
    server = TTornado.TTornadoServer(processor, pfactory)

    print("Starting the server...")
    server.bind(9090)
    server.start(1)
    ioloop.IOLoop.instance().start()

    # Only reached if the IOLoop is stopped externally.
    print("done.")
if __name__ == "__main__":
main()
|
labsanmartin/Bika-LIMS | refs/heads/master | bika/lims/jsonapi/remove.py | 5 | from bika.lims.jsonapi.read import read
from plone.jsonapi.core import router
from plone.jsonapi.core.interfaces import IRouteProvider
from Products.CMFCore.utils import getToolByName
from zExceptions import BadRequest
from zope import interface
import json
import transaction
class Remove(object):
    # JSON API route provider: deletes an object (and its children) by UID,
    # rolling the transaction back if any contained deletion fails.
    interface.implements(IRouteProvider)

    def initialize(self, context, request):
        # Required by IRouteProvider; nothing to set up.
        pass

    @property
    def routes(self):
        return (
            ("/remove", "remove", self.remove, dict(methods=['GET', 'POST'])),
        )

    def remove(self, context, request):
        """/@@API/remove: Remove existing object

        Required parameters:

            - UID: UID for the object.

        {
            runtime: Function running time.
            error: true or string(message) if error. false if no error.
            success: true or string(message) if success. false if no success.
        }

        So.

        >>> portal = layer['portal']
        >>> portal_url = portal.absolute_url()
        >>> from plone.app.testing import SITE_OWNER_NAME
        >>> from plone.app.testing import SITE_OWNER_PASSWORD

        >>> blah = portal.portal_catalog(Type = "Contact")[-1]
        >>> uid = blah.UID

        >>> browser = layer['getBrowser'](portal, loggedIn=True, username=SITE_OWNER_NAME, password=SITE_OWNER_PASSWORD)
        >>> browser.open(portal_url+"/@@API/remove?UID="+uid)
        >>> browser.contents
        '{..."success": true...}'
        """
        # Savepoint lets us undo partial deletions on failure.
        savepoint = transaction.savepoint()
        uc = getToolByName(context, 'uid_catalog')

        _uid = request.get('UID', '')
        if not _uid:
            raise BadRequest("No UID specified in request")

        ret = {
            "url": router.url_for("remove", force_external=True),
            "success": True,
            "error": False,
        }

        data = uc(UID=_uid)
        if not data:
            raise BadRequest("No objects found")

        for proxy in data:
            try:
                # Delete through the parent container so Zope lifecycle
                # events fire normally.
                parent = proxy.getObject().aq_parent
                parent.manage_delObjects([proxy.id])
            except Exception as e:
                # Roll back anything already deleted; a partial delete
                # would leave the database inconsistent.
                savepoint.rollback()
                msg = "Cannot delete '{0}' because ({1})".format(_uid, e.message)
                raise BadRequest(msg)

        return ret
|
nicolasderam/itools | refs/heads/master | itools/tmx/__init__.py | 3 | # -*- coding: UTF-8 -*-
# Copyright (C) 2005-2009 J. David Ibáñez <jdavid.ibp@gmail.com>
# Copyright (C) 2008 David Versmisse <versmisse@lil.univ-littoral.fr>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Import from itools
from itools.core import add_type, get_abspath
from tmx import TMXFile, Sentence, TMXUnit, TMXNote
from itools.xml import register_dtd
# Public API of the package.
__all__ = ['TMXFile', 'Sentence', 'TMXUnit', 'TMXNote']

# Map the TMX mime type onto the .tmx extension.
add_type('application/x-tmx', '.tmx')

# Register DTD
register_dtd(get_abspath('tmx14.dtd'),
             uri='http://www.lisa.org/tmx/tmx14.dtd')
|
p4datasystems/CarnotKEdist | refs/heads/master | dist/Lib/unittest/test/test_loader.py | 99 | import sys
import types
import unittest
class Test_TestLoader(unittest.TestCase):
    ### Tests for TestLoader.loadTestsFromTestCase
    ################################################################

    # "Return a suite of all tests cases contained in the TestCase-derived
    # class testCaseClass"
    def test_loadTestsFromTestCase(self):
        class Foo(unittest.TestCase):
            def test_1(self): pass
            def test_2(self): pass
            def foo_bar(self): pass
        # Only methods matching the 'test' prefix are collected.
        tests = unittest.TestSuite([Foo('test_1'), Foo('test_2')])

        loader = unittest.TestLoader()
        self.assertEqual(loader.loadTestsFromTestCase(Foo), tests)
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# Make sure it does the right thing even if no tests were found
def test_loadTestsFromTestCase__no_matches(self):
class Foo(unittest.TestCase):
def foo_bar(self): pass
empty_suite = unittest.TestSuite()
loader = unittest.TestLoader()
self.assertEqual(loader.loadTestsFromTestCase(Foo), empty_suite)
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# What happens if loadTestsFromTestCase() is given an object
# that isn't a subclass of TestCase? Specifically, what happens
# if testCaseClass is a subclass of TestSuite?
#
# This is checked for specifically in the code, so we better add a
# test for it.
def test_loadTestsFromTestCase__TestSuite_subclass(self):
class NotATestCase(unittest.TestSuite):
pass
loader = unittest.TestLoader()
try:
loader.loadTestsFromTestCase(NotATestCase)
except TypeError:
pass
else:
self.fail('Should raise TypeError')
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# Make sure loadTestsFromTestCase() picks up the default test method
# name (as specified by TestCase), even though the method name does
# not match the default TestLoader.testMethodPrefix string
def test_loadTestsFromTestCase__default_method_name(self):
class Foo(unittest.TestCase):
def runTest(self):
pass
loader = unittest.TestLoader()
# This has to be false for the test to succeed
self.assertFalse('runTest'.startswith(loader.testMethodPrefix))
suite = loader.loadTestsFromTestCase(Foo)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [Foo('runTest')])
    ################################################################
    ### /Tests for TestLoader.loadTestsFromTestCase

    ### Tests for TestLoader.loadTestsFromModule
    ################################################################

    # "This method searches `module` for classes derived from TestCase"
    def test_loadTestsFromModule__TestCase_subclass(self):
        # Build a throwaway module object instead of importing a fixture.
        m = types.ModuleType('m')
        class MyTestCase(unittest.TestCase):
            def test(self):
                pass
        m.testcase_1 = MyTestCase

        loader = unittest.TestLoader()
        suite = loader.loadTestsFromModule(m)
        self.assertIsInstance(suite, loader.suiteClass)

        expected = [loader.suiteClass([MyTestCase('test')])]
        self.assertEqual(list(suite), expected)
# "This method searches `module` for classes derived from TestCase"
#
# What happens if no tests are found (no TestCase instances)?
def test_loadTestsFromModule__no_TestCase_instances(self):
m = types.ModuleType('m')
loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [])
# "This method searches `module` for classes derived from TestCase"
#
# What happens if no tests are found (TestCases instances, but no tests)?
def test_loadTestsFromModule__no_TestCase_tests(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [loader.suiteClass()])
# "This method searches `module` for classes derived from TestCase"s
#
# What happens if loadTestsFromModule() is given something other
# than a module?
#
# XXX Currently, it succeeds anyway. This flexibility
# should either be documented or loadTestsFromModule() should
# raise a TypeError
#
# XXX Certain people are using this behaviour. We'll add a test for it
def test_loadTestsFromModule__not_a_module(self):
class MyTestCase(unittest.TestCase):
def test(self):
pass
class NotAModule(object):
test_2 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(NotAModule)
reference = [unittest.TestSuite([MyTestCase('test')])]
self.assertEqual(list(suite), reference)
    # Check that loadTestsFromModule honors (or not) a module
    # with a load_tests function.
    def test_loadTestsFromModule__load_tests(self):
        m = types.ModuleType('m')
        class MyTestCase(unittest.TestCase):
            def test(self):
                pass
        m.testcase_1 = MyTestCase

        load_tests_args = []
        def load_tests(loader, tests, pattern):
            self.assertIsInstance(tests, unittest.TestSuite)
            load_tests_args.extend((loader, tests, pattern))
            return tests
        m.load_tests = load_tests

        loader = unittest.TestLoader()
        suite = loader.loadTestsFromModule(m)
        self.assertIsInstance(suite, unittest.TestSuite)
        # The hook receives (loader, standard_tests, pattern).
        self.assertEqual(load_tests_args, [loader, suite, None])

        load_tests_args = []
        # use_load_tests=False must bypass the hook entirely.
        suite = loader.loadTestsFromModule(m, use_load_tests=False)
        self.assertEqual(load_tests_args, [])
    def test_loadTestsFromModule__faulty_load_tests(self):
        """A load_tests that raises becomes one synthetic failing test."""
        m = types.ModuleType('m')

        def load_tests(loader, tests, pattern):
            raise TypeError('some failure')
        m.load_tests = load_tests

        loader = unittest.TestLoader()
        suite = loader.loadTestsFromModule(m)
        self.assertIsInstance(suite, unittest.TestSuite)
        self.assertEqual(suite.countTestCases(), 1)
        test = list(suite)[0]

        # Running the synthetic test re-raises the original error.
        self.assertRaisesRegexp(TypeError, "some failure", test.m)
    ################################################################
    ### /Tests for TestLoader.loadTestsFromModule()

    ### Tests for TestLoader.loadTestsFromName()
    ################################################################

    # "The specifier name is a ``dotted name'' that may resolve either to
    # a module, a test case class, a TestSuite instance, a test method
    # within a test case class, or a callable object which returns a
    # TestCase or TestSuite instance."
    #
    # Is ValueError raised in response to an empty name?
    def test_loadTestsFromName__empty_name(self):
        loader = unittest.TestLoader()

        try:
            loader.loadTestsFromName('')
        except ValueError, e:
            self.assertEqual(str(e), "Empty module name")
        else:
            self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the name contains invalid characters?
def test_loadTestsFromName__malformed_name(self):
loader = unittest.TestLoader()
# XXX Should this raise ValueError or ImportError?
try:
loader.loadTestsFromName('abc () //')
except ValueError:
pass
except ImportError:
pass
else:
self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve ... to a
# module"
#
# What happens when a module by that name can't be found?
def test_loadTestsFromName__unknown_module_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('sdasfasfasdf')
except ImportError, e:
self.assertEqual(str(e), "No module named sdasfasfasdf")
else:
self.fail("TestLoader.loadTestsFromName failed to raise ImportError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the module is found, but the attribute can't?
def test_loadTestsFromName__unknown_attr_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('unittest.sdasfasfasdf')
except AttributeError, e:
self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when we provide the module, but the attribute can't be
# found?
def test_loadTestsFromName__relative_unknown_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('sdasfasfasdf', unittest)
except AttributeError, e:
self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromName raise ValueError when passed an empty
# name relative to a provided module?
#
# XXX Should probably raise a ValueError instead of an AttributeError
def test_loadTestsFromName__relative_empty_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('', unittest)
except AttributeError:
pass
else:
self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when an impossible name is given, relative to the provided
# `module`?
def test_loadTestsFromName__relative_malformed_name(self):
loader = unittest.TestLoader()
# XXX Should this raise AttributeError or ValueError?
try:
loader.loadTestsFromName('abc () //', unittest)
except ValueError:
pass
except AttributeError:
pass
else:
self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromName raise TypeError when the `module` argument
# isn't a module object?
#
# XXX Accepts the not-a-module object, ignorning the object's type
# This should raise an exception or the method name should be changed
#
# XXX Some people are relying on this, so keep it for now
def test_loadTestsFromName__relative_not_a_module(self):
class MyTestCase(unittest.TestCase):
def test(self):
pass
class NotAModule(object):
test_2 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('test_2', NotAModule)
reference = [MyTestCase('test')]
self.assertEqual(list(suite), reference)
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does it raise an exception if the name resolves to an invalid
# object?
def test_loadTestsFromName__relative_bad_object(self):
m = types.ModuleType('m')
m.testcase_1 = object()
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('testcase_1', m)
except TypeError:
pass
else:
self.fail("Should have raised TypeError")
# "The specifier name is a ``dotted name'' that may
# resolve either to ... a test case class"
def test_loadTestsFromName__relative_TestCase_subclass(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('testcase_1', m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
def test_loadTestsFromName__relative_TestSuite(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testsuite = unittest.TestSuite([MyTestCase('test')])
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('testsuite', m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a test method within a test case class"
def test_loadTestsFromName__relative_testmethod(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('testcase_1.test', m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does loadTestsFromName() raise the proper exception when trying to
# resolve "a test method within a test case class" that doesn't exist
# for the given name (relative to a provided module)?
def test_loadTestsFromName__relative_invalid_testmethod(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('testcase_1.testfoo', m)
except AttributeError, e:
self.assertEqual(str(e), "type object 'MyTestCase' has no attribute 'testfoo'")
else:
self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a ... TestSuite instance"
def test_loadTestsFromName__callable__TestSuite(self):
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
testcase_2 = unittest.FunctionTestCase(lambda: None)
def return_TestSuite():
return unittest.TestSuite([testcase_1, testcase_2])
m.return_TestSuite = return_TestSuite
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('return_TestSuite', m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [testcase_1, testcase_2])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase ... instance"
def test_loadTestsFromName__callable__TestCase_instance(self):
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
def return_TestCase():
return testcase_1
m.return_TestCase = return_TestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromName('return_TestCase', m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [testcase_1])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase ... instance"
#*****************************************************************
#Override the suiteClass attribute to ensure that the suiteClass
#attribute is used
def test_loadTestsFromName__callable__TestCase_instance_ProperSuiteClass(self):
class SubTestSuite(unittest.TestSuite):
pass
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
def return_TestCase():
return testcase_1
m.return_TestCase = return_TestCase
loader = unittest.TestLoader()
loader.suiteClass = SubTestSuite
suite = loader.loadTestsFromName('return_TestCase', m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [testcase_1])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a test method within a test case class"
#*****************************************************************
#Override the suiteClass attribute to ensure that the suiteClass
#attribute is used
def test_loadTestsFromName__relative_testmethod_ProperSuiteClass(self):
class SubTestSuite(unittest.TestSuite):
pass
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
loader.suiteClass=SubTestSuite
suite = loader.loadTestsFromName('testcase_1.test', m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# What happens if the callable returns something else?
def test_loadTestsFromName__callable__wrong_type(self):
m = types.ModuleType('m')
def return_wrong():
return 6
m.return_wrong = return_wrong
loader = unittest.TestLoader()
try:
loader.loadTestsFromName('return_wrong', m)
except TypeError:
pass
else:
self.fail("TestLoader.loadTestsFromName failed to raise TypeError")
# "The specifier can refer to modules and packages which have not been
# imported; they will be imported as a side-effect"
def test_loadTestsFromName__module_not_loaded(self):
# We're going to try to load this module as a side-effect, so it
# better not be loaded before we try.
#
module_name = 'unittest.test.dummy'
sys.modules.pop(module_name, None)
loader = unittest.TestLoader()
try:
suite = loader.loadTestsFromName(module_name)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [])
# module should now be loaded, thanks to loadTestsFromName()
self.assertIn(module_name, sys.modules)
finally:
if module_name in sys.modules:
del sys.modules[module_name]
################################################################
### Tests for TestLoader.loadTestsFromName()
### Tests for TestLoader.loadTestsFromNames()
################################################################
# "Similar to loadTestsFromName(), but takes a sequence of names rather
# than a single name."
#
# What happens if that sequence of names is empty?
def test_loadTestsFromNames__empty_name_list(self):
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames([])
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [])
# "Similar to loadTestsFromName(), but takes a sequence of names rather
# than a single name."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens if that sequence of names is empty?
#
# XXX Should this raise a ValueError or just return an empty TestSuite?
def test_loadTestsFromNames__relative_empty_name_list(self):
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames([], unittest)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Is ValueError raised in response to an empty name?
def test_loadTestsFromNames__empty_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames([''])
except ValueError, e:
self.assertEqual(str(e), "Empty module name")
else:
self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when presented with an impossible module name?
def test_loadTestsFromNames__malformed_name(self):
loader = unittest.TestLoader()
# XXX Should this raise ValueError or ImportError?
try:
loader.loadTestsFromNames(['abc () //'])
except ValueError:
pass
except ImportError:
pass
else:
self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when no module can be found for the given name?
def test_loadTestsFromNames__unknown_module_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['sdasfasfasdf'])
except ImportError, e:
self.assertEqual(str(e), "No module named sdasfasfasdf")
else:
self.fail("TestLoader.loadTestsFromNames failed to raise ImportError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the module can be found, but not the attribute?
def test_loadTestsFromNames__unknown_attr_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['unittest.sdasfasfasdf', 'unittest'])
except AttributeError, e:
self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromNames failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when given an unknown attribute on a specified `module`
# argument?
def test_loadTestsFromNames__unknown_name_relative_1(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['sdasfasfasdf'], unittest)
except AttributeError, e:
self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# Do unknown attributes (relative to a provided module) still raise an
# exception even in the presence of valid attribute names?
def test_loadTestsFromNames__unknown_name_relative_2(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['TestCase', 'sdasfasfasdf'], unittest)
except AttributeError, e:
self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
else:
self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when faced with the empty string?
#
# XXX This currently raises AttributeError, though ValueError is probably
# more appropriate
def test_loadTestsFromNames__relative_empty_name(self):
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames([''], unittest)
except AttributeError:
pass
else:
self.fail("Failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when presented with an impossible attribute name?
def test_loadTestsFromNames__relative_malformed_name(self):
loader = unittest.TestLoader()
# XXX Should this raise AttributeError or ValueError?
try:
loader.loadTestsFromNames(['abc () //'], unittest)
except AttributeError:
pass
except ValueError:
pass
else:
self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromNames() make sure the provided `module` is in fact
# a module?
#
# XXX This validation is currently not done. This flexibility should
# either be documented or a TypeError should be raised.
def test_loadTestsFromNames__relative_not_a_module(self):
class MyTestCase(unittest.TestCase):
def test(self):
pass
class NotAModule(object):
test_2 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['test_2'], NotAModule)
reference = [unittest.TestSuite([MyTestCase('test')])]
self.assertEqual(list(suite), reference)
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does it raise an exception if the name resolves to an invalid
# object?
def test_loadTestsFromNames__relative_bad_object(self):
m = types.ModuleType('m')
m.testcase_1 = object()
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['testcase_1'], m)
except TypeError:
pass
else:
self.fail("Should have raised TypeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a test case class"
def test_loadTestsFromNames__relative_TestCase_subclass(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['testcase_1'], m)
self.assertIsInstance(suite, loader.suiteClass)
expected = loader.suiteClass([MyTestCase('test')])
self.assertEqual(list(suite), [expected])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a TestSuite instance"
def test_loadTestsFromNames__relative_TestSuite(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testsuite = unittest.TestSuite([MyTestCase('test')])
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['testsuite'], m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [m.testsuite])
# "The specifier name is a ``dotted name'' that may resolve ... to ... a
# test method within a test case class"
def test_loadTestsFromNames__relative_testmethod(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['testcase_1.test'], m)
self.assertIsInstance(suite, loader.suiteClass)
ref_suite = unittest.TestSuite([MyTestCase('test')])
self.assertEqual(list(suite), [ref_suite])
# "The specifier name is a ``dotted name'' that may resolve ... to ... a
# test method within a test case class"
#
# Does the method gracefully handle names that initially look like they
# resolve to "a test method within a test case class" but don't?
def test_loadTestsFromNames__relative_invalid_testmethod(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['testcase_1.testfoo'], m)
except AttributeError, e:
self.assertEqual(str(e), "type object 'MyTestCase' has no attribute 'testfoo'")
else:
self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a ... TestSuite instance"
def test_loadTestsFromNames__callable__TestSuite(self):
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
testcase_2 = unittest.FunctionTestCase(lambda: None)
def return_TestSuite():
return unittest.TestSuite([testcase_1, testcase_2])
m.return_TestSuite = return_TestSuite
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['return_TestSuite'], m)
self.assertIsInstance(suite, loader.suiteClass)
expected = unittest.TestSuite([testcase_1, testcase_2])
self.assertEqual(list(suite), [expected])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase ... instance"
def test_loadTestsFromNames__callable__TestCase_instance(self):
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
def return_TestCase():
return testcase_1
m.return_TestCase = return_TestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['return_TestCase'], m)
self.assertIsInstance(suite, loader.suiteClass)
ref_suite = unittest.TestSuite([testcase_1])
self.assertEqual(list(suite), [ref_suite])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# Are staticmethods handled correctly?
def test_loadTestsFromNames__callable__call_staticmethod(self):
m = types.ModuleType('m')
class Test1(unittest.TestCase):
def test(self):
pass
testcase_1 = Test1('test')
class Foo(unittest.TestCase):
@staticmethod
def foo():
return testcase_1
m.Foo = Foo
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['Foo.foo'], m)
self.assertIsInstance(suite, loader.suiteClass)
ref_suite = unittest.TestSuite([testcase_1])
self.assertEqual(list(suite), [ref_suite])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# What happens when the callable returns something else?
def test_loadTestsFromNames__callable__wrong_type(self):
m = types.ModuleType('m')
def return_wrong():
return 6
m.return_wrong = return_wrong
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['return_wrong'], m)
except TypeError:
pass
else:
self.fail("TestLoader.loadTestsFromNames failed to raise TypeError")
# "The specifier can refer to modules and packages which have not been
# imported; they will be imported as a side-effect"
def test_loadTestsFromNames__module_not_loaded(self):
# We're going to try to load this module as a side-effect, so it
# better not be loaded before we try.
#
module_name = 'unittest.test.dummy'
sys.modules.pop(module_name, None)
loader = unittest.TestLoader()
try:
suite = loader.loadTestsFromNames([module_name])
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [unittest.TestSuite()])
# module should now be loaded, thanks to loadTestsFromName()
self.assertIn(module_name, sys.modules)
finally:
if module_name in sys.modules:
del sys.modules[module_name]
################################################################
### /Tests for TestLoader.loadTestsFromNames()
### Tests for TestLoader.getTestCaseNames()
################################################################
# "Return a sorted sequence of method names found within testCaseClass"
#
# Test.foobar is defined to make sure getTestCaseNames() respects
# loader.testMethodPrefix
def test_getTestCaseNames(self):
class Test(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foobar(self): pass
loader = unittest.TestLoader()
self.assertEqual(loader.getTestCaseNames(Test), ['test_1', 'test_2'])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Does getTestCaseNames() behave appropriately if no tests are found?
def test_getTestCaseNames__no_tests(self):
class Test(unittest.TestCase):
def foobar(self): pass
loader = unittest.TestLoader()
self.assertEqual(loader.getTestCaseNames(Test), [])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Are not-TestCases handled gracefully?
#
# XXX This should raise a TypeError, not return a list
#
# XXX It's too late in the 2.5 release cycle to fix this, but it should
# probably be revisited for 2.6
def test_getTestCaseNames__not_a_TestCase(self):
class BadCase(int):
def test_foo(self):
pass
loader = unittest.TestLoader()
names = loader.getTestCaseNames(BadCase)
self.assertEqual(names, ['test_foo'])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Make sure inherited names are handled.
#
# TestP.foobar is defined to make sure getTestCaseNames() respects
# loader.testMethodPrefix
def test_getTestCaseNames__inheritance(self):
class TestP(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foobar(self): pass
class TestC(TestP):
def test_1(self): pass
def test_3(self): pass
loader = unittest.TestLoader()
names = ['test_1', 'test_2', 'test_3']
self.assertEqual(loader.getTestCaseNames(TestC), names)
################################################################
### /Tests for TestLoader.getTestCaseNames()
### Tests for TestLoader.testMethodPrefix
################################################################
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromTestCase(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
tests_1 = unittest.TestSuite([Foo('foo_bar')])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromModule(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = [unittest.TestSuite([Foo('foo_bar')])]
tests_2 = [unittest.TestSuite([Foo('test_1'), Foo('test_2')])]
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(list(loader.loadTestsFromModule(m)), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(list(loader.loadTestsFromModule(m)), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromName(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = unittest.TestSuite([Foo('foo_bar')])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromName('Foo', m), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromName('Foo', m), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromNames(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = unittest.TestSuite([unittest.TestSuite([Foo('foo_bar')])])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
tests_2 = unittest.TestSuite([tests_2])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests_2)
# "The default value is 'test'"
def test_testMethodPrefix__default_value(self):
loader = unittest.TestLoader()
self.assertTrue(loader.testMethodPrefix == 'test')
################################################################
### /Tests for TestLoader.testMethodPrefix
### Tests for TestLoader.sortTestMethodsUsing
################################################################

# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromTestCase(self):
    """A reversed comparator must reverse the loaded test order."""
    def descending(x, y):
        return -cmp(x, y)
    class Foo(unittest.TestCase):
        def test_1(self): pass
        def test_2(self): pass
    ldr = unittest.TestLoader()
    ldr.sortTestMethodsUsing = descending
    expected = ldr.suiteClass([Foo('test_2'), Foo('test_1')])
    self.assertEqual(ldr.loadTestsFromTestCase(Foo), expected)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromModule(self):
def reversed_cmp(x, y):
return -cmp(x, y)
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = [loader.suiteClass([Foo('test_2'), Foo('test_1')])]
self.assertEqual(list(loader.loadTestsFromModule(m)), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromName(self):
def reversed_cmp(x, y):
return -cmp(x, y)
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = loader.suiteClass([Foo('test_2'), Foo('test_1')])
self.assertEqual(loader.loadTestsFromName('Foo', m), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromNames(self):
def reversed_cmp(x, y):
return -cmp(x, y)
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = [loader.suiteClass([Foo('test_2'), Foo('test_1')])]
self.assertEqual(list(loader.loadTestsFromNames(['Foo'], m)), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames()"
#
# Does it actually affect getTestCaseNames()?
def test_sortTestMethodsUsing__getTestCaseNames(self):
def reversed_cmp(x, y):
return -cmp(x, y)
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
test_names = ['test_2', 'test_1']
self.assertEqual(loader.getTestCaseNames(Foo), test_names)
# "The default value is the built-in cmp() function"
def test_sortTestMethodsUsing__default_value(self):
loader = unittest.TestLoader()
self.assertTrue(loader.sortTestMethodsUsing is cmp)
# "it can be set to None to disable the sort."
#
# XXX How is this different from reassigning cmp? Are the tests returned
# in a random order or something? This behaviour should die
def test_sortTestMethodsUsing__None(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = None
test_names = ['test_2', 'test_1']
self.assertEqual(set(loader.getTestCaseNames(Foo)), set(test_names))
################################################################
### /Tests for TestLoader.sortTestMethodsUsing
### Tests for TestLoader.suiteClass
################################################################
# "Callable object that constructs a test suite from a list of tests."
def test_suiteClass__loadTestsFromTestCase(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
tests = [Foo('test_1'), Foo('test_2')]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests)
# It is implicit in the documentation for TestLoader.suiteClass that
# all TestLoader.loadTestsFrom* methods respect it. Let's make sure
def test_suiteClass__loadTestsFromModule(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests = [[Foo('test_1'), Foo('test_2')]]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromModule(m), tests)
# It is implicit in the documentation for TestLoader.suiteClass that
# all TestLoader.loadTestsFrom* methods respect it. Let's make sure
def test_suiteClass__loadTestsFromName(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests = [Foo('test_1'), Foo('test_2')]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromName('Foo', m), tests)
# It is implicit in the documentation for TestLoader.suiteClass that
# all TestLoader.loadTestsFrom* methods respect it. Let's make sure
def test_suiteClass__loadTestsFromNames(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests = [[Foo('test_1'), Foo('test_2')]]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests)
# "The default value is the TestSuite class"
def test_suiteClass__default_value(self):
loader = unittest.TestLoader()
self.assertTrue(loader.suiteClass is unittest.TestSuite)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
undoware/neutron-drive | refs/heads/master | google_appengine/lib/webapp2/tests/extras_appengine_sessions_ndb_test.py | 21 | # -*- coding: utf-8 -*-
from google.appengine.api import datastore_errors
from google.appengine.api import memcache
import webapp2
from webapp2_extras import sessions
from webapp2_extras import sessions_ndb
import test_base
# Minimal WSGI application: SessionStore reads the mandatory
# 'secret_key' from this app's 'webapp2_extras.sessions' config block.
app = webapp2.WSGIApplication(config={
    'webapp2_extras.sessions': {
        'secret_key': 'my-super-secret',
    },
})
class TestNdbSession(test_base.BaseTestCase):
    """Tests for the NDB-backed session store (sessions_ndb.Session),
    which persists sessions to the datastore with a memcache
    write-through copy.

    NOTE(review): relies on the App Engine testbed set up by
    test_base.BaseTestCase — confirm datastore and memcache stubs are
    both registered there.
    """
    #factory = sessions_ndb.DatastoreSessionFactory

    def setUp(self):
        # Register the NDB model so the 'datastore' backend used below
        # resolves to sessions_ndb.Session.
        super(TestNdbSession, self).setUp()
        self.register_model('Session', sessions_ndb.Session)

    def test_get_save_session(self):
        """Session data must round-trip across simulated requests via
        the session cookie, surviving loss of the memcache copy."""
        # Round 1 -------------------------------------------------------------
        req = webapp2.Request.blank('/')
        req.app = app
        store = sessions.SessionStore(req)
        session = store.get_session(backend='datastore')
        rsp = webapp2.Response()
        # Nothing changed, we want to test anyway.
        store.save_sessions(rsp)
        session['a'] = 'b'
        session['c'] = 'd'
        session['e'] = 'f'
        store.save_sessions(rsp)
        # Round 2 -------------------------------------------------------------
        # Replay the Set-Cookie header from round 1 as the incoming
        # Cookie header of a fresh request.
        cookies = rsp.headers.get('Set-Cookie')
        req = webapp2.Request.blank('/', headers=[('Cookie', cookies)])
        req.app = app
        store = sessions.SessionStore(req)
        session = store.get_session(backend='datastore')
        self.assertEqual(session['a'], 'b')
        self.assertEqual(session['c'], 'd')
        self.assertEqual(session['e'], 'f')
        session['g'] = 'h'
        rsp = webapp2.Response()
        store.save_sessions(rsp)
        # Round 3 -------------------------------------------------------------
        cookies = rsp.headers.get('Set-Cookie')
        req = webapp2.Request.blank('/', headers=[('Cookie', cookies)])
        req.app = app
        store = sessions.SessionStore(req)
        session = store.get_session(backend='datastore')
        self.assertEqual(session['a'], 'b')
        self.assertEqual(session['c'], 'd')
        self.assertEqual(session['e'], 'f')
        self.assertEqual(session['g'], 'h')
        # Round 4 -------------------------------------------------------------
        # For this attempt we don't want the memcache backup: deleting
        # the cached entry forces a read from the datastore entity.
        sid = session.container.sid
        memcache.delete(sid)
        cookies = rsp.headers.get('Set-Cookie')
        req = webapp2.Request.blank('/', headers=[('Cookie', cookies)])
        req.app = app
        store = sessions.SessionStore(req)
        session = store.get_session(backend='datastore')
        self.assertEqual(session['a'], 'b')
        self.assertEqual(session['c'], 'd')
        self.assertEqual(session['e'], 'f')
        self.assertEqual(session['g'], 'h')

    def test_flashes(self):
        """Flash messages must be returned exactly once per request
        cycle and then discarded."""
        # Round 1 -------------------------------------------------------------
        req = webapp2.Request.blank('/')
        req.app = app
        store = sessions.SessionStore(req)
        session = store.get_session(backend='datastore')
        flashes = session.get_flashes()
        self.assertEqual(flashes, [])
        session.add_flash('foo')
        rsp = webapp2.Response()
        store.save_sessions(rsp)
        # Round 2 -------------------------------------------------------------
        cookies = rsp.headers.get('Set-Cookie')
        req = webapp2.Request.blank('/', headers=[('Cookie', cookies)])
        req.app = app
        store = sessions.SessionStore(req)
        session = store.get_session(backend='datastore')
        flashes = session.get_flashes()
        self.assertEqual(flashes, [(u'foo', None)])
        # Reading flashes consumes them: a second read is empty.
        flashes = session.get_flashes()
        self.assertEqual(flashes, [])
        session.add_flash('bar')
        session.add_flash('baz', 'important')
        rsp = webapp2.Response()
        store.save_sessions(rsp)
        # Round 3 -------------------------------------------------------------
        cookies = rsp.headers.get('Set-Cookie')
        req = webapp2.Request.blank('/', headers=[('Cookie', cookies)])
        req.app = app
        store = sessions.SessionStore(req)
        session = store.get_session(backend='datastore')
        flashes = session.get_flashes()
        self.assertEqual(flashes, [(u'bar', None), (u'baz', 'important')])
        flashes = session.get_flashes()
        self.assertEqual(flashes, [])
        rsp = webapp2.Response()
        store.save_sessions(rsp)
        # Round 4 -------------------------------------------------------------
        # No flashes were added in round 3, so nothing carries over.
        cookies = rsp.headers.get('Set-Cookie')
        req = webapp2.Request.blank('/', headers=[('Cookie', cookies)])
        req.app = app
        store = sessions.SessionStore(req)
        session = store.get_session(backend='datastore')
        flashes = session.get_flashes()
        self.assertEqual(flashes, [])

    def test_misc(self):
        """A freshly stored Session entity has no data payload."""
        s = sessions_ndb.Session(id='foo')
        key = s.put()
        s = key.get()
        self.assertEqual(s.data, None)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    test_base.main()
|
aleida/django | refs/heads/master | django/contrib/auth/forms.py | 1 | from __future__ import unicode_literals
from django import forms
from django.forms.util import flatatt
from django.template import loader
from django.utils.datastructures import SortedDict
from django.utils.html import format_html, format_html_join
from django.utils.http import int_to_base36
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext, ugettext_lazy as _
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.contrib.auth.hashers import UNUSABLE_PASSWORD, is_password_usable, identify_hasher
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.models import get_current_site
# Number of leading characters left visible when masking a password.
UNMASKED_DIGITS_TO_SHOW = 6


def mask_password(p):
    """Return *p* with everything past the first UNMASKED_DIGITS_TO_SHOW
    characters replaced by asterisks.

    Strings shorter than the threshold are returned unchanged.
    (Was a lambda assigned to a name, which PEP 8 advises against.)
    """
    hidden = max(len(p) - UNMASKED_DIGITS_TO_SHOW, 0)
    return "%s%s" % (p[:UNMASKED_DIGITS_TO_SHOW], "*" * hidden)
class ReadOnlyPasswordHashWidget(forms.Widget):
    """Render a stored password hash as a read-only HTML summary."""

    def render(self, name, value, attrs):
        hash_value = value
        if not is_password_usable(hash_value):
            return "None"
        final_attrs = self.build_attrs(attrs)
        try:
            hasher = identify_hasher(hash_value)
        except ValueError:
            # Unknown or malformed hash: show a fixed warning instead.
            summary = mark_safe("<strong>Invalid password format or unknown hashing algorithm.</strong>")
        else:
            items = ((ugettext(key), item)
                     for key, item in hasher.safe_summary(hash_value).items())
            summary = format_html_join('', "<strong>{0}</strong>: {1} ", items)
        return format_html("<div{0}>{1}</div>", flatatt(final_attrs), summary)
class ReadOnlyPasswordHashField(forms.Field):
    """Form field that displays a password hash via ReadOnlyPasswordHashWidget."""
    widget = ReadOnlyPasswordHashWidget

    def __init__(self, *args, **kwargs):
        # A read-only hash display is never a required input.
        if "required" not in kwargs:
            kwargs["required"] = False
        super(ReadOnlyPasswordHashField, self).__init__(*args, **kwargs)
class UserCreationForm(forms.ModelForm):
    """
    A form that creates a user, with no privileges, from the given username and
    password.
    """
    error_messages = {
        'duplicate_username': _("A user with that username already exists."),
        'password_mismatch': _("The two password fields didn't match."),
    }
    # PEP 8: no spaces around '=' for keyword arguments (was `help_text = _(...)`).
    username = forms.RegexField(label=_("Username"), max_length=30,
        regex=r'^[\w.@+-]+$',
        help_text=_("Required. 30 characters or fewer. Letters, digits and "
                    "@/./+/-/_ only."),
        error_messages={
            'invalid': _("This value may contain only letters, numbers and "
                         "@/./+/-/_ characters.")})
    password1 = forms.CharField(label=_("Password"),
        widget=forms.PasswordInput)
    password2 = forms.CharField(label=_("Password confirmation"),
        widget=forms.PasswordInput,
        help_text=_("Enter the same password as above, for verification."))

    class Meta:
        model = User
        fields = ("username",)

    def clean_username(self):
        # Since User.username is unique, this check is redundant,
        # but it sets a nicer error message than the ORM. See #13147.
        username = self.cleaned_data["username"]
        try:
            User.objects.get(username=username)
        except User.DoesNotExist:
            return username
        raise forms.ValidationError(self.error_messages['duplicate_username'])

    def clean_password2(self):
        # password1 may be absent if its own validation failed; fall back to
        # "" so a mismatch is still reported.
        password1 = self.cleaned_data.get("password1", "")
        password2 = self.cleaned_data["password2"]
        if password1 != password2:
            raise forms.ValidationError(
                self.error_messages['password_mismatch'])
        return password2

    def save(self, commit=True):
        """Create the user with a hashed password; with commit=False the
        caller is responsible for saving."""
        user = super(UserCreationForm, self).save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user
class UserChangeForm(forms.ModelForm):
    """Admin form for editing an existing User; the password hash is shown
    read-only."""
    # PEP 8: no spaces around '=' for keyword arguments (was `help_text = _(...)`).
    username = forms.RegexField(
        label=_("Username"), max_length=30, regex=r"^[\w.@+-]+$",
        help_text=_("Required. 30 characters or fewer. Letters, digits and "
                    "@/./+/-/_ only."),
        error_messages={
            'invalid': _("This value may contain only letters, numbers and "
                         "@/./+/-/_ characters.")})
    password = ReadOnlyPasswordHashField(label=_("Password"),
        help_text=_("Raw passwords are not stored, so there is no way to see "
                    "this user's password, but you can change the password "
                    "using <a href=\"password/\">this form</a>."))

    def clean_password(self):
        # Always return the initial value: the hash field is display-only and
        # must not be editable through this form.
        return self.initial["password"]

    class Meta:
        model = User

    def __init__(self, *args, **kwargs):
        super(UserChangeForm, self).__init__(*args, **kwargs)
        f = self.fields.get('user_permissions', None)
        if f is not None:
            # Pull in content_type up front for the permissions widget.
            f.queryset = f.queryset.select_related('content_type')
class AuthenticationForm(forms.Form):
    """
    Base class for authenticating users. Extend this to get a form that accepts
    username/password logins.
    """
    username = forms.CharField(label=_("Username"), max_length=30)
    password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)

    error_messages = {
        'invalid_login': _("Please enter a correct username and password. "
                           "Note that both fields are case-sensitive."),
        'no_cookies': _("Your Web browser doesn't appear to have cookies "
                        "enabled. Cookies are required for logging in."),
        'inactive': _("This account is inactive."),
    }

    def __init__(self, request=None, *args, **kwargs):
        """
        If request is passed in, validation also checks that the browser
        accepted the test cookie (request.session.test_cookie_worked()),
        which must have been set before this form is validated.
        """
        self.request = request
        self.user_cache = None
        super(AuthenticationForm, self).__init__(*args, **kwargs)

    def clean(self):
        data = self.cleaned_data
        username = data.get('username')
        password = data.get('password')
        if username and password:
            user = authenticate(username=username, password=password)
            self.user_cache = user
            if user is None:
                raise forms.ValidationError(
                    self.error_messages['invalid_login'])
            if not user.is_active:
                raise forms.ValidationError(self.error_messages['inactive'])
        self.check_for_test_cookie()
        return data

    def check_for_test_cookie(self):
        # Only meaningful when a request was supplied at construction time.
        if not self.request:
            return
        if not self.request.session.test_cookie_worked():
            raise forms.ValidationError(self.error_messages['no_cookies'])

    def get_user_id(self):
        return self.user_cache.id if self.user_cache else None

    def get_user(self):
        return self.user_cache
class PasswordResetForm(forms.Form):
    """Collect an e-mail address and send password-reset links to every
    matching active user."""
    error_messages = {
        'unknown': _("That e-mail address doesn't have an associated "
                     "user account. Are you sure you've registered?"),
        'unusable': _("The user account associated with this e-mail "
                      "address cannot reset the password."),
    }
    email = forms.EmailField(label=_("E-mail"), max_length=75)

    def clean_email(self):
        """
        Validates that an active user exists with the given email address.
        """
        email = self.cleaned_data["email"]
        # len() evaluates and caches the queryset, which is iterated again
        # below and later in save().
        self.users_cache = User.objects.filter(email__iexact=email,
                                               is_active=True)
        if not len(self.users_cache):
            raise forms.ValidationError(self.error_messages['unknown'])
        if any((user.password == UNUSABLE_PASSWORD)
               for user in self.users_cache):
            raise forms.ValidationError(self.error_messages['unusable'])
        return email

    def save(self, domain_override=None,
             subject_template_name='registration/password_reset_subject.txt',
             email_template_name='registration/password_reset_email.html',
             use_https=False, token_generator=default_token_generator,
             from_email=None, request=None):
        """
        Generates a one-use only link for resetting password and sends to the
        user.

        Requires clean_email() to have run (it populates self.users_cache).
        """
        # Local import kept as in the original; presumably avoids an
        # import-time dependency on the mail backend -- TODO confirm.
        from django.core.mail import send_mail
        for user in self.users_cache:
            if not domain_override:
                current_site = get_current_site(request)
                site_name = current_site.name
                domain = current_site.domain
            else:
                site_name = domain = domain_override
            c = {
                'email': user.email,
                'domain': domain,
                'site_name': site_name,
                'uid': int_to_base36(user.id),
                'user': user,
                'token': token_generator.make_token(user),
                # Conditional expression instead of the fragile `and/or` idiom.
                'protocol': 'https' if use_https else 'http',
            }
            subject = loader.render_to_string(subject_template_name, c)
            # Email subject *must not* contain newlines
            subject = ''.join(subject.splitlines())
            email = loader.render_to_string(email_template_name, c)
            send_mail(subject, email, from_email, [user.email])
class SetPasswordForm(forms.Form):
    """
    Lets a user set a new password without entering the old one.
    """
    error_messages = {
        'password_mismatch': _("The two password fields didn't match."),
    }
    new_password1 = forms.CharField(label=_("New password"),
                                    widget=forms.PasswordInput)
    new_password2 = forms.CharField(label=_("New password confirmation"),
                                    widget=forms.PasswordInput)

    def __init__(self, user, *args, **kwargs):
        self.user = user
        super(SetPasswordForm, self).__init__(*args, **kwargs)

    def clean_new_password2(self):
        # Compare only when both values passed their own field validation.
        pw1 = self.cleaned_data.get('new_password1')
        pw2 = self.cleaned_data.get('new_password2')
        if pw1 and pw2 and pw1 != pw2:
            raise forms.ValidationError(
                self.error_messages['password_mismatch'])
        return pw2

    def save(self, commit=True):
        new_password = self.cleaned_data['new_password1']
        self.user.set_password(new_password)
        if commit:
            self.user.save()
        return self.user
class PasswordChangeForm(SetPasswordForm):
    """
    Lets a user change his/her password after confirming the old one.
    """
    # Extend the inherited message table with the old-password error.
    error_messages = dict(
        SetPasswordForm.error_messages,
        password_incorrect=_("Your old password was entered incorrectly. "
                             "Please enter it again."),
    )
    old_password = forms.CharField(label=_("Old password"),
                                   widget=forms.PasswordInput)

    def clean_old_password(self):
        """Reject the form when the supplied old password is wrong."""
        old_password = self.cleaned_data["old_password"]
        if self.user.check_password(old_password):
            return old_password
        raise forms.ValidationError(
            self.error_messages['password_incorrect'])
# Reorder the form's fields so 'old_password' renders first. Field order
# normally follows declaration order, and old_password is declared on the
# subclass, i.e. after the inherited new_password fields.
PasswordChangeForm.base_fields = SortedDict([
    (k, PasswordChangeForm.base_fields[k])
    for k in ['old_password', 'new_password1', 'new_password2']
])
class AdminPasswordChangeForm(forms.Form):
    """
    A form used to change the password of a user in the admin interface.
    """
    error_messages = {
        'password_mismatch': _("The two password fields didn't match."),
    }
    password1 = forms.CharField(label=_("Password"),
                                widget=forms.PasswordInput)
    password2 = forms.CharField(label=_("Password (again)"),
                                widget=forms.PasswordInput)

    def __init__(self, user, *args, **kwargs):
        self.user = user
        super(AdminPasswordChangeForm, self).__init__(*args, **kwargs)

    def clean_password2(self):
        # Compare only when both values passed their own field validation.
        pw1 = self.cleaned_data.get('password1')
        pw2 = self.cleaned_data.get('password2')
        if pw1 and pw2 and pw1 != pw2:
            raise forms.ValidationError(
                self.error_messages['password_mismatch'])
        return pw2

    def save(self, commit=True):
        """
        Saves the new password.
        """
        self.user.set_password(self.cleaned_data["password1"])
        if commit:
            self.user.save()
        return self.user
|
brownharryb/erpnext | refs/heads/develop | erpnext/assets/doctype/asset_settings/asset_settings.py | 13 | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
class AssetSettings(Document):
    """Controller for the Asset Settings DocType; no custom server-side
    behavior is defined, so the framework's Document defaults apply."""
    pass
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.