| repo_name | ref | path | copies | content |
|---|---|---|---|---|
jiaphuan/models | refs/heads/master | research/differential_privacy/dp_sgd/dp_optimizer/dp_pca.py | 16 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Differentially private optimizers.
"""
import tensorflow as tf
from differential_privacy.dp_sgd.dp_optimizer import sanitizer as san
def ComputeDPPrincipalProjection(data, projection_dims,
sanitizer, eps_delta, sigma):
"""Compute differentially private projection.
Args:
data: the input data, each row is a data vector.
projection_dims: the projection dimension.
sanitizer: the sanitizer used for achieving privacy.
eps_delta: (eps, delta) pair.
sigma: if not None, use this noise sigma; otherwise compute it from the
eps_delta pair.
Returns:
A projection matrix with projection_dims columns.
"""
eps, delta = eps_delta
# Normalize each row.
normalized_data = tf.nn.l2_normalize(data, 1)
covar = tf.matmul(tf.transpose(normalized_data), normalized_data)
saved_shape = tf.shape(covar)
num_examples = tf.slice(tf.shape(data), [0], [1])
if eps > 0:
# Since the data is already normalized, there is no need to clip
# the covariance matrix.
assert delta > 0
saned_covar = sanitizer.sanitize(
tf.reshape(covar, [1, -1]), eps_delta, sigma=sigma,
option=san.ClipOption(1.0, False), num_examples=num_examples)
saned_covar = tf.reshape(saned_covar, saved_shape)
# Symmetrize saned_covar. This also reduces the noise variance.
saned_covar = 0.5 * (saned_covar + tf.transpose(saned_covar))
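# (averaging the noise at entries (i, j) and (j, i) halves its variance
# off the diagonal)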
else:
saned_covar = covar
# Compute the eigendecomposition of the covariance matrix, and
# return the top projection_dims eigenvectors, represented as columns of
# the projection matrix.
eigvals, eigvecs = tf.self_adjoint_eig(saned_covar)
_, topk_indices = tf.nn.top_k(eigvals, projection_dims)
topk_indices = tf.reshape(topk_indices, [projection_dims])
# Gather and return the corresponding eigenvectors.
return tf.transpose(tf.gather(tf.transpose(eigvecs), topk_indices))
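# A minimal usage sketch (hedged: the sanitizer construction below is a
# hypothetical placeholder, not part of this module):
#
#   sanitizer = san.AmortizedGaussianSanitizer(...)  # hypothetical setup
#   projection = ComputeDPPrincipalProjection(
#       data, projection_dims=10, sanitizer=sanitizer,
#       eps_delta=(1.0, 1e-5), sigma=None)
#   reduced_data = tf.matmul(data, projection)  # rows projected to 10 dims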
|
rew4332/tensorflow | refs/heads/rew4332-patch-1 | tensorflow/contrib/losses/python/losses/loss_ops_test.py | 4 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for contrib.losses.python.losses.loss_ops."""
# pylint: disable=unused-import,g-bad-import-order
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: enable=unused-import
import numpy as np
import tensorflow as tf
class AbsoluteDifferenceLossTest(tf.test.TestCase):
def setUp(self):
self._predictions = tf.constant([4, 8, 12, 8, 1, 3], shape=(2, 3))
self._targets = tf.constant([1, 9, 2, -5, -2, 6], shape=(2, 3))
def testValueErrorThrownWhenWeightIsNone(self):
with self.test_session():
with self.assertRaises(ValueError):
tf.contrib.losses.absolute_difference(
self._predictions, self._predictions, weight=None)
def testAllCorrectNoLossWeight(self):
loss = tf.contrib.losses.absolute_difference(
self._predictions, self._predictions)
with self.test_session():
self.assertAlmostEqual(0.0, loss.eval(), 3)
def testNonZeroLoss(self):
loss = tf.contrib.losses.absolute_difference(
self._predictions, self._targets)
with self.test_session():
self.assertAlmostEqual(5.5, loss.eval(), 3)
def testNonZeroLossWithPythonScalarWeight(self):
weight = 2.3
loss = tf.contrib.losses.absolute_difference(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(5.5 * weight, loss.eval(), 3)
def testNonZeroLossWithScalarTensorWeight(self):
weight = 2.3
loss = tf.contrib.losses.absolute_difference(
self._predictions, self._targets, tf.constant(weight))
with self.test_session():
self.assertAlmostEqual(5.5 * weight, loss.eval(), 3)
def testNonZeroLossWithOneDimBatchSpecificWeights(self):
weight = tf.constant([1.2, 0.0], shape=[2,])
loss = tf.contrib.losses.absolute_difference(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(5.6, loss.eval(), 3)
def testNonZeroLossWithTwoDimBatchSpecificWeights(self):
weight = tf.constant([1.2, 0.0], shape=[2, 1])
loss = tf.contrib.losses.absolute_difference(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(5.6, loss.eval(), 3)
def testNonZeroLossWithSampleSpecificWeights(self):
weight = tf.constant([3, 6, 5, 0, 4, 2], shape=[2, 3])
loss = tf.contrib.losses.absolute_difference(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(16.6, loss.eval(), 3)
def testNonZeroLossWithSampleSpecificWeightsMostZero(self):
weight = tf.constant([0, 0, 0, 0, 0, 2], shape=[2, 3])
loss = tf.contrib.losses.absolute_difference(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(6.0, loss.eval(), 3)
def testLossWithSampleSpecificWeightsAllZero(self):
weight = tf.zeros((2, 3))
loss = tf.contrib.losses.absolute_difference(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(0.0, loss.eval(), 3)
class SoftmaxCrossEntropyLossTest(tf.test.TestCase):
def testNoneWeightRaisesValueError(self):
logits = tf.constant([[10.0, 0.0, 0.0],
[0.0, 10.0, 0.0],
[0.0, 0.0, 10.0]])
labels = tf.constant([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
with self.test_session():
with self.assertRaises(ValueError):
tf.contrib.losses.softmax_cross_entropy(logits, labels, weight=None)
def testAllCorrect(self):
with self.test_session():
logits = tf.constant([[10.0, 0.0, 0.0],
[0.0, 10.0, 0.0],
[0.0, 0.0, 10.0]])
labels = tf.constant([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
loss = tf.contrib.losses.softmax_cross_entropy(logits, labels)
self.assertEquals(loss.op.name, 'softmax_cross_entropy_loss/value')
self.assertAlmostEqual(loss.eval(), 0.0, 3)
def testAllWrong(self):
logits = tf.constant([[10.0, 0.0, 0.0],
[0.0, 10.0, 0.0],
[0.0, 0.0, 10.0]])
labels = tf.constant([[0, 0, 1],
[1, 0, 0],
[0, 1, 0]])
with self.test_session():
loss = tf.contrib.losses.softmax_cross_entropy(logits, labels)
self.assertEquals(loss.op.name, 'softmax_cross_entropy_loss/value')
self.assertAlmostEqual(loss.eval(), 10.0, 3)
def testNonZeroLossWithPythonScalarWeight(self):
logits = tf.constant([[10.0, 0.0, 0.0],
[0.0, 10.0, 0.0],
[0.0, 0.0, 10.0]])
labels = tf.constant([[0, 0, 1],
[1, 0, 0],
[0, 1, 0]])
weight = 2.3
with self.test_session():
loss = tf.contrib.losses.softmax_cross_entropy(logits, labels, weight)
self.assertAlmostEqual(loss.eval(), weight * 10.0, 3)
def testNonZeroLossWithScalarTensorWeight(self):
logits = tf.constant([[10.0, 0.0, 0.0],
[0.0, 10.0, 0.0],
[0.0, 0.0, 10.0]])
labels = tf.constant([[0, 0, 1],
[1, 0, 0],
[0, 1, 0]])
weight = 2.3
with self.test_session():
loss = tf.contrib.losses.softmax_cross_entropy(
logits, labels, tf.constant(weight))
self.assertAlmostEqual(loss.eval(), weight * 10.0, 3)
def testNonZeroLossWithOneDimBatchSpecificWeights(self):
logits = tf.constant([[10.0, 0.0, 0.0],
[0.0, 10.0, 0.0],
[0.0, 0.0, 10.0]])
labels = tf.constant([[0, 0, 1],
[1, 0, 0],
[0, 1, 0]])
weight = tf.constant([1.2, 3.4, 5.6], shape=[3])
with self.test_session():
loss = tf.contrib.losses.softmax_cross_entropy(logits, labels, weight)
self.assertAlmostEqual(loss.eval(), (1.2 + 3.4 + 5.6) * 10.0 / 3.0, 3)
def testAllWrongAllMissing(self):
logits = tf.constant([[10.0, 0.0, 0.0],
[0.0, 10.0, 0.0],
[0.0, 0.0, 10.0]])
labels = tf.constant([[0, 0, 1],
[1, 0, 0],
[0, 1, 0]])
weight = tf.constant([0, 0, 0], shape=[3])
with self.test_session():
loss = tf.contrib.losses.softmax_cross_entropy(logits, labels, weight)
self.assertAlmostEqual(loss.eval(), 0.0, 3)
def testSomeMissing(self):
logits = tf.constant([[10.0, 0.0, 0.0],
[0.0, 10.0, 0.0],
[0.0, 0.0, 10.0]])
labels = tf.constant([[0, 0, 1],
[1, 0, 0],
[0, 1, 0]])
weight = tf.constant([1.2, 0, 0], shape=[3])
with self.test_session():
loss = tf.contrib.losses.softmax_cross_entropy(logits, labels, weight)
self.assertAlmostEqual(loss.eval(), 12.0, 3)
def testSoftmaxWithMeasurementSpecificWeightsRaisesException(self):
with self.test_session():
logits = tf.constant([[100.0, -100.0, -100.0],
[-100.0, 100.0, -100.0],
[-100.0, -100.0, 100.0]])
labels = tf.constant([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
weight = tf.constant([[3, 4, 5],
[2, 6, 0],
[8, 0, 1]])
with self.assertRaises(ValueError):
tf.contrib.losses.softmax_cross_entropy(
logits, labels, weight=weight).eval()
def testSoftmaxLabelSmoothing(self):
with self.test_session():
# Softmax Cross Entropy Loss is:
# -\sum_i p_i \log q_i
# where for a softmax activation
# \log q_i = x_i - \log \sum_j \exp x_j
# = x_i - x_max - \log \sum_j \exp (x_j - x_max)
# For our activations, [100, -100, -100], the log partition function becomes
# \log ( exp(0) + exp(-200) + exp(-200) ) = 0
# so our log softmaxes become: [0, -200, -200]
# so our cross entropy loss is:
# -(1 - L + L/n) * 0 + 400 * L/n = 400 L/n
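# With L = 0.1 and n = 3 below, this evaluates to 400 * 0.1 / 3 = 13.333...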
logits = tf.constant([[100.0, -100.0, -100.0]])
labels = tf.constant([[1, 0, 0]])
label_smoothing = 0.1
loss = tf.contrib.losses.softmax_cross_entropy(
logits, labels, label_smoothing=label_smoothing)
self.assertEquals(loss.op.name, 'softmax_cross_entropy_loss/value')
expected_value = 400.0 * label_smoothing / 3.0
self.assertAlmostEqual(loss.eval(), expected_value, 3)
class SigmoidCrossEntropyLossTest(tf.test.TestCase):
def testAllCorrectSigmoid(self):
with self.test_session():
logits = tf.constant([[100.0, -100.0, -100.0],
[-100.0, 100.0, -100.0],
[-100.0, -100.0, 100.0]])
labels = tf.constant([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
loss = tf.contrib.losses.sigmoid_cross_entropy(logits, labels)
self.assertEquals(loss.op.name, 'sigmoid_cross_entropy_loss/value')
self.assertAlmostEqual(loss.eval(), 0.0, 3)
def testAllWrongSigmoid(self):
with self.test_session():
logits = tf.constant([[100.0, -100.0, -100.0],
[-100.0, 100.0, -100.0],
[-100.0, -100.0, 100.0]])
labels = tf.constant([[0, 0, 1],
[1, 0, 0],
[0, 1, 0]])
loss = tf.contrib.losses.sigmoid_cross_entropy(logits, labels)
self.assertEquals(loss.op.name, 'sigmoid_cross_entropy_loss/value')
self.assertAlmostEqual(loss.eval(), 600.0 / 9.0, 3)
def testAllWrongSigmoidWithMeasurementSpecificWeights(self):
with self.test_session():
logits = tf.constant([[100.0, -100.0, -100.0],
[-100.0, 100.0, -100.0],
[-100.0, -100.0, 100.0]])
labels = tf.constant([[0, 0, 1],
[1, 0, 0],
[0, 1, 0]])
weight = tf.constant([[3, 4, 5],
[2, 6, 0],
[8, 0, 1]])
loss = tf.contrib.losses.sigmoid_cross_entropy(
logits, labels, weight=weight)
self.assertEquals(loss.op.name, 'sigmoid_cross_entropy_loss/value')
self.assertAlmostEqual(loss.eval(), 1700.0 / 7.0, 3)
def testMultiCorrectSigmoid(self):
logits = tf.constant([[100.0, -100.0, 100.0],
[100.0, 100.0, -100.0],
[-100.0, 100.0, 100.0]])
labels = tf.constant([[1, 0, 1],
[1, 1, 0],
[0, 1, 1]])
loss = tf.contrib.losses.sigmoid_cross_entropy(logits, labels)
self.assertEquals(loss.op.name, 'sigmoid_cross_entropy_loss/value')
with self.test_session():
self.assertAlmostEqual(loss.eval(), 0.0, 3)
def testSigmoidLabelSmoothingCorrect(self):
with self.test_session():
logits = tf.constant([[100.0, -100.0, -100.0]])
labels = tf.constant([[1, 0, 1]])
# Sigmoid cross entropy loss is:
# max(x,0) - x*z + log(1 + exp(-abs(x)))
# The new labels are:
# z' = z * (1 - L) + 0.5 L
# 1 -> 1 - 0.5 L
# 0 -> 0.5 L
# here we expect:
# 1/3 * (100 - 100 * (1 - 0.5 L) + 0
# + 0 + 100 * (0.5 L) + 0
# + 0 + 100 * (1 - 0.5 L) + 0)
# = 1/3 * (100 + 50 L)
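# With L = 0.1 below, this evaluates to (100 + 5) / 3 = 35.0.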
label_smoothing = 0.1
loss = tf.contrib.losses.sigmoid_cross_entropy(
logits, labels, label_smoothing=label_smoothing)
self.assertEquals(loss.op.name, 'sigmoid_cross_entropy_loss/value')
expected_value = (100.0 + 50.0 * label_smoothing) / 3.0
self.assertAlmostEqual(loss.eval(), expected_value, 3)
def testSigmoidLabelSmoothingEqualsSoftmaxTwoLabel(self):
with self.test_session():
label_smoothing = 0.1
sigmoid_logits = tf.constant([[100.0, -100.0, -100.0]])
sigmoid_labels = tf.constant([[1, 0, 1]])
sigmoid_loss = tf.contrib.losses.sigmoid_cross_entropy(
sigmoid_logits, sigmoid_labels, label_smoothing=label_smoothing)
softmax_logits = tf.constant([[0.0, 100.0], [100.0, 0.0], [100.0, 0.0]])
softmax_labels = tf.constant([[0, 1], [1, 0], [0, 1]])
softmax_loss = tf.contrib.losses.softmax_cross_entropy(
softmax_logits, softmax_labels, label_smoothing=label_smoothing)
self.assertAlmostEqual(sigmoid_loss.eval(), softmax_loss.eval(), 3)
class LogLossTest(tf.test.TestCase):
def setUp(self):
predictions = np.asarray([.9, .2, .2, .8, .4, .6]).reshape((2, 3))
targets = np.asarray([1.0, 0.0, 1.0, 1.0, 0.0, 0.0]).reshape((2, 3))
self._np_predictions = predictions
self._np_targets = targets
epsilon = 1e-7
self._expected_losses = np.multiply(
targets, np.log(predictions + epsilon)) + np.multiply(
1 - targets, np.log(1 - predictions + epsilon))
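# These are per-element log-likelihoods; the tests below negate and
# average them to obtain the expected mean log loss.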
self._predictions = tf.constant(predictions)
self._targets = tf.constant(targets)
def testValueErrorThrownWhenWeightIsNone(self):
with self.test_session():
with self.assertRaises(ValueError):
tf.contrib.losses.log_loss(self._targets, self._targets, weight=None)
def testAllCorrectNoLossWeight(self):
loss = tf.contrib.losses.log_loss(self._targets, self._targets)
with self.test_session():
self.assertAlmostEqual(0.0, loss.eval(), 3)
def testAllCorrectNoLossWeightWithPlaceholder(self):
tf_predictions = tf.placeholder(tf.float32, shape=self._np_targets.shape)
loss = tf.contrib.losses.log_loss(tf_predictions, self._targets)
with self.test_session():
self.assertAlmostEqual(0.0, loss.eval(feed_dict={
tf_predictions: self._np_targets}), 3)
def testNonZeroLoss(self):
loss = tf.contrib.losses.log_loss(self._predictions, self._targets)
with self.test_session():
self.assertAlmostEqual(-np.sum(self._expected_losses) / 6.0,
loss.eval(), 3)
def testNonZeroLossWithPythonScalarWeight(self):
weight = 2.3
loss = tf.contrib.losses.log_loss(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(weight * -np.sum(self._expected_losses) / 6.0,
loss.eval(), 3)
def testNonZeroLossWithScalarTensorWeight(self):
weight = 2.3
loss = tf.contrib.losses.log_loss(
self._predictions, self._targets, tf.constant(weight))
with self.test_session():
self.assertAlmostEqual(weight * -np.sum(self._expected_losses) / 6.0,
loss.eval(), 3)
def testNonZeroLossWithScalarTensorWeightAndPlaceholder(self):
tf_predictions = tf.placeholder(tf.float32,
shape=self._np_predictions.shape)
weight = 2.3
loss = tf.contrib.losses.log_loss(
tf_predictions, self._targets, tf.constant(weight))
with self.test_session() as sess:
loss = sess.run(loss, feed_dict={tf_predictions: self._np_predictions})
self.assertAlmostEqual(weight * -np.sum(self._expected_losses) / 6.0,
loss, 3)
def testNonZeroLossWithScalarTensorWeightAndPlaceholderWithRankOnly(self):
tf_predictions = tf.placeholder(tf.float32, shape=[None, None])
weight = 2.3
loss = tf.contrib.losses.log_loss(
tf_predictions, self._targets, tf.constant(weight))
with self.test_session() as sess:
loss = sess.run(loss, feed_dict={tf_predictions: self._np_predictions})
self.assertAlmostEqual(weight * -np.sum(self._expected_losses) / 6.0,
loss, 3)
def testNonZeroLossWithOneDimBatchSpecificWeights(self):
weight = tf.constant([1.2, 3.4], shape=[2])
expected_losses = np.multiply(
self._expected_losses,
np.asarray([1.2, 1.2, 1.2, 3.4, 3.4, 3.4]).reshape((2, 3)))
loss = tf.contrib.losses.log_loss(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(-np.sum(expected_losses) / 6.0,
loss.eval(), 3)
def testNonZeroLossWithOneDimBatchSpecificWeightsSomeZero(self):
weight = tf.constant([1.2, 0], shape=[2])
expected_losses = np.multiply(
self._expected_losses,
np.asarray([1.2, 1.2, 1.2, 0, 0, 0]).reshape((2, 3)))
loss = tf.contrib.losses.log_loss(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(-np.sum(expected_losses) / 3.0,
loss.eval(), 3)
def testNonZeroLossWithTwoDimBatchSpecificWeightsSomeZero(self):
weight = tf.constant([1.2, 0], shape=[2, 1])
expected_losses = np.multiply(
self._expected_losses,
np.asarray([1.2, 1.2, 1.2, 0, 0, 0]).reshape((2, 3)))
loss = tf.contrib.losses.log_loss(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(-np.sum(expected_losses) / 3.0,
loss.eval(), 3)
def testWeightsWithSameNumDimsButWrongShapeThrowsException(self):
weight = tf.constant(np.random.normal(size=(2, 4)), shape=[2, 4])
with self.test_session():
with self.assertRaises(ValueError):
tf.contrib.losses.log_loss(self._predictions, self._targets, weight)
def testNonZeroLossWithMeasurementSpecificWeights(self):
weight = np.array([3, 6, 5, 0, 4, 2]).reshape((2, 3))
expected_losses = np.multiply(self._expected_losses, weight)
loss = tf.contrib.losses.log_loss(
self._predictions,
self._targets,
weight=tf.constant(weight, shape=(2, 3)))
with self.test_session():
self.assertAlmostEqual(-np.sum(expected_losses) / 5.0, loss.eval(), 3)
def testNonZeroLossWithMeasurementSpecificWeightsWithPlaceholder(self):
weight = np.array([3, 6, 5, 0, 4, 2]).reshape((2, 3))
expected_losses = np.multiply(self._expected_losses, weight)
tf_predictions = tf.placeholder(tf.float32, shape=[2, 3])
loss = tf.contrib.losses.log_loss(
tf_predictions,
self._targets,
weight=tf.constant(weight, shape=(2, 3)))
with self.test_session() as sess:
loss = sess.run(loss, feed_dict={tf_predictions: self._np_predictions})
self.assertAlmostEqual(-np.sum(expected_losses) / 5.0, loss, 3)
def testNonZeroLossWithSampleSpecificWeightsMostZero(self):
weight = np.array([0, 0, 0, 0, 0, 2]).reshape((2, 3))
expected_losses = np.multiply(self._expected_losses, weight)
loss = tf.contrib.losses.log_loss(
self._predictions,
self._targets,
weight=tf.constant(weight, shape=(2, 3)))
with self.test_session():
self.assertAlmostEqual(-np.sum(expected_losses), loss.eval(), 3)
def testNonZeroLossWithSampleSpecificWeightsMostZeroWithPlaceholder(self):
weight = np.array([0, 0, 0, 0, 0, 2]).reshape((2, 3))
expected_losses = np.multiply(self._expected_losses, weight)
tf_predictions = tf.placeholder(tf.float32, shape=[2, 3])
tf_weight = tf.constant(weight, shape=(2, 3))
loss = tf.contrib.losses.log_loss(tf_predictions, self._targets, tf_weight)
with self.test_session() as sess:
loss = sess.run(loss, feed_dict={tf_predictions: self._np_predictions})
self.assertAlmostEqual(-np.sum(expected_losses), loss, 3)
def testLossWithSampleSpecificWeightsAllZero(self):
tf_weight = tf.zeros(shape=(2, 3))
loss = tf.contrib.losses.log_loss(
self._predictions, self._targets, tf_weight)
with self.test_session():
self.assertAlmostEqual(0.0, loss.eval(), 3)
class HingeLossTest(tf.test.TestCase):
def testIncompatibleShapes(self):
with self.test_session():
logits = tf.constant([[-1.0], [2.1]])
target = tf.constant([0.0, 1.0])
with self.assertRaises(ValueError):
_ = tf.contrib.losses.hinge_loss(logits, target).eval()
def testAllOutsideMargin(self):
with self.test_session():
logits = tf.constant([1.2, -1.4, -1.0, 2.1])
target = tf.constant([1.0, 0.0, 0.0, 1.0])
loss = tf.contrib.losses.hinge_loss(logits, target)
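# Hinge loss per element is max(0, 1 - (2 * target - 1) * logit); every
# example here lies outside the margin, so all losses are 0.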
self.assertAllClose(loss.eval(), [0.0, 0.0, 0.0, 0.0], atol=1e-3)
def testSomeInsideMargin(self):
with self.test_session():
logits = tf.constant([[-0.7], [-1.4], [1.4], [0.6]])
target = tf.constant([[0.0], [0.0], [1.0], [1.0]])
loss = tf.contrib.losses.hinge_loss(logits, target)
# Examples 1 and 4 are on the correct side of the hyperplane but within
# the margin so they incur some (small) loss.
self.assertAllClose(loss.eval(), [[0.3], [0.0], [0.0], [0.4]], atol=1e-3)
def testSomeMisclassified(self):
with self.test_session():
logits = tf.constant([[[1.2], [0.4], [-1.0], [-1.1]]])
target = tf.constant([[[1.0], [0.0], [0.0], [1.0]]])
loss = tf.contrib.losses.hinge_loss(logits, target)
# Examples 2 and 4 are on the wrong side of the hyperplane so they incur
# some (fairly large) loss.
self.assertAllClose(
loss.eval(), [[[0.0], [1.4], [0.0], [2.1]]], atol=1e-3)
class SumOfSquaresLossTest(tf.test.TestCase):
def setUp(self):
self._predictions = tf.constant([4, 8, 12, 8, 1, 3], shape=(2, 3))
self._targets = tf.constant([1, 9, 2, -5, -2, 6], shape=(2, 3))
def testValueErrorThrownWhenWeightIsNone(self):
with self.test_session():
with self.assertRaises(ValueError):
tf.contrib.losses.sum_of_squares(
self._predictions, self._predictions, weight=None)
def testAllCorrectNoLossWeight(self):
loss = tf.contrib.losses.sum_of_squares(
self._predictions, self._predictions)
with self.test_session():
self.assertAlmostEqual(0.0, loss.eval(), 3)
def testNonZeroLoss(self):
loss = tf.contrib.losses.sum_of_squares(
self._predictions, self._targets)
with self.test_session():
self.assertAlmostEqual(49.5, loss.eval(), 3)
def testNonZeroLossWithPythonScalarWeight(self):
weight = 2.3
loss = tf.contrib.losses.sum_of_squares(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(49.5 * weight, loss.eval(), 3)
def testNonZeroLossWithScalarTensorWeight(self):
weight = 2.3
loss = tf.contrib.losses.sum_of_squares(
self._predictions, self._targets, tf.constant(weight))
with self.test_session():
self.assertAlmostEqual(49.5 * weight, loss.eval(), 3)
def testNonZeroLossWithOneDimBatchSpecificWeights(self):
weight = tf.constant([1.2, 3.4], shape=[2,])
loss = tf.contrib.losses.sum_of_squares(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(767.8 / 6.0, loss.eval(), 3)
def testNonZeroLossWithTwoDimBatchSpecificWeights(self):
weight = tf.constant([1.2, 3.4], shape=[2, 1])
loss = tf.contrib.losses.sum_of_squares(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(767.8 / 6.0, loss.eval(), 3)
def testNonZeroLossWithSampleSpecificWeights(self):
weight = tf.constant([3, 6, 5, 0, 4, 2], shape=[2, 3])
loss = tf.contrib.losses.sum_of_squares(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(587 / 5.0, loss.eval(), 3)
def testNonZeroLossWithSampleSpecificWeightsMostZero(self):
weight = tf.constant([0, 0, 0, 0, 0, 2], shape=[2, 3])
loss = tf.contrib.losses.sum_of_squares(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(18.0, loss.eval(), 3)
def testLossWithSampleSpecificWeightsAllZero(self):
weight = tf.zeros((2, 3))
loss = tf.contrib.losses.sum_of_squares(
self._predictions, self._targets, weight)
with self.test_session():
self.assertAlmostEqual(0.0, loss.eval(), 3)
class SumOfPairwiseSquaresLossTest(tf.test.TestCase):
def setUp(self):
self._predictions = np.array([[4, 8, 12],
[8, 1, 3]])
self._targets = np.array([[1, 9, 2],
[-5, -5, 7]])
batch_size, dims = self._targets.shape
# Compute the expected loss 'manually'.
total = np.zeros((batch_size, 1))
for b in range(batch_size):
for i in range(dims):
for j in range(dims):
x = self._predictions[b, i].item() - self._predictions[b, j].item()
y = self._targets[b, i].item() - self._targets[b, j].item()
tmp = (x-y) * (x-y)
total[b] += tmp
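# Normalize by dims**2 (3 * 3 = 9 pairwise terms per batch element).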
self._expected_losses = np.divide(total, 9.0)
def testValueErrorThrownWhenWeightIsNone(self):
with self.test_session():
with self.assertRaises(ValueError):
tf.contrib.losses.sum_of_pairwise_squares(
predictions=tf.constant(self._targets),
targets=tf.constant(self._targets),
weight=None)
def testAllCorrectNoLossWeight(self):
loss = tf.contrib.losses.sum_of_pairwise_squares(
predictions=tf.constant(self._targets),
targets=tf.constant(self._targets))
with self.test_session():
self.assertAlmostEqual(0.0, loss.eval(), 3)
def testNonZeroLoss(self):
loss = tf.contrib.losses.sum_of_pairwise_squares(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets))
with self.test_session():
self.assertAlmostEqual(np.sum(self._expected_losses), loss.eval(), 3)
def testGradientWithZeroWeight(self):
with tf.Graph().as_default():
tf.set_random_seed(0)
inputs = tf.ones((2, 3))
weights = tf.get_variable('weights',
shape=[3, 4],
initializer=tf.truncated_normal_initializer())
predictions = tf.matmul(inputs, weights)
optimizer = tf.train.MomentumOptimizer(learning_rate=0.001, momentum=0.9)
loss = tf.contrib.losses.sum_of_pairwise_squares(
predictions,
predictions,
0)
gradients_to_variables = optimizer.compute_gradients(loss)
init_op = tf.initialize_all_variables()
with self.test_session() as sess:
sess.run(init_op)
for grad, _ in gradients_to_variables:
np_grad = sess.run(grad)
self.assertFalse(np.isnan(np_grad).any())
def testNonZeroLossWithPythonScalarWeight(self):
weight = 2.3
loss = tf.contrib.losses.sum_of_pairwise_squares(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets),
weight=weight)
with self.test_session():
self.assertAlmostEqual(weight * np.sum(self._expected_losses),
loss.eval(), 3)
def testNonZeroLossWithScalarTensorWeight(self):
weight = 2.3
loss = tf.contrib.losses.sum_of_pairwise_squares(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets),
weight=tf.constant(weight))
with self.test_session():
self.assertAlmostEqual(weight * np.sum(self._expected_losses),
loss.eval(), 3)
def testNonZeroLossWithScalarZeroWeight(self):
weight = 0
loss = tf.contrib.losses.sum_of_pairwise_squares(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets),
weight=tf.constant(weight))
with self.test_session():
self.assertAlmostEqual(0, loss.eval(), 3)
def testNonZeroLossWithScalarTensorWeightWithPlaceholder(self):
weight = 2.3
tf_predictions = tf.placeholder(tf.float32, shape=self._predictions.shape)
tf_targets = tf.placeholder(tf.float32, shape=self._targets.shape)
loss = tf.contrib.losses.sum_of_pairwise_squares(
predictions=tf_predictions,
targets=tf_targets,
weight=tf.constant(weight))
with self.test_session() as sess:
loss = sess.run(loss, feed_dict={
tf_predictions: self._predictions,
tf_targets: self._targets,
})
self.assertAlmostEqual(weight * np.sum(self._expected_losses), loss, 3)
def testNonZeroLossWithOneDimBatchSpecificWeights(self):
weight = np.asarray([2.0, 1.0]).reshape((2, 1))
expected_losses = np.multiply(weight, self._expected_losses)
loss = tf.contrib.losses.sum_of_pairwise_squares(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets),
weight=tf.constant(weight, shape=[2]))
with self.test_session():
self.assertAlmostEqual(np.sum(expected_losses), loss.eval(), 3)
def testZeroLossWithOneDimBatchZeroWeights(self):
weight = np.asarray([0.0, 0.0]).reshape((2, 1))
loss = tf.contrib.losses.sum_of_pairwise_squares(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets),
weight=tf.constant(weight, shape=[2]))
with self.test_session():
self.assertAlmostEqual(0, loss.eval(), 3)
def testNonZeroLossWithOneDimBatchSpecificWeightsAndPlaceholders(self):
weight = np.asarray([1.2, 3.4]).reshape((2, 1))
expected_losses = np.multiply(weight, self._expected_losses)
tf_predictions = tf.placeholder(tf.float32, shape=self._predictions.shape)
tf_targets = tf.placeholder(tf.int32, shape=self._targets.shape)
loss = tf.contrib.losses.sum_of_pairwise_squares(
predictions=tf_predictions,
targets=tf_targets,
weight=tf.constant(weight, shape=[2]))
with self.test_session() as sess:
loss = sess.run(loss, feed_dict={
tf_predictions: self._predictions,
tf_targets: self._targets,
})
self.assertAlmostEqual(np.sum(expected_losses), loss, 3)
def testLossWithAllZeroBatchSpecificWeights(self):
weight = np.zeros((2, 1))
loss = tf.contrib.losses.sum_of_pairwise_squares(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets),
weight=tf.constant(weight, shape=[2]))
with self.test_session():
self.assertAlmostEqual(0.0, loss.eval(), 3)
class CosineDistanceLossTest(tf.test.TestCase):
def setUp(self):
self._predictions = np.asarray([[1, 0, 0], # Batch 1
[0, 0, -1],
[1, 0, 0], # Batch 2
[1, 0, 0],
[0, 0, -1], # Batch 3
[1, 0, 0]]).reshape((3, 2, 3))
self._targets = np.asarray([[1, 0, 0],
[0, 0, 1],
[0, 1, 0],
[1, 0, 0],
[0, 0, 1],
[0, 1, 0]]).reshape((3, 2, 3))
def testValueErrorThrownWhenWeightIsNone(self):
with self.test_session():
with self.assertRaises(ValueError):
tf.contrib.losses.cosine_distance(
predictions=tf.constant(self._targets),
targets=tf.constant(self._targets),
dim=2,
weight=None)
def testAllCorrectNoWeights(self):
loss = tf.contrib.losses.cosine_distance(
predictions=tf.constant(self._targets),
targets=tf.constant(self._targets),
dim=2)
with self.test_session():
self.assertAlmostEqual(0, loss.eval(), 5)
def testPartiallyCorrectWithIntegerValues(self):
loss = tf.contrib.losses.cosine_distance(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets),
dim=2)
with self.test_session():
self.assertAlmostEqual(1, loss.eval(), 5)
def testPartiallyCorrectFloatingPointValues(self):
predictions = np.matrix((
'0.819031913261206 0.567041924552012 0.087465312324590;'
'-0.665139432070255 -0.739487441769973 -0.103671883216994;'
'0.707106781186548 -0.707106781186548 0'))
targets = np.matrix((
'0.819031913261206 0.567041924552012 0.087465312324590;'
'0.665139432070255 0.739487441769973 0.103671883216994;'
'0.707106781186548 0.707106781186548 0'))
tf_preds = tf.constant(predictions, shape=(3, 1, 3), dtype=tf.float32)
tf_targets = tf.constant(targets, shape=(3, 1, 3), dtype=tf.float32)
loss = tf.contrib.losses.cosine_distance(tf_preds, tf_targets, dim=2)
with self.test_session():
self.assertAlmostEqual(1.0, loss.eval(), 5)
def testSampleSpecificWeights(self):
loss = tf.contrib.losses.cosine_distance(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets),
dim=2,
weight=tf.constant([1, 0, 0]))
with self.test_session():
self.assertEqual(1.0, loss.eval())
def testMeasurementSpecificWeights(self):
loss = tf.contrib.losses.cosine_distance(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets),
dim=2,
weight=tf.constant([1, 0, 0, 1, 1, 1], shape=(3, 2)))
with self.test_session():
self.assertEqual(3.0 / 4.0, loss.eval())
def testValueErrorThrownWithShapelessPlaceholder(self):
tf_predictions = tf.placeholder(tf.float32)
with self.test_session():
with self.assertRaises(ValueError):
tf.contrib.losses.cosine_distance(
predictions=tf_predictions,
targets=tf.constant(self._targets),
dim=2,
weight=tf.constant([1, 0, 0, 1, 1, 1], shape=(3, 2)))
def testMeasurementSpecificWeightsWithPlaceholderWithShape(self):
tf_predictions = tf.placeholder(tf.float32, shape=self._targets.shape)
loss = tf.contrib.losses.cosine_distance(
predictions=tf_predictions,
targets=tf.constant(self._targets),
dim=2,
weight=tf.constant([1, 0, 0, 1, 1, 1], shape=(3, 2)))
with self.test_session() as sess:
loss = sess.run(loss, feed_dict={tf_predictions: self._predictions})
self.assertEqual(3.0 / 4.0, loss)
def testZeroLossWhenAllSampleSpecificWeightsAreZero(self):
loss = tf.contrib.losses.cosine_distance(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets),
dim=2,
weight=tf.zeros((3,)))
with self.test_session():
self.assertEqual(0, loss.eval())
def testZeroLossWhenAllMeasurementSpecificWeightsAreZero(self):
loss = tf.contrib.losses.cosine_distance(
predictions=tf.constant(self._predictions),
targets=tf.constant(self._targets),
dim=2,
weight=tf.zeros((3, 2)))
with self.test_session():
self.assertEqual(0, loss.eval())
if __name__ == '__main__':
tf.test.main()
|
Maximilian-Reuter/SickRage-1 | refs/heads/master | lib/sqlalchemy/connectors/__init__.py | 79 | # connectors/__init__.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
class Connector(object):
pass
|
anushray/django-sass-processor | refs/heads/master | sass_processor/templatetags/sass_tags.py | 4 | # -*- coding: utf-8 -*-
import os
import json
import sass
from django.conf import settings
from django.core.files.base import ContentFile
from django.template import Library, Context
from django.template.base import Node, TemplateSyntaxError
from django.utils.encoding import iri_to_uri
from django.utils.six.moves.urllib.parse import urljoin
from ..storage import SassFileStorage, find_file
register = Library()
class SassSrcNode(Node):
def __init__(self, path):
self.storage = SassFileStorage()
self.include_paths = list(getattr(settings, 'SASS_PROCESSOR_INCLUDE_DIRS', []))
self.prefix = iri_to_uri(getattr(settings, 'STATIC_URL', ''))
self._sass_exts = ('.scss', '.sass')
self._path = path
@classmethod
def handle_token(cls, parser, token):
bits = token.split_contents()
if len(bits) != 2:
raise TemplateSyntaxError("'{0}' takes a URL to a CSS file as its only argument".format(*bits))
path = parser.compile_filter(bits[1])
return cls(path)
@property
def path(self):
context = Context()
return self._path.resolve(context)
@property
def is_sass(self):
_, ext = os.path.splitext(self.path)
return ext in self._sass_exts
def is_latest(self, sourcemap_filename):
sourcemap_file = find_file(sourcemap_filename)
if not sourcemap_file or not os.path.isfile(sourcemap_file):
return False
sourcemap_mtime = os.stat(sourcemap_file).st_mtime
with open(sourcemap_file, 'r') as fp:
sourcemap = json.load(fp)
for srcfilename in sourcemap.get('sources'):
components = os.path.normpath(srcfilename).split(os.path.sep)
srcfilename = ''.join([os.path.sep + c for c in components if c != os.path.pardir])
if not os.path.isfile(srcfilename) or os.stat(srcfilename).st_mtime > sourcemap_mtime:
# at least one of the sources is younger than the sourcemap referring to it
return False
return True
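# Note: is_latest() is a coarse mtime check. The compiled CSS counts as
# fresh only when every source listed in the sourcemap is older than the
# sourcemap itself.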
def render(self, context):
path = self._path.resolve(context)
basename, ext = os.path.splitext(path)
filename = find_file(path)
if ext not in self._sass_exts:
# return the given path, since it ends neither in `.scss` nor in `.sass`
return urljoin(self.prefix, path)
# compare timestamp of sourcemap file with all its dependencies, and check if we must recompile
css_filename = basename + '.css'
url = urljoin(self.prefix, css_filename)
if not getattr(settings, 'SASS_PROCESSOR_ENABLED', settings.DEBUG):
return url
sourcemap_filename = css_filename + '.map'
if self.is_latest(sourcemap_filename):
return url
# otherwise compile the SASS/SCSS file into .css and store it
sourcemap_url = self.storage.url(sourcemap_filename)
content, sourcemap = sass.compile(filename=filename,
source_map_filename=sourcemap_url, include_paths=self.include_paths)
if self.storage.exists(css_filename):
self.storage.delete(css_filename)
self.storage.save(css_filename, ContentFile(content))
if self.storage.exists(sourcemap_filename):
self.storage.delete(sourcemap_filename)
self.storage.save(sourcemap_filename, ContentFile(sourcemap))
return url
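# Template usage (a minimal sketch; the stylesheet path is illustrative):
#   {% load sass_tags %}
#   <link href="{% sass_src 'myapp/css/mystyle.scss' %}" rel="stylesheet" type="text/css" />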
@register.tag(name='sass_src')
def render_sass_src(parser, token):
return SassSrcNode.handle_token(parser, token)
|
tinkhaven-organization/odoo | refs/heads/8.0 | addons/account/project/report/__init__.py | 427 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import analytic_journal
import analytic_balance
import inverted_analytic_balance
import cost_ledger
import quantity_cost_ledger
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
kermitfr/kermit-webui | refs/heads/master | src/webui/agent/templatetags/split.py | 1 | from django import template
from django.utils.safestring import mark_safe, SafeData
from django.template.defaultfilters import stringfilter
register = template.Library()
class SplitNode(template.Node):
def __init__(self, name, value):
self.name = name
self.value = value
def render(self, context):
context[self.name] = self.value
return ''
@register.simple_tag
def split_as_option(value, splitter='|', autoescape=None):
if not isinstance(value, SafeData):
value = mark_safe(value)
value = value.split(splitter)
result = ""
for v in value:
result += '<option value="%s">%s</option>\n' % (v, v)
return mark_safe(result)
split_as_option.is_safe = True
split_as_option.needs_autoescape = True
@register.simple_tag
def split_as_list(value, context_name='splitted_list', splitter=',', autoescape=None):
if not isinstance(value, SafeData):
value = mark_safe(value)
value = value.split(splitter)
return SplitNode(context_name, value)
|
mgarg1/ecg | refs/heads/master | ecg_visualizer_arduino_PC/galry/visuals/framebuffer_visual.py | 7 | from visual import Visual
import numpy as np
class FrameBufferVisual(Visual):
def initialize(self, shape=None, ntextures=1, coeffs=None, display=True):
if shape is None:
shape = (600, 600)
for i in xrange(ntextures):
self.add_texture('fbotex%d' % i, ncomponents=3, ndim=2, shape=shape,
data=np.zeros((shape[0], shape[1], 3)))
# self.add_texture('fbotex2', ncomponents=3, ndim=2, shape=shape,
# data=np.zeros((shape[0], shape[1], 3)))
# self.add_framebuffer('fbo', texture=['fbotex', 'fbotex2'])
self.add_framebuffer('fbo', texture=['fbotex%d' % i for i in xrange(ntextures)])
if not display:
self.add_attribute('position', ndim=2)#, data=np.zeros((1, 2)))
self.size = 0
return
points = (-1, -1, 1, 1)
x0, y0, x1, y1 = points
x0, x1 = min(x0, x1), max(x0, x1)
y0, y1 = min(y0, y1), max(y0, y1)
position = np.zeros((4,2))
position[0,:] = (x0, y0)
position[1,:] = (x1, y0)
position[2,:] = (x0, y1)
position[3,:] = (x1, y1)
tex_coords = np.zeros((4,2))
tex_coords[0,:] = (0, 0)
tex_coords[1,:] = (1, 0)
tex_coords[2,:] = (0, 1)
tex_coords[3,:] = (1, 1)
self.size = 4
self.primitive_type = 'TRIANGLE_STRIP'
# texture coordinates
self.add_attribute("tex_coords", vartype="float", ndim=2,
data=tex_coords)
self.add_varying("vtex_coords", vartype="float", ndim=2)
self.add_attribute("position", vartype="float", ndim=2, data=position)
self.add_vertex_main("""vtex_coords = tex_coords;""")
if coeffs is None:
self.add_fragment_main("""
out_color = texture2D(fbotex0, vtex_coords);
""")
else:
FS = ""
for i in xrange(ntextures):
FS += """
vec4 out%d = texture2D(fbotex%d, vtex_coords);
""" % (i, i)
FS += "out_color = " + " + ".join(["%.5f * out%d" % (coeffs[i], i) for i in xrange(ntextures)]) + ";"
self.add_fragment_main(FS)
|
madelynfreed/rlundo | refs/heads/master | venv/lib/python2.7/site-packages/IPython/utils/io.py | 4 | # encoding: utf-8
"""
IO related utilities.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
from __future__ import print_function
from __future__ import absolute_import
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import codecs
from contextlib import contextmanager
import io
import os
import shutil
import stat
import sys
import tempfile
from .capture import CapturedIO, capture_output
from .py3compat import string_types, input, PY3
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
class IOStream:
def __init__(self,stream, fallback=None):
if not hasattr(stream,'write') or not hasattr(stream,'flush'):
if fallback is not None:
stream = fallback
else:
raise ValueError("fallback required, but not specified")
self.stream = stream
self._swrite = stream.write
# clone all methods not overridden:
def clone(meth):
return not hasattr(self, meth) and not meth.startswith('_')
for meth in filter(clone, dir(stream)):
setattr(self, meth, getattr(stream, meth))
def __repr__(self):
cls = self.__class__
tpl = '{mod}.{cls}({args})'
return tpl.format(mod=cls.__module__, cls=cls.__name__, args=self.stream)
def write(self,data):
try:
self._swrite(data)
except:
try:
# print handles some unicode issues which may trip a plain
# write() call. Emulate write() by using an empty end
# argument.
print(data, end='', file=self.stream)
except:
# if we get here, something is seriously broken.
print('ERROR - failed to write data to stream:', self.stream,
file=sys.stderr)
def writelines(self, lines):
if isinstance(lines, string_types):
lines = [lines]
for line in lines:
self.write(line)
# This class used to have a writeln method, but regular files and streams
# in Python don't have one. It was removed to keep this class completely
# compatible with them.
@property
def closed(self):
return self.stream.closed
def close(self):
pass
# setup stdin/stdout/stderr to sys.stdin/sys.stdout/sys.stderr
devnull = open(os.devnull, 'w')
stdin = IOStream(sys.stdin, fallback=devnull)
stdout = IOStream(sys.stdout, fallback=devnull)
stderr = IOStream(sys.stderr, fallback=devnull)
class IOTerm:
""" Term holds the file or file-like objects for handling I/O operations.
These are normally just sys.stdin, sys.stdout and sys.stderr but for
Windows they can be replaced to allow editing the strings before they are
displayed."""
# In the future, having IPython channel all its I/O operations through
# this class will make it easier to embed it into other environments which
# are not a normal terminal (such as a GUI-based shell)
def __init__(self, stdin=None, stdout=None, stderr=None):
mymodule = sys.modules[__name__]
self.stdin = IOStream(stdin, mymodule.stdin)
self.stdout = IOStream(stdout, mymodule.stdout)
self.stderr = IOStream(stderr, mymodule.stderr)
class Tee(object):
"""A class to duplicate an output stream to stdout/err.
This works in a manner very similar to the Unix 'tee' command.
When the object is closed or deleted, it closes the original file given to
it for duplication.
"""
# Inspired by:
# http://mail.python.org/pipermail/python-list/2007-May/442737.html
def __init__(self, file_or_name, mode="w", channel='stdout'):
"""Construct a new Tee object.
Parameters
----------
file_or_name : filename or open filehandle (writable)
File that will be duplicated
mode : optional, valid mode for open().
If a filename was given, open with this mode.
channel : str, one of ['stdout', 'stderr']
"""
if channel not in ['stdout', 'stderr']:
raise ValueError('Invalid channel spec %s' % channel)
if hasattr(file_or_name, 'write') and hasattr(file_or_name, 'seek'):
self.file = file_or_name
else:
self.file = open(file_or_name, mode)
self.channel = channel
self.ostream = getattr(sys, channel)
setattr(sys, channel, self)
self._closed = False
def close(self):
"""Close the file and restore the channel."""
self.flush()
setattr(sys, self.channel, self.ostream)
self.file.close()
self._closed = True
def write(self, data):
"""Write data to both channels."""
self.file.write(data)
self.ostream.write(data)
self.ostream.flush()
def flush(self):
"""Flush both channels."""
self.file.flush()
self.ostream.flush()
def __del__(self):
if not self._closed:
self.close()
def ask_yes_no(prompt, default=None, interrupt=None):
"""Asks a question and returns a boolean (y/n) answer.
If default is given (one of 'y','n'), it is used if the user input is
empty. If interrupt is given (one of 'y','n'), it is used if the user
presses Ctrl-C. Otherwise the question is repeated until an answer is
given.
An EOF is treated as the default answer. If there is no default, an
exception is raised to prevent infinite loops.
Valid answers are: y/yes/n/no (match is not case sensitive)."""
answers = {'y':True,'n':False,'yes':True,'no':False}
ans = None
while ans not in answers.keys():
try:
ans = input(prompt+' ').lower()
if not ans: # response was an empty string
ans = default
except KeyboardInterrupt:
if interrupt:
ans = interrupt
except EOFError:
if default in answers.keys():
ans = default
print()
else:
raise
return answers[ans]
def temp_pyfile(src, ext='.py'):
"""Make a temporary python file, return filename and filehandle.
Parameters
----------
src : string or list of strings (no need for ending newlines if list)
Source code to be written to the file.
ext : optional, string
Extension for the generated file.
Returns
-------
(filename, open filehandle)
It is the caller's responsibility to close the open file and unlink it.
"""
fname = tempfile.mkstemp(ext)[1]
f = open(fname,'w')
f.write(src)
f.flush()
return fname, f
def _copy_metadata(src, dst):
"""Copy the set of metadata we want for atomic_writing.
Permission bits and flags. We'd like to copy file ownership as well, but we
can't do that.
"""
shutil.copymode(src, dst)
st = os.stat(src)
if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
os.chflags(dst, st.st_flags)
@contextmanager
def atomic_writing(path, text=True, encoding='utf-8', **kwargs):
"""Context manager to write to a file only if the entire write is successful.
This works by creating a temporary file in the same directory, and renaming
it over the old file if the context is exited without an error. If other
file names are hard linked to the target file, this relationship will not be
preserved.
On Windows, there is a small chink in the atomicity: the target file is
deleted before renaming the temporary file over it. This appears to be
unavoidable.
Parameters
----------
path : str
The target file to write to.
text : bool, optional
Whether to open the file in text mode (i.e. to write unicode). Default is
True.
encoding : str, optional
The encoding to use for files opened in text mode. Default is UTF-8.
**kwargs
Passed to :func:`io.open`.
"""
# realpath doesn't work on Windows: http://bugs.python.org/issue9949
# Luckily, we only need to resolve the file itself being a symlink, not
# any of its directories, so this will suffice:
if os.path.islink(path):
path = os.path.join(os.path.dirname(path), os.readlink(path))
dirname, basename = os.path.split(path)
tmp_dir = tempfile.mkdtemp(prefix=basename, dir=dirname)
tmp_path = os.path.join(tmp_dir, basename)
if text:
fileobj = io.open(tmp_path, 'w', encoding=encoding, **kwargs)
else:
fileobj = io.open(tmp_path, 'wb', **kwargs)
try:
yield fileobj
except:
fileobj.close()
shutil.rmtree(tmp_dir)
raise
# Flush to disk
fileobj.flush()
os.fsync(fileobj.fileno())
# Written successfully, now rename it
fileobj.close()
# Copy permission bits, access time, etc.
try:
_copy_metadata(path, tmp_path)
except OSError:
# e.g. the file didn't already exist. Ignore any failure to copy metadata
pass
if os.name == 'nt' and os.path.exists(path):
# Rename over existing file doesn't work on Windows
os.remove(path)
os.rename(tmp_path, path)
shutil.rmtree(tmp_dir)
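# A minimal usage sketch (the path is illustrative):
#
#   with atomic_writing('settings.json') as f:
#       f.write(u'{"key": "value"}')
#
# If the body raises, the temporary file is discarded and any existing
# file at `path` is left untouched.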
def raw_print(*args, **kw):
"""Raw print to sys.__stdout__, otherwise identical interface to print()."""
print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
file=sys.__stdout__)
sys.__stdout__.flush()
def raw_print_err(*args, **kw):
"""Raw print to sys.__stderr__, otherwise identical interface to print()."""
print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
file=sys.__stderr__)
sys.__stderr__.flush()
# Short aliases for quick debugging, do NOT use these in production code.
rprint = raw_print
rprinte = raw_print_err
def unicode_std_stream(stream='stdout'):
u"""Get a wrapper to write unicode to stdout/stderr as UTF-8.
This ignores environment variables and default encodings, to reliably write
unicode to stdout or stderr.
::
unicode_std_stream().write(u'ł@e¶ŧ←')
"""
assert stream in ('stdout', 'stderr')
stream = getattr(sys, stream)
if PY3:
try:
stream_b = stream.buffer
except AttributeError:
# sys.stdout has been replaced - use it directly
return stream
else:
stream_b = stream
return codecs.getwriter('utf-8')(stream_b)
|
jsmartin/shade | refs/heads/master | shade/tests/unit/test_keypair.py | 1 | #
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shade
from mock import patch
from novaclient import exceptions as nova_exc
from shade import exc
from shade import meta
from shade.tests import fakes
from shade.tests.unit import base
class TestKeypair(base.TestCase):
def setUp(self):
super(TestKeypair, self).setUp()
self.cloud = shade.openstack_cloud()
@patch.object(shade.OpenStackCloud, 'nova_client')
def test_create_keypair(self, mock_nova):
keyname = 'my_keyname'
pub_key = 'ssh-rsa BLAH'
key = fakes.FakeKeypair('keyid', keyname, pub_key)
mock_nova.keypairs.create.return_value = key
new_key = self.cloud.create_keypair(keyname, pub_key)
mock_nova.keypairs.create.assert_called_once_with(
name=keyname, public_key=pub_key
)
self.assertEqual(meta.obj_to_dict(key), new_key)
@patch.object(shade.OpenStackCloud, 'nova_client')
def test_create_keypair_exception(self, mock_nova):
mock_nova.keypairs.create.side_effect = Exception()
self.assertRaises(exc.OpenStackCloudException,
self.cloud.create_keypair, '', '')
@patch.object(shade.OpenStackCloud, 'nova_client')
def test_delete_keypair(self, mock_nova):
self.assertTrue(self.cloud.delete_keypair('mykey'))
mock_nova.keypairs.delete.assert_called_once_with(
key='mykey'
)
@patch.object(shade.OpenStackCloud, 'nova_client')
def test_delete_keypair_not_found(self, mock_nova):
mock_nova.keypairs.delete.side_effect = nova_exc.NotFound('')
self.assertFalse(self.cloud.delete_keypair('invalid'))
@patch.object(shade.OpenStackCloud, 'nova_client')
def test_list_keypairs(self, mock_nova):
self.cloud.list_keypairs()
mock_nova.keypairs.list.assert_called_once_with()
@patch.object(shade.OpenStackCloud, 'nova_client')
def test_list_keypairs_exception(self, mock_nova):
mock_nova.keypairs.list.side_effect = Exception()
self.assertRaises(exc.OpenStackCloudException,
self.cloud.list_keypairs)
|
simvisage/oricreate | refs/heads/master | docs/howtos/ex07_sim_task/sim03_map_pattern_to_target_face_plot.py | 1 | r'''
Construct a general crease pattern configuration,
used in the examples below demonstrating the evaluation of goal functions
and constraints.
'''
from .sim03_map_pattern_to_target_face import create_sim_step
if __name__ == '__main__':
import mayavi.mlab as m
sim_step = create_sim_step()
cp = sim_step.cp_state
m.figure(bgcolor=(1.0, 1.0, 1.0), fgcolor=(0.6, 0.6, 0.6))
cp.plot_mlab(m, lines=True)
m.axes(extent=[0, 1, 0, 1, 0, .5])
m.show()
arr = m.screenshot()
import pylab as p
p.imshow(arr)
p.axis('off')
p.show()
|
sadleader/odoo | refs/heads/master | addons/l10n_tr/__openerp__.py | 259 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Turkey - Accounting',
'version': '1.beta',
'category': 'Localization/Account Charts',
'description': """
OpenERP module for the Turkish uniform chart of accounts (Tek Düzen Hesap Planı) template.
==========================================================================================
After this module is installed, the accounting configuration wizard runs.
* The wizard will ask you for the chart of accounts template, the company on
which to install the chart, your bank account details, and the related currency.
""",
'author': 'Ahmet Altınışık',
'maintainer':'https://launchpad.net/~openerp-turkey',
'website':'https://launchpad.net/openerp-turkey',
'depends': [
'account',
'base_vat',
'account_chart',
],
'data': [
'account_code_template.xml',
'account_tdhp_turkey.xml',
'account_tax_code_template.xml',
'account_chart_template.xml',
'account_tax_template.xml',
'l10n_tr_wizard.xml',
],
'demo': [],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
alikhil/pcms-standing-parser | refs/heads/master | analytics.py | 1 | import pandas as pd
def get_analytics(submissions, data_type="month"):
dataframe = pd.DataFrame.from_records([s.__dict__ for s in submissions])
success_counts = dataframe[data_type][dataframe["status"] == "success"].value_counts()
submissions_counts = dataframe[data_type].value_counts()
return {"all": submissions_counts.to_json(), "success": success_counts.to_json(), "data_type": data_type}
|
thaumos/ansible | refs/heads/devel | test/units/modules/network/ios/test_ios_bgp.py | 38 | #
# (c) 2019, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.network.ios.providers.cli.config.bgp.process import Provider
from ansible.modules.network.ios import ios_bgp
from .ios_module import TestIosModule, load_fixture
class TestIosBgpModule(TestIosModule):
module = ios_bgp
def setUp(self):
super(TestIosBgpModule, self).setUp()
self._bgp_config = load_fixture('ios_bgp_config.cfg')
def test_ios_bgp(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, router_id='192.0.2.2', networks=None,
address_family=None), operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, ['router bgp 64496', 'bgp router-id 192.0.2.2', 'exit'])
def test_ios_bgp_idempotent(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, router_id='192.0.2.1', networks=None,
address_family=None), operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_remove(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, networks=None, address_family=None), operation='delete'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, ['no router bgp 64496'])
def test_ios_bgp_neighbor(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, neighbors=[dict(neighbor='192.51.100.2', remote_as=64496)],
networks=None, address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, ['router bgp 64496', 'neighbor 192.51.100.2 remote-as 64496', 'exit'])
def test_ios_bgp_neighbor_idempotent(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, neighbors=[dict(neighbor='192.51.100.1', remote_as=64496,
timers=dict(keepalive=120, holdtime=360,
min_neighbor_holdtime=360))],
networks=None, address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_network(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, networks=[dict(prefix='192.0.1.0', masklen=23, route_map='RMAP_1')],
address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(sorted(commands), sorted(['router bgp 64496', 'network 192.0.1.0 mask 255.255.254.0 route-map RMAP_1',
'exit']))
def test_ios_bgp_network_idempotent(self):
obj = Provider(
params=dict(config=dict(bgp_as=64496, networks=[dict(prefix='192.0.2.0', masklen=23, route_map='RMAP_1'),
dict(prefix='198.51.100.0', masklen=25,
route_map='RMAP_2')],
address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_address_family_redistribute(self):
rd_1 = dict(protocol='ospf', id='233', metric=90, route_map=None)
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='unicast', redistribute=[rd_1])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'address-family ipv4', 'redistribute ospf 233 metric 90',
'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_ios_bgp_address_family_redistribute_idempotent(self):
rd_1 = dict(protocol='eigrp', metric=10, route_map='RMAP_3', id=None)
rd_2 = dict(protocol='static', metric=100, id=None, route_map=None)
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='unicast', redistribute=[rd_1, rd_2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_address_family_neighbors(self):
af_nbr_1 = dict(neighbor='192.51.100.1', maximum_prefix=35, activate=True)
af_nbr_2 = dict(neighbor='192.51.100.3', route_reflector_client=True, activate=True)
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='multicast', neighbors=[af_nbr_1, af_nbr_2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'address-family ipv4 multicast', 'neighbor 192.51.100.1 activate',
'neighbor 192.51.100.1 maximum-prefix 35', 'neighbor 192.51.100.3 activate',
'neighbor 192.51.100.3 route-reflector-client', 'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_ios_bgp_address_family_neighbors_idempotent(self):
af_nbr_1 = dict(neighbor='203.0.113.1', remove_private_as=True, maximum_prefix=100)
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='unicast', neighbors=[af_nbr_1])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_address_family_networks(self):
net = dict(prefix='1.0.0.0', masklen=8, route_map='RMAP_1')
net2 = dict(prefix='192.168.1.0', masklen=24, route_map='RMAP_2')
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='multicast', networks=[net, net2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'address-family ipv4 multicast', 'network 1.0.0.0 mask 255.0.0.0 route-map RMAP_1',
'network 192.168.1.0 mask 255.255.255.0 route-map RMAP_2', 'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_ios_bgp_address_family_networks_idempotent(self):
net = dict(prefix='203.0.113.0', masklen=27, route_map='RMAP_1')
net2 = dict(prefix='192.0.2.0', masklen=26, route_map='RMAP_2')
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', safi='multicast', networks=[net, net2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_ios_bgp_operation_override(self):
net_1 = dict(prefix='1.0.0.0', masklen=8, route_map='RMAP_1')
net_2 = dict(prefix='192.168.1.0', masklen=24, route_map='RMAP_2')
nbr_1 = dict(neighbor='192.51.100.1', remote_as=64496, update_source='GigabitEthernet0/1')
nbr_2 = dict(neighbor='192.51.100.3', remote_as=64496, timers=dict(keepalive=300, holdtime=360,
min_neighbor_holdtime=360))
af_nbr_1 = dict(neighbor='192.51.100.1', maximum_prefix=35)
af_nbr_2 = dict(neighbor='192.51.100.3', route_reflector_client=True)
af_1 = dict(afi='ipv4', safi='unicast', neighbors=[af_nbr_1, af_nbr_2])
af_2 = dict(afi='ipv4', safi='multicast', networks=[net_1, net_2])
config = dict(bgp_as=64496, neighbors=[nbr_1, nbr_2], address_family=[af_1, af_2], networks=None)
obj = Provider(params=dict(config=config, operation='override'))
commands = obj.render(self._bgp_config)
cmd = ['no router bgp 64496', 'router bgp 64496', 'neighbor 192.51.100.1 remote-as 64496',
'neighbor 192.51.100.1 update-source GigabitEthernet0/1', 'neighbor 192.51.100.3 remote-as 64496',
'neighbor 192.51.100.3 timers 300 360 360', 'address-family ipv4',
'neighbor 192.51.100.1 maximum-prefix 35', 'neighbor 192.51.100.3 route-reflector-client',
'exit-address-family',
'address-family ipv4 multicast', 'network 1.0.0.0 mask 255.0.0.0 route-map RMAP_1',
'network 192.168.1.0 mask 255.255.255.0 route-map RMAP_2',
'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_ios_bgp_operation_replace(self):
rd = dict(protocol='ospf', id=223, metric=110, route_map=None)
net = dict(prefix='203.0.113.0', masklen=27, route_map='RMAP_1')
net2 = dict(prefix='192.0.2.0', masklen=26, route_map='RMAP_2')
af_1 = dict(afi='ipv4', safi='unicast', redistribute=[rd])
af_2 = dict(afi='ipv4', safi='multicast', networks=[net, net2])
config = dict(bgp_as=64496, address_family=[af_1, af_2], networks=None)
obj = Provider(params=dict(config=config, operation='replace'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'address-family ipv4', 'redistribute ospf 223 metric 110',
'no redistribute eigrp',
'no redistribute static', 'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_ios_bgp_operation_replace_with_new_as(self):
rd = dict(protocol='ospf', id=223, metric=110, route_map=None)
af_1 = dict(afi='ipv4', safi='unicast', redistribute=[rd])
config = dict(bgp_as=64497, address_family=[af_1], networks=None)
obj = Provider(params=dict(config=config, operation='replace'))
commands = obj.render(self._bgp_config)
cmd = ['no router bgp 64496', 'router bgp 64497', 'address-family ipv4',
'redistribute ospf 223 metric 110',
'exit-address-family', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
|
pedrobaeza/website | refs/heads/8.0 | website_signup_legal_page_required/__init__.py | 14 | # -*- coding: utf-8 -*-
# © 2015 Antiun Ingeniería, S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import controllers, models
|
Senseg/Py4A | refs/heads/master | python-modules/twisted/twisted/trial/test/mockcustomsuite3.py | 87 | # Copyright (c) 2006 Twisted Matrix Laboratories. See LICENSE for details
"""
Mock test module that contains both a C{test_suite} and a C{testSuite} method.
L{runner.TestLoader} should load the tests from the C{testSuite}, not from the
C{Foo} C{TestCase} nor from the C{test_suite} method.
See L{twisted.trial.test.test_loader.LoaderTest.test_loadModuleWithBothCustom}.
"""
from twisted.trial import unittest, runner
class Foo(unittest.TestCase):
def test_foo(self):
pass
def test_suite():
ts = runner.TestSuite()
ts.name = "test_suite"
return ts
def testSuite():
ts = runner.TestSuite()
ts.name = "testSuite"
return ts
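# Hedged illustration of the loading behavior described in the module
# docstring (assuming trial's standard loader semantics):
#
#   from twisted.trial import runner
#   from twisted.trial.test import mockcustomsuite3
#
#   loader = runner.TestLoader()
#   suite = loader.loadModule(mockcustomsuite3)
#   suite.name  # -> "testSuite": testSuite() wins over test_suite() and Foo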
|
chongtianfeiyu/kbengine | refs/heads/master | kbe/res/scripts/common/Lib/test/test_structseq.py | 84 | import os
import time
import unittest
from test import support
class StructSeqTest(unittest.TestCase):
def test_tuple(self):
t = time.gmtime()
self.assertIsInstance(t, tuple)
astuple = tuple(t)
self.assertEqual(len(t), len(astuple))
self.assertEqual(t, astuple)
# Check that slicing works the same way; at one point, slicing t[i:j] with
# 0 < i < j could produce NULLs in the result.
for i in range(-len(t), len(t)):
self.assertEqual(t[i:], astuple[i:])
for j in range(-len(t), len(t)):
self.assertEqual(t[i:j], astuple[i:j])
for j in range(-len(t), len(t)):
self.assertEqual(t[:j], astuple[:j])
self.assertRaises(IndexError, t.__getitem__, -len(t)-1)
self.assertRaises(IndexError, t.__getitem__, len(t))
for i in range(-len(t), len(t)-1):
self.assertEqual(t[i], astuple[i])
def test_repr(self):
t = time.gmtime()
self.assertTrue(repr(t))
t = time.gmtime(0)
self.assertEqual(repr(t),
"time.struct_time(tm_year=1970, tm_mon=1, tm_mday=1, tm_hour=0, "
"tm_min=0, tm_sec=0, tm_wday=3, tm_yday=1, tm_isdst=0)")
# os.stat() gives a complicated struct sequence.
st = os.stat(__file__)
rep = repr(st)
self.assertTrue(rep.startswith("os.stat_result"))
self.assertIn("st_mode=", rep)
self.assertIn("st_ino=", rep)
self.assertIn("st_dev=", rep)
def test_concat(self):
t1 = time.gmtime()
t2 = t1 + tuple(t1)
for i in range(len(t1)):
self.assertEqual(t2[i], t2[i+len(t1)])
def test_repeat(self):
t1 = time.gmtime()
t2 = 3 * t1
for i in range(len(t1)):
self.assertEqual(t2[i], t2[i+len(t1)])
self.assertEqual(t2[i], t2[i+2*len(t1)])
def test_contains(self):
t1 = time.gmtime()
for item in t1:
self.assertIn(item, t1)
self.assertNotIn(-42, t1)
def test_hash(self):
t1 = time.gmtime()
self.assertEqual(hash(t1), hash(tuple(t1)))
def test_cmp(self):
t1 = time.gmtime()
t2 = type(t1)(t1)
self.assertEqual(t1, t2)
self.assertTrue(not (t1 < t2))
self.assertTrue(t1 <= t2)
self.assertTrue(not (t1 > t2))
self.assertTrue(t1 >= t2)
self.assertTrue(not (t1 != t2))
def test_fields(self):
t = time.gmtime()
self.assertEqual(len(t), t.n_sequence_fields)
self.assertEqual(t.n_unnamed_fields, 0)
self.assertEqual(t.n_fields, time._STRUCT_TM_ITEMS)
def test_constructor(self):
t = time.struct_time
self.assertRaises(TypeError, t)
self.assertRaises(TypeError, t, None)
self.assertRaises(TypeError, t, "123")
self.assertRaises(TypeError, t, "123", dict={})
self.assertRaises(TypeError, t, "123456789", dict=None)
s = "123456789"
self.assertEqual("".join(t(s)), s)
def test_eviltuple(self):
class Exc(Exception):
pass
# Devious code could crash structseqs' constructors
class C:
def __getitem__(self, i):
raise Exc
def __len__(self):
return 9
self.assertRaises(Exc, time.struct_time, C())
def test_reduce(self):
t = time.gmtime()
x = t.__reduce__()
def test_extended_getslice(self):
# Test extended slicing by comparing with list slicing.
t = time.gmtime()
L = list(t)
indices = (0, None, 1, 3, 19, 300, -1, -2, -31, -300)
for start in indices:
for stop in indices:
# Skip step 0 (invalid)
for step in indices[1:]:
self.assertEqual(list(t[start:stop:step]),
L[start:stop:step])
def test_main():
support.run_unittest(StructSeqTest)
if __name__ == "__main__":
test_main()
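# Hedged illustration of the struct sequence behavior exercised above:
# a struct_time is a real tuple subclass, so named fields and positional
# indexing agree.
#
#   >>> t = time.gmtime(0)
#   >>> isinstance(t, tuple)
#   True
#   >>> (t.tm_year, t[0])
#   (1970, 1970)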
|
mlodz/theghostacademy | refs/heads/master | scripts/run.py | 1 | import sys
import os.path
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
)
from models import parser
from models import facts
from models import terran
def main():
# Setup Facts
units = parser.parse_dependency_file('data/HotS/terran_units.csv')
buildings = parser.parse_dependency_file('data/HotS/terran_buildings.csv')
research = parser.parse_dependency_file('data/HotS/terran_research.csv')
abilities = parser.parse_building_ability_file('data/HotS/terran_abilities.csv')
item_facts = facts.Facts(units, buildings, research, abilities)
#build_order = parser.parse_build_order_file('input_examples/dev_test.txt', item_facts)
#build_order = parser.parse_build_order_file('input_examples/HotS_banshee_opener.txt', item_facts)
build_order = parser.parse_build_order_file('input_examples/standard_opener.txt', item_facts)
game = terran.HotsGame(build_order, item_facts).run()
if __name__ == "__main__":
    main()
|
pierg75/pier-sosreport | refs/heads/master | sos/plugins/nfsganesha.py | 2 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class NfsGanesha(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin):
"""NFS-Ganesha file server information
"""
plugin_name = 'nfsganesha'
profiles = ('storage', 'network', 'nfs')
packages = ('nfs-ganesha',)  # trailing comma: must be a tuple, not a bare string
def setup(self):
self.add_copy_spec([
"/etc/ganesha",
"/etc/sysconfig/ganesha",
"/var/run/sysconfig/ganesha",
"/var/log/ganesha.log",
"/var/log/ganesha-gfapi.log"
])
self.add_cmd_output([
"dbus-send --type=method_call --print-reply"
" --system --dest=org.ganesha.nfsd "
"/org/ganesha/nfsd/ExportMgr "
"org.ganesha.nfsd.exportmgr.ShowExports"
])
return
# vim: set et ts=4 sw=4 :
|
foobarbazblarg/stayclean | refs/heads/master | stayclean-2020-december/venv/lib/python3.8/site-packages/pip/_vendor/requests/__version__.py | 9 | # .-. .-. .-. . . .-. .-. .-. .-.
# |( |- |.| | | |- `-. | `-.
# ' ' `-' `-`.`-' `-' `-' ' `-'
__title__ = 'requests'
__description__ = 'Python HTTP for Humans.'
__url__ = 'https://requests.readthedocs.io'
__version__ = '2.25.0'
__build__ = 0x022500
__author__ = 'Kenneth Reitz'
__author_email__ = 'me@kennethreitz.org'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2020 Kenneth Reitz'
__cake__ = u'\u2728 \U0001f370 \u2728'
|
rmboggs/django | refs/heads/master | tests/admin_views/forms.py | 339 | from django import forms
from django.contrib.admin.forms import AdminAuthenticationForm
class CustomAdminAuthenticationForm(AdminAuthenticationForm):
class Media:
css = {'all': ('path/to/media.css',)}
def clean_username(self):
username = self.cleaned_data.get('username')
if username == 'customform':
raise forms.ValidationError('custom form error')
return username
|
Perferom/android_external_chromium_org | refs/heads/android-4.4 | tools/site_compare/scrapers/firefox/__init__.py | 179 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Selects the appropriate scraper for Firefox."""
def GetScraper(version):
"""Returns the scraper module for the given version.
Args:
version: version string of IE, or None for most recent
Returns:
scrape module for given version
"""
# Pychecker will warn that the parameter is unused; we only
# support one version of the Firefox scraper at this time.
return __import__("firefox2", globals(), locals(), [''])
# if invoked rather than imported, test
if __name__ == "__main__":
print GetScraper("2.0.0.6").version
|
bkirui/odoo | refs/heads/8.0 | addons/stock_picking_wave/__openerp__.py | 312 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Warehouse Management: Waves',
'version': '1.0',
'category': 'Stock Management',
'description': """
This module adds the picking wave option in warehouse management.
=================================================================
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/warehouse',
'depends': ['stock'],
'data': ['security/ir.model.access.csv',
'stock_picking_wave_view.xml',
'stock_picking_wave_sequence.xml',
'wizard/picking_to_wave_view.xml',
],
'demo': [
'stock_picking_wave_demo.xml',
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
rocopartners/django-oscar | refs/heads/master | src/oscar/apps/shipping/migrations/0002_auto_20150604_1450.py | 61 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('shipping', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='orderanditemcharges',
name='countries',
field=models.ManyToManyField(to='address.Country', verbose_name='Countries', blank=True),
),
migrations.AlterField(
model_name='weightbased',
name='countries',
field=models.ManyToManyField(to='address.Country', verbose_name='Countries', blank=True),
),
]
|
mbelmadani/motifgp | refs/heads/master | motifgp/utils.py | 1 | # Generator to create multiple patterns from a regex
import altschulEriksonDinuclShuffle #dinuclShuffle
import re
import sys
class Utils():
def __init__(self):
self.complement = {'A': 'T',
'C': 'G',
'G': 'C',
'T': 'A',
'[': ']',
']': '['
}
def readfasta(self, filepath, HARDMASK=False):
with open(filepath, 'r') as f:
buff = ""
for line in f:
if HARDMASK:
text = line.strip()
else:
# FIXME: Legacy behavior
text = line.upper().strip()
#print "text",text, len(text)
if len(text) == 0:
continue
if text[0] == '>':
if len(buff) > 0:
yield buff
buff = ""
yield text
elif text[0].upper() in ["A","C","G","T","N"]:
#only add to buffer lines that are sequences
buff += text
if len(buff) > 0:
yield buff
def produce_sequences(self, regex):
regex = regex.replace("*", "[ACTG]") # Easier that way...
regex = regex.strip()
regex_as_list = []
current_charclass = ""
charclass_flag = 0
for c in regex:
if c == '[':
charclass_flag = 1
elif c == ']':
charclass_flag = 0
regex_as_list.append(current_charclass)
current_charclass = ""
elif len(str(c)) == 1:
if charclass_flag:
current_charclass += c
else:
regex_as_list.append(c)
else:
print "Rejecting token",c
list_of_sequences = [""]
for token in regex_as_list:
if len(token) > 1:
old_list = list_of_sequences[:] # Copy old list by value
old_list_count = len(old_list)
for i in range(len(token)-1):
list_of_sequences.extend(old_list)
for i in range(old_list_count): #For each sublist
for j in range(len(token)): #For each item in the charclass..
index = j*old_list_count + i
list_of_sequences[index] += token[j]
else:
for i in range(len(list_of_sequences)):
list_of_sequences[i] += token
return list_of_sequences
# More or less deprecated...
def JDonnerRegex(self, pattern):
    """
    Used for compatibility with J. Donner's Suffix Tree
    """
    return self.produce_sequences(pattern)
def list_to_regex_obj(self, a_list):
"""
Creates a regex objects out of a list of regex tokens
"""
expr = self.list_to_regex_string(a_list)
try:
return re.compile(expr)
except Exception as e:
print "Bad regular expression:", expr
raise e
def list_to_regex_string(self, a_list):
"""
Creates a regex string out of a list of regex tokens
"""
expr = "".join(["["+x+"]" if len(x) > 1 else x for x in a_list])
return expr
def load_datasets(self,
path_pos,
path_neg,
background_alg,
SHOULD_SHORT=0,
HEADERS=False,
HARDMASK=False
):
positive = []
negative = []
header_pos = []
header_neg = []
print "HARDMASK:", HARDMASK
# Load the positive set from the path_pos FASTA
for line in self.readfasta(path_pos, HARDMASK):
if HEADERS and line[0] == '>':
header_pos.append(line)
else:
positive.append(line)
if SHOULD_SHORT > 0 and int(SHOULD_SHORT) == len(positive):
break
# Load background as a shuffled copy of positive
if path_neg != None:
# Load background from the path_neg FASTA
for line in self.readfasta(path_neg):  # Should the background determine hardmasking with the same variable as positive?
if HEADERS and line[0] == '>':
header_neg.append(line)
else:
negative.append(line)
if SHOULD_SHORT > 0 and int(SHOULD_SHORT) == len(negative):
break
elif background_alg == "dinuclShuffle":
for line in positive:
sh_line = altschulEriksonDinuclShuffle.dinuclShuffle(line.upper())
negative.append(sh_line)
else:
#No background
negative.append("NNNNNNNNNNNNNNNNNNNNNNNNNNN")
print "Lengths:", len(positive), len(negative)
if HEADERS:
return positive, negative, header_pos, header_neg
else:
return positive, negative
def motif_str_to_list(self, string):
"""
Convert str to a list of of chars or charclasses
"""
motif = []
flag = 0 # Charclass parsing
buff = ""
string = string.strip()
for c in string:
if flag == 1:
if c == ']':
flag = 0
motif.append(buff)
buff = ""
else:
buff += c
else:
if c == '[':
flag = 1
else:
motif.append(c)
motif = filter(bool, motif)
return motif
"""
string = string.replace('[', ']').split(']')
string = [ x.strip() for x in string]
a_list = filter(bool, string)
return a_list
"""
def reverse_complement(self, pattern):
revcomp = ""
revcomp = "".join([self.complement.get(c) for c in reversed(pattern)])
return revcomp
def add_reverse_complement(self, pattern):
# Append reverse complment to the pattern
# Sorting to return consistant key on cache, even if complement is given next.
merge = "|".join(sorted([pattern, self.reverse_complement(pattern)]))
return merge
def motif_eraser(self, dataset, patterns ):
"""
EXPERIMENTAL: Takes a list of sequences and removes occurrences of patterns.
Consideration: The order in which patterns are deleted matters.
"""
for _p in patterns:
p = re.compile(_p)
erased = []
for idx in range(len(dataset)):
m = p.search(dataset[idx])
if m:
replacement = 'N' * len(m.group(0))
if p.sub(replacement, dataset[idx]):
erased.append(p.sub(replacement, dataset[idx]))
else:
erased.append( dataset[idx] )
dataset = erased
return dataset
def get_motifs_from_nef(self, nef_path, tag="NE"):
"""
Finds motifs in a .tsv format (such as .nef or .neft) stored under the field tag.
Returns a list of motifs
"""
NETWORK_EXPRESSION_TAG = tag
motifs = []
with open(nef_path) as f:
idx = None
for line in f:
fields = line.split("\t")
if idx:
network_expression = fields[idx]
motifs.append(network_expression)
elif NETWORK_EXPRESSION_TAG in fields:
idx = fields.index(NETWORK_EXPRESSION_TAG)
else:
print "Format error!"
print "line:", line
sys.exit(-1)
return motifs
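# Hedged usage sketch (hypothetical inputs; Python 2, like the module itself):
#
#   utils = Utils()
#   print utils.produce_sequences("A[CG]T")   # -> ['ACT', 'AGT']
#   print utils.motif_str_to_list("A[CG]T")   # -> ['A', 'CG', 'T']
#   print utils.reverse_complement("AACG")    # -> 'CGTT'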
|
ntymtsiv/tempest | refs/heads/master | tempest/api/compute/images/test_images.py | 1 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest import clients
from tempest.common.utils import data_utils
from tempest import config
from tempest import exceptions
from tempest.test import attr
CONF = config.CONF
class ImagesTestJSON(base.BaseV2ComputeTest):
_interface = 'json'
@classmethod
def setUpClass(cls):
super(ImagesTestJSON, cls).setUpClass()
if not CONF.service_available.glance:
skip_msg = ("%s skipped as glance is not available" % cls.__name__)
raise cls.skipException(skip_msg)
cls.client = cls.images_client
cls.servers_client = cls.servers_client
cls.image_ids = []
if cls.multi_user:
if CONF.compute.allow_tenant_isolation:
creds = cls.isolated_creds.get_alt_creds()
username, tenant_name, password = creds
cls.alt_manager = clients.Manager(username=username,
password=password,
tenant_name=tenant_name)
else:
# Use the alt_XXX credentials in the config file
cls.alt_manager = clients.AltManager()
cls.alt_client = cls.alt_manager.images_client
def tearDown(self):
"""Terminate test instances created after a test is executed."""
for image_id in self.image_ids:
self.client.delete_image(image_id)
self.image_ids.remove(image_id)
super(ImagesTestJSON, self).tearDown()
def __create_image__(self, server_id, name, meta=None):
resp, body = self.client.create_image(server_id, name, meta)
image_id = data_utils.parse_image_id(resp['location'])
self.client.wait_for_image_status(image_id, 'ACTIVE')
self.image_ids.append(image_id)
return resp, body
@attr(type=['negative', 'gate'])
def test_create_image_from_deleted_server(self):
# An image should not be created if the server instance is removed
resp, server = self.create_test_server(wait_until='ACTIVE')
# Delete the server before trying to create the image
self.servers_client.delete_server(server['id'])
self.servers_client.wait_for_server_termination(server['id'])
# Create a new image after server is deleted
name = data_utils.rand_name('image')
meta = {'image_type': 'test'}
self.assertRaises(exceptions.NotFound,
self.__create_image__,
server['id'], name, meta)
@attr(type=['negative', 'gate'])
def test_create_image_from_invalid_server(self):
# An image should not be created with invalid server id
# Create a new image with invalid server id
name = data_utils.rand_name('image')
meta = {'image_type': 'test'}
resp = {}
resp['status'] = None
self.assertRaises(exceptions.NotFound, self.__create_image__,
'!@#$%^&*()', name, meta)
@attr(type=['negative', 'gate'])
def test_create_image_from_stopped_server(self):
resp, server = self.create_test_server(wait_until='ACTIVE')
self.servers_client.stop(server['id'])
self.servers_client.wait_for_server_status(server['id'],
'SHUTOFF')
self.addCleanup(self.servers_client.delete_server, server['id'])
snapshot_name = data_utils.rand_name('test-snap-')
resp, image = self.create_image_from_server(server['id'],
name=snapshot_name,
wait_until='ACTIVE',
wait_for_server=False)
self.addCleanup(self.client.delete_image, image['id'])
self.assertEqual(snapshot_name, image['name'])
@attr(type='gate')
def test_delete_saving_image(self):
snapshot_name = data_utils.rand_name('test-snap-')
resp, server = self.create_test_server(wait_until='ACTIVE')
self.addCleanup(self.servers_client.delete_server, server['id'])
resp, image = self.create_image_from_server(server['id'],
name=snapshot_name,
wait_until='SAVING')
resp, body = self.client.delete_image(image['id'])
self.assertEqual('204', resp['status'])
@attr(type=['negative', 'gate'])
def test_create_image_specify_uuid_35_characters_or_less(self):
# Return an error if Image ID passed is 35 characters or less
snapshot_name = data_utils.rand_name('test-snap-')
test_uuid = ('a' * 35)
self.assertRaises(exceptions.NotFound, self.client.create_image,
test_uuid, snapshot_name)
@attr(type=['negative', 'gate'])
def test_create_image_specify_uuid_37_characters_or_more(self):
# Return an error if Image ID passed is 37 characters or more
snapshot_name = data_utils.rand_name('test-snap-')
test_uuid = ('a' * 37)
self.assertRaises(exceptions.NotFound, self.client.create_image,
test_uuid, snapshot_name)
@attr(type=['negative', 'gate'])
def test_delete_image_with_invalid_image_id(self):
# An image should not be deleted with invalid image id
self.assertRaises(exceptions.NotFound, self.client.delete_image,
'!@$%^&*()')
@attr(type=['negative', 'gate'])
def test_delete_non_existent_image(self):
# Return an error while trying to delete a non-existent image
non_existent_image_id = '11a22b9-12a9-5555-cc11-00ab112223fa'
self.assertRaises(exceptions.NotFound, self.client.delete_image,
non_existent_image_id)
@attr(type=['negative', 'gate'])
def test_delete_image_blank_id(self):
# Return an error while trying to delete an image with blank Id
self.assertRaises(exceptions.NotFound, self.client.delete_image, '')
@attr(type=['negative', 'gate'])
def test_delete_image_non_hex_string_id(self):
# Return an error while trying to delete an image with non hex id
image_id = '11a22b9-120q-5555-cc11-00ab112223gj'
self.assertRaises(exceptions.NotFound, self.client.delete_image,
image_id)
@attr(type=['negative', 'gate'])
def test_delete_image_negative_image_id(self):
# Return an error while trying to delete an image with negative id
self.assertRaises(exceptions.NotFound, self.client.delete_image, -1)
@attr(type=['negative', 'gate'])
def test_delete_image_id_is_over_35_character_limit(self):
# Return an error while trying to delete image with id over limit
self.assertRaises(exceptions.NotFound, self.client.delete_image,
'11a22b9-12a9-5555-cc11-00ab112223fa-3fac')
class ImagesTestXML(ImagesTestJSON):
_interface = 'xml'
|
VaneCloud/horizon | refs/heads/stable/kilo | openstack_dashboard/test/integration_tests/pages/project/data_processing/imageregistrypage.py | 37 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from selenium.webdriver.common import by
from openstack_dashboard.test.integration_tests.pages import basepage
from openstack_dashboard.test.integration_tests.regions import forms
from openstack_dashboard.test.integration_tests.regions import tables
class ImageregistryPage(basepage.BaseNavigationPage):
_image_table_locator = (by.By.CSS_SELECTOR, 'table#image_registry')
_unregister_form_locator = (by.By.CSS_SELECTOR, 'div.modal-dialog')
_register_form_locator = (by.By.CSS_SELECTOR, 'div.modal-dialog')
IMAGE_TABLE_ACTIONS = ("register_image", "unregister_images")
IMAGE_TABLE_ROW_ACTIONS = {
tables.ComplexActionRowRegion.PRIMARY_ACTION: "edit_tags",
tables.ComplexActionRowRegion.SECONDARY_ACTIONS: ("unregister_image",)
}
TABLE_IMAGE_COLUMN = 0
REGISTER_FORM_IMAGE = "image"
REGISTER_FORM_USER_NAME = "user_name"
REGISTER_FORM_DESCRIPTION = "description"
REGISTER_FORM_FIELDS = (REGISTER_FORM_IMAGE, REGISTER_FORM_USER_NAME,
REGISTER_FORM_DESCRIPTION)
def __init__(self, driver, conf):
super(ImageregistryPage, self).__init__(driver, conf)
self._page_title = "Data Processing"
def _get_row_with_image_name(self, name):
return self.image_table.get_row(self.TABLE_IMAGE_COLUMN, name)
@property
def image_table(self):
src_elem = self._get_element(*self._image_table_locator)
return tables.ComplexActionTableRegion(self.driver, self.conf,
src_elem,
self.IMAGE_TABLE_ACTIONS,
self.IMAGE_TABLE_ROW_ACTIONS)
@property
def unregister_form(self):
src_elem = self._get_element(*self._unregister_form_locator)
return forms.BaseFormRegion(self.driver, self.conf, src_elem)
@property
def register_form(self):
src_elem = self._get_element(*self._register_form_locator)
return forms.FormRegion(self.driver, self.conf, src_elem,
self.REGISTER_FORM_FIELDS)
def is_image_registered(self, name):
return bool(self._get_row_with_image_name(name))
def unregister_image(self, name):
self._get_row_with_image_name(name).mark()
self.image_table.unregister_images.click()
self.unregister_form.submit.click()
def register_image(self, image, user_name, description):
self.image_table.register_image.click()
self.register_form.image.text = image
self.register_form.user_name.text = user_name
self.register_form.description.text = description
self.register_form.submit.click()
def wait_until_image_registered(self, name):
self._wait_until(lambda x: self.is_image_registered(name))
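# Hedged usage sketch (hypothetical driver/conf fixtures; the image name is
# illustrative only):
#
#   page = ImageregistryPage(driver, conf)
#   page.register_image('fedora-sahara', user_name='ec2-user',
#                       description='demo image')
#   page.wait_until_image_registered('fedora-sahara')
#   assert page.is_image_registered('fedora-sahara')
#   page.unregister_image('fedora-sahara')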
|
mr-justdoit/tweetanalyzer | refs/heads/master | tweetanalyzer.py | 1 | #!/usr/bin/python
# coding: utf-8
from __future__ import absolute_import, print_function
import tweepy
import json
import sys
import getopt
import pyaml
import textmining
with open('data.json') as data_file:
data = json.load(data_file)
def twitter_auth(consumer_key, consumer_secret, access_token,
access_token_secret):
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.secure = True
auth.set_access_token(access_token, access_token_secret)
return auth
def load_api(consumer_key, consumer_secret, access_token, access_token_secret):
return tweepy.API(
twitter_auth(consumer_key, consumer_secret, access_token,
access_token_secret),
parser=tweepy.parsers.JSONParser())
def text_on_tweet(api, query, count, page, lang):
results = api.search(q=query, count=count, lang=lang)
textdata = ""
max_id = 0
for j in range(0, page):
for i in range(0, len(results["statuses"])):
d = results["statuses"][i]
textdata += d["text"]
textdata += ' '
max_id = d["id"]
results = api.search(q=query,
count=count,
lang=lang,
max_id=max_id - 1)
return textdata
def output_media(api, query, count, page):
results = api.search(q=query, count=count)
media = ""
pid = []
for k in range(0, page):
for i in range(0, len(results["statuses"])):
d = results["statuses"][i]
if "media" in d["entities"]:
for j in range(0, len(d["entities"]["media"])):
media += d["entities"]["media"][j]["media_url_https"]
media += "\n"
pid.append(d["id"])
results = api.search(q=query, count=count, max_id=pid.pop() - 1)
return print(media)
def output_url(api, query, count, page):
results = api.search(q=query, count=count)
url = ""
pid = []
for k in range(0, page):
for i in range(0, len(results["statuses"])):
d = results["statuses"][i]
if "urls" in d["entities"]:
for j in range(0, len(d["entities"]["urls"])):
url += d["entities"]["urls"][j]["expanded_url"]
url += "\n"
pid.append(d["id"])
results = api.search(q=query, count=count, max_id=pid.pop() - 1)
return print(url)
def output_raw(api, query, count):
return pyaml.dump(
api.search(q=query,
count=count),
sys.stdout,
vspacing=[0, 1])
def output_simplify(api, query, count, page):
results = api.search(q=query, count=count)
tweets = []
d = ""
for j in range(0, page):
if len(results["statuses"]) == 0:
return
for i in range(0, len(results["statuses"])):
d = results["statuses"][i]
tweets.append({"id": d["id"],
"user": d["user"]["screen_name"],
"text": d["text"],
"geo": d["geo"],
"time": d["created_at"],
"lang": d["lang"]})
results = api.search(q=query, count=count, max_id=d["id"] - 1)
return pyaml.dump(tweets, sys.stdout, vspacing=[0, 1])
def output_data(api, query, count, page, metatype):
d = ""
if metatype == "t":
textdata = text_on_tweet(api, query, count, page, lang="en")
d = textmining.output_textdata(textdata)
elif metatype == "r":
d = output_raw(api, query, count)
elif metatype == "m":
d = output_media(api, query, count, page)
elif metatype == "j":
textdata = text_on_tweet(api, query, count, page, lang="ja")
d = textmining.output_ja_text(textdata, ["名詞"])  # "名詞" = noun (part-of-speech tag)
elif metatype == "s":
d = output_simplify(api, query, count, page)
elif metatype == "u":
d = output_url(api, query, count, page)
return d
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], 'q:t:c:p:h')
except getopt.GetoptError as err:
print(str(err))
print("usage: tweetanalyzer.py -q <query> [-t <metatype>] [-c <count>] [-p <pages>]")
sys.exit(2)
query = ""
metatype = 't'
count = 1
page = 1
for o, a in opts:
if o == "-q":
query = a
elif o == "-t":
metatype = a
elif o == "-c":
count = int(a)
elif o == "-p":
page = int(a)
api = load_api(data["consumer"]["key"], data["consumer"]["secret"],
data["token"]["key"], data["token"]["secret"])
output_data(api, query, count, page, metatype)
if __name__ == "__main__":
main()
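# Hedged usage sketch (requires a data.json holding your consumer/token
# credentials, as loaded at the top of this module):
#
#   python tweetanalyzer.py -q "#python" -t s -c 100 -p 2
#
# -t selects the output: t = English text mining, j = Japanese noun mining,
# r = raw API dump, m = media URLs, u = expanded URLs, s = simplified tweets.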
|
kjung/scikit-learn | refs/heads/master | sklearn/mixture/tests/test_gmm.py | 48 | import unittest
import copy
import sys
from nose.tools import assert_true
import numpy as np
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
assert_raises)
from scipy import stats
from sklearn import mixture
from sklearn.datasets.samples_generator import make_spd_matrix
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_raise_message
from sklearn.metrics.cluster import adjusted_rand_score
from sklearn.externals.six.moves import cStringIO as StringIO
rng = np.random.RandomState(0)
def test_sample_gaussian():
# Test sample generation from mixture.sample_gaussian where covariance
# is diagonal, spherical and full
n_features, n_samples = 2, 300
axis = 1
mu = rng.randint(10) * rng.rand(n_features)
cv = (rng.rand(n_features) + 1.0) ** 2
samples = mixture.sample_gaussian(
mu, cv, covariance_type='diag', n_samples=n_samples)
assert_true(np.allclose(samples.mean(axis), mu, atol=1.3))
assert_true(np.allclose(samples.var(axis), cv, atol=1.5))
# the same for spherical covariances
cv = (rng.rand() + 1.0) ** 2
samples = mixture.sample_gaussian(
mu, cv, covariance_type='spherical', n_samples=n_samples)
assert_true(np.allclose(samples.mean(axis), mu, atol=1.5))
assert_true(np.allclose(
samples.var(axis), np.repeat(cv, n_features), atol=1.5))
# and for full covariances
A = rng.randn(n_features, n_features)
cv = np.dot(A.T, A) + np.eye(n_features)
samples = mixture.sample_gaussian(
mu, cv, covariance_type='full', n_samples=n_samples)
assert_true(np.allclose(samples.mean(axis), mu, atol=1.3))
assert_true(np.allclose(np.cov(samples), cv, atol=2.5))
# Numerical stability check: in SciPy 0.12.0 at least, eigh may return
# tiny negative values in its second return value.
from sklearn.mixture import sample_gaussian
x = sample_gaussian([0, 0], [[4, 3], [1, .1]],
covariance_type='full', random_state=42)
assert_true(np.isfinite(x).all())
def _naive_lmvnpdf_diag(X, mu, cv):
# slow and naive implementation of lmvnpdf
ref = np.empty((len(X), len(mu)))
stds = np.sqrt(cv)
for i, (m, std) in enumerate(zip(mu, stds)):
ref[:, i] = np.log(stats.norm.pdf(X, m, std)).sum(axis=1)
return ref
def test_lmvnpdf_diag():
# test a slow and naive implementation of lmvnpdf and
# compare it to the vectorized version (mixture.lmvnpdf) to test
# for correctness
n_features, n_components, n_samples = 2, 3, 10
mu = rng.randint(10) * rng.rand(n_components, n_features)
cv = (rng.rand(n_components, n_features) + 1.0) ** 2
X = rng.randint(10) * rng.rand(n_samples, n_features)
ref = _naive_lmvnpdf_diag(X, mu, cv)
lpr = mixture.log_multivariate_normal_density(X, mu, cv, 'diag')
assert_array_almost_equal(lpr, ref)
def test_lmvnpdf_spherical():
n_features, n_components, n_samples = 2, 3, 10
mu = rng.randint(10) * rng.rand(n_components, n_features)
spherecv = rng.rand(n_components, 1) ** 2 + 1
X = rng.randint(10) * rng.rand(n_samples, n_features)
cv = np.tile(spherecv, (n_features, 1))
reference = _naive_lmvnpdf_diag(X, mu, cv)
lpr = mixture.log_multivariate_normal_density(X, mu, spherecv,
'spherical')
assert_array_almost_equal(lpr, reference)
def test_lmvnpdf_full():
n_features, n_components, n_samples = 2, 3, 10
mu = rng.randint(10) * rng.rand(n_components, n_features)
cv = (rng.rand(n_components, n_features) + 1.0) ** 2
X = rng.randint(10) * rng.rand(n_samples, n_features)
fullcv = np.array([np.diag(x) for x in cv])
reference = _naive_lmvnpdf_diag(X, mu, cv)
lpr = mixture.log_multivariate_normal_density(X, mu, fullcv, 'full')
assert_array_almost_equal(lpr, reference)
def test_lvmpdf_full_cv_non_positive_definite():
n_features, n_samples = 2, 10
rng = np.random.RandomState(0)
X = rng.randint(10) * rng.rand(n_samples, n_features)
mu = np.mean(X, 0)
cv = np.array([[[-1, 0], [0, 1]]])
expected_message = "'covars' must be symmetric, positive-definite"
assert_raise_message(ValueError, expected_message,
mixture.log_multivariate_normal_density,
X, mu, cv, 'full')
def test_GMM_attributes():
n_components, n_features = 10, 4
covariance_type = 'diag'
g = mixture.GMM(n_components, covariance_type, random_state=rng)
weights = rng.rand(n_components)
weights = weights / weights.sum()
means = rng.randint(-20, 20, (n_components, n_features))
assert_true(g.n_components == n_components)
assert_true(g.covariance_type == covariance_type)
g.weights_ = weights
assert_array_almost_equal(g.weights_, weights)
g.means_ = means
assert_array_almost_equal(g.means_, means)
covars = (0.1 + 2 * rng.rand(n_components, n_features)) ** 2
g.covars_ = covars
assert_array_almost_equal(g.covars_, covars)
assert_raises(ValueError, g._set_covars, [])
assert_raises(ValueError, g._set_covars,
np.zeros((n_components - 2, n_features)))
assert_raises(ValueError, mixture.GMM, n_components=20,
covariance_type='badcovariance_type')
class GMMTester():
do_test_eval = True
def _setUp(self):
self.n_components = 10
self.n_features = 4
self.weights = rng.rand(self.n_components)
self.weights = self.weights / self.weights.sum()
self.means = rng.randint(-20, 20, (self.n_components, self.n_features))
self.threshold = -0.5
self.I = np.eye(self.n_features)
self.covars = {
'spherical': (0.1 + 2 * rng.rand(self.n_components,
self.n_features)) ** 2,
'tied': (make_spd_matrix(self.n_features, random_state=0)
+ 5 * self.I),
'diag': (0.1 + 2 * rng.rand(self.n_components,
self.n_features)) ** 2,
'full': np.array([make_spd_matrix(self.n_features, random_state=0)
+ 5 * self.I for x in range(self.n_components)])}
def test_eval(self):
if not self.do_test_eval:
return  # DPGMM does not support setting the means and
# covariances before fitting. There is no way of fixing this
# due to the variational parameters being more expressive than
# covariance matrices.
g = self.model(n_components=self.n_components,
covariance_type=self.covariance_type, random_state=rng)
# Make sure the means are far apart so responsibilities.argmax()
# picks the actual component used to generate the observations.
g.means_ = 20 * self.means
g.covars_ = self.covars[self.covariance_type]
g.weights_ = self.weights
gaussidx = np.repeat(np.arange(self.n_components), 5)
n_samples = len(gaussidx)
X = rng.randn(n_samples, self.n_features) + g.means_[gaussidx]
ll, responsibilities = g.score_samples(X)
self.assertEqual(len(ll), n_samples)
self.assertEqual(responsibilities.shape,
(n_samples, self.n_components))
assert_array_almost_equal(responsibilities.sum(axis=1),
np.ones(n_samples))
assert_array_equal(responsibilities.argmax(axis=1), gaussidx)
def test_sample(self, n=100):
g = self.model(n_components=self.n_components,
covariance_type=self.covariance_type, random_state=rng)
# Make sure the means are far apart so responsibilities.argmax()
# picks the actual component used to generate the observations.
g.means_ = 20 * self.means
g.covars_ = np.maximum(self.covars[self.covariance_type], 0.1)
g.weights_ = self.weights
samples = g.sample(n)
self.assertEqual(samples.shape, (n, self.n_features))
def test_train(self, params='wmc'):
g = mixture.GMM(n_components=self.n_components,
covariance_type=self.covariance_type)
g.weights_ = self.weights
g.means_ = self.means
g.covars_ = 20 * self.covars[self.covariance_type]
# Create a training set by sampling from the predefined distribution.
X = g.sample(n_samples=100)
g = self.model(n_components=self.n_components,
covariance_type=self.covariance_type,
random_state=rng, min_covar=1e-1,
n_iter=1, init_params=params)
g.fit(X)
# Do one training iteration at a time so we can keep track of
# the log likelihood to make sure that it increases after each
# iteration.
trainll = []
for _ in range(5):
g.params = params
g.init_params = ''
g.fit(X)
trainll.append(self.score(g, X))
g.n_iter = 10
g.init_params = ''
g.params = params
g.fit(X) # finish fitting
# Note that the log likelihood will sometimes decrease by a
# very small amount after it has more or less converged due to
# the addition of min_covar to the covariance (to prevent
# underflow). This is why the threshold is set to -0.5
# instead of 0.
delta_min = np.diff(trainll).min()
self.assertTrue(
delta_min > self.threshold,
"The min nll increase is %f which is lower than the admissible"
" threshold of %f, for model %s. The likelihoods are %s."
% (delta_min, self.threshold, self.covariance_type, trainll))
def test_train_degenerate(self, params='wmc'):
# Train on degenerate data with 0 in some dimensions
# Create a training set by sampling from the predefined distribution.
X = rng.randn(100, self.n_features)
X.T[1:] = 0
g = self.model(n_components=2, covariance_type=self.covariance_type,
random_state=rng, min_covar=1e-3, n_iter=5,
init_params=params)
g.fit(X)
trainll = g.score(X)
self.assertTrue(np.sum(np.abs(trainll / 100 / X.shape[1])) < 5)
def test_train_1d(self, params='wmc'):
# Train on 1-D data
# Create a training set by sampling from the predefined distribution.
X = rng.randn(100, 1)
# X.T[1:] = 0
g = self.model(n_components=2, covariance_type=self.covariance_type,
random_state=rng, min_covar=1e-7, n_iter=5,
init_params=params)
g.fit(X)
trainll = g.score(X)
if isinstance(g, mixture.DPGMM):
self.assertTrue(np.sum(np.abs(trainll / 100)) < 5)
else:
self.assertTrue(np.sum(np.abs(trainll / 100)) < 2)
def score(self, g, X):
return g.score(X).sum()
class TestGMMWithSphericalCovars(unittest.TestCase, GMMTester):
covariance_type = 'spherical'
model = mixture.GMM
setUp = GMMTester._setUp
class TestGMMWithDiagonalCovars(unittest.TestCase, GMMTester):
covariance_type = 'diag'
model = mixture.GMM
setUp = GMMTester._setUp
class TestGMMWithTiedCovars(unittest.TestCase, GMMTester):
covariance_type = 'tied'
model = mixture.GMM
setUp = GMMTester._setUp
class TestGMMWithFullCovars(unittest.TestCase, GMMTester):
covariance_type = 'full'
model = mixture.GMM
setUp = GMMTester._setUp
def test_multiple_init():
# Test that multiple inits does not much worse than a single one
X = rng.randn(30, 5)
X[:10] += 2
g = mixture.GMM(n_components=2, covariance_type='spherical',
random_state=rng, min_covar=1e-7, n_iter=5)
train1 = g.fit(X).score(X).sum()
g.n_init = 5
train2 = g.fit(X).score(X).sum()
assert_true(train2 >= train1 - 1.e-2)
def test_n_parameters():
# Test that the right number of parameters is estimated
n_samples, n_dim, n_components = 7, 5, 2
X = rng.randn(n_samples, n_dim)
n_params = {'spherical': 13, 'diag': 21, 'tied': 26, 'full': 41}
for cv_type in ['full', 'tied', 'diag', 'spherical']:
g = mixture.GMM(n_components=n_components, covariance_type=cv_type,
random_state=rng, min_covar=1e-7, n_iter=1)
g.fit(X)
assert_true(g._n_parameters() == n_params[cv_type])
def test_1d_1component():
# Test all of the covariance_types return the same BIC score for
# 1-dimensional, 1 component fits.
n_samples, n_dim, n_components = 100, 1, 1
X = rng.randn(n_samples, n_dim)
g_full = mixture.GMM(n_components=n_components, covariance_type='full',
random_state=rng, min_covar=1e-7, n_iter=1)
g_full.fit(X)
g_full_bic = g_full.bic(X)
for cv_type in ['tied', 'diag', 'spherical']:
g = mixture.GMM(n_components=n_components, covariance_type=cv_type,
random_state=rng, min_covar=1e-7, n_iter=1)
g.fit(X)
assert_array_almost_equal(g.bic(X), g_full_bic)
def assert_fit_predict_correct(model, X):
model2 = copy.deepcopy(model)
predictions_1 = model.fit(X).predict(X)
predictions_2 = model2.fit_predict(X)
assert adjusted_rand_score(predictions_1, predictions_2) == 1.0
def test_fit_predict():
"""
test that gmm.fit_predict is equivalent to gmm.fit + gmm.predict
"""
lrng = np.random.RandomState(101)
n_samples, n_dim, n_comps = 100, 2, 2
mu = np.array([[8, 8]])
component_0 = lrng.randn(n_samples, n_dim)
component_1 = lrng.randn(n_samples, n_dim) + mu
X = np.vstack((component_0, component_1))
for m_constructor in (mixture.GMM, mixture.VBGMM, mixture.DPGMM):
model = m_constructor(n_components=n_comps, covariance_type='full',
min_covar=1e-7, n_iter=5,
random_state=np.random.RandomState(0))
assert_fit_predict_correct(model, X)
model = mixture.GMM(n_components=n_comps, n_iter=0)
z = model.fit_predict(X)
assert np.all(z == 0), "Quick Initialization Failed!"
def test_aic():
# Test the aic and bic criteria
n_samples, n_dim, n_components = 50, 3, 2
X = rng.randn(n_samples, n_dim)
SGH = 0.5 * (X.var() + np.log(2 * np.pi)) # standard gaussian entropy
for cv_type in ['full', 'tied', 'diag', 'spherical']:
g = mixture.GMM(n_components=n_components, covariance_type=cv_type,
random_state=rng, min_covar=1e-7)
g.fit(X)
aic = 2 * n_samples * SGH * n_dim + 2 * g._n_parameters()
bic = (2 * n_samples * SGH * n_dim +
np.log(n_samples) * g._n_parameters())
bound = n_dim * 3. / np.sqrt(n_samples)
assert_true(np.abs(g.aic(X) - aic) / n_samples < bound)
assert_true(np.abs(g.bic(X) - bic) / n_samples < bound)
def check_positive_definite_covars(covariance_type):
r"""Test that covariance matrices do not become non positive definite
Due to the accumulation of round-off errors, the computation of the
covariance matrices during the learning phase could lead to non-positive
definite covariance matrices. Namely the use of the formula:
.. math:: C = (\sum_i w_i x_i x_i^T) - \mu \mu^T
instead of:
.. math:: C = \sum_i w_i (x_i - \mu)(x_i - \mu)^T
while mathematically equivalent, was observed to raise a ``LinAlgError``
exception when computing a ``GMM`` with full covariance matrices and fixed mean.
This function ensures that some later optimization will not introduce the
problem again.
"""
rng = np.random.RandomState(1)
# we build a dataset with 2 2d component. The components are unbalanced
# (respective weights 0.9 and 0.1)
X = rng.randn(100, 2)
X[-10:] += (3, 3) # Shift the 10 last points
gmm = mixture.GMM(2, params="wc", covariance_type=covariance_type,
min_covar=1e-3)
# This is a non-regression test for issue #2640. The following call used
# to trigger:
# numpy.linalg.linalg.LinAlgError: 2-th leading minor not positive definite
gmm.fit(X)
if covariance_type == "diag" or covariance_type == "spherical":
assert_greater(gmm.covars_.min(), 0)
else:
if covariance_type == "tied":
covs = [gmm.covars_]
else:
covs = gmm.covars_
for c in covs:
assert_greater(np.linalg.det(c), 0)
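# Hedged illustration (not part of the test suite): a minimal numpy sketch of
# the cancellation problem described in the docstring above. With a huge mean
# and tiny variance, the uncentered formula subtracts two nearly equal large
# numbers and can lose positive semi-definiteness; the centered formula stays
# PSD up to round-off.
def _demo_covariance_cancellation():
    demo_rng = np.random.RandomState(0)
    x = demo_rng.randn(1000, 2) * 1e-3 + 1e6  # std 1e-3 around mean 1e6
    mu = x.mean(axis=0)
    c_bad = np.dot(x.T, x) / len(x) - np.outer(mu, mu)  # cancellation-prone
    c_good = np.dot((x - mu).T, x - mu) / len(x)        # centered first
    return np.linalg.eigvalsh(c_bad).min(), np.linalg.eigvalsh(c_good).min()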
def test_positive_definite_covars():
# Check positive definiteness for all covariance types
for covariance_type in ["full", "tied", "diag", "spherical"]:
yield check_positive_definite_covars, covariance_type
def test_verbose_first_level():
# Create sample data
X = rng.randn(30, 5)
X[:10] += 2
g = mixture.GMM(n_components=2, n_init=2, verbose=1)
old_stdout = sys.stdout
sys.stdout = StringIO()
try:
g.fit(X)
finally:
sys.stdout = old_stdout
def test_verbose_second_level():
# Create sample data
X = rng.randn(30, 5)
X[:10] += 2
g = mixture.GMM(n_components=2, n_init=2, verbose=2)
old_stdout = sys.stdout
sys.stdout = StringIO()
try:
g.fit(X)
finally:
sys.stdout = old_stdout
|
fighterCui/L4ReFiascoOC | refs/heads/master | l4/pkg/python/contrib/Lib/xml/parsers/expat.py | 230 | """Interface to the Expat non-validating XML parser."""
__version__ = '$Revision: 17640 $'
from pyexpat import *
|
adedayo/intellij-community | refs/heads/master | python/testData/refactoring/pushdown/multiple.after.py | 79 | class Foo:
pass
class Zope(Foo):
def _mine(self):
print "zope"
def foo(self):
print("a")
class Boo(Foo):
def boo(self):
print "rrrrr"
def foo(self):
print("a")
|
eclipse-ease-addons/engines | refs/heads/master | jython/org.jython/Lib/ensurepip/__init__.py | 4 | #!/usr/bin/env python2
from __future__ import print_function
import os
import os.path
import pkgutil
import shutil
import sys
import tempfile
__all__ = ["version", "bootstrap"]
_SETUPTOOLS_VERSION = "14.3.2"
_PIP_VERSION = "1.6"
# pip currently requires ssl support, so we try to provide a nicer
# error message when that is missing (http://bugs.python.org/issue19744)
_MISSING_SSL_MESSAGE = ("pip {} requires SSL/TLS".format(_PIP_VERSION))
try:
import ssl
except ImportError:
ssl = None
def _require_ssl_for_pip():
raise RuntimeError(_MISSING_SSL_MESSAGE)
else:
def _require_ssl_for_pip():
pass
_PROJECTS = [
("setuptools", _SETUPTOOLS_VERSION),
("pip", _PIP_VERSION),
]
def _run_pip(args, additional_paths=None):
# Add our bundled software to the sys.path so we can import it
if additional_paths is not None:
sys.path = additional_paths + sys.path
# Install the bundled software
import pip
pip.main(args)
def version():
"""
Returns a string specifying the bundled version of pip.
"""
return _PIP_VERSION
def _disable_pip_configuration_settings():
# We deliberately ignore all pip environment variables
# when invoking pip
# See http://bugs.python.org/issue19734 for details
keys_to_remove = [k for k in os.environ if k.startswith("PIP_")]
for k in keys_to_remove:
del os.environ[k]
# We also ignore the settings in the default pip configuration file
# See http://bugs.python.org/issue20053 for details
os.environ['PIP_CONFIG_FILE'] = os.devnull
def bootstrap(root=None, upgrade=False, user=False,
altinstall=False, default_pip=True,
verbosity=0):
"""
Bootstrap pip into the current Python installation (or the given root
directory).
Note that calling this function will alter both sys.path and os.environ.
"""
if altinstall and default_pip:
raise ValueError("Cannot use altinstall and default_pip together")
_require_ssl_for_pip()
_disable_pip_configuration_settings()
# By default, installing pip and setuptools installs all of the
# following scripts (X.Y == running Python version):
#
# pip, pipX, pipX.Y, easy_install, easy_install-X.Y
#
# pip 1.5+ allows ensurepip to request that some of those be left out
if altinstall:
# omit pip, pipX and easy_install
os.environ["ENSUREPIP_OPTIONS"] = "altinstall"
elif not default_pip:
# omit pip and easy_install
os.environ["ENSUREPIP_OPTIONS"] = "install"
tmpdir = tempfile.mkdtemp()
try:
# Put our bundled wheels into a temporary directory and construct the
# additional paths that need added to sys.path
additional_paths = []
for project, version in _PROJECTS:
wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
whl = pkgutil.get_data(
"ensurepip",
"_bundled/{}".format(wheel_name),
)
with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
fp.write(whl)
additional_paths.append(os.path.join(tmpdir, wheel_name))
# Construct the arguments to be passed to the pip command
args = ["install", "--no-index", "--find-links", tmpdir]
if root:
args += ["--root", root]
if upgrade:
args += ["--upgrade"]
if user:
args += ["--user"]
if verbosity:
args += ["-" + "v" * verbosity]
_run_pip(args + [p[0] for p in _PROJECTS], additional_paths)
finally:
shutil.rmtree(tmpdir, ignore_errors=True)
def _uninstall_helper(verbosity=0):
"""Helper to support a clean default uninstall process on Windows
Note that calling this function may alter os.environ.
"""
# Nothing to do if pip was never installed, or has been removed
try:
import pip
except ImportError:
return
# If the pip version doesn't match the bundled one, leave it alone
if pip.__version__ != _PIP_VERSION:
msg = ("ensurepip will only uninstall a matching version "
"({!r} installed, {!r} bundled)")
print(msg.format(pip.__version__, _PIP_VERSION), file=sys.stderr)
return
_require_ssl_for_pip()
_disable_pip_configuration_settings()
# Construct the arguments to be passed to the pip command
args = ["uninstall", "-y"]
if verbosity:
args += ["-" + "v" * verbosity]
_run_pip(args + [p[0] for p in reversed(_PROJECTS)])
def _main(argv=None):
if ssl is None:
print("Ignoring ensurepip failure: {}".format(_MISSING_SSL_MESSAGE),
file=sys.stderr)
return
import argparse
parser = argparse.ArgumentParser(prog="python -m ensurepip")
parser.add_argument(
"--version",
action="version",
version="pip {}".format(version()),
help="Show the version of pip that is bundled with this Python.",
)
parser.add_argument(
"-v", "--verbose",
action="count",
default=0,
dest="verbosity",
help=("Give more output. Option is additive, and can be used up to 3 "
"times."),
)
parser.add_argument(
"-U", "--upgrade",
action="store_true",
default=False,
help="Upgrade pip and dependencies, even if already installed.",
)
parser.add_argument(
"--user",
action="store_true",
default=False,
help="Install using the user scheme.",
)
parser.add_argument(
"--root",
default=None,
help="Install everything relative to this alternate root directory.",
)
parser.add_argument(
"--altinstall",
action="store_true",
default=False,
help=("Make an alternate install, installing only the X.Y versioned"
"scripts (Default: pipX, pipX.Y, easy_install-X.Y)"),
)
parser.add_argument(
"--default-pip",
action="store_true",
default=True,
dest="default_pip",
help=argparse.SUPPRESS,
)
parser.add_argument(
"--no-default-pip",
action="store_false",
dest="default_pip",
help=("Make a non default install, installing only the X and X.Y "
"versioned scripts."),
)
args = parser.parse_args(argv)
bootstrap(
root=args.root,
upgrade=args.upgrade,
user=args.user,
verbosity=args.verbosity,
altinstall=args.altinstall,
default_pip=args.default_pip,
)
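# Hedged usage sketch (flags as defined by the argparse parser above):
#
#   python -m ensurepip --upgrade          # install or upgrade the bundled pip
#   python -m ensurepip --altinstall       # only pipX.Y / easy_install-X.Y scripts
#
# or programmatically:
#
#   import ensurepip
#   ensurepip.bootstrap(upgrade=True, verbosity=1)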
|
pelletier/django-parallelized_querysets | refs/heads/master | parallelized_querysets/__init__.py | 1 | __version_tuple__ = (0, 0, 4)
__version__ = ".".join(map(str, __version_tuple__))
def parallelized_multiple_querysets(querysets, *args, **kwargs):
from parallelized_querysets.core import parallelized_multiple_querysets
return parallelized_multiple_querysets(querysets, *args, **kwargs)
def parallelized_queryset(queryset, *args, **kwargs):
return parallelized_multiple_querysets([queryset], *args, **kwargs)
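# Minimal usage sketch (``Book`` is an illustrative Django model; any extra
# options accepted by the core implementation are forwarded via *args/**kwargs):
#
#   from parallelized_querysets import parallelized_queryset
#   results = parallelized_queryset(Book.objects.all())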
|
esten/StoriTell | refs/heads/master | StoriTell/tastypie/fields.py | 2 | from dateutil.parser import parse
import re
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from django.utils import datetime_safe, importlib
from tastypie.bundle import Bundle
from tastypie.exceptions import ApiFieldError, NotFound
from tastypie.utils import dict_strip_unicode_keys
class NOT_PROVIDED:
pass
DATE_REGEX = re.compile(r'^(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2}).*?$')
DATETIME_REGEX = re.compile(r'^(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})(T|\s+)(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2}).*?$')
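# For reference, these patterns accept strings such as:
#   DATE_REGEX:     "2010-11-10"
#   DATETIME_REGEX: "2010-11-10T03:07:43" or "2010-11-10 03:07:43"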
# All the ApiField variants.
class ApiField(object):
"""The base implementation of a field used by the resources."""
dehydrated_type = 'string'
help_text = ''
def __init__(self, attribute=None, default=NOT_PROVIDED, null=False, readonly=False, unique=False, help_text=None):
"""
Sets up the field. This is generally called when the containing
``Resource`` is initialized.
Optionally accepts an ``attribute``, which should be a string naming
either an instance attribute or a callable on the object; it is read
during ``dehydrate`` and used to push data onto the object during
``hydrate``. Defaults to ``None``, meaning data will be manually accessed.
Optionally accepts a ``default``, which provides default data when the
object being ``dehydrated``/``hydrated`` has no data on the field.
Defaults to ``NOT_PROVIDED``.
Optionally accepts a ``null``, which indicates whether or not a
``None`` is allowable data on the field. Defaults to ``False``.
Optionally accepts a ``readonly``, which indicates whether the field
is used during the ``hydrate`` or not. Defaults to ``False``.
Optionally accepts a ``unique``, which indicates if the field is a
unique identifier for the object.
Optionally accepts ``help_text``, which lets you provide a
human-readable description of the field exposed at the schema level.
Defaults to the per-Field definition.
"""
# Track what the index thinks this field is called.
self.instance_name = None
self._resource = None
self.attribute = attribute
self._default = default
self.null = null
self.readonly = readonly
self.value = None
self.unique = unique
if help_text:
self.help_text = help_text
def contribute_to_class(self, cls, name):
# Do the least we can here so that we don't hate ourselves in the
# morning.
self.instance_name = name
self._resource = cls
def has_default(self):
"""Returns a boolean of whether this field has a default value."""
return self._default is not NOT_PROVIDED
@property
def default(self):
"""Returns the default value for the field."""
if callable(self._default):
return self._default()
return self._default
def dehydrate(self, bundle):
"""
Takes data from the provided object and prepares it for the
resource.
"""
if self.attribute is not None:
# Check for `__` in the field for looking through the relation.
attrs = self.attribute.split('__')
current_object = bundle.obj
for attr in attrs:
previous_object = current_object
current_object = getattr(current_object, attr, None)
if current_object is None:
if self.has_default():
current_object = self._default
# Fall out of the loop, since any further attempts at
# access will fail miserably.
break
elif self.null:
current_object = None
# Fall out of the loop, since any further attempts at
# access will fail miserably.
break
else:
raise ApiFieldError("The object '%r' has an empty attribute '%s' and doesn't allow a default or null value." % (previous_object, attr))
if callable(current_object):
return current_object()
return current_object
if self.has_default():
return self.default
else:
return None
def convert(self, value):
"""
Handles conversion between the data found and the type of the field.
Extending classes should override this method and provide correct
data coercion.
"""
return value
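# A sketch of the override pattern described in the docstring above
# (``UpperCaseField`` is illustrative, not part of tastypie):
#
# class UpperCaseField(ApiField):
#     def convert(self, value):
#         if value is None:
#             return None
#         return unicode(value).upper()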
def hydrate(self, bundle):
"""
Takes data stored in the bundle for the field and returns it. Used for
taking simple data and building an instance object.
"""
if self.readonly:
return None
if not bundle.data.has_key(self.instance_name):
if hasattr(bundle.obj, self.instance_name):
return getattr(bundle.obj, self.instance_name)
elif self.has_default():
if callable(self._default):
return self._default()
return self._default
elif self.null:
return None
else:
raise ApiFieldError("The '%s' field has no data and doesn't allow a default or null value." % self.instance_name)
return bundle.data[self.instance_name]
class CharField(ApiField):
"""
A text field of arbitrary length.
Covers both ``models.CharField`` and ``models.TextField``.
"""
dehydrated_type = 'string'
help_text = 'Unicode string data. Ex: "Hello World"'
def dehydrate(self, obj):
return self.convert(super(CharField, self).dehydrate(obj))
def convert(self, value):
if value is None:
return None
return unicode(value)
class FileField(ApiField):
"""
A file-related field.
Covers both ``models.FileField`` and ``models.ImageField``.
"""
dehydrated_type = 'string'
help_text = 'A file path as a string. Ex: "/tmp/photos/my_photo.jpg"'
def dehydrate(self, obj):
media_url = settings.MEDIA_URL
path = self.convert(super(FileField, self).dehydrate(obj))
if path:
media_url = media_url.rstrip('/')
path = path.lstrip('/')
return u"%s/%s" % (media_url, path)
return path
def convert(self, value):
if value is None:
return None
return unicode(value)
class IntegerField(ApiField):
"""
An integer field.
Covers ``models.IntegerField``, ``models.PositiveIntegerField``,
``models.PositiveSmallIntegerField`` and ``models.SmallIntegerField``.
"""
dehydrated_type = 'integer'
help_text = 'Integer data. Ex: 2673'
def dehydrate(self, obj):
return self.convert(super(IntegerField, self).dehydrate(obj))
def convert(self, value):
if value is None:
return None
return int(value)
class FloatField(ApiField):
"""
A floating point field.
"""
dehydrated_type = 'float'
help_text = 'Floating point numeric data. Ex: 26.73'
def dehydrate(self, obj):
return self.convert(super(FloatField, self).dehydrate(obj))
def convert(self, value):
if value is None:
return None
return float(value)
class BooleanField(ApiField):
"""
A boolean field.
Covers both ``models.BooleanField`` and ``models.NullBooleanField``.
"""
dehydrated_type = 'boolean'
help_text = 'Boolean data. Ex: True'
def dehydrate(self, obj):
return self.convert(super(BooleanField, self).dehydrate(obj))
def convert(self, value):
if value is None:
return None
return bool(value)
class DateField(ApiField):
"""
A date field.
"""
dehydrated_type = 'date'
help_text = 'A date as a string. Ex: "2010-11-10"'
def dehydrate(self, obj):
return self.convert(super(DateField, self).dehydrate(obj))
def convert(self, value):
if value is None:
return None
if isinstance(value, basestring):
match = DATE_REGEX.search(value)
if match:
data = match.groupdict()
return datetime_safe.date(int(data['year']), int(data['month']), int(data['day']))
else:
raise ApiFieldError("Date provided to '%s' field doesn't appear to be a valid date string: '%s'" % (self.instance_name, value))
return value
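# e.g. convert("2010-11-10") returns datetime_safe.date(2010, 11, 10),
# while a string that does not match DATE_REGEX raises ApiFieldError.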
def hydrate(self, bundle):
value = super(DateField, self).hydrate(bundle)
if value and not hasattr(value, 'year'):
try:
# Try to rip a date/datetime out of it.
value = parse(value)
if hasattr(value, 'hour'):
value = value.date()
except ValueError:
pass
return value
class DateTimeField(ApiField):
"""
A datetime field.
"""
dehydrated_type = 'datetime'
help_text = 'A date & time as a string. Ex: "2010-11-10T03:07:43"'
def dehydrate(self, obj):
return self.convert(super(DateTimeField, self).dehydrate(obj))
def convert(self, value):
if value is None:
return None
if isinstance(value, basestring):
match = DATETIME_REGEX.search(value)
if match:
data = match.groupdict()
return datetime_safe.datetime(int(data['year']), int(data['month']), int(data['day']), int(data['hour']), int(data['minute']), int(data['second']))
else:
raise ApiFieldError("Datetime provided to '%s' field doesn't appear to be a valid datetime string: '%s'" % (self.instance_name, value))
return value
def hydrate(self, bundle):
value = super(DateTimeField, self).hydrate(bundle)
if value and not hasattr(value, 'year'):
try:
# Try to rip a date/datetime out of it.
value = parse(value)
except ValueError:
pass
return value
class RelatedField(ApiField):
"""
Provides access to data that is related within the database.
A base class not intended for direct use but provides functionality that
``ForeignKey`` and ``ManyToManyField`` build upon.
The contents of this field actually point to another ``Resource``,
rather than the related object. This allows the field to represent its data
in different ways.
The abstractions based around this are "leaky" in that, unlike the other
fields provided by ``tastypie``, these fields don't handle arbitrary objects
very well. The subclasses use Django's ORM layer to make things go, though
there is no ORM-specific code at this level.
"""
dehydrated_type = 'related'
is_related = True
self_referential = False
help_text = 'A related resource. Can be either a URI or set of nested resource data.'
def __init__(self, to, attribute, related_name=None, null=False, full=False, unique=False, help_text=None):
"""
Builds the field and prepares it to access to related data.
The ``to`` argument should point to a ``Resource`` class, NOT
to a ``Model``. Required.
The ``attribute`` argument should specify what field/callable points to
the related data on the instance object. Required.
Optionally accepts a ``related_name`` argument. Currently unused, as
unlike Django's ORM layer, reverse relations between ``Resource``
classes are not automatically created. Defaults to ``None``.
Optionally accepts a ``null``, which indicates whether or not a
``None`` is allowable data on the field. Defaults to ``False``.
Optionally accepts a ``full``, which indicates how the related
``Resource`` will appear post-``dehydrate``. If ``False``, the
related ``Resource`` will appear as a URL to the endpoint of that
resource. If ``True``, the result of the sub-resource's
``dehydrate`` will be included in full.
"""
self.instance_name = None
self._resource = None
self.to = to
self.attribute = attribute
self.related_name = related_name
self.null = null
self.full = full
self.readonly = False
self.api_name = None
self.resource_name = None
self.unique = unique
self._to_class = None
if self.to == 'self':
self.self_referential = True
self._to_class = self.__class__
if help_text:
self.help_text = help_text
def contribute_to_class(self, cls, name):
super(RelatedField, self).contribute_to_class(cls, name)
# Check if we're self-referential and hook it up.
# We can't do this quite like Django because there's no ``AppCache``
# here (which I think we should avoid as long as possible).
if self.self_referential or self.to == 'self':
self._to_class = cls
def has_default(self):
"""
Always returns ``False``, as there is no ``default`` available on
related fields.
"""
return False
@property
def default(self):
"""
Raises an exception because related fields do not have a ``default``.
"""
raise ApiFieldError("%r fields do not have default data." % self)
def get_related_resource(self, related_instance):
"""
Instantiates the related resource.
"""
related_resource = self.to_class()
# Fix the ``api_name`` if it's not present.
if related_resource._meta.api_name is None:
if self._resource and self._resource._meta.api_name is not None:
related_resource._meta.api_name = self._resource._meta.api_name
# Try to be efficient about DB queries.
related_resource.instance = related_instance
return related_resource
@property
def to_class(self):
# We need to be lazy here, because when the metaclass constructs the
# Resources, other classes may not exist yet.
# That said, memoize this so we never have to relookup/reimport.
if self._to_class:
return self._to_class
if not isinstance(self.to, basestring):
self._to_class = self.to
return self._to_class
# It's a string. Let's figure it out.
if '.' in self.to:
# Try to import.
module_bits = self.to.split('.')
module_path, class_name = '.'.join(module_bits[:-1]), module_bits[-1]
module = importlib.import_module(module_path)
else:
# We've got a bare class name here, which won't work (No AppCache
# to rely on). Try to throw a useful error.
raise ImportError("Tastypie requires a Python-style path (<module.module.Class>) to lazy load related resources. Only given '%s'." % self.to)
self._to_class = getattr(module, class_name, None)
if self._to_class is None:
raise ImportError("Module '%s' does not appear to have a class called '%s'." % (module_path, class_name))
return self._to_class
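# e.g. a field declared with to='myapp.api.AuthorResource' resolves that
# dotted path to the class lazily, the first time to_class is accessed
# (module and class names here are illustrative).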
def dehydrate_related(self, bundle, related_resource):
"""
Based on the ``full`` attribute, returns either the endpoint or the data
from ``full_dehydrate`` for the related resource.
"""
if not self.full:
# Be a good netizen.
return related_resource.get_resource_uri(bundle)
else:
# ZOMG extra data and big payloads.
return related_resource.full_dehydrate(related_resource.instance)
def build_related_resource(self, value):
"""
Used to ``hydrate`` the data provided. If just a URI is provided,
an attempt is made to load the related resource. If a
dictionary-like structure is provided, a fresh resource is
created.
"""
self.fk_resource = self.to_class()
if isinstance(value, basestring):
# We got a URI. Load the object and assign it.
try:
obj = self.fk_resource.get_via_uri(value)
return self.fk_resource.full_dehydrate(obj)
except ObjectDoesNotExist:
raise ApiFieldError("Could not find the provided object via resource URI '%s'." % value)
elif hasattr(value, 'items'):
# Try to hydrate the data provided.
value = dict_strip_unicode_keys(value)
self.fk_bundle = Bundle(data=value)
try:
return self.fk_resource.obj_update(self.fk_bundle, **value)
except NotFound:
try:
# Attempt lookup by primary key
lookup_kwargs = dict((k, v) for k, v in value.iteritems() if getattr(self.fk_resource, k).unique)
if not lookup_kwargs:
raise NotFound
return self.fk_resource.obj_update(self.fk_bundle, **lookup_kwargs)
except NotFound:
return self.fk_resource.full_hydrate(self.fk_bundle)
except MultipleObjectsReturned:
return self.fk_resource.full_hydrate(self.fk_bundle)
else:
raise ApiFieldError("The '%s' field was given data that was not a URI and not a dictionary-like structure: %s." % (self.instance_name, value))
class ToOneField(RelatedField):
"""
Provides access to related data via foreign key.
This subclass requires Django's ORM layer to work properly.
"""
help_text = 'A single related resource. Can be either a URI or set of nested resource data.'
def __init__(self, to, attribute, related_name=None, null=False, full=False, unique=False, help_text=None):
super(ToOneField, self).__init__(to, attribute, related_name, null=null, full=full, unique=unique, help_text=help_text)
self.fk_resource = None
def dehydrate(self, bundle):
try:
foreign_obj = getattr(bundle.obj, self.attribute)
except ObjectDoesNotExist:
foreign_obj = None
if not foreign_obj:
if not self.null:
raise ApiFieldError("The model '%r' has an empty attribute '%s' and doesn't allow a null value." % (bundle.obj, self.attribute))
return None
self.fk_resource = self.get_related_resource(foreign_obj)
fk_bundle = Bundle(obj=foreign_obj)
return self.dehydrate_related(fk_bundle, self.fk_resource)
def hydrate(self, bundle):
if bundle.data.get(self.instance_name) is None:
if self.null:
return None
else:
raise ApiFieldError("The '%s' field has no data and doesn't allow a null value." % self.instance_name)
return self.build_related_resource(bundle.data.get(self.instance_name))
class ForeignKey(ToOneField):
"""
A convenience subclass for those who prefer to mirror ``django.db.models``.
"""
pass
class OneToOneField(ToOneField):
"""
A convenience subclass for those who prefer to mirror ``django.db.models``.
"""
pass
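# Hedged usage sketch for the classes above (resource and attribute names
# are illustrative, and ModelResource is assumed from tastypie.resources):
#
# class EntryResource(ModelResource):
#     author = ToOneField('myapp.api.UserResource', 'author', full=True)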
class ToManyField(RelatedField):
"""
Provides access to related data via a join table.
This subclass requires Django's ORM layer to work properly.
Note that the ``hydrate`` portions of this field are quite different than
any other field. ``hydrate_m2m`` actually handles the data and relations.
This is due to the way Django implements M2M relationships.
"""
is_m2m = True
help_text = 'Many related resources. Can be either a list of URIs or list of individually nested resource data.'
def __init__(self, to, attribute, related_name=None, null=False, full=False, unique=False, help_text=None):
super(ToManyField, self).__init__(to, attribute, related_name, null=null, full=full, unique=unique, help_text=help_text)
self.m2m_bundles = []
def dehydrate(self, bundle):
if not bundle.obj or not bundle.obj.pk:
if not self.null:
raise ApiFieldError("The model '%r' does not have a primary key and cannot be used in a ToMany context." % bundle.obj)
return []
if not getattr(bundle.obj, self.attribute):
if not self.null:
raise ApiFieldError("The model '%r' has an empty attribute '%s' and doesn't allow a null value." % (bundle.obj, self.attribute))
return []
self.m2m_resources = []
m2m_dehydrated = []
# TODO: Also model-specific and leaky. Relies on there being a
# ``Manager`` there.
for m2m in getattr(bundle.obj, self.attribute).all():
m2m_resource = self.get_related_resource(m2m)
m2m_bundle = Bundle(obj=m2m)
self.m2m_resources.append(m2m_resource)
m2m_dehydrated.append(self.dehydrate_related(m2m_bundle, m2m_resource))
return m2m_dehydrated
def hydrate(self, bundle):
pass
def hydrate_m2m(self, bundle):
if bundle.data.get(self.instance_name) is None:
if self.null:
return []
else:
raise ApiFieldError("The '%s' field has no data and doesn't allow a null value." % self.instance_name)
m2m_hydrated = []
for value in bundle.data.get(self.instance_name):
m2m_hydrated.append(self.build_related_resource(value))
return m2m_hydrated
class ManyToManyField(ToManyField):
"""
A convenience subclass for those who prefer to mirror ``django.db.models``.
"""
pass
class OneToManyField(ToManyField):
"""
A convenience subclass for those who prefer to mirror ``django.db.models``.
"""
pass
# DRL_FIXME: Need more types?
#
# + AutoField
# + BooleanField
# + CharField
# - CommaSeparatedIntegerField
# + DateField
# + DateTimeField
# - DecimalField
# - EmailField
# + FileField
# + FilePathField
# + FloatField
# + ImageField
# + IntegerField
# - IPAddressField
# + NullBooleanField
# + PositiveIntegerField
# + PositiveSmallIntegerField
# - SlugField
# + SmallIntegerField
# - TextField
# - TimeField
# - URLField
# - XMLField
# + ForeignKey
# + ManyToManyField
# + OneToOneField
|
jvkops/django | refs/heads/master | django/contrib/auth/forms.py | 100 | from __future__ import unicode_literals
from django import forms
from django.contrib.auth import (
authenticate, get_user_model, password_validation,
)
from django.contrib.auth.hashers import (
UNUSABLE_PASSWORD_PREFIX, identify_hasher,
)
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import EmailMultiAlternatives
from django.forms.utils import flatatt
from django.template import loader
from django.utils.encoding import force_bytes
from django.utils.html import format_html, format_html_join
from django.utils.http import urlsafe_base64_encode
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext, ugettext_lazy as _
class ReadOnlyPasswordHashWidget(forms.Widget):
def render(self, name, value, attrs):
encoded = value
final_attrs = self.build_attrs(attrs)
if not encoded or encoded.startswith(UNUSABLE_PASSWORD_PREFIX):
summary = mark_safe("<strong>%s</strong>" % ugettext("No password set."))
else:
try:
hasher = identify_hasher(encoded)
except ValueError:
summary = mark_safe("<strong>%s</strong>" % ugettext(
"Invalid password format or unknown hashing algorithm."))
else:
summary = format_html_join('',
"<strong>{}</strong>: {} ",
((ugettext(key), value)
for key, value in hasher.safe_summary(encoded).items())
)
return format_html("<div{}>{}</div>", flatatt(final_attrs), summary)
class ReadOnlyPasswordHashField(forms.Field):
widget = ReadOnlyPasswordHashWidget
def __init__(self, *args, **kwargs):
kwargs.setdefault("required", False)
super(ReadOnlyPasswordHashField, self).__init__(*args, **kwargs)
def bound_data(self, data, initial):
# Always return initial because the widget doesn't
# render an input field.
return initial
def has_changed(self, initial, data):
return False
class UserCreationForm(forms.ModelForm):
"""
A form that creates a user, with no privileges, from the given username and
password.
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
password1 = forms.CharField(label=_("Password"),
widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password confirmation"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as before, for verification."))
class Meta:
model = User
fields = ("username",)
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
self.instance.username = self.cleaned_data.get('username')
password_validation.validate_password(self.cleaned_data.get('password2'), self.instance)
return password2
def save(self, commit=True):
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
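# Typical use in a view (a sketch; the request wiring is illustrative):
#
# form = UserCreationForm(request.POST)
# if form.is_valid():
#     user = form.save()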
class UserChangeForm(forms.ModelForm):
password = ReadOnlyPasswordHashField(label=_("Password"),
help_text=_("Raw passwords are not stored, so there is no way to see "
"this user's password, but you can change the password "
"using <a href=\"../password/\">this form</a>."))
class Meta:
model = User
fields = '__all__'
def __init__(self, *args, **kwargs):
super(UserChangeForm, self).__init__(*args, **kwargs)
f = self.fields.get('user_permissions')
if f is not None:
f.queryset = f.queryset.select_related('content_type')
def clean_password(self):
# Regardless of what the user provides, return the initial value.
# This is done here, rather than on the field, because the
# field does not have access to the initial value
return self.initial["password"]
class AuthenticationForm(forms.Form):
"""
Base class for authenticating users. Extend this to get a form that accepts
username/password logins.
"""
username = forms.CharField(max_length=254)
password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
error_messages = {
'invalid_login': _("Please enter a correct %(username)s and password. "
"Note that both fields may be case-sensitive."),
'inactive': _("This account is inactive."),
}
def __init__(self, request=None, *args, **kwargs):
"""
The 'request' parameter is set for custom auth use by subclasses.
The form data comes in via the standard 'data' kwarg.
"""
self.request = request
self.user_cache = None
super(AuthenticationForm, self).__init__(*args, **kwargs)
# Set the label for the "username" field.
UserModel = get_user_model()
self.username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD)
if self.fields['username'].label is None:
self.fields['username'].label = capfirst(self.username_field.verbose_name)
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
if username and password:
self.user_cache = authenticate(username=username,
password=password)
if self.user_cache is None:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
params={'username': self.username_field.verbose_name},
)
else:
self.confirm_login_allowed(self.user_cache)
return self.cleaned_data
def confirm_login_allowed(self, user):
"""
Controls whether the given User may log in. This is a policy setting,
independent of end-user authentication. The default behavior is to
allow login by active users, and reject login by inactive users.
If the given user cannot log in, this method should raise a
``forms.ValidationError``.
If the given user may log in, this method should return None.
"""
if not user.is_active:
raise forms.ValidationError(
self.error_messages['inactive'],
code='inactive',
)
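# A sketch of the policy hook described in the docstring above (the custom
# rejection rule is purely illustrative):
#
# class PickyAuthenticationForm(AuthenticationForm):
#     def confirm_login_allowed(self, user):
#         if user.username.startswith('b'):
#             raise forms.ValidationError(
#                 _("Sorry, accounts starting with 'b' aren't welcome here."),
#                 code='no_b_users',
#             )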
def get_user_id(self):
if self.user_cache:
return self.user_cache.id
return None
def get_user(self):
return self.user_cache
class PasswordResetForm(forms.Form):
email = forms.EmailField(label=_("Email"), max_length=254)
def send_mail(self, subject_template_name, email_template_name,
context, from_email, to_email, html_email_template_name=None):
"""
Sends a django.core.mail.EmailMultiAlternatives to `to_email`.
"""
subject = loader.render_to_string(subject_template_name, context)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
body = loader.render_to_string(email_template_name, context)
email_message = EmailMultiAlternatives(subject, body, from_email, [to_email])
if html_email_template_name is not None:
html_email = loader.render_to_string(html_email_template_name, context)
email_message.attach_alternative(html_email, 'text/html')
email_message.send()
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This allows subclasses to more easily customize the default policies
that prevent inactive users and users with unusable passwords from
resetting their password.
"""
active_users = get_user_model()._default_manager.filter(
email__iexact=email, is_active=True)
return (u for u in active_users if u.has_usable_password())
def save(self, domain_override=None,
subject_template_name='registration/password_reset_subject.txt',
email_template_name='registration/password_reset_email.html',
use_https=False, token_generator=default_token_generator,
from_email=None, request=None, html_email_template_name=None):
"""
Generates a one-use-only link for resetting the password and sends it to
the user.
"""
email = self.cleaned_data["email"]
for user in self.get_users(email):
if not domain_override:
current_site = get_current_site(request)
site_name = current_site.name
domain = current_site.domain
else:
site_name = domain = domain_override
context = {
'email': user.email,
'domain': domain,
'site_name': site_name,
'uid': urlsafe_base64_encode(force_bytes(user.pk)),
'user': user,
'token': token_generator.make_token(user),
'protocol': 'https' if use_https else 'http',
}
self.send_mail(subject_template_name, email_template_name,
context, from_email, user.email,
html_email_template_name=html_email_template_name)
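# Sketch of the reset flow driven by this form (the request object and
# e-mail address are illustrative):
#
# form = PasswordResetForm({'email': 'user@example.com'})
# if form.is_valid():
#     form.save(request=request, use_https=True)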
class SetPasswordForm(forms.Form):
"""
A form that lets a user set their password without entering the old
password.
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
new_password1 = forms.CharField(label=_("New password"),
widget=forms.PasswordInput,
help_text=password_validation.password_validators_help_text_html())
new_password2 = forms.CharField(label=_("New password confirmation"),
widget=forms.PasswordInput)
def __init__(self, user, *args, **kwargs):
self.user = user
super(SetPasswordForm, self).__init__(*args, **kwargs)
def clean_new_password2(self):
password1 = self.cleaned_data.get('new_password1')
password2 = self.cleaned_data.get('new_password2')
if password1 and password2:
if password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
password_validation.validate_password(password2, self.user)
return password2
def save(self, commit=True):
password = self.cleaned_data["new_password1"]
self.user.set_password(password)
if commit:
self.user.save()
return self.user
class PasswordChangeForm(SetPasswordForm):
"""
A form that lets a user change their password by entering their old
password.
"""
error_messages = dict(SetPasswordForm.error_messages, **{
'password_incorrect': _("Your old password was entered incorrectly. "
"Please enter it again."),
})
old_password = forms.CharField(label=_("Old password"),
widget=forms.PasswordInput)
field_order = ['old_password', 'new_password1', 'new_password2']
def clean_old_password(self):
"""
Validates that the old_password field is correct.
"""
old_password = self.cleaned_data["old_password"]
if not self.user.check_password(old_password):
raise forms.ValidationError(
self.error_messages['password_incorrect'],
code='password_incorrect',
)
return old_password
class AdminPasswordChangeForm(forms.Form):
"""
A form used to change the password of a user in the admin interface.
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
required_css_class = 'required'
password1 = forms.CharField(
label=_("Password"),
widget=forms.PasswordInput,
help_text=password_validation.password_validators_help_text_html(),
)
password2 = forms.CharField(
label=_("Password (again)"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as before, for verification."),
)
def __init__(self, user, *args, **kwargs):
self.user = user
super(AdminPasswordChangeForm, self).__init__(*args, **kwargs)
def clean_password2(self):
password1 = self.cleaned_data.get('password1')
password2 = self.cleaned_data.get('password2')
if password1 and password2:
if password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
password_validation.validate_password(password2, self.user)
return password2
def save(self, commit=True):
"""
Saves the new password.
"""
password = self.cleaned_data["password1"]
self.user.set_password(password)
if commit:
self.user.save()
return self.user
def _get_changed_data(self):
data = super(AdminPasswordChangeForm, self).changed_data
for name in self.fields.keys():
if name not in data:
return []
return ['password']
changed_data = property(_get_changed_data)
|
lukw00/shogun | refs/heads/develop | examples/undocumented/python_modular/features_binned_dot_modular.py | 26 | #!/usr/bin/env python
import numpy
matrix=numpy.array([[-1.0,0,1],[2,3,4],[5,6,7]])
bins=numpy.array([[0.0, 0.0, 0.0],[1.0,1.0,1.0],[2.0,2.0,2.0],[3.0,3.0,3.0],[4.0,4.0,4.0]])
parameter_list = [(matrix,bins)]
def features_binned_dot_modular (matrix, bins):
from modshogun import RealFeatures, BinnedDotFeatures
rf=RealFeatures(matrix)
#print(rf.get_feature_matrix())
bf=BinnedDotFeatures(rf, bins)
filled=bf.get_computed_dot_feature_matrix()
bf.set_fill(False)
unfilled=bf.get_computed_dot_feature_matrix()
bf.set_norm_one(True)
unfilled_normed=bf.get_computed_dot_feature_matrix()
bf.set_fill(True)
filled_normed=bf.get_computed_dot_feature_matrix()
return bf,filled,unfilled,unfilled_normed,filled_normed
if __name__=='__main__':
print('BinnedDotFeatures')
features_binned_dot_modular(*parameter_list[0])
|
avorio/rdflib | refs/heads/master | rdflib/plugins/parsers/pyRdfa/transform/metaname.py | 26 | # -*- coding: utf-8 -*-
"""
Simple transformer: the C{meta} element is extended with a C{property} attribute, with a copy of the
C{name} attribute values.
@author: U{Ivan Herman<a href="http://www.w3.org/People/Ivan/">}
@license: This software is available for use under the
U{W3C® SOFTWARE NOTICE AND LICENSE<href="http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231">}
@contact: Ivan Herman, ivan@w3.org
@version: $Id: metaname.py,v 1.3 2012-01-18 14:16:45 ivan Exp $
$Date: 2012-01-18 14:16:45 $
"""
def meta_transform(html, options, state) :
"""
@param html: a DOM node for the top level html element
@param options: invocation options
@type options: L{Options<pyRdfa.options>}
@param state: top level execution state
@type state: L{State<pyRdfa.state>}
"""
from ..host import HostLanguage
if not( options.host_language in [ HostLanguage.xhtml, HostLanguage.html5, HostLanguage.xhtml5 ] ) :
return
for meta in html.getElementsByTagName("meta") :
if meta.hasAttribute("name") and not meta.hasAttribute("property") :
meta.setAttribute("property", meta.getAttribute("name"))
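# e.g. <meta name="dc:creator" content="Ivan Herman"> becomes
# <meta name="dc:creator" property="dc:creator" content="Ivan Herman">
# (the content value is illustrative).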
|
oVirt/ovirt-node | refs/heads/master | src/ovirt/node/setup/ipmi/ipmi_page.py | 2 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# ipmi_page.py - Copyright (C) 2012 Red Hat, Inc.
# Written by Ryan Barry <rbarry@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA. A copy of the GNU General Public License is
# also available at http://www.gnu.org/copyleft/gpl.html.
from ovirt.node import base, plugins, ui
from ovirt.node.utils import process
from subprocess import CalledProcessError
"""
IPMI Status
"""
class Plugin(plugins.NodePlugin):
_model = None
def __init__(self, app):
super(Plugin, self).__init__(app)
self._model = {}
def has_ui(self):
return True
def name(self):
return "IPMI"
def rank(self):
return 50
def model(self):
model = {"ipmi.enabled": "%s" % Ipmi().check_status()
}
return model
def validators(self):
return {}
def ui_content(self):
ws = [ui.Header("header[0]", "IPMI"),
ui.KeywordLabel("ipmi.enabled", "IPMI Enabled: ")]
if Ipmi().check_status():
ws.extend([
ui.Divider("divider[0]"),
ui.Header("header[1]", "Fan Status:"),
ui.Table("ipmi_output", "", "Fan status",
Ipmi().fan_status())
])
page = ui.Page("page", ws)
page.buttons = []
self.widgets.add(ws)
return page
def on_change(self, changes):
pass
def on_merge(self, effective_changes):
pass
class Ipmi(base.Base):
def _call_ipmi(self, args):
assert type(args) is list
return process.check_output(["ipmitool"] + args, stderr=process.PIPE)
def fan_status(self):
return self._call_ipmi(["-I", "open", "sdr", "elist", "full"])
def check_status(self):
try:
process.check_output(["ipmitool", "-I", "open",
"chassis", "status"])
return True
except CalledProcessError as e:
self.logger.warning("IPMI status call failed with: %s" % e.output)
return False
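# Quick interactive sketch (requires ipmitool and IPMI-capable hardware):
#
# if Ipmi().check_status():
#     print Ipmi().fan_status()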
|
thresholdsoftware/asylum | refs/heads/master | openerp/addons/portal_hr_employees/__init__.py | 53 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_employee
|
lucabe72/linux-reclaiming | refs/heads/master | scripts/tracing/draw_functrace.py | 14679 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more
human-readable view of the call stack, drawn as a textual but hierarchical
tree of calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait for a while, but not too long: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
found among the parents, then create it as a new child of root.
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
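# e.g. a trace line such as
#   "bash-2921 [000] 10802.985281: update_curr <-task_tick_fair"
# parses to ("10802.985281", "update_curr", "task_tick_fair").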
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
tumbl3w33d/ansible | refs/heads/devel | test/units/modules/storage/netapp/test_na_ontap_cluster_peer.py | 37 | ''' unit tests ONTAP Ansible module: na_ontap_cluster_peer '''
from __future__ import print_function
import json
import pytest
from units.compat import unittest
from units.compat.mock import patch, Mock
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
import ansible.module_utils.netapp as netapp_utils
from ansible.modules.storage.netapp.na_ontap_cluster_peer \
import NetAppONTAPClusterPeer as my_module # module under test
if not netapp_utils.has_netapp_lib():
pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
def set_module_args(args):
"""prepare arguments so that they will be picked up during module creation"""
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access
class AnsibleExitJson(Exception):
"""Exception class to be raised by module.exit_json and caught by the test case"""
pass
class AnsibleFailJson(Exception):
"""Exception class to be raised by module.fail_json and caught by the test case"""
pass
def exit_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over exit_json; package return data into an exception"""
if 'changed' not in kwargs:
kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over fail_json; package return data into an exception"""
kwargs['failed'] = True
raise AnsibleFailJson(kwargs)
class MockONTAPConnection(object):
''' mock server connection to ONTAP host '''
def __init__(self, kind=None, parm1=None):
''' save arguments '''
self.type = kind
self.data = parm1
self.xml_in = None
self.xml_out = None
def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
''' mock invoke_successfully returning xml data '''
self.xml_in = xml
if self.type == 'cluster_peer':
xml = self.build_cluster_peer_info(self.data)
self.xml_out = xml
return xml
@staticmethod
def build_cluster_peer_info(parm1):
''' build xml data for vserser-info '''
xml = netapp_utils.zapi.NaElement('xml')
attributes = {
'num-records': 1,
'attributes-list': {
'cluster-peer-info': {
'cluster-name': parm1['dest_cluster_name'],
'peer-addresses': parm1['dest_intercluster_lifs']
}
}
}
xml.translate_struct(attributes)
return xml
class TestMyModule(unittest.TestCase):
''' a group of related Unit Tests '''
def setUp(self):
self.mock_module_helper = patch.multiple(basic.AnsibleModule,
exit_json=exit_json,
fail_json=fail_json)
self.mock_module_helper.start()
self.addCleanup(self.mock_module_helper.stop)
self.server = MockONTAPConnection()
self.mock_cluster_peer = {
'source_intercluster_lifs': '1.2.3.4,1.2.3.5',
'dest_intercluster_lifs': '1.2.3.6,1.2.3.7',
'passphrase': 'netapp123',
'dest_hostname': '10.20.30.40',
'dest_cluster_name': 'cluster2',
'hostname': 'hostname',
'username': 'username',
'password': 'password',
}
def mock_args(self):
return {
'source_intercluster_lifs': self.mock_cluster_peer['source_intercluster_lifs'],
'dest_intercluster_lifs': self.mock_cluster_peer['dest_intercluster_lifs'],
'passphrase': self.mock_cluster_peer['passphrase'],
'dest_hostname': self.mock_cluster_peer['dest_hostname'],
'dest_cluster_name': 'cluster2',
'hostname': 'hostname',
'username': 'username',
'password': 'password',
}
def get_cluster_peer_mock_object(self, kind=None):
"""
Helper method to return an na_ontap_cluster_peer object
:param kind: passes this param to MockONTAPConnection()
:return: na_ontap_cluster_peer object
"""
cluster_peer_obj = my_module()
cluster_peer_obj.asup_log_for_cserver = Mock(return_value=None)
cluster_peer_obj.cluster = Mock()
cluster_peer_obj.cluster.invoke_successfully = Mock()
if kind is None:
cluster_peer_obj.server = MockONTAPConnection()
cluster_peer_obj.dest_server = MockONTAPConnection()
else:
cluster_peer_obj.server = MockONTAPConnection(kind=kind, parm1=self.mock_cluster_peer)
cluster_peer_obj.dest_server = MockONTAPConnection(kind=kind, parm1=self.mock_cluster_peer)
return cluster_peer_obj
def test_module_fail_when_required_args_missing(self):
''' required arguments are reported as errors '''
with pytest.raises(AnsibleFailJson) as exc:
set_module_args({})
my_module()
print('Info: %s' % exc.value.args[0]['msg'])
@patch('ansible.modules.storage.netapp.na_ontap_cluster_peer.NetAppONTAPClusterPeer.cluster_peer_get')
def test_successful_create(self, cluster_peer_get):
''' Test successful create '''
set_module_args(self.mock_args())
cluster_peer_get.side_effect = [
None,
None
]
with pytest.raises(AnsibleExitJson) as exc:
self.get_cluster_peer_mock_object().apply()
assert exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_cluster_peer.NetAppONTAPClusterPeer.cluster_peer_get')
def test_create_idempotency(self, cluster_peer_get):
''' Test create idempotency '''
set_module_args(self.mock_args())
current1 = {
'cluster_name': 'cluster1',
'peer-addresses': '1.2.3.6,1.2.3.7'
}
current2 = {
'cluster_name': 'cluster2',
'peer-addresses': '1.2.3.4,1.2.3.5'
}
cluster_peer_get.side_effect = [
current1,
current2
]
with pytest.raises(AnsibleExitJson) as exc:
self.get_cluster_peer_mock_object('cluster_peer').apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_cluster_peer.NetAppONTAPClusterPeer.cluster_peer_get')
def test_successful_delete(self, cluster_peer_get):
''' Test delete existing interface '''
data = self.mock_args()
data['state'] = 'absent'
data['source_cluster_name'] = 'cluster1'
set_module_args(data)
current1 = {
'cluster_name': 'cluster1',
'peer-addresses': '1.2.3.6,1.2.3.7'
}
current2 = {
'cluster_name': 'cluster2',
'peer-addresses': '1.2.3.4,1.2.3.5'
}
cluster_peer_get.side_effect = [
current1,
current2
]
with pytest.raises(AnsibleExitJson) as exc:
self.get_cluster_peer_mock_object('cluster_peer').apply()
assert exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_cluster_peer.NetAppONTAPClusterPeer.cluster_peer_get')
def test_delete_idempotency(self, cluster_peer_get):
''' Test delete idempotency '''
data = self.mock_args()
data['state'] = 'absent'
data['source_cluster_name'] = 'cluster2'
set_module_args(data)
cluster_peer_get.side_effect = [
None,
None
]
with pytest.raises(AnsibleExitJson) as exc:
self.get_cluster_peer_mock_object().apply()
assert not exc.value.args[0]['changed']
|
mikeibrahim/rpi-dice | refs/heads/master | on.py | 1 | #!/usr/bin/env python
#
# Author: Mike Ibrahim
# Date: 10/01/2016
# Version: 1.0
#
# Repository: https://github.com/mikeibrahim/rpi-dice
#
# on.py
#
# This is to test all the lights one by one
# to make sure they all work
#
# Import Libraries needed
import time
import RPi.GPIO as GPIO
# Initialize settings for GPIO
GPIO.setwarnings(False) # Turn warnings off
GPIO.setmode(GPIO.BCM) # Use BCM Diagram layout
# Pins being used and their order
Pins=[4,5,6,13,19,17,22]
# Loop through all of the pins
for x in range(0, len(Pins)):
GPIO.setup(Pins[x], GPIO.OUT) # Set pin to output mode
GPIO.output(Pins[x], True) # Turn on the pin
time.sleep(.2) # Pause so you can see a blink
print "GPIO %d: %d" % (Pins[x], GPIO.input(Pins[x]))
GPIO.output(Pins[x], False) # Turn off the pin
|
dubourg/openturns | refs/heads/master | python/test/t_DickeyFullerTest_std.py | 3 | #! /usr/bin/env python
from __future__ import print_function
import openturns as ot
size = 100
# ARMA parameters
arcoefficients = ot.ARMACoefficients([0.3])
macoefficients = ot.ARMACoefficients(0)
timeGrid = ot.RegularGrid(0.0, 0.1, size)
# White noise ==> gaussian
whiteNoise = ot.WhiteNoise(ot.Normal(), timeGrid)
myARMA = ot.ARMA(arcoefficients, macoefficients, whiteNoise)
# A realization of the ARMA process
# The realization is supposed to come from a stationary process
realization = ot.TimeSeries(myARMA.getRealization())
# In the test strategy, one first has to detect a trend tendency
# We check whether the time series can be written as x_t = a + b * t + c * x_{t-1}
# H0: c is equal to one (presence of a unit root)
# p-value threshold: probability of the H0 reject zone: 1 - 0.95
# p-value: probability (test variable decision > test variable decision (statistic) evaluated on data)
# Test = True <=> p-value > p-value threshold
test = ot.DickeyFullerTest(realization)
print("Drift and linear trend model=",
test.testUnitRootInDriftAndLinearTrendModel(0.95))
print("Drift model=", test.testUnitRootInDriftModel(0.95))
print("AR1 model=", test.testUnitRootInAR1Model(0.95))
|
devs1991/test_edx_docmode | refs/heads/master | venv/lib/python2.7/site-packages/pygments/lexers/ruby.py | 72 | # -*- coding: utf-8 -*-
"""
pygments.lexers.ruby
~~~~~~~~~~~~~~~~~~~~
Lexers for Ruby and related languages.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
bygroups, default, LexerContext, do_insertions, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error, Generic
from pygments.util import shebang_matches
__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
line_re = re.compile('.*?\n')
RUBY_OPERATORS = (
'*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
'[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
)
class RubyLexer(ExtendedRegexLexer):
"""
For `Ruby <http://www.ruby-lang.org>`_ source code.
"""
name = 'Ruby'
aliases = ['rb', 'ruby', 'duby']
filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
'*.rbx', '*.duby']
mimetypes = ['text/x-ruby', 'application/x-ruby']
flags = re.DOTALL | re.MULTILINE
def heredoc_callback(self, match, ctx):
# okay, this is the hardest part of parsing Ruby...
# match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
start = match.start(1)
yield start, Operator, match.group(1) # <<-?
yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
yield match.start(3), Name.Constant, match.group(3) # heredoc name
yield match.start(4), String.Heredoc, match.group(4) # quote again
heredocstack = ctx.__dict__.setdefault('heredocstack', [])
outermost = not bool(heredocstack)
heredocstack.append((match.group(1) == '<<-', match.group(3)))
ctx.pos = match.start(5)
ctx.end = match.end(5)
# this may find other heredocs
for i, t, v in self.get_tokens_unprocessed(context=ctx):
yield i, t, v
ctx.pos = match.end()
if outermost:
# this is the outer heredoc again, now we can process them all
for tolerant, hdname in heredocstack:
lines = []
for match in line_re.finditer(ctx.text, ctx.pos):
if tolerant:
check = match.group().strip()
else:
check = match.group().rstrip()
if check == hdname:
for amatch in lines:
yield amatch.start(), String.Heredoc, amatch.group()
yield match.start(), Name.Constant, match.group()
ctx.pos = match.end()
break
else:
lines.append(match)
else:
# end of heredoc not found -- error!
for amatch in lines:
yield amatch.start(), Error, amatch.group()
ctx.end = len(ctx.text)
del heredocstack[:]
def gen_rubystrings_rules():
def intp_regex_callback(self, match, ctx):
yield match.start(1), String.Regex, match.group(1) # begin
nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
for i, t, v in self.get_tokens_unprocessed(context=nctx):
yield match.start(3)+i, t, v
yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
ctx.pos = match.end()
def intp_string_callback(self, match, ctx):
yield match.start(1), String.Other, match.group(1)
nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
for i, t, v in self.get_tokens_unprocessed(context=nctx):
yield match.start(3)+i, t, v
yield match.start(4), String.Other, match.group(4) # end
ctx.pos = match.end()
states = {}
states['strings'] = [
# easy ones
(r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
(words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
(r":'(\\\\|\\'|[^'])*'", String.Symbol),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r':"', String.Symbol, 'simple-sym'),
(r'([a-zA-Z_]\w*)(:)(?!:)',
bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
(r'"', String.Double, 'simple-string'),
(r'(?<!\.)`', String.Backtick, 'simple-backtick'),
]
# double-quoted string and symbol
for name, ttype, end in ('string', String.Double, '"'), \
('sym', String.Symbol, '"'), \
('backtick', String.Backtick, '`'):
states['simple-'+name] = [
include('string-intp-escaped'),
(r'[^\\%s#]+' % end, ttype),
(r'[\\#]', ttype),
(end, ttype, '#pop'),
]
# braced quoted strings
for lbrace, rbrace, bracecc, name in \
('\\{', '\\}', '{}', 'cb'), \
('\\[', '\\]', '\\[\\]', 'sb'), \
('\\(', '\\)', '()', 'pa'), \
('<', '>', '<>', 'ab'):
states[name+'-intp-string'] = [
(r'\\[\\' + bracecc + ']', String.Other),
(lbrace, String.Other, '#push'),
(rbrace, String.Other, '#pop'),
include('string-intp-escaped'),
(r'[\\#' + bracecc + ']', String.Other),
(r'[^\\#' + bracecc + ']+', String.Other),
]
states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
name+'-intp-string'))
states[name+'-string'] = [
(r'\\[\\' + bracecc + ']', String.Other),
(lbrace, String.Other, '#push'),
(rbrace, String.Other, '#pop'),
(r'[\\#' + bracecc + ']', String.Other),
(r'[^\\#' + bracecc + ']+', String.Other),
]
states['strings'].append((r'%[qsw]' + lbrace, String.Other,
name+'-string'))
states[name+'-regex'] = [
(r'\\[\\' + bracecc + ']', String.Regex),
(lbrace, String.Regex, '#push'),
(rbrace + '[mixounse]*', String.Regex, '#pop'),
include('string-intp'),
(r'[\\#' + bracecc + ']', String.Regex),
(r'[^\\#' + bracecc + ']+', String.Regex),
]
states['strings'].append((r'%r' + lbrace, String.Regex,
name+'-regex'))
# these must come after %<brace>!
states['strings'] += [
# %r regex
(r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
intp_regex_callback),
# regular fancy strings with qsw
(r'%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1', String.Other),
(r'(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
intp_string_callback),
# special forms of fancy strings after operators or
# in method calls with braces
(r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
bygroups(Text, String.Other, None)),
# and because of fixed width lookbehinds the whole thing a
# second time for line startings...
(r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
bygroups(Text, String.Other, None)),
# all regular fancy strings without qsw
(r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
intp_string_callback),
]
return states
tokens = {
'root': [
(r'#.*?$', Comment.Single),
(r'=begin\s.*?\n=end.*?$', Comment.Multiline),
# keywords
(words((
'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
Keyword),
# start of function, class and module names
(r'(module)(\s+)([a-zA-Z_]\w*'
r'(?:::[a-zA-Z_]\w*)*)',
bygroups(Keyword, Text, Name.Namespace)),
(r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
(r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
(r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
# special methods
(words((
'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
'module_function', 'public', 'protected', 'true', 'false', 'nil'),
suffix=r'\b'),
Keyword.Pseudo),
(r'(not|and|or)\b', Operator.Word),
(words((
'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
'private_method_defined', 'protected_method_defined',
'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
Name.Builtin),
(r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
(words((
'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
'instance_method', 'instance_methods',
'instance_variable_get', 'instance_variable_set', 'instance_variables',
'lambda', 'load', 'local_variables', 'loop',
'method', 'method_missing', 'methods', 'module_eval', 'name',
'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
'private_instance_methods',
'private_methods', 'proc', 'protected_instance_methods',
'protected_methods', 'public_class_method',
'public_instance_methods', 'public_methods',
'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
'untrace_var', 'warn'), prefix=r'(?<!\.)', suffix=r'\b'),
Name.Builtin),
(r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
# normal heredocs
(r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
heredoc_callback),
# empty string heredocs
(r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
(r'__END__', Comment.Preproc, 'end-part'),
# multiline regex (after keywords or assignments)
(r'(?:^|(?<=[=<>~!:])|'
r'(?<=(?:\s|;)when\s)|'
r'(?<=(?:\s|;)or\s)|'
r'(?<=(?:\s|;)and\s)|'
r'(?<=(?:\s|;|\.)index\s)|'
r'(?<=(?:\s|;|\.)scan\s)|'
r'(?<=(?:\s|;|\.)sub\s)|'
r'(?<=(?:\s|;|\.)sub!\s)|'
r'(?<=(?:\s|;|\.)gsub\s)|'
r'(?<=(?:\s|;|\.)gsub!\s)|'
r'(?<=(?:\s|;|\.)match\s)|'
r'(?<=(?:\s|;)if\s)|'
r'(?<=(?:\s|;)elsif\s)|'
r'(?<=^when\s)|'
r'(?<=^index\s)|'
r'(?<=^scan\s)|'
r'(?<=^sub\s)|'
r'(?<=^gsub\s)|'
r'(?<=^sub!\s)|'
r'(?<=^gsub!\s)|'
r'(?<=^match\s)|'
r'(?<=^if\s)|'
r'(?<=^elsif\s)'
r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
# multiline regex (in method calls or subscripts)
(r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
# multiline regex (this time the funny no whitespace rule)
(r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
'multiline-regex'),
# lex numbers and ignore following regular expressions which
# are division operators in fact (grrrr. i hate that. any
# better ideas?)
# since pygments 0.7 we also eat a "?" operator after numbers
# so that the char operator does not work. Chars are not allowed
# there so that you can use the ternary operator.
# stupid example:
# x>=0?n[x]:""
(r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
bygroups(Number.Oct, Text, Operator)),
(r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
bygroups(Number.Hex, Text, Operator)),
(r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
bygroups(Number.Bin, Text, Operator)),
(r'([\d]+(?:_\d+)*)(\s*)([/?])?',
bygroups(Number.Integer, Text, Operator)),
# Names
(r'@@[a-zA-Z_]\w*', Name.Variable.Class),
(r'@[a-zA-Z_]\w*', Name.Variable.Instance),
(r'\$\w+', Name.Variable.Global),
(r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
(r'\$-[0adFiIlpvw]', Name.Variable.Global),
(r'::', Operator),
include('strings'),
# chars
(r'\?(\\[MC]-)*' # modifiers
r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
r'(?!\w)',
String.Char),
(r'[A-Z]\w+', Name.Constant),
# this is needed because ruby attributes can look
# like keywords (class) or like this: ` ?!?
(words(RUBY_OPERATORS, prefix=r'(\.|::)'),
bygroups(Operator, Name.Operator)),
(r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
bygroups(Operator, Name)),
(r'[a-zA-Z_]\w*[!?]?', Name),
(r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
r'!~|&&?|\|\||\.{1,3})', Operator),
(r'[-+/*%=<>&!^|~]=?', Operator),
(r'[(){};,/?:\\]', Punctuation),
(r'\s+', Text)
],
'funcname': [
(r'\(', Punctuation, 'defexpr'),
(r'(?:([a-zA-Z_]\w*)(\.))?'
r'([a-zA-Z_]\w*[!?]?|\*\*?|[-+]@?|'
r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
bygroups(Name.Class, Operator, Name.Function), '#pop'),
default('#pop')
],
'classname': [
(r'\(', Punctuation, 'defexpr'),
(r'<<', Operator, '#pop'),
(r'[A-Z_]\w*', Name.Class, '#pop'),
default('#pop')
],
'defexpr': [
(r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
(r'\(', Operator, '#push'),
include('root')
],
'in-intp': [
(r'\{', String.Interpol, '#push'),
(r'\}', String.Interpol, '#pop'),
include('root'),
],
'string-intp': [
(r'#\{', String.Interpol, 'in-intp'),
(r'#@@?[a-zA-Z_]\w*', String.Interpol),
(r'#\$[a-zA-Z_]\w*', String.Interpol)
],
'string-intp-escaped': [
include('string-intp'),
(r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
String.Escape)
],
'interpolated-regex': [
include('string-intp'),
(r'[\\#]', String.Regex),
(r'[^\\#]+', String.Regex),
],
'interpolated-string': [
include('string-intp'),
(r'[\\#]', String.Other),
(r'[^\\#]+', String.Other),
],
'multiline-regex': [
include('string-intp'),
(r'\\\\', String.Regex),
(r'\\/', String.Regex),
(r'[\\#]', String.Regex),
(r'[^\\/#]+', String.Regex),
(r'/[mixounse]*', String.Regex, '#pop'),
],
'end-part': [
(r'.+', Comment.Preproc, '#pop')
]
}
tokens.update(gen_rubystrings_rules())
def analyse_text(text):
return shebang_matches(text, r'ruby(1\.\d)?')
class RubyConsoleLexer(Lexer):
"""
For Ruby interactive console (**irb**) output like:
.. sourcecode:: rbcon
irb(main):001:0> a = 1
=> 1
irb(main):002:0> puts a
1
=> nil
"""
name = 'Ruby irb session'
aliases = ['rbcon', 'irb']
mimetypes = ['text/x-ruby-shellsession']
_prompt_re = re.compile(r'irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
r'|>> |\?> ')
def get_tokens_unprocessed(self, text):
rblexer = RubyLexer(**self.options)
curcode = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
m = self._prompt_re.match(line)
if m is not None:
end = m.end()
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:end])]))
curcode += line[end:]
else:
if curcode:
for item in do_insertions(
insertions, rblexer.get_tokens_unprocessed(curcode)):
yield item
curcode = ''
insertions = []
yield match.start(), Generic.Output, line
if curcode:
for item in do_insertions(
insertions, rblexer.get_tokens_unprocessed(curcode)):
yield item
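# A minimal usage sketch (not part of the original module; it relies only on
# the standard pygments API):
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     code = 'irb(main):001:0> a = 1\n=> 1\n'
#     print(highlight(code, RubyConsoleLexer(), TerminalFormatter()))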
class FancyLexer(RegexLexer):
"""
Pygments Lexer For `Fancy <http://www.fancy-lang.org/>`_.
Fancy is a self-hosted, pure object-oriented, dynamic,
class-based, concurrent general-purpose programming language
running on Rubinius, the Ruby VM.
.. versionadded:: 1.5
"""
name = 'Fancy'
filenames = ['*.fy', '*.fancypack']
aliases = ['fancy', 'fy']
mimetypes = ['text/x-fancysrc']
tokens = {
# copied from PerlLexer:
'balanced-regex': [
(r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'),
(r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
(r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
(r'\{(\\\\|\\\}|[^}])*\}[egimosx]*', String.Regex, '#pop'),
(r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'),
(r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'),
(r'\((\\\\|\\\)|[^)])*\)[egimosx]*', String.Regex, '#pop'),
(r'@(\\\\|\\@|[^@])*@[egimosx]*', String.Regex, '#pop'),
(r'%(\\\\|\\%|[^%])*%[egimosx]*', String.Regex, '#pop'),
(r'\$(\\\\|\\\$|[^$])*\$[egimosx]*', String.Regex, '#pop'),
],
'root': [
(r'\s+', Text),
# balanced delimiters (copied from PerlLexer):
(r's\{(\\\\|\\\}|[^}])*\}\s*', String.Regex, 'balanced-regex'),
(r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'),
(r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'),
(r's\((\\\\|\\\)|[^)])*\)\s*', String.Regex, 'balanced-regex'),
(r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
(r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
# Comments
(r'#(.*?)\n', Comment.Single),
# Symbols
(r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
# Multi-line DoubleQuotedString
(r'"""(\\\\|\\"|[^"])*"""', String),
# DoubleQuotedString
(r'"(\\\\|\\"|[^"])*"', String),
# keywords
(r'(def|class|try|catch|finally|retry|return|return_local|match|'
r'case|->|=>)\b', Keyword),
# constants
(r'(self|super|nil|false|true)\b', Name.Constant),
(r'[(){};,/?|:\\]', Punctuation),
# names
(words((
'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
Name.Builtin),
# functions
(r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
# operators, must be below functions
(r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
(r'[A-Z]\w*', Name.Constant),
(r'@[a-zA-Z_]\w*', Name.Variable.Instance),
(r'@@[a-zA-Z_]\w*', Name.Variable.Class),
(r'@@?', Operator),
(r'[a-zA-Z_]\w*', Name),
# numbers - / checks are necessary to avoid mismarking regexes,
# see comment in RubyLexer
(r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
bygroups(Number.Oct, Text, Operator)),
(r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
bygroups(Number.Hex, Text, Operator)),
(r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
bygroups(Number.Bin, Text, Operator)),
(r'([\d]+(?:_\d+)*)(\s*)([/?])?',
bygroups(Number.Integer, Text, Operator)),
(r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
(r'\d+', Number.Integer)
]
}
|
KodiColdkeys/coldkeys-addons | refs/heads/master | repository/plugin.video.white.devil/resources/lib/sources/moviexk_mv_tv.py | 6 | # -*- coding: utf-8 -*-
'''
Exodus Add-on
Copyright (C) 2016 Exodus
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,json
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import directstream
class source:
def __init__(self):
self.language = ['en']
self.domains = ['moviexk.com']
self.base_link = 'http://moviexk.com'
self.search_link = 'aHR0cHM6Ly93d3cuZ29vZ2xlYXBpcy5jb20vY3VzdG9tc2VhcmNoL3YxZWxlbWVudD9rZXk9QUl6YVN5Q1ZBWGlVelJZc01MMVB2NlJ3U0cxZ3VubU1pa1R6UXFZJnJzej1maWx0ZXJlZF9jc2UmbnVtPTEwJmhsPWVuJmN4PTAxMzQ0NjM1MTYzMDQ5MzU5NTE5Nzprc2NlY2tjdXZxcyZnb29nbGVob3N0PXd3dy5nb29nbGUuY29tJnE9JXM='
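# The string above is a base64-encoded Google custom search URL template;
# it is decoded with .decode('base64') in movie() and tvshow() before use.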
def movie(self, imdb, title, year):
try:
url = '%s/%s-%s/' % (self.base_link, cleantitle.geturl(title), year)
url = client.request(url, output='geturl')
if url is None: raise Exception()
url = re.findall('(?://.+?|)(/.+)', url)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
pass
try:
t = cleantitle.get(title)
q = '%s %s' % (title, year)
q = self.search_link.decode('base64') % urllib.quote_plus(q)
r = client.request(q, error=True)
r = json.loads(r)['results']
r = [(i['url'], i['titleNoFormatting']) for i in r]
r = [(i[0], re.findall('(?:^Watch Movie |^Watch |)(.+?)\((\d{4})', i[1])) for i in r]
r = [(i[0], i[1][0][0], i[1][0][1]) for i in r if i[1]]
r = [(urllib.unquote_plus(i[0]), i[1], i[2]) for i in r]
r = [(urlparse.urlparse(i[0]).path, i[1], i[2]) for i in r]
r = [i for i in r if t == cleantitle.get(i[1]) and year == i[2]]
r = re.sub('/watch-movie-|-\d+$', '/', r[0][0].strip())
url = re.findall('(?://.+?|)(/.+)', r)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
pass
def tvshow(self, imdb, tvdb, tvshowtitle, year):
try:
t = cleantitle.get(tvshowtitle)
q = '%s %s' % (tvshowtitle, year)
q = self.search_link.decode('base64') % urllib.quote_plus(q)
r = client.request(q)
r = json.loads(r)['results']
r = [(i['url'], i['titleNoFormatting']) for i in r]
r = [(i[0], re.findall('(?:^Watch Movie |^Watch |)(.+?)$', i[1])) for i in r]
r = [(i[0], i[1][0].rsplit('TV Series')[0].strip('(')) for i in r if i[1]]
r = [(urllib.unquote_plus(i[0]), i[1]) for i in r]
r = [(urlparse.urlparse(i[0]).path, i[1]) for i in r]
r = [i for i in r if t == cleantitle.get(i[1])]
r = urlparse.urljoin(self.base_link, r[0][0].strip())
if '/watch-movie-' in r: r = re.sub('/watch-movie-|-\d+$', '/', r)
y = re.findall('(\d{4})', r)
if y:
y = y[0]
else:
y = client.request(r)
y = re.findall('(?:D|d)ate\s*:\s*(\d{4})', y)[0]
if not year == y: raise Exception()
url = re.findall('(?://.+?|)(/.+)', r)[0]
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if url is None: return
url = '%s?season=%01d&episode=%01d' % (url, int(season), int(episode))
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url is None: return sources
f = urlparse.urljoin(self.base_link, url)
url = f.rsplit('?', 1)[0]
r = client.request(url, mobile=True)
r = client.parseDOM(r, 'div', attrs = {'id': 'servers'})
r = client.parseDOM(r, 'li')
r = zip(client.parseDOM(r, 'a', ret='href'), client.parseDOM(r, 'a', ret='title'))
try:
s = urlparse.parse_qs(urlparse.urlparse(f).query)['season'][0]
e = urlparse.parse_qs(urlparse.urlparse(f).query)['episode'][0]
r = [(i[0], re.findall('(\d+)', i[1])) for i in r]
r = [(i[0], '%01d' % int(i[1][0]), '%01d' % int(i[1][1])) for i in r if len(i[1]) > 1]
r = [i[0] for i in r if s == i[1] and e == i[2]]
except:
r = [i[0] for i in r]
for u in r:
try:
url = client.request(u, mobile=True)
url = client.parseDOM(url, 'source', ret='src')
url = [i.strip().split()[0] for i in url]
for i in url:
try: sources.append({'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'provider': 'Moviexk', 'url': i, 'direct': True, 'debridonly': False})
except: pass
except:
pass
return sources
except:
return sources
def resolve(self, url):
return directstream.googlepass(url)
|
ariegg/webiopi-drivers | refs/heads/master | chips/clock/dsrtc/dsrtc.py | 1 | # Copyright 2014-2016 Andreas Riegg - t-h-i-n-x.net
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Changelog
#
# 1.0 2014-06-26 Initial release.
# 1.1 2014-08-17 Updated to match v1.1 of Clock implementation
# 1.2 2014-11-25 Added devices
# 1.3 2016-08-26 Added @api annotations and bus selection.
#
# Config parameters
#
# - control 8 bit Value of the control register
# - bus String Name of the I2C bus
#
# Usage remarks
#
# - All chips have a fixed slave address of 0x68
# - Driver uses sequential register access to speed up performance
# - Temperature reading from DS3231 is currently not supported
# - Setting alarms for DS1337 and DS3231 is currently not supported
# - tbd: DS1339, DS1340, DS1388
#
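# Example config entries (a sketch only; the parameter syntax follows the
# usual WebIOPi [DEVICES] section format and the bus name is an assumption):
#
# [DEVICES]
# rtc0 = DS1307
# rtc1 = DS3231 bus:I2C1
#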
from webiopi.utils.types import toint
from webiopi.devices.i2c import I2C
from webiopi.devices.clock import Clock
from webiopi.devices.memory import Memory
from webiopi.decorators.rest import request, response, api
from datetime import datetime, date, time
class DSclock(I2C, Clock):
#---------- Constants and definitions ----------
# Common I2C registers for all DSxxxx clock chips
SEC = 0x00 # Seconds register coded as 2-digit BCD
MIN = 0x01 # Minutes register coded as 2-digit BCD
HRS = 0x02 # Hours register coded as 2-digit BCD
DOW = 0x03 # Day of week register coded as 1-digit BCD
DAY = 0x04 # Day of month register coded as 2-digit BCD
MON = 0x05 # Months register coded as 2-digit BCD
YRS = 0x06 # Years register coded as 2-digit BCD
# Bit masks for common registers for all DSxxxx clock chips
SEC_MASK = 0b01111111
MIN_MASK = 0b01111111
HRS_MASK = 0b00111111
DOW_MASK = 0b00000111
DAY_MASK = 0b00111111
MON_MASK = 0b00011111
YRS_MASK = 0b11111111
#---------- Class initialization ----------
def __init__(self, control, name, bus):
I2C.__init__(self, 0x68, bus)
Clock.__init__(self)
self.name = name
if control is not None:
con = toint(control)
if not con in range(0, 0xFF + 1):
raise ValueError("control value [%d] out of range [%d..%d]" % (con, 0x00, 0xFF))
self.__setCon__(con)
else:
self.__setCon__(self.CON_DEFAULT)
#---------- Abstraction framework contracts ----------
def __str__(self):
return "%s(slave=0x%02X, dev=%s)" % (self.name, self.slave, self.device())
#---------- Clock abstraction related methods ----------
def __getSec__(self):
data = self.readRegister(self.SEC)
return self.BcdBits2Int(data & self.SEC_MASK)
def __getMin__(self):
data = self.readRegister(self.MIN)
return self.BcdBits2Int(data & self.MIN_MASK)
def __getHrs__(self):
data = self.readRegister(self.HRS)
return self.BcdBits2Int(data & self.HRS_MASK)
def __getDay__(self):
data = self.readRegister(self.DAY)
return self.BcdBits2Int(data & self.DAY_MASK)
def __getMon__(self):
data = self.readRegister(self.MON)
return self.BcdBits2Int(data & self.MON_MASK)
def __getYrs__(self):
data = self.readRegister(self.YRS)
return 2000 + self.BcdBits2Int(data & self.YRS_MASK)
def __getDow__(self):
data = self.readRegister(self.DOW)
return self.BcdBits2Int(data & self.DOW_MASK)
def __setDow__(self, value):
self.writeRegister(self.DOW, self.Int2BcdBits(value & self.DOW_MASK))
#---------- Clock default re-implementations ----------
# Speed up performance by sequential register access
def __getDateTime__(self):
data = self.readRegisters(self.SEC, 7)
second = self.BcdBits2Int(data[0] & self.SEC_MASK)
minute = self.BcdBits2Int(data[1] & self.MIN_MASK)
hour = self.BcdBits2Int(data[2] & self.HRS_MASK)
day = self.BcdBits2Int(data[4] & self.DAY_MASK)
month = self.BcdBits2Int(data[5] & self.MON_MASK)
year = self.BcdBits2Int(data[6] & self.YRS_MASK) + 2000
return datetime(year, month, day, hour, minute, second)
def __setDateTime__(self, aDatetime):
self.checkYear(aDatetime.year)
dow = self.__getDow__() # preserve current dow value
data = bytearray(7)
data[0] = self.Int2BcdBits(aDatetime.second & self.SEC_MASK)
data[1] = self.Int2BcdBits(aDatetime.minute & self.MIN_MASK)
data[2] = self.Int2BcdBits(aDatetime.hour & self.HRS_MASK)
data[3] = self.Int2BcdBits(dow & self.DOW_MASK)
data[4] = self.Int2BcdBits(aDatetime.day & self.DAY_MASK)
data[5] = self.Int2BcdBits(aDatetime.month & self.MON_MASK)
data[6] = self.Int2BcdBits((aDatetime.year - 2000) & self.YRS_MASK)
self.writeRegisters(self.SEC, data)
def __getDate__(self):
data = self.readRegisters(self.DAY, 3)
day = self.BcdBits2Int(data[0] & self.DAY_MASK)
month = self.BcdBits2Int(data[1] & self.MON_MASK)
year = self.BcdBits2Int(data[2] & self.YRS_MASK) + 2000
return date(year, month, day)
def __setDate__(self, aDate):
self.checkYear(aDate.year)
data = bytearray(3)
data[0] = self.Int2BcdBits(aDate.day & self.DAY_MASK)
data[1] = self.Int2BcdBits(aDate.month & self.MON_MASK)
data[2] = self.Int2BcdBits((aDate.year - 2000) & self.YRS_MASK)
self.writeRegisters(self.DAY, data)
def __getTime__(self):
data = self.readRegisters(self.SEC, 3)
second = self.BcdBits2Int(data[0] & self.SEC_MASK)
minute = self.BcdBits2Int(data[1] & self.MIN_MASK)
hour = self.BcdBits2Int(data[2] & self.HRS_MASK)
return time(hour, minute, second)
def __setTime__(self, aTime):
data = bytearray(3)
data[0] = self.Int2BcdBits(aTime.second & self.SEC_MASK)
data[1] = self.Int2BcdBits(aTime.minute & self.MIN_MASK)
data[2] = self.Int2BcdBits(aTime.hour & self.HRS_MASK)
self.writeRegisters(self.SEC, data)
#---------- Local helpers ----------
def __getCon__(self):
data = self.readRegister(self.CON)
return (data & self.CON_MASK)
def __setCon__(self, value):
self.writeRegister(self.CON, (value & self.CON_MASK))
class DS1307(DSclock, Memory):
CON = 0x07 # Control register address
RAM = 0x08 # SRAM register address
CON_MASK = 0b10010011 # Control register mask
CON_DEFAULT = 0b10000011 # Control register default value
CH = 0b10000000 # Clock halt bit value/mask
#---------- Class initialization ----------
def __init__(self, control=None, name="DS1307", bus=None):
DSclock.__init__(self, control, name, bus)
Memory.__init__(self, 56)
# Clock is stopped by default upon poweron, so start it
self.start()
#---------- Abstraction framework contracts ----------
def __family__(self):
return [Clock.__family__(self), Memory.__family__(self)]
#---------- Clock abstraction related methods ----------
def __setSec__(self, value):
# Keep CH bit unchanged
secValue = self.Int2BcdBits(value)
secRegisterData = self.readRegister(self.SEC)
self.writeRegister(self.SEC, (secRegisterData & ~self.SEC_MASK) | (secValue & self.SEC_MASK))
#---------- Memory abstraction related methods ----------
def __readMemoryByte__(self, address):
return self.readRegister(self.RAM + address)
def __writeMemoryByte__(self, address, value):
self.writeRegister(self.RAM + address, value)
def __readMemoryWord__(self, address):
data = self.readRegisters(self.RAM + address * 2, 2)
return (data[0] << 8) + data[1]
def __writeMemoryWord__(self, address, value):
data = bytearray(2)
data[0] = (value >> 8) & 0xFF
data[1] = value & 0xFF
self.writeRegisters(self.RAM + address * 2, data)
def __readMemoryLong__(self, address):
data = self.readRegisters(self.RAM + address * 4, 4)
return (data[0] << 24) + (data[1] << 16) + (data[2] << 8) + data[3]
def __writeMemoryLong__(self, address, value):
data = bytearray(4)
data[0] = (value >> 24) & 0xFF
data[1] = (value >> 16) & 0xFF
data[2] = (value >> 8) & 0xFF
data[3] = value & 0xFF
self.writeRegisters(self.RAM + address * 4, data)
#---------- Device features ----------
@api("Device", 3, "feature", "driver")
@request("POST", "run/start")
def start(self):
# Clear CH bit, keep sec value unchanged
secRegisterData = self.readRegister(self.SEC)
self.writeRegister(self.SEC, (secRegisterData & ~self.CH))
@api("Device", 3, "feature", "driver")
@request("POST", "run/stop")
def stop(self):
# Set CH bit, keep sec value unchanged
secRegisterData = self.readRegister(self.SEC)
self.writeRegister(self.SEC, (secRegisterData | self.CH))
class DS1338(DS1307):
CON_MASK = 0b10110011 # Control register mask
CON_DEFAULT = 0b10110011 # Control register default value
#---------- Class initialization ----------
def __init__(self, control=None, bus=None):
DS1307.__init__(self, control, "DS1338", bus)
class DS1337(DSclock):
CON = 0x0E # Control register address
CON_MASK = 0b10011111 # Control register mask
CON_DEFAULT = 0b00011000 # Control register default value
EOSC_ = 0b10000000 # Enable oscillator active low bit value/mask
#---------- Class initialization ----------
def __init__(self, control=None, bus=None):
DSclock.__init__(self, control, "DS1337", bus)
#---------- Device features ----------
@api("Device", 3, "feature", "driver")
@request("POST", "run/start")
def start(self):
# Clear EOSC_ bit
conRegisterData = self.__getCon__()
self.__setCon__(conRegisterData & ~self.EOSC_)
@api("Device", 3, "feature", "driver")
@request("POST", "run/stop")
def stop(self):
# Set EOSC_ bit
conRegisterData = self.__getCon__()
self.__setCon__(conRegisterData | self.EOSC_)
class DS3231(DSclock):
CON = 0x0E # Control register address
CON_MASK = 0b11011111 # Control register mask
CON_DEFAULT = 0b00011100 # Control register default value
#---------- Class initialization ----------
def __init__(self, control=None, bus=None):
DSclock.__init__(self, control, "DS3231", bus)
|
davgibbs/django | refs/heads/master | django/contrib/sessions/management/commands/clearsessions.py | 729 | from importlib import import_module
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = (
"Can be run as a cronjob or directly to clean out expired sessions "
"(only with the database backend at the moment)."
)
def handle(self, **options):
engine = import_module(settings.SESSION_ENGINE)
try:
engine.SessionStore.clear_expired()
except NotImplementedError:
self.stderr.write("Session engine '%s' doesn't support clearing "
"expired sessions.\n" % settings.SESSION_ENGINE)
|
shermanng10/superathletebuilder | refs/heads/master | env/lib/python2.7/site-packages/psycopg2/tz.py | 72 | """tzinfo implementations for psycopg2
This module holds two different tzinfo implementations that can be used as
the 'tzinfo' argument to datetime constructors, directly passed to psycopg
functions or used to set the .tzinfo_factory attribute in cursors.
"""
# psycopg/tz.py - tzinfo implementation
#
# Copyright (C) 2003-2010 Federico Di Gregorio <fog@debian.org>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import datetime
import time
ZERO = datetime.timedelta(0)
class FixedOffsetTimezone(datetime.tzinfo):
"""Fixed offset in minutes east from UTC.
This is exactly the implementation__ found in Python 2.3.x documentation,
with a small change to the `!__init__()` method to allow for pickling
and a default name in the form ``sHH:MM`` (``s`` is the sign).
The implementation also caches instances. During creation, if a
FixedOffsetTimezone instance has previously been created with the same
offset and name that instance will be returned. This saves memory and
improves comparability.
.. __: http://docs.python.org/library/datetime.html#datetime-tzinfo
"""
_name = None
_offset = ZERO
_cache = {}
def __init__(self, offset=None, name=None):
if offset is not None:
self._offset = datetime.timedelta(minutes = offset)
if name is not None:
self._name = name
def __new__(cls, offset=None, name=None):
"""Return a suitable instance created earlier if it exists
"""
key = (offset, name)
try:
return cls._cache[key]
except KeyError:
tz = super(FixedOffsetTimezone, cls).__new__(cls, offset, name)
cls._cache[key] = tz
return tz
def __repr__(self):
offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
return "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=%r)" \
% (offset_mins, self._name)
def __getinitargs__(self):
offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
return (offset_mins, self._name)
def utcoffset(self, dt):
return self._offset
def tzname(self, dt):
if self._name is not None:
return self._name
else:
seconds = self._offset.seconds + self._offset.days * 86400
hours, seconds = divmod(seconds, 3600)
minutes = seconds // 60
if minutes:
return "%+03d:%02d" % (hours, minutes)
else:
return "%+03d" % hours
def dst(self, dt):
return ZERO
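# A minimal sketch of typical use (values are illustrative, not from the
# original module): the instance is passed as tzinfo to a datetime.
#
# >>> tz = FixedOffsetTimezone(offset=120)
# >>> datetime.datetime(2010, 1, 1, tzinfo=tz).isoformat()
# '2010-01-01T00:00:00+02:00'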
STDOFFSET = datetime.timedelta(seconds = -time.timezone)
if time.daylight:
DSTOFFSET = datetime.timedelta(seconds = -time.altzone)
else:
DSTOFFSET = STDOFFSET
DSTDIFF = DSTOFFSET - STDOFFSET
class LocalTimezone(datetime.tzinfo):
"""Platform idea of local timezone.
This is the exact implementation from the Python 2.3 documentation.
"""
def utcoffset(self, dt):
if self._isdst(dt):
return DSTOFFSET
else:
return STDOFFSET
def dst(self, dt):
if self._isdst(dt):
return DSTDIFF
else:
return ZERO
def tzname(self, dt):
return time.tzname[self._isdst(dt)]
def _isdst(self, dt):
tt = (dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second,
dt.weekday(), 0, -1)
stamp = time.mktime(tt)
tt = time.localtime(stamp)
return tt.tm_isdst > 0
LOCAL = LocalTimezone()
# TODO: pre-generate some interesting time zones?
|
dhwright25/npyscreen | refs/heads/master | TESTUNICODE.py | 9 | #!/usr/bin/env python
# encoding: utf-8
import npyscreen
#npyscreen.disableColor()
class TestApp(npyscreen.NPSApp):
def main(self):
# These lines create the form and populate it with widgets.
# A fairly complex screen in only 8 or so lines of code - a line for each control.
F = npyscreen.Form(name = "Welcome to Npyscreen",)
t = F.add(npyscreen.TextfieldUnicode, name = "Text:", value='Ω—Ƣ' )
t.left_margin=10
a = F.add(npyscreen.Textfield, name = "Text:", value='This is the original textfield' )
# This lets the user play with the Form.
F.edit()
if __name__ == "__main__":
App = TestApp()
App.run()
|
pbrod/scipy | refs/heads/master | scipy/optimize/_lsq/common.py | 50 | """Functions used by least-squares algorithms."""
from __future__ import division, print_function, absolute_import
from math import copysign
import numpy as np
from numpy.linalg import norm
from scipy.linalg import cho_factor, cho_solve, LinAlgError
from scipy.sparse import issparse
from scipy.sparse.linalg import LinearOperator, aslinearoperator
EPS = np.finfo(float).eps
# Functions related to a trust-region problem.
def intersect_trust_region(x, s, Delta):
"""Find the intersection of a line with the boundary of a trust region.
This function solves the quadratic equation with respect to t
||(x + s*t)||**2 = Delta**2.
Returns
-------
t_neg, t_pos : tuple of float
Negative and positive roots.
Raises
------
ValueError
If `s` is zero or `x` is not within the trust region.
"""
a = np.dot(s, s)
if a == 0:
raise ValueError("`s` is zero.")
b = np.dot(x, s)
c = np.dot(x, x) - Delta**2
if c > 0:
raise ValueError("`x` is not within the trust region.")
d = np.sqrt(b*b - a*c) # Root from one fourth of the discriminant.
# Computations below avoid loss of significance, see "Numerical Recipes".
q = -(b + copysign(d, b))
t1 = q / a
t2 = c / q
if t1 < t2:
return t1, t2
else:
return t2, t1
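# Worked example (illustrative, not part of the original source): with
# x = [0, 0], s = [1, 0] and Delta = 1, the equation ||x + s*t||**2 = 1
# reduces to t**2 = 1, so the function returns (-1.0, 1.0).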
def solve_lsq_trust_region(n, m, uf, s, V, Delta, initial_alpha=None,
rtol=0.01, max_iter=10):
"""Solve a trust-region problem arising in least-squares minimization.
This function implements a method described by J. J. More [1]_ and used
in MINPACK, but it relies on a single SVD of Jacobian instead of series
of Cholesky decompositions. Before running this function, compute:
``U, s, VT = svd(J, full_matrices=False)``.
Parameters
----------
n : int
Number of variables.
m : int
Number of residuals.
uf : ndarray
Computed as U.T.dot(f).
s : ndarray
Singular values of J.
V : ndarray
Transpose of VT.
Delta : float
Radius of a trust region.
initial_alpha : float, optional
Initial guess for alpha, which might be available from a previous
iteration. If None, determined automatically.
rtol : float, optional
Stopping tolerance for the root-finding procedure. Namely, the
solution ``p`` will satisfy ``abs(norm(p) - Delta) < rtol * Delta``.
max_iter : int, optional
Maximum allowed number of iterations for the root-finding procedure.
Returns
-------
p : ndarray, shape (n,)
Found solution of a trust-region problem.
alpha : float
Positive value such that (J.T*J + alpha*I)*p = -J.T*f.
Sometimes called Levenberg-Marquardt parameter.
n_iter : int
Number of iterations made by root-finding procedure. Zero means
that Gauss-Newton step was selected as the solution.
References
----------
.. [1] More, J. J., "The Levenberg-Marquardt Algorithm: Implementation
and Theory," Numerical Analysis, ed. G. A. Watson, Lecture Notes
in Mathematics 630, Springer Verlag, pp. 105-116, 1977.
"""
def phi_and_derivative(alpha, suf, s, Delta):
"""Function of which to find zero.
It is defined as "norm of regularized (by alpha) least-squares
solution minus `Delta`". Refer to [1]_.
"""
denom = s**2 + alpha
p_norm = norm(suf / denom)
phi = p_norm - Delta
phi_prime = -np.sum(suf ** 2 / denom**3) / p_norm
return phi, phi_prime
suf = s * uf
# Check if J has full rank and try Gauss-Newton step.
if m >= n:
threshold = EPS * m * s[0]
full_rank = s[-1] > threshold
else:
full_rank = False
if full_rank:
p = -V.dot(uf / s)
if norm(p) <= Delta:
return p, 0.0, 0
alpha_upper = norm(suf) / Delta
if full_rank:
phi, phi_prime = phi_and_derivative(0.0, suf, s, Delta)
alpha_lower = -phi / phi_prime
else:
alpha_lower = 0.0
if initial_alpha is None or not full_rank and initial_alpha == 0:
alpha = max(0.001 * alpha_upper, (alpha_lower * alpha_upper)**0.5)
else:
alpha = initial_alpha
for it in range(max_iter):
if alpha < alpha_lower or alpha > alpha_upper:
alpha = max(0.001 * alpha_upper, (alpha_lower * alpha_upper)**0.5)
phi, phi_prime = phi_and_derivative(alpha, suf, s, Delta)
if phi < 0:
alpha_upper = alpha
ratio = phi / phi_prime
alpha_lower = max(alpha_lower, alpha - ratio)
alpha -= (phi + Delta) * ratio / Delta
if np.abs(phi) < rtol * Delta:
break
p = -V.dot(suf / (s**2 + alpha))
# Make the norm of p equal to Delta; p is changed only slightly during
# this. It is done to prevent p from lying outside the trust region
# (which can cause problems later).
p *= Delta / norm(p)
return p, alpha, it + 1
def solve_trust_region_2d(B, g, Delta):
"""Solve a general trust-region problem in 2 dimensions.
The problem is reformulated as a 4-th order algebraic equation,
the solution of which is found by numpy.roots.
Parameters
----------
B : ndarray, shape (2, 2)
Symmetric matrix, defines a quadratic term of the function.
g : ndarray, shape (2,)
Defines a linear term of the function.
Delta : float
Radius of a trust region.
Returns
-------
p : ndarray, shape (2,)
Found solution.
newton_step : bool
Whether the returned solution is the Newton step which lies within
the trust region.
"""
try:
R, lower = cho_factor(B)
p = -cho_solve((R, lower), g)
if np.dot(p, p) <= Delta**2:
return p, True
except LinAlgError:
pass
a = B[0, 0] * Delta**2
b = B[0, 1] * Delta**2
c = B[1, 1] * Delta**2
d = g[0] * Delta
f = g[1] * Delta
coeffs = np.array(
[-b + d, 2 * (a - c + f), 6 * b, 2 * (-a + c + f), -b - d])
t = np.roots(coeffs) # Can handle leading zeros.
t = np.real(t[np.isreal(t)])
p = Delta * np.vstack((2 * t / (1 + t**2), (1 - t**2) / (1 + t**2)))
value = 0.5 * np.sum(p * B.dot(p), axis=0) + np.dot(g, p)
i = np.argmin(value)
p = p[:, i]
return p, False
def update_tr_radius(Delta, actual_reduction, predicted_reduction,
step_norm, bound_hit):
"""Update the radius of a trust region based on the cost reduction.
Returns
-------
Delta : float
New radius.
ratio : float
Ratio between actual and predicted reductions. Zero if predicted
reduction is zero.
"""
if predicted_reduction > 0:
ratio = actual_reduction / predicted_reduction
else:
ratio = 0
if ratio < 0.25:
Delta = 0.25 * step_norm
elif ratio > 0.75 and bound_hit:
Delta *= 2.0
return Delta, ratio
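# Example (illustrative): update_tr_radius(1.0, 0.1, 1.0, 0.4, False) gives
# ratio = 0.1 < 0.25, so the radius shrinks to 0.25 * 0.4 = 0.1; a ratio
# above 0.75 with the bound hit would double Delta instead.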
# Construction and minimization of quadratic functions.
def build_quadratic_1d(J, g, s, diag=None, s0=None):
"""Parameterize a multivariate quadratic function along a line.
The resulting univariate quadratic function is given as follows:
::
f(t) = 0.5 * (s0 + s*t).T * (J.T*J + diag) * (s0 + s*t) +
g.T * (s0 + s*t)
Parameters
----------
J : ndarray, sparse matrix or LinearOperator shape (m, n)
Jacobian matrix, affects the quadratic term.
g : ndarray, shape (n,)
Gradient, defines the linear term.
s : ndarray, shape (n,)
Direction vector of a line.
diag : None or ndarray with shape (n,), optional
Additional diagonal part, affects the quadratic term.
If None, assumed to be 0.
s0 : None or ndarray with shape (n,), optional
Initial point. If None, assumed to be 0.
Returns
-------
a : float
Coefficient for t**2.
b : float
Coefficient for t.
c : float
Free term. Returned only if `s0` is provided.
"""
v = J.dot(s)
a = np.dot(v, v)
if diag is not None:
a += np.dot(s * diag, s)
a *= 0.5
b = np.dot(g, s)
if s0 is not None:
u = J.dot(s0)
b += np.dot(u, v)
c = 0.5 * np.dot(u, u) + np.dot(g, s0)
if diag is not None:
b += np.dot(s0 * diag, s)
c += 0.5 * np.dot(s0 * diag, s0)
return a, b, c
else:
return a, b
def minimize_quadratic_1d(a, b, lb, ub, c=0):
"""Minimize a 1-d quadratic function subject to bounds.
The free term `c` is 0 by default. Bounds must be finite.
Returns
-------
t : float
Minimum point.
y : float
Minimum value.
"""
t = [lb, ub]
if a != 0:
extremum = -0.5 * b / a
if lb < extremum < ub:
t.append(extremum)
t = np.asarray(t)
y = a * t**2 + b * t + c
min_index = np.argmin(y)
return t[min_index], y[min_index]
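# Example (illustrative): minimizing t**2 - 2*t on [0, 3] means a = 1 and
# b = -2; the extremum -0.5 * b / a = 1.0 lies inside the bounds, so
# minimize_quadratic_1d(1, -2, 0, 3) returns (1.0, -1.0).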
def evaluate_quadratic(J, g, s, diag=None):
"""Compute values of a quadratic function arising in least squares.
The function is 0.5 * s.T * (J.T * J + diag) * s + g.T * s.
Parameters
----------
J : ndarray, sparse matrix or LinearOperator, shape (m, n)
Jacobian matrix, affects the quadratic term.
g : ndarray, shape (n,)
Gradient, defines the linear term.
s : ndarray, shape (k, n) or (n,)
Array containing steps as rows.
diag : ndarray, shape (n,), optional
Additional diagonal part, affects the quadratic term.
If None, assumed to be 0.
Returns
-------
values : ndarray with shape (k,) or float
Values of the function. If `s` was 2-dimensional then ndarray is
returned, otherwise float is returned.
"""
if s.ndim == 1:
Js = J.dot(s)
q = np.dot(Js, Js)
if diag is not None:
q += np.dot(s * diag, s)
else:
Js = J.dot(s.T)
q = np.sum(Js**2, axis=0)
if diag is not None:
q += np.sum(diag * s**2, axis=1)
l = np.dot(s, g)
return 0.5 * q + l
# Utility functions to work with bound constraints.
def in_bounds(x, lb, ub):
"""Check if a point lies within bounds."""
return np.all((x >= lb) & (x <= ub))
def step_size_to_bound(x, s, lb, ub):
"""Compute a min_step size required to reach a bound.
The function computes a positive scalar t, such that x + s * t is on
the bound.
Returns
-------
step : float
Computed step. Non-negative value.
hits : ndarray of int with shape of x
Each element indicates whether a corresponding variable reaches the
bound:
* 0 - the bound was not hit.
* -1 - the lower bound was hit.
* 1 - the upper bound was hit.
"""
non_zero = np.nonzero(s)
s_non_zero = s[non_zero]
steps = np.empty_like(x)
steps.fill(np.inf)
with np.errstate(over='ignore'):
steps[non_zero] = np.maximum((lb - x)[non_zero] / s_non_zero,
(ub - x)[non_zero] / s_non_zero)
min_step = np.min(steps)
return min_step, np.equal(steps, min_step) * np.sign(s).astype(int)
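# Example (illustrative): x = [0.5], s = [1.0], lb = [0.0], ub = [1.0] gives
# steps = [max(-0.5, 0.5)] = [0.5], so the result is (0.5, array([1])),
# i.e. the upper bound is hit after a step of 0.5.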
def find_active_constraints(x, lb, ub, rtol=1e-10):
"""Determine which constraints are active in a given point.
The threshold is computed using `rtol` and the absolute value of the
closest bound.
Returns
-------
active : ndarray of int with shape of x
Each component shows whether the corresponding constraint is active:
* 0 - a constraint is not active.
* -1 - a lower bound is active.
* 1 - an upper bound is active.
"""
active = np.zeros_like(x, dtype=int)
if rtol == 0:
active[x <= lb] = -1
active[x >= ub] = 1
return active
lower_dist = x - lb
upper_dist = ub - x
lower_threshold = rtol * np.maximum(1, np.abs(lb))
upper_threshold = rtol * np.maximum(1, np.abs(ub))
lower_active = (np.isfinite(lb) &
(lower_dist <= np.minimum(upper_dist, lower_threshold)))
active[lower_active] = -1
upper_active = (np.isfinite(ub) &
(upper_dist <= np.minimum(lower_dist, upper_threshold)))
active[upper_active] = 1
return active
def make_strictly_feasible(x, lb, ub, rstep=1e-10):
"""Shift a point to the interior of a feasible region.
Each element of the returned vector is at least at a relative distance
`rstep` from the closest bound. If ``rstep=0`` then `np.nextafter` is used.
"""
x_new = x.copy()
active = find_active_constraints(x, lb, ub, rstep)
lower_mask = np.equal(active, -1)
upper_mask = np.equal(active, 1)
if rstep == 0:
x_new[lower_mask] = np.nextafter(lb[lower_mask], ub[lower_mask])
x_new[upper_mask] = np.nextafter(ub[upper_mask], lb[upper_mask])
else:
x_new[lower_mask] = (lb[lower_mask] +
rstep * np.maximum(1, np.abs(lb[lower_mask])))
x_new[upper_mask] = (ub[upper_mask] -
rstep * np.maximum(1, np.abs(ub[upper_mask])))
tight_bounds = (x_new < lb) | (x_new > ub)
x_new[tight_bounds] = 0.5 * (lb[tight_bounds] + ub[tight_bounds])
return x_new
def CL_scaling_vector(x, g, lb, ub):
"""Compute Coleman-Li scaling vector and its derivatives.
Components of a vector v are defined as follows:
::
| ub[i] - x[i], if g[i] < 0 and ub[i] < np.inf
v[i] = | x[i] - lb[i], if g[i] > 0 and lb[i] > -np.inf
| 1, otherwise
According to this definition v[i] >= 0 for all i. It differs from the
definition in paper [1]_ (eq. (2.2)), where the absolute value of v is
used. Both definitions are equivalent down the line.
Derivatives of v with respect to x take the value 1, -1 or 0 depending
on the case.
Returns
-------
v : ndarray with shape of x
Scaling vector.
dv : ndarray with shape of x
Derivatives of v[i] with respect to x[i], diagonal elements of v's
Jacobian.
References
----------
.. [1] M.A. Branch, T.F. Coleman, and Y. Li, "A Subspace, Interior,
and Conjugate Gradient Method for Large-Scale Bound-Constrained
Minimization Problems," SIAM Journal on Scientific Computing,
Vol. 21, Number 1, pp 1-23, 1999.
"""
v = np.ones_like(x)
dv = np.zeros_like(x)
mask = (g < 0) & np.isfinite(ub)
v[mask] = ub[mask] - x[mask]
dv[mask] = -1
mask = (g > 0) & np.isfinite(lb)
v[mask] = x[mask] - lb[mask]
dv[mask] = 1
return v, dv
def reflective_transformation(y, lb, ub):
"""Compute reflective transformation and its gradient."""
if in_bounds(y, lb, ub):
return y, np.ones_like(y)
lb_finite = np.isfinite(lb)
ub_finite = np.isfinite(ub)
x = y.copy()
g_negative = np.zeros_like(y, dtype=bool)
mask = lb_finite & ~ub_finite
x[mask] = np.maximum(y[mask], 2 * lb[mask] - y[mask])
g_negative[mask] = y[mask] < lb[mask]
mask = ~lb_finite & ub_finite
x[mask] = np.minimum(y[mask], 2 * ub[mask] - y[mask])
g_negative[mask] = y[mask] > ub[mask]
mask = lb_finite & ub_finite
d = ub - lb
t = np.remainder(y[mask] - lb[mask], 2 * d[mask])
x[mask] = lb[mask] + np.minimum(t, 2 * d[mask] - t)
g_negative[mask] = t > d[mask]
g = np.ones_like(y)
g[g_negative] = -1
return x, g
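# Example (illustrative): with lb = [0.], ub = [1.] the point y = [1.2] is
# folded back into the box: t = remainder(1.2, 2.0) = 1.2 and
# x = 0 + min(1.2, 2.0 - 1.2) = 0.8, with a gradient component of -1.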
# Functions to display algorithm's progress.
def print_header_nonlinear():
print("{0:^15}{1:^15}{2:^15}{3:^15}{4:^15}{5:^15}"
.format("Iteration", "Total nfev", "Cost", "Cost reduction",
"Step norm", "Optimality"))
def print_iteration_nonlinear(iteration, nfev, cost, cost_reduction,
step_norm, optimality):
if cost_reduction is None:
cost_reduction = " " * 15
else:
cost_reduction = "{0:^15.2e}".format(cost_reduction)
if step_norm is None:
step_norm = " " * 15
else:
step_norm = "{0:^15.2e}".format(step_norm)
print("{0:^15}{1:^15}{2:^15.4e}{3}{4}{5:^15.2e}"
.format(iteration, nfev, cost, cost_reduction,
step_norm, optimality))
def print_header_linear():
print("{0:^15}{1:^15}{2:^15}{3:^15}{4:^15}"
.format("Iteration", "Cost", "Cost reduction", "Step norm",
"Optimality"))
def print_iteration_linear(iteration, cost, cost_reduction, step_norm,
optimality):
if cost_reduction is None:
cost_reduction = " " * 15
else:
cost_reduction = "{0:^15.2e}".format(cost_reduction)
if step_norm is None:
step_norm = " " * 15
else:
step_norm = "{0:^15.2e}".format(step_norm)
print("{0:^15}{1:^15.4e}{2}{3}{4:^15.2e}".format(
iteration, cost, cost_reduction, step_norm, optimality))
# Simple helper functions.
def compute_grad(J, f):
"""Compute gradient of the least-squares cost function."""
if isinstance(J, LinearOperator):
return J.rmatvec(f)
else:
return J.T.dot(f)
def compute_jac_scale(J, scale_inv_old=None):
"""Compute variables scale based on the Jacobian matrix."""
if issparse(J):
scale_inv = np.asarray(J.power(2).sum(axis=0)).ravel()**0.5
else:
scale_inv = np.sum(J**2, axis=0)**0.5
if scale_inv_old is None:
scale_inv[scale_inv == 0] = 1
else:
scale_inv = np.maximum(scale_inv, scale_inv_old)
return 1 / scale_inv, scale_inv
def left_multiplied_operator(J, d):
"""Return diag(d) J as LinearOperator."""
J = aslinearoperator(J)
def matvec(x):
return d * J.matvec(x)
def matmat(X):
return d * J.matmat(X)
def rmatvec(x):
return J.rmatvec(x.ravel() * d)
return LinearOperator(J.shape, matvec=matvec, matmat=matmat,
rmatvec=rmatvec)
def right_multiplied_operator(J, d):
"""Return J diag(d) as LinearOperator."""
J = aslinearoperator(J)
def matvec(x):
return J.matvec(np.ravel(x) * d)
def matmat(X):
return J.matmat(X * d[:, np.newaxis])
def rmatvec(x):
return d * J.rmatvec(x)
return LinearOperator(J.shape, matvec=matvec, matmat=matmat,
rmatvec=rmatvec)
def regularized_lsq_operator(J, diag):
"""Return a matrix arising in regularized least squares as LinearOperator.
The matrix is
[ J ]
[ D ]
where D is a diagonal matrix with elements from `diag`.
"""
J = aslinearoperator(J)
m, n = J.shape
def matvec(x):
return np.hstack((J.matvec(x), diag * x))
def rmatvec(x):
x1 = x[:m]
x2 = x[m:]
return J.rmatvec(x1) + diag * x2
return LinearOperator((m + n, n), matvec=matvec, rmatvec=rmatvec)
def right_multiply(J, d, copy=True):
"""Compute J diag(d).
If `copy` is False, `J` is modified in place (unless being LinearOperator).
"""
if copy and not isinstance(J, LinearOperator):
J = J.copy()
if issparse(J):
J.data *= d.take(J.indices, mode='clip') # scikit-learn recipe.
elif isinstance(J, LinearOperator):
J = right_multiplied_operator(J, d)
else:
J *= d
return J
def left_multiply(J, d, copy=True):
"""Compute diag(d) J.
If `copy` is False, `J` is modified in place (unless it is a LinearOperator).
"""
if copy and not isinstance(J, LinearOperator):
J = J.copy()
if issparse(J):
J.data *= np.repeat(d, np.diff(J.indptr)) # scikit-learn recipe.
elif isinstance(J, LinearOperator):
J = left_multiplied_operator(J, d)
else:
J *= d[:, np.newaxis]
return J
def check_termination(dF, F, dx_norm, x_norm, ratio, ftol, xtol):
"""Check termination condition for nonlinear least squares."""
ftol_satisfied = dF < ftol * F and ratio > 0.25
xtol_satisfied = dx_norm < xtol * (xtol + x_norm)
if ftol_satisfied and xtol_satisfied:
return 4
elif ftol_satisfied:
return 2
elif xtol_satisfied:
return 3
else:
return None
def scale_for_robust_loss_function(J, f, rho):
"""Scale Jacobian and residuals for a robust loss function.
Arrays are modified in place.
"""
J_scale = rho[1] + 2 * rho[2] * f**2
J_scale[J_scale < EPS] = EPS
J_scale **= 0.5
f *= rho[1] / J_scale
return left_multiply(J, J_scale, copy=False), f
|
VDuda/pyspider | refs/heads/master | pyspider/database/elasticsearch/projectdb.py | 12 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<roy@binux.me>
# http://binux.me
# Created on 2016-01-17 18:32:33
import time
import elasticsearch.helpers
from elasticsearch import Elasticsearch
from pyspider.database.base.projectdb import ProjectDB as BaseProjectDB
class ProjectDB(BaseProjectDB):
__type__ = 'project'
def __init__(self, hosts, index='pyspider'):
self.index = index
self.es = Elasticsearch(hosts=hosts)
self.es.indices.create(index=self.index, ignore=400)
if not self.es.indices.get_mapping(index=self.index, doc_type=self.__type__):
self.es.indices.put_mapping(index=self.index, doc_type=self.__type__, body={
"_all": {"enabled": False},
"properties": {
"updatetime": {"type": "double"}
}
})
def insert(self, name, obj={}):
obj = dict(obj)
obj['name'] = name
obj['updatetime'] = time.time()
obj.setdefault('group', '')
obj.setdefault('status', 'TODO')
obj.setdefault('script', '')
obj.setdefault('comments', '')
obj.setdefault('rate', 0)
obj.setdefault('burst', 0)
return self.es.index(index=self.index, doc_type=self.__type__, body=obj, id=name,
refresh=True)
def update(self, name, obj={}, **kwargs):
obj = dict(obj)
obj.update(kwargs)
obj['updatetime'] = time.time()
return self.es.update(index=self.index, doc_type=self.__type__,
body={'doc': obj}, id=name, refresh=True, ignore=404)
def get_all(self, fields=None):
for record in elasticsearch.helpers.scan(self.es, index=self.index, doc_type=self.__type__,
query={'query': {"match_all": {}}},
_source_include=fields or []):
yield record['_source']
def get(self, name, fields=None):
ret = self.es.get(index=self.index, doc_type=self.__type__, id=name,
_source_include=fields or [], ignore=404)
return ret.get('_source', None)
def check_update(self, timestamp, fields=None):
for record in elasticsearch.helpers.scan(self.es, index=self.index, doc_type=self.__type__,
query={'query': {"range": {
"updatetime": {"gte": timestamp}
}}}, _source_include=fields or []):
yield record['_source']
def drop(self, name):
return self.es.delete(index=self.index, doc_type=self.__type__, id=name, refresh=True)
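# A minimal usage sketch (the hosts value and project fields are
# illustrative):
#
# projectdb = ProjectDB(['localhost:9200'], index='pyspider')
# projectdb.insert('demo', {'script': 'pass', 'status': 'TODO'})
# print(projectdb.get('demo', fields=['name', 'status']))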
|
haoyuchen1992/osf.io | refs/heads/develop | tests/framework_tests/test_mongo.py | 37 | """
Tests related to functions in framework.mongo
"""
from unittest import TestCase
from nose.tools import * # flake8: noqa
from modularodm.exceptions import ValidationError, ValidationValueError
from framework.mongo import validators
class TestValidators(TestCase):
def _choice_validator(self, ignore_case=False):
return validators.choice_in(('value1', 'value2', 'VaLuE3', 123),
ignore_case=ignore_case)
def test_string_required_passes_with_string(self):
assert_true(validators.string_required('Hi!'))
def test_string_required_fails_when_empty(self):
with assert_raises(ValidationValueError):
validators.string_required(None)
with assert_raises(ValidationValueError):
validators.string_required('')
def test_choice_validator_finds_only_exact_match(self):
new_validator = self._choice_validator(ignore_case=False)
assert_true(new_validator('value1'))
assert_true(new_validator('VaLuE3'))
assert_true(new_validator(123))
with assert_raises(ValidationValueError):
new_validator('VALue2')
def test_choice_validator_fails_when_option_not_present(self):
new_validator = self._choice_validator(ignore_case=False)
for bad_value in ('You', '123', 456):
with assert_raises(ValidationValueError):
new_validator(bad_value)
def test_choice_validator_case_insensitive(self):
new_validator = self._choice_validator(ignore_case=True)
assert_true(new_validator('VaLuE3'))
assert_true(new_validator('value3'))
assert_true(new_validator(123))
with assert_raises(ValidationValueError):
new_validator('123')
def test_choice_validator_fails_when_value_is_unhashable_list(self):
new_validator = self._choice_validator(ignore_case=False)
with assert_raises(ValidationError):
new_validator(['e', 'i', 'e', 'i', 'o'])
def test_choice_validator_fails_when_value_is_unhashable_dict(self):
new_validator = self._choice_validator(ignore_case=False)
with assert_raises(ValidationError):
new_validator({'k': 'v', 'k2': 'v2'})
|
skython/eXe | refs/heads/master | twisted/test/process_tester.py | 137 | """Test program for processes."""
import sys, os
test_file_match = "process_test.log.*"
test_file = "process_test.log.%d" % os.getpid()
def main():
f = open(test_file, 'wb')
# stage 1
bytes = sys.stdin.read(4)
f.write("one: %r\n" % bytes)
# stage 2
sys.stdout.write(bytes)
sys.stdout.flush()
os.close(sys.stdout.fileno())
# and a one, and a two, and a...
bytes = sys.stdin.read(4)
f.write("two: %r\n" % bytes)
# stage 3
sys.stderr.write(bytes)
sys.stderr.flush()
os.close(sys.stderr.fileno())
# stage 4
bytes = sys.stdin.read(4)
f.write("three: %r\n" % bytes)
# exit with status code 23
sys.exit(23)
if __name__ == '__main__':
main()
|
Ecpy/ecpy_pulses | refs/heads/main | exopy_pulses/pulses/item.py | 1 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2016-2018 by ExopyPulses Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Basic element of a sequence. Pulse is a subclass of Item.
"""
from numbers import Real
from atom.api import (Int, Str, List, Bool, Float, Enum, ForwardTyped,
Constant, Value)
from .utils.entry_eval import HasEvaluableFields
from .utils.validators import Feval
def sequence():
from .sequences.base_sequences import BaseSequence
return BaseSequence
#: Id used to identify dependencies type.
DEP_TYPE = 'exopy.pulses.item'
class Item(HasEvaluableFields):
""" Base component of a pulse Sequence
"""
#: Identifier for the build dependency collector
dep_type = Constant(DEP_TYPE).tag(pref=True)
#: Index identifying the item inside the sequence.
index = Int()
#: Flag to disable a particular item.
enabled = Bool(True).tag(pref=True)
#: Class of the item to use when rebuilding a sequence.
item_id = Str().tag(pref=True)
#: Name of the variable which can be referenced in other items.
#: Those should not contain the index of the item.
linkable_vars = List()
#: Reference to the parent sequence.
parent = ForwardTyped(sequence)
#: Reference to the root sequence.
#: No type checking is performed to allow templates to replace the real
#: root.
# TODO use an ABC for sequences (will need a name: AbstractRootSequence)
root = Value()
#: Boolean representing whether this item has a root sequence or not
has_root = Bool(False)
#: Mode defining how the def_1 and def_2 attrs should be interpreted.
def_mode = Enum('Start/Stop',
'Start/Duration',
'Duration/Stop').tag(pref=True)
#: String representing the item's first element of definition: according
#: to the selected mode, its evaluated value will be used either for the
#: start instant or for the duration of the item.
def_1 = Str().tag(pref=True, feval=Feval(types=Real))
#: String representing the item's second element of definition: according
#: to the selected mode, its evaluated value will be used either for the
#: duration or for the stop instant of the item.
def_2 = Str().tag(pref=True, feval=Feval(types=Real))
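# For example (illustrative values only): with def_mode = 'Start/Stop',
# def_1 = '1.0' and def_2 = '3.0', a successful eval_entries() sets
# start = 1.0 and stop = 3.0, and derives duration = 2.0.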
#: Actual start instant of the item with respect to the beginning of the
#: root sequence. The unit of this time depends on the setting of the
#: context.
start = Float()
#: Actual duration of the item. The unit of this time depends on the
#: setting of the context.
duration = Float()
#: Actual stop instant of the item with respect to the beginning of the
#: root sequence. The unit of this time depends on the
#: context.
stop = Float()
def eval_entries(self, root_vars, sequence_locals, missings, errors):
""" Attempt to eval the def_1 and def_2 parameters of the item.
Parameters
----------
root_vars : dict
Dictionary of global variables for all the items. This will
typically contain the i_start/stop/duration and the root vars.
sequence_locals : dict
Dictionary of variables whose scope is limited to this item
parent.
missings : set
Set of unfound local variables.
errors : dict
Dict of the errors which happened when performing the evaluation.
Returns
-------
flag : bool
Boolean indicating whether or not the evaluation succeeded.
"""
res = super(Item, self).eval_entries(root_vars, sequence_locals,
missings, errors)
# Reference to the sequence context.
context = self.root.context
# Name of the parameter which will be evaluated.
par1 = self.def_mode.split('/')[0].lower()
par2 = self.def_mode.split('/')[1].lower()
prefix = '{}_'.format(self.index)
# Validation of the first parameter.
d1 = self._cache.get('def_1')
if 'def_1' in self._cache:
try:
d1 = context.check_time(d1)
except Exception as e:
res = False
errors[prefix + par1] = repr(e)
else:
# Check the value makes sense as a start time or duration.
if d1 >= 0:
setattr(self, par1, d1)
root_vars[prefix + par1] = d1
sequence_locals[prefix + par1] = d1
else:
res = False
if par1 == 'start':
m = 'Got a strictly negative value for start: {}'
else:
m = 'Got a strictly negative value for duration: {}'
errors[prefix + par1] = m.format(d1)
# Validation of the second parameter.
if 'def_2' in self._cache:
d2 = self._cache['def_2']
try:
d2 = context.check_time(d2)
except Exception as e:
res = False
errors[prefix + par2] = repr(e)
else:
# Check the value makes sense as a duration or stop time.
if d2 >= 0 and (par2 == 'duration' or d1 is None or d2 >= d1):
setattr(self, par2, d2)
root_vars[prefix + par2] = d2
sequence_locals[prefix + par2] = d2
else:
res = False
if par2 == 'stop' and d2 <= 0.0:
msg = 'Got a negative or zero value for stop: {}'
args = (d2,)
elif par2 == 'stop':
msg = 'Got a stop smaller than start: {} < {}'
args = (d2, d1)
elif d2 < 0.0:
msg = 'Got a negative value for duration: {}'
args = (d2,)
errors[prefix + par2] = msg.format(*args)
# Computation of the third parameter.
if 'def_1' in self._cache and 'def_2' in self._cache and res:
if self.def_mode == 'Start/Duration':
self.stop = d1 + d2
root_vars[prefix + 'stop'] = self.stop
sequence_locals[prefix + 'stop'] = self.stop
elif self.def_mode == 'Start/Stop':
self.duration = d2 - d1
root_vars[prefix + 'duration'] = self.duration
sequence_locals[prefix + 'duration'] = self.duration
else:
self.start = d2 - d1
root_vars[prefix + 'start'] = self.start
sequence_locals[prefix + 'start'] = self.start
return res
def traverse(self, depth=-1):
"""Yield an item and all of its components.
The base implementation simply yields the item itself.
Parameters
----------
depth : int
How deep should we explore the tree of items. When this number
reaches zero deeper children should not be explored but simply
yielded. Negative values should be considered valid.
"""
yield self
def format_global_vars_id(self, member):
"""Format the id under which a value is added in the global vars.
def_1 and def_2 are not added so no special case here.
"""
return '{}_{}'.format(self.index, member)
def format_error_id(self, member):
"""Create the error id for a member.
Special case def1 and 2 to provide a clearer error id.
"""
if member in ('def_1', 'def_2'):
ind = 0 if member == 'def_1' else 1
member = self.def_mode.split('/')[ind].lower()
return '{}_{}'.format(self.index, member)
# --- Private API ---------------------------------------------------------
def _default_item_id(self):
""" Default value for the item_id member.
"""
pack, _ = self.__module__.split('.', 1)
return pack + '.' + type(self).__name__
def _post_setattr_root(self, old, new):
"""Make sure that all children get all the info they need to behave
correctly when the item get its root parent.
"""
if new is None:
self.has_root = False
return
self.has_root = True
|
GStreamer/cerbero | refs/heads/master | cerbero/packages/wix.py | 1 | # cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import uuid
import shutil
from cerbero.utils import etree, to_winepath, shell
from cerbero.errors import FatalError
from cerbero.config import Platform, Architecture
from cerbero.packages import PackageType
from cerbero.packages.package import Package, SDKPackage, App, InstallerPackage
WIX_SCHEMA = "http://schemas.microsoft.com/wix/2006/wi"
class VSTemplatePackage(Package):
'''
A Package for Visual Studio templates
@cvar vs_template_name: name of the template
@type vs_template_name: string
@cvar vs_template_dir: directory of the template files
@type vs_template_dir: string
@cvar vs_wizard_dir: directory of the wizard files
@type vs_wizard_dir: string
'''
vs_template_dir = None
vs_wizard_dir = None
vs_template_name = None
def __init__(self, config, store, cookbook):
Package.__init__(self, config, store, cookbook)
def devel_files_list(self):
files = []
for f in [self.vs_template_dir, self.vs_wizard_dir]:
files += shell.ls_dir(os.path.join(self.config.prefix, f),
self.config.prefix)
return files
class WixBase():
def __init__(self, config, package):
self.config = config
self.package = package
self.platform = config.platform
self.target_platform = config.target_platform
self._with_wine = self.platform != Platform.WINDOWS
self.prefix = config.prefix
self.filled = False
self.id_count = 0
self.ids = {}
def fill(self):
if self.filled:
return
self._fill()
self.filled = True
def write(self, filepath):
self.fill()
tree = etree.ElementTree(self.root)
tree.write(filepath, encoding='utf-8', pretty_print=True)
def _format_level(self, selected):
return '1' if selected else '4'
def _format_absent(self, required):
return 'disallow' if required else 'allow'
def _add_root(self):
self.root = etree.Element("Wix", xmlns=WIX_SCHEMA)
def _format_id(self, string, replace_dots=False):
ret = string
ret = ret.replace('_', '__')
for r in ['/', '-', ' ', '@', '+']:
ret = ret.replace(r, '_')
if replace_dots:
ret = ret.replace('.', '')
# For directories starting with a number
return '_' + ret
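# Doctest-style illustration of the escaping above (the input is a
# made-up name):
#     _format_id('gstreamer-1.0 runtime')        -> '_gstreamer_1.0_runtime'
#     _format_id('gstreamer-1.0 runtime', True)  -> '_gstreamer_10_runtime'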
def _make_unique_id(self, id):
# Wix Id length is limited to 72 characters, which can be short
# for some file paths. To guarantee Id uniqueness we append a number
# to duplicated Ids, so we cap our length at 68 characters to reserve
# room for a separator and up to 3 digits.
id = id[:68]
if id not in self.ids:
self.ids[id] = 0
else:
self.ids[id] += 1
if self.ids[id] != 0:
id = '%s_%s' % (id, self.ids[id])
return id
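# For example, repeated ids get a numeric suffix while staying within
# WiX's 72-character limit:
#     _make_unique_id('libfoo_dll')  -> 'libfoo_dll'
#     _make_unique_id('libfoo_dll')  -> 'libfoo_dll_1'
#     _make_unique_id('libfoo_dll')  -> 'libfoo_dll_2'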
def _format_path_id(self, path, replace_dots=False):
ret = self._format_id(os.path.split(path)[1], replace_dots)
ret = ret.lower()
ret = self._make_unique_id(ret)
return ret
def _format_dir_id(self, string, path, replace_dots=False):
ret = self._format_id(string, replace_dots) + '_' +\
self._format_path_id(path, replace_dots)
ret = self._make_unique_id(ret)
return ret
def _format_group_id(self, string, replace_dots=False):
return self._format_id(string, replace_dots) + '_group'
def _get_uuid(self):
return "%s" % uuid.uuid1()
def _format_version(self, version):
# The heuristics to generate a valid version can get
# very complicated, so we leave it to the user
url = "https://docs.microsoft.com/en-us/windows/desktop/Msi/productversion"
versions = (version.split(".", 3) + ["0", "0", "0"])[:3]
for idx, val in enumerate(versions):
i = int(val)
if idx in [0, 1] and i > 255:
raise FatalError("Invalid version string, major and minor"
"must have a maximum value of 255.\nSee: {}".format(url))
elif idx in [2] and i > 65535:
raise FatalError("Invalid version string, build "
"must have a maximum value of 65535.\nSee: {}".format(url))
return ".".join(versions)
class MergeModule(WixBase):
'''
Creates WiX merge modules from cerbero packages
@ivar package: package with the info to build the merge package
@type package: L{cerbero.packages.package.Package}
'''
def __init__(self, config, files_list, package):
WixBase.__init__(self, config, package)
self.files_list = files_list
self._dirnodes = {}
def _fill(self):
self._add_root()
self._add_module()
self._add_package()
self._add_root_dir()
self._add_files()
def _add_module(self):
self.module = etree.SubElement(self.root, "Module",
Id=self._format_id(self.package.name),
Version=self._format_version(self.package.version),
Language='1033')
def _add_package(self):
self.pkg = etree.SubElement(self.module, "Package",
Id=self.package.uuid or self._get_uuid(),
Description=self.package.shortdesc,
Comments=self.package.longdesc,
Manufacturer=self.package.vendor)
def _add_root_dir(self):
self.rdir = etree.SubElement(self.module, "Directory",
Id='TARGETDIR', Name='SourceDir')
self._dirnodes[''] = self.rdir
def _add_files(self):
for f in self.files_list:
self._add_file(f)
def _add_directory(self, dirpath):
if dirpath in self._dirnodes:
return
parentpath = os.path.split(dirpath)[0]
# os.path.split() returns strings, so the root parent is '' and is
# already registered in self._dirnodes; no list comparison is needed.
if parentpath not in self._dirnodes:
self._add_directory(parentpath)
parent = self._dirnodes[parentpath]
dirnode = etree.SubElement(parent, "Directory",
Id=self._format_path_id(dirpath),
Name=os.path.split(dirpath)[1])
self._dirnodes[dirpath] = dirnode
def _add_file(self, filepath):
dirpath, filename = os.path.split(filepath)
self._add_directory(dirpath)
dirnode = self._dirnodes[dirpath]
component = etree.SubElement(dirnode, 'Component',
Id=self._format_path_id(filepath), Guid=self._get_uuid())
filepath = os.path.join(self.prefix, filepath)
p_id = self._format_path_id(filepath, True)
if self._with_wine:
filepath = to_winepath(filepath)
etree.SubElement(component, 'File', Id=p_id, Name=filename,
Source=filepath)
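# Rough sketch of the XML emitted for a file 'bin/foo.dll' (the Guid and
# the wine path translation are elided):
#
#     <Directory Id="_bin" Name="bin">
#       <Component Id="_foo.dll" Guid="...">
#         <File Id="_foodll" Name="foo.dll" Source="<prefix>/bin/foo.dll"/>
#       </Component>
#     </Directory>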
class Fragment(WixBase):
'''
Creates a WiX fragment from cerbero packages
@ivar package: package with the info to build the fragment
@type package: L{cerbero.packages.package.Package}
'''
def __init__(self, config, files_list, package):
WixBase.__init__(self, config, package)
self.files_list = files_list
self._dirnodes = {}
self._dirids = {}
def _fill(self):
self._add_root()
self._add_fragment()
self._add_component_group()
self._add_root_dir()
self._add_files()
def _add_fragment(self):
self.fragment = etree.SubElement(self.root, "Fragment")
def _add_component_group(self):
self.component_group = etree.SubElement(self.fragment, "ComponentGroup",
Id=self._format_group_id(self.package.name))
def _add_root_dir(self):
self.rdir = etree.SubElement(self.fragment, "DirectoryRef",
Id='SDKROOTDIR')
self._dirnodes[''] = self.rdir
def _add_files(self):
for f in self.files_list:
self._add_file(f)
def _add_directory(self, dirpath):
if dirpath in self._dirnodes:
return
parentpath = os.path.split(dirpath)[0]
# os.path.split() returns strings, so the root parent is '' and is
# already registered in self._dirnodes; no list comparison is needed.
if parentpath not in self._dirnodes:
self._add_directory(parentpath)
parent = self._dirnodes[parentpath]
dirid = self._format_dir_id(self.package.name, dirpath)
dirnode = etree.SubElement(parent, "Directory",
Id=dirid,
Name=os.path.split(dirpath)[1])
self._dirnodes[dirpath] = dirnode
self._dirids[dirpath] = dirid
def _add_file(self, filepath):
dirpath, filename = os.path.split(filepath)
self._add_directory(dirpath)
dirid = self._dirids[dirpath]
component = etree.SubElement(self.component_group, 'Component',
Id=self._format_dir_id(self.package.name, filepath),
Guid=self._get_uuid(), Directory=dirid)
filepath = os.path.join(self.prefix, filepath)
p_id = self._format_dir_id(self.package.name, filepath, True)
if self._with_wine:
filepath = to_winepath(filepath)
etree.SubElement(component, 'File', Id=p_id, Name=filename,
Source=filepath)
class VSMergeModule(MergeModule):
'''
Creates a Merge Module for Visual Studio templates
@ivar package: package with the info to build the merge package
@type package: L{cerbero.packages.package.Package}
'''
def __init__(self, config, files_list, package):
MergeModule.__init__(self, config, files_list, package)
def _add_root_dir(self):
MergeModule._add_root_dir(self)
self._add_vs_templates()
def _add_vs_templates(self):
etree.SubElement(self.module, 'PropertyRef',
Id='VS_PROJECTTEMPLATES_DIR')
etree.SubElement(self.module, 'PropertyRef',
Id='VS_WIZARDS_DIR')
etree.SubElement(self.module, 'CustomActionRef',
Id='VS2010InstallVSTemplates')
etree.SubElement(self.module, 'CustomActionRef',
Id='VC2010InstallVSTemplates')
prop = etree.SubElement(self.module, 'SetProperty',
Id="VSPROJECTTEMPLATESDIR", After="AppSearch",
Value="[VS_PROJECTTEMPLATES_DIR]\\%s" %
(self.package.vs_template_name or ""))
prop.text = "VS_PROJECTTEMPLATES_DIR"
prop = etree.SubElement(self.module, 'SetProperty',
Id="VSWIZARDSDIR", After="AppSearch",
Value="[VS_WIZARDS_DIR]\\%s" %
os.path.split(self.package.vs_template_dir)[1])
prop.text = "VS_WIZARDS_DIR"
self._tpl_dir = etree.SubElement(self.rdir, 'Directory',
Id='VSPROJECTTEMPLATESDIR')
self._wizard_dir = etree.SubElement(self.rdir, 'Directory',
Id='VSWIZARDSDIR')
self._dirnodes[self.package.vs_template_dir] = self._tpl_dir
self._dirnodes[self.package.vs_wizard_dir] = self._wizard_dir
class WixConfig(WixBase):
wix_config = 'wix/Config.wxi'
def __init__(self, config, package):
self.config_path = os.path.join(config.data_dir, self.wix_config)
self.arch = config.target_arch
self.package = package
if isinstance(self.package, App):
self.ui_type = 'WixUI_InstallDir'
else:
self.ui_type = 'WixUI_Mondo'
def write(self, output_dir):
config_out_path = os.path.join(output_dir,
os.path.basename(self.wix_config))
shutil.copy(self.config_path, config_out_path)
replacements = {
"@ProductID@": '*',
"@UpgradeCode@": self.package.get_wix_upgrade_code(),
"@Language@": '1033',
"@Manufacturer@": self.package.vendor,
"@Version@": self._format_version(self.package.version),
"@PackageComments@": self.package.longdesc,
"@Description@": self.package.shortdesc,
"@ProjectURL": self.package.url,
"@ProductName@": self._product_name(),
"@ProgramFilesFolder@": self._program_folder(),
"@Platform@": self._platform(),
"@UIType@": self.ui_type
}
shell.replace(config_out_path, replacements)
return config_out_path
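# Hedged illustration: assuming wix/Config.wxi contains template lines
# such as
#     <?define Version = "@Version@" ?>
# shell.replace() rewrites them in the copied file, e.g. to
#     <?define Version = "1.20.3" ?>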
def _product_name(self):
return '%s' % self.package.shortdesc
def _program_folder(self):
if self.arch == Architecture.X86:
return 'ProgramFilesFolder'
return 'ProgramFiles64Folder'
def _platform(self):
if self.arch == Architecture.X86_64:
return 'x64'
return 'x86'
class MSI(WixBase):
'''Creates an installer package from a
L{cerbero.packages.package.MetaPackage}
@ivar package: meta package used to create the installer package
@type package: L{cerbero.packages.package.MetaPackage}
'''
wix_sources = 'wix/installer.wxs'
REG_ROOT = 'HKLM'
BANNER_BMP = 'banner.bmp'
DIALOG_BMP = 'dialog.bmp'
LICENSE_RTF = 'license.rtf'
ICON = 'icon.ico'
def __init__(self, config, package, packages_deps, wix_config, store):
WixBase.__init__(self, config, package)
self.packages_deps = packages_deps
self.store = store
self.wix_config = wix_config
self._parse_sources()
self._add_include()
self._customize_ui()
self.product = self.root.find(".//Product")
self._add_vs_properties()
def _parse_sources(self):
sources_path = self.package.resources_wix_installer or \
os.path.join(self.config.data_dir, self.wix_sources)
with open(sources_path, 'r') as f:
self.root = etree.fromstring(f.read())
for element in self.root.iter():
element.tag = element.tag[len(WIX_SCHEMA) + 2:]
self.root.set('xmlns', WIX_SCHEMA)
self.product = self.root.find('Product')
def _add_include(self):
if self._with_wine:
self.wix_config = to_winepath(self.wix_config)
inc = etree.PI('include %s' % self.wix_config)
self.root.insert(0, inc)
def _fill(self):
self._add_install_dir()
if isinstance(self.package, App):
self._add_application_merge_module()
else:
self._add_merge_modules()
if isinstance(self.package, SDKPackage):
if self.package.package_mode == PackageType.RUNTIME:
self._add_registry_install_dir()
self._add_sdk_root_env_variable()
if isinstance(self.package, App):
self._add_start_menu_shortcuts()
self._add_get_install_dir_from_registry()
def _add_application_merge_module(self):
self.main_feature = etree.SubElement(self.product, "Feature",
Id=self._format_id(self.package.name + '_app'),
Title=self.package.title, Level='1', Display="expand",
AllowAdvertise="no", ConfigurableDirectory="INSTALLDIR")
if self.package.wix_use_fragment:
etree.SubElement(self.main_feature, 'ComponentGroupRef',
Id=self._format_group_id(self.package.name))
else:
self._add_merge_module(self.package, True, True, [])
etree.SubElement(self.installdir, 'Merge',
Id=self._package_id(self.package.name), Language='1033',
SourceFile=self.packages_deps[self.package], DiskId='1')
def _add_merge_modules(self):
self.main_feature = etree.SubElement(self.product, "Feature",
Id=self._format_id(self.package.name),
Title=self.package.title, Level='1', Display="expand",
AllowAdvertise="no", ConfigurableDirectory="INSTALLDIR")
packages = [(self.store.get_package(x[0]), x[1], x[2]) for x in
self.package.packages]
# Remove empty packages
packages = [x for x in packages if x[0] in list(self.packages_deps.keys())]
if len(packages) == 0:
raise FatalError("All packages are empty: %s" %
[x[0] for x in self.package.packages])
# Fill the list of required packages, which are the ones installed by
# a package that is always installed
req = [x[0] for x in packages if x[1] is True]
required_packages = req[:]
for p in req:
required_packages.extend(self.store.get_package_deps(p, True))
if not self.package.wix_use_fragment:
for package, required, selected in packages:
if package in self.packages_deps:
self._add_merge_module(package, required, selected,
required_packages)
# Add a merge module ref for all the packages or use ComponentGroupRef when using
# wix_use_fragment
for package, path in self.packages_deps.items():
if self.package.wix_use_fragment:
etree.SubElement(self.main_feature, 'ComponentGroupRef',
Id=self._format_group_id(package.name))
else:
etree.SubElement(self.installdir, 'Merge',
Id=self._package_id(package.name), Language='1033',
SourceFile=path, DiskId='1')
def _add_dir(self, parent, dir_id, name):
tdir = etree.SubElement(parent, "Directory",
Id=dir_id, Name=name)
return tdir
def _add_install_dir(self):
self.target_dir = self._add_dir(self.product, 'TARGETDIR', 'SourceDir')
# FIXME: Add a way to install to ProgramFilesFolder
if isinstance(self.package, App):
installdir = self._add_dir(self.target_dir,
'$(var.PlatformProgramFilesFolder)', 'ProgramFilesFolder')
self.installdir = self._add_dir(installdir, 'INSTALLDIR',
'$(var.ProductName)')
self.bindir = self._add_dir(self.installdir, 'INSTALLBINDIR', 'bin')
else:
installdir = self._add_dir(self.target_dir, 'INSTALLDIR',
self.package.get_install_dir())
versiondir = self._add_dir(installdir, "Version", self.package.sdk_version)
# archdir has to be toolchain-specific: mingw_x86_64, uwp-debug_arm64, etc
platform_arch = '_'.join(self.config._get_toolchain_target_platform_arch())
archdir = self._add_dir(versiondir, 'Architecture', platform_arch)
self.installdir = self._add_dir(archdir, 'SDKROOTDIR', '.')
def _package_id(self, package_name):
return self._format_id(package_name)
def _package_var(self):
package_type = self.package.package_mode
self.package.set_mode(PackageType.RUNTIME)
name = self.package.shortdesc
self.package.set_mode(package_type)
return name
def _registry_key(self, name):
return 'Software\\%s\\%s' % (name, self.config.target_arch)
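# For example, assuming target_arch renders as 'x86_64':
#     self._registry_key('GStreamer 1.0')  -> 'Software\GStreamer 1.0\x86_64'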
def _customize_ui(self):
# Banner Dialog and License
for path, var in [(self.BANNER_BMP, 'BannerBmp'),
(self.DIALOG_BMP, 'DialogBmp'),
(self.LICENSE_RTF, 'LicenseRtf')]:
path = self.package.relative_path(path)
if self._with_wine:
path = to_winepath(path)
if os.path.exists(path):
etree.SubElement(self.product, 'WixVariable',
Id='WixUI%s' % var, Value=path)
# Icon
path = self.package.relative_path(self.ICON)
if self._with_wine:
path = to_winepath(path)
if os.path.exists(path):
etree.SubElement(self.product, 'Icon',
Id='MainIcon', SourceFile=path)
def _add_sdk_root_env_variable(self):
envcomponent = etree.SubElement(self.installdir, 'Component',
Id='EnvironmentVariables', Guid=self._get_uuid())
# archdir has to be toolchain-specific: mingw_x86_64, uwp-debug_arm64, etc
platform_arch = '_'.join(self.config._get_toolchain_target_platform_arch())
root_env_var = self.package.get_root_env_var(platform_arch)
env = etree.SubElement(envcomponent, 'Environment', Id="SdkRootEnv",
Action="set", Part="all", Name=root_env_var,
Permanent="no", Value='[SDKROOTDIR]')
etree.SubElement(self.main_feature, 'ComponentRef',
Id='EnvironmentVariables')
def _add_registry_install_dir(self):
# Get the package name. Both devel and runtime will share the same
# installation folder
name = self._package_var().replace(' ', '')
# Add INSTALLDIR in the registry only for the runtime package
if self.package.package_mode == PackageType.RUNTIME:
regcomponent = etree.SubElement(self.installdir, 'Component',
Id='RegistryInstallDir', Guid=self._get_uuid())
regkey = etree.SubElement(regcomponent, 'RegistryKey',
Id='RegistryInstallDirRoot',
ForceCreateOnInstall='yes',
ForceDeleteOnUninstall='yes',
Key=self._registry_key(name),
Root=self.REG_ROOT)
etree.SubElement(regkey, 'RegistryValue',
Id='RegistryInstallDirValue',
Type='string', Name='InstallDir', Value='[INSTALLDIR]')
etree.SubElement(regkey, 'RegistryValue',
Id='RegistryVersionValue',
Type='string', Name='Version',
Value=self.package.version)
etree.SubElement(regkey, 'RegistryValue',
Id='RegistrySDKVersionValue',
Type='string', Name='SdkVersion',
Value=self.package.sdk_version)
etree.SubElement(self.main_feature, 'ComponentRef',
Id='RegistryInstallDir')
def _add_get_install_dir_from_registry(self):
name = self._package_var().replace(' ', '')
if isinstance(self.package, InstallerPackage):
name = self.package.windows_sdk_reg or name
key = self._registry_key(name)
# Get INSTALLDIR from the registry key
installdir_prop = etree.SubElement(self.product, 'Property',
Id='INSTALLDIR')
etree.SubElement(installdir_prop, 'RegistrySearch', Id=name,
Type="raw", Root=self.REG_ROOT, Key=key, Name='InstallDir')
def _add_merge_module(self, package, required, selected,
required_packages):
# Create a new feature for this package
feature = etree.SubElement(self.main_feature, 'Feature',
Id=self._format_id(package.name), Title=package.shortdesc,
Level=self._format_level(selected),
Display='expand', Absent=self._format_absent(required))
deps = self.store.get_package_deps(package, True)
# Add all the merge modules required by this package, but excluding
# all the ones that are forced to be installed
if not required:
mergerefs = list(set(deps) - set(required_packages))
else:
mergerefs = [x for x in deps if x in required_packages]
# don't add empty packages
mergerefs = [x for x in mergerefs if x in list(self.packages_deps.keys())]
for p in mergerefs:
etree.SubElement(feature, "MergeRef",
Id=self._package_id(p.name))
etree.SubElement(feature, "MergeRef",
Id=self._package_id(package.name))
if isinstance(package, VSTemplatePackage):
c = etree.SubElement(feature, "Condition", Level="0")
c.text = "NOT VS2010DEVENV AND NOT VC2010EXPRESS_IDE"
def _add_start_menu_shortcuts(self):
# Create a folder with the application name in the Start Menu folder
programs = etree.SubElement(self.target_dir, 'Directory',
Id='ProgramMenuFolder')
etree.SubElement(programs, 'Directory', Id='ApplicationProgramsFolder',
Name='$(var.ProductName)')
# Add the shortcut to the installer package
appf = etree.SubElement(self.product, 'DirectoryRef',
Id='ApplicationProgramsFolder')
apps = etree.SubElement(appf, 'Component', Id='ApplicationShortcut',
Guid=self._get_uuid())
# Suffix the Id with an index so multiple commands don't produce
# duplicate WiX Ids.
commands = self.package.commands[self.config.target_platform]
for i, (desc, path, _, _) in enumerate(commands):
etree.SubElement(apps, 'Shortcut',
Id='ApplicationStartMenuShortcut%d' % i, Name=desc,
Description=desc, Target='[INSTALLBINDIR]' + path,
WorkingDirectory='INSTALLBINDIR',
Icon='MainIcon')
etree.SubElement(apps, 'RemoveFolder', Id='ApplicationProgramsFolder',
On='uninstall')
etree.SubElement(apps, 'RegistryValue', Root='HKCU',
Key='Software\\Microsoft\\%s' % self.package.name,
Name='installed', Type='integer', Value='1', KeyPath='yes')
# Ref it in the main feature
etree.SubElement(self.main_feature, 'ComponentRef',
Id='ApplicationShortcut')
def _add_vs_properties(self):
etree.SubElement(self.product, 'PropertyRef', Id='VS2010DEVENV')
etree.SubElement(self.product, 'PropertyRef', Id='VC2010EXPRESS_IDE')
|
JetBrains/intellij-community | refs/heads/master | python/testData/dotNet/single_class.py | 80 | import clr
clr.AddReferenceByPartialName("SingleNameSpace")
from S<caret>ingleNameSpace import MyClass
print MyClass |
batxes/4Cin | refs/heads/master | SHH_WT_models_highres/SHH_WT_models_highres_final_output_0.1_-0.1_5000/SHH_WT_models_highres41357.py | 4 | import _surface
import chimera
try:
import chimera.runCommand
except ImportError:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except ImportError:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((2403.83, 2168.13, 5484.54), (0.7, 0.7, 0.7), 182.271)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((2224.24, 2020.81, 5614.05), (0.7, 0.7, 0.7), 258.199)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((2367.69, 2157.68, 5276.77), (0.7, 0.7, 0.7), 123.897)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((2619.77, 2042.59, 5578.82), (0.7, 0.7, 0.7), 146.739)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((2962.65, 1859.32, 5840.38), (0.7, 0.7, 0.7), 179.098)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((2897.62, 1831.31, 5265.61), (0.7, 0.7, 0.7), 148.854)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((2948.26, 1799.62, 4768.13), (0.7, 0.7, 0.7), 196.357)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((3333.88, 1893.16, 5116.47), (0.7, 0.7, 0.7), 166.873)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((3787.98, 1918.08, 5449.93), (0.7, 0.7, 0.7), 95.4711)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((3632.02, 1823.76, 5077.69), (0.7, 0.7, 0.7), 185.401)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((3259.5, 1703.15, 4768.58), (0.7, 0.7, 0.7), 151.984)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((2773.49, 1610.89, 4415.37), (0.7, 0.7, 0.7), 185.612)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((2492.95, 1306.22, 4385.82), (0.7, 0.7, 0.7), 210.273)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((2452.57, 1044.49, 4544.39), (0.7, 0.7, 0.7), 106.892)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((2268.11, 651.114, 4549.84), (0.7, 0.7, 0.7), 202.025)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((1820.56, 308.944, 4598.53), (0.7, 0.7, 0.7), 192.169)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((1261.52, 56.1352, 4629.38), (0.7, 0.7, 0.7), 241.11)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((792.297, 77.4692, 4514.24), (0.7, 0.7, 0.7), 128.465)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((309.569, 152.966, 4637.2), (0.7, 0.7, 0.7), 217.38)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((-224.944, 49.6943, 5071.74), (0.7, 0.7, 0.7), 184.555)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((116.523, 345.578, 4616.85), (0.7, 0.7, 0.7), 140.055)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((347.947, 207.343, 4252.81), (0.7, 0.7, 0.7), 169.708)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((368.905, -69.6118, 3917.89), (0.7, 0.7, 0.7), 184.639)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((304.528, 115.853, 3642.84), (0.7, 0.7, 0.7), 119.286)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((81.4857, 338.627, 3630.28), (0.7, 0.7, 0.7), 147.754)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((104.144, 519.672, 3904.53), (0.7, 0.7, 0.7), 171.4)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((525.764, 566.63, 3911.44), (0.7, 0.7, 0.7), 156.341)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((903.474, 1018.65, 3870.22), (0.7, 0.7, 0.7), 186.501)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((1260.02, 1421.9, 3760.2), (0.7, 0.7, 0.7), 308.325)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((1702.71, 1505.34, 3821.01), (0.7, 0.7, 0.7), 138.617)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((2004.39, 1487.98, 3737.18), (0.7, 0.7, 0.7), 130.03)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((1739.51, 1315.11, 3742.52), (0.7, 0.7, 0.7), 156.552)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((1576.2, 1508.18, 3890.23), (0.7, 0.7, 0.7), 183.244)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((1423.42, 1685.64, 4020.24), (0.7, 0.7, 0.7), 181.382)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((1253.69, 1787.17, 3966.46), (0.7, 0.7, 0.7), 101.943)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((929.895, 1835.13, 4129.52), (1, 0.7, 0), 138.913)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((1692.32, 1913.01, 4878.81), (0.7, 0.7, 0.7), 221.737)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((2224.54, 2268.12, 5425.35), (0.7, 0.7, 0.7), 256.38)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((2612.59, 2768.87, 5374.25), (0.7, 0.7, 0.7), 221.694)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((2854.28, 2786.62, 4743.02), (0.7, 0.7, 0.7), 259.341)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((2781.7, 2359.15, 4070.64), (0.7, 0.7, 0.7), 117.89)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((2285, 1884.45, 3581.8), (0.7, 0.7, 0.7), 116.071)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((1824, 2008.8, 3419.49), (0.7, 0.7, 0.7), 268.224)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((1875.26, 2321.87, 3476.12), (0.7, 0.7, 0.7), 386.918)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((2401.13, 2635.69, 3562.12), (0.7, 0.7, 0.7), 121.316)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((2484.93, 3075.48, 3650.86), (0.7, 0.7, 0.7), 138.363)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((1820.76, 2839.02, 3755.67), (1, 0.7, 0), 175.207)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((2134.85, 3477.41, 3837.25), (0.7, 0.7, 0.7), 131.468)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((2232.25, 4196.6, 3914.57), (0.7, 0.7, 0.7), 287.894)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((2073.98, 3936.93, 3474.48), (0.7, 0.7, 0.7), 88.1109)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((2015.43, 3366.56, 3283.83), (0.7, 0.7, 0.7), 145.385)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((2079.1, 3205.87, 3150.17), (0.7, 0.7, 0.7), 155.452)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((2246.83, 3808.27, 3061.25), (0.7, 0.7, 0.7), 145.512)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((2452.56, 4262.73, 2969.15), (0.7, 0.7, 0.7), 99.9972)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((2696.97, 4625.04, 2908.55), (0.7, 0.7, 0.7), 327.529)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((2980.07, 4067.56, 2738.73), (0.7, 0.7, 0.7), 137.983)
if "particle_56 geometry" not in marker_sets:
s=new_marker_set('particle_56 geometry')
marker_sets["particle_56 geometry"]=s
s= marker_sets["particle_56 geometry"]
mark=s.place_marker((2787.57, 3598.89, 2655.76), (0.7, 0.7, 0.7), 83.3733)
if "particle_57 geometry" not in marker_sets:
s=new_marker_set('particle_57 geometry')
marker_sets["particle_57 geometry"]=s
s= marker_sets["particle_57 geometry"]
mark=s.place_marker((2485.79, 3110.35, 2645.12), (0.7, 0.7, 0.7), 101.562)
if "particle_58 geometry" not in marker_sets:
s=new_marker_set('particle_58 geometry')
marker_sets["particle_58 geometry"]=s
s= marker_sets["particle_58 geometry"]
mark=s.place_marker((2168.33, 2708.28, 2766.35), (0.7, 0.7, 0.7), 165.689)
if "particle_59 geometry" not in marker_sets:
s=new_marker_set('particle_59 geometry')
marker_sets["particle_59 geometry"]=s
s= marker_sets["particle_59 geometry"]
mark=s.place_marker((1910.09, 2869.77, 2845.78), (0.7, 0.7, 0.7), 136.925)
if "particle_60 geometry" not in marker_sets:
s=new_marker_set('particle_60 geometry')
marker_sets["particle_60 geometry"]=s
s= marker_sets["particle_60 geometry"]
mark=s.place_marker((1781.22, 2965.42, 2865.44), (0.7, 0.7, 0.7), 123.389)
if "particle_61 geometry" not in marker_sets:
s=new_marker_set('particle_61 geometry')
marker_sets["particle_61 geometry"]=s
s= marker_sets["particle_61 geometry"]
mark=s.place_marker((1955.83, 3250.4, 2574.52), (0.7, 0.7, 0.7), 184.47)
if "particle_62 geometry" not in marker_sets:
s=new_marker_set('particle_62 geometry')
marker_sets["particle_62 geometry"]=s
s= marker_sets["particle_62 geometry"]
mark=s.place_marker((2167.05, 3773.97, 2009.35), (0.7, 0.7, 0.7), 148.473)
if "particle_63 geometry" not in marker_sets:
s=new_marker_set('particle_63 geometry')
marker_sets["particle_63 geometry"]=s
s= marker_sets["particle_63 geometry"]
mark=s.place_marker((2351.55, 4467.34, 1343.73), (0.7, 0.7, 0.7), 241.406)
if "particle_64 geometry" not in marker_sets:
s=new_marker_set('particle_64 geometry')
marker_sets["particle_64 geometry"]=s
s= marker_sets["particle_64 geometry"]
mark=s.place_marker((2694.41, 4096.78, 1752.12), (0.7, 0.7, 0.7), 182.736)
if "particle_65 geometry" not in marker_sets:
s=new_marker_set('particle_65 geometry')
marker_sets["particle_65 geometry"]=s
s= marker_sets["particle_65 geometry"]
mark=s.place_marker((2751.09, 3876.61, 2154.99), (0.7, 0.7, 0.7), 166.62)
if "particle_66 geometry" not in marker_sets:
s=new_marker_set('particle_66 geometry')
marker_sets["particle_66 geometry"]=s
s= marker_sets["particle_66 geometry"]
mark=s.place_marker((2549.62, 3768.01, 2270.79), (0.7, 0.7, 0.7), 113.872)
if "particle_67 geometry" not in marker_sets:
s=new_marker_set('particle_67 geometry')
marker_sets["particle_67 geometry"]=s
s= marker_sets["particle_67 geometry"]
mark=s.place_marker((2399.56, 3589.87, 2494.73), (0.7, 0.7, 0.7), 110.065)
if "particle_68 geometry" not in marker_sets:
s=new_marker_set('particle_68 geometry')
marker_sets["particle_68 geometry"]=s
s= marker_sets["particle_68 geometry"]
mark=s.place_marker((2162.85, 3573.76, 2796.27), (0.7, 0.7, 0.7), 150.08)
if "particle_69 geometry" not in marker_sets:
s=new_marker_set('particle_69 geometry')
marker_sets["particle_69 geometry"]=s
s= marker_sets["particle_69 geometry"]
mark=s.place_marker((1875.7, 3671.8, 3156.09), (0.7, 0.7, 0.7), 118.525)
if "particle_70 geometry" not in marker_sets:
s=new_marker_set('particle_70 geometry')
marker_sets["particle_70 geometry"]=s
s= marker_sets["particle_70 geometry"]
mark=s.place_marker((1466.85, 3907.48, 3417.37), (0.7, 0.7, 0.7), 163.955)
if "particle_71 geometry" not in marker_sets:
s=new_marker_set('particle_71 geometry')
marker_sets["particle_71 geometry"]=s
s= marker_sets["particle_71 geometry"]
mark=s.place_marker((1258.18, 4203.2, 3255.12), (0.7, 0.7, 0.7), 170.131)
if "particle_72 geometry" not in marker_sets:
s=new_marker_set('particle_72 geometry')
marker_sets["particle_72 geometry"]=s
s= marker_sets["particle_72 geometry"]
mark=s.place_marker((1641.52, 4405.82, 2631.42), (0.7, 0.7, 0.7), 78.2127)
if "particle_73 geometry" not in marker_sets:
s=new_marker_set('particle_73 geometry')
marker_sets["particle_73 geometry"]=s
s= marker_sets["particle_73 geometry"]
mark=s.place_marker((2161.57, 4572.42, 2011.62), (0.7, 0.7, 0.7), 251.896)
if "particle_74 geometry" not in marker_sets:
s=new_marker_set('particle_74 geometry')
marker_sets["particle_74 geometry"]=s
s= marker_sets["particle_74 geometry"]
mark=s.place_marker((2719.93, 4595.38, 1636.54), (0.7, 0.7, 0.7), 167.55)
if "particle_75 geometry" not in marker_sets:
s=new_marker_set('particle_75 geometry')
marker_sets["particle_75 geometry"]=s
s= marker_sets["particle_75 geometry"]
mark=s.place_marker((3104.84, 4458.52, 1560.94), (0.7, 0.7, 0.7), 167.846)
if "particle_76 geometry" not in marker_sets:
s=new_marker_set('particle_76 geometry')
marker_sets["particle_76 geometry"]=s
s= marker_sets["particle_76 geometry"]
mark=s.place_marker((2748.07, 4506.75, 1224.79), (0.7, 0.7, 0.7), 259.68)
if "particle_77 geometry" not in marker_sets:
s=new_marker_set('particle_77 geometry')
marker_sets["particle_77 geometry"]=s
s= marker_sets["particle_77 geometry"]
mark=s.place_marker((2335.88, 4713.76, 1328.44), (0.7, 0.7, 0.7), 80.2854)
if "particle_78 geometry" not in marker_sets:
s=new_marker_set('particle_78 geometry')
marker_sets["particle_78 geometry"]=s
s= marker_sets["particle_78 geometry"]
mark=s.place_marker((2258.34, 4935.78, 1284.49), (0.7, 0.7, 0.7), 82.4427)
if "particle_79 geometry" not in marker_sets:
s=new_marker_set('particle_79 geometry')
marker_sets["particle_79 geometry"]=s
s= marker_sets["particle_79 geometry"]
mark=s.place_marker((2182.37, 5060.67, 932.261), (0.7, 0.7, 0.7), 212.811)
if "particle_80 geometry" not in marker_sets:
s=new_marker_set('particle_80 geometry')
marker_sets["particle_80 geometry"]=s
s= marker_sets["particle_80 geometry"]
mark=s.place_marker((2168.83, 4360.48, 637.415), (0.7, 0.7, 0.7), 176.391)
if "particle_81 geometry" not in marker_sets:
s=new_marker_set('particle_81 geometry')
marker_sets["particle_81 geometry"]=s
s= marker_sets["particle_81 geometry"]
mark=s.place_marker((2159.95, 3689.13, 909.48), (0.7, 0.7, 0.7), 99.3204)
if "particle_82 geometry" not in marker_sets:
s=new_marker_set('particle_82 geometry')
marker_sets["particle_82 geometry"]=s
s= marker_sets["particle_82 geometry"]
mark=s.place_marker((1944.2, 3342.47, 1342.19), (0.7, 0.7, 0.7), 166.62)
if "particle_83 geometry" not in marker_sets:
s=new_marker_set('particle_83 geometry')
marker_sets["particle_83 geometry"]=s
s= marker_sets["particle_83 geometry"]
mark=s.place_marker((1687.66, 3176.06, 1410.96), (0.7, 0.7, 0.7), 102.831)
if "particle_84 geometry" not in marker_sets:
s=new_marker_set('particle_84 geometry')
marker_sets["particle_84 geometry"]=s
s= marker_sets["particle_84 geometry"]
mark=s.place_marker((1789.14, 3684.36, 707.142), (0.7, 0.7, 0.7), 65.0997)
if "particle_85 geometry" not in marker_sets:
s=new_marker_set('particle_85 geometry')
marker_sets["particle_85 geometry"]=s
s= marker_sets["particle_85 geometry"]
mark=s.place_marker((2164.35, 3910.5, 1023.43), (0.7, 0.7, 0.7), 92.1294)
if "particle_86 geometry" not in marker_sets:
s=new_marker_set('particle_86 geometry')
marker_sets["particle_86 geometry"]=s
s= marker_sets["particle_86 geometry"]
mark=s.place_marker((2451.63, 3886.29, 1558.84), (0.7, 0.7, 0.7), 194.791)
if "particle_87 geometry" not in marker_sets:
s=new_marker_set('particle_87 geometry')
marker_sets["particle_87 geometry"]=s
s= marker_sets["particle_87 geometry"]
mark=s.place_marker((2789.28, 3842.05, 1899.25), (0.7, 0.7, 0.7), 120.766)
if "particle_88 geometry" not in marker_sets:
s=new_marker_set('particle_88 geometry')
marker_sets["particle_88 geometry"]=s
s= marker_sets["particle_88 geometry"]
mark=s.place_marker((3209.6, 4108.56, 1583.5), (0.7, 0.7, 0.7), 217.803)
if "particle_89 geometry" not in marker_sets:
s=new_marker_set('particle_89 geometry')
marker_sets["particle_89 geometry"]=s
s= marker_sets["particle_89 geometry"]
mark=s.place_marker((2933.38, 4307.37, 1707.05), (0.7, 0.7, 0.7), 115.775)
if "particle_90 geometry" not in marker_sets:
s=new_marker_set('particle_90 geometry')
marker_sets["particle_90 geometry"]=s
s= marker_sets["particle_90 geometry"]
mark=s.place_marker((2667.79, 4353.54, 2013.88), (0.7, 0.7, 0.7), 115.648)
if "particle_91 geometry" not in marker_sets:
s=new_marker_set('particle_91 geometry')
marker_sets["particle_91 geometry"]=s
s= marker_sets["particle_91 geometry"]
mark=s.place_marker((2659.46, 4048.92, 2153.29), (0.7, 0.7, 0.7), 83.8386)
if "particle_92 geometry" not in marker_sets:
s=new_marker_set('particle_92 geometry')
marker_sets["particle_92 geometry"]=s
s= marker_sets["particle_92 geometry"]
mark=s.place_marker((2997.26, 3892, 2223.2), (0.7, 0.7, 0.7), 124.32)
if "particle_93 geometry" not in marker_sets:
s=new_marker_set('particle_93 geometry')
marker_sets["particle_93 geometry"]=s
s= marker_sets["particle_93 geometry"]
mark=s.place_marker((3362.3, 3804.07, 2487.42), (0.7, 0.7, 0.7), 185.993)
if "particle_94 geometry" not in marker_sets:
s=new_marker_set('particle_94 geometry')
marker_sets["particle_94 geometry"]=s
s= marker_sets["particle_94 geometry"]
mark=s.place_marker((3746.32, 4127.77, 2873.15), (0.7, 0.7, 0.7), 238.826)
if "particle_95 geometry" not in marker_sets:
s=new_marker_set('particle_95 geometry')
marker_sets["particle_95 geometry"]=s
s= marker_sets["particle_95 geometry"]
mark=s.place_marker((3770.22, 4617.97, 3121.1), (0.7, 0.7, 0.7), 128.465)
if "particle_96 geometry" not in marker_sets:
s=new_marker_set('particle_96 geometry')
marker_sets["particle_96 geometry"]=s
s= marker_sets["particle_96 geometry"]
mark=s.place_marker((3167.21, 4642.95, 2987.68), (0.7, 0.7, 0.7), 203.209)
if "particle_97 geometry" not in marker_sets:
s=new_marker_set('particle_97 geometry')
marker_sets["particle_97 geometry"]=s
s= marker_sets["particle_97 geometry"]
mark=s.place_marker((2927.04, 4324.21, 2690.54), (0.7, 0.7, 0.7), 160.486)
if "particle_98 geometry" not in marker_sets:
s=new_marker_set('particle_98 geometry')
marker_sets["particle_98 geometry"]=s
s= marker_sets["particle_98 geometry"]
mark=s.place_marker((3174.47, 4219.87, 2478.22), (0.7, 0.7, 0.7), 149.277)
if "particle_99 geometry" not in marker_sets:
s=new_marker_set('particle_99 geometry')
marker_sets["particle_99 geometry"]=s
s= marker_sets["particle_99 geometry"]
mark=s.place_marker((3347.7, 4716.85, 2335.47), (0.7, 0.7, 0.7), 35.7435)
if "particle_100 geometry" not in marker_sets:
s=new_marker_set('particle_100 geometry')
marker_sets["particle_100 geometry"]=s
s= marker_sets["particle_100 geometry"]
mark=s.place_marker((2571.2, 4122.98, 2398.41), (0.7, 0.7, 0.7), 98.3898)
if "particle_101 geometry" not in marker_sets:
s=new_marker_set('particle_101 geometry')
marker_sets["particle_101 geometry"]=s
s= marker_sets["particle_101 geometry"]
mark=s.place_marker((1921.78, 3304.63, 2433.44), (0.7, 0.7, 0.7), 188.404)
if "particle_102 geometry" not in marker_sets:
s=new_marker_set('particle_102 geometry')
marker_sets["particle_102 geometry"]=s
s= marker_sets["particle_102 geometry"]
mark=s.place_marker((1950.04, 2788.42, 2345.43), (0.7, 0.7, 0.7), 110.318)
if "particle_103 geometry" not in marker_sets:
s=new_marker_set('particle_103 geometry')
marker_sets["particle_103 geometry"]=s
s= marker_sets["particle_103 geometry"]
mark=s.place_marker((2160.82, 3009.75, 2106.5), (0.7, 0.7, 0.7), 127.534)
if "particle_104 geometry" not in marker_sets:
s=new_marker_set('particle_104 geometry')
marker_sets["particle_104 geometry"]=s
s= marker_sets["particle_104 geometry"]
mark=s.place_marker((2371.34, 3343.66, 2066.31), (0.7, 0.7, 0.7), 91.368)
if "particle_105 geometry" not in marker_sets:
s=new_marker_set('particle_105 geometry')
marker_sets["particle_105 geometry"]=s
s= marker_sets["particle_105 geometry"]
mark=s.place_marker((2575.63, 3709.62, 2142.68), (0.7, 0.7, 0.7), 131.045)
if "particle_106 geometry" not in marker_sets:
s=new_marker_set('particle_106 geometry')
marker_sets["particle_106 geometry"]=s
s= marker_sets["particle_106 geometry"]
mark=s.place_marker((2752.92, 3993.64, 2424.52), (0.7, 0.7, 0.7), 143.608)
if "particle_107 geometry" not in marker_sets:
s=new_marker_set('particle_107 geometry')
marker_sets["particle_107 geometry"]=s
s= marker_sets["particle_107 geometry"]
mark=s.place_marker((3120.3, 3982.39, 2607.94), (0.7, 0.7, 0.7), 135.783)
if "particle_108 geometry" not in marker_sets:
s=new_marker_set('particle_108 geometry')
marker_sets["particle_108 geometry"]=s
s= marker_sets["particle_108 geometry"]
mark=s.place_marker((3431.15, 3933.57, 2755.64), (0.7, 0.7, 0.7), 92.5947)
if "particle_109 geometry" not in marker_sets:
s=new_marker_set('particle_109 geometry')
marker_sets["particle_109 geometry"]=s
s= marker_sets["particle_109 geometry"]
mark=s.place_marker((3452.96, 3711.58, 2595.71), (0.7, 0.7, 0.7), 150.123)
if "particle_110 geometry" not in marker_sets:
s=new_marker_set('particle_110 geometry')
marker_sets["particle_110 geometry"]=s
s= marker_sets["particle_110 geometry"]
mark=s.place_marker((3427.13, 3532.13, 2401.56), (0.7, 0.7, 0.7), 121.57)
if "particle_111 geometry" not in marker_sets:
s=new_marker_set('particle_111 geometry')
marker_sets["particle_111 geometry"]=s
s= marker_sets["particle_111 geometry"]
mark=s.place_marker((3677.46, 3522.47, 2208.23), (0.7, 0.7, 0.7), 104.777)
if "particle_112 geometry" not in marker_sets:
s=new_marker_set('particle_112 geometry')
marker_sets["particle_112 geometry"]=s
s= marker_sets["particle_112 geometry"]
mark=s.place_marker((3465.72, 3165.84, 2195.26), (0.7, 0.7, 0.7), 114.844)
if "particle_113 geometry" not in marker_sets:
s=new_marker_set('particle_113 geometry')
marker_sets["particle_113 geometry"]=s
s= marker_sets["particle_113 geometry"]
mark=s.place_marker((3232.67, 2784.31, 2164.81), (0.7, 0.7, 0.7), 150.588)
if "particle_114 geometry" not in marker_sets:
s=new_marker_set('particle_114 geometry')
marker_sets["particle_114 geometry"]=s
s= marker_sets["particle_114 geometry"]
mark=s.place_marker((2799.94, 2791.83, 2136.15), (0.7, 0.7, 0.7), 103.55)
if "particle_115 geometry" not in marker_sets:
s=new_marker_set('particle_115 geometry')
marker_sets["particle_115 geometry"]=s
s= marker_sets["particle_115 geometry"]
mark=s.place_marker((2337.8, 2952.61, 1839.75), (0.7, 0.7, 0.7), 215.392)
if "particle_116 geometry" not in marker_sets:
s=new_marker_set('particle_116 geometry')
marker_sets["particle_116 geometry"]=s
s= marker_sets["particle_116 geometry"]
mark=s.place_marker((1817.79, 2986.9, 1587.9), (0.7, 0.7, 0.7), 99.9126)
if "particle_117 geometry" not in marker_sets:
s=new_marker_set('particle_117 geometry')
marker_sets["particle_117 geometry"]=s
s= marker_sets["particle_117 geometry"]
mark=s.place_marker((1608.46, 3239.44, 954.755), (0.7, 0.7, 0.7), 99.7857)
if "particle_118 geometry" not in marker_sets:
s=new_marker_set('particle_118 geometry')
marker_sets["particle_118 geometry"]=s
s= marker_sets["particle_118 geometry"]
mark=s.place_marker((1506.39, 3593.37, 551.148), (0.7, 0.7, 0.7), 109.98)
if "particle_119 geometry" not in marker_sets:
s=new_marker_set('particle_119 geometry')
marker_sets["particle_119 geometry"]=s
s= marker_sets["particle_119 geometry"]
mark=s.place_marker((1828.42, 3268.57, 769.624), (0.7, 0.7, 0.7), 102.831)
if "particle_120 geometry" not in marker_sets:
s=new_marker_set('particle_120 geometry')
marker_sets["particle_120 geometry"]=s
s= marker_sets["particle_120 geometry"]
mark=s.place_marker((1987.57, 3251.11, 1143.88), (0.7, 0.7, 0.7), 103.593)
if "particle_121 geometry" not in marker_sets:
s=new_marker_set('particle_121 geometry')
marker_sets["particle_121 geometry"]=s
s= marker_sets["particle_121 geometry"]
mark=s.place_marker((2337.55, 3344.1, 1510.72), (0.7, 0.7, 0.7), 173.472)
if "particle_122 geometry" not in marker_sets:
s=new_marker_set('particle_122 geometry')
marker_sets["particle_122 geometry"]=s
s= marker_sets["particle_122 geometry"]
mark=s.place_marker((2640.39, 3789.23, 1661.33), (0.7, 0.7, 0.7), 113.575)
if "particle_123 geometry" not in marker_sets:
s=new_marker_set('particle_123 geometry')
marker_sets["particle_123 geometry"]=s
s= marker_sets["particle_123 geometry"]
mark=s.place_marker((2949.52, 3875.02, 2010.68), (0.7, 0.7, 0.7), 128.296)
if "particle_124 geometry" not in marker_sets:
s=new_marker_set('particle_124 geometry')
marker_sets["particle_124 geometry"]=s
s= marker_sets["particle_124 geometry"]
mark=s.place_marker((3338.57, 3972.16, 2230.47), (0.7, 0.7, 0.7), 145.004)
if "particle_125 geometry" not in marker_sets:
s=new_marker_set('particle_125 geometry')
marker_sets["particle_125 geometry"]=s
s= marker_sets["particle_125 geometry"]
mark=s.place_marker((3647.14, 3841.25, 2602.6), (0.7, 0.7, 0.7), 148.261)
if "particle_126 geometry" not in marker_sets:
s=new_marker_set('particle_126 geometry')
marker_sets["particle_126 geometry"]=s
s= marker_sets["particle_126 geometry"]
mark=s.place_marker((4114.57, 4097.7, 2900.89), (0.7, 0.7, 0.7), 127.704)
if "particle_127 geometry" not in marker_sets:
s=new_marker_set('particle_127 geometry')
marker_sets["particle_127 geometry"]=s
s= marker_sets["particle_127 geometry"]
mark=s.place_marker((4412.2, 4499.03, 3162.19), (0.7, 0.7, 0.7), 129.607)
if "particle_128 geometry" not in marker_sets:
s=new_marker_set('particle_128 geometry')
marker_sets["particle_128 geometry"]=s
s= marker_sets["particle_128 geometry"]
mark=s.place_marker((3988.75, 4653.86, 2899.36), (0.7, 0.7, 0.7), 139.759)
if "particle_129 geometry" not in marker_sets:
s=new_marker_set('particle_129 geometry')
marker_sets["particle_129 geometry"]=s
s= marker_sets["particle_129 geometry"]
mark=s.place_marker((3338.14, 4569.64, 2609.99), (0.7, 0.7, 0.7), 118.567)
if "particle_130 geometry" not in marker_sets:
s=new_marker_set('particle_130 geometry')
marker_sets["particle_130 geometry"]=s
s= marker_sets["particle_130 geometry"]
mark=s.place_marker((3130.48, 4437.44, 2187.13), (0.7, 0.7, 0.7), 136.164)
if "particle_131 geometry" not in marker_sets:
s=new_marker_set('particle_131 geometry')
marker_sets["particle_131 geometry"]=s
s= marker_sets["particle_131 geometry"]
mark=s.place_marker((2932.49, 4133.47, 1891.62), (0.7, 0.7, 0.7), 121.655)
if "particle_132 geometry" not in marker_sets:
s=new_marker_set('particle_132 geometry')
marker_sets["particle_132 geometry"]=s
s= marker_sets["particle_132 geometry"]
mark=s.place_marker((2923.62, 3805.06, 1633.57), (0.7, 0.7, 0.7), 127.492)
if "particle_133 geometry" not in marker_sets:
s=new_marker_set('particle_133 geometry')
marker_sets["particle_133 geometry"]=s
s= marker_sets["particle_133 geometry"]
mark=s.place_marker((3049.13, 3727.65, 1239.85), (0.7, 0.7, 0.7), 138.617)
if "particle_134 geometry" not in marker_sets:
s=new_marker_set('particle_134 geometry')
marker_sets["particle_134 geometry"]=s
s= marker_sets["particle_134 geometry"]
mark=s.place_marker((2998.64, 3386.07, 1107.74), (0.7, 0.7, 0.7), 120.766)
if "particle_135 geometry" not in marker_sets:
s=new_marker_set('particle_135 geometry')
marker_sets["particle_135 geometry"]=s
s= marker_sets["particle_135 geometry"]
mark=s.place_marker((2719.62, 3188.32, 1091.17), (0.7, 0.7, 0.7), 145.893)
if "particle_136 geometry" not in marker_sets:
s=new_marker_set('particle_136 geometry')
marker_sets["particle_136 geometry"]=s
s= marker_sets["particle_136 geometry"]
mark=s.place_marker((2430.93, 3145.58, 1485.27), (0.7, 0.7, 0.7), 185.02)
if "particle_137 geometry" not in marker_sets:
s=new_marker_set('particle_137 geometry')
marker_sets["particle_137 geometry"]=s
s= marker_sets["particle_137 geometry"]
mark=s.place_marker((2210.46, 2921.38, 1910.04), (0.7, 0.7, 0.7), 221.314)
if "particle_138 geometry" not in marker_sets:
s=new_marker_set('particle_138 geometry')
marker_sets["particle_138 geometry"]=s
s= marker_sets["particle_138 geometry"]
mark=s.place_marker((2246.14, 2620.09, 2286.87), (0.7, 0.7, 0.7), 165.139)
if "particle_139 geometry" not in marker_sets:
s=new_marker_set('particle_139 geometry')
marker_sets["particle_139 geometry"]=s
s= marker_sets["particle_139 geometry"]
mark=s.place_marker((2381.71, 2797.69, 2489.19), (0.7, 0.7, 0.7), 179.437)
if "particle_140 geometry" not in marker_sets:
s=new_marker_set('particle_140 geometry')
marker_sets["particle_140 geometry"]=s
s= marker_sets["particle_140 geometry"]
mark=s.place_marker((2252.98, 3077.91, 2232.59), (0.7, 0.7, 0.7), 137.898)
if "particle_141 geometry" not in marker_sets:
s=new_marker_set('particle_141 geometry')
marker_sets["particle_141 geometry"]=s
s= marker_sets["particle_141 geometry"]
mark=s.place_marker((2311.87, 3358.71, 2067.73), (0.7, 0.7, 0.7), 124.658)
if "particle_142 geometry" not in marker_sets:
s=new_marker_set('particle_142 geometry')
marker_sets["particle_142 geometry"]=s
s= marker_sets["particle_142 geometry"]
mark=s.place_marker((2590.5, 3406.92, 1875.17), (0.7, 0.7, 0.7), 97.7553)
if "particle_143 geometry" not in marker_sets:
s=new_marker_set('particle_143 geometry')
marker_sets["particle_143 geometry"]=s
s= marker_sets["particle_143 geometry"]
mark=s.place_marker((2774.55, 3488.44, 1643.79), (0.7, 0.7, 0.7), 92.9331)
if "particle_144 geometry" not in marker_sets:
s=new_marker_set('particle_144 geometry')
marker_sets["particle_144 geometry"]=s
s= marker_sets["particle_144 geometry"]
mark=s.place_marker((2873.36, 3683.84, 1372.41), (0.7, 0.7, 0.7), 123.135)
if "particle_145 geometry" not in marker_sets:
s=new_marker_set('particle_145 geometry')
marker_sets["particle_145 geometry"]=s
s= marker_sets["particle_145 geometry"]
mark=s.place_marker((2626.22, 3495.42, 1629.61), (0.7, 0.7, 0.7), 125.716)
if "particle_146 geometry" not in marker_sets:
s=new_marker_set('particle_146 geometry')
marker_sets["particle_146 geometry"]=s
s= marker_sets["particle_146 geometry"]
mark=s.place_marker((2609.7, 3311.92, 1886.78), (0.7, 0.7, 0.7), 127.534)
if "particle_147 geometry" not in marker_sets:
s=new_marker_set('particle_147 geometry')
marker_sets["particle_147 geometry"]=s
s= marker_sets["particle_147 geometry"]
mark=s.place_marker((2802.82, 3431.57, 2061.54), (0.7, 0.7, 0.7), 94.9212)
if "particle_148 geometry" not in marker_sets:
s=new_marker_set('particle_148 geometry')
marker_sets["particle_148 geometry"]=s
s= marker_sets["particle_148 geometry"]
mark=s.place_marker((2677.4, 3182.38, 2392.7), (0.7, 0.7, 0.7), 137.644)
if "particle_149 geometry" not in marker_sets:
s=new_marker_set('particle_149 geometry')
marker_sets["particle_149 geometry"]=s
s= marker_sets["particle_149 geometry"]
mark=s.place_marker((2697.55, 2994.51, 2689.11), (0.7, 0.7, 0.7), 149.277)
if "particle_150 geometry" not in marker_sets:
s=new_marker_set('particle_150 geometry')
marker_sets["particle_150 geometry"]=s
s= marker_sets["particle_150 geometry"]
mark=s.place_marker((2942.02, 2833.9, 2500.11), (0.7, 0.7, 0.7), 103.677)
if "particle_151 geometry" not in marker_sets:
s=new_marker_set('particle_151 geometry')
marker_sets["particle_151 geometry"]=s
s= marker_sets["particle_151 geometry"]
mark=s.place_marker((3351.85, 2845.86, 2254.03), (0.7, 0.7, 0.7), 99.6588)
if "particle_152 geometry" not in marker_sets:
s=new_marker_set('particle_152 geometry')
marker_sets["particle_152 geometry"]=s
s= marker_sets["particle_152 geometry"]
mark=s.place_marker((3688.54, 2851.72, 2095.19), (0.7, 0.7, 0.7), 134.133)
if "particle_153 geometry" not in marker_sets:
s=new_marker_set('particle_153 geometry')
marker_sets["particle_153 geometry"]=s
s= marker_sets["particle_153 geometry"]
mark=s.place_marker((3395.45, 2733.43, 1980.72), (0.7, 0.7, 0.7), 173.007)
if "particle_154 geometry" not in marker_sets:
s=new_marker_set('particle_154 geometry')
marker_sets["particle_154 geometry"]=s
s= marker_sets["particle_154 geometry"]
mark=s.place_marker((2917.17, 2619.92, 2270.64), (0.7, 0.7, 0.7), 141.028)
if "particle_155 geometry" not in marker_sets:
s=new_marker_set('particle_155 geometry')
marker_sets["particle_155 geometry"]=s
s= marker_sets["particle_155 geometry"]
mark=s.place_marker((2496.24, 2629.14, 2480.88), (0.7, 0.7, 0.7), 161.121)
if "particle_156 geometry" not in marker_sets:
s=new_marker_set('particle_156 geometry')
marker_sets["particle_156 geometry"]=s
s= marker_sets["particle_156 geometry"]
mark=s.place_marker((2435.31, 2939.89, 2630.89), (0.7, 0.7, 0.7), 119.582)
if "particle_157 geometry" not in marker_sets:
s=new_marker_set('particle_157 geometry')
marker_sets["particle_157 geometry"]=s
s= marker_sets["particle_157 geometry"]
mark=s.place_marker((2623.24, 3241.41, 2427.72), (0.7, 0.7, 0.7), 137.094)
if "particle_158 geometry" not in marker_sets:
s=new_marker_set('particle_158 geometry')
marker_sets["particle_158 geometry"]=s
s= marker_sets["particle_158 geometry"]
mark=s.place_marker((2681.42, 3582.82, 2060.1), (0.7, 0.7, 0.7), 149.234)
if "particle_159 geometry" not in marker_sets:
s=new_marker_set('particle_159 geometry')
marker_sets["particle_159 geometry"]=s
s= marker_sets["particle_159 geometry"]
mark=s.place_marker((2281.08, 3510.84, 1877.95), (0.7, 0.7, 0.7), 151.011)
if "particle_160 geometry" not in marker_sets:
s=new_marker_set('particle_160 geometry')
marker_sets["particle_160 geometry"]=s
s= marker_sets["particle_160 geometry"]
mark=s.place_marker((1909.36, 3129.02, 1910.41), (0.7, 0.7, 0.7), 184.216)
if "particle_161 geometry" not in marker_sets:
s=new_marker_set('particle_161 geometry')
marker_sets["particle_161 geometry"]=s
s= marker_sets["particle_161 geometry"]
mark=s.place_marker((2036.06, 2786.29, 1713.77), (0.7, 0.7, 0.7), 170.596)
if "particle_162 geometry" not in marker_sets:
s=new_marker_set('particle_162 geometry')
marker_sets["particle_162 geometry"]=s
s= marker_sets["particle_162 geometry"]
mark=s.place_marker((2311.5, 3111.77, 1232.52), (0.7, 0.7, 0.7), 215.603)
if "particle_163 geometry" not in marker_sets:
s=new_marker_set('particle_163 geometry')
marker_sets["particle_163 geometry"]=s
s= marker_sets["particle_163 geometry"]
mark=s.place_marker((2653.52, 3628.41, 583.238), (0.7, 0.7, 0.7), 79.0164)
if "particle_164 geometry" not in marker_sets:
s=new_marker_set('particle_164 geometry')
marker_sets["particle_164 geometry"]=s
s= marker_sets["particle_164 geometry"]
mark=s.place_marker((2956.11, 3493.92, 513.844), (0.7, 0.7, 0.7), 77.2821)
if "particle_165 geometry" not in marker_sets:
s=new_marker_set('particle_165 geometry')
marker_sets["particle_165 geometry"]=s
s= marker_sets["particle_165 geometry"]
mark=s.place_marker((3089.71, 3341.95, 802.005), (0.7, 0.7, 0.7), 188.658)
if "particle_166 geometry" not in marker_sets:
s=new_marker_set('particle_166 geometry')
marker_sets["particle_166 geometry"]=s
s= marker_sets["particle_166 geometry"]
mark=s.place_marker((3110.47, 3045.57, 706.039), (0.7, 0.7, 0.7), 115.437)
if "particle_167 geometry" not in marker_sets:
s=new_marker_set('particle_167 geometry')
marker_sets["particle_167 geometry"]=s
s= marker_sets["particle_167 geometry"]
mark=s.place_marker((2797.16, 2964.6, 1219.1), (0.7, 0.7, 0.7), 88.4916)
if "particle_168 geometry" not in marker_sets:
s=new_marker_set('particle_168 geometry')
marker_sets["particle_168 geometry"]=s
s= marker_sets["particle_168 geometry"]
mark=s.place_marker((2469.32, 2888.75, 1751.07), (0.7, 0.7, 0.7), 108.88)
if "particle_169 geometry" not in marker_sets:
s=new_marker_set('particle_169 geometry')
marker_sets["particle_169 geometry"]=s
s= marker_sets["particle_169 geometry"]
mark=s.place_marker((2195.47, 2984.45, 1965.25), (0.7, 0.7, 0.7), 172.119)
if "particle_170 geometry" not in marker_sets:
s=new_marker_set('particle_170 geometry')
marker_sets["particle_170 geometry"]=s
s= marker_sets["particle_170 geometry"]
mark=s.place_marker((2326.97, 3194.48, 1551.37), (0.7, 0.7, 0.7), 139.505)
if "particle_171 geometry" not in marker_sets:
s=new_marker_set('particle_171 geometry')
marker_sets["particle_171 geometry"]=s
s= marker_sets["particle_171 geometry"]
mark=s.place_marker((2455.96, 3413.63, 1127.2), (0.7, 0.7, 0.7), 92.7639)
if "particle_172 geometry" not in marker_sets:
s=new_marker_set('particle_172 geometry')
marker_sets["particle_172 geometry"]=s
s= marker_sets["particle_172 geometry"]
mark=s.place_marker((2220.27, 3496.3, 1172.31), (0.7, 0.7, 0.7), 89.8452)
if "particle_173 geometry" not in marker_sets:
s=new_marker_set('particle_173 geometry')
marker_sets["particle_173 geometry"]=s
s= marker_sets["particle_173 geometry"]
mark=s.place_marker((2184.25, 3211.27, 1145.17), (0.7, 0.7, 0.7), 149.446)
if "particle_174 geometry" not in marker_sets:
s=new_marker_set('particle_174 geometry')
marker_sets["particle_174 geometry"]=s
s= marker_sets["particle_174 geometry"]
mark=s.place_marker((2344.76, 3038.96, 904.231), (0.7, 0.7, 0.7), 126.858)
if "particle_175 geometry" not in marker_sets:
s=new_marker_set('particle_175 geometry')
marker_sets["particle_175 geometry"]=s
s= marker_sets["particle_175 geometry"]
mark=s.place_marker((2477.25, 3324.33, 785.931), (0.7, 0.7, 0.7), 106.046)
if "particle_176 geometry" not in marker_sets:
s=new_marker_set('particle_176 geometry')
marker_sets["particle_176 geometry"]=s
s= marker_sets["particle_176 geometry"]
mark=s.place_marker((2397.1, 3788.29, 979.32), (0.7, 0.7, 0.7), 156.298)
if "particle_177 geometry" not in marker_sets:
s=new_marker_set('particle_177 geometry')
marker_sets["particle_177 geometry"]=s
s= marker_sets["particle_177 geometry"]
mark=s.place_marker((2390.83, 4319.91, 1253.17), (0.7, 0.7, 0.7), 231.212)
if "particle_178 geometry" not in marker_sets:
s=new_marker_set('particle_178 geometry')
marker_sets["particle_178 geometry"]=s
s= marker_sets["particle_178 geometry"]
mark=s.place_marker((2064.81, 4397.43, 1691.12), (0.7, 0.7, 0.7), 88.4916)
if "particle_179 geometry" not in marker_sets:
s=new_marker_set('particle_179 geometry')
marker_sets["particle_179 geometry"]=s
s= marker_sets["particle_179 geometry"]
mark=s.place_marker((1850.8, 4063.72, 1962.82), (0.7, 0.7, 0.7), 111.334)
if "particle_180 geometry" not in marker_sets:
s=new_marker_set('particle_180 geometry')
marker_sets["particle_180 geometry"]=s
s= marker_sets["particle_180 geometry"]
mark=s.place_marker((1870.13, 3473.74, 2110.02), (0.7, 0.7, 0.7), 127.619)
if "particle_181 geometry" not in marker_sets:
s=new_marker_set('particle_181 geometry')
marker_sets["particle_181 geometry"]=s
s= marker_sets["particle_181 geometry"]
mark=s.place_marker((1982.68, 3050.89, 2228.75), (0.7, 0.7, 0.7), 230.746)
if "particle_182 geometry" not in marker_sets:
s=new_marker_set('particle_182 geometry')
marker_sets["particle_182 geometry"]=s
s= marker_sets["particle_182 geometry"]
mark=s.place_marker((2240.05, 3301.53, 2011.63), (0.7, 0.7, 0.7), 124.573)
if "particle_183 geometry" not in marker_sets:
s=new_marker_set('particle_183 geometry')
marker_sets["particle_183 geometry"]=s
s= marker_sets["particle_183 geometry"]
mark=s.place_marker((2416.69, 3741.92, 1583.23), (0.7, 0.7, 0.7), 124.489)
if "particle_184 geometry" not in marker_sets:
s=new_marker_set('particle_184 geometry')
marker_sets["particle_184 geometry"]=s
s= marker_sets["particle_184 geometry"]
mark=s.place_marker((2715.48, 3677.03, 1358.16), (0.7, 0.7, 0.7), 196.61)
if "particle_185 geometry" not in marker_sets:
s=new_marker_set('particle_185 geometry')
marker_sets["particle_185 geometry"]=s
s= marker_sets["particle_185 geometry"]
mark=s.place_marker((2673.57, 3654.9, 1619.26), (0.7, 0.7, 0.7), 134.049)
if "particle_186 geometry" not in marker_sets:
s=new_marker_set('particle_186 geometry')
marker_sets["particle_186 geometry"]=s
s= marker_sets["particle_186 geometry"]
mark=s.place_marker((2437.08, 3783.5, 1473.55), (0.7, 0.7, 0.7), 141.493)
if "particle_187 geometry" not in marker_sets:
s=new_marker_set('particle_187 geometry')
marker_sets["particle_187 geometry"]=s
s= marker_sets["particle_187 geometry"]
mark=s.place_marker((2362.65, 4107.55, 1225.71), (0.7, 0.7, 0.7), 172.203)
if "particle_188 geometry" not in marker_sets:
s=new_marker_set('particle_188 geometry')
marker_sets["particle_188 geometry"]=s
s= marker_sets["particle_188 geometry"]
mark=s.place_marker((2785.73, 3621.21, 1346.4), (0.7, 0.7, 0.7), 271.354)
if "particle_189 geometry" not in marker_sets:
s=new_marker_set('particle_189 geometry')
marker_sets["particle_189 geometry"]=s
s= marker_sets["particle_189 geometry"]
mark=s.place_marker((3163.36, 3436.2, 1655.61), (0.7, 0.7, 0.7), 97.0785)
if "particle_190 geometry" not in marker_sets:
s=new_marker_set('particle_190 geometry')
marker_sets["particle_190 geometry"]=s
s= marker_sets["particle_190 geometry"]
mark=s.place_marker((3470.88, 3516.36, 1924.42), (0.7, 0.7, 0.7), 151.857)
if "particle_191 geometry" not in marker_sets:
s=new_marker_set('particle_191 geometry')
marker_sets["particle_191 geometry"]=s
s= marker_sets["particle_191 geometry"]
mark=s.place_marker((3909.53, 3304.69, 2226.22), (0.7, 0.7, 0.7), 199.233)
if "particle_192 geometry" not in marker_sets:
s=new_marker_set('particle_192 geometry')
marker_sets["particle_192 geometry"]=s
s= marker_sets["particle_192 geometry"]
mark=s.place_marker((3829.49, 2902.45, 2628.51), (0.7, 0.7, 0.7), 118.863)
if "particle_193 geometry" not in marker_sets:
s=new_marker_set('particle_193 geometry')
marker_sets["particle_193 geometry"]=s
s= marker_sets["particle_193 geometry"]
mark=s.place_marker((4003.28, 2821.77, 3018.08), (0.7, 0.7, 0.7), 172.415)
if "particle_194 geometry" not in marker_sets:
s=new_marker_set('particle_194 geometry')
marker_sets["particle_194 geometry"]=s
s= marker_sets["particle_194 geometry"]
mark=s.place_marker((4401.13, 2966.87, 3254.92), (0.7, 0.7, 0.7), 134.26)
if "particle_195 geometry" not in marker_sets:
s=new_marker_set('particle_195 geometry')
marker_sets["particle_195 geometry"]=s
s= marker_sets["particle_195 geometry"]
mark=s.place_marker((5232.88, 3342.37, 3405.09), (0.7, 0.7, 0.7), 139.548)
if "particle_196 geometry" not in marker_sets:
s=new_marker_set('particle_196 geometry')
marker_sets["particle_196 geometry"]=s
s= marker_sets["particle_196 geometry"]
mark=s.place_marker((4976.41, 3829.86, 3252.07), (0.7, 0.7, 0.7), 196.526)
if "particle_197 geometry" not in marker_sets:
s=new_marker_set('particle_197 geometry')
marker_sets["particle_197 geometry"]=s
s= marker_sets["particle_197 geometry"]
mark=s.place_marker((4261.01, 3966.66, 2954.48), (0.7, 0.7, 0.7), 136.206)
if "particle_198 geometry" not in marker_sets:
s=new_marker_set('particle_198 geometry')
marker_sets["particle_198 geometry"]=s
s= marker_sets["particle_198 geometry"]
mark=s.place_marker((3336.71, 3689.88, 2904.17), (0.7, 0.7, 0.7), 152.322)
if "particle_199 geometry" not in marker_sets:
s=new_marker_set('particle_199 geometry')
marker_sets["particle_199 geometry"]=s
s= marker_sets["particle_199 geometry"]
mark=s.place_marker((2783.25, 3350.32, 2751.51), (0.7, 0.7, 0.7), 126.054)
if "particle_200 geometry" not in marker_sets:
s=new_marker_set('particle_200 geometry')
marker_sets["particle_200 geometry"]=s
s= marker_sets["particle_200 geometry"]
mark=s.place_marker((2763.4, 3520.65, 2369.22), (0.7, 0.7, 0.7), 164.378)
if "particle_201 geometry" not in marker_sets:
s=new_marker_set('particle_201 geometry')
marker_sets["particle_201 geometry"]=s
s= marker_sets["particle_201 geometry"]
mark=s.place_marker((3018.04, 3601.29, 2027.55), (0.7, 0.7, 0.7), 122.205)
if "particle_202 geometry" not in marker_sets:
s=new_marker_set('particle_202 geometry')
marker_sets["particle_202 geometry"]=s
s= marker_sets["particle_202 geometry"]
mark=s.place_marker((3312.23, 3458.85, 1719.21), (0.7, 0.7, 0.7), 134.979)
if "particle_203 geometry" not in marker_sets:
s=new_marker_set('particle_203 geometry')
marker_sets["particle_203 geometry"]=s
s= marker_sets["particle_203 geometry"]
mark=s.place_marker((3392.55, 3123.17, 1818.3), (0.7, 0.7, 0.7), 136.375)
if "particle_204 geometry" not in marker_sets:
s=new_marker_set('particle_204 geometry')
marker_sets["particle_204 geometry"]=s
s= marker_sets["particle_204 geometry"]
mark=s.place_marker((3190.74, 3227.29, 1779.06), (0.7, 0.7, 0.7), 151.688)
if "particle_205 geometry" not in marker_sets:
s=new_marker_set('particle_205 geometry')
marker_sets["particle_205 geometry"]=s
s= marker_sets["particle_205 geometry"]
mark=s.place_marker((3106.36, 3286.85, 1526), (0.7, 0.7, 0.7), 116.156)
if "particle_206 geometry" not in marker_sets:
s=new_marker_set('particle_206 geometry')
marker_sets["particle_206 geometry"]=s
s= marker_sets["particle_206 geometry"]
mark=s.place_marker((2717.76, 2936.25, 2008.83), (0.7, 0.7, 0.7), 122.839)
if "particle_207 geometry" not in marker_sets:
s=new_marker_set('particle_207 geometry')
marker_sets["particle_207 geometry"]=s
s= marker_sets["particle_207 geometry"]
mark=s.place_marker((2474.66, 2951.48, 2448.92), (0.7, 0.7, 0.7), 164.716)
if "particle_208 geometry" not in marker_sets:
s=new_marker_set('particle_208 geometry')
marker_sets["particle_208 geometry"]=s
s= marker_sets["particle_208 geometry"]
mark=s.place_marker((2909.5, 3692.15, 2307.07), (0.7, 0.7, 0.7), 303.672)
if "particle_209 geometry" not in marker_sets:
s=new_marker_set('particle_209 geometry')
marker_sets["particle_209 geometry"]=s
s= marker_sets["particle_209 geometry"]
mark=s.place_marker((3650.48, 4217.07, 1810.16), (0.7, 0.7, 0.7), 220.298)
if "particle_210 geometry" not in marker_sets:
s=new_marker_set('particle_210 geometry')
marker_sets["particle_210 geometry"]=s
s= marker_sets["particle_210 geometry"]
mark=s.place_marker((3437.76, 3931.86, 1291.97), (0.7, 0.7, 0.7), 175.883)
if "particle_211 geometry" not in marker_sets:
s=new_marker_set('particle_211 geometry')
marker_sets["particle_211 geometry"]=s
s= marker_sets["particle_211 geometry"]
mark=s.place_marker((2890.47, 3868.29, 911.158), (0.7, 0.7, 0.7), 233.581)
if "particle_212 geometry" not in marker_sets:
s=new_marker_set('particle_212 geometry')
marker_sets["particle_212 geometry"]=s
s= marker_sets["particle_212 geometry"]
mark=s.place_marker((2158.65, 4026.44, 1080.09), (0.7, 0.7, 0.7), 231.127)
if "particle_213 geometry" not in marker_sets:
s=new_marker_set('particle_213 geometry')
marker_sets["particle_213 geometry"]=s
s= marker_sets["particle_213 geometry"]
mark=s.place_marker((1647.71, 3813.49, 816.109), (0.7, 0.7, 0.7), 247.413)
if "particle_214 geometry" not in marker_sets:
s=new_marker_set('particle_214 geometry')
marker_sets["particle_214 geometry"]=s
s= marker_sets["particle_214 geometry"]
mark=s.place_marker((1432.97, 3380.93, 370.551), (0.7, 0.7, 0.7), 200.206)
if "particle_215 geometry" not in marker_sets:
s=new_marker_set('particle_215 geometry')
marker_sets["particle_215 geometry"]=s
s= marker_sets["particle_215 geometry"]
mark=s.place_marker((1666.5, 3080.08, 181.166), (0.7, 0.7, 0.7), 150.419)
if "particle_216 geometry" not in marker_sets:
s=new_marker_set('particle_216 geometry')
marker_sets["particle_216 geometry"]=s
s= marker_sets["particle_216 geometry"]
mark=s.place_marker((1556.5, 2944.05, 770.699), (0.7, 0.7, 0.7), 140.14)
if "particle_217 geometry" not in marker_sets:
s=new_marker_set('particle_217 geometry')
marker_sets["particle_217 geometry"]=s
s= marker_sets["particle_217 geometry"]
mark=s.place_marker((1259.15, 2961.1, 1137.31), (0.7, 0.7, 0.7), 132.949)
if "particle_218 geometry" not in marker_sets:
s=new_marker_set('particle_218 geometry')
marker_sets["particle_218 geometry"]=s
s= marker_sets["particle_218 geometry"]
mark=s.place_marker((1125.27, 2865.19, 1502.24), (0.7, 0.7, 0.7), 141.113)
if "particle_219 geometry" not in marker_sets:
s=new_marker_set('particle_219 geometry')
marker_sets["particle_219 geometry"]=s
s= marker_sets["particle_219 geometry"]
mark=s.place_marker((1017.18, 3135.13, 1643.78), (0.7, 0.7, 0.7), 171.526)
if "particle_220 geometry" not in marker_sets:
s=new_marker_set('particle_220 geometry')
marker_sets["particle_220 geometry"]=s
s= marker_sets["particle_220 geometry"]
mark=s.place_marker((1180.13, 3513.53, 1235), (0.7, 0.7, 0.7), 326.937)
if "particle_221 geometry" not in marker_sets:
s=new_marker_set('particle_221 geometry')
marker_sets["particle_221 geometry"]=s
s= marker_sets["particle_221 geometry"]
mark=s.place_marker((1652.92, 3644.95, 969.161), (0.7, 0.7, 0.7), 92.0871)
if "particle_222 geometry" not in marker_sets:
s=new_marker_set('particle_222 geometry')
marker_sets["particle_222 geometry"]=s
s= marker_sets["particle_222 geometry"]
mark=s.place_marker((1830.31, 3671.64, 1355.52), (0.7, 0.7, 0.7), 210.273)
if "particle_223 geometry" not in marker_sets:
s=new_marker_set('particle_223 geometry')
marker_sets["particle_223 geometry"]=s
s= marker_sets["particle_223 geometry"]
mark=s.place_marker((1682.31, 3235.72, 1932.75), (0.7, 0.7, 0.7), 122.628)
if "particle_224 geometry" not in marker_sets:
s=new_marker_set('particle_224 geometry')
marker_sets["particle_224 geometry"]=s
s= marker_sets["particle_224 geometry"]
mark=s.place_marker((1554.78, 3078.07, 2086.54), (0.7, 0.7, 0.7), 109.176)
if "particle_225 geometry" not in marker_sets:
s=new_marker_set('particle_225 geometry')
marker_sets["particle_225 geometry"]=s
s= marker_sets["particle_225 geometry"]
mark=s.place_marker((1727.12, 3147.43, 1861.12), (0.7, 0.7, 0.7), 142.213)
if "particle_226 geometry" not in marker_sets:
s=new_marker_set('particle_226 geometry')
marker_sets["particle_226 geometry"]=s
s= marker_sets["particle_226 geometry"]
mark=s.place_marker((2006.53, 3463.25, 1887.45), (0.7, 0.7, 0.7), 250.078)
if "particle_227 geometry" not in marker_sets:
s=new_marker_set('particle_227 geometry')
marker_sets["particle_227 geometry"]=s
s= marker_sets["particle_227 geometry"]
mark=s.place_marker((2341.49, 3161.94, 1785.41), (0.7, 0.7, 0.7), 123.558)
if "particle_228 geometry" not in marker_sets:
s=new_marker_set('particle_228 geometry')
marker_sets["particle_228 geometry"]=s
s= marker_sets["particle_228 geometry"]
mark=s.place_marker((2437.51, 2711.37, 1903.07), (0.7, 0.7, 0.7), 235.992)
if "particle_229 geometry" not in marker_sets:
s=new_marker_set('particle_229 geometry')
marker_sets["particle_229 geometry"]=s
s= marker_sets["particle_229 geometry"]
mark=s.place_marker((2666.07, 2281.57, 1944.13), (0.7, 0.7, 0.7), 172.373)
if "particle_230 geometry" not in marker_sets:
s=new_marker_set('particle_230 geometry')
marker_sets["particle_230 geometry"]=s
s= marker_sets["particle_230 geometry"]
mark=s.place_marker((3022.29, 2227.58, 1668.49), (0.7, 0.7, 0.7), 152.322)
if "particle_231 geometry" not in marker_sets:
s=new_marker_set('particle_231 geometry')
marker_sets["particle_231 geometry"]=s
s= marker_sets["particle_231 geometry"]
mark=s.place_marker((3199.88, 2269.29, 1430.39), (0.7, 0.7, 0.7), 196.653)
if "particle_232 geometry" not in marker_sets:
s=new_marker_set('particle_232 geometry')
marker_sets["particle_232 geometry"]=s
s= marker_sets["particle_232 geometry"]
mark=s.place_marker((2950.03, 2025.95, 1460.95), (0.7, 0.7, 0.7), 134.091)
if "particle_233 geometry" not in marker_sets:
s=new_marker_set('particle_233 geometry')
marker_sets["particle_233 geometry"]=s
s= marker_sets["particle_233 geometry"]
mark=s.place_marker((2650.23, 1892.82, 1489.93), (0.7, 0.7, 0.7), 180.325)
if "particle_234 geometry" not in marker_sets:
s=new_marker_set('particle_234 geometry')
marker_sets["particle_234 geometry"]=s
s= marker_sets["particle_234 geometry"]
mark=s.place_marker((2650.86, 2280.85, 1728.61), (0.7, 0.7, 0.7), 218.437)
if "particle_235 geometry" not in marker_sets:
s=new_marker_set('particle_235 geometry')
marker_sets["particle_235 geometry"]=s
s= marker_sets["particle_235 geometry"]
mark=s.place_marker((2649.04, 2728.66, 1620.37), (0.7, 0.7, 0.7), 148.008)
if "particle_236 geometry" not in marker_sets:
s=new_marker_set('particle_236 geometry')
marker_sets["particle_236 geometry"]=s
s= marker_sets["particle_236 geometry"]
mark=s.place_marker((2707.49, 3129.04, 1131.24), (0.7, 0.7, 0.7), 191.873)
if "particle_237 geometry" not in marker_sets:
s=new_marker_set('particle_237 geometry')
marker_sets["particle_237 geometry"]=s
s= marker_sets["particle_237 geometry"]
mark=s.place_marker((2500.27, 3397.5, 703.275), (0.7, 0.7, 0.7), 138.575)
if "particle_238 geometry" not in marker_sets:
s=new_marker_set('particle_238 geometry')
marker_sets["particle_238 geometry"]=s
s= marker_sets["particle_238 geometry"]
mark=s.place_marker((2548.03, 3263.22, 296.966), (0.7, 0.7, 0.7), 161.205)
if "particle_239 geometry" not in marker_sets:
s=new_marker_set('particle_239 geometry')
marker_sets["particle_239 geometry"]=s
s= marker_sets["particle_239 geometry"]
mark=s.place_marker((2854.26, 3299.2, 658.293), (0.7, 0.7, 0.7), 288.021)
if "particle_240 geometry" not in marker_sets:
s=new_marker_set('particle_240 geometry')
marker_sets["particle_240 geometry"]=s
s= marker_sets["particle_240 geometry"]
mark=s.place_marker((2865.71, 2654.36, 981.593), (0.7, 0.7, 0.7), 227.405)
if "particle_241 geometry" not in marker_sets:
s=new_marker_set('particle_241 geometry')
marker_sets["particle_241 geometry"]=s
s= marker_sets["particle_241 geometry"]
mark=s.place_marker((2856.02, 2364.83, 1416.66), (0.7, 0.7, 0.7), 126.519)
if "particle_242 geometry" not in marker_sets:
s=new_marker_set('particle_242 geometry')
marker_sets["particle_242 geometry"]=s
s= marker_sets["particle_242 geometry"]
mark=s.place_marker((3112.2, 2471.37, 1312.29), (0.7, 0.7, 0.7), 117.975)
if "particle_243 geometry" not in marker_sets:
s=new_marker_set('particle_243 geometry')
marker_sets["particle_243 geometry"]=s
s= marker_sets["particle_243 geometry"]
mark=s.place_marker((2932.84, 2588.32, 1656.64), (0.7, 0.7, 0.7), 200.883)
if "particle_244 geometry" not in marker_sets:
s=new_marker_set('particle_244 geometry')
marker_sets["particle_244 geometry"]=s
s= marker_sets["particle_244 geometry"]
mark=s.place_marker((2626.55, 2385.78, 1713.22), (0.7, 0.7, 0.7), 158.794)
if "particle_245 geometry" not in marker_sets:
s=new_marker_set('particle_245 geometry')
marker_sets["particle_245 geometry"]=s
s= marker_sets["particle_245 geometry"]
mark=s.place_marker((2346.76, 2269.46, 1626.85), (0.7, 0.7, 0.7), 115.86)
if "particle_246 geometry" not in marker_sets:
s=new_marker_set('particle_246 geometry')
marker_sets["particle_246 geometry"]=s
s= marker_sets["particle_246 geometry"]
mark=s.place_marker((2261.41, 2221.1, 1850.27), (0.7, 0.7, 0.7), 133.034)
if "particle_247 geometry" not in marker_sets:
s=new_marker_set('particle_247 geometry')
marker_sets["particle_247 geometry"]=s
s= marker_sets["particle_247 geometry"]
mark=s.place_marker((2583.78, 2105.94, 2185.37), (0.7, 0.7, 0.7), 314.627)
if "particle_248 geometry" not in marker_sets:
s=new_marker_set('particle_248 geometry')
marker_sets["particle_248 geometry"]=s
s= marker_sets["particle_248 geometry"]
mark=s.place_marker((2715.37, 2227.44, 1861.89), (0.7, 0.7, 0.7), 115.352)
if "particle_249 geometry" not in marker_sets:
s=new_marker_set('particle_249 geometry')
marker_sets["particle_249 geometry"]=s
s= marker_sets["particle_249 geometry"]
mark=s.place_marker((2666.43, 2308.58, 1439.14), (0.7, 0.7, 0.7), 180.621)
if "particle_250 geometry" not in marker_sets:
s=new_marker_set('particle_250 geometry')
marker_sets["particle_250 geometry"]=s
s= marker_sets["particle_250 geometry"]
mark=s.place_marker((2307.55, 2245.05, 1338.92), (0.7, 0.7, 0.7), 126.265)
if "particle_251 geometry" not in marker_sets:
s=new_marker_set('particle_251 geometry')
marker_sets["particle_251 geometry"]=s
s= marker_sets["particle_251 geometry"]
mark=s.place_marker((1941.5, 2275.99, 1502.17), (0.7, 0.7, 0.7), 133.541)
if "particle_252 geometry" not in marker_sets:
s=new_marker_set('particle_252 geometry')
marker_sets["particle_252 geometry"]=s
s= marker_sets["particle_252 geometry"]
mark=s.place_marker((1507.97, 2393.33, 1502.81), (0.7, 0.7, 0.7), 171.019)
if "particle_253 geometry" not in marker_sets:
s=new_marker_set('particle_253 geometry')
marker_sets["particle_253 geometry"]=s
s= marker_sets["particle_253 geometry"]
mark=s.place_marker((1145.54, 2555.95, 1375.88), (0.7, 0.7, 0.7), 115.437)
if "particle_254 geometry" not in marker_sets:
s=new_marker_set('particle_254 geometry')
marker_sets["particle_254 geometry"]=s
s= marker_sets["particle_254 geometry"]
mark=s.place_marker((1222.32, 2271.19, 1298.32), (0.7, 0.7, 0.7), 158.583)
if "particle_255 geometry" not in marker_sets:
s=new_marker_set('particle_255 geometry')
marker_sets["particle_255 geometry"]=s
s= marker_sets["particle_255 geometry"]
mark=s.place_marker((1470.41, 2351.8, 1628.4), (0.7, 0.7, 0.7), 192)
if "particle_256 geometry" not in marker_sets:
s=new_marker_set('particle_256 geometry')
marker_sets["particle_256 geometry"]=s
s= marker_sets["particle_256 geometry"]
mark=s.place_marker((1825.43, 2280.53, 1872.55), (0.7, 0.7, 0.7), 150.165)
if "particle_257 geometry" not in marker_sets:
s=new_marker_set('particle_257 geometry')
marker_sets["particle_257 geometry"]=s
s= marker_sets["particle_257 geometry"]
mark=s.place_marker((1920.63, 2393.41, 2117.75), (0.7, 0.7, 0.7), 157.567)
if "particle_258 geometry" not in marker_sets:
s=new_marker_set('particle_258 geometry')
marker_sets["particle_258 geometry"]=s
s= marker_sets["particle_258 geometry"]
mark=s.place_marker((2004.19, 2469.62, 2200.7), (0.7, 0.7, 0.7), 199.36)
if "particle_259 geometry" not in marker_sets:
s=new_marker_set('particle_259 geometry')
marker_sets["particle_259 geometry"]=s
s= marker_sets["particle_259 geometry"]
mark=s.place_marker((2066.17, 2780.2, 1857.01), (0.7, 0.7, 0.7), 105.369)
if "particle_260 geometry" not in marker_sets:
s=new_marker_set('particle_260 geometry')
marker_sets["particle_260 geometry"]=s
s= marker_sets["particle_260 geometry"]
mark=s.place_marker((2203.28, 2998.22, 1762.68), (0.7, 0.7, 0.7), 118.651)
if "particle_261 geometry" not in marker_sets:
s=new_marker_set('particle_261 geometry')
marker_sets["particle_261 geometry"]=s
s= marker_sets["particle_261 geometry"]
mark=s.place_marker((2354.64, 2684.82, 2045.17), (0.7, 0.7, 0.7), 219.664)
if "particle_262 geometry" not in marker_sets:
s=new_marker_set('particle_262 geometry')
marker_sets["particle_262 geometry"]=s
s= marker_sets["particle_262 geometry"]
mark=s.place_marker((2342.38, 2262.2, 2468.6), (0.7, 0.7, 0.7), 196.018)
if "particle_263 geometry" not in marker_sets:
s=new_marker_set('particle_263 geometry')
marker_sets["particle_263 geometry"]=s
s= marker_sets["particle_263 geometry"]
mark=s.place_marker((2366.32, 1952.49, 2875.41), (0.7, 0.7, 0.7), 218.141)
if "particle_264 geometry" not in marker_sets:
s=new_marker_set('particle_264 geometry')
marker_sets["particle_264 geometry"]=s
s= marker_sets["particle_264 geometry"]
mark=s.place_marker((2147.93, 2135.76, 3087.46), (0.7, 0.7, 0.7), 181.636)
if "particle_265 geometry" not in marker_sets:
s=new_marker_set('particle_265 geometry')
marker_sets["particle_265 geometry"]=s
s= marker_sets["particle_265 geometry"]
mark=s.place_marker((2087.8, 2383.97, 2931.21), (0.7, 0.7, 0.7), 195.003)
if "particle_266 geometry" not in marker_sets:
s=new_marker_set('particle_266 geometry')
marker_sets["particle_266 geometry"]=s
s= marker_sets["particle_266 geometry"]
mark=s.place_marker((2074.34, 2144.27, 3024.07), (0.7, 0.7, 0.7), 139.209)
if "particle_267 geometry" not in marker_sets:
s=new_marker_set('particle_267 geometry')
marker_sets["particle_267 geometry"]=s
s= marker_sets["particle_267 geometry"]
mark=s.place_marker((1997.91, 2110.56, 3018.11), (0.7, 0.7, 0.7), 189.885)
if "particle_268 geometry" not in marker_sets:
s=new_marker_set('particle_268 geometry')
marker_sets["particle_268 geometry"]=s
s= marker_sets["particle_268 geometry"]
mark=s.place_marker((1945.21, 2055.98, 2694.97), (0.7, 0.7, 0.7), 267.674)
if "particle_269 geometry" not in marker_sets:
s=new_marker_set('particle_269 geometry')
marker_sets["particle_269 geometry"]=s
s= marker_sets["particle_269 geometry"]
mark=s.place_marker((2050.58, 1800.25, 2202.93), (0.7, 0.7, 0.7), 196.568)
if "particle_270 geometry" not in marker_sets:
s=new_marker_set('particle_270 geometry')
marker_sets["particle_270 geometry"]=s
s= marker_sets["particle_270 geometry"]
mark=s.place_marker((1751.45, 1791.84, 2253.24), (0.7, 0.7, 0.7), 192.423)
if "particle_271 geometry" not in marker_sets:
s=new_marker_set('particle_271 geometry')
marker_sets["particle_271 geometry"]=s
s= marker_sets["particle_271 geometry"]
mark=s.place_marker((1631.65, 1746.53, 2635.19), (1, 0.7, 0), 202.405)
if "particle_272 geometry" not in marker_sets:
s=new_marker_set('particle_272 geometry')
marker_sets["particle_272 geometry"]=s
s= marker_sets["particle_272 geometry"]
mark=s.place_marker((1818.87, 1765.18, 1787.17), (0.7, 0.7, 0.7), 135.529)
if "particle_273 geometry" not in marker_sets:
s=new_marker_set('particle_273 geometry')
marker_sets["particle_273 geometry"]=s
s= marker_sets["particle_273 geometry"]
mark=s.place_marker((1913.91, 1812.04, 796.271), (0.7, 0.7, 0.7), 114.21)
if "particle_274 geometry" not in marker_sets:
s=new_marker_set('particle_274 geometry')
marker_sets["particle_274 geometry"]=s
s= marker_sets["particle_274 geometry"]
mark=s.place_marker((1990.4, 2128.9, 787.485), (0.7, 0.7, 0.7), 159.133)
if "particle_275 geometry" not in marker_sets:
s=new_marker_set('particle_275 geometry')
marker_sets["particle_275 geometry"]=s
s= marker_sets["particle_275 geometry"]
mark=s.place_marker((2245.28, 2272.47, 1072.93), (0.7, 0.7, 0.7), 144.412)
if "particle_276 geometry" not in marker_sets:
s=new_marker_set('particle_276 geometry')
marker_sets["particle_276 geometry"]=s
s= marker_sets["particle_276 geometry"]
mark=s.place_marker((2465.51, 2346.99, 1293.12), (0.7, 0.7, 0.7), 70.8525)
if "particle_277 geometry" not in marker_sets:
s=new_marker_set('particle_277 geometry')
marker_sets["particle_277 geometry"]=s
s= marker_sets["particle_277 geometry"]
mark=s.place_marker((2338.54, 2323.49, 1904.96), (0.7, 0.7, 0.7), 141.874)
if "particle_278 geometry" not in marker_sets:
s=new_marker_set('particle_278 geometry')
marker_sets["particle_278 geometry"]=s
s= marker_sets["particle_278 geometry"]
mark=s.place_marker((2159.1, 2230.08, 2486.3), (0.7, 0.7, 0.7), 217.337)
if "particle_279 geometry" not in marker_sets:
s=new_marker_set('particle_279 geometry')
marker_sets["particle_279 geometry"]=s
s= marker_sets["particle_279 geometry"]
mark=s.place_marker((2196.44, 2178.98, 2515.98), (0.7, 0.7, 0.7), 237.641)
if "particle_280 geometry" not in marker_sets:
s=new_marker_set('particle_280 geometry')
marker_sets["particle_280 geometry"]=s
s= marker_sets["particle_280 geometry"]
mark=s.place_marker((2579.29, 2158.65, 2264.81), (0.7, 0.7, 0.7), 229.393)
if "particle_281 geometry" not in marker_sets:
s=new_marker_set('particle_281 geometry')
marker_sets["particle_281 geometry"]=s
s= marker_sets["particle_281 geometry"]
mark=s.place_marker((2760.83, 1783.04, 2732.69), (0.7, 0.7, 0.7), 349.906)
if "particle_282 geometry" not in marker_sets:
s=new_marker_set('particle_282 geometry')
marker_sets["particle_282 geometry"]=s
s= marker_sets["particle_282 geometry"]
mark=s.place_marker((2577.7, 1301.4, 3014.78), (0.7, 0.7, 0.7), 162.347)
if "particle_283 geometry" not in marker_sets:
s=new_marker_set('particle_283 geometry')
marker_sets["particle_283 geometry"]=s
s= marker_sets["particle_283 geometry"]
mark=s.place_marker((2544.46, 1092.72, 3008.73), (0.7, 0.7, 0.7), 194.072)
if "particle_284 geometry" not in marker_sets:
s=new_marker_set('particle_284 geometry')
marker_sets["particle_284 geometry"]=s
s= marker_sets["particle_284 geometry"]
mark=s.place_marker((2713.24, 1045.14, 2953.89), (0.7, 0.7, 0.7), 242.21)
if "particle_285 geometry" not in marker_sets:
s=new_marker_set('particle_285 geometry')
marker_sets["particle_285 geometry"]=s
s= marker_sets["particle_285 geometry"]
mark=s.place_marker((2673.17, 576.362, 2684.37), (0.7, 0.7, 0.7), 320.93)
if "particle_286 geometry" not in marker_sets:
s=new_marker_set('particle_286 geometry')
marker_sets["particle_286 geometry"]=s
s= marker_sets["particle_286 geometry"]
mark=s.place_marker((2930.26, 88.6916, 2661.79), (0.7, 0.7, 0.7), 226.432)
if "particle_287 geometry" not in marker_sets:
s=new_marker_set('particle_287 geometry')
marker_sets["particle_287 geometry"]=s
s= marker_sets["particle_287 geometry"]
mark=s.place_marker((3104.35, 239.923, 2984.1), (0.7, 0.7, 0.7), 125.208)
if "particle_288 geometry" not in marker_sets:
s=new_marker_set('particle_288 geometry')
marker_sets["particle_288 geometry"]=s
s= marker_sets["particle_288 geometry"]
mark=s.place_marker((3213.13, 625.957, 3280.19), (0.7, 0.7, 0.7), 197.837)
if "particle_289 geometry" not in marker_sets:
s=new_marker_set('particle_289 geometry')
marker_sets["particle_289 geometry"]=s
s= marker_sets["particle_289 geometry"]
mark=s.place_marker((3802.16, 656.216, 3487.35), (0.7, 0.7, 0.7), 167.804)
if "particle_290 geometry" not in marker_sets:
s=new_marker_set('particle_290 geometry')
marker_sets["particle_290 geometry"]=s
s= marker_sets["particle_290 geometry"]
mark=s.place_marker((4599.67, 420.686, 3613.76), (0.7, 0.7, 0.7), 136.84)
if "particle_291 geometry" not in marker_sets:
s=new_marker_set('particle_291 geometry')
marker_sets["particle_291 geometry"]=s
s= marker_sets["particle_291 geometry"]
mark=s.place_marker((4716.77, 369.387, 3167.5), (0.7, 0.7, 0.7), 85.7421)
if "particle_292 geometry" not in marker_sets:
s=new_marker_set('particle_292 geometry')
marker_sets["particle_292 geometry"]=s
s= marker_sets["particle_292 geometry"]
mark=s.place_marker((3353.59, 530.867, 2787.91), (1, 0.7, 0), 256)
if "particle_293 geometry" not in marker_sets:
s=new_marker_set('particle_293 geometry')
marker_sets["particle_293 geometry"]=s
s= marker_sets["particle_293 geometry"]
mark=s.place_marker((4067.27, 672.399, 3677.6), (0.7, 0.7, 0.7), 138.702)
if "particle_294 geometry" not in marker_sets:
s=new_marker_set('particle_294 geometry')
marker_sets["particle_294 geometry"]=s
s= marker_sets["particle_294 geometry"]
mark=s.place_marker((4250.7, 705.669, 4133.41), (0.7, 0.7, 0.7), 140.732)
if "particle_295 geometry" not in marker_sets:
s=new_marker_set('particle_295 geometry')
marker_sets["particle_295 geometry"]=s
s= marker_sets["particle_295 geometry"]
mark=s.place_marker((4133.83, 506.465, 3927.63), (0.7, 0.7, 0.7), 81.3006)
if "particle_296 geometry" not in marker_sets:
s=new_marker_set('particle_296 geometry')
marker_sets["particle_296 geometry"]=s
s= marker_sets["particle_296 geometry"]
mark=s.place_marker((4294.78, 132.652, 3779.79), (0.7, 0.7, 0.7), 133.837)
if "particle_297 geometry" not in marker_sets:
s=new_marker_set('particle_297 geometry')
marker_sets["particle_297 geometry"]=s
s= marker_sets["particle_297 geometry"]
mark=s.place_marker((3781.42, 299.04, 3417.82), (0.7, 0.7, 0.7), 98.3475)
if "particle_298 geometry" not in marker_sets:
s=new_marker_set('particle_298 geometry')
marker_sets["particle_298 geometry"]=s
s= marker_sets["particle_298 geometry"]
mark=s.place_marker((3105.59, 693.573, 3251.37), (0.7, 0.7, 0.7), 297.623)
if "particle_299 geometry" not in marker_sets:
s=new_marker_set('particle_299 geometry')
marker_sets["particle_299 geometry"]=s
s= marker_sets["particle_299 geometry"]
mark=s.place_marker((2838.22, 837.846, 2964.89), (0.7, 0.7, 0.7), 212.938)
if "particle_300 geometry" not in marker_sets:
s=new_marker_set('particle_300 geometry')
marker_sets["particle_300 geometry"]=s
s= marker_sets["particle_300 geometry"]
mark=s.place_marker((2738.43, 698.418, 3128.09), (0.7, 0.7, 0.7), 154.183)
if "particle_301 geometry" not in marker_sets:
s=new_marker_set('particle_301 geometry')
marker_sets["particle_301 geometry"]=s
s= marker_sets["particle_301 geometry"]
mark=s.place_marker((2701.23, 349.652, 2947.35), (0.7, 0.7, 0.7), 180.832)
if "particle_302 geometry" not in marker_sets:
s=new_marker_set('particle_302 geometry')
marker_sets["particle_302 geometry"]=s
s= marker_sets["particle_302 geometry"]
mark=s.place_marker((2728.64, 201.169, 2638.27), (0.7, 0.7, 0.7), 122.332)
if "particle_303 geometry" not in marker_sets:
s=new_marker_set('particle_303 geometry')
marker_sets["particle_303 geometry"]=s
s= marker_sets["particle_303 geometry"]
mark=s.place_marker((2870.05, 49.6141, 2369.02), (0.7, 0.7, 0.7), 209.047)
if "particle_304 geometry" not in marker_sets:
s=new_marker_set('particle_304 geometry')
marker_sets["particle_304 geometry"]=s
s= marker_sets["particle_304 geometry"]
mark=s.place_marker((2617.03, -234.574, 2538.66), (0.7, 0.7, 0.7), 126.985)
if "particle_305 geometry" not in marker_sets:
s=new_marker_set('particle_305 geometry')
marker_sets["particle_305 geometry"]=s
s= marker_sets["particle_305 geometry"]
mark=s.place_marker((2646.19, -606.366, 2525.67), (0.7, 0.7, 0.7), 122.205)
if "particle_306 geometry" not in marker_sets:
s=new_marker_set('particle_306 geometry')
marker_sets["particle_306 geometry"]=s
s= marker_sets["particle_306 geometry"]
mark=s.place_marker((2812.56, -612.221, 2360), (0.7, 0.7, 0.7), 107.95)
if "particle_307 geometry" not in marker_sets:
s=new_marker_set('particle_307 geometry')
marker_sets["particle_307 geometry"]=s
s= marker_sets["particle_307 geometry"]
mark=s.place_marker((2630.41, -74.4148, 2560.73), (0.7, 0.7, 0.7), 182.567)
if "particle_308 geometry" not in marker_sets:
s=new_marker_set('particle_308 geometry')
marker_sets["particle_308 geometry"]=s
s= marker_sets["particle_308 geometry"]
mark=s.place_marker((2639.79, 521.239, 2762.75), (0.7, 0.7, 0.7), 185.274)
if "particle_309 geometry" not in marker_sets:
s=new_marker_set('particle_309 geometry')
marker_sets["particle_309 geometry"]=s
s= marker_sets["particle_309 geometry"]
mark=s.place_marker((2846.45, 1015.65, 2702.66), (0.7, 0.7, 0.7), 413.567)
if "particle_310 geometry" not in marker_sets:
s=new_marker_set('particle_310 geometry')
marker_sets["particle_310 geometry"]=s
s= marker_sets["particle_310 geometry"]
mark=s.place_marker((2715.79, 1108.4, 2845.71), (0.7, 0.7, 0.7), 240.01)
if "particle_311 geometry" not in marker_sets:
s=new_marker_set('particle_311 geometry')
marker_sets["particle_311 geometry"]=s
s= marker_sets["particle_311 geometry"]
mark=s.place_marker((2721.04, 1092.27, 2817.4), (0.7, 0.7, 0.7), 238.995)
if "particle_312 geometry" not in marker_sets:
s=new_marker_set('particle_312 geometry')
marker_sets["particle_312 geometry"]=s
s= marker_sets["particle_312 geometry"]
mark=s.place_marker((2758.2, 892.986, 3223.59), (0.7, 0.7, 0.7), 203.674)
if "particle_313 geometry" not in marker_sets:
s=new_marker_set('particle_313 geometry')
marker_sets["particle_313 geometry"]=s
s= marker_sets["particle_313 geometry"]
mark=s.place_marker((2832.82, 550.812, 3804.97), (0.7, 0.7, 0.7), 266.744)
if "particle_314 geometry" not in marker_sets:
s=new_marker_set('particle_314 geometry')
marker_sets["particle_314 geometry"]=s
s= marker_sets["particle_314 geometry"]
mark=s.place_marker((2506.18, 369.861, 3617.36), (0.7, 0.7, 0.7), 147.585)
if "particle_315 geometry" not in marker_sets:
s=new_marker_set('particle_315 geometry')
marker_sets["particle_315 geometry"]=s
s= marker_sets["particle_315 geometry"]
mark=s.place_marker((2476.39, 598.808, 3366.38), (0.7, 0.7, 0.7), 249.485)
if "particle_316 geometry" not in marker_sets:
s=new_marker_set('particle_316 geometry')
marker_sets["particle_316 geometry"]=s
s= marker_sets["particle_316 geometry"]
mark=s.place_marker((2731.26, 845.257, 3602.75), (0.7, 0.7, 0.7), 119.371)
if "particle_317 geometry" not in marker_sets:
s=new_marker_set('particle_317 geometry')
marker_sets["particle_317 geometry"]=s
s= marker_sets["particle_317 geometry"]
mark=s.place_marker((3363.46, 714.269, 3793.67), (0.7, 0.7, 0.7), 155.875)
if "particle_318 geometry" not in marker_sets:
s=new_marker_set('particle_318 geometry')
marker_sets["particle_318 geometry"]=s
s= marker_sets["particle_318 geometry"]
mark=s.place_marker((4108.15, 553.679, 3546.58), (0.7, 0.7, 0.7), 189.419)
if "particle_319 geometry" not in marker_sets:
s=new_marker_set('particle_319 geometry')
marker_sets["particle_319 geometry"]=s
s= marker_sets["particle_319 geometry"]
mark=s.place_marker((4271.87, 841.362, 3219.63), (0.7, 0.7, 0.7), 137.475)
if "particle_320 geometry" not in marker_sets:
s=new_marker_set('particle_320 geometry')
marker_sets["particle_320 geometry"]=s
s= marker_sets["particle_320 geometry"]
mark=s.place_marker((4098.7, 1197.86, 3284.04), (0.7, 0.7, 0.7), 176.179)
if "particle_321 geometry" not in marker_sets:
s=new_marker_set('particle_321 geometry')
marker_sets["particle_321 geometry"]=s
s= marker_sets["particle_321 geometry"]
mark=s.place_marker((4087.98, 1383.73, 3568.7), (0.7, 0.7, 0.7), 138.829)
if "particle_322 geometry" not in marker_sets:
s=new_marker_set('particle_322 geometry')
marker_sets["particle_322 geometry"]=s
s= marker_sets["particle_322 geometry"]
mark=s.place_marker((4289.22, 1335.85, 3821.26), (0.7, 0.7, 0.7), 148.727)
if "particle_323 geometry" not in marker_sets:
s=new_marker_set('particle_323 geometry')
marker_sets["particle_323 geometry"]=s
s= marker_sets["particle_323 geometry"]
mark=s.place_marker((4709.23, 1236.3, 4009.04), (0.7, 0.7, 0.7), 230.323)
if "particle_324 geometry" not in marker_sets:
s=new_marker_set('particle_324 geometry')
marker_sets["particle_324 geometry"]=s
s= marker_sets["particle_324 geometry"]
mark=s.place_marker((4372.14, 1078.04, 3486.32), (0.7, 0.7, 0.7), 175.376)
if "particle_325 geometry" not in marker_sets:
s=new_marker_set('particle_325 geometry')
marker_sets["particle_325 geometry"]=s
s= marker_sets["particle_325 geometry"]
mark=s.place_marker((4061.36, 1172.15, 3088.56), (0.7, 0.7, 0.7), 161.163)
if "particle_326 geometry" not in marker_sets:
s=new_marker_set('particle_326 geometry')
marker_sets["particle_326 geometry"]=s
s= marker_sets["particle_326 geometry"]
mark=s.place_marker((4366.41, 1464.66, 2870), (0.7, 0.7, 0.7), 125.885)
if "particle_327 geometry" not in marker_sets:
s=new_marker_set('particle_327 geometry')
marker_sets["particle_327 geometry"]=s
s= marker_sets["particle_327 geometry"]
mark=s.place_marker((4684.09, 1497.16, 2593.72), (0.7, 0.7, 0.7), 206.635)
if "particle_328 geometry" not in marker_sets:
s=new_marker_set('particle_328 geometry')
marker_sets["particle_328 geometry"]=s
s= marker_sets["particle_328 geometry"]
mark=s.place_marker((4466.31, 1858.09, 2761.56), (0.7, 0.7, 0.7), 151.392)
if "particle_329 geometry" not in marker_sets:
s=new_marker_set('particle_329 geometry')
marker_sets["particle_329 geometry"]=s
s= marker_sets["particle_329 geometry"]
mark=s.place_marker((4205.47, 2069.58, 2900.11), (0.7, 0.7, 0.7), 173.388)
if "particle_330 geometry" not in marker_sets:
s=new_marker_set('particle_330 geometry')
marker_sets["particle_330 geometry"]=s
s= marker_sets["particle_330 geometry"]
mark=s.place_marker((4297.34, 2029.02, 2848.04), (0.7, 0.7, 0.7), 135.825)
if "particle_331 geometry" not in marker_sets:
s=new_marker_set('particle_331 geometry')
marker_sets["particle_331 geometry"]=s
s= marker_sets["particle_331 geometry"]
mark=s.place_marker((4643.84, 1863.1, 2993.13), (0.7, 0.7, 0.7), 186.839)
if "particle_332 geometry" not in marker_sets:
s=new_marker_set('particle_332 geometry')
marker_sets["particle_332 geometry"]=s
s= marker_sets["particle_332 geometry"]
mark=s.place_marker((5043.37, 1672.32, 3102.66), (0.7, 0.7, 0.7), 121.189)
if "particle_333 geometry" not in marker_sets:
s=new_marker_set('particle_333 geometry')
marker_sets["particle_333 geometry"]=s
s= marker_sets["particle_333 geometry"]
mark=s.place_marker((4694.17, 1576.62, 2875.29), (0.7, 0.7, 0.7), 102.916)
if "particle_334 geometry" not in marker_sets:
s=new_marker_set('particle_334 geometry')
marker_sets["particle_334 geometry"]=s
s= marker_sets["particle_334 geometry"]
mark=s.place_marker((4135.32, 1405.26, 2650.17), (0.7, 0.7, 0.7), 212.769)
if "particle_335 geometry" not in marker_sets:
s=new_marker_set('particle_335 geometry')
marker_sets["particle_335 geometry"]=s
s= marker_sets["particle_335 geometry"]
mark=s.place_marker((3475.36, 1477.29, 2483.72), (0.7, 0.7, 0.7), 173.092)
if "particle_336 geometry" not in marker_sets:
s=new_marker_set('particle_336 geometry')
marker_sets["particle_336 geometry"]=s
s= marker_sets["particle_336 geometry"]
mark=s.place_marker((3053.6, 1324.46, 2199.67), (0.7, 0.7, 0.7), 264.502)
if "particle_337 geometry" not in marker_sets:
s=new_marker_set('particle_337 geometry')
marker_sets["particle_337 geometry"]=s
s= marker_sets["particle_337 geometry"]
mark=s.place_marker((2889.43, 897.278, 1840.69), (0.7, 0.7, 0.7), 208.666)
if "particle_338 geometry" not in marker_sets:
s=new_marker_set('particle_338 geometry')
marker_sets["particle_338 geometry"]=s
s= marker_sets["particle_338 geometry"]
mark=s.place_marker((2806.61, 395.688, 1767.43), (0.7, 0.7, 0.7), 186.797)
if "particle_339 geometry" not in marker_sets:
s=new_marker_set('particle_339 geometry')
marker_sets["particle_339 geometry"]=s
s= marker_sets["particle_339 geometry"]
mark=s.place_marker((2425.55, 186.744, 2049.76), (0.7, 0.7, 0.7), 255.534)
if "particle_340 geometry" not in marker_sets:
s=new_marker_set('particle_340 geometry')
marker_sets["particle_340 geometry"]=s
s= marker_sets["particle_340 geometry"]
mark=s.place_marker((2475.22, -95.0854, 2377.45), (0.7, 0.7, 0.7), 153.126)
if "particle_341 geometry" not in marker_sets:
s=new_marker_set('particle_341 geometry')
marker_sets["particle_341 geometry"]=s
s= marker_sets["particle_341 geometry"]
mark=s.place_marker((2536.2, -318.786, 2063.22), (0.7, 0.7, 0.7), 165.816)
if "particle_342 geometry" not in marker_sets:
s=new_marker_set('particle_342 geometry')
marker_sets["particle_342 geometry"]=s
s= marker_sets["particle_342 geometry"]
mark=s.place_marker((2312.01, -32.9374, 1924.87), (0.7, 0.7, 0.7), 134.429)
if "particle_343 geometry" not in marker_sets:
s=new_marker_set('particle_343 geometry')
marker_sets["particle_343 geometry"]=s
s= marker_sets["particle_343 geometry"]
mark=s.place_marker((2449.69, 327.717, 1824.28), (0.7, 0.7, 0.7), 178.971)
if "particle_344 geometry" not in marker_sets:
s=new_marker_set('particle_344 geometry')
marker_sets["particle_344 geometry"]=s
s= marker_sets["particle_344 geometry"]
mark=s.place_marker((2948.74, 485.427, 1764.7), (0.7, 0.7, 0.7), 189.969)
if "particle_345 geometry" not in marker_sets:
s=new_marker_set('particle_345 geometry')
marker_sets["particle_345 geometry"]=s
s= marker_sets["particle_345 geometry"]
mark=s.place_marker((3444.4, 320.451, 1403.8), (0.7, 0.7, 0.7), 121.359)
if "particle_346 geometry" not in marker_sets:
s=new_marker_set('particle_346 geometry')
marker_sets["particle_346 geometry"]=s
s= marker_sets["particle_346 geometry"]
mark=s.place_marker((3921.63, 607.966, 1492.38), (0.7, 0.7, 0.7), 187.262)
if "particle_347 geometry" not in marker_sets:
s=new_marker_set('particle_347 geometry')
marker_sets["particle_347 geometry"]=s
s= marker_sets["particle_347 geometry"]
mark=s.place_marker((4089.67, 1184.22, 1801.87), (0.7, 0.7, 0.7), 164.335)
if "particle_348 geometry" not in marker_sets:
s=new_marker_set('particle_348 geometry')
marker_sets["particle_348 geometry"]=s
s= marker_sets["particle_348 geometry"]
mark=s.place_marker((4254.7, 1277.86, 2293.42), (0.7, 0.7, 0.7), 138.363)
if "particle_349 geometry" not in marker_sets:
s=new_marker_set('particle_349 geometry')
marker_sets["particle_349 geometry"]=s
s= marker_sets["particle_349 geometry"]
mark=s.place_marker((4497.28, 1385.73, 2579.61), (0.7, 0.7, 0.7), 138.49)
if "particle_350 geometry" not in marker_sets:
s=new_marker_set('particle_350 geometry')
marker_sets["particle_350 geometry"]=s
s= marker_sets["particle_350 geometry"]
mark=s.place_marker((4501.73, 1710.63, 2440.97), (0.7, 0.7, 0.7), 116.325)
if "particle_351 geometry" not in marker_sets:
s=new_marker_set('particle_351 geometry')
marker_sets["particle_351 geometry"]=s
s= marker_sets["particle_351 geometry"]
mark=s.place_marker((4157.1, 1634.89, 2133.81), (0.7, 0.7, 0.7), 106.511)
if "particle_352 geometry" not in marker_sets:
s=new_marker_set('particle_352 geometry')
marker_sets["particle_352 geometry"]=s
s= marker_sets["particle_352 geometry"]
mark=s.place_marker((3738.58, 1300.9, 1948.23), (0.7, 0.7, 0.7), 151.096)
if "particle_353 geometry" not in marker_sets:
s=new_marker_set('particle_353 geometry')
marker_sets["particle_353 geometry"]=s
s= marker_sets["particle_353 geometry"]
mark=s.place_marker((3384.6, 735.307, 1744.46), (0.7, 0.7, 0.7), 240.856)
if "particle_354 geometry" not in marker_sets:
s=new_marker_set('particle_354 geometry')
marker_sets["particle_354 geometry"]=s
s= marker_sets["particle_354 geometry"]
mark=s.place_marker((3142.83, 261.801, 1678.81), (0.7, 0.7, 0.7), 149.7)
if "particle_355 geometry" not in marker_sets:
s=new_marker_set('particle_355 geometry')
marker_sets["particle_355 geometry"]=s
s= marker_sets["particle_355 geometry"]
mark=s.place_marker((2807.4, 157.071, 1670.65), (0.7, 0.7, 0.7), 165.943)
if "particle_356 geometry" not in marker_sets:
s=new_marker_set('particle_356 geometry')
marker_sets["particle_356 geometry"]=s
s= marker_sets["particle_356 geometry"]
mark=s.place_marker((2478.02, 488.037, 2062.68), (0.7, 0.7, 0.7), 178.971)
if "particle_357 geometry" not in marker_sets:
s=new_marker_set('particle_357 geometry')
marker_sets["particle_357 geometry"]=s
s= marker_sets["particle_357 geometry"]
mark=s.place_marker((1867.85, 832.004, 2368.83), (0.7, 0.7, 0.7), 154.945)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
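# Editor's sketch (an assumption, not part of the original script): the hundreds
# of get-or-create/place_marker stanzas above all follow one pattern, so a helper
# like the one below could emit them from a plain list of (name, xyz, rgb,
# radius) tuples. new_marker_set and marker_sets are taken from the script itself.
def place_particle(name, xyz, rgb=(0.7, 0.7, 0.7), radius=100.0):
    # Reuse the marker set if it was already registered, otherwise create it.
    if name not in marker_sets:
        marker_sets[name] = new_marker_set(name)
    s = marker_sets[name]
    return s.place_marker(xyz, rgb, radius)
# Example: place_particle('particle_136 geometry', (2430.93, 3145.58, 1485.27),
#                         radius=185.02)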
|
Learningtribes/edx-platform | refs/heads/master | cms/djangoapps/contentstore/features/grading.py | 44 | # pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
from lettuce import world, step
from common import *
from terrain.steps import reload_the_page
from selenium.common.exceptions import InvalidElementStateException
from contentstore.utils import reverse_course_url
from nose.tools import assert_in, assert_equal, assert_not_equal
@step(u'I am viewing the grading settings')
def view_grading_settings(step):
world.click_course_settings()
link_css = 'li.nav-course-settings-grading a'
world.css_click(link_css)
@step(u'I add "([^"]*)" new grade')
def add_grade(step, many):
grade_css = '.new-grade-button'
for i in range(int(many)):
world.css_click(grade_css)
@step(u'I delete a grade')
def delete_grade(step):
#grade_css = 'li.grade-specific-bar > a.remove-button'
#range_css = '.grade-specific-bar'
#world.css_find(range_css)[1].mouseover()
#world.css_click(grade_css)
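# Editor's note (assumption): the remove button is only revealed on hover, so
# the commented css_click path above is flaky; clicking through raw JavaScript
# below sidesteps the mouseover requirement entirely.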
world.browser.execute_script('document.getElementsByClassName("remove-button")[0].click()')
@step(u'Grade list has "([^"]*)" grades$')
def check_grade_values(step, grade_list): # pylint: disable=unused-argument
visible_list = ''.join(
[grade.text for grade in world.css_find('.letter-grade')]
)
assert_equal(visible_list, grade_list, 'Grade lists should be equal')
@step(u'I see I now have "([^"]*)" grades$')
def view_grade_slider(step, how_many):
grade_slider_css = '.grade-specific-bar'
all_grades = world.css_find(grade_slider_css)
assert_equal(len(all_grades), int(how_many))
@step(u'I move a grading section')
def move_grade_slider(step):
moveable_css = '.ui-resizable-e'
f = world.css_find(moveable_css).first
f.action_chains.drag_and_drop_by_offset(f._element, 100, 0).perform()
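# Editor's note: the jQuery-UI east resize handle is dragged 100px to the right
# through selenium ActionChains (f._element is the wrapped WebElement), which is
# what nudges the grade-range boundary checked by the following step.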
@step(u'I see that the grade range has changed')
def confirm_change(step):
range_css = '.range'
all_ranges = world.css_find(range_css)
for i in range(len(all_ranges)):
assert_not_equal(world.css_html(range_css, index=i), '0-50')
@step(u'I change assignment type "([^"]*)" to "([^"]*)"$')
def change_assignment_name(step, old_name, new_name):
name_id = '#course-grading-assignment-name'
index = get_type_index(old_name)
f = world.css_find(name_id)[index]
assert_not_equal(index, -1)
for __ in xrange(len(old_name)):
f._element.send_keys(Keys.END, Keys.BACK_SPACE)
f._element.send_keys(new_name)
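# Editor's note (assumption): the END/BACKSPACE loop above clears the field one
# character at a time instead of calling clear(), presumably so each keystroke
# fires the page's change handlers; Keys is assumed to arrive via the wildcard
# import from common.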
@step(u'I go back to the main course page')
def main_course_page(step):
main_page_link = reverse_course_url('course_handler', world.scenario_dict['COURSE'].id)
world.visit(main_page_link)
assert_in('Course Outline', world.css_text('h1.page-header'))
@step(u'I do( not)? see the assignment name "([^"]*)"$')
def see_assignment_name(step, do_not, name):
# TODO: rewrite this once grading has been added back to the course outline
pass
# assignment_menu_css = 'ul.menu > li > a'
# # First assert that it is there; it may take a bit to redraw
# assert_true(
# world.css_find(assignment_menu_css),
# msg="Could not find assignment menu"
# )
#
# assignment_menu = world.css_find(assignment_menu_css)
# allnames = [item.html for item in assignment_menu]
# if do_not:
# assert_not_in(name, allnames)
# else:
# assert_in(name, allnames)
@step(u'I delete the assignment type "([^"]*)"$')
def delete_assignment_type(step, to_delete):
delete_css = '.remove-grading-data'
world.css_click(delete_css, index=get_type_index(to_delete))
@step(u'I add a new assignment type "([^"]*)"$')
def add_assignment_type(step, new_name):
add_button_css = '.add-grading-data'
world.css_click(add_button_css)
name_id = '#course-grading-assignment-name'
new_assignment = world.css_find(name_id)[-1]
new_assignment._element.send_keys(new_name)
@step(u'I set the assignment weight to "([^"]*)"$')
def set_weight(step, weight):
weight_id = '#course-grading-assignment-gradeweight'
weight_field = world.css_find(weight_id)[-1]
old_weight = world.css_value(weight_id, -1)
for count in range(len(old_weight)):
weight_field._element.send_keys(Keys.END, Keys.BACK_SPACE)
weight_field._element.send_keys(weight)
@step(u'the assignment weight is displayed as "([^"]*)"$')
def verify_weight(step, weight):
weight_id = '#course-grading-assignment-gradeweight'
assert_equal(world.css_value(weight_id, -1), weight)
@step(u'I do not see the changes persisted on refresh$')
def changes_not_persisted(step):
reload_the_page(step)
name_id = '#course-grading-assignment-name'
assert_equal(world.css_value(name_id), 'Homework')
@step(u'I see the assignment type "(.*)"$')
def i_see_the_assignment_type(_step, name):
assignment_css = '#course-grading-assignment-name'
assignments = world.css_find(assignment_css)
types = [ele['value'] for ele in assignments]
assert_in(name, types)
@step(u'I change the highest grade range to "(.*)"$')
def change_grade_range(_step, range_name):
range_css = 'span.letter-grade'
grade = world.css_find(range_css).first
grade.value = range_name
@step(u'I see the highest grade range is "(.*)"$')
def i_see_highest_grade_range(_step, range_name):
range_css = 'span.letter-grade'
grade = world.css_find(range_css).first
assert_equal(grade.value, range_name)
@step(u'I cannot edit the "Fail" grade range$')
def cannot_edit_fail(_step):
range_css = 'span.letter-grade'
ranges = world.css_find(range_css)
assert_equal(len(ranges), 2)
assert_not_equal(ranges.last.value, 'Failure')
# try to change the grade range -- this should throw an exception
try:
ranges.last.value = 'Failure'
except InvalidElementStateException:
pass # We should get this exception on failing to edit the element
# check to be sure that nothing has changed
ranges = world.css_find(range_css)
assert_equal(len(ranges), 2)
assert_not_equal(ranges.last.value, 'Failure')
@step(u'I change the grace period to "(.*)"$')
def i_change_grace_period(_step, grace_period):
grace_period_css = '#course-grading-graceperiod'
ele = world.css_find(grace_period_css).first
# Sometimes it takes a moment for the JavaScript
# to populate the field. If we don't wait for
# this to happen, then we can end up with
# an invalid value (e.g. "00:0048:00")
# which prevents us from saving.
assert_true(world.css_has_value(grace_period_css, "00:00"))
# Set the new grace period
ele.value = grace_period
@step(u'I see the grace period is "(.*)"$')
def the_grace_period_is(_step, grace_period):
grace_period_css = '#course-grading-graceperiod'
# The default value is 00:00
# so we need to wait for it to change
world.wait_for(
lambda _: world.css_has_value(grace_period_css, grace_period)
)
def get_type_index(name):
name_id = '#course-grading-assignment-name'
all_types = world.css_find(name_id)
for index in range(len(all_types)):
if world.css_value(name_id, index=index) == name:
return index
return -1
|
pjryan126/solid-start-careers | refs/heads/master | store/api/zillow/venv/lib/python2.7/site-packages/pip/wheel.py | 51 | """
Support for installing and building the "wheel" binary package format.
"""
from __future__ import absolute_import
import compileall
import csv
import errno
import functools
import hashlib
import logging
import os
import os.path
import re
import shutil
import stat
import sys
import tempfile
import warnings
from base64 import urlsafe_b64encode
from email.parser import Parser
from pip._vendor.six import StringIO
import pip
from pip.compat import expanduser
from pip.download import path_to_url, unpack_url
from pip.exceptions import (
InstallationError, InvalidWheelFilename, UnsupportedWheel)
from pip.locations import distutils_scheme, PIP_DELETE_MARKER_FILENAME
from pip import pep425tags
from pip.utils import (
call_subprocess, ensure_dir, captured_stdout, rmtree, read_chunks,
)
from pip.utils.ui import open_spinner
from pip.utils.logging import indent_log
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.six.moves import configparser
wheel_ext = '.whl'
VERSION_COMPATIBLE = (1, 0)
logger = logging.getLogger(__name__)
class WheelCache(object):
"""A cache of wheels for future installs."""
def __init__(self, cache_dir, format_control):
"""Create a wheel cache.
:param cache_dir: The root of the cache.
:param format_control: A pip.index.FormatControl object to limit
binaries being read from the cache.
"""
self._cache_dir = expanduser(cache_dir) if cache_dir else None
self._format_control = format_control
def cached_wheel(self, link, package_name):
return cached_wheel(
self._cache_dir, link, self._format_control, package_name)
def _cache_for_link(cache_dir, link):
"""
Return a directory to store cached wheels in for link.
Because there are M wheels for any one sdist, we provide a directory
to cache them in, and then consult that directory when looking up
cache hits.
We only insert things into the cache if they have plausible version
numbers, so that we don't contaminate the cache with things that were not
unique. E.g. ./package might have dozens of installs done for it and build
a version of 0.0...and if we built and cached a wheel, we'd end up using
the same wheel even if the source has been edited.
:param cache_dir: The cache_dir being used by pip.
:param link: The link of the sdist for which this will cache wheels.
"""
    # We want to generate a URL to use as our cache key; we don't want to
    # just re-use the link's URL because it might have other items in the
    # fragment and we don't care about those.
key_parts = [link.url_without_fragment]
if link.hash_name is not None and link.hash is not None:
key_parts.append("=".join([link.hash_name, link.hash]))
key_url = "#".join(key_parts)
    # Encode our key URL with sha224; we pick sha224 because it has similar
    # security properties to sha256, but a shorter total output (and is thus
    # marginally less secure). That difference doesn't matter for our use
    # case here.
hashed = hashlib.sha224(key_url.encode()).hexdigest()
    # We nest the directories a few levels deep to avoid a huge number of
    # top-level directories, since some filesystems limit how many entries a
    # single directory can hold.
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
# Inside of the base location for cached wheels, expand our parts and join
# them all together.
return os.path.join(cache_dir, "wheels", *parts)
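# Illustrative sketch only (not part of pip): how the cache key and on-disk
# layout are derived for a made-up sdist link. The fake link class below just
# mimics the three attributes that _cache_for_link reads.
def _demo_cache_for_link():  # pragma: no cover - illustration only
    class _FakeLink(object):
        url_without_fragment = 'https://example.com/pkg-1.0.tar.gz'
        hash_name = 'sha256'
        hash = 'abc123'
    # The key becomes 'https://example.com/pkg-1.0.tar.gz#sha256=abc123',
    # is sha224-hashed, then split into 2/2/2/rest path segments.
    return _cache_for_link('/tmp/pip-cache', _FakeLink())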
def cached_wheel(cache_dir, link, format_control, package_name):
if not cache_dir:
return link
if not link:
return link
if link.is_wheel:
return link
if not link.is_artifact:
return link
if not package_name:
return link
canonical_name = canonicalize_name(package_name)
formats = pip.index.fmt_ctl_formats(format_control, canonical_name)
if "binary" not in formats:
return link
root = _cache_for_link(cache_dir, link)
try:
wheel_names = os.listdir(root)
except OSError as e:
if e.errno in (errno.ENOENT, errno.ENOTDIR):
return link
raise
candidates = []
for wheel_name in wheel_names:
try:
wheel = Wheel(wheel_name)
except InvalidWheelFilename:
continue
if not wheel.supported():
# Built for a different python/arch/etc
continue
candidates.append((wheel.support_index_min(), wheel_name))
if not candidates:
return link
candidates.sort()
path = os.path.join(root, candidates[0][1])
return pip.index.Link(path_to_url(path))
def rehash(path, algo='sha256', blocksize=1 << 20):
"""Return (hash, length) for path using hashlib.new(algo)"""
h = hashlib.new(algo)
length = 0
with open(path, 'rb') as f:
for block in read_chunks(f, size=blocksize):
length += len(block)
h.update(block)
digest = 'sha256=' + urlsafe_b64encode(
h.digest()
).decode('latin1').rstrip('=')
return (digest, length)
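# Illustrative sketch only (not part of pip): rehash a throwaway file to show
# the 'sha256=<urlsafe-b64>' digest / byte-length pair recorded in RECORD.
def _demo_rehash():  # pragma: no cover - illustration only
    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as f:
        f.write(b'hello wheel')
    digest, length = rehash(path)
    os.unlink(path)
    return digest, length  # ('sha256=...', 11)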
def open_for_csv(name, mode):
if sys.version_info[0] < 3:
nl = {}
bin = 'b'
else:
nl = {'newline': ''}
bin = ''
return open(name, mode + bin, **nl)
def fix_script(path):
"""Replace #!python with #!/path/to/python
Return True if file was changed."""
# XXX RECORD hashes will need to be updated
if os.path.isfile(path):
with open(path, 'rb') as script:
firstline = script.readline()
if not firstline.startswith(b'#!python'):
return False
exename = sys.executable.encode(sys.getfilesystemencoding())
firstline = b'#!' + exename + os.linesep.encode("ascii")
rest = script.read()
with open(path, 'wb') as script:
script.write(firstline)
script.write(rest)
return True
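# Illustrative sketch only (not part of pip): fix_script rewrites a '#!python'
# stub shebang to point at the running interpreter.
def _demo_fix_script():  # pragma: no cover - illustration only
    fd, path = tempfile.mkstemp(suffix='-script.py')
    with os.fdopen(fd, 'wb') as f:
        f.write(b'#!python\nprint("hi")\n')
    changed = fix_script(path)  # first line becomes b'#!<sys.executable>'
    os.unlink(path)
    return changed  # True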
dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
\.dist-info$""", re.VERBOSE)
def root_is_purelib(name, wheeldir):
"""
Return True if the extracted wheel in wheeldir should go into purelib.
"""
name_folded = name.replace("-", "_")
for item in os.listdir(wheeldir):
match = dist_info_re.match(item)
if match and match.group('name') == name_folded:
with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
for line in wheel:
line = line.lower().rstrip()
if line == "root-is-purelib: true":
return True
return False
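# Illustrative sketch only (not part of pip): a minimal extracted-wheel layout
# whose WHEEL metadata marks it purelib. All names here are made up.
def _demo_root_is_purelib():  # pragma: no cover - illustration only
    wheeldir = tempfile.mkdtemp()
    info = os.path.join(wheeldir, 'demo-1.0.dist-info')
    os.mkdir(info)
    with open(os.path.join(info, 'WHEEL'), 'w') as f:
        f.write('Wheel-Version: 1.0\nRoot-Is-Purelib: true\n')
    result = root_is_purelib('demo', wheeldir)
    shutil.rmtree(wheeldir)
    return result  # True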
def get_entrypoints(filename):
if not os.path.exists(filename):
return {}, {}
# This is done because you can pass a string to entry_points wrappers which
# means that they may or may not be valid INI files. The attempt here is to
# strip leading and trailing whitespace in order to make them valid INI
# files.
with open(filename) as fp:
data = StringIO()
for line in fp:
data.write(line.strip())
data.write("\n")
data.seek(0)
cp = configparser.RawConfigParser()
cp.optionxform = lambda option: option
cp.readfp(data)
console = {}
gui = {}
if cp.has_section('console_scripts'):
console = dict(cp.items('console_scripts'))
if cp.has_section('gui_scripts'):
gui = dict(cp.items('gui_scripts'))
return console, gui
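# Illustrative sketch only (not part of pip): entry_points.txt content with
# stray leading whitespace still parses thanks to the line-stripping above.
def _demo_get_entrypoints():  # pragma: no cover - illustration only
    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as f:
        f.write('  [console_scripts]\n  pip = pip:main\n')
    console, gui = get_entrypoints(path)
    os.unlink(path)
    return console, gui  # ({'pip': 'pip:main'}, {})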
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
pycompile=True, scheme=None, isolated=False, prefix=None):
"""Install a wheel"""
if not scheme:
scheme = distutils_scheme(
name, user=user, home=home, root=root, isolated=isolated,
prefix=prefix,
)
if root_is_purelib(name, wheeldir):
lib_dir = scheme['purelib']
else:
lib_dir = scheme['platlib']
info_dir = []
data_dirs = []
source = wheeldir.rstrip(os.path.sep) + os.path.sep
# Record details of the files moved
# installed = files copied from the wheel to the destination
# changed = files changed while installing (scripts #! line typically)
# generated = files newly generated during the install (script wrappers)
installed = {}
changed = set()
generated = []
# Compile all of the pyc files that we're going to be installing
if pycompile:
with captured_stdout() as stdout:
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
compileall.compile_dir(source, force=True, quiet=True)
logger.debug(stdout.getvalue())
def normpath(src, p):
return os.path.relpath(src, p).replace(os.path.sep, '/')
def record_installed(srcfile, destfile, modified=False):
"""Map archive RECORD paths to installation RECORD paths."""
oldpath = normpath(srcfile, wheeldir)
newpath = normpath(destfile, lib_dir)
installed[oldpath] = newpath
if modified:
changed.add(destfile)
def clobber(source, dest, is_base, fixer=None, filter=None):
ensure_dir(dest) # common for the 'include' path
for dir, subdirs, files in os.walk(source):
basedir = dir[len(source):].lstrip(os.path.sep)
destdir = os.path.join(dest, basedir)
if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
continue
for s in subdirs:
destsubdir = os.path.join(dest, basedir, s)
if is_base and basedir == '' and destsubdir.endswith('.data'):
data_dirs.append(s)
continue
elif (is_base and
s.endswith('.dist-info') and
# is self.req.project_name case preserving?
s.lower().startswith(
req.project_name.replace('-', '_').lower())):
assert not info_dir, 'Multiple .dist-info directories'
info_dir.append(destsubdir)
for f in files:
# Skip unwanted files
if filter and filter(f):
continue
srcfile = os.path.join(dir, f)
destfile = os.path.join(dest, basedir, f)
# directory creation is lazy and after the file filtering above
# to ensure we don't install empty dirs; empty dirs can't be
# uninstalled.
ensure_dir(destdir)
# We use copyfile (not move, copy, or copy2) to be extra sure
# that we are not moving directories over (copyfile fails for
# directories) as well as to ensure that we are not copying
# over any metadata because we want more control over what
# metadata we actually copy over.
shutil.copyfile(srcfile, destfile)
# Copy over the metadata for the file, currently this only
# includes the atime and mtime.
st = os.stat(srcfile)
if hasattr(os, "utime"):
os.utime(destfile, (st.st_atime, st.st_mtime))
# If our file is executable, then make our destination file
# executable.
if os.access(srcfile, os.X_OK):
st = os.stat(srcfile)
permissions = (
st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
)
os.chmod(destfile, permissions)
changed = False
if fixer:
changed = fixer(destfile)
record_installed(srcfile, destfile, changed)
clobber(source, lib_dir, True)
assert info_dir, "%s .dist-info directory not found" % req
# Get the defined entry points
ep_file = os.path.join(info_dir[0], 'entry_points.txt')
console, gui = get_entrypoints(ep_file)
def is_entrypoint_wrapper(name):
# EP, EP.exe and EP-script.py are scripts generated for
# entry point EP by setuptools
if name.lower().endswith('.exe'):
matchname = name[:-4]
elif name.lower().endswith('-script.py'):
matchname = name[:-10]
elif name.lower().endswith(".pya"):
matchname = name[:-4]
else:
matchname = name
# Ignore setuptools-generated scripts
return (matchname in console or matchname in gui)
for datadir in data_dirs:
fixer = None
filter = None
for subdir in os.listdir(os.path.join(wheeldir, datadir)):
fixer = None
if subdir == 'scripts':
fixer = fix_script
filter = is_entrypoint_wrapper
source = os.path.join(wheeldir, datadir, subdir)
dest = scheme[subdir]
clobber(source, dest, False, fixer=fixer, filter=filter)
maker = ScriptMaker(None, scheme['scripts'])
# Ensure old scripts are overwritten.
# See https://github.com/pypa/pip/issues/1800
maker.clobber = True
# Ensure we don't generate any variants for scripts because this is almost
# never what somebody wants.
# See https://bitbucket.org/pypa/distlib/issue/35/
maker.variants = set(('', ))
# This is required because otherwise distlib creates scripts that are not
# executable.
# See https://bitbucket.org/pypa/distlib/issue/32/
maker.set_mode = True
# Simplify the script and fix the fact that the default script swallows
# every single stack trace.
# See https://bitbucket.org/pypa/distlib/issue/34/
# See https://bitbucket.org/pypa/distlib/issue/33/
def _get_script_text(entry):
if entry.suffix is None:
raise InstallationError(
"Invalid script entry point: %s for req: %s - A callable "
"suffix is required. Cf https://packaging.python.org/en/"
"latest/distributing.html#console-scripts for more "
"information." % (entry, req)
)
return maker.script_template % {
"module": entry.prefix,
"import_name": entry.suffix.split(".")[0],
"func": entry.suffix,
}
maker._get_script_text = _get_script_text
maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys
from %(module)s import %(import_name)s
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(%(func)s())
"""
# Special case pip and setuptools to generate versioned wrappers
#
# The issue is that some projects (specifically, pip and setuptools) use
# code in setup.py to create "versioned" entry points - pip2.7 on Python
# 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
# the wheel metadata at build time, and so if the wheel is installed with
# a *different* version of Python the entry points will be wrong. The
# correct fix for this is to enhance the metadata to be able to describe
# such versioned entry points, but that won't happen till Metadata 2.0 is
# available.
# In the meantime, projects using versioned entry points will either have
# incorrect versioned entry points, or they will not be able to distribute
# "universal" wheels (i.e., they will need a wheel per Python version).
#
# Because setuptools and pip are bundled with _ensurepip and virtualenv,
# we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
# override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
# is available.
#
    # To add to the level of hack in this section of code, in order to
    # support ensurepip this code will look for an ``ENSUREPIP_OPTIONS``
    # environment variable which will control which versioned scripts get
    # installed.
#
# ENSUREPIP_OPTIONS=altinstall
# - Only pipX.Y and easy_install-X.Y will be generated and installed
# ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this is the behavior whenever ENSUREPIP_OPTIONS is set to any
    #     value other than altinstall
# DEFAULT
# - The default behavior is to install pip, pipX, pipX.Y, easy_install
# and easy_install-X.Y.
pip_script = console.pop('pip', None)
if pip_script:
if "ENSUREPIP_OPTIONS" not in os.environ:
spec = 'pip = ' + pip_script
generated.extend(maker.make(spec))
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
spec = 'pip%s = %s' % (sys.version[:1], pip_script)
generated.extend(maker.make(spec))
spec = 'pip%s = %s' % (sys.version[:3], pip_script)
generated.extend(maker.make(spec))
# Delete any other versioned pip entry points
pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
for k in pip_ep:
del console[k]
easy_install_script = console.pop('easy_install', None)
if easy_install_script:
if "ENSUREPIP_OPTIONS" not in os.environ:
spec = 'easy_install = ' + easy_install_script
generated.extend(maker.make(spec))
spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
generated.extend(maker.make(spec))
# Delete any other versioned easy_install entry points
easy_install_ep = [
k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
]
for k in easy_install_ep:
del console[k]
# Generate the console and GUI entry points specified in the wheel
if len(console) > 0:
generated.extend(
maker.make_multiple(['%s = %s' % kv for kv in console.items()])
)
if len(gui) > 0:
generated.extend(
maker.make_multiple(
['%s = %s' % kv for kv in gui.items()],
{'gui': True}
)
)
# Record pip as the installer
installer = os.path.join(info_dir[0], 'INSTALLER')
temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
with open(temp_installer, 'wb') as installer_file:
installer_file.write(b'pip\n')
shutil.move(temp_installer, installer)
generated.append(installer)
# Record details of all files installed
record = os.path.join(info_dir[0], 'RECORD')
temp_record = os.path.join(info_dir[0], 'RECORD.pip')
with open_for_csv(record, 'r') as record_in:
with open_for_csv(temp_record, 'w+') as record_out:
reader = csv.reader(record_in)
writer = csv.writer(record_out)
for row in reader:
row[0] = installed.pop(row[0], row[0])
if row[0] in changed:
row[1], row[2] = rehash(row[0])
writer.writerow(row)
for f in generated:
h, l = rehash(f)
writer.writerow((normpath(f, lib_dir), h, l))
for f in installed:
writer.writerow((installed[f], '', ''))
shutil.move(temp_record, record)
def _unique(fn):
@functools.wraps(fn)
def unique(*args, **kw):
seen = set()
for item in fn(*args, **kw):
if item not in seen:
seen.add(item)
yield item
return unique
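# Illustrative sketch only (not part of pip): _unique turns a generator that
# may repeat items into one that yields each item at most once.
def _demo_unique():  # pragma: no cover - illustration only
    @_unique
    def numbers():
        for n in (1, 1, 2, 3, 3):
            yield n
    return list(numbers())  # [1, 2, 3]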
# TODO: this goes somewhere besides the wheel module
@_unique
def uninstallation_paths(dist):
"""
Yield all the uninstallation paths for dist based on RECORD-without-.pyc
Yield paths to all the files in RECORD. For each .py file in RECORD, add
the .pyc in the same directory.
UninstallPathSet.add() takes care of the __pycache__ .pyc.
"""
from pip.utils import FakeFile # circular import
r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
for row in r:
path = os.path.join(dist.location, row[0])
yield path
if path.endswith('.py'):
dn, fn = os.path.split(path)
base = fn[:-3]
path = os.path.join(dn, base + '.pyc')
yield path
def wheel_version(source_dir):
"""
Return the Wheel-Version of an extracted wheel, if possible.
Otherwise, return False if we couldn't parse / extract it.
"""
try:
dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]
wheel_data = dist.get_metadata('WHEEL')
wheel_data = Parser().parsestr(wheel_data)
version = wheel_data['Wheel-Version'].strip()
version = tuple(map(int, version.split('.')))
return version
except:
return False
def check_compatibility(version, name):
"""
Raises errors or warns if called with an incompatible Wheel-Version.
Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g. 2.0 > 1.1), and warn when
    installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).
version: a 2-tuple representing a Wheel-Version (Major, Minor)
name: name of wheel or package to raise exception about
:raises UnsupportedWheel: when an incompatible Wheel-Version is given
"""
if not version:
raise UnsupportedWheel(
"%s is in an unsupported or invalid wheel" % name
)
if version[0] > VERSION_COMPATIBLE[0]:
raise UnsupportedWheel(
"%s's Wheel-Version (%s) is not compatible with this version "
"of pip" % (name, '.'.join(map(str, version)))
)
elif version > VERSION_COMPATIBLE:
logger.warning(
'Installing from a newer Wheel-Version (%s)',
'.'.join(map(str, version)),
)
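# Illustrative sketch only (not part of pip): what check_compatibility does
# for each kind of version against VERSION_COMPATIBLE == (1, 0).
def _demo_check_compatibility():  # pragma: no cover - illustration only
    check_compatibility((1, 0), 'example')   # same series: passes silently
    check_compatibility((1, 9), 'example')   # newer minor: logs a warning
    try:
        check_compatibility((2, 0), 'example')  # newer major: raises
    except UnsupportedWheel:
        pass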
class Wheel(object):
"""A wheel file"""
# TODO: maybe move the install code into this class
wheel_file_re = re.compile(
r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
\.whl|\.dist-info)$""",
re.VERBOSE
)
def __init__(self, filename):
"""
:raises InvalidWheelFilename: when the filename is invalid for a wheel
"""
wheel_info = self.wheel_file_re.match(filename)
if not wheel_info:
raise InvalidWheelFilename(
"%s is not a valid wheel filename." % filename
)
self.filename = filename
self.name = wheel_info.group('name').replace('_', '-')
# we'll assume "_" means "-" due to wheel naming scheme
# (https://github.com/pypa/pip/issues/1150)
self.version = wheel_info.group('ver').replace('_', '-')
self.pyversions = wheel_info.group('pyver').split('.')
self.abis = wheel_info.group('abi').split('.')
self.plats = wheel_info.group('plat').split('.')
# All the tag combinations from this file
self.file_tags = set(
(x, y, z) for x in self.pyversions
for y in self.abis for z in self.plats
)
def support_index_min(self, tags=None):
"""
Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list, e.g. if there are 8 supported tags
        and one of the file tags is first in the list, then return 0. Returns
        None if the wheel is not supported.
"""
if tags is None: # for mock
tags = pep425tags.supported_tags
indexes = [tags.index(c) for c in self.file_tags if c in tags]
return min(indexes) if indexes else None
def supported(self, tags=None):
"""Is this wheel supported on this system?"""
if tags is None: # for mock
tags = pep425tags.supported_tags
return bool(set(tags).intersection(self.file_tags))
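# Illustrative sketch only (not part of pip): parsing a typical universal
# wheel filename with the Wheel class above. The filename is a made-up example.
def _demo_wheel_parse():  # pragma: no cover - illustration only
    w = Wheel('pip-8.1.2-py2.py3-none-any.whl')
    assert w.name == 'pip' and w.version == '8.1.2'
    # file_tags is the cross product of pyversions x abis x platforms:
    # {('py2', 'none', 'any'), ('py3', 'none', 'any')}
    return w.file_tags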
class WheelBuilder(object):
"""Build wheels from a RequirementSet."""
def __init__(self, requirement_set, finder, build_options=None,
global_options=None):
self.requirement_set = requirement_set
self.finder = finder
self._cache_root = requirement_set._wheel_cache._cache_dir
self._wheel_dir = requirement_set.wheel_download_dir
self.build_options = build_options or []
self.global_options = global_options or []
def _build_one(self, req, output_dir, python_tag=None):
"""Build one wheel.
:return: The filename of the built wheel, or None if the build failed.
"""
tempd = tempfile.mkdtemp('pip-wheel-')
try:
if self.__build_one(req, tempd, python_tag=python_tag):
try:
wheel_name = os.listdir(tempd)[0]
wheel_path = os.path.join(output_dir, wheel_name)
shutil.move(os.path.join(tempd, wheel_name), wheel_path)
logger.info('Stored in directory: %s', output_dir)
return wheel_path
except:
pass
# Ignore return, we can't do anything else useful.
self._clean_one(req)
return None
finally:
rmtree(tempd)
def _base_setup_args(self, req):
return [
sys.executable, "-u", '-c',
SETUPTOOLS_SHIM % req.setup_py
] + list(self.global_options)
def __build_one(self, req, tempd, python_tag=None):
base_args = self._base_setup_args(req)
spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
with open_spinner(spin_message) as spinner:
logger.debug('Destination directory: %s', tempd)
wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
+ self.build_options
if python_tag is not None:
wheel_args += ["--python-tag", python_tag]
try:
call_subprocess(wheel_args, cwd=req.setup_py_dir,
show_stdout=False, spinner=spinner)
return True
except:
spinner.finish("error")
logger.error('Failed building wheel for %s', req.name)
return False
def _clean_one(self, req):
base_args = self._base_setup_args(req)
logger.info('Running setup.py clean for %s', req.name)
clean_args = base_args + ['clean', '--all']
try:
call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
return True
except:
logger.error('Failed cleaning build dir for %s', req.name)
return False
def build(self, autobuilding=False):
"""Build wheels.
        :param autobuilding: If True, replace the sdist we built from with the
            newly built wheel, in preparation for installation.
:return: True if all the wheels built correctly.
"""
assert self._wheel_dir or (autobuilding and self._cache_root)
        # unpack sdists and construct the req set
self.requirement_set.prepare_files(self.finder)
reqset = self.requirement_set.requirements.values()
buildset = []
for req in reqset:
if req.constraint:
continue
if req.is_wheel:
if not autobuilding:
logger.info(
'Skipping %s, due to already being wheel.', req.name)
elif req.editable:
if not autobuilding:
logger.info(
'Skipping bdist_wheel for %s, due to being editable',
req.name)
elif autobuilding and req.link and not req.link.is_artifact:
pass
elif autobuilding and not req.source_dir:
pass
else:
if autobuilding:
link = req.link
base, ext = link.splitext()
if pip.index.egg_info_matches(base, None, link) is None:
# Doesn't look like a package - don't autobuild a wheel
# because we'll have no way to lookup the result sanely
continue
if "binary" not in pip.index.fmt_ctl_formats(
self.finder.format_control,
canonicalize_name(req.name)):
logger.info(
"Skipping bdist_wheel for %s, due to binaries "
"being disabled for it.", req.name)
continue
buildset.append(req)
if not buildset:
return True
# Build the wheels.
logger.info(
'Building wheels for collected packages: %s',
', '.join([req.name for req in buildset]),
)
with indent_log():
build_success, build_failure = [], []
for req in buildset:
python_tag = None
if autobuilding:
python_tag = pep425tags.implementation_tag
output_dir = _cache_for_link(self._cache_root, req.link)
try:
ensure_dir(output_dir)
except OSError as e:
logger.warn("Building wheel for %s failed: %s",
req.name, e)
build_failure.append(req)
continue
else:
output_dir = self._wheel_dir
wheel_file = self._build_one(
req, output_dir,
python_tag=python_tag,
)
if wheel_file:
build_success.append(req)
if autobuilding:
# XXX: This is mildly duplicative with prepare_files,
# but not close enough to pull out to a single common
# method.
# The code below assumes temporary source dirs -
# prevent it doing bad things.
if req.source_dir and not os.path.exists(os.path.join(
req.source_dir, PIP_DELETE_MARKER_FILENAME)):
raise AssertionError(
"bad source dir - missing marker")
# Delete the source we built the wheel from
req.remove_temporary_source()
# set the build directory again - name is known from
# the work prepare_files did.
req.source_dir = req.build_location(
self.requirement_set.build_dir)
# Update the link for this.
req.link = pip.index.Link(
path_to_url(wheel_file))
assert req.link.is_wheel
# extract the wheel into the dir
unpack_url(
req.link, req.source_dir, None, False,
session=self.requirement_set.session)
else:
build_failure.append(req)
# notify success/failure
if build_success:
logger.info(
'Successfully built %s',
' '.join([req.name for req in build_success]),
)
if build_failure:
logger.info(
'Failed to build %s',
' '.join([req.name for req in build_failure]),
)
# Return True if all builds were successful
return len(build_failure) == 0
|
willthames/ansible | refs/heads/devel | lib/ansible/modules/network/aci/aci_rest.py | 9 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2015 Jason Edelman <jason@networktocode.com>, Network to Code, LLC
# Copyright 2017 Dag Wieers <dag@wieers.com>
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
module: aci_rest
short_description: Direct access to the Cisco APIC REST API
description:
- Enables the management of the Cisco ACI fabric through direct access to the Cisco APIC REST API.
- More information regarding the Cisco APIC REST API is available from
U(http://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/2-x/rest_cfg/2_1_x/b_Cisco_APIC_REST_API_Configuration_Guide.html).
author:
- Jason Edelman (@jedelman8)
- Dag Wieers (@dagwieers)
version_added: '2.4'
requirements:
- lxml (when using XML content)
- xmljson >= 0.1.8 (when using XML content)
- python 2.7+ (when using xmljson)
extends_documentation_fragment: aci
options:
method:
description:
- The HTTP method of the request.
    - C(delete) is typically used for deleting objects.
    - C(get) is typically used for querying objects.
    - C(post) is typically used for modifying objects.
required: true
default: get
choices: [ delete, get, post ]
aliases: [ action ]
path:
description:
- URI being used to execute API calls.
- Must end in C(.xml) or C(.json).
required: true
aliases: [ uri ]
content:
description:
- When used instead of C(src), sets the content of the API request directly.
- This may be convenient to template simple requests, for anything complex use the M(template) module.
src:
description:
    - Name of the absolute path of the file that includes the body
      of the HTTP request being sent to the ACI fabric.
aliases: [ config_file ]
notes:
- When using inline JSON (via C(content)), the YAML value must start with a blank line.
  Otherwise the JSON statement will be parsed as a YAML mapping (dictionary) and translated
  into invalid JSON as a result.
- XML payloads require the C(lxml) and C(xmljson) python libraries. For JSON payloads nothing special is needed.
'''
EXAMPLES = r'''
- name: Add a tenant
aci_rest:
hostname: '{{ inventory_hostname }}'
username: '{{ aci_username }}'
password: '{{ aci_password }}'
method: post
path: /api/mo/uni.xml
src: /home/cisco/ansible/aci/configs/aci_config.xml
delegate_to: localhost
- name: Get tenants
aci_rest:
hostname: '{{ inventory_hostname }}'
username: '{{ aci_username }}'
password: '{{ aci_password }}'
method: get
path: /api/node/class/fvTenant.json
delegate_to: localhost
- name: Configure contracts
aci_rest:
hostname: '{{ inventory_hostname }}'
username: '{{ aci_username }}'
password: '{{ aci_password }}'
method: post
path: /api/mo/uni.xml
src: /home/cisco/ansible/aci/configs/contract_config.xml
delegate_to: localhost
- name: Register leaves and spines
aci_rest:
hostname: '{{ inventory_hostname }}'
username: '{{ aci_username }}'
password: '{{ aci_password }}'
validate_certs: no
method: post
path: /api/mo/uni/controller/nodeidentpol.xml
content: |
<fabricNodeIdentPol>
<fabricNodeIdentP name="{{ item.name }}" nodeId="{{ item.nodeid }}" status="{{ item.status }}" serial="{{ item.serial }}"/>
</fabricNodeIdentPol>
with_items:
- '{{ apic_leavesspines }}'
delegate_to: localhost
- name: Wait for all controllers to become ready
aci_rest:
hostname: '{{ inventory_hostname }}'
username: '{{ aci_username }}'
password: '{{ aci_password }}'
validate_certs: no
path: /api/node/class/topSystem.json?query-target-filter=eq(topSystem.role,"controller")
register: apics
until: "'totalCount' in apics and apics.totalCount|int >= groups['apic']|count"
retries: 120
delay: 30
delegate_to: localhost
run_once: yes
'''
RETURN = r'''
error_code:
description: The REST ACI return code, useful for troubleshooting on failure
returned: always
type: int
sample: 122
error_text:
description: The REST ACI descriptive text, useful for troubleshooting on failure
returned: always
type: string
sample: unknown managed object class foo
imdata:
  description: Converted output returned by the APIC REST API (register this for post-processing)
returned: always
type: string
sample: [{"error": {"attributes": {"code": "122", "text": "unknown managed object class foo"}}}]
payload:
  description: The (templated) payload sent to the APIC REST API (xml or json)
returned: always
type: string
sample: '<foo bar="boo"/>'
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
response:
description: HTTP response string
returned: always
type: string
sample: 'HTTP Error 400: Bad Request'
status:
description: HTTP status code
returned: always
type: int
sample: 400
totalCount:
description: Number of items in the imdata array
returned: always
type: string
sample: '0'
'''
import json
import os
# Optional, only used for XML payload
try:
import lxml.etree
HAS_LXML_ETREE = True
except ImportError:
HAS_LXML_ETREE = False
# Optional, only used for XML payload
try:
from xmljson import cobra
HAS_XMLJSON_COBRA = True
except ImportError:
HAS_XMLJSON_COBRA = False
# from ansible.module_utils.aci import aci_login
from ansible.module_utils.basic import AnsibleModule, get_exception
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_bytes
aci_argument_spec = dict(
hostname=dict(type='str', required=True, aliases=['host']),
username=dict(type='str', default='admin', aliases=['user']),
password=dict(type='str', required=True, no_log=True),
protocol=dict(type='str'), # Deprecated in v2.8
timeout=dict(type='int', default=30),
use_ssl=dict(type='bool', default=True),
validate_certs=dict(type='bool', default=True),
)
def aci_login(module, result=None):
    ''' Log in to APIC '''
    # Avoid the mutable-default-argument pitfall; callers may pass their own dict
    if result is None:
        result = dict()
# Set protocol based on use_ssl parameter
if module.params['protocol'] is None:
module.params['protocol'] = 'https' if module.params.get('use_ssl', True) else 'http'
# Perform login request
url = '%(protocol)s://%(hostname)s/api/aaaLogin.json' % module.params
data = {'aaaUser': {'attributes': {'name': module.params['username'], 'pwd': module.params['password']}}}
resp, auth = fetch_url(module, url, data=json.dumps(data), method="POST", timeout=module.params['timeout'])
# Handle APIC response
if auth['status'] != 200:
try:
result.update(aci_response(auth['body'], 'json'))
result['msg'] = 'Authentication failed: %(error_code)s %(error_text)s' % result
except KeyError:
result['msg'] = '%(msg)s for %(url)s' % auth
result['response'] = auth['msg']
result['status'] = auth['status']
module.fail_json(**result)
return resp
def aci_response(rawoutput, rest_type='xml'):
''' Handle APIC response output '''
result = dict()
if rest_type == 'json':
# Use APIC response as module output
try:
result = json.loads(rawoutput)
except:
e = get_exception()
# Expose RAW output for troubleshooting
result['error_code'] = -1
result['error_text'] = "Unable to parse output as JSON, see 'raw' output. %s" % e
result['raw'] = rawoutput
return result
else:
# NOTE: The XML-to-JSON conversion is using the "Cobra" convention
xmldata = None
try:
xml = lxml.etree.fromstring(to_bytes(rawoutput))
xmldata = cobra.data(xml)
except:
e = get_exception()
# Expose RAW output for troubleshooting
result['error_code'] = -1
result['error_text'] = "Unable to parse output as XML, see 'raw' output. %s" % e
result['raw'] = rawoutput
return result
# Reformat as ACI does for JSON API output
if xmldata and 'imdata' in xmldata:
if 'children' in xmldata['imdata']:
result['imdata'] = xmldata['imdata']['children']
else:
result['imdata'] = dict()
result['totalCount'] = xmldata['imdata']['attributes']['totalCount']
# Handle possible APIC error information
try:
result['error_code'] = result['imdata'][0]['error']['attributes']['code']
result['error_text'] = result['imdata'][0]['error']['attributes']['text']
except KeyError:
result['error_code'] = 0
result['error_text'] = 'Success'
return result
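# Illustrative sketch only (not part of the module): feeding a typical APIC
# JSON error body through aci_response(). The payload below is a made-up
# example of the structure APIC returns.
def _demo_aci_response():  # pragma: no cover - illustration only
    raw = ('{"totalCount": "1", "imdata": [{"error": {"attributes": '
           '{"code": "122", "text": "unknown managed object class foo"}}}]}')
    result = aci_response(raw, rest_type='json')
    return result['imdata'][0]['error']['attributes']['code']  # '122'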
def main():
argument_spec = dict(
path=dict(type='str', required=True, aliases=['uri']),
method=dict(type='str', default='get', choices=['delete', 'get', 'post'], aliases=['action']),
src=dict(type='path', aliases=['config_file']),
content=dict(type='str'),
)
argument_spec.update(aci_argument_spec)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive=[['content', 'src']],
supports_check_mode=True,
)
hostname = module.params['hostname']
username = module.params['username']
password = module.params['password']
path = module.params['path']
content = module.params['content']
src = module.params['src']
protocol = module.params['protocol']
use_ssl = module.params['use_ssl']
method = module.params['method']
timeout = module.params['timeout']
result = dict(
changed=False,
payload='',
)
# Report missing file
file_exists = False
if src:
if os.path.isfile(src):
file_exists = True
else:
module.fail_json(msg='Cannot find/access src:\n%s' % src)
# Find request type
if path.find('.xml') != -1:
rest_type = 'xml'
if not HAS_LXML_ETREE:
module.fail_json(msg='The lxml python library is missing, or lacks etree support.')
if not HAS_XMLJSON_COBRA:
module.fail_json(msg='The xmljson python library is missing, or lacks cobra support.')
elif path.find('.json') != -1:
rest_type = 'json'
else:
module.fail_json(msg='Failed to find REST API content type (neither .xml nor .json).')
# Set protocol for further use
if protocol is None:
protocol = 'https' if use_ssl else 'http'
else:
module.deprecate("Parameter 'protocol' is deprecated, please use 'use_ssl' instead.", 2.8)
# Perform login first
auth = aci_login(module, result)
# Prepare request data
if content:
# We include the payload as it may be templated
result['payload'] = content
elif file_exists:
with open(src, 'r') as config_object:
# TODO: Would be nice to template this, requires action-plugin
result['payload'] = config_object.read()
# Ensure changes are reported
if method in ('delete', 'post'):
# FIXME: Hardcoding changed is not idempotent
result['changed'] = True
# In check_mode we assume it works, but we don't actually perform the requested change
        # TODO: Could we turn this request into a GET instead?
if module.check_mode:
module.exit_json(response='OK (Check mode)', status=200, **result)
else:
result['changed'] = False
# Perform actual request using auth cookie
url = '%s://%s/%s' % (protocol, hostname, path.lstrip('/'))
headers = dict(Cookie=auth.headers['Set-Cookie'])
resp, info = fetch_url(module, url, data=result['payload'], method=method.upper(), timeout=timeout, headers=headers)
result['response'] = info['msg']
result['status'] = info['status']
# Report failure
if info['status'] != 200:
try:
result.update(aci_response(info['body'], rest_type))
result['msg'] = 'Task failed: %(error_code)s %(error_text)s' % result
except KeyError:
result['msg'] = '%(msg)s for %(url)s' % info
module.fail_json(**result)
# Report success
result.update(aci_response(resp.read(), rest_type))
module.exit_json(**result)
if __name__ == '__main__':
main()
|
privateip/ansible | refs/heads/devel | lib/ansible/modules/cloud/atomic/atomic_host.py | 23 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION='''
---
module: atomic_host
short_description: Manage the atomic host platform
description:
- Manage the atomic host platform
    - Rebooting of the Atomic host platform should be done outside this module.
version_added: "2.2"
author: "Saravanan KR @krsacme"
notes:
    - The host should be an Atomic platform (verified by the existence of the '/run/ostree-booted' file).
requirements:
- atomic
- "python >= 2.6"
options:
revision:
description:
- The version number of the atomic host to be deployed. Providing C(latest) will upgrade to the latest available version.
required: false
default: latest
aliases: ["version"]
'''
EXAMPLES = '''
# Upgrade the atomic host platform to the latest version (atomic host upgrade)
- atomic_host:
revision: latest
# Deploy a specific revision as the atomic host (atomic host deploy 23.130)
- atomic_host:
revision: 23.130
'''
RETURN = '''
msg:
description: The command standard output
returned: always
type: string
sample: 'Already on latest'
'''
def core(module):
revision = module.params['revision']
args = []
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
if revision == 'latest':
args = ['atomic', 'host', 'upgrade']
else:
args = ['atomic', 'host', 'deploy', revision]
out = {}
err = {}
rc = 0
rc, out, err = module.run_command(args, check_rc=False)
if rc == 77 and revision == 'latest':
module.exit_json(msg="Already on latest", changed=False)
elif rc != 0:
module.fail_json(rc=rc, msg=err)
else:
module.exit_json(msg=out, changed=True)
def main():
module = AnsibleModule(
argument_spec = dict(
revision = dict(default='latest', required=False, aliases=["version"]),
),
)
# Verify that the platform is atomic host
if not os.path.exists("/run/ostree-booted"):
module.fail_json(msg="Module atomic_host is applicable for Atomic Host Platforms only")
try:
core(module)
except Exception as e:
module.fail_json(msg=str(e))
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
exploreodoo/datStruct | refs/heads/master | odoo/addons/payment_authorize/__openerp__.py | 232 | # -*- coding: utf-8 -*-
{
'name': 'Authorize.Net Payment Acquirer',
'category': 'Hidden',
'summary': 'Payment Acquirer: Authorize.net Implementation',
'version': '1.0',
'description': """Authorize.Net Payment Acquirer""",
'author': 'Odoo SA',
'depends': ['payment'],
'data': [
'views/authorize.xml',
'views/payment_acquirer.xml',
'data/authorize.xml',
'views/payment_authorize_template.xml',
],
'installable': True,
}
|
ebroder/NetworkManager | refs/heads/master | examples/python/show-bssids.py | 22 | #!/usr/bin/env python
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright (C) 2010 Red Hat, Inc.
#
# This example prints out all the AP BSSIDs that all WiFi devices on the
# machine can see. Useful for location-based services like Skyhook that
# can geolocate you based on the APs you can see.
import dbus
bus = dbus.SystemBus()
# Get a proxy for the base NetworkManager object
proxy = bus.get_object("org.freedesktop.NetworkManager", "/org/freedesktop/NetworkManager")
manager = dbus.Interface(proxy, "org.freedesktop.NetworkManager")
all_aps = []
print "Associated APs:"
# Get all network devices
devices = manager.GetDevices()
for d in devices:
dev_proxy = bus.get_object("org.freedesktop.NetworkManager", d)
prop_iface = dbus.Interface(dev_proxy, "org.freedesktop.DBus.Properties")
# Make sure the device is enabled before we try to use it
state = prop_iface.Get("org.freedesktop.NetworkManager.Device", "State")
if state <= 2:
continue
# Get device's type; we only want wifi devices
iface = prop_iface.Get("org.freedesktop.NetworkManager.Device", "Interface")
dtype = prop_iface.Get("org.freedesktop.NetworkManager.Device", "DeviceType")
if dtype == 2: # WiFi
# Get a proxy for the wifi interface
wifi_iface = dbus.Interface(dev_proxy, "org.freedesktop.NetworkManager.Device.Wireless")
wifi_prop_iface = dbus.Interface(dev_proxy, "org.freedesktop.DBus.Properties")
# Get the associated AP's object path
connected_path = wifi_prop_iface.Get("org.freedesktop.NetworkManager.Device.Wireless", "ActiveAccessPoint")
# Get all APs the card can see
aps = wifi_iface.GetAccessPoints()
for path in aps:
ap_proxy = bus.get_object("org.freedesktop.NetworkManager", path)
ap_prop_iface = dbus.Interface(ap_proxy, "org.freedesktop.DBus.Properties")
bssid = ap_prop_iface.Get("org.freedesktop.NetworkManager.AccessPoint", "HwAddress")
# Cache the BSSID
            if bssid not in all_aps:
all_aps.append(bssid)
# Print the current AP's BSSID
if path == connected_path:
print "%s (%s)" % (bssid, iface)
# and print out all APs the wifi devices can see
print "\nFound APs:"
for bssid in all_aps:
print bssid
|
jruiperezv/ANALYSE | refs/heads/master | common/lib/xmodule/xmodule/util/duedate.py | 256 | """
Miscellaneous utility functions.
"""
from functools import partial
def get_extended_due_date(node):
"""
    Gets the actual due date for the logged-in student for this node, returning
    the extended due date if one has been granted and it is later than the
    global due date, otherwise returning the global due date for the unit.
"""
if isinstance(node, dict):
get = node.get
else:
get = partial(getattr, node)
due_date = get('due', None)
if not due_date:
return due_date
extended = get('extended_due', None)
if not extended or extended < due_date:
return due_date
return extended
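# Illustrative sketch only (not part of the module): with a plain dict node,
# the later of the two dates wins. Plain ISO date strings are used here for
# brevity; real callers pass comparable datetime objects.
def _demo_extended_due_date():  # pragma: no cover - illustration only
    node = {'due': '2014-01-01', 'extended_due': '2014-02-01'}
    return get_extended_due_date(node)  # '2014-02-01'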
|
philgyford/listenbrainz-server | refs/heads/master | server.py | 2 | #!/usr/bin/env python
from webserver import create_app
import argparse
from werkzeug.contrib.profiler import ProfilerMiddleware
application = create_app()
#application.config['PROFILE'] = True
#application.wsgi_app = ProfilerMiddleware(application.wsgi_app, restrictions=[30])
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="ListenBrainz Server")
parser.add_argument("-d", "--debug", action="store_true",
help="Turn on debugging mode to see stack traces on "
"the error pages. This overrides 'DEBUG' value "
"in config file.")
parser.add_argument("-t", "--host", default="0.0.0.0", type=str,
help="Which interfaces to listen on. Default: 0.0.0.0.")
parser.add_argument("-p", "--port", default="8080", type=int,
help="Which port to listen on. Default: 8080.")
args = parser.parse_args()
application.run(debug=True if args.debug else None,
host=args.host, port=args.port)
|
lkhomenk/integration_tests | refs/heads/master | cfme/infrastructure/config_management.py | 3 | # -*- coding: utf-8 -*-
from cached_property import cached_property
from navmazing import NavigateToSibling, NavigateToAttribute
from widgetastic.exceptions import NoSuchElementException
from widgetastic.widget import Checkbox, TextInput, Text, View
from widgetastic_patternfly import BootstrapSelect, Dropdown, Tab
from cfme.base.credential import Credential as BaseCredential
from cfme.base.login import BaseLoggedInPage
from cfme.common import Taggable, TagPageView
from cfme.configure.configuration.region_settings import Category, Tag
from cfme.utils import ParamClassName
from cfme.utils import version, conf
from cfme.utils.appliance import Navigatable
from cfme.utils.appliance.implementations.ui import navigator, CFMENavigateStep, navigate_to
from cfme.utils.log import logger
from cfme.utils.pretty import Pretty
from cfme.utils.update import Updateable
from cfme.utils.wait import wait_for
from widgetastic_manageiq import (
Accordion, BaseEntitiesView, Button, ItemsToolBarViewSelector, ManageIQTree, SummaryTable,
Version, VersionPick, Table)
class ConfigManagementToolbar(View):
"""Toolbar"""
refresh = Button(title=VersionPick({Version.lowest(): 'Reload current display',
'5.9': 'Refresh this page'}))
configuration = Dropdown('Configuration')
lifecycle = Dropdown('Lifecycle')
policy = Dropdown('Policy')
download = Dropdown(title='Download')
view_selector = View.nested(ItemsToolBarViewSelector)
class ConfigManagementDetailsToolbar(View):
"""Toolbar on the details page"""
history = Dropdown(title='History')
refresh = Button(title=VersionPick({Version.lowest(): 'Reload current display',
'5.9': 'Refresh this page'}))
lifecycle = Dropdown('Lifecycle')
policy = Dropdown('Policy')
download = Button(title='Download summary in PDF format')
view_selector = View.nested(ItemsToolBarViewSelector)
class ConfigManagementSideBar(View):
"""Side bar"""
@View.nested
class providers(Accordion): # noqa
ACCORDION_NAME = 'Providers'
tree = ManageIQTree()
@View.nested
class configured_systems(Accordion): # noqa
ACCORDION_NAME = 'Configured Systems'
tree = ManageIQTree()
@View.nested
class job_templates(Accordion): # noqa
ACCORDION_NAME = 'Job Templates'
tree = ManageIQTree()
class ConfigManagementEntities(BaseEntitiesView):
"""The entities on the page"""
class ConfigManagementProfileEntities(BaseEntitiesView):
"""Entities view for the detail page"""
@View.nested
class summary(Tab): # noqa
TAB_NAME = 'Summary'
properties = SummaryTable(title='Properties')
environment = SummaryTable(title='Environment')
operating_system = SummaryTable(title='Operating System')
tenancy = SummaryTable(title='Tenancy')
smart_management = SummaryTable(title='Smart Management')
@View.nested
class configured_systems(Tab): # noqa
TAB_NAME = 'Configured Systems'
elements = Table('//div[@id="main_div"]//div[@id="list_grid" or @id="gtl_div"]//table')
class ConfigManagementAddForm(View):
"""Form to add a provider"""
name = TextInput('name')
provider_type = BootstrapSelect('provider_type')
zone = TextInput('zone')
url = TextInput('url')
ssl = Checkbox('verify_ssl')
username = VersionPick({Version.lowest(): TextInput('log_userid'),
'5.9': TextInput('default_userid')})
password = VersionPick({Version.lowest(): TextInput('log_password'),
'5.9': TextInput('default_password')})
confirm_password = TextInput('log_verify')
validate = Button('Validate')
class ConfigManagementEditForm(View):
"""Form to add a provider"""
name = TextInput('name')
provider_type = BootstrapSelect('provider_type')
zone = TextInput('zone')
url = TextInput('url')
ssl = Checkbox('verify_ssl')
username = TextInput('log_userid')
password = TextInput('log_password')
validate = Button('Validate')
class ConfigManagementAddEntities(View):
"""The entities on the add page"""
title = Text('//div[@id="main-content"]//h1')
form = View.nested(ConfigManagementAddForm)
add = Button('Add')
cancel = Button('Cancel')
class ConfigManagementEditEntities(View):
"""The entities on the edit page"""
title = Text('//div[@id="main-content"]//h1')
form = View.nested(ConfigManagementEditForm)
save = Button('Save')
reset = Button('Reset')
cancel = Button('Cancel')
class ConfigManagementView(BaseLoggedInPage):
"""The base page for both the all and details page"""
@property
def in_config(self):
"""Determine if we're in the config section"""
if (self.context['object'].appliance.version >= '5.8' and
self.context['object'].type == 'Ansible Tower'):
nav_chain = ['Automation', 'Ansible', 'Ansible Tower']
else:
nav_chain = ['Configuration', 'Management']
return self.logged_in_as_current_user and self.navigation.currently_selected == nav_chain
class ConfigManagementAllView(ConfigManagementView):
"""The main list view"""
toolbar = View.nested(ConfigManagementToolbar)
sidebar = View.nested(ConfigManagementSideBar)
including_entities = View.include(ConfigManagementEntities, use_parent=True)
@property
def is_displayed(self):
"""Is this view being displayed?"""
if self.obj.appliance.version >= '5.8' and self.obj.type == 'Ansible Tower':
title_text = 'All Ansible Tower Providers'
else:
title_text = 'All Configuration Manager Providers'
return self.in_config and self.entities.title.text == title_text
class ConfigManagementDetailsView(ConfigManagementView):
"""The details page"""
toolbar = View.nested(ConfigManagementToolbar)
sidebar = View.nested(ConfigManagementSideBar)
including_entities = View.include(ConfigManagementEntities, use_parent=True)
@property
def is_displayed(self):
"""Is this view being displayed?"""
titles = [t.format(name=self.obj.name) for t in [
'Configuration Profiles under Red Hat Satellite Provider "{name} Automation Manager"',
'Inventory Groups under Ansible Tower Provider "{name} Automation Manager"'
]]
return self.in_config and self.entities.title.text in titles
class ConfigManagementProfileView(ConfigManagementView):
"""The profile page"""
toolbar = View.nested(ConfigManagementDetailsToolbar)
sidebar = View.nested(ConfigManagementSideBar)
entities = View.nested(ConfigManagementProfileEntities)
@property
def is_displayed(self):
"""Is this view being displayed?"""
title = 'Configured System ({}) "{}"'.format(self.obj.type, self.obj.name)
return self.in_config and self.entities.title.text == title
class ConfigManagementAddView(ConfigManagementView):
"""The add page"""
sidebar = View.nested(ConfigManagementSideBar)
entities = View.nested(ConfigManagementAddEntities)
@property
def is_displayed(self):
"""Is this view being displayed?"""
return False
class ConfigManagementEditView(ConfigManagementView):
"""The edit page"""
sidebar = View.nested(ConfigManagementSideBar)
entities = View.nested(ConfigManagementEditEntities)
@property
def is_displayed(self):
"""Is this view being displayed?"""
return False
class ConfigManager(Updateable, Pretty, Navigatable):
"""
    This is the base class for configuration manager objects (Red Hat Satellite, Foreman, Ansible Tower).
Args:
name: Name of the config. manager
url: URL, hostname or IP of the config. manager
ssl: Boolean value; `True` if SSL certificate validity should be checked, `False` otherwise
credentials: Credentials to access the config. manager
key: Key to access the cfme_data yaml data (same as `name` if not specified)
Usage:
Use Satellite or AnsibleTower classes instead.
"""
pretty_attr = ['name', 'url']
_param_name = ParamClassName('name')
type = None
refresh_flash_msg = 'Refresh Provider initiated for 1 provider'
def __init__(self, name=None, url=None, ssl=None, credentials=None, key=None, appliance=None):
Navigatable.__init__(self, appliance=appliance)
self.name = name
self.url = url
self.ssl = ssl
self.credentials = credentials
self.key = key or name
class Credential(BaseCredential, Updateable):
pass
@property
def ui_name(self):
"""Return the name used in the UI"""
if self.type == 'Ansible Tower':
return '{} Automation Manager'.format(self.name)
else:
return '{} Configuration Manager'.format(self.name)
def create(self, cancel=False, validate_credentials=True, validate=True, force=False):
"""Creates the manager through UI
Args:
cancel (bool): Whether to cancel out of the creation. The cancel is done
after all the information present in the manager has been filled in the UI.
validate_credentials (bool): Whether to validate credentials - if True and the
credentials are invalid, an error will be raised.
validate (bool): Whether we want to wait for the manager's data to load
and show up in it's detail page. True will also wait, False will only set it up.
force (bool): Whether to force the creation even if the manager already exists.
True will try anyway; False will check for its existence and leave, if present.
"""
def config_profiles_loaded():
# Workaround - without this, validation of provider failed
config_profiles_names = [prof.name for prof in self.config_profiles]
logger.info(
"UI: %s\nYAML: %s",
set(config_profiles_names), set(self.yaml_data['config_profiles']))
return all(
[cp in config_profiles_names for cp in self.yaml_data['config_profiles']])
if not force and self.exists:
return
form_dict = self.__dict__
form_dict.update(self.credentials.view_value_mapping)
if self.appliance.version < '5.8':
form_dict['provider_type'] = self.type
view = navigate_to(self, 'Add')
view.entities.form.fill(form_dict)
if validate_credentials:
view.entities.form.validate.click()
view.flash.assert_success_message('Credential validation was successful')
if cancel:
view.entities.cancel.click()
view.flash.assert_success_message('Add of Provider was cancelled by the user')
else:
view.entities.add.click()
success_message = '{} Provider "{}" was added'.format(self.type, self.name)
view.flash.assert_success_message(success_message)
view.flash.assert_success_message(self.refresh_flash_msg)
if validate:
try:
self.yaml_data['config_profiles']
except KeyError as e:
logger.exception(e)
raise
wait_for(
config_profiles_loaded,
fail_func=self.refresh_relationships,
handle_exception=True,
num_sec=180, delay=30)
def update(self, updates, cancel=False, validate_credentials=False):
"""Updates the manager through UI
args:
updates (dict): Data to change.
cancel (bool): Whether to cancel out of the update. The cancel is done
after all the new information has been filled in the UI.
validate_credentials (bool): Whether to validate credentials - if True and the
credentials are invalid, an error will be raised.
Note:
utils.update use is recommended over use of this method.
"""
view = navigate_to(self, 'Edit')
view.entities.form.fill(updates)
if validate_credentials:
view.entities.form.validate.click()
view.flash.assert_success_message('Credential validation was successful')
if cancel:
view.entities.cancel.click()
view.flash.assert_success_message('Edit of Provider was cancelled by the user')
else:
view.entities.save.click()
view.flash.assert_success_message(
'{} Provider "{}" was updated'.format(self.type, updates['name'] or self.name))
self.__dict__.update(**updates)
def delete(self, cancel=False, wait_deleted=True, force=False):
"""Deletes the manager through UI
Args:
cancel (bool): Whether to cancel out of the deletion, when the alert pops up.
wait_deleted (bool): Whether we want to wait for the manager to disappear from the UI.
True will wait; False will only delete it and move on.
force (bool): Whether to try to delete the manager even though it doesn't exist.
True will try to delete it anyway; False will check for its existence and leave,
if not present.
"""
if not force and not self.exists:
return
view = navigate_to(self, 'All')
view.toolbar.view_selector.select('List View')
row = view.entities.paginator.find_row_on_pages(view.entities.elements,
provider_name=self.ui_name)
row[0].check()
remove_item = VersionPick({
'5.8': 'Remove selected items',
'5.9': 'Remove selected items from Inventory'
}).pick(self.appliance.version)
view.toolbar.configuration.item_select(remove_item, handle_alert=not cancel)
if not cancel:
view.flash.assert_success_message('Delete initiated for 1 Provider')
if wait_deleted:
wait_for(func=lambda: self.exists, fail_condition=True, delay=15, num_sec=60)
@property
def exists(self):
"""Returns whether the manager exists in the UI or not"""
view = navigate_to(self, 'All')
view.toolbar.view_selector.select('List View')
try:
view.entities.paginator.find_row_on_pages(view.entities.elements,
provider_name=self.ui_name)
return True
except NoSuchElementException:
pass
return False
def refresh_relationships(self, cancel=False):
"""Refreshes relationships and power states of this manager"""
view = navigate_to(self, 'All')
view.toolbar.view_selector.select('List View')
row = view.entities.paginator.find_row_on_pages(view.entities.elements,
provider_name=self.ui_name)
row[0].check()
if view.toolbar.configuration.item_enabled('Refresh Relationships and Power states'):
view.toolbar.configuration.item_select('Refresh Relationships and Power states',
handle_alert=not cancel)
if not cancel:
view.flash.assert_success_message(self.refresh_flash_msg)
@property
def config_profiles(self):
"""Returns 'ConfigProfile' configuration profiles (hostgroups) available on this manager"""
view = navigate_to(self, 'Details')
        # TODO: remove later. Workaround for BZ 1452425
view.toolbar.view_selector.select('List View')
view.toolbar.refresh.click()
wait_for(lambda: view.entities.elements.is_displayed, fail_func=view.toolbar.refresh.click,
handle_exception=True, num_sec=60, delay=5)
config_profiles = []
for row in view.entities.elements:
if self.type == 'Ansible Tower':
name = row.name.text
else:
name = row.description.text
if 'unassigned' in name.lower():
continue
config_profiles.append(ConfigProfile(name=name, manager=self))
return config_profiles
@property
def systems(self):
"""Returns 'ConfigSystem' configured systems (hosts) available on this manager"""
return reduce(lambda x, y: x + y, [prof.systems for prof in self.config_profiles])
@property
def yaml_data(self):
"""Returns yaml data for this manager"""
return conf.cfme_data.configuration_managers[self.key]
@classmethod
def load_from_yaml(cls, key):
"""Returns 'ConfigManager' object loaded from yamls, based on its key"""
data = conf.cfme_data.configuration_managers[key]
creds = conf.credentials[data['credentials']]
return cls(
name=data['name'],
url=data['url'],
ssl=data['ssl'],
credentials=cls.Credential(
principal=creds['username'], secret=creds['password']),
key=key)
@property
def quad_name(self):
if self.type == 'Ansible Tower':
return '{} Automation Manager'.format(self.name)
else:
return '{} Configuration Manager'.format(self.name)
def get_config_manager_from_config(cfg_mgr_key):
cfg_mgr = conf.cfme_data.get('configuration_managers', {})[cfg_mgr_key]
if cfg_mgr['type'] == 'satellite':
return Satellite.load_from_yaml(cfg_mgr_key)
elif cfg_mgr['type'] == 'ansible':
return AnsibleTower.load_from_yaml(cfg_mgr_key)
else:
raise Exception("Unknown configuration manager key")
class ConfigProfile(Pretty, Navigatable):
"""Configuration profile object (foreman-side hostgroup)
Args:
name: Name of the profile
manager: ConfigManager object which this profile is bound to
"""
pretty_attrs = ['name', 'manager']
def __init__(self, name, manager, appliance=None):
Navigatable.__init__(self, appliance=appliance)
self.name = name
self.manager = manager
@property
def systems(self):
"""Returns 'ConfigSystem' objects that are active under this profile"""
view = navigate_to(self, 'Details')
view.toolbar.view_selector.select('List View')
# Unassigned config profile has no tabstrip
if 'unassigned' not in self.name.lower():
view.entities.configured_systems.click()
if view.entities.configured_systems.elements.is_displayed:
return [ConfigSystem(row.hostname.text, self)
for row in view.entities.configured_systems.elements]
return list()
class ConfigSystem(Pretty, Navigatable, Taggable):
"""The tags pages of the config system"""
pretty_attrs = ['name', 'manager_key']
def __init__(self, name, profile, appliance=None):
Navigatable.__init__(self, appliance=appliance)
self.name = name
self.profile = profile
def get_tags(self, tenant="My Company Tags"):
"""Overridden get_tags method to deal with the fact that configured systems don't have a
details view."""
view = navigate_to(self, 'EditTags')
return [Tag(category=Category(display_name=r.category.text.replace('*', '').strip()),
display_name=r.assigned_value.text.strip())
for r in view.form.tags]
class Satellite(ConfigManager):
"""
Configuration manager object (Red Hat Satellite, Foreman)
Args:
name: Name of the Satellite/Foreman configuration manager
url: URL, hostname or IP of the configuration manager
ssl: Boolean value; `True` if SSL certificate validity should be checked, `False` otherwise
credentials: Credentials to access the config. manager
key: Key to access the cfme_data yaml data (same as `name` if not specified)
Usage:
Create provider:
.. code-block:: python
satellite_cfg_mgr = Satellite('my_satellite', 'my-satellite.example.com',
ssl=False, ConfigManager.Credential(principal='admin',
secret='testing'), key='satellite_yaml_key')
satellite_cfg_mgr.create()
Update provider:
.. code-block:: python
with update(satellite_cfg_mgr):
satellite_cfg_mgr.name = 'new_satellite_name'
Delete provider:
.. code-block:: python
satellite_cfg_mgr.delete()
"""
def __init__(self, name=None, url=None, ssl=None, credentials=None, key=None):
super(Satellite, self).__init__(name=name, url=url, ssl=ssl, credentials=credentials,
key=key)
self.name = name
self.url = url
self.ssl = ssl
self.credentials = credentials
self.key = key or name
@cached_property
def type(self):
"""Returns presumed type of the manager based on CFME version
Note:
We cannot actually know the type of the provider from the UI.
This represents the supported type by CFME version and is to be used in navigation.
"""
return version.pick({version.LOWEST: 'Red Hat Satellite', version.LATEST: 'Foreman'})
class AnsibleTower(ConfigManager):
"""
Configuration manager object (Ansible Tower)
Args:
name: Name of the Ansible Tower configuration manager
url: URL, hostname or IP of the configuration manager
ssl: Boolean value; `True` if SSL certificate validity should be checked, `False` otherwise
credentials: Credentials to access the config. manager
key: Key to access the cfme_data yaml data (same as `name` if not specified)
Usage:
Create provider:
.. code-block:: python
tower_cfg_mgr = AnsibleTower('my_tower', 'https://my-tower.example.com/api/v1',
ssl=False, ConfigManager.Credential(principal='admin',
secret='testing'), key='tower_yaml_key')
tower_cfg_mgr.create()
Update provider:
.. code-block:: python
with update(tower_cfg_mgr):
tower_cfg_mgr.name = 'new_tower_name'
Delete provider:
.. code-block:: python
tower_cfg_mgr.delete()
"""
type = 'Ansible Tower'
def __init__(self, name=None, url=None, ssl=None, credentials=None, key=None):
super(AnsibleTower, self).__init__(name=name, url=url, ssl=ssl, credentials=credentials,
key=key)
self.name = name
self.url = url
self.ssl = ssl
self.credentials = credentials
self.key = key or name
@property
def ui_name(self):
"""Return the name used in the UI"""
return '{} Automation Manager'.format(self.name)
@navigator.register(ConfigManager, 'All')
class MgrAll(CFMENavigateStep):
VIEW = ConfigManagementAllView
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def step(self):
if self.obj.appliance.version < '5.8' or self.obj.type != 'Ansible Tower':
self.prerequisite_view.navigation.select('Configuration', 'Management')
else:
self.prerequisite_view.navigation.select('Automation', 'Ansible Tower', 'Explorer')
def resetter(self):
if self.obj.appliance.version >= '5.8' and self.obj.type == 'Ansible Tower':
self.view.sidebar.providers.tree.click_path('All Ansible Tower Providers')
else:
self.view.sidebar.providers.tree.click_path('All Configuration Manager Providers')
@navigator.register(ConfigManager, 'Add')
class MgrAdd(CFMENavigateStep):
VIEW = ConfigManagementAddView
prerequisite = NavigateToSibling('All')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Add a new Provider')
@navigator.register(ConfigManager, 'Edit')
class MgrEdit(CFMENavigateStep):
VIEW = ConfigManagementEditView
prerequisite = NavigateToSibling('All')
def step(self):
self.prerequisite_view.toolbar.view_selector.select('List View')
row = self.prerequisite_view.entities.paginator.find_row_on_pages(
self.prerequisite_view.entities.elements, provider_name=self.obj.ui_name)
row.click()
self.prerequisite_view.toolbar.configuration.item_select('Edit this Provider')
@navigator.register(ConfigManager, 'Details')
class MgrDetails(CFMENavigateStep):
VIEW = ConfigManagementDetailsView
prerequisite = NavigateToSibling('All')
def step(self):
self.prerequisite_view.toolbar.view_selector.select('List View')
row = self.prerequisite_view.entities.paginator.find_row_on_pages(
self.prerequisite_view.entities.elements, provider_name=self.obj.ui_name)
row.click()
@navigator.register(ConfigManager, 'EditFromDetails')
class MgrEditFromDetails(CFMENavigateStep):
VIEW = ConfigManagementEditView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Edit this Provider')
@navigator.register(ConfigProfile, 'Details')
class Details(CFMENavigateStep):
VIEW = ConfigManagementProfileView
prerequisite = NavigateToAttribute('manager', 'Details')
def step(self):
self.prerequisite_view.toolbar.view_selector.select('List View')
row = self.prerequisite_view.entities.paginator.find_row_on_pages(
self.prerequisite_view.entities.elements, description=self.obj.name)
row.click()
@navigator.register(ConfigSystem, 'All')
class SysAll(CFMENavigateStep):
VIEW = ConfigManagementAllView
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def step(self):
self.prerequisite_view.navigation.select('Configuration', 'Management')
def resetter(self):
self.view.sidebar.configured_systems.open()
self.view.sidebar.configured_systems.tree.click_path('All Configured Systems')
@navigator.register(ConfigSystem, 'EditTags')
class SysEditTags(CFMENavigateStep):
VIEW = TagPageView
prerequisite = NavigateToSibling('All')
def step(self):
self.prerequisite_view.toolbar.view_selector.select('List View')
row = self.prerequisite_view.entities.paginator.find_row_on_pages(
self.prerequisite_view.entities.elements, hostname=self.obj.name)
row[0].check()
self.prerequisite_view.toolbar.policy.item_select('Edit Tags')
|
jeffery9/mixprint_addons | refs/heads/master | ineco_thai_account/journal.py | 1 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-Today INECO LTD,. PART. (<http://www.ineco.co.th>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
class account_journal(osv.osv):
_inherit = 'account.journal'
_columns = {
'print_sale_tax': fields.boolean('Print Tax Report'),
'customer': fields.boolean('Customer Payment'),
'supplier': fields.boolean('Supplier Payment'),
'active': fields.boolean('Active'),
}
_defaults = {
'print_sale_tax': True,
'customer': False,
'supplier': False,
'active': True,
} |
labelle/evolution | refs/heads/master | node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py | 2698 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the easy_xml.py file. """
import gyp.easy_xml as easy_xml
import unittest
import StringIO
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def test_EasyXml_simple(self):
self.assertEqual(
easy_xml.XmlToString(['test']),
'<?xml version="1.0" encoding="utf-8"?><test/>')
self.assertEqual(
easy_xml.XmlToString(['test'], encoding='Windows-1252'),
'<?xml version="1.0" encoding="Windows-1252"?><test/>')
def test_EasyXml_simple_with_attributes(self):
self.assertEqual(
easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
'<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
def test_EasyXml_escaping(self):
original = '<test>\'"\r&\nfoo'
    converted = '&lt;test&gt;\'"&#xD;&amp;&#xA;foo'
    converted_apos = converted.replace("'", '&apos;')
self.assertEqual(
easy_xml.XmlToString(['test3', {'a': original}, original]),
'<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
(converted, converted_apos))
def test_EasyXml_pretty(self):
self.assertEqual(
easy_xml.XmlToString(
['test3',
['GrandParent',
['Parent1',
['Child']
],
['Parent2']
]
],
pretty=True),
'<?xml version="1.0" encoding="utf-8"?>\n'
'<test3>\n'
' <GrandParent>\n'
' <Parent1>\n'
' <Child/>\n'
' </Parent1>\n'
' <Parent2/>\n'
' </GrandParent>\n'
'</test3>\n')
def test_EasyXml_complex(self):
# We want to create:
target = (
'<?xml version="1.0" encoding="utf-8"?>'
'<Project>'
'<PropertyGroup Label="Globals">'
'<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
'<Keyword>Win32Proj</Keyword>'
'<RootNamespace>automated_ui_tests</RootNamespace>'
'</PropertyGroup>'
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
'<PropertyGroup '
'Condition="\'$(Configuration)|$(Platform)\'=='
'\'Debug|Win32\'" Label="Configuration">'
'<ConfigurationType>Application</ConfigurationType>'
'<CharacterSet>Unicode</CharacterSet>'
'</PropertyGroup>'
'</Project>')
xml = easy_xml.XmlToString(
['Project',
['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
['Keyword', 'Win32Proj'],
['RootNamespace', 'automated_ui_tests']
],
['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
['PropertyGroup',
{'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
'Label': 'Configuration'},
['ConfigurationType', 'Application'],
['CharacterSet', 'Unicode']
]
])
self.assertEqual(xml, target)
if __name__ == '__main__':
unittest.main()
|
ivan-fedorov/intellij-community | refs/heads/master | python/testData/refactoring/extractmethod/ElseBody.before.py | 83 | def foo():
for arg in sys.argv[1:]:
try:
f = open(arg, 'r')
except IOError:
print('cannot open', arg)
else:
<selection>length = len(f.readlines()) #<---extract something from here
print("hi from else")</selection>
#anything else you need |
plotly/plotly.py | refs/heads/master | packages/python/plotly/plotly/validators/scatterternary/hoverlabel/font/_color.py | 1 | import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self,
plotly_name="color",
parent_name="scatterternary.hoverlabel.font",
**kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "none"),
**kwargs
)
|
dmccue/ansible | refs/heads/devel | v1/ansible/module_utils/rax.py | 280 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by
# Ansible still belong to the author of the module, and may assign their own
# license to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# 'os' and 're' are used by the helpers below; import them here so the
# snippet also works outside Ansible's dynamic embedding.
import os
import re

from uuid import UUID
FINAL_STATUSES = ('ACTIVE', 'ERROR')
VOLUME_STATUS = ('available', 'attaching', 'creating', 'deleting', 'in-use',
'error', 'error_deleting')
CLB_ALGORITHMS = ['RANDOM', 'LEAST_CONNECTIONS', 'ROUND_ROBIN',
'WEIGHTED_LEAST_CONNECTIONS', 'WEIGHTED_ROUND_ROBIN']
CLB_PROTOCOLS = ['DNS_TCP', 'DNS_UDP', 'FTP', 'HTTP', 'HTTPS', 'IMAPS',
'IMAPv4', 'LDAP', 'LDAPS', 'MYSQL', 'POP3', 'POP3S', 'SMTP',
'TCP', 'TCP_CLIENT_FIRST', 'UDP', 'UDP_STREAM', 'SFTP']
NON_CALLABLES = (basestring, bool, dict, int, list, type(None))
PUBLIC_NET_ID = "00000000-0000-0000-0000-000000000000"
SERVICE_NET_ID = "11111111-1111-1111-1111-111111111111"
def rax_slugify(value):
"""Prepend a key with rax_ and normalize the key name"""
return 'rax_%s' % (re.sub('[^\w-]', '_', value).lower().lstrip('_'))
def rax_clb_node_to_dict(obj):
"""Function to convert a CLB Node object to a dict"""
if not obj:
return {}
node = obj.to_dict()
node['id'] = obj.id
node['weight'] = obj.weight
return node
def rax_to_dict(obj, obj_type='standard'):
"""Generic function to convert a pyrax object to a dict
obj_type values:
standard
clb
server
"""
instance = {}
for key in dir(obj):
value = getattr(obj, key)
if obj_type == 'clb' and key == 'nodes':
instance[key] = []
for node in value:
instance[key].append(rax_clb_node_to_dict(node))
elif (isinstance(value, list) and len(value) > 0 and
not isinstance(value[0], NON_CALLABLES)):
instance[key] = []
for item in value:
instance[key].append(rax_to_dict(item))
elif (isinstance(value, NON_CALLABLES) and not key.startswith('_')):
if obj_type == 'server':
if key == 'image':
if not value:
instance['rax_boot_source'] = 'volume'
else:
instance['rax_boot_source'] = 'local'
key = rax_slugify(key)
instance[key] = value
if obj_type == 'server':
for attr in ['id', 'accessIPv4', 'name', 'status']:
instance[attr] = instance.get(rax_slugify(attr))
return instance
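# Sketch: converting a (hypothetical) pyrax server object yields a plain dict
# keyed with rax_-prefixed slugs, e.g.
#
#   info = rax_to_dict(server, obj_type='server')
#   info['rax_boot_source']     # 'local' or 'volume'
#   info['id'], info['status']  # convenience copies under the original names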
def rax_find_bootable_volume(module, rax_module, server, exit=True):
"""Find a servers bootable volume"""
cs = rax_module.cloudservers
cbs = rax_module.cloud_blockstorage
server_id = rax_module.utils.get_id(server)
volumes = cs.volumes.get_server_volumes(server_id)
bootable_volumes = []
for volume in volumes:
vol = cbs.get(volume)
if module.boolean(vol.bootable):
bootable_volumes.append(vol)
if not bootable_volumes:
if exit:
module.fail_json(msg='No bootable volumes could be found for '
'server %s' % server_id)
else:
return False
elif len(bootable_volumes) > 1:
if exit:
module.fail_json(msg='Multiple bootable volumes found for server '
'%s' % server_id)
else:
return False
return bootable_volumes[0]
def rax_find_image(module, rax_module, image, exit=True):
"""Find a server image by ID or Name"""
cs = rax_module.cloudservers
try:
UUID(image)
except ValueError:
try:
image = cs.images.find(human_id=image)
except(cs.exceptions.NotFound,
cs.exceptions.NoUniqueMatch):
try:
image = cs.images.find(name=image)
except (cs.exceptions.NotFound,
cs.exceptions.NoUniqueMatch):
if exit:
module.fail_json(msg='No matching image found (%s)' %
image)
else:
return False
return rax_module.utils.get_id(image)
def rax_find_volume(module, rax_module, name):
"""Find a Block storage volume by ID or name"""
cbs = rax_module.cloud_blockstorage
try:
UUID(name)
volume = cbs.get(name)
except ValueError:
try:
volume = cbs.find(name=name)
except rax_module.exc.NotFound:
volume = None
except Exception, e:
module.fail_json(msg='%s' % e)
return volume
def rax_find_network(module, rax_module, network):
"""Find a cloud network by ID or name"""
cnw = rax_module.cloud_networks
try:
UUID(network)
except ValueError:
if network.lower() == 'public':
return cnw.get_server_networks(PUBLIC_NET_ID)
elif network.lower() == 'private':
return cnw.get_server_networks(SERVICE_NET_ID)
else:
try:
network_obj = cnw.find_network_by_label(network)
except (rax_module.exceptions.NetworkNotFound,
rax_module.exceptions.NetworkLabelNotUnique):
module.fail_json(msg='No matching network found (%s)' %
network)
else:
return cnw.get_server_networks(network_obj)
else:
return cnw.get_server_networks(network)
def rax_find_server(module, rax_module, server):
"""Find a Cloud Server by ID or name"""
cs = rax_module.cloudservers
try:
UUID(server)
server = cs.servers.get(server)
except ValueError:
servers = cs.servers.list(search_opts=dict(name='^%s$' % server))
if not servers:
module.fail_json(msg='No Server was matched by name, '
'try using the Server ID instead')
if len(servers) > 1:
module.fail_json(msg='Multiple servers matched by name, '
'try using the Server ID instead')
# We made it this far, grab the first and hopefully only server
# in the list
server = servers[0]
return server
def rax_find_loadbalancer(module, rax_module, loadbalancer):
"""Find a Cloud Load Balancer by ID or name"""
clb = rax_module.cloud_loadbalancers
try:
found = clb.get(loadbalancer)
except:
found = []
for lb in clb.list():
if loadbalancer == lb.name:
found.append(lb)
if not found:
module.fail_json(msg='No loadbalancer was matched')
if len(found) > 1:
module.fail_json(msg='Multiple loadbalancers matched')
# We made it this far, grab the first and hopefully only item
# in the list
found = found[0]
return found
def rax_argument_spec():
"""Return standard base dictionary used for the argument_spec
argument in AnsibleModule
"""
return dict(
api_key=dict(type='str', aliases=['password'], no_log=True),
auth_endpoint=dict(type='str'),
credentials=dict(type='str', aliases=['creds_file']),
env=dict(type='str'),
identity_type=dict(type='str', default='rackspace'),
region=dict(type='str'),
tenant_id=dict(type='str'),
tenant_name=dict(type='str'),
username=dict(type='str'),
verify_ssl=dict(choices=BOOLEANS, type='bool'),
)
def rax_required_together():
"""Return the default list used for the required_together argument to
AnsibleModule"""
return [['api_key', 'username']]
def setup_rax_module(module, rax_module, region_required=True):
"""Set up pyrax in a standard way for all modules"""
rax_module.USER_AGENT = 'ansible/%s %s' % (ANSIBLE_VERSION,
rax_module.USER_AGENT)
api_key = module.params.get('api_key')
auth_endpoint = module.params.get('auth_endpoint')
credentials = module.params.get('credentials')
env = module.params.get('env')
identity_type = module.params.get('identity_type')
region = module.params.get('region')
tenant_id = module.params.get('tenant_id')
tenant_name = module.params.get('tenant_name')
username = module.params.get('username')
verify_ssl = module.params.get('verify_ssl')
if env is not None:
rax_module.set_environment(env)
rax_module.set_setting('identity_type', identity_type)
if verify_ssl is not None:
rax_module.set_setting('verify_ssl', verify_ssl)
if auth_endpoint is not None:
rax_module.set_setting('auth_endpoint', auth_endpoint)
if tenant_id is not None:
rax_module.set_setting('tenant_id', tenant_id)
if tenant_name is not None:
rax_module.set_setting('tenant_name', tenant_name)
try:
username = username or os.environ.get('RAX_USERNAME')
if not username:
username = rax_module.get_setting('keyring_username')
if username:
api_key = 'USE_KEYRING'
if not api_key:
api_key = os.environ.get('RAX_API_KEY')
credentials = (credentials or os.environ.get('RAX_CREDENTIALS') or
os.environ.get('RAX_CREDS_FILE'))
region = (region or os.environ.get('RAX_REGION') or
rax_module.get_setting('region'))
except KeyError, e:
module.fail_json(msg='Unable to load %s' % e.message)
try:
if api_key and username:
if api_key == 'USE_KEYRING':
rax_module.keyring_auth(username, region=region)
else:
rax_module.set_credentials(username, api_key=api_key,
region=region)
elif credentials:
credentials = os.path.expanduser(credentials)
rax_module.set_credential_file(credentials, region=region)
else:
raise Exception('No credentials supplied!')
except Exception, e:
if e.message:
msg = str(e.message)
else:
msg = repr(e)
module.fail_json(msg=msg)
if region_required and region not in rax_module.regions:
module.fail_json(msg='%s is not a valid region, must be one of: %s' %
(region, ','.join(rax_module.regions)))
return rax_module
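# Typical wiring inside an Ansible module, sketched with illustrative names;
# pyrax is the imported pyrax package threaded through setup_rax_module:
#
#   module = AnsibleModule(
#       argument_spec=rax_argument_spec(),
#       required_together=rax_required_together(),
#   )
#   pyrax = setup_rax_module(module, pyrax)
#   server = rax_find_server(module, pyrax, module.params.get('name'))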
|
derekjchow/models | refs/heads/master | research/vid2depth/ops/__init__.py | 10 | # Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
|
nirmeshk/oh-mainline | refs/heads/master | vendor/packages/sphinx/sphinx/util/png.py | 16 | # -*- coding: utf-8 -*-
"""
sphinx.util.png
~~~~~~~~~~~~~~~
PNG image manipulation helpers.
:copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import struct
import binascii
from sphinx.util.pycompat import b
LEN_IEND = 12
LEN_DEPTH = 22
DEPTH_CHUNK_LEN = struct.pack('!i', 10)
DEPTH_CHUNK_START = b('tEXtDepth\x00')
IEND_CHUNK = b('\x00\x00\x00\x00IEND\xAE\x42\x60\x82')
def read_png_depth(filename):
"""Read the special tEXt chunk indicating the depth from a PNG file."""
result = None
f = open(filename, 'rb')
try:
f.seek(- (LEN_IEND + LEN_DEPTH), 2)
depthchunk = f.read(LEN_DEPTH)
if not depthchunk.startswith(DEPTH_CHUNK_LEN + DEPTH_CHUNK_START):
# either not a PNG file or not containing the depth chunk
return None
result = struct.unpack('!i', depthchunk[14:18])[0]
finally:
f.close()
return result
def write_png_depth(filename, depth):
"""Write the special tEXt chunk indicating the depth to a PNG file.
The chunk is placed immediately before the special IEND chunk.
"""
data = struct.pack('!i', depth)
f = open(filename, 'r+b')
try:
# seek to the beginning of the IEND chunk
f.seek(-LEN_IEND, 2)
# overwrite it with the depth chunk
f.write(DEPTH_CHUNK_LEN + DEPTH_CHUNK_START + data)
# calculate the checksum over chunk name and data
crc = binascii.crc32(DEPTH_CHUNK_START + data) & 0xffffffff
f.write(struct.pack('!I', crc))
# replace the IEND chunk
f.write(IEND_CHUNK)
finally:
f.close()
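# A round-trip sketch, assuming 'math.png' is an existing PNG file (the name
# is illustrative); the depth chunk is written just before IEND and read back:
#
#   write_png_depth('math.png', 7)
#   assert read_png_depth('math.png') == 7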
|
koslibpro/GreyIndex | refs/heads/master | services/__init__.py | 3 | __author__ = 'koslib'
|
manuzhang/incubator-beam | refs/heads/master | sdks/python/apache_beam/runners/portability/maptask_executor_runner.py | 9 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Beam runner for testing/profiling worker code directly.
"""
import collections
import logging
import time
import apache_beam as beam
from apache_beam.internal import pickler
from apache_beam.io import iobase
from apache_beam.metrics.execution import MetricsEnvironment
from apache_beam.options import pipeline_options
from apache_beam.runners import DataflowRunner
from apache_beam.runners.dataflow.internal.dependency import _dependency_file_copy
from apache_beam.runners.dataflow.internal.names import PropertyNames
from apache_beam.runners.dataflow.native_io.iobase import NativeSource
from apache_beam.runners.runner import PipelineResult
from apache_beam.runners.runner import PipelineRunner
from apache_beam.runners.runner import PipelineState
from apache_beam.runners.worker import operation_specs
from apache_beam.runners.worker import operations
try:
from apache_beam.runners.worker import statesampler
except ImportError:
from apache_beam.runners.worker import statesampler_fake as statesampler
from apache_beam.typehints import typehints
from apache_beam.utils import profiler
from apache_beam.utils.counters import CounterFactory
# This module is experimental. No backwards-compatibility guarantees.
class MapTaskExecutorRunner(PipelineRunner):
"""Beam runner translating a pipeline into map tasks that are then executed.
Primarily intended for testing and profiling the worker code paths.
"""
def __init__(self):
self.executors = []
def has_metrics_support(self):
"""Returns whether this runner supports metrics or not.
"""
return False
def run(self, pipeline):
MetricsEnvironment.set_metrics_supported(self.has_metrics_support())
    # List of map tasks. Each map task is a list of
# (stage_name, operation_specs.WorkerOperation) instructions.
self.map_tasks = []
# Map of pvalues to
# (map_task_index, producer_operation_index, producer_output_index)
self.outputs = {}
# Unique mappings of PCollections to strings.
self.side_input_labels = collections.defaultdict(
lambda: str(len(self.side_input_labels)))
    # Mapping of map task indices to all map tasks that must precede them.
self.dependencies = collections.defaultdict(set)
# Visit the graph, building up the map_tasks and their metadata.
super(MapTaskExecutorRunner, self).run(pipeline)
# Now run the tasks in topological order.
def compute_depth_map(deps):
memoized = {}
def compute_depth(x):
if x not in memoized:
memoized[x] = 1 + max([-1] + [compute_depth(y) for y in deps[x]])
return memoized[x]
return {x: compute_depth(x) for x in deps.keys()}
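    # For example, with deps = {2: {0, 1}, 1: {0}, 0: set()} the depth map is
    # {0: 0, 1: 1, 2: 2}, so sorting by depth yields a valid topological order.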
map_task_depths = compute_depth_map(self.dependencies)
ordered_map_tasks = sorted((map_task_depths.get(ix, -1), map_task)
for ix, map_task in enumerate(self.map_tasks))
profile_options = pipeline.options.view_as(
pipeline_options.ProfilingOptions)
if profile_options.profile_cpu:
with profiler.Profile(
profile_id='worker-runner',
profile_location=profile_options.profile_location,
log_results=True, file_copy_fn=_dependency_file_copy):
self.execute_map_tasks(ordered_map_tasks)
else:
self.execute_map_tasks(ordered_map_tasks)
return WorkerRunnerResult(PipelineState.UNKNOWN)
def metrics_containers(self):
return [op.metrics_container
for ex in self.executors
for op in ex.operations()]
def execute_map_tasks(self, ordered_map_tasks):
tt = time.time()
for ix, (_, map_task) in enumerate(ordered_map_tasks):
logging.info('Running %s', map_task)
t = time.time()
stage_names, all_operations = zip(*map_task)
# TODO(robertwb): The DataflowRunner worker receives system step names
# (e.g. "s3") that are used to label the output msec counters. We use the
# operation names here, but this is not the same scheme used by the
# DataflowRunner; the result is that the output msec counters are named
# differently.
system_names = stage_names
# Create the CounterFactory and StateSampler for this MapTask.
# TODO(robertwb): Output counters produced here are currently ignored.
counter_factory = CounterFactory()
state_sampler = statesampler.StateSampler('%s-' % ix, counter_factory)
map_executor = operations.SimpleMapTaskExecutor(
operation_specs.MapTask(
all_operations, 'S%02d' % ix,
system_names, stage_names, system_names),
counter_factory,
state_sampler)
self.executors.append(map_executor)
map_executor.execute()
logging.info(
'Stage %s finished: %0.3f sec', stage_names[0], time.time() - t)
logging.info('Total time: %0.3f sec', time.time() - tt)
def run_Read(self, transform_node):
self._run_read_from(transform_node, transform_node.transform.source)
def _run_read_from(self, transform_node, source):
"""Used when this operation is the result of reading source."""
if not isinstance(source, NativeSource):
source = iobase.SourceBundle(1.0, source, None, None)
output = transform_node.outputs[None]
element_coder = self._get_coder(output)
read_op = operation_specs.WorkerRead(source, output_coders=[element_coder])
self.outputs[output] = len(self.map_tasks), 0, 0
self.map_tasks.append([(transform_node.full_label, read_op)])
return len(self.map_tasks) - 1
def run_ParDo(self, transform_node):
transform = transform_node.transform
output = transform_node.outputs[None]
element_coder = self._get_coder(output)
map_task_index, producer_index, output_index = self.outputs[
transform_node.inputs[0]]
# If any of this ParDo's side inputs depend on outputs from this map_task,
# we can't continue growing this map task.
def is_reachable(leaf, root):
if leaf == root:
return True
else:
return any(is_reachable(x, root) for x in self.dependencies[leaf])
if any(is_reachable(self.outputs[side_input.pvalue][0], map_task_index)
for side_input in transform_node.side_inputs):
# Start a new map tasks.
input_element_coder = self._get_coder(transform_node.inputs[0])
output_buffer = OutputBuffer(input_element_coder)
fusion_break_write = operation_specs.WorkerInMemoryWrite(
output_buffer=output_buffer,
write_windowed_values=True,
input=(producer_index, output_index),
output_coders=[input_element_coder])
self.map_tasks[map_task_index].append(
(transform_node.full_label + '/Write', fusion_break_write))
original_map_task_index = map_task_index
map_task_index, producer_index, output_index = len(self.map_tasks), 0, 0
fusion_break_read = operation_specs.WorkerRead(
output_buffer.source_bundle(),
output_coders=[input_element_coder])
self.map_tasks.append(
[(transform_node.full_label + '/Read', fusion_break_read)])
self.dependencies[map_task_index].add(original_map_task_index)
def create_side_read(side_input):
label = self.side_input_labels[side_input]
output_buffer = self.run_side_write(
side_input.pvalue, '%s/%s' % (transform_node.full_label, label))
return operation_specs.WorkerSideInputSource(
output_buffer.source(), label)
do_op = operation_specs.WorkerDoFn( #
serialized_fn=pickler.dumps(DataflowRunner._pardo_fn_data(
transform_node,
lambda side_input: self.side_input_labels[side_input])),
output_tags=[PropertyNames.OUT] + ['%s_%s' % (PropertyNames.OUT, tag)
for tag in transform.output_tags
],
# Same assumption that DataflowRunner has about coders being compatible
# across outputs.
output_coders=[element_coder] * (len(transform.output_tags) + 1),
input=(producer_index, output_index),
side_inputs=[create_side_read(side_input)
for side_input in transform_node.side_inputs])
producer_index = len(self.map_tasks[map_task_index])
self.outputs[transform_node.outputs[None]] = (
map_task_index, producer_index, 0)
for ix, tag in enumerate(transform.output_tags):
self.outputs[transform_node.outputs[
tag]] = map_task_index, producer_index, ix + 1
self.map_tasks[map_task_index].append((transform_node.full_label, do_op))
for side_input in transform_node.side_inputs:
self.dependencies[map_task_index].add(self.outputs[side_input.pvalue][0])
def run_side_write(self, pcoll, label):
map_task_index, producer_index, output_index = self.outputs[pcoll]
windowed_element_coder = self._get_coder(pcoll)
output_buffer = OutputBuffer(windowed_element_coder)
write_sideinput_op = operation_specs.WorkerInMemoryWrite(
output_buffer=output_buffer,
write_windowed_values=True,
input=(producer_index, output_index),
output_coders=[windowed_element_coder])
self.map_tasks[map_task_index].append(
(label, write_sideinput_op))
return output_buffer
def run__GroupByKeyOnly(self, transform_node):
map_task_index, producer_index, output_index = self.outputs[
transform_node.inputs[0]]
grouped_element_coder = self._get_coder(transform_node.outputs[None],
windowed=False)
windowed_ungrouped_element_coder = self._get_coder(transform_node.inputs[0])
output_buffer = GroupingOutputBuffer(grouped_element_coder)
shuffle_write = operation_specs.WorkerInMemoryWrite(
output_buffer=output_buffer,
write_windowed_values=False,
input=(producer_index, output_index),
output_coders=[windowed_ungrouped_element_coder])
self.map_tasks[map_task_index].append(
(transform_node.full_label + '/Write', shuffle_write))
output_map_task_index = self._run_read_from(
transform_node, output_buffer.source())
self.dependencies[output_map_task_index].add(map_task_index)
def run_Flatten(self, transform_node):
output_buffer = OutputBuffer(self._get_coder(transform_node.outputs[None]))
output_map_task = self._run_read_from(transform_node,
output_buffer.source())
for input in transform_node.inputs:
map_task_index, producer_index, output_index = self.outputs[input]
element_coder = self._get_coder(input)
flatten_write = operation_specs.WorkerInMemoryWrite(
output_buffer=output_buffer,
write_windowed_values=True,
input=(producer_index, output_index),
output_coders=[element_coder])
self.map_tasks[map_task_index].append(
(transform_node.full_label + '/Write', flatten_write))
self.dependencies[output_map_task].add(map_task_index)
def apply_CombinePerKey(self, transform, input):
# TODO(robertwb): Support side inputs.
assert not transform.args and not transform.kwargs
return (input
| PartialGroupByKeyCombineValues(transform.fn)
| beam.GroupByKey()
| MergeAccumulators(transform.fn)
| ExtractOutputs(transform.fn))
def run_PartialGroupByKeyCombineValues(self, transform_node):
element_coder = self._get_coder(transform_node.outputs[None])
_, producer_index, output_index = self.outputs[transform_node.inputs[0]]
combine_op = operation_specs.WorkerPartialGroupByKey(
combine_fn=pickler.dumps(
(transform_node.transform.combine_fn, (), {}, ())),
output_coders=[element_coder],
input=(producer_index, output_index))
self._run_as_op(transform_node, combine_op)
def run_MergeAccumulators(self, transform_node):
self._run_combine_transform(transform_node, 'merge')
def run_ExtractOutputs(self, transform_node):
self._run_combine_transform(transform_node, 'extract')
def _run_combine_transform(self, transform_node, phase):
transform = transform_node.transform
element_coder = self._get_coder(transform_node.outputs[None])
_, producer_index, output_index = self.outputs[transform_node.inputs[0]]
combine_op = operation_specs.WorkerCombineFn(
serialized_fn=pickler.dumps(
(transform.combine_fn, (), {}, ())),
phase=phase,
output_coders=[element_coder],
input=(producer_index, output_index))
self._run_as_op(transform_node, combine_op)
def _get_coder(self, pvalue, windowed=True):
# TODO(robertwb): This should be an attribute of the pvalue itself.
return DataflowRunner._get_coder(
pvalue.element_type or typehints.Any,
pvalue.windowing.windowfn.get_window_coder() if windowed else None)
def _run_as_op(self, transform_node, op):
"""Single-output operation in the same map task as its input."""
map_task_index, _, _ = self.outputs[transform_node.inputs[0]]
op_index = len(self.map_tasks[map_task_index])
output = transform_node.outputs[None]
self.outputs[output] = map_task_index, op_index, 0
self.map_tasks[map_task_index].append((transform_node.full_label, op))
class InMemorySource(iobase.BoundedSource):
"""Source for reading an (as-yet unwritten) set of in-memory encoded elements.
"""
def __init__(self, encoded_elements, coder):
self._encoded_elements = encoded_elements
self._coder = coder
def get_range_tracker(self, unused_start_position, unused_end_position):
return None
def read(self, unused_range_tracker):
for encoded_element in self._encoded_elements:
yield self._coder.decode(encoded_element)
def default_output_coder(self):
return self._coder
class OutputBuffer(object):
def __init__(self, coder):
self.coder = coder
self.elements = []
self.encoded_elements = []
def source(self):
return InMemorySource(self.encoded_elements, self.coder)
def source_bundle(self):
return iobase.SourceBundle(
1.0, InMemorySource(self.encoded_elements, self.coder), None, None)
def __repr__(self):
return 'GroupingOutput[%r]' % len(self.elements)
def append(self, value):
self.elements.append(value)
self.encoded_elements.append(self.coder.encode(value))
class GroupingOutputBuffer(object):
def __init__(self, grouped_coder):
self.grouped_coder = grouped_coder
self.elements = collections.defaultdict(list)
self.frozen = False
def source(self):
return InMemorySource(self.encoded_elements, self.grouped_coder)
def __repr__(self):
return 'GroupingOutputBuffer[%r]' % len(self.elements)
def append(self, pair):
assert not self.frozen
k, v = pair
self.elements[k].append(v)
def freeze(self):
if not self.frozen:
self._encoded_elements = [self.grouped_coder.encode(kv)
for kv in self.elements.iteritems()]
self.frozen = True
return self._encoded_elements
@property
def encoded_elements(self):
return GroupedOutputBuffer(self)
class GroupedOutputBuffer(object):
def __init__(self, buffer):
self.buffer = buffer
def __getitem__(self, ix):
return self.buffer.freeze()[ix]
def __iter__(self):
return iter(self.buffer.freeze())
def __len__(self):
return len(self.buffer.freeze())
def __nonzero__(self):
return True
class PartialGroupByKeyCombineValues(beam.PTransform):
def __init__(self, combine_fn, native=True):
self.combine_fn = combine_fn
self.native = native
def expand(self, input):
if self.native:
return beam.pvalue.PCollection(input.pipeline)
else:
def to_accumulator(v):
return self.combine_fn.add_input(
self.combine_fn.create_accumulator(), v)
return input | beam.Map(lambda (k, v): (k, to_accumulator(v)))
class MergeAccumulators(beam.PTransform):
def __init__(self, combine_fn, native=True):
self.combine_fn = combine_fn
self.native = native
def expand(self, input):
if self.native:
return beam.pvalue.PCollection(input.pipeline)
else:
merge_accumulators = self.combine_fn.merge_accumulators
return input | beam.Map(lambda (k, vs): (k, merge_accumulators(vs)))
class ExtractOutputs(beam.PTransform):
def __init__(self, combine_fn, native=True):
self.combine_fn = combine_fn
self.native = native
def expand(self, input):
if self.native:
return beam.pvalue.PCollection(input.pipeline)
else:
extract_output = self.combine_fn.extract_output
return input | beam.Map(lambda (k, v): (k, extract_output(v)))
class WorkerRunnerResult(PipelineResult):
def wait_until_finish(self, duration=None):
pass
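# Usage sketch for this experimental runner (the pipeline shown is
# illustrative, not part of this module):
#
#   import apache_beam as beam
#   p = beam.Pipeline(runner=MapTaskExecutorRunner())
#   _ = p | beam.Create([1, 2, 3]) | beam.Map(lambda x: x * 2)
#   p.run()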
|
peppelinux/pyDBsync | refs/heads/master | sqlautocode/build/lib.linux-i686-2.7/sqlautocode/constants.py | 5 | # Nice print stuff
TAB = 12*' '
NLTAB = ',\n'+TAB
USAGE = """usage: autoload.py [options]
Generates python code for a given database schema.
options:
-h, --help Show this help
   -u URL, --url URL            Database url (e.g.: postgresql+psycopg2://user:password@host/database)
-o FILE, --output FILE Where to put the output (default is stdout)
-s NAME, --schema NAME Name of the schema to output (default is 'default')
-t T1,T2,.. , --tables T1,T2 Name of tables to inspect (default is 'all').
Support globbing character to select more tables.
ex.: -t Download* will generate a model for all tables starting with Download
-i --noindex Do not generate index information
-g --generic-types Generate generic column types rather than database-specific type
-e --example Generate code with examples how to access data
-3 --z3c Generate code for use with z3c.sqlalchemy
"""
HEADER = """\
# -*- coding: %(encoding)s -*-
## File autogenerated by SQLAutoCode
## see http://code.google.com/p/sqlautocode/
from sqlalchemy import *
%(dialect)s
metadata = MetaData()
"""
HEADER_Z3C = """\
# -*- coding: %(encoding)s -*-
## File autogenerated by SQLAutoCode
## see http://code.google.com/p/sqlautocode/
## Export type: z3c.sqlalchemy
from sqlalchemy import *
%(dialect)s
from z3c.sqlalchemy import Model
from z3c.sqlalchemy.mapper import MappedClassBase
def getModel(metadata):
model = Model()
"""
PG_IMPORT = """\
try:
from sqlalchemy.dialects.postgresql import *
except ImportError:
from sqlalchemy.databases.postgres import *
"""
FOOTER_Z3C = """
return model
"""
FOOTER_EXAMPLE = """
# some example usage
if __name__ == '__main__':
db = create_engine(%(url)r)
metadata.bind = db
# fetch first 10 items from %(tablename)s
s = %(tablename)s.select().limit(10)
rs = s.execute()
for row in rs:
print row
"""
TABLE = """ Table('%(name)s', metadata,
%(columns)s,
%(constraints)s
%(schema)s
)
"""
COLUMN = """Column(%(name)r, %(type)s%(constraints)s%(args)s)"""
FOREIGN_KEY = """ForeignKeyConstraint(%(names)s, %(specs)s, name=%(name)s)"""
INDEX = """Index(%(name)s, %(columns)s, unique=%(unique)s)"""
HEADER_DECL = """#autogenerated by sqlautocode
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relation
engine = create_engine('%s')
DeclarativeBase = declarative_base()
metadata = DeclarativeBase.metadata
metadata.bind = engine
"""
EXAMPLE_DECL = """#example on how to query your Schema
from sqlalchemy.orm import sessionmaker
session = sessionmaker(bind=engine)()
objs = session.query(%s).all()
print 'All %s objects: %%s'%%objs
"""
INTERACTIVE = """
print 'Trying to start IPython shell...',
try:
from IPython.Shell import IPShellEmbed
print 'Success! Press <ctrl-d> to exit.'
print 'Available models:%%s'%%%s
print '\\nTry something like: session.query(%s).all()'
ipshell = IPShellEmbed()
ipshell()
except:
    print 'Failed. please easy_install ipython'
"""
|
mdaniel/intellij-community | refs/heads/master | python/testData/resolve/multiFile/fromQualifiedPackageImportFile/mypackage/__init__.py | 166 | # obligatory
|
vongochung/buiquocviet | refs/heads/master | django/db/backends/postgresql_psycopg2/base.py | 81 | """
PostgreSQL database backend for Django.
Requires psycopg 2: http://initd.org/projects/psycopg2
"""
import sys
from django.db import utils
from django.db.backends import *
from django.db.backends.signals import connection_created
from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
from django.db.backends.postgresql_psycopg2.client import DatabaseClient
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation
from django.db.backends.postgresql_psycopg2.version import get_version
from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection
from django.utils.log import getLogger
from django.utils.safestring import SafeUnicode, SafeString
from django.utils.timezone import utc
try:
import psycopg2 as Database
import psycopg2.extensions
except ImportError, e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString)
psycopg2.extensions.register_adapter(SafeUnicode, psycopg2.extensions.QuotedString)
logger = getLogger('django.db.backends')
def utc_tzinfo_factory(offset):
if offset != 0:
raise AssertionError("database connection isn't set to UTC")
return utc
class CursorWrapper(object):
"""
A thin wrapper around psycopg2's normal cursor class so that we can catch
particular exception instances and reraise them with the right types.
"""
def __init__(self, cursor):
self.cursor = cursor
def execute(self, query, args=None):
try:
return self.cursor.execute(query, args)
except Database.IntegrityError, e:
raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
except Database.DatabaseError, e:
raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]
def executemany(self, query, args):
try:
return self.cursor.executemany(query, args)
except Database.IntegrityError, e:
raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
except Database.DatabaseError, e:
raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
else:
return getattr(self.cursor, attr)
def __iter__(self):
return iter(self.cursor)
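# Net effect of the wrapper, sketched: psycopg2 errors raised inside
# execute()/executemany() surface as Django's own exception types:
#
#   try:
#       cursor.execute("INSERT INTO t (id) VALUES (1)")
#   except utils.IntegrityError:
#       pass  # originally a psycopg2 IntegrityError, retyped with traceback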
class DatabaseFeatures(BaseDatabaseFeatures):
needs_datetime_string_cast = False
can_return_id_from_insert = True
requires_rollback_on_dirty_transaction = True
has_real_datatype = True
can_defer_constraint_checks = True
has_select_for_update = True
has_select_for_update_nowait = True
has_bulk_insert = True
supports_tablespaces = True
can_distinct_on_fields = True
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'postgresql'
operators = {
'exact': '= %s',
'iexact': '= UPPER(%s)',
'contains': 'LIKE %s',
'icontains': 'LIKE UPPER(%s)',
'regex': '~ %s',
'iregex': '~* %s',
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': 'LIKE %s',
'endswith': 'LIKE %s',
'istartswith': 'LIKE UPPER(%s)',
'iendswith': 'LIKE UPPER(%s)',
}
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.features = DatabaseFeatures(self)
autocommit = self.settings_dict["OPTIONS"].get('autocommit', False)
self.features.uses_autocommit = autocommit
if autocommit:
level = psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
else:
level = psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED
self._set_isolation_level(level)
self.ops = DatabaseOperations(self)
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = BaseDatabaseValidation(self)
self._pg_version = None
def check_constraints(self, table_names=None):
"""
        To check constraints, we set constraints to immediate. Then, when we're done, we must ensure they
are returned to deferred.
"""
self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE')
self.cursor().execute('SET CONSTRAINTS ALL DEFERRED')
def close(self):
self.validate_thread_sharing()
if self.connection is None:
return
try:
self.connection.close()
self.connection = None
except Database.Error:
# In some cases (database restart, network connection lost etc...)
# the connection to the database is lost without giving Django a
# notification. If we don't set self.connection to None, the error
            # will occur at every request.
self.connection = None
logger.warning('psycopg2 error while closing the connection.',
exc_info=sys.exc_info()
)
raise
def _get_pg_version(self):
if self._pg_version is None:
self._pg_version = get_version(self.connection)
return self._pg_version
pg_version = property(_get_pg_version)
def _cursor(self):
settings_dict = self.settings_dict
if self.connection is None:
if settings_dict['NAME'] == '':
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("You need to specify NAME in your Django settings file.")
conn_params = {
'database': settings_dict['NAME'],
}
conn_params.update(settings_dict['OPTIONS'])
if 'autocommit' in conn_params:
del conn_params['autocommit']
if settings_dict['USER']:
conn_params['user'] = settings_dict['USER']
if settings_dict['PASSWORD']:
conn_params['password'] = settings_dict['PASSWORD']
if settings_dict['HOST']:
conn_params['host'] = settings_dict['HOST']
if settings_dict['PORT']:
conn_params['port'] = settings_dict['PORT']
self.connection = Database.connect(**conn_params)
self.connection.set_client_encoding('UTF8')
tz = 'UTC' if settings.USE_TZ else settings_dict.get('TIME_ZONE')
if tz:
try:
get_parameter_status = self.connection.get_parameter_status
except AttributeError:
# psycopg2 < 2.0.12 doesn't have get_parameter_status
conn_tz = None
else:
conn_tz = get_parameter_status('TimeZone')
if conn_tz != tz:
# Set the time zone in autocommit mode (see #17062)
self.connection.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
self.connection.cursor().execute(
self.ops.set_time_zone_sql(), [tz])
self.connection.set_isolation_level(self.isolation_level)
self._get_pg_version()
connection_created.send(sender=self.__class__, connection=self)
cursor = self.connection.cursor()
cursor.tzinfo_factory = utc_tzinfo_factory if settings.USE_TZ else None
return CursorWrapper(cursor)
def _enter_transaction_management(self, managed):
"""
Switch the isolation level when needing transaction support, so that
the same transaction is visible across all the queries.
"""
if self.features.uses_autocommit and managed and not self.isolation_level:
self._set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_READ_COMMITTED)
def _leave_transaction_management(self, managed):
"""
If the normal operating mode is "autocommit", switch back to that when
leaving transaction management.
"""
if self.features.uses_autocommit and not managed and self.isolation_level:
self._set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
def _set_isolation_level(self, level):
"""
Do all the related feature configurations for changing isolation
levels. This doesn't touch the uses_autocommit feature, since that
controls the movement *between* isolation levels.
"""
assert level in range(5)
try:
if self.connection is not None:
self.connection.set_isolation_level(level)
finally:
self.isolation_level = level
self.features.uses_savepoints = bool(level)
def _commit(self):
if self.connection is not None:
try:
return self.connection.commit()
except Database.IntegrityError, e:
raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
|
notionparallax/ShadowWolf | refs/heads/master | beowulf/app/model.py | 1 | import pypyodbc
class Model(object):
def __init__(self, conn_string=None):
self.conn_string = conn_string or "Driver=SQLITEY;Database=/ShadowWolf/beowulf/test.db"
def conn(self):
return pypyodbc.connect(self.conn_string)
def execute(self, query, args=[], save_result=True):
results = None
with self.conn() as conn:
cursor = conn.cursor()
cursor.execute(query,args)
if save_result: results = cursor.fetchall()
return results
def getlogins(self, project_number):
return self.execute("""
SELECT resourceid, SUM(actualhours)
FROM ProjResourceDimension prd
JOIN ProjFact pf on prd.SYSResourceID = pf.SYSResourceID
JOIN ProjProjectOutlineDimension ppod on pf.SYSProjectOutlineID = ppod.SYSProjectOutlineID
WHERE projectcode = ?
AND actualhours <> 0
GROUP BY resourceid
""", [project_number])
def getprojectnumbers(self, login):
return self.execute("""
SELECT projectcode, SUM(actualhours)
FROM ProjResourceDimension prd
JOIN ProjFact pf on prd.SYSResourceID = pf.SYSResourceID
JOIN ProjProjectOutlineDimension ppod on pf.SYSProjectOutlineID = ppod.SYSProjectOutlineID
WHERE resourceid = ?
AND actualhours <> 0
GROUP BY projectcode
ORDER BY projectcode
""", [login])
|
albertomurillo/ansible | refs/heads/devel | lib/ansible/modules/network/cnos/cnos_user.py | 38 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
#
# Copyright (C) 2019 Lenovo.
# (c) 2017, Ansible by Red Hat, inc
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module to work on management of local users on Lenovo CNOS Switches
# Lenovo Networking
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: cnos_user
version_added: "2.8"
author: "Anil Kumar Muraleedharan (@amuraleedhar)"
short_description: Manage the collection of local users on Lenovo CNOS devices
description:
- This module provides declarative management of the local usernames
configured on Lenovo CNOS devices. It allows playbooks to manage
either individual usernames or the collection of usernames in the
current running config. It also supports purging usernames from the
configuration that are not explicitly defined.
options:
aggregate:
description:
- The set of username objects to be configured on the remote
Lenovo CNOS device. The list entries can either be the username
or a hash of username and properties. This argument is mutually
exclusive with the C(name) argument.
aliases: ['users', 'collection']
name:
description:
- The username to be configured on the remote Lenovo CNOS
device. This argument accepts a string value and is mutually
exclusive with the C(aggregate) argument.
configured_password:
description:
- The password to be configured on the network device. The
password needs to be provided in cleartext and it will be encrypted
on the device.
        Please note that this option is not the same as C(provider password).
update_password:
description:
- Since passwords are encrypted in the device running config, this
argument will instruct the module when to change the password. When
set to C(always), the password will always be updated in the device
and when set to C(on_create) the password will be updated only if
the username is created.
default: always
choices: ['on_create', 'always']
role:
description:
- The C(role) argument configures the role for the username in the
device running configuration. The argument accepts a string value
defining the role name. This argument does not check if the role
has been configured on the device.
aliases: ['roles']
sshkey:
description:
- The C(sshkey) argument defines the SSH public key to configure
for the username. This argument accepts a valid SSH key value.
purge:
description:
- The C(purge) argument instructs the module to consider the
resource definition absolute. It will remove any previously
configured usernames on the device with the exception of the
`admin` user which cannot be deleted per cnos constraints.
type: bool
default: 'no'
state:
description:
- The C(state) argument configures the state of the username definition
as it relates to the device operational configuration. When set
to I(present), the username(s) should be configured in the device active
configuration and when set to I(absent) the username(s) should not be
        in the device active configuration.
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: create a new user
cnos_user:
name: ansible
sshkey: "{{ lookup('file', '~/.ssh/id_rsa.pub') }}"
state: present
- name: remove all users except admin
cnos_user:
purge: yes
- name: set multiple users role
  cnos_user:
    aggregate:
      - name: netop
      - name: netend
    role: network-operator
    state: present
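# Illustrative only; the vaulted variable name is hypothetical.
- name: update the password only when the user is first created
  cnos_user:
    name: ansible
    configured_password: "{{ vault_cnos_password }}"
    update_password: on_create
    state: present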
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- name ansible
- name ansible password password
start:
description: The time the job started
returned: always
type: str
sample: "2016-11-16 10:38:15.126146"
end:
description: The time the job ended
returned: always
type: str
sample: "2016-11-16 10:38:25.595612"
delta:
description: The time elapsed to perform all operations
returned: always
type: str
sample: "0:00:10.469466"
"""
import re
from copy import deepcopy
from functools import partial
from ansible.module_utils.network.cnos.cnos import run_commands, load_config
from ansible.module_utils.network.cnos.cnos import get_config
from ansible.module_utils.network.cnos.cnos import cnos_argument_spec
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import string_types, iteritems
from ansible.module_utils.network.common.utils import to_list
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.cnos.cnos import get_user_roles
def validate_roles(value, module):
for item in value:
if item not in get_user_roles():
module.fail_json(msg='invalid role specified')
def map_obj_to_commands(updates, module):
commands = list()
state = module.params['state']
update_password = module.params['update_password']
for update in updates:
want, have = update
def needs_update(x):
return want.get(x) and (want.get(x) != have.get(x))
def add(x):
return commands.append('username %s %s' % (want['name'], x))
def remove(x):
return commands.append('no username %s %s' % (want['name'], x))
if want['state'] == 'absent':
commands.append('no username %s' % want['name'])
continue
if want['state'] == 'present' and not have:
commands.append('username %s' % want['name'])
if needs_update('configured_password'):
if update_password == 'always' or not have:
add('password %s' % want['configured_password'])
if needs_update('sshkey'):
add('sshkey %s' % want['sshkey'])
if want['roles']:
if have:
for item in set(have['roles']).difference(want['roles']):
remove('role %s' % item)
for item in set(want['roles']).difference(have['roles']):
add('role %s' % item)
else:
for item in want['roles']:
add('role %s' % item)
return commands
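# For example (hypothetical values), creating a brand-new user:
#   want = {'name': 'ansible', 'state': 'present', 'configured_password':
#           'secret', 'sshkey': None, 'roles': ['network-operator']}
#   have = {}   # the user is absent from the running config
# yields commands == ['username ansible',
#                     'username ansible password secret',
#                     'username ansible role network-operator']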
def parse_password(data):
if 'no password set' in data:
return None
return '<PASSWORD>'
def parse_roles(data):
roles = list()
if 'role:' in data:
items = data.split()
my_item = items[items.index('role:') + 1]
roles.append(my_item)
return roles
def parse_username(data):
name = data.split(' ', 1)[0]
username = name[1:]
return username
def parse_sshkey(data):
key = None
    if 'sshkey:' in data:
items = data.split()
key = items[items.index('sshkey:') + 1]
return key
def map_config_to_obj(module):
out = run_commands(module, ['show user-account'])
data = out[0]
objects = list()
datum = data.split('User')
for item in datum:
objects.append({
'name': parse_username(item),
'configured_password': parse_password(item),
'sshkey': parse_sshkey(item),
'roles': parse_roles(item),
'state': 'present'
})
return objects
def get_param_value(key, item, module):
# if key doesn't exist in the item, get it from module.params
if not item.get(key):
value = module.params[key]
# if key does exist, do a type check on it to validate it
else:
value_type = module.argument_spec[key].get('type', 'str')
type_checker = module._CHECK_ARGUMENT_TYPES_DISPATCHER[value_type]
type_checker(item[key])
value = item[key]
return value
def map_params_to_obj(module):
aggregate = module.params['aggregate']
if not aggregate:
if not module.params['name'] and module.params['purge']:
return list()
elif not module.params['name']:
module.fail_json(msg='username is required')
else:
collection = [{'name': module.params['name']}]
else:
collection = list()
for item in aggregate:
if not isinstance(item, dict):
collection.append({'name': item})
elif 'name' not in item:
module.fail_json(msg='name is required')
else:
collection.append(item)
objects = list()
for item in collection:
get_value = partial(get_param_value, item=item, module=module)
item.update({
'configured_password': get_value('configured_password'),
'sshkey': get_value('sshkey'),
'roles': get_value('roles'),
'state': get_value('state')
})
for key, value in iteritems(item):
if value:
# validate the param value (if validator func exists)
validator = globals().get('validate_%s' % key)
if all((value, validator)):
validator(value, module)
objects.append(item)
return objects
def update_objects(want, have):
updates = list()
for entry in want:
item = next((i for i in have if i['name'] == entry['name']), None)
if all((item is None, entry['state'] == 'present')):
updates.append((entry, {}))
elif item:
for key, value in iteritems(entry):
if value and value != item[key]:
updates.append((entry, item))
return updates
def main():
""" main entry point for module execution
"""
element_spec = dict(
name=dict(),
configured_password=dict(no_log=True),
update_password=dict(default='always', choices=['on_create', 'always']),
roles=dict(type='list', aliases=['role']),
sshkey=dict(),
state=dict(default='present', choices=['present', 'absent'])
)
aggregate_spec = deepcopy(element_spec)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
argument_spec = dict(
aggregate=dict(type='list', elements='dict',
options=aggregate_spec, aliases=['collection', 'users']),
purge=dict(type='bool', default=False)
)
argument_spec.update(element_spec)
mutually_exclusive = [('name', 'aggregate')]
module = AnsibleModule(argument_spec=argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
warnings = list()
result = {'changed': False}
result['warnings'] = warnings
want = map_params_to_obj(module)
have = map_config_to_obj(module)
commands = map_obj_to_commands(update_objects(want, have), module)
if module.params['purge']:
want_users = [x['name'] for x in want]
have_users = [x['name'] for x in have]
for item in set(have_users).difference(want_users):
if item != 'admin':
if not item.strip():
continue
item = item.replace("\\", "\\\\")
commands.append('no username %s' % item)
result['commands'] = commands
    # the CNOS CLI itself refuses to delete admin, but fail fast here as well
if 'no username admin' in commands:
module.fail_json(msg='Cannot delete the `admin` account')
if commands:
if not module.check_mode:
load_config(module, commands)
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
|
azraelrabbit/ShadowDNS | refs/heads/master | test.py | 79 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
sys.path.insert(0, 'shadowsocks')
import os
import signal
import select
import time
from subprocess import Popen, PIPE
p1 = Popen(['sudo', sys.executable, 'shadowdns/dnsrelay.py', '-c', sys.argv[-1]],
shell=False, bufsize=0, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
p2 = Popen(['ssserver', '-c', sys.argv[-1]], shell=False, bufsize=0, stdin=PIPE,
stdout=PIPE, stderr=PIPE, close_fds=True, env=os.environ)
p3 = None
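# Harness flow: watch both children's stdout/stderr until the DNS relay and
# the shadowsocks server both report ready, then run dig against the local
# relay and reuse its exit code as the test result.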
try:
local_ready = False
server_ready = False
fdset = [p1.stdout, p2.stdout, p1.stderr, p2.stderr]
while True:
r, w, e = select.select(fdset, [], fdset)
if e:
break
for fd in r:
line = fd.readline()
sys.stdout.write(line)
if line.find('starting dns') >= 0:
local_ready = True
if line.find('starting server') >= 0:
server_ready = True
if local_ready and server_ready and p3 is None:
time.sleep(1)
p3 = Popen(['dig', '@127.0.0.1', 'any', 'google.com'],
shell=False, bufsize=0, close_fds=True)
break
if p3 is not None:
r = p3.wait()
if r == 0:
print 'test passed'
sys.exit(r)
finally:
for p in [p1, p2]:
try:
os.kill(p.pid, signal.SIGTERM)
except OSError:
pass
sys.exit(-1)
|
lmorchard/django-allauth | refs/heads/master | allauth/socialaccount/providers/odnoklassniki/tests.py | 62 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from allauth.socialaccount.tests import create_oauth2_tests
from allauth.socialaccount.providers import registry
from allauth.tests import MockedResponse
from .provider import OdnoklassnikiProvider
class OdnoklassnikiTests(
create_oauth2_tests(registry.by_id(OdnoklassnikiProvider.id))):
def get_mocked_response(self, verified_email=True):
return MockedResponse(200, """
{"uid":"561999209121","birthday":"1999-09-09","age":33,"first_name":"Ivan","last_name":"Petrov","name":"Ivan Petrov","locale":"en","gender":"male","has_email":true,"location":{"city":"Moscow","country":"RUSSIAN_FEDERATION","countryCode":"RU","countryName":"Russia"},"online":"web","pic_1":"http://i500.mycdn.me/res/stub_50x50.gif","pic_2":"http://usd1.mycdn.me/res/stub_128x96.gif"}
""")
def get_login_response_json(self, with_refresh_token=True):
return '{"access_token": "testac"}' # noqa
|
grap/odoo-eshop | refs/heads/8.0 | odoo_eshop/eshop_app/controllers/controller_sale_order.py | 1 | #! /usr/bin/env python
# -*- encoding: utf-8 -*-
# Extra Libs
from flask import request, render_template, flash, jsonify
from flask.ext.babel import gettext as _
# Custom Tools
from ..application import app
from ..tools.web import redirect_url_for
from ..tools.auth import requires_auth
from ..models.tools import currency
from ..models.models import execute_odoo_command
from ..models.sale_order import (
get_current_sale_order_lines,
get_current_sale_order,
set_quantity,
)
from ..models.res_partner import get_current_partner_id
from ..models.res_company import get_current_company
# ############################################################################
# Shopping Cart Management Routes
# ############################################################################
@app.route("/shopping_cart")
@requires_auth
def shopping_cart():
order = get_current_sale_order()
if not order:
return redirect_url_for('home')
sale_order_lines = get_current_sale_order_lines(order)
return render_template(
'shopping_cart.html',
sale_order_lines=sale_order_lines)
@app.route('/shopping_cart_note_update', methods=['POST'])
def shopping_cart_note_update():
note = execute_odoo_command(
"sale.order",
"eshop_set_note",
get_current_partner_id(),
request.form['note'],
)
result = {
'state': 'success',
'note': note,
'message': _("Your comment has been successfully updated.")}
if request.is_xhr:
return jsonify(result=result)
flash(result['message'], result['state'])
return redirect_url_for('shopping_cart')
@app.route('/shopping_cart_quantity_update', methods=['POST'])
def shopping_cart_quantity_update():
res = set_quantity(
int(request.form['product_id']), request.form['new_quantity'], False,
'set')
if request.is_xhr:
return jsonify(result=res)
flash(res['message'], res['state'])
return redirect_url_for('shopping_cart')
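# The POST handlers above answer in two modes: JSON for XHR callers
# (request.is_xhr) and a flashed message plus redirect for plain form posts.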
@app.route("/shopping_cart_delete")
@requires_auth
def shopping_cart_delete():
execute_odoo_command(
"sale.order",
"eshop_delete_current_sale_order",
get_current_partner_id(),
)
flash(_("Your shopping cart has been successfully deleted."), 'success')
return redirect_url_for('home_logged')
@app.route("/shopping_cart_delete_line/<int:line_id>")
@requires_auth
def shopping_cart_delete_line(line_id):
result = execute_odoo_command(
"sale.order",
"eshop_delete_sale_order_line",
get_current_partner_id(),
line_id,
)
if result == "line_deleted":
flash(_("The Line has been successfully deleted."), 'success')
return redirect_url_for('shopping_cart')
else:
flash(_("Your shopping cart has been deleted."), 'success')
return redirect_url_for('home_logged')
# ############################################################################
# Recovery Moment Place Route
# ############################################################################
@app.route("/recovery_moment_place")
@requires_auth
def recovery_moment_place():
company = get_current_company()
recovery_moments = execute_odoo_command(
"sale.recovery.moment", "browse",
[('state', '=', 'pending_sale')],
order="min_recovery_date"
)
sale_order = get_current_sale_order()
if (company.eshop_minimum_price != 0
and company.eshop_minimum_price > sale_order.amount_total):
        flash(
            _("You have not reached the minimum order amount: ") +
            currency(company.eshop_minimum_price),
            'warning')
return redirect_url_for('shopping_cart')
return render_template(
'recovery_moment_place.html',
recovery_moments=recovery_moments)
@app.route("/select_recovery_moment/<int:recovery_moment_id>")
@requires_auth
def select_recovery_moment(recovery_moment_id):
result = execute_odoo_command(
"sale.order",
"eshop_select_recovery_moment",
get_current_partner_id(),
recovery_moment_id,
)
if result == "recovery_moment_complete":
        flash(_(
            "This recovery moment is already full."
            " Please select another one."), 'error')
return redirect_url_for('shopping_cart')
else:
flash(_("Your Sale Order is now confirmed."), 'success')
return redirect_url_for('orders')
|
aaronfang/personal_scripts | refs/heads/master | af_scripts/tmp/freezeVtxTransform.py | 2 | import pymel.core as pm
def freezeVtxTransformation():
curSel = pm.ls(sl=1, fl=1)
for sel in curSel:
sel = sel.getShape()
pm.polyMoveVertex(sel, ch=0)
pm.select(d=True)
freezeVtxTransformation()
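# Note: polyMoveVertex with ch=0 applies the move without recording
# construction history, which is what bakes ("freezes") any vertex-level
# transforms into the shape.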
|
stormi/weblate | refs/heads/master | weblate/trans/tests/test_lock.py | 11 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Tests for locking.
"""
from weblate.trans.tests.test_views import ViewTestCase
from weblate.trans.models.subproject import SubProject
from django.core.urlresolvers import reverse
class LockTest(ViewTestCase):
def setUp(self):
super(LockTest, self).setUp()
# Need extra power
self.user.is_superuser = True
self.user.save()
def assertComponentLocked(self):
subproject = SubProject.objects.get(
slug=self.subproject.slug,
project__slug=self.project.slug,
)
self.assertTrue(subproject.locked)
response = self.client.get(
reverse('subproject', kwargs=self.kw_subproject)
)
self.assertContains(
response,
'This translation is currently locked for updates!'
)
def assertComponentNotLocked(self):
subproject = SubProject.objects.get(
slug=self.subproject.slug,
project__slug=self.project.slug,
)
self.assertFalse(subproject.locked)
response = self.client.get(
reverse('subproject', kwargs=self.kw_subproject)
)
self.assertNotContains(
response,
'This translation is currently locked for updates!'
)
def test_subproject(self):
response = self.client.get(
reverse('lock_subproject', kwargs=self.kw_subproject)
)
self.assertRedirects(
response,
reverse('subproject', kwargs=self.kw_subproject)
)
self.assertComponentLocked()
response = self.client.get(
reverse('unlock_subproject', kwargs=self.kw_subproject)
)
self.assertRedirects(
response,
reverse('subproject', kwargs=self.kw_subproject)
)
self.assertComponentNotLocked()
def test_project(self):
response = self.client.get(
reverse('lock_project', kwargs=self.kw_project)
)
self.assertRedirects(
response,
reverse('project', kwargs=self.kw_project)
)
self.assertComponentLocked()
response = self.client.get(
reverse('subproject', kwargs=self.kw_subproject)
)
self.assertContains(
response,
'This translation is currently locked for updates!'
)
response = self.client.get(
reverse('unlock_project', kwargs=self.kw_project)
)
self.assertRedirects(
response,
reverse('project', kwargs=self.kw_project)
)
self.assertComponentNotLocked()
def test_translation(self):
response = self.client.get(
reverse('lock_translation', kwargs=self.kw_translation)
)
self.assertRedirects(
response,
reverse('translation', kwargs=self.kw_translation)
)
self.assertTrue(self.get_translation().is_user_locked())
response = self.client.get(
reverse('unlock_translation', kwargs=self.kw_translation)
)
self.assertRedirects(
response,
reverse('translation', kwargs=self.kw_translation)
)
self.assertFalse(self.get_translation().is_user_locked())
response = self.client.get(
reverse('js-lock', kwargs=self.kw_translation)
)
self.assertFalse(self.get_translation().is_user_locked())
|
taohungyang/cloud-custodian | refs/heads/master | tests/test_ec2.py | 1 | # Copyright 2015-2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import unittest
import time
from datetime import datetime
from dateutil import tz, zoneinfo
from mock import mock
from jsonschema.exceptions import ValidationError
from c7n.exceptions import PolicyValidationError
from c7n.resources import ec2
from c7n.resources.ec2 import actions, QueryFilter
from c7n import tags, utils
from .common import BaseTest
class TestTagAugmentation(BaseTest):
def test_tag_augment_empty(self):
session_factory = self.replay_flight_data("test_ec2_augment_tag_empty")
# recording was modified to be sans tags
policy = self.load_policy(
{"name": "ec2-tags", "resource": "ec2"}, session_factory=session_factory
)
resources = policy.run()
self.assertEqual(len(resources), 0)
def test_tag_augment(self):
session_factory = self.replay_flight_data("test_ec2_augment_tags")
# recording was modified to be sans tags
policy = self.load_policy(
{
"name": "ec2-tags",
"resource": "ec2",
"filters": [{"tag:Env": "Production"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
class TestInstanceAttrFilter(BaseTest):
def test_attr_filter(self):
session_factory = self.replay_flight_data("test_ec2_instance_attribute")
policy = self.load_policy(
{
"name": "ec2-attr",
"resource": "ec2",
"filters": [
{
"type": "instance-attribute",
"attribute": "rootDeviceName",
"key": "Value",
"value": "/dev/sda1",
}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(
resources[0]["c7n:attribute-rootDeviceName"], {"Value": "/dev/sda1"}
)
class TestMetricFilter(BaseTest):
def test_metric_filter(self):
session_factory = self.replay_flight_data("test_ec2_metric")
policy = self.load_policy(
{
"name": "ec2-utilization",
"resource": "ec2",
"filters": [
{
"type": "metrics",
"name": "CPUUtilization",
"days": 3,
"value": 1.5,
}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
class TestPropagateSpotTags(BaseTest):
def test_propagate_spot(self):
session_factory = self.replay_flight_data("test_ec2_propagate_spot_tags")
policy = self.load_policy(
{
"name": "ec2-spot",
"resource": "ec2",
"query": [{"instance-id": "i-01db165f1452ef5e4"}],
"actions": [{"type": "propagate-spot-tags", "only_tags": ["Name"]}],
},
session_factory=session_factory,
)
policy.run()
client = session_factory().client("ec2")
tags = {
t["Key"]: t["Value"]
for t in client.describe_tags(
Filters=[{"Name": "resource-id", "Values": ["i-01db165f1452ef5e4"]}]
            ).get("Tags", [])
}
self.assertEqual(tags, {"Name": "Test"})
class TestDisableApiTermination(BaseTest):
def test_term_prot_enabled(self):
session_factory = self.replay_flight_data(
"test_ec2_termination-protected_filter"
)
policy = self.load_policy(
{
"name": "ec2-termination-enabled",
"resource": "ec2",
"filters": [{"type": "termination-protected"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]["InstanceId"], "i-092f500eaad726b71")
def test_term_prot_not_enabled(self):
session_factory = self.replay_flight_data(
"test_ec2_termination-protected_filter"
)
policy = self.load_policy(
{
"name": "ec2-termination-NOT-enabled",
"resource": "ec2",
"filters": [{"not": [{"type": "termination-protected"}]}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 2)
self.assertEqual(
sorted([x["InstanceId"] for x in resources]),
["i-02117c13e1d21b229", "i-0718418de3bb4ae2a"],
)
def test_policy_permissions(self):
session_factory = self.replay_flight_data(
"test_ec2_termination-protected_filter"
)
policy = self.load_policy(
{
"name": "ec2-termination-enabled",
"resource": "ec2",
"filters": [{"type": "termination-protected"}],
},
session_factory=session_factory,
)
perms = policy.get_permissions()
self.assertEqual(
perms,
set(
(
"ec2:DescribeInstances",
"ec2:DescribeTags",
"ec2:DescribeInstanceAttribute",
)
),
)
class TestHealthEventsFilter(BaseTest):
def test_ec2_health_events_filter(self):
session_factory = self.replay_flight_data("test_ec2_health_events_filter")
policy = self.load_policy(
{
"name": "ec2-health-events-filter",
"resource": "ec2",
"filters": [{"type": "health-event"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
class TestTagTrim(BaseTest):
def test_ec2_tag_trim(self):
self.patch(tags.TagTrim, "max_tag_count", 10)
session_factory = self.replay_flight_data("test_ec2_tag_trim")
ec2 = session_factory().client("ec2")
start_tags = {
t["Key"]: t["Value"]
for t in ec2.describe_tags(
Filters=[{"Name": "resource-id", "Values": ["i-fdb01920"]}]
            )["Tags"]
}
policy = self.load_policy(
{
"name": "ec2-tag-trim",
"resource": "ec2",
"filters": [{"type": "tag-count", "count": 10}],
"actions": [
{
"type": "tag-trim",
"space": 1,
"preserve": [
"Name",
"Env",
"Account",
"Platform",
"Classification",
"Planet",
],
}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
end_tags = {
t["Key"]: t["Value"]
for t in ec2.describe_tags(
Filters=[{"Name": "resource-id", "Values": ["i-fdb01920"]}]
            )["Tags"]
}
self.assertEqual(len(start_tags) - 1, len(end_tags))
self.assertTrue("Containers" in start_tags)
self.assertFalse("Containers" in end_tags)
class TestVolumeFilter(BaseTest):
def test_ec2_attached_ebs_filter(self):
session_factory = self.replay_flight_data("test_ec2_attached_ebs_filter")
policy = self.load_policy(
{
"name": "ec2-unencrypted-vol",
"resource": "ec2",
"filters": [{"type": "ebs", "key": "Encrypted", "value": False}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
# DISABLED / Re-record flight data on public account
def test_ec2_attached_volume_skip_block(self):
session_factory = self.replay_flight_data("test_ec2_attached_ebs_filter")
policy = self.load_policy(
{
"name": "ec2-unencrypted-vol",
"resource": "ec2",
"filters": [
{
"type": "ebs",
"skip-devices": ["/dev/sda1", "/dev/xvda", "/dev/sdb1"],
"key": "Encrypted",
"value": False,
}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 0)
class TestResizeInstance(BaseTest):
def test_ec2_resize(self):
# preconditions - three instances (2 m4.4xlarge, 1 m4.1xlarge)
# one of the instances stopped
session_factory = self.replay_flight_data("test_ec2_resize")
policy = self.load_policy(
{
"name": "ec2-resize",
"resource": "ec2",
"filters": [
{
"type": "value",
"key": "State.Name",
"value": ["running", "stopped"],
"op": "in",
},
{
"type": "value",
"key": "InstanceType",
"value": ["m4.2xlarge", "m4.4xlarge"],
"op": "in",
},
],
"actions": [
{
"type": "resize",
"restart": True,
"default": "m4.large",
"type-map": {"m4.4xlarge": "m4.2xlarge"},
}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 3)
stopped, running = [], []
for i in resources:
if i["State"]["Name"] == "running":
running.append(i["InstanceId"])
if i["State"]["Name"] == "stopped":
stopped.append(i["InstanceId"])
instances = utils.query_instances(
session_factory(), InstanceIds=[r["InstanceId"] for r in resources]
)
cur_stopped, cur_running = [], []
for i in instances:
if i["State"]["Name"] == "running":
cur_running.append(i["InstanceId"])
if i["State"]["Name"] == "stopped":
cur_stopped.append(i["InstanceId"])
cur_running.sort()
running.sort()
self.assertEqual(cur_stopped, stopped)
self.assertEqual(cur_running, running)
instance_types = [i["InstanceType"] for i in instances]
instance_types.sort()
self.assertEqual(
instance_types, list(sorted(["m4.large", "m4.2xlarge", "m4.2xlarge"]))
)
class TestStateTransitionAgeFilter(BaseTest):
def test_ec2_state_transition_age(self):
session_factory = self.replay_flight_data(
"test_ec2_state_transition_age_filter"
)
policy = self.load_policy(
{
"name": "ec2-state-transition-age",
"resource": "ec2",
"filters": [
{"State.Name": "running"}, {"type": "state-age", "days": 30}
],
},
session_factory=session_factory,
)
resources = policy.run()
# compare stateTransition reason to expected
self.assertEqual(len(resources), 1)
self.assertEqual(
resources[0]["StateTransitionReason"],
"User initiated (2015-11-25 10:11:55 GMT)",
)
def test_date_parsing(self):
instance = ec2.StateTransitionAge(None)
# Missing key
self.assertIsNone(instance.get_resource_date({}))
# Bad date format
self.assertRaises(
ValueError,
instance.get_resource_date,
{"StateTransitionReason": "User initiated (201-02-06 17:77:00 GMT)"},
)
# Won't match regex
self.assertIsNone(
instance.get_resource_date(
{"StateTransitionReason": "Server.InternalError"}
)
)
# Test for success
self.assertEqual(
instance.get_resource_date(
{"StateTransitionReason": "User initiated (2017-02-06 17:57:00 GMT)"}
),
datetime(2017, 2, 6, 17, 57, tzinfo=tz.tzutc()),
)
class TestImageAgeFilter(BaseTest):
def test_ec2_image_age(self):
session_factory = self.replay_flight_data("test_ec2_image_age_filter")
policy = self.load_policy(
{
"name": "ec2-image-age",
"resource": "ec2",
"filters": [
{"State.Name": "running"}, {"type": "image-age", "days": 30}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
class TestImageFilter(BaseTest):
def test_ec2_image(self):
session_factory = self.replay_flight_data("test_ec2_image_filter")
policy = self.load_policy(
{
"name": "ec2-image",
"resource": "ec2",
"filters": [{"type": "image", "key": "Public", "value": True}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]["InstanceId"], "i-039628786cabe8c16")
class TestInstanceAge(BaseTest):
# placebo doesn't record tz information
def test_ec2_instance_age(self):
session_factory = self.replay_flight_data("test_ec2_instance_age_filter")
policy = self.load_policy(
{
"name": "ec2-instance-age",
"resource": "ec2",
"filters": [
{"State.Name": "running"}, {"type": "instance-age", "days": 0}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
class TestTag(BaseTest):
def test_ec2_tag(self):
session_factory = self.replay_flight_data("test_ec2_mark")
policy = self.load_policy(
{
"name": "ec2-test-mark",
"resource": "ec2",
"filters": [{"State.Name": "running"}],
"actions": [{"type": "tag", "key": "Testing", "value": "Testing123"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
def test_ec2_tag_errors(self):
# Specifying both 'key' and 'tag' is an error
policy = {
"name": "ec2-tag-error",
"resource": "ec2",
"actions": [
{"type": "tag", "key": "Testing", "tag": "foo", "value": "TestingError"}
],
}
self.assertRaises(PolicyValidationError, self.load_policy, policy)
# Invalid op for 'mark-for-op' action
policy = {
"name": "ec2-tag-error",
"resource": "ec2",
"actions": [{"type": "mark-for-op", "op": "fake"}],
}
self.assertRaises(PolicyValidationError, self.load_policy, policy)
def test_ec2_untag(self):
session_factory = self.replay_flight_data("test_ec2_untag")
policy = self.load_policy(
{
"name": "ec2-test-unmark",
"resource": "ec2",
"filters": [{"tag:Testing": "not-null"}],
"actions": [{"type": "remove-tag", "tags": ["Testing"]}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
def test_ec2_untag_array(self):
session_factory = self.replay_flight_data("test_ec2_untag_array")
policy = self.load_policy(
{
"name": "ec2-test-unmark-array",
"resource": "ec2",
"filters": [{"tag:Testing": "not-null"}],
"actions": [
{
"type": "remove-tag",
"tags": ["Testing", "TestingTwo", "TestingThree"],
}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
def test_ec2_normalize_tag(self):
session_factory = self.replay_flight_data("test_ec2_normalize_tag")
policy = self.load_policy(
{
"name": "ec2-test-normalize-tag-lower",
"resource": "ec2",
"filters": [{"tag:Testing-lower": "not-null"}],
"actions": [
{"type": "normalize-tag", "key": "Testing-lower", "action": "lower"}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
policy = self.load_policy(
{
"name": "ec2-test-normalize-tag-upper",
"resource": "ec2",
"filters": [{"tag:Testing-upper": "not-null"}],
"actions": [
{"type": "normalize-tag", "key": "Testing-upper", "action": "upper"}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
policy = self.load_policy(
{
"name": "ec2-test-normalize-tag-title",
"resource": "ec2",
"filters": [{"tag:Testing-title": "not-null"}],
"actions": [
{"type": "normalize-tag", "key": "Testing-title", "action": "title"}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
policy = self.load_policy(
{
"name": "ec2-test-normalize-tag-strip",
"resource": "ec2",
"filters": [{"tag:Testing-strip": "not-null"}],
"actions": [
{
"type": "normalize-tag",
"key": "Testing-strip",
"action": "strip",
"value": "blah",
}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
def test_ec2_rename_tag(self):
session_factory = self.replay_flight_data("test_ec2_rename_tag")
policy = self.load_policy(
{
"name": "ec2-rename-start",
"resource": "ec2",
"filters": [{"tag:Testing": "present"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 3)
policy = self.load_policy(
{
"name": "ec2-rename-tag",
"resource": "ec2",
"actions": [
{"type": "rename-tag", "old_key": "Testing", "new_key": "Testing1"}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 3)
policy = self.load_policy(
{
"name": "ec2-rename-end",
"resource": "ec2",
"filters": [{"tag:Testing1": "present"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 3)
def test_ec2_mark_zero(self):
localtz = zoneinfo.gettz("America/New_York")
dt = datetime.now(localtz)
dt = dt.replace(year=2017, month=11, day=24, hour=7, minute=00)
session_factory = self.replay_flight_data("test_ec2_mark_zero")
session = session_factory(region="us-east-1")
ec2 = session.client("ec2")
        resource = ec2.describe_instances(
            InstanceIds=["i-04d3e0630bd342566"]
        )["Reservations"][0]["Instances"][0]
tags = [t["Value"] for t in resource["Tags"] if t["Key"] == "maid_status"]
self.assertEqual(len(tags), 0)
policy = self.load_policy(
{
"name": "ec2-mark-zero-days",
"resource": "ec2",
"filters": [{"tag:CreatorName": "joshuaroot"}],
"actions": [{"type": "mark-for-op", "days": 0, "op": "terminate"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]["InstanceId"], "i-04d3e0630bd342566")
        resource = ec2.describe_instances(
            InstanceIds=["i-04d3e0630bd342566"]
        )["Reservations"][0]["Instances"][0]
tags = [t["Value"] for t in resource["Tags"] if t["Key"] == "maid_status"]
result = datetime.strptime(
tags[0].strip().split("@", 1)[-1], "%Y/%m/%d"
        ).replace(tzinfo=localtz)
self.assertEqual(result.date(), dt.date())
def test_ec2_mark_hours(self):
localtz = zoneinfo.gettz("America/New_York")
dt = datetime.now(localtz)
dt = dt.replace(
year=2018, month=2, day=20, hour=18, minute=00, second=0, microsecond=0
)
session_factory = self.replay_flight_data("test_ec2_mark_hours")
session = session_factory(region="us-east-1")
ec2 = session.client("ec2")
policy = self.load_policy(
{
"name": "ec2-mark-5-hours",
"resource": "ec2",
"filters": [
{"tag:hourly-mark": "absent"}, {"tag:CreatorName": "joshuaroot"}
],
"actions": [
{
"type": "mark-for-op",
"tag": "hourly-mark",
"hours": 3,
"op": "stop",
}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
        resource = ec2.describe_instances(
            InstanceIds=[resources[0]["InstanceId"]]
        )["Reservations"][0]["Instances"][0]
tags = [t["Value"] for t in resource["Tags"] if t["Key"] == "hourly-mark"]
result = datetime.strptime(
tags[0].strip().split("@", 1)[-1], "%Y/%m/%d %H%M %Z"
        ).replace(tzinfo=localtz)
self.assertEqual(result, dt)
def test_ec2_marked_hours(self):
session_factory = self.replay_flight_data("test_ec2_marked_hours")
policy = self.load_policy(
{
"name": "ec2-mark-5-hours",
"resource": "ec2",
"filters": [
{
"type": "marked-for-op",
"tag": "hourly-mark",
"op": "stop",
"skew_hours": 3,
}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]["InstanceId"], "i-098dae2615acb5809")
class TestStop(BaseTest):
def test_ec2_stop(self):
session_factory = self.replay_flight_data("test_ec2_stop")
policy = self.load_policy(
{
"name": "ec2-test-stop",
"resource": "ec2",
"filters": [{"tag:Testing": "not-null"}],
"actions": [{"type": "stop"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
class TestReboot(BaseTest):
def test_ec2_reboot(self):
session_factory = self.replay_flight_data("test_ec2_reboot")
policy = self.load_policy(
{
"name": "ec2-test-reboot",
"resource": "ec2",
"filters": [{"tag:Testing": "not-null"}],
"actions": [{"type": "reboot"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 2)
running = []
for i in resources:
if i["State"]["Name"] == "running":
running.append(i["InstanceId"])
if self.recording:
time.sleep(25)
instances = utils.query_instances(
session_factory(), InstanceIds=[r["InstanceId"] for r in resources]
)
cur_running = []
for i in instances:
if i["State"]["Name"] == "running":
cur_running.append(i["InstanceId"])
cur_running.sort()
running.sort()
self.assertEqual(cur_running, running)
class TestStart(BaseTest):
def test_ec2_start(self):
session_factory = self.replay_flight_data("test_ec2_start")
policy = self.load_policy(
{
"name": "ec2-test-start",
"resource": "ec2",
"filters": [],
"actions": [{"type": "start"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 2)
def test_ec2_start_fails(self):
session_factory = self.replay_flight_data("test_ec2_start")
policy = self.load_policy(
{
"name": "ec2-test-start",
"resource": "ec2",
"filters": [],
"actions": [{"type": "start"}],
},
session_factory=session_factory,
)
output = self.capture_logging("custodian.actions", level=logging.DEBUG)
with mock.patch.object(ec2.Start, "process_instance_set", return_value=True):
try:
policy.run()
except RuntimeError:
pass
else:
self.fail("should have raised error")
log_output = output.getvalue()
self.assertIn("Could not start 1 of 1 instances", log_output)
self.assertIn("t2.micro us-west-2c", log_output)
self.assertIn("i-08270b9cfb568a1c4", log_output)
class TestOr(BaseTest):
def test_ec2_or_condition(self):
session_factory = self.replay_flight_data("test_ec2_stop")
policy = self.load_policy(
{
"name": "ec2-test-snapshot",
"resource": "ec2",
"filters": [
{"or": [{"tag:Name": "CompileLambda"}, {"tag:Name": "Spinnaker"}]}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 2)
self.assertEqual(
sorted([r["InstanceId"] for r in resources]), [u"i-13413bd7", u"i-1aebf7c0"]
)
class TestSnapshot(BaseTest):
def test_ec2_snapshot_no_copy_tags(self):
session_factory = self.replay_flight_data("test_ec2_snapshot")
policy = self.load_policy(
{
"name": "ec2-test-snapshot",
"resource": "ec2",
"filters": [{"tag:Name": "CompileLambda"}],
"actions": [{"type": "snapshot"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
def test_ec2_snapshot_copy_tags(self):
session_factory = self.replay_flight_data("test_ec2_snapshot")
policy = self.load_policy(
{
"name": "ec2-test-snapshot",
"resource": "ec2",
"filters": [{"tag:Name": "CompileLambda"}],
"actions": [{"type": "snapshot", "copy-tags": ["ASV" "Testing123"]}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
class TestSetInstanceProfile(BaseTest):
def test_ec2_set_instance_profile_existing(self):
factory = self.replay_flight_data(
'test_ec2_set_instance_profile_existing')
p = self.load_policy({
'name': 'ec2-set-profile-extant',
'resource': 'ec2',
'filters': [{'tag:Name': 'role-test'}],
'actions': [{
'type': 'set-instance-profile',
'name': 'ecsInstanceRole'}]}, session_factory=factory)
client = factory().client('ec2')
resources = p.run()
# 3 instances covering no role, target role, different role.
self.assertEqual(len(resources), 3)
previous_associations = {
i['InstanceId']: i.get('IamInstanceProfile', {}).get('Arn')
for i in resources}
self.assertEqual(
previous_associations,
{u'i-01b7ee380879d3fd8': u'arn:aws:iam::644160558196:instance-profile/CloudCustodianRole', # noqa
u'i-06305b4b9f5e3f8b8': u'arn:aws:iam::644160558196:instance-profile/ecsInstanceRole',
u'i-0aef5d5ffb60c8615': None})
# verify changes
associations = {
a['InstanceId']: a['IamInstanceProfile']['Arn']
for a in client.describe_iam_instance_profile_associations(
Filters=[
{'Name': 'instance-id',
'Values': [i['InstanceId'] for i in resources]},
{'Name': 'state', 'Values': ['associating', 'associated']}]
).get('IamInstanceProfileAssociations', ())}
self.assertEqual(
associations,
{'i-01b7ee380879d3fd8': 'arn:aws:iam::644160558196:instance-profile/ecsInstanceRole',
'i-06305b4b9f5e3f8b8': 'arn:aws:iam::644160558196:instance-profile/ecsInstanceRole',
'i-0aef5d5ffb60c8615': 'arn:aws:iam::644160558196:instance-profile/ecsInstanceRole'})
def test_ec2_set_instance_profile_disassocation(self):
session_factory = self.replay_flight_data(
"test_ec2_set_instance_profile_disassociation"
)
policy = self.load_policy(
{
"name": "ec2-test-set-instance-profile-disassociation",
"resource": "ec2",
"filters": [
{"tag:Name": "MissingInstanceProfile"},
{
"type": "value",
"key": "IamInstanceProfile.Arn",
"op": "regex",
"value": ".*/ec2-default",
},
],
"actions": [{"type": "set-instance-profile"}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertGreaterEqual(len(resources), 1)
ec2 = session_factory().client("ec2")
associations = ec2.describe_iam_instance_profile_associations(
Filters=[
{"Name": "instance-id", "Values": [r["InstanceId"] for r in resources]}
]
)
for a in associations["IamInstanceProfileAssociations"]:
self.assertIn(a["State"], ("disassociating", "disassociated"))
class TestEC2QueryFilter(unittest.TestCase):
def test_parse(self):
self.assertEqual(QueryFilter.parse([]), [])
x = QueryFilter.parse([{"instance-state-name": "running"}])
self.assertEqual(
x[0].query(), {"Name": "instance-state-name", "Values": ["running"]}
)
self.assertTrue(
isinstance(QueryFilter.parse([{"tag:ASV": "REALTIMEMSG"}])[0], QueryFilter)
)
self.assertRaises(PolicyValidationError, QueryFilter.parse, [{"tag:ASV": None}])
class TestTerminate(BaseTest):
def test_ec2_terminate(self):
# Test conditions: single running instance, with delete protection
session_factory = self.replay_flight_data("test_ec2_terminate")
p = self.load_policy(
{
"name": "ec2-term",
"resource": "ec2",
"filters": [{"InstanceId": "i-017cf4e2a33b853fe"}],
"actions": [{"type": "terminate", "force": True}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
instances = utils.query_instances(
session_factory(), InstanceIds=["i-017cf4e2a33b853fe"]
)
self.assertEqual(instances[0]["State"]["Name"], "shutting-down")
class TestDefaultVpc(BaseTest):
def test_ec2_default_vpc(self):
session_factory = self.replay_flight_data("test_ec2_default_vpc")
p = self.load_policy(
{
"name": "ec2-default-filters",
"resource": "ec2",
"filters": [{"type": "default-vpc"}],
},
config={"region": "us-west-2"},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]["InstanceId"], "i-0bfe468063b02d018")
class TestSingletonFilter(BaseTest):
def test_ec2_singleton_filter(self):
session_factory = self.replay_flight_data("test_ec2_singleton")
p = self.load_policy(
{
"name": "ec2-singleton-filters",
"resource": "ec2",
"filters": [{"tag:Name": "Singleton"}, {"type": "singleton"}],
},
config={"region": "us-west-1"},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]["InstanceId"], "i-00fe7967fb7167c62")
class TestActions(unittest.TestCase):
def test_action_construction(self):
self.assertIsInstance(actions.factory("mark", None), tags.Tag)
self.assertIsInstance(actions.factory("stop", None), ec2.Stop)
self.assertIsInstance(actions.factory("terminate", None), ec2.Terminate)
class TestModifySecurityGroupsActionSchema(BaseTest):
def test_remove_dependencies(self):
policy = {
"name": "remove-with-no-isolation-or-add",
"resource": "ec2",
"actions": [{"type": "modify-security-groups", "remove": "matched"}],
}
self.assertRaises(ValidationError, self.load_policy, data=policy, validate=True)
def test_invalid_remove_params(self):
# string invalid
policy = {
"name": "remove-with-incorrect-param-string",
"resource": "ec2",
"actions": [{"type": "modify-security-groups", "remove": "none"}],
}
self.assertRaises(ValidationError, self.load_policy, data=policy, validate=True)
# list - one valid, one invalid
policy = {
"name": "remove-with-incorrect-param-list",
"resource": "ec2",
"actions": [
{
"type": "modify-security-groups",
"remove": ["invalid-sg", "sg-abcd1234"],
}
],
}
self.assertRaises(ValidationError, self.load_policy, policy, validate=True)
def test_invalid_add_params(self):
# string invalid
policy = {
"name": "add-with-incorrect-param-string",
"resource": "ec2",
"actions": [
{"type": "modify-security-groups", "add": "none"},
{
"type": "modify-security-groups",
"add": ["invalid-sg", "sg-abcd1234"],
},
],
}
self.assertRaises(ValidationError, self.load_policy, data=policy, validate=True)
def test_invalid_isolation_group_params(self):
policy = {
"name": "isolation-group-with-incorrect-param-string",
"resource": "ec2",
"actions": [{"type": "modify-security-groups", "isolation-group": "none"}],
}
self.assertRaises(ValidationError, self.load_policy, data=policy, validate=True)
# list - one valid, one invalid
policy = {
"name": "isolation-group-with-incorrect-param-list",
"resource": "ec2",
"actions": [
{
"type": "modify-security-groups",
"isolation-group": ["invalid-sg", "sg-abcd1234"],
}
],
}
self.assertRaises(ValidationError, self.load_policy, data=policy, validate=True)
class TestModifySecurityGroupAction(BaseTest):
def test_security_group_type(self):
# Test conditions:
# - running two instances; one with TestProductionInstanceProfile
# and one with none
# - security group named TEST-PROD-ONLY-SG exists in VPC and is
# attached to both test instances
session_factory = self.replay_flight_data("test_ec2_security_group_filter")
# Catch on anything that uses the *PROD-ONLY* security groups but isn't in a prod role
policy = self.load_policy(
{
"name": "restrict-sensitive-sg",
"resource": "ec2",
"filters": [
{
"or": [
{
"and": [
{
"type": "value",
"key": "IamInstanceProfile.Arn",
"value": "(?!.*TestProductionInstanceProfile)(.*)",
"op": "regex",
},
{
"type": "value",
"key": "IamInstanceProfile.Arn",
"value": "not-null",
},
]
},
{
"type": "value",
"key": "IamInstanceProfile",
"value": "absent",
},
]
},
{
"type": "security-group",
"key": "GroupName",
"value": "(.*PROD-ONLY.*)",
"op": "regex",
},
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]["InstanceId"], "i-0dd3919bc5bac1ea8")
def test_security_group_modify_groups_action(self):
# Test conditions:
# - running two instances; one with TestProductionInstanceProfile
# and one with none
# - security group named TEST-PROD-ONLY-SG exists in VPC and is
# attached to both test instances
session_factory = self.replay_flight_data("test_ec2_modify_groups_action")
client = session_factory().client("ec2")
        default_sg_id = client.describe_security_groups(
            GroupNames=["default"]
        )["SecurityGroups"][0]["GroupId"]
# Catch on anything that uses the *PROD-ONLY* security groups but isn't in a prod role
policy = self.load_policy(
{
"name": "remove-sensitive-sg",
"resource": "ec2",
"filters": [
{
"or": [
{
"and": [
{
"type": "value",
"key": "IamInstanceProfile.Arn",
"value": "(?!.*TestProductionInstanceProfile)(.*)",
"op": "regex",
},
{
"type": "value",
"key": "IamInstanceProfile.Arn",
"value": "not-null",
},
]
},
{
"type": "value",
"key": "IamInstanceProfile",
"value": "absent",
},
]
},
{
"type": "security-group",
"key": "GroupName",
"value": "(.*PROD-ONLY.*)",
"op": "regex",
},
],
"actions": [
{
"type": "modify-security-groups",
"remove": "matched",
"isolation-group": default_sg_id,
}
],
},
session_factory=session_factory,
)
before_action_resources = policy.run()
after_action_resources = policy.run()
self.assertEqual(len(before_action_resources), 1)
self.assertEqual(
before_action_resources[0]["InstanceId"], "i-0dd3919bc5bac1ea8"
)
self.assertEqual(len(after_action_resources), 0)
def test_invalid_modify_groups_schema(self):
policy = {
"name": "invalid-modify-security-groups-action",
"resource": "ec2",
"filters": [],
"actions": [{"type": "modify-security-groups", "change": "matched"}],
}
self.assertRaises(ValidationError, self.load_policy, policy, validate=True)
def test_ec2_add_security_groups(self):
# Test conditions:
# - running one instance with TestProductionInstanceProfile
# - security group named TEST-PROD-ONLY-SG exists in VPC and
# is attached to test instance
# - security group with id sg-8a4b64f7 exists in VPC and is selected
# in a policy to be attached
session_factory = self.replay_flight_data("test_ec2_add_security_groups")
policy = self.load_policy(
{
"name": "add-sg-to-prod-instances",
"resource": "ec2",
"filters": [
{
"type": "value",
"key": "IamInstanceProfile.Arn",
"value": "(.*TestProductionInstanceProfile)",
"op": "regex",
}
],
"actions": [{"type": "modify-security-groups", "add": "sg-8a4b64f7"}],
},
session_factory=session_factory,
)
first_resources = policy.run()
self.assertEqual(len(first_resources[0]["NetworkInterfaces"][0]["Groups"]), 1)
second_resources = policy.run()
self.assertEqual(len(second_resources[0]["NetworkInterfaces"][0]["Groups"]), 2)
class TestAutoRecoverAlarmAction(BaseTest):
def test_autorecover_alarm(self):
session_factory = self.replay_flight_data("test_ec2_autorecover_alarm")
p = self.load_policy(
{
"name": "ec2-autorecover-alarm",
"resource": "ec2",
"filters": [{"tag:c7n-test": "autorecover-alarm"}],
"actions": [{"type": "autorecover-alarm"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 2)
self.assertEqual(resources[0]["InstanceId"], "i-0aaaaec4b77188b69")
try:
client = session_factory().client("cloudwatch")
result = client.describe_alarms(
AlarmNames=["recover-{}".format(resources[0]["InstanceId"])]
)
self.assertTrue(result.get("MetricAlarms"))
except AssertionError:
self.fail("alarm not found")
class TestFilter(BaseTest):
def test_not_filter(self):
# This test is to get coverage for the `not` filter's process_set method
session_factory = self.replay_flight_data("test_ec2_not_filter")
policy = self.load_policy(
{
"name": "list-ec2-test-not",
"resource": "ec2",
"filters": [{"not": [{"InstanceId": "i-036ee05e8c2ca83b3"}]}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 2)
policy = self.load_policy(
{
"name": "list-ec2-test-not",
"resource": "ec2",
"filters": [
{
"not": [
{
"or": [
{"InstanceId": "i-036ee05e8c2ca83b3"},
{"InstanceId": "i-03d8207d8285cbf53"},
]
}
]
}
],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertEqual(len(resources), 1)
class TestUserData(BaseTest):
def test_regex_filter(self):
session_factory = self.replay_flight_data("test_ec2_userdata")
policy = self.load_policy(
{
"name": "ec2_userdata",
"resource": "ec2",
'filters': [{'or': [
{'type': 'user-data', 'op': 'regex', 'value': '(?smi).*A[KS]IA'}
]}],
},
session_factory=session_factory,
)
resources = policy.run()
self.assertGreater(len(resources), 0)
|
miyosuda/intro-to-dl-android | refs/heads/master | HandWriting/jni-build/jni/include/tensorflow/python/framework/gen_docs_combined.py | 3 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Updates generated docs from Python doc comments."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import tensorflow.python.platform
import sys
import tensorflow as tf
from tensorflow.python.framework import docs
from tensorflow.python.framework import framework_lib
from tensorflow.python.client import client_lib
tf.flags.DEFINE_string("out_dir", None,
"Directory to which docs should be written.")
tf.flags.DEFINE_boolean("print_hidden_regex", False,
"Dump a regular expression matching any hidden symbol")
FLAGS = tf.flags.FLAGS
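# Typical invocation (output path is illustrative):
#   python gen_docs_combined.py --out_dir=/tmp/tf_docs --print_hidden_regex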
# TODO(josh11b,wicke): Remove the ../../api_docs/python/ once the
# website can handle it.
PREFIX_TEXT = """
Note: Functions taking `Tensor` arguments can also take anything accepted by
[`tf.convert_to_tensor`](../../api_docs/python/framework.md#convert_to_tensor).
"""
def get_module_to_name():
return {tf: 'tf',
tf.errors: 'tf.errors',
tf.image: 'tf.image',
tf.nn: 'tf.nn',
tf.train: 'tf.train',
tf.python_io: 'tf.python_io'}
def all_libraries(module_to_name, members, documented):
# A list of (filename, docs.Library) pairs representing the individual files
# that we want to create.
def library(name, title, module=None, **args):
if module is None:
module = sys.modules["tensorflow.python.ops" +
("" if name == "ops" else "." + name)]
return (name + ".md", docs.Library(title=title,
module_to_name=module_to_name,
members=members,
documented=documented,
module=module,
**args))
return [
# Splits of module 'tf'.
library("framework", "Building Graphs", framework_lib),
library("constant_op", "Constants, Sequences, and Random Values",
prefix=PREFIX_TEXT),
library("state_ops", "Variables", prefix=PREFIX_TEXT),
library("array_ops", "Tensor Transformations",
exclude_symbols=["list_diff"], prefix=PREFIX_TEXT),
library("math_ops", "Math",
exclude_symbols=["sparse_matmul", "arg_min", "arg_max",
"lin_space", "sparse_segment_mean_grad"],
prefix=PREFIX_TEXT),
library("control_flow_ops", "Control Flow", prefix=PREFIX_TEXT),
library("image", "Images", tf.image, exclude_symbols=["ResizeMethod"],
prefix=PREFIX_TEXT),
library("sparse_ops", "Sparse Tensors",
exclude_symbols=["serialize_sparse", "serialize_many_sparse",
"deserialize_many_sparse"],
prefix=PREFIX_TEXT),
library("io_ops", "Inputs and Readers",
exclude_symbols=["LookupTableBase", "HashTable",
"initialize_all_tables",
"parse_single_sequence_example",
"string_to_hash_bucket"],
prefix=PREFIX_TEXT),
library("python_io", "Data IO (Python functions)", tf.python_io),
library("nn", "Neural Network", tf.nn,
exclude_symbols=["deconv2d", "conv2d_backprop_input",
"conv2d_backprop_filter", "avg_pool_grad",
"max_pool_grad", "max_pool_grad_with_argmax",
"batch_norm_with_global_normalization_grad",
"lrn_grad", "relu6_grad", "softplus_grad",
"softsign_grad", "xw_plus_b", "relu_layer",
"lrn", "batch_norm_with_global_normalization",
"batch_norm_with_global_normalization_grad",
"all_candidate_sampler",
"embedding_lookup_sparse",
"rnn", "state_saving_rnn", "bidirectional_rnn",
"dynamic_rnn", "seq2seq", "rnn_cell"],
prefix=PREFIX_TEXT),
library('client', "Running Graphs", client_lib),
library("train", "Training", tf.train,
exclude_symbols=["Feature", "Features", "BytesList", "FloatList",
"Int64List", "Example", "InferenceExample",
"FeatureList", "FeatureLists",
"RankingExample", "SequenceExample"]),
]
_hidden_symbols = ["Event", "Summary", "xrange",
"HistogramProto", "ConfigProto", "NodeDef", "GraphDef",
"GPUOptions", "SessionInterface", "BaseSession"]
def main(unused_argv):
if not FLAGS.out_dir:
tf.logging.error("out_dir not specified")
return -1
# Document libraries
documented = set()
module_to_name = get_module_to_name()
members = docs.collect_members(module_to_name)
libraries = all_libraries(module_to_name, members, documented)
# Define catch_all library before calling write_libraries to avoid complaining
# about generically hidden symbols.
catch_all = docs.Library(title="Catch All", module=None,
exclude_symbols=_hidden_symbols,
module_to_name=module_to_name, members=members,
documented=documented)
# Write docs to files
docs.write_libraries(FLAGS.out_dir, libraries)
# Make it easy to search for hidden symbols
if FLAGS.print_hidden_regex:
hidden = set(_hidden_symbols)
for _, lib in libraries:
hidden.update(lib.exclude_symbols)
print(r"hidden symbols regex = r'\b(%s)\b'" % "|".join(sorted(hidden)))
# Verify that all symbols are mentioned in some library doc.
catch_all.assert_no_leftovers()
# Generate index
with open(os.path.join(FLAGS.out_dir, "index.md"), "w") as f:
docs.Index(module_to_name, members, libraries,
"../../api_docs/python/").write_markdown_to_file(f)
if __name__ == "__main__":
tf.app.run()
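# Typical invocation (illustrative; the script and output names are
# hypothetical, the flag comes from the DEFINE_string above):
#   python gen_docs.py --out_dir=/tmp/tf_api_docs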
|
hhauer/myinfo | refs/heads/master | oam_base/settings.py | 1 | # Basic settings for django. They should enable localhost-only development.
# These settings are extended for actual production use.
# Before go-live review: https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
import os.path
TEMPLATE_DEBUG = DEBUG = True
# If development is set to true, all calls to Sailpoint are bypassed with stubbed returns.
DEVELOPMENT = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'sqlite.db',
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = False
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = False
# MyInfo / OAM does not use user-uploaded media at any point.
MEDIA_ROOT = ''
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = '/var/www/html/static/'
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'developmentkey'
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'downtime.middleware.DowntimeMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django_cas.middleware.CASMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'lib.backends.OAMLoginBackend',
'lib.backends.AccountPickupBackend',
'lib.backends.ForgotPasswordBackend',
'django_cas.backends.CASBackend',
)
ROOT_URLCONF = 'oam_base.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'oam_base.wsgi.application'
# django.core.context_processors moves in django 1.8
# to django.template.context_processors
TEMPLATE_CONTEXT_PROCESSORS = (
# defaults
"django.contrib.auth.context_processors.auth", # provides 'user' and 'perms'
"django.template.context_processors.debug", # provides 'debug', 'sql_queries'
"django.template.context_processors.static", # provides 'STATIC_URL'
"django.contrib.messages.context_processors.messages", # provides 'messages' and 'DEFAULT_MESSAGE_LEVELS'
# Custom
"oam_base.context_processors.identity", # request.session['identity']
"oam_base.context_processors.cancel", # request.session['ALLOW_CANCEL']
)
TEMPLATE_DIRS = (
os.path.join(os.path.dirname(__file__), 'templates').replace('\\', '/'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.admin',
'CustomUser',
'MyInfo',
'AccountPickup',
'PasswordReset',
'lib',
# Apps related to plugins.
'ajax',
'localflavor',
'downtime',
'ods_integration',
'Duo',
'debug_toolbar.apps.DebugToolbarConfig',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s - %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'RotatingFileHandler': {
'filename': 'myinfo.log',
'maxBytes': 262144,
'backupCount': 5,
'formatter': 'verbose',
'class': 'logging.handlers.RotatingFileHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'oam_base': {
'handlers': ['RotatingFileHandler'],
'level': 'DEBUG',
'propagate': False,
},
'AccountPickup': {
'handlers': ['RotatingFileHandler'],
'level': 'DEBUG',
'propagate': False,
},
'MyInfo': {
'handlers': ['RotatingFileHandler'],
'level': 'DEBUG',
'propagate': False,
},
'PasswordReset': {
'handlers': ['RotatingFileHandler'],
'level': 'DEBUG',
'propagate': False,
},
'django_cas': {
'handlers': ['RotatingFileHandler'],
'level': 'DEBUG',
'propagate': False,
},
'lib': {
'handlers': ['RotatingFileHandler'],
'level': 'DEBUG',
'propagate': False,
},
}
}
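# Example (illustrative only): any module named in the LOGGING config above
# obtains its logger through the standard library, e.g.
#   import logging
#   logger = logging.getLogger('MyInfo')
#   logger.debug('routed to myinfo.log via the RotatingFileHandler above')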
AUTH_USER_MODEL = 'CustomUser.PSUCustomUser'
# Settings related to CAS authentication.
# CAS_SERVER_URL = ''
# CAS_VERSION = '1'
CAS_SERVER_URL = 'https://ssodevel.oit.pdx.edu/cas/'
CAS_VERSION = 'CAS_2_SAML_1_0'
# Settings related to sailpoint.
SAILPOINT_SERVER_URL = ''
SAILPOINT_USERNAME = ''
SAILPOINT_PASSWORD = ''
# Username length patch.
MAX_USERNAME_LENGTH = 36
# Downtime exempt paths.
DOWNTIME_EXEMPT_PATHS = (
'/admin',
'/accounts/login',
'/MyInfo/ping',
)
# Settings for password reset import management command.
ORACLE_USER = ''
ORACLE_PASS = ''
ORACLE_HOST = ''
ORACLE_PORT = ''
ORACLE_SID = ''
ORACLE_SQL = ''
# Cache backend for ratelimiting when behind a load balancer.
RATELIMIT_CACHE_BACKEND = 'lib.brake_cache.LoadBalancerCache'
# Duo Settings
DUO_LOGIN_URL = 'duo'
DUO_HOST = 'api-ac9fc019.duosecurity.com'
# Duo WebSDK Api
DUO_IKEY = ''
DUO_SKEY = ''
DUO_AKEY = ''
|
rldhont/Quantum-GIS | refs/heads/master | python/plugins/processing/algs/gdal/rearrange_bands.py | 16 | # -*- coding: utf-8 -*-
"""
***************************************************************************
rearrange_bands.py
---------------------
Date : August 2018
Copyright : (C) 2018 by Mathieu Pellerin
Email : nirvn dot asia at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Mathieu Pellerin'
__date__ = 'August 2018'
__copyright__ = '(C) 2018, Mathieu Pellerin'
import os
import re
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsRasterFileWriter,
QgsProcessingException,
QgsProcessingParameterEnum,
QgsProcessingParameterDefinition,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterBand,
QgsProcessingParameterString,
QgsProcessingParameterRasterDestination)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class rearrange_bands(GdalAlgorithm):
INPUT = 'INPUT'
BANDS = 'BANDS'
OPTIONS = 'OPTIONS'
DATA_TYPE = 'DATA_TYPE'
OUTPUT = 'OUTPUT'
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.TYPES = [self.tr('Use Input Layer Data Type'), 'Byte', 'Int16', 'UInt16', 'UInt32', 'Int32', 'Float32', 'Float64', 'CInt16', 'CInt32', 'CFloat32', 'CFloat64']
self.addParameter(QgsProcessingParameterRasterLayer(self.INPUT, self.tr('Input layer')))
self.addParameter(QgsProcessingParameterBand(self.BANDS,
self.tr('Selected band(s)'),
None,
self.INPUT,
allowMultiple=True))
options_param = QgsProcessingParameterString(self.OPTIONS,
self.tr('Additional creation options'),
defaultValue='',
optional=True)
options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
options_param.setMetadata({
'widget_wrapper': {
'class': 'processing.algs.gdal.ui.RasterOptionsWidget.RasterOptionsWidgetWrapper'}})
self.addParameter(options_param)
dataType_param = QgsProcessingParameterEnum(self.DATA_TYPE,
self.tr('Output data type'),
self.TYPES,
allowMultiple=False,
defaultValue=0)
dataType_param.setFlags(dataType_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(dataType_param)
self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT,
self.tr('Converted')))
def name(self):
return 'rearrange_bands'
def displayName(self):
return self.tr('Rearrange bands')
def group(self):
return self.tr('Raster conversion')
def groupId(self):
return 'rasterconversion'
def icon(self):
return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'translate.png'))
def shortHelpString(self):
return self.tr("This algorithm creates a new raster using selected band(s) from a given raster layer.\n\n"
"The algorithm also makes it possible to reorder the bands for the newly-created raster.")
def commandName(self):
return 'gdal_translate'
def getConsoleCommands(self, parameters, context, feedback, executing=True):
inLayer = self.parameterAsRasterLayer(parameters, self.INPUT, context)
if inLayer is None:
raise QgsProcessingException(self.invalidRasterError(parameters, self.INPUT))
out = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
self.setOutputValue(self.OUTPUT, out)
arguments = []
bands = self.parameterAsInts(parameters, self.BANDS, context)
for band in bands:
arguments.append('-b {}'.format(band))
data_type = self.parameterAsEnum(parameters, self.DATA_TYPE, context)
if data_type:
arguments.append('-ot ' + self.TYPES[data_type])
arguments.append('-of')
arguments.append(QgsRasterFileWriter.driverForExtension(os.path.splitext(out)[1]))
options = self.parameterAsString(parameters, self.OPTIONS, context)
if options:
arguments.extend(GdalUtils.parseCreationOptions(options))
arguments.append(inLayer.source())
arguments.append(out)
return [self.commandName(), GdalUtils.escapeAndJoin(arguments)]
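# Illustrative example (file names are hypothetical): selecting bands 3, 2
# and 1 from input.tif with a GeoTIFF output would produce roughly
#   gdal_translate -b 3 -b 2 -b 1 -of GTiff input.tif output.tif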
|
yanbober/SmallReptileTraining | refs/heads/master | AndroidSpider/Spider_ethsacn2.py | 1 | # -*- coding: utf-8 -*-
# Script to save ETHSCAN records
import urllib.request as urllib2
from urllib import request
import random
from bs4 import BeautifulSoup
'''
# user_agent is the first step in the battle between crawlers and anti-crawling measures
ua_headers = {
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0',
}'''
# User-Agent strings used to fake the HTTP header
ua_list = [
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv2.0.1) Gecko/20100101 Firefox/4.0.1",
"Mozilla/5.0 (Windows NT 6.1; rv2.0.1) Gecko/20100101 Firefox/4.0.1",
"Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11",
"Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11"
]
user_agent=random.choice(ua_list)
# The Ethereum address to query
address="0x7751278c8b6b224946d065776fc67c81a5202958"
page_number_start=0
page_count=1
for ii in range(page_count):
page_number_start=page_number_start+1
page_number=str(page_number_start)
#url="https://etherscan.io/txs?a="+address+"&p="+page_number
url='https://etherscan.io/txsInternal?a='+address+'&&valid=true&p='+page_number
print(url)
# Construct a request object via the Request() method
request1=urllib2.Request(url=url)
# Add the header
request1.add_header('User-Agent',user_agent)
# Send the request to the given URL and return a file-like object with the server's response
response=urllib2.urlopen(request1)
# The file-like object returned by the server supports Python file-object methods
#html=response.read()
#print(html.decode('utf-8'))
soup=BeautifulSoup(response,"html.parser")
#k=0
# Can only access the content of the first tag -- how to solve this?
for i in soup.find_all('table',{'class':'table table-hover'}):
print(i)
'''k=k+1
m=k%7
if m==0:
br='\n'
else:
br=''
tbody=i.get_text()
data=str(tbody.encode('gbk','ignore'))+","+br
with open('test12.csv', 'a') as f:
f.write(data)
'''
#print("已完成:",str(page_number)+"/"+str(page_count)) |
tmimori/erpnext | refs/heads/develop | erpnext/patches/v4_2/add_currency_turkish_lira.py | 77 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.geo.country_info import get_country_info
from erpnext.setup.install import add_country_and_currency
def execute():
country = get_country_info(country="Turkey")
add_country_and_currency("Turkey", country)
|
hugofragata/PyNEAT | refs/heads/master | species.py | 1 | from genome import Genome
class Species():
"""The number of excess and disjoint genes between a pair of genomes is a natural
measure of their compatibility distance. The more disjoint two genomes are, the less
evolutionary history they share, and thus the less compatible they are. Therefore, we
can measure the compatibility distance δ of different structures in NEAT as a simple linear
combination of the number of excess E and disjoint D genes, as well as the average
weight differences of matching genes W, including disabled genes:
δ = c1E/N + c2D/N + c3·W (1)
The coefficients c1, c2, and c3 allow us to adjust the importance of the three factors, and
the factor N, the number of genes in the larger genome, normalizes for genome size (N
can be set to 1 if both genomes are small, i.e., consist of fewer than 20 genes).
The distance measure δ allows us to speciate using a compatibility threshold δt.
An ordered list of species is maintained. In each generation, genomes are sequentially
placed into species. Each existing species is represented by a random genome inside
the species from the previous generation. A given genome g in the current generation is
placed in the first species in which g is compatible with the representative genome of
that species. This way, species do not overlap. If g is not compatible with any
existing species, a new species is created with g as its representative. (Stanley)"""
def __init__(self, species_id, representative):
self.id = species_id
if not type(representative) == Genome:
raise SpeciesError("Representative provided is not a Genome but it should be.")
self.representative = representative
self.average_fitness = 0.0
self.max_fitness = 0.0
self.max_fitness_ever = 0.0
self.members = [representative]
class SpeciesError(Exception):
    def __init__(self, a):
        super(SpeciesError, self).__init__("SPECIES ERROR: " + str(a))
        print("SPECIES ERROR: " + str(a))
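# Illustrative sketch (not part of the original module): a compatibility
# distance in the spirit of equation (1) quoted in the Species docstring,
# delta = c1*E/N + c2*D/N + c3*W_bar. The gene representation assumed here
# (dict of innovation number -> connection weight) is hypothetical, not the
# real Genome API.
def compatibility_distance(genes_a, genes_b, c1=1.0, c2=1.0, c3=0.4):
    """genes_a, genes_b: dicts mapping innovation number -> connection weight."""
    max_a, max_b = max(genes_a), max(genes_b)
    matching = set(genes_a) & set(genes_b)
    # Disjoint genes fall inside the other genome's innovation range;
    # excess genes lie beyond it.
    disjoint = excess = 0
    for innov in set(genes_a) ^ set(genes_b):
        if innov <= min(max_a, max_b):
            disjoint += 1
        else:
            excess += 1
    w_bar = 0.0
    if matching:
        w_bar = sum(abs(genes_a[i] - genes_b[i]) for i in matching) / len(matching)
    n = max(len(genes_a), len(genes_b))
    if n < 20:  # both genomes are small: normalize by 1, per the docstring
        n = 1
    return c1 * excess / n + c2 * disjoint / n + c3 * w_bar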
|
BrechtBa/webopt | refs/heads/master | django/webinterface/models.py | 10644 | from django.db import models
# Create your models here.
|
ywangd/pybufrkit | refs/heads/master | pybufrkit/__main__.py | 1 | from __future__ import absolute_import
from __future__ import print_function
import sys
if __name__ == '__main__':
from pybufrkit import main
sys.exit(main())
|
edx/edx-platform | refs/heads/master | common/test/acceptance/fixtures/catalog.py | 4 | """
Tools to create catalog-related data for use in bok choy tests.
"""
import json
import requests
from common.test.acceptance.fixtures import CATALOG_STUB_URL
from common.test.acceptance.fixtures.config import ConfigModelFixture
class CatalogFixture:
"""
Interface to set up mock responses from the Catalog stub server.
"""
def install_programs(self, programs):
"""
Stub the discovery service's program list and detail API endpoints.
Arguments:
programs (list): A list of programs. Both list and detail endpoints
will be stubbed using data from this list.
"""
key = 'catalog.programs'
uuids = []
for program in programs:
uuid = program['uuid']
uuids.append(uuid)
program_key = f'{key}.{uuid}'
requests.put(
f'{CATALOG_STUB_URL}/set_config',
data={program_key: json.dumps(program)},
)
# Stub list endpoint as if the uuids_only query param had been passed.
requests.put(
f'{CATALOG_STUB_URL}/set_config',
data={key: json.dumps(uuids)},
)
def install_pathways(self, pathways):
"""
Stub the discovery service's credit pathways API endpoint
Arguments:
pathways (list): A list of credit pathways. List endpoint will be stubbed using data from this list.
"""
requests.put(
f'{CATALOG_STUB_URL}/set_config',
data={'catalog.pathways': json.dumps({'results': pathways, 'next': None})}
)
def install_program_types(self, program_types):
"""
Stub the discovery service's program type list API endpoints.
Arguments:
program_types (list): A list of program types. List endpoint will be stubbed using data from this list.
"""
requests.put(
f'{CATALOG_STUB_URL}/set_config',
data={'catalog.programs_types': json.dumps(program_types)},
)
class CatalogIntegrationMixin:
"""Mixin providing a method used to configure the catalog integration."""
def set_catalog_integration(self, is_enabled=False, service_username=None):
"""Use this to change the catalog integration config model during tests."""
ConfigModelFixture('/config/catalog', {
'enabled': is_enabled,
'internal_api_url': f'{CATALOG_STUB_URL}/api/v1/',
'cache_ttl': 0,
'service_username': service_username,
}).install()
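# Illustrative usage in a test (the program payload and username are
# hypothetical):
#   CatalogFixture().install_programs([{'uuid': 'abcd1234', 'title': 'Demo'}])
#   # and, with CatalogIntegrationMixin mixed into the test class:
#   self.set_catalog_integration(is_enabled=True, service_username='catalog_user')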
|
certik/hermes1d | refs/heads/master | src/gen/legendre.py | 3 | #! /usr/bin/env python
print "Importing..."
import os
from jinja2 import Environment, FileSystemLoader
from sympy import Symbol
from common import horner_scheme, ccode_pow2, legendre_norm, \
legendre_shape_function
n_functions = 50
precision = 25
factor_const = True
x = Symbol("x")
env = Environment(loader=FileSystemLoader('.'))
functions = []
print "Calculating shape functions..."
for i in range(n_functions):
print " i=%d" % i
lob = legendre_shape_function(i, x)
lob_diff = lob.diff(x)
lob = horner_scheme(lob, x, factor_const=factor_const)
lob_diff = horner_scheme(lob_diff, x, factor_const=factor_const)
functions.append({"id": i,
"expr": ccode_pow2(lob),
"expr_diff": ccode_pow2(lob_diff),
})
print "Generating the C file..."
template = "legendre.cpp"
t = env.get_template(template)
open(os.path.join("..", template), "w").write(t.render({
"functions": functions,
}))
|
afdelgado/askbot | refs/heads/master | askbot/management/commands/post_emailed_questions.py | 17 | """Copyright 2011 Askbot.org and Askbot project contributors.
Custom management command that takes questions posted
via email at the IMAP server
Expects subject line of emails to have format:
[Tag1, Tag2] Question title
Tags can be entered as multiword, but the space character
within the tag may be replaced with a dash, per live
setting EMAIL_REPLACE_SPACE_IN_TAGS
also, to make use of this command, the feature must
be enabled via ALLOW_ASKING_BY_EMAIL
and IMAP settings in the settings.py must be configured
correctly
todo: use templates for the email formatting
"""
import imaplib
import email
import quopri
import base64
from django.conf import settings as django_settings
from django.core.management.base import NoArgsCommand, CommandError
from askbot.conf import settings as askbot_settings
from askbot import mail
class CannotParseEmail(Exception):
"""This exception will bounce the email"""
def __init__(self, email_address, subject):
super(CannotParseEmail, self).__init__()
mail.bounce_email(email_address, subject, reason = 'problem_posting')
def parse_message(msg):
"""returns a tuple
(<from email address>, <subject>, <body>)
the function will attempt to decode the email
supported encodings are "quoted-printable" and "base64"
not supported - emails using language-specific encodings
"""
sender = msg.get('From')
subject = msg.get('Subject')
if msg.is_multipart():
msg = msg.get_payload()
if isinstance(msg, list):
raise CannotParseEmail(sender, subject)
#ctype = msg.get_content_type()#text/plain only
raw_body = msg.get_payload()#text/plain only
encoding = msg.get('Content-Transfer-Encoding')
if encoding == 'base64':
body = base64.b64decode(raw_body)
elif encoding == 'quoted-printable':
body = quopri.decodestring(raw_body)
else:
body = raw_body
return (sender, subject, body)
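# Illustrative example (hypothetical message; a minimal sketch only):
#   msg = email.message_from_string(
#       "From: alice@example.com\n"
#       "Subject: [tag1, tag2] How do I reset my password?\n"
#       "Content-Transfer-Encoding: quoted-printable\n"
#       "\n"
#       "It stopped working today=21\n"
#   )
#   parse_message(msg)
#   # -> ('alice@example.com',
#   #     '[tag1, tag2] How do I reset my password?',
#   #     'It stopped working today!\n')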
class Command(NoArgsCommand):
"""the django management command class"""
def handle_noargs(self, **options):
"""reads all emails from the INBOX of
imap server and posts questions based on
those emails. Badly formatted questions are
bounced, as well as emails from unknown users
all messages are deleted thereafter
"""
if not askbot_settings.ALLOW_ASKING_BY_EMAIL:
raise CommandError('Asking by email is not enabled')
#open imap server and select the inbox
if django_settings.IMAP_USE_TLS:
imap_getter = imaplib.IMAP4_SSL
else:
imap_getter = imaplib.IMAP4
imap = imap_getter(
django_settings.IMAP_HOST,
django_settings.IMAP_PORT
)
imap.login(
django_settings.IMAP_HOST_USER,
django_settings.IMAP_HOST_PASSWORD
)
imap.select('INBOX')
#get message ids
junk, ids = imap.search(None, 'ALL')
if len(ids[0].strip()) == 0:
#with empty inbox - close and exit
imap.close()
imap.logout()
return
#for each id - read a message, parse it and post a question
for msg_id in ids[0].split(' '):
junk, data = imap.fetch(msg_id, '(RFC822)')
#message_body = data[0][1]
msg = email.message_from_string(data[0][1])
imap.store(msg_id, '+FLAGS', '\\Deleted')
try:
(sender, subject, body) = parse_message(msg)
except CannotParseEmail:
continue
sender = mail.extract_first_email_address(sender)
mail.process_emailed_question(sender, subject, body)
imap.expunge()
imap.close()
imap.logout()
|
ncbray/pystream | refs/heads/master | bin/translator/dataflowtransform/shaderdescription.py | 1 | # Copyright 2011 Nicholas Bray
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .. import intrinsics
from language.python import ast
from language.glsl import ast as glsl
class IOInfo(object):
def __init__(self):
self.uniforms = set()
self.inputs = set()
self.outputs = set()
self.builtin = {}
self.same = {}
self.fieldTrans = {}
self.specialInputs = {}
self.specialOutputs = {}
class ProgramDescription(object):
__slots__ = 'prgm', 'name', 'vscontext', 'fscontext', 'mapping', 'vs2fs', 'ioinfo', 'uniformBlock'
def __init__(self, prgm, name, vscontext, fscontext):
self.prgm = prgm
self.name = name
self.vscontext = vscontext
self.fscontext = fscontext
self.vs2fs = {}
self.uniformBlock = None
def makeMap(self, tree, slot, mapping):
if tree.used:
assert slot not in mapping
mapping[slot] = tree.ioname
assert tree.ioname not in self.vs2fs
self.vs2fs[tree.ioname] = slot
def handleOutputToLocal(self, tree, lcl, mapping):
self.makeMap(tree, lcl, mapping)
refs = lcl.annotation.references.merged
self.traverse(tree, refs, mapping)
def handleOutputToField(self, tree, field, mapping):
self.makeMap(tree, field, mapping)
refs = field
self.traverse(tree, refs, mapping)
def traverse(self, tree, refs, mapping):
for fieldName, child in tree.fields.iteritems():
for obj in refs:
if fieldName in obj.slots:
self.handleOutputToField(child, obj.slots[fieldName], mapping)
def linkVarying(self):
# Generate fs input => vs output mappings
# This is a reverse mapping, as that makes subsequent operations easier
vsoutputs = self.vscontext.shaderdesc.outputs.varying
fsparams = self.fscontext.originalParams.params[2:]
assert len(vsoutputs) == len(fsparams)
mapping = {}
for output, param in zip(vsoutputs, fsparams):
self.handleOutputToLocal(output, param, mapping)
self.mapping = mapping
def link(self):
# Uniform objects and fields have the same names, so it is unnecessary to link them.
self.linkVarying()
def markTraverse(self, refs, marks):
for ref in refs:
for field in ref:
self.markField(field, marks)
def markEquivilents(self, field, marks):
lcls = []
for fields in (self.vscontext.shaderdesc.fields, self.fscontext.shaderdesc.fields):
if field in fields:
lcl = fields[field]
marks.add(lcl)
lcls.append(lcl)
if len(lcls) > 0:
self.ioinfo.fieldTrans[field] = lcls[0]
if len(lcls) > 1:
self.ioinfo.same[lcls[0]] = lcls[1]
self.ioinfo.same[lcls[1]] = lcls[0]
def markField(self, field, marks):
if field not in marks and not intrinsics.isIntrinsicSlot(field):
marks.add(field)
self.markEquivilents(field, marks)
self.markTraverse(field, marks)
def markParam(self, param, marks):
if param.isDoNotCare(): return
marks.add(param)
self.markTraverse(param.annotation.references.merged, marks)
def markParams(self, context):
params = context.originalParams
self.markParam(params.params[0], self.ioinfo.uniforms)
# TODO what about the context?
context.shaderdesc.outputs.markSpecial(self.ioinfo)
for param in params.params[2:]:
self.markParam(param, self.ioinfo.inputs)
def makeIOInfo(self):
self.ioinfo = IOInfo()
self.markParams(self.vscontext)
self.markParams(self.fscontext)
self.vscontext.shaderdesc.outputs.updateInfo(self.ioinfo)
self.fscontext.shaderdesc.outputs.updateInfo(self.ioinfo)
fsfields = self.fscontext.shaderdesc.fields
for src, dst in self.vs2fs.iteritems():
dst = fsfields.get(dst, dst)
self.ioinfo.same[src] = dst
return self.ioinfo
class ShaderDescription(object):
__slots__ = 'fields', 'outputs'
class OutputBase(object):
__slots__ = 'flat'
def updateInfo(self, info):
info.outputs.update(self.collectIONames())
self.getBuiltin(info)
def generateOutput(self, tree, block, flat):
if tree.used:
if tree.ioname is None:
tree.ioname = ast.IOName(None)
block.append(ast.Output(tree._lcl, tree.ioname))
flat.append(tree.ioname)
for child in tree.fields.itervalues():
self.generateOutput(child, block, flat)
def generateOutputStatements(self):
block = []
flat = []
for tree in self.getOutputs():
self.generateOutput(tree, block, flat)
self.flat = flat
return ast.OutputBlock(block)
def _collectIONames(self, tree, outputs):
if tree.used and tree.ioname is not None:
outputs.append(tree.ioname)
for child in tree.fields.itervalues():
self._collectIONames(child, outputs)
def collectIONames(self):
outputs = []
for tree in self.getOutputs():
self._collectIONames(tree, outputs)
return outputs
class VSOutputDescription(OutputBase):
__slots__ = 'position', 'varying'
def collectUsed(self):
used = set()
self.position.collectUsed(used)
for var in self.varying:
var.collectUsed(used)
return used
def getBuiltin(self, info):
vec4T = intrinsics.intrinsicTypeNodes[intrinsics.vec.vec4]
info.builtin[self.position.ioname] = glsl.OutputDecl(None, False, False, True, vec4T, 'gl_Position')
def getOutputs(self):
outputs = [self.position]
outputs.extend(self.varying)
return outputs
def markSpecial(self, ioinfo):
ioinfo.specialOutputs[self.position.ioname] = 'gl_Position'
class FSOutputDescription(OutputBase):
__slots__ = 'colors', 'depth'
def collectUsed(self):
used = set()
if self.depth:
self.depth.collectUsed(used)
for color in self.colors:
color.collectUsed(used)
return used
def getBuiltin(self, info):
if self.depth:
floatT = intrinsics.intrinsicTypeNodes[float]
info.builtin[self.position.ioname] = glsl.OutputDecl(None, False, False, True, floatT, 'gl_Depth')
def getOutputs(self):
outputs = []
if self.depth:
outputs.append(self.depth)
outputs.extend(self.colors)
return outputs
def markSpecial(self, ioinfo):
if self.depth:
ioinfo.specialOutputs[self.depth.ioname] = 'gl_Depth'
class TreeNode(object):
def __init__(self, lcl):
self._lcl = lcl
self.fields = {}
self.used = intrinsics.isIntrinsicType(self.pythonType())
self.ioname = None
def refs(self):
return self._lcl.annotation.references.merged
def pythonType(self):
refs = self.refs()
pt = refs[0].xtype.obj.pythonType()
for ref in refs[1:]:
assert pt is ref.xtype.obj.pythonType()
return pt
def field(self, fieldName, lcl):
tree = TreeNode(lcl)
self.fields[fieldName] = tree
return tree
def extractTuple(self):
array = {}
length = None
refs = self.refs()
for ref in refs:
assert ref.xtype.obj.pythonType() is tuple, refs
for fieldName, child in self.fields.iteritems():
if fieldName.type == 'LowLevel':
if fieldName.name.pyobj == 'length':
length = child.refs()[0].xtype.obj.pyobj
elif fieldName.type == 'Array':
array[fieldName.name.pyobj] = child
else:
assert False, fieldName
assert len(array) == length
return tuple([array[i] for i in range(length)])
|
lostdj/Jaklin-OpenJFX | refs/heads/jaklin-master | modules/web/src/main/native/Tools/Scripts/webkitpy/thirdparty/mock.py | 148 | # mock.py
# Test tools for mocking and patching.
# Copyright (C) 2007-2009 Michael Foord
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# mock 0.6.0
# http://www.voidspace.org.uk/python/mock/
# Released subject to the BSD License
# Please see http://www.voidspace.org.uk/python/license.shtml
# 2009-11-25: Licence downloaded from above URL.
# BEGIN DOWNLOADED LICENSE
#
# Copyright (c) 2003-2009, Michael Foord
# All rights reserved.
# E-mail : fuzzyman AT voidspace DOT org DOT uk
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of Michael Foord nor the name of Voidspace
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# END DOWNLOADED LICENSE
# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
# Comments, suggestions and bug reports welcome.
__all__ = (
'Mock',
'patch',
'patch_object',
'sentinel',
'DEFAULT'
)
__version__ = '0.6.0'
class SentinelObject(object):
def __init__(self, name):
self.name = name
def __repr__(self):
return '<SentinelObject "%s">' % self.name
class Sentinel(object):
def __init__(self):
self._sentinels = {}
def __getattr__(self, name):
return self._sentinels.setdefault(name, SentinelObject(name))
sentinel = Sentinel()
DEFAULT = sentinel.DEFAULT
class OldStyleClass:
pass
ClassType = type(OldStyleClass)
def _is_magic(name):
return '__%s__' % name[2:-2] == name
def _copy(value):
if type(value) in (dict, list, tuple, set):
return type(value)(value)
return value
class Mock(object):
def __init__(self, spec=None, side_effect=None, return_value=DEFAULT,
name=None, parent=None, wraps=None):
self._parent = parent
self._name = name
if spec is not None and not isinstance(spec, list):
spec = [member for member in dir(spec) if not _is_magic(member)]
self._methods = spec
self._children = {}
self._return_value = return_value
self.side_effect = side_effect
self._wraps = wraps
self.reset_mock()
def reset_mock(self):
self.called = False
self.call_args = None
self.call_count = 0
self.call_args_list = []
self.method_calls = []
for child in self._children.itervalues():
child.reset_mock()
if isinstance(self._return_value, Mock):
self._return_value.reset_mock()
def __get_return_value(self):
if self._return_value is DEFAULT:
self._return_value = Mock()
return self._return_value
def __set_return_value(self, value):
self._return_value = value
return_value = property(__get_return_value, __set_return_value)
def __call__(self, *args, **kwargs):
self.called = True
self.call_count += 1
self.call_args = (args, kwargs)
self.call_args_list.append((args, kwargs))
parent = self._parent
name = self._name
while parent is not None:
parent.method_calls.append((name, args, kwargs))
if parent._parent is None:
break
name = parent._name + '.' + name
parent = parent._parent
ret_val = DEFAULT
if self.side_effect is not None:
if (isinstance(self.side_effect, Exception) or
isinstance(self.side_effect, (type, ClassType)) and
issubclass(self.side_effect, Exception)):
raise self.side_effect
ret_val = self.side_effect(*args, **kwargs)
if ret_val is DEFAULT:
ret_val = self.return_value
if self._wraps is not None and self._return_value is DEFAULT:
return self._wraps(*args, **kwargs)
if ret_val is DEFAULT:
ret_val = self.return_value
return ret_val
def __getattr__(self, name):
if self._methods is not None:
if name not in self._methods:
raise AttributeError("Mock object has no attribute '%s'" % name)
elif _is_magic(name):
raise AttributeError(name)
if name not in self._children:
wraps = None
if self._wraps is not None:
wraps = getattr(self._wraps, name)
self._children[name] = Mock(parent=self, name=name, wraps=wraps)
return self._children[name]
def assert_called_with(self, *args, **kwargs):
assert self.call_args == (args, kwargs), 'Expected: %s\nCalled with: %s' % ((args, kwargs), self.call_args)
def _dot_lookup(thing, comp, import_path):
try:
return getattr(thing, comp)
except AttributeError:
__import__(import_path)
return getattr(thing, comp)
def _importer(target):
components = target.split('.')
import_path = components.pop(0)
thing = __import__(import_path)
for comp in components:
import_path += ".%s" % comp
thing = _dot_lookup(thing, comp, import_path)
return thing
class _patch(object):
def __init__(self, target, attribute, new, spec, create):
self.target = target
self.attribute = attribute
self.new = new
self.spec = spec
self.create = create
self.has_local = False
def __call__(self, func):
if hasattr(func, 'patchings'):
func.patchings.append(self)
return func
def patched(*args, **keywargs):
# don't use a with here (backwards compatibility with 2.5)
extra_args = []
for patching in patched.patchings:
arg = patching.__enter__()
if patching.new is DEFAULT:
extra_args.append(arg)
args += tuple(extra_args)
try:
return func(*args, **keywargs)
finally:
for patching in getattr(patched, 'patchings', []):
patching.__exit__()
patched.patchings = [self]
patched.__name__ = func.__name__
patched.compat_co_firstlineno = getattr(func, "compat_co_firstlineno",
func.func_code.co_firstlineno)
return patched
def get_original(self):
target = self.target
name = self.attribute
create = self.create
original = DEFAULT
if _has_local_attr(target, name):
try:
original = target.__dict__[name]
except AttributeError:
# for instances of classes with slots, they have no __dict__
original = getattr(target, name)
elif not create and not hasattr(target, name):
raise AttributeError("%s does not have the attribute %r" % (target, name))
return original
def __enter__(self):
new, spec, = self.new, self.spec
original = self.get_original()
if new is DEFAULT:
# XXXX what if original is DEFAULT - shouldn't use it as a spec
inherit = False
if spec == True:
# set spec to the object we are replacing
spec = original
if isinstance(spec, (type, ClassType)):
inherit = True
new = Mock(spec=spec)
if inherit:
new.return_value = Mock(spec=spec)
self.temp_original = original
setattr(self.target, self.attribute, new)
return new
def __exit__(self, *_):
if self.temp_original is not DEFAULT:
setattr(self.target, self.attribute, self.temp_original)
else:
delattr(self.target, self.attribute)
del self.temp_original
def patch_object(target, attribute, new=DEFAULT, spec=None, create=False):
return _patch(target, attribute, new, spec, create)
def patch(target, new=DEFAULT, spec=None, create=False):
try:
target, attribute = target.rsplit('.', 1)
except (TypeError, ValueError):
raise TypeError("Need a valid target to patch. You supplied: %r" % (target,))
target = _importer(target)
return _patch(target, attribute, new, spec, create)
def _has_local_attr(obj, name):
try:
return name in vars(obj)
except TypeError:
# objects without a __dict__
return hasattr(obj, name)
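# Illustrative usage (not part of the original module; names like
# 'frobnicate' are arbitrary):
#   m = Mock()
#   m.frobnicate(1, two=2)
#   m.frobnicate.assert_called_with(1, two=2)
#   @patch('os.path.exists')
#   def test_something(mock_exists):
#       mock_exists.return_value = True
#       assert os.path.exists('/no/such/path')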
|
chemlab/chemlab | refs/heads/master | chemlab/graphics/renderers/cylinder_imp.py | 6 | from .base import ShaderBaseRenderer
import pkgutil
from ..shaders import set_uniform
from ..buffers import VertexBuffer
from OpenGL.GL import *
import numpy as np
class CylinderImpostorRenderer(ShaderBaseRenderer):
"""Render pixel-perfect cylinders by using raytraced impostors.
This method provide a very fast way to render cylinders by using
an advanced shader program. It should be the preferred way to
render cylinders.
"""
def __init__(self, widget, bounds, radii, colors, shading='phong'):
vert = pkgutil.get_data("chemlab.graphics.renderers.shaders",
"cylinderimp.vert")
frag = pkgutil.get_data("chemlab.graphics.renderers.shaders",
"cylinderimp.frag")
super(CylinderImpostorRenderer, self).__init__(widget, vert, frag)
self.shading = shading
self.ldir = np.array([0.0, 0.0, 10.0, 1.0])
self.widget = widget
self.change_attributes(bounds, radii, colors)
self.widget.update()
def change_attributes(self, bounds, radii, colors):
"""Reinitialize the buffers, to accomodate the new
attributes. This is used to change the number of cylinders to
be displayed.
"""
self.n_cylinders = len(bounds)
self.is_empty = True if self.n_cylinders == 0 else False
if self.is_empty:
self.bounds = bounds
self.radii = radii
self.colors = colors
return # Do nothing
# We pass the starting position 8 times, and each of these has
# a mapping to the bounding box corner.
self.bounds = np.array(bounds, dtype='float32')
vertices, directions = self._gen_bounds(self.bounds)
self.radii = np.array(radii, dtype='float32')
prim_radii = self._gen_radii(self.radii)
self.colors = np.array(colors, dtype='uint8')
prim_colors = self._gen_colors(self.colors)
local = np.array([
# First face -- front
0.0, 0.0, 0.0,
0.0, 1.0, 0.0,
1.0, 1.0, 0.0,
0.0, 0.0, 0.0,
1.0, 1.0, 0.0,
1.0, 0.0, 0.0,
# Second face -- back
0.0, 0.0, 1.0,
0.0, 1.0, 1.0,
1.0, 1.0, 1.0,
0.0, 0.0, 1.0,
1.0, 1.0, 1.0,
1.0, 0.0, 1.0,
# Third face -- left
0.0, 0.0, 0.0,
0.0, 0.0, 1.0,
0.0, 1.0, 1.0,
0.0, 0.0, 0.0,
0.0, 1.0, 1.0,
0.0, 1.0, 0.0,
# Fourth face -- right
1.0, 0.0, 0.0,
1.0, 0.0, 1.0,
1.0, 1.0, 1.0,
1.0, 0.0, 0.0,
1.0, 1.0, 1.0,
1.0, 1.0, 0.0,
# Fifth face -- up
0.0, 1.0, 0.0,
0.0, 1.0, 1.0,
1.0, 1.0, 1.0,
0.0, 1.0, 0.0,
1.0, 1.0, 1.0,
1.0, 1.0, 0.0,
# Sixth face -- down
0.0, 0.0, 0.0,
0.0, 0.0, 1.0,
1.0, 0.0, 1.0,
0.0, 0.0, 0.0,
1.0, 0.0, 1.0,
1.0, 0.0, 0.0,
]).astype('float32')
local = np.tile(local, self.n_cylinders)
self._verts_vbo = VertexBuffer(vertices,GL_DYNAMIC_DRAW)
self._directions_vbo = VertexBuffer(directions, GL_DYNAMIC_DRAW)
self._local_vbo = VertexBuffer(local,GL_DYNAMIC_DRAW)
self._color_vbo = VertexBuffer(prim_colors, GL_DYNAMIC_DRAW)
self._radii_vbo = VertexBuffer(prim_radii, GL_DYNAMIC_DRAW)
def setup_shader(self):
glUseProgram(self.shader)
set_uniform(self.shader, 'model_view_rotation_mat', 'mat4fv',
self.viewer.camera._get_rotation_matrix())
set_uniform(self.shader, 'model_view_mat', 'mat4fv',
self.viewer.camera.matrix)
set_uniform(self.shader, 'model_view_projection_mat', 'mat4fv',
np.dot(self.viewer.camera.projection,
self.viewer.camera.matrix))
set_uniform(self.shader, 'projection_mat', 'mat4fv',
self.viewer.camera.projection)
set_uniform(self.shader, 'light_dir', '3f', self.ldir[:3])
cam = np.dot(self.viewer.camera.matrix[:3,:3],
-self.viewer.camera.position)
set_uniform(self.shader, 'camera_position', '3f', cam)
shd = {'phong' : 0,
'toon': 1}[self.shading]
set_uniform(self.shader, 'shading_type', '1i', shd)
def draw(self):
if self.is_empty:
return # Do nothing
self.setup_shader()
local_attr = glGetAttribLocation(self.shader,
b"vert_local_coordinate")
cylinder_axis_attr = glGetAttribLocation(self.shader,
b"cylinder_axis")
radius_attr = glGetAttribLocation(self.shader,
b"cylinder_radius")
glEnableVertexAttribArray(local_attr)
glEnableVertexAttribArray(cylinder_axis_attr)
glEnableVertexAttribArray(radius_attr)
glEnableClientState(GL_VERTEX_ARRAY)
self._verts_vbo.bind_vertexes(3, GL_FLOAT)
self._local_vbo.bind_attrib(local_attr, 3, GL_FLOAT)
self._directions_vbo.bind_attrib(cylinder_axis_attr, 3, GL_FLOAT)
self._radii_vbo.bind_attrib(radius_attr, 1, GL_FLOAT)
glEnableClientState(GL_COLOR_ARRAY)
self._color_vbo.bind_colors(4, GL_UNSIGNED_BYTE)
glDrawArrays(GL_TRIANGLES, 0, 36 * self.n_cylinders)
self._verts_vbo.unbind()
self._local_vbo.unbind()
self._color_vbo.unbind()
self._radii_vbo.unbind()
self._directions_vbo.unbind()
glDisableVertexAttribArray(local_attr)
glDisableVertexAttribArray(cylinder_axis_attr)
glDisableVertexAttribArray(radius_attr)
glDisableClientState(GL_VERTEX_ARRAY)
glDisableClientState(GL_COLOR_ARRAY)
glUseProgram(0)
def _gen_bounds(self, bounds):
vertices = np.repeat(bounds[:, 0], 36, axis=0).astype('float32')
directions = np.repeat(bounds[:, 1] - bounds[:, 0],
36, axis=0).astype('float32')
return vertices, directions
def _gen_radii(self, radii):
return np.repeat(radii, 36, axis=0).astype('float32')
def _gen_colors(self, colors):
return np.repeat(colors, 36, axis=0).astype('uint8')
def update_bounds(self, bounds):
'''Update the bounds inplace'''
self.bounds = np.array(bounds, dtype='float32')
vertices, directions = self._gen_bounds(self.bounds)
self._verts_vbo.set_data(vertices)
self._directions_vbo.set_data(directions)
self.widget.update()
def update_radii(self, radii):
'''Update the radii inplace'''
self.radii = np.array(radii, dtype='float32')
prim_radii = self._gen_radii(self.radii)
self._radii_vbo.set_data(prim_radii)
self.widget.update()
def update_colors(self, colors):
'''Update the colors inplace'''
self.colors = np.array(colors, dtype='uint8')
prim_colors = self._gen_colors(self.colors)
self._color_vbo.set_data(prim_colors)
self.widget.update()
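# Illustrative usage (widget and data are hypothetical; a sketch only):
#   bounds = [[[0.0, 0.0, 0.0], [0.0, 0.0, 1.0]]]  # one cylinder: start, end
#   renderer = CylinderImpostorRenderer(widget, bounds,
#                                       radii=[0.1], colors=[[255, 0, 0, 255]])
#   renderer.update_radii([0.2])  # attributes can later be changed in place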
|
elzaggo/pydoop | refs/heads/develop | examples/avro/py/avro_base.py | 1 | #!/usr/bin/env python
# BEGIN_COPYRIGHT
#
# Copyright 2009-2018 CRS4.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# END_COPYRIGHT
import sys
import abc
from collections import Counter
import pydoop.mapreduce.api as api
import pydoop.mapreduce.pipes as pp
from pydoop.avrolib import AvroContext
class ColorPickBase(api.Mapper):
@abc.abstractmethod
def get_user(self, ctx):
"""
Get the user record. This is just to avoid writing near identical
examples for the various key/value cases. In a real application,
carrying records over keys or values would be a design decision,
so you would simply do, e.g., ``user = self.value``.
"""
def map(self, ctx):
user = self.get_user(ctx)
color = user['favorite_color']
if color is not None:
ctx.emit(user['office'], Counter({color: 1}))
class AvroKeyColorPick(ColorPickBase):
def get_user(self, ctx):
return ctx.key
class AvroValueColorPick(ColorPickBase):
def get_user(self, ctx):
return ctx.value
class AvroKeyValueColorPick(ColorPickBase):
def get_user(self, ctx):
return ctx.key
def map(self, ctx):
sys.stdout.write("value (unused): %r\n" % (ctx.value,))
super(AvroKeyValueColorPick, self).map(ctx)
class ColorCountBase(api.Reducer):
def reduce(self, ctx):
s = sum(ctx.values, Counter())
self.emit(s, ctx)
@abc.abstractmethod
def emit(self, s, ctx):
"""
Emit the sum to the ctx. As in the base mapper, this is just to
avoid writing near identical examples.
"""
class NoAvroColorCount(ColorCountBase):
def emit(self, s, ctx):
ctx.emit(ctx.key, "%r" % s)
class AvroKeyColorCount(ColorCountBase):
def emit(self, s, ctx):
ctx.emit({'office': ctx.key, 'counts': s}, ctx.key)
class AvroValueColorCount(ColorCountBase):
def emit(self, s, ctx):
ctx.emit(ctx.key, {'office': ctx.key, 'counts': s})
class AvroKeyValueColorCount(ColorCountBase):
def emit(self, s, ctx):
record = {'office': ctx.key, 'counts': s}
ctx.emit(record, record) # FIXME: do something fancier
def run_task(mapper_class, reducer_class=NoAvroColorCount):
pp.run_task(
pp.Factory(mapper_class=mapper_class, reducer_class=reducer_class),
private_encoding=True, context_class=AvroContext
)
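# Example entry point (illustrative): a concrete job module could run, e.g.,
#   if __name__ == '__main__':
#       run_task(AvroKeyColorPick, reducer_class=AvroValueColorCount)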
|