code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
import datetime
import pytest
from django.core.exceptions import ValidationError
from treeherder.perf.models import (PerformanceAlert,
PerformanceAlertSummary,
PerformanceSignature)
def test_summary_modification(test_repository, test_perf_signature,
                              test_perf_alert_summary, test_perf_alert):
    """An alert summary's status must follow the status of its alert."""
    summary = test_perf_alert_summary
    alert = test_perf_alert

    assert summary.bug_number is None
    assert summary.status == PerformanceAlertSummary.UNTRIAGED

    # Acknowledging the alert should move the summary to "investigating".
    alert.status = PerformanceAlert.ACKNOWLEDGED
    alert.save()
    assert (PerformanceAlertSummary.objects.get(id=1).status ==
            PerformanceAlertSummary.INVESTIGATING)

    # Resetting the alert to untriaged should reset the summary as well.
    alert.status = PerformanceAlert.UNTRIAGED
    alert.save()
    assert (PerformanceAlertSummary.objects.get(id=1).status ==
            PerformanceAlertSummary.UNTRIAGED)
def test_summary_status(test_repository, test_perf_signature,
                        test_perf_alert_summary, test_perf_framework):
    """Reassigned alerts must be ignored when computing summary status.

    A reassigned regression alert should not count towards the summary's
    status; the remaining acknowledged improvement alert should leave the
    summary in the IMPROVEMENT state.
    """
    signature1 = test_perf_signature
    # A second signature so the two alerts belong to different series.
    signature2 = PerformanceSignature.objects.create(
        repository=test_repository,
        signature_hash=(40*'u'),
        framework=test_perf_signature.framework,
        platform=test_perf_signature.platform,
        option_collection=test_perf_signature.option_collection,
        suite='mysuite_2',
        test='mytest_2',
        has_subtests=False,
        last_updated=datetime.datetime.now()
    )
    s = test_perf_alert_summary
    # A regression alert that will be reassigned (to its own summary).
    a = PerformanceAlert.objects.create(
        summary=s,
        series_signature=signature1,
        is_regression=True,
        amount_pct=0.5,
        amount_abs=50.0,
        prev_value=100.0,
        new_value=150.0,
        t_value=20.0)
    # this is the test case
    # ignore downstream and reassigned to update the summary status
    a.status = PerformanceAlert.REASSIGNED
    a.related_summary = s
    a.save()
    # An acknowledged improvement alert on the second signature.
    b = PerformanceAlert.objects.create(
        summary=s,
        series_signature=signature2,
        is_regression=False,
        amount_pct=0.5,
        amount_abs=50.0,
        prev_value=100.0,
        new_value=150.0,
        t_value=20.0)
    b.status = PerformanceAlert.ACKNOWLEDGED
    b.save()
    # Re-fetch: only the acknowledged improvement should drive the status.
    s = PerformanceAlertSummary.objects.get(id=1)
    assert s.status == PerformanceAlertSummary.IMPROVEMENT
def test_alert_modification(test_perf_signature, test_perf_alert_summary,
                            push_stored, test_perf_alert):
    """Validation rules tying an alert's related_summary to its status.

    Setting a related summary requires a "relational" status (e.g.
    DOWNSTREAM); clearing the related summary requires clearing the
    status back to UNTRIAGED.  Both mismatches raise ValidationError.
    """
    p = test_perf_alert
    # A second summary to reassign the alert to.
    s2 = PerformanceAlertSummary.objects.create(
        id=2,
        repository=test_perf_alert_summary.repository,
        prev_push_id=3,
        push_id=4,
        created=datetime.datetime.now(),
        framework=test_perf_alert_summary.framework,
        manually_created=False)
    assert p.related_summary is None
    assert p.status == PerformanceAlert.UNTRIAGED

    # set related summary, but no status, make sure an exception is thrown
    p.related_summary = s2
    with pytest.raises(ValidationError):
        p.save()

    # set related summary with downstream status, make sure that works
    p = PerformanceAlert.objects.get(id=1)
    p.status = PerformanceAlert.DOWNSTREAM
    p.related_summary = s2
    p.save()

    p = PerformanceAlert.objects.get(id=1)
    assert p.related_summary.id == 2
    assert p.status == PerformanceAlert.DOWNSTREAM

    # unset related summary, but don't set status, make sure we get
    # another exception
    with pytest.raises(ValidationError):
        p.related_summary = None
        p.save()

    # clearing the status as well makes the save valid
    p.status = PerformanceAlert.UNTRIAGED
    p.save()

    # then make sure it succeeds when set
    p = PerformanceAlert.objects.get(id=1)
    assert p.related_summary is None
    assert p.status == PerformanceAlert.UNTRIAGED
| edmorley/treeherder | tests/perfalert/test_alert_modification.py | Python | mpl-2.0 | 3,921 |
import happyforms
from django import forms
from django.contrib import messages
from remo.base.forms import BaseEmailUsersForm
from remo.base.tasks import send_remo_mail
from remo.profiles.models import FunctionalArea, UserProfile
class EmailRepsForm(BaseEmailUsersForm):
    """Generic form to send email to multiple users.

    Recipients are narrowed down by a hidden functional-area choice;
    actual delivery is delegated to the ``send_remo_mail`` async task.
    """
    functional_area = forms.ModelChoiceField(
        queryset=FunctionalArea.active_objects.all(), empty_label=None,
        widget=forms.HiddenInput())

    def send_email(self, request, users):
        """Queue an email to ``users`` and flash the outcome on ``request``.

        ``users`` is a queryset; only its primary keys are handed to the
        task.  When the queryset is empty no mail is sent and an error
        message is flashed instead.
        """
        recipients = users.values_list('id', flat=True)

        if recipients:
            from_email = '%s <%s>' % (request.user.get_full_name(),
                                      request.user.email)
            send_remo_mail.delay(sender=from_email,
                                 recipients_list=recipients,
                                 subject=self.cleaned_data['subject'],
                                 message=self.cleaned_data['body'])
            messages.success(request, 'Email sent successfully.')
        else:
            # Fix: user-facing typo "occured" -> "occurred".
            messages.error(request, 'Email not sent. An error occurred.')
class TrackFunctionalAreasForm(happyforms.ModelForm):
    """Form for tracking interests in functional areas for Mozillians."""

    class Meta:
        # Only the tracked functional areas are editable via this form.
        model = UserProfile
        fields = ['tracked_functional_areas']
| tsmrachel/remo | remo/dashboard/forms.py | Python | bsd-3-clause | 1,405 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Python ops defined in image_grad.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.eager import backprop
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import image_ops
from tensorflow.python.platform import test
@test_util.for_all_test_methods(test_util.disable_xla,
                                'align_corners=False not supported by XLA')
class ResizeNearestNeighborOpTest(test.TestCase):
  """Shape and gradient tests for image_ops.resize_nearest_neighbor."""

  # Float dtypes exercised by every test in this class.
  TYPES = [np.float32, np.float64]

  def testShapeIsCorrectAfterOp(self):
    # Both the static shape and the evaluated shape must equal out_shape.
    in_shape = [1, 2, 2, 1]
    out_shape = [1, 4, 6, 1]

    for nptype in self.TYPES:
      x = np.arange(0, 4).reshape(in_shape).astype(nptype)

      with self.cached_session(use_gpu=True):
        input_tensor = constant_op.constant(x, shape=in_shape)
        resize_out = image_ops.resize_nearest_neighbor(input_tensor,
                                                       out_shape[1:3])
        self.assertEqual(out_shape, list(resize_out.get_shape()))

        resize_out = self.evaluate(resize_out)
        self.assertEqual(out_shape, list(resize_out.shape))

  @test_util.run_deprecated_v1
  def testGradFromResizeToLargerInBothDims(self):
    # Numeric vs. theoretical gradient when upsampling in both dims.
    in_shape = [1, 2, 3, 1]
    out_shape = [1, 4, 6, 1]

    for nptype in self.TYPES:
      x = np.arange(0, 6).reshape(in_shape).astype(nptype)

      with self.cached_session(use_gpu=True):
        input_tensor = constant_op.constant(x, shape=in_shape)
        resize_out = image_ops.resize_nearest_neighbor(input_tensor,
                                                       out_shape[1:3])
        err = gradient_checker.compute_gradient_error(
            input_tensor, in_shape, resize_out, out_shape, x_init_value=x)
      self.assertLess(err, 1e-3)

  @test_util.run_deprecated_v1
  def testGradFromResizeToSmallerInBothDims(self):
    # Numeric vs. theoretical gradient when downsampling in both dims.
    in_shape = [1, 4, 6, 1]
    out_shape = [1, 2, 3, 1]

    for nptype in self.TYPES:
      x = np.arange(0, 24).reshape(in_shape).astype(nptype)

      with self.cached_session(use_gpu=True):
        input_tensor = constant_op.constant(x, shape=in_shape)
        resize_out = image_ops.resize_nearest_neighbor(input_tensor,
                                                       out_shape[1:3])
        err = gradient_checker.compute_gradient_error(
            input_tensor, in_shape, resize_out, out_shape, x_init_value=x)
      self.assertLess(err, 1e-3)

  @test_util.run_deprecated_v1
  def testCompareGpuVsCpu(self):
    # The CPU and GPU kernels must produce matching gradients for both
    # align_corners settings.
    in_shape = [1, 4, 6, 3]
    out_shape = [1, 8, 16, 3]

    for nptype in self.TYPES:
      x = np.arange(0, np.prod(in_shape)).reshape(in_shape).astype(nptype)
      for align_corners in [True, False]:
        # Gradient computed on CPU...
        with self.cached_session(use_gpu=False):
          input_tensor = constant_op.constant(x, shape=in_shape)
          resize_out = image_ops.resize_nearest_neighbor(
              input_tensor, out_shape[1:3], align_corners=align_corners)
          grad_cpu = gradient_checker.compute_gradient(
              input_tensor, in_shape, resize_out, out_shape, x_init_value=x)

        # ...must agree with the gradient computed on GPU.
        with self.cached_session(use_gpu=True):
          input_tensor = constant_op.constant(x, shape=in_shape)
          resize_out = image_ops.resize_nearest_neighbor(
              input_tensor, out_shape[1:3], align_corners=align_corners)
          grad_gpu = gradient_checker.compute_gradient(
              input_tensor, in_shape, resize_out, out_shape, x_init_value=x)

        self.assertAllClose(grad_cpu, grad_gpu, rtol=1e-5, atol=1e-5)
class ResizeBilinearOpTest(test.TestCase):
  """Shape and gradient tests for image_ops.resize_bilinear."""

  def testShapeIsCorrectAfterOp(self):
    # Both the static shape and the evaluated shape must equal out_shape.
    in_shape = [1, 2, 2, 1]
    out_shape = [1, 4, 6, 1]

    x = np.arange(0, 4).reshape(in_shape).astype(np.float32)

    with self.cached_session():
      input_tensor = constant_op.constant(x, shape=in_shape)
      resize_out = image_ops.resize_bilinear(input_tensor, out_shape[1:3])
      self.assertEqual(out_shape, list(resize_out.get_shape()))

      resize_out = self.evaluate(resize_out)
      self.assertEqual(out_shape, list(resize_out.shape))

  @test_util.run_deprecated_v1
  def testGradFromResizeToLargerInBothDims(self):
    # Numeric vs. theoretical gradient when upsampling in both dims.
    in_shape = [1, 2, 3, 1]
    out_shape = [1, 4, 6, 1]

    x = np.arange(0, 6).reshape(in_shape).astype(np.float32)

    with self.cached_session():
      input_tensor = constant_op.constant(x, shape=in_shape)
      resize_out = image_ops.resize_bilinear(input_tensor, out_shape[1:3])
      err = gradient_checker.compute_gradient_error(
          input_tensor, in_shape, resize_out, out_shape, x_init_value=x)
    self.assertLess(err, 1e-3)

  @test_util.run_deprecated_v1
  def testGradFromResizeToSmallerInBothDims(self):
    # Numeric vs. theoretical gradient when downsampling in both dims.
    in_shape = [1, 4, 6, 1]
    out_shape = [1, 2, 3, 1]

    x = np.arange(0, 24).reshape(in_shape).astype(np.float32)

    with self.cached_session():
      input_tensor = constant_op.constant(x, shape=in_shape)
      resize_out = image_ops.resize_bilinear(input_tensor, out_shape[1:3])
      err = gradient_checker.compute_gradient_error(
          input_tensor, in_shape, resize_out, out_shape, x_init_value=x)
    self.assertLess(err, 1e-3)

  @test_util.run_deprecated_v1
  def testCompareGpuVsCpu(self):
    # CPU and GPU gradients must match for each align_corners setting.
    in_shape = [2, 4, 6, 3]
    out_shape = [2, 8, 16, 3]

    size = np.prod(in_shape)
    x = 1.0 / size * np.arange(0, size).reshape(in_shape).astype(np.float32)

    # Align corners will be deprecated for tf2.0 and the false version is not
    # supported by XLA.
    align_corner_options = [True
                           ] if test_util.is_xla_enabled() else [True, False]
    for align_corners in align_corner_options:
      grad = {}
      for use_gpu in [False, True]:
        with self.cached_session(use_gpu=use_gpu):
          input_tensor = constant_op.constant(x, shape=in_shape)
          resized_tensor = image_ops.resize_bilinear(
              input_tensor, out_shape[1:3], align_corners=align_corners)
          grad[use_gpu] = gradient_checker.compute_gradient(
              input_tensor, in_shape, resized_tensor, out_shape, x_init_value=x)

      self.assertAllClose(grad[False], grad[True], rtol=1e-4, atol=1e-4)

  @test_util.run_deprecated_v1
  def testTypes(self):
    # Gradient shape (and theoretical jacobian) per supported float dtype.
    in_shape = [1, 4, 6, 1]
    out_shape = [1, 2, 3, 1]

    x = np.arange(0, 24).reshape(in_shape)

    with self.cached_session() as sess:
      for dtype in [np.float16, np.float32, np.float64]:
        input_tensor = constant_op.constant(x.astype(dtype), shape=in_shape)
        resize_out = image_ops.resize_bilinear(input_tensor, out_shape[1:3])
        grad = sess.run(gradients_impl.gradients(resize_out, input_tensor))[0]
        self.assertAllEqual(in_shape, grad.shape)
        # Not using gradient_checker.compute_gradient as I didn't work out
        # the changes required to compensate for the lower precision of
        # float16 when computing the numeric jacobian.
        # Instead, we just test the theoretical jacobian.
        self.assertAllEqual(
            [[[[1.], [0.], [1.], [0.], [1.], [0.]],
              [[0.], [0.], [0.], [0.], [0.], [0.]],
              [[1.], [0.], [1.], [0.], [1.], [0.]],
              [[0.], [0.], [0.], [0.], [0.], [0.]]]], grad)
class ResizeBicubicOpTest(test.TestCase):
  """Shape and gradient tests for image_ops.resize_bicubic."""

  def testShapeIsCorrectAfterOp(self):
    # Both the static shape and the evaluated shape must equal out_shape.
    in_shape = [1, 2, 2, 1]
    out_shape = [1, 4, 6, 1]

    x = np.arange(0, 4).reshape(in_shape).astype(np.float32)

    for align_corners in [True, False]:
      with self.cached_session():
        input_tensor = constant_op.constant(x, shape=in_shape)
        resize_out = image_ops.resize_bicubic(input_tensor, out_shape[1:3],
                                              align_corners=align_corners)
        self.assertEqual(out_shape, list(resize_out.get_shape()))

        resize_out = self.evaluate(resize_out)
        self.assertEqual(out_shape, list(resize_out.shape))

  @test_util.run_deprecated_v1
  def testGradFromResizeToLargerInBothDims(self):
    # Numeric vs. theoretical gradient when upsampling in both dims.
    in_shape = [1, 2, 3, 1]
    out_shape = [1, 4, 6, 1]

    x = np.arange(0, 6).reshape(in_shape).astype(np.float32)

    for align_corners in [True, False]:
      with self.cached_session():
        input_tensor = constant_op.constant(x, shape=in_shape)
        resize_out = image_ops.resize_bicubic(input_tensor, out_shape[1:3],
                                              align_corners=align_corners)
        err = gradient_checker.compute_gradient_error(
            input_tensor, in_shape, resize_out, out_shape, x_init_value=x)
      self.assertLess(err, 1e-3)

  @test_util.run_deprecated_v1
  def testGradFromResizeToSmallerInBothDims(self):
    # Numeric vs. theoretical gradient when downsampling in both dims.
    in_shape = [1, 4, 6, 1]
    out_shape = [1, 2, 3, 1]

    x = np.arange(0, 24).reshape(in_shape).astype(np.float32)

    for align_corners in [True, False]:
      with self.cached_session():
        input_tensor = constant_op.constant(x, shape=in_shape)
        resize_out = image_ops.resize_bicubic(input_tensor, out_shape[1:3],
                                              align_corners=align_corners)
        err = gradient_checker.compute_gradient_error(
            input_tensor, in_shape, resize_out, out_shape, x_init_value=x)
      self.assertLess(err, 1e-3)

  @test_util.run_deprecated_v1
  def testGradOnUnsupportedType(self):
    # uint8 has no registered gradient for resize_bicubic, so the
    # gradient list comes back as [None].
    in_shape = [1, 4, 6, 1]
    out_shape = [1, 2, 3, 1]

    x = np.arange(0, 24).reshape(in_shape).astype(np.uint8)

    with self.cached_session():
      input_tensor = constant_op.constant(x, shape=in_shape)
      resize_out = image_ops.resize_bicubic(input_tensor, out_shape[1:3])
      # NOTE(review): argument order (ys=input_tensor, xs=[resize_out])
      # looks reversed but matches the upstream test -- confirm intent
      # before changing.
      grad = gradients_impl.gradients(input_tensor, [resize_out])
      self.assertEqual([None], grad)
class ScaleAndTranslateOpTest(test.TestCase):
  """Gradient tests for image_ops.scale_and_translate."""

  @test_util.run_deprecated_v1
  def testGrads(self):
    # Numeric vs. theoretical gradient over a grid of kernels, scales,
    # translations and antialias settings.
    in_shape = [1, 2, 3, 1]
    out_shape = [1, 4, 6, 1]

    x = np.arange(0, 6).reshape(in_shape).astype(np.float32)

    kernel_types = [
        'lanczos1', 'lanczos3', 'lanczos5', 'gaussian', 'box', 'triangle',
        'keyscubic', 'mitchellcubic'
    ]
    scales = [(1.0, 1.0), (0.37, 0.47), (2.1, 2.1)]
    translations = [(0.0, 0.0), (3.14, 1.19), (2.1, 3.1), (100.0, 200.0)]
    for scale in scales:
      for translation in translations:
        for kernel_type in kernel_types:
          for antialias in [True, False]:
            with self.cached_session():
              input_tensor = constant_op.constant(x, shape=in_shape)
              scale_and_translate_out = image_ops.scale_and_translate(
                  input_tensor,
                  out_shape[1:3],
                  scale=constant_op.constant(scale),
                  translation=constant_op.constant(translation),
                  kernel_type=kernel_type,
                  antialias=antialias)
              err = gradient_checker.compute_gradient_error(
                  input_tensor,
                  in_shape,
                  scale_and_translate_out,
                  out_shape,
                  x_init_value=x)
            self.assertLess(err, 1e-3)

  def testIdentityGrads(self):
    """Tests that Gradients for 1.0 scale should be ones for some kernels."""
    in_shape = [1, 2, 3, 1]
    out_shape = [1, 4, 6, 1]

    x = np.arange(0, 6).reshape(in_shape).astype(np.float32)

    # Interpolating kernels: at scale 1.0 / translation 0.0 the op is the
    # identity, so every input gradient should be exactly 1.
    kernel_types = ['lanczos1', 'lanczos3', 'lanczos5', 'triangle', 'keyscubic']
    scale = (1.0, 1.0)
    translation = (0.0, 0.0)
    antialias = True
    for kernel_type in kernel_types:
      with self.cached_session():
        input_tensor = constant_op.constant(x, shape=in_shape)
        with backprop.GradientTape() as tape:
          tape.watch(input_tensor)
          scale_and_translate_out = image_ops.scale_and_translate(
              input_tensor,
              out_shape[1:3],
              scale=constant_op.constant(scale),
              translation=constant_op.constant(translation),
              kernel_type=kernel_type,
              antialias=antialias)
        grad = tape.gradient(scale_and_translate_out, input_tensor)[0]
        grad_v = self.evaluate(grad)
        self.assertAllClose(np.ones_like(grad_v), grad_v)
class CropAndResizeOpTest(test.TestCase):
  """Shape and gradient tests for image_ops.crop_and_resize."""

  def testShapeIsCorrectAfterOp(self):
    # Both the static shape and the evaluated shape must equal crops_shape.
    batch = 2
    image_height = 3
    image_width = 4
    crop_height = 4
    crop_width = 5
    depth = 2
    num_boxes = 2

    image_shape = [batch, image_height, image_width, depth]
    crop_size = [crop_height, crop_width]
    crops_shape = [num_boxes, crop_height, crop_width, depth]

    image = np.arange(0, batch * image_height * image_width *
                      depth).reshape(image_shape).astype(np.float32)
    # One full-image box and one interior box, each from its own batch item.
    boxes = np.array([[0, 0, 1, 1], [.1, .2, .7, .8]], dtype=np.float32)
    box_ind = np.array([0, 1], dtype=np.int32)

    with self.session(use_gpu=True) as sess:
      crops = image_ops.crop_and_resize(
          constant_op.constant(
              image, shape=image_shape),
          constant_op.constant(
              boxes, shape=[num_boxes, 4]),
          constant_op.constant(
              box_ind, shape=[num_boxes]),
          constant_op.constant(
              crop_size, shape=[2]))
      self.assertEqual(crops_shape, list(crops.get_shape()))
      crops = self.evaluate(crops)
      self.assertEqual(crops_shape, list(crops.shape))

  def _randomUniformAvoidAnchors(self, low, high, anchors, radius, num_samples):
    """Generate samples that are far enough from a set of anchor points.

    We generate uniform samples in [low, high], then reject those that are less
    than radius away from any point in anchors. We stop after we have accepted
    num_samples samples.

    Args:
      low: The lower end of the interval.
      high: The upper end of the interval.
      anchors: A list of length num_crops with anchor points to avoid.
      radius: Distance threshold for the samples from the anchors.
      num_samples: How many samples to produce.

    Returns:
      samples: A list of length num_samples with the accepted samples.
    """
    self.assertTrue(low < high)
    self.assertTrue(radius >= 0)
    num_anchors = len(anchors)
    # Make sure that at least half of the interval is not forbidden.
    self.assertTrue(2 * radius * num_anchors < 0.5 * (high - low))
    anchors = np.reshape(anchors, num_anchors)
    samples = []
    # Rejection sampling: keep drawing until num_samples are accepted.
    while len(samples) < num_samples:
      sample = np.random.uniform(low, high)
      if np.all(np.fabs(sample - anchors) > radius):
        samples.append(sample)
    return samples

  @test_util.run_deprecated_v1
  def testGradRandomBoxes(self):
    """Test that the gradient is correct for randomly generated boxes.

    The mapping is piecewise differentiable with respect to the box coordinates.
    The points where the function is not differentiable are those which are
    mapped to image pixels, i.e., the normalized y coordinates in
    np.linspace(0, 1, image_height) and normalized x coordinates in
    np.linspace(0, 1, image_width). Make sure that the box coordinates are
    sufficiently far away from those rectangular grid centers that are points of
    discontinuity, so that the finite difference Jacobian is close to the
    computed one.
    """
    np.random.seed(1)  # Make it reproducible.
    delta = 1e-3
    radius = 2 * delta
    low, high = -0.5, 1.5  # Also covers the case of extrapolation.

    image_height = 4
    for image_width in range(1, 3):
      for crop_height in range(1, 3):
        for crop_width in range(2, 4):
          for depth in range(1, 3):
            for num_boxes in range(1, 3):

              batch = num_boxes
              image_shape = [batch, image_height, image_width, depth]
              crop_size = [crop_height, crop_width]
              crops_shape = [num_boxes, crop_height, crop_width, depth]
              boxes_shape = [num_boxes, 4]

              image = np.arange(0, batch * image_height * image_width *
                                depth).reshape(image_shape).astype(np.float32)
              boxes = []
              for _ in range(num_boxes):
                # pylint: disable=unbalanced-tuple-unpacking
                y1, y2 = self._randomUniformAvoidAnchors(
                    low, high, np.linspace(0, 1, image_height), radius, 2)
                x1, x2 = self._randomUniformAvoidAnchors(
                    low, high, np.linspace(0, 1, image_width), radius, 2)
                # pylint: enable=unbalanced-tuple-unpacking
                boxes.append([y1, x1, y2, x2])

              boxes = np.array(boxes, dtype=np.float32)
              box_ind = np.arange(batch, dtype=np.int32)

              with self.cached_session(use_gpu=True):
                image_tensor = constant_op.constant(image, shape=image_shape)
                boxes_tensor = constant_op.constant(boxes, shape=[num_boxes, 4])
                box_ind_tensor = constant_op.constant(
                    box_ind, shape=[num_boxes])
                crops = image_ops.crop_and_resize(
                    image_tensor,
                    boxes_tensor,
                    box_ind_tensor,
                    constant_op.constant(
                        crop_size, shape=[2]))

                # Check gradients w.r.t. both the image and the boxes.
                err = gradient_checker.compute_gradient_error(
                    [image_tensor, boxes_tensor], [image_shape, boxes_shape],
                    crops,
                    crops_shape,
                    delta=delta,
                    x_init_value=[image, boxes])

              self.assertLess(err, 2e-3)
# Run all test cases when executed directly.
if __name__ == "__main__":
  test.main()
| ghchinoy/tensorflow | tensorflow/python/ops/image_grad_test.py | Python | apache-2.0 | 18,282 |
#===============================================================================
# Copyright 2010 Matt Chaput
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================
from whoosh.qparser.default import QueryParser, MultifieldParser
from whoosh.qparser.simple import SimpleParser, DisMaxParser
from whoosh.qparser.ngram import SimpleNgramParser
| archatas/whoosh | whoosh/qparser/__init__.py | Python | apache-2.0 | 919 |
import requests
import json
import spotipy
import spotipy.util as util
import math
import operator
import config
def main():
    """Build a playlist of the user's saved tracks closest in valence
    to the mood detected in a photo (Python 2 script).

    Flow: authenticate with Spotify, reuse or create the target playlist,
    score the photo via the emotion API, rank saved tracks by distance to
    that valence, and add the closest ``maximum`` tracks.
    """
    playlist_name = "Will squared Playlist"
    image_file_name = "will&will.jpg"
    maximum = 20  # number of closest tracks to add

    token = util.prompt_for_user_token(
        config.username, config.scope, client_id=config.client_id, client_secret=config.client_secret, redirect_uri=config.redirect_uri)

    if token:
        sp = spotipy.Spotify(auth=token)
        # Reuse an existing playlist of the same name, or create a new one.
        if(not duplicates(playlist_name,sp)):
            print "creating new playlist:",playlist_name
            playlist_id = sp.user_playlist_create(config.username,playlist_name,public=True)['id']
        else:
            playlist_id = find_playlist_id(playlist_name,sp)
        # print playlist_id
        tracks_uri = get_tracks_uri_from_playlist(sp)
        tracks_valence = []
        track_dict = {}
        # Target valence inferred from the facial emotions in the photo.
        emot_valence = get_valence_score_from_emotion(config._url,config._key,image_file_name)
        for uri in get_tracks_uri_from_playlist(sp):
            # track_dict[uri] = valence_each_track(uri,sp)
            # NOTE(review): valence_each_track is called twice per track
            # below -- one redundant Spotify API call per track.
            track_dict[uri] = calculate_distance(valence_each_track(uri,sp),emot_valence )
            tracks_valence.append(valence_each_track(uri,sp))
        # Sort tracks by distance to the target valence, closest first.
        track_dict_sorted = sorted(track_dict.items(), key=operator.itemgetter(1))
        # print track_dict
        # print tracks_valence
        # print calculate_distance(tracks_valence, get_valence_score_from_emotion(_url,_key))
        # print track_dict_sorted
        # print emot_valence
        uris = []
        for uri in track_dict_sorted[:maximum]:
            uris.append(uri[0])
            # uris.append(uri[0])
        sp.user_playlist_add_tracks(config.username, playlist_id, uris)
        print "done"
# ensure a duplicate playlist is not created
def find_playlist_id(playlist_name, sp):
    """Return the id of the current user's playlist named *playlist_name*.

    Falls through (returning None) when no playlist matches.
    """
    playlists = sp.current_user_playlists(limit=50, offset=0)['items']
    for playlist in playlists:
        if playlist['name'] == playlist_name:
            return playlist['id']
def duplicates(name, sp):
    """True if the current user already has a playlist called *name*."""
    items = sp.current_user_playlists(limit=50, offset=0)['items']
    return any(item['name'] == name for item in items)
def get_tracks_uri_from_playlist(sp):
    """Yield the URI of each track saved in the user's library."""
    saved = sp.current_user_saved_tracks(limit=50)
    for entry in saved['items']:
        yield entry['track']['uri']
def valence_each_track(track_uri, sp):
    """Return the Spotify audio-feature valence for a single track URI."""
    features = sp.audio_features([track_uri])
    return features[0]['valence']
def calculate_distance(tracks_valence, emotion_valence):
    """Absolute difference between a track's valence and the target valence."""
    delta = tracks_valence - emotion_valence
    return abs(delta)
def get_valence_score_from_emotion(_url,_key,image_file_name):
    """POST the image to the emotion API and map the result to a valence.

    Happy emotions push the score above the 0.5 baseline, sad ones below;
    the shift is averaged over every face found in the image, so the
    result stays in [0, 1].
    """
    # 'application/octet-stream',
    emotion_headers = {
        'Content-Type': 'application/octet-stream',
        'Ocp-Apim-Subscription-Key': _key,
    }
    with open(image_file_name, 'rb') as f:
        data = f.read()
    req = requests.post(_url, data=data, headers=emotion_headers)
    response_json = req.json()
    print req.text
    # get the most likely emotion
    happy = 0
    sad = 0
    neutral = 0.5  # baseline valence when no emotion dominates
    emotion = ""
    sad_emotions = ["sadness", "contempt", "disgust", "anger", "fear"]
    happy_emotions = ["happiness", "surprise"]
    # Accumulate the per-face scores into happy vs. sad totals.
    for person in response_json:
        for key in person["scores"]:
            if key in sad_emotions:
                sad += person["scores"][key]
            elif key in happy_emotions:
                happy += person["scores"][key]
    # print happy, sad
    # valence between the range of 0 to 1
    valence_score = neutral + (happy - sad) / (2.0 * len(response_json))
    return valence_score
# Script entry point.
if __name__ == '__main__':
    main()
| hasanbanna/partyplaylist | main.py | Python | mit | 3,712 |
# -*- encoding: utf-8 -*-
"""A utility that tries saved genetic tests and removes those failing"""
import asyncio
import yaml
from pathlib import Path
from logzero import logger
from rizza import entity_tester
from rizza import genetic_tester
def genetic_prune(conf, entity='All'):
    """Check all saved genetic_tester tests for an entity, prune failures.

    For ``entity == 'All'`` this recurses over every known entity.
    Otherwise the entity's saved YAML test file is loaded, every
    "positive" test is re-run, failing tests are removed, and the file
    is rewritten (or deleted entirely when nothing usable remains).
    """
    if entity == 'All':
        for target in list(entity_tester.EntityTester.pull_entities()):
            genetic_prune(conf, target)
    else:
        test_file = conf.base_dir.joinpath(
            'data/genetic_tests/{}.yaml'.format(entity))
        logger.debug('Current target file: {}'.format(test_file))
        to_remove = []
        # Files of ~10 bytes or less hold no usable test data.
        if test_file.exists() and test_file.stat().st_size > 10:
            logger.debug('Beginning tests for {}'.format(entity))
            # Fix: use safe_load -- yaml.load without an explicit Loader
            # is deprecated and can construct arbitrary Python objects.
            tests = yaml.safe_load(test_file.open('r'))
            for test in tests:
                # Keys have the form "<entity> <method> <mode>".
                ent, method, mode = test.split(' ')
                if mode == 'positive':
                    logger.debug('Running test {}'.format(method))
                    result = genetic_tester.GeneticEntityTester(
                        conf, entity, method
                    ).run_best()
                    if result == -1:
                        logger.debug('{} failed.'.format(test))
                        to_remove.append(test)
                    else:
                        logger.debug('{} passed.'.format(test))
            for test in to_remove:
                logger.warning('Removing {} from {}'.format(test, test_file))
                # NOTE(review): assumes ``tests`` is a mapping keyed by the
                # test string -- confirm the on-disk format.
                del tests[test]
            logger.debug('Deleting file {}'.format(test_file))
            test_file.unlink()
            logger.debug('Writing tests to {}'.format(test_file))
            yaml.dump(tests, test_file.open('w+'), default_flow_style=False)
            logger.info('Done pruning {}'.format(entity))
        # Clean up files that ended up (effectively) empty.
        if test_file.exists() and test_file.stat().st_size < 10:
            logger.warning('Deleting empty file {}'.format(test_file))
            test_file.unlink()
async def _async_prune(conf, entity, loop, sem):
    """Run one genetic_prune call in the default executor, gated by *sem*."""
    async with sem:
        # Offload the blocking prune to a worker thread so the event
        # loop stays responsive.
        await loop.run_in_executor(None, genetic_prune, conf, entity)
async def _async_prune_all(conf, loop, sem):
    """Schedule one prune task per known entity and wait for all of them."""
    entities = list(entity_tester.EntityTester.pull_entities())
    tasks = [
        asyncio.ensure_future(_async_prune(conf, entity, loop, sem))
        for entity in entities
    ]
    await asyncio.wait(tasks)
def async_genetic_prune(conf, entity='All', async_limit=100):
    """Asynchronously perform a genetic prune for all entities.

    A single named entity is pruned synchronously; 'All' fans out one
    task per entity with at most *async_limit* running concurrently.
    """
    if entity != 'All':
        genetic_prune(conf, entity)
        return
    semaphore = asyncio.Semaphore(async_limit)
    event_loop = asyncio.get_event_loop()
    event_loop.run_until_complete(
        _async_prune_all(conf, event_loop, semaphore))
    event_loop.close()
| JacobCallahan/rizza | rizza/helpers/prune.py | Python | gpl-3.0 | 2,971 |
from __future__ import unicode_literals
import frappe
def get_context(context):
    """Render the Lab Test web form read-only for portal users."""
    context.read_only = 1
def get_list_context(context):
    """Configure the portal list view: row template and query function."""
    context.row_template = "erpnext/templates/includes/healthcare/lab_test_row_template.html"
    context.get_list = get_lab_test_list
def get_lab_test_list(doctype, txt, filters, limit_start, limit_page_length = 20, order_by='modified desc'):
    """Portal list query: Lab Tests belonging to the logged-in patient.

    NOTE(review): the txt/filters/pagination/order_by arguments required
    by the portal list contract are accepted but ignored here -- the
    query always returns every lab test for the patient ordered by
    result_date.  Confirm whether pagination should be applied.
    """
    patient = get_patient()
    lab_tests = frappe.db.sql("""select * from `tabLab Test`
        where patient = %s order by result_date""", patient, as_dict = True)
    return lab_tests
def get_patient():
    """Return the Patient record name linked to the session user's email."""
    return frappe.get_value("Patient",{"email": frappe.session.user}, "name")
def has_website_permission(doc, ptype, user, verbose=False):
    """Allow portal access only to the patient the Lab Test belongs to."""
    return doc.patient == get_patient()
| ovresko/erpnext | erpnext/healthcare/web_form/lab_test/lab_test.py | Python | gpl-3.0 | 775 |
import decimal
import os
import json
import logging
import time
import traceback
import types
import arrow
from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.engine import Engine
from sqlalchemy.ext.declarative import declarative_base as real_declarative_base
from sqlalchemy.orm import sessionmaker
### SQLALCHEMY INIT START ###
# Wrap SQLAlchemy's declarative_base so it can be used as a class decorator.
declarative_base = lambda cls: real_declarative_base(cls=cls)

@declarative_base
class BetterBase(object):
    '''
    Add some default properties and methods to the SQLAlchemy declarative base.
    '''

    @property
    def columns(self):
        '''
        Return all of the columns.
        '''
        return (c.name for c in self.__table__.columns)
        #return (c.key for c in class_mapper(self.__class__).columns)

    @property
    def frontend_columns(self):
        '''
        The columns we see when listing all objects of this class.
        An iterable of tuples (attribute_name, display_name).
        '''
        return ((c, c) for c in self.columns)

    @property
    def main_columns(self):
        '''
        The columns we see when listing similar objects of this class.
        An iterable of tuples (attribute_name, display_name).
        Example: listing the stats per level.
        '''
        # Defaults to frontend_columns; subclasses may narrow this down.
        return self.frontend_columns

    main_tabs = []
    # Will be an iterable of dicts representing different tabs on a category
    # such as a comparison of models.Character

    @property
    def search_id(self):
        '''
        The id used to search for this object using get_by_id().
        This ignores varying attributes such as stats per level.
        '''
        # My *_tables do not have an id attribute so this is an AttributeError.
        return self.id

    _main_panels = None
    # Will be an iterable of dicts representing this object on its main page.

    def generate_main_panels(self):
        # Subclasses override this to populate _main_panels lazily.
        self._main_panels = []

    @property
    def main_panels(self):
        # Lazily build the panels on first access.
        if self._main_panels is None:
            self.generate_main_panels()
        return self._main_panels

    extra_tabs = []
    # Will be an iterable of dicts representing different pages used to display
    # objects similar to this object but differing slightly
    # (such as stats per level).

    additional_columns = ()
    # Will be an an iterable of tuples (key, attributes/properties)
    # to add to the dict representation.

    def __repr__(self):
        return u'{}({})'.format(self.__class__.__name__, self.columns)

    def dict(self):
        '''
        Transform the model into a dictionary.
        '''
        ret = dict((c, getattr(self, c)) for c in self.columns)
        if self.search_id is not None:
            ret['search_id'] = self.search_id
        for k, v in self.additional_columns:
            ret[k] = v
        return ret

    def jsonify(self):
        '''
        Transform the model into JSON.
        '''
        # default_encode (module level) handles Decimal/Arrow/model values.
        return json.dumps(self.dict(),
                          default=default_encode, separators=(',',':'))
def default_encode(obj):
    """``json.dumps(default=...)`` hook for types json cannot serialize.

    Handles Decimal (fixed to two places), Arrow/datetime timestamps,
    model instances (via their dict()), and generators (materialized to
    a list).  Raises TypeError for anything else, per json's contract.
    """
    if isinstance(obj, decimal.Decimal):
        # Bug fix: previously formatted ``self._value``, but there is no
        # ``self`` in scope in this module-level function (NameError at
        # runtime).  Format the Decimal itself instead.
        return u'{:.2f}'.format(obj)
    if isinstance(obj, arrow.arrow.Arrow):
        return obj.for_json()
    if isinstance(obj, arrow.arrow.datetime):
        return arrow.get(obj).for_json()
    if isinstance(obj, BetterBase):
        return obj.dict()
    if isinstance(obj, types.GeneratorType):
        return [i for i in obj]
    raise TypeError('{} is not JSON serializable'.format(obj))
engine = create_engine(
'mysql+pymysql://{}:{}@{}:{}/{}{}'.format(
os.environ['OPENSHIFT_MYSQL_DB_USERNAME'],
os.environ['OPENSHIFT_MYSQL_DB_PASSWORD'],
os.environ['OPENSHIFT_MYSQL_DB_HOST'],
os.environ['OPENSHIFT_MYSQL_DB_PORT'],
'ffrk',
#'',
'?charset=utf8',
#'?charset=utf8mb4',
),
# convert_unicode=True,
# encoding='utf-8',
# echo=True,
pool_recycle=3600
)
create_session = sessionmaker(bind=engine)
# I do not use this plugin
'''
plugin = sqlalchemy.Plugin(
engine,
#Base.metadata,
BetterBase.metadata,
keyword='db',
create=True,
commit=True,
use_kwargs=False
)
'''
@contextmanager
def session_scope():
    '''
    Provide a transactional scope around a series of operations.

    Commits on success and rolls back on any exception.  The exception
    is logged and NOT re-raised, so callers never see failures; the
    session is always closed.
    '''
    session = create_session()
    try:
        yield session
        session.commit()
    except Exception as e:
        session.rollback()
        # NOTE(review): errors are swallowed after rollback -- confirm
        # callers rely on this best-effort behavior.
        logging.error(e, e.args)
        logging.error(traceback.print_exc())
        logging.error('exc_info=True', exc_info=True)
    finally:
        session.close()
# TODO 2015-05-08
# aaargh this
# NOTE(review): dead code -- guarded by ``if False``, and it references
# ``event`` (presumably sqlalchemy.event), which is never imported in
# this module, so flipping the guard to True as-is would raise NameError.
if False:
#if True:
    logging.basicConfig()
    logger = logging.getLogger('ffrk.sqltime')
    logger.setLevel(logging.DEBUG)

    # Log each statement and its wall-clock duration.
    @event.listens_for(Engine, 'before_cursor_execute')
    def before_cursor_execute(conn, cursor, statement,
                              parameters, context, executemany):
        conn.info.setdefault('query_start_time', []).append(time.time())
        logger.debug('Start Query: {}'.format(statement))

    @event.listens_for(Engine, 'after_cursor_execute')
    def after_cursor_execute(conn, cursor, statement,
                             parameters, context, executemany):
        total = time.time() - conn.info['query_start_time'].pop(-1)
        logger.debug('Query Complete!')
        logger.debug('Total Time: {:f}'.format(total))
def make_tables():
    """Create all tables known to BetterBase.metadata (idempotent)."""
    BetterBase.metadata.create_all(engine)

# Backwards-compatible alias.
create_tables = make_tables
### SQLALCHEMY INIT END ###
### MODULE GLOBALS START ###
STRFTIME = '%Y-%m-%dT%H:%M:%S%z (%Z)'
### MODULE GLOBALS END ###
### EOF ###
| rEtSaMfF/ffrk-bottle | models/base.py | Python | gpl-3.0 | 5,917 |
from django.conf.urls import url
from .views import chooser, endnotes
# Namespace used for URL reversing, e.g. 'content_notes:choose'.
app_name = 'content_notes'

urlpatterns = [
    # Endnote chooser modal: listing and per-pk selection endpoint.
    url(r'^choose/$', chooser.choose, name='choose'),
    url(r'^choose/(\d+)/$', chooser.chosen, name='chosen'),
    # Endnote list at the root of this URLconf.
    url(r'^$', endnotes.list, name='list'),
]
| CIGIHub/greyjay | greyjay/content_notes/urls.py | Python | mit | 277 |
# Gunicorn configuration file
# (see https://docs.gunicorn.org/en/stable/settings.html).
import multiprocessing

# Address(es) to bind the server socket to.
bind = ['0.0.0.0:8200']
# Rule of thumb from the Gunicorn docs: (2 x cores) + 1 workers.
workers = multiprocessing.cpu_count() * 2 + 1
logfile = 'logs/gunicorn.log'
timeout = 30
backlog = 2048
graceful_timeout = 30
limit_request_field_size = 8000
preload_app = False
# BUG FIX: ``user`` was assigned twice in the original file; kept once.
# None means workers keep the launching user's uid/gid (no privilege drop).
user = None
group = None
pidfile = None
umask = 0
tmp_upload_dir = None
debug = False
reload = False
| Aplopio/document-converter | gunicorn.conf.py | Python | mit | 339 |
# -*- coding: utf-8 -*-
#
# Used when Staging a NNTP Post
#
# Copyright (C) 2017 Chris Caron <lead2gold@gmail.com>
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import UnicodeText
from sqlalchemy import DateTime
from sqlalchemy import Sequence
from .ObjectBase import ObjectBase
class StagedArticle(ObjectBase):
    """
    An article associated with an NZB-File. This object is used when posting.
    It provides a means of tracking what was posted and what hasn't been
    posted yet.
    """

    __tablename__ = 'staged_article'

    # Our unique identifier
    id = Column(
        Integer, Sequence('staged_article_id_seq'), primary_key=True)

    # The filename to associate with the staged content; if no filename is
    # specified then the filename associated with the filepath is used.
    localfile = Column(String(256), nullable=False)

    # Article (Unique) Message-ID
    message_id = Column(String(128), index=True)

    # Article Subject
    subject = Column(String(256), nullable=False)

    # Article Body (this does not include the yEnc attachment)
    body = Column(UnicodeText(), nullable=False)

    # Article Poster
    poster = Column(String(128), nullable=False)

    # Filename as it should appear to Usenet
    remotefile = Column(String(256), nullable=False)

    # Article Size (bytes)
    size = Column(Integer, default=0, nullable=False)

    # Local file's sha1 checksum; this is verified prior to posting to ensure
    # the contents has not changed
    sha1 = Column(String(40), default=None, nullable=True)

    # Article Post Date; This is only initialized after the post has been
    # successful.
    posted_date = Column(DateTime, default=None, nullable=True)

    # Upon posting, this is set after verifying the posted content was
    # performed successfully (None = not verified yet).
    verified_date = Column(DateTime, default=None, nullable=True)

    # The sequence # associated with the filename. A sequence value of zero (0)
    # always identifies the root/main file. This is used when generating our
    # posted content; content will always be posted in order of the sequence
    # number and then by the filename
    sequence_no = Column(Integer, default=0, nullable=False)

    # The Sort no should only differ between segmented files
    sort_no = Column(Integer, default=0, nullable=False)

    def __init__(self, *args, **kwargs):
        """Pass construction straight through to the declarative base."""
        super(StagedArticle, self).__init__(*args, **kwargs)

    def __repr__(self):
        """Debug representation keyed on the local filename."""
        return "<StagedArticle(localfile=%s)>" % (self.localfile)
| caronc/newsreap | newsreap/objects/post/StagedArticle.py | Python | gpl-3.0 | 3,097 |
# -*- coding: utf-8 -*-
from Screens.Screen import Screen
from Components.BlinkingPixmap import BlinkingPixmapConditional
from Components.config import config, ConfigInteger
from Components.Label import Label
from Components.ServiceEventTracker import ServiceEventTracker
from enigma import eDVBSatelliteEquipmentControl, eTimer, iPlayableService
from enigma import eServiceCenter, iServiceInformation
# Sentinel orbital position meaning "no known rotor position".
INVALID_POSITION = 9999
# Persisted last rotor position (tenths of a degree); survives restarts.
config.misc.lastrotorposition = ConfigInteger(INVALID_POSITION)
class Dish(Screen):
	"""On-screen display shown while a DiSEqC rotor turns the dish.

	Shows the previous and target orbital positions plus a live countdown
	of the estimated remaining turn time.
	"""

	# Visibility state of this OSD element.
	STATE_HIDDEN = 0
	STATE_SHOWN = 1
	def __init__(self, session):
		Screen.__init__(self, session)

		self["Dishpixmap"] = BlinkingPixmapConditional()
		self["Dishpixmap"].onVisibilityChange.append(self.DishpixmapVisibilityChanged)
		self["turnTime"] = Label("")
		self["posFrom"] = Label("")
		self["posGoto"] = Label("")
		self["From"] = Label (_("From :"))
		self["Goto"] = Label (_("Goto :"))

		# Polls the frontend for rotor-moving state after a tune starts.
		self.rotorTimer = eTimer()
		self.rotorTimer.callback.append(self.updateRotorMovingState)
		# 1 s tick driving the remaining-time countdown.
		self.turnTimer = eTimer()
		self.turnTimer.callback.append(self.turnTimerLoop)
		self.showTimer = eTimer()
		self.showTimer.callback.append(self.hide)

		config.usage.showdish.addNotifier(self.configChanged)
		self.configChanged(config.usage.showdish)

		# Orbital positions are in tenths of a degree (0..3600).
		self.rotor_pos = self.cur_orbpos = config.misc.lastrotorposition.getValue()
		self.turn_time = self.total_time = None
		self.cur_polar = 0
		self.__state = self.STATE_HIDDEN

		self.onShow.append(self.__onShow)
		self.onHide.append(self.__onHide)
		self.__event_tracker = ServiceEventTracker(screen=self,
			eventmap= {
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evTunedIn: self.__serviceTuneEnd,
				iPlayableService.evTuneFailed: self.__serviceTuneEnd,
			})

	def updateRotorMovingState(self):
		# Show the dish OSD while the rotor reports movement, hide it after.
		moving = eDVBSatelliteEquipmentControl.getInstance().isRotorMoving()
		#if not moving:
		if moving:
			if self.__state == self.STATE_HIDDEN:
				self.show()
				#self.rotorTimer.start(500, True)
		else:
			if self.__state == self.STATE_SHOWN:
				#self.rotorTimer.stop()
				self.hide()

	def turnTimerLoop(self):
		# Called once a second while visible; counts the estimate down.
		self.turn_time -= 1
		self["turnTime"].setText(self.FormatTurnTime(self.turn_time))

	def __onShow(self):
		self.__state = self.STATE_SHOWN
		# Compute the estimated travel time from the previous position.
		prev_rotor_pos = self.rotor_pos
		self.rotor_pos = self.cur_orbpos
		self.total_time = self.getTurnTime(prev_rotor_pos, self.rotor_pos, self.cur_polar)
		self.turn_time = self.total_time
		self["posFrom"].setText(self.OrbToStr(prev_rotor_pos))
		self["posGoto"].setText(self.OrbToStr(self.rotor_pos))
		self["turnTime"].setText(self.FormatTurnTime(self.turn_time))
		self.turnTimer.start(1000, False)

	def __onHide(self):
		self.__state = self.STATE_HIDDEN
		self.turnTimer.stop()

	def __serviceStarted(self):
		# A new service is being tuned: remember its orbital position and
		# start polling for rotor movement (DVB-S tuners only).
		if self.__state == self.STATE_SHOWN:
			self.hide()
		if self.showdish == "off":
			return
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		data = info and info.getInfoObject(iServiceInformation.sTransponderData)
		if not data or data == -1:
			return
		tuner_type = data.get("tuner_type")
		if tuner_type and tuner_type.find("DVB-S") != -1:
			self.cur_orbpos = data.get("orbital_position", INVALID_POSITION)
			if self.cur_orbpos != INVALID_POSITION:
				config.misc.lastrotorposition.setValue(self.cur_orbpos)
				config.misc.lastrotorposition.save()
			self.cur_polar = data.get("polarization", 0)
			self.rotorTimer.start(500, False)

	def __serviceTuneEnd(self):
		# Tune finished (or failed): stop polling and hide the OSD.
		self.rotorTimer.stop()
		if self.__state == self.STATE_SHOWN:
			#self.showTimer.start(25000, True)
			self.hide()

	def configChanged(self, configElement):
		# "off" disables the blinking pixmap entirely; otherwise the pixmap
		# blinks while the rotor is moving.
		self.showdish = configElement.getValue()
		if configElement.getValue() == "off":
			self["Dishpixmap"].setConnect(lambda: False)
		else:
			self["Dishpixmap"].setConnect(eDVBSatelliteEquipmentControl.getInstance().isRotorMoving)

	def DishpixmapVisibilityChanged(self, state):
		if self.showdish == "flashing":
			if state:
				self["Dishpixmap"].show() # show dish picture
			else:
				self["Dishpixmap"].hide() # hide dish picture
		else:
			self["Dishpixmap"].show() # show dish picture

	def getTurnTime(self, start, end, pol=0):
		"""Estimate rotor travel time in seconds between two positions."""
		# NOTE(review): a position of 0 (0.0 deg) is treated like "unknown"
		# here and yields a 0 turn time -- confirm this is intended.
		mrt = abs(start - end) if start and end else 0
		if mrt > 0:
			if (mrt > 1800):
				mrt = 3600 - mrt
			if (mrt % 10):
				mrt += 10
			#mrt = (mrt * 2000) / 10000 + 3 # 0.5° per second
			if pol in (1, 3): # vertical
				mrt = (mrt * 1000) / 10000 + 3 # 1.0° per second
			else: # horizontal
				mrt = (mrt * 667) / 10000 + 3 # 1.5° per second
		return mrt

	def OrbToStr(self, orbpos):
		"""Render an orbital position (tenths of a degree) as e.g. 19.2°E."""
		if orbpos == INVALID_POSITION:
			return "N/A"
		if orbpos > 1800:
			orbpos = 3600 - orbpos
			return "%d.%d°W" % (orbpos/10, orbpos%10)
		return "%d.%d°E" % (orbpos/10, orbpos%10)

	def FormatTurnTime(self, time):
		"""Format a (possibly negative) second count as [- ]HH:MM:SS."""
		t = abs(time)
		return "%s%02d:%02d:%02d" % (time < 0 and "- " or "", t/3600%24, t/60%60, t%60)
| Ophiuchus1312/enigma2-master | lib/python/Screens/Dish.py | Python | gpl-2.0 | 4,880 |
# This file is part of ucgrad, Copyright 2008 David M. Rogers.
#
# ucgrad is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ucgrad is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ucgrad (i.e. frc_solve/COPYING).
# If not, contact the author(s) immediately \at/ wantye \/ gmail.com or
# http://forceSolve.sourceforge.net/. And see http://www.gnu.org/licenses/
# for a copy of the GNU GPL.
from numpy import *
def read_array(name, shape):
    """Read whitespace/comma separated numbers from *name* and reshape.

    (Python 2 code.)  If the file holds more values than *shape* needs,
    the trailing values are used; fewer values is a fatal error.
    """
    list = read_list(name)
    # Total number of elements required by the requested shape.
    pi = 1
    for n in shape:
        pi *= n
    if(len(list) != pi):
        if(len(list) > pi):
            print "Warning! Data file \"%s\" has %d values and " \
                "is incompatible with shape: "%(name, len(list)) + str(shape)
            print "Using last %d values to construct array."%(pi)
            list = list[-pi:] # take end of file
        else:
            raise RuntimeError, "Data file \"%s\" has %d values and " \
                "is incompatible with shape: "%(name, len(list)) + str(shape)
    return reshape(list, shape)
# matrix data, find
def read_matrix(name, sep=" ,\t\n"):
    """Read a 2D numeric matrix from a text file (Python 2 code).

    *name* may be an open file or a path.  The column count is fixed by
    the first data row; a later row with a different width terminates
    the read (its values are discarded).  Returns an (i, cols) array.
    """
    import re
    digits = "01234567890.-+"
    isfile = type(name) == file
    if isfile:
        ifile = name
    else:
        ifile = open(name)
    list = []
    # Any run of separator characters splits tokens.
    sep = re.compile("["+re.escape(sep)+"]*")
    cols = -1
    i = 0
    for line in ifile.xreadlines():
        j = 0
        for tok in sep.split(line):
            if not tok: # Ignore blank generated from line's \n.
                continue
            if tok[0] not in digits:
                break
            list.append(float(tok))
            j += 1
        if(j > 0):
            if(cols == -1):
                cols = j
            elif(j != cols):
                print "Warning! line %d contains %d columns, "\
                    "terminating read and ignoring last line."%(i+1, j)
                #raise ValueError, "Error, line %d contains wrong " \
                #    "number of columns"%(i+1)
                list = list[:-j]
                break
            i += 1
    if(cols == -1):
        raise ValueError, "No data!"
    if not isfile:
        ifile.close()
    return reshape(array(list), (i,cols))
def read_list(name):
    """Read all leading numeric tokens per line into a flat numpy array.

    (Python 2 code.)  *name* may be an open file or a path; a file
    passed in is left open for the caller to close.
    """
    digits = "01234567890.-+"
    isfile = type(name) == file
    if isfile:
        ifile = name
    else:
        ifile = open(name)
    list = []
    for line in ifile.xreadlines():
        for tok in line.split():
            if(tok[0] not in digits): # stop line parsing
                break # at first non-numeric
            list.append(float(tok)) # value
    if not isfile:
        ifile.close()
    return array(list)
def write_graph(name, list, *dims):
    """Dispatch to write_plot (1D) or write_surface (2D) based on rank.

    Extra *dims* arguments are forwarded as the axis (offset, step)
    descriptions.  Raises RuntimeError for arrays of any other rank.
    """
    if len(list.shape) == 1:
        write_plot(name, list, *dims)
    elif len(list.shape) == 2:
        write_surface(name, list, *dims)
    else:
        # BUG FIX: was ``raise runtimeError`` (undefined name -> NameError)
        # and reported len(dims) instead of the array's actual rank.
        raise RuntimeError("Error! %dD graphs not supported." % len(list.shape))
def write_plot(name, list, dim=(0., 1.)):
    """Write a 1D array as "x y" lines; x = dim[0] + (i+0.5)*dim[1].

    (Python 2 code.)  A trailing blank line terminates the data set.
    """
    out = open(name, 'w')
    if(len(list.shape) != 1):
        raise ValueError, "Error: List should be 1D!"
    for i in range(len(list)):
        out.write("%10f %e\n"%(dim[0]+(i+0.5)*dim[1], list[i]))
    out.write('\n')
    out.close()
def write_surface(name, mat, idim=(0., 1.), jdim=(0., 1.)):
    """Write a 2D array as "x y z" triples suitable for gnuplot splot.

    (Python 2 code.)  Rows are separated by blank lines; *idim*/*jdim*
    give (offset, step) for the two axes, sampled at cell centres.
    """
    out = open(name, 'w')
    if(len(mat.shape) != 2):
        raise ValueError, "Error: Matrix should be 2D!"
    for i in range(mat.shape[0]):
        for j in range(mat.shape[1]):
            out.write("%8.3f %8.3f %e\n"%(idim[0]+(i+0.5)*idim[1], \
                jdim[0]+(j+0.5)*jdim[1], mat[i,j]))
        out.write('\n')
    out.write('\n')
    out.close()
def flatten(d, fmt=" %d"):
    """Render every element of *d*, flattened, as one string via *fmt*."""
    values = reshape(d, d.size)
    return "".join([fmt % v for v in values])
def write_array(name, arr, mode='w'):
    """Write an N-D array as text; higher dimensions become blank lines.

    (Python 2 code.)  1D/2D arrays delegate to write_list/write_matrix;
    rank >= 3 recurses over the first axis, appending a blank line after
    each slice to give the data its extra dimension.
    """
    if(len(arr.shape) < 3):
        if(len(arr.shape) == 1):
            write_list(name, arr, mode)
        elif(len(arr.shape) == 2):
            write_matrix(name, arr, mode)
        else:
            print "Warning! zero-size array not written to %s"%(name)
        return 0
    # loop over highest dimension and recursively call myself
    write_array(name, arr[0], mode) # possibly the first call to me
    ofile = open(name, 'a')
    ofile.write("\n") # append a newline to give dimension to the data
    ofile.close()
    for i in range(1, arr.shape[0]):
        write_array(name, arr[i], 'a')
        ofile = open(name, 'a')
        ofile.write("\n")
        ofile.close()
def write_list(name, list, mode='w'):
    """Write a 1D array one value per line (Python 2 code)."""
    out = open(name, mode)
    if(len(list.shape) != 1):
        raise ValueError, "Error: List should be 1D!"
    for i in list:
        out.write(str(i) + '\n')
    out.close()
def write_matrix(name, mat, mode='w'):
    """Write a 2D array one row per line, space separated (Python 2)."""
    out = open(name, mode)
    if(len(mat.shape) != 2):
        raise ValueError, "Error: Matrix should be 2D!"
    for i in range(mat.shape[0]):
        line = ""
        for j in range(mat.shape[1]):
            line += " " + str(mat[i,j])
        out.write(line + '\n')
        del line
    out.close()
# really just here for me to remember HOW-TO
def read_binarray(ifile, shape="linear", type='float'):
    """Read a raw binary dump back into an array, optionally reshaped."""
    data = fromfile(ifile, type)
    if shape == "linear":
        return data
    return reshape(data, shape)
def write_binarray(ofile, x, type='float'):
    """Dump *x* to *ofile* as raw binary after casting to *type*."""
    converted = x.astype(type)
    converted.tofile(ofile)
| frobnitzem/forcesolve | cg_topol/ucgrad/array_io.py | Python | gpl-3.0 | 5,121 |
# Copyright (C) 2010 Canonical
#
# Authors:
# Gary Lasker
# Natalia Bidart
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk, GLib
from softwarecenter.enums import SOFTWARE_CENTER_DEBUG_TABS
class SpinnerView(Gtk.Viewport):
    """A panel that contains a spinner with an optional legend.

    The spinner can be specified in one of two sizes, and defaults to
    the larger size. An optional label_text value can be specified for
    display with the spinner.
    """

    # define spinner size options
    (LARGE,
     SMALL) = range(2)

    def __init__(self, label_text="", spinner_size=LARGE):
        Gtk.Viewport.__init__(self)
        self.spinner = Gtk.Spinner()
        if spinner_size not in (self.SMALL, self.LARGE):
            raise ValueError('The value of spinner_size must be '
                'one of SpinnerView.SMALL or SpinnerView.LARGE')
        # Pixel size of the spinner widget itself.
        if spinner_size == self.LARGE:
            self.spinner.set_size_request(48, 48)
        else:
            self.spinner.set_size_request(24, 24)

        # use a table for the spinner (otherwise the spinner is massive!)
        spinner_table = Gtk.Table(3, 3, False)
        self.spinner_label = Gtk.Label()
        self.spinner_label.set_markup('<big>%s</big>' % label_text)
        spinner_vbox = Gtk.VBox()
        spinner_vbox.pack_start(self.spinner, True, True, 0)
        spinner_vbox.pack_start(self.spinner_label, True, True, 10)
        # Centre the spinner+label in the middle table cell.
        spinner_table.attach(spinner_vbox, 1, 2, 1, 2,
            Gtk.AttachOptions.EXPAND, Gtk.AttachOptions.EXPAND)

        #~ self.modify_bg(Gtk.StateType.NORMAL, Gdk.Color(1.0, 1.0, 1.0))
        self.add(spinner_table)
        self.set_shadow_type(Gtk.ShadowType.NONE)

    def start_and_show(self):
        """Start the spinner and show it."""
        self.spinner.start()
        self.spinner.show()

    def stop_and_hide(self):
        """Stop the spinner and hide it."""
        self.spinner.stop()
        self.spinner.hide()

    def set_text(self, spinner_text=""):
        """Add/remove/change this spinner's label text."""
        self.spinner_label.set_markup('<big>%s</big>' % spinner_text)

    def get_text(self):
        """Return the spinner's currently set label text."""
        return self.spinner_label.get_text()
class SpinnerNotebook(Gtk.Notebook):
    """A two-page Gtk.Notebook holding real content plus a spinner page.

    The spinner page is revealed (after a short grace period) while work
    is in progress and hidden again once the content is ready.
    """

    # Notebook page indices.
    CONTENT_PAGE = 0
    SPINNER_PAGE = 1

    def __init__(self, content, msg="", spinner_size=SpinnerView.LARGE):
        Gtk.Notebook.__init__(self)
        self._last_timeout_id = None
        self.spinner_view = SpinnerView(msg, spinner_size)
        # Showing the spinner view early is critical: otherwise
        # gtk_notebook_set_active_page() cannot switch to it.
        self.spinner_view.show()
        if not SOFTWARE_CENTER_DEBUG_TABS:
            self.set_show_tabs(False)
        self.set_show_border(False)
        self.append_page(content, Gtk.Label("content"))
        self.append_page(self.spinner_view, Gtk.Label("spinner"))

    def _unmask_view_spinner(self):
        """Timeout callback: actually reveal the spinner page."""
        self.spinner_view.start_and_show()
        self.set_current_page(self.SPINNER_PAGE)
        self._last_timeout_id = None
        # One-shot: returning False removes the GLib timeout source.
        return False

    def show_spinner(self, msg=""):
        """Switch to the spinner page, optionally updating its caption."""
        if msg:
            self.spinner_view.set_text(msg)
        if self.get_current_page() != self.CONTENT_PAGE:
            return
        # Delay the switch slightly so short operations never flash the
        # spinner into view.
        self.spinner_view.stop_and_hide()
        self._last_timeout_id = GLib.timeout_add(
            250, self._unmask_view_spinner)

    def hide_spinner(self):
        """Cancel any pending reveal and return to the content page."""
        if self._last_timeout_id is not None:
            GLib.source_remove(self._last_timeout_id)
            self._last_timeout_id = None
        self.spinner_view.stop_and_hide()
        self.set_current_page(self.CONTENT_PAGE)
| ceibal-tatu/software-center | softwarecenter/ui/gtk3/widgets/spinner.py | Python | lgpl-3.0 | 4,857 |
#!/usr/bin/env python
from astrodata.AstroDataType import *
from optparse import OptionParser
from astrodata import Descriptors as ds
import os
import re
try:
import pydot
from pydot import *
except:
print "couldn't import pydot, type graphs will not be drawn"
sys.exit(0)
# Shared classification library holding every known AstroData type.
cl = get_classification_library()
# print repr(cl.typesDict)

#FROM COMMANDLINE WHEN READY
parser = OptionParser()
parser.add_option("-a", "--assignments", dest = "showAssignments",
    action="store_true",
    default = False, help="Show Primitive and Descriptor Assignments")
(options, args) = parser.parse_args()

# First positional argument (if any) selects a single type to graph.
if len(args)>0:
    astrotype = args[0]
else:
    astrotype = None

import astrodata
import astrodata.RecipeManager as rm
def createEdges(typ, parents=False, children=False):
    """Build pydot Edges linking *typ* to its parent and/or child types.

    Returns a dict keyed "<from>_<to>" so duplicate edges collapse when
    the caller merges several results together.
    """
    edges = {}
    if parents and typ.parentDCO:
        parent_name = typ.parentDCO.name
        edges[parent_name + "_" + typ.name] = Edge(parent_name, typ.name)
    if children and typ.childDCOs:
        for child in typ.childDCOs:
            edges[typ.name + "_" + child.name] = Edge(typ.name, child.name)
    return edges
# Global indexes: primitive sets and descriptor calculators keyed by type.
psdict = rm.centralPrimitivesIndex
descdict = ds.centralCalculatorIndex

if (astrotype):
    typeobjs = [astrotype]
    displaytype = astrotype
else:
    displaytype = "GEMINI"
    typeobjs = cl.get_available_types();

lasttyp = None
lastnode = None

#nodes and annotations
# One graph per requested type name: nodes for the type tree, optional
# primitive-set / descriptor annotation nodes, then the edges.
for typename in args:
    typeobj = cl.get_type_obj(typename)
    ndict = {}
    edict = {}
    adict = {} # contains node list keyed by type
    ddict = {} # contains node list keyed by type
    postfix = ""
    if options.showAssignments:
        print "Creating type tree graph with assignments for ...", typename
    else:
        print "Creating type tree graph for ...", typename
    for typ in typeobj.walk():
        if options.showAssignments:
            postfix = "-pd"
        # just always link to the assignment charts
        root4url = typ.name.split("_")[0]
        tip = re.sub(r".*?ADCONFIG_", "ADCONFIG_", typ.fullpath)
        node = Node(typ.name, shape="house",
                URL = root4url + "-tree%s.svg"%"-pd", # hard coded to link to assignment graphs
                tooltip = tip
                )
        ndict.update({typ.name: node})
        if options.showAssignments:
            # Primitive sets attached to this type become one record node.
            if typ.name in psdict:
                anodes = []
                labels = []
                i = 0
                for cfg in psdict[typ.name]:
                    labels.append("<f%d>" % i + cfg[0])
                    i += 1
                    #anode = Node(cfg[0], shape="box")
                    #anodes.append(anode)
                label = "{"+"|".join(labels)+"}"
                tnode = Node(typ.name+"_PrimSet",
                        shape="record",
                        label = label,
                        fontsize = "10")
                anodes.append(tnode)
                adict.update({typ:anodes})
            # Descriptor calculators become a single filled box node.
            if typ.name in descdict:
                anodes = []
                cfg = descdict[typ.name]
                nam = typ.name+"_Descriptor"
                anode = Node(nam, shape="box",
                        label= nam,
                        fontsize="10",
                        fillcolor = "#f0a0a0",
                        style="filled"
                        )
                anodes.append(anode)
                ddict.update({typ:anodes})

    # edges
    for typ in typeobj.walk(style="children"):
        es = createEdges(typ, children=True)
        edict.update(es)
    for typ in typeobj.walk(style="parent"):
        es = createEdges(typ, parents = True)
        edict.update(es)

    # create graph
    graph = Dot(typ.name+"_Type_Graph", graph_type="digraph", )
    for n in ndict:
        graph.add_node(ndict[n])
    for e in edict:
        graph.add_edge(edict[e])

    # do primitive set dict
    for typ in adict:
        subg = Cluster(typ.name,
                bgcolor = "#e0e0ff",
                label=typ.name+" PrimSet",
                fontsize="9")
        for an in adict[typ]:
            subg.add_node(an)
        graph.add_subgraph(subg)
        anedge = Edge(adict[typ][0], ndict[typ.name],
                lhead=subg.get_name(),
                style="dotted")
        graph.add_edge(anedge)

    # do descriptor set dict
    for typ in ddict:
        # subg = Cluster(typ.name,bgcolor = "#a02070",label="")
        for an in ddict[typ]:
            graph.add_node(an)
            # subg.add_node(an)
        #graph.add_subgraph(subg)
        anedge = Edge(ddict[typ][0], ndict[typ.name],
                style="dashed",)
        graph.add_edge(anedge)

    outbase = typename+"-tree"+postfix
    graph.write_svg(outbase+".broke.svg")
    graph.write_png(outbase+".png")
    graph.write_dot(outbase+".dot")

    # fix svg: graphviz emits unitless font-size values; append "px" so
    # browsers render the text at the intended size.
    svg = file(outbase+".broke.svg")
    nsvg = file(outbase+".svg", mode="w")
    for line in svg.readlines():
        nline = re.sub(r'font-size:(.*?);', r'font-size:\1px;', line)
        # print "tl137:", line, "--->", line
        nsvg.write(nline)
    svg.close()
    nsvg.close()

# Disabled: open the generated SVG in the default browser.
if False:# a = cl.gviz_doc(astrotype= astrotype, writeout = True, assign_dict = assdict)
    import webbrowser
    # url = "file://"+os.path.join(os.path.abspath("."), "gemdtype.viz.svg")
    url = "file://"+os.path.join(os.path.abspath("."), displaytype +"-tree.svg")
    webbrowser.open(url);
| pyrrho314/recipesystem | trunk/astrodata/scripts/typelib.py | Python | mpl-2.0 | 5,571 |
from queue import Empty
from .exceptions.processor_errors import KernelClientStartingError, ExecutionTimeoutError
from .results_processor import ResultsProcessor
class ClientWrapper:
    """Wrapper for a Jupyter kernel client.

    Starts the client's channels on construction and provides execute(),
    which runs code on the kernel and collects its outputs.
    """

    def __init__(self, client, language, document_language, execution_timeout):
        """Initializes and starts client; raises KernelClientStartingError
        (naming *language*) if the kernel never becomes ready."""
        self.__client = client
        self.__execution_timeout = execution_timeout
        self.__doc_lang = document_language
        self.__client.start_channels()

        try:
            self.__client.wait_for_ready()
        except RuntimeError:
            # Release the channels before surfacing the failure.
            self.__client.stop_channels()
            raise KernelClientStartingError(language)

    def execute(self, code, processing_manager, output_types, execution_timeout=None, allow_errors=False):
        """Executes code on the kernel and returns the processed results.

        Raises ExecutionTimeoutError if either the SHELL reply or the
        IOPUB output stream does not arrive within the timeout.
        """
        # A per-call timeout overrides the default set at construction.
        timeout = execution_timeout if execution_timeout is not None else self.__execution_timeout

        request_id = self.__client.execute(code, allow_stdin=False)

        # Processing SHELL messages: wait for our execute_reply.
        try:
            while True:
                msg = self.__client.get_shell_msg(timeout=timeout)

                # Ignore 'Execute requests'
                if msg['msg_type'] != 'execute_reply':
                    continue

                # Ignore answers to other messages
                if msg['parent_header']['msg_id'] != request_id:
                    continue

                status = msg['content']['status']

                # Go to next step if status OK
                if status in ['ok', 'error']:
                    break

                # TODO: ?
                print('SHELL - ABORT!')
        except Empty:
            raise ExecutionTimeoutError(code)

        output = ResultsProcessor(allow_errors, output_types, processing_manager)

        # Processing IOPUB messages: collect outputs until the kernel
        # reports it has gone idle for our request.
        try:
            while True:
                msg = self.__client.get_iopub_msg(timeout=timeout)
                msg_type = msg['msg_type']
                parent_id = msg['parent_header']['msg_id']

                # Ignore answers to other requests
                if parent_id != request_id:
                    continue

                # End of calculations
                if msg_type == 'status' and msg['content']['execution_state'] == 'idle':
                    break

                if msg_type == 'error':
                    content = msg['content']
                    output.process_error(content['ename'], content['evalue'], content['traceback'])
                    continue

                if msg_type == 'stream':
                    output.process_stream(msg['content']['text'], msg['content']['name'])
                    continue

                if msg_type in ['display_data', 'execute_result']:
                    data = msg['content']['data']
                    # One message may carry several MIME representations.
                    for key, value in data.items():
                        output.process_data(key, value)
                    continue
        except Empty:
            raise ExecutionTimeoutError(code)

        return output.get_result()
| jablonskim/jupyweave | jupyweave/client_wrapper.py | Python | mit | 3,082 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from chain.core import resources
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^', include(resources.urls)),
# Examples:
# url(r'^$', 'chain.views.home', name='home'),
# url(r'^chain/', include('chain.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns('',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
handler404 = 'chain.core.api.handle404'
handler500 = 'chain.core.api.handle500'
| ResEnv/chain-api | chain/urls.py | Python | mit | 829 |
__author__ = 'abird' | ooici/coi-services | ion/services/eoi/__init__.py | Python | bsd-2-clause | 20 |
import os
import sys
import logging
import asyncio
import hashlib
import json
import re
import aiohttp
import cssutils
import yarl
from bs4 import BeautifulSoup
from asyncio import Queue
from collections import defaultdict
animation = "|/-\\"  # console spinner frames cycled while cloning
class Cloner(object):
    """Asynchronously mirrors a web site into SNARE's page store.

    Crawls from *root* up to *max_depth*, rewrites links to be relative,
    and saves each response body under an MD5-derived filename together
    with a meta.json describing the cloned pages and their headers.
    """

    def __init__(self, root, max_depth, css_validate, default_path="/opt/snare"):
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.DEBUG)
        self.visited_urls = []
        self.root, self.error_page = self.add_scheme(root)
        self.max_depth = max_depth
        # Set when the target redirects to a different host (see
        # get_root_host); later links then resolve against that host.
        self.moved_root = None
        self.default_path = default_path
        if (self.root.host is None) or (len(self.root.host) < 4):
            # NOTE(review): builtin exit() is used here while sys.exit()
            # is used above in this module -- confirm intentional.
            sys.exit("invalid target {}".format(self.root.host))
        self.target_path = "{}/pages/{}".format(self.default_path, self.root.host)

        if not os.path.exists(self.target_path):
            os.makedirs(self.target_path)
        self.css_validate = css_validate
        self.new_urls = Queue()
        # meta.json payload: per-file hash and response headers.
        self.meta = defaultdict(dict)

        self.counter = 0
        self.itr = 0

    @staticmethod
    def add_scheme(url):
        """Default to http:// and derive the 404-probe URL for the host."""
        new_url = yarl.URL(url)
        if not new_url.scheme:
            new_url = yarl.URL("http://" + url)
        err_url = new_url.with_path("/status_404").with_query(None).with_fragment(None)
        return new_url, err_url

    @staticmethod
    def get_headers(response):
        """Return response headers minus hop-by-hop/caching noise."""
        ignored_headers_lowercase = [
            "age",
            "cache-control",
            "connection",
            "content-encoding",
            "content-length",
            "date",
            "etag",
            "expires",
            "x-cache",
        ]
        headers = []
        for key, value in response.headers.items():
            if key.lower() not in ignored_headers_lowercase:
                headers.append({key: value})
        return headers

    async def process_link(self, url, level, check_host=False):
        """Queue *url* for crawling (depth permitting); return its
        relative form for substitution into the page, or None to skip."""
        try:
            url = yarl.URL(url)
        except UnicodeError:
            return None
        # Leave non-fetchable schemes untouched in the page.
        if url.scheme in ["data", "javascript", "file"]:
            return url.human_repr()
        if not url.is_absolute():
            if self.moved_root is None:
                url = self.root.join(url)
            else:
                url = self.moved_root.join(url)

        host = url.host

        if check_host:
            # Skip off-site links and pure fragment links.
            if (
                (host != self.root.host and self.moved_root is None)
                or url.fragment
                or (self.moved_root is not None and host != self.moved_root.host)
            ):
                return None
        if url.human_repr() not in self.visited_urls and (level + 1) <= self.max_depth:
            await self.new_urls.put((url, level + 1))

        res = None
        try:
            res = url.relative().human_repr()
        except ValueError:
            self.logger.error("ValueError while processing the %s link", url)
        return res

    async def replace_links(self, data, level):
        """Rewrite href/src/action URLs in an HTML page to relative form."""
        soup = BeautifulSoup(data, "html.parser")

        # find all relative links
        for link in soup.findAll(href=True):
            res = await self.process_link(link["href"], level, check_host=True)
            if res is not None:
                link["href"] = res

        # find all images and scripts
        for elem in soup.findAll(src=True):
            res = await self.process_link(elem["src"], level)
            if res is not None:
                elem["src"] = res

        # find all action elements
        for act_link in soup.findAll(action=True):
            res = await self.process_link(act_link["action"], level)
            if res is not None:
                act_link["action"] = res

        # prevent redirects
        for redir in soup.findAll(True, attrs={"name": re.compile("redirect.*")}):
            if redir["value"] != "":
                redir["value"] = yarl.URL(redir["value"]).relative().human_repr()
        return soup

    def _make_filename(self, url):
        """Map *url* to (display file name, md5 hash used on disk)."""
        host = url.host
        if url.is_absolute():
            file_name = url.relative().human_repr()
        else:
            file_name = url.human_repr()
        if not file_name.startswith("/"):
            file_name = "/" + file_name

        if file_name == "/" or file_name == "":
            if host == self.root.host or self.moved_root is not None and self.moved_root.host == host:
                file_name = "/index.html"
            else:
                file_name = host
        m = hashlib.md5()
        m.update(file_name.encode("utf-8"))
        hash_name = m.hexdigest()
        return file_name, hash_name

    async def get_body(self, session):
        """Drain the URL queue: fetch, rewrite and store each resource."""
        while not self.new_urls.empty():
            # Console progress spinner.
            print(animation[self.itr % len(animation)], end="\r")
            self.itr = self.itr + 1
            current_url, level = await self.new_urls.get()
            if current_url.human_repr() in self.visited_urls:
                continue
            self.visited_urls.append(current_url.human_repr())
            file_name, hash_name = self._make_filename(current_url)
            self.logger.debug("Cloned file: %s", file_name)
            data = None
            content_type = None
            try:
                response = await session.get(current_url, headers={"Accept": "text/html"}, timeout=10.0)
                headers = self.get_headers(response)
                content_type = response.content_type
                data = await response.read()
            except (aiohttp.ClientError, asyncio.TimeoutError) as client_error:
                self.logger.error(client_error)
            else:
                await response.release()
            if data is not None:
                self.meta[file_name]["hash"] = hash_name
                self.meta[file_name]["headers"] = headers
                self.counter = self.counter + 1

                if content_type == "text/html":
                    soup = await self.replace_links(data, level)
                    data = str(soup).encode()
                elif content_type == "text/css":
                    # Queue any URLs referenced from the stylesheet too.
                    css = cssutils.parseString(data, validate=self.css_validate)
                    for carved_url in cssutils.getUrls(css):
                        if carved_url.startswith("data"):
                            continue
                        carved_url = yarl.URL(carved_url)
                        if not carved_url.is_absolute():
                            carved_url = self.root.join(carved_url)
                        if carved_url.human_repr() not in self.visited_urls:
                            await self.new_urls.put((carved_url, level + 1))

                with open(os.path.join(self.target_path, hash_name), "wb") as index_fh:
                    index_fh.write(data)

    async def get_root_host(self):
        """Detect an initial redirect so links resolve against the real host."""
        try:
            async with aiohttp.ClientSession() as session:
                resp = await session.get(self.root)
                if resp.host != self.root.host:
                    self.moved_root = resp.url
                resp.close()
        except aiohttp.ClientError as err:
            self.logger.error("Can't connect to target host: %s", err)
            exit(-1)

    async def run(self):
        """Seed the queue with the root and 404 pages, crawl, dump meta.json."""
        session = aiohttp.ClientSession()
        try:
            await self.new_urls.put((self.root, 0))
            await self.new_urls.put((self.error_page, 0))
            await self.get_body(session)
        except KeyboardInterrupt:
            raise
        finally:
            # Always persist what was cloned so far, even on interrupt.
            with open(os.path.join(self.target_path, "meta.json"), "w") as mj:
                json.dump(self.meta, mj)
            await session.close()
| mushorg/snare | snare/cloner.py | Python | gpl-3.0 | 7,680 |
from __future__ import unicode_literals
from .helpers import (
compose,
non_string,
format_tags,
single_result,
language_codes,
element_to_dict,
oai_process_uris,
build_properties,
datetime_formatter,
doe_process_contributors,
oai_process_contributors,
dif_process_contributors
)
# Harvest schema for DOE (OSTI) records: each entry maps an output field to an
# XPath expression plus a transform pipeline (applied right-to-left by compose).
DOESCHEMA = {
    "description": ('//dc:description/node()', compose(lambda x: x.strip(), single_result)),
    # Creators arrive as one ';'-separated string; split before normalizing.
    "contributors": ('//dc:creator/node()', compose(doe_process_contributors, lambda x: x.split(';'), single_result)),
    "title": ('//dc:title/node()', compose(lambda x: x.strip(), single_result)),
    "providerUpdatedDateTime": ('//dc:dateEntry/node()', compose(datetime_formatter, single_result)),
    "uris": {
        "canonicalUri": ('//dcq:identifier-citation/node()', compose(lambda x: x.strip(), single_result)),
        # DOIs are turned into resolvable dx.doi.org URLs.
        "objectUris": [('//dc:doi/node()', compose(lambda x: 'http://dx.doi.org/' + x, single_result))]
    },
    "languages": ("//dc:language/text()", language_codes),
    "publisher": {
        "name": ("//dcq:publisher/node()", single_result)
    },
    "sponsorships": [{
        "sponsor": {
            "sponsorName": ("//dcq:publisherSponsor/node()", single_result)
        }
    }],
    # Everything else is preserved verbatim under otherProperties.
    "otherProperties": build_properties(
        ('coverage', '//dc:coverage/node()'),
        ('date', '//dc:date/node()'),
        ('format', '//dc:format/node()'),
        ('identifier', '//dc:identifier/node()'),
        ('identifierDOEcontract', '//dcq:identifierDOEcontract/node()'),
        ('identifierOther', '//dc:identifierOther/node()'),
        ('identifier-purl', '//dc:identifier-purl/node()'),
        ('identifierReport', '//dc:identifierReport/node()'),
        ('publisherAvailability', '//dcq:publisherAvailability/node()'),
        ('publisherCountry', '//dcq:publisherCountry/node()'),
        ('publisherResearch', '//dcq:publisherResearch/node()'),
        ('relation', '//dc:relation/node()'),
        ('rights', '//dc:rights/node()'),
        ('type', '//dc:type/node()'),
        ('typeQualifier', '//dc:typeQualifier/node()')
    )
}
# Harvest schema for generic OAI-PMH Dublin Core records.
OAISCHEMA = {
    # Two XPaths feed one processor: creators plus contributors.
    "contributors": ('//dc:creator/node()', '//dc:contributor/node()', oai_process_contributors),
    "uris": ('//dc:doi/node()', '//dc:identifier/node()', oai_process_uris),
    'providerUpdatedDateTime': ('//ns0:header/ns0:datestamp/node()', compose(datetime_formatter, single_result)),
    'title': ('//dc:title/node()', single_result),
    'description': ('//dc:description/node()', single_result),
    'subjects': ('//dc:subject/node()', format_tags),
    'publisher': {
        'name': ('//dc:publisher/node()', single_result)
    },
    'languages': ('//dc:language/text()', language_codes)
}
# Harvest schema for DIF (Directory Interchange Format) records.
DIFSCHEMA = {
    "abstract": ('//dif:Summary/dif:Abstract/node()', single_result),
    "uris": ('//dif:URL/node()', oai_process_uris),
    "title": ('//dif:Entry_Title/node()', single_result),
    'providerUpdatedDateTime': ('//OAI-PMH:header/OAI-PMH:datestamp/node()', compose(datetime_formatter, single_result)),
    # First and last names come from separate XPaths and are zipped together.
    "contributors": ('//dif:Personnel/dif:First_Name/node()', '//dif:Personnel/dif:Last_Name/node()', dif_process_contributors),
    "otherProperties": build_properties(
        ('metadataName', '//dif:Metadata_Name/node()'),
        ('metadataVersion', '//dif:Metadata_Version/node()'),
        ('lastDIFRevisionDate', '//dif:Last_DIF_Revision_Date/node()'),
        # Keep only element nodes (drop bare strings), then dict-ify each.
        ('dataCenter', ('//dif:Data_Center/node()', compose(
            list,
            lambda x: map(element_to_dict, x),
            lambda x: filter(non_string, x)
        ))),
        ('relatedUrl', ('//dif:Related_URL/node()', compose(
            list,
            lambda x: map(element_to_dict, x),
            lambda x: filter(non_string, x)
        ))),
    )
}
| CenterForOpenScience/scrapi | scrapi/base/schemas.py | Python | apache-2.0 | 3,782 |
from . import AppCase
from app.models.comment import Comment
class CommentTest(AppCase):
    """Tests for Comment persistence and the cleanup its deletion triggers."""
    def setUp(self):
        """Create a user, an issue, and one saved comment to exercise."""
        self.setup_app()
        self.create_user()
        self.create_issue()
        self.comment = Comment(
            body='foo bar',
            issue=self.test_issue,
            author=self.test_user
        )
        self.comment.save()
    def tearDown(self):
        """Drop the test databases created in setUp."""
        self.teardown_dbs()
    def test_pre_delete(self):
        """Deleting a comment must remove it from the user's references and
        the issue's comment list (exercising the pre-delete hook)."""
        self.test_user.references.append(self.comment)
        self.test_user.save()
        self.test_issue.comments.append(self.comment)
        self.test_issue.save()
        self.comment.mentions.append(self.test_user)
        self.comment.save()
        # Sanity: one comment, referenced from both sides.
        self.assertEqual(Comment.objects.count(), 1)
        self.assertEqual(len(self.test_user.references), 1)
        self.assertEqual(len(self.test_issue.comments), 1)
        self.comment.delete()
        # After delete, every back-reference must be gone too.
        self.assertEqual(Comment.objects.count(), 0)
        self.assertEqual(len(self.test_user.references), 0)
        self.assertEqual(len(self.test_issue.comments), 0)
| publicscience/hive | app/tests/comment_test.py | Python | mit | 1,086 |
# -*- coding: utf-8 -*-
"""
Author: @gabvaztor
StartDate: 04/03/2017
With this class you can pre-process your data. For example,
you can add statistical information or order your data.
Style: "Google Python Style Guide"
https://google.github.io/styleguide/pyguide.html
"""
"""
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
# IMPORTS
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
"""
# --------------------------------------------------------------------------
"""Reader class
"""
from TFBoost.TFReader import Reader
# --------------------------------------------------------------------------
class DataMining(Reader):
    # TODO DOCS
    """DataMining Class

    With this class you can add a lot of information to your data. For example,
    you can add statistical information, order your data, reducing noise and others.
    To access DataMining class you have to create a Reader object before.

    Attributes:
        reader (obj:'Reader'): Reader Object. This attribute contains the sets to manipulate.
        chooses (:obj:'Strings List', optional): Contains how data will be manipulated
    """
    # TODO Defining Chooses
    # Class-level fallbacks; instances get their own copies in __init__.
    reader = None
    chooses = []

    def __init__(self, reader, chooses=None):
        """Initialize from a Reader and an optional list of manipulation chooses.

        Args:
            reader: Reader object whose sets will be manipulated.
            chooses: optional list describing how the data will be manipulated.
        """
        if reader:
            self.reader = reader
        if chooses:
            # Copy into an instance attribute: assigning the caller's list (or
            # mutating the shared class-level default) would leak state
            # between instances.
            self.chooses = list(chooses)
        self.manipulate()

    def manipulate(self):
        """Apply the configured manipulations and return the reader.

        :return: the Reader object held by this instance.
        """
        return self.reader
# Manual smoke-test entry point.
if __name__ == '__main__':
    print ("Creating DataMining")
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
# This is a template config file for web-platform-tests test.
import os
import sys
# Windows buildbot configuration for running web-platform-tests via mozharness.
config = {
    # test harness options are located in the gecko tree
    "in_tree_config": "config/mozharness/web_platform_tests_config.py",
    # Absolute/Windows paths for every executable the harness shells out to.
    "exes": {
        'python': sys.executable,
        'virtualenv': [sys.executable, 'c:/mozilla-build/buildbotve/virtualenv.py'],
        'hg': 'c:/mozilla-build/hg/hg',
        'mozinstall': ['%s/build/venv/scripts/python' % os.getcwd(),
                       '%s/build/venv/scripts/mozinstall-script.py' % os.getcwd()],
        'tooltool.py': [sys.executable, 'C:/mozilla-build/tooltool.py'],
    },
    "options": [],
    # Internal pip mirrors; external PyPI is not used (pip_index False).
    "find_links": [
        "http://pypi.pvt.build.mozilla.org/pub",
        "http://pypi.pub.build.mozilla.org/pub",
    ],
    "pip_index": False,
    "buildbot_json_path": "buildprops.json",
    "default_blob_upload_servers": [
        "https://blobupload.elasticbeanstalk.com",
    ],
    "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
    "download_minidump_stackwalk": True,
    "tooltool_servers": ["http://runtime-binaries.pvt.build.mozilla.org/tooltool/"],
}
| walac/build-mozharness | configs/web_platform_tests/prod_config_windows.py | Python | mpl-2.0 | 1,460 |
#!/usr/bin/python
#
# Copyright 2008 Qumranet, Inc. All rights reserved.
# Use is subject to license terms.
#
import sys, getopt
import os
import subprocess
import logging, logging.config
import traceback
import string
import random
import re
# Platforms eligible for the regular (RPM-based) VDSM bootstrap install.
SUPPORTED_PLATFORMS = [ "RedHatEnterpriseServer", "Fedora" ]
# oVirt-node style hypervisors take the registration flow instead of an install.
HYPERVISOR_PLATFORMS = [ "RedHatEnterpriseVirtualizationHypervisor", "RedHatEnterpriseHypervisor", "oVirtNodeHypervisor" ]
HYPERVISOR_RELEASE_FILE = '/etc/rhev-hypervisor-release'
REDHAT_RELEASE_FILE = '/etc/redhat-release'
# Config file rewritten by update_and_restart_vdsm_reg() on hypervisor hosts.
vdsm_reg_conf_file = '/etc/vdsm-reg/vdsm-reg.conf'
def printNlog(s):
    """Echo *s* to stdout (which carries the machine-readable <BSTRAP/> status
    lines) and mirror it to the debug log."""
    print s
    logging.debug(s)
# Per-run random suffix keeps concurrent installer log files apart.
rnum = random.randint(100,1000000).__repr__()
log_filename = '/tmp/vds_installer.'+rnum+'.log'
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(levelname)-8s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    filename=log_filename,
                    filemode='w')
# "curl -o /tmp/vds_bootstrap_{GUID}.py {URL1}; chmod +x /tmp/vds_bootstrap_{GUID}.py; /tmp/vds_bootstrap_{GUID}.py {URL2} {vds-server} {GUID}";
# "curl -o /tmp/vds_bootstrap_complete_{GUID}.py {URL1}; chmod +x /tmp/vds_bootstrap_complete_{GUID}.py; /tmp/vds_bootstrap_complete_{GUID}.py {GUID}";
def isScriptValid(module):
    """Return True if *module* can be imported, False (with a logged
    traceback) otherwise.

    Used to sanity-check a freshly downloaded bootstrap script/library.
    """
    try:
        __import__(module)
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt still
        # propagate; any import failure means the downloaded script is bad.
        logging.error(traceback.format_exc())
        return False
    return True
def buildScriptName(random_num, vds_complete):
    """Build the per-run temp file name for the bootstrap script.

    The 'complete' variant gets a distinct name so both stages can coexist
    in /tmp.
    """
    stage = 'complete_' if vds_complete else ''
    return 'vds_bootstrap_' + stage + random_num + '.py'
def releaseFileExists():
    """Locate the platform release file.

    Returns (True, path) for the first of the hypervisor / Red Hat release
    files that exists, otherwise (False, comma-joined list of the paths that
    were checked).
    """
    for candidate in (HYPERVISOR_RELEASE_FILE, REDHAT_RELEASE_FILE):
        if os.path.exists(candidate):
            return True, candidate
    return False, HYPERVISOR_RELEASE_FILE + ", " + REDHAT_RELEASE_FILE
def get_id_line():
    """Return the first line of the release file (newlines stripped), or
    None if no release file exists or it cannot be read."""
    line = ''
    RELEASE_FILE = None
    try:
        fileExists, releaseFile = releaseFileExists()
        RELEASE_FILE = releaseFile
        if (fileExists):
            # BUG FIX: the old code ended with a bare 'release.close' (no
            # parentheses), which never actually closed the file handle.
            # 'with' guarantees the close even if readline() raises.
            with open(releaseFile, "r") as release:
                line = release.readline()
            line = line.replace ("\n", "")
            logging.debug("get_id_line: read line %s.", line)
        else:
            line = None
            message = "Failed to find the release file(s): " + releaseFile
            logging.error(message)
    except Exception:
        # Narrowed from a bare 'except:' so interpreter-exit signals propagate.
        line = None
        message = "Failed to read release file: " + str(RELEASE_FILE)
        logging.error(message + "\n" + traceback.format_exc())
    return line
def lsb_release():
    """Derive a short platform ID from the release file, lsb_release-style:
    drop the word 'Linux', truncate from 'relea' onward, then remove spaces.

    Returns None when the release line could not be read.
    """
    line = get_id_line()
    logging.debug("lsb_release: input line %s.", line)
    if line is not None:
        for pattern in (r' [Ll][Ii][Nn][Uu][Xx]', r'relea.*', r' '):
            line = re.sub(pattern, '', line)
    logging.debug("lsb_release: return: %s.", line)
    return line
def testPlatform():
    ''' testPlatform evaluates the platform version and returns
        0 - platform is eligible for installation
        1 - platform is ovirt-node
        2 - platform is not eligible for installation
    '''
    fReturn = 0
    st = "OK"
    message = "Test platform succeeded"
    component = "INSTALLER"
    try:
        res = lsb_release()
        if res is None:
            fReturn = 2
            message = 'Unable to calculate platform ID'
            logging.error(message)
            st = "FAIL"
        elif res in HYPERVISOR_PLATFORMS:
            # Hypervisor hosts are registered, not installed.
            fReturn = 1
            component = "RHEV_INSTALL"
            message = "oVirt Node DETECTED"
            logging.debug(message)
            st = "OK"
        elif res not in SUPPORTED_PLATFORMS:
            fReturn = 2
            message = "Unsupported platform: %s" % res
            logging.error(message)
            st = "FAIL"
    except:
        # Any unexpected failure is reported as FAIL below rather than crashing.
        fReturn = 2
        message = "Failed to test platform compatibility"
        logging.error(message + "\n" + traceback.format_exc())
        st = "FAIL"
    # Always emit a machine-readable status line for the engine to parse.
    printNlog("<BSTRAP component='%s' status='%s' message='%s'/>" % (component,st, message))
    sys.stdout.flush()
    return fReturn
def downloadBootstrap(url_bs, random_num, vds_complete):
    """ -- Download vds bootstrap scripts

    Fetches (via curl) the bootstrap script for the requested stage -- and, in
    the first stage, the deployUtil.py helper library -- into /tmp, validates
    each by importing it, and returns the path of the downloaded script or
    None on any failure.  Status is reported through <BSTRAP/> lines.
    """
    install_script = None
    install_lib = None
    st = 'FAIL'
    try:
        if not vds_complete:
            script_name = 'vds_bootstrap.py'
            install_lib = 'deployUtil.py'
        else:
            script_name = 'vds_bootstrap_complete.py'
        # check whether url ends with '/'
        if url_bs[-1] != '/':
            url_bs = url_bs + '/'
        src_url = url_bs + script_name
        tmp_script_name = buildScriptName(random_num, vds_complete)
        trg_script = "/tmp/%s"%(tmp_script_name)
        # NOTE(review): os.path.exists(install_lib) checks a CWD-relative path
        # while the download target is /tmp -- confirm this is intentional.
        if install_lib is not None and not os.path.exists(install_lib):
            src_lib_url = url_bs + install_lib
            trg_lib = "/tmp/%s"%(install_lib)
            # -w %{http_code} makes curl print the HTTP status on stdout.
            execfn = ["/usr/bin/curl","-s", "-k", "-w", "%{http_code}", "-o", trg_lib, src_lib_url]
            logging.debug("trying to fetch %s script cmd = '%s'",install_lib, string.join(execfn, " "))
            code = subprocess.Popen(execfn, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
            if code == '200':
                # Validate the download by importing it.
                if isScriptValid(os.path.splitext(install_lib)[0]):
                    st = 'OK'
                    message = "%s download succeeded"%(install_lib)
                else:
                    st = 'FAIL'
                    message = "%s download failed. Pathname could not be resolved (verify component web site path)."%(install_lib)
            else:
                st = 'FAIL'
                message = "%s download failed. Pathname could not be resolved (verify computer/domain name)."%(install_lib)
        else:
            st = 'OK'
            message = "Install library already exists"
        printNlog("<BSTRAP component='INSTALLER LIB' status='%s' message='%s'/>"%(st, message))
        sys.stdout.flush()
        if st != 'OK':
            return install_script
        if not os.path.exists(trg_script):
            execfn = ["/usr/bin/curl","-s","-k", "-w", "%{http_code}", "-o", trg_script, src_url]
            logging.debug("trying to fetch %s script cmd = '%s'",script_name, string.join(execfn, " "))
            code = subprocess.Popen(execfn, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0]
            if code == '200':
                if isScriptValid(os.path.splitext(tmp_script_name)[0]):
                    st = 'OK'
                    message = "%s download succeeded"%(script_name)
                    install_script = trg_script
                else:
                    st = 'FAIL'
                    message = "%s download failed. Pathname could not be resolved (verify component web site path)."%(script_name)
                    install_script = None
            else:
                st = 'FAIL'
                message = "%s download failed. Pathname could not be resolved (verify computer/domain name)."%(script_name)
                install_script = None
        else:
            st = 'OK'
            message = "%s already exist"%(script_name)
            install_script = trg_script
        # Make the downloaded script executable before reporting final status.
        subprocess.Popen(["/bin/chmod","+x", trg_script], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
        printNlog("<BSTRAP component='INSTALLER' status='%s' message='%s'/>" % (st, message))
        sys.stdout.flush()
    except:
        install_script = None
        logging.error(traceback.format_exc())
    return install_script
def runInstaller(remote_nfs, orgName, systime, ncport, usevdcrepo, vds_config_str, url_rpm, vds_server, random_num, script, vds_complete, firewall_rules_file):
    """ -- Run VDS bootstrap scripts

    Builds the command line for the downloaded bootstrap script (first stage
    takes the full option set; the 'complete' stage only an optional -c
    config string) and runs it, blocking until it finishes.
    """
    try:
        if os.path.exists(script):
            if not vds_complete:
                # First stage: pass through every optional flag that was given.
                execfn = [script]
                if remote_nfs:
                    execfn += ["-m", remote_nfs]
                if orgName:
                    execfn += ["-O", orgName]
                if systime:
                    execfn += ["-t", systime]
                if ncport:
                    execfn += ["-n", ncport]
                if usevdcrepo:
                    execfn += ["-u", str(usevdcrepo)]
                if firewall_rules_file:
                    execfn += ["-f", firewall_rules_file]
                execfn += [url_rpm, vds_server, random_num]
            else:
                # Completion stage: only the optional config string.
                if vds_config_str:
                    execfn = [script, "-c", vds_config_str, random_num]
                else:
                    execfn = [script, random_num]
            logging.debug("trying to run %s script cmd = '%s'",script, string.join(execfn, " "))
            subprocess.Popen(execfn).communicate()
        else:
            logging.debug("script %s doen not exist",script)
    except:
        logging.error(traceback.format_exc())
def process_ovirt_platform(url_bs, engine_port, random_num, systime ):
    """ update vdsm-reg.conf and restart vdsm-reg service

    Converts the engine-supplied timestamp into an epoch 'ticket', then
    reconfigures and restarts the registration service.  Returns the result
    of update_and_restart_vdsm_reg(), 1 on a bad timestamp, or False.
    """
    import time
    import calendar
    # Pull down deployUtil.py (stage-one download) before touching the config.
    downloadBootstrap(url_bs, random_num, False)
    return_value = False
    ticket = None
    try:
        time_struct = time.strptime(systime, '%Y-%m-%dT%H:%M:%S')
        ticket = calendar.timegm(time_struct)
    # NOTE: Python-2-only 'except X, e' syntax -- this file predates Python 3.
    except ValueError, ex:
        logging.debug("setHostTime: Failed to parse ENGINE time. message= " + str(ex))
        return 1
    if ticket is not None:
        return_value = update_and_restart_vdsm_reg(url_bs, engine_port, ticket)
    return return_value
def update_and_restart_vdsm_reg(url_bs, engine_port, ticket):
    """Rewrite vdsm-reg.conf with the engine host/port and ticket, persist it,
    install the engine's SSH key, and restart the vdsm-reg service.

    Returns True on success, False on any failure.
    """
    from urlparse import urlparse
    try:
        import deployUtil
    except:
        printNlog("<BSTRAP component='INIT' status='FAIL' message='Error trying to deploy library.'/>")
        logging.error(traceback.format_exc())
        return False
    return_value = False
    if not os.path.exists(vdsm_reg_conf_file):
        message = "Error trying to configure registration service."
        printNlog("<BSTRAP component='UPDATE_VDSM_REG_CONF' status='%s' message='%s'/>" % (message) )
        logging.debug("file %s does not exist", vdsm_reg_conf_file)
    else:
        vdc_url = urlparse(url_bs)
        # Fall back to the port embedded in the bootstrap URL if none given.
        if engine_port is None:
            if vdc_url.port is not None:
                engine_port = str(vdc_url.port)
        if engine_port is not None:
            deployUtil._updateFileLine(vdsm_reg_conf_file, "vdc_host_port", str(engine_port), True)
        deployUtil._updateFileLine(vdsm_reg_conf_file, "vdc_host_name", str(vdc_url.hostname), True)
        deployUtil._updateFileLine(vdsm_reg_conf_file, "ticket", str(ticket), True)
        # Persist the change across reboots on the (stateless) hypervisor.
        deployUtil.ovirtfunctions.ovirt_store_config(vdsm_reg_conf_file)
        if handle_ssh_key(vdc_url.hostname, str(engine_port)):
            out, err, return_code = deployUtil.setService('vdsm-reg', 'restart')
        else:
            return_code = None
        # setService's return code of 0 (falsy) means the restart succeeded.
        if not return_code:
            return_value = True
    return return_value
def handle_ssh_key(host, port):
    """Fetch the management server's public key and install it locally,
    reporting the outcome as a <BSTRAP/> status line.  Returns True when
    the key was installed."""
    import deployUtil
    key = deployUtil.getAuthKeysFile(host, port)
    installed = deployUtil.handleSSHKey(key) if key is not None else False
    if installed:
        printNlog("<BSTRAP component='RHEV_INSTALL' status='OK' message='RHEV-H ACCESSIBLE'/>")
    else:
        printNlog("<BSTRAP component='RHEV_INSTALL' status='FAIL' message='Host failed to download management server public-key.'/>")
    return installed
def main():
    """Usage: vds_installer.py [-c vds_config_str] [-m remote_nfs] [-r rev_num] [-O organizationName] [-t YYYY-MM-DDTHH:mm:SS_system_time] [-n netconsole_host:port] [-p engine_port] <url_bs> <url_rpm> <vds_server> <random_num> <vds_complete>
    url_bs - components url
    url_rpm - rpm download url
    random_num - random number for temp. file names generation
    vds_server - vds server for CSR usage
    vds_complete - to run first vds_bootstrap script = false
    to run second vds_bootstrap_complete script = true
    """
    try:
        # Option defaults; any parse/positional error prints usage and exits 1.
        remote_nfs = None
        rev_num = None
        vds_config_str = None
        orgName = None
        systime = None
        ncport = None
        usevdcrepo = False
        engine_port = None
        firewall_rules_file = None
        opts, args = getopt.getopt(sys.argv[1:], "c:m:r:O:t:n:u:p:f:")
        for o,v in opts:
            if o == "-c":
                vds_config_str = v
            if o == "-m":
                remote_nfs = v
            if o == "-r":
                rev_num = v
            if o == "-O":
                orgName = v
            if o == "-t":
                systime = v
            if o == "-n":
                ncport = v
            if o == "-u":
                # Any value starting with 't'/'T' counts as true.
                usevdcrepo = (v[0].upper() == 'T')
            if o == "-p":
                engine_port = v
            if o =="-f":
                firewall_rules_file = v
        url_bs = args[0]
        url_rpm = args[1]
        vds_server = args[2]
        random_num = args[3]
        vds_complete = args[4]
        if vds_complete.lower() == 'true':
            vds_complete = True
        elif vds_complete.lower() == 'false':
            vds_complete = False
        else:
            printNlog(main.__doc__)
            return 1
    except:
        printNlog(main.__doc__)
        return 1
    try:
        logging.debug('**** Start VDS Installation ****')
        # testPlatform(): 0 = regular install, 1 = oVirt node, 2 = unsupported.
        res = testPlatform()
        if res == 0:
            vds_script = downloadBootstrap(url_bs, random_num, vds_complete)
            if vds_script:
                runInstaller(remote_nfs, orgName, systime, ncport, usevdcrepo, vds_config_str, url_rpm, vds_server, random_num, vds_script, vds_complete, firewall_rules_file)
            if firewall_rules_file is not None:
                try:
                    os.unlink(firewall_rules_file)
                except:
                    logging.warn("Failed to delete firewall conf file: %s" , firewall_rules_file)
            if vds_complete:
                # Second stage is done: remove this installer's own temp copy.
                file_name = '/tmp/vds_installer_'+random_num+'.py'
                os.unlink(file_name)
        elif res == 1:
            ret_value = process_ovirt_platform(url_bs, engine_port, random_num, systime)
            if ret_value is False:
                printNlog("<BSTRAP component='RHEV_INSTALL' status='FAIL'/>")
            return ret_value
        elif res == 2:
            logging.error("Failed platform test.")
            return 1
    except:
        logging.error(traceback.format_exc())
        return 1
    return 0
# Script entry point: process exit status comes from main().
if __name__ == "__main__":
    sys.exit(main())
| raksha-rao/gluster-ovirt | backend/manager/conf/vds_installer.py | Python | apache-2.0 | 15,169 |
from nose import SkipTest
from nose.tools import eq_, assert_raises
from dxr.indexers import (unsparsify, by_line, group_needles, span_to_lines,
key_object_pair, Extent, Position, split_into_lines,
FileToSkim)
# Key/value payloads and extents used as fixtures throughout these tests.
KV1 = ('x', 'v1')
KV2 = ('y', 'v2')
KV3 = ('z', 'v3')
# NEEDLE1 sits on one line; NEEDLE2 spans lines 1-3; NEEDLE3 deliberately has
# an end Position before its start (used by the invalid-extent test).
NEEDLE1 = (KV1, Extent(Position(1, 3), Position(1, 7)))
NEEDLE2 = (KV2, Extent(Position(1, 5), Position(3, 7)))
NEEDLE3 = (KV3, Extent(Position(1, 0), Position(0, 0)))
def list_eq(result, expected):
    """Assert two iterables are equal once both are materialized as lists."""
    eq_(list(result), list(expected))
def test_needle_smoke_test():
    """unsparsify() of an empty needle stream yields an empty result."""
    list_eq(unsparsify(lambda: [])(), [])
def test_unsparsify_invalid():
    """Make sure unsparify raises ValueError on extents whose ends come before
    their starts."""
    # NEEDLE3's end Position precedes its start, which should be rejected.
    raise SkipTest("At the moment, we tolerate these and simply warn. Once the clang compiler plugin doesn't spit these out anymore, return to raising an exception.")
    assert_raises(ValueError, unsparsify(lambda: [NEEDLE3]))
def test_unsparsify():
    """Two overlapping dense needles expand into per-line entries."""
    # Test 2 overlapping dense needles:
    output = [[key_object_pair(KV1, 3, 7), key_object_pair(KV2, 5, None)], # the overlap.
              [key_object_pair(KV2, 0, None)], # just the second one,
              [key_object_pair(KV2, 0, 7)]] # extending beyond the first
    list_eq(unsparsify(lambda: [NEEDLE1, NEEDLE2])(), output)
def test_group_needles():
    """group_needles() buckets (needle, line) pairs into one list per line."""
    list_eq(group_needles([]), [])
    list_eq(group_needles([('A', 1), ('B', 1), ('C', 2), ('D', 3)]),
            [['A', 'B'],
             ['C'],
             ['D']])
def test_by_line():
    """by_line() flattens needles into (key_object_pair, line) tuples."""
    list_eq(by_line([]), [])
    list_eq(by_line([NEEDLE1, NEEDLE2]),
            [(key_object_pair(KV1, 3, 7), 1),
             (key_object_pair(KV2, 5, None), 1),
             (key_object_pair(KV2, 0, None), 2),
             (key_object_pair(KV2, 0, 7), 3)])
def test_span_to_lines():
    """span_to_lines() cuts a multi-line extent into per-line column ranges;
    'None' as an end column means 'to end of line'."""
    list_eq(span_to_lines(NEEDLE1),
            [((('x', 'v1'), 3, 7), 1)])
    list_eq(span_to_lines(NEEDLE2),
            [((('y', 'v2'), 5, None), 1),
             ((('y', 'v2'), 0, None), 2),
             ((('y', 'v2'), 0, 7), 3)])
    # A non-needle argument is rejected.
    assert_raises(ValueError, lambda x: list(span_to_lines(x)), [])
def test_split_into_lines():
    """split_into_lines() splits a (key, mapping, extent) triple into one
    triple per line the extent covers, carrying the mapping through."""
    list_eq(split_into_lines([('k', {'m': 'ap'}, Extent(Position(1, 5), Position(3, 7)))]),
            [('k', {'m': 'ap'}, Extent(Position(1, 5), Position(1, None))),
             ('k', {'m': 'ap'}, Extent(Position(2, 0), Position(2, None))),
             ('k', {'m': 'ap'}, Extent(Position(3, 0), Position(3, 7)))])
def test_char_offset():
    """Make sure char_offset() deals with different kinds of line breaks and
    handles the first and last lines correctly."""
    skimmer = FileToSkim('/some/path', u'abc\r\nde\nfghi', 'dummy_plugin', 'dummy_tree')
    eq_(skimmer.char_offset(1, 1), 1)
    # Both characters of the \r\n break count toward the offset (5 + 1).
    eq_(skimmer.char_offset(2, 1), 6)
    eq_(skimmer.char_offset(3, 1), 9)
| pombredanne/dxr | tests/test_indexers.py | Python | mit | 2,910 |
import argparse
from wsqa import create_app
# Create the Flask app via the application factory.
app = create_app()
# Run the development server when executed directly.
if __name__ == '__main__':
    # Command-line options; the port default comes from the app's config.
    parser = argparse.ArgumentParser()
    parser.add_argument('--host',
                        default='0.0.0.0',
                        help='Host to bind to: [%(default)s].')
    parser.add_argument('--port',
                        type=int,
                        default=app.config['SERVER_PORT'],
                        help='Port to listen to: [%(default)s].')
    parser.add_argument('--debug',
                        action='store_true',
                        default=False,
                        help='Debug mode: [%(default)s].')
    args = parser.parse_args()
    app.run(debug=args.debug, host=args.host, port=args.port)
| opendatakosovo/water-surface-quality-api | runserver.py | Python | gpl-2.0 | 745 |
"""
Modules created by others. Versioned and distributed by One Codex
"""
# Public API of this vendored-packages namespace.
__all__ = ['potion_client']
| onecodex/onecodex | onecodex/vendored/__init__.py | Python | mit | 103 |
# Building inheritance
class MITPerson(Person):
    """A Person with a unique, monotonically increasing MIT ID number."""
    nextIdNum = 0  # next ID number to assign

    def __init__(self, name):
        Person.__init__(self, name)  # initialize Person attributes
        # new MITPerson attribute: a unique ID number
        self.idNum = MITPerson.nextIdNum
        MITPerson.nextIdNum += 1

    def getIdNum(self):
        """Return this person's unique ID number."""
        return self.idNum

    def __lt__(self, other):
        # BUG FIX: this method was misspelled '__It__' (capital i), so Python
        # never invoked it for '<' comparisons or sorting; '__lt__' is the
        # actual rich-comparison hook.
        return self.idNum < other.idNum
class Student(MITPerson):
    """Common base for all student types; adds no behavior of its own."""
    pass
class UG(Student): # UG = undergraduate
    """An undergraduate student, tagged with a class (graduation) year."""
    def __init__(self, name, classYear):
        MITPerson.__init__(self, name)
        self.year = classYear
    def getClass(self): # getter method for the class year
        return self.year
class Grad(Student):
    """A graduate student; currently identical to Student."""
    pass
class TransferStudent(Student):
    """A transfer student; currently identical to Student."""
    pass
def isStudent(obj):
    """Return True if obj is any kind of Student (UG, Grad, TransferStudent)."""
    return isinstance(obj, Student)
| teichopsia-/python_practice | old_class_material/MITPerson_class.py | Python | mpl-2.0 | 903 |
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response as render
from django.template import RequestContext
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
import re, time, urllib
import openid
if openid.__version__ < '2.0.0':
raise ImportError, 'You need python-openid 2.0.0 or newer'
elif openid.__version__ < '2.1.0':
from openid.sreg import SRegRequest
else:
from openid.extensions.sreg import SRegRequest
try:
from openid.extensions.pape import Request as PapeRequest
except ImportError:
from openid.extensions import pape as openid_pape
PapeRequest = openid_pape.Request
from openid.extensions.ax import FetchRequest as AXFetchRequest
from openid.extensions.ax import AttrInfo
from openid.consumer.consumer import Consumer, \
SUCCESS, CANCEL, FAILURE, SETUP_NEEDED
from openid.consumer.discover import DiscoveryFailure
from openid.yadis import xri
from util import OpenID, DjangoOpenIDStore, from_openid_response
from middleware import OpenIDMiddleware
from django.utils.html import escape
def get_url_host(request):
    """Return 'scheme://host' for *request*, with the host HTML-escaped."""
    scheme = 'https' if request.is_secure() else 'http'
    host = escape(request.get_host())
    return '%s://%s' % (scheme, host)
def get_full_url(request):
    """Return the absolute URL of the request (scheme, host, path and query)."""
    return get_url_host(request) + request.get_full_path()
# Whitelist for ?next= redirect targets: a leading slash followed only by
# word characters, hyphens and slashes (i.e. a simple local path).
next_url_re = re.compile('^/[-\w/]+$')
def is_valid_next_url(next):
    """Return True only for simple local paths.

    We allow /openid/?next=/welcome/ but for security reasons restrict the
    next= value to a local path, never a complete URL (open-redirect guard).
    """
    return next_url_re.match(next) is not None
def begin(request, redirect_to=None, on_failure=None, user_url=None,
          template_name='openid_consumer/signin.html'):
    """Start an OpenID authentication.

    Without an OpenID URL, renders the sign-in form; with one, builds the
    consumer request (attaching SReg/PAPE/AX extensions per settings) and
    redirects the user to their provider.
    """
    on_failure = on_failure or default_on_failure
    trust_root = getattr(
        settings, 'OPENID_TRUST_ROOT', get_url_host(request) + '/'
    )
    # Where the provider should send the user back after authenticating.
    redirect_to = redirect_to or getattr(
        settings, 'OPENID_REDIRECT_TO',
        # If not explicitly set, assume current URL with complete/ appended
        get_full_url(request).split('?')[0] + 'complete/'
    )
    # In case they were lazy and gave a relative return URL...
    if not (
        redirect_to.startswith('http://')
        or
        redirect_to.startswith('https://')):
        redirect_to = get_url_host(request) + redirect_to
    # Carry a validated local ?next= through the round trip.
    if request.GET.get('next') and is_valid_next_url(request.GET['next']):
        if '?' in redirect_to:
            join = '&'
        else:
            join = '?'
        redirect_to += join + urllib.urlencode({
            'next': request.GET['next']
        })
    if not user_url:
        user_url = request.REQUEST.get('openid_url', None)
    if not user_url:
        # No OpenID supplied yet: show the sign-in form, preserving ?next=.
        request_path = request.path
        if request.GET.get('next'):
            request_path += '?' + urllib.urlencode({
                'next': request.GET['next']
            })
        return render(template_name, {
            'action': request_path,
        }, RequestContext(request))
    if xri.identifierScheme(user_url) == 'XRI' and getattr(
        settings, 'OPENID_DISALLOW_INAMES', False
    ):
        return on_failure(request, _('i-names are not supported'))
    consumer = Consumer(request.session, DjangoOpenIDStore())
    try:
        auth_request = consumer.begin(user_url)
    except DiscoveryFailure:
        return on_failure(request, _('The OpenID was invalid'))
    # Simple Registration extension: settings.OPENID_SREG maps
    # required/optional/policy_url to comma-separated field lists.
    sreg = getattr(settings, 'OPENID_SREG', False)
    if sreg:
        s = SRegRequest()
        for sarg in sreg:
            if sarg.lower().lstrip() == "policy_url":
                s.policy_url = sreg[sarg]
            else:
                for v in sreg[sarg].split(','):
                    s.requestField(field_name=v.lower().lstrip(),
                                   required=(sarg.lower().lstrip() ==
                                             "required"))
        auth_request.addExtension(s)
    # Provider Authentication Policy Extension, from settings.OPENID_PAPE.
    pape = getattr(settings, 'OPENID_PAPE', False)
    if pape:
        # NOTE(review): this condition (<= '2.0.0' AND >= '2.1.0') can never
        # be true, so the version guard is dead code; 'raise (ImportError, ...)'
        # is also the old py2 tuple form that drops the message. Confirm the
        # intended check before changing behavior.
        if openid.__version__ <= '2.0.0' and openid.__version__ >= '2.1.0':
            raise (ImportError,
                   'For pape extension you need python-openid 2.1.0 or newer')
        p = PapeRequest()
        for parg in pape:
            if parg.lower().strip() == 'policy_list':
                for v in pape[parg].split(','):
                    p.addPolicyURI(v)
            elif parg.lower().strip() == 'max_auth_age':
                p.max_auth_age = pape[parg]
        auth_request.addExtension(p)
    # Attribute Exchange: pick the attribute map for the detected provider.
    OPENID_AX_PROVIDER_MAP = getattr(settings, 'OPENID_AX_PROVIDER_MAP', {})
    openid_provider = ('Google' if
                       'google' in request.session.get('openid_provider', '')
                       else 'Default')
    ax = OPENID_AX_PROVIDER_MAP.get(openid_provider)
    if ax:
        axr = AXFetchRequest()
        for attr_name, attr_url in ax.items():
            # All attributes are requested as required.
            axr.add(AttrInfo(attr_url, required=True))
        auth_request.addExtension(axr)
    redirect_url = auth_request.redirectURL(trust_root, redirect_to)
    return HttpResponseRedirect(redirect_url)
def complete(request, on_success=None, on_failure=None,
             failure_template='openid_consumer/failure.html'):
    """Finish an OpenID authentication: hand the provider's response to the
    consumer library and dispatch on the resulting status."""
    on_success = on_success or default_on_success
    on_failure = on_failure or default_on_failure
    consumer = Consumer(request.session, DjangoOpenIDStore())
    # JanRain library raises a warning if passed unicode objects as the keys,
    # so we convert to bytestrings before passing to the library
    query_dict = dict([
        (k.encode('utf8'),
         v.encode('utf8')) for k, v in request.REQUEST.items()
    ])
    url = get_url_host(request) + request.path
    openid_response = consumer.complete(query_dict, url)
    if openid_response.status == SUCCESS:
        return on_success(request,
                          openid_response.identity_url,
                          openid_response)
    elif openid_response.status == CANCEL:
        return on_failure(request,
                          _('The request was cancelled'), failure_template)
    elif openid_response.status == FAILURE:
        return on_failure(request, openid_response.message, failure_template)
    elif openid_response.status == SETUP_NEEDED:
        return on_failure(request, _('Setup needed'), failure_template)
    else:
        # The library only defines the four statuses handled above.
        assert False, "Bad openid status: %s" % openid_response.status
def default_on_success(request, identity_url, openid_response):
    """Record the authenticated OpenID in the session and redirect onward."""
    if 'openids' not in request.session.keys():
        request.session['openids'] = []
    # Eliminate any duplicates of this identity before appending it.
    request.session['openids'] = [
        o for o in request.session['openids'] if o.openid != identity_url
    ]
    request.session['openids'].append(from_openid_response(openid_response))
    # Set up request.openids and request.openid, reusing middleware logic
    OpenIDMiddleware().process_request(request)
    # Only follow a ?next= that passes the local-path whitelist.
    next = request.GET.get('next', '').strip()
    if not next or not is_valid_next_url(next):
        next = getattr(settings, 'OPENID_REDIRECT_NEXT', '/')
    return HttpResponseRedirect(next)
def default_on_failure(request, message,
                       template_name='openid_consumer/failure.html'):
    """Render the failure template with the given error message."""
    return render(template_name, {
        'message': message
    }, RequestContext(request))
def signout(request):
    """Drop all OpenID associations from the session and redirect to a safe
    local destination (?next= is whitelisted, defaulting to '/')."""
    request.session['openids'] = []
    destination = request.GET.get('next', '/')
    if not is_valid_next_url(destination):
        destination = '/'
    return HttpResponseRedirect(destination)
| agiliq/Django-Socialauth | openid_consumer/views.py | Python | gpl-3.0 | 7,943 |
import claripy
import logging
import time
from ... import sim_options as o
# Module-level logger for the VEX dirty-call helpers below.
l = logging.getLogger("angr.engines.vex.dirty")
#####################
# Dirty calls
#####################
# they return retval, constraints
# Reference:
# http://www-inteng.fnal.gov/Integrated_Eng/GoodwinDocs/pdf/Sys%20docs/PowerPC/PowerPC%20Elapsed%20Time.pdf
# and
# http://www.cap-lore.com/code/TB/
def ppcg_dirtyhelper_MFTB(state):
    """Dirty helper for the PPC mftb (move-from-time-base) instruction.

    Returns a (value, constraints) pair; the 64-bit value is a fixed
    placeholder rather than a real time-base read.
    """
    # TODO: This is an incorrect implementation. Fix it later!
    return state.se.BVV(0x200, 64), [ ]
def ppc32g_dirtyhelper_MFSPR_287(state):
    """Dirty helper for 32-bit PPC mfspr on SPR 287; returns a fixed
    placeholder (value, constraints) pair like MFTB above does for 64-bit."""
    return state.se.BVV(0x200, 32), [ ]
def amd64g_dirtyhelper_RDTSC(state):
    """Dirty helper for RDTSC: return a 64-bit timestamp value and no
    constraints.

    With USE_SYSTEM_TIMES set, the value is derived from the host clock
    (plus a fixed offset); otherwise it is a fresh symbolic variable keyed
    ('hardware', 'rdtsc').
    """
    if o.USE_SYSTEM_TIMES in state.options:
        # NOTE(review): time.clock() was removed in Python 3.8, so this line
        # raises AttributeError there; confirm the intended replacement
        # (perf_counter/process_time) before porting.
        val = state.solver.BVV(int(time.clock() * 1000000) + 12345678, 64)
    else:
        val = state.solver.BVS('RDTSC', 64, key=('hardware', 'rdtsc'))
    return val, []

# x86 shares the amd64 implementation.
x86g_dirtyhelper_RDTSC = amd64g_dirtyhelper_RDTSC
# For all the CPUID helpers: we've implemented the very nice CPUID functions, but we don't use them.
# we claim to be a much dumber cpu than we can support because otherwise we get bogged down doing
# various tasks in the libc initializers.
# Copied basically directly from the vex source
def amd64g_dirtyhelper_CPUID_baseline(state, _):
    """Dirty helper for CPUID, reporting a deliberately minimal AMD CPU
    (see the note above about claiming a dumber CPU than we could support).

    Returns (None, []) and writes rax/rbx/rcx/rdx as a side effect.
    """
    old_eax = state.regs.rax[31:0]
    def SET_ABCD(a, b, c, d, condition=None):
        """Store a/b/c/d into rax/rbx/rcx/rdx; when *condition* is given, the
        stores only take effect if the requested leaf (old_eax) matches."""
        if condition is None:
            state.registers.store('rax', a, size=8)
            state.registers.store('rbx', b, size=8)
            state.registers.store('rcx', c, size=8)
            state.registers.store('rdx', d, size=8)
        else:
            cond = old_eax == condition
            state.registers.store('rax', a, size=8, condition=cond)
            state.registers.store('rbx', b, size=8, condition=cond)
            state.registers.store('rcx', c, size=8, condition=cond)
            state.registers.store('rdx', d, size=8, condition=cond)
    # First, an unconditional default for unrecognized leaves; each later
    # conditional call overwrites it when old_eax matches that leaf.
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000)
    SET_ABCD(0x00000001, 0x72676e41, 0x21444955, 0x50432079, 0)
    SET_ABCD(0x00000f5a, 0x01000800, 0x00000000, 0x078bfbff, 1)
    SET_ABCD(0x80000018, 0x68747541, 0x444d4163, 0x69746e65, 0x80000000)
    SET_ABCD(0x00000f5a, 0x00000505, 0x00000000, 0x21d3fbff, 0x80000001)
    SET_ABCD(0x20444d41, 0x6574704f, 0x206e6f72, 0x296d7428, 0x80000002)
    SET_ABCD(0x6f725020, 0x73736563, 0x3820726f, 0x00003834, 0x80000003)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x80000004)
    SET_ABCD(0xff08ff08, 0xff20ff20, 0x40020140, 0x40020140, 0x80000005)
    SET_ABCD(0x00000000, 0x42004200, 0x04008140, 0x00000000, 0x80000006)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x0000000f, 0x80000007)
    SET_ABCD(0x00003028, 0x00000000, 0x00000000, 0x00000000, 0x80000008)
    return None, [ ]
# The AVX/CX16 and AVX2 CPUID variants deliberately reuse the baseline table,
# so the guest always sees the same minimal-feature CPU.
amd64g_dirtyhelper_CPUID_avx_and_cx16 = amd64g_dirtyhelper_CPUID_baseline
amd64g_dirtyhelper_CPUID_avx2 = amd64g_dirtyhelper_CPUID_baseline
def CORRECT_amd64g_dirtyhelper_CPUID_avx_and_cx16(state, _):
    """Faithful amd64 CPUID model for an AVX+CX16-capable CPU.

    Copied from the VEX guest helpers (the brand-string leaves decode to an
    Intel Core i5).  Kept for reference but not registered: the simplified
    baseline helper above is used instead (see the CPUID comment near the
    top of this section).
    """
    old_eax = state.regs.rax[31:0]
    old_ecx = state.regs.rcx[31:0]
    def SET_ABCD(a, b, c, d, condition=None, condition2=None):
        # No condition: unconditional default store of rax/rbx/rcx/rdx.
        # condition only: store applies where EAX == condition (the leaf).
        # condition + condition2: store applies where EAX == condition and
        # ECX == condition2 (leaf + subleaf, for leaves 4, 0xb and 0xd).
        if condition is None:
            state.registers.store('rax', a, size=8)
            state.registers.store('rbx', b, size=8)
            state.registers.store('rcx', c, size=8)
            state.registers.store('rdx', d, size=8)
        elif condition2 is None:
            cond = old_eax == condition
            state.registers.store('rax', a, size=8, condition=cond)
            state.registers.store('rbx', b, size=8, condition=cond)
            state.registers.store('rcx', c, size=8, condition=cond)
            state.registers.store('rdx', d, size=8, condition=cond)
        else:
            cond = claripy.And(old_eax == condition, old_ecx == condition2)
            state.registers.store('rax', a, size=8, condition=cond)
            state.registers.store('rbx', b, size=8, condition=cond)
            state.registers.store('rcx', c, size=8, condition=cond)
            state.registers.store('rdx', d, size=8, condition=cond)
    # Default response, then one (conditional) entry per leaf/subleaf.
    SET_ABCD(0x00000007, 0x00000340, 0x00000340, 0x00000000)
    SET_ABCD(0x0000000d, 0x756e6547, 0x6c65746e, 0x49656e69, 0x00000000)
    SET_ABCD(0x000206a7, 0x00100800, 0x1f9ae3bf, 0xbfebfbff, 0x00000001)
    SET_ABCD(0x76035a01, 0x00f0b0ff, 0x00000000, 0x00ca0000, 0x00000002)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000003)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000004)
    SET_ABCD(0x1c004121, 0x01c0003f, 0x0000003f, 0x00000000, 0x00000004, 0x00000000)
    SET_ABCD(0x1c004122, 0x01c0003f, 0x0000003f, 0x00000000, 0x00000004, 0x00000001)
    SET_ABCD(0x1c004143, 0x01c0003f, 0x000001ff, 0x00000000, 0x00000004, 0x00000002)
    SET_ABCD(0x1c03c163, 0x02c0003f, 0x00001fff, 0x00000006, 0x00000004, 0x00000003)
    SET_ABCD(0x00000040, 0x00000040, 0x00000003, 0x00001120, 0x00000005)
    SET_ABCD(0x00000077, 0x00000002, 0x00000009, 0x00000000, 0x00000006)
    SET_ABCD(0x00000000, 0x00000800, 0x00000000, 0x00000000, 0x00000007)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000008)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000009)
    SET_ABCD(0x07300803, 0x00000000, 0x00000000, 0x00000603, 0x0000000a)
    # Leaf 0xb echoes the incoming ECX (subleaf index) back in RCX.
    SET_ABCD(0x00000000, 0x00000000, old_ecx, 0x00000000, 0x0000000b)
    SET_ABCD(0x00000001, 0x00000001, 0x00000100, 0x00000000, 0x0000000b, 0x00000000)
    SET_ABCD(0x00000004, 0x00000004, 0x00000201, 0x00000000, 0x0000000b, 0x00000001)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x0000000c)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x0000000d)
    SET_ABCD(0x00000007, 0x00000340, 0x00000340, 0x00000000, 0x0000000d, 0x00000000)
    SET_ABCD(0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x0000000d, 0x00000001)
    SET_ABCD(0x00000100, 0x00000240, 0x00000000, 0x00000000, 0x0000000d, 0x00000002)
    SET_ABCD(0x00000007, 0x00000340, 0x00000340, 0x00000000, 0x0000000e)
    SET_ABCD(0x00000007, 0x00000340, 0x00000340, 0x00000000, 0x0000000f)
    SET_ABCD(0x80000008, 0x00000000, 0x00000000, 0x00000000, 0x80000000)
    SET_ABCD(0x00000000, 0x00000000, 0x00000001, 0x28100800, 0x80000001)
    SET_ABCD(0x20202020, 0x20202020, 0x65746e49, 0x2952286c, 0x80000002)
    SET_ABCD(0x726f4320, 0x4d542865, 0x35692029, 0x3033322d, 0x80000003)
    SET_ABCD(0x50432030, 0x20402055, 0x30382e32, 0x007a4847, 0x80000004)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x80000005)
    SET_ABCD(0x00000000, 0x00000000, 0x01006040, 0x00000000, 0x80000006)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000100, 0x80000007)
    SET_ABCD(0x00003024, 0x00000000, 0x00000000, 0x00000000, 0x80000008)
    return None, [ ]
def amd64g_dirtyhelper_IN(state, portno, sz):  # pylint:disable=unused-argument
    """Model the amd64 IN instruction: port input is unknowable, so return a
    fresh unconstrained 64-bit value (keyed for stable naming)."""
    # Use state.solver (the current API) instead of the deprecated state.se
    # alias, for consistency with the RDTSC helper above.
    return state.solver.Unconstrained('IN', 64, key=('hardware', 'in')), []
def amd64g_dirtyhelper_OUT(state, portno, data, sz):  # pylint:disable=unused-argument
    """Model the amd64 OUT instruction as a no-op: writing to an I/O port
    has no observable effect on the simulated state."""
    return None, []
def amd64g_dirtyhelper_SxDT(state, addr, op):  # pylint:disable=unused-argument
    """Model amd64 SIDT/SGDT by storing 80 bits of unconstrained data.

    SIDT and SGDT are the only instructions VEX routes through this helper,
    and both store 80 bits (10 bytes) of data.
    See http://amd-dev.wpengine.netdna-cdn.com/wordpress/media/2008/10/24594_APM_v3.pdf
    page 377.
    """
    # state.solver is the current spelling of the deprecated state.se alias.
    state.memory.store(addr, state.solver.Unconstrained('SxDT', 80))
    return None, []
def x86g_dirtyhelper_CPUID_sse0(state, _):
    """Model x86 CPUID for a pre-SSE CPU, storing canned leaf values."""
    eax_in = state.regs.eax

    def _store_leaf(a, b, c, d, leaf=None):
        # leaf=None writes the default response unconditionally; otherwise
        # the stores only apply to states where EAX selects that leaf.
        values = (('eax', a), ('ebx', b), ('ecx', c), ('edx', d))
        if leaf is None:
            for name, value in values:
                state.registers.store(name, value, size=4)
        else:
            cond = eax_in == leaf
            for name, value in values:
                state.registers.store(name, value, size=4, condition=cond)

    _store_leaf(0x543, 0, 0, 0x8001bf)
    _store_leaf(0x1, 0x72676e41, 0x21444955, 0x50432079, leaf=0)
    return None, []
# The SSE-less response is reused for the sse2/sse3 variants: we deliberately
# claim a dumber CPU than we could support (see the CPUID comment above).
x86g_dirtyhelper_CPUID_sse2 = x86g_dirtyhelper_CPUID_sse0
x86g_dirtyhelper_CPUID_sse3 = x86g_dirtyhelper_CPUID_sse0
def CORRECT_x86g_dirtyhelper_CPUID_sse2(state, _):
    """Faithful x86 CPUID model for an SSE2-capable CPU.

    Copied from the VEX guest helpers (the brand-string leaves decode to an
    Intel Core 2).  Kept for reference but not registered: the simplified
    sse0 helper above is used instead.
    """
    old_eax = state.regs.eax
    old_ecx = state.regs.ecx
    def SET_ABCD(a, b, c, d, condition=None, condition2=None):
        # No condition: unconditional default store of eax/ebx/ecx/edx.
        # condition only: store applies where EAX == condition (the leaf).
        # condition + condition2: store applies where EAX == condition and
        # ECX == condition2 (leaf + subleaf, for leaf 4).
        if condition is None:
            state.registers.store('eax', a, size=4)
            state.registers.store('ebx', b, size=4)
            state.registers.store('ecx', c, size=4)
            state.registers.store('edx', d, size=4)
        elif condition2 is None:
            cond = old_eax == condition
            state.registers.store('eax', a, size=4, condition=cond)
            state.registers.store('ebx', b, size=4, condition=cond)
            state.registers.store('ecx', c, size=4, condition=cond)
            state.registers.store('edx', d, size=4, condition=cond)
        else:
            cond = claripy.And(old_eax == condition, old_ecx == condition2)
            state.registers.store('eax', a, size=4, condition=cond)
            state.registers.store('ebx', b, size=4, condition=cond)
            state.registers.store('ecx', c, size=4, condition=cond)
            state.registers.store('edx', d, size=4, condition=cond)
    # Default response, then one (conditional) entry per leaf/subleaf.
    SET_ABCD(0x07280202, 0x00000000, 0x00000000, 0x00000000)
    SET_ABCD(0x0000000a, 0x756e6547, 0x6c65746e, 0x49656e69, 0x00000000)
    SET_ABCD(0x000006f6, 0x00020800, 0x0000e3bd, 0xbfebfbff, 0x00000001)
    SET_ABCD(0x05b0b101, 0x005657f0, 0x00000000, 0x2cb43049, 0x00000002)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000003)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000004)
    SET_ABCD(0x04000121, 0x01c0003f, 0x0000003f, 0x00000001, 0x00000004, 0x00000000)
    SET_ABCD(0x04000122, 0x01c0003f, 0x0000003f, 0x00000001, 0x00000004, 0x00000001)
    SET_ABCD(0x04004143, 0x03c0003f, 0x00000fff, 0x00000001, 0x00000004, 0x00000002)
    SET_ABCD(0x00000040, 0x00000040, 0x00000003, 0x00000020, 0x00000005)
    SET_ABCD(0x00000001, 0x00000002, 0x00000001, 0x00000000, 0x00000006)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000007)
    SET_ABCD(0x00000400, 0x00000000, 0x00000000, 0x00000000, 0x00000008)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000009)
    SET_ABCD(0x07280202, 0x00000000, 0x00000000, 0x00000000, 0x0000000a)
    SET_ABCD(0x80000008, 0x00000000, 0x00000000, 0x00000000, 0x80000000)
    SET_ABCD(0x00000000, 0x00000000, 0x00000001, 0x20100000, 0x80000001)
    SET_ABCD(0x65746e49, 0x2952286c, 0x726f4320, 0x4d542865, 0x80000002)
    SET_ABCD(0x43203229, 0x20205550, 0x20202020, 0x20202020, 0x80000003)
    SET_ABCD(0x30303636, 0x20402020, 0x30342e32, 0x007a4847, 0x80000004)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x80000005)
    SET_ABCD(0x00000000, 0x00000000, 0x10008040, 0x00000000, 0x80000006)
    SET_ABCD(0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x80000007)
    SET_ABCD(0x00003024, 0x00000000, 0x00000000, 0x00000000, 0x80000008)
    return None, [ ]
def x86g_dirtyhelper_IN(state, portno, sz):  # pylint:disable=unused-argument
    """Model the x86 IN instruction: port input is unknowable, so return a
    fresh unconstrained 32-bit value (keyed for stable naming)."""
    # Use state.solver (the current API) instead of the deprecated state.se
    # alias, for consistency with the RDTSC helper above.
    return state.solver.Unconstrained('IN', 32, key=('hardware', 'in')), []
def x86g_dirtyhelper_OUT(state, portno, data, sz):  # pylint:disable=unused-argument
    """Model the x86 OUT instruction as a no-op: writing to an I/O port has
    no observable effect on the simulated state."""
    return None, []
def x86g_dirtyhelper_SxDT(state, addr, op):
    """Model x86 SIDT/SGDT, the only instructions VEX routes through this
    helper; both store 48 bits of data at addr.

    op selects the instruction: 0 = SIDT (no IDT is tracked, so store
    unconstrained data), 1 = SGDT (store the tracked gdt register).
    """
    if not op.concrete:
        # resolve failed: op is symbolic, so we cannot tell which
        # instruction this is; do nothing.
        return None, []
    elif op._model_concrete.value == 0:
        # state.solver is the current spelling of the deprecated state.se alias.
        state.memory.store(addr, state.solver.Unconstrained('SIDT', 48))
    elif op._model_concrete.value == 1:
        state.memory.store(addr, state.regs.gdt)
    return None, []
def x86g_dirtyhelper_LGDT_LIDT(state, addr, op):
    """Model x86 LGDT/LIDT loading a 6-byte descriptor (2-byte limit then
    4-byte base) from addr.

    op selects the instruction: 2 = LGDT (track base:limit in regs.gdt,
    zero-extended from 48 to 64 bits), 3 = LIDT (modeled as a no-op).
    """
    if not op.concrete:
        # resolve failed: op is symbolic, so we cannot tell which
        # instruction this is; do nothing.
        return None, []
    limit = state.memory.load(addr, 2, endness='Iend_LE')
    base = state.memory.load(addr + 2, 4, endness='Iend_LE')
    if op._model_concrete.value == 2:
        # state.solver is the current spelling of the deprecated state.se alias.
        state.regs.gdt = state.solver.Concat(base, limit).zero_extend(16)
    elif op._model_concrete.value == 3:
        # LIDT is a nop
        pass
    return None, []
def x86g_dirtyhelper_FINIT(state, gsptr):  # pylint:disable=unused-argument
    """Model the x87 FINIT instruction by resetting the FPU tag, rounding,
    status and stack-top registers."""
    for reg, value in (('fpu_tags', 0), ('fpround', 0), ('fc3210', 0x0300), ('ftop', 0)):
        setattr(state.regs, reg, value)
    return None, []
def x86g_dirtyhelper_write_cr0(state, value):
    """Model a write to control register CR0 by recording the (single
    concrete) value in this state's VEX archinfo."""
    # make a deep copy of the arch before modifying it so we don't
    # accidentally modify it for all other states
    state.arch = state.arch.copy()
    # state.solver is the current spelling of the deprecated state.se alias.
    state.arch.vex_archinfo['x86_cr0'] = state.solver.eval_one(value)
    return None, []
def x86g_dirtyhelper_loadF80le(state, addr):
    """Load an 80-bit x87 extended-precision float from addr and convert it
    to a 64-bit double, returned as a 64-bit bitvector.

    A zero exponent maps to zero, an all-ones exponent maps to
    infinity/NaN (depending on the mantissa), and everything else is
    rebiased (16383 -> 1023) with the mantissa truncated to 52 bits.
    """
    tbyte = state.memory.load(addr, size=10, endness='Iend_LE')
    sign = tbyte[79]
    exponent = tbyte[78:64]
    mantissa = tbyte[62:0]
    normalized_exponent = exponent[10:0] - 16383 + 1023
    # state.solver is the current spelling of the deprecated state.se alias.
    zero_exponent = state.solver.BVV(0, 11)
    inf_exponent = state.solver.BVV(-1, 11)
    final_exponent = claripy.If(exponent == 0, zero_exponent, claripy.If(exponent == -1, inf_exponent, normalized_exponent))
    normalized_mantissa = tbyte[62:11]
    zero_mantissa = claripy.BVV(0, 52)
    inf_mantissa = claripy.BVV(-1, 52)
    final_mantissa = claripy.If(exponent == 0, zero_mantissa, claripy.If(exponent == -1, claripy.If(mantissa == 0, zero_mantissa, inf_mantissa), normalized_mantissa))
    qword = claripy.Concat(sign, final_exponent, final_mantissa)
    assert len(qword) == 64
    return qword, []
def x86g_dirtyhelper_storeF80le(state, addr, qword):
    """Convert a 64-bit double (as a bitvector) to an 80-bit x87
    extended-precision float and store it at addr.

    The inverse of loadF80le: a zero exponent maps to zero, an all-ones
    exponent maps to infinity/NaN, and everything else is rebiased
    (1023 -> 16383) with the explicit integer bit set and the mantissa
    padded to 64 bits.
    """
    sign = qword[63]
    exponent = qword[62:52]
    mantissa = qword[51:0]
    normalized_exponent = exponent.zero_extend(4) - 1023 + 16383
    # state.solver is the current spelling of the deprecated state.se alias.
    zero_exponent = state.solver.BVV(0, 15)
    inf_exponent = state.solver.BVV(-1, 15)
    final_exponent = claripy.If(exponent == 0, zero_exponent, claripy.If(exponent == -1, inf_exponent, normalized_exponent))
    # The 80-bit format has an explicit leading integer bit, hence the 1.
    normalized_mantissa = claripy.Concat(claripy.BVV(1, 1), mantissa, claripy.BVV(0, 11))
    zero_mantissa = claripy.BVV(0, 64)
    inf_mantissa = claripy.BVV(-1, 64)
    final_mantissa = claripy.If(exponent == 0, zero_mantissa, claripy.If(exponent == -1, claripy.If(mantissa == 0, zero_mantissa, inf_mantissa), normalized_mantissa))
    tbyte = claripy.Concat(sign, final_exponent, final_mantissa)
    assert len(tbyte) == 80
    state.memory.store(addr, tbyte, endness='Iend_LE')
    return None, []
| tyb0807/angr | angr/engines/vex/dirty.py | Python | bsd-2-clause | 14,622 |
import logging
import unittest
from ds.vortex.core import baseNode
from ds.vortex.core import graph
from ds.vortex.core import plug as plugs
from ds.vortex.nodes.math.basic import sum
# Module-level logger named after this test module.
logger = logging.getLogger(__name__)
class TestGraph(unittest.TestCase):
    """Tests for graph.Graph: node management, lookup and containment."""

    def setUp(self):
        # A fresh graph plus two nodes, each with one input and one output plug.
        self.graph = graph.Graph(name="testGraph")
        self.testNode = baseNode.BaseNode("testNode")
        self.testNode2 = baseNode.BaseNode("testNode2")
        self.testNode.addPlug(plugs.InputPlug("testInput", self.testNode))
        self.testNode.addPlug(plugs.OutputPlug("testOutput", self.testNode))
        self.testNode2.addPlug(plugs.InputPlug("testInput1", self.testNode2))
        self.testNode2.addPlug(plugs.OutputPlug("testOutput1", self.testNode2))

    def testNodeUniqueNodeName(self):
        # A clashing node name should be made unique by the graph.
        self.graph.addNode(self.testNode)
        testNode = baseNode.BaseNode("testNode")
        self.graph.addNode(testNode)
        self.assertEqual(testNode.name, "testNode0")

    def testAddNode(self):
        self.assertEqual(self.graph.addNode(self.testNode), self.testNode)
        self.assertTrue(len(self.graph.nodes))
        # Adding the same node twice should raise ValueError.  Pass the
        # callable and its argument separately: the previous code called
        # addNode() inline, so the exception escaped before assertRaises
        # could catch it.
        self.assertRaises(ValueError, self.graph.addNode, self.testNode)

    def testDeleteNode(self):
        self.graph.addNode(self.testNode)
        self.graph.deleteNode(self.testNode)
        self.assertEqual(len(self.graph.nodes.values()), 0)

    def testGetNode(self):
        self.graph.addNode(self.testNode)
        self.assertEqual(self.graph.getNode("testNode"), self.testNode)

    def testGetter(self):
        # Graph.get resolves nodes, "node|plug" paths and edge names.
        self.graph.addNode(self.testNode)
        self.graph.addNode(self.testNode2)
        self.assertEqual(self.graph.get("testNode|testInput"), self.testNode.getPlug("testInput"))
        self.assertEqual(self.graph.get("testNode|testOutput"), self.testNode.getPlug("testOutput"))
        self.assertEqual(self.graph.get("testNode"), self.testNode)
        testEdge = self.testNode.getPlug("testOutput").connect(self.testNode2.getPlug("testInput1"))
        self.assertEqual(self.graph.get("testOutput_testInput1"), testEdge)

    def testClearGraph(self):
        self.graph.addNode(self.testNode)
        self.graph.addNode(self.testNode2)
        self.graph.clear()
        self.assertEqual(len(self.graph.nodes), 0)

    def testGraphAllLeaves(self):
        # With one connection only the upstream node is a leaf; after
        # disconnecting, both nodes are leaves again.
        self.graph.addNode(self.testNode)
        self.graph.addNode(self.testNode2)
        self.testNode.getPlug("testOutput").connect(self.testNode2.getPlug("testInput1"))
        leafNodes = self.graph.allLeaves()
        self.assertEqual(len(leafNodes), 1)
        self.assertEqual(leafNodes[0], self.testNode)
        self.testNode.getPlug("testOutput").disconnect(self.testNode2.getPlug("testInput1"))
        leafNodes = self.graph.allLeaves()
        self.assertEqual(len(leafNodes), 2)

    def testContains(self):
        self.graph.addNode(self.testNode)
        self.graph.addNode(self.testNode2)
        # The previous assertion passed the node as the expression and the
        # membership result as the *message*, so it could never fail;
        # assert the membership test itself.
        self.assertTrue(self.testNode in self.graph)
class TestGraphDirty(unittest.TestCase):
    """Tests dirty-flag propagation and pull evaluation through a small
    network: addNode1 and addNode2 both feed addNode3."""

    def setUp(self):
        self.graph = graph.Graph(name="testPushGraph")
        self.addNode1 = sum.SumNode("addNode1")
        self.addNode2 = sum.SumNode("addNode2")
        self.addNode3 = sum.SumNode("addNode3")
        self.graph.addNode(self.addNode1)
        self.graph.addNode(self.addNode2, value1=5, value2=15)
        self.graph.addNode(self.addNode3)
        self.addNode1.getPlug("output").connect(self.addNode3.getPlug("value1"))
        self.addNode2.getPlug("output").connect(self.addNode3.getPlug("value2"))

    def testSetValuePropagatesDirtyDownStream(self):
        self.addNode1.getPlug("value1").value = 50
        self.addNode1.getPlug("value2").value = 20
        # test that all plugs are dirty
        self.assertTrue(self.addNode1.getPlug("value2").dirty)
        self.assertTrue(self.addNode1.getPlug("output").dirty)
        self.assertTrue(self.addNode2.getPlug("value1").dirty)
        self.assertTrue(self.addNode2.getPlug("value2").dirty)
        self.assertTrue(self.addNode2.getPlug("output").dirty)
        self.assertTrue(self.addNode3.getPlug("value1").dirty)
        self.assertTrue(self.addNode3.getPlug("value2").dirty)
        self.assertTrue(self.addNode3.getPlug("output").dirty)
        # (50 + 20) + (5 + 15) == 90
        self.assertEqual(self.addNode3.getPlug("output").value, 90)
        # Requesting the output value evaluates all dirty nodes, so every
        # plug should now be clean.
        self.assertFalse(self.addNode1.getPlug("value2").dirty)
        self.assertFalse(self.addNode1.getPlug("output").dirty)
        self.assertFalse(self.addNode2.getPlug("value1").dirty)
        self.assertFalse(self.addNode2.getPlug("value2").dirty)
        self.assertFalse(self.addNode2.getPlug("output").dirty)
        self.assertFalse(self.addNode3.getPlug("value1").dirty)
        self.assertFalse(self.addNode3.getPlug("value2").dirty)
        self.assertFalse(self.addNode3.getPlug("output").dirty)
class TestSerialize(unittest.TestCase):
    """Round-trip tests for Graph.serializeGraph / loadGraph."""

    def setUp(self):
        self.graph = graph.Graph("serialGraph")

    def testEmptyGraph(self):
        savedGraph = self.graph.serializeGraph()
        self.newGraph = graph.Graph("emptyGraph")
        # NOTE(review): this calls loadGraph on an instance, while the other
        # tests use graph.Graph.loadGraph as a factory -- confirm which
        # usage is the supported API.
        self.newGraph.loadGraph(savedGraph)
        self.assertEqual(len(self.graph), len(self.newGraph))

    def testSerializeNodeToGraph(self):
        self.graph.addNode(baseNode.BaseNode("testNode"))
        self.graph.addNode(baseNode.BaseNode("testNode2"))
        savedGraph = self.graph.serializeGraph()
        self.newGraph = graph.Graph.loadGraph(savedGraph)
        self.assertEqual(len(self.graph.nodes), len(self.newGraph.nodes))
        self.assertEqual(self.graph._name, self.newGraph._name)

    def testSerializeNodePlugsToGraph(self):
        # Two nodes with plugs and one connection must survive a round trip.
        node = baseNode.BaseNode("testNode")
        node2 = baseNode.BaseNode("testNode2")
        node.addPlug(plugs.InputPlug("testInputPlug", node=node))
        node.addPlug(plugs.OutputPlug("testOutputPlug", node=node))
        node2.addPlug(plugs.InputPlug("testInputPlug", node=node2))
        node2.addPlug(plugs.OutputPlug("testOutputPlug", node=node2))
        node2.getPlug("testOutputPlug").connect(node.getPlug("testInputPlug"))
        self.graph.addNode(node)
        self.graph.addNode(node2)
        savedGraph = self.graph.serializeGraph()
        self.newGraph = graph.Graph.loadGraph(savedGraph)
        self.assertEqual(len(self.graph), len(self.newGraph))
        self.assertEqual(self.graph._name, self.newGraph._name)
        # Re-serializing the loaded graph must reproduce the saved data.
        self.assertDictEqual(savedGraph, self.newGraph.serializeGraph())
if __name__ == "__main__":
    # Allow running this test module directly; verbosity=2 prints each test name.
    unittest.main(verbosity=2)
| dsparrow27/vortex | src/ds/vortex/tests/testGraph.py | Python | mit | 6,846 |
from __future__ import print_function
from os import system, path as os_path
import sys
import re
import six
from six.moves import range
if sys.version_info[0] < 3:
from string import maketrans, strip
from enigma import eConsoleAppContainer
from Components.config import config, ConfigYesNo, NoSave, ConfigSubsection, ConfigText, ConfigSelection, ConfigPassword
from Components.Console import Console
from Components.Network import iNetwork
from wifi.scan import Cell
# Encryption modes and WEP key formats offered in the UI, in display order.
liste = ["WPA/WPA2", "WPA2", "WPA", "WEP", "Unencrypted"]
weplist = ["ASCII", "HEX"]
# Runtime-only (NoSave) settings for the wireless configuration screen.
config.plugins.wlan = ConfigSubsection()
config.plugins.wlan.essid = NoSave(ConfigText(default="", fixed_size=False))
config.plugins.wlan.hiddenessid = NoSave(ConfigYesNo(default=False))
config.plugins.wlan.encryption = NoSave(ConfigSelection(liste, default="WPA/WPA2"))
config.plugins.wlan.wepkeytype = NoSave(ConfigSelection(weplist, default="ASCII"))
config.plugins.wlan.psk = NoSave(ConfigPassword(default="", fixed_size=False))
def getWlanConfigName(iface):
	"""Return the wireless config file path for *iface*: the Broadcom wl
	driver uses /etc/wl.conf.<iface>, every other driver gets a
	per-interface wpa_supplicant config."""
	if iNetwork.detectWlanModule(iface) in ('brcm-wl', ):
		return '/etc/wl.conf.' + iface
	return '/etc/wpa_supplicant.' + iface + '.conf'
class Wlan:
	"""Wireless scanning for a single interface: brings the interface up if
	needed, collects access points via wifi.scan.Cell, and restores the
	previous interface state afterwards."""

	def __init__(self, iface=None):
		self.iface = iface
		self.oldInterfaceState = None
		# Build a translation table mapping non-printable byte values to
		# spaces, used by asciify() to sanitize strings for display.
		# range(256) covers all byte values; the previous range(0, 255)
		# stopped at 254 and left byte 0xff untranslated.
		a = ''.join(chr(i) for i in range(256))
		b = ''.join(' ' if i < 32 or i > 127 else chr(i) for i in range(256))
		if sys.version_info[0] >= 3:
			self.asciitrans = str.maketrans(a, b)
		else:
			self.asciitrans = maketrans(a, b)

	def asciify(self, str):  # noqa: parameter name kept for API compatibility, though it shadows the builtin
		"""Return *str* with non-printable characters replaced by spaces."""
		return str.translate(self.asciitrans)

	def getWirelessInterfaces(self):
		"""Return the interface names listed in /proc/net/wireless."""
		device = re.compile('[a-z]{2,}[0-9]*:')
		ifnames = []
		# Context manager ensures the proc file is closed; the previous
		# code leaked the file handle.
		with open('/proc/net/wireless', 'r') as fp:
			for line in fp:
				try:
					ifnames.append(device.search(line).group()[:-1])
				except AttributeError:
					# Header lines do not match the device pattern.
					pass
		return ifnames

	def setInterface(self, iface=None):
		self.iface = iface

	def getInterface(self):
		return self.iface

	def getNetworkList(self):
		"""Scan self.iface and return a dict mapping SSID to an info dict,
		bringing the interface (and Broadcom's wl core) up first if needed."""
		if self.oldInterfaceState is None:
			# Remember the state so stopGetNetworkList() can restore it.
			self.oldInterfaceState = iNetwork.getAdapterAttribute(self.iface, "up")
		if self.oldInterfaceState is False:
			if iNetwork.getAdapterAttribute(self.iface, "up") is False:
				iNetwork.setAdapterAttribute(self.iface, "up", True)
				system("ifconfig " + self.iface + " up")
				driver = iNetwork.detectWlanModule(self.iface)
				if driver in ('brcm-wl', ):
					system("wl up")
		scanresults = list(Cell.all(self.iface))
		aps = {}
		for cell in scanresults:
			# NOTE(review): 'bssid' is filled with the SSID rather than
			# cell.address -- preserved from the original; confirm intent.
			aps[cell.ssid] = {
				'active': True,
				'bssid': cell.ssid,
				'essid': cell.ssid,
				'channel': cell.channel,
				'encrypted': cell.encrypted,
				'encryption_type': cell.encryption_type if cell.encrypted else "none",
				'iface': self.iface,
				'maxrate': cell.bitrates,
				'mode': cell.mode,
				'quality': cell.quality,
				'signal': cell.signal,
				'frequency': cell.frequency,
				'frequency_norm': cell.frequency_norm,
				'address': cell.address,
				'noise': cell.noise,
				'pairwise_ciphers': cell.pairwise_ciphers,
				'authentication_suites': cell.authentication_suites,
			}
		return aps

	def stopGetNetworkList(self):
		"""Restore the interface to its pre-scan state and forget it."""
		if self.oldInterfaceState is not None:
			if self.oldInterfaceState is False:
				iNetwork.setAdapterAttribute(self.iface, "up", False)
				system("ifconfig " + self.iface + " down")
				driver = iNetwork.detectWlanModule(self.iface)
				if driver in ('brcm-wl', ):
					system("wl down")
			self.oldInterfaceState = None
			self.iface = None
# Shared module-level Wlan instance; the interface is set later via setInterface().
iWlan = Wlan()
class brcmWLConfig:
	"""Reads and writes the Broadcom wl driver config file (see
	getWlanConfigName), a simple key=value format."""

	def __init__(self):
		pass

	def writeConfig(self, iface):
		"""Write the current config.plugins.wlan settings for *iface*."""
		essid = config.plugins.wlan.essid.value
		encryption = config.plugins.wlan.encryption.value
		psk = config.plugins.wlan.psk.value
		with open(getWlanConfigName(iface), 'w') as fp:
			fp.write('ssid=' + essid + '\n')
			if encryption in ('WPA', 'WPA2', 'WPA/WPA2', 'WEP'):
				# The wl format has no combined mode, so WPA/WPA2
				# degrades to WPA2.
				if encryption == "WPA/WPA2":
					encryption = "WPA2"
				fp.write('method=' + encryption.lower() + '\n')
			if encryption.lower() == "unencrypted":
				fp.write('method=None\n')
			fp.write('key=' + psk + '\n')

	def loadConfig(self, iface):
		"""Parse the wl config for *iface* into config.plugins.wlan and
		return the resulting settings as a dict."""
		# Reset to defaults first so missing keys fall back cleanly.
		config.plugins.wlan.hiddenessid.value = False
		config.plugins.wlan.wepkeytype.value = "ASCII"
		config.plugins.wlan.essid.value = ""
		config.plugins.wlan.encryption.value = "WPA2"
		config.plugins.wlan.psk.value = ""
		configfile = getWlanConfigName(iface)
		if os_path.exists(configfile):
			print("[Wlan.py] parsing configfile: ", configfile)
			with open(configfile, "r") as fd:
				lines = fd.readlines()
			for line in lines:
				try:
					(key, value) = line.strip().split('=', 1)
				except ValueError:
					# Line without '=': skip it.
					continue
				if key == 'ssid':
					config.plugins.wlan.essid.value = value.strip()
				elif key == 'method':
					method = value.strip()
					# 'None' means unencrypted; other methods are
					# stored lowercase in the file.
					if method == "None":
						method = "Unencrypted"
					else:
						method = method.upper()
					config.plugins.wlan.encryption.value = method
				elif key == 'key':
					config.plugins.wlan.psk.value = value.strip()
		wsconf = {
			'hiddenessid': config.plugins.wlan.hiddenessid.value,
			'ssid': config.plugins.wlan.essid.value,
			'encryption': config.plugins.wlan.encryption.value,
			'wepkeytype': config.plugins.wlan.wepkeytype.value,
			'key': config.plugins.wlan.psk.value,
		}
		return wsconf
class wpaSupplicant:
	"""Reads and writes wpa_supplicant-style configuration, plus the simpler
	key=value format used by the Broadcom wl driver."""

	def __init__(self):
		pass

	def writeBcmWifiConfig(self, iface, essid, encryption, psk):
		"""Write a Broadcom wl config file for *iface* from explicit values."""
		contents = "ssid=" + essid + "\n"
		contents += "method=" + encryption + "\n"
		contents += "key=" + psk + "\n"
		print("content = \n" + contents)
		with open(getWlanConfigName(iface), "w") as fd:
			fd.write(contents)

	def loadBcmWifiConfig(self, iface):
		"""Parse a Broadcom wl config file for *iface* and return its
		settings as a dict; defaults are used on any error."""
		wsconf = {
			"ssid": "",
			"hiddenessid": False,  # not used
			"encryption": "WPA2",
			"wepkeytype": "ASCII",  # not used
			"key": "",
		}
		configfile = getWlanConfigName(iface)
		try:
			with open(configfile, "r") as fd:
				lines = fd.readlines()
			for line in lines:
				try:
					(key, value) = line.strip().split('=', 1)
				except ValueError:
					# Line without '=': skip it.
					continue
				if key == 'ssid':
					wsconf["ssid"] = value.strip()
				elif key == 'method':
					wsconf["encryption"] = value.strip()
				elif key == 'key':
					wsconf["key"] = value.strip()
		except:
			# Best-effort: any read error falls back to defaults.
			print("[Wlan.py] Error parsing ", configfile)
			wsconf = {
				'hiddenessid': False,
				'ssid': "",
				'encryption': "WPA2",
				'wepkeytype': "ASCII",
				'key': "",
			}
		for (k, v) in list(wsconf.items()):
			print("[wsconf][%s] %s" % (k, v))
		return wsconf

	def writeConfig(self, iface):
		"""Write a wpa_supplicant config for *iface* from config.plugins.wlan."""
		essid = config.plugins.wlan.essid.value
		hiddenessid = config.plugins.wlan.hiddenessid.value
		encryption = config.plugins.wlan.encryption.value
		wepkeytype = config.plugins.wlan.wepkeytype.value
		psk = config.plugins.wlan.psk.value
		with open(getWlanConfigName(iface), 'w') as fp:
			fp.write('#WPA Supplicant Configuration by enigma2\n')
			fp.write('ctrl_interface=/var/run/wpa_supplicant\n')
			fp.write('eapol_version=1\n')
			fp.write('fast_reauth=1\n')
			fp.write('ap_scan=1\n')
			fp.write('network={\n')
			fp.write('\tssid="' + essid + '"\n')
			# scan_ssid=1 makes wpa_supplicant probe for hidden networks.
			if hiddenessid:
				fp.write('\tscan_ssid=1\n')
			else:
				fp.write('\tscan_ssid=0\n')
			if encryption in ('WPA', 'WPA2', 'WPA/WPA2'):
				fp.write('\tkey_mgmt=WPA-PSK\n')
				if encryption == 'WPA':
					fp.write('\tproto=WPA\n')
					fp.write('\tpairwise=TKIP\n')
					fp.write('\tgroup=TKIP\n')
				elif encryption == 'WPA2':
					fp.write('\tproto=RSN\n')
					fp.write('\tpairwise=CCMP\n')
					fp.write('\tgroup=CCMP\n')
				else:
					fp.write('\tproto=WPA RSN\n')
					fp.write('\tpairwise=CCMP TKIP\n')
					fp.write('\tgroup=CCMP TKIP\n')
				fp.write('\tpsk="' + psk + '"\n')
			elif encryption == 'WEP':
				fp.write('\tkey_mgmt=NONE\n')
				# ASCII keys are quoted; HEX keys are written raw.
				if wepkeytype == 'ASCII':
					fp.write('\twep_key0="' + psk + '"\n')
				else:
					fp.write('\twep_key0=' + psk + '\n')
			else:
				fp.write('\tkey_mgmt=NONE\n')
			fp.write('}')
			fp.write('\n')
		#system('cat ' + getWlanConfigName(iface))

	def loadConfig(self, iface):
		"""Parse the wpa_supplicant config for *iface* into
		config.plugins.wlan and return the settings as a dict (defaults on
		any parse error)."""
		configfile = getWlanConfigName(iface)
		if not os_path.exists(configfile):
			configfile = '/etc/wpa_supplicant.conf'
		try:
			# parse the wpasupplicant configfile
			print("[Wlan.py] parsing configfile: ", configfile)
			with open(configfile, 'r') as fp:
				supplicant = fp.readlines()
			essid = None
			encryption = "Unencrypted"
			for s in supplicant:
				split = s.strip().split('=', 1)
				if split[0] == 'scan_ssid':
					config.plugins.wlan.hiddenessid.value = split[1] == '1'
				elif split[0] == 'ssid':
					essid = split[1][1:-1]
					config.plugins.wlan.essid.value = essid
				elif split[0] == 'proto':
					# Map the wpa_supplicant proto back to the UI choice.
					# An unknown value leaves the current setting untouched
					# (previously it raised NameError and the whole config
					# was discarded by the except below).
					encryption = {
						'WPA': 'WPA',
						'RSN': 'WPA2',
						'WPA RSN': 'WPA/WPA2',
						'WPA WPA2': 'WPA/WPA2',
					}.get(split[1], encryption)
				elif split[0] == 'wep_key0':
					encryption = 'WEP'
					# Quoted keys are ASCII; unquoted keys are HEX.
					if split[1].startswith('"') and split[1].endswith('"'):
						config.plugins.wlan.wepkeytype.value = 'ASCII'
						config.plugins.wlan.psk.value = split[1][1:-1]
					else:
						config.plugins.wlan.wepkeytype.value = 'HEX'
						config.plugins.wlan.psk.value = split[1]
				elif split[0] == 'psk':
					config.plugins.wlan.psk.value = split[1][1:-1]
			config.plugins.wlan.encryption.value = encryption
			wsconfig = {
				'hiddenessid': config.plugins.wlan.hiddenessid.value,
				'ssid': config.plugins.wlan.essid.value,
				'encryption': config.plugins.wlan.encryption.value,
				'wepkeytype': config.plugins.wlan.wepkeytype.value,
				'key': config.plugins.wlan.psk.value,
			}
			# Replace empty/placeholder values with sane defaults.
			for (key, item) in list(wsconfig.items()):
				if item == "None" or item == "":
					if key == 'hiddenessid':
						wsconfig['hiddenessid'] = False
					if key == 'ssid':
						wsconfig['ssid'] = ""
					if key == 'encryption':
						wsconfig['encryption'] = "WPA2"
					if key == 'wepkeytype':
						wsconfig['wepkeytype'] = "ASCII"
					if key == 'key':
						wsconfig['key'] = ""
		except:
			# Best-effort: any parse error falls back to defaults.
			print("[Wlan.py] Error parsing ", configfile)
			wsconfig = {
				'hiddenessid': False,
				'ssid': "",
				'encryption': "WPA2",
				'wepkeytype': "ASCII",
				'key': "",
			}
		#print "[Wlan.py] WS-CONFIG-->",wsconfig
		return wsconfig
class Status:
	"""Collects wireless link status asynchronously: runs iwconfig through a
	Console, parses its output, merges in scan data and caches the result
	per interface."""

	def __init__(self):
		self.wlaniface = {}
		self.backupwlaniface = {}
		self.statusCallback = None
		self.WlanConsole = Console()

	def stopWlanConsole(self):
		"""Kill any running console commands and drop the console."""
		if self.WlanConsole is not None:
			print("[iStatus] killing self.WlanConsole")
			self.WlanConsole.killAll()
			self.WlanConsole = None

	def getDataForInterface(self, iface, callback=None):
		"""Start an asynchronous ``iwconfig <iface>`` run; *callback* is
		invoked from iwconfigFinished with (True, per-interface data)."""
		self.WlanConsole = Console()
		cmd = "iwconfig " + iface
		if callback is not None:
			self.statusCallback = callback
		self.WlanConsole.ePopen(cmd, self.iwconfigFinished, iface)

	def iwconfigFinished(self, result, retval, extra_args):
		"""Console callback: parse iwconfig output and, when associated,
		enrich it with scan results before firing the stored callback."""
		result = six.ensure_str(result)
		iface = extra_args
		ssid = "off"
		# 'frequency' appeared twice in the original dict literal; listing
		# it once is sufficient.
		data = {'essid': False, 'frequency': False, 'accesspoint': False, 'bitrate': False, 'encryption': False, 'quality': False, 'signal': False, 'channel': False, 'encryption_type': False, 'frequency_norm': False}
		for line in result.splitlines():
			line = line.strip()
			if "ESSID" in line:
				if "off/any" in line:
					ssid = "off"
				else:
					if "Nickname" in line:
						ssid = (line[line.index('ESSID') + 7:line.index('" Nickname')])
					else:
						ssid = (line[line.index('ESSID') + 7:len(line) - 1])
				if ssid != "off":
					data['essid'] = ssid
			if "Access Point" in line:
				if "Sensitivity" in line:
					ap = line[line.index('Access Point') + 14:line.index(' Sensitivity')]
				else:
					ap = line[line.index('Access Point') + 14:len(line)]
				if ap is not None:
					data['accesspoint'] = ap
			if "Frequency" in line:
				frequency = line[line.index('Frequency') + 10:line.index(' GHz')]
				if frequency is not None:
					data['frequency'] = frequency
			if "Bit Rate" in line:
				if "kb" in line:
					br = line[line.index('Bit Rate') + 9:line.index(' kb/s')]
				elif "Gb" in line:
					br = line[line.index('Bit Rate') + 9:line.index(' Gb/s')]
				else:
					br = line[line.index('Bit Rate') + 9:line.index(' Mb/s')]
				if br is not None:
					data['bitrate'] = br
		if ssid is not None and ssid != "off":
			# Associated: scan again to pick up quality/signal/channel data.
			scanresults = list(Cell.all(iface))
			aps = {}
			for cell in scanresults:
				aps[cell.ssid] = {
					'active': True,
					'bssid': cell.ssid,
					'essid': cell.ssid,
					'channel': cell.channel,
					'encrypted': cell.encrypted,
					'encryption_type': cell.encryption_type if cell.encrypted else "none",
					'iface': iface,
					'maxrate': cell.bitrates,
					'mode': cell.mode,
					'quality': cell.quality,
					'signal': cell.signal,
					'frequency': cell.frequency,
					'frequency_norm': cell.frequency_norm,
					'address': cell.address,
					'noise': cell.noise,
					'pairwise_ciphers': cell.pairwise_ciphers,
					'authentication_suites': cell.authentication_suites,
				}
			# NOTE(review): aps[ssid] raises KeyError if the associated
			# network is missing from the scan -- preserved from the
			# original; confirm whether a guard is wanted.
			#data['bitrate'] = aps[ssid]["maxrate"]
			data['encryption'] = aps[ssid]["encrypted"]
			data['quality'] = aps[ssid]["quality"]
			data['signal'] = aps[ssid]["signal"]
			data['channel'] = aps[ssid]["channel"]
			data['encryption_type'] = aps[ssid]["encryption_type"]
			#data['frequency'] = aps[ssid]["frequency"]
			data['frequency_norm'] = aps[ssid]["frequency_norm"]
		self.wlaniface[iface] = data
		self.backupwlaniface = self.wlaniface
		if self.WlanConsole is not None:
			if not self.WlanConsole.appContainers:
				# All commands finished: deliver the data.
				print("[Wlan.py] self.wlaniface after loading:", self.wlaniface)
				if self.statusCallback is not None:
					self.statusCallback(True, self.wlaniface)
					self.statusCallback = None

	def getAdapterAttribute(self, iface, attribute):
		"""Return the cached iwconfig attribute for *iface*, or None."""
		self.iface = iface
		if self.iface in self.wlaniface:
			if attribute in self.wlaniface[self.iface]:
				return self.wlaniface[self.iface][attribute]
		return None
# Shared module-level Status instance.
iStatus = Status()
| openatv/enigma2 | lib/python/Plugins/SystemPlugins/WirelessLan/Wlan.py | Python | gpl-2.0 | 14,667 |
from saylua import app
from flask_wtf import FlaskForm
from saylua.utils.form import sl_validators, UserCheck
from saylua.utils.form.fields import SlField, SlTextAreaField
# Shared validator helper; its UsernameExists check is used on the
# recipient field below.
recipient_check = UserCheck()
class ConversationForm(FlaskForm):
    """Form for starting a new conversation: recipient, title and text."""
    # Recipient must be a present, non-blank, length-bounded, well-formed
    # username that belongs to an existing user.
    recipient = SlField('Recipient Name', [
        sl_validators.Required(),
        sl_validators.NotBlank(),
        sl_validators.Min(app.config['MIN_USERNAME_LENGTH']),
        sl_validators.Max(app.config['MAX_USERNAME_LENGTH']),
        sl_validators.Username(),
        recipient_check.UsernameExists])
    # Title and text: required, non-blank, at least 2 characters.
    title = SlField('Message Title', [
        sl_validators.Required(),
        sl_validators.NotBlank(),
        sl_validators.Min(2)])
    text = SlTextAreaField('Message Text', [
        sl_validators.Required(),
        sl_validators.NotBlank(),
        sl_validators.Min(2)])
class ConversationReplyForm(FlaskForm):
    """Form for replying to an existing conversation: just the reply text."""
    # Reply text: required, non-blank, at least 2 characters.
    text = SlTextAreaField('Reply', [
        sl_validators.Required(),
        sl_validators.NotBlank(),
        sl_validators.Min(2)])
| LikeMyBread/Saylua | saylua/modules/messages/forms.py | Python | agpl-3.0 | 1,013 |
"""
A Python Singleton mixin class that makes use of some of the ideas
found at http://c2.com/cgi/wiki?PythonSingleton. Just inherit
from it and you have a singleton. No code is required in
subclasses to create singleton behavior -- inheritance from
Singleton is all that is needed.
Singleton creation is threadsafe.
USAGE:
Just inherit from Singleton. If you need a constructor, include
an __init__() method in your class as you usually would. However,
if your class is S, you instantiate the singleton using S.getInstance()
instead of S(). Repeated calls to S.getInstance() return the
originally-created instance.
For example:
class S(Singleton):
def __init__(self, a, b=1):
pass
S1 = S.getInstance(1, b=3)
Most of the time, that's all you need to know. However, there are some
other useful behaviors. Read on for a full description:
1) Getting the singleton:
S.getInstance()
returns the instance of S. If none exists, it is created.
2) The usual idiom to construct an instance by calling the class, i.e.
S()
is disabled for the sake of clarity.
For one thing, the S() syntax means instantiation, but getInstance()
usually does not cause instantiation. So the S() syntax would
be misleading.
Because of that, if S() were allowed, a programmer who didn't
happen to notice the inheritance from Singleton (or who
wasn't fully aware of what a Singleton pattern
does) might think he was creating a new instance,
which could lead to very unexpected behavior.
So, overall, it is felt that it is better to make things clearer
by requiring the call of a class method that is defined in
Singleton. An attempt to instantiate via S() will result
in a SingletonException being raised.
3) Use __S.__init__() for instantiation processing,
since S.getInstance() runs S.__init__(), passing it the args it has received.
If no data needs to be passed in at instantiation time, you don't need S.__init__().
4) If S.__init__(.) requires parameters, include them ONLY in the
first call to S.getInstance(). If subsequent calls have arguments,
a SingletonException is raised by default.
If you find it more convenient for subsequent calls to be allowed to
have arguments, but for those arguments to be ignored, just include
'ignoreSubsequent = True' in your class definition, i.e.:
class S(Singleton):
ignoreSubsequent = True
def __init__(self, a, b=1):
pass
5) For testing, it is sometimes convenient for all existing singleton
instances to be forgotten, so that new instantiations can occur. For that
reason, a forgetAllSingletons() function is included. Just call
forgetAllSingletons()
and it is as if no earlier instantiations have occurred.
6) As an implementation detail, classes that inherit
from Singleton may not have their own __new__
methods. To make sure this requirement is followed,
an exception is raised if a Singleton subclass includes
__new__. This happens at subclass instantiation
time (by means of the MetaSingleton metaclass).
By Gary Robinson, grobinson@flyfi.com. No rights reserved --
placed in the public domain -- which is only reasonable considering
how much it owes to other people's code and ideas which are in the
public domain. The idea of using a metaclass came from
a comment on Gary's blog (see
http://www.garyrobinson.net/2004/03/python_singleto.html#comments).
Other improvements came from comments and email from other
people who saw it online. (See the blog post and comments
for further credits.)
Not guaranteed to be fit for any particular purpose. Use at your
own risk.
"""
import threading
class SingletonException(Exception):
    """Raised on any misuse of the Singleton protocol."""
# Registry of every Singleton subclass that currently holds an instance.
_stSingletons = set()
# Guards all reads/writes of _stSingletons.
_lockForSingletons = threading.RLock()

# Ensure only one instance of each Singleton class is created.  This is
# not bound to the individual Singleton class since we need to ensure
# that there is only one mutex for each Singleton class, which would
# require having a lock when setting up the Singleton class, which is
# what this is anyway.  So, when any Singleton is created, we lock this
# lock and then we don't need to lock it again for that class.
_lockForSingletonCreation = threading.RLock()
def _createSingletonInstance(cls, lstArgs, dctKwArgs):
_lockForSingletonCreation.acquire()
try:
if cls._isInstantiated(): # some other thread got here first
return
instance = cls.__new__(cls)
try:
instance.__init__(*lstArgs, **dctKwArgs)
except TypeError, e:
if e.message.find('__init__() takes') != -1:
raise SingletonException, 'If the singleton requires __init__ args, supply them on first call to getInstance().'
else:
raise
cls.cInstance = instance
_addSingleton(cls)
finally:
_lockForSingletonCreation.release()
def _addSingleton(cls):
    """Record cls in the module-wide registry of live singletons."""
    # RLock is a context manager, so acquire/release can be spelled
    # with a 'with' block.
    with _lockForSingletons:
        assert cls not in _stSingletons
        _stSingletons.add(cls)
def _removeSingleton(cls):
    """Drop cls from the singleton registry, if it is present."""
    with _lockForSingletons:
        # set.discard() is a no-op for a missing element, matching the
        # original 'if cls in ...: remove' check.
        _stSingletons.discard(cls)
def forgetAllSingletons():
    '''This is useful in tests, since it is hard to know which singletons need to be cleared to make a test work.'''
    _lockForSingletons.acquire()
    try:
        # Iterate over a copy: forgetting a singleton mutates the set.
        for cls in _stSingletons.copy():
            cls._forgetClassInstanceReferenceForTesting()

        # Might have created some Singletons in the process of tearing down.
        # Try one more time - there should be a limit to this.
        iNumSingletons = len(_stSingletons)
        if len(_stSingletons) > 0:
            for cls in _stSingletons.copy():
                cls._forgetClassInstanceReferenceForTesting()
                iNumSingletons -= 1
            # Each forget in the second pass must have removed exactly one
            # entry, otherwise teardown is re-creating singletons.
            assert iNumSingletons == len(_stSingletons), 'Added a singleton while destroying ' + str(cls)
        assert len(_stSingletons) == 0, _stSingletons
    finally:
        _lockForSingletons.release()
class MetaSingleton(type):
    """Metaclass for Singleton.

    Forbids subclasses from defining __new__ (the singleton machinery
    owns instance creation) and disables direct instantiation via S() so
    that getInstance() is the only way to obtain an instance.
    """
    def __new__(cls, strName, tupBases, dct):
        # dict.has_key() is deprecated (removed in Python 3); the 'in'
        # operator is the portable spelling.  Call-form raise works on
        # both Python 2 and 3, unlike the comma form.
        if '__new__' in dct:
            raise SingletonException('Can not override __new__ in a Singleton')
        return super(MetaSingleton, cls).__new__(cls, strName, tupBases, dct)

    def __call__(self, *lstArgs, **dictArgs):
        raise SingletonException('Singletons may only be instantiated through getInstance()')
class Singleton(object):
    """Mixin base class; inherit from it to get singleton behavior.

    Instantiate/retrieve via S.getInstance(); calling S() directly
    raises SingletonException (enforced by MetaSingleton).
    """
    __metaclass__ = MetaSingleton

    def getInstance(cls, *lstArgs, **dctKwArgs):
        """
        Call this to instantiate an instance or retrieve the existing instance.
        If the singleton requires args to be instantiated, include them the first
        time you call getInstance.
        """
        if cls._isInstantiated():
            # Later calls may not carry args unless the subclass opts in
            # with 'ignoreSubsequent = True'.
            if (lstArgs or dctKwArgs) and not hasattr(cls, 'ignoreSubsequent'):
                raise SingletonException, 'Singleton already instantiated, but getInstance() called with args.'
        else:
            _createSingletonInstance(cls, lstArgs, dctKwArgs)

        return cls.cInstance
    # Pre-decorator classmethod wiring (this file targets old Pythons).
    getInstance = classmethod(getInstance)

    def _isInstantiated(cls):
        # Don't use hasattr(cls, 'cInstance'), because that screws things up if there is a singleton that
        # extends another singleton.  hasattr looks in the base class if it doesn't find in subclass.
        return 'cInstance' in cls.__dict__
    _isInstantiated = classmethod(_isInstantiated)

    # This can be handy for public use also
    isInstantiated = _isInstantiated

    def _forgetClassInstanceReferenceForTesting(cls):
        """
        This is designed for convenience in testing -- sometimes you
        want to get rid of a singleton during test code to see what
        happens when you call getInstance() under a new situation.

        To really delete the object, all external references to it
        also need to be deleted.
        """
        try:
            if hasattr(cls.cInstance, '_prepareToForgetSingleton'):
                # tell instance to release anything it might be holding onto.
                cls.cInstance._prepareToForgetSingleton()
            del cls.cInstance
            _removeSingleton(cls)
        except AttributeError:
            # run up the chain of base classes until we find the one that has the instance
            # and then delete it there
            for baseClass in cls.__bases__:
                if issubclass(baseClass, Singleton):
                    baseClass._forgetClassInstanceReferenceForTesting()
    _forgetClassInstanceReferenceForTesting = classmethod(_forgetClassInstanceReferenceForTesting)
if __name__ == '__main__':
    # Self-test suite; run this module directly to execute it.
    import unittest
    import time

    class singletonmixin_Public_TestCase(unittest.TestCase):
        def testReturnsSameObject(self):
            """
            Demonstrates normal use -- just call getInstance and it returns a singleton instance
            """

            class A(Singleton):
                def __init__(self):
                    super(A, self).__init__()

            a1 = A.getInstance()
            a2 = A.getInstance()
            # NOTE(review): assertEquals is a deprecated alias of
            # assertEqual; kept here as the file targets old Pythons.
            self.assertEquals(id(a1), id(a2))

        def testInstantiateWithMultiArgConstructor(self):
            """
            If the singleton needs args to construct, include them in the first
            call to get instances.
            """

            class B(Singleton):
                def __init__(self, arg1, arg2):
                    super(B, self).__init__()
                    self.arg1 = arg1
                    self.arg2 = arg2

            b1 = B.getInstance('arg1 value', 'arg2 value')
            b2 = B.getInstance()
            self.assertEquals(b1.arg1, 'arg1 value')
            self.assertEquals(b1.arg2, 'arg2 value')
            self.assertEquals(id(b1), id(b2))

        def testInstantiateWithKeywordArg(self):

            class B(Singleton):
                def __init__(self, arg1=5):
                    super(B, self).__init__()
                    self.arg1 = arg1

            b1 = B.getInstance('arg1 value')
            b2 = B.getInstance()
            self.assertEquals(b1.arg1, 'arg1 value')
            self.assertEquals(id(b1), id(b2))

        def testTryToInstantiateWithoutNeededArgs(self):

            class B(Singleton):
                def __init__(self, arg1, arg2):
                    super(B, self).__init__()
                    self.arg1 = arg1
                    self.arg2 = arg2

            self.assertRaises(SingletonException, B.getInstance)

        def testPassTypeErrorIfAllArgsThere(self):
            """
            Make sure the test for capturing missing args doesn't interfere with a normal TypeError.
            """
            class B(Singleton):
                def __init__(self, arg1, arg2):
                    super(B, self).__init__()
                    self.arg1 = arg1
                    self.arg2 = arg2
                    raise TypeError, 'some type error'

            self.assertRaises(TypeError, B.getInstance, 1, 2)

        def testTryToInstantiateWithoutGetInstance(self):
            """
            Demonstrates that singletons can ONLY be instantiated through
            getInstance, as long as they call Singleton.__init__ during construction.

            If this check is not required, you don't need to call Singleton.__init__().
            """

            class A(Singleton):
                def __init__(self):
                    super(A, self).__init__()

            self.assertRaises(SingletonException, A)

        def testDontAllowNew(self):

            def instantiatedAnIllegalClass():
                # Defining __new__ must be rejected at class-creation time.
                class A(Singleton):
                    def __init__(self):
                        super(A, self).__init__()

                    def __new__(metaclass, strName, tupBases, dct):
                        return super(MetaSingleton, metaclass).__new__(metaclass, strName, tupBases, dct)

            self.assertRaises(SingletonException, instantiatedAnIllegalClass)

        def testDontAllowArgsAfterConstruction(self):
            class B(Singleton):
                def __init__(self, arg1, arg2):
                    super(B, self).__init__()
                    self.arg1 = arg1
                    self.arg2 = arg2

            B.getInstance('arg1 value', 'arg2 value')
            self.assertRaises(SingletonException, B, 'arg1 value', 'arg2 value')

        def test_forgetClassInstanceReferenceForTesting(self):
            class A(Singleton):
                def __init__(self):
                    super(A, self).__init__()
            class B(A):
                def __init__(self):
                    super(B, self).__init__()

            # check that changing the class after forgetting the instance produces
            # an instance of the new class
            a = A.getInstance()
            assert a.__class__.__name__ == 'A'
            A._forgetClassInstanceReferenceForTesting()
            b = B.getInstance()
            assert b.__class__.__name__ == 'B'

            # check that invoking the 'forget' on a subclass still deletes the instance
            B._forgetClassInstanceReferenceForTesting()
            a = A.getInstance()
            B._forgetClassInstanceReferenceForTesting()
            b = B.getInstance()
            assert b.__class__.__name__ == 'B'

        def test_forgetAllSingletons(self):
            # Should work if there are no singletons
            forgetAllSingletons()

            class A(Singleton):
                ciInitCount = 0
                def __init__(self):
                    super(A, self).__init__()
                    A.ciInitCount += 1

            A.getInstance()
            self.assertEqual(A.ciInitCount, 1)

            A.getInstance()
            self.assertEqual(A.ciInitCount, 1)

            forgetAllSingletons()
            A.getInstance()
            self.assertEqual(A.ciInitCount, 2)

        def test_threadedCreation(self):
            # Check that only one Singleton is created even if multiple
            # threads try at the same time.  If fails, would see assert in _addSingleton
            class Test_Singleton(Singleton):
                def __init__(self):
                    super(Test_Singleton, self).__init__()

            class Test_SingletonThread(threading.Thread):
                def __init__(self, fTargetTime):
                    super(Test_SingletonThread, self).__init__()
                    self._fTargetTime = fTargetTime
                    self._eException = None

                def run(self):
                    try:
                        # All threads wake at (roughly) the same wall-clock
                        # instant to maximize contention.
                        fSleepTime = self._fTargetTime - time.time()
                        if fSleepTime > 0:
                            time.sleep(fSleepTime)
                        Test_Singleton.getInstance()
                    except Exception, e:
                        self._eException = e

            fTargetTime = time.time() + 0.1
            lstThreads = []
            for _ in xrange(100):
                t = Test_SingletonThread(fTargetTime)
                t.start()
                lstThreads.append(t)

            eException = None
            for t in lstThreads:
                t.join()
                if t._eException and not eException:
                    eException = t._eException
            if eException:
                raise eException

        def testNoInit(self):
            """
            Demonstrates use with a class not defining __init__
            """

            class A(Singleton):
                pass

                #INTENTIONALLY UNDEFINED:
                #def __init__(self):
                #    super(A, self).__init__()

            A.getInstance() #Make sure no exception is raised

        def testMultipleGetInstancesWithArgs(self):

            class A(Singleton):

                ignoreSubsequent = True

                def __init__(self, a, b=1):
                    pass

            a1 = A.getInstance(1)
            a2 = A.getInstance(2) # ignores the second call because of ignoreSubsequent

            class B(Singleton):

                def __init__(self, a, b=1):
                    pass

            b1 = B.getInstance(1)
            self.assertRaises(SingletonException, B.getInstance, 2) # No ignoreSubsequent included

            class C(Singleton):

                def __init__(self, a=1):
                    pass

            c1 = C.getInstance(a=1)
            self.assertRaises(SingletonException, C.getInstance, a=2) # No ignoreSubsequent included

        def testInheritance(self):
            """
            It's sometimes said that you can't subclass a singleton (see, for instance,
            http://steve.yegge.googlepages.com/singleton-considered-stupid point e). This
            test shows that at least rudimentary subclassing works fine for us.
            """

            class A(Singleton):
                def setX(self, x):
                    self.x = x

                def setZ(self, z):
                    raise NotImplementedError

            class B(A):
                def setX(self, x):
                    self.x = -x

                def setY(self, y):
                    self.y = y

            a = A.getInstance()
            a.setX(5)
            b = B.getInstance()
            b.setX(5)
            b.setY(50)
            self.assertEqual((a.x, b.x, b.y), (5, -5, 50))
            self.assertRaises(AttributeError, eval, 'a.setY', {}, locals())
            self.assertRaises(NotImplementedError, b.setZ, 500)

    unittest.main()
| brentpayne/celery-geolocator | celery_geolocator/helpers/singleton.py | Python | mit | 17,689 |
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
url = "https://github.com/OctavianLee/Pywechat"
VERSION = "0.0.2"

# Read the pinned requirements once, relaxing '==' pins to '>=' floors.
# Building a real list (instead of passing map()) keeps this working on
# Python 3 where map() is lazy, and the context manager closes the file
# handle that the original left open.
with open("requirements.txt") as req_file:
    install_requires = [line.replace('==', '>=') for line in req_file.readlines()]

setup(
    name="pywechat",
    version=VERSION,
    license='MIT',
    description="A python SDK for the wechat public platform.",
    author="Octavian Lee",
    author_email="octavianlee1@gmail.com",
    url=url,
    long_description="A python SDK for the wechat public platform.",
    install_requires=install_requires,
    packages=find_packages(),
)
| OctavianLee/Pywechat | setup.py | Python | mit | 557 |
# Invert gray image
import cv2
from . import print_image
from . import plot_image
def invert(img, device, debug=None):
    """Inverts grayscale images.

    Inputs:
    img     = image object, grayscale
    device  = device number. Used to count steps in the pipeline
    debug   = None, print, or plot. Print = save to file, Plot = print to screen.

    Returns:
    device  = device number
    img_inv = inverted image

    :param img: numpy array
    :param device: int
    :param debug: str
    :return device: int
    :return img_inv: numpy array
    """
    device += 1
    inverted = cv2.bitwise_not(img)
    if debug == 'plot':
        plot_image(inverted, cmap='gray')
    elif debug == 'print':
        print_image(inverted, str(device) + '_invert.png')
    return device, inverted
| AntonSax/plantcv | plantcv/invert.py | Python | mit | 794 |
#
# otopi -- plugable installer
# Copyright (C) 2012-2013 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
"""otopi module."""
import sys
def _pythonModulesCompat():
    """Rename modules to match python3 names."""
    if sys.version_info[0] >= 3:
        import builtins
        # Python 3: restore the py2 'unicode' alias so shared code can
        # keep using it.
        setattr(builtins, 'unicode', str)
    else:
        import ConfigParser
        # Python 2: expose the py3-style module names.
        sys.modules['configparser'] = ConfigParser

        import __builtin__
        sys.modules['builtins'] = __builtin__

        class COMPAT_BlockingIOError(OSError):
            pass
        # BlockingIOError is a builtin only on python 3; install a
        # stand-in so code can always catch it.
        setattr(__builtin__, 'BlockingIOError', COMPAT_BlockingIOError)


# Apply the compatibility aliases as a module-import side effect.
_pythonModulesCompat()


# This package deliberately exports nothing.
__all__ = []
# vim: expandtab tabstop=4 shiftwidth=4
| alonbl/otopi | src/otopi/__init__.py | Python | lgpl-2.1 | 1,397 |
import rhinoscriptsyntax as rs
from math import*
# Alternate along the x axis: odd indices get a small sphere, even
# indices get a point.
for i in range(0, 20):
    if i % 2:
        rs.AddSphere([i, 0, 0], 0.3)
    else:
        rs.AddPoint([i, 0, 0])
| FRMlab/Jenny_Sabin_Workshop | Python2/2conditional_if else.py | Python | gpl-2.0 | 278 |
#
# Helpers related to setup.
#
import logging
import os
import sys
import shlex
try:
import json
except ImportError, e:
import simplejson as json
# Make the bundled nfsdirs action package importable before pulling in
# its helper modules.
sys.path.append(os.path.abspath('plugins/actions/nfsdirs'))

from hypnotoad.core import hypnofs
from base_classes import *

# Module-wide logger and filesystem helper shared by the classes below.
LOG = logging.getLogger('root')
FS = hypnofs.hypnofs()
class SetupHelper():
    """Helpers related to nfsdirs setup: model caching and mount checks."""

    def __init__(self, config):
        # State lives under <state_dir>/nfsdirs; max_diff_count bounds how
        # many user entries may change between runs before we abort.
        self.state_dir = config.get('Basic Options', 'state_dir') + "/nfsdirs"
        self.max_diff_count = config.getint(
            'Action Options', 'nfsdirs_max_diff_count')

    def collect_users(self, models):
        """ Merge all hypnotoad models into a single array of ScratchUsers."""
        users = []
        for plug_model in models:
            for m in plug_model:
                if 'user_entry' in m.keys():
                    user_model = m['user_entry']
                    user = ScratchUser(user_model['short_name_string'],
                                       user_model['user_id_integer'],
                                       user_model['group_id_integer'])
                    for c in user_model['compartment_access_array']:
                        # LOG.debug("Model, adding compartment `" + c + "` " + \
                        #    "to user `" + user.short_name + "`.")
                        user.compartments.append(ScratchCompartment(c))
                    users.append(user)
        return users

    def state_cache_update(self, models):
        """
        If a cache exists, check differences and update the cache if the
        differences are not too great. Otherwise quietly create a cache if one
        does not exist already.
        """
        cache_file_name = self.state_dir + "/" + "model.json"

        # NOTE(review): FS.makedirs appears to return a tuple whose second
        # element is an error flag -- confirm against hypnofs.
        if FS.makedirs(self.state_dir)[1] is True:
            LOG.error("Could not create a state directory at `" +
                      self.state_dir + "'.")
            sys.exit()

        def save_as_json(obj, dest_file_name):
            """Serializes obj to json and saves to a file at dest."""
            LOG.debug("Saving to json at: " + dest_file_name)
            j = json.dumps(obj)
            f = open(dest_file_name, 'w')
            f.write(j + "\n")
            f.close()

        def json_to_models(json_file_name):
            # Parse a previously cached model file back into Python objects.
            LOG.debug("Reading in json file at: " + json_file_name)
            f = open(json_file_name)
            return json.load(f)

        # NOTE(review): 'is' against a freshly built tuple literal is always
        # False in CPython, so this branch can never run and the cache is
        # unconditionally rewritten below.  The comparison was presumably
        # meant to be '==' (and the intended tuple value should be checked
        # against FS.isfile's return contract).
        if FS.isfile(cache_file_name) is (False, False):
            old_models = json_to_models(cache_file_name)
            old_userlist, new_userlist = map(
                self.collect_users, [old_models, models])

            model_diff_count = len(list(set(old_userlist) - set(new_userlist)))

            if model_diff_count > self.max_diff_count:
                LOG.error("Too many objects in the model (ldap) have changed." +
                          "Since the number of differences `" + str(model_diff_count) +
                          "' is greater than the configuration limit of `" +
                          str(self.max_diff_count) + "', we'll exit now. " +
                          "If this is intended, please change the model difference limit " +
                          "in the configuration or remove the panlinks model cache " +
                          " at `" + str(cache_file_name) + "' so it can be automatically recreated.")
                raise UserWarning
            else:
                # Overwrite the old cache.
                LOG.debug(
                    "Verified existing model as sane. We can safely continue.")
                save_as_json(models, cache_file_name)
        else:
            # Create a new cache if one does not exist.
            save_as_json(models, cache_file_name)

    def find_mount_points(self):
        """
        Check if all panfs mounts specified in fstab are mounted. Display a
        warning if not. Return mounted panfs mount points.
        """

        def tab_check(f):
            # Strip comments, then collect the mount point (second field)
            # of every line mentioning 'nfs'.
            m = []
            for l in f.readlines():
                i = l.find('#')
                if i != -1:
                    l = l[:i]
                l = l.rstrip()
                if l.find('nfs') != -1:
                    m.append(l.split()[1])
            return set(m)

        # NOTE(review): these open() handles are never closed.
        fstab_mounts, mtab_mounts = map(
            tab_check, [open('/etc/fstab'), open('/etc/mtab')])

        if len(fstab_mounts & mtab_mounts) == len(fstab_mounts):
            LOG.info('All detected NFS mounts are mounted.')
        else:
            LOG.warning('There are NFS mounts that are NOT mounted.')

        LOG.info("Found NFS mount points: " + str(mtab_mounts))
        return mtab_mounts
| hpc/hypnotoad | hypnotoad/plugins/actions/nfsdirs/setup_helper.py | Python | bsd-3-clause | 4,679 |
# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
"""Table management module."""
__docformat__ = "restructuredtext en"
class Table(object):
"""Table defines a data table with column and row names.
inv:
len(self.data) <= len(self.row_names)
forall(self.data, lambda x: len(x) <= len(self.col_names))
"""
def __init__(self, default_value=0, col_names=None, row_names=None):
self.col_names = []
self.row_names = []
self.data = []
self.default_value = default_value
if col_names:
self.create_columns(col_names)
if row_names:
self.create_rows(row_names)
def _next_row_name(self):
return 'row%s' % (len(self.row_names)+1)
def __iter__(self):
return iter(self.data)
def __eq__(self, other):
if other is None:
return False
else:
return list(self) == list(other)
__hash__ = object.__hash__
def __ne__(self, other):
return not self == other
def __len__(self):
return len(self.row_names)
## Rows / Columns creation #################################################
def create_rows(self, row_names):
"""Appends row_names to the list of existing rows
"""
self.row_names.extend(row_names)
for row_name in row_names:
self.data.append([self.default_value]*len(self.col_names))
def create_columns(self, col_names):
"""Appends col_names to the list of existing columns
"""
for col_name in col_names:
self.create_column(col_name)
def create_row(self, row_name=None):
"""Creates a rowname to the row_names list
"""
row_name = row_name or self._next_row_name()
self.row_names.append(row_name)
self.data.append([self.default_value]*len(self.col_names))
def create_column(self, col_name):
"""Creates a colname to the col_names list
"""
self.col_names.append(col_name)
for row in self.data:
row.append(self.default_value)
## Sort by column ##########################################################
def sort_by_column_id(self, col_id, method = 'asc'):
"""Sorts the table (in-place) according to data stored in col_id
"""
try:
col_index = self.col_names.index(col_id)
self.sort_by_column_index(col_index, method)
except ValueError:
raise KeyError("Col (%s) not found in table" % (col_id))
def sort_by_column_index(self, col_index, method = 'asc'):
"""Sorts the table 'in-place' according to data stored in col_index
method should be in ('asc', 'desc')
"""
sort_list = sorted([(row[col_index], row, row_name)
for row, row_name in zip(self.data, self.row_names)])
# Sorting sort_list will sort according to col_index
# If we want reverse sort, then reverse list
if method.lower() == 'desc':
sort_list.reverse()
# Rebuild data / row names
self.data = []
self.row_names = []
for val, row, row_name in sort_list:
self.data.append(row)
self.row_names.append(row_name)
def groupby(self, colname, *others):
"""builds indexes of data
:returns: nested dictionaries pointing to actual rows
"""
groups = {}
colnames = (colname,) + others
col_indexes = [self.col_names.index(col_id) for col_id in colnames]
for row in self.data:
ptr = groups
for col_index in col_indexes[:-1]:
ptr = ptr.setdefault(row[col_index], {})
ptr = ptr.setdefault(row[col_indexes[-1]],
Table(default_value=self.default_value,
col_names=self.col_names))
ptr.append_row(tuple(row))
return groups
def select(self, colname, value):
grouped = self.groupby(colname)
try:
return grouped[value]
except KeyError:
return []
def remove(self, colname, value):
col_index = self.col_names.index(colname)
for row in self.data[:]:
if row[col_index] == value:
self.data.remove(row)
## The 'setter' part #######################################################
def set_cell(self, row_index, col_index, data):
"""sets value of cell 'row_indew', 'col_index' to data
"""
self.data[row_index][col_index] = data
def set_cell_by_ids(self, row_id, col_id, data):
"""sets value of cell mapped by row_id and col_id to data
Raises a KeyError if row_id or col_id are not found in the table
"""
try:
row_index = self.row_names.index(row_id)
except ValueError:
raise KeyError("Row (%s) not found in table" % (row_id))
else:
try:
col_index = self.col_names.index(col_id)
self.data[row_index][col_index] = data
except ValueError:
raise KeyError("Column (%s) not found in table" % (col_id))
def set_row(self, row_index, row_data):
"""sets the 'row_index' row
pre:
type(row_data) == types.ListType
len(row_data) == len(self.col_names)
"""
self.data[row_index] = row_data
def set_row_by_id(self, row_id, row_data):
"""sets the 'row_id' column
pre:
type(row_data) == types.ListType
len(row_data) == len(self.row_names)
Raises a KeyError if row_id is not found
"""
try:
row_index = self.row_names.index(row_id)
self.set_row(row_index, row_data)
except ValueError:
raise KeyError('Row (%s) not found in table' % (row_id))
def append_row(self, row_data, row_name=None):
"""Appends a row to the table
pre:
type(row_data) == types.ListType
len(row_data) == len(self.col_names)
"""
row_name = row_name or self._next_row_name()
self.row_names.append(row_name)
self.data.append(row_data)
return len(self.data) - 1
def insert_row(self, index, row_data, row_name=None):
"""Appends row_data before 'index' in the table. To make 'insert'
behave like 'list.insert', inserting in an out of range index will
insert row_data to the end of the list
pre:
type(row_data) == types.ListType
len(row_data) == len(self.col_names)
"""
row_name = row_name or self._next_row_name()
self.row_names.insert(index, row_name)
self.data.insert(index, row_data)
def delete_row(self, index):
"""Deletes the 'index' row in the table, and returns it.
Raises an IndexError if index is out of range
"""
self.row_names.pop(index)
return self.data.pop(index)
def delete_row_by_id(self, row_id):
"""Deletes the 'row_id' row in the table.
Raises a KeyError if row_id was not found.
"""
try:
row_index = self.row_names.index(row_id)
self.delete_row(row_index)
except ValueError:
raise KeyError('Row (%s) not found in table' % (row_id))
def set_column(self, col_index, col_data):
"""sets the 'col_index' column
pre:
type(col_data) == types.ListType
len(col_data) == len(self.row_names)
"""
for row_index, cell_data in enumerate(col_data):
self.data[row_index][col_index] = cell_data
def set_column_by_id(self, col_id, col_data):
"""sets the 'col_id' column
pre:
type(col_data) == types.ListType
len(col_data) == len(self.col_names)
Raises a KeyError if col_id is not found
"""
try:
col_index = self.col_names.index(col_id)
self.set_column(col_index, col_data)
except ValueError:
raise KeyError('Column (%s) not found in table' % (col_id))
def append_column(self, col_data, col_name):
"""Appends the 'col_index' column
pre:
type(col_data) == types.ListType
len(col_data) == len(self.row_names)
"""
self.col_names.append(col_name)
for row_index, cell_data in enumerate(col_data):
self.data[row_index].append(cell_data)
def insert_column(self, index, col_data, col_name):
"""Appends col_data before 'index' in the table. To make 'insert'
behave like 'list.insert', inserting in an out of range index will
insert col_data to the end of the list
pre:
type(col_data) == types.ListType
len(col_data) == len(self.row_names)
"""
self.col_names.insert(index, col_name)
for row_index, cell_data in enumerate(col_data):
self.data[row_index].insert(index, cell_data)
def delete_column(self, index):
"""Deletes the 'index' column in the table, and returns it.
Raises an IndexError if index is out of range
"""
self.col_names.pop(index)
return [row.pop(index) for row in self.data]
def delete_column_by_id(self, col_id):
"""Deletes the 'col_id' col in the table.
Raises a KeyError if col_id was not found.
"""
try:
col_index = self.col_names.index(col_id)
self.delete_column(col_index)
except ValueError:
raise KeyError('Column (%s) not found in table' % (col_id))
## The 'getter' part #######################################################
def get_shape(self):
"""Returns a tuple which represents the table's shape
"""
return len(self.row_names), len(self.col_names)
shape = property(get_shape)
def __getitem__(self, indices):
"""provided for convenience"""
rows, multirows = None, False
cols, multicols = None, False
if isinstance(indices, tuple):
rows = indices[0]
if len(indices) > 1:
cols = indices[1]
else:
rows = indices
# define row slice
if isinstance(rows, str):
try:
rows = self.row_names.index(rows)
except ValueError:
raise KeyError("Row (%s) not found in table" % (rows))
if isinstance(rows, int):
rows = slice(rows, rows+1)
multirows = False
else:
rows = slice(None)
multirows = True
# define col slice
if isinstance(cols, str):
try:
cols = self.col_names.index(cols)
except ValueError:
raise KeyError("Column (%s) not found in table" % (cols))
if isinstance(cols, int):
cols = slice(cols, cols+1)
multicols = False
else:
cols = slice(None)
multicols = True
# get sub-table
tab = Table()
tab.default_value = self.default_value
tab.create_rows(self.row_names[rows])
tab.create_columns(self.col_names[cols])
for idx, row in enumerate(self.data[rows]):
tab.set_row(idx, row[cols])
if multirows :
if multicols:
return tab
else:
return [item[0] for item in tab.data]
else:
if multicols:
return tab.data[0]
else:
return tab.data[0][0]
def get_cell_by_ids(self, row_id, col_id):
"""Returns the element at [row_id][col_id]
"""
try:
row_index = self.row_names.index(row_id)
except ValueError:
raise KeyError("Row (%s) not found in table" % (row_id))
else:
try:
col_index = self.col_names.index(col_id)
except ValueError:
raise KeyError("Column (%s) not found in table" % (col_id))
return self.data[row_index][col_index]
def get_row_by_id(self, row_id):
"""Returns the 'row_id' row
"""
try:
row_index = self.row_names.index(row_id)
except ValueError:
raise KeyError("Row (%s) not found in table" % (row_id))
return self.data[row_index]
def get_column_by_id(self, col_id, distinct=False):
"""Returns the 'col_id' col
"""
try:
col_index = self.col_names.index(col_id)
except ValueError:
raise KeyError("Column (%s) not found in table" % (col_id))
return self.get_column(col_index, distinct)
def get_columns(self):
"""Returns all the columns in the table
"""
return [self[:, index] for index in range(len(self.col_names))]
def get_column(self, col_index, distinct=False):
"""get a column by index"""
col = [row[col_index] for row in self.data]
if distinct:
col = list(set(col))
return col
def apply_stylesheet(self, stylesheet):
"""Applies the stylesheet to this table
"""
for instruction in stylesheet.instructions:
eval(instruction)
def transpose(self):
"""Keeps the self object intact, and returns the transposed (rotated)
table.
"""
transposed = Table()
transposed.create_rows(self.col_names)
transposed.create_columns(self.row_names)
for col_index, column in enumerate(self.get_columns()):
transposed.set_row(col_index, column)
return transposed
def pprint(self):
"""returns a string representing the table in a pretty
printed 'text' format.
"""
# The maximum row name (to know the start_index of the first col)
max_row_name = 0
for row_name in self.row_names:
if len(row_name) > max_row_name:
max_row_name = len(row_name)
col_start = max_row_name + 5
lines = []
# Build the 'first' line <=> the col_names one
# The first cell <=> an empty one
col_names_line = [' '*col_start]
for col_name in self.col_names:
col_names_line.append(col_name + ' '*5)
lines.append('|' + '|'.join(col_names_line) + '|')
max_line_length = len(lines[0])
# Build the table
for row_index, row in enumerate(self.data):
line = []
# First, build the row_name's cell
row_name = self.row_names[row_index]
line.append(row_name + ' '*(col_start-len(row_name)))
# Then, build all the table's cell for this line.
for col_index, cell in enumerate(row):
col_name_length = len(self.col_names[col_index]) + 5
data = str(cell)
line.append(data + ' '*(col_name_length - len(data)))
lines.append('|' + '|'.join(line) + '|')
if len(lines[-1]) > max_line_length:
max_line_length = len(lines[-1])
# Wrap the table with '-' to make a frame
lines.insert(0, '-'*max_line_length)
lines.append('-'*max_line_length)
return '\n'.join(lines)
    def __repr__(self):
        """Delegates the representation to the underlying data rows."""
        return repr(self.data)
def as_text(self):
data = []
# We must convert cells into strings before joining them
for row in self.data:
data.append([str(cell) for cell in row])
lines = ['\t'.join(row) for row in data]
return '\n'.join(lines)
class TableStyle:
    """Defines a table's style: per-column size, alignment and unit.

    Every property dict is keyed by column name; the special key
    '__row_column__' addresses the left-most column (the one holding
    the row names, which has no name of its own).
    """
    def __init__(self, table):
        self._table = table
        self.size = dict([(col_name, '1*') for col_name in table.col_names])
        # __row_column__ is a special key to define the first column which
        # actually has no name (<=> left most column <=> row names column)
        self.size['__row_column__'] = '1*'
        self.alignment = dict([(col_name, 'right')
                               for col_name in table.col_names])
        self.alignment['__row_column__'] = 'right'
        # We shouldn't have to create an entry for
        # the 1st col (the row_column one)
        self.units = dict([(col_name, '') for col_name in table.col_names])
        self.units['__row_column__'] = ''

    def _col_id_from_index(self, col_index):
        """Maps a column index to its property key.

        Index 0 is the special '__row_column__' column; index N (N >= 1)
        is the (N-1)th named column.
        """
        if col_index == 0:
            return '__row_column__'
        return self._table.col_names[col_index - 1]

    # XXX FIXME : params order should be reversed for all set() methods
    def set_size(self, value, col_id):
        """sets the size of the specified col_id to value
        """
        self.size[col_id] = value

    def set_size_by_index(self, value, col_index):
        """Allows to set the size according to the column index rather than
        using the column's id.
        BE CAREFUL : the '0' column is the '__row_column__' one !
        """
        self.size[self._col_id_from_index(col_index)] = value

    def set_alignment(self, value, col_id):
        """sets the alignment of the specified col_id to value
        """
        self.alignment[col_id] = value

    def set_alignment_by_index(self, value, col_index):
        """Allows to set the alignment according to the column index rather
        than using the column's id.
        BE CAREFUL : the '0' column is the '__row_column__' one !
        """
        self.alignment[self._col_id_from_index(col_index)] = value

    def set_unit(self, value, col_id):
        """sets the unit of the specified col_id to value
        """
        self.units[col_id] = value

    def set_unit_by_index(self, value, col_index):
        """Allows to set the unit according to the column index rather than
        using the column's id.
        BE CAREFUL : the '0' column is the '__row_column__' one !
        (Note that in the 'unit' case, you shouldn't have to set a unit
        for the 1st column (the __row__column__ one))
        """
        self.units[self._col_id_from_index(col_index)] = value

    def get_size(self, col_id):
        """Returns the size of the specified col_id
        """
        return self.size[col_id]

    def get_size_by_index(self, col_index):
        """Allows to get the size according to the column index rather than
        using the column's id.
        BE CAREFUL : the '0' column is the '__row_column__' one !
        """
        return self.size[self._col_id_from_index(col_index)]

    def get_alignment(self, col_id):
        """Returns the alignment of the specified col_id
        """
        return self.alignment[col_id]

    def get_alignment_by_index(self, col_index):
        """Allows to get the alignment according to the column index rather
        than using the column's id.
        BE CAREFUL : the '0' column is the '__row_column__' one !
        """
        return self.alignment[self._col_id_from_index(col_index)]

    def get_unit(self, col_id):
        """Returns the unit of the specified col_id
        """
        return self.units[col_id]

    def get_unit_by_index(self, col_index):
        """Allows to get the unit according to the column index rather than
        using the column's id.
        BE CAREFUL : the '0' column is the '__row_column__' one !
        """
        return self.units[self._col_id_from_index(col_index)]
import re
# Matches a cell reference of the form "<row>_<col>" used in stylesheet rules.
CELL_PROG = re.compile("([0-9]+)_([0-9]+)")
class TableStyleSheet:
    """A simple Table stylesheet
    Rules are expressions where cells are defined by the row_index
    and col_index separated by an underscore ('_').
    For example, suppose you want to say that the (2,5) cell must be
    the sum of its two preceding cells in the row, you would create
    the following rule :
        2_5 = 2_3 + 2_4
    You can also use all the math.* operations you want. For example:
        2_5 = sqrt(2_3**2 + 2_4**2)
    """
    # Matches a "<row>_<col>" cell reference inside a rule.
    _CELL_RE = re.compile("([0-9]+)_([0-9]+)")

    def __init__(self, rules=None):
        rules = rules or []
        self.rules = []          # the accepted rule strings
        self.instructions = []   # the matching compiled code objects
        for rule in rules:
            self.add_rule(rule)

    def add_rule(self, rule):
        """Adds a rule to the stylesheet rules
        """
        try:
            # Rewrite each "r_c" reference into a self.data[r][c] access;
            # 'from math import *' makes math functions usable in rules.
            source_code = ['from math import *']
            source_code.append(self._CELL_RE.sub(r'self.data[\1][\2]', rule))
            self.instructions.append(compile('\n'.join(source_code),
                                             'table.py', 'exec'))
            self.rules.append(rule)
        except SyntaxError:
            # Report and skip rules that do not compile.
            # (print() is valid in both Python 2 and 3 for a single arg)
            print("Bad Stylesheet Rule : %s [skipped]" % rule)

    def _add_aggregate_rule(self, dest_cell, cell_list, divisor=None):
        """Builds and adds a sum rule over cell_list, divided by divisor
        for the average variants (divisor=None means plain sum).
        """
        body = '+'.join(cell_list)
        if divisor is None:
            rule = '%d_%d=' % dest_cell + body
        else:
            rule = '%d_%d=' % dest_cell + '(' + body + ')/%f' % divisor
        self.add_rule(rule)

    def add_rowsum_rule(self, dest_cell, row_index, start_col, end_col):
        """Creates and adds a rule to sum over the row at row_index from
        start_col to end_col.
        dest_cell is a tuple of two elements (x,y) of the destination cell
        No check is done for indexes ranges.
        pre:
            start_col >= 0
            end_col > start_col
        """
        cell_list = ['%d_%d' % (row_index, index)
                     for index in range(start_col, end_col + 1)]
        self._add_aggregate_rule(dest_cell, cell_list)

    def add_rowavg_rule(self, dest_cell, row_index, start_col, end_col):
        """Creates and adds a rule to make the row average (from start_col
        to end_col)
        dest_cell is a tuple of two elements (x,y) of the destination cell
        No check is done for indexes ranges.
        pre:
            start_col >= 0
            end_col > start_col
        """
        cell_list = ['%d_%d' % (row_index, index)
                     for index in range(start_col, end_col + 1)]
        self._add_aggregate_rule(dest_cell, cell_list,
                                 end_col - start_col + 1)

    def add_colsum_rule(self, dest_cell, col_index, start_row, end_row):
        """Creates and adds a rule to sum over the col at col_index from
        start_row to end_row.
        dest_cell is a tuple of two elements (x,y) of the destination cell
        No check is done for indexes ranges.
        pre:
            start_row >= 0
            end_row > start_row
        """
        cell_list = ['%d_%d' % (index, col_index)
                     for index in range(start_row, end_row + 1)]
        self._add_aggregate_rule(dest_cell, cell_list)

    def add_colavg_rule(self, dest_cell, col_index, start_row, end_row):
        """Creates and adds a rule to make the col average (from start_row
        to end_row)
        dest_cell is a tuple of two elements (x,y) of the destination cell
        No check is done for indexes ranges.
        pre:
            start_row >= 0
            end_row > start_row
        """
        cell_list = ['%d_%d' % (index, col_index)
                     for index in range(start_row, end_row + 1)]
        self._add_aggregate_rule(dest_cell, cell_list,
                                 end_row - start_row + 1)
class TableCellRenderer:
    """Defines a simple text renderer
    """
    def __init__(self, **properties):
        """keywords should be properties with an associated boolean as value.
        For example :
            renderer = TableCellRenderer(units = True, alignment = False)
        An unspecified property will have a 'False' value by default.
        Possible properties are :
            alignment, unit
        """
        self.properties = properties

    def render_cell(self, cell_coord, table, table_style):
        """Renders the cell at 'cell_coord' in the table, using table_style
        """
        row_index, col_index = cell_coord
        cell_value = table.data[row_index][col_index]
        # Style indexes are shifted by one: style index 0 addresses the
        # row-names column, so data column N uses style index N+1.
        final_content = self._make_cell_content(cell_value,
                                                table_style, col_index + 1)
        return self._render_cell_content(final_content,
                                         table_style, col_index + 1)

    def render_row_cell(self, row_name, table, table_style):
        """Renders the cell for 'row_id' row
        """
        # The row-name column is style index 0.
        return self._render_cell_content(row_name, table_style, 0)

    def render_col_cell(self, col_name, table, table_style):
        """Renders the cell for 'col_id' row
        """
        col_index = table.col_names.index(col_name)
        return self._render_cell_content(col_name, table_style, col_index + 1)

    def _render_cell_content(self, content, table_style, col_index):
        """Makes the appropriate rendering for this cell content.
        Rendering properties will be searched using the
        *table_style.get_xxx_by_index(col_index)' methods
        **This method should be overridden in the derived renderer classes.**
        """
        return content

    def _make_cell_content(self, cell_content, table_style, col_index):
        """Makes the cell content (adds decoration data, like units for
        example)
        """
        # 'skip_zero' holds the replacement text for zero-valued cells;
        # a falsy/missing value means zeros are rendered normally.
        # (idiom: dict.get replaces the 'in'-check / try-except pair and no
        # longer swallows KeyErrors raised inside _add_unit)
        replacement_char = self.properties.get('skip_zero', 0)
        if replacement_char and cell_content == 0:
            return replacement_char
        if self.properties.get('units'):
            return self._add_unit(cell_content, table_style, col_index)
        return cell_content

    def _add_unit(self, cell_content, table_style, col_index):
        """Adds unit to the cell_content if needed
        """
        unit = table_style.get_unit_by_index(col_index)
        return str(cell_content) + " " + unit
class DocbookRenderer(TableCellRenderer):
    """Defines how to render a cell for a docboook table
    """
    def define_col_header(self, col_index, table_style):
        """Computes the colspec element according to the style
        """
        size = table_style.get_size_by_index(col_index)
        return '<colspec colname="c%d" colwidth="%s"/>\n' % \
               (col_index, size)

    def _render_cell_content(self, cell_content, table_style, col_index):
        """Makes the appropriate rendering for this cell content.
        Rendering properties will be searched using the
        table_style.get_xxx_by_index(col_index)' methods.

        Bug fix: the original implicitly returned None when the
        'alignment' property existed but was falsy; a plain <entry>
        is now always produced in that case.
        """
        if self.properties.get('alignment'):
            alignment = table_style.get_alignment_by_index(col_index)
            return "<entry align='%s'>%s</entry>\n" % \
                   (alignment, cell_content)
        # Default (unaligned) entry.
        return "<entry>%s</entry>\n" % cell_content
class TableWriter:
    """Base class for table writers: holds the output stream, the table,
    its style and the rendering properties.
    """
    def __init__(self, stream, table, style, **properties):
        self._stream = stream
        self._table = table
        # Fall back to a default style when none is supplied.
        self.style = style if style else TableStyle(table)
        self.properties = properties
        self.renderer = None

    def set_style(self, style):
        """Attaches *style* as the table's associated style."""
        self.style = style

    def set_renderer(self, renderer):
        """Chooses the cell renderer to use."""
        self.renderer = renderer

    def update_properties(self, **properties):
        """Merges *properties* into the current rendering properties."""
        self.properties.update(properties)

    def write_table(self, title=""):
        """Writes the table; concrete writers must implement this."""
        raise NotImplementedError("write_table must be implemented !")
class DocbookTableWriter(TableWriter):
    """Defines an implementation of TableWriter to write a table in Docbook
    """
    def _write_headers(self):
        """Writes col headers (colspec elements plus the header row)
        """
        # Define col_headers (colspec elements); one extra column for the
        # unnamed row-names column.
        for col_index in range(len(self._table.col_names)+1):
            self._stream.write(self.renderer.define_col_header(col_index,
                                                               self.style))
        self._stream.write("<thead>\n<row>\n")
        # The first header entry is empty: it tops the row-names column.
        self._stream.write('<entry></entry>\n')
        for col_name in self._table.col_names:
            self._stream.write(self.renderer.render_col_cell(
                col_name, self._table,
                self.style))
        self._stream.write("</row>\n</thead>\n")

    def _write_body(self):
        """Writes the table body
        """
        self._stream.write('<tbody>\n')
        for row_index, row in enumerate(self._table.data):
            self._stream.write('<row>\n')
            row_name = self._table.row_names[row_index]
            # Write the first entry (row_name)
            self._stream.write(self.renderer.render_row_cell(row_name,
                                                             self._table,
                                                             self.style))
            for col_index, cell in enumerate(row):
                self._stream.write(self.renderer.render_cell(
                    (row_index, col_index),
                    self._table, self.style))
            self._stream.write('</row>\n')
        self._stream.write('</tbody>\n')

    def write_table(self, title=""):
        """Writes the table (title, tgroup wrapper, headers, then body)
        """
        # Bug fix: the title element used to contain a stray '>' after the
        # title text ('<title>%s></title>'), producing malformed XML.
        self._stream.write('<table>\n<title>%s</title>\n' % (title))
        self._stream.write(
            '<tgroup cols="%d" align="left" colsep="1" rowsep="1">\n'%
            (len(self._table.col_names)+1))
        self._write_headers()
        self._write_body()
        self._stream.write('</tgroup>\n</table>\n')
| esparta/logilab_common3 | table.py | Python | gpl-2.0 | 31,337 |
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
    # List detail page; the captured group is the list's primary key.
    url(r'^(\d+)/$', 'lists.views.view_list', name='view_list'),
    # Create a new list from the home-page form.
    url(r'^new$', 'lists.views.new_list', name='new_list'),
    # Add an item to the list with the captured primary key.
    url(r'^(\d+)/add_item$', 'lists.views.add_item', name='add_item'),
    # NOTE(review): patterns() and dotted-string view references are
    # Django < 1.10 idioms -- presumably this project pins an old Django;
    # confirm before upgrading.
)
| carlosgoce/superlists | lists/urls.py | Python | mit | 278 |
# coding=utf-8
# !/usr/bin/env python
# !/usr/bin/python
# author : decaywood
import sys
import os
import re
import fileinput
def cur_file_dir():
    """Return the directory of the running script, derived from sys.path[0].

    Returns None when sys.path[0] is neither a file nor a directory
    (e.g. an empty string in interactive sessions).
    """
    base = sys.path[0]
    if os.path.isfile(base):
        return os.path.dirname(base)
    if os.path.isdir(base):
        return base
    return None
# Matches a markdown link "[text](url)" anywhere on a line; group 1 is the
# link text, group 2 the target url.
PATTERN = re.compile(r'.*\[(.*)\]\((.*)\).*')
BASE_DIR = cur_file_dir()
# The Jekyll '_posts' directory, a sibling of this script's directory.
POST_DIR = ''.join([os.path.dirname(BASE_DIR), os.path.sep, '_posts'])
# Only files with this extension are treated as posts.
CARE_TYPE = '.markdown'
def check():
    """Reports every local markdown link that has no matching post file.

    A local url like '/2015/01/02/title/' is expected to correspond to a
    post file named '2015-01-02-title.markdown' (inner slashes become
    dashes).  Python 2 only (print statement).
    """
    links, mapping = get_local_links()
    for f_dir, content, url in links:
        # Strip the surrounding slashes and turn the rest into a file name.
        real = ''.join([url[1:-1].replace('/', '-'), CARE_TYPE])
        if real not in mapping:
            print 'the url', '(', content, ')[', url, ']', 'is not found ....', 'case is in:', f_dir
def get_local_links():
    """Collects (post file, link text, local url) triples for every relative
    link found in the posts, plus the post-basename lookup mapping.

    Python 2 only: the filter() lambda uses tuple-parameter unpacking,
    which was removed in Python 3 (PEP 3113).
    """
    res = []
    dir_list, dir_mapping = list_file_infos(POST_DIR)
    container = []
    for f_dir in dir_list:
        get_link_in_file(container, f_dir)
    # Keep only local (non-http, non-www) link targets.
    container = filter(lambda (w, x, y): ('http' not in y and 'www' not in y), container)
    for f_dir, content, url in container:
        # Trim a trailing ')' remnant and any '#fragment' from the url.
        index = url.find(')')
        index2 = url.find('#')
        url = url[:index] if index > -1 else url
        url = url[:index2] if index2 > -1 else url
        if url:
            res.append((f_dir, content, url))
    return res, dir_mapping
def get_link_in_file(container, file_path):
    """Append a (basename, text, target) triple to *container* for every
    markdown link found in *file_path*."""
    base_name = os.path.basename(file_path)
    for line in fileinput.input(file_path):
        pairs = []
        get_matches_pair(line, pairs)
        for text, target in pairs:
            container.append((base_name, text, target))
def list_file_infos(path):
    """Collect every post file path under *path*, plus a mapping from each
    file's basename to itself (used for O(1) existence lookups)."""
    file_paths = []
    do_list_file_infos(file_paths, path)
    mapping = {}
    for full_path in file_paths:
        name = os.path.basename(full_path)
        mapping[name] = name
    return file_paths, mapping
def do_list_file_infos(dir_list, path):
    """Recursively gather every markdown post file below *path* into
    *dir_list* (mutated in place)."""
    for entry in os.listdir(path):
        # Keep the original string concatenation to preserve path shapes.
        full_path = path + os.path.sep + entry
        if os.path.isfile(full_path) and CARE_TYPE in full_path:
            dir_list.append(full_path)
        elif os.path.isdir(full_path):
            do_list_file_infos(dir_list, full_path)
def get_matches_pair(line, matches):
    """Append every (text, target) markdown-link pair found in *line* to
    *matches*, scanning right-to-left (iterative form of the original
    recursion; identical ordering and results)."""
    while True:
        match = PATTERN.match(line)
        if not match:
            return
        text, target = match.groups()
        # Truncate before the last occurrence of the link text and rescan.
        line = line[:line.rfind(text)]
        matches.append((text, target))
# Run the link check when this module is executed.
# NOTE(review): this also runs on *import*; consider guarding with
# `if __name__ == '__main__':` if the module is ever imported elsewhere.
check()
| chaoyangqq/chaoyangqq.github.io | shell/linkChecker.py | Python | mit | 2,373 |
import getopt
import traceback
import sys
from pathlib import Path
import logging
logger = logging.getLogger('root')
class Arguments:
    """Command-line configuration for the steganography tool.

    Parses argv into attributes on construction; call
    test_and_show_configuration() afterwards to validate the combination,
    apply default modes and log the result.
    """
    # Logging verbosity name ('critical' ... 'trace').
    verbose = 'normal'
    # Operating mode flags (mutually exclusive; receiver is the default).
    receiver = False
    sender = False
    # Data sources/sinks.
    input_file = None
    output_file = None
    input_string = None
    # netfilter queue number.
    queue_number = 10
    # Covert-channel technique flags.
    time_shifter = False
    fields_shifter = False
    # Time Shifter delay parameters (milliseconds).
    treshold = 50
    one_lower_limit = 70
    one_upper_limit = 100
    zero_lower_limit = 0
    zero_upper_limit = 30
    # Fields Shifter field-selection flags.
    tcp_acknowledge_sequence_number_field = False
    tcp_initial_sequence_number_field = False
    ip_packet_identification_field = False
    ip_do_not_fragment_field = False
    # Bit masks selecting which bits of each field carry hidden data.
    ip_packet_identification_field_mask = '0000000000000001'
    tcp_initial_sequence_number_field_mask = '11111111111111111111111111111111'

    # getopt specifications, shared by both the plain and the 'sudo'-prefixed
    # invocation forms.  Bug fix: the duplicated 'sudo' copy was missing a
    # comma between 'ip-mask=' and 'tcp-mask=' -- implicit string
    # concatenation silently broke both options in that branch.
    _SHORT_OPTS = 'hi:o:q:rst:v:w:x:y:z:1234m:n:'
    _LONG_OPTS = ['help', 'verbose=', 'receiver', 'sender', 'input-file=',
                  'output-file=', 'queue-number=', 'one-lower-limit=',
                  'one-upper-limit=', 'input-string=', 'treshold=',
                  'time-shifter', 'fields-shifter', 'zero-lower-limit=',
                  'zero-upper-limit=', 'tcp-acknowledge-sequence-number-field',
                  'tcp-initial-sequence-number-field',
                  'ip-packet-identification-field',
                  'ip-do-not-fragment-field', 'ip-mask=', 'tcp-mask=']

    def __init__(self, argv=None):
        """Parse *argv* (defaults to sys.argv)."""
        if argv is None:
            self.get_arguments(sys.argv)
        else:
            self.get_arguments(argv)

    @staticmethod
    def _parse_int(arg, label):
        """Return *arg* converted to int; exit(2) with an error naming
        *label* when it is not an integer."""
        try:
            return int(arg)
        except ValueError:
            logger.error(label + ' "' + arg + '" is not an integer.')
            sys.exit(2)

    @staticmethod
    def _parse_mask(arg, length, label):
        """Return *arg* if it is a *length*-character string of '0'/'1'
        bits; exit(2) with an error otherwise."""
        if len(arg) != length:
            logger.error('Mask "' + arg + '" is not a ' + str(length) +
                         ' character string representing a ' + label + ' mask.')
            sys.exit(2)
        for my_char in arg:
            if my_char != '0' and my_char != '1':
                logger.error('Mask "' + arg + '" contained unkown character(s).')
                sys.exit(2)
        return arg

    def get_arguments(self, argv):
        """Parse the option list in *argv* and set the matching attributes.

        A leading 'sudo' token (argv[0]) is skipped.  Exits with status 2
        on any unknown option or invalid value.
        """
        # Skip the program name, plus the extra 'sudo' token when present.
        start = 2 if argv[0] == 'sudo' else 1
        try:
            opts, args = getopt.getopt(argv[start:], self._SHORT_OPTS,
                                       self._LONG_OPTS)
        except getopt.GetoptError as err:
            logger.error(err)
            logger.error('Please see help below')
            Arguments.help()
            sys.exit(2)
        for opt, arg in opts:
            if opt in ('-h', '--help'):
                Arguments.help()
                sys.exit()
            elif opt in ('-v', '--verbose'):
                if arg in ('critical', 'error', 'warning', 'normal', 'info', 'debug', 'trace'):
                    self.verbose = arg
                else:
                    logger.error('Verbose level "' + arg + '" is not an verbose level ("critical", "error", "warning", "normal", "info", "debug", "trace").')
                    sys.exit(2)
            elif opt in ('-i', '--input-file'):
                self.input_file = arg
                if not Path(self.input_file).is_file():
                    logger.error('Input file "' + arg + '" does not exist.')
                    sys.exit(2)
                self.sender = True
            elif opt in ('-o', '--output-file'):
                self.output_file = arg
                self.receiver = True
            elif opt in ('-r', '--receiver'):
                self.receiver = True
            elif opt in ('-s', '--sender'):
                self.sender = True
            elif opt == '--input-string':
                # (bug fix: was `opt in ('--input-string')`, a substring
                # test on a plain string, not tuple membership)
                self.input_string = arg
                self.sender = True
            elif opt in ('-q', '--queue-number'):
                self.queue_number = self._parse_int(arg, 'Queue number')
            elif opt in ('-y', '--one-lower-limit'):
                self.one_lower_limit = self._parse_int(arg, 'One lower limit')
                self.time_shifter = True
            elif opt in ('-z', '--one-upper-limit'):
                self.one_upper_limit = self._parse_int(arg, 'One upper limit')
                self.time_shifter = True
            elif opt in ('-w', '--zero-lower-limit'):
                self.zero_lower_limit = self._parse_int(arg, 'Zero lower limit')
                self.time_shifter = True
            elif opt in ('-x', '--zero-upper-limit'):
                self.zero_upper_limit = self._parse_int(arg, 'Zero upper limit')
                self.time_shifter = True
            elif opt in ('-t', '--treshold'):
                self.treshold = self._parse_int(arg, 'Treshold')
                self.time_shifter = True
            elif opt in ('-3', '--tcp-acknowledge-sequence-number-field'):
                self.tcp_acknowledge_sequence_number_field = True
                self.fields_shifter = True
            elif opt in ('-4', '--tcp-initial-sequence-number-field'):
                self.tcp_initial_sequence_number_field = True
                self.fields_shifter = True
            elif opt in ('-1', '--ip-packet-identification-field'):
                self.ip_packet_identification_field = True
                self.fields_shifter = True
            elif opt in ('-2', '--ip-do-not-fragment-field'):
                self.ip_do_not_fragment_field = True
                self.fields_shifter = True
            elif opt == '--time-shifter':
                self.time_shifter = True
            elif opt == '--fields-shifter':
                self.fields_shifter = True
            elif opt in ('-m', '--ip-mask'):
                self.ip_packet_identification_field_mask = self._parse_mask(
                    arg, 16, 'IP Packet Identification field')
                self.fields_shifter = True
            elif opt in ('-n', '--tcp-mask'):
                self.tcp_initial_sequence_number_field_mask = self._parse_mask(
                    arg, 32, 'TCP Initial Sequence Number field')
                self.fields_shifter = True

    def test_and_show_configuration(self):
        """Validate the parsed options, fill in default modes, and log the
        resulting configuration.  Exits with status 2 on fatal
        inconsistencies (both modes set, missing input, inverted limits)."""
        # Set log level from the verbosity name.
        _levels = {'critical': 50, 'error': 40, 'warning': 30, 'normal': 25,
                   'info': 20, 'debug': 10, 'trace': 5}
        if self.verbose in _levels:
            logger.setLevel(_levels[self.verbose])
        # Sender and receiver modes are mutually exclusive.
        if self.sender and self.receiver:
            logger.error('Cannot use both sender and receiver mode at the same time.')
            sys.exit(2)
        elif self.sender:
            logger.debug('Using sender mode.')
        elif self.receiver:
            logger.debug('Using receiver mode.')
        else:
            logger.warning('Receiver or Sender mode not set.')
            self.receiver = True
            logger.warning('Using receiver mode as default mode.')
        logger.debug('Using queue : "' + str(self.queue_number) + '".')
        # Testing file and input string
        if self.sender:
            if self.input_file is not None:
                logger.debug('Using input file : "' + self.input_file + '".')
            elif self.input_string is not None:
                if len(self.input_string) > 0:
                    logger.debug('Using input string :\n\n' + self.input_string + '\n')
                else:
                    logger.error('Input string is empty.')
                    sys.exit(2)
            else:
                logger.error('Input file or input string not set.')
                sys.exit(2)
        else:
            if self.output_file is not None:
                logger.debug('Using output file : "' + self.output_file + '".')
            else:
                logger.debug('Not using output files.')
        # Default to Time Shifter when no technique was chosen.
        # (bug fix: these three calls used the undefined name `log`,
        # raising NameError at runtime)
        if not self.time_shifter and not self.fields_shifter:
            logger.warning('Time Shifter or Fields Shifter mode not set.')
            self.time_shifter = True
            logger.warning('Using Time Shifter as a default mode.')
        # Testing lower/upper limits and treshold (Time Shifter setup)
        if self.time_shifter:
            if self.zero_lower_limit > self.zero_upper_limit:
                logger.error('The zero lower limit ("' + str(self.zero_lower_limit) + '") is greater than the zero upper limit ("' + str(self.zero_upper_limit) + '").')
                sys.exit(2)
            if self.one_lower_limit > self.one_upper_limit:
                logger.error('The one lower limit ("' + str(self.one_lower_limit) + '") is greater than the one upper limit ("' + str(self.one_upper_limit) + '").')
                sys.exit(2)
            logger.debug('The treshold is ' + str(self.treshold) + '.\n'
                         'The one lower limit is ' + str(self.one_lower_limit) + '.\n'
                         'The one upper limit is ' + str(self.one_upper_limit) + '.\n'
                         'The zero lower limit is ' + str(self.zero_lower_limit) + '.\n'
                         'The zero upper limit is ' + str(self.zero_upper_limit) + '.\n')
            if self.one_lower_limit < self.treshold:
                logger.warning('The one lower limit ("' + str(self.one_lower_limit) + '") is lower than the treshold ("' + str(self.treshold) + '").')
            if self.zero_upper_limit > self.treshold:
                logger.warning('The zero upper limit ("' + str(self.zero_upper_limit) + '") is greater than the treshold ("' + str(self.treshold) + '").')
            if self.zero_lower_limit == self.zero_upper_limit:
                logger.warning('The zero lower limit equals the zero upper limit ("' + str(self.zero_upper_limit) + '").')
            if self.one_lower_limit == self.one_upper_limit:
                logger.warning('The one lower limit equals the one upper limit ("' + str(self.one_upper_limit) + '").')
        # Testing Fields Shifter setup
        if self.fields_shifter and not self.tcp_initial_sequence_number_field \
                                and not self.tcp_acknowledge_sequence_number_field \
                                and not self.ip_do_not_fragment_field \
                                and not self.ip_packet_identification_field:
            logger.warning('Fields Shifter is set but no fields are set to be used.')
            self.ip_do_not_fragment_field = True
            logger.warning('Using the IP Do Not Fragment Field as a default.')

    @staticmethod
    def help():
        """Print the full usage/manual text to stdout."""
        print("Usage: \n"
              " './stegfy.py [options] [string to send]' \n"
              " \n"
              " \n"
              "Manual: \n"
              "This software will hide data in the network. It can hide data in delays between packets or in some tcp/ip fields. \n"
              "You must specify either '--receiver' or '--sender' to use this program (You cannot use both at the same time). \n"
              "Using some options will activate '--time-shifter' or '--fields-shifter' even if you do not use those directly. \n"
              "By default, the program start in Time Shifter Receiver mode. \n"
              " \n"
              " \n"
              "Options : \n"
              " '-h' or '--help' Show this help and exit \n"
              " '-v' or '--verbose' Set verbose level: it must be one of 'critical', 'error', \n"
              " 'warning', 'normal', 'info', 'debug', 'trace' \n"
              " Default : warning \n"
              " '-r' or '--receiver' Start program in receiver mode \n"
              " Default : Receiver mode \n"
              " '-s' or '--sender' Start program in sender mode \n"
              " Default : Receiver mode \n"
              " '-i <text file>' or '--input-file=<text file>' Using text file as input \n"
              " Default : None \n"
              " '-o <text file>' or '--output-file=<text file>' Using text file as output (program will replace it) \n"
              " Default : None \n"
              " '-q <integer>' or '--queue-number=<integer>' The queue number used (netfilter queue) \n"
              " Default : 10 \n"
              " '--time-shifter' Activate time shifting with default parameters \n"
              " Default : Activated if '--fields-shifter' is not set \n"
              " '-t <integer>' or '--treshold=<integer>' The treshold in milliseconds (see graph below) \n"
              " Default : 50 \n"
              " '-w <integer>' or '--zero-lower-limit=<integer>' The zero lower limit in milliseconds (see graph below) \n"
              " Default : 0 \n"
              " '-x <integer>' or '--zero-upper-limit=<integer>' The zero upper limit in milliseconds (see graph below) \n"
              " Default : 30 \n"
              " '-y <integer>' or '--one-lower-limit=<integer>' The one lower limit in milliseconds (see graph below) \n"
              " Default : 70 \n"
              " '-z <integer>' or '--one-upper-limit=<integer>' The one upper limit in milliseconds (see graph below) \n"
              " Default : 100 \n"
              " '--fields-shifter' Activate fields shifting with default parameters \n"
              " Default : Not activated \n"
              " '-1' or '--ip-packet-identification-field' Using the 'Packet Identification field' \n"
              " Default : Not activated \n"
              " 'm <mask>' or '--ip-mask <mask>' A string representing a 16 bit mask to set which bit of \n"
              " the IP Identification field to use \n"
              " Default : '0000000000000001' \n"
              " '-2' or '--ip-do-not-fragment-field' Using the 'Do Not Fragment field' \n"
              " Default : Not activated but used if no other are set \n"
              " and '--fields-shifter' is set \n"
              " '-3' or '--tcp-acknowledge-sequence-number-field' Using the 'Acknowledge Sequence Number field' \n"
              " Default : Not activated (NOT IMPLEMENTED) \n"
              " '-4' or '--tcp-initial-sequence-number-field' Using the 'Initial Sequence Number field' \n"
              " Default : Not activated \n"
              " 'n <mask>' or '--tcp-mask <mask>' A string representing a 32 bit mask to set which bit of \n"
              " the TCP Initial Sequence Number field to use \n"
              " Default : '11111111111111111111111111111111' \n"
              " \n"
              " \n"
              " ^ \n"
              " delay │ one-upper-limit \n"
              " in │ ──────────────────────────────── \n"
              " milliseconds │ \n"
              " │ one-lower-limit \n"
              " │ ──────────────────────────────── \n"
              " │ \n"
              " │ treshold \n"
              " │ ──────────────────────────────── \n"
              " │ \n"
              " │ \n"
              " │ ──────────────────────────────── \n"
              " │ zero-upper-limit \n"
              " │ \n"
              " │ ──────────────────────────────── \n"
              " │ zero-lower-limit \n"
              " │ \n"
              " ──│──────────────────────────────────────────────────────> \n"
              " \n"
              " \n"
              "Exemple: \n"
              " sudo ./stegphy.py -s 'Hello world !' \n"
              " sudo ./stegphy.py --receiver --fields-shifter --ip-packet-identification-field \n"
              " sudo ./main.py --sender --fields-shifter --ip-packet-identification-field --ip-mask '0010000000000001' --ip-do-not-fragment-field --tcp-initial-sequence-number-field --tcp-mask '00001000000000010000000000000001' --time-shifter -w 0 -x 300 -y 800 -z 1000 -t 500 --input-string 'heywhatsup' \n"
              " sudo ./main.py --receiver --fields-shifter --ip-packet-identification-field --ip-mask '0010000000000001' --ip-do-not-fragment-field --tcp-initial-sequence-number-field --tcp-mask '00001000000000010000000000000001' --time-shifter -w 0 -x 300 -y 800 -z 1000 -t 500 \n")
| albuic/stegProject | stegfy/Arguments.py | Python | gpl-3.0 | 22,977 |
import os
import time
import rospy
import argparse
from numpy import array, save, zeros, ones
from fishing import FishingBaxter
REPEAT = 5          # number of outer experiment repetitions
DURATION = 10 * 60  # seconds spent sampling trajectories per repetition
NTIMES = 10         # recorded replays of each sampled trajectory

if __name__ == '__main__':
    # Recordings are written under ./logs/<name>; refuse to overwrite.
    parser = argparse.ArgumentParser()
    parser.add_argument('--log', type=str, required=True)
    args = parser.parse_args()
    log = os.path.join('logs', args.log)
    if os.path.exists(log):
        # Bug fix: the message used to read "esists".
        raise IOError("Log file already exists!")
    rospy.init_node('baxter_garde_la_peche')
    baxter = FishingBaxter()
    baxter.goes_to_init_position()
    data = []
    for _ in range(REPEAT):
        start = time.time()
        # Sample random DMP trajectories for DURATION seconds.
        while time.time() - start < DURATION:
            baxter.goes_to_init_position()
            time.sleep(2)
            traj = baxter.random_dmp(bfs=10, W=50, duration=5.)
            # Replay each sampled trajectory NTIMES, recording every run.
            for _ in range(NTIMES):
                baxter.goes_to_init_position()
                time.sleep(2)
                t = baxter.play_traj(traj, record=True)
                data.append(t)
                time.sleep(2)
        # Rest between repetitions.
        time.sleep(DURATION/5.)
    data = array(data)
    save(log, data)
| flowersteam/fishing-baxter | run-dmp.py | Python | gpl-3.0 | 1,135 |
from django import template
register = template.Library()
def distribute_entrys(inboxentry):
    """Builds the template context for an inbox entry's related boxes.

    Returns {'boxentrys': <related Box objects via box_set>}.
    NOTE(review): `register` is created above but this function is never
    registered as a template tag (no @register.inclusion_tag / simple_tag
    decorator is visible) -- confirm whether the registration was lost.
    """
    boxentrys = inboxentry.box_set.all()
    return {'boxentrys': boxentrys}
| arsenalstriker14/imagetraccloud | imagetrac_docker/taskmanager/templatetags/taskbox_tags.py | Python | mit | 173 |
#!/usr/bin/env python
# initial translation from the tcl by VTK/Utilities/tcl2py.py
# further cleanup and fixes to the translation by Charl P. Botha
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# This example demonstrates how to use the vtkImageTracerWidget
# to trace on a slice of a 3D image dataset on one of its orthogonal planes.
# The button actions and key modifiers are as follows for controlling the
# widget:
# 1) left button click over the image, hold and drag draws a free hand line.
# 2) left button click and release erases the widget line, if it exists, and
# repositions the handle.
# 3) middle button click starts a snap line. The snap line can be
# terminated by clicking the middle button while depressing the ctrl key.
# 4) when tracing or snap drawing a line, if the last cursor position is
# within specified tolerance to the first handle, the widget line will form
# a closed loop with only one handle.
# 5) right button clicking and holding on any handle that is part of a snap
# line allows handle dragging. Any existing line segments are updated
# accordingly.
# 6) ctrl key + right button down on any handle will erase it. Any existing
# snap line segments are updated accordingly. If the line was formed by
# continous tracing, the line is deleted leaving one handle.
# 7) shift key + right button down on any snap line segment will insert a
# handle at the cursor position. The snap line segment is split accordingly.
#
#
def AdjustSpline(evt, obj):
    """Tracer-widget callback: mirror the traced path into the spline widget.

    NOTE(review): relies on module-level globals (itw, isw, poly, spoly,
    extract, stencil, imageActor2); several are created later in the
    script -- the callback only fires once the pipeline is fully built.
    """
    itw.GetPath(poly)
    npts = itw.GetNumberOfHandles()
    if npts < 2:
        # Not enough handles to form a path: show the plain extracted slice.
        imageActor2.GetMapper().SetInputConnection(extract.GetOutputPort())
        return
    closed = itw.IsClosed()
    if closed:
        isw.ClosedOn()
    else:
        isw.ClosedOff()
    imageActor2.GetMapper().SetInputConnection(extract.GetOutputPort())
    isw.SetNumberOfHandles(npts)
    # Copy every traced handle position onto the spline widget.
    for i in range(0, npts):
        pt = poly.GetPoints().GetPoint(i)
        isw.SetHandlePosition(i, pt[0], pt[1], pt[2])
    if closed:
        # Closed loop: switch to the stenciled image clipped by the loop.
        isw.GetPolyData(spoly)
        imageActor2.GetMapper().SetInputConnection(stencil.GetOutputPort())
        stencil.Update()
def AdjustTracer(evt, obj):
    """Observer callback fired when spline-widget interaction ends.

    Mirrors the spline widget's handle positions back onto the tracer
    widget and, for a closed spline, refreshes the stenciled ROI image.
    (Parameter naming caveat as in AdjustSpline; arguments are unused.)
    """
    npts = isw.GetNumberOfHandles()
    points.SetNumberOfPoints(npts)
    for i in range(0, npts):
        pt = isw.GetHandlePosition(i)
        points.SetPoint(i, pt[0], pt[1], pt[2])
    closed = isw.GetClosed()
    if closed:
        # Keep the stenciled view in sync with the edited spline.
        isw.GetPolyData(spoly)
        imageActor2.GetMapper().SetInputConnection(stencil.GetOutputPort())
        stencil.Update()
    itw.InitializeHandles(points)
# Start by loading some data.
v16 = vtk.vtkVolume16Reader()
v16.SetDataDimensions(64, 64)
v16.SetDataByteOrderToLittleEndian()
v16.SetImageRange(1, 93)
v16.SetDataSpacing(3.2, 3.2, 1.5)
v16.SetFilePrefix("%s/Data/headsq/quarter" % (VTK_DATA_ROOT,))
v16.Update()
#
# Linearly remap the volume's scalar range onto 0-255 so it can be stored
# as unsigned char for display.  (Renamed "min"/"max" to "smin"/"smax":
# the original names shadowed the Python builtins.)
srange = v16.GetOutput().GetScalarRange()
smin = srange[0]
smax = srange[1]
diff = smax - smin
slope = 255.0 / diff
inter = -slope * smin
shift = inter / slope   # equals -smin: offset applied before scaling
shifter = vtk.vtkImageShiftScale()
shifter.SetShift(shift)
shifter.SetScale(slope)
shifter.SetOutputScalarTypeToUnsignedChar()
shifter.SetInputConnection(v16.GetOutputPort())
shifter.ReleaseDataFlagOff()
shifter.Update()
# Display a y-z plane.
#
imageActor = vtk.vtkImageActor()
imageActor.GetMapper().SetInputConnection(shifter.GetOutputPort())
imageActor.VisibilityOn()
imageActor.SetDisplayExtent(31, 31, 0, 63, 0, 92)
imageActor.InterpolateOff()
#
# World-space x position of the displayed slice (extent index 31).
spc = shifter.GetOutput().GetSpacing()
orig = shifter.GetOutput().GetOrigin()
x0 = orig[0]
xspc = spc[0]
pos = x0+xspc*31.0
# An alternative would be to formulate position in this case by:
# set bounds [imageActor GetBounds]
# set pos [lindex $bounds 0]
#
# Two side-by-side renderers: tracing on the left, spline/ROI on the right.
ren = vtk.vtkRenderer()
ren.SetBackground(0.4, 0.4, 0.5)
ren2 = vtk.vtkRenderer()
ren2.SetBackground(0.5, 0.4, 0.4)
#
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
renWin.AddRenderer(ren2)
renWin.SetSize(600, 300)
#
ren.SetViewport(0, 0, 0.5, 1)
ren2.SetViewport(0.5, 0, 1, 1)
#
interactor = vtk.vtkInteractorStyleImage()
#
iren = vtk.vtkRenderWindowInteractor()
iren.SetInteractorStyle(interactor)
iren.SetRenderWindow(renWin)
#
# Extract the same slice as a standalone image for the right-hand view.
extract = vtk.vtkExtractVOI()
extract.SetVOI(31, 31, 0, 63, 0, 92)
extract.SetSampleRate(1, 1, 1)
extract.SetInputConnection(shifter.GetOutputPort())
extract.ReleaseDataFlagOff()
#
imageActor2 = vtk.vtkImageActor()
imageActor2.GetMapper().SetInputConnection(extract.GetOutputPort())
imageActor2.VisibilityOn()
imageActor2.SetDisplayExtent(31, 31, 0, 63, 0, 92)
imageActor2.InterpolateOff()
#
# Set up the image tracer widget
#
itw = vtk.vtkImageTracerWidget()
#
# Set the tolerance for capturing last handle when near first handle
# to form closed paths.
#
itw.SetCaptureRadius(1.5)
itw.GetGlyphSource().SetColor(1, 0, 0)
#
# Set the size of the glyph handle
#
itw.GetGlyphSource().SetScale(3.0)
#
# Set the initial rotation of the glyph if desired. The default glyph
# set internally by the widget is a '+' so rotating 45 deg. gives a 'x'
#
itw.GetGlyphSource().SetRotationAngle(45.0)
itw.GetGlyphSource().Modified()
itw.ProjectToPlaneOn()
itw.SetProjectionNormalToXAxes()
itw.SetProjectionPosition(pos)
itw.SetViewProp(imageActor)
itw.SetInputConnection(shifter.GetOutputPort())
itw.SetInteractor(iren)
itw.PlaceWidget()
#
# When the underlying vtkDataSet is a vtkImageData, the widget can be
# forced to snap to either nearest pixel points, or pixel centers. Here
# it is turned off.
#
itw.SnapToImageOff()
#
# Automatically form closed paths.
#
#itw AutoCloseOn
itw.AutoCloseOn()
#
# Set up a vtkSplineWidget in the second renderer and have
# its handles set by the tracer widget.
#
isw = vtk.vtkSplineWidget()
isw.SetCurrentRenderer(ren2)
isw.SetDefaultRenderer(ren2)
isw.SetInputConnection(extract.GetOutputPort())
isw.SetInteractor(iren)
bnds = imageActor2.GetBounds()
isw.PlaceWidget(bnds[0], bnds[1], bnds[2], bnds[3], bnds[4], bnds[5])
isw.ProjectToPlaneOn()
isw.SetProjectionNormalToXAxes()
isw.SetProjectionPosition(pos)
#
# Have the widgets control each others handle positions.
#
itw.AddObserver('EndInteractionEvent',AdjustSpline)
isw.AddObserver('EndInteractionEvent',AdjustTracer)
#
itw.On()
isw.On()
#
# Shared intermediate datasets used by the two observer callbacks above.
poly = vtk.vtkPolyData()
points = vtk.vtkPoints()
spoly = vtk.vtkPolyData()
#
# Set up a pipeline to demonstrate extraction of a 2D
# region of interest. Defining a closed clockwise path using the
# tracer widget will extract all pixels within the loop. A counter
# clockwise path provides the dual region of interest.
#
extrude = vtk.vtkLinearExtrusionFilter()
extrude.SetInputData(spoly)
extrude.SetScaleFactor(1)
extrude.SetExtrusionTypeToNormalExtrusion()
extrude.SetVector(1, 0, 0)
#
dataToStencil = vtk.vtkPolyDataToImageStencil()
dataToStencil.SetInputConnection(extrude.GetOutputPort())
#
stencil = vtk.vtkImageStencil()
stencil.SetInputConnection(extract.GetOutputPort())
stencil.SetStencilConnection(dataToStencil.GetOutputPort())
stencil.ReverseStencilOff()
stencil.SetBackgroundValue(128)
#
# Add all the actors.
#
ren.AddViewProp(imageActor)
ren2.AddViewProp(imageActor2)
#
# Render the image.
#
renWin.Render()
#
ren.GetActiveCamera().SetViewUp(0, 1, 0)
ren.GetActiveCamera().Azimuth(270)
ren.GetActiveCamera().Roll(270)
ren.GetActiveCamera().Dolly(1.7)
ren.ResetCameraClippingRange()
#
ren2.GetActiveCamera().SetViewUp(0, 1, 0)
ren2.GetActiveCamera().Azimuth(270)
ren2.GetActiveCamera().Roll(270)
ren2.GetActiveCamera().Dolly(1.7)
ren2.ResetCameraClippingRange()
#
# if we don't do this, the widgets disappear behind the imageActor.
vtk.vtkMapper.SetResolveCoincidentTopologyToPolygonOffset()
vtk.vtkMapper.SetResolveCoincidentTopologyPolygonOffsetParameters(10,10)
renWin.Render()
#
iren.Initialize()
renWin.Render()
iren.Start()
| HopeFOAM/HopeFOAM | ThirdParty-0.1/ParaView-5.0.1/VTK/Examples/GUI/Python/ImageTracerWidget.py | Python | gpl-3.0 | 7,773 |
"""
How to use this file:
have the data, main_data.csv, and the machine learning model file,
house_price_model_2.py, in the same folder with this file
change the data path by following the instructions in the house_price_model_2.py file:
for windows user -
os.environ["SALES_DATA_PATH"] = r'the path where Merged_Data.csv is saved', e.g.: "~/directory"
os.environ["SALES_DATA_FILE"] = 'Merged_Data.csv'
type bokeh serve --port 5001 main2.py in your terminal
Then you may go to the FirstStop landing page to click the predicting price link
"""
import pandas as pd
from bokeh.io import curdoc
#from bokeh.tile_providers import STAMEN_TONER
from bokeh.models import (
GMapPlot, GMapOptions, ColumnDataSource, Circle, DataRange1d,
PanTool, WheelZoomTool, BoxSelectTool, HoverTool
)
from bokeh.layouts import layout
from bokeh.models.widgets import Button, \
Select, Slider, Paragraph, Div
from house_price_model_2 import HousePriceModel
MODEL = HousePriceModel()
MODEL.initialize_model()
LOGO = Div(text="""<img src="https://s3-us-west-2.amazonaws.com/data515logo/logo_title_thinner.PNG"
alt="" />""")
DELIM_1 = Div(text="""<h2><span style="color: #800080;"
width=500 height=15>STEP 1:</span></h2>""")
DELIM_2 = Div(text="""<h2><span style="color: #800080;" width=500 height=15>STEP 2:</span></h2>""")
DELIM_3 = Div(text="""<h2><span style="color: #800080;" width=500 height=15>STEP 3:</span></h2>""")
DELIM_4 = Div(text="""<h2><span style="color: #800080;"
width=500 height=15>Almost Done. Just Submit!</span></h2>""")
DELIM_5 = Div(text="""<h2><span style="color: #800080;"
width=500 height=15>Ta Daa .....!</span></h2>""")
# Import dataset, the first sheet in the merged dataset
MAIN_DATA = pd.read_csv("main_data.csv", sep=",")
# Create widgets
BED = Select(title="Bedroom number:", value="3", options=['2', '3', '4', '5'])
BATH = Select(title="Bathroom number:", value="2", options=['2', '3', '4', '5'])
BUILTYEAR = Slider(title="Built year:", value=1900, start=1900, end=2015, step=1)
ZIPCODE = Select(title="Zipcode:", value="98004",
options=[str(x) for x in sorted(list(set(MAIN_DATA.zipcode.values)))])
SQFT_LIVING = Slider(title="Living Sqft:",
value=500, start=500, end=5500, step=10)
SQFT_LOT = Slider(title="Lot Sqft:", value=500, start=500, end=5500, step=10)
WATERFRONT = Select(title="Waterfront:", value="Either", options=['Either', 'Yes', 'No'])
VIEW = Select(title="House view:", value="1",
options=[str(x) for x in sorted(list(set(MAIN_DATA.view.values)))])
CONDITION = Select(title="House Condition:", value="3",
options=[str(x) for x in sorted(list(set(MAIN_DATA.condition.values)))])
GRADE = Select(title="House grade:", value="3",
options=[str(x) for x in sorted(list(set(MAIN_DATA.grade.values)))])
YEAR = Select(title="Year to buy the house:", value="2017",
options=['2017', '2018'])
MONTH = Select(title="Month to buy the house:", value="10",
options=['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'])
BUTTON_1 = Button(label="Submit")
BUTTON_2 = Button(label="Reset")
OUTPUT1 = Paragraph(width=300, height=25) #or use pretext, for a <pre> tag in html
OUTPUT2 = Paragraph(width=1000, height=25)
# Set the parameters and add tools to the map
MAP_OPTIONS = GMapOptions(lat=47.5480, lng=-121.9836, map_type="roadmap", zoom=8)
PLOT = GMapPlot(x_range=DataRange1d(), y_range=DataRange1d(),
map_options=MAP_OPTIONS, plot_width=520, plot_height=520)
PLOT.api_key = "AIzaSyAA875-_BZDwKoR4bMonQUJgLxYIIZ3wzw"
SOURCE = ColumnDataSource(data=dict(lat=[],
lon=[],
br=[]))
CIRCLE = Circle(x="lon", y="lat", size=4, fill_color="blue", fill_alpha=0.9, line_color=None)
PLOT.add_glyph(SOURCE, CIRCLE)
MY_HOVER = HoverTool()
MY_HOVER.tooltips = [('Zipcode', '@zipcode'),
('Number of bedrooms', '@br'), ('Number of bathrooms', '@ba'),
('List price', '@list_price'), ('Final price', '@final_price')]
PLOT.add_tools(PanTool(), WheelZoomTool(), BoxSelectTool())
PLOT.add_tools(MY_HOVER)
def update():
    """
    Callback for the Submit button: predict the price for the selected
    house features, then redraw the map with comparable listings (same
    bed/bath counts, list price within +/-$10k of the prediction, built
    after the chosen year).
    """
    # Translate the waterfront choice into its numeric model feature with a
    # local mapping.  This fixes a bug in the original code, which
    # overwrote WATERFRONT.value with '0.5'/'1'/'0': after the first
    # submit the widget no longer held 'Either'/'Yes'/'No', so every later
    # submit fell through to the else-branch (treated as 'No') and the
    # widget value diverged from its declared options.
    waterfront_numeric = {'Either': 0.5, 'Yes': 1.0, 'No': 0.0}.get(
        WATERFRONT.value, 0.0)
    features = {'sale_day':
                MODEL.calculate_sale_day_by_day(int(YEAR.value), int(MONTH.value), 15),
                'bathrooms': float(BATH.value),
                'sqft_living': float(SQFT_LIVING.value),
                'sqft_lot': float(SQFT_LOT.value),
                'waterfront': waterfront_numeric,
                'view': int(VIEW.value),
                'condition': int(CONDITION.value),
                'grade': int(GRADE.value),
                'location':
                MODEL.look_up_zipcode_by_string(ZIPCODE.value)
                }
    value = MODEL.predict(features)
    OUTPUT1.text = 'The predicted price of your house is: $' + str(value)
    # Select comparable sales to display on the map.
    sub_data = MAIN_DATA[MAIN_DATA.bedrooms == int(BED.value)]
    sub_data = sub_data[sub_data.bathrooms == float(BATH.value)]
    select_on_price_lower_limit = int(value) - 10000
    select_on_price_upper_limit = int(value) + 10000
    sub_data = sub_data[sub_data['List price'] > select_on_price_lower_limit]
    sub_data = sub_data[sub_data['List price'] < select_on_price_upper_limit]
    sub_data = sub_data[sub_data.yr_built > int(BUILTYEAR.value)]
    # Push the selection into the map's data source; keys must match the
    # HoverTool field names configured above.
    SOURCE.data = {'lat':sub_data['lat'], 'lon':sub_data['long'], 'br':sub_data['bedrooms'],
                   'ba':sub_data['bathrooms'], 'zipcode':sub_data['zipcode'],
                   'list_price':sub_data['List price'], 'final_price': sub_data['price']}
    OUTPUT2.text = 'Houses with ' + str(BED.value) + ' bedrooms, ' \
                   + str(BATH.value) + ' bathrooms, built after year ' \
                   + str(BUILTYEAR.value) + \
                   ' and list price as ' + str(value) + \
                   '(+/-10000$) are shown on this map. Hover to see detail information'
def reset():
    """Reset-button callback: blank both output paragraphs."""
    for widget in (OUTPUT1, OUTPUT2):
        widget.text = None
# Submit prediction and update map at each clicking of button 1
BUTTON_1.on_click(update)
# Clear output at each clicking of button 2
BUTTON_2.on_click(reset)
# Load initial map
update()
# Define UI layout
L1 = layout(children=[[LOGO], [DELIM_1], [BED, BATH, BUILTYEAR, ZIPCODE], [DELIM_2],
                      [SQFT_LIVING, SQFT_LOT, WATERFRONT, VIEW], [DELIM_3], [GRADE, CONDITION],
                      [DELIM_4], [BUTTON_1, BUTTON_2], [DELIM_5], [OUTPUT1], [OUTPUT2], [PLOT]])
# Register the layout with the Bokeh server document ("bokeh serve" entry).
curdoc().add_root(L1)
curdoc().title = "Predict the price of your first home"
| sliwhu/UWHousingTeam | UWHousingTeam/Scripts/part1_predict_price.py | Python | mit | 6,926 |
def resample_to_1km(x, template_raster_mask):
    '''
    Reproject/resample a single downscaled GeoTIFF onto the 1km IEM grid.

    Parameters
    ----------
    x : str
        path to an input GeoTIFF whose name follows the downscaling
        filename convention (variable_metric_model_..._month_year.tif).
    template_raster_mask : rasterio dataset
        mask raster in the res/extent/origin/crs of the existing TEM IEM
        products (0 = nodata, 1 = data).

    Returns
    -------
    str
        path of the GeoTIFF written into the mirrored .../IEM/... tree.
    '''
    import os
    import rasterio
    from rasterio.warp import RESAMPLING, reproject
    import numpy as np
    fn = os.path.basename(x)
    fn_split = fn.split('.')[0].split('_')
    if '_cru_' in fn:
        # CRU TS3.1 historical series: the model name occupies two fields.
        output_path = os.path.dirname(x).replace('/cru_ts31/', '/IEM/cru_ts31/')  # hardwired!
        fn_parts = ['variable', 'metric', 'model_1', 'model_2', 'kind', 'month', 'year']
        fn_dict = dict(zip(fn_parts, fn_split))
        fn_dict.update(scenario='historical', model='cru_ts31')
    else:
        output_path = os.path.dirname(x).replace('/ar5/', '/IEM/ar5/')  # hardwired!
        fn_parts = ['variable', 'metric', 'model', 'scenario', 'ensemble', 'month', 'year']
        fn_dict = dict(zip(fn_parts, fn_split))
    try:
        if not os.path.exists(output_path):
            os.makedirs(output_path)
    except OSError:
        # narrowed from a bare "except": only ignore filesystem races when
        # several workers create the same directory concurrently.
        pass
    # Output filename follows the IEM naming convention for each variable.
    fn_switch = {'cld': '_'.join(['cld', 'mean', 'pct', 'iem', fn_dict['model'], fn_dict['scenario'], fn_dict['month'], fn_dict['year']]) + '.tif',
                 'vap': '_'.join(['vap', 'mean', 'hPa', 'iem', fn_dict['model'], fn_dict['scenario'], fn_dict['month'], fn_dict['year']]) + '.tif',
                 'tas': '_'.join(['tas', 'mean', 'C', 'iem', fn_dict['model'], fn_dict['scenario'], fn_dict['month'], fn_dict['year']]) + '.tif',
                 'hur': '_'.join(['hur', 'mean', 'pct', 'iem', fn_dict['model'], fn_dict['scenario'], fn_dict['month'], fn_dict['year']]) + '.tif'}
    output_filename = os.path.join(output_path, fn_switch[fn_dict['variable']])
    rst = rasterio.open(x)
    rst_arr = rst.read(1)
    template_arr = template_raster_mask.read(1)
    template_meta = template_raster_mask.meta
    template_meta.update(compress='lzw', nodata=rst.nodata)
    if 'transform' in template_meta.keys():
        # older rasterio carries both 'affine' and 'transform' in meta;
        # the writer accepts only one of them.
        template_meta.pop('transform')
    output_arr = np.empty_like(template_arr.astype(np.float32))
    output_arr[template_arr == 0] = rst.nodata
    src_crs = {'init': 'epsg:3338'}
    dst_crs = {'init': 'epsg:3338'}
    reproject(rst_arr, output_arr, src_transform=rst.affine, src_crs=src_crs, src_nodata=rst.nodata,
              dst_transform=template_raster_mask.affine, dst_crs=dst_crs,
              dst_nodata=rst.nodata, resampling=RESAMPLING.cubic_spline, num_threads=2)
    with rasterio.open(output_filename, 'w', **template_meta) as out:
        # re-apply the mask after warping so masked cells stay nodata
        output_arr[template_arr == 0] = rst.nodata
        out.write(output_arr, 1)
    return output_filename
if __name__ == '__main__':
    import os
    import glob
    import rasterio
    from functools import partial
    from pathos import multiprocessing as mp

    # some setup:
    input_path = '/Data/malindgren/cru_november_final/ar5'
    template_raster_mask_fn = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/extents/IEM_Mask_1km.tif'
    ncores = 20
    # read in the template raster mask -- must be 0 (nodata) / 1 (data)
    template_raster_mask = rasterio.open(template_raster_mask_fn)
    resample_to_1km_partial = partial(resample_to_1km, template_raster_mask=template_raster_mask)
    models = ['IPSL-CM5A-LR', 'GISS-E2-R', 'MRI-CGCM3', 'CCSM4', 'GFDL-CM3']
    # run the HUR variable after cld/vap, which TEM requires first
    variables = ['cld', 'vap']
    path_list = [os.path.join(input_path, model, variable, 'downscaled', '*.tif')
                 for model in models for variable in variables]
    # temporary for the failed EOS run: skip the already-completed series
    complete = '/Data/malindgren/cru_november_final/ar5/IPSL-CM5A-LR/cld/downscaled/*.tif'
    path_list = [path for path in path_list if path != complete]
    # end temporary
    for path in path_list:
        files = glob.glob(path)
        # pathos pools serialize functools.partial objects, so the partial
        # is passed directly instead of being wrapped in a lambda; a fresh
        # pool per glob keeps memory bounded between variable runs.
        pool = mp.Pool(processes=ncores)
        pool.map(resample_to_1km_partial, files)
        pool.close()
| ua-snap/downscale | snap_scripts/old_scripts/tem_iem_older_scripts_april2018/tem_inputs_iem/old_code/crop_mask_resample_to_iem.py | Python | mit | 6,482 |
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
from iptest.assert_util import *
add_clr_assemblies("loadorder_5")
# namespace NS {
#     public class Target<T> {
#         public static string Flag = typeof(Target<>).FullName;
#     }
#     public class Target<T1, T2> {
#         public static string Flag = typeof(Target<,>).FullName;
#     }
# }
import NS
from NS import *
add_clr_assemblies("loadorder_5b")
# namespace NS {
#     public class Target<T1, T2, T3> {
#         public static string Flag = typeof(Target<,,>).FullName;
#     }
# }
# Before re-importing, only the 1- and 2-arity generics bound by the first
# "from NS import *" are visible; the 3-arity overload must fail.
AreEqual(Target[int].Flag, "NS.Target`1")
AreEqual(Target[int, str].Flag, "NS.Target`2")
AssertError(ValueError, lambda: Target[str, int, str].Flag)
# Re-importing refreshes the bound Target group with the 3-arity overload.
from NS import *
AreEqual(Target[int].Flag, "NS.Target`1")
AreEqual(Target[int, str].Flag, "NS.Target`2")
AreEqual(Target[str, int, str].Flag, "NS.Target`3")
# Access through the namespace module always sees the newly loaded arity.
AreEqual(NS.Target[int].Flag, "NS.Target`1")
AreEqual(NS.Target[int, str].Flag, "NS.Target`2")
AreEqual(NS.Target[str, int, str].Flag, "NS.Target`3")
| slozier/ironpython2 | Tests/interop/net/loadorder/t5b.py | Python | apache-2.0 | 1,181 |
import Queue
import time, sys, threading
from twisted.words.protocols import irc
from twisted.internet import reactor, protocol
# Seconds within which an identical message is treated as a repeat and dropped.
twitchRepeatTimeout = 30
# Minimum delay between consecutive sends (seconds); moderators may post faster.
twitchDelayMod = 0.05
twitchDelayNonMod = 1.7
# Maximum messages allowed inside one rate-limit window.
twitchRateLimitMod = 90
twitchRateLimitNonMod = 15
# Length of the sliding rate-limit window, in seconds.
twitchRateLimitPeriod = 30
class MessageQueue:
    """Outgoing chat-message queue enforcing Twitch IRC etiquette:
    duplicate suppression, a minimum inter-message delay, and a sliding
    rate-limit window.  run() blocks forever and is meant to execute on a
    dedicated worker thread; actual sends are marshalled onto the Twisted
    reactor thread via reactor.callFromThread.
    """
    def __init__(self, bot):
        self.bot = bot
        self.queue = Queue.Queue()
        # text of the last message sent, for duplicate suppression
        self.lastMessage = ""
        self.lastMessageTime = 0
        # send timestamps within the current window; 0 marks an unused slot
        self.oldMessageTimes = [0] * 100
        self.messagesSent = 0
    def run(self):
        """Consume queued messages forever, throttling as required."""
        while True:
            message = self.queue.get()
            delay = twitchDelayMod if self.bot.isMod else twitchDelayNonMod
            timeNow = time.time()
            if message["message"].strip() == self.lastMessage and not message["repeat"] and timeNow - twitchRepeatTimeout < self.lastMessageTime:
                # drop for being a repeat
                continue
            else:
                # honor the minimum inter-message delay
                earliestSendTime = self.lastMessageTime + delay
                if timeNow < earliestSendTime:
                    time.sleep(earliestSendTime - timeNow)
                self.clearOldMessageTimes()
                rateLimit = twitchRateLimitMod if self.bot.isMod else twitchRateLimitNonMod
                # block until the sliding window has room for another send
                while(self.messagesSent >= rateLimit):
                    time.sleep(0.5)
                    self.clearOldMessageTimes()
                if message["message"].lower().startswith("/me "):
                    # "/me" action messages are sent through describe()
                    message["message"] = message["message"][4:]
                    reactor.callFromThread(self.bot.describe, message["channel"], message["message"])
                else:
                    reactor.callFromThread(self.bot.msg, message["channel"], message["message"], 1024)
                self.lastMessageTime = time.time()
                self.lastMessage = message["message"].strip()
                self.oldMessageTimes[self.messagesSent] = self.lastMessageTime
                self.messagesSent += 1
    def queueMessage(self, channel, message):
        # normal message: subject to duplicate suppression
        self.queue.put({"channel": channel, "message": message, "repeat": False})
    def queueMessageRA(self, channel, message):
        # "repeat allowed": bypasses duplicate suppression
        self.queue.put({"channel": channel, "message": message, "repeat": True})
    def clearOldMessageTimes(self):
        """Expire send timestamps that fell outside the rate-limit window."""
        clearTime = time.time()
        msgsCleared = 0
        while(self.oldMessageTimes[0] + twitchRateLimitPeriod <= clearTime and self.oldMessageTimes[0] != 0):
            self.oldMessageTimes = self.oldMessageTimes[1:]
            self.oldMessageTimes.append(0)
            self.messagesSent -= 1
            msgsCleared += 1
| Dabomstew/goldenrod | messagequeue.py | Python | mit | 2,718 |
import os
import shutil

import pybedtools
# Directory containing this test module, plus derived scratch locations.
testdir = os.path.dirname(__file__)
test_tempdir = os.path.join(os.path.abspath(testdir), 'tmp')
# Location used by tests that need an unwriteable path.
unwriteable = os.path.join(os.path.abspath(testdir), 'unwriteable')
def setup():
    """Create the scratch temp directory and point pybedtools at it."""
    # os.makedirs replaces the previous shell call ("mkdir -p"): it is
    # portable and raises on real errors instead of silently ignoring them.
    if not os.path.exists(test_tempdir):
        os.makedirs(test_tempdir)
    pybedtools.set_tempdir(test_tempdir)
def teardown():
    """Delete the scratch temp directory and pybedtools' own temp files."""
    # shutil.rmtree replaces the previous shell call ("rm -r"): portable,
    # no subshell, and failures raise instead of being ignored.
    if os.path.exists(test_tempdir):
        shutil.rmtree(test_tempdir)
    pybedtools.cleanup()
| jos4uke/getSeqFlankBlatHit | lib/python2.7/site-packages/pybedtools/test/tfuncs.py | Python | gpl-2.0 | 462 |
#!/usr/bin/env python
import argparse
import kaboom.api
import kaboom.compiler
import kaboom.constants
import kaboom.vm
def create(api, infile):
    """Compile the contract source in *infile*, deploy it through *api*,
    then block until the contract actually appears on-chain.
    """
    bytecode = kaboom.compiler.compile(infile)
    print "Contract bytecode:", bytecode
    contract = api.create(bytecode, kaboom.constants.DEFAULT_KEY)
    print "Contract \"%s\" is at address %s" % (infile, contract)
    # Deployment only takes effect once the next block has been mined.
    api.wait_for_next_block(verbose=True)
    # NOTE(review): assert is stripped under "python -O"; raise an
    # exception instead if this check must always run.
    assert api.is_contract_at(contract), "contract not present at address"
if __name__ == '__main__':
    # CLI entry point: deploy the contract named on the command line.
    parser = argparse.ArgumentParser()
    parser.add_argument('infile')
    args = parser.parse_args()
    # Make sure the VM is up and responsive before deploying.
    kaboom.vm.ensure_running()
    api = kaboom.api.Api()
    api.wait_for_startup(verbose=True)
    create(api, args.infile)
| Cpt-Obvious/kaboom | create.py | Python | mit | 749 |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import json
import urllib2
def comment_on_pr(text):
    """Post *text* as a GitHub comment on the pull request under test.

    Silently returns unless both the Jenkins OAuth token and the
    pull-request id (set by the GitHub PR builder plugin) are present in
    the environment.
    """
    if 'JENKINS_OAUTH_TOKEN' not in os.environ:
        print 'Missing JENKINS_OAUTH_TOKEN env var: not commenting'
        return
    if 'ghprbPullId' not in os.environ:
        print 'Missing ghprbPullId env var: not commenting'
        return
    req = urllib2.Request(
        url = 'https://api.github.com/repos/grpc/grpc/issues/%s/comments' %
        os.environ['ghprbPullId'],
        data = json.dumps({'body': text}),
        headers = {
            'Authorization': 'token %s' % os.environ['JENKINS_OAUTH_TOKEN'],
            'Content-Type': 'application/json',
        })
    # GitHub echoes the created comment back as JSON; dump it to the log.
    print urllib2.urlopen(req).read()
| yang-g/grpc | tools/run_tests/python_utils/comment_on_pr.py | Python | apache-2.0 | 1,224 |
"""
Python 富文本XSS过滤类
@package XssHtml
@version 0.1
@link http://phith0n.github.io/python-xss-filter
@since 20150407
@copyright (c) Phithon All Rights Reserved
Based on native Python module HTMLParser purifier of HTML, To Clear all javascript in html
You can use it in all python web framework
Written by Phithon <root@leavesongs.com> in 2015 and placed in the public domain.
phithon <root@leavesongs.com> 编写于20150407
From: XDSEC <www.xdsec.org> & 离别歌 <www.leavesongs.com>
GitHub Pages: https://github.com/phith0n/python-xss-filter
Usage:
parser = XssHtml()
parser.feed('<html code>')
parser.close()
html = parser.getHtml()
    print(html)
Requirements
Python 2.6+ or 3.2+
Cannot defense xss in browser which is belowed IE7
浏览器版本:IE7+ 或其他浏览器,无法防御IE6及以下版本浏览器中的XSS
"""
import re
from html.parser import HTMLParser
class XssHtml(HTMLParser):  # pylint: disable=abstract-method
    """Whitelist-based XSS filter for rich-text HTML.

    Feed untrusted markup in with ``feed()``/``close()``; only the tags in
    ``allow_tags`` and the attributes in ``common_attrs`` /
    ``tags_own_attrs`` survive, every other tag is dropped, and all plain
    text is HTML-escaped.  Retrieve the sanitized markup via ``getHtml()``.

    Fixes relative to the original implementation:
      * ``_htmlspecialchars`` now escapes ``&`` (first, so nothing is
        double-escaped); previously bare ampersands passed through,
        producing ill-formed output.
      * ``getHtml`` is idempotent; it used to append ``result`` into
        ``data`` on every call, so a second call returned the document twice.
      * ``_limit_attr`` no longer crashes when called without a whitelist.
    """

    # Tags permitted in the output.
    allow_tags = ['a', 'img', 'br', 'strong', 'b', 'code', 'pre',
                  'p', 'div', 'em', 'span', 'h1', 'h2', 'h3', 'h4',
                  'h5', 'h6', 'blockquote', 'ul', 'ol', 'tr', 'th', 'td',
                  'hr', 'li', 'u', 'embed', 's', 'table', 'thead', 'tbody',
                  'caption', 'small', 'q', 'sup', 'sub']
    # Attributes permitted on every tag.
    common_attrs = ["style", "class", "name"]
    # Void tags emitted in "<tag ... />" form with no closing tag.
    nonend_tags = ["img", "hr", "br", "embed"]
    # Extra attributes permitted only on specific tags.
    tags_own_attrs = {
        "img": ["src", "width", "height", "alt", "align"],
        "a": ["href", "target", "rel", "title"],
        "embed": ["src", "width", "height", "type", "allowfullscreen", "loop", "play", "wmode", "menu"],
        "table": ["border", "cellpadding", "cellspacing"],
        "td": ["rowspan"],
    }
    # NOTE(review): _regex_url is never consulted by _true_url below, so URL
    # schemes are NOT validated here -- confirm whether scheme filtering was
    # intended before relying on this class against "javascript:" URLs.
    _regex_url = re.compile(r'^(http|https|ftp)://.*', re.I | re.S)
    _regex_style_1 = re.compile(r'(\\|&#|/\*|\*/)', re.I)
    _regex_style_2 = re.compile(r'e.*x.*p.*r.*e.*s.*s.*i.*o.*n', re.I | re.S)

    def __init__(self, allows=None):
        """Create a filter; *allows* optionally overrides ``allow_tags``."""
        HTMLParser.__init__(self, convert_charrefs=False)
        self.allow_tags = allows if allows else self.allow_tags
        self.result = []  # sanitized output fragments, in document order
        self.start = []   # stack of currently-open allowed tags
        self.data = []    # kept for backward compatibility; no longer used

    def getHtml(self):
        """Return the sanitized HTML (safe to call more than once)."""
        return ''.join(self.result)

    def handle_startendtag(self, tag, attrs):
        self.handle_starttag(tag, attrs)

    def handle_starttag(self, tag, attrs):
        """Emit *tag* if whitelisted, keeping only washed attributes."""
        if tag not in self.allow_tags:
            return
        end_diagonal = ' /' if tag in self.nonend_tags else ''
        if not end_diagonal:
            self.start.append(tag)
        attdict = {}
        for attr in attrs:
            attdict[attr[0]] = attr[1]
        attdict = self._wash_attr(attdict, tag)
        # Per-tag hook (node_a, node_embed, ...) falls back to node_default.
        if hasattr(self, "node_%s" % tag):
            attdict = getattr(self, "node_%s" % tag)(attdict)
        else:
            attdict = self.node_default(attdict)
        attrs = []
        for (key, value) in attdict.items():
            attrs.append('%s="%s"' % (key, self._htmlspecialchars(value)))
        attrs = (' ' + ' '.join(attrs)) if attrs else ''
        self.result.append('<' + tag + attrs + end_diagonal + '>')

    def handle_endtag(self, tag):
        # Only close the innermost open allowed tag; mismatched or stray
        # end tags are silently dropped.
        if self.start and tag == self.start[len(self.start) - 1]:
            self.result.append('</' + tag + '>')
            self.start.pop()

    def handle_data(self, data):
        # All plain text is escaped.
        self.result.append(self._htmlspecialchars(data))

    def handle_entityref(self, name):
        # Pass through well-formed named entities (e.g. "&amp;").
        if name.isalpha():
            self.result.append("&%s;" % name)

    def handle_charref(self, name):
        # Pass through decimal character references (e.g. "&#65;").
        if name.isdigit():
            self.result.append("&#%s;" % name)

    def node_default(self, attrs):
        """Attribute hook for tags without a dedicated node_<tag> method."""
        attrs = self._common_attr(attrs)
        return attrs

    def node_a(self, attrs):
        """<a>: sanitize href, default and restrict the target."""
        attrs = self._common_attr(attrs)
        attrs = self._get_link(attrs, "href")
        attrs = self._set_attr_default(attrs, "target", "_blank")
        attrs = self._limit_attr(attrs, {
            "target": ["_blank", "_self"]
        })
        return attrs

    def node_embed(self, attrs):
        """<embed>: restrict to flash-style attributes, disable scripting."""
        attrs = self._common_attr(attrs)
        attrs = self._get_link(attrs, "src")
        attrs = self._limit_attr(attrs, {
            "type": ["application/x-shockwave-flash"],
            "wmode": ["transparent", "window", "opaque"],
            "play": ["true", "false"],
            "loop": ["true", "false"],
            "menu": ["true", "false"],
            "allowfullscreen": ["true", "false"]
        })
        # hard-disable script and network access from embedded content
        attrs["allowscriptaccess"] = "never"
        attrs["allownetworking"] = "none"
        return attrs

    def _true_url(self, url):
        # NOTE(review): returns the URL unchanged (see _regex_url note above).
        return url

    def _true_style(self, style):
        # Strip CSS escape/comment tricks and "expression(...)" payloads.
        if style:
            style = self._regex_style_1.sub('_', style)
            style = self._regex_style_2.sub('_', style)
        return style

    def _get_style(self, attrs):
        if "style" in attrs:
            attrs["style"] = self._true_style(attrs.get("style"))
        return attrs

    def _get_link(self, attrs, name):
        if name in attrs:
            attrs[name] = self._true_url(attrs[name])
        return attrs

    def _wash_attr(self, attrs, tag):
        """Keep only attributes allowed globally or specifically for *tag*."""
        if tag in self.tags_own_attrs:
            other = self.tags_own_attrs.get(tag)
        else:
            other = []
        _attrs = {}
        if attrs:
            for (key, value) in attrs.items():
                if key in self.common_attrs + other:
                    _attrs[key] = value
        return _attrs

    def _common_attr(self, attrs):
        attrs = self._get_style(attrs)
        return attrs

    def _set_attr_default(self, attrs, name, default=''):
        if name not in attrs:
            attrs[name] = default
        return attrs

    def _limit_attr(self, attrs, limit=None):
        """Delete attributes whose value is outside the whitelist *limit*."""
        for (key, value) in (limit or {}).items():
            if key in attrs and attrs[key] not in value:
                del attrs[key]
        return attrs

    def _htmlspecialchars(self, html):
        """HTML-escape *html*; ``&`` first so nothing is double-escaped."""
        return html.replace("&", "&amp;") \
            .replace("<", "&lt;") \
            .replace(">", "&gt;") \
            .replace('"', "&quot;") \
            .replace("'", "&#039;")
if __name__ == "__main__":
    # Ad-hoc smoke test: feed a grab-bag of XSS vectors and print the result.
    parser = XssHtml()
    parser.feed("""<p><img src=1 onerror=alert(/xss/)></p><div class="left">
<a href='javascript:prompt(1)'><br />hehe</a></div>
<p id="test" onmouseover="alert(1)" style="expresSion(alert(1))">&gt;M<svg>
<a href="https://www.baidu.com" target="self">MM</a></p>
<embed src='javascript:alert(/hehe/)' allowscriptaccess=always />
<a href="/problem/">Problems</a>
<script type="math/tex">(1<n \\leq 100)</script>
<td rowspan="2">
<p><code>&lt;int&gt;</code></p>
<script>setInterval(function() { alert("hello"); }, 100);</script>
<script type="text/javascript">alert(/xss/)</script>
""")
    parser.close()
    print(parser.getHtml())
| ultmaster/eoj3 | utils/xss_filter.py | Python | mit | 6,450 |
from Foundation import *
import objc
from PyObjCTools.TestSupport import *
class TestNSLockProtocols (TestCase):
    """Checks for the NSLocking family of classes bridged from Foundation."""

    def testLockIsLock(self):
        """NSLock instances must conform to the NSLocking protocol."""
        # Test for bug #1735937
        lock = NSLock.alloc().init()
        self.assert_(lock.conformsToProtocol_(objc.protocolNamed("NSLocking")))
        # NOTE(review): "protocols" is not defined by any explicit import in
        # this file; unless the Foundation star-import supplies it, the next
        # line raises NameError.  Confirm, and either import it or drop this
        # duplicate of the check above.
        self.assert_(lock.conformsToProtocol_(protocols.NSLocking))

    def testMethods(self):
        """BOOL-returning lock methods must be bridged to Python booleans."""
        self.assertResultIsBOOL(NSLock.tryLock)
        self.assertResultIsBOOL(NSLock.lockBeforeDate_)
        self.assertResultIsBOOL(NSConditionLock.tryLock)
        self.assertResultIsBOOL(NSConditionLock.tryLockWhenCondition_)
        self.assertResultIsBOOL(NSConditionLock.lockBeforeDate_)
        self.assertResultIsBOOL(NSConditionLock.lockWhenCondition_beforeDate_)
        self.assertResultIsBOOL(NSRecursiveLock.tryLock)
        self.assertResultIsBOOL(NSRecursiveLock.lockBeforeDate_)
        self.assertResultIsBOOL(NSCondition.waitUntilDate_)
if __name__ == "__main__":
    # main() comes from the PyObjCTools.TestSupport star import.
    main()
| albertz/music-player | mac/pyobjc-framework-Cocoa/PyObjCTest/test_nslock.py | Python | bsd-2-clause | 994 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
###############################################################################
# Module: exceptions
# Purpose: Collection of custom exceptions for the extractor package
#
# Notes:
#
###############################################################################
class InvalidArgumentsError(Exception):
    """Raised when the arguments supplied to a function are invalid."""

    def __init__(self, message):
        Exception.__init__(self, message)
| iagcl/data_pipeline | data_pipeline/extractor/exceptions.py | Python | apache-2.0 | 1,263 |
import unittest
from scripttest import TestFileEnvironment
SCRIPT='../ldap2zabbix.py'
class CLITest(unittest.TestCase):
    """Functional checks for the ldap2zabbix command-line interface."""

    def setUp(self):
        # Each test executes inside a scripttest-managed scratch directory.
        self.env = TestFileEnvironment('./.scratch')

    def tearDown(self):
        pass

    def test_can_run_script_without_arguments(self):
        # Missing mandatory arguments should exit with status 2.
        outcome = self.env.run(SCRIPT, expect_error=True)
        assert outcome.returncode == 2

    def test_can_run_script_with_help(self):
        # Asking for help is a successful invocation.
        outcome = self.env.run(SCRIPT + ' -h')
        assert outcome.returncode == 0

    def test_run_script_with_invalid_arg(self):
        # An unknown option should also exit with status 2.
        outcome = self.env.run(SCRIPT + ' --frob', expect_error=True)
        assert outcome.returncode == 2
# Allow running the functional tests directly from the command line.
if __name__ == '__main__':
    unittest.main()
| s-i-tech/py-ldap2zabbix | tests/functional_tests.py | Python | lgpl-3.0 | 738 |
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_users(host):
user = host.user('owner1')
assert user.exists
assert user.home == '/home/owner1'
user = host.user('devuser1')
assert user.exists
assert user.home == '/home/devuser1'
user = host.user('devuser2')
assert user.exists
assert user.home == '/home/devuser2'
user = host.user('devuser3')
assert user.exists
assert user.home == '/home/devuser3'
| FiaasCo/fiaas | playbooks/roles/createresources/molecule/fiaas01/tests/test_users.py | Python | mit | 582 |
# GUI Application automation and testing library
# Copyright (C) 2006 Mark Mc Mahon
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
"Controls package"
__revision__ = "$Revision$"
from .HwndWrapper import GetDialogPropsFromHandle
from .HwndWrapper import InvalidWindowHandle
# make an alias for the HwndWrapper object as WrapHandle
from .HwndWrapper import HwndWrapper as WrapHandle
# import the control clases - this will register the classes they
# contain
#import custom_controls
from . import common_controls
from . import win32_controls
#
##====================================================================
#def _unittests():
# "Run some tests on the controls"
# from pywinauto import win32functions
#
# "do some basic testing"
# from pywinauto.findwindows import find_windows
# import sys
#
# if len(sys.argv) < 2:
# handle = win32functions.GetDesktopWindow()
# else:
# try:
# handle = int(eval(sys.argv[1]))
#
# except ValueError:
#
# handle = find_windows(
# title_re = "^" + sys.argv[1], class_name = "#32770", )
# #visible_only = False)
#
# if not handle:
# print "dialog not found"
# sys.exit()
#
#
# props = GetDialogPropsFromHandle(handle)
# print len(props)
# #pprint(GetDialogPropsFromHandle(handle))
#
#if __name__ == "__main__":
# _unittests()
| manojklm/pywinauto-x64 | pywinauto/controls/__init__.py | Python | lgpl-2.1 | 2,175 |
"""
@todo Clean up the LimitTreeMaker python file to not depend on these extra variables in cuts.py
"""
import os
from .. import Load, DirFromEnv
# Fetch the LimitTreeMaker object through the package's Load helper.
newLimitTreeMaker = Load('LimitTreeMaker')
def SetupFromEnv(ltm):
    """A function that sets up the LimitTreeMaker after sourcing a config file
    @param ltm The LimitTreeMaker object to set up
    """
    from ..CommonTools.FileConfigReader import SetupConfigFromEnv, SetFunctionFromEnv, LoadConfig

    # Apply the shared file configuration and make sure the output
    # directory named by the environment exists and is registered.
    SetupConfigFromEnv(ltm)
    DirFromEnv('CrombieOutLimitTreeDir')
    SetFunctionFromEnv([
        (ltm.SetOutDirectory, 'CrombieOutLimitTreeDir'),
    ])

    # A region may carry an optional exception config pointed to by a
    # CrombieExcept_<region> environment variable.
    for region in LoadConfig.cuts.regions:
        exception_config = os.environ.get('CrombieExcept_' + region)
        if exception_config:
            ltm.ReadExceptionConfig(exception_config, region)
def SetCuts(ltm, category):
    """Register the cut string of every region on a LimitTreeMaker."""
    from .. import LoadConfig

    cuts = LoadConfig.cuts
    for region in cuts.regions:
        ltm.AddRegion(region, cuts.cut(category, region))
        # Some regions carry an extra (data cut, weight branch) pair.
        if region in cuts.additionKeys:
            extras = cuts.additions[region]
            ltm.AddExceptionDataCut(region, extras[0])
            ltm.AddExceptionWeightBranch(region, extras[1])
| dabercro/CrombieTools | python/CrombieTools/AnalysisTools/LimitTreeMaker.py | Python | mit | 1,169 |
"""Test StructuredQuadGraph."""
from numpy.testing import assert_array_equal
from pytest import approx
from landlab.graph import DualStructuredQuadGraph, DualUniformRectilinearGraph
def test_create():
    """Test creating a quad graph."""
    node_y = [0, 1, 3] * 3
    node_x = [3] * 3 + [4] * 3 + [6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3))

    # Primary grid elements.
    assert dual.number_of_nodes == 9
    assert dual.number_of_links == 12
    assert dual.number_of_patches == 4
    # Dual grid elements.
    assert dual.number_of_corners == 4
    assert dual.number_of_faces == 4
    assert dual.number_of_cells == 1
def test_create_raster():
    """Test creating a quad graph."""
    dual = DualUniformRectilinearGraph((3, 4), spacing=(2.0, 3.0))

    # Primary grid elements.
    assert dual.number_of_nodes == 12
    assert dual.number_of_links == 17
    assert dual.number_of_patches == 6
    # Dual grid elements.
    assert dual.number_of_corners == 6
    assert dual.number_of_faces == 7
    assert dual.number_of_cells == 2
def test_raster_spacing():
    """Test the spacing keyword for raster."""
    dual = DualUniformRectilinearGraph((3, 4), spacing=(2.0, 3.0))

    # Rows of 3 horizontal links (length 3) alternate with rows of
    # 4 vertical links (length 2).
    expected_link_lengths = ([3.0] * 3 + [2.0] * 4) * 2 + [3.0] * 3
    assert_array_equal(dual.length_of_link, expected_link_lengths)

    assert_array_equal(dual.length_of_face, [3.0, 3.0] + [2.0] * 3 + [3.0, 3.0])
def test_raster_spacing_as_scalar():
    """Test the spacing keyword as a scalar for raster."""
    dual = DualUniformRectilinearGraph((3, 4), spacing=2.0)

    # A scalar spacing applies to both dimensions: every link and
    # every face has length 2.
    assert_array_equal(dual.length_of_link, [2.0] * 17)
    assert_array_equal(dual.length_of_face, [2.0] * 7)
def test_raster_origin():
    """Test the origin keyword for raster."""
    dual = DualUniformRectilinearGraph((3, 4), origin=(-1.0, 10.0))

    # Nodes start at (y=-1, x=10) with unit spacing.
    assert_array_equal(dual.xy_of_node[:, 0], [10.0, 11.0, 12.0, 13.0] * 3)
    assert_array_equal(dual.xy_of_node[:, 1], [-1.0] * 4 + [0.0] * 4 + [1.0] * 4)

    # Corners sit at cell centers, offset by half a unit.
    assert_array_equal(dual.xy_of_corner[:, 0], [10.5, 11.5, 12.5] * 2)
    assert_array_equal(dual.xy_of_corner[:, 1], [-0.5] * 3 + [0.5] * 3)
def test_raster_origin_as_scalar():
    """Test the origin keyword as a scalar for raster."""
    dual = DualUniformRectilinearGraph((3, 4), origin=-1.0)

    # A scalar origin shifts both coordinates by the same amount.
    assert_array_equal(dual.xy_of_node[:, 0], [-1.0, 0.0, 1.0, 2.0] * 3)
    assert_array_equal(dual.xy_of_node[:, 1], [-1.0] * 4 + [0.0] * 4 + [1.0] * 4)

    assert_array_equal(dual.xy_of_corner[:, 0], [-0.5, 0.5, 1.5] * 2)
    assert_array_equal(dual.xy_of_corner[:, 1], [-0.5] * 3 + [0.5] * 3)
def test_perimeter_corners():
    """Test the perimeter corners."""
    node_y = [0, 1, 3] * 3
    node_x = [3] * 3 + [4] * 3 + [6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3))

    assert_array_equal(dual.perimeter_corners, [1, 3, 2, 0])
def test_length_of_face():
    """Test length of faces."""
    node_y = [0, 1, 3] * 3
    node_x = [3] * 3 + [4] * 3 + [6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3))

    # Every face of this grid has the same length.
    assert dual.length_of_face == approx(1.5)
def test_area_of_cell():
    """Test areas of patches."""
    node_y = [0] * 3 + [1] * 3 + [3] * 3
    node_x = [3, 4, 6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3))

    # The single interior cell is 1.5 x 1.5.
    assert dual.area_of_cell == approx([2.25])
def test_corners_at_cell():
    """Test corners of cells."""
    node_y = [0, 1, 3] * 3
    node_x = [3] * 3 + [4] * 3 + [6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3), sort=True)

    assert_array_equal(dual.corners_at_cell, [[3, 2, 0, 1]])
def test_cells_at_corner():
    """Test areas of patches."""
    node_y = [0, 1, 3] * 3
    node_x = [3] * 3 + [4] * 3 + [6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3))

    # Only the single interior cell touches each of the four corners.
    assert_array_equal(
        dual.cells_at_corner,
        [[0, -1, -1, -1], [-1, 0, -1, -1], [-1, -1, -1, 0], [-1, -1, 0, -1]],
    )
def test_cells_at_face():
    """Test cells on either side of faces."""
    node_y = [0, 1, 3] * 3
    node_x = [3] * 3 + [4] * 3 + [6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3))

    assert_array_equal(dual.cells_at_face, [[-1, 0], [0, -1], [-1, 0], [0, -1]])
def test_faces_at_cell():
    """Test faces that form cells."""
    node_y = [0, 1, 3] * 3
    node_x = [3] * 3 + [4] * 3 + [6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3))

    assert_array_equal(dual.faces_at_cell, [[2, 3, 1, 0]])
def test_corners_at_face():
    """Test corners at face tail and head."""
    node_y = [0, 1, 3] * 3
    node_x = [3] * 3 + [4] * 3 + [6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3))

    assert_array_equal(dual.corners_at_face, [[0, 1], [0, 2], [1, 3], [2, 3]])
def test_faces_at_corner():
    """Test faces around corners."""
    node_y = [0, 1, 3] * 3
    node_x = [3] * 3 + [4] * 3 + [6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3))

    assert_array_equal(
        dual.faces_at_corner,
        [[0, 1, -1, -1], [-1, 2, 0, -1], [3, -1, -1, 1], [-1, -1, 3, 2]],
    )
def test_face_dirs_at_corner():
    """Test face directions at corners."""
    node_y = [0, 1, 3] * 3
    node_x = [3] * 3 + [4] * 3 + [6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3))

    assert_array_equal(
        dual.face_dirs_at_corner,
        [[-1, -1, 0, 0], [0, -1, 1, 0], [-1, 0, 0, 1], [0, 0, 1, 1]],
    )
def test_cell_at_node():
    """Test cell-node connectivity."""
    node_y = [0, 1, 3] * 3
    node_x = [3] * 3 + [4] * 3 + [6] * 3
    dual = DualStructuredQuadGraph((node_y, node_x), shape=(3, 3))

    # Only the center node of the 3x3 grid has a cell.
    assert_array_equal(dual.cell_at_node, [-1, -1, -1, -1, 0, -1, -1, -1, -1])

    raster = DualUniformRectilinearGraph((3, 4))
    assert_array_equal(
        raster.cell_at_node, [-1, -1, -1, -1, -1, 0, 1, -1, -1, -1, -1, -1]
    )
def test_link_at_face():
    """Test link-face connectivity."""
    raster = DualUniformRectilinearGraph((3, 4))

    assert_array_equal(raster.link_at_face, [4, 5, 7, 8, 9, 11, 12])
    # Perimeter links have no face (-1).
    assert_array_equal(
        raster.face_at_link,
        [-1, -1, -1, -1, 0, 1, -1, 2, 3, 4, -1, 5, 6, -1, -1, -1, -1],
    )
def test_corner_at_face():
    """Test corner-face connectivity."""
    raster = DualUniformRectilinearGraph((3, 4))

    assert_array_equal(
        raster.corners_at_face,
        [[0, 1], [1, 2], [0, 3], [1, 4], [2, 5], [3, 4], [4, 5]],
    )
    assert_array_equal(raster.corner_at_face_tail, [0, 1, 0, 1, 2, 3, 4])
    assert_array_equal(raster.corner_at_face_head, [1, 2, 3, 4, 5, 4, 5])
| cmshobe/landlab | tests/graph/structured_quad/test_dual_quad.py | Python | mit | 7,341 |
import os
from ehive.runnable.IGFBaseProcess import IGFBaseProcess
from igf_data.utils.tools.fastp_utils import Fastp_utils
from igf_data.utils.fileutils import get_datestamp_label
class RunFastp(IGFBaseProcess):
  """An eHive runnable that trims fastq reads with the fastp tool.

  Reads its inputs from dataflow/analysis-config parameters, runs
  adapter trimming through ``Fastp_utils`` and passes the trimmed
  output files downstream via ``dataflow_params``.
  """
  def param_defaults(self):
    """Extend the base class defaults with fastp specific parameters."""
    params_dict=super(RunFastp,self).param_defaults()
    # NOTE(review): the docstring of run() describes the '-a' option as
    # '-a=auto'; here it is passed as two list entries ['-a','auto'].
    params_dict.update({
      'fastp_options_list':['-a','auto',
                            '--qualified_quality_phred=15',
                            '--length_required=15'],
      'split_by_lines_count':5000000,
      'run_thread':1,
      'split_fastq':None,
      'polyg_platform_list':['NextSeq','NOVASEQ6000'],
      'enable_polyg_trim':False,
      'use_ephemeral_space':0,
      })
    return params_dict
  def run(self):
    '''
    A method for running Fastp commands

    :param project_igf_id: A project_igf_id from dataflow
    :param experiment_igf_id: A experiment_igf_id from dataflow
    :param sample_igf_id: A sample_igf_id from dataflow
    :param fastp_exe: Fastp exe path from analysis config
    :param input_fastq_list: Input fastq list from dataflow
    :param base_work_dir: Base work dir path from analysis config
    :param run_thread: Number of threads for fastp run, default 1
    :param split_fastq: Enable splitting fastq files, default None
    :param split_by_lines_count: Number of fastq lines to be used if split_fastq is True, default 5000000
    :param fastp_options_list: A list of fasrp tool options, default ['-a=auto','--qualified_quality_phred=15','--length_required=15']
    :param platform_name: Sequencing platform name from dataflow
    :param use_ephemeral_space: A toggle for temp dir setting, default 0
    :param polyg_platform_list: A list of Illumin platforms which emit poly Gs for empty cycles, default ['NextSeq','NOVASEQ6000']
    :param enable_polyg_trim: Enable Fastp poly G trim, default False
    '''
    try:
      # Collect required and optional parameters from the pipeline.
      project_igf_id = self.param_required('project_igf_id')
      experiment_igf_id = self.param_required('experiment_igf_id')
      sample_igf_id = self.param_required('sample_igf_id')
      run_igf_id = self.param_required('run_igf_id')
      fastp_exe = self.param_required('fastp_exe')
      input_fastq_list = self.param_required('input_fastq_list')
      base_work_dir = self.param_required('base_work_dir')
      run_thread = self.param('run_thread')
      split_fastq = self.param('split_fastq')
      split_by_lines_count = self.param('split_by_lines_count')
      fastp_options_list = self.param('fastp_options_list')
      platform_name = self.param_required('platform_name')
      polyg_platform_list = self.param('polyg_platform_list')
      enable_polyg_trim = self.param('enable_polyg_trim')
      use_ephemeral_space = self.param('use_ephemeral_space')
      seed_date_stamp = self.param_required('date_stamp')
      seed_date_stamp = get_datestamp_label(seed_date_stamp)
      # Work dir layout: <base>/<project>/<sample>/<experiment>.
      work_dir_prefix = \
        os.path.join(\
          base_work_dir,
          project_igf_id,
          sample_igf_id,
          experiment_igf_id)
      work_dir = self.get_job_work_dir(work_dir=work_dir_prefix) # get a run work dir
      split_fastq = \
        False if split_fastq is None else True # set default value for split fastq
      if platform_name in polyg_platform_list:
        enable_polyg_trim=True # enable poly G trim for new Illumin platforms
      fastp_obj = \
        Fastp_utils(\
          fastp_exe=fastp_exe,
          input_fastq_list=input_fastq_list,
          log_output_prefix=run_igf_id,
          output_dir=work_dir,
          run_thread=run_thread,
          use_ephemeral_space=use_ephemeral_space,
          enable_polyg_trim=enable_polyg_trim,
          split_by_lines_count=split_by_lines_count,
          fastp_options_list=fastp_options_list) # setup fastp tool for run
      output_read1, output_read2, output_html_file, output_json_file, _ = \
        fastp_obj.\
          run_adapter_trimming(split_fastq=split_fastq) # run fastp trimming
      self.param('dataflow_params',
                 {'output_read1':output_read1,
                  'output_read2':output_read2,
                  'output_html_file':output_html_file,
                  'output_json_file':output_json_file,
                  'seed_date_stamp':seed_date_stamp}) # pass on fastp output list
      message = 'finished fastp for {0} {1}'.\
                format(project_igf_id,
                       sample_igf_id)
      self.post_message_to_slack(message,reaction='pass') # send log to slack
      self.post_message_to_ms_team(
        message=message,
        reaction='pass')
    except Exception as e:
      # Report the failure to both messaging channels, then re-raise so
      # the eHive job is marked as failed.
      message = \
        'project: {2}, sample:{3}, Error in {0}: {1}'.\
        format(\
          self.__class__.__name__,
          e,
          project_igf_id,
          sample_igf_id)
      self.warning(message)
      self.post_message_to_slack(message,reaction='fail') # post msg to slack for failed jobs
      self.post_message_to_ms_team(
        message=message,
        reaction='fail')
      raise
from ctypes import *
import sys, unittest
import os
from ctypes.util import find_library
from ctypes.test import is_resource_enabled
# Locate a C runtime library to exercise the loader tests against.
libc_name = None
if os.name == "nt":
    libc_name = find_library("c")
elif os.name == "ce":
    # Windows CE bundles the C runtime in coredll.
    libc_name = "coredll"
elif sys.platform == "cygwin":
    libc_name = "cygwin1.dll"
else:
    libc_name = find_library("c")
if is_resource_enabled("printing"):
    print "libc_name is", libc_name
class LoaderTest(unittest.TestCase):
    """Tests for locating and loading shared libraries through ctypes."""
    # A library name that is guaranteed not to resolve anywhere.
    unknowndll = "xxrandomnamexx"
    @unittest.skipUnless(libc_name is not None, 'could not find libc')
    def test_load(self):
        # Loading by full path and by bare filename must both work;
        # an unknown name must raise OSError.
        CDLL(libc_name)
        CDLL(os.path.basename(libc_name))
        self.assertRaises(OSError, CDLL, self.unknowndll)
    # NOTE(review): the first skipUnless is redundant -- the second
    # decorator already checks libc_name is not None.
    @unittest.skipUnless(libc_name is not None, 'could not find libc')
    @unittest.skipUnless(libc_name is not None and
                         os.path.basename(libc_name) == "libc.so.6",
                         'wrong libc path for test')
    def test_load_version(self):
        cdll.LoadLibrary("libc.so.6")
        # linux uses version, libc 9 should not exist
        self.assertRaises(OSError, cdll.LoadLibrary, "libc.so.9")
        self.assertRaises(OSError, cdll.LoadLibrary, self.unknowndll)
    def test_find(self):
        # find_library may return None; only attempt a load when found.
        for name in ("c", "m"):
            lib = find_library(name)
            if lib:
                cdll.LoadLibrary(lib)
                CDLL(lib)
    @unittest.skipUnless(os.name in ("nt", "ce"),
                         'test specific to Windows (NT/CE)')
    def test_load_library(self):
        # Exercise the four equivalent ways of loading a Windows DLL.
        self.assertIsNotNone(libc_name)
        if is_resource_enabled("printing"):
            print find_library("kernel32")
            print find_library("user32")
        if os.name == "nt":
            windll.kernel32.GetModuleHandleW
            windll["kernel32"].GetModuleHandleW
            windll.LoadLibrary("kernel32").GetModuleHandleW
            WinDLL("kernel32").GetModuleHandleW
        elif os.name == "ce":
            windll.coredll.GetModuleHandleW
            windll["coredll"].GetModuleHandleW
            windll.LoadLibrary("coredll").GetModuleHandleW
            WinDLL("coredll").GetModuleHandleW
    @unittest.skipUnless(os.name in ("nt", "ce"),
                         'test specific to Windows (NT/CE)')
    def test_load_ordinal_functions(self):
        import _ctypes_test
        dll = WinDLL(_ctypes_test.__file__)
        # We load the same function both via ordinal and name
        func_ord = dll[2]
        func_name = dll.GetString
        # addressof gets the address where the function pointer is stored
        a_ord = addressof(func_ord)
        a_name = addressof(func_name)
        f_ord_addr = c_void_p.from_address(a_ord).value
        f_name_addr = c_void_p.from_address(a_name).value
        self.assertEqual(hex(f_ord_addr), hex(f_name_addr))
        self.assertRaises(AttributeError, dll.__getitem__, 1234)
    @unittest.skipUnless(os.name == "nt", 'Windows-specific test')
    def test_1703286_A(self):
        from _ctypes import LoadLibrary, FreeLibrary
        # On winXP 64-bit, advapi32 loads at an address that does
        # NOT fit into a 32-bit integer. FreeLibrary must be able
        # to accept this address.
        # These are tests for http://www.python.org/sf/1703286
        handle = LoadLibrary("advapi32")
        FreeLibrary(handle)
    @unittest.skipUnless(os.name == "nt", 'Windows-specific test')
    def test_1703286_B(self):
        # Since on winXP 64-bit advapi32 loads like described
        # above, the (arbitrarily selected) CloseEventLog function
        # also has a high address. 'call_function' should accept
        # addresses so large.
        from _ctypes import call_function
        advapi32 = windll.advapi32
        # Calling CloseEventLog with a NULL argument should fail,
        # but the call should not segfault or so.
        self.assertEqual(0, advapi32.CloseEventLog(None))
        windll.kernel32.GetProcAddress.argtypes = c_void_p, c_char_p
        windll.kernel32.GetProcAddress.restype = c_void_p
        proc = windll.kernel32.GetProcAddress(advapi32._handle,
                                              "CloseEventLog")
        self.assertTrue(proc)
        # This is the real test: call the function via 'call_function'
        self.assertEqual(0, call_function(proc, (None,)))
# Run the ctypes loader tests when executed directly.
if __name__ == "__main__":
    unittest.main()
| svanschalkwyk/datafari | windows/python/Lib/ctypes/test/test_loading.py | Python | apache-2.0 | 4,399 |
# NOTE(review): this name shadows the Python 3 built-in ConnectionError
# when imported unqualified; consider the import style at call sites.
class ConnectionError(Exception):
    """Raised for connection-related failures."""
    pass
class DownloadError(Exception):
    """Raised for download-related failures."""
    pass
class LoginError(Exception):
    """Raised for login-related failures."""
    pass
import random
import uuid
from flask import g
from newparp.model import Chat
from newparp.model.connections import NewparpRedis, redis_chat_pool
from newparp.model.user_list import UserListStore, PingTimeoutException
def get_userlist(client, chat: Chat) -> UserListStore:
    """Return the UserListStore for *chat* after visiting its page.

    Hitting the chat URL first registers the client's session state.
    """
    client.get("/" + chat.url)
    redis = NewparpRedis(connection_pool=redis_chat_pool)
    return UserListStore(redis, chat.id)
def test_join_leave(user_client, group_chat):
    """Joining and leaving (socket drop or kick/ban) keeps the list clean."""
    user_list = get_userlist(user_client, group_chat)
    socket_id = str(uuid.uuid4())

    # Sanity: a fresh chat has no sockets, nobody online, and
    # disconnecting something that never joined reports False.
    assert user_list.session_has_open_socket(g.session_id, g.user_id) is False
    assert len(user_list.user_ids_online()) == 0
    assert user_list.socket_disconnect(socket_id, g.user_id) is False
    assert user_list.user_disconnect(g.user_id, g.user_id) is False

    # Exercise the same join/leave cycle twice: once leaving via a plain
    # socket disconnect, once via a kick/ban style user disconnect.
    for leave in (
        lambda: user_list.socket_disconnect(socket_id, g.user_id),
        lambda: user_list.user_disconnect(g.user_id, g.user_id),
    ):
        # Join then leave: the online list grows and shrinks.
        assert user_list.socket_join(socket_id, g.session_id, g.user_id) is True
        assert len(user_list.user_ids_online()) == 1
        assert leave() is True
        assert len(user_list.user_ids_online()) == 0

        # A user who is typing must stop typing when they leave.
        assert user_list.socket_join(socket_id, g.session_id, g.user_id) is True
        user_list.user_start_typing(g.user_id)
        assert len(user_list.user_numbers_typing()) == 1
        assert leave() is True
        assert len(user_list.user_numbers_typing()) == 0
def test_typing(user_client, group_chat):
    """Typing indicators can be set and cleared, singly and in bulk."""
    distinct_numbers = sorted({random.randint(0, 100) for _ in range(0, 10)})
    user_list = get_userlist(user_client, group_chat)

    # Nobody is typing to begin with.
    assert len(user_list.user_numbers_typing()) == 0

    # A single user starts typing.
    user_list.user_start_typing(distinct_numbers[0])
    assert user_list.user_numbers_typing() == [distinct_numbers[0]]
    assert len(user_list.user_numbers_typing()) == 1

    # All users start typing.
    for number in distinct_numbers:
        user_list.user_start_typing(number)
    assert sorted(user_list.user_numbers_typing()) == distinct_numbers

    # All users stop typing.
    for number in distinct_numbers:
        user_list.user_stop_typing(number)
    assert len(user_list.user_numbers_typing()) == 0
def test_socket_timeout(user_client, group_chat):
    """A socket whose ping key expired must raise PingTimeoutException."""
    user_list = get_userlist(user_client, group_chat)
    socket_id = str(uuid.uuid4())

    assert user_list.socket_join(socket_id, g.session_id, g.user_id) is True
    assert user_list.socket_ping(socket_id) is None

    # Force expire the ping key to simulate a timed-out client.
    user_list.redis.delete(user_list.session_key % (socket_id))

    # Bug fix: the previous `assert False` inside the try block is stripped
    # under `python -O`, silently passing the test.  try/except/else with an
    # explicit raise cannot be optimized away.
    try:
        user_list.socket_ping(socket_id)
    except PingTimeoutException:
        pass
    else:
        raise AssertionError('socket_ping should raise PingTimeoutException')

    # The expired socket is now tracked as an inconsistent entry.
    assert len(user_list.inconsistent_entries()) == 1
| MSPARP/newparp | tests/web/test_userlist.py | Python | agpl-3.0 | 3,520 |
"""Define and instantiate the configuration class for Robottelo."""
import logging
import os
import sys
from logging import config
from nailgun import entities, entity_mixins
from nailgun.config import ServerConfig
from robottelo.config import casts
from six.moves.urllib.parse import urlunsplit, urljoin
from six.moves.configparser import (
NoOptionError,
NoSectionError,
ConfigParser
)
LOGGER = logging.getLogger(__name__)
SETTINGS_FILE_NAME = 'robottelo.properties'
class ImproperlyConfigured(Exception):
    """Indicates that Robottelo somehow is improperly configured.
    For example, if the settings file can not be found or some required
    configuration option is not defined.
    """
def get_project_root():
    """Return the path to the Robottelo project root directory.
    :return: A directory path.
    :rtype: str
    """
    # This module lives two levels below the project root.
    here = os.path.dirname(__file__)
    return os.path.realpath(os.path.join(here, os.pardir, os.pardir))
class INIReader(object):
    """ConfigParser wrapper able to cast value when reading INI options."""
    # Helper casters
    cast_boolean = casts.Boolean()
    cast_dict = casts.Dict()
    cast_list = casts.List()
    cast_logging_level = casts.LoggingLevel()
    cast_tuple = casts.Tuple()
    cast_webdriver_desired_capabilities = casts.WebdriverDesiredCapabilities()

    def __init__(self, path):
        """Parse the INI file at ``path`` exactly once.

        Bug fix: previously an unconditional ``readfp`` call ran *before*
        the version check, so the file was read twice -- emitting a
        DeprecationWarning on Python 3 and leaving the handle exhausted
        for the second (no-op) read.
        """
        self.config_parser = ConfigParser()
        with open(path) as handler:
            if sys.version_info[0] < 3:
                self.config_parser.readfp(handler)
            else:
                # ConfigParser.readfp is deprecated on Python 3;
                # read_file replaces it.
                self.config_parser.read_file(handler)

    def get(self, section, option, default=None, cast=None):
        """Read an option from a section of a INI file.
        The default value will return if the look up option is not available.
        The value will be cast using a callable if specified otherwise a string
        will be returned.
        :param section: Section to look for.
        :param option: Option to look for.
        :param default: The value that should be used if the option is not
            defined.
        :param cast: If provided the value will be cast using the cast
            provided.
        """
        try:
            value = self.config_parser.get(section, option)
            if cast is not None:
                # Builtin container/bool types map to the dedicated casters;
                # any other callable is applied directly.
                if cast is bool:
                    value = self.cast_boolean(value)
                elif cast is dict:
                    value = self.cast_dict(value)
                elif cast is list:
                    value = self.cast_list(value)
                elif cast is tuple:
                    value = self.cast_tuple(value)
                else:
                    value = cast(value)
        except (NoSectionError, NoOptionError):
            value = default
        return value

    def has_section(self, section):
        """Check if section is available."""
        return self.config_parser.has_section(section)
class FeatureSettings(object):
    """Settings related to a feature.
    Create an instance of this class and assign attributes to map to the
    feature options.
    """
    def read(self, reader):
        """Subclasses must implement this method in order to populate itself
        with expected settings values.
        :param reader: An INIReader instance to read the settings.
        """
        raise NotImplementedError('Subclasses must implement read method.')
    # NOTE(review): despite the docstring, the concrete validate()
    # implementations in this module return a list of error strings
    # rather than raising ImproperlyConfigured.
    def validate(self):
        """Subclasses must implement this method in order to validate the
        settings and raise ``ImproperlyConfigured`` if any issue is found.
        """
        raise NotImplementedError('Subclasses must implement validate method.')
class ServerSettings(FeatureSettings):
    """Satellite server settings definitions."""
    def __init__(self, *args, **kwargs):
        super(ServerSettings, self).__init__(*args, **kwargs)
        # All attributes default to None and are populated by read().
        self.admin_password = None
        self.admin_username = None
        self.hostname = None
        self.port = None
        self.scheme = None
        self.ssh_key = None
        self.ssh_password = None
        self.ssh_username = None
    def read(self, reader):
        """Read and validate Satellite server settings."""
        self.admin_password = reader.get(
            'server', 'admin_password', 'changeme')
        self.admin_username = reader.get(
            'server', 'admin_username', 'admin')
        self.hostname = reader.get('server', 'hostname')
        self.port = reader.get('server', 'port', cast=int)
        self.scheme = reader.get('server', 'scheme', 'https')
        self.ssh_key = reader.get('server', 'ssh_key')
        self.ssh_password = reader.get('server', 'ssh_password')
        self.ssh_username = reader.get('server', 'ssh_username', 'root')
    def validate(self):
        """Return a list of validation error strings (empty when valid)."""
        validation_errors = []
        if self.hostname is None:
            validation_errors.append('[server] hostname must be provided.')
        # Either SSH credential form is acceptable; both missing is an error.
        if (self.ssh_key is None and self.ssh_password is None):
            validation_errors.append(
                '[server] ssh_key or ssh_password must be provided.')
        return validation_errors
    def get_credentials(self):
        """Return credentials for interacting with a Foreman deployment API.
        :return: A username-password pair.
        :rtype: tuple
        """
        return (self.admin_username, self.admin_password)
    def get_url(self):
        """Return the base URL of the Foreman deployment being tested.
        The following values from the config file are used to build the URL:
        * ``[server] scheme`` (default: https)
        * ``[server] hostname`` (required)
        * ``[server] port`` (default: none)
        Setting ``port`` to 80 does *not* imply that ``scheme`` is 'https'. If
        ``port`` is 80 and ``scheme`` is unset, ``scheme`` will still default
        to 'https'.
        :return: A URL.
        :rtype: str
        """
        if not self.scheme:
            scheme = 'https'
        else:
            scheme = self.scheme
        # All anticipated error cases have been handled at this point.
        if not self.port:
            return urlunsplit((scheme, self.hostname, '', '', ''))
        else:
            return urlunsplit((
                scheme, '{0}:{1}'.format(self.hostname, self.port), '', '', ''
            ))
    def get_pub_url(self):
        """Return the pub URL of the server being tested.
        The following values from the config file are used to build the URL:
        * ``main.server.hostname`` (required)
        :return: The pub directory URL.
        :rtype: str
        """
        return urlunsplit(('http', self.hostname, 'pub/', '', ''))
    def get_cert_rpm_url(self):
        """Return the Katello cert RPM URL of the server being tested.
        The following values from the config file are used to build the URL:
        * ``main.server.hostname`` (required)
        :return: The Katello cert RPM URL.
        :rtype: str
        """
        return urljoin(
            self.get_pub_url(), 'katello-ca-consumer-latest.noarch.rpm')
class ClientsSettings(FeatureSettings):
    """Clients settings definitions."""

    def __init__(self, *args, **kwargs):
        super(ClientsSettings, self).__init__(*args, **kwargs)
        self.image_dir = None
        self.provisioning_server = None

    def read(self, reader):
        """Read clients settings."""
        self.image_dir = reader.get(
            'clients', 'image_dir', '/opt/robottelo/images')
        self.provisioning_server = reader.get('clients', 'provisioning_server')

    def validate(self):
        """Validate clients settings."""
        errors = []
        # Only the provisioning server is mandatory; image_dir has a default.
        if self.provisioning_server is None:
            errors.append(
                '[clients] provisioning_server option must be provided.')
        return errors
class DockerSettings(FeatureSettings):
    """Docker settings definitions."""

    def __init__(self, *args, **kwargs):
        super(DockerSettings, self).__init__(*args, **kwargs)
        self.unix_socket = None
        self.external_url = None
        self.external_registry_1 = None
        self.external_registry_2 = None

    def read(self, reader):
        """Read docker settings."""
        self.unix_socket = reader.get('docker', 'unix_socket', False, bool)
        self.external_url = reader.get('docker', 'external_url')
        self.external_registry_1 = reader.get('docker', 'external_registry_1')
        self.external_registry_2 = reader.get('docker', 'external_registry_2')

    def validate(self):
        """Validate docker settings."""
        errors = []
        # At least one way of reaching a docker daemon must be configured.
        if not (self.unix_socket or self.external_url):
            errors.append(
                'Either [docker] unix_socket or external_url options must '
                'be provided or enabled.')
        # The two external registries are required together.
        if not (self.external_registry_1 and self.external_registry_2):
            errors.append(
                'Both [docker] external_registry_1 and external_registry_2 '
                'options must be provided.')
        return errors

    def get_unix_socket_url(self):
        """Use the unix socket connection to the local docker daemon. Make sure
        that your Satellite server's docker is configured to allow foreman user
        accessing it. This can be done by::

            $ groupadd docker
            $ usermod -aG docker foreman
            # Add -G docker to the options for the docker daemon
            $ systemctl restart docker
            $ katello-service restart
        """
        if self.unix_socket:
            return 'unix:///var/run/docker.sock'
        return None
class FakeManifestSettings(FeatureSettings):
    """Fake manifest settings defintitions."""

    def __init__(self, *args, **kwargs):
        super(FakeManifestSettings, self).__init__(*args, **kwargs)
        self.cert_url = None
        self.key_url = None
        self.url = None

    def read(self, reader):
        """Read fake manifest settings."""
        self.cert_url = reader.get('fake_manifest', 'cert_url')
        self.key_url = reader.get('fake_manifest', 'key_url')
        self.url = reader.get('fake_manifest', 'url')

    def validate(self):
        """Validate fake manifest settings."""
        errors = []
        # Every attribute in this section is mandatory.
        if not all(vars(self).values()):
            errors.append(
                'All [fake_manifest] cert_url, key_url, url options must '
                'be provided.'
            )
        return errors
class LDAPSettings(FeatureSettings):
    """LDAP settings definitions."""

    def __init__(self, *args, **kwargs):
        super(LDAPSettings, self).__init__(*args, **kwargs)
        self.basedn = None
        self.grpbasedn = None
        self.hostname = None
        self.password = None
        self.username = None

    def read(self, reader):
        """Read LDAP settings."""
        for option in ('basedn', 'grpbasedn', 'hostname',
                       'password', 'username'):
            setattr(self, option, reader.get('ldap', option))

    def validate(self):
        """Validate LDAP settings."""
        errors = []
        # Every attribute in this section is mandatory.
        if not all(vars(self).values()):
            errors.append(
                'All [ldap] basedn, grpbasedn, hostname, password, '
                'username options must be provided.'
            )
        return errors
class LibvirtHostSettings(FeatureSettings):
    """Libvirt host settings definitions."""
    def __init__(self, *args, **kwargs):
        super(LibvirtHostSettings, self).__init__(*args, **kwargs)
        self.libvirt_image_dir = None
        self.libvirt_hostname = None
    def read(self, reader):
        """Read libvirt host settings."""
        # The image dir has a sensible default; the hostname does not.
        self.libvirt_image_dir = reader.get(
            'compute_resources', 'libvirt_image_dir',
            '/var/lib/libvirt/images')
        self.libvirt_hostname = reader.get(
            'compute_resources', 'libvirt_hostname')
    def validate(self):
        """Validate libvirt host settings (only the hostname is required)."""
        if self.libvirt_hostname is None:
            return [
                '[compute_resources] libvirt_hostname option must be '
                'provided.'
            ]
        return []
class FakeCapsuleSettings(FeatureSettings):
    """Fake Capsule settings definitions."""
    def __init__(self, *args, **kwargs):
        super(FakeCapsuleSettings, self).__init__(*args, **kwargs)
        self.port_range = None
    def read(self, reader):
        """Read fake capsule settings."""
        self.port_range = reader.get(
            'fake_capsules', 'port_range', cast=tuple)
    def validate(self):
        """Validate fake capsule settings (port_range is required)."""
        if self.port_range is not None:
            return []
        return ['[fake_capsules] port_range option must be provided.']
class RHEVSettings(FeatureSettings):
    """RHEV settings definitions."""
    def __init__(self, *args, **kwargs):
        super(RHEVSettings, self).__init__(*args, **kwargs)
        # Compute Resource Information
        self.hostname = None
        self.username = None
        self.password = None
        self.datacenter = None
        self.vm_name = None
        # Image Information
        self.image_os = None
        self.image_arch = None
        self.image_username = None
        self.image_password = None
        self.image_name = None
    def read(self, reader):
        """Read rhev settings from the ``[rhev]`` section."""
        # Compute Resource Information
        self.hostname = reader.get('rhev', 'hostname')
        self.username = reader.get('rhev', 'username')
        self.password = reader.get('rhev', 'password')
        self.datacenter = reader.get('rhev', 'datacenter')
        self.vm_name = reader.get('rhev', 'vm_name')
        # Image Information
        self.image_os = reader.get('rhev', 'image_os')
        self.image_arch = reader.get('rhev', 'image_arch')
        self.image_username = reader.get('rhev', 'image_username')
        self.image_password = reader.get('rhev', 'image_password')
        self.image_name = reader.get('rhev', 'image_name')
    def validate(self):
        """Validate rhev settings.

        :return: list of error messages; empty when every option is set.
        """
        validation_errors = []
        if not all(vars(self).values()):
            # Keep this option list in sync with the attributes set in
            # __init__. (The previous message misspelled image_username
            # and listed image_name twice instead of image_password.)
            validation_errors.append(
                'All [rhev] hostname, username, password, datacenter, '
                'vm_name, image_os, image_arch, image_username, '
                'image_password, image_name options must be provided.'
            )
        return validation_errors
class VmWareSettings(FeatureSettings):
    """VmWare settings definitions."""
    def __init__(self, *args, **kwargs):
        super(VmWareSettings, self).__init__(*args, **kwargs)
        # Compute Resource Information
        self.vcenter = None
        self.username = None
        self.password = None
        self.datacenter = None
        self.vm_name = None
        # Image Information
        self.image_os = None
        self.image_arch = None
        self.image_username = None
        self.image_password = None
        self.image_name = None
    def read(self, reader):
        """Read vmware settings from the ``[vmware]`` section."""
        # Compute Resource Information
        # NOTE: the [vmware] "hostname" option is stored as ``vcenter``.
        self.vcenter = reader.get('vmware', 'hostname')
        self.username = reader.get('vmware', 'username')
        self.password = reader.get('vmware', 'password')
        self.datacenter = reader.get('vmware', 'datacenter')
        self.vm_name = reader.get('vmware', 'vm_name')
        # Image Information
        self.image_os = reader.get('vmware', 'image_os')
        self.image_arch = reader.get('vmware', 'image_arch')
        self.image_username = reader.get('vmware', 'image_username')
        self.image_password = reader.get('vmware', 'image_password')
        self.image_name = reader.get('vmware', 'image_name')
    def validate(self):
        """Validate vmware settings.

        :return: list of error messages; empty when every option is set.
        """
        validation_errors = []
        if not all(vars(self).values()):
            # Keep this option list in sync with the attributes set in
            # __init__. (The previous message misspelled image_username
            # and listed image_name twice instead of image_password.)
            validation_errors.append(
                'All [vmware] hostname, username, password, datacenter, '
                'vm_name, image_os, image_arch, image_username, '
                'image_password, image_name options must be provided.'
            )
        return validation_errors
class DiscoveryISOSettings(FeatureSettings):
    """Discovery ISO name settings definition."""
    def __init__(self, *args, **kwargs):
        super(DiscoveryISOSettings, self).__init__(*args, **kwargs)
        self.discovery_iso = None
    def read(self, reader):
        """Read discovery iso setting."""
        self.discovery_iso = reader.get('discovery', 'discovery_iso')
    def validate(self):
        """Validate discovery iso name setting (required)."""
        if self.discovery_iso is not None:
            return []
        return ['[discovery] discovery iso name must be provided.']
class OscapSettings(FeatureSettings):
    """Oscap settings definitions."""
    def __init__(self, *args, **kwargs):
        super(OscapSettings, self).__init__(*args, **kwargs)
        self.content_path = None
    def read(self, reader):
        """Read Oscap settings."""
        self.content_path = reader.get('oscap', 'content_path')
    def validate(self):
        """Validate Oscap settings (content_path is required)."""
        if self.content_path is not None:
            return []
        return ['[oscap] content_path option must be provided.']
class PerformanceSettings(FeatureSettings):
    """Performance settings definitions."""
    def __init__(self, *args, **kwargs):
        super(PerformanceSettings, self).__init__(*args, **kwargs)
        self.time_hammer = None
        self.cdn_address = None
        self.virtual_machines = None
        self.fresh_install_savepoint = None
        self.enabled_repos_savepoint = None
        self.csv_buckets_count = None
        self.sync_count = None
        self.sync_type = None
        self.repos = None
    def read(self, reader):
        """Read performance settings, applying defaults where defined."""
        self.time_hammer = reader.get(
            'performance', 'time_hammer', False, bool)
        self.cdn_address = reader.get('performance', 'cdn_address')
        self.virtual_machines = reader.get(
            'performance', 'virtual_machines', cast=list)
        self.fresh_install_savepoint = reader.get(
            'performance', 'fresh_install_savepoint')
        self.enabled_repos_savepoint = reader.get(
            'performance', 'enabled_repos_savepoint')
        self.csv_buckets_count = reader.get(
            'performance', 'csv_buckets_count', 10, int)
        self.sync_count = reader.get('performance', 'sync_count', 3, int)
        self.sync_type = reader.get('performance', 'sync_type', 'sync')
        self.repos = reader.get('performance', 'repos', cast=list)
    def validate(self):
        """Validate performance settings.

        Only the options without defaults are mandatory; one error per
        missing option is returned.
        """
        errors = []
        if self.cdn_address is None:
            errors.append('[performance] cdn_address must be provided.')
        if self.virtual_machines is None:
            errors.append('[performance] virtual_machines must be provided.')
        if self.fresh_install_savepoint is None:
            errors.append(
                '[performance] fresh_install_savepoint must be provided.')
        if self.enabled_repos_savepoint is None:
            errors.append(
                '[performance] enabled_repos_savepoint must be provided.')
        return errors
class RHAISettings(FeatureSettings):
    """RHAI settings definitions."""
    def __init__(self, *args, **kwargs):
        super(RHAISettings, self).__init__(*args, **kwargs)
        self.insights_client_el6repo = None
        self.insights_client_el7repo = None
    def read(self, reader):
        """Read RHAI settings."""
        for option in ('insights_client_el6repo', 'insights_client_el7repo'):
            setattr(self, option, reader.get('rhai', option))
    def validate(self):
        """Validate RHAI settings (nothing is mandatory)."""
        return []
class TransitionSettings(FeatureSettings):
    """Transition settings definitions."""
    def __init__(self, *args, **kwargs):
        super(TransitionSettings, self).__init__(*args, **kwargs)
        self.exported_data = None
    def read(self, reader):
        """Read transition settings."""
        self.exported_data = reader.get('transition', 'exported_data')
    def validate(self):
        """Validate transition settings (exported_data is required)."""
        if self.exported_data is None:
            return ['[transition] exported_data must be provided.']
        return []
class VlanNetworkSettings(FeatureSettings):
    """Vlan Network settings definitions."""
    def __init__(self, *args, **kwargs):
        super(VlanNetworkSettings, self).__init__(*args, **kwargs)
        self.subnet = None
        self.netmask = None
        self.gateway = None
        self.bridge = None
    def read(self, reader):
        """Read Vlan Network settings."""
        for option in ('subnet', 'netmask', 'gateway', 'bridge'):
            setattr(self, option, reader.get('vlan_networking', option))
    def validate(self):
        """Validate Vlan Network settings.

        All options are mandatory; a single aggregate error is returned
        when any of them is missing.
        """
        if all(vars(self).values()):
            return []
        return [
            'All [vlan_networking] subnet, netmask, gateway, bridge '
            'options must be provided.'
        ]
class UpgradeSettings(FeatureSettings):
    """Satellite upgrade settings definitions."""
    def __init__(self, *args, **kwargs):
        super(UpgradeSettings, self).__init__(*args, **kwargs)
        self.upgrade_data = None
    def read(self, reader):
        """Read and validate Satellite server settings."""
        self.upgrade_data = reader.get('upgrade', 'upgrade_data')
    def validate(self):
        """Validate upgrade settings (upgrade_data is required)."""
        if self.upgrade_data is None:
            return ['[upgrade] data must be provided.']
        return []
class Settings(object):
    """Robottelo's settings representation.

    Aggregates the general ``[robottelo]`` options plus one
    ``FeatureSettings`` instance per optional configuration section.
    Call :meth:`configure` once to read, validate and apply everything.
    """
    def __init__(self):
        # Internal state: lazily computed feature list, configured flag
        # and accumulated validation error messages.
        self._all_features = None
        self._configured = False
        self._validation_errors = []
        # General [robottelo] options; populated by
        # _read_robottelo_settings() during configure().
        self.browser = None
        self.locale = None
        self.project = None
        self.reader = None
        self.rhel6_repo = None
        self.rhel7_repo = None
        self.screenshots_path = None
        self.saucelabs_key = None
        self.saucelabs_user = None
        self.server = ServerSettings()
        self.run_one_datapoint = None
        self.upstream = None
        self.verbosity = None
        self.webdriver = None
        self.webdriver_binary = None
        self.webdriver_desired_capabilities = None
        # Features
        self.clients = ClientsSettings()
        self.compute_resources = LibvirtHostSettings()
        self.discovery = DiscoveryISOSettings()
        self.docker = DockerSettings()
        self.fake_capsules = FakeCapsuleSettings()
        self.fake_manifest = FakeManifestSettings()
        self.ldap = LDAPSettings()
        self.oscap = OscapSettings()
        self.performance = PerformanceSettings()
        self.rhai = RHAISettings()
        self.rhev = RHEVSettings()
        self.transition = TransitionSettings()
        self.vlan_networking = VlanNetworkSettings()
        self.upgrade = UpgradeSettings()
        self.vmware = VmWareSettings()
    def configure(self):
        """Read the settings file and parse the configuration.
        :raises: ImproperlyConfigured if any issue is found during the parsing
            or validation of the configuration.
        """
        if self.configured:
            # TODO: what to do here, raise and exception, just skip or ...?
            return
        # Expect the settings file to be on the robottelo project root.
        settings_path = os.path.join(get_project_root(), SETTINGS_FILE_NAME)
        if not os.path.isfile(settings_path):
            raise ImproperlyConfigured(
                'Not able to find settings file at {}'.format(settings_path))
        self.reader = INIReader(settings_path)
        # The [robottelo] and server sections are always read; each
        # feature section below is optional and is only read/validated
        # when present in the settings file.
        self._read_robottelo_settings()
        self._validation_errors.extend(
            self._validate_robottelo_settings())
        self.server.read(self.reader)
        self._validation_errors.extend(self.server.validate())
        if self.reader.has_section('clients'):
            self.clients.read(self.reader)
            self._validation_errors.extend(self.clients.validate())
        if self.reader.has_section('compute_resources'):
            self.compute_resources.read(self.reader)
            self._validation_errors.extend(self.compute_resources.validate())
        if self.reader.has_section('discovery'):
            self.discovery.read(self.reader)
            self._validation_errors.extend(self.discovery.validate())
        if self.reader.has_section('docker'):
            self.docker.read(self.reader)
            self._validation_errors.extend(self.docker.validate())
        if self.reader.has_section('fake_capsules'):
            self.fake_capsules.read(self.reader)
            self._validation_errors.extend(self.fake_capsules.validate())
        if self.reader.has_section('fake_manifest'):
            self.fake_manifest.read(self.reader)
            self._validation_errors.extend(self.fake_manifest.validate())
        if self.reader.has_section('ldap'):
            self.ldap.read(self.reader)
            self._validation_errors.extend(self.ldap.validate())
        if self.reader.has_section('oscap'):
            self.oscap.read(self.reader)
            self._validation_errors.extend(self.oscap.validate())
        if self.reader.has_section('performance'):
            self.performance.read(self.reader)
            self._validation_errors.extend(self.performance.validate())
        if self.reader.has_section('rhai'):
            self.rhai.read(self.reader)
            self._validation_errors.extend(self.rhai.validate())
        if self.reader.has_section('rhev'):
            self.rhev.read(self.reader)
            self._validation_errors.extend(self.rhev.validate())
        if self.reader.has_section('transition'):
            self.transition.read(self.reader)
            self._validation_errors.extend(self.transition.validate())
        if self.reader.has_section('vlan_networking'):
            self.vlan_networking.read(self.reader)
            self._validation_errors.extend(self.vlan_networking.validate())
        if self.reader.has_section('upgrade'):
            self.upgrade.read(self.reader)
            self._validation_errors.extend(self.upgrade.validate())
        if self.reader.has_section('vmware'):
            self.vmware.read(self.reader)
            self._validation_errors.extend(self.vmware.validate())
        # All errors are aggregated and reported together so a user can
        # fix the entire settings file in one pass.
        if self._validation_errors:
            raise ImproperlyConfigured(
                'Failed to validate the configuration, check the message(s):\n'
                '{}'.format('\n'.join(self._validation_errors))
            )
        self._configure_logging()
        self._configure_third_party_logging()
        self._configure_entities()
        self._configured = True
    def _read_robottelo_settings(self):
        """Read Robottelo's general settings."""
        # NOTE(review): log_driver_commands, cleanup and
        # window_manager_command are first created here rather than being
        # initialized in __init__ — confirm this is intentional.
        self.log_driver_commands = self.reader.get(
            'robottelo',
            'log_driver_commands',
            ['newSession',
             'windowMaximize',
             'get',
             'findElement',
             'sendKeysToElement',
             'clickElement',
             'mouseMoveTo'],
            list
        )
        self.browser = self.reader.get(
            'robottelo', 'browser', 'selenium')
        self.locale = self.reader.get('robottelo', 'locale', 'en_US.UTF-8')
        self.project = self.reader.get('robottelo', 'project', 'sat')
        self.rhel6_repo = self.reader.get('robottelo', 'rhel6_repo', None)
        self.rhel7_repo = self.reader.get('robottelo', 'rhel7_repo', None)
        self.screenshots_path = self.reader.get(
            'robottelo', 'screenshots_path', '/tmp/robottelo/screenshots')
        self.run_one_datapoint = self.reader.get(
            'robottelo', 'run_one_datapoint', False, bool)
        self.cleanup = self.reader.get('robottelo', 'cleanup', False, bool)
        self.upstream = self.reader.get('robottelo', 'upstream', True, bool)
        self.verbosity = self.reader.get(
            'robottelo',
            'verbosity',
            INIReader.cast_logging_level('debug'),
            INIReader.cast_logging_level
        )
        self.webdriver = self.reader.get(
            'robottelo', 'webdriver', 'firefox')
        self.saucelabs_user = self.reader.get(
            'robottelo', 'saucelabs_user', None)
        self.saucelabs_key = self.reader.get(
            'robottelo', 'saucelabs_key', None)
        self.webdriver_binary = self.reader.get(
            'robottelo', 'webdriver_binary', None)
        self.webdriver_desired_capabilities = self.reader.get(
            'robottelo',
            'webdriver_desired_capabilities',
            None,
            cast=INIReader.cast_webdriver_desired_capabilities
        )
        self.window_manager_command = self.reader.get(
            'robottelo', 'window_manager_command', None)
    def _validate_robottelo_settings(self):
        """Validate Robottelo's general settings.

        :return: list of error messages; empty when everything is valid.
        """
        validation_errors = []
        browsers = ('selenium', 'docker', 'saucelabs')
        webdrivers = ('chrome', 'firefox', 'ie', 'phantomjs', 'remote')
        if self.browser not in browsers:
            validation_errors.append(
                '[robottelo] browser should be one of {0}.'
                .format(', '.join(browsers))
            )
        if self.webdriver not in webdrivers:
            validation_errors.append(
                '[robottelo] webdriver should be one of {0}.'
                .format(', '.join(webdrivers))
            )
        # The saucelabs browser additionally needs account credentials.
        if self.browser == 'saucelabs':
            if self.saucelabs_user is None:
                validation_errors.append(
                    '[robottelo] saucelabs_user must be provided when '
                    'browser is saucelabs.'
                )
            if self.saucelabs_key is None:
                validation_errors.append(
                    '[robottelo] saucelabs_key must be provided when '
                    'browser is saucelabs.'
                )
        return validation_errors
    @property
    def configured(self):
        """Returns True if the settings have already been configured."""
        return self._configured
    @property
    def all_features(self):
        """List all expected feature settings sections."""
        # Computed once and cached: every attribute holding a
        # FeatureSettings instance counts as a feature.
        if self._all_features is None:
            self._all_features = [
                name for name, value in vars(self).items()
                if isinstance(value, FeatureSettings)
            ]
        return self._all_features
    def _configure_entities(self):
        """Configure NailGun's entity classes.
        Do the following:
        * Set ``entity_mixins.CREATE_MISSING`` to ``True``. This causes method
          ``EntityCreateMixin.create_raw`` to generate values for empty and
          required fields.
        * Set ``nailgun.entity_mixins.DEFAULT_SERVER_CONFIG`` to whatever is
          returned by :meth:`robottelo.helpers.get_nailgun_config`. See
          ``robottelo.entity_mixins.Entity`` for more information on the effects
          of this.
        * Set a default value for ``nailgun.entities.GPGKey.content``.
        * Set the default value for
          ``nailgun.entities.DockerComputeResource.url``
          if either ``docker.internal_url`` or ``docker.external_url`` is set in
          the configuration file.
        """
        entity_mixins.CREATE_MISSING = True
        entity_mixins.DEFAULT_SERVER_CONFIG = ServerConfig(
            self.server.get_url(),
            self.server.get_credentials(),
            verify=False,
        )
        # Monkey-patch GPGKey.__init__ so the content field defaults to a
        # valid key shipped with the test data.
        gpgkey_init = entities.GPGKey.__init__
        def patched_gpgkey_init(self, server_config=None, **kwargs):
            """Set a default value on the ``content`` field."""
            gpgkey_init(self, server_config, **kwargs)
            self._fields['content'].default = os.path.join(
                get_project_root(),
                'tests', 'foreman', 'data', 'valid_gpg_key.txt'
            )
        entities.GPGKey.__init__ = patched_gpgkey_init
        # NailGun provides a default value for ComputeResource.url. We override
        # that value if `docker.internal_url` or `docker.external_url` is set.
        docker_url = None
        # Try getting internal url
        docker_url = self.docker.get_unix_socket_url()
        # Try getting external url
        if docker_url is None:
            docker_url = self.docker.external_url
        if docker_url is not None:
            dockercr_init = entities.DockerComputeResource.__init__
            def patched_dockercr_init(self, server_config=None, **kwargs):
                """Set a default value on the ``docker_url`` field."""
                dockercr_init(self, server_config, **kwargs)
                self._fields['url'].default = docker_url
            entities.DockerComputeResource.__init__ = patched_dockercr_init
    def _configure_logging(self):
        """Configure logging for the entire framework.
        If a config named ``logging.conf`` exists in Robottelo's root
        directory, the logger is configured using the options in that file.
        Otherwise, a custom logging output format is set, and default values
        are used for all other logging options.
        """
        # All output should be made by the logging module, including warnings
        logging.captureWarnings(True)
        # Set the logging level based on the Robottelo's verbosity
        for name in ('nailgun', 'robottelo'):
            logging.getLogger(name).setLevel(self.verbosity)
        # Allow overriding logging config based on the presence of logging.conf
        # file on Robottelo's project root
        logging_conf_path = os.path.join(get_project_root(), 'logging.conf')
        if os.path.isfile(logging_conf_path):
            config.fileConfig(logging_conf_path)
        else:
            logging.basicConfig(
                format='%(levelname)s %(module)s:%(lineno)d: %(message)s'
            )
    def _configure_third_party_logging(self):
        """Increase the level of third party packages logging."""
        # These libraries are chatty at INFO/DEBUG; silence everything
        # below WARNING.
        loggers = (
            'bugzilla',
            'easyprocess',
            'paramiko',
            'requests.packages.urllib3.connectionpool',
            'selenium.webdriver.remote.remote_connection',
        )
        for logger in loggers:
            logging.getLogger(logger).setLevel(logging.WARNING)
| Ichimonji10/robottelo | robottelo/config/base.py | Python | gpl-3.0 | 35,889 |
"""
The Plaid API
The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
class InstitutionsSearchPaymentInitiationOptions(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # No enum-restricted or range/length-restricted attributes for this model.
    allowed_values = {
    }
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    # NOTE(review): generated nullable flag — the whole model value may be
    # None in API payloads.
    _nullable = True
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'payment_id': (str, none_type,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        return None
    attribute_map = {
        'payment_id': 'payment_id',  # noqa: E501
    }
    _composed_schemas = {}
    # Internal bookkeeping attributes set in __init__ (not model data).
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """InstitutionsSearchPaymentInitiationOptions - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            payment_id (str, none_type): A unique ID identifying the payment. [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
| plaid/plaid-python | plaid/model/institutions_search_payment_initiation_options.py | Python | mit | 6,819 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova import objects
from nova.objects import base
from nova.objects import fields
# Maximum allowed length of a tag string. NOTE(review): not referenced in
# this module's visible code — presumably enforced elsewhere; confirm.
MAX_TAG_LENGTH = 60
@base.NovaObjectRegistry.register
class Tag(base.NovaObject):
    """A single tag string attached to a resource (instance)."""
    # Version 1.0: Initial version
    # Version 1.1: Added method exists()
    VERSION = '1.1'
    fields = {
        'resource_id': fields.StringField(),
        'tag': fields.StringField(),
    }
    @staticmethod
    def _from_db_object(context, tag, db_tag):
        """Populate *tag* from a DB row and attach *context* to it."""
        for field_name in tag.fields:
            setattr(tag, field_name, db_tag[field_name])
        tag.obj_reset_changes()
        tag._context = context
        return tag
    @base.remotable
    def create(self):
        """Persist this tag via the DB API and refresh from the row."""
        row = db.instance_tag_add(self._context, self.resource_id, self.tag)
        self._from_db_object(self._context, self, row)
    @base.remotable_classmethod
    def destroy(cls, context, resource_id, name):
        """Delete tag *name* from the given resource."""
        db.instance_tag_delete(context, resource_id, name)
    @base.remotable_classmethod
    def exists(cls, context, resource_id, name):
        """Return whether tag *name* is set on the given resource."""
        return db.instance_tag_exists(context, resource_id, name)
@base.NovaObjectRegistry.register
class TagList(base.ObjectListBase, base.NovaObject):
    """A list of Tag objects belonging to one resource."""
    # Version 1.0: Initial version
    # Version 1.1: Tag <= version 1.1
    VERSION = '1.1'
    fields = {
        'objects': fields.ListOfObjectsField('Tag'),
    }
    @base.remotable_classmethod
    def get_by_resource_id(cls, context, resource_id):
        """Load the tags of the given resource from the database."""
        rows = db.instance_tag_get_by_instance_uuid(context, resource_id)
        return base.obj_make_list(context, cls(), objects.Tag, rows)
    @base.remotable_classmethod
    def create(cls, context, resource_id, tags):
        """Store *tags* for the resource and return the resulting list."""
        rows = db.instance_tag_set(context, resource_id, tags)
        return base.obj_make_list(context, cls(), objects.Tag, rows)
    @base.remotable_classmethod
    def destroy(cls, context, resource_id):
        """Delete every tag attached to the given resource."""
        db.instance_tag_delete_all(context, resource_id)
| phenoxim/nova | nova/objects/tag.py | Python | apache-2.0 | 2,514 |
from JumpScale import j
import os
import time
import socket
# Base class provided by JumpScale's cuisine tool; used below as the
# parent of CuisineBootMediaInstaller.
base = j.tools.cuisine._getBaseClass()
class CuisineBootMediaInstaller(base):
def __init__(self, executor, cuisine):
self._executor = executor
self._cuisine = cuisine
    def _downloadImage(self, url, redownload=False):
        """Download the image at *url* into $tmpDir, reusing a cached copy.

        :param url: image URL; its last path component becomes the filename
        :param redownload: when True, delete any cached copy first
        :return: the bare filename (not the full path) of the download
        """
        base = url.split("/")[-1]
        downloadpath = "$tmpDir/%s" % base
        self._cuisine.core.dir_ensure("$tmpDir")
        if redownload:
            self._cuisine.core.file_unlink(downloadpath)
        # Only hit the network when no cached copy exists.
        if not self._cuisine.core.file_exists(downloadpath):
            self._cuisine.core.run("cd $tmpDir;curl -L %s -O" % url)
        return base
    def _partition(self, deviceid, type):
        """Write a fresh partition table on /dev/<deviceid>.

        Creates a ~200MB bootable FAT32 partition followed by an ext4
        partition spanning the rest of the disk.

        :param deviceid: block device name, e.g. ``sdb``
        :param type: partition table label passed to parted mklabel
            (e.g. ``msdos`` or ``gpt``)
        """
        cmd = "parted -s /dev/%s mklabel %s mkpart primary fat32 2 200M set 1 boot on mkpart primary ext4 200M 100%%" % (
            deviceid, type)
        self._cuisine.core.run(cmd)
    def _umount(self, deviceid):
        """Unmount the work mountpoints and both partitions.

        Each umount uses ``die=False`` so already-unmounted paths are
        ignored instead of failing.
        """
        self._cuisine.core.run("umount /mnt/root/boot", die=False)
        self._cuisine.core.run("umount /mnt/root", die=False)
        self._cuisine.core.run("umount /dev/%s1" % deviceid, die=False)
        self._cuisine.core.run("umount /dev/%s2" % deviceid, die=False)
    def _mount(self, deviceid):
        """Format both partitions and mount them under /mnt/root.

        WARNING: destroys existing data. Partition 2 is formatted ext4 and
        mounted at /mnt/root; partition 1 is formatted FAT32 and mounted at
        /mnt/root/boot.
        """
        self._cuisine.core.run("mkfs.ext4 -F /dev/%s2" % deviceid)
        self._cuisine.core.run("mkdir -p /mnt/root && mount /dev/%s2 /mnt/root" % deviceid)
        self._cuisine.core.run("mkfs.vfat -F32 /dev/%s1" % deviceid)
        self._cuisine.core.run("mkdir -p /mnt/root/boot && mount /dev/%s1 /mnt/root/boot" % deviceid)
    def _install(self, base):
        """Extract the image archive $tmpDir/<base> onto the mounted card.

        Also appends ``PermitRootLogin=yes`` to the extracted sshd config
        so root SSH login works on first boot.
        """
        # We use bsdtar to support pi2 arm images.
        self._cuisine.core.run("cd $tmpDir && bsdtar -vxpf %s -C /mnt/root" % base)
        self._cuisine.core.run("sync")
        self._cuisine.core.run("echo 'PermitRootLogin=yes'>>'/mnt/root/etc/ssh/sshd_config'")
def _findDevices(self):
devs = []
for line in self._cuisine.core.run("lsblk -b -o TYPE,NAME,SIZE")[1].split("\n"):
if line.startswith("disk"):
while line.find(" ") > 0:
line = line.replace(" ", " ")
ttype, dev, size = line.split(" ")
size = int(size)
if size > 30000000000 and size < 32000000000:
devs.append((dev, size))
if size > 15000000000 and size < 17000000000:
devs.append((dev, size))
if size > 7500000000 and size < 8500000000:
devs.append((dev, size))
if size > 4000000000 and size < 4100000000:
devs.append((dev, size))
if len(devs) == 0:
raise j.exceptions.RuntimeError(
"could not find flash disk device, (need to find at least 1 of 8,16 or 32 GB size)" % devs)
return devs
    def formatCardDeployImage(self, url, deviceid=None, part_type='msdos', post_install=None):
        """
        will only work if 1 or more sd cards found of 4 or 8 or 16 or 32 GB, be careful will overwrite the card
        executor = a linux machine
        executor=j.tools.executor.getSSHBased(addr="192.168.0.23", port=22,login="root",passwd="rooter",pushkey="ovh_install")
        executor.cuisine.bootmediaInstaller.formatCards()
        :param url: Image url
        :param deviceid: Install on this device id, if not provided, will detect all devices that are 8,16,or 32GB
        :param post_install: A method that will be called with the deviceid before the unmounting of the device.
        """
        if post_install and not callable(post_install):
            raise Exception("Post install must be callable")
        base = self._downloadImage(url)
        # Per device: partition, format+mount, unpack the image, let the
        # caller tweak the mounted tree, then unmount everything.
        def partition(deviceid, size, base):
            self._partition(deviceid, part_type)
            self._umount(deviceid)
            self._mount(deviceid)
            self._install(base)
            if post_install:
                post_install(deviceid)
            self._umount(deviceid)
        if deviceid is None:
            devs = self._findDevices()
        else:
            # Size is unknown (and unused) when the device is given explicitly.
            devs = [(deviceid, 0)]
        for deviceid, size in devs:
            partition(deviceid, size, base)
        return devs
    def ubuntu(self, platform="amd64", deviceid=None):
        """Write an Ubuntu 15.10 server ISO onto the given device.

        Only ``amd64`` is supported; any other platform raises
        ``j.exceptions.Input``.

        :param platform: target hardware platform (only "amd64" supported)
        :param deviceid: block device name to write to, e.g. ``sdb``
        """
        if platform == "amd64":
            name = self._downloadImage("http://releases.ubuntu.com/15.10/ubuntu-15.10-server-amd64.iso")
        else:
            raise j.exceptions.Input("platform not supported yet")
        path = "$tmpDir/%s" % name
        # Raw-write the ISO straight onto the device (no partitioning step).
        cmd = 'dd if=%s of=/dev/%s bs=4000' % (path, deviceid)
        self._cuisine.core.sudo(cmd)
    def debian(self, platform="orangepi_plus", deviceid=None):
        """Install Debian onto a boot medium — NOT IMPLEMENTED.

        Always raises: RuntimeError for "orangepi_plus", otherwise
        ``j.exceptions.Input``.
        """
        if platform == "orangepi_plus":
            raise RuntimeError("not implemented")
        else:
            raise j.exceptions.Input("platform not supported yet")
        # self.formatCardDeployImage(url, deviceid=deviceid)
    def arch(self, platform="rpi_2b", deviceid=None):
        """Flash the latest Arch Linux ARM image onto an SD card.

        Only ``rpi_2b`` is supported; any other platform raises
        ``j.exceptions.Input``.

        :param platform: target hardware platform (only "rpi_2b" supported)
        :param deviceid: target device; autodetect when None
        """
        if platform == "rpi_2b":
            url = "http://archlinuxarm.org/os/ArchLinuxARM-rpi-2-latest.tar.gz"
        else:
            raise j.exceptions.Input("platform not supported yet")
        self.formatCardDeployImage(url, deviceid=deviceid)
def g8os_arm(self, url, gid, nid, deviceid=None):
init_tmpl = """\
#!/usr/bin/bash
mkdir /dev/pts
mount -t devpts none /dev/pts
mount -o remount,rw /
source /etc/profile
exec /sbin/core -gid {gid} -nid {nid} -roles g8os > /var/log/core.log 2>&1
"""
def configure(deviceid):
import textwrap
init = textwrap.dedent(init_tmpl).format(gid=gid, nid=nid)
self._cuisine.core.file_write("/mnt/sbin/init", init, mode=755)
self.formatCardDeployImage(url, deviceid=deviceid, part_type='msdos', post_install=configure)
    def g8os(self, gid, nid, platform="amd64", deviceid=None, url=None):
        """
        if platform none then it will use self._cuisine.node.hwplatform
        example: hwplatform = rpi_2b, orangepi_plus,amd64
        """
        # Default to the hosted amd64 image when no explicit url is given.
        if url is None:
            if platform == "amd64":
                url = "https://stor.jumpscale.org/public/g8os.tgz"
            else:
                raise j.exceptions.Input("platform not supported yet")
        # fstab template for the installed root; the PARTUUID placeholders are
        # filled in by configure() once the card has been partitioned.
        fstab_tmpl = """\
        PARTUUID={rootuuid}\t/\text4\trw,relatime,data=ordered\t0 1
        PARTUUID={bootuuid}\t/boot\tvfat\trw,relatime,fmask=0022,dmask=0022,codepage=437,iocharset=iso8859-1,shortname=mixed,errors=remount-ro 0 2
        """
        # Init script that boots straight into the g8os core agent; {bash} is
        # resolved below depending on where bash lives in the image.
        init_tmpl = """\
        #!{bash}
        mkdir /dev/pts
        mount -t devpts none /dev/pts
        mount -o remount,rw /
        source /etc/profile
        exec /usr/bin/core -gid {gid} -nid {nid} -roles g8os > /var/log/core.log 2>&1
        """
        def configure(deviceid):
            # get UUID of device
            import textwrap
            _1, bootuuid, _1 = self._cuisine.core.run('blkid /dev/%s1 -o value -s PARTUUID' % deviceid)
            _2, rootuuid, _2 = self._cuisine.core.run('blkid /dev/%s2 -o value -s PARTUUID' % deviceid)
            # Bind the pseudo filesystems the chrooted grub tooling needs.
            self._cuisine.core.run('mount -t sysfs none /mnt/root/sys')
            self._cuisine.core.run('mount -t devtmpfs none /mnt/root/dev')
            self._cuisine.core.run('mount -t tmpfs none /mnt/root/tmp')
            self._cuisine.core.run('mount -t proc none /mnt/root/proc')
            # add g8os section.
            self._cuisine.core.run(
                'chroot /mnt/root grub-install --target=x86_64-efi --efi-directory=/boot --modules="part_gpt ext2 fat" --removable')
            self._cuisine.core.run('chroot /mnt/root grub-mkconfig -o /boot/grub/grub.cfg')
            # Unwind the pseudo-fs mounts in the same order they were created.
            self._cuisine.core.run('umount /mnt/root/sys')
            self._cuisine.core.run('umount /mnt/root/dev')
            self._cuisine.core.run('umount /mnt/root/tmp')
            self._cuisine.core.run('umount /mnt/root/proc')
            fstab = textwrap.dedent(fstab_tmpl).format(rootuuid=rootuuid, bootuuid=bootuuid)
            self._cuisine.core.file_write("/mnt/root/etc/fstab", fstab)
            # Prefer /usr/bin/bash for the init shebang, fall back to /bin/bash.
            bash = '/usr/bin/bash'
            if not j.sal.fs.exists('/mnt/root/usr/bin/bash'):
                bash = '/bin/bash'
            init = textwrap.dedent(init_tmpl).format(gid=gid, nid=nid, bash=bash)
            self._cuisine.core.file_write("/mnt/root/sbin/init", init, mode=755)
        self.formatCardDeployImage(url, deviceid=deviceid, part_type='gpt', post_install=configure)
def __str__(self):
return "cuisine.bootmediaInstaller:%s:%s" % (
getattr(self._executor, 'addr', 'local'), getattr(self._executor, 'port', ''))
__repr__ = __str__
| Jumpscale/jumpscale_core8 | lib/JumpScale/tools/cuisine/tools/CuisineBootMediaInstaller.py | Python | apache-2.0 | 9,198 |
from .. utils import TranspileTestCase, BuiltinTwoargFunctionTestCase
from unittest import expectedFailure
class FilterTests(TranspileTestCase):
    """Transpile-and-execute tests of ``filter`` driven by a custom iterator.

    The two ``%s`` slots in ``base_code`` receive (iterable literal,
    predicate expression). ``ListLike`` is a hand-rolled iterator used as a
    placeholder while real ``list()`` support is incomplete.
    """

    # NOTE(review): ListLike pre-increments index, so element 0 of x is never
    # yielded and the last access overruns by one -- presumably intentional
    # for these expected-failure cases; confirm before reuse.
    base_code = """
    #placeholder while list()s etc aren't fully implemented
    class ListLike:
        x = %s
        index = 0

        def __iter__(self):
            return self

        def __next__(self):
            self.index = self.index + 1
            if self.index > len(self.x):
                raise StopIteration
            return self.x[self.index]

    def testish(x):
        return %s

    print(filter(testish, ListLike()))
    mylist = ListLike()
    print(filter(testish, mylist).__next__())
    print(filter(testish, mylist).__next__())
    print(filter(testish, mylist).__next__())
    try:
        print(filter(testish, mylist).__next__())
    except StopIteration:
        pass
    """

    @expectedFailure
    def test_bool(self):
        self.assertCodeExecution(self.base_code % ("[True, False, True]", "bool(x)"))

    @expectedFailure
    def test_bytearray(self):
        self.assertCodeExecution(self.base_code % ("b'123'", "x"))

    @expectedFailure
    def test_float(self):
        self.assertCodeExecution(self.base_code % ("[3.14, 2.17, 1.0]", "x > 1"))

    @expectedFailure
    def test_int(self):
        self.assertCodeExecution(self.base_code % ("[1, 2, 3]", "x * 2"))
class BuiltinFilterFunctionTests(BuiltinTwoargFunctionTestCase, TranspileTestCase):
    """Auto-generated two-argument tests of the ``filter`` builtin.

    The base class generates one ``test_<type1>_<type2>`` case per pair of
    sample argument types; pairs listed in ``not_implemented`` are skipped
    until the transpiler supports them.
    """

    functions = ["filter"]

    # Type pairs whose generated test is known to fail in the transpiler.
    not_implemented = [
        'test_bool_bytearray',
        'test_bool_bytes',
        'test_bool_class',
        'test_bool_dict',
        'test_bool_frozenset',
        'test_bool_list',
        'test_bool_range',
        'test_bool_set',
        'test_bool_str',
        'test_bool_tuple',
        'test_bytearray_bytearray',
        'test_bytearray_bytes',
        'test_bytearray_class',
        'test_bytearray_dict',
        'test_bytearray_frozenset',
        'test_bytearray_list',
        'test_bytearray_range',
        'test_bytearray_set',
        'test_bytearray_str',
        'test_bytearray_tuple',
        'test_bytes_bytearray',
        'test_bytes_bytes',
        'test_bytes_class',
        'test_bytes_dict',
        'test_bytes_frozenset',
        'test_bytes_list',
        'test_bytes_range',
        'test_bytes_set',
        'test_bytes_str',
        'test_bytes_tuple',
        'test_class_bool',
        'test_class_bytearray',
        'test_class_bytes',
        'test_class_class',
        'test_class_complex',
        'test_class_dict',
        'test_class_float',
        'test_class_frozenset',
        'test_class_int',
        'test_class_list',
        'test_class_None',
        'test_class_NotImplemented',
        'test_class_range',
        'test_class_set',
        'test_class_slice',
        'test_class_str',
        'test_class_tuple',
        'test_complex_bytearray',
        'test_complex_bytes',
        'test_complex_class',
        'test_complex_dict',
        'test_complex_frozenset',
        'test_complex_list',
        'test_complex_range',
        'test_complex_set',
        'test_complex_str',
        'test_complex_tuple',
        'test_dict_bytearray',
        'test_dict_bytes',
        'test_dict_class',
        'test_dict_dict',
        'test_dict_frozenset',
        'test_dict_list',
        'test_dict_range',
        'test_dict_set',
        'test_dict_str',
        'test_dict_tuple',
        'test_float_bytearray',
        'test_float_bytes',
        'test_float_class',
        'test_float_dict',
        'test_float_frozenset',
        'test_float_list',
        'test_float_range',
        'test_float_set',
        'test_float_str',
        'test_float_tuple',
        'test_frozenset_bool',
        'test_frozenset_bytearray',
        'test_frozenset_bytes',
        'test_frozenset_class',
        'test_frozenset_complex',
        'test_frozenset_dict',
        'test_frozenset_float',
        'test_frozenset_frozenset',
        'test_frozenset_int',
        'test_frozenset_list',
        'test_frozenset_None',
        'test_frozenset_NotImplemented',
        'test_frozenset_range',
        'test_frozenset_set',
        'test_frozenset_slice',
        'test_frozenset_str',
        'test_frozenset_tuple',
        'test_int_bytearray',
        'test_int_bytes',
        'test_int_class',
        'test_int_dict',
        'test_int_frozenset',
        'test_int_list',
        'test_int_range',
        'test_int_set',
        'test_int_str',
        'test_int_tuple',
        'test_list_bytearray',
        'test_list_bytes',
        'test_list_class',
        'test_list_dict',
        'test_list_frozenset',
        'test_list_list',
        'test_list_range',
        'test_list_set',
        'test_list_str',
        'test_list_tuple',
        'test_None_bytearray',
        'test_None_bytes',
        'test_None_class',
        'test_None_dict',
        'test_None_frozenset',
        'test_None_list',
        'test_None_range',
        'test_None_set',
        'test_None_str',
        'test_None_tuple',
        'test_NotImplemented_bytearray',
        'test_NotImplemented_bytes',
        'test_NotImplemented_class',
        'test_NotImplemented_dict',
        'test_NotImplemented_frozenset',
        'test_NotImplemented_list',
        'test_NotImplemented_range',
        'test_NotImplemented_set',
        'test_NotImplemented_str',
        'test_NotImplemented_tuple',
        'test_range_bytearray',
        'test_range_bytes',
        'test_range_class',
        'test_range_dict',
        'test_range_frozenset',
        'test_range_list',
        'test_range_range',
        'test_range_set',
        'test_range_str',
        'test_range_tuple',
        'test_set_bytearray',
        'test_set_bytes',
        'test_set_class',
        'test_set_dict',
        'test_set_frozenset',
        'test_set_list',
        'test_set_range',
        'test_set_set',
        'test_set_str',
        'test_set_tuple',
        'test_slice_bytearray',
        'test_slice_bytes',
        'test_slice_class',
        'test_slice_dict',
        'test_slice_frozenset',
        'test_slice_list',
        'test_slice_range',
        'test_slice_set',
        'test_slice_str',
        'test_slice_tuple',
        'test_str_bytearray',
        'test_str_bytes',
        'test_str_class',
        'test_str_dict',
        'test_str_frozenset',
        'test_str_list',
        'test_str_range',
        'test_str_set',
        'test_str_str',
        'test_str_tuple',
        'test_tuple_bytearray',
        'test_tuple_bytes',
        'test_tuple_class',
        'test_tuple_dict',
        'test_tuple_frozenset',
        'test_tuple_list',
        'test_tuple_range',
        'test_tuple_set',
        'test_tuple_str',
        'test_tuple_tuple',
    ]
| gEt-rIgHt-jR/voc | tests/builtins/test_filter.py | Python | bsd-3-clause | 7,021 |
# Parse the shuffle instructions into (opcode, *args) tuples:
#   (0,)      -> "deal into new stack"
#   (1, n)    -> "cut n"
#   (2, inc)  -> "deal with increment inc"
shuffles = []
with open('input.txt') as f:
    for line in f:
        if line.startswith('deal with'):
            inc = int(line.strip().split(' ')[-1])
            shuffles.append((2, inc))
        elif line.startswith('cut'):
            d = int(line.strip().split(' ')[-1])
            shuffles.append((1, d))
        elif line.startswith('deal into'):
            shuffles.append((0,))
        else:
            assert False
def egcd(a, b):
    """Extended Euclid: return (g, x, y) such that a*x + b*y == g == gcd(a, b)."""
    # Iterative form of the classic recursion; (r0, r1) walks the remainder
    # chain while (x0, y0) keep the Bezout coefficients of r0 up to date.
    r0, r1 = a, b
    x0, x1 = 1, 0
    y0, y1 = 0, 1
    while r0:
        q = r1 // r0
        r1, r0 = r0, r1 - q * r0
        x1, x0 = x0, x1 - q * x0
        y1, y0 = y0, y1 - q * y0
    return (r1, x1, y1)
def modinv(a, m):
    """Return the multiplicative inverse of ``a`` modulo ``m``.

    Raises Exception when gcd(a, m) != 1, i.e. when no inverse exists.
    """
    g, x, _ = egcd(a, m)
    if g != 1:
        raise Exception('modular inverse does not exist')
    return x % m
def reverse_deal_into(ncards, idx):
    """Invert "deal into new stack": the deck is simply reversed."""
    return (ncards - 1) - idx
def reverse_cut(ncards, n, idx):
    """Invert "cut n": shift the index forward by n (mod ncards).

    Works for negative cuts too, since Python's % always yields a
    non-negative result for a positive modulus.
    """
    return (idx + n % ncards) % ncards
def reverse_deal_with(ncards, inc, idx):
    """Invert "deal with increment inc" via the modular inverse of inc.

    (For prime ncards the inverse could also be computed with Fermat's
    little theorem as pow(inc, ncards - 2, ncards).)
    """
    return idx * modinv(inc, ncards) % ncards
def reverse_shuffle(ncards, idx):
    """Trace position ``idx`` backwards through the whole shuffle sequence.

    Applies the inverse of every parsed instruction in ``shuffles`` in
    reverse order and returns the original position of the card.
    """
    for step in reversed(shuffles):
        op = step[0]
        if op == 0:
            idx = reverse_deal_into(ncards, idx)
        elif op == 1:
            idx = reverse_cut(ncards, step[1], idx)
        elif op == 2:
            idx = reverse_deal_with(ncards, step[1], idx)
    return idx
# p1
# Brute force: card 2019's final position is the index that reverse-shuffles
# back to 2019.
for i in range(10007):
    if reverse_shuffle(10007, i) == 2019:
        print('part1: ', i)
        break
# p2
ntimes = 101741582076661
ncards = 119315717514047
# all operations are linear in the index modulo ncards. f(x) = A*x + B. We can get A, B using 2 points
X = 2020
Y = reverse_shuffle(ncards, X)
Z = reverse_shuffle(ncards, Y)
A = (Y-Z) * modinv(X-Y+ncards, ncards) % ncards
B = (Y-A*X) % ncards
"""
apply f(x) ntimes, use geometric series formula
f^n(x) = A^n*x + A^(n-1)*B + A^(n-2)*B + ... + B
= A^n*x + (A^(n-1) + A^(n-2) + ... + 1) * B
= A^n*x + (A^n-1) / (A-1) * B
"""
print('part2: ', (pow(A, ntimes, ncards)*X + (pow(A, ntimes, ncards)-1)
* modinv(A-1, ncards) * B) % ncards)
| marcosfede/algorithms | adventofcode/2019/d22/d22.py | Python | gpl-3.0 | 2,144 |
"""
Generates protein-ligand docked poses.
"""
import platform
import logging
import os
import tempfile
import tarfile
import numpy as np
from subprocess import call
from subprocess import check_output
from typing import List, Optional, Tuple, Union
from deepchem.dock.binding_pocket import BindingPocketFinder
from deepchem.utils.data_utils import download_url, get_data_dir
from deepchem.utils.typing import RDKitMol
from deepchem.utils.geometry_utils import compute_centroid, compute_protein_range
from deepchem.utils.rdkit_utils import load_molecule, write_molecule
from deepchem.utils.vina_utils import load_docked_ligands, write_vina_conf
logger = logging.getLogger(__name__)

# Type alias: a docked pose pairs a protein with a ligand conformation.
DOCKED_POSES = List[Tuple[RDKitMol, RDKitMol]]
class PoseGenerator(object):
  """Abstract interface for computing low-energy molecular-complex poses.

  A "pose" is an energetically favorable conformation of two molecules
  relative to one another. Finding such poses is a key step towards
  estimating the binding free energy of a complex, e.g. for
  protein-ligand interactions. Concrete subclasses implement
  `generate_poses` by invoking a docking backend.
  """

  def generate_poses(self,
                     molecular_complex: Tuple[str, str],
                     centroid: Optional[np.ndarray] = None,
                     box_dims: Optional[np.ndarray] = None,
                     exhaustiveness: int = 10,
                     num_modes: int = 9,
                     num_pockets: Optional[int] = None,
                     out_dir: Optional[str] = None,
                     generate_scores: bool = False):
    """Compute a list of low-energy poses for the given complex.

    Parameters
    ----------
    molecular_complex: Tuple[str, str]
      `(protein_file, ligand_file)` pair describing the complex.
    centroid: np.ndarray, optional (default None)
      Center of the docking box; computed when omitted.
    box_dims: np.ndarray, optional (default None)
      Shape `(3,)` docking-box size; defaults to the complex's extent
      plus 5 angstroms.
    exhaustiveness: int, optional (default 10)
      How exhaustively the backend searches for poses.
    num_modes: int, optional (default 9)
      Number of binding modes to generate per invocation.
    num_pockets: int, optional (default None)
      When set, `self.pocket_finder` must exist; only the first
      `num_pockets` pockets it returns are docked.
    out_dir: str, optional (default None)
      Directory to which generated poses are written, when given.
    generate_scores: bool, optional (default False)
      When `True`, also return scores computed by the backend.

    Returns
    -------
    A list of molecular complexes in energetically favorable poses.
    """
    raise NotImplementedError
class VinaPoseGenerator(PoseGenerator):
  """Uses Autodock Vina to generate binding poses.

  This class uses Autodock Vina to make make predictions of
  binding poses. It downloads the Autodock Vina executable for
  your system to your specified DEEPCHEM_DATA_DIR (remember this
  is an environment variable you set) and invokes the executable
  to perform pose generation for you.

  Note
  ----
  This class requires RDKit to be installed.
  """

  def __init__(self,
               sixty_four_bits: bool = True,
               pocket_finder: Optional[BindingPocketFinder] = None):
    """Initializes Vina Pose Generator

    Parameters
    ----------
    sixty_four_bits: bool, optional (default True)
      Specifies whether this is a 64-bit machine. Needed to download
      the correct executable.
    pocket_finder: BindingPocketFinder, optional (default None)
      If specified should be an instance of
      `dc.dock.BindingPocketFinder`.
    """
    data_dir = get_data_dir()
    # Pick the download URL and install layout per operating system.
    if platform.system() == 'Linux':
      url = "http://vina.scripps.edu/download/autodock_vina_1_1_2_linux_x86.tgz"
      filename = "autodock_vina_1_1_2_linux_x86.tgz"
      dirname = "autodock_vina_1_1_2_linux_x86"
      self.vina_dir = os.path.join(data_dir, dirname)
      self.vina_cmd = os.path.join(self.vina_dir, "bin/vina")
    elif platform.system() == 'Darwin':
      if sixty_four_bits:
        url = "http://vina.scripps.edu/download/autodock_vina_1_1_2_mac_64bit.tar.gz"
        filename = "autodock_vina_1_1_2_mac_64bit.tar.gz"
        dirname = "autodock_vina_1_1_2_mac_catalina_64bit"
      else:
        url = "http://vina.scripps.edu/download/autodock_vina_1_1_2_mac.tgz"
        filename = "autodock_vina_1_1_2_mac.tgz"
        dirname = "autodock_vina_1_1_2_mac"
      self.vina_dir = os.path.join(data_dir, dirname)
      self.vina_cmd = os.path.join(self.vina_dir, "bin/vina")
    elif platform.system() == 'Windows':
      url = "http://vina.scripps.edu/download/autodock_vina_1_1_2_win32.msi"
      filename = "autodock_vina_1_1_2_win32.msi"
      self.vina_dir = "\\Program Files (x86)\\The Scripps Research Institute\\Vina"
      self.vina_cmd = os.path.join(self.vina_dir, "vina.exe")
    else:
      raise ValueError(
          "Unknown operating system. Try using a cloud platform to run this code instead."
      )
    self.pocket_finder = pocket_finder
    # Download and unpack Vina only on first use.
    if not os.path.exists(self.vina_dir):
      logger.info("Vina not available. Downloading")
      download_url(url, data_dir)
      downloaded_file = os.path.join(data_dir, filename)
      logger.info("Downloaded Vina. Extracting")
      if platform.system() == 'Windows':
        msi_cmd = "msiexec /i %s" % downloaded_file
        check_output(msi_cmd.split())
      else:
        # NOTE(review): tarfile.extractall trusts member paths; a crafted
        # archive could write outside data_dir (CVE-2007-4559). Consider
        # validating members or using the extraction 'filter' argument.
        with tarfile.open(downloaded_file) as tar:
          tar.extractall(data_dir)
      logger.info("Cleanup: removing downloaded vina tar.gz")
      os.remove(downloaded_file)

  def generate_poses(self,
                     molecular_complex: Tuple[str, str],
                     centroid: Optional[np.ndarray] = None,
                     box_dims: Optional[np.ndarray] = None,
                     exhaustiveness: int = 10,
                     num_modes: int = 9,
                     num_pockets: Optional[int] = None,
                     out_dir: Optional[str] = None,
                     generate_scores: bool = False
                    ) -> Union[Tuple[DOCKED_POSES, List[float]], DOCKED_POSES]:
    """Generates the docked complex and outputs files for docked complex.

    TODO: How can this work on Windows? We need to install a .msi file and
    invoke it correctly from Python for this to work.

    Parameters
    ----------
    molecular_complexes: Tuple[str, str]
      A representation of a molecular complex. This tuple is
      (protein_file, ligand_file).
    centroid: np.ndarray, optional
      The centroid to dock against. Is computed if not specified.
    box_dims: np.ndarray, optional
      A numpy array of shape `(3,)` holding the size of the box to dock. If not
      specified is set to size of molecular complex plus 5 angstroms.
    exhaustiveness: int, optional (default 10)
      Tells Autodock Vina how exhaustive it should be with pose
      generation.
    num_modes: int, optional (default 9)
      Tells Autodock Vina how many binding modes it should generate at
      each invocation.
    num_pockets: int, optional (default None)
      If specified, `self.pocket_finder` must be set. Will only
      generate poses for the first `num_pockets` returned by
      `self.pocket_finder`.
    out_dir: str, optional
      If specified, write generated poses to this directory.
    generate_score: bool, optional (default False)
      If `True`, the pose generator will return scores for complexes.
      This is used typically when invoking external docking programs
      that compute scores.

    Returns
    -------
    Tuple[`docked_poses`, `scores`] or `docked_poses`
      Tuple of `(docked_poses, scores)` or `docked_poses`. `docked_poses`
      is a list of docked molecular complexes. Each entry in this list
      contains a `(protein_mol, ligand_mol)` pair of RDKit molecules.
      `scores` is a list of binding free energies predicted by Vina.

    Raises
    ------
    `ValueError` if `num_pockets` is set but `self.pocket_finder is None`.
    """
    if out_dir is None:
      out_dir = tempfile.mkdtemp()

    if num_pockets is not None and self.pocket_finder is None:
      raise ValueError(
          "If num_pockets is specified, pocket_finder must have been provided at construction time."
      )

    # Parse complex
    if len(molecular_complex) > 2:
      raise ValueError(
          "Autodock Vina can only dock protein-ligand complexes and not more general molecular complexes."
      )

    (protein_file, ligand_file) = molecular_complex

    # Prepare protein: add hydrogens/charges and emit PDB + PDBQT files.
    protein_name = os.path.basename(protein_file).split(".")[0]
    protein_hyd = os.path.join(out_dir, "%s_hyd.pdb" % protein_name)
    protein_pdbqt = os.path.join(out_dir, "%s.pdbqt" % protein_name)
    protein_mol = load_molecule(
        protein_file, calc_charges=True, add_hydrogens=True)
    write_molecule(protein_mol[1], protein_hyd, is_protein=True)
    write_molecule(protein_mol[1], protein_pdbqt, is_protein=True)

    # Get protein centroid and range
    # Three cases: explicit box, whole-protein box, or pocket-finder boxes.
    if centroid is not None and box_dims is not None:
      centroids = [centroid]
      dimensions = [box_dims]
    else:
      if self.pocket_finder is None:
        logger.info("Pockets not specified. Will use whole protein to dock")
        protein_centroid = compute_centroid(protein_mol[0])
        protein_range = compute_protein_range(protein_mol[0])
        box_dims = protein_range + 5.0
        centroids, dimensions = [protein_centroid], [box_dims]
      else:
        logger.info("About to find putative binding pockets")
        pockets = self.pocket_finder.find_pockets(protein_file)
        logger.info("%d pockets found in total" % len(pockets))
        logger.info("Computing centroid and size of proposed pockets.")
        centroids, dimensions = [], []
        for pocket in pockets:
          protein_centroid = pocket.center()
          (x_min, x_max), (y_min, y_max), (
              z_min, z_max) = pocket.x_range, pocket.y_range, pocket.z_range
          # TODO(rbharath: Does vina divide box dimensions by 2?
          x_box = (x_max - x_min) / 2.
          y_box = (y_max - y_min) / 2.
          z_box = (z_max - z_min) / 2.
          box_dims = (x_box, y_box, z_box)
          centroids.append(protein_centroid)
          dimensions.append(box_dims)

    if num_pockets is not None:
      logger.info("num_pockets = %d so selecting this many pockets for docking."
                  % num_pockets)
      centroids = centroids[:num_pockets]
      dimensions = dimensions[:num_pockets]

    # Prepare protein
    ligand_name = os.path.basename(ligand_file).split(".")[0]
    ligand_pdbqt = os.path.join(out_dir, "%s.pdbqt" % ligand_name)

    ligand_mol = load_molecule(
        ligand_file, calc_charges=True, add_hydrogens=True)
    write_molecule(ligand_mol[1], ligand_pdbqt)

    docked_complexes = []
    all_scores = []
    # One Vina invocation per candidate pocket.
    for i, (protein_centroid, box_dims) in enumerate(
        zip(centroids, dimensions)):
      logger.info("Docking in pocket %d/%d" % (i + 1, len(centroids)))
      logger.info("Docking with center: %s" % str(protein_centroid))
      logger.info("Box dimensions: %s" % str(box_dims))
      # Write Vina conf file
      conf_file = os.path.join(out_dir, "conf.txt")
      write_vina_conf(
          protein_pdbqt,
          ligand_pdbqt,
          protein_centroid,
          box_dims,
          conf_file,
          num_modes=num_modes,
          exhaustiveness=exhaustiveness)

      # Define locations of log and output files
      log_file = os.path.join(out_dir, "%s_log.txt" % ligand_name)
      out_pdbqt = os.path.join(out_dir, "%s_docked.pdbqt" % ligand_name)
      logger.info("About to call Vina")
      if platform.system() == 'Windows':
        args = [
            self.vina_cmd, "--config", conf_file, "--log", log_file, "--out",
            out_pdbqt
        ]
      else:
        # I'm not sure why specifying the args as a list fails on other platforms,
        # but for some reason it only works if I pass it as a string.
        # FIXME: Incompatible types in assignment
        args = "%s --config %s --log %s --out %s" % (  # type: ignore
            self.vina_cmd, conf_file, log_file, out_pdbqt)
      # FIXME: We should use `subprocess.run` instead of `call`
      call(args, shell=True)

      # Collect the poses Vina wrote and pair each with the protein.
      ligands, scores = load_docked_ligands(out_pdbqt)
      docked_complexes += [(protein_mol[1], ligand) for ligand in ligands]
      all_scores += scores

    if generate_scores:
      return docked_complexes, all_scores
    else:
      return docked_complexes
| lilleswing/deepchem | deepchem/dock/pose_generation.py | Python | mit | 13,071 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import mock
from concurrent import futures
import pytest
try:
import pandas
except ImportError: # pragma: NO COVER
pandas = None
try:
import IPython
from IPython.utils import io
from IPython.testing import tools
from IPython.terminal import interactiveshell
except ImportError: # pragma: NO COVER
IPython = None
import google.auth.credentials
from google.cloud.bigquery import table
from google.cloud.bigquery import magics
# Skip every test in this module when IPython is not installed.
pytestmark = pytest.mark.skipif(IPython is None, reason='Requires `ipython`')
@pytest.fixture(scope='session')
def ipython():
    """Return a session-wide terminal IPython shell with simple prompts."""
    config = tools.default_config()
    config.TerminalInteractiveShell.simple_prompt = True
    shell = interactiveshell.TerminalInteractiveShell.instance(config=config)
    return shell
@pytest.fixture()
def ipython_interactive(request, ipython):
    """Activate IPython's builtin hooks
    for the duration of the test scope.
    """
    # builtin_trap installs IPython's builtins on entry and removes them on exit.
    with ipython.builtin_trap:
        yield ipython
def test_context_credentials_auto_set_w_application_default_credentials():
    """When Application Default Credentials are set, the context credentials
    will be created the first time it is called
    """
    assert magics.context._credentials is None
    assert magics.context._project is None

    project = 'prahj-ekt'
    credentials_mock = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True)
    default_patch = mock.patch(
        'google.auth.default', return_value=(credentials_mock, project))

    with default_patch as default_mock:
        assert magics.context.credentials is credentials_mock
        assert magics.context.project == project

    # One google.auth.default() call per lazily resolved property
    # (credentials and project), hence exactly two.
    assert default_mock.call_count == 2
def test_context_credentials_and_project_can_be_set_explicitly():
    """Explicitly assigned context values win over application defaults."""
    project1 = 'one-project-55564'
    project2 = 'other-project-52569'
    credentials_mock = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True)
    default_patch = mock.patch(
        'google.auth.default', return_value=(credentials_mock, project1))
    with default_patch as default_mock:
        magics.context.credentials = credentials_mock
        magics.context.project = project2

    assert magics.context.project == project2
    assert magics.context.credentials is credentials_mock
    # default should not be called if credentials & project are explicitly set
    assert default_mock.call_count == 0
def test__run_query():
    """_run_query should emit one status line per poll until results arrive."""
    magics.context._credentials = None

    job_id = 'job_1234'
    sql = 'SELECT 17'
    # Two timeouts followed by a row: simulates a query that is still running
    # on the first two result() polls.
    responses = [
        futures.TimeoutError,
        futures.TimeoutError,
        [table.Row((17,), {'num': 0})]
    ]

    client_patch = mock.patch(
        'google.cloud.bigquery.magics.bigquery.Client', autospec=True)
    with client_patch as client_mock, io.capture_output() as captured:
        client_mock().query(sql).result.side_effect = responses
        client_mock().query(sql).job_id = job_id

        query_job = magics._run_query(client_mock(), sql)

    lines = re.split('\n|\r', captured.stdout)
    # Removes blanks & terminal code (result of display clearing)
    updates = list(filter(lambda x: bool(x) and x != '\x1b[2K', lines))

    assert query_job.job_id == job_id
    expected_first_line = "Executing query with job ID: {}".format(job_id)
    assert updates[0] == expected_first_line
    execution_updates = updates[1:-1]
    assert len(execution_updates) == 3  # one update per API response
    assert all(re.match("Query executing: .*s", line)
               for line in execution_updates)
    assert re.match("Query complete after .*s", updates[-1])
@pytest.mark.usefixtures('ipython_interactive')
def test_extension_load():
    """Loading the extension registers the ``bigquery`` cell magic."""
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension('google.cloud.bigquery')

    # verify that the magic is registered and has the correct source
    magic = ip.magics_manager.magics['cell'].get('bigquery')
    assert magic.__module__ == 'google.cloud.bigquery.magics'
@pytest.mark.usefixtures('ipython_interactive')
@pytest.mark.skipif(pandas is None, reason='Requires `pandas`')
def test_bigquery_magic_without_optional_arguments():
    """The bare %%bigquery magic returns the query result as a DataFrame."""
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension('google.cloud.bigquery')
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True)

    sql = 'SELECT 17 AS num'
    expected = pandas.DataFrame([17], columns=['num'])

    run_query_patch = mock.patch(
        'google.cloud.bigquery.magics._run_query', autospec=True)
    query_job_mock = mock.create_autospec(
        google.cloud.bigquery.job.QueryJob, instance=True)
    query_job_mock.to_dataframe.return_value = expected

    with run_query_patch as run_query_mock:
        run_query_mock.return_value = query_job_mock

        result = ip.run_cell_magic('bigquery', '', sql)

    assert isinstance(result, pandas.DataFrame)
    # Bug fix: the original reused one variable for both the expected frame
    # and the magic's return value, so the assertions compared ``result``
    # against itself and could never fail. Compare against ``expected``.
    assert len(result) == len(expected)  # verify row count
    assert list(result) == list(expected)  # verify column names
@pytest.mark.usefixtures('ipython_interactive')
def test_bigquery_magic_with_legacy_sql():
    """--use_legacy_sql must be forwarded into the job config."""
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension('google.cloud.bigquery')
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True)

    run_query_patch = mock.patch(
        'google.cloud.bigquery.magics._run_query', autospec=True)
    with run_query_patch as run_query_mock:
        ip.run_cell_magic(
            'bigquery', '--use_legacy_sql', 'SELECT 17 AS num')

        # The job config is the last positional argument of _run_query.
        job_config_used = run_query_mock.call_args_list[0][0][-1]
        assert job_config_used.use_legacy_sql is True
@pytest.mark.usefixtures('ipython_interactive')
@pytest.mark.skipif(pandas is None, reason='Requires `pandas`')
def test_bigquery_magic_with_result_saved_to_variable():
    """A destination name after %%bigquery binds the DataFrame in user_ns."""
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension('google.cloud.bigquery')
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True)

    sql = 'SELECT 17 AS num'
    result = pandas.DataFrame([17], columns=['num'])
    assert 'df' not in ip.user_ns

    run_query_patch = mock.patch(
        'google.cloud.bigquery.magics._run_query', autospec=True)
    query_job_mock = mock.create_autospec(
        google.cloud.bigquery.job.QueryJob, instance=True)
    query_job_mock.to_dataframe.return_value = result

    with run_query_patch as run_query_mock:
        run_query_mock.return_value = query_job_mock

        ip.run_cell_magic('bigquery', 'df', sql)

    assert 'df' in ip.user_ns  # verify that variable exists
    df = ip.user_ns['df']
    assert len(df) == len(result)  # verify row count
    assert list(df) == list(result)  # verify column names
@pytest.mark.usefixtures('ipython_interactive')
def test_bigquery_magic_does_not_clear_display_in_verbose_mode():
    """--verbose keeps intermediate status output on screen."""
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension('google.cloud.bigquery')
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True)

    clear_patch = mock.patch(
        'google.cloud.bigquery.magics.display.clear_output', autospec=True)
    run_query_patch = mock.patch(
        'google.cloud.bigquery.magics._run_query', autospec=True)
    with clear_patch as clear_mock, run_query_patch:
        ip.run_cell_magic('bigquery', '--verbose', 'SELECT 17 as num')

        assert clear_mock.call_count == 0
@pytest.mark.usefixtures('ipython_interactive')
def test_bigquery_magic_clears_display_in_verbose_mode():
    """Without --verbose the status output is cleared exactly once."""
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension('google.cloud.bigquery')
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True)

    clear_patch = mock.patch(
        'google.cloud.bigquery.magics.display.clear_output', autospec=True)
    run_query_patch = mock.patch(
        'google.cloud.bigquery.magics._run_query', autospec=True)
    with clear_patch as clear_mock, run_query_patch:
        ip.run_cell_magic('bigquery', '', 'SELECT 17 as num')

        assert clear_mock.call_count == 1
@pytest.mark.usefixtures('ipython_interactive')
def test_bigquery_magic_with_project():
    """--project overrides the client project without mutating the context."""
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension('google.cloud.bigquery')
    magics.context._project = None

    credentials_mock = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True)
    default_patch = mock.patch(
        'google.auth.default',
        return_value=(credentials_mock, 'general-project'))
    run_query_patch = mock.patch(
        'google.cloud.bigquery.magics._run_query', autospec=True)
    with run_query_patch as run_query_mock, default_patch:
        ip.run_cell_magic(
            'bigquery', '--project=specific-project', 'SELECT 17 as num')

        # The client is the first positional argument of _run_query.
        client_used = run_query_mock.call_args_list[0][0][0]
        assert client_used.project == 'specific-project'
        # context project should not change
        assert magics.context.project == 'general-project'
@pytest.mark.usefixtures('ipython_interactive')
@pytest.mark.skipif(pandas is None, reason='Requires `pandas`')
def test_bigquery_magic_with_string_params():
    """--params accepts an inline JSON string of query parameters."""
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension('google.cloud.bigquery')
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True)

    sql = 'SELECT @num AS num'
    result = pandas.DataFrame([17], columns=['num'])
    assert 'params_string_df' not in ip.user_ns

    run_query_patch = mock.patch(
        'google.cloud.bigquery.magics._run_query', autospec=True)
    query_job_mock = mock.create_autospec(
        google.cloud.bigquery.job.QueryJob, instance=True)
    query_job_mock.to_dataframe.return_value = result

    with run_query_patch as run_query_mock:
        run_query_mock.return_value = query_job_mock

        ip.run_cell_magic(
            'bigquery', 'params_string_df --params {"num":17}', sql)

        run_query_mock.assert_called_once_with(
            mock.ANY, sql.format(num=17), mock.ANY)

    assert 'params_string_df' in ip.user_ns  # verify that the variable exists
    df = ip.user_ns['params_string_df']
    assert len(df) == len(result)  # verify row count
    assert list(df) == list(result)  # verify column names
@pytest.mark.usefixtures('ipython_interactive')
@pytest.mark.skipif(pandas is None, reason='Requires `pandas`')
def test_bigquery_magic_with_dict_params():
    """--params accepts a $-expanded dict from the user namespace."""
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension('google.cloud.bigquery')
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True)

    sql = 'SELECT @num AS num'
    result = pandas.DataFrame([17], columns=['num'])
    assert 'params_dict_df' not in ip.user_ns

    run_query_patch = mock.patch(
        'google.cloud.bigquery.magics._run_query', autospec=True)
    query_job_mock = mock.create_autospec(
        google.cloud.bigquery.job.QueryJob, instance=True)
    query_job_mock.to_dataframe.return_value = result

    with run_query_patch as run_query_mock:
        run_query_mock.return_value = query_job_mock

        params = {"num": 17}
        # Insert dictionary into user namespace so that it can be expanded
        ip.user_ns['params'] = params
        ip.run_cell_magic('bigquery', 'params_dict_df --params $params', sql)

        run_query_mock.assert_called_once_with(
            mock.ANY, sql.format(num=17), mock.ANY)

    assert 'params_dict_df' in ip.user_ns  # verify that the variable exists
    df = ip.user_ns['params_dict_df']
    assert len(df) == len(result)  # verify row count
    assert list(df) == list(result)  # verify column names
@pytest.mark.usefixtures('ipython_interactive')
@pytest.mark.skipif(pandas is None, reason='Requires `pandas`')
def test_bigquery_magic_with_improperly_formatted_params():
    """A malformed --params value ({17} is not a dict) raises SyntaxError."""
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension('google.cloud.bigquery')
    magics.context.credentials = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True)
    sql = 'SELECT @num AS num'
    with pytest.raises(SyntaxError):
        ip.run_cell_magic(
            'bigquery', '--params {17}', sql)
| jonparrott/gcloud-python | bigquery/tests/unit/test_magics.py | Python | apache-2.0 | 12,950 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=eval-used,invalid-name,too-many-arguments
"""Utility functions"""
from tvm import relay
from tvm.relay import transform
def has_multiple_inputs(node_list, node_idx, input_names):
    """Check whether a node has multiple input nodes
    except variable nodes.

    Parameters
    ----------
    node_list : list of dict of str to object
        List of all nodes in a graph.

    node_idx : int
        Node index to be checked.

    input_names : list of str
        List of input names of graph.

    Returns
    -------
    out : bool
        Whether the specified node has multiple input nodes
    """
    entry = node_list[node_idx]
    # Count an incoming edge when it originates from a real operator,
    # or from a variable that is a graph-level input (parameters excluded).
    relevant = sum(
        1 for edge in entry["inputs"]
        if node_list[edge[0]]["op"] != "null"
        or is_input_node(node_list[edge[0]], input_names)
    )
    return relevant > 1
def is_input_node(node_entry, input_names):
    """Whether a node is an input node.

    Parameters
    ----------
    node_entry : dict
        Node entry.

    input_names : list of str
        List of input names of graph.

    Returns
    -------
    out : bool
        whether node is a input node.
    """
    # EAFP: nodes without a "name" key cannot be graph inputs.
    try:
        return node_entry["name"] in input_names
    except KeyError:
        return False
def bind_inputs(expr, input_shapes=None, input_dtypes="float32"):
    """Bind input variables of a relay function expression
    to new shapes and/or dtypes.

    Parameters
    ----------
    expr : tvm.relay.Expr.Function
        Input relay function expression.

    input_shapes : dict of str to tuple of int, optional
        Input shapes.

    input_dtypes : str or dict of str to str, optional
        Input dtypes.

    Returns
    -------
    out : tvm.relay.Expr.Function
        Bind relay function expression.
    """
    if input_shapes is None:
        return expr
    # A single dtype string applies to every input.
    if isinstance(input_dtypes, str):
        input_dtypes = dict.fromkeys(input_shapes, input_dtypes)
    # Fresh variables carrying the requested shapes/dtypes.
    fresh_vars = {
        name: relay.var(name, shape=shape, dtype=input_dtypes[name])
        for name, shape in input_shapes.items()
    }
    rebind = {
        param: fresh_vars[param.name_hint]
        for param in expr.params
        if param.name_hint in fresh_vars
    }
    bound = relay.expr.bind(expr, rebind)
    mod = relay.Module.from_expr(bound)
    mod = transform.InferType()(mod)
    entry = mod["main"]
    return entry if isinstance(bound, relay.Function) else entry.body
| mlperf/training_results_v0.7 | Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/tvm/python/tvm/autotvm/graph_tuner/utils/utils.py | Python | apache-2.0 | 3,524 |
#!/usr/bin/env python
###############################################################################
# $Id: rel.py 18195 2009-12-06 20:24:39Z rouault $
#
# Project: GDAL Python samples
# Purpose: Script to produce a shaded relief image from elevation data
# Author: Andrey Kiselev, dron@remotesensing.org
#
###############################################################################
# Copyright (c) 2003, Andrey Kiselev <dron@remotesensing.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
try:
from osgeo import gdal
from osgeo.gdalconst import *
gdal.TermProgress = gdal.TermProgress_nocb
except ImportError:
import gdal
from gdalconst import *
try:
import numpy as Numeric
Numeric.arrayrange = Numeric.arange
except ImportError:
import Numeric
try:
from osgeo import gdal_array as gdalnumeric
except ImportError:
import gdalnumeric
import sys
from math import *
# =============================================================================
def Usage():
    """Print the command-line help text and exit with status 1."""
    usage_lines = (
        'Usage: rel.py -lsrcaz azimuth -lsrcel elevation [-elstep step]',
        '              [-dx xsize] [-dy ysize] [-b band] [-ot type] infile outfile',
        'Produce a shaded relief image from elevation data',
        '',
        '  -lsrcaz azimuth   Azimuth angle of the diffuse light source (0..360 degrees)',
        '  -lsrcel elevation Elevation angle of the diffuse light source (0..180 degrees)',
        '  -elstep step      Elevation change corresponding to a change of one grey level',
        '                    (default 1)',
        '  -dx xsize         X and Y dimensions (in metres) of one pixel on the ground',
        '  -dy ysize         (taken from the geotransform matrix by default)',
        '  -r range          Dynamic range for output image (default 255)',
        '  -b band           Select a band number to convert (default 1)',
        '  -ot type          Data type of the output dataset',
        '                    (Byte/Int16/UInt16/UInt32/Int32/Float32/Float64/',
        '                     CInt16/CInt32/CFloat32/CFloat64, default is Byte)',
        '  infile            Name of the input file',
        '  outfile           Name of the output file',
        '',
    )
    for line in usage_lines:
        print(line)
    sys.exit(1)
# =============================================================================
# =============================================================================
def ParseType(type):
    """Translate a GDAL data type name into the matching GDT_* constant.

    Unrecognised names fall back to GDT_Byte.
    """
    # Dispatch table instead of an if/elif ladder.
    gdt_by_name = {
        'Byte': GDT_Byte,
        'Int16': GDT_Int16,
        'UInt16': GDT_UInt16,
        'Int32': GDT_Int32,
        'UInt32': GDT_UInt32,
        'Float32': GDT_Float32,
        'Float64': GDT_Float64,
        'CInt16': GDT_CInt16,
        'CInt32': GDT_CInt32,
        'CFloat32': GDT_CFloat32,
        'CFloat64': GDT_CFloat64,
    }
    return gdt_by_name.get(type, GDT_Byte)
# =============================================================================
# ---- Defaults for all command-line options ----
infile = None
outfile = None
iBand = 1    # The first band will be converted by default
format = 'GTiff'
type = GDT_Byte
lsrcaz = None      # light source azimuth, degrees (mandatory)
lsrcel = None      # light source elevation, degrees (mandatory)
elstep = 1.0       # metres of elevation per grey level
xsize = None       # ground pixel size; taken from geotransform if not given
ysize = None
dyn_range = 255.0  # output dynamic range
# Parse command line arguments.
i = 1
while i < len(sys.argv):
    arg = sys.argv[i]
    if arg == '-b':
        i += 1
        iBand = int(sys.argv[i])
    elif arg == '-ot':
        i += 1
        type = ParseType(sys.argv[i])
    elif arg == '-lsrcaz':
        i += 1
        lsrcaz = float(sys.argv[i])
    elif arg == '-lsrcel':
        i += 1
        lsrcel = float(sys.argv[i])
    elif arg == '-elstep':
        i += 1
        elstep = float(sys.argv[i])
    elif arg == '-dx':
        i += 1
        xsize = float(sys.argv[i])
    elif arg == '-dy':
        i += 1
        ysize = float(sys.argv[i])
    elif infile is None:
        # first positional argument
        infile = arg
    elif outfile is None:
        # second positional argument
        outfile = arg
    else:
        Usage()
    i += 1
# Both files and both light-source angles are mandatory.
if infile is None:
    Usage()
if outfile is None:
    Usage()
if lsrcaz is None:
    Usage()
if lsrcel is None:
    Usage()
# translate angles from degrees to radians
lsrcaz = lsrcaz / 180.0 * pi
lsrcel = lsrcel / 180.0 * pi
# Unit-style vector pointing at the light source, and its norm.
lx = -sin(lsrcaz) * cos(lsrcel)
ly = cos(lsrcaz) * cos(lsrcel)
lz = sin(lsrcel)
lxyz = sqrt(lx**2 + ly**2 + lz**2)
# ---- Open input, create output, then shade row by row ----
indataset = gdal.Open(infile, GA_ReadOnly)
if indataset == None:
    print('Cannot open', infile)
    sys.exit(2)
# Central differences need at least one neighbour on each side.
if indataset.RasterXSize < 3 or indataset.RasterYSize < 3:
    print('Input image is too small to process, minimum size is 3x3')
    sys.exit(3)
out_driver = gdal.GetDriverByName(format)
outdataset = out_driver.Create(outfile, indataset.RasterXSize, indataset.RasterYSize, indataset.RasterCount, type)
outband = outdataset.GetRasterBand(1)
geotransform = indataset.GetGeoTransform()
projection = indataset.GetProjection()
# Pixel ground size defaults to the geotransform cell size.
if xsize is None:
    xsize = abs(geotransform[1])
if ysize is None:
    ysize = abs(geotransform[5])
inband = indataset.GetRasterBand(iBand)
if inband == None:
    print('Cannot load band', iBand, 'from the', infile)
    sys.exit(2)
numtype = gdalnumeric.GDALTypeCodeToNumericTypeCode(type)
outline = Numeric.empty((1, inband.XSize), numtype)
# First and last rows cannot be shaded (no neighbour above/below);
# they are written from the uninitialised buffer, as in the original.
prev = inband.ReadAsArray(0, 0, inband.XSize, 1, inband.XSize, 1)[0]
outband.WriteArray(outline, 0, 0)
gdal.TermProgress(0.0)
cur = inband.ReadAsArray(0, 1, inband.XSize, 1, inband.XSize, 1)[0]
outband.WriteArray(outline, 0, inband.YSize - 1)
gdal.TermProgress(1.0 / inband.YSize)
dx = 2 * xsize
dy = 2 * ysize
for i in range(1, inband.YSize - 1):
    next = inband.ReadAsArray(0, i + 1, inband.XSize, 1, inband.XSize, 1)[0]
    # Central-difference slopes in x (within row) and y (between rows).
    dzx = (cur[0:-2] - cur[2:]) * elstep
    dzy = (prev[1:-1] - next[1:-1]) * elstep
    # Surface normal components.
    nx = -dy * dzx
    ny = dx * dzy
    nz = dx * dy
    nxyz = nx*nx + ny*ny + nz*nz
    nlxyz = nx*lx + ny*ly + nz*lz
    # Cosine of the angle between the normal and the light vector,
    # scaled to the requested dynamic range and clipped at 0.
    cosine = dyn_range * ( nlxyz / (lxyz * Numeric.sqrt(nxyz)))
    cosine = Numeric.clip(cosine, 0.0, dyn_range)
    outline[0, 1:-1] = cosine.astype(numtype)
    outband.WriteArray(outline, 0, i)
    prev = cur
    cur = next
    # Display progress report on terminal
    gdal.TermProgress(float(i + 1) / (inband.YSize - 1))
outdataset.SetGeoTransform(geotransform)
outdataset.SetProjection(projection)
| AsherBond/MondocosmOS | gdal/swig/python/samples/rel.py | Python | agpl-3.0 | 7,548 |
"""
Google Code Wiki translator.
Syntax defined by http://code.google.com/p/support/wiki/WikiSyntax
Here called gwiki to make the dialect clear (g for google).
"""
import re, os, commands, sys
from common import default_movie, plain_exercise, insert_code_and_tex, \
fix_ref_section_chapter
from plaintext import plain_quiz
from misc import _abort
from doconce import errwarn
def gwiki_code(filestr, code_blocks, code_block_types,
               tex_blocks, format):
    """Insert code/tex blocks and map !bc/!ec and !bt/!et to {{{ ... }}}."""
    filestr = insert_code_and_tex(filestr, code_blocks, tex_blocks, format)
    # gwiki uses the same {{{ ... }}} fence for both code and math blocks.
    for begin, end in ((r'^!bc(.*?)\n', r'!ec\n'),
                       (r'^!bt\n', r'!et\n')):
        filestr = re.compile(begin, re.MULTILINE).sub('{{{\n', filestr)
        filestr = re.sub(end, '}}}\n', filestr)
    return filestr
def gwiki_figure(m):
    """Handle a FIGURE command for the gwiki format.

    `m` is a regex match with 'filename' and 'caption' groups. Local,
    non-web image files are converted to PNG if needed; the returned text
    is a placeholder block where the user must insert the image URL.
    """
    filename = m.group('filename')
    link = filename if filename.startswith('http') else None
    if not link and not os.path.isfile(filename):
        raise IOError('no figure file %s' % filename)
    basename = os.path.basename(filename)
    stem, ext = os.path.splitext(basename)
    root, ext = os.path.splitext(filename)
    if link is None:
        if not ext in '.png .gif .jpg .jpeg'.split():
            # try to convert image file to PNG, using
            # convert from ImageMagick:
            cmd = 'convert %s png:%s' % (filename, root+'.png')
            failure, output = commands.getstatusoutput(cmd)
            if failure:
                errwarn('\n**** Warning: could not run ' + cmd)
                errwarn('Convert %s to PNG format manually' % filename)
                _abort()
            filename = root + '.png'
    caption = m.group('caption')
    # keep label if it's there:
    caption = re.sub(r'label\{(.+?)\}', '(\g<1>)', caption)
    errwarn("""
NOTE: Place %s at some place on the web and edit the
.gwiki page, either manually (seach for 'Figure: ')
or use the doconce script:
doconce gwiki_figsubst.py mydoc.gwiki URL
""" % filename)
    # Placeholder block; the image URL must be substituted in later.
    result = r"""
---------------------------------------------------------------
Figure: %s
(the URL of the image file %s must be inserted here)
<wiki:comment>
Put the figure file %s on the web (e.g., as part of the
googlecode repository) and substitute the line above with the URL.
</wiki:comment>
---------------------------------------------------------------
""" % (caption, filename, filename)
    return result
from common import table_analysis
def gwiki_table(table):
    """Native gwiki table."""
    # Reserve 2 extra chars per column for the _..._ boldface that is
    # wrapped around headline cells.
    widths = [w + 2 for w in table_analysis(table['rows'])]
    # Column/heading alignment is not supported by gwiki syntax
    # (see http://code.google.com/p/support/wiki/WikiSyntax#Tables);
    # HTML tables would be needed for that.
    rows = table['rows']
    out = '\n'
    for idx, row in enumerate(rows):
        if row == ['horizontal rule']:
            continue
        # A row framed by horizontal rules is the headline.
        headline = (idx == 1 and
                    rows[idx - 1] == ['horizontal rule'] and
                    rows[idx + 1] == ['horizontal rule'])
        if max(len(cell.strip()) for cell in row) == 0:
            continue  # skip rows with only empty cells
        for cell, width in zip(row, widths):
            if headline:
                if cell:
                    text = ' %s ' % (('_' + cell + '_').center(width))
                else:
                    text = ''
            else:
                text = ' %s ' % cell.ljust(width)
            out += ' || %s ' % text
        out += ' ||\n'
    out += '\n\n'
    return out
def gwiki_author(authors_and_institutions, auth2index,
                 inst2index, index2inst, auth2email):
    """Return a 'By <authors>' line for gwiki.

    Institutions are dropped; emails are rendered as 'name at domain'.
    Returns '' when there are no authors.
    """
    formatted = []
    for author, dummy_inst, email in authors_and_institutions:
        if email is None:
            suffix = ''
        else:
            user, domain = email.split('@')
            suffix = ' (%s at %s)' % (user, domain)
        formatted.append('_%s_%s' % (author, suffix))
    if not formatted:
        # no authors:
        return ''
    if len(formatted) == 1:
        byline = formatted[0]
    elif len(formatted) == 2:
        byline = ' and '.join(formatted)
    else:
        formatted[-1] = 'and ' + formatted[-1]
        byline = ', '.join(formatted)
    # we skip institutions in gwiki
    return '\n\nBy ' + byline + '\n\n'
def wiki_ref_and_label_common(section_label2title, format, filestr):
    """Resolve label{...}/ref{...} cross references for wiki formats.

    Section references become [#Title] anchors; equation references are
    handled by ref2equations; any leftover ref{x} degrades to plain x.
    """
    filestr = fix_ref_section_chapter(filestr, format)
    # remove label{...} from output
    filestr = re.sub(r'label\{.+?\}', '', filestr)  # all the remaining
    # anchors in titles do not work...
    # replace all references to sections:
    for label in section_label2title:
        title = section_label2title[label]
        filestr = filestr.replace('ref{%s}' % label,
                                  '[#%s]' % title.replace(' ', '_'))
    from common import ref2equations
    filestr = ref2equations(filestr)
    # replace remaining ref{x} as x
    filestr = re.sub(r'ref\{(.+?)\}', '\g<1>', filestr)
    return filestr
def gwiki_ref_and_label(section_label2title, format, filestr):
    """Cross-reference handling for gwiki: delegates to the shared wiki helper."""
    return wiki_ref_and_label_common(section_label2title, format, filestr)
def define(FILENAME_EXTENSION,
           BLANKLINE,
           INLINE_TAGS_SUBST,
           CODE,
           LIST,
           ARGLIST,
           TABLE,
           EXERCISE,
           FIGURE_EXT,
           CROSS_REFS,
           INDEX_BIB,
           TOC,
           ENVIRS,
           QUIZ,
           INTRO,
           OUTRO,
           filestr):
    """Register all gwiki translation rules in the doconce dispatch dicts."""
    # all arguments are dicts and accept in-place modifications (extensions)
    FILENAME_EXTENSION['gwiki'] = '.gwiki'  # output file extension
    BLANKLINE['gwiki'] = '\n'
    # replacement patterns for substitutions of inline tags
    INLINE_TAGS_SUBST['gwiki'] = {
        # use verbatim mode for math:
        'math': r'\g<begin>`\g<subst>`\g<end>',
        'math2': r'\g<begin>`\g<puretext>`\g<end>',
        'emphasize': r'\g<begin>_\g<subst>_\g<end>',
        'bold': r'\g<begin>*\g<subst>*\g<end>',
        'verbatim': r'\g<begin>`\g<subst>`\g<end>',
        #'linkURL': r'\g<begin>[\g<url> \g<link>]\g<end>',
        'linkURL2': r'[\g<url> \g<link>]',
        'linkURL3': r'[\g<url> \g<link>]',
        'linkURL2v': r"[\g<url> `\g<link>`]",
        'linkURL3v': r"[\g<url> `\g<link>`]",
        'plainURL': r'\g<url>',
        'colortext': r'<font color="\g<color>">\g<text></font>',
        'chapter': r'= \g<subst> =',
        'section': r'== \g<subst> ==',
        'subsection': r'=== \g<subst> ===',
        'subsubsection': r'==== \g<subst> ====\n',
        # 'section': r'++++ \g<subst> ++++',
        # 'subsection': r'++++++ \g<subst> ++++++',
        # 'subsubsection': r'++++++++ \g<subst> ++++++++',
        'paragraph': r'*\g<subst>*\g<space>',
        #'title': r'#summary \g<subst>\n<wiki:toc max_depth="2" />',
        'title': r'#summary \g<subst>\n',
        'date': r'===== \g<subst> =====',
        'author': gwiki_author,  #r'===== \g<name>, \g<institution> =====',
        # 'figure': r'<\g<filename>>',
        'figure': gwiki_figure,
        'movie': default_movie,  # will not work for HTML movie player
        'comment': '<wiki:comment> %s </wiki:comment>',
        'abstract': r'\n*\g<type>.* \g<text>\g<rest>',
        'linebreak': r'\g<text>' + '\n',
        'non-breaking-space': ' ',
        'ampersand2': r' \g<1>&\g<2>',
        }
    CODE['gwiki'] = gwiki_code
    from html import html_table
    #TABLE['gwiki'] = html_table
    TABLE['gwiki'] = gwiki_table
    # native list:
    LIST['gwiki'] = {
        'itemize': {'begin': '\n', 'item': '*', 'end': '\n\n'},
        'enumerate': {'begin': '\n', 'item': '#', 'end': '\n\n'},
        'description': {'begin': '\n', 'item': '* %s ', 'end': '\n\n'},
        'separator': '\n'}
    # (the \n\n for end is a hack because doconce.py avoids writing
    # newline at the end of lists until the next paragraph is hit)
    #LIST['gwiki'] = LIST['HTML']  # does not work well
    # how to typeset description lists for function arguments, return
    # values, and module/class variables:
    ARGLIST['gwiki'] = {
        'parameter': '*argument*',
        'keyword': '*keyword argument*',
        'return': '*return value(s)*',
        'instance variable': '*instance variable*',
        'class variable': '*class variable*',
        'module variable': '*module variable*',
        }
    FIGURE_EXT['gwiki'] = {
        'search': ('.png', '.gif', '.jpg', '.jpeg'),
        'convert': ('.png', '.gif', '.jpg')}
    CROSS_REFS['gwiki'] = gwiki_ref_and_label
    from plaintext import plain_index_bib
    EXERCISE['gwiki'] = plain_exercise
    INDEX_BIB['gwiki'] = plain_index_bib
    TOC['gwiki'] = lambda s, f: '<wiki: toc max_depth="2" />'
    QUIZ['gwiki'] = plain_quiz
    # document start:
    INTRO['gwiki'] = ''
    #INTRO['gwiki'] = '#summary YourOneLineSummary\n<wiki:toc max_depth="1" />\n'
| dragly/doconce | lib/doconce/gwiki.py | Python | bsd-3-clause | 9,341 |
# ========================================================================
# Copyright (c) 2015 The University of Washington
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========================================================================
#
#
# IAM messaging tools - DAO impl - Azure interface
#
import re
import json
# Azure interface classes
from azure.servicebus import ServiceBusService, Message, Topic, Rule, DEFAULT_RULE_NAME
from copy import deepcopy
from messagetools.iam_message import encode_message
from messagetools.iam_message import decode_message
from messagetools.dao_implementation.mock import get_mockdata_message
import logging
logger = logging.getLogger(__name__)
class File(object):
    """Mock DAO that replays canned messages from local mock-data files.

    Mirrors the interface of the Live Azure implementation so tests can
    swap the two.
    """

    def __init__(self, conf):
        self._conf = conf
        self._event_no = 0  # index of the next mock event to replay

    def recv_message(self):
        """Return the next mock message and advance the event counter."""
        message = get_mockdata_message('ms_azure', self._conf['SQS_QUEUE'],
                                       self._event_no)
        self._event_no += 1
        return message

    def recv_and_process(self, handler, max=1):
        """Feed up to `max` mock messages to `handler`.

        Stops early when the mock data is exhausted. Returns a tuple
        (total_messages, messages_for_which_handler_returned_truthy).
        """
        ntot = 0
        nvalid = 0
        logger.debug('recv and proc: no=%d, max=%d' % (self._event_no, max))
        for n in range(0, max):
            message = get_mockdata_message('ms_azure', self._conf['SQS_QUEUE'],
                                           self._event_no)
            # Bug fix: use identity comparison with None (was 'message==None'),
            # which is both PEP 8 and safe against __eq__ overrides.
            if message is None:
                break
            self._event_no += 1
            ret = handler(message)
            if ret:
                nvalid += 1
            ntot += 1
        return (ntot, nvalid)
class Live(object):
    """Azure Service Bus implementation of the IAM messaging DAO."""

    def __init__(self, conf):
        self._conf = conf
        self._topic = conf['TOPIC_NAME']
        self._subscr = conf['SUBSCRIPTION_NAME']

    def _get_bus_service(self):
        # A fresh ServiceBusService per call; credentials come from config.
        return ServiceBusService(service_namespace=self._conf['NAMESPACE'],
                                 shared_access_key_name=self._conf['ACCESS_KEY_NAME'],
                                 shared_access_key_value=self._conf['ACCESS_KEY_VALUE'])

    def send_message(self, msg, context, cryptid, signid, properties={}):
        # NOTE(review): mutable default for `properties` — harmless while it
        # is only read, but worth confirming callers never mutate it.
        bus_service = self._get_bus_service()
        b64msg = encode_message(msg, context, cryptid, signid)
        ms_msg = Message(b64msg, custom_properties=properties)
        ret = bus_service.send_topic_message(self._conf['TOPIC_NAME'], ms_msg)
        return ret

    def create_topic(self, topic_name):
        bus_service = self._get_bus_service()
        bus_service.create_topic(topic_name)

    def create_subscription(self, topic_name, name):
        bus_service = self._get_bus_service()
        ret = bus_service.create_subscription(topic_name, name)
        print(ret)

    def recv_message(self, peek=False):
        """Receive one message; with peek=True also return delete/unlock hooks."""
        subscription_name=self._conf['SUBSCRIPTION_NAME']
        topic_name = self._conf['TOPIC_NAME']
        bus_service = self._get_bus_service()
        ms_msg = bus_service.receive_subscription_message(topic_name, subscription_name, peek_lock=peek)
        msg = decode_message(ms_msg.body)
        if peek:
            return (msg, ms_msg.delete, ms_msg.unlock)
        else:
            return msg

    def recv_and_process(self, handler, max=1):
        """Receive one message and hand it to `handler`; returns 1.

        NOTE(review): the `max` parameter is accepted but ignored — only a
        single message is processed per call, unlike the File mock which
        loops up to `max` times. Confirm whether that is intended.
        """
        bus_service = self._get_bus_service()
        ms_msg = bus_service.receive_subscription_message(self._topic, self._subscr, peek_lock=True)
        dmsg = decode_message(ms_msg.body)
        if dmsg is None:
            # undecodable payload: drop it so it does not block the queue
            print('removing invalid message')
            ms_msg.delete()
            return 1
        ret = handler(decode_message(ms_msg.body))
        if ret:
            print('deleting')
            ms_msg.delete()
        else:
            ms_msg.unlock()
        return 1

    def add_rule(self, topic_name, subscription_name, rule_name, rule_value):
        bus_service = self._get_bus_service()
        rule = Rule()
        rule.filter_type = 'SqlFilter'
        rule.filter_expression = rule_value
        ret = bus_service.create_rule(topic_name, subscription_name, rule_name, rule)
        print(ret)

    def remove_rule(self, topic_name, subscription_name, rule_name):
        # '-default-' is a CLI-friendly alias for the SDK's default rule name.
        if rule_name == '-default-':
            rule_name = DEFAULT_RULE_NAME
        bus_service = self._get_bus_service()
        ret = bus_service.delete_rule(topic_name, subscription_name, rule_name)
        print(ret)
| mattjmuw/iam-messaging | messagetools.old/dao_implementation/ms_azure.py | Python | apache-2.0 | 4,755 |
#
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Remote API resource implementations.
PUT or POST?
============
According to RFC2616 the main difference between PUT and POST is that
POST can create new resources but PUT can only create the resource the
URI was pointing to on the PUT request.
In the context of this module POST on ``/2/instances`` to change an existing
entity is legitimate, while PUT would not be. PUT creates a new entity (e.g. a
new instance) with a name specified in the request.
Quoting from RFC2616, section 9.6::
The fundamental difference between the POST and PUT requests is reflected in
the different meaning of the Request-URI. The URI in a POST request
identifies the resource that will handle the enclosed entity. That resource
might be a data-accepting process, a gateway to some other protocol, or a
separate entity that accepts annotations. In contrast, the URI in a PUT
request identifies the entity enclosed with the request -- the user agent
knows what URI is intended and the server MUST NOT attempt to apply the
request to some other resource. If the server desires that the request be
applied to a different URI, it MUST send a 301 (Moved Permanently) response;
the user agent MAY then make its own decision regarding whether or not to
redirect the request.
So when adding new methods, if they are operating on the URI entity itself,
PUT should be prefered over POST.
"""
# pylint: disable=C0103
# C0103: Invalid name, since the R_* names are not conforming
from ganeti import opcodes
from ganeti import objects
from ganeti import http
from ganeti import constants
from ganeti import cli
from ganeti import rapi
from ganeti import ht
from ganeti import compat
from ganeti import ssconf
from ganeti.rapi import baserlib
# Fields shared by every queryable object type below.
_COMMON_FIELDS = ["ctime", "mtime", "uuid", "serial_no", "tags"]
# Instance query fields.
I_FIELDS = ["name", "admin_state", "os",
            "pnode", "snodes",
            "disk_template",
            "nic.ips", "nic.macs", "nic.modes", "nic.uuids", "nic.names",
            "nic.links", "nic.networks", "nic.networks.names", "nic.bridges",
            "network_port",
            "disk.sizes", "disk.spindles", "disk_usage", "disk.uuids",
            "disk.names",
            "beparams", "hvparams",
            "oper_state", "oper_ram", "oper_vcpus", "status",
            "custom_hvparams", "custom_beparams", "custom_nicparams",
            ] + _COMMON_FIELDS
# Node query fields.
N_FIELDS = ["name", "offline", "master_candidate", "drained",
            "dtotal", "dfree", "sptotal", "spfree",
            "mtotal", "mnode", "mfree",
            "pinst_cnt", "sinst_cnt",
            "ctotal", "cnos", "cnodes", "csockets",
            "pip", "sip", "role",
            "pinst_list", "sinst_list",
            "master_capable", "vm_capable",
            "ndparams",
            "group.uuid",
            ] + _COMMON_FIELDS
# Network query fields.
NET_FIELDS = ["name", "network", "gateway",
              "network6", "gateway6",
              "mac_prefix",
              "free_count", "reserved_count",
              "map", "group_list", "inst_list",
              "external_reservations",
              ] + _COMMON_FIELDS
# Node-group query fields.
G_FIELDS = [
  "alloc_policy",
  "name",
  "node_cnt",
  "node_list",
  "ipolicy",
  "custom_ipolicy",
  "diskparams",
  "custom_diskparams",
  "ndparams",
  "custom_ndparams",
  ] + _COMMON_FIELDS
# Job fields returned by bulk queries; detailed queries add the op logs.
J_FIELDS_BULK = [
  "id", "ops", "status", "summary",
  "opstatus",
  "received_ts", "start_ts", "end_ts",
  ]
J_FIELDS = J_FIELDS_BULK + [
  "oplog",
  "opresult",
  ]
# External (RAPI) spellings of the internal node roles.
_NR_DRAINED = "drained"
_NR_MASTER_CANDIDATE = "master-candidate"
_NR_MASTER = "master"
_NR_OFFLINE = "offline"
_NR_REGULAR = "regular"
_NR_MAP = {
  constants.NR_MASTER: _NR_MASTER,
  constants.NR_MCANDIDATE: _NR_MASTER_CANDIDATE,
  constants.NR_DRAINED: _NR_DRAINED,
  constants.NR_OFFLINE: _NR_OFFLINE,
  constants.NR_REGULAR: _NR_REGULAR,
  }
assert frozenset(_NR_MAP.keys()) == constants.NR_ALL
# Request data version field
_REQ_DATA_VERSION = "__version__"
# Feature string for instance creation request data version 1
_INST_CREATE_REQV1 = "instance-create-reqv1"
# Feature string for instance reinstall request version 1
_INST_REINSTALL_REQV1 = "instance-reinstall-reqv1"
# Feature string for node migration version 1
_NODE_MIGRATE_REQV1 = "node-migrate-reqv1"
# Feature string for node evacuation with LU-generated jobs
_NODE_EVAC_RES1 = "node-evac-res1"
ALL_FEATURES = compat.UniqueFrozenset([
  _INST_CREATE_REQV1,
  _INST_REINSTALL_REQV1,
  _NODE_MIGRATE_REQV1,
  _NODE_EVAC_RES1,
  ])
# Timeout for /2/jobs/[job_id]/wait. Gives job up to 10 seconds to change.
_WFJC_TIMEOUT = 10
# FIXME: For compatibility we update the beparams/memory field. Needs to be
# removed in Ganeti 2.8
def _UpdateBeparams(inst):
  """Updates the beparams dict of inst to support the memory field.

  Mirrors BE_MAXMEM into the legacy BE_MEMORY key, in place.

  @param inst: Inst dict
  @return: Updated inst dict

  """
  be = inst["beparams"]
  be[constants.BE_MEMORY] = be[constants.BE_MAXMEM]
  return inst
class R_root(baserlib.ResourceBase):
  """/ resource.

  """
  @staticmethod
  def GET():
    """Supported for legacy reasons.

    Always returns None; the root URI carries no data.

    """
    return None
class R_2(R_root):
  """/2 resource.

  Inherits the no-op GET from R_root so the /2 prefix answers requests.

  """
class R_version(baserlib.ResourceBase):
  """/version resource.

  This resource should be used to determine the remote API version and
  to adapt clients accordingly.

  """
  @staticmethod
  def GET():
    """Returns the remote API version.

    """
    return constants.RAPI_VERSION
class R_2_info(baserlib.OpcodeResource):
  """/2/info resource.

  """
  GET_OPCODE = opcodes.OpClusterQuery
  # Map legacy external field names onto the current internal ones.
  GET_ALIASES = {
    "volume_group_name": "vg_name",
    "drbd_usermode_helper": "drbd_helper",
    }

  def GET(self):
    """Returns cluster information.

    """
    client = self.GetClient()
    return client.QueryClusterInfo()
class R_2_features(baserlib.ResourceBase):
  """/2/features resource.

  """
  @staticmethod
  def GET():
    """Returns list of optional RAPI features implemented.

    """
    return list(ALL_FEATURES)
class R_2_os(baserlib.OpcodeResource):
  """/2/os resource.

  """
  GET_OPCODE = opcodes.OpOsDiagnose

  def GET(self):
    """Return a list of all OSes.

    Can return error 500 in case of a problem.

    Example: ["debian-etch"]

    """
    cl = self.GetClient()
    op = opcodes.OpOsDiagnose(output_fields=["name", "variants"], names=[])
    # Submit the diagnose opcode and block until the job finishes.
    job_id = self.SubmitJob([op], cl=cl)
    # we use custom feedback function, instead of print we log the status
    result = cli.PollJob(job_id, cl, feedback_fn=baserlib.FeedbackFn)
    diagnose_data = result[0]
    if not isinstance(diagnose_data, list):
      raise http.HttpBadGateway(message="Can't get OS list")
    os_names = []
    # Expand each OS into one entry per variant (name+variant strings).
    for (name, variants) in diagnose_data:
      os_names.extend(cli.CalculateOSNames(name, variants))
    return os_names
class R_2_redist_config(baserlib.OpcodeResource):
  """/2/redistribute-config resource.

  PUT submits an OpClusterRedistConf opcode with no parameters.

  """
  PUT_OPCODE = opcodes.OpClusterRedistConf
class R_2_cluster_modify(baserlib.OpcodeResource):
  """/2/modify resource.

  PUT submits an OpClusterSetParams opcode built from the request body.

  """
  PUT_OPCODE = opcodes.OpClusterSetParams
class R_2_jobs(baserlib.ResourceBase):
  """/2/jobs resource.

  """
  def GET(self):
    """Returns a dictionary of jobs.

    @return: a dictionary with jobs id and uri.

    """
    client = self.GetClient()
    if not self.useBulk():
      # Plain listing: job ids plus their URIs.
      job_ids = [row[0] for row in client.QueryJobs(None, ["id"])]
      return baserlib.BuildUriList(job_ids, "/2/jobs/%s",
                                   uri_fields=("id", "uri"))
    # Bulk output: full per-job field dictionaries.
    bulkdata = client.QueryJobs(None, J_FIELDS_BULK)
    return baserlib.MapBulkFields(bulkdata, J_FIELDS_BULK)
class R_2_jobs_id(baserlib.ResourceBase):
  """/2/jobs/[job_id] resource.

  """
  def GET(self):
    """Returns a job status.

    @return: a dictionary with job parameters.
        The result includes:
            - id: job ID as a number
            - status: current job status as a string
            - ops: involved OpCodes as a list of dictionaries for each
              opcodes in the job
            - opstatus: OpCodes status as a list
            - opresult: OpCodes results as a list of lists

    """
    job_id = self.items[0]
    result = self.GetClient().QueryJobs([job_id, ], J_FIELDS)[0]
    # QueryJobs returns None per entry for unknown job ids.
    if result is None:
      raise http.HttpNotFound()
    return baserlib.MapFields(J_FIELDS, result)

  def DELETE(self):
    """Cancel not-yet-started job.

    """
    job_id = self.items[0]
    result = self.GetClient().CancelJob(job_id)
    return result
class R_2_jobs_id_wait(baserlib.ResourceBase):
  """/2/jobs/[job_id]/wait resource.

  """
  # WaitForJobChange provides access to sensitive information and blocks
  # machine resources (it's a blocking RAPI call), hence restricting access.
  GET_ACCESS = [rapi.RAPI_ACCESS_WRITE]

  def GET(self):
    """Waits for job changes.

    Body parameters: "fields" (required list), "previous_job_info" and
    "previous_log_serial" (both optional) describe the client's last
    known state; returns None when nothing changed within the timeout.

    """
    job_id = self.items[0]
    fields = self.getBodyParameter("fields")
    prev_job_info = self.getBodyParameter("previous_job_info", None)
    prev_log_serial = self.getBodyParameter("previous_log_serial", None)
    if not isinstance(fields, list):
      raise http.HttpBadRequest("The 'fields' parameter should be a list")
    if not (prev_job_info is None or isinstance(prev_job_info, list)):
      raise http.HttpBadRequest("The 'previous_job_info' parameter should"
                                " be a list")
    if not (prev_log_serial is None or
            isinstance(prev_log_serial, (int, long))):
      raise http.HttpBadRequest("The 'previous_log_serial' parameter should"
                                " be a number")
    client = self.GetClient()
    result = client.WaitForJobChangeOnce(job_id, fields,
                                         prev_job_info, prev_log_serial,
                                         timeout=_WFJC_TIMEOUT)
    if not result:
      raise http.HttpNotFound()
    if result == constants.JOB_NOTCHANGED:
      # No changes
      return None
    (job_info, log_entries) = result
    return {
      "job_info": job_info,
      "log_entries": log_entries,
      }
class R_2_nodes(baserlib.OpcodeResource):
  """/2/nodes resource.

  """
  def GET(self):
    """Returns a list of all nodes.

    """
    client = self.GetClient()

    if not self.useBulk():
      rows = client.QueryNodes([], ["name"], False)
      names = [row[0] for row in rows]
      return baserlib.BuildUriList(names, "/2/nodes/%s",
                                   uri_fields=("id", "uri"))

    bulkdata = client.QueryNodes([], N_FIELDS, False)
    return baserlib.MapBulkFields(bulkdata, N_FIELDS)
class R_2_nodes_name(baserlib.OpcodeResource):
  """/2/nodes/[node_name] resource.

  """
  GET_ALIASES = {
    "sip": "secondary_ip",
    }

  def GET(self):
    """Send information about a node.

    """
    client = self.GetClient()
    rows = baserlib.HandleItemQueryErrors(client.QueryNodes,
                                          names=[self.items[0]],
                                          fields=N_FIELDS,
                                          use_locking=self.useLocking())
    return baserlib.MapFields(N_FIELDS, rows[0])
class R_2_nodes_name_powercycle(baserlib.OpcodeResource):
  """/2/nodes/[node_name]/powercycle resource.

  """
  POST_OPCODE = opcodes.OpNodePowercycle

  def GetPostOpInput(self):
    """Tries to powercycle a node.

    """
    static = {
      "node_name": self.items[0],
      "force": self.useForce(),
      }
    return (self.request_body, static)
class R_2_nodes_name_role(baserlib.OpcodeResource):
  """/2/nodes/[node_name]/role resource.

  """
  PUT_OPCODE = opcodes.OpNodeSetParams

  def GET(self):
    """Returns the current node role.

    @return: Node role, mapped through C{_NR_MAP}

    """
    node_name = self.items[0]
    client = self.GetClient()
    result = client.QueryNodes(names=[node_name], fields=["role"],
                               use_locking=self.useLocking())

    # result[0][0] is the "role" field of the single node queried
    return _NR_MAP[result[0][0]]

  def GetPutOpInput(self):
    """Sets the node role.

    The request body must be a plain string naming the new role; it is
    translated to the (master_candidate, offline, drained) flag triple of
    OpNodeSetParams.  A value of None presumably leaves that flag
    unchanged -- confirm against OpNodeSetParams semantics.

    """
    baserlib.CheckType(self.request_body, basestring, "Body contents")

    role = self.request_body

    if role == _NR_REGULAR:
      # A regular node has all special flags cleared
      candidate = False
      offline = False
      drained = False

    elif role == _NR_MASTER_CANDIDATE:
      candidate = True
      offline = drained = None

    elif role == _NR_DRAINED:
      drained = True
      candidate = offline = None

    elif role == _NR_OFFLINE:
      offline = True
      candidate = drained = None

    else:
      raise http.HttpBadRequest("Can't set '%s' role" % role)

    assert len(self.items) == 1

    return ({}, {
      "node_name": self.items[0],
      "master_candidate": candidate,
      "offline": offline,
      "drained": drained,
      "force": self.useForce(),
      # Auto-promotion is off unless explicitly requested via "auto-promote"
      "auto_promote": bool(self._checkIntVariable("auto-promote", default=0)),
      })
class R_2_nodes_name_evacuate(baserlib.OpcodeResource):
  """/2/nodes/[node_name]/evacuate resource.

  """
  POST_OPCODE = opcodes.OpNodeEvacuate

  def GetPostOpInput(self):
    """Evacuate all instances off a node.

    """
    static = {
      "node_name": self.items[0],
      "dry_run": self.dryRun(),
      }
    return (self.request_body, static)
class R_2_nodes_name_migrate(baserlib.OpcodeResource):
  """/2/nodes/[node_name]/migrate resource.

  """
  POST_OPCODE = opcodes.OpNodeMigrate

  def GetPostOpInput(self):
    """Migrate all primary instances from a node.

    If any query arguments are present, the legacy interface is used
    ("live"/"mode" query parameters); otherwise the opcode parameters
    come from the request body.

    """
    if self.queryargs:
      # Support old-style requests
      if "live" in self.queryargs and "mode" in self.queryargs:
        raise http.HttpBadRequest("Only one of 'live' and 'mode' should"
                                  " be passed")

      if "live" in self.queryargs:
        # Legacy boolean "live" flag, translated to a migration mode;
        # defaults to live migration when present without a value
        if self._checkIntVariable("live", default=1):
          mode = constants.HT_MIGRATION_LIVE
        else:
          mode = constants.HT_MIGRATION_NONLIVE
      else:
        mode = self._checkStringVariable("mode", default=None)

      data = {
        "mode": mode,
        }
    else:
      data = self.request_body

    return (data, {
      "node_name": self.items[0],
      })
class R_2_nodes_name_modify(baserlib.OpcodeResource):
  """/2/nodes/[node_name]/modify resource.

  """
  POST_OPCODE = opcodes.OpNodeSetParams

  def GetPostOpInput(self):
    """Changes parameters of a node.

    """
    assert len(self.items) == 1
    static = {
      "node_name": self.items[0],
      }
    return (self.request_body, static)
class R_2_nodes_name_storage(baserlib.OpcodeResource):
  """/2/nodes/[node_name]/storage resource.

  """
  # LUNodeQueryStorage acquires locks, hence restricting access to GET
  GET_ACCESS = [rapi.RAPI_ACCESS_WRITE]
  GET_OPCODE = opcodes.OpNodeQueryStorage

  def GetGetOpInput(self):
    """List storage available on a node.

    """
    output_fields = self._checkStringVariable("output_fields", None)
    if not output_fields:
      raise http.HttpBadRequest("Missing the required 'output_fields'"
                                " parameter")

    static = {
      "nodes": [self.items[0]],
      "storage_type": self._checkStringVariable("storage_type", None),
      "output_fields": output_fields.split(","),
      }
    return ({}, static)
class R_2_nodes_name_storage_modify(baserlib.OpcodeResource):
  """/2/nodes/[node_name]/storage/modify resource.

  """
  PUT_OPCODE = opcodes.OpNodeModifyStorage

  def GetPutOpInput(self):
    """Modifies a storage volume on a node.

    """
    name = self._checkStringVariable("name", None)
    if not name:
      raise http.HttpBadRequest("Missing the required 'name'"
                                " parameter")

    changes = {}
    if "allocatable" in self.queryargs:
      allocatable = self._checkIntVariable("allocatable", default=1)
      changes[constants.SF_ALLOCATABLE] = bool(allocatable)

    static = {
      "node_name": self.items[0],
      "storage_type": self._checkStringVariable("storage_type", None),
      "name": name,
      "changes": changes,
      }
    return ({}, static)
class R_2_nodes_name_storage_repair(baserlib.OpcodeResource):
  """/2/nodes/[node_name]/storage/repair resource.

  """
  PUT_OPCODE = opcodes.OpRepairNodeStorage

  def GetPutOpInput(self):
    """Repairs a storage volume on a node.

    """
    name = self._checkStringVariable("name", None)
    if not name:
      raise http.HttpBadRequest("Missing the required 'name'"
                                " parameter")

    static = {
      "node_name": self.items[0],
      "storage_type": self._checkStringVariable("storage_type", None),
      "name": name,
      }
    return ({}, static)
class R_2_networks(baserlib.OpcodeResource):
  """/2/networks resource.

  """
  POST_OPCODE = opcodes.OpNetworkAdd
  POST_RENAME = {
    "name": "network_name",
    }

  def GetPostOpInput(self):
    """Create a network.

    """
    assert not self.items
    static = {
      "dry_run": self.dryRun(),
      }
    return (self.request_body, static)

  def GET(self):
    """Returns a list of all networks.

    """
    client = self.GetClient()

    if not self.useBulk():
      rows = client.QueryNetworks([], ["name"], False)
      names = [row[0] for row in rows]
      return baserlib.BuildUriList(names, "/2/networks/%s",
                                   uri_fields=("name", "uri"))

    bulkdata = client.QueryNetworks([], NET_FIELDS, False)
    return baserlib.MapBulkFields(bulkdata, NET_FIELDS)
class R_2_networks_name(baserlib.OpcodeResource):
  """/2/networks/[network_name] resource.

  """
  DELETE_OPCODE = opcodes.OpNetworkRemove

  def GET(self):
    """Send information about a network.

    """
    client = self.GetClient()
    rows = baserlib.HandleItemQueryErrors(client.QueryNetworks,
                                          names=[self.items[0]],
                                          fields=NET_FIELDS,
                                          use_locking=self.useLocking())
    return baserlib.MapFields(NET_FIELDS, rows[0])

  def GetDeleteOpInput(self):
    """Delete a network.

    """
    assert len(self.items) == 1
    static = {
      "network_name": self.items[0],
      "dry_run": self.dryRun(),
      }
    return (self.request_body, static)
class R_2_networks_name_connect(baserlib.OpcodeResource):
  """/2/networks/[network_name]/connect resource.

  """
  PUT_OPCODE = opcodes.OpNetworkConnect

  def GetPutOpInput(self):
    """Changes some parameters of node group.

    """
    assert self.items
    static = {
      "network_name": self.items[0],
      "dry_run": self.dryRun(),
      }
    return (self.request_body, static)
class R_2_networks_name_disconnect(baserlib.OpcodeResource):
  """/2/networks/[network_name]/disconnect resource.

  """
  PUT_OPCODE = opcodes.OpNetworkDisconnect

  def GetPutOpInput(self):
    """Changes some parameters of node group.

    """
    assert self.items
    static = {
      "network_name": self.items[0],
      "dry_run": self.dryRun(),
      }
    return (self.request_body, static)
class R_2_networks_name_modify(baserlib.OpcodeResource):
  """/2/networks/[network_name]/modify resource.

  """
  PUT_OPCODE = opcodes.OpNetworkSetParams

  def GetPutOpInput(self):
    """Changes some parameters of network.

    """
    assert self.items
    static = {
      "network_name": self.items[0],
      }
    return (self.request_body, static)
class R_2_groups(baserlib.OpcodeResource):
  """/2/groups resource.

  """
  POST_OPCODE = opcodes.OpGroupAdd
  POST_RENAME = {
    "name": "group_name",
    }

  def GetPostOpInput(self):
    """Create a node group.

    """
    assert not self.items
    static = {
      "dry_run": self.dryRun(),
      }
    return (self.request_body, static)

  def GET(self):
    """Returns a list of all node groups.

    """
    client = self.GetClient()

    if not self.useBulk():
      rows = client.QueryGroups([], ["name"], False)
      names = [row[0] for row in rows]
      return baserlib.BuildUriList(names, "/2/groups/%s",
                                   uri_fields=("name", "uri"))

    bulkdata = client.QueryGroups([], G_FIELDS, False)
    return baserlib.MapBulkFields(bulkdata, G_FIELDS)
class R_2_groups_name(baserlib.OpcodeResource):
  """/2/groups/[group_name] resource.

  """
  DELETE_OPCODE = opcodes.OpGroupRemove

  def GET(self):
    """Send information about a node group.

    """
    client = self.GetClient()
    rows = baserlib.HandleItemQueryErrors(client.QueryGroups,
                                          names=[self.items[0]],
                                          fields=G_FIELDS,
                                          use_locking=self.useLocking())
    return baserlib.MapFields(G_FIELDS, rows[0])

  def GetDeleteOpInput(self):
    """Delete a node group.

    """
    assert len(self.items) == 1
    static = {
      "group_name": self.items[0],
      "dry_run": self.dryRun(),
      }
    return ({}, static)
class R_2_groups_name_modify(baserlib.OpcodeResource):
  """/2/groups/[group_name]/modify resource.

  """
  PUT_OPCODE = opcodes.OpGroupSetParams
  PUT_RENAME = {
    "custom_ndparams": "ndparams",
    "custom_ipolicy": "ipolicy",
    "custom_diskparams": "diskparams",
    }

  def GetPutOpInput(self):
    """Changes some parameters of node group.

    """
    assert self.items
    static = {
      "group_name": self.items[0],
      }
    return (self.request_body, static)
class R_2_groups_name_rename(baserlib.OpcodeResource):
  """/2/groups/[group_name]/rename resource.

  """
  PUT_OPCODE = opcodes.OpGroupRename

  def GetPutOpInput(self):
    """Changes the name of a node group.

    """
    assert len(self.items) == 1
    static = {
      "group_name": self.items[0],
      "dry_run": self.dryRun(),
      }
    return (self.request_body, static)
class R_2_groups_name_assign_nodes(baserlib.OpcodeResource):
  """/2/groups/[group_name]/assign-nodes resource.

  """
  PUT_OPCODE = opcodes.OpGroupAssignNodes

  def GetPutOpInput(self):
    """Assigns nodes to a group.

    """
    assert len(self.items) == 1
    static = {
      "group_name": self.items[0],
      "dry_run": self.dryRun(),
      "force": self.useForce(),
      }
    return (self.request_body, static)
def _ConvertUsbDevices(data):
"""Convert in place the usb_devices string to the proper format.
In Ganeti 2.8.4 the separator for the usb_devices hvparam was changed from
comma to space because commas cannot be accepted on the command line
(they already act as the separator between different hvparams). RAPI
should be able to accept commas for backwards compatibility, but we want
it to also accept the new space separator. Therefore, we convert
spaces into commas here and keep the old parsing logic elsewhere.
"""
try:
hvparams = data["hvparams"]
usb_devices = hvparams[constants.HV_USB_DEVICES]
hvparams[constants.HV_USB_DEVICES] = usb_devices.replace(" ", ",")
data["hvparams"] = hvparams
except KeyError:
#No usb_devices, no modification required
pass
class R_2_instances(baserlib.OpcodeResource):
  """/2/instances resource.

  """
  POST_OPCODE = opcodes.OpInstanceCreate
  POST_RENAME = {
    "os": "os_type",
    "name": "instance_name",
    }

  def GET(self):
    """Returns a list of all available instances.

    With bulk output, full field data for every instance is returned,
    otherwise only a list of URIs.

    """
    client = self.GetClient()

    use_locking = self.useLocking()
    if self.useBulk():
      bulkdata = client.QueryInstances([], I_FIELDS, use_locking)
      # Each row is post-processed through _UpdateBeparams (presumably a
      # backwards-compatibility adjustment -- see its definition)
      return map(_UpdateBeparams, baserlib.MapBulkFields(bulkdata, I_FIELDS))
    else:
      instancesdata = client.QueryInstances([], ["name"], use_locking)
      instanceslist = [row[0] for row in instancesdata]
      return baserlib.BuildUriList(instanceslist, "/2/instances/%s",
                                   uri_fields=("id", "uri"))

  def GetPostOpInput(self):
    """Create an instance.

    Only request data version 1 is accepted; version 0 (the default when
    the body lacks the version key) is rejected explicitly.

    @return: a job id

    """
    baserlib.CheckType(self.request_body, dict, "Body contents")

    # Default to request data version 0
    data_version = self.getBodyParameter(_REQ_DATA_VERSION, 0)

    if data_version == 0:
      raise http.HttpBadRequest("Instance creation request version 0 is no"
                                " longer supported")
    elif data_version != 1:
      raise http.HttpBadRequest("Unsupported request data version %s" %
                                data_version)

    data = self.request_body.copy()
    # Remove "__version__"
    data.pop(_REQ_DATA_VERSION, None)

    # Accept space-separated usb_devices hvparam (see _ConvertUsbDevices)
    _ConvertUsbDevices(data)

    return (data, {
      "dry_run": self.dryRun(),
      })
class R_2_instances_multi_alloc(baserlib.OpcodeResource):
  """/2/instances-multi-alloc resource.

  """
  POST_OPCODE = opcodes.OpInstanceMultiAlloc

  def GetPostOpInput(self):
    """Try to allocate multiple instances.

    Each entry of the "instances" list in the body is turned into a full
    OpInstanceCreate opcode before being embedded in the multi-alloc
    opcode input.

    @return: A dict with submitted jobs, allocatable instances and failed
             allocations

    """
    if "instances" not in self.request_body:
      raise http.HttpBadRequest("Request is missing required 'instances' field"
                                " in body")

    # Unlike most other RAPI calls, this one is composed of individual opcodes,
    # and we have to do the filling ourselves
    OPCODE_RENAME = {
      "os": "os_type",
      "name": "instance_name",
      }

    body = objects.FillDict(self.request_body, {
      "instances": [
        baserlib.FillOpcode(opcodes.OpInstanceCreate, inst, {},
                            rename=OPCODE_RENAME)
        for inst in self.request_body["instances"]
        ],
      })

    return (body, {
      "dry_run": self.dryRun(),
      })
class R_2_instances_name(baserlib.OpcodeResource):
  """/2/instances/[instance_name] resource.

  """
  DELETE_OPCODE = opcodes.OpInstanceRemove

  def GET(self):
    """Send information about an instance.

    """
    client = self.GetClient()
    rows = baserlib.HandleItemQueryErrors(client.QueryInstances,
                                          names=[self.items[0]],
                                          fields=I_FIELDS,
                                          use_locking=self.useLocking())
    return _UpdateBeparams(baserlib.MapFields(I_FIELDS, rows[0]))

  def GetDeleteOpInput(self):
    """Delete an instance.

    """
    assert len(self.items) == 1
    static = {
      "instance_name": self.items[0],
      "ignore_failures": False,
      "dry_run": self.dryRun(),
      }
    return (self.request_body, static)
class R_2_instances_name_info(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/info resource.

  """
  GET_OPCODE = opcodes.OpInstanceQueryData

  def GetGetOpInput(self):
    """Request detailed instance information.

    """
    assert len(self.items) == 1
    static_query = bool(self._checkIntVariable("static", default=0))
    return ({}, {
      "instances": [self.items[0]],
      "static": static_query,
      })
class R_2_instances_name_reboot(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/reboot resource.

  Implements an instance reboot.

  """
  POST_OPCODE = opcodes.OpInstanceReboot

  def GetPostOpInput(self):
    """Reboot an instance.

    The URI takes type=[hard|soft|full] and
    ignore_secondaries=[False|True] parameters.

    """
    default_type = [constants.INSTANCE_REBOOT_HARD]
    static = {
      "instance_name": self.items[0],
      "reboot_type": self.queryargs.get("type", default_type)[0],
      "ignore_secondaries": bool(self._checkIntVariable("ignore_secondaries")),
      "dry_run": self.dryRun(),
      }
    return (self.request_body, static)
class R_2_instances_name_startup(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/startup resource.

  Implements an instance startup.

  """
  PUT_OPCODE = opcodes.OpInstanceStartup

  def GetPutOpInput(self):
    """Startup an instance.

    The URI takes force=[False|True] parameter to start the instance
    if even if secondary disks are failing.

    """
    static = {
      "instance_name": self.items[0],
      "force": self.useForce(),
      "dry_run": self.dryRun(),
      "no_remember": bool(self._checkIntVariable("no_remember")),
      }
    return ({}, static)
class R_2_instances_name_shutdown(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/shutdown resource.

  Implements an instance shutdown.

  """
  PUT_OPCODE = opcodes.OpInstanceShutdown

  def GetPutOpInput(self):
    """Shutdown an instance.

    """
    static = {
      "instance_name": self.items[0],
      "no_remember": bool(self._checkIntVariable("no_remember")),
      "dry_run": self.dryRun(),
      }
    return (self.request_body, static)
def _ParseInstanceReinstallRequest(name, data):
  """Parses a request for reinstalling an instance.

  Builds the opcode sequence: shutdown, reinstall and (unless disabled
  via the "start" parameter) startup.

  """
  if not isinstance(data, dict):
    raise http.HttpBadRequest("Invalid body contents, not a dictionary")

  ostype = baserlib.CheckParameter(data, "os", default=None)
  start = baserlib.CheckParameter(data, "start", exptype=bool,
                                  default=True)
  osparams = baserlib.CheckParameter(data, "osparams", default=None)

  shutdown_op = opcodes.OpInstanceShutdown(instance_name=name)
  reinstall_op = opcodes.OpInstanceReinstall(instance_name=name,
                                             os_type=ostype,
                                             osparams=osparams)
  ops = [shutdown_op, reinstall_op]

  if start:
    ops.append(opcodes.OpInstanceStartup(instance_name=name, force=False))

  return ops
class R_2_instances_name_reinstall(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/reinstall resource.

  Implements an instance reinstall.

  """
  POST_OPCODE = opcodes.OpInstanceReinstall

  def POST(self):
    """Reinstall an instance.

    The URI takes os=name and nostartup=[0|1] optional
    parameters. By default, the instance will be started
    automatically.

    """
    # Body parameters and query arguments are mutually exclusive
    if self.request_body:
      if self.queryargs:
        raise http.HttpBadRequest("Can't combine query and body parameters")

      body = self.request_body
    elif self.queryargs:
      # Legacy interface, do not modify/extend
      body = {
        "os": self._checkStringVariable("os"),
        "start": not self._checkIntVariable("nostartup"),
        }
    else:
      body = {}

    ops = _ParseInstanceReinstallRequest(self.items[0], body)

    return self.SubmitJob(ops)
class R_2_instances_name_replace_disks(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/replace-disks resource.

  """
  POST_OPCODE = opcodes.OpInstanceReplaceDisks

  def GetPostOpInput(self):
    """Replaces disks on an instance.

    Parameters are taken from the request body or, for legacy clients,
    from the query string.  The "disks" value may be a list of integer
    indices or a comma-separated string such as "1,2,3".

    """
    static = {
      "instance_name": self.items[0],
      }

    if self.request_body:
      data = self.request_body
    elif self.queryargs:
      # Legacy interface, do not modify/extend
      data = {
        "remote_node": self._checkStringVariable("remote_node", default=None),
        "mode": self._checkStringVariable("mode", default=None),
        "disks": self._checkStringVariable("disks", default=None),
        "iallocator": self._checkStringVariable("iallocator", default=None),
        }
    else:
      data = {}

    # Parse disks
    try:
      raw_disks = data.pop("disks")
    except KeyError:
      pass
    else:
      if raw_disks:
        # Already a list of integers: keep as-is
        if ht.TListOf(ht.TInt)(raw_disks):  # pylint: disable=E1102
          data["disks"] = raw_disks
        else:
          # Backwards compatibility for strings of the format "1, 2, 3"
          try:
            data["disks"] = [int(part) for part in raw_disks.split(",")]
          except (TypeError, ValueError), err:
            raise http.HttpBadRequest("Invalid disk index passed: %s" % err)

    return (data, static)
class R_2_instances_name_activate_disks(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/activate-disks resource.

  """
  PUT_OPCODE = opcodes.OpInstanceActivateDisks

  def GetPutOpInput(self):
    """Activate disks for an instance.

    The URI might contain ignore_size to ignore current recorded size.

    """
    static = {
      "instance_name": self.items[0],
      "ignore_size": bool(self._checkIntVariable("ignore_size")),
      }
    return ({}, static)
class R_2_instances_name_deactivate_disks(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/deactivate-disks resource.

  """
  PUT_OPCODE = opcodes.OpInstanceDeactivateDisks

  def GetPutOpInput(self):
    """Deactivate disks for an instance.

    """
    static = {
      "instance_name": self.items[0],
      }
    return ({}, static)
class R_2_instances_name_recreate_disks(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/recreate-disks resource.

  """
  POST_OPCODE = opcodes.OpInstanceRecreateDisks

  def GetPostOpInput(self):
    """Recreate disks for an instance.

    """
    static = {
      "instance_name": self.items[0],
      }
    return ({}, static)
class R_2_instances_name_prepare_export(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/prepare-export resource.

  """
  PUT_OPCODE = opcodes.OpBackupPrepare

  def GetPutOpInput(self):
    """Prepares an export for an instance.

    """
    static = {
      "instance_name": self.items[0],
      "mode": self._checkStringVariable("mode"),
      }
    return ({}, static)
class R_2_instances_name_export(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/export resource.

  """
  PUT_OPCODE = opcodes.OpBackupExport
  PUT_RENAME = {
    "destination": "target_node",
    }

  def GetPutOpInput(self):
    """Exports an instance.

    """
    static = {
      "instance_name": self.items[0],
      }
    return (self.request_body, static)
class R_2_instances_name_migrate(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/migrate resource.

  """
  PUT_OPCODE = opcodes.OpInstanceMigrate

  def GetPutOpInput(self):
    """Migrates an instance.

    """
    static = {
      "instance_name": self.items[0],
      }
    return (self.request_body, static)
class R_2_instances_name_failover(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/failover resource.

  """
  PUT_OPCODE = opcodes.OpInstanceFailover

  def GetPutOpInput(self):
    """Does a failover of an instance.

    """
    static = {
      "instance_name": self.items[0],
      }
    return (self.request_body, static)
class R_2_instances_name_rename(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/rename resource.

  """
  PUT_OPCODE = opcodes.OpInstanceRename

  def GetPutOpInput(self):
    """Changes the name of an instance.

    """
    static = {
      "instance_name": self.items[0],
      }
    return (self.request_body, static)
class R_2_instances_name_modify(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/modify resource.

  """
  PUT_OPCODE = opcodes.OpInstanceSetParams
  PUT_RENAME = {
    "custom_beparams": "beparams",
    "custom_hvparams": "hvparams",
    }

  def GetPutOpInput(self):
    """Changes parameters of an instance.

    """
    # Work on a copy so the original request body stays untouched
    body = self.request_body.copy()
    _ConvertUsbDevices(body)

    return (body, {
      "instance_name": self.items[0],
      })
class R_2_instances_name_disk_grow(baserlib.OpcodeResource):
  """/2/instances/[instance_name]/disk/[disk_index]/grow resource.

  """
  POST_OPCODE = opcodes.OpInstanceGrowDisk

  def GetPostOpInput(self):
    """Increases the size of an instance disk.

    """
    instance_name = self.items[0]
    disk_index = int(self.items[1])
    return (self.request_body, {
      "instance_name": instance_name,
      "disk": disk_index,
      })
class R_2_instances_name_console(baserlib.ResourceBase):
  """/2/instances/[instance_name]/console resource.

  """
  # Access restricted via GET_ACCESS (both write and read capability listed)
  GET_ACCESS = [rapi.RAPI_ACCESS_WRITE, rapi.RAPI_ACCESS_READ]
  GET_OPCODE = opcodes.OpInstanceConsole

  def GET(self):
    """Request information for connecting to instance's console.

    @return: Serialized instance console description, see
             L{objects.InstanceConsole}

    """
    instance_name = self.items[0]
    client = self.GetClient()

    # Exactly one instance is queried; unpack its (console, oper_state) row
    ((console, oper_state), ) = \
      client.QueryInstances([instance_name], ["console", "oper_state"], False)

    if not oper_state:
      # No console while the instance is not operational
      raise http.HttpServiceUnavailable("Instance console unavailable")

    assert isinstance(console, dict)
    return console
def _GetQueryFields(args):
  """Tries to extract C{fields} query parameter.

  @type args: dictionary
  @rtype: list of string
  @raise http.HttpBadRequest: When parameter can't be found

  """
  if "fields" not in args:
    raise http.HttpBadRequest("Missing 'fields' query argument")

  return _SplitQueryFields(args["fields"][0])
def _SplitQueryFields(fields):
"""Splits fields as given for a query request.
@type fields: string
@rtype: list of string
"""
return [i.strip() for i in fields.split(",")]
class R_2_query(baserlib.ResourceBase):
  """/2/query/[resource] resource.

  """
  # Results might contain sensitive information
  GET_ACCESS = [rapi.RAPI_ACCESS_WRITE, rapi.RAPI_ACCESS_READ]
  PUT_ACCESS = GET_ACCESS
  GET_OPCODE = opcodes.OpQuery
  PUT_OPCODE = opcodes.OpQuery

  def _Query(self, fields, qfilter):
    # Run the query against the resource named in the URI and serialize
    client = self.GetClient()
    return client.Query(self.items[0], fields, qfilter).ToDict()

  def GET(self):
    """Returns resource information.

    @return: Query result, see L{objects.QueryResponse}

    """
    return self._Query(_GetQueryFields(self.queryargs), None)

  def PUT(self):
    """Submits job querying for resources.

    @return: Query result, see L{objects.QueryResponse}

    """
    body = self.request_body

    baserlib.CheckType(body, dict, "Body contents")

    try:
      fields = body["fields"]
    except KeyError:
      # Fall back to query arguments when body has no "fields" key
      fields = _GetQueryFields(self.queryargs)

    qfilter = body.get("qfilter", None)
    # TODO: remove this after 2.7 ("filter" is the deprecated key name)
    if qfilter is None:
      qfilter = body.get("filter", None)

    return self._Query(fields, qfilter)
class R_2_query_fields(baserlib.ResourceBase):
  """/2/query/[resource]/fields resource.

  """
  GET_OPCODE = opcodes.OpQueryFields

  def GET(self):
    """Retrieves list of available fields for a resource.

    @return: List of serialized L{objects.QueryFieldDefinition}

    """
    if "fields" in self.queryargs:
      fields = _SplitQueryFields(self.queryargs["fields"][0])
    else:
      fields = None

    return self.GetClient().QueryFields(self.items[0], fields).ToDict()
class _R_Tags(baserlib.OpcodeResource):
  """Quasiclass for tagging resources.

  Manages tags. When inheriting this class you must define the
  TAG_LEVEL for it.

  """
  TAG_LEVEL = None
  GET_OPCODE = opcodes.OpTagsGet
  PUT_OPCODE = opcodes.OpTagsSet
  DELETE_OPCODE = opcodes.OpTagsDel

  def __init__(self, items, queryargs, req, **kwargs):
    """A tag resource constructor.

    We have to override the default to sort out cluster naming case.

    """
    baserlib.OpcodeResource.__init__(self, items, queryargs, req, **kwargs)

    # Cluster-level tags are not tied to a named object
    if self.TAG_LEVEL == constants.TAG_CLUSTER:
      self.name = None
    else:
      self.name = items[0]

  def GET(self):
    """Returns a list of tags.

    Example: ["tag1", "tag2", "tag3"]

    """
    kind = self.TAG_LEVEL

    if kind in (constants.TAG_INSTANCE,
                constants.TAG_NODEGROUP,
                constants.TAG_NODE,
                constants.TAG_NETWORK):
      if not self.name:
        raise http.HttpBadRequest("Missing name on tag request")

      cl = self.GetClient()
      tags = list(cl.QueryTags(kind, self.name))

    elif kind == constants.TAG_CLUSTER:
      assert not self.name
      # TODO: Use query API?
      ssc = ssconf.SimpleStore()
      tags = ssc.GetClusterTags()

    else:
      raise http.HttpBadRequest("Unhandled tag type!")

    return list(tags)

  def GetPutOpInput(self):
    """Add a set of tags.

    The request as a list of strings should be PUT to this URI. And
    you'll have back a job id.

    """
    return ({}, {
      "kind": self.TAG_LEVEL,
      "name": self.name,
      # Tags arrive as repeated "tag" query arguments
      "tags": self.queryargs.get("tag", []),
      "dry_run": self.dryRun(),
      })

  def GetDeleteOpInput(self):
    """Delete a tag.

    In order to delete a set of tags, the DELETE
    request should be addressed to URI like:
    /tags?tag=[tag]&tag=[tag]

    """
    # Re-use code; the opcode input is identical for set and delete
    return self.GetPutOpInput()
class R_2_instances_name_tags(_R_Tags):
  """ /2/instances/[instance_name]/tags resource.

  Manages per-instance tags.

  """
  # All behavior inherited from _R_Tags; only the tag level differs
  TAG_LEVEL = constants.TAG_INSTANCE
class R_2_nodes_name_tags(_R_Tags):
  """ /2/nodes/[node_name]/tags resource.

  Manages per-node tags.

  """
  # All behavior inherited from _R_Tags; only the tag level differs
  TAG_LEVEL = constants.TAG_NODE
class R_2_groups_name_tags(_R_Tags):
  """ /2/groups/[group_name]/tags resource.

  Manages per-nodegroup tags.

  """
  # All behavior inherited from _R_Tags; only the tag level differs
  TAG_LEVEL = constants.TAG_NODEGROUP
class R_2_networks_name_tags(_R_Tags):
  """ /2/networks/[network_name]/tags resource.

  Manages per-network tags.

  """
  # All behavior inherited from _R_Tags; only the tag level differs
  TAG_LEVEL = constants.TAG_NETWORK
class R_2_tags(_R_Tags):
  """ /2/tags resource.

  Manages cluster tags.

  """
  # Cluster level: _R_Tags sets self.name to None for this level
  TAG_LEVEL = constants.TAG_CLUSTER
| kawamuray/ganeti | lib/rapi/rlib2.py | Python | gpl-2.0 | 41,238 |
# -*- coding: utf-8 -*-
"""The initialization file for the Pywikibot framework."""
#
# (C) Pywikibot team, 2008-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__release__ = '2.0rc4'
__version__ = '$Id: e26392a530582f286edf2d99e729218b2e93405e $'
import datetime
import math
import re
import sys
import threading
import json
if sys.version_info[0] > 2:
from queue import Queue
long = int
else:
from Queue import Queue
from warnings import warn
# Use pywikibot. prefix for all in-package imports; this is to prevent
# confusion with similarly-named modules in version 1 framework, for users
# who want to continue using both
from pywikibot import config2 as config
from pywikibot.bot import (
output, warning, error, critical, debug, stdout, exception,
input, input_choice, input_yn, inputChoice, handle_args, showHelp, ui, log,
calledModuleName, Bot, CurrentPageBot, WikidataBot, QuitKeyboardInterrupt,
# the following are flagged as deprecated on usage
handleArgs,
)
from pywikibot.exceptions import (
Error, InvalidTitle, BadTitle, NoPage, SectionError,
SiteDefinitionError, NoSuchSite, UnknownSite, UnknownFamily,
UnknownExtension,
NoUsername, UserBlocked,
PageRelatedError, IsRedirectPage, IsNotRedirectPage,
PageSaveRelatedError, PageNotSaved, OtherPageSaveError,
LockedPage, CascadeLockedPage, LockedNoPage, NoCreateError,
EditConflict, PageDeletedConflict, PageCreatedConflict,
ServerError, FatalServerError, Server504Error,
CaptchaError, SpamfilterError, CircularRedirect, InterwikiRedirectPage,
WikiBaseError, CoordinateGlobeUnknownException,
)
from pywikibot.tools import UnicodeMixin, redirect_func
from pywikibot.i18n import translate
from pywikibot.data.api import UploadWarning
from pywikibot.diff import PatchManager
import pywikibot.textlib as textlib
import pywikibot.tools
# Names of textlib functions that are re-exported at package level: the
# loop further below wraps each one with redirect_func() and installs it
# into this module's globals.
textlib_methods = (
    'unescape', 'replaceExcept', 'removeDisabledParts', 'removeHTMLParts',
    'isDisabled', 'interwikiFormat', 'interwikiSort',
    'getLanguageLinks', 'replaceLanguageLinks',
    'removeLanguageLinks', 'removeLanguageLinksAndSeparator',
    'getCategoryLinks', 'categoryFormat', 'replaceCategoryLinks',
    'removeCategoryLinks', 'removeCategoryLinksAndSeparator',
    'replaceCategoryInPlace', 'compileLinkR', 'extract_templates_and_params',
    'TimeStripper',
)
# pep257 doesn't understand when the first entry is on the next line
# NOTE(review): 'UserActionRefuse' is listed here but is not among the names
# imported from pywikibot.exceptions above -- presumably defined or aliased
# elsewhere in this module; verify before relying on it.
__all__ = ('config', 'ui', 'UnicodeMixin', 'translate',
           'Page', 'FilePage', 'Category', 'Link', 'User',
           'ItemPage', 'PropertyPage', 'Claim',
           'html2unicode', 'url2unicode', 'unicode2html',
           'stdout', 'output', 'warning', 'error', 'critical', 'debug',
           'exception', 'input_choice', 'input', 'input_yn', 'inputChoice',
           'handle_args', 'handleArgs', 'showHelp', 'ui', 'log',
           'calledModuleName', 'Bot', 'CurrentPageBot', 'WikidataBot',
           'Error', 'InvalidTitle', 'BadTitle', 'NoPage', 'SectionError',
           'SiteDefinitionError', 'NoSuchSite', 'UnknownSite', 'UnknownFamily',
           'UnknownExtension',
           'NoUsername', 'UserBlocked', 'UserActionRefuse',
           'PageRelatedError', 'IsRedirectPage', 'IsNotRedirectPage',
           'PageSaveRelatedError', 'PageNotSaved', 'OtherPageSaveError',
           'LockedPage', 'CascadeLockedPage', 'LockedNoPage', 'NoCreateError',
           'EditConflict', 'PageDeletedConflict', 'PageCreatedConflict',
           'UploadWarning',
           'ServerError', 'FatalServerError', 'Server504Error',
           'CaptchaError', 'SpamfilterError', 'CircularRedirect',
           'InterwikiRedirectPage',
           'WikiBaseError', 'CoordinateGlobeUnknownException',
           'QuitKeyboardInterrupt',
           )
# flake8 is unable to detect concatenation in the same operation
# like:
# ) + textlib_methods
# pep257 also doesn't support __all__ multiple times in a document
# so instead use this trick
globals()['__all__'] = globals()['__all__'] + textlib_methods

if sys.version_info[0] == 2:
    # T111615: Python 2 requires __all__ is bytes
    globals()['__all__'] = tuple(bytes(item) for item in __all__)

# Install a redirect_func() wrapper for every listed textlib function at
# package level (e.g. pywikibot.replaceExcept -> textlib.replaceExcept)
for _name in textlib_methods:
    target = getattr(textlib, _name)
    wrapped_func = redirect_func(target)
    globals()[_name] = wrapped_func

# Package-level aliases for the tools module decorators
deprecated = redirect_func(pywikibot.tools.deprecated)
deprecate_arg = redirect_func(pywikibot.tools.deprecate_arg)
class Timestamp(datetime.datetime):
"""Class for handling MediaWiki timestamps.
This inherits from datetime.datetime, so it can use all of the methods
and operations of a datetime object. To ensure that the results of any
operation are also a Timestamp object, be sure to use only Timestamp
objects (and datetime.timedeltas) in any operation.
Use Timestamp.fromISOformat() and Timestamp.fromtimestampformat() to
create Timestamp objects from MediaWiki string formats.
As these constructors are typically used to create objects using data
passed provided by site and page methods, some of which return a Timestamp
when previously they returned a MediaWiki string representation, these
methods also accept a Timestamp object, in which case they return a clone.
Use Site.getcurrenttime() for the current time; this is more reliable
than using Timestamp.utcnow().
"""
mediawikiTSFormat = "%Y%m%d%H%M%S"
ISO8601Format = "%Y-%m-%dT%H:%M:%SZ"
def clone(self):
"""Clone this instance."""
return self.replace(microsecond=self.microsecond)
@classmethod
def fromISOformat(cls, ts):
"""Convert an ISO 8601 timestamp to a Timestamp object."""
# If inadvertantly passed a Timestamp object, use replace()
# to create a clone.
if isinstance(ts, cls):
return ts.clone()
return cls.strptime(ts, cls.ISO8601Format)
@classmethod
def fromtimestampformat(cls, ts):
"""Convert a MediaWiki internal timestamp to a Timestamp object."""
# If inadvertantly passed a Timestamp object, use replace()
# to create a clone.
if isinstance(ts, cls):
return ts.clone()
return cls.strptime(ts, cls.mediawikiTSFormat)
def isoformat(self):
"""
Convert object to an ISO 8601 timestamp accepted by MediaWiki.
datetime.datetime.isoformat does not postfix the ISO formatted date
with a 'Z' unless a timezone is included, which causes MediaWiki
~1.19 and earlier to fail.
"""
return self.strftime(self.ISO8601Format)
toISOformat = redirect_func(isoformat, old_name='toISOformat',
class_name='Timestamp')
def totimestampformat(self):
"""Convert object to a MediaWiki internal timestamp."""
return self.strftime(self.mediawikiTSFormat)
def __str__(self):
    """Return a string format recognized by the API (ISO 8601 with 'Z')."""
    return self.isoformat()
def __add__(self, other):
    """Perform addition, returning a Timestamp instead of datetime."""
    result = super(Timestamp, self).__add__(other)
    if not isinstance(result, datetime.datetime):
        # e.g. NotImplemented for unsupported operand types
        return result
    return Timestamp(result.year, result.month, result.day, result.hour,
                     result.minute, result.second, result.microsecond,
                     result.tzinfo)
def __sub__(self, other):
    """Perform subtraction, returning a Timestamp instead of datetime."""
    result = super(Timestamp, self).__sub__(other)
    if not isinstance(result, datetime.datetime):
        # e.g. a timedelta (Timestamp - Timestamp) or NotImplemented
        return result
    return Timestamp(result.year, result.month, result.day, result.hour,
                     result.minute, result.second, result.microsecond,
                     result.tzinfo)
class Coordinate(object):

    """
    Class for handling and storing Coordinates.

    For now it's just being used for DataSite, but
    in the future we can use it for the GeoData extension.
    """

    def __init__(self, lat, lon, alt=None, precision=None, globe='earth',
                 typ="", name="", dim=None, site=None, entity=''):
        """
        Represent a geo coordinate.

        @param lat: Latitude
        @type lat: float
        @param lon: Longitude
        @type lon: float
        @param alt: Altitude? TODO FIXME
        @param precision: precision
        @type precision: float
        @param globe: Which globe the point is on
        @type globe: str
        @param typ: The type of coordinate point
        @type typ: str
        @param name: The name
        @type name: str
        @param dim: Dimension (in meters)
        @type dim: int
        @param entity: The URL entity of a Wikibase item
        @type entity: str
        """
        self.lat = lat
        self.lon = lon
        self.alt = alt
        self._precision = precision
        # Globe names are stored lower-case so lookups against the site's
        # globe table are case-insensitive.
        if globe:
            globe = globe.lower()
        self.globe = globe
        self._entity = entity
        self.type = typ
        self.name = name
        self._dim = dim
        if not site:
            # Default to the configured Wikibase data repository.
            self.site = Site().data_repository()
        else:
            self.site = site

    def __repr__(self):
        string = 'Coordinate(%s, %s' % (self.lat, self.lon)
        if self.globe != 'earth':
            string += ', globe="%s"' % self.globe
        string += ')'
        return string

    @property
    def entity(self):
        # An explicitly supplied entity URL wins; otherwise map the globe
        # name through the site's {name: entity-url} table.
        if self._entity:
            return self._entity
        return self.site.globes()[self.globe]

    def toWikibase(self):
        """
        Export the data to a JSON object for the Wikibase API.

        FIXME: Should this be in the DataSite object?
        """
        if self.globe not in self.site.globes():
            raise CoordinateGlobeUnknownException(
                u"%s is not supported in Wikibase yet."
                % self.globe)
        return {'latitude': self.lat,
                'longitude': self.lon,
                'altitude': self.alt,
                'globe': self.entity,
                'precision': self.precision,
                }

    @classmethod
    def fromWikibase(cls, data, site):
        """Constructor to create an object from Wikibase's JSON output."""
        # Invert the site's {name: entity-url} globe mapping so the entity
        # URL in the payload can be turned back into a globe name.
        globes = {}
        for k in site.globes():
            globes[site.globes()[k]] = k
        globekey = data['globe']
        if globekey:
            globe = globes.get(data['globe'])
        else:
            # Default to earth or should we use None here?
            globe = 'earth'
        return cls(data['latitude'], data['longitude'],
                   data['altitude'], data['precision'],
                   globe, site=site, entity=data['globe'])

    @property
    def precision(self):
        u"""
        Return the precision of the geo coordinate.

        The biggest error (in degrees) will be given by the longitudinal error;
        the same error in meters becomes larger (in degrees) further up north.
        We can thus ignore the latitudinal error.

        The longitudinal can be derived as follows:
        In small angle approximation (and thus in radians):
        M{Δλ ≈ Δpos / r_φ}, where r_φ is the radius of earth at the given latitude.
        Δλ is the error in longitude.
        M{r_φ = r cos φ}, where r is the radius of earth, φ the latitude
        Therefore::
            precision = math.degrees(self._dim/(radius*math.cos(math.radians(self.lat))))

        NOTE(review): if neither precision nor dim was supplied at
        construction, self._dim is None and this raises TypeError --
        confirm callers always set one of the two.
        """
        if not self._precision:
            radius = 6378137  # TODO: Support other globes
            self._precision = math.degrees(
                self._dim / (radius * math.cos(math.radians(self.lat))))
        return self._precision

    def precisionToDim(self):
        """Convert precision from Wikibase to GeoData's dim."""
        raise NotImplementedError
class WbTime(object):

    """A Wikibase time representation."""

    # Maps Wikibase precision names (and power-of-ten year strings) to the
    # integer precision codes 0-14 used by the API.
    PRECISION = {'1000000000': 0,
                 '100000000': 1,
                 '10000000': 2,
                 '1000000': 3,
                 '100000': 4,
                 '10000': 5,
                 'millenia': 6,
                 'century': 7,
                 'decade': 8,
                 'year': 9,
                 'month': 10,
                 'day': 11,
                 'hour': 12,
                 'minute': 13,
                 'second': 14
                 }

    # Wikibase time string: signed 11-digit year, then ISO-like date/time.
    FORMATSTR = '{0:+012d}-{1:02d}-{2:02d}T{3:02d}:{4:02d}:{5:02d}Z'

    def __init__(self, year=None, month=None, day=None,
                 hour=None, minute=None, second=None,
                 precision=None, before=0, after=0,
                 timezone=0, calendarmodel=None, site=None):
        """
        Create a new WbTime object.

        The precision can be set by the Wikibase int value (0-14) or by a human
        readable string, e.g., 'hour'. If no precision is given, it is set
        according to the given time units.
        """
        if year is None:
            raise ValueError('no year given')
        # Auto-detect precision from the most specific unit supplied; the
        # checks run fine-to-coarse, so the last one that fires wins, and
        # missing units are zero/one-filled for the timestamp string.
        self.precision = self.PRECISION['second']
        if second is None:
            self.precision = self.PRECISION['minute']
            second = 0
        if minute is None:
            self.precision = self.PRECISION['hour']
            minute = 0
        if hour is None:
            self.precision = self.PRECISION['day']
            hour = 0
        if day is None:
            self.precision = self.PRECISION['month']
            day = 1
        if month is None:
            self.precision = self.PRECISION['year']
            month = 1
        self.year = long(year)  # NOTE(review): long() is Python 2 only
        self.month = month
        self.day = day
        self.hour = hour
        self.minute = minute
        self.second = second
        self.after = after
        self.before = before
        self.timezone = timezone
        if calendarmodel is None:
            # Fall back to the data repository's default calendar model.
            if site is None:
                site = Site().data_repository()
            calendarmodel = site.calendarmodel()
        self.calendarmodel = calendarmodel
        # if precision is given it overwrites the autodetection above
        if precision is not None:
            if (isinstance(precision, int) and
                    precision in self.PRECISION.values()):
                self.precision = precision
            elif precision in self.PRECISION:
                self.precision = self.PRECISION[precision]
            else:
                raise ValueError('Invalid precision: "%s"' % precision)

    @classmethod
    def fromTimestr(cls, datetimestr, precision=14, before=0, after=0,
                    timezone=0, calendarmodel=None, site=None):
        """Create a WbTime from a Wikibase time string (+YYYY-MM-DDThh:mm:ssZ)."""
        match = re.match(r'([-+]?\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)Z',
                         datetimestr)
        if not match:
            raise ValueError(u"Invalid format: '%s'" % datetimestr)
        t = match.groups()
        return cls(long(t[0]), int(t[1]), int(t[2]),
                   int(t[3]), int(t[4]), int(t[5]),
                   precision, before, after, timezone, calendarmodel, site)

    def toTimestr(self):
        """
        Convert the data to a UTC date/time string.

        @return: str
        """
        return self.FORMATSTR.format(self.year, self.month, self.day,
                                     self.hour, self.minute, self.second)

    def toWikibase(self):
        """
        Convert the data to a JSON object for the Wikibase API.

        @return: dict
        """
        json = {'time': self.toTimestr(),
                'precision': self.precision,
                'after': self.after,
                'before': self.before,
                'timezone': self.timezone,
                'calendarmodel': self.calendarmodel
                }
        return json

    @classmethod
    def fromWikibase(cls, ts):
        """Create a WbTime from the JSON dict returned by the Wikibase API."""
        return cls.fromTimestr(ts[u'time'], ts[u'precision'],
                               ts[u'before'], ts[u'after'],
                               ts[u'timezone'], ts[u'calendarmodel'])

    def __str__(self):
        return json.dumps(self.toWikibase(), indent=4, sort_keys=True,
                          separators=(',', ': '))

    def __eq__(self, other):
        # NOTE(review): compares raw __dict__s and defines neither __ne__
        # (needed on Python 2) nor __hash__.
        return self.__dict__ == other.__dict__

    def __repr__(self):
        return u"WbTime(year=%(year)d, month=%(month)d, day=%(day)d, " \
               u"hour=%(hour)d, minute=%(minute)d, second=%(second)d, " \
               u"precision=%(precision)d, before=%(before)d, after=%(after)d, " \
               u"timezone=%(timezone)d, calendarmodel='%(calendarmodel)s')" \
               % self.__dict__
class WbQuantity(object):

    """A Wikibase quantity representation."""

    def __init__(self, amount, unit=None, error=None):
        u"""
        Create a new WbQuantity object.

        @param amount: number representing this quantity
        @type amount: float
        @param unit: not used (only unit-less quantities are supported)
        @param error: the uncertainty of the amount (e.g. ±1)
        @type error: float, or tuple of two floats, where the first value is
            the upper error and the second is the lower error value.
        """
        if amount is None:
            raise ValueError('no amount given')
        if unit is None:
            unit = '1'
        self.amount = amount
        self.unit = unit
        # Normalise error into (upper, lower) components; a scalar error is
        # applied symmetrically, None means exact.
        upperError = lowerError = 0
        if isinstance(error, tuple):
            upperError, lowerError = error
        elif error is not None:
            upperError = lowerError = error
        self.upperBound = self.amount + upperError
        self.lowerBound = self.amount - lowerError

    def toWikibase(self):
        """Convert the data to a JSON object for the Wikibase API."""
        # local name shadows the json module (harmless in this method)
        json = {'amount': self.amount,
                'upperBound': self.upperBound,
                'lowerBound': self.lowerBound,
                'unit': self.unit
                }
        return json

    @classmethod
    def fromWikibase(cls, wb):
        """
        Create a WbQuanity from the JSON data given by the Wikibase API.

        @param wb: Wikibase JSON

        SECURITY NOTE(review): eval() here executes arbitrary expressions
        coming from a remote API response; a numeric parse (int/float or
        Decimal) would be safer and should replace it.
        """
        amount = eval(wb['amount'])
        upperBound = eval(wb['upperBound'])
        lowerBound = eval(wb['lowerBound'])
        error = (upperBound - amount, amount - lowerBound)
        return cls(amount, wb['unit'], error)

    def __str__(self):
        return json.dumps(self.toWikibase(), indent=4, sort_keys=True,
                          separators=(',', ': '))

    def __eq__(self, other):
        # NOTE(review): no __ne__ (Python 2) or __hash__ defined.
        return self.__dict__ == other.__dict__

    def __repr__(self):
        return (u"WbQuantity(amount=%(amount)s, upperBound=%(upperBound)s, "
                u"lowerBound=%(lowerBound)s, unit=%(unit)s)" % self.__dict__)
# Cache of Site instances, keyed 'interface:family:code:user' (see Site()).
_sites = {}
_url_cache = {}  # The code/fam pair for each URL; None marks a failed lookup
def Site(code=None, fam=None, user=None, sysop=None, interface=None, url=None):
    """A factory method to obtain a Site object.

    Site objects are cached and reused by this method.

    By default rely on config settings. These defaults may all be overridden
    using the method parameters.

    @param code: language code (override config.mylang)
    @type code: string
    @param fam: family name or object (override config.family)
    @type fam: string or Family
    @param user: bot user name to use on this site (override config.usernames)
    @type user: unicode
    @param sysop: sysop user to use on this site (override config.sysopnames)
    @type sysop: unicode
    @param interface: site class or name of class in pywikibot.site
        (override config.site_interface)
    @type interface: subclass of L{pywikibot.site.BaseSite} or string
    @param url: Instead of code and fam, does try to get a Site based on the
        URL. Still requires that the family supporting that URL exists.
    @type url: string
    """
    # Either code and fam or only url
    # NOTE(review): assert is stripped under python -O; argument validation
    # should raise ValueError instead.
    assert(not url or (not code and not fam))
    _logger = "wiki"
    if url:
        if url in _url_cache:
            cached = _url_cache[url]
            if cached:
                code = cached[0]
                fam = cached[1]
            else:
                # A previous scan already failed to match this URL.
                raise SiteDefinitionError("Unknown URL '{0}'.".format(url))
        else:
            # Iterate through all families and look, which does apply to
            # the given URL
            for fam in config.family_files:
                try:
                    family = pywikibot.family.Family.load(fam)
                    code = family.from_url(url)
                    if code:
                        _url_cache[url] = (code, fam)
                        break
                except Exception as e:
                    pywikibot.warning('Error in Family(%s).from_url: %s'
                                      % (fam, e))
            else:
                # for/else: no family matched; cache the negative result.
                _url_cache[url] = None
                # TODO: As soon as AutoFamily is ready, try and use an
                # AutoFamily
                raise SiteDefinitionError("Unknown URL '{0}'.".format(url))
    else:
        # Fallback to config defaults
        code = code or config.mylang
        fam = fam or config.family
    interface = interface or config.site_interface

    # config.usernames is initialised with a dict for each family name
    family_name = str(fam)
    if family_name in config.usernames:
        # Per-code username wins over the family-wide '*' wildcard.
        user = user or config.usernames[family_name].get(code) \
            or config.usernames[family_name].get('*')
        sysop = sysop or config.sysopnames[family_name].get(code) \
            or config.sysopnames[family_name].get('*')

    if not isinstance(interface, type):
        # If it isnt a class, assume it is a string
        try:
            tmp = __import__('pywikibot.site', fromlist=[interface])
            interface = getattr(tmp, interface)
        except ImportError:
            raise ValueError("Invalid interface name '%(interface)s'" % locals())

    if not issubclass(interface, pywikibot.site.BaseSite):
        warning('Site called with interface=%s' % interface.__name__)

    user = pywikibot.tools.normalize_username(user)
    # Cache key; a cached entry of the wrong class is replaced.
    key = '%s:%s:%s:%s' % (interface.__name__, fam, code, user)
    if key not in _sites or not isinstance(_sites[key], interface):
        _sites[key] = interface(code=code, fam=fam, user=user, sysop=sysop)
        debug(u"Instantiated %s object '%s'"
              % (interface.__name__, _sites[key]), _logger)

        if _sites[key].code != code:
            warn('Site %s instantiated using different code "%s"'
                 % (_sites[key], code), UserWarning, 2)

    return _sites[key]
# alias for backwards-compatibility
getSite = pywikibot.tools.redirect_func(Site, old_name='getSite')
from .page import (
Page,
FilePage,
Category,
Link,
User,
ItemPage,
PropertyPage,
Claim,
)
from .page import html2unicode, url2unicode, unicode2html
# Matches an internal wiki link [[title|label]]; the title may not contain
# ']', '|', '[', '<', '>', '{' or '}'.
link_regex = re.compile(r'\[\[(?P<title>[^\]|[<>{}]*)(\|.*?)?\]\]')
@pywikibot.tools.deprecated("comment parameter for page saving method")
def setAction(s):
    """Set a summary to use for changed page submissions (deprecated)."""
    config.default_edit_summary = s
def showDiff(oldtext, newtext, context=0):
    """
    Output a string showing the differences between oldtext and newtext.

    The differences are highlighted (only on compatible systems) to show which
    changes were made.  Printing is delegated to PatchManager.
    """
    PatchManager(oldtext, newtext, context=context).print_hunks()
# Throttle and thread handling
stopped = False  # guards against running the shutdown sequence twice


def stopme():
    """Drop this process from the throttle log, after pending threads finish.

    Can be called manually if desired, but if not, will be called automatically
    at Python exit.
    """
    global stopped
    _logger = "wiki"

    if not stopped:
        debug(u"stopme() called", _logger)

        def remaining():
            # Queue size minus the stop sentinel that stopme() enqueues.
            remainingPages = page_put_queue.qsize() - 1
            # -1 because we added a None element to stop the queue
            remainingSeconds = datetime.timedelta(
                seconds=(remainingPages * config.put_throttle))
            return (remainingPages, remainingSeconds)

        # Sentinel tuple: async_manager exits when it sees request=None.
        page_put_queue.put((None, [], {}))
        stopped = True

        if page_put_queue.qsize() > 1:
            num, sec = remaining()
            format_values = dict(num=num, sec=sec)
            output(u'\03{lightblue}'
                   u'Waiting for %(num)i pages to be put. '
                   u'Estimated time remaining: %(sec)s'
                   u'\03{default}' % format_values)

        # Wait for the put thread to drain, letting the user abort with ^C.
        while(_putthread.isAlive()):
            try:
                _putthread.join(1)
            except KeyboardInterrupt:
                if input_yn('There are %i pages remaining in the queue. '
                            'Estimated time remaining: %s\nReally exit?'
                            % remaining(), default=False, automatic_quit=False):
                    return

    # only need one drop() call because all throttles use the same global pid
    try:
        list(_sites.values())[0].throttle.drop()
        log(u"Dropped throttle(s).")
    except IndexError:
        # no Site was ever instantiated -- nothing to drop
        pass
import atexit
# Ensure stopme() runs at interpreter exit even if the caller forgets.
atexit.register(stopme)
# Create a separate thread for asynchronous page saves (and other requests)
def async_manager():
    """Daemon; take requests from the queue and execute them in background."""
    while True:
        (request, args, kwargs) = page_put_queue.get()
        # request=None is the stop sentinel enqueued by stopme().
        # NOTE(review): task_done() is never called for the sentinel, so a
        # page_put_queue.join() would hang after shutdown.
        if request is None:
            break
        request(*args, **kwargs)
        page_put_queue.task_done()
def async_request(request, *args, **kwargs):
    """Put a request on the queue, and start the daemon if necessary."""
    if not _putthread.isAlive():
        try:
            # NOTE(review): grabs the Queue's private mutex to serialise the
            # start attempt; a dedicated lock would avoid touching internals.
            page_put_queue.mutex.acquire()
            try:
                _putthread.start()
            except (AssertionError, RuntimeError):
                # Thread was already started by a concurrent caller.
                pass
        finally:
            page_put_queue.mutex.release()
    page_put_queue.put((request, args, kwargs))
# queue to hold pending requests
page_put_queue = Queue(config.max_queue_size)
# set up the background thread
_putthread = threading.Thread(target=async_manager)
# identification for debugging purposes
_putthread.setName('Put-Thread')
# daemonised so the thread cannot block interpreter exit by itself
_putthread.setDaemon(True)
# Module-level deprecation shims: keep old attribute names importable while
# steering users toward their replacements.
wrapper = pywikibot.tools.ModuleDeprecationWrapper(__name__)
wrapper._add_deprecated_attr('ImagePage', FilePage)
wrapper._add_deprecated_attr(
    'PageNotFound', pywikibot.exceptions.DeprecatedPageNotFoundError,
    warning_message=('{0}.{1} is deprecated, and no longer '
                     'used by pywikibot; use http.fetch() instead.'))
wrapper._add_deprecated_attr(
    'UserActionRefuse', pywikibot.exceptions._EmailUserError,
    warning_message='UserActionRefuse is deprecated; '
                    'use UserRightsError and/or NotEmailableError')
| hperala/kontuwikibot | pywikibot/__init__.py | Python | mit | 26,829 |
"""
Copyright 2016 Peter Beard
Distributed under the GNU GPL v2. For full terms, see the LICENSE file.
Problem #14
The following iterative sequence is defined for the set of positive integers:
n → n/2 (n is even)
n → 3n + 1 (n is odd)
Using the rule above and starting with 13, we generate the following sequence:
13 → 40 → 20 → 10 → 5 → 16 → 8 → 4 → 2 → 1
It can be seen that this sequence (starting at 13 and finishing at 1)
contains 10 terms. Although it has not been proved yet (Collatz Problem), it
is thought that all starting numbers finish at 1.
Which starting number, under one million, produces the longest chain?
NOTE: Once the chain starts the terms are allowed to go above one million.
"""
def collatz_length(n, memo=None):
    """
    Return the length of the Collatz sequence starting with n.

    Optional memo parameter is a dictionary of {n: collatz_length(n)};
    when the walk reaches a memoized value the cached tail length is
    reused instead of being recomputed.
    """
    length = 1
    while n > 1:
        if memo is not None and n in memo:
            # 'length' already counts the current term and so does memo[n];
            # subtract the doubly-counted term.
            return length + memo[n] - 1
        if n % 2 == 0:
            # Floor division: '/' would silently turn n into a float on
            # Python 3 (and pollute memo keys).
            n //= 2
        else:
            n = 3 * n + 1
        length += 1
    return length
def solution(limit=1000000):
    """
    Return the starting number below *limit* with the longest Collatz chain.

    @param limit: exclusive upper bound on starting numbers (default one
        million, as in the original problem statement).

    A shared memo dict lets collatz_length reuse tail lengths computed for
    earlier starting values, keeping the search fast.
    """
    memo = {}
    max_length = 0
    max_n = 1
    for n in range(1, limit):
        length = collatz_length(n, memo)
        memo[n] = length
        if length > max_length:
            max_length = length
            max_n = n
    return max_n
# Entry point: the full search below one million takes a few seconds.
print("The longest Collatz sequence starts with {}".format(solution()))
| PeterBeard/project-euler | python/problem-014.py | Python | gpl-2.0 | 1,490 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Author: Wellington Silva a.k.a. Well
Date: July 2017
Name: how2decode-html.py
Purpose: Decode data with HTML Escape.
Description: Script Based on script CAL9000 by Chris Loomis from OWASP Project, posted at:
<https://www.owasp.org/index.php/Category:OWASP_CAL9000_Project>
Version: 1.0B
Licence: GPLv3
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Other itens: Copyright (c) 2017, How2Security All rights reserved.
'''
import sys
import string
import argparse
sys.path.append('lib-conv')
from html_esc import CodingHTMLEsc
from colors import *
def main():
    """Parse command-line options and HTML-unescape-decode the given string."""
    usage = '''%(prog)s [--str="data"]'''
    parser = argparse.ArgumentParser(usage=usage)
    parser.add_argument('--str', action='store', type=str, dest='txt', help='The String Decode')
    parser.add_argument("--version", action="version", version="%(prog)s 1.0b")
    args = parser.parse_args()
    txt = args.txt

    if len(sys.argv) == 1:
        # No arguments at all: show usage and stop.  Previously execution
        # fell through into the decode attempt with txt=None, which raised
        # AttributeError and printed a misleading "non-HTML Escape" error.
        parser.print_help()
        sys.exit(1)

    try:
        msg = CodingHTMLEsc(txt.lower())
        print(color("[+] HTML Escape: ", 2, 1) + "%s" % msg.decoded())
    except AttributeError:
        # txt.lower() failed, i.e. no usable string was supplied.
        print(color("\n[!] This data non-HTML Escape", 1, 1))
        print(color("[*] Please try usage data with how2decoded-brute-force.py\n", 3, 1))
        sys.exit(1)

if __name__ == "__main__":
    main()
| how2security/how2convert | how2decode-html.py | Python | gpl-3.0 | 1,973 |
from django import forms
from django.forms.models import inlineformset_factory
from starwars.models import Movie, Episode
class MovieForm(forms.ModelForm):
    """ModelForm exposing every field of the Movie model."""

    class Meta:
        """
        ModelForm requires either "fields" or "exclude" to be declared;
        excluding nothing is a shorthand for "use every model field"
        without having to keep a field list in sync with the model.
        """
        model = Movie
        exclude = []
# Inline formset tying child Episode rows to a parent Movie, exposing the
# 'name' and 'scenario' fields plus one extra blank row for a new episode.
EpisodeFormSet = inlineformset_factory(Movie, Episode, fields=('name', 'scenario'), extra=1)
| foxmask/tracking-starwars | starwars/forms.py | Python | mit | 624 |
#!/usr/bin/python
import boto3
import botocore
import fcntl
import json
import re
import socket
import struct
import sys
import time
from datetime import datetime
from optparse import OptionParser
# Route53 hosted zone and record defaults for this environment.
ZONE_ID = 'MYROUTE53-ZONEID'
DEF_DOMAIN = ".myroute53domain"  # appended to bare (non-FQDN) hostnames
REGION = "us-east-1"
TTL = 600
RTYPE = "A"
COMMENT = "dnsmgmt auto registration"  # also used as the record SetIdentifier
LOG_PREFIX = "[dnsmgmt] "

# Module-level boto3 client shared by every helper below.
client = boto3.client('route53')
t1 = datetime.now().strftime("%c")  # NOTE(review): t1 appears unused
def get_ip_address(ifname):
    """Return the IPv4 address bound to *ifname* (e.g. 'eth0').

    Uses the SIOCGIFADDR ioctl (0x8915) on a throwaway datagram socket,
    so this only works on Linux.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    return socket.inet_ntoa(fcntl.ioctl( s.fileno(), 0x8915, struct.pack('256s', ifname[:15]) )[20:24])
def getridof_fqdn(host):
    """Strip any domain part from *host*, returning only the short name."""
    # Splitting on the first dot returns the input unchanged when there is
    # no dot at all, matching the old regex-guarded behaviour.
    return host.split('.', 1)[0]
def get_hostname(h=None):
    """Resolve and return (hostname, ip).

    With no argument the local hostname is used; a bare name (no dot) gets
    DEF_DOMAIN appended before resolution.  If the local name does not
    resolve, the address is read directly from eth0.
    """
    if h:
        #re_fqdn = re.compile('[a-zA-Z\d-]{,63}(\.[a-zA-Z\d-]{,63})*', re.IGNORECASE)
        if re.search('\.', h):
            # already fully qualified
            ip = socket.gethostbyname(h)
            host = h
        else:
            h = h + DEF_DOMAIN
            ip = socket.gethostbyname(h)
            host = h
    else:
        host = socket.gethostname()
        try:
            ip = socket.gethostbyname(host)
        except:
            # NOTE(review): bare except -- any failure (not just resolution
            # errors) silently falls back to eth0's address.
            ip = get_ip_address('eth0')
            pass
    print "%s Hostname : %s" % (LOG_PREFIX, host)
    print "%s IP : %s" % (LOG_PREFIX, ip)
    return(host,ip)
#####
# Example response shape:
#{u'ResourceRecordSets': [ {u'Name': 'host.mydomain.com.', u'Type': 'A', u'Region': 'us-east-1', u'ResourceRecords': [ {u'Value': '10.0.0.1'}], u'TTL': 600, u'SetIdentifier': 'dnsmgmt auto registration'} ],
def list_rr(entry=None, t=None, ALL=0):
    """Return matching records as newline-separated 'name type value' text.

    With ALL=0, lists records starting at (*entry*, *t*); any other value
    dumps the whole hosted zone.  NOTE(review): only the first response
    page is read -- truncated result sets are not paginated.
    """
    if ALL == 0:
        response = client.list_resource_record_sets(
            HostedZoneId=ZONE_ID,
            StartRecordName=entry,
            StartRecordType=t
        )
    else:
        response = client.list_resource_record_sets(
            HostedZoneId=ZONE_ID
        )
    rr_entries = ''
    for k,v in response.iteritems():  # Python 2 dict iteration
        if k == 'ResourceRecordSets':
            for a in response[k]:
                rr_entries = rr_entries + a['Name'] + ' '
                rr_entries = rr_entries + a['Type'] + ' '
                for b in a['ResourceRecords']:
                    rr_entries = rr_entries + b['Value'] + '\n'
    return rr_entries
def create_record(entry, ip, t=RTYPE, reg=REGION, ttl=TTL, comment=COMMENT):
    """UPSERT the record '<entry><DEF_DOMAIN>' -> *ip* and wait until applied.

    Blocks, polling every 10 seconds, while the Route53 change is PENDING.
    NOTE(review): if change_resource_record_sets raises, the bare except
    below swallows the error and the following print(response) raises
    NameError because response was never bound.  The PENDING loop also has
    no upper bound.
    """
    print "%s Starting creation process %s %s" % (LOG_PREFIX, entry, ip)
    name = entry + DEF_DOMAIN
    try:
        response = client.change_resource_record_sets(
            HostedZoneId=ZONE_ID,
            ChangeBatch={
                'Changes': [
                    {
                        'Action': 'UPSERT',
                        'ResourceRecordSet': {
                            'Name': name,
                            'Region': reg,
                            'Type': t,
                            'TTL': ttl,
                            'SetIdentifier': comment,
                            'ResourceRecords': [
                                {
                                    'Value': ip
                                }
                            ],
                        }
                    }
                ]
            }
        )
    except:
        print "%s Error creating entry %s" % (LOG_PREFIX,name)
    print(response)
    print "%s Status: %s | Id: %s" % (LOG_PREFIX, response['ChangeInfo']['Status'], response['ChangeInfo']['Id'])
    s = response['ChangeInfo']['Status']
    i = response['ChangeInfo']['Id']
    # Poll until the change leaves PENDING (i.e. becomes INSYNC).
    while s == 'PENDING':
        s = get_change_status(i)
        time.sleep(10)
    print "%s Entry %s has been created. Request status is: %s" % (LOG_PREFIX, name, s)
    return response
def get_change_status(reqid):
    """Return the Route53 change status (e.g. 'PENDING'/'INSYNC') for *reqid*."""
    return client.get_change(Id=reqid)['ChangeInfo']['Status']
def delete_record(entry, ip, reg=REGION, t=RTYPE, ttl=TTL, comment=COMMENT):
    """Delete the record '<entry><DEF_DOMAIN>' -> *ip* and wait until applied.

    The record attributes must match the existing record exactly for the
    DELETE to succeed.  NOTE(review): same issues as create_record -- a
    bare except leaves 'response' unbound for the prints below, and the
    PENDING poll loop is unbounded.
    """
    print "%s Starting deletion process %s %s" % (LOG_PREFIX, entry, ip)
    name = entry + DEF_DOMAIN
    try:
        response = client.change_resource_record_sets(
            HostedZoneId=ZONE_ID,
            ChangeBatch={
                'Changes': [
                    {
                        'Action': 'DELETE',
                        'ResourceRecordSet': {
                            'Name': name,
                            'Region': reg,
                            'Type': t,
                            'TTL': ttl,
                            'SetIdentifier': comment,
                            'ResourceRecords': [
                                {
                                    'Value': ip
                                }
                            ],
                        }
                    }
                ]
            }
        )
    except:
        print "%s Error deleting record %s" % (LOG_PREFIX,name)
    print(response)
    print "%s Status: %s | Id: %s" % (LOG_PREFIX, response['ChangeInfo']['Status'], response['ChangeInfo']['Id'])
    s = response['ChangeInfo']['Status']
    i = response['ChangeInfo']['Id']
    # Poll until the change leaves PENDING.
    while s == 'PENDING':
        s = get_change_status(i)
        time.sleep(10)
    print "%s Entry %s has been removed" % (LOG_PREFIX, name)
    return response
def helpTxt(option, opt, value, parser):
    """optparse callback for -?: print detailed usage and exit cleanly."""
    print "\n-a/--add --> ADD action\n"
    print "-r/--remove --> REMOVE action\n"
    print "-n/--name --> HOSTNAME to add\n"
    print "-i/--ip --> IPADDR to add"
    sys.exit(0)
def main():
    """Parse CLI options and add/remove the corresponding Route53 record."""
    parser = OptionParser()
    parser.add_option("-a", "--add", action="store_const", const=1, dest="dnsadd", help="add new DNS entry")
    parser.add_option("-r", "--remove", action="store_const", const=1, dest="dnsremove", help="remove a DNS entry")
    parser.add_option("-n", "--name", dest="dnshost", help="hostname NOT fqdn")
    parser.add_option("-i", "--ip", dest="dnsip", help="ipaddress")
    parser.add_option("-?", action="callback", callback=helpTxt, help="detailed help")
    (options, args) = parser.parse_args()

    # Default to the local machine's short name and IP; flags override.
    h,i = get_hostname()
    h = getridof_fqdn(h)
    if (options.dnshost):
        h = options.dnshost
    if (options.dnsip):
        i = options.dnsip

    # No action flag (or -a) means add; -r alone means remove.
    if ( (not(options.dnsadd) and not(options.dnsremove)) or (options.dnsadd) ):
        #Assume you are adding the local machine
        r = create_record(entry=h, ip=i)
    if (not(options.dnsadd) and options.dnsremove):
        #Assume you are deleting the local machine
        d = delete_record(entry=h, ip=i)

    l = list_rr(h+DEF_DOMAIN,RTYPE); print "%s %s" % (LOG_PREFIX, l)

if __name__ == '__main__':
    main()
| rkferreira/tools | aws-route53-hostsmgmt/dnsmgmt.py | Python | gpl-2.0 | 5,470 |
# Generated by Django 2.2.6 on 2020-01-24 04:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add User.starred_themes, a many-to-many link from users to Themes."""

    dependencies = [
        ('user_profile', '0015_auto_20200116_1116'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='starred_themes',
            field=models.ManyToManyField(related_name='starred_themes', to='user_profile.Theme'),
        ),
    ]
| daily-bruin/meow | meow/user_profile/migrations/0016_user_starred_themes.py | Python | agpl-3.0 | 442 |
from time import clock
def timer(function):
    """Decorator: print the wrapped function's result and its wall-clock time.

    The result is printed rather than returned, so decorated solvers are
    fire-and-forget entry points (the wrapper returns None, as before).
    """
    import functools
    # time.clock() was deprecated in 3.3 and removed in Python 3.8;
    # perf_counter() is the recommended replacement for timing.
    from time import perf_counter

    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        start = perf_counter()
        print(function(*args, **kwargs))
        print("Solution took: %f seconds." % (perf_counter() - start))
    return wrapper
@timer
def find_answer():
    """Count the words in words.txt whose alphabetical value is triangular.

    Word scores use A=1 .. Z=26 (ord(ch) - 64), which assumes the data
    file's words are upper-case as in the Project Euler word list.
    """
    words = []
    with open("words.txt") as f:
        # File is a single line of comma-separated, double-quoted words.
        for item in f.read().split(","):
            words.append(item.strip("\""))
    # First 100 triangle numbers (up to 5050) -- assumed to exceed any
    # word score in the data set.
    tri = triangles(100)
    amount = 0
    for word in words:
        total = sum([ord(x) - 64 for x in word])
        if total in tri:
            amount += 1
    return amount
def triangles(amount):
    """Return the set of the first *amount* triangle numbers k*(k+1)/2."""
    return {k * (k + 1) // 2 for k in range(1, amount + 1)}
if __name__ == "__main__":
    # timer-decorated: prints the answer and the elapsed time itself.
    find_answer()
| Igglyboo/Project-Euler | 1-99/40-49/Problem42.py | Python | unlicense | 743 |
#!python
"""
VMController Host - a general purpose host-side virtual machine controller via exposed hypervisors apis.
"""
try:
import os
import sys
import logging
import warnings
import multiprocessing
import time
import inject
from twisted.internet import reactor
from pkg_resources import resource_stream
from ConfigParser import SafeConfigParser
from optparse import OptionParser
from vmcontroller.common import StompProtocolFactory, StompProtocol
from vmcontroller.host.config import init_config, init_config_file, debug_config
from vmcontroller.host.controller import HyperVisorController
from vmcontroller.host.services import HostStompEngine, HostWords
from vmcontroller.host.services.HostServices import Host, HostXMLRPCService
except ImportError, e:
print "Import error in %s : %s" % (__name__, e)
import sys
sys.exit()
logger = logging.getLogger(__name__)
def init_logging(logfile=None, loglevel=logging.INFO):
    """
    Sets logging configuration.

    @param logfile: File to log messages. Default is None (log to stderr).
    @param loglevel: Log level. Default is logging.INFO.
    """
    fmt = '%(asctime)s - [%(threadName)s] %(filename)s:%(lineno)s - (%(levelname)s) %(message)s'
    kwargs = {'level': loglevel, 'format': fmt}
    if logfile:
        kwargs['filename'] = logfile
    logging.basicConfig(**kwargs)
def init():
    """
    Initializes VMController Host package.

    First parses command line options. Then, creates config object from default cfg file.
    Re-initializes config object if a config file is supplied and sets logger configuration.
    Finally, uses dependency injection to bind objects to names.
    """
    parser = OptionParser()
    parser.add_option("-c", "--config", dest="configfile",
                      help="Read configuration from FILE. (Overrides default config file.)", metavar="FILE")
    parser.add_option("-a", "--host", dest="xmlrpc_host",
                      help="Listen on specified address for XMLRPC interface (default 127.0.0.1)", metavar="ADDR")
    parser.add_option("-p", "--port", dest="xmlrpc_port",
                      help="Listen on specified port for XMLRPC interface (default 50505)", type="int", metavar="PORT")
    parser.add_option("-l", "--logfile", dest="logfile",
                      help="Log to specified file.", metavar="FILE")
    parser.add_option("--debug", action="store_true", dest="debug", default=False,
                      help="Sets logging to debug (unless logging configured in config file).")
    (options, args) = parser.parse_args()

    config = init_config()

    # Wire up the dependency-injection bindings used across the package.
    injector = inject.Injector()
    inject.register(injector)
    injector.bind('config', to=config)
    injector.bind('stompEngine', to=HostStompEngine, scope=inject.appscope)
    injector.bind('words', to=HostWords.getWords)
    injector.bind('stompProtocol', to=StompProtocol, scope=inject.appscope)
    injector.bind('subject', to=Host)
    injector.bind('hvController', to=HyperVisorController)

    # A user-supplied config file overrides the defaults loaded above.
    init_config_file(options.configfile)

    # CLI flags override whatever the config file(s) said.
    if options.xmlrpc_host is not None:
        config.set('xmlrpc', 'host', options.xmlrpc_host)
    if options.xmlrpc_port is not None:
        config.set('xmlrpc', 'port', str(options.xmlrpc_port))

    level = logging.DEBUG if options.debug else logging.INFO
    init_logging(logfile=options.logfile, loglevel=level)
    #debug_config(config)
def start_coilmq(config, server_event, tries=-1, delay=1, backoff=1.5):
    """
    Starts CoilMQ broker (blocking; intended to run in a child process).

    @param config: Config for CoilMQ.
    @param server_event: Event attached to multiprocessing manager; set once
        the broker is listening so the parent can proceed.
    @param tries: Maximum retries to start the server. Default -1 (infinite).
    @param delay: Time to wait before next try to start broker. Default 1.
    @param backoff: Factor applied to the delay after each failure. Default 1.5.
    """
    m_tries = tries
    m_delay = delay
    m_server = None  # NOTE(review): never used below
    try:
        from coilmq.config import config as broker_config
        import coilmq.start
    except ImportError, e:
        print "Import error: %s\nPlease check." % e
        exit()

    # Copy our [broker] section (except the backend 'name') into coilmq's
    # own config object.
    if config.has_section('broker'):
        for (attribute, value) in config.items('broker'):
            if attribute != 'name':
                broker_config.set('coilmq', attribute, value)
                logger.debug("[coilmq] %s = %s" % (attribute, value))

    broker_server = None
    while True:
        try:
            broker_server = coilmq.start.server_from_config(broker_config)
            logger.info("Stomp server listening on %s:%s" % broker_server.server_address)
            server_event.set()  # unblock the waiting parent process
            broker_server.serve_forever()
        except (KeyboardInterrupt, SystemExit):
            logger.info("Stomp server stopped by user interrupt.")
            raise SystemExit()
        except IOError as ex:
            # Typically the port is still in use; retry with backoff.
            logger.error("Exception while starting coilmq broker: '%s'", ex)
            if m_tries != 0:
                logger.debug("Retrying coilmq startup in %.1f seconds...", m_delay)
                time.sleep(m_delay)
                m_delay *= backoff
                m_tries -= 1
            else:
                logger.debug("Ran out of trials (tried %d times) for coilmq startup. Giving up.", tries)
                break
        except Exception, e:
            logger.error("Stomp server stopped due to error: %s" % e)
            logger.exception(e)
            raise SystemExit()
        finally:
            if broker_server: broker_server.server_close()
@inject.param('config')
def init_coilmq(config, brokerTimeout=60):
    """
    Intializes and starts CoilMQ stomp broker as a light weight (multiprocessing) process.

    @param config: Injected config object.
    @param brokerTimeout: Seconds to wait for the broker to come up. Default 60s.

    NOTE(review): returns -1 on timeout instead of raising; the caller
    (start()) does not check this return value.
    """
    manager = multiprocessing.Manager()
    server_event = manager.Event()
    broker = multiprocessing.Process(target=start_coilmq, args=(config, server_event))
    broker.daemon = False
    broker.name = 'VMController-Broker'
    broker.start()

    # Block until the child signals it is listening, or give up.
    server_event.wait(brokerTimeout)
    if not server_event.is_set():
        logger.fatal("Broker not available after %.1f seconds. Giving up", brokerTimeout)
        return -1
@inject.param('config')
def init_morbid(config):
    """
    Starts up light weight, twisted based MorbidQ stomp broker.

    Registers the listener with the (not yet running) twisted reactor;
    exits the process if the port cannot be bound.

    @param config: Injected config object.
    """
    try:
        import morbid
    except ImportError, e:
        import sys
        print "Import error: %s\nPlease check." % e
        sys.exit()
    morbid_factory = morbid.StompFactory(verbose=True)

    broker_host = config.get('broker', 'host')
    broker_port = int(config.get('broker', 'port'))
    try:
        reactor.listenTCP(broker_port, morbid_factory, interface=broker_host)
    except:
        # NOTE(review): bare except; any listen failure is treated as
        # "port not free".
        logger.fatal("Unable to start Morbid, port may not be free. Exiting.")
        import sys
        sys.exit()
    logger.info("Starting MorbidQ broker %s:%s", broker_host, broker_port)
@inject.param('config')
def start(config):
    """
    Starts VMController Host.

    Brings up the configured stomp broker (morbid in-process, or coilmq in
    a child process), exposes the XMLRPC service, connects the stomp
    client and runs the twisted reactor (blocking).

    @param config: The injected config object.
    """
    broker_name = config.get('broker', 'name')
    if broker_name == 'morbid':
        init_morbid()
    elif broker_name == 'coilmq':
        init_coilmq()
    else:
        logger.fatal("No broker found... Exiting")
        exit()

    stompProtocolFactory = StompProtocolFactory()

    xmlrpcService = HostXMLRPCService()
    xmlrpcService.makeEngineAccesible()

    host = config.get('broker', 'host')
    port = int(config.get('broker', 'port'))
    reactor.connectTCP(host, port, stompProtocolFactory)
    reactor.run()
def main():
    """
    Initializes and starts VMController Host.
    """
    # init() must run first so injected config/logging are available.
    init()
    logger.info("Welcome to VMController Host!")
    start()
if __name__ == '__main__':
    try:
        main()
    except (KeyboardInterrupt, SystemExit):
        # Normal shutdown paths (Ctrl-C, explicit exit) -- leave quietly.
        pass
    except Exception, e:  # Python 2 except syntax; this module targets py2.
        logger.error("Server terminated due to error: %s" % e)
        logger.exception(e)
| dgquintas/vmcontroller.unstable | src/vmcontroller.host/vmcontroller/host/__main__.py | Python | bsd-3-clause | 8,090 |
from pymoveit_core.robot_model import *
| ros-planning/moveit | moveit_core/python/src/moveit/core/robot_model/__init__.py | Python | bsd-3-clause | 40 |
# Redis connection string (host `redis`, database 0).
REDIS_URL = 'redis://redis/0'
# User-facing error messages returned by the API.
ERROR_NO_IMAGE = 'Please provide an image'
ERROR_NO_TEXT = 'Please provide some text'
# Maximum (width, height) accepted for input images.
MAX_SIZE = (512, 512)
# Where to store the models weights
# (except for Keras' that are stored in ~/.keras)
WEIGHT_PATH = './weights'
# Original model source: https://drive.google.com/drive/folders/0B_rootXHuswsZ0E4Mjh1ZU5xZVU
DEEPLAB_URL = 'http://eliot.andres.free.fr/models/deeplab_resnet.ckpt'
DEEPLAB_FILENAME = 'deeplab_resnet.ckpt'
# SSD Inception v2 (COCO) checkpoint from the TF object-detection model zoo.
SSD_INCEPTION_URL = 'http://download.tensorflow.org/models/object_detection/ssd_inception_v2_coco_11_06_2017.tar.gz'
SSD_INCEPTION_FILENAME = 'ssd_inception_v2_coco_11_06_2017.tar.gz'
| EliotAndres/pretrained.ml | containers/tensorflow_models/config.py | Python | mit | 644 |
from argparse import ArgumentParser
from snapchat_bots import SnapchatBot
class ReflectorBot(SnapchatBot):
    """Bot that echoes every received snap back to its sender and mirrors
    friend additions/removals on its own friend list."""
    def on_snap(self, sender, snap):
        # Reflect: send the snap straight back to whoever sent it.
        self.send_snap([sender], snap)
    def on_friend_add(self, friend):
        # Auto-accept anyone who adds the bot.
        self.add_friend(friend)
    def on_friend_delete(self, friend):
        # Drop anyone who removes the bot, keeping the lists symmetric.
        self.delete_friend(friend)
if __name__ == '__main__':
    # CLI entry point: credentials are required, then the bot polls forever.
    parser = ArgumentParser("Reflector Bot")
    parser.add_argument('-u', '--username', required=True, type=str, help="Username of the account to run the bot on")
    parser.add_argument('-p', '--password', required=True, type=str, help="Password of the account to run the bot on")
    args = parser.parse_args()
    bot = ReflectorBot(args.username, args.password)
    # Poll for events every 5 seconds (blocking loop).
    bot.listen(timeout=5)
| Gendreau/SnapchatBot | examples/reflectorbot.py | Python | mit | 754 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import with_statement
import abc
import sys
import pprint
import datetime
from python_utils import converters
import six
from . import base
from . import utils
# Module-level aliases for the largest representable date/time values.
MAX_DATE = datetime.date.max
MAX_TIME = datetime.time.max
MAX_DATETIME = datetime.datetime.max
def string_or_lambda(input_):
    '''Normalize *input_* to a render callable.

    A plain string becomes a ``(progress, data, width)`` callable that
    interpolates *data* into it with printf-style formatting; anything
    else (assumed already callable) is passed through unchanged.
    '''
    if not isinstance(input_, six.string_types):
        return input_
    def render_input(progress, data, width):
        return input_ % data
    return render_input
def create_marker(marker):
    """Normalize *marker* to a render callable.

    A single-character string becomes a callable that repeats the character
    proportionally to ``value / max_value`` over *width*; callables are
    returned unchanged.
    """
    def _marker(progress, data, width):
        # With an unknown or zero max_value we cannot scale, so render the
        # bare marker instead of a proportional run.
        if progress.max_value is not base.UnknownLength \
                and progress.max_value > 0:
            length = int(progress.value / progress.max_value * width)
            return (marker * length)
        else:
            return marker
    if isinstance(marker, six.string_types):
        marker = converters.to_unicode(marker)
        assert utils.len_color(marker) == 1, \
            'Markers are required to be 1 char'
        return _marker
    else:
        return marker
class FormatWidgetMixin(object):
    '''Mixin that renders a widget by interpolating a format string.

    With ``new_style=False`` (default) the template uses printf-style ``%``
    interpolation; with ``new_style=True`` it uses ``str.format``.

    Keys available in the *data* mapping include:
     - max_value: The maximum value (can be None with iterators)
     - value: The current value
     - total_seconds_elapsed: The seconds since the bar started
     - seconds_elapsed: The seconds since the bar started modulo 60
     - minutes_elapsed: The minutes since the bar started modulo 60
     - hours_elapsed: The hours since the bar started modulo 24
     - days_elapsed: The hours since the bar started
     - time_elapsed: HH:MM:SS since the bar started, including days
     - percentage: Percentage as a float
    '''
    required_values = []

    def __init__(self, format, new_style=False, **kwargs):
        self.new_style = new_style
        self.format = format

    def __call__(self, progress, data, format=None):
        '''Render the widget: fill *format* (or ``self.format``) from *data*.'''
        template = format if format else self.format
        try:
            if self.new_style:
                return template.format(**data)
            return template % data
        except (TypeError, KeyError):
            # Dump the offending data to stderr before re-raising so that
            # format errors are diagnosable.
            print('Error while formatting %r' % self.format, file=sys.stderr)
            pprint.pprint(data, stream=sys.stderr)
            raise
class WidthWidgetMixin(object):
    '''Mixin that hides a widget when the terminal is outside a size range,
    so the progressbar fits on both large and small screens.

    Variables available:
     - min_width: Only display the widget if at least `min_width` is left
     - max_width: Only display the widget if at most `max_width` is left

    >>> class Progress(object):
    ...     term_width = 0
    >>> WidthWidgetMixin(5, 10).check_size(Progress)
    False
    >>> Progress.term_width = 5
    >>> WidthWidgetMixin(5, 10).check_size(Progress)
    True
    >>> Progress.term_width = 10
    >>> WidthWidgetMixin(5, 10).check_size(Progress)
    True
    >>> Progress.term_width = 11
    >>> WidthWidgetMixin(5, 10).check_size(Progress)
    False
    '''

    def __init__(self, min_width=None, max_width=None, **kwargs):
        self.min_width = min_width
        self.max_width = max_width

    def check_size(self, progress):
        '''Return True when the widget should be rendered at this width.'''
        width = progress.term_width
        too_narrow = bool(self.min_width) and self.min_width > width
        too_wide = bool(self.max_width) and self.max_width < width
        return not (too_narrow or too_wide)
class WidgetBase(WidthWidgetMixin):
    __metaclass__ = abc.ABCMeta  # Python 2 metaclass hook; ignored on Python 3.
    # NOTE(review): the string below is a bare statement, not the class
    # docstring, because it follows the __metaclass__ assignment.
    '''The base class for all widgets
    The ProgressBar will call the widget's update value when the widget should
    be updated. The widget's size may change between calls, but the widget may
    display incorrectly if the size changes drastically and repeatedly.
    The boolean INTERVAL informs the ProgressBar that it should be
    updated more often because it is time sensitive.
    The widgets are only visible if the screen is within a
    specified size range so the progressbar fits on both large and small
    screens.
    WARNING: Widgets can be shared between multiple progressbars so any state
    information specific to a progressbar should be stored within the
    progressbar instead of the widget.
    Variables available:
    - min_width: Only display the widget if at least `min_width` is left
    - max_width: Only display the widget if at most `max_width` is left
    - weight: Widgets with a higher `weigth` will be calculated before widgets
    with a lower one
    '''
    @abc.abstractmethod
    def __call__(self, progress, data):
        '''Updates the widget.
        progress - a reference to the calling ProgressBar
        '''
class AutoWidthWidgetBase(WidgetBase):
    '''The base class for all variable width widgets.
    This widget is much like the \\hfill command in TeX, it will expand to
    fill the line. You can use more than one in the same line, and they will
    all have the same width, and together will fill the line.
    '''
    # Subclasses receive the width they must fill as an extra argument.
    @abc.abstractmethod
    def __call__(self, progress, data, width):
        '''Updates the widget providing the total width the widget must fill.
        progress - a reference to the calling ProgressBar
        width - The total width the widget must fill
        '''
class TimeSensitiveWidgetBase(WidgetBase):
    '''The base class for all time sensitive widgets.
    Some widgets like timers would become out of date unless updated at least
    every `INTERVAL`
    '''
    # Minimum refresh interval requested from the ProgressBar.
    INTERVAL = datetime.timedelta(milliseconds=100)
class FormatLabel(FormatWidgetMixin, WidgetBase):
    '''Displays a formatted label
    >>> label = FormatLabel('%(value)s', min_width=5, max_width=10)
    >>> class Progress(object):
    ...     pass
    >>> label = FormatLabel('{value} :: {value:^6}', new_style=True)
    >>> str(label(Progress, dict(value='test')))
    'test :: test '
    '''
    # Maps template names to a (data key, optional transform) pair; the
    # transform is applied to the value before formatting.
    mapping = {
        'finished': ('end_time', None),
        'last_update': ('last_update_time', None),
        'max': ('max_value', None),
        'seconds': ('seconds_elapsed', None),
        'start': ('start_time', None),
        'elapsed': ('total_seconds_elapsed', utils.format_time),
        'value': ('value', None),
    }
    def __init__(self, format, **kwargs):
        FormatWidgetMixin.__init__(self, format=format, **kwargs)
        WidgetBase.__init__(self, **kwargs)
    def __call__(self, progress, data, **kwargs):
        '''Copy/transform the mapped keys into *data*, then format.'''
        for name, (key, transform) in self.mapping.items():
            try:
                if transform is None:
                    data[name] = data[key]
                else:
                    data[name] = transform(data[key])
            except (KeyError, ValueError, IndexError): # pragma: no cover
                # Missing or malformed source values are simply skipped.
                pass
        return FormatWidgetMixin.__call__(self, progress, data, **kwargs)
class Timer(FormatLabel, TimeSensitiveWidgetBase):
    '''WidgetBase which displays the elapsed seconds.'''
    def __init__(self, format='Elapsed Time: %(elapsed)s', **kwargs):
        FormatLabel.__init__(self, format=format, **kwargs)
        TimeSensitiveWidgetBase.__init__(self, **kwargs)
    # This is exposed as a static method for backwards compatibility
    format_time = staticmethod(utils.format_time)
class SamplesMixin(TimeSensitiveWidgetBase):
    '''
    Mixing for widgets that average multiple measurements

    Note that samples can be either an integer or a timedelta to indicate a
    certain amount of time

    >>> class progress:
    ...     last_update_time = datetime.datetime.now()
    ...     value = 1
    ...     extra = dict()
    >>> samples = SamplesMixin(samples=2)
    >>> samples(progress, None, True)
    (None, None)
    >>> progress.last_update_time += datetime.timedelta(seconds=1)
    >>> samples(progress, None, True) == (datetime.timedelta(seconds=1), 0)
    True
    >>> progress.last_update_time += datetime.timedelta(seconds=1)
    >>> samples(progress, None, True) == (datetime.timedelta(seconds=1), 0)
    True
    >>> samples = SamplesMixin(samples=datetime.timedelta(seconds=1))
    >>> _, value = samples(progress, None)
    >>> value
    [1, 1]
    >>> samples(progress, None, True) == (datetime.timedelta(seconds=1), 0)
    True
    '''
    def __init__(self, samples=datetime.timedelta(seconds=2), key_prefix=None,
                 **kwargs):
        self.samples = samples
        # Bug fix: the original expression `(self.__class__.__name__ or
        # key_prefix)` always evaluated to the class name -- a class name is
        # never falsy -- so a caller-supplied key_prefix was silently
        # ignored. The default (None) still falls back to the class name.
        self.key_prefix = (key_prefix or self.__class__.__name__) + '_'
        TimeSensitiveWidgetBase.__init__(self, **kwargs)

    def get_sample_times(self, progress, data):
        '''Return (creating if needed) the stored sample timestamps.

        State lives on `progress.extra`, not on the widget, because widgets
        may be shared between progressbars.
        '''
        return progress.extra.setdefault(self.key_prefix + 'sample_times', [])

    def get_sample_values(self, progress, data):
        '''Return (creating if needed) the stored sampled progress values.'''
        return progress.extra.setdefault(self.key_prefix + 'sample_values', [])

    def __call__(self, progress, data, delta=False):
        '''Record a sample when one is due and return the samples.

        With ``delta=True`` return the (time, value) difference between the
        oldest and newest sample instead, or ``(None, None)`` when no time
        has passed yet.
        '''
        sample_times = self.get_sample_times(progress, data)
        sample_values = self.get_sample_values(progress, data)
        if sample_times:
            sample_time = sample_times[-1]
        else:
            sample_time = datetime.datetime.min
        if progress.last_update_time - sample_time > self.INTERVAL:
            # Add a sample but limit the size to `num_samples`
            sample_times.append(progress.last_update_time)
            sample_values.append(progress.value)
            if isinstance(self.samples, datetime.timedelta):
                minimum_time = progress.last_update_time - self.samples
                minimum_value = sample_values[-1]
                # Drop samples older than the time window, but always keep
                # at least two so a delta can still be computed.
                while (sample_times[2:] and
                       minimum_time > sample_times[1] and
                       minimum_value > sample_values[1]):
                    sample_times.pop(0)
                    sample_values.pop(0)
            else:
                # Integer sample count: keep a bounded sliding window.
                if len(sample_times) > self.samples:
                    sample_times.pop(0)
                    sample_values.pop(0)
        if delta:
            delta_time = sample_times[-1] - sample_times[0]
            delta_value = sample_values[-1] - sample_values[0]
            if delta_time:
                return delta_time, delta_value
            else:
                return None, None
        else:
            return sample_times, sample_values
class ETA(Timer):
    '''WidgetBase which attempts to estimate the time of arrival.'''
    def __init__(
            self,
            format_not_started='ETA: --:--:--',
            format_finished='Time: %(elapsed)8s',
            format='ETA: %(eta)8s',
            format_zero='ETA: 00:00:00',
            format_NA='ETA: N/A',
            **kwargs):
        Timer.__init__(self, **kwargs)
        # One template per display state; see __call__ for the selection.
        self.format_not_started = format_not_started
        self.format_finished = format_finished
        self.format = format
        self.format_zero = format_zero
        self.format_NA = format_NA
    def _calculate_eta(self, progress, data, value, elapsed):
        '''Return the estimated remaining seconds (0 when nothing elapsed).'''
        if elapsed:
            # The max() prevents zero division errors
            per_item = elapsed.total_seconds() / max(value, 1e-6)
            remaining = progress.max_value - data['value']
            eta_seconds = remaining * per_item
        else:
            eta_seconds = 0
        return eta_seconds
    def __call__(self, progress, data, value=None, elapsed=None):
        '''Updates the widget to show the ETA or total time when finished.'''
        if value is None:
            value = data['value']
        if elapsed is None:
            elapsed = data['time_elapsed']
        ETA_NA = False
        try:
            data['eta_seconds'] = self._calculate_eta(
                progress, data, value=value, elapsed=elapsed)
        except TypeError:
            # The subtraction in _calculate_eta fails when max_value is not
            # numeric (e.g. unknown length) -- report N/A in that case.
            data['eta_seconds'] = None
            ETA_NA = True
        data['eta'] = None
        if data['eta_seconds']:
            try:
                data['eta'] = utils.format_time(data['eta_seconds'])
            except (ValueError, OverflowError): # pragma: no cover
                pass
        # Pick the template matching the current state, in priority order:
        # not started yet, finished, normal ETA, unknown, zero.
        if data['value'] == progress.min_value:
            format = self.format_not_started
        elif progress.end_time:
            format = self.format_finished
        elif data['eta']:
            format = self.format
        elif ETA_NA:
            format = self.format_NA
        else:
            format = self.format_zero
        return Timer.__call__(self, progress, data, format=format)
class AbsoluteETA(ETA):
    '''Widget which attempts to estimate the absolute time of arrival.'''
    def _calculate_eta(self, progress, data, value, elapsed):
        '''Return the projected wall-clock completion time as a datetime.'''
        eta_seconds = ETA._calculate_eta(self, progress, data, value, elapsed)
        now = datetime.datetime.now()
        try:
            return now + datetime.timedelta(seconds=eta_seconds)
        except OverflowError: # pragma: no cover
            # Estimates beyond the representable range saturate at max.
            return datetime.datetime.max
    def __init__(
            self,
            format_not_started='Estimated finish time: ----/--/-- --:--:--',
            format_finished='Finished at: %(elapsed)s',
            format='Estimated finish time: %(eta)s',
            **kwargs):
        ETA.__init__(self, format_not_started=format_not_started,
                     format_finished=format_finished, format=format, **kwargs)
class AdaptiveETA(ETA, SamplesMixin):
    '''WidgetBase which attempts to estimate the time of arrival.
    Uses a sampled average of the speed based on the 10 last updates.
    Very convenient for resuming the progress halfway.
    '''
    def __init__(self, **kwargs):
        ETA.__init__(self, **kwargs)
        SamplesMixin.__init__(self, **kwargs)
    def __call__(self, progress, data):
        # Estimate from the sampled (time, value) delta instead of totals.
        elapsed, value = SamplesMixin.__call__(self, progress, data,
                                               delta=True)
        if not elapsed:
            # No time elapsed between samples yet -- fall back to "zero".
            value = None
            elapsed = 0
        return ETA.__call__(self, progress, data, value=value, elapsed=elapsed)
class DataSize(FormatWidgetMixin, WidgetBase):
    '''
    Widget for showing an amount of data transferred/processed.
    Automatically formats the value (assumed to be a count of bytes) with an
    appropriate sized unit, based on the IEC binary prefixes (powers of 1024).
    '''
    def __init__(
            self, variable='value',
            format='%(scaled)5.1f %(prefix)s%(unit)s', unit='B',
            prefixes=('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'),
            **kwargs):
        # variable: which data key holds the byte count to display.
        self.variable = variable
        self.unit = unit
        self.prefixes = prefixes
        FormatWidgetMixin.__init__(self, format=format, **kwargs)
        WidgetBase.__init__(self, **kwargs)
    def __call__(self, progress, data):
        value = data[self.variable]
        if value is not None:
            # scale_1024 returns the scaled value and the prefix index.
            scaled, power = utils.scale_1024(value, len(self.prefixes))
        else:
            scaled = power = 0
        data['scaled'] = scaled
        data['prefix'] = self.prefixes[power]
        data['unit'] = self.unit
        return FormatWidgetMixin.__call__(self, progress, data)
class FileTransferSpeed(FormatWidgetMixin, TimeSensitiveWidgetBase):
    '''
    WidgetBase for showing the transfer speed (useful for file transfers).
    '''
    def __init__(
            self, format='%(scaled)5.1f %(prefix)s%(unit)-s/s',
            inverse_format='%(scaled)5.1f s/%(prefix)s%(unit)-s', unit='B',
            prefixes=('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'),
            **kwargs):
        # inverse_format is used for very slow transfers: "seconds per unit"
        # instead of "units per second".
        self.unit = unit
        self.prefixes = prefixes
        self.inverse_format = inverse_format
        FormatWidgetMixin.__init__(self, format=format, **kwargs)
        TimeSensitiveWidgetBase.__init__(self, **kwargs)
    def _speed(self, value, elapsed):
        '''Return the 1024-scaled speed and its prefix index.'''
        speed = float(value) / elapsed
        return utils.scale_1024(speed, len(self.prefixes))
    def __call__(self, progress, data, value=None, total_seconds_elapsed=None):
        '''Updates the widget with the current SI prefixed speed.'''
        if value is None:
            value = data['value']
        elapsed = utils.deltas_to_seconds(
            total_seconds_elapsed,
            data['total_seconds_elapsed'])
        # Guard against zero/near-zero values and elapsed time before
        # computing a rate.
        if value is not None and elapsed is not None \
                and elapsed > 2e-6 and value > 2e-6: # =~ 0
            scaled, power = self._speed(value, elapsed)
        else:
            scaled = power = 0
        data['unit'] = self.unit
        if power == 0 and scaled < 0.1:
            # Below 0.1 unit/s: show the reciprocal (seconds per unit).
            if scaled > 0:
                scaled = 1 / scaled
            data['scaled'] = scaled
            data['prefix'] = self.prefixes[0]
            return FormatWidgetMixin.__call__(self, progress, data,
                                              self.inverse_format)
        else:
            data['scaled'] = scaled
            data['prefix'] = self.prefixes[power]
            return FormatWidgetMixin.__call__(self, progress, data)
class AdaptiveTransferSpeed(FileTransferSpeed, SamplesMixin):
    '''WidgetBase for showing the transfer speed, based on the last X samples
    '''
    def __init__(self, **kwargs):
        FileTransferSpeed.__init__(self, **kwargs)
        SamplesMixin.__init__(self, **kwargs)
    def __call__(self, progress, data):
        # Compute speed from the sampled delta rather than the totals.
        elapsed, value = SamplesMixin.__call__(self, progress, data,
                                               delta=True)
        return FileTransferSpeed.__call__(self, progress, data, value, elapsed)
class AnimatedMarker(TimeSensitiveWidgetBase):
    '''An animated marker for the progress bar which defaults to appear as if
    it were rotating.
    '''
    def __init__(self, markers='|/-\\', default=None, fill='', **kwargs):
        # markers: characters cycled through on each update.
        # default: character shown once the bar has finished (defaults to
        # the first marker).
        # fill: optional marker used to draw progress behind the animation.
        self.markers = markers
        self.default = default or markers[0]
        self.fill = create_marker(fill) if fill else None
        WidgetBase.__init__(self, **kwargs)
    def __call__(self, progress, data, width=None):
        '''Updates the widget to show the next marker or the first marker when
        finished'''
        if progress.end_time:
            return self.default
        if self.fill:
            # Cut the last character so we can replace it with our marker
            fill = self.fill(progress, data, width)[:-1]
        else:
            fill = ''
        marker = self.markers[data['updates'] % len(self.markers)]
        # Python 3 returns an int when indexing bytes
        if isinstance(marker, int): # pragma: no cover
            # Bug fix: `bytes(marker)` builds `marker` zero bytes on
            # Python 3; `bytes([marker])` reconstructs the one-byte marker.
            marker = bytes([marker])
            fill = fill.encode()
        else:
            # cast fill to the same type as marker
            fill = type(marker)(fill)
        return fill + marker
# Alias for backwards compatibility; older releases shipped this widget
# under the name RotatingMarker.
RotatingMarker = AnimatedMarker
class Counter(FormatWidgetMixin, WidgetBase):
    '''Displays the current count'''
    def __init__(self, format='%(value)d', **kwargs):
        FormatWidgetMixin.__init__(self, format=format, **kwargs)
        WidgetBase.__init__(self, format=format, **kwargs)
    def __call__(self, progress, data, format=None):
        # Plain pass-through: the mixin interpolates %(value)d from data.
        return FormatWidgetMixin.__call__(self, progress, data, format)
class Percentage(FormatWidgetMixin, WidgetBase):
    '''Displays the current percentage as a number with a percent sign.'''
    def __init__(self, format='%(percentage)3d%%', **kwargs):
        FormatWidgetMixin.__init__(self, format=format, **kwargs)
        WidgetBase.__init__(self, format=format, **kwargs)
    def __call__(self, progress, data, format=None):
        '''Render the percentage, or ``N/A%`` when it cannot be computed.'''
        # Bug fix: the original check `not data['percentage']` also matched
        # a legitimate 0, so the bar rendered 'N/A%' instead of '  0%' at
        # the start. Only a missing (None) percentage is truly unknown.
        if 'percentage' in data and data['percentage'] is None:
            return FormatWidgetMixin.__call__(self, progress, data,
                                              format='N/A%%')
        return FormatWidgetMixin.__call__(self, progress, data)
class SimpleProgress(FormatWidgetMixin, WidgetBase):
    '''Returns progress as a count of the total (e.g.: "5 of 47")'''
    DEFAULT_FORMAT = '%(value_s)s of %(max_value_s)s'
    def __init__(self, format=DEFAULT_FORMAT, **kwargs):
        FormatWidgetMixin.__init__(self, format=format, **kwargs)
        WidgetBase.__init__(self, format=format, **kwargs)
        # Cache of computed output widths keyed by (min_value, max_value).
        self.max_width_cache = dict(default=self.max_width)
    def __call__(self, progress, data, format=None):
        # If max_value is not available, display N/A
        if data.get('max_value'):
            data['max_value_s'] = data.get('max_value')
        else:
            data['max_value_s'] = 'N/A'
        # if value is not available it's the zeroth iteration
        if data.get('value'):
            data['value_s'] = data['value']
        else:
            data['value_s'] = 0
        formatted = FormatWidgetMixin.__call__(self, progress, data,
                                               format=format)
        # Guess the maximum width from the min and max value
        key = progress.min_value, progress.max_value
        max_width = self.max_width_cache.get(key, self.max_width)
        if not max_width:
            # Render both extremes once and remember the widest result.
            temporary_data = data.copy()
            for value in key:
                if value is None: # pragma: no cover
                    continue
                temporary_data['value'] = value
                width = progress.custom_len(FormatWidgetMixin.__call__(
                    self, progress, temporary_data, format=format))
                if width: # pragma: no branch
                    max_width = max(max_width or 0, width)
            self.max_width_cache[key] = max_width
        # Adjust the output to have a consistent size in all cases
        if max_width: # pragma: no branch
            formatted = formatted.rjust(max_width)
        return formatted
class Bar(AutoWidthWidgetBase):
    '''A progress bar which stretches to fill the line.'''
    def __init__(self, marker='#', left='|', right='|', fill=' ',
                 fill_left=True, **kwargs):
        '''Creates a customizable progress bar.
        The callable takes the same parameters as the `__call__` method
        marker - string or callable object to use as a marker
        left - string or callable object to use as a left border
        right - string or callable object to use as a right border
        fill - character to use for the empty part of the progress bar
        fill_left - whether to fill from the left or the right
        '''
        # Normalize every part to a render callable up front.
        self.marker = create_marker(marker)
        self.left = string_or_lambda(left)
        self.right = string_or_lambda(right)
        self.fill = string_or_lambda(fill)
        self.fill_left = fill_left
        AutoWidthWidgetBase.__init__(self, **kwargs)
    def __call__(self, progress, data, width):
        '''Updates the progress bar and its subcomponents'''
        left = converters.to_unicode(self.left(progress, data, width))
        right = converters.to_unicode(self.right(progress, data, width))
        # The borders eat into the width available for the marker itself.
        width -= progress.custom_len(left) + progress.custom_len(right)
        marker = converters.to_unicode(self.marker(progress, data, width))
        fill = converters.to_unicode(self.fill(progress, data, width))
        # Pad the marker run with the fill character on the chosen side.
        if self.fill_left:
            marker = marker.ljust(width, fill)
        else:
            marker = marker.rjust(width, fill)
        return left + marker + right
class ReverseBar(Bar):
    '''A bar which has a marker that goes from right to left'''
    def __init__(self, marker='#', left='|', right='|', fill=' ',
                 fill_left=False, **kwargs):
        '''Creates a customizable progress bar.
        marker - string or updatable object to use as a marker
        left - string or updatable object to use as a left border
        right - string or updatable object to use as a right border
        fill - character to use for the empty part of the progress bar
        fill_left - whether to fill from the left or the right
        '''
        # Identical to Bar except fill_left defaults to False, which makes
        # the marker grow from the right-hand side.
        Bar.__init__(self, marker=marker, left=left, right=right, fill=fill,
                     fill_left=fill_left, **kwargs)
class BouncingBar(Bar, TimeSensitiveWidgetBase):
    '''A bar which has a marker which bounces from side to side.'''
    INTERVAL = datetime.timedelta(milliseconds=100)
    def __call__(self, progress, data, width):
        '''Updates the progress bar and its subcomponents'''
        left = converters.to_unicode(self.left(progress, data, width))
        right = converters.to_unicode(self.right(progress, data, width))
        width -= progress.custom_len(left) + progress.custom_len(right)
        marker = converters.to_unicode(self.marker(progress, data, width))
        fill = converters.to_unicode(self.fill(progress, data, width))
        if width: # pragma: no branch
            # Advance one position per INTERVAL of elapsed time.
            value = int(
                data['total_seconds_elapsed'] / self.INTERVAL.total_seconds())
            # a/b are the fill lengths on either side of the marker; they
            # swap direction every full sweep of the bar.
            a = value % width
            b = width - a - 1
            if value % (width * 2) >= width:
                a, b = b, a
            if self.fill_left:
                marker = a * fill + marker + b * fill
            else:
                marker = b * fill + marker + a * fill
        return left + marker + right
class FormatCustomText(FormatWidgetMixin, WidgetBase):
    '''Displays a static format string filled from a user-supplied mapping
    which can be updated at runtime through `update_mapping`.'''
    # Class-level default for the `mapping` argument below.
    mapping = {}
    def __init__(self, format, mapping=mapping, **kwargs):
        self.format = format
        # Bug fix: copy the mapping. The default argument is the class-level
        # dict above, so without the copy `update_mapping` would mutate a
        # dict shared by every instance created without an explicit mapping.
        self.mapping = dict(mapping)
        FormatWidgetMixin.__init__(self, format=format, **kwargs)
        WidgetBase.__init__(self, **kwargs)
    def update_mapping(self, **mapping):
        '''Merge *mapping* into this widget's substitution values.'''
        self.mapping.update(mapping)
    def __call__(self, progress, data):
        # Note: formats from self.mapping, not from the progress data.
        return FormatWidgetMixin.__call__(
            self, progress, self.mapping, self.format)
class VariableMixin(object):
    '''Mixin providing a validated name for a user-defined variable.

    The name must be a single-word string; anything else raises.
    '''

    def __init__(self, name, **kwargs):
        is_text = isinstance(name, str)
        if not is_text:
            raise TypeError('Variable(): argument must be a string')
        words = name.split()
        if len(words) > 1:
            raise ValueError('Variable(): argument must be single word')
        self.name = name
class MultiRangeBar(Bar, VariableMixin):
    '''
    A bar with multiple sub-ranges, each represented by a different symbol
    The various ranges are represented on a user-defined variable, formatted as
    .. code-block:: python
        [
            ['Symbol1', amount1],
            ['Symbol2', amount2],
            ...
        ]
    '''
    def __init__(self, name, markers, **kwargs):
        VariableMixin.__init__(self, name)
        Bar.__init__(self, **kwargs)
        # One render callable per sub-range symbol.
        self.markers = [
            string_or_lambda(marker)
            for marker in markers
        ]
    def get_values(self, progress, data):
        '''Return the per-range amounts from the named user variable.'''
        return data['variables'][self.name] or []
    def __call__(self, progress, data, width):
        '''Updates the progress bar and its subcomponents'''
        left = converters.to_unicode(self.left(progress, data, width))
        right = converters.to_unicode(self.right(progress, data, width))
        width -= progress.custom_len(left) + progress.custom_len(right)
        values = self.get_values(progress, data)
        values_sum = sum(values)
        if width and values_sum:
            middle = ''
            values_accumulated = 0
            width_accumulated = 0
            # Allocate each range a width proportional to its share of the
            # total; accumulating avoids rounding drift across ranges.
            for marker, value in zip(self.markers, values):
                marker = converters.to_unicode(marker(progress, data, width))
                assert utils.len_color(marker) == 1
                values_accumulated += value
                item_width = int(values_accumulated / values_sum * width)
                item_width -= width_accumulated
                width_accumulated += item_width
                middle += item_width * marker
        else:
            # Nothing to show yet: draw an empty bar of fill characters.
            fill = converters.to_unicode(self.fill(progress, data, width))
            assert utils.len_color(fill) == 1
            middle = fill * width
        return left + middle + right
class MultiProgressBar(MultiRangeBar):
    '''Visualizes many sub-progresses (each a fraction in [0..1] or a
    (value, max) pair) as a distribution over block characters.'''
    def __init__(self,
                 name,
                 # NOTE: the markers are not whitespace even though some
                 # terminals don't show the characters correctly!
                 markers=' ▁▂▃▄▅▆▇█',
                 **kwargs):
        MultiRangeBar.__init__(self, name=name,
                               markers=list(reversed(markers)), **kwargs)
    def get_values(self, progress, data):
        '''Bucket each sub-progress into the marker histogram.'''
        ranges = [0] * len(self.markers)
        for progress in data['variables'][self.name] or []:
            if not isinstance(progress, (int, float)):
                # Progress is (value, max)
                progress_value, progress_max = progress
                progress = float(progress_value) / float(progress_max)
            if progress < 0 or progress > 1:
                raise ValueError(
                    'Range value needs to be in the range [0..1], got %s' %
                    progress)
            # Distribute the value between the two neighbouring buckets in
            # proportion to its fractional position.
            range_ = progress * (len(ranges) - 1)
            pos = int(range_)
            frac = range_ % 1
            ranges[pos] += (1 - frac)
            if (frac):
                ranges[pos + 1] += (frac)
        if self.fill_left:
            ranges = list(reversed(ranges))
        return ranges
class Variable(FormatWidgetMixin, VariableMixin, WidgetBase):
    '''Displays a custom variable.'''
    def __init__(self, name, format='{name}: {formatted_value}',
                 width=6, precision=3, **kwargs):
        '''Creates a Variable associated with the given name.'''
        self.format = format
        self.width = width
        self.precision = precision
        VariableMixin.__init__(self, name=name)
        WidgetBase.__init__(self, **kwargs)
    def __call__(self, progress, data):
        # Look up the user variable and build a local formatting context.
        value = data['variables'][self.name]
        context = data.copy()
        context['value'] = value
        context['name'] = self.name
        context['width'] = self.width
        context['precision'] = self.precision
        try:
            # Make sure to try and cast the value first, otherwise the
            # formatting will generate warnings/errors on newer Python releases
            value = float(value)
            fmt = '{value:{width}.{precision}}'
            context['formatted_value'] = fmt.format(**context)
        except (TypeError, ValueError):
            # Non-numeric values are padded as-is; unset values render as
            # a run of dashes of the configured width.
            if value:
                context['formatted_value'] = '{value:{width}}'.format(
                    **context)
            else:
                context['formatted_value'] = '-' * self.width
        return self.format.format(**context)
class DynamicMessage(Variable):
    '''Kept for backwards compatibility, please use `Variable` instead.'''
    pass
class CurrentTime(FormatWidgetMixin, TimeSensitiveWidgetBase):
    '''Widget which displays the current (date)time with seconds resolution.'''
    INTERVAL = datetime.timedelta(seconds=1)
    def __init__(self, format='Current Time: %(current_time)s',
                 microseconds=False, **kwargs):
        # microseconds: keep sub-second precision instead of truncating.
        self.microseconds = microseconds
        FormatWidgetMixin.__init__(self, format=format, **kwargs)
        TimeSensitiveWidgetBase.__init__(self, **kwargs)
    def __call__(self, progress, data):
        # Expose both the time-of-day and the full datetime to the template.
        data['current_time'] = self.current_time()
        data['current_datetime'] = self.current_datetime()
        return FormatWidgetMixin.__call__(self, progress, data)
    def current_datetime(self):
        '''Return now(), truncated to whole seconds unless microseconds=True.'''
        now = datetime.datetime.now()
        if not self.microseconds:
            now = now.replace(microsecond=0)
        return now
    def current_time(self):
        '''Return only the time-of-day part of current_datetime().'''
        return self.current_datetime().time()
| dennerlager/sepibrews | sepibrews/progressbar/widgets.py | Python | gpl-3.0 | 31,775 |
import visa
__all__ = ['TempStage', 'TMS91', 'TMS94']
# Original code kindly contributed by Filip Dominec <dominecf@fzu.cz>
# See http://www.fzu.cz/~dominecf/python/FDLabInstruments.py
class TempStage(object, visa.SerialInstrument):
    """Base class for Linkam temperature stages."""
    # NOTE(review): listing `object` before `visa.SerialInstrument` only
    # works if SerialInstrument is an old-style (Python 2) class; it makes
    # this class new-style so the @property descriptors below function.
    # Confirm against the installed (legacy) pyvisa version.
    def __init__(self, *args, **kwargs):
        visa.SerialInstrument.__init__(self, *args, **kwargs)
        self.term_chars = visa.CR
        # Default ramp rate in deg C/min; see the `rate` property.
        self.rate = 20
    # Dummy context manager
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        pass
    @property
    def temp(self):
        """Temperature in deg C"""
        # Subclasses must implement the model-specific 'T' query parsing.
        raise NotImplementedError
    @property
    def status(self):
        """Current status byte"""
        # The first byte of the 'T' reply carries the status.
        raw_string = self.ask('T')
        return ord(raw_string[0])
    @property
    def rate(self):
        """Rate of temperature change (deg C/min); default is 20"""
        return self._rate
    @rate.setter
    def rate(self, value):
        # Stored as an absolute value; the sign of a ramp is implied by the
        # target temperature, not the rate.
        self._rate = abs(value)
class TMS91(TempStage):
    """Linkam TMS 91 controller (9600 baud protocol)."""
    def __init__(self, *args, **kwargs):
        super(TMS91, self).__init__(*args, **kwargs)
        # From Linkam91 manual: 9600 baud, 8 data bits, 1 stop bit, no parity,
        # using an RTS / CTS handshake.
        # Serial cable (linkam D25 -> serial D9) pins: 2-2 3-3 4-8 5-7 7-5;
        # connect 6,4 of PC together.
        self.baud_rate = 9600
        self.timeout = 20
    @property
    def temp(self):
        # Reply format: status byte followed by the temperature in plain
        # decimal text.
        raw_string = self.ask('T')
        return float(raw_string[1:])
    @temp.setter
    def temp(self, value):
        # NOTE(review): '{:d}' requires an integer `value`/rate; a float
        # target temperature raises ValueError -- confirm caller contract.
        self.write('R1{:d}'.format(self.rate)) # Sets rate
        self.write('L1{:d}'.format(value)) # Sets target temperature
        self.write('R20') # End of current profile
        self.write('S') # Starts action
class TMS94(TempStage):
    """Linkam TMS 94 controller (19200 baud protocol)."""
    def __init__(self, *args, **kwargs):
        super(TMS94, self).__init__(*args, **kwargs)
        # From Linkam94 manual: 19200 baud, 8 data bits, 1 stop bit, no parity
        # using an RTS / CTS handshake.
        # All commands from the PC must end with a carriage return.
        # NOTE: When probed at baud_rate=9600, this model freezes.
        self.baud_rate = 19200
        self.timeout = 3
    @property
    def temp(self):
        # Temperature is encoded in reply chars 6..9 as hex tenths of a
        # degree C.
        raw_string = self.ask('T')
        return int(raw_string[6:10], 16) / 10.0
    @temp.setter
    def temp(self, value):
        # NOTE(review): '{:04d}' requires integer operands; a float target
        # temperature makes `value * 10` a float and raises -- confirm.
        self.write('R1{:04d}'.format(self.rate * 100)) # Sets rate
        self.write('L1{:04d}'.format(value * 10)) # Sets target temperature
        self.write('S') # Starts action
| ptomato/REP-instrumentation | rep/linkam/tms.py | Python | gpl-3.0 | 2,599 |
# Copyright (c) 2017 Civic Knowledge. This file is licensed under the terms of the
# MIT License, included in this distribution as LICENSE.txt
""" """
from rowgenerators.source import Source
##
## FIXME This is probably broken
## But it's Socrata, so who really cares?
class SocrataSource(Source):
    """Iterates a CSV soruce from the JSON produced by Socrata """
    def __init__(self, spec, dflo, cache, working_dir=None, env=None):
        super(SocrataSource, self).__init__(spec, cache)
        self._socrata_meta = None
        # Socrata serves the row data as CSV at <dataset url>/rows.csv.
        self._download_url = spec.url + '/rows.csv'
        # NOTE(review): CsvSource is not imported anywhere in this module,
        # so constructing a SocrataSource raises NameError (consistent with
        # the FIXME above).
        self._csv_source = CsvSource(spec, dflo)
    @classmethod
    def download_url(cls, spec):
        # Class-level variant of the URL construction used in __init__.
        return spec.url + '/rows.csv'
    @property
    def _meta(self):
        """Return the Socrata meta data, as a nested dict"""
        import requests
        # Fetched lazily on first access and cached for later calls.
        if not self._socrata_meta:
            r = requests.get(self.spec.url)
            r.raise_for_status()
            self._socrata_meta = r.json()
        return self._socrata_meta
    @property
    def headers(self):
        """Return headers. """
        return [c['fieldName'] for c in self._meta['columns']]
    # Socrata column datatype -> python type name.
    datatype_map = {
        'text': 'str',
        'number': 'float',
    }
    @property
    def meta(self):
        """Return metadata """
        return {
            'title': self._meta.get('name'),
            'summary': self._meta.get('description'),
            'columns': [
                {
                    'name': c.get('fieldName'),
                    'position': c.get('position'),
                    'description': c.get('name') + '.' + c.get('description'),
                }
                for c in self._meta.get('columns')
            ]
        }
    def __iter__(self):
        # Delegate row iteration to the wrapped CSV source, bracketed by the
        # base class start/finish hooks.
        self.start()
        for i, row in enumerate(self._csv_source):
            # if i == 0:
            #    yield self.headers
            yield row
        self.finish()
| CivicKnowledge/rowgenerators | rowgenerators/generator/socrata.py | Python | mit | 1,962 |
# coding: utf-8
from PyQt4.QtGui import QDialog, QFormLayout
from qgis.gui import (QgsFieldComboBox, QgsMapLayerComboBox,
QgsMapLayerProxyModel)
# Sample: a dialog with a vector-layer combobox driving a field combobox.
# Create dialog
new_dialog = QDialog()
# Add combobox for layer and field
map_layer_combo_box = QgsMapLayerComboBox()
map_layer_combo_box.setCurrentIndex(-1)
# Restrict the selectable layers to vector layers only.
map_layer_combo_box.setFilters(QgsMapLayerProxyModel.VectorLayer)
field_combo_box = QgsFieldComboBox()
# Create a form layout and add the two combobox
layout = QFormLayout()
layout.addWidget(map_layer_combo_box)
layout.addWidget(field_combo_box)
# Add signal event
map_layer_combo_box.layerChanged.connect(field_combo_box.setLayer) # setLayer is a native slot function
def on_field_changed(fieldName):
    # Demo slot: just log which field was picked.
    print(fieldName)
    print("Layer field changed")
field_combo_box.fieldChanged.connect(on_field_changed)
new_dialog.setLayout(layout)
new_dialog.show() # To see possibility of this component, you need at least a layer opened
| webgeodatavore/pyqgis-samples | gui/qgis-sample-QgsMapLayerComboBox.py | Python | gpl-2.0 | 966 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Matt Martz <matt@sivel.net>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_msi
version_added: "1.7"
short_description: Installs and uninstalls Windows MSI files
description:
- Installs or uninstalls a Windows MSI file that is already located on the
target server
options:
path:
description:
- File system path to the MSI file to install
required: true
extra_args:
description:
- Additional arguments to pass to the msiexec.exe command
state:
description:
- Whether the MSI file should be installed or uninstalled
choices:
- present
- absent
default: present
creates:
description:
- Path to a file created by installing the MSI to prevent from
attempting to reinstall the package on every run
wait:
version_added: "2.1"
description:
- Specify whether to wait for install or uninstall to complete before continuing.
choices:
- true
- false
default: false
notes:
- Check-mode support is currently not supported.
- Please look into M(win_package) instead, this package will be deprecated in Ansible v2.3.
author: "Matt Martz (@sivel)"
'''
EXAMPLES = r'''
- name: Install an MSI file
win_msi:
path: C:\7z920-x64.msi
- name: Install an MSI, and wait for it to complete before continuing
win_msi:
path: C:\7z920-x64.msi
wait: true
- name: Uninstall an MSI file
win_msi:
path: C:\7z920-x64.msi
state: absent
'''
| bjolivot/ansible | lib/ansible/modules/windows/win_msi.py | Python | gpl-3.0 | 2,526 |
# Copyright 2013 Josh Durgin
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from lxml import etree
from oslo.config import cfg
import urllib
import webob
from cinder.api import extensions
from cinder.api.v2 import volumes
from cinder import context
from cinder import db
from cinder import exception
from cinder import test
from cinder.tests.api import fakes
from cinder.tests.api.v2 import stubs
from cinder.tests.image import fake as fake_image
from cinder.volume import api as volume_api
CONF = cfg.CONF
NS = '{http://docs.openstack.org/api/openstack-volume/2.0/content}'
TEST_SNAPSHOT_UUID = '00000000-0000-0000-0000-000000000001'
def stub_snapshot_get(self, context, snapshot_id):
    """Stub: return a canned snapshot dict for the known test UUID.

    Mimics the real snapshot lookup by raising NotFound for any other id.
    """
    if snapshot_id == TEST_SNAPSHOT_UUID:
        snapshot = dict(
            id=snapshot_id,
            volume_id=12,
            status='available',
            volume_size=100,
            created_at=None,
            name='Default name',
            description='Default description',
        )
        return snapshot
    raise exception.NotFound
class VolumeApiTest(test.TestCase):
    def setUp(self):
        """Build a VolumeController wired entirely to stubs (no real backend)."""
        super(VolumeApiTest, self).setUp()
        self.ext_mgr = extensions.ExtensionManager()
        self.ext_mgr.extensions = {}
        # Replace the image service and the volume/db entry points with stubs.
        fake_image.stub_out_image_service(self.stubs)
        self.controller = volumes.VolumeController(self.ext_mgr)
        self.stubs.Set(db, 'volume_get_all', stubs.stub_volume_get_all)
        self.stubs.Set(db, 'volume_get_all_by_project',
                       stubs.stub_volume_get_all_by_project)
        self.stubs.Set(volume_api.API, 'get', stubs.stub_volume_get)
        self.stubs.Set(volume_api.API, 'delete', stubs.stub_volume_delete)
        self.stubs.Set(db, 'service_get_all_by_topic',
                       stubs.stub_service_get_all_by_topic)
        # Responses are large nested dicts; show full diffs on failure.
        self.maxDiff = None
def test_volume_create(self):
self.stubs.Set(volume_api.API, "create", stubs.stub_volume_create)
vol = {
"size": 100,
"name": "Volume Test Name",
"description": "Volume Test Desc",
"availability_zone": "zone1:host1"
}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v2/volumes')
res_dict = self.controller.create(req, body)
expected = {
'volume': {
'name': 'Volume Test Name',
'id': '1',
'links': [
{
'href': 'http://localhost/v1/fake/volumes/1',
'rel': 'self'
},
{
'href': 'http://localhost/fake/volumes/1',
'rel': 'bookmark'
}
],
}
}
self.assertEqual(res_dict, expected)
def test_volume_create_with_type(self):
vol_type = db.volume_type_create(
context.get_admin_context(),
dict(name=CONF.default_volume_type, extra_specs={})
)
db_vol_type = db.volume_type_get(context.get_admin_context(),
vol_type.id)
vol = {
"size": 100,
"name": "Volume Test Name",
"description": "Volume Test Desc",
"availability_zone": "zone1:host1",
"volume_type": db_vol_type['id'],
}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v2/volumes')
res_dict = self.controller.create(req, body)
volume_id = res_dict['volume']['id']
self.assertEquals(len(res_dict), 1)
self.stubs.Set(volume_api.API, 'get_all',
lambda *args, **kwargs:
[stubs.stub_volume(volume_id,
volume_type={'name': vol_type})])
req = fakes.HTTPRequest.blank('/v2/volumes/detail')
res_dict = self.controller.detail(req)
def test_volume_creation_fails_with_bad_size(self):
vol = {"size": '',
"name": "Volume Test Name",
"description": "Volume Test Desc",
"availability_zone": "zone1:host1"}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v2/volumes')
self.assertRaises(exception.InvalidInput,
self.controller.create,
req,
body)
def test_volume_creation_fails_with_bad_availability_zone(self):
vol = {"size": '1',
"name": "Volume Test Name",
"description": "Volume Test Desc",
"availability_zone": "zonen:hostn"}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v2/volumes')
self.assertRaises(exception.InvalidInput,
self.controller.create,
req, body)
def test_volume_create_with_image_id(self):
self.stubs.Set(volume_api.API, "create", stubs.stub_volume_create)
self.ext_mgr.extensions = {'os-image-create': 'fake'}
vol = {"size": '1',
"name": "Volume Test Name",
"description": "Volume Test Desc",
"availability_zone": "nova",
"imageRef": 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'}
expected = {
'volume': {
'name': 'Volume Test Name',
'id': '1',
'links': [
{
'href': 'http://localhost/v1/fake/volumes/1',
'rel': 'self'
},
{
'href': 'http://localhost/fake/volumes/1',
'rel': 'bookmark'
}
],
}
}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v2/volumes')
res_dict = self.controller.create(req, body)
self.assertEqual(res_dict, expected)
def test_volume_create_with_image_id_is_integer(self):
self.stubs.Set(volume_api.API, "create", stubs.stub_volume_create)
self.ext_mgr.extensions = {'os-image-create': 'fake'}
vol = {
"size": '1',
"name": "Volume Test Name",
"description": "Volume Test Desc",
"availability_zone": "cinder",
"imageRef": 1234,
}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v2/volumes')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_volume_create_with_image_id_not_uuid_format(self):
self.stubs.Set(volume_api.API, "create", stubs.stub_volume_create)
self.ext_mgr.extensions = {'os-image-create': 'fake'}
vol = {
"size": '1',
"name": "Volume Test Name",
"description": "Volume Test Desc",
"availability_zone": "cinder",
"imageRef": '12345'
}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v2/volumes')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_volume_update(self):
self.stubs.Set(volume_api.API, "update", stubs.stub_volume_update)
updates = {
"name": "Updated Test Name",
}
body = {"volume": updates}
req = fakes.HTTPRequest.blank('/v2/volumes/1')
res_dict = self.controller.update(req, '1', body)
expected = {
'volume': {
'status': 'fakestatus',
'description': 'displaydesc',
'availability_zone': 'fakeaz',
'name': 'Updated Test Name',
'attachments': [
{
'id': '1',
'volume_id': '1',
'server_id': 'fakeuuid',
'host_name': None,
'device': '/',
}
],
'user_id': 'fakeuser',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1, 1, 1, 1),
'size': 1,
'links': [
{
'href': 'http://localhost/v1/fake/volumes/1',
'rel': 'self'
},
{
'href': 'http://localhost/fake/volumes/1',
'rel': 'bookmark'
}
],
}
}
self.assertEquals(res_dict, expected)
def test_volume_update_metadata(self):
self.stubs.Set(volume_api.API, "update", stubs.stub_volume_update)
updates = {
"metadata": {"qos_max_iops": 2000}
}
body = {"volume": updates}
req = fakes.HTTPRequest.blank('/v2/volumes/1')
res_dict = self.controller.update(req, '1', body)
expected = {'volume': {
'status': 'fakestatus',
'description': 'displaydesc',
'availability_zone': 'fakeaz',
'name': 'displayname',
'attachments': [{
'id': '1',
'volume_id': '1',
'server_id': 'fakeuuid',
'host_name': None,
'device': '/',
}],
'user_id': 'fakeuser',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {"qos_max_iops": 2000},
'id': '1',
'created_at': datetime.datetime(1, 1, 1, 1, 1, 1),
'size': 1,
'links': [
{
'href': 'http://localhost/v1/fake/volumes/1',
'rel': 'self'
},
{
'href': 'http://localhost/fake/volumes/1',
'rel': 'bookmark'
}
],
}}
self.assertEquals(res_dict, expected)
def test_update_empty_body(self):
body = {}
req = fakes.HTTPRequest.blank('/v2/volumes/1')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update,
req, '1', body)
def test_update_invalid_body(self):
body = {
'name': 'missing top level volume key'
}
req = fakes.HTTPRequest.blank('/v2/volumes/1')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update,
req, '1', body)
def test_update_not_found(self):
self.stubs.Set(volume_api.API, "get", stubs.stub_volume_get_notfound)
updates = {
"name": "Updated Test Name",
}
body = {"volume": updates}
req = fakes.HTTPRequest.blank('/v2/volumes/1')
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update,
req, '1', body)
def test_volume_list_summary(self):
self.stubs.Set(volume_api.API, 'get_all',
stubs.stub_volume_get_all_by_project)
req = fakes.HTTPRequest.blank('/v2/volumes')
res_dict = self.controller.index(req)
expected = {
'volumes': [
{
'name': 'displayname',
'id': '1',
'links': [
{
'href': 'http://localhost/v1/fake/volumes/1',
'rel': 'self'
},
{
'href': 'http://localhost/fake/volumes/1',
'rel': 'bookmark'
}
],
}
]
}
self.assertEqual(res_dict, expected)
def test_volume_list_detail(self):
self.stubs.Set(volume_api.API, 'get_all',
stubs.stub_volume_get_all_by_project)
req = fakes.HTTPRequest.blank('/v2/volumes/detail')
res_dict = self.controller.detail(req)
expected = {
'volumes': [
{
'status': 'fakestatus',
'description': 'displaydesc',
'availability_zone': 'fakeaz',
'name': 'displayname',
'attachments': [
{
'device': '/',
'server_id': 'fakeuuid',
'host_name': None,
'id': '1',
'volume_id': '1'
}
],
'user_id': 'fakeuser',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1, 1, 1, 1),
'size': 1,
'links': [
{
'href': 'http://localhost/v1/fake/volumes/1',
'rel': 'self'
},
{
'href': 'http://localhost/fake/volumes/1',
'rel': 'bookmark'
}
],
}
]
}
self.assertEqual(res_dict, expected)
def test_volume_index_with_marker(self):
def stub_volume_get_all_by_project(context, project_id, marker, limit,
sort_key, sort_dir):
return [
stubs.stub_volume(1, display_name='vol1'),
stubs.stub_volume(2, display_name='vol2'),
]
self.stubs.Set(db, 'volume_get_all_by_project',
stub_volume_get_all_by_project)
req = fakes.HTTPRequest.blank('/v2/volumes?marker=1')
res_dict = self.controller.index(req)
volumes = res_dict['volumes']
self.assertEquals(len(volumes), 2)
self.assertEquals(volumes[0]['id'], 1)
self.assertEquals(volumes[1]['id'], 2)
def test_volume_index_limit(self):
req = fakes.HTTPRequest.blank('/v2/volumes?limit=1')
res_dict = self.controller.index(req)
volumes = res_dict['volumes']
self.assertEquals(len(volumes), 1)
def test_volume_index_limit_negative(self):
req = fakes.HTTPRequest.blank('/v2/volumes?limit=-1')
self.assertRaises(exception.Invalid,
self.controller.index,
req)
def test_volume_index_limit_non_int(self):
req = fakes.HTTPRequest.blank('/v2/volumes?limit=a')
self.assertRaises(exception.Invalid,
self.controller.index,
req)
def test_volume_index_limit_marker(self):
req = fakes.HTTPRequest.blank('/v2/volumes?marker=1&limit=1')
res_dict = self.controller.index(req)
volumes = res_dict['volumes']
self.assertEquals(len(volumes), 1)
self.assertEquals(volumes[0]['id'], '1')
def test_volume_index_limit_offset(self):
def stub_volume_get_all_by_project(context, project_id, marker, limit,
sort_key, sort_dir):
return [
stubs.stub_volume(1, display_name='vol1'),
stubs.stub_volume(2, display_name='vol2'),
]
self.stubs.Set(db, 'volume_get_all_by_project',
stub_volume_get_all_by_project)
req = fakes.HTTPRequest.blank('/v2/volumes?limit=2&offset=1')
res_dict = self.controller.index(req)
volumes = res_dict['volumes']
self.assertEquals(len(volumes), 1)
self.assertEquals(volumes[0]['id'], 2)
req = fakes.HTTPRequest.blank('/v2/volumes?limit=-1&offset=1')
self.assertRaises(exception.InvalidInput,
self.controller.index,
req)
req = fakes.HTTPRequest.blank('/v2/volumes?limit=a&offset=1')
self.assertRaises(exception.InvalidInput,
self.controller.index,
req)
def test_volume_detail_with_marker(self):
def stub_volume_get_all_by_project(context, project_id, marker, limit,
sort_key, sort_dir):
return [
stubs.stub_volume(1, display_name='vol1'),
stubs.stub_volume(2, display_name='vol2'),
]
self.stubs.Set(db, 'volume_get_all_by_project',
stub_volume_get_all_by_project)
req = fakes.HTTPRequest.blank('/v2/volumes/detail?marker=1')
res_dict = self.controller.index(req)
volumes = res_dict['volumes']
self.assertEquals(len(volumes), 2)
self.assertEquals(volumes[0]['id'], 1)
self.assertEquals(volumes[1]['id'], 2)
def test_volume_detail_limit(self):
req = fakes.HTTPRequest.blank('/v2/volumes/detail?limit=1')
res_dict = self.controller.index(req)
volumes = res_dict['volumes']
self.assertEquals(len(volumes), 1)
def test_volume_detail_limit_negative(self):
req = fakes.HTTPRequest.blank('/v2/volumes/detail?limit=-1')
self.assertRaises(exception.Invalid,
self.controller.index,
req)
def test_volume_detail_limit_non_int(self):
req = fakes.HTTPRequest.blank('/v2/volumes/detail?limit=a')
self.assertRaises(exception.Invalid,
self.controller.index,
req)
def test_volume_detail_limit_marker(self):
req = fakes.HTTPRequest.blank('/v2/volumes/detail?marker=1&limit=1')
res_dict = self.controller.index(req)
volumes = res_dict['volumes']
self.assertEquals(len(volumes), 1)
self.assertEquals(volumes[0]['id'], '1')
def test_volume_detail_limit_offset(self):
def stub_volume_get_all_by_project(context, project_id, marker, limit,
sort_key, sort_dir):
return [
stubs.stub_volume(1, display_name='vol1'),
stubs.stub_volume(2, display_name='vol2'),
]
self.stubs.Set(db, 'volume_get_all_by_project',
stub_volume_get_all_by_project)
req = fakes.HTTPRequest.blank('/v2/volumes/detail?limit=2&offset=1')
res_dict = self.controller.index(req)
volumes = res_dict['volumes']
self.assertEquals(len(volumes), 1)
self.assertEquals(volumes[0]['id'], 2)
req = fakes.HTTPRequest.blank('/v2/volumes/detail?limit=-1&offset=1')
self.assertRaises(exception.InvalidInput,
self.controller.index,
req)
req = fakes.HTTPRequest.blank('/v2/volumes/detail?limit=a&offset=1')
self.assertRaises(exception.InvalidInput,
self.controller.index,
req)
def test_volume_list_by_name(self):
def stub_volume_get_all_by_project(context, project_id, marker, limit,
sort_key, sort_dir):
return [
stubs.stub_volume(1, display_name='vol1'),
stubs.stub_volume(2, display_name='vol2'),
stubs.stub_volume(3, display_name='vol3'),
]
self.stubs.Set(db, 'volume_get_all_by_project',
stub_volume_get_all_by_project)
# no name filter
req = fakes.HTTPRequest.blank('/v2/volumes')
resp = self.controller.index(req)
self.assertEqual(len(resp['volumes']), 3)
# filter on name
req = fakes.HTTPRequest.blank('/v2/volumes?name=vol2')
resp = self.controller.index(req)
self.assertEqual(len(resp['volumes']), 1)
self.assertEqual(resp['volumes'][0]['name'], 'vol2')
# filter no match
req = fakes.HTTPRequest.blank('/v2/volumes?name=vol4')
resp = self.controller.index(req)
self.assertEqual(len(resp['volumes']), 0)
def test_volume_list_by_metadata(self):
def stub_volume_get_all_by_project(context, project_id, marker, limit,
sort_key, sort_dir):
return [
stubs.stub_volume(1, display_name='vol1',
status='available',
volume_metadata=[{'key': 'key1',
'value': 'value1'}]),
stubs.stub_volume(2, display_name='vol2',
status='available',
volume_metadata=[{'key': 'key1',
'value': 'value2'}]),
stubs.stub_volume(3, display_name='vol3',
status='in-use',
volume_metadata=[{'key': 'key1',
'value': 'value2'}]),
]
self.stubs.Set(db, 'volume_get_all_by_project',
stub_volume_get_all_by_project)
# no metadata filter
req = fakes.HTTPRequest.blank('/v2/volumes', use_admin_context=True)
resp = self.controller.detail(req)
self.assertEqual(len(resp['volumes']), 3)
# single match
qparams = urllib.urlencode({'metadata': {'key1': 'value1'}})
req = fakes.HTTPRequest.blank('/v2/volumes?%s' % qparams,
use_admin_context=True)
resp = self.controller.detail(req)
self.assertEqual(len(resp['volumes']), 1)
self.assertEqual(resp['volumes'][0]['name'], 'vol1')
self.assertEqual(resp['volumes'][0]['metadata']['key1'], 'value1')
# multiple matches
qparams = urllib.urlencode({'metadata': {'key1': 'value2'}})
req = fakes.HTTPRequest.blank('/v2/volumes?%s' % qparams,
use_admin_context=True)
resp = self.controller.detail(req)
self.assertEqual(len(resp['volumes']), 2)
for volume in resp['volumes']:
self.assertEqual(volume['metadata']['key1'], 'value2')
# multiple filters
qparams = urllib.urlencode({'metadata': {'key1': 'value2'}})
req = fakes.HTTPRequest.blank('/v2/volumes?status=in-use&%s' % qparams,
use_admin_context=True)
resp = self.controller.detail(req)
self.assertEqual(len(resp['volumes']), 1)
self.assertEqual(resp['volumes'][0]['name'], 'vol3')
# no match
qparams = urllib.urlencode({'metadata': {'key1': 'value3'}})
req = fakes.HTTPRequest.blank('/v2/volumes?%s' % qparams,
use_admin_context=True)
resp = self.controller.detail(req)
self.assertEqual(len(resp['volumes']), 0)
def test_volume_list_by_status(self):
def stub_volume_get_all_by_project(context, project_id, marker, limit,
sort_key, sort_dir):
return [
stubs.stub_volume(1, display_name='vol1', status='available'),
stubs.stub_volume(2, display_name='vol2', status='available'),
stubs.stub_volume(3, display_name='vol3', status='in-use'),
]
self.stubs.Set(db, 'volume_get_all_by_project',
stub_volume_get_all_by_project)
# no status filter
req = fakes.HTTPRequest.blank('/v2/volumes/details')
resp = self.controller.detail(req)
self.assertEqual(len(resp['volumes']), 3)
# single match
req = fakes.HTTPRequest.blank('/v2/volumes/details?status=in-use')
resp = self.controller.detail(req)
self.assertEqual(len(resp['volumes']), 1)
self.assertEqual(resp['volumes'][0]['status'], 'in-use')
# multiple match
req = fakes.HTTPRequest.blank('/v2/volumes/details/?status=available')
resp = self.controller.detail(req)
self.assertEqual(len(resp['volumes']), 2)
for volume in resp['volumes']:
self.assertEqual(volume['status'], 'available')
# multiple filters
req = fakes.HTTPRequest.blank('/v2/volumes/details/?status=available&'
'name=vol1')
resp = self.controller.detail(req)
self.assertEqual(len(resp['volumes']), 1)
self.assertEqual(resp['volumes'][0]['name'], 'vol1')
self.assertEqual(resp['volumes'][0]['status'], 'available')
# no match
req = fakes.HTTPRequest.blank('/v2/volumes/details?status=in-use&'
'name=vol1')
resp = self.controller.detail(req)
self.assertEqual(len(resp['volumes']), 0)
def test_volume_show(self):
req = fakes.HTTPRequest.blank('/v2/volumes/1')
res_dict = self.controller.show(req, '1')
expected = {
'volume': {
'status': 'fakestatus',
'description': 'displaydesc',
'availability_zone': 'fakeaz',
'name': 'displayname',
'attachments': [
{
'device': '/',
'server_id': 'fakeuuid',
'host_name': None,
'id': '1',
'volume_id': '1'
}
],
'user_id': 'fakeuser',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1, 1, 1, 1),
'size': 1,
'links': [
{
'href': 'http://localhost/v1/fake/volumes/1',
'rel': 'self'
},
{
'href': 'http://localhost/fake/volumes/1',
'rel': 'bookmark'
}
],
}
}
self.assertEqual(res_dict, expected)
def test_volume_show_no_attachments(self):
def stub_volume_get(self, context, volume_id):
return stubs.stub_volume(volume_id, attach_status='detached')
self.stubs.Set(volume_api.API, 'get', stub_volume_get)
req = fakes.HTTPRequest.blank('/v2/volumes/1')
res_dict = self.controller.show(req, '1')
expected = {
'volume': {
'status': 'fakestatus',
'description': 'displaydesc',
'availability_zone': 'fakeaz',
'name': 'displayname',
'attachments': [],
'user_id': 'fakeuser',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1, 1, 1, 1),
'size': 1,
'links': [
{
'href': 'http://localhost/v1/fake/volumes/1',
'rel': 'self'
},
{
'href': 'http://localhost/fake/volumes/1',
'rel': 'bookmark'
}
],
}
}
self.assertEqual(res_dict, expected)
def test_volume_show_no_volume(self):
self.stubs.Set(volume_api.API, "get", stubs.stub_volume_get_notfound)
req = fakes.HTTPRequest.blank('/v2/volumes/1')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
req, 1)
def test_volume_delete(self):
req = fakes.HTTPRequest.blank('/v2/volumes/1')
resp = self.controller.delete(req, 1)
self.assertEqual(resp.status_int, 202)
def test_volume_delete_attached(self):
def stub_volume_attached(self, context, volume, force=False):
raise exception.VolumeAttached(volume_id=volume['id'])
self.stubs.Set(volume_api.API, "delete", stub_volume_attached)
req = fakes.HTTPRequest.blank('/v2/volumes/1')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.delete,
req, 1)
def test_volume_delete_no_volume(self):
self.stubs.Set(volume_api.API, "get", stubs.stub_volume_get_notfound)
req = fakes.HTTPRequest.blank('/v2/volumes/1')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
req, 1)
def test_admin_list_volumes_limited_to_project(self):
req = fakes.HTTPRequest.blank('/v2/fake/volumes',
use_admin_context=True)
res = self.controller.index(req)
self.assertTrue('volumes' in res)
self.assertEqual(1, len(res['volumes']))
def test_admin_list_volumes_all_tenants(self):
req = fakes.HTTPRequest.blank('/v2/fake/volumes?all_tenants=1',
use_admin_context=True)
res = self.controller.index(req)
self.assertTrue('volumes' in res)
self.assertEqual(3, len(res['volumes']))
def test_all_tenants_non_admin_gets_all_tenants(self):
req = fakes.HTTPRequest.blank('/v2/fake/volumes?all_tenants=1')
res = self.controller.index(req)
self.assertTrue('volumes' in res)
self.assertEqual(1, len(res['volumes']))
def test_non_admin_get_by_project(self):
req = fakes.HTTPRequest.blank('/v2/fake/volumes')
res = self.controller.index(req)
self.assertTrue('volumes' in res)
self.assertEqual(1, len(res['volumes']))
def _create_volume_bad_request(self, body):
req = fakes.HTTPRequest.blank('/v2/fake/volumes')
req.method = 'POST'
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_no_body(self):
self._create_volume_bad_request(body=None)
def test_create_missing_volume(self):
body = {'foo': {'a': 'b'}}
self._create_volume_bad_request(body=body)
def test_create_malformed_entity(self):
body = {'volume': 'string'}
self._create_volume_bad_request(body=body)
class VolumeSerializerTest(test.TestCase):
def _verify_volume_attachment(self, attach, tree):
for attr in ('id', 'volume_id', 'server_id', 'device'):
self.assertEqual(str(attach[attr]), tree.get(attr))
def _verify_volume(self, vol, tree):
self.assertEqual(tree.tag, NS + 'volume')
for attr in ('id', 'status', 'size', 'availability_zone', 'created_at',
'name', 'description', 'volume_type',
'snapshot_id', 'source_volid'):
self.assertEqual(str(vol[attr]), tree.get(attr))
for child in tree:
print child.tag
self.assertTrue(child.tag in (NS + 'attachments', NS + 'metadata'))
if child.tag == 'attachments':
self.assertEqual(1, len(child))
self.assertEqual('attachment', child[0].tag)
self._verify_volume_attachment(vol['attachments'][0], child[0])
elif child.tag == 'metadata':
not_seen = set(vol['metadata'].keys())
for gr_child in child:
self.assertTrue(gr_child.get("key") in not_seen)
self.assertEqual(str(vol['metadata'][gr_child.get("key")]),
gr_child.text)
not_seen.remove(gr_child.get('key'))
self.assertEqual(0, len(not_seen))
def test_volume_show_create_serializer(self):
serializer = volumes.VolumeTemplate()
raw_volume = dict(
id='vol_id',
status='vol_status',
size=1024,
availability_zone='vol_availability',
created_at=datetime.datetime.now(),
attachments=[
dict(
id='vol_id',
volume_id='vol_id',
server_id='instance_uuid',
device='/foo'
)
],
name='vol_name',
description='vol_desc',
volume_type='vol_type',
snapshot_id='snap_id',
source_volid='source_volid',
metadata=dict(
foo='bar',
baz='quux',
),
)
text = serializer.serialize(dict(volume=raw_volume))
print text
tree = etree.fromstring(text)
self._verify_volume(raw_volume, tree)
def test_volume_index_detail_serializer(self):
serializer = volumes.VolumesTemplate()
raw_volumes = [
dict(
id='vol1_id',
status='vol1_status',
size=1024,
availability_zone='vol1_availability',
created_at=datetime.datetime.now(),
attachments=[
dict(
id='vol1_id',
volume_id='vol1_id',
server_id='instance_uuid',
device='/foo1'
)
],
name='vol1_name',
description='vol1_desc',
volume_type='vol1_type',
snapshot_id='snap1_id',
source_volid=None,
metadata=dict(foo='vol1_foo',
bar='vol1_bar', ), ),
dict(
id='vol2_id',
status='vol2_status',
size=1024,
availability_zone='vol2_availability',
created_at=datetime.datetime.now(),
attachments=[dict(id='vol2_id',
volume_id='vol2_id',
server_id='instance_uuid',
device='/foo2')],
name='vol2_name',
description='vol2_desc',
volume_type='vol2_type',
snapshot_id='snap2_id',
source_volid=None,
metadata=dict(foo='vol2_foo',
bar='vol2_bar', ), )]
text = serializer.serialize(dict(volumes=raw_volumes))
print text
tree = etree.fromstring(text)
self.assertEqual(NS + 'volumes', tree.tag)
self.assertEqual(len(raw_volumes), len(tree))
for idx, child in enumerate(tree):
self._verify_volume(raw_volumes[idx], child)
class TestVolumeCreateRequestXMLDeserializer(test.TestCase):
def setUp(self):
super(TestVolumeCreateRequestXMLDeserializer, self).setUp()
self.deserializer = volumes.CreateDeserializer()
def test_minimal_volume(self):
self_request = """
<volume xmlns="http://docs.openstack.org/api/openstack-volume/2.0/content"
size="1"></volume>"""
request = self.deserializer.deserialize(self_request)
expected = {
"volume": {
"size": "1",
},
}
self.assertEquals(request['body'], expected)
def test_name(self):
self_request = """
<volume xmlns="http://docs.openstack.org/api/openstack-volume/2.0/content"
size="1"
name="Volume-xml"></volume>"""
request = self.deserializer.deserialize(self_request)
expected = {
"volume": {
"size": "1",
"name": "Volume-xml",
},
}
self.assertEquals(request['body'], expected)
def test_description(self):
self_request = """
<volume xmlns="http://docs.openstack.org/api/openstack-volume/2.0/content"
size="1"
name="Volume-xml"
description="description"></volume>"""
request = self.deserializer.deserialize(self_request)
expected = {
"volume": {
"size": "1",
"name": "Volume-xml",
"description": "description",
},
}
self.assertEquals(request['body'], expected)
def test_volume_type(self):
self_request = """
<volume xmlns="http://docs.openstack.org/api/openstack-volume/2.0/content"
size="1"
name="Volume-xml"
description="description"
volume_type="289da7f8-6440-407c-9fb4-7db01ec49164"></volume>"""
request = self.deserializer.deserialize(self_request)
expected = {
"volume": {
"name": "Volume-xml",
"size": "1",
"name": "Volume-xml",
"description": "description",
"volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164",
},
}
self.assertEquals(request['body'], expected)
def test_availability_zone(self):
self_request = """
<volume xmlns="http://docs.openstack.org/api/openstack-volume/2.0/content"
size="1"
name="Volume-xml"
description="description"
volume_type="289da7f8-6440-407c-9fb4-7db01ec49164"
availability_zone="us-east1"></volume>"""
request = self.deserializer.deserialize(self_request)
expected = {
"volume": {
"size": "1",
"name": "Volume-xml",
"description": "description",
"volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164",
"availability_zone": "us-east1",
},
}
self.assertEquals(request['body'], expected)
def test_metadata(self):
self_request = """
<volume xmlns="http://docs.openstack.org/api/openstack-volume/2.0/content"
name="Volume-xml"
size="1">
<metadata><meta key="Type">work</meta></metadata></volume>"""
request = self.deserializer.deserialize(self_request)
expected = {
"volume": {
"name": "Volume-xml",
"size": "1",
"metadata": {
"Type": "work",
},
},
}
self.assertEquals(request['body'], expected)
def test_full_volume(self):
self_request = """
<volume xmlns="http://docs.openstack.org/api/openstack-volume/2.0/content"
size="1"
name="Volume-xml"
description="description"
volume_type="289da7f8-6440-407c-9fb4-7db01ec49164"
availability_zone="us-east1">
<metadata><meta key="Type">work</meta></metadata></volume>"""
request = self.deserializer.deserialize(self_request)
expected = {
"volume": {
"size": "1",
"name": "Volume-xml",
"description": "description",
"volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164",
"availability_zone": "us-east1",
"metadata": {
"Type": "work",
},
},
}
self.assertEquals(request['body'], expected)
def test_imageref(self):
self_request = """
<volume xmlns="http://docs.openstack.org/api/openstack-volume/2.0/content"
size="1"
name="Volume-xml"
description="description"
imageRef="4a90189d-d702-4c7c-87fc-6608c554d737"></volume>"""
request = self.deserializer.deserialize(self_request)
expected = {
"volume": {
"size": "1",
"name": "Volume-xml",
"description": "description",
"imageRef": "4a90189d-d702-4c7c-87fc-6608c554d737",
},
}
self.assertEquals(expected, request['body'])
def test_snapshot_id(self):
self_request = """
<volume xmlns="http://docs.openstack.org/api/openstack-volume/2.0/content"
size="1"
name="Volume-xml"
description="description"
snapshot_id="4a90189d-d702-4c7c-87fc-6608c554d737"></volume>"""
request = self.deserializer.deserialize(self_request)
expected = {
"volume": {
"size": "1",
"name": "Volume-xml",
"description": "description",
"snapshot_id": "4a90189d-d702-4c7c-87fc-6608c554d737",
},
}
self.assertEquals(expected, request['body'])
def test_source_volid(self):
    """The source_volid attribute is preserved through deserialization."""
    self_request = """
<volume xmlns="http://docs.openstack.org/api/openstack-volume/2.0/content"
size="1"
name="Volume-xml"
description="description"
source_volid="4a90189d-d702-4c7c-87fc-6608c554d737"></volume>"""
    request = self.deserializer.deserialize(self_request)
    expected = {
        "volume": {
            "size": "1",
            "name": "Volume-xml",
            "description": "description",
            "source_volid": "4a90189d-d702-4c7c-87fc-6608c554d737",
        },
    }
    # assertEquals is a deprecated alias for assertEqual.
    self.assertEqual(expected, request['body'])
| maelnor/cinder | cinder/tests/api/v2/test_volumes.py | Python | apache-2.0 | 42,339 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for google3.third_party.robotics.ikfast.ikfast_util."""
from typing import List
import unittest
import numpy as np
from pyreach.ikfast import ikfast
class IkfastUtilTest(unittest.TestCase):
def test_ikfast_ur3e(self) -> None:
    """IK/FK round trip plus hint-based searches for the UR3e arm model."""
    resolver = ikfast.IKFast("ur3e.urdf")
    # Pose layout assumed [x, y, z, rx, ry, rz] (axis-angle rotation) --
    # TODO confirm against the ikfast.IKFast API.
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    inverse = resolver.ik(test_pose)
    # Analytic IK must return exactly these four solution branches.
    expect: List[List[float]] = [
        [0.15575925, -2.9682486, 0.93033262, -2.6334274, 1.5101384,
         -1.53257425],
        [0.15575925, -2.10598112, -0.93033262, -1.63502962, 1.5101384,
         -1.53257425],
        [2.6942723, -1.0361406, 0.94204333, -1.54489237, -1.59749533,
         -2.13531918],
        [2.6942723, -0.16315868, -0.94204333, -0.53378763, -1.59749533,
         -2.13531918]]
    assert inverse is not None
    self.assertTrue(np.allclose(np.asarray(expect), inverse))
    # Forward kinematics of every IK solution must reproduce the pose.
    for idx in range(inverse.shape[0]):
        forward = resolver.fk(inverse[idx, :])
        assert forward is not None
        self.assertTrue(np.allclose(forward, test_pose))
    # An unreachable pose (translation far outside the workspace) yields
    # no hint-based solution.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    # Two seed joint configurations used to bias the search.
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNone(joints)
    # A reachable pose resolves to the hint-nearest solution branch.
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                0.15575925, -2.10598112, -0.93033262, -1.63502962, 1.5101384,
                -1.53257425
            ])))
    # Repeat both checks through the Unity-style search, which also takes
    # the robot's current joint configuration.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ],
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNone(joints)
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                0.15575925, -2.10598112, -0.93033262, -1.63502962, 1.5101384,
                -1.53257425
            ])))
def test_ikfast_ur5(self) -> None:
    """IK/FK round trip plus hint-based searches for the UR5 arm model."""
    resolver = ikfast.IKFast("ur5.urdf")
    # Pose layout assumed [x, y, z, rx, ry, rz] (axis-angle rotation) --
    # TODO confirm against the ikfast.IKFast API.
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    inverse = resolver.ik(test_pose)
    # Analytic IK must return exactly these eight solution branches.
    expect: List[List[float]] = [
        [0.10294705189091624, 2.9057720806031386, 2.011619009816082,
         2.9807817949616644, 1.512389014027443, -1.5854321455669282],
        [0.10294705189094878, 2.9413041271216556, 1.4974508722646775,
         0.31782523240476457, -1.5123890140274414, 1.5561605080229008],
        [0.1029470518910393, -1.9188698403478268, -1.4974508722646795,
         1.8897156372240311, -1.5123890140274374, 1.5561605080229914],
        [2.745076118166182, -1.6472653950904435, 2.0153720343702615,
         -2.005655425845945, -1.6009213486381686, -2.084610518971255],
        [0.10294705188992255, -1.4920570522272005, -2.0116190098160973,
         -1.1645216669349754, 1.512389014027483, -1.585432145567923],
        [2.745076118166243, -1.2243445112144193, 1.494050072651985,
         1.2343383055860577, 1.6009213486381657, 1.0569821346186032],
        [2.7450761181663506, 0.19551923227662255, -1.494050072651981,
         2.8025747073989677, 1.6009213486381728, 1.0569821346187105],
        [2.745076118166387, 0.24131969412969428, -2.015372034370251,
         0.13650355367431574, -1.6009213486381753, -2.0846105189710498]]
    assert inverse is not None
    self.assertTrue(np.allclose(np.asarray(expect), inverse))
    # Forward kinematics of every IK solution must reproduce the pose.
    for idx in range(inverse.shape[0]):
        forward = resolver.fk(inverse[idx, :])
        assert forward is not None
        self.assertTrue(np.allclose(forward, test_pose))
    # An unreachable pose (translation far outside the workspace) yields
    # no hint-based solution.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    # Two seed joint configurations used to bias the search.
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNone(joints)
    # A reachable pose resolves to the hint-nearest solution.
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.75910822067493, -2.146299888647414, -1.6943876663147872,
                -0.8787322918414351, 1.655420395887572, 0.18456659620035673
            ])))
    # Repeat both checks through the Unity-style search, which also takes
    # the robot's current joint configuration.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ],
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNone(joints)
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.75910822067493, -2.146299888647414, -1.6943876663147872,
                -0.8787322918414351, 1.655420395887572, 0.18456659620035673
            ])))
def test_ikfast_ur5e(self) -> None:
    """IK/FK round trip plus hint-based searches for the UR5e arm model."""
    resolver = ikfast.IKFast("ur5e.urdf")
    # Pose layout assumed [x, y, z, rx, ry, rz] (axis-angle rotation) --
    # TODO confirm against the ikfast.IKFast API.
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    inverse = resolver.ik(test_pose)
    # Analytic IK must return exactly these eight solution branches.
    expect: List[List[float]] = [
        [
            0.16405055986938188, 2.765791124520724, 2.1403950045986257,
            2.98833657267692, 1.509800259026823, -1.524274490132905
        ],
        [
            0.16405055986941708, 2.812858484030981, 1.5952639230344068,
            0.344807641141076, -1.5098002590268216, 1.6173181634569267
        ],
        [
            0.16405055986882972, -1.9578001796958513, -1.595263923034398,
            2.0228088437571556, -1.5098002590268453, 1.6173181634563387
        ],
        [
            2.687641187651362, -1.6129194695445, 2.1445565902551675,
            -2.1708019359199584, -1.5970429060996174, -2.141937160919743
        ],
        [
            0.1640505598697007, -1.5243944486163512, -2.1403950045986226,
            -1.0070584593479304, 1.5098002590268065, -1.5242744901325862
        ],
        [
            2.6876411876547217, -1.1867158554146933, 1.5917631540339052,
            1.0973805397612704, 1.5970429060998401, 0.9996554926734069
        ],
        [
            2.687641187652034, 0.32259890325751445, -1.5917631540338664,
            2.771592089156949, 1.5970429060996565, 0.9996554926707248
        ],
        [
            2.687641187650905, 0.3835133327531148, -2.1445565902551706,
            0.12187844229276258, -1.5970429060995792, -2.1419371609201985
        ]
    ]
    assert inverse is not None
    self.assertTrue(np.allclose(np.asarray(expect), inverse))
    # Forward kinematics of every IK solution must reproduce the pose.
    for idx in range(inverse.shape[0]):
        forward = resolver.fk(inverse[idx, :])
        assert forward is not None
        self.assertTrue(np.allclose(forward, test_pose))
    # An unreachable pose (translation far outside the workspace) yields
    # no hint-based solution.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    # Two seed joint configurations used to bias the search.
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNone(joints)
    # A reachable pose resolves to the hint-nearest solution.
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.7965282406276215, -2.22330850771462, -1.7512629073471058,
                -0.7416700537416318, 1.6556240465808028, 0.22212078814240713
            ])))
    # Repeat both checks through the Unity-style search, which also takes
    # the robot's current joint configuration.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNone(joints)
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.7965282406276215, -2.22330850771462, -1.7512629073471058,
                -0.7416700537416318, 1.6556240465808028, 0.22212078814240713
            ])))
def test_ikfast_xarm6(self) -> None:
    """IK/FK round trip plus hint-based searches for the XArm6 arm model."""
    resolver = ikfast.IKFast("XArm6.urdf")
    # Pose layout assumed [x, y, z, rx, ry, rz] (axis-angle rotation) --
    # TODO confirm against the ikfast.IKFast API.
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    inverse = resolver.ik(test_pose)
    # Analytic IK must return exactly these eight solution branches.
    expect: List[List[float]] = [
        [
            3.0241143888332935, -2.4979339266159415, -0.869571351354945,
            2.9728619851022766, 2.855946930066941, -0.3971085573226188
        ],
        [
            -0.13629036084730872, 2.1264475502162976, 1.6838950019183567,
            -0.06968215873316722, 2.4148672492298697, -0.30601073944427765
        ],
        [
            2.985715758962136, -2.1663234855763496, -1.548893556849539,
            -0.07641723765342481, -2.508911406333506, 2.806453208942962
        ],
        [
            -0.14948326717072866, 1.7945329513322539, 2.3663428366356047,
            3.0898920675784742, -2.064377274088418, 2.849999303285056
        ],
        [
            2.9937505181857373, -0.37313097589941435, 1.6837958107043811,
            3.0935780878199486, 1.2537071699447582, -0.2504600050544283
        ],
        [
            -0.1527109989579265, 0.0013844348686441164, -0.8672603110596464,
            -0.06265836034117257, 0.8093362839222564, -0.22704470871038002
        ],
        [
            2.997243605175099, -0.8536303497795151, 2.3663553966125934,
            -0.046124307367660294, -1.4557065980813688, 2.8849426596237975
        ],
        [
            -0.14028908411558633, 0.4818663374284742, -1.549236376478318,
            3.0872423121973194, -1.011081532800383, 2.91257730170467
        ]
    ]
    assert inverse is not None
    self.assertTrue(np.allclose(np.asarray(expect), inverse))
    # Forward kinematics of every IK solution must reproduce the pose.
    for idx in range(inverse.shape[0]):
        forward = resolver.fk(inverse[idx, :])
        assert forward is not None
        self.assertTrue(np.allclose(forward, test_pose))
    # An unreachable pose (translation far outside the workspace) yields
    # no hint-based solution.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    # Two seed joint configurations used to bias the search.
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNone(joints)
    # A reachable pose resolves to the hint-nearest solution.
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.5937576700857008, 0.935506050392982, -1.7341148539344895,
                0.11240818685999415, 0.8226610103120375, 1.5128080302488023
            ])))
    # Repeat both checks through the Unity-style search, which also takes
    # the robot's current joint configuration.  Note the Unity variant is
    # expected to pick a different solution than ik_search here.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNone(joints)
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                -1.563445952223937, -2.412728605995753, -1.733865098759504,
                -3.0431719068191505, 2.1565343828146206, 1.6283201194487749
            ])))
def test_ikfast_lrmate200ic(self) -> None:
    """IK/FK round trip plus hint-based searches for the LR Mate 200iC."""
    resolver = ikfast.IKFast("lrmate200ic.urdf")
    # Pose layout assumed [x, y, z, rx, ry, rz] (axis-angle rotation) --
    # TODO confirm against the ikfast.IKFast API.
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    inverse = resolver.ik(test_pose)
    # Analytic IK must return exactly these four solution branches.
    expect: List[List[float]] = [
        [
            -0.14702247913212219, 0.16034728889163374, 0.17485384923751324,
            -3.095884823022829, 1.528085412362189, -2.8789233615489165
        ],
        [
            -0.14702247913212219, 0.16034728889163374, 0.17485384923751324,
            0.04570783056696115, -1.528085412362189, 0.2626692920408731
        ],
        [
            -0.14702247913212219, 1.3861858074105342, 2.5062996297985927,
            -3.047913002542534, 2.631778418156988, -2.795146542046531
        ],
        [
            -0.14702247913212219, 1.3861858074105342, 2.5062996297985927,
            0.09367965104725506, -2.631778418156988, 0.34644611154325844
        ]]
    assert inverse is not None
    self.assertTrue(np.allclose(np.asarray(expect), inverse))
    # Forward kinematics of every IK solution must reproduce the pose.
    for idx in range(inverse.shape[0]):
        forward = resolver.fk(inverse[idx, :])
        assert forward is not None
        self.assertTrue(np.allclose(forward, test_pose))
    # An unreachable pose (translation far outside the workspace) yields
    # no hint-based solution.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    # Two seed joint configurations used to bias the search.
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNone(joints)
    # A reachable pose resolves to the hint-nearest solution.
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.586832262092123, 0.8836393164616538, 0.6534707816860157,
                -3.2255700376773384, 1.362807584317037, 1.576470205253172
            ])))
    # Repeat both checks through the Unity-style search, which also takes
    # the robot's current joint configuration.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNone(joints)
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    # NOTE(review): joints 2/3 here lie outside [-pi, pi] -- presumably the
    # Unity search returns unwrapped angles; confirm intended.
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.586832262092123, -4.6798123690092215, -4.255502609829496,
                -3.2326082653650454, 2.014960035728337, 1.519892449681402
            ])))
def test_ikfast_lrmate200id(self) -> None:
    """IK/FK round trip plus hint-based searches for the LR Mate 200iD."""
    resolver = ikfast.IKFast("lrmate200id.urdf")
    # Pose layout assumed [x, y, z, rx, ry, rz] (axis-angle rotation) --
    # TODO confirm against the ikfast.IKFast API.
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    inverse = resolver.ik(test_pose)
    # Analytic IK must return exactly these eight solution branches.
    expect: List[List[float]] = [
        [
            2.994570174457678, -1.3037946958950393, 0.7039292786508708,
            0.09604786551728516, 2.6454714418591063, -2.7924436456339485
        ],
        [
            2.994570174457678, -1.3037946958950393, 0.7039292786508708,
            -3.0455447880725153, -2.6454714418591063, 0.34914900795584103
        ],
        [
            2.994570174457678, -0.5328206500292163, 2.22946349871972,
            0.04813734065101196, 1.892494928514801, -2.8617397938279296
        ],
        [
            2.994570174457678, -0.5328206500292163, 2.22946349871972,
            -3.0934553129387883, -1.892494928514801, 0.2798528597618599
        ],
        [
            -0.14702247913212219, 0.17928706929581928, 0.22741215153833055,
            -3.095924632692143, 1.5616688595405879, -2.8773874915996522
        ],
        [
            -0.14702247913212219, 0.17928706929581928, 0.22741215153833055,
            0.04566802089764721, -1.5616688595405879, 0.2642051619901373
        ],
        [
            -0.14702247913212219, 1.4331704093685016, 2.7059806258322605,
            -3.010958298998935, 2.7835449163035935, -2.754535197365122
        ],
        [
            -0.14702247913212219, 1.4331704093685016, 2.7059806258322605,
            0.13063435459085504, -2.7835449163035935, 0.38705745622466736
        ]
    ]
    assert inverse is not None
    self.assertTrue(np.allclose(np.asarray(expect), inverse))
    # Forward kinematics of every IK solution must reproduce the pose.
    for idx in range(inverse.shape[0]):
        forward = resolver.fk(inverse[idx, :])
        assert forward is not None
        self.assertTrue(np.allclose(forward, test_pose))
    # An unreachable pose (translation far outside the workspace) yields
    # no hint-based solution.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    # Two seed joint configurations used to bias the search.
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNone(joints)
    # A reachable pose resolves to the hint-nearest solution.
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.586832262092123, 0.8583089346799717, 0.678704268127883,
                -3.224789478317228, 1.4131949941018875, 1.5721775665784599
            ])))
    # Repeat both checks through the Unity-style search, which also takes
    # the robot's current joint configuration.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNone(joints)
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    # NOTE(review): joints 2/3 here lie outside [-pi, pi] -- presumably the
    # Unity search returns unwrapped angles; confirm intended.
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.586832262092123, -4.628376152869551, -4.028496797936878,
                -3.242526500173448, 2.189727907399642, 1.5003998827683611
            ])))
def test_ikfast_fanuccr7ia(self) -> None:
    """IK/FK round trip plus hint-based searches for the Fanuc CR-7iA."""
    resolver = ikfast.IKFast("FanucCR7ia.urdf")
    # Pose layout assumed [x, y, z, rx, ry, rz] (axis-angle rotation) --
    # TODO confirm against the ikfast.IKFast API.
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    inverse = resolver.ik(test_pose)
    # NOTE(review): these expected solutions are byte-identical to the
    # lrmate200id test above despite a different URDF -- confirm intended.
    expect: List[List[float]] = [
        [
            2.994570174457678, -1.3037946958950393, 0.7039292786508708,
            0.09604786551728516, 2.6454714418591063, -2.7924436456339485
        ],
        [
            2.994570174457678, -1.3037946958950393, 0.7039292786508708,
            -3.0455447880725153, -2.6454714418591063, 0.34914900795584103
        ],
        [
            2.994570174457678, -0.5328206500292163, 2.22946349871972,
            0.04813734065101196, 1.892494928514801, -2.8617397938279296
        ],
        [
            2.994570174457678, -0.5328206500292163, 2.22946349871972,
            -3.0934553129387883, -1.892494928514801, 0.2798528597618599
        ],
        [
            -0.14702247913212219, 0.17928706929581928, 0.22741215153833055,
            -3.095924632692143, 1.5616688595405879, -2.8773874915996522
        ],
        [
            -0.14702247913212219, 0.17928706929581928, 0.22741215153833055,
            0.04566802089764721, -1.5616688595405879, 0.2642051619901373
        ],
        [
            -0.14702247913212219, 1.4331704093685016, 2.7059806258322605,
            -3.010958298998935, 2.7835449163035935, -2.754535197365122
        ],
        [
            -0.14702247913212219, 1.4331704093685016, 2.7059806258322605,
            0.13063435459085504, -2.7835449163035935, 0.38705745622466736
        ]
    ]
    assert inverse is not None
    self.assertTrue(np.allclose(np.asarray(expect), inverse))
    # Forward kinematics of every IK solution must reproduce the pose.
    for idx in range(inverse.shape[0]):
        forward = resolver.fk(inverse[idx, :])
        assert forward is not None
        self.assertTrue(np.allclose(forward, test_pose))
    # An unreachable pose (translation far outside the workspace) yields
    # no hint-based solution.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    # Two seed joint configurations used to bias the search.
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNone(joints)
    # A reachable pose resolves to the hint-nearest solution.
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.586832262092123, 0.8583089346799717, 0.678704268127883,
                -3.224789478317228, 1.4131949941018875, 1.5721775665784599
            ])))
    # Repeat both checks through the Unity-style search, which also takes
    # the robot's current joint configuration.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNone(joints)
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.586832262092123, -4.628376152869551, -4.028496797936878,
                -3.242526500173448, 2.189727907399642, 1.5003998827683611
            ])))
def test_ikfast_fcr7ia(self) -> None:
    """IK/FK round trip plus hint-based searches for the LR Mate 200iD/7L.

    NOTE(review): the test name says "fcr7ia" but it loads
    FanucLrmate200id7l.urdf -- confirm which robot is intended.
    """
    resolver = ikfast.IKFast("FanucLrmate200id7l.urdf")
    # Pose layout assumed [x, y, z, rx, ry, rz] (axis-angle rotation) --
    # TODO confirm against the ikfast.IKFast API.
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    inverse = resolver.ik(test_pose)
    # Analytic IK must return exactly these eight solution branches.
    expect: List[List[float]] = [
        [
            2.994570174457678, -1.6669017442036627, -0.051227634780463695,
            0.42134264859753107, 3.0297414721949014, -2.457962325116913
        ],
        [
            2.994570174457678, -1.6669017442036627, -0.051227634780463695,
            -2.7202500049922693, -3.0297414721949014, 0.6836303284728764
        ],
        [
            2.994570174457678, -0.16971360172061356, 3.0265378245933725,
            0.04595322756880707, 1.4589926469845675, -2.882100983937371
        ],
        [
            2.994570174457678, -0.16971360172061356, 3.0265378245933725,
            -3.095639426020993, -1.4589926469845675, 0.25949166965241854
        ],
        [
            -0.14702247913212219, -0.05654430315647474, -0.29095654371623825,
            -3.093915700981907, 1.279434998289845, -2.8906754142854263
        ],
        [
            -0.14702247913212219, -0.05654430315647474, -0.29095654371623825,
            0.047676952607882406, -1.279434998289845, 0.25091723930436327
        ],
        [
            -0.14702247913212219, 1.6690017818207956, -3.0169185736504427,
            -2.163681220897145, 3.0865202324865617, -1.8997622277531825
        ],
        [
            -0.14702247913212219, 1.6690017818207956, -3.0169185736504427,
            0.977911432692645, -3.0865202324865617, 1.241830425836607
        ]
    ]
    assert inverse is not None
    self.assertTrue(np.allclose(np.asarray(expect), inverse))
    # Forward kinematics of every IK solution must reproduce the pose.
    for idx in range(inverse.shape[0]):
        forward = resolver.fk(inverse[idx, :])
        assert forward is not None
        self.assertTrue(np.allclose(forward, test_pose))
    # An unreachable pose (translation far outside the workspace) yields
    # no hint-based solution.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    # Two seed joint configurations used to bias the search.
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNone(joints)
    # A reachable pose resolves to the hint-nearest solution.
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                -1.554760391497667, -1.8843984866224632, 0.28949219178727287,
                -0.14878476484202274, 2.5548526994125202, 1.4349097267086428
            ])))
    # Repeat both checks through the Unity-style search, which also takes
    # the robot's current joint configuration.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNone(joints)
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    # NOTE(review): the first joint (4.728...) is the ik_search result's
    # first joint (-1.554...) plus 2*pi -- presumably unwrapped toward
    # current_joints; confirm intended.
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                4.728424915681919, -1.8843984866224632, 0.28949219178727287,
                -0.14878476484202274, 2.5548526994125202, 1.4349097267086428
            ])))
def test_ikfast_ur10e(self) -> None:
    """Exercise the IKFast wrapper for the UR10e arm.

    Covers: ik() returning every analytic solution branch for a
    reachable pose, fk() round-tripping each of those solutions, and
    the hinted search entry points (ik_search / unity_ik_solve_search)
    for both an unreachable and a reachable pose.
    """
    resolver = ikfast.IKFast("ur10e.urdf")
    # Pose layout is [x, y, z, rx, ry, rz] -- presumably position plus an
    # axis-angle rotation; TODO confirm against the IKFast wrapper docs.
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    inverse = resolver.ik(test_pose)
    # Golden values: the eight 6-DOF analytic solution branches for this pose.
    expect: List[List[float]] = [
        [
            0.26371038014920006, 2.5632127944885656, 2.130956602388829,
            0.052486851447946986, -1.5060703338492818, 1.7171054720912662
        ],
        [
            0.26371038014919684, 2.6671374428526033, 2.5208619955436653,
            2.7002494635189085, 1.506070333849282, -1.4244871814985336
        ],
        [
            2.5915207323822638, -1.8177146660264059, 2.52537384343933,
            -2.3490226930855793, -1.5903658741686955, -2.237850788262641
        ],
        [
            0.26371038014483766, -1.717080607512376, -2.1309566023887507,
            2.311508151046817, -1.5060703338494312, 1.7171054720868972
        ],
        [
            0.26371038014703224, -1.3154916955612503, -2.520861995543703,
            -0.8417680213390858, 1.5060703338493528, -1.4244871815007016
        ],
        [
            2.591520732378091, -1.4302510002096036, 2.1278717290007685,
            0.8026084091259932, 1.5903658741683944, 0.9037418653229929
        ],
        [
            2.5915207323805673, 0.5700198052573013, -2.127871729000779,
            3.0580810616608023, 1.590365874168569, 0.9037418653254639
        ],
        [
            2.591520732379995, 0.48564632416723474, -2.525373843439346,
            0.39836400359934165, -1.5903658741685285, -2.237850788264904
        ]
    ]
    assert inverse is not None
    self.assertTrue(np.allclose(np.asarray(expect), inverse))
    # Every IK solution must map back to the query pose under FK.
    for idx in range(inverse.shape[0]):
        forward = resolver.fk(inverse[idx, :])
        assert forward is not None
        self.assertTrue(np.allclose(forward, test_pose))
    # ik_search(): a translation far outside the workspace yields None.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNone(joints)
    # ik_search(): a reachable pose with the same hints finds a solution.
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None  # also narrows the type for the check below
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.8590061918522973, -1.984199896619137, -2.283022773368963,
                -0.4437021074813956, 1.6556986786980046, 0.28482443103632815
            ])))
    # unity_ik_solve_search(): unreachable pose again yields None.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNone(joints)
    # unity_ik_solve_search(): reachable pose matches ik_search's answer.
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.8590061918522973, -1.984199896619137, -2.283022773368963,
                -0.4437021074813956, 1.6556986786980046, 0.28482443103632815
            ])))
def test_ikfast_fr2000ia165f(self) -> None:
    """Exercise the IKFast wrapper for the Fanuc R-2000iA/165F arm.

    Same structure as the other arm tests: ik() returning every
    analytic solution branch for a reachable pose, fk() round-tripping
    each of those solutions, and the hinted search entry points
    (ik_search / unity_ik_solve_search) for both an unreachable and a
    reachable pose.
    """
    resolver = ikfast.IKFast("FanucR2000ia165f.urdf")
    # Pose layout is [x, y, z, rx, ry, rz] -- presumably position plus an
    # axis-angle rotation; TODO confirm against the IKFast wrapper docs.
    test_pose = [0.441, -0.069, 0.296, 3.07915249, 0.17924206, 0.07207276]
    inverse = resolver.ik(test_pose)
    # Golden values: the eight 6-DOF analytic solution branches for this pose.
    expect: List[List[float]] = [
        [
            3.008767265421178, -2.3950602485110015, -0.985900131132994,
            2.7240509187664217, 3.0267767850771836, -0.1646653851240094
        ],
        [
            3.008767265421178, -2.3950602485110015, -0.985900131132994,
            -0.41754173482337853, -3.0267767850771836, 2.97692726846578
        ],
        [
            3.008767265421178, 0.4534452530700097, -2.503699724327534,
            0.047855671283256344, 1.3300054763716667, -2.9025799159024483
        ],
        [
            3.008767265421178, 0.4534452530700097, -2.503699724327534,
            -3.093736982306544, -1.3300054763716667, 0.23901273768734121
        ],
        [
            -0.13282538816862122, -1.566641780552722, -1.340762401419142,
            -3.094445791261965, 1.739882576772183, -2.8832198442544774
        ],
        [
            -0.13282538816862122, -1.566641780552722, -1.340762401419142,
            0.047146862327824834, -1.739882576772183, 0.25837280933531215
        ],
        [
            -0.13282538816862122, 2.0362517998715015, -2.148837454041386,
            -0.10219457500600466, 2.6687839851312125, 0.1593844153615318
        ],
        [
            -0.13282538816862122, 2.0362517998715015, -2.148837454041386,
            3.039398078583785, -2.6687839851312125, -2.982208238228268
        ]
    ]
    assert inverse is not None
    self.assertTrue(np.allclose(np.asarray(expect), inverse))
    # Every IK solution must map back to the query pose under FK.
    for idx in range(inverse.shape[0]):
        forward = resolver.fk(inverse[idx, :])
        assert forward is not None
        self.assertTrue(np.allclose(forward, test_pose))
    # ik_search(): a translation far outside the workspace yields None.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNone(joints)
    # ik_search(): a reachable pose with the same hints finds a solution.
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.ik_search(test_pose, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None  # also narrows the type for the check below
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.5694808119146213, -1.1130801326094413, -1.4029222239017034,
                -3.2263601374152837, 1.3047586646041642, 1.5988320979125414
            ])))
    # unity_ik_solve_search(): unreachable pose again yields None.
    test_pose = [
        10000.0, 10000.0, 10000.0, 3.11639138331322, 0.005372085246919482,
        -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNone(joints)
    # unity_ik_solve_search(): reachable pose matches ik_search's answer.
    test_pose = [
        -0.01672919342733003, 0.6336892500729367, 0.1106076186519042,
        3.11639138331322, 0.005372085246919482, -0.1276226213011794
    ]
    current_joints = [
        1.793768405914307, -2.222002168694967, -1.752705574035645,
        -0.7431128782084961, 1.653548717498779, 0.2220320701599121
    ]
    ik_hints = {
        0: [
            1.68749761581421, -1.98929464817047, -1.89353466033936,
            -0.851135075092316, 1.6761908531189, 0.265036106109619
        ],
        1: [
            1.39810276031494, -2.39948201179504, -1.09250926971436,
            -1.27997672557831, 1.65425539016724, -0.0296047367155552
        ]
    }
    joints = resolver.unity_ik_solve_search(test_pose, current_joints, ik_hints)
    self.assertIsNotNone(joints)
    assert joints is not None
    self.assertTrue(
        np.allclose(
            joints,
            np.asarray([
                1.5694808119146213, -1.1130801326094413, -1.4029222239017034,
                -3.2263601374152837, 1.3047586646041642, 1.5988320979125414
            ])))
# Allow running this test module directly, outside a test runner.
if __name__ == "__main__":
    unittest.main()
| google-research/pyreach | pyreach/ikfast/ikfast_test.py | Python | apache-2.0 | 52,224 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009-2010 Zuza Software Foundation
#
# This file is part of translate.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
# (c) 2009 Dominic König (dominic@nursix.org)
#
"""convert web2py translation dictionaries (.py) to GNU/gettext PO files"""
from translate.storage import po
class web2py2po:
    """Convert a web2py translation dictionary into a GNU gettext PO store.

    The target PO store is supplied at construction time and filled in by
    :meth:`convertstore`.
    """

    def __init__(self, pofile=None):
        # PO store that converted units are appended to.
        self.mypofile = pofile

    def convertunit(self, source_str, target_str):
        """Build a single PO unit from a source/target string pair.

        An empty/falsy ``target_str`` leaves the unit untranslated.
        """
        pounit = po.pounit(encoding="UTF-8")
        pounit.setsource(source_str)
        if target_str:
            pounit.settarget(target_str)
        return pounit

    def convertstore(self, mydict):
        """Convert the whole web2py dict ``mydict`` and return the PO store."""
        targetheader = self.mypofile.init_headers(charset="UTF-8", encoding="8bit")
        targetheader.addnote("extracted from web2py", "developer")
        # Iterate key/value pairs directly instead of a keys() loop with a
        # second lookup per key.
        for source_str, target_str in mydict.items():
            if target_str == source_str:
                # a convention with new (untranslated) web2py files
                target_str = u''
            elif target_str.startswith(u'*** '):
                # an older convention
                target_str = u''
            pounit = self.convertunit(source_str, target_str)
            self.mypofile.addunit(pounit)
        return self.mypofile
def convertpy(inputfile, outputfile, encoding="UTF-8"):
    """Convert one web2py language file to PO format.

    Returns 1 when a non-empty PO store was written to *outputfile*,
    0 when the input was not a dict or produced no units.
    """
    # NOTE(review): eval() executes arbitrary code from the input file.
    # web2py language files are dict literals from a trusted source tree,
    # but this is unsafe on untrusted input -- consider ast.literal_eval.
    contents = eval(inputfile.read())
    if not isinstance(contents, dict):
        return 0
    convertor = web2py2po(po.pofile())
    outputstore = convertor.convertstore(contents)
    if outputstore.isempty():
        return 0
    outputfile.write(str(outputstore))
    return 1
def main(argv=None):
    """Command-line entry point: wire ``convertpy`` into the convert framework."""
    from translate.convert import convert
    formats = {
        ("py", "po"): ("po", convertpy),
        ("py", None): ("po", convertpy),
    }
    option_parser = convert.ConvertOptionParser(
        formats, usetemplates=False, description=__doc__)
    option_parser.run(argv)
# Script entry point.
if __name__ == '__main__':
    main()
| jagg81/translate-toolkit | translate/convert/web2py2po.py | Python | gpl-2.0 | 2,573 |
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.tests.api import base
class BaseRouterTest(base.BaseAdminNetworkTest):
    # NOTE(salv-orlando): This class inherits from BaseAdminNetworkTest
    # as some router operations, such as enabling or disabling SNAT
    # require admin credentials by default

    @classmethod
    def resource_setup(cls):
        super(BaseRouterTest, cls).resource_setup()

    def _cleanup_router(self, router):
        """Delete *router* and drop it from the tracked-router list."""
        self.delete_router(router)
        self.routers.remove(router)

    def _create_router(self, name, admin_state_up=False,
                       external_network_id=None, enable_snat=None):
        """Create a router and register its cleanup.

        Returns the created router.
        """
        # associate a cleanup with created routers to avoid quota limits
        router = self.create_router(name, admin_state_up,
                                    external_network_id, enable_snat)
        self.addCleanup(self._cleanup_router, router)
        return router

    def _delete_router(self, router_id, network_client=None):
        """Delete a router and assert it no longer appears in the listing."""
        client = network_client or self.client
        client.delete_router(router_id)
        # Asserting that the router is not found in the list
        # after deletion
        list_body = self.client.list_routers()
        routers_list = [router['id'] for router in list_body['routers']]
        self.assertNotIn(router_id, routers_list)

    def _add_router_interface_with_subnet_id(self, router_id, subnet_id):
        """Attach a subnet to a router, registering the detach as cleanup."""
        interface = self.client.add_router_interface_with_subnet_id(
            router_id, subnet_id)
        self.addCleanup(self._remove_router_interface_with_subnet_id,
                        router_id, subnet_id)
        self.assertEqual(subnet_id, interface['subnet_id'])
        return interface

    def _remove_router_interface_with_subnet_id(self, router_id, subnet_id):
        """Detach a subnet-based interface and verify the reported subnet."""
        body = self.client.remove_router_interface_with_subnet_id(
            router_id, subnet_id)
        self.assertEqual(subnet_id, body['subnet_id'])

    def _remove_router_interface_with_port_id(self, router_id, port_id):
        """Detach a port-based interface and verify the reported port."""
        body = self.client.remove_router_interface_with_port_id(router_id,
                                                                port_id)
        self.assertEqual(port_id, body['port_id'])
| paninetworks/neutron | neutron/tests/api/base_routers.py | Python | apache-2.0 | 2,857 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from specparser import get_settings_from_file
import csv
def main():
    """Split every CSV row of the input file into two output files.

    Columns before ``split_index`` go to output1, the rest to output2.
    All file names and the split index come from ``spec.json``.
    """
    settings = get_settings_from_file("spec.json")
    print(settings)
    pivot = int(settings.Param.split_index)
    # Context managers close all three files even when an error occurs.
    # (The original leaked the output handles on exceptions and called
    # fin.close() after its ``with`` block had already closed the input.)
    # NOTE(review): "wb" with csv.writer assumes Python 2; under Python 3
    # this would need text mode with newline='' -- confirm target version.
    with open(settings.Input.input_file, "r") as fin, \
            open(settings.Output.output1, "wb") as foutput1, \
            open(settings.Output.output2, "wb") as foutput2:
        output1_writer = csv.writer(foutput1, lineterminator='\n')
        output2_writer = csv.writer(foutput2, lineterminator='\n')
        # csv.reader honours quoting and line endings, unlike the previous
        # naive line.split(',') which broke on quoted fields.
        for columns in csv.reader(fin):
            output1_writer.writerow(columns[:pivot])
            output2_writer.writerow(columns[pivot:])
    print("Done")
# Script entry point.
if __name__ == "__main__":
    main()
| dkuner/example-modules | modules/data_transmation/column_splitter/main.py | Python | bsd-3-clause | 943 |
# rhn_register.py - GUI front end code for firstboot screen resolution
#
# Copyright 2003 Red Hat, Inc.
# Copyright 2003 Brent Fox <bfox@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import gtk
import gobject
import sys
import os
import functions
import gnome, gnome.ui
from gtk import glade
from rhn_register_firstboot_gui_window import RhnRegisterFirstbootGuiWindow
sys.path.insert(0, "/usr/share/rhn/up2date_client/")
sys.path.insert(1,"/usr/share/rhn")
import rhnreg
import rhnregGui
import gettext
t = gettext.translation('rhn-client-tools', fallback=True)
_ = t.ugettext
gtk.glade.bindtextdomain("rhn-client-tools", "/usr/share/locale")
class RhnLoginWindow(RhnRegisterFirstbootGuiWindow, rhnregGui.LoginPage):
    """Firstboot screen that collects Red Hat Network login credentials."""

    #You must specify a runPriority for the order in which you wish your module to run
    runPriority = 108
    moduleName = _("Red Hat Login")
    windowTitle = moduleName
    shortMessage = _("Register with Red Hat Network")
    needsparent = 1
    needsnetwork = 1
    noSidebar = True

    def __init__(self):
        RhnRegisterFirstbootGuiWindow.__init__(self)
        rhnregGui.LoginPage.__init__(self)
        if rhnreg.registered():
            # Skip this screen entirely when the system is already registered.
            self.skipme = True

    def updatePage(self):
        """Prepare the login page each time the screen is (re)shown."""
        # self.getCaps()
        self.loginPagePrepare()
        self.goingNextFromNewAccountDialog = False

    def grabFocus(self):
        # We must set focus where we want it here. Setting it in updatePage
        # doesn't work.
        self.loginXml.get_widget("loginUserEntry").grab_focus()

    def _getVbox(self):
        # Widget container firstboot embeds for this screen.
        return self.loginPageVbox()

    def apply(self, *args):
        """Returns None to stay on the same page. Anything else will cause
        firstboot to advance but True is generally used. This is different from
        the gnome druid in rhn_register.
        """
        if self.doDebug:
            print "applying rhn_login_gui"
        # This is a hack. This function will get called if they click next on
        # the login page (the else) or when they create an account (the if). In
        # that case we don't want to do the normal logging in stuff.
        if self.goingNextFromNewAccountDialog:
            assert rhnregGui.newAccount is True
        else:
            if self.loginPageVerify():
                return None
            assert rhnregGui.newAccount is False
            if self.loginPageApply():
                return None
        # We should try to activate hardware, even if no EUS in firstboot
        rhnregGui.try_to_activate_hardware()
        self.parent.setPage('rhn_create_profile_gui')
        return True

    def goToPageAfterLogin(self):
        # This is a hack. More info above.
        self.goingNextFromNewAccountDialog = True
        self.parent.nextClicked()

    def onLoginPageNext(self, dummy=None, dummy2=None):
        # This is a hackish way to support enter advancing
        self.parent.nextClicked()
childWindow = RhnLoginWindow
| xkollar/spacewalk | client/debian/packages-already-in-debian/rhn-client-tools/src/firstboot-legacy-rhel5/rhn_login_gui.py | Python | gpl-2.0 | 3,554 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.