repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
ryfeus/lambda-packs
|
refs/heads/master
|
HDF4_H5_NETCDF/source2.7/numpy/core/tests/test_mem_overlap.py
|
14
|
from __future__ import division, absolute_import, print_function
import sys
import itertools
import pytest
import numpy as np
from numpy.core._multiarray_tests import solve_diophantine, internal_overlap
from numpy.core import _umath_tests
from numpy.lib.stride_tricks import as_strided
from numpy.compat import long
from numpy.testing import (
assert_, assert_raises, assert_equal, assert_array_equal, assert_allclose
)
# Python 2/3 compatibility: use the lazy range type under both versions.
if sys.version_info[0] >= 3:
    xrange = range

# Geometry of the arrays used by the overlapping-assignment tests below.
ndims = 2
size = 10
shape = tuple([size] * ndims)

# Sentinel max_work values for np.may_share_memory:
#   MAY_SHARE_BOUNDS (0)  -> only compare memory extents (cheap bounds check)
#   MAY_SHARE_EXACT (-1)  -> solve the overlap problem exactly
MAY_SHARE_BOUNDS = 0
MAY_SHARE_EXACT = -1
def _indices_for_nelems(nelems):
    """Return index objects selecting ``nelems`` elements around the middle.

    For ``nelems == 0`` a single integer index is returned; otherwise a
    list of slices covering every (step, direction) combination.
    """
    if nelems == 0:
        # Scalar index at the midpoint of the axis.
        return [size // 2]

    slices = []
    for stride in (1, 2):
        for direction in (-1, 1):
            signed_step = stride * direction
            begin = size // 2 - nelems * signed_step // 2
            end = begin + nelems * signed_step
            slices.append(slice(begin, end, signed_step))
    return slices
def _indices_for_axis():
    """Return all (src, dst) index pairs to exercise along a single axis."""
    pairs = []
    for nelems in (0, 2, 3):
        candidates = _indices_for_nelems(nelems)
        # Cartesian square: every candidate source paired with every
        # candidate destination of the same element count.
        pairs.extend((src, dst) for src in candidates for dst in candidates)
    return pairs
def _indices(ndims):
    """Return all combinations of per-axis (src, dst) pairs for ``ndims`` axes."""
    axis_pairs = _indices_for_axis()

    # Build the cartesian product axis by axis.  Each new axis is prepended
    # to the partial results, reproducing the original construction order.
    combos = [[]]
    for _ in range(ndims):
        combos = [[pair] + tail for pair in axis_pairs for tail in combos]
    return combos
def _check_assignment(srcidx, dstidx):
    """Check that ``arr[dstidx] = arr[srcidx]`` handles memory overlap.

    The overlapping in-place assignment must give the same result as
    assigning through an independent copy of the array.
    """
    # np.prod is the supported spelling; np.product was a deprecated alias
    # that has been removed in NumPy 2.0.
    arr = np.arange(np.prod(shape)).reshape(shape)

    cpy = arr.copy()

    # Reference result through a copy, then the potentially-overlapping one.
    cpy[dstidx] = arr[srcidx]
    arr[dstidx] = arr[srcidx]

    assert_(np.all(arr == cpy),
            'assigning arr[%s] = arr[%s]' % (dstidx, srcidx))
def test_overlapping_assignments():
    """Run all automatically generated overlapping-assignment cases."""
    for index_pairs in _indices(ndims):
        src = tuple(pair[0] for pair in index_pairs)
        dst = tuple(pair[1] for pair in index_pairs)
        _check_assignment(src, dst)
@pytest.mark.slow
def test_diophantine_fuzz():
    """Fuzz test the Diophantine solver.

    Random bounded problems sum(A[i]*x[i]) == b, 0 <= x[i] <= U[i] are fed
    to solve_diophantine.  Infeasible answers are cross-checked by brute
    force when the search space is small enough; feasible answers are
    verified directly against the constraints.
    """
    rng = np.random.RandomState(1234)

    max_int = np.iinfo(np.intp).max

    for ndim in range(10):
        feasible_count = 0
        infeasible_count = 0

        min_count = 500//(ndim + 1)

        while min(feasible_count, infeasible_count) < min_count:
            # Ensure big and small integer problems
            A_max = 1 + rng.randint(0, 11, dtype=np.intp)**6
            U_max = rng.randint(0, 11, dtype=np.intp)**6

            A_max = min(max_int, A_max)
            U_max = min(max_int-1, U_max)

            A = tuple(int(rng.randint(1, A_max+1, dtype=np.intp))
                      for j in range(ndim))
            U = tuple(int(rng.randint(0, U_max+2, dtype=np.intp))
                      for j in range(ndim))

            b_ub = min(max_int-2, sum(a*ub for a, ub in zip(A, U)))
            b = rng.randint(-1, b_ub+2, dtype=np.intp)

            # ndim == 0 problems are only feasible for b == 0; force that
            # until the feasible quota is met, otherwise the loop never ends.
            if ndim == 0 and feasible_count < min_count:
                b = 0

            X = solve_diophantine(A, U, b)

            if X is None:
                # Check the simplified decision problem agrees
                X_simplified = solve_diophantine(A, U, b, simplify=1)
                assert_(X_simplified is None, (A, U, b, X_simplified))

                # Check no solution exists (provided the problem is
                # small enough so that brute force checking doesn't
                # take too long)
                try:
                    ranges = tuple(xrange(0, a*ub+1, a) for a, ub in zip(A, U))
                except OverflowError:
                    # xrange on 32-bit Python 2 may overflow
                    continue

                size = 1
                for r in ranges:
                    size *= len(r)

                if size < 100000:
                    assert_(not any(sum(w) == b for w in itertools.product(*ranges)))
                    infeasible_count += 1
            else:
                # Check the simplified decision problem agrees
                X_simplified = solve_diophantine(A, U, b, simplify=1)
                assert_(X_simplified is not None, (A, U, b, X_simplified))

                # Check validity
                assert_(sum(a*x for a, x in zip(A, X)) == b)
                assert_(all(0 <= x <= ub for x, ub in zip(X, U)))
                feasible_count += 1
def test_diophantine_overflow():
    """Smoke test that integer overflow is detected/handled by the solver."""
    max_intp = np.iinfo(np.intp).max
    max_int64 = np.iinfo(np.int64).max

    if max_int64 <= max_intp:
        # Solving this instance requires intermediate values that do not
        # fit in 64 bits, so it exercises the internal 128-bit arithmetic.
        half = max_int64 // 2
        A = (half, half - 10)
        U = (half, half - 10)
        b = 2*half - 10
        assert_equal(solve_diophantine(A, U, b), (1, 1))
def check_may_share_memory_exact(a, b):
    """Verify the exact may_share_memory answer against a brute-force probe."""
    got = np.may_share_memory(a, b, max_work=MAY_SHARE_EXACT)

    # The default mode must agree with the pure bounds check.
    assert_equal(np.may_share_memory(a, b),
                 np.may_share_memory(a, b, max_work=MAY_SHARE_BOUNDS))

    # Ground truth: writing ones through `a` is visible in `b` iff the two
    # arrays physically share elements.
    a.fill(0)
    b.fill(0)
    a.fill(1)
    exact = b.any()

    err_msg = ""
    if got != exact:
        base_delta = (a.__array_interface__['data'][0]
                      - b.__array_interface__['data'][0])
        details = [
            "base_a - base_b = %r" % (base_delta,),
            "shape_a = %r" % (a.shape,),
            "shape_b = %r" % (b.shape,),
            "strides_a = %r" % (a.strides,),
            "strides_b = %r" % (b.strides,),
            "size_a = %r" % (a.size,),
            "size_b = %r" % (b.size,)
        ]
        err_msg = " " + "\n    ".join(details)

    assert_equal(got, exact, err_msg=err_msg)
def test_may_share_memory_manual():
    """Manual test cases for may_share_memory, over many stride sign
    combinations of contiguous and already-strided base arrays."""

    # Base arrays
    xs0 = [
        np.zeros([13, 21, 23, 22], dtype=np.int8),
        np.zeros([13, 21, 23*2, 22], dtype=np.int8)[:,:,::2,:]
    ]

    # Generate all negative stride combinations
    xs = []
    for x in xs0:
        for ss in itertools.product(*(([slice(None), slice(None, None, -1)],)*4)):
            xp = x[ss]
            xs.append(xp)

    for x in xs:
        # The default is a simple extent check
        assert_(np.may_share_memory(x[:,0,:], x[:,1,:]))
        assert_(np.may_share_memory(x[:,0,:], x[:,1,:], max_work=None))

        # Exact checks
        check_may_share_memory_exact(x[:,0,:], x[:,1,:])
        check_may_share_memory_exact(x[:,::7], x[:,3::3])

        # Reinterpreting as a wider dtype is only possible for arrays that
        # don't own their data (i.e. genuine views); others are skipped.
        try:
            xp = x.ravel()
            if xp.flags.owndata:
                continue
            xp = xp.view(np.int16)
        except ValueError:
            continue

        # 0-size arrays cannot overlap
        check_may_share_memory_exact(x.ravel()[6:6],
                                     xp.reshape(13, 21, 23, 11)[:,::7])

        # Test itemsize is dealt with
        check_may_share_memory_exact(x[:,::7],
                                     xp.reshape(13, 21, 23, 11))
        check_may_share_memory_exact(x[:,::7],
                                     xp.reshape(13, 21, 23, 11)[:,3::3])
        check_may_share_memory_exact(x.ravel()[6:7],
                                     xp.reshape(13, 21, 23, 11)[:,::7])

    # Check unit size
    x = np.zeros([1], dtype=np.int8)
    check_may_share_memory_exact(x, x)
    check_may_share_memory_exact(x, x.copy())
def iter_random_view_pairs(x, same_steps=True, equal_size=False):
    """Yield an endless stream of random view pairs (a, b) into `x`.

    same_steps: use the same slice steps for both views.
    equal_size: make the two views contain the same number of elements
    (mutually exclusive with same_steps).

    NOTE: callers rely on the fixed RNG seed, so the generated sequence of
    pairs is deterministic.
    """
    rng = np.random.RandomState(1234)

    if equal_size and same_steps:
        raise ValueError()

    def random_slice(n, step):
        # Random sub-range of an axis of length n; negated direction half
        # the time.
        start = rng.randint(0, n+1, dtype=np.intp)
        stop = rng.randint(start, n+1, dtype=np.intp)
        if rng.randint(0, 2, dtype=np.intp) == 0:
            stop, start = start, stop
            step *= -1
        return slice(start, stop, step)

    def random_slice_fixed_size(n, step, size):
        # Random slice of an axis of length n selecting exactly `size`
        # elements with the given step; possibly reversed.
        start = rng.randint(0, n+1 - size*step)
        stop = start + (size-1)*step + 1
        if rng.randint(0, 2) == 0:
            stop, start = start-1, stop-1
            if stop < 0:
                stop = None
            step *= -1
        return slice(start, stop, step)

    # First a few regular views
    yield x, x
    for j in range(1, 7, 3):
        yield x[j:], x[:-j]
        yield x[...,j:], x[...,:-j]

    # An array with zero stride internal overlap
    strides = list(x.strides)
    strides[0] = 0
    xp = as_strided(x, shape=x.shape, strides=strides)
    yield x, xp
    yield xp, xp

    # An array with non-zero stride internal overlap
    strides = list(x.strides)
    if strides[0] > 1:
        strides[0] = 1
    xp = as_strided(x, shape=x.shape, strides=strides)
    yield x, xp
    yield xp, xp

    # Then discontiguous views
    while True:
        steps = tuple(rng.randint(1, 11, dtype=np.intp)
                      if rng.randint(0, 5, dtype=np.intp) == 0 else 1
                      for j in range(x.ndim))
        s1 = tuple(random_slice(p, s) for p, s in zip(x.shape, steps))

        t1 = np.arange(x.ndim)
        rng.shuffle(t1)

        if equal_size:
            t2 = t1
        else:
            t2 = np.arange(x.ndim)
            rng.shuffle(t2)

        a = x[s1]

        if equal_size:
            if a.size == 0:
                continue

            # Second view must match a's shape exactly, so the steps are
            # bounded by what still fits in each axis.
            steps2 = tuple(rng.randint(1, max(2, p//(1+pa)))
                           if rng.randint(0, 5) == 0 else 1
                           for p, s, pa in zip(x.shape, s1, a.shape))
            s2 = tuple(random_slice_fixed_size(p, s, pa)
                       for p, s, pa in zip(x.shape, steps2, a.shape))
        elif same_steps:
            steps2 = steps
        else:
            steps2 = tuple(rng.randint(1, 11, dtype=np.intp)
                           if rng.randint(0, 5, dtype=np.intp) == 0 else 1
                           for j in range(x.ndim))

        if not equal_size:
            s2 = tuple(random_slice(p, s) for p, s in zip(x.shape, steps2))

        a = a.transpose(t1)
        b = x[s2].transpose(t2)

        yield a, b
def check_may_share_memory_easy_fuzz(get_max_work, same_steps, min_count):
    """Fuzz np.may_share_memory over random view pairs.

    Checks that a work budget of ``get_max_work(a, b)`` already yields the
    exact overlap answer, and that the default call agrees with the pure
    bounds check.
    """
    base = np.zeros([17,34,71,97], dtype=np.int16)

    n_feasible = 0
    n_infeasible = 0

    views = iter_random_view_pairs(base, same_steps)

    while min(n_feasible, n_infeasible) < min_count:
        a, b = next(views)

        bounds_overlap = np.may_share_memory(a, b)
        may_share_answer = np.may_share_memory(a, b)
        easy_answer = np.may_share_memory(a, b, max_work=get_max_work(a, b))
        exact_answer = np.may_share_memory(a, b, max_work=MAY_SHARE_EXACT)

        if easy_answer != exact_answer:
            # assert_equal is slow, so only pay for it on failure.
            assert_equal(easy_answer, exact_answer)

        if may_share_answer != bounds_overlap:
            assert_equal(may_share_answer, bounds_overlap)

        # Tally feasible/infeasible bound-overlapping problems so the run
        # exercises a sufficient number of both kinds.
        if bounds_overlap:
            if exact_answer:
                n_feasible += 1
            else:
                n_infeasible += 1
@pytest.mark.slow
def test_may_share_memory_easy_fuzz():
    """Overlap problems with common strides must be solved with a work
    budget of a single unit."""
    check_may_share_memory_easy_fuzz(
        get_max_work=lambda a, b: 1,
        same_steps=True,
        min_count=2000)
@pytest.mark.slow
def test_may_share_memory_harder_fuzz():
    """Overlap problems with differing strides need more work.

    The work bound below can't be reduced much.  Harder problems may exist
    but not be detected here, as the set of problems comes from an RNG.
    """
    check_may_share_memory_easy_fuzz(
        get_max_work=lambda a, b: max(a.size, b.size)//2,
        same_steps=False,
        min_count=2000)
def test_shares_memory_api():
    """Exercise the public np.shares_memory API surface."""
    x = np.zeros([4, 5, 6], dtype=np.int8)

    # Identity always shares; a disjoint copy never does.
    assert_equal(np.shares_memory(x, x), True)
    assert_equal(np.shares_memory(x, x.copy()), False)

    a = x[:,::2,::3]
    b = x[:,::3,::2]
    assert_equal(np.shares_memory(a, b), True)
    assert_equal(np.shares_memory(a, b, max_work=None), True)

    # An insufficient work budget raises, for both int and long budgets.
    assert_raises(np.TooHardError, np.shares_memory, a, b, max_work=1)
    assert_raises(np.TooHardError, np.shares_memory, a, b, max_work=long(1))
def test_may_share_memory_bad_max_work():
    """Absurdly large max_work values must raise OverflowError."""
    x = np.zeros([1])
    huge = 10**100
    assert_raises(OverflowError, np.may_share_memory, x, x, max_work=huge)
    assert_raises(OverflowError, np.shares_memory, x, x, max_work=huge)
def test_internal_overlap_diophantine():
    """Smoke test solve_diophantine with require_ub_nontrivial=1."""

    def verify(A, U, exists=None):
        # Ask for a nontrivial solution of sum(a*x) == sum(a*u//2).
        X = solve_diophantine(A, U, 0, require_ub_nontrivial=1)

        if exists is None:
            exists = (X is not None)

        if X is not None:
            # The solution must hit the target sum, stay within bounds,
            # and differ from the trivial midpoint solution.
            assert_(sum(a*x for a, x in zip(A, X)) == sum(a*u//2 for a, u in zip(A, U)))
            assert_(all(0 <= x <= u for x, u in zip(X, U)))
            assert_(any(x != u//2 for x, u in zip(X, U)))

        if exists:
            assert_(X is not None, repr(X))
        else:
            assert_(X is None, repr(X))

    # Smoke tests
    verify((3, 2), (2*2, 3*2), exists=True)
    verify((3*2, 2), (15*2, (3-1)*2), exists=False)
def test_internal_overlap_slices():
    """Slicing and transposing an array never creates internal overlap."""
    x = np.zeros([17,34,71,97], dtype=np.int16)

    rng = np.random.RandomState(1234)

    def random_slice(n, step):
        # Random sub-range of an axis; flipped to a negative step half
        # the time.
        begin = rng.randint(0, n+1, dtype=np.intp)
        end = rng.randint(begin, n+1, dtype=np.intp)
        if rng.randint(0, 2, dtype=np.intp) == 0:
            end, begin = begin, end
            step *= -1
        return slice(begin, end, step)

    for _ in range(5000):
        steps = tuple(rng.randint(1, 11, dtype=np.intp)
                      if rng.randint(0, 5, dtype=np.intp) == 0 else 1
                      for j in range(x.ndim))
        perm = np.arange(x.ndim)
        rng.shuffle(perm)

        view = x[tuple(random_slice(p, s) for p, s in zip(x.shape, steps))]
        view = view.transpose(perm)

        assert_(not internal_overlap(view))
def check_internal_overlap(a, manual_expected=None):
    """Compare internal_overlap(a) against a brute-force offset scan.

    Returns internal_overlap(a).
    """
    got = internal_overlap(a)

    # Brute force: enumerate every index vector and record its byte offset.
    # A repeated offset means two indices alias the same memory location.
    seen = set()
    expected = False
    for idx in itertools.product(*tuple(xrange(n) for n in a.shape)):
        offset = sum(stride * i for stride, i in zip(a.strides, idx))
        if offset in seen:
            expected = True
            break
        seen.add(offset)

    # Compare
    if got != expected:
        assert_equal(got, expected, err_msg=repr((a.strides, a.shape)))

    if manual_expected is not None and expected != manual_expected:
        assert_equal(expected, manual_expected)

    return got
def test_internal_overlap_manual():
    """Hand-picked stride-trick arrays with known internal-overlap status."""
    # The arrays are never read or written, so the out-of-bounds strides
    # below are harmless.
    x = np.arange(1).astype(np.int8)

    # Low-dimensional special cases.
    check_internal_overlap(x, False)              # 1-dim
    check_internal_overlap(x.reshape([]), False)  # 0-dim

    # (strides, shape, expected overlap) cases.
    cases = [
        ((3, 4), (4, 4), False),
        ((3, 4), (5, 4), True),
        ((0,), (0,), False),
        ((0,), (1,), False),
        ((0,), (2,), True),
        ((0, -9993), (87, 22), True),
        ((0, -9993), (1, 22), False),
        ((0, -9993), (0, 22), False),
    ]
    for strides, shape, expected in cases:
        check_internal_overlap(as_strided(x, strides=strides, shape=shape),
                               expected)
def test_internal_overlap_fuzz():
    """Fuzz internal_overlap() against the brute-force checker."""
    # The brute-force check is fairly slow, so keep the target counts low.
    x = np.arange(1).astype(np.int8)

    n_overlap = 0
    n_clean = 0

    rng = np.random.RandomState(1234)

    while min(n_overlap, n_clean) < 100:
        ndim = rng.randint(1, 4, dtype=np.intp)

        strides = tuple(rng.randint(-1000, 1000, dtype=np.intp)
                        for j in range(ndim))
        shape = tuple(rng.randint(1, 30, dtype=np.intp)
                      for j in range(ndim))

        arr = as_strided(x, strides=strides, shape=shape)

        if check_internal_overlap(arr):
            n_overlap += 1
        else:
            n_clean += 1
def test_non_ndarray_inputs():
    """Regression check for gh-5604: array-likes as shares_memory inputs."""

    class ViaInterface(object):
        # Exposes the wrapped array only through __array_interface__.
        def __init__(self, data):
            self.data = data

        @property
        def __array_interface__(self):
            return self.data.__array_interface__

    class ViaArray(object):
        # Exposes the wrapped array only through __array__.
        def __init__(self, data):
            self.data = data

        def __array__(self):
            return self.data

    for wrapper in (ViaInterface, ViaArray):
        x = np.arange(5)

        # Interleaved views: extents overlap, elements do not.
        assert_(np.may_share_memory(wrapper(x[::2]), x[1::2]))
        assert_(not np.shares_memory(wrapper(x[::2]), x[1::2]))

        # These views genuinely share elements.
        assert_(np.shares_memory(wrapper(x[1::3]), x[::2]))
        assert_(np.may_share_memory(wrapper(x[1::3]), x[::2]))
def view_element_first_byte(x):
    """Return a byte-typed array aliasing the first byte of each element of `x`."""
    from numpy.lib.stride_tricks import DummyArray
    iface = dict(x.__array_interface__)
    # Reinterpret each element as a single boolean byte ('|b1'); the shape
    # and strides from `x` keep each entry aligned with an element start.
    iface['typestr'] = '|b1'
    iface['descr'] = [('', '|b1')]
    return np.asarray(DummyArray(iface, x))
def assert_copy_equivalent(operation, args, out, **kwargs):
    """Check that writing into ``out`` in place matches writing into a copy.

    ``operation(*args, out=out)`` must produce the same result as
    ``out[...] = operation(*args, out=out.copy())``.
    """
    kwargs['out'] = out
    safe_kwargs = dict(kwargs)
    safe_kwargs['out'] = out.copy()

    saved = out.copy()

    # Reference result computed through a non-aliased output buffer.
    out[...] = operation(*args, **safe_kwargs)
    expected = out.copy()
    out[...] = saved

    # Now run the potentially-overlapping in-place variant.
    got = operation(*args, **kwargs).copy()

    if (got != expected).any():
        assert_equal(got, expected)
class TestUFunc(object):
    """
    Test ufunc call memory overlap handling
    """

    def check_unary_fuzz(self, operation, get_out_axis_size, dtype=np.int16,
                         count=5000):
        # Fuzz `operation(a, out=b)` over random (possibly overlapping)
        # view pairs, comparing in-place results against copy-based
        # reference results via assert_copy_equivalent.
        shapes = [7, 13, 8, 21, 29, 32]
        rng = np.random.RandomState(1234)

        for ndim in range(1, 6):
            x = rng.randint(0, 2**16, size=shapes[:ndim]).astype(dtype)

            it = iter_random_view_pairs(x, same_steps=False, equal_size=True)

            min_count = count // (ndim + 1)**2

            overlapping = 0
            while overlapping < min_count:
                a, b = next(it)

                a_orig = a.copy()
                b_orig = b.copy()

                if get_out_axis_size is None:
                    # Plain element-wise call: output shape equals input shape.
                    assert_copy_equivalent(operation, [a], out=b)

                    if np.shares_memory(a, b):
                        overlapping += 1
                else:
                    for axis in itertools.chain(range(ndim), [None]):
                        a[...] = a_orig
                        b[...] = b_orig

                        # Determine size for reduction axis (None if scalar)
                        outsize, scalarize = get_out_axis_size(a, b, axis)
                        if outsize == 'skip':
                            continue

                        # Slice b to get an output array of the correct size
                        sl = [slice(None)] * ndim
                        if axis is None:
                            if outsize is None:
                                sl = [slice(0, 1)] + [0]*(ndim - 1)
                            else:
                                sl = [slice(0, outsize)] + [0]*(ndim - 1)
                        else:
                            if outsize is None:
                                k = b.shape[axis]//2
                                if ndim == 1:
                                    sl[axis] = slice(k, k + 1)
                                else:
                                    sl[axis] = k
                            else:
                                assert b.shape[axis] >= outsize
                                sl[axis] = slice(0, outsize)
                        b_out = b[tuple(sl)]

                        if scalarize:
                            b_out = b_out.reshape([])

                        if np.shares_memory(a, b_out):
                            overlapping += 1

                        # Check result
                        assert_copy_equivalent(operation, [a], out=b_out,
                                               axis=axis)

    @pytest.mark.slow
    def test_unary_ufunc_call_fuzz(self):
        # np.invert is element-wise, so no reduction-axis handling is needed.
        self.check_unary_fuzz(np.invert, None, np.int16)

    def test_binary_ufunc_accumulate_fuzz(self):
        def get_out_axis_size(a, b, axis):
            if axis is None:
                if a.ndim == 1:
                    return a.size, False
                else:
                    return 'skip', False  # accumulate doesn't support this
            else:
                return a.shape[axis], False

        self.check_unary_fuzz(np.add.accumulate, get_out_axis_size,
                              dtype=np.int16, count=500)

    def test_binary_ufunc_reduce_fuzz(self):
        def get_out_axis_size(a, b, axis):
            # reduce collapses the axis; output is scalar along it.
            return None, (axis is None or a.ndim == 1)

        self.check_unary_fuzz(np.add.reduce, get_out_axis_size,
                              dtype=np.int16, count=500)

    def test_binary_ufunc_reduceat_fuzz(self):
        def get_out_axis_size(a, b, axis):
            if axis is None:
                if a.ndim == 1:
                    return a.size, False
                else:
                    return 'skip', False  # reduceat doesn't support this
            else:
                return a.shape[axis], False

        def do_reduceat(a, out, axis):
            # Build evenly spaced reduceat indices so the output length
            # matches `out`.
            if axis is None:
                size = len(a)
                step = size//len(out)
            else:
                size = a.shape[axis]
                step = a.shape[axis] // out.shape[axis]
            idx = np.arange(0, size, step)
            return np.add.reduceat(a, idx, out=out, axis=axis)

        self.check_unary_fuzz(do_reduceat, get_out_axis_size,
                              dtype=np.int16, count=500)

    def test_binary_ufunc_reduceat_manual(self):
        def check(ufunc, a, ind, out):
            # Reference on copies vs the potentially-overlapping call.
            c1 = ufunc.reduceat(a.copy(), ind.copy(), out=out.copy())
            c2 = ufunc.reduceat(a, ind, out=out)
            assert_array_equal(c1, c2)

        # Exactly same input/output arrays
        a = np.arange(10000, dtype=np.int16)
        check(np.add, a, a[::-1].copy(), a)

        # Overlap with index
        a = np.arange(10000, dtype=np.int16)
        check(np.add, a, a[::-1], a)

    def test_unary_gufunc_fuzz(self):
        shapes = [7, 13, 8, 21, 29, 32]
        gufunc = _umath_tests.euclidean_pdist

        rng = np.random.RandomState(1234)

        for ndim in range(2, 6):
            x = rng.rand(*shapes[:ndim])

            it = iter_random_view_pairs(x, same_steps=False, equal_size=True)

            min_count = 500 // (ndim + 1)**2

            overlapping = 0
            while overlapping < min_count:
                a, b = next(it)

                if min(a.shape[-2:]) < 2 or min(b.shape[-2:]) < 2 or a.shape[-1] < 2:
                    continue

                # Ensure the shapes are so that euclidean_pdist is happy
                if b.shape[-1] > b.shape[-2]:
                    b = b[...,0,:]
                else:
                    b = b[...,:,0]

                n = a.shape[-2]
                p = n * (n - 1) // 2
                if p <= b.shape[-1] and p > 0:
                    b = b[...,:p]
                else:
                    n = max(2, int(np.sqrt(b.shape[-1]))//2)
                    p = n * (n - 1) // 2
                    a = a[...,:n,:]
                    b = b[...,:p]

                # Call
                if np.shares_memory(a, b):
                    overlapping += 1

                with np.errstate(over='ignore', invalid='ignore'):
                    assert_copy_equivalent(gufunc, [a], out=b)

    def test_ufunc_at_manual(self):
        def check(ufunc, a, ind, b=None):
            # Run ufunc.at on copies (reference) and on the originals
            # (potentially overlapping), then compare.
            a0 = a.copy()

            if b is None:
                ufunc.at(a0, ind.copy())
                c1 = a0.copy()

                ufunc.at(a, ind)
                c2 = a.copy()
            else:
                ufunc.at(a0, ind.copy(), b.copy())
                c1 = a0.copy()

                ufunc.at(a, ind, b)
                c2 = a.copy()

            assert_array_equal(c1, c2)

        # Overlap with index
        a = np.arange(10000, dtype=np.int16)
        check(np.invert, a[::-1], a)

        # Overlap with second data array
        a = np.arange(100, dtype=np.int16)
        ind = np.arange(0, 100, 2, dtype=np.int16)
        check(np.add, a, ind, a[25:75])

    def test_unary_ufunc_1d_manual(self):
        # Exercise branches in PyArray_EQUIVALENTLY_ITERABLE

        def check(a, b):
            a_orig = a.copy()
            b_orig = b.copy()

            b0 = b.copy()
            c1 = ufunc(a, out=b0)
            c2 = ufunc(a, out=b)
            assert_array_equal(c1, c2)

            # Trigger "fancy ufunc loop" code path
            mask = view_element_first_byte(b).view(np.bool_)

            a[...] = a_orig
            b[...] = b_orig
            c1 = ufunc(a, out=b.copy(), where=mask.copy()).copy()

            a[...] = a_orig
            b[...] = b_orig
            c2 = ufunc(a, out=b, where=mask.copy()).copy()

            # Also, mask overlapping with output
            a[...] = a_orig
            b[...] = b_orig
            c3 = ufunc(a, out=b, where=mask).copy()

            assert_array_equal(c1, c2)
            assert_array_equal(c1, c3)

        dtypes = [np.int8, np.int16, np.int32, np.int64, np.float32,
                  np.float64, np.complex64, np.complex128]
        dtypes = [np.dtype(x) for x in dtypes]

        for dtype in dtypes:
            if np.issubdtype(dtype, np.integer):
                ufunc = np.invert
            else:
                ufunc = np.reciprocal

            n = 1000
            k = 10
            indices = [
                np.index_exp[:n],
                np.index_exp[k:k+n],
                np.index_exp[n-1::-1],
                np.index_exp[k+n-1:k-1:-1],
                np.index_exp[:2*n:2],
                np.index_exp[k:k+2*n:2],
                np.index_exp[2*n-1::-2],
                np.index_exp[k+2*n-1:k-1:-2],
            ]

            for xi, yi in itertools.product(indices, indices):
                v = np.arange(1, 1 + n*2 + k, dtype=dtype)
                x = v[xi]
                y = v[yi]

                with np.errstate(all='ignore'):
                    check(x, y)

                    # Scalar cases
                    check(x[:1], y)
                    check(x[-1:], y)
                    check(x[:1].reshape([]), y)
                    check(x[-1:].reshape([]), y)

    def test_unary_ufunc_where_same(self):
        # Check behavior at wheremask overlap
        ufunc = np.invert

        def check(a, out, mask):
            c1 = ufunc(a, out=out.copy(), where=mask.copy())
            c2 = ufunc(a, out=out, where=mask)
            assert_array_equal(c1, c2)

        # Check behavior with same input and output arrays
        x = np.arange(100).astype(np.bool_)
        check(x, x, x)
        check(x, x.copy(), x)
        check(x, x, x.copy())

    @pytest.mark.slow
    def test_binary_ufunc_1d_manual(self):
        ufunc = np.add

        def check(a, b, c):
            c0 = c.copy()
            c1 = ufunc(a, b, out=c0)
            c2 = ufunc(a, b, out=c)
            assert_array_equal(c1, c2)

        for dtype in [np.int8, np.int16, np.int32, np.int64,
                      np.float32, np.float64, np.complex64, np.complex128]:
            # Check different data dependency orders

            n = 1000
            k = 10

            indices = []
            for p in [1, 2]:
                indices.extend([
                    np.index_exp[:p*n:p],
                    np.index_exp[k:k+p*n:p],
                    np.index_exp[p*n-1::-p],
                    np.index_exp[k+p*n-1:k-1:-p],
                ])

            for x, y, z in itertools.product(indices, indices, indices):
                v = np.arange(6*n).astype(dtype)
                x = v[x]
                y = v[y]
                z = v[z]

                check(x, y, z)

                # Scalar cases
                check(x[:1], y, z)
                check(x[-1:], y, z)
                check(x[:1].reshape([]), y, z)
                check(x[-1:].reshape([]), y, z)
                check(x, y[:1], z)
                check(x, y[-1:], z)
                check(x, y[:1].reshape([]), z)
                check(x, y[-1:].reshape([]), z)

    def test_inplace_op_simple_manual(self):
        rng = np.random.RandomState(1234)
        x = rng.rand(200, 200)  # bigger than bufsize

        x += x.T
        assert_array_equal(x - x.T, 0)
|
jayme-github/CouchPotatoServer
|
refs/heads/master
|
libs/requests/packages/chardet2/euctwfreq.py
|
323
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# EUCTW frequency table
# Converted from big5 work
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ratio = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
# NOTE(review): presumably the threshold the EUC-TW distribution analyser
# compares its measured ratio against — confirm against the consumer module.
EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75

# Char to FreqOrder table ,
# Number of leading entries in the table below that matter for detection;
# entries past this index are marked "of no interest" further down.
EUCTW_TABLE_SIZE = 8102
EUCTWCharToFreqOrder = ( \
1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
#Everything below is of no interest for detection purpose
2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
|
nojero/pod
|
refs/heads/master
|
src/neg/example.py
|
1
|
#Negreada para probar BEGIN
class On:
    """Tiny container object holding a list of transitions (test fixture)."""

    def __init__(self):
        """Start with no transitions; they are appended by the fixture below."""
        self.trans = []
class Transition:
    """A labelled, named transition (test fixture); fields are filled in after construction."""

    def __init__(self):
        """Both attributes default to the empty string."""
        self.label = ''
        self.name = ''
# Build one On object holding five labelled transitions (e0..e4).
on1 = On()
ta = Transition()
ta.label = '1'
ta.name = 'e0'
tb = Transition()
tb.label = '4'
tb.name = 'e1'
tc = Transition()
tc.label = '1'
tc.name = 'e2'
td = Transition()
td.label = '2'
td.name = 'e3'
te = Transition()
te.label = '2'
te.name = 'e4'
on1.trans.append(ta)
on1.trans.append(tb)
on1.trans.append(tc)
on1.trans.append(td)
on1.trans.append(te)
# Dummy data for testing END  (translated from Spanish: "Negreada para probar END")
|
d5ve/mailinabox
|
refs/heads/master
|
setup/migrate.py
|
7
|
#!/usr/bin/python3
# Migrates any file structures, database schemas, etc. between versions of Mail-in-a-Box.
# We have to be careful here that any dependencies are already installed in the previous
# version since this script runs before all other aspects of the setup script.
import sys, os, os.path, glob, re, shutil
sys.path.insert(0, 'management')
from utils import load_environment, save_environment, shell
def migration_1(env):
    """Re-arrange where we store SSL certificates. There was a typo also.

    Moves files out of $STORAGE_ROOT/ssl/domains/<domain>_<type> into
    $STORAGE_ROOT/ssl/<domain>/<type>, renaming the mis-spelled file types
    along the way, then removes the old directory if it ended up empty.
    """
    def move_file(fn, domain_name_escaped, file_type):
        # Moves an SSL-related file into the right place.
        # BUGFIX: the parameter was previously named `filename` while the body
        # read `file_type` from the enclosing loop's scope; it only worked by
        # accident of closure binding. The parameter now matches its use.
        fn1 = os.path.join( env["STORAGE_ROOT"], 'ssl', domain_name_escaped, file_type)
        os.makedirs(os.path.dirname(fn1), exist_ok=True)
        shutil.move(fn, fn1)
    # Migrate the 'domains' directory.
    for sslfn in glob.glob(os.path.join( env["STORAGE_ROOT"], 'ssl/domains/*' )):
        fn = os.path.basename(sslfn)
        m = re.match("(.*)_(certifiate.pem|cert_sign_req.csr|private_key.pem)$", fn)
        if m:
            # get the new name for the file
            domain_name, file_type = m.groups()
            if file_type == "certifiate.pem": file_type = "ssl_certificate.pem" # typo
            if file_type == "cert_sign_req.csr": file_type = "certificate_signing_request.csr" # nicer
            move_file(sslfn, domain_name, file_type)
    # Move the old domains directory if it is now empty.
    try:
        os.rmdir(os.path.join( env["STORAGE_ROOT"], 'ssl/domains'))
    except OSError:
        # Directory not empty or already gone -- leave things as they are.
        pass
def migration_2(env):
    """Delete the per-mailbox .dovecot.sieve scripts everywhere.

    These were formerly copies of our spam -> Spam script; it is now installed
    as a single global script (and we use managesieve), so the per-mailbox
    copies -- and their compiled .svbin forms -- are obsolete.
    """
    for pattern in ('mail/mailboxes/*/*/.dovecot.sieve',
                    'mail/mailboxes/*/*/.dovecot.svbin'):
        for fn in glob.glob(os.path.join(env["STORAGE_ROOT"], pattern)):
            os.unlink(fn)
def migration_3(env):
    """No-op migration.

    The migration ID moved from /etc/mailinabox.conf to
    $STORAGE_ROOT/mailinabox.version so that the ID stays with the data files
    whose format it describes. Writing that file is handled by the main
    function (run_migrations), so nothing needs to happen here.
    """
def migration_4(env):
    """Add a column to the mail users table for administrative privileges."""
    users_db = os.path.join(env["STORAGE_ROOT"], 'mail/users.sqlite')
    # `shell` comes from the management utils module imported at file top.
    shell("check_call", ["sqlite3", users_db, "ALTER TABLE users ADD privileges TEXT NOT NULL DEFAULT ''"])
def migration_5(env):
    """Tighten permissions on the backup secret key, which was world readable."""
    secret_key = os.path.join(env["STORAGE_ROOT"], 'backup/secret_key.txt')
    os.chmod(secret_key, 0o600)
def migration_6(env):
    """Rename the DNSSEC keys.conf to an algorithm-specific file name.

    We now generate multiple DNSSEC keys for different algorithms, since TLDs
    may not support them all (.email only supports RSA/SHA-256), so the single
    keys.conf becomes RSASHA1-NSEC3-SHA1.conf.
    """
    basepath = os.path.join(env["STORAGE_ROOT"], 'dns/dnssec')
    old_conf = os.path.join(basepath, 'keys.conf')
    new_conf = os.path.join(basepath, 'RSASHA1-NSEC3-SHA1.conf')
    shutil.move(old_conf, new_conf)
def migration_7(env):
    """Re-encode alias source addresses from Unicode to IDNA.

    Domain names were previously stored in Unicode in the users database; we
    now store them IDNA-encoded. Only the aliases table is affected. Failures
    on individual aliases are reported but do not abort the migration.
    """
    import sqlite3
    conn = sqlite3.connect(os.path.join(env["STORAGE_ROOT"], "mail/users.sqlite"))
    # Snapshot the existing alias source addresses before rewriting any.
    cursor = conn.cursor()
    cursor.execute('SELECT source FROM aliases')
    sources = [row[0] for row in cursor.fetchall()]
    # Rewrite each address with an IDNA-encoded domain part.
    for email in sources:
        try:
            localpart, domainpart = email.split("@")
            idna_domain = domainpart.encode("idna").decode("ascii")
            newemail = localpart + "@" + idna_domain
            if newemail == email:
                continue  # already ASCII-safe; nothing to update
            cursor = conn.cursor()
            cursor.execute("UPDATE aliases SET source=? WHERE source=?", (newemail, email))
            if cursor.rowcount != 1:
                raise ValueError("Alias not found.")
            print("Updated alias", email, "to", newemail)
        except Exception as e:
            print("Error updating IDNA alias", email, e)
    # Persist all successful updates at once.
    conn.commit()
def migration_8(env):
    """Delete the old 1024-bit DKIM private key.

    Removing the key file causes a fresh key to be generated automatically,
    and new keys are 2048 bits.
    """
    key_path = os.path.join(env['STORAGE_ROOT'], 'mail/dkim/mail.private')
    os.unlink(key_path)
def migration_9(env):
    """Add a permitted_senders column to the aliases table.

    The column stores a list of user account email addresses allowed to send
    mail using this alias instead of their own address. Motivated by #427
    ("Reject outgoing mail if FROM does not match Login"), which introduced
    the notion of outbound permitted-senders.
    """
    users_db = os.path.join(env["STORAGE_ROOT"], 'mail/users.sqlite')
    # `shell` comes from the management utils module imported at file top.
    shell("check_call", ["sqlite3", users_db, "ALTER TABLE aliases ADD permitted_senders TEXT"])
def migration_10(env):
    """Clean up the SSL certificates directory.

    Step 1: move the primary certificate to a dated, hostname-specific file
    name and symlink it back to the system certificate path.
    Step 2: flatten the directory structure -- any ssl/<dir>/ containing only
    an ssl_certificate.pem has that file moved up as ssl/<dir>.pem and the
    directory removed.
    """
    import datetime
    system_certificate = os.path.join(env["STORAGE_ROOT"], 'ssl/ssl_certificate.pem')
    if not os.path.islink(system_certificate):  # not already a symlink
        datestamp = datetime.datetime.now().date().isoformat().replace("-", "")
        new_path = os.path.join(env["STORAGE_ROOT"], 'ssl',
                                env['PRIMARY_HOSTNAME'] + "-" + datestamp + ".pem")
        # NOTE: the message is printed before the move, matching the original
        # behavior (the rename could still fail afterwards).
        print("Renamed", system_certificate, "to", new_path, "and created a symlink for the original location.")
        shutil.move(system_certificate, new_path)
        os.symlink(new_path, system_certificate)
    # Flatten: move lone ssl_certificate.pem files up one level.
    for sslcert in glob.glob(os.path.join(env["STORAGE_ROOT"], 'ssl/*/ssl_certificate.pem')):
        cert_dir = os.path.dirname(sslcert)
        if len(os.listdir(cert_dir)) != 1:
            continue  # directory holds other files; leave it alone
        newname = os.path.join(env["STORAGE_ROOT"], 'ssl', os.path.basename(cert_dir) + '.pem')
        if not os.path.exists(newname):
            shutil.move(sslcert, newname)
            os.rmdir(cert_dir)
def get_current_migration():
    """Return the number of the highest migration_N function defined in this module."""
    ver = 0
    while True:
        candidate = globals().get("migration_%d" % (ver + 1))
        if not candidate:
            return ver
        ver = ver + 1
def run_migrations():
    """Apply all pending migration_N functions, one at a time, recording progress.

    Reads the current migration ID from $STORAGE_ROOT/mailinabox.version
    (falling back to the legacy MIGRATIONID in /etc/mailinabox.conf), runs
    each subsequent migration_N in order, and rewrites the version file after
    every successful step so a failure mid-way loses as little as possible.
    Exits the process with status 1 on permission problems or a failed step.
    """
    # Writability of /etc/mailinabox.conf is our proxy for "running as root".
    if not os.access("/etc/mailinabox.conf", os.W_OK, effective_ids=True):
        print("This script must be run as root.", file=sys.stderr)
        sys.exit(1)
    env = load_environment()
    migration_id_file = os.path.join(env['STORAGE_ROOT'], 'mailinabox.version')
    migration_id = None
    if os.path.exists(migration_id_file):
        with open(migration_id_file) as f:
            migration_id = f.read().strip();
    if migration_id is None:
        # Load the legacy location of the migration ID. We'll drop support
        # for this eventually.
        migration_id = env.get("MIGRATIONID")
    if migration_id is None:
        print()
        print("%s file doesn't exists. Skipping migration..." % (migration_id_file,))
        return
    ourver = int(migration_id)
    while True:
        next_ver = (ourver + 1)
        # Migrations are discovered by name: migration_1, migration_2, ...
        migration_func = globals().get("migration_%d" % next_ver)
        if not migration_func:
            # No more migrations to run.
            break
        print()
        print("Running migration to Mail-in-a-Box #%d..." % next_ver)
        try:
            migration_func(env)
        except Exception as e:
            print()
            print("Error running the migration script:")
            print()
            print(e)
            print()
            print("Your system may be in an inconsistent state now. We're terribly sorry. A re-install from a backup might be the best way to continue.")
            sys.exit(1)
        ourver = next_ver
        # Write out our current version now. Do this sooner rather than later
        # in case of any problems.
        with open(migration_id_file, "w") as f:
            f.write(str(ourver) + "\n")
        # Delete the legacy location of this field.
        if "MIGRATIONID" in env:
            del env["MIGRATIONID"]
            save_environment(env)
        # iterate and try next version...
# Command-line entry point: migrate.py --current | --migrate
if __name__ == "__main__":
    if sys.argv[-1] == "--current":
        # Return the number of the highest migration.
        print(str(get_current_migration()))
    elif sys.argv[-1] == "--migrate":
        # Perform migrations.
        run_migrations()
|
hb9kns/PyBitmessage
|
refs/heads/master
|
src/class_addressGenerator.py
|
1
|
import shared
import threading
import time
import sys
from pyelliptic.openssl import OpenSSL
import ctypes
import hashlib
import highlevelcrypto
from addresses import *
from bmconfigparser import BMConfigParser
from debug import logger
import defaults
from helper_threading import *
from pyelliptic import arithmetic
import tr
from binascii import hexlify
import queues
import state
class addressGenerator(threading.Thread, StoppableThread):
    """Background thread that services queues.addressGeneratorQueue.

    Supported queue commands (tuples whose first element is the command name):
    'createRandomAddress', 'createDeterministicAddresses',
    'getDeterministicAddress', 'createChan', 'joinChan', 'stopThread'.
    Results are reported back through queues.apiAddressGeneratorReturnQueue
    and UI updates through queues.UISignalQueue.
    """

    def __init__(self):
        """Name the thread and initialize the stop machinery from StoppableThread."""
        # QThread.__init__(self, parent)
        threading.Thread.__init__(self, name="addressGenerator")
        self.initStop()

    def stopThread(self):
        """Request the worker loop to exit by enqueueing a 'stopThread' command."""
        try:
            # Best-effort wake-up; ignore failures if the queue is unusable
            # during shutdown.
            queues.addressGeneratorQueue.put(("stopThread", "data"))
        except:
            pass
        super(addressGenerator, self).stopThread()

    def run(self):
        """Main loop: pop one command per iteration and generate address(es).

        Keeps running until state.shutdown is set or a 'stopThread' command
        arrives. Address generation searches for key pairs whose combined
        RIPEMD160(SHA512(pubkeys)) digest starts with the demanded number of
        null bytes, which shortens the encoded address.
        """
        while state.shutdown == 0:
            queueValue = queues.addressGeneratorQueue.get()
            nonceTrialsPerByte = 0
            payloadLengthExtraBytes = 0
            live = True
            # Unpack the command tuple; the expected shape depends on the
            # command name and/or the tuple length.
            if queueValue[0] == 'createChan':
                command, addressVersionNumber, streamNumber, label, deterministicPassphrase, live = queueValue
                eighteenByteRipe = False
                numberOfAddressesToMake = 1
                numberOfNullBytesDemandedOnFrontOfRipeHash = 1
            elif queueValue[0] == 'joinChan':
                command, chanAddress, label, deterministicPassphrase, live = queueValue
                eighteenByteRipe = False
                # Version and stream come from the chan address being joined.
                addressVersionNumber = decodeAddress(chanAddress)[1]
                streamNumber = decodeAddress(chanAddress)[2]
                numberOfAddressesToMake = 1
                numberOfNullBytesDemandedOnFrontOfRipeHash = 1
            elif len(queueValue) == 7:
                command, addressVersionNumber, streamNumber, label, numberOfAddressesToMake, deterministicPassphrase, eighteenByteRipe = queueValue
                try:
                    numberOfNullBytesDemandedOnFrontOfRipeHash = BMConfigParser().getint(
                        'bitmessagesettings', 'numberofnullbytesonaddress')
                except:
                    if eighteenByteRipe:
                        numberOfNullBytesDemandedOnFrontOfRipeHash = 2
                    else:
                        numberOfNullBytesDemandedOnFrontOfRipeHash = 1  # the default
            elif len(queueValue) == 9:
                command, addressVersionNumber, streamNumber, label, numberOfAddressesToMake, deterministicPassphrase, eighteenByteRipe, nonceTrialsPerByte, payloadLengthExtraBytes = queueValue
                try:
                    numberOfNullBytesDemandedOnFrontOfRipeHash = BMConfigParser().getint(
                        'bitmessagesettings', 'numberofnullbytesonaddress')
                except:
                    if eighteenByteRipe:
                        numberOfNullBytesDemandedOnFrontOfRipeHash = 2
                    else:
                        numberOfNullBytesDemandedOnFrontOfRipeHash = 1  # the default
            elif queueValue[0] == 'stopThread':
                break
            else:
                sys.stderr.write(
                    'Programming error: A structure with the wrong number of values was passed into the addressGeneratorQueue. Here is the queueValue: %s\n' % repr(queueValue))
            # Only v3 and v4 addresses can be generated.
            if addressVersionNumber < 3 or addressVersionNumber > 4:
                sys.stderr.write(
                    'Program error: For some reason the address generator queue has been given a request to create at least one version %s address which it cannot do.\n' % addressVersionNumber)
            # Fill in proof-of-work parameters from config, clamped to the
            # network-wide minimums.
            if nonceTrialsPerByte == 0:
                nonceTrialsPerByte = BMConfigParser().getint(
                    'bitmessagesettings', 'defaultnoncetrialsperbyte')
            if nonceTrialsPerByte < defaults.networkDefaultProofOfWorkNonceTrialsPerByte:
                nonceTrialsPerByte = defaults.networkDefaultProofOfWorkNonceTrialsPerByte
            if payloadLengthExtraBytes == 0:
                payloadLengthExtraBytes = BMConfigParser().getint(
                    'bitmessagesettings', 'defaultpayloadlengthextrabytes')
            if payloadLengthExtraBytes < defaults.networkDefaultPayloadLengthExtraBytes:
                payloadLengthExtraBytes = defaults.networkDefaultPayloadLengthExtraBytes
            if command == 'createRandomAddress':
                queues.UISignalQueue.put((
                    'updateStatusBar', tr._translate("MainWindow", "Generating one new address")))
                # This next section is a little bit strange. We're going to generate keys over and over until we
                # find one that starts with either \x00 or \x00\x00. Then when we pack them into a Bitmessage address,
                # we won't store the \x00 or \x00\x00 bytes thus making the
                # address shorter.
                startTime = time.time()
                numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix = 0
                potentialPrivSigningKey = OpenSSL.rand(32)
                potentialPubSigningKey = highlevelcrypto.pointMult(potentialPrivSigningKey)
                while True:
                    numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix += 1
                    potentialPrivEncryptionKey = OpenSSL.rand(32)
                    potentialPubEncryptionKey = highlevelcrypto.pointMult(
                        potentialPrivEncryptionKey)
                    ripe = hashlib.new('ripemd160')
                    sha = hashlib.new('sha512')
                    sha.update(
                        potentialPubSigningKey + potentialPubEncryptionKey)
                    ripe.update(sha.digest())
                    if ripe.digest()[:numberOfNullBytesDemandedOnFrontOfRipeHash] == '\x00' * numberOfNullBytesDemandedOnFrontOfRipeHash:
                        break
                logger.info('Generated address with ripe digest: %s' % hexlify(ripe.digest()))
                try:
                    logger.info('Address generator calculated %s addresses at %s addresses per second before finding one with the correct ripe-prefix.' % (numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix, numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix / (time.time() - startTime)))
                except ZeroDivisionError:
                    # The user must have a pretty fast computer. time.time() - startTime equaled zero.
                    pass
                address = encodeAddress(addressVersionNumber, streamNumber, ripe.digest())
                # An excellent way for us to store our keys is in Wallet Import Format. Let us convert now.
                # https://en.bitcoin.it/wiki/Wallet_import_format
                privSigningKey = '\x80' + potentialPrivSigningKey
                checksum = hashlib.sha256(hashlib.sha256(
                    privSigningKey).digest()).digest()[0:4]
                privSigningKeyWIF = arithmetic.changebase(
                    privSigningKey + checksum, 256, 58)
                privEncryptionKey = '\x80' + potentialPrivEncryptionKey
                checksum = hashlib.sha256(hashlib.sha256(
                    privEncryptionKey).digest()).digest()[0:4]
                privEncryptionKeyWIF = arithmetic.changebase(
                    privEncryptionKey + checksum, 256, 58)
                # Persist the new identity in the config file.
                BMConfigParser().add_section(address)
                BMConfigParser().set(address, 'label', label)
                BMConfigParser().set(address, 'enabled', 'true')
                BMConfigParser().set(address, 'decoy', 'false')
                BMConfigParser().set(address, 'noncetrialsperbyte', str(
                    nonceTrialsPerByte))
                BMConfigParser().set(address, 'payloadlengthextrabytes', str(
                    payloadLengthExtraBytes))
                BMConfigParser().set(
                    address, 'privSigningKey', privSigningKeyWIF)
                BMConfigParser().set(
                    address, 'privEncryptionKey', privEncryptionKeyWIF)
                BMConfigParser().save()
                # The API and the join and create Chan functionality
                # both need information back from the address generator.
                queues.apiAddressGeneratorReturnQueue.put(address)
                queues.UISignalQueue.put((
                    'updateStatusBar', tr._translate("MainWindow", "Done generating address. Doing work necessary to broadcast it...")))
                queues.UISignalQueue.put(('writeNewAddressToTable', (
                    label, address, streamNumber)))
                shared.reloadMyAddressHashes()
                if addressVersionNumber == 3:
                    queues.workerQueue.put((
                        'sendOutOrStoreMyV3Pubkey', ripe.digest()))
                elif addressVersionNumber == 4:
                    queues.workerQueue.put((
                        'sendOutOrStoreMyV4Pubkey', address))
            elif command == 'createDeterministicAddresses' or command == 'getDeterministicAddress' or command == 'createChan' or command == 'joinChan':
                if len(deterministicPassphrase) == 0:
                    sys.stderr.write(
                        'WARNING: You are creating deterministic address(es) using a blank passphrase. Bitmessage will do it but it is rather stupid.')
                if command == 'createDeterministicAddresses':
                    queues.UISignalQueue.put((
                        'updateStatusBar', tr._translate("MainWindow","Generating %1 new addresses.").arg(str(numberOfAddressesToMake))))
                signingKeyNonce = 0
                encryptionKeyNonce = 1
                listOfNewAddressesToSendOutThroughTheAPI = [
                ]  # We fill out this list no matter what although we only need it if we end up passing the info to the API.
                for i in range(numberOfAddressesToMake):
                    # This next section is a little bit strange. We're going to generate keys over and over until we
                    # find one that has a RIPEMD hash that starts with either \x00 or \x00\x00. Then when we pack them
                    # into a Bitmessage address, we won't store the \x00 or
                    # \x00\x00 bytes thus making the address shorter.
                    startTime = time.time()
                    numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix = 0
                    while True:
                        numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix += 1
                        # Deterministic keys: derived from the passphrase plus
                        # an incrementing nonce (even for signing, odd for
                        # encryption).
                        potentialPrivSigningKey = hashlib.sha512(
                            deterministicPassphrase + encodeVarint(signingKeyNonce)).digest()[:32]
                        potentialPrivEncryptionKey = hashlib.sha512(
                            deterministicPassphrase + encodeVarint(encryptionKeyNonce)).digest()[:32]
                        potentialPubSigningKey = highlevelcrypto.pointMult(
                            potentialPrivSigningKey)
                        potentialPubEncryptionKey = highlevelcrypto.pointMult(
                            potentialPrivEncryptionKey)
                        signingKeyNonce += 2
                        encryptionKeyNonce += 2
                        ripe = hashlib.new('ripemd160')
                        sha = hashlib.new('sha512')
                        sha.update(
                            potentialPubSigningKey + potentialPubEncryptionKey)
                        ripe.update(sha.digest())
                        if ripe.digest()[:numberOfNullBytesDemandedOnFrontOfRipeHash] == '\x00' * numberOfNullBytesDemandedOnFrontOfRipeHash:
                            break
                    logger.info('Generated address with ripe digest: %s' % hexlify(ripe.digest()))
                    try:
                        logger.info('Address generator calculated %s addresses at %s addresses per second before finding one with the correct ripe-prefix.' % (numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix, numberOfAddressesWeHadToMakeBeforeWeFoundOneWithTheCorrectRipePrefix / (time.time() - startTime)))
                    except ZeroDivisionError:
                        # The user must have a pretty fast computer. time.time() - startTime equaled zero.
                        pass
                    address = encodeAddress(addressVersionNumber, streamNumber, ripe.digest())
                    saveAddressToDisk = True
                    # If we are joining an existing chan, let us check to make sure it matches the provided Bitmessage address
                    if command == 'joinChan':
                        if address != chanAddress:
                            listOfNewAddressesToSendOutThroughTheAPI.append('chan name does not match address')
                            saveAddressToDisk = False
                    if command == 'getDeterministicAddress':
                        saveAddressToDisk = False
                    if saveAddressToDisk and live:
                        # An excellent way for us to store our keys is in Wallet Import Format. Let us convert now.
                        # https://en.bitcoin.it/wiki/Wallet_import_format
                        privSigningKey = '\x80' + potentialPrivSigningKey
                        checksum = hashlib.sha256(hashlib.sha256(
                            privSigningKey).digest()).digest()[0:4]
                        privSigningKeyWIF = arithmetic.changebase(
                            privSigningKey + checksum, 256, 58)
                        privEncryptionKey = '\x80' + \
                            potentialPrivEncryptionKey
                        checksum = hashlib.sha256(hashlib.sha256(
                            privEncryptionKey).digest()).digest()[0:4]
                        privEncryptionKeyWIF = arithmetic.changebase(
                            privEncryptionKey + checksum, 256, 58)
                        try:
                            # add_section raises if the section exists; that
                            # is how we detect a duplicate address.
                            BMConfigParser().add_section(address)
                            addressAlreadyExists = False
                        except:
                            addressAlreadyExists = True
                        if addressAlreadyExists:
                            logger.info('%s already exists. Not adding it again.' % address)
                            queues.UISignalQueue.put((
                                'updateStatusBar', tr._translate("MainWindow","%1 is already in 'Your Identities'. Not adding it again.").arg(address)))
                        else:
                            logger.debug('label: %s' % label)
                            BMConfigParser().set(address, 'label', label)
                            BMConfigParser().set(address, 'enabled', 'true')
                            BMConfigParser().set(address, 'decoy', 'false')
                            if command == 'joinChan' or command == 'createChan':
                                BMConfigParser().set(address, 'chan', 'true')
                            BMConfigParser().set(address, 'noncetrialsperbyte', str(
                                nonceTrialsPerByte))
                            BMConfigParser().set(address, 'payloadlengthextrabytes', str(
                                payloadLengthExtraBytes))
                            BMConfigParser().set(
                                address, 'privSigningKey', privSigningKeyWIF)
                            BMConfigParser().set(
                                address, 'privEncryptionKey', privEncryptionKeyWIF)
                            BMConfigParser().save()
                            queues.UISignalQueue.put(('writeNewAddressToTable', (
                                label, address, str(streamNumber))))
                            listOfNewAddressesToSendOutThroughTheAPI.append(
                                address)
                            # Register the new identity in the in-memory maps
                            # so incoming messages can be decrypted.
                            shared.myECCryptorObjects[ripe.digest()] = highlevelcrypto.makeCryptor(
                                hexlify(potentialPrivEncryptionKey))
                            shared.myAddressesByHash[ripe.digest()] = address
                            tag = hashlib.sha512(hashlib.sha512(encodeVarint(
                                addressVersionNumber) + encodeVarint(streamNumber) + ripe.digest()).digest()).digest()[32:]
                            shared.myAddressesByTag[tag] = address
                            if addressVersionNumber == 3:
                                queues.workerQueue.put((
                                    'sendOutOrStoreMyV3Pubkey', ripe.digest()))  # If this is a chan address,
                                # the worker thread won't send out the pubkey over the network.
                            elif addressVersionNumber == 4:
                                queues.workerQueue.put((
                                    'sendOutOrStoreMyV4Pubkey', address))
                            queues.UISignalQueue.put((
                                'updateStatusBar', tr._translate("MainWindow", "Done generating address")))
                    elif saveAddressToDisk and not live and not BMConfigParser().has_section(address):
                        listOfNewAddressesToSendOutThroughTheAPI.append(address)
                # Done generating addresses.
                if command == 'createDeterministicAddresses' or command == 'joinChan' or command == 'createChan':
                    queues.apiAddressGeneratorReturnQueue.put(
                        listOfNewAddressesToSendOutThroughTheAPI)
                elif command == 'getDeterministicAddress':
                    queues.apiAddressGeneratorReturnQueue.put(address)
            else:
                raise Exception(
                    "Error in the addressGenerator thread. Thread was given a command it could not understand: " + command)
            queues.addressGeneratorQueue.task_done()
|
adieu/allbuttonspressed
|
refs/heads/master
|
robots/__init__.py
|
12133432
| |
saideepchandg/oracle-r12-accounting
|
refs/heads/master
|
lib/django/conf/locale/en_GB/__init__.py
|
12133432
| |
GoogleCloudPlatform/ml-on-gcp
|
refs/heads/master
|
example_zoo/tensorflow/models/mnist/official/utils/logs/__init__.py
|
12133432
| |
tgarland1/datausa-site
|
refs/heads/master
|
datausa/profile/__init__.py
|
12133432
| |
chkir/django-cms
|
refs/heads/develop
|
cms/test_utils/project/pluginapp/plugins/meta/south_migrations/__init__.py
|
12133432
| |
tawanda/django-imagekit
|
refs/heads/develop
|
imagekit/admin.py
|
6
|
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
class AdminThumbnail(object):
    """
    A convenience utility for adding thumbnails to Django's admin change list.
    """
    short_description = _('Thumbnail')
    allow_tags = True

    def __init__(self, image_field, template=None):
        """
        :param image_field: The name of the ImageField or ImageSpecField on the
            model to use for the thumbnail.
        :param template: The template with which to render the thumbnail
        """
        self.image_field = image_field
        self.template = template

    def __call__(self, obj):
        """Render the thumbnail HTML for one admin change-list row."""
        field = self.image_field
        # The field may be a callable (computed thumbnail) or an attribute name.
        if callable(field):
            thumbnail = field(obj)
        elif hasattr(obj, field):
            thumbnail = getattr(obj, field)
        else:
            raise Exception('The property %s is not defined on %s.' %
                            (field, obj.__class__.__name__))
        # Spec fields expose the unprocessed image via `.source`.
        original_image = getattr(thumbnail, 'source', None) or thumbnail
        context = {
            'model': obj,
            'thumbnail': thumbnail,
            'original_image': original_image,
        }
        return render_to_string(self.template or 'imagekit/admin/thumbnail.html',
                                context)
|
schlichtanders/pyparsing-2.0.3-OrderedDict
|
refs/heads/master
|
examples/antlr_grammar.py
|
1
|
'''
antlr_grammar.py
Created on 4 sept. 2010
@author: luca
(Minor updates by Paul McGuire, June, 2012)
'''
from pyparsingOD import Word, ZeroOrMore, printables, Suppress, OneOrMore, Group, \
LineEnd, Optional, White, originalTextFor, hexnums, nums, Combine, Literal, Keyword, \
cStyleComment, Regex, Forward, MatchFirst, And, srange, oneOf, alphas, alphanums, \
delimitedList
# http://www.antlr.org/grammar/ANTLR/ANTLRv3.g
# Tokens
EOL = Suppress(LineEnd()) # $
singleTextString = originalTextFor(ZeroOrMore(~EOL + (White(" \t") | Word(printables)))).leaveWhitespace()
XDIGIT = hexnums
INT = Word(nums)
# Escape sequences: simple (\n, \r, ...), unicode (\uXXXX), or any single char.
ESC = Literal('\\') + (oneOf(list(r'nrtbf\">'+"'")) | ('u' + Word(hexnums, exact=4)) | Word(printables, exact=1))
LITERAL_CHAR = ESC | ~(Literal("'") | Literal('\\')) + Word(printables, exact=1)
CHAR_LITERAL = Suppress("'") + LITERAL_CHAR + Suppress("'")
STRING_LITERAL = Suppress("'") + Combine(OneOrMore(LITERAL_CHAR)) + Suppress("'")
DOUBLE_QUOTE_STRING_LITERAL = '"' + ZeroOrMore(LITERAL_CHAR) + '"'
DOUBLE_ANGLE_STRING_LITERAL = '<<' + ZeroOrMore(Word(printables, exact=1)) + '>>'
# ANTLR convention: token names start uppercase, rule names lowercase.
TOKEN_REF = Word(alphas.upper(), alphanums+'_')
RULE_REF = Word(alphas.lower(), alphanums+'_')
ACTION_ESC = (Suppress("\\") + Suppress("'")) | Suppress('\\"') | Suppress('\\') + (~(Literal("'") | Literal('"')) + Word(printables, exact=1))
ACTION_CHAR_LITERAL = Suppress("'") + (ACTION_ESC | ~(Literal('\\') | Literal("'")) + Word(printables, exact=1)) + Suppress("'")
ACTION_STRING_LITERAL = Suppress('"') + ZeroOrMore(ACTION_ESC | ~(Literal('\\') | Literal('"')) + Word(printables, exact=1)) + Suppress('"')
SRC = Suppress('src') + ACTION_STRING_LITERAL("file") + INT("line")
# NOTE: shadows the `id` builtin; kept for fidelity to the ANTLR grammar names.
id = TOKEN_REF | RULE_REF
SL_COMMENT = Suppress('//') + Suppress('$ANTLR') + SRC | ZeroOrMore(~EOL + Word(printables)) + EOL
ML_COMMENT = cStyleComment
WS = OneOrMore(Suppress(' ') | Suppress('\t') | (Optional(Suppress('\r')) + Literal('\n')))
WS_LOOP = ZeroOrMore(SL_COMMENT | ML_COMMENT)
NESTED_ARG_ACTION = Forward()
NESTED_ARG_ACTION << Suppress('[') + ZeroOrMore(NESTED_ARG_ACTION | ACTION_STRING_LITERAL | ACTION_CHAR_LITERAL) + Suppress(']')
ARG_ACTION = NESTED_ARG_ACTION
NESTED_ACTION = Forward()
NESTED_ACTION << Suppress('{') + ZeroOrMore(NESTED_ACTION | SL_COMMENT | ML_COMMENT | ACTION_STRING_LITERAL | ACTION_CHAR_LITERAL) + Suppress('}')
ACTION = NESTED_ACTION + Optional('?')
SCOPE = Suppress('scope')
OPTIONS = Suppress('options') + Suppress('{') # + WS_LOOP + Suppress('{')
TOKENS = Suppress('tokens') + Suppress('{') # + WS_LOOP + Suppress('{')
FRAGMENT = 'fragment';
TREE_BEGIN = Suppress('^(')
ROOT = Suppress('^')
BANG = Suppress('!')
RANGE = Suppress('..')
REWRITE = Suppress('->')
# General Parser Definitions
# Grammar heading
optionValue = id | STRING_LITERAL | CHAR_LITERAL | INT | Literal('*').setName("s")
option = Group(id("id") + Suppress('=') + optionValue("value"))("option")
optionsSpec = OPTIONS + Group(OneOrMore(option + Suppress(';')))("options") + Suppress('}')
tokenSpec = Group(TOKEN_REF("token_ref") + (Suppress('=') + (STRING_LITERAL | CHAR_LITERAL)("lit")))("token") + Suppress(';')
tokensSpec = TOKENS + Group(OneOrMore(tokenSpec))("tokens") + Suppress('}')
attrScope = Suppress('scope') + id + ACTION
grammarType = Keyword('lexer') + Keyword('parser') + Keyword('tree')
actionScopeName = id | Keyword('lexer')("l") | Keyword('parser')("p")
action = Suppress('@') + Optional(actionScopeName + Suppress('::')) + id + ACTION
grammarHeading = Optional(ML_COMMENT("ML_COMMENT")) + Optional(grammarType) + Suppress('grammar') + id("grammarName") + Suppress(';') + Optional(optionsSpec) + Optional(tokensSpec) + ZeroOrMore(attrScope) + ZeroOrMore(action)
modifier = Keyword('protected') | Keyword('public') | Keyword('private') | Keyword('fragment')
ruleAction = Suppress('@') + id + ACTION
throwsSpec = Suppress('throws') + delimitedList(id)
ruleScopeSpec = (Suppress('scope') + ACTION) | (Suppress('scope') + delimitedList(id) + Suppress(';')) | (Suppress('scope') + ACTION + Suppress('scope') + delimitedList(id) + Suppress(';'))
unary_op = oneOf("^ !")
notTerminal = CHAR_LITERAL | TOKEN_REF | STRING_LITERAL
terminal = (CHAR_LITERAL | TOKEN_REF + Optional(ARG_ACTION) | STRING_LITERAL | '.') + Optional(unary_op)
block = Forward()
notSet = Suppress('~') + (notTerminal | block)
rangeNotPython = CHAR_LITERAL("c1") + RANGE + CHAR_LITERAL("c2")
atom = Group(rangeNotPython + Optional(unary_op)("op")) | terminal | (notSet + Optional(unary_op)("op")) | (RULE_REF + Optional(ARG_ACTION("arg")) + Optional(unary_op)("op"))
element = Forward()
treeSpec = Suppress('^(') + element*(2,) + Suppress(')')
ebnfSuffix = oneOf("? * +")
ebnf = block + Optional(ebnfSuffix("op") | '=>')
elementNoOptionSpec = (id("result_name") + oneOf('= +=')("labelOp") + atom("atom") + Optional(ebnfSuffix)) | (id("result_name") + oneOf('= +=')("labelOp") + block + Optional(ebnfSuffix)) | atom("atom") + Optional(ebnfSuffix) | ebnf | ACTION | (treeSpec + Optional(ebnfSuffix)) # | SEMPRED ( '=>' -> GATED_SEMPRED | -> SEMPRED )
element << Group(elementNoOptionSpec)("element")
alternative = Group(Group(OneOrMore(element))("elements")) # Do not ask me why group is needed twice... seems like the xml that you see is not always the real structure?
rewrite = Optional(Literal('TODO REWRITE RULES TODO'))
block << Suppress('(') + Optional(Optional(optionsSpec("opts")) + Suppress(':')) + Group(alternative('a1') + rewrite + Group(ZeroOrMore(Suppress('|') + alternative('a2') + rewrite))("alternatives"))("block") + Suppress(')')
altList = alternative('a1') + rewrite + Group(ZeroOrMore(Suppress('|') + alternative('a2') + rewrite))("alternatives")
exceptionHandler = Suppress('catch') + ARG_ACTION + ACTION
finallyClause = Suppress('finally') + ACTION
exceptionGroup = (OneOrMore(exceptionHandler) + Optional(finallyClause)) | finallyClause
ruleHeading = Optional(ML_COMMENT)("ruleComment") + Optional(modifier)("modifier") + id("ruleName") + Optional("!") + Optional(ARG_ACTION("arg")) + Optional(Suppress('returns') + ARG_ACTION("rt")) + Optional(throwsSpec) + Optional(optionsSpec) + Optional(ruleScopeSpec) + ZeroOrMore(ruleAction)
rule = Group(ruleHeading + Suppress(':') + altList + Suppress(';') + Optional(exceptionGroup))("rule")
grammarDef = grammarHeading + Group(OneOrMore(rule))("rules")
def grammar():
    # Public accessor for the module-level ANTLR grammar parser element.
    return grammarDef
def __antlrAlternativesConverter(pyparsingRules, antlrBlock):
    """Convert an ANTLR block (one or more alternatives) to one pyparsing element.

    When the parsed block carries several alternatives they are combined with
    MatchFirst (pyparsing's ordered choice); a single alternative ("a1") is
    converted directly.  Raises Exception for shapes not supported yet.
    """
    if hasattr(antlrBlock, 'alternatives') and antlrBlock.alternatives != '' and len(antlrBlock.alternatives) > 0:
        # The first alternative is stored separately ("a1") from the rest.
        alternatives = [__antlrAlternativeConverter(pyparsingRules, antlrBlock.a1)]
        for alternative in antlrBlock.alternatives:
            alternatives.append(__antlrAlternativeConverter(pyparsingRules, alternative))
        rule = MatchFirst(alternatives)("anonymous_or")
    elif hasattr(antlrBlock, 'a1') and antlrBlock.a1 != '':
        rule = __antlrAlternativeConverter(pyparsingRules, antlrBlock.a1)
    else:
        raise Exception('Not yet implemented')
    # Identity comparison is the idiomatic None test (was: rule != None).
    assert rule is not None
    return rule
def __antlrAlternativeConverter(pyparsingRules, antlrAlternative):
    """Convert one ANTLR alternative (a sequence of elements) to pyparsing.

    Each element is either a character range (becomes a Regex character
    class), a nested block (converted recursively), or a rule reference
    (looked up in pyparsingRules).  A unary suffix operator (+ * ?) on the
    element is applied afterwards.  Multiple elements are joined with And.
    """
    elementList = []
    for element in antlrAlternative.elements:
        if hasattr(element.atom, 'c1') and element.atom.c1 != '':
            # Character range 'c1'..'c2' becomes the regex class [c1-c2].
            # Fixed: the closing bracket used to be concatenated *inside*
            # str(...) -- str(element.atom.c2[0]+']') -- which only worked
            # because the operands happen to be strings.
            regex = r'[' + str(element.atom.c1[0]) + '-' + str(element.atom.c2[0]) + ']'
            rule = Regex(regex)("anonymous_regex")
        elif hasattr(element, 'block') and element.block != '':
            rule = __antlrAlternativesConverter(pyparsingRules, element.block)
        else:
            ruleRef = element.atom
            assert ruleRef in pyparsingRules
            rule = pyparsingRules[ruleRef](ruleRef)
        if hasattr(element, 'op') and element.op != '':
            if element.op == '+':
                rule = Group(OneOrMore(rule))("anonymous_one_or_more")
            elif element.op == '*':
                rule = Group(ZeroOrMore(rule))("anonymous_zero_or_more")
            elif element.op == '?':
                rule = Optional(rule)
            else:
                raise Exception('rule operator not yet implemented : ' + element.op)
        elementList.append(rule)
    if len(elementList) > 1:
        rule = Group(And(elementList))("anonymous_and")
    else:
        rule = elementList[0]
    assert rule is not None
    return rule
def __antlrRuleConverter(pyparsingRules, antlrRule):
    """Convert a whole ANTLR rule and name the result after the rule.

    The dead `rule = None` pre-initialization was removed; the value is
    assigned unconditionally on the next line.
    """
    rule = __antlrAlternativesConverter(pyparsingRules, antlrRule)
    assert rule is not None
    # Attach the ANTLR rule name so parse results can be accessed by name.
    rule(antlrRule.ruleName)
    return rule
def antlrConverter(antlrGrammarTree):
    """Build a {name: pyparsing element} mapping from a parsed ANTLR grammar.

    Tokens become plain Literal matchers.  Rules are first registered as
    Forward placeholders -- ANTLR grammars are top down and may reference
    rules defined later -- then each Forward is bound to its converted body.
    """
    pyparsingRules = {}
    # Token definitions: token name -> literal text.
    antlrTokens = {}
    for antlrToken in antlrGrammarTree.tokens:
        antlrTokens[antlrToken.token_ref] = antlrToken.lit
    for antlrTokenName, antlrToken in list(antlrTokens.items()):
        pyparsingRules[antlrTokenName] = Literal(antlrToken)
    antlrRules = {}
    for antlrRule in antlrGrammarTree.rules:
        antlrRules[antlrRule.ruleName] = antlrRule
        pyparsingRules[antlrRule.ruleName] = Forward()  # antlr is a top down grammar
    for antlrRuleName, antlrRule in list(antlrRules.items()):
        pyparsingRule = __antlrRuleConverter(pyparsingRules, antlrRule)
        # Identity comparison is the idiomatic None test (was: != None).
        assert pyparsingRule is not None
        pyparsingRules[antlrRuleName] << pyparsingRule
    return pyparsingRules
if __name__ == "__main__":
    # Self-test: parse the SimpleCalc example grammar, convert it into
    # pyparsing rules, then run the generated "expr" parser on a sample input.
    text = """grammar SimpleCalc;
options {
language = Python;
}
tokens {
PLUS = '+' ;
MINUS = '-' ;
MULT = '*' ;
DIV = '/' ;
}
/*------------------------------------------------------------------
* PARSER RULES
*------------------------------------------------------------------*/
expr : term ( ( PLUS | MINUS ) term )* ;
term : factor ( ( MULT | DIV ) factor )* ;
factor : NUMBER ;
/*------------------------------------------------------------------
* LEXER RULES
*------------------------------------------------------------------*/
NUMBER : (DIGIT)+ ;
/* WHITESPACE : ( '\t' | ' ' | '\r' | '\n'| '\u000C' )+ { $channel = HIDDEN; } ; */
fragment DIGIT : '0'..'9' ;
"""
    # validate() reports left-recursion/infinite-loop suspects in the grammar.
    grammar().validate()
    antlrGrammarTree = grammar().parseString(text)
    print(antlrGrammarTree.asXML("antlrGrammarTree"))
    pyparsingRules = antlrConverter(antlrGrammarTree)
    pyparsingRule = pyparsingRules["expr"]
    pyparsingTree = pyparsingRule.parseString("2 - 5 * 42 + 7 / 25")
    print(pyparsingTree.asXML("pyparsingTree"))
|
wreckJ/intellij-community
|
refs/heads/master
|
python/testData/refactoring/extractmethod/DuplicateWithRename.after.py
|
71
|
# Refactoring test fixture: expected result after "extract method" with rename
# (Python 2 syntax is intentional for this fixture).
def bar():
    a = 1
    b = 2
    foo(a)
    foo(b)
def foo(a_new):
    print a_new
|
tipsybear/ormbad
|
refs/heads/master
|
tests/__init__.py
|
1
|
# tests
# Testing for the ormbad module
#
# Author: Benjamin Bengfort <benjamin@bengfort.com>
# Created: Thu Aug 13 12:34:07 2015 -0400
#
# Copyright (C) 2015 Tipsy Bear Studios
# For license information, see LICENSE.txt
#
# ID: __init__.py [] benjamin@bengfort.com $
"""
Testing for the ormbad module
"""
##########################################################################
## Imports
##########################################################################
import unittest
##########################################################################
## Module Constants
##########################################################################
TEST_VERSION = "0.1" ## Also the expected version onf the package
##########################################################################
## Test Cases
##########################################################################
class InitializationTest(unittest.TestCase):
def test_initialization(self):
"""
Tests a simple world fact by asserting that 10*10 is 100.
"""
self.assertEqual(10*10, 100)
def test_import(self):
"""
Can import ormbad
"""
try:
import ormbad
except ImportError:
self.fail("Unable to import the ormbad module!")
def test_version(self):
"""
Assert that the version is sane
"""
import ormbad
self.assertEqual(TEST_VERSION, ormbad.__version__)
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
refs/heads/master
|
galaxy-dist/eggs/numpy-1.6.0-py2.7-linux-x86_64-ucs4.egg/numpy/distutils/from_template.py
|
43
|
#!/usr/bin/python
"""
process_file(filename)
takes templated file .xxx.src and produces .xxx file where .xxx
is .pyf .f90 or .f using the following template rules:
'<..>' denotes a template.
All function and subroutine blocks in a source file with names that
contain '<..>' will be replicated according to the rules in '<..>'.
The number of comma-separated words in '<..>' will determine the number of
replicates.
'<..>' may have two different forms, named and short. For example,
named:
<p=d,s,z,c> where anywhere inside a block '<p>' will be replaced with
'd', 's', 'z', and 'c' for each replicate of the block.
<_c> is already defined: <_c=s,d,c,z>
<_t> is already defined: <_t=real,double precision,complex,double complex>
short:
<s,d,c,z>, a short form of the named, useful when no <p> appears inside
a block.
In general, '<..>' contains a comma separated list of arbitrary
expressions. If these expression must contain a comma|leftarrow|rightarrow,
then prepend the comma|leftarrow|rightarrow with a backslash.
If an expression matches '\\<index>' then it will be replaced
by <index>-th expression.
Note that all '<..>' forms in a block must have the same number of
comma-separated entries.
Predefined named template rules:
<prefix=s,d,c,z>
<ftype=real,double precision,complex,double complex>
<ftypereal=real,double precision,\\0,\\1>
<ctype=float,double,complex_float,complex_double>
<ctypereal=float,double,\\0,\\1>
"""
__all__ = ['process_str','process_file']
import os
import sys
import re
# Start of a Fortran subroutine/function statement (optionally preceded by a
# fixed-form continuation marker).  NOTE(review): inner whitespace in these
# patterns may have been collapsed when this file was copied -- compare with
# the upstream numpy.distutils.from_template before relying on them.
routine_start_re = re.compile(r'(\n|\A)(( (\$|\*))|)\s*(subroutine|function)\b',re.I)
# Matching "end subroutine" / "end function" line.
routine_end_re = re.compile(r'\n\s*end\s*(subroutine|function)\b.*(\n|\Z)',re.I)
# Typed function statement carried on a continuation line ($ or *).
function_start_re = re.compile(r'\n (\$|\*)\s*function\b',re.I)
def parse_structure(astr):
    """ Return a list of tuples for each function or subroutine each
    tuple is the start and end of a subroutine or function to be
    expanded.
    """
    spanlist = []
    ind = 0
    while 1:
        m = routine_start_re.search(astr,ind)
        if m is None:
            # No more routine headers: done.
            break
        start = m.start()
        if function_start_re.match(astr,start,m.end()):
            # A typed function statement may begin on earlier continuation
            # lines; walk backwards over them to include the full header.
            while 1:
                i = astr.rfind('\n',ind,start)
                if i==-1:
                    break
                start = i
                if astr[i:i+7]!='\n $':
                    # NOTE(review): a 7-char slice is compared against what
                    # looks like a 3-char literal; upstream numpy uses
                    # '\n' + five spaces + '$'.  The whitespace appears
                    # collapsed in this copy -- confirm before relying on it.
                    break
        start += 1
        m = routine_end_re.search(astr,m.end())
        # If no explicit end marker is found, the span runs to end of file.
        ind = end = m and m.end()-1 or len(astr)
        spanlist.append((start,end))
    return spanlist
# <name> -- usage of a template inside a routine body.
template_re = re.compile(r"<\s*(\w[\w\d]*)\s*>")
# <name=a,b,...> -- definition of a named template.
named_re = re.compile(r"<\s*(\w[\w\d]*)\s*=\s*(.*?)\s*>")
# <a,b,...> -- short (anonymous) template list.
list_re = re.compile(r"<\s*((.*?))\s*>")
def find_repl_patterns(astr):
    """Collect named template definitions ``<name=a,b,...>`` from *astr*.

    Returns a dict mapping each template name (a generated unique key when
    the name part is empty) to its normalized replacement list; escaped
    commas are protected as '@comma@' before list expansion.
    """
    reps = named_re.findall(astr)
    names = {}
    for rep in reps:
        name = rep[0].strip() or unique_key(names)
        # Raw string r'\,': the original '\,' relied on Python preserving
        # unknown escapes, which now emits a SyntaxWarning.  Same bytes.
        repl = rep[1].replace(r'\,', '@comma@')
        thelist = conv(repl)
        names[name] = thelist
    return names
# A whole-string back-reference of the form \<digits>.
item_re = re.compile(r"\A\\(?P<index>\d+)\Z")
def conv(astr):
    """Normalize a comma-separated template list.

    Each item is stripped; an item of the form ``\\<n>`` is replaced by the
    (possibly already-substituted) n-th item.  Returns the re-joined string.
    """
    parts = [piece.strip() for piece in astr.split(',')]
    for idx, part in enumerate(parts):
        ref = item_re.match(part)
        if ref:
            # In-place substitution: later back-references see earlier ones
            # already resolved, matching the historical behaviour.
            parts[idx] = parts[int(ref.group('index'))]
    return ','.join(parts)
def unique_key(adict):
    """ Obtain a unique key given a dictionary."""
    # Probe '__l1', '__l2', ... until a name not already present is found.
    n = 1
    while '__l%s' % (n) in adict:
        n += 1
    return '__l%s' % (n)
# A bare template name (used to detect <name> inside a short list).
template_name_re = re.compile(r'\A\s*(\w[\w\d]*)\s*\Z')
def expand_sub(substr,names):
    """Expand all templates in one routine block and return the replicates.

    *names* maps template names to their comma-separated replacement lists;
    it is updated with definitions found in this block.
    """
    # NOTE: '\>' and '\<' are not valid escapes; Python keeps them as-is
    # (with a SyntaxWarning on modern interpreters), so these match a
    # literal backslash followed by > or <.
    substr = substr.replace('\>','@rightarrow@')
    substr = substr.replace('\<','@leftarrow@')
    lnames = find_repl_patterns(substr)
    substr = named_re.sub(r"<\1>",substr) # get rid of definition templates
    def listrepl(mobj):
        # Replace a short list <a,b,...> with a named template <name>,
        # registering the list in lnames if it is new.
        thelist = conv(mobj.group(1).replace('\,','@comma@'))
        if template_name_re.match(thelist):
            return "<%s>" % (thelist)
        name = None
        for key in lnames.keys(): # see if list is already in dictionary
            if lnames[key] == thelist:
                name = key
        if name is None: # this list is not in the dictionary yet
            name = unique_key(lnames)
            lnames[name] = thelist
        return "<%s>" % name
    substr = list_re.sub(listrepl, substr) # convert all lists to named templates
    # newnames are constructed as needed
    numsubs = None
    base_rule = None
    rules = {}
    for r in template_re.findall(substr):
        if r not in rules:
            thelist = lnames.get(r,names.get(r,None))
            if thelist is None:
                raise ValueError('No replicates found for <%s>' % (r))
            # Underscore-prefixed templates are local and not exported.
            if r not in names and not thelist.startswith('_'):
                names[r] = thelist
            rule = [i.replace('@comma@',',') for i in thelist.split(',')]
            num = len(rule)
            if numsubs is None:
                # First template fixes the number of replicates.
                numsubs = num
                rules[r] = rule
                base_rule = r
            elif num == numsubs:
                rules[r] = rule
            else:
                print("Mismatch in number of replacements (base <%s=%s>)"\
                      " for <%s=%s>. Ignoring." % (base_rule,
                      ','.join(rules[base_rule]), r,thelist))
    if not rules:
        return substr
    def namerepl(mobj):
        # Late-binding closure over k (the current replicate index) is
        # intentional: the same callable is reused for every pass.
        name = mobj.group(1)
        return rules.get(name,(k+1)*[name])[k]
    newstr = ''
    for k in range(numsubs):
        newstr += template_re.sub(namerepl, substr) + '\n\n'
    newstr = newstr.replace('@rightarrow@','>')
    newstr = newstr.replace('@leftarrow@','<')
    return newstr
def process_str(allstr):
    """Expand every templated subroutine/function block found in *allstr*."""
    newstr = allstr
    writestr = '' #_head # using _head will break free-format files
    struct = parse_structure(newstr)
    oldend = 0
    names = {}
    # Built-in template definitions are always available.
    names.update(_special_names)
    for sub in struct:
        # Text between routine blocks is copied through unchanged, but may
        # define named templates used by later blocks.
        writestr += newstr[oldend:sub[0]]
        names.update(find_repl_patterns(newstr[oldend:sub[0]]))
        writestr += expand_sub(newstr[sub[0]:sub[1]],names)
        oldend = sub[1]
    writestr += newstr[oldend:]
    return writestr
# An "include 'xxx.src'" directive; the referenced file is inlined.
include_src_re = re.compile(r"(\n|\A)\s*include\s*['\"](?P<name>[\w\d./\\]+[.]src)['\"]",re.I)
def resolve_includes(source):
    """Return *source*'s lines with include'd ``.src`` files inlined.

    Relative include paths are resolved against *source*'s directory.
    An include whose target does not exist is kept verbatim.  The file is
    now read via a ``with`` block so the handle is closed even on error
    (the original leaked it if an exception occurred before fid.close()).
    """
    d = os.path.dirname(source)
    lines = []
    with open(source) as fid:
        for line in fid:
            m = include_src_re.match(line)
            if m:
                fn = m.group('name')
                if not os.path.isabs(fn):
                    fn = os.path.join(d,fn)
                if os.path.isfile(fn):
                    print('Including file',fn)
                    lines.extend(resolve_includes(fn))
                else:
                    lines.append(line)
            else:
                lines.append(line)
    return lines
def process_file(source):
    """Resolve includes in *source* and expand all its templates."""
    lines = resolve_includes(source)
    return process_str(''.join(lines))
# Predefined template rules, always in scope (see module docstring).
_special_names = find_repl_patterns('''
<_c=s,d,c,z>
<_t=real,double precision,complex,double complex>
<prefix=s,d,c,z>
<ftype=real,double precision,complex,double complex>
<ctype=float,double,complex_float,complex_double>
<ftypereal=real,double precision,\\0,\\1>
<ctypereal=float,double,\\0,\\1>
''')
if __name__ == "__main__":
    # CLI: with a filename argument, write the expansion to the same name
    # minus its extension; with no argument, filter stdin to stdout.
    try:
        file = sys.argv[1]  # NOTE(review): shadows the builtin name `file`
    except IndexError:
        fid = sys.stdin
        outfile = sys.stdout
    else:
        fid = open(file,'r')
        (base, ext) = os.path.splitext(file)
        newname = base
        outfile = open(newname,'w')
    allstr = fid.read()
    writestr = process_str(allstr)
    outfile.write(writestr)
|
rgerkin/neuroConstruct
|
refs/heads/master
|
lib/jython/Lib/StringIO.py
|
146
|
r"""File-like objects that read from or write to a string buffer.
This implements (nearly) all stdio methods.
f = StringIO() # ready for writing
f = StringIO(buf) # ready for reading
f.close() # explicitly release resources held
flag = f.isatty() # always false
pos = f.tell() # get current position
f.seek(pos) # set current position
f.seek(pos, mode) # mode 0: absolute; 1: relative; 2: relative to EOF
buf = f.read() # read until EOF
buf = f.read(n) # read up to n bytes
buf = f.readline() # read until end of line ('\n') or EOF
list = f.readlines()# list of f.readline() results until EOF
f.truncate([size]) # truncate file at to at most size (default: current pos)
f.write(buf) # write at current position
f.writelines(list) # for line in list: f.write(line)
f.getvalue() # return whole file's contents as a string
Notes:
- Using a real file is often faster (but less convenient).
- There's also a much faster implementation in C, called cStringIO, but
it's not subclassable.
- fileno() is left unimplemented so that code which uses it triggers
an exception early.
- Seeking far beyond EOF and then writing will insert real null
bytes that occupy space in the buffer.
- There's a simple test set (see end of this file).
"""
# errno.EINVAL may be unavailable on some platforms (e.g. old Jython);
# fall back to the conventional POSIX value 22.
try:
    from errno import EINVAL
except ImportError:
    EINVAL = 22
__all__ = ["StringIO"]
def _complain_ifclosed(closed):
    # Shared guard: every file operation must fail once close() was called.
    # (Python 2 raise syntax -- this module targets Python 2 / Jython.)
    if closed:
        raise ValueError, "I/O operation on closed file"
class StringIO:
    """class StringIO([buffer])

    When a StringIO object is created, it can be initialized to an existing
    string by passing the string to the constructor. If no string is given,
    the StringIO will start empty.

    The StringIO object can accept either Unicode or 8-bit strings, but
    mixing the two may take some care. If both are used, 8-bit strings that
    cannot be interpreted as 7-bit ASCII (that use the 8th bit) will cause
    a UnicodeError to be raised when getvalue() is called.
    """
    def __init__(self, buf = ''):
        # Force self.buf to be a string or unicode
        if not isinstance(buf, basestring):
            buf = str(buf)
        self.buf = buf          # consolidated contents
        self.len = len(buf)     # logical file length
        self.buflist = []       # chunks appended by write(); merged lazily
        self.pos = 0            # current file position
        self.closed = False
        self.softspace = 0      # Python 2 print-statement protocol attribute

    def __iter__(self):
        return self

    def next(self):
        """A file object is its own iterator, for example iter(f) returns f
        (unless f is closed). When a file is used as an iterator, typically
        in a for loop (for example, for line in f: print line), the next()
        method is called repeatedly. This method returns the next input line,
        or raises StopIteration when EOF is hit.
        """
        _complain_ifclosed(self.closed)
        r = self.readline()
        if not r:
            raise StopIteration
        return r

    def close(self):
        """Free the memory buffer.
        """
        if not self.closed:
            self.closed = True
            del self.buf, self.pos

    def isatty(self):
        """Returns False because StringIO objects are not connected to a
        tty-like device.
        """
        _complain_ifclosed(self.closed)
        return False

    def seek(self, pos, mode = 0):
        """Set the file's current position.

        The mode argument is optional and defaults to 0 (absolute file
        positioning); other values are 1 (seek relative to the current
        position) and 2 (seek relative to the file's end).

        There is no return value.
        """
        _complain_ifclosed(self.closed)
        # Consolidate pending chunks before computing positions.
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        if mode == 1:
            pos += self.pos
        elif mode == 2:
            pos += self.len
        self.pos = max(0, pos)

    def tell(self):
        """Return the file's current position."""
        _complain_ifclosed(self.closed)
        return self.pos

    def read(self, n = -1):
        """Read at most size bytes from the file
        (less if the read hits EOF before obtaining size bytes).

        If the size argument is negative or omitted, read all data until EOF
        is reached. The bytes are returned as a string object. An empty
        string is returned when EOF is encountered immediately.
        """
        _complain_ifclosed(self.closed)
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        if n is None or n < 0:
            newpos = self.len
        else:
            newpos = min(self.pos+n, self.len)
        r = self.buf[self.pos:newpos]
        self.pos = newpos
        return r

    def readline(self, length=None):
        r"""Read one entire line from the file.

        A trailing newline character is kept in the string (but may be absent
        when a file ends with an incomplete line). If the size argument is
        present and non-negative, it is a maximum byte count (including the
        trailing newline) and an incomplete line may be returned.

        An empty string is returned only when EOF is encountered immediately.

        Note: Unlike stdio's fgets(), the returned string contains null
        characters ('\0') if they occurred in the input.
        """
        _complain_ifclosed(self.closed)
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        i = self.buf.find('\n', self.pos)
        if i < 0:
            # No newline before EOF: return the remainder.
            newpos = self.len
        else:
            newpos = i+1
        if length is not None and length >= 0:
            if self.pos + length < newpos:
                newpos = self.pos + length
        r = self.buf[self.pos:newpos]
        self.pos = newpos
        return r

    def readlines(self, sizehint = 0):
        """Read until EOF using readline() and return a list containing the
        lines thus read.

        If the optional sizehint argument is present, instead of reading up
        to EOF, whole lines totalling approximately sizehint bytes are read
        (or more, to accommodate a final whole line).
        """
        total = 0
        lines = []
        line = self.readline()
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline()
        return lines

    def truncate(self, size=None):
        """Truncate the file's size.

        If the optional size argument is present, the file is truncated to
        (at most) that size. The size defaults to the current position.
        The current file position is not changed unless the position
        is beyond the new file size.

        If the specified size exceeds the file's current size, the
        file remains unchanged.
        """
        _complain_ifclosed(self.closed)
        if size is None:
            size = self.pos
        elif size < 0:
            raise IOError(EINVAL, "Negative size not allowed")
        elif size < self.pos:
            self.pos = size
        self.buf = self.getvalue()[:size]
        self.len = size

    def write(self, s):
        """Write a string to the file.

        There is no return value.
        """
        _complain_ifclosed(self.closed)
        if not s: return
        # Force s to be a string or unicode
        if not isinstance(s, basestring):
            s = str(s)
        spos = self.pos
        slen = self.len
        if spos == slen:
            # Common case: appending at EOF -- just queue the chunk.
            self.buflist.append(s)
            self.len = self.pos = spos + len(s)
            return
        if spos > slen:
            # Writing past EOF pads the gap with NUL characters.
            self.buflist.append('\0'*(spos - slen))
            slen = spos
        newpos = spos + len(s)
        if spos < slen:
            # Overwrite in the middle: rebuild buf around the new chunk.
            if self.buflist:
                self.buf += ''.join(self.buflist)
            self.buflist = [self.buf[:spos], s, self.buf[newpos:]]
            self.buf = ''
            if newpos > slen:
                slen = newpos
        else:
            self.buflist.append(s)
            slen = newpos
        self.len = slen
        self.pos = newpos

    def writelines(self, iterable):
        """Write a sequence of strings to the file. The sequence can be any
        iterable object producing strings, typically a list of strings. There
        is no return value.

        (The name is intended to match readlines(); writelines() does not add
        line separators.)
        """
        write = self.write
        for line in iterable:
            write(line)

    def flush(self):
        """Flush the internal buffer
        """
        _complain_ifclosed(self.closed)

    def getvalue(self):
        """
        Retrieve the entire contents of the "file" at any time before
        the StringIO object's close() method is called.

        The StringIO object can accept either Unicode or 8-bit strings,
        but mixing the two may take some care. If both are used, 8-bit
        strings that cannot be interpreted as 7-bit ASCII (that use the
        8th bit) will cause a UnicodeError to be raised when getvalue()
        is called.
        """
        _complain_ifclosed(self.closed)
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        return self.buf
# A little test suite
def test():
    # Exercises write/writelines/seek/read/readline/truncate against a real
    # text file (the first CLI argument, or /etc/passwd by default).
    import sys
    if sys.argv[1:]:
        file = sys.argv[1]
    else:
        file = '/etc/passwd'
    lines = open(file, 'r').readlines()
    text = open(file, 'r').read()
    f = StringIO()
    for line in lines[:-2]:
        f.write(line)
    f.writelines(lines[-2:])
    if f.getvalue() != text:
        raise RuntimeError, 'write failed'
    length = f.tell()
    print 'File length =', length
    # Overwrite in the middle, then re-read from the start.
    f.seek(len(lines[0]))
    f.write(lines[1])
    f.seek(0)
    print 'First line =', repr(f.readline())
    print 'Position =', f.tell()
    line = f.readline()
    print 'Second line =', repr(line)
    f.seek(-len(line), 1)
    line2 = f.read(len(line))
    if line != line2:
        raise RuntimeError, 'bad result after seek back'
    f.seek(len(line2), 1)
    list = f.readlines()
    line = list[-1]
    f.seek(f.tell() - len(line))
    line2 = f.read()
    if line != line2:
        raise RuntimeError, 'bad result after seek back from EOF'
    print 'Read', len(list), 'more lines'
    print 'File length =', f.tell()
    if f.tell() != length:
        raise RuntimeError, 'bad length'
    f.truncate(length/2)
    f.seek(0, 2)
    print 'Truncated length =', f.tell()
    if f.tell() != length/2:
        raise RuntimeError, 'truncate did not adjust length'
    f.close()

if __name__ == '__main__':
    test()
|
PrefPy/opra
|
refs/heads/master
|
compsocsite/polls/migrations/0042_auto_20160710_0930.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-10 08:30
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations: makes combination.response a
    # nullable OneToOneField with cascading delete.  Do not edit by hand.
    dependencies = [
        ('polls', '0041_combination'),
    ]
    operations = [
        migrations.AlterField(
            model_name='combination',
            name='response',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='polls.Response'),
        ),
    ]
|
CERNDocumentServer/invenio
|
refs/heads/prod
|
modules/websubmit/lib/functions/Is_Original_Submitter.py
|
3
|
# This file is part of Invenio.
# Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
##
## Name: Is_Original_Submitter
## Description: function Is_Original_Submitter
## This function compares the email of the current logged
## user with the original submitter of the document, then
## check whether the user has special rights.
## Author: T.Baron
##
## PARAMETERS: -
## OUTPUT: HTML
##
import re
import os
from invenio.access_control_engine import acc_authorize_action
from invenio.websubmit_config import InvenioWebSubmitFunctionStop
from invenio.websubmit_functions.Retrieve_Data import Get_Field
from invenio.websubmit_functions.Shared_Functions import write_file
def Is_Original_Submitter(parameters, curdir, form, user_info=None):
    """
    This function compares the current logged in user email with the
    email of the original submitter of the record. If it is the same
    (or if the current user has superuser rights), we go on. If it
    differs, an error message is issued.

    Returns '' on success, or an HTML/JS warning snippet when the user is
    not the submitter but holds a special authorization; raises
    InvenioWebSubmitFunctionStop to abort the submission otherwise.
    """
    # uid_email / sysno / uid are injected into globals by the WebSubmit
    # engine before this function runs.
    global uid_email,sysno,uid
    doctype = form['doctype']
    act = form['act']
    # Email of the original submitter, from MARC field 8560_f.
    email = Get_Field("8560_f",sysno)
    email = re.sub("[\n\r ]+","",email)
    uid_email = re.sub("[\n\r ]+","",uid_email)
    (auth_code, auth_message) = acc_authorize_action(user_info, "submit",verbose=0,doctype=doctype, act=act)
    # NOTE(review): uid_email is used as a *regex pattern* against email, so
    # addresses containing metacharacters ('+', '.') may over-match --
    # confirm this historical behaviour is intended before changing it.
    if re.search(uid_email,email,re.IGNORECASE) is None and auth_code != 0:
        # Not the submitter and no special authorization: abort via JS alert.
        raise InvenioWebSubmitFunctionStop("""
<SCRIPT>
document.forms[0].action="/submit";
document.forms[0].curpage.value = 1;
document.forms[0].step.value = 0;
user_must_confirm_before_leaving_page = false;
alert('Only the submitter of this document has the right to do this action. \\nYour login (%s) is different from the one of the submitter (%s).');
document.forms[0].submit();
</SCRIPT>""" % (uid_email,email))
    elif re.search(uid_email,email, re.IGNORECASE) is None and \
         auth_code == 0:
        # Authorized non-submitter: warn once per submission directory,
        # using a marker file so the alert is not repeated.
        if not os.path.exists(os.path.join(curdir, 'is_original_submitter_warning')):
            write_file(os.path.join(curdir, 'is_original_submitter_warning'), '')
            return ("""
<SCRIPT>
alert('Only the submitter of this document has the right to do this action. \\nYour login (%s) is different from the one of the submitter (%s).\\n\\nAnyway, as you have a special authorization for this type of documents,\\nyou are allowed to proceed! Watch out your actions!');
</SCRIPT>""" % (uid_email,email))
    return ""
|
Maspear/odoo
|
refs/heads/8.0
|
addons/report/models/report_paperformat.py
|
311
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from functools import partial
from openerp import SUPERUSER_ID
from openerp.osv import osv, fields
class report_paperformat(osv.Model):
    """Paper format record (page size, margins, orientation, DPI) that can
    be associated with report actions and companies."""
    _name = "report.paperformat"
    _description = "Allows customization of a report."
    _columns = {'name': fields.char('Name', required=True),
                'default': fields.boolean('Default paper format ?'),
                # Predefined wkhtmltopdf page sizes; 'custom' enables the
                # explicit page_width/page_height fields below.
                # NOTE(review): the ':B10' label below looks like a typo for
                # 'B10', but it is a display string -- left unchanged here.
                'format': fields.selection([('A0', 'A0 5 841 x 1189 mm'),
                                            ('A1', 'A1 6 594 x 841 mm'),
                                            ('A2', 'A2 7 420 x 594 mm'),
                                            ('A3', 'A3 8 297 x 420 mm'),
                                            ('A4', 'A4 0 210 x 297 mm, 8.26 x 11.69 inches'),
                                            ('A5', 'A5 9 148 x 210 mm'),
                                            ('A6', 'A6 10 105 x 148 mm'),
                                            ('A7', 'A7 11 74 x 105 mm'),
                                            ('A8', 'A8 12 52 x 74 mm'),
                                            ('A9', 'A9 13 37 x 52 mm'),
                                            ('B0', 'B0 14 1000 x 1414 mm'),
                                            ('B1', 'B1 15 707 x 1000 mm'),
                                            ('B2', 'B2 17 500 x 707 mm'),
                                            ('B3', 'B3 18 353 x 500 mm'),
                                            ('B4', 'B4 19 250 x 353 mm'),
                                            ('B5', 'B5 1 176 x 250 mm, 6.93 x 9.84 inches'),
                                            ('B6', 'B6 20 125 x 176 mm'),
                                            ('B7', 'B7 21 88 x 125 mm'),
                                            ('B8', 'B8 22 62 x 88 mm'),
                                            ('B9', 'B9 23 33 x 62 mm'),
                                            ('B10', ':B10 16 31 x 44 mm'),
                                            ('C5E', 'C5E 24 163 x 229 mm'),
                                            ('Comm10E', 'Comm10E 25 105 x 241 mm, U.S. '
                                             'Common 10 Envelope'),
                                            ('DLE', 'DLE 26 110 x 220 mm'),
                                            ('Executive', 'Executive 4 7.5 x 10 inches, '
                                             '190.5 x 254 mm'),
                                            ('Folio', 'Folio 27 210 x 330 mm'),
                                            ('Ledger', 'Ledger 28 431.8 x 279.4 mm'),
                                            ('Legal', 'Legal 3 8.5 x 14 inches, '
                                             '215.9 x 355.6 mm'),
                                            ('Letter', 'Letter 2 8.5 x 11 inches, '
                                             '215.9 x 279.4 mm'),
                                            ('Tabloid', 'Tabloid 29 279.4 x 431.8 mm'),
                                            ('custom', 'Custom')],
                                           'Paper size',
                                           help="Select Proper Paper size"),
                'margin_top': fields.integer('Top Margin (mm)'),
                'margin_bottom': fields.integer('Bottom Margin (mm)'),
                'margin_left': fields.integer('Left Margin (mm)'),
                'margin_right': fields.integer('Right Margin (mm)'),
                # Only meaningful when format == 'custom' (see constraint).
                'page_height': fields.integer('Page height (mm)'),
                'page_width': fields.integer('Page width (mm)'),
                'orientation': fields.selection([('Landscape', 'Landscape'),
                                                 ('Portrait', 'Portrait')],
                                                'Orientation'),
                'header_line': fields.boolean('Display a header line'),
                'header_spacing': fields.integer('Header spacing'),
                'dpi': fields.integer('Output DPI', required=True),
                'report_ids': fields.one2many('ir.actions.report.xml',
                                              'paperformat_id',
                                              'Associated reports',
                                              help="Explicitly associated reports")
                }

    def _check_format_or_page(self, cr, uid, ids, context=None):
        # Explicit page dimensions are only allowed with the 'custom' format.
        for paperformat in self.browse(cr, uid, ids, context=context):
            if paperformat.format != 'custom' and (paperformat.page_width or paperformat.page_height):
                return False
        return True

    _constraints = [
        (_check_format_or_page, 'Error ! You cannot select a format AND speficic '
                                'page width/height.', ['format']),
    ]

    _defaults = {
        'format': 'A4',
        'margin_top': 40,
        'margin_bottom': 20,
        'margin_left': 7,
        'margin_right': 7,
        'page_height': False,
        'page_width': False,
        'orientation': 'Landscape',
        'header_line': False,
        'header_spacing': 35,
        'dpi': 90,
    }
class res_company(osv.Model):
    # Links each company to its default paper format.
    _inherit = 'res.company'
    _columns = {'paperformat_id': fields.many2one('report.paperformat', 'Paper format')}

    def init(self, cr):
        # set a default paperformat based on rml one.
        ref = partial(self.pool['ir.model.data'].xmlid_to_res_id, cr, SUPERUSER_ID)
        ids = self.search(cr, SUPERUSER_ID, [('paperformat_id', '=', False)])
        for company in self.browse(cr, SUPERUSER_ID, ids):
            # Map the legacy RML paper format to the new paperformat record,
            # defaulting to the European (A4) format.
            paperformat_id = {
                'a4': ref('report.paperformat_euro'),
                'us_letter': ref('report.paperformat_us'),
            }.get(company.rml_paper_format) or ref('report.paperformat_euro')
            if paperformat_id:
                company.write({'paperformat_id': paperformat_id})
        # Chain to any other init() contributed by cooperating extensions.
        sup = super(res_company, self)
        if hasattr(sup, 'init'):
            sup.init(cr)
class ir_actions_report(osv.Model):
    # Extends report actions with a paper-format link and a helper used by
    # the ir.actions.report.xml form view.
    _inherit = 'ir.actions.report.xml'

    def associated_view(self, cr, uid, ids, context):
        """Used in the ir.actions.report.xml form view in order to search naively after the view(s)
        used in the rendering.

        Returns the ir.ui.view window action restricted to qweb views whose
        name matches the report, or False when the lookup fails.
        """
        try:
            report_name = self.browse(cr, uid, ids[0], context).report_name
            act_window_obj = self.pool.get('ir.actions.act_window')
            view_action = act_window_obj.for_xml_id(cr, uid, 'base', 'action_ui_view', context=context)
            view_action['domain'] = [('name', 'ilike', report_name.split('.')[1]), ('type', '=', 'qweb')]
            return view_action
        except Exception:
            # Narrowed from a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit.  The best-effort "False on any
            # lookup problem" behaviour is preserved.
            return False

    _columns = {'paperformat_id': fields.many2one('report.paperformat', 'Paper format')}
|
lidavidm/mathics-heroku
|
refs/heads/master
|
mathics/core/characters.py
|
3
|
# -*- coding: utf8 -*-
u"""
Mathics: a general-purpose computer algebra system
Copyright (C) 2011 Jan Pöschko
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Character ranges of letters
# Expressed as regex character-class ranges/singletons (e.g. 'a-zA-Z'):
# ASCII letters, accented Latin letters, Greek letters, Unicode letterlike
# symbols (U+210A-U+2138), private-use-area glyphs (U+F6B2-U+F833) and the
# fi/fl ligatures.  Presumably interpolated into a regular expression
# elsewhere -- the consumers of this constant are not visible here.
letters = u'a-zA-Z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u0103\u0106\u0107\
\u010c-\u010f\u0112-\u0115\u011a-\u012d\u0131\u0141\u0142\u0147\u0148\
\u0150-\u0153\u0158-\u0161\u0164\u0165\u016e-\u0171\u017d\u017e\
\u0391-\u03a1\u03a3-\u03a9\u03b1-\u03c9\u03d1\u03d2\u03d5\u03d6\
\u03da-\u03e1\u03f0\u03f1\u03f5\u210a-\u210c\u2110-\u2113\u211b\u211c\
\u2128\u212c\u212d\u212f-\u2131\u2133-\u2138\uf6b2-\uf6b5\uf6b7\uf6b9\
\uf6ba-\uf6bc\uf6be\uf6bf\uf6c1-\uf700\uf730\uf731\uf770\uf772\uf773\
\uf776\uf779\uf77a\uf77d-\uf780\uf782-\uf78b\uf78d-\uf78f\uf790\
\uf793-\uf79a\uf79c-\uf7a2\uf7a4-\uf7bd\uf800-\uf833\ufb01\ufb02'
# Character ranges of letterlikes
# Individual codepoints (no a-b ranges here) that behave like letters but
# are not alphabetic: currency/punctuation symbols, arrows, geometric
# shapes, music symbols, and private-use-area glyphs (U+F3A0-U+FE38).
# Like `letters` above, in a form suitable for a regex character class.
letterlikes = u'\u0024\u00A1\u00A2\u00A3\u00A5\u00A7\u00A9\u00AB\u00AE\
\u00B0\u00B5\u00B6\u00B8\u00BB\u00BF\u02C7\u02D8\u2013\u2014\u2020\u2021\
\u2022\u2026\u2032\u2033\u2035\u2036\u2060\u20AC\u210F\u2122\u2127\u212B\
\u21B5\u2205\u221E\u221F\u2220\u2221\u2222\u22EE\u22EF\u22F0\u22F1\u2300\
\u2318\u231A\u23B4\u23B5\u2500\u2502\u25A0\u25A1\u25AA\u25AE\u25AF\u25B2\
\u25B3\u25BC\u25BD\u25C0\u25C6\u25C7\u25CB\u25CF\u25E6\u25FB\u25FC\u2605\
\u2639\u263A\u2660\u2661\u2662\u2663\u266D\u266E\u266F\u2736\uF3A0\uF3B8\
\uF3B9\uF527\uF528\uF720\uF721\uF722\uF723\uF725\uF749\uF74A\uF74D\uF74E\
\uF74F\uF750\uF751\uF752\uF753\uF754\uF755\uF756\uF757\uF760\uF763\uF766\
\uF768\uF769\uF76A\uF76B\uF76C\uF7D4\uF800\uF801\uF802\uF803\uF804\uF805\
\uF806\uF807\uF808\uF809\uF80A\uF80B\uF80C\uF80D\uF80E\uF80F\uF810\uF811\
\uF812\uF813\uF814\uF815\uF816\uF817\uF818\uF819\uF81A\uF81B\uF81C\uF81D\
\uF81E\uF81F\uF820\uF821\uF822\uF823\uF824\uF825\uF826\uF827\uF828\uF829\
\uF82A\uF82B\uF82C\uF82D\uF82E\uF82F\uF830\uF831\uF832\uF833\uFE35\uFE36\
\uFE37\uFE38'
# All supported longname characters
named_characters = {
'AAcute': u'\u00E1',
'ABar': u'\u0101',
'ACup': u'\u0103',
'ADoubleDot': u'\u00E4',
'AE': u'\u00E6',
'AGrave': u'\u00E0',
'AHat': u'\u00E2',
'Aleph': u'\u2135',
'AliasDelimiter': u'\uF764',
'AliasIndicator': u'\uF768',
'AlignmentMarker': u'\uF760',
'Alpha': u'\u03B1',
'AltKey': u'\uF7D1',
'And': u'\u2227',
'Angle': u'\u2220',
'Angstrom': u'\u212B',
'ARing': u'\u00E5',
'AscendingEllipsis': u'\u22F0',
'ATilde': u'\u00E3',
'AutoLeftMatch': u'\uF3A8',
'AutoOperand': u'\uF3AE',
'AutoPlaceholder': u'\uF3A4',
'AutoRightMatch': u'\uF3A9',
'AutoSpace': u'\uF3AD',
'Backslash': u'\u2216',
'BeamedEighthNote': u'\u266B',
'BeamedSixteenthNote': u'\u266C',
'Because': u'\u2235',
'Bet': u'\u2136',
'Beta': u'\u03B2',
'BlackBishop': u'\u265D',
'BlackKing': u'\u265A',
'BlackKnight': u'\u265E',
'BlackPawn': u'\u265F',
'BlackQueen': u'\u265B',
'BlackRook': u'\u265C',
'Breve': u'\u02D8',
'Bullet': u'\u2022',
'CAcute': u'\u0107',
'CapitalAAcute': u'\u00C1',
'CapitalABar': u'\u0100',
'CapitalACup': u'\u0102',
'CapitalADoubleDot': u'\u00C4',
'CapitalAE': u'\u00C6',
'CapitalAGrave': u'\u00C0',
'CapitalAHat': u'\u00C2',
'CapitalAlpha': u'\u0391',
'CapitalARing': u'\u00C5',
'CapitalATilde': u'\u00C3',
'CapitalBeta': u'\u0392',
'CapitalCAcute': u'\u0106',
'CapitalCCedilla': u'\u00C7',
'CapitalCHacek': u'\u010C',
'CapitalChi': u'\u03A7',
'CapitalDelta': u'\u0394',
'CapitalDHacek': u'\u010E',
'CapitalDifferentialD': u'\uF74B',
'CapitalDigamma': u'\u03DC',
'CapitalEAcute': u'\u00C9',
'CapitalEBar': u'\u0112',
'CapitalECup': u'\u0114',
'CapitalEDoubleDot': u'\u00CB',
'CapitalEGrave': u'\u00C8',
'CapitalEHacek': u'\u011A',
'CapitalEHat': u'\u00CA',
'CapitalEpsilon': u'\u0395',
'CapitalEta': u'\u0397',
'CapitalEth': u'\u00D0',
'CapitalGamma': u'\u0393',
'CapitalIAcute': u'\u00CD',
'CapitalICup': u'\u012C',
'CapitalIDoubleDot': u'\u00CF',
'CapitalIGrave': u'\u00CC',
'CapitalIHat': u'\u00CE',
'CapitalIota': u'\u0399',
'CapitalKappa': u'\u039A',
'CapitalKoppa': u'\u03DE',
'CapitalLambda': u'\u039B',
'CapitalLSlash': u'\u0141',
'CapitalMu': u'\u039C',
'CapitalNHacek': u'\u0147',
'CapitalNTilde': u'\u00D1',
'CapitalNu': u'\u039D',
'CapitalOAcute': u'\u00D3',
'CapitalODoubleAcute': u'\u0150',
'CapitalODoubleDot': u'\u00D6',
'CapitalOE': u'\u0152',
'CapitalOGrave': u'\u00D2',
'CapitalOHat': u'\u00D4',
'CapitalOmega': u'\u03A9',
'CapitalOmicron': u'\u039F',
'CapitalOSlash': u'\u00D8',
'CapitalOTilde': u'\u00D5',
'CapitalPhi': u'\u03A6',
'CapitalPi': u'\u03A0',
'CapitalPsi': u'\u03A8',
'CapitalRHacek': u'\u0158',
'CapitalRho': u'\u03A1',
'CapitalSampi': u'\u03E0',
'CapitalSHacek': u'\u0160',
'CapitalSigma': u'\u03A3',
'CapitalStigma': u'\u03DA',
'CapitalTau': u'\u03A4',
'CapitalTHacek': u'\u0164',
'CapitalTheta': u'\u0398',
'CapitalThorn': u'\u00DE',
'CapitalUAcute': u'\u00DA',
'CapitalUDoubleAcute': u'\u0170',
'CapitalUDoubleDot': u'\u00DC',
'CapitalUGrave': u'\u00D9',
'CapitalUHat': u'\u00DB',
'CapitalUpsilon': u'\u03A5',
'CapitalURing': u'\u016E',
'CapitalXi': u'\u039E',
'CapitalYAcute': u'\u00DD',
'CapitalZeta': u'\u0396',
'CapitalZHacek': u'\u017D',
'Cap': u'\u2322',
'CCedilla': u'\u00E7',
'Cedilla': u'\u00B8',
'CenterDot': u'\u00B7',
'CenterEllipsis': u'\u22EF',
'Cent': u'\u00A2',
'CHacek': u'\u010D',
'Checkmark': u'\u2713',
'Chi': u'\u03C7',
'CircleDot': u'\u2299',
'CircleMinus': u'\u2296',
'CirclePlus': u'\u2295',
'CircleTimes': u'\u2297',
'ClockwiseContourIntegral': u'\u2232',
'CloseCurlyDoubleQuote': u'\u201D',
'CloseCurlyQuote': u'\u2019',
'CloverLeaf': u'\u2318',
'ClubSuit': u'\u2663',
'Colon': u'\u2236',
'CommandKey': u'\uF76A',
'Congruent': u'\u2261',
'Conjugate': u'\uF3C8',
'ConjugateTranspose': u'\uF3C9',
'ConstantC': u'\uF7DA',
'Continuation': u'\uF3B1',
'ContourIntegral': u'\u222E',
'ControlKey': u'\uF763',
'Coproduct': u'\u2210',
'Copyright': u'\u00A9',
'CounterClockwiseContourIntegral': u'\u2233',
'Cross': u'\uF4A0',
'CupCap': u'\u224D',
'Cup': u'\u2323',
'CurlyCapitalUpsilon': u'\u03D2',
'CurlyEpsilon': u'\u03B5',
'CurlyKappa': u'\u03F0',
'CurlyPhi': u'\u03C6',
'CurlyPi': u'\u03D6',
'CurlyRho': u'\u03F1',
'CurlyTheta': u'\u03D1',
'Currency': u'\u00A4',
'Dagger': u'\u2020',
'Dalet': u'\u2138',
'Dash': u'\u2013',
'Degree': u'\u00B0',
'DeleteKey': u'\uF7D0',
'Del': u'\u2207',
'Delta': u'\u03B4',
'DescendingEllipsis': u'\u22F1',
'DHacek': u'\u010F',
'Diameter': u'\u2300',
'Diamond': u'\u22C4',
'DiamondSuit': u'\u2662',
'DifferenceDelta': u'\u2206',
'DifferentialD': u'\uF74C',
'Digamma': u'\u03DD',
'DiscreteRatio': u'\uF4A4',
'DiscreteShift': u'\uF4A3',
'DiscretionaryHyphen': u'\u00AD',
'DiscretionaryLineSeparator': u'\uF76E',
'DiscretionaryParagraphSeparator': u'\uF76F',
'Divide': u'\u00F7',
'DotEqual': u'\u2250',
'DotlessI': u'\u0131',
'DotlessJ': u'\uF700',
'DottedSquare': u'\uF751',
'DoubleContourIntegral': u'\u222F',
'DoubleDagger': u'\u2021',
'DoubledGamma': u'\uF74A',
'DoubleDownArrow': u'\u21D3',
'DoubledPi': u'\uF749',
'DoubleLeftArrow': u'\u21D0',
'DoubleLeftRightArrow': u'\u21D4',
'DoubleLeftTee': u'\u2AE4',
'DoubleLongLeftArrow': u'\u27F8',
'DoubleLongLeftRightArrow': u'\u27FA',
'DoubleLongRightArrow': u'\u27F9',
'DoublePrime': u'\u2033',
'DoubleRightArrow': u'\u21D2',
'DoubleRightTee': u'\u22A8',
'DoubleStruckA': u'\uF6E6',
'DoubleStruckB': u'\uF6E7',
'DoubleStruckC': u'\uF6E8',
'DoubleStruckCapitalA': u'\uF7A4',
'DoubleStruckCapitalB': u'\uF7A5',
'DoubleStruckCapitalC': u'\uF7A6',
'DoubleStruckCapitalD': u'\uF7A7',
'DoubleStruckCapitalE': u'\uF7A8',
'DoubleStruckCapitalF': u'\uF7A9',
'DoubleStruckCapitalG': u'\uF7AA',
'DoubleStruckCapitalH': u'\uF7AB',
'DoubleStruckCapitalI': u'\uF7AC',
'DoubleStruckCapitalJ': u'\uF7AD',
'DoubleStruckCapitalK': u'\uF7AE',
'DoubleStruckCapitalL': u'\uF7AF',
'DoubleStruckCapitalM': u'\uF7B0',
'DoubleStruckCapitalN': u'\uF7B1',
'DoubleStruckCapitalO': u'\uF7B2',
'DoubleStruckCapitalP': u'\uF7B3',
'DoubleStruckCapitalQ': u'\uF7B4',
'DoubleStruckCapitalR': u'\uF7B5',
'DoubleStruckCapitalS': u'\uF7B6',
'DoubleStruckCapitalT': u'\uF7B7',
'DoubleStruckCapitalU': u'\uF7B8',
'DoubleStruckCapitalV': u'\uF7B9',
'DoubleStruckCapitalW': u'\uF7BA',
'DoubleStruckCapitalX': u'\uF7BB',
'DoubleStruckCapitalY': u'\uF7BC',
'DoubleStruckCapitalZ': u'\uF7BD',
'DoubleStruckD': u'\uF6E9',
'DoubleStruckE': u'\uF6EA',
'DoubleStruckEight': u'\uF7E3',
'DoubleStruckF': u'\uF6EB',
'DoubleStruckFive': u'\uF7E0',
'DoubleStruckFour': u'\uF7DF',
'DoubleStruckG': u'\uF6EC',
'DoubleStruckH': u'\uF6ED',
'DoubleStruckI': u'\uF6EE',
'DoubleStruckJ': u'\uF6EF',
'DoubleStruckK': u'\uF6F0',
'DoubleStruckL': u'\uF6F1',
'DoubleStruckM': u'\uF6F2',
'DoubleStruckN': u'\uF6F3',
'DoubleStruckNine': u'\uF7E4',
'DoubleStruckO': u'\uF6F4',
'DoubleStruckOne': u'\uF7DC',
'DoubleStruckP': u'\uF6F5',
'DoubleStruckQ': u'\uF6F6',
'DoubleStruckR': u'\uF6F7',
'DoubleStruckS': u'\uF6F8',
'DoubleStruckSeven': u'\uF7E2',
'DoubleStruckSix': u'\uF7E1',
'DoubleStruckT': u'\uF6F9',
'DoubleStruckThree': u'\uF7DE',
'DoubleStruckTwo': u'\uF7DD',
'DoubleStruckU': u'\uF6FA',
'DoubleStruckV': u'\uF6FB',
'DoubleStruckW': u'\uF6FC',
'DoubleStruckX': u'\uF6FD',
'DoubleStruckY': u'\uF6FE',
'DoubleStruckZ': u'\uF6FF',
'DoubleStruckZero': u'\uF7DB',
'DoubleUpArrow': u'\u21D1',
'DoubleUpDownArrow': u'\u21D5',
'DoubleVerticalBar': u'\u2225',
'DownArrowBar': u'\u2913',
'DownArrow': u'\u2193',
'DownArrowUpArrow': u'\u21F5',
'DownBreve': u'\uF755',
'DownExclamation': u'\u00A1',
'DownLeftRightVector': u'\u2950',
'DownLeftTeeVector': u'\u295E',
'DownLeftVector': u'\u21BD',
'DownLeftVectorBar': u'\u2956',
'DownPointer': u'\u25BE',
'DownQuestion': u'\u00BF',
'DownRightTeeVector': u'\u295F',
'DownRightVector': u'\u21C1',
'DownRightVectorBar': u'\u2957',
'DownTeeArrow': u'\u21A7',
'DownTee': u'\u22A4',
'EAcute': u'\u00E9',
'Earth': u'\u2641',
'EBar': u'\u0113',
'ECup': u'\u0115',
'EDoubleDot': u'\u00EB',
'EGrave': u'\u00E8',
'EHacek': u'\u011B',
'EHat': u'\u00EA',
'EighthNote': u'\u266A',
'Element': u'\u2208',
'Ellipsis': u'\u2026',
'EmptyCircle': u'\u25CB',
'EmptyDiamond': u'\u25C7',
'EmptyDownTriangle': u'\u25BD',
'EmptyRectangle': u'\u25AF',
'EmptySet': u'\u2205',
'EmptySmallCircle': u'\u25E6',
'EmptySmallSquare': u'\u25FB',
'EmptySquare': u'\u25A1',
'EmptyUpTriangle': u'\u25B3',
'EmptyVerySmallSquare': u'\u25AB',
'EnterKey': u'\uF7D4',
'EntityEnd': u'\uF3B9',
'EntityStart': u'\uF3B8',
'Epsilon': u'\u03F5',
'Equal': u'\uF431',
'EqualTilde': u'\u2242',
'Equilibrium': u'\u21CC',
'Equivalent': u'\u29E6',
'ErrorIndicator': u'\uF767',
'EscapeKey': u'\uF769',
'Eta': u'\u03B7',
'Eth': u'\u00F0',
'Euro': u'\u20AC',
'Exists': u'\u2203',
'ExponentialE': u'\uF74D',
'FiLigature': u'\uFB01',
'FilledCircle': u'\u25CF',
'FilledDiamond': u'\u25C6',
'FilledDownTriangle': u'\u25BC',
'FilledLeftTriangle': u'\u25C0',
'FilledRectangle': u'\u25AE',
'FilledRightTriangle': u'\u25B6',
'FilledSmallCircle': u'\uF750',
'FilledSmallSquare': u'\u25FC',
'FilledSquare': u'\u25A0',
'FilledUpTriangle': u'\u25B2',
'FilledVerySmallSquare': u'\u25AA',
'FinalSigma': u'\u03C2',
'FirstPage': u'\uF7FA',
'FivePointedStar': u'\u2605',
'Flat': u'\u266D',
'FlLigature': u'\uFB02',
'Florin': u'\u0192',
'ForAll': u'\u2200',
'FormalA': u'\uF800',
'FormalB': u'\uF801',
'FormalC': u'\uF802',
'FormalCapitalA': u'\uF81A',
'FormalCapitalB': u'\uF81B',
'FormalCapitalC': u'\uF81C',
'FormalCapitalD': u'\uF81D',
'FormalCapitalE': u'\uF81E',
'FormalCapitalF': u'\uF81F',
'FormalCapitalG': u'\uF820',
'FormalCapitalH': u'\uF821',
'FormalCapitalI': u'\uF822',
'FormalCapitalJ': u'\uF823',
'FormalCapitalK': u'\uF824',
'FormalCapitalL': u'\uF825',
'FormalCapitalM': u'\uF826',
'FormalCapitalN': u'\uF827',
'FormalCapitalO': u'\uF828',
'FormalCapitalP': u'\uF829',
'FormalCapitalQ': u'\uF82A',
'FormalCapitalR': u'\uF82B',
'FormalCapitalS': u'\uF82C',
'FormalCapitalT': u'\uF82D',
'FormalCapitalU': u'\uF82E',
'FormalCapitalV': u'\uF82F',
'FormalCapitalW': u'\uF830',
'FormalCapitalX': u'\uF831',
'FormalCapitalY': u'\uF832',
'FormalCapitalZ': u'\uF833',
'FormalD': u'\uF803',
'FormalE': u'\uF804',
'FormalF': u'\uF805',
'FormalG': u'\uF806',
'FormalH': u'\uF807',
'FormalI': u'\uF808',
'FormalJ': u'\uF809',
'FormalK': u'\uF80A',
'FormalL': u'\uF80B',
'FormalM': u'\uF80C',
'FormalN': u'\uF80D',
'FormalO': u'\uF80E',
'FormalP': u'\uF80F',
'FormalQ': u'\uF810',
'FormalR': u'\uF811',
'FormalS': u'\uF812',
'FormalT': u'\uF813',
'FormalU': u'\uF814',
'FormalV': u'\uF815',
'FormalW': u'\uF816',
'FormalX': u'\uF817',
'FormalY': u'\uF818',
'FormalZ': u'\uF819',
'FreakedSmiley': u'\uF721',
'Function': u'\uF4A1',
'Gamma': u'\u03B3',
'Gimel': u'\u2137',
'GothicA': u'\uF6CC',
'GothicB': u'\uF6CD',
'GothicC': u'\uF6CE',
'GothicCapitalA': u'\uF78A',
'GothicCapitalB': u'\uF78B',
'GothicCapitalC': u'\u212D',
'GothicCapitalD': u'\uF78D',
'GothicCapitalE': u'\uF78E',
'GothicCapitalF': u'\uF78F',
'GothicCapitalG': u'\uF790',
'GothicCapitalH': u'\u210C',
'GothicCapitalI': u'\u2111',
'GothicCapitalJ': u'\uF793',
'GothicCapitalK': u'\uF794',
'GothicCapitalL': u'\uF795',
'GothicCapitalM': u'\uF796',
'GothicCapitalN': u'\uF797',
'GothicCapitalO': u'\uF798',
'GothicCapitalP': u'\uF799',
'GothicCapitalQ': u'\uF79A',
'GothicCapitalR': u'\u211C',
'GothicCapitalS': u'\uF79C',
'GothicCapitalT': u'\uF79D',
'GothicCapitalU': u'\uF79E',
'GothicCapitalV': u'\uF79F',
'GothicCapitalW': u'\uF7A0',
'GothicCapitalX': u'\uF7A1',
'GothicCapitalY': u'\uF7A2',
'GothicCapitalZ': u'\u2128',
'GothicD': u'\uF6CF',
'GothicE': u'\uF6D0',
'GothicEight': u'\uF7ED',
'GothicF': u'\uF6D1',
'GothicFive': u'\uF7EA',
'GothicFour': u'\uF7E9',
'GothicG': u'\uF6D2',
'GothicH': u'\uF6D3',
'GothicI': u'\uF6D4',
'GothicJ': u'\uF6D5',
'GothicK': u'\uF6D6',
'GothicL': u'\uF6D7',
'GothicM': u'\uF6D8',
'GothicN': u'\uF6D9',
'GothicNine': u'\uF7EF',
'GothicO': u'\uF6DA',
'GothicOne': u'\uF7E6',
'GothicP': u'\uF6DB',
'GothicQ': u'\uF6DC',
'GothicR': u'\uF6DD',
'GothicS': u'\uF6DE',
'GothicSeven': u'\uF7EC',
'GothicSix': u'\uF7EB',
'GothicT': u'\uF6DF',
'GothicThree': u'\uF7E8',
'GothicTwo': u'\uF7E7',
'GothicU': u'\uF6E0',
'GothicV': u'\uF6E1',
'GothicW': u'\uF6E2',
'GothicX': u'\uF6E3',
'GothicY': u'\uF6E4',
'GothicZ': u'\uF6E5',
'GothicZero': u'\uF7E5',
'GrayCircle': u'\uF753',
'GraySquare': u'\uF752',
'GreaterEqualLess': u'\u22DB',
'GreaterEqual': u'\u2265',
'GreaterFullEqual': u'\u2267',
'GreaterGreater': u'\u226B',
'GreaterLess': u'\u2277',
'GreaterSlantEqual': u'\u2A7E',
'GreaterTilde': u'\u2273',
'Hacek': u'\u02C7',
'HappySmiley': u'\u263A',
'HBar': u'\u210F',
'HeartSuit': u'\u2661',
'HermitianConjugate': u'\uF3CE',
'HorizontalLine': u'\u2500',
'HumpDownHump': u'\u224E',
'HumpEqual': u'\u224F',
'Hyphen': u'\u2010',
'IAcute': u'\u00ED',
'ICup': u'\u012D',
'IDoubleDot': u'\u00EF',
'IGrave': u'\u00EC',
'IHat': u'\u00EE',
'ImaginaryI': u'\uF74E',
'ImaginaryJ': u'\uF74F',
'ImplicitPlus': u'\uF39E',
'Implies': u'\uF523',
'Infinity': u'\u221E',
'Integral': u'\u222B',
'Intersection': u'\u22C2',
'InvisibleApplication': u'\uF76D',
'InvisibleComma': u'\uF765',
'InvisiblePostfixScriptBase': u'\uF3B4',
'InvisiblePrefixScriptBase': u'\uF3B3',
'InvisibleSpace': u'\uF360',
'InvisibleTimes': u'\u2062',
'Iota': u'\u03B9',
'Jupiter': u'\u2643',
'Kappa': u'\u03BA',
'KernelIcon': u'\uF756',
'Koppa': u'\u03DF',
'Lambda': u'\u03BB',
'LastPage': u'\uF7FB',
'LeftAngleBracket': u'\u2329',
'LeftArrowBar': u'\u21E4',
'LeftArrow': u'\u2190',
'LeftArrowRightArrow': u'\u21C6',
'LeftBracketingBar': u'\uF603',
'LeftCeiling': u'\u2308',
'LeftDoubleBracket': u'\u301A',
'LeftDoubleBracketingBar': u'\uF605',
'LeftDownTeeVector': u'\u2961',
'LeftDownVectorBar': u'\u2959',
'LeftDownVector': u'\u21C3',
'LeftFloor': u'\u230A',
'LeftGuillemet': u'\u00AB',
'LeftModified': u'\uF76B',
'LeftPointer': u'\u25C2',
'LeftRightArrow': u'\u2194',
'LeftRightVector': u'\u294E',
'LeftSkeleton': u'\uF761',
'LeftTee': u'\u22A3',
'LeftTeeArrow': u'\u21A4',
'LeftTeeVector': u'\u295A',
'LeftTriangle': u'\u22B2',
'LeftTriangleBar': u'\u29CF',
'LeftTriangleEqual': u'\u22B4',
'LeftUpDownVector': u'\u2951',
'LeftUpTeeVector': u'\u2960',
'LeftUpVector': u'\u21BF',
'LeftUpVectorBar': u'\u2958',
'LeftVector': u'\u21BC',
'LeftVectorBar': u'\u2952',
'LessEqual': u'\u2264',
'LessEqualGreater': u'\u22DA',
'LessFullEqual': u'\u2266',
'LessGreater': u'\u2276',
'LessLess': u'\u226A',
'LessSlantEqual': u'\u2A7D',
'LessTilde': u'\u2272',
'LetterSpace': u'\uF754',
'LightBulb': u'\uF723',
'LongDash': u'\u2014',
'LongEqual': u'\uF7D9',
'LongLeftArrow': u'\u27F5',
'LongLeftRightArrow': u'\u27F7',
'LongRightArrow': u'\u27F6',
'LowerLeftArrow': u'\u2199',
'LowerRightArrow': u'\u2198',
'LSlash': u'\u0142',
'Mars': u'\u2642',
'MathematicaIcon': u'\uF757',
'MeasuredAngle': u'\u2221',
'MediumSpace': u'\u205F',
'Mercury': u'\u263F',
'Mho': u'\u2127',
'Micro': u'\u00B5',
'MinusPlus': u'\u2213',
'Mu': u'\u03BC',
'Nand': u'\u22BC',
'Natural': u'\u266E',
'NegativeMediumSpace': u'\uF383',
'NegativeThickSpace': u'\uF384',
'NegativeThinSpace': u'\uF382',
'NegativeVeryThinSpace': u'\uF380',
'Neptune': u'\u2646',
'NestedGreaterGreater': u'\u2AA2',
'NestedLessLess': u'\u2AA1',
'NeutralSmiley': u'\uF722',
'NHacek': u'\u0148',
'NoBreak': u'\u2060',
'NonBreakingSpace': u'\u00A0',
'Nor': u'\u22BD',
'NotCongruent': u'\u2262',
'NotCupCap': u'\u226D',
'NotDoubleVerticalBar': u'\u2226',
'NotElement': u'\u2209',
'NotEqual': u'\u2260',
'NotEqualTilde': u'\uF400',
'NotExists': u'\u2204',
'NotGreater': u'\u226F',
'NotGreaterEqual': u'\u2271',
'NotGreaterFullEqual': u'\u2269',
'NotGreaterGreater': u'\uF427',
'NotGreaterLess': u'\u2279',
'NotGreaterSlantEqual': u'\uF429',
'NotGreaterTilde': u'\u2275',
'NotHumpDownHump': u'\uF402',
'NotHumpEqual': u'\uF401',
'NotLeftTriangle': u'\u22EA',
'NotLeftTriangleBar': u'\uF412',
'NotLeftTriangleEqual': u'\u22EC',
'NotLessEqual': u'\u2270',
'NotLessFullEqual': u'\u2268',
'NotLessGreater': u'\u2278',
'NotLess': u'\u226E',
'NotLessLess': u'\uF422',
'NotLessSlantEqual': u'\uF424',
'NotLessTilde': u'\u2274',
'Not': u'\u00AC',
'NotNestedGreaterGreater': u'\uF428',
'NotNestedLessLess': u'\uF423',
'NotPrecedes': u'\u2280',
'NotPrecedesEqual': u'\uF42B',
'NotPrecedesSlantEqual': u'\u22E0',
'NotPrecedesTilde': u'\u22E8',
'NotReverseElement': u'\u220C',
'NotRightTriangle': u'\u22EB',
'NotRightTriangleBar': u'\uF413',
'NotRightTriangleEqual': u'\u22ED',
'NotSquareSubset': u'\uF42E',
'NotSquareSubsetEqual': u'\u22E2',
'NotSquareSuperset': u'\uF42F',
'NotSquareSupersetEqual': u'\u22E3',
'NotSubset': u'\u2284',
'NotSubsetEqual': u'\u2288',
'NotSucceeds': u'\u2281',
'NotSucceedsEqual': u'\uF42D',
'NotSucceedsSlantEqual': u'\u22E1',
'NotSucceedsTilde': u'\u22E9',
'NotSuperset': u'\u2285',
'NotSupersetEqual': u'\u2289',
'NotTilde': u'\u2241',
'NotTildeEqual': u'\u2244',
'NotTildeFullEqual': u'\u2247',
'NotTildeTilde': u'\u2249',
'NotVerticalBar': u'\u2224',
'NTilde': u'\u00F1',
'Nu': u'\u03BD',
'Null': u'\uF3A0',
'NumberSign': u'\uF724',
'OAcute': u'\u00F3',
'ODoubleAcute': u'\u0151',
'ODoubleDot': u'\u00F6',
'OE': u'\u0153',
'OGrave': u'\u00F2',
'OHat': u'\u00F4',
'Omega': u'\u03C9',
'Omicron': u'\u03BF',
'OpenCurlyDoubleQuote': u'\u201C',
'OpenCurlyQuote': u'\u2018',
'OptionKey': u'\uF7D2',
'Or': u'\u2228',
'OSlash': u'\u00F8',
'OTilde': u'\u00F5',
'OverBrace': u'\uFE37',
'OverBracket': u'\u23B4',
'OverParenthesis': u'\uFE35',
'Paragraph': u'\u00B6',
'PartialD': u'\u2202',
'Phi': u'\u03D5',
'Pi': u'\u03C0',
'Piecewise': u'\uF361',
'Placeholder': u'\uF528',
'PlusMinus': u'\u00B1',
'Pluto': u'\u2647',
'Precedes': u'\u227A',
'PrecedesEqual': u'\u2AAF',
'PrecedesSlantEqual': u'\u227C',
'PrecedesTilde': u'\u227E',
'Prime': u'\u2032',
'Product': u'\u220F',
'Proportion': u'\u2237',
'Proportional': u'\u221D',
'Psi': u'\u03C8',
'QuarterNote': u'\u2669',
'RawAmpersand': u'\u0026',
'RawAt': u'\u0040',
'RawBackquote': u'\u0060',
'RawBackslash': u'\u005C',
'RawColon': u'\u003A',
'RawComma': u'\u002C',
'RawDash': u'\u002D',
'RawDollar': u'\u0024',
'RawDot': u'\u002E',
'RawDoubleQuote': u'\u0022',
'RawEqual': u'\u003D',
'RawEscape': u'\u001B',
'RawExclamation': u'\u0021',
'RawGreater': u'\u003E',
'RawLeftBrace': u'\u007B',
'RawLeftBracket': u'\u005B',
'RawLeftParenthesis': u'\u0028',
'RawLess': u'\u003C',
'RawNumberSign': u'\u0023',
'RawPercent': u'\u0025',
'RawPlus': u'\u002B',
'RawQuestion': u'\u003F',
'RawQuote': u'\u0027',
'RawRightBrace': u'\u007D',
'RawRightBracket': u'\u005D',
'RawRightParenthesis': u'\u0029',
'RawSemicolon': u'\u003B',
'RawSlash': u'\u002F',
'RawSpace': u'\u0020',
'RawStar': u'\u002A',
'RawTab': u'\u0009',
'RawTilde': u'\u007E',
'RawUnderscore': u'\u005F',
'RawVerticalBar': u'\u007C',
'RawWedge': u'\u005E',
'RegisteredTrademark': u'\u00AE',
'ReturnIndicator': u'\u21B5',
'ReturnKey': u'\uF766',
'ReverseDoublePrime': u'\u2036',
'ReverseElement': u'\u220B',
'ReverseEquilibrium': u'\u21CB',
'ReversePrime': u'\u2035',
'ReverseUpEquilibrium': u'\u296F',
'RHacek': u'\u0159',
'Rho': u'\u03C1',
'RightAngle': u'\u221F',
'RightAngleBracket': u'\u232A',
'RightArrow': u'\u2192',
'RightArrowBar': u'\u21E5',
'RightArrowLeftArrow': u'\u21C4',
'RightBracketingBar': u'\uF604',
'RightCeiling': u'\u2309',
'RightDoubleBracket': u'\u301B',
'RightDoubleBracketingBar': u'\uF606',
'RightDownTeeVector': u'\u295D',
'RightDownVector': u'\u21C2',
'RightDownVectorBar': u'\u2955',
'RightFloor': u'\u230B',
'RightGuillemet': u'\u00BB',
'RightModified': u'\uF76C',
'RightPointer': u'\u25B8',
'RightSkeleton': u'\uF762',
'RightTee': u'\u22A2',
'RightTeeArrow': u'\u21A6',
'RightTeeVector': u'\u295B',
'RightTriangle': u'\u22B3',
'RightTriangleBar': u'\u29D0',
'RightTriangleEqual': u'\u22B5',
'RightUpDownVector': u'\u294F',
'RightUpTeeVector': u'\u295C',
'RightUpVector': u'\u21BE',
'RightUpVectorBar': u'\u2954',
'RightVector': u'\u21C0',
'RightVectorBar': u'\u2953',
'RoundImplies': u'\u2970',
'RoundSpaceIndicator': u'\uF3B2',
'Rule': u'\uF522',
'RuleDelayed': u'\uF51F',
'SadSmiley': u'\u2639',
'Sampi': u'\u03E0',
'Saturn': u'\u2644',
'ScriptA': u'\uF6B2',
'ScriptB': u'\uF6B3',
'ScriptC': u'\uF6B4',
'ScriptCapitalA': u'\uF770',
'ScriptCapitalB': u'\u212C',
'ScriptCapitalC': u'\uF772',
'ScriptCapitalD': u'\uF773',
'ScriptCapitalE': u'\u2130',
'ScriptCapitalF': u'\u2131',
'ScriptCapitalG': u'\uF776',
'ScriptCapitalH': u'\u210B',
'ScriptCapitalI': u'\u2110',
'ScriptCapitalJ': u'\uF779',
'ScriptCapitalK': u'\uF77A',
'ScriptCapitalL': u'\u2112',
'ScriptCapitalM': u'\u2133',
'ScriptCapitalN': u'\uF77D',
'ScriptCapitalO': u'\uF77E',
'ScriptCapitalP': u'\u2118',
'ScriptCapitalQ': u'\uF780',
'ScriptCapitalR': u'\u211B',
'ScriptCapitalS': u'\uF782',
'ScriptCapitalT': u'\uF783',
'ScriptCapitalU': u'\uF784',
'ScriptCapitalV': u'\uF785',
'ScriptCapitalW': u'\uF786',
'ScriptCapitalX': u'\uF787',
'ScriptCapitalY': u'\uF788',
'ScriptCapitalZ': u'\uF789',
'ScriptD': u'\uF6B5',
'ScriptDotlessI': u'\uF730',
'ScriptDotlessJ': u'\uF731',
'ScriptE': u'\u212F',
'ScriptEight': u'\uF7F8',
'ScriptF': u'\uF6B7',
'ScriptFive': u'\uF7F5',
'ScriptFour': u'\uF7F4',
'ScriptG': u'\u210A',
'ScriptH': u'\uF6B9',
'ScriptI': u'\uF6BA',
'ScriptJ': u'\uF6BB',
'ScriptK': u'\uF6BC',
'ScriptL': u'\u2113',
'ScriptM': u'\uF6BE',
'ScriptN': u'\uF6BF',
'ScriptNine': u'\uF7F9',
'ScriptO': u'\u2134',
'ScriptOne': u'\uF7F1',
'ScriptP': u'\uF6C1',
'ScriptQ': u'\uF6C2',
'ScriptR': u'\uF6C3',
'ScriptS': u'\uF6C4',
'ScriptSeven': u'\uF7F7',
'ScriptSix': u'\uF7F6',
'ScriptT': u'\uF6C5',
'ScriptThree': u'\uF7F3',
'ScriptTwo': u'\uF7F2',
'ScriptU': u'\uF6C6',
'ScriptV': u'\uF6C7',
'ScriptW': u'\uF6C8',
'ScriptX': u'\uF6C9',
'ScriptY': u'\uF6CA',
'ScriptZ': u'\uF6CB',
'ScriptZero': u'\uF7F0',
'Section': u'\u00A7',
'SelectionPlaceholder': u'\uF527',
'SHacek': u'\u0161',
'Sharp': u'\u266F',
'ShortLeftArrow': u'\uF526',
'ShortRightArrow': u'\uF525',
'Sigma': u'\u03C3',
'SixPointedStar': u'\u2736',
'SkeletonIndicator': u'\u2043',
'SmallCircle': u'\u2218',
'SpaceIndicator': u'\u2423',
'SpaceKey': u'\uF7BF',
'SpadeSuit': u'\u2660',
'SpanFromAbove': u'\uF3BB',
'SpanFromBoth': u'\uF3BC',
'SpanFromLeft': u'\uF3BA',
'SphericalAngle': u'\u2222',
'Sqrt': u'\u221A',
'Square': u'\uF520',
'SquareIntersection': u'\u2293',
'SquareSubset': u'\u228F',
'SquareSubsetEqual': u'\u2291',
'SquareSuperset': u'\u2290',
'SquareSupersetEqual': u'\u2292',
'SquareUnion': u'\u2294',
'Star': u'\u22C6',
'Sterling': u'\u00A3',
'Stigma': u'\u03DB',
'Subset': u'\u2282',
'SubsetEqual': u'\u2286',
'Succeeds': u'\u227B',
'SucceedsEqual': u'\u2AB0',
'SucceedsSlantEqual': u'\u227D',
'SucceedsTilde': u'\u227F',
'SuchThat': u'\u220D',
'Sum': u'\u2211',
'Superset': u'\u2283',
'SupersetEqual': u'\u2287',
'SystemEnterKey': u'\uF75F',
'SZ': u'\u00DF',
'TabKey': u'\uF7BE',
'Tau': u'\u03C4',
'THacek': u'\u0165',
'Therefore': u'\u2234',
'Theta': u'\u03B8',
'ThickSpace': u'\u2005',
'ThinSpace': u'\u2009',
'Thorn': u'\u00FE',
'Tilde': u'\u223C',
'TildeEqual': u'\u2243',
'TildeFullEqual': u'\u2245',
'TildeTilde': u'\u2248',
'Times': u'\u00D7',
'Trademark': u'\u2122',
'Transpose': u'\uF3C7',
'UAcute': u'\u00FA',
'UDoubleAcute': u'\u0171',
'UDoubleDot': u'\u00FC',
'UGrave': u'\u00F9',
'UHat': u'\u00FB',
'UnderBrace': u'\uFE38',
'UnderBracket': u'\u23B5',
'UnderParenthesis': u'\uFE36',
'Union': u'\u22C3',
'UnionPlus': u'\u228E',
'UpArrow': u'\u2191',
'UpArrowBar': u'\u2912',
'UpArrowDownArrow': u'\u21C5',
'UpDownArrow': u'\u2195',
'UpEquilibrium': u'\u296E',
'UpperLeftArrow': u'\u2196',
'UpperRightArrow': u'\u2197',
'UpPointer': u'\u25B4',
'Upsilon': u'\u03C5',
'UpTee': u'\u22A5',
'UpTeeArrow': u'\u21A5',
'Uranus': u'\u2645',
'URing': u'\u016F',
'Vee': u'\u22C1',
'Venus': u'\u2640',
'VerticalBar': u'\u2223',
'VerticalEllipsis': u'\u22EE',
'VerticalLine': u'\u2502',
'VerticalSeparator': u'\uF432',
'VerticalTilde': u'\u2240',
'VeryThinSpace': u'\u200A',
'WarningSign': u'\uF725',
'WatchIcon': u'\u231A',
'Wedge': u'\u22C0',
'WeierstrassP': u'\u2118',
'WhiteBishop': u'\u2657',
'WhiteKing': u'\u2654',
'WhiteKnight': u'\u2658',
'WhitePawn': u'\u2659',
'WhiteQueen': u'\u2655',
'WhiteRook': u'\u2656',
'Wolf': u'\uF720',
'Xi': u'\u03BE',
'Xnor': u'\uF4A2',
'Xor': u'\u22BB',
'YAcute': u'\u00FD',
'YDoubleDot': u'\u00FF',
'Yen': u'\u00A5',
'Zeta': u'\u03B6',
'ZHacek': u'\u017E',
}
aliased_characters = {
u"a'": u'\u00E1',
u'a-': u'\u0101',
u'au': u'\u0103',
u'a"': u'\u00E4',
u'ae': u'\u00E6',
u'a`': u'\u00E0',
u'a^': u'\u00E2',
u'al': u'\u2135',
u'esc': u'\uF768',
u'am': u'\uF760',
u'a': u'\u03B1',
u'alpha': u'\u03B1',
u'alt': u'\uF7D1',
u'&&': u'\u2227',
u'and': u'\u2227',
u'Ang': u'\u212B',
u'ao': u'\u00E5',
u'a~': u'\u00E3',
u'\\': u'\u2216',
u'be': u'\u2136',
u'b': u'\u03B2',
u'beta': u'\u03B2',
u'bv': u'\u02D8',
u'bu': u'\u2022',
u"c'": u'\u0107',
u"A'": u'\u00C1',
u'A-': u'\u0100',
u'Au': u'\u0102',
u'A"': u'\u00C4',
u'AE': u'\u00C6',
u'A`': u'\u00C0',
u'A^': u'\u00C2',
u'A': u'\u0391',
u'Alpha': u'\u0391',
u'Ao': u'\u00C5',
u'A~': u'\u00C3',
u'B': u'\u0392',
u'Beta': u'\u0392',
u"C'": u'\u0106',
u'C,': u'\u00C7',
u'Cv': u'\u010C',
u'Ch': u'\u03A7',
u'Chi': u'\u03A7',
u'C': u'\u03A7',
u'D': u'\u0394',
u'Delta': u'\u0394',
u'Dv': u'\u010E',
u'DD': u'\uF74B',
u'Di': u'\u03DC',
u'Digamma': u'\u03DC',
u"E'": u'\u00C9',
u'E-': u'\u0112',
u'Eu': u'\u0114',
u'E"': u'\u00CB',
u'E`': u'\u00C8',
u'Ev': u'\u011A',
u'E^': u'\u00CA',
u'E': u'\u0395',
u'Epsilon': u'\u0395',
u'Et': u'\u0397',
u'Eta': u'\u0397',
u'H': u'\u0397',
u'D-': u'\u00D0',
u'G': u'\u0393',
u'Gamma': u'\u0393',
u"I'": u'\u00CD',
u'Iu': u'\u012C',
u'I"': u'\u00CF',
u'I`': u'\u00CC',
u'I^': u'\u00CE',
u'I': u'\u0399',
u'Iota': u'\u0399',
u'K': u'\u039A',
u'Kappa': u'\u039A',
u'Ko': u'\u03DE',
u'Koppa': u'\u03DE',
u'L': u'\u039B',
u'Lambda': u'\u039B',
u'L/': u'\u0141',
u'M': u'\u039C',
u'Mu': u'\u039C',
u'Nv': u'\u0147',
u'N~': u'\u00D1',
u'N': u'\u039D',
u'Nu': u'\u039D',
u"O'": u'\u00D3',
u"O''": u'\u0150',
u'O"': u'\u00D6',
u'OE': u'\u0152',
u'O`': u'\u00D2',
u'O^': u'\u00D4',
u'O': u'\u03A9',
u'Omega': u'\u03A9',
u'W': u'\u03A9',
u'Om': u'\u039F',
u'Omicron': u'\u039F',
u'O/': u'\u00D8',
u'O~': u'\u00D5',
u'Ph': u'\u03A6',
u'Phi': u'\u03A6',
u'F': u'\u03A6',
u'P': u'\u03A0',
u'Pi': u'\u03A0',
u'Ps': u'\u03A8',
u'Psi': u'\u03A8',
u'Y': u'\u03A8',
u'Rv': u'\u0158',
u'R': u'\u03A1',
u'Rho': u'\u03A1',
u'Sa': u'\u03E0',
u'Sampi': u'\u03E0',
u'Sv': u'\u0160',
u'S': u'\u03A3',
u'Sigma': u'\u03A3',
u'T': u'\u03A4',
u'Tau': u'\u03A4',
u'Tv': u'\u0164',
u'Th': u'\u0398',
u'Theta': u'\u0398',
u'Q': u'\u0398',
u'Thn': u'\u00DE',
u"U'": u'\u00DA',
u"U''": u'\u0170',
u'U"': u'\u00DC',
u'U`': u'\u00D9',
u'U^': u'\u00DB',
u'U': u'\u03A5',
u'Upsilon': u'\u03A5',
u'Uo': u'\u016E',
u'X': u'\u039E',
u'Xi': u'\u039E',
u"Y'": u'\u00DD',
u'Z': u'\u0396',
u'Zeta': u'\u0396',
u'Zv': u'\u017D',
u'c,': u'\u00E7',
u'cd': u'\u00B8',
u'.': u'\u00B7',
u'cent': u'\u00A2',
u'cv': u'\u010D',
u'ch': u'\u03C7',
u'chi': u'\u03C7',
u'c': u'\u03C7',
u'c.': u'\u2299',
u'c-': u'\u2296',
u'c+': u'\u2295',
u'c*': u'\u2297',
u'ccint': u'\u2232',
u'cl': u'\u2318',
u':': u'\u2236',
u'cmd': u'\uF76A',
u'===': u'\u2261',
u'co': u'\uF3C8',
u'conj': u'\uF3C8',
u'ct': u'\uF3C9',
u'cont': u'\uF3B1',
u'cint': u'\u222E',
u'ctrl': u'\uF763',
u'coprod': u'\u2210',
u'cccint': u'\u2233',
u'cross': u'\uF4A0',
u'cU': u'\u03D2',
u'cUpsilon': u'\u03D2',
u'ce': u'\u03B5',
u'cepsilon': u'\u03B5',
u'ck': u'\u03F0',
u'ckappa': u'\u03F0',
u'j': u'\u03C6',
u'cph': u'\u03C6',
u'cphi': u'\u03C6',
u'cp': u'\u03D6',
u'cpi': u'\u03D6',
u'cr': u'\u03F1',
u'crho': u'\u03F1',
u'cq': u'\u03D1',
u'cth': u'\u03D1',
u'ctheta': u'\u03D1',
u'dg': u'\u2020',
u'da': u'\u2138',
u'-': u'\u2013',
u'deg': u'\u00B0',
u' del': u'\uF7D0',
u'del': u'\u2207',
u'd': u'\u03B4',
u'delta': u'\u03B4',
u'dv': u'\u010F',
u'dia': u'\u22C4',
u'diffd': u'\u2206',
u'dd': u'\uF74C',
u'di': u'\u03DD',
u'digamma': u'\u03DD',
u'dratio': u'\uF4A4',
u'shift': u'\uF4A3',
u'dhy': u'\u00AD',
u'dlsep': u'\uF76E',
u'dpsep': u'\uF76F',
u'div': u'\u00F7',
u'.=': u'\u2250',
u'ddg': u'\u2021',
u'gg': u'\uF74A',
u'pp': u'\uF749',
u' <=': u'\u21D0',
u'<=>': u'\u21D4',
u'<==': u'\u27F8',
u'<==>': u'\u27FA',
u'==>': u'\u27F9',
u"''": u'\u2033',
u' =>': u'\u21D2',
u'dsa': u'\uF6E6',
u'dsb': u'\uF6E7',
u'dsc': u'\uF6E8',
u'dsA': u'\uF7A4',
u'dsB': u'\uF7A5',
u'dsC': u'\uF7A6',
u'dsD': u'\uF7A7',
u'dsE': u'\uF7A8',
u'dsF': u'\uF7A9',
u'dsG': u'\uF7AA',
u'dsH': u'\uF7AB',
u'dsI': u'\uF7AC',
u'dsJ': u'\uF7AD',
u'dsK': u'\uF7AE',
u'dsL': u'\uF7AF',
u'dsM': u'\uF7B0',
u'dsN': u'\uF7B1',
u'dsO': u'\uF7B2',
u'dsP': u'\uF7B3',
u'dsQ': u'\uF7B4',
u'dsR': u'\uF7B5',
u'dsS': u'\uF7B6',
u'dsT': u'\uF7B7',
u'dsU': u'\uF7B8',
u'dsV': u'\uF7B9',
u'dsW': u'\uF7BA',
u'dsX': u'\uF7BB',
u'dsY': u'\uF7BC',
u'dsZ': u'\uF7BD',
u'dsd': u'\uF6E9',
u'dse': u'\uF6EA',
u'ds8': u'\uF7E3',
u'dsf': u'\uF6EB',
u'ds5': u'\uF7E0',
u'ds4': u'\uF7DF',
u'dsg': u'\uF6EC',
u'dsh': u'\uF6ED',
u'dsi': u'\uF6EE',
u'dsj': u'\uF6EF',
u'dsk': u'\uF6F0',
u'dsl': u'\uF6F1',
u'dsm': u'\uF6F2',
u'dsn': u'\uF6F3',
u'ds9': u'\uF7E4',
u'dso': u'\uF6F4',
u'ds1': u'\uF7DC',
u'dsp': u'\uF6F5',
u'dsq': u'\uF6F6',
u'dsr': u'\uF6F7',
u'dss': u'\uF6F8',
u'ds7': u'\uF7E2',
u'ds6': u'\uF7E1',
u'dst': u'\uF6F9',
u'ds3': u'\uF7DE',
u'ds2': u'\uF7DD',
u'dsu': u'\uF6FA',
u'dsv': u'\uF6FB',
u'dsw': u'\uF6FC',
u'dsx': u'\uF6FD',
u'dsy': u'\uF6FE',
u'dsz': u'\uF6FF',
u'ds0': u'\uF7DB',
u' ||': u'\u2225',
u'dbv': u'\uF755',
u'd!': u'\u00A1',
u'd?': u'\u00BF',
u'dT': u'\u22A4',
u"e'": u'\u00E9',
u'e-': u'\u0113',
u'eu': u'\u0115',
u'e"': u'\u00EB',
u'e`': u'\u00E8',
u'ev': u'\u011B',
u'e^': u'\u00EA',
u'el': u'\u2208',
u'elem': u'\u2208',
u'...': u'\u2026',
u'eci': u'\u25CB',
u'es': u'\u2205',
u'esci': u'\u25E6',
u'essq': u'\u25FB',
u'esq': u'\u25A1',
u'ent': u'\uF7D4',
u'e': u'\u03F5',
u'epsilon': u'\u03F5',
u'==': u'\uF431',
u'=~': u'\u2242',
u'equi': u'\u21CC',
u'equiv': u'\u29E6',
u' esc': u'\uF769',
u'et': u'\u03B7',
u'eta': u'\u03B7',
u'h': u'\u03B7',
u'd-': u'\u00F0',
u'ex': u'\u2203',
u'ee': u'\uF74D',
u'fci': u'\u25CF',
u'fsci': u'\uF750',
u'fssq': u'\u25FC',
u'fsq': u'\u25A0',
u'fvssq': u'\u25AA',
u'fs': u'\u03C2',
u'*5': u'\u2605',
u'fa': u'\u2200',
u'$a': u'\uF800',
u'$b': u'\uF801',
u'$c': u'\uF802',
u'$A': u'\uF81A',
u'$B': u'\uF81B',
u'$C': u'\uF81C',
u'$D': u'\uF81D',
u'$E': u'\uF81E',
u'$F': u'\uF81F',
u'$G': u'\uF820',
u'$H': u'\uF821',
u'$I': u'\uF822',
u'$J': u'\uF823',
u'$K': u'\uF824',
u'$L': u'\uF825',
u'$M': u'\uF826',
u'$N': u'\uF827',
u'$O': u'\uF828',
u'$P': u'\uF829',
u'$Q': u'\uF82A',
u'$R': u'\uF82B',
u'$S': u'\uF82C',
u'$T': u'\uF82D',
u'$U': u'\uF82E',
u'$V': u'\uF82F',
u'$W': u'\uF830',
u'$X': u'\uF831',
u'$Y': u'\uF832',
u'$Z': u'\uF833',
u'$d': u'\uF803',
u'$e': u'\uF804',
u'$f': u'\uF805',
u'$g': u'\uF806',
u'$h': u'\uF807',
u'$i': u'\uF808',
u'$j': u'\uF809',
u'$k': u'\uF80A',
u'$l': u'\uF80B',
u'$m': u'\uF80C',
u'$n': u'\uF80D',
u'$o': u'\uF80E',
u'$p': u'\uF80F',
u'$q': u'\uF810',
u'$r': u'\uF811',
u'$s': u'\uF812',
u'$t': u'\uF813',
u'$u': u'\uF814',
u'$v': u'\uF815',
u'$w': u'\uF816',
u'$x': u'\uF817',
u'$y': u'\uF818',
u'$z': u'\uF819',
u':-@': u'\uF721',
u'fn': u'\uF4A1',
u'g': u'\u03B3',
u'gamma': u'\u03B3',
u'gi': u'\u2137',
u'goa': u'\uF6CC',
u'gob': u'\uF6CD',
u'goc': u'\uF6CE',
u'goA': u'\uF78A',
u'goB': u'\uF78B',
u'goC': u'\u212D',
u'goD': u'\uF78D',
u'goE': u'\uF78E',
u'goF': u'\uF78F',
u'goG': u'\uF790',
u'goH': u'\u210C',
u'goI': u'\u2111',
u'goJ': u'\uF793',
u'goK': u'\uF794',
u'goL': u'\uF795',
u'goM': u'\uF796',
u'goN': u'\uF797',
u'goO': u'\uF798',
u'goP': u'\uF799',
u'goQ': u'\uF79A',
u'goR': u'\u211C',
u'goS': u'\uF79C',
u'goT': u'\uF79D',
u'goU': u'\uF79E',
u'goV': u'\uF79F',
u'goW': u'\uF7A0',
u'goX': u'\uF7A1',
u'goY': u'\uF7A2',
u'goZ': u'\u2128',
u'god': u'\uF6CF',
u'goe': u'\uF6D0',
u'go8': u'\uF7ED',
u'gof': u'\uF6D1',
u'go5': u'\uF7EA',
u'go4': u'\uF7E9',
u'gog': u'\uF6D2',
u'goh': u'\uF6D3',
u'goi': u'\uF6D4',
u'goj': u'\uF6D5',
u'gok': u'\uF6D6',
u'gol': u'\uF6D7',
u'gom': u'\uF6D8',
u'gon': u'\uF6D9',
u'go9': u'\uF7EF',
u'goo': u'\uF6DA',
u'go1': u'\uF7E6',
u'gop': u'\uF6DB',
u'goq': u'\uF6DC',
u'gor': u'\uF6DD',
u'gos': u'\uF6DE',
u'go7': u'\uF7EC',
u'go6': u'\uF7EB',
u'got': u'\uF6DF',
u'go3': u'\uF7E8',
u'go2': u'\uF7E7',
u'gou': u'\uF6E0',
u'gov': u'\uF6E1',
u'gow': u'\uF6E2',
u'gox': u'\uF6E3',
u'goy': u'\uF6E4',
u'goz': u'\uF6E5',
u'go0': u'\uF7E5',
u'gci': u'\uF753',
u'gsq': u'\uF752',
u'>=': u'\u2265',
u'>/': u'\u2A7E',
u'>~': u'\u2273',
u'hck': u'\u02C7',
u':)': u'\u263A',
u':-)': u'\u263A',
u'hb': u'\u210F',
u'hc': u'\uF3CE',
u'hline': u'\u2500',
u'h=': u'\u224F',
u"i'": u'\u00ED',
u'iu': u'\u012D',
u'i"': u'\u00EF',
u'i`': u'\u00EC',
u'i^': u'\u00EE',
u'ii': u'\uF74E',
u'jj': u'\uF74F',
u'+': u'\uF39E',
u'=>': u'\uF523',
u'inf': u'\u221E',
u'int': u'\u222B',
u'inter': u'\u22C2',
u'@': u'\uF76D',
u',': u'\uF765',
u'is': u'\uF360',
u'i': u'\u03B9',
u'iota': u'\u03B9',
u'k': u'\u03BA',
u'kappa': u'\u03BA',
u'ko': u'\u03DF',
u'koppa': u'\u03DF',
u'l': u'\u03BB',
u'lambda': u'\u03BB',
u'<': u'\u2329',
u'<-': u'\u2190',
u'l|': u'\uF603',
u'lc': u'\u2308',
u'[[': u'\u301A',
u'l||': u'\uF605',
u'lf': u'\u230A',
u'g<<': u'\u00AB',
u'[': u'\uF76B',
u'<->': u'\u2194',
u'lT': u'\u22A3',
u'<=': u'\u2264',
u'</': u'\u2A7D',
u'<~': u'\u2272',
u'_': u'\uF754',
u'ls': u'\uF754',
u'--': u'\u2014',
u'<--': u'\u27F5',
u'<-->': u'\u27F7',
u'-->': u'\u27F6',
u'l/': u'\u0142',
u'math': u'\uF757',
u' ': u'\u205F',
u'mho': u'\u2127',
u'mi': u'\u00B5',
u'-+': u'\u2213',
u'm': u'\u03BC',
u'mu': u'\u03BC',
u'nand': u'\u22BC',
u'- ': u'\uF383',
u'- ': u'\uF384',
u'- ': u'\uF382',
u'- ': u'\uF380',
u':-|': u'\uF722',
u'nv': u'\u0148',
u'nb': u'\u2060',
u'nbs': u'\u00A0',
u'nor': u'\u22BD',
u'!===': u'\u2262',
u'!||': u'\u2226',
u'!el': u'\u2209',
u'!elem': u'\u2209',
u'!=': u'\u2260',
u'!=~': u'\uF400',
u'!ex': u'\u2204',
u'!>': u'\u226F',
u'!>=': u'\u2271',
u'!>/': u'\uF429',
u'!>~': u'\u2275',
u'!h=': u'\uF401',
u'!<=': u'\u2270',
u'!<': u'\u226E',
u'!</': u'\uF424',
u'!<~': u'\u2274',
u'!': u'\u00AC',
u'not': u'\u00AC',
u'!mem': u'\u220C',
u'!sub': u'\u2284',
u'!sub=': u'\u2288',
u'!sup': u'\u2285',
u'!sup=': u'\u2289',
u'!~': u'\u2241',
u'!~=': u'\u2244',
u'!~==': u'\u2247',
u'!~~': u'\u2249',
u'!|': u'\u2224',
u'n~': u'\u00F1',
u'n': u'\u03BD',
u'nu': u'\u03BD',
u'null': u'\uF3A0',
u"o'": u'\u00F3',
u"o''": u'\u0151',
u'o"': u'\u00F6',
u'oe': u'\u0153',
u'o`': u'\u00F2',
u'o^': u'\u00F4',
u'o': u'\u03C9',
u'omega': u'\u03C9',
u'w': u'\u03C9',
u'om': u'\u03BF',
u'omicron': u'\u03BF',
u'opt': u'\uF7D2',
u'||': u'\u2228',
u'or': u'\u2228',
u'o/': u'\u00F8',
u'o~': u'\u00F5',
u'o{': u'\uFE37',
u'o[': u'\u23B4',
u'o(': u'\uFE35',
u'pd': u'\u2202',
u'ph': u'\u03D5',
u'phi': u'\u03D5',
u'f': u'\u03D5',
u'p': u'\u03C0',
u'pi': u'\u03C0',
u'pw': u'\uF361',
u'pl': u'\uF528',
u'+-': u'\u00B1',
u"'": u'\u2032',
u'prod': u'\u220F',
u'prop': u'\u221D',
u'ps': u'\u03C8',
u'psi': u'\u03C8',
u'y': u'\u03C8',
u'rtm': u'\u00AE',
u'ret': u'\u21B5',
u' ret': u'\uF766',
u'``': u'\u2036',
u'mem': u'\u220B',
u'`': u'\u2035',
u'rv': u'\u0159',
u'r': u'\u03C1',
u'rho': u'\u03C1',
u'>': u'\u232A',
u' ->': u'\u2192',
u'r|': u'\uF604',
u'rc': u'\u2309',
u']]': u'\u301B',
u'r||': u'\uF606',
u'rf': u'\u230B',
u'g>>': u'\u00BB',
u']': u'\uF76C',
u'rT': u'\u22A2',
u'vec': u'\u21C0',
u'->': u'\uF522',
u':>': u'\uF51F',
u':-(': u'\u2639',
u'sa': u'\u03E0',
u'sampi': u'\u03E0',
u'sca': u'\uF6B2',
u'scb': u'\uF6B3',
u'scc': u'\uF6B4',
u'scA': u'\uF770',
u'scB': u'\u212C',
u'scC': u'\uF772',
u'scD': u'\uF773',
u'scE': u'\u2130',
u'scF': u'\u2131',
u'scG': u'\uF776',
u'scH': u'\u210B',
u'scI': u'\u2110',
u'scJ': u'\uF779',
u'scK': u'\uF77A',
u'scL': u'\u2112',
u'scM': u'\u2133',
u'scN': u'\uF77D',
u'scO': u'\uF77E',
u'scP': u'\u2118',
u'scQ': u'\uF780',
u'scR': u'\u211B',
u'scS': u'\uF782',
u'scT': u'\uF783',
u'scU': u'\uF784',
u'scV': u'\uF785',
u'scW': u'\uF786',
u'scX': u'\uF787',
u'scY': u'\uF788',
u'scZ': u'\uF789',
u'scd': u'\uF6B5',
u'sce': u'\u212F',
u'sc8': u'\uF7F8',
u'scf': u'\uF6B7',
u'sc5': u'\uF7F5',
u'sc4': u'\uF7F4',
u'scg': u'\u210A',
u'sch': u'\uF6B9',
u'sci': u'\uF6BA',
u'scj': u'\uF6BB',
u'sck': u'\uF6BC',
u'scl': u'\u2113',
u'scm': u'\uF6BE',
u'scn': u'\uF6BF',
u'sc9': u'\uF7F9',
u'sco': u'\u2134',
u'sc1': u'\uF7F1',
u'scp': u'\uF6C1',
u'scq': u'\uF6C2',
u'scr': u'\uF6C3',
u'scs': u'\uF6C4',
u'sc7': u'\uF7F7',
u'sc6': u'\uF7F6',
u'sct': u'\uF6C5',
u'sc3': u'\uF7F3',
u'sc2': u'\uF7F2',
u'scu': u'\uF6C6',
u'scv': u'\uF6C7',
u'scw': u'\uF6C8',
u'scx': u'\uF6C9',
u'scy': u'\uF6CA',
u'scz': u'\uF6CB',
u'sc0': u'\uF7F0',
u'spl': u'\uF527',
u'sv': u'\u0161',
u's': u'\u03C3',
u'sigma': u'\u03C3',
u'*6': u'\u2736',
u'sc': u'\u2218',
u'space': u'\u2423',
u'spc': u'\uF7BF',
u'sqrt': u'\u221A',
u'sq': u'\uF520',
u'star': u'\u22C6',
u'sti': u'\u03DB',
u'stigma': u'\u03DB',
u'sub': u'\u2282',
u'sub=': u'\u2286',
u'st': u'\u220D',
u'sum': u'\u2211',
u'sup': u'\u2283',
u'sup=': u'\u2287',
u'sz': u'\u00DF',
u'ss': u'\u00DF',
u'tab': u'\uF7BE',
u't': u'\u03C4',
u'tau': u'\u03C4',
u'tv': u'\u0165',
u'tf': u'\u2234',
u'th': u'\u03B8',
u'theta': u'\u03B8',
u'q': u'\u03B8',
u' ': u'\u2005',
u' ': u'\u2009',
u'thn': u'\u00FE',
u'~': u'\u223C',
u'~=': u'\u2243',
u'~==': u'\u2245',
u'~~': u'\u2248',
u'*': u'\u00D7',
u'tm': u'\u2122',
u'tr': u'\uF3C7',
u"u'": u'\u00FA',
u"u''": u'\u0171',
u'u"': u'\u00FC',
u'u`': u'\u00F9',
u'u^': u'\u00FB',
u'u{': u'\uFE38',
u'u[': u'\u23B5',
u'u(': u'\uFE36',
u'un': u'\u22C3',
u'u': u'\u03C5',
u'upsilon': u'\u03C5',
u'uT': u'\u22A5',
u'uo': u'\u016F',
u'v': u'\u22C1',
u' |': u'\u2223',
u'vline': u'\u2502',
u'|': u'\uF432',
u' ': u'\u200A',
u'^': u'\u22C0',
u'wp': u'\u2118',
u'wf': u'\uF720',
u'wolf': u'\uF720',
u'x': u'\u03BE',
u'xi': u'\u03BE',
u'xnor': u'\uF4A2',
u'xor': u'\u22BB',
u"y'": u'\u00FD',
u'z': u'\u03B6',
u'zeta': u'\u03B6',
u'zv': u'\u017E',
}
|
ultigeo/LIMS
|
refs/heads/master
|
testfinal/urls.py
|
2
|
from django.conf import settings  # required below for MEDIA_URL/STATIC_URL
from django.conf.urls import patterns, include, url
from django.contrib import admin
from testapp.views import * # reader_portal,dc_portal
from testapp.models import las_parcel,Rivers,Roads
from django.conf.urls.static import static
from django.contrib.auth import views as auth_views
from django.views.generic import ListView
from django.views.generic import TemplateView
from djgeojson.views import GeoJSONLayerView
from django.contrib.auth import views
from django.contrib.auth import urls
from registration.backends.simple.views import *

# Discover per-app admin registrations and brand the admin site.
admin.autodiscover()
admin.site.site_header = 'LADM Project'

# Root URL routing table. The static() helpers append serving routes for
# uploaded media and collected static assets (development use only; they
# are no-ops when DEBUG is False).
# BUGFIX: `settings` was used below without being imported, which made this
# module raise NameError at import time; the import is added above.
urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    url(r'^ladm/', include('testapp.urls')),
    url(r'^grappelli/', include('grappelli.urls')),
)+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)+ static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
ericzolf/ansible
|
refs/heads/devel
|
lib/ansible/utils/multiprocessing.py
|
60
|
# Copyright (c) 2019 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import multiprocessing

# Compatibility shim exposing a multiprocessing "context" pinned to the
# ``fork`` start method. Python 3.8 switched the macOS default to ``spawn``,
# which this code base does not support, so every consumer imports
# ``context`` from here instead of using ``multiprocessing`` directly.
# Living in utils keeps it importable everywhere without circular imports.
if hasattr(multiprocessing, 'get_context'):
    context = multiprocessing.get_context('fork')
else:
    # Python 2 predates start-method contexts and always forks, so the
    # module itself doubles as the context.
    context = multiprocessing
|
ttglennhall/DjangoGirlsTutorial
|
refs/heads/master
|
myvenv/lib/python3.4/site-packages/django/db/models/sql/__init__.py
|
237
|
# Re-export the SQL layer's public names so callers can simply write
# ``from django.db.models.sql import Query, AND, OR, EmptyResultSet``.
# NOTE: the star imports are order-sensitive (later modules may shadow
# earlier names), so do not reorder them.
from django.db.models.sql.datastructures import EmptyResultSet
from django.db.models.sql.subqueries import *  # NOQA
from django.db.models.sql.query import *  # NOQA
from django.db.models.sql.where import AND, OR

# Explicit public API of this package.
__all__ = ['Query', 'AND', 'OR', 'EmptyResultSet']
|
huongttlan/bokeh
|
refs/heads/master
|
examples/plotting/file/image_url.py
|
17
|
"""Bokeh demo: tile ten copies of the Bokeh logo along a figure's diagonal."""
from bokeh.plotting import figure, show, output_file

# Render the result to a standalone HTML file.
output_file("image_url.html")

plot = figure()

# One URL per image; all ten entries point at the same transparent logo.
urls = ["http://bokeh.pydata.org/en/latest/_static/bokeh-transparent.png"] * 10
xs = list(range(0, 100, 10))
ys = list(range(0, 100, 10))

# Semi-transparent so overlapping images remain visible.
plot.image_url(x=xs, y=ys, url=urls, global_alpha=0.2)
show(plot)
|
imtapps/django-imt-fork
|
refs/heads/IMT
|
django/contrib/sessions/management/__init__.py
|
12133432
| |
mrshoki/readthedocs.org
|
refs/heads/master
|
readthedocs/projects/templatetags/__init__.py
|
12133432
| |
TeamEOS/external_chromium_org
|
refs/heads/lp5.0
|
build/compiler_version.py
|
104
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Compiler version checking tool for gcc
Print gcc version as XY if you are running gcc X.Y.*.
This is used to tweak build flags for gcc 4.4.
"""
import os
import re
import subprocess
import sys
compiler_version_cache = {} # Map from (compiler, tool) -> version.
def Usage(program_name):
  """Print command-line usage to stdout and return exit status 1."""
  print '%s MODE TOOL' % os.path.basename(program_name)
  print 'MODE: host or target.'
  print 'TOOL: assembler or compiler or linker.'
  return 1
def ParseArgs(args):
  """Validate and unpack the two positional command-line arguments.

  Args:
    args: sequence expected to hold exactly [mode, tool].

  Returns:
    (mode, tool) tuple of validated strings.

  Raises:
    Exception: on a wrong argument count or an unrecognized mode/tool.
  """
  if len(args) != 2:
    raise Exception('Invalid number of arguments')
  mode, tool = args
  if mode not in ('host', 'target'):
    raise Exception('Invalid mode: %s' % mode)
  if tool not in ('assembler', 'compiler', 'linker'):
    raise Exception('Invalid tool: %s' % tool)
  return mode, tool
def GetEnvironFallback(var_list, default):
  """Return the value of the first set variable in var_list, else default.

  Variables set to the empty string count as set and are returned as-is.
  """
  for name in var_list:
    value = os.environ.get(name)
    if value is not None:
      return value
  return default
def GetVersion(compiler, tool):
  """Run the given tool via `compiler` and return its version as "XY".

  Results are memoized in compiler_version_cache keyed on (compiler, tool).
  Returns "" on any failure, after logging details to stderr.
  """
  tool_output = tool_error = None
  cache_key = (compiler, tool)
  cached_version = compiler_version_cache.get(cache_key)
  if cached_version:
    return cached_version
  try:
    # Note that compiler could be something tricky like "distcc g++".
    if tool == "compiler":
      compiler = compiler + " -dumpversion"
      # 4.6
      version_re = re.compile(r"(\d+)\.(\d+)")
    elif tool == "assembler":
      compiler = compiler + " -Xassembler --version -x assembler -c /dev/null"
      # Unmodified: GNU assembler (GNU Binutils) 2.24
      # Ubuntu: GNU assembler (GNU Binutils for Ubuntu) 2.22
      # Fedora: GNU assembler version 2.23.2
      version_re = re.compile(r"^GNU [^ ]+ .* (\d+).(\d+).*?$", re.M)
    elif tool == "linker":
      compiler = compiler + " -Xlinker --version"
      # Using BFD linker
      # Unmodified: GNU ld (GNU Binutils) 2.24
      # Ubuntu: GNU ld (GNU Binutils for Ubuntu) 2.22
      # Fedora: GNU ld version 2.23.2
      # Using Gold linker
      # Unmodified: GNU gold (GNU Binutils 2.24) 1.11
      # Ubuntu: GNU gold (GNU Binutils for Ubuntu 2.22) 1.11
      # Fedora: GNU gold (version 2.23.2) 1.11
      version_re = re.compile(r"^GNU [^ ]+ .* (\d+).(\d+).*?$", re.M)
    else:
      raise Exception("Unknown tool %s" % tool)
    # Force the locale to C otherwise the version string could be localized
    # making regex matching fail.
    env = os.environ.copy()
    env["LC_ALL"] = "C"
    # shell=True so compound commands like "distcc g++" work unchanged.
    pipe = subprocess.Popen(compiler, shell=True, env=env,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    tool_output, tool_error = pipe.communicate()
    if pipe.returncode:
      raise subprocess.CalledProcessError(pipe.returncode, compiler)
    # Concatenate major and minor, e.g. "4.6" -> "46".
    parsed_output = version_re.match(tool_output)
    result = parsed_output.group(1) + parsed_output.group(2)
    compiler_version_cache[cache_key] = result
    return result
  except Exception, e:
    # Surface the tool's own stderr first, then our diagnostic.
    if tool_error:
      sys.stderr.write(tool_error)
    print >> sys.stderr, "compiler_version.py failed to execute:", compiler
    print >> sys.stderr, e
    return ""
def main(args):
  """CLI entry: parse MODE/TOOL from argv, print the version, return status."""
  try:
    (mode, tool) = ParseArgs(args[1:])
  except Exception, e:
    # Bad arguments: explain the error, then show usage (returns 1).
    sys.stderr.write(e.message + '\n\n')
    return Usage(args[0])
  ret_code, result = ExtractVersion(mode, tool)
  if ret_code == 0:
    print result
  return ret_code
def DoMain(args):
  """Hook to be called from gyp without starting a separate python
  interpreter.

  Returns the version string on success; raises Exception on any failure
  (gyp has no exit-code channel, so errors must propagate as exceptions).
  """
  mode, tool = ParseArgs(args)
  ret_code, version = ExtractVersion(mode, tool)
  if ret_code != 0:
    raise Exception("Failed to extract compiler version for args: %s" % args)
  return version
def ExtractVersion(mode, tool):
  """Resolve the compiler from the environment and query its version.

  Returns (0, version_string) on success, (1, None) on failure.
  """
  # Check if various CXX environment variables exist and use them if they
  # exist. The preferences and fallback order is a close approximation of
  # GenerateOutputForConfig() in GYP's ninja generator.
  # The main difference being not supporting GYP's make_global_settings.
  env_vars = ['CXX_target', 'CXX']
  if mode == 'host':
    env_vars = ['CXX_host'] + env_vars
  compiler = GetEnvironFallback(env_vars, 'c++')
  if not compiler:
    return (1, None)
  version = GetVersion(compiler, tool)
  if version:
    return (0, version)
  return (1, None)
# Script entry point: process exit status comes from main().
if __name__ == "__main__":
  sys.exit(main(sys.argv))
|
blooparksystems/odoo
|
refs/heads/9.0
|
addons/l10n_ca/__init__.py
|
12
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (C) 2010 Savoir-faire Linux (<https://www.savoirfairelinux.com>).
|
GitHublong/hue
|
refs/heads/master
|
desktop/core/ext-py/python-openid-2.2.5/openid/test/test_ax.py
|
77
|
"""Tests for the attribute exchange extension module
"""
import unittest
from openid.extensions import ax
from openid.message import NamespaceMap, Message, OPENID2_NS
from openid.consumer.consumer import SuccessResponse
class BogusAXMessage(ax.AXMessage):
    """AX message carrying an unrecognized mode, to exercise _checkMode."""
    mode = 'bogus'
    # Reuse the base helper so getExtensionArgs emits this bogus mode.
    getExtensionArgs = ax.AXMessage._newArgs
class DummyRequest(object):
    """Minimal stand-in for an OpenIDRequest: just wraps a Message."""
    def __init__(self, message):
        self.message = message
class AXMessageTest(unittest.TestCase):
    """Tests for AXMessage mode validation (_checkMode / _newArgs)."""
    def setUp(self):
        self.bax = BogusAXMessage()
    def test_checkMode(self):
        check = self.bax._checkMode
        # Missing mode -> NotAXMessage; wrong mode -> AXError.
        self.failUnlessRaises(ax.NotAXMessage, check, {})
        self.failUnlessRaises(ax.AXError, check, {'mode':'fetch_request'})
        # does not raise an exception when the mode is right
        check({'mode':self.bax.mode})
    def test_checkMode_newArgs(self):
        """_newArgs generates something that has the correct mode"""
        # This would raise AXError if it didn't like the mode newArgs made.
        self.bax._checkMode(self.bax._newArgs())
class AttrInfoTest(unittest.TestCase):
    """Tests for AttrInfo construction defaults."""
    def test_construct(self):
        # type_uri is mandatory; everything else defaults.
        self.failUnlessRaises(TypeError, ax.AttrInfo)
        type_uri = 'a uri'
        ainfo = ax.AttrInfo(type_uri)
        self.failUnlessEqual(type_uri, ainfo.type_uri)
        self.failUnlessEqual(1, ainfo.count)
        self.failIf(ainfo.required)
        self.failUnless(ainfo.alias is None)
class ToTypeURIsTest(unittest.TestCase):
    """Tests for ax.toTypeURIs: comma-separated alias list -> URI list."""
    def setUp(self):
        self.aliases = NamespaceMap()
    def test_empty(self):
        for empty in [None, '']:
            uris = ax.toTypeURIs(self.aliases, empty)
            self.failUnlessEqual([], uris)
    def test_undefined(self):
        # Looking up an alias that was never registered raises KeyError.
        self.failUnlessRaises(
            KeyError,
            ax.toTypeURIs, self.aliases, 'http://janrain.com/')
    def test_one(self):
        uri = 'http://janrain.com/'
        alias = 'openid_hackers'
        self.aliases.addAlias(uri, alias)
        uris = ax.toTypeURIs(self.aliases, alias)
        self.failUnlessEqual([uri], uris)
    def test_two(self):
        uri1 = 'http://janrain.com/'
        alias1 = 'openid_hackers'
        self.aliases.addAlias(uri1, alias1)
        uri2 = 'http://jyte.com/'
        alias2 = 'openid_hack'
        self.aliases.addAlias(uri2, alias2)
        # Order of the result follows the order of the alias list.
        uris = ax.toTypeURIs(self.aliases, ','.join([alias1, alias2]))
        self.failUnlessEqual([uri1, uri2], uris)
class ParseAXValuesTest(unittest.TestCase):
    """Testing AXKeyValueMessage.parseExtensionArgs."""
    def failUnlessAXKeyError(self, ax_args):
        # Helper: parsing ax_args must raise KeyError.
        msg = ax.AXKeyValueMessage()
        self.failUnlessRaises(KeyError, msg.parseExtensionArgs, ax_args)
    def failUnlessAXValues(self, ax_args, expected_args):
        """Fail unless parseExtensionArgs(ax_args) == expected_args."""
        msg = ax.AXKeyValueMessage()
        msg.parseExtensionArgs(ax_args)
        self.failUnlessEqual(expected_args, msg.data)
    def test_emptyIsValid(self):
        self.failUnlessAXValues({}, {})
    def test_missingValueForAliasExplodes(self):
        self.failUnlessAXKeyError({'type.foo':'urn:foo'})
    def test_countPresentButNotValue(self):
        self.failUnlessAXKeyError({'type.foo':'urn:foo',
                                   'count.foo':'1'})
    def test_invalidCountValue(self):
        # Non-integer count -> AXError (not KeyError).
        msg = ax.FetchRequest()
        self.failUnlessRaises(ax.AXError,
                              msg.parseExtensionArgs,
                              {'type.foo':'urn:foo',
                               'count.foo':'bogus'})
    def test_requestUnlimitedValues(self):
        msg = ax.FetchRequest()
        msg.parseExtensionArgs(
            {'mode':'fetch_request',
             'required':'foo',
             'type.foo':'urn:foo',
             'count.foo':ax.UNLIMITED_VALUES})
        attrs = list(msg.iterAttrs())
        foo = attrs[0]
        self.failUnless(foo.count == ax.UNLIMITED_VALUES)
        self.failUnless(foo.wantsUnlimitedValues())
    def test_longAlias(self):
        # Spec minimum length is 32 characters. This is a silly test
        # for this library, but it's here for completeness.
        alias = 'x' * ax.MINIMUM_SUPPORTED_ALIAS_LENGTH
        msg = ax.AXKeyValueMessage()
        msg.parseExtensionArgs(
            {'type.%s' % (alias,): 'urn:foo',
             'count.%s' % (alias,): '1',
             'value.%s.1' % (alias,): 'first'}
            )
    def test_invalidAlias(self):
        # Aliases may not contain '.' or ','.
        types = [
            ax.AXKeyValueMessage,
            ax.FetchRequest
            ]
        inputs = [
            {'type.a.b':'urn:foo',
             'count.a.b':'1'},
            {'type.a,b':'urn:foo',
             'count.a,b':'1'},
            ]
        for typ in types:
            for input in inputs:
                msg = typ()
                self.failUnlessRaises(ax.AXError, msg.parseExtensionArgs,
                                      input)
    def test_countPresentAndIsZero(self):
        self.failUnlessAXValues(
            {'type.foo':'urn:foo',
             'count.foo':'0',
             }, {'urn:foo':[]})
    def test_singletonEmpty(self):
        self.failUnlessAXValues(
            {'type.foo':'urn:foo',
             'value.foo':'',
             }, {'urn:foo':[]})
    def test_doubleAlias(self):
        # Two aliases mapping to the same type URI is an error.
        self.failUnlessAXKeyError(
            {'type.foo':'urn:foo',
             'value.foo':'',
             'type.bar':'urn:foo',
             'value.bar':'',
             })
    def test_doubleSingleton(self):
        self.failUnlessAXValues(
            {'type.foo':'urn:foo',
             'value.foo':'',
             'type.bar':'urn:bar',
             'value.bar':'',
             }, {'urn:foo':[], 'urn:bar':[]})
    def test_singletonValue(self):
        self.failUnlessAXValues(
            {'type.foo':'urn:foo',
             'value.foo':'Westfall',
             }, {'urn:foo':['Westfall']})
class FetchRequestTest(unittest.TestCase):
    """Tests for ax.FetchRequest: construction, attribute bookkeeping,
    extension-arg round-tripping, and update_url verification against the
    relying party's realm/return_to."""
    def setUp(self):
        self.msg = ax.FetchRequest()
        self.type_a = 'http://janrain.example.com/a'
        self.alias_a = 'a'
    def test_mode(self):
        self.failUnlessEqual(self.msg.mode, 'fetch_request')
    def test_construct(self):
        self.failUnlessEqual({}, self.msg.requested_attributes)
        self.failUnlessEqual(None, self.msg.update_url)
        msg = ax.FetchRequest('hailstorm')
        self.failUnlessEqual({}, msg.requested_attributes)
        self.failUnlessEqual('hailstorm', msg.update_url)
    def test_add(self):
        uri = 'mud://puddle'
        # Not yet added:
        self.failIf(uri in self.msg)
        attr = ax.AttrInfo(uri)
        self.msg.add(attr)
        # Present after adding
        self.failUnless(uri in self.msg)
    def test_addTwice(self):
        # Adding the same type URI twice is a KeyError.
        uri = 'lightning://storm'
        attr = ax.AttrInfo(uri)
        self.msg.add(attr)
        self.failUnlessRaises(KeyError, self.msg.add, attr)
    def test_getExtensionArgs_empty(self):
        expected_args = {
            'mode':'fetch_request',
            }
        self.failUnlessEqual(expected_args, self.msg.getExtensionArgs())
    def test_getExtensionArgs_noAlias(self):
        # When no alias is given, one is generated; recover it from the
        # emitted 'type.<alias>' key.
        attr = ax.AttrInfo(
            type_uri = 'type://of.transportation',
            )
        self.msg.add(attr)
        ax_args = self.msg.getExtensionArgs()
        for k, v in ax_args.iteritems():
            if v == attr.type_uri and k.startswith('type.'):
                alias = k[5:]
                break
        else:
            self.fail("Didn't find the type definition")
        self.failUnlessExtensionArgs({
            'type.' + alias:attr.type_uri,
            'if_available':alias,
            })
    def test_getExtensionArgs_alias_if_available(self):
        attr = ax.AttrInfo(
            type_uri = 'type://of.transportation',
            alias = 'transport',
            )
        self.msg.add(attr)
        self.failUnlessExtensionArgs({
            'type.' + attr.alias:attr.type_uri,
            'if_available':attr.alias,
            })
    def test_getExtensionArgs_alias_req(self):
        # Required attributes go under 'required' instead of 'if_available'.
        attr = ax.AttrInfo(
            type_uri = 'type://of.transportation',
            alias = 'transport',
            required = True,
            )
        self.msg.add(attr)
        self.failUnlessExtensionArgs({
            'type.' + attr.alias:attr.type_uri,
            'required':attr.alias,
            })
    def failUnlessExtensionArgs(self, expected_args):
        """Make sure that getExtensionArgs has the expected result
        This method will fill in the mode.
        """
        expected_args = dict(expected_args)
        expected_args['mode'] = self.msg.mode
        self.failUnlessEqual(expected_args, self.msg.getExtensionArgs())
    def test_isIterable(self):
        self.failUnlessEqual([], list(self.msg))
        self.failUnlessEqual([], list(self.msg.iterAttrs()))
    def test_getRequiredAttrs_empty(self):
        self.failUnlessEqual([], self.msg.getRequiredAttrs())
    def test_parseExtensionArgs_extraType(self):
        # A declared type that is neither required nor if_available is
        # invalid.
        extension_args = {
            'mode':'fetch_request',
            'type.' + self.alias_a:self.type_a,
            }
        self.failUnlessRaises(ValueError,
                              self.msg.parseExtensionArgs, extension_args)
    def test_parseExtensionArgs(self):
        extension_args = {
            'mode':'fetch_request',
            'type.' + self.alias_a:self.type_a,
            'if_available':self.alias_a
            }
        self.msg.parseExtensionArgs(extension_args)
        self.failUnless(self.type_a in self.msg)
        self.failUnlessEqual([self.type_a], list(self.msg))
        attr_info = self.msg.requested_attributes.get(self.type_a)
        self.failUnless(attr_info)
        self.failIf(attr_info.required)
        self.failUnlessEqual(self.type_a, attr_info.type_uri)
        self.failUnlessEqual(self.alias_a, attr_info.alias)
        self.failUnlessEqual([attr_info], list(self.msg.iterAttrs()))
    def test_extensionArgs_idempotent(self):
        # parse followed by getExtensionArgs round-trips exactly.
        extension_args = {
            'mode':'fetch_request',
            'type.' + self.alias_a:self.type_a,
            'if_available':self.alias_a
            }
        self.msg.parseExtensionArgs(extension_args)
        self.failUnlessEqual(extension_args, self.msg.getExtensionArgs())
        self.failIf(self.msg.requested_attributes[self.type_a].required)
    def test_extensionArgs_idempotent_count_required(self):
        extension_args = {
            'mode':'fetch_request',
            'type.' + self.alias_a:self.type_a,
            'count.' + self.alias_a:'2',
            'required':self.alias_a
            }
        self.msg.parseExtensionArgs(extension_args)
        self.failUnlessEqual(extension_args, self.msg.getExtensionArgs())
        self.failUnless(self.msg.requested_attributes[self.type_a].required)
    def test_extensionArgs_count1(self):
        # count=1 is the default and is normalized away on output.
        extension_args = {
            'mode':'fetch_request',
            'type.' + self.alias_a:self.type_a,
            'count.' + self.alias_a:'1',
            'if_available':self.alias_a,
            }
        extension_args_norm = {
            'mode':'fetch_request',
            'type.' + self.alias_a:self.type_a,
            'if_available':self.alias_a,
            }
        self.msg.parseExtensionArgs(extension_args)
        self.failUnlessEqual(extension_args_norm, self.msg.getExtensionArgs())
    def test_openidNoRealm(self):
        # update_url cannot be verified without a realm or return_to.
        openid_req_msg = Message.fromOpenIDArgs({
            'mode': 'checkid_setup',
            'ns': OPENID2_NS,
            'ns.ax': ax.AXMessage.ns_uri,
            'ax.update_url': 'http://different.site/path',
            'ax.mode': 'fetch_request',
            })
        self.failUnlessRaises(ax.AXError,
                              ax.FetchRequest.fromOpenIDRequest,
                              DummyRequest(openid_req_msg))
    def test_openidUpdateURLVerificationError(self):
        # update_url outside the realm is rejected.
        openid_req_msg = Message.fromOpenIDArgs({
            'mode': 'checkid_setup',
            'ns': OPENID2_NS,
            'realm': 'http://example.com/realm',
            'ns.ax': ax.AXMessage.ns_uri,
            'ax.update_url': 'http://different.site/path',
            'ax.mode': 'fetch_request',
            })
        self.failUnlessRaises(ax.AXError,
                              ax.FetchRequest.fromOpenIDRequest,
                              DummyRequest(openid_req_msg))
    def test_openidUpdateURLVerificationSuccess(self):
        openid_req_msg = Message.fromOpenIDArgs({
            'mode': 'checkid_setup',
            'ns': OPENID2_NS,
            'realm': 'http://example.com/realm',
            'ns.ax': ax.AXMessage.ns_uri,
            'ax.update_url': 'http://example.com/realm/update_path',
            'ax.mode': 'fetch_request',
            })
        fr = ax.FetchRequest.fromOpenIDRequest(DummyRequest(openid_req_msg))
    def test_openidUpdateURLVerificationSuccessReturnTo(self):
        # return_to works as the verification base when realm is absent.
        openid_req_msg = Message.fromOpenIDArgs({
            'mode': 'checkid_setup',
            'ns': OPENID2_NS,
            'return_to': 'http://example.com/realm',
            'ns.ax': ax.AXMessage.ns_uri,
            'ax.update_url': 'http://example.com/realm/update_path',
            'ax.mode': 'fetch_request',
            })
        fr = ax.FetchRequest.fromOpenIDRequest(DummyRequest(openid_req_msg))
    def test_fromOpenIDRequestWithoutExtension(self):
        """return None for an OpenIDRequest without AX paramaters."""
        openid_req_msg = Message.fromOpenIDArgs({
            'mode': 'checkid_setup',
            'ns': OPENID2_NS,
            })
        oreq = DummyRequest(openid_req_msg)
        r = ax.FetchRequest.fromOpenIDRequest(oreq)
        self.failUnless(r is None, "%s is not None" % (r,))
    def test_fromOpenIDRequestWithoutData(self):
        """return something for SuccessResponse with AX paramaters,
        even if it is the empty set."""
        openid_req_msg = Message.fromOpenIDArgs({
            'mode': 'checkid_setup',
            'realm': 'http://example.com/realm',
            'ns': OPENID2_NS,
            'ns.ax': ax.AXMessage.ns_uri,
            'ax.mode': 'fetch_request',
            })
        oreq = DummyRequest(openid_req_msg)
        r = ax.FetchRequest.fromOpenIDRequest(oreq)
        self.failUnless(r is not None)
class FetchResponseTest(unittest.TestCase):
    """Tests for ax.FetchResponse: extension-arg generation (with and
    without a matching request), value accessors, and parsing out of a
    consumer SuccessResponse."""
    def setUp(self):
        self.msg = ax.FetchResponse()
        self.value_a = 'monkeys'
        self.type_a = 'http://phone.home/'
        self.alias_a = 'robocop'
        self.request_update_url = 'http://update.bogus/'
    def test_construct(self):
        self.failUnless(self.msg.update_url is None)
        self.failUnlessEqual({}, self.msg.data)
    def test_getExtensionArgs_empty(self):
        expected_args = {
            'mode':'fetch_response',
            }
        self.failUnlessEqual(expected_args, self.msg.getExtensionArgs())
    def test_getExtensionArgs_empty_request(self):
        expected_args = {
            'mode':'fetch_response',
            }
        req = ax.FetchRequest()
        msg = ax.FetchResponse(request=req)
        self.failUnlessEqual(expected_args, msg.getExtensionArgs())
    def test_getExtensionArgs_empty_request_some(self):
        # A requested-but-unanswered attribute is reported with count 0.
        uri = 'http://not.found/'
        alias = 'ext0'
        expected_args = {
            'mode':'fetch_response',
            'type.%s' % (alias,): uri,
            'count.%s' % (alias,): '0'
            }
        req = ax.FetchRequest()
        req.add(ax.AttrInfo(uri))
        msg = ax.FetchResponse(request=req)
        self.failUnlessEqual(expected_args, msg.getExtensionArgs())
    def test_updateUrlInResponse(self):
        # The request's update_url is echoed back in the response.
        uri = 'http://not.found/'
        alias = 'ext0'
        expected_args = {
            'mode':'fetch_response',
            'update_url': self.request_update_url,
            'type.%s' % (alias,): uri,
            'count.%s' % (alias,): '0'
            }
        req = ax.FetchRequest(update_url=self.request_update_url)
        req.add(ax.AttrInfo(uri))
        msg = ax.FetchResponse(request=req)
        self.failUnlessEqual(expected_args, msg.getExtensionArgs())
    def test_getExtensionArgs_some_request(self):
        expected_args = {
            'mode':'fetch_response',
            'type.' + self.alias_a:self.type_a,
            'value.' + self.alias_a + '.1':self.value_a,
            'count.' + self.alias_a: '1'
            }
        req = ax.FetchRequest()
        req.add(ax.AttrInfo(self.type_a, alias=self.alias_a))
        msg = ax.FetchResponse(request=req)
        msg.addValue(self.type_a, self.value_a)
        self.failUnlessEqual(expected_args, msg.getExtensionArgs())
    def test_getExtensionArgs_some_not_request(self):
        # Answering an attribute the request did not ask for is an error.
        req = ax.FetchRequest()
        msg = ax.FetchResponse(request=req)
        msg.addValue(self.type_a, self.value_a)
        self.failUnlessRaises(KeyError, msg.getExtensionArgs)
    def test_getSingle_success(self):
        req = ax.FetchRequest()
        self.msg.addValue(self.type_a, self.value_a)
        self.failUnlessEqual(self.value_a, self.msg.getSingle(self.type_a))
    def test_getSingle_none(self):
        self.failUnlessEqual(None, self.msg.getSingle(self.type_a))
    def test_getSingle_extra(self):
        # getSingle on a multi-valued attribute raises AXError.
        self.msg.setValues(self.type_a, ['x', 'y'])
        self.failUnlessRaises(ax.AXError, self.msg.getSingle, self.type_a)
    def test_get(self):
        self.failUnlessRaises(KeyError, self.msg.get, self.type_a)
    def test_fromSuccessResponseWithoutExtension(self):
        """return None for SuccessResponse with no AX paramaters."""
        args = {
            'mode': 'id_res',
            'ns': OPENID2_NS,
            }
        sf = ['openid.' + i for i in args.keys()]
        msg = Message.fromOpenIDArgs(args)
        class Endpoint:
            claimed_id = 'http://invalid.'
        oreq = SuccessResponse(Endpoint(), msg, signed_fields=sf)
        r = ax.FetchResponse.fromSuccessResponse(oreq)
        self.failUnless(r is None, "%s is not None" % (r,))
    def test_fromSuccessResponseWithoutData(self):
        """return something for SuccessResponse with AX paramaters,
        even if it is the empty set."""
        args = {
            'mode': 'id_res',
            'ns': OPENID2_NS,
            'ns.ax': ax.AXMessage.ns_uri,
            'ax.mode': 'fetch_response',
            }
        sf = ['openid.' + i for i in args.keys()]
        msg = Message.fromOpenIDArgs(args)
        class Endpoint:
            claimed_id = 'http://invalid.'
        oreq = SuccessResponse(Endpoint(), msg, signed_fields=sf)
        r = ax.FetchResponse.fromSuccessResponse(oreq)
        self.failUnless(r is not None)
    def test_fromSuccessResponseWithData(self):
        name = 'ext0'
        value = 'snozzberry'
        uri = "http://willy.wonka.name/"
        args = {
            'mode': 'id_res',
            'ns': OPENID2_NS,
            'ns.ax': ax.AXMessage.ns_uri,
            'ax.update_url': 'http://example.com/realm/update_path',
            'ax.mode': 'fetch_response',
            'ax.type.'+name: uri,
            'ax.count.'+name: '1',
            'ax.value.%s.1'%name: value,
            }
        sf = ['openid.' + i for i in args.keys()]
        msg = Message.fromOpenIDArgs(args)
        class Endpoint:
            claimed_id = 'http://invalid.'
        resp = SuccessResponse(Endpoint(), msg, signed_fields=sf)
        ax_resp = ax.FetchResponse.fromSuccessResponse(resp)
        values = ax_resp.get(uri)
        self.failUnlessEqual([value], values)
class StoreRequestTest(unittest.TestCase):
    """Tests for ax.StoreRequest extension-arg generation."""
    def setUp(self):
        self.msg = ax.StoreRequest()
        self.type_a = 'http://three.count/'
        self.alias_a = 'juggling'
    def test_construct(self):
        self.failUnlessEqual({}, self.msg.data)
    def test_getExtensionArgs_empty(self):
        args = self.msg.getExtensionArgs()
        expected_args = {
            'mode':'store_request',
            }
        self.failUnlessEqual(expected_args, args)
    def test_getExtensionArgs_nonempty(self):
        # Multi-valued attributes emit numbered value.<alias>.<n> keys.
        aliases = NamespaceMap()
        aliases.addAlias(self.type_a, self.alias_a)
        msg = ax.StoreRequest(aliases=aliases)
        msg.setValues(self.type_a, ['foo', 'bar'])
        args = msg.getExtensionArgs()
        expected_args = {
            'mode':'store_request',
            'type.' + self.alias_a: self.type_a,
            'count.' + self.alias_a: '2',
            'value.%s.1' % (self.alias_a,):'foo',
            'value.%s.2' % (self.alias_a,):'bar',
            }
        self.failUnlessEqual(expected_args, args)
class StoreResponseTest(unittest.TestCase):
    """Tests for ax.StoreResponse success/failure serialization."""

    def test_success(self):
        msg = ax.StoreResponse()
        # assertTrue/assertFalse replace the deprecated failUnless/failIf.
        self.assertTrue(msg.succeeded())
        self.assertFalse(msg.error_message)
        self.assertEqual({'mode': 'store_response_success'},
                         msg.getExtensionArgs())

    def test_fail_nomsg(self):
        msg = ax.StoreResponse(False)
        self.assertFalse(msg.succeeded())
        self.assertFalse(msg.error_message)
        self.assertEqual({'mode': 'store_response_failure'},
                         msg.getExtensionArgs())

    def test_fail_msg(self):
        reason = 'no reason, really'
        msg = ax.StoreResponse(False, reason)
        self.assertFalse(msg.succeeded())
        self.assertEqual(reason, msg.error_message)
        self.assertEqual({'mode': 'store_response_failure',
                          'error': reason}, msg.getExtensionArgs())
|
learningequality/kolibri
|
refs/heads/develop
|
kolibri/core/device/models.py
|
1
|
import platform
import time
from uuid import uuid4
from django.conf import settings
from django.db import models
from morango.models import UUIDField
from .utils import LANDING_PAGE_LEARN
from .utils import LANDING_PAGE_SIGN_IN
from kolibri.core.auth.models import Facility
from kolibri.core.auth.models import FacilityUser
from kolibri.core.utils.cache import process_cache as cache
from kolibri.plugins.app.utils import interface
device_permissions_fields = ["is_superuser", "can_manage_content"]
class DevicePermissions(models.Model):
    """
    This class stores metadata about device permissions for FacilityUsers.
    """

    # The user these permissions apply to; doubles as the primary key, so a
    # FacilityUser can have at most one DevicePermissions row.
    user = models.OneToOneField(
        FacilityUser,
        on_delete=models.CASCADE,
        related_name="devicepermissions",
        blank=False,
        null=False,
        primary_key=True,
    )
    # Full administrative rights on this device.
    is_superuser = models.BooleanField(default=False)
    # Whether the user may manage content on this device.
    can_manage_content = models.BooleanField(default=False)
# Process-cache key under which the DeviceSettings singleton is stored.
DEVICE_SETTINGS_CACHE_KEY = "device_settings_cache_key"
class DeviceSettingsManager(models.Manager):
    def get(self, **kwargs):
        """Return the DeviceSettings row, serving it from the process cache
        when possible.

        NOTE(review): on a cache hit the ``kwargs`` filters are ignored and
        the cached instance is returned regardless — acceptable only because
        DeviceSettings pins itself to pk=1 (see DeviceSettings.save).
        """
        if DEVICE_SETTINGS_CACHE_KEY not in cache:
            model = super(DeviceSettingsManager, self).get(**kwargs)
            # Cache for 10 minutes to avoid a DB hit on every settings read.
            cache.set(DEVICE_SETTINGS_CACHE_KEY, model, 600)
        else:
            model = cache.get(DEVICE_SETTINGS_CACHE_KEY)
        return model
def get_device_hostname():
    """Default value for DeviceSettings.name: this machine's hostname.

    The result is truncated to 50 characters so it always fits the
    ``name = CharField(max_length=50)`` field it is the default for.
    """
    return platform.node()[:50]
def app_is_enabled():
    # Returns whether the app plugin interface is enabled; used as the
    # default for DeviceSettings.allow_other_browsers_to_connect below.
    return interface.enabled
class DeviceSettings(models.Model):
    """
    This class stores data about settings particular to this device
    """

    LANDING_PAGE_CHOICES = [
        (LANDING_PAGE_SIGN_IN, "Sign-in page"),
        (LANDING_PAGE_LEARN, "Learn page"),
    ]
    # Custom manager that serves the singleton row through the process cache.
    objects = DeviceSettingsManager()
    # Has this device gone through initial setup yet?
    is_provisioned = models.BooleanField(default=False)
    # What is the default language that Kolibri is displayed in for this device?
    language_id = models.CharField(
        max_length=15, default=settings.LANGUAGE_CODE, blank=True, null=True
    )
    # What is the default facility for this device?
    default_facility = models.ForeignKey(
        Facility, on_delete=models.SET_NULL, blank=True, null=True
    )
    # Where should we redirect to on first page load?
    landing_page = models.CharField(
        max_length=7, choices=LANDING_PAGE_CHOICES, default=LANDING_PAGE_SIGN_IN
    )
    # Should users be able to browse content on this device without logging in?
    allow_guest_access = models.BooleanField(default=True)
    # Should peer devices be able to import non-public channels from this device?
    allow_peer_unlisted_channel_import = models.BooleanField(default=False)
    # Should learners be able to access resources that are not assigned to them on this device?
    allow_learner_unassigned_resource_access = models.BooleanField(default=True)
    # What's the name of this device?
    name = models.CharField(max_length=50, default=get_device_hostname)
    # Should this device allow browser sessions from non-localhost devices?
    allow_other_browsers_to_connect = models.BooleanField(default=app_is_enabled)
    # Is this a device that only synchronizes data about a subset of users?
    subset_of_users_device = models.BooleanField(default=False)
    def save(self, *args, **kwargs):
        # Force pk=1: there is only ever one row of device settings.
        self.pk = 1
        self.full_clean()
        super(DeviceSettings, self).save(*args, **kwargs)
        # Keep the process cache in sync with what was just written so
        # DeviceSettingsManager.get sees the update immediately.
        cache.set(DEVICE_SETTINGS_CACHE_KEY, self, 600)
# Process-cache key under which the current content cache key is stored.
CONTENT_CACHE_KEY_CACHE_KEY = "content_cache_key"
class ContentCacheKey(models.Model):
    """
    This class stores a cache key for content models that should be updated
    whenever the content metadata stored on the device changes.
    """

    # Cache-busting token; time.time() returns a float — presumably coerced
    # to int by the IntegerField on save (TODO confirm).
    key = models.IntegerField(default=time.time)
    def save(self, *args, **kwargs):
        # Force pk=1 so this model is a singleton row.
        self.pk = 1
        super(ContentCacheKey, self).save(*args, **kwargs)
    @classmethod
    def update_cache_key(cls):
        """Bump the key to the current time, persist it, and cache it."""
        cache_key, created = cls.objects.get_or_create()
        cache_key.key = time.time()
        cache_key.save()
        cache.set(CONTENT_CACHE_KEY_CACHE_KEY, cache_key.key, 5000)
        return cache_key
    @classmethod
    def get_cache_key(cls):
        """Return the current key, reading through the process cache and
        falling back to the database (creating a row on first use)."""
        key = cache.get(CONTENT_CACHE_KEY_CACHE_KEY)
        if key is None:
            try:
                cache_key = cls.objects.get()
            except cls.DoesNotExist:
                cache_key = cls.update_cache_key()
            key = cache_key.key
            cache.set(CONTENT_CACHE_KEY_CACHE_KEY, key, 5000)
        return key
# Process-cache key under which the current app key is stored.
APP_KEY_CACHE_KEY = "app_key"
class DeviceAppKey(models.Model):
    """
    This class stores a key that is checked to make sure that a webview
    is making requests from a privileged device (i.e. from inside an
    app-wrapper webview)
    """

    # Random UUID token; replaced wholesale by update_app_key().
    key = UUIDField(default=uuid4)
    def save(self, *args, **kwargs):
        # Force pk=1 so this model is a singleton row.
        self.pk = 1
        super(DeviceAppKey, self).save(*args, **kwargs)
    @classmethod
    def update_app_key(cls):
        """Generate and persist a fresh app key, updating the cache."""
        app_key, created = cls.objects.get_or_create()
        app_key.key = uuid4().hex
        app_key.save()
        cache.set(APP_KEY_CACHE_KEY, app_key.key, 5000)
        return app_key
    @classmethod
    def get_app_key(cls):
        """Return the current app key, reading through the cache and
        creating a database row on first use."""
        key = cache.get(APP_KEY_CACHE_KEY)
        if key is None:
            try:
                app_key = cls.objects.get()
            except cls.DoesNotExist:
                app_key = cls.update_app_key()
            key = app_key.key
            cache.set(APP_KEY_CACHE_KEY, key, 5000)
        return key
class SQLiteLock(models.Model):
    # NOTE(review): appears to be a single-row table used for database-level
    # locking on SQLite deployments — confirm against callers.
    id = models.AutoField(primary_key=True)
    def save(self, *args, **kwargs):
        # Force pk=1 so only one lock row can ever exist.
        self.pk = 1
        super(SQLiteLock, self).save(*args, **kwargs)
class SyncQueue(models.Model):
    """
    This class maintains the queue of the devices that try to sync
    with this server
    """

    id = UUIDField(primary_key=True, default=uuid4)
    user = models.ForeignKey(FacilityUser, on_delete=models.CASCADE, null=False)
    # When this entry was first enqueued.
    datetime = models.DateTimeField(auto_now_add=True)
    # Unix timestamp of the most recent update for this entry; compared
    # against time.time() in clean_stale below.
    updated = models.FloatField(default=time.time)
    # polling interval is 5 seconds by default
    keep_alive = models.FloatField(default=5.0)
    @classmethod
    def clean_stale(cls, expire=180.0):
        """
        This method will delete all the devices from the queue
        with the expire time (in seconds) exhausted
        """
        # Anything not updated within the last `expire` seconds is stale.
        staled_time = time.time() - expire
        cls.objects.filter(updated__lte=staled_time).delete()
|
lightcn/odoo
|
refs/heads/8.0
|
addons/website_event/tests/test_ui.py
|
339
|
import openerp.tests
# UI smoke test: registered to run after module installation (post_install),
# not during it (at_install False), against a live HTTP server.
@openerp.tests.common.at_install(False)
@openerp.tests.common.post_install(True)
class TestUi(openerp.tests.HttpCase):
    def test_admin(self):
        # Drive the 'event' tour in PhantomJS as the admin user; the third
        # argument is the JS expression waited on before running the tour.
        self.phantom_js("/", "openerp.Tour.run('event', 'test')", "openerp.Tour.tours.event", login='admin')
|
gchp/django
|
refs/heads/master
|
tests/defer/tests.py
|
338
|
from __future__ import unicode_literals
from django.db.models.query_utils import DeferredAttribute, InvalidQuery
from django.test import TestCase
from .models import (
BigChild, Child, ChildProxy, Primary, RefreshPrimaryProxy, Secondary,
)
class AssertionMixin(object):
    def assert_delayed(self, obj, num):
        """
        Assert that exactly ``num`` fields on ``obj`` are deferred.

        Instances with deferred fields look the same as normal instances
        when attribute values are examined, so instead we count the
        DeferredAttribute descriptors installed on the instance's class.
        """
        deferred = sum(
            1
            for field in obj._meta.fields
            if isinstance(
                obj.__class__.__dict__.get(field.attname), DeferredAttribute
            )
        )
        self.assertEqual(deferred, num)
class DeferTests(AssertionMixin, TestCase):
    """Core behavior of QuerySet.defer() and QuerySet.only()."""
    @classmethod
    def setUpTestData(cls):
        cls.s1 = Secondary.objects.create(first="x1", second="y1")
        cls.p1 = Primary.objects.create(name="p1", value="xx", related=cls.s1)
    def test_defer(self):
        qs = Primary.objects.all()
        self.assert_delayed(qs.defer("name")[0], 1)
        self.assert_delayed(qs.defer("name").get(pk=self.p1.pk), 1)
        # Deferring a field on a related model defers nothing on Primary.
        self.assert_delayed(qs.defer("related__first")[0], 0)
        # Successive defer() calls accumulate.
        self.assert_delayed(qs.defer("name").defer("value")[0], 2)
    def test_only(self):
        qs = Primary.objects.all()
        self.assert_delayed(qs.only("name")[0], 2)
        self.assert_delayed(qs.only("name").get(pk=self.p1.pk), 2)
        self.assert_delayed(qs.only("name").only("value")[0], 2)
        self.assert_delayed(qs.only("related__first")[0], 2)
        # Using 'pk' with only() should result in 3 deferred fields, namely all
        # of them except the model's primary key see #15494
        self.assert_delayed(qs.only("pk")[0], 3)
        # You can use 'pk' with reverse foreign key lookups.
        self.assert_delayed(self.s1.primary_set.all().only('pk')[0], 3)
    def test_defer_only_chaining(self):
        qs = Primary.objects.all()
        self.assert_delayed(qs.only("name", "value").defer("name")[0], 2)
        self.assert_delayed(qs.defer("name").only("value", "name")[0], 2)
        self.assert_delayed(qs.defer("name").only("value")[0], 2)
        self.assert_delayed(qs.only("name").defer("value")[0], 2)
    def test_defer_on_an_already_deferred_field(self):
        # Deferring the same field twice is idempotent.
        qs = Primary.objects.all()
        self.assert_delayed(qs.defer("name")[0], 1)
        self.assert_delayed(qs.defer("name").defer("name")[0], 1)
    def test_defer_none_to_clear_deferred_set(self):
        # defer(None) resets any previous defer()/only() field selection.
        qs = Primary.objects.all()
        self.assert_delayed(qs.defer("name", "value")[0], 2)
        self.assert_delayed(qs.defer(None)[0], 0)
        self.assert_delayed(qs.only("name").defer(None)[0], 0)
    def test_only_none_raises_error(self):
        # Unlike defer(None), only(None) is rejected outright.
        msg = 'Cannot pass None as an argument to only().'
        with self.assertRaisesMessage(TypeError, msg):
            Primary.objects.only(None)
    def test_defer_extra(self):
        # defer() composes with extra() in either order.
        qs = Primary.objects.all()
        self.assert_delayed(qs.defer("name").extra(select={"a": 1})[0], 1)
        self.assert_delayed(qs.extra(select={"a": 1}).defer("name")[0], 1)
    def test_defer_values_does_not_defer(self):
        # User values() won't defer anything (you get the full list of
        # dictionaries back), but it still works.
        self.assertEqual(Primary.objects.defer("name").values()[0], {
            "id": self.p1.id,
            "name": "p1",
            "value": "xx",
            "related_id": self.s1.id,
        })
    def test_only_values_does_not_defer(self):
        self.assertEqual(Primary.objects.only("name").values()[0], {
            "id": self.p1.id,
            "name": "p1",
            "value": "xx",
            "related_id": self.s1.id,
        })
    def test_get(self):
        # Using defer() and only() with get() is also valid.
        qs = Primary.objects.all()
        self.assert_delayed(qs.defer("name").get(pk=self.p1.pk), 1)
        self.assert_delayed(qs.only("name").get(pk=self.p1.pk), 2)
    def test_defer_with_select_related(self):
        obj = Primary.objects.select_related().defer("related__first", "related__second")[0]
        self.assert_delayed(obj.related, 2)
        self.assert_delayed(obj, 0)
    def test_only_with_select_related(self):
        obj = Primary.objects.select_related().only("related__first")[0]
        self.assert_delayed(obj, 2)
        self.assert_delayed(obj.related, 1)
        self.assertEqual(obj.related_id, self.s1.pk)
        self.assertEqual(obj.name, "p1")
    def test_defer_select_related_raises_invalid_query(self):
        # When we defer a field and also select_related it, the query is
        # invalid and raises an exception.
        with self.assertRaises(InvalidQuery):
            Primary.objects.defer("related").select_related("related")[0]
    def test_only_select_related_raises_invalid_query(self):
        with self.assertRaises(InvalidQuery):
            Primary.objects.only("name").select_related("related")[0]
    def test_defer_foreign_keys_are_deferred_and_not_traversed(self):
        # With a depth-based select_related, all deferred ForeignKeys are
        # deferred instead of traversed.
        with self.assertNumQueries(3):
            obj = Primary.objects.defer("related").select_related()[0]
            self.assert_delayed(obj, 1)
            self.assertEqual(obj.related.id, self.s1.pk)
    def test_saving_object_with_deferred_field(self):
        # Saving models with deferred fields is possible (but inefficient,
        # since every field has to be retrieved first).
        Primary.objects.create(name="p2", value="xy", related=self.s1)
        obj = Primary.objects.defer("value").get(name="p2")
        obj.name = "a new name"
        obj.save()
        self.assertQuerysetEqual(
            Primary.objects.all(), [
                "p1", "a new name",
            ],
            lambda p: p.name,
            ordered=False,
        )
    def test_defer_baseclass_when_subclass_has_no_added_fields(self):
        # Regression for #10572 - A subclass with no extra fields can defer
        # fields from the base class
        Child.objects.create(name="c1", value="foo", related=self.s1)
        # You can defer a field on a baseclass when the subclass has no fields
        obj = Child.objects.defer("value").get(name="c1")
        self.assert_delayed(obj, 1)
        self.assertEqual(obj.name, "c1")
        self.assertEqual(obj.value, "foo")
    def test_only_baseclass_when_subclass_has_no_added_fields(self):
        # You can retrieve a single column on a base class with no fields
        Child.objects.create(name="c1", value="foo", related=self.s1)
        obj = Child.objects.only("name").get(name="c1")
        # on an inherited model, its PK is also fetched, hence '3' deferred fields.
        self.assert_delayed(obj, 3)
        self.assertEqual(obj.name, "c1")
        self.assertEqual(obj.value, "foo")
class BigChildDeferTests(AssertionMixin, TestCase):
    """defer()/only() on a multi-table-inheritance child with extra fields."""
    @classmethod
    def setUpTestData(cls):
        cls.s1 = Secondary.objects.create(first="x1", second="y1")
        BigChild.objects.create(name="b1", value="foo", related=cls.s1, other="bar")
    def test_defer_baseclass_when_subclass_has_added_field(self):
        # You can defer a field on a baseclass
        obj = BigChild.objects.defer("value").get(name="b1")
        self.assert_delayed(obj, 1)
        self.assertEqual(obj.name, "b1")
        self.assertEqual(obj.value, "foo")
        self.assertEqual(obj.other, "bar")
    def test_defer_subclass(self):
        # You can defer a field on a subclass
        obj = BigChild.objects.defer("other").get(name="b1")
        self.assert_delayed(obj, 1)
        self.assertEqual(obj.name, "b1")
        self.assertEqual(obj.value, "foo")
        self.assertEqual(obj.other, "bar")
    def test_only_baseclass_when_subclass_has_added_field(self):
        # You can retrieve a single field on a baseclass
        obj = BigChild.objects.only("name").get(name="b1")
        # when inherited model, its PK is also fetched, hence '4' deferred fields.
        self.assert_delayed(obj, 4)
        self.assertEqual(obj.name, "b1")
        self.assertEqual(obj.value, "foo")
        self.assertEqual(obj.other, "bar")
    def test_only_sublcass(self):
        # You can retrieve a single field on a subclass
        obj = BigChild.objects.only("other").get(name="b1")
        self.assert_delayed(obj, 4)
        self.assertEqual(obj.name, "b1")
        self.assertEqual(obj.value, "foo")
        self.assertEqual(obj.other, "bar")
class TestDefer2(AssertionMixin, TestCase):
    """Assorted deferral regressions: proxies, PK chaining, refresh_from_db."""
    def test_defer_proxy(self):
        """
        Ensure select_related together with only on a proxy model behaves
        as expected. See #17876.
        """
        related = Secondary.objects.create(first='x1', second='x2')
        ChildProxy.objects.create(name='p1', value='xx', related=related)
        children = ChildProxy.objects.all().select_related().only('id', 'name')
        self.assertEqual(len(children), 1)
        child = children[0]
        self.assert_delayed(child, 2)
        self.assertEqual(child.name, 'p1')
        self.assertEqual(child.value, 'xx')
    def test_defer_inheritance_pk_chaining(self):
        """
        When an inherited model is fetched from the DB, its PK is also fetched.
        When getting the PK of the parent model it is useful to use the already
        fetched parent model PK if it happens to be available. Tests that this
        is done.
        """
        s1 = Secondary.objects.create(first="x1", second="y1")
        bc = BigChild.objects.create(name="b1", value="foo", related=s1,
                                     other="bar")
        bc_deferred = BigChild.objects.only('name').get(pk=bc.pk)
        # Accessing the parent PK must not trigger a query.
        with self.assertNumQueries(0):
            bc_deferred.id
        self.assertEqual(bc_deferred.pk, bc_deferred.id)
    def test_eq(self):
        # A deferred instance compares equal to its fully-loaded twin.
        s1 = Secondary.objects.create(first="x1", second="y1")
        s1_defer = Secondary.objects.only('pk').get(pk=s1.pk)
        self.assertEqual(s1, s1_defer)
        self.assertEqual(s1_defer, s1)
    def test_refresh_not_loading_deferred_fields(self):
        s = Secondary.objects.create()
        rf = Primary.objects.create(name='foo', value='bar', related=s)
        rf2 = Primary.objects.only('related', 'value').get()
        rf.name = 'new foo'
        rf.value = 'new bar'
        rf.save()
        # refresh_from_db() reloads only the already-loaded fields ...
        with self.assertNumQueries(1):
            rf2.refresh_from_db()
            self.assertEqual(rf2.value, 'new bar')
        # ... so touching the still-deferred field costs one more query.
        with self.assertNumQueries(1):
            self.assertEqual(rf2.name, 'new foo')
    def test_custom_refresh_on_deferred_loading(self):
        s = Secondary.objects.create()
        rf = RefreshPrimaryProxy.objects.create(name='foo', value='bar', related=s)
        rf2 = RefreshPrimaryProxy.objects.only('related').get()
        rf.name = 'new foo'
        rf.value = 'new bar'
        rf.save()
        with self.assertNumQueries(1):
            # Customized refresh_from_db() reloads all deferred fields on
            # access of any of them.
            self.assertEqual(rf2.name, 'new foo')
            self.assertEqual(rf2.value, 'new bar')
|
Pakoach/Sick-Beard
|
refs/heads/master
|
lib/pythontwitter/__init__.py
|
30
|
#!/usr/bin/env python
#
# vim: sw=2 ts=2 sts=2
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''A library that provides a Python interface to the Twitter API'''
__author__ = 'python-twitter@googlegroups.com'
__version__ = '1.0.1'
import base64
import calendar
import datetime
import httplib
import os
import rfc822
import sys
import tempfile
import textwrap
import time
import urllib
import urllib2
import urlparse
import gzip
import StringIO
try:
# Python >= 2.6
import json as simplejson
except ImportError:
try:
# Python < 2.6
import lib.simplejson as simplejson
except ImportError:
try:
# Google App Engine
from django.utils import simplejson
except ImportError:
raise ImportError, "Unable to load a json library"
# parse_qsl moved to urlparse module in v2.6
try:
from urlparse import parse_qsl, parse_qs
except ImportError:
from cgi import parse_qsl, parse_qs
try:
from hashlib import md5
except ImportError:
from md5 import md5
import lib.oauth2 as oauth
# Maximum tweet length, in characters.
CHARACTER_LIMIT = 140
# A singleton representing a lazily instantiated FileCache.
DEFAULT_CACHE = object()
# OAuth 1.0a endpoints for the Twitter API.
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
class TwitterError(Exception):
  '''Base exception type for all errors raised by this library.'''

  @property
  def message(self):
    '''The human-readable error text (the first constructor argument).'''
    return self.args[0]
class Status(object):
'''A class representing the Status structure used by the twitter API.
The Status structure exposes the following properties:
status.created_at
status.created_at_in_seconds # read only
status.favorited
status.favorite_count
status.in_reply_to_screen_name
status.in_reply_to_user_id
status.in_reply_to_status_id
status.truncated
status.source
status.id
status.text
status.location
status.relative_created_at # read only
status.user
status.urls
status.user_mentions
status.hashtags
status.geo
status.place
status.coordinates
status.contributors
'''
  def __init__(self,
               created_at=None,
               favorited=None,
               favorite_count=None,
               id=None,
               text=None,
               location=None,
               user=None,
               in_reply_to_screen_name=None,
               in_reply_to_user_id=None,
               in_reply_to_status_id=None,
               truncated=None,
               source=None,
               now=None,
               urls=None,
               user_mentions=None,
               hashtags=None,
               media=None,
               geo=None,
               place=None,
               coordinates=None,
               contributors=None,
               retweeted=None,
               retweeted_status=None,
               current_user_retweet=None,
               retweet_count=None,
               possibly_sensitive=None,
               scopes=None,
               withheld_copyright=None,
               withheld_in_countries=None,
               withheld_scope=None):
    '''An object to hold a Twitter status message.
    This class is normally instantiated by the twitter.Api class and
    returned in a sequence.
    Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"
    Args:
      created_at:
        The time this status message was posted. [Optional]
      favorited:
        Whether this is a favorite of the authenticated user. [Optional]
      favorite_count:
        Number of times this status message has been favorited. [Optional]
      id:
        The unique id of this status message. [Optional]
      text:
        The text of this status message. [Optional]
      location:
        the geolocation string associated with this message. [Optional]
      relative_created_at:
        A human readable string representing the posting time. [Optional]
      user:
        A twitter.User instance representing the person posting the
        message. [Optional]
      now:
        The current time, if the client chooses to set it.
        Defaults to the wall clock time. [Optional]
      urls:
      user_mentions:
      hashtags:
      geo:
      place:
      coordinates:
      contributors:
      retweeted:
      retweeted_status:
      current_user_retweet:
      retweet_count:
      possibly_sensitive:
      scopes:
      withheld_copyright:
      withheld_in_countries:
      withheld_scope:
    '''
    # Most of these assignments route through the property setters defined
    # below, which store the value on the matching underscored attribute;
    # urls/user_mentions/hashtags/media have no properties and are stored
    # as plain attributes.
    self.created_at = created_at
    self.favorited = favorited
    self.favorite_count = favorite_count
    self.id = id
    self.text = text
    self.location = location
    self.user = user
    self.now = now
    self.in_reply_to_screen_name = in_reply_to_screen_name
    self.in_reply_to_user_id = in_reply_to_user_id
    self.in_reply_to_status_id = in_reply_to_status_id
    self.truncated = truncated
    self.retweeted = retweeted
    self.source = source
    self.urls = urls
    self.user_mentions = user_mentions
    self.hashtags = hashtags
    self.media = media
    self.geo = geo
    self.place = place
    self.coordinates = coordinates
    self.contributors = contributors
    self.retweeted_status = retweeted_status
    self.current_user_retweet = current_user_retweet
    self.retweet_count = retweet_count
    self.possibly_sensitive = possibly_sensitive
    self.scopes = scopes
    self.withheld_copyright = withheld_copyright
    self.withheld_in_countries = withheld_in_countries
    self.withheld_scope = withheld_scope
  # -- created_at / favorited / favorite_count / id accessors --------------
  # Classic getter/setter pairs exposed as properties; each proxies the
  # matching underscored attribute.
  def GetCreatedAt(self):
    '''Get the time this status message was posted.
    Returns:
      The time this status message was posted
    '''
    return self._created_at
  def SetCreatedAt(self, created_at):
    '''Set the time this status message was posted.
    Args:
      created_at:
        The time this status message was created
    '''
    self._created_at = created_at
  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc='The time this status message was posted.')
  def GetCreatedAtInSeconds(self):
    '''Get the time this status message was posted, in seconds since the epoch.
    Returns:
      The time this status message was posted, in seconds since the epoch.
    '''
    # Twitter's date format is RFC822-compatible, so parse it and convert
    # to a UTC epoch timestamp.
    return calendar.timegm(rfc822.parsedate(self.created_at))
  created_at_in_seconds = property(GetCreatedAtInSeconds,
                                   doc="The time this status message was "
                                       "posted, in seconds since the epoch")
  def GetFavorited(self):
    '''Get the favorited setting of this status message.
    Returns:
      True if this status message is favorited; False otherwise
    '''
    return self._favorited
  def SetFavorited(self, favorited):
    '''Set the favorited state of this status message.
    Args:
      favorited:
        boolean True/False favorited state of this status message
    '''
    self._favorited = favorited
  favorited = property(GetFavorited, SetFavorited,
                       doc='The favorited state of this status message.')
  def GetFavoriteCount(self):
    '''Get the favorite count of this status message.
    Returns:
      number of times this status message has been favorited
    '''
    return self._favorite_count
  def SetFavoriteCount(self, favorite_count):
    '''Set the favorited state of this status message.
    Args:
      favorite_count:
        int number of favorites for this status message
    '''
    self._favorite_count = favorite_count
  favorite_count = property(GetFavoriteCount, SetFavoriteCount,
                            doc='The number of favorites for this status message.')
  def GetId(self):
    '''Get the unique id of this status message.
    Returns:
      The unique id of this status message
    '''
    return self._id
  def SetId(self, id):
    '''Set the unique id of this status message.
    Args:
      id:
        The unique id of this status message
    '''
    self._id = id
  id = property(GetId, SetId,
                doc='The unique id of this status message.')
  # -- reply metadata, truncated/retweeted/source, text, location ----------
  # Trivial getter/setter pairs; each proxies the underscored attribute.
  def GetInReplyToScreenName(self):
    return self._in_reply_to_screen_name
  def SetInReplyToScreenName(self, in_reply_to_screen_name):
    self._in_reply_to_screen_name = in_reply_to_screen_name
  in_reply_to_screen_name = property(GetInReplyToScreenName, SetInReplyToScreenName,
                                     doc='')
  def GetInReplyToUserId(self):
    return self._in_reply_to_user_id
  def SetInReplyToUserId(self, in_reply_to_user_id):
    self._in_reply_to_user_id = in_reply_to_user_id
  in_reply_to_user_id = property(GetInReplyToUserId, SetInReplyToUserId,
                                 doc='')
  def GetInReplyToStatusId(self):
    return self._in_reply_to_status_id
  def SetInReplyToStatusId(self, in_reply_to_status_id):
    self._in_reply_to_status_id = in_reply_to_status_id
  in_reply_to_status_id = property(GetInReplyToStatusId, SetInReplyToStatusId,
                                   doc='')
  def GetTruncated(self):
    return self._truncated
  def SetTruncated(self, truncated):
    self._truncated = truncated
  truncated = property(GetTruncated, SetTruncated,
                       doc='')
  def GetRetweeted(self):
    return self._retweeted
  def SetRetweeted(self, retweeted):
    self._retweeted = retweeted
  retweeted = property(GetRetweeted, SetRetweeted,
                       doc='')
  def GetSource(self):
    return self._source
  def SetSource(self, source):
    self._source = source
  source = property(GetSource, SetSource,
                    doc='')
  def GetText(self):
    '''Get the text of this status message.
    Returns:
      The text of this status message.
    '''
    return self._text
  def SetText(self, text):
    '''Set the text of this status message.
    Args:
      text:
        The text of this status message
    '''
    self._text = text
  text = property(GetText, SetText,
                  doc='The text of this status message')
  def GetLocation(self):
    '''Get the geolocation associated with this status message
    Returns:
      The geolocation string of this status message.
    '''
    return self._location
  def SetLocation(self, location):
    '''Set the geolocation associated with this status message
    Args:
      location:
        The geolocation string of this status message
    '''
    self._location = location
  location = property(GetLocation, SetLocation,
                      doc='The geolocation string of this status message')
  def GetRelativeCreatedAt(self):
    '''Get a human readable string representing the posting time
    Returns:
      A human readable string representing the posting time
    '''
    # fudge widens/narrows each time bucket by 25% so boundary times read
    # naturally. Note that (1 / fudge) is float division here (fudge is a
    # float), so e.g. the "seconds ago" bucket ends at ~48s, not 60s.
    fudge = 1.25
    delta = long(self.now) - long(self.created_at_in_seconds)
    if delta < (1 * fudge):
      return 'about a second ago'
    elif delta < (60 * (1 / fudge)):
      return 'about %d seconds ago' % (delta)
    elif delta < (60 * fudge):
      return 'about a minute ago'
    elif delta < (60 * 60 * (1 / fudge)):
      return 'about %d minutes ago' % (delta / 60)
    elif delta < (60 * 60 * fudge) or delta / (60 * 60) == 1:
      return 'about an hour ago'
    elif delta < (60 * 60 * 24 * (1 / fudge)):
      return 'about %d hours ago' % (delta / (60 * 60))
    elif delta < (60 * 60 * 24 * fudge) or delta / (60 * 60 * 24) == 1:
      return 'about a day ago'
    else:
      return 'about %d days ago' % (delta / (60 * 60 * 24))
  relative_created_at = property(GetRelativeCreatedAt,
                                 doc='Get a human readable string representing '
                                     'the posting time')
  def GetUser(self):
    '''Get a twitter.User representing the entity posting this status message.
    Returns:
      A twitter.User representing the entity posting this status message
    '''
    return self._user
  def SetUser(self, user):
    '''Set a twitter.User representing the entity posting this status message.
    Args:
      user:
        A twitter.User representing the entity posting this status message
    '''
    self._user = user
  user = property(GetUser, SetUser,
                  doc='A twitter.User representing the entity posting this '
                      'status message')
  def GetNow(self):
    '''Get the wallclock time for this status message.
    Used to calculate relative_created_at. Defaults to the time
    the object was instantiated.
    Returns:
      Whatever the status instance believes the current time to be,
      in seconds since the epoch.
    '''
    # Lazily initialized: first read captures the current wall clock.
    if self._now is None:
      self._now = time.time()
    return self._now
  def SetNow(self, now):
    '''Set the wallclock time for this status message.
    Used to calculate relative_created_at. Defaults to the time
    the object was instantiated.
    Args:
      now:
        The wallclock time for this instance.
    '''
    self._now = now
  now = property(GetNow, SetNow,
                 doc='The wallclock time for this status instance.')
  # -- remaining payload fields --------------------------------------------
  # Trivial getter/setter pairs; each proxies the underscored attribute.
  def GetGeo(self):
    return self._geo
  def SetGeo(self, geo):
    self._geo = geo
  geo = property(GetGeo, SetGeo,
                 doc='')
  def GetPlace(self):
    return self._place
  def SetPlace(self, place):
    self._place = place
  place = property(GetPlace, SetPlace,
                   doc='')
  def GetCoordinates(self):
    return self._coordinates
  def SetCoordinates(self, coordinates):
    self._coordinates = coordinates
  coordinates = property(GetCoordinates, SetCoordinates,
                         doc='')
  def GetContributors(self):
    return self._contributors
  def SetContributors(self, contributors):
    self._contributors = contributors
  contributors = property(GetContributors, SetContributors,
                          doc='')
  def GetRetweeted_status(self):
    return self._retweeted_status
  def SetRetweeted_status(self, retweeted_status):
    self._retweeted_status = retweeted_status
  retweeted_status = property(GetRetweeted_status, SetRetweeted_status,
                              doc='')
  def GetRetweetCount(self):
    return self._retweet_count
  def SetRetweetCount(self, retweet_count):
    self._retweet_count = retweet_count
  retweet_count = property(GetRetweetCount, SetRetweetCount,
                           doc='')
  def GetCurrent_user_retweet(self):
    return self._current_user_retweet
  def SetCurrent_user_retweet(self, current_user_retweet):
    self._current_user_retweet = current_user_retweet
  current_user_retweet = property(GetCurrent_user_retweet, SetCurrent_user_retweet,
                                  doc='')
  def GetPossibly_sensitive(self):
    return self._possibly_sensitive
  def SetPossibly_sensitive(self, possibly_sensitive):
    self._possibly_sensitive = possibly_sensitive
  possibly_sensitive = property(GetPossibly_sensitive, SetPossibly_sensitive,
                                doc='')
  def GetScopes(self):
    return self._scopes
  def SetScopes(self, scopes):
    self._scopes = scopes
  scopes = property(GetScopes, SetScopes, doc='')
  def GetWithheld_copyright(self):
    return self._withheld_copyright
  def SetWithheld_copyright(self, withheld_copyright):
    self._withheld_copyright = withheld_copyright
  withheld_copyright = property(GetWithheld_copyright, SetWithheld_copyright,
                                doc='')
  def GetWithheld_in_countries(self):
    return self._withheld_in_countries
  def SetWithheld_in_countries(self, withheld_in_countries):
    self._withheld_in_countries = withheld_in_countries
  withheld_in_countries = property(GetWithheld_in_countries, SetWithheld_in_countries,
                                   doc='')
  def GetWithheld_scope(self):
    return self._withheld_scope
  def SetWithheld_scope(self, withheld_scope):
    self._withheld_scope = withheld_scope
  withheld_scope = property(GetWithheld_scope, SetWithheld_scope,
                            doc='')
  def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__, so spell it out.
    return not self.__eq__(other)
  def __eq__(self, other):
    # Field-by-field comparison against another Status.
    # NOTE(review): entity lists (urls, user_mentions, hashtags, media) are
    # absent from this comparison; when `other` is falsy (e.g. None), the
    # falsy object itself — not False — is returned; and any attribute
    # missing on either side yields False via the AttributeError handler.
    try:
      return other and \
             self.created_at == other.created_at and \
             self.id == other.id and \
             self.text == other.text and \
             self.location == other.location and \
             self.user == other.user and \
             self.in_reply_to_screen_name == other.in_reply_to_screen_name and \
             self.in_reply_to_user_id == other.in_reply_to_user_id and \
             self.in_reply_to_status_id == other.in_reply_to_status_id and \
             self.truncated == other.truncated and \
             self.retweeted == other.retweeted and \
             self.favorited == other.favorited and \
             self.favorite_count == other.favorite_count and \
             self.source == other.source and \
             self.geo == other.geo and \
             self.place == other.place and \
             self.coordinates == other.coordinates and \
             self.contributors == other.contributors and \
             self.retweeted_status == other.retweeted_status and \
             self.retweet_count == other.retweet_count and \
             self.current_user_retweet == other.current_user_retweet and \
             self.possibly_sensitive == other.possibly_sensitive and \
             self.scopes == other.scopes and \
             self.withheld_copyright == other.withheld_copyright and \
             self.withheld_in_countries == other.withheld_in_countries and \
             self.withheld_scope == other.withheld_scope
    except AttributeError:
      return False
  def __str__(self):
    '''A string representation of this twitter.Status instance.
    The return value is the same as the JSON string representation.
    Returns:
      A string representation of this twitter.Status instance.
    '''
    return self.AsJsonString()
  def AsJsonString(self):
    '''A JSON string representation of this twitter.Status instance.
    Returns:
      A JSON string representation of this twitter.Status instance
    '''
    # sort_keys makes the output deterministic, so equal statuses always
    # serialize (and stringify) identically.
    return simplejson.dumps(self.AsDict(), sort_keys=True)
def AsDict(self):
'''A dict representation of this twitter.Status instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.Status instance
'''
data = {}
if self.created_at:
data['created_at'] = self.created_at
if self.favorited:
data['favorited'] = self.favorited
if self.favorite_count:
data['favorite_count'] = self.favorite_count
if self.id:
data['id'] = self.id
if self.text:
data['text'] = self.text
if self.location:
data['location'] = self.location
if self.user:
data['user'] = self.user.AsDict()
if self.in_reply_to_screen_name:
data['in_reply_to_screen_name'] = self.in_reply_to_screen_name
if self.in_reply_to_user_id:
data['in_reply_to_user_id'] = self.in_reply_to_user_id
if self.in_reply_to_status_id:
data['in_reply_to_status_id'] = self.in_reply_to_status_id
if self.truncated is not None:
data['truncated'] = self.truncated
if self.retweeted is not None:
data['retweeted'] = self.retweeted
if self.favorited is not None:
data['favorited'] = self.favorited
if self.source:
data['source'] = self.source
if self.geo:
data['geo'] = self.geo
if self.place:
data['place'] = self.place
if self.coordinates:
data['coordinates'] = self.coordinates
if self.contributors:
data['contributors'] = self.contributors
if self.hashtags:
data['hashtags'] = [h.text for h in self.hashtags]
if self.retweeted_status:
data['retweeted_status'] = self.retweeted_status.AsDict()
if self.retweet_count:
data['retweet_count'] = self.retweet_count
if self.urls:
data['urls'] = dict([(url.url, url.expanded_url) for url in self.urls])
if self.user_mentions:
data['user_mentions'] = [um.AsDict() for um in self.user_mentions]
if self.current_user_retweet:
data['current_user_retweet'] = self.current_user_retweet
if self.possibly_sensitive:
data['possibly_sensitive'] = self.possibly_sensitive
if self.scopes:
data['scopes'] = self.scopes
if self.withheld_copyright:
data['withheld_copyright'] = self.withheld_copyright
if self.withheld_in_countries:
data['withheld_in_countries'] = self.withheld_in_countries
if self.withheld_scope:
data['withheld_scope'] = self.withheld_scope
return data
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data: A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.Status instance
'''
if 'user' in data:
user = User.NewFromJsonDict(data['user'])
else:
user = None
if 'retweeted_status' in data:
retweeted_status = Status.NewFromJsonDict(data['retweeted_status'])
else:
retweeted_status = None
if 'current_user_retweet' in data:
current_user_retweet = data['current_user_retweet']['id']
else:
current_user_retweet = None
urls = None
user_mentions = None
hashtags = None
media = None
if 'entities' in data:
if 'urls' in data['entities']:
urls = [Url.NewFromJsonDict(u) for u in data['entities']['urls']]
if 'user_mentions' in data['entities']:
user_mentions = [User.NewFromJsonDict(u) for u in data['entities']['user_mentions']]
if 'hashtags' in data['entities']:
hashtags = [Hashtag.NewFromJsonDict(h) for h in data['entities']['hashtags']]
if 'media' in data['entities']:
media = data['entities']['media']
else:
media = []
return Status(created_at=data.get('created_at', None),
favorited=data.get('favorited', None),
favorite_count=data.get('favorite_count', None),
id=data.get('id', None),
text=data.get('text', None),
location=data.get('location', None),
in_reply_to_screen_name=data.get('in_reply_to_screen_name', None),
in_reply_to_user_id=data.get('in_reply_to_user_id', None),
in_reply_to_status_id=data.get('in_reply_to_status_id', None),
truncated=data.get('truncated', None),
retweeted=data.get('retweeted', None),
source=data.get('source', None),
user=user,
urls=urls,
user_mentions=user_mentions,
hashtags=hashtags,
media=media,
geo=data.get('geo', None),
place=data.get('place', None),
coordinates=data.get('coordinates', None),
contributors=data.get('contributors', None),
retweeted_status=retweeted_status,
current_user_retweet=current_user_retweet,
retweet_count=data.get('retweet_count', None),
possibly_sensitive=data.get('possibly_sensitive', None),
scopes=data.get('scopes', None),
withheld_copyright=data.get('withheld_copyright', None),
withheld_in_countries=data.get('withheld_in_countries', None),
withheld_scope=data.get('withheld_scope', None))
class User(object):
  '''A class representing the User structure used by the twitter API.

  The User structure exposes the following properties:

    user.id
    user.name
    user.screen_name
    user.location
    user.description
    user.profile_image_url
    user.profile_background_tile
    user.profile_background_image_url
    user.profile_sidebar_fill_color
    user.profile_background_color
    user.profile_link_color
    user.profile_text_color
    user.protected
    user.utc_offset
    user.time_zone
    user.url
    user.status
    user.statuses_count
    user.followers_count
    user.friends_count
    user.favourites_count
    user.geo_enabled
    user.verified
    user.lang
    user.notifications
    user.contributors_enabled
    user.created_at
    user.listed_count
  '''
  def __init__(self,
               id=None,
               name=None,
               screen_name=None,
               location=None,
               description=None,
               profile_image_url=None,
               profile_background_tile=None,
               profile_background_image_url=None,
               profile_sidebar_fill_color=None,
               profile_background_color=None,
               profile_link_color=None,
               profile_text_color=None,
               protected=None,
               utc_offset=None,
               time_zone=None,
               followers_count=None,
               friends_count=None,
               statuses_count=None,
               favourites_count=None,
               url=None,
               status=None,
               geo_enabled=None,
               verified=None,
               lang=None,
               notifications=None,
               contributors_enabled=None,
               created_at=None,
               listed_count=None):
    self.id = id
    self.name = name
    self.screen_name = screen_name
    self.location = location
    self.description = description
    self.profile_image_url = profile_image_url
    self.profile_background_tile = profile_background_tile
    self.profile_background_image_url = profile_background_image_url
    self.profile_sidebar_fill_color = profile_sidebar_fill_color
    self.profile_background_color = profile_background_color
    self.profile_link_color = profile_link_color
    self.profile_text_color = profile_text_color
    self.protected = protected
    self.utc_offset = utc_offset
    self.time_zone = time_zone
    self.followers_count = followers_count
    self.friends_count = friends_count
    self.statuses_count = statuses_count
    self.favourites_count = favourites_count
    self.url = url
    self.status = status
    self.geo_enabled = geo_enabled
    self.verified = verified
    self.lang = lang
    self.notifications = notifications
    self.contributors_enabled = contributors_enabled
    self.created_at = created_at
    self.listed_count = listed_count
  def GetId(self):
    '''Get the unique id of this user.

    Returns:
      The unique id of this user
    '''
    return self._id
  def SetId(self, id):
    '''Set the unique id of this user.

    Args:
      id: The unique id of this user.
    '''
    self._id = id
  id = property(GetId, SetId,
                doc='The unique id of this user.')
  def GetName(self):
    '''Get the real name of this user.

    Returns:
      The real name of this user
    '''
    return self._name
  def SetName(self, name):
    '''Set the real name of this user.

    Args:
      name: The real name of this user
    '''
    self._name = name
  name = property(GetName, SetName,
                  doc='The real name of this user.')
  def GetScreenName(self):
    '''Get the short twitter name of this user.

    Returns:
      The short twitter name of this user
    '''
    return self._screen_name
  def SetScreenName(self, screen_name):
    '''Set the short twitter name of this user.

    Args:
      screen_name: the short twitter name of this user
    '''
    self._screen_name = screen_name
  screen_name = property(GetScreenName, SetScreenName,
                         doc='The short twitter name of this user.')
  def GetLocation(self):
    '''Get the geographic location of this user.

    Returns:
      The geographic location of this user
    '''
    return self._location
  def SetLocation(self, location):
    '''Set the geographic location of this user.

    Args:
      location: The geographic location of this user
    '''
    self._location = location
  location = property(GetLocation, SetLocation,
                      doc='The geographic location of this user.')
  def GetDescription(self):
    '''Get the short text description of this user.

    Returns:
      The short text description of this user
    '''
    return self._description
  def SetDescription(self, description):
    '''Set the short text description of this user.

    Args:
      description: The short text description of this user
    '''
    self._description = description
  description = property(GetDescription, SetDescription,
                         doc='The short text description of this user.')
  def GetUrl(self):
    '''Get the homepage url of this user.

    Returns:
      The homepage url of this user
    '''
    return self._url
  def SetUrl(self, url):
    '''Set the homepage url of this user.

    Args:
      url: The homepage url of this user
    '''
    self._url = url
  url = property(GetUrl, SetUrl,
                 doc='The homepage url of this user.')
  def GetProfileImageUrl(self):
    '''Get the url of the thumbnail of this user.

    Returns:
      The url of the thumbnail of this user
    '''
    return self._profile_image_url
  def SetProfileImageUrl(self, profile_image_url):
    '''Set the url of the thumbnail of this user.

    Args:
      profile_image_url: The url of the thumbnail of this user
    '''
    self._profile_image_url = profile_image_url
  profile_image_url = property(GetProfileImageUrl, SetProfileImageUrl,
                               doc='The url of the thumbnail of this user.')
  def GetProfileBackgroundTile(self):
    '''Boolean for whether to tile the profile background image.

    Returns:
      True if the background is to be tiled, False if not, None if unset.
    '''
    return self._profile_background_tile
  def SetProfileBackgroundTile(self, profile_background_tile):
    '''Set the boolean flag for whether to tile the profile background image.

    Args:
      profile_background_tile: Boolean flag for whether to tile or not.
    '''
    self._profile_background_tile = profile_background_tile
  profile_background_tile = property(GetProfileBackgroundTile, SetProfileBackgroundTile,
                                     doc='Boolean for whether to tile the background image.')
  def GetProfileBackgroundImageUrl(self):
    '''Get the url of the profile background image of this user.'''
    return self._profile_background_image_url
  def SetProfileBackgroundImageUrl(self, profile_background_image_url):
    '''Set the url of the profile background image of this user.'''
    self._profile_background_image_url = profile_background_image_url
  profile_background_image_url = property(GetProfileBackgroundImageUrl, SetProfileBackgroundImageUrl,
                                          doc='The url of the profile background of this user.')
  def GetProfileSidebarFillColor(self):
    '''Get the profile sidebar fill color of this user.'''
    return self._profile_sidebar_fill_color
  def SetProfileSidebarFillColor(self, profile_sidebar_fill_color):
    '''Set the profile sidebar fill color of this user.'''
    self._profile_sidebar_fill_color = profile_sidebar_fill_color
  profile_sidebar_fill_color = property(GetProfileSidebarFillColor, SetProfileSidebarFillColor)
  def GetProfileBackgroundColor(self):
    '''Get the profile background color of this user.'''
    return self._profile_background_color
  def SetProfileBackgroundColor(self, profile_background_color):
    '''Set the profile background color of this user.'''
    self._profile_background_color = profile_background_color
  profile_background_color = property(GetProfileBackgroundColor, SetProfileBackgroundColor)
  def GetProfileLinkColor(self):
    '''Get the profile link color of this user.'''
    return self._profile_link_color
  def SetProfileLinkColor(self, profile_link_color):
    '''Set the profile link color of this user.'''
    self._profile_link_color = profile_link_color
  profile_link_color = property(GetProfileLinkColor, SetProfileLinkColor)
  def GetProfileTextColor(self):
    '''Get the profile text color of this user.'''
    return self._profile_text_color
  def SetProfileTextColor(self, profile_text_color):
    '''Set the profile text color of this user.'''
    self._profile_text_color = profile_text_color
  profile_text_color = property(GetProfileTextColor, SetProfileTextColor)
  def GetProtected(self):
    '''Get whether this user's account is protected.'''
    return self._protected
  def SetProtected(self, protected):
    '''Set whether this user's account is protected.'''
    self._protected = protected
  protected = property(GetProtected, SetProtected)
  def GetUtcOffset(self):
    '''Get the UTC offset (in seconds) of this user.'''
    return self._utc_offset
  def SetUtcOffset(self, utc_offset):
    '''Set the UTC offset of this user.'''
    self._utc_offset = utc_offset
  utc_offset = property(GetUtcOffset, SetUtcOffset)
  def GetTimeZone(self):
    '''Returns the current time zone string for the user.

    Returns:
      The descriptive time zone string for the user.
    '''
    return self._time_zone
  def SetTimeZone(self, time_zone):
    '''Sets the user's time zone string.

    Args:
      time_zone:
        The descriptive time zone to assign for the user.
    '''
    self._time_zone = time_zone
  time_zone = property(GetTimeZone, SetTimeZone)
  def GetStatus(self):
    '''Get the latest twitter.Status of this user.

    Returns:
      The latest twitter.Status of this user
    '''
    return self._status
  def SetStatus(self, status):
    '''Set the latest twitter.Status of this user.

    Args:
      status:
        The latest twitter.Status of this user
    '''
    self._status = status
  status = property(GetStatus, SetStatus,
                    doc='The latest twitter.Status of this user.')
  def GetFriendsCount(self):
    '''Get the friend count for this user.

    Returns:
      The number of users this user has befriended.
    '''
    return self._friends_count
  def SetFriendsCount(self, count):
    '''Set the friend count for this user.

    Args:
      count:
        The number of users this user has befriended.
    '''
    self._friends_count = count
  friends_count = property(GetFriendsCount, SetFriendsCount,
                           doc='The number of friends for this user.')
  def GetListedCount(self):
    '''Get the listed count for this user.

    Returns:
      The number of lists this user belongs to.
    '''
    return self._listed_count
  def SetListedCount(self, count):
    '''Set the listed count for this user.

    Args:
      count:
        The number of lists this user belongs to.
    '''
    self._listed_count = count
  listed_count = property(GetListedCount, SetListedCount,
                          doc='The number of lists this user belongs to.')
  def GetFollowersCount(self):
    '''Get the follower count for this user.

    Returns:
      The number of users following this user.
    '''
    return self._followers_count
  def SetFollowersCount(self, count):
    '''Set the follower count for this user.

    Args:
      count:
        The number of users following this user.
    '''
    self._followers_count = count
  followers_count = property(GetFollowersCount, SetFollowersCount,
                             doc='The number of users following this user.')
  def GetStatusesCount(self):
    '''Get the number of status updates for this user.

    Returns:
      The number of status updates for this user.
    '''
    return self._statuses_count
  def SetStatusesCount(self, count):
    '''Set the status update count for this user.

    Args:
      count:
        The number of updates for this user.
    '''
    self._statuses_count = count
  statuses_count = property(GetStatusesCount, SetStatusesCount,
                            doc='The number of updates for this user.')
  def GetFavouritesCount(self):
    '''Get the number of favourites for this user.

    Returns:
      The number of favourites for this user.
    '''
    return self._favourites_count
  def SetFavouritesCount(self, count):
    '''Set the favourite count for this user.

    Args:
      count:
        The number of favourites for this user.
    '''
    self._favourites_count = count
  favourites_count = property(GetFavouritesCount, SetFavouritesCount,
                              doc='The number of favourites for this user.')
  def GetGeoEnabled(self):
    '''Get the setting of geo_enabled for this user.

    Returns:
      True/False if Geo tagging is enabled
    '''
    return self._geo_enabled
  def SetGeoEnabled(self, geo_enabled):
    '''Set the latest twitter.geo_enabled of this user.

    Args:
      geo_enabled:
        True/False if Geo tagging is to be enabled
    '''
    self._geo_enabled = geo_enabled
  geo_enabled = property(GetGeoEnabled, SetGeoEnabled,
                         doc='The value of twitter.geo_enabled for this user.')
  def GetVerified(self):
    '''Get the setting of verified for this user.

    Returns:
      True/False if user is a verified account
    '''
    return self._verified
  def SetVerified(self, verified):
    '''Set twitter.verified for this user.

    Args:
      verified:
        True/False if user is a verified account
    '''
    self._verified = verified
  verified = property(GetVerified, SetVerified,
                      doc='The value of twitter.verified for this user.')
  def GetLang(self):
    '''Get the setting of lang for this user.

    Returns:
      language code of the user
    '''
    return self._lang
  def SetLang(self, lang):
    '''Set twitter.lang for this user.

    Args:
      lang:
        language code for the user
    '''
    self._lang = lang
  lang = property(GetLang, SetLang,
                  doc='The value of twitter.lang for this user.')
  def GetNotifications(self):
    '''Get the setting of notifications for this user.

    Returns:
      True/False for the notifications setting of the user
    '''
    return self._notifications
  def SetNotifications(self, notifications):
    '''Set twitter.notifications for this user.

    Args:
      notifications:
        True/False notifications setting for the user
    '''
    self._notifications = notifications
  notifications = property(GetNotifications, SetNotifications,
                           doc='The value of twitter.notifications for this user.')
  def GetContributorsEnabled(self):
    '''Get the setting of contributors_enabled for this user.

    Returns:
      True/False contributors_enabled of the user
    '''
    return self._contributors_enabled
  def SetContributorsEnabled(self, contributors_enabled):
    '''Set twitter.contributors_enabled for this user.

    Args:
      contributors_enabled:
        True/False contributors_enabled setting for the user
    '''
    self._contributors_enabled = contributors_enabled
  contributors_enabled = property(GetContributorsEnabled, SetContributorsEnabled,
                                  doc='The value of twitter.contributors_enabled for this user.')
  def GetCreatedAt(self):
    '''Get the setting of created_at for this user.

    Returns:
      created_at value of the user
    '''
    return self._created_at
  def SetCreatedAt(self, created_at):
    '''Set twitter.created_at for this user.

    Args:
      created_at:
        created_at value for the user
    '''
    self._created_at = created_at
  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc='The value of twitter.created_at for this user.')
  def __ne__(self, other):
    # Delegates to __eq__ (Python 2 does not derive __ne__ automatically).
    return not self.__eq__(other)
  def __eq__(self, other):
    try:
      return other and \
             self.id == other.id and \
             self.name == other.name and \
             self.screen_name == other.screen_name and \
             self.location == other.location and \
             self.description == other.description and \
             self.profile_image_url == other.profile_image_url and \
             self.profile_background_tile == other.profile_background_tile and \
             self.profile_background_image_url == other.profile_background_image_url and \
             self.profile_sidebar_fill_color == other.profile_sidebar_fill_color and \
             self.profile_background_color == other.profile_background_color and \
             self.profile_link_color == other.profile_link_color and \
             self.profile_text_color == other.profile_text_color and \
             self.protected == other.protected and \
             self.utc_offset == other.utc_offset and \
             self.time_zone == other.time_zone and \
             self.url == other.url and \
             self.statuses_count == other.statuses_count and \
             self.followers_count == other.followers_count and \
             self.favourites_count == other.favourites_count and \
             self.friends_count == other.friends_count and \
             self.status == other.status and \
             self.geo_enabled == other.geo_enabled and \
             self.verified == other.verified and \
             self.lang == other.lang and \
             self.notifications == other.notifications and \
             self.contributors_enabled == other.contributors_enabled and \
             self.created_at == other.created_at and \
             self.listed_count == other.listed_count
    except AttributeError:
      return False
  def __str__(self):
    '''A string representation of this twitter.User instance.

    The return value is the same as the JSON string representation.

    Returns:
      A string representation of this twitter.User instance.
    '''
    return self.AsJsonString()
  def AsJsonString(self):
    '''A JSON string representation of this twitter.User instance.

    Returns:
      A JSON string representation of this twitter.User instance
    '''
    return simplejson.dumps(self.AsDict(), sort_keys=True)
  def AsDict(self):
    '''A dict representation of this twitter.User instance.

    The return value uses the same key names as the JSON representation.

    Return:
      A dict representing this twitter.User instance
    '''
    data = {}
    if self.id:
      data['id'] = self.id
    if self.name:
      data['name'] = self.name
    if self.screen_name:
      data['screen_name'] = self.screen_name
    if self.location:
      data['location'] = self.location
    if self.description:
      data['description'] = self.description
    if self.profile_image_url:
      data['profile_image_url'] = self.profile_image_url
    if self.profile_background_tile is not None:
      data['profile_background_tile'] = self.profile_background_tile
    # Bug fix: the background image url was previously stored under the
    # 'profile_sidebar_fill_color' key, and the sidebar fill color itself
    # was never serialized. Each attribute now maps to its own key.
    if self.profile_background_image_url:
      data['profile_background_image_url'] = self.profile_background_image_url
    if self.profile_sidebar_fill_color:
      data['profile_sidebar_fill_color'] = self.profile_sidebar_fill_color
    if self.profile_background_color:
      data['profile_background_color'] = self.profile_background_color
    if self.profile_link_color:
      data['profile_link_color'] = self.profile_link_color
    if self.profile_text_color:
      data['profile_text_color'] = self.profile_text_color
    if self.protected is not None:
      data['protected'] = self.protected
    if self.utc_offset:
      data['utc_offset'] = self.utc_offset
    if self.time_zone:
      data['time_zone'] = self.time_zone
    if self.url:
      data['url'] = self.url
    if self.status:
      data['status'] = self.status.AsDict()
    if self.friends_count:
      data['friends_count'] = self.friends_count
    if self.followers_count:
      data['followers_count'] = self.followers_count
    if self.statuses_count:
      data['statuses_count'] = self.statuses_count
    if self.favourites_count:
      data['favourites_count'] = self.favourites_count
    if self.geo_enabled:
      data['geo_enabled'] = self.geo_enabled
    if self.verified:
      data['verified'] = self.verified
    if self.lang:
      data['lang'] = self.lang
    if self.notifications:
      data['notifications'] = self.notifications
    if self.contributors_enabled:
      data['contributors_enabled'] = self.contributors_enabled
    if self.created_at:
      data['created_at'] = self.created_at
    if self.listed_count:
      data['listed_count'] = self.listed_count
    return data
  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.User instance
    '''
    if 'status' in data:
      status = Status.NewFromJsonDict(data['status'])
    else:
      status = None
    return User(id=data.get('id', None),
                name=data.get('name', None),
                screen_name=data.get('screen_name', None),
                location=data.get('location', None),
                description=data.get('description', None),
                statuses_count=data.get('statuses_count', None),
                followers_count=data.get('followers_count', None),
                favourites_count=data.get('favourites_count', None),
                friends_count=data.get('friends_count', None),
                profile_image_url=data.get('profile_image_url_https', data.get('profile_image_url', None)),
                profile_background_tile=data.get('profile_background_tile', None),
                profile_background_image_url=data.get('profile_background_image_url', None),
                profile_sidebar_fill_color=data.get('profile_sidebar_fill_color', None),
                profile_background_color=data.get('profile_background_color', None),
                profile_link_color=data.get('profile_link_color', None),
                profile_text_color=data.get('profile_text_color', None),
                protected=data.get('protected', None),
                utc_offset=data.get('utc_offset', None),
                time_zone=data.get('time_zone', None),
                url=data.get('url', None),
                status=status,
                geo_enabled=data.get('geo_enabled', None),
                verified=data.get('verified', None),
                lang=data.get('lang', None),
                notifications=data.get('notifications', None),
                contributors_enabled=data.get('contributors_enabled', None),
                created_at=data.get('created_at', None),
                listed_count=data.get('listed_count', None))
class List(object):
  '''A class representing the List structure used by the twitter API.

  The List structure exposes the following properties:

    list.id
    list.name
    list.slug
    list.description
    list.full_name
    list.mode
    list.uri
    list.member_count
    list.subscriber_count
    list.following
  '''
  # Attribute names shared by __eq__, AsDict and NewFromJsonDict.
  def __init__(self,
               id=None,
               name=None,
               slug=None,
               description=None,
               full_name=None,
               mode=None,
               uri=None,
               member_count=None,
               subscriber_count=None,
               following=None,
               user=None):
    self.id = id
    self.name = name
    self.slug = slug
    self.description = description
    self.full_name = full_name
    self.mode = mode
    self.uri = uri
    self.member_count = member_count
    self.subscriber_count = subscriber_count
    self.following = following
    self.user = user
  def GetId(self):
    '''Return the unique id of this list.'''
    return self._id
  def SetId(self, id):
    '''Store the unique id of this list.'''
    self._id = id
  id = property(GetId, SetId,
                doc='The unique id of this list.')
  def GetName(self):
    '''Return the real name of this list.'''
    return self._name
  def SetName(self, name):
    '''Store the real name of this list.'''
    self._name = name
  name = property(GetName, SetName,
                  doc='The real name of this list.')
  def GetSlug(self):
    '''Return the slug of this list.'''
    return self._slug
  def SetSlug(self, slug):
    '''Store the slug of this list.'''
    self._slug = slug
  slug = property(GetSlug, SetSlug,
                  doc='The slug of this list.')
  def GetDescription(self):
    '''Return the description of this list.'''
    return self._description
  def SetDescription(self, description):
    '''Store the description of this list.'''
    self._description = description
  description = property(GetDescription, SetDescription,
                         doc='The description of this list.')
  def GetFull_name(self):
    '''Return the full_name of this list.'''
    return self._full_name
  def SetFull_name(self, full_name):
    '''Store the full_name of this list.'''
    self._full_name = full_name
  full_name = property(GetFull_name, SetFull_name,
                       doc='The full_name of this list.')
  def GetMode(self):
    '''Return the mode of this list.'''
    return self._mode
  def SetMode(self, mode):
    '''Store the mode of this list.'''
    self._mode = mode
  mode = property(GetMode, SetMode,
                  doc='The mode of this list.')
  def GetUri(self):
    '''Return the uri of this list.'''
    return self._uri
  def SetUri(self, uri):
    '''Store the uri of this list.'''
    self._uri = uri
  uri = property(GetUri, SetUri,
                 doc='The uri of this list.')
  def GetMember_count(self):
    '''Return the member_count of this list.'''
    return self._member_count
  def SetMember_count(self, member_count):
    '''Store the member_count of this list.'''
    self._member_count = member_count
  member_count = property(GetMember_count, SetMember_count,
                          doc='The member_count of this list.')
  def GetSubscriber_count(self):
    '''Return the subscriber_count of this list.'''
    return self._subscriber_count
  def SetSubscriber_count(self, subscriber_count):
    '''Store the subscriber_count of this list.'''
    self._subscriber_count = subscriber_count
  subscriber_count = property(GetSubscriber_count, SetSubscriber_count,
                              doc='The subscriber_count of this list.')
  def GetFollowing(self):
    '''Return the following status of this list.'''
    return self._following
  def SetFollowing(self, following):
    '''Store the following status of this list.'''
    self._following = following
  following = property(GetFollowing, SetFollowing,
                       doc='The following status of this list.')
  def GetUser(self):
    '''Return the owner of this list.'''
    return self._user
  def SetUser(self, user):
    '''Store the owner of this list.'''
    self._user = user
  user = property(GetUser, SetUser,
                  doc='The owner of this list.')
  def __ne__(self, other):
    # Delegates to __eq__ (Python 2 does not derive __ne__ automatically).
    return not self.__eq__(other)
  def __eq__(self, other):
    '''Value comparison: two lists are equal when every tracked field matches.

    A falsy ``other`` is returned as-is (matching ``and``-chain semantics);
    a missing attribute yields False.
    '''
    try:
      if not other:
        return other
      for field in ('id', 'name', 'slug', 'description', 'full_name',
                    'mode', 'uri', 'member_count', 'subscriber_count',
                    'following', 'user'):
        if getattr(self, field) != getattr(other, field):
          return False
      return True
    except AttributeError:
      return False
  def __str__(self):
    '''A string representation of this twitter.List instance.

    The return value is the same as the JSON string representation.

    Returns:
      A string representation of this twitter.List instance.
    '''
    return self.AsJsonString()
  def AsJsonString(self):
    '''A JSON string representation of this twitter.List instance.

    Returns:
      A JSON string representation of this twitter.List instance
    '''
    return simplejson.dumps(self.AsDict(), sort_keys=True)
  def AsDict(self):
    '''A dict representation of this twitter.List instance.

    The return value uses the same key names as the JSON representation.

    Return:
      A dict representing this twitter.List instance
    '''
    data = {}
    # These keys are emitted only when their value is truthy.
    for key in ('id', 'name', 'slug', 'description', 'full_name',
                'mode', 'uri'):
      value = getattr(self, key)
      if value:
        data[key] = value
    # Counters and flags keep falsy-but-set values such as 0 or False.
    for key in ('member_count', 'subscriber_count', 'following'):
      value = getattr(self, key)
      if value is not None:
        data[key] = value
    if self.user is not None:
      data['user'] = self.user.AsDict()
    return data
  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.List instance
    '''
    owner = User.NewFromJsonDict(data['user']) if 'user' in data else None
    kwargs = dict((key, data.get(key, None))
                  for key in ('id', 'name', 'slug', 'description',
                              'full_name', 'mode', 'uri', 'member_count',
                              'subscriber_count', 'following'))
    return List(user=owner, **kwargs)
class DirectMessage(object):
  '''A class representing the DirectMessage structure used by the twitter API.

  The DirectMessage structure exposes the following properties:

    direct_message.id
    direct_message.created_at
    direct_message.created_at_in_seconds # read only
    direct_message.sender_id
    direct_message.sender_screen_name
    direct_message.recipient_id
    direct_message.recipient_screen_name
    direct_message.text
  '''
  def __init__(self,
               id=None,
               created_at=None,
               sender_id=None,
               sender_screen_name=None,
               recipient_id=None,
               recipient_screen_name=None,
               text=None):
    '''An object to hold a Twitter direct message.

    This class is normally instantiated by the twitter.Api class and
    returned in a sequence.

    Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"

    Args:
      id:
        The unique id of this direct message. [Optional]
      created_at:
        The time this direct message was posted. [Optional]
      sender_id:
        The id of the twitter user that sent this message. [Optional]
      sender_screen_name:
        The name of the twitter user that sent this message. [Optional]
      recipient_id:
        The id of the twitter that received this message. [Optional]
      recipient_screen_name:
        The name of the twitter that received this message. [Optional]
      text:
        The text of this direct message. [Optional]
    '''
    # Assignments go through the class properties, which store the
    # values on the corresponding underscore-prefixed attributes.
    self.id = id
    self.created_at = created_at
    self.sender_id = sender_id
    self.sender_screen_name = sender_screen_name
    self.recipient_id = recipient_id
    self.recipient_screen_name = recipient_screen_name
    self.text = text
  def GetId(self):
    '''Get the unique id of this direct message.

    Returns:
      The unique id of this direct message
    '''
    return self._id
  def SetId(self, id):
    '''Set the unique id of this direct message.

    Args:
      id:
        The unique id of this direct message
    '''
    self._id = id
  id = property(GetId, SetId,
                doc='The unique id of this direct message.')
  def GetCreatedAt(self):
    '''Get the time this direct message was posted.

    Returns:
      The time this direct message was posted
    '''
    return self._created_at
  def SetCreatedAt(self, created_at):
    '''Set the time this direct message was posted.

    Args:
      created_at:
        The time this direct message was created
    '''
    self._created_at = created_at
  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc='The time this direct message was posted.')
  def GetCreatedAtInSeconds(self):
    '''Get the time this direct message was posted, in seconds since the epoch.

    Returns:
      The time this direct message was posted, in seconds since the epoch.
    '''
    # NOTE(review): relies on module-level calendar/rfc822 imports; rfc822
    # exists only on Python 2 (email.utils.parsedate is the Py3 equivalent).
    return calendar.timegm(rfc822.parsedate(self.created_at))
  created_at_in_seconds = property(GetCreatedAtInSeconds,
                                   doc="The time this direct message was "
                                       "posted, in seconds since the epoch")
  def GetSenderId(self):
    '''Get the unique sender id of this direct message.

    Returns:
      The unique sender id of this direct message
    '''
    return self._sender_id
  def SetSenderId(self, sender_id):
    '''Set the unique sender id of this direct message.

    Args:
      sender_id:
        The unique sender id of this direct message
    '''
    self._sender_id = sender_id
  sender_id = property(GetSenderId, SetSenderId,
                       doc='The unique sender id of this direct message.')
  def GetSenderScreenName(self):
    '''Get the unique sender screen name of this direct message.

    Returns:
      The unique sender screen name of this direct message
    '''
    return self._sender_screen_name
  def SetSenderScreenName(self, sender_screen_name):
    '''Set the unique sender screen name of this direct message.

    Args:
      sender_screen_name:
        The unique sender screen name of this direct message
    '''
    self._sender_screen_name = sender_screen_name
  sender_screen_name = property(GetSenderScreenName, SetSenderScreenName,
                                doc='The unique sender screen name of this direct message.')
  def GetRecipientId(self):
    '''Get the unique recipient id of this direct message.

    Returns:
      The unique recipient id of this direct message
    '''
    return self._recipient_id
  def SetRecipientId(self, recipient_id):
    '''Set the unique recipient id of this direct message.

    Args:
      recipient_id:
        The unique recipient id of this direct message
    '''
    self._recipient_id = recipient_id
  recipient_id = property(GetRecipientId, SetRecipientId,
                          doc='The unique recipient id of this direct message.')
  def GetRecipientScreenName(self):
    '''Get the unique recipient screen name of this direct message.

    Returns:
      The unique recipient screen name of this direct message
    '''
    return self._recipient_screen_name
  def SetRecipientScreenName(self, recipient_screen_name):
    '''Set the unique recipient screen name of this direct message.

    Args:
      recipient_screen_name:
        The unique recipient screen name of this direct message
    '''
    self._recipient_screen_name = recipient_screen_name
recipient_screen_name = property(GetRecipientScreenName, SetRecipientScreenName,
doc='The unique recipient screen name of this direct message.')
def GetText(self):
'''Get the text of this direct message.
Returns:
The text of this direct message.
'''
return self._text
def SetText(self, text):
'''Set the text of this direct message.
Args:
text:
The text of this direct message
'''
self._text = text
text = property(GetText, SetText,
doc='The text of this direct message')
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
try:
return other and \
self.id == other.id and \
self.created_at == other.created_at and \
self.sender_id == other.sender_id and \
self.sender_screen_name == other.sender_screen_name and \
self.recipient_id == other.recipient_id and \
self.recipient_screen_name == other.recipient_screen_name and \
self.text == other.text
except AttributeError:
return False
def __str__(self):
    '''Return the JSON string representation of this twitter.DirectMessage.'''
    return self.AsJsonString()
def AsJsonString(self):
    '''Return a JSON string representation of this twitter.DirectMessage.'''
    data = self.AsDict()
    return simplejson.dumps(data, sort_keys=True)
def AsDict(self):
'''A dict representation of this twitter.DirectMessage instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.DirectMessage instance
'''
data = {}
if self.id:
data['id'] = self.id
if self.created_at:
data['created_at'] = self.created_at
if self.sender_id:
data['sender_id'] = self.sender_id
if self.sender_screen_name:
data['sender_screen_name'] = self.sender_screen_name
if self.recipient_id:
data['recipient_id'] = self.recipient_id
if self.recipient_screen_name:
data['recipient_screen_name'] = self.recipient_screen_name
if self.text:
data['text'] = self.text
return data
@staticmethod
def NewFromJsonDict(data):
    '''Build a twitter.DirectMessage from a JSON-derived dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.DirectMessage instance
    '''
    # dict.get defaults to None, so the explicit None defaults are dropped.
    return DirectMessage(id=data.get('id'),
                         created_at=data.get('created_at'),
                         sender_id=data.get('sender_id'),
                         sender_screen_name=data.get('sender_screen_name'),
                         recipient_id=data.get('recipient_id'),
                         recipient_screen_name=data.get('recipient_screen_name'),
                         text=data.get('text'))
class Hashtag(object):
    '''A class representing a twitter hashtag.'''

    def __init__(self, text=None):
        self.text = text

    @staticmethod
    def NewFromJsonDict(data):
        '''Build a twitter.Hashtag from a JSON-derived dict.

        Args:
          data:
            A JSON dict, as converted from the JSON in the twitter API

        Returns:
          A twitter.Hashtag instance
        '''
        return Hashtag(text=data.get('text'))
class Trend(object):
    '''A class representing a trending topic.'''

    def __init__(self, name=None, query=None, timestamp=None, url=None):
        self.name = name
        self.query = query
        self.timestamp = timestamp
        self.url = url

    def __str__(self):
        return 'Name: %s\nQuery: %s\nTimestamp: %s\nSearch URL: %s\n' % (self.name, self.query, self.timestamp, self.url)

    def __ne__(self, other):
        # Python 2 does not derive != from ==.
        return not self.__eq__(other)

    def __eq__(self, other):
        try:
            # BUG FIX: the last clause compared self.url to itself
            # (always True), so trends differing only in url compared equal.
            return other and \
                   self.name == other.name and \
                   self.query == other.query and \
                   self.timestamp == other.timestamp and \
                   self.url == other.url
        except AttributeError:
            return False

    @staticmethod
    def NewFromJsonDict(data, timestamp=None):
        '''Create a new instance based on a JSON dict.

        Args:
          data:
            A JSON dict
          timestamp:
            Gets set as the timestamp property of the new object

        Returns:
          A twitter.Trend object
        '''
        return Trend(name=data.get('name', None),
                     query=data.get('query', None),
                     url=data.get('url', None),
                     timestamp=timestamp)
class Url(object):
    '''A class representing an URL contained in a tweet.'''

    def __init__(self, url=None, expanded_url=None):
        self.url = url
        self.expanded_url = expanded_url

    @staticmethod
    def NewFromJsonDict(data):
        '''Build a twitter.Url from a JSON-derived dict.

        Args:
          data:
            A JSON dict, as converted from the JSON in the twitter API

        Returns:
          A twitter.Url instance
        '''
        return Url(url=data.get('url'),
                   expanded_url=data.get('expanded_url'))
class Api(object):
    '''A python interface into the Twitter API

    By default, the Api caches results for 1 minute.

    Example usage:

      To create an instance of the twitter.Api class, with no authentication:

        >>> import twitter
        >>> api = twitter.Api()

      To fetch the most recently posted public twitter status messages:

        >>> statuses = api.GetPublicTimeline()
        >>> print [s.user.name for s in statuses]
        [u'DeWitt', u'Kesuke Miyagi', u'ev', u'Buzz Andersen', u'Biz Stone'] #...

      To fetch a single user's public status messages, where "user" is either
      a Twitter "short name" or their user id.

        >>> statuses = api.GetUserTimeline(user)
        >>> print [s.text for s in statuses]

      To use authentication, instantiate the twitter.Api class with a
      consumer key and secret; and the oAuth key and secret:

        >>> api = twitter.Api(consumer_key='twitter consumer key',
                              consumer_secret='twitter consumer secret',
                              access_token_key='the_key_given',
                              access_token_secret='the_key_secret')

      To fetch your friends (after being authenticated):

        >>> users = api.GetFriends()
        >>> print [u.name for u in users]

      To post a twitter status message (after being authenticated):

        >>> status = api.PostUpdate('I love python-twitter!')
        >>> print status.text
        I love python-twitter!

      There are many other methods, including:

        >>> api.PostUpdates(status)
        >>> api.PostDirectMessage(user, text)
        >>> api.GetUser(user)
        >>> api.GetReplies()
        >>> api.GetUserTimeline(user)
        >>> api.GetHomeTimeline()
        >>> api.GetStatus(id)
        >>> api.DestroyStatus(id)
        >>> api.GetFriendsTimeline(user)
        >>> api.GetFriends(user)
        >>> api.GetFollowers()
        >>> api.GetFeatured()
        >>> api.GetDirectMessages()
        >>> api.GetSentDirectMessages()
        >>> api.PostDirectMessage(user, text)
        >>> api.DestroyDirectMessage(id)
        >>> api.DestroyFriendship(user)
        >>> api.CreateFriendship(user)
        >>> api.GetUserByEmail(email)
        >>> api.VerifyCredentials()
    '''

    # How long (in seconds) cached API responses stay valid.
    DEFAULT_CACHE_TIMEOUT = 60  # cache for 1 minute

    # Realm string historically used for HTTP auth challenges.
    _API_REALM = 'Twitter API'
def __init__(self,
             consumer_key=None,
             consumer_secret=None,
             access_token_key=None,
             access_token_secret=None,
             input_encoding=None,
             request_headers=None,
             cache=DEFAULT_CACHE,
             shortner=None,
             base_url=None,
             use_gzip_compression=False,
             debugHTTP=False):
    '''Instantiate a new twitter.Api object.

    Args:
      consumer_key:
        Your Twitter user's consumer_key.
      consumer_secret:
        Your Twitter user's consumer_secret.
      access_token_key:
        The oAuth access token key value you retrieved
        from running get_access_token.py.
      access_token_secret:
        The oAuth access token's secret, also retrieved
        from the get_access_token.py run.
      input_encoding:
        The encoding used to encode input strings. [Optional]
      request_headers:
        A dictionary of additional HTTP request headers. [Optional]
      cache:
        The cache instance to use. Defaults to DEFAULT_CACHE.
        Use None to disable caching. [Optional]
      shortner:
        The shortner instance to use. Defaults to None.
        See shorten_url.py for an example shortner. [Optional]
      base_url:
        The base URL to use to contact the Twitter API.
        Defaults to https://api.twitter.com. [Optional]
      use_gzip_compression:
        Set to True to enable gzip compression for any call
        made to Twitter. Defaults to False. [Optional]
      debugHTTP:
        Set to True to enable debug output from urllib2 when performing
        any HTTP requests. Defaults to False. [Optional]

    Raises:
      TwitterError: if a consumer key is supplied without a complete
        set of oAuth access tokens (required since API v1.1).
    '''
    self.SetCache(cache)
    self._urllib = urllib2
    self._cache_timeout = Api.DEFAULT_CACHE_TIMEOUT
    self._input_encoding = input_encoding
    self._use_gzip = use_gzip_compression
    self._debugHTTP = debugHTTP
    self._oauth_consumer = None
    # Current length of a t.co-wrapped link; used by the status length check.
    self._shortlink_size = 19
    self._InitializeRequestHeaders(request_headers)
    self._InitializeUserAgent()
    self._InitializeDefaultParameters()
    if base_url is None:
        self.base_url = 'https://api.twitter.com/1.1'
    else:
        self.base_url = base_url
    # API v1.1 rejects consumer-key-only auth, so fail fast and point the
    # user at the token-generation helper.  (Message typos fixed:
    # "If your using" -> "If you're using", "the the" -> "the".)
    if consumer_key is not None and (access_token_key is None or
                                     access_token_secret is None):
        print >> sys.stderr, 'Twitter now requires an oAuth Access Token for API calls.'
        print >> sys.stderr, "If you're using this library from a command line utility, please"
        print >> sys.stderr, 'run the included get_access_token.py tool to generate one.'
        raise TwitterError('Twitter requires oAuth Access Token for all API access')
    self.SetCredentials(consumer_key, consumer_secret, access_token_key, access_token_secret)
def SetCredentials(self,
                   consumer_key,
                   consumer_secret,
                   access_token_key=None,
                   access_token_secret=None):
    '''Set the consumer key/secret and oAuth access token for this instance.

    Args:
      consumer_key:
        The consumer_key of the twitter account.
      consumer_secret:
        The consumer_secret for the twitter account.
      access_token_key:
        The oAuth access token key value you retrieved
        from running get_access_token.py.
      access_token_secret:
        The oAuth access token's secret, also retrieved
        from the get_access_token.py run.
    '''
    self._consumer_key = consumer_key
    self._consumer_secret = consumer_secret
    self._access_token_key = access_token_key
    self._access_token_secret = access_token_secret
    self._oauth_consumer = None
    # Only a complete set of four credentials enables signed oAuth requests.
    credentials = (consumer_key, consumer_secret,
                   access_token_key, access_token_secret)
    if all(item is not None for item in credentials):
        self._signature_method_plaintext = oauth.SignatureMethod_PLAINTEXT()
        self._signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()
        self._oauth_token = oauth.Token(key=access_token_key, secret=access_token_secret)
        self._oauth_consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
def ClearCredentials(self):
'''Clear the any credentials for this instance
'''
self._consumer_key = None
self._consumer_secret = None
self._access_token_key = None
self._access_token_secret = None
self._oauth_consumer = None
def GetSearch(self,
              term=None,
              geocode=None,
              since_id=None,
              max_id=None,
              until=None,
              count=15,
              lang=None,
              locale=None,
              result_type="mixed",
              include_entities=None):
    '''Return twitter search results for a given term.

    Args:
      term:
        Term to search by. Optional if you include geocode.
      since_id:
        Returns results with an ID greater than (that is, more recent
        than) the specified ID. [Optional]
      max_id:
        Returns only statuses with an ID less than (that is, older
        than) or equal to the specified ID. [Optional]
      until:
        Returns tweets generated before the given date. Date should be
        formatted as YYYY-MM-DD. [Optional]
      geocode:
        Geolocation information in the form (latitude, longitude, radius)
        [Optional]
      count:
        Number of results to return.  Default is 15 [Optional]
      lang:
        Language for results as ISO 639-1 code.  Default is None (all
        languages) [Optional]
      locale:
        Language of the search query. Currently only 'ja' is effective.
        [Optional]
      result_type:
        Type of result which should be returned.  Default is "mixed".
        Other valid options are "recent" and "popular". [Optional]
      include_entities:
        If True, each tweet will include a node called "entities,"
        with metadata such as user_mentions, urls, and hashtags. [Optional]

    Returns:
      A sequence of twitter.Status instances, one for each message containing
      the term

    Raises:
      TwitterError: if since_id, max_id or count is not convertible to an
        integer.
    '''
    # Build request parameters.
    # Bare "except:" clauses narrowed to the exceptions long()/int() can
    # actually raise for bad input, so unrelated errors are not masked.
    parameters = {}
    if since_id:
        try:
            parameters['since_id'] = long(since_id)
        except (TypeError, ValueError):
            raise TwitterError("since_id must be an integer")
    if max_id:
        try:
            parameters['max_id'] = long(max_id)
        except (TypeError, ValueError):
            raise TwitterError("max_id must be an integer")
    if until:
        parameters['until'] = until
    if lang:
        parameters['lang'] = lang
    if locale:
        parameters['locale'] = locale
    # A search needs at least a term or a geocode to be meaningful.
    if term is None and geocode is None:
        return []
    if term is not None:
        parameters['q'] = term
    if geocode is not None:
        parameters['geocode'] = ','.join(map(str, geocode))
    if include_entities:
        parameters['include_entities'] = 1
    try:
        parameters['count'] = int(count)
    except (TypeError, ValueError):
        raise TwitterError("count must be an integer")
    if result_type in ["mixed", "popular", "recent"]:
        parameters['result_type'] = result_type
    # Make and send requests
    url = '%s/search/tweets.json' % self.base_url
    json = self._FetchUrl(url, parameters=parameters)
    data = self._ParseAndCheckTwitter(json)
    # Return built list of statuses
    return [Status.NewFromJsonDict(x) for x in data['statuses']]
def GetUsersSearch(self,
                   term=None,
                   page=1,
                   count=20,
                   include_entities=None):
    '''Return twitter user search results for a given term.

    Args:
      term:
        Term to search by.
      page:
        Page of results to return. Default is 1 [Optional]
        NOTE(review): accepted but currently never sent to the API --
        confirm whether it should be added to the request parameters.
      count:
        Number of results to return.  Default is 20 [Optional]
      include_entities:
        If True, each tweet will include a node called "entities,"
        with metadata such as user_mentions, urls, and hashtags. [Optional]

    Returns:
      A sequence of twitter.User instances, one for each message containing
      the term

    Raises:
      TwitterError: if count is not convertible to an integer.
    '''
    # Build request parameters
    parameters = {}
    if term is not None:
        parameters['q'] = term
    if include_entities:
        parameters['include_entities'] = 1
    # Bare "except:" narrowed to the exceptions int() can raise.
    try:
        parameters['count'] = int(count)
    except (TypeError, ValueError):
        raise TwitterError("count must be an integer")
    # Make and send requests
    url = '%s/users/search.json' % self.base_url
    json = self._FetchUrl(url, parameters=parameters)
    data = self._ParseAndCheckTwitter(json)
    return [User.NewFromJsonDict(x) for x in data]
def GetTrendsCurrent(self, exclude=None):
    '''Get the current top trending topics (global).

    Args:
      exclude:
        Appends the exclude parameter as a request parameter.
        Currently only exclude=hashtags is supported. [Optional]

    Returns:
      A list with 10 entries. Each entry contains a trend.
    '''
    # WOEID 1 is Twitter's identifier for the worldwide location.
    return self.GetTrendsWoeid(id=1, exclude=exclude)
def GetTrendsWoeid(self, id, exclude=None):
    '''Return the top 10 trending topics for a specific WOEID, if trending
    information is available for it.

    Args:
      id:
        the Yahoo! Where On Earth ID for a location.
      exclude:
        Appends the exclude parameter as a request parameter.
        Currently only exclude=hashtags is supported. [Optional]

    Returns:
      A list with 10 entries. Each entry contains a trend.
    '''
    url = '%s/trends/place.json' % (self.base_url)
    parameters = {'id': id}
    if exclude:
        parameters['exclude'] = exclude
    json = self._FetchUrl(url, parameters=parameters)
    data = self._ParseAndCheckTwitter(json)
    # All trends in the response share a single 'as_of' timestamp.
    as_of = data[0]['as_of']
    return [Trend.NewFromJsonDict(trend, timestamp=as_of)
            for trend in data[0]['trends']]
def GetHomeTimeline(self,
                    count=None,
                    since_id=None,
                    max_id=None,
                    trim_user=False,
                    exclude_replies=False,
                    contributor_details=False,
                    include_entities=True):
    '''Fetch a collection of the most recent Tweets and retweets posted by
    the authenticating user and the users they follow.

    The twitter.Api instance must be authenticated.

    Args:
      count:
        Specifies the number of statuses to retrieve. May not be
        greater than 200. Defaults to 20. [Optional]
      since_id:
        Returns results with an ID greater than (that is, more recent
        than) the specified ID. [Optional]
      max_id:
        Returns results with an ID less than (that is, older than) or
        equal to the specified ID. [Optional]
      trim_user:
        When True, each tweet's user object includes only the author's
        numerical ID. [Optional]
      exclude_replies:
        When True, replies are filtered out of the returned timeline
        (after up to `count` tweets are retrieved). [Optional]
      contributor_details:
        When True, the contributors element includes the screen_name of
        the contributor instead of only the user_id. [Optional]
      include_entities:
        When False, the entities node (user_mentions, urls, hashtags)
        is omitted. [Optional]

    Returns:
      A sequence of twitter.Status instances, one for each message

    Raises:
      TwitterError: if unauthenticated, or if count/since_id/max_id are
        not valid integers, or count exceeds 200.
    '''
    if not self._oauth_consumer:
        raise TwitterError("API must be authenticated.")
    url = '%s/statuses/home_timeline.json' % self.base_url
    parameters = {}
    if count is not None:
        try:
            if int(count) > 200:
                raise TwitterError("'count' may not be greater than 200")
        except ValueError:
            raise TwitterError("'count' must be an integer")
        parameters['count'] = count
    if since_id:
        try:
            parameters['since_id'] = long(since_id)
        except ValueError:
            raise TwitterError("'since_id' must be an integer")
    if max_id:
        try:
            parameters['max_id'] = long(max_id)
        except ValueError:
            raise TwitterError("'max_id' must be an integer")
    # Boolean flags are transmitted as the integer 1 when enabled.
    for enabled, name in ((trim_user, 'trim_user'),
                          (exclude_replies, 'exclude_replies'),
                          (contributor_details, 'contributor_details')):
        if enabled:
            parameters[name] = 1
    if not include_entities:
        parameters['include_entities'] = 'false'
    json = self._FetchUrl(url, parameters=parameters)
    data = self._ParseAndCheckTwitter(json)
    return [Status.NewFromJsonDict(x) for x in data]
def GetUserTimeline(self,
                    user_id=None,
                    screen_name=None,
                    since_id=None,
                    max_id=None,
                    count=None,
                    include_rts=None,
                    trim_user=None,
                    exclude_replies=None):
    '''Fetch the sequence of public Status messages for a single user.

    The twitter.Api instance must be authenticated if the user is private.

    Args:
      user_id:
        Specifies the ID of the user for whom to return the
        user_timeline. Helpful for disambiguating when a valid user ID
        is also a valid screen name. [Optional]
      screen_name:
        Specifies the screen name of the user for whom to return the
        user_timeline. Helpful for disambiguating when a valid screen
        name is also a user ID. [Optional]
      since_id:
        Returns results with an ID greater than (that is, more recent
        than) the specified ID. [Optional]
      max_id:
        Returns only statuses with an ID less than (that is, older
        than) or equal to the specified ID. [Optional]
      count:
        Specifies the number of statuses to retrieve. May not be
        greater than 200. [Optional]
      include_rts:
        If True, the timeline will contain native retweets (if they
        exist) in addition to the standard stream of tweets. [Optional]
      trim_user:
        If True, statuses will only contain the numerical user ID only.
        Otherwise a full user object will be returned for each status.
        [Optional]
      exclude_replies:
        If True, this will prevent replies from appearing in the
        returned timeline. [Optional]

    Returns:
      A sequence of Status instances, one for each message up to count

    Raises:
      TwitterError: if since_id, max_id or count is not convertible to an
        integer.
    '''
    parameters = {}
    url = '%s/statuses/user_timeline.json' % (self.base_url)
    # user_id takes precedence over screen_name when both are supplied.
    if user_id:
        parameters['user_id'] = user_id
    elif screen_name:
        parameters['screen_name'] = screen_name
    # Bare "except:" clauses narrowed to the exceptions long()/int() can
    # actually raise for bad input, so unrelated errors are not masked.
    if since_id:
        try:
            parameters['since_id'] = long(since_id)
        except (TypeError, ValueError):
            raise TwitterError("since_id must be an integer")
    if max_id:
        try:
            parameters['max_id'] = long(max_id)
        except (TypeError, ValueError):
            raise TwitterError("max_id must be an integer")
    if count:
        try:
            parameters['count'] = int(count)
        except (TypeError, ValueError):
            raise TwitterError("count must be an integer")
    if include_rts:
        parameters['include_rts'] = 1
    if trim_user:
        parameters['trim_user'] = 1
    if exclude_replies:
        parameters['exclude_replies'] = 1
    json = self._FetchUrl(url, parameters=parameters)
    data = self._ParseAndCheckTwitter(json)
    return [Status.NewFromJsonDict(x) for x in data]
def GetStatus(self,
              id,
              trim_user=False,
              include_my_retweet=True,
              include_entities=True):
    '''Returns a single status message, specified by the id parameter.

    The twitter.Api instance must be authenticated.

    Args:
      id:
        The numeric ID of the status you are trying to retrieve.
      trim_user:
        When True, the returned tweet's user object includes only the
        author's numerical ID. [Optional]
      include_my_retweet:
        When True, a tweet the authenticating user has retweeted carries
        an additional current_user_retweet node with the source status
        ID. [Optional]
      include_entities:
        When False, the entities node (user_mentions, urls, hashtags)
        is omitted. [Optional]

    Returns:
      A twitter.Status instance representing that status message
    '''
    if not self._oauth_consumer:
        raise TwitterError("API must be authenticated.")
    url = '%s/statuses/show.json' % (self.base_url)
    try:
        parameters = {'id': long(id)}
    except ValueError:
        raise TwitterError("'id' must be an integer.")
    if trim_user:
        parameters['trim_user'] = 1
    if include_my_retweet:
        parameters['include_my_retweet'] = 1
    if not include_entities:
        # Note: this endpoint uses 'none', unlike the timeline endpoints.
        parameters['include_entities'] = 'none'
    json = self._FetchUrl(url, parameters=parameters)
    data = self._ParseAndCheckTwitter(json)
    return Status.NewFromJsonDict(data)
def DestroyStatus(self, id, trim_user=False):
    '''Destroys the status specified by the required ID parameter.

    The twitter.Api instance must be authenticated and the
    authenticating user must be the author of the specified status.

    Args:
      id:
        The numerical ID of the status you're trying to destroy.
      trim_user:
        When True, the returned status includes only the author's
        numerical ID instead of the full user object. [Optional]

    Returns:
      A twitter.Status instance representing the destroyed status message

    Raises:
      TwitterError: if unauthenticated or id is not convertible to an
        integer.
    '''
    if not self._oauth_consumer:
        raise TwitterError("API must be authenticated.")
    # Bare "except:" narrowed to the exceptions long() can raise.
    try:
        post_data = {'id': long(id)}
    except (TypeError, ValueError):
        raise TwitterError("id must be an integer")
    url = '%s/statuses/destroy/%s.json' % (self.base_url, id)
    if trim_user:
        post_data['trim_user'] = 1
    json = self._FetchUrl(url, post_data=post_data)
    data = self._ParseAndCheckTwitter(json)
    return Status.NewFromJsonDict(data)
@classmethod
def _calculate_status_length(cls, status, linksize=19):
dummy_link_replacement = 'https://-%d-chars%s/' % (linksize, '-' * (linksize - 18))
shortened = ' '.join([x if not (x.startswith('http://') or
x.startswith('https://'))
else
dummy_link_replacement
for x in status.split(' ')])
return len(shortened)
def PostUpdate(self, status, in_reply_to_status_id=None, latitude=None, longitude=None, place_id=None, display_coordinates=False, trim_user=False):
    '''Post a twitter status message from the authenticated user.

    The twitter.Api instance must be authenticated.

    https://dev.twitter.com/docs/api/1.1/post/statuses/update

    Args:
      status:
        The message text to be posted.
        Must be less than or equal to 140 characters.
      in_reply_to_status_id:
        The ID of an existing status that the status to be posted is
        in reply to.  This implicitly sets the in_reply_to_user_id
        attribute of the resulting status to the user ID of the
        message being replied to.  Invalid/missing status IDs will be
        ignored. [Optional]
      latitude:
        Latitude coordinate of the tweet in degrees. Will only work
        in conjunction with longitude argument. Both longitude and
        latitude will be ignored by twitter if the user has a false
        geo_enabled setting. [Optional]
      longitude:
        Longitude coordinate of the tweet in degrees. Will only work
        in conjunction with latitude argument. Both longitude and
        latitude will be ignored by twitter if the user has a false
        geo_enabled setting. [Optional]
      place_id:
        A place in the world. These IDs can be retrieved from
        GET geo/reverse_geocode. [Optional]
      display_coordinates:
        Whether or not to put a pin on the exact coordinates a tweet
        has been sent from. [Optional]
      trim_user:
        If True the returned payload will only contain the user IDs,
        otherwise the payload will contain the full user data item.
        [Optional]

    Returns:
      A twitter.Status instance representing the message posted.
    '''
    if not self._oauth_consumer:
        raise TwitterError("The twitter.Api instance must be authenticated.")
    url = '%s/statuses/update.json' % self.base_url
    # Decode byte strings using the configured input encoding; already-unicode
    # input (or no configured encoding) is passed through unchanged.
    if isinstance(status, unicode) or self._input_encoding is None:
        u_status = status
    else:
        u_status = unicode(status, self._input_encoding)
    # NOTE(review): u_status feeds only the length check below, which is
    # deliberately disabled, so it is currently unused.
    # if self._calculate_status_length(u_status, self._shortlink_size) > CHARACTER_LIMIT:
    #   raise TwitterError("Text must be less than or equal to %d characters. "
    #                      "Consider using PostUpdates." % CHARACTER_LIMIT)
    data = {'status': status}
    if in_reply_to_status_id:
        data['in_reply_to_status_id'] = in_reply_to_status_id
    # Twitter ignores latitude/longitude unless both are supplied.
    if latitude is not None and longitude is not None:
        data['lat'] = str(latitude)
        data['long'] = str(longitude)
    if place_id is not None:
        data['place_id'] = str(place_id)
    if display_coordinates:
        data['display_coordinates'] = 'true'
    if trim_user:
        data['trim_user'] = 'true'
    json = self._FetchUrl(url, post_data=data)
    data = self._ParseAndCheckTwitter(json)
    return Status.NewFromJsonDict(data)
def PostUpdates(self, status, continuation=None, **kwargs):
    '''Post one or more twitter status messages from the authenticated user.

    Unlike api.PostUpdate, this method will post multiple status updates
    if the message is longer than 140 characters.

    The twitter.Api instance must be authenticated.

    Args:
      status:
        The message text to be posted.
        May be longer than 140 characters.
      continuation:
        The character string, if any, to be appended to all but the
        last message.  Note that Twitter strips trailing '...' strings
        from messages.  Consider using the unicode \u2026 character
        (horizontal ellipsis) instead. [Defaults to None]
      **kwargs:
        See api.PostUpdate for a list of accepted parameters.

    Returns:
      A of list twitter.Status instance representing the messages posted.
    '''
    if continuation is None:
        continuation = ''
    # Reserve room for the continuation marker on every wrapped line.
    line_length = CHARACTER_LIMIT - len(continuation)
    lines = textwrap.wrap(status, line_length)
    # Every line except the last carries the continuation marker.
    results = [self.PostUpdate(line + continuation, **kwargs)
               for line in lines[:-1]]
    results.append(self.PostUpdate(lines[-1], **kwargs))
    return results
def PostRetweet(self, original_id, trim_user=False):
    '''Retweet a tweet with the Retweet API.

    The twitter.Api instance must be authenticated.

    Args:
      original_id:
        The numerical id of the tweet that will be retweeted
      trim_user:
        If True the returned payload will only contain the user IDs,
        otherwise the payload will contain the full user data item.
        [Optional]

    Returns:
      A twitter.Status instance representing the original tweet with
      retweet details embedded.

    Raises:
      TwitterError: if unauthenticated or original_id is not a positive
        integer.
    '''
    if not self._oauth_consumer:
        raise TwitterError("The twitter.Api instance must be authenticated.")
    # Also catch TypeError so non-numeric input (e.g. None) yields the
    # same TwitterError instead of leaking a TypeError.
    try:
        if int(original_id) <= 0:
            raise TwitterError("'original_id' must be a positive number")
    except (TypeError, ValueError):
        raise TwitterError("'original_id' must be an integer")
    url = '%s/statuses/retweet/%s.json' % (self.base_url, original_id)
    data = {'id': original_id}
    if trim_user:
        data['trim_user'] = 'true'
    json = self._FetchUrl(url, post_data=data)
    data = self._ParseAndCheckTwitter(json)
    return Status.NewFromJsonDict(data)
def GetUserRetweets(self, count=None, since_id=None, max_id=None, trim_user=False):
    '''Fetch the sequence of retweets made by the authenticated user.

    The twitter.Api instance must be authenticated.

    Args:
      count:
        The number of status messages to retrieve. [Optional]
      since_id:
        Returns results with an ID greater than (that is, more recent
        than) the specified ID. [Optional]
      max_id:
        Returns results with an ID less than (that is, older than) or
        equal to the specified ID. [Optional]
      trim_user:
        If True the returned payload will only contain the user IDs,
        otherwise the payload will contain the full user data item.
        [Optional]

    Returns:
      A sequence of twitter.Status instances, one for each message up to count
    '''
    # Delegates to the user timeline with replies excluded and native
    # retweets included.
    return self.GetUserTimeline(since_id=since_id,
                                count=count,
                                max_id=max_id,
                                trim_user=trim_user,
                                exclude_replies=True,
                                include_rts=True)
def GetReplies(self, since_id=None, count=None, max_id=None, trim_user=False):
    '''Get a sequence of status messages representing the 20 most
    recent replies (status updates prefixed with @twitterID) to the
    authenticating user.

    Args:
      since_id:
        Returns results with an ID greater than (that is, more recent
        than) the specified ID. [Optional]
      max_id:
        Returns results with an ID less than (that is, older than) or
        equal to the specified ID. [Optional]
      trim_user:
        If True the returned payload will only contain the user IDs,
        otherwise the payload will contain the full user data item.
        [Optional]

    Returns:
      A sequence of twitter.Status instances, one for each reply to the user.
    '''
    # Delegates to the user timeline with replies kept and native
    # retweets excluded.
    return self.GetUserTimeline(since_id=since_id,
                                count=count,
                                max_id=max_id,
                                trim_user=trim_user,
                                exclude_replies=False,
                                include_rts=False)
def GetRetweets(self, statusid, count=None, trim_user=False):
    '''Returns up to 100 of the first retweets of the tweet identified
    by statusid.

    Args:
      statusid:
        The ID of the tweet for which retweets should be searched for
      count:
        The number of status messages to retrieve. [Optional]
      trim_user:
        If True the returned payload will only contain the user IDs,
        otherwise the payload will contain the full user data item.
        [Optional]

    Returns:
      A list of twitter.Status instances, which are retweets of statusid

    Raises:
      TwitterError: if unauthenticated or count is not convertible to an
        integer.
    '''
    if not self._oauth_consumer:
        # Typo fixed in message: "instsance" -> "instance".
        raise TwitterError("The twitter.Api instance must be authenticated.")
    url = '%s/statuses/retweets/%s.json' % (self.base_url, statusid)
    parameters = {}
    if trim_user:
        parameters['trim_user'] = 'true'
    if count:
        # Bare "except:" narrowed to the exceptions int() can raise.
        try:
            parameters['count'] = int(count)
        except (TypeError, ValueError):
            raise TwitterError("count must be an integer")
    json = self._FetchUrl(url, parameters=parameters)
    data = self._ParseAndCheckTwitter(json)
    return [Status.NewFromJsonDict(s) for s in data]
def GetRetweetsOfMe(self,
count=None,
since_id=None,
max_id=None,
trim_user=False,
include_entities=True,
include_user_entities=True):
'''Returns up to 100 of the most recent tweets of the user that have been
retweeted by others.
Args:
count:
The number of retweets to retrieve, up to 100. If omitted, 20 is
assumed.
since_id:
Returns results with an ID greater than (newer than) this ID.
max_id:
Returns results with an ID less than or equal to this ID.
trim_user:
When True, the user object for each tweet will only be an ID.
include_entities:
When True, the tweet entities will be included.
include_user_entities:
When True, the user entities will be included.
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
url = '%s/statuses/retweets_of_me.json' % self.base_url
parameters = {}
if count is not None:
try:
if int(count) > 100:
raise TwitterError("'count' may not be greater than 100")
except ValueError:
raise TwitterError("'count' must be an integer")
if count:
parameters['count'] = count
if since_id:
parameters['since_id'] = since_id
if max_id:
parameters['max_id'] = max_id
if trim_user:
parameters['trim_user'] = trim_user
if not include_entities:
parameters['include_entities'] = include_entities
if not include_user_entities:
parameters['include_user_entities'] = include_user_entities
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(s) for s in data]
def GetFriends(self, user_id=None, screen_name=None, cursor=-1, skip_status=False, include_user_entities=False):
'''Fetch the sequence of twitter.User instances, one for each friend.
The twitter.Api instance must be authenticated.
Args:
user_id:
The twitter id of the user whose friends you are fetching.
If not specified, defaults to the authenticated user. [Optional]
screen_name:
The twitter name of the user whose friends you are fetching.
If not specified, defaults to the authenticated user. [Optional]
cursor:
Should be set to -1 for the initial call and then is used to
control what result page Twitter returns [Optional(ish)]
skip_status:
If True the statuses will not be returned in the user items.
[Optional]
include_user_entities:
When True, the user entities will be included.
Returns:
A sequence of twitter.User instances, one for each friend
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/friends/list.json' % self.base_url
result = []
parameters = {}
if user_id is not None:
parameters['user_id'] = user_id
if screen_name is not None:
parameters['screen_name'] = screen_name
if skip_status:
parameters['skip_status'] = True
if include_user_entities:
parameters['include_user_entities'] = True
while True:
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
result += [User.NewFromJsonDict(x) for x in data['users']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
else:
break
return result
def GetFriendIDs(self, user_id=None, screen_name=None, cursor=-1, stringify_ids=False, count=None):
'''Returns a list of twitter user id's for every person
the specified user is following.
Args:
user_id:
The id of the user to retrieve the id list for
[Optional]
screen_name:
The screen_name of the user to retrieve the id list for
[Optional]
cursor:
Specifies the Twitter API Cursor location to start at.
Note: there are pagination limits.
[Optional]
stringify_ids:
if True then twitter will return the ids as strings instead of integers.
[Optional]
count:
The number of status messages to retrieve. [Optional]
Returns:
A list of integers, one for each user id.
'''
url = '%s/friends/ids.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
parameters = {}
if user_id is not None:
parameters['user_id'] = user_id
if screen_name is not None:
parameters['screen_name'] = screen_name
if stringify_ids:
parameters['stringify_ids'] = True
if count is not None:
parameters['count'] = count
result = []
while True:
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
result += [x for x in data['ids']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
else:
break
return result
def GetFollowerIDs(self, user_id=None, screen_name=None, cursor=-1, stringify_ids=False, count=None, total_count=None):
'''Returns a list of twitter user id's for every person
that is following the specified user.
Args:
user_id:
The id of the user to retrieve the id list for
[Optional]
screen_name:
The screen_name of the user to retrieve the id list for
[Optional]
cursor:
Specifies the Twitter API Cursor location to start at.
Note: there are pagination limits.
[Optional]
stringify_ids:
if True then twitter will return the ids as strings instead of integers.
[Optional]
count:
The number of user id's to retrieve per API request. Please be aware that
this might get you rate-limited if set to a small number. By default Twitter
will retrieve 5000 UIDs per call.
[Optional]
total_count:
The total amount of UIDs to retrieve. Good if the account has many followers
and you don't want to get rate limited. The data returned might contain more
UIDs if total_count is not a multiple of count (5000 by default).
[Optional]
Returns:
A list of integers, one for each user id.
'''
url = '%s/followers/ids.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
parameters = {}
if user_id is not None:
parameters['user_id'] = user_id
if screen_name is not None:
parameters['screen_name'] = screen_name
if stringify_ids:
parameters['stringify_ids'] = True
if count is not None:
parameters['count'] = count
result = []
while True:
if total_count and total_count < count:
parameters['count'] = total_count
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
result += [x for x in data['ids']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
total_count -= len(data['ids'])
if total_count < 1:
break
else:
break
return result
def GetFollowers(self, user_id=None, screen_name=None, cursor=-1, skip_status=False, include_user_entities=False):
'''Fetch the sequence of twitter.User instances, one for each follower
The twitter.Api instance must be authenticated.
Args:
user_id:
The twitter id of the user whose followers you are fetching.
If not specified, defaults to the authenticated user. [Optional]
screen_name:
The twitter name of the user whose followers you are fetching.
If not specified, defaults to the authenticated user. [Optional]
cursor:
Should be set to -1 for the initial call and then is used to
control what result page Twitter returns [Optional(ish)]
skip_status:
If True the statuses will not be returned in the user items.
[Optional]
include_user_entities:
When True, the user entities will be included.
Returns:
A sequence of twitter.User instances, one for each follower
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/followers/list.json' % self.base_url
result = []
parameters = {}
if user_id is not None:
parameters['user_id'] = user_id
if screen_name is not None:
parameters['screen_name'] = screen_name
if skip_status:
parameters['skip_status'] = True
if include_user_entities:
parameters['include_user_entities'] = True
while True:
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
result += [User.NewFromJsonDict(x) for x in data['users']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
else:
break
return result
def UsersLookup(self, user_id=None, screen_name=None, users=None, include_entities=True):
'''Fetch extended information for the specified users.
Users may be specified either as lists of either user_ids,
screen_names, or twitter.User objects. The list of users that
are queried is the union of all specified parameters.
The twitter.Api instance must be authenticated.
Args:
user_id:
A list of user_ids to retrieve extended information.
[Optional]
screen_name:
A list of screen_names to retrieve extended information.
[Optional]
users:
A list of twitter.User objects to retrieve extended information.
[Optional]
include_entities:
The entities node that may appear within embedded statuses will be
disincluded when set to False.
[Optional]
Returns:
A list of twitter.User objects for the requested users
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
if not user_id and not screen_name and not users:
raise TwitterError("Specify at least one of user_id, screen_name, or users.")
url = '%s/users/lookup.json' % self.base_url
parameters = {}
uids = list()
if user_id:
uids.extend(user_id)
if users:
uids.extend([u.id for u in users])
if len(uids):
parameters['user_id'] = ','.join(["%s" % u for u in uids])
if screen_name:
parameters['screen_name'] = ','.join(screen_name)
if not include_entities:
parameters['include_entities'] = 'false'
json = self._FetchUrl(url, parameters=parameters)
try:
data = self._ParseAndCheckTwitter(json)
except TwitterError, e:
_, e, _ = sys.exc_info()
t = e.args[0]
if len(t) == 1 and ('code' in t[0]) and (t[0]['code'] == 34):
data = []
else:
raise
return [User.NewFromJsonDict(u) for u in data]
def GetUser(self, user_id=None, screen_name=None, include_entities=True):
'''Returns a single user.
The twitter.Api instance must be authenticated.
Args:
user_id:
The id of the user to retrieve.
[Optional]
screen_name:
The screen name of the user for whom to return results for. Either a
user_id or screen_name is required for this method.
[Optional]
include_entities:
if set to False, the 'entities' node will not be included.
[Optional]
Returns:
A twitter.User instance representing that user
'''
url = '%s/users/show.json' % (self.base_url)
parameters = {}
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
if user_id:
parameters['user_id'] = user_id
elif screen_name:
parameters['screen_name'] = screen_name
else:
raise TwitterError("Specify at least one of user_id or screen_name.")
if not include_entities:
parameters['include_entities'] = 'false'
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
return User.NewFromJsonDict(data)
def GetDirectMessages(self, since_id=None, max_id=None, count=None, include_entities=True, skip_status=False):
'''Returns a list of the direct messages sent to the authenticating user.
The twitter.Api instance must be authenticated.
Args:
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
count:
Specifies the number of direct messages to try and retrieve, up to a
maximum of 200. The value of count is best thought of as a limit to the
number of Tweets to return because suspended or deleted content is
removed after the count has been applied. [Optional]
include_entities:
The entities node will not be included when set to False.
[Optional]
skip_status:
When set to True statuses will not be included in the returned user
objects. [Optional]
Returns:
A sequence of twitter.DirectMessage instances
'''
url = '%s/direct_messages.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {}
if since_id:
parameters['since_id'] = since_id
if max_id:
parameters['max_id'] = max_id
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if not include_entities:
parameters['include_entities'] = 'false'
if skip_status:
parameters['skip_status'] = 1
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
return [DirectMessage.NewFromJsonDict(x) for x in data]
def GetSentDirectMessages(self, since_id=None, max_id=None, count=None, page=None, include_entities=True):
'''Returns a list of the direct messages sent by the authenticating user.
The twitter.Api instance must be authenticated.
Args:
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occured since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
count:
Specifies the number of direct messages to try and retrieve, up to a
maximum of 200. The value of count is best thought of as a limit to the
number of Tweets to return because suspended or deleted content is
removed after the count has been applied. [Optional]
page:
Specifies the page of results to retrieve.
Note: there are pagination limits. [Optional]
include_entities:
The entities node will not be included when set to False.
[Optional]
Returns:
A sequence of twitter.DirectMessage instances
'''
url = '%s/direct_messages/sent.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {}
if since_id:
parameters['since_id'] = since_id
if page:
parameters['page'] = page
if max_id:
parameters['max_id'] = max_id
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if not include_entities:
parameters['include_entities'] = 'false'
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
return [DirectMessage.NewFromJsonDict(x) for x in data]
def PostDirectMessage(self, text, user_id=None, screen_name=None):
'''Post a twitter direct message from the authenticated user
The twitter.Api instance must be authenticated. user_id or screen_name
must be specified.
Args:
text: The message text to be posted. Must be less than 140 characters.
user_id:
The ID of the user who should receive the direct message.
[Optional]
screen_name:
The screen name of the user who should receive the direct message.
[Optional]
Returns:
A twitter.DirectMessage instance representing the message posted
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
url = '%s/direct_messages/new.json' % self.base_url
data = {'text': text}
if user_id:
data['user_id'] = user_id
elif screen_name:
data['screen_name'] = screen_name
else:
raise TwitterError("Specify at least one of user_id or screen_name.")
json = self._FetchUrl(url, post_data=data)
data = self._ParseAndCheckTwitter(json)
return DirectMessage.NewFromJsonDict(data)
def DestroyDirectMessage(self, id, include_entities=True):
'''Destroys the direct message specified in the required ID parameter.
The twitter.Api instance must be authenticated, and the
authenticating user must be the recipient of the specified direct
message.
Args:
id: The id of the direct message to be destroyed
Returns:
A twitter.DirectMessage instance representing the message destroyed
'''
url = '%s/direct_messages/destroy.json' % self.base_url
data = {'id': id}
if not include_entities:
data['include_entities'] = 'false'
json = self._FetchUrl(url, post_data=data)
data = self._ParseAndCheckTwitter(json)
return DirectMessage.NewFromJsonDict(data)
def CreateFriendship(self, user_id=None, screen_name=None, follow=True):
'''Befriends the user specified by the user_id or screen_name.
The twitter.Api instance must be authenticated.
Args:
user_id:
A user_id to follow [Optional]
screen_name:
A screen_name to follow [Optional]
follow:
Set to False to disable notifications for the target user
Returns:
A twitter.User instance representing the befriended user.
'''
url = '%s/friendships/create.json' % (self.base_url)
data = {}
if user_id:
data['user_id'] = user_id
elif screen_name:
data['screen_name'] = screen_name
else:
raise TwitterError("Specify at least one of user_id or screen_name.")
if follow:
data['follow'] = 'true'
else:
data['follow'] = 'false'
json = self._FetchUrl(url, post_data=data)
data = self._ParseAndCheckTwitter(json)
return User.NewFromJsonDict(data)
def DestroyFriendship(self, user_id=None, screen_name=None):
'''Discontinues friendship with a user_id or screen_name.
The twitter.Api instance must be authenticated.
Args:
user_id:
A user_id to unfollow [Optional]
screen_name:
A screen_name to unfollow [Optional]
Returns:
A twitter.User instance representing the discontinued friend.
'''
url = '%s/friendships/destroy.json' % self.base_url
data = {}
if user_id:
data['user_id'] = user_id
elif screen_name:
data['screen_name'] = screen_name
else:
raise TwitterError("Specify at least one of user_id or screen_name.")
json = self._FetchUrl(url, post_data=data)
data = self._ParseAndCheckTwitter(json)
return User.NewFromJsonDict(data)
def CreateFavorite(self, status=None, id=None, include_entities=True):
'''Favorites the specified status object or id as the authenticating user.
Returns the favorite status when successful.
The twitter.Api instance must be authenticated.
Args:
id:
The id of the twitter status to mark as a favorite.
[Optional]
status:
The twitter.Status object to mark as a favorite.
[Optional]
include_entities:
The entities node will be omitted when set to False.
Returns:
A twitter.Status instance representing the newly-marked favorite.
'''
url = '%s/favorites/create.json' % self.base_url
data = {}
if id:
data['id'] = id
elif status:
data['id'] = status.id
else:
raise TwitterError("Specify id or status")
if not include_entities:
data['include_entities'] = 'false'
json = self._FetchUrl(url, post_data=data)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
def DestroyFavorite(self, status=None, id=None, include_entities=True):
'''Un-Favorites the specified status object or id as the authenticating user.
Returns the un-favorited status when successful.
The twitter.Api instance must be authenticated.
Args:
id:
The id of the twitter status to unmark as a favorite.
[Optional]
status:
The twitter.Status object to unmark as a favorite.
[Optional]
include_entities:
The entities node will be omitted when set to False.
Returns:
A twitter.Status instance representing the newly-unmarked favorite.
'''
url = '%s/favorites/destroy.json' % self.base_url
data = {}
if id:
data['id'] = id
elif status:
data['id'] = status.id
else:
raise TwitterError("Specify id or status")
if not include_entities:
data['include_entities'] = 'false'
json = self._FetchUrl(url, post_data=data)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
def GetFavorites(self,
user_id=None,
screen_name=None,
count=None,
since_id=None,
max_id=None,
include_entities=True):
'''Return a list of Status objects representing favorited tweets.
By default, returns the (up to) 20 most recent tweets for the
authenticated user.
Args:
user:
The twitter name or id of the user whose favorites you are fetching.
If not specified, defaults to the authenticated user. [Optional]
page:
Specifies the page of results to retrieve.
Note: there are pagination limits. [Optional]
'''
parameters = {}
url = '%s/favorites/list.json' % self.base_url
if user_id:
parameters['user_id'] = user_id
elif screen_name:
parameters['screen_name'] = user_id
if since_id:
try:
parameters['since_id'] = long(since_id)
except:
raise TwitterError("since_id must be an integer")
if max_id:
try:
parameters['max_id'] = long(max_id)
except:
raise TwitterError("max_id must be an integer")
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if include_entities:
parameters['include_entities'] = True
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(x) for x in data]
def GetMentions(self,
count=None,
since_id=None,
max_id=None,
trim_user=False,
contributor_details=False,
include_entities=True):
'''Returns the 20 most recent mentions (status containing @screen_name)
for the authenticating user.
Args:
count:
Specifies the number of tweets to try and retrieve, up to a maximum of
200. The value of count is best thought of as a limit to the number of
tweets to return because suspended or deleted content is removed after
the count has been applied. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns only statuses with an ID less than
(that is, older than) the specified ID. [Optional]
trim_user:
When set to True, each tweet returned in a timeline will include a user
object including only the status authors numerical ID. Omit this
parameter to receive the complete user object.
contributor_details:
If set to True, this parameter enhances the contributors element of the
status response to include the screen_name of the contributor. By
default only the user_id of the contributor is included.
include_entities:
The entities node will be disincluded when set to False.
Returns:
A sequence of twitter.Status instances, one for each mention of the user.
'''
url = '%s/statuses/mentions_timeline.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {}
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if since_id:
try:
parameters['since_id'] = long(since_id)
except:
raise TwitterError("since_id must be an integer")
if max_id:
try:
parameters['max_id'] = long(max_id)
except:
raise TwitterError("max_id must be an integer")
if trim_user:
parameters['trim_user'] = 1
if contributor_details:
parameters['contributor_details'] = 'true'
if not include_entities:
parameters['include_entities'] = 'false'
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(x) for x in data]
def CreateList(self, name, mode=None, description=None):
'''Creates a new list with the give name for the authenticated user.
The twitter.Api instance must be authenticated.
Args:
name:
New name for the list
mode:
'public' or 'private'.
Defaults to 'public'. [Optional]
description:
Description of the list. [Optional]
Returns:
A twitter.List instance representing the new list
'''
url = '%s/lists/create.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {'name': name}
if mode is not None:
parameters['mode'] = mode
if description is not None:
parameters['description'] = description
json = self._FetchUrl(url, post_data=parameters)
data = self._ParseAndCheckTwitter(json)
return List.NewFromJsonDict(data)
def DestroyList(self,
owner_screen_name=False,
owner_id=False,
list_id=None,
slug=None):
'''
Destroys the list identified by list_id or owner_screen_name/owner_id and
slug.
The twitter.Api instance must be authenticated.
Args:
owner_screen_name:
The screen_name of the user who owns the list being requested by a slug.
owner_id:
The user ID of the user who owns the list being requested by a slug.
list_id:
The numerical id of the list.
slug:
You can identify a list by its slug instead of its numerical id. If you
decide to do so, note that you'll also have to specify the list owner
using the owner_id or owner_screen_name parameters.
Returns:
A twitter.List instance representing the removed list.
'''
url = '%s/lists/destroy.json' % self.base_url
data = {}
if list_id:
try:
data['list_id'] = long(list_id)
except:
raise TwitterError("list_id must be an integer")
elif slug:
data['slug'] = slug
if owner_id:
try:
data['owner_id'] = long(owner_id)
except:
raise TwitterError("owner_id must be an integer")
elif owner_screen_name:
data['owner_screen_name'] = owner_screen_name
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
json = self._FetchUrl(url, post_data=data)
data = self._ParseAndCheckTwitter(json)
return List.NewFromJsonDict(data)
def CreateSubscription(self,
owner_screen_name=False,
owner_id=False,
list_id=None,
slug=None):
'''Creates a subscription to a list by the authenticated user
The twitter.Api instance must be authenticated.
Args:
owner_screen_name:
The screen_name of the user who owns the list being requested by a slug.
owner_id:
The user ID of the user who owns the list being requested by a slug.
list_id:
The numerical id of the list.
slug:
You can identify a list by its slug instead of its numerical id. If you
decide to do so, note that you'll also have to specify the list owner
using the owner_id or owner_screen_name parameters.
Returns:
A twitter.List instance representing the list subscribed to
'''
url = '%s/lists/subscribers/create.json' % (self.base_url)
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
data = {}
if list_id:
try:
data['list_id'] = long(list_id)
except:
raise TwitterError("list_id must be an integer")
elif slug:
data['slug'] = slug
if owner_id:
try:
data['owner_id'] = long(owner_id)
except:
raise TwitterError("owner_id must be an integer")
elif owner_screen_name:
data['owner_screen_name'] = owner_screen_name
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
json = self._FetchUrl(url, post_data=data)
data = self._ParseAndCheckTwitter(json)
return List.NewFromJsonDict(data)
def DestroySubscription(self,
owner_screen_name=False,
owner_id=False,
list_id=None,
slug=None):
'''Destroys the subscription to a list for the authenticated user
The twitter.Api instance must be authenticated.
Args:
owner_screen_name:
The screen_name of the user who owns the list being requested by a slug.
owner_id:
The user ID of the user who owns the list being requested by a slug.
list_id:
The numerical id of the list.
slug:
You can identify a list by its slug instead of its numerical id. If you
decide to do so, note that you'll also have to specify the list owner
using the owner_id or owner_screen_name parameters.
Returns:
A twitter.List instance representing the removed list.
'''
url = '%s/lists/subscribers/destroy.json' % (self.base_url)
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
data = {}
if list_id:
try:
data['list_id'] = long(list_id)
except:
raise TwitterError("list_id must be an integer")
elif slug:
data['slug'] = slug
if owner_id:
try:
data['owner_id'] = long(owner_id)
except:
raise TwitterError("owner_id must be an integer")
elif owner_screen_name:
data['owner_screen_name'] = owner_screen_name
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
json = self._FetchUrl(url, post_data=data)
data = self._ParseAndCheckTwitter(json)
return List.NewFromJsonDict(data)
def GetSubscriptions(self, user_id=None, screen_name=None, count=20, cursor=-1):
'''
Obtain a collection of the lists the specified user is subscribed to, 20
lists per page by default. Does not include the user's own lists.
The twitter.Api instance must be authenticated.
Args:
user_id:
The ID of the user for whom to return results for. [Optional]
screen_name:
The screen name of the user for whom to return results for.
[Optional]
count:
The amount of results to return per page. Defaults to 20.
No more than 1000 results will ever be returned in a single page.
cursor:
"page" value that Twitter will use to start building the
list sequence from. -1 to start at the beginning.
Twitter will return in the result the values for next_cursor
and previous_cursor. [Optional]
Returns:
A sequence of twitter.List instances, one for each list
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/lists/subscriptions.json' % (self.base_url)
parameters = {}
try:
parameters['cursor'] = int(cursor)
except:
raise TwitterError("cursor must be an integer")
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if user_id is not None:
try:
parameters['user_id'] = long(user_id)
except:
raise TwitterError('user_id must be an integer')
elif screen_name is not None:
parameters['screen_name'] = screen_name
else:
raise TwitterError('Specify user_id or screen_name')
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
return [List.NewFromJsonDict(x) for x in data['lists']]
def GetLists(self, user_id=None, screen_name=None, count=None, cursor=-1):
'''Fetch the sequence of lists for a user.
The twitter.Api instance must be authenticated.
Args:
user_id:
The ID of the user for whom to return results for. [Optional]
screen_name:
The screen name of the user for whom to return results for.
[Optional]
count:
The amount of results to return per page. Defaults to 20. No more than
1000 results will ever be returned in a single page.
[Optional]
cursor:
"page" value that Twitter will use to start building the
list sequence from. -1 to start at the beginning.
Twitter will return in the result the values for next_cursor
and previous_cursor. [Optional]
Returns:
A sequence of twitter.List instances, one for each list
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/lists/ownerships.json' % self.base_url
result = []
parameters = {}
if user_id is not None:
try:
parameters['user_id'] = long(user_id)
except:
raise TwitterError('user_id must be an integer')
elif screen_name is not None:
parameters['screen_name'] = screen_name
else:
raise TwitterError('Specify user_id or screen_name')
if count is not None:
parameters['count'] = count
while True:
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters=parameters)
data = self._ParseAndCheckTwitter(json)
result += [List.NewFromJsonDict(x) for x in data['lists']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
else:
break
return result
def VerifyCredentials(self):
'''Returns a twitter.User instance if the authenticating user is valid.
Returns:
A twitter.User instance representing that user if the
credentials are valid, None otherwise.
'''
if not self._oauth_consumer:
raise TwitterError("Api instance must first be given user credentials.")
url = '%s/account/verify_credentials.json' % self.base_url
try:
json = self._FetchUrl(url, no_cache=True)
except urllib2.HTTPError, http_error:
if http_error.code == httplib.UNAUTHORIZED:
return None
else:
raise http_error
data = self._ParseAndCheckTwitter(json)
return User.NewFromJsonDict(data)
def SetCache(self, cache):
'''Override the default cache. Set to None to prevent caching.
Args:
cache:
An instance that supports the same API as the twitter._FileCache
'''
if cache == DEFAULT_CACHE:
self._cache = _FileCache()
else:
self._cache = cache
def SetUrllib(self, urllib):
    """Replace the urllib implementation used for all HTTP traffic.

    Args:
      urllib: Any object exposing the same API as the ``urllib2`` module.
    """
    self._urllib = urllib
def SetCacheTimeout(self, cache_timeout):
    """Change how long cached responses are considered fresh.

    Args:
      cache_timeout: Time, in seconds, that responses should be reused.
    """
    self._cache_timeout = cache_timeout
def SetUserAgent(self, user_agent):
    """Set the User-Agent header sent with every request.

    Args:
      user_agent: The string to send to the server as the User-agent.
    """
    self._request_headers['User-Agent'] = user_agent
def SetXTwitterHeaders(self, client, url, version):
    """Set the X-Twitter HTTP headers that will be sent to the server.

    Args:
      client: Client name, sent as the 'X-Twitter-Client' header.
      url: URL of the meta.xml, sent as the 'X-Twitter-Client-URL' header.
      version: Client version, sent as the 'X-Twitter-Client-Version' header.
    """
    self._request_headers.update({
        'X-Twitter-Client': client,
        'X-Twitter-Client-URL': url,
        'X-Twitter-Client-Version': version,
    })
def SetSource(self, source):
    """Suggest the "from source" label shown on the Twitter web site.

    New source values must first be approved, case by case, by the
    Twitter development team.

    Args:
      source: Source name, sent to the server as the 'source' parameter.
    """
    self._default_params['source'] = source
def GetRateLimitStatus(self, resources=None):
    '''Fetch the rate limit status for the currently authorized user.

    Args:
      resources:
        A comma seperated list of resource families you want to know the current
        rate limit disposition of.
        [Optional]

    Returns:
      A dictionary containing the time the limit will reset (reset_time),
      the number of remaining hits allowed before the reset (remaining_hits),
      the number of hits allowed in a 60-minute period (hourly_limit), and
      the time of the reset in seconds since The Epoch (reset_time_in_seconds).
    '''
    parameters = {}
    if resources is not None:
        parameters['resources'] = resources
    url = '%s/application/rate_limit_status.json' % self.base_url
    # Never serve a rate-limit check from the local cache.
    json = self._FetchUrl(url, parameters=parameters, no_cache=True)
    data = self._ParseAndCheckTwitter(json)
    return data
def MaximumHitFrequency(self):
    '''Determines the minimum number of seconds that a program must wait
    before hitting the server again without exceeding the rate_limit
    imposed for the currently authenticated user.

    Returns:
      The minimum second interval that a program must use so as to not
      exceed the rate_limit imposed for the user.
    '''
    rate_status = self.GetRateLimitStatus()
    reset_time = rate_status.get('reset_time', None)
    limit = rate_status.get('remaining_hits', None)
    if reset_time:
        # Put the reset time into a datetime object.  rfc822.parsedate
        # returns a 9-tuple; only the first six fields (year..second) are
        # datetime() constructor arguments.  The previous [:7] slice passed
        # the weekday in as the microsecond value.
        reset = datetime.datetime(*rfc822.parsedate(reset_time)[:6])
        # find the difference in time between now and the reset time + 1 hour
        delta = reset + datetime.timedelta(hours=1) - datetime.datetime.utcnow()
        if not limit:
            return int(delta.seconds)
        # determine the minimum number of seconds allowed as a regular interval
        # NOTE(review): delta.seconds ignores the .days component; this assumes
        # the reset window is always under one day -- confirm.
        max_frequency = int(delta.seconds / limit) + 1
        # return the number of seconds
        return max_frequency
    return 60
def _BuildUrl(self, url, path_elements=None, extra_params=None):
# Break url into constituent parts
(scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
# Add any additional path elements to the path
if path_elements:
# Filter out the path elements that have a value of None
p = [i for i in path_elements if i]
if not path.endswith('/'):
path += '/'
path += '/'.join(p)
# Add any additional query parameters to the query string
if extra_params and len(extra_params) > 0:
extra_query = self._EncodeParameters(extra_params)
# Add it to the existing query
if query:
query += '&' + extra_query
else:
query = extra_query
# Return the rebuilt URL
return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
def _InitializeRequestHeaders(self, request_headers):
if request_headers:
self._request_headers = request_headers
else:
self._request_headers = {}
def _InitializeUserAgent(self):
    # Default User-Agent advertises the urllib and python-twitter versions.
    user_agent = 'Python-urllib/%s (python-twitter/%s)' % \
                 (self._urllib.__version__, __version__)
    self.SetUserAgent(user_agent)
def _InitializeDefaultParameters(self):
self._default_params = {}
def _DecompressGzippedResponse(self, response):
    '''Return the response body, gunzipping it when the server compressed it.'''
    raw_data = response.read()
    if response.headers.get('content-encoding', None) == 'gzip':
        url_data = gzip.GzipFile(fileobj=StringIO.StringIO(raw_data)).read()
    else:
        url_data = raw_data
    return url_data
def _Encode(self, s):
    '''Return s as UTF-8 bytes, decoding via self._input_encoding when set.'''
    if self._input_encoding:
        return unicode(s, self._input_encoding).encode('utf-8')
    else:
        return unicode(s).encode('utf-8')
def _EncodeParameters(self, parameters):
    '''Return a string in key=value&key=value form

    Values of None are not included in the output string.

    Args:
      parameters:
        A dict of (key, value) tuples, where value is encoded as
        specified by self._encoding

    Returns:
      A URL-encoded string in "key=value&key=value" form
    '''
    if parameters is None:
        return None
    else:
        # Drop None values, UTF-8 encode the rest, then urlencode.
        return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in parameters.items() if v is not None]))
def _EncodePostData(self, post_data):
    '''Return a string in key=value&key=value form

    Values are assumed to be encoded in the format specified by self._encoding,
    and are subsequently URL encoded.

    Args:
      post_data:
        A dict of (key, value) tuples, where value is encoded as
        specified by self._encoding

    Returns:
      A URL-encoded string in "key=value&key=value" form
    '''
    if post_data is None:
        return None
    else:
        # Unlike _EncodeParameters, None values are NOT filtered out here.
        return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in post_data.items()]))
def _ParseAndCheckTwitter(self, json):
    """Try and parse the JSON returned from Twitter and return
    an empty dictionary if there is any error. This is a purely
    defensive check because during some Twitter network outages
    it will return an HTML failwhale page."""
    try:
        data = simplejson.loads(json)
        self._CheckForTwitterError(data)
    except ValueError:
        # Not JSON at all -- look for the known outage/error HTML pages.
        if "<title>Twitter / Over capacity</title>" in json:
            raise TwitterError("Capacity Error")
        if "<title>Twitter / Error</title>" in json:
            raise TwitterError("Technical Error")
        raise TwitterError("json decoding")
    return data
def _CheckForTwitterError(self, data):
"""Raises a TwitterError if twitter returns an error message.
Args:
data:
A python dict created from the Twitter json response
Raises:
TwitterError wrapping the twitter error message if one exists.
"""
# Twitter errors are relatively unlikely, so it is faster
# to check first, rather than try and catch the exception
if 'error' in data:
raise TwitterError(data['error'])
if 'errors' in data:
raise TwitterError(data['errors'])
def _FetchUrl(self,
              url,
              post_data=None,
              parameters=None,
              no_cache=None,
              use_gzip_compression=None):
    '''Fetch a URL, optionally caching for a specified time.

    Args:
      url:
        The URL to retrieve
      post_data:
        A dict of (str, unicode) key/value pairs.
        If set, POST will be used.
      parameters:
        A dict whose key/value pairs should encoded and added
        to the query string. [Optional]
      no_cache:
        If true, overrides the cache on the current request
      use_gzip_compression:
        If True, tells the server to gzip-compress the response.
        It does not apply to POST requests.
        Defaults to None, which will get the value to use from
        the instance variable self._use_gzip [Optional]

    Returns:
      A string containing the body of the response.
    '''
    # Build the extra parameters dict
    extra_params = {}
    if self._default_params:
        extra_params.update(self._default_params)
    if parameters:
        extra_params.update(parameters)
    if post_data:
        http_method = "POST"
    else:
        http_method = "GET"
    if self._debugHTTP:
        _debug = 1
    else:
        _debug = 0
    http_handler = self._urllib.HTTPHandler(debuglevel=_debug)
    https_handler = self._urllib.HTTPSHandler(debuglevel=_debug)
    http_proxy = os.environ.get('http_proxy')
    https_proxy = os.environ.get('https_proxy')
    # Proxying is used only when BOTH http_proxy and https_proxy are set.
    if http_proxy is None or https_proxy is None :
        proxy_status = False
    else :
        proxy_status = True
    opener = self._urllib.OpenerDirector()
    opener.add_handler(http_handler)
    opener.add_handler(https_handler)
    if proxy_status is True :
        proxy_handler = self._urllib.ProxyHandler({'http':str(http_proxy), 'https': str(https_proxy)})
        opener.add_handler(proxy_handler)
    if use_gzip_compression is None:
        use_gzip = self._use_gzip
    else:
        use_gzip = use_gzip_compression
    # Set up compression (GET requests only).
    if use_gzip and not post_data:
        opener.addheaders.append(('Accept-Encoding', 'gzip'))
    if self._oauth_consumer is not None:
        if post_data and http_method == "POST":
            parameters = post_data.copy()
        # Build and sign the OAuth request.
        req = oauth.Request.from_consumer_and_token(self._oauth_consumer,
                                                    token=self._oauth_token,
                                                    http_method=http_method,
                                                    http_url=url, parameters=parameters)
        req.sign_request(self._signature_method_hmac_sha1, self._oauth_consumer, self._oauth_token)
        headers = req.to_header()
        if http_method == "POST":
            encoded_post_data = req.to_postdata()
        else:
            encoded_post_data = None
            url = req.to_url()
    else:
        url = self._BuildUrl(url, extra_params=extra_params)
        encoded_post_data = self._EncodePostData(post_data)
    # Open and return the URL immediately if we're not going to cache
    if encoded_post_data or no_cache or not self._cache or not self._cache_timeout:
        response = opener.open(url, encoded_post_data)
        url_data = self._DecompressGzippedResponse(response)
        opener.close()
    else:
        # Unique keys are a combination of the url and the oAuth Consumer Key
        if self._consumer_key:
            key = self._consumer_key + ':' + url
        else:
            key = url
        # See if it has been cached before
        last_cached = self._cache.GetCachedTime(key)
        # If the cached version is outdated then fetch another and store it
        if not last_cached or time.time() >= last_cached + self._cache_timeout:
            try:
                response = opener.open(url, encoded_post_data)
                url_data = self._DecompressGzippedResponse(response)
                self._cache.Set(key, url_data)
            except urllib2.HTTPError, e:
                # NOTE(review): the HTTP error is printed and swallowed; if the
                # fetch failed, url_data may be unbound below -- confirm intent.
                print e
            opener.close()
        else:
            url_data = self._cache.Get(key)
    # Always return the latest version
    return url_data
class _FileCacheError(Exception):
    '''Base exception class for FileCache related errors.

    Raised by _FileCache when the cache directory layout is invalid.
    '''
class _FileCache(object):
    '''Simple on-disk cache keyed by the MD5 hash of the cache key.

    Each entry is stored as one file under a per-user root directory,
    fanned out into DEPTH levels of single-character subdirectories.
    '''

    # Number of single-character directory levels used to fan out entries.
    DEPTH = 3

    def __init__(self, root_directory=None):
        self._InitializeRootDirectory(root_directory)

    def Get(self, key):
        '''Return the cached data for key, or None if absent.'''
        path = self._GetPath(key)
        if os.path.exists(path):
            return open(path).read()
        else:
            return None

    def Set(self, key, data):
        '''Store data under key via a temp file + rename.'''
        path = self._GetPath(key)
        directory = os.path.dirname(path)
        if not os.path.exists(directory):
            os.makedirs(directory)
        if not os.path.isdir(directory):
            raise _FileCacheError('%s exists but is not a directory' % directory)
        temp_fd, temp_path = tempfile.mkstemp()
        temp_fp = os.fdopen(temp_fd, 'w')
        temp_fp.write(data)
        temp_fp.close()
        # Sanity check that the hashed path stayed inside our root.
        if not path.startswith(self._root_directory):
            raise _FileCacheError('%s does not appear to live under %s' %
                                  (path, self._root_directory))
        if os.path.exists(path):
            os.remove(path)
        os.rename(temp_path, path)

    def Remove(self, key):
        '''Delete the cache entry for key, if present.'''
        path = self._GetPath(key)
        if not path.startswith(self._root_directory):
            raise _FileCacheError('%s does not appear to live under %s' %
                                  (path, self._root_directory))
        if os.path.exists(path):
            os.remove(path)

    def GetCachedTime(self, key):
        '''Return the mtime of the entry for key, or None if absent.'''
        path = self._GetPath(key)
        if os.path.exists(path):
            return os.path.getmtime(path)
        else:
            return None

    def _GetUsername(self):
        '''Attempt to find the username in a cross-platform fashion.'''
        try:
            return os.getenv('USER') or \
                   os.getenv('LOGNAME') or \
                   os.getenv('USERNAME') or \
                   os.getlogin() or \
                   'nobody'
        except (AttributeError, IOError, OSError), e:
            # os.getlogin() can fail when there is no controlling terminal.
            return 'nobody'

    def _GetTmpCachePath(self):
        # Per-user cache directory under the system temp directory.
        username = self._GetUsername()
        cache_directory = 'python.cache_' + username
        return os.path.join(tempfile.gettempdir(), cache_directory)

    def _InitializeRootDirectory(self, root_directory):
        if not root_directory:
            root_directory = self._GetTmpCachePath()
        root_directory = os.path.abspath(root_directory)
        if not os.path.exists(root_directory):
            os.mkdir(root_directory)
        if not os.path.isdir(root_directory):
            raise _FileCacheError('%s exists but is not a directory' %
                                  root_directory)
        self._root_directory = root_directory

    def _GetPath(self, key):
        try:
            hashed_key = md5(key).hexdigest()
        except TypeError:
            # Fall back to the pre-2.5 md5 module API.
            hashed_key = md5.new(key).hexdigest()
        return os.path.join(self._root_directory,
                            self._GetPrefix(hashed_key),
                            hashed_key)

    def _GetPrefix(self, hashed_key):
        # e.g. 'abcdef...' -> 'a/b/c' when DEPTH == 3.
        return os.path.sep.join(hashed_key[0:_FileCache.DEPTH])
|
alexlo03/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/junos/junos_vlan.py
|
25
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: junos_vlan
version_added: "2.4"
author: "Ganesh Nalawade (@ganeshrn)"
short_description: Manage VLANs on Juniper JUNOS network devices
description:
- This module provides declarative management of VLANs
on Juniper JUNOS network devices.
options:
name:
description:
- Name of the VLAN.
required: true
vlan_id:
description:
- ID of the VLAN. Range 1-4094.
required: true
l3_interface:
description:
- Name of logical layer 3 interface.
version_added: "2.7"
description:
description:
- Text description of VLANs.
interfaces:
description:
- List of interfaces to check the VLAN has been
configured correctly.
aggregate:
description: List of VLANs definitions.
state:
description:
- State of the VLAN configuration.
default: present
choices: ['present', 'absent']
active:
description:
- Specifies whether or not the configuration is active or deactivated
default: True
type: bool
requirements:
- ncclient (>=v0.5.2)
notes:
- This module requires the netconf system service be enabled on
the remote device being managed.
- Tested against vSRX JUNOS version 15.1X49-D15.4, vqfx-10000 JUNOS Version 15.1X53-D60.4.
- Recommended connection is C(netconf). See L(the Junos OS Platform Options,../network/user_guide/platform_junos.html).
- This module also works with C(local) connections for legacy playbooks.
extends_documentation_fragment: junos
"""
EXAMPLES = """
- name: configure VLAN ID and name
junos_vlan:
vlan_name: test
vlan_id: 20
name: test-vlan
- name: Link to logical layer 3 interface
junos_vlan:
vlan_name: test
vlan_id: 20
l3-interface: vlan.20
name: test-vlan
- name: remove VLAN configuration
junos_vlan:
vlan_name: test
state: absent
- name: deactive VLAN configuration
junos_vlan:
vlan_name: test
state: present
active: False
- name: activate VLAN configuration
junos_vlan:
vlan_name: test
state: present
active: True
- name: Create vlan configuration using aggregate
junos_vlan:
aggregate:
- { vlan_id: 159, name: test_vlan_1, description: test vlan-1 }
- { vlan_id: 160, name: test_vlan_2, description: test vlan-2 }
- name: Delete vlan configuration using aggregate
junos_vlan:
aggregate:
- { vlan_id: 159, name: test_vlan_1 }
- { vlan_id: 160, name: test_vlan_2 }
state: absent
"""
RETURN = """
diff.prepared:
description: Configuration difference before and after applying change.
returned: when configuration is changed and diff option is enabled.
type: string
sample: >
[edit vlans]
+ test-vlan-1 {
+ vlan-id 60;
+ }
"""
import collections
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.junos.junos import junos_argument_spec, tostring
from ansible.module_utils.network.junos.junos import load_config, map_params_to_obj, map_obj_to_ele, to_param_list
from ansible.module_utils.network.junos.junos import commit_configuration, discard_changes, locked_config
# Flag read by the shared junos module code; presumably requests that the
# NETCONF connection be kept open across tasks -- confirm in module_utils.
USE_PERSISTENT_CONNECTION = True
def validate_vlan_id(value, module):
    """Fail the module run when *value* is outside the valid VLAN range 1-4094."""
    # A falsy value (None, 0) is treated as "not supplied" and skipped.
    if not value:
        return
    if not (1 <= value <= 4094):
        module.fail_json(msg='vlan_id must be between 1 and 4094')
def validate_param_values(module, obj, param=None):
if not param:
param = module.params
for key in obj:
# validate the param value (if validator func exists)
validator = globals().get('validate_%s' % key)
if callable(validator):
validator(param.get(key), module)
def main():
    """ main entry point for module execution

    Builds the argument spec, renders each requested VLAN into Junos XML,
    loads the configuration under a lock and commits (unless check mode).
    """
    # Per-VLAN options; also reused as the top-level module arguments.
    element_spec = dict(
        name=dict(),
        vlan_id=dict(type='int'),
        description=dict(),
        interfaces=dict(),
        l3_interface=dict(),
        state=dict(default='present', choices=['present', 'absent']),
        active=dict(default=True, type='bool')
    )

    aggregate_spec = deepcopy(element_spec)
    aggregate_spec['name'] = dict(required=True)

    # remove default in aggregate spec, to handle common arguments
    remove_default_spec(aggregate_spec)

    argument_spec = dict(
        aggregate=dict(type='list', elements='dict', options=aggregate_spec)
    )

    argument_spec.update(element_spec)
    argument_spec.update(junos_argument_spec)

    required_one_of = [['aggregate', 'name']]
    mutually_exclusive = [['aggregate', 'name']]

    module = AnsibleModule(argument_spec=argument_spec,
                           required_one_of=required_one_of,
                           mutually_exclusive=mutually_exclusive,
                           supports_check_mode=True)

    warnings = list()

    result = {'changed': False}

    if warnings:
        result['warnings'] = warnings

    # XPath of a single VLAN element inside the device configuration.
    top = 'vlans/vlan'

    # Maps module parameters to their XML element names / key metadata.
    param_to_xpath_map = collections.OrderedDict()
    param_to_xpath_map.update([
        ('name', {'xpath': 'name', 'is_key': True}),
        ('vlan_id', 'vlan-id'),
        ('l3_interface', 'l3-interface'),
        ('description', 'description')
    ])

    params = to_param_list(module)

    requests = list()
    for param in params:
        # if key doesn't exist in the item, get it from module.params
        for key in param:
            if param.get(key) is None:
                param[key] = module.params[key]

        item = param.copy()
        validate_param_values(module, param_to_xpath_map, param=item)

        want = map_params_to_obj(module, param_to_xpath_map, param=item)
        requests.append(map_obj_to_ele(module, want, top, param=item))

    diff = None
    with locked_config(module):
        # Merge each rendered VLAN request; only the last diff is kept.
        for req in requests:
            diff = load_config(module, tostring(req), warnings, action='merge')

        commit = not module.check_mode
        if diff:
            if commit:
                commit_configuration(module)
            else:
                discard_changes(module)
            result['changed'] = True

            if module._diff:
                result['diff'] = {'prepared': diff}

    module.exit_json(**result)
if __name__ == "__main__":
main()
|
DaikiMaekawa/leap_nextage
|
refs/heads/master
|
scripts/leap_nextage_node.py
|
1
|
#!/usr/bin/env python
#############################################################################################
# Copyright (c) 2014 Daiki Maekawa and ROS JAPAN Users Group All Rights Reserved. #
# #
# @file moveit_command_sender.py #
# @brief This program will run you through using python interface to the move_group node. #
# @author Daiki Maekawa #
# @date 2014-06-10 #
#############################################################################################
import moveit_commander
import rospy
import geometry_msgs.msg
import copy
import tf
import math
import threading
from moveit_commander.exception import MoveItCommanderException
from leap_motion2.msg import Hand
import Leap
class LeapReceiver(threading.Thread):
    '''Background thread that caches the latest Leap Motion hand message.'''

    def __init__(self):
        threading.Thread.__init__(self)
        rospy.Subscriber("leapmotion2/data", Hand, self.__callback_hands_data)
        # Latest Hand message; None until the first message arrives.
        self.__hand_data = None

    def __callback_hands_data(self, msg):
        # Subscriber callback: log and remember the most recent message.
        rospy.loginfo(rospy.get_name() + ": Leap ROS Data %s" % msg)
        self.__hand_data = msg

    @property
    def hand_data(self):
        # Read-only access to the most recently received Hand message.
        return self.__hand_data
class LeapNextage(object):
    '''Drives the Nextage robot arms from Leap Motion hand data via MoveIt.'''

    # Right-hand home position constants (not referenced elsewhere in this file).
    R_HAND_HOME_X = 0
    R_HAND_HOME_Y = 1.0
    R_HAND_HOME_Z = 0

    def __init__(self):
        self.__robot = moveit_commander.RobotCommander()
        self.__rarm = moveit_commander.MoveGroupCommander("right_arm")
        self.__larm = moveit_commander.MoveGroupCommander("left_arm")
        self.__waypoints = []
        # NOTE(review): both names are bound to the SAME Pose instance here,
        # and likewise target_pose_r/target_pose_l below -- any mutation is
        # shared between the two names.  Confirm this aliasing is intended.
        self.__rarm_start_pose = self.__larm_start_pose = geometry_msgs.msg.Pose()
        target_pose_r = target_pose_l = geometry_msgs.msg.Pose()
        target_pose_r.position.x = 0.2035
        target_pose_r.position.y = -0.45
        target_pose_r.position.z = 0.0709
        target_pose_r.orientation.x = -0.003
        target_pose_r.orientation.y = -0.708
        target_pose_r.orientation.z = -0.003
        target_pose_r.orientation.w = 0.706
        # Move the right arm to its starting pose.
        self.__rarm.set_pose_target(target_pose_r)
        self.__rarm.go()
        # Mirror the y coordinate for the left arm starting pose.
        target_pose_l.position.y = -target_pose_r.position.y
        self.__larm.set_pose_target(target_pose_l)
        self.__larm.go()
        # Start receiving Leap Motion data in the background.
        self.__leap_receiver = LeapReceiver()
        self.__leap_receiver.setDaemon(True)
        self.__leap_receiver.start()

    """
    def __callback_hands_data(self, msg):
    rospy.loginfo(rospy.get_name() + ": Leap ROS Data %s" % msg)
    self.__hand_data = msg
    """

    def run(self):
        # Poll the latest hand data and nudge the right arm accordingly.
        while not rospy.is_shutdown():
            if self.__leap_receiver.hand_data:
                hand_data = self.__leap_receiver.hand_data
                pose = self.__rarm.get_current_pose().pose
                print "diff.x = ", hand_data.ypr.y * 0.001
                print "diff.y = ", hand_data.ypr.x * 0.001
                # Scale hand yaw/pitch into small cartesian offsets (mm -> m).
                pose.position.x += -hand_data.ypr.x * 0.001
                pose.position.y += hand_data.ypr.y * 0.001
                print "ypr: "
                print hand_data.ypr
                self.__rarm.set_pose_target(pose)
                self.__rarm.go(wait = False)
                rospy.sleep(2)

    """
    waypoints = []
    future_pose = None
    pose_count = 0
    while not rospy.is_shutdown():
    if self.__leap_receiver.hand_data:
    hand_data = self.__leap_receiver.hand_data
    if not future_pose:
    pose = self.__rarm.get_current_pose().pose
    else:
    pose = future_pose
    #pose.position.y += 0.01
    q = tf.transformations.quaternion_from_euler(hand_data.ypr.z, hand_data.ypr.y, hand_data.ypr.x)
    #q = tf.transformations.quaternion_from_euler(3.14, -1.57, -3.14)
    pose.orientation.x = q[0]
    pose.orientation.y = q[1]
    pose.orientation.z = q[2]
    pose.orientation.w = q[3]
    pose_count += 1
    if pose_count >= 1:
    self.__rarm.set_pose_target(pose)
    self.__rarm.go(wait = False)
    rospy.sleep(1)
    pose_count = 0
    future_pose = None
    else:
    future_pose = pose
    rospy.sleep(0.3)
    """
if __name__ == '__main__':
    try:
        # Initialize the ROS node, then run the Leap->arm control loop.
        rospy.init_node("moveit_command_sender")
        leap_nextage = LeapNextage()
        leap_nextage.run()
    except MoveItCommanderException:
        # MoveIt planning/commander failures end the program quietly.
        pass
    except rospy.ROSInterruptException:
        # Normal shutdown (Ctrl-C / node kill).
        pass
|
cwahbong/onirim-py
|
refs/heads/master
|
tests/test_phase.py
|
1
|
"""
Tests for a phase.
"""
import collections
import pytest
from onirim import card
from onirim import core
from onirim import action
from onirim import exception
from onirim import component
from onirim import agent
def test_setup():
    """Setup deals a 5-card hand and keeps the overall card multiset intact."""
    starting = [
        card.sun(card.Color.red),
        card.moon(card.Color.blue),
        card.key(card.Color.green),
        card.sun(card.Color.yellow),
        card.moon(card.Color.red),
        card.key(card.Color.blue),
        card.nightmare(),
    ]
    content = component.Content(starting)
    flow = core.Flow(core.Core(None, None, content))
    flow.setup()
    assert len(content.hand) == 5
    # Hand + undrawn pile together must still be exactly the starting cards.
    count_content = collections.Counter(content.hand + content.piles.undrawn)
    count_starting = collections.Counter(starting)
    assert count_content == count_starting
class DiscardActor(agent.Actor):
    """Actor that always discards the first card in hand during phase 1."""

    def phase_1_action(self, content):
        return action.Phase1.discard, 0
def test_phase_1_discard_action():
    """A discard action moves the chosen card from hand to the discard pile."""
    discard_actor = DiscardActor()
    content = component.Content(
        undrawn_cards=[],
        hand=[card.sun(card.Color.red), card.moon(card.Color.blue)]
    )
    flow = core.Flow(core.Core(discard_actor, agent.Observer(), content))
    flow.phase_1()
    # Exact resulting state: red sun discarded, blue moon remains in hand.
    assert content == component.Content(
        undrawn_cards=[],
        hand=[card.moon(card.Color.blue)],
        discarded=[card.sun(card.Color.red)]
    )
class WinActor(agent.Actor):
    """Actor that always plays the first card and always opens offered doors."""

    def phase_1_action(self, content):
        return action.Phase1.play, 0

    def open_door(self, content, door_card):
        return True
def test_phase_1_pull_door_win():
    """Playing a card that pulls the eighth door raises the Win exception."""
    win_actor = WinActor()
    content = component.Content(
        undrawn_cards=[card.door(card.Color.red)],
        opened=[card.door(card.Color.red)] * 7,
        explored=[card.sun(card.Color.red), card.moon(card.Color.red)],
        hand=[card.sun(card.Color.red)]
    )
    flow = core.Flow(core.Core(win_actor, agent.Observer(), content))
    with pytest.raises(exception.Win):
        flow.phase_1()
    # (Removed a leftover debug print(content) that cluttered test output.)
def test_phase_2_draw_door_win():
    """Drawing the eighth door in phase 2 raises the Win exception."""
    win_actor = WinActor()
    content = component.Content(
        undrawn_cards=[card.door(card.Color.red)],
        opened=[card.door(card.Color.red)] * 7,
        hand=[card.key(card.Color.red)]
    )
    flow = core.Flow(core.Core(win_actor, agent.Observer(), content))
    with pytest.raises(exception.Win):
        flow.phase_2()
|
postrational/django
|
refs/heads/master
|
tests/foreign_object/tests.py
|
2
|
import datetime
from operator import attrgetter
from .models import Country, Person, Group, Membership, Friendship, Article, ArticleTranslation
from django.test import TestCase
from django.utils.translation import activate
from django import forms
class MultiColumnFKTests(TestCase):
def setUp(self):
    """Create the countries, people and groups shared by all tests."""
    # Creating countries
    self.usa = Country.objects.create(name="United States of America")
    self.soviet_union = Country.objects.create(name="Soviet Union")
    # Creating People.  Bob is built attribute-by-attribute to exercise
    # plain attribute assignment of the composite foreign key.
    # (Removed a stray, unused `Person()` instantiation that did nothing.)
    self.bob = Person()
    self.bob.name = 'Bob'
    self.bob.person_country = self.usa
    self.bob.save()
    self.jim = Person.objects.create(name='Jim', person_country=self.usa)
    self.george = Person.objects.create(name='George', person_country=self.usa)
    self.jane = Person.objects.create(name='Jane', person_country=self.soviet_union)
    self.mark = Person.objects.create(name='Mark', person_country=self.soviet_union)
    self.sam = Person.objects.create(name='Sam', person_country=self.soviet_union)
    # Creating Groups
    self.kgb = Group.objects.create(name='KGB', group_country=self.soviet_union)
    self.cia = Group.objects.create(name='CIA', group_country=self.usa)
    self.republican = Group.objects.create(name='Republican', group_country=self.usa)
    self.democrat = Group.objects.create(name='Democrat', group_country=self.usa)
def test_get_succeeds_on_multicolumn_match(self):
    """Forward descriptor resolves when BOTH id and country_id match."""
    # Membership objects have access to their related Person if both
    # country_ids match between them
    membership = Membership.objects.create(
        membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id)
    person = membership.person
    self.assertEqual((person.id, person.name), (self.bob.id, "Bob"))
def test_get_fails_on_multicolumn_mismatch(self):
    """Forward descriptor raises DoesNotExist when country_ids differ."""
    # Membership objects returns DoesNotExist error when the there is no
    # Person with the same id and country_id
    membership = Membership.objects.create(
        membership_country_id=self.usa.id, person_id=self.jane.id, group_id=self.cia.id)
    self.assertRaises(Person.DoesNotExist, getattr, membership, 'person')
def test_reverse_query_returns_correct_result(self):
    """Reverse accessor only returns memberships whose country matches."""
    # Creating a valid membership because it has the same country has the person
    Membership.objects.create(
        membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id)
    # Creating an invalid membership because it has a different country has the person
    Membership.objects.create(
        membership_country_id=self.soviet_union.id, person_id=self.bob.id,
        group_id=self.republican.id)
    self.assertQuerysetEqual(
        self.bob.membership_set.all(), [
            self.cia.id
        ],
        attrgetter("group_id")
    )
def test_query_filters_correctly(self):
    """Forward filter through the composite FK respects both columns."""
    # Creating a to valid memberships
    Membership.objects.create(
        membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id)
    Membership.objects.create(
        membership_country_id=self.usa.id, person_id=self.jim.id,
        group_id=self.cia.id)
    # Creating an invalid membership
    Membership.objects.create(membership_country_id=self.soviet_union.id,
                              person_id=self.george.id, group_id=self.cia.id)
    # Only Bob matches name__contains='o' among the VALID memberships.
    self.assertQuerysetEqual(
        Membership.objects.filter(person__name__contains='o'), [
            self.bob.id
        ],
        attrgetter("person_id")
    )
def test_reverse_query_filters_correctly(self):
    """Reverse filter through the composite FK respects both columns."""
    timemark = datetime.datetime.utcnow()
    timedelta = datetime.timedelta(days=1)
    # Creating a to valid memberships
    Membership.objects.create(
        membership_country_id=self.usa.id, person_id=self.bob.id,
        group_id=self.cia.id, date_joined=timemark - timedelta)
    Membership.objects.create(
        membership_country_id=self.usa.id, person_id=self.jim.id,
        group_id=self.cia.id, date_joined=timemark + timedelta)
    # Creating an invalid membership
    Membership.objects.create(
        membership_country_id=self.soviet_union.id, person_id=self.george.id,
        group_id=self.cia.id, date_joined=timemark + timedelta)
    # Only Jim joined after timemark via a VALID membership.
    self.assertQuerysetEqual(
        Person.objects.filter(membership__date_joined__gte=timemark), [
            'Jim'
        ],
        attrgetter('name')
    )
def test_forward_in_lookup_filters_correctly(self):
    """__in lookups (list and subquery) match only country-valid rows."""
    Membership.objects.create(membership_country_id=self.usa.id, person_id=self.bob.id,
                              group_id=self.cia.id)
    Membership.objects.create(membership_country_id=self.usa.id, person_id=self.jim.id,
                              group_id=self.cia.id)
    # Creating an invalid membership
    Membership.objects.create(
        membership_country_id=self.soviet_union.id, person_id=self.george.id,
        group_id=self.cia.id)
    # George's membership is country-mismatched, so only Jim is returned.
    self.assertQuerysetEqual(
        Membership.objects.filter(person__in=[self.george, self.jim]), [
            self.jim.id,
        ],
        attrgetter('person_id')
    )
    # Same lookup via a queryset instead of an instance list.
    self.assertQuerysetEqual(
        Membership.objects.filter(person__in=Person.objects.filter(name='Jim')), [
            self.jim.id,
        ],
        attrgetter('person_id')
    )
def test_select_related_foreignkey_forward_works(self):
    """select_related over the composite FK needs exactly one query."""
    Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
    Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
    with self.assertNumQueries(1):
        people = [m.person for m in Membership.objects.select_related('person')]
    # Must match the lazily-fetched equivalent.
    normal_people = [m.person for m in Membership.objects.all()]
    self.assertEqual(people, normal_people)
def test_prefetch_foreignkey_forward_works(self):
    """prefetch_related over the forward composite FK needs two queries."""
    Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
    Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
    with self.assertNumQueries(2):
        people = [m.person for m in Membership.objects.prefetch_related('person')]
    normal_people = [m.person for m in Membership.objects.all()]
    self.assertEqual(people, normal_people)
def test_prefetch_foreignkey_reverse_works(self):
    """prefetch_related over the reverse composite FK needs two queries."""
    Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
    Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
    with self.assertNumQueries(2):
        membership_sets = [list(p.membership_set.all())
                           for p in Person.objects.prefetch_related('membership_set')]
    normal_membership_sets = [list(p.membership_set.all()) for p in Person.objects.all()]
    self.assertEqual(membership_sets, normal_membership_sets)
def test_m2m_through_forward_returns_valid_members(self):
    """Forward m2m-through returns members whose memberships are valid."""
    # We start out by making sure that the Group 'CIA' has no members.
    self.assertQuerysetEqual(
        self.cia.members.all(),
        []
    )
    Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
    Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.cia)
    # Let's check to make sure that it worked.  Bob and Jim should be members of the CIA.
    self.assertQuerysetEqual(
        self.cia.members.all(), [
            'Bob',
            'Jim'
        ], attrgetter("name")
    )
def test_m2m_through_reverse_returns_valid_members(self):
    """Reverse m2m-through returns groups reached via valid memberships."""
    # We start out by making sure that Bob is in no groups.
    self.assertQuerysetEqual(
        self.bob.groups.all(),
        []
    )
    Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
    Membership.objects.create(membership_country=self.usa, person=self.bob,
                              group=self.republican)
    # Bob should be in the CIA and a Republican
    self.assertQuerysetEqual(
        self.bob.groups.all(), [
            'CIA',
            'Republican'
        ], attrgetter("name")
    )
def test_m2m_through_forward_ignores_invalid_members(self):
    """Forward m2m-through excludes country-mismatched memberships."""
    # We start out by making sure that the Group 'CIA' has no members.
    self.assertQuerysetEqual(
        self.cia.members.all(),
        []
    )
    # Something adds jane to group CIA but Jane is in Soviet Union which isn't CIA's country
    Membership.objects.create(membership_country=self.usa, person=self.jane, group=self.cia)
    # There should still be no members in CIA
    self.assertQuerysetEqual(
        self.cia.members.all(),
        []
    )
def test_m2m_through_reverse_ignores_invalid_members(self):
    """Reverse m2m-through excludes country-mismatched memberships."""
    # We start out by making sure that Jane has no groups.
    self.assertQuerysetEqual(
        self.jane.groups.all(),
        []
    )
    # Something adds jane to group CIA but Jane is in Soviet Union which isn't CIA's country
    Membership.objects.create(membership_country=self.usa, person=self.jane, group=self.cia)
    # Jane should still not be in any groups
    self.assertQuerysetEqual(
        self.jane.groups.all(),
        []
    )
    def test_m2m_through_on_self_works(self):
        """Self-referential m2m through Friendship: a matching friendship row
        makes the target person appear in ``friends``."""
        self.assertQuerysetEqual(
            self.jane.friends.all(),
            []
        )
        # Country columns on both ends are taken from each person's own
        # country, so the row matches the composite join condition.
        Friendship.objects.create(
            from_friend_country=self.jane.person_country, from_friend=self.jane,
            to_friend_country=self.george.person_country, to_friend=self.george)
        self.assertQuerysetEqual(
            self.jane.friends.all(),
            ['George'], attrgetter("name")
        )
    def test_m2m_through_on_self_ignores_mismatch_columns(self):
        """A Friendship row whose country ids are swapped relative to the
        persons' own countries must not match the composite join."""
        self.assertQuerysetEqual(self.jane.friends.all(), [])
        # Note that we use ids instead of instances. This is because instances on ForeignObject
        # properties will set all related field off of the given instance
        Friendship.objects.create(
            from_friend_id=self.jane.id, to_friend_id=self.george.id,
            to_friend_country_id=self.jane.person_country_id,
            from_friend_country_id=self.george.person_country_id)
        self.assertQuerysetEqual(self.jane.friends.all(), [])
    def test_prefetch_related_m2m_foward_works(self):
        """prefetch_related('members') returns the same per-group member lists
        as issuing one query per group, using only 2 queries total.

        NOTE(review): "foward" in the method name is a typo for "forward";
        left unchanged because renaming would change the test id.
        """
        Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
        Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
        # One query for the groups plus one for the prefetched members.
        with self.assertNumQueries(2):
            members_lists = [list(g.members.all())
                             for g in Group.objects.prefetch_related('members')]
        normal_members_lists = [list(g.members.all()) for g in Group.objects.all()]
        self.assertEqual(members_lists, normal_members_lists)
    def test_prefetch_related_m2m_reverse_works(self):
        """prefetch_related('groups') on the reverse descriptor matches the
        unprefetched results, again in exactly 2 queries."""
        Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
        Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
        with self.assertNumQueries(2):
            groups_lists = [list(p.groups.all()) for p in Person.objects.prefetch_related('groups')]
        normal_groups_lists = [list(p.groups.all()) for p in Person.objects.all()]
        self.assertEqual(groups_lists, normal_groups_lists)
    def test_translations(self):
        """The ``active_translation`` foreign object follows the currently
        active language (set via ``activate``) when selecting, filtering and
        assigning translations."""
        activate('fi')
        a1 = Article.objects.create(pub_date=datetime.date.today())
        at1_fi = ArticleTranslation(article=a1, lang='fi', title='Otsikko', body='Diipadaapa')
        at1_fi.save()
        at2_en = ArticleTranslation(article=a1, lang='en', title='Title', body='Lalalalala')
        at2_en.save()
        # select_related must pull the active (Finnish) translation in the
        # same single query as the Article itself.
        with self.assertNumQueries(1):
            fetched = Article.objects.select_related('active_translation').get(
                active_translation__title='Otsikko')
            self.assertTrue(fetched.active_translation.title == 'Otsikko')
        a2 = Article.objects.create(pub_date=datetime.date.today())
        at2_fi = ArticleTranslation(article=a2, lang='fi', title='Atsikko', body='Diipadaapa',
                                    abstract='dipad')
        at2_fi.save()
        a3 = Article.objects.create(pub_date=datetime.date.today())
        at3_en = ArticleTranslation(article=a3, lang='en', title='A title', body='lalalalala',
                                    abstract='lala')
        at3_en.save()
        # Test model initialization with active_translation field.
        a3 = Article(id=a3.id, pub_date=a3.pub_date, active_translation=at3_en)
        a3.save()
        # Under 'fi': a1's Finnish translation has no abstract; a3 has no
        # Finnish translation at all, so its abstract is NULL via outer join.
        self.assertEqual(
            list(Article.objects.filter(active_translation__abstract=None)),
            [a1, a3])
        # Restricting to rows where a translation actually exists drops a3.
        self.assertEqual(
            list(Article.objects.filter(active_translation__abstract=None,
                                        active_translation__pk__isnull=False)),
            [a1])
        activate('en')
        # Under 'en' the abstract-less set changes: a1's English translation
        # lacks an abstract and a2 has no English translation.
        self.assertEqual(
            list(Article.objects.filter(active_translation__abstract=None)),
            [a1, a2])
class FormsTests(TestCase):
    # ForeignObjects should not have any form fields, currently the user needs
    # to manually deal with the foreignobject relation.
    class ArticleForm(forms.ModelForm):
        # ModelForm over Article with every concrete field included.
        class Meta:
            model = Article
            fields = '__all__'
    def test_foreign_object_form(self):
        """Non-concrete (foreign object) fields must not produce form fields,
        and the form must still create/update Article instances normally."""
        # A very crude test checking that the non-concrete fields do not get form fields.
        form = FormsTests.ArticleForm()
        self.assertIn('id_pub_date', form.as_table())
        self.assertNotIn('active_translation', form.as_table())
        # Creating a new Article through the form.
        form = FormsTests.ArticleForm(data={'pub_date': str(datetime.date.today())})
        self.assertTrue(form.is_valid())
        a = form.save()
        self.assertEqual(a.pub_date, datetime.date.today())
        # Updating the same instance keeps the primary key stable.
        form = FormsTests.ArticleForm(instance=a, data={'pub_date': '2013-01-01'})
        a2 = form.save()
        self.assertEqual(a.pk, a2.pk)
        self.assertEqual(a2.pub_date, datetime.date(2013, 1, 1))
|
krsjoseph/youtube-dl
|
refs/heads/master
|
devscripts/gh-pages/update-feed.py
|
159
|
#!/usr/bin/env python3
from __future__ import unicode_literals
import datetime
import io
import json
import textwrap
# Atom feed skeleton; @TIMESTAMP@ and @ENTRIES@ are replaced below.
atom_template = textwrap.dedent("""\
    <?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
        <link rel="self" href="http://rg3.github.io/youtube-dl/update/releases.atom" />
        <title>youtube-dl releases</title>
        <id>https://yt-dl.org/feed/youtube-dl-updates-feed</id>
        <updated>@TIMESTAMP@</updated>
        @ENTRIES@
    </feed>""")

# Template for a single release entry; @VERSION@ and @TIMESTAMP@ are
# replaced per version.
entry_template = textwrap.dedent("""
    <entry>
        <id>https://yt-dl.org/feed/youtube-dl-updates-feed/youtube-dl-@VERSION@</id>
        <title>New version @VERSION@</title>
        <link href="http://rg3.github.io/youtube-dl" />
        <content type="xhtml">
            <div xmlns="http://www.w3.org/1999/xhtml">
                Downloads available at <a href="https://yt-dl.org/downloads/@VERSION@/">https://yt-dl.org/downloads/@VERSION@/</a>
            </div>
        </content>
        <author>
            <name>The youtube-dl maintainers</name>
        </author>
        <updated>@TIMESTAMP@</updated>
    </entry>
    """)


def version_timestamp(version):
    """Return an Atom timestamp string encoding release *version*.

    Versions look like ``YYYY.MM.DD`` or ``YYYY.MM.DD.patch``.  Some
    historical versions encode an invalid calendar day (e.g. ``.30`` in
    February); in that case the day is walked backwards until it is valid
    and the number of faked days is stored in the minutes field.  The
    patch level (0 when absent, 1 when non-numeric) is stored in the
    seconds field, so every version maps to a unique, sortable timestamp.
    """
    fields = version.split('.')
    year, month, day = map(int, fields[:3])
    faked = 0
    patchlevel = 0
    # Walk the day back until it names a real date.
    while True:
        try:
            datetime.date(year, month, day)
        except ValueError:
            day -= 1
            faked += 1
            assert day > 0
            continue
        break
    if len(fields) >= 4:
        try:
            patchlevel = int(fields[3])
        except ValueError:
            # Non-numeric suffix: record it as patch level 1.
            patchlevel = 1
    return '%04d-%02d-%02dT00:%02d:%02dZ' % (year, month, day, faked, patchlevel)


def main():
    """Regenerate update/releases.atom from update/versions.json."""
    # NOTE(review): datetime.now() is local time but the suffix claims
    # UTC ('Z') — preserved from the original behavior; confirm whether
    # utcnow() was intended.
    now_iso = datetime.datetime.now().isoformat() + 'Z'
    feed = atom_template.replace('@TIMESTAMP@', now_iso)

    # Close the versions file deterministically (the original leaked the
    # handle from a bare open()).
    with io.open('update/versions.json', encoding='utf-8') as versions_file:
        versions_info = json.load(versions_file)
    versions = sorted(versions_info['versions'])

    entries = []
    for v in versions:
        entry = entry_template.replace('@TIMESTAMP@', version_timestamp(v))
        entries.append(entry.replace('@VERSION@', v))

    entries_str = textwrap.indent(''.join(entries), '\t')
    feed = feed.replace('@ENTRIES@', entries_str)

    with io.open('update/releases.atom', 'w', encoding='utf-8') as atom_file:
        atom_file.write(feed)


if __name__ == '__main__':
    main()
|
Soya93/Extract-Refactoring
|
refs/heads/master
|
python/helpers/pydev/tests_pydevd/test_check_pydevconsole.py
|
21
|
import threading
import unittest
import os
import sys
try:
import pydevconsole
except:
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
import pydevconsole
from _pydev_bundle.pydev_imports import xmlrpclib, SimpleXMLRPCServer
from _pydev_bundle.pydev_localhost import get_localhost
# Detect the name of the interactive-input builtin: Python 2 exposes
# ``raw_input``; on Python 3 referencing it raises NameError, so fall
# back to ``input``.  The name is sent to the console server verbatim.
try:
    raw_input
    raw_input_name = 'raw_input'
except NameError:
    raw_input_name = 'input'
#=======================================================================================================================
# Test
#=======================================================================================================================
class Test(unittest.TestCase):
    """Integration test for pydevconsole's XML-RPC console server.

    Starts the console server in one daemon thread and a small XML-RPC
    client server in another, then drives the console over XML-RPC and
    checks that calling ``raw_input``/``input`` triggers a RequestInput
    round-trip back to the client.
    """
    def start_client_thread(self, client_port):
        """Start a daemon thread serving RequestInput/NotifyFinished on
        *client_port* and return it.

        The returned thread carries two probe attributes the test polls:
        ``requested_input`` (flag) and ``notified_finished`` (counter).
        """
        class ClientThread(threading.Thread):
            def __init__(self, client_port):
                threading.Thread.__init__(self)
                self.client_port = client_port
            def run(self):
                class HandleRequestInput:
                    def RequestInput(self):
                        client_thread.requested_input = True
                        return 'RequestInput: OK'
                    def NotifyFinished(self, *args, **kwargs):
                        client_thread.notified_finished += 1
                        return 1
                handle_request_input = HandleRequestInput()
                from _pydev_bundle import pydev_localhost
                self.client_server = client_server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), self.client_port), logRequests=False)
                client_server.register_function(handle_request_input.RequestInput)
                client_server.register_function(handle_request_input.NotifyFinished)
                client_server.serve_forever()
            def shutdown(self):
                # Deliberately a no-op: the thread is a daemon, so the
                # process exits without an orderly shutdown.  The real
                # shutdown call below is intentionally unreachable.
                return
                self.client_server.shutdown()
        client_thread = ClientThread(client_port)
        client_thread.requested_input = False
        client_thread.notified_finished = 0
        client_thread.setDaemon(True)
        client_thread.start()
        return client_thread
    def get_free_addresses(self):
        """Return two TCP port numbers that were free at probe time.

        NOTE(review): the sockets are closed before the ports are used, so
        another process could grab them in between (inherent race).
        """
        import socket
        s = socket.socket()
        s.bind(('', 0))
        port0 = s.getsockname()[1]
        s1 = socket.socket()
        s1.bind(('', 0))
        port1 = s1.getsockname()[1]
        s.close()
        s1.close()
        return port0, port1
    def test_server(self):
        """Execute code lines over XML-RPC and verify the input() call is
        routed to the client's RequestInput handler."""
        # Just making sure that the singleton is created in this thread.
        try:
            from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend
        except:
            sys.stderr.write('Skipped test because IPython could not be imported.')
            return
        get_pydev_frontend(get_localhost(), 0)
        client_port, server_port = self.get_free_addresses()
        class ServerThread(threading.Thread):
            def __init__(self, client_port, server_port):
                threading.Thread.__init__(self)
                self.client_port = client_port
                self.server_port = server_port
            def run(self):
                from _pydev_bundle import pydev_localhost
                print('Starting server with:', pydev_localhost.get_localhost(), self.server_port, self.client_port)
                pydevconsole.start_server(pydev_localhost.get_localhost(), self.server_port, self.client_port)
        server_thread = ServerThread(client_port, server_port)
        server_thread.setDaemon(True)
        server_thread.start()
        client_thread = self.start_client_thread(client_port) #@UnusedVariable
        try:
            import time
            time.sleep(.3) #let's give it some time to start the threads
            from _pydev_bundle import pydev_localhost
            server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), server_port))
            server.execLine("import sys; print('Running with: %s %s' % (sys.executable or sys.platform, sys.version))")
            server.execLine('class Foo:')
            server.execLine('    pass')
            server.execLine('')
            server.execLine('foo = Foo()')
            server.execLine('a = %s()' % raw_input_name)
            initial = time.time()
            while not client_thread.requested_input:
                if time.time() - initial > 2:
                    raise AssertionError('Did not get the return asked before the timeout.')
                time.sleep(.1)
            frame_xml = server.getFrame()
            # assertIn replaces the deprecated assert_ alias; also fixes the
            # "Did not fid" typo in the failure message.
            self.assertIn('RequestInput', frame_xml, 'Did not find RequestInput in:\n%s' % (frame_xml,))
        finally:
            client_thread.shutdown()
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
    # Run the suite with unittest's default runner when executed directly.
    unittest.main()
|
burzillibus/RobHome
|
refs/heads/master
|
venv/lib/python2.7/site-packages/django/contrib/staticfiles/management/__init__.py
|
12133432
| |
ArdAmb/campamento_base
|
refs/heads/master
|
base/tests/views/__init__.py
|
12133432
| |
murraymeehan/marsyas
|
refs/heads/master
|
src/django/birdsong/application/birdsong/urls.py
|
7
|
from django.conf.urls.defaults import *

# Enable the admin:
from django.contrib import admin
admin.autodiscover()

# URL routes for the birdsong application.
# Fix: the original file listed the '^$' pattern twice and the '^images/'
# static pattern twice; Django matches patterns top-down, so the later
# duplicates could never match and have been removed.
urlpatterns = patterns(
    '',
    # Site root.
    (r'^$', 'birdsong.main.views.index'),
    # Recording browsing and annotation views.
    (r'^recordings$', 'birdsong.recordings.views.index'),
    (r'^recordings/show/(?P<recording_id>\d+)$', 'birdsong.recordings.views.show'),
    (r'^recordings/show_annotations/(?P<recording_id>\d+)/(?P<user_id>\d+)$', 'birdsong.recordings.views.show_annotations'),
    (r'^recordings/annotations/(?P<recording_id>\d+)$', 'birdsong.recordings.views.annotations'),
    (r'^recordings/pitchcontour/(?P<annotation_id>\d+)$', 'birdsong.recordings.views.pitchcontour'),
    (r'^annotations$', 'birdsong.annotations.views.index'),
    (r'^annotations/show/(?P<annotation_id>\d+)$', 'birdsong.annotations.views.show'),
    (r'^annotations/update$', 'birdsong.annotations.views.update'),
    # Pitch detection
    (r'^dtw/$', 'birdsong.dtw.views.index'),
    (r'^dtw/pitchcontour/(?P<median>\d+)$', 'birdsong.dtw.views.pitchcontour'),
    (r'^dtw/pitchcontour_embed$', 'birdsong.dtw.views.pitchcontour_embed'),
    # Static assets served straight off the filesystem (development setup:
    # paths are hard-coded to a developer's home directory).
    (r'^images/(?P<path>.*)$',
     'django.views.static.serve', {'document_root': '/home/sness/dDEV/birdsong/public_html/images'}),
    (r'^flash/(?P<path>.*)$',
     'django.views.static.serve', {'document_root': '/home/sness/dDEV/birdsong/public_html/flash'}),
    (r'^src/(?P<path>.*)$',
     'django.views.static.serve', {'document_root': '/home/sness/dDEV/birdsong/public_html/src'}),
    (r'^stylesheets/(?P<path>.*)$',
     'django.views.static.serve', {'document_root': '/home/sness/dDEV/birdsong/public_html/stylesheets'}),
    (r'^javascript/(?P<path>.*)$',
     'django.views.static.serve', {'document_root': '/home/sness/dDEV/birdsong/public_html/javascript'}),
    (r'^assets/(?P<path>.*)$',
     'django.views.static.serve', {'document_root': '/home/sness/dDEV/birdsong/public_html/assets'}),
    (r'^songs/(?P<path>.*)$',
     'django.views.static.serve', {'document_root': '/home/sness/dDEV/birdsong/public_html/songs'}),
    # Admin site
    (r'^admin/', include(admin.site.urls))
)
|
cbertinato/pandas
|
refs/heads/master
|
pandas/tests/reshape/merge/test_multi.py
|
1
|
from collections import OrderedDict
import numpy as np
from numpy import nan
from numpy.random import randn
import pytest
import pandas as pd
from pandas import DataFrame, Index, MultiIndex, Series
from pandas.core.reshape.concat import concat
from pandas.core.reshape.merge import merge
import pandas.util.testing as tm
@pytest.fixture
def left():
    """left dataframe (not multi-indexed) for multi-index join tests"""
    # a little relevant example with NAs
    key1 = ['bar', 'bar', 'bar', 'foo', 'foo', 'baz', 'baz', 'qux',
            'qux', 'snap']
    key2 = ['two', 'one', 'three', 'one', 'two', 'one', 'two', 'two',
            'three', 'one']
    # Random payload column; the tests only compare join results against
    # each other, so exact values do not matter (unseeded RNG).
    data = np.random.randn(len(key1))
    return DataFrame({'key1': key1, 'key2': key2, 'data': data})
@pytest.fixture
def right():
    """right dataframe (multi-indexed) for multi-index join tests"""
    # (key1, key2) MultiIndex; 'snap'/'three' combos from ``left`` are
    # deliberately absent so left joins produce NaN rows.
    index = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux'],
                               ['one', 'two', 'three']],
                       codes=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3],
                              [0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
                       names=['key1', 'key2'])
    return DataFrame(np.random.randn(10, 3), index=index,
                     columns=['j_one', 'j_two', 'j_three'])
@pytest.fixture
def left_multi():
    """Deterministic trip table indexed by (Origin, Destination, Period,
    TripPurp) for the multi-on-multi join tests."""
    return (
        DataFrame(
            dict(Origin=['A', 'A', 'B', 'B', 'C'],
                 Destination=['A', 'B', 'A', 'C', 'A'],
                 Period=['AM', 'AM', 'IP', 'AM', 'OP'],
                 TripPurp=['hbw', 'nhb', 'hbo', 'nhb', 'hbw'],
                 Trips=[1987, 3647, 2470, 4296, 4444]),
            columns=['Origin', 'Destination', 'Period',
                     'TripPurp', 'Trips'])
        .set_index(['Origin', 'Destination', 'Period', 'TripPurp']))
@pytest.fixture
def right_multi():
    """Deterministic link table indexed by (Origin, Destination, Period,
    LinkType); shares three index levels with ``left_multi``."""
    return (
        DataFrame(
            dict(Origin=['A', 'A', 'B', 'B', 'C', 'C', 'E'],
                 Destination=['A', 'B', 'A', 'B', 'A', 'B', 'F'],
                 Period=['AM', 'AM', 'IP', 'AM', 'OP', 'IP', 'AM'],
                 LinkType=['a', 'b', 'c', 'b', 'a', 'b', 'a'],
                 Distance=[100, 80, 90, 80, 75, 35, 55]),
            columns=['Origin', 'Destination', 'Period',
                     'LinkType', 'Distance'])
        .set_index(['Origin', 'Destination', 'Period', 'LinkType']))
@pytest.fixture
def on_cols_multi():
    """Index levels shared by ``left_multi`` and ``right_multi``."""
    return ['Origin', 'Destination', 'Period']
@pytest.fixture
def idx_cols_multi():
    """Union of both frames' index levels, used to re-index merge results."""
    return ['Origin', 'Destination', 'Period', 'TripPurp', 'LinkType']
class TestMergeMulti:
    """Tests for merging/joining on multiple keys and MultiIndexes.

    Most tests cross-check ``DataFrame.join(..., on=...)`` against the
    equivalent ``pd.merge`` on a reset index, with and without ``sort``.
    """
    def setup_method(self):
        # Shared (first, second) MultiIndex plus random frames used by a
        # few of the tests below.
        self.index = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux'],
                                        ['one', 'two', 'three']],
                                codes=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3],
                                       [0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
                                names=['first', 'second'])
        self.to_join = DataFrame(np.random.randn(10, 3), index=self.index,
                                 columns=['j_one', 'j_two', 'j_three'])
        # a little relevant example with NAs
        key1 = ['bar', 'bar', 'bar', 'foo', 'foo', 'baz', 'baz', 'qux',
                'qux', 'snap']
        key2 = ['two', 'one', 'three', 'one', 'two', 'one', 'two', 'two',
                'three', 'one']
        data = np.random.randn(len(key1))
        self.data = DataFrame({'key1': key1, 'key2': key2,
                               'data': data})
    def test_merge_on_multikey(self, left, right, join_type):
        """join(on=[two keys]) equals merge on the reset index, for every
        join type, with and without sorting."""
        on_cols = ['key1', 'key2']
        result = (left.join(right, on=on_cols, how=join_type)
                  .reset_index(drop=True))
        expected = pd.merge(left, right.reset_index(),
                            on=on_cols, how=join_type)
        tm.assert_frame_equal(result, expected)
        result = (left.join(right, on=on_cols, how=join_type, sort=True)
                  .reset_index(drop=True))
        expected = pd.merge(left, right.reset_index(),
                            on=on_cols, how=join_type, sort=True)
        tm.assert_frame_equal(result, expected)
    @pytest.mark.parametrize("sort", [False, True])
    def test_left_join_multi_index(self, left, right, sort):
        """Stress left join on a 3-level key over random data (including
        injected NaN keys), checking join against merge and value integrity
        via an arithmetic encoding of the key columns."""
        icols = ['1st', '2nd', '3rd']
        def bind_cols(df):
            # Encode the three key columns into a single numeric series so
            # payload columns can be checked against their keys.
            iord = lambda a: 0 if a != a else ord(a)
            f = lambda ts: ts.map(iord) - ord('a')
            return (f(df['1st']) + f(df['3rd']) * 1e2 +
                    df['2nd'].fillna(0) * 1e4)
        def run_asserts(left, right, sort):
            res = left.join(right, on=icols, how='left', sort=sort)
            assert len(left) < len(res) + 1
            assert not res['4th'].isna().any()
            assert not res['5th'].isna().any()
            # '5th' was built as the negation of '4th' below.
            tm.assert_series_equal(
                res['4th'], - res['5th'], check_names=False)
            result = bind_cols(res.iloc[:, :-2])
            tm.assert_series_equal(res['4th'], result, check_names=False)
            assert result.name is None
            if sort:
                tm.assert_frame_equal(
                    res, res.sort_values(icols, kind='mergesort'))
            out = merge(left, right.reset_index(), on=icols,
                        sort=sort, how='left')
            res.index = np.arange(len(res))
            tm.assert_frame_equal(out, res)
        lc = list(map(chr, np.arange(ord('a'), ord('z') + 1)))
        left = DataFrame(np.random.choice(lc, (5000, 2)),
                         columns=['1st', '3rd'])
        left.insert(1, '2nd', np.random.randint(0, 1000, len(left)))
        i = np.random.permutation(len(left))
        right = left.iloc[i].copy()
        left['4th'] = bind_cols(left)
        right['5th'] = - bind_cols(right)
        right.set_index(icols, inplace=True)
        run_asserts(left, right, sort)
        # inject some nulls
        left.loc[1::23, '1st'] = np.nan
        left.loc[2::37, '2nd'] = np.nan
        left.loc[3::43, '3rd'] = np.nan
        left['4th'] = bind_cols(left)
        i = np.random.permutation(len(left))
        right = left.iloc[i, :-1]
        right['5th'] = - bind_cols(right)
        right.set_index(icols, inplace=True)
        run_asserts(left, right, sort)
    @pytest.mark.parametrize("sort", [False, True])
    def test_merge_right_vs_left(self, left, right, sort):
        # compare left vs right merge with multikey
        on_cols = ['key1', 'key2']
        merged_left_right = left.merge(right,
                                       left_on=on_cols, right_index=True,
                                       how='left', sort=sort)
        merge_right_left = right.merge(left,
                                       right_on=on_cols, left_index=True,
                                       how='right', sort=sort)
        # Reorder columns
        merge_right_left = merge_right_left[merged_left_right.columns]
        tm.assert_frame_equal(merged_left_right, merge_right_left)
    def test_compress_group_combinations(self):
        """Smoke test: outer merge over a key space large enough to hit the
        label compression code path; no result assertions."""
        # ~ 40000000 possible unique groups
        key1 = tm.rands_array(10, 10000)
        key1 = np.tile(key1, 2)
        key2 = key1[::-1]
        df = DataFrame({'key1': key1, 'key2': key2,
                        'value1': np.random.randn(20000)})
        df2 = DataFrame({'key1': key1[::2], 'key2': key2[::2],
                         'value2': np.random.randn(10000)})
        # just to hit the label compression code path
        merge(df, df2, how='outer')
    def test_left_join_index_preserve_order(self):
        """Left join on a MultiIndex keeps the left frame's row order; also
        exercised with mixed dtypes across blocks."""
        on_cols = ['k1', 'k2']
        left = DataFrame({'k1': [0, 1, 2] * 8,
                          'k2': ['foo', 'bar'] * 12,
                          'v': np.array(np.arange(24), dtype=np.int64)})
        index = MultiIndex.from_tuples([(2, 'bar'), (1, 'foo')])
        right = DataFrame({'v2': [5, 7]}, index=index)
        result = left.join(right, on=on_cols)
        expected = left.copy()
        expected['v2'] = np.nan
        expected.loc[(expected.k1 == 2) & (expected.k2 == 'bar'), 'v2'] = 5
        expected.loc[(expected.k1 == 1) & (expected.k2 == 'foo'), 'v2'] = 7
        tm.assert_frame_equal(result, expected)
        # Sorting the unsorted result must equal joining with sort=True.
        result.sort_values(on_cols, kind='mergesort', inplace=True)
        expected = left.join(right, on=on_cols, sort=True)
        tm.assert_frame_equal(result, expected)
        # test join with multi dtypes blocks
        left = DataFrame({'k1': [0, 1, 2] * 8,
                          'k2': ['foo', 'bar'] * 12,
                          'k3': np.array([0, 1, 2] * 8, dtype=np.float32),
                          'v': np.array(np.arange(24), dtype=np.int32)})
        index = MultiIndex.from_tuples([(2, 'bar'), (1, 'foo')])
        right = DataFrame({'v2': [5, 7]}, index=index)
        result = left.join(right, on=on_cols)
        expected = left.copy()
        expected['v2'] = np.nan
        expected.loc[(expected.k1 == 2) & (expected.k2 == 'bar'), 'v2'] = 5
        expected.loc[(expected.k1 == 1) & (expected.k2 == 'foo'), 'v2'] = 7
        tm.assert_frame_equal(result, expected)
        result = result.sort_values(on_cols, kind='mergesort')
        expected = left.join(right, on=on_cols, sort=True)
        tm.assert_frame_equal(result, expected)
    def test_left_join_index_multi_match_multiindex(self):
        """Left join where one left row matches several right-index rows:
        rows are duplicated, left order preserved, index repeated."""
        left = DataFrame([
            ['X', 'Y', 'C', 'a'],
            ['W', 'Y', 'C', 'e'],
            ['V', 'Q', 'A', 'h'],
            ['V', 'R', 'D', 'i'],
            ['X', 'Y', 'D', 'b'],
            ['X', 'Y', 'A', 'c'],
            ['W', 'Q', 'B', 'f'],
            ['W', 'R', 'C', 'g'],
            ['V', 'Y', 'C', 'j'],
            ['X', 'Y', 'B', 'd']],
            columns=['cola', 'colb', 'colc', 'tag'],
            index=[3, 2, 0, 1, 7, 6, 4, 5, 9, 8])
        right = (DataFrame([
            ['W', 'R', 'C', 0],
            ['W', 'Q', 'B', 3],
            ['W', 'Q', 'B', 8],
            ['X', 'Y', 'A', 1],
            ['X', 'Y', 'A', 4],
            ['X', 'Y', 'B', 5],
            ['X', 'Y', 'C', 6],
            ['X', 'Y', 'C', 9],
            ['X', 'Q', 'C', -6],
            ['X', 'R', 'C', -9],
            ['V', 'Y', 'C', 7],
            ['V', 'R', 'D', 2],
            ['V', 'R', 'D', -1],
            ['V', 'Q', 'A', -3]],
            columns=['col1', 'col2', 'col3', 'val'])
            .set_index(['col1', 'col2', 'col3']))
        result = left.join(right, on=['cola', 'colb', 'colc'], how='left')
        expected = DataFrame([
            ['X', 'Y', 'C', 'a', 6],
            ['X', 'Y', 'C', 'a', 9],
            ['W', 'Y', 'C', 'e', nan],
            ['V', 'Q', 'A', 'h', -3],
            ['V', 'R', 'D', 'i', 2],
            ['V', 'R', 'D', 'i', -1],
            ['X', 'Y', 'D', 'b', nan],
            ['X', 'Y', 'A', 'c', 1],
            ['X', 'Y', 'A', 'c', 4],
            ['W', 'Q', 'B', 'f', 3],
            ['W', 'Q', 'B', 'f', 8],
            ['W', 'R', 'C', 'g', 0],
            ['V', 'Y', 'C', 'j', 7],
            ['X', 'Y', 'B', 'd', 5]],
            columns=['cola', 'colb', 'colc', 'tag', 'val'],
            index=[3, 3, 2, 0, 1, 1, 7, 6, 6, 4, 4, 5, 9, 8])
        tm.assert_frame_equal(result, expected)
        result = left.join(right, on=['cola', 'colb', 'colc'],
                           how='left', sort=True)
        expected = expected.sort_values(['cola', 'colb', 'colc'],
                                        kind='mergesort')
        tm.assert_frame_equal(result, expected)
    def test_left_join_index_multi_match(self):
        """Single-key variant of the multi-match test, plus GH7331: merge
        must maintain left frame order in a left merge."""
        left = DataFrame([
            ['c', 0],
            ['b', 1],
            ['a', 2],
            ['b', 3]],
            columns=['tag', 'val'],
            index=[2, 0, 1, 3])
        right = (DataFrame([
            ['a', 'v'],
            ['c', 'w'],
            ['c', 'x'],
            ['d', 'y'],
            ['a', 'z'],
            ['c', 'r'],
            ['e', 'q'],
            ['c', 's']],
            columns=['tag', 'char'])
            .set_index('tag'))
        result = left.join(right, on='tag', how='left')
        expected = DataFrame([
            ['c', 0, 'w'],
            ['c', 0, 'x'],
            ['c', 0, 'r'],
            ['c', 0, 's'],
            ['b', 1, nan],
            ['a', 2, 'v'],
            ['a', 2, 'z'],
            ['b', 3, nan]],
            columns=['tag', 'val', 'char'],
            index=[2, 2, 2, 2, 0, 1, 1, 3])
        tm.assert_frame_equal(result, expected)
        result = left.join(right, on='tag', how='left', sort=True)
        expected2 = expected.sort_values('tag', kind='mergesort')
        tm.assert_frame_equal(result, expected2)
        # GH7331 - maintain left frame order in left merge
        result = merge(left, right.reset_index(), how='left', on='tag')
        expected.index = np.arange(len(expected))
        tm.assert_frame_equal(result, expected)
    def test_left_merge_na_buglet(self):
        """Left merge where the right key column contains NaN keys must be
        equivalent to a plain positional join of the non-key columns."""
        left = DataFrame({'id': list('abcde'), 'v1': randn(5),
                          'v2': randn(5), 'dummy': list('abcde'),
                          'v3': randn(5)},
                         columns=['id', 'v1', 'v2', 'dummy', 'v3'])
        right = DataFrame({'id': ['a', 'b', np.nan, np.nan, np.nan],
                           'sv3': [1.234, 5.678, np.nan, np.nan, np.nan]})
        result = merge(left, right, on='id', how='left')
        rdf = right.drop(['id'], axis=1)
        expected = left.join(rdf)
        tm.assert_frame_equal(result, expected)
    def test_merge_na_keys(self):
        """Outer merge with NaN in the data columns matches the result of
        merging with NaNs replaced by a sentinel and restored afterwards."""
        data = [[1950, "A", 1.5],
                [1950, "B", 1.5],
                [1955, "B", 1.5],
                [1960, "B", np.nan],
                [1970, "B", 4.],
                [1950, "C", 4.],
                [1960, "C", np.nan],
                [1965, "C", 3.],
                [1970, "C", 4.]]
        frame = DataFrame(data, columns=["year", "panel", "data"])
        other_data = [[1960, 'A', np.nan],
                      [1970, 'A', np.nan],
                      [1955, 'A', np.nan],
                      [1965, 'A', np.nan],
                      [1965, 'B', np.nan],
                      [1955, 'C', np.nan]]
        other = DataFrame(other_data, columns=['year', 'panel', 'data'])
        result = frame.merge(other, how='outer')
        expected = frame.fillna(-999).merge(other.fillna(-999), how='outer')
        expected = expected.replace(-999, np.nan)
        tm.assert_frame_equal(result, expected)
    @pytest.mark.parametrize("klass", [None, np.asarray, Series, Index])
    def test_merge_datetime_index(self, klass):
        """Merging on a mix of column names and array-like keys derived from
        a DatetimeIndex (GH 19038)."""
        # see gh-19038
        df = DataFrame([1, 2, 3],
                       ["2016-01-01", "2017-01-01", "2018-01-01"],
                       columns=["a"])
        df.index = pd.to_datetime(df.index)
        on_vector = df.index.year
        if klass is not None:
            on_vector = klass(on_vector)
        expected = DataFrame(
            OrderedDict([
                ("a", [1, 2, 3]),
                ("key_1", [2016, 2017, 2018]),
            ])
        )
        result = df.merge(df, on=["a", on_vector], how="inner")
        tm.assert_frame_equal(result, expected)
        expected = DataFrame(
            OrderedDict([
                ("key_0", [2016, 2017, 2018]),
                ("a_x", [1, 2, 3]),
                ("a_y", [1, 2, 3]),
            ])
        )
        result = df.merge(df, on=[df.index.year], how="inner")
        tm.assert_frame_equal(result, expected)
    def test_join_multi_levels(self):
        """GH 3662: joining a flat-indexed frame with a MultiIndexed one,
        inner and outer, plus invalid index-name combinations."""
        # GH 3662
        # merge multi-levels
        household = (
            DataFrame(
                dict(household_id=[1, 2, 3],
                     male=[0, 1, 0],
                     wealth=[196087.3, 316478.7, 294750]),
                columns=['household_id', 'male', 'wealth'])
            .set_index('household_id'))
        portfolio = (
            DataFrame(
                dict(household_id=[1, 2, 2, 3, 3, 3, 4],
                     asset_id=["nl0000301109", "nl0000289783", "gb00b03mlx29",
                               "gb00b03mlx29", "lu0197800237", "nl0000289965",
                               np.nan],
                     name=["ABN Amro", "Robeco", "Royal Dutch Shell",
                           "Royal Dutch Shell",
                           "AAB Eastern Europe Equity Fund",
                           "Postbank BioTech Fonds", np.nan],
                     share=[1.0, 0.4, 0.6, 0.15, 0.6, 0.25, 1.0]),
                columns=['household_id', 'asset_id', 'name', 'share'])
            .set_index(['household_id', 'asset_id']))
        result = household.join(portfolio, how='inner')
        expected = (
            DataFrame(
                dict(male=[0, 1, 1, 0, 0, 0],
                     wealth=[196087.3, 316478.7, 316478.7,
                             294750.0, 294750.0, 294750.0],
                     name=['ABN Amro', 'Robeco', 'Royal Dutch Shell',
                           'Royal Dutch Shell',
                           'AAB Eastern Europe Equity Fund',
                           'Postbank BioTech Fonds'],
                     share=[1.00, 0.40, 0.60, 0.15, 0.60, 0.25],
                     household_id=[1, 2, 2, 3, 3, 3],
                     asset_id=['nl0000301109', 'nl0000289783', 'gb00b03mlx29',
                               'gb00b03mlx29', 'lu0197800237',
                               'nl0000289965']))
            .set_index(['household_id', 'asset_id'])
            .reindex(columns=['male', 'wealth', 'name', 'share']))
        tm.assert_frame_equal(result, expected)
        # equivalency
        result = (merge(household.reset_index(), portfolio.reset_index(),
                        on=['household_id'], how='inner')
                  .set_index(['household_id', 'asset_id']))
        tm.assert_frame_equal(result, expected)
        result = household.join(portfolio, how='outer')
        expected = (concat([
            expected,
            (DataFrame(
                dict(share=[1.00]),
                index=MultiIndex.from_tuples(
                    [(4, np.nan)],
                    names=['household_id', 'asset_id'])))
        ], axis=0, sort=True).reindex(columns=expected.columns))
        tm.assert_frame_equal(result, expected)
        # invalid cases
        household.index.name = 'foo'
        with pytest.raises(ValueError):
            household.join(portfolio, how='inner')
        portfolio2 = portfolio.copy()
        # NOTE(review): ``Index.set_names`` is not in-place here, so this
        # call discards its result and the index names stay unchanged —
        # confirm whether in-place renaming was intended.
        portfolio2.index.set_names(['household_id', 'foo'])
        with pytest.raises(ValueError):
            portfolio2.join(portfolio, how='inner')
    def test_join_multi_levels2(self):
        """GH 6360: more advanced multi-level merges (inner and outer) of a
        share table against a per-(asset, t) log-return table."""
        # some more advanced merges
        # GH6360
        household = (
            DataFrame(
                dict(household_id=[1, 2, 2, 3, 3, 3, 4],
                     asset_id=["nl0000301109", "nl0000301109", "gb00b03mlx29",
                               "gb00b03mlx29", "lu0197800237", "nl0000289965",
                               np.nan],
                     share=[1.0, 0.4, 0.6, 0.15, 0.6, 0.25, 1.0]),
                columns=['household_id', 'asset_id', 'share'])
            .set_index(['household_id', 'asset_id']))
        log_return = DataFrame(dict(
            asset_id=["gb00b03mlx29", "gb00b03mlx29",
                      "gb00b03mlx29", "lu0197800237", "lu0197800237"],
            t=[233, 234, 235, 180, 181],
            log_return=[.09604978, -.06524096, .03532373, .03025441, .036997]
        )).set_index(["asset_id", "t"])
        expected = (
            DataFrame(dict(
                household_id=[2, 2, 2, 3, 3, 3, 3, 3],
                asset_id=["gb00b03mlx29", "gb00b03mlx29",
                          "gb00b03mlx29", "gb00b03mlx29",
                          "gb00b03mlx29", "gb00b03mlx29",
                          "lu0197800237", "lu0197800237"],
                t=[233, 234, 235, 233, 234, 235, 180, 181],
                share=[0.6, 0.6, 0.6, 0.15, 0.15, 0.15, 0.6, 0.6],
                log_return=[.09604978, -.06524096, .03532373,
                            .09604978, -.06524096, .03532373,
                            .03025441, .036997]
            ))
            .set_index(["household_id", "asset_id", "t"])
            .reindex(columns=['share', 'log_return']))
        # this is the equivalency
        result = (merge(household.reset_index(), log_return.reset_index(),
                        on=['asset_id'], how='inner')
                  .set_index(['household_id', 'asset_id', 't']))
        tm.assert_frame_equal(result, expected)
        expected = (
            DataFrame(dict(
                household_id=[1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4],
                asset_id=["nl0000301109", "nl0000301109", "gb00b03mlx29",
                          "gb00b03mlx29", "gb00b03mlx29",
                          "gb00b03mlx29", "gb00b03mlx29", "gb00b03mlx29",
                          "lu0197800237", "lu0197800237",
                          "nl0000289965", None],
                t=[None, None, 233, 234, 235, 233, 234,
                   235, 180, 181, None, None],
                share=[1.0, 0.4, 0.6, 0.6, 0.6, 0.15,
                       0.15, 0.15, 0.6, 0.6, 0.25, 1.0],
                log_return=[None, None, .09604978, -.06524096, .03532373,
                            .09604978, -.06524096, .03532373,
                            .03025441, .036997, None, None]
            ))
            .set_index(["household_id", "asset_id", "t"])
            .reindex(columns=['share', 'log_return']))
        result = (merge(household.reset_index(), log_return.reset_index(),
                        on=['asset_id'], how='outer')
                  .set_index(['household_id', 'asset_id', 't']))
        tm.assert_frame_equal(result, expected)
class TestJoinMultiMulti:
    """Joins between two MultiIndexed frames that share only some levels."""
    def test_join_multi_multi(self, left_multi, right_multi, join_type,
                              on_cols_multi, idx_cols_multi):
        """join() on partially-overlapping MultiIndexes equals merge on the
        shared levels, for every join type."""
        # Multi-index join tests
        expected = (pd.merge(left_multi.reset_index(),
                             right_multi.reset_index(),
                             how=join_type, on=on_cols_multi).
                    set_index(idx_cols_multi).sort_index())
        result = left_multi.join(right_multi, how=join_type).sort_index()
        tm.assert_frame_equal(result, expected)
    def test_join_multi_empty_frames(self, left_multi, right_multi, join_type,
                                     on_cols_multi, idx_cols_multi):
        """Same equivalence with all data columns dropped (index-only join)."""
        left_multi = left_multi.drop(columns=left_multi.columns)
        right_multi = right_multi.drop(columns=right_multi.columns)
        expected = (pd.merge(left_multi.reset_index(),
                             right_multi.reset_index(),
                             how=join_type, on=on_cols_multi)
                    .set_index(idx_cols_multi).sort_index())
        result = left_multi.join(right_multi, how=join_type).sort_index()
        tm.assert_frame_equal(result, expected)
    @pytest.mark.parametrize("box", [None, np.asarray, Series, Index])
    def test_merge_datetime_index(self, box):
        """GH 19038 — NOTE(review): duplicates
        TestMergeMulti.test_merge_datetime_index above; consider removing
        one copy."""
        # see gh-19038
        df = DataFrame([1, 2, 3],
                       ["2016-01-01", "2017-01-01", "2018-01-01"],
                       columns=["a"])
        df.index = pd.to_datetime(df.index)
        on_vector = df.index.year
        if box is not None:
            on_vector = box(on_vector)
        expected = DataFrame(
            OrderedDict([
                ("a", [1, 2, 3]),
                ("key_1", [2016, 2017, 2018]),
            ])
        )
        result = df.merge(df, on=["a", on_vector], how="inner")
        tm.assert_frame_equal(result, expected)
        expected = DataFrame(
            OrderedDict([
                ("key_0", [2016, 2017, 2018]),
                ("a_x", [1, 2, 3]),
                ("a_y", [1, 2, 3]),
            ])
        )
        result = df.merge(df, on=[df.index.year], how="inner")
        tm.assert_frame_equal(result, expected)
    def test_single_common_level(self):
        """Joining on a single shared index level ('key') broadcasts across
        the non-shared levels, matching merge + set_index."""
        index_left = pd.MultiIndex.from_tuples([('K0', 'X0'), ('K0', 'X1'),
                                                ('K1', 'X2')],
                                               names=['key', 'X'])
        left = pd.DataFrame({'A': ['A0', 'A1', 'A2'],
                             'B': ['B0', 'B1', 'B2']},
                            index=index_left)
        index_right = pd.MultiIndex.from_tuples([('K0', 'Y0'), ('K1', 'Y1'),
                                                 ('K2', 'Y2'), ('K2', 'Y3')],
                                                names=['key', 'Y'])
        right = pd.DataFrame({'C': ['C0', 'C1', 'C2', 'C3'],
                              'D': ['D0', 'D1', 'D2', 'D3']},
                             index=index_right)
        result = left.join(right)
        expected = (pd.merge(left.reset_index(), right.reset_index(),
                             on=['key'], how='inner')
                    .set_index(['key', 'X', 'Y']))
        tm.assert_frame_equal(result, expected)
|
willprice/weboob
|
refs/heads/master
|
modules/voyagessncf/test.py
|
7
|
# -*- coding: utf-8 -*-
# Copyright(C) 2013 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.test import BackendTest
class VoyagesSNCFTest(BackendTest):
    """Live tests for the voyagessncf backend (requires network access)."""
    MODULE = 'voyagessncf'
    def test_stations(self):
        """Searching 'paris' returns stations; the last hit is expected to
        be 'Paris Massy'.
        NOTE(review): depends on the remote service's result ordering —
        fragile by design for a live-backend test."""
        stations = list(self.backend.iter_station_search('paris'))
        self.assertTrue(len(stations) > 0)
        self.assertTrue('Paris Massy' in stations[-1].name)
    def test_departures(self):
        """A Paris→Lyon departure search returns at least one priced trip."""
        departure = list(self.backend.iter_station_search('paris'))[0]
        arrival = list(self.backend.iter_station_search('lyon'))[0]
        prices = list(self.backend.iter_station_departures(departure.id, arrival.id))
        self.assertTrue(len(prices) > 0)
|
logicus4078/vertx-web
|
refs/heads/master
|
src/test/sockjs-protocol/venv/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py
|
1734
|
from __future__ import absolute_import, division, unicode_literals
from . import _base
from ..sanitizer import HTMLSanitizerMixin
class Filter(_base.Filter, HTMLSanitizerMixin):
    """Token-stream filter that passes every token through the sanitizer
    mixin, dropping tokens the sanitizer rejects."""

    def __iter__(self):
        # Pull tokens from the upstream filter chain; sanitize_token returns
        # a falsy value for tokens that must be removed from the stream.
        for raw_token in _base.Filter.__iter__(self):
            cleaned = self.sanitize_token(raw_token)
            if cleaned:
                yield cleaned
|
WindowsPhoneForensics/find_my_texts_wp8
|
refs/heads/master
|
find_my_texts_wp8/recovery_expressions/sms_text/expressions/type0/full.py
|
1
|
import re
__author__ = 'Chris Ottersen'
# TODO: identify - deleted
# TODO: identify - location
# TODO: identify -
exp = re.compile(
r"""
(?P<u0>.{9})
(?P<message_id>.{4})
.{4}
\*{45} #74
(?:.{43})?
\*{25}
(?:.{43})?
\*{4} #/74
(?P<u1>.{4}) #padding
(?P<thread_id>.{4}) # while not thoroughly tested, this seems to indicate the thread
\*{34}
(?P<u2>.{4})? # unknown
\*{42} #80
(?P<FILETIME_0>.{6}[\xCD-\xD9]\x01) # unknown meaning
\*{36}
(?P<FILETIME_1>.{6}[\xCD-\xD9]\x01) # unknown meaning
(?P<direction>
(?P<unread> \x00\x00\x00\x00)|
(?P<read> \x01\x00\x00\x00)|
(?P<sent> \x21\x00\x00\x00)|
(?P<draft> \x29\x00\x00\x00)|
(?P<unknown_status>.{4})
)
\*{4} #4
(?P<u3>.{36}) #40
\*{4} #44
(?P<u4>.{4}) #48
(?P<u5>.{8}) #56
\*{4} #60
(?P<u6>.{4}) #64
\*{18} #82-84
(?P<u7>.{4}) #14 00 00 00
\*{16} #****************
(?P<u8>.{6}) #00 00 00 00 00 00
\*{8} #********
(?P<u9>.{4}) #01 00 00 00
(?:
.{50}
(?P<u10>\x00\x00\x00\x00)
(?P<FILETIME_2>.{6}[\xCD-\xD9]\x01)
#|
#\x01\x00\x00\x00
)?
(?P<u11>
(?P<u11a>.{,150}?)\x00\x00\x01
(?(draft)\x00\x00|(?(sent)\x00\x00|
(?:(?P<phone_0>(?:..){,20}?\x00\x00)\x01)?
))
)
(?P<SMStext>I\x00P\x00M\x00\.\x00S\x00M\x00S\x00t\x00e\x00x\x00t\x00\x00\x00)
#.*?
\x01
(?P<content>
(?(draft)|(?(sent)|
(?:
(?P<phone_1>(?:..){,20}?\x00\x00)\x01
(?P<phone_2>(?:..){,20}?\x00\x00)\x01
(?P<phone_3>(?:..){,20}?\x00\x00)\x01
)
))
(?:(?P<message>(?:..)*?)?(?:\x00\x00))?
)
(?<=\x00\x00)
(?:\x01
(?:\x00\x00(?P<FILETIME_2b>.{6}[\xCD-\xD9]\x01)..)?
(?P<u12>.{2,25}?)
(?P<FILETIME_3>.{6}[\xCD-\xD9]\x01)
(?:\x01
(?P<sim>S\x00I\x00M\x00\x00\x00)
)?
)
""", re.DOTALL | re.VERBOSE)
|
Dioptas/Dioptas
|
refs/heads/develop
|
dioptas/model/util/Pattern.py
|
1
|
# -*- coding: utf-8 -*-
# Dioptas - GUI program for fast processing of 2D X-ray diffraction data
# Principal author: Clemens Prescher (clemens.prescher@gmail.com)
# Copyright (C) 2014-2019 GSECARS, University of Chicago, USA
# Copyright (C) 2015-2018 Institute for Geology and Mineralogy, University of Cologne, Germany
# Copyright (C) 2019-2020 DESY, Hamburg, Germany
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import logging
from qtpy import QtCore
import numpy as np
from scipy.interpolate import interp1d
from scipy.ndimage import gaussian_filter1d
from . import extract_background
logger = logging.getLogger(__name__)
class Pattern(QtCore.QObject):
    """A 1-dimensional diffraction pattern.

    The raw data is kept untouched in ``original_x``/``original_y``; scaling,
    offset, background subtraction (manual or automatic) and smoothing are
    derived from it by :meth:`recalculate_pattern`, whose result is exposed
    through :attr:`data`/:attr:`x`/:attr:`y`.  Every modifier triggers a
    recalculation and emits :attr:`pattern_changed`.
    """

    # emitted with the freshly processed (x, y) arrays after each recalculation
    pattern_changed = QtCore.Signal(np.ndarray, np.ndarray)

    def __init__(self, x=None, y=None, name=''):
        """
        :param x: x values (defaults to a demo grid from 0.1 to 15)
        :param y: y values (defaults to a synthetic curve over x)
        :param name: display name of the pattern
        """
        super(Pattern, self).__init__()
        if x is None:
            self._original_x = np.linspace(0.1, 15, 100)
        else:
            self._original_x = x
        if y is None:
            self._original_y = np.log(self._original_x ** 2) - (self._original_x * 0.2) ** 2
        else:
            self._original_y = y

        self.name = name
        self.filename = ""
        self._offset = 0
        self._scaling = 1
        self._smoothing = 0
        self._background_pattern = None

        self._pattern_x = self._original_x
        self._pattern_y = self._original_y

        self.auto_background_subtraction = False
        self.auto_background_subtraction_roi = None
        # forwarded verbatim to extract_background; presumably
        # [smooth width, iterations, poly order] -- confirm against
        # extract_background's signature
        self.auto_background_subtraction_parameters = [0.1, 50, 50]
        self._auto_background_before_subtraction_pattern = None
        self._auto_background_pattern = None

    def load(self, filename, skiprows=0):
        """Load two-column text data from *filename*.

        ``.chi`` files carry a 4-line header, so ``skiprows`` is forced to 4
        for them.  Returns -1 when the file cannot be parsed, None otherwise.
        """
        try:
            if filename.endswith('.chi'):
                skiprows = 4
            data = np.loadtxt(filename, skiprows=skiprows)
            self.filename = filename
            self._original_x = data.T[0]
            self._original_y = data.T[1]
            # file name without its extension becomes the pattern name
            self.name = os.path.basename(filename).split('.')[:-1][0]
            self.recalculate_pattern()
        except ValueError:
            print('Wrong data format for pattern file! - ' + filename)
            return -1

    def save(self, filename, header=''):
        """Save the *original* (unprocessed) data as two-column text."""
        data = np.dstack((self._original_x, self._original_y))
        np.savetxt(filename, data[0], header=header)

    @property
    def background_pattern(self):
        return self._background_pattern

    @background_pattern.setter
    def background_pattern(self, pattern):
        """Set a background pattern that is subtracted on every recalculation.

        :param pattern: new background pattern
        :type pattern: Pattern
        """
        self._background_pattern = pattern
        # follow subsequent changes of the background automatically
        self._background_pattern.pattern_changed.connect(self.recalculate_pattern)
        self.recalculate_pattern()

    def unset_background_pattern(self):
        """Remove the manual background pattern."""
        if self._background_pattern is not None:
            # Disconnect the old background's signal; otherwise its changes
            # would keep triggering recalculations of this pattern and the
            # connection would keep the old background alive.
            self._background_pattern.pattern_changed.disconnect(self.recalculate_pattern)
        self._background_pattern = None
        self.recalculate_pattern()

    def set_auto_background_subtraction(self, parameters, roi=None, recalc_pattern=True):
        """Enable automatic background extraction.

        :param parameters: parameters handed through to extract_background
        :param roi: optional (min, max) x-range restricting the extraction
        :param recalc_pattern: recalculate immediately when True
        """
        self.auto_background_subtraction = True
        self.auto_background_subtraction_parameters = parameters
        self.auto_background_subtraction_roi = roi
        if recalc_pattern:
            self.recalculate_pattern()

    def unset_auto_background_subtraction(self):
        """Disable automatic background extraction."""
        self.auto_background_subtraction = False
        self.recalculate_pattern()

    def get_auto_background_subtraction_parameters(self):
        return self.auto_background_subtraction_parameters

    def set_smoothing(self, amount):
        """Set the sigma of the Gaussian smoothing; 0 disables smoothing."""
        self._smoothing = amount
        self.recalculate_pattern()

    def recalculate_pattern(self):
        """Recompute the processed data from the original data.

        Processing order: scaling & offset -> manual background subtraction
        -> automatic background subtraction (optionally ROI-limited) ->
        Gaussian smoothing.  Emits :attr:`pattern_changed` at the end.

        :raises BkgNotInRangeError: when the manual background's x-range does
            not overlap the pattern's x-range.
        """
        x = self._original_x
        y = self._original_y * self._scaling + self._offset

        if self._background_pattern is not None:
            # create background function
            x_bkg, y_bkg = self._background_pattern.data
            if not np.array_equal(x_bkg, self._original_x):
                # the background will be interpolated
                f_bkg = interp1d(x_bkg, y_bkg, kind='linear')
                # find overlapping x and y values:
                ind = np.where((self._original_x <= np.max(x_bkg)) &
                               (self._original_x >= np.min(x_bkg)))
                x = self._original_x[ind]
                # BUGFIX: index the scaled/offset y instead of the raw
                # original values, so scaling and offset are honored in the
                # interpolation branch just like in the equal-grid branch
                # below (previously: y = self._original_y[ind]).
                y = y[ind]
                if len(x) == 0:
                    # no overlap between background and pattern
                    raise BkgNotInRangeError(self.name)
                y = y - f_bkg(x)
            else:
                # pattern and background share the same x grid: subtract directly
                y = y - y_bkg

        if self.auto_background_subtraction:
            self._auto_background_before_subtraction_pattern = Pattern(x, y)
            if self.auto_background_subtraction_roi is not None:
                ind = (x >= np.min(self.auto_background_subtraction_roi)) & \
                      (x <= np.max(self.auto_background_subtraction_roi))
                x = x[ind]
                y = y[ind]
                self.auto_background_subtraction_roi = [np.min(x), np.max(x)]
            else:
                self.auto_background_subtraction_roi = [np.min(x), np.max(x)]

            # clamp the ROI to the actual data range
            x_min, x_max = np.min(x), np.max(x)
            if self.auto_background_subtraction_roi[0] < x_min:
                self.auto_background_subtraction_roi[0] = x_min
            if self.auto_background_subtraction_roi[1] > x_max:
                self.auto_background_subtraction_roi[1] = x_max

            y_bkg = extract_background(x, y,
                                       self.auto_background_subtraction_parameters[0],
                                       self.auto_background_subtraction_parameters[1],
                                       self.auto_background_subtraction_parameters[2])
            self._auto_background_pattern = Pattern(x, y_bkg, name='auto_bg_' + self.name)
            y -= y_bkg

        if self._smoothing > 0:
            y = gaussian_filter1d(y, self._smoothing)

        self._pattern_x = x
        self._pattern_y = y
        self.pattern_changed.emit(self._pattern_x, self._pattern_y)

    @property
    def data(self):
        """Processed (x, y) data after all modifiers have been applied."""
        return self._pattern_x, self._pattern_y

    @data.setter
    def data(self, data):
        """Replace the original data; resets scaling to 1 and offset to 0."""
        (x, y) = data
        self._original_x = x
        self._original_y = y
        self._scaling = 1
        self._offset = 0
        self.recalculate_pattern()

    @property
    def x(self):
        return self._pattern_x

    @property
    def y(self):
        return self._pattern_y

    @property
    def original_data(self):
        """Raw (x, y) data without any modifiers applied."""
        return self._original_x, self._original_y

    @property
    def original_x(self):
        return self._original_x

    @property
    def original_y(self):
        return self._original_y

    @property
    def scaling(self):
        return self._scaling

    @scaling.setter
    def scaling(self, value):
        # negative scaling is not meaningful for intensities -> clamp to 0
        if value < 0:
            self._scaling = 0
        else:
            self._scaling = value
        self.recalculate_pattern()

    def limit(self, x_min, x_max):
        """Return a new Pattern restricted to x_min < x < x_max (exclusive)."""
        x, y = self.data
        mask = np.where((x_min < x) & (x < x_max))
        return Pattern(x[mask], y[mask])

    @property
    def offset(self):
        return self._offset

    @offset.setter
    def offset(self, value):
        self._offset = value
        self.recalculate_pattern()

    @property
    def auto_background_before_subtraction_pattern(self):
        """Pattern as it looked just before the automatic background was removed."""
        return self._auto_background_before_subtraction_pattern

    @property
    def auto_background_pattern(self):
        """The automatically extracted background as its own Pattern."""
        return self._auto_background_pattern

    def has_background(self):
        """Whether any (manual or automatic) background subtraction is active."""
        return (self.background_pattern is not None) or self.auto_background_subtraction

    # Operators:
    def __sub__(self, other):
        """Subtract another pattern, interpolating it when the x grids differ.

        NOTE(review): this uses cubic interpolation while __add__ and
        recalculate_pattern use linear -- kept as-is, confirm intent.

        :raises BkgNotInRangeError: when the x-ranges do not overlap.
        """
        orig_x, orig_y = self.data
        other_x, other_y = other.data

        if orig_x.shape != other_x.shape:
            # the other pattern will be interpolated
            other_fcn = interp1d(other_x, other_y, kind='cubic')
            # find overlapping x and y values:
            ind = np.where((orig_x <= np.max(other_x)) & (orig_x >= np.min(other_x)))
            x = orig_x[ind]
            y = orig_y[ind]
            if len(x) == 0:
                # no overlap between the two patterns
                raise BkgNotInRangeError(self.name)
            return Pattern(x, y - other_fcn(x))
        else:
            return Pattern(orig_x, orig_y - other_y)

    def __add__(self, other):
        """Add another pattern, interpolating it when the x grids differ.

        :raises BkgNotInRangeError: when the x-ranges do not overlap.
        """
        orig_x, orig_y = self.data
        other_x, other_y = other.data

        if orig_x.shape != other_x.shape:
            # the other pattern will be interpolated
            other_fcn = interp1d(other_x, other_y, kind='linear')
            # find overlapping x and y values:
            ind = np.where((orig_x <= np.max(other_x)) & (orig_x >= np.min(other_x)))
            x = orig_x[ind]
            y = orig_y[ind]
            if len(x) == 0:
                # no overlap between the two patterns
                raise BkgNotInRangeError(self.name)
            return Pattern(x, y + other_fcn(x))
        else:
            return Pattern(orig_x, orig_y + other_y)

    def __rmul__(self, other):
        """Scalar multiplication: ``factor * pattern`` scales the y values."""
        orig_x, orig_y = self.data
        return Pattern(orig_x, orig_y * other)

    def __len__(self):
        return len(self._original_x)
class BkgNotInRangeError(Exception):
    """Raised when a background pattern's x-range has no overlap with the
    pattern it should be subtracted from."""

    _MESSAGE = "The background range does not overlap with the Pattern range for "

    def __init__(self, pattern_name):
        # remember which pattern triggered the error for the message below
        self.pattern_name = pattern_name

    def __str__(self):
        return self._MESSAGE + self.pattern_name
|
vermouthmjl/scikit-learn
|
refs/heads/master
|
sklearn/neighbors/tests/test_kde.py
|
80
|
import numpy as np
from sklearn.utils.testing import (assert_allclose, assert_raises,
assert_equal)
from sklearn.neighbors import KernelDensity, KDTree, NearestNeighbors
from sklearn.neighbors.ball_tree import kernel_norm
from sklearn.pipeline import make_pipeline
from sklearn.datasets import make_blobs
from sklearn.model_selection import GridSearchCV
from sklearn.preprocessing import StandardScaler
def compute_kernel_slow(Y, X, kernel, h):
    """Brute-force KDE reference: evaluate the density of samples X at the
    query points Y for the given kernel and bandwidth h."""
    # (n_query, n_samples) matrix of pairwise Euclidean distances
    d = np.sqrt(((Y[:, None, :] - X) ** 2).sum(-1))
    norm = kernel_norm(h, X.shape[1], kernel) / X.shape[0]
    # per-pair unnormalized kernel contributions
    if kernel == 'gaussian':
        contrib = np.exp(-0.5 * (d * d) / (h * h))
    elif kernel == 'tophat':
        contrib = (d < h)
    elif kernel == 'epanechnikov':
        contrib = (1.0 - (d * d) / (h * h)) * (d < h)
    elif kernel == 'exponential':
        contrib = np.exp(-d / h)
    elif kernel == 'linear':
        contrib = (1 - d / h) * (d < h)
    elif kernel == 'cosine':
        contrib = np.cos(0.5 * np.pi * d / h) * (d < h)
    else:
        raise ValueError('kernel not recognized')
    return norm * contrib.sum(-1)
def test_kernel_density(n_samples=100, n_features=3):
    # Nose yield-style test: compare KernelDensity against the brute-force
    # reference for every kernel / bandwidth / tolerance combination.
    rng = np.random.RandomState(0)
    X = rng.randn(n_samples, n_features)
    Y = rng.randn(n_samples, n_features)
    for kernel in ['gaussian', 'tophat', 'epanechnikov',
                   'exponential', 'linear', 'cosine']:
        for bandwidth in [0.01, 0.1, 1]:
            dens_true = compute_kernel_slow(Y, X, kernel, bandwidth)
            # check_results is deliberately (re)defined inside the loop: it
            # closes over X, Y and dens_true from this function's frame, so
            # it must be yielded (and run) before dens_true is reassigned.
            def check_results(kernel, bandwidth, atol, rtol):
                kde = KernelDensity(kernel=kernel, bandwidth=bandwidth,
                                    atol=atol, rtol=rtol)
                log_dens = kde.fit(X).score_samples(Y)
                assert_allclose(np.exp(log_dens), dens_true,
                                atol=atol, rtol=max(1E-7, rtol))
                assert_allclose(np.exp(kde.score(Y)),
                                np.prod(dens_true),
                                atol=atol, rtol=max(1E-7, rtol))
            for rtol in [0, 1E-5]:
                for atol in [1E-6, 1E-2]:
                    # NOTE(review): breadth_first is never passed on, so each
                    # case is simply yielded twice -- confirm whether the
                    # parameter was meant to reach KernelDensity.
                    for breadth_first in (True, False):
                        yield (check_results, kernel, bandwidth, atol, rtol)
def test_kernel_density_sampling(n_samples=100, n_features=3):
    """Check KernelDensity.sample: output shape, sample support bounds, and
    NotImplementedError for kernels without sampling support."""
    rng = np.random.RandomState(0)
    X = rng.randn(n_samples, n_features)
    bandwidth = 0.2
    for kernel in ['gaussian', 'tophat']:
        # draw a sample from the fitted density
        kde = KernelDensity(bandwidth, kernel=kernel).fit(X)
        samp = kde.sample(100)
        assert_equal(X.shape, samp.shape)
        # check that samples are in the right range
        nbrs = NearestNeighbors(n_neighbors=1).fit(X)
        # BUGFIX: query the drawn samples, not the training points.  Querying
        # X against itself always returns distance 0 (every point is its own
        # nearest neighbor), which made the assertions below vacuously true.
        dist, ind = nbrs.kneighbors(samp, return_distance=True)
        if kernel == 'tophat':
            # tophat support is a ball of radius `bandwidth` around a point
            assert np.all(dist < bandwidth)
        elif kernel == 'gaussian':
            # 5 standard deviations is safe for 100 samples, but there's a
            # very small chance this test could fail.
            assert np.all(dist < 5 * bandwidth)
    # check unsupported kernels
    for kernel in ['epanechnikov', 'exponential', 'linear', 'cosine']:
        kde = KernelDensity(bandwidth, kernel=kernel).fit(X)
        assert_raises(NotImplementedError, kde.sample, 100)
    # non-regression test: used to return a scalar
    X = rng.randn(4, 1)
    kde = KernelDensity(kernel="gaussian").fit(X)
    assert_equal(kde.sample().shape, (1, 1))
def test_kde_algorithm_metric_choice():
    """Smoke test every algorithm/metric combination; metrics a kd_tree does
    not support must raise ValueError at construction time."""
    rng = np.random.RandomState(0)
    X = rng.randn(10, 2)  # 2 features required for haversine dist.
    Y = rng.randn(10, 2)
    metrics = ['euclidean', 'minkowski', 'manhattan', 'chebyshev', 'haversine']
    for algorithm in ['auto', 'ball_tree', 'kd_tree']:
        for metric in metrics:
            # guard clause: invalid kd_tree metric -> constructor must fail
            if algorithm == 'kd_tree' and metric not in KDTree.valid_metrics:
                assert_raises(ValueError, KernelDensity,
                              algorithm=algorithm, metric=metric)
                continue
            kde = KernelDensity(algorithm=algorithm, metric=metric)
            kde.fit(X)
            y_dens = kde.score_samples(Y)
            assert_equal(y_dens.shape, Y.shape[:1])
def test_kde_score(n_samples=100, n_features=3):
    """Placeholder for a KernelDensity.score test (see FIXME below)."""
    pass
    # FIXME: implement the actual score check
    # np.random.seed(0)
    # X = np.random.random((n_samples, n_features))
    # Y = np.random.random((n_samples, n_features))
def test_kde_badargs():
    """Invalid constructor arguments must raise ValueError."""
    bad_configs = [
        dict(algorithm='blah'),
        dict(bandwidth=0),
        dict(kernel='blah'),
        dict(metric='blah'),
        dict(algorithm='kd_tree', metric='blah'),
    ]
    for kwargs in bad_configs:
        assert_raises(ValueError, KernelDensity, **kwargs)
def test_kde_pipeline_gridsearch():
    """KernelDensity must work inside a Pipeline under GridSearchCV."""
    X, _ = make_blobs(cluster_std=.1, random_state=1,
                      centers=[[0, 1], [1, 0], [0, 0]])
    pipeline = make_pipeline(
        StandardScaler(with_mean=False, with_std=False),
        KernelDensity(kernel="gaussian"))
    # the no-op scaler only checks pipeline parameter routing
    grid = {'kerneldensity__bandwidth': [0.001, 0.01, 0.1, 1, 10]}
    search = GridSearchCV(pipeline, param_grid=grid, cv=5)
    search.fit(X)
    assert_equal(search.best_params_['kerneldensity__bandwidth'], .1)
|
acsone/odoo
|
refs/heads/8.0
|
addons/mrp/wizard/mrp_product_produce.py
|
75
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
class mrp_product_produce_line(osv.osv_memory):
    # Transient model holding one "consume" line of the produce wizard:
    # which product (and optionally which lot) is consumed, and how much.
    _name="mrp.product.produce.line"
    _description = "Product Produce Consume lines"
    _columns = {
        'product_id': fields.many2one('product.product', 'Product'),
        # quantity expressed in the product's default unit of measure
        'product_qty': fields.float('Quantity (in default UoM)', digits_compute=dp.get_precision('Product Unit of Measure')),
        'lot_id': fields.many2one('stock.production.lot', 'Lot'),
        # back-reference to the wizard owning this line
        'produce_id': fields.many2one('mrp.product.produce'),
        # mirrors the product's tracking flag so the view can require a lot
        'track_production': fields.related('product_id', 'track_production', type='boolean'),
    }
class mrp_product_produce(osv.osv_memory):
    # Transient wizard to consume raw materials and/or produce finished goods
    # for the manufacturing order found in context['active_id'].
    _name = "mrp.product.produce"
    _description = "Product Produce"
    _columns = {
        # NOTE(review): declared with type= and no label string, unlike every
        # other field here -- presumably intentional legacy usage, confirm.
        'product_id': fields.many2one('product.product', type='many2one'),
        'product_qty': fields.float('Select Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
        'mode': fields.selection([('consume_produce', 'Consume & Produce'),
                                  ('consume', 'Consume Only')], 'Mode', required=True,
                                  help="'Consume only' mode will only consume the products with the quantity selected.\n"
                                       "'Consume & Produce' mode will consume as well as produce the products with the quantity selected "
                                       "and it will finish the production order when total ordered quantities are produced."),
        'lot_id': fields.many2one('stock.production.lot', 'Lot'), #Should only be visible when it is consume and produce mode
        'consume_lines': fields.one2many('mrp.product.produce.line', 'produce_id', 'Products Consumed'),
        'track_production': fields.boolean('Track production'),
    }
    def on_change_qty(self, cr, uid, ids, product_qty, consume_lines, context=None):
        """
        When changing the quantity of products to be produced it will
        recalculate the number of raw materials needed according
        to the scheduled products and the already consumed/produced products
        It will return the consume lines needed for the products to be produced
        which the user can still adapt
        """
        # NOTE(review): the incoming consume_lines argument is discarded and
        # rebuilt from the production order below.
        prod_obj = self.pool.get("mrp.production")
        uom_obj = self.pool.get("product.uom")
        production = prod_obj.browse(cr, uid, context['active_id'], context=context)
        consume_lines = []
        new_consume_lines = []
        if product_qty > 0.0:
            # convert the entered quantity to the product's own UoM first
            product_uom_qty = uom_obj._compute_qty(cr, uid, production.product_uom.id, product_qty, production.product_id.uom_id.id)
            consume_lines = prod_obj._calculate_qty(cr, uid, production, product_qty=product_uom_qty, context=context)
        # wrap each line dict in the (0, False, values) one2many create triplet
        for consume in consume_lines:
            new_consume_lines.append([0, False, consume])
        return {'value': {'consume_lines': new_consume_lines}}
    def _get_product_qty(self, cr, uid, context=None):
        """ To obtain product quantity
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param context: A standard dictionary
        @return: Quantity
        """
        if context is None:
            context = {}
        prod = self.pool.get('mrp.production').browse(cr, uid,
                                                      context['active_id'], context=context)
        done = 0.0
        # sum quantities already produced (excluding scrapped moves)
        for move in prod.move_created_ids2:
            if move.product_id == prod.product_id:
                if not move.scrapped:
                    done += move.product_uom_qty # As uom of produced products and production order should correspond
        # remaining quantity still to be produced
        return prod.product_qty - done
    def _get_product_id(self, cr, uid, context=None):
        """ To obtain product id
        @return: id
        """
        prod=False
        if context and context.get("active_id"):
            prod = self.pool.get('mrp.production').browse(cr, uid,
                                                          context['active_id'], context=context)
        return prod and prod.product_id.id or False
    def _get_track(self, cr, uid, context=None):
        # Default for 'track_production': whether the produced product
        # requires lot tracking (always or on production).
        product_id = self._get_product_id(cr, uid, context=context)
        if not product_id:
            return False
        product = self.pool.get("product.product").browse(
            cr, uid, product_id, context=context)
        return product.track_all or product.track_production or False
    # wizard defaults, derived from the active production order
    _defaults = {
         'product_qty': _get_product_qty,
         'mode': lambda *x: 'consume_produce',
         'product_id': _get_product_id,
         'track_production': _get_track,
    }
    def do_produce(self, cr, uid, ids, context=None):
        # Apply the wizard: delegate consumption/production of the chosen
        # quantity to mrp.production.action_produce for the active order.
        production_id = context.get('active_id', False)
        assert production_id, "Production Id should be specified in context as a Active ID."
        data = self.browse(cr, uid, ids[0], context=context)
        self.pool.get('mrp.production').action_produce(cr, uid, production_id,
                                                       data.product_qty, data.mode, data, context=context)
        return {}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
gogobook/wagtail
|
refs/heads/master
|
wagtail/contrib/settings/views.py
|
7
|
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.shortcuts import redirect, render, get_object_or_404
from django.utils.lru_cache import lru_cache
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from wagtail.wagtailadmin import messages
from wagtail.wagtailadmin.edit_handlers import (
ObjectList, extract_panel_definitions_from_model_class)
from wagtail.wagtailcore.models import Site
from .forms import SiteSwitchForm
from .permissions import user_can_edit_setting_type
from .registry import registry
def get_model_from_url_params(app_name, model_name):
    """Look up the registered setting model for an app/model name pair,
    raising Http404 for combinations that are not registered setting types."""
    setting_model = registry.get_by_natural_key(app_name, model_name)
    if setting_model is not None:
        return setting_model
    raise Http404
@lru_cache()
def get_setting_edit_handler(model):
    """Build (and memoize per model) an ObjectList edit handler for *model*,
    excluding the internal ``site`` field from the generated panels."""
    panel_definitions = extract_panel_definitions_from_model_class(model, ['site'])
    handler = ObjectList(panel_definitions)
    return handler.bind_to_model(model)
def edit_current_site(request, app_name, model_name):
    """Redirect to the settings edit view for the request's site, falling
    back to the first site when the request is not bound to one."""
    current_site = request.site or Site.objects.first()
    return redirect('wagtailsettings:edit', current_site.pk, app_name, model_name)
def edit(request, site_pk, app_name, model_name):
    """Edit the settings instance of the given type for the given site.

    Renders the edit form on GET; validates and saves on POST, redirecting
    back to the same page on success.  Raises PermissionDenied when the user
    may not edit this setting type, Http404 for unknown type or site.
    """
    model = get_model_from_url_params(app_name, model_name)
    if not user_can_edit_setting_type(request.user, model):
        raise PermissionDenied
    site = get_object_or_404(Site, pk=site_pk)
    setting_type_name = model._meta.verbose_name
    instance = model.for_site(site)
    edit_handler_class = get_setting_edit_handler(model)
    form_class = edit_handler_class.get_form_class(model)
    # BUGFIX: test the request method instead of the truthiness of
    # request.POST -- a POST with an empty body was previously treated as an
    # initial GET and silently re-rendered a blank form.
    if request.method == 'POST':
        form = form_class(request.POST, request.FILES, instance=instance)
        if form.is_valid():
            form.save()
            messages.success(
                request,
                _("{setting_type} updated.").format(
                    setting_type=capfirst(setting_type_name),
                    instance=instance
                )
            )
            # redirect-after-post so a refresh does not resubmit the form
            return redirect('wagtailsettings:edit', site.pk, app_name, model_name)
        else:
            messages.error(request, _("The setting could not be saved due to errors."))
            edit_handler = edit_handler_class(instance=instance, form=form)
    else:
        form = form_class(instance=instance)
        edit_handler = edit_handler_class(instance=instance, form=form)
    # Show a site switcher form if there are multiple sites
    site_switcher = None
    if Site.objects.count() > 1:
        site_switcher = SiteSwitchForm(site, model)
    return render(request, 'wagtailsettings/edit.html', {
        'opts': model._meta,
        'setting_type_name': setting_type_name,
        'instance': instance,
        'edit_handler': edit_handler,
        'site': site,
        'site_switcher': site_switcher,
    })
|
CiscoSystems/avos
|
refs/heads/master
|
openstack_dashboard/dashboards/admin/avos/static/txt/ceilometercommands.py
|
2
|
ceilometerclient.Client('1', endpoint='http://172.29.86.41:35357/v2.0', username="admin", api_key="ADMIN_PASS")
carbohydrate-9662312c-a784-4c4d-b959-8ced233f8430:
from novaclient import client as novaclient
from ceilometerclient import client as ceilometerclient
from keystoneclient import client as keystoneclient
def get_token():
keystone = keystoneclient(username="admin", password="ADMIN_PASS", tenant_name="admin", auth_url="http://172.29.86.41:35357/v2.0")
token = keystone.service_catalog.catalog['token']['id']
return token
ceilometer = ceilometerclient(endpoint='http://172.29.86.41:8777', token=get_token())
ceilometer.
nova = novaclient.Client("1.1", username="admin", api_key="ADMIN_PASS", auth_url="http://10.0.120.143:35357/v2.0", project_id="admin")
nova = novaclient.Client("1.1", username=OS_USERNAME, api_key=OS_PASSWORD, auth_url=OS_ENDPOINT, project_id=OS_TENANT)
servers = nova.servers.list(detailed=True)
nova = novaclient.Client("2", auth_url="http://10.0.120.143:35357/v2.0", username="admin", api_key="ADMIN_PASS", project_id="admin" )
________
from glanceclient
____________
from ceilometerclient import client as ceilometerclient
ceilometer = ceilometerclient.get_client("2", os_auth_url="http://10.0.120.143:35357/v2.0", os_username="admin", os_password="ADMIN_PASS", os_tenant_name="admin" )
servers = ceilometer.meters.list()
ceilometer.meters.list(q=[{"field":"resource_id","op":"eq","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}])
ceilometer.statistics.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"28630164-5ef1-4a96-8b6e-96d0d7878cfa"}], groupby='metadata.flavor')
{"field":"duration_start","op":"gt","value":"2014-03-20T19:39:22"}],
ceilometer.statistics.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}])
ceilometer.statistics.list(meter_name="cpu_util", q=[{"field":"duration_start","op":"gt","value":"2014-03-20T19:39:22"}])
{"field":"period_start","op":"gt","value":"2014-03-20T19:39:22"}
ceilometer.statistics.list(meter_name="cpu_util", q=[{"field":"project_id","value":"admin"}], )
groupby=metadata.flavor&
{field=this,op=le,value=34}
ceilometer.statistics.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"28630164-5ef1-4a96-8b6e-96d0d7878cfa"}], period=600, groupby='instance_id')
>>> from ceilometerclient import client as ceilometerclient
>>> ceilometer = ceilometerclient.get_client("2", os_auth_url="http://10.0.120.143:35357/v2.0", os_username="admin", os_password="ADMIN_PASS", os_tenant_name="admin" )
>>> ceilometer.resource.get("3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'Client' object has no attribute 'resource'
>>> ceilometer.resources.get("3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0")
<Resource {u'project_id': u'10bed47042c548958046bd1f7b944039', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'links': [{u'href': u'http://controller:8777/v2/resources/3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'self'}, {u'href': u'http://controller:8777/v2/meters/instance?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'instance'}, {u'href': u'http://controller:8777/v2/meters/instance:m1.hadoop?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'instance:m1.hadoop'}, {u'href': u'http://controller:8777/v2/meters/disk.write.requests?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.write.requests'}, {u'href': u'http://controller:8777/v2/meters/disk.read.bytes?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.read.bytes'}, {u'href': u'http://controller:8777/v2/meters/cpu?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'cpu'}, {u'href': u'http://controller:8777/v2/meters/disk.write.bytes?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': 
u'disk.write.bytes'}, {u'href': u'http://controller:8777/v2/meters/disk.read.requests?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.read.requests'}, {u'href': u'http://controller:8777/v2/meters/disk.write.requests.rate?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.write.requests.rate'}, {u'href': u'http://controller:8777/v2/meters/disk.read.bytes.rate?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.read.bytes.rate'}, {u'href': u'http://controller:8777/v2/meters/disk.write.bytes.rate?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.write.bytes.rate'}, {u'href': u'http://controller:8777/v2/meters/cpu_util?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'cpu_util'}, {u'href': u'http://controller:8777/v2/meters/disk.read.requests.rate?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'disk.read.requests.rate'}], u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0'}>
>>> ceilometer.resources.get("3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0")
<Resource {u'project_id': u'10bed47042c548958046bd1f7b944039', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'links': [{u'href': u'http://controller:8777/v2/resources/3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'self'}, {u'href': u'http://controller:8777/v2/meters/instance?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'instance'}, {u'href': u'http://controller:8777/v2/meters/instance:m1.hadoop?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'instance:m1.hadoop'}, {u'href': u'http://controller:8777/v2/meters/cpu_util?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'cpu_util'}, {u'href': u'http://controller:8777/v2/meters/cpu?q.field=resource_id&q.value=3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'rel': u'cpu'}], u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0'}>
>>> ceilometer.meters.list(q=[{"field":"resource_id","op":"eq","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}])
[<Meter {u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'name': u'instance', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'source': u'openstack', u'meter_id': u'M2FjM2FiNGMtZTZmMC00NTJkLWJmZDMtOWNiMWUxYTNjZmUwK2luc3RhbmNl\n', u'project_id': u'10bed47042c548958046bd1f7b944039', u'type': u'gauge', u'unit': u'instance'}>, <Meter {u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'name': u'instance:m1.hadoop', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'source': u'openstack', u'meter_id': u'M2FjM2FiNGMtZTZmMC00NTJkLWJmZDMtOWNiMWUxYTNjZmUwK2luc3RhbmNlOm0xLmhhZG9vcA==\n', u'project_id': u'10bed47042c548958046bd1f7b944039', u'type': u'gauge', u'unit': u'instance'}>, <Meter {u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'name': u'cpu_util', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'source': u'openstack', u'meter_id': u'M2FjM2FiNGMtZTZmMC00NTJkLWJmZDMtOWNiMWUxYTNjZmUwK2NwdV91dGls\n', u'project_id': u'10bed47042c548958046bd1f7b944039', u'type': u'gauge', u'unit': u'%'}>, <Meter {u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'name': u'cpu', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'source': u'openstack', u'meter_id': u'M2FjM2FiNGMtZTZmMC00NTJkLWJmZDMtOWNiMWUxYTNjZmUwK2NwdQ==\n', u'project_id': u'10bed47042c548958046bd1f7b944039', u'type': u'cumulative', u'unit': u'ns'}>]
>>> ceilometer.samples.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}])
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:45', u'message_id': u'25bbd7fc-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:40', u'message_id': u'22c15932-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': 
u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:35', u'message_id': u'1fc85802-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', 
u'timestamp': u'2014-03-24T19:54:30', u'message_id': u'1ceeef6a-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:25', u'message_id': u'19cddcec-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla 
Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:20', u'message_id': u'16d601ae-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:15', u'message_id': u'13d83e68-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, 
u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:10', u'message_id': u'10de6624-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', 
u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:05', u'message_id': u'0de16d2c-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:54:00', u'message_id': u'0ae5e274-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', 
u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:55', u'message_id': u'07ebc2b4-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': 
u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:50', u'message_id': u'04ee4d98-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:45', u'message_id': u'01f4c806-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', 
u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:40', u'message_id': u'fef70c5e-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': 
u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:35', u'message_id': u'fbff9b6a-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:30', u'message_id': u'f903d930-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', 
u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:25', u'message_id': u'f602a2ac-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:20', u'message_id': 
u'f30b1084-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:15', u'message_id': u'f00c3b38-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': 
u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:10', u'message_id': u'ed10d862-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:53:05', u'message_id': u'ea216fd6-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': 
u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:59', u'message_id': u'e70e93be-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', 
u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:55', u'message_id': u'e41f588c-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:50', u'message_id': u'e1261f3a-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': 
u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:45', u'message_id': u'de2885b6-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', 
u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:40', u'message_id': u'db3097fe-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:35', u'message_id': u'd832dc6a-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': 
u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:30', u'message_id': u'd5398572-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': 
u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:25', u'message_id': u'd23d868e-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:20', u'message_id': u'cf43a9cc-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', 
u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:15', u'message_id': u'cc46da1e-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:10', u'message_id': 
u'c949af12-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:04', u'message_id': u'c64de3aa-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': 
u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:52:00', u'message_id': u'c356736a-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:51:54', u'message_id': u'c0554be6-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': 
u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:51:49', u'message_id': u'bd5d4b64-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', 
u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:51:44', u'message_id': u'ba5fcb44-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:51:39', u'message_id': u'b7644276-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': 
u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:51:34', u'message_id': u'b468cfd8-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', 
u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:51:29', u'message_id': u'b16d4e08-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:51:24', u'message_id': u'ae7190ec-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': 
u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:51:19', u'message_id': u'ab778e32-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': 
u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:51:14', u'message_id': u'a878a4be-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:51:09', u'message_id': u'a58056f8-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', 
u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:51:04', u'message_id': u'a282fce4-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:59', u'message_id': 
u'9f8780a0-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:54', u'message_id': u'9c8e3f60-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': 
u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:49', u'message_id': u'999260fc-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:44', u'message_id': u'969a0580-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': 
u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:39', u'message_id': u'939d3690-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', 
u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:34', u'message_id': u'90a1174a-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:29', u'message_id': u'8da41966-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': 
u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:24', u'message_id': u'8aa5a3ce-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', 
u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:19', u'message_id': u'87ad520c-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:14', u'message_id': u'84b3135c-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': 
u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:09', u'message_id': u'81b7968c-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': 
u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:50:04', u'message_id': u'7ebac788-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:49:59', u'message_id': u'7bbd8138-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', 
u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:49:54', u'message_id': u'78c2fe7c-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:49:49', u'message_id': 
u'75c63f86-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:49:44', u'message_id': u'72cd91a8-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': 
u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:49:40', u'message_id': u'6fe17b9e-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:49:34', u'message_id': u'6cd572c0-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': 
u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:49:25', u'message_id': u'6764fb6c-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', 
u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:48:22', u'message_id': u'41f40756-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:48:17', u'message_id': u'3ef9decc-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': 
u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:48:13', u'message_id': u'3c154bce-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', 
u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:48:07', u'message_id': u'3903f85e-b38d-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>]
>>> ceilometer.samples.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}], limit=3)
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:55:30', u'message_id': u'40924a84-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:55:25', u'message_id': u'3da138d0-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': 
u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:55:20', u'message_id': u'3a9b369a-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>]
>>> ceilometer.samples.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}], limit=1)
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:55:40', u'message_id': u'468ac984-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>]
>>> ceilometer.samples.list(meter_name="cpu_util", q=[{"field":"resource_id","value":"3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0"}], limit=1)
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3ac3ab4c-e6f0-452d-bfd3-9cb1e1a3cfe0', u'timestamp': u'2014-03-24T19:57:30', u'message_id': u'882e91fe-b38e-11e3-b621-0025b520019f', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 2.625, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Meph-001', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'a1633731e042286300316431269b96774585ecf8e2dfb38a495bf08b', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053a', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>]
>>> ceilometer.samples.list(meter_name="cpu_util", limit=1)
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3a4356d7-b877-4065-b25e-fca8e3651f30', u'timestamp': u'2014-03-24T20:02:51', u'message_id': u'47703fae-b38f-11e3-b7fa-0025b52001bf', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.049999999999999996, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Seph-002', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'348f40c5ef2be79e9aa9e1a69a48f4e9b3809661aff4c090126dd6ae', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053c', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>]
>>> ceilometer.samples.list(meter_name="cpu_util", limit=2)
[<Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'3a4356d7-b877-4065-b25e-fca8e3651f30', u'timestamp': u'2014-03-24T20:03:06', u'message_id': u'5060c822-b38f-11e3-b7fa-0025b52001bf', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.049999999999999996, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'100', u'flavor.vcpus': u'8', u'flavor.ephemeral': u'100', u'display_name': u'savanna-Seph-002', u'flavor.id': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.hadoop', u'disk_gb': u'100', u'kernel_id': u'None', u'image.id': u'af770296-859f-485a-a1bc-7a7cc5c2c385', u'flavor.ram': u'10000', u'host': u'348f40c5ef2be79e9aa9e1a69a48f4e9b3809661aff4c090126dd6ae', u'image.name': u'Vanilla Hadoop', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/af770296-859f-485a-a1bc-7a7cc5c2c385', u'cpu_number': u'8', u'flavor.disk': u'100', u'root_gb': u'0', u'name': u'instance-0000053c', u'memory_mb': u'10000', u'instance_type': u'86c60af7-e41c-4bac-8554-83a9a1f4d0dd', u'vcpus': u'8', u'image_ref': u'af770296-859f-485a-a1bc-7a7cc5c2c385'}, u'counter_type': u'gauge'}>, <Sample {u'counter_name': u'cpu_util', u'user_id': u'691bc9c39e4b420cbf3d931190cd4a06', u'resource_id': u'bdd20e18-3d41-4ddd-ba65-2c0a967cb678', u'timestamp': u'2014-03-24T20:03:06', u'message_id': u'5079ccc8-b38f-11e3-b7fa-0025b52001bf', u'source': u'openstack', u'counter_unit': u'%', u'counter_volume': 0.0, u'project_id': u'10bed47042c548958046bd1f7b944039', u'resource_metadata': {u'ephemeral_gb': u'0', u'flavor.vcpus': u'2', u'flavor.ephemeral': u'0', u'display_name': u'HiBench_DO_NOT_DELETE', u'flavor.id': u'3', u'OS-EXT-AZ:availability_zone': u'nova', u'ramdisk_id': u'None', u'flavor.name': u'm1.medium', u'disk_gb': u'40', u'kernel_id': u'None', u'image.id': 
u'b09a0b3b-5ef0-4752-9f05-68e7043e7504', u'flavor.ram': u'4096', u'host': u'348f40c5ef2be79e9aa9e1a69a48f4e9b3809661aff4c090126dd6ae', u'image.name': u'Ubuntu Precise', u'image_ref_url': u'http://controller:8774/57ee55c6b3d24d63a99f22deebde9107/images/b09a0b3b-5ef0-4752-9f05-68e7043e7504', u'cpu_number': u'2', u'flavor.disk': u'40', u'root_gb': u'40', u'name': u'instance-00000539', u'memory_mb': u'4096', u'instance_type': u'3', u'vcpus': u'2', u'image_ref': u'b09a0b3b-5ef0-4752-9f05-68e7043e7504'}, u'counter_type': u'gauge'}>]
from novaclient.v1_1 import client
nova=client.Client("admin", "ubuntu", "admin", "http://controller:35357/v2.0")
nova.flavors.list()
from novaclient.client import Client
nova = Client(1.1,"admin", "ubuntu", "admin", "http://controller:35357/v2.0")
nova.servers.list()
from novaclient.client import Client
nova = Client(1.1,"admin", "ADMIN_PASS", "admin", "http://controller:35357/v2.0")
nova.servers.list()
(VERSION, USERNAME, PASSWORD, PROJECT_ID, AUTH_URL)
DUMP:
>>> import keystoneclient.v2_0.client as ksclient
>>> auth_url = "http://192.168.255.191:35357/v2.0"
>>> username = "admin"
>>> password = "ubuntu"
>>> tenant_name = "admin"
>>> keystone = ksclient.Client(auth_url=auth_url, username=username, password=password, tenant_name=tenant_name)
>>> keystone.auth_token
u'MIINsAYJKoZIhvcNAQcCoIINoTCCDZ0CAQExCTAHBgUrDgMCGjCCDAYGCSqGSIb3DQEHAaCCC-cEggvzeyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNC0wMy0xN1QxNjo0MDo1Ni42NTY0NTAiLCAiZXhwaXJlcyI6ICIyMDE0LTAzLTE4VDE2OjQwOjU2WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogbnVsbCwgImVuYWJsZWQiOiB0cnVlLCAiaWQiOiAiNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMiLCAibmFtZSI6ICJhZG1pbiJ9fSwgInNlcnZpY2VDYXRhbG9nIjogW3siZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo4Nzc0L3YyLzQ5YjI0YTA4NzlmZjQ3ODY5ZjBkOWNmMTQ3NTU2ZjgzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo4Nzc0L3YyLzQ5YjI0YTA4NzlmZjQ3ODY5ZjBkOWNmMTQ3NTU2ZjgzIiwgImlkIjogIjA5ZGIwYTUwOGFhYjRlMWViOGRhMTY0NzVjOGJiZWViIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzQvdjIvNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZSIsICJuYW1lIjogIm5vdmEifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzYvdjIvNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzYvdjIvNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMiLCAiaWQiOiAiYmVkYzkwNGQ5MGZmNDNlY2I5MDlkNjAxODFmM2VmYTciLCAicHVibGljVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6ODc3Ni92Mi80OWIyNGEwODc5ZmY0Nzg2OWYwZDljZjE0NzU1NmY4MyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWV2MiIsICJuYW1lIjogImNpbmRlciJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6ODc3NC92MyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6ODc3NC92MyIsICJpZCI6ICI1MGQ1MTA2Nzc3MjY0MWNmOWRjMjExYzNkNzJlNDUxNCIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo4Nzc0L3YzIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogImNvbXB1dGV2MyIsICJuYW1lIjogIm5vdmEifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjMzMzMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjMzMzMiLCAiaWQiOi
AiMmVhYTAzNDRjNzMzNDlkZTljYmFjMWU5NTIzNTQ0Y2QiLCAicHVibGljVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6MzMzMyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJzMyIsICJuYW1lIjogInMzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo5MjkyIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo5MjkyIiwgImlkIjogIjBiMjhiNWVlNTg3YzQ2N2Q4ODMwNjc1YTNkNjBlODc5IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjkyOTIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiaW1hZ2UiLCAibmFtZSI6ICJnbGFuY2UifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzYvdjEvNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzYvdjEvNDliMjRhMDg3OWZmNDc4NjlmMGQ5Y2YxNDc1NTZmODMiLCAiaWQiOiAiODViODRjMTJlYjQyNDVkZDk2MmZhZWEyNDg2ODM1ZTciLCAicHVibGljVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6ODc3Ni92MS80OWIyNGEwODc5ZmY0Nzg2OWYwZDljZjE0NzU1NmY4MyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWUiLCAibmFtZSI6ICJjaW5kZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzMvc2VydmljZXMvQWRtaW4iLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjg3NzMvc2VydmljZXMvQ2xvdWQiLCAiaWQiOiAiNWY5NDg1ZjlhYTk5NGQ4M2I4MTg2MWM0N2EyZDE5NTciLCAicHVibGljVVJMIjogImh0dHA6Ly8xOTIuMTY4LjI1NS4xOTE6ODc3My9zZXJ2aWNlcy9DbG91ZCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJlYzIiLCAibmFtZSI6ICJlYzIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTkyLjE2OC4yNTUuMTkxOjUwMDAvdjIuMCIsICJpZCI6ICIzM2QyZGE0ZTliOWU0MGExOTY1YThkMGQ4NmY1MTQyOCIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzE5Mi4xNjguMjU1LjE5MTo1MDAwL3YyLjAifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiaWRlbnRpdHkiLCAibmFtZSI6ICJrZXlzdG9uZSJ9XSwgInVzZXIiOiB7InVzZXJuYW1lIjogImFkbWluIiwgInJvbGVzX2xpbmtzIjogW10sICJpZCI6ICJkMDEwYjc2NTQ3Mjg0M2E1OTY5MmU1MDY0MmVmZmM2YiIsICJyb2
xlcyI6IFt7Im5hbWUiOiAiYWRtaW4ifV0sICJuYW1lIjogImFkbWluIn0sICJtZXRhZGF0YSI6IHsiaXNfYWRtaW4iOiAwLCAicm9sZXMiOiBbIjYyNzEzYzk1ZGEzMDQwNjBhYjRkYWNhZGI3MmE1N2ZkIl19fX0xggGBMIIBfQIBATBcMFcxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVVbnNldDEOMAwGA1UEBwwFVW5zZXQxDjAMBgNVBAoMBVVuc2V0MRgwFgYDVQQDDA93d3cuZXhhbXBsZS5jb20CAQEwBwYFKw4DAhowDQYJKoZIhvcNAQEBBQAEggEAlU5wvh7RKqiBtweHRf5WL2Mdd3FpKH3mzyYjmQNSnq3T1qdLjw9OZqTXoPD34guTrfT+9wyZUI83gEd0jVB8jW754iAP5sFXeEZfY2zl7R20duBdNwYtYecE-VpAjLHguNL5vSNNffrqDwX-g--OVdGzDfCItRthCrR1e4Xlsc1AIlVHfL3GkGllp6s+d06PkLrT72hCcqq7+8uA97eCa32aLDnrHTp-ZZbWAWk2m5jjb-iMp7IiM3lSjKSrx-bzuK4lkrWzXYgpbDMExeU669hLv39OlqaPp+TkumH0f6wBjuPCufvIkoT7OJynWAWNeliHoWRKtAgOA2PUeh6zeg=='
>>> from novaclient import client as novaclient
>>> nova = novaclient.Client("1.1", auth_url=auth_url, username=username, password=password, tenant_name=tenant_name)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/opt/stack/python-novaclient/novaclient/client.py", line 506, in Client
return client_class(*args, **kwargs)
TypeError: __init__() got an unexpected keyword argument 'tenant_name'
>>> nova = novaclient.Client("1.1", username=username, api_key=password, auth_url=auth_url, project_id=tenant_name)
>>> nova.servers.list()
[]
>>> nova.servers.list()
[<Server: blarg-9a77ff67-53e8-4abe-a3fe-9a77405d03c8>, <Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]
>>> server = nova.servers.find(name="blarg-9a77ff67-53e8-4abe-a3fe-9a77405d03c8")
>>> server.delete()
>>> nova.servers.list.details()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'function' object has no attribute 'details'
>>> server = nova.servers.find(name="blarg-54ba2260-dde5-4953-a135-01b81b80f96a")
>>> server.diagnostics()
(<Response [200]>, None)
>>> server.networks()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'dict' object is not callable
>>> server.list_security_group()
[<SecurityGroup description=default, id=1, name=default, rules=[], tenant_id=49b24a0879ff47869f0d9cf147556f83>]
>>> nova.servers.list(detailed=True)
[<Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]
>>> nova.servers.list(detailed="True")
[<Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]
>>> server.list(detailed="True")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/opt/stack/python-novaclient/novaclient/openstack/common/apiclient/base.py", line 464, in __getattr__
raise AttributeError(k)
AttributeError: list
>>> str(nova.servers.list())
'[<Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]'
>>> nova.servers.list(detailed=True)
[<Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]
>>> server = nova.servers.list(detailed=True)
>>> print(server)
[<Server: blarg-e4b81f1e-a76d-403b-a72c-b2727e252c36>, <Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>]
>>> print(server[1])
<Server: blarg-54ba2260-dde5-4953-a135-01b81b80f96a>
>>> print(vars(server[1]))
{'OS-EXT-STS:task_state': None, 'addresses': {u'private': [{u'OS-EXT-IPS-MAC:mac_addr': u'fa:16:3e:59:90:a5', u'version': 4, u'addr': u'10.0.0.4', u'OS-EXT-IPS:type': u'fixed'}]}, 'links': [{u'href': u'http://192.168.255.191:8774/v2/49b24a0879ff47869f0d9cf147556f83/servers/54ba2260-dde5-4953-a135-01b81b80f96a', u'rel': u'self'}, {u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/servers/54ba2260-dde5-4953-a135-01b81b80f96a', u'rel': u'bookmark'}], 'image': {u'id': u'c1faa392-1a44-4ae1-aac1-cec18184d011', u'links': [{u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/images/c1faa392-1a44-4ae1-aac1-cec18184d011', u'rel': u'bookmark'}]}, 'manager': <novaclient.v1_1.servers.ServerManager object at 0x2c6b290>, 'OS-EXT-STS:vm_state': u'stopped', 'OS-EXT-SRV-ATTR:instance_name': u'instance-00000001', 'OS-SRV-USG:launched_at': u'2014-03-17T17:16:07.000000', 'flavor': {u'id': u'84', u'links': [{u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/flavors/84', u'rel': u'bookmark'}]}, 'id': u'54ba2260-dde5-4953-a135-01b81b80f96a', 'security_groups': [{u'name': u'default'}], 'user_id': u'd010b765472843a59692e50642effc6b', 'OS-DCF:diskConfig': u'MANUAL', 'accessIPv4': u'', 'accessIPv6': u'', 'OS-EXT-STS:power_state': 4, 'OS-EXT-AZ:availability_zone': u'nova', 'config_drive': u'', 'status': u'SHUTOFF', 'updated': u'2014-03-17T17:17:19Z', 'hostId': u'38648e03ba0f2467f3f31f6397289dd219c364264d8b9c905fe63fb5', 'OS-EXT-SRV-ATTR:host': u'ubuntu', 'OS-SRV-USG:terminated_at': None, 'key_name': None, 'OS-EXT-SRV-ATTR:hypervisor_hostname': u'ubuntu', 'name': u'blarg-54ba2260-dde5-4953-a135-01b81b80f96a', 'created': u'2014-03-17T17:15:24Z', 'tenant_id': u'49b24a0879ff47869f0d9cf147556f83', 'os-extended-volumes:volumes_attached': [], '_info': {u'OS-EXT-STS:task_state': None, u'addresses': {u'private': [{u'OS-EXT-IPS-MAC:mac_addr': u'fa:16:3e:59:90:a5', u'version': 4, u'addr': u'10.0.0.4', u'OS-EXT-IPS:type': u'fixed'}]}, 
u'links': [{u'href': u'http://192.168.255.191:8774/v2/49b24a0879ff47869f0d9cf147556f83/servers/54ba2260-dde5-4953-a135-01b81b80f96a', u'rel': u'self'}, {u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/servers/54ba2260-dde5-4953-a135-01b81b80f96a', u'rel': u'bookmark'}], u'image': {u'id': u'c1faa392-1a44-4ae1-aac1-cec18184d011', u'links': [{u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/images/c1faa392-1a44-4ae1-aac1-cec18184d011', u'rel': u'bookmark'}]}, u'OS-EXT-STS:vm_state': u'stopped', u'OS-EXT-SRV-ATTR:instance_name': u'instance-00000001', u'OS-SRV-USG:launched_at': u'2014-03-17T17:16:07.000000', u'flavor': {u'id': u'84', u'links': [{u'href': u'http://192.168.255.191:8774/49b24a0879ff47869f0d9cf147556f83/flavors/84', u'rel': u'bookmark'}]}, u'id': u'54ba2260-dde5-4953-a135-01b81b80f96a', u'security_groups': [{u'name': u'default'}], u'user_id': u'd010b765472843a59692e50642effc6b', u'OS-DCF:diskConfig': u'MANUAL', u'accessIPv4': u'', u'accessIPv6': u'', u'OS-EXT-STS:power_state': 4, u'OS-EXT-AZ:availability_zone': u'nova', u'config_drive': u'', u'status': u'SHUTOFF', u'updated': u'2014-03-17T17:17:19Z', u'hostId': u'38648e03ba0f2467f3f31f6397289dd219c364264d8b9c905fe63fb5', u'OS-EXT-SRV-ATTR:host': u'ubuntu', u'OS-SRV-USG:terminated_at': None, u'key_name': None, u'OS-EXT-SRV-ATTR:hypervisor_hostname': u'ubuntu', u'name': u'blarg-54ba2260-dde5-4953-a135-01b81b80f96a', u'created': u'2014-03-17T17:15:24Z', u'tenant_id': u'49b24a0879ff47869f0d9cf147556f83', u'os-extended-volumes:volumes_attached': [], u'metadata': {}}, 'metadata': {}, '_loaded': True}
|
cherokee/webserver
|
refs/heads/master
|
developers.py
|
5
|
# -*- coding: utf-8 -*-
# Maps a developer's short account name to a "Full Name <email>" contact
# string.  Values contain non-ASCII characters, hence the UTF-8 coding
# declaration at the top of this file.
DEVELOPERS = {
    'alobbs': "Alvaro Lopez Ortega <alvaro@alobbs.com>",
    'skarcha': "Antonio Perez <aperez@skarcha.com>",
    'ion': "Jonathan Hernandez <ion@suavizado.com>",
    'taher': "Taher Shihadeh <taher@unixwars.com>",
    'robertounbit': "Roberto De Ioris <roberto@unbit.it>",
    'gefire': "李炅 <lijiong1986@126.com>",
    'skinkie': "Stefan de Konink <stefan@konink.de>",
    'adefacc': "A.D.F. <adefacc@tin.it>",
    'cesar': "Cesar Fernandez Gago <cesar@pk2.org>",
    'sberlotto': "Sérgio H. Berlotto Jr <sergio.berlotto@gmail.com>",
    'pigmej': "Jędrzej Nowak <me@pigmej.eu>",
    'rodrigo': "Rodrigo Fernandez-Vizarra <rfvizarra@gmail.com>",
    'pubcrawler': "Paul Batis <pubcrawler.com@gmail.com>",
    'borkason': "Daniel Niccoli <daniel.niccoli@gmail.com>",
}
|
leppa/home-assistant
|
refs/heads/dev
|
homeassistant/components/n26/switch.py
|
7
|
"""Support for N26 switches."""
import logging
from homeassistant.components.switch import SwitchDevice
from . import DEFAULT_SCAN_INTERVAL, DOMAIN
from .const import CARD_STATE_ACTIVE, CARD_STATE_BLOCKED, DATA
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = DEFAULT_SCAN_INTERVAL
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the N26 switch platform.

    Creates one block/unblock switch entity per card, for every N26
    account stored under hass.data[DOMAIN][DATA].  Does nothing unless
    invoked via discovery.
    """
    if discovery_info is None:
        # Platform is only set up through discovery by the N26 component.
        return
    add_entities(
        [
            N26CardSwitch(api_data, card)
            for api_data in hass.data[DOMAIN][DATA]
            for card in api_data.cards
        ]
    )
class N26CardSwitch(SwitchDevice):
    """Representation of a N26 card block/unblock switch.

    The switch is "on" while the card is active (unblocked) and "off"
    while it is blocked; toggling it calls the N26 API accordingly.
    """

    def __init__(self, api_data, card: dict):
        """Initialize the N26 card block/unblock switch.

        api_data: per-account N26 data holder exposing `.api`,
            `.update_cards()` and `.card()` (see this integration's
            component module).
        card: dict describing one card; this class reads/writes its
            "id" and "status" keys.
        """
        self._data = api_data
        self._card = card

    @property
    def unique_id(self):
        """Return the unique ID of the entity (the card id)."""
        return self._card["id"]

    @property
    def name(self) -> str:
        """Friendly name of the sensor."""
        return "card_{}".format(self._card["id"])

    @property
    def is_on(self):
        """Return true if the card is active (not blocked)."""
        return self._card["status"] == CARD_STATE_ACTIVE

    def turn_on(self, **kwargs):
        """Unblock the card, making it usable again."""
        # Note: "on" means active/unblocked, so turn_on unblocks.
        self._data.api.unblock_card(self._card["id"])
        # Update local state optimistically so the UI reflects it at once.
        self._card["status"] = CARD_STATE_ACTIVE

    def turn_off(self, **kwargs):
        """Block the card."""
        self._data.api.block_card(self._card["id"])
        self._card["status"] = CARD_STATE_BLOCKED

    def update(self):
        """Refresh this card's data from the N26 API."""
        self._data.update_cards()
        # Fall back to the previous card dict if the card disappeared.
        self._card = self._data.card(self._card["id"], self._card)
|
OliverCole/ZeroNet
|
refs/heads/master
|
src/Test/TestRateLimit.py
|
3
|
import time
import gevent
from util import RateLimit
# Time is around limit +/- 0.05 sec
def around(t, limit):
    """Return True if measured time *t* is within +/-0.05s of *limit*."""
    return limit - 0.05 <= t <= limit + 0.05
class ExampleClass(object):
    """Call-counting helper used as the rate-limited target in tests.

    Tracks how many times count() ran (`counted`) and the tag passed to
    the most recent call (`last_called`).
    """

    def __init__(self):
        self.counted = 0        # number of times count() has run
        self.last_called = None  # `back` argument of the latest call

    def count(self, back="counted"):
        """Record one invocation, remember *back*, and return it."""
        self.counted = self.counted + 1
        self.last_called = back
        return back
class TestRateLimit:
    """Tests for util.RateLimit's synchronous and async call throttling.

    NOTE(review): these tests assert wall-clock timings with a +/-0.05s
    tolerance (see around()), so they may be flaky on heavily loaded
    machines.
    """

    def testCall(self):
        """RateLimit.call: first call runs instantly, repeats are delayed."""
        obj1 = ExampleClass()
        obj2 = ExampleClass()
        s = time.time()
        assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
        assert around(time.time() - s, 0.0)  # First allow to call instantly
        assert obj1.counted == 1
        # Call again
        assert not RateLimit.isAllowed("counting", 0.1)
        assert RateLimit.isAllowed("something else", 0.1)
        assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
        assert around(time.time() - s, 0.1)  # Delays second call within interval
        assert obj1.counted == 2
        time.sleep(0.1)  # Wait the cooldown time
        # Call 3 times async
        s = time.time()
        assert obj2.counted == 0
        threads = [
            gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)),  # Instant
            gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)),  # 0.1s delay
            gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count))  # 0.2s delay
        ]
        gevent.joinall(threads)
        # Every queued call eventually ran and returned the func's value.
        assert [thread.value for thread in threads] == ["counted", "counted", "counted"]
        assert around(time.time() - s, 0.2)
        # Wait 0.1s cooldown
        assert not RateLimit.isAllowed("counting", 0.1)
        time.sleep(0.11)
        assert RateLimit.isAllowed("counting", 0.1)
        # No queue = instant again
        s = time.time()
        assert RateLimit.isAllowed("counting", 0.1)
        assert RateLimit.call("counting", allowed_again=0.1, func=obj2.count) == "counted"
        assert around(time.time() - s, 0.0)
        assert obj2.counted == 4

    def testCallAsync(self):
        """RateLimit.callAsync: queued calls collapse; only the last one runs."""
        obj1 = ExampleClass()
        obj2 = ExampleClass()
        s = time.time()
        RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #1").join()
        assert obj1.counted == 1  # First instant
        assert around(time.time() - s, 0.0)
        # After that the calls delayed
        s = time.time()
        t1 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #2")  # Dumped by the next call
        time.sleep(0.03)
        t2 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #3")  # Dumped by the next call
        time.sleep(0.03)
        t3 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #4")  # Will be called
        assert obj1.counted == 1  # Delay still in progress: Not called yet
        t3.join()
        assert t3.value == "call #4"
        assert around(time.time() - s, 0.1)
        # Only the last one called
        assert obj1.counted == 2
        assert obj1.last_called == "call #4"
        # Just called, not allowed again
        assert not RateLimit.isAllowed("counting async", 0.1)
        s = time.time()
        t4 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join()
        assert obj1.counted == 3
        assert around(time.time() - s, 0.1)
        assert not RateLimit.isAllowed("counting async", 0.1)
        time.sleep(0.11)
        assert RateLimit.isAllowed("counting async", 0.1)
|
lexus42/2015cd_midterm2
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/weakref.py
|
769
|
"""Weak reference support for Python.
This module is an implementation of PEP 205:
http://www.python.org/dev/peps/pep-0205/
"""
# Naming convention: Variables named "wr" are weak reference objects;
# they are called this instead of "ref" to avoid name collisions with
# the module-global ref() function imported from _weakref.
from _weakref import (
getweakrefcount,
getweakrefs,
ref,
proxy,
CallableProxyType,
ProxyType,
ReferenceType)
from _weakrefset import WeakSet, _IterationGuard
import collections # Import after _weakref to avoid circular import.
# Convenience tuple for isinstance() checks against either proxy flavour.
ProxyTypes = (ProxyType, CallableProxyType)

# Public API of this module.
__all__ = ["ref", "proxy", "getweakrefcount", "getweakrefs",
           "WeakKeyDictionary", "ReferenceType", "ProxyType",
           "CallableProxyType", "ProxyTypes", "WeakValueDictionary",
           "WeakSet"]
class WeakValueDictionary(collections.MutableMapping):
    """Mapping class that references values weakly.

    Entries in the dictionary will be discarded when no strong
    reference to the value exists anymore
    """
    # We inherit the constructor without worrying about the input
    # dictionary; since it uses our .update() method, we get the right
    # checks (if the other dictionary is a WeakValueDictionary,
    # objects are unwrapped on the way out, and we always wrap on the
    # way in).

    def __init__(self, *args, **kw):
        # Weakref callback fired when a stored value dies.  It only
        # holds a weakref to self, so the dictionary itself can be
        # garbage-collected; removals during iteration are deferred
        # into _pending_removals (see _IterationGuard).
        def remove(wr, selfref=ref(self)):
            self = selfref()
            if self is not None:
                if self._iterating:
                    self._pending_removals.append(wr.key)
                else:
                    del self.data[wr.key]
        self._remove = remove
        # A list of keys to be removed
        self._pending_removals = []
        self._iterating = set()
        # NOTE(review): the local alias `d` is never used afterwards;
        # kept as-is from upstream.
        self.data = d = {}
        self.update(*args, **kw)

    def _commit_removals(self):
        """Flush keys whose removal was deferred during iteration."""
        l = self._pending_removals
        d = self.data
        # We shouldn't encounter any KeyError, because this method should
        # always be called *before* mutating the dict.
        while l:
            del d[l.pop()]

    def __getitem__(self, key):
        # Dereference the stored weakref; a dead value is treated as missing.
        o = self.data[key]()
        if o is None:
            raise KeyError(key)
        else:
            return o

    def __delitem__(self, key):
        if self._pending_removals:
            self._commit_removals()
        del self.data[key]

    def __len__(self):
        # Subtract entries already known dead but not yet removed.
        return len(self.data) - len(self._pending_removals)

    def __contains__(self, key):
        try:
            o = self.data[key]()
        except KeyError:
            return False
        # A dead weakref means the entry is effectively absent.
        return o is not None

    def __repr__(self):
        return "<WeakValueDictionary at %s>" % id(self)

    def __setitem__(self, key, value):
        if self._pending_removals:
            self._commit_removals()
        # KeyedRef stores the key on the weakref so the shared `remove`
        # callback knows which entry to delete.
        self.data[key] = KeyedRef(value, self._remove, key)

    def copy(self):
        """Return a shallow copy containing only still-live values."""
        new = WeakValueDictionary()
        for key, wr in self.data.items():
            o = wr()
            if o is not None:
                new[key] = o
        return new

    __copy__ = copy

    def __deepcopy__(self, memo):
        # Keys are deep-copied; values stay weakly referenced.
        from copy import deepcopy
        new = self.__class__()
        for key, wr in self.data.items():
            o = wr()
            if o is not None:
                new[deepcopy(key, memo)] = o
        return new

    def get(self, key, default=None):
        """Return the live value for *key*, or *default* if absent/dead."""
        try:
            wr = self.data[key]
        except KeyError:
            return default
        else:
            o = wr()
            if o is None:
                # This should only happen when the referent died but the
                # removal callback has not run yet.
                return default
            else:
                return o

    def items(self):
        """Yield (key, value) pairs for entries whose value is still alive."""
        with _IterationGuard(self):
            for k, wr in self.data.items():
                v = wr()
                if v is not None:
                    yield k, v

    def keys(self):
        """Yield keys whose value is still alive."""
        with _IterationGuard(self):
            for k, wr in self.data.items():
                if wr() is not None:
                    yield k

    __iter__ = keys

    def itervaluerefs(self):
        """Return an iterator that yields the weak references to the values.

        The references are not guaranteed to be 'live' at the time
        they are used, so the result of calling the references needs
        to be checked before being used.  This can be used to avoid
        creating references that will cause the garbage collector to
        keep the values around longer than needed.
        """
        with _IterationGuard(self):
            for wr in self.data.values():
                yield wr

    def values(self):
        """Yield the still-live values."""
        with _IterationGuard(self):
            for wr in self.data.values():
                obj = wr()
                if obj is not None:
                    yield obj

    def popitem(self):
        """Remove and return an arbitrary (key, live value) pair."""
        if self._pending_removals:
            self._commit_removals()
        # Loop: silently skip entries whose value has already died.
        while True:
            key, wr = self.data.popitem()
            o = wr()
            if o is not None:
                return key, o

    def pop(self, key, *args):
        """Remove *key* and return its live value, or the optional default."""
        if self._pending_removals:
            self._commit_removals()
        try:
            o = self.data.pop(key)()
        except KeyError:
            if args:
                return args[0]
            raise
        if o is None:
            # Entry existed but the value died: behave as if missing.
            raise KeyError(key)
        else:
            return o

    def setdefault(self, key, default=None):
        try:
            wr = self.data[key]
        except KeyError:
            if self._pending_removals:
                self._commit_removals()
            self.data[key] = KeyedRef(default, self._remove, key)
            return default
        else:
            return wr()

    def update(self, dict=None, **kwargs):
        # NOTE: parameter name `dict` shadows the builtin; kept for
        # backward compatibility with the upstream signature.
        if self._pending_removals:
            self._commit_removals()
        d = self.data
        if dict is not None:
            if not hasattr(dict, "items"):
                # Accept any iterable of pairs by converting to a dict.
                dict = type({})(dict)
            for key, o in dict.items():
                d[key] = KeyedRef(o, self._remove, key)
        if len(kwargs):
            self.update(kwargs)

    def valuerefs(self):
        """Return a list of weak references to the values.

        The references are not guaranteed to be 'live' at the time
        they are used, so the result of calling the references needs
        to be checked before being used.  This can be used to avoid
        creating references that will cause the garbage collector to
        keep the values around longer than needed.
        """
        return list(self.data.values())
class KeyedRef(ref):
    """Specialized reference that includes a key corresponding to the value.

    This is used in the WeakValueDictionary to avoid having to create
    a function object for each key stored in the mapping.  A shared
    callback object can use the 'key' attribute of a KeyedRef instead
    of getting a reference to the key from an enclosing scope.
    """

    # Only the extra 'key' attribute on top of ref; no per-instance __dict__.
    __slots__ = "key",

    def __new__(type, ob, callback, key):
        # The key is attached in __new__; __init__ below only forwards
        # (ob, callback) to the base ref initializer.
        self = ref.__new__(type, ob, callback)
        self.key = key
        return self

    def __init__(self, ob, callback, key):
        # Deliberately drops `key`: ref's initializer accepts only
        # (ob, callback).
        super().__init__(ob, callback)
class WeakKeyDictionary(collections.MutableMapping):
""" Mapping class that references keys weakly.
Entries in the dictionary will be discarded when there is no
longer a strong reference to the key. This can be used to
associate additional data with an object owned by other parts of
an application without adding attributes to those objects. This
can be especially useful with objects that override attribute
accesses.
"""
def __init__(self, dict=None):
self.data = {}
def remove(k, selfref=ref(self)):
self = selfref()
if self is not None:
if self._iterating:
self._pending_removals.append(k)
else:
del self.data[k]
self._remove = remove
# A list of dead weakrefs (keys to be removed)
self._pending_removals = []
self._iterating = set()
if dict is not None:
self.update(dict)
def _commit_removals(self):
# NOTE: We don't need to call this method before mutating the dict,
# because a dead weakref never compares equal to a live weakref,
# even if they happened to refer to equal objects.
# However, it means keys may already have been removed.
l = self._pending_removals
d = self.data
while l:
try:
del d[l.pop()]
except KeyError:
pass
def __delitem__(self, key):
del self.data[ref(key)]
def __getitem__(self, key):
return self.data[ref(key)]
def __len__(self):
return len(self.data) - len(self._pending_removals)
def __repr__(self):
return "<WeakKeyDictionary at %s>" % id(self)
def __setitem__(self, key, value):
self.data[ref(key, self._remove)] = value
def copy(self):
new = WeakKeyDictionary()
for key, value in self.data.items():
o = key()
if o is not None:
new[o] = value
return new
__copy__ = copy
def __deepcopy__(self, memo):
from copy import deepcopy
new = self.__class__()
for key, value in self.data.items():
o = key()
if o is not None:
new[o] = deepcopy(value, memo)
return new
def get(self, key, default=None):
return self.data.get(ref(key),default)
def __contains__(self, key):
try:
wr = ref(key)
except TypeError:
return False
return wr in self.data
def items(self):
with _IterationGuard(self):
for wr, value in self.data.items():
key = wr()
if key is not None:
yield key, value
def keys(self):
with _IterationGuard(self):
for wr in self.data:
obj = wr()
if obj is not None:
yield obj
__iter__ = keys
def values(self):
with _IterationGuard(self):
for wr, value in self.data.items():
if wr() is not None:
yield value
    def keyrefs(self):
        """Return a list of weak references to the keys.
        The references are not guaranteed to be 'live' at the time
        they are used, so the result of calling the references needs
        to be checked before being used. This can be used to avoid
        creating references that will cause the garbage collector to
        keep the keys around longer than needed.
        """
        # Snapshot of the internal weakref keys; dead refs are not filtered.
        return list(self.data)
def popitem(self):
while True:
key, value = self.data.popitem()
o = key()
if o is not None:
return o, value
    def pop(self, key, *args):
        # *args forwards an optional default, mirroring dict.pop()'s
        # raise-or-default behavior.
        return self.data.pop(ref(key), *args)
    def setdefault(self, key, default=None):
        # The ref is created with the removal callback so a key inserted
        # here gets cleaned up on collection like any other key.
        return self.data.setdefault(ref(key, self._remove),default)
    def update(self, dict=None, **kwargs):
        """Update from a mapping (or iterable of pairs) and/or keyword args."""
        d = self.data
        if dict is not None:
            if not hasattr(dict, "items"):
                # Accept any iterable of key/value pairs by coercing it to a
                # plain dict first.
                dict = type({})(dict)
            for key, value in dict.items():
                d[ref(key, self._remove)] = value
        if len(kwargs):
            # Recurse once: kwargs is a plain dict and hits the branch above.
            self.update(kwargs)
|
semonte/intellij-community
|
refs/heads/master
|
python/testData/resolve/PercentPositionalArgs.py
|
38
|
"in percent string it's %<ref>s argument, but i want to pass %d arguments" % ("string", 1423)
|
dholbach/snapcraft
|
refs/heads/master
|
snaps_tests/tour_tests/10-SNAPS/test_02_service_confined.py
|
11
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import snaps_tests
class ServiceConfinedTestCase(snaps_tests.SnapsTestCase):
    """Build, install and verify the confined hello-world service snap."""

    snap_content_dir = '02-service-confined'

    def test_hello(self):
        built_snap = self.build_snap(self.snap_content_dir)
        self.install_snap(built_snap, 'hello-world-service', '0.1')
        self.assert_service_running('hello-world-service', 'hello-service')
|
simon-andrews/django-resource-scheduler
|
refs/heads/master
|
resource_scheduler/apps.py
|
1
|
from django.apps import AppConfig
class ResourceSchedulerConfig(AppConfig):
    """Django application configuration for the resource scheduler app."""
    # Dotted module path of the app, as listed in INSTALLED_APPS.
    name = 'resource_scheduler'
    # Human-readable name shown e.g. in the Django admin.
    verbose_name = 'Resource Scheduler'
|
Eficent/stock-logistics-workflow
|
refs/heads/8.0
|
stock_picking_invoice_link/tests/__init__.py
|
8
|
# -*- coding: utf-8 -*-
# © 2016 Oihane Crucelaegui - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import test_stock_picking_invoice_link
|
Rewardcoin/p2pool-Rewardcoin
|
refs/heads/master
|
SOAPpy/Errors.py
|
294
|
"""
################################################################################
#
# SOAPpy - Cayce Ullman (cayce@actzero.com)
# Brian Matthews (blm@actzero.com)
# Gregory Warnes (Gregory.R.Warnes@Pfizer.com)
# Christopher Blunck (blunck@gst.com)
#
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
ident = '$Id: Errors.py 921 2005-02-15 16:32:23Z warnes $'
from version import __version__
import exceptions
################################################################################
# Exceptions
################################################################################
class Error(exceptions.Exception):
    """Base class for SOAPpy errors, carrying a human-readable message."""
    def __init__(self, msg):
        self.msg = msg
    def __str__(self):
        return "<Error : %s>" % self.msg
    __repr__ = __str__
    def __call__(self):
        # Bug fix: the original returned the unbound name `msg`, which
        # raised NameError whenever an instance was called.
        return (self.msg,)
class RecursionError(Error):
    # Presumably raised when (de)serialization recurses too deeply -- confirm
    # at call sites.  NOTE(review): shadows the Python 3 builtin
    # RecursionError; assumed intentional for this Python 2 era module.
    pass
class UnknownTypeError(Error):
    # Presumably raised when a SOAP type cannot be mapped -- confirm at
    # call sites.
    pass
class HTTPError(Error):
    """Indicates an HTTP protocol error, carrying status code and message."""
    # indicates an HTTP protocol error
    def __init__(self, code, msg):
        self.code = code
        self.msg = msg
    def __str__(self):
        return "<HTTPError %s %s>" % (self.code, self.msg)
    __repr__ = __str__
    def __call__(self):
        # Bug fix: the method was misspelled `__call___` (three trailing
        # underscores), so calling an instance raised TypeError instead of
        # returning the (code, msg) pair.
        return (self.code, self.msg, )
class UnderflowError(exceptions.ArithmeticError):
    # Arithmetic underflow; deliberately rooted at ArithmeticError rather
    # than this module's Error base.
    pass
|
nbargnesi/proxme
|
refs/heads/master
|
proxme/lib.py
|
1
|
# coding: utf-8
"""Lib module."""
|
berryny/socialcyclingnyc
|
refs/heads/master
|
profiles/openoutreach/libraries/openlayers/tools/minimize.py
|
297
|
# Minimal Python Minimizer
# Copyright 2008, Christopher Schmidt
# Released under the MIT License
#
# Taken from: http://svn.crschmidt.net/personal/python/minimize.py
# $Id: minimize.py 6 2008-01-03 06:33:35Z crschmidt $
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import re
def strip_comments_helper(data):
    """Remove every /* ... */ comment plus the whitespace around it."""
    comment_re = re.compile(r'[\s]*/\*.*?\*/[\s]*', re.DOTALL)
    return comment_re.sub('', data)
def minimize(data, exclude=None):
    """Run every registered *_helper compression pass over *data*.

    Any module-level callable whose name ends in "_helper" is treated as a
    compression pass taking a string and returning a (smaller) string.
    A pass can be skipped by listing its base name (the name without the
    "_helper" suffix) in *exclude*.
    """
    # Fix: use .items() instead of the Python-2-only .iteritems(); the two
    # behave identically here, and .items() also runs on Python 3.
    for key, item in globals().items():
        if key.endswith("_helper"):
            func_key = key[:-7]
            if not exclude or func_key not in exclude:
                data = item(data)
    return data
if __name__ == "__main__":
    import sys
    # Parenthesized print of a single argument behaves identically on
    # Python 2 and Python 3 (the bare `print x` statement is py2-only).
    print(minimize(open(sys.argv[1]).read()))
|
torchingloom/edx-platform
|
refs/heads/select/release
|
common/djangoapps/student/migrations/0030_auto__chg_field_anonymoususerid_anonymous_user_id.py
|
167
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Widen AnonymousUserId.anonymous_user_id from 16 to 32 characters.

    Pre-existing 16-character ids are moved into a hand-made archive table
    (see the hand-coded SQL in forwards) and deleted from the live table.
    """
    def forwards(self, orm):
        """Apply: widen the column, then archive and purge 16-char ids."""
        # Changing field 'AnonymousUserId.anonymous_user_id'
        db.alter_column('student_anonymoususerid', 'anonymous_user_id', self.gf('django.db.models.fields.CharField')(unique=True, max_length=32))
        # THIS SQL WAS HAND-CODED
        db.execute("""
            CREATE TABLE student_anonymoususerid_temp_archive
            AS SELECT * FROM student_anonymoususerid WHERE LENGTH(anonymous_user_id) = 16
        """)
        db.execute("""
            DELETE FROM student_anonymoususerid
            WHERE LENGTH(anonymous_user_id) = 16
        """)
    def backwards(self, orm):
        """Revert: shrink the column back and drop the archive table.

        NOTE(review): rows moved into the archive table by forwards() are
        dropped here rather than restored -- confirm that is intentional.
        """
        # Changing field 'AnonymousUserId.anonymous_user_id'
        db.alter_column('student_anonymoususerid', 'anonymous_user_id', self.gf('django.db.models.fields.CharField')(max_length=16, unique=True))
        db.execute("DROP TABLE student_anonymoususerid_temp_archive")
    # South "frozen ORM" model definitions: generated code, kept verbatim.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'student.anonymoususerid': {
            'Meta': {'object_name': 'AnonymousUserId'},
            'anonymous_user_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'student.courseenrollment': {
            'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'student.courseenrollmentallowed': {
            'Meta': {'unique_together': "(('email', 'course_id'),)", 'object_name': 'CourseEnrollmentAllowed'},
            'auto_enroll': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'student.pendingemailchange': {
            'Meta': {'object_name': 'PendingEmailChange'},
            'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'student.pendingnamechange': {
            'Meta': {'object_name': 'PendingNameChange'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'student.registration': {
            'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
            'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'student.userprofile': {
            'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
            'allow_certificate': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
            'gender': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
            'goals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'level_of_education': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'mailing_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
            'year_of_birth': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
        },
        'student.userstanding': {
            'Meta': {'object_name': 'UserStanding'},
            'account_status': ('django.db.models.fields.CharField', [], {'max_length': '31', 'blank': 'True'}),
            'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'standing_last_changed_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'standing'", 'unique': 'True', 'to': "orm['auth.User']"})
        },
        'student.usertestgroup': {
            'Meta': {'object_name': 'UserTestGroup'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
        }
    }
    complete_apps = ['student']
|
microcom/odoo
|
refs/heads/9.0
|
addons/account/tests/test_manual_reconciliation.py
|
47
|
from openerp.addons.account.tests.account_test_classes import AccountingTestCase
class TestManualReconciliation(AccountingTestCase):
    """Placeholder suite for manual reconciliation; all cases are stubs."""
    def test_reconciliation_proposition(self):
        # TODO: implement the reconciliation-proposition scenario.
        pass
    def test_full_reconcile(self):
        # TODO: implement the full-reconciliation scenario.
        pass
    def test_partial_reconcile(self):
        # TODO: implement the partial-reconciliation scenario.
        pass
    def test_reconcile_with_write_off(self):
        # TODO: implement the reconcile-with-write-off scenario.
        pass
|
sthirugn/robottelo
|
refs/heads/master
|
robottelo/cli/settings.py
|
8
|
# -*- encoding: utf-8 -*-
"""
Usage::
hammer settings [OPTIONS] SUBCOMMAND [ARG] ...
Parameters::
SUBCOMMAND subcommand
[ARG] ... subcommand arguments
Subcommands::
list List all settings
set Update a setting
"""
from robottelo.cli.base import Base
class Settings(Base):
    """Manipulates Foreman's settings."""

    command_base = 'settings'

    @classmethod
    def set(cls, options=None):
        """Update a setting"""
        cls.command_sub = 'set'
        command = cls._construct_command(options)
        return cls.execute(command)
|
thnee/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/fortios/fortios_system_replacemsg_utm.py
|
13
|
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_replacemsg_utm
short_description: Replacement messages in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify system_replacemsg feature and utm category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
system_replacemsg_utm:
description:
- Replacement messages.
default: null
type: dict
suboptions:
buffer:
description:
- Message string.
type: str
format:
description:
- Format flag.
type: str
choices:
- none
- text
- html
- wml
header:
description:
- Header flag.
type: str
choices:
- none
- http
- 8bit
msg_type:
description:
- Message type.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Replacement messages.
fortios_system_replacemsg_utm:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
system_replacemsg_utm:
buffer: "<your_own_value>"
format: "none"
header: "none"
msg_type: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Open a session on *fos* using host/credentials taken from *data*."""
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']
    fos.debug('on')
    # HTTPS stays enabled unless the caller explicitly turned it off.
    fos.https('on' if data.get('https', True) else 'off')
    fos.login(host, username, password, verify=ssl_verify)
def filter_system_replacemsg_utm_data(json):
    """Keep only this module's recognized options, dropping None values."""
    option_list = ['buffer', 'format', 'header',
                   'msg_type']
    return {attr: json[attr]
            for attr in option_list
            if attr in json and json[attr] is not None}
def underscore_to_hyphen(data):
    """Recursively rewrite dict keys, replacing '_' with '-'.

    Lists are converted element-wise in place; dicts are rebuilt with
    hyphenated keys; every other value is returned unchanged.
    """
    if isinstance(data, list):
        # Bug fix: the original rebound the loop variable (`elem = ...`),
        # discarding the converted value, so dicts nested inside lists kept
        # their underscored keys.  Assign back through the index instead.
        for i, elem in enumerate(data):
            data[i] = underscore_to_hyphen(elem)
    elif isinstance(data, dict):
        new_data = {}
        for k, v in data.items():
            new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
        data = new_data
    return data
def system_replacemsg_utm(data, fos):
    """Create/update or delete the system.replacemsg 'utm' object via *fos*."""
    vdom = data['vdom']
    state = data['state']
    payload = underscore_to_hyphen(
        filter_system_replacemsg_utm_data(data['system_replacemsg_utm']))
    if state == "present":
        return fos.set('system.replacemsg',
                       'utm',
                       data=payload,
                       vdom=vdom)
    elif state == "absent":
        return fos.delete('system.replacemsg',
                          'utm',
                          mkey=payload['msg-type'],
                          vdom=vdom)
def is_successful_status(status):
    """True for a 'success' status, or a DELETE that 404'd (already gone)."""
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system_replacemsg(data, fos):
    """Dispatch to the utm handler and normalize its response.

    Returns an (is_error, has_changed, result) triple.
    """
    if data['system_replacemsg_utm']:
        resp = system_replacemsg_utm(data, fos)
    # NOTE(review): if 'system_replacemsg_utm' is missing/empty, `resp` is
    # unbound and the return below raises NameError -- confirm callers
    # always supply that section.
    return not is_successful_status(resp), \
        resp['status'] == "success", \
        resp
def main():
    """Module entry point: build the argument spec, connect, apply change."""
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": True, "type": "str",
                  "choices": ["present", "absent"]},
        "system_replacemsg_utm": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "buffer": {"required": False, "type": "str"},
                "format": {"required": False, "type": "str",
                           "choices": ["none", "text", "html",
                                       "wml"]},
                "header": {"required": False, "type": "str",
                           "choices": ["none", "http", "8bit"]},
                "msg_type": {"required": False, "type": "str"}
            }
        }
    }
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
        'username' in module.params and module.params['username'] is not None and \
        'password' in module.params and module.params['password'] is not None
    if not legacy_mode:
        # HTTPAPI path: reuse the connection plugin's socket.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)
            is_error, has_changed, result = fortios_system_replacemsg(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy path: talk to the device directly through fortiosapi.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")
        fos = FortiOSAPI()
        login(module.params, fos)
        is_error, has_changed, result = fortios_system_replacemsg(module.params, fos)
        fos.logout()
    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        # NOTE(review): "Error in repo" is boilerplate copied across fortios
        # modules; the message text is preserved as-is.
        module.fail_json(msg="Error in repo", meta=result)
|
joecole889/spam-filter
|
refs/heads/master
|
FeatureVecGen.py
|
1
|
# -*- coding: utf-8 -*-
"""
Contains a class to generate feature vectors from email text. Utility functions are also included for
creating the dictionaries needed to build the feature vectors. Uses the Porter stemmer algorithm
as implemented in the natural language processing toolkit (nltk).
Created on Fri Feb 12 13:35:00 2016
@author: JCole119213
"""
#import pdb
import re
from nltk.stem.porter import PorterStemmer
class FeatureVecGen :
    # Bag-of-words feature-vector builder for email text.  Python 2 era code
    # (see the py2 `print` statement in the __main__ guard and the list-vs-
    # iterator note on RegularizeWords below).
    def __init__(self,DictList) :
        """
        Initialize the feature vector creation engine with a list of words from a dictionary.
        DictList -
            a list of strings (words) from a dictionary, used to build the feature vectors
        """
        self.HashDict(DictList) #Initializes self.DictHash
        return
    def MakeVec(self,SampleBody) :
        """
        Implements the full flow of creating a feature vector from the raw string extracted from an email
        SampleBody -
            unprocessed text from the body of an email message
        Return values:
        featurevec -
            a list of integers (actually only 0 or 1, but stored as int) indicating the absence or
            presence in SampleBody of each dictionary word from self.DictHash
        """
        SampleWords = self.RegularizeWords(SampleBody)
        SampleWords = self.StemWords(SampleWords)
        featurevec = self.MarkWordPresence(SampleWords)
        return featurevec
    def MarkWordPresence(self,EmailContentsReg) :
        """
        Create a feature vector from the regularized text of an email message body
        EmailContentsReg -
            a list of strings (words) after processing by the FeatureVecGen.RegularizeWords() method
        Return values:
        FeatureVec -
            a list of integers (actually only 0 or 1, but stored as int) indicating the absence or
            presence in EmailContentsReg of each dictionary word from self.DictHash
        """
        FeatureVec = [0] * len(self.DictHash)
        for Word in EmailContentsReg :
            FeatureInd = self.DictHash.get(Word,-1) # Check if word is in the dictionary
            if FeatureInd != -1 :
                FeatureVec[FeatureInd] = 1 # Note the presence of the word as a feature - only one instance of the word matters
        return FeatureVec
    def HashDict(self,DictList) :
        """
        Creates a hash to determine the presence of a word in the dictionary and the corresponding feature location
        DictList -
            a list of strings (words) from a dictionary, used to build the feature vectors
        """
        self.DictHash = dict()
        for ind,Word in enumerate(DictList) :
            self.DictHash[Word] = ind
        return
    @classmethod
    def ParetoWords(cls,TextToProcess) :
        """
        Creates a dictionary with a count of all words in the text to be processed
        TextToProcess -
            unprocessed text from the body of an email message
        Return values:
        DictHist -
            a Python dictionary where the value associated with each key (regularized, stemmed words) is
            a count of the number of times that word occurred in TextToProcess
        """
        DictHist = dict()
        Words = cls.RegularizeWords(TextToProcess)
        ShortWords = cls.StemWords(Words)
        # Create word histogram
        for ShortWord in ShortWords :
            DictHist[ShortWord] = DictHist.get(ShortWord,0) + 1 # Create the histogram of word counts
        return DictHist
    @staticmethod
    def RegularizeWords(FileStr) :
        """
        Removes email addresses, punctuation, HTML tags, etc.
        FileStr -
            unprocessed text from the body of an email message
        Return values:
        FileWords -
            a list of stings split by punctuation and filtered from 0 length strings
        """
        FileStr = FileStr.lower() # Go to lower case
        FileStr = re.sub('<[^<>]+>',' ',FileStr) # Remove HTML tags without < or > inside
        FileStr = re.sub('\d+','number',FileStr) # Replace all numbers with the string 'number'
        FileStr = re.sub('(http|https)://[^\s]*','httpaddr',FileStr) # Replace all URLs with the string 'httpaddr'
        FileStr = re.sub('[^\s]+@[^\s]+','emailaddr',FileStr) # Replace all emails with the string 'emailaddr'
        FileStr = re.sub('[$]+','dollar',FileStr) # Replace all $ signs with the string 'dollar' as spam is likely to reference money
        FileWords = re.split('[\s\|@/#\.\-:&\*\+=\[\]\?!\(\)\{\},\'">_<;%~]',FileStr)
        # NOTE(review): on Python 2 `filter` returns a list; under Python 3
        # this would be a lazy iterator -- relevant if the code is ported.
        return filter(None,FileWords)
    @staticmethod
    def StemWords(WordList) :
        """
        Extracts the root of a word (e.g. stepped -> step) using the Porter stemmer algorithm
        WordList -
            a list of strings (words) as processed by the FeatureVecGen.RegularizeWords() method
        Return values:
        StemmedWordList - a list of root words from the input list
        """
        stemmer = PorterStemmer()
        StemmedWordList = []
        # Loop over list and stem the words
        for Word in WordList :
            # NOTE(review): str.find() searches for the LITERAL substring
            # '[\W_]' (it is not a regex), so this assert is vacuous and
            # never fires for ordinary text; a real check would need
            # re.search. Left as-is because fixing it would change behavior.
            assert (Word.find('[\W_]') == -1),'Failed to remove all non-alphanumeric characters: %s' % Word
            try : # Not sure about the reliability of the Porter Stemmer code, so use a try/catch block
                ShortWord = stemmer.stem(Word)
                assert(len(ShortWord) > 2) # Ignore words less than 3 characters long
                StemmedWordList.append(ShortWord)
            except :
                continue
        return StemmedWordList
################### Main Program ###################
if __name__ == "__main__" :
    # Parenthesized print of a single argument behaves identically on
    # Python 2 and Python 3 (the bare `print x` statement is py2-only).
    print("Test code goes here.")
|
ycool/apollo
|
refs/heads/master
|
modules/tools/prediction/data_pipelines/data_preprocessing/generate_labels.py
|
3
|
###############################################################################
# Copyright 2019 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import os
import sys
import glob
import argparse
import logging
sys.path.append('/apollo/modules/tools/prediction/data_pipelines/common/')
from online_to_offline import LabelGenerator
if __name__ == "__main__":
    # Command-line entry point: generate prediction labels for one feature file.
    parser = argparse.ArgumentParser(description='Generate labels')
    parser.add_argument('input', type=str, help='input file')
    args = parser.parse_args()
    label_gen = LabelGenerator()
    print("Create Label {}".format(args.input))
    if os.path.isfile(args.input):
        # Load the feature protobuf, then write the label files next to it.
        label_gen.LoadFeaturePBAndSaveLabelFiles(args.input)
        label_gen.Label()
    else:
        print("{} is not a valid file".format(args.input))
|
TangHao1987/intellij-community
|
refs/heads/master
|
python/helpers/profiler/thrift/TSerialization.py
|
36
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from thrift.protocol import TBinaryProtocol
from thrift.transport import TTransport
def serialize(thrift_object,
              protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()):
    """Serialize *thrift_object* into a byte string via *protocol_factory*."""
    memory_buffer = TTransport.TMemoryBuffer()
    thrift_object.write(protocol_factory.getProtocol(memory_buffer))
    return memory_buffer.getvalue()
def deserialize(base,
                buf,
                protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()):
    """Populate the thrift object *base* from the bytes in *buf*; return it."""
    memory_buffer = TTransport.TMemoryBuffer(buf)
    base.read(protocol_factory.getProtocol(memory_buffer))
    return base
|
r2t2sdr/r2t2
|
refs/heads/master
|
linux/trunk/linux-4.0-adi/tools/perf/scripts/python/check-perf-trace.py
|
1997
|
# perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
# Per-event-name counters for events that have no dedicated handler.
unhandled = autodict()
def trace_begin():
print "trace_begin"
pass
def trace_end():
    # Called by perf after the last event; dump counts of unhandled events.
    print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        common_callchain, vec):
    # Handler for the irq:softirq_entry tracepoint: print the common
    # header plus the symbolic name of the softirq vector.
    print_header(event_name, common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    print_uncommon(context)
    print "vec=%s\n" % \
        (symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        common_callchain, call_site, ptr, bytes_req, bytes_alloc,
        gfp_flags):
    # Handler for the kmem:kmalloc tracepoint: print the common header
    # plus the allocation details with symbolically decoded GFP flags.
    print_header(event_name, common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    print_uncommon(context)
    print "call_site=%u, ptr=%u, bytes_req=%u, " \
        "bytes_alloc=%u, gfp_flags=%s\n" % \
        (call_site, ptr, bytes_req, bytes_alloc,
        flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
    # autodict auto-creates missing keys as nested dicts, so the first
    # "+= 1" for a new event name raises TypeError; use that to seed 1.
    try:
        unhandled[event_name] += 1
    except TypeError:
        unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
    # Fixed-width prefix shared by every event line. The trailing comma
    # suppresses the newline so the handler's own output follows on.
    print "%-20s %5u %05u.%09u %8u %-20s " % \
        (event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
    # Pull the common fields straight from the perf context object.
    print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
        % (common_pc(context), trace_flag_str(common_flags(context)), \
        common_lock_depth(context))
def print_unhandled():
    # Print a summary table of events that had no dedicated handler.
    keys = unhandled.keys()
    if not keys:
        return

    print "\nunhandled events:\n\n",

    print "%-40s %10s\n" % ("event", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
        "-----------"),

    for event_name in keys:
        print "%-40s %10d\n" % (event_name, unhandled[event_name])
|
cloew/KaoResources
|
refs/heads/master
|
setup.py
|
1
|
from distutils.core import setup

# Minimal distutils packaging script for the kao_resources package.
setup(name='kao_resources',
      version='0.0.1',
      description="",
      author='',
      author_email='',
      packages=['kao_resources'],
      )
|
pombreda/django-hotclub
|
refs/heads/master
|
libs/external_libs/python-yadis-1.1.0/yadis/parsehtml.py
|
5
|
__all__ = ['findHTMLMeta', 'MetaNotFound']
from HTMLParser import HTMLParser, HTMLParseError
import htmlentitydefs
import re
from yadis.constants import YADIS_HEADER_NAME
# Size of the chunks to search at a time (also the amount that gets
# read at a time)
CHUNK_SIZE = 1024 * 16 # 16 KB
class ParseDone(Exception):
    """Exception to hold the URI that was located when the parse is
    finished. If the parse finishes without finding the URI, set it to
    None."""
    # Raised by YadisHTMLParser to abort HTMLParser.feed() early; the
    # located URI (or None) travels as the exception's first argument.
class MetaNotFound(Exception):
    """Exception to hold the content of the page if we did not find
    the appropriate <meta> tag"""
    # Carries the full fetched page content as its first argument.
# re.VERBOSE lets ent_pat below be written with insignificant whitespace.
re_flags = re.IGNORECASE | re.UNICODE | re.VERBOSE

# Matches HTML character references: &#xHH; (hex), &#DD; (decimal),
# and named entities such as &amp;.
ent_pat = r'''
&
(?: \#x (?P<hex> [a-f0-9]+ )
| \# (?P<dec> \d+ )
| (?P<word> \w+ )
)
;'''

ent_re = re.compile(ent_pat, re_flags)
def substituteMO(mo):
    """Return the replacement character for one entity match object.

    Unknown named entities are left untouched (the original match text
    is returned).
    """
    kind = mo.lastgroup
    if kind == 'hex':
        codepoint = int(mo.group('hex'), 16)
    elif kind == 'dec':
        codepoint = int(mo.group('dec'))
    else:
        assert kind == 'word'
        codepoint = htmlentitydefs.name2codepoint.get(mo.group('word'))

    return mo.group() if codepoint is None else unichr(codepoint)
def substituteEntities(s):
    """Return *s* with all HTML character references decoded to characters."""
    return ent_re.sub(substituteMO, s)
class YadisHTMLParser(HTMLParser):
    """Parser that finds a meta http-equiv tag in the head of a html
    document.

    When feeding in data, if the tag is matched or it will never be
    found, the parser will raise ParseDone with the uri as the first
    attribute.

    Parsing state diagram
    =====================

    Any unlisted input does not affect the state::

                1, 2, 5                       8
               +--------------------------+  +-+
               |                          |  | |
             4 |    3       1, 2, 5, 7    v  | v
        TOP -> HTML -> HEAD ----------> TERMINATED
               | |       ^  |  ^            ^
               | | 3     |  |  |            |
               | +-------+  +-> FOUND ------+
               |      6         8           |
               | 1, 2                       |
               +----------------------------+

    1. any of </body>, </html>, </head> -> TERMINATE
    2. <body> -> TERMINATE
    3. <head> -> HEAD
    4. <html> -> HTML
    5. <html> -> TERMINATE
    6. <meta http-equiv='X-XRDS-Location'> -> FOUND
    7. <head> -> TERMINATE
    8. Any input -> TERMINATE
    """
    # Parser phases (see the state diagram above).
    TOP = 0
    HTML = 1
    HEAD = 2
    FOUND = 3
    TERMINATED = 4

    def __init__(self):
        HTMLParser.__init__(self)
        self.phase = self.TOP

    def _terminate(self):
        # The meta tag can no longer appear; abort feed() via ParseDone(None).
        self.phase = self.TERMINATED
        raise ParseDone(None)

    def handle_endtag(self, tag):
        # If we ever see an end of head, body, or html, bail out right away.
        # [1]
        if tag in ['head', 'body', 'html']:
            self._terminate()

    def handle_starttag(self, tag, attrs):
        # if we ever see a start body tag, bail out right away, since
        # we want to prevent the meta tag from appearing in the body
        # [2]
        if tag=='body':
            self._terminate()

        if self.phase == self.TOP:
            # At the top level, allow a html tag or a head tag to move
            # to the head or html phase
            if tag == 'head':
                # [3]
                self.phase = self.HEAD
            elif tag == 'html':
                # [4]
                self.phase = self.HTML

        elif self.phase == self.HTML:
            # if we are in the html tag, allow a head tag to move to
            # the HEAD phase. If we get another html tag, then bail
            # out
            if tag == 'head':
                # [3]
                self.phase = self.HEAD
            elif tag == 'html':
                # [5]
                self._terminate()

        elif self.phase == self.HEAD:
            # If we are in the head phase, look for the appropriate
            # meta tag. If we get a head or body tag, bail out.
            if tag == 'meta':
                attrs_d = dict(attrs)
                http_equiv = attrs_d.get('http-equiv', '').lower()
                if http_equiv == YADIS_HEADER_NAME.lower():
                    raw_attr = attrs_d.get('content')
                    yadis_loc = substituteEntities(raw_attr)
                    # [6]
                    self.phase = self.FOUND
                    raise ParseDone(yadis_loc)

            elif tag in ['head', 'html']:
                # [5], [7]
                self._terminate()

    def feed(self, chars):
        # [8] Once terminated or found, any further input terminates again.
        if self.phase in [self.TERMINATED, self.FOUND]:
            self._terminate()

        return HTMLParser.feed(self, chars)
def findHTMLMeta(stream):
    """Look for a meta http-equiv tag with the YADIS header name.

    @param stream: Source of the html text
    @type stream: Object that implements a read() method that works
        like file.read

    @return: The URI from which to fetch the XRDS document
    @rtype: str

    @raises MetaNotFound: raised with the content that was
        searched as the first parameter.
    """
    parser = YadisHTMLParser()
    chunks = []

    while 1:
        chunk = stream.read(CHUNK_SIZE)
        if not chunk:
            # End of file
            break

        chunks.append(chunk)
        try:
            parser.feed(chunk)
        except HTMLParseError, why:
            # HTML parse error, so bail
            chunks.append(stream.read())
            break
        except ParseDone, why:
            # ParseDone carries the located URI (or None) as args[0].
            uri = why[0]
            if uri is None:
                # Parse finished, but we may need the rest of the file
                chunks.append(stream.read())
                break
            else:
                return uri

    content = ''.join(chunks)
    raise MetaNotFound(content)
|
EdLogan18/logan-repository
|
refs/heads/master
|
plugin.video.exodus/resources/lib/sources/torba_mv_tv.py
|
2
|
# -*- coding: utf-8 -*-
'''
Exodus Add-on
Copyright (C) 2016 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,os,json,urllib,urlparse
from resources.lib.modules import control
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import workers
class source:
    # Scraper/resolver for torba.se (StreamTorrent-backed) movie and TV links.
    def __init__(self):
        self.domains = ['torba.se']
        self.base_link = 'http://torba.se'
        self.search_mv_link = '/movies/autocomplete?order=relevance&title=%s'
        self.search_tv_link = '/series/autocomplete?order=relevance&title=%s'
        self.tv_link = '/series/%s/%s/%s'
        self.mv_link = '/v/%s'

    def movie(self, imdb, title, year):
        # Resolve a movie title/year to its torba.se slug via the
        # autocomplete API. Returns None on any failure (broad except).
        try:
            query = self.search_mv_link % (urllib.quote_plus(title))
            query = urlparse.urljoin(self.base_link, query)

            r = client.request(query, headers={'X-Requested-With': 'XMLHttpRequest'})
            r = json.loads(r)

            t = cleantitle.get(title)
            r = [(i['slug'], i['title'], i['year']) for i in r]
            # Keep only exact title/year matches; IndexError on no match
            # is swallowed by the except below.
            r = [i[0] for i in r if t == cleantitle.get(i[1]) and year == str(i[2])][0]

            url = r.encode('utf-8')
            return url
        except:
            return

    def tvshow(self, imdb, tvdb, tvshowtitle, year):
        # Same lookup as movie(), but against the series autocomplete API.
        try:
            query = self.search_tv_link % (urllib.quote_plus(tvshowtitle))
            query = urlparse.urljoin(self.base_link, query)

            r = client.request(query, headers={'X-Requested-With': 'XMLHttpRequest'})
            r = json.loads(r)

            t = cleantitle.get(tvshowtitle)
            r = [(i['slug'], i['title'], i['year']) for i in r]
            r = [i[0] for i in r if t == cleantitle.get(i[1]) and year == str(i[2])][0]

            url = r.encode('utf-8')
            return url
        except:
            return

    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        # Append season/episode numbers to the show slug from tvshow().
        if url == None: return

        url = '%s/%01d/%01d' % (url, int(season), int(episode))
        url = url.encode('utf-8')
        return url

    def sources(self, url, hostDict, hostprDict):
        # Build up to three quality-tagged stream entries for the given slug.
        try:
            sources = []

            if url == None: return sources

            # A 'slug/season/episode' url means a TV episode; otherwise a movie.
            try: url = self.tv_link % re.findall('(.+?)/(\d*)/(\d*)$', url)[0]
            except: url = self.mv_link % url

            url = urlparse.urljoin(self.base_link, url)

            r = client.request(url)

            # Extract the streamtorrent.tv video id from the play button link.
            url = client.parseDOM(r, 'a', ret='href', attrs = {'class': 'video-play.+?'})[0]
            url = re.findall('(?://|\.)streamtorrent\.tv/.+?/([0-9a-zA-Z/]+)', url)[0]

            u = 'https://streamtorrent.tv/api/torrent/%s.json' % url

            r = client.request(u)
            r = json.loads(r)

            # First file that actually carries streams.
            r = [i for i in r['files'] if 'streams' in i and len(i['streams']) > 0][0]
            r = [{'height': i['height'], 'stream_id': r['_id'], 'vid_id': url} for i in r['streams']]

            # Bucket by resolution; keep at most three entries.
            links = []
            links += [{'quality': '1080p', 'url': urllib.urlencode(i)} for i in r if int(i['height']) >= 1080]
            links += [{'quality': 'HD', 'url': urllib.urlencode(i)} for i in r if 720 <= int(i['height']) < 1080]
            links += [{'quality': 'SD', 'url': urllib.urlencode(i)} for i in r if int(i['height']) <= 720]
            links = links[:3]

            for i in links: sources.append({'source': 'cdn', 'quality': i['quality'], 'provider': 'Torba', 'url': i['url'], 'direct': True, 'debridonly': False, 'autoplay': False})

            return sources
        except:
            return sources

    def resolve(self, url):
        # Turn a sources() entry back into a playable url, or write and
        # return a local m3u8 playlist combining audio and video streams.
        try:
            m3u8 = [
                '#EXTM3U',
                '#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",DEFAULT=YES,AUTOSELECT=YES,NAME="Stream 1",URI="{audio_stream}"',
                '',
                '#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=0,NAME="{stream_name}",AUDIO="audio"',
                '{video_stream}'
                ]

            query = urlparse.parse_qs(url)
            query = dict([(key, query[key][0]) if query[key] else (key, '') for key in query])

            auth = 'http://streamtorrent.tv/api/torrent/%s/%s.m3u8?json=true' % (query['vid_id'], query['stream_id'])

            r = client.request(auth)
            r = json.loads(r)

            # 'url' present means the API wants out-of-band confirmation first.
            try: url = r['url']
            except: url = None

            if not url == None:
                # Show the confirmation url in a dialog and poll the API
                # until it is confirmed or the dialog is dismissed.
                def dialog(url):
                    try: self.disableScraper = control.yesnoDialog('To watch this video visit from any device', '[COLOR skyblue]%s[/COLOR]' % url, '', 'Torba', 'Cancel', 'Settings')
                    except: pass

                workers.Thread(dialog, url).start()
                control.sleep(3000)

                for i in range(100):
                    try:
                        if not control.condVisibility('Window.IsActive(yesnoDialog)'): break
                        r = client.request(auth)
                        r = json.loads(r)
                        try: url = r['url']
                        except: url = None
                        if url == None: break
                        workers.Thread(dialog, url).start()
                        control.sleep(3000)
                    except:
                        pass

                # NOTE(review): self.disableScraper is only assigned inside
                # dialog(); if that thread failed before assigning, this read
                # raises AttributeError (swallowed by the outer except).
                if self.disableScraper:
                    control.openSettings(query='2.0')
                    return ''

                control.execute('Dialog.Close(yesnoDialog)')
                if not url == None: return

            stream_name = '%sp' % (query['height'])
            video_stream = r[stream_name]

            # Video-only stream: play it directly, no playlist needed.
            if not 'audio' in r: return video_stream

            audio_stream = r['audio']

            content = ('\n'.join(m3u8)).format(**{'audio_stream': audio_stream, 'stream_name': stream_name, 'video_stream': video_stream})

            path = os.path.join(control.dataPath, 'torbase.m3u8')
            control.makeFile(control.dataPath) ; control.deleteFile(path)
            file = control.openFile(path, 'w') ; file.write(content) ; file.close()

            return path
        except:
            return
|
EKiefer/edge-starter
|
refs/heads/master
|
py34env/Lib/site-packages/django/contrib/postgres/aggregates/__init__.py
|
625
|
from .general import * # NOQA
from .statistics import * # NOQA
|
mbarylsk/goldbach-partition
|
refs/heads/master
|
goldbach-eliminate_primes.py
|
1
|
#
# Copyright (c) 2019 - 2020, Marcin Barylski
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
#
import math
import sys
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.mlab as mlab
import numpy as np
import os
from datetime import datetime
import goldbach
sys.path.insert(0, '..\\primes\\')
import primes
import dataprocessing
#############################################################
# Settings - configuration
#############################################################

# set to True if you want to see more output during calculations
be_verbose = False

# Caching previous primality results
#  o True  - auxiliary sets of primes and composite numbers will grow;
#            it will speed up further primality tests but more RAM will
#            be occupied
#  o False - do not cache new primality test results
caching_primality_results = False

# Range of examined numbers: num = step_factor * k for k in [min_num, max_num)
min_num = 2
max_num = 12500
step_factor = 2
# Partial results are flushed whenever num is a multiple of this value.
checkpoint_value = 2500

# Helper files with precomputed primes / composites.
file_input_primes = '..\\primes\\t_prime_numbers.txt'
file_input_nonprimes = '..\\primes\\t_nonprime_numbers.txt'

#############################################################
# Settings - output directory and files
#############################################################
directory = "results/" + str(step_factor*max_num)
if not os.path.exists(directory):
    os.makedirs(directory)
file_output_list_of_eliminated_primes = directory + "/t_list_of_eliminated_primes_" + str(step_factor*max_num) + ".txt"
file_output_list_of_primes_in_partitions = directory + "/t_list_of_primes_in_partitions_" + str(step_factor*max_num) + ".txt"
file_output_list_of_sets_required_factors = directory + "/t_list_of_sets_required_factors_" + str(step_factor*max_num) + ".txt"

#############################################################
# Business logic
#############################################################
# Candidate sets of primes covering every examined number's Goldbach
# partition; starts from the smallest possible set {2}.
list_of_sets_required_factors = [{2}]
temp_list_of_sets_required_factors = []
# All primes ever observed in any partition.
list_of_primes_in_partitions = []
# Primes observed in partitions but absent from every candidate set.
list_of_eliminated_primes = []
list_no_of_eliminated_primes = []
list_no_of_eliminated_primes_to_no_of_partitions = []
list_no_of_eliminated_primes_to_no_of_partitions_avg = []
list_no_of_eliminated_primes_to_no_of_req_primes = []
list_no_of_eliminated_primes_to_no_of_req_primes_avg = []
# [eliminated %, required %, undecided %] per examined number.
list_percentages = [[],[],[]]
list_nums = []
list_no_of_required_factors = []
list_no_of_primes = []
list_no_of_primes_half = []
list_no_of_primes_half_n = []
list_no_of_sets_required_factors = []
def update_metrics (p, dp, num, factors):
    """Record per-iteration statistics for the number *num*.

    p       - primes.Primes helper (primality / prime-counting queries)
    dp      - dataprocessing.DataProcessing helper (averages, pair counts)
    num     - the even number just processed
    factors - list of (p1, p2) Goldbach partition pairs for num

    Appends one data point to each of the module-level metric lists.
    """
    global list_nums, list_of_eliminated_primes, list_no_of_sets_required_factors, list_of_primes_in_partitions
    global list_no_of_primes_half, list_no_of_primes, list_no_of_primes_half_n, list_no_of_required_factors, list_no_of_eliminated_primes
    list_nums.append (num)
    # Size of the smallest candidate set of required factors found so far.
    # (Fixed local-name typo: min_lenght -> min_length.)
    min_length = sys.maxsize
    for set_required_factors in list_of_sets_required_factors:
        if len(set_required_factors) < min_length:
            min_length = len(set_required_factors)
    no_primes = p.get_all_primes_leq(num)
    no_primes_half_n = p.get_all_primes_leq(math.floor(num / 4))
    list_no_of_primes_half.append (math.ceil( no_primes / 4))
    list_no_of_primes.append(no_primes)
    list_no_of_primes_half_n.append(no_primes_half_n)
    list_no_of_required_factors.append (min_length)
    list_no_of_sets_required_factors.append (len(list_of_sets_required_factors))
    list_of_primes_in_partitions.sort()
    # Eliminated primes = primes seen in partitions but absent from every
    # candidate set of required factors.
    list_of_eliminated_primes = []
    # BUGFIX: the original reused the parameter name 'p' as the loop
    # variable here, clobbering the primes helper; renamed to 'prime'.
    for prime in list_of_primes_in_partitions:
        list_of_eliminated_primes.append (prime)
    for set_required_factors in list_of_sets_required_factors:
        for q in set_required_factors:
            if q in list_of_eliminated_primes:
                list_of_eliminated_primes.remove (q)
    list_no_of_eliminated_primes.append (len(list_of_eliminated_primes))
    list_no_of_eliminated_primes_to_no_of_partitions.append(len(list_of_eliminated_primes)/dp.get_number_of_pairs (factors))
    list_no_of_eliminated_primes_to_no_of_partitions_avg.append(dp.get_avg_value_from_list (list_no_of_eliminated_primes_to_no_of_partitions))
    list_no_of_eliminated_primes_to_no_of_req_primes.append(len(list_of_eliminated_primes)/min_length)
    list_no_of_eliminated_primes_to_no_of_req_primes_avg.append(dp.get_avg_value_from_list (list_no_of_eliminated_primes_to_no_of_req_primes))
    if no_primes > 0:
        list_percentages[0].append (int(len(list_of_eliminated_primes)/no_primes*100))
        list_percentages[1].append (int(min_length/no_primes*100))
        list_percentages[2].append (100 - int(len(list_of_eliminated_primes)/no_primes*100) - int(min_length/no_primes*100))
    else:
        list_percentages[0].append (0)
        list_percentages[1].append (0)
        list_percentages[2].append (100)
#############################################################
# Presentation
#############################################################
def write_results_to_figures (directory):
    """Render all collected metric lists to PNG figures under *directory*."""
    global list_nums, list_no_of_primes, list_no_of_primes_half, list_no_of_primes_half_n, list_no_of_required_factors, list_no_of_sets_required_factors, list_no_of_eliminated_primes

    # Figure 1: size of the required-primes set vs. prime-counting bounds.
    plt.figure(1)
    r_patch = mpatches.Patch(color='red', label='ceil (pi(n)/4)')
    g_patch = mpatches.Patch(color='green', label='pi(n)')
    m_patch = mpatches.Patch(color='magenta', label='pi(n/4)')
    b_patch = mpatches.Patch(color='blue', label='# of req primes for GSC')
    list_of_handles = []
    list_of_handles.append(g_patch)
    list_of_handles.append(r_patch)
    list_of_handles.append(m_patch)
    list_of_handles.append(b_patch)
    plt.legend(handles=list_of_handles, loc='upper left', prop={'size': 6})
    plt.plot(list_nums, list_no_of_primes, 'g.', ms=2)
    plt.plot(list_nums, list_no_of_primes_half, 'r.', ms=2)
    plt.plot(list_nums, list_no_of_primes_half_n, 'm.', ms=2)
    plt.plot(list_nums, list_no_of_required_factors, 'b.', ms=2)
    plt.xlabel('n')
    plt.ylabel('Count')
    plt.title('How big are the sets?')
    plt.grid(True)
    plt.savefig(directory + "/f_required_primes.png")

    # Figure 2: number of candidate sets over n.
    plt.figure(2)
    plt.plot(list_nums, list_no_of_sets_required_factors, 'b-', ms=1)
    plt.xlabel('n')
    plt.ylabel('Count')
    plt.title('How many sets?')
    plt.grid(True)
    plt.savefig(directory + "/f_number_of_possible_sets.png")

    # Figure 3: number of eliminated primes over n.
    plt.figure(3)
    plt.plot(list_nums, list_no_of_eliminated_primes, 'b-', ms=1)
    plt.xlabel('n')
    plt.ylabel('Count')
    plt.title('How many eliminated primes?')
    plt.grid(True)
    plt.savefig(directory + "/f_number_of_eliminated_primes.png")

    # Figure 4: eliminated-primes/partitions ratio plus running average.
    plt.figure(4)
    blue_patch = mpatches.Patch(color='blue', label='ratio')
    red_patch = mpatches.Patch(color='red', label='avg')
    plt.legend(handles=[red_patch, blue_patch], loc='upper left', prop={'size': 6})
    plt.plot(list_nums, list_no_of_eliminated_primes_to_no_of_partitions, 'b.', ms=1)
    plt.plot(list_nums, list_no_of_eliminated_primes_to_no_of_partitions_avg, 'r-', ms=1)
    plt.xlabel('n')
    plt.ylabel('Ratio')
    plt.title('How many eliminated primes to partitions?')
    plt.grid(True)
    plt.savefig(directory + "/f_eliminated_primes_to_partitions.png")

    # Figure 5: eliminated-primes/required-primes ratio plus running average.
    plt.figure(5)
    blue_patch = mpatches.Patch(color='blue', label='ratio')
    red_patch = mpatches.Patch(color='red', label='avg')
    plt.legend(handles=[red_patch, blue_patch], loc='upper left', prop={'size': 6})
    plt.plot(list_nums, list_no_of_eliminated_primes_to_no_of_req_primes, 'b.', ms=1)
    plt.plot(list_nums, list_no_of_eliminated_primes_to_no_of_req_primes_avg, 'r-', ms=1)
    plt.xlabel('n')
    plt.ylabel('Ratio')
    plt.title('How many eliminated primes to required primes?')
    plt.grid(True)
    plt.savefig(directory + "/f_eliminated_primes_to_req_primes.png")

    # Figure 6: percentage split eliminated / required / undecided primes.
    plt.figure(6)
    r_patch = mpatches.Patch(color='red', label='% of eliminated primes')
    b_patch = mpatches.Patch(color='blue', label='% of required primes')
    g_patch = mpatches.Patch(color='green', label='% of undecided primes')
    list_of_handles = []
    list_of_handles.append(r_patch)
    list_of_handles.append(b_patch)
    list_of_handles.append(g_patch)
    plt.legend(handles=list_of_handles, loc='upper left', prop={'size': 6})
    plt.plot(list_nums, list_percentages[0], 'r-', ms=1)
    plt.plot(list_nums, list_percentages[1], 'b-', ms=1)
    plt.plot(list_nums, list_percentages[2], 'g-', ms=1)
    plt.xlabel('n')
    plt.ylabel('%')
    plt.title('Percentages')
    plt.grid(True)
    plt.savefig(directory + "/f_required_eliminated_primes_percentage.png")
def write_results_to_files (directory):
    """Dump the current result lists to their output text files.

    directory - unused here; retained for interface compatibility (the
                target paths come from the module-level settings below).
    """
    global file_output_list_of_eliminated_primes, file_output_list_of_primes_in_partitions, file_output_list_of_sets_required_factors
    # 'with' guarantees each file is closed even if a write fails.
    with open(file_output_list_of_eliminated_primes, "w") as f:
        f.write (str(list_of_eliminated_primes))
    with open(file_output_list_of_primes_in_partitions, "w") as f:
        f.write (str(list_of_primes_in_partitions))
    with open(file_output_list_of_sets_required_factors, "w") as f:
        f.write (str(list_of_sets_required_factors))
#############################################################
# Main - Phase 1
# Preload files & restore previous calculations
#############################################################
print ("---------------------------------------------------")
print ("Initialize objects...")
# Primality helper, Goldbach partition generator and statistics helper.
p = primes.Primes(caching_primality_results)
gp = goldbach.GoldbachPartition (p)
dp = dataprocessing.DataProcessing()
print ("DONE")
print ("Loading helper sets...")
# Preload known primes/composites so primality tests start warm.
p.init_set(file_input_primes, True)
p.init_set(file_input_nonprimes, False)
print ("DONE")
print ("Sorting primes...")
p.sort_primes_set()
print ("DONE")
print ("Output result folder: ", directory)
print ("---------------------------------------------------")
#############################################################
# Main - Phase 2
# New calculations
#############################################################
dt_start = datetime.now()
dt_current_previous = dt_start

for k in range (min_num, max_num):
    num = step_factor*k
    if be_verbose:
        print ("=============")
        print ("num=", num)
    factors = gp.find_sum_of_prime_numbers (num)
    if be_verbose:
        print ("current factors:", factors)

    # step 1:
    # check #1: maybe set_required_factors already contains required factors?
    fullfiled = False
    for pair in factors:
        (p1, p2) = pair
        # remember all primes present in partions
        if p1 not in list_of_primes_in_partitions:
            list_of_primes_in_partitions.append (p1)
        if p2 not in list_of_primes_in_partitions:
            list_of_primes_in_partitions.append (p2)
        for set_required_factors in list_of_sets_required_factors:
            if not fullfiled and p1 in set_required_factors and p2 in set_required_factors:
                fullfiled = True
                if be_verbose:
                    print ("num=", num, "is fullfiled")
                    print (list_of_sets_required_factors)

    # check #2: p1 or p2 is not on a set_required_factors
    if not fullfiled:
        if be_verbose:
            print ("not fullfiled")
        # case 1: sets that already contain p1 are extended with p2
        for pair in factors:
            (p1, p2) = pair
            for my_set_required_factors in list_of_sets_required_factors:
                my_temp_set = my_set_required_factors.copy()
                if p1 in my_temp_set and p2 not in my_temp_set:
                    my_temp_set.add (p2)
                    if be_verbose:
                        print ("case 1", my_temp_set)
                    temp_list_of_sets_required_factors.append (my_temp_set)
                    my_temp_set = {}
        # case 2: sets that already contain p2 are extended with p1
        for pair in factors:
            (p1, p2) = pair
            for my_set_required_factors in list_of_sets_required_factors:
                my_temp_set = my_set_required_factors.copy()
                if p1 not in my_temp_set and p2 in my_temp_set:
                    my_temp_set.add (p1)
                    if be_verbose:
                        print ("case 2", my_temp_set)
                    temp_list_of_sets_required_factors.append (my_temp_set)
                    my_temp_set = {}
        # case 3: sets containing neither prime are extended with both
        for pair in factors:
            (p1, p2) = pair
            for my_set_required_factors in list_of_sets_required_factors:
                my_temp_set = my_set_required_factors.copy()
                if p1 not in my_temp_set and p2 not in my_temp_set:
                    my_temp_set.add (p1)
                    my_temp_set.add (p2)
                    if be_verbose:
                        print ("case 3", my_temp_set)
                    temp_list_of_sets_required_factors.append (my_temp_set)
                    my_temp_set = {}
    else:
        # drop candidate sets that cannot cover this number's partition
        for set_required_factors in list_of_sets_required_factors:
            condition_met = False
            for pair in factors:
                (p1, p2) = pair
                if p1 in set_required_factors and p2 in set_required_factors:
                    condition_met = True
            if not condition_met:
                list_of_sets_required_factors.remove (set_required_factors)
                if be_verbose:
                    print ("Removed", set_required_factors, "from list_of_sets_required_factors")
                    print ("Now list_of_sets_required_factors", list_of_sets_required_factors)

    # step 2: cleanup of set_temp_list_required_factors
    if len(temp_list_of_sets_required_factors) > 0:
        min_lenght = sys.maxsize
        for temp_set_required_factors in temp_list_of_sets_required_factors:
            if be_verbose:
                print (temp_set_required_factors)
            if len(temp_set_required_factors) < min_lenght:
                min_lenght = len(temp_set_required_factors)
        # keep only the smallest candidate sets
        list_of_sets_required_factors = []
        for temp_set_required_factors in temp_list_of_sets_required_factors:
            if len(temp_set_required_factors) == min_lenght:
                list_of_sets_required_factors.append (temp_set_required_factors)
        temp_list_of_sets_required_factors = []

    update_metrics (p, dp, num, factors)

    # checkpoint - partial results
    # NOTE(review): dt_current_previous is never advanced, so each reported
    # checkpoint duration is measured from the start -- confirm intended.
    if num % checkpoint_value == 0:
        dt_current = datetime.now()
        dt_diff_current = (dt_current - dt_current_previous).total_seconds()
        print ("Iteration", k, "of total", max_num, "took", dt_diff_current, "seconds")
        # remember results so far
        write_results_to_figures (directory)
        write_results_to_files (directory)

    if be_verbose:
        print ("final result - currently required factors", list_of_sets_required_factors)
dt_end = datetime.now()
write_results_to_figures (directory)

# final results
dt_diff = dt_end - dt_start
print ("Total calculations lasted:", dt_diff)
print (" + Max examined number:", num)
print (" + currently required factors:", list_of_sets_required_factors)
print (" + primes present in partitions:", list_of_primes_in_partitions)
print (" + number of required factors:", list_no_of_required_factors)
print (" + eliminated primes:", list_of_eliminated_primes)
write_results_to_files (directory)
|
andmos/ansible
|
refs/heads/devel
|
test/units/modules/network/ios/test_ios_vlan.py
|
45
|
# (c) 2018 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from units.compat.mock import patch
from ansible.modules.network.ios import ios_vlan
from ansible.modules.network.ios.ios_vlan import parse_vlan_brief
from units.modules.utils import set_module_args
from .ios_module import TestIosModule, load_fixture
class TestIosVlanModule(TestIosModule):
    """Unit tests for the ios_vlan module and its parse_vlan_brief helper."""

    module = ios_vlan

    def setUp(self):
        super(TestIosVlanModule, self).setUp()
        # Patch out device I/O so no real connection is attempted.
        self.mock_run_commands = patch('ansible.modules.network.ios.ios_vlan.run_commands')
        self.run_commands = self.mock_run_commands.start()
        self.mock_load_config = patch('ansible.modules.network.ios.ios_vlan.load_config')
        self.load_config = self.mock_load_config.start()

    def tearDown(self):
        super(TestIosVlanModule, self).tearDown()
        self.mock_run_commands.stop()
        self.mock_load_config.stop()

    def load_fixtures(self, commands=None, transport='cli'):
        # Every test starts from the canned "show vlan brief" fixture output.
        self.run_commands.return_value = [load_fixture('ios_vlan_config.cfg')]
        self.load_config.return_value = {'diff': None, 'session': 'session'}

    def test_ios_vlan_create(self):
        # Creating a new vlan id must emit vlan + name commands.
        set_module_args({'vlan_id': '3', 'name': 'test', 'state': 'present'})
        result = self.execute_module(changed=True)
        expected_commands = [
            'vlan 3',
            'name test',
        ]
        self.assertEqual(result['commands'], expected_commands)

    def test_ios_vlan_id_startwith_9(self):
        # vlan 9 already exists with this name in the fixture -> no change.
        set_module_args({'vlan_id': '9', 'name': 'vlan9', 'state': 'present'})
        result = self.execute_module(changed=False)
        expected_commands = []
        self.assertEqual(result['commands'], expected_commands)

    def test_ios_vlan_rename(self):
        # Changing the name of an existing vlan must emit a rename.
        set_module_args({'vlan_id': '2', 'name': 'test', 'state': 'present'})
        result = self.execute_module(changed=True)
        expected_commands = [
            'vlan 2',
            'name test',
        ]
        self.assertEqual(result['commands'], expected_commands)

    def test_ios_vlan_with_interfaces(self):
        # The interface list replaces membership: add 1/0/8, remove 1/0/6.
        set_module_args({'vlan_id': '2', 'name': 'vlan2', 'state': 'present', 'interfaces': ['GigabitEthernet1/0/8', 'GigabitEthernet1/0/7']})
        result = self.execute_module(changed=True)
        expected_commands = [
            'vlan 2',
            'interface GigabitEthernet1/0/8',
            'switchport mode access',
            'switchport access vlan 2',
            'vlan 2',
            'interface GigabitEthernet1/0/6',
            'switchport mode access',
            'no switchport access vlan 2',
        ]
        self.assertEqual(result['commands'], expected_commands)

    def test_ios_vlan_with_interfaces_and_newvlan(self):
        # A brand new vlan gets created and both interfaces assigned.
        set_module_args({'vlan_id': '3', 'name': 'vlan3', 'state': 'present', 'interfaces': ['GigabitEthernet1/0/8', 'GigabitEthernet1/0/7']})
        result = self.execute_module(changed=True)
        expected_commands = [
            'vlan 3',
            'name vlan3',
            'interface GigabitEthernet1/0/8',
            'switchport mode access',
            'switchport access vlan 3',
            'interface GigabitEthernet1/0/7',
            'switchport mode access',
            'switchport access vlan 3',
        ]
        self.assertEqual(result['commands'], expected_commands)

    def test_parse_vlan_brief(self):
        # parse_vlan_brief must turn the raw fixture into structured dicts.
        result = parse_vlan_brief(load_fixture('ios_vlan_config.cfg'))
        obj = [
            {
                'name': 'default',
                'interfaces': [
                    'GigabitEthernet1/0/4',
                    'GigabitEthernet1/0/5',
                    'GigabitEthernet1/0/52',
                    'GigabitEthernet1/0/54',
                ],
                'state': 'active',
                'vlan_id': '1',
            },
            {
                'name': 'vlan2',
                'interfaces': [
                    'GigabitEthernet1/0/6',
                    'GigabitEthernet1/0/7',
                ],
                'state': 'active',
                'vlan_id': '2',
            },
            {
                'name': 'vlan9',
                'interfaces': [
                    'GigabitEthernet1/0/6',
                ],
                'state': 'active',
                'vlan_id': '9',
            },
            {
                'name': 'fddi-default',
                'interfaces': [],
                'state': 'act/unsup',
                'vlan_id': '1002',
            },
            {
                'name': 'fddo-default',
                'interfaces': [],
                'state': 'act/unsup',
                'vlan_id': '1003',
            },
        ]
        self.assertEqual(result, obj)
|
saullocastro/pyNastran
|
refs/heads/master
|
pyNastran/f06/utils.py
|
1
|
from __future__ import print_function
import os
import numpy as np
from pyNastran.f06.parse_flutter import plot_flutter_f06
def cmd_line_plot_flutter():  # pragma: no cover
    """Entry point for ``f06 plot_145``: plot SOL-145 flutter results.

    Parses command-line arguments with docopt, expands the ``--modes``
    specification (e.g. ``1:10,20:22``) into a list (or open-ended slice)
    of mode numbers, and hands off to ``plot_flutter_f06``.
    """
    import sys
    from docopt import docopt
    import pyNastran
    msg = "Usage:\n"
    msg += " f06 plot_145 F06_FILENAME [--noline] [--modes MODES] [--subcases SUB] [--xlim FREQ] [--ylim DAMP]\n"
    msg += ' f06 plot_145 -h | --help\n'
    msg += ' f06 plot_145 -v | --version\n'
    msg += '\n'
    msg += "Positional Arguments:\n"
    msg += " F06_FILENAME path to input F06 files\n"
    msg += 'Options:\n'
    msg += " --modes MODES the modes to plot (e.g. 1:10,20:22); unused\n"
    msg += " --subcases SUB the subcases to plot (e.g. 1,3); unused\n"
    # Bug fix: these two lines were missing their trailing newline, so the
    # --xlim, --ylim and Info lines all ran together on one line of the
    # docopt usage/help text.
    msg += " --xlim FREQ the frequency limits (unused)\n"
    msg += " --ylim DAMP the damping limits (unused)\n"
    msg += '\n'
    msg += 'Info:\n'
    msg += ' -h, --help show this help message and exit\n'
    msg += " -v, --version show program's version number and exit\n"
    # No arguments: print usage and exit.
    if len(sys.argv) == 1:
        sys.exit(msg)
    ver = str(pyNastran.__version__)
    #type_defaults = {
    # '--nerrors' : [int, 100],
    #}
    data = docopt(msg, version=ver)
    print(data)
    f06_filename = data['F06_FILENAME']
    if not f06_filename.lower().endswith('.f06'):
        # Allow the .f06 extension to be omitted on the command line.
        base = os.path.splitext(f06_filename)[0]
        f06_filename = base + '.f06'
    modes = data['--modes']
    modes2 = []
    if modes is not None:
        # Expand "a", "a:b" (inclusive), and "a:b:step" tokens; "a:" selects
        # everything from mode a onwards and must be the only token.
        smodes = modes.strip().split(',')
        for mode in smodes:
            mode = mode.strip()
            if ':' in mode:
                smode = mode.split(':')
                if len(smode) == 2:
                    istart = int(smode[0])
                    if smode[1] == '':
                        iend = None
                        modes2 = slice(istart, None)
                        assert len(smodes) == 1, smodes
                    else:
                        iend = int(smode[1])
                        assert iend > istart, 'smode=%s; istart=%s iend=%s' % (smode, istart, iend)
                        modes2 += list(range(istart, iend + 1))
                elif len(smode) == 3:
                    istart = int(smode[0])
                    iend = int(smode[1])
                    assert iend > istart, 'smode=%s; istart=%s iend=%s' % (smode, istart, iend)
                    istep = int(smode[2])
                    modes2 += list(range(istart, iend + 1, istep))
                else:
                    raise NotImplementedError('smode=%r; len=%s' % (smode, len(smode)))
            else:
                imode = int(mode)
                modes2.append(imode)
        #modes = np.array(modes2, dtype='int32') - 1
        modes = modes2
    print('modes = %s' % modes)
    plot_flutter_f06(f06_filename, modes=modes,
                     plot_root_locus=True, plot_vg_vf=True, plot_vg=False)
    #plot_flutter_f06(f06_filename, plot_root_locus=False, plot_vg_vf=True)
def cmd_line():  # pragma: no cover
    """Top-level dispatcher for the ``f06`` command-line tool.

    Currently only the ``plot_145`` subcommand is supported.
    """
    import sys
    dev = True  # NOTE(review): unused within this function
    msg = 'Usage:\n'
    msg += ' f06 plot_145 F06_FILENAME [--noline] [--modes MODES] [--subcases SUB] [--xlim FREQ] [--ylim DAMP]\n'
    msg += '\n'
    msg += ' f06 plot_145 -h | --help\n'
    msg += ' f06 -v | --version\n'
    msg += '\n'
    # No arguments: print usage and exit.
    if len(sys.argv) == 1:
        sys.exit(msg)
    #assert sys.argv[0] != 'bdf', msg
    if sys.argv[1] == 'plot_145':
        cmd_line_plot_flutter()
    else:
        sys.exit(msg)
        #raise NotImplementedError('arg1=%r' % sys.argv[1])
if __name__ == '__main__':  # pragma: no cover
    # Bug fix: this module defines cmd_line(), not main(); the previous
    # call to the undefined name raised NameError when run as a script.
    cmd_line()
|
wemanuel/smry
|
refs/heads/master
|
server-auth/ls/google-cloud-sdk/lib/dns/dnssec.py
|
33
|
# Copyright (C) 2003-2007, 2009 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Common DNSSEC-related functions and constants."""
import cStringIO
import struct
import time
import dns.exception
import dns.hash
import dns.name
import dns.node
import dns.rdataset
import dns.rdata
import dns.rdatatype
import dns.rdataclass
# Module-specific exceptions raised by the DNSSEC helpers below.

class UnsupportedAlgorithm(dns.exception.DNSException):
    """Raised if an algorithm is not supported."""
    pass


class ValidationFailure(dns.exception.DNSException):
    """The DNSSEC signature is invalid."""
    pass
# DNSSEC algorithm numbers used throughout this module.
RSAMD5 = 1
DH = 2
DSA = 3
ECC = 4
RSASHA1 = 5
DSANSEC3SHA1 = 6
RSASHA1NSEC3SHA1 = 7
RSASHA256 = 8
RSASHA512 = 10
INDIRECT = 252
PRIVATEDNS = 253
PRIVATEOID = 254

# Mapping from algorithm mnemonic to algorithm number.
_algorithm_by_text = {
    'RSAMD5' : RSAMD5,
    'DH' : DH,
    'DSA' : DSA,
    'ECC' : ECC,
    'RSASHA1' : RSASHA1,
    'DSANSEC3SHA1' : DSANSEC3SHA1,
    'RSASHA1NSEC3SHA1' : RSASHA1NSEC3SHA1,
    'RSASHA256' : RSASHA256,
    'RSASHA512' : RSASHA512,
    'INDIRECT' : INDIRECT,
    'PRIVATEDNS' : PRIVATEDNS,
    'PRIVATEOID' : PRIVATEOID,
}

# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mapping not to be true inverse.
# (Python 2 code: dict.iteritems.)
_algorithm_by_value = dict([(y, x) for x, y in _algorithm_by_text.iteritems()])
def algorithm_from_text(text):
    """Convert text into a DNSSEC algorithm value.

    Accepts either a known mnemonic (case-insensitive) or a plain number.
    @rtype: int"""
    try:
        return _algorithm_by_text[text.upper()]
    except KeyError:
        # Not a known mnemonic: interpret the text as a numeric value.
        return int(text)
def algorithm_to_text(value):
    """Convert a DNSSEC algorithm value to text.

    Returns the mnemonic when known, otherwise the decimal string form.
    @rtype: string"""
    try:
        return _algorithm_by_value[value]
    except KeyError:
        # Unknown algorithm number: fall back to its decimal representation.
        return str(value)
def _to_rdata(record, origin):
    """Return the wire-format rdata of *record* as a byte string.

    Python 2 code: cStringIO buffers hold byte strings.
    """
    s = cStringIO.StringIO()
    record.to_wire(s, origin=origin)
    return s.getvalue()
def key_id(key, origin=None):
    """Return the 16-bit key tag of DNSKEY rdata *key*.

    Python 2 code: rdata is a byte string, hence the ord() calls.
    """
    rdata = _to_rdata(key, origin)
    if key.algorithm == RSAMD5:
        # RSA/MD5 keys carry the tag in the third- and second-to-last octets.
        return (ord(rdata[-3]) << 8) + ord(rdata[-2])
    else:
        # Sum the rdata as big-endian 16-bit words (zero-padding an odd
        # trailing octet), fold the carry back in, and truncate to 16 bits
        # (see RFC 4034, appendix B).
        total = 0
        for i in range(len(rdata) / 2):
            total += (ord(rdata[2 * i]) << 8) + ord(rdata[2 * i + 1])
        if len(rdata) % 2 != 0:
            total += ord(rdata[len(rdata) - 1]) << 8
        total += ((total >> 16) & 0xffff);
        return total & 0xffff
def make_ds(name, key, algorithm, origin=None):
    """Build and return a DS rdata for DNSKEY *key*.

    @param name: the owner name (text or dns.name.Name)
    @param key: the DNSKEY rdata
    @param algorithm: digest algorithm name, 'SHA1' or 'SHA256'
    @param origin: origin used to absolutize a relative *name*
    @raises UnsupportedAlgorithm: for any other digest algorithm
    """
    if algorithm.upper() == 'SHA1':
        dsalg = 1
        hash = dns.hash.get('SHA1')()
    elif algorithm.upper() == 'SHA256':
        dsalg = 2
        hash = dns.hash.get('SHA256')()
    else:
        raise UnsupportedAlgorithm, 'unsupported algorithm "%s"' % algorithm
    if isinstance(name, (str, unicode)):
        name = dns.name.from_text(name, origin)
    # The digest covers the canonical owner name followed by the DNSKEY rdata.
    hash.update(name.canonicalize().to_wire())
    hash.update(_to_rdata(key, origin))
    digest = hash.digest()
    # DS rdata: key tag, algorithm, digest type, then the digest itself.
    dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, dsalg) + digest
    return dns.rdata.from_wire(dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0,
                               len(dsrdata))
def _find_key(keys, rrsig):
    """Return the DNSKEY rdata from *keys* that matches *rrsig*, or None.

    @param keys: dictionary keyed by dns.name.Name with node or rdataset values
    @param rrsig: the RRSIG rdata whose signer, algorithm, and key tag must match
    """
    value = keys.get(rrsig.signer)
    if value is None:
        return None
    if isinstance(value, dns.node.Node):
        try:
            # Bug fix: this previously referenced an undefined name 'node'
            # (NameError at runtime); the dictionary value is what holds
            # the node whose DNSKEY rdataset we want.
            rdataset = value.find_rdataset(dns.rdataclass.IN,
                                           dns.rdatatype.DNSKEY)
        except KeyError:
            return None
    else:
        rdataset = value
    # A key matches when both its algorithm and its computed key tag agree
    # with the RRSIG being validated.
    for rdata in rdataset:
        if rdata.algorithm == rrsig.algorithm and \
           key_id(rdata) == rrsig.key_tag:
            return rdata
    return None
# Algorithm classification helpers: which public-key scheme and which hash
# function a given DNSSEC algorithm number uses.

def _is_rsa(algorithm):
    return algorithm in (RSAMD5, RSASHA1,
                         RSASHA1NSEC3SHA1, RSASHA256,
                         RSASHA512)

def _is_dsa(algorithm):
    return algorithm in (DSA, DSANSEC3SHA1)

def _is_md5(algorithm):
    return algorithm == RSAMD5

def _is_sha1(algorithm):
    return algorithm in (DSA, RSASHA1,
                         DSANSEC3SHA1, RSASHA1NSEC3SHA1)

def _is_sha256(algorithm):
    return algorithm == RSASHA256

def _is_sha512(algorithm):
    return algorithm == RSASHA512
def _make_hash(algorithm):
    """Return a new hash object appropriate for DNSSEC *algorithm*.

    @raises ValidationFailure: if the algorithm's hash function is unknown
    """
    if _is_md5(algorithm):
        return dns.hash.get('MD5')()
    if _is_sha1(algorithm):
        return dns.hash.get('SHA1')()
    if _is_sha256(algorithm):
        return dns.hash.get('SHA256')()
    if _is_sha512(algorithm):
        return dns.hash.get('SHA512')()
    raise ValidationFailure, 'unknown hash for algorithm %u' % algorithm
def _make_algorithm_id(algorithm):
    """Return the DER-encoded DigestInfo prefix for *algorithm* as a string.

    This is the ASN.1 header that precedes the raw digest inside a
    PKCS#1 v1.5 RSA signature.
    @raises ValidationFailure: if the algorithm is unknown
    """
    if _is_md5(algorithm):
        oid = [0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x02, 0x05]
    elif _is_sha1(algorithm):
        oid = [0x2b, 0x0e, 0x03, 0x02, 0x1a]
    elif _is_sha256(algorithm):
        oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01]
    elif _is_sha512(algorithm):
        oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03]
    else:
        raise ValidationFailure, 'unknown algorithm %u' % algorithm
    olen = len(oid)
    dlen = _make_hash(algorithm).digest_size
    # SEQUENCE { SEQUENCE { OID, NULL }, OCTET STRING header for the digest }
    idbytes = [0x30] + [8 + olen + dlen] + \
              [0x30, olen + 4] + [0x06, olen] + oid + \
              [0x05, 0x00] + [0x04, dlen]
    return ''.join(map(chr, idbytes))
def _validate_rrsig(rrset, rrsig, keys, origin=None, now=None):
    """Validate an RRset against a single signature rdata

    The owner name of the rrsig is assumed to be the same as the owner name
    of the rrset.

    @param rrset: The RRset to validate
    @type rrset: dns.rrset.RRset or (dns.name.Name, dns.rdataset.Rdataset)
    tuple
    @param rrsig: The signature rdata
    @type rrsig: dns.rrset.Rdata
    @param keys: The key dictionary.
    @type keys: a dictionary keyed by dns.name.Name with node or rdataset values
    @param origin: The origin to use for relative names
    @type origin: dns.name.Name or None
    @param now: The time to use when validating the signatures. The default
    is the current time.
    @type now: int
    @raises ValidationFailure: if the signature does not validate
    """
    if isinstance(origin, (str, unicode)):
        origin = dns.name.from_text(origin, dns.name.root)
    key = _find_key(keys, rrsig)
    if not key:
        raise ValidationFailure, 'unknown key'
    # For convenience, allow the rrset to be specified as a (name, rdataset)
    # tuple as well as a proper rrset
    if isinstance(rrset, tuple):
        rrname = rrset[0]
        rdataset = rrset[1]
    else:
        rrname = rrset.name
        rdataset = rrset
    if now is None:
        now = time.time()
    # Reject signatures outside their validity window.
    if rrsig.expiration < now:
        raise ValidationFailure, 'expired'
    if rrsig.inception > now:
        raise ValidationFailure, 'not yet valid'
    hash = _make_hash(rrsig.algorithm)
    if _is_rsa(rrsig.algorithm):
        # DNSKEY RSA key material: exponent length (1 octet, or 0 followed
        # by a 2-octet length), then exponent, then modulus.
        keyptr = key.key
        (bytes,) = struct.unpack('!B', keyptr[0:1])  # NOTE: shadows builtin 'bytes'
        keyptr = keyptr[1:]
        if bytes == 0:
            (bytes,) = struct.unpack('!H', keyptr[0:2])
            keyptr = keyptr[2:]
        rsa_e = keyptr[0:bytes]
        rsa_n = keyptr[bytes:]
        keylen = len(rsa_n) * 8
        pubkey = Crypto.PublicKey.RSA.construct(
            (Crypto.Util.number.bytes_to_long(rsa_n),
             Crypto.Util.number.bytes_to_long(rsa_e)))
        sig = (Crypto.Util.number.bytes_to_long(rrsig.signature),)
    elif _is_dsa(rrsig.algorithm):
        # DNSKEY DSA key material: size parameter t, then Q (20 octets),
        # then P, G, Y (each 64 + t*8 octets).
        keyptr = key.key
        (t,) = struct.unpack('!B', keyptr[0:1])
        keyptr = keyptr[1:]
        octets = 64 + t * 8
        dsa_q = keyptr[0:20]
        keyptr = keyptr[20:]
        dsa_p = keyptr[0:octets]
        keyptr = keyptr[octets:]
        dsa_g = keyptr[0:octets]
        keyptr = keyptr[octets:]
        dsa_y = keyptr[0:octets]
        pubkey = Crypto.PublicKey.DSA.construct(
            (Crypto.Util.number.bytes_to_long(dsa_y),
             Crypto.Util.number.bytes_to_long(dsa_g),
             Crypto.Util.number.bytes_to_long(dsa_p),
             Crypto.Util.number.bytes_to_long(dsa_q)))
        (dsa_r, dsa_s) = struct.unpack('!20s20s', rrsig.signature[1:])
        sig = (Crypto.Util.number.bytes_to_long(dsa_r),
               Crypto.Util.number.bytes_to_long(dsa_s))
    else:
        raise ValidationFailure, 'unknown algorithm %u' % rrsig.algorithm
    # Hash the first 18 octets of the RRSIG rdata (the fixed-size fields
    # preceding the signer name), then the signer name in digestable form.
    hash.update(_to_rdata(rrsig, origin)[:18])
    hash.update(rrsig.signer.to_digestable(origin))
    # If the signature covers a wildcard expansion, reconstruct the
    # wildcard owner name before hashing.
    if rrsig.labels < len(rrname) - 1:
        suffix = rrname.split(rrsig.labels + 1)[1]
        rrname = dns.name.from_text('*', suffix)
    rrnamebuf = rrname.to_digestable(origin)
    rrfixed = struct.pack('!HHI', rdataset.rdtype, rdataset.rdclass,
                          rrsig.original_ttl)
    # The RRs are hashed in sorted order, each preceded by owner name,
    # fixed fields, and its rdata length.
    rrlist = sorted(rdataset);
    for rr in rrlist:
        hash.update(rrnamebuf)
        hash.update(rrfixed)
        rrdata = rr.to_digestable(origin)
        rrlen = struct.pack('!H', len(rrdata))
        hash.update(rrlen)
        hash.update(rrdata)
    digest = hash.digest()
    if _is_rsa(rrsig.algorithm):
        # PKCS1 algorithm identifier goop
        digest = _make_algorithm_id(rrsig.algorithm) + digest
        padlen = keylen / 8 - len(digest) - 3
        digest = chr(0) + chr(1) + chr(0xFF) * padlen + chr(0) + digest
    elif _is_dsa(rrsig.algorithm):
        pass
    else:
        # Raise here for code clarity; this won't actually ever happen
        # since if the algorithm is really unknown we'd already have
        # raised an exception above
        raise ValidationFailure, 'unknown algorithm %u' % rrsig.algorithm
    if not pubkey.verify(digest, sig):
        raise ValidationFailure, 'verify failure'
def _validate(rrset, rrsigset, keys, origin=None, now=None):
"""Validate an RRset
@param rrset: The RRset to validate
@type rrset: dns.rrset.RRset or (dns.name.Name, dns.rdataset.Rdataset)
tuple
@param rrsigset: The signature RRset
@type rrsigset: dns.rrset.RRset or (dns.name.Name, dns.rdataset.Rdataset)
tuple
@param keys: The key dictionary.
@type keys: a dictionary keyed by dns.name.Name with node or rdataset values
@param origin: The origin to use for relative names
@type origin: dns.name.Name or None
@param now: The time to use when validating the signatures. The default
is the current time.
@type now: int
"""
if isinstance(origin, (str, unicode)):
origin = dns.name.from_text(origin, dns.name.root)
if isinstance(rrset, tuple):
rrname = rrset[0]
else:
rrname = rrset.name
if isinstance(rrsigset, tuple):
rrsigname = rrsigset[0]
rrsigrdataset = rrsigset[1]
else:
rrsigname = rrsigset.name
rrsigrdataset = rrsigset
rrname = rrname.choose_relativity(origin)
rrsigname = rrname.choose_relativity(origin)
if rrname != rrsigname:
raise ValidationFailure, "owner names do not match"
for rrsig in rrsigrdataset:
try:
_validate_rrsig(rrset, rrsig, keys, origin, now)
return
except ValidationFailure, e:
pass
raise ValidationFailure, "no RRSIGs validated"
def _need_pycrypto(*args, **kwargs):
    # Stub bound to validate/validate_rrsig when PyCrypto is not installed.
    raise NotImplementedError, "DNSSEC validation requires pycrypto"
# Bind the public entry points: the real validators when PyCrypto imports
# cleanly, otherwise stubs that raise NotImplementedError on first use.
try:
    import Crypto.PublicKey.RSA
    import Crypto.PublicKey.DSA
    import Crypto.Util.number
    validate = _validate
    validate_rrsig = _validate_rrsig
except ImportError:
    validate = _need_pycrypto
    validate_rrsig = _need_pycrypto
|
Mlieou/lXXtcode
|
refs/heads/master
|
leetcode/python/ex_535.py
|
3
|
class Codec:
    """LeetCode 535 TinyURL codec: random 6-character keys map to long URLs."""

    # Shared across instances: key -> original URL.
    table = {}
    # 62-character alphabet for generated keys (indices 0..61).
    chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
    url = 'https://short.it/'

    def encode(self, longUrl):
        """Encodes a URL to a shortened URL.

        :type longUrl: str
        :rtype: str
        """
        # Bug fix: 'random' was used without being imported anywhere in
        # this file, raising NameError on the first encode() call.
        import random
        key = ''
        # Keep drawing 6-character keys until one is unused (collision retry).
        while not key or key in self.table:
            key = ''.join(self.chars[random.randint(0, 61)] for _ in range(6))
        self.table[key] = longUrl
        return self.url + key

    def decode(self, shortUrl):
        """Decodes a shortened URL to its original URL.

        :type shortUrl: str
        :rtype: str
        """
        # Keys are always exactly 6 characters, so slice them off the end.
        key = shortUrl[-6:]
        return self.table[key]
|
akirk/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/regiotv.py
|
99
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
sanitized_Request,
xpath_text,
xpath_with_ns,
)
class RegioTVIE(InfoExtractor):
    """Extractor for regio-tv.de videos.

    The watch page embeds a key which is exchanged for the actual video URL
    via a SOAP request to v.telvi.de.
    """
    _VALID_URL = r'https?://(?:www\.)?regio-tv\.de/video/(?P<id>[0-9]+)'
    _TESTS = [{
        'url': 'http://www.regio-tv.de/video/395808.html',
        'info_dict': {
            'id': '395808',
            'ext': 'mp4',
            'title': 'Wir in Ludwigsburg',
            'description': 'Mit unseren zuckersüßen Adventskindern, außerdem besuchen wir die Abendsterne!',
        }
    }, {
        'url': 'http://www.regio-tv.de/video/395808',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The SOAP key is embedded in the page as: key: '<value>' (or "...").
        key = self._search_regex(
            r'key\s*:\s*(["\'])(?P<key>.+?)\1', webpage, 'key', group='key')
        title = self._og_search_title(webpage)
        SOAP_TEMPLATE = '<?xml version="1.0" encoding="utf-8"?><soap:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/"><soap:Body><{0} xmlns="http://v.telvi.de/"><key xsi:type="xsd:string">{1}</key></{0}></soap:Body></soap:Envelope>'
        request = sanitized_Request(
            'http://v.telvi.de/',
            SOAP_TEMPLATE.format('GetHTML5VideoData', key).encode('utf-8'))
        # The SOAP response is XML with namespaced <video> and <image> nodes.
        video_data = self._download_xml(request, video_id, 'Downloading video XML')
        NS_MAP = {
            'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
            'soap': 'http://schemas.xmlsoap.org/soap/envelope/',
        }
        video_url = xpath_text(
            video_data, xpath_with_ns('.//video', NS_MAP), 'video url', fatal=True)
        thumbnail = xpath_text(
            video_data, xpath_with_ns('.//image', NS_MAP), 'thumbnail')
        description = self._og_search_description(
            webpage) or self._html_search_meta('description', webpage)
        return {
            'id': video_id,
            'url': video_url,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
        }
|
jwren/intellij-community
|
refs/heads/master
|
python/testData/quickFixes/PyAddImportQuickFixTest/reexportedName/flask/globals.py
|
35
|
# Test fixture: minimal stand-in for the re-exported flask.globals.request
# object used by the add-import quick-fix test data.
request = object()
|
Furzoom/learnpython
|
refs/heads/master
|
learnpythonthehardway/ex8.py
|
1
|
# -*- encoding: utf-8 -*-
# Learn Python the Hard Way, exercise 8: %r formatting (Python 2 print).
formatter = "%r %r %r %r"

# %r renders repr() of each value, so strings keep their quotes.
print formatter % (1, 2, 3, 4)
print formatter % ("one", "two", "three", "four")
print formatter % (True, False, False, True)
# A format string can format itself like any other string.
print formatter % (formatter, formatter, formatter, formatter)
print formatter % (
    "I had this thing.",
    "That you could type up right.",
    "But it didn't sing.",
    "So I said goodnight."
)
# %r on a non-ASCII byte string shows escaped bytes under Python 2.
print "%r" % "顺利"
|
vincent-tr/rpi-js-os
|
refs/heads/master
|
ext/v8-6.3.166/v8/tools/unittests/testdata/d8_mocked2.py
|
7
|
#!/usr/bin/env python
# Copyright 2017 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Mock d8 output: fixed benchmark score lines, giving the run-perf unit
# tests deterministic values to parse (Python 2 print statements).
print 'Richards1: 1'
print 'DeltaBlue1: 1'
print 'Richards2: 0.2'
print 'DeltaBlue2: 1.0'
print 'DeltaBlue3: 0.1'
|
cstipkovic/spidermonkey-research
|
refs/heads/master
|
testing/puppeteer/firefox/firefox_puppeteer/ui/browser/notifications.py
|
1
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from abc import ABCMeta
from marionette_driver import By
from firefox_puppeteer.ui_base_lib import UIBaseLib
class BaseNotification(UIBaseLib):
    """Abstract base class for any kind of notification."""

    __metaclass__ = ABCMeta

    @property
    def close_button(self):
        """Provide access to the close button.

        :returns: The close button.
        """
        return self.element.find_element(By.ANON_ATTRIBUTE,
                                         {'anonid': 'closebutton'})

    @property
    def label(self):
        """Provide access to the notification label.

        :returns: The notification label.
        """
        return self.element.get_attribute('label')

    @property
    def origin(self):
        """Provide access to the notification origin.

        :returns: The notification origin.
        """
        return self.element.get_attribute('origin')

    def close(self, force=False):
        """Close the notification.

        :param force: Optional, if True force close the notification.
         Defaults to False.
        """
        if force:
            # Click via script to bypass normal UI interaction checks.
            self.marionette.execute_script('arguments[0].click()',
                                           script_args=[self.close_button])
        else:
            self.close_button.click()
        # Block until the notification has actually disappeared.
        self.window.wait_for_notification(None)
class AddOnInstallBlockedNotification(BaseNotification):
    """Add-on install blocked notification."""

    @property
    def allow_button(self):
        """Provide access to the allow button.

        :returns: The allow button.
        """
        # The button is nested one level inside an anonymous 'button' node.
        return self.element.find_element(
            By.ANON_ATTRIBUTE, {'anonid': 'button'}).find_element(
            By.ANON_ATTRIBUTE, {'anonid': 'button'})
class AddOnInstallConfirmationNotification(BaseNotification):
    """Add-on install confirmation notification."""

    @property
    def addon_name(self):
        """Provide access to the add-on name.

        :returns: The add-on name.
        """
        label = self.element.find_element(
            By.CSS_SELECTOR, '#addon-install-confirmation-content label')
        return label.get_attribute('value')

    @property
    def cancel_button(self):
        """Provide access to the cancel button.

        :returns: The cancel button.
        """
        # Fix: decorated as @property for consistency with every other
        # element accessor in this module (close_button, allow_button,
        # addon_name); these two were plain methods by omission.
        return self.element.find_element(
            By.ID, 'addon-install-confirmation-cancel')

    @property
    def install_button(self):
        """Provide access to the install button.

        :returns: The install button.
        """
        # Fix: decorated as @property for consistency with every other
        # element accessor in this module.
        return self.element.find_element(
            By.ID, 'addon-install-confirmation-accept')
class AddOnInstallCompleteNotification(BaseNotification):
    """Add-on install complete notification."""
    # No elements beyond the BaseNotification accessors.
    pass


class AddOnInstallFailedNotification(BaseNotification):
    """Add-on install failed notification."""
    pass


class AddOnProgressNotification(BaseNotification):
    """Add-on progress notification."""
    pass
|
solidfire/solidfire-cli
|
refs/heads/master
|
element/cli/parser.py
|
2
|
from element import exceptions
def parse_array(input_string):
    """Parse a comma-separated string, optionally bracketed, into a list.

    Returns None for empty/'null'/None input, [] for an empty bracket pair,
    and otherwise the comma-separated tokens (no whitespace trimming).
    """
    # Missing or explicit-null input means "no value at all".
    if(input_string == '' or input_string == 'null' or input_string is None):
        return None
    # Peel off one surrounding pair of brackets when present; an empty
    # interior means the caller asked for an empty array.
    if(input_string[0] == '[' and input_string[-1] == ']'):
        input_string = input_string[1:-1]
        if input_string == '':
            return []
    # Everything else is a plain comma-separated list of tokens.
    return str(input_string).split(',')
|
kressi/erpnext
|
refs/heads/develop
|
erpnext/schools/doctype/program/test_program.py
|
46
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
# test_records = frappe.get_test_records('Program')
class TestProgram(unittest.TestCase):
    # Placeholder suite: the Program doctype has no standalone assertions yet.
    pass
|
jarn0ld/gnuradio
|
refs/heads/master
|
grc/gui/DrawingArea.py
|
19
|
"""
Copyright 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import pygtk
pygtk.require('2.0')
import gtk
from Constants import MIN_WINDOW_WIDTH, MIN_WINDOW_HEIGHT, DND_TARGETS
import Colors
class DrawingArea(gtk.DrawingArea):
    """
    DrawingArea is the gtk pixel map that graphical elements may draw themselves on.
    The drawing area also responds to mouse and key events.
    """

    def __init__(self, flow_graph):
        """
        DrawingArea contructor.
        Connect event handlers.

        Args:
            main_window: the main_window containing all flow graphs
        """
        # Modifier-key state, refreshed on every mouse event for the flow graph.
        self.ctrl_mask = False
        self.mod1_mask = False
        self._flow_graph = flow_graph
        gtk.DrawingArea.__init__(self)
        self.set_size_request(MIN_WINDOW_WIDTH, MIN_WINDOW_HEIGHT)
        self.connect('realize', self._handle_window_realize)
        self.connect('configure-event', self._handle_window_configure)
        self.connect('expose-event', self._handle_window_expose)
        self.connect('motion-notify-event', self._handle_mouse_motion)
        self.connect('button-press-event', self._handle_mouse_button_press)
        self.connect('button-release-event', self._handle_mouse_button_release)
        self.connect('scroll-event', self._handle_mouse_scroll)
        self.add_events(
            gtk.gdk.BUTTON_PRESS_MASK | \
            gtk.gdk.POINTER_MOTION_MASK | \
            gtk.gdk.BUTTON_RELEASE_MASK | \
            gtk.gdk.LEAVE_NOTIFY_MASK | \
            gtk.gdk.ENTER_NOTIFY_MASK | \
            gtk.gdk.FOCUS_CHANGE_MASK
        )
        #setup drag and drop
        self.drag_dest_set(gtk.DEST_DEFAULT_ALL, DND_TARGETS, gtk.gdk.ACTION_COPY)
        self.connect('drag-data-received', self._handle_drag_data_received)
        #setup the focus flag
        self._focus_flag = False
        self.get_focus_flag = lambda: self._focus_flag
        # Track pointer enter/leave so selection logic knows whether the
        # canvas currently "has focus".
        def _handle_notify_event(widget, event, focus_flag): self._focus_flag = focus_flag
        self.connect('leave-notify-event', _handle_notify_event, False)
        self.connect('enter-notify-event', _handle_notify_event, True)
        self.set_flags(gtk.CAN_FOCUS)  # self.set_can_focus(True)
        self.connect('focus-out-event', self._handle_focus_lost_event)

    def new_pixmap(self, width, height):
        # Off-screen buffer backed by this widget's gdk window.
        return gtk.gdk.Pixmap(self.window, width, height, -1)

    def get_screenshot(self, transparent_bg=False):
        # Render the current backing pixmap into a pixbuf; optionally make
        # the flowgraph background color transparent.
        pixmap = self._pixmap
        W, H = pixmap.get_size()
        pixbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, 0, 8, W, H)
        pixbuf.fill(0xFF + Colors.FLOWGRAPH_BACKGROUND_COLOR.pixel << 8)
        pixbuf.get_from_drawable(pixmap, pixmap.get_colormap(), 0, 0, 0, 0, W-1, H-1)
        if transparent_bg:
            bgc = Colors.FLOWGRAPH_BACKGROUND_COLOR
            pixbuf = pixbuf.add_alpha(True, bgc.red, bgc.green, bgc.blue)
        return pixbuf

    ##########################################################################
    ## Handlers
    ##########################################################################
    def _handle_drag_data_received(self, widget, drag_context, x, y, selection_data, info, time):
        """
        Handle a drag and drop by adding a block at the given coordinate.
        """
        self._flow_graph.add_new_block(selection_data.data, (x, y))

    def _handle_mouse_scroll(self, widget, event):
        # Shift+scroll is remapped to horizontal scrolling.
        if event.state & gtk.gdk.SHIFT_MASK:
            if event.direction == gtk.gdk.SCROLL_UP:
                event.direction = gtk.gdk.SCROLL_LEFT
            else:
                event.direction = gtk.gdk.SCROLL_RIGHT

    def _handle_mouse_button_press(self, widget, event):
        """
        Forward button click information to the flow graph.
        """
        self.grab_focus()
        self.ctrl_mask = event.state & gtk.gdk.CONTROL_MASK
        self.mod1_mask = event.state & gtk.gdk.MOD1_MASK
        # Button 1 selects (with double-click detection); button 3 opens the
        # context menu.
        if event.button == 1: self._flow_graph.handle_mouse_selector_press(
            double_click=(event.type == gtk.gdk._2BUTTON_PRESS),
            coordinate=(event.x, event.y),
        )
        if event.button == 3: self._flow_graph.handle_mouse_context_press(
            coordinate=(event.x, event.y),
            event=event,
        )

    def _handle_mouse_button_release(self, widget, event):
        """
        Forward button release information to the flow graph.
        """
        self.ctrl_mask = event.state & gtk.gdk.CONTROL_MASK
        self.mod1_mask = event.state & gtk.gdk.MOD1_MASK
        if event.button == 1: self._flow_graph.handle_mouse_selector_release(
            coordinate=(event.x, event.y),
        )

    def _handle_mouse_motion(self, widget, event):
        """
        Forward mouse motion information to the flow graph.
        """
        self.ctrl_mask = event.state & gtk.gdk.CONTROL_MASK
        self.mod1_mask = event.state & gtk.gdk.MOD1_MASK
        self._flow_graph.handle_mouse_motion(
            coordinate=(event.x, event.y),
        )

    def _handle_window_realize(self, widget):
        """
        Called when the window is realized.
        Update the flowgraph, which calls new pixmap.
        """
        self._flow_graph.update()

    def _handle_window_configure(self, widget, event):
        """
        Called when the window is resized.
        Create a new pixmap for background buffer.
        """
        self._pixmap = self.new_pixmap(*self.get_size_request())

    def _handle_window_expose(self, widget, event):
        """
        Called when window is exposed, or queue_draw is called.
        Double buffering: draw to pixmap, then draw pixmap to window.
        """
        gc = self.window.new_gc()
        self._flow_graph.draw(gc, self._pixmap)
        self.window.draw_drawable(gc, self._pixmap, 0, 0, 0, 0, -1, -1)
        # draw a light grey line on the bottom and right end of the canvas.
        # this is useful when the theme uses the same panel bg color as the canvas
        W, H = self._pixmap.get_size()
        gc.set_foreground(Colors.FLOWGRAPH_EDGE_COLOR)
        self.window.draw_line(gc, 0, H-1, W, H-1)
        self.window.draw_line(gc, W-1, 0, W-1, H)

    def _handle_focus_lost_event(self, widget, event):
        # don't clear selection while context menu is active
        if not self._flow_graph.get_context_menu().flags() & gtk.VISIBLE:
            self._flow_graph.unselect()
            self._flow_graph.update_selected()
            self._flow_graph.queue_draw()
|
sabi0/intellij-community
|
refs/heads/master
|
python/testData/completion/heavyStarPropagation/lib/_pkg1/_pkg1_1/_pkg1_1_0/_pkg1_1_0_0/_pkg1_1_0_0_1/__init__.py
|
30
|
from ._mod1_1_0_0_1_0 import *
from ._mod1_1_0_0_1_1 import *
from ._mod1_1_0_0_1_2 import *
from ._mod1_1_0_0_1_3 import *
from ._mod1_1_0_0_1_4 import *
|
longmen21/edx-platform
|
refs/heads/master
|
common/test/acceptance/tests/studio/base_studio_test.py
|
7
|
"""
Base classes used by studio tests.
"""
from bok_choy.web_app_test import WebAppTest
from bok_choy.page_object import XSS_INJECTION
from common.test.acceptance.pages.studio.auto_auth import AutoAuthPage
from common.test.acceptance.fixtures.course import CourseFixture
from common.test.acceptance.fixtures.library import LibraryFixture
from common.test.acceptance.tests.helpers import UniqueCourseTest
from common.test.acceptance.pages.studio.overview import CourseOutlinePage
from common.test.acceptance.pages.studio.utils import verify_ordering
class StudioCourseTest(UniqueCourseTest):
    """
    Base class for all Studio course tests.
    """

    def setUp(self, is_staff=False, test_xss=True):  # pylint: disable=arguments-differ
        """
        Install a course with no content using a fixture.
        """
        super(StudioCourseTest, self).setUp()
        self.test_xss = test_xss
        self.install_course_fixture(is_staff)

    def install_course_fixture(self, is_staff=False):
        """
        Install a course fixture
        """
        self.course_fixture = CourseFixture(
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run'],
            self.course_info['display_name'],
        )
        if self.test_xss:
            # Inject an XSS payload into several advanced settings so tests
            # can detect improper escaping wherever these values render.
            xss_injected_unique_id = XSS_INJECTION + self.unique_id
            test_improper_escaping = {u"value": xss_injected_unique_id}
            self.course_fixture.add_advanced_settings({
                "advertised_start": test_improper_escaping,
                "info_sidebar_name": test_improper_escaping,
                "cert_name_short": test_improper_escaping,
                "cert_name_long": test_improper_escaping,
                "display_organization": test_improper_escaping,
                "display_coursenumber": test_improper_escaping,
            })
            self.course_info['display_organization'] = xss_injected_unique_id
            self.course_info['display_coursenumber'] = xss_injected_unique_id
        self.populate_course_fixture(self.course_fixture)
        self.course_fixture.install()
        self.user = self.course_fixture.user
        self.log_in(self.user, is_staff)

    def populate_course_fixture(self, course_fixture):
        """
        Populate the children of the test course fixture.
        """
        # Hook for subclasses; the base course installs empty.
        pass

    def log_in(self, user, is_staff=False):
        """
        Log in as the user that created the course. The user will be given instructor access
        to the course and enrolled in it. By default the user will not have staff access unless
        is_staff is passed as True.

        Args:
            user(dict): dictionary containing user data: {'username': ..., 'email': ..., 'password': ...}
            is_staff(bool): register this user as staff
        """
        self.auth_page = AutoAuthPage(
            self.browser,
            staff=is_staff,
            username=user.get('username'),
            email=user.get('email'),
            password=user.get('password')
        )
        self.auth_page.visit()
class ContainerBase(StudioCourseTest):
    """
    Base class for tests that do operations on the container page.
    """

    def setUp(self, is_staff=False):
        """
        Create a unique identifier for the course used in this test.
        """
        # Ensure that the superclass sets up
        super(ContainerBase, self).setUp(is_staff=is_staff)
        self.outline = CourseOutlinePage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )

    def go_to_nested_container_page(self):
        """
        Go to the nested container page.
        """
        unit = self.go_to_unit_page()
        # The 0th entry is the unit page itself.
        container = unit.xblocks[1].go_to_container()
        return container

    def go_to_unit_page(self, section_name='Test Section', subsection_name='Test Subsection', unit_name='Test Unit'):
        """
        Go to the test unit page.

        If make_draft is true, the unit page will be put into draft mode.
        """
        # NOTE(review): there is no make_draft parameter on this method; the
        # docstring's mention of it looks stale — confirm against history.
        self.outline.visit()
        subsection = self.outline.section(section_name).subsection(subsection_name)
        return subsection.expand_subsection().unit(unit_name).go_to()

    def do_action_and_verify(self, action, expected_ordering):
        """
        Perform the supplied action and then verify the resulting ordering.
        """
        container = self.go_to_nested_container_page()
        action(container)
        verify_ordering(self, container, expected_ordering)
        # Reload the page to see that the change was persisted.
        container = self.go_to_nested_container_page()
        verify_ordering(self, container, expected_ordering)
class StudioLibraryTest(WebAppTest):
    """
    Base class for all Studio library tests.
    """

    # Library creators are staff by default; subclasses may override.
    as_staff = True

    def setUp(self):
        """
        Create and install an (initially empty) library fixture, record
        its identifiers, and log in as the creating user.
        """
        super(StudioLibraryTest, self).setUp()
        lib_fixture = LibraryFixture(
            'test_org',
            self.unique_id,
            'Test Library {}'.format(self.unique_id),
        )
        self.populate_library_fixture(lib_fixture)
        lib_fixture.install()
        self.library_fixture = lib_fixture
        self.library_info = lib_fixture.library_info
        self.library_key = lib_fixture.library_key
        self.user = lib_fixture.user
        self.log_in(self.user, self.as_staff)

    def populate_library_fixture(self, library_fixture):
        """
        Hook for subclasses to add children to the library fixture
        before it is installed; does nothing by default.
        """
        pass

    def log_in(self, user, is_staff=False):
        """
        Log in as the user that created the library. Staff access is
        granted only when ``is_staff`` is True.
        """
        credentials = {
            'username': user.get('username'),
            'email': user.get('email'),
            'password': user.get('password'),
        }
        auth_page = AutoAuthPage(self.browser, staff=is_staff, **credentials)
        auth_page.visit()
|
tboyce021/home-assistant
|
refs/heads/dev
|
homeassistant/components/traccar/__init__.py
|
8
|
"""Support for Traccar."""
from aiohttp import web
import voluptuous as vol
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER
from homeassistant.const import CONF_WEBHOOK_ID, HTTP_OK, HTTP_UNPROCESSABLE_ENTITY
from homeassistant.helpers import config_entry_flow
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
ATTR_ACCURACY,
ATTR_ALTITUDE,
ATTR_BATTERY,
ATTR_BEARING,
ATTR_ID,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_SPEED,
ATTR_TIMESTAMP,
DOMAIN,
)
# Dispatcher signal name used to push location updates to the
# device_tracker platform (see handle_webhook).
TRACKER_UPDATE = f"{DOMAIN}_tracker_update"
# Default GPS accuracy applied when the webhook omits it.
# NOTE(review): reuses the HTTP_OK constant (200) as the accuracy value —
# presumably "200 meters"; confirm intent.
DEFAULT_ACCURACY = HTTP_OK
# Sentinel battery level meaning "not reported".
DEFAULT_BATTERY = -1
def _id(value: str) -> str:
"""Coerce id by removing '-'."""
return value.replace("-", "")
# Validation/normalization schema for the incoming webhook query parameters.
WEBHOOK_SCHEMA = vol.Schema(
    {
        # Device identifier; _id strips '-' characters.
        vol.Required(ATTR_ID): vol.All(cv.string, _id),
        vol.Required(ATTR_LATITUDE): cv.latitude,
        vol.Required(ATTR_LONGITUDE): cv.longitude,
        vol.Optional(ATTR_ACCURACY, default=DEFAULT_ACCURACY): vol.Coerce(float),
        vol.Optional(ATTR_ALTITUDE): vol.Coerce(float),
        vol.Optional(ATTR_BATTERY, default=DEFAULT_BATTERY): vol.Coerce(float),
        vol.Optional(ATTR_BEARING): vol.Coerce(float),
        vol.Optional(ATTR_SPEED): vol.Coerce(float),
        # presumably a Unix timestamp of the position fix — confirm
        # against the Traccar client documentation.
        vol.Optional(ATTR_TIMESTAMP): vol.Coerce(int),
    }
)
async def async_setup(hass, hass_config):
    """Set up the Traccar component."""
    # Shared state: the set of device ids seen so far, and per-config-entry
    # dispatcher-unsubscribe callbacks (popped again in async_unload_entry).
    hass.data[DOMAIN] = {"devices": set(), "unsub_device_tracker": {}}
    return True
async def handle_webhook(hass, webhook_id, request):
    """Handle incoming webhook with Traccar request.

    Validates the query parameters against WEBHOOK_SCHEMA, then forwards
    the position to the device_tracker platform via the dispatcher.
    """
    try:
        data = WEBHOOK_SCHEMA(dict(request.query))
    except vol.MultipleInvalid as error:
        # Malformed request: reply 422 with the first validation error.
        return web.Response(text=error.error_message, status=HTTP_UNPROCESSABLE_ENTITY)
    # Optional attributes passed through to the tracker entity; each may
    # be None if the device did not report it.
    attrs = {
        ATTR_ALTITUDE: data.get(ATTR_ALTITUDE),
        ATTR_BEARING: data.get(ATTR_BEARING),
        ATTR_SPEED: data.get(ATTR_SPEED),
    }
    device = data[ATTR_ID]
    # Notify listeners (the device_tracker platform) of the new position.
    async_dispatcher_send(
        hass,
        TRACKER_UPDATE,
        device,
        data[ATTR_LATITUDE],
        data[ATTR_LONGITUDE],
        data[ATTR_BATTERY],
        data[ATTR_ACCURACY],
        attrs,
    )
    return web.Response(text=f"Setting location for {device}", status=HTTP_OK)
async def async_setup_entry(hass, entry):
    """Configure based on config entry."""
    # Register the webhook endpoint Traccar clients will call.
    hass.components.webhook.async_register(
        DOMAIN, "Traccar", entry.data[CONF_WEBHOOK_ID], handle_webhook
    )
    # Forward the entry to the device_tracker platform.
    hass.async_create_task(
        hass.config_entries.async_forward_entry_setup(entry, DEVICE_TRACKER)
    )
    return True
async def async_unload_entry(hass, entry):
    """Unload a config entry."""
    hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
    # Pop and immediately invoke the dispatcher-unsubscribe callback
    # stored for this entry by the device_tracker platform.
    hass.data[DOMAIN]["unsub_device_tracker"].pop(entry.entry_id)()
    await hass.config_entries.async_forward_entry_unload(entry, DEVICE_TRACKER)
    return True
async_remove_entry = config_entry_flow.webhook_async_remove_entry
|
mick-d/nipype_source
|
refs/heads/master
|
nipype/interfaces/spm/tests/test_auto_ApplyTransform.py
|
2
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.spm.utils import ApplyTransform
def test_ApplyTransform_inputs():
    # AUTO-GENERATED expectations (tools/checkspecs.py): for each input
    # trait of ApplyTransform, the metadata key/value pairs that must be
    # present on the trait.
    input_map = dict(ignore_exception=dict(nohash=True,
                                           usedefault=True,
                                           ),
                     in_file=dict(copyfile=True,
                                  mandatory=True,
                                  ),
                     mat=dict(mandatory=True,
                              ),
                     matlab_cmd=dict(),
                     mfile=dict(usedefault=True,
                                ),
                     paths=dict(),
                     use_mcr=dict(),
                     use_v8struct=dict(min_ver='8',
                                       usedefault=True,
                                       ),
                     )
    inputs = ApplyTransform.input_spec()
    # Nose-style generator test: yields one assertion per metadata entry.
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_ApplyTransform_outputs():
    # AUTO-GENERATED expectations for the output traits of ApplyTransform.
    output_map = dict(out_file=dict(),
                      )
    outputs = ApplyTransform.output_spec()
    # Nose-style generator test: yields one assertion per metadata entry.
    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
Lyrositor/moul-scripts
|
refs/heads/master
|
Python/xKI.py
|
12
|
# -*- coding: utf-8 -*-
""" *==LICENSE==*
CyanWorlds.com Engine - MMOG client, server and tools
Copyright (C) 2011 Cyan Worlds, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Additional permissions under GNU GPL version 3 section 7
If you modify this Program, or any covered work, by linking or
combining it with any of RAD Game Tools Bink SDK, Autodesk 3ds Max SDK,
NVIDIA PhysX SDK, Microsoft DirectX SDK, OpenSSL library, Independent
JPEG Group JPEG library, Microsoft Windows Media SDK, or Apple QuickTime SDK
(or a modified version of those libraries),
containing parts covered by the terms of the Bink SDK EULA, 3ds Max EULA,
PhysX SDK EULA, DirectX SDK EULA, OpenSSL and SSLeay licenses, IJG
JPEG Library README, Windows Media SDK EULA, or QuickTime SDK EULA, the
licensors of this Program grant you additional
permission to convey the resulting work. Corresponding Source for a
non-source form of such a combination shall include the source code for
the parts of OpenSSL and IJG JPEG Library used as well as that of the covered
work.
You can contact Cyan Worlds, Inc. by email legal@cyan.com
or by snail mail at:
Cyan Worlds, Inc.
14617 N Newport Hwy
Mead, WA 99021
*==LICENSE==* """
# Note: This is a workaround for limitations in Plasma's bindings.
from ki import *
|
anksp21/Community-Zenpacks
|
refs/heads/master
|
ZenPacks.atrawog.Brocade/ZenPacks/atrawog/Brocade/modeler/__init__.py
|
504
|
# __init__.py
|
CloCkWeRX/rabbitvcs
|
refs/heads/master
|
rabbitvcs/vcs/status.py
|
3
|
#
# This is an extension to the Nautilus file manager to allow better
# integration with the Subversion source control system.
#
# Copyright (C) 2006-2008 by Jason Field <jason@jasonfield.com>
# Copyright (C) 2007-2008 by Bruce van der Kooij <brucevdkooij@gmail.com>
# Copyright (C) 2008-2010 by Adam Plumb <adamplumb@gmail.com>
#
# RabbitVCS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# RabbitVCS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RabbitVCS; If not, see <http://www.gnu.org/licenses/>.
#
import os.path
import unittest
from datetime import datetime
import rabbitvcs.vcs
from rabbitvcs.util.log import Log
log = Log("rabbitvcs.vcs.status")
from rabbitvcs import gettext
_ = gettext.gettext
# These are the statuses that we might represent with icons
status_normal = 'normal'
status_modified = 'modified'
status_added = 'added'
status_deleted = 'deleted'
status_ignored = 'ignored'
status_read_only = 'read-only'
status_locked = 'locked'
status_unknown = 'unknown'
# Specifically: this means something IN A WORKING COPY but not added
status_unversioned = 'unversioned'
status_missing = 'missing'
status_replaced = 'replaced'
# "complicated" = anything we display with that exclamation mark icon
status_complicated = 'complicated'
# Transient pseudo-statuses: a lookup still in flight, or one that failed.
status_calculating = 'calculating'
status_error = 'error'
# Child statuses that make a parent directory summarise as "modified"
# (used by Status.make_summary).
MODIFIED_CHILD_STATUSES = [
    status_modified,
    status_added,
    status_deleted,
    status_missing,
    status_replaced
]
class StatusCache(object):
    """
    Compact per-path cache of Status objects.

    Instead of storing whole Status objects, each entry is flattened to a
    tuple of small indices into lookup tables:
    (content_idx, metadata_idx, revision_idx, author_idx, date).
    """

    # Index table for content/metadata status strings; cached tuples store
    # an index into this list rather than the string itself.
    keys = [
        None,
        status_normal,
        status_modified,
        status_added,
        status_deleted,
        status_ignored,
        status_read_only,
        status_locked,
        status_unknown,
        status_unversioned,
        status_missing,
        status_replaced,
        status_complicated,
        status_calculating,
        status_error
    ]

    def __init__(self):
        # path -> (content_idx, metadata_idx, revision_idx, author_idx, date)
        self.cache = {}
        # Fix: these were class-level lists, silently shared (and grown)
        # by every StatusCache instance; keep them per-instance so each
        # cache's indices reference its own tables.
        self.authors = []
        self.revisions = []

    def __setitem__(self, path, status):
        """Encode *status* into index form and store it under *path*."""
        try:
            content_index = self.keys.index(status.simple_content_status())
            metadata_index = self.keys.index(status.simple_metadata_status())
            try:
                author_index = self.authors.index(status.author)
            except ValueError:
                # First time we see this author: intern it.
                self.authors.append(status.author)
                author_index = len(self.authors) - 1
            try:
                revision_index = self.revisions.index(status.revision)
            except ValueError:
                self.revisions.append(status.revision)
                revision_index = len(self.revisions) - 1
            self.cache[path] = (
                content_index,
                metadata_index,
                revision_index,
                author_index,
                status.date
            )
        except Exception as e:
            # Best effort: a status we cannot encode is dropped, not fatal.
            log.debug(e)

    def __getitem__(self, path):
        """Rebuild and return the Status stored for *path*.

        NOTE(review): on any failure (including an unknown path) this logs
        and returns None instead of raising KeyError; callers appear to
        rely on tolerating a None status.
        """
        try:
            (content_index, metadata_index, revision_index, author_index,
             date) = self.cache[path]
            content = self.keys[content_index]
            metadata = self.keys[metadata_index]
            revision = self.revisions[revision_index]
            author = self.authors[author_index]
            return Status(path, content, metadata, revision=revision,
                          author=author, date=date)
        except Exception as e:
            log.debug(e)

    def __delitem__(self, path):
        # Deleting an uncached path is a logged no-op.
        try:
            del self.cache[path]
        except KeyError as e:
            log.debug(e)

    def __contains__(self, path):
        return path in self.cache

    def find_path_statuses(self, path):
        """Return statuses for *path*; for a directory, every cached path
        underneath it as well."""
        statuses = []
        if os.path.isdir(path):
            # Only the keys are needed here (the values are re-decoded by
            # __getitem__), so iterate the dict directly.
            for key in self.cache:
                # NOTE(review): plain prefix match — "/a/bc" also matches a
                # query for "/a/b"; confirm whether that is intended.
                if key.startswith(path):
                    statuses.append(self.__getitem__(key))
        else:
            statuses.append(self.__getitem__(path))
        return statuses
class Status(object):
    """
    VCS-agnostic status of a single path.

    ``single`` is the simplified per-item status derived from the content
    and metadata statuses; ``summary`` additionally folds in the statuses
    of a directory's children (see make_summary).
    """

    @staticmethod
    def status_unknown(path):
        """Factory: a path whose status could not be determined."""
        return Status(path, status_unknown, summary=status_unknown)

    @staticmethod
    def status_error(path):
        """Factory: a path whose status lookup failed."""
        return Status(path, status_error, summary=status_error)

    @staticmethod
    def status_calc(path):
        """Factory: a path whose status is still being calculated."""
        return Status(path, status_calculating, summary=status_calculating)

    vcs_type = rabbitvcs.vcs.VCS_DUMMY

    # Content statuses considered "clean"; only then can the metadata
    # (property) status dominate the single status.
    clean_statuses = ['unchanged']

    # Subclasses map backend-specific status strings onto the generic
    # vocabulary; None means "use the raw value unchanged".
    content_status_map = None
    metadata_status_map = None

    def __init__(self, path, content, metadata=None, summary=None,
                 revision=None, author=None, date=None):
        """
        The status objects accepts the following items

        @type   path: string
        @param  path: The path to the item

        @type   content: string
        @param  content: The content status

        @type   metadata: string
        @param  metadata: The property status

        @type   summary: string
        @param  summary: The summary status

        @type   revision: string or int
        @param  revision: The last commit revision of the item

        @type   author: string
        @param  author: The commit author

        @type   date: int
        @param  date: The timestamp of the commit time
        """
        self.path = path
        self.content = content
        self.metadata = metadata
        self.remote_content = None
        self.remote_metadata = None
        self.single = self._make_single_status()
        self.summary = summary
        self.revision = revision
        self.author = author
        self.date = date

    def _make_single_status(self):
        """
        Given our text_status and a prop_status, simplify to a single "simple"
        status. If we don't know how to simplify our particular combination of
        status, call it an error.
        """
        # Content status dominates; metadata only matters when content is clean.
        single = self.simple_content_status() or status_error
        if single in Status.clean_statuses:
            if self.metadata:
                single = self.simple_metadata_status() or status_error
        return single

    def simple_content_status(self):
        """Translate the raw content status via the subclass map, if any."""
        if self.content_status_map:
            return self.content_status_map.get(self.content)
        else:
            return self.content

    def simple_metadata_status(self):
        """Translate the raw metadata status via the subclass map, if any."""
        if self.metadata and self.metadata_status_map:
            return self.metadata_status_map.get(self.metadata)
        else:
            return self.metadata

    def make_summary(self, child_statuses=None):
        """Summarise this status together with its children's (directories).

        Sets and returns ``self.summary``.
        """
        # Fix: avoid a mutable default argument ([]).
        if child_statuses is None:
            child_statuses = []
        status_set = set([st.single for st in child_statuses])
        if not status_set:
            self.summary = self.single
        elif status_complicated in status_set:
            # Any "complicated" child dominates everything else.
            self.summary = status_complicated
        elif self.single in ["added", "modified", "deleted"]:
            # The directory's own status takes priority over child statuses.
            self.summary = self.single
        elif set(MODIFIED_CHILD_STATUSES) & status_set:
            self.summary = status_modified
        else:
            self.summary = self.single
        # Fix: previously returned a dead local that was always
        # status_unknown; return the computed summary instead.
        return self.summary

    def is_versioned(self):
        # Fix: compare by value, not identity ('is'), which only worked
        # because CPython happens to intern these short string literals.
        return self.single != status_unversioned

    def is_modified(self):
        # This may need to be more sophisticated... eg. is read-only modified?
        # Unknown? etc...
        return self.single != status_normal

    def has_modified(self):
        # Includes self being modified!
        return self.summary != status_normal

    def __repr__(self):
        return "<%s %s (%s) %s/%s>" % (_("RabbitVCS status for"),
                                       self.path,
                                       self.vcs_type,
                                       self.simple_content_status(),
                                       self.simple_metadata_status())

    def __getstate__(self):
        # Record the concrete class so the right type can be restored
        # when the status is deserialised elsewhere.
        attrs = self.__dict__.copy()
        attrs['__type__'] = type(self).__name__
        attrs['__module__'] = type(self).__module__
        return attrs

    def __setstate__(self, state_dict):
        del state_dict['__type__']
        del state_dict['__module__']
        self.__dict__ = state_dict
class SVNStatus(Status):
    """Status adapter translating pysvn status objects to generic Status."""
    vcs_type = rabbitvcs.vcs.VCS_SVN
    # Map pysvn text_status strings onto the generic status vocabulary.
    content_status_map = {
        'normal': status_normal,
        'added': status_added,
        'missing': status_missing,
        'unversioned': status_unversioned,
        'deleted': status_deleted,
        'replaced': status_modified,
        'modified': status_modified,
        'merged': status_modified,
        'conflicted': status_complicated,
        'ignored': status_ignored,
        'obstructed': status_complicated,
        # FIXME: is this the best representation of 'externally populated'?
        'external': status_normal,
        'incomplete': status_complicated
    }
    # Map pysvn prop_status strings onto the generic status vocabulary.
    metadata_status_map = {
        'normal': status_normal,
        'none': status_normal,
        'modified': status_modified
    }

    #external - an unversioned path populated by an svn:external property
    #incomplete - a directory doesn't contain a complete entries list

    def __init__(self, pysvn_status):
        """Build a Status from a pysvn status object (entry may be None
        for items with no commit information)."""
        revision = author = date = None
        if pysvn_status.entry:
            revision = int(pysvn_status.entry.commit_revision.number)
            author = pysvn_status.entry.commit_author
            date = int(pysvn_status.entry.commit_time)
        # There is a potential problem here: I'm pretty sure that PySVN statuses
        # do NOT have translatable representations, so this will always come out
        # to be 'normal', 'modified' etc
        Status.__init__(
            self,
            pysvn_status.path,
            content=str(pysvn_status.text_status),
            metadata=str(pysvn_status.prop_status),
            revision=revision,
            author=author,
            date=date
        )
        # self.remote_content = getattr(pysvn_status, "repos_text_status", None)
        # self.remote_metadata = getattr(pysvn_status, "repos_prop_status", None)
        # NOTE(review): unlike the commented-out getattr version above, this
        # assumes pysvn always provides the repos_* attributes — confirm.
        self.remote_content = str(pysvn_status.repos_text_status)
        self.remote_metadata = str(pysvn_status.repos_prop_status)
class GitStatus(Status):
    """Status adapter for gittyup (git) status objects."""

    vcs_type = 'git'

    # gittyup identifier -> generic content status.
    content_status_map = {
        'normal': status_normal,
        'added': status_added,
        'missing': status_missing,
        'untracked': status_unversioned,
        'removed': status_deleted,
        'modified': status_modified,
        'renamed': status_modified,
        'ignored': status_ignored
    }

    metadata_status_map = {
        'normal': status_normal,
        None: status_normal
    }

    def __init__(self, gittyup_status):
        # Git has no property/metadata status, so metadata is always None.
        path = gittyup_status.path
        content = str(gittyup_status.identifier)
        super(GitStatus, self).__init__(path, content=content, metadata=None)
class MercurialStatus(Status):
    """Status adapter for mercurial status dictionaries."""

    vcs_type = 'mercurial'

    # mercurial content string -> generic content status.
    content_status_map = {
        'clean': status_normal,
        'added': status_added,
        'missing': status_missing,
        'unknown': status_unversioned,
        'removed': status_deleted,
        'modified': status_modified,
        'ignored': status_ignored
    }

    metadata_status_map = {
        'normal': status_normal,
        None: status_normal
    }

    def __init__(self, mercurial_status):
        # Mercurial reports no metadata status either.
        path = mercurial_status["path"]
        content = str(mercurial_status["content"])
        Status.__init__(self, path, content=content, metadata=None)
# All concrete status classes, e.g. for looking up the right type when
# restoring a serialised status (cf. Status.__getstate__/__setstate__).
STATUS_TYPES = [
    Status,
    SVNStatus,
    GitStatus,
    MercurialStatus
]
class TestStatusObjects(unittest.TestCase):
    """Unit tests for Status.single and Status.make_summary."""

    base = "/path/to/test"
    # One child path per lowercase letter a..z.
    children = [
        os.path.join(base, chr(x)) for x in range(97,123)
    ]

    def _clean_children(self):
        """Return a fresh list of 'normal' statuses, one per child path."""
        return [Status(path, status_normal) for path in self.children]

    def testsingle_clean(self):
        self.assertEqual(Status(self.base, status_normal).single,
                         status_normal)

    def testsingle_changed(self):
        self.assertEqual(Status(self.base, status_modified).single,
                         status_modified)

    def testsingle_propclean(self):
        item = Status(self.base, status_normal, status_normal)
        self.assertEqual(item.single, status_normal)

    def testsingle_propchanged(self):
        item = Status(self.base, status_normal, status_modified)
        self.assertEqual(item.single, status_modified)

    def testsummary_clean(self):
        parent = Status(self.base, status_normal)
        parent.make_summary(self._clean_children())
        self.assertEqual(parent.summary, status_normal)

    def testsummary_changed(self):
        parent = Status(self.base, status_normal)
        kids = self._clean_children()
        kids[1] = Status(kids[1].path, status_modified)
        parent.make_summary(kids)
        self.assertEqual(parent.summary, status_modified)

    def testsummary_added(self):
        # An added child rolls up as "modified" on the parent.
        parent = Status(self.base, status_normal)
        kids = self._clean_children()
        kids[1] = Status(kids[1].path, status_added)
        parent.make_summary(kids)
        self.assertEqual(parent.summary, status_modified)

    def testsummary_complicated(self):
        parent = Status(self.base, status_normal)
        kids = self._clean_children()
        kids[1] = Status(kids[1].path, status_complicated)
        parent.make_summary(kids)
        self.assertEqual(parent.summary, status_complicated)

    def testsummary_propchange(self):
        parent = Status(self.base, status_normal)
        kids = self._clean_children()
        kids[1] = Status(kids[1].path,
                         status_normal,
                         status_modified)
        parent.make_summary(kids)
        self.assertEqual(parent.summary, status_modified)

    def testsummary_bothchange(self):
        parent = Status(self.base, status_normal)
        kids = self._clean_children()
        kids[1] = Status(kids[1].path,
                         status_complicated,
                         status_modified)
        parent.make_summary(kids)
        self.assertEqual(parent.summary, status_complicated)

    def testsummary_topadded(self):
        # The directory's own "added" status beats its children's.
        parent = Status(self.base, status_added)
        kids = self._clean_children()
        kids[1] = Status(kids[1].path, status_modified, status_modified)
        parent.make_summary(kids)
        self.assertEqual(parent.summary, status_added)
# Allow running the embedded unit tests directly: python status.py
if __name__ == "__main__":
    unittest.main()
|
synconics/odoo
|
refs/heads/8.0
|
addons/payment_buckaroo/models/buckaroo.py
|
33
|
# -*- coding: utf-'8' "-*-"
from hashlib import sha1
import logging
import urllib
import urlparse
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment_buckaroo.controllers.main import BuckarooController
from openerp.osv import osv, fields
from openerp.tools.float_utils import float_compare
_logger = logging.getLogger(__name__)
def normalize_keys_upper(data):
    """Return a copy of *data* with every key upper-cased.

    Buckaroo parameter names are case-insensitive; upper-casing all keys
    lets callers detect a parameter by checking its upper-case name only.
    """
    return {key.upper(): value for key, value in data.items()}
class AcquirerBuckaroo(osv.Model):
    """Buckaroo payment acquirer: URLs, credentials and signature handling."""
    _inherit = 'payment.acquirer'

    def _get_buckaroo_urls(self, cr, uid, environment, context=None):
        """ Buckaroo URLs: checkout form endpoint per environment. """
        if environment == 'prod':
            return {
                'buckaroo_form_url': 'https://checkout.buckaroo.nl/html/',
            }
        else:
            return {
                'buckaroo_form_url': 'https://testcheckout.buckaroo.nl/html/',
            }

    def _get_providers(self, cr, uid, context=None):
        """Register 'buckaroo' in the list of available providers."""
        providers = super(AcquirerBuckaroo, self)._get_providers(cr, uid, context=context)
        providers.append(['buckaroo', 'Buckaroo'])
        return providers

    _columns = {
        'brq_websitekey': fields.char('WebsiteKey', required_if_provider='buckaroo', groups='base.group_user'),
        'brq_secretkey': fields.char('SecretKey', required_if_provider='buckaroo', groups='base.group_user'),
    }

    def _buckaroo_generate_digital_sign(self, acquirer, inout, values):
        """ Generate the shasign for incoming or outgoing communications.

        :param browse acquirer: the payment.acquirer browse record. It should
                                have a shakey in shaky out
        :param string inout: 'in' (openerp contacting buckaroo) or 'out' (buckaroo
                             contacting openerp).
        :param dict values: transaction values
        :return string: shasign
        """
        assert inout in ('in', 'out')
        assert acquirer.provider == 'buckaroo'
        # Parameters included (in this fixed order) in an outgoing signature.
        keys = "add_returndata Brq_amount Brq_culture Brq_currency Brq_invoicenumber Brq_return Brq_returncancel Brq_returnerror Brq_returnreject brq_test Brq_websitekey".split()

        def get_value(key):
            # A missing/empty value signs as the empty string.
            if values.get(key):
                return values[key]
            return ''

        values = dict(values or {})
        if inout == 'out':
            # Never include the signature itself when verifying it.
            # Fix: iterate a snapshot of the keys, since the dict is
            # mutated inside the loop (also required on Python 3).
            for key in list(values.keys()):
                # case insensitive keys
                if key.upper() == 'BRQ_SIGNATURE':
                    del values[key]
                    break
            # Fix: tuple-unpacking lambda parameters are Python-2-only
            # syntax; index the (key, value) pair instead (same ordering).
            items = sorted(values.items(), key=lambda item: item[0].lower())
            sign = ''.join('%s=%s' % (k, urllib.unquote_plus(v)) for k, v in items)
        else:
            sign = ''.join('%s=%s' % (k, get_value(k)) for k in keys)
        # Add the pre-shared secret key at the end of the signature
        sign = sign + acquirer.brq_secretkey
        if isinstance(sign, str):
            # TODO: remove me? should not be used
            sign = urlparse.parse_qsl(sign)
        shasign = sha1(sign.encode('utf-8')).hexdigest()
        return shasign

    def buckaroo_form_generate_values(self, cr, uid, id, partner_values, tx_values, context=None):
        """Build the form values POSTed to Buckaroo, including the signature."""
        base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
        acquirer = self.browse(cr, uid, id, context=context)
        buckaroo_tx_values = dict(tx_values)
        buckaroo_tx_values.update({
            'Brq_websitekey': acquirer.brq_websitekey,
            'Brq_amount': tx_values['amount'],
            'Brq_currency': tx_values['currency'] and tx_values['currency'].name or '',
            'Brq_invoicenumber': tx_values['reference'],
            'brq_test': False if acquirer.environment == 'prod' else True,
            'Brq_return': '%s' % urlparse.urljoin(base_url, BuckarooController._return_url),
            'Brq_returncancel': '%s' % urlparse.urljoin(base_url, BuckarooController._cancel_url),
            'Brq_returnerror': '%s' % urlparse.urljoin(base_url, BuckarooController._exception_url),
            'Brq_returnreject': '%s' % urlparse.urljoin(base_url, BuckarooController._reject_url),
            'Brq_culture': (partner_values.get('lang') or 'en_US').replace('_', '-'),
        })
        if buckaroo_tx_values.get('return_url'):
            buckaroo_tx_values['add_returndata'] = buckaroo_tx_values.pop('return_url')
        else:
            buckaroo_tx_values['add_returndata'] = ''
        # Sign last, once every parameter is in place.
        buckaroo_tx_values['Brq_signature'] = self._buckaroo_generate_digital_sign(acquirer, 'in', buckaroo_tx_values)
        return partner_values, buckaroo_tx_values

    def buckaroo_get_form_action_url(self, cr, uid, id, context=None):
        """Return the checkout form URL matching the acquirer's environment."""
        acquirer = self.browse(cr, uid, id, context=context)
        return self._get_buckaroo_urls(cr, uid, acquirer.environment, context=context)['buckaroo_form_url']
class TxBuckaroo(osv.Model):
    """Payment transaction extension handling Buckaroo form feedback."""
    _inherit = 'payment.transaction'

    # buckaroo status
    # Numeric gateway status codes grouped by outcome; any code not listed
    # below is treated as an error in _buckaroo_form_validate.
    _buckaroo_valid_tx_status = [190]
    _buckaroo_pending_tx_status = [790, 791, 792, 793]
    _buckaroo_cancel_tx_status = [890, 891]
    _buckaroo_error_tx_status = [490, 491, 492]
    _buckaroo_reject_tx_status = [690]

    _columns = {
        'buckaroo_txnid': fields.char('Transaction ID'),
    }

    # --------------------------------------------------
    # FORM RELATED METHODS
    # --------------------------------------------------

    def _buckaroo_form_get_tx_from_data(self, cr, uid, data, context=None):
        """ Given a data dict coming from buckaroo, verify it and find the related
        transaction record.

        Raises ValidationError when required fields are missing, when the
        reference matches zero or several transactions, or when the
        signature check fails.
        """
        # Keep the raw payload: the signature is computed over the
        # original (non-normalized) keys.
        origin_data = dict(data)
        data = normalize_keys_upper(data)
        reference, pay_id, shasign = data.get('BRQ_INVOICENUMBER'), data.get('BRQ_PAYMENT'), data.get('BRQ_SIGNATURE')
        if not reference or not pay_id or not shasign:
            error_msg = 'Buckaroo: received data with missing reference (%s) or pay_id (%s) or shashign (%s)' % (reference, pay_id, shasign)
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        tx_ids = self.search(cr, uid, [('reference', '=', reference)], context=context)
        if not tx_ids or len(tx_ids) > 1:
            error_msg = 'Buckaroo: received data for reference %s' % (reference)
            if not tx_ids:
                error_msg += '; no order found'
            else:
                error_msg += '; multiple order found'
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        tx = self.pool['payment.transaction'].browse(cr, uid, tx_ids[0], context=context)
        # verify shasign: recompute over the original payload and compare
        # case-insensitively.
        shasign_check = self.pool['payment.acquirer']._buckaroo_generate_digital_sign(tx.acquirer_id, 'out', origin_data)
        if shasign_check.upper() != shasign.upper():
            error_msg = 'Buckaroo: invalid shasign, received %s, computed %s, for data %s' % (shasign, shasign_check, data)
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        return tx

    def _buckaroo_form_get_invalid_parameters(self, cr, uid, tx, data, context=None):
        """Return (name, received, expected) tuples for every feedback field
        that disagrees with the stored transaction."""
        invalid_parameters = []
        data = normalize_keys_upper(data)
        if tx.acquirer_reference and data.get('BRQ_TRANSACTIONS') != tx.acquirer_reference:
            invalid_parameters.append(('Transaction Id', data.get('BRQ_TRANSACTIONS'), tx.acquirer_reference))
        # check what is buyed
        # Amounts are compared to 2 decimal places.
        if float_compare(float(data.get('BRQ_AMOUNT', '0.0')), tx.amount, 2) != 0:
            invalid_parameters.append(('Amount', data.get('BRQ_AMOUNT'), '%.2f' % tx.amount))
        if data.get('BRQ_CURRENCY') != tx.currency_id.name:
            invalid_parameters.append(('Currency', data.get('BRQ_CURRENCY'), tx.currency_id.name))
        return invalid_parameters

    def _buckaroo_form_validate(self, cr, uid, tx, data, context=None):
        """Map the Buckaroo status code onto the transaction state.

        Returns True for done/pending/cancel outcomes, False otherwise.
        """
        data = normalize_keys_upper(data)
        status_code = int(data.get('BRQ_STATUSCODE','0'))
        if status_code in self._buckaroo_valid_tx_status:
            tx.write({
                'state': 'done',
                'buckaroo_txnid': data.get('BRQ_TRANSACTIONS'),
            })
            return True
        elif status_code in self._buckaroo_pending_tx_status:
            tx.write({
                'state': 'pending',
                'buckaroo_txnid': data.get('BRQ_TRANSACTIONS'),
            })
            return True
        elif status_code in self._buckaroo_cancel_tx_status:
            tx.write({
                'state': 'cancel',
                'buckaroo_txnid': data.get('BRQ_TRANSACTIONS'),
            })
            return True
        else:
            # Error, reject and unknown codes all end up here.
            error = 'Buckaroo: feedback error'
            _logger.info(error)
            tx.write({
                'state': 'error',
                'state_message': error,
                'buckaroo_txnid': data.get('BRQ_TRANSACTIONS'),
            })
            return False
|
kenwang815/KodiPlugins
|
refs/heads/master
|
script.module.oceanktv/lib/youtube_dl/extractor/ruhd.py
|
19
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .common import InfoExtractor
class RUHDIE(InfoExtractor):
    """Extractor for ruhd.ru video pages."""
    _VALID_URL = r'https?://(?:www\.)?ruhd\.ru/play\.php\?vid=(?P<id>\d+)'
    _TEST = {
        'url': 'http://www.ruhd.ru/play.php?vid=207',
        'md5': 'd1a9ec4edf8598e3fbd92bb16072ba83',
        'info_dict': {
            'id': '207',
            'ext': 'divx',
            'title': 'КОТ бааааам',
            'description': 'классный кот)',
            # Fix: raw string — '\.' is an invalid escape sequence in a
            # normal string literal (deprecated; value is unchanged).
            'thumbnail': r're:^http://.*\.jpg$',
        }
    }

    def _real_extract(self, url):
        """Extract the direct video URL and metadata from a ruhd.ru page."""
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        # The player embeds the stream URL in an <object>/<param> tag.
        video_url = self._html_search_regex(
            r'<param name="src" value="([^"]+)"', webpage, 'video url')
        title = self._html_search_regex(
            r'<title>([^<]+)&nbsp;&nbsp; RUHD.ru - Видео Высокого качества №1 в России!</title>',
            webpage, 'title')
        description = self._html_search_regex(
            r'(?s)<div id="longdesc">(.+?)<span id="showlink">',
            webpage, 'description', fatal=False)
        thumbnail = self._html_search_regex(
            r'<param name="previewImage" value="([^"]+)"',
            webpage, 'thumbnail', fatal=False)
        if thumbnail:
            # The page holds a site-relative path; make it absolute.
            thumbnail = 'http://www.ruhd.ru' + thumbnail

        return {
            'id': video_id,
            'url': video_url,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
        }
|
qbektrix/namebench
|
refs/heads/master
|
nb_third_party/dns/rdtypes/ANY/PTR.py
|
248
|
# Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.nsbase
class PTR(dns.rdtypes.nsbase.NSBase):
    """PTR record.

    Defines no behaviour of its own; everything is inherited from
    NSBase, the shared base for single-name rdata types.
    """
    pass
|
jhd/spunout
|
refs/heads/master
|
flask/lib/python2.7/site-packages/migrate/__init__.py
|
79
|
"""
SQLAlchemy migrate provides two APIs :mod:`migrate.versioning` for
database schema version and repository management and
:mod:`migrate.changeset` that allows to define database schema changes
using Python.
"""
import pkg_resources
from migrate.versioning import *
from migrate.changeset import *
# Resolve the installed distribution's version at import time so that
# ``migrate.__version__`` always matches the sqlalchemy-migrate package.
__version__ = pkg_resources.get_provider(
    pkg_resources.Requirement.parse('sqlalchemy-migrate')).version
|
peeyush-tm/check_mk
|
refs/heads/nocout
|
doc/treasures/modbus/perfometer/modbus.py
|
6
|
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# ails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
# Put this file into share/check_mk/web/plugins/perfometer
def perfometer_modbus_value(row, check_command, perf_data):
    """Perf-O-Meter for modbus_value checks.

    Shows the first perf value as text and renders a logarithmic bar
    (half-value = 3x the current value, base 2, fixed blue color).
    """
    raw = perf_data[0][1]
    current = int(raw)
    return raw, perfometer_logarithmic(current, current * 3, 2, '#3366cc')
perfometers['check_mk-modbus_value'] = perfometer_modbus_value
|
blackzw/openwrt_sdk_dev1
|
refs/heads/master
|
staging_dir/host/lib/python2.7/stringold.py
|
293
|
# module 'string' -- A collection of string operations
# Warning: most of the code you see here isn't normally used nowadays. With
# Python 1.6, many of these functions are implemented as methods on the
# standard string object. They used to be implemented by a built-in module
# called strop, but strop is now obsolete itself.
"""Common string manipulations.
Public module variables:
whitespace -- a string containing all characters considered whitespace
lowercase -- a string containing all characters considered lowercase letters
uppercase -- a string containing all characters considered uppercase letters
letters -- a string containing all characters considered letters
digits -- a string containing all characters considered decimal digits
hexdigits -- a string containing all characters considered hexadecimal digits
octdigits -- a string containing all characters considered octal digits
"""
from warnings import warnpy3k
# Warn on import: this module no longer exists in Python 3 (everything it
# provides lives on str itself there).
warnpy3k("the stringold module has been removed in Python 3.0", stacklevel=2)
del warnpy3k
# Some strings for ctype-style character classification
whitespace = ' \t\n\r\v\f'
lowercase = 'abcdefghijklmnopqrstuvwxyz'
uppercase = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
letters = lowercase + uppercase
digits = '0123456789'
hexdigits = digits + 'abcdef' + 'ABCDEF'
octdigits = '01234567'
# Case conversion helpers
# _idmap is the 256-character identity table used by maketrans() below as
# the starting point for building translation tables.
_idmap = ''
for i in range(256): _idmap = _idmap + chr(i)
del i
# Backward compatible names for exceptions
index_error = ValueError
atoi_error = ValueError
atof_error = ValueError
atol_error = ValueError
# UPPER CASE -> lower case
def lower(s):
    """lower(s) -> string

    Return a copy of s with all cased characters lowercased.
    """
    result = s.lower()
    return result
# lower case -> UPPER CASE
def upper(s):
    """upper(s) -> string

    Return a copy of s with all cased characters uppercased.
    """
    result = s.upper()
    return result
# Exchange upper- and lower-case characters
def swapcase(s):
    """swapcase(s) -> string

    Return a copy of s with lowercase characters uppercased and
    uppercase characters lowercased.
    """
    result = s.swapcase()
    return result
# Remove surrounding whitespace
def strip(s):
    """strip(s) -> string

    Return a copy of s with whitespace removed from both ends.
    """
    return s.strip()
# Remove leading whitespace only
def lstrip(s):
    """lstrip(s) -> string

    Return a copy of s with whitespace removed from the left end.
    """
    return s.lstrip()
# Remove trailing whitespace only
def rstrip(s):
    """rstrip(s) -> string

    Return a copy of s with whitespace removed from the right end.
    """
    return s.rstrip()
# Break a string into a list of words
def split(s, sep=None, maxsplit=0):
    """split(str [,sep [,maxsplit]]) -> list of strings

    Return the list of words in s, using sep as the delimiter string
    (any run of whitespace when sep is None).  maxsplit is forwarded
    verbatim to s.split() and defaults to 0.
    (split and splitfields are synonymous)
    """
    return s.split(sep, maxsplit)

# Historical alias kept for backward compatibility.
splitfields = split
# Concatenate words with an optional separator
def join(words, sep = ' '):
    """join(list [,sep]) -> string

    Concatenate the strings in words, placing sep (default: a single
    space) between consecutive items.
    (joinfields and join are synonymous)
    """
    glued = sep.join(words)
    return glued

# Historical alias kept for backward compatibility.
joinfields = join
# for a little bit of speed
# Cache the apply() builtin (Python 2 only) in a module-level name so the
# wrapper functions below pay one fast global lookup per call instead of a
# builtin lookup.
_apply = apply
# Locate a substring; ValueError when absent
def index(s, *args):
    """index(s, sub [,start [,end]]) -> int

    Like find(), but raises ValueError when the substring is missing.
    """
    return s.index(*args)
# Locate the last occurrence of a substring; ValueError when absent
def rindex(s, *args):
    """rindex(s, sub [,start [,end]]) -> int

    Like rfind(), but raises ValueError when the substring is missing.
    """
    return s.rindex(*args)
# Tally non-overlapping occurrences of a substring
def count(s, *args):
    """count(s, sub[, start[,end]]) -> int

    Return how many non-overlapping copies of sub occur within
    s[start:end]; start and end follow slice-notation semantics.
    """
    return s.count(*args)
# Locate a substring; -1 when absent
def find(s, *args):
    """find(s, sub [,start [,end]]) -> int

    Return the lowest index within s[start:end] at which the substring
    sub is found (slice-notation semantics for start and end), or -1
    when it does not occur.
    """
    return s.find(*args)
# Locate the last occurrence of a substring; -1 when absent
def rfind(s, *args):
    """rfind(s, sub [,start [,end]]) -> int

    Return the highest index within s[start:end] at which the substring
    sub is found (slice-notation semantics for start and end), or -1
    when it does not occur.
    """
    return s.rfind(*args)
# for a bit of speed
# Cache builtins in module globals so the conversion wrappers below avoid
# a builtin-scope lookup on every call.
_float = float
_int = int
_long = long          # Python 2 arbitrary-precision integer type
_StringType = type('')  # exact str type, used for strict type checks below
# Convert string to float
def atof(s):
    """atof(s) -> float

    Return the floating point number represented by the string s.
    """
    # Exact-type check (not isinstance) -- preserved from the original API,
    # which rejects anything that is not precisely a str.
    if type(s) != _StringType:
        raise TypeError('argument 1: expected string, %s found' %
                        type(s).__name__)
    return _float(s)
# Convert string to integer
def atoi(*args):
    """atoi(s [,base]) -> int

    Return the integer represented by the string s in the given base
    (default 10).  s must consist of one or more digits, optionally
    preceded by a sign.  If base is 0 it is inferred from the leading
    characters of s (0 for octal, 0x/0X for hexadecimal); if base is
    16, a leading 0x/0X is accepted.
    """
    # Missing first argument -> same TypeError the original raised via
    # catching IndexError on args[0].
    if not args:
        raise TypeError('function requires at least 1 argument: %d given' %
                        len(args))
    s = args[0]
    # Reject a non-str first argument up front; any surplus arguments are
    # deliberately left for int() itself to complain about (its error
    # message differs, but the exception type is the same).
    if type(s) != _StringType:
        raise TypeError('argument 1: expected string, %s found' %
                        type(s).__name__)
    return _int(*args)
# Convert string to long integer
def atol(*args):
    """atol(s [,base]) -> long

    Return the long integer represented by the string s in the given
    base (default 10).  s must consist of one or more digits, optionally
    preceded by a sign.  If base is 0 it is inferred from the leading
    characters of s (0 for octal, 0x/0X for hexadecimal); if base is
    16, a leading 0x/0X is accepted.  A trailing L or l is only
    accepted when base is 0.
    """
    # Missing first argument -> same TypeError the original raised via
    # catching IndexError on args[0].
    if not args:
        raise TypeError('function requires at least 1 argument: %d given' %
                        len(args))
    s = args[0]
    # Reject a non-str first argument up front; surplus arguments are
    # deliberately left for long() itself to complain about (its error
    # message differs, but the exception type is the same).
    if type(s) != _StringType:
        raise TypeError('argument 1: expected string, %s found' %
                        type(s).__name__)
    return _long(*args)
# Left-justify a string
def ljust(s, width):
    """ljust(s, width) -> string

    Return s padded on the right with spaces to the given field width.
    The string is never truncated.
    """
    pad = width - len(s)
    if pad > 0:
        return s + ' ' * pad
    return s
# Right-justify a string
def rjust(s, width):
    """rjust(s, width) -> string

    Return s padded on the left with spaces to the given field width.
    The string is never truncated.
    """
    pad = width - len(s)
    if pad > 0:
        return ' ' * pad + s
    return s
# Center a string
def center(s, width):
    """center(s, width) -> string

    Return s centered in a field of the given width, padded with
    spaces.  The string is never truncated.
    """
    pad = width - len(s)
    if pad <= 0:
        return s
    left = pad // 2
    # This ensures that center(center(s, i), j) = center(s, j)
    if pad % 2 and width % 2:
        left += 1
    return ' ' * left + s + ' ' * (pad - left)
# Zero-fill a number, e.g., (12, 3) --> '012' and (-3, 3) --> '-03'
# Decadent feature: the argument may be a string or a number
# (Use of this is deprecated; it should be a string as with ljust c.s.)
def zfill(x, width):
    """zfill(x, width) -> string

    Pad the numeric string x (a non-string is first converted via
    repr()) with zeros on the left to fill a field of the given width.
    x is never truncated.
    """
    if type(x) == type(''):
        s = x
    else:
        s = repr(x)
    missing = width - len(s)
    if missing <= 0:
        return s
    # Keep a leading sign in front of the inserted zeros.
    sign = ''
    if s[0] in ('-', '+'):
        sign, s = s[0], s[1:]
    return sign + '0' * missing + s
# Expand tabs in a string.
# Doesn't take non-printing chars into account, but does understand \n.
def expandtabs(s, tabsize=8):
    """expandtabs(s [,tabsize]) -> string

    Return a copy of s with each tab replaced by enough spaces to reach
    the next multiple-of-tabsize column (default tabsize: 8).  The
    column counter restarts after every newline.
    """
    done = ''
    current = ''
    for ch in s:
        if ch == '\t':
            ch = ' ' * (tabsize - len(current) % tabsize)
        current = current + ch
        if ch == '\n':
            done = done + current
            current = ''
    return done + current
# Character translation through look-up table.
def translate(s, table, deletions=""):
    """translate(s,table [,deletechars]) -> string

    Return a copy of s in which every character listed in the optional
    deletechars argument has been removed and all remaining characters
    have been mapped through table, a string of length 256 (see
    maketrans()).
    """
    result = s.translate(table, deletions)
    return result
# Capitalize a string, e.g. "aBc dEf" -> "Abc def".
def capitalize(s):
    """capitalize(s) -> string

    Return a copy of s with its first character uppercased and the
    remainder lowercased.
    """
    return s.capitalize()
# Capitalize the words in a string, e.g. " aBc dEf " -> "Abc Def".
def capwords(s, sep=None):
    """capwords(s, [sep]) -> string

    Split s into words on sep (runs of whitespace when sep is None),
    capitalize each word, and rejoin them with sep (a single space when
    sep is None or empty).  Note that with the default separator, runs
    of whitespace collapse to one space.
    """
    pieces = [word.capitalize() for word in s.split(sep)]
    return (sep or ' ').join(pieces)
# Construct a translation string
_idmapL = None   # lazily-built list form of _idmap, cached across calls
def maketrans(fromstr, tostr):
    """maketrans(frm, to) -> string

    Return a 256-byte translation table for use with string.translate,
    mapping each character of frm to the character at the same position
    in to.  frm and to must have the same length.
    """
    if len(fromstr) != len(tostr):
        raise ValueError("maketrans arguments must have same length")
    global _idmapL
    if not _idmapL:
        _idmapL = list(_idmap)
    table = _idmapL[:]
    for pos, code in enumerate(map(ord, fromstr)):
        table[code] = tostr[pos]
    return join(table, "")
# Substring replacement (global)
def replace(s, old, new, maxsplit=0):
    """replace (str, old, new[, maxsplit]) -> string

    Return a copy of s with occurrences of substring old replaced by
    new.  maxsplit is forwarded verbatim to s.replace() as the
    replacement count (default 0).
    """
    return s.replace(old, new, maxsplit)
# XXX: transitional
#
# If string objects do not have methods, then we need to use the old string.py
# library, which uses strop for many more things than just the few outlined
# below.
# NOTE(review): this block looks copied verbatim from string.py; inside
# stringold.py the ``from stringold import *`` branch is a self-import and
# effectively a no-op on any Python with string methods -- confirm.
try:
    ''.upper
except AttributeError:
    from stringold import *
# Try importing optional built-in module "strop" -- if it exists,
# it redefines some string operations that are 100-1000 times faster.
# It also defines values for whitespace, lowercase and uppercase
# that match <ctype.h>'s definitions.
try:
    from strop import maketrans, lowercase, uppercase, whitespace
    letters = lowercase + uppercase
except ImportError:
    pass # Use the original versions
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.