text stringlengths 4 1.02M | meta dict |
|---|---|
"""Authentication components for single-signon via Underpants."""
import urllib
from django.contrib.auth.middleware import RemoteUserMiddleware
from django.contrib.auth.backends import RemoteUserBackend
class UnderpantsRemoteUserMiddleware(RemoteUserMiddleware):
    """Remote-user middleware that authenticates from the Underpants SSO proxy.

    Django exposes the ``Underpants-Email`` request header as
    ``HTTP_UNDERPANTS_EMAIL`` in ``request.META``; this tells the stock
    ``RemoteUserMiddleware`` to read the username from that key instead of
    the default ``REMOTE_USER``.
    """
    header = "HTTP_UNDERPANTS_EMAIL"
class UnderpantsRemoteUserBackend(RemoteUserBackend):
    """Remote-user backend that URL-decodes the username from Underpants.

    The SSO proxy forwards a percent-encoded email address, so the raw
    header value must be unquoted before being used as a username.
    """

    def clean_username(self, username):
        """Return *username* with percent-encoding removed.

        Bug fix: ``urllib.unquote`` exists only on Python 2; on Python 3
        the function moved to ``urllib.parse.unquote``. Import lazily so
        the backend works on both interpreters.
        """
        try:
            from urllib.parse import unquote  # Python 3
        except ImportError:  # Python 2 fallback
            from urllib import unquote
        return unquote(username)
| {
"content_hash": "34121054928da62c3c67e0e87d6a9443",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 65,
"avg_line_length": 29.333333333333332,
"alnum_prop": 0.8113636363636364,
"repo_name": "reddit/django-underpants",
"id": "d0b07e57ef3e4ec56a8b88a6857f9a58e0673492",
"size": "440",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_underpants.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "749"
}
],
"symlink_target": ""
} |
from datetime import datetime
import re
import numpy as np
import pytest
import pandas as pd
from pandas import (
Series,
_testing as tm,
)
# --------------------------------------------------------------------------------------
# str.contains
# --------------------------------------------------------------------------------------
def test_contains(any_string_dtype):
    """str.contains: regex vs literal matching, NaN propagation, case
    handling, and the ``na`` fill value across string dtypes.

    For object dtype, a result containing NaN stays object-dtyped while an
    all-boolean result is ``np.bool_``; nullable string dtypes always
    produce "boolean".
    """
    values = np.array(
        ["foo", np.nan, "fooommm__foo", "mmm_", "foommm[_]+bar"], dtype=np.object_
    )
    values = Series(values, dtype=any_string_dtype)
    pat = "mmm[_]+"
    result = values.str.contains(pat)
    expected_dtype = "object" if any_string_dtype == "object" else "boolean"
    expected = Series(
        np.array([False, np.nan, True, True, False], dtype=np.object_),
        dtype=expected_dtype,
    )
    tm.assert_series_equal(result, expected)
    # regex=False takes the pattern literally, so only "foommm[_]+bar" matches
    result = values.str.contains(pat, regex=False)
    expected = Series(
        np.array([False, np.nan, False, False, True], dtype=np.object_),
        dtype=expected_dtype,
    )
    tm.assert_series_equal(result, expected)
    # without NaN in the input, the object-dtype result is plain np.bool_
    values = Series(
        np.array(["foo", "xyz", "fooommm__foo", "mmm_"], dtype=object),
        dtype=any_string_dtype,
    )
    result = values.str.contains(pat)
    expected_dtype = np.bool_ if any_string_dtype == "object" else "boolean"
    expected = Series(np.array([False, False, True, True]), dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    # case insensitive using regex
    values = Series(
        np.array(["Foo", "xYz", "fOOomMm__fOo", "MMM_"], dtype=object),
        dtype=any_string_dtype,
    )
    result = values.str.contains("FOO|mmm", case=False)
    expected = Series(np.array([True, False, True, True]), dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    # case insensitive without regex
    result = values.str.contains("foo", regex=False, case=False)
    expected = Series(np.array([True, False, True, False]), dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    # unicode
    values = Series(
        np.array(["foo", np.nan, "fooommm__foo", "mmm_"], dtype=np.object_),
        dtype=any_string_dtype,
    )
    pat = "mmm[_]+"
    result = values.str.contains(pat)
    expected_dtype = "object" if any_string_dtype == "object" else "boolean"
    expected = Series(
        np.array([False, np.nan, True, True], dtype=np.object_), dtype=expected_dtype
    )
    tm.assert_series_equal(result, expected)
    # na=False fills the missing entry, so the object-dtype result is np.bool_
    result = values.str.contains(pat, na=False)
    expected_dtype = np.bool_ if any_string_dtype == "object" else "boolean"
    expected = Series(np.array([False, False, True, True]), dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    values = Series(
        np.array(["foo", "xyz", "fooommm__foo", "mmm_"], dtype=np.object_),
        dtype=any_string_dtype,
    )
    result = values.str.contains(pat)
    expected = Series(np.array([False, False, True, True]), dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
def test_contains_object_mixed():
    """Non-string entries in an object Series propagate as NaN through
    str.contains."""
    data = ["a", np.nan, "b", True, datetime.today(), "foo", None, 1, 2.0]
    ser = Series(np.array(data, dtype=object))
    result = ser.str.contains("o")
    expected_values = [
        False, np.nan, False, np.nan, np.nan, True, np.nan, np.nan, np.nan,
    ]
    expected = Series(np.array(expected_values, dtype=np.object_))
    tm.assert_series_equal(result, expected)
def test_contains_na_kwarg_for_object_category():
    """gh 22158: the ``na`` fill value is honored for both category and
    plain object dtypes."""
    for dtype in ["category", None]:
        values = Series(["a", "b", "c", "a", np.nan], dtype=dtype)
        # na=True fills the missing entry with True ...
        tm.assert_series_equal(
            values.str.contains("a", na=True),
            Series([True, False, False, True, True]),
        )
        # ... and na=False with False.
        tm.assert_series_equal(
            values.str.contains("a", na=False),
            Series([True, False, False, True, False]),
        )
@pytest.mark.parametrize(
    "na, expected",
    [
        (None, pd.NA),
        (True, True),
        (False, False),
        (0, False),
        (3, True),
        (np.nan, pd.NA),
    ],
)
@pytest.mark.parametrize("regex", [True, False])
def test_contains_na_kwarg_for_nullable_string_dtype(
    nullable_string_dtype, na, expected, regex
):
    """For nullable string dtypes, truthy/falsy ``na`` values are coerced to
    bool, while None/np.nan leave the masked entry as pd.NA."""
    # https://github.com/pandas-dev/pandas/pull/41025#issuecomment-824062416
    values = Series(["a", "b", "c", "a", np.nan], dtype=nullable_string_dtype)
    result = values.str.contains("a", na=na, regex=regex)
    expected = Series([True, False, False, True, expected], dtype="boolean")
    tm.assert_series_equal(result, expected)
def test_contains_moar(any_string_dtype):
    """str.contains on a longer Series: literal patterns, case handling,
    and NaN propagation."""
    # PR #1179
    s = Series(
        ["A", "B", "C", "Aaba", "Baca", "", np.nan, "CABA", "dog", "cat"],
        dtype=any_string_dtype,
    )
    result = s.str.contains("a")
    expected_dtype = "object" if any_string_dtype == "object" else "boolean"
    expected = Series(
        [False, False, False, True, True, False, np.nan, False, False, True],
        dtype=expected_dtype,
    )
    tm.assert_series_equal(result, expected)
    result = s.str.contains("a", case=False)
    expected = Series(
        [True, False, False, True, True, False, np.nan, True, False, True],
        dtype=expected_dtype,
    )
    tm.assert_series_equal(result, expected)
    result = s.str.contains("Aa")
    expected = Series(
        [False, False, False, True, False, False, np.nan, False, False, False],
        dtype=expected_dtype,
    )
    tm.assert_series_equal(result, expected)
    result = s.str.contains("ba")
    expected = Series(
        [False, False, False, True, False, False, np.nan, False, False, False],
        dtype=expected_dtype,
    )
    tm.assert_series_equal(result, expected)
    result = s.str.contains("ba", case=False)
    expected = Series(
        [False, False, False, True, True, False, np.nan, True, False, False],
        dtype=expected_dtype,
    )
    tm.assert_series_equal(result, expected)
def test_contains_nan(any_string_dtype):
    """str.contains on an all-NaN Series honors the ``na`` fill value; a
    non-boolean ``na`` (here the string "foo") is propagated as-is for
    object dtype but coerced to True for nullable string dtypes."""
    # PR #14171
    s = Series([np.nan, np.nan, np.nan], dtype=any_string_dtype)
    result = s.str.contains("foo", na=False)
    expected_dtype = np.bool_ if any_string_dtype == "object" else "boolean"
    expected = Series([False, False, False], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    result = s.str.contains("foo", na=True)
    expected = Series([True, True, True], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    # a non-bool na is inserted verbatim into an object result
    result = s.str.contains("foo", na="foo")
    if any_string_dtype == "object":
        expected = Series(["foo", "foo", "foo"], dtype=np.object_)
    else:
        expected = Series([True, True, True], dtype="boolean")
    tm.assert_series_equal(result, expected)
    # with no na given, missing values stay missing
    result = s.str.contains("foo")
    expected_dtype = "object" if any_string_dtype == "object" else "boolean"
    expected = Series([np.nan, np.nan, np.nan], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
# --------------------------------------------------------------------------------------
# str.startswith
# --------------------------------------------------------------------------------------
@pytest.mark.parametrize("dtype", [None, "category"])
@pytest.mark.parametrize("null_value", [None, np.nan, pd.NA])
@pytest.mark.parametrize("na", [True, False])
def test_startswith(dtype, null_value, na):
    """str.startswith on object/category data, with and without an ``na``
    fill value, plus NaN propagation for non-string entries."""
    # add category dtype parametrizations for GH-36241
    values = Series(
        ["om", null_value, "foo_nom", "nom", "bar_foo", null_value, "foo"],
        dtype=dtype,
    )
    result = values.str.startswith("foo")
    exp = Series([False, np.nan, True, False, False, np.nan, True])
    tm.assert_series_equal(result, exp)
    result = values.str.startswith("foo", na=na)
    exp = Series([False, na, True, False, False, na, True])
    tm.assert_series_equal(result, exp)
    # mixed
    mixed = np.array(
        ["a", np.nan, "b", True, datetime.today(), "foo", None, 1, 2.0],
        dtype=np.object_,
    )
    rs = Series(mixed).str.startswith("f")
    xp = Series([False, np.nan, False, np.nan, np.nan, True, np.nan, np.nan, np.nan])
    tm.assert_series_equal(rs, xp)
@pytest.mark.parametrize("na", [None, True, False])
def test_startswith_nullable_string_dtype(nullable_string_dtype, na):
    """startswith on a nullable string dtype always yields "boolean", with
    missing entries filled by ``na`` (None keeps them missing)."""
    values = Series(
        ["om", None, "foo_nom", "nom", "bar_foo", None, "foo", "regex", "rege."],
        dtype=nullable_string_dtype,
    )
    result = values.str.startswith("foo", na=na)
    exp = Series(
        [False, na, True, False, False, na, True, False, False], dtype="boolean"
    )
    tm.assert_series_equal(result, exp)
    # the prefix is literal: "rege." only matches the actual dot
    result = values.str.startswith("rege.", na=na)
    exp = Series(
        [False, na, False, False, False, na, False, False, True], dtype="boolean"
    )
    tm.assert_series_equal(result, exp)
# --------------------------------------------------------------------------------------
# str.endswith
# --------------------------------------------------------------------------------------
@pytest.mark.parametrize("dtype", [None, "category"])
@pytest.mark.parametrize("null_value", [None, np.nan, pd.NA])
@pytest.mark.parametrize("na", [True, False])
def test_endswith(dtype, null_value, na):
    """str.endswith counterpart of test_startswith: object/category data,
    ``na`` fill value, and NaN propagation for non-string entries."""
    # add category dtype parametrizations for GH-36241
    values = Series(
        ["om", null_value, "foo_nom", "nom", "bar_foo", null_value, "foo"],
        dtype=dtype,
    )
    result = values.str.endswith("foo")
    exp = Series([False, np.nan, False, False, True, np.nan, True])
    tm.assert_series_equal(result, exp)
    result = values.str.endswith("foo", na=na)
    exp = Series([False, na, False, False, True, na, True])
    tm.assert_series_equal(result, exp)
    # mixed
    mixed = np.array(
        ["a", np.nan, "b", True, datetime.today(), "foo", None, 1, 2.0],
        dtype=object,
    )
    rs = Series(mixed).str.endswith("f")
    xp = Series([False, np.nan, False, np.nan, np.nan, False, np.nan, np.nan, np.nan])
    tm.assert_series_equal(rs, xp)
@pytest.mark.parametrize("na", [None, True, False])
def test_endswith_nullable_string_dtype(nullable_string_dtype, na):
    """endswith on a nullable string dtype always yields "boolean", with
    missing entries filled by ``na`` (None keeps them missing)."""
    values = Series(
        ["om", None, "foo_nom", "nom", "bar_foo", None, "foo", "regex", "rege."],
        dtype=nullable_string_dtype,
    )
    result = values.str.endswith("foo", na=na)
    exp = Series(
        [False, na, False, False, True, na, True, False, False], dtype="boolean"
    )
    tm.assert_series_equal(result, exp)
    # the suffix is literal: "rege." only matches the actual dot
    result = values.str.endswith("rege.", na=na)
    exp = Series(
        [False, na, False, False, False, na, False, False, True], dtype="boolean"
    )
    tm.assert_series_equal(result, exp)
# --------------------------------------------------------------------------------------
# str.replace
# --------------------------------------------------------------------------------------
def test_replace(any_string_dtype):
ser = Series(["fooBAD__barBAD", np.nan], dtype=any_string_dtype)
result = ser.str.replace("BAD[_]*", "", regex=True)
expected = Series(["foobar", np.nan], dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
def test_replace_max_replacements(any_string_dtype):
ser = Series(["fooBAD__barBAD", np.nan], dtype=any_string_dtype)
expected = Series(["foobarBAD", np.nan], dtype=any_string_dtype)
result = ser.str.replace("BAD[_]*", "", n=1, regex=True)
tm.assert_series_equal(result, expected)
expected = Series(["foo__barBAD", np.nan], dtype=any_string_dtype)
result = ser.str.replace("BAD", "", n=1, regex=False)
tm.assert_series_equal(result, expected)
def test_replace_mixed_object():
    """Non-string entries become NaN when str.replace runs on object data."""
    data = ["aBAD", np.nan, "bBAD", True, datetime.today(), "fooBAD", None, 1, 2.0]
    result = Series(data).str.replace("BAD[_]*", "", regex=True)
    expected = Series(
        ["a", np.nan, "b", np.nan, np.nan, "foo", np.nan, np.nan, np.nan]
    )
    tm.assert_series_equal(result, expected)
def test_replace_unicode(any_string_dtype):
ser = Series([b"abcd,\xc3\xa0".decode("utf-8")], dtype=any_string_dtype)
expected = Series([b"abcd, \xc3\xa0".decode("utf-8")], dtype=any_string_dtype)
result = ser.str.replace(r"(?<=\w),(?=\w)", ", ", flags=re.UNICODE, regex=True)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("repl", [None, 3, {"a": "b"}])
@pytest.mark.parametrize("data", [["a", "b", None], ["a", "b", "c", "ad"]])
def test_replace_wrong_repl_type_raises(any_string_dtype, index_or_series, repl, data):
# https://github.com/pandas-dev/pandas/issues/13438
msg = "repl must be a string or callable"
obj = index_or_series(data, dtype=any_string_dtype)
with pytest.raises(TypeError, match=msg):
obj.str.replace("a", repl)
def test_replace_callable(any_string_dtype):
# GH 15055
ser = Series(["fooBAD__barBAD", np.nan], dtype=any_string_dtype)
# test with callable
repl = lambda m: m.group(0).swapcase()
result = ser.str.replace("[a-z][A-Z]{2}", repl, n=2, regex=True)
expected = Series(["foObaD__baRbaD", np.nan], dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
    "repl", [lambda: None, lambda m, x: None, lambda m, x, y=None: None]
)
def test_replace_callable_raises(any_string_dtype, repl):
    """A replacement callable with the wrong arity raises TypeError when
    invoked on a match; the regex covers both Python's "takes N" and
    "missing N required" wordings."""
    # GH 15055
    values = Series(["fooBAD__barBAD", np.nan], dtype=any_string_dtype)
    # test with wrong number of arguments, raising an error
    msg = (
        r"((takes)|(missing)) (?(2)from \d+ to )?\d+ "
        r"(?(3)required )positional arguments?"
    )
    with pytest.raises(TypeError, match=msg):
        values.str.replace("a", repl)
def test_replace_callable_named_groups(any_string_dtype):
# test regex named groups
ser = Series(["Foo Bar Baz", np.nan], dtype=any_string_dtype)
pat = r"(?P<first>\w+) (?P<middle>\w+) (?P<last>\w+)"
repl = lambda m: m.group("middle").swapcase()
result = ser.str.replace(pat, repl, regex=True)
expected = Series(["bAR", np.nan], dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
def test_replace_compiled_regex(any_string_dtype):
# GH 15446
ser = Series(["fooBAD__barBAD", np.nan], dtype=any_string_dtype)
# test with compiled regex
pat = re.compile(r"BAD_*")
result = ser.str.replace(pat, "", regex=True)
expected = Series(["foobar", np.nan], dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
result = ser.str.replace(pat, "", n=1, regex=True)
expected = Series(["foobarBAD", np.nan], dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
def test_replace_compiled_regex_mixed_object():
    """Non-string entries become NaN when replacing with a compiled regex."""
    compiled = re.compile(r"BAD_*")
    data = ["aBAD", np.nan, "bBAD", True, datetime.today(), "fooBAD", None, 1, 2.0]
    result = Series(data).str.replace(compiled, "", regex=True)
    expected = Series(
        ["a", np.nan, "b", np.nan, np.nan, "foo", np.nan, np.nan, np.nan]
    )
    tm.assert_series_equal(result, expected)
def test_replace_compiled_regex_unicode(any_string_dtype):
    """A compiled unicode lookaround pattern works with str.replace.

    NOTE(review): this call omits ``regex=`` and relies on the default
    treatment of compiled patterns at this pandas version — confirm before
    porting.
    """
    ser = Series([b"abcd,\xc3\xa0".decode("utf-8")], dtype=any_string_dtype)
    expected = Series([b"abcd, \xc3\xa0".decode("utf-8")], dtype=any_string_dtype)
    pat = re.compile(r"(?<=\w),(?=\w)", flags=re.UNICODE)
    result = ser.str.replace(pat, ", ")
    tm.assert_series_equal(result, expected)
def test_replace_compiled_regex_raises(any_string_dtype):
    """Passing ``case`` or ``flags`` together with an already-compiled
    pattern is contradictory and must raise ValueError."""
    # case and flags provided to str.replace will have no effect
    # and will produce warnings
    ser = Series(["fooBAD__barBAD__bad", np.nan], dtype=any_string_dtype)
    pat = re.compile(r"BAD_*")
    msg = "case and flags cannot be set when pat is a compiled regex"
    with pytest.raises(ValueError, match=msg):
        ser.str.replace(pat, "", flags=re.IGNORECASE)
    with pytest.raises(ValueError, match=msg):
        ser.str.replace(pat, "", case=False)
    with pytest.raises(ValueError, match=msg):
        ser.str.replace(pat, "", case=True)
def test_replace_compiled_regex_callable(any_string_dtype):
    """A compiled pattern combined with a callable replacement.

    NOTE(review): the call omits ``regex=`` and relies on the default
    treatment of compiled patterns at this pandas version.
    """
    # test with callable
    ser = Series(["fooBAD__barBAD", np.nan], dtype=any_string_dtype)
    repl = lambda m: m.group(0).swapcase()
    pat = re.compile("[a-z][A-Z]{2}")
    result = ser.str.replace(pat, repl, n=2)
    expected = Series(["foObaD__baRbaD", np.nan], dtype=any_string_dtype)
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
"regex,expected", [(True, ["bao", "bao", np.nan]), (False, ["bao", "foo", np.nan])]
)
def test_replace_literal(regex, expected, any_string_dtype):
# GH16808 literal replace (regex=False vs regex=True)
ser = Series(["f.o", "foo", np.nan], dtype=any_string_dtype)
expected = Series(expected, dtype=any_string_dtype)
result = ser.str.replace("f.", "ba", regex=regex)
tm.assert_series_equal(result, expected)
def test_replace_literal_callable_raises(any_string_dtype):
ser = Series([], dtype=any_string_dtype)
repl = lambda m: m.group(0).swapcase()
msg = "Cannot use a callable replacement when regex=False"
with pytest.raises(ValueError, match=msg):
ser.str.replace("abc", repl, regex=False)
def test_replace_literal_compiled_raises(any_string_dtype):
ser = Series([], dtype=any_string_dtype)
pat = re.compile("[a-z][A-Z]{2}")
msg = "Cannot use a compiled regex as replacement pattern with regex=False"
with pytest.raises(ValueError, match=msg):
ser.str.replace(pat, "", regex=False)
def test_replace_moar(any_string_dtype):
    """str.replace on a longer Series: default pattern handling, case=False,
    and an explicit regex with alternation."""
    # PR #1179
    ser = Series(
        ["A", "B", "C", "Aaba", "Baca", "", np.nan, "CABA", "dog", "cat"],
        dtype=any_string_dtype,
    )
    result = ser.str.replace("A", "YYY")
    expected = Series(
        ["YYY", "B", "C", "YYYaba", "Baca", "", np.nan, "CYYYBYYY", "dog", "cat"],
        dtype=any_string_dtype,
    )
    tm.assert_series_equal(result, expected)
    result = ser.str.replace("A", "YYY", case=False)
    expected = Series(
        [
            "YYY",
            "B",
            "C",
            "YYYYYYbYYY",
            "BYYYcYYY",
            "",
            np.nan,
            "CYYYBYYY",
            "dog",
            "cYYYt",
        ],
        dtype=any_string_dtype,
    )
    tm.assert_series_equal(result, expected)
    # anchored regex with alternation, case-insensitive
    result = ser.str.replace("^.a|dog", "XX-XX ", case=False, regex=True)
    expected = Series(
        [
            "A",
            "B",
            "C",
            "XX-XX ba",
            "XX-XX ca",
            "",
            np.nan,
            "XX-XX BA",
            "XX-XX ",
            "XX-XX t",
        ],
        dtype=any_string_dtype,
    )
    tm.assert_series_equal(result, expected)
def test_replace_not_case_sensitive_not_regex(any_string_dtype):
# https://github.com/pandas-dev/pandas/issues/41602
ser = Series(["A.", "a.", "Ab", "ab", np.nan], dtype=any_string_dtype)
result = ser.str.replace("a", "c", case=False, regex=False)
expected = Series(["c.", "c.", "cb", "cb", np.nan], dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
result = ser.str.replace("a.", "c.", case=False, regex=False)
expected = Series(["c.", "c.", "Ab", "ab", np.nan], dtype=any_string_dtype)
tm.assert_series_equal(result, expected)
def test_replace_regex_default_warning(any_string_dtype):
    """Omitting ``regex=`` with a regex-like pattern warns about the
    upcoming default change from True to False (version-specific)."""
    # https://github.com/pandas-dev/pandas/pull/24809
    s = Series(["a", "b", "ac", np.nan, ""], dtype=any_string_dtype)
    msg = (
        "The default value of regex will change from True to False in a "
        "future version\\.$"
    )
    with tm.assert_produces_warning(FutureWarning, match=msg):
        result = s.str.replace("^.$", "a")
    expected = Series(["a", "a", "ac", np.nan, ""], dtype=any_string_dtype)
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("regex", [True, False, None])
def test_replace_regex_single_character(regex, any_string_dtype):
    """Single-character patterns are treated literally regardless of
    ``regex``; regex=None additionally emits the deprecation warning
    (version-specific)."""
    # https://github.com/pandas-dev/pandas/pull/24809
    # The current behavior is to treat single character patterns as literal strings,
    # even when ``regex`` is set to ``True``.
    s = Series(["a.b", ".", "b", np.nan, ""], dtype=any_string_dtype)
    if regex is None:
        msg = re.escape(
            "The default value of regex will change from True to False in a future "
            "version. In addition, single character regular expressions will *not* "
            "be treated as literal strings when regex=True."
        )
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result = s.str.replace(".", "a", regex=regex)
    else:
        result = s.str.replace(".", "a", regex=regex)
    expected = Series(["aab", "a", "b", np.nan, ""], dtype=any_string_dtype)
    tm.assert_series_equal(result, expected)
# --------------------------------------------------------------------------------------
# str.match
# --------------------------------------------------------------------------------------
def test_match(any_string_dtype):
    """str.match anchors at the start of the string only (unlike contains,
    unlike fullmatch); ``^`` must be escaped to match literally."""
    # New match behavior introduced in 0.13
    expected_dtype = "object" if any_string_dtype == "object" else "boolean"
    values = Series(["fooBAD__barBAD", np.nan, "foo"], dtype=any_string_dtype)
    result = values.str.match(".*(BAD[_]+).*(BAD)")
    expected = Series([True, np.nan, False], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    values = Series(
        ["fooBAD__barBAD", "BAD_BADleroybrown", np.nan, "foo"], dtype=any_string_dtype
    )
    result = values.str.match(".*BAD[_]+.*BAD")
    expected = Series([True, True, np.nan, False], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    # without the leading wildcard the pattern must match from position 0
    result = values.str.match("BAD[_]+.*BAD")
    expected = Series([False, True, np.nan, False], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    values = Series(
        ["fooBAD__barBAD", "^BAD_BADleroybrown", np.nan, "foo"], dtype=any_string_dtype
    )
    result = values.str.match("^BAD[_]+.*BAD")
    expected = Series([False, False, np.nan, False], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    # an escaped caret matches the literal "^" character
    result = values.str.match("\\^BAD[_]+.*BAD")
    expected = Series([False, True, np.nan, False], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
def test_match_mixed_object():
    """Non-string entries in an object Series yield NaN from str.match."""
    data = [
        "aBAD_BAD",
        np.nan,
        "BAD_b_BAD",
        True,
        datetime.today(),
        "foo",
        None,
        1,
        2.0,
    ]
    result = Series(data).str.match(".*(BAD[_]+).*(BAD)")
    expected = Series(
        [True, np.nan, True, np.nan, np.nan, False, np.nan, np.nan, np.nan]
    )
    assert isinstance(result, Series)
    tm.assert_series_equal(result, expected)
def test_match_na_kwarg(any_string_dtype):
# GH #6609
s = Series(["a", "b", np.nan], dtype=any_string_dtype)
result = s.str.match("a", na=False)
expected_dtype = np.bool_ if any_string_dtype == "object" else "boolean"
expected = Series([True, False, False], dtype=expected_dtype)
tm.assert_series_equal(result, expected)
result = s.str.match("a")
expected_dtype = "object" if any_string_dtype == "object" else "boolean"
expected = Series([True, False, np.nan], dtype=expected_dtype)
tm.assert_series_equal(result, expected)
def test_match_case_kwarg(any_string_dtype):
values = Series(["ab", "AB", "abc", "ABC"], dtype=any_string_dtype)
result = values.str.match("ab", case=False)
expected_dtype = np.bool_ if any_string_dtype == "object" else "boolean"
expected = Series([True, True, True, True], dtype=expected_dtype)
tm.assert_series_equal(result, expected)
# --------------------------------------------------------------------------------------
# str.fullmatch
# --------------------------------------------------------------------------------------
def test_fullmatch(any_string_dtype):
# GH 32806
ser = Series(
["fooBAD__barBAD", "BAD_BADleroybrown", np.nan, "foo"], dtype=any_string_dtype
)
result = ser.str.fullmatch(".*BAD[_]+.*BAD")
expected_dtype = "object" if any_string_dtype == "object" else "boolean"
expected = Series([True, False, np.nan, False], dtype=expected_dtype)
tm.assert_series_equal(result, expected)
def test_fullmatch_na_kwarg(any_string_dtype):
ser = Series(
["fooBAD__barBAD", "BAD_BADleroybrown", np.nan, "foo"], dtype=any_string_dtype
)
result = ser.str.fullmatch(".*BAD[_]+.*BAD", na=False)
expected_dtype = np.bool_ if any_string_dtype == "object" else "boolean"
expected = Series([True, False, False, False], dtype=expected_dtype)
tm.assert_series_equal(result, expected)
def test_fullmatch_case_kwarg(any_string_dtype):
ser = Series(["ab", "AB", "abc", "ABC"], dtype=any_string_dtype)
expected_dtype = np.bool_ if any_string_dtype == "object" else "boolean"
expected = Series([True, False, False, False], dtype=expected_dtype)
result = ser.str.fullmatch("ab", case=True)
tm.assert_series_equal(result, expected)
expected = Series([True, True, False, False], dtype=expected_dtype)
result = ser.str.fullmatch("ab", case=False)
tm.assert_series_equal(result, expected)
result = ser.str.fullmatch("ab", flags=re.IGNORECASE)
tm.assert_series_equal(result, expected)
# --------------------------------------------------------------------------------------
# str.findall
# --------------------------------------------------------------------------------------
def test_findall(any_string_dtype):
ser = Series(["fooBAD__barBAD", np.nan, "foo", "BAD"], dtype=any_string_dtype)
result = ser.str.findall("BAD[_]*")
expected = Series([["BAD__", "BAD"], np.nan, [], ["BAD"]])
tm.assert_series_equal(result, expected)
def test_findall_mixed_object():
    """Non-string entries produce NaN rather than a list of matches."""
    data = [
        "fooBAD__barBAD",
        np.nan,
        "foo",
        True,
        datetime.today(),
        "BAD",
        None,
        1,
        2.0,
    ]
    result = Series(data).str.findall("BAD[_]*")
    expected_values = [
        ["BAD__", "BAD"],
        np.nan,
        [],
        np.nan,
        np.nan,
        ["BAD"],
        np.nan,
        np.nan,
        np.nan,
    ]
    tm.assert_series_equal(result, Series(expected_values))
# --------------------------------------------------------------------------------------
# str.find
# --------------------------------------------------------------------------------------
def test_find(any_string_dtype):
    """str.find/rfind with optional start/end bounds behave like Python's
    str.find/str.rfind element-wise; no match returns -1."""
    ser = Series(
        ["ABCDEFG", "BCDEFEF", "DEFGHIJEF", "EFGHEF", "XXXX"], dtype=any_string_dtype
    )
    expected_dtype = np.int64 if any_string_dtype == "object" else "Int64"
    result = ser.str.find("EF")
    expected = Series([4, 3, 1, 0, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    # cross-check against the builtin str method
    expected = np.array([v.find("EF") for v in np.array(ser)], dtype=np.int64)
    tm.assert_numpy_array_equal(np.array(result, dtype=np.int64), expected)
    result = ser.str.rfind("EF")
    expected = Series([4, 5, 7, 4, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    expected = np.array([v.rfind("EF") for v in np.array(ser)], dtype=np.int64)
    tm.assert_numpy_array_equal(np.array(result, dtype=np.int64), expected)
    # with a start bound
    result = ser.str.find("EF", 3)
    expected = Series([4, 3, 7, 4, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    expected = np.array([v.find("EF", 3) for v in np.array(ser)], dtype=np.int64)
    tm.assert_numpy_array_equal(np.array(result, dtype=np.int64), expected)
    result = ser.str.rfind("EF", 3)
    expected = Series([4, 5, 7, 4, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    expected = np.array([v.rfind("EF", 3) for v in np.array(ser)], dtype=np.int64)
    tm.assert_numpy_array_equal(np.array(result, dtype=np.int64), expected)
    # with both start and end bounds
    result = ser.str.find("EF", 3, 6)
    expected = Series([4, 3, -1, 4, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    expected = np.array([v.find("EF", 3, 6) for v in np.array(ser)], dtype=np.int64)
    tm.assert_numpy_array_equal(np.array(result, dtype=np.int64), expected)
    result = ser.str.rfind("EF", 3, 6)
    expected = Series([4, 3, -1, 4, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    expected = np.array([v.rfind("EF", 3, 6) for v in np.array(ser)], dtype=np.int64)
    tm.assert_numpy_array_equal(np.array(result, dtype=np.int64), expected)
def test_find_bad_arg_raises(any_string_dtype):
ser = Series([], dtype=any_string_dtype)
with pytest.raises(TypeError, match="expected a string object, not int"):
ser.str.find(0)
with pytest.raises(TypeError, match="expected a string object, not int"):
ser.str.rfind(0)
def test_find_nan(any_string_dtype):
    """find/rfind propagate NaN; for object input the result is float64
    (to hold NaN), for nullable strings it is "Int64"."""
    ser = Series(
        ["ABCDEFG", np.nan, "DEFGHIJEF", np.nan, "XXXX"], dtype=any_string_dtype
    )
    expected_dtype = np.float64 if any_string_dtype == "object" else "Int64"
    result = ser.str.find("EF")
    expected = Series([4, np.nan, 1, np.nan, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    result = ser.str.rfind("EF")
    expected = Series([4, np.nan, 7, np.nan, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    result = ser.str.find("EF", 3)
    expected = Series([4, np.nan, 7, np.nan, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    result = ser.str.rfind("EF", 3)
    expected = Series([4, np.nan, 7, np.nan, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    result = ser.str.find("EF", 3, 6)
    expected = Series([4, np.nan, -1, np.nan, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
    result = ser.str.rfind("EF", 3, 6)
    expected = Series([4, np.nan, -1, np.nan, -1], dtype=expected_dtype)
    tm.assert_series_equal(result, expected)
# --------------------------------------------------------------------------------------
# str.translate
# --------------------------------------------------------------------------------------
def test_translate(index_or_series, any_string_dtype):
obj = index_or_series(
["abcdefg", "abcc", "cdddfg", "cdefggg"], dtype=any_string_dtype
)
table = str.maketrans("abc", "cde")
result = obj.str.translate(table)
expected = index_or_series(
["cdedefg", "cdee", "edddfg", "edefggg"], dtype=any_string_dtype
)
tm.assert_equal(result, expected)
def test_translate_mixed_object():
    """Non-string values translate to NaN."""
    mixed = Series(["a", "b", "c", 1.2])
    mapping = str.maketrans("abc", "cde")
    result = mixed.str.translate(mapping)
    tm.assert_series_equal(result, Series(["c", "d", "e", np.nan]))
# --------------------------------------------------------------------------------------
def test_flags_kwarg(any_string_dtype):
    """The ``flags`` argument (here re.IGNORECASE) is honored by extract,
    match, fullmatch, findall, count, and contains; contains additionally
    warns because the pattern has capture groups."""
    data = {
        "Dave": "dave@google.com",
        "Steve": "steve@gmail.com",
        "Rob": "rob@gmail.com",
        "Wes": np.nan,
    }
    data = Series(data, dtype=any_string_dtype)
    pat = r"([A-Z0-9._%+-]+)@([A-Z0-9.-]+)\.([A-Z]{2,4})"
    result = data.str.extract(pat, flags=re.IGNORECASE, expand=True)
    assert result.iloc[0].tolist() == ["dave", "google", "com"]
    result = data.str.match(pat, flags=re.IGNORECASE)
    assert result[0]
    result = data.str.fullmatch(pat, flags=re.IGNORECASE)
    assert result[0]
    result = data.str.findall(pat, flags=re.IGNORECASE)
    assert result[0][0] == ("dave", "google", "com")
    result = data.str.count(pat, flags=re.IGNORECASE)
    assert result[0] == 1
    # contains warns when the pattern has match groups
    msg = "This pattern has match groups"
    with tm.assert_produces_warning(UserWarning, match=msg):
        result = data.str.contains(pat, flags=re.IGNORECASE)
    assert result[0]
| {
"content_hash": "297bf210c66125118d639436892cf4dd",
"timestamp": "",
"source": "github",
"line_count": 925,
"max_line_length": 88,
"avg_line_length": 35.00432432432432,
"alnum_prop": 0.5825380647950832,
"repo_name": "jorisvandenbossche/pandas",
"id": "f390cbf492202b16d8cb4a091393cef2b864caa0",
"size": "32379",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "pandas/tests/strings/test_find_replace.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "127"
},
{
"name": "C",
"bytes": "360342"
},
{
"name": "CSS",
"bytes": "1438"
},
{
"name": "Cython",
"bytes": "1083849"
},
{
"name": "Dockerfile",
"bytes": "1690"
},
{
"name": "HTML",
"bytes": "456275"
},
{
"name": "Makefile",
"bytes": "507"
},
{
"name": "Python",
"bytes": "17541583"
},
{
"name": "Shell",
"bytes": "10719"
},
{
"name": "Smarty",
"bytes": "7820"
},
{
"name": "XSLT",
"bytes": "1196"
}
],
"symlink_target": ""
} |
"""Scheduling learning rate."""
import logging
class LRScheduler(object):
    """Base class for learning rate schedulers.

    Subclasses compute a new learning rate from the number of optimizer
    updates performed so far.

    Parameters
    ----------
    base_lr : float, optional
        The initial learning rate.
    """
    def __init__(self, base_lr=0.01):
        self.base_lr = base_lr

    def __call__(self, num_update):
        """Return the learning rate for the given update count.

        ``num_update`` is the upper bound on the number of updates applied
        to any single weight: if the optimizer has updated the *i*-th
        weight ``k_i`` times (i.e. called ``optimizer.update(i, weight_i)``
        ``k_i`` times), then ``num_update = max(k_i for all i)``.

        Parameters
        ----------
        num_update: int
            the maximal number of updates applied to a weight.
        """
        raise NotImplementedError("must override this")
class FactorScheduler(LRScheduler):
    """Reduce the learning rate by a factor for every *n* steps.
    It returns a new learning rate by::
        base_lr * pow(factor, floor(num_update/step))
    Parameters
    ----------
    step : int
        Changes the learning rate for every n updates.
    factor : float, optional
        The factor to change the learning rate.
    stop_factor_lr : float, optional
        Stop updating the learning rate if it is less than this value.
    """
    def __init__(self, step, factor=1, stop_factor_lr=1e-8):
        super(FactorScheduler, self).__init__()
        if step < 1:
            raise ValueError("Schedule step must be greater or equal than 1 round")
        if factor > 1.0:
            raise ValueError("Factor must be no more than 1 to make lr reduce")
        self.step = step
        self.factor = factor
        self.stop_factor_lr = stop_factor_lr
        # update index at which the learning rate was last reduced
        self.count = 0
    def __call__(self, num_update):
        # NOTE: use while rather than if (for continuing training via load_epoch)
        # NOTE(review): reduction fires when num_update exceeds count + step
        # (strictly greater), which lags the floor(num_update/step) formula in
        # the class docstring by one update — confirm intended.
        while num_update > self.count + self.step:
            self.count += self.step
            self.base_lr *= self.factor
            if self.base_lr < self.stop_factor_lr:
                # clamp at the floor and stop announcing further changes
                self.base_lr = self.stop_factor_lr
                logging.info("Update[%d]: now learning rate arrived at %0.5e, will not "
                             "change in the future", num_update, self.base_lr)
            else:
                logging.info("Update[%d]: Change learning rate to %0.5e",
                             num_update, self.base_lr)
        return self.base_lr
class MultiFactorScheduler(LRScheduler):
    """Learning rate schedule that decays at a given list of step boundaries.

    If there is a *k* such that ``step[k] <= num_update < step[k+1]``, the
    rate returned is::

        base_lr * pow(factor, k+1)

    Parameters
    ----------
    step : list of int
        The list of steps to schedule a change.
    factor : float
        The factor to change the learning rate.
    """

    def __init__(self, step, factor=1):
        super(MultiFactorScheduler, self).__init__()
        assert isinstance(step, list) and len(step) >= 1
        for index, boundary in enumerate(step):
            if index != 0 and boundary <= step[index - 1]:
                raise ValueError("Schedule step must be an increasing integer list")
            if boundary < 1:
                raise ValueError("Schedule step must be greater or equal than 1 round")
        self.step = step
        self.cur_step_ind = 0
        self.factor = factor
        self.count = 0

    def __call__(self, num_update):
        # A while-loop (rather than a single if) lets the schedule catch up
        # in one call when training resumes from a checkpoint (load_epoch).
        while self.cur_step_ind < len(self.step):
            boundary = self.step[self.cur_step_ind]
            if num_update <= boundary:
                # Not past the next boundary yet; nothing to do.
                return self.base_lr
            self.count = boundary
            self.cur_step_ind += 1
            self.base_lr *= self.factor
            logging.info("Update[%d]: Change learning rate to %0.5e",
                         num_update, self.base_lr)
        return self.base_lr
class PolyScheduler(LRScheduler):
    """Polynomial-decay learning rate schedule.

    For ``num_update <= max_update`` the learning rate is::

        base_lr * (1 - num_update/max_update) ** pwr

    which reaches 0 exactly at ``num_update == max_update``.  For larger
    ``num_update`` the rate is not recomputed: the last computed value is
    returned unchanged.  (The original docstring was copy-pasted from the
    multi-step scheduler and claimed "0 otherwise", which the code never did.)

    Parameters
    ----------
    max_update : int
        Number of updates over which the rate decays to 0.
    base_lr : float, optional
        The initial learning rate.
    pwr : int or float, optional
        Power of the decay term as a function of the current number of updates.
    """
    def __init__(self, max_update, base_lr=0.01, pwr=2):
        super(PolyScheduler, self).__init__(base_lr)
        assert isinstance(max_update, int)
        if max_update < 1:
            raise ValueError("maximum number of updates must be strictly positive")
        # Keep the untouched initial rate so __call__ can recompute from it
        # (self.base_lr itself is mutated on every call).
        self.base_lr_orig = self.base_lr
        self.max_update = max_update
        self.power = pwr
        # NOTE: the redundant re-assignment ``self.base_lr = self.base_lr_orig``
        # was removed; base_lr already holds that value at this point.
    def __call__(self, num_update):
        """Return the polynomially decayed learning rate for ``num_update``."""
        if num_update <= self.max_update:
            self.base_lr = self.base_lr_orig * pow(1.0 - float(num_update) / float(self.max_update),
                                                   self.power)
        return self.base_lr
| {
"content_hash": "7c6b37a9a6cd9de07ce8f7a2247a42c8",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 100,
"avg_line_length": 35.411764705882355,
"alnum_prop": 0.5782576596530085,
"repo_name": "TuSimple/mxnet",
"id": "963560d17853a54964439e937324da321c82bafb",
"size": "6204",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "python/mxnet/lr_scheduler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Batchfile",
"bytes": "13130"
},
{
"name": "C",
"bytes": "122099"
},
{
"name": "C++",
"bytes": "5340824"
},
{
"name": "CMake",
"bytes": "80796"
},
{
"name": "Cuda",
"bytes": "963477"
},
{
"name": "Dockerfile",
"bytes": "24811"
},
{
"name": "Groovy",
"bytes": "1020"
},
{
"name": "HTML",
"bytes": "40277"
},
{
"name": "Java",
"bytes": "122297"
},
{
"name": "Jupyter Notebook",
"bytes": "1275177"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "68249"
},
{
"name": "Perl",
"bytes": "1256723"
},
{
"name": "Perl 6",
"bytes": "7280"
},
{
"name": "Python",
"bytes": "5417311"
},
{
"name": "R",
"bytes": "311544"
},
{
"name": "Scala",
"bytes": "988309"
},
{
"name": "Shell",
"bytes": "276038"
},
{
"name": "Smalltalk",
"bytes": "3497"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import mmap
import sys
import time
import Queue
import base64
import hashlib
import json
import os
import re
import tempfile
import threading
import urllib2
from Crypto import Random
from Crypto.Cipher import AES
#
# Author: Matthew Ingersoll <matth@mtingers.com>
#
# A class for accessing the Backblaze B2 API
#
# All of the API methods listed are implemented:
# https://www.backblaze.com/b2/docs/
#
#
# Thanks to stackoverflow
# http://stackoverflow.com/questions/16761458/how-to-aes-encrypt-decrypt-files-using-python-pycrypto-in-an-openssl-compatible
# TODO: review if these encryption techniques are actually sound.
def derive_key_and_iv(password, salt, key_length, iv_length):
    """Derive an AES key and IV from *password* and *salt*.

    Repeatedly MD5-hashes (previous digest + password + salt) and
    concatenates the digests until key_length + iv_length bytes are
    available, then splits them into (key, iv).  Per the module header
    this mirrors OpenSSL's legacy EVP_BytesToKey scheme — presumably for
    ``openssl enc`` compatibility; confirm before reuse.  Python 2 only:
    relies on str and bytes being interchangeable.
    """
    d = d_i = ''
    while len(d) < key_length + iv_length:
        d_i = hashlib.md5(d_i + password + salt).digest()
        d += d_i
    return d[:key_length], d[key_length:key_length + iv_length]
def generate_salt_key_iv(password, key_length=32):
    """Create a fresh random salt and derive (salt, key, iv) from *password*.

    The salt length is AES block size minus len('Salted__') so that the
    'Salted__' + salt header written by the encrypting reader fills
    exactly one block (OpenSSL salted-file layout).
    """
    bs = AES.block_size
    salt = Random.new().read(bs - len('Salted__'))
    key, iv = derive_key_and_iv(password, salt, key_length, bs)
    return salt, key, iv
def decrypt(in_file, out_file, password, key_length=32):
    """Decrypt an OpenSSL-style salted AES-CBC stream.

    Reads the 'Salted__'+salt header from *in_file*, re-derives key/iv
    from *password*, then streams CBC decryption into *out_file*.  A
    one-chunk lookahead (``next_chunk``) is kept so that the padding byte
    of the final chunk can be detected and stripped.  Python 2 only
    (``ord()`` on a byte-string index).
    """
    bs = AES.block_size
    # First block of the file is 'Salted__' + salt; keep only the salt.
    salt = in_file.read(bs)[len('Salted__'):]
    key, iv = derive_key_and_iv(password, salt, key_length, bs)
    cipher = AES.new(key, AES.MODE_CBC, iv)
    next_chunk = ''
    finished = False
    while not finished:
        chunk, next_chunk = next_chunk, cipher.decrypt(in_file.read(1024 * bs))
        if len(next_chunk) == 0:
            # Last chunk: its final byte encodes the padding length to strip.
            padding_length = ord(chunk[-1])
            chunk = chunk[:-padding_length]
            finished = True
        out_file.write(chunk)
# A stupid way to calculate size of encrypted file and sha1
# B2 requires a header with the sha1 but urllib2 must have the header before streaming
# the data. This means we must read the file once to calculate the sha1, then read it again
# for streaming the data on upload.
def calc_encryption_sha_and_length(in_file, password, salt, key_length, key,
                                   iv):
    """Pre-compute the sha1 and byte length of the encrypted stream.

    Encrypts *in_file* once, discarding the ciphertext, so the caller can
    set B2's X-Bz-Content-Sha1 and length headers before the real
    streaming upload re-encrypts the file (see the comment above: urllib2
    needs the headers before the body is streamed).  The output must match
    Read2Encrypt byte for byte: 'Salted__'+salt header, then AES-CBC
    blocks with PKCS#7-style padding on the final block.
    """
    bs = AES.block_size
    size = 0
    cipher = AES.new(key, AES.MODE_CBC, iv)
    sha = hashlib.sha1()
    # Account for the 'Salted__'+salt header that precedes the ciphertext.
    sha.update('Salted__' + salt)
    size += len('Salted__' + salt)
    finished = False
    while not finished:
        chunk = in_file.read(1024 * bs)
        if len(chunk) == 0 or len(chunk) % bs != 0:
            # Final (or empty) read: pad up to a full block; a whole block
            # of padding is added when the plaintext is block-aligned.
            padding_length = (bs - len(chunk) % bs) or bs
            chunk += padding_length * chr(padding_length)
            finished = True
        chunk = cipher.encrypt(chunk)
        sha.update(chunk)
        size += len(chunk)
    return sha.hexdigest(), size
class Read2Encrypt(file):
    """ Return encrypted data from read() calls.

    Subclass of the Python 2 ``file`` builtin whose read() encrypts on the
    fly, so urllib2 can stream an AES-CBC-encrypted upload body without
    materializing the ciphertext.  The first read() returns the
    'Salted__'+salt header; subsequent reads return encrypted chunks; the
    final chunk is PKCS#7-style padded.  Output must match
    calc_encryption_sha_and_length, which pre-computes the headers.
    """
    def __init__(self, path, mode, password, salt, key_length, key, iv, size=0,
                 *args):
        super(Read2Encrypt, self).__init__(path, mode)
        self.password = password
        self.bs = AES.block_size
        self.cipher = AES.new(key, AES.MODE_CBC, iv)
        (self.salt, self.key_length, self.key, self.iv) = (
            salt, key_length, key, iv)
        # finished flips once the padded final chunk has been returned.
        self.finished = False
        # _size is the pre-computed encrypted length; urllib2 reads it via
        # __len__ — presumably for the Content-Length header (confirm).
        self._size = size
        self._args = args
        self.sha = None
        self.first_read = True
    def __len__(self):
        # Length of the full encrypted stream, not of the underlying file.
        return self._size
    def read(self, size):
        if self.first_read:
            # Emit the OpenSSL-style header before any ciphertext.
            self.first_read = False
            return 'Salted__' + self.salt
        if self.finished:
            # NOTE(review): returns None (not '') at EOF — urllib2 appears
            # to tolerate this, but it differs from the file protocol.
            return None
        chunk = file.read(self, size)
        if len(chunk) == 0 or len(chunk) % self.bs != 0:
            # NOTE(review): assumes every non-final read returns a multiple
            # of the AES block size; a short mid-file read would trigger
            # padding early — confirm callers always pass block-sized reads.
            padding_length = (self.bs - len(chunk) % self.bs) or self.bs
            chunk += padding_length * chr(padding_length)
            self.finished = True
            chunk = self.cipher.encrypt(chunk)
            return chunk
        if chunk:
            chunk = self.cipher.encrypt(chunk)
            return chunk
class BackBlazeB2(object):
    """Thin client for the Backblaze B2 REST API (b2api/v1).

    Handles account authorization, bucket and file management, and
    (optionally password-encrypted) uploads and downloads.  Uploads can run
    multi-threaded through an internal work queue, with one upload URL per
    worker thread as B2 requires.
    """
    def __init__(self, account_id, app_key, mt_queue_size=12, valid_duration=24 * 60 * 60,
                 auth_token_lifetime_in_seconds=2 * 60 * 60, default_timeout=None):
        self.account_id = account_id
        self.app_key = app_key
        self.authorization_token = None
        self.api_url = None
        self.download_url = None
        # Cached single-threaded upload endpoint (may be filled externally;
        # upload_file falls back to fetching a fresh one when unset).
        self.upload_url = None
        self.upload_authorization_token = None
        self.valid_duration = valid_duration
        self.queue_size = mt_queue_size
        self.upload_queue = Queue.Queue(maxsize=mt_queue_size)
        self.default_timeout = default_timeout
        self._last_authorization_token_time = None
        self.auth_token_lifetime_in_seconds = auth_token_lifetime_in_seconds
    def authorize_account(self, timeout=None):
        """Call b2_authorize_account and cache token, api_url, download_url."""
        id_and_key = self.account_id + ':' + self.app_key
        basic_auth_string = 'Basic ' + base64.b64encode(id_and_key)
        headers = {'Authorization': basic_auth_string}
        try:
            request = urllib2.Request(
                'https://api.backblaze.com/b2api/v1/b2_authorize_account',
                headers=headers
            )
            response = self.__url_open_with_timeout(request, timeout)
            response_data = json.loads(response.read())
            response.close()
        except urllib2.HTTPError as error:
            print("ERROR: %s" % error.read())
            raise
        self.authorization_token = response_data['authorizationToken']
        self._last_authorization_token_time = time.time()
        self.api_url = response_data['apiUrl']
        self.download_url = response_data['downloadUrl']
        return response_data
    def _authorize_account(self, timeout):
        """(Re-)authorize when there is no token yet or the cached one expired."""
        if (self._last_authorization_token_time is not None
                and time.time() - self._last_authorization_token_time > self.auth_token_lifetime_in_seconds) \
                or not self.authorization_token or not self.api_url:
            self.authorize_account(timeout)
    def __url_open_with_timeout(self, request, timeout):
        """urlopen with the per-call timeout, falling back to default_timeout."""
        if timeout is not None or self.default_timeout is not None:
            custom_timeout = timeout or self.default_timeout
            response = urllib2.urlopen(request, timeout=custom_timeout)
        else:
            response = urllib2.urlopen(request)
        return response
    def create_bucket(self, bucket_name, bucket_type='allPrivate', timeout=None):
        """Create a bucket; bucket_type is either allPublic or allPrivate."""
        self._authorize_account(timeout)
        return self._api_request('%s/b2api/v1/b2_create_bucket' % self.api_url,
                                 {'accountId': self.account_id,
                                  'bucketName': bucket_name,
                                  'bucketType': bucket_type},
                                 {'Authorization': self.authorization_token}, timeout)
    def get_download_authorization(self, bucket_id, bucket_name,
                                   file_name_prefix, timeout):
        """Return a pre-authorized download URL for files under
        *file_name_prefix*, valid for self.valid_duration seconds."""
        self._authorize_account(timeout)
        url = '%s/b2api/v1/b2_get_download_authorization' % self.api_url
        data = {
            'bucketId': bucket_id,
            'fileNamePrefix': file_name_prefix,
            'validDurationInSeconds': self.valid_duration
        }
        result = self._api_request(
            url,
            data,
            {'Authorization': self.authorization_token},
            timeout
        )
        url_authorized_download = "{}/file/{}/{}?Authorization={}".format(
            self.download_url, bucket_name, result['fileNamePrefix'],
            result['authorizationToken']
        )
        return url_authorized_download
    def list_buckets(self, timeout=None):
        """Return the raw b2_list_buckets response for this account."""
        self._authorize_account(timeout)
        return self._api_request('%s/b2api/v1/b2_list_buckets' % self.api_url,
                                 {'accountId': self.account_id},
                                 {'Authorization': self.authorization_token}, timeout)
    def get_bucket_info(self, bucket_id=None, bucket_name=None, timeout=None):
        """Look up a bucket dict by exactly one of bucket_id / bucket_name.

        Returns None when no bucket matches."""
        bkt = None
        if not bucket_id and not bucket_name:
            raise Exception(
                "get_bucket_info requires either a bucket_id or bucket_name")
        if bucket_id and bucket_name:
            raise Exception(
                "get_bucket_info requires only _one_ argument and not both bucket_id and bucket_name")
        buckets = self.list_buckets(timeout)['buckets']
        if not bucket_id:
            key = 'bucketName'
            val = bucket_name
        else:
            key = 'bucketId'
            val = bucket_id
        for bucket in buckets:
            if bucket[key] == val:
                bkt = bucket
                break
        return bkt
    def delete_bucket(self, bucket_id=None, bucket_name=None, timeout=None):
        """Delete a bucket identified by exactly one of bucket_id / bucket_name."""
        if not bucket_id and not bucket_name:
            # BUG FIX: error messages previously said "create_bucket".
            raise Exception(
                "delete_bucket requires either a bucket_id or bucket_name")
        if bucket_id and bucket_name:
            raise Exception(
                "delete_bucket requires only _one_ argument and not both bucket_id and bucket_name")
        self._authorize_account(timeout)
        bucket = self.get_bucket_info(bucket_id, bucket_name, timeout)
        return self._api_request('%s/b2api/v1/b2_delete_bucket' % self.api_url,
                                 {'accountId': self.account_id,
                                  'bucketId': bucket['bucketId']},
                                 {'Authorization': self.authorization_token}, timeout)
    def get_upload_url(self, bucket_name, bucket_id, timeout=None):
        """Fetch a fresh upload URL + auth token (b2_get_upload_url)."""
        self._authorize_account(timeout)
        bucket = self.get_bucket_info(bucket_id, bucket_name)
        bucket_id = bucket['bucketId']
        return self._api_request('%s/b2api/v1/b2_get_upload_url' % self.api_url,
                                 {'bucketId': bucket_id},
                                 {'Authorization': self.authorization_token}, timeout)
    def upload_file(self, path, password=None, bucket_id=None, bucket_name=None,
                    thread_upload_url=None,
                    thread_upload_authorization_token=None, timeout=None):
        """Upload one file (b2_upload_file); encrypt it when *password* is set.

        Worker threads pass their own thread_upload_url/token; otherwise a
        fresh upload URL is fetched unless one is already cached on self.
        """
        self._authorize_account(timeout)
        if password:
            (salt, key, iv) = generate_salt_key_iv(password, 32)
            in_file = open(path, 'rb')
            # B2 needs the sha1/length of the *encrypted* stream in the
            # headers before streaming, so encrypt once just to compute them.
            (sha, size) = calc_encryption_sha_and_length(in_file, password,
                                                         salt, 32, key, iv)
            in_file.close()
            fp = Read2Encrypt(path, 'rb', password, salt, 32, key, iv,
                              size=size)
        else:
            fp = open(path, 'rb')
            mm_file_data = mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ)
            # BUG FIX: the plaintext sha1 used to be computed unconditionally,
            # clobbering the encrypted-stream sha1 computed above and making
            # encrypted uploads fail B2's checksum verification.
            sha_obj = hashlib.sha1()
            with open(path, 'rb') as f:
                while True:
                    block = f.read(2 ** 10)
                    if not block:
                        break
                    sha_obj.update(block)
            sha = sha_obj.hexdigest()
        if thread_upload_url:
            cur_upload_url = thread_upload_url
            cur_upload_authorization_token = thread_upload_authorization_token
        elif not self.upload_url or not self.upload_authorization_token:
            url = self.get_upload_url(bucket_name=bucket_name,
                                      bucket_id=bucket_id)
            cur_upload_url = url['uploadUrl']
            cur_upload_authorization_token = url['authorizationToken']
        else:
            # BUG FIX: this case previously fell through with cur_upload_url
            # unassigned, raising NameError when a cached URL was present.
            cur_upload_url = self.upload_url
            cur_upload_authorization_token = self.upload_authorization_token
        # Normalize the local path into a B2 file name (a duplicate, unused
        # computation of this block was removed).
        filename = re.sub('\\\\', '/',
                          path)  # Make sure Windows paths are converted.
        filename = re.sub('^/', '', filename)
        filename = re.sub('//', '/', filename)
        # All the whitespaces in the filename should be converted to %20
        if " " in filename:
            filename = filename.replace(" ", "%20")
        # TODO: Figure out URL encoding issue
        filename = unicode(filename, "utf-8")
        headers = {
            'Authorization': cur_upload_authorization_token,
            'X-Bz-File-Name': filename,
            'Content-Type': 'application/octet-stream',
            # 'Content-Type' : 'b2/x-auto',
            'X-Bz-Content-Sha1': sha
        }
        try:
            if password:
                # Stream via the encrypting reader.
                request = urllib2.Request(cur_upload_url, fp, headers)
            else:
                # Stream the memory-mapped plaintext.
                request = urllib2.Request(cur_upload_url, mm_file_data, headers)
            response = self.__url_open_with_timeout(request, timeout)
            response_data = json.loads(response.read())
        except urllib2.HTTPError as error:
            print("ERROR: %s" % error.read())
            raise
        response.close()
        fp.close()
        return response_data
    def update_bucket(self, bucket_type, bucket_id=None, bucket_name=None, timeout=None):
        """Switch a bucket between allPublic and allPrivate."""
        if bucket_type not in ('allPublic', 'allPrivate'):
            raise Exception(
                "update_bucket: Invalid bucket_type. Must be string allPublic or allPrivate")
        bucket = self.get_bucket_info(bucket_id=bucket_id,
                                      bucket_name=bucket_name, timeout=timeout)
        return self._api_request('%s/b2api/v1/b2_update_bucket' % self.api_url,
                                 {'accountId': self.account_id,
                                  'bucketId': bucket['bucketId'],
                                  'bucketType': bucket_type},
                                 {'Authorization': self.authorization_token}, timeout)
    def list_file_versions(self, bucket_id=None, bucket_name=None, maxFileCount=100, startFileName=None,
                           startFileId=None, prefix=None, timeout=None):
        """List file versions in a bucket (b2_list_file_versions).

        maxFileCount is clamped to B2's 0..10000 range."""
        bucket = self.get_bucket_info(bucket_id=bucket_id,
                                      bucket_name=bucket_name, timeout=timeout)
        if maxFileCount > 10000:
            maxFileCount = 10000
        if maxFileCount < 0:
            maxFileCount = 100
        data = {'bucketId': bucket['bucketId'], 'maxFileCount': maxFileCount}
        if startFileName is not None:
            data['startFileName'] = startFileName
        if startFileId is not None:
            data['startFileId'] = startFileId
        if prefix is not None:
            data['prefix'] = prefix
        return self._api_request(
            '%s/b2api/v1/b2_list_file_versions' % self.api_url,
            data,
            {'Authorization': self.authorization_token}, timeout)
    def list_file_names(self, bucket_id=None, bucket_name=None, maxFileCount=100, startFileName=None, prefix=None,
                        timeout=None):
        """List file names in a bucket (b2_list_file_names).

        maxFileCount is clamped to B2's 0..10000 range."""
        bucket = self.get_bucket_info(bucket_id=bucket_id,
                                      bucket_name=bucket_name, timeout=timeout)
        if maxFileCount > 10000:
            maxFileCount = 10000
        if maxFileCount < 0:
            maxFileCount = 100
        data = {'bucketId': bucket['bucketId'], 'maxFileCount': maxFileCount}
        if startFileName is not None:
            data['startFileName'] = startFileName
        if prefix is not None:
            data['prefix'] = prefix
        return self._api_request(
            '%s/b2api/v1/b2_list_file_names' % self.api_url,
            data,
            {'Authorization': self.authorization_token}, timeout)
    def hide_file(self, file_name, bucket_id=None, bucket_name=None, timeout=None):
        """Hide a file (b2_hide_file) so it no longer appears in listings.

        BUG FIX: this previously posted to b2_list_file_versions, which only
        listed versions and never hid anything.
        """
        bucket = self.get_bucket_info(bucket_id=bucket_id,
                                      bucket_name=bucket_name)
        return self._api_request(
            '%s/b2api/v1/b2_hide_file' % self.api_url,
            {'bucketId': bucket['bucketId'], 'fileName': file_name},
            {'Authorization': self.authorization_token}, timeout)
    def delete_file_version(self, file_name, file_id, timeout=None):
        """Delete one version of a file (b2_delete_file_version)."""
        self._authorize_account(timeout)
        return self._api_request(
            '%s/b2api/v1/b2_delete_file_version' % self.api_url,
            {'fileName': file_name, 'fileId': file_id},
            {'Authorization': self.authorization_token}, timeout)
    def get_file_info_by_name(self, file_name, bucket_id=None, bucket_name=None):
        """Resolve *file_name* to a fileId via listing, then fetch its info.

        Returns None when nothing matches."""
        file_names = self.list_file_names(bucket_id=bucket_id, bucket_name=bucket_name, prefix=file_name)
        for i in file_names['files']:
            if file_name in i['fileName']:
                return self.get_file_info(i['fileId'])
        return None
    def get_file_info(self, file_id, timeout=None):
        """Fetch metadata for one file id (b2_get_file_info)."""
        return self._api_request('%s/b2api/v1/b2_get_file_info' % self.api_url,
                                 {'fileId': file_id},
                                 {'Authorization': self.authorization_token}, timeout)
    def download_file_with_authorized_url(self, url, dst_file_name, force=False,
                                          password=None, timeout=None):
        """Download via a pre-authorized URL (see get_download_authorization)."""
        if os.path.exists(dst_file_name) and not force:
            raise Exception(
                "Destination file exists. Refusing to overwrite. "
                "Set force=True if you wish to do so.")
        request = urllib2.Request(
            url, None, {})
        response = self.__url_open_with_timeout(request, timeout)
        return BackBlazeB2.write_file(response, dst_file_name, password)
    def download_file_by_name(self, file_name, dst_file_name, bucket_id=None,
                              bucket_name=None, force=False, password=None, timeout=None):
        """Download a file by name, decrypting locally when *password* is set."""
        if os.path.exists(dst_file_name) and not force:
            raise Exception(
                "Destination file exists. Refusing to overwrite. "
                "Set force=True if you wish to do so.")
        self._authorize_account(timeout)
        bucket = self.get_bucket_info(bucket_id=bucket_id,
                                      bucket_name=bucket_name, timeout=timeout)
        url = self.download_url + '/file/' + bucket[
            'bucketName'] + '/' + file_name
        headers = {
            'Authorization': self.authorization_token
        }
        request = urllib2.Request(
            url, None, headers)
        response = self.__url_open_with_timeout(request, timeout)
        return BackBlazeB2.write_file(response, dst_file_name, password)
    def download_file_by_id(self, file_id, dst_file_name, force=False,
                            password=None, timeout=None):
        """Download a file by id, decrypting locally when *password* is set."""
        if os.path.exists(dst_file_name) and not force:
            raise Exception(
                "Destination file exists. Refusing to overwrite. "
                "Set force=True if you wish to do so.")
        self._authorize_account(timeout)
        url = self.download_url + '/b2api/v1/b2_download_file_by_id?fileId=' + file_id
        request = urllib2.Request(url, None,
                                  {'Authorization': self.authorization_token})
        resp = self.__url_open_with_timeout(request, timeout)
        return BackBlazeB2.write_file(resp, dst_file_name, password)
    def _upload_worker(self, password, bucket_id, bucket_name):
        # B2 started requiring a unique upload url per thread
        """Uploading in Parallel
        The URL and authorization token that you get from b2_get_upload_url can be used by only one thread at a time.
        If you want multiple threads running, each one needs to get its own URL and auth token. It can keep using that
        URL and auth token for multiple uploads, until it gets a returned status indicating that it should get a
        new upload URL."""
        url = self.get_upload_url(bucket_name=bucket_name, bucket_id=bucket_id)
        thread_upload_url = url['uploadUrl']
        thread_upload_authorization_token = url['authorizationToken']
        # NOTE(review): workers exit as soon as upload_queue_done is set even
        # if items remain queued; recursive_upload sets the flag only after
        # enqueueing everything, but a sleeping worker could still race —
        # confirm the queue is always drained before join().
        while not self.upload_queue_done:
            time.sleep(1)
            try:
                path = self.upload_queue.get_nowait()
            except:
                continue
            # try a few times in case of error
            for i in range(4):
                try:
                    self.upload_file(path, password=password,
                                     bucket_id=bucket_id,
                                     bucket_name=bucket_name,
                                     thread_upload_url=thread_upload_url,
                                     thread_upload_authorization_token=thread_upload_authorization_token)
                    break
                except Exception as e:
                    print(
                        "WARNING: Error processing file '%s'\n%s\nTrying again." % (
                            path, e))
                    time.sleep(1)
    def recursive_upload(self, path, bucket_id=None, bucket_name=None,
                         exclude_regex=None, include_regex=None,
                         exclude_re_flags=None, include_re_flags=None,
                         password=None, multithread=True):
        """Upload a file or directory tree; returns the number of files queued.

        Symlinks are skipped; exclude/include regexes filter full paths.
        With multithread, queue_size worker processes... threads share the
        upload queue, each with its own B2 upload URL.
        """
        bucket = self.get_bucket_info(bucket_id=bucket_id,
                                      bucket_name=bucket_name)
        if exclude_regex:
            exclude_regex = re.compile(exclude_regex, flags=exclude_re_flags)
        if include_regex:
            include_regex = re.compile(include_regex, flags=include_re_flags)
        nfiles = 0
        if os.path.isdir(path):
            if multithread:
                # Generate Queue worker threads to match QUEUE_SIZE
                self.threads = []
                self.upload_queue_done = False
                for i in range(self.queue_size):
                    t = threading.Thread(target=self._upload_worker, args=(
                        password, bucket_id, bucket_name,))
                    self.threads.append(t)
                    t.start()
            for root, dirs, files in os.walk(path):
                for f in files:
                    if os.path.islink(root + '/' + f): continue
                    if exclude_regex and exclude_regex.match(
                            root + '/' + f): continue
                    if include_regex and not include_regex.match(
                            root + '/' + f): continue
                    if multithread:
                        print("UPLOAD: %s" % root + '/' + f)
                        self.upload_queue.put(root + '/' + f)
                    else:
                        self.upload_file(root + '/' + f, password=password,
                                         bucket_id=bucket_id,
                                         bucket_name=bucket_name)
                    nfiles += 1
            if multithread:
                self.upload_queue_done = True
                for t in self.threads:
                    t.join()
        else:
            # Single-file upload.
            nfiles = 1
            if not os.path.islink(path):
                # NOTE(review): an include_regex that does NOT match still
                # leaves nfiles at 1, so the file uploads anyway — this
                # differs from the directory branch; confirm intent before
                # changing (behavior preserved here).
                if exclude_regex and exclude_regex.match(path):
                    nfiles -= 1
                if include_regex and include_regex.match(path):
                    nfiles += 1
                if nfiles > 0:
                    print("UPLOAD: %s" % path)
                    self.upload_file(path, password=password, bucket_id=bucket_id,
                                     bucket_name=bucket_name)
                    return 1
                else:
                    print("WARNING: No files uploaded")
        return nfiles
    def _api_request(self, url, data, headers, timeout=None):
        """POST *data* as JSON to *url* and return the decoded response."""
        self._authorize_account(timeout)
        request = urllib2.Request(url, json.dumps(data), headers)
        response = self.__url_open_with_timeout(request, timeout)
        response_data = json.loads(response.read())
        response.close()
        return response_data
    @staticmethod
    def write_file(response, dst_file_name, password=None):
        """Stream *response* (anything with read()) into *dst_file_name*.

        When *password* is given, decrypt the downloaded file in place via a
        temp file in the same directory.  Returns True.
        """
        with open(dst_file_name, 'wb') as f:
            while True:
                chunk = response.read(2 ** 10)
                if not chunk:
                    break
                f.write(chunk)
        # If password protection, decrypt
        if password:
            d = os.path.dirname(dst_file_name)
            with tempfile.NamedTemporaryFile(prefix='b2-', dir=d, suffix='.tmp',
                                             delete=False) as tfile:
                tname = tfile.name
                with open(dst_file_name, 'rb') as in_file:
                    decrypt(in_file, tfile, password)
            os.unlink(dst_file_name)
            os.rename(tname, dst_file_name)
        return True
# Example command line utility
if __name__ == "__main__":
    # Minimal CLI wrapper around BackBlazeB2: credentials (and an optional
    # encryption password) come from an .ini file passed via -c; the action
    # is chosen with flags (upload, download, create/list buckets, list files).
    import argparse, ConfigParser
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', required=True, dest='config_path',
                        help='Configuration path')
    parser.add_argument('-b', '--bucket-name', required=False,
                        dest='bucket_name',
                        help='Bucket name')
    parser.add_argument('-B', '--bucket-id', required=False, dest='bucket_id',
                        help='Bucket id')
    parser.add_argument('-u', '--upload', required=False, dest='upload_path',
                        help='Upload file or directory path', nargs='*')
    parser.add_argument('-d', '--download', required=False, dest='download',
                        nargs=2,
                        help='Download file source')  # , action='store_const')
    parser.add_argument('-n', '--new-bucket', required=False, dest='new_bucket',
                        nargs=2,
                        help='Create a new bucket [name, type]')
    parser.add_argument('-lb', '--list-buckets', required=False,
                        dest='list_buckets',
                        help='List buckets', action='store_true')
    parser.add_argument('-lf', '--list-files', required=False,
                        dest='list_files',
                        help='List files', action='store_true')
    parser.add_argument('-m', '--multithread', required=False, dest='mt',
                        help='Upload multithreaded worker queue size')
    args = parser.parse_args()
    # A bucket must be identified by exactly one of name/id (except for the
    # bucket-creation and bucket-listing actions, which need neither).
    if (not args.bucket_name and not args.bucket_id and not args.new_bucket and not args.list_buckets) or (
            args.bucket_name and args.bucket_id):
        parser.print_help()
        print("Must specify either -b/--bucket-name or -B/--bucket-id")
        sys.exit(1)
    # Consume config
    config = ConfigParser.ConfigParser()
    config.read(args.config_path)
    account_id = config.get('auth', 'account_id')
    app_key = config.get('auth', 'app_key')
    enc_pass = None
    try:
        enc_pass = config.get('encryption', 'password')
    except:
        # The [encryption] section is optional; no password = plain uploads.
        pass
    if args.mt:
        b2 = BackBlazeB2(account_id, app_key, mt_queue_size=int(args.mt))
    else:
        b2 = BackBlazeB2(account_id, app_key)
    # Upload an entire directory concurrently, encrypt with a password
    if args.upload_path:
        for path in args.upload_path:
            print("recursive_upload: %s" % path)
            response = b2.recursive_upload(path, bucket_name=args.bucket_name,
                                           bucket_id=args.bucket_id,
                                           multithread=args.mt,
                                           password=enc_pass)
            print("Uploaded %d files" % (response))
    # Download
    if args.download:
        download_src, download_dst = args.download
        print("download_file_by_name: %s to %s" % (download_src, download_dst))
        response = b2.download_file_by_name(download_src, download_dst,
                                            bucket_name=args.bucket_name,
                                            bucket_id=args.bucket_id,
                                            password=enc_pass)
        print(response)
    # Create bucket
    # Currently requires -B or -b even if it doesn't exist
    if args.new_bucket:
        bucket_name, bucket_type = args.new_bucket
        response = b2.create_bucket(bucket_name, bucket_type)
        print(response)
    # List buckets
    if args.list_buckets:
        buckets = b2.list_buckets()
        for bucket in buckets['buckets']:
            print("%s %s %s" % (
                bucket['bucketType'], bucket['bucketId'], bucket['bucketName']))
    # List files in bucket
    if args.list_files:
        print("list_files: %s %s" % (args.bucket_name, args.bucket_id))
        files = b2.list_file_names(bucket_name=args.bucket_name,
                                   bucket_id=args.bucket_id)
        print("contentSha1 size uploadTimestamp fileName")
        for f in files['files']:
            print("%s %s %s %s" % (
                f['contentSha1'], f['size'], f['uploadTimestamp'], f['fileName']))
| {
"content_hash": "0efc6bd7283b344a00fed3ec18123cc0",
"timestamp": "",
"source": "github",
"line_count": 690,
"max_line_length": 125,
"avg_line_length": 41.85507246376812,
"alnum_prop": 0.5536703601108033,
"repo_name": "mtingers/backblaze-b2",
"id": "4da9862030af41b4bfb59814db7e5aa438765c55",
"size": "28904",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backblazeb2/backblazeb2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "29190"
}
],
"symlink_target": ""
} |
import os, multiprocessing, traceback
from pointcloud.QueriesParameters import QueriesParameters
from pointcloud import utils
class AbstractQuerier:
    """Abstract class for the queriers to be implemented for each different
    solution for the benchmark.

    Subclasses implement initialize(), query() and close(); run() drives the
    benchmark: it spawns NumberUsers worker processes that pull query ids
    from a shared queue, executes each query NumberIterations times under
    optional CPU/memory/IO monitoring, and collects per-iteration stats.
    """
    def __init__(self, configuration):
        """ Create the querier, a ConfigParser is required (loaded from a .ini file)"""
        self.conf = configuration
    def getConfiguration(self):
        """ Gets the configuration (ConfigParser) instance """
        return self.conf
    def run(self):
        """Run all configured queries; return a list of
        (name, time, result, cpu, mem) tuples, one per query iteration."""
        config = self.getConfiguration()
        # Create and move to the execution path
        executionPath = config.get('General','ExecutionPath')
        if not os.path.isdir(executionPath):
            os.makedirs(executionPath)
        os.chdir(executionPath)
        # Initialize the PCDMS for querying
        self.initialize()
        # Get which IO devices need to be monitored
        ioMonitorParam = config.get('General','IOMonitor').strip()
        ioDevices = None
        if ioMonitorParam != '':
            ioDevices = ioMonitorParam.split(',')
        # Get the method to monitor the system usage (CPU / memory)
        # Note that the whole system is monitored (not only the processes related to the queries)
        usageMonitor = config.getboolean('General','UsageMonitor')
        # Read the query file
        queryFileAbsPath = config.get('Query','File')
        queriesParameters = QueriesParameters(queryFileAbsPath)
        # Get the identifiers of the several queries
        queryIds = queriesParameters.getIds()
        numQueries = len(queryIds)
        if numQueries != len(set(queryIds)):
            raise Exception('ERROR: There are duplicated identifiers in given XML')
        numUsers = config.getint('Query','NumberUsers')
        numIterations = config.getint('Query','NumberIterations')
        # Create queues
        queriesQueue = multiprocessing.Queue() # The queue of tasks (queries)
        resultsQueue = multiprocessing.Queue() # The queue of results
        for queryId in queryIds:
            queriesQueue.put(queryId)
        for i in range(numUsers): #we add as many None jobs as numUsers to tell them to terminate (queue is FIFO)
            queriesQueue.put(None)
        users = []
        # We start numUsers users processes
        for i in range(numUsers):
            users.append(multiprocessing.Process(target=self.runUser,
                args=(i, queriesQueue, resultsQueue, numIterations, queriesParameters, usageMonitor, ioDevices)))
            users[-1].start()
        # We need to receive for each query the two iterations and for each iteration both the results from the query execution and from the monitor
        numResults = numQueries * numIterations
        resultsDict = {}
        for i in range(numResults):
            [userIndex, queryId, iterationId, qTime, qResult, qCPU, qMEM] = resultsQueue.get()
            resultsDict[(queryId, iterationId)] = (userIndex, qTime, qResult, qCPU, qMEM)
        # wait for all users to finish their execution
        for i in range(numUsers):
            users[i].join()
        # Flatten the results into the (name, time, result, cpu, mem) stats
        # list, in query-file order.
        stats = []
        for queryId in queryIds:
            for iterationId in range(numIterations):
                (userIndex, qTime, qResult, qCPU, qMEM) = resultsDict[(queryId, iterationId)]
                qName = str(queryId) + '_' + str(iterationId)
                stats.append((qName, qTime, qResult, qCPU, qMEM))
        self.close()
        return stats
    def runUser(self, userIndex, tasksQueue, resultsQueue, numIterations, queriesParameters, usageMonitor, ioDevices):
        """Worker-process loop: pull query ids until a None sentinel arrives,
        run each iteration under the monitor, and push its results."""
        childResultQueue = multiprocessing.Queue()
        kill_received = False
        while not kill_received:
            queryId = None
            try:
                # This call will patiently wait until new job is available
                queryId = tasksQueue.get()
            except:
                # if there is an error we will quit the generation
                kill_received = True
            if queryId == None:
                # If we receive a None job, it means we can stop this workers
                # (all the create-image jobs are done)
                kill_received = True
            else:
                for iterationId in range(numIterations):
                    # BUG FIX: was queryId + '_' + '_' + str(iterationId),
                    # which produced a stray double underscore, inconsistent
                    # with the qName key built in run().
                    queryName = str(queryId) + '_' + str(iterationId)
                    usageAbsPath = os.path.abspath(queryName + '.usage')
                    ioAbsPath = None
                    if ioDevices != None:
                        ioAbsPath = os.path.abspath(queryName + '.io')
                    utils.runMonitor(self.runQuery,(queryId, iterationId, queriesParameters, childResultQueue), usageMonitor, usageAbsPath, ioDevices, ioAbsPath)
                    [queryId, iterationId, qTime, qResult] = childResultQueue.get()
                    (qCPU,qMEM) = (None, None)
                    if usageMonitor:
                        # Average CPU/memory over the query's lifetime, plus
                        # a usage plot per iteration.
                        (times, cpus, mems) = utils.parseUsage(usageAbsPath)
                        (qCPU,qMEM) = (cpus.mean(), mems.mean())
                        imageAbsPath = os.path.abspath(queryName + '_usage.png')
                        utils.saveUsage(times, cpus, mems, queryName + ' CPU/MEM', imageAbsPath)
                    if ioDevices != None:
                        (times, rdata, wdata) = utils.parseIO(ioAbsPath)
                        ioImageAbsPath = os.path.abspath(queryName + '_io.png')
                        utils.saveIO(times, rdata, wdata, queryName + ' IO', ioImageAbsPath)
                    resultsQueue.put((userIndex, queryId, iterationId, qTime, qResult, qCPU, qMEM))
    def runQuery(self, queryId, iterationId, queriesParameters, resultsQueue):
        """Execute one query iteration and push (id, iteration, time, result);
        on failure report '-' placeholders instead of crashing the worker."""
        try:
            (eTime, result) = self.query(queryId, iterationId, queriesParameters)
            resultsQueue.put((queryId, iterationId, eTime, result))
        except Exception as e:
            print(e)
            print(traceback.format_exc())
            resultsQueue.put((queryId, iterationId, '-', '-'))
    #
    # FOLLOWING METHODS HAVE TO BE IMPLEMENTED BY ALL QUERIERS
    #
    def initialize(self):
        """ Initialize the querier procedure """
        raise NotImplementedError( "Should have implemented this" )
    def query(self, queryId, iterationId, queriesParameters):
        """ Executes query indicated by queryId. It must return a tuple with (time, results)"""
        raise NotImplementedError( "Should have implemented this" )
    def close(self):
        """ Close the querier procedure"""
        raise NotImplementedError( "Should have implemented this" )
| {
"content_hash": "d4ba30bcafbb639eb1159383e3e94d1e",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 161,
"avg_line_length": 46.14093959731544,
"alnum_prop": 0.5898181818181818,
"repo_name": "NLeSC/pointcloud-benchmark",
"id": "ec27b7fcf700f80121d33135136272a4813febba",
"size": "7221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/pointcloud/AbstractQuerier.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "170293"
},
{
"name": "C++",
"bytes": "28083"
},
{
"name": "CMake",
"bytes": "39767"
},
{
"name": "Makefile",
"bytes": "12996"
},
{
"name": "Python",
"bytes": "345656"
},
{
"name": "Shell",
"bytes": "16610"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import re
from akamai.storage import AkamaiNetStorage
from akamai.utils import get_storage_class
from django.core.exceptions import ImproperlyConfigured
from django.core.files import storage as django_storage
from django.forms import fields
class AkamaiFilePathField(fields.ChoiceField):
    """A ChoiceField whose choices are files/folders listed from Akamai NetStorage.

    The listing is fetched once, at field construction time, from the storage
    backend and optionally filtered with the ``match`` regular expression.
    """

    def __init__(self, path='', match=None, recursive=False, allow_files=True,
                 allow_folders=False, required=True, widget=None, label=None,
                 initial=None, help_text='', storage_key=None, storage_field=None, *args, **kwargs):
        self.path, self.match, self.recursive = path, match, recursive
        self.allow_files, self.allow_folders = allow_files, allow_folders
        self.storage_key, self.storage_field = storage_key, storage_field
        super(AkamaiFilePathField, self).__init__(choices=(), required=required,
            widget=widget, label=label, initial=initial, help_text=help_text,
            *args, **kwargs)
        if storage_key:
            storage = get_storage_class(storage_key)
        elif storage_field:
            # NOTE(review): this branch is currently identical to the default
            # below -- presumably it should resolve storage via storage_field;
            # confirm intent before relying on the parameter.
            storage = django_storage.default_storage
        else:
            storage = django_storage.default_storage
        # isinstance (rather than an exact __class__ comparison) also accepts
        # subclasses of AkamaiNetStorage.
        if not isinstance(storage, AkamaiNetStorage):
            raise ImproperlyConfigured('AkamaiFilePathField only works with AkamaiNetStorage storage.')
        if self.required:
            self.choices = []
        else:
            # Non-required fields get an explicit "no selection" entry.
            self.choices = [("", "---------")]
        storage._start_connection()
        dirs, files = storage._get_dir_details(path, recursive=recursive, show_folders=allow_folders, show_files=allow_files)
        # list(dirs) + list(files) instead of dict.keys() + dict.keys():
        # concatenating dict views is Python-2-only and breaks on Python 3.
        lines = sorted(list(dirs) + list(files), key=str)
        if self.match is not None:
            self.match_re = re.compile(self.match)
        for line in lines:
            if self.match is None or self.match_re.search(line):
                self.choices.append((line, line))
        self.widget.choices = self.choices
| {
"content_hash": "6cda124a5fd2db2564181be3943274e2",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 125,
"avg_line_length": 38.38461538461539,
"alnum_prop": 0.6482965931863728,
"repo_name": "g3rd/django-akamai-storage",
"id": "5b3381b810e8a8d0cb5fd0a7f7400e2eec21882f",
"size": "1996",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "akamai_storage/forms/fields.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25134"
}
],
"symlink_target": ""
} |
import uuid
import datetime
import bson
import mock
from tests import FunctionalTest
from st2common.util import date as date_utils
from st2common.models.db.auth import ApiKeyDB, TokenDB, UserDB
from st2common.persistence.auth import ApiKey, Token, User
from st2common.exceptions.auth import TokenNotFoundError
from st2tests.fixturesloader import FixturesLoader
# Fixed identities and timestamps shared by the test cases below.
OBJ_ID = bson.ObjectId()
USER = 'stanley'
USER_DB = UserDB(name=USER)
# Random token value; Token.get is mocked below to treat it as valid.
TOKEN = uuid.uuid4().hex
NOW = date_utils.get_datetime_utc_now()
FUTURE = NOW + datetime.timedelta(seconds=300)  # expiry for still-valid tokens
PAST = NOW + datetime.timedelta(seconds=-300)  # expiry for already-expired tokens
class TestTokenBasedAuth(FunctionalTest):
    """Auth-middleware behaviour for X-Auth-Token based requests."""

    enable_auth = True

    @mock.patch.object(
        Token, 'get',
        mock.Mock(return_value=TokenDB(id=OBJ_ID, user=USER, token=TOKEN, expiry=FUTURE)))
    @mock.patch.object(User, 'get_by_name', mock.Mock(return_value=USER_DB))
    def test_token_validation_token_in_headers(self):
        # A valid, unexpired token in the header grants access.
        resp = self.app.get('/v1/actions', headers={'X-Auth-Token': TOKEN},
                            expect_errors=False)
        self.assertIn('application/json', resp.headers['content-type'])
        self.assertEqual(resp.status_int, 200)

    @mock.patch.object(
        Token, 'get',
        mock.Mock(return_value=TokenDB(id=OBJ_ID, user=USER, token=TOKEN, expiry=FUTURE)))
    @mock.patch.object(User, 'get_by_name', mock.Mock(return_value=USER_DB))
    def test_token_validation_token_in_query_params(self):
        # The token may alternatively be supplied as a query parameter.
        resp = self.app.get('/v1/actions?x-auth-token=%s' % (TOKEN), expect_errors=False)
        self.assertIn('application/json', resp.headers['content-type'])
        self.assertEqual(resp.status_int, 200)

    @mock.patch.object(
        Token, 'get',
        mock.Mock(return_value=TokenDB(id=OBJ_ID, user=USER, token=TOKEN, expiry=PAST)))
    def test_token_expired(self):
        # A token past its expiry is rejected.
        resp = self.app.get('/v1/actions', headers={'X-Auth-Token': TOKEN},
                            expect_errors=True)
        self.assertIn('application/json', resp.headers['content-type'])
        self.assertEqual(resp.status_int, 401)

    @mock.patch.object(
        Token, 'get', mock.MagicMock(side_effect=TokenNotFoundError()))
    def test_token_not_found(self):
        # An unknown token is rejected.
        resp = self.app.get('/v1/actions', headers={'X-Auth-Token': TOKEN},
                            expect_errors=True)
        self.assertIn('application/json', resp.headers['content-type'])
        self.assertEqual(resp.status_int, 401)

    def test_token_not_provided(self):
        # Requests without any credentials are rejected.
        resp = self.app.get('/v1/actions', expect_errors=True)
        self.assertIn('application/json', resp.headers['content-type'])
        self.assertEqual(resp.status_int, 401)
# Fixture pack and models loaded by the API-key tests below.
FIXTURES_PACK = 'generic'
TEST_MODELS = {
    'apikeys': ['apikey1.yaml', 'apikey_disabled.yaml']
}
# Hardcoded keys matching the fixtures. Lazy way to workaround one-way hash and still use fixtures.
KEY1_KEY = "1234"
DISABLED_KEY = "0000"
class TestApiKeyBasedAuth(FunctionalTest):
    """Auth-middleware behaviour for St2-Api-Key based requests."""

    enable_auth = True
    apikey1 = None
    apikey_disabled = None

    @classmethod
    def setUpClass(cls):
        super(TestApiKeyBasedAuth, cls).setUpClass()
        # Load the API-key fixtures so their key hashes exist in the DB.
        models = FixturesLoader().save_fixtures_to_db(fixtures_pack=FIXTURES_PACK,
                                                      fixtures_dict=TEST_MODELS)
        cls.apikey1 = models['apikeys']['apikey1.yaml']
        cls.apikey_disabled = models['apikeys']['apikey_disabled.yaml']

    @mock.patch.object(User, 'get_by_name', mock.Mock(return_value=UserDB(name='bill')))
    def test_apikey_validation_apikey_in_headers(self):
        # A known, enabled API key in the header grants access.
        resp = self.app.get('/v1/actions', headers={'St2-Api-key': KEY1_KEY},
                            expect_errors=False)
        self.assertIn('application/json', resp.headers['content-type'])
        self.assertEqual(resp.status_int, 200)

    @mock.patch.object(User, 'get_by_name', mock.Mock(return_value=UserDB(name='bill')))
    def test_apikey_validation_apikey_in_query_params(self):
        # The key may alternatively be supplied as a query parameter.
        resp = self.app.get('/v1/actions?st2-api-key=%s' % (KEY1_KEY), expect_errors=False)
        self.assertIn('application/json', resp.headers['content-type'])
        self.assertEqual(resp.status_int, 200)

    def test_apikey_disabled(self):
        # Disabled keys are rejected with an explicit fault string.
        resp = self.app.get('/v1/actions', headers={'St2-Api-key': DISABLED_KEY},
                            expect_errors=True)
        self.assertIn('application/json', resp.headers['content-type'])
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.json_body['faultstring'], 'Unauthorized - API key is disabled.')

    def test_apikey_not_found(self):
        resp = self.app.get('/v1/actions', headers={'St2-Api-key': 'UNKNOWN'},
                            expect_errors=True)
        self.assertIn('application/json', resp.headers['content-type'])
        self.assertEqual(resp.status_int, 401)
        self.assertRegexpMatches(resp.json_body['faultstring'],
                                 '^Unauthorized - ApiKey with key_hash=([a-zA-Z0-9]+) not found.$')

    @mock.patch.object(
        Token, 'get',
        mock.Mock(return_value=TokenDB(id=OBJ_ID, user=USER, token=TOKEN, expiry=FUTURE)))
    @mock.patch.object(
        ApiKey, 'get',
        mock.Mock(return_value=ApiKeyDB(user=USER, key_hash=KEY1_KEY, enabled=True)))
    @mock.patch.object(User, 'get_by_name', mock.Mock(return_value=USER_DB))
    def test_multiple_auth_sources(self):
        # Supplying both a token and an API key is ambiguous and rejected.
        resp = self.app.get('/v1/actions',
                            headers={'X-Auth-Token': TOKEN, 'St2-Api-key': KEY1_KEY},
                            expect_errors=True)
        self.assertIn('application/json', resp.headers['content-type'])
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.json_body['faultstring'],
                         'Unauthorized - Only one of Token or API key expected.')
| {
"content_hash": "09af7fa814fa028d302e7b9eb17f1214",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 99,
"avg_line_length": 43.97080291970803,
"alnum_prop": 0.649070385126162,
"repo_name": "tonybaloney/st2",
"id": "9df727a644f05f4bf2959c901098bad82f42ba24",
"size": "6804",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "st2api/tests/unit/controllers/v1/test_auth.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "198"
},
{
"name": "Makefile",
"bytes": "46066"
},
{
"name": "PowerShell",
"bytes": "299"
},
{
"name": "Python",
"bytes": "4278891"
},
{
"name": "Shell",
"bytes": "47687"
},
{
"name": "Slash",
"bytes": "677"
}
],
"symlink_target": ""
} |
"""Conformer utilities."""
import copy
from typing import List, Optional
from absl import logging
import numpy as np
import rdkit
from rdkit import Chem
from rdkit.Chem import AllChem
import tensorflow.compat.v2 as tf
def generate_conformers(
    molecule: Chem.rdchem.Mol,
    max_num_conformers: int,
    *,
    random_seed: int = -1,
    prune_rms_thresh: float = -1.0,
    max_iter: int = -1,
    fallback_to_random: bool = False,
) -> Chem.rdchem.Mol:
  """Generates force-field-optimised conformers for a molecule.

  Args:
    molecule: molecular representation of the compound.
    max_num_conformers: maximum number of conformers to generate. With RMSD
      pruning enabled the returned count may be smaller.
    random_seed: random seed for conformer generation.
    prune_rms_thresh: RMSD threshold used to prune near-duplicate conformers.
    max_iter: maximum number of force-field optimisation iterations; if <= 0,
      no energy optimisation is performed.
    fallback_to_random: if embedding fails, retry from random coordinates.

  Returns:
    A copy of `molecule` carrying the embedded (and optionally optimised)
    conformers; hydrogens are stripped again before returning so the atom
    count matches the molecular graph.
  """
  with_hydrogens = Chem.AddHs(copy.deepcopy(molecule))
  embedded = _embed_conformers(
      with_hydrogens,
      max_num_conformers,
      random_seed,
      prune_rms_thresh,
      fallback_to_random,
      use_random=False)
  if max_iter <= 0:
    optimised = embedded
  else:
    # Prefer MMFF; fall back to UFF when MMFF parameters are unavailable.
    optimised = _minimize_by_mmff(embedded, max_iter)
    if optimised is None:
      optimised = _minimize_by_uff(embedded, max_iter)
  # Align every conformer to the first one, then drop the hydrogens that
  # were added above.
  AllChem.AlignMolConformers(optimised)
  return Chem.RemoveHs(optimised)
def atom_to_feature_vector(
    atom: rdkit.Chem.rdchem.Atom,
    conformer: Optional[Chem.rdchem.Conformer] = None,
) -> List[float]:
  """Returns the 3D position of `atom` within `conformer`.

  Args:
    atom: rdkit atom object.
    conformer: rdkit conformer holding 3D coordinates, or None. (The previous
      annotation said np.ndarray, but `GetAtomPosition` is a Conformer method.)

  Returns:
    [x, y, z] coordinates of the atom taken from the conformer, or
    [nan, nan, nan] when no conformer is available (the old docstring
    incorrectly claimed -1 values).
  """
  # Explicit None check: the previous truthiness test relied on the conformer
  # object's __bool__, which is not part of its documented contract.
  if conformer is not None:
    pos = conformer.GetAtomPosition(atom.GetIdx())
    return [pos.x, pos.y, pos.z]
  return [np.nan, np.nan, np.nan]
def compute_conformer(smile: str, max_iter: int = -1) -> np.ndarray:
  """Computes per-atom 3D positions for a SMILES string.

  Args:
    smile: Smile string.
    max_iter: Maximum number of iterations to perform when optimising MMFF
      force field. If set to <= 0, energy optimisation is not performed.

  Returns:
    float32 array of shape (num_atoms, 3); all-NaN rows when conformer
    generation failed.

  Raises:
    RuntimeError: If unable to convert smile string to RDKit mol.
  """
  mol = rdkit.Chem.MolFromSmiles(smile)
  if not mol:
    raise RuntimeError('Unable to convert smile to molecule: %s' % smile)
  conformer = None
  try:
    mol = generate_conformers(
        mol,
        max_num_conformers=1,
        random_seed=45,
        prune_rms_thresh=0.01,
        max_iter=max_iter)
  except IOError as e:
    logging.exception('Failed to generate conformers for %s . IOError %s.',
                      smile, e)
  except ValueError:
    logging.error('Failed to generate conformers for %s . ValueError', smile)
  except:  # pylint: disable=bare-except
    logging.error('Failed to generate conformers for %s.', smile)
  else:
    # Only read a conformer when generation actually succeeded.
    conformer = list(mol.GetConformers())[0]
  atom_features_list = [
      atom_to_feature_vector(atom, conformer) for atom in mol.GetAtoms()]
  return np.array(atom_features_list, dtype=np.float32)
def get_random_rotation_matrix(include_mirror_symmetry: bool) -> tf.Tensor:
  """Samples one random 3D rotation, optionally composed with a mirror."""
  rotation = _get_random_rotation_3d()
  if not include_mirror_symmetry:
    return rotation
  # Compose with a random axis flip so the full O(3) group is covered.
  return tf.matmul(rotation, _get_random_mirror_symmetry())
def rotate(vectors: tf.Tensor, rotation_matrix: tf.Tensor) -> tf.Tensor:
  """Applies `rotation_matrix` to a batch of row vectors."""
  rotated = tf.matmul(vectors, rotation_matrix)
  return rotated
def _embed_conformers(
    molecule: Chem.rdchem.Mol,
    max_num_conformers: int,
    random_seed: int,
    prune_rms_thresh: float,
    fallback_to_random: bool,
    *,
    use_random: bool = False,
) -> Chem.rdchem.Mol:
  """Returns a copy of `molecule` with up to `max_num_conformers` embedded.

  The first attempt uses ETKDG distance geometry; when it fails and
  `fallback_to_random` is set, a single retry is made from random initial
  coordinates.

  Args:
    molecule: molecular representation of the compound.
    max_num_conformers: maximum number of conformers to generate.
    random_seed: random seed for conformer generation.
    prune_rms_thresh: RMSD threshold used to prune near-duplicate conformers.
    fallback_to_random: if embedding fails, retry from random coordinates.
    use_random: start directly from random coordinates; only set internally
      by the fallback retry.

  Raises:
    ValueError: if no conformers can be obtained for the molecule.
  """
  candidate = copy.deepcopy(molecule)
  # ETKDGv3 = experimental-torsion basic knowledge distance geometry: random
  # initial conformations that satisfy geometric constraints such as bounds
  # on interatomic distances.
  params = AllChem.ETKDGv3()
  params.randomSeed = random_seed
  params.pruneRmsThresh = prune_rms_thresh
  params.numThreads = -1
  params.useRandomCoords = use_random
  conf_ids = AllChem.EmbedMultipleConfs(candidate, max_num_conformers, params)
  if conf_ids:
    return candidate
  if fallback_to_random and not use_random:
    # Retry exactly once, this time from random coordinates.
    return _embed_conformers(
        candidate,
        max_num_conformers,
        random_seed,
        prune_rms_thresh,
        fallback_to_random,
        use_random=True)
  raise ValueError('Cant get conformers')
def _minimize_by_mmff(
    molecule: Chem.rdchem.Mol,
    max_iter: int,
) -> Optional[Chem.rdchem.Mol]:
  """Optimises all conformers of a molecule with the MMFF force field.

  Args:
    molecule: a datastructure containing conformers.
    max_iter: maximum number of force-field optimisation iterations.

  Returns:
    A copy of `molecule` with optimised conformers, or None when MMFF
    parameters are unavailable for this molecule.
  """
  mmff_props = AllChem.MMFFGetMoleculeProperties(molecule)
  if mmff_props is None:
    # MMFF cannot parameterise this molecule at all.
    return None
  minimized = copy.deepcopy(molecule)
  for conf_id in range(minimized.GetNumConformers()):
    force_field = AllChem.MMFFGetMoleculeForceField(
        minimized, mmff_props, confId=conf_id, ignoreInterfragInteractions=False)
    force_field.Initialize()
    # Optimises the conformer within the mol in place.
    force_field.Minimize(max_iter)
  return minimized
def _minimize_by_uff(
    molecule: Chem.rdchem.Mol,
    max_iter: int,
) -> Chem.rdchem.Mol:
  """Optimises all conformers of a molecule with the UFF force field.

  Args:
    molecule: a datastructure containing conformers.
    max_iter: maximum number of force-field optimisation iterations.

  Returns:
    A copy of `molecule` with optimised conformers.
  """
  minimized = copy.deepcopy(molecule)
  for conf_id in range(minimized.GetNumConformers()):
    force_field = AllChem.UFFGetMoleculeForceField(minimized, confId=conf_id)
    force_field.Initialize()
    # Optimises the conformer within the mol in place.
    force_field.Minimize(max_iter)
  return minimized
def _get_symmetry_rotation_matrix(sign: tf.Tensor) -> tf.Tensor:
  """Builds the 3x3 matrix that mirrors the x axis when `sign` is -1."""
  zero = tf.zeros_like(sign)
  one = tf.ones_like(sign)
  # Identity matrix except that entry (0, 0) carries the sampled sign.
  entries = [
      sign, zero, zero,
      zero, one, zero,
      zero, zero, one,
  ]
  return tf.reshape(tf.stack(entries, axis=-1), (3, 3))
def _quaternion_to_rotation_matrix(quaternion: tf.Tensor) -> tf.Tensor:
  """Converts a unit quaternion (shape [4]) to a 3x3 rotation matrix."""
  q0, q1, q2, q3 = tf.unstack(quaternion)
  # Standard quaternion-to-matrix formula, (w, x, y, z) component order.
  row0 = [2 * (q0 * q0 + q1 * q1) - 1,
          2 * (q1 * q2 - q0 * q3),
          2 * (q1 * q3 + q0 * q2)]
  row1 = [2 * (q1 * q2 + q0 * q3),
          2 * (q0 * q0 + q2 * q2) - 1,
          2 * (q2 * q3 - q0 * q1)]
  row2 = [2 * (q1 * q3 - q0 * q2),
          2 * (q2 * q3 + q0 * q1),
          2 * (q0 * q0 + q3 * q3) - 1]
  matrix = tf.stack(row0 + row1 + row2, axis=-1)
  return tf.reshape(matrix, [3, 3])
def _get_random_rotation_3d() -> tf.Tensor:
  """Samples a random 3D rotation via a normalised random quaternion."""
  quaternion = tf.random.normal(shape=[4], dtype=tf.float32)
  quaternion /= tf.linalg.norm(quaternion, axis=-1, keepdims=True)
  return _quaternion_to_rotation_matrix(quaternion)
def _get_random_mirror_symmetry() -> tf.Tensor:
  """Returns a mirror matrix whose x-axis sign is uniformly +/-1."""
  coin = tf.random.uniform(shape=(), minval=0, maxval=2, dtype=tf.int32)
  sign = tf.cast(2 * coin - 1, tf.float32)
  return _get_symmetry_rotation_matrix(sign)
| {
"content_hash": "d932f05768b82cf97735df1e0b644f47",
"timestamp": "",
"source": "github",
"line_count": 310,
"max_line_length": 80,
"avg_line_length": 31.803225806451614,
"alnum_prop": 0.6887108225986408,
"repo_name": "deepmind/deepmind-research",
"id": "3217a4b2a1083fa99371966e6de63b9e61dba3d1",
"size": "10451",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ogb_lsc/pcq/conformer_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1002"
},
{
"name": "C++",
"bytes": "5765"
},
{
"name": "Jupyter Notebook",
"bytes": "12330730"
},
{
"name": "Lua",
"bytes": "76186"
},
{
"name": "OpenEdge ABL",
"bytes": "15630"
},
{
"name": "PureBasic",
"bytes": "8"
},
{
"name": "Python",
"bytes": "3419119"
},
{
"name": "Racket",
"bytes": "226692"
},
{
"name": "Shell",
"bytes": "84450"
},
{
"name": "Starlark",
"bytes": "3463"
}
],
"symlink_target": ""
} |
from wtforms import TextField, SelectMultipleField, SelectField
from wtforms.validators import DataRequired, NumberRange, Email
from ..datacenter import DatacenterCheckForm
class CheckForm(DatacenterCheckForm):
    ''' Monitor form for cloudflare traffic decrease monitoring '''
    # Title/description are rendered in the UI; note the 'analyitics' typo in
    # the description is user-visible text and left untouched here on purpose.
    title = "CloudFlare: Decrease in Traffic"
    description = """
    This monitor utilizes CloudFlare's zone analyitics to detect a decrease in HTTP requests. This monitor can be used to detect changes to HTTP traffic and be combined with scaling reactions. The threshold setting allows you to define the percentage of change to trigger on. For example; if more than 50% of the web traffic decreases trigger this monitor as True.
    """
    placeholders = DatacenterCheckForm.placeholders
    # Value the monitor should report when the decrease is detected.
    return_choices = [
        ("true", "True"),
        ("false", "False")
    ]
    # NOTE(review): the numeric values do not follow one obvious
    # minutes-based scale (-30 -> "1 minute" but -10080 -> "1 day");
    # confirm the unit against the CloudFlare zone-analytics API.
    start_choices = [
        ("-30", "1 minute"),
        ("-360", "15 minutes"),
        ("-720", "30 minutes"),
        ("-1440", "1 hour"),
        ("-10080", "1 day")
    ]
    # CloudFlare account credentials.
    email = TextField(
        "CloudFlare Email",
        description=DatacenterCheckForm.descriptions['cloudflare']['email'],
        validators=[Email(message='Email address invalid')])
    domain = TextField(
        "Domain",
        description=DatacenterCheckForm.descriptions['domain'],
        validators=[DataRequired(message='Domain is a required field')])
    apikey = TextField(
        "API Key",
        description=DatacenterCheckForm.descriptions['apikey'],
        validators=[DataRequired(message='API Key is a required field')])
    # NOTE(review): NumberRange validates field.data, which is a string for a
    # TextField -- verify that coercion to a number happens upstream.
    threshold = TextField(
        "Threshold",
        description = """
        Define the percentage of change to trigger this monitor on. If this monitor should be True when traffic decreases by 20% than the value here should be 20
        """,
        validators=[DataRequired(message='Threshold is a required field'), NumberRange(min=1, message="Threshold must be a number between 1 - 100") ])
    start_time = SelectField(
        "Time Span",
        description=DatacenterCheckForm.descriptions['cloudflare']['timespan'],
        choices=start_choices,
        validators=[DataRequired(message="Time Span is a required field")])
    return_value = SelectField(
        "Return Value",
        description=DatacenterCheckForm.descriptions['return_value'],
        choices=return_choices,
        validators=[DataRequired(message="Return Value is a required field")])
# The module is import-only; the guard simply allows safe direct execution.
if __name__ == '__main__':
    pass
| {
"content_hash": "4a0d382e0bec29d86bf400480cba7ddb",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 364,
"avg_line_length": 43.824561403508774,
"alnum_prop": 0.6725380304243395,
"repo_name": "Runbook/runbook",
"id": "4dcc8084fbd478d1be5bf1c0e567e03184c537a9",
"size": "2792",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/web/monitorforms/cloudflare-traffic-decrease/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "17816"
},
{
"name": "HTML",
"bytes": "227999"
},
{
"name": "JavaScript",
"bytes": "4250"
},
{
"name": "Python",
"bytes": "754910"
},
{
"name": "Shell",
"bytes": "5859"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Adds the optional ``results_note`` text field to the Series model."""

    dependencies = [
        ('competitions', '0004_auto_20151024_1653'),
    ]

    operations = [
        migrations.AddField(
            model_name='series',
            name='results_note',
            # null=True + blank=True: the note is entirely optional.
            field=models.CharField(max_length=500, null=True, verbose_name='note in results', blank=True),
            preserve_default=True,
        ),
    ]
| {
"content_hash": "25206f4e58c84ae30ac1d361d4bb52c8",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 106,
"avg_line_length": 25.157894736842106,
"alnum_prop": 0.6108786610878661,
"repo_name": "tbabej/roots",
"id": "cec4ce664b1e49fe1149486ba2d6faae262c8308",
"size": "502",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "competitions/migrations/0005_series_results_note.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "912"
},
{
"name": "HTML",
"bytes": "94535"
},
{
"name": "JavaScript",
"bytes": "16550"
},
{
"name": "Python",
"bytes": "274597"
},
{
"name": "TeX",
"bytes": "2163"
}
],
"symlink_target": ""
} |
import logging
import json
import requests
import time
from collections import namedtuple
try:
from urllib.parse import urlparse as parse_url
except ImportError:
from urlparse import urlparse as parse_url
from .exceptions import (
SocketIOError, ConnectionError, TimeoutError, PacketError)
from .symmetries import _get_text
from .transports import (
_get_response, TRANSPORTS,
_WebsocketTransport, _XHR_PollingTransport, _JSONP_PollingTransport)
__version__ = '0.5.4'
# Handshake result: session id, heartbeat timeout (seconds) and the
# transport names advertised by the server.
_SocketIOSession = namedtuple('_SocketIOSession', [
    'id',
    'heartbeat_timeout',
    'server_supported_transports',
])
_log = logging.getLogger(__name__)  # module-level logger
PROTOCOL_VERSION = 1
RETRY_INTERVAL_IN_SECONDS = 1
class BaseNamespace(object):
    """Base class defining client-side behaviour for one namespace path."""

    def __init__(self, _transport, path):
        self._transport = _transport
        self.path = path
        self._was_connected = False
        self._callback_by_event = {}
        self.initialize()

    def initialize(self):
        """Hook for subclass set-up; runs once at construction."""
        pass

    def message(self, data='', callback=None):
        """Send a message packet to the server for this namespace."""
        self._transport.message(self.path, data, callback)

    def emit(self, event, *args, **kw):
        """Emit a custom event; a trailing callable becomes the ack callback."""
        callback, remaining_args = find_callback(args, kw)
        self._transport.emit(self.path, event, remaining_args, callback)

    def disconnect(self):
        """Disconnect this namespace from the server."""
        self._transport.disconnect(self.path)

    def on(self, event, callback):
        """Register `callback` to handle a custom server-sent `event`."""
        self._callback_by_event[event] = callback

    def on_connect(self):
        """Hook: server connected; override as needed."""
        pass

    def on_disconnect(self):
        """Hook: server disconnected; override as needed."""
        pass

    def on_heartbeat(self):
        """Hook: server heartbeat received; override as needed."""
        pass

    def on_message(self, data):
        """Hook: server message received; override as needed."""
        pass

    def on_event(self, event, *args):
        """Fallback handler for events without an on() registration.

        A trailing callable among the arguments is treated as an ack
        callback and invoked with the remaining arguments.
        """
        callback, remaining_args = find_callback(args)
        if callback:
            callback(*remaining_args)

    def on_error(self, reason, advice):
        """Hook: server reported an error; override as needed."""
        pass

    def on_noop(self):
        """Hook: server sent a noop; override as needed."""
        pass

    def on_open(self, *args):
        pass

    def on_close(self, *args):
        pass

    def on_retry(self, *args):
        pass

    def on_reconnect(self, *args):
        pass

    def _find_event_callback(self, event):
        """Resolve the handler for `event`, preferring on() registrations."""
        if event in self._callback_by_event:
            return self._callback_by_event[event]
        # The first 'connect' is genuine; any later one is really a reconnect.
        if event == 'connect':
            if self._was_connected:
                event = 'reconnect'
            else:
                self._was_connected = True
        return getattr(
            self,
            'on_' + event.replace(' ', '_'),
            lambda *args: self.on_event(event, *args))
class LoggingNamespace(BaseNamespace):
    """Namespace that logs every inbound event before delegating upward."""

    def _log(self, level, msg, *attrs):
        # Prefix every record with the transport URL for multi-server logs.
        prefixed = '%s: %s' % (self._transport._url, msg)
        _log.log(level, prefixed, *attrs)

    def on_connect(self):
        self._log(logging.DEBUG, '%s [connect]', self.path)
        super(LoggingNamespace, self).on_connect()

    def on_disconnect(self):
        self._log(logging.DEBUG, '%s [disconnect]', self.path)
        super(LoggingNamespace, self).on_disconnect()

    def on_heartbeat(self):
        self._log(logging.DEBUG, '%s [heartbeat]', self.path)
        super(LoggingNamespace, self).on_heartbeat()

    def on_message(self, data):
        self._log(logging.INFO, '%s [message] %s', self.path, data)
        super(LoggingNamespace, self).on_message(data)

    def on_event(self, event, *args):
        callback, args = find_callback(args)
        shown = [repr(arg) for arg in args]
        if callback:
            shown.append('callback(*args)')
        self._log(logging.INFO, '%s [event] %s(%s)', self.path, event,
                  ', '.join(shown))
        super(LoggingNamespace, self).on_event(event, *args)

    def on_error(self, reason, advice):
        self._log(logging.INFO, '%s [error] %s', self.path, advice)
        super(LoggingNamespace, self).on_error(reason, advice)

    def on_noop(self):
        self._log(logging.INFO, '%s [noop]', self.path)
        super(LoggingNamespace, self).on_noop()

    def on_open(self, *args):
        self._log(logging.INFO, '%s [open] %s', self.path, args)
        super(LoggingNamespace, self).on_open(*args)

    def on_close(self, *args):
        self._log(logging.INFO, '%s [close] %s', self.path, args)
        super(LoggingNamespace, self).on_close(*args)

    def on_retry(self, *args):
        self._log(logging.INFO, '%s [retry] %s', self.path, args)
        super(LoggingNamespace, self).on_retry(*args)

    def on_reconnect(self, *args):
        self._log(logging.INFO, '%s [reconnect] %s', self.path, args)
        super(LoggingNamespace, self).on_reconnect(*args)
class SocketIO(object):
    """Create a socket.io client that connects to a socket.io server
    at the specified host and port.

    The connection itself is established lazily, on first use.

    - Define the behavior of the client by specifying a custom Namespace.
    - Prefix host with https:// to use SSL.
    - Set wait_for_connection=True to block until we have a connection.
    - Specify desired transports=['websocket', 'xhr-polling'].
    - Pass query params, headers, cookies, proxies as keyword arguments.

    SocketIO('localhost', 8000,
        params={'q': 'qqq'},
        headers={'Authorization': 'Basic ' + b64encode('username:password')},
        cookies={'a': 'aaa'},
        proxies={'https': 'https://proxy.example.com:8080'})
    """
    def __init__(
            self, host, port=None, Namespace=None,
            wait_for_connection=True, transports=TRANSPORTS,
            resource='socket.io', **kw):
        # Connection parameters only; the transport is negotiated lazily
        # the first time self._transport is accessed.
        self.is_secure, self._base_url = _parse_host(host, port, resource)
        self.wait_for_connection = wait_for_connection
        # Maps namespace path ('' for root) to its Namespace instance.
        self._namespace_by_path = {}
        self._client_supported_transports = transports
        # Extra keyword arguments (params, headers, cookies, proxies) are
        # forwarded to the handshake request and the transport.
        self._kw = kw
        if Namespace:
            self.define(Namespace)
    def _log(self, level, msg, *attrs):
        # Prefix every record with this client's base URL.
        _log.log(level, '%s: %s' % (self._base_url, msg), *attrs)
    def __enter__(self):
        # Context-manager support: `with SocketIO(...) as io: ...`
        return self
    def __exit__(self, *exception_pack):
        # Always tear everything down when leaving the `with` block.
        self.disconnect()
    def __del__(self):
        # Best-effort cleanup when the client is garbage-collected.
        self.disconnect()
def define(self, Namespace, path=''):
if path:
self._transport.connect(path)
namespace = Namespace(self._transport, path)
self._namespace_by_path[path] = namespace
return namespace
def on(self, event, callback, path=''):
if path not in self._namespace_by_path:
self.define(BaseNamespace, path)
return self.get_namespace(path).on(event, callback)
    def message(self, data='', callback=None, path=''):
        # Send a plain message packet to the server on the given path.
        self._transport.message(path, data, callback)
def emit(self, event, *args, **kw):
path = kw.get('path', '')
callback, args = find_callback(args, kw)
self._transport.emit(path, event, args, callback)
def wait(self, seconds=None, for_callbacks=False):
"""Wait in a loop and process events as defined in the namespaces.
- Omit seconds, i.e. call wait() without arguments, to wait forever.
"""
warning_screen = _yield_warning_screen(seconds)
timeout = min(self._heartbeat_interval, seconds)
for elapsed_time in warning_screen:
if self._stop_waiting(for_callbacks):
break
try:
try:
self._process_events(timeout)
except TimeoutError:
pass
next(self._heartbeat_pacemaker)
except ConnectionError as e:
try:
warning = Exception('[connection error] %s' % e)
warning_screen.throw(warning)
except StopIteration:
self._log(logging.WARNING, warning)
try:
namespace = self._namespace_by_path['']
namespace.on_disconnect()
except KeyError:
pass
    def _process_events(self, timeout=None):
        # Receive packets until the transport times out; a malformed packet
        # is logged and skipped rather than killing the event loop.
        for packet in self._transport.recv_packet(timeout):
            try:
                self._process_packet(packet)
            except PacketError as e:
                self._log(logging.WARNING, '[packet error] %s', e)
    def _process_packet(self, packet):
        # Dispatch one decoded packet to the handler for its type code,
        # passing along the namespace's event-callback resolver.
        code, packet_id, path, data = packet
        namespace = self.get_namespace(path)
        delegate = self._get_delegate(code)
        delegate(packet, namespace._find_event_callback)
def _stop_waiting(self, for_callbacks):
# Use __transport to make sure that we do not reconnect inadvertently
if for_callbacks and not self.__transport.has_ack_callback:
return True
if self.__transport._wants_to_disconnect:
return True
return False
    def wait_for_callbacks(self, seconds=None):
        # Convenience wrapper: wait until every emit() ack callback has fired.
        self.wait(seconds, for_callbacks=True)
    def disconnect(self, path=''):
        # Disconnect the namespace at `path` and drop its handler.
        # Safe to call repeatedly.
        try:
            self._transport.disconnect(path)
        except ReferenceError:
            # Presumably the transport's underlying object is already gone
            # (e.g. during interpreter shutdown) -- nothing left to close.
            pass
        try:
            namespace = self._namespace_by_path[path]
            namespace.on_disconnect()
            del self._namespace_by_path[path]
        except KeyError:
            # No namespace was ever defined for this path.
            pass
@property
def connected(self):
try:
transport = self.__transport
except AttributeError:
return False
else:
return transport.connected
    @property
    def _transport(self):
        """Return a live transport, negotiating and connecting one if needed.

        Lazily (re)establishes the connection: on first access, or after the
        connection drops, it fetches a handshake session, filters the
        transports, starts the heartbeat pacemaker and tries each candidate
        transport in turn.
        """
        try:
            if self.connected:
                return self.__transport
        except AttributeError:
            pass
        # No usable transport yet -- perform the socket.io handshake.
        socketIO_session = self._get_socketIO_session()
        supported_transports = self._get_supported_transports(socketIO_session)
        # The pacemaker needs the negotiated heartbeat timeout; advance it
        # once so it is parked at its first yield.
        self._heartbeat_pacemaker = self._make_heartbeat_pacemaker(
            heartbeat_timeout=socketIO_session.heartbeat_timeout)
        next(self._heartbeat_pacemaker)
        warning_screen = _yield_warning_screen(seconds=None)
        for elapsed_time in warning_screen:
            try:
                self._transport_name = supported_transports.pop(0)
            except IndexError:
                # Every candidate transport failed to connect.
                raise ConnectionError('Could not negotiate a transport')
            try:
                self.__transport = self._get_transport(
                    socketIO_session, self._transport_name)
                break
            except ConnectionError:
                # This transport failed; fall through to the next candidate.
                pass
        # Re-point existing namespaces at the fresh transport and re-announce
        # any non-root paths to the server.
        for path, namespace in self._namespace_by_path.items():
            namespace._transport = self.__transport
            if path:
                self.__transport.connect(path)
        return self.__transport
    def _get_socketIO_session(self):
        """Handshake with the server, retrying until it succeeds.

        When wait_for_connection is False, the first ConnectionError
        propagates.  Otherwise the failure is thrown into the warning
        screen generator, which deduplicates and throttles the retries;
        if the screen is exhausted (StopIteration) the warning is logged
        directly.
        """
        warning_screen = _yield_warning_screen(seconds=None)
        for elapsed_time in warning_screen:
            try:
                return _get_socketIO_session(
                    self.is_secure, self._base_url, **self._kw)
            except ConnectionError as e:
                if not self.wait_for_connection:
                    raise
                warning = Exception('[waiting for connection] %s' % e)
                try:
                    warning_screen.throw(warning)
                except StopIteration:
                    self._log(logging.WARNING, warning)
def _get_supported_transports(self, session):
self._log(
logging.DEBUG, '[transports available] %s',
' '.join(session.server_supported_transports))
supported_transports = [
x for x in self._client_supported_transports if
x in session.server_supported_transports]
if not supported_transports:
raise SocketIOError(' '.join([
'could not negotiate a transport:',
'client supports %s but' % ', '.join(
self._client_supported_transports),
'server supports %s' % ', '.join(
session.server_supported_transports),
]))
return supported_transports
def _get_transport(self, session, transport_name):
self._log(logging.DEBUG, '[transport chosen] %s', transport_name)
return {
'websocket': _WebsocketTransport,
'xhr-polling': _XHR_PollingTransport,
'jsonp-polling': _JSONP_PollingTransport,
}[transport_name](session, self.is_secure, self._base_url, **self._kw)
    def _make_heartbeat_pacemaker(self, heartbeat_timeout):
        """Create a generator that paces heartbeats at half the server's
        heartbeat timeout.

        Callers prime it with next() (see _transport) and then resume it
        periodically; each resumption checks the clock and sends a
        heartbeat once the interval has elapsed.
        """
        self._heartbeat_interval = heartbeat_timeout / 2
        heartbeat_time = time.time()
        while True:
            yield
            if time.time() - heartbeat_time > self._heartbeat_interval:
                heartbeat_time = time.time()
                self._transport.send_heartbeat()
def get_namespace(self, path=''):
try:
return self._namespace_by_path[path]
except KeyError:
raise PacketError('unhandled namespace path (%s)' % path)
def _get_delegate(self, code):
try:
return {
'0': self._on_disconnect,
'1': self._on_connect,
'2': self._on_heartbeat,
'3': self._on_message,
'4': self._on_json,
'5': self._on_event,
'6': self._on_ack,
'7': self._on_error,
'8': self._on_noop,
}[code]
except KeyError:
raise PacketError('unexpected code (%s)' % code)
    def _on_disconnect(self, packet, find_event_callback):
        # Packet type '0': fire the user's 'disconnect' handler.
        find_event_callback('disconnect')()
    def _on_connect(self, packet, find_event_callback):
        # Packet type '1': fire the user's 'connect' handler.
        find_event_callback('connect')()
    def _on_heartbeat(self, packet, find_event_callback):
        # Packet type '2': fire the user's 'heartbeat' handler.
        find_event_callback('heartbeat')()
def _on_message(self, packet, find_event_callback):
code, packet_id, path, data = packet
args = [data]
if packet_id:
args.append(self._prepare_to_send_ack(path, packet_id))
find_event_callback('message')(*args)
def _on_json(self, packet, find_event_callback):
code, packet_id, path, data = packet
args = [json.loads(data)]
if packet_id:
args.append(self._prepare_to_send_ack(path, packet_id))
find_event_callback('message')(*args)
def _on_event(self, packet, find_event_callback):
code, packet_id, path, data = packet
value_by_name = json.loads(data)
event = value_by_name['name']
args = value_by_name.get('args', [])
if packet_id:
args.append(self._prepare_to_send_ack(path, packet_id))
find_event_callback(event)(*args)
    def _on_ack(self, packet, find_event_callback):
        """Packet type '6': run the callback registered for an earlier emit.

        The data payload is '<message_id>[+<json_args>]'.  Note that the
        message id is deliberately taken from the payload, shadowing the
        packet_id field of the packet tuple.
        """
        code, packet_id, path, data = packet
        data_parts = data.split('+', 1)
        packet_id = data_parts[0]
        try:
            ack_callback = self._transport.get_ack_callback(packet_id)
        except KeyError:
            # No callback was registered for this id; nothing to do.
            return
        args = json.loads(data_parts[1]) if len(data_parts) > 1 else []
        ack_callback(*args)
def _on_error(self, packet, find_event_callback):
code, packet_id, path, data = packet
reason, advice = data.split('+', 1)
find_event_callback('error')(reason, advice)
    def _on_noop(self, packet, find_event_callback):
        # Packet type '8': no-operation, still observable via a 'noop' handler.
        find_event_callback('noop')()
    def _prepare_to_send_ack(self, path, packet_id):
        'Return function that acknowledges the server'
        # Closure over path/packet_id; calling it forwards any positional
        # args back to the server as the ack payload.
        return lambda *args: self._transport.ack(path, packet_id, *args)
def find_callback(args, kw=None):
    'Return callback whether passed as a last argument or as a keyword'
    # A trailing callable positional argument wins over the keyword.
    if args and callable(args[-1]):
        return args[-1], args[:-1]
    try:
        callback = kw['callback']
    except (KeyError, TypeError):
        # kw is None/unsubscriptable or has no 'callback' entry.
        callback = None
    return callback, args
def _parse_host(host, port, resource):
    """Normalize host/port/resource into (is_secure, base_url).

    A scheme-less host defaults to http; the port falls back to the URL's
    own port, then to 443/80 depending on the scheme.
    """
    if not host.startswith('http'):
        host = 'http://' + host
    parsed = parse_url(host)
    is_secure = parsed.scheme == 'https'
    default_port = 443 if is_secure else 80
    chosen_port = port or parsed.port or default_port
    base_url = '%s:%d%s/%s/%s' % (
        parsed.hostname, chosen_port, parsed.path, resource,
        PROTOCOL_VERSION)
    return is_secure, base_url
def _yield_warning_screen(seconds=None):
    """Yield elapsed times while absorbing warnings thrown into the
    generator.

    Callers report a failure with generator.throw(exception); the screen
    logs each distinct message once and sleeps RETRY_INTERVAL_IN_SECONDS
    before resuming, throttling retry loops.
    """
    last_warning = None
    for elapsed_time in _yield_elapsed_time(seconds):
        try:
            yield elapsed_time
        except Exception as warning:
            # Deduplicate: only log when the message text changed.
            warning = str(warning)
            if last_warning != warning:
                last_warning = warning
                _log.warn(warning)
            time.sleep(RETRY_INTERVAL_IN_SECONDS)
def _yield_elapsed_time(seconds=None):
start_time = time.time()
if seconds is None:
while True:
yield time.time() - start_time
while time.time() - start_time < seconds:
yield time.time() - start_time
def _get_socketIO_session(is_secure, base_url, **kw):
    """Perform the socket.io handshake and parse the server's reply.

    The handshake response is 'id:heartbeat:close:transports'.
    """
    scheme = 'https' if is_secure else 'http'
    server_url = '%s://%s/' % (scheme, base_url)
    try:
        response = _get_response(requests.get, server_url, **kw)
    except TimeoutError as e:
        raise ConnectionError(e)
    session_parts = _get_text(response).split(':')
    return _SocketIOSession(
        id=session_parts[0],
        heartbeat_timeout=int(session_parts[1]),
        server_supported_transports=session_parts[3].split(','))
| {
"content_hash": "6b2a4901ac7c4c69aa16a9b0cb2d354b",
"timestamp": "",
"source": "github",
"line_count": 523,
"max_line_length": 79,
"avg_line_length": 34.5717017208413,
"alnum_prop": 0.5864166804933355,
"repo_name": "drewhutchison/socketIO-client",
"id": "44b4444e165d2f74a9346b7f5529d2f01b13d936",
"size": "18081",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "socketIO_client/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "2356"
},
{
"name": "Python",
"bytes": "39264"
}
],
"symlink_target": ""
} |
"""Fashion-MNIST dataset.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import os
import numpy as np
from tensorflow.python.keras._impl.keras.utils.data_utils import get_file
def load_data():
  """Loads the Fashion-MNIST dataset.

  Downloads the four gzipped IDX files (train/test labels and images),
  caching them via `get_file` under the 'datasets/fashion-mnist'
  cache subdirectory.

  Returns:
      Tuple of Numpy arrays: `(x_train, y_train), (x_test, y_test)`.
  """
  dirname = os.path.join('datasets', 'fashion-mnist')
  base = 'http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/'
  files = [
      'train-labels-idx1-ubyte.gz', 'train-images-idx3-ubyte.gz',
      't10k-labels-idx1-ubyte.gz', 't10k-images-idx3-ubyte.gz'
  ]
  paths = [
      get_file(fname, origin=base + fname, cache_subdir=dirname)
      for fname in files
  ]

  def _read_labels(path):
    # IDX label files carry an 8-byte header before the uint8 labels.
    with gzip.open(path, 'rb') as f:
      return np.frombuffer(f.read(), np.uint8, offset=8)

  def _read_images(path, count):
    # IDX image files carry a 16-byte header before the 28x28 uint8 pixels.
    with gzip.open(path, 'rb') as f:
      return np.frombuffer(f.read(), np.uint8, offset=16).reshape(
          count, 28, 28)

  y_train = _read_labels(paths[0])
  x_train = _read_images(paths[1], len(y_train))
  y_test = _read_labels(paths[2])
  x_test = _read_images(paths[3], len(y_test))
  return (x_train, y_train), (x_test, y_test)
| {
"content_hash": "f1f4e6c8e1577a01900bf3383a8ea1ac",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 76,
"avg_line_length": 29.217391304347824,
"alnum_prop": 0.6599702380952381,
"repo_name": "zasdfgbnm/tensorflow",
"id": "b9ae41a0d4d0e8d9df70e3fc1952e81c5f57e8d9",
"size": "2033",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/_impl/keras/datasets/fashion_mnist.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9096"
},
{
"name": "C",
"bytes": "341181"
},
{
"name": "C++",
"bytes": "37811513"
},
{
"name": "CMake",
"bytes": "193934"
},
{
"name": "Go",
"bytes": "1061098"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "551109"
},
{
"name": "Jupyter Notebook",
"bytes": "1940883"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "48122"
},
{
"name": "Objective-C",
"bytes": "12456"
},
{
"name": "Objective-C++",
"bytes": "94385"
},
{
"name": "PHP",
"bytes": "1556"
},
{
"name": "Perl",
"bytes": "6179"
},
{
"name": "Perl 6",
"bytes": "1357"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "32936295"
},
{
"name": "Ruby",
"bytes": "533"
},
{
"name": "Shell",
"bytes": "425164"
}
],
"symlink_target": ""
} |
"""rawtranscation RPCs QA test.
# Tests the following RPCs:
# - createrawtransaction
# - signrawtransaction
# - sendrawtransaction
# - decoderawtransaction
# - getrawtransaction
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
    """Exercises the raw-transaction RPCs (createrawtransaction,
    signrawtransaction, sendrawtransaction, decoderawtransaction and
    getrawtransaction), including multisig spending and parameter
    validation."""

    def __init__(self):
        super().__init__()
        # Start from a fresh chain so balance assertions are predictable.
        self.setup_clean_chain = True
        self.num_nodes = 3

    def setup_network(self, split=False):
        """Start three nodes and connect them in a full triangle."""
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
        #connect to a local machine for debugging
        #url = "http://bitcoinrpc:DP6DvqZtqXarpeNWyN3LZTFchCCyCUuHwNF7E8pX99x1@%s:%d" % ('127.0.0.1', 18332)
        #proxy = AuthServiceProxy(url)
        #proxy.url = url # store URL on proxy for info
        #self.nodes.append(proxy)
        connect_nodes_bi(self.nodes,0,1)
        connect_nodes_bi(self.nodes,1,2)
        connect_nodes_bi(self.nodes,0,2)
        self.is_network_split=False
        self.sync_all()

    def run_test(self):
        #prepare some coins for multiple *rawtransaction commands
        self.nodes[2].generate(1)
        self.sync_all()
        self.nodes[0].generate(101)
        self.sync_all()
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0)
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0)
        self.sync_all()
        self.nodes[0].generate(5)
        self.sync_all()

        #########################################
        # sendrawtransaction with missing input #
        #########################################
        inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] # won't exist
        outputs = { self.nodes[0].getnewaddress() : 4.998 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        rawtx = self.nodes[2].signrawtransaction(rawtx)
        # This will raise an exception since there are missing inputs
        assert_raises_jsonrpc(-25, "Missing inputs", self.nodes[2].sendrawtransaction, rawtx['hex'])

        #########################
        # RAW TX MULTISIG TESTS #
        #########################
        # 2of2 test: both keys belong to node2
        addr1 = self.nodes[2].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()
        addr1Obj = self.nodes[2].validateaddress(addr1)
        addr2Obj = self.nodes[2].validateaddress(addr2)
        mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
        mSigObjValid = self.nodes[2].validateaddress(mSigObj)
        #use balance deltas instead of absolute values
        bal = self.nodes[2].getbalance()
        # send 1.2 BTC to msig adr
        txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance

        # 2of3 test from different nodes: one key on node1, two on node2
        bal = self.nodes[2].getbalance()
        addr1 = self.nodes[1].getnewaddress()
        addr2 = self.nodes[2].getnewaddress()
        addr3 = self.nodes[2].getnewaddress()
        addr1Obj = self.nodes[1].validateaddress(addr1)
        addr2Obj = self.nodes[2].validateaddress(addr2)
        addr3Obj = self.nodes[2].validateaddress(addr3)
        mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])
        mSigObjValid = self.nodes[2].validateaddress(mSigObj)
        txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
        decTx = self.nodes[0].gettransaction(txId)
        rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
        sPK = rawTx['vout'][0]['scriptPubKey']['hex']
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()

        # THIS IS AN INCOMPLETE FEATURE:
        # NODE2 HAS TWO OF THREE KEYS AND THE FUNDS SHOULD BE SPENDABLE AND
        # COUNT AT BALANCE CALCULATION
        assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable

        txDetails = self.nodes[0].gettransaction(txId, True)
        rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
        # Locate the 2.2 output created above so it can be spent manually.
        vout = False
        for outpoint in rawTx['vout']:
            if outpoint['value'] == Decimal('2.20000000'):
                vout = outpoint
                break
        bal = self.nodes[0].getbalance()
        inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex']}]
        outputs = { self.nodes[0].getnewaddress() : 1.2 }
        rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
        rawTxPartialSigned = self.nodes[1].signrawtransaction(rawTx, inputs)
        assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
        rawTxSigned = self.nodes[2].signrawtransaction(rawTx, inputs)
        assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
        self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
        rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[0].getbalance(), bal+Decimal('500000.00000000')+Decimal('1.20000000')) #block reward + tx

        # getrawtransaction tests
        # 1. valid parameters - only supply txid
        txHash = rawTx["hash"]
        assert_equal(self.nodes[0].getrawtransaction(txHash), rawTxSigned['hex'])
        # 2. valid parameters - supply txid and 0 for non-verbose
        assert_equal(self.nodes[0].getrawtransaction(txHash, 0), rawTxSigned['hex'])
        # 3. valid parameters - supply txid and False for non-verbose
        assert_equal(self.nodes[0].getrawtransaction(txHash, False), rawTxSigned['hex'])
        # 4. valid parameters - supply txid and 1 for verbose.
        # We only check the "hex" field of the output so we don't need to update this test every time the output format changes.
        assert_equal(self.nodes[0].getrawtransaction(txHash, 1)["hex"], rawTxSigned['hex'])
        # 5. valid parameters - supply txid and True for non-verbose
        assert_equal(self.nodes[0].getrawtransaction(txHash, True)["hex"], rawTxSigned['hex'])
        # 6. invalid parameters - supply txid and string "Flase"
        assert_raises_jsonrpc(-3,"Invalid type", self.nodes[0].getrawtransaction, txHash, "Flase")
        # 7. invalid parameters - supply txid and empty array
        assert_raises_jsonrpc(-3,"Invalid type", self.nodes[0].getrawtransaction, txHash, [])
        # 8. invalid parameters - supply txid and empty dict
        assert_raises_jsonrpc(-3,"Invalid type", self.nodes[0].getrawtransaction, txHash, {})

        # A custom sequence number survives a decode round-trip.
        inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        decrawtx= self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['vin'][0]['sequence'], 1000)

        # 9. invalid parameters - sequence number out of range
        inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        assert_raises_jsonrpc(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
        # 10. invalid parameters - sequence number out of range
        inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967296}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        assert_raises_jsonrpc(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
        # Maximum valid sequence number (0xfffffffe) is accepted.
        inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967294}]
        outputs = { self.nodes[0].getnewaddress() : 1 }
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        decrawtx= self.nodes[0].decoderawtransaction(rawtx)
        assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
if __name__ == '__main__':
    # Allow running this functional test directly from the command line.
    RawTransactionsTest().main()
| {
"content_hash": "79a0b402907dc6faa597ddd00e41ca3d",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 147,
"avg_line_length": 46.53225806451613,
"alnum_prop": 0.6376660889659157,
"repo_name": "dogecoin/dogecoin",
"id": "1eae751547c52a0a646a4fe23746c20cba07d53d",
"size": "8919",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "qa/rpc-tests/rawtransactions.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28456"
},
{
"name": "C",
"bytes": "784129"
},
{
"name": "C++",
"bytes": "5215709"
},
{
"name": "CMake",
"bytes": "14198"
},
{
"name": "HTML",
"bytes": "20943"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "210413"
},
{
"name": "Makefile",
"bytes": "103238"
},
{
"name": "Nix",
"bytes": "234"
},
{
"name": "Objective-C++",
"bytes": "7252"
},
{
"name": "Python",
"bytes": "1216702"
},
{
"name": "QMake",
"bytes": "825"
},
{
"name": "Sage",
"bytes": "30192"
},
{
"name": "Shell",
"bytes": "51546"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from datetime import datetime
from dateutil.parser import parse
from django.utils.six import string_types
from django.utils.timezone import pytz
def hex_timestamp_to_datetime(hex_timestamp):
    """Converts hex timestamp to a datetime object.

    ``int(s, 16)`` already accepts an optional ``0x``/``0X`` prefix, so no
    manual prefixing is needed (the old code also rejected an uppercase
    ``0X`` prefix by producing ``0x0X...``).

    Note: ``datetime.fromtimestamp`` returns local time, so the doctest
    values below depend on the machine's timezone.

    >>> hex_timestamp_to_datetime('558BBCF9')
    datetime.datetime(2015, 6, 25, 8, 34, 1)
    >>> hex_timestamp_to_datetime('0x558BBCF9')
    datetime.datetime(2015, 6, 25, 8, 34, 1)
    >>> datetime.fromtimestamp(0x558BBCF9)
    datetime.datetime(2015, 6, 25, 8, 34, 1)
    """
    return datetime.fromtimestamp(int(hex_timestamp, 16))
def now_by_tz(tz='US/Central', ignoretz=True):
    """Gets the current datetime object by timezone.

    :param tz: the timezone to get the date for, either a name string
        (i.e. 'US/Central') or a tzinfo object
        (i.e. pytz.timezone('US/Central')).
    :param ignoretz: when True, strip the timezone so tzinfo is None.
    :return: the current datetime object for tz

    Examples:

    >>> now_by_tz('US/Pacific')
    2011-09-28 10:06:01.130025
    >>> now_by_tz('US/Pacific', False)
    2011-09-28 10:06:01.130025-07:00
    >>> now_by_tz(pytz.timezone('US/Central'))
    2011-09-28 12:06:01.130025
    >>> now_by_tz(pytz.timezone('US/Central'), False)
    2011-09-28 12:06:01.130025-05:00
    """
    if isinstance(tz, string_types):
        tz = pytz.timezone(tz)
    current = datetime.now(tz)
    return current.replace(tzinfo=None) if ignoretz else current
def tz_to_utc(dt, tz, ignoretz=True):
    """Converts a datetime object from the specified timezone to a UTC
    datetime.

    :param dt: the naive datetime, interpreted as local time in tz.
    :param tz: the timezone the datetime is currently in, either a name
        string (i.e. 'US/Central') or a tzinfo object.
    :param ignoretz: when True, strip the timezone so tzinfo is None.
    :return: the datetime object in UTC time.

    Examples:

    >>> tz_to_utc(datetime(2011, 11, 25, 9), 'US/Central')
    2011-11-25 15:00:00
    >>> tz_to_utc(datetime(2011, 11, 25, 9), pytz.timezone('US/Central'))
    2011-11-25 15:00:00
    >>> tz_to_utc(datetime(2011, 11, 25, 9), 'US/Central', False)
    2011-11-25 15:00:00+00:00
    """
    if isinstance(tz, string_types):
        tz = pytz.timezone(tz)
    utc_dt = tz.localize(dt).astimezone(pytz.timezone('UTC'))
    return utc_dt.replace(tzinfo=None) if ignoretz else utc_dt
def utc_to_tz(dt, tz, ignoretz=True):
    """Converts a UTC datetime object to the specific timezone.

    :param dt: the naive UTC datetime object to convert.
    :param tz: the timezone to convert into, either a name string
        (i.e. 'US/Central') or a tzinfo object.
    :param ignoretz: when True, strip the timezone so tzinfo is None.
    :return: the datetime object in the target timezone.

    Examples:

    >>> utc_to_tz(datetime(2011, 11, 25, 9), pytz.timezone('US/Central'))
    2011-11-25 03:00:00
    >>> utc_to_tz(datetime(2011, 11, 25, 9), 'US/Central', False)
    2011-11-25 03:00:00-06:00
    """
    if isinstance(tz, string_types):
        tz = pytz.timezone(tz)
    localized = pytz.utc.localize(dt).astimezone(tz)
    return localized.replace(tzinfo=None) if ignoretz else localized
def parse_date(dt, ignoretz=True, as_tz=None):
    """
    :param dt: string datetime to convert into a date object.
    :return: date object if the string can be parsed into a date.
        Otherwise, return None.

    Note: ``as_tz`` is accepted for interface compatibility but is
    currently unused.

    :see: http://labix.org/python-dateutil

    Examples:

    >>> parse_date('2011-12-30')
    datetime.date(2011, 12, 30)
    >>> parse_date('12/30/2011')
    datetime.date(2011, 12, 30)
    """
    parsed = parse_datetime(dt, ignoretz=ignoretz)
    if parsed is None:
        return None
    return parsed.date()
def parse_datetime(dt, ignoretz=True, **kwargs):
    """
    :param dt: string datetime to convert into datetime object.
    :return: datetime object if the string can be parsed into a datetime.
        Otherwise, return None.
    :see: http://labix.org/python-dateutil

    Examples:

    >>> parse_datetime('2011-12-30 13:45:12 CDT')
    2011-12-30 13:45:12
    >>> parse_datetime('12/30/2011 13:45:12 CDT')
    2011-12-30 13:45:12
    >>> parse_datetime('2011-12-30 13:45:12 CDT', ignoretz=False)
    2011-12-30 13:45:12-06:00
    >>> parse_datetime('12/30/2011 13:45:12 CDT', ignoretz=False)
    2011-12-30 13:45:12-06:00
    """
    try:
        return parse(dt, ignoretz=ignoretz, **kwargs)
    except Exception:
        # dateutil raises ValueError/OverflowError/TypeError on unparsable
        # input.  The previous bare ``except:`` also swallowed SystemExit
        # and KeyboardInterrupt; narrowing to Exception keeps the
        # best-effort "return None" contract without masking interrupts.
        return None
"""
print(parse_date('2011-12-30'))
print(parse_date('12/30/2011'))
print(parse_datetime('hello world'))
print(parse_datetime('12/30/2011 13:45:12 CDT'))
print(parse_datetime('2011-12-30 13:45:12 CDT', ignoretz=False))
print(parse_datetime('12/30/2011 13:45:12 CDT', ignoretz=False))
print(utc_to_tz(datetime(2011, 11, 25, 9), pytz.timezone('US/Central')))
print(utc_to_tz(datetime(2011, 11, 25, 9), 'US/Central', False))
print(tz_to_utc(datetime(2011, 11, 25, 9), pytz.timezone('US/Central')))
print(tz_to_utc(datetime(2011, 11, 25, 9), 'US/Central', False))
print(now_by_tz('US/Pacific'))
print(now_by_tz('US/Pacific', False))
print(now_by_tz(pytz.timezone('US/Central')))
print(now_by_tz(pytz.timezone('US/Central'), False))
"""
| {
"content_hash": "ab8ae3ee0e59d7543c049504a7f7bef6",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 80,
"avg_line_length": 31.56497175141243,
"alnum_prop": 0.6472167531770181,
"repo_name": "InfoAgeTech/django-core",
"id": "1b87f3425a3eb12bf465573828f28872eccdcc3d",
"size": "5587",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_core/utils/date_parsers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "452"
},
{
"name": "Python",
"bytes": "180676"
}
],
"symlink_target": ""
} |
"""
jsongit.models
The meat of jsongit code, particularly the Repository, resides here.
"""
import pygit2
# import collections
# import functools
import shutil
import itertools
from .exceptions import (
NotJsonError, InvalidKeyError, DifferentRepoError, StagedDataError)
from .wrappers import Commit, Diff, Conflict, Merge
import constants
import utils
class Repository(object):
    def __init__(self, repo, dumps, loads):
        """Wrap a pygit2 repository with JSON (de)serialization hooks.

        :param repo: the underlying pygit2 repository object
        :param dumps: callable serializing a value to a JSON string
        :param loads: callable parsing a JSON string back to a value
        """
        self._repo = repo
        # Default commit identity, taken from git's --global config.
        self._global_name = utils.global_config('user.name')
        self._global_email = utils.global_config('user.email')
        self._dumps = dumps
        self._loads = loads
def __eq__(self, other):
return self._repo.path == other._repo.path
def _key2ref(self, key):
"""The keys of a Repository are also references to the head commit
for that key. This translates keys to the appropriate path (in
/refs/heads/jsongit/).
:raises: :class:`InvalidKeyError`
"""
if not isinstance(key, basestring):
raise InvalidKeyError("%s must be a string to be a key." % key)
elif key[-1] == '.' or key[-1] == '/' or key[0] == '/' or key[0] == '.':
raise InvalidKeyError("Key '%s' should not start or end in . or /" % key)
else:
return 'refs/heads/jsongit/%s' % key
def _navigate_tree(self, oid, path):
"""Find an OID inside a nested tree.
"""
steps = path.split('/')
for step in steps:
oid = self._repo[oid][step].oid
return oid
    def _build_commit(self, pygit2_commit):
        """Wrap a raw pygit2 commit as a jsongit Commit.

        A jsongit commit tree holds a single entry whose name is the key
        and whose blob is the serialized value.
        """
        #assert key in pygit2_commit.tree
        key = pygit2_commit.tree[0].name
        raw = self._repo[pygit2_commit.tree[0].oid].data
        value = self._loads(raw)
        return Commit(self, key, value, pygit2_commit)
    def _head_target(self):
        # Name of the reference HEAD currently points at (e.g. a branch ref).
        return self._repo.lookup_reference('HEAD').target
    def _repo_head(self):
        """Return the commit object HEAD resolves to, or None when the
        target reference does not exist yet (unborn HEAD)."""
        try:
            return self._repo[self._repo.lookup_reference(self._head_target()).oid]
        except KeyError:
            return None
    def add(self, key, value):
        """Add a value for a key to the working tree, staging it for commit.

        >>> repo.add('added', 'but not committed!')
        >>> repo.index('added')
        u'but not committed!'
        >>> repo.show('added')
        KeyError: 'There is no key at added'

        :param key: The key to add
        :type key: string
        :param value: The value to insert
        :type value: anything that runs through :func:`json.dumps`

        :raises:
            :class:`NotJsonError <jsongit.NotJsonError>`
            :class:`InvalidKeyError <jsongit.InvalidKeyError>`
        """
        self._key2ref(key)  # throw InvalidKeyError
        try:
            blob_id = self._repo.write(pygit2.GIT_OBJ_BLOB, self._dumps(value))
        except ValueError as e:
            raise NotJsonError(e)
        except TypeError as e:
            raise NotJsonError(e)
        # Replace any existing index entry for this key.
        if key in self._repo.index:
            del self._repo.index[key]
        working_tree_id = self._repo.index.write_tree()
        working_tree = self._repo[working_tree_id]
        # Append a raw tree entry (mode 100644 = regular file) for the new
        # blob directly onto the serialized tree bytes.
        new_entry = b"100644 %s\x00%s" % (key, blob_id)
        tree_data = working_tree.read_raw() + new_entry
        working_tree_id = self._repo.write(pygit2.GIT_OBJ_TREE, tree_data)
        # Load the rebuilt tree back into the index and persist it.
        self._repo.index.read_tree(working_tree_id)
        self._repo.index.write()
def checkout(self, source, dest, **kwargs):
""" Replace the HEAD reference for dest with a commit that points back
to the value at source.
>>> repo.commit('spoon', {'material': 'silver'})
>>> repo.checkout('spoon', 'fork')
>>> repo.show('fork')
{u'material': u'silver'}
:param source: The source key.
:type source: string
:param dest: The destination key
:type dest: string
:param author:
(optional) The author of the commit. Defaults to global author.
:type author: pygit2.Signature
:param committer:
(optional) The committer of the commit. Will default to global author.
:type committer: pygit2.Signature
:raises: :class:`StagedDataError <jsongit.StagedDataError>`
"""
message = "Checkout %s from %s" % (dest, source)
commit = self.head(source)
self.commit(dest, commit.data, message=message, parents=[commit])
    def commit(self, key=None, value=None, add=True, **kwargs):
        """Commit the index to the working tree.

        >>> repo.add('foo', 'my very special bar')
        >>> repo.commit()
        >>> foo = repo.show('foo')
        u'my very special bar'

        If a key and value are specified, will add them immediately before
        committing them.

        >>> repo.commit('fast', 'and easy, too!')
        >>> foo = repo.show('fast')
        u'and easy, too!'

        :param key: The key
        :type key: string
        :param value: The value of the key.
        :type value: anything that runs through :func:`json.dumps`
        :param message:
            (optional) Message for first commit.  Defaults to "adding [key]"
            if there was no prior value.
        :type message: string
        :param author:
            (optional) The signature for the author of the first commit.
            Defaults to git's `--global` `author.name` and `author.email`.
        :type author: pygit2.Signature
        :param committer:
            (optional) The signature for the committer of the first commit.
            Defaults to author.
        :type committer: pygit2.Signature
        :param parents:
            (optional) The parents of this commit.  Defaults to the last
            commit for this key if it already exists, or an empty list if
            not.
        :type parents: list of :class:`Commit <jsongit.wrappers.Commit>`

        :raises:
            :class:`NotJsonError <jsongit.NotJsonError>`
            :class:`InvalidKeyError <jsongit.InvalidKeyError>`
        """
        # With no explicit key, commit every entry currently in the index.
        keys = [key] if key is not None else [e.path for e in self._repo.index]
        message = kwargs.pop('message', '')
        parents = kwargs.pop('parents', None)
        author = kwargs.pop('author', utils.signature(self._global_name,
                                                      self._global_email))
        committer = kwargs.pop('committer', author)
        if kwargs:
            raise TypeError("Unknown keyword args %s" % kwargs)
        if key is None and value is not None:
            # A value without a key makes no sense.
            raise InvalidKeyError()
        if parents is not None:
            for parent in parents:
                if parent.repo != self:
                    raise DifferentRepoError()
        if add is True and key is not None and value is not None:
            self.add(key, value)
        # First, commit the whole index to the repository's HEAD.
        repo_head = self._repo_head()
        tree_id = self._repo.index.write_tree()
        self._repo.create_commit(self._head_target(), author, committer,
                                 message, tree_id,
                                 [repo_head.oid] if repo_head else [])
        # TODO This will create some keys but not others if there is a bad key
        # NOTE(review): `parents` computed for the first key is reused for
        # every subsequent key when committing the whole index -- confirm
        # that is intended.
        for key in keys:
            if parents is None:
                parents = [self.head(key)] if self.committed(key) else []
            try:
                # create a single-entry tree for the commit.
                blob_id = self._navigate_tree(tree_id, key)
                key_tree_data = b"100644 %s\x00%s" % (key, blob_id)
                key_tree_id = self._repo.write(pygit2.GIT_OBJ_TREE, key_tree_data)
                self._repo.create_commit(self._key2ref(key), author,
                                         committer, message, key_tree_id,
                                         [parent.oid for parent in parents])
            except pygit2.GitError as e:
                if str(e).startswith('Failed to create reference'):
                    # pygit2 rejects malformed reference names.
                    raise InvalidKeyError(e)
                else:
                    raise e
def committed(self, key):
"""Determine whether there is a commit for a key.
>>> repo.committed('foo')
False
>>> repo.commit('foo', 'bar')
>>> repo.committed('foo')
True
:param key: the key to check
:type key: string
:returns: whether there is a commit for the key.
:rtype: boolean
"""
try:
self._repo.lookup_reference(self._key2ref(key))
return True
except KeyError:
return False
    def destroy(self):
        """Erase this Git repository entirely.  This will remove its
        directory.  Methods called on a repository or its objects after it
        is destroyed will throw exceptions.

        >>> repo.destroy()
        >>> repo.commit('foo', 'bar')
        AttributeError: 'NoneType' object has no attribute 'write'
        """
        shutil.rmtree(self._repo.path)
        # Drop the handle so further use fails loudly rather than touching
        # a deleted directory.
        self._repo = None
    def head(self, key, back=0):
        """Get the head commit for a key.

        >>> repo.commit('foo', 'bar', message="leveraging fu")
        >>> commit = repo.head('foo')
        >>> commit.message
        u'leveraging fu'
        >>> commit.author.name
        u'Jon Q. User'
        >>> commit.time
        1332438935L

        :param key: The key to look up.
        :type key: string
        :param back:
            (optional) How many steps back from head to get the commit.
            Defaults to 0 (the current head).
        :type back: integer

        :returns: the data
        :rtype: int, float, NoneType, unicode, boolean, list, or dict
        :raises:
            KeyError if there is no entry for key, IndexError if too many
            steps back are specified.
        """
        try:
            # islice(...).next() is Python 2 style; takes commit number
            # `back` from the log generator.
            return itertools.islice(self.log(key), back, back + 1).next()
        except KeyError:
            raise KeyError("There is no key at %s" % key)
        except StopIteration:
            raise IndexError("%s has fewer than %s commits" % (key, back))
def index(self, key):
"""Pull the current data for key from the index.
>>> repo.add('added', 'but not committed!')
>>> repo.index('added')
u'but not committed!'
:param key: the key to get data for
:type key: string
:returns: a value
:rtype: None, unicode, float, int, dict, list, or boolean
"""
self._repo.index.read()
raw = self._repo[self._repo.index[key].oid].data
return self._loads(raw)
    def merge(self, dest, key=None, commit=None, **kwargs):
        """Try to merge two commits together.

        >>> repo.commit('spoon', {'material': 'silver'})
        >>> repo.checkout('spoon', 'fork')
        >>> repo.show('fork')
        {u'material': u'silver'}
        >>> repo.commit('spoon', {'material': 'stainless'})
        >>> merge = repo.merge('fork', 'spoon')
        >>> merge.message
        u'Auto-merge of d0e0aa8061 and ce29b985cf from shared parent d21cb53771'
        >>> repo.show('fork')
        {u'material': u'stainless'}

        :param dest: the key to receive the merge
        :type dest: string
        :param key:
            (optional) the key of the merge source, which will use the head
            commit.
        :type key: string
        :param commit: (optional) the explicit commit to merge
        :type commit: :class:`Commit <jsongit.wrappers.Commit>`
        :param author:
            (optional) The author of this commit, if one is necessary.
            Defaults to global author.
        :type author: pygit2.Signature
        :param committer:
            (optional) The committer of this commit, if one is necessary.
            Will default to global author.
        :type committer: pygit2.Signature

        :returns: The results of the merge operation
        :rtype: :class:`Merge <jsongit.wrappers.Merge>`
        """
        if commit is None:
            commit = self.head(key)
        if commit.key == dest:
            raise ValueError('Cannot merge a key with itself')
        dest_head = self.head(dest)
        # No difference
        if commit.oid == dest_head.oid:
            return Merge(True, commit, dest_head, "Same commit", result=commit)
        # Do a merge if there were no overlapping changes
        # First, find the shared parent (first dest ancestor that is also
        # a source ancestor).
        try:
            shared_commit = (dc for dc in self.log(commit=dest_head)
                             if dc.oid in (sc.oid for sc in self.log(commit=commit))).next()
        except StopIteration:
            return Merge(False, commit, dest_head, "No shared parent")
        # Now, see if the diffs conflict
        source_diff = Diff(shared_commit.data, commit.data)
        dest_diff = Diff(shared_commit.data, dest_head.data)
        conflict = Conflict(source_diff, dest_diff)
        # No-go, the user's gonna have to figure this one out
        if conflict:
            return Merge(False, commit, dest_head, "Merge conflict",
                         conflict=conflict)
        # Sweet. we can apply all the diffs.
        else:
            merged_data = dest_diff.apply(source_diff.apply(shared_commit.data))
            message = "Auto-merge of %s and %s from shared parent %s" % (
                commit.hex[0:10], dest_head.hex[0:10], shared_commit.hex[0:10])
            parents = [dest_head, commit]
            result = self.commit(dest, merged_data, message=message, parents=parents, **kwargs)
            return Merge(True, commit, dest_head, message, result=result)
def log(self, key=None, commit=None, order=constants.GIT_SORT_TOPOLOGICAL):
""" Traverse commits from the specified key or commit. Must specify
one or the other.
>>> repo.commit('president', 'washington')
>>> repo.commit('president', 'adams')
>>> repo.commit('president', 'madison')
>>> log = repo.log('president')
>>> for commit in log:
... print(commit.data)
...
madison
adams
washington
:param key:
(optional) The key to look up a log for. Will look from the head
commit.
:type key: string
:param commit:
(optional) An explicit commit to look up log for.
:type commit: :class:`Commit <jsongit.wrappers.Commit>`
:param order:
(optional) Flags to order traversal. Valid flags are in
:mod:`constants <jsongit.constants>`.
Defaults to :const:`GIT_SORT_TOPOLOGICAL <jsongit.GIT_SORT_TOPOLOGICAL>`
:type order: number
:returns:
A generator to traverse commits, yielding
:class:`Commit <jsongit.wrappers.Commit>`s.
:rtype: generator
"""
if key is None and commit is None:
raise TypeError()
elif commit is None:
c = self._repo[self._repo.lookup_reference(self._key2ref(key)).oid]
commit = self._build_commit(c)
return (self._build_commit(c) for c in self._repo.walk(commit.oid, order))
def remove(self, key, force=False):
"""Remove the head reference to this key, so that it is no longer
visible in the repo. Prior commits and blobs remain in the repo, but
detached.
>>> repo.commit('foo', 'bar')
>>> repo.remove('foo')
>>> repo.committed('foo')
False
>>> repo.staged('foo')
False
:param key: The key to remove
:type key: string
:param force:
(optional) Whether to remove the HEAD reference even if
there is data staged in the index but not yet committed. If force
is true, the index entry will be removed as well.
:type force: boolean
:raises: :class:`StagedDataError jsongit.StagedDataError`
"""
if force is True or self.staged(key) is False:
del self._repo.index[key]
elif force is False and self.staged(key):
raise StagedDataError("There is data staged for %s" % key)
self._repo.lookup_reference(self._key2ref(key)).delete()
def reset(self, key):
"""Reset the value in the index to its HEAD value.
>>> repo.commit('creation', 'eons')
>>> repo.add('creation', 'seven days')
>>> repo.reset('creation')
>>> repo.index('creation')
u'eons'
:param key: the key to reset
:type key: string
"""
self.add(key, self.head(key).data)
def show(self, key, back=0):
"""Obtain the data at HEAD, or a certain number of steps back, for key.
>>> repo.commit('president', 'washington')
>>> repo.commit('president', 'adams')
>>> repo.commit('president', 'madison')
>>> repo.show('president')
u'madison'
>>> repo.show('president', back=2)
u'washington'
:param key: The key to look up.
:type key: string
:param back:
(optional) How many steps back from head to get the commit.
Defaults to 0 (the current head).
:type back: integer
:returns: the data
:rtype: int, float, NoneType, unicode, boolean, list, or dict
:raises:
KeyError if there is no entry for key, IndexError if too many steps
back are specified.
"""
return self.head(key, back=back).data
def staged(self, key):
"""Determine whether the value in the index differs from the committed
value, if there is an entry in the index.
>>> repo.staged('huey')
False
>>> repo.add('huey', 'long')
>>> repo.staged('huey')
True
>>> repo.commit()
>>> repo.staged('huey')
False
:param key: The key to check
:type key: string
:returns: whether the entries are different.
:rtype: boolean
"""
if key in self._repo.index:
if self.committed(key):
return self.index(key) != self.show(key)
else:
return True
else:
return False
# try:
# self._repo.lookup_reference(self._key2ref(key))
# return True
# except KeyError:
# return False
# class Value(object):
# """Values are what exist behind a single key. They provide convenience
# methods to their underlying repository.
# """
#
# def __init__(self, repo, key, data):
# self._repo = repo
# self._key = key
# self._data = data
#
# def add(self, **kwargs):
# """Convenience method to add this value to the repository.
# """
# self.repo.add(self._key, self._data, **kwargs)
#
# def commit(self, **kwargs):
# """Convenience method to commit this value to the repository. By
# default will `add` as well.
# """
# self.repo.commit(self._key, self._data, **kwargs)
#
# @property
# def committed(self):
# """Whether this key has been committed to the repository.
# """
# return self.repo.head(self._key)[self._key].data == self.data
#
# @property
# def data(self):
# """Returns the data for this key.
# """
# return self._data
#
# def head(self, **kwargs):
# """Convenience method to get the head commit for this value's key from
# the repository.
# """
# return self.repo.head(self._key, **kwargs)
#
# def log(self, **kwargs):
# """Convenience method to get the log for this value's key.
# """
# return self.repo.log(self._key, **kwargs)
#
# @property
# def repo(self):
# """The repository.
# """
# return self._repo
#
# @property
# def staged(self):
# """Whether the data in this key has been staged to be committed to the
# repository (added, but not committed.)
# """
# return self.repo.staged(self._key)
#
# def unstage(self):
# """Unstage this key if it has been added, but not committed.
# """
# self.repo.unstage(self._key)
#
# def remove(self):
# """Convenience method to remove this key from the repository.
# """
# self.remove(self._key)
# class Object(collections.MutableMapping, collections.MutableSequence):
#
# def dirtify(meth):
# """Decorator that dirties up the object upon successful completion.
# """
# @functools.wraps(meth)
# def wrapped(self, *args, **kwargs):
# retval = meth(self, *args, **kwargs)
# self._dirty = True # if above call fails, we're not dirtied.
# if self.autocommit:
# self.commit()
# return retval
# return wrapped
#
# def __init__(self, repo, commit, value, autocommit):
#
# #: Whether changes should be automatically committed.
# self.autocommit = autocommit
#
# self._repo = repo
# self._head = commit
# self._value = value
# self._dirty = False
#
# def _value_meth(self, meth):
# cls = self.value.__class__
# try:
# return getattr(cls, meth)
# except AttributeError:
# raise TypeError("%s does not support %s" % (cls, meth))
#
# def __contains__(self, item):
# return self._value_meth('__contains__')(self.value, item)
#
# def __len__(self):
# return self._value_meth('__len__')(self.value)
#
# def __iter__(self):
# return self._value_meth('__iter__')(self.value)
#
# def __getitem__(self, key):
# return self._value_meth('__getitem__')(self.value, key)
#
# @dirtify
# def __setitem__(self, key, value):
# return self._value_meth('__setitem__')(self.value, key, value)
#
# @dirtify
# def __delitem__(self, key):
# return self._value_meth('__delitem__')(self.value, key)
#
# @dirtify
# def insert(self, item):
# return self._value_meth('insert')(self.value, item)
#
# def __str__(self):
# return self._value
#
# def __repr__(self):
# return '%s(key=%s,value=%s,dirty=%s)' % (
# type(self).__name__,
# self.key,
# self._value.__repr__(),
# self.dirty)
#
# def _update(self, commit=None):
# if commit is None:
# commit = self._repo.get(self.key).head
# self._value = commit.object.value
# self._head = commit
# self._dirty = False
#
# @property
# def repo(self):
# """The :class:`Repository` of this object.
# """
# return self._repo
#
# @property
# def key(self):
# """The String key for this dict in its repository.
# """
# return self.head.key
#
# @property
# def dirty(self):
# """Whether the current value is different than what's in the repo.
# """
# return self._dirty
#
# @property
# def value(self):
# """The current (possibly dirty) value of this object.
# """
# return self._value
#
# @property
# def head(self):
# """The :class:`Commit` last associated with this object.
# """
# return self._head
#
# def commit(self, **kwargs):
# """Convenience wrapper for :func:`Repository.commit` applying to this
# key. Resets the dirty flag and updates head.
# """
# commit = self.repo.commit(self.key, self.value, **kwargs).head
# self._update(commit=commit)
# assert self.value == commit.object.value
#
# def log(self, **kwargs):
# """Convenience wrapper for :func:`Repository.log` applying to this key.
# Returns the log based off this object's head, which may not be the
# most recent commit for the key in the repository.
# """
# return self.repo.log(commit=self.head, **kwargs)
#
# def merge(self, other, **kwargs):
# """Convenience wrapper for :func:`Repository.commit`
#
# :param other:
# the object to merge in. The merge is done to the head of
# this object.
# :type other: :class:`Object`
#
# :raises: DifferentRepoError
# """
# if other.repo == self.repo:
# merge = self.repo.merge(self.key, commit=other.head, **kwargs)
# if merge.success:
# self._update()
# return merge
# else:
# # this would run, but would break references in future.
# raise DifferentRepoError("Cannot merge object in, it's in a \
# different repo")
#
# def fork(self, dest_key, **kwargs):
# """Convenience wrapper for :func:`Repository.fork`
#
# :param dest_key:
# the key to fork to.
# :type dest_key: string
# """
# return self.repo.fork(dest_key, commit=self.head)
#
| {
"content_hash": "c71f8956ca4b16b7d580fa4e539943b8",
"timestamp": "",
"source": "github",
"line_count": 724,
"max_line_length": 95,
"avg_line_length": 34.802486187845304,
"alnum_prop": 0.5561773226971465,
"repo_name": "talos/jsongit",
"id": "23a518322252513be9edb2afcd35d8b8ce9b1dae",
"size": "25222",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jsongit/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "76717"
}
],
"symlink_target": ""
} |
# NOTE(review): Python 2 script (uses print statements).
import random
import sys
import os
# Require exactly one command-line argument: the offspring.csv path.
if len(sys.argv) != 2:
    print "you need to invoke this script with 1 parameter: the path to the offspring.csv file you want to analyze"
    quit()
# Absolute path to the offspring TSV file; read by iterateThroughFile().
offspringFile = os.path.abspath(sys.argv[1])
if not os.path.isfile(offspringFile):
    print "couldn't find file: " + offspringFile
    quit()
def iterateThroughFile(action, search):
    """Scan the offspring file (module-level ``offspringFile``) once.

    The file is tab-separated with a header row; each data row is
    ``parent1<TAB>parent2<TAB>child``.

    :param action: "max" to return the highest child id seen, or "find"
        to return the ``[parent1, parent2]`` pair of the child equal to
        *search* (``None`` if absent).
    :param search: child id to look for when action == "find"; ignored
        for "max".
    """
    with open(offspringFile, 'r') as inputFile:
        firstRun = True
        # Renamed from 'max' to avoid shadowing the builtin max().
        maxChild = 0
        for line in inputFile:
            if firstRun:
                # Skip the header row.
                firstRun = False
                continue
            lineSplit = line.split("\t")
            parent1 = int(lineSplit[0])
            parent2 = int(lineSplit[1])
            child = int(lineSplit[2])
            if action == "max" and child > maxChild:
                maxChild = child
            if action == "find" and child == search:
                return [parent1, parent2]
        if action == "max":
            return maxChild
        if action == "find":
            return None
# Highest child id in the file == most recently created individual.
# NOTE(review): this module-level name shadows the builtin max().
max = iterateThroughFile("max", None)
print "max:", max
# Walk one random lineage backwards from the newest individual until an
# ancestor with no recorded parents is reached.
findable = True
previousIndiv = max
while findable:
    parents = iterateThroughFile("find", previousIndiv)
    if parents == None:
        findable = False
        break
    # Pick one of the two parents at random and continue from it.
    previousIndiv = random.choice(parents)
    print "parents:",parents," picked:",previousIndiv
print "done"
| {
"content_hash": "e97faee9ec3d96ab29b32e4173feaf47",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 115,
"avg_line_length": 26.71153846153846,
"alnum_prop": 0.5694744420446364,
"repo_name": "fgolemo/EC14-main",
"id": "d1194bd971ce3d1753163914a40fe2175546c0bc",
"size": "1389",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "findAncestry.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "95893"
},
{
"name": "Shell",
"bytes": "2535"
}
],
"symlink_target": ""
} |
import unittest
from unittest.mock import Mock
class Mailer:
    """Outgoing-mail collaborator; concrete delivery is not implemented."""

    def send_email(self, email, message):
        """Placeholder for sending *message* to *email*; always raises."""
        raise NotImplementedError("Not implemented yet")
class DB:
    """Persistence collaborator; concrete storage is not implemented."""

    def insert_user(self, user):
        """Placeholder for persisting *user*; always raises."""
        raise NotImplementedError("Not implemented yet")
class User:
    """Plain value object holding a registered user's email and name."""

    def __init__(self, email, name):
        self.name = name
        self.email = email
def registerUser(email, name, db, mailer):
    """Create a User, persist it via *db*, and notify it via *mailer*.

    :param email: email address for the new user
    :param name: display name for the new user
    :param db: object exposing ``insert_user(user)``
    :param mailer: object exposing ``send_email(email, message)``
    :returns: the newly created User
    """
    user = User(email, name)
    db.insert_user(user)
    # BUG FIX: the original referenced the undefined name 'usr' here,
    # which raised NameError at runtime.
    mailer.send_email(user.email, "Good bye")
    return user
class MockTest(unittest.TestCase):
    """Exercises registerUser() end to end.

    NOTE(review): DB.insert_user and Mailer.send_email both raise
    NotImplementedError, so this test cannot pass until real or mocked
    collaborators are supplied.
    """

    TEST_EMAIL = 'student@campus.uib.es'
    TEST_NAME = 'Student'

    def testRegisterUser(self):
        registered = registerUser(self.TEST_EMAIL, self.TEST_NAME, DB(), Mailer())
        self.assertIsInstance(registered, User)
        self.assertEqual(registered.email, self.TEST_EMAIL)
        self.assertEqual(registered.name, self.TEST_NAME)
# Allow running this file directly as a test script.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "b45389a45a30f61bb863470d22bc9af8",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 76,
"avg_line_length": 25.2972972972973,
"alnum_prop": 0.655982905982906,
"repo_name": "jordillull/unit-tests-uib-2015",
"id": "be6623d9ef988d30639a09a04770977e0b1ae6ae",
"size": "936",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code_sample/python/mock.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "202514"
},
{
"name": "HTML",
"bytes": "61717"
},
{
"name": "JavaScript",
"bytes": "254227"
},
{
"name": "PHP",
"bytes": "349"
},
{
"name": "Python",
"bytes": "4096"
}
],
"symlink_target": ""
} |
"""utilities for testing IPython kernels"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import atexit
import functools
from contextlib import contextmanager
from subprocess import PIPE, STDOUT
try:
    from queue import Empty  # Py 3
except ImportError:
    from Queue import Empty  # Py 2
import nose
import nose.tools as nt
from IPython.kernel import manager
#-------------------------------------------------------------------------------
# Globals
#-------------------------------------------------------------------------------
# Seconds to wait for a kernel to finish starting before giving up.
STARTUP_TIMEOUT = 60
# Default timeout (seconds) for individual message waits.
TIMEOUT = 15
# Module-global shared kernel manager/client; created lazily by
# start_global_kernel() and torn down by stop_global_kernel().
KM = None
KC = None
#-------------------------------------------------------------------------------
# code
#-------------------------------------------------------------------------------
def start_new_kernel(**kwargs):
    """Start a new kernel and return its Manager and Client.

    Routes the kernel's stdout/stderr into the test suite's output
    capturing before delegating to ``manager.start_new_kernel``.
    """
    kwargs['stdout'] = nose.iptest_stdstreams_fileno()
    kwargs['stderr'] = STDOUT
    return manager.start_new_kernel(startup_timeout=STARTUP_TIMEOUT, **kwargs)
def flush_channels(kc=None):
    """Drain and validate any messages waiting on *kc*'s channels.

    Defaults to the global client KC when *kc* is not given.
    """
    from .test_message_spec import validate_message

    client = KC if kc is None else kc
    for channel in (client.shell_channel, client.iopub_channel):
        while True:
            try:
                msg = channel.get_msg(block=True, timeout=0.1)
            except Empty:
                break
            validate_message(msg)
def execute(code='', kc=None, **kwargs):
    """Send *code* for execution and validate the standard reply flow.

    Checks the execute_reply on the shell channel plus the busy status
    and (unless silent) execute_input messages on iopub.

    Returns (msg_id, reply_content).
    """
    from .test_message_spec import validate_message

    client = kc if kc is not None else KC
    msg_id = client.execute(code=code, **kwargs)
    reply = client.get_shell_msg(timeout=TIMEOUT)
    validate_message(reply, 'execute_reply', msg_id)
    busy = client.get_iopub_msg(timeout=TIMEOUT)
    validate_message(busy, 'status', msg_id)
    nt.assert_equal(busy['content']['execution_state'], 'busy')
    if not kwargs.get('silent'):
        execute_input = client.get_iopub_msg(timeout=TIMEOUT)
        validate_message(execute_input, 'execute_input', msg_id)
        nt.assert_equal(execute_input['content']['code'], code)
    return msg_id, reply['content']
def start_global_kernel():
    """Start the global kernel (if it isn't running) and return its client."""
    global KM, KC
    if KM is not None:
        # Already running: just drain any stale messages.
        flush_channels(KC)
        return KC
    KM, KC = start_new_kernel()
    atexit.register(stop_global_kernel)
    return KC
@contextmanager
def kernel():
    """Context manager for the global kernel instance.

    Should be used for most kernel tests.

    Returns
    -------
    kernel_client: connected KernelClient instance
    """
    kc = start_global_kernel()
    yield kc
def uses_kernel(test_f):
    """Decorator for tests that use the global kernel.

    The wrapped test receives the shared kernel client as its only
    argument.
    """
    # functools.wraps copies __name__ and __doc__ (plus __module__,
    # __qualname__, __dict__ and __wrapped__) instead of the manual
    # two-attribute copy the original did.
    @functools.wraps(test_f)
    def wrapped_test():
        with kernel() as kc:
            test_f(kc)
    return wrapped_test
def stop_global_kernel():
    """Stop the global shared kernel instance, if it exists.

    Safe to call more than once: it is registered with atexit, so a
    manual call followed by the exit hook must not crash.
    """
    global KM, KC
    # BUG FIX: the original unconditionally called KC.stop_channels(),
    # which raised AttributeError when KC was already None (e.g. on a
    # second invocation at interpreter exit).
    if KC is not None:
        KC.stop_channels()
        KC = None
    if KM is None:
        return
    KM.shutdown_kernel(now=True)
    KM = None
def new_kernel(argv=None):
    """Context manager launching a fresh kernel subprocess.

    Use only for tests where the kernel must not be re-used.

    Returns
    -------
    kernel_client: connected KernelClient instance
    """
    kwargs = {
        'stdout': nose.iptest_stdstreams_fileno(),
        'stderr': STDOUT,
        'startup_timeout': STARTUP_TIMEOUT,
    }
    if argv is not None:
        kwargs['extra_arguments'] = argv
    return manager.run_kernel(**kwargs)
def assemble_output(iopub):
    """Collect stdout/stderr text from *iopub* until the kernel goes idle.

    Returns (stdout, stderr) as concatenated strings.
    """
    stdout_parts = []
    stderr_parts = []
    while True:
        msg = iopub.get_msg(block=True, timeout=1)
        content = msg['content']
        if msg['msg_type'] == 'status' and content['execution_state'] == 'idle':
            # The idle status message marks the end of the output stream.
            break
        if msg['msg_type'] == 'stream':
            if content['name'] == 'stdout':
                stdout_parts.append(content['text'])
            elif content['name'] == 'stderr':
                stderr_parts.append(content['text'])
            else:
                raise KeyError("bad stream: %r" % content['name'])
        # Any other message type is ignored.
    return ''.join(stdout_parts), ''.join(stderr_parts)
def wait_for_idle(kc):
    """Consume iopub messages from *kc* until an idle status arrives."""
    while True:
        msg = kc.iopub_channel.get_msg(block=True, timeout=1)
        content = msg['content']
        if msg['msg_type'] == 'status' and content['execution_state'] == 'idle':
            return
| {
"content_hash": "473170b236039ef20c5296811bd93c53",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 80,
"avg_line_length": 30.217391304347824,
"alnum_prop": 0.5749229188078109,
"repo_name": "wolfram74/numerical_methods_iserles_notes",
"id": "399f13e80b3bb129465dad10474aee7bdcc4ad68",
"size": "4865",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "venv/lib/python2.7/site-packages/IPython/kernel/tests/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "282435"
},
{
"name": "C++",
"bytes": "59801"
},
{
"name": "CSS",
"bytes": "2038"
},
{
"name": "FORTRAN",
"bytes": "3707"
},
{
"name": "Groff",
"bytes": "6753"
},
{
"name": "HTML",
"bytes": "37522"
},
{
"name": "JavaScript",
"bytes": "1368241"
},
{
"name": "Python",
"bytes": "31296026"
},
{
"name": "Shell",
"bytes": "3869"
},
{
"name": "Smarty",
"bytes": "21425"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
} |
import re
from functools import partial
from syn.five import STR, PY2, unicode, unichr
from .py import get_typename, hasmethod
#-------------------------------------------------------------------------------
# String-Quoting Utils
# Matches a leading quote sequence; triple quotes are listed first so
# they win over the single-character alternatives.
QUOTES_PATTERN = re.compile('^(\'\'\'|"""|\'|")')
def quote_string(obj):
    """Return *obj* wrapped in quotes suitable for a Python literal.

    Tries each quoting style (', ", ''', \"\"\") and uses the first one
    that does not occur in *obj*.  If every style occurs, single quotes
    inside *obj* are backslash-escaped and the result is wrapped in
    single quotes.  The return value has the same string type as *obj*.
    """
    ret_type = type(obj)
    for raw in ("'", '"', "'''", '"""'):
        quote = ret_type(raw)
        if quote not in obj:
            return quote + obj + quote
    # Every quote style appears in obj: escape single quotes and wrap.
    # BUG FIX: the original replaced "'" with ret_type("\'"), but the
    # source literal "\'" is just "'", so the replace was a no-op and
    # the result was left unescaped (and thus not a valid literal).
    q = ret_type("'")
    escaped = obj.replace(q, ret_type("\\'"))
    return q + escaped + q
def outer_quotes(string):
    """Return the quote sequence that both starts and ends *string*.

    Raises ValueError when *string* is not wrapped in matching quotes.
    """
    match = re.match(QUOTES_PATTERN, string)
    if match is not None:
        quote = match.groups()[0]
        if string.endswith(quote):
            return quote
    raise ValueError('String is not quoted')
def break_quoted_string(string, pattern, repl=None):
    """Split a quoted *string* on *pattern* and re-quote each piece.

    Each piece is wrapped in the string's original outer quotes and the
    pieces are joined with *repl* (defaulting to *pattern*).  When
    *pattern* does not occur, *string* is returned unchanged.
    """
    if repl is None:
        repl = pattern
    if pattern not in string:
        return string
    quotes = outer_quotes(string)

    def requote(piece):
        # Ensure each fragment carries the original quoting on both ends.
        if not piece.startswith(quotes):
            piece = quotes + piece
        if not piece.endswith(quotes):
            piece = piece + quotes
        return piece

    return repl.join(requote(part) for part in string.split(pattern))
def break_around_line_breaks(string):
    """Normalize quoted line breaks: CRLF, CR and LF all become LF, with
    each resulting line individually re-quoted."""
    result = break_quoted_string(string, '\r\n', '\n')
    result = break_quoted_string(result, '\r', '\n')
    return break_quoted_string(result, '\n', '\n')
def escape_line_breaks(string):
    """Replace literal CR and LF characters with their escape sequences."""
    return string.replace('\r', '\\r').replace('\n', '\\n')
def escape_null(string):
    """Replace literal NUL characters with the escape sequence \\x00."""
    return string.replace('\x00', '\\x00')
def escape_for_eval(string):
    """Escape backslashes, line breaks and NULs so the result can be
    safely embedded in source text handed to eval()."""
    # Backslashes must be doubled first so later escapes survive.
    return escape_null(escape_line_breaks(string.replace('\\', '\\\\')))
#-------------------------------------------------------------------------------
# String Creation
def chrs(lst):
    """Build a string from an iterable of integer code points."""
    return ''.join(map(chr, lst))
#-------------------------------------------------------------------------------
# Unicode issues
def safe_chr(x):
    """chr() that falls back to unichr() for codepoints > 255 on Py2."""
    return unichr(x) if PY2 and x > 255 else chr(x)
def safe_str(x, encoding='utf-8'):
    """str() that encodes to bytes when plain conversion fails (Py2)."""
    try:
        ret = str(x)
    except UnicodeEncodeError:
        ret = x.encode(encoding)
    return ret
def safe_unicode(x):
    """unicode() that rebuilds the string char-by-char when decoding fails
    (Py2 byte strings with non-ASCII content)."""
    try:
        ret = unicode(x)
    except UnicodeDecodeError:
        ret = u''.join(unichr(ord(c)) for c in x)
    return ret
def safe_print(x, encoding='utf-8'):
    """print() that falls back to printing an encoded byte string when a
    UnicodeEncodeError occurs (Py2 consoles)."""
    try:
        print(x)
    except UnicodeEncodeError:
        encoded = x.encode(encoding)
        print(encoded)
#-------------------------------------------------------------------------------
# istr
#-----------------------------------------------------------
# _istr_sequence
def _istr_sequence(seq, ret, pretty, indent):
    """Append comma-separated istr() renderings of *seq* to *ret*.

    In pretty mode each element goes on its own line, aligned under the
    opening delimiter already present in *ret*.
    """
    if pretty:
        indent += len(ret)
        sep = ',\n' + ' ' * indent
    else:
        sep = ', '
    return ret + sep.join(istr(item, pretty, indent) for item in seq)
return ret
#-----------------------------------------------------------
# _istr_mapping
def _istr_mapping(dct, ret, pretty, indent):
    """Append comma-separated ``key: value`` istr() renderings of *dct*
    to *ret*, aligning values under their keys in pretty mode."""
    if pretty:
        indent += len(ret)
        sep = ',\n' + ' ' * indent
    else:
        sep = ', '
    rendered = []
    for key, val in dct.items():
        prefix = '{}: '.format(istr(key, pretty, indent))
        rendered.append(prefix + istr(val, pretty, indent + len(prefix)))
    return ret + sep.join(rendered)
#-----------------------------------------------------------
# istr_list
def _istr_list(lst, pretty, indent):
    """Render a list; list subclasses get a ``TypeName([...])`` wrapper."""
    if type(lst) is list:
        head, tail = '[', ']'
    else:
        head, tail = '{}(['.format(get_typename(lst)), '])'
    return _istr_sequence(lst, head, pretty, indent) + tail
#-----------------------------------------------------------
# istr_dict
def _istr_dict(dct, pretty, indent):
    """Render a dict; dict subclasses get a ``TypeName({...})`` wrapper."""
    if type(dct) is dict:
        head, tail = '{', '}'
    else:
        head, tail = '{}({{'.format(get_typename(dct)), '})'
    return _istr_mapping(dct, head, pretty, indent) + tail
#-----------------------------------------------------------
# istr_set
def _istr_set(obj, pretty, indent):
    """Render a set.  Subclasses get ``TypeName([...])``; a one-element
    set is rendered as ``set([...])`` rather than ``{...}``."""
    if type(obj) is not set:
        head, tail = '{}(['.format(get_typename(obj)), '])'
    elif len(obj) == 1:
        head, tail = 'set([', '])'
    else:
        head, tail = '{', '}'
    return _istr_sequence(obj, head, pretty, indent) + tail
#-----------------------------------------------------------
# istr_tuple
def _istr_tuple(tup, pretty, indent):
    """Render a tuple; tuple subclasses get a ``TypeName([...])`` wrapper."""
    if type(tup) is tuple:
        head, tail = '(', ')'
    else:
        head, tail = '{}(['.format(get_typename(tup)), '])'
    return _istr_sequence(tup, head, pretty, indent) + tail
#-----------------------------------------------------------
# istr_str
def _istr_str(s, pretty, indent):
    """Render a string via quote_string(), prefixing ``u`` for Py2
    unicode objects."""
    quoted = quote_string(s)
    if PY2 and isinstance(s, unicode):
        quoted = 'u' + quoted
    return quoted
#-----------------------------------------------------------
# istr_type
def _istr_type(obj, pretty, indent):
    # Types render as their bare name; pretty/indent are accepted only
    # for dispatch-signature uniformity and are unused here.
    return get_typename(obj)
#-----------------------------------------------------------
# istr_object
def _istr_object(obj, pretty, indent):
    # Fallback renderer: honor a custom istr(pretty, indent) hook when
    # the object defines one, otherwise use plain str().
    if hasmethod(obj, 'istr'):
        return obj.istr(pretty, indent)
    return str(obj)
#-----------------------------------------------------------
# istr
def istr(obj, pretty=False, indent=0):
    """Recursively render *obj* as an eval-like literal string.

    Dispatches on type (list, dict, set, tuple, string, type) and falls
    back to ``_istr_object`` for everything else.  Check order matters:
    the first matching isinstance wins.
    """
    dispatch = (
        (list, _istr_list),
        (dict, _istr_dict),
        (set, _istr_set),
        (tuple, _istr_tuple),
        (STR, _istr_str),
        (type, _istr_type),
    )
    for cls, renderer in dispatch:
        if isinstance(obj, cls):
            return renderer(obj, pretty, indent)
    return _istr_object(obj, pretty, indent)
#-------------------------------------------------------------------------------
# pretty
# Convenience alias: istr() with pretty-printing turned on.
pretty = partial(istr, pretty=True)
#-------------------------------------------------------------------------------
# __all__

# Explicit public API of this module.
__all__ = ('quote_string', 'outer_quotes', 'break_quoted_string',
           'break_around_line_breaks',
           'escape_line_breaks', 'escape_null', 'escape_for_eval',
           'chrs',
           'safe_chr', 'safe_str', 'safe_unicode', 'safe_print',
           'istr', 'pretty')
#-------------------------------------------------------------------------------
| {
"content_hash": "c455e5a406a31e2b7592da99f884183a",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 80,
"avg_line_length": 24.617328519855597,
"alnum_prop": 0.45607860390086524,
"repo_name": "mbodenhamer/syn",
"id": "a927b1c80b94a2d3d1de6eee030c2ba8f5648a54",
"size": "6819",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "syn/base_utils/str.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "3447"
},
{
"name": "Python",
"bytes": "571295"
},
{
"name": "Shell",
"bytes": "231"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from main.models import UserProfile, MetaData
class ProfAdmin(admin.ModelAdmin):
    """Django admin configuration for UserProfile."""
    # Columns shown on the UserProfile changelist page.
    list_display = ('user', 'name', 'role')
admin.site.register(UserProfile, ProfAdmin)
class MDAdmin(admin.ModelAdmin):
    """Django admin configuration for MetaData."""
    # Columns shown on the MetaData changelist page.
    list_display = ('xform', 'data_type', 'data_value', 'data_file_type')
admin.site.register(MetaData, MDAdmin)
| {
"content_hash": "6e1e9c0bf3997e75c80c4b3b2570f8d9",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 73,
"avg_line_length": 29.333333333333332,
"alnum_prop": 0.7386363636363636,
"repo_name": "ehealthafrica-ci/formhub",
"id": "296c2933c121bb76665891f8d33ee2ad438774fc",
"size": "352",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "main/admin.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "60276"
},
{
"name": "HTML",
"bytes": "251331"
},
{
"name": "JavaScript",
"bytes": "722151"
},
{
"name": "Makefile",
"bytes": "2286"
},
{
"name": "Nginx",
"bytes": "793"
},
{
"name": "Python",
"bytes": "1650038"
},
{
"name": "Shell",
"bytes": "11919"
}
],
"symlink_target": ""
} |
from datetime import datetime
import os
from azure.cli.testsdk.preparers import (
ResourceGroupPreparer,
VirtualNetworkPreparer,
RoleBasedServicePrincipalPreparer,
KEY_RESOURCE_GROUP,
KEY_VIRTUAL_NETWORK,
)
from azure.cli.testsdk.utilities import GraphClientPasswordReplacer
from azure.cli.command_modules.acs.tests.latest.recording_processors import MOCK_GUID, MOCK_SECRET
class AKSCustomResourceGroupPreparer(ResourceGroupPreparer):
    """
    Override to support overriding the default location in test cases using this custom preparer with specific
    environment variables, and a flag (preserve_default_location) to avoid being overridden by environment variables.
    """

    def __init__(
        self,
        name_prefix="clitest.rg",
        parameter_name="resource_group",
        parameter_name_for_location="resource_group_location",
        location="westus",
        dev_setting_name="AZURE_CLI_TEST_DEV_RESOURCE_GROUP_NAME",
        dev_setting_location="AZURE_CLI_TEST_DEV_RESOURCE_GROUP_LOCATION",
        random_name_length=75,
        key="rg",
        preserve_default_location=False,
    ):
        """Initialize like ResourceGroupPreparer, then optionally let the
        env var named by *dev_setting_location* override the location,
        unless *preserve_default_location* pins the given default.
        """
        super(AKSCustomResourceGroupPreparer, self).__init__(
            name_prefix,
            parameter_name,
            parameter_name_for_location,
            location,
            dev_setting_name,
            dev_setting_location,
            random_name_length,
            key,
        )
        # use environment variable to modify the default value of location
        self.dev_setting_location = os.environ.get(dev_setting_location, None)
        # Honor the env var only when the caller did not pin the default
        # via preserve_default_location.
        if not preserve_default_location and self.dev_setting_location:
            self.location = self.dev_setting_location
        else:
            self.dev_setting_location = location
class AKSCustomVirtualNetworkPreparer(VirtualNetworkPreparer):
    """
    Override to specify custom address_prefixes to avoid conflict with aks cluster/service cidr.
    TODO: remove this.
    """

    def __init__(
        self,
        name_prefix="clitest.vn",
        location="westus",
        parameter_name="virtual_network",
        resource_group_parameter_name="resource_group",
        resource_group_key=KEY_RESOURCE_GROUP,
        address_prefixes="10.128.0.0/24",
        address_prefixes_parameter_name="address_prefixes",
        dev_setting_name="AZURE_CLI_TEST_DEV_VIRTUAL_NETWORK_NAME",
        dev_setting_location="AZURE_CLI_TEST_DEV_RESOURCE_GROUP_LOCATION",
        random_name_length=24,
        key=KEY_VIRTUAL_NETWORK,
    ):
        """Initialize like VirtualNetworkPreparer, with an env-var
        location override and a configurable vnet address space.
        """
        super(AKSCustomVirtualNetworkPreparer, self).__init__(
            name_prefix,
            location,
            parameter_name,
            resource_group_parameter_name,
            resource_group_key,
            dev_setting_name,
            random_name_length,
            key,
        )
        # use environment variable to modify the default value of location
        self.dev_setting_location = os.environ.get(dev_setting_location, None)
        if self.dev_setting_location:
            self.location = self.dev_setting_location
        else:
            self.dev_setting_location = location
        # get address_prefixes
        # custom address_prefixes to avoid conflict with aks cluster/service cidr
        self.address_prefixes = address_prefixes
        self.address_prefixes_parameter_name = address_prefixes_parameter_name

    def create_resource(self, name, **kwargs):
        """Create the vnet via the CLI (live runs only) and expose its
        name through the test kwargs under self.key.
        """
        # When a pre-provisioned vnet is supplied via env var, just wire
        # its name into the test kwargs and skip creation.
        if self.dev_setting_name:
            self.test_class_instance.kwargs[self.key] = name
            return {
                self.parameter_name: self.dev_setting_name,
            }
        # Tag the resource so CI clean-up jobs can identify it.
        tags = {
            "product": "azurecli",
            "cause": "automation",
            "date": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
        }
        if "ENV_JOB_NAME" in os.environ:
            tags["job"] = os.environ["ENV_JOB_NAME"]
        tags = " ".join(
            ["{}={}".format(key, value) for key, value in tags.items()]
        )
        template = (
            "az network vnet create --resource-group {} --location {} --name {}"
            " --subnet-name default --address-prefixes {} --tag " + tags
        )
        # Allow the test to override the address space via kwargs.
        self._update_address_prefixes(**kwargs)
        self.live_only_execute(
            self.cli_ctx,
            template.format(
                self._get_resource_group(**kwargs),
                self.location,
                name,
                self.address_prefixes,
            ),
        )
        self.test_class_instance.kwargs[self.key] = name
        return {self.parameter_name: name}

    def remove_resource(self, name, **kwargs):
        """Intentionally a no-op; see the note below."""
        # The exception "CloudError" used in "VirtualNetworkPreparer" is deprecated
        # since the SDK that network commands relies on has been migrated to track 2,
        # the new exception class should be "HttpResponseError".
        # Only one test case ("test_aks_create_default_service_with_virtual_node_addon")
        # uses this preparer currently, and the network resources are still used
        # by some resources (such as vmss) after the test, since there is no clean way
        # to sort out dependencies and delete resources one by one, we do
        # rely on deleting the resource group later to clean up
        pass

    def _update_address_prefixes(self, **kwargs):
        # Pick up a per-test override of the vnet address space, if given.
        if self.address_prefixes_parameter_name in kwargs:
            self.address_prefixes = kwargs.get(
                self.address_prefixes_parameter_name
            )
class AKSCustomRoleBasedServicePrincipalPreparer(
    RoleBasedServicePrincipalPreparer
):
    """
    Override to keep the recording consistent with the count in the mock request in scenarios such as the
    check-in pipeline where the SP is pre-configured for testing and imported via environment variables.
    """

    def __init__(
        self,
        name_prefix="clitest",
        skip_assignment=True,
        parameter_name="sp_name",
        parameter_password="sp_password",
        dev_setting_sp_name="AZURE_CLI_TEST_DEV_SP_NAME",
        dev_setting_sp_password="AZURE_CLI_TEST_DEV_SP_PASSWORD",
        key="sp",
    ):
        """Delegate to RoleBasedServicePrincipalPreparer unchanged; the
        customization lives in create_resource below.
        """
        super(AKSCustomRoleBasedServicePrincipalPreparer, self).__init__(
            name_prefix,
            skip_assignment,
            parameter_name,
            parameter_password,
            dev_setting_sp_name,
            dev_setting_sp_password,
            key,
        )

    def create_resource(self, name, **kwargs):
        """Create (or import) a service principal and expose its name and
        password through the test kwargs.  In playback mode, mock values
        are returned so recordings stay consistent.
        """
        if not self.dev_setting_sp_name:
            command = "az ad sp create-for-rbac -n {}{}".format(
                name, " --skip-assignment" if self.skip_assignment else ""
            )
            try:
                self.result = self.live_only_execute(
                    self.cli_ctx, command
                ).get_output_in_json()
            except AttributeError:  # live only execute returns None if playing from record
                pass
            if self.live_test or self.test_class_instance.in_recording:
                sp_name = self.result['appId']
                sp_password = self.result.get("password") or GraphClientPasswordReplacer.PWD_REPLACEMENT
            else:
                # Playback: use the same mock values the recording
                # processor substituted into the cassette.
                sp_name = MOCK_GUID
                sp_password = MOCK_SECRET
        else:
            # call AbstractPreparer.moniker to make resource counts and self.resource_moniker consistent between live
            # and play-back. see SingleValueReplacer.process_request, AbstractPreparer.__call__._preparer_wrapper
            # and ScenarioTest.create_random_name. This is so that when self.create_random_name is called for the
            # first time during live or playback, it would have the same value.
            # In short, the default sp preparer in live mode does not call moniker, which leads to inconsistent counts.
            _ = self.moniker
            # When performing live test and recording, original sp and secret will be returned, but sp and secret would
            # be replaced by azure.cli.command_modules.acs.tests.latest.recording_processors.KeyReplacer with
            # MOCK_GUID and MOCK_SECRET while recording. When performing recording test, MOCK_GUID and MOCK_SECRET will
            # be returned.
            if self.live_test or self.test_class_instance.in_recording:
                sp_name = self.dev_setting_sp_name
                sp_password = self.dev_setting_sp_password
            else:
                sp_name = MOCK_GUID
                sp_password = MOCK_SECRET
        # update kwargs and return
        self.test_class_instance.kwargs[self.key] = sp_name
        self.test_class_instance.kwargs["{}_pass".format(self.key)] = sp_password
        return {
            self.parameter_name: sp_name,
            self.parameter_password: sp_password,
        }
| {
"content_hash": "133096d088e08e64da6a65d68cc15ef6",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 119,
"avg_line_length": 40.37614678899082,
"alnum_prop": 0.6181549647807316,
"repo_name": "yugangw-msft/azure-cli",
"id": "d42dfbfb8fb9dc65598d61c056756392c2e37348",
"size": "9148",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "src/azure-cli/azure/cli/command_modules/acs/tests/latest/custom_preparers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ANTLR",
"bytes": "5355"
},
{
"name": "Batchfile",
"bytes": "14110"
},
{
"name": "Bicep",
"bytes": "1679"
},
{
"name": "C#",
"bytes": "1971"
},
{
"name": "C++",
"bytes": "275"
},
{
"name": "Dockerfile",
"bytes": "8427"
},
{
"name": "HTML",
"bytes": "794"
},
{
"name": "JavaScript",
"bytes": "1404"
},
{
"name": "Jupyter Notebook",
"bytes": "389"
},
{
"name": "PowerShell",
"bytes": "1781"
},
{
"name": "Python",
"bytes": "24270340"
},
{
"name": "Rich Text Format",
"bytes": "12032"
},
{
"name": "Roff",
"bytes": "1036959"
},
{
"name": "Shell",
"bytes": "56023"
},
{
"name": "TSQL",
"bytes": "1145"
}
],
"symlink_target": ""
} |
u'''
Created on Nov 11, 2010
@author: Mark V Systems Limited
(c) Copyright 2010 Mark V Systems Limited, All rights reserved.
'''
import os
from arelle import XmlUtil
from arelle.ModelDocument import ModelDocument, Type
class ModelRssObject(ModelDocument):
    u"""
    .. class:: ModelRssObject(type=ModelDocument.Type.RSSFEED, uri=None, filepath=None, xmlDocument=None)

    Specialization of ModelDocument that represents an RSS feed and collects
    its item elements.

    (for parameters and inherited attributes, please see ModelDocument)
    """
    def __init__(self, modelXbrl,
                 type=Type.RSSFEED,
                 uri=None, filepath=None, xmlDocument=None):
        super(ModelRssObject, self).__init__(modelXbrl, type, uri, filepath, xmlDocument)
        # Filled by rssFeedDiscover with the feed's item elements.
        self.rssItems = []
    def rssFeedDiscover(self, rootElement):
        u"""Initiates discovery of RSS feed: records the root element and
        gathers every descendant item element in document order.
        """
        # add self to namespaced document
        self.xmlRootElement = rootElement
        self.rssItems.extend(XmlUtil.descendants(rootElement, None, u"item"))
| {
"content_hash": "165b8243a334dad9aab5377d6d4de268",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 105,
"avg_line_length": 35.5,
"alnum_prop": 0.6461267605633803,
"repo_name": "sternshus/arelle2.7",
"id": "49ff1465ebc87764c944ea399f0b1560a272cbac",
"size": "1136",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "svr-2.7/arelle/ModelRssObject.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "850"
},
{
"name": "Java",
"bytes": "4663"
},
{
"name": "PLSQL",
"bytes": "1056369"
},
{
"name": "Python",
"bytes": "4877037"
},
{
"name": "Shell",
"bytes": "42"
}
],
"symlink_target": ""
} |
from .responses import LogsResponse
# Host pattern for the CloudWatch Logs endpoint in any region,
# e.g. logs.us-east-1.amazonaws.com. NOTE(review): the dots are unescaped
# regex metacharacters and therefore match any character; presumably
# intentional since moto's other url_bases use the same loose form — confirm.
url_bases = ["https?://logs.(.+).amazonaws.com"]
# Every Logs API call is POSTed to the service root; route all of them to
# LogsResponse.dispatch ("{0}" is substituted with each url_base).
url_paths = {"{0}/$": LogsResponse.dispatch}
| {
"content_hash": "7ffe896a4fb60313bc70e22c996d3556",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 48,
"avg_line_length": 26.4,
"alnum_prop": 0.6742424242424242,
"repo_name": "william-richard/moto",
"id": "e4e1f5a887b9072f460c4c74decc9ecb7ae48306",
"size": "132",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "moto/logs/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "443"
},
{
"name": "HTML",
"bytes": "5848"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "756"
},
{
"name": "Makefile",
"bytes": "1213"
},
{
"name": "Python",
"bytes": "6637538"
},
{
"name": "Ruby",
"bytes": "188"
},
{
"name": "Scala",
"bytes": "782"
},
{
"name": "Shell",
"bytes": "797"
}
],
"symlink_target": ""
} |
"""Pre-commit hook that checks files added/modified in a commit.
To install the hook manually simply execute this script from the oppia root dir
with the `--install` flag.
To bypass the validation upon `git commit` use the following command:
`git commit --no-verify --am "<Your Commit Message>"`
This hook works only on Unix like systems as of now.
On Vagrant under Windows it will still copy the hook to the .git/hooks dir
but it will have no effect.
"""
from __future__ import annotations
import argparse
import json
import os
import shutil
import subprocess
import sys
from typing import Final, List, Optional, Tuple
# TODO(#15567): The order can be fixed after Literal in utils.py is loaded
# from typing instead of typing_extensions, this will be possible after
# we migrate to Python 3.8.
sys.path.append(os.getcwd())
from scripts import common # isort:skip # pylint: disable=wrong-import-position
from core import utils # isort:skip # pylint: disable=wrong-import-position
# Config files whose release-only edits must never be committed
# (see check_changes / check_changes_in_config below).
FECONF_FILEPATH: Final = os.path.join('core', 'feconf.py')
CONSTANTS_FILEPATH: Final = os.path.join('.', 'assets', 'constants.ts')
# Source of truth for the expected gcloud path
# (compared against common.GCLOUD_PATH in check_changes_in_gcloud_path).
RELEASE_CONSTANTS_FILEPATH: Final = os.path.join(
    '.', 'assets', 'release_constants.json')
# Byte-string keys in feconf.py that are only modified for a deployment;
# kept as bytes because they are matched against raw `git diff` output.
KEYS_UPDATED_IN_FECONF: Final = [
    b'INCOMING_EMAILS_DOMAIN_NAME', b'ADMIN_EMAIL_ADDRESS',
    b'SYSTEM_EMAIL_ADDRESS', b'NOREPLY_EMAIL_ADDRESS', b'CAN_SEND_EMAILS',
    b'CAN_SEND_EDITOR_ROLE_EMAILS', b'CAN_SEND_FEEDBACK_MESSAGE_EMAILS',
    b'CAN_SEND_SUBSCRIPTION_EMAILS', b'DEFAULT_EMAIL_UPDATES_PREFERENCE',
    b'REQUIRE_EMAIL_ON_MODERATOR_ACTION', b'EMAIL_SERVICE_PROVIDER',
    b'SYSTEM_EMAIL_NAME', b'MAILGUN_DOMAIN_NAME']
# Byte-string keys in constants.ts that are only modified for a deployment.
KEYS_UPDATED_IN_CONSTANTS: Final = [
    b'SITE_FEEDBACK_FORM_URL', b'FIREBASE_CONFIG_API_KEY',
    b'FIREBASE_CONFIG_APP_ID', b'FIREBASE_CONFIG_AUTH_DOMAIN',
    b'FIREBASE_CONFIG_MESSAGING_SENDER_ID', b'FIREBASE_CONFIG_PROJECT_ID',
    b'FIREBASE_CONFIG_STORAGE_BUCKET', b'FIREBASE_CONFIG_GOOGLE_CLIENT_ID']
def install_hook() -> None:
    """Installs the pre_commit_hook script and makes it executable.

    Symlinks (or, failing that, copies) this file into .git/hooks/pre-commit.
    It ensures that oppia/ is the root folder.

    Raises:
        ValueError. If chmod command fails.
    """
    hooks_dir = os.path.join(os.getcwd(), '.git', 'hooks')
    pre_commit_file = os.path.join(hooks_dir, 'pre-commit')
    file_is_symlink = os.path.islink(pre_commit_file)
    file_exists = os.path.exists(pre_commit_file)
    if file_is_symlink and file_exists:
        print('Symlink already exists')
    else:
        # Some systems symlink/copy the .pyc file instead of the .py file,
        # so point explicitly at the .py source.
        this_file = __file__.replace('pyc', 'py')
        if file_is_symlink and not file_exists:
            # Broken symlink: remove it before recreating.
            os.unlink(pre_commit_file)
            print('Removing broken symlink')
        try:
            os.symlink(os.path.abspath(this_file), pre_commit_file)
            print('Created symlink in .git/hooks directory')
        except (OSError, AttributeError):
            # os.symlink raises AttributeError on some Windows setups;
            # OSError is the general failsafe. Fall back to copying.
            shutil.copy(this_file, pre_commit_file)
            print('Copied file to .git/hooks directory')
    print('Making pre-commit hook file executable ...')
    if common.is_windows_os():
        # chmod has no effect on Windows; nothing more to do.
        return
    _, err_chmod_cmd = start_subprocess_for_result(
        ['chmod', '+x', pre_commit_file])
    if err_chmod_cmd:
        raise ValueError(err_chmod_cmd)
    print('pre-commit hook file is now executable!')
def start_subprocess_for_result(cmd: List[str]) -> Tuple[bytes, bytes]:
    """Run *cmd* to completion and return its captured (stdout, stderr)."""
    completed = subprocess.run(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
    return completed.stdout, completed.stderr
def does_diff_include_package_lock_file() -> bool:
    """Checks whether the staged diff includes package-lock.json.

    Returns:
        bool. Whether the diff includes package-lock.json.

    Raises:
        ValueError. If git command fails.
    """
    out, err = start_subprocess_for_result(
        ['git', 'diff', '--name-only', '--cached'])
    if err:
        raise ValueError(err)
    return b'package-lock.json' in out.split(b'\n')
def does_current_folder_contain_have_package_lock_file() -> bool:
    """Checks whether package-lock.json exists in the current folder.

    Returns:
        bool. Whether the current folder includes package-lock.json.
    """
    return os.path.isfile(os.path.join(os.curdir, 'package-lock.json'))
def check_changes(filetype: str) -> bool:
    """Checks whether the working-tree diff of a config file touches any of
    the keys that are only edited for a release deployment.

    Args:
        filetype: str. The file to check - feconf or constants.

    Returns:
        bool. True when the diff contains no release-only changes (an
        unrecognized filetype also yields True).
    """
    if filetype == 'feconf':
        filepath = FECONF_FILEPATH
        keys_to_check = [b'%s = ' % key for key in KEYS_UPDATED_IN_FECONF]
    elif filetype == 'constants':
        filepath = CONSTANTS_FILEPATH
        keys_to_check = [b'"%s": ' % key for key in KEYS_UPDATED_IN_CONSTANTS]
    else:
        return True
    # [:-1] drops the trailing newline before splitting into diff lines.
    diff_lines = subprocess.check_output(
        ['git', 'diff', filepath])[:-1].split(b'\n')
    return not any(
        line.startswith((b'-', b'+')) and
        any(key in line for key in keys_to_check)
        for line in diff_lines)
def check_changes_in_config() -> None:
    """Checks whether feconf and assets have changes made for release
    deployment.

    Raises:
        Exception. There are deployment changes in feconf or constants filepath.
    """
    for filetype, filepath in (
            ('feconf', FECONF_FILEPATH), ('constants', CONSTANTS_FILEPATH)):
        if not check_changes(filetype):
            raise Exception(
                'Changes to %s made for deployment cannot be committed.' % (
                    filepath))
def check_changes_in_gcloud_path() -> None:
    """Checks that the gcloud path in common.py matches with the path in
    release_constants.json.

    Raises:
        Exception. The gcloud path in common.py does not match with the path
            in release_constants.json.
    """
    with utils.open_file(RELEASE_CONSTANTS_FILEPATH, 'r') as f:
        expected_gcloud_path = json.loads(f.read())['GCLOUD_PATH']
    paths_agree = (
        os.path.exists(expected_gcloud_path) and
        os.path.samefile(expected_gcloud_path, common.GCLOUD_PATH))
    if paths_agree:
        return
    raise Exception(
        'The gcloud path in common.py: %s should match the path in '
        'release_constants.json: %s. Please fix.' % (
            common.GCLOUD_PATH, expected_gcloud_path))
def main(args: Optional[List[str]] = None) -> None:
    """Main method for pre-commit hook that checks files added/modified
    in a commit.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--install', action='store_true', default=False,
        help='Install pre_commit_hook to the .git/hooks dir')
    if parser.parse_args(args=args).install:
        install_hook()
        return
    print('Running pre-commit check for feconf and constants ...')
    check_changes_in_config()
    print('Running pre-commit check for gcloud path changes...')
    check_changes_in_gcloud_path()
    print('Running pre-commit check for package-lock.json ...')
    if not (does_diff_include_package_lock_file() and
            does_current_folder_contain_have_package_lock_file()):
        return
    # git commit aborts quietly on a non-zero exit status, so explain the
    # failure before exiting.
    print('-----------COMMIT ABORTED-----------')
    print(
        'Oppia utilize Yarn to manage node packages. Please delete '
        'package-lock.json, revert the changes in package.json, and use '
        'yarn to add, update, or delete the packages. For more information '
        'on how to use yarn, see https://yarnpkg.com/en/docs/usage.'
    )
    sys.exit(1)
# The 'no coverage' pragma is used as this line is un-testable: it only
# runs when pre_commit_hook.py is executed directly as a script.
if __name__ == '__main__': # pragma: no cover
    main()
| {
"content_hash": "795e9673f9125782380e4748a3e8536d",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 80,
"avg_line_length": 36.96086956521739,
"alnum_prop": 0.6532172685566404,
"repo_name": "oppia/oppia",
"id": "c84c34e7b340259d5a670ca6bf1faa27f42f5722",
"size": "9130",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "scripts/pre_commit_hook.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "476480"
},
{
"name": "HTML",
"bytes": "2092923"
},
{
"name": "JavaScript",
"bytes": "1247116"
},
{
"name": "PEG.js",
"bytes": "71377"
},
{
"name": "Python",
"bytes": "17628953"
},
{
"name": "Shell",
"bytes": "2240"
},
{
"name": "TypeScript",
"bytes": "15541372"
}
],
"symlink_target": ""
} |
import unittest
import numpy as np
import pandas as pd
import scipy.sparse as sp
import smurff
import itertools
import collections
# Verbosity level forwarded to every smurff call below; 0 keeps runs quiet.
verbose = 0
class TestBPMF(unittest.TestCase):
    """Smoke and consistency tests for smurff.bpmf on sparse/dense matrices
    and 2-/3-way tensors. Several tests fix seeds so that matrix and tensor
    runs can be compared prediction-by-prediction.
    """
    # Python 2.7 @unittest.skip fix
    __name__ = "TestSmurff"
    def test_bpmf(self):
        # One prediction should be produced per nonzero test cell.
        Y = sp.rand(10, 20, 0.2)
        Y, Ytest = smurff.make_train_test(Y, 0.5)
        predictions = smurff.bpmf(Y,
                                  Ytest=Ytest,
                                  num_latent=4,
                                  verbose=verbose,
                                  burnin=50,
                                  nsamples=50)
        self.assertEqual(Ytest.nnz, len(predictions))
    def test_bpmf_numerictest(self):
        # make_train_test also accepts a numeric holdout fraction (0.3).
        X = sp.rand(15, 10, 0.2)
        Xt = 0.3
        X, Xt = smurff.make_train_test(X, Xt)
        smurff.bpmf(X,
                    Ytest=Xt,
                    num_latent=10,
                    burnin=10,
                    nsamples=15,
                    verbose=verbose)
    def test_bpmf_emptytest(self):
        # bpmf must run cleanly without any Ytest supplied.
        X = sp.rand(15, 10, 0.2)
        smurff.bpmf(X,
                    num_latent=10,
                    burnin=10,
                    nsamples=15,
                    verbose=verbose)
    def test_bpmf_tensor(self):
        # Smoke test: bpmf accepts a 3-way SparseTensor built from a DataFrame.
        np.random.seed(1234)
        Y = smurff.SparseTensor(pd.DataFrame({
            "A": np.random.randint(0, 5, 7),
            "B": np.random.randint(0, 4, 7),
            "C": np.random.randint(0, 3, 7),
            "value": np.random.randn(7)
        }))
        Ytest = smurff.SparseTensor(pd.DataFrame({
            "A": np.random.randint(0, 5, 5),
            "B": np.random.randint(0, 4, 5),
            "C": np.random.randint(0, 3, 5),
            "value": np.random.randn(5)
        }))
        # Result is intentionally not inspected beyond completing cleanly.
        predictions = smurff.bpmf(Y,
                                  Ytest=Ytest,
                                  num_latent=4,
                                  verbose=verbose,
                                  burnin=50,
                                  nsamples=50)
    def test_bpmf_sparse_matrix_sparse_2d_tensor(self):
        # A sparse matrix and the equivalent 2-way sparse tensor must give
        # identical predictions when run with the same fixed seed.
        np.random.seed(1234)
        # Generate train matrix rows, cols and vals
        train_shape = (5, 4)
        sparse_random = sp.random(5, 4, density=1.0)
        train_sparse_matrix, test_sparse_matrix = smurff.make_train_test(sparse_random, 0.2)
        # Create train and test sparse matrices
        train_sparse_matrix = train_sparse_matrix.tocoo()
        test_sparse_matrix = test_sparse_matrix.tocoo()
        # Create train and test sparse tensors
        train_sparse_tensor = smurff.SparseTensor(pd.DataFrame({
            '0': train_sparse_matrix.row,
            '1': train_sparse_matrix.col,
            'v': train_sparse_matrix.data
        }), train_shape)
        test_sparse_tensor = smurff.SparseTensor(pd.DataFrame({
            '0': test_sparse_matrix.row,
            '1': test_sparse_matrix.col,
            'v': test_sparse_matrix.data
        }), train_shape)
        # Run SMURFF
        sparse_matrix_predictions = smurff.bpmf(train_sparse_matrix,
                                                Ytest=test_sparse_matrix,
                                                num_latent=4,
                                                num_threads=1,
                                                verbose=verbose,
                                                burnin=50,
                                                nsamples=50,
                                                seed=1234)
        sparse_tensor_predictions = smurff.bpmf(train_sparse_tensor,
                                                Ytest=test_sparse_tensor,
                                                num_latent=4,
                                                num_threads=1,
                                                verbose=verbose,
                                                burnin=50,
                                                nsamples=50,
                                                seed=1234)
        # Transfrom SMURFF results to dictionary of coords and predicted values
        sparse_matrix_predictions.sort()
        sparse_tensor_predictions.sort()
        self.assertEqual(len(sparse_matrix_predictions), len(sparse_tensor_predictions))
        for m, t in zip(sparse_matrix_predictions, sparse_tensor_predictions):
            self.assertEqual(m.coords, t.coords)
            self.assertAlmostEqual(m.pred_1sample, t.pred_1sample)
    def test_bpmf_dense_matrix_dense_2d_tensor(self):
        # Same comparison as above but with a dense train matrix versus a
        # sparse-representation of the equally dense 2-way tensor.
        np.random.seed(1234)
        # Generate train matrix rows, cols and vals
        train_shape = (5, 4)
        sparse_random = sp.random(5, 4, density=1.0)
        train_dense_matrix = sparse_random.todense()
        _, test_sparse_matrix = smurff.make_train_test(sparse_random, 0.2)
        # Create train and test sparse
        train_sparse_matrix = sp.coo_matrix(train_dense_matrix) # acutally dense
        test_sparse_matrix = test_sparse_matrix.tocoo()
        # Create train and test sparse representations of dense tensors
        train_sparse_tensor = smurff.SparseTensor(pd.DataFrame({
            '0': train_sparse_matrix.row,
            '1': train_sparse_matrix.col,
            'v': train_sparse_matrix.data
        }), train_shape)
        test_sparse_tensor = smurff.SparseTensor(pd.DataFrame({
            '0': test_sparse_matrix.row,
            '1': test_sparse_matrix.col,
            'v': test_sparse_matrix.data
        }), train_shape)
        # Run SMURFF
        sparse_matrix_predictions = smurff.bpmf(train_dense_matrix,
                                                Ytest=test_sparse_matrix,
                                                num_latent=4,
                                                num_threads=1,
                                                verbose=verbose,
                                                burnin=50,
                                                nsamples=50,
                                                seed=1234)
        sparse_tensor_predictions = smurff.bpmf(train_sparse_tensor,
                                                Ytest=test_sparse_tensor,
                                                num_latent=4,
                                                num_threads=1,
                                                verbose=verbose,
                                                burnin=50,
                                                nsamples=50,
                                                seed=1234)
        # Sort and compare coords and predicted values
        sparse_matrix_predictions.sort()
        sparse_tensor_predictions.sort()
        self.assertEqual(len(sparse_matrix_predictions), len(sparse_tensor_predictions))
        for m, t in zip(sparse_matrix_predictions, sparse_tensor_predictions):
            self.assertEqual(m.coords, t.coords)
            self.assertAlmostEqual(m.pred_1sample, t.pred_1sample)
    def test_bpmf_tensor2(self):
        # Rank-2 synthetic 3-way tensor: factorization should recover it
        # well enough that the held-out RMSE stays below 0.5.
        A = np.random.randn(15, 2)
        B = np.random.randn(20, 2)
        C = np.random.randn(3, 2)
        idx = list( itertools.product(np.arange(A.shape[0]), np.arange(B.shape[0]), np.arange(C.shape[0])) )
        df = pd.DataFrame( np.asarray(idx), columns=["A", "B", "C"])
        df["value"] = np.array([ np.sum(A[i[0], :] * B[i[1], :] * C[i[2], :]) for i in idx ])
        Ytrain, Ytest = smurff.make_train_test(df, 0.2)
        predictions = smurff.bpmf(Ytrain,
                                  Ytest=Ytest,
                                  num_latent=4,
                                  verbose=verbose,
                                  burnin=20,
                                  nsamples=20)
        rmse = smurff.calc_rmse(predictions)
        self.assertTrue(rmse < 0.5,
                        msg="Tensor factorization gave RMSE above 0.5 (%f)." % rmse)
    def test_bpmf_tensor3(self):
        # Same as test_bpmf_tensor2 but with a degenerate third mode (size 1).
        A = np.random.randn(15, 2)
        B = np.random.randn(20, 2)
        C = np.random.randn(1, 2)
        idx = list( itertools.product(np.arange(A.shape[0]), np.arange(B.shape[0]), np.arange(C.shape[0])) )
        df = pd.DataFrame( np.asarray(idx), columns=["A", "B", "C"])
        df["value"] = np.array([ np.sum(A[i[0], :] * B[i[1], :] * C[i[2], :]) for i in idx ])
        Ytrain, Ytest = smurff.make_train_test(df, 0.2)
        predictions = smurff.bpmf(Ytrain,
                                  Ytest=Ytest,
                                  num_latent=4,
                                  verbose=verbose,
                                  burnin=20,
                                  nsamples=20)
        rmse = smurff.calc_rmse(predictions)
        self.assertTrue(rmse < 0.5,
                        msg="Tensor factorization gave RMSE above 0.5 (%f)." % rmse)
# Allow running this module directly: python test_bpmf.py
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "f798864c40676d0719f1631114d8b8d1",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 108,
"avg_line_length": 40.3394495412844,
"alnum_prop": 0.46565840345690246,
"repo_name": "ExaScience/smurff",
"id": "95474b99b9585c76eb4bdb0be1c51ecf5acb0599",
"size": "8794",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/test/test_bpmf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1229"
},
{
"name": "C",
"bytes": "65340"
},
{
"name": "C++",
"bytes": "1032297"
},
{
"name": "CMake",
"bytes": "42024"
},
{
"name": "Dockerfile",
"bytes": "3753"
},
{
"name": "Jupyter Notebook",
"bytes": "266445"
},
{
"name": "Makefile",
"bytes": "369"
},
{
"name": "Objective-C++",
"bytes": "214673"
},
{
"name": "Perl",
"bytes": "1745"
},
{
"name": "PowerShell",
"bytes": "2577"
},
{
"name": "Python",
"bytes": "149232"
},
{
"name": "Ruby",
"bytes": "675"
},
{
"name": "Shell",
"bytes": "5918"
}
],
"symlink_target": ""
} |
"""
Use lldb Python SBFrame API to get the argument values of the call stacks.
And other SBFrame API tests.
"""
from __future__ import print_function
import os
import time
import re
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class FrameAPITestCase(TestBase):
    """Exercises the lldb SBFrame Python API: reading argument values off the
    call stack, boundary-condition inputs, and SBFrame.IsEqual. The stepping
    sequences below are order-sensitive; keep statement order intact.
    """
    mydir = TestBase.compute_mydir(__file__)
    @add_test_categories(['pyapi'])
    @expectedFailureAll(oslist=["windows"], bugnumber="llvm.org/pr24778")
    def test_get_arg_vals_for_call_stack(self):
        """Exercise SBFrame.GetVariables() API to get argument vals."""
        self.build()
        exe = self.getBuildArtifact("a.out")
        # Create a target by the debugger.
        target = self.dbg.CreateTarget(exe)
        self.assertTrue(target, VALID_TARGET)
        # Now create a breakpoint on main.c by name 'c'.
        breakpoint = target.BreakpointCreateByName('c', 'a.out')
        #print("breakpoint:", breakpoint)
        self.assertTrue(breakpoint and
                        breakpoint.GetNumLocations() == 1,
                        VALID_BREAKPOINT)
        # Now launch the process, and do not stop at the entry point.
        process = target.LaunchSimple(
            None, None, self.get_process_working_directory())
        process = target.GetProcess()
        self.assertTrue(process.GetState() == lldb.eStateStopped,
                        PROCESS_STOPPED)
        # Keeps track of the number of times 'a' is called where it is within a
        # depth of 3 of the 'c' leaf function.
        callsOfA = 0
        # Accumulates the formatted call descriptions for the final substring
        # checks at the bottom of this test.
        from six import StringIO as SixStringIO
        session = SixStringIO()
        while process.GetState() == lldb.eStateStopped:
            thread = lldbutil.get_stopped_thread(
                process, lldb.eStopReasonBreakpoint)
            self.assertIsNotNone(thread)
            # Inspect at most 3 frames.
            numFrames = min(3, thread.GetNumFrames())
            for i in range(numFrames):
                frame = thread.GetFrameAtIndex(i)
                if self.TraceOn():
                    print("frame:", frame)
                name = frame.GetFunction().GetName()
                if name == 'a':
                    callsOfA = callsOfA + 1
                # We'll inspect only the arguments for the current frame:
                #
                # arguments => True
                # locals => False
                # statics => False
                # in_scope_only => True
                valList = frame.GetVariables(True, False, False, True)
                argList = []
                for val in valList:
                    argList.append("(%s)%s=%s" % (val.GetTypeName(),
                                                  val.GetName(),
                                                  val.GetValue()))
                print("%s(%s)" % (name, ", ".join(argList)), file=session)
                # Also check the generic pc & stack pointer. We can't test their absolute values,
                # but they should be valid. Uses get_GPRs() from the lldbutil
                # module.
                gpr_reg_set = lldbutil.get_GPRs(frame)
                pc_value = gpr_reg_set.GetChildMemberWithName("pc")
                self.assertTrue(pc_value, "We should have a valid PC.")
                pc_value_int = int(pc_value.GetValue(), 0)
                # Make sure on arm targets we dont mismatch PC value on the basis of thumb bit.
                # Frame PC will not have thumb bit set in case of a thumb
                # instruction as PC.
                if self.getArchitecture() in ['arm', 'armv7', 'armv7k']:
                    pc_value_int &= ~1
                self.assertTrue(
                    pc_value_int == frame.GetPC(),
                    "PC gotten as a value should equal frame's GetPC")
                sp_value = gpr_reg_set.GetChildMemberWithName("sp")
                self.assertTrue(
                    sp_value, "We should have a valid Stack Pointer.")
                self.assertTrue(int(sp_value.GetValue(), 0) == frame.GetSP(
                ), "SP gotten as a value should equal frame's GetSP")
            print("---", file=session)
            process.Continue()
        # At this point, the inferior process should have exited.
        self.assertTrue(
            process.GetState() == lldb.eStateExited,
            PROCESS_EXITED)
        # Expect to find 'a' on the call stacks two times.
        self.assertTrue(callsOfA == 2,
                        "Expect to find 'a' on the call stacks two times")
        # By design, the 'a' call frame has the following arg vals:
        #     o a((int)val=1, (char)ch='A')
        #     o a((int)val=3, (char)ch='A')
        if self.TraceOn():
            print("Full stack traces when stopped on the breakpoint 'c':")
            print(session.getvalue())
        self.expect(session.getvalue(), "Argugment values displayed correctly",
                    exe=False,
                    substrs=["a((int)val=1, (char)ch='A')",
                             "a((int)val=3, (char)ch='A')"])
    @add_test_categories(['pyapi'])
    def test_frame_api_boundary_condition(self):
        """Exercise SBFrame APIs with boundary condition inputs."""
        self.build()
        exe = self.getBuildArtifact("a.out")
        # Create a target by the debugger.
        target = self.dbg.CreateTarget(exe)
        self.assertTrue(target, VALID_TARGET)
        # Now create a breakpoint on main.c by name 'c'.
        breakpoint = target.BreakpointCreateByName('c', 'a.out')
        #print("breakpoint:", breakpoint)
        self.assertTrue(breakpoint and
                        breakpoint.GetNumLocations() == 1,
                        VALID_BREAKPOINT)
        # Now launch the process, and do not stop at the entry point.
        process = target.LaunchSimple(
            None, None, self.get_process_working_directory())
        process = target.GetProcess()
        self.assertTrue(process.GetState() == lldb.eStateStopped,
                        PROCESS_STOPPED)
        thread = lldbutil.get_stopped_thread(
            process, lldb.eStopReasonBreakpoint)
        self.assertIsNotNone(thread)
        frame = thread.GetFrameAtIndex(0)
        if self.TraceOn():
            print("frame:", frame)
        # Boundary condition testings: None inputs must not crash the API.
        val1 = frame.FindVariable(None, True)
        val2 = frame.FindVariable(None, False)
        val3 = frame.FindValue(None, lldb.eValueTypeVariableGlobal)
        if self.TraceOn():
            print("val1:", val1)
            print("val2:", val2)
        frame.EvaluateExpression(None)
    @add_test_categories(['pyapi'])
    def test_frame_api_IsEqual(self):
        """Exercise SBFrame API IsEqual."""
        self.build()
        exe = self.getBuildArtifact("a.out")
        # Create a target by the debugger.
        target = self.dbg.CreateTarget(exe)
        self.assertTrue(target, VALID_TARGET)
        # Now create a breakpoint on main.c by name 'c'.
        breakpoint = target.BreakpointCreateByName('c', 'a.out')
        #print("breakpoint:", breakpoint)
        self.assertTrue(breakpoint and
                        breakpoint.GetNumLocations() == 1,
                        VALID_BREAKPOINT)
        # Now launch the process, and do not stop at the entry point.
        process = target.LaunchSimple(
            None, None, self.get_process_working_directory())
        process = target.GetProcess()
        self.assertTrue(process.GetState() == lldb.eStateStopped,
                        PROCESS_STOPPED)
        thread = lldbutil.get_stopped_thread(
            process, lldb.eStopReasonBreakpoint)
        self.assertIsNotNone(thread)
        frameEntered = thread.GetFrameAtIndex(0)
        if self.TraceOn():
            print(frameEntered)
            lldbutil.print_stacktrace(thread)
        self.assertTrue(frameEntered)
        # Doing two step overs while still inside c().
        thread.StepOver()
        thread.StepOver()
        self.assertTrue(thread)
        frameNow = thread.GetFrameAtIndex(0)
        if self.TraceOn():
            print(frameNow)
            lldbutil.print_stacktrace(thread)
        self.assertTrue(frameNow)
        # The latest two frames are considered equal.
        self.assertTrue(frameEntered.IsEqual(frameNow))
        # Now let's step out of frame c().
        thread.StepOutOfFrame(frameNow)
        frameOutOfC = thread.GetFrameAtIndex(0)
        if self.TraceOn():
            print(frameOutOfC)
            lldbutil.print_stacktrace(thread)
        self.assertTrue(frameOutOfC)
        # The latest two frames should not be equal.
        self.assertFalse(frameOutOfC.IsEqual(frameNow))
| {
"content_hash": "a28ff6b6216c6dd7004ac13eb4e728ad",
"timestamp": "",
"source": "github",
"line_count": 225,
"max_line_length": 98,
"avg_line_length": 38.94222222222222,
"alnum_prop": 0.567678612188998,
"repo_name": "youtube/cobalt",
"id": "85e915ad3a48ace3eb1b9ff87e326568351ea7a4",
"size": "8762",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "third_party/llvm-project/lldb/packages/Python/lldbsuite/test/python_api/frame/TestFrames.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""Tests downloading and reading of the GO annotation file from NCBI Gene.
python test_NCBI_Entrez_annotations.py
"""
__copyright__ = "Copyright (C) 2016-2017, DV Klopfenstein, H Tang. All rights reserved."
__author__ = "DV Klopfenstein"
import sys
from collections import defaultdict
from goatools.associations import get_assoc_ncbi_taxids
from goatools.test_data.genes_NCBI_9606_ProteinCoding import GeneID2nt as GeneID2nt_hsa
from goatools.test_data.genes_NCBI_7227_ProteinCoding import GeneID2nt as GeneID2nt_dme
def test_ncbi_gene2go(log=sys.stdout):
    """Return GO associations to Entrez GeneIDs. Download if necessary.

       Example report generated with Feb 22, 2013 download of:
         NCBI Gene tables and associations in gene2go

            49672 items found in gene2go from NCBI's ftp server

            taxid    GOs GeneIDs  Description
            ----- ------ -------  -----------
            10090 16,807  18,971  all DNA items
             7227  7,022  12,019  all DNA items
             7227  6,956  10,590  76% GO coverage of 13,919 protein-coding genes
             9606 16,299  18,680  all DNA items
             9606 16,296  18,253  87% GO coverage of 20,913 protein-coding genes
    """
    # Get associations for human(9606), mouse(10090), and fly(7227)
    # (optional) multi-level dictionary separate associations by taxid
    taxid2asscs = defaultdict(lambda: defaultdict(lambda: defaultdict(set)))
    # Simple dictionary containing id2gos
    taxids = [9606, 10090, 7227]
    id2gos = get_assoc_ncbi_taxids(taxids, taxid2asscs=taxid2asscs, loading_bar=None)
    log.write("  {N} items found in gene2go from NCBI's ftp server\n".format(N=len(id2gos)))
    # Protein-coding reference sets for the species we can compute coverage on.
    taxid2pc = {9606:GeneID2nt_hsa, 7227:GeneID2nt_dme}
    # Report findings
    log.write("   taxid    GOs GeneIDs  Description\n")
    log.write("   ----- ------ -------  -----------\n")
    for taxid, asscs in taxid2asscs.items():
        num_gene2gos_all = len(asscs['GeneID2GOs'])
        num_go2genes_all = len(asscs['GO2GeneIDs'])
        log.write("  {TAXID:>6} {N:>6,} {M:>7,}  all DNA items\n".format(
            TAXID=taxid, N=num_go2genes_all, M=num_gene2gos_all))
        # Basic check to ensure gene2go was downloaded and data was returned.
        assert num_gene2gos_all > 11000
        assert num_go2genes_all > 6000
        # Membership test on the dict itself; the former `.keys()` call was
        # redundant.
        if taxid in taxid2pc:
            rpt_coverage(taxid, asscs, taxid2pc[taxid], log)
def rpt_coverage(taxid, asscs, pc2nt, log):
    """Calculate and report GO coverage on protein-coding genes.

       Writes one line to *log*, e.g.:
            7227  6,956  10,590  76% GO coverage of 13,919 protein-coding genes
            9606 16,296  18,253  87% GO coverage of 20,913 protein-coding genes
    """
    geneid2gos = asscs['GeneID2GOs']
    # Protein-coding genes that have at least one GO annotation.
    covered_genes = set(geneid2gos).intersection(pc2nt)
    num_covered = len(covered_genes)
    num_pc_genes = len(pc2nt)
    pct_cov = 100.0 * num_covered / num_pc_genes
    # Union of all GO IDs annotated on the covered protein-coding genes.
    gos_covered = set()
    for geneid in covered_genes:
        gos_covered.update(geneid2gos[geneid])
    line_fmt = ("  {TAXID:>6} {N:>6,} {M:>7,} {COV:2.0f}% GO coverage "
                "of {TOT:,} protein-coding genes\n")
    log.write(line_fmt.format(
        TAXID=taxid, N=len(gos_covered), M=num_covered,
        COV=pct_cov, TOT=num_pc_genes))
# Allow running the coverage check directly as a script.
if __name__ == '__main__':
    test_ncbi_gene2go()
| {
"content_hash": "190d96b6c6107fe6a4b55cf9872c5cc5",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 99,
"avg_line_length": 44.01204819277108,
"alnum_prop": 0.6454968519025458,
"repo_name": "lileiting/goatools",
"id": "731820711d7c5d5f3a8cf6ad514548ede058cf0c",
"size": "3653",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_ncbi_entrez_annotations.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "224437"
},
{
"name": "Makefile",
"bytes": "14930"
},
{
"name": "Python",
"bytes": "77536843"
},
{
"name": "Shell",
"bytes": "1068"
}
],
"symlink_target": ""
} |
from time import time
from constants import *
import pygame
class Metronome:
    """Tracks beat positions inside a fixed-length measure (measured in audio
    buffers) and can draw itself with pygame.

    Invariant: measure_len is always an exact multiple of beats, and
    beat_len == measure_len // beats.
    """

    def __init__(self, measure_len, beats=8):
        assert measure_len % beats == 0
        self.measure_len = measure_len
        self.beats = beats
        self.beat_len = measure_len // beats
        self.visual_buffer_width = VISUAL_BUFFER_WIDTH
        # Whether the metronome is audible. NOTE(review): not read by any
        # method in this class; presumably consumed elsewhere — confirm.
        self.sound = True

    def change_measure_length(self, change):
        """Grow/shrink the measure (minimum 8 buffers); returns the new length.

        The caller must keep the result a multiple of the beat count.
        """
        self.measure_len = max(self.measure_len + change, 8)
        assert self.measure_len % self.beats == 0
        self.beat_len = self.measure_len // self.beats
        return self.measure_len

    def force_buffer_length(self, value):
        """Set the measure length to *value*, adjusting the beat count to the
        nearest count (preferring larger) that divides it evenly; returns the
        new measure length.
        """
        for offset in range(self.beats):
            candidate = self.beats + offset
            if value % candidate == 0:
                break
            candidate = self.beats - offset
            if value % candidate == 0:
                break
        self.measure_len = value
        self.beats = candidate
        assert self.measure_len % self.beats == 0
        self.beat_len = self.measure_len // self.beats
        return self.measure_len

    def change_beat_count(self, change):
        """Adjust the beat count and trim the measure so it stays an exact
        multiple of the new count; returns the new beat count.
        """
        self.beats += change
        self.measure_len -= self.measure_len % self.beats
        assert self.measure_len % self.beats == 0
        self.beat_len = self.measure_len // self.beats
        return self.beats

    def get_beat(self, buffer_number):
        """Zero-based beat index containing the given buffer."""
        return buffer_number // self.beat_len

    def is_beat(self, buffer_number):
        """True when this buffer is the first one of a beat."""
        return self.get_beat(buffer_number) != (buffer_number - 1) // self.beat_len

    def is_measure(self, buffer_number):
        """True only at the very start of the measure."""
        return buffer_number == 0

    def paint_self(self, screen, buffer_number, is_active):
        """Draw the metronome bar: beat cells, an outline when active, the
        current beat highlighted, and an exact-position line.
        """
        height = 40
        measure_px = self.measure_len * self.visual_buffer_width
        beat_px = measure_px // self.beats
        # Re-derive widths so beat cells tile the bar exactly.
        measure_px = beat_px * self.beats
        self.visual_buffer_width = measure_px // self.measure_len
        x, y = 10, 10
        ## Background
        pygame.draw.rect(screen, METRONOME_INACTIVE_COLOR, (x, y, measure_px, height), 0)
        ## Active index outline
        if is_active:
            pygame.draw.rect(
                screen, ACTIVE_LOOP_OUTLINE_COLOR,
                (x - 1, y - 1, measure_px + 2, height + 2), 1)
        # Draw beats, highlighting the one containing buffer_number.
        current_beat = self.get_beat(buffer_number)
        for beat in range(self.beats):
            if beat == current_beat:
                pygame.draw.rect(screen, METRONOME_ACTIVE_COLOR, (x, y, beat_px, height), 0)
            pygame.draw.rect(screen, (0, 0, 0), (x, y, beat_px, height), 1)
            x += beat_px
        ## Exact position line
        x = 10 + measure_px * buffer_number / self.measure_len
        pygame.draw.line(screen, (0, 0, 0), (x, y), (x, y + height))
| {
"content_hash": "eebe60d7790f7b82fcf0cf3d130a0fa6",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 96,
"avg_line_length": 34.18987341772152,
"alnum_prop": 0.5805257312106628,
"repo_name": "kenanbit/loopsichord",
"id": "3b0fd13bebbab9a98673ad2984b922fd845f51c6",
"size": "2701",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metronome.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "55502"
}
],
"symlink_target": ""
} |
"""Model unit tests."""
import datetime as dt
import pytest
from ccflasktest.user.models import User, Role
from .factories import UserFactory
@pytest.mark.usefixtures('db')
class TestUser:
    """Unit tests for the User model and its Role relationship."""

    def test_get_by_id(self):
        saved = User('foo', 'foo@bar.com')
        saved.save()
        assert User.get_by_id(saved.id) == saved

    def test_created_at_defaults_to_datetime(self):
        u = User(username='foo', email='foo@bar.com')
        u.save()
        assert u.created_at
        assert isinstance(u.created_at, dt.datetime)

    def test_password_is_nullable(self):
        u = User(username='foo', email='foo@bar.com')
        u.save()
        assert u.password is None

    def test_factory(self, db):
        u = UserFactory(password="myprecious")
        db.session.commit()
        assert u.username
        assert u.email
        assert u.created_at
        assert u.is_admin is False
        assert u.active is True
        assert u.check_password('myprecious')

    def test_check_password(self):
        u = User.create(username="foo", email="foo@bar.com",
                        password="foobarbaz123")
        assert u.check_password('foobarbaz123') is True
        assert u.check_password("barfoobaz") is False

    def test_full_name(self):
        u = UserFactory(first_name="Foo", last_name="Bar")
        assert u.full_name == "Foo Bar"

    def test_roles(self):
        admin_role = Role(name='admin')
        admin_role.save()
        member = UserFactory()
        member.roles.append(admin_role)
        member.save()
        assert admin_role in member.roles
| {
"content_hash": "cc8aa213f03e450df29ae1da63ff5571",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 63,
"avg_line_length": 29.140350877192983,
"alnum_prop": 0.6152919927754364,
"repo_name": "chrcoe/ccflasktest",
"id": "5169593ca0637743b54fbdc46a662273ebe9d9fa",
"size": "1685",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1170"
},
{
"name": "HTML",
"bytes": "8694"
},
{
"name": "JavaScript",
"bytes": "240856"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "28985"
}
],
"symlink_target": ""
} |
__author__ = 'geebzter'
# Example of how to implement an RPS player within the framework
import Player
import Message
class RPSPlayerExample(Player.Player):
    """Example RPS player: records the opponent's move history and plays
    against their least-frequent move via RpsPlayingStrategy."""

    def __init__(self):
        # Initialize the framework base class, then clear our history.
        Player.Player.__init__(self)
        self.reset()

    def play(self):
        # Delegate move selection to the strategy, driven by history.
        return RpsPlayingStrategy.play(self.opponents_moves)

    def reset(self):
        # Forget everything observed about the opponent so far.
        self.opponents_moves = []

    def get_name(self):
        return "Minimizer"

    def notify(self, msg):
        """Track the opponent's moves from framework notifications.

        A match-start message involving us clears the history; a
        round-end message involving us appends the opponent's move.
        """
        if msg.is_match_start_message():
            players = msg.get_players()
            if players[0] == self or players[1] == self:
                self.reset()
        elif msg.is_round_end_message():
            players = msg.get_players()
            # Ignore round-end messages for matches we are not part of.
            if not (players[0] == self or players[1] == self):
                return
            # By convention info is ((move_p0, move_p1), (score_p0, score_p1)),
            # e.g. ((1, 0), (1, 0)): p0 played paper, p1 rock, p0 won.
            moves, result = msg.get_info()
            # RPS is two-player: the opponent occupies whichever slot is not us.
            opponent = 1 if players[0] == self else 0
            self.opponents_moves.append(moves[opponent])
# An implementation of a simple rps playing strategy
class RpsPlayingStrategy(object):
    """A simple counter-strategy: beat the opponent's least-played move."""

    @staticmethod
    def play(opponents_moves):
        """Return the move (0=rock, 1=paper, 2=scissors) that beats the
        opponent's least-frequently played move so far.

        Tie-breaking matches the original comparisons: a rock/paper tie
        goes to paper, and scissors wins only if strictly rarer.
        """
        tallies = [opponents_moves.count(move) for move in (0, 1, 2)]
        # Candidate rarest move among rock/paper (tie -> paper) ...
        rarest = 0 if tallies[0] < tallies[1] else 1
        # ... displaced by scissors only when scissors is strictly rarer.
        if tallies[2] < tallies[rarest]:
            rarest = 2
        # The move that beats X is (X + 1) mod 3.
        return (rarest + 1) % 3
# Test driver
# Run by typing "python3 RpsPlayerExample.py"
if __name__ == "__main__":
    player = RPSPlayerExample()
    opponent = RPSPlayerExample()
    players = [opponent, player]
    # Simulated round data: one move and one score per player slot
    # (same tuple convention described in notify()).
    fake_moves = (1, 2)
    fake_result = (0, 1)
    player.notify(Message.Message.get_match_start_message(players))
    player.notify(Message.Message.get_round_start_message(players))
    move = player.play()
    print("Move played: ", move)
    player.notify(Message.Message.get_round_end_message(players, fake_moves, fake_result))
"content_hash": "a488a4575922e26f5dfaa88355e057b1",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 108,
"avg_line_length": 33.357142857142854,
"alnum_prop": 0.5971245029060875,
"repo_name": "PaulieC/sprint1_Council_b",
"id": "6be2b9cc4acedaca73f89ea22fd6e97f31fafdfc",
"size": "3269",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "RPSPlayerExample.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "43910"
}
],
"symlink_target": ""
} |
"""File: usage_audit.py
Find all users and projects where their total usage (current file + deleted files) is >= the set limit
Projects or users can have their GUID whitelisted via `usage_audit whitelist [GUID ...]`
User usage is defined as the total usage of all projects they have > READ access on
Project usage is defined as the total usage of it and all its children
total usage is defined as the sum of the size of all versions associated with X via OsfStorageFileNode and OsfStorageTrashedFileNode
"""
import os
import gc
import json
import logging
import functools
from collections import defaultdict
import progressbar
from framework.celery_tasks import app as celery_app
from osf.models import TrashedFile
from website import mails
from website.app import init_app
from scripts import utils as scripts_utils
# App must be init'd before django models are imported
init_app(set_backends=True, routes=False)
from osf.models import BaseFileNode, FileVersion, OSFUser, AbstractNode
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
GBs = 1024 ** 3.0  # bytes per GB; float so the divisions below stay float
USER_LIMIT = 5 * GBs  # per-user total usage threshold before reporting
PROJECT_LIMIT = 5 * GBs  # per-project total usage threshold before reporting

WHITE_LIST_PATH = os.path.join(os.path.dirname(__file__), 'usage_whitelist.json')

# Load the GUID whitelist once at import time; a missing file simply
# means nothing has been whitelisted yet.
try:
    with open(WHITE_LIST_PATH, 'r') as fobj:
        WHITE_LIST = set(json.load(fobj))  # Cast to set for constant time look ups
    logger.info('Loaded whitelist.json from {}'.format(WHITE_LIST_PATH))
except IOError:
    WHITE_LIST = set()
    logger.warning('No whitelist found')
def add_to_white_list(gtg):
    """Merge the given GUIDs into the persisted whitelist file.

    Only GUIDs not already whitelisted are added; the merged set is
    written back to WHITE_LIST_PATH. The in-memory WHITE_LIST set is
    deliberately left untouched (the task exits after updating the file).
    """
    gtg = set(gtg).difference(WHITE_LIST)
    logger.info('Adding {} to whitelist'.format(gtg))
    updated = WHITE_LIST.union(gtg)
    with open(WHITE_LIST_PATH, 'w') as fobj:
        json.dump(list(updated), fobj)  # Sets are not JSON serializable
    # Bug fix: log the merged whitelist that was actually written, not
    # the unchanged module-level WHITE_LIST (which made this log line
    # claim an update that it did not show).
    logger.info('Whitelist updated to {}'.format(updated))
def get_usage(node):
    """Return (current_usage, deleted_usage) in bytes for `node` plus all
    of its primary children, summed recursively.

    Current usage is the total size of osfstorage file versions on the
    node; deleted usage counts versions belonging to trashed files.
    """
    vids = [each for each in BaseFileNode.active.filter(provider='osfstorage', node=node).values_list('versions', flat=True) if each]
    # Bug fix: the loop variable was misspelled ('eac'), so both the
    # filter and the produced values referenced a stale 'each' leaked
    # from the comprehension above (wrong totals, or NameError when the
    # first comprehension was empty).
    t_vids = [each for each in TrashedFile.objects.filter(provider='osfstorage', node=node).values_list('versions', flat=True) if each]
    usage = sum([v.size or 0 for v in FileVersion.objects.filter(id__in=vids)])
    trashed_usage = sum([v.size or 0 for v in FileVersion.objects.filter(id__in=t_vids)])
    # Adds tuples together, map(sum, zip((a, b), (c, d))) -> (a+c, b+d)
    return map(sum, zip(*([(usage, trashed_usage)] + [get_usage(child) for child in node.nodes_primary])))
def limit_filter(limit, item_usage):
    """Return True when the item is not whitelisted and its combined
    usage meets or exceeds `limit`.

    `item_usage` is an (item, (current_usage, deleted_usage)) pair as
    produced by dict.items(); existing callers via
    functools.partial/filter are unaffected.

    Bug fix: tuple parameter unpacking in the signature was removed in
    Python 3 (PEP 3113) and made this module a SyntaxError there; the
    unpacking now happens in the body.
    """
    item, usage = item_usage
    return item not in WHITE_LIST and sum(usage) >= limit
def main(send_email=False):
    """Audit storage usage of all top-level projects and their users.

    Builds per-project and per-user (current, deleted) usage totals,
    then logs (and optionally emails) every project/user whose total
    meets its limit. Whitelisted node GUIDs are skipped entirely.
    """
    logger.info('Starting Project storage audit')
    lines = []
    projects = {}
    # Per-user accumulator of (current_usage, deleted_usage) tuples.
    users = defaultdict(lambda: (0, 0))

    top_level_nodes = AbstractNode.objects.get_roots()
    progress_bar = progressbar.ProgressBar(maxval=top_level_nodes.count()).start()
    top_level_nodes = top_level_nodes.iterator()

    for i, node in enumerate(top_level_nodes):
        progress_bar.update(i+1)
        if node._id in WHITE_LIST:
            continue  # Dont count whitelisted nodes against users
        projects[node._id] = get_usage(node)
        for contrib in node.contributors:
            # Only charge usage to contributors with edit (> READ) access.
            if node.can_edit(user=contrib):
                users[contrib._id] = tuple(map(sum, zip(users[contrib._id], projects[node._id])))  # Adds tuples together, map(sum, zip((a, b), (c, d))) -> (a+c, b+d)
        if i % 25 == 0:
            # Keep memory bounded while walking the full queryset.
            gc.collect()
    progress_bar.finish()

    for model, collection, limit in ((OSFUser, users, USER_LIMIT), (AbstractNode, projects, PROJECT_LIMIT)):
        for item, (used, deleted) in filter(functools.partial(limit_filter, limit), collection.items()):
            line = '{!r} has exceeded the limit {:.2f}GBs ({}b) with {:.2f}GBs ({}b) used and {:.2f}GBs ({}b) deleted.'.format(model.load(item), limit / GBs, limit, used / GBs, used, deleted / GBs, deleted)
            logger.info(line)
            lines.append(line)

    if lines:
        if send_email:
            logger.info('Sending email...')
            mails.send_mail('support+scripts@osf.io', mails.EMPTY, body='\n'.join(lines), subject='Script: OsfStorage usage audit')
        else:
            # NOTE(review): this .format(...) is a no-op -- the message
            # has no placeholder for len(lines).
            logger.info('send_email is False, not sending email'.format(len(lines)))
        logger.info('{} offending project(s) and user(s) found'.format(len(lines)))
    else:
        logger.info('No offending projects or users found')
@celery_app.task(name='scripts.osfstorage.usage_audit')
def run_main(send_mail=False, white_list=None):
    """Celery entry point: update the whitelist when GUIDs are given,
    otherwise run the full usage audit."""
    scripts_utils.add_file_logger(logger, __file__)
    if not white_list:
        main(send_mail)
    else:
        add_to_white_list(white_list)
| {
"content_hash": "58ffb4e1d60e70e382d0f1d887b1b090",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 206,
"avg_line_length": 38.76229508196721,
"alnum_prop": 0.6770987523789385,
"repo_name": "crcresearch/osf.io",
"id": "8ec982a9cbc308090bc5b8c78110cbc1c2fd8e10",
"size": "4729",
"binary": false,
"copies": "8",
"ref": "refs/heads/develop",
"path": "scripts/osfstorage/usage_audit.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "110148"
},
{
"name": "HTML",
"bytes": "225000"
},
{
"name": "JavaScript",
"bytes": "1807027"
},
{
"name": "Mako",
"bytes": "642435"
},
{
"name": "Python",
"bytes": "7499660"
},
{
"name": "VCL",
"bytes": "13885"
}
],
"symlink_target": ""
} |
"""
Created on Fri Apr 27 13:08:16 2018
@author: gerar
"""
import os, datetime
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as dates
#%%
def get_data(dataframe):
    """Read MODIS observation dates from a tab-separated file and count them.

    :param dataframe: path to a tab-delimited text file whose first
        column is named 'Date_MODIS' and holds parseable dates.
    :return: tuple (mdata, monthly, yearly) of DataFrames holding
        observation counts per (year, month), per calendar month, and
        per year, each indexed by a representative datetime.
    """
    data = pd.read_table(dataframe, usecols=[0])
    # Bug fix: astype('datetime64') (unit-less) raises in modern pandas;
    # pd.to_datetime performs the same conversion portably.
    data['Date_MODIS'] = pd.to_datetime(data['Date_MODIS'])

    # Counts per (year, month), indexed by the first day of each month.
    # rename_axis returns a new frame, avoiding the version-fragile
    # Index.rename(..., inplace=True).
    mdata = data.groupby([data['Date_MODIS'].dt.year,
                          data['Date_MODIS'].dt.month]).count()
    mdata = mdata.rename_axis(['Year', 'Month']).reset_index()
    mdata.index = mdata.apply(
        lambda x: datetime.datetime.strptime(
            "{0} {1} 00:00:00".format(x['Year'], x['Month']), "%Y %m %H:%M:%S"),
        axis=1)
    del mdata['Year'], mdata['Month']

    # Counts per year, indexed by January 1st of that year.
    yearly = data.groupby([data['Date_MODIS'].dt.year]).count()
    yearly = yearly.rename_axis('Year').reset_index()
    yearly.index = yearly.apply(
        lambda x: datetime.datetime.strptime(
            '{0} 01 00:00:00'.format(x['Year']), '%Y %m %H:%M:%S'),
        axis=1)
    del yearly['Year']

    # Counts per calendar month pooled across all years; 2018 is only a
    # placeholder year for the index.
    monthly = data.groupby([data['Date_MODIS'].dt.month]).count()
    monthly = monthly.rename_axis('Month').reset_index()
    monthly.index = monthly.apply(
        lambda x: datetime.datetime.strptime(
            '2018 {0} 00:00:00'.format(x['Month']), '%Y %m %H:%M:%S'),
        axis=1)
    return mdata, monthly, yearly
def data_hist(dataframe):
    """Bar-plot per-month observation counts with month/year tick labels."""
    fig, ax = plt.subplots(figsize=(25, 7))
    ax.bar(dataframe.index, dataframe['Date_MODIS'], width=25)
    # Minor ticks: month abbreviations; major ticks: years.
    ax.xaxis.set_minor_locator(dates.MonthLocator())
    ax.xaxis.set_minor_formatter(dates.DateFormatter('%b'))
    ax.xaxis.set_major_locator(dates.YearLocator())
    ax.xaxis.set_major_formatter(dates.DateFormatter('%Y'))
    # Shrink the month labels and push the year labels below them.
    ax.tick_params(axis='x', which='minor', labelsize=5)
    ax.xaxis.set_tick_params(which='major', pad=15)
    fig.tight_layout()
#%%
def yearly_hist(dataframe):
    """Bar-plot per-year observation counts with one tick per year."""
    fig, ax = plt.subplots(figsize=(15, 7))
    ax.bar(dataframe.index, dataframe['Date_MODIS'], width=200)
    ax.xaxis.set_major_locator(dates.YearLocator())
    ax.xaxis.set_major_formatter(dates.DateFormatter('%Y'))
    fig.tight_layout()
#%%
def monthly_hist(dataframe):
    """Bar-plot per-calendar-month observation counts with month labels."""
    fig, ax = plt.subplots(figsize=(15, 7))
    ax.bar(dataframe.index, dataframe['Date_MODIS'], width=20)
    ax.xaxis.set_major_locator(dates.MonthLocator())
    ax.xaxis.set_major_formatter(dates.DateFormatter('%b'))
    fig.tight_layout()
#%%
if __name__ == '__main__':
    # Use the first '*_end.txt' data file found in the working directory.
    files = [x for x in os.listdir(os.getcwd()) if x.endswith('_end.txt')]
    mdata, month, year = get_data(files[0])
    data_hist(mdata)
    monthly_hist(month)
    yearly_hist(year)
"content_hash": "ab01d8abb3570cd9d8e977a11b5a3764",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 138,
"avg_line_length": 36.885714285714286,
"alnum_prop": 0.6524657887942164,
"repo_name": "DangoMelon0701/PyRemote-Sensing",
"id": "85c675e430d7ca083c00872ef8ef6a80c5ae0572",
"size": "3897",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MODIS_AOD modified/histogram.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "178614"
}
],
"symlink_target": ""
} |
'''
Created by auto_sdk on 2015.11.10
'''
from top.api.base import RestApi
class TradeGetRequest(RestApi):
    """Auto-generated TOP SDK request wrapper for 'taobao.trade.get'.

    Request parameters (set by the caller before sending):
      fields -- presumably the response fields to request; confirm
                against the TOP API docs.
      tid    -- presumably the trade id being fetched; confirm likewise.
    """

    def __init__(self, domain='gw.api.taobao.com', port=80):
        RestApi.__init__(self, domain, port)
        # Request parameters; populated by the caller.
        self.fields = None
        self.tid = None

    def getapiname(self):
        # API method name used by the RestApi base class when dispatching.
        return 'taobao.trade.get'
| {
"content_hash": "a8b42834bbc71e39589aa73ddb791634",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 55,
"avg_line_length": 25.333333333333332,
"alnum_prop": 0.6710526315789473,
"repo_name": "colaftc/webtool",
"id": "a81affe32ad043d99ae9265c9f113ba22a8f0f19",
"size": "304",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "top/api/rest/TradeGetRequest.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12208"
},
{
"name": "HTML",
"bytes": "16773"
},
{
"name": "JavaScript",
"bytes": "2571"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "258023"
},
{
"name": "Ruby",
"bytes": "861"
},
{
"name": "VimL",
"bytes": "401921"
}
],
"symlink_target": ""
} |
class TestCloseTooManyArgumentsPlugin:
    """Fixture plugin whose close() deliberately declares an extra
    argument -- the wrong arity is the point of this fixture, so do not
    "fix" the signature.
    """

    def __init__(self):
        # Flips to True only if close() is (incorrectly) invoked.
        self.called = False

    def close(self, cardinal, _):
        """This should never be hit due to wrong number of args."""
        self.called = True
def setup():
    # Plugin entry point (convention: the framework calls setup() to
    # obtain the plugin instance).
    return TestCloseTooManyArgumentsPlugin()
| {
"content_hash": "f8df3b3262e32eb507fedc81f6de85c1",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 67,
"avg_line_length": 25.545454545454547,
"alnum_prop": 0.6512455516014235,
"repo_name": "JohnMaguire/Cardinal",
"id": "eab6946a7c668c4a08fb3ec796823bf5b1930dca",
"size": "281",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cardinal/fixtures/fake_plugins/close_too_many_arguments/plugin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "256"
},
{
"name": "Python",
"bytes": "318314"
}
],
"symlink_target": ""
} |
from calvin.actor.actor import Actor, manage, condition, stateguard, calvinsys
from calvin.utilities.calvinlogger import get_actor_logger
_log = get_actor_logger(__name__)
class RuntimeName(Actor):
    """
    Get name of current runtime

    Input:
      trigger: Any token will trigger a read
    Output:
      value : A string denoting the name of this runtime, or null
    """

    @manage(["registry"])
    def init(self):
        # Open the indexed-attribute calvinsys object and request the
        # runtime's configured name; the value becomes readable later.
        self.registry = calvinsys.open(self, "sys.attribute.indexed")
        # select attribute to read
        calvinsys.write(self.registry, "node_name.name")

    @stateguard(lambda self: calvinsys.can_read(self.registry))
    @condition(action_input=['trigger'], action_output=['value'])
    def read(self, _):
        # Fires only once the attribute value is available (stateguard);
        # consumes one trigger token and emits the runtime name.
        attr = calvinsys.read(self.registry)
        return (attr,)

    action_priority = (read,)
    requires = ["sys.attribute.indexed"]

    # NOTE(review): these appear to drive the actor store's built-in
    # test harness (mocked calvinsys traffic and expected port tokens);
    # confirm against the calvinsys test machinery before relying on it.
    test_calvinsys = {'sys.attribute.indexed': {'read': ["runtime.attribute"],
                                                'write': ["node_name.name"]}}

    test_set = [
        {
            'inports': {'trigger': [True]},
            'outports': {'value': ["runtime.attribute"]}
        }
    ]
| {
"content_hash": "78e153b1981d724924a4552025d25e3c",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 78,
"avg_line_length": 29.8,
"alnum_prop": 0.5964765100671141,
"repo_name": "EricssonResearch/calvin-base",
"id": "68d58c58a762d4074b26a3436c1f19804e8241fc",
"size": "1797",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "calvin/actorstore/systemactors/context/RuntimeName.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "769"
},
{
"name": "Dockerfile",
"bytes": "612"
},
{
"name": "HTML",
"bytes": "24571"
},
{
"name": "JavaScript",
"bytes": "78325"
},
{
"name": "Makefile",
"bytes": "816"
},
{
"name": "Python",
"bytes": "3291484"
},
{
"name": "Shell",
"bytes": "37140"
}
],
"symlink_target": ""
} |
"""
channel.py
:copyright: (c) 2015 by Fulfil.IO Inc.
:license: see LICENSE for more details.
"""
from trytond.pool import PoolMeta
from trytond.model import fields
__all__ = ['Channel']
__metaclass__ = PoolMeta
def submit_to_google(url, data, token=None):
    """POST `data` as JSON to `url` with a Google OAuth bearer token.

    :param url: endpoint to POST to
    :param data: JSON-serializable payload
    :param token: OAuth access token; defaults to the legacy hard-coded
        token for backward compatibility.
    :return: the ``requests.Response``
    """
    import requests
    import json
    if token is None:
        # SECURITY: a hard-coded OAuth bearer token was checked into
        # source here (and is likely long expired). It is kept only as
        # the backward-compatible default -- move the credential to
        # configuration and pass `token` explicitly.
        token = 'ya29.5AE7v1wOfgun1gR_iXwuGhMnt8nPNbT4C-Pd39DUnsNGb9I6U5FQqRJXNyPb3a0Dk1OWzA'  # noqa
    return requests.post(
        url,
        data=json.dumps(data),
        headers={
            'Content-Type': 'application/json',
            'Authorization': 'Bearer {}'.format(token),
        }
    )
class Channel:
    """Extend sale.channel (via PoolMeta) with a nereid website link."""
    __name__ = "sale.channel"

    website = fields.Many2One('nereid.website', 'Website', select=True)

    @classmethod
    def upload_products_to_google_merchant(cls):
        # Placeholder: Google Merchant product upload is not implemented yet.
        pass
| {
"content_hash": "93fd7be64b8dfe0da9206a8c0a62cd6b",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 122,
"avg_line_length": 22.085714285714285,
"alnum_prop": 0.6390685640362225,
"repo_name": "prakashpp/trytond-google-merchant",
"id": "d71e185ccd2c4b0683b4d002b008356bf22ae160",
"size": "797",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "channel.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "13180"
}
],
"symlink_target": ""
} |
import itertools
import pytest
import numpy as np
from numpy.testing import assert_allclose
from astropy.convolution.utils import discretize_model
from astropy.modeling.functional_models import (
Gaussian1D, Box1D, RickerWavelet1D, Gaussian2D, Box2D, RickerWavelet2D)
from astropy.modeling.tests.example_models import models_1D, models_2D
from astropy.modeling.tests.test_models import create_model
# scipy is optional; it is only needed by the 'integrate'-mode tests,
# which are skipped when it is absent.
try:
    import scipy  # pylint: disable=W0611
    HAS_SCIPY = True
except ImportError:
    HAS_SCIPY = False

# Discretization modes exercised by the parametrized tests below.
modes = ['center', 'linear_interp', 'oversample']

# Representative analytic models whose expected integrals are defined
# in example_models (models_1D / models_2D).
test_models_1D = [Gaussian1D, Box1D, RickerWavelet1D]
test_models_2D = [Gaussian2D, Box2D, RickerWavelet2D]
@pytest.mark.parametrize(('model_class', 'mode'), list(itertools.product(test_models_1D, modes)))
def test_pixel_sum_1D(model_class, mode):
    """Discretized pixel values should sum close to the analytic integral."""
    if model_class == Box1D and mode == "center":
        pytest.skip("Non integrating mode. Skip integral test.")

    params = models_1D[model_class]
    model = create_model(model_class, params)
    pixels = discretize_model(model, params['x_lim'], mode=mode)
    assert_allclose(pixels.sum(), params['integral'], atol=0.0001)
@pytest.mark.parametrize('mode', modes)
def test_gaussian_eval_1D(mode):
    """Discretized Gaussian1D should closely match direct evaluation."""
    model = Gaussian1D(1, 0, 20)
    grid = np.arange(-100, 101)
    expected = model(grid)
    sampled = discretize_model(model, (-100, 101), mode=mode)
    assert_allclose(expected, sampled, atol=0.001)
@pytest.mark.parametrize(('model_class', 'mode'), list(itertools.product(test_models_2D, modes)))
def test_pixel_sum_2D(model_class, mode):
    """Discretized pixel values should sum close to the analytic integral."""
    if model_class == Box2D and mode == "center":
        pytest.skip("Non integrating mode. Skip integral test.")

    params = models_2D[model_class]
    model = create_model(model_class, params)
    pixels = discretize_model(model, params['x_lim'], params['y_lim'],
                              mode=mode)
    assert_allclose(pixels.sum(), params['integral'], atol=0.0001)
@pytest.mark.parametrize('mode', modes)
def test_gaussian_eval_2D(mode):
    """Discretized Gaussian2D should closely match direct evaluation."""
    model = Gaussian2D(0.01, 0, 0, 1, 1)
    xx, yy = np.meshgrid(np.arange(-2, 3), np.arange(-2, 3))
    expected = model(xx, yy)
    sampled = discretize_model(model, (-2, 3), (-2, 3), mode=mode)
    assert_allclose(expected, sampled, atol=1e-2)
@pytest.mark.skipif('not HAS_SCIPY')
def test_gaussian_eval_2D_integrate_mode():
    """Integrate-mode discretization matches evaluation for several widths."""
    xx, yy = np.meshgrid(np.arange(-2, 3), np.arange(-2, 3))
    for stddevs in ((2, 2), (1, 2), (2, 1)):
        model = Gaussian2D(.01, 0, 0, *stddevs)
        expected = model(xx, yy)
        sampled = discretize_model(model, (-2, 3), (-2, 3), mode='integrate')
        assert_allclose(expected, sampled, atol=1e-2)
@pytest.mark.skipif('not HAS_SCIPY')
def test_subpixel_gauss_1D():
    """Integrate mode with oversampling resolves a narrow 1D Gaussian."""
    narrow = Gaussian1D(1, 0, 0.1)
    pixels = discretize_model(narrow, (-1, 2), mode='integrate', factor=100)
    # Total flux of a unit-amplitude Gaussian is sqrt(2*pi) * stddev.
    assert_allclose(pixels.sum(), np.sqrt(2 * np.pi) * 0.1, atol=0.00001)
@pytest.mark.skipif('not HAS_SCIPY')
def test_subpixel_gauss_2D():
    """Integrate mode with oversampling resolves a narrow 2D Gaussian."""
    narrow = Gaussian2D(1, 0, 0, 0.1, 0.1)
    pixels = discretize_model(narrow, (-1, 2), (-1, 2), mode='integrate', factor=100)
    # Total flux of a unit-amplitude 2D Gaussian is 2*pi*sx*sy = 2*pi*0.01.
    assert_allclose(pixels.sum(), 2 * np.pi * 0.01, atol=0.00001)
def test_discretize_callable_1d():
    """discretize_model should accept a plain 1-d callable."""
    def square(x):
        return x ** 2

    sampled = discretize_model(square, (-5, 6))
    assert_allclose(sampled, np.arange(-5, 6) ** 2)
def test_discretize_callable_2d():
    """discretize_model should accept a plain 2-d callable."""
    def sq_radius(x, y):
        return x ** 2 + y ** 2

    sampled = discretize_model(sq_radius, (-5, 6), (-5, 6))
    yy, xx = np.indices((11, 11)) - 5
    assert_allclose(sampled, xx ** 2 + yy ** 2)
def test_type_exception():
    """A non-callable model argument must raise TypeError."""
    with pytest.raises(TypeError) as excinfo:
        discretize_model(float(0), (-10, 11))
    assert excinfo.value.args[0] == 'Model must be callable.'
def test_dim_exception_1d():
    """Passing a y range with a 1-d model must raise ValueError."""
    def square(x):
        return x ** 2

    with pytest.raises(ValueError) as excinfo:
        discretize_model(square, (-10, 11), (-10, 11))
    assert excinfo.value.args[0] == "y range specified, but model is only 1-d."
def test_dim_exception_2d():
    """Omitting the y range with a 2-d model must raise ValueError."""
    def sq_radius(x, y):
        return x ** 2 + y ** 2

    with pytest.raises(ValueError) as excinfo:
        discretize_model(sq_radius, (-10, 11))
    assert excinfo.value.args[0] == "y range not specified, but model is 2-d"
def test_float_x_range_exception():
    """A non-integer-width x range must raise ValueError."""
    def sq_radius(x, y):
        return x ** 2 + y ** 2

    with pytest.raises(ValueError) as excinfo:
        discretize_model(sq_radius, (-10.002, 11.23))
    # Upstream message (including its "an lower" typo) asserted verbatim.
    assert excinfo.value.args[0] == ("The difference between the upper an lower"
                                     " limit of 'x_range' must be a whole number.")
def test_float_y_range_exception():
    """A non-integer-width y range must raise ValueError."""
    def sq_radius(x, y):
        return x ** 2 + y ** 2

    with pytest.raises(ValueError) as excinfo:
        discretize_model(sq_radius, (-10, 11), (-10.002, 11.23))
    # Upstream message (including its "an lower" typo) asserted verbatim.
    assert excinfo.value.args[0] == ("The difference between the upper an lower"
                                     " limit of 'y_range' must be a whole number.")
def test_discretize_oversample():
    """Oversample mode: check output shape, peak value/position, and
    that an oversampling factor of 1 reduces to center mode."""
    model = Gaussian2D(amplitude=1.0, x_mean=5., y_mean=125.,
                       x_stddev=0.75, y_stddev=3)
    oversampled = discretize_model(model, x_range=[0, 10], y_range=[100, 135],
                                   mode='oversample', factor=10)
    factor_one = discretize_model(model, x_range=[0, 10], y_range=[100, 135],
                                  mode='oversample', factor=1)
    centered = discretize_model(model, x_range=[0, 10], y_range=[100, 135],
                                mode='center')

    assert oversampled.shape == (35, 10)
    assert_allclose(np.max(oversampled), 0.927, atol=1e-3)
    assert np.unravel_index(oversampled.argmax(), oversampled.shape) == (25, 5)
    # With factor=1, oversampling degenerates to plain center sampling.
    assert_allclose(centered, factor_one)
| {
"content_hash": "78ef6fa3cd10a0e7498acdada1479e80",
"timestamp": "",
"source": "github",
"line_count": 217,
"max_line_length": 97,
"avg_line_length": 32.45622119815668,
"alnum_prop": 0.6021581712338492,
"repo_name": "MSeifert04/astropy",
"id": "b88b85fb5d0b45cb80f92ee08b2728561a7cb733",
"size": "7108",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "astropy/convolution/tests/test_discretize.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "444651"
},
{
"name": "C++",
"bytes": "1057"
},
{
"name": "HTML",
"bytes": "1172"
},
{
"name": "Objective-C",
"bytes": "615"
},
{
"name": "Python",
"bytes": "9891588"
},
{
"name": "TeX",
"bytes": "853"
}
],
"symlink_target": ""
} |
from msrest.paging import Paged
class NamespaceResourcePaged(Paged):
    """
    A paging container for iterating over a list of NamespaceResource object
    """

    # msrest deserialization map: 'nextLink' holds the next page URL and
    # 'value' holds the NamespaceResource items of the current page.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[NamespaceResource]'}
    }

    def __init__(self, *args, **kwargs):
        # No extra state; paging behaviour comes entirely from msrest's Paged.
        super(NamespaceResourcePaged, self).__init__(*args, **kwargs)
| {
"content_hash": "522e5d709eecc892146b842afded94e7",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 76,
"avg_line_length": 27.25,
"alnum_prop": 0.6077981651376146,
"repo_name": "rjschwei/azure-sdk-for-python",
"id": "0ded738bc09c3a5b158f2714ae3dbeb3662736d3",
"size": "910",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "azure-mgmt-servicebus/azure/mgmt/servicebus/models/namespace_resource_paged.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8317911"
}
],
"symlink_target": ""
} |
>>> cd_list = []
>>> cd_list
[]
>>> with open("C:\/Users\/advance\/Documents\/GitHub\/career_path_prediction\/Career_data.csv",'rb') as csvfile:
filereader = csv.reader(csvfile,delimiter=',')
for row in filereader:
cd_list.append(row)
#-----------------------------------------------------------
>>> for i in range(1,data_array.shape[0]):
if fuzz.ratio(user_future,data_array[i][9])>60:
print "(Relevance = "+ str(fuzz.ratio(user_future,data_array[i][9])) + ") ",
if data_array[i][0] != '':
print "Bachelors : "+ data_array[i][0],
if data_array[i][3] != '':
print "-> Masters : "+ data_array[i][3],
if data_array[i][6] != '':
print "-> Doctoral : "+ data_array[i][6],
print "-> " + data_array[i][9],
print "\n"
(Relevance = 63) Bachelors : EE -> Masters : CS -> Software Engineer
(Relevance = 89) Bachelors : CS -> Masters : Software Engineering -> Software Developer | {
"content_hash": "04a613e8b922696b83d7ff9cfe70e0ee",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 112,
"avg_line_length": 33.55555555555556,
"alnum_prop": 0.5662251655629139,
"repo_name": "akshaynagpal/career_path_suggestion",
"id": "55ed374200e22329bcc06957890a83de48eaa4a4",
"size": "906",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "New folder/snippets2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16647"
}
],
"symlink_target": ""
} |
import socket
import time
from mc_bin_client.mc_bin_client import MemcachedClient
from perfrunner.settings import ClusterSpec, TestConfig
SOCKET_RETRY_INTERVAL = 2
MAX_RETRY = 600
class MemcachedHelper:
    """Thin wrapper over mc_bin_client for fetching memcached bucket
    stats, retrying connection/auth failures."""

    def __init__(self, cluster_spec: ClusterSpec, test_config: TestConfig):
        self.username, self.password = cluster_spec.rest_credentials
        # Pick the socket address family from the cluster's IP mode.
        if test_config.cluster.ipv6:
            self.family = socket.AF_INET6
        else:
            self.family = socket.AF_INET

    def get_stats(self, host: str, port: int, bucket: str, stats: str = '') -> dict:
        """Return the stats dict for `bucket`, retrying up to MAX_RETRY
        times (SOCKET_RETRY_INTERVAL seconds apart) before re-raising
        the last error. An empty `stats` string requests the default
        stat group.
        """
        retries = 0
        while True:
            try:
                # Fresh client per attempt: a failed connect/handshake
                # leaves nothing worth reusing.
                mc = MemcachedClient(host=host, port=port, family=self.family)
                mc.enable_xerror()
                mc.hello("mc")
                mc.sasl_auth_plain(user=self.username, password=self.password)
                mc.bucket_select(bucket)
                return mc.stats(stats)
            except Exception:
                # NOTE(review): broad catch appears deliberate -- any
                # failure in connect/auth/select is retried until the cap.
                if retries < MAX_RETRY:
                    retries += 1
                    time.sleep(SOCKET_RETRY_INTERVAL)
                else:
                    raise

    def reset_stats(self, host: str, port: int, bucket: str):
        # 'reset' is the memcached stats sub-command that clears counters.
        self.get_stats(host, port, bucket, 'reset')
| {
"content_hash": "7a260b0c9bbc62392c203bff27f789db",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 84,
"avg_line_length": 32.17948717948718,
"alnum_prop": 0.5760956175298805,
"repo_name": "couchbase/perfrunner",
"id": "061125f2f52141d66b55707bfc409989d04a4bb3",
"size": "1255",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "perfrunner/helpers/memcached.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1853"
},
{
"name": "Dockerfile",
"bytes": "2761"
},
{
"name": "Go",
"bytes": "37531"
},
{
"name": "Groovy",
"bytes": "46365"
},
{
"name": "HCL",
"bytes": "40219"
},
{
"name": "Inno Setup",
"bytes": "25281"
},
{
"name": "JavaScript",
"bytes": "14317"
},
{
"name": "Makefile",
"bytes": "2405"
},
{
"name": "Python",
"bytes": "2416900"
},
{
"name": "Ruby",
"bytes": "154"
},
{
"name": "Shell",
"bytes": "5016"
}
],
"symlink_target": ""
} |
from django import forms
from django.contrib.auth.models import User
#from captcha.fields import CaptchaField
class LoginForm(forms.Form):
    """Simple username/password login form."""

    username = forms.CharField()
    password = forms.CharField()

    def valid_user(self):
        """Check that the submitted username belongs to an existing user.

        Returns the username on success; raises ValidationError otherwise.
        """
        # BUG FIX: the original referenced a bare `username`, which is
        # undefined in this scope (NameError). Field values live in
        # self.cleaned_data after validation.
        username = self.cleaned_data.get('username', '')
        # .exists() is cheaper than .count() == 0 — it stops at one row.
        if not User.objects.filter(username=username).exists():
            raise forms.ValidationError("username or password invalid")
        return username
| {
"content_hash": "5742fd651ba49d31f1b67893100e1786",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 65,
"avg_line_length": 24.058823529411764,
"alnum_prop": 0.7359413202933985,
"repo_name": "Diksha-Rathi/Smart-Bin-GHC",
"id": "b7a084f4b9b9a2bb573c2bc1b13ff8715ee9c06a",
"size": "409",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "registration/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3234"
},
{
"name": "HTML",
"bytes": "24271"
},
{
"name": "JavaScript",
"bytes": "6629"
},
{
"name": "Python",
"bytes": "17815"
}
],
"symlink_target": ""
} |
""" The volume type & volume types extra specs extension"""
from webob import exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import db
from nova import exception
from nova.volume import volume_types
# Policy-check helper: raises if the caller lacks the compute:volumetypes
# extension permission.
authorize = extensions.extension_authorizer('compute', 'volumetypes')
def make_voltype(elem):
    """Attach the standard volume-type attributes and extra_specs child."""
    for attr_name in ('id', 'name'):
        elem.set(attr_name)
    elem.append(
        xmlutil.make_flat_dict('extra_specs', selector='extra_specs'))
class VolumeTypeTemplate(xmlutil.TemplateBuilder):
    """XML serializer template for a single volume type."""

    def construct(self):
        """Build the 'volume_type' element tree."""
        voltype_elem = xmlutil.TemplateElement('volume_type',
                                               selector='volume_type')
        make_voltype(voltype_elem)
        return xmlutil.MasterTemplate(voltype_elem, 1)
class VolumeTypesTemplate(xmlutil.TemplateBuilder):
    """XML serializer template for a collection of volume types."""

    def construct(self):
        """Build the 'volume_types' element tree."""
        root = xmlutil.TemplateElement('volume_types')

        def select_values(obj, do_raise=False):
            # The controller returns a dict keyed by name; serialize values.
            return obj.values()

        elem = xmlutil.SubTemplateElement(root, 'volume_type',
                                          selector=select_values)
        make_voltype(elem)
        return xmlutil.MasterTemplate(root, 1)
class VolumeTypesController(object):
    """ The volume types API controller for the Openstack API """

    @wsgi.serializers(xml=VolumeTypesTemplate)
    def index(self, req):
        """ Returns the list of volume types """
        context = req.environ['nova.context']
        authorize(context)
        return volume_types.get_all_types(context)

    @wsgi.serializers(xml=VolumeTypeTemplate)
    def create(self, req, body):
        """Creates a new volume type.

        Requires a body of the form {'volume_type': {'name': ...,
        'extra_specs': {...}}}; rejects missing/empty values with 422.
        """
        context = req.environ['nova.context']
        authorize(context)
        if not body or body == "":
            raise exc.HTTPUnprocessableEntity()
        vol_type = body.get('volume_type', None)
        if vol_type is None or vol_type == "":
            raise exc.HTTPUnprocessableEntity()
        name = vol_type.get('name', None)
        specs = vol_type.get('extra_specs', {})
        if name is None or name == "":
            raise exc.HTTPUnprocessableEntity()
        try:
            volume_types.create(context, name, specs)
            vol_type = volume_types.get_volume_type_by_name(context, name)
        except exception.QuotaError as error:
            self._handle_quota_error(error)
        except exception.NotFound:
            raise exc.HTTPNotFound()
        return {'volume_type': vol_type}

    @wsgi.serializers(xml=VolumeTypeTemplate)
    def show(self, req, id):
        """ Return a single volume type item """
        context = req.environ['nova.context']
        authorize(context)
        try:
            vol_type = volume_types.get_volume_type(context, id)
        # BUG FIX: the original used `except A or B:`, which evaluates the
        # boolean expression to just A, so ApiError was never caught and
        # leaked out as a 500 instead of a 404. Catch both via a tuple.
        except (exception.NotFound, exception.ApiError):
            raise exc.HTTPNotFound()
        return {'volume_type': vol_type}

    def delete(self, req, id):
        """ Deletes an existing volume type """
        context = req.environ['nova.context']
        authorize(context)
        try:
            vol_type = volume_types.get_volume_type(context, id)
            volume_types.destroy(context, vol_type['name'])
        except exception.NotFound:
            raise exc.HTTPNotFound()

    def _handle_quota_error(self, error):
        """Reraise quota errors as api-specific http exceptions."""
        if error.code == "MetadataLimitExceeded":
            raise exc.HTTPBadRequest(explanation=error.message)
        raise error
class VolumeTypeExtraSpecsTemplate(xmlutil.TemplateBuilder):
    """XML serializer template for a volume type's extra_specs dict."""

    def construct(self):
        """Build the flat 'extra_specs' element tree."""
        specs_root = xmlutil.make_flat_dict('extra_specs',
                                            selector='extra_specs')
        return xmlutil.MasterTemplate(specs_root, 1)
class VolumeTypeExtraSpecTemplate(xmlutil.TemplateBuilder):
    """XML serializer template for a single extra spec ({key: value})."""
    def construct(self):
        # The element's tag name is the spec key, resolved per serialized
        # object via the 'key' selector.
        tagname = xmlutil.Selector('key')
        def extraspec_sel(obj, do_raise=False):
            # Have to extract the key and value for later use...
            # NOTE(review): assumes the dict holds exactly one item and
            # relies on Python 2's list-returning .items() — confirm before
            # porting to Python 3.
            key, value = obj.items()[0]
            return dict(key=key, value=value)
        root = xmlutil.TemplateElement(tagname, selector=extraspec_sel)
        # The element's text content is the spec value.
        root.text = 'value'
        return xmlutil.MasterTemplate(root, 1)
class VolumeTypeExtraSpecsController(object):
    """ The volume type extra specs API controller for the Openstack API """

    def _get_extra_specs(self, context, vol_type_id):
        """Return {'extra_specs': {...}} for the given volume type."""
        extra_specs = db.volume_type_extra_specs_get(context, vol_type_id)
        # dict() over iteritems() copies in one step instead of a manual loop.
        specs_dict = dict(extra_specs.iteritems())
        return dict(extra_specs=specs_dict)

    def _check_body(self, body):
        """Raise 400 when the request carries no body."""
        if body is None or body == "":
            expl = _('No Request Body')
            raise exc.HTTPBadRequest(explanation=expl)

    @wsgi.serializers(xml=VolumeTypeExtraSpecsTemplate)
    def index(self, req, vol_type_id):
        """ Returns the list of extra specs for a given volume type """
        context = req.environ['nova.context']
        authorize(context)
        return self._get_extra_specs(context, vol_type_id)

    @wsgi.serializers(xml=VolumeTypeExtraSpecsTemplate)
    def create(self, req, vol_type_id, body):
        """Create or update extra specs from the request body."""
        context = req.environ['nova.context']
        authorize(context)
        self._check_body(body)
        specs = body.get('extra_specs')
        try:
            db.volume_type_extra_specs_update_or_create(context,
                                                        vol_type_id,
                                                        specs)
        except exception.QuotaError as error:
            self._handle_quota_error(error)
        return body

    @wsgi.serializers(xml=VolumeTypeExtraSpecTemplate)
    def update(self, req, vol_type_id, id, body):
        """Update one extra spec; body must contain exactly {id: value}."""
        context = req.environ['nova.context']
        authorize(context)
        self._check_body(body)
        # Idiom fix: `not id in body` -> `id not in body`.
        if id not in body:
            expl = _('Request body and URI mismatch')
            raise exc.HTTPBadRequest(explanation=expl)
        if len(body) > 1:
            expl = _('Request body contains too many items')
            raise exc.HTTPBadRequest(explanation=expl)
        try:
            db.volume_type_extra_specs_update_or_create(context,
                                                        vol_type_id,
                                                        body)
        except exception.QuotaError as error:
            self._handle_quota_error(error)
        return body

    @wsgi.serializers(xml=VolumeTypeExtraSpecTemplate)
    def show(self, req, vol_type_id, id):
        """ Return a single extra spec item """
        context = req.environ['nova.context']
        authorize(context)
        specs = self._get_extra_specs(context, vol_type_id)
        if id in specs['extra_specs']:
            return {id: specs['extra_specs'][id]}
        else:
            raise exc.HTTPNotFound()

    def delete(self, req, vol_type_id, id):
        """ Deletes an existing extra spec """
        context = req.environ['nova.context']
        authorize(context)
        db.volume_type_extra_specs_delete(context, vol_type_id, id)

    def _handle_quota_error(self, error):
        """Reraise quota errors as api-specific http exceptions."""
        if error.code == "MetadataLimitExceeded":
            raise exc.HTTPBadRequest(explanation=error.message)
        raise error
class Volumetypes(extensions.ExtensionDescriptor):
    """Volume types support"""

    name = "VolumeTypes"
    alias = "os-volume-types"
    namespace = "http://docs.openstack.org/compute/ext/volume_types/api/v1.1"
    updated = "2011-08-24T00:00:00+00:00"

    def get_resources(self):
        """Expose the volume-types resource plus its extra_specs child."""
        types_res = extensions.ResourceExtension(
            'os-volume-types',
            VolumeTypesController())
        specs_res = extensions.ResourceExtension(
            'extra_specs',
            VolumeTypeExtraSpecsController(),
            parent=dict(member_name='vol_type',
                        collection_name='os-volume-types'))
        return [types_res, specs_res]
| {
"content_hash": "cd4028b96787cef905e5cfb315430439",
"timestamp": "",
"source": "github",
"line_count": 232,
"max_line_length": 79,
"avg_line_length": 34.88793103448276,
"alnum_prop": 0.6088460588089943,
"repo_name": "rcbops/nova-buildpackage",
"id": "cbc205ea778c8c02011c2cd2681f76cda9c5f4fa",
"size": "8791",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/api/openstack/compute/contrib/volumetypes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7412"
},
{
"name": "Python",
"bytes": "5412903"
},
{
"name": "Shell",
"bytes": "24506"
}
],
"symlink_target": ""
} |
'''
This Python package (serial_device2) creates a class named SerialDevice,
which inherits from serial.Serial and adds methods to it, like auto
discovery of available serial ports in Linux, Windows, and Mac OS X. The
SerialDevice class can be used by itself, but it is mostly intended to
be a base class for other serial port devices with higher level
functions. SerialDevices creates a list of SerialDevice
instances from all available serial ports.
'''
from .serial_device2 import SerialDevice, SerialDevices, find_serial_device_ports, find_serial_device_port, WriteFrequencyError, WriteError, ReadError, __version__
| {
"content_hash": "c97e7c1e1122c13dd72ead22b59e16a0",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 163,
"avg_line_length": 62,
"alnum_prop": 0.8048387096774193,
"repo_name": "JaneliaSciComp/serial_device_python",
"id": "ac007f68c79f6269352c61575afcb1e55a879de0",
"size": "620",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "serial_device2/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "20001"
}
],
"symlink_target": ""
} |
#############################################################################
#
# CServer:
# Handles all services with the Navi-Xtreme server.
#############################################################################
from string import *
import sys, os.path
import urllib
import urllib2
import re, random, string
import xbmc, xbmcgui, xbmcaddon
import re, os, time, datetime, traceback
import shutil
import zipfile
from settings import *
from CFileLoader import *
from libs2 import *
from CDialogLogin import *
from CDialogRating import *
# Detect the desktop XBMC emulator: xbmcgui.Emulating only exists there,
# so fall back to False on real devices.
try: Emulating = xbmcgui.Emulating
except: Emulating = False
######################################################################
# Description: Text viewer
######################################################################
class CServer:
def __init__(self):
#public member of CServer class.
self.user_id = ''
#read the stored user ID
self.read_user_id()
######################################################################
# Description: -
# Parameters : -
# Return : -
######################################################################
def login(self): ## http://www.navixtreme.com/members/
## http://www.navixtreme.com/members/
## POST /members/ action=takelogin&ajax=1&username=###[]###&password=###[]###&rndval=###[numbers]###
##
##
##
keyboard = xbmc.Keyboard('', 'Enter User name')
keyboard.doModal()
if (keyboard.isConfirmed() != True):
return -2
username = keyboard.getText()
keyboard = xbmc.Keyboard('', 'Enter Password')
keyboard.doModal()
if (keyboard.isConfirmed() != True):
return -2
password = keyboard.getText()
#login to the Navi-X server
self.user_id=self.nxLogin(username,password)
self.user_id=str(self.user_id).strip()
if (self.user_id=='') or (self.user_id=="<class 'urllib2.HTTPError'>") or (self.user_id=="<type 'exceptions.ValueError'>"):
#failed
print {'user_id':self.user_id}
self.user_id=''
self.save_user_id()
#xbmcgui.Dialog().ok("Login",'Failed',' ',' ')
return -1
elif len(self.user_id)==48:
# xbmcgui.Dialog().ok("Login",'Successful',' ',' ')
print "Login to the NXServer was successful"
#save the returned user ID
self.save_user_id()
#success
return 0
else:
#failed
print {'user_id':self.user_id}
self.user_id=''
self.save_user_id()
#xbmcgui.Dialog().ok("Login",'Failed',' ',' ')
return -1
######################################################################
# Description: Login function for Navi-Xtreme login.
# Parameters : username: user name
# password: user password
# Return : blowfish-encrypted string identifying the user for
# saving locally, or an empty string if the login failed.
######################################################################
def nxLogin(self, username, password, LoginUrl='http://www.navixtreme.com/members/'):
## POST /members/ action=takelogin&ajax=1&username=###[]###&password=###[]###&rndval=###[numbers]###
LoginUrl='http://www.navixtreme.com/login/'
#return str(getRemote(LoginUrl,{'method':'post',
# 'postdata':urllib.urlencode({'username':username,'password':password,'action':'takelogin','ajax':'1'})
#})['content'])
#try:
print'Attempting to login'
html=UrlDoPost(LoginUrl,{'username':username,'password':password,'action':'takelogin','ajax':'1','rndval':''})
#except: html=''
print 'Length: '+str(len(html)); #print html
return html
#return str(getRemote('http://www.navixtreme.com/login/',{
# 'method':'post',
# 'postdata':urllib.urlencode({'username':username,'password':password})
#})['content'])
######################################################################
# Description: -
# Parameters : -
# Return : -
######################################################################
def logout(self): ## http://www.navixtreme.com/members/?action=signout
#empty the user ID
self.user_id=''
self.save_user_id()
######################################################################
# Description: -
# Parameters : -
# Return : -
######################################################################
def is_user_logged_in(self):
self.user_id=str(self.user_id).strip()
if (self.user_id != '') and (self.user_id !="<class 'urllib2.HTTPError'>") or (self.user_id=="<type 'exceptions.ValueError'>"):
if (len(self.user_id) != 48): return False
return True
return False
######################################################################
# Description: -
# Parameters : -
# Return : -
######################################################################
def rate_item(self, mediaitem):
#rate = CDialogRating("CRatingskin.xml", os.getcwd())
rate = CDialogRating("CRatingskin2.xml", addon.getAddonInfo('path'))
rate.doModal()
if rate.state != 0:
return -2
if self.is_user_logged_in() == False:
dialog = xbmcgui.Dialog()
dialog.ok(" Error", "You are not logged in.")
return -1
#login to the Navi-X server
result = self.nxrate_item(mediaitem, rate.rating)
######################################################################
# Description: -
# Parameters : mediaitem: CMediaItem instance to rate
# rating = value [0-5]
# Return : -
# API Return : Success: value [0-5] representing the new average rating
# Failure: error message string
######################################################################
def nxrate_item(self, mediaitem, rating):
result=getRemote('http://www.navixtreme.com/rate/',{
'method':'post',
'postdata':urllib.urlencode({'url':mediaitem.URL,'rating':rating}),
'cookie':'nxid='+nxserver.user_id
})['content']
dialog = xbmcgui.Dialog()
p=re.compile('^\d$')
match=p.search(result)
if match:
dialog.ok(" Rate", "Rating Successful.")
mediaitem.rating=result
else:
dialog.ok(" Rate", result)
return 0
######################################################################
# Description: -
# Parameters : -
# Return : -
######################################################################
def read_user_id(self):
try:
f=open(RootDir + 'user_id.dat', 'r')
self.user_id = f.read()
f.close()
except IOError:
return
######################################################################
# Description: -
# Parameters : -
# Return : -
######################################################################
def save_user_id(self):
f=open(RootDir + 'user_id.dat', 'w')
f.write(self.user_id)
f.close()
pass
#Create server instance here and use it as a global variable for all other components that import CServer.py.
global nxserver
nxserver = CServer()
global re_server
# Pre-compiled pattern extracting the host name (minus any "www." prefix)
# from a URL.
re_server = re.compile('^[^:]+://(?:www\.)?([^/]+)')
"content_hash": "91bd86718d7e51f4ecfe6de74f80bbf6",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 135,
"avg_line_length": 38.40758293838863,
"alnum_prop": 0.42744323790720634,
"repo_name": "aplicatii-romanesti/allinclusive-kodi-pi",
"id": "a2c40e40c0dc133697d9f316b343517ad3454dcb",
"size": "8989",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": ".kodi/addons/script.navi-x/src/CServer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "6178"
},
{
"name": "Python",
"bytes": "8657978"
},
{
"name": "Shell",
"bytes": "198"
}
],
"symlink_target": ""
} |
from pygments.lexers import find_lexer_class
from framework import LinguistTestBase, main, ROOT_DIR
from libs.repository import Repository
from libs.language import Language
class TestRepository(LinguistTestBase):
    """Checks Repository language/size detection against this repo's tree."""

    def repo(self, base_path):
        """Build a Repository rooted at base_path."""
        return Repository.from_directory(base_path)

    def linguist_repo(self):
        """Repository for this project's own source tree."""
        return self.repo(ROOT_DIR)

    def test_linguist_language(self):
        python = Language.find_by_name('Python')
        assert self.linguist_repo().language == python

    def test_linguist_languages(self):
        python = Language.find_by_name('Python')
        assert self.linguist_repo().languages[python] > 2000

    def test_linguist_size(self):
        assert self.linguist_repo().size > 3000

    def test_binary_override(self):
        nimrod_repo = self.repo(ROOT_DIR + '/samples/Nimrod')
        assert nimrod_repo.language == Language.find_by_name('Nimrod')
# Allow running this test module directly via the framework entry point.
if __name__ == '__main__':
    main()
| {
"content_hash": "c4712aa4f32ee4df9853cf3900d1c7a9",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 98,
"avg_line_length": 31,
"alnum_prop": 0.6970046082949308,
"repo_name": "douban/linguist",
"id": "1c0fcb979a475b5a0d773ffdb3cf868d99ded20c",
"size": "893",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_repository.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "95070"
}
],
"symlink_target": ""
} |
"""Two dimensional convolutional neural net layers."""
from absl import logging
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
def weight_variable(shape, stddev=0.01):
  """Returns the weight variable 'w', normally initialized with stddev."""
  logging.vlog(1, 'weight init for shape %s', str(shape))
  initializer = tf.random_normal_initializer(stddev=stddev)
  return tf.get_variable('w', shape, initializer=initializer)
def bias_variable(shape):
  """Returns the zero-initialized bias variable 'b'."""
  zeros = tf.zeros_initializer()
  return tf.get_variable('b', shape, initializer=zeros)
def conv2d(x, w, atrou_rate=1, data_format='NHWC'):
  """2D convolution with SAME padding; dilated when atrou_rate > 1."""
  if atrou_rate <= 1:
    # Plain stride-1 convolution for the non-dilated case.
    return tf.nn.conv2d(
        x, w, strides=[1, 1, 1, 1], padding='SAME', data_format=data_format)
  return tf.nn.convolution(
      x,
      w,
      dilation_rate=[atrou_rate] * 2,
      padding='SAME',
      data_format=data_format)
def make_conv_sep2d_layer(input_node,
                          in_channels,
                          channel_multiplier,
                          out_channels,
                          layer_name,
                          filter_size,
                          filter_size_2=None,
                          batch_norm=False,
                          is_training=True,
                          atrou_rate=1,
                          data_format='NHWC',
                          stddev=0.01):
  """Use separable convolutions.

  Builds a depthwise + pointwise (separable) convolution under the
  `layer_name` variable scope, then applies batch norm or a bias term.

  Args:
    input_node: input tensor.
    in_channels: number of input channels.
    channel_multiplier: depthwise channel multiplier.
    out_channels: channels produced by the pointwise convolution.
    layer_name: variable scope name for the layer.
    filter_size: filter height.
    filter_size_2: filter width; defaults to filter_size when None.
    batch_norm: if True, use batch norm instead of a bias.
    is_training: forwarded to batch norm.
    atrou_rate: dilation rate of the depthwise convolution.
    data_format: 'NHWC' or 'NCHW'.
    stddev: stddev of the weight initializers.

  Returns:
    The layer output tensor (no non-linearity applied here).
  """
  # Square filter unless a second dimension is supplied.
  if filter_size_2 is None:
    filter_size_2 = filter_size
  logging.vlog(1, 'layer %s in %d out %d chan mult %d', layer_name, in_channels,
               out_channels, channel_multiplier)
  with tf.variable_scope(layer_name):
    # Separate sub-scopes keep the depthwise and pointwise 'w' variables
    # distinct under the same layer scope.
    with tf.variable_scope('depthwise'):
      w_depthwise = weight_variable(
          [filter_size, filter_size_2, in_channels, channel_multiplier],
          stddev=stddev)
    with tf.variable_scope('pointwise'):
      w_pointwise = weight_variable(
          [1, 1, in_channels * channel_multiplier, out_channels], stddev=stddev)
    h_conv = tf.nn.separable_conv2d(
        input_node,
        w_depthwise,
        w_pointwise,
        padding='SAME',
        strides=[1, 1, 1, 1],
        rate=[atrou_rate, atrou_rate],
        data_format=data_format)
    if batch_norm:
      h_conv = batch_norm_layer(
          h_conv, layer_name=layer_name, is_training=is_training,
          data_format=data_format)
    else:
      # Without batch norm, add a learned bias instead.
      b_conv = bias_variable([out_channels])
      h_conv = tf.nn.bias_add(h_conv, b_conv, data_format=data_format)
    return h_conv
def batch_norm_layer(h_conv, layer_name, is_training=True, data_format='NCHW'):
  """Apply fused batch normalization, scoped under layer_name."""
  logging.vlog(1, 'batch norm for layer %s', layer_name)
  normalized = tf.contrib.layers.batch_norm(
      h_conv,
      is_training=is_training,
      fused=True,
      decay=0.999,
      scope=layer_name,
      data_format=data_format)
  return normalized
def make_conv_layer(input_node,
                    in_channels,
                    out_channels,
                    layer_name,
                    filter_size,
                    filter_size_2=None,
                    non_linearity=True,
                    batch_norm=False,
                    is_training=True,
                    atrou_rate=1,
                    data_format='NHWC',
                    stddev=0.01):
  """Creates a convolution layer.

  Standard (non-separable) convolution under the `layer_name` variable
  scope, followed by batch norm or a bias, and optionally an ELU.

  Args:
    input_node: input tensor.
    in_channels: number of input channels.
    out_channels: number of output channels.
    layer_name: variable scope name for the layer.
    filter_size: filter height.
    filter_size_2: filter width; defaults to filter_size when None.
    non_linearity: if True, apply an ELU to the output.
    batch_norm: if True, use batch norm instead of a bias.
    is_training: forwarded to batch norm.
    atrou_rate: dilation rate passed to conv2d.
    data_format: 'NHWC' or 'NCHW'.
    stddev: stddev of the weight initializer.

  Returns:
    The layer output tensor.
  """
  # Square filter unless a second dimension is supplied.
  if filter_size_2 is None:
    filter_size_2 = filter_size
  logging.vlog(
      1, 'layer %s in %d out %d', layer_name, in_channels, out_channels)
  with tf.variable_scope(layer_name):
    w_conv = weight_variable(
        [filter_size, filter_size_2, in_channels, out_channels], stddev=stddev)
    h_conv = conv2d(
        input_node, w_conv, atrou_rate=atrou_rate, data_format=data_format)
    if batch_norm:
      h_conv = batch_norm_layer(
          h_conv, layer_name=layer_name, is_training=is_training,
          data_format=data_format)
    else:
      # Without batch norm, add a learned bias instead.
      b_conv = bias_variable([out_channels])
      h_conv = tf.nn.bias_add(h_conv, b_conv, data_format=data_format)
    if non_linearity:
      h_conv = tf.nn.elu(h_conv)
    return h_conv
| {
"content_hash": "bbd69b79e1f8d813405e8c26657467c7",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 80,
"avg_line_length": 32.45967741935484,
"alnum_prop": 0.5662111801242236,
"repo_name": "deepmind/deepmind-research",
"id": "94cd09246ac0dd9c8d99158a46380ff238fb3e96",
"size": "4619",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alphafold_casp13/two_dim_convnet.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1002"
},
{
"name": "C++",
"bytes": "5765"
},
{
"name": "Jupyter Notebook",
"bytes": "12330730"
},
{
"name": "Lua",
"bytes": "76186"
},
{
"name": "OpenEdge ABL",
"bytes": "15630"
},
{
"name": "PureBasic",
"bytes": "8"
},
{
"name": "Python",
"bytes": "3419119"
},
{
"name": "Racket",
"bytes": "226692"
},
{
"name": "Shell",
"bytes": "84450"
},
{
"name": "Starlark",
"bytes": "3463"
}
],
"symlink_target": ""
} |
from aiotg.bot import * # noqa
| {
"content_hash": "2c31fb66a5336b0e43faf55db523f240",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 31,
"avg_line_length": 32,
"alnum_prop": 0.6875,
"repo_name": "SijmenSchoon/aiotg",
"id": "d640cdcd379049a0929051fb7f9350b9d263d736",
"size": "32",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "aiotg/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "31974"
}
],
"symlink_target": ""
} |
from django.shortcuts import render
from pyvdp.paai.fundstransferattinq.cardattributes import fundstransferinquiry, FundsTransferInquiryModel
from demo.forms.paai.fundstransferattinq.cardattributes.fundstransferinquiry import FundsTransferInquiryPostForm
def funds_transfer_inquiry(request):
    """Render the Funds Transfer Inquiry form and submit it on POST.

    GET renders an empty form. A valid POST sends the inquiry through
    pyvdp and renders the success page. An invalid POST re-renders the
    bound form so its validation errors are shown.
    """
    template = 'paai/fundstransferattinq/cardattributes/fundstransferinquiry.html'
    if request.method == 'POST':
        form_post = FundsTransferInquiryPostForm(request.POST)
        if form_post.is_valid():
            pan = form_post.cleaned_data['pan']
            ftid_kwargs = {
                'systemsTraceAuditNumber': 123456,
                'primaryAccountNumber': pan,
            }
            data = FundsTransferInquiryModel(**ftid_kwargs)
            result = fundstransferinquiry.send(data=data)
            return render(request, template_name='success.html',
                          context={'result': result})
        # BUG FIX: the original only assigned `form_post` on GET, so an
        # invalid POST fell through to the final render with `form_post`
        # undefined, raising NameError. Fall through with the bound form.
    else:
        form_post = FundsTransferInquiryPostForm()
    return render(request,
                  template_name=template,
                  context={'form_post': form_post})
| {
"content_hash": "cb83e19964f6cd7eb4772b22683a01ae",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 112,
"avg_line_length": 38,
"alnum_prop": 0.6701127819548872,
"repo_name": "ppokrovsky/pyvdp",
"id": "b3a5d206600bc3643b60f780dfed108b7e4cc3d2",
"size": "1064",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo/demo/views/paai/fundstransferattinq/cardattributes/fundstransferinquiry.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "266062"
}
],
"symlink_target": ""
} |
import threading
# DJANGAE
from djangae.db import transaction
from djangae.contrib import sleuth
from djangae.test import TestCase
class TransactionTests(TestCase):
    """Exercises djangae.db.transaction's atomic/non_atomic wrappers."""
    def test_repeated_usage_in_a_loop(self):
        # Regression: re-entering atomic(xg=True) in a loop must not leak
        # transaction state between iterations.
        from .test_connector import TestUser
        pk = TestUser.objects.create(username="foo").pk
        for i in xrange(4):
            with transaction.atomic(xg=True):
                TestUser.objects.get(pk=pk)
                continue
        with transaction.atomic(xg=True):
            TestUser.objects.get(pk=pk)
    def test_recursive_atomic(self):
        # An @atomic function may call itself; nesting must unwind cleanly.
        l = []
        @transaction.atomic
        def txn():
            l.append(True)
            if len(l) == 3:
                return
            else:
                txn()
        txn()
    def test_recursive_non_atomic(self):
        # Same recursion check for @non_atomic.
        l = []
        @transaction.non_atomic
        def txn():
            l.append(True)
            if len(l) == 3:
                return
            else:
                txn()
        txn()
    def test_atomic_in_separate_thread(self):
        """ Regression test. See #668. """
        @transaction.atomic
        def txn():
            return
        def target():
            txn()
        thread = threading.Thread(target=target)
        thread.start()
        thread.join()
    def test_non_atomic_in_separate_thread(self):
        """ Regression test. See #668. """
        @transaction.non_atomic
        def txn():
            return
        def target():
            txn()
        thread = threading.Thread(target=target)
        thread.start()
        thread.join()
    def test_atomic_decorator(self):
        # A raise inside an @atomic function must roll the write back.
        from .test_connector import TestUser
        @transaction.atomic
        def txn():
            TestUser.objects.create(username="foo", field2="bar")
            self.assertTrue(transaction.in_atomic_block())
            raise ValueError()
        with self.assertRaises(ValueError):
            txn()
        self.assertEqual(0, TestUser.objects.count())
    def test_interaction_with_datastore_txn(self):
        # djangae transactions must compose with raw db.transactional ones,
        # including INDEPENDENT propagation and nesting in both directions.
        from google.appengine.ext import db
        from google.appengine.datastore.datastore_rpc import TransactionOptions
        from .test_connector import TestUser
        @db.transactional(propagation=TransactionOptions.INDEPENDENT)
        def some_indie_txn(_username):
            TestUser.objects.create(username=_username)
        @db.transactional()
        def some_non_indie_txn(_username):
            TestUser.objects.create(username=_username)
        @db.transactional()
        def double_nested_transactional():
            @db.transactional(propagation=TransactionOptions.INDEPENDENT)
            def do_stuff():
                TestUser.objects.create(username="Double")
                raise ValueError()
            try:
                return do_stuff
            except:
                return
        with transaction.atomic():
            double_nested_transactional()
        @db.transactional()
        def something_containing_atomic():
            with transaction.atomic():
                TestUser.objects.create(username="Inner")
        something_containing_atomic()
        with transaction.atomic():
            with transaction.atomic():
                some_non_indie_txn("Bob1")
                some_indie_txn("Bob2")
                some_indie_txn("Bob3")
        with transaction.atomic(independent=True):
            some_non_indie_txn("Fred1")
            some_indie_txn("Fred2")
            some_indie_txn("Fred3")
    def test_atomic_context_manager(self):
        # The context-manager form must also roll back on exception.
        from .test_connector import TestUser
        with self.assertRaises(ValueError):
            with transaction.atomic():
                TestUser.objects.create(username="foo", field2="bar")
                raise ValueError()
        self.assertEqual(0, TestUser.objects.count())
    def test_non_atomic_context_manager(self):
        # non_atomic inside atomic must hide uncommitted writes and report
        # in_atomic_block() == False, while cached reads stay served.
        from .test_connector import TestUser
        existing = TestUser.objects.create(username="existing", field2="exists")
        with transaction.atomic():
            self.assertTrue(transaction.in_atomic_block())
            user = TestUser.objects.create(username="foo", field2="bar")
            with transaction.non_atomic():
                # We're outside the transaction, so the user should not exist
                self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user.pk)
                self.assertFalse(transaction.in_atomic_block())
                with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                    TestUser.objects.get(pk=existing.pk)  # Should hit the cache, not the datastore
                self.assertFalse(datastore_get.called)
            with transaction.atomic(independent=True):
                user2 = TestUser.objects.create(username="foo2", field2="bar2")
                self.assertTrue(transaction.in_atomic_block())
                with transaction.non_atomic():
                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)
                    with transaction.non_atomic():
                        self.assertFalse(transaction.in_atomic_block())
                        self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)
                        with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
                            # Should hit the cache, not the Datastore
                            TestUser.objects.get(pk=existing.pk)
                    self.assertFalse(transaction.in_atomic_block())
                    self.assertRaises(TestUser.DoesNotExist, TestUser.objects.get, pk=user2.pk)
                self.assertTrue(TestUser.objects.filter(pk=user2.pk).exists())
                self.assertTrue(transaction.in_atomic_block())
    def test_xg_argument(self):
        # xg=True allows cross-group writes; both must roll back together.
        from .test_connector import TestUser, TestFruit
        @transaction.atomic(xg=True)
        def txn(_username):
            TestUser.objects.create(username=_username, field2="bar")
            TestFruit.objects.create(name="Apple", color="pink")
            raise ValueError()
        with self.assertRaises(ValueError):
            txn("foo")
        self.assertEqual(0, TestUser.objects.count())
        self.assertEqual(0, TestFruit.objects.count())
    def test_independent_argument(self):
        """
        We would get a XG error if the inner transaction was not independent
        """
        from .test_connector import TestUser, TestFruit
        @transaction.atomic
        def txn1(_username, _fruit):
            @transaction.atomic(independent=True)
            def txn2(_fruit):
                TestFruit.objects.create(name=_fruit, color="pink")
                raise ValueError()
            TestUser.objects.create(username=_username)
            txn2(_fruit)
        with self.assertRaises(ValueError):
            txn1("test", "banana")
    def test_nested_decorator(self):
        # Nested decorator pattern we discovered can cause a connection_stack
        # underflow.
        @transaction.atomic
        def inner_txn():
            pass
        @transaction.atomic
        def outer_txn():
            inner_txn()
        # Calling inner_txn first puts it in a state which means it doesn't
        # then behave properly in a nested transaction.
        inner_txn()
        outer_txn()
| {
"content_hash": "cf652025e1f73849ffc23b0d2f6f56d6",
"timestamp": "",
"source": "github",
"line_count": 233,
"max_line_length": 99,
"avg_line_length": 32.128755364806864,
"alnum_prop": 0.5797488645471547,
"repo_name": "kirberich/djangae",
"id": "84bb5c4e93f64120025cbc558c7cfed45da2218f",
"size": "7501",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "djangae/tests/test_transactional.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "277"
},
{
"name": "Python",
"bytes": "667411"
},
{
"name": "Shell",
"bytes": "368"
}
],
"symlink_target": ""
} |
import ast
from copy import deepcopy
import datetime
import tempfile
import time
from xml.dom import minidom
import mock
import requests
import six
from cinder import context
from cinder import exception
from cinder.objects import fields
from cinder.objects import group
from cinder.objects import group_snapshot
from cinder.objects import volume_type
from cinder import test
from cinder.tests.unit import fake_group
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder.volume.drivers.dell_emc.vmax import common
from cinder.volume.drivers.dell_emc.vmax import fc
from cinder.volume.drivers.dell_emc.vmax import iscsi
from cinder.volume.drivers.dell_emc.vmax import masking
from cinder.volume.drivers.dell_emc.vmax import provision
from cinder.volume.drivers.dell_emc.vmax import rest
from cinder.volume.drivers.dell_emc.vmax import utils
from cinder.volume import utils as volume_utils
from cinder.volume import volume_types
from cinder.zonemanager import utils as fczm_utils
CINDER_EMC_CONFIG_DIR = '/etc/cinder/'
class VMAXCommonData(object):
    """Canned fixture data shared by the VMAX driver unit tests.

    Holds fake array identifiers, masking-view/storage-group names,
    cinder volume/snapshot/group objects, and the REST payload
    dictionaries served back by FakeRequestsSession.
    """

    # array info
    array = '000197800123'
    srp = 'SRP_1'
    srp2 = 'SRP_2'
    slo = 'Diamond'
    workload = 'DSS'
    port_group_name_f = 'OS-fibre-PG'
    port_group_name_i = 'OS-iscsi-PG'
    masking_view_name_f = 'OS-HostX-F-OS-fibre-PG-MV'
    masking_view_name_i = 'OS-HostX-SRP_1-I-OS-iscsi-PG-MV'
    initiatorgroup_name_f = 'OS-HostX-F-IG'
    initiatorgroup_name_i = 'OS-HostX-I-IG'
    parent_sg_f = 'OS-HostX-F-OS-fibre-PG-SG'
    parent_sg_i = 'OS-HostX-I-OS-iscsi-PG-SG'
    storagegroup_name_f = 'OS-HostX-SRP_1-DiamondDSS-OS-fibre-PG'
    storagegroup_name_i = 'OS-HostX-SRP_1-Diamond-DSS-OS-iscsi-PG'
    defaultstoragegroup_name = 'OS-SRP_1-Diamond-DSS-SG'
    default_sg_no_slo = 'OS-no_SLO-SG'
    default_sg_compr_disabled = 'OS-SRP_1-Diamond-DSS-CD-SG'
    default_sg_re_enabled = 'OS-SRP_1-Diamond-DSS-RE-SG'
    failed_resource = 'OS-failed-resource'
    fake_host = 'HostX@Backend#Diamond+DSS+SRP_1+000197800123'
    new_host = 'HostX@Backend#Silver+OLTP+SRP_1+000197800123'
    version = '3.0.0'
    volume_wwn = '600000345'
    remote_array = '000197800124'
    device_id = '00001'
    device_id2 = '00002'
    rdf_group_name = '23_24_007'
    rdf_group_no = '70'
    u4v_version = '84'
    storagegroup_name_source = 'Grp_source_sg'
    storagegroup_name_target = 'Grp_target_sg'
    group_snapshot_name = 'Grp_snapshot'
    target_group_name = 'Grp_target'
    storagegroup_name_with_id = 'GrpId_group_name'

    # connector info
    wwpn1 = "123456789012345"
    wwpn2 = "123456789054321"
    wwnn1 = "223456789012345"
    initiator = 'iqn.1993-08.org.debian: 01: 222'
    ip = u'123.456.7.8'
    iqn = u'iqn.1992-04.com.emc:600009700bca30c01e3e012e00000001,t,0x0001'
    connector = {'ip': ip,
                 'initiator': initiator,
                 'wwpns': [wwpn1, wwpn2],
                 'wwnns': [wwnn1],
                 'host': 'HostX'}

    fabric_name_prefix = "fakeFabric"
    end_point_map = {connector['wwpns'][0]: [wwnn1],
                     connector['wwpns'][1]: [wwnn1]}
    target_wwns = [wwnn1]
    zoning_mappings = {
        'array': u'000197800123',
        'init_targ_map': end_point_map,
        'initiator_group': initiatorgroup_name_f,
        'port_group': port_group_name_f,
        'target_wwns': target_wwns}

    # Build one fake fabric entry per initiator wwpn; the "target" wwn is
    # simply the initiator wwn reversed.
    device_map = {}
    for wwn in connector['wwpns']:
        fabric_name = ''.join([fabric_name_prefix,
                               wwn[-2:]])
        target_wwn = wwn[::-1]
        fabric_map = {'initiator_port_wwn_list': [wwn],
                      'target_port_wwn_list': [target_wwn]
                      }
        device_map[fabric_name] = fabric_map

    iscsi_device_info = {'maskingview': masking_view_name_i,
                         'ip_and_iqn': [{'ip': ip,
                                         'iqn': initiator}],
                         'is_multipath': True,
                         'array': array,
                         'controller': {'host': '10.00.00.00'},
                         'hostlunid': 3}
    fc_device_info = {'maskingview': masking_view_name_f,
                      'array': array,
                      'controller': {'host': '10.00.00.00'},
                      'hostlunid': 3}

    # cinder volume info
    ctx = context.RequestContext('admin', 'fake', True)
    provider_location = {'array': six.text_type(array),
                         'device_id': device_id}
    provider_location2 = {'array': six.text_type(array),
                          'device_id': device_id2}
    provider_location3 = {'array': six.text_type(remote_array),
                          'device_id': device_id2}
    # Legacy (SMI-S era) provider_location format kept for upgrade tests.
    legacy_provider_location = {
        'classname': 'Symm_StorageVolume',
        'keybindings': {'CreationClassName': u'Symm_StorageVolume',
                        'SystemName': u'SYMMETRIX+000197800123',
                        'DeviceID': device_id,
                        'SystemCreationClassName': u'Symm_StorageSystem'}}
    legacy_provider_location2 = {
        'classname': 'Symm_StorageVolume',
        'keybindings': {'CreationClassName': u'Symm_StorageVolume',
                        'SystemName': u'SYMMETRIX+000197800123',
                        'DeviceID': device_id2,
                        'SystemCreationClassName': u'Symm_StorageSystem'}}

    test_volume_type = fake_volume.fake_volume_type_obj(
        context=ctx
    )

    test_volume = fake_volume.fake_volume_obj(
        context=ctx, name='vol1', size=2, provider_auth=None,
        provider_location=six.text_type(provider_location),
        volume_type=test_volume_type, host=fake_host,
        replication_driver_data=six.text_type(provider_location3))

    test_attached_volume = fake_volume.fake_volume_obj(
        context=ctx, name='vol1', size=2, provider_auth=None,
        provider_location=six.text_type(provider_location), host=fake_host,
        volume_type=test_volume_type, attach_status="attached",
        replication_driver_data=six.text_type(provider_location3))

    test_legacy_vol = fake_volume.fake_volume_obj(
        context=ctx, name='vol1', size=2, provider_auth=None,
        provider_location=six.text_type(legacy_provider_location),
        replication_driver_data=six.text_type(legacy_provider_location2),
        host=fake_host, volume_type=test_volume_type)

    test_clone_volume = fake_volume.fake_volume_obj(
        context=ctx, name='vol1', size=2, provider_auth=None,
        provider_location=six.text_type(provider_location2),
        host=fake_host)

    snapshot_id = '390eeb4d-0f56-4a02-ba14-167167967014'

    # Snap name is 'OS-' + first 6 and last 9 chars of the snapshot uuid.
    test_snapshot_snap_name = 'OS-' + snapshot_id[:6] + snapshot_id[-9:]
    snap_location = {'snap_name': test_snapshot_snap_name,
                     'source_id': device_id}
    test_snapshot = fake_snapshot.fake_snapshot_obj(
        context=ctx, id=snapshot_id,
        name='my_snap', size=2,
        provider_location=six.text_type(snap_location),
        host=fake_host, volume=test_volume)

    test_legacy_snapshot = fake_snapshot.fake_snapshot_obj(
        context=ctx, id='8d38ccfc-3d29-454c-858b-8348a8f9cc95',
        name='my_snap', size=2,
        provider_location=six.text_type(legacy_provider_location),
        host=fake_host, volume=test_volume)

    test_failed_snap = fake_snapshot.fake_snapshot_obj(
        context=ctx,
        id='4732de9b-98a4-4b6d-ae4b-3cafb3d34220',
        name=failed_resource,
        size=2,
        provider_location=six.text_type(snap_location),
        host=fake_host, volume=test_volume)

    location_info = {'location_info': '000197800123#SRP_1#Diamond#DSS',
                     'storage_protocol': 'FC'}
    test_host = {'capabilities': location_info,
                 'host': fake_host}

    # extra-specs
    vol_type_extra_specs = {'pool_name': u'Diamond+DSS+SRP_1+000197800123'}
    vol_type_extra_specs_compr_disabled = {
        'pool_name': u'Diamond+DSS+SRP_1+000197800123',
        'storagetype:disablecompression': "true"}
    vol_type_extra_specs_rep_enabled = {
        'pool_name': u'Diamond+DSS+SRP_1+000197800123',
        'replication_enabled': '<is> True'}
    extra_specs = {'pool_name': u'Diamond+DSS+SRP_1+000197800123',
                   'slo': slo,
                   'workload': workload,
                   'srp': srp,
                   'array': array,
                   'interval': 3,
                   'retries': 120}
    extra_specs_disable_compression = deepcopy(extra_specs)
    extra_specs_disable_compression[utils.DISABLECOMPRESSION] = "true"
    extra_specs_intervals_set = deepcopy(extra_specs)
    extra_specs_intervals_set['interval'] = 1
    extra_specs_intervals_set['retries'] = 1
    extra_specs_rep_enabled = deepcopy(extra_specs)
    extra_specs_rep_enabled['replication_enabled'] = True
    rep_extra_specs = deepcopy(extra_specs_rep_enabled)
    rep_extra_specs['array'] = remote_array
    rep_extra_specs['interval'] = 0
    rep_extra_specs['retries'] = 0
    rep_extra_specs['srp'] = srp2

    test_volume_type_1 = volume_type.VolumeType(
        id='2b06255d-f5f0-4520-a953-b029196add6a', name='abc',
        extra_specs=extra_specs
    )

    test_volume_type_list = volume_type.VolumeTypeList(
        objects=[test_volume_type_1])

    test_vol_grp_name_id_only = 'ec870a2f-6bf7-4152-aa41-75aad8e2ea96'
    test_vol_grp_name = 'Grp_source_sg_%s' % test_vol_grp_name_id_only

    test_group_1 = group.Group(
        context=None, name=storagegroup_name_source,
        group_id='abc', size=1,
        id=test_vol_grp_name_id_only,
        status='available',
        provider_auth=None, volume_type_ids=['abc'],
        group_type_id='grptypeid',
        volume_types=test_volume_type_list,
        host=fake_host, provider_location=six.text_type(provider_location))

    test_group_failed = group.Group(
        context=None, name=failed_resource,
        group_id='14b8894e-54ec-450a-b168-c172a16ed166',
        size=1,
        id='318c721c-51ad-4160-bfe1-ebde2273836f',
        status='available',
        provider_auth=None, volume_type_ids=['abc'],
        group_type_id='grptypeid',
        volume_types=test_volume_type_list,
        host=fake_host, provider_location=six.text_type(provider_location))

    test_group = fake_group.fake_group_obj(
        context=ctx, name=storagegroup_name_source,
        id='7634bda4-6950-436f-998c-37c3e01bad30', host=fake_host)

    test_group_without_name = fake_group.fake_group_obj(
        context=ctx,
        name=None,
        id=test_vol_grp_name_id_only,
        host=fake_host)

    test_group_snapshot_1 = group_snapshot.GroupSnapshot(
        context=None, id='6560405d-b89a-4f79-9e81-ad1752f5a139',
        group_id='876d9fbb-de48-4948-9f82-15c913ed05e7',
        name=group_snapshot_name,
        group_type_id='c6934c26-dde8-4bf8-a765-82b3d0130e9f',
        status='available',
        group=test_group_1)

    test_group_snapshot_failed = group_snapshot.GroupSnapshot(
        context=None, id='0819dd5e-9aa1-4ec7-9dda-c78e51b2ad76',
        group_id='1fc735cb-d36c-4352-8aa6-dc1e16b5a0a7',
        name=failed_resource,
        group_type_id='6b70de13-98c5-46b2-8f24-e4e96a8988fa',
        status='available',
        group=test_group_failed)

    # masking view dict
    masking_view_dict = {
        'array': array,
        'connector': connector,
        'device_id': device_id,
        'init_group_name': initiatorgroup_name_f,
        'initiator_check': False,
        'maskingview_name': masking_view_name_f,
        'parent_sg_name': parent_sg_f,
        'srp': srp,
        'storagetype:disablecompression': False,
        utils.PORTGROUPNAME: port_group_name_f,
        'slo': slo,
        'storagegroup_name': storagegroup_name_f,
        'volume_name': test_volume.name,
        'workload': workload,
        'replication_enabled': False}

    masking_view_dict_no_slo = deepcopy(masking_view_dict)
    masking_view_dict_no_slo.update(
        {'slo': None, 'workload': None,
         'storagegroup_name': 'OS-HostX-No_SLO-OS-fibre-PG'})

    masking_view_dict_compression_disabled = deepcopy(masking_view_dict)
    masking_view_dict_compression_disabled.update(
        {'storagetype:disablecompression': True,
         'storagegroup_name': 'OS-HostX-SRP_1-DiamondDSS-OS-fibre-PG-CD'})

    masking_view_dict_replication_enabled = deepcopy(masking_view_dict)
    masking_view_dict_replication_enabled.update(
        {'replication_enabled': True,
         'storagegroup_name': 'OS-HostX-SRP_1-DiamondDSS-OS-fibre-PG-RE'})

    # vmax data
    # sloprovisioning
    compression_info = {"symmetrixId": ["000197800128"]}
    inititiatorgroup = [{"initiator": [wwpn1],
                         "hostId": initiatorgroup_name_f,
                         "maskingview": [masking_view_name_f]},
                        {"initiator": [initiator],
                         "hostId": initiatorgroup_name_i,
                         "maskingview": [masking_view_name_i]}]

    initiator_list = [{"host": initiatorgroup_name_f,
                       "initiatorId": wwpn1,
                       "maskingview": [masking_view_name_f]},
                      {"host": initiatorgroup_name_i,
                       "initiatorId": initiator,
                       "maskingview": [masking_view_name_i]},
                      {"initiatorId": [
                          "FA-1D:4:" + wwpn1,
                          "SE-4E:0:" + initiator]}]

    maskingview = [{"maskingViewId": masking_view_name_f,
                    "portGroupId": port_group_name_f,
                    "storageGroupId": storagegroup_name_f,
                    "hostId": initiatorgroup_name_f,
                    "maskingViewConnection": [
                        {"host_lun_address": "0003"}]},
                   {"maskingViewId": masking_view_name_i,
                    "portGroupId": port_group_name_i,
                    "storageGroupId": storagegroup_name_i,
                    "hostId": initiatorgroup_name_i,
                    "maskingViewConnection": [
                        {"host_lun_address": "0003"}]},
                   {}]

    portgroup = [{"portGroupId": port_group_name_f,
                  "symmetrixPortKey": [
                      {"directorId": "FA-1D",
                       "portId": "FA-1D:4"}],
                  "maskingview": [masking_view_name_f]},
                 {"portGroupId": port_group_name_i,
                  "symmetrixPortKey": [
                      {"directorId": "SE-4E",
                       "portId": "SE-4E:0"}],
                  "maskingview": [masking_view_name_i]}]

    port_list = [
        {"symmetrixPort": {"num_of_masking_views": 1,
                           "maskingview": [masking_view_name_f],
                           "identifier": wwnn1,
                           "symmetrixPortKey": {
                               "directorId": "FA-1D",
                               "portId": "4"},
                           "portgroup": [port_group_name_f]}},
        {"symmetrixPort": {"identifier": initiator,
                           "symmetrixPortKey": {
                               "directorId": "SE-4E",
                               "portId": "0"},
                           "ip_addresses": [ip],
                           "num_of_masking_views": 1,
                           "maskingview": [masking_view_name_i],
                           "portgroup": [port_group_name_i]}}]

    sg_details = [{"srp": srp,
                   "num_of_vols": 2,
                   "cap_gb": 2,
                   "storageGroupId": defaultstoragegroup_name,
                   "slo": slo,
                   "workload": workload},
                  {"srp": srp,
                   "num_of_vols": 2,
                   "cap_gb": 2,
                   "storageGroupId": storagegroup_name_f,
                   "slo": slo,
                   "workload": workload,
                   "maskingview": [masking_view_name_f],
                   "parent_storage_group": [parent_sg_f]},
                  {"srp": srp,
                   "num_of_vols": 2,
                   "cap_gb": 2,
                   "storageGroupId": storagegroup_name_i,
                   "slo": slo,
                   "workload": workload,
                   "maskingview": [masking_view_name_i],
                   "parent_storage_group": [parent_sg_i]},
                  {"num_of_vols": 2,
                   "cap_gb": 2,
                   "storageGroupId": parent_sg_f,
                   "num_of_child_sgs": 1,
                   "child_storage_group": [storagegroup_name_f],
                   "maskingview": [masking_view_name_f]},
                  {"num_of_vols": 2,
                   "cap_gb": 2,
                   "storageGroupId": parent_sg_i,
                   "num_of_child_sgs": 1,
                   "child_storage_group": [storagegroup_name_i],
                   "maskingview": [masking_view_name_i], }
                  ]

    sg_details_rep = [{"childNames": [],
                       "numDevicesNonGk": 2,
                       "isLinkTarget": False,
                       "rdf": False,
                       "capacityGB": 2.0,
                       "name": storagegroup_name_source,
                       "snapVXSnapshots": ['12345'],
                       "symmetrixId": array,
                       "numSnapVXSnapshots": 1}]

    sg_list = {"storageGroupId": [storagegroup_name_f,
                                  defaultstoragegroup_name]}

    sg_list_rep = [storagegroup_name_with_id]

    srp_details = {"srpSloDemandId": ["Bronze", "Diamond", "Gold",
                                      "None", "Optimized", "Silver"],
                   "srpId": srp,
                   "total_allocated_cap_gb": 5244.7,
                   "total_usable_cap_gb": 20514.4,
                   "total_subscribed_cap_gb": 84970.1,
                   "reserved_cap_percent": 10}

    volume_details = [{"cap_gb": 2,
                       "num_of_storage_groups": 1,
                       "volumeId": device_id,
                       "volume_identifier": "1",
                       "wwn": volume_wwn,
                       "snapvx_target": 'false',
                       "snapvx_source": 'false',
                       "storageGroupId": [defaultstoragegroup_name,
                                          storagegroup_name_f]},
                      {"cap_gb": 1,
                       "num_of_storage_groups": 1,
                       "volumeId": device_id2,
                       "volume_identifier": "OS-2",
                       "wwn": '600012345',
                       "storageGroupId": [defaultstoragegroup_name,
                                          storagegroup_name_f]}]

    volume_list = [
        {"resultList": {"result": [{"volumeId": device_id}]}},
        {"resultList": {"result": [{"volumeId": device_id2}]}},
        {"resultList": {"result": [{"volumeId": device_id},
                                   {"volumeId": device_id2}]}}]

    private_vol_details = {
        "resultList": {
            "result": [{
                "timeFinderInfo": {
                    "snapVXSession": [
                        {"srcSnapshotGenInfo": [
                            {"snapshotHeader": {
                                "snapshotName": "temp-1",
                                "device": device_id},
                                "lnkSnapshotGenInfo": [
                                    {"targetDevice": device_id2}]}]},
                        {"tgtSrcSnapshotGenInfo": {
                            "snapshotName": "temp-1",
                            "targetDevice": device_id2,
                            "sourceDevice": device_id}}],
                    "snapVXSrc": 'true',
                    "snapVXTgt": 'true'}}]}}

    workloadtype = {"workloadId": ["OLTP", "OLTP_REP", "DSS", "DSS_REP"]}
    slo_details = {"sloId": ["Bronze", "Diamond", "Gold",
                             "Optimized", "Platinum", "Silver"]}

    # replication
    volume_snap_vx = {"snapshotLnks": [],
                      "snapshotSrcs": [
                          {"generation": 0,
                           "linkedDevices": [
                               {"targetDevice": device_id2,
                                "percentageCopied": 100,
                                "state": "Copied",
                                "copy": True,
                                "defined": True,
                                "linked": True}],
                           "snapshotName": test_snapshot_snap_name,
                           "state": "Established"}]}
    capabilities = {"symmetrixCapability": [{"rdfCapable": True,
                                             "snapVxCapable": True,
                                             "symmetrixId": "0001111111"},
                                            {"symmetrixId": array,
                                             "snapVxCapable": True,
                                             "rdfCapable": True}]}
    group_snap_vx = {"generation": 0,
                     "isLinked": False,
                     "numUniqueTracks": 0,
                     "isRestored": False,
                     "name": group_snapshot_name,
                     "numStorageGroupVolumes": 1,
                     "state": ["Established"],
                     "timeToLiveExpiryDate": "N/A",
                     "isExpired": False,
                     "numSharedTracks": 0,
                     "timestamp": "00:30:50 Fri, 02 Jun 2017 IST +0100",
                     "numSourceVolumes": 1
                     }
    group_snap_vx_1 = {"generation": 0,
                       "isLinked": False,
                       "numUniqueTracks": 0,
                       "isRestored": False,
                       "name": group_snapshot_name,
                       "numStorageGroupVolumes": 1,
                       "state": ["Copied"],
                       "timeToLiveExpiryDate": "N/A",
                       "isExpired": False,
                       "numSharedTracks": 0,
                       "timestamp": "00:30:50 Fri, 02 Jun 2017 IST +0100",
                       "numSourceVolumes": 1,
                       "linkedStorageGroup":
                           {"name": target_group_name,
                            "percentageCopied": 100},
                       }
    grp_snapvx_links = [{"name": target_group_name,
                         "percentageCopied": 100},
                        {"name": "another-target",
                         "percentageCopied": 90}]

    rdf_group_list = {"rdfGroupID": [{"rdfgNumber": rdf_group_no,
                                      "label": rdf_group_name}]}
    rdf_group_details = {"modes": ["Synchronous"],
                         "remoteSymmetrix": remote_array,
                         "label": rdf_group_name,
                         "type": "Dynamic",
                         "numDevices": 1,
                         "remoteRdfgNumber": rdf_group_no,
                         "rdfgNumber": rdf_group_no}
    rdf_group_vol_details = {"remoteRdfGroupNumber": rdf_group_no,
                             "localSymmetrixId": array,
                             "volumeConfig": "RDF1+TDEV",
                             "localRdfGroupNumber": rdf_group_no,
                             "localVolumeName": device_id,
                             "rdfpairState": "Synchronized",
                             "remoteVolumeName": device_id2,
                             "localVolumeState": "Ready",
                             "rdfMode": "Synchronous",
                             "remoteVolumeState": "Write Disabled",
                             "remoteSymmetrixId": remote_array}

    # system
    job_list = [{"status": "SUCCEEDED",
                 "jobId": "12345",
                 "result": "created",
                 "resourceLink": "storagegroup/%s" % storagegroup_name_f},
                {"status": "RUNNING", "jobId": "55555"},
                {"status": "FAILED", "jobId": "09999"}]
    symmetrix = {"symmetrixId": array,
                 "model": "VMAX250F",
                 "ucode": "5977.1091.1092"}

    headroom = {"headroom": [{"headroomCapacity": 20348.29}]}
class FakeLookupService(object):
    """Fake FC SAN lookup service that always returns the canned map."""

    def get_device_mapping_from_network(self, initiator_wwns, target_wwns):
        # Arguments are ignored; the tests only need a deterministic map.
        return VMAXCommonData.device_map
class FakeResponse(object):
    """Minimal stand-in for ``requests.Response``.

    Carries a status code plus a canned body; ``json()`` mirrors the
    real API by raising ``ValueError`` when there is no body to decode.
    """

    def __init__(self, status_code, return_object):
        self.status_code = status_code
        self.return_object = return_object

    def json(self):
        """Return the canned body, or raise ValueError when empty."""
        if not self.return_object:
            raise ValueError
        return self.return_object
class FakeRequestsSession(object):
    """Fake ``requests`` session serving canned VMAXCommonData payloads.

    Responses are chosen by matching substrings of the URL, so the
    ordering of the if/elif chains below is significant.  The special
    pseudo-verbs 'TIMEOUT' and 'EXCEPTION' let tests force error paths.
    """

    def __init__(self, *args, **kwargs):
        self.data = VMAXCommonData()

    def request(self, method, url, params=None, data=None):
        """Dispatch on the HTTP verb and wrap the result in FakeResponse."""
        return_object = ''
        status_code = 200
        if method == 'GET':
            status_code, return_object = self._get_request(url, params)

        elif method == 'POST' or method == 'PUT':
            status_code, return_object = self._post_or_put(url, data)

        elif method == 'DELETE':
            status_code, return_object = self._delete(url)

        elif method == 'TIMEOUT':
            # Pseudo-verb: simulate a request timeout.
            raise requests.Timeout

        elif method == 'EXCEPTION':
            # Pseudo-verb: simulate an unexpected failure.
            raise Exception

        return FakeResponse(status_code, return_object)

    def _get_request(self, url, params):
        """Route a GET by URL substring; failed resources return HTTP 500."""
        status_code = 200
        return_object = None
        if self.data.failed_resource in url:
            status_code = 500
            return_object = self.data.job_list[2]
        elif 'sloprovisioning' in url:
            if 'volume' in url:
                return_object = self._sloprovisioning_volume(url, params)
            elif 'storagegroup' in url:
                return_object = self._sloprovisioning_sg(url)
            elif 'maskingview' in url:
                return_object = self._sloprovisioning_mv(url)
            elif 'portgroup' in url:
                return_object = self._sloprovisioning_pg(url)
            elif 'director' in url:
                return_object = self._sloprovisioning_port(url)
            elif 'host' in url:
                return_object = self._sloprovisioning_ig(url)
            elif 'initiator' in url:
                return_object = self._sloprovisioning_initiator(url)
            elif 'srp' in url:
                return_object = self.data.srp_details
            elif 'workloadtype' in url:
                return_object = self.data.workloadtype
            elif 'compressionCapable' in url:
                return_object = self.data.compression_info
            else:
                # Anything else under sloprovisioning: SLO listing.
                return_object = self.data.slo_details

        elif 'replication' in url:
            return_object = self._replication(url)

        elif 'system' in url:
            return_object = self._system(url)

        elif 'headroom' in url:
            return_object = self.data.headroom

        return status_code, return_object

    def _sloprovisioning_volume(self, url, params):
        """Return volume list/detail payloads for volume GETs."""
        return_object = self.data.volume_list[2]
        if '/private' in url:
            return_object = self.data.private_vol_details
        elif params:
            # Query by volume identifier: '1' and '2' select single-volume
            # result lists.
            if '1' in params.values():
                return_object = self.data.volume_list[0]
            elif '2' in params.values():
                return_object = self.data.volume_list[1]
        else:
            # Detail GET: match the device id embedded in the URL.
            for vol in self.data.volume_details:
                if vol['volumeId'] in url:
                    return_object = vol
                    break
        return return_object

    def _sloprovisioning_sg(self, url):
        """Return the storage group matching the URL, else the SG list."""
        return_object = self.data.sg_list
        for sg in self.data.sg_details:
            if sg['storageGroupId'] in url:
                return_object = sg
                break
        return return_object

    def _sloprovisioning_mv(self, url):
        """Return the iSCSI masking view if named, else the FC one."""
        if self.data.masking_view_name_i in url:
            return_object = self.data.maskingview[1]
        else:
            return_object = self.data.maskingview[0]
        return return_object

    def _sloprovisioning_pg(self, url):
        """Return the port group whose id appears in the URL, or None."""
        return_object = None
        for pg in self.data.portgroup:
            if pg['portGroupId'] in url:
                return_object = pg
                break
        return return_object

    def _sloprovisioning_port(self, url):
        """Return the director/port entry matching the URL, or None."""
        return_object = None
        for port in self.data.port_list:
            if port['symmetrixPort']['symmetrixPortKey']['directorId'] in url:
                return_object = port
                break
        return return_object

    def _sloprovisioning_ig(self, url):
        """Return the initiator group (host) matching the URL, or None."""
        return_object = None
        for ig in self.data.inititiatorgroup:
            if ig['hostId'] in url:
                return_object = ig
                break
        return return_object

    def _sloprovisioning_initiator(self, url):
        """Return the initiator entry for the wwpn/iqn in the URL."""
        return_object = self.data.initiator_list[2]
        if self.data.wwpn1 in url:
            return_object = self.data.initiator_list[0]
        elif self.data.initiator in url:
            return_object = self.data.initiator_list[1]
        return return_object

    def _replication(self, url):
        """Route replication GETs: rdf groups, SGs, snapshots, capability."""
        return_object = None
        if 'rdf_group' in url:
            if self.data.device_id in url:
                return_object = self.data.rdf_group_vol_details
            elif self.data.rdf_group_no in url:
                return_object = self.data.rdf_group_details
            else:
                return_object = self.data.rdf_group_list
        elif 'storagegroup' in url:
            return_object = self._replication_sg(url)
        elif 'snapshot' in url:
            return_object = self.data.volume_snap_vx
        elif 'capabilities' in url:
            return_object = self.data.capabilities
        return return_object

    def _replication_sg(self, url):
        """Return group snapvx generation info or the replicated SG."""
        return_object = None
        if 'generation' in url:
            return_object = self.data.group_snap_vx
        elif 'storagegroup' in url:
            return_object = self.data.sg_details_rep[0]
        return return_object

    def _system(self, url):
        """Return the job matching the URL, or the symmetrix summary."""
        return_object = None
        if 'job' in url:
            for job in self.data.job_list:
                if job['jobId'] in url:
                    return_object = job
                    break
        else:
            return_object = self.data.symmetrix
        return return_object

    def _post_or_put(self, url, payload):
        """POST/PUT: 201 success, 202 async, 500 for failed resources."""
        return_object = self.data.job_list[0]
        status_code = 201
        if self.data.failed_resource in url:
            status_code = 500
            return_object = self.data.job_list[2]
        elif payload:
            # Payload arrives as a string; parse it back to a dict.
            payload = ast.literal_eval(payload)
            if self.data.failed_resource in payload.values():
                status_code = 500
                return_object = self.data.job_list[2]
            if payload.get('executionOption'):
                # Async invocation returns 202 Accepted.
                status_code = 202
        return status_code, return_object

    def _delete(self, url):
        """DELETE: 204 success with empty body, 500 for failed resources."""
        if self.data.failed_resource in url:
            status_code = 500
            return_object = self.data.job_list[2]
        else:
            status_code = 204
            return_object = None
        return status_code, return_object

    def session(self):
        # Mirror requests' API by handing back a fresh fake session.
        return FakeRequestsSession()
class FakeConfiguration(object):
    """Lightweight stand-in for cinder's backend configuration object."""

    def __init__(self, emc_file=None, volume_backend_name=None,
                 interval=0, retries=0, replication_device=None):
        self.cinder_dell_emc_config_file = emc_file
        self.volume_backend_name = volume_backend_name
        self.config_group = volume_backend_name
        self.interval = interval
        self.retries = retries
        # Only set when replication is configured, matching the real
        # configuration object's optional attribute.
        if replication_device:
            self.replication_device = [replication_device]

    def safe_get(self, key):
        """Return the named option, or None when it is not set."""
        return getattr(self, key, None)

    def append_config_values(self, values):
        """No-op: option registration is irrelevant for these tests."""
        pass
class FakeXML(object):
    """Builds throwaway cinder_dell_emc_config XML files for the tests."""

    def __init__(self):
        """Create a temp directory to hold generated config files."""
        self.tempdir = tempfile.mkdtemp()
        self.data = VMAXCommonData()

    def create_fake_config_file(self, config_group, portgroup,
                                ssl_verify=False):
        """Write a minimal driver config file and return its path.

        :param config_group: backend name embedded in the file name
        :param portgroup: port group name to put inside <PortGroups>
        :param ssl_verify: when True, also emit SSLCert/SSLVerify elements
        :returns: path of the generated XML file
        """
        doc = minidom.Document()
        emc = doc.createElement("EMC")
        doc.appendChild(emc)
        doc = self.add_array_info(doc, emc, portgroup, ssl_verify)
        filename = 'cinder_dell_emc_config_%s.xml' % config_group
        config_file_path = self.tempdir + '/' + filename

        # Context manager guarantees the handle is closed even if
        # writexml raises (original code leaked the handle on error).
        with open(config_file_path, 'w') as f:
            doc.writexml(f)
        return config_file_path

    def add_array_info(self, doc, emc, portgroup_name, ssl_verify):
        """Populate <EMC> with array, REST, port group and SRP elements.

        Element order matches what parse_file_to_get_array_map expects.
        """
        def _append_text_element(parent, tag, text):
            # Create <tag>text</tag> and attach it under parent.
            element = doc.createElement(tag)
            element.appendChild(doc.createTextNode(text))
            parent.appendChild(element)

        _append_text_element(emc, "Array", self.data.array)
        _append_text_element(emc, "RestServerIp", "1.1.1.1")
        _append_text_element(emc, "RestServerPort", "8443")
        _append_text_element(emc, "RestUserName", "smc")
        _append_text_element(emc, "RestPassword", "smc")

        portgroups = doc.createElement("PortGroups")
        _append_text_element(portgroups, "PortGroup", portgroup_name)
        emc.appendChild(portgroups)

        _append_text_element(emc, "SRP", "SRP_1")

        if ssl_verify:
            _append_text_element(emc, "SSLCert", "/path/cert.crt")
            _append_text_element(emc, "SSLVerify", "/path/cert.pem")
        return doc
class VMAXUtilsTest(test.TestCase):
def setUp(self):
    """Build an iSCSI driver wired to fake config/REST session objects."""
    self.data = VMAXCommonData()

    super(VMAXUtilsTest, self).setUp()
    config_group = 'UtilsTests'
    # Generated XML config includes SSL cert/verify entries (flag True).
    fake_xml = FakeXML().create_fake_config_file(
        config_group, self.data.port_group_name_i, True)
    configuration = FakeConfiguration(fake_xml, config_group)
    # Patch BEFORE instantiating the driver so it picks up the fake
    # REST session instead of opening a real one.
    rest.VMAXRest._establish_rest_session = mock.Mock(
        return_value=FakeRequestsSession())
    driver = iscsi.VMAXISCSIDriver(configuration=configuration)
    self.driver = driver
    self.common = self.driver.common
    self.utils = self.common.utils
def test_get_volumetype_extra_specs(self):
    """Exercise the three lookup paths for volume-type extra specs."""
    with mock.patch.object(volume_types, 'get_volume_type_extra_specs',
                           return_value={'specs'}) as mock_specs:
        # Path 1: no type id passed in - fall back to the volume's own.
        self.data.test_volume.volume_type_id = (
            self.data.test_volume_type.id)
        self.utils.get_volumetype_extra_specs(self.data.test_volume)
        mock_specs.assert_called_once_with(self.data.test_volume_type.id)
        mock_specs.reset_mock()

        # Path 2: an explicit type id wins over the volume's.
        self.utils.get_volumetype_extra_specs(self.data.test_volume, '123')
        mock_specs.assert_called_once_with('123')
        mock_specs.reset_mock()

        # Path 3: volume without a type id - no lookup at all.
        self.utils.get_volumetype_extra_specs(self.data.test_clone_volume)
        mock_specs.assert_not_called()
def test_get_volumetype_extra_specs_exception(self):
    """A volume dict lacking a type id yields empty extra specs."""
    typeless_volume = {'name': 'no_type_id'}
    self.assertEqual(
        {}, self.utils.get_volumetype_extra_specs(typeless_volume))
def test_get_random_portgroup(self):
    """_get_random_portgroup picks one of the configured port groups."""
    xml_header = "<?xml version='1.0' encoding='UTF-8'?>\n<EMC>\n"
    # Four distinct port groups.
    distinct = (xml_header +
                "<PortGroups>"
                "<PortGroup>OS-PG1</PortGroup>\n"
                "<PortGroup>OS-PG2</PortGroup>\n"
                "<PortGroup>OS-PG3</PortGroup>\n"
                "<PortGroup>OS-PG4</PortGroup>\n"
                "</PortGroups>"
                "</EMC>")
    self.assertIn(
        'OS-PG',
        self.utils._get_random_portgroup(minidom.parseString(distinct)))

    # Duplicate entries must still yield a valid group.
    duplicated = (xml_header +
                  "<PortGroups>"
                  "<PortGroup>OS-PG1</PortGroup>\n"
                  "<PortGroup>OS-PG1</PortGroup>\n"
                  "<PortGroup>OS-PG1</PortGroup>\n"
                  "<PortGroup>OS-PG2</PortGroup>\n"
                  "</PortGroups>"
                  "</EMC>")
    self.assertIn(
        'OS-PG',
        self.utils._get_random_portgroup(minidom.parseString(duplicated)))
def test_get_random_portgroup_none(self):
    """No port group can be selected from configs without any entries."""
    xml_header = "<?xml version='1.0' encoding='UTF-8'?>\n<EMC>\n"
    # Case 1: the <PortGroups> element is missing entirely.
    dom = minidom.parseString(xml_header + "</EMC>")
    self.assertIsNone(self.utils._get_random_portgroup(dom))

    # Case 2: <PortGroups> exists but contains no <PortGroup> children.
    dom = minidom.parseString(
        xml_header + "<PortGroups>" + "</PortGroups>" + "</EMC>")
    self.assertIsNone(self.utils._get_random_portgroup(dom))
def test_get_host_short_name(self):
    """Short names pass through; long/dotted names are shortened."""
    short_host = 'host_13_chars'
    self.assertEqual(short_host,
                     self.utils.get_host_short_name(short_host))

    long_host = (
        'host_over_16_chars_host_over_16_chars_host_over_16_chars')
    # The md5-based short name must be deterministic across calls.
    first = self.utils.get_host_short_name(long_host)
    second = self.utils.get_host_short_name(long_host)
    self.assertEqual(first, second)

    # Dotted hostnames are truncated at the first component.
    dotted_host = 'hostname.with.many.parts'
    expected = self.utils.generate_unique_trunc_host('hostname')
    self.assertEqual(expected,
                     self.utils.get_host_short_name(dotted_host))
def test_get_volume_element_name(self):
    """The element name is the volume id with an 'OS-' prefix."""
    vol_id = 'ea95aa39-080b-4f11-9856-a03acf9112ad'
    self.assertEqual('OS-' + vol_id,
                     self.utils.get_volume_element_name(vol_id))
def test_parse_file_to_get_array_map(self):
    """All connection values are parsed out of the XML config file."""
    expected = {'RestServerIp': '1.1.1.1',
                'RestServerPort': '8443',
                'RestUserName': 'smc',
                'RestPassword': 'smc',
                'SSLCert': '/path/cert.crt',
                'SSLVerify': '/path/cert.pem',
                'SerialNumber': self.data.array,
                'srpName': 'SRP_1',
                'PortGroup': self.data.port_group_name_i}
    actual = self.utils.parse_file_to_get_array_map(
        self.common.configuration.cinder_dell_emc_config_file)
    self.assertEqual(expected, actual)
@mock.patch.object(utils.VMAXUtils, '_get_connection_info')
@mock.patch.object(utils.VMAXUtils, '_get_random_portgroup')
def test_parse_file_to_get_array_map_errors(self, mock_port, mock_conn):
    """An XML file containing only <EMC> parses to a map with no serial."""
    scratch_dir = tempfile.mkdtemp()
    doc = minidom.Document()
    doc.appendChild(doc.createElement("EMC"))
    config_file_path = (
        scratch_dir + '/' + 'cinder_dell_emc_config_%s.xml' % 'fake_xml')
    with open(config_file_path, 'w') as f:
        doc.writexml(f)
    array_info = self.utils.parse_file_to_get_array_map(config_file_path)
    self.assertIsNone(array_info['SerialNumber'])
def test_parse_file_to_get_array_map_conn_errors(self):
    """Missing connection info raises VolumeBackendAPIException."""
    scratch_dir = tempfile.mkdtemp()
    doc = minidom.Document()
    doc.appendChild(doc.createElement("EMC"))
    config_file_path = (
        scratch_dir + '/' + 'cinder_dell_emc_config_%s.xml' % 'fake_xml')
    with open(config_file_path, 'w') as f:
        doc.writexml(f)
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.utils.parse_file_to_get_array_map,
                      config_file_path)
def test_truncate_string(self):
    """Strings shorter than the limit are returned unchanged."""
    short_str = 'string'
    self.assertEqual(short_str,
                     self.utils.truncate_string(short_str, 10))
def test_get_default_oversubscription_ratio(self):
    """Valid ratios pass through; ratios below 1 fall back to 20.0."""
    self.assertEqual(
        30.0, self.utils.get_default_oversubscription_ratio(30.0))
    # A ratio under 1 is invalid and replaced by the default of 20.0.
    self.assertEqual(
        20.0, self.utils.get_default_oversubscription_ratio(0.5))
def test_get_default_storage_group_name_slo_workload(self):
    """SLO plus workload produce the standard default SG name."""
    self.assertEqual(
        self.data.defaultstoragegroup_name,
        self.utils.get_default_storage_group_name(
            self.data.srp, self.data.slo, self.data.workload))
def test_get_default_storage_group_name_no_slo(self):
    """Without an SLO the no-SLO default SG name is used."""
    self.assertEqual(
        self.data.default_sg_no_slo,
        self.utils.get_default_storage_group_name(
            self.data.srp, None, None))
def test_get_default_storage_group_name_compr_disabled(self):
    """The compression-disabled flag yields the '-CD' SG name."""
    self.assertEqual(
        self.data.default_sg_compr_disabled,
        self.utils.get_default_storage_group_name(
            self.data.srp, self.data.slo, self.data.workload, True))
def test_get_time_delta(self):
    """Delta formatting matches datetime.timedelta's string form."""
    begin, finish = 1487781721.09, 1487781758.16
    expected = six.text_type(
        datetime.timedelta(seconds=int(finish - begin)))
    self.assertEqual(expected,
                     self.utils.get_time_delta(begin, finish))
def test_get_short_protocol_type(self):
    """iscsi maps to 'I', FC to 'F', anything else passes through."""
    self.assertEqual('I', self.utils.get_short_protocol_type('iscsi'))
    self.assertEqual('F', self.utils.get_short_protocol_type('FC'))
    self.assertEqual('OTHER',
                     self.utils.get_short_protocol_type('OTHER'))
def test_get_temp_snap_name(self):
    """The temp snap name combines 'temp-', device id and clone name."""
    expected = "temp-00001-12345"
    self.assertEqual(
        expected,
        self.utils.get_temp_snap_name("12345", self.data.device_id))
def test_get_array_and_device_id(self):
    """source-name in the external ref supplies the device id."""
    volume = deepcopy(self.data.test_volume)
    array, device_id = self.utils.get_array_and_device_id(
        volume, {u'source-name': u'00002'})
    self.assertEqual(self.data.array, array)
    self.assertEqual('00002', device_id)
def test_get_array_and_device_id_exception(self):
    """A missing source-name raises VolumeBackendAPIException."""
    volume = deepcopy(self.data.test_volume)
    self.assertRaises(exception.VolumeBackendAPIException,
                      self.utils.get_array_and_device_id,
                      volume, {u'source-name': None})
def test_get_pg_short_name(self):
pg_under_12_chars = 'pg_11_chars'
pg1 = self.utils.get_pg_short_name(pg_under_12_chars)
self.assertEqual(pg_under_12_chars, pg1)
pg_over_12_chars = 'portgroup_over_12_characters'
# Check that the same md5 value is retrieved from multiple calls
pg2 = self.utils.get_pg_short_name(pg_over_12_chars)
pg3 = self.utils.get_pg_short_name(pg_over_12_chars)
self.assertEqual(pg2, pg3)
def test_is_compression_disabled_true(self):
extra_specs = self.data.extra_specs_disable_compression
do_disable_compression = self.utils.is_compression_disabled(
extra_specs)
self.assertTrue(do_disable_compression)
def test_is_compression_disabled_false(self):
# Path 1: no compression extra spec set
extra_specs = self.data.extra_specs
do_disable_compression = self.utils.is_compression_disabled(
extra_specs)
self.assertFalse(do_disable_compression)
# Path 2: compression extra spec set to false
extra_specs2 = deepcopy(extra_specs)
extra_specs2.update({utils.DISABLECOMPRESSION: 'false'})
do_disable_compression2 = self.utils.is_compression_disabled(
extra_specs)
self.assertFalse(do_disable_compression2)
    def test_change_compression_type_true(self):
        """Differing compression settings are detected as a change."""
        source_compr_disabled_true = 'true'
        new_type_compr_disabled = {
            'extra_specs': {utils.DISABLECOMPRESSION: 'no'}}
        ans = self.utils.change_compression_type(
            source_compr_disabled_true, new_type_compr_disabled)
        self.assertTrue(ans)
    def test_change_compression_type_false(self):
        """Matching compression settings are not reported as a change."""
        source_compr_disabled_true = True
        new_type_compr_disabled = {
            'extra_specs': {utils.DISABLECOMPRESSION: 'true'}}
        ans = self.utils.change_compression_type(
            source_compr_disabled_true, new_type_compr_disabled)
        self.assertFalse(ans)
    def test_is_replication_enabled(self):
        """Replication flag is read from the volume type extra specs."""
        is_re = self.utils.is_replication_enabled(
            self.data.vol_type_extra_specs_rep_enabled)
        self.assertTrue(is_re)
        is_re2 = self.utils.is_replication_enabled(self.data.extra_specs)
        self.assertFalse(is_re2)
    def test_get_replication_config(self):
        """Replication config parsing: success, allow_extend, empty, error."""
        # Success, allow_extend false
        rep_device_list1 = [{'target_device_id': self.data.remote_array,
                             'remote_pool': self.data.srp,
                             'remote_port_group': self.data.port_group_name_f,
                             'rdf_group_label': self.data.rdf_group_name}]
        rep_config1 = self.utils.get_replication_config(rep_device_list1)
        self.assertEqual(self.data.remote_array, rep_config1['array'])
        # Success, allow_extend true
        rep_device_list2 = [{'target_device_id': self.data.remote_array,
                             'remote_pool': self.data.srp,
                             'rdf_group_label': self.data.rdf_group_name,
                             'remote_port_group': self.data.port_group_name_f,
                             'allow_extend': 'true'}]
        rep_config2 = self.utils.get_replication_config(rep_device_list2)
        self.assertTrue(rep_config2['allow_extend'])
        # No rep_device_list
        rep_device_list3 = []
        rep_config3 = self.utils.get_replication_config(rep_device_list3)
        self.assertIsNone(rep_config3)
        # Exception
        rep_device_list4 = [{'target_device_id': self.data.remote_array,
                             'remote_pool': self.data.srp}]
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.utils.get_replication_config, rep_device_list4)
    def test_is_volume_failed_over(self):
        """Failed-over detection from replication_status, incl. None input."""
        vol = deepcopy(self.data.test_volume)
        vol.replication_status = fields.ReplicationStatus.FAILED_OVER
        is_fo1 = self.utils.is_volume_failed_over(vol)
        self.assertTrue(is_fo1)
        is_fo2 = self.utils.is_volume_failed_over(self.data.test_volume)
        self.assertFalse(is_fo2)
        is_fo3 = self.utils.is_volume_failed_over(None)
        self.assertFalse(is_fo3)
def test_add_legacy_pools(self):
pools = [{'pool_name': "Diamond+None+SRP_1+000197800111"},
{'pool_name': "Diamond+OLTP+SRP_1+000197800111"}]
new_pools = self.utils.add_legacy_pools(pools)
ref_pools = [{'pool_name': "Diamond+None+SRP_1+000197800111"},
{'pool_name': "Diamond+OLTP+SRP_1+000197800111"},
{'pool_name': "Diamond+SRP_1+000197800111"}]
self.assertEqual(ref_pools, new_pools)
    def test_update_volume_group_name(self):
        """Group name is derived from a group that has a name set."""
        group = self.data.test_group_1
        ref_group_name = self.data.test_vol_grp_name
        vol_grp_name = self.utils.update_volume_group_name(group)
        self.assertEqual(ref_group_name, vol_grp_name)
    def test_update_volume_group_name_id_only(self):
        """Group name falls back to the id when no name is set."""
        group = self.data.test_group_without_name
        ref_group_name = self.data.test_vol_grp_name_id_only
        vol_grp_name = self.utils.update_volume_group_name(group)
        self.assertEqual(ref_group_name, vol_grp_name)
    def test_update_admin_metadata(self):
        """Per-volume values are merged into admin_metadata under the key."""
        admin_metadata = {'targetVolumeName': '123456'}
        ref_model_update = [{'id': '12345',
                             'admin_metadata': admin_metadata}]
        volume_model_update = {'id': '12345'}
        volumes_model_update = [volume_model_update]
        key = 'targetVolumeName'
        values = {}
        values['12345'] = '123456'
        # update_admin_metadata mutates volumes_model_update in place.
        self.utils.update_admin_metadata(
            volumes_model_update, key, values)
        self.assertEqual(ref_model_update, volumes_model_update)
    def test_get_volume_group_utils(self):
        """Array serial is extracted from the group's host field."""
        group = self.data.test_group_1
        array, extraspecs_dict = self.utils.get_volume_group_utils(
            group, interval=1, retries=1)
        ref_array = self.data.array
        self.assertEqual(ref_array, array)
    def test_update_extra_specs_list(self):
        """Extra specs dict is built for a given volume type id."""
        extra_specs = self.data.extra_specs
        volume_type_id = 'abc'
        extraspecs_dict = self.utils._update_extra_specs_list(
            extra_specs, volume_type_id, interval=1, retries=1)
        self.assertEqual(extra_specs, extraspecs_dict['extra_specs'])
    def test_update_intervals_and_retries(self):
        """Interval/retry values are written into the extra specs."""
        extra_specs = self.data.extra_specs
        ref_interval = 1
        extraspecs = self.utils._update_intervals_and_retries(
            extra_specs, interval=1, retries=1)
        self.assertEqual(ref_interval, extraspecs['interval'])
    def test_get_intervals_retries_dict(self):
        """A plain {'interval', 'retries'} dict is returned."""
        ref_value = {'interval': 1, 'retries': 1}
        ret_dict = self.utils.get_intervals_retries_dict(
            interval=1, retries=1)
        self.assertEqual(ref_value, ret_dict)
    def test_update_volume_model_updates(self):
        """New volume entries are appended with the supplied status."""
        volume_model_updates = [{'id': '1', 'status': 'available'}]
        volumes = [self.data.test_volume]
        ref_val = {'id': self.data.test_volume.id,
                   'status': 'error_deleting'}
        ret_val = self.utils.update_volume_model_updates(
            volume_model_updates, volumes, 'abc', status='error_deleting')
        self.assertEqual(ref_val, ret_val[1])
    def test_update_volume_model_updates_empty_update_list(self):
        """Starting from an empty update list, entries default 'available'."""
        volume_model_updates = []
        volumes = [self.data.test_volume]
        ref_val = [{'id': self.data.test_volume.id,
                    'status': 'available'}]
        ret_val = self.utils.update_volume_model_updates(
            volume_model_updates, volumes, 'abc')
        self.assertEqual(ref_val, ret_val)
    def test_update_volume_model_updates_empty_vol_list(self):
        """Empty inputs yield an empty update list."""
        volume_model_updates = []
        volumes = []
        ref_val = []
        ret_val = self.utils.update_volume_model_updates(
            volume_model_updates, volumes, 'abc')
        self.assertEqual(ref_val, ret_val)
class VMAXRestTest(test.TestCase):
    """Unit tests for the VMAX REST layer (rest.VMAXRest)."""
    def setUp(self):
        """Build an FC driver wired to a fake REST session and fake config."""
        self.data = VMAXCommonData()
        super(VMAXRestTest, self).setUp()
        config_group = 'RestTests'
        fake_xml = FakeXML().create_fake_config_file(
            config_group, self.data.port_group_name_f)
        configuration = FakeConfiguration(fake_xml, config_group)
        # Avoid real HTTP traffic: every request goes to the fake session.
        rest.VMAXRest._establish_rest_session = mock.Mock(
            return_value=FakeRequestsSession())
        driver = fc.VMAXFCDriver(configuration=configuration)
        self.driver = driver
        self.common = self.driver.common
        self.rest = self.common.rest
        self.utils = self.common.utils
def test_rest_request_exception(self):
sc, msg = self.rest.request('/fake_url', 'TIMEOUT')
self.assertIsNone(sc)
self.assertIsNone(msg)
self.assertRaises(exception.VolumeBackendAPIException,
self.rest.request, '', 'EXCEPTION')
    def test_wait_for_job_complete(self):
        """A created job completes with return code 0."""
        rc, job, status, task = self.rest.wait_for_job_complete(
            {'status': 'created', 'jobId': '12345'}, self.data.extra_specs)
        self.assertEqual(0, rc)
    def test_wait_for_job_complete_failed(self):
        """A failing poll is surfaced as VolumeBackendAPIException."""
        with mock.patch.object(self.rest, '_is_job_finished',
                               side_effect=exception.BadHTTPResponseStatus):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.rest.wait_for_job_complete,
                              self.data.job_list[0], self.data.extra_specs)
    def test_is_job_finished_false(self):
        """An in-progress job reports complete == False."""
        job_id = "55555"
        complete, response, rc, status, task = self.rest._is_job_finished(
            job_id)
        self.assertFalse(complete)
    def test_is_job_finished_failed(self):
        """A FAILED job reports complete == True with rc == -1."""
        job_id = "55555"
        complete, response, rc, status, task = self.rest._is_job_finished(
            job_id)
        self.assertFalse(complete)
        with mock.patch.object(self.rest, 'request',
                               return_value=(200, {'status': 'FAILED'})):
            complete, response, rc, status, task = (
                self.rest._is_job_finished(job_id))
            self.assertTrue(complete)
            self.assertEqual(-1, rc)
    def test_check_status_code_success(self):
        """A 2xx status code passes silently."""
        status_code = 200
        self.rest.check_status_code_success(
            'test success', status_code, "")
    def test_check_status_code_not_success(self):
        """A 5xx status code raises VolumeBackendAPIException."""
        status_code = 500
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.rest.check_status_code_success,
                          'test exception', status_code, "")
    def test_wait_for_job_success(self):
        """wait_for_job returns cleanly for a successful async job."""
        operation = 'test'
        status_code = 202
        job = self.data.job_list[0]
        extra_specs = self.data.extra_specs
        self.rest.wait_for_job(
            operation, status_code, job, extra_specs)
    def test_wait_for_job_failed(self):
        """A failed async job raises VolumeBackendAPIException."""
        operation = 'test'
        status_code = 202
        job = self.data.job_list[2]
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.rest, 'wait_for_job_complete',
                               return_value=(-1, '', '', '')):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.rest.wait_for_job,
                              operation, status_code, job, extra_specs)
    def test_get_resource_present(self):
        """get_resource returns the payload for an existing resource."""
        array = self.data.array
        category = 'sloprovisioning'
        resource_type = 'storagegroup'
        resource = self.rest.get_resource(array, category, resource_type)
        self.assertEqual(self.data.sg_list, resource)
    def test_get_resource_not_present(self):
        """get_resource returns None for a missing resource."""
        array = self.data.array
        category = 'sloprovisioning'
        resource_type = self.data.failed_resource
        resource = self.rest.get_resource(array, category, resource_type)
        self.assertIsNone(resource)
    def test_create_resource_success(self):
        """create_resource returns the async job payload on success."""
        array = self.data.array
        category = ''
        resource_type = ''
        payload = {'someKey': 'someValue'}
        status_code, message = self.rest.create_resource(
            array, category, resource_type, payload)
        self.assertEqual(self.data.job_list[0], message)
    def test_create_resource_failed(self):
        """create_resource raises on a failing payload."""
        array = self.data.array
        category = ''
        resource_type = ''
        payload = {'someKey': self.data.failed_resource}
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.rest.create_resource, array, category,
            resource_type, payload)
    def test_modify_resource(self):
        """modify_resource returns the async job payload on success."""
        array = self.data.array
        category = ''
        resource_type = ''
        payload = {'someKey': 'someValue'}
        status_code, message = self.rest.modify_resource(
            array, category, resource_type, payload)
        self.assertEqual(self.data.job_list[0], message)
    def test_modify_resource_failed(self):
        """modify_resource raises on a failing payload."""
        array = self.data.array
        category = ''
        resource_type = ''
        payload = {'someKey': self.data.failed_resource}
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.rest.modify_resource, array, category,
            resource_type, payload)
    def test_delete_resource(self):
        """delete_resource checks the 204 status on success."""
        operation = 'delete res resource'
        status_code = 204
        message = None
        array = self.data.array
        category = 'cat'
        resource_type = 'res'
        resource_name = 'name'
        with mock.patch.object(self.rest, 'check_status_code_success'):
            self.rest.delete_resource(
                array, category, resource_type, resource_name)
            self.rest.check_status_code_success.assert_called_with(
                operation, status_code, message)
    def test_delete_resource_failed(self):
        """A failing resource name raises VolumeBackendAPIException."""
        array = self.data.array
        category = self.data.failed_resource
        resource_type = self.data.failed_resource
        resource_name = self.data.failed_resource
        # NOTE(review): this exercises modify_resource although the test
        # name says delete — possibly a copy/paste slip; confirm intent.
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.rest.modify_resource, array, category,
            resource_type, resource_name)
    def test_get_array_serial(self):
        """Array details are fetched for a known serial."""
        ref_details = self.data.symmetrix
        array_details = self.rest.get_array_serial(self.data.array)
        self.assertEqual(ref_details, array_details)
    def test_get_array_serial_failed(self):
        """An unknown array serial yields None."""
        array_details = self.rest.get_array_serial(self.data.failed_resource)
        self.assertIsNone(array_details)
    def test_get_srp_by_name(self):
        """SRP details are fetched by name."""
        ref_details = self.data.srp_details
        srp_details = self.rest.get_srp_by_name(
            self.data.array, self.data.srp)
        self.assertEqual(ref_details, srp_details)
    def test_get_slo_list(self):
        """The slo id list is returned for the array."""
        ref_settings = self.data.slo_details['sloId']
        slo_settings = self.rest.get_slo_list(self.data.array)
        self.assertEqual(ref_settings, slo_settings)
    def test_get_workload_settings(self):
        """The workload id list is returned for the array."""
        ref_settings = self.data.workloadtype['workloadId']
        wl_settings = self.rest.get_workload_settings(
            self.data.array)
        self.assertEqual(ref_settings, wl_settings)
    def test_get_workload_settings_failed(self):
        """An unknown array yields an empty workload list."""
        wl_settings = self.rest.get_workload_settings(
            self.data.failed_resource)
        self.assertFalse(wl_settings)
    def test_get_headroom_capacity(self):
        """Headroom capacity is read for srp/slo/workload."""
        ref_headroom = self.data.headroom['headroom'][0]['headroomCapacity']
        headroom_cap = self.rest.get_headroom_capacity(
            self.data.array, self.data.srp,
            self.data.slo, self.data.workload)
        self.assertEqual(ref_headroom, headroom_cap)
    def test_get_headroom_capacity_failed(self):
        """An unknown array yields None headroom."""
        headroom_cap = self.rest.get_headroom_capacity(
            self.data.failed_resource, self.data.srp,
            self.data.slo, self.data.workload)
        self.assertIsNone(headroom_cap)
    def test_is_compression_capable_true(self):
        """An all-flash serial reports compression capable."""
        compr_capable = self.rest.is_compression_capable('000197800128')
        self.assertTrue(compr_capable)
    def test_is_compression_capable_false(self):
        """Non-capable arrays and empty responses report False."""
        compr_capable = self.rest.is_compression_capable(self.data.array)
        self.assertFalse(compr_capable)
        with mock.patch.object(self.rest, 'request', return_value=(200, {})):
            compr_capable = self.rest.is_compression_capable(self.data.array)
            self.assertFalse(compr_capable)
    def test_get_storage_group(self):
        """Storage group details are fetched by name."""
        ref_details = self.data.sg_details[0]
        sg_details = self.rest.get_storage_group(
            self.data.array, self.data.defaultstoragegroup_name)
        self.assertEqual(ref_details, sg_details)
    def test_get_storage_group_list(self):
        """The storage group id list is returned for the array."""
        ref_details = self.data.sg_list['storageGroupId']
        sg_list = self.rest.get_storage_group_list(
            self.data.array, {})
        self.assertEqual(ref_details, sg_list)
    def test_get_storage_group_list_none(self):
        """No REST payload yields an empty storage group list."""
        with mock.patch.object(self.rest, 'get_resource', return_value=None):
            sg_list = self.rest.get_storage_group_list(
                self.data.array, {})
            self.assertFalse(sg_list)
    def test_create_storage_group(self):
        """_create_storagegroup forwards the payload to create_resource."""
        with mock.patch.object(self.rest, 'create_resource'):
            payload = {'someKey': 'someValue'}
            self.rest._create_storagegroup(self.data.array, payload)
            self.rest.create_resource.assert_called_once_with(
                self.data.array, 'sloprovisioning', 'storagegroup', payload)
    def test_create_storage_group_success(self):
        """create_storage_group returns the requested SG name."""
        sg_name = self.rest.create_storage_group(
            self.data.array, self.data.storagegroup_name_f, self.data.srp,
            self.data.slo, self.data.workload, self.data.extra_specs)
        self.assertEqual(self.data.storagegroup_name_f, sg_name)
    def test_create_storage_group_failed(self):
        """A failing SG name raises VolumeBackendAPIException."""
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.rest.create_storage_group, self.data.array,
            self.data.failed_resource, self.data.srp, self.data.slo,
            self.data.workload, self.data.extra_specs)
    def test_create_storage_group_no_slo(self):
        """An SG can be created without slo/workload."""
        sg_name = self.rest.create_storage_group(
            self.data.array, self.data.default_sg_no_slo, self.data.srp,
            None, None, self.data.extra_specs)
        self.assertEqual(self.data.default_sg_no_slo, sg_name)
    def test_create_storage_group_compression_disabled(self):
        """The payload carries noCompression when compression is disabled."""
        with mock.patch.object(self.rest, '_create_storagegroup',
                               return_value=(200, self.data.job_list[0])):
            self.rest.create_storage_group(
                self.data.array, self.data.default_sg_compr_disabled,
                self.data.srp, self.data.slo, self.data.workload,
                self.data.extra_specs, True)
            payload = {"srpId": self.data.srp,
                       "storageGroupId": self.data.default_sg_compr_disabled,
                       "emulation": "FBA",
                       "sloBasedStorageGroupParam": [
                           {"num_of_vols": 0,
                            "sloId": self.data.slo,
                            "workloadSelection": self.data.workload,
                            "volumeAttribute": {
                                "volume_size": "0",
                                "capacityUnit": "GB"},
                            "noCompression": "true"}]}
            self.rest._create_storagegroup.assert_called_once_with(
                self.data.array, payload)
    def test_modify_storage_group(self):
        """modify_storage_group delegates to modify_resource with version."""
        array = self.data.array
        storagegroup = self.data.defaultstoragegroup_name
        payload = {'someKey': 'someValue'}
        version = self.data.u4v_version
        with mock.patch.object(self.rest, 'modify_resource'):
            self.rest.modify_storage_group(array, storagegroup, payload)
            self.rest.modify_resource.assert_called_once_with(
                self.data.array, 'sloprovisioning', 'storagegroup',
                payload, version, resource_name=storagegroup)
    def test_create_volume_from_sg_success(self):
        """Creating a volume in an SG returns the provider location."""
        volume_name = self.data.volume_details[0]['volume_identifier']
        ref_dict = self.data.provider_location
        volume_dict = self.rest.create_volume_from_sg(
            self.data.array, volume_name, self.data.defaultstoragegroup_name,
            self.data.test_volume.size, self.data.extra_specs)
        self.assertEqual(ref_dict, volume_dict)
    def test_create_volume_from_sg_failed(self):
        """A failing SG raises VolumeBackendAPIException."""
        volume_name = self.data.volume_details[0]['volume_identifier']
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.rest.create_volume_from_sg, self.data.array,
            volume_name, self.data.failed_resource,
            self.data.test_volume.size, self.data.extra_specs)
    def test_create_volume_from_sg_cannot_retrieve_device_id(self):
        """A missing device id after creation raises an exception."""
        with mock.patch.object(self.rest, 'find_volume_device_id',
                               return_value=None):
            volume_name = self.data.volume_details[0]['volume_identifier']
            self.assertRaises(
                exception.VolumeBackendAPIException,
                self.rest.create_volume_from_sg, self.data.array,
                volume_name, self.data.failed_resource,
                self.data.test_volume.size, self.data.extra_specs)
    def test_add_vol_to_sg_success(self):
        """Adding a volume to an SG waits on the async job."""
        operation = 'Add volume to sg'
        status_code = 202
        message = self.data.job_list[0]
        with mock.patch.object(self.rest, 'wait_for_job'):
            device_id = self.data.device_id
            self.rest.add_vol_to_sg(
                self.data.array, self.data.storagegroup_name_f, device_id,
                self.data.extra_specs)
            self.rest.wait_for_job.assert_called_with(
                operation, status_code, message, self.data.extra_specs)
    def test_add_vol_to_sg_failed(self):
        """Adding to a failing SG raises VolumeBackendAPIException."""
        device_id = [self.data.device_id]
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.rest.add_vol_to_sg, self.data.array,
            self.data.failed_resource, device_id,
            self.data.extra_specs)
    def test_remove_vol_from_sg_success(self):
        """Removing a volume from an SG waits on the async job."""
        operation = 'Remove vol from sg'
        status_code = 202
        message = self.data.job_list[0]
        with mock.patch.object(self.rest, 'wait_for_job'):
            device_id = self.data.device_id
            self.rest.remove_vol_from_sg(
                self.data.array, self.data.storagegroup_name_f, device_id,
                self.data.extra_specs)
            self.rest.wait_for_job.assert_called_with(
                operation, status_code, message, self.data.extra_specs)
    @mock.patch.object(time, 'sleep')
    def test_remove_vol_from_sg_failed(self, mock_sleep):
        """Removing from a failing SG raises; sleep is mocked for speed."""
        device_id = [self.data.volume_details[0]['volumeId']]
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.rest.remove_vol_from_sg, self.data.array,
            self.data.failed_resource, device_id,
            self.data.extra_specs)
    def test_get_vmax_default_storage_group(self):
        """Default SG object and name are resolved for srp/slo/workload."""
        ref_storage_group = self.data.sg_details[0]
        ref_sg_name = self.data.defaultstoragegroup_name
        storagegroup, storagegroup_name = (
            self.rest.get_vmax_default_storage_group(
                self.data.array, self.data.srp,
                self.data.slo, self.data.workload))
        self.assertEqual(ref_sg_name, storagegroup_name)
        self.assertEqual(ref_storage_group, storagegroup)
    def test_delete_storage_group(self):
        """Deleting an SG checks the 204 status."""
        operation = 'delete storagegroup resource'
        status_code = 204
        message = None
        with mock.patch.object(self.rest, 'check_status_code_success'):
            self.rest.delete_storage_group(
                self.data.array, self.data.storagegroup_name_f)
            self.rest.check_status_code_success.assert_called_with(
                operation, status_code, message)
    def test_is_child_sg_in_parent_sg(self):
        """Child membership in a parent SG is detected correctly."""
        is_child1 = self.rest.is_child_sg_in_parent_sg(
            self.data.array, self.data.storagegroup_name_f,
            self.data.parent_sg_f)
        is_child2 = self.rest.is_child_sg_in_parent_sg(
            self.data.array, self.data.defaultstoragegroup_name,
            self.data.parent_sg_f)
        self.assertTrue(is_child1)
        self.assertFalse(is_child2)
    def test_add_child_sg_to_parent_sg(self):
        """Adding a child SG issues the expand payload on the parent."""
        payload = {"editStorageGroupActionParam": {
            "expandStorageGroupParam": {
                "addExistingStorageGroupParam": {
                    "storageGroupId": [self.data.storagegroup_name_f]}}}}
        with mock.patch.object(self.rest, 'modify_storage_group',
                               return_value=(202, self.data.job_list[0])):
            self.rest.add_child_sg_to_parent_sg(
                self.data.array, self.data.storagegroup_name_f,
                self.data.parent_sg_f, self.data.extra_specs)
            self.rest.modify_storage_group.assert_called_once_with(
                self.data.array, self.data.parent_sg_f, payload)
    def test_remove_child_sg_from_parent_sg(self):
        """Removing a child SG issues the forced remove payload."""
        payload = {"editStorageGroupActionParam": {
            "removeStorageGroupParam": {
                "storageGroupId": [self.data.storagegroup_name_f],
                "force": 'true'}}}
        with mock.patch.object(self.rest, 'modify_storage_group',
                               return_value=(202, self.data.job_list[0])):
            self.rest.remove_child_sg_from_parent_sg(
                self.data.array, self.data.storagegroup_name_f,
                self.data.parent_sg_f, self.data.extra_specs)
            self.rest.modify_storage_group.assert_called_once_with(
                self.data.array, self.data.parent_sg_f, payload)
    def test_get_volume_list(self):
        """The device id list is returned for the array."""
        ref_volumes = [self.data.device_id, self.data.device_id2]
        volumes = self.rest.get_volume_list(self.data.array, {})
        self.assertEqual(ref_volumes, volumes)
    def test_get_volume(self):
        """Volume details are fetched by device id."""
        ref_volumes = self.data.volume_details[0]
        device_id = self.data.device_id
        volumes = self.rest.get_volume(self.data.array, device_id)
        self.assertEqual(ref_volumes, volumes)
    def test_get_private_volume(self):
        """Private volume details come from the private REST endpoint."""
        device_id = self.data.device_id
        ref_volume = self.data.private_vol_details['resultList']['result'][0]
        volume = self.rest._get_private_volume(self.data.array, device_id)
        self.assertEqual(ref_volume, volume)
    def test_get_private_volume_exception(self):
        """An empty private payload raises VolumeBackendAPIException."""
        device_id = self.data.device_id
        with mock.patch.object(self.rest, 'get_resource',
                               return_value={}):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.rest._get_private_volume,
                              self.data.array, device_id)
    def test_modify_volume_success(self):
        """_modify_volume forwards the payload to modify_resource."""
        array = self.data.array
        device_id = self.data.device_id
        payload = {'someKey': 'someValue'}
        with mock.patch.object(self.rest, 'modify_resource'):
            self.rest._modify_volume(array, device_id, payload)
            self.rest.modify_resource.assert_called_once_with(
                self.data.array, 'sloprovisioning', 'volume',
                payload, resource_name=device_id)
    def test_modify_volume_failed(self):
        """A failing payload raises VolumeBackendAPIException."""
        payload = {'someKey': self.data.failed_resource}
        device_id = self.data.device_id
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.rest._modify_volume, self.data.array,
            device_id, payload)
    def test_extend_volume(self):
        """extend_volume issues an async expand payload."""
        device_id = self.data.device_id
        new_size = '3'
        extend_vol_payload = {"executionOption": "ASYNCHRONOUS",
                              "editVolumeActionParam": {
                                  "expandVolumeParam": {
                                      "volumeAttribute": {
                                          "volume_size": new_size,
                                          "capacityUnit": "GB"}}}}
        with mock.patch.object(self.rest, '_modify_volume',
                               return_value=(202, self.data.job_list[0])):
            self.rest.extend_volume(self.data.array, device_id, new_size,
                                    self.data.extra_specs)
            self.rest._modify_volume.assert_called_once_with(
                self.data.array, device_id, extend_vol_payload)
    def test_delete_volume(self):
        """Deletion still deallocates once even if the modify step raises."""
        device_id = self.data.device_id
        with mock.patch.object(self.rest, 'delete_resource'):
            with mock.patch.object(
                    self.rest, '_modify_volume',
                    side_effect=[None, exception.VolumeBackendAPIException]):
                for x in range(0, 2):
                    self.rest.delete_volume(self.data.array, device_id)
                mod_call_count = self.rest._modify_volume.call_count
                self.assertEqual(2, mod_call_count)
                self.rest.delete_resource.assert_called_once_with(
                    self.data.array, 'sloprovisioning', 'volume', device_id)
def test_rename_volume(self):
device_id = self.data.device_id
payload = {"editVolumeActionParam": {
"modifyVolumeIdentifierParam": {
"volumeIdentifier": {
"identifier_name": 'new_name',
"volumeIdentifierChoice": "identifier_name"}}}}
with mock.patch.object(self.rest, '_modify_volume'):
self.rest.rename_volume(self.data.array, device_id, 'new_name')
self.rest._modify_volume.assert_called_once_with(
self.data.array, device_id, payload)
    def test_find_mv_connections_for_vol(self):
        """The host LUN id is parsed (hex) from the MV connection info."""
        device_id = self.data.device_id
        ref_lun_id = int((self.data.maskingview[0]['maskingViewConnection']
                          [0]['host_lun_address']), 16)
        host_lun_id = self.rest.find_mv_connections_for_vol(
            self.data.array, self.data.masking_view_name_f, device_id)
        self.assertEqual(ref_lun_id, host_lun_id)
    def test_find_mv_connections_for_vol_failed(self):
        """Missing MV info or connection info yields None."""
        # no masking view info retrieved
        device_id = self.data.volume_details[0]['volumeId']
        host_lun_id = self.rest.find_mv_connections_for_vol(
            self.data.array, self.data.failed_resource, device_id)
        self.assertIsNone(host_lun_id)
        # no connection info received
        with mock.patch.object(self.rest, 'get_resource',
                               return_value={'no_conn': 'no_info'}):
            host_lun_id2 = self.rest.find_mv_connections_for_vol(
                self.data.array, self.data.masking_view_name_f, device_id)
            self.assertIsNone(host_lun_id2)
    def test_get_storage_groups_from_volume(self):
        """The SG id list is read from the volume details."""
        array = self.data.array
        device_id = self.data.device_id
        ref_list = self.data.volume_details[0]['storageGroupId']
        sg_list = self.rest.get_storage_groups_from_volume(array, device_id)
        self.assertEqual(ref_list, sg_list)
    def test_get_num_vols_in_sg(self):
        """The volume count is read from the storage group."""
        num_vol = self.rest.get_num_vols_in_sg(
            self.data.array, self.data.defaultstoragegroup_name)
        self.assertEqual(2, num_vol)
    def test_get_num_vols_in_sg_no_num(self):
        """An empty SG payload yields a count of zero."""
        with mock.patch.object(self.rest, 'get_storage_group',
                               return_value={}):
            num_vol = self.rest.get_num_vols_in_sg(
                self.data.array, self.data.defaultstoragegroup_name)
            self.assertEqual(0, num_vol)
    def test_is_volume_in_storagegroup(self):
        """Membership check covers both present and absent cases."""
        # True
        array = self.data.array
        device_id = self.data.device_id
        storagegroup = self.data.defaultstoragegroup_name
        is_vol1 = self.rest.is_volume_in_storagegroup(
            array, device_id, storagegroup)
        # False
        with mock.patch.object(self.rest, 'get_storage_groups_from_volume',
                               return_value=[]):
            is_vol2 = self.rest.is_volume_in_storagegroup(
                array, device_id, storagegroup)
        self.assertTrue(is_vol1)
        self.assertFalse(is_vol2)
    def test_find_volume_device_number(self):
        """A device id is resolved from a volume identifier."""
        array = self.data.array
        volume_name = self.data.volume_details[0]['volume_identifier']
        ref_device = self.data.device_id
        device_number = self.rest.find_volume_device_id(array, volume_name)
        self.assertEqual(ref_device, device_number)
    def test_find_volume_device_number_failed(self):
        """An empty volume list resolves to None."""
        array = self.data.array
        with mock.patch.object(self.rest, 'get_volume_list',
                               return_value=[]):
            device_number = self.rest.find_volume_device_id(
                array, 'name')
            self.assertIsNone(device_number)
    def test_get_volume_success(self):
        """Volume details are fetched by device id.

        NOTE(review): overlaps with test_get_volume earlier in this
        class — possibly redundant; confirm before removing either.
        """
        array = self.data.array
        device_id = self.data.device_id
        ref_volume = self.data.volume_details[0]
        volume = self.rest.get_volume(array, device_id)
        self.assertEqual(ref_volume, volume)
    def test_get_volume_failed(self):
        """A failing device id raises VolumeBackendAPIException."""
        array = self.data.array
        device_id = self.data.failed_resource
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.rest.get_volume,
                          array, device_id)
    def test_find_volume_identifier(self):
        """The volume identifier is read from the device details."""
        array = self.data.array
        device_id = self.data.device_id
        ref_name = self.data.volume_details[0]['volume_identifier']
        vol_name = self.rest.find_volume_identifier(array, device_id)
        self.assertEqual(ref_name, vol_name)
    def test_get_volume_size(self):
        """The device size on the array matches the volume size."""
        array = self.data.array
        device_id = self.data.device_id
        ref_size = self.data.test_volume.size
        size = self.rest.get_size_of_device_on_array(array, device_id)
        self.assertEqual(ref_size, size)
    def test_get_volume_size_exception(self):
        """A missing volume yields None for the size."""
        array = self.data.array
        device_id = self.data.device_id
        with mock.patch.object(self.rest, 'get_volume',
                               return_value=None):
            size = self.rest.get_size_of_device_on_array(
                array, device_id)
            self.assertIsNone(size)
    def test_get_portgroup(self):
        """Port group details are fetched by name."""
        array = self.data.array
        pg_name = self.data.port_group_name_f
        ref_pg = self.data.portgroup[0]
        portgroup = self.rest.get_portgroup(array, pg_name)
        self.assertEqual(ref_pg, portgroup)
    def test_get_port_ids(self):
        """Port ids are extracted from the port group."""
        array = self.data.array
        pg_name = self.data.port_group_name_f
        ref_ports = ["FA-1D:4"]
        port_ids = self.rest.get_port_ids(array, pg_name)
        self.assertEqual(ref_ports, port_ids)
    def test_get_port_ids_no_portgroup(self):
        """A missing port group yields an empty port id list."""
        array = self.data.array
        pg_name = self.data.port_group_name_f
        with mock.patch.object(self.rest, 'get_portgroup',
                               return_value=None):
            port_ids = self.rest.get_port_ids(array, pg_name)
            self.assertFalse(port_ids)
    def test_get_port(self):
        """Port details are fetched by port id."""
        array = self.data.array
        port_id = "FA-1D:4"
        ref_port = self.data.port_list[0]
        port = self.rest.get_port(array, port_id)
        self.assertEqual(ref_port, port)
    def test_get_iscsi_ip_address_and_iqn(self):
        """IP addresses and IQN are read from an iSCSI director port."""
        array = self.data.array
        port_id = "SE-4E:0"
        ref_ip = [self.data.ip]
        ref_iqn = self.data.initiator
        ip_addresses, iqn = self.rest.get_iscsi_ip_address_and_iqn(
            array, port_id)
        self.assertEqual(ref_ip, ip_addresses)
        self.assertEqual(ref_iqn, iqn)
    def test_get_iscsi_ip_address_and_iqn_no_port(self):
        """A missing port yields (None, None)."""
        array = self.data.array
        port_id = "SE-4E:0"
        with mock.patch.object(self.rest, 'get_port', return_value=None):
            ip_addresses, iqn = self.rest.get_iscsi_ip_address_and_iqn(
                array, port_id)
            self.assertIsNone(ip_addresses)
            self.assertIsNone(iqn)
    def test_get_target_wwns(self):
        """Target WWNs are gathered from the port group's ports."""
        array = self.data.array
        pg_name = self.data.port_group_name_f
        ref_wwns = [self.data.wwnn1]
        target_wwns = self.rest.get_target_wwns(array, pg_name)
        self.assertEqual(ref_wwns, target_wwns)
    def test_get_target_wwns_failed(self):
        """Missing port details yield an empty WWN list."""
        array = self.data.array
        pg_name = self.data.port_group_name_f
        with mock.patch.object(self.rest, 'get_port',
                               return_value=None):
            target_wwns = self.rest.get_target_wwns(array, pg_name)
            self.assertFalse(target_wwns)
    def test_get_initiator_group(self):
        """Initiator group (host) details are fetched by name."""
        array = self.data.array
        ig_name = self.data.initiatorgroup_name_f
        ref_ig = self.data.inititiatorgroup[0]
        response_ig = self.rest.get_initiator_group(array, ig_name)
        self.assertEqual(ref_ig, response_ig)
    def test_get_initiator(self):
        """Initiator details are fetched by initiator name."""
        array = self.data.array
        initiator_name = self.data.initiator
        ref_initiator = self.data.initiator_list[1]
        response_initiator = self.rest.get_initiator(array, initiator_name)
        self.assertEqual(ref_initiator, response_initiator)
    def test_get_initiator_list(self):
        """A populated REST payload yields a non-empty initiator list."""
        array = self.data.array
        with mock.patch.object(self.rest, 'get_resource',
                               return_value={'initiatorId': '1234'}):
            init_list = self.rest.get_initiator_list(array)
            self.assertIsNotNone(init_list)
    def test_get_initiator_list_none(self):
        """An empty REST payload yields an empty initiator list."""
        array = self.data.array
        with mock.patch.object(self.rest, 'get_resource', return_value={}):
            init_list = self.rest.get_initiator_list(array)
            self.assertFalse(init_list)
    def test_get_in_use_initiator_list_from_array(self):
        """In-use initiators are filtered from the array list."""
        ref_list = self.data.initiator_list[2]['initiatorId']
        init_list = self.rest.get_in_use_initiator_list_from_array(
            self.data.array)
        self.assertEqual(ref_list, init_list)
    def test_get_in_use_initiator_list_from_array_failed(self):
        """An empty initiator list yields an empty in-use list."""
        array = self.data.array
        with mock.patch.object(self.rest, 'get_initiator_list',
                               return_value=[]):
            init_list = self.rest.get_in_use_initiator_list_from_array(array)
            self.assertFalse(init_list)
    def test_get_initiator_group_from_initiator(self):
        """The owning host is resolved from an initiator WWN."""
        initiator = self.data.wwpn1
        ref_group = self.data.initiatorgroup_name_f
        init_group = self.rest.get_initiator_group_from_initiator(
            self.data.array, initiator)
        self.assertEqual(ref_group, init_group)
    def test_get_initiator_group_from_initiator_failed(self):
        """Missing initiator or missing host info yields None."""
        initiator = self.data.wwpn1
        with mock.patch.object(self.rest, 'get_initiator',
                               return_value=None):
            init_group = self.rest.get_initiator_group_from_initiator(
                self.data.array, initiator)
            self.assertIsNone(init_group)
        with mock.patch.object(self.rest, 'get_initiator',
                               return_value={'name': 'no_host'}):
            init_group = self.rest.get_initiator_group_from_initiator(
                self.data.array, initiator)
            self.assertIsNone(init_group)
def test_create_initiator_group(self):
init_group_name = self.data.initiatorgroup_name_f
init_list = [self.data.wwpn1]
extra_specs = self.data.extra_specs
with mock.patch.object(self.rest, 'create_resource',
return_value=(202, self.data.job_list[0])):
payload = ({"executionOption": "ASYNCHRONOUS",
"hostId": init_group_name, "initiatorId": init_list})
self.rest.create_initiator_group(
self.data.array, init_group_name, init_list, extra_specs)
self.rest.create_resource.assert_called_once_with(
self.data.array, 'sloprovisioning', 'host', payload)
    def test_delete_initiator_group(self):
        """delete_initiator_group issues exactly one host delete call."""
        with mock.patch.object(self.rest, 'delete_resource'):
            self.rest.delete_initiator_group(
                self.data.array, self.data.initiatorgroup_name_f)
            self.rest.delete_resource.assert_called_once_with(
                self.data.array, 'sloprovisioning', 'host',
                self.data.initiatorgroup_name_f)
def test_get_masking_view(self):
array = self.data.array
masking_view_name = self.data.masking_view_name_f
ref_mask_view = self.data.maskingview[0]
masking_view = self.rest.get_masking_view(array, masking_view_name)
self.assertEqual(ref_mask_view, masking_view)
def test_get_masking_views_from_storage_group(self):
array = self.data.array
storagegroup_name = self.data.storagegroup_name_f
ref_mask_view = [self.data.masking_view_name_f]
masking_view = self.rest.get_masking_views_from_storage_group(
array, storagegroup_name)
self.assertEqual(ref_mask_view, masking_view)
    def test_get_masking_views_by_initiator_group(self):
        """An initiator group maps to its associated masking views."""
        array = self.data.array
        initiatorgroup_name = self.data.initiatorgroup_name_f
        ref_mask_view = [self.data.masking_view_name_f]
        masking_view = self.rest.get_masking_views_by_initiator_group(
            array, initiatorgroup_name)
        self.assertEqual(ref_mask_view, masking_view)
    def test_get_masking_views_by_initiator_group_failed(self):
        """A missing group, or one without views, yields a falsy result."""
        array = self.data.array
        initiatorgroup_name = self.data.initiatorgroup_name_f
        # Initiator group cannot be found at all.
        with mock.patch.object(self.rest, 'get_initiator_group',
                               return_value=None):
            masking_view = self.rest.get_masking_views_by_initiator_group(
                array, initiatorgroup_name)
            self.assertFalse(masking_view)
        # Group exists but has no masking-view association.
        with mock.patch.object(self.rest, 'get_initiator_group',
                               return_value={'name': 'no_mv'}):
            masking_view = self.rest.get_masking_views_by_initiator_group(
                array, initiatorgroup_name)
            self.assertFalse(masking_view)
    def test_get_element_from_masking_view(self):
        """Each element kind can be extracted from a masking view."""
        array = self.data.array
        maskingview_name = self.data.masking_view_name_f
        # storage group
        ref_sg = self.data.storagegroup_name_f
        storagegroup = self.rest.get_element_from_masking_view(
            array, maskingview_name, storagegroup=True)
        self.assertEqual(ref_sg, storagegroup)
        # initiator group
        ref_ig = self.data.initiatorgroup_name_f
        initiatorgroup = self.rest.get_element_from_masking_view(
            array, maskingview_name, host=True)
        self.assertEqual(ref_ig, initiatorgroup)
        # portgroup
        ref_pg = self.data.port_group_name_f
        portgroup = self.rest.get_element_from_masking_view(
            array, maskingview_name, portgroup=True)
        self.assertEqual(ref_pg, portgroup)
    def test_get_element_from_masking_view_failed(self):
        """No element flag gives None; an unfetchable view raises."""
        array = self.data.array
        maskingview_name = self.data.masking_view_name_f
        # no element chosen
        element = self.rest.get_element_from_masking_view(
            array, maskingview_name)
        self.assertIsNone(element)
        # cannot retrieve maskingview
        with mock.patch.object(self.rest, 'get_masking_view',
                               return_value=None):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.rest.get_element_from_masking_view,
                              array, maskingview_name)
def test_get_common_masking_views(self):
array = self.data.array
initiatorgroup = self.data.initiatorgroup_name_f
portgroup = self.data.port_group_name_f
ref_maskingview = self.data.masking_view_name_f
maskingview_list = self.rest.get_common_masking_views(
array, portgroup, initiatorgroup)
self.assertEqual(ref_maskingview, maskingview_list)
    def test_get_common_masking_views_none(self):
        """No masking views on the array means no common views are found."""
        array = self.data.array
        initiatorgroup = self.data.initiatorgroup_name_f
        portgroup = self.data.port_group_name_f
        with mock.patch.object(self.rest, 'get_masking_view_list',
                               return_value=[]):
            maskingview_list = self.rest.get_common_masking_views(
                array, portgroup, initiatorgroup)
            self.assertFalse(maskingview_list)
    def test_create_masking_view(self):
        """create_masking_view posts the expected async maskingview payload.

        The payload reuses the existing port group, host and storage group
        rather than creating new ones.
        """
        maskingview_name = self.data.masking_view_name_f
        storagegroup_name = self.data.storagegroup_name_f
        port_group_name = self.data.port_group_name_f
        init_group_name = self.data.initiatorgroup_name_f
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.rest, 'create_resource',
                               return_value=(202, self.data.job_list[0])):
            payload = ({"executionOption": "ASYNCHRONOUS",
                        "portGroupSelection": {
                            "useExistingPortGroupParam": {
                                "portGroupId": port_group_name}},
                        "maskingViewId": maskingview_name,
                        "hostOrHostGroupSelection": {
                            "useExistingHostParam": {
                                "hostId": init_group_name}},
                        "storageGroupSelection": {
                            "useExistingStorageGroupParam": {
                                "storageGroupId": storagegroup_name}}})
            self.rest.create_masking_view(
                self.data.array, maskingview_name, storagegroup_name,
                port_group_name, init_group_name, extra_specs)
            self.rest.create_resource.assert_called_once_with(
                self.data.array, 'sloprovisioning', 'maskingview', payload)
    def test_delete_masking_view(self):
        """delete_masking_view issues exactly one maskingview delete."""
        with mock.patch.object(self.rest, 'delete_resource'):
            self.rest.delete_masking_view(
                self.data.array, self.data.masking_view_name_f)
            self.rest.delete_resource.assert_called_once_with(
                self.data.array, 'sloprovisioning', 'maskingview',
                self.data.masking_view_name_f)
def test_get_replication_capabilities(self):
ref_response = self.data.capabilities['symmetrixCapability'][1]
capabilities = self.rest.get_replication_capabilities(self.data.array)
self.assertEqual(ref_response, capabilities)
    def test_is_clone_licenced(self):
        """is_snapvx_licensed reflects the snapVxCapable capability flag.

        NOTE(review): the test name misspells "licensed"; renaming would
        change the test's public identifier, so it is kept as-is.
        """
        licence = self.rest.is_snapvx_licensed(self.data.array)
        self.assertTrue(licence)
        false_response = {'rdfCapable': True,
                          'snapVxCapable': False,
                          'symmetrixId': '000197800123'}
        with mock.patch.object(self.rest, 'get_replication_capabilities',
                               return_value=false_response):
            licence2 = self.rest.is_snapvx_licensed(self.data.array)
            self.assertFalse(licence2)
    def test_is_clone_licenced_error(self):
        """Missing capability info means snapVx is reported unlicensed."""
        with mock.patch.object(self.rest, 'get_replication_capabilities',
                               return_value=None):
            licence3 = self.rest.is_snapvx_licensed(self.data.array)
            self.assertFalse(licence3)
    def test_create_volume_snap(self):
        """create_volume_snap posts to the private snapshot endpoint."""
        snap_name = (self.data.volume_snap_vx
                     ['snapshotSrcs'][0]['snapshotName'])
        device_id = self.data.device_id
        extra_specs = self.data.extra_specs
        payload = {"deviceNameListSource": [{"name": device_id}],
                   "bothSides": 'false', "star": 'false',
                   "force": 'false'}
        resource_type = 'snapshot/%(snap)s' % {'snap': snap_name}
        with mock.patch.object(self.rest, 'create_resource',
                               return_value=(202, self.data.job_list[0])):
            self.rest.create_volume_snap(
                self.data.array, snap_name, device_id, extra_specs)
            self.rest.create_resource.assert_called_once_with(
                self.data.array, 'replication', resource_type,
                payload, private='/private')
    def test_modify_volume_snap(self):
        """Link, unlink, and no-op paths of modify_volume_snap.

        The same payload dict is reused across cases with only the
        "action" key changed, mirroring what the driver sends.
        """
        array = self.data.array
        source_id = self.data.device_id
        target_id = (self.data.volume_snap_vx
                     ['snapshotSrcs'][0]['linkedDevices'][0]['targetDevice'])
        snap_name = (self.data.volume_snap_vx
                     ['snapshotSrcs'][0]['snapshotName'])
        extra_specs = self.data.extra_specs
        payload = {"deviceNameListSource": [{"name": source_id}],
                   "deviceNameListTarget": [
                       {"name": target_id}],
                   "copy": 'true', "action": "",
                   "star": 'false', "force": 'false',
                   "exact": 'false', "remote": 'false',
                   "symforce": 'false', "nocopy": 'false'}
        with mock.patch.object(
                self.rest, 'modify_resource', return_value=(
                    202, self.data.job_list[0])) as mock_modify:
            # link
            payload["action"] = "Link"
            self.rest.modify_volume_snap(
                array, source_id, target_id, snap_name, extra_specs, link=True)
            self.rest.modify_resource.assert_called_once_with(
                array, 'replication', 'snapshot', payload,
                resource_name=snap_name, private='/private')
            # unlink
            mock_modify.reset_mock()
            payload["action"] = "Unlink"
            self.rest.modify_volume_snap(
                array, source_id, target_id, snap_name,
                extra_specs, unlink=True)
            self.rest.modify_resource.assert_called_once_with(
                array, 'replication', 'snapshot', payload,
                resource_name=snap_name, private='/private')
            # none selected
            mock_modify.reset_mock()
            self.rest.modify_volume_snap(
                array, source_id, target_id, snap_name,
                extra_specs)
            self.rest.modify_resource.assert_not_called()
    def test_delete_volume_snap(self):
        """delete_volume_snap deletes via the private snapshot endpoint."""
        array = self.data.array
        snap_name = (self.data.volume_snap_vx
                     ['snapshotSrcs'][0]['snapshotName'])
        source_device_id = self.data.device_id
        payload = {"deviceNameListSource": [{"name": source_device_id}]}
        with mock.patch.object(self.rest, 'delete_resource'):
            self.rest.delete_volume_snap(array, snap_name, source_device_id)
            self.rest.delete_resource.assert_called_once_with(
                array, 'replication', 'snapshot', snap_name,
                payload=payload, private='/private')
def test_get_volume_snap_info(self):
array = self.data.array
source_device_id = self.data.device_id
ref_snap_info = self.data.volume_snap_vx
snap_info = self.rest.get_volume_snap_info(array, source_device_id)
self.assertEqual(ref_snap_info, snap_info)
    def test_get_volume_snap(self):
        """A named snapshot of a device is returned from the snap info."""
        array = self.data.array
        snap_name = (self.data.volume_snap_vx
                     ['snapshotSrcs'][0]['snapshotName'])
        device_id = self.data.device_id
        ref_snap = self.data.volume_snap_vx['snapshotSrcs'][0]
        snap = self.rest.get_volume_snap(array, device_id, snap_name)
        self.assertEqual(ref_snap, snap)
    def test_get_volume_snap_none(self):
        """No snap info, or an empty snapshotSrcs list, yields None."""
        array = self.data.array
        snap_name = (self.data.volume_snap_vx
                     ['snapshotSrcs'][0]['snapshotName'])
        device_id = self.data.device_id
        with mock.patch.object(self.rest, 'get_volume_snap_info',
                               return_value=None):
            snap = self.rest.get_volume_snap(array, device_id, snap_name)
            self.assertIsNone(snap)
        with mock.patch.object(self.rest, 'get_volume_snap_info',
                               return_value={'snapshotSrcs': []}):
            snap = self.rest.get_volume_snap(array, device_id, snap_name)
            self.assertIsNone(snap)
    def test_get_sync_session(self):
        """The linked-device session for a snap/target pair is returned."""
        array = self.data.array
        source_id = self.data.device_id
        target_id = (self.data.volume_snap_vx
                     ['snapshotSrcs'][0]['linkedDevices'][0]['targetDevice'])
        snap_name = (self.data.volume_snap_vx
                     ['snapshotSrcs'][0]['snapshotName'])
        ref_sync = (self.data.volume_snap_vx
                    ['snapshotSrcs'][0]['linkedDevices'][0])
        sync = self.rest.get_sync_session(
            array, source_id, snap_name, target_id)
        self.assertEqual(ref_sync, sync)
    def test_find_snap_vx_sessions(self):
        """All snapvx sessions for a source device are discovered."""
        array = self.data.array
        source_id = self.data.device_id
        ref_sessions = [{'snap_name': 'temp-1',
                         'source_vol': self.data.device_id,
                         'target_vol_list': [self.data.device_id2]},
                        {'snap_name': 'temp-1',
                         'source_vol': self.data.device_id,
                         'target_vol_list': [self.data.device_id2]}]
        sessions = self.rest.find_snap_vx_sessions(array, source_id)
        self.assertEqual(ref_sessions, sessions)
    def test_find_snap_vx_sessions_tgt_only(self):
        """With tgt_only=True only target-side sessions are returned."""
        array = self.data.array
        source_id = self.data.device_id
        ref_sessions = [{'snap_name': 'temp-1',
                         'source_vol': self.data.device_id,
                         'target_vol_list': [self.data.device_id2]}]
        sessions = self.rest.find_snap_vx_sessions(
            array, source_id, tgt_only=True)
        self.assertEqual(ref_sessions, sessions)
    def test_update_storagegroup_qos(self):
        """QoS update is skipped when limits match, applied when they differ.

        NOTE(review): this appends sg_qos to the shared self.data.sg_details
        fixture; self.data is rebuilt per-test in setUp, but confirm no
        cross-test leakage if the fixture ever becomes class-level.
        """
        sg_qos = {"srp": self.data.srp, "num_of_vols": 2, "cap_gb": 2,
                  "storageGroupId": "OS-QOS-SG",
                  "slo": self.data.slo, "workload": self.data.workload,
                  "hostIOLimit": {"host_io_limit_io_sec": "4000",
                                  "dynamicDistribution": "Always",
                                  "host_io_limit_mb_sec": "4000"}}
        self.data.sg_details.append(sg_qos)
        array = self.data.array
        extra_specs = self.data.extra_specs
        # Same limits as the existing SG: no update expected.
        extra_specs['qos'] = {
            'total_iops_sec': '4000', 'DistributionType': 'Always'}
        return_value = self.rest.update_storagegroup_qos(
            array, "OS-QOS-SG", extra_specs)
        self.assertEqual(False, return_value)
        # Different limits: an update is expected.
        extra_specs['qos'] = {
            'DistributionType': 'onFailure', 'total_bytes_sec': '419430400'}
        return_value = self.rest.update_storagegroup_qos(
            array, "OS-QOS-SG", extra_specs)
        self.assertTrue(return_value)
    def test_update_storagegroup_qos_exception(self):
        """A failed status check raises; a valid retry returns False."""
        array = self.data.array
        storage_group = self.data.defaultstoragegroup_name
        extra_specs = self.data.extra_specs
        extra_specs['qos'] = {
            'total_iops_sec': '4000', 'DistributionType': 'Wrong',
            'total_bytes_sec': '4194304000'}
        # Fourth status check blows up, surfacing as a backend exception.
        with mock.patch.object(self.rest, 'check_status_code_success',
                               side_effect=[None, None, None, Exception]):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.rest.update_storagegroup_qos, array,
                              storage_group, extra_specs)
        extra_specs['qos']['DistributionType'] = 'Always'
        return_value = self.rest.update_storagegroup_qos(
            array, "OS-QOS-SG", extra_specs)
        self.assertFalse(return_value)
    def test_validate_qos_input_exception(self):
        """Invalid QoS extra-spec values raise a backend exception."""
        qos_extra_spec = {
            'total_iops_sec': 90, 'DistributionType': 'Wrong',
            'total_bytes_sec': 100}
        input_key = 'total_iops_sec'
        sg_value = 4000
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.rest.validate_qos_input, input_key, sg_value,
                          qos_extra_spec, {})
        input_key = 'total_bytes_sec'
        sg_value = 4000
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.rest.validate_qos_input, input_key, sg_value,
                          qos_extra_spec, {})
def test_validate_qos_distribution_type(self):
qos_extra_spec = {
'total_iops_sec': 4000, 'DistributionType': 'Always',
'total_bytes_sec': 4194304000}
input_prop_dict = {'total_iops_sec': 4000}
sg_value = 'Always'
ret_prop_dict = self.rest.validate_qos_distribution_type(
sg_value, qos_extra_spec, input_prop_dict)
self.assertEqual(input_prop_dict, ret_prop_dict)
    def test_get_rdf_group(self):
        """get_rdf_group fetches the replication rdf_group resource."""
        with mock.patch.object(self.rest, 'get_resource') as mock_get:
            self.rest.get_rdf_group(self.data.array, self.data.rdf_group_no)
            mock_get.assert_called_once_with(
                self.data.array, 'replication', 'rdf_group',
                self.data.rdf_group_no)
def test_get_rdf_group_list(self):
rdf_list = self.rest.get_rdf_group_list(self.data.array)
self.assertEqual(self.data.rdf_group_list, rdf_list)
    def test_get_rdf_group_volume(self):
        """get_rdf_group_volume builds the group/volume/device path."""
        with mock.patch.object(self.rest, 'get_resource') as mock_get:
            self.rest.get_rdf_group_volume(
                self.data.array, self.data.rdf_group_no, self.data.device_id)
            mock_get.assert_called_once_with(
                self.data.array, 'replication', 'rdf_group', "70/volume/00001")
    def test_are_vols_rdf_paired(self):
        """Pairing is True for the right remote array, False otherwise."""
        are_vols1, local_state, pair_state = self.rest.are_vols_rdf_paired(
            self.data.array, self.data.remote_array, self.data.device_id,
            self.data.device_id2, self.data.rdf_group_no)
        self.assertTrue(are_vols1)
        # Wrong remote array serial: not considered paired.
        are_vols2, local_state, pair_state = self.rest.are_vols_rdf_paired(
            self.data.array, "00012345", self.data.device_id,
            self.data.device_id2, self.data.rdf_group_no)
        self.assertFalse(are_vols2)
        # No rdf volume record at all: not paired.
        with mock.patch.object(self.rest, "get_rdf_group_volume",
                               return_value=None):
            are_vols3, local, pair = self.rest.are_vols_rdf_paired(
                self.data.array, self.data.remote_array, self.data.device_id,
                self.data.device_id2, self.data.rdf_group_no)
            self.assertFalse(are_vols3)
    def test_get_rdf_group_number(self):
        """The RDF group number resolves from its label, else None."""
        rdfg_num = self.rest.get_rdf_group_number(
            self.data.array, self.data.rdf_group_name)
        self.assertEqual(self.data.rdf_group_no, rdfg_num)
        # No group list available.
        with mock.patch.object(self.rest, 'get_rdf_group_list',
                               return_value=None):
            rdfg_num2 = self.rest.get_rdf_group_number(
                self.data.array, self.data.rdf_group_name)
            self.assertIsNone(rdfg_num2)
        # Group list exists but the group detail cannot be fetched.
        with mock.patch.object(self.rest, 'get_rdf_group',
                               return_value=None):
            rdfg_num3 = self.rest.get_rdf_group_number(
                self.data.array, self.data.rdf_group_name)
            self.assertIsNone(rdfg_num3)
    def test_create_rdf_device_pair(self):
        """Creating an RDF pair returns the remote array/device dict."""
        ref_dict = {'array': self.data.remote_array,
                    'device_id': self.data.device_id2}
        rdf_dict = self.rest.create_rdf_device_pair(
            self.data.array, self.data.device_id, self.data.rdf_group_no,
            self.data.device_id2, self.data.remote_array, "OS-2",
            self.data.extra_specs)
        self.assertEqual(ref_dict, rdf_dict)
    def test_modify_rdf_device_pair(self):
        """split=True sends a Split action; split=False sends Failover."""
        resource_name = "70/volume/00001"
        common_opts = {"force": 'false',
                       "symForce": 'false',
                       "star": 'false',
                       "hop2": 'false',
                       "bypass": 'false'}
        split_opts = deepcopy(common_opts)
        split_opts.update({"immediate": 'false'})
        split_payload = {"action": "Split",
                         'executionOption': 'ASYNCHRONOUS',
                         "split": split_opts}
        failover_opts = deepcopy(common_opts)
        failover_opts.update({"establish": 'true',
                              "restore": 'false',
                              "remote": 'false',
                              "immediate": 'false'})
        failover_payload = {"action": "Failover",
                            'executionOption': 'ASYNCHRONOUS',
                            "failover": failover_opts}
        with mock.patch.object(
                self.rest, "modify_resource",
                return_value=(200, self.data.job_list[0])) as mock_mod:
            # split=True path
            self.rest.modify_rdf_device_pair(
                self.data.array, self.data.device_id, self.data.rdf_group_no,
                self.data.extra_specs, split=True)
            mock_mod.assert_called_once_with(
                self.data.array, 'replication', 'rdf_group',
                split_payload, resource_name=resource_name,
                private='/private')
            # split=False path
            mock_mod.reset_mock()
            self.rest.modify_rdf_device_pair(
                self.data.array, self.data.device_id, self.data.rdf_group_no,
                self.data.extra_specs, split=False)
            mock_mod.assert_called_once_with(
                self.data.array, 'replication', 'rdf_group',
                failover_payload, resource_name=resource_name,
                private='/private')
def test_get_storage_group_rep(self):
array = self.data.array
source_group_name = self.data.storagegroup_name_source
ref_details = self.data.sg_details_rep[0]
volume_group = self.rest.get_storage_group_rep(array,
source_group_name)
self.assertEqual(volume_group, ref_details)
def test_get_volumes_in_storage_group(self):
array = self.data.array
storagegroup_name = self.data.storagegroup_name_source
ref_volumes = [self.data.device_id, self.data.device_id2]
volume_list = self.rest.get_volumes_in_storage_group(
array, storagegroup_name)
self.assertEqual(ref_volumes, volume_list)
def test_create_storagegroup_snap(self):
array = self.data.array
extra_specs = self.data.extra_specs
source_group = self.data.storagegroup_name_source
snap_name = self.data.group_snapshot_name
with mock.patch.object(
self.rest, "create_storagegroup_snap") as mock_create:
self.rest.create_storagegroup_snap(
array, source_group, snap_name, extra_specs)
mock_create.assert_called_once_with(array,
source_group,
snap_name,
extra_specs)
class VMAXProvisionTest(test.TestCase):
    """Tests for the VMAX provisioning layer (self.common.provision)."""
    def setUp(self):
        """Build an iSCSI driver over a faked REST session for these tests."""
        self.data = VMAXCommonData()
        super(VMAXProvisionTest, self).setUp()
        config_group = 'ProvisionTests'
        self.fake_xml = FakeXML().create_fake_config_file(
            config_group, self.data.port_group_name_i)
        configuration = FakeConfiguration(self.fake_xml, config_group)
        rest.VMAXRest._establish_rest_session = mock.Mock(
            return_value=FakeRequestsSession())
        # Avoid real sleeps in unlink retry loops.
        provision.UNLINK_INTERVAL = 0
        driver = iscsi.VMAXISCSIDriver(configuration=configuration)
        self.driver = driver
        self.common = self.driver.common
        self.provision = self.common.provision
        self.utils = self.common.utils
        self.rest = self.common.rest
    def test_create_storage_group(self):
        """Creating a storage group returns the group name on success."""
        array = self.data.array
        storagegroup_name = self.data.storagegroup_name_f
        srp = self.data.srp
        slo = self.data.slo
        workload = self.data.workload
        extra_specs = self.data.extra_specs
        storagegroup = self.provision.create_storage_group(
            array, storagegroup_name, srp, slo, workload, extra_specs)
        self.assertEqual(storagegroup_name, storagegroup)
    def test_create_volume_from_sg(self):
        """Creating a volume in a storage group returns provider location."""
        array = self.data.array
        storagegroup_name = self.data.storagegroup_name_f
        volumeId = self.data.test_volume.id
        volume_name = self.utils.get_volume_element_name(volumeId)
        volume_size = self.data.test_volume.size
        extra_specs = self.data.extra_specs
        ref_dict = self.data.provider_location
        volume_dict = self.provision.create_volume_from_sg(
            array, volume_name, storagegroup_name, volume_size, extra_specs)
        self.assertEqual(ref_dict, volume_dict)
    def test_delete_volume_from_srp(self):
        """Deleting a volume delegates once to rest.delete_volume."""
        array = self.data.array
        device_id = self.data.device_id
        volume_name = self.data.volume_details[0]['volume_identifier']
        with mock.patch.object(self.provision.rest, 'delete_volume'):
            self.provision.delete_volume_from_srp(
                array, device_id, volume_name)
            self.provision.rest.delete_volume.assert_called_once_with(
                array, device_id)
    def test_create_volume_snap_vx(self):
        """create_volume_snapvx delegates once to rest.create_volume_snap."""
        array = self.data.array
        source_device_id = self.data.device_id
        snap_name = self.data.snap_location['snap_name']
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.provision.rest, 'create_volume_snap'):
            self.provision.create_volume_snapvx(
                array, source_device_id, snap_name, extra_specs)
            self.provision.rest.create_volume_snap.assert_called_once_with(
                array, snap_name, source_device_id, extra_specs)
    def test_create_volume_replica_create_snap_true(self):
        """With create_snap=True a snapvx is created before linking."""
        array = self.data.array
        source_device_id = self.data.device_id
        target_device_id = self.data.device_id2
        snap_name = self.data.snap_location['snap_name']
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.provision, 'create_volume_snapvx'):
            with mock.patch.object(self.provision.rest, 'modify_volume_snap'):
                self.provision.create_volume_replica(
                    array, source_device_id, target_device_id,
                    snap_name, extra_specs, create_snap=True)
                self.provision.rest.modify_volume_snap.assert_called_once_with(
                    array, source_device_id, target_device_id, snap_name,
                    extra_specs, link=True)
                self.provision.create_volume_snapvx.assert_called_once_with(
                    array, source_device_id, snap_name, extra_specs)
    def test_create_volume_replica_create_snap_false(self):
        """With create_snap=False only the link is made; no new snapvx."""
        array = self.data.array
        source_device_id = self.data.device_id
        target_device_id = self.data.device_id2
        snap_name = self.data.snap_location['snap_name']
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.provision, 'create_volume_snapvx'):
            with mock.patch.object(self.provision.rest, 'modify_volume_snap'):
                self.provision.create_volume_replica(
                    array, source_device_id, target_device_id,
                    snap_name, extra_specs, create_snap=False)
                self.provision.rest.modify_volume_snap.assert_called_once_with(
                    array, source_device_id, target_device_id, snap_name,
                    extra_specs, link=True)
                self.provision.create_volume_snapvx.assert_not_called()
    def test_break_replication_relationship(self):
        """Breaking replication unlinks the snap from source to target."""
        array = self.data.array
        source_device_id = self.data.device_id
        target_device_id = self.data.device_id2
        snap_name = self.data.snap_location['snap_name']
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.provision.rest, 'modify_volume_snap'):
            self.provision.break_replication_relationship(
                array, target_device_id, source_device_id, snap_name,
                extra_specs)
            (self.provision.rest.modify_volume_snap.
                assert_called_once_with(
                    array, source_device_id, target_device_id,
                    snap_name, extra_specs, unlink=True))
    def test_unlink_volume(self):
        """_unlink_volume issues a single unlink modify call."""
        with mock.patch.object(self.rest, 'modify_volume_snap') as mock_mod:
            self.provision._unlink_volume(
                self.data.array, self.data.device_id, self.data.device_id2,
                self.data.snap_location['snap_name'], self.data.extra_specs)
            mock_mod.assert_called_once_with(
                self.data.array, self.data.device_id, self.data.device_id2,
                self.data.snap_location['snap_name'], self.data.extra_specs,
                unlink=True)
    def test_unlink_volume_exception(self):
        """_unlink_volume retries once after a backend exception."""
        with mock.patch.object(
                self.rest, 'modify_volume_snap', side_effect=[
                    exception.VolumeBackendAPIException(data=''), '']
        ) as mock_mod:
            self.provision._unlink_volume(
                self.data.array, self.data.device_id, self.data.device_id2,
                self.data.snap_location['snap_name'], self.data.extra_specs)
            self.assertEqual(2, mock_mod.call_count)
    def test_delete_volume_snap(self):
        """delete_volume_snap delegates once to the rest layer."""
        array = self.data.array
        source_device_id = self.data.device_id
        snap_name = self.data.snap_location['snap_name']
        with mock.patch.object(self.provision.rest, 'delete_volume_snap'):
            self.provision.delete_volume_snap(
                array, snap_name, source_device_id)
            self.provision.rest.delete_volume_snap.assert_called_once_with(
                array, snap_name, source_device_id)
    def test_extend_volume(self):
        """extend_volume delegates once to rest.extend_volume."""
        array = self.data.array
        device_id = self.data.device_id
        new_size = '3'
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.provision.rest, 'extend_volume'):
            self.provision.extend_volume(array, device_id, new_size,
                                         extra_specs)
            self.provision.rest.extend_volume.assert_called_once_with(
                array, device_id, new_size, extra_specs)
    def test_get_srp_pool_stats_no_wlp(self):
        """Without WLP headroom, stats fall back to raw SRP capacities."""
        array = self.data.array
        array_info = self.common.pool_info['arrays_info'][0]
        ref_stats = (self.data.srp_details['total_usable_cap_gb'],
                     float(self.data.srp_details['total_usable_cap_gb']
                           - self.data.srp_details['total_allocated_cap_gb']),
                     self.data.srp_details['total_subscribed_cap_gb'],
                     self.data.srp_details['reserved_cap_percent'], False)
        with mock.patch.object(self.provision,
                               '_get_remaining_slo_capacity_wlp',
                               return_value=-1):
            stats = self.provision.get_srp_pool_stats(array, array_info)
            self.assertEqual(ref_stats, stats)
    def test_get_srp_pool_stats_wlp_enabled(self):
        """With WLP headroom available, free capacity uses the headroom."""
        array = self.data.array
        array_info = self.common.pool_info['arrays_info'][0]
        srp = self.data.srp
        headroom_capacity = self.provision.rest.get_headroom_capacity(
            array, srp, array_info['SLO'], array_info['Workload'])
        ref_stats = (self.data.srp_details['total_usable_cap_gb'],
                     float(headroom_capacity
                           - self.data.srp_details['total_allocated_cap_gb']),
                     self.data.srp_details['total_subscribed_cap_gb'],
                     self.data.srp_details['reserved_cap_percent'], True)
        stats = self.provision.get_srp_pool_stats(array, array_info)
        self.assertEqual(ref_stats, stats)
    def test_get_srp_pool_stats_errors(self):
        """Unfetchable SRP or partial SRP details degrade to zeroed stats."""
        # cannot retrieve srp
        array = self.data.array
        array_info = {'srpName': self.data.failed_resource}
        ref_stats = (0, 0, 0, 0, False)
        stats = self.provision.get_srp_pool_stats(array, array_info)
        self.assertEqual(ref_stats, stats)
        # cannot report on all stats
        with mock.patch.object(self.provision.rest, 'get_srp_by_name',
                               return_value={'total_usable_cap_gb': 33}):
            with mock.patch.object(self.provision,
                                   '_get_remaining_slo_capacity_wlp',
                                   return_value=(-1)):
                ref_stats = (33, 0, 0, 0, False)
                stats = self.provision.get_srp_pool_stats(array, array_info)
                self.assertEqual(ref_stats, stats)
    def test_get_remaining_slo_capacity_wlp(self):
        """Remaining SLO capacity equals the REST headroom figure."""
        array = self.data.array
        array_info = self.common.pool_info['arrays_info'][0]
        srp = self.data.srp
        ref_capacity = self.provision.rest.get_headroom_capacity(
            array, srp, array_info['SLO'], array_info['Workload'])
        remaining_capacity = (
            self.provision._get_remaining_slo_capacity_wlp(
                array, srp, array_info))
        self.assertEqual(ref_capacity, remaining_capacity)
    def test_get_remaining_slo_capacity_no_slo_or_wlp(self):
        """No SLO, or no headroom data, yields the sentinel -1."""
        array = self.data.array
        array_info = self.common.pool_info['arrays_info'][0]
        srp = self.data.srp
        ref_capacity = -1
        with mock.patch.object(self.provision.rest, 'get_headroom_capacity',
                               return_value=None):
            # No SLO set: headroom is never even queried.
            remaining_capacity = (
                self.provision._get_remaining_slo_capacity_wlp(
                    array, srp, {'SLO': None}))
            self.assertEqual(ref_capacity, remaining_capacity)
            self.provision.rest.get_headroom_capacity.assert_not_called()
            # SLO set but headroom lookup returns nothing.
            remaining_capacity = (
                self.provision._get_remaining_slo_capacity_wlp(
                    array, srp, array_info))
            self.assertEqual(ref_capacity, remaining_capacity)
    def test_verify_slo_workload_true(self):
        """Valid SLO/workload pairs, and None values, verify as valid."""
        # with slo and workload
        array = self.data.array
        slo = self.data.slo
        workload = self.data.workload
        srp = self.data.srp
        valid_slo, valid_workload = self.provision.verify_slo_workload(
            array, slo, workload, srp)
        self.assertTrue(valid_slo)
        self.assertTrue(valid_workload)
        # slo and workload = none
        slo2 = None
        workload2 = None
        valid_slo2, valid_workload2 = self.provision.verify_slo_workload(
            array, slo2, workload2, srp)
        self.assertTrue(valid_slo2)
        self.assertTrue(valid_workload2)
        slo2 = None
        workload2 = 'None'
        valid_slo2, valid_workload2 = self.provision.verify_slo_workload(
            array, slo2, workload2, srp)
        self.assertTrue(valid_slo2)
        self.assertTrue(valid_workload2)
    def test_verify_slo_workload_false(self):
        """Unknown SLO/workload values are reported invalid."""
        # Both wrong
        array = self.data.array
        slo = 'Diamante'
        workload = 'DSSS'
        srp = self.data.srp
        valid_slo, valid_workload = self.provision.verify_slo_workload(
            array, slo, workload, srp)
        self.assertFalse(valid_slo)
        self.assertFalse(valid_workload)
        # Workload set, no slo set
        valid_slo, valid_workload = self.provision.verify_slo_workload(
            array, None, self.data.workload, srp)
        self.assertTrue(valid_slo)
        self.assertFalse(valid_workload)
    def test_get_slo_workload_settings_from_storage_group(self):
        """SLO+workload settings are derived from the storage group."""
        ref_settings = "Diamond+DSS"
        sg_slo_settings = (
            self.provision.get_slo_workload_settings_from_storage_group(
                self.data.array, self.data.defaultstoragegroup_name))
        self.assertEqual(ref_settings, sg_slo_settings)
        # No workload
        with mock.patch.object(self.provision.rest, 'get_storage_group',
                               return_value={'slo': 'Silver'}):
            ref_settings2 = "Silver+NONE"
            sg_slo_settings2 = (
                self.provision.get_slo_workload_settings_from_storage_group(
                    self.data.array, 'no_workload_sg'))
            self.assertEqual(ref_settings2, sg_slo_settings2)
    def test_break_rdf_relationship(self):
        """Breaking RDF splits the pair then deletes the rdf pairing."""
        array = self.data.array
        device_id = self.data.device_id
        target_device = self.data.device_id2
        rdf_group_name = self.data.rdf_group_name
        rep_extra_specs = self.data.rep_extra_specs
        with mock.patch.object(
                self.provision.rest, 'modify_rdf_device_pair') as mod_rdf:
            with mock.patch.object(
                    self.provision.rest, 'delete_rdf_pair') as del_rdf:
                self.provision.break_rdf_relationship(
                    array, device_id, target_device,
                    rdf_group_name, rep_extra_specs, "Synchronized")
                mod_rdf.assert_called_once_with(
                    array, device_id, rdf_group_name, rep_extra_specs,
                    split=True)
                del_rdf.assert_called_once_with(
                    array, device_id, rdf_group_name)
    def test_failover_volume(self):
        """Failover and failback both modify the rdf pair with split=False.

        NOTE(review): both branches assert the identical call; presumably
        the failover/failback distinction is handled inside the rest layer —
        confirm against provision.failover_volume.
        """
        array = self.data.array
        device_id = self.data.device_id
        rdf_group_name = self.data.rdf_group_name
        extra_specs = self.data.extra_specs
        with mock.patch.object(
                self.provision.rest, 'modify_rdf_device_pair') as mod_rdf:
            self.provision.failover_volume(
                array, device_id, rdf_group_name,
                extra_specs, '', True)
            mod_rdf.assert_called_once_with(
                array, device_id, rdf_group_name, extra_specs,
                split=False)
            mod_rdf.reset_mock()
            self.provision.failover_volume(
                array, device_id, rdf_group_name,
                extra_specs, '', False)
            mod_rdf.assert_called_once_with(
                array, device_id, rdf_group_name, extra_specs,
                split=False)
    def test_create_volume_group_success(self):
        """Creating a volume group returns the group name on success."""
        array = self.data.array
        group_name = self.data.storagegroup_name_source
        extra_specs = self.data.extra_specs
        ref_value = self.data.storagegroup_name_source
        storagegroup = self.provision.create_volume_group(array,
                                                          group_name,
                                                          extra_specs)
        self.assertEqual(ref_value, storagegroup)
    def test_create_group_replica(self):
        """Verify create_group_replica forwards its arguments.

        NOTE(review): this patches the very method under test, so it only
        checks call forwarding — it never exercises the real implementation.
        Consider mocking the underlying rest call instead; confirm intent.
        """
        array = self.data.array
        source_group = self.data.storagegroup_name_source
        snap_name = self.data.group_snapshot_name
        extra_specs = self.data.extra_specs
        with mock.patch.object(
                self.provision,
                'create_group_replica') as mock_create_replica:
            self.provision.create_group_replica(
                array, source_group, snap_name, extra_specs)
            mock_create_replica.assert_called_once_with(
                array, source_group, snap_name, extra_specs)
    def test_delete_group_replica(self):
        """Verify delete_group_replica forwards its arguments.

        NOTE(review): patches the method under test (tautological) —
        see note on test_create_group_replica.
        """
        array = self.data.array
        snap_name = self.data.group_snapshot_name
        source_group_name = self.data.storagegroup_name_source
        with mock.patch.object(
                self.provision,
                'delete_group_replica') as mock_delete_replica:
            self.provision.delete_group_replica(array,
                                                snap_name,
                                                source_group_name)
            mock_delete_replica.assert_called_once_with(
                array, snap_name, source_group_name)
    def test_link_and_break_replica(self):
        """Verify link_and_break_replica forwards its arguments.

        NOTE(review): patches the method under test (tautological) —
        see note on test_create_group_replica.
        """
        array = self.data.array
        source_group_name = self.data.storagegroup_name_source
        target_group_name = self.data.target_group_name
        snap_name = self.data.group_snapshot_name
        extra_specs = self.data.extra_specs
        deleteSnapshot = False
        with mock.patch.object(
                self.provision,
                'link_and_break_replica') as mock_link_and_break_replica:
            self.provision.link_and_break_replica(
                array, source_group_name,
                target_group_name, snap_name,
                extra_specs, deleteSnapshot)
            mock_link_and_break_replica.assert_called_once_with(
                array, source_group_name,
                target_group_name, snap_name,
                extra_specs, deleteSnapshot)
    def test_unlink_group(self):
        """_unlink_group issues a single unlink modify on the snap."""
        with mock.patch.object(self.rest,
                               'modify_storagegroup_snap') as mock_mod:
            self.provision._unlink_group(
                self.data.array, self.data.storagegroup_name_source,
                self.data.target_group_name,
                self.data.group_snapshot_name, self.data.extra_specs)
            mock_mod.assert_called_once_with(
                self.data.array, self.data.storagegroup_name_source,
                self.data.target_group_name,
                self.data.group_snapshot_name, self.data.extra_specs,
                unlink=True)
class VMAXCommonTest(test.TestCase):
    def setUp(self):
        """Build an FC driver over a faked REST session for common tests."""
        self.data = VMAXCommonData()
        super(VMAXCommonTest, self).setUp()
        config_group = 'CommonTests'
        self.fake_xml = FakeXML().create_fake_config_file(
            config_group, self.data.port_group_name_f)
        configuration = FakeConfiguration(self.fake_xml, config_group,
                                          1, 1)
        rest.VMAXRest._establish_rest_session = mock.Mock(
            return_value=FakeRequestsSession())
        driver = fc.VMAXFCDriver(configuration=configuration)
        self.driver = driver
        self.common = self.driver.common
        self.masking = self.common.masking
        self.provision = self.common.provision
        self.rest = self.common.rest
        self.utils = self.common.utils
        # Always hand back the fixture extra specs for any volume type.
        self.utils.get_volumetype_extra_specs = (
            mock.Mock(return_value=self.data.vol_type_extra_specs))
    @mock.patch.object(rest.VMAXRest,
                       'set_rest_credentials')
    @mock.patch.object(common.VMAXCommon,
                       '_get_slo_workload_combinations',
                       return_value=[])
    @mock.patch.object(utils.VMAXUtils,
                       'parse_file_to_get_array_map',
                       return_value=[])
    def test_gather_info_no_opts(self, mock_parse, mock_combo, mock_rest):
        """Driver construction succeeds with an empty configuration.

        All info-gathering collaborators are stubbed to return nothing;
        the test passes if instantiation does not raise.
        """
        configuration = FakeConfiguration(None, 'config_group', None, None)
        fc.VMAXFCDriver(configuration=configuration)
def test_get_slo_workload_combinations_success(self):
array_info = self.utils.parse_file_to_get_array_map(
self.common.pool_info['config_file'])
finalarrayinfolist = self.common._get_slo_workload_combinations(
array_info)
self.assertTrue(len(finalarrayinfolist) > 1)
def test_get_slo_workload_combinations_failed(self):
array_info = {}
self.assertRaises(exception.VolumeBackendAPIException,
self.common._get_slo_workload_combinations,
array_info)
def test_create_volume(self):
ref_model_update = (
{'provider_location': six.text_type(self.data.provider_location)})
model_update = self.common.create_volume(self.data.test_volume)
self.assertEqual(ref_model_update, model_update)
def test_create_volume_from_snapshot(self):
ref_model_update = (
{'provider_location': six.text_type(
self.data.provider_location)})
model_update = self.common.create_volume_from_snapshot(
self.data.test_clone_volume, self.data.test_snapshot)
self.assertEqual(ref_model_update, model_update)
# Test from legacy snapshot
model_update = self.common.create_volume_from_snapshot(
self.data.test_clone_volume, self.data.test_legacy_snapshot)
self.assertEqual(ref_model_update, model_update)
def test_cloned_volume(self):
ref_model_update = (
{'provider_location': six.text_type(
self.data.provider_location)})
model_update = self.common.create_cloned_volume(
self.data.test_clone_volume, self.data.test_volume)
self.assertEqual(ref_model_update, model_update)
def test_delete_volume(self):
with mock.patch.object(self.common, '_delete_volume'):
self.common.delete_volume(self.data.test_volume)
self.common._delete_volume.assert_called_once_with(
self.data.test_volume)
def test_create_snapshot(self):
ref_model_update = (
{'provider_location': six.text_type(
self.data.snap_location)})
model_update = self.common.create_snapshot(
self.data.test_snapshot, self.data.test_volume)
self.assertEqual(ref_model_update, model_update)
    def test_delete_snapshot(self):
        """delete_snapshot resolves the snap from provider_location and
        deletes it on the array."""
        snap_name = self.data.snap_location['snap_name']
        sourcedevice_id = self.data.snap_location['source_id']
        with mock.patch.object(self.provision, 'delete_volume_snap'):
            self.common.delete_snapshot(self.data.test_snapshot,
                                        self.data.test_volume)
            self.provision.delete_volume_snap.assert_called_once_with(
                self.data.array, snap_name, sourcedevice_id)
    def test_delete_snapshot_not_found(self):
        """If the snap cannot be resolved, no array delete is attempted."""
        # _parse_snap_info returning (None, ...) means no snap name was
        # found for this snapshot on the array.
        with mock.patch.object(self.common, '_parse_snap_info',
                               return_value=(None, 'Something')):
            with mock.patch.object(self.provision, 'delete_volume_snap'):
                self.common.delete_snapshot(self.data.test_snapshot,
                                            self.data.test_volume)
                self.provision.delete_volume_snap.assert_not_called()
def test_delete_legacy_snap(self):
with mock.patch.object(self.common, '_delete_volume') as mock_del:
self.common.delete_snapshot(self.data.test_legacy_snapshot,
self.data.test_legacy_vol)
mock_del.assert_called_once_with(self.data.test_legacy_snapshot)
def test_remove_members(self):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
with mock.patch.object(self.masking,
'remove_and_reset_members') as mock_rm:
self.common._remove_members(array, volume, device_id,
extra_specs, self.data.connector)
mock_rm.assert_called_once_with(
array, device_id, volume_name,
extra_specs, True, self.data.connector)
def test_unmap_lun(self):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
extra_specs = deepcopy(self.data.extra_specs_intervals_set)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
connector = self.data.connector
with mock.patch.object(self.common, '_remove_members'):
self.common._unmap_lun(volume, connector)
self.common._remove_members.assert_called_once_with(
array, volume, device_id, extra_specs, connector)
def test_unmap_lun_not_mapped(self):
volume = self.data.test_volume
connector = self.data.connector
with mock.patch.object(self.common, 'find_host_lun_id',
return_value=({}, False, [])):
with mock.patch.object(self.common, '_remove_members'):
self.common._unmap_lun(volume, connector)
self.common._remove_members.assert_not_called()
def test_unmap_lun_connector_is_none(self):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
extra_specs = deepcopy(self.data.extra_specs_intervals_set)
extra_specs['storagetype:portgroupname'] = (
self.data.port_group_name_f)
with mock.patch.object(self.common, '_remove_members'):
self.common._unmap_lun(volume, None)
self.common._remove_members.assert_called_once_with(
array, volume, device_id, extra_specs, None)
def test_initialize_connection_already_mapped(self):
volume = self.data.test_volume
connector = self.data.connector
host_lun = (self.data.maskingview[0]['maskingViewConnection'][0]
['host_lun_address'])
ref_dict = {'hostlunid': int(host_lun, 16),
'maskingview': self.data.masking_view_name_f,
'array': self.data.array,
'device_id': self.data.device_id}
device_info_dict = self.common.initialize_connection(volume, connector)
self.assertEqual(ref_dict, device_info_dict)
def test_initialize_connection_not_mapped(self):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs_intervals_set)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
with mock.patch.object(self.common, 'find_host_lun_id',
return_value=({}, False, [])):
with mock.patch.object(
self.common, '_attach_volume', return_value=(
{}, self.data.port_group_name_f)):
device_info_dict = self.common.initialize_connection(volume,
connector)
self.assertEqual({}, device_info_dict)
self.common._attach_volume.assert_called_once_with(
volume, connector, extra_specs, masking_view_dict, False)
def test_attach_volume_success(self):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
host_lun = (self.data.maskingview[0]['maskingViewConnection'][0]
['host_lun_address'])
ref_dict = {'hostlunid': int(host_lun, 16),
'maskingview': self.data.masking_view_name_f,
'array': self.data.array,
'device_id': self.data.device_id}
with mock.patch.object(self.masking, 'setup_masking_view',
return_value={
utils.PORTGROUPNAME:
self.data.port_group_name_f}):
device_info_dict, pg = self.common._attach_volume(
volume, connector, extra_specs, masking_view_dict)
self.assertEqual(ref_dict, device_info_dict)
def test_attach_volume_failed(self):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
with mock.patch.object(self.masking, 'setup_masking_view',
return_value={}):
with mock.patch.object(self.common, 'find_host_lun_id',
return_value=({}, False, [])):
with mock.patch.object(
self.masking,
'check_if_rollback_action_for_masking_required'):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._attach_volume, volume,
connector, extra_specs,
masking_view_dict)
device_id = self.data.device_id
(self.masking.
check_if_rollback_action_for_masking_required.
assert_called_once_with(self.data.array, device_id, {}))
def test_terminate_connection(self):
volume = self.data.test_volume
connector = self.data.connector
with mock.patch.object(self.common, '_unmap_lun'):
self.common.terminate_connection(volume, connector)
self.common._unmap_lun.assert_called_once_with(
volume, connector)
@mock.patch.object(common.VMAXCommon, '_sync_check')
@mock.patch.object(provision.VMAXProvision, 'extend_volume')
def test_extend_volume_success(self, mock_extend, mock_sync):
volume = self.data.test_volume
array = self.data.array
device_id = self.data.device_id
new_size = self.data.test_volume.size
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
with mock.patch.object(self.rest, 'is_vol_in_rep_session',
return_value=(False, False, None)):
self.common.extend_volume(volume, new_size)
mock_extend.assert_called_once_with(
array, device_id, new_size, ref_extra_specs)
def test_extend_volume_failed_snap_src(self):
volume = self.data.test_volume
new_size = self.data.test_volume.size
with mock.patch.object(self.rest, 'is_vol_in_rep_session',
return_value=(False, True, None)):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.extend_volume, volume, new_size)
def test_extend_volume_failed_no_device_id(self):
volume = self.data.test_volume
new_size = self.data.test_volume.size
with mock.patch.object(self.common, '_find_device_on_array',
return_value=None):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.extend_volume, volume, new_size)
def test_extend_volume_failed_wrong_size(self):
volume = self.data.test_volume
new_size = 1
self.assertRaises(exception.VolumeBackendAPIException,
self.common.extend_volume, volume, new_size)
def test_update_volume_stats(self):
data = self.common.update_volume_stats()
self.assertEqual('CommonTests', data['volume_backend_name'])
def test_update_volume_stats_no_wlp(self):
with mock.patch.object(self.common, '_update_srp_stats',
return_value=('123s#SRP_1#None#None',
100, 90, 90, 10, False)):
data = self.common.update_volume_stats()
self.assertEqual('CommonTests', data['volume_backend_name'])
def test_set_config_file_and_get_extra_specs(self):
volume = self.data.test_volume
extra_specs, config_file, qos_specs = (
self.common._set_config_file_and_get_extra_specs(volume))
self.assertEqual(self.data.vol_type_extra_specs, extra_specs)
self.assertEqual(self.fake_xml, config_file)
def test_set_config_file_and_get_extra_specs_no_specs(self):
volume = self.data.test_volume
ref_config = '/etc/cinder/cinder_dell_emc_config.xml'
with mock.patch.object(self.utils, 'get_volumetype_extra_specs',
return_value=None):
extra_specs, config_file, qos_specs = (
self.common._set_config_file_and_get_extra_specs(volume))
self.assertIsNone(extra_specs)
self.assertEqual(ref_config, config_file)
def test_find_device_on_array_success(self):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
ref_device_id = self.data.device_id
founddevice_id = self.common._find_device_on_array(volume, extra_specs)
self.assertEqual(ref_device_id, founddevice_id)
def test_find_device_on_array_different_device_id(self):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
with mock.patch.object(
self.rest, 'find_volume_device_id',
return_value='01234'):
founddevice_id = self.common._find_device_on_array(
volume, extra_specs)
self.assertIsNone(founddevice_id)
def test_find_device_on_array_provider_location_not_string(self):
volume = fake_volume.fake_volume_obj(
context='cxt', provider_location=None)
extra_specs = self.data.extra_specs
founddevice_id = self.common._find_device_on_array(
volume, extra_specs)
self.assertIsNone(founddevice_id)
def test_find_legacy_device_on_array(self):
volume = self.data.test_legacy_vol
extra_specs = self.data.extra_specs
ref_device_id = self.data.device_id
founddevice_id = self.common._find_device_on_array(volume, extra_specs)
self.assertEqual(ref_device_id, founddevice_id)
def test_find_host_lun_id_attached(self):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
host = 'HostX'
host_lun = (self.data.maskingview[0]['maskingViewConnection'][0]
['host_lun_address'])
ref_masked = {'hostlunid': int(host_lun, 16),
'maskingview': self.data.masking_view_name_f,
'array': self.data.array,
'device_id': self.data.device_id}
maskedvols, __, __ = self.common.find_host_lun_id(
volume, host, extra_specs)
self.assertEqual(ref_masked, maskedvols)
def test_find_host_lun_id_not_attached(self):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
host = 'HostX'
with mock.patch.object(self.rest, 'find_mv_connections_for_vol',
return_value=None):
maskedvols, __, __ = self.common.find_host_lun_id(
volume, host, extra_specs)
self.assertEqual({}, maskedvols)
def test_get_masking_views_from_volume(self):
array = self.data.array
device_id = self.data.device_id
host = 'HostX'
ref_mv_list = [self.data.masking_view_name_f]
maskingview_list = self.common.get_masking_views_from_volume(
array, device_id, host)
self.assertEqual(ref_mv_list, maskingview_list)
def test_get_masking_views_from_volume_wrong_host(self):
array = self.data.array
device_id = self.data.device_id
host = 'DifferentHost'
maskingview_list = self.common.get_masking_views_from_volume(
array, device_id, host)
self.assertFalse(maskingview_list)
def test_find_host_lun_id_no_host_check(self):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
host_lun = (self.data.maskingview[0]['maskingViewConnection'][0]
['host_lun_address'])
ref_masked = {'hostlunid': int(host_lun, 16),
'maskingview': self.data.masking_view_name_f,
'array': self.data.array,
'device_id': self.data.device_id}
maskedvols, __, __ = self.common.find_host_lun_id(
volume, None, extra_specs)
self.assertEqual(ref_masked, maskedvols)
def test_register_config_file_from_config_group_exists(self):
config_group_name = 'CommonTests'
config_file = self.common._register_config_file_from_config_group(
config_group_name)
self.assertEqual(self.fake_xml, config_file)
def test_register_config_file_from_config_group_does_not_exist(self):
config_group_name = 'IncorrectName'
self.assertRaises(exception.VolumeBackendAPIException,
self.common._register_config_file_from_config_group,
config_group_name)
def test_initial_setup_success(self):
volume = self.data.test_volume
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
extra_specs = self.common._initial_setup(volume)
self.assertEqual(ref_extra_specs, extra_specs)
def test_initial_setup_failed(self):
volume = self.data.test_volume
with mock.patch.object(self.utils, 'parse_file_to_get_array_map',
return_value=None):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._initial_setup, volume)
def test_populate_masking_dict(self):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
ref_mv_dict = self.data.masking_view_dict
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
self.assertEqual(ref_mv_dict, masking_view_dict)
def test_populate_masking_dict_no_slo(self):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = {
'slo': None,
'workload': None,
'srp': self.data.srp,
'array': self.data.array,
utils.PORTGROUPNAME: self.data.port_group_name_f}
ref_mv_dict = self.data.masking_view_dict_no_slo
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
self.assertEqual(ref_mv_dict, masking_view_dict)
def test_populate_masking_dict_compr_disabled(self):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
extra_specs[utils.DISABLECOMPRESSION] = "true"
ref_mv_dict = self.data.masking_view_dict_compression_disabled
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
self.assertEqual(ref_mv_dict, masking_view_dict)
def test_create_cloned_volume(self):
volume = self.data.test_clone_volume
source_volume = self.data.test_volume
extra_specs = self.data.extra_specs
ref_dict = self.data.provider_location
clone_dict = self.common._create_cloned_volume(
volume, source_volume, extra_specs)
self.assertEqual(ref_dict, clone_dict)
def test_create_cloned_volume_is_snapshot(self):
volume = self.data.test_snapshot
source_volume = self.data.test_volume
extra_specs = self.data.extra_specs
ref_dict = self.data.snap_location
clone_dict = self.common._create_cloned_volume(
volume, source_volume, extra_specs, True, False)
self.assertEqual(ref_dict, clone_dict)
def test_create_cloned_volume_from_snapshot(self):
volume = self.data.test_clone_volume
source_volume = self.data.test_snapshot
extra_specs = self.data.extra_specs
ref_dict = self.data.provider_location
clone_dict = self.common._create_cloned_volume(
volume, source_volume, extra_specs, False, True)
self.assertEqual(ref_dict, clone_dict)
def test_create_cloned_volume_not_licenced(self):
volume = self.data.test_clone_volume
source_volume = self.data.test_volume
extra_specs = self.data.extra_specs
with mock.patch.object(self.rest, 'is_snapvx_licensed',
return_value=False):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._create_cloned_volume,
volume, source_volume, extra_specs)
def test_parse_snap_info_found(self):
ref_device_id = self.data.device_id
ref_snap_name = self.data.snap_location['snap_name']
sourcedevice_id, foundsnap_name = self.common._parse_snap_info(
self.data.array, self.data.test_snapshot)
self.assertEqual(ref_device_id, sourcedevice_id)
self.assertEqual(ref_snap_name, foundsnap_name)
def test_parse_snap_info_not_found(self):
ref_snap_name = None
with mock.patch.object(self.rest, 'get_volume_snap',
return_value=None):
__, foundsnap_name = self.common._parse_snap_info(
self.data.array, self.data.test_snapshot)
self.assertIsNone(ref_snap_name, foundsnap_name)
def test_parse_snap_info_exception(self):
with mock.patch.object(
self.rest, 'get_volume_snap',
side_effect=exception.VolumeBackendAPIException):
__, foundsnap_name = self.common._parse_snap_info(
self.data.array, self.data.test_snapshot)
self.assertIsNone(foundsnap_name)
def test_parse_snap_info_provider_location_not_string(self):
snapshot = fake_snapshot.fake_snapshot_obj(
context='ctxt', provider_loaction={'not': 'string'})
sourcedevice_id, foundsnap_name = self.common._parse_snap_info(
self.data.array, snapshot)
self.assertIsNone(foundsnap_name)
def test_create_snapshot_success(self):
array = self.data.array
snapshot = self.data.test_snapshot
source_device_id = self.data.device_id
extra_specs = self.data.extra_specs
ref_dict = {'snap_name': self.data.test_snapshot_snap_name,
'source_id': self.data.device_id}
snap_dict = self.common._create_snapshot(
array, snapshot, source_device_id, extra_specs)
self.assertEqual(ref_dict, snap_dict)
def test_create_snapshot_exception(self):
array = self.data.array
snapshot = self.data.test_snapshot
source_device_id = self.data.device_id
extra_specs = self.data.extra_specs
with mock.patch.object(
self.provision, 'create_volume_snapvx',
side_effect=exception.VolumeBackendAPIException):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._create_snapshot,
array, snapshot, source_device_id, extra_specs)
@mock.patch.object(masking.VMAXMasking, 'remove_vol_from_storage_group')
def test_delete_volume_from_srp(self, mock_rm):
array = self.data.array
device_id = self.data.device_id
volume_name = self.data.test_volume.name
ref_extra_specs = self.data.extra_specs_intervals_set
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
volume = self.data.test_volume
with mock.patch.object(self.common, '_sync_check'):
with mock.patch.object(self.common, '_delete_from_srp'):
self.common._delete_volume(volume)
self.common._delete_from_srp.assert_called_once_with(
array, device_id, volume_name, ref_extra_specs)
def test_delete_volume_not_found(self):
volume = self.data.test_volume
with mock.patch.object(self.common, '_find_device_on_array',
return_value=None):
with mock.patch.object(self.common, '_delete_from_srp'):
self.common._delete_volume(volume)
self.common._delete_from_srp.assert_not_called()
def test_create_volume_success(self):
volume_name = '1'
volume_size = self.data.test_volume.size
extra_specs = self.data.extra_specs
ref_dict = self.data.provider_location
volume_dict = self.common._create_volume(
volume_name, volume_size, extra_specs)
self.assertEqual(ref_dict, volume_dict)
    def test_create_volume_failed(self):
        """On create failure, the default SG is deleted only if the failed
        volume would have been its last member."""
        volume_name = self.data.test_volume.name
        volume_size = self.data.test_volume.size
        extra_specs = self.data.extra_specs
        # 'failed_resource' makes the downstream REST calls fail, so
        # _create_volume raises and enters its cleanup path.
        with mock.patch.object(self.masking,
                               'get_or_create_default_storage_group',
                               return_value=self.data.failed_resource):
            with mock.patch.object(self.rest, 'delete_storage_group'):
                # path 1: not last vol in sg
                with mock.patch.object(self.rest, 'get_num_vols_in_sg',
                                       return_value=2):
                    self.assertRaises(exception.VolumeBackendAPIException,
                                      self.common._create_volume,
                                      volume_name, volume_size, extra_specs)
                    self.rest.delete_storage_group.assert_not_called()
                # path 2: last vol in sg, delete sg
                with mock.patch.object(self.rest, 'get_num_vols_in_sg',
                                       return_value=0):
                    self.assertRaises(exception.VolumeBackendAPIException,
                                      self.common._create_volume,
                                      volume_name, volume_size, extra_specs)
                    (self.rest.delete_storage_group.
                        assert_called_once_with(self.data.array,
                                                self.data.failed_resource))
def test_create_volume_incorrect_slo(self):
volume_name = self.data.test_volume.name
volume_size = self.data.test_volume.size
extra_specs = {'slo': 'Diamondz',
'workload': 'DSSSS',
'srp': self.data.srp,
'array': self.data.array}
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._create_volume,
volume_name, volume_size, extra_specs)
def test_set_vmax_extra_specs(self):
srp_record = self.utils.parse_file_to_get_array_map(
self.fake_xml)
extra_specs = self.common._set_vmax_extra_specs(
self.data.vol_type_extra_specs, srp_record)
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
self.assertEqual(ref_extra_specs, extra_specs)
def test_set_vmax_extra_specs_no_srp_name(self):
srp_record = self.utils.parse_file_to_get_array_map(
self.fake_xml)
extra_specs = self.common._set_vmax_extra_specs({}, srp_record)
self.assertEqual('Optimized', extra_specs['slo'])
def test_set_vmax_extra_specs_compr_disabled(self):
with mock.patch.object(self.rest, 'is_compression_capable',
return_value=True):
srp_record = self.utils.parse_file_to_get_array_map(
self.fake_xml)
extra_specs = self.common._set_vmax_extra_specs(
self.data.vol_type_extra_specs_compr_disabled, srp_record)
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
ref_extra_specs[utils.DISABLECOMPRESSION] = "true"
self.assertEqual(ref_extra_specs, extra_specs)
def test_set_vmax_extra_specs_compr_disabled_not_compr_capable(self):
srp_record = self.utils.parse_file_to_get_array_map(
self.fake_xml)
extra_specs = self.common._set_vmax_extra_specs(
self.data.vol_type_extra_specs_compr_disabled, srp_record)
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
self.assertEqual(ref_extra_specs, extra_specs)
def test_set_vmax_extra_specs_portgroup_as_spec(self):
srp_record = self.utils.parse_file_to_get_array_map(
self.fake_xml)
extra_specs = self.common._set_vmax_extra_specs(
{utils.PORTGROUPNAME: 'extra_spec_pg'}, srp_record)
self.assertEqual('extra_spec_pg', extra_specs[utils.PORTGROUPNAME])
def test_set_vmax_extra_specs_no_portgroup_set(self):
fake_xml = FakeXML().create_fake_config_file(
'test_no_pg_set', '')
srp_record = self.utils.parse_file_to_get_array_map(fake_xml)
self.assertRaises(exception.VolumeBackendAPIException,
self.common._set_vmax_extra_specs,
{}, srp_record)
def test_delete_volume_from_srp_success(self):
array = self.data.array
device_id = self.data.device_id
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
with mock.patch.object(
self.provision, 'delete_volume_from_srp') as mock_del:
self.common._delete_from_srp(array, device_id, volume_name,
extra_specs)
mock_del.assert_called_once_with(array, device_id, volume_name)
def test_delete_volume_from_srp_failed(self):
array = self.data.array
device_id = self.data.failed_resource
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
with mock.patch.object(self.masking,
'add_volume_to_default_storage_group'):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._delete_from_srp, array,
device_id, volume_name, extra_specs)
(self.masking.add_volume_to_default_storage_group.
assert_called_once_with(
array, device_id, volume_name, extra_specs))
@mock.patch.object(utils.VMAXUtils, 'is_replication_enabled',
side_effect=[False, True])
def test_remove_vol_and_cleanup_replication(self, mock_rep_enabled):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
with mock.patch.object(
self.masking, 'remove_and_reset_members') as mock_rm:
with mock.patch.object(
self.common, 'cleanup_lun_replication') as mock_clean:
self.common._remove_vol_and_cleanup_replication(
array, device_id, volume_name, extra_specs)
mock_rm.assert_called_once_with(
array, device_id, volume_name, extra_specs, False)
mock_clean.assert_not_called()
self.common._remove_vol_and_cleanup_replication(
array, device_id, volume_name, extra_specs, volume)
mock_clean.assert_called_once_with(
volume, volume_name, device_id, extra_specs)
    @mock.patch.object(common.VMAXCommon, '_get_replication_extra_specs',
                       return_value=VMAXCommonData.rep_extra_specs)
    def test_get_target_wwns_from_masking_view(self, mock_rep_specs):
        """Target WWNs come from the masking view; failed-over volumes use
        the replication extra specs instead of the local ones."""
        target_wwns = self.common.get_target_wwns_from_masking_view(
            self.data.test_volume, self.data.connector)
        ref_wwns = [self.data.wwnn1]
        self.assertEqual(ref_wwns, target_wwns)
        # Volume is failed over
        with mock.patch.object(self.utils, 'is_volume_failed_over',
                               return_value=True):
            self.common.get_target_wwns_from_masking_view(
                self.data.test_volume, self.data.connector)
            # NOTE(review): assert_called_once() requires mock >= 2.0 /
            # Python 3.6; on older mock libraries it is a silent no-op
            # attribute access — confirm the gate's mock version.
            mock_rep_specs.assert_called_once()
def test_get_target_wwns_from_masking_view_no_mv(self):
with mock.patch.object(self.common, 'get_masking_views_from_volume',
return_value=None):
target_wwns = self.common.get_target_wwns_from_masking_view(
self.data.test_volume, self.data.connector)
self.assertFalse(target_wwns)
def test_get_port_group_from_masking_view(self):
array = self.data.array
maskingview_name = self.data.masking_view_name_f
with mock.patch.object(self.rest,
'get_element_from_masking_view'):
self.common.get_port_group_from_masking_view(
array, maskingview_name)
self.rest.get_element_from_masking_view.assert_called_once_with(
array, maskingview_name, portgroup=True)
def test_get_initiator_group_from_masking_view(self):
array = self.data.array
maskingview_name = self.data.masking_view_name_f
with mock.patch.object(self.rest,
'get_element_from_masking_view'):
self.common.get_initiator_group_from_masking_view(
array, maskingview_name)
self.rest.get_element_from_masking_view.assert_called_once_with(
array, maskingview_name, host=True)
def test_get_common_masking_views(self):
array = self.data.array
portgroup_name = self.data.port_group_name_f
initiator_group_name = self.data.initiatorgroup_name_f
with mock.patch.object(self.rest, 'get_common_masking_views'):
self.common.get_common_masking_views(
array, portgroup_name, initiator_group_name)
self.rest.get_common_masking_views.assert_called_once_with(
array, portgroup_name, initiator_group_name)
def test_get_ip_and_iqn(self):
ref_ip_iqn = [{'iqn': self.data.initiator,
'ip': self.data.ip}]
port = self.data.portgroup[1]['symmetrixPortKey'][0]['portId']
ip_iqn_list = self.common._get_ip_and_iqn(self.data.array, port)
self.assertEqual(ref_ip_iqn, ip_iqn_list)
def test_find_ip_and_iqns(self):
ref_ip_iqn = [{'iqn': self.data.initiator,
'ip': self.data.ip}]
ip_iqn_list = self.common._find_ip_and_iqns(
self.data.array, self.data.port_group_name_i)
self.assertEqual(ref_ip_iqn, ip_iqn_list)
def test_create_replica_snap_name(self):
array = self.data.array
clone_volume = self.data.test_clone_volume
source_device_id = self.data.device_id
snap_name = self.data.snap_location['snap_name']
ref_dict = self.data.provider_location
clone_dict = self.common._create_replica(
array, clone_volume, source_device_id,
self.data.extra_specs, snap_name)
self.assertEqual(ref_dict, clone_dict)
def test_create_replica_no_snap_name(self):
array = self.data.array
clone_volume = self.data.test_clone_volume
source_device_id = self.data.device_id
snap_name = "temp-" + source_device_id + clone_volume.id
ref_dict = self.data.provider_location
with mock.patch.object(self.utils, 'get_temp_snap_name',
return_value=snap_name):
clone_dict = self.common._create_replica(
array, clone_volume, source_device_id,
self.data.extra_specs)
self.assertEqual(ref_dict, clone_dict)
self.utils.get_temp_snap_name.assert_called_once_with(
('OS-' + clone_volume.id), source_device_id)
def test_create_replica_failed_cleanup_target(self):
array = self.data.array
clone_volume = self.data.test_clone_volume
device_id = self.data.device_id
snap_name = self.data.failed_resource
clone_name = 'OS-' + clone_volume.id
extra_specs = self.data.extra_specs
with mock.patch.object(self.common, '_cleanup_target'):
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._create_replica, array, clone_volume,
device_id, self.data.extra_specs, snap_name)
self.common._cleanup_target.assert_called_once_with(
array, device_id, device_id, clone_name,
snap_name, extra_specs)
def test_create_replica_failed_no_target(self):
array = self.data.array
clone_volume = self.data.test_clone_volume
source_device_id = self.data.device_id
snap_name = self.data.failed_resource
with mock.patch.object(self.common, '_create_volume',
return_value={'device_id': None}):
with mock.patch.object(self.common, '_cleanup_target'):
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._create_replica, array, clone_volume,
source_device_id, self.data.extra_specs, snap_name)
self.common._cleanup_target.assert_not_called()
@mock.patch.object(
masking.VMAXMasking,
'remove_and_reset_members')
def test_cleanup_target_sync_present(self, mock_remove):
array = self.data.array
clone_volume = self.data.test_clone_volume
source_device_id = self.data.device_id
target_device_id = self.data.device_id2
snap_name = self.data.failed_resource
clone_name = clone_volume.name
extra_specs = self.data.extra_specs
with mock.patch.object(self.rest, 'get_sync_session',
return_value='session'):
with mock.patch.object(self.provision,
'break_replication_relationship'):
self.common._cleanup_target(
array, target_device_id, source_device_id,
clone_name, snap_name, extra_specs)
(self.provision.break_replication_relationship.
assert_called_with(
array, target_device_id, source_device_id,
snap_name, extra_specs))
def test_cleanup_target_no_sync(self):
array = self.data.array
clone_volume = self.data.test_clone_volume
source_device_id = self.data.device_id
target_device_id = self.data.device_id2
snap_name = self.data.failed_resource
clone_name = clone_volume.name
extra_specs = self.data.extra_specs
with mock.patch.object(self.rest, 'get_sync_session',
return_value=None):
with mock.patch.object(self.common,
'_delete_from_srp'):
self.common._cleanup_target(
array, target_device_id, source_device_id,
clone_name, snap_name, extra_specs)
self.common._delete_from_srp.assert_called_once_with(
array, target_device_id, clone_name,
extra_specs)
    @mock.patch.object(
        provision.VMAXProvision,
        'delete_volume_snap')
    @mock.patch.object(
        provision.VMAXProvision,
        'break_replication_relationship')
    def test_sync_check_temp_snap(self, mock_break, mock_delete):
        """_sync_check breaks and deletes temp snaps, including legacy
        'EMC_SMI_'-prefixed ones."""
        array = self.data.array
        device_id = self.data.device_id
        target = self.data.volume_details[1]['volumeId']
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        # 'temp-' prefix marks a driver-created temporary snap: it must be
        # unlinked from its target and then deleted.
        snap_name = 'temp-1'
        with mock.patch.object(self.rest, 'get_volume_snap',
                               return_value=snap_name):
            self.common._sync_check(array, device_id, volume_name,
                                    extra_specs)
            mock_break.assert_called_with(
                array, target, device_id, snap_name, extra_specs)
            mock_delete.assert_called_with(array, snap_name, device_id)
        # Delete legacy temp snap
        mock_delete.reset_mock()
        # Legacy SMI-S era temp snaps carry the 'EMC_SMI_' prefix and have
        # no linked targets, so only the delete should fire.
        snap_name2 = 'EMC_SMI_12345'
        sessions = [{'source_vol': device_id,
                     'snap_name': snap_name2,
                     'target_vol_list': []}]
        with mock.patch.object(self.rest, 'find_snap_vx_sessions',
                               return_value=sessions):
            with mock.patch.object(self.rest, 'get_volume_snap',
                                   return_value=snap_name2):
                self.common._sync_check(array, device_id, volume_name,
                                        extra_specs)
                mock_delete.assert_called_once_with(
                    array, snap_name2, device_id)
    @mock.patch.object(
        provision.VMAXProvision,
        'delete_volume_snap')
    @mock.patch.object(
        provision.VMAXProvision,
        'break_replication_relationship')
    def test_sync_check_not_temp_snap(self, mock_break, mock_delete):
        """Test _sync_check breaks but does not delete a non-temp snap."""
        array = self.data.array
        device_id = self.data.device_id
        target = self.data.volume_details[1]['volumeId']
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        snap_name = 'OS-1'
        sessions = [{'source_vol': device_id,
                     'snap_name': snap_name,
                     'target_vol_list': [target]}]
        with mock.patch.object(self.rest, 'find_snap_vx_sessions',
                               return_value=sessions):
            self.common._sync_check(array, device_id, volume_name,
                                    extra_specs)
            mock_break.assert_called_with(
                array, target, device_id, snap_name, extra_specs)
            # 'OS-' snaps are user-owned, so they must not be deleted.
            mock_delete.assert_not_called()
    @mock.patch.object(
        provision.VMAXProvision,
        'break_replication_relationship')
    def test_sync_check_no_sessions(self, mock_break):
        """Test _sync_check is a no-op when no snapvx sessions are found."""
        array = self.data.array
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        with mock.patch.object(self.rest, 'find_snap_vx_sessions',
                               return_value=None):
            self.common._sync_check(array, device_id, volume_name,
                                    extra_specs)
            mock_break.assert_not_called()
    def test_manage_existing_success(self):
        """Test manage_existing returns the expected provider_location update."""
        external_ref = {u'source-name': u'00002'}
        provider_location = {'device_id': u'00002', 'array': u'000197800123'}
        ref_update = {'provider_location': six.text_type(provider_location)}
        with mock.patch.object(
                self.common, '_check_lun_valid_for_cinder_management'):
            model_update = self.common.manage_existing(
                self.data.test_volume, external_ref)
            self.assertEqual(ref_update, model_update)
    @mock.patch.object(
        rest.VMAXRest, 'get_masking_views_from_storage_group',
        return_value=None)
    @mock.patch.object(
        rest.VMAXRest, 'is_vol_in_rep_session',
        return_value=(False, False, None))
    def test_check_lun_valid_for_cinder_management(self, mock_rep, mock_mv):
        """Test a lun with no masking views or rep sessions passes validation."""
        external_ref = {u'source-name': u'00001'}
        # Must not raise.
        self.common._check_lun_valid_for_cinder_management(
            self.data.array, '00001',
            self.data.test_volume.id, external_ref)
@mock.patch.object(
rest.VMAXRest, 'get_volume',
side_effect=[
None,
VMAXCommonData.volume_details[0],
VMAXCommonData.volume_details[0],
VMAXCommonData.volume_details[1]])
@mock.patch.object(
rest.VMAXRest, 'get_masking_views_from_storage_group',
side_effect=[VMAXCommonData.sg_details[1]['maskingview'],
None])
@mock.patch.object(rest.VMAXRest, 'get_storage_groups_from_volume',
return_value=[VMAXCommonData.defaultstoragegroup_name])
@mock.patch.object(rest.VMAXRest, 'is_vol_in_rep_session',
side_effect=[(True, False, []), (False, False, None)])
def test_check_lun_valid_for_cinder_management_exception(
self, mock_rep, mock_sg, mock_mvs, mock_get_vol):
external_ref = {u'source-name': u'00001'}
for x in range(0, 3):
self.assertRaises(
exception.ManageExistingInvalidReference,
self.common._check_lun_valid_for_cinder_management,
self.data.array, '00001',
self.data.test_volume.id, external_ref)
self.assertRaises(exception.ManageExistingAlreadyManaged,
self.common._check_lun_valid_for_cinder_management,
self.data.array, '00001',
self.data.test_volume.id, external_ref)
def test_manage_existing_get_size(self):
external_ref = {u'source-name': u'00001'}
size = self.common.manage_existing_get_size(
self.data.test_volume, external_ref)
self.assertEqual(2, size)
    def test_manage_existing_get_size_exception(self):
        """Test manage_existing_get_size rejects a non-integral GB size."""
        external_ref = {u'source-name': u'00001'}
        with mock.patch.object(self.rest, 'get_size_of_device_on_array',
                               return_value=3.5):
            self.assertRaises(exception.ManageExistingInvalidReference,
                              self.common.manage_existing_get_size,
                              self.data.test_volume, external_ref)
    @mock.patch.object(common.VMAXCommon,
                       '_remove_vol_and_cleanup_replication')
    def test_unmanage_success(self, mock_rm):
        """Test unmanage renames the backend volume back to its device name."""
        volume = self.data.test_volume
        with mock.patch.object(self.rest, 'rename_volume'):
            self.common.unmanage(volume)
            self.rest.rename_volume.assert_called_once_with(
                self.data.array, self.data.device_id,
                self.data.test_volume.id)
    def test_unmanage_device_not_found(self):
        """Test unmanage does nothing when the device is not on the array."""
        volume = self.data.test_volume
        with mock.patch.object(self.common, '_find_device_on_array',
                               return_value=None):
            with mock.patch.object(self.rest, 'rename_volume'):
                self.common.unmanage(volume)
                self.rest.rename_volume.assert_not_called()
    @mock.patch.object(common.VMAXCommon,
                       '_slo_workload_migration')
    def test_retype(self, mock_migrate):
        """Test retype delegates to migration, skipping missing/attached vols."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs_intervals_set
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        volume = self.data.test_volume
        new_type = {'extra_specs': {}}
        host = {'host': self.data.new_host}
        self.common.retype(volume, new_type, host)
        mock_migrate.assert_called_once_with(
            device_id, volume, host, volume_name, new_type, extra_specs)
        mock_migrate.reset_mock()
        # Volume not found on the array: no migration attempted.
        with mock.patch.object(
                self.common, '_find_device_on_array', return_value=None):
            self.common.retype(volume, new_type, host)
            mock_migrate.assert_not_called()
        mock_migrate.reset_mock()
        # Attached volume: no migration attempted.
        volume2 = self.data.test_attached_volume
        self.common.retype(volume2, new_type, host)
        mock_migrate.assert_not_called()
    def test_slo_workload_migration_valid(self):
        """Test a valid slo/workload migration calls _migrate_volume."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        new_type = {'extra_specs': {}}
        volume = self.data.test_volume
        host = {'host': self.data.new_host}
        with mock.patch.object(self.common, '_migrate_volume'):
            self.common._slo_workload_migration(
                device_id, volume, host, volume_name, new_type, extra_specs)
            # 'Silver'/'OLTP' are parsed from the new_host string.
            self.common._migrate_volume.assert_called_once_with(
                extra_specs[utils.ARRAY], device_id,
                extra_specs[utils.SRP], 'Silver',
                'OLTP', volume_name, new_type, extra_specs)
    def test_slo_workload_migration_not_valid(self):
        """Test migration returns False when storage-assisted check fails."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        volume = self.data.test_volume
        new_type = {'extra_specs': {}}
        host = {'host': self.data.new_host}
        with mock.patch.object(self.common,
                               '_is_valid_for_storage_assisted_migration',
                               return_value=(False, 'Silver', 'OLTP')):
            migrate_status = self.common._slo_workload_migration(
                device_id, volume, host, volume_name, new_type, extra_specs)
            self.assertFalse(migrate_status)
def test_slo_workload_migration_same_hosts(self):
device_id = self.data.device_id
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
volume = self.data.test_volume
host = {'host': self.data.fake_host}
new_type = {'extra_specs': {}}
migrate_status = self.common._slo_workload_migration(
device_id, volume, host, volume_name, new_type, extra_specs)
self.assertFalse(migrate_status)
    def test_slo_workload_migration_same_host_change_compression(self):
        """Test same-host migration proceeds when compression setting changes."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        volume = self.data.test_volume
        host = {'host': self.data.fake_host}
        new_type = {'extra_specs': {utils.DISABLECOMPRESSION: "true"}}
        with mock.patch.object(
                self.common, '_is_valid_for_storage_assisted_migration',
                return_value=(True, self.data.slo, self.data.workload)):
            with mock.patch.object(self.common, '_migrate_volume'):
                migrate_status = self.common._slo_workload_migration(
                    device_id, volume, host, volume_name, new_type,
                    extra_specs)
                self.assertTrue(migrate_status)
                self.common._migrate_volume.assert_called_once_with(
                    extra_specs[utils.ARRAY], device_id,
                    extra_specs[utils.SRP], self.data.slo,
                    self.data.workload, volume_name, new_type, extra_specs)
    @mock.patch.object(masking.VMAXMasking, 'remove_and_reset_members')
    def test_migrate_volume_success(self, mock_remove):
        """Test successful migration, with and without existing storage groups."""
        with mock.patch.object(self.rest, 'is_volume_in_storagegroup',
                               return_value=True):
            device_id = self.data.device_id
            volume_name = self.data.test_volume.name
            extra_specs = self.data.extra_specs
            new_type = {'extra_specs': {}}
            migrate_status = self.common._migrate_volume(
                self.data.array, device_id, self.data.srp, self.data.slo,
                self.data.workload, volume_name, new_type, extra_specs)
            self.assertTrue(migrate_status)
            # The target specs built for remove_and_reset_members.
            target_extra_specs = {
                'array': self.data.array, 'interval': 3,
                'retries': 120, 'slo': self.data.slo,
                'srp': self.data.srp, 'workload': self.data.workload}
            mock_remove.assert_called_once_with(
                self.data.array, device_id, volume_name,
                target_extra_specs, reset=True)
            mock_remove.reset_mock()
            # Volume in no storage group: nothing to remove from.
            with mock.patch.object(
                    self.rest, 'get_storage_groups_from_volume',
                    return_value=[]):
                migrate_status = self.common._migrate_volume(
                    self.data.array, device_id, self.data.srp, self.data.slo,
                    self.data.workload, volume_name, new_type, extra_specs)
                self.assertTrue(migrate_status)
                mock_remove.assert_not_called()
    @mock.patch.object(masking.VMAXMasking, 'remove_and_reset_members')
    def test_migrate_volume_failed_get_new_sg_failed(self, mock_remove):
        """Test migration fails when the target storage group can't be made."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        new_type = {'extra_specs': {}}
        with mock.patch.object(
                self.masking, 'get_or_create_default_storage_group',
                side_effect=exception.VolumeBackendAPIException):
            migrate_status = self.common._migrate_volume(
                self.data.array, device_id, self.data.srp, self.data.slo,
                self.data.workload, volume_name, new_type, extra_specs)
            self.assertFalse(migrate_status)
    def test_migrate_volume_failed_vol_not_added(self):
        """Test migration fails when the volume is not in the new group."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        extra_specs = self.data.extra_specs
        new_type = {'extra_specs': {}}
        with mock.patch.object(
                self.rest, 'is_volume_in_storagegroup',
                return_value=False):
            migrate_status = self.common._migrate_volume(
                self.data.array, device_id, self.data.srp, self.data.slo,
                self.data.workload, volume_name, new_type, extra_specs)
            self.assertFalse(migrate_status)
    def test_is_valid_for_storage_assisted_migration_true(self):
        """Test valid migration returns (True, slo, workload) from the host."""
        device_id = self.data.device_id
        host = {'host': self.data.new_host}
        volume_name = self.data.test_volume.name
        ref_return = (True, 'Silver', 'OLTP')
        return_val = self.common._is_valid_for_storage_assisted_migration(
            device_id, host, self.data.array,
            self.data.srp, volume_name, False)
        self.assertEqual(ref_return, return_val)
        # No current sgs found
        with mock.patch.object(self.rest, 'get_storage_groups_from_volume',
                               return_value=None):
            return_val = self.common._is_valid_for_storage_assisted_migration(
                device_id, host, self.data.array, self.data.srp,
                volume_name, False)
            self.assertEqual(ref_return, return_val)
    def test_is_valid_for_storage_assisted_migration_false(self):
        """Test the rejection cases: bad host string, array, srp, or same sg."""
        device_id = self.data.device_id
        volume_name = self.data.test_volume.name
        ref_return = (False, None, None)
        # IndexError: host string missing a pool segment.
        host = {'host': 'HostX@Backend#Silver+SRP_1+000197800123'}
        return_val = self.common._is_valid_for_storage_assisted_migration(
            device_id, host, self.data.array,
            self.data.srp, volume_name, False)
        self.assertEqual(ref_return, return_val)
        # Wrong array
        host2 = {'host': 'HostX@Backend#Silver+OLTP+SRP_1+00012345678'}
        return_val = self.common._is_valid_for_storage_assisted_migration(
            device_id, host2, self.data.array,
            self.data.srp, volume_name, False)
        self.assertEqual(ref_return, return_val)
        # Wrong srp
        host3 = {'host': 'HostX@Backend#Silver+OLTP+SRP_2+000197800123'}
        return_val = self.common._is_valid_for_storage_assisted_migration(
            device_id, host3, self.data.array,
            self.data.srp, volume_name, False)
        self.assertEqual(ref_return, return_val)
        # Already in correct sg
        host4 = {'host': self.data.fake_host}
        return_val = self.common._is_valid_for_storage_assisted_migration(
            device_id, host4, self.data.array,
            self.data.srp, volume_name, False)
        self.assertEqual(ref_return, return_val)
    def test_find_volume_group_name_from_id(self):
        """Test resolving a storage group name from a group id."""
        array = self.data.array
        group_id = 'GrpId'
        group_name = None
        ref_group_name = self.data.storagegroup_name_with_id
        with mock.patch.object(
                self.rest, 'get_storage_group_list',
                return_value=self.data.sg_list_rep):
            group_name = self.common._find_volume_group_name_from_id(
                array, group_id)
        self.assertEqual(ref_group_name, group_name)
def test_find_volume_group_name_from_id_not_found(self):
array = self.data.array
group_id = 'GrpId'
group_name = None
group_name = self.common._find_volume_group_name_from_id(
array, group_id)
self.assertIsNone(group_name)
def test_find_volume_group(self):
group = self.data.test_group_1
array = self.data.array
volume_group = self.common._find_volume_group(array, group)
ref_group = self.data.sg_details_rep[0]
self.assertEqual(ref_group, volume_group)
def test_get_volume_device_ids(self):
array = self.data.array
volumes = [self.data.test_volume]
ref_device_ids = [self.data.device_id]
device_ids = self.common._get_volume_device_ids(volumes, array)
self.assertEqual(ref_device_ids, device_ids)
def test_get_members_of_volume_group(self):
array = self.data.array
group_name = self.data.storagegroup_name_source
ref_volumes = [self.data.device_id, self.data.device_id2]
member_device_ids = self.common._get_members_of_volume_group(
array, group_name)
self.assertEqual(ref_volumes, member_device_ids)
    def test_get_members_of_volume_group_empty(self):
        """Test member lookup returns None for an empty storage group."""
        array = self.data.array
        group_name = self.data.storagegroup_name_source
        with mock.patch.object(
                self.rest, 'get_volumes_in_storage_group',
                return_value=None):
            member_device_ids = self.common._get_members_of_volume_group(
                array, group_name
            )
        self.assertIsNone(member_device_ids)
    @mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                       return_value=True)
    def test_create_group_replica(self, mock_check):
        """Test _create_group_replica is called with the expected arguments."""
        # NOTE(review): this mocks the very method it then invokes, so it
        # only asserts the call-through, not the replica logic itself —
        # consider mocking the underlying provision/rest calls instead.
        source_group = self.data.test_group_1
        snap_name = self.data.group_snapshot_name
        with mock.patch.object(
                self.common,
                '_create_group_replica') as mock_create_replica:
            self.common._create_group_replica(
                source_group, snap_name)
            mock_create_replica.assert_called_once_with(
                source_group, snap_name)
    def test_create_group_replica_exception(self):
        """Test _create_group_replica raises on a failing source group."""
        source_group = self.data.test_group_failed
        snap_name = self.data.group_snapshot_name
        with mock.patch.object(
                volume_utils, 'is_group_a_cg_snapshot_type',
                return_value=True):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common._create_group_replica,
                              source_group,
                              snap_name)
    def test_create_group_snapshot(self):
        """Test create_group_snapshot returns an AVAILABLE status update."""
        context = None
        group_snapshot = self.data.test_group_snapshot_1
        snapshots = []
        ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
        with mock.patch.object(
                volume_utils, 'is_group_a_cg_snapshot_type',
                return_value=True):
            model_update, snapshots_model_update = (
                self.common.create_group_snapshot(
                    context, group_snapshot, snapshots))
            self.assertEqual(ref_model_update, model_update)
    def test_create_group_snapshot_exception(self):
        """Test create_group_snapshot raises for a failing group snapshot."""
        context = None
        group_snapshot = self.data.test_group_snapshot_failed
        snapshots = []
        with mock.patch.object(
                volume_utils, 'is_group_a_cg_snapshot_type',
                return_value=True):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common.create_group_snapshot,
                              context,
                              group_snapshot,
                              snapshots)
    def test_create_group(self):
        """Test create_group returns an AVAILABLE status update."""
        ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
        context = None
        group = self.data.test_group_1
        with mock.patch.object(
                volume_utils, 'is_group_a_cg_snapshot_type',
                return_value=True):
            model_update = self.common.create_group(context, group)
            self.assertEqual(ref_model_update, model_update)
    def test_create_group_exception(self):
        """Test create_group raises when backend group creation fails."""
        context = None
        group = self.data.test_group_snapshot_failed
        with mock.patch.object(
                volume_utils, 'is_group_a_cg_snapshot_type',
                return_value=True):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common.create_group,
                              context,
                              group)
    def test_delete_group_snapshot(self):
        """Test the public delete_group_snapshot returns DELETED status."""
        group_snapshot = self.data.test_group_snapshot_1
        snapshots = []
        context = None
        ref_model_update = {'status': fields.GroupSnapshotStatus.DELETED}
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True):
            model_update, snapshots_model_update = (
                self.common.delete_group_snapshot(context,
                                                  group_snapshot, snapshots))
            self.assertEqual(ref_model_update, model_update)
    def test_delete_group_snapshot_success(self):
        """Test the internal _delete_group_snapshot returns DELETED status."""
        group_snapshot = self.data.test_group_snapshot_1
        snapshots = []
        ref_model_update = {'status': fields.GroupSnapshotStatus.DELETED}
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True):
            model_update, snapshots_model_update = (
                self.common._delete_group_snapshot(group_snapshot,
                                                   snapshots))
            self.assertEqual(ref_model_update, model_update)
    def test_delete_group_snapshot_failed(self):
        """Test _delete_group_snapshot reports ERROR_DELETING on failure."""
        group_snapshot = self.data.test_group_snapshot_failed
        snapshots = []
        ref_model_update = (
            {'status': fields.GroupSnapshotStatus.ERROR_DELETING})
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True):
            model_update, snapshots_model_update = (
                self.common._delete_group_snapshot(group_snapshot,
                                                   snapshots))
            self.assertEqual(ref_model_update, model_update)
    def test_update_group(self):
        """Test update_group returns AVAILABLE after adding a volume."""
        group = self.data.test_group_1
        add_vols = [self.data.test_volume]
        remove_vols = []
        ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True):
            model_update, __, __ = self.common.update_group(group,
                                                            add_vols,
                                                            remove_vols)
            self.assertEqual(ref_model_update, model_update)
    @mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                       return_value=True)
    def test_update_group_not_found(self, mock_check):
        """Test update_group raises GroupNotFound when the group is missing."""
        group = self.data.test_group_1
        add_vols = []
        remove_vols = []
        with mock.patch.object(
                self.common, '_find_volume_group',
                return_value=None):
            self.assertRaises(exception.GroupNotFound,
                              self.common.update_group,
                              group,
                              add_vols,
                              remove_vols)
    @mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                       return_value=True)
    def test_update_group_exception(self, mock_check):
        """Test update_group propagates a backend API exception."""
        group = self.data.test_group_1
        add_vols = []
        remove_vols = []
        with mock.patch.object(
                self.common, '_find_volume_group',
                side_effect=exception.VolumeBackendAPIException):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common.update_group,
                              group, add_vols, remove_vols)
    def test_delete_group(self):
        """Test the public delete_group returns DELETED status."""
        group = self.data.test_group_1
        volumes = [self.data.test_volume]
        context = None
        ref_model_update = {'status': fields.GroupStatus.DELETED}
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True),\
            mock.patch.object(self.rest, 'get_volumes_in_storage_group',
                              return_value=[]):
            model_update, __ = self.common.delete_group(
                context, group, volumes)
            self.assertEqual(ref_model_update, model_update)
    def test_delete_group_success(self):
        """Test the internal _delete_group returns DELETED status."""
        group = self.data.test_group_1
        volumes = []
        ref_model_update = {'status': fields.GroupStatus.DELETED}
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True),\
            mock.patch.object(self.rest, 'get_volumes_in_storage_group',
                              return_value=[]):
            model_update, __ = self.common._delete_group(group, volumes)
            self.assertEqual(ref_model_update, model_update)
    def test_delete_group_already_deleted(self):
        """Test deleting an already-removed group still reports DELETED."""
        group = self.data.test_group_failed
        ref_model_update = {'status': fields.GroupStatus.DELETED}
        volumes = []
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True):
            model_update, __ = self.common._delete_group(group, volumes)
            self.assertEqual(ref_model_update, model_update)
    @mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                       return_value=True)
    def test_delete_group_failed(self, mock_check):
        """Test _delete_group reports ERROR_DELETING on a backend failure."""
        group = self.data.test_group_1
        volumes = []
        ref_model_update = {'status': fields.GroupStatus.ERROR_DELETING}
        with mock.patch.object(
                self.rest, 'delete_storage_group',
                side_effect=exception.VolumeBackendAPIException):
            model_update, __ = self.common._delete_group(
                group, volumes)
        self.assertEqual(ref_model_update, model_update)
    def test_create_group_from_src_success(self):
        """Test create_group_from_src (from snapshot) returns AVAILABLE."""
        context = None
        group = self.data.test_group_1
        group_snapshot = self.data.test_group_snapshot_1
        snapshots = []
        volumes = [self.data.test_volume]
        source_group = None
        source_vols = []
        ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
        with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
                               return_value=True):
            model_update, volumes_model_update = (
                self.common.create_group_from_src(
                    context, group, volumes,
                    group_snapshot, snapshots,
                    source_group, source_vols))
            self.assertEqual(ref_model_update, model_update)
class VMAXFCTest(test.TestCase):
    """Unit tests for the VMAX FC driver's delegation to VMAXCommon."""
    def setUp(self):
        """Build an FC driver against a fake REST session and config."""
        self.data = VMAXCommonData()
        super(VMAXFCTest, self).setUp()
        config_group = 'FCTests'
        self.fake_xml = FakeXML().create_fake_config_file(
            config_group, self.data.port_group_name_f)
        self.configuration = FakeConfiguration(self.fake_xml, config_group)
        rest.VMAXRest._establish_rest_session = mock.Mock(
            return_value=FakeRequestsSession())
        driver = fc.VMAXFCDriver(configuration=self.configuration)
        self.driver = driver
        self.common = self.driver.common
        self.masking = self.common.masking
        self.utils = self.common.utils
        self.utils.get_volumetype_extra_specs = (
            mock.Mock(return_value=self.data.vol_type_extra_specs))
    def test_create_volume(self):
        """Test create_volume delegates to common."""
        with mock.patch.object(self.common, 'create_volume'):
            self.driver.create_volume(self.data.test_volume)
            self.common.create_volume.assert_called_once_with(
                self.data.test_volume)
    def test_create_volume_from_snapshot(self):
        """Test create_volume_from_snapshot delegates to common."""
        volume = self.data.test_clone_volume
        snapshot = self.data.test_snapshot
        with mock.patch.object(self.common, 'create_volume_from_snapshot'):
            self.driver.create_volume_from_snapshot(volume, snapshot)
            self.common.create_volume_from_snapshot.assert_called_once_with(
                volume, snapshot)
    def test_create_cloned_volume(self):
        """Test create_cloned_volume delegates to common."""
        volume = self.data.test_clone_volume
        src_volume = self.data.test_volume
        with mock.patch.object(self.common, 'create_cloned_volume'):
            self.driver.create_cloned_volume(volume, src_volume)
            self.common.create_cloned_volume.assert_called_once_with(
                volume, src_volume)
    def test_delete_volume(self):
        """Test delete_volume delegates to common."""
        with mock.patch.object(self.common, 'delete_volume'):
            self.driver.delete_volume(self.data.test_volume)
            self.common.delete_volume.assert_called_once_with(
                self.data.test_volume)
    def test_create_snapshot(self):
        """Test create_snapshot passes the snapshot and its volume."""
        with mock.patch.object(self.common, 'create_snapshot'):
            self.driver.create_snapshot(self.data.test_snapshot)
            self.common.create_snapshot.assert_called_once_with(
                self.data.test_snapshot, self.data.test_snapshot.volume)
    def test_delete_snapshot(self):
        """Test delete_snapshot passes the snapshot and its volume."""
        with mock.patch.object(self.common, 'delete_snapshot'):
            self.driver.delete_snapshot(self.data.test_snapshot)
            self.common.delete_snapshot.assert_called_once_with(
                self.data.test_snapshot, self.data.test_snapshot.volume)
    def test_initialize_connection(self):
        """Test initialize_connection builds FC data via populate_data."""
        with mock.patch.object(self.common, 'initialize_connection',
                               return_value=self.data.fc_device_info):
            with mock.patch.object(self.driver, 'populate_data'):
                self.driver.initialize_connection(self.data.test_volume,
                                                  self.data.connector)
                self.common.initialize_connection.assert_called_once_with(
                    self.data.test_volume, self.data.connector)
                self.driver.populate_data.assert_called_once_with(
                    self.data.fc_device_info, self.data.test_volume,
                    self.data.connector)
    def test_populate_data(self):
        """Test populate_data builds the fibre_channel connection dict."""
        with mock.patch.object(self.driver, '_build_initiator_target_map',
                               return_value=([], {})):
            ref_data = {
                'driver_volume_type': 'fibre_channel',
                'data': {'target_lun': self.data.fc_device_info['hostlunid'],
                         'target_discovered': True,
                         'target_wwn': [],
                         'initiator_target_map': {}}}
            data = self.driver.populate_data(self.data.fc_device_info,
                                             self.data.test_volume,
                                             self.data.connector)
            self.assertEqual(ref_data, data)
            self.driver._build_initiator_target_map.assert_called_once_with(
                self.data.test_volume, self.data.connector)
    def test_terminate_connection(self):
        """Test terminate_connection delegates to common."""
        with mock.patch.object(self.common, 'terminate_connection'):
            self.driver.terminate_connection(self.data.test_volume,
                                             self.data.connector)
            self.common.terminate_connection.assert_called_once_with(
                self.data.test_volume, self.data.connector)
    def test_terminate_connection_no_zoning_mappings(self):
        """Test terminate_connection is skipped without zoning mappings."""
        with mock.patch.object(self.driver, '_get_zoning_mappings',
                               return_value=None):
            with mock.patch.object(self.common, 'terminate_connection'):
                self.driver.terminate_connection(self.data.test_volume,
                                                 self.data.connector)
                self.common.terminate_connection.assert_not_called()
    def test_get_zoning_mappings(self):
        """Test zoning mappings for both current and legacy volumes."""
        ref_mappings = self.data.zoning_mappings
        zoning_mappings = self.driver._get_zoning_mappings(
            self.data.test_volume, self.data.connector)
        self.assertEqual(ref_mappings, zoning_mappings)
        # Legacy vol
        zoning_mappings2 = self.driver._get_zoning_mappings(
            self.data.test_legacy_vol, self.data.connector)
        self.assertEqual(ref_mappings, zoning_mappings2)
    def test_get_zoning_mappings_no_mv(self):
        """Test zoning mappings are falsy when no masking view exists."""
        with mock.patch.object(self.common, 'get_masking_views_from_volume',
                               return_value=None):
            zoning_mappings = self.driver._get_zoning_mappings(
                self.data.test_volume, self.data.connector)
            self.assertFalse(zoning_mappings)
    def test_cleanup_zones_other_vols_mapped(self):
        """Test zones are kept when other volumes still use the masking view."""
        ref_data = {'driver_volume_type': 'fibre_channel',
                    'data': {}}
        data = self.driver._cleanup_zones(self.data.zoning_mappings)
        self.assertEqual(ref_data, data)
    def test_cleanup_zones_no_vols_mapped(self):
        """Test zone info is returned for removal when nothing else is mapped."""
        zoning_mappings = self.data.zoning_mappings
        ref_data = {'driver_volume_type': 'fibre_channel',
                    'data': {'target_wwn': zoning_mappings['target_wwns'],
                             'initiator_target_map':
                                 zoning_mappings['init_targ_map']}}
        with mock.patch.object(self.common, 'get_common_masking_views',
                               return_value=[]):
            data = self.driver._cleanup_zones(self.data.zoning_mappings)
            self.assertEqual(ref_data, data)
    def test_build_initiator_target_map(self):
        """Test the initiator:target wwn map built via the lookup service."""
        ref_target_map = {'123456789012345': ['543210987654321'],
                          '123456789054321': ['123450987654321']}
        with mock.patch.object(fczm_utils, 'create_lookup_service',
                               return_value=FakeLookupService()):
            driver = fc.VMAXFCDriver(configuration=self.configuration)
            with mock.patch.object(driver.common,
                                   'get_target_wwns_from_masking_view',
                                   return_value=self.data.target_wwns):
                targets, target_map = driver._build_initiator_target_map(
                    self.data.test_volume, self.data.connector)
                self.assertEqual(ref_target_map, target_map)
    def test_extend_volume(self):
        """Test extend_volume delegates to common."""
        with mock.patch.object(self.common, 'extend_volume'):
            self.driver.extend_volume(self.data.test_volume, '3')
            self.common.extend_volume.assert_called_once_with(
                self.data.test_volume, '3')
    def test_get_volume_stats(self):
        """Test stats are refreshed only when refresh is requested."""
        with mock.patch.object(self.driver, 'update_volume_stats'):
            # no refresh
            self.driver.get_volume_stats()
            self.driver.update_volume_stats.assert_not_called()
            # with refresh
            self.driver.get_volume_stats(True)
            self.driver.update_volume_stats.assert_called_once_with()
    def test_update_volume_stats(self):
        """Test update_volume_stats delegates to common."""
        with mock.patch.object(self.common, 'update_volume_stats',
                               return_value={}):
            self.driver.update_volume_stats()
            self.common.update_volume_stats.assert_called_once_with()
    def test_check_for_setup_error(self):
        """Test check_for_setup_error completes without raising."""
        self.driver.check_for_setup_error()
    def test_ensure_export(self):
        """Test the no-op ensure_export completes without raising."""
        self.driver.ensure_export('context', 'volume')
    def test_create_export(self):
        """Test the no-op create_export completes without raising."""
        self.driver.create_export('context', 'volume', 'connector')
    def test_remove_export(self):
        """Test the no-op remove_export completes without raising."""
        self.driver.remove_export('context', 'volume')
    def test_check_for_export(self):
        """Test the no-op check_for_export completes without raising."""
        self.driver.check_for_export('context', 'volume_id')
    def test_manage_existing(self):
        """Test manage_existing delegates to common."""
        with mock.patch.object(self.common, 'manage_existing',
                               return_value={}):
            external_ref = {u'source-name': u'00002'}
            self.driver.manage_existing(self.data.test_volume, external_ref)
            self.common.manage_existing.assert_called_once_with(
                self.data.test_volume, external_ref)
    def test_manage_existing_get_size(self):
        """Test manage_existing_get_size delegates to common."""
        with mock.patch.object(self.common, 'manage_existing_get_size',
                               return_value='1'):
            external_ref = {u'source-name': u'00002'}
            self.driver.manage_existing_get_size(
                self.data.test_volume, external_ref)
            self.common.manage_existing_get_size.assert_called_once_with(
                self.data.test_volume, external_ref)
    def test_unmanage_volume(self):
        """Test unmanage delegates to common."""
        with mock.patch.object(self.common, 'unmanage',
                               return_value={}):
            self.driver.unmanage(self.data.test_volume)
            self.common.unmanage.assert_called_once_with(
                self.data.test_volume)
    def test_retype(self):
        """Test retype forwards volume, new type and host to common."""
        host = {'host': self.data.new_host}
        new_type = {'extra_specs': {}}
        with mock.patch.object(self.common, 'retype',
                               return_value=True):
            self.driver.retype({}, self.data.test_volume, new_type, '', host)
            self.common.retype.assert_called_once_with(
                self.data.test_volume, new_type, host)
    def test_failover_host(self):
        """Test failover_host forwards the volume list to common."""
        with mock.patch.object(
                self.common, 'failover_host',
                return_value=(self.data.remote_array, [], [])) as mock_fo:
            self.driver.failover_host(self.data.ctx, [self.data.test_volume])
            mock_fo.assert_called_once_with([self.data.test_volume], None,
                                            None)
class VMAXISCSITest(test.TestCase):
    def setUp(self):
        """Build an iSCSI driver against a fake REST session and config."""
        self.data = VMAXCommonData()
        super(VMAXISCSITest, self).setUp()
        config_group = 'ISCSITests'
        self.fake_xml = FakeXML().create_fake_config_file(
            config_group, self.data.port_group_name_i)
        configuration = FakeConfiguration(self.fake_xml, config_group)
        rest.VMAXRest._establish_rest_session = mock.Mock(
            return_value=FakeRequestsSession())
        driver = iscsi.VMAXISCSIDriver(configuration=configuration)
        self.driver = driver
        self.common = self.driver.common
        self.masking = self.common.masking
        self.utils = self.common.utils
        self.utils.get_volumetype_extra_specs = (
            mock.Mock(return_value=self.data.vol_type_extra_specs))
    def test_create_volume(self):
        """Test create_volume delegates to common."""
        with mock.patch.object(self.common, 'create_volume'):
            self.driver.create_volume(self.data.test_volume)
            self.common.create_volume.assert_called_once_with(
                self.data.test_volume)
    def test_create_volume_from_snapshot(self):
        """Test create_volume_from_snapshot delegates to common."""
        volume = self.data.test_clone_volume
        snapshot = self.data.test_snapshot
        with mock.patch.object(self.common, 'create_volume_from_snapshot'):
            self.driver.create_volume_from_snapshot(volume, snapshot)
            self.common.create_volume_from_snapshot.assert_called_once_with(
                volume, snapshot)
    def test_create_cloned_volume(self):
        """Test create_cloned_volume delegates to common."""
        volume = self.data.test_clone_volume
        src_volume = self.data.test_volume
        with mock.patch.object(self.common, 'create_cloned_volume'):
            self.driver.create_cloned_volume(volume, src_volume)
            self.common.create_cloned_volume.assert_called_once_with(
                volume, src_volume)
    def test_delete_volume(self):
        """Test delete_volume delegates to common."""
        with mock.patch.object(self.common, 'delete_volume'):
            self.driver.delete_volume(self.data.test_volume)
            self.common.delete_volume.assert_called_once_with(
                self.data.test_volume)
    def test_create_snapshot(self):
        """Test create_snapshot passes the snapshot and its volume."""
        with mock.patch.object(self.common, 'create_snapshot'):
            self.driver.create_snapshot(self.data.test_snapshot)
            self.common.create_snapshot.assert_called_once_with(
                self.data.test_snapshot, self.data.test_snapshot.volume)
    def test_delete_snapshot(self):
        """Test delete_snapshot passes the snapshot and its volume."""
        with mock.patch.object(self.common, 'delete_snapshot'):
            self.driver.delete_snapshot(self.data.test_snapshot)
            self.common.delete_snapshot.assert_called_once_with(
                self.data.test_snapshot, self.data.test_snapshot.volume)
    def test_initialize_connection(self):
        """Test initialize_connection builds the expected device dict."""
        ref_dict = {'maskingview': self.data.masking_view_name_f,
                    'array': self.data.array,
                    'hostlunid': 3,
                    'device_id': self.data.device_id,
                    'ip_and_iqn': [{'ip': self.data.ip,
                                    'iqn': self.data.initiator}],
                    'is_multipath': False}
        with mock.patch.object(self.driver, 'get_iscsi_dict'):
            with mock.patch.object(
                    self.common, 'get_port_group_from_masking_view',
                    return_value=self.data.port_group_name_i):
                self.driver.initialize_connection(self.data.test_volume,
                                                  self.data.connector)
                self.driver.get_iscsi_dict.assert_called_once_with(
                    ref_dict, self.data.test_volume)
    def test_get_iscsi_dict_success(self):
        """get_iscsi_dict wraps the iSCSI properties in a driver dict."""
        ip_and_iqn = self.common._find_ip_and_iqns(
            self.data.array, self.data.port_group_name_i)
        host_lun_id = self.data.iscsi_device_info['hostlunid']
        volume = self.data.test_volume
        device_info = self.data.iscsi_device_info
        ref_data = {'driver_volume_type': 'iscsi', 'data': {}}
        with mock.patch.object(
                self.driver, 'vmax_get_iscsi_properties', return_value={}):
            data = self.driver.get_iscsi_dict(device_info, volume)
            self.assertEqual(ref_data, data)
            self.driver.vmax_get_iscsi_properties.assert_called_once_with(
                volume, ip_and_iqn, True, host_lun_id)
    def test_get_iscsi_dict_exception(self):
        """An empty ip_and_iqn entry raises VolumeBackendAPIException."""
        device_info = {'ip_and_iqn': ''}
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.get_iscsi_dict,
                          device_info, self.data.test_volume)
    def test_vmax_get_iscsi_properties_one_target_no_auth(self):
        """Single target, no CHAP: only the flat target_* keys appear."""
        vol = deepcopy(self.data.test_volume)
        ip_and_iqn = self.common._find_ip_and_iqns(
            self.data.array, self.data.port_group_name_i)
        host_lun_id = self.data.iscsi_device_info['hostlunid']
        ref_properties = {
            'target_discovered': True,
            'target_iqn': ip_and_iqn[0]['iqn'].split(",")[0],
            'target_portal': ip_and_iqn[0]['ip'] + ":3260",
            'target_lun': host_lun_id,
            'volume_id': self.data.test_volume.id}
        iscsi_properties = self.driver.vmax_get_iscsi_properties(
            vol, ip_and_iqn, True, host_lun_id)
        self.assertEqual(type(ref_properties), type(iscsi_properties))
        self.assertEqual(ref_properties, iscsi_properties)
    def test_vmax_get_iscsi_properties_multiple_targets(self):
        """Multiple targets: plural target_* lists plus the flat keys."""
        ip_and_iqn = [{'ip': self.data.ip, 'iqn': self.data.initiator},
                      {'ip': self.data.ip, 'iqn': self.data.iqn}]
        host_lun_id = self.data.iscsi_device_info['hostlunid']
        ref_properties = {
            'target_portals': (
                [t['ip'] + ":3260" for t in ip_and_iqn]),
            'target_iqns': (
                [t['iqn'].split(",")[0] for t in ip_and_iqn]),
            'target_luns': [host_lun_id] * len(ip_and_iqn),
            'target_discovered': True,
            'target_iqn': ip_and_iqn[0]['iqn'].split(",")[0],
            'target_portal': ip_and_iqn[0]['ip'] + ":3260",
            'target_lun': host_lun_id,
            'volume_id': self.data.test_volume.id}
        iscsi_properties = self.driver.vmax_get_iscsi_properties(
            self.data.test_volume, ip_and_iqn, True, host_lun_id)
        self.assertEqual(ref_properties, iscsi_properties)
    def test_vmax_get_iscsi_properties_auth(self):
        """provider_auth adds auth_method/username/password to the dict."""
        vol = deepcopy(self.data.test_volume)
        vol.provider_auth = "auth_method auth_username auth_secret"
        ip_and_iqn = [{'ip': self.data.ip, 'iqn': self.data.initiator},
                      {'ip': self.data.ip, 'iqn': self.data.iqn}]
        host_lun_id = self.data.iscsi_device_info['hostlunid']
        ref_properties = {
            'target_portals': (
                [t['ip'] + ":3260" for t in ip_and_iqn]),
            'target_iqns': (
                [t['iqn'].split(",")[0] for t in ip_and_iqn]),
            'target_luns': [host_lun_id] * len(ip_and_iqn),
            'target_discovered': True,
            'target_iqn': ip_and_iqn[0]['iqn'].split(",")[0],
            'target_portal': ip_and_iqn[0]['ip'] + ":3260",
            'target_lun': host_lun_id,
            'volume_id': self.data.test_volume.id,
            'auth_method': 'auth_method',
            'auth_username': 'auth_username',
            'auth_password': 'auth_secret'}
        iscsi_properties = self.driver.vmax_get_iscsi_properties(
            vol, ip_and_iqn, True, host_lun_id)
        self.assertEqual(ref_properties, iscsi_properties)
def test_terminate_connection(self):
with mock.patch.object(self.common, 'terminate_connection'):
self.driver.terminate_connection(self.data.test_volume,
self.data.connector)
self.common.terminate_connection.assert_called_once_with(
self.data.test_volume, self.data.connector)
def test_extend_volume(self):
with mock.patch.object(self.common, 'extend_volume'):
self.driver.extend_volume(self.data.test_volume, '3')
self.common.extend_volume.assert_called_once_with(
self.data.test_volume, '3')
def test_get_volume_stats(self):
with mock.patch.object(self.driver, 'update_volume_stats'):
# no refresh
self.driver.get_volume_stats()
self.driver.update_volume_stats.assert_not_called()
# with refresh
self.driver.get_volume_stats(True)
self.driver.update_volume_stats.assert_called_once_with()
def test_update_volume_stats(self):
with mock.patch.object(self.common, 'update_volume_stats',
return_value={}):
self.driver.update_volume_stats()
self.common.update_volume_stats.assert_called_once_with()
    def test_check_for_setup_error(self):
        """check_for_setup_error must complete without raising."""
        self.driver.check_for_setup_error()
    def test_ensure_export(self):
        """ensure_export is a no-op stub; exercise it for coverage."""
        self.driver.ensure_export('context', 'volume')
    def test_create_export(self):
        """create_export is a no-op stub; exercise it for coverage."""
        self.driver.create_export('context', 'volume', 'connector')
    def test_remove_export(self):
        """remove_export is a no-op stub; exercise it for coverage."""
        self.driver.remove_export('context', 'volume')
    def test_check_for_export(self):
        """check_for_export is a no-op stub; exercise it for coverage."""
        self.driver.check_for_export('context', 'volume_id')
def test_manage_existing(self):
with mock.patch.object(self.common, 'manage_existing',
return_value={}):
external_ref = {u'source-name': u'00002'}
self.driver.manage_existing(self.data.test_volume, external_ref)
self.common.manage_existing.assert_called_once_with(
self.data.test_volume, external_ref)
def test_manage_existing_get_size(self):
with mock.patch.object(self.common, 'manage_existing_get_size',
return_value='1'):
external_ref = {u'source-name': u'00002'}
self.driver.manage_existing_get_size(
self.data.test_volume, external_ref)
self.common.manage_existing_get_size.assert_called_once_with(
self.data.test_volume, external_ref)
def test_unmanage_volume(self):
with mock.patch.object(self.common, 'unmanage',
return_value={}):
self.driver.unmanage(self.data.test_volume)
self.common.unmanage.assert_called_once_with(
self.data.test_volume)
def test_retype(self):
host = {'host': self.data.new_host}
new_type = {'extra_specs': {}}
with mock.patch.object(self.common, 'retype',
return_value=True):
self.driver.retype({}, self.data.test_volume, new_type, '', host)
self.common.retype.assert_called_once_with(
self.data.test_volume, new_type, host)
def test_failover_host(self):
with mock.patch.object(self.common, 'failover_host',
return_value={}) as mock_fo:
self.driver.failover_host({}, [self.data.test_volume])
mock_fo.assert_called_once_with([self.data.test_volume], None,
None)
class VMAXMaskingTest(test.TestCase):
    def setUp(self):
        """Build iSCSI and FC VMAXCommon instances with discovery stubbed."""
        self.data = VMAXCommonData()
        super(VMAXMaskingTest, self).setUp()
        configuration = mock.Mock()
        configuration.safe_get.return_value = 'MaskingTests'
        configuration.config_group = 'MaskingTests'
        # Stub out array discovery at class level; restored in tearDown.
        self._gather_info = common.VMAXCommon._gather_info
        common.VMAXCommon._gather_info = mock.Mock()
        driver = common.VMAXCommon(
            'iSCSI', common.VMAXCommon.VERSION, configuration=configuration)
        driver_fc = common.VMAXCommon(
            'FC', common.VMAXCommon.VERSION, configuration=configuration)
        self.driver = driver
        self.driver_fc = driver_fc
        self.mask = self.driver.masking
        self.extra_specs = self.data.extra_specs
        self.extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_i
        self.maskingviewdict = self.driver._populate_masking_dict(
            self.data.test_volume, self.data.connector, self.extra_specs)
        self.maskingviewdict['extra_specs'] = self.extra_specs
        self.device_id = self.data.device_id
        self.volume_name = self.data.volume_details[0]['volume_identifier']
    def tearDown(self):
        """Restore the real VMAXCommon._gather_info patched in setUp."""
        super(VMAXMaskingTest, self).tearDown()
        common.VMAXCommon._gather_info = self._gather_info
    @mock.patch.object(
        masking.VMAXMasking,
        'get_or_create_masking_view_and_map_lun')
    def test_setup_masking_view(self, mock_get_or_create_mv):
        """setup_masking_view delegates to the get-or-create helper."""
        self.driver.masking.setup_masking_view(
            self.data.array, self.maskingviewdict, self.extra_specs)
        mock_get_or_create_mv.assert_called_once()
    @mock.patch.object(
        masking.VMAXMasking,
        '_check_adding_volume_to_storage_group')
    @mock.patch.object(
        masking.VMAXMasking,
        '_move_vol_from_default_sg',
        return_value=None)
    @mock.patch.object(
        masking.VMAXMasking,
        '_get_or_create_masking_view',
        side_effect=[None, "Error in masking view retrieval",
                     exception.VolumeBackendAPIException])
    @mock.patch.object(
        rest.VMAXRest,
        'get_element_from_masking_view',
        side_effect=[VMAXCommonData.port_group_name_i, Exception])
    def test_get_or_create_masking_view_and_map_lun(
            self, mock_masking_view_element, mock_masking, mock_move,
            mock_add_volume):
        """Success, MV-retrieval error, and no-SLO error paths."""
        # Call 1: masking view resolved cleanly -> rollback dict returned.
        rollback_dict = (
            self.driver.masking.get_or_create_masking_view_and_map_lun(
                self.data.array, self.maskingviewdict['maskingview_name'],
                self.maskingviewdict, self.extra_specs))
        self.assertEqual(self.maskingviewdict, rollback_dict)
        # Call 2: _get_or_create_masking_view reports an error string.
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.driver.masking.get_or_create_masking_view_and_map_lun,
            self.data.array, self.maskingviewdict['maskingview_name'],
            self.maskingviewdict, self.extra_specs)
        # Call 3: slo cleared and the MV element lookup raises.
        self.maskingviewdict['slo'] = None
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.driver.masking.get_or_create_masking_view_and_map_lun,
            self.data.array, self.maskingviewdict['maskingview_name'],
            self.maskingviewdict, self.extra_specs)
    @mock.patch.object(
        masking.VMAXMasking,
        '_check_adding_volume_to_storage_group',
        return_value=None)
    @mock.patch.object(
        rest.VMAXRest,
        'move_volume_between_storage_groups',
        side_effect=[None, exception.VolumeBackendAPIException(data='')])
    @mock.patch.object(
        rest.VMAXRest,
        'is_volume_in_storagegroup',
        side_effect=[True, False, True])
    def test_move_vol_from_default_sg(
            self, mock_volume_in_sg, mock_move_volume, mock_add):
        """Move succeeds, is skipped, then fails with an error message."""
        msg = None
        # Iterations consume is_volume_in_storagegroup [True, False]; the
        # move/add mocks are still only called once across both passes.
        for x in range(0, 2):
            msg = self.driver.masking._move_vol_from_default_sg(
                self.data.array, self.device_id, self.volume_name,
                self.data.defaultstoragegroup_name,
                self.data.storagegroup_name_i, self.extra_specs)
            mock_move_volume.assert_called_once()
            mock_add.assert_called_once()
        self.assertIsNone(msg)
        # Third call: the move raises, so a non-None message is returned.
        msg = self.driver.masking._move_vol_from_default_sg(
            self.data.array, self.device_id, self.volume_name,
            self.data.defaultstoragegroup_name,
            self.data.storagegroup_name_i, self.extra_specs)
        self.assertIsNotNone(msg)
    @mock.patch.object(
        rest.VMAXRest,
        'get_masking_view',
        side_effect=[VMAXCommonData.maskingview,
                     VMAXCommonData.maskingview, None])
    @mock.patch.object(
        masking.VMAXMasking,
        '_validate_existing_masking_view',
        side_effect=[(VMAXCommonData.maskingview[1]['storageGroupId'],
                      None), (None, "Error Message")])
    @mock.patch.object(
        masking.VMAXMasking,
        '_create_new_masking_view',
        return_value=None)
    def test_get_or_create_masking_view(
            self, mock_create_mv, mock_validate_mv,
            mock_get_mv):
        """Existing MV (valid, then invalid), then missing MV is created."""
        for x in range(0, 3):
            self.driver.masking._get_or_create_masking_view(
                self.data.array, self.maskingviewdict,
                self.data.defaultstoragegroup_name, self.extra_specs)
        # Only the third pass (get_masking_view -> None) creates a new MV.
        mock_create_mv.assert_called_once()
    @mock.patch.object(
        masking.VMAXMasking,
        '_get_or_create_storage_group',
        side_effect=["Storage group not found", None,
                     "Storage group not found", None, None, None,
                     None, None, None, None, None])
    @mock.patch.object(
        masking.VMAXMasking,
        '_check_port_group',
        side_effect=[(None, "Port group error"), (None, None), (None, None),
                     (None, None)])
    @mock.patch.object(
        masking.VMAXMasking,
        '_get_or_create_initiator_group',
        side_effect=[(None, "Initiator group error"), (None, None),
                     (None, None)])
    @mock.patch.object(
        masking.VMAXMasking,
        '_move_vol_from_default_sg',
        side_effect=["Storage group error", None])
    @mock.patch.object(
        masking.VMAXMasking,
        'create_masking_view',
        return_value=None)
    def test_create_new_masking_view(
            self, mock_create_mv, mock_move, mock_create_IG,
            mock_check_PG, mock_create_SG):
        """Walk each failure branch; only the final pass creates the MV."""
        # The side_effect sequences are consumed in order across the six
        # passes, failing a different prerequisite each time.
        for x in range(0, 6):
            self.driver.masking._create_new_masking_view(
                self.data.array, self.maskingviewdict,
                self.maskingviewdict['maskingview_name'],
                self.data.defaultstoragegroup_name, self.extra_specs)
        mock_create_mv.assert_called_once()
    @mock.patch.object(
        masking.VMAXMasking,
        '_check_existing_storage_group',
        side_effect=[(VMAXCommonData.storagegroup_name_i, None),
                     (VMAXCommonData.storagegroup_name_i, None),
                     (None, "Error Checking existing storage group")])
    @mock.patch.object(
        rest.VMAXRest,
        'get_element_from_masking_view',
        return_value=VMAXCommonData.port_group_name_i)
    @mock.patch.object(
        masking.VMAXMasking,
        '_check_port_group',
        side_effect=[(None, None), (None, "Error checking pg")])
    @mock.patch.object(
        masking.VMAXMasking,
        '_check_existing_initiator_group',
        return_value=(VMAXCommonData.initiatorgroup_name_i, None))
    def test_validate_existing_masking_view(
            self, mock_check_ig, mock_check_pg, mock_get_mv_element,
            mock_check_sg):
        """Validate an existing MV across SG/PG success and failure paths."""
        for x in range(0, 3):
            self.driver.masking._validate_existing_masking_view(
                self.data.array, self.maskingviewdict,
                self.maskingviewdict['maskingview_name'],
                self.data.defaultstoragegroup_name, self.extra_specs)
        self.assertEqual(3, mock_check_sg.call_count)
        mock_get_mv_element.assert_called_with(
            self.data.array, self.maskingviewdict['maskingview_name'],
            portgroup=True)
        # The IG check only runs on the pass where SG and PG both validate.
        mock_check_ig.assert_called_once()
    @mock.patch.object(
        rest.VMAXRest,
        'get_storage_group',
        side_effect=[VMAXCommonData.storagegroup_name_i, None, None])
    @mock.patch.object(
        provision.VMAXProvision,
        'create_storage_group',
        side_effect=[VMAXCommonData.storagegroup_name_i, None])
    def test_get_or_create_storage_group(self, mock_sg, mock_get_sg):
        """SG found first, created second; parent=True path also creates."""
        for x in range(0, 2):
            self.driver.masking._get_or_create_storage_group(
                self.data.array, self.maskingviewdict,
                self.data.storagegroup_name_i, self.extra_specs)
        self.driver.masking._get_or_create_storage_group(
            self.data.array, self.maskingviewdict,
            self.data.storagegroup_name_i, self.extra_specs, True)
        self.assertEqual(3, mock_get_sg.call_count)
        self.assertEqual(2, mock_sg.call_count)
    @mock.patch.object(
        masking.VMAXMasking,
        '_move_vol_from_default_sg',
        return_value=None)
    @mock.patch.object(
        masking.VMAXMasking,
        '_get_or_create_storage_group',
        return_value=None)
    @mock.patch.object(
        rest.VMAXRest,
        'get_element_from_masking_view',
        return_value=VMAXCommonData.parent_sg_i)
    @mock.patch.object(
        rest.VMAXRest,
        'is_child_sg_in_parent_sg',
        side_effect=[True, False])
    @mock.patch.object(
        masking.VMAXMasking,
        '_check_add_child_sg_to_parent_sg',
        return_value=None)
    def test_check_existing_storage_group_success(
            self, mock_add_sg, mock_is_child, mock_get_mv_element,
            mock_create_sg, mock_move):
        """Existing child SG is reused; a missing child SG is created."""
        masking_view_dict = deepcopy(self.data.masking_view_dict)
        masking_view_dict['extra_specs'] = self.data.extra_specs
        # Case 1: both parent and child SG exist -> no create call.
        with mock.patch.object(self.driver.rest, 'get_storage_group',
                               side_effect=[
                                   VMAXCommonData.parent_sg_i,
                                   VMAXCommonData.storagegroup_name_i]):
            _, msg = (
                self.driver.masking._check_existing_storage_group(
                    self.data.array, self.maskingviewdict['maskingview_name'],
                    self.data.defaultstoragegroup_name, masking_view_dict))
            self.assertIsNone(msg)
            mock_create_sg.assert_not_called()
        # Case 2: child SG missing -> it is created exactly once.
        with mock.patch.object(self.driver.rest, 'get_storage_group',
                               side_effect=[
                                   VMAXCommonData.parent_sg_i, None]):
            _, msg = (
                self.driver.masking._check_existing_storage_group(
                    self.data.array, self.maskingviewdict['maskingview_name'],
                    self.data.defaultstoragegroup_name, masking_view_dict))
            self.assertIsNone(msg)
            mock_create_sg.assert_called_once_with(
                self.data.array, masking_view_dict,
                VMAXCommonData.storagegroup_name_f,
                self.data.extra_specs)
    @mock.patch.object(
        masking.VMAXMasking,
        '_move_vol_from_default_sg',
        side_effect=[None, "Error Message"])
    @mock.patch.object(
        rest.VMAXRest,
        'is_child_sg_in_parent_sg',
        side_effect=[True, False, False])
    @mock.patch.object(
        rest.VMAXRest,
        'get_element_from_masking_view',
        return_value=VMAXCommonData.parent_sg_i)
    @mock.patch.object(
        rest.VMAXRest,
        'get_storage_group',
        side_effect=[None, VMAXCommonData.parent_sg_i, None,
                     VMAXCommonData.parent_sg_i, None,
                     VMAXCommonData.parent_sg_i, None])
    def test_check_existing_storage_group_failed(
            self, mock_get_sg, mock_get_mv_element, mock_child, mock_move):
        """Every SG lookup combination here yields an error message."""
        masking_view_dict = deepcopy(self.data.masking_view_dict)
        masking_view_dict['extra_specs'] = self.data.extra_specs
        for x in range(0, 4):
            _, msg = (
                self.driver.masking._check_existing_storage_group(
                    self.data.array, self.maskingviewdict['maskingview_name'],
                    self.data.defaultstoragegroup_name, masking_view_dict))
            self.assertIsNotNone(msg)
        # The seven side_effect entries must all be consumed.
        self.assertEqual(7, mock_get_sg.call_count)
        self.assertEqual(1, mock_move.call_count)
    @mock.patch.object(rest.VMAXRest, 'get_portgroup',
                       side_effect=[VMAXCommonData.port_group_name_i, None])
    def test_check_port_group(
            self, mock_get_pg):
        """PG found then missing; only the final (missing) msg is asserted."""
        for x in range(0, 2):
            _, msg = self.driver.masking._check_port_group(
                self.data.array, self.maskingviewdict['maskingview_name'])
        self.assertIsNotNone(msg)
        self.assertEqual(2, mock_get_pg.call_count)
    @mock.patch.object(
        masking.VMAXMasking, '_find_initiator_group',
        side_effect=[VMAXCommonData.initiatorgroup_name_i, None, None])
    @mock.patch.object(masking.VMAXMasking, '_create_initiator_group',
                       side_effect=[VMAXCommonData.initiatorgroup_name_i, None]
                       )
    def test_get_or_create_initiator_group(self, mock_create_ig, mock_find_ig):
        """IG found, IG created, then creation failure returns a message."""
        # Call 1: IG already exists -> no create.
        self.driver.masking._get_or_create_initiator_group(
            self.data.array, self.data.initiatorgroup_name_i,
            self.data.connector, self.extra_specs)
        mock_create_ig.assert_not_called()
        # Call 2: IG missing but creation succeeds -> no error message.
        found_init_group, msg = (
            self.driver.masking._get_or_create_initiator_group(
                self.data.array, self.data.initiatorgroup_name_i,
                self.data.connector, self.extra_specs))
        self.assertIsNone(msg)
        # Call 3: IG missing and creation returns None -> error message.
        found_init_group, msg = (
            self.driver.masking._get_or_create_initiator_group(
                self.data.array, self.data.initiatorgroup_name_i,
                self.data.connector, self.extra_specs))
        self.assertIsNotNone(msg)
    def test_check_existing_initiator_group(self):
        """The IG attached to the masking view is returned unchanged."""
        with mock.patch.object(
                rest.VMAXRest, 'get_element_from_masking_view',
                return_value=VMAXCommonData.inititiatorgroup):
            ig_from_mv, msg = (
                self.driver.masking._check_existing_initiator_group(
                    self.data.array, self.maskingviewdict['maskingview_name'],
                    self.maskingviewdict, self.data.storagegroup_name_i,
                    self.data.port_group_name_i, self.extra_specs))
            self.assertEqual(self.data.inititiatorgroup, ig_from_mv)
    def test_check_adding_volume_to_storage_group(self):
        """Volume already in the SG yields no error message."""
        with mock.patch.object(
                masking.VMAXMasking, '_create_initiator_group'):
            with mock.patch.object(
                    rest.VMAXRest, 'is_volume_in_storagegroup',
                    side_effect=[True, False]):
                msg = (
                    self.driver.masking._check_adding_volume_to_storage_group(
                        self.data.array, self.device_id,
                        self.data.storagegroup_name_i,
                        self.maskingviewdict[utils.VOL_NAME],
                        self.maskingviewdict[utils.EXTRA_SPECS]))
                self.assertIsNone(msg)
                # NOTE(review): the result of this second call (volume NOT
                # in the SG) is never asserted - consider asserting msg here
                # as well once the expected value is confirmed.
                msg = (
                    self.driver.masking._check_adding_volume_to_storage_group(
                        self.data.array, self.device_id,
                        self.data.storagegroup_name_i,
                        self.maskingviewdict[utils.VOL_NAME],
                        self.maskingviewdict[utils.EXTRA_SPECS]))
@mock.patch.object(rest.VMAXRest, 'add_vol_to_sg')
def test_add_volume_to_storage_group(self, mock_add_volume):
self.driver.masking.add_volume_to_storage_group(
self.data.array, self.device_id, self.data.storagegroup_name_i,
self.volume_name, self.extra_specs)
mock_add_volume.assert_called_once()
    @mock.patch.object(rest.VMAXRest, 'remove_vol_from_sg')
    def test_remove_vol_from_storage_group(self, mock_remove_volume):
        """Removal raises if the volume is still in the SG afterwards."""
        with mock.patch.object(
                rest.VMAXRest, 'is_volume_in_storagegroup',
                side_effect=[False, True]):
            # Volume gone after removal: success.
            self.driver.masking.remove_vol_from_storage_group(
                self.data.array, self.device_id, self.data.storagegroup_name_i,
                self.volume_name, self.extra_specs)
            mock_remove_volume.assert_called_once()
            # Volume still present after removal: exception raised.
            self.assertRaises(
                exception.VolumeBackendAPIException,
                self.driver.masking.remove_vol_from_storage_group,
                self.data.array, self.device_id, self.data.storagegroup_name_i,
                self.volume_name, self.extra_specs)
    def test_find_initiator_names(self):
        """iSCSI uses 'initiator', FC uses 'wwpns'; missing values raise."""
        foundinitiatornames = self.driver.masking.find_initiator_names(
            self.data.connector)
        self.assertEqual(self.data.connector['initiator'],
                         foundinitiatornames[0])
        foundinitiatornames = self.driver_fc.masking.find_initiator_names(
            self.data.connector)
        self.assertEqual(self.data.connector['wwpns'][0],
                         foundinitiatornames[0])
        # Connector with no usable initiator info raises for both protocols.
        connector = {'ip': self.data.ip, 'initiator': None, 'host': 'HostX'}
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.driver.masking.find_initiator_names, connector)
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.driver_fc.masking.find_initiator_names, connector)
    def test_find_initiator_group(self):
        """IG is returned for a known initiator, None for an unknown one."""
        with mock.patch.object(
                rest.VMAXRest, 'get_in_use_initiator_list_from_array',
                return_value=self.data.initiator_list[2]['initiatorId']):
            with mock.patch.object(
                    rest.VMAXRest, 'get_initiator_group_from_initiator',
                    return_value=self.data.initiator_list):
                found_init_group_nam = (
                    self.driver.masking._find_initiator_group(
                        self.data.array, ['FA-1D:4:123456789012345']))
                self.assertEqual(self.data.initiator_list,
                                 found_init_group_nam)
                found_init_group_nam = (
                    self.driver.masking._find_initiator_group(
                        self.data.array, ['Error']))
                self.assertIsNone(found_init_group_nam)
    def test_create_masking_view(self):
        """REST success returns None; a REST exception returns a message."""
        with mock.patch.object(rest.VMAXRest, 'create_masking_view',
                               side_effect=[None, Exception]):
            error_message = self.driver.masking.create_masking_view(
                self.data.array, self.maskingviewdict['maskingview_name'],
                self.data.storagegroup_name_i, self.data.port_group_name_i,
                self.data.initiatorgroup_name_i, self.extra_specs)
            self.assertIsNone(error_message)
            error_message = self.driver.masking.create_masking_view(
                self.data.array, self.maskingviewdict['maskingview_name'],
                self.data.storagegroup_name_i, self.data.port_group_name_i,
                self.data.initiatorgroup_name_i, self.extra_specs)
            self.assertIsNotNone(error_message)
    @mock.patch.object(masking.VMAXMasking, '_check_ig_rollback')
    def test_check_if_rollback_action_for_masking_required(self,
                                                           mock_check_ig):
        """SG lookup failure propagates; otherwise members reset quietly."""
        with mock.patch.object(rest.VMAXRest,
                               'get_storage_groups_from_volume',
                               side_effect=[
                                   exception.VolumeBackendAPIException,
                                   self.data.defaultstoragegroup_name,
                                   self.data.defaultstoragegroup_name, None,
                                   None, ]):
            self.assertRaises(
                exception.VolumeBackendAPIException,
                self.mask.check_if_rollback_action_for_masking_required,
                self.data.array, self.device_id, self.maskingviewdict)
            with mock.patch.object(masking.VMAXMasking,
                                   'remove_and_reset_members'):
                self.maskingviewdict[
                    'default_sg_name'] = self.data.defaultstoragegroup_name
                error_message = (
                    self.mask.check_if_rollback_action_for_masking_required(
                        self.data.array, self.device_id, self.maskingviewdict))
                self.assertIsNone(error_message)
    @mock.patch.object(rest.VMAXRest, 'delete_masking_view')
    @mock.patch.object(rest.VMAXRest, 'delete_initiator_group')
    @mock.patch.object(rest.VMAXRest, 'get_initiator_group')
    @mock.patch.object(masking.VMAXMasking, '_find_initiator_group',
                       return_value=VMAXCommonData.initiatorgroup_name_i)
    def test_verify_initiator_group_from_masking_view(
            self, mock_find_ig, mock_get_ig, mock_delete_ig, mock_delete_mv):
        """An IG matching the connector's IG needs no further lookup."""
        self.mask._verify_initiator_group_from_masking_view(
            self.data.array, self.maskingviewdict['maskingview_name'],
            self.maskingviewdict, self.data.initiatorgroup_name_i,
            self.data.storagegroup_name_i, self.data.port_group_name_i,
            self.extra_specs)
        mock_get_ig.assert_not_called()
        # A mismatched IG name triggers the get_initiator_group lookup.
        mock_get_ig.return_value = False
        self.mask._verify_initiator_group_from_masking_view(
            self.data.array, self.maskingviewdict['maskingview_name'],
            self.maskingviewdict, 'OS-Wrong-Host-I-IG',
            self.data.storagegroup_name_i, self.data.port_group_name_i,
            self.extra_specs)
        mock_get_ig.assert_called()
    @mock.patch.object(rest.VMAXRest, 'delete_masking_view')
    @mock.patch.object(rest.VMAXRest, 'delete_initiator_group')
    @mock.patch.object(rest.VMAXRest, 'get_initiator_group',
                       return_value=True)
    @mock.patch.object(masking.VMAXMasking, '_find_initiator_group',
                       return_value=VMAXCommonData.initiatorgroup_name_i)
    def test_verify_initiator_group_from_masking_view2(
            self, mock_find_ig, mock_get_ig, mock_delete_ig, mock_delete_mv):
        """MV delete failure path still reports the connector's IG."""
        # First MV delete succeeds, the second raises.
        mock_delete_mv.side_effect = [None, Exception]
        self.mask._verify_initiator_group_from_masking_view(
            self.data.array, self.maskingviewdict['maskingview_name'],
            self.maskingviewdict, 'OS-Wrong-Host-I-IG',
            self.data.storagegroup_name_i, self.data.port_group_name_i,
            self.extra_specs)
        mock_delete_mv.assert_called()
        _, found_ig_from_connector = (
            self.mask._verify_initiator_group_from_masking_view(
                self.data.array, self.maskingviewdict['maskingview_name'],
                self.maskingviewdict, 'OS-Wrong-Host-I-IG',
                self.data.storagegroup_name_i, self.data.port_group_name_i,
                self.extra_specs))
        self.assertEqual(self.data.initiatorgroup_name_i,
                         found_ig_from_connector)
    @mock.patch.object(rest.VMAXRest, 'create_initiator_group')
    def test_create_initiator_group(self, mock_create_ig):
        """_create_initiator_group returns the requested IG name."""
        initiator_names = self.mask.find_initiator_names(self.data.connector)
        ret_init_group_name = self.mask._create_initiator_group(
            self.data.array, self.data.initiatorgroup_name_i, initiator_names,
            self.extra_specs)
        self.assertEqual(self.data.initiatorgroup_name_i, ret_init_group_name)
    @mock.patch.object(masking.VMAXMasking,
                       '_last_volume_delete_initiator_group')
    def test_check_ig_rollback(self, mock_last_volume):
        """IG delete is only attempted for the matching IG name."""
        with mock.patch.object(masking.VMAXMasking, '_find_initiator_group',
                               side_effect=[
                                   None, 'FAKE-I-IG',
                                   self.data.initiatorgroup_name_i]):
            # No IG found, then a non-matching IG name: nothing deleted.
            for x in range(0, 2):
                self.mask._check_ig_rollback(self.data.array,
                                             self.data.initiatorgroup_name_i,
                                             self.data.connector)
                mock_last_volume.assert_not_called()
            # Matching IG name: the delete helper is invoked.
            self.mask._check_ig_rollback(
                self.data.array, self.data.initiatorgroup_name_i,
                self.data.connector)
            mock_last_volume.assert_called()
@mock.patch.object(masking.VMAXMasking, '_cleanup_deletion')
def test_remove_and_reset_members(self, mock_cleanup):
self.mask.remove_and_reset_members(self.data.array, self.device_id,
self.volume_name, self.extra_specs,
reset=False)
mock_cleanup.assert_called_once()
    @mock.patch.object(rest.VMAXRest, 'get_storage_groups_from_volume',
                       side_effect=[[VMAXCommonData.storagegroup_name_i],
                                    [VMAXCommonData.storagegroup_name_i,
                                     VMAXCommonData.storagegroup_name_f]])
    @mock.patch.object(masking.VMAXMasking, 'remove_volume_from_sg')
    @mock.patch.object(masking.VMAXMasking,
                       'add_volume_to_default_storage_group')
    def test_cleanup_deletion(self, mock_add, mock_remove_vol, mock_get_sg):
        """One SG: no re-add to default SG; two SGs: re-add happens once."""
        self.mask._cleanup_deletion(
            self.data.array, self.device_id, self.volume_name,
            self.extra_specs, None, True)
        mock_add.assert_not_called()
        self.mask._cleanup_deletion(
            self.data.array, self.device_id, self.volume_name,
            self.extra_specs, None, True)
        mock_add.assert_called_once_with(self.data.array, self.device_id,
                                         self.volume_name, self.extra_specs)
    @mock.patch.object(masking.VMAXMasking, '_last_vol_in_sg')
    @mock.patch.object(masking.VMAXMasking, '_multiple_vols_in_sg')
    def test_remove_volume_from_sg(self, mock_multiple_vols, mock_last_vol):
        """Last-volume handling triggers only when one volume remains."""
        with mock.patch.object(
                rest.VMAXRest, 'get_masking_views_from_storage_group',
                return_value=None):
            with mock.patch.object(
                    rest.VMAXRest, 'get_num_vols_in_sg',
                    side_effect=[2, 1]):
                # Two volumes left: multiple-volume path.
                self.mask.remove_volume_from_sg(
                    self.data.array, self.device_id, self.volume_name,
                    self.data.defaultstoragegroup_name, self.extra_specs)
                mock_last_vol.assert_not_called()
                # One volume left: last-volume path.
                self.mask.remove_volume_from_sg(
                    self.data.array, self.device_id, self.volume_name,
                    self.data.defaultstoragegroup_name, self.extra_specs)
                mock_last_vol.assert_called()
    @mock.patch.object(masking.VMAXMasking, '_last_vol_in_sg')
    @mock.patch.object(masking.VMAXMasking, '_multiple_vols_in_sg')
    def test_remove_volume_from_sg_2(self, mock_multiple_vols, mock_last_vol):
        """Same last-volume logic when the SG is attached to a masking view."""
        with mock.patch.object(
                rest.VMAXRest, 'is_volume_in_storagegroup',
                return_value=True):
            with mock.patch.object(
                    rest.VMAXRest, 'get_masking_views_from_storage_group',
                    return_value=[self.data.masking_view_name_i]):
                with mock.patch.object(
                        rest.VMAXRest, 'get_num_vols_in_sg',
                        side_effect=[2, 1]):
                    self.mask.remove_volume_from_sg(
                        self.data.array, self.device_id, self.volume_name,
                        self.data.storagegroup_name_i, self.extra_specs)
                    mock_last_vol.assert_not_called()
                    self.mask.remove_volume_from_sg(
                        self.data.array, self.device_id, self.volume_name,
                        self.data.storagegroup_name_i, self.extra_specs)
                    mock_last_vol.assert_called()
    @mock.patch.object(masking.VMAXMasking, '_last_vol_masking_views',
                       return_value=True)
    @mock.patch.object(masking.VMAXMasking, '_last_vol_no_masking_views',
                       return_value=True)
    def test_last_vol_in_sg(self, mock_no_mv, mock_mv):
        """Dispatches to the MV or no-MV helper based on attached views."""
        mv_list = [self.data.masking_view_name_i,
                   self.data.masking_view_name_f]
        with mock.patch.object(rest.VMAXRest,
                               'get_masking_views_from_storage_group',
                               side_effect=[mv_list, []]):
            for x in range(0, 2):
                self.mask._last_vol_in_sg(
                    self.data.array, self.device_id, self.volume_name,
                    self.data.storagegroup_name_i, self.extra_specs,
                    self.data.connector)
            # One pass per helper: MVs present, then no MVs.
            self.assertEqual(1, mock_mv.call_count)
            self.assertEqual(1, mock_no_mv.call_count)
    @mock.patch.object(masking.VMAXMasking, '_remove_last_vol_and_delete_sg')
    @mock.patch.object(masking.VMAXMasking, '_delete_cascaded_storage_groups')
    @mock.patch.object(rest.VMAXRest, 'get_num_vols_in_sg',
                       side_effect=[1, 3])
    @mock.patch.object(rest.VMAXRest, 'delete_storage_group')
    @mock.patch.object(masking.VMAXMasking, 'get_parent_sg_from_child',
                       side_effect=[None, 'parent_sg_name', 'parent_sg_name'])
    def test_last_vol_no_masking_views(
            self, mock_get_parent, mock_delete, mock_num_vols,
            mock_delete_casc, mock_remove):
        """Plain delete, cascaded delete, and removal each fire once."""
        for x in range(0, 3):
            self.mask._last_vol_no_masking_views(
                self.data.array, self.data.storagegroup_name_i,
                self.device_id, self.volume_name, self.extra_specs,
                False)
        self.assertEqual(1, mock_delete.call_count)
        self.assertEqual(1, mock_delete_casc.call_count)
        self.assertEqual(1, mock_remove.call_count)
    @mock.patch.object(masking.VMAXMasking, '_remove_last_vol_and_delete_sg')
    @mock.patch.object(masking.VMAXMasking, '_delete_mv_ig_and_sg')
    @mock.patch.object(masking.VMAXMasking, '_get_num_vols_from_mv',
                       side_effect=[(1, 'parent_name'), (3, 'parent_name')])
    def test_last_vol_masking_views(
            self, mock_num_vols, mock_delete_all, mock_remove):
        """One volume left tears down the MV; several just remove the vol."""
        for x in range(0, 2):
            self.mask._last_vol_masking_views(
                self.data.array, self.data.storagegroup_name_i,
                [self.data.masking_view_name_i], self.device_id,
                self.volume_name, self.extra_specs, self.data.connector,
                True)
        self.assertEqual(1, mock_delete_all.call_count)
        self.assertEqual(1, mock_remove.call_count)
    @mock.patch.object(masking.VMAXMasking,
                       'add_volume_to_default_storage_group')
    @mock.patch.object(rest.VMAXRest, 'get_num_vols_in_sg')
    @mock.patch.object(masking.VMAXMasking, 'remove_vol_from_storage_group')
    def test_multiple_vols_in_sg(self, mock_remove_vol, mock_get_volumes,
                                 mock_add):
        """reset=True re-adds the volume to its default storage group."""
        self.mask._multiple_vols_in_sg(
            self.data.array, self.device_id, self.data.storagegroup_name_i,
            self.volume_name, self.extra_specs, False)
        mock_remove_vol.assert_called_once()
        self.mask._multiple_vols_in_sg(
            self.data.array, self.device_id, self.data.storagegroup_name_i,
            self.volume_name, self.extra_specs, True)
        mock_add.assert_called_once()
    @mock.patch.object(rest.VMAXRest, 'get_element_from_masking_view')
    @mock.patch.object(masking.VMAXMasking, '_last_volume_delete_masking_view')
    @mock.patch.object(masking.VMAXMasking,
                       '_last_volume_delete_initiator_group')
    @mock.patch.object(masking.VMAXMasking, '_delete_cascaded_storage_groups')
    def test_delete_mv_ig_and_sg(self, mock_delete_sg, mock_delete_ig,
                                 mock_delete_mv, mock_get_element):
        """Full teardown path deletes the cascaded storage groups once."""
        self.mask._delete_mv_ig_and_sg(
            self.data.array, self.data.device_id,
            self.data.masking_view_name_i,
            self.data.storagegroup_name_i, self.data.parent_sg_i,
            self.data.connector, True, self.data.extra_specs)
        mock_delete_sg.assert_called_once()
@mock.patch.object(rest.VMAXRest, 'delete_masking_view')
def test_last_volume_delete_masking_view(self, mock_delete_mv):
self.mask._last_volume_delete_masking_view(
self.data.array, self.data.masking_view_name_i)
mock_delete_mv.assert_called_once()
    @mock.patch.object(rest.VMAXRest, 'move_volume_between_storage_groups')
    @mock.patch.object(masking.VMAXMasking,
                       'get_or_create_default_storage_group')
    @mock.patch.object(masking.VMAXMasking, 'add_volume_to_storage_group')
    def test_add_volume_to_default_storage_group(
            self, mock_add_sg, mock_get_sg, mock_move):
        """Plain call uses the add helper; src_sg triggers a move instead."""
        self.mask.add_volume_to_default_storage_group(
            self.data.array, self.device_id, self.volume_name,
            self.extra_specs)
        mock_add_sg.assert_called_once()
        self.mask.add_volume_to_default_storage_group(
            self.data.array, self.device_id, self.volume_name,
            self.extra_specs, src_sg=self.data.storagegroup_name_i)
        mock_move.assert_called_once()
    @mock.patch.object(provision.VMAXProvision, 'create_storage_group')
    def test_get_or_create_default_storage_group(self, mock_create_sg):
        """Missing default SG is created; one already in an MV raises."""
        with mock.patch.object(
                rest.VMAXRest, 'get_vmax_default_storage_group',
                return_value=(None, self.data.storagegroup_name_i)):
            storage_group_name = self.mask.get_or_create_default_storage_group(
                self.data.array, self.data.srp, self.data.slo,
                self.data.workload, self.extra_specs)
        self.assertEqual(self.data.storagegroup_name_i, storage_group_name)
        # A default SG already attached to a masking view is an error.
        with mock.patch.object(
                rest.VMAXRest, 'get_vmax_default_storage_group',
                return_value=("test_sg", self.data.storagegroup_name_i)):
            with mock.patch.object(
                    rest.VMAXRest, 'get_masking_views_from_storage_group',
                    return_value=self.data.masking_view_name_i):
                self.assertRaises(
                    exception.VolumeBackendAPIException,
                    self.mask.get_or_create_default_storage_group,
                    self.data.array, self.data.srp, self.data.slo,
                    self.data.workload, self.extra_specs)
    @mock.patch.object(masking.VMAXMasking,
                       'add_volume_to_default_storage_group')
    @mock.patch.object(rest.VMAXRest, 'remove_child_sg_from_parent_sg')
    @mock.patch.object(rest.VMAXRest, 'delete_storage_group')
    @mock.patch.object(masking.VMAXMasking, 'remove_vol_from_storage_group')
    def test_remove_last_vol_and_delete_sg(self, mock_vol_sg,
                                           mock_delete_sg, mock_rm, mock_add):
        """Plain call removes the volume; parent-SG call also detaches
        the child SG and re-adds the volume to the default SG."""
        self.mask._remove_last_vol_and_delete_sg(
            self.data.array, self.device_id, self.volume_name,
            self.data.storagegroup_name_i, self.extra_specs)
        self.mask._remove_last_vol_and_delete_sg(
            self.data.array, self.device_id, self.volume_name,
            self.data.storagegroup_name_i, self.extra_specs,
            self.data.parent_sg_i, True)
        self.assertEqual(2, mock_delete_sg.call_count)
        self.assertEqual(1, mock_vol_sg.call_count)
        self.assertEqual(1, mock_rm.call_count)
        self.assertEqual(1, mock_add.call_count)
    @mock.patch.object(rest.VMAXRest, 'delete_initiator_group')
    def test_last_volume_delete_initiator_group(self, mock_delete_ig):
        """The IG is deleted only for the matching host once no masking
        views reference it; wrong host or host=None never deletes."""
        self.mask._last_volume_delete_initiator_group(
            self.data.array, self.data.initiatorgroup_name_f, 'Wrong_Host')
        mock_delete_ig.assert_not_called()
        self.mask._last_volume_delete_initiator_group(
            self.data.array, self.data.initiatorgroup_name_f, None)
        mock_delete_ig.assert_not_called()
        mv_list = [self.data.masking_view_name_i,
                   self.data.masking_view_name_f]
        # First call still sees masking views (no delete); second call
        # sees an empty list and deletes.
        with mock.patch.object(rest.VMAXRest,
                               'get_masking_views_by_initiator_group',
                               side_effect=[mv_list, []]):
            self.mask._last_volume_delete_initiator_group(
                self.data.array, self.data.initiatorgroup_name_i,
                self.data.connector['host'])
            mock_delete_ig.assert_not_called()
            self.mask._last_volume_delete_initiator_group(
                self.data.array, self.data.initiatorgroup_name_i,
                self.data.connector['host'])
            mock_delete_ig.assert_called_once()
    def test_populate_masking_dict_init_check_false(self):
        """initiator_check flag is False when the driver flag is False."""
        extra_specs = self.data.extra_specs
        connector = self.data.connector
        with mock.patch.object(self.driver, '_get_initiator_check_flag',
                               return_value=False):
            masking_view_dict = self.driver._populate_masking_dict(
                self.data.test_volume, connector, extra_specs)
            self.assertFalse(masking_view_dict['initiator_check'])
    def test_populate_masking_dict_init_check_true(self):
        """initiator_check flag is True when the driver flag is True."""
        extra_specs = self.data.extra_specs
        connector = self.data.connector
        with mock.patch.object(self.driver, '_get_initiator_check_flag',
                               return_value=True):
            masking_view_dict = self.driver._populate_masking_dict(
                self.data.test_volume, connector, extra_specs)
            self.assertTrue(masking_view_dict['initiator_check'])
    def test_check_existing_initiator_group_verify_true(self):
        """With initiator_check set, the existing IG is verified against
        the masking view via _verify_initiator_group_from_masking_view."""
        mv_dict = deepcopy(self.data.masking_view_dict)
        mv_dict['initiator_check'] = True
        with mock.patch.object(
                rest.VMAXRest, 'get_element_from_masking_view',
                return_value=VMAXCommonData.initiatorgroup_name_f):
            with mock.patch.object(
                    self.mask, '_verify_initiator_group_from_masking_view',
                    return_value=(True, self.data.initiatorgroup_name_f)):
                self.mask._check_existing_initiator_group(
                    self.data.array, self.data.masking_view_name_f,
                    mv_dict, self.data.storagegroup_name_f,
                    self.data.port_group_name_f, self.data.extra_specs)
                (self.mask._verify_initiator_group_from_masking_view.
                    assert_called_once_with(
                        self.data.array, self.data.masking_view_name_f,
                        mv_dict, self.data.initiatorgroup_name_f,
                        self.data.storagegroup_name_f,
                        self.data.port_group_name_f, self.data.extra_specs))
    @mock.patch.object(masking.VMAXMasking, 'add_child_sg_to_parent_sg',
                       side_effect=[
                           None, exception.VolumeBackendAPIException])
    @mock.patch.object(rest.VMAXRest, 'is_child_sg_in_parent_sg',
                       side_effect=[True, False, False])
    def test_check_add_child_sg_to_parent_sg(self, mock_is_child, mock_add):
        """A status message is returned for all three scenarios:
        already-a-child, successful add, and failed add."""
        for x in range(0, 3):
            message = self.mask._check_add_child_sg_to_parent_sg(
                self.data.array, self.data.storagegroup_name_i,
                self.data.parent_sg_i, self.data.extra_specs)
            self.assertIsNotNone(message)
    @mock.patch.object(rest.VMAXRest, 'add_child_sg_to_parent_sg')
    @mock.patch.object(rest.VMAXRest, 'is_child_sg_in_parent_sg',
                       side_effect=[True, False])
    def test_add_child_sg_to_parent_sg(self, mock_is_child, mock_add):
        """The REST add is skipped when the SG is already a child."""
        for x in range(0, 2):
            self.mask.add_child_sg_to_parent_sg(
                self.data.array, self.data.storagegroup_name_i,
                self.data.parent_sg_i, self.data.extra_specs)
        self.assertEqual(1, mock_add.call_count)
    def test_get_parent_sg_from_child(self):
        """Returns None for a missing SG, the parent name otherwise."""
        with mock.patch.object(self.driver.rest, 'get_storage_group',
                               side_effect=[None, self.data.sg_details[1]]):
            sg_name = self.mask.get_parent_sg_from_child(
                self.data.array, self.data.storagegroup_name_i)
            self.assertIsNone(sg_name)
            sg_name2 = self.mask.get_parent_sg_from_child(
                self.data.array, self.data.storagegroup_name_f)
            self.assertEqual(self.data.parent_sg_f, sg_name2)
@mock.patch.object(rest.VMAXRest, 'get_element_from_masking_view',
return_value='parent_sg')
@mock.patch.object(rest.VMAXRest, 'get_num_vols_in_sg',
return_value=2)
def test_get_num_vols_from_mv(self, mock_num, mock_element):
num_vols, sg = self.mask._get_num_vols_from_mv(
self.data.array, self.data.masking_view_name_f)
self.assertEqual(2, num_vols)
    @mock.patch.object(masking.VMAXMasking,
                       'add_volume_to_default_storage_group')
    @mock.patch.object(rest.VMAXRest, 'delete_storage_group')
    def test_delete_cascaded(self, mock_delete, mock_add):
        """Cascaded delete removes child and parent SGs; the legacy path
        (parent == child) deletes once and re-adds to the default SG."""
        self.mask._delete_cascaded_storage_groups(
            self.data.array, self.data.masking_view_name_f,
            self.data.parent_sg_f, self.data.extra_specs,
            self.data.device_id, False)
        self.assertEqual(2, mock_delete.call_count)
        mock_add.assert_not_called()
        # Delete legacy masking view, parent sg = child sg
        mock_delete.reset_mock()
        self.mask._delete_cascaded_storage_groups(
            self.data.array, self.data.masking_view_name_f,
            self.data.masking_view_name_f, self.data.extra_specs,
            self.data.device_id, True)
        self.assertEqual(1, mock_delete.call_count)
        mock_add.assert_called_once()
    @mock.patch.object(masking.VMAXMasking, 'add_child_sg_to_parent_sg')
    @mock.patch.object(masking.VMAXMasking,
                       'move_volume_between_storage_groups')
    @mock.patch.object(provision.VMAXProvision, 'create_storage_group')
    def test_pre_live_migration(self, mock_create_sg, mock_move, mock_add):
        """pre_live_migration creates the NONFAST SG when it is absent."""
        with mock.patch.object(
                rest.VMAXRest, 'get_storage_group',
                side_effect=[None, self.data.sg_details[1]["storageGroupId"]]
        ):
            source_sg = self.data.sg_details[2]["storageGroupId"]
            source_parent_sg = self.data.sg_details[4]["storageGroupId"]
            source_nf_sg = source_parent_sg[:-2] + 'NONFAST'
            self.data.iscsi_device_info['device_id'] = self.data.device_id
            self.mask.pre_live_migration(
                source_nf_sg, source_sg, source_parent_sg, False,
                self.data.iscsi_device_info, None)
            mock_create_sg.assert_called_once()
    @mock.patch.object(rest.VMAXRest, 'delete_storage_group')
    @mock.patch.object(rest.VMAXRest, 'remove_child_sg_from_parent_sg')
    def test_post_live_migration(self, mock_remove_child_sg, mock_delete_sg):
        """post_live_migration detaches and deletes the emptied source SG
        (vol counts mocked: 0 in source, 1 remaining in parent)."""
        self.data.iscsi_device_info['source_sg'] = self.data.sg_details[2][
            "storageGroupId"]
        self.data.iscsi_device_info['source_parent_sg'] = self.data.sg_details[
            4]["storageGroupId"]
        with mock.patch.object(
                rest.VMAXRest, 'get_num_vols_in_sg', side_effect=[0, 1]):
            self.mask.post_live_migration(self.data.iscsi_device_info, None)
            mock_remove_child_sg.assert_called_once()
            mock_delete_sg.assert_called_once()
    @mock.patch.object(masking.VMAXMasking,
                       'move_volume_between_storage_groups')
    @mock.patch.object(rest.VMAXRest, 'delete_storage_group')
    @mock.patch.object(rest.VMAXRest, 'remove_child_sg_from_parent_sg')
    @mock.patch.object(masking.VMAXMasking, 'remove_volume_from_sg')
    def test_failed_live_migration(
            self, mock_remove_volume, mock_remove_child_sg, mock_delete_sg,
            mock_move):
        """failed_live_migration detaches the NONFAST child SG without
        removing the volume (is_child_sg_in_parent_sg: True then False)."""
        device_dict = self.data.iscsi_device_info
        device_dict['device_id'] = self.data.device_id
        device_dict['source_sg'] = self.data.sg_details[2]["storageGroupId"]
        device_dict['source_parent_sg'] = self.data.sg_details[4][
            "storageGroupId"]
        device_dict['source_nf_sg'] = (
            self.data.sg_details[4]["storageGroupId"][:-2] + 'NONFAST')
        sg_list = [device_dict['source_nf_sg']]
        with mock.patch.object(
                rest.VMAXRest, 'is_child_sg_in_parent_sg',
                side_effect=[True, False]):
            self.mask.failed_live_migration(device_dict, sg_list, None)
            mock_remove_volume.assert_not_called()
            mock_remove_child_sg.assert_called_once()
class VMAXCommonReplicationTest(test.TestCase):
    """Tests for VMAXCommon replication (SRDF) behaviour."""
    def setUp(self):
        """Build an FC driver configured with a replication device."""
        self.data = VMAXCommonData()
        super(VMAXCommonReplicationTest, self).setUp()
        config_group = 'CommonReplicationTests'
        self.fake_xml = FakeXML().create_fake_config_file(
            config_group, self.data.port_group_name_f)
        self.replication_device = {
            'target_device_id': self.data.remote_array,
            'remote_port_group': self.data.port_group_name_f,
            'remote_pool': self.data.srp2,
            'rdf_group_label': self.data.rdf_group_name,
            'allow_extend': 'True'}
        configuration = FakeConfiguration(
            self.fake_xml, config_group,
            replication_device=self.replication_device)
        # Never open a real REST session during tests.
        rest.VMAXRest._establish_rest_session = mock.Mock(
            return_value=FakeRequestsSession())
        driver = fc.VMAXFCDriver(configuration=configuration)
        self.driver = driver
        self.common = self.driver.common
        self.masking = self.common.masking
        self.provision = self.common.provision
        self.rest = self.common.rest
        self.utils = self.common.utils
        self.utils.get_volumetype_extra_specs = (
            mock.Mock(
                return_value=self.data.vol_type_extra_specs_rep_enabled))
        self.extra_specs = deepcopy(self.data.extra_specs_rep_enabled)
        # Zero out retry/interval so tests never sleep.
        self.extra_specs['retries'] = 0
        self.extra_specs['interval'] = 0
    def test_get_replication_info(self):
        """Replication is enabled after parsing the replication device."""
        self.common._get_replication_info()
        self.assertTrue(self.common.replication_enabled)
    def test_create_replicated_volume(self):
        """create_volume replicates with the volume's element name."""
        extra_specs = deepcopy(self.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        vol_identifier = self.utils.get_volume_element_name(
            self.data.test_volume.id)
        with mock.patch.object(self.common, '_replicate_volume',
                               return_value={}) as mock_rep:
            self.common.create_volume(self.data.test_volume)
            volume_dict = self.data.provider_location
            mock_rep.assert_called_once_with(
                self.data.test_volume, vol_identifier, volume_dict,
                extra_specs)
    def test_create_cloned_replicated_volume(self):
        """create_cloned_volume replicates using the clone's name."""
        extra_specs = deepcopy(self.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        with mock.patch.object(self.common, '_replicate_volume',
                               return_value={}) as mock_rep:
            self.common.create_cloned_volume(
                self.data.test_clone_volume, self.data.test_volume)
            volume_dict = self.data.provider_location
            mock_rep.assert_called_once_with(
                self.data.test_clone_volume,
                self.data.test_clone_volume.name, volume_dict, extra_specs)
    def test_create_replicated_volume_from_snap(self):
        """create_volume_from_snapshot replicates with a snapshot name."""
        extra_specs = deepcopy(self.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        with mock.patch.object(self.common, '_replicate_volume',
                               return_value={}) as mock_rep:
            self.common.create_volume_from_snapshot(
                self.data.test_clone_volume, self.data.test_snapshot)
            volume_dict = self.data.provider_location
            mock_rep.assert_called_once_with(
                self.data.test_clone_volume,
                "snapshot-%s" % self.data.snapshot_id,
                volume_dict,
                extra_specs)
    def test_replicate_volume(self):
        """_replicate_volume drives setup_volume_replication."""
        volume_dict = self.data.provider_location
        rs_enabled = fields.ReplicationStatus.ENABLED
        with mock.patch.object(self.common, 'setup_volume_replication',
                               return_value=(rs_enabled, {})) as mock_setup:
            self.common._replicate_volume(
                self.data.test_volume, "1", volume_dict, self.extra_specs)
            mock_setup.assert_called_once_with(
                self.data.array, self.data.test_volume,
                self.data.device_id, self.extra_specs)
    def test_replicate_volume_exception(self):
        """A setup failure cleans up the replication source and re-raises."""
        volume_dict = self.data.provider_location
        with mock.patch.object(
                self.common, 'setup_volume_replication',
                side_effect=exception.VolumeBackendAPIException(data='')):
            with mock.patch.object(
                    self.common, '_cleanup_replication_source') as mock_clean:
                self.assertRaises(exception.VolumeBackendAPIException,
                                  self.common._replicate_volume,
                                  self.data.test_volume,
                                  "1", volume_dict, self.extra_specs)
                mock_clean.assert_called_once_with(
                    self.data.array, self.data.test_volume, "1",
                    volume_dict, self.extra_specs)
    @mock.patch.object(common.VMAXCommon, '_remove_members')
    @mock.patch.object(common.VMAXCommon,
                       '_get_replication_extra_specs',
                       return_value=VMAXCommonData.rep_extra_specs)
    @mock.patch.object(utils.VMAXUtils, 'is_volume_failed_over',
                       return_value=True)
    def test_unmap_lun_volume_failed_over(self, mock_fo, mock_es, mock_rm):
        """_unmap_lun on a failed-over volume uses replication specs."""
        extra_specs = deepcopy(self.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        rep_config = self.utils.get_replication_config(
            [self.replication_device])
        self.common._unmap_lun(self.data.test_volume, self.data.connector)
        mock_es.assert_called_once_with(extra_specs, rep_config)
    @mock.patch.object(utils.VMAXUtils, 'is_volume_failed_over',
                       return_value=True)
    def test_initialize_connection_vol_failed_over(self, mock_fo):
        """initialize_connection on a failed-over volume resolves
        replication extra specs."""
        extra_specs = deepcopy(self.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        rep_extra_specs = deepcopy(VMAXCommonData.rep_extra_specs)
        rep_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        rep_config = self.utils.get_replication_config(
            [self.replication_device])
        with mock.patch.object(self.common, '_get_replication_extra_specs',
                               return_value=rep_extra_specs) as mock_es:
            self.common.initialize_connection(
                self.data.test_volume, self.data.connector)
            mock_es.assert_called_once_with(extra_specs, rep_config)
    @mock.patch.object(common.VMAXCommon, '_sync_check')
    def test_extend_volume_rep_enabled(self, mock_sync):
        """extend_volume routes to extend_volume_is_replicated."""
        extra_specs = deepcopy(self.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        volume_name = self.data.test_volume.name
        with mock.patch.object(self.rest, 'is_vol_in_rep_session',
                               return_value=(False, False, None)):
            with mock.patch.object(
                    self.common, 'extend_volume_is_replicated') as mock_ex_re:
                self.common.extend_volume(self.data.test_volume, '5')
                mock_ex_re.assert_called_once_with(
                    self.data.array, self.data.test_volume,
                    self.data.device_id, volume_name, "5", extra_specs)
    def test_set_config_file_get_extra_specs_rep_enabled(self):
        """Extra specs from config carry replication_enabled=True."""
        extra_specs, _, _ = self.common._set_config_file_and_get_extra_specs(
            self.data.test_volume)
        self.assertTrue(extra_specs['replication_enabled'])
    def test_populate_masking_dict_is_re(self):
        """Replicated volumes get an RE-suffixed storage group name."""
        extra_specs = deepcopy(self.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        masking_dict = self.common._populate_masking_dict(
            self.data.test_volume, self.data.connector, extra_specs)
        self.assertTrue(masking_dict['replication_enabled'])
        self.assertEqual('OS-HostX-SRP_1-DiamondDSS-OS-fibre-PG-RE',
                         masking_dict[utils.SG_NAME])
    @mock.patch.object(common.VMAXCommon,
                       '_replicate_volume',
                       return_value={})
    def test_manage_existing_is_replicated(self, mock_rep):
        """manage_existing replicates the managed volume in place."""
        extra_specs = deepcopy(self.extra_specs)
        extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        external_ref = {u'source-name': u'00002'}
        volume_name = self.utils.get_volume_element_name(
            self.data.test_volume.id)
        provider_location = {'device_id': u'00002', 'array': self.data.array}
        with mock.patch.object(
                self.common, '_check_lun_valid_for_cinder_management'):
            self.common.manage_existing(
                self.data.test_volume, external_ref)
            mock_rep.assert_called_once_with(
                self.data.test_volume, volume_name, provider_location,
                extra_specs, delete_src=False)
    @mock.patch.object(masking.VMAXMasking, 'remove_and_reset_members')
    def test_setup_volume_replication(self, mock_rm):
        """setup_volume_replication returns ENABLED and the target pair."""
        rep_status, rep_data = self.common.setup_volume_replication(
            self.data.array, self.data.test_volume, self.data.device_id,
            self.extra_specs)
        self.assertEqual(fields.ReplicationStatus.ENABLED, rep_status)
        self.assertEqual({'array': self.data.remote_array,
                          'device_id': self.data.device_id}, rep_data)
    @mock.patch.object(masking.VMAXMasking, 'remove_and_reset_members')
    @mock.patch.object(common.VMAXCommon, '_create_volume')
    def test_setup_volume_replication_target(self, mock_create, mock_rm):
        """Passing an existing target device skips target creation."""
        rep_status, rep_data = self.common.setup_volume_replication(
            self.data.array, self.data.test_volume, self.data.device_id,
            self.extra_specs, self.data.device_id2)
        self.assertEqual(fields.ReplicationStatus.ENABLED, rep_status)
        self.assertEqual({'array': self.data.remote_array,
                          'device_id': self.data.device_id2}, rep_data)
        mock_create.assert_not_called()
    @mock.patch.object(masking.VMAXMasking, 'remove_and_reset_members')
    @mock.patch.object(common.VMAXCommon, '_cleanup_remote_target')
    def test_cleanup_lun_replication_success(self, mock_clean, mock_rm):
        """cleanup_lun_replication tears down the remote target; the
        legacy-volume path does not trigger a second remote cleanup."""
        rep_extra_specs = deepcopy(self.data.rep_extra_specs)
        rep_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
        self.common.cleanup_lun_replication(
            self.data.test_volume, "1", self.data.device_id,
            self.extra_specs)
        mock_clean.assert_called_once_with(
            self.data.array, self.data.remote_array, self.data.device_id,
            self.data.device_id2, self.data.rdf_group_no, "1",
            rep_extra_specs)
        mock_rm.assert_called_once_with(
            self.data.remote_array, self.data.device_id2, "1",
            rep_extra_specs, False)
        # Cleanup legacy replication
        self.common.cleanup_lun_replication(
            self.data.test_legacy_vol, "1", self.data.device_id,
            self.extra_specs)
        mock_clean.assert_called_once_with(
            self.data.array, self.data.remote_array, self.data.device_id,
            self.data.device_id2, self.data.rdf_group_no, "1",
            rep_extra_specs)
    @mock.patch.object(common.VMAXCommon, '_cleanup_remote_target')
    def test_cleanup_lun_replication_no_target(self, mock_clean):
        """No remote cleanup happens when no target device is found."""
        with mock.patch.object(self.common, 'get_remote_target_device',
                               return_value=(None, '', '', '', '')):
            self.common.cleanup_lun_replication(
                self.data.test_volume, "1", self.data.device_id,
                self.extra_specs)
            mock_clean.assert_not_called()
    def test_cleanup_lun_replication_exception(self):
        """An unmocked cleanup surfaces a VolumeBackendAPIException."""
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.common.cleanup_lun_replication,
                          self.data.test_volume, "1", self.data.device_id,
                          self.extra_specs)
    @mock.patch.object(common.VMAXCommon, '_delete_from_srp')
    @mock.patch.object(provision.VMAXProvision, 'break_rdf_relationship')
    def test_cleanup_remote_target(self, mock_break, mock_del):
        """RDF is broken only when the pair is still paired."""
        with mock.patch.object(self.rest, 'are_vols_rdf_paired',
                               return_value=(False, '', '')):
            self.common._cleanup_remote_target(
                self.data.array, self.data.remote_array, self.data.device_id,
                self.data.device_id2, self.data.rdf_group_name,
                "vol1", self.data.rep_extra_specs)
            mock_break.assert_not_called()
        self.common._cleanup_remote_target(
            self.data.array, self.data.remote_array, self.data.device_id,
            self.data.device_id2, self.data.rdf_group_name,
            "vol1", self.data.rep_extra_specs)
        mock_break.assert_called_once_with(
            self.data.array, self.data.device_id,
            self.data.device_id2, self.data.rdf_group_name,
            self.data.rep_extra_specs, "Synchronized")
    @mock.patch.object(common.VMAXCommon,
                       '_remove_vol_and_cleanup_replication')
    @mock.patch.object(masking.VMAXMasking, 'remove_vol_from_storage_group')
    @mock.patch.object(common.VMAXCommon, '_delete_from_srp')
    def test_cleanup_replication_source(self, mock_del, mock_rm, mock_clean):
        """Source cleanup deletes the device from the SRP."""
        self.common._cleanup_replication_source(
            self.data.array, self.data.test_volume, "vol1",
            {'device_id': self.data.device_id}, self.extra_specs)
        mock_del.assert_called_once_with(
            self.data.array, self.data.device_id, "vol1", self.extra_specs)
    def test_get_rdf_details(self):
        """get_rdf_details returns the RDF group number and remote array."""
        rdf_group_no, remote_array = self.common.get_rdf_details(
            self.data.array)
        self.assertEqual(self.data.rdf_group_no, rdf_group_no)
        self.assertEqual(self.data.remote_array, remote_array)
    def test_get_rdf_details_exception(self):
        """A missing RDF group number raises."""
        with mock.patch.object(self.rest, 'get_rdf_group_number',
                               return_value=None):
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.common.get_rdf_details, self.data.array)
    def test_failover_host(self):
        """failover_host fails over every volume it is given."""
        volumes = [self.data.test_volume, self.data.test_clone_volume]
        with mock.patch.object(self.common, '_failover_volume',
                               return_value={}) as mock_fo:
            self.common.failover_host(volumes)
            self.assertEqual(2, mock_fo.call_count)
    def test_failover_host_exception(self):
        """failover_host with secondary_id='default' raises here."""
        volumes = [self.data.test_volume, self.data.test_clone_volume]
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.common.failover_host,
                          volumes, secondary_id="default")
    def test_failover_volume(self):
        """Failover swaps provider locations; the status reflects the
        direction (FAILED_OVER on failover, ENABLED on failback)."""
        ref_model_update = {
            'volume_id': self.data.test_volume.id,
            'updates':
                {'replication_status': fields.ReplicationStatus.FAILED_OVER,
                 'replication_driver_data': six.text_type(
                     self.data.provider_location),
                 'provider_location': six.text_type(
                     self.data.provider_location3)}}
        model_update = self.common._failover_volume(
            self.data.test_volume, True, self.extra_specs)
        self.assertEqual(ref_model_update, model_update)
        ref_model_update2 = {
            'volume_id': self.data.test_volume.id,
            'updates':
                {'replication_status': fields.ReplicationStatus.ENABLED,
                 'replication_driver_data': six.text_type(
                     self.data.provider_location),
                 'provider_location': six.text_type(
                     self.data.provider_location3)}}
        model_update2 = self.common._failover_volume(
            self.data.test_volume, False, self.extra_specs)
        self.assertEqual(ref_model_update2, model_update2)
    def test_failover_legacy_volume(self):
        """Legacy volumes fail over using the legacy provider locations."""
        ref_model_update = {
            'volume_id': self.data.test_volume.id,
            'updates':
                {'replication_status': fields.ReplicationStatus.FAILED_OVER,
                 'replication_driver_data': six.text_type(
                     self.data.legacy_provider_location),
                 'provider_location': six.text_type(
                     self.data.legacy_provider_location2)}}
        model_update = self.common._failover_volume(
            self.data.test_legacy_vol, True, self.extra_specs)
        self.assertEqual(ref_model_update, model_update)
    def test_failover_volume_exception(self):
        """A provisioning failure yields FAILOVER_ERROR without swapping
        the provider locations."""
        with mock.patch.object(
                self.provision, 'failover_volume',
                side_effect=exception.VolumeBackendAPIException):
            ref_model_update = {
                'volume_id': self.data.test_volume.id,
                'updates': {'replication_status':
                            fields.ReplicationStatus.FAILOVER_ERROR,
                            'replication_driver_data': six.text_type(
                                self.data.provider_location3),
                            'provider_location': six.text_type(
                                self.data.provider_location)}}
            model_update = self.common._failover_volume(
                self.data.test_volume, True, self.extra_specs)
            self.assertEqual(ref_model_update, model_update)
    @mock.patch.object(
        common.VMAXCommon, '_find_device_on_array',
        side_effect=[None, VMAXCommonData.device_id,
                     VMAXCommonData.device_id, VMAXCommonData.device_id])
    @mock.patch.object(
        common.VMAXCommon, 'get_masking_views_from_volume',
        side_effect=['OS-host-MV', None, exception.VolumeBackendAPIException])
    def test_recover_volumes_on_failback(self, mock_mv, mock_dev):
        """Recovered status per scenario: missing device -> error,
        attached -> in-use, detached or MV-lookup failure -> available."""
        recovery1 = self.common.recover_volumes_on_failback(
            self.data.test_volume, self.extra_specs)
        self.assertEqual('error', recovery1['updates']['status'])
        recovery2 = self.common.recover_volumes_on_failback(
            self.data.test_volume, self.extra_specs)
        self.assertEqual('in-use', recovery2['updates']['status'])
        recovery3 = self.common.recover_volumes_on_failback(
            self.data.test_volume, self.extra_specs)
        self.assertEqual('available', recovery3['updates']['status'])
        recovery4 = self.common.recover_volumes_on_failback(
            self.data.test_volume, self.extra_specs)
        self.assertEqual('available', recovery4['updates']['status'])
    def test_get_remote_target_device(self):
        """Target lookup returns the paired device, or None when the
        volume is unreplicated, unpaired, or missing on the array."""
        target_device1, _, _, _, _ = (
            self.common.get_remote_target_device(
                self.data.array, self.data.test_volume, self.data.device_id))
        self.assertEqual(self.data.device_id2, target_device1)
        target_device2, _, _, _, _ = (
            self.common.get_remote_target_device(
                self.data.array, self.data.test_clone_volume,
                self.data.device_id))
        self.assertIsNone(target_device2)
        with mock.patch.object(self.rest, 'are_vols_rdf_paired',
                               return_value=(False, '')):
            target_device3, _, _, _, _ = (
                self.common.get_remote_target_device(
                    self.data.array, self.data.test_volume,
                    self.data.device_id))
            self.assertIsNone(target_device3)
        with mock.patch.object(self.rest, 'get_volume',
                               return_value=None):
            target_device4, _, _, _, _ = (
                self.common.get_remote_target_device(
                    self.data.array, self.data.test_volume,
                    self.data.device_id))
            self.assertIsNone(target_device4)
    @mock.patch.object(common.VMAXCommon, 'setup_volume_replication')
    @mock.patch.object(provision.VMAXProvision, 'extend_volume')
    @mock.patch.object(provision.VMAXProvision, 'break_rdf_relationship')
    @mock.patch.object(masking.VMAXMasking, 'remove_and_reset_members')
    def test_extend_volume_is_replicated(self, mock_remove,
                                         mock_break, mock_extend, mock_setup):
        """Replicated extend resizes both sides of the RDF pair."""
        self.common.extend_volume_is_replicated(
            self.data.array, self.data.test_volume, self.data.device_id,
            'vol1', '5', self.data.extra_specs_rep_enabled)
        self.assertEqual(2, mock_remove.call_count)
        self.assertEqual(2, mock_extend.call_count)
    def test_extend_volume_is_replicated_exception(self):
        """A failed resource during replicated extend raises."""
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.common.extend_volume_is_replicated,
                          self.data.failed_resource, self.data.test_volume,
                          self.data.device_id, 'vol1', '1',
                          self.data.extra_specs_rep_enabled)
    @mock.patch.object(common.VMAXCommon, 'add_volume_to_replication_group')
    @mock.patch.object(masking.VMAXMasking, 'remove_and_reset_members')
    def test_enable_rdf(self, mock_remove, mock_add):
        """enable_rdf resets and re-adds both sides of the pair."""
        rep_config = self.utils.get_replication_config(
            [self.replication_device])
        self.common.enable_rdf(
            self.data.array, self.data.device_id, self.data.rdf_group_no,
            rep_config, 'OS-1', self.data.remote_array, self.data.device_id2,
            self.extra_specs)
        self.assertEqual(2, mock_remove.call_count)
        self.assertEqual(2, mock_add.call_count)
    @mock.patch.object(masking.VMAXMasking, 'remove_vol_from_storage_group')
    @mock.patch.object(common.VMAXCommon, '_cleanup_remote_target')
    def test_enable_rdf_exception(self, mock_cleanup, mock_rm):
        """A failed RDF group triggers remote-target cleanup and raises."""
        rep_config = self.utils.get_replication_config(
            [self.replication_device])
        self.assertRaises(
            exception.VolumeBackendAPIException, self.common.enable_rdf,
            self.data.array, self.data.device_id,
            self.data.failed_resource, rep_config, 'OS-1',
            self.data.remote_array, self.data.device_id2, self.extra_specs)
        self.assertEqual(1, mock_cleanup.call_count)
    def test_add_volume_to_replication_group(self):
        """The volume lands in the default RE-enabled storage group."""
        sg_name = self.common.add_volume_to_replication_group(
            self.data.array, self.data.device_id, 'vol1',
            self.extra_specs)
        self.assertEqual(self.data.default_sg_re_enabled, sg_name)
    @mock.patch.object(masking.VMAXMasking,
                       'get_or_create_default_storage_group',
                       side_effect=exception.VolumeBackendAPIException)
    def test_add_volume_to_replication_group_exception(self, mock_get):
        """SG lookup failure propagates as VolumeBackendAPIException."""
        self.assertRaises(
            exception.VolumeBackendAPIException,
            self.common.add_volume_to_replication_group,
            self.data.array, self.data.device_id, 'vol1',
            self.extra_specs)
    def test_get_replication_extra_specs(self):
        """Replication extra specs honour compression capability and
        SLO/workload validity on the remote array."""
        rep_config = self.utils.get_replication_config(
            [self.replication_device])
        # Path one - disable compression
        extra_specs1 = deepcopy(self.extra_specs)
        extra_specs1[utils.DISABLECOMPRESSION] = "true"
        ref_specs1 = deepcopy(self.data.rep_extra_specs)
        ref_specs1[utils.PORTGROUPNAME] = self.data.port_group_name_f
        rep_extra_specs1 = self.common._get_replication_extra_specs(
            extra_specs1, rep_config)
        self.assertEqual(ref_specs1, rep_extra_specs1)
        # Path two - disable compression, not all flash
        ref_specs2 = deepcopy(self.data.rep_extra_specs)
        ref_specs2[utils.PORTGROUPNAME] = self.data.port_group_name_f
        with mock.patch.object(self.rest, 'is_compression_capable',
                               return_value=False):
            rep_extra_specs2 = self.common._get_replication_extra_specs(
                extra_specs1, rep_config)
        self.assertEqual(ref_specs2, rep_extra_specs2)
        # Path three - slo not valid
        extra_specs3 = deepcopy(self.extra_specs)
        ref_specs3 = deepcopy(ref_specs1)
        ref_specs3['slo'] = None
        ref_specs3['workload'] = None
        with mock.patch.object(self.provision, 'verify_slo_workload',
                               return_value=(False, False)):
            rep_extra_specs3 = self.common._get_replication_extra_specs(
                extra_specs3, rep_config)
        self.assertEqual(ref_specs3, rep_extra_specs3)
    def test_get_secondary_stats(self):
        """Secondary stats mirror the primary info with the remote
        array's serial number and SRP substituted in."""
        rep_config = self.utils.get_replication_config(
            [self.replication_device])
        array_map = self.utils.parse_file_to_get_array_map(
            self.common.pool_info['config_file'])
        finalarrayinfolist = self.common._get_slo_workload_combinations(
            array_map)
        array_info = finalarrayinfolist[0]
        ref_info = deepcopy(array_info)
        ref_info['SerialNumber'] = six.text_type(rep_config['array'])
        ref_info['srpName'] = rep_config['srp']
        secondary_info = self.common.get_secondary_stats_info(
            rep_config, array_info)
        self.assertEqual(ref_info, secondary_info)
| {
"content_hash": "a3d44f879b528cafb557bef161969f1c",
"timestamp": "",
"source": "github",
"line_count": 6237,
"max_line_length": 79,
"avg_line_length": 46.37341670675004,
"alnum_prop": 0.5865692128437131,
"repo_name": "eharney/cinder",
"id": "af4ac41b2ecd87e5db3fad0ec56d66f05e617a4d",
"size": "289881",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinder/tests/unit/volume/drivers/dell_emc/vmax/test_vmax.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "561"
},
{
"name": "Python",
"bytes": "19839107"
},
{
"name": "Shell",
"bytes": "6453"
}
],
"symlink_target": ""
} |
"""Generic add-on view factories"""
# -*- coding: utf-8 -*-
import httplib as http
from flask import request
from framework.exceptions import HTTPError, PermissionsError
from framework.auth.decorators import must_be_logged_in
from website.oauth.models import ExternalAccount
from website.util import permissions, rubeus
from website.project.decorators import (
must_have_addon, must_be_addon_authorizer,
must_have_permission, must_not_be_registration,
must_be_valid_project
)
def import_auth(addon_short_name, Serializer):
    """Build a view that imports the logged-in user's add-on credentials
    into a node.

    :param str addon_short_name: short name of the add-on
    :param Serializer: settings serializer class for the add-on
    :return: the decorated view function
    """
    @must_have_addon(addon_short_name, 'user')
    @must_have_addon(addon_short_name, 'node')
    @must_have_permission(permissions.WRITE)
    def _import_auth(auth, node_addon, user_addon, **kwargs):
        """Import add-on credentials from the currently logged-in user to a node.
        """
        external_account = ExternalAccount.load(
            request.json['external_account_id']
        )
        # The requested account must belong to the requesting user.
        if not user_addon.external_accounts.filter(id=external_account.id).exists():
            raise HTTPError(http.FORBIDDEN)
        try:
            node_addon.set_auth(external_account, user_addon.owner)
        except PermissionsError:
            raise HTTPError(http.FORBIDDEN)
        node_addon.save()
        return {
            'result': Serializer().serialize_settings(node_addon, auth.user),
            'message': 'Successfully imported access token from profile.',
        }
    # Give the wrapped view an addon-specific name for URL routing/debugging.
    _import_auth.__name__ = '{0}_import_auth'.format(addon_short_name)
    return _import_auth
def account_list(addon_short_name, Serializer):
    """Build a view returning the logged-in user's serialized settings
    for the given add-on.

    :param str addon_short_name: short name of the add-on
    :param Serializer: settings serializer class for the add-on
    :return: the decorated view function
    """
    @must_be_logged_in
    def _account_list(auth):
        """Serialize the current user's settings for this add-on."""
        settings = auth.user.get_addon(addon_short_name)
        return Serializer(user_settings=settings).serialized_user_settings
    # Addon-specific name so each generated view is distinguishable.
    _account_list.__name__ = '{0}_account_list'.format(addon_short_name)
    return _account_list
def folder_list(addon_short_name, addon_full_name, get_folders):
    """Build a view that lists folders available to a node's add-on.

    :param str addon_short_name: short name of the add-on
    :param str addon_full_name: display name of the add-on (unused here,
        kept for interface symmetry with set_config)
    :param get_folders: callable(node_addon, folder_id) returning folders
    :return: the decorated view function
    """
    # TODO [OSF-6678]: Generalize this for API use after node settings have been refactored
    @must_have_addon(addon_short_name, 'node')
    @must_be_addon_authorizer(addon_short_name)
    def _folder_list(node_addon, **kwargs):
        """Returns a list of folders"""
        if not node_addon.has_auth:
            raise HTTPError(http.FORBIDDEN)
        # Optional parent folder to list children of.
        folder_id = request.args.get('folderId')
        return get_folders(node_addon, folder_id)
    _folder_list.__name__ = '{0}_folder_list'.format(addon_short_name)
    return _folder_list
def root_folder(addon_short_name):
    """Build a view-helper returning the HGrid root for an add-on.

    :param str addon_short_name: short name of the add-on
    :return: function producing a one-element list with the root, or None
    """
    def _root_folder(node_settings, auth, **kwargs):
        """Return the Rubeus/HGrid-formatted response for the root folder only."""
        # Nothing to show without credentials and a linked folder.
        if not (node_settings.has_auth and node_settings.folder_id):
            return None
        owner = node_settings.owner
        return [
            rubeus.build_addon_root(
                node_settings=node_settings,
                name=node_settings.fetch_folder_name(),
                permissions=auth,
                nodeUrl=owner.url,
                nodeApiUrl=owner.api_url,
                private_key=kwargs.get('view_only', None),
            ),
        ]
    _root_folder.__name__ = '{0}_root_folder'.format(addon_short_name)
    return _root_folder
def get_config(addon_short_name, Serializer):
    """Build an API view that returns the serialized node settings."""
    @must_be_logged_in
    @must_have_addon(addon_short_name, 'node')
    @must_be_valid_project
    @must_have_permission(permissions.WRITE)
    def _get_config(node_addon, auth, **kwargs):
        """API that returns the serialized node settings."""
        serialized = Serializer().serialize_settings(node_addon, auth.user)
        return {'result': serialized}
    _get_config.__name__ = '{0}_get_config'.format(addon_short_name)
    return _get_config
def set_config(addon_short_name, addon_full_name, Serializer, set_folder):
    """Build a view that changes the folder a node add-on is linked to."""
    @must_not_be_registration
    @must_have_addon(addon_short_name, 'user')
    @must_have_addon(addon_short_name, 'node')
    @must_be_addon_authorizer(addon_short_name)
    @must_have_permission(permissions.WRITE)
    def _set_config(node_addon, user_addon, auth, **kwargs):
        """View for changing a node's linked folder."""
        set_folder(node_addon, request.json.get('selected'), auth)
        path = node_addon.folder_path
        # '/' means the whole account is linked, so show a descriptive label.
        if path == '/':
            folder_name = '/ (Full {0})'.format(addon_full_name)
        else:
            folder_name = path.replace('All Files', '')
        return {
            'result': {
                'folder': {
                    'name': folder_name,
                    'path': path,
                },
                'urls': Serializer(node_settings=node_addon).addon_serialized_urls,
            },
            'message': 'Successfully updated settings.',
        }
    _set_config.__name__ = '{0}_set_config'.format(addon_short_name)
    return _set_config
def deauthorize_node(addon_short_name):
    """Build a view that removes a node add-on's authorization."""
    @must_not_be_registration
    @must_have_addon(addon_short_name, 'node')
    @must_have_permission(permissions.WRITE)
    def _deauthorize_node(auth, node_addon, **kwargs):
        """Drop the node's credentials and persist the change."""
        node_addon.deauthorize(auth=auth)
        node_addon.save()
    _deauthorize_node.__name__ = '{0}_deauthorize_node'.format(addon_short_name)
    return _deauthorize_node
| {
"content_hash": "01cdac5e045aa22c9b0e4919c7490a94",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 100,
"avg_line_length": 37.695035460992905,
"alnum_prop": 0.6314205079962371,
"repo_name": "alexschiller/osf.io",
"id": "2a44ba4d5a0acb68af9032f23a9ee59c4a71e3e3",
"size": "5315",
"binary": false,
"copies": "8",
"ref": "refs/heads/develop",
"path": "addons/base/generic_views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "175048"
},
{
"name": "HTML",
"bytes": "181210"
},
{
"name": "JavaScript",
"bytes": "1893215"
},
{
"name": "Jupyter Notebook",
"bytes": "19626"
},
{
"name": "Mako",
"bytes": "710089"
},
{
"name": "Perl",
"bytes": "13885"
},
{
"name": "Python",
"bytes": "8340746"
},
{
"name": "Shell",
"bytes": "379"
}
],
"symlink_target": ""
} |
def client_query_w_positional_params() -> None:
    """Run a BigQuery query that uses positional (`?`) query parameters."""
    # [START bigquery_query_params_positional]
    from google.cloud import bigquery
    # Construct a BigQuery client object.
    client = bigquery.Client()
    query = """
        SELECT word, word_count
        FROM `bigquery-public-data.samples.shakespeare`
        WHERE corpus = ?
        AND word_count >= ?
        ORDER BY word_count DESC;
    """
    # Set the name to None to use positional parameters.
    # Note that you cannot mix named and positional parameters.
    # Parameters are bound in the order the `?` placeholders appear.
    job_config = bigquery.QueryJobConfig(
        query_parameters=[
            bigquery.ScalarQueryParameter(None, "STRING", "romeoandjuliet"),
            bigquery.ScalarQueryParameter(None, "INT64", 250),
        ]
    )
    query_job = client.query(query, job_config=job_config)  # Make an API request.
    for row in query_job:
        print("{}: \t{}".format(row.word, row.word_count))
    # [END bigquery_query_params_positional]
| {
"content_hash": "bb171e7d2e225a01e7bfbed8b2203413",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 82,
"avg_line_length": 34.57142857142857,
"alnum_prop": 0.6394628099173554,
"repo_name": "googleapis/python-bigquery",
"id": "b088b305ea2231e66c111f434fa9e80b812ae7ef",
"size": "1545",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/client_query_w_positional_params.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "2520564"
},
{
"name": "Shell",
"bytes": "31939"
}
],
"symlink_target": ""
} |
from flask import Flask, g, request, redirect, send_from_directory
from biblioteca import Biblioteca
from json import dumps
# GLOBAL VARIABLES
app = Flask(__name__)
# NOTE(review): the database path and secret ('clorurodisodio') are hard-coded —
# consider moving them to configuration.
biblioteca = Biblioteca(g, 'database.db', 'clorurodisodio')
# OPERAZIONI DI SESSIONE
@app.before_request
def apri_connessione():
    """Redirect plain HTTP to HTTPS, then open the per-request DB connection."""
    if request.url.startswith('http://'):
        secure_url = request.url.replace('http://', 'https://')
        return redirect(secure_url, code = 301)
    biblioteca.manager.apri_connessione()
@app.teardown_request
def chiudi_connessione(exception):
    # Always release the database connection, even when the request errored.
    biblioteca.manager.chiudi_connessione()
# INVIO FILES
@app.route('/')
@app.route('/home')
def home():
    """Serve the landing page."""
    return send_from_directory('../client-side/html/', 'home.html')
@app.route('/<nome_pagina>')
def invia_pagina(nome_pagina):
    """Serve a named HTML page from the client-side html folder."""
    filename = nome_pagina + '.html'
    return send_from_directory('../client-side/html/', filename)
@app.route('/<nome_cartella>/<nome_file>')
def invia_file(nome_cartella, nome_file):
    """Serve a static asset from the given client-side subfolder."""
    directory = '../client-side/' + nome_cartella + '/'
    return send_from_directory(directory, nome_file)
@app.route('/img/copertine/<nome_file>')
def invia_copertina(nome_file):
    """Serve a book cover image."""
    return send_from_directory('../client-side/img/copertine/', nome_file)
# CONTESTI
# Accedi
@app.route('/accedi', methods = ['POST'])
def accedi():
    """Log in: hash the submitted key and check it against authorized users."""
    chiave = request.get_json(force = True)['chiave']
    hash_chiave = biblioteca.genera_hash(chiave)
    valido = biblioteca.utente_autorizzato(hash_chiave)
    return dumps({'utente_valido': valido, 'hash_chiave': hash_chiave})
# Utente autorizzato
@app.route('/utente_autorizzato', methods = ['POST'])
def utente_autorizzato():
    """Check whether an already-hashed key belongs to an authorized user."""
    dati = request.get_json(force = True)
    valido = biblioteca.utente_autorizzato(dati['chiave'])
    return dumps({'utente_valido': valido})
# Leggi galleria
@app.route('/leggi_galleria', methods = ['POST'])
def leggi_galleria():
    """Return the gallery book list together with the ranking."""
    return dumps({
        'lista_libri': biblioteca.leggi_galleria(),
        'classifica': biblioteca.leggi_classifica(),
    })
# Leggi lista
@app.route('/leggi_lista', methods = ['POST'])
def leggi_lista():
    """Return the book list sorted by the requested field."""
    ordine = request.get_json(force = True)['ordine']
    # Dedicated queries exist for these three sort orders; anything else
    # is passed through to the generic query.
    letture_dedicate = {
        'titolo': biblioteca.leggi_lista_titolo,
        'autore': biblioteca.leggi_lista_autore,
        'stato': biblioteca.leggi_lista_stato,
    }
    if ordine in letture_dedicate:
        lista_libri = letture_dedicate[ordine]()
    else:
        lista_libri = biblioteca.leggi_lista(ordine)
    return dumps({'lista_libri': lista_libri})
# Esegui ricerca
@app.route('/esegui_ricerca', methods = ['POST'])
def esegui_ricerca():
    """Search the catalogue with the given filter and query text."""
    dati = request.get_json(force = True)
    risultato = biblioteca.esegui_ricerca(dati['filtro'], dati['richiesta'])
    return dumps({'lista_libri': risultato})
# Nuovo libro
@app.route('/nuovo_libro', methods = ['POST'])
def nuovo_libro():
    """Create a new book entry (authorized users only)."""
    dati = request.get_json(force = True)
    if not biblioteca.utente_autorizzato(dati['chiave']):
        return dumps({'non_autorizzato': True})
    codice = biblioteca.nuovo_libro(
        dati['titolo'], dati['autore'], dati['genere'],
        dati['descrizione'], dati['editore'], dati['anno'],
        dati['copertina']
    )
    # Every new book also gets a shelf-position record.
    biblioteca.nuova_posizione(codice)
    return dumps({'codice': codice})
# Leggi scheda
@app.route('/leggi_scheda', methods = ['POST'])
def leggi_scheda():
    """Return the full record of one book."""
    codice = request.get_json(force = True)['codice']
    return dumps({'scheda': biblioteca.leggi_scheda(codice)})
# Elimina scheda
@app.route('/elimina_scheda', methods = ['POST'])
def elimina_scheda():
    """Delete a book and all of its related records (authorized users only)."""
    dati = request.get_json(force = True)
    if not biblioteca.utente_autorizzato(dati['chiave']):
        return dumps({'non_autorizzato': True})
    codice = dati['codice']
    # Remove the cover first, then the record and its dependent data.
    biblioteca.elimina_copertina(codice)
    biblioteca.elimina_scheda(codice)
    biblioteca.elimina_recensioni(codice)
    biblioteca.elimina_posizione(codice)
    return dumps({'successo': True})
# Modifica scheda
@app.route('/modifica_scheda', methods = ['POST'])
def modifica_scheda():
    """Update a book's record; the book code may change as a result."""
    richiesta = request.get_json(force = True)
    chiave = richiesta['chiave']
    if not biblioteca.utente_autorizzato(chiave):
        return dumps({'non_autorizzato': True})
    codice = richiesta['codice']
    titolo = richiesta['titolo']
    autore = richiesta['autore']
    genere = richiesta['genere']
    descrizione = richiesta['descrizione']
    editore = richiesta['editore']
    anno = richiesta['anno']
    # Keep the existing cover: it is read before the old record is deleted.
    copertina = biblioteca.leggi_copertina(codice)
    biblioteca.elimina_scheda(codice)
    # NOTE(review): aggiorna_libro appears to re-insert the book and return a
    # possibly different code — confirm against Biblioteca's implementation.
    nuovo_codice = biblioteca.aggiorna_libro(titolo, autore, genere, descrizione, editore, anno, copertina)
    if (codice != nuovo_codice):
        # Code changed: migrate the cover, reviews and position to the new code.
        biblioteca.aggiorna_copertina(nuovo_codice, copertina)
        biblioteca.aggiorna_recensioni(codice, nuovo_codice)
        biblioteca.aggiorna_posizione(codice, nuovo_codice)
    return dumps({'codice': nuovo_codice})
# Modifica copertina
@app.route('/modifica_copertina', methods = ['POST'])
def modifica_copertina():
    """Replace a book's cover image (authorized users only)."""
    dati = request.get_json(force = True)
    if not biblioteca.utente_autorizzato(dati['chiave']):
        return dumps({'non_autorizzato': True})
    biblioteca.modifica_copertina(dati['codice'], dati['copertina'])
    return dumps({'successo': True})
# Leggi recensioni
@app.route('/leggi_recensioni', methods = ['POST'])
def leggi_recensioni():
    """Return the review summary and the reviews for a book."""
    libro = request.get_json(force = True)['libro']
    return dumps({
        'sommario': biblioteca.leggi_sommario(libro),
        'recensioni': biblioteca.leggi_recensioni(libro),
    })
# Invia recensione
@app.route('/invia_recensione', methods = ['POST'])
def invia_recensione():
    """Store a new review for a book (no authentication required)."""
    dati = request.get_json(force = True)
    biblioteca.invia_recensione(
        dati['libro'], dati['valore'], dati['autore'], dati['testo'])
    return dumps({'successo': True})
# Elimina recensione
@app.route('/elimina_recensione', methods = ['POST'])
def elimina_recensione():
    """Delete a review by its id (authorized users only)."""
    dati = request.get_json(force = True)
    if not biblioteca.utente_autorizzato(dati['chiave']):
        return dumps({'non_autorizzato': True})
    biblioteca.elimina_recensione(dati['id'])
    return dumps({'successo': True})
# Leggi posizione
@app.route('/leggi_posizione', methods = ['POST'])
def leggi_posizione():
    """Return the shelf position of a book."""
    libro = request.get_json(force = True)['libro']
    return dumps({'posizione': biblioteca.leggi_posizione(libro)})
# Modifica posizione
@app.route('/modifica_posizione', methods = ['POST'])
def modifica_posizione():
    """Update a book's shelf position (authorized users only)."""
    dati = request.get_json(force = True)
    if not biblioteca.utente_autorizzato(dati['chiave']):
        return dumps({'non_autorizzato': True})
    biblioteca.modifica_posizione(dati['libro'], dati['stato'], dati['testo'])
    return dumps({'successo': True})
# SERVER STARTUP
if __name__ == '__main__':
    # Threaded mode so concurrent requests do not block each other.
    app.run(threaded = True)
| {
"content_hash": "8988e1ec4aa349e8c22d380bf08aedc9",
"timestamp": "",
"source": "github",
"line_count": 223,
"max_line_length": 107,
"avg_line_length": 33.860986547085204,
"alnum_prop": 0.6857369884783472,
"repo_name": "tomellericcardo/Biblioteca",
"id": "d78616ce4778a109c2a1c221fbf16b22c745a8e9",
"size": "7576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server-side/webserver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "35420"
},
{
"name": "HTML",
"bytes": "50364"
},
{
"name": "JavaScript",
"bytes": "58317"
},
{
"name": "Python",
"bytes": "23157"
}
],
"symlink_target": ""
} |
default_app_config = 'test.account.apps.AppConfig'
| {
"content_hash": "f22e890d634c85c4d02fefb503707c8a",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 50,
"avg_line_length": 51,
"alnum_prop": 0.7843137254901961,
"repo_name": "rishikumar/django_test_model",
"id": "23c64483920511d081ca7ee0102f0ff97b3784d7",
"size": "51",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/account/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1212"
},
{
"name": "HTML",
"bytes": "20226"
},
{
"name": "JavaScript",
"bytes": "2394"
},
{
"name": "Python",
"bytes": "38359"
},
{
"name": "Shell",
"bytes": "3620"
}
],
"symlink_target": ""
} |
import json, pprint
from pathlib import Path
from aiohttp import web
from web_chains_202105 import utils
# ======================================================================
# HTML skeleton for a table page; the {placeholders} are filled by table_page().
sTablePage = """<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
{stylesheets}
{remote_scripts}
{inline_scripts}
<title>{table_name}</title>
</head>
<body>
<h2 id="title"></h2>
{body}
</body>
</html>
"""
# ======================================================================
def table_page(request, subtype_id, table_date):
    """Render the HTML page for one table (subtype + date).

    Embeds the collected table data as an inline ``table_page_data`` JS
    variable and returns an aiohttp HTML response.
    """
    # NOTE: sTablePage is only read here, so no `global` declaration is needed.
    remote_scripts = [
        "js/jquery.js",
        "js/table-page.js",
    ]
    stylesheets = [
        "js/maps.css",
        "js/table-page.css",
    ]
    data = collect_table_data(request=request, subtype_id=subtype_id, table_date=table_date)
    inline_scripts = [
        f"table_page_data =\n{json.dumps(data, separators=(',', ':'))};",
    ]
    return web.Response(
        text=sTablePage.format(
            remote_scripts="\n ".join(f'<script src="{script}" type="module"></script>' for script in remote_scripts),
            stylesheets="\n ".join(f'<link rel="stylesheet" href="{stylesheet}">' for stylesheet in stylesheets),
            inline_scripts="\n ".join(f'<script>\n{code}\n </script>' for code in inline_scripts),
            table_name=f"{subtype_id} {table_date}",
            body="",  # f"<pre>\n{pprint.pformat(data)}</pre>"
        ),
        content_type='text/html')
# ----------------------------------------------------------------------
def collect_table_data(request, subtype_id, table_date):
    """Collect per-chain chart/result file metadata for one subtype and date."""
    # Imported here rather than at module level, presumably to avoid a
    # circular import — TODO confirm.
    from web_chains_202105.chart import get_chart
    def collect_table_data_part():
        # Scan "i-*", "f-*" and "b-*" chain directories, newest first.
        # "i" directories are treated as individual tables, the rest as chains
        # (see the "type" value below).
        for patt in ["i-*", "f-*", "b-*"]:
            for subdir in sorted(Path(subtype_id).glob(patt), reverse=True):
                entries = make_entries(subdir, patt[0] == "i")
                if entries:
                    yield {
                        "type": "individual" if patt[0] == "i" else "chain",
                        "chain_id": subdir.name,
                        **entries
                    }
    def make_entries(subdir, individual):
        # Build {step-name: {file-kind: path, "date": chart date}} for files
        # belonging to table_date inside one chain directory.
        entries = {}
        filenames = [fn for fn in subdir.glob(f"*{table_date}.*") if "~" not in str(fn)] # ignore temp files (with ~pid~ infix) made by export chart
        if not filenames and subdir.name[0] == "b": # workaround for the backward chain step naming problem
            filenames = list(subdir.glob(f"*.{table_date}-*"))
        for filename in filenames:
            key1, key2 = keys_for_filename(filename)
            if individual and key1 == "scratch":
                key1 = "individual"
            entries.setdefault(key1, {})[key2] = str(filename)
            if key2 == "ace":
                # The authoritative chart date comes from the .ace file itself.
                chart = get_chart(request=request, filename=filename)
                chart_date = chart.date()
                entries[key1]["date"] = chart_date
                entries.setdefault("date", chart_date)
        return entries
    def keys_for_filename(filename):
        # Derive (step-name, file-kind) from the file's dotted suffixes.
        suffx = filename.suffixes
        if suffx[-1] == ".ace":
            if len(suffx) >= 2:  # 123.20160113-20210625.incremental.ace
                return (suffx[-2][1:], "ace")
            else:
                return ("scratch", "ace")
        elif suffx[-1] == ".json":
            if len(suffx) >= 3:  # 123.20160113-20210625.scratch.grid.json
                return (suffx[-3][1:], suffx[-2][1:])
            else:
                return ("scratch", suffx[-2][1:])
        else:
            return ("unknown", "".join(suffx))
    return {
        "subtype_id": subtype_id,
        "table_date": table_date,
        "parts": [part_data for part_data in collect_table_data_part() if part_data],
        **utils.format_subtype(request=request, subtype_id=subtype_id, date_range=[table_date, table_date])
    }
# ----------------------------------------------------------------------
# ======================================================================
| {
"content_hash": "08d7b43e04c67e558e090c6f8163f86f",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 149,
"avg_line_length": 36,
"alnum_prop": 0.4945436507936508,
"repo_name": "acorg/acmacs-whocc",
"id": "2a1b92195779c80a04ebb80e31742f7821dfcd3e",
"size": "4032",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/chains-202105/py/table_page.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "263999"
},
{
"name": "CSS",
"bytes": "4870"
},
{
"name": "HTML",
"bytes": "6432"
},
{
"name": "JavaScript",
"bytes": "24500"
},
{
"name": "Makefile",
"bytes": "3966"
},
{
"name": "Python",
"bytes": "392380"
},
{
"name": "R",
"bytes": "2424"
},
{
"name": "Shell",
"bytes": "15763"
}
],
"symlink_target": ""
} |
from oslo_log import log as logging
import sahara.context as context
import sahara.plugins.mapr.domain.configuration_file as bcf
import sahara.plugins.mapr.domain.node_process as np
import sahara.plugins.mapr.domain.service as s
import sahara.plugins.mapr.services.mysql.mysql as mysql
import sahara.plugins.mapr.util.general as g
import sahara.plugins.mapr.util.validation_utils as vu
LOG = logging.getLogger(__name__)
# Seconds to wait after (re)starting Oozie before callers proceed.
OOZIE_START_DELAY = 30
# The Oozie node process managed by this service (web UI on port 11000).
OOZIE = np.NodeProcess(
    name='oozie',
    ui_name='Oozie',
    package='mapr-oozie',
    open_ports=[11000]
)
class Oozie(s.Service):
    """Oozie workflow-scheduler service for the MapR Sahara plugin."""

    def __init__(self):
        super(Oozie, self).__init__()
        self._name = 'oozie'
        self._ui_name = 'Oozie'
        self._node_processes = [OOZIE]
        self._cluster_defaults = ['oozie-default.json']
        # A cluster must run exactly one Oozie process.
        self._validation_rules = [vu.exactly(1, OOZIE)]
        self._ui_info = [('Oozie', OOZIE,
                          {s.SERVICE_UI: 'http://%s:11000/oozie'})]

    def libext_path(self):
        # Where the MySQL connector jar is copied (overridden in 4.2.0+).
        return '/opt/mapr/oozie/oozie-%s/oozie-server/lib/' % self.version

    def get_config_files(self, cluster_context, configs, instance=None):
        """Render oozie-site.xml, merging user configs with generated JDBC props."""
        oozie_site = bcf.HadoopXML("oozie-site.xml")
        oozie_site.remote_path = self.conf_dir(cluster_context)
        if instance:
            # Start from the file already present on the instance.
            oozie_site.fetch(instance)
        oozie_site.load_properties(configs)
        oozie_site.add_properties(self._get_oozie_site_props(cluster_context))
        return [oozie_site]

    def _get_oozie_site_props(self, cluster_context):
        """Build the JPA/MySQL connection properties for oozie-site.xml."""
        oozie_specs = mysql.MySQL.OOZIE_SPECS
        return {
            'oozie.db.schema.name': oozie_specs.db_name,
            'oozie.service.JPAService.create.db.schema': True,
            'oozie.service.JPAService.jdbc.driver': mysql.MySQL.DRIVER_CLASS,
            'oozie.service.JPAService.jdbc.url': self._get_jdbc_uri(
                cluster_context),
            'oozie.service.JPAService.jdbc.username': oozie_specs.user,
            'oozie.service.JPAService.jdbc.password': oozie_specs.password,
            'oozie.service.HadoopAccessorService.hadoop.configurations':
                '*=%s' % cluster_context.hadoop_conf
        }

    def _get_jdbc_uri(self, cluster_context):
        """Return the MySQL JDBC URI pointing at the cluster's DB instance."""
        jdbc_uri = ('jdbc:mysql://%(db_host)s:%(db_port)s/%(db_name)s?'
                    'createDatabaseIfNotExist=true')
        jdbc_args = {
            'db_host': mysql.MySQL.get_db_instance(
                cluster_context).internal_ip,
            'db_port': mysql.MySQL.MYSQL_SERVER_PORT,
            'db_name': mysql.MySQL.OOZIE_SPECS.db_name,
        }
        return jdbc_uri % jdbc_args

    def install(self, cluster_context, instances):
        # oozie requires executed configure.sh
        # (package installation is therefore deferred to post_configure).
        pass

    def post_configure(self, cluster_context, instances):
        """Install the packages and register Oozie's warden config file."""
        super(Oozie, self).install(cluster_context, instances)
        oozie_instances = cluster_context.filter_instances(instances,
                                                           service=self)
        for instance in oozie_instances:
            with instance.remote() as r:
                r.execute_command(
                    'sudo cp '
                    '/opt/mapr/oozie/oozie-%s/conf/warden.oozie.conf '
                    '/opt/mapr/conf/conf.d' % self.version)

    def post_install(self, cluster_context, instances):
        """Copy the MySQL connector jar into Oozie and fix directory ownership."""
        oozie_inst = cluster_context.get_instance(OOZIE)
        oozie_service = cluster_context.get_service(OOZIE)
        if oozie_service:
            symlink_cmd = (
                'cp /opt/mapr/lib/mysql-connector-*.jar %s' %
                self.libext_path())
            with oozie_inst.remote() as r:
                LOG.debug('Installing MySQL connector for Oozie')
                # Best effort: the jar may already be in place.
                r.execute_command(symlink_cmd, run_as_root=True,
                                  raise_when_error=False)
        self._set_service_dir_owner(cluster_context, instances)

    def post_start(self, cluster_context, instances):
        """Rebuild the Oozie WAR once the service is running."""
        instances = cluster_context.filter_instances(instances, OOZIE)
        self._rebuild(cluster_context, instances)

    @g.remote_command(1)
    def _rebuild_oozie_war(self, remote, cluster_context):
        """Re-create the Oozie WAR against the cluster's Hadoop version."""
        cmd = '%(home)s/bin/oozie-setup.sh -hadoop %(version)s' \
              ' /opt/mapr/hadoop/hadoop-%(version)s'
        args = {'home': self.home_dir(cluster_context),
                'version': cluster_context.hadoop_version}
        remote.execute_command(cmd % args, run_as_root=True)

    def update(self, cluster_context, instances=None):
        """Rebuild Oozie on all (or the given) Oozie instances."""
        instances = instances or cluster_context.get_instances()
        instances = cluster_context.filter_instances(instances, OOZIE)
        self._rebuild(cluster_context, instances)

    def _rebuild(self, cluster_context, instances):
        """Stop running Oozie processes, rebuild the WAR, then restart them."""
        OOZIE.stop(filter(OOZIE.is_started, instances))
        g.execute_on_instances(
            instances, self._rebuild_oozie_war, cluster_context)
        OOZIE.start(instances)
        # Give Oozie time to come up before callers proceed.
        context.sleep(OOZIE_START_DELAY)
class OozieV401(Oozie):
    """Oozie 4.0.1 service variant."""

    def __init__(self):
        super(OozieV401, self).__init__()
        self._version = '4.0.1'
        self._dependencies = [('mapr-oozie-internal', self.version)]
class OozieV410(Oozie):
    """Oozie 4.1.0 service variant."""

    def __init__(self):
        super(OozieV410, self).__init__()
        self._version = '4.1.0'
        self._dependencies = [('mapr-oozie-internal', self.version)]
class OozieV420(Oozie):
    """Oozie 4.2.0 variant: different libext path plus a maprfs jar workaround."""

    def __init__(self):
        super(OozieV420, self).__init__()
        self._version = '4.2.0'
        self._dependencies = [('mapr-oozie-internal', self.version)]

    def libext_path(self):
        # 4.2.0 ships a dedicated libext directory for extra jars.
        return '/opt/mapr/oozie/oozie-%s/libext/' % self.version

    def post_install(self, cluster_context, instances):
        super(OozieV420, self).post_install(cluster_context, instances)
        self.fix_oozie_bug(cluster_context)

    def fix_oozie_bug(self, cluster_context):
        """Wrong maprfs jar bug

        On some environments Oozie installation
        process takes incorrect jar that causes failure
        to run jobs. This is a temporary bug in Oozie and
        is going to be fixed soon.
        """
        # Only MapR 5.1.0 is affected.
        if cluster_context.mapr_version != '5.1.0':
            return
        oozie_inst = cluster_context.get_instance(OOZIE)
        # Replace the bundled jar with a symlink to the correct one.
        command = "sudo rm /opt/mapr/hadoop/hadoop-2.7.0/share/hadoop/kms/" \
                  "tomcat/webapps/kms/WEB-INF/lib/maprfs-5.1.0-mapr.jar" \
                  " && sudo ln -s /opt/mapr/lib/maprfs-5.1.0-mapr.jar" \
                  " /opt/mapr/hadoop/hadoop-2.7.0/share/hadoop/kms/" \
                  "tomcat/webapps/kms/WEB-INF/lib/maprfs-5.1.0-mapr.jar"
        with oozie_inst.remote() as r:
            r.execute_command(command, run_as_root=True)
| {
"content_hash": "a7baf8aa603a8900eae9a1d02fa6adee",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 78,
"avg_line_length": 39.470588235294116,
"alnum_prop": 0.6093889716840537,
"repo_name": "egafford/sahara",
"id": "5ed1e0dd8c6e5f2bea60376e892bb1891a12042b",
"size": "7299",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sahara/plugins/mapr/services/oozie/oozie.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "952"
},
{
"name": "Python",
"bytes": "3332337"
},
{
"name": "Shell",
"bytes": "52759"
}
],
"symlink_target": ""
} |
import subprocess, os, sys
# Host OS name, 'Darwin' (macOS) or 'Linux'; used to honor per-OS restrictions.
uname = os.uname().sysname
def check_same(file_path, cmd, from_cmd, comments):
    """Print a diagnostic when command output differs from the doc-comment lines."""
    expected = "\n".join(comments)
    if from_cmd == expected:
        return
    print("error in", file_path, "executing", cmd)
    print("expected >>{}<<, len:{}".format(expected, len(expected)))
    print("got: >>{}<<, len:{}".format(from_cmd, len(from_cmd)))
def run(cmd):
    """Run *cmd* under bash and return its normalized output.

    On a non-zero exit, the captured output plus an ``ERROR: return code N``
    marker is returned instead of raising.
    """
    try:
        raw = subprocess.check_output(
            cmd, shell=True, universal_newlines=True, executable='/bin/bash')
    except subprocess.CalledProcessError as err:
        raw = err.output + "ERROR: return code {}".format(err.returncode)
    # Strip trailing whitespace and spaces that directly precede newlines.
    return raw.rstrip().replace(" \n", "\n")
class CommentStateMachine:
    """Tracks doc-comment blocks under a shell command and verifies their output."""

    def __init__(self, file_path):
        self.last_command = None
        self.last_command_output = None
        self.comments_below_command = []
        self.file_path = file_path
        self.osrestriction = None

    # seen a line without comment -> end of comment block
    def no_comment(self):
        """Flush the pending comment block, comparing it to the command output."""
        if not self.comments_below_command:
            return
        if self.osrestriction and self.osrestriction != uname:
            return
        if self.last_command_output is not None:
            check_same(self.file_path, self.last_command,
                       self.last_command_output, self.comments_below_command)
        self.last_command_output = None
        self.comments_below_command = []

    def comment(self, line):
        """Record one expected-output line (skipped when OS-restricted away)."""
        if self.osrestriction and self.osrestriction != uname:
            return
        self.comments_below_command.append(line)

    def line_with_command(self, line):
        """Flush any pending block, then execute the new command line."""
        self.no_comment()
        if self.osrestriction and self.osrestriction != uname:
            return
        self.last_command_output = run(line)
        self.last_command = line
if __name__ == "__main__":
    # Optional first argument restricts the run to files whose name contains it.
    if len(sys.argv) == 2:
        limit = sys.argv[1]
    else:
        limit = None
    # add target/debug to path so the doc-comment commands find the binaries
    cur_path = os.path.dirname(os.path.realpath(__file__))
    if os.environ.get('CARGO_TARGET_DIR'):
        d = os.environ.get('CARGO_TARGET_DIR')
        debug_dir = os.path.join(cur_path, d, 'debug')
        release_dir = os.path.join(cur_path, d, 'release')
    else:
        debug_dir = os.path.join(cur_path, 'target', 'debug')
        release_dir = os.path.join(cur_path, 'target', 'release')
    os.environ["PATH"] += os.pathsep + release_dir + os.pathsep + debug_dir
    src_dir = os.path.join(cur_path, 'src', 'bin')
    for root, dirs, files in os.walk(src_dir):
        for f in [i for i in files if i.endswith('.rs')]:
            # limit execution of tests to only files which match the first argument
            if limit and f.find(limit) < 0:
                continue
            path = os.path.join(root, f)
            m = CommentStateMachine(path)
            # Use a context manager: the original leaked the file handle.
            with open(path, encoding="utf-8") as source:
                for line in source:
                    if line.startswith('///'):
                        line = line[4:].rstrip()
                        if line.lower() == 'linux only:':
                            m.osrestriction = 'Linux'
                            m.no_comment()
                        elif line.lower() == 'mac only:':
                            m.osrestriction = 'Darwin'
                            m.no_comment()
                        elif line.startswith('$'):
                            # "$ cmd": run the command the following comments describe
                            m.line_with_command(line[2:])
                        elif len(line) == 0:
                            m.no_comment()
                        else:
                            m.comment(line)
                    elif len(line.strip()) == 0:
                        m.no_comment()
                    elif line.startswith('#'):
                        # skip build flags
                        pass
                    else:
                        # stop when we see the first non-comment line, e.g. `extern crate`
                        # TODO(review): a trailing comment block at EOF is never flushed
                        # (no m.no_comment() after the loop) — confirm whether intended.
                        m.no_comment()
                        break
"content_hash": "c926970c12f7661a789a476e2f243a3e",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 86,
"avg_line_length": 37.923809523809524,
"alnum_prop": 0.5198392767453541,
"repo_name": "philippkeller/apue-rust",
"id": "ba9b061be49c0f0137395d601ac4e8001b16e3e4",
"size": "4006",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2277"
},
{
"name": "Python",
"bytes": "4006"
},
{
"name": "Rust",
"bytes": "220693"
}
],
"symlink_target": ""
} |
"""
Deployment settings
All settings which are typically edited for a deployment should be done here
Deployers shouldn't typically need to edit any other files.
NOTE FOR DEVELOPERS:
/models/000_config.py is NOT in the Git repository, as this file will be changed
during deployments.
    For changes to be committed to trunk, please also edit:
deployment-templates/models/000_config.py
"""
# Remind admin to edit this file
FINISHED_EDITING_CONFIG_FILE = False # change to True after you finish editing this file
# Database settings
deployment_settings.database.db_type = "sqlite"
#deployment_settings.database.db_type = "postgres"
deployment_settings.database.host = "localhost"
deployment_settings.database.port = None # use default
deployment_settings.database.database = "sahana"
deployment_settings.database.username = "sahana"
deployment_settings.database.password = "password" # NB Web2Py doesn't like passwords with an @ in them
deployment_settings.database.pool_size = 30
# Authentication settings
# This setting should be changed _before_ registering the 1st user
deployment_settings.auth.hmac_key = "akeytochange"
# These settings should be changed _after_ the 1st (admin) user is
# registered in order to secure the deployment
# Should users be allowed to register themselves?
deployment_settings.security.self_registration = True
deployment_settings.auth.registration_requires_verification = False
deployment_settings.auth.registration_requires_approval = False
# The name of the teams that users are added to when they opt-in to receive alerts
#deployment_settings.auth.opt_in_team_list = ["Updates"]
# Uncomment this to set the opt in default to True
#deployment_settings.auth.opt_in_default = True
# Uncomment this to request the Mobile Phone when a user registers
#deployment_settings.auth.registration_requests_mobile_phone = True
# Uncomment this to have the Mobile Phone selection during registration be mandatory
#deployment_settings.auth.registration_mobile_phone_mandatory = True
# Uncomment this to request the Organisation when a user registers
#deployment_settings.auth.registration_requests_organisation = True
# Uncomment this to have the Organisation selection during registration be mandatory
#deployment_settings.auth.registration_organisation_mandatory = True
# Uncomment this to have the Organisation input hidden unless the user enters a non-whitelisted domain
#deployment_settings.auth.registration_organisation_hidden = True
# Uncomment this to default the Organisation during registration
deployment_settings.auth.registration_organisation_default = "ReadyISCRAM"
# Uncomment & populate these to set the default roles assigned to newly-registered users
deployment_settings.auth.registration_roles = ["ASSET_EDIT","LOGS_EDIT","PROJECT_EDIT"]
# Uncomment this to request an image when users register
#deployment_settings.auth.registration_requests_image = True
# Uncomment this to direct newly-registered users to their volunteer page to be able to add extra details
# NB This requires Verification/Approval to be Off
# @ToDo: Extend to all optional Profile settings: Homepage, Twitter, Facebook, Mobile Phone, Image
#deployment_settings.auth.registration_volunteer = True
# Uncomment this to allow users to Login using Gmail's SMTP
#deployment_settings.auth.gmail_domains = ["gmail.com"]
# Fill these to allow users to Login using Facebook
# https://developers.facebook.com/apps
#deployment_settings.auth.facebook_id = ""
#deployment_settings.auth.facebook_secret = ""
# Fill these to allow users to Login using Google
# https://code.google.com/apis/console/
#deployment_settings.auth.google_id = ""
#deployment_settings.auth.google_secret = ""
# Uncomment this to allow users to Login using OpenID
#deployment_settings.auth.openid = True
# Always notify the approver of a new (verified) user, even if the user is automatically approved
deployment_settings.auth.always_notify_approver = True
# Base settings
deployment_settings.base.system_name = T("Sahana Eden Humanitarian Management Platform")
deployment_settings.base.system_name_short = T("Sahana Eden")
# Set this to the Public URL of the instance
deployment_settings.base.public_url = "http://127.0.0.1:8000"
#deployment_settings.base.public_url = "http://readyiscram.org"
# Switch to "False" in Production for a Performance gain
# (need to set to "True" again when Table definitions are changed)
deployment_settings.base.migrate = True
# To just create the .table files:
#deployment_settings.base.fake_migrate = True
# Enable/disable pre-population of the database.
# Should be non-zero on 1st_run to pre-populate the database
# - unless doing a manual DB migration
# Then set to zero in Production (to save 1x DAL hit every page)
# NOTE: the web UI will not be accessible while the DB is empty,
# instead run:
# python web2py.py -N -S eden -M
# to create the db structure, then exit and re-import the data.
# This is a simple status flag with the following meanings
# 0 - No pre-population
# 1 - Base data entered in the database
# 2 - Regression (data used by the regression tests)
# 3 - Scalability testing
# 4-9 Reserved
# 10 - User (data required by the user typically for specialised test)
# 11-19 Reserved
# 20+ Demo (Data required for a default demo)
# Each subsequent Demos can take any unique number >= 20
# The actual demo will be defined by the file demo_folders.cfg
deployment_settings.base.prepopulate = ['demo/ISCRAM']
# Set this to True to use Content Delivery Networks to speed up Internet-facing sites
deployment_settings.base.cdn = False
# Set this to True to switch to Debug mode
# Debug mode means that uncompressed CSS/JS files are loaded
# JS Debug messages are also available in the Console
# can also load an individual page in debug mode by appending URL with
# ?debug=1
deployment_settings.base.debug = False
# Email settings
# Outbound server
deployment_settings.mail.server = "127.0.0.1:25"
#deployment_settings.mail.tls = True
# Useful for Windows Laptops:
#deployment_settings.mail.server = "smtp.gmail.com:587"
#deployment_settings.mail.tls = True
#deployment_settings.mail.login = "username:password"
# From Address
deployment_settings.mail.sender = "'Sahana' <sahana@readyiscram.org>"
# Default email address to which requests to approve new user accounts gets sent
# This can be overridden for specific domains/organisations via the auth_domain table
deployment_settings.mail.approver = "useradmin@readyiscram.org"
# Daily Limit on Sending of emails
#deployment_settings.mail.limit = 1000
# Frontpage settings
# RSS feeds
deployment_settings.frontpage.rss = []
#{"title": "Eden",
# # Trac timeline
# "url": "http://eden.sahanafoundation.org/timeline?ticket=on&changeset=on&milestone=on&wiki=on&max=50&daysback=90&format=rss"
#},
#{"title": "Twitter",
# # @SahanaFOSS
# # Find ID via http://api.twitter.com/users/show/username.json
# "url": "http://twitter.com/statuses/user_timeline/96591754.rss"
# # Hashtag
# #url: "http://search.twitter.com/search.atom?q=%23eqnz"
#}
#]
# L10n settings
#deployment_settings.L10n.default_country_code = 1
# Languages used in the deployment (used for Language Toolbar & GIS Locations)
# http://www.loc.gov/standards/iso639-2/php/code_list.php
deployment_settings.L10n.languages = OrderedDict([
("ar", "العربية"),
("zh-cn", "中文 (简体)"),
("zh-tw", "中文 (繁體)"),
("en", "English"),
("fr", "Français"),
("de", "Deutsch"),
("el", "ελληνικά"),
("it", "Italiano"),
("ja", "日本語"),
("ko", "한국어"),
("pt", "Português"),
("pt-br", "Português (Brasil)"),
("ru", "русский"),
("es", "Español"),
("tl", "Tagalog"),
("ur", "اردو"),
("vi", "Tiếng Việt"),
])
# Default language for Language Toolbar (& GIS Locations in future)
deployment_settings.L10n.default_language = "en"
# Display the language toolbar
deployment_settings.L10n.display_toolbar = True
# Default timezone for users
deployment_settings.L10n.utc_offset = "UTC +0000"
# Uncomment these to use US-style dates in English (localisations can still convert to local format)
#deployment_settings.L10n.date_format = T("%m-%d-%Y")
#deployment_settings.L10n.time_format = T("%H:%M:%S")
#deployment_settings.L10n.datetime_format = T("%m-%d-%Y %H:%M:%S")
# Religions used in Person Registry
# @ToDo: find a better code
# http://eden.sahanafoundation.org/ticket/594
deployment_settings.L10n.religions = {
"none":T("none"),
"christian":T("Christian"),
"muslim":T("Muslim"),
"jewish":T("Jewish"),
"buddhist":T("Buddhist"),
"hindu":T("Hindu"),
"bahai":T("Bahai"),
"other":T("other")
}
# Make last name in person/user records mandatory
#deployment_settings.L10n.mandatory_lastname = True
# Number formats (defaults to ISO 31-0)
# Decimal separator for numbers (defaults to ,)
deployment_settings.L10n.decimal_separator = "."
# Thousands separator for numbers (defaults to space)
#deployment_settings.L10n.thousands_separator = ","
# Finance settings
deployment_settings.fin.currencies = {
"USD" :T("United States Dollars"),
"CHF" :T("Swiss Francs"),
"EUR" :T("Euros"),
"GBP" :T("Great British Pounds"),
"CAD" :T("Canadian Dollars"),
"AUD" :T("Australian Dollars")
}
#deployment_settings.fin.currency_default = "USD" # Dollars
#deployment_settings.fin.currency_writable = False # False currently breaks things
# PDF settings
# Default page size for reports (defaults to A4)
#deployment_settings.base.paper_size = T("Letter")
# Location of Logo used in pdfs headers
#deployment_settings.ui.pdf_logo = "static/img/mylogo.png"
# GIS (Map) settings
# Size of the Embedded Map
# Change this if-required for your theme
# NB API can override this in specific modules
#deployment_settings.gis.map_height = 600
#deployment_settings.gis.map_width = 1000
# Restrict the Location Selector to just certain countries
# NB This can also be over-ridden for specific contexts later
# e.g. Activities filtered to those of parent Project
#deployment_settings.gis.countries = ["US"]
# Hide the Map-based selection tool in the Location Selector
#deployment_settings.gis.map_selector = False
# Hide LatLon boxes in the Location Selector
#deployment_settings.gis.latlon_selector = False
# Use Building Names as a separate field in Street Addresses?
#deployment_settings.gis.building_name = False
# Display Resources recorded to Admin-Level Locations on the map
# @ToDo: Move into gis_config?
deployment_settings.gis.display_L0 = False
# Currently unused
#deployment_settings.gis.display_L1 = True
# Set this if there will be multiple areas in which work is being done,
# and a menu to select among them is wanted. With this on, any map
# configuration that is designated as being available in the menu will appear
#deployment_settings.gis.menu = T("Maps")
# Maximum Marker Size
# (takes effect only on display)
#deployment_settings.gis.marker_max_height = 35
#deployment_settings.gis.marker_max_width = 30
# Duplicate Features so that they show wrapped across the Date Line?
# Points only for now
# lon<0 have a duplicate at lon+360
# lon>0 have a duplicate at lon-360
#deployment_settings.gis.duplicate_features = True
# Mouse Position: 'normal', 'mgrs' or 'off'
#deployment_settings.gis.mouse_position = "mgrs"
# Print Service URL: http://eden.sahanafoundation.org/wiki/BluePrintGISPrinting
#deployment_settings.gis.print_service = "/geoserver/pdf/"
# Do we have a spatial DB available? (currently supports PostGIS. Spatialite to come.)
deployment_settings.gis.spatialdb = True
# Bing API Key (for Map layers)
#deployment_settings.gis.api_bing = ""
# Google API Key (for Earth & MapMaker Layers)
# default works for localhost
#deployment_settings.gis.api_google = ""
# Yahoo API Key (for Geocoder)
#deployment_settings.gis.api_yahoo = ""
# GeoServer (Currently used by GeoExplorer. Will allow REST control of GeoServer.)
# NB Needs to be publicly-accessible URL for querying via client JS
#deployment_settings.gis.geoserver_url = "http://localhost/geoserver"
#deployment_settings.gis.geoserver_username = "admin"
#deployment_settings.gis.geoserver_password = ""
# Twitter settings:
# Register an app at http://twitter.com/apps
# (select Application Type: Client)
# You'll get your consumer_key and consumer_secret from Twitter
#deployment_settings.twitter.oauth_consumer_key = ""
#deployment_settings.twitter.oauth_consumer_secret = ""
# Use 'soft' deletes
#deployment_settings.security.archive_not_delete = False
# AAA Settings
# Security Policy
# http://eden.sahanafoundation.org/wiki/S3AAA#System-widePolicy
# 1: Simple (default): Global as Reader, Authenticated as Editor
# 2: Editor role required for Update/Delete, unless record owned by session
# 3: Apply Controller ACLs
# 4: Apply both Controller & Function ACLs
# 5: Apply Controller, Function & Table ACLs
# 6: Apply Controller, Function, Table & Organisation ACLs
#
deployment_settings.security.policy = 6 # Organisation-ACLs
#acl = deployment_settings.aaa.acl
#deployment_settings.aaa.default_uacl = acl.READ # User ACL
#deployment_settings.aaa.default_oacl = acl.CREATE | acl.READ | acl.UPDATE # Owner ACL
# Lock-down access to Map Editing
#deployment_settings.security.map = True
# Allow non-MapAdmins to edit hierarchy locations? Defaults to True if not set.
# (Permissions can be set per-country within a gis_config)
#deployment_settings.gis.edit_Lx = False
# Allow non-MapAdmins to edit group locations? Defaults to False if not set.
#deployment_settings.gis.edit_GR = True
# Note that editing of locations used as regions for the Regions menu is always
# restricted to MapAdmins.
# Audit settings
# We Audit if either the Global or Module asks us to
# (ignore gracefully if module author hasn't implemented this)
# NB Auditing (especially Reads) slows system down & consumes diskspace
#deployment_settings.security.audit_write = False
#deployment_settings.security.audit_read = False
# UI/Workflow options
# Should user be prompted to save before navigating away?
#deployment_settings.ui.navigate_away_confirm = False
# Should user be prompted to confirm actions?
#deployment_settings.ui.confirm = False
# Should potentially large dropdowns be turned into autocompletes?
# (unused currently)
#deployment_settings.ui.autocomplete = True
#deployment_settings.ui.update_label = "Edit"
# Enable this for a UN-style deployment
#deployment_settings.ui.cluster = True
# Enable this to use the label 'Camp' instead of 'Shelter'
#deployment_settings.ui.camp = True
# Enable this to change the label for 'Mobile Phone'
#deployment_settings.ui.label_mobile_phone = T("Cell Phone")
# Enable this to change the label for 'Postcode'
#deployment_settings.ui.label_postcode = T("ZIP Code")
# Enable Social Media share buttons
#deployment_settings.ui.social_buttons = True
# Request
#deployment_settings.req.type_inv_label = T("Donations")
#deployment_settings.req.type_hrm_label = T("Volunteers")
# Allow the status for requests to be set manually,
# rather than just automatically from commitments and shipments
#deployment_settings.req.status_writable = False
#deployment_settings.req.quantities_writable = True
#deployment_settings.req.show_quantity_transit = False
#deployment_settings.req.multiple_req_items = False
#deployment_settings.req.use_commit = False
#deployment_settings.req.use_req_number = False
# Restrict the type of requests that can be made, valid values in the
# list are ["Stock", "People", "Other"]. If this is commented out then
# all types will be valid.
#deployment_settings.req.req_type = ["Stock"]
# Custom Crud Strings for specific req_req types
#deployment_settings.req.req_crud_strings = dict()
#ADD_ITEM_REQUEST = T("Make a Request for Donations")
#LIST_ITEM_REQUEST = T("List Requests for Donations")
# req_req Crud Strings for Item Request (type=1)
#deployment_settings.req.req_crud_strings[1] = Storage(
# title_create = ADD_ITEM_REQUEST,
# title_display = T("Request for Donations Details"),
# title_list = LIST_ITEM_REQUEST,
# title_update = T("Edit Request for Donations"),
# title_search = T("Search Requests for Donations"),
# subtitle_create = ADD_ITEM_REQUEST,
# subtitle_list = T("Requests for Donations"),
# label_list_button = LIST_ITEM_REQUEST,
# label_create_button = ADD_ITEM_REQUEST,
# label_delete_button = T("Delete Request for Donations"),
# msg_record_created = T("Request for Donations Added"),
# msg_record_modified = T("Request for Donations Updated"),
# msg_record_deleted = T("Request for Donations Canceled"),
# msg_list_empty = T("No Requests for Donations"))
#ADD_PEOPLE_REQUEST = T("Make a Request for Volunteers")
#LIST_PEOPLE_REQUEST = T("List Requests for Volunteers")
# req_req Crud Strings for People Request (type=3)
#deployment_settings.req.req_crud_strings[3] = Storage(
# title_create = ADD_PEOPLE_REQUEST,
# title_display = T("Request for Volunteers Details"),
# title_list = LIST_PEOPLE_REQUEST,
# title_update = T("Edit Request for Volunteers"),
# title_search = T("Search Requests for Volunteers"),
# subtitle_create = ADD_PEOPLE_REQUEST,
# subtitle_list = T("Requests for Volunteers"),
# label_list_button = LIST_PEOPLE_REQUEST,
# label_create_button = ADD_PEOPLE_REQUEST,
# label_delete_button = T("Delete Request for Volunteers"),
# msg_record_created = T("Request for Volunteers Added"),
# msg_record_modified = T("Request for Volunteers Updated"),
# msg_record_deleted = T("Request for Volunteers Canceled"),
# msg_list_empty = T("No Requests for Volunteers"))
# Inventory Management
#deployment_settings.inv.collapse_tabs = False
# Use the term 'Order' instead of 'Shipment'
#deployment_settings.inv.shipment_name = "order"
# Supply
#deployment_settings.supply.use_alt_name = False
# Do not edit after deployment
#deployment_settings.supply.catalog_default = T("Other Items")
# Organisation Management
# Set the length of the auto-generated org/site code the default is 10
#deployment_settings.org.site_code_len = 3
# Human Resource Management
# Uncomment to allow Staff & Volunteers to be registered without an email address
#deployment_settings.hrm.email_required = False
# Uncomment to hide the Staff resource
#deployment_settings.hrm.show_staff = False
# Uncomment to hide the Volunteer resource
#deployment_settings.hrm.show_vols = False
# Uncomment to allow hierarchical categories of Skills, which each need their own set of competency levels.
#deployment_settings.hrm.skill_types = True
# Project Tracking
# Uncomment this to use settings suitable for a global/regional organisation (e.g. DRR)
#deployment_settings.project.drr = True
# Uncomment this to use Milestones in project/task.
#deployment_settings.project.milestones = True
# Save Search Widget
#deployment_settings.save_search.widget = False
# Terms of Service to be able to Register on the system
#deployment_settings.options.terms_of_service = T("Terms of Service\n\nYou have to be eighteen or over to register as a volunteer.")
# Comment/uncomment modules here to disable/enable them
# @ToDo: have the system automatically enable migrate if a module is enabled
# Modules menu is defined in 01_menu.py
deployment_settings.modules = OrderedDict([
# Core modules which shouldn't be disabled
("default", Storage(
name_nice = T("Home"),
restricted = False, # Use ACLs to control access to this module
access = None, # All Users (inc Anonymous) can see this module in the default menu & access the controller
module_type = None # This item is not shown in the menu
)),
("admin", Storage(
name_nice = T("Administration"),
description = T("Site Administration"),
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
("appadmin", Storage(
name_nice = T("Administration"),
description = T("Site Administration"),
restricted = True,
module_type = None # No Menu
)),
("errors", Storage(
name_nice = T("Ticket Viewer"),
description = T("Needed for Breadcrumbs"),
restricted = False,
module_type = None # No Menu
)),
("sync", Storage(
name_nice = T("Synchronization"),
description = T("Synchronization"),
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
# Uncomment to enable internal support requests
#("support", Storage(
# name_nice = T("Support"),
# description = T("Support Requests"),
# restricted = True,
# module_type = None # This item is handled separately for the menu
# )),
("gis", Storage(
name_nice = T("Map"),
description = T("Situation Awareness & Geospatial Analysis"),
restricted = True,
module_type = 6, # 6th item in the menu
)),
("pr", Storage(
name_nice = T("Person Registry"),
description = T("Central point to record details on People"),
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu (access to controller is possible to all still)
module_type = 10
)),
("org", Storage(
name_nice = T("Organizations"),
description = T('Lists "who is doing what & where". Allows relief agencies to coordinate their activities'),
restricted = True,
module_type = 10
)),
# All modules below here should be possible to disable safely
("hrm", Storage(
name_nice = T("Staff & Volunteers"),
description = T("Human Resource Management"),
restricted = True,
module_type = 1,
)),
("doc", Storage(
name_nice = T("Documents"),
description = T("A library of digital resources, such as photos, documents and reports"),
restricted = True,
module_type = 10,
)),
("msg", Storage(
name_nice = T("Messaging"),
description = T("Sends & Receives Alerts via Email & SMS"),
restricted = True,
# The user-visible functionality of this module isn't normally required. Rather it's main purpose is to be accessed from other modules.
module_type = None,
)),
("supply", Storage(
name_nice = T("Supply Chain Management"),
description = T("Used within Inventory Management, Request Management and Asset Management"),
restricted = True,
module_type = None, # Not displayed
)),
("inv", Storage(
name_nice = T("Warehouse"),
description = T("Receiving and Sending Items"),
restricted = True,
module_type = 3
)),
#("proc", Storage(
# name_nice = T("Procurement"),
# description = T("Ordering & Purchasing of Goods & Services"),
# restricted = True,
# module_type = 10
# )),
("asset", Storage(
name_nice = T("Assets"),
description = T("Recording and Assigning Assets"),
restricted = True,
module_type = 2,
)),
# Vehicle depends on Assets
("vehicle", Storage(
name_nice = T("Vehicles"),
description = T("Manage Vehicles"),
restricted = True,
module_type = 10,
)),
("req", Storage(
name_nice = T("Requests"),
description = T("Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested."),
restricted = True,
module_type = 10,
)),
("project", Storage(
name_nice = T("Projects"),
description = T("Tracking of Projects, Activities and Tasks"),
restricted = True,
module_type = 10
)),
("survey", Storage(
name_nice = T("Assessments"),
description = T("Create, enter, and manage assessments."),
restricted = True,
module_type = 5,
)),
("cr", Storage(
name_nice = T("Shelters"),
description = T("Tracks the location, capacity and breakdown of victims in Shelters"),
restricted = True,
module_type = 10
)),
("hms", Storage(
name_nice = T("Hospitals"),
description = T("Helps to monitor status of hospitals"),
restricted = True,
module_type = 10
)),
("irs", Storage(
name_nice = T("Incidents"),
description = T("Incident Reporting System"),
restricted = False,
module_type = 4
)),
#("impact", Storage(
# name_nice = T("Impacts"),
# description = T("Used by Assess"),
# restricted = True,
# module_type = None,
# )),
# Assess currently depends on CR, IRS & Impact
# Deprecated by Surveys module
#("assess", Storage(
# name_nice = T("Assessments"),
# description = T("Rapid Assessments & Flexible Impact Assessments"),
# restricted = True,
# module_type = 10,
# )),
("scenario", Storage(
name_nice = T("Scenarios"),
description = T("Define Scenarios for allocation of appropriate Resources (Human, Assets & Facilities)."),
restricted = True,
module_type = 10,
)),
("event", Storage(
name_nice = T("Events"),
description = T("Activate Events (e.g. from Scenario templates) for allocation of appropriate Resources (Human, Assets & Facilities)."),
restricted = True,
module_type = 10,
)),
# NB Budget module depends on Project Tracking Module
# @ToDo: Rewrite in a modern style
#("budget", Storage(
# name_nice = T("Budgeting Module"),
# description = T("Allows a Budget to be drawn up"),
# restricted = True,
# module_type = 10
# )),
# @ToDo: Port these Assessments to the Survey module
#("building", Storage(
# name_nice = T("Building Assessments"),
# description = T("Building Safety Assessments"),
# restricted = True,
# module_type = 10,
# )),
# These are specialist modules
# Requires RPy2
#("climate", Storage(
# name_nice = T("Climate"),
# description = T("Climate data portal"),
# restricted = True,
# module_type = 10,
#)),
("delphi", Storage(
name_nice = T("Delphi Decision Maker"),
description = T("Supports the decision making of large groups of Crisis Management Experts by helping the groups create ranked list."),
restricted = False,
module_type = 10,
)),
#("dvi", Storage(
# name_nice = T("Disaster Victim Identification"),
# description = T("Disaster Victim Identification"),
# restricted = True,
# module_type = 10,
# #access = "|DVI|", # Only users with the DVI role can see this module in the default menu & access the controller
# #audit_read = True, # Can enable Audit for just an individual module here
# #audit_write = True
# )),
#("mpr", Storage(
# name_nice = T("Missing Person Registry"),
# description = T("Helps to report and search for missing persons"),
# restricted = False,
# module_type = 10,
# )),
("cms", Storage(
name_nice = T("Content Management"),
description = T("Content Management System"),
restricted = True,
module_type = 10,
)),
#("member", Storage(
# name_nice = T("Members"),
# description = T("Membership Management System"),
# restricted = True,
# module_type = 10,
# )),
#("fire", Storage(
# name_nice = T("Fire Stations"),
# description = T("Fire Station Management"),
# restricted = True,
# module_type = 1,
# )),
#("patient", Storage(
# name_nice = T("Patient Tracking"),
# description = T("Tracking of Patients"),
# restricted = True,
# module_type = 10
# )),
#("ocr", Storage(
# name_nice = T("Optical Character Recognition"),
# description = T("Optical Character Recognition for reading the scanned handwritten paper forms."),
# restricted = False,
# module_type = 10
# )),
# This module has very limited functionality
#("flood", Storage(
# name_nice = T("Flood Alerts"),
# description = T("Flood Alerts show water levels in various parts of the country"),
# restricted = False,
# module_type = 10
# )),
])
| {
"content_hash": "594b7b70873340e9b1a03a6de8b4b842",
"timestamp": "",
"source": "github",
"line_count": 690,
"max_line_length": 153,
"avg_line_length": 42.59130434782609,
"alnum_prop": 0.6768408874370492,
"repo_name": "flavour/iscram",
"id": "2eaafa242da190bbc1085bdfab524c685b675bee",
"size": "29475",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deployment-templates/models/000_config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "10046797"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "21691465"
}
],
"symlink_target": ""
} |
"""Modified input prompt for entering text with >>> or ... at the start.
We define a special input line filter to allow typing lines which begin with
'>>> ' or '... '. These two strings, if present at the start of the input
line, are stripped. This allows for direct pasting of code from examples such
as those available in the standard Python tutorial.
Normally pasting such code in one chunk is impossible because of the
extraneous >>> and ..., requiring one to do a line by line paste with careful
removal of those characters. This module allows pasting that kind of
multi-line examples in one pass.
Here is a 'screenshot' of a section of the tutorial pasted into IPython with
this feature enabled:
In [1]: >>> def fib2(n): # return Fibonacci series up to n
...: ... '''Return a list containing the Fibonacci series up to n.'''
...: ... result = []
...: ... a, b = 0, 1
...: ... while b < n:
...: ... result.append(b) # see below
...: ... a, b = b, a+b
...: ... return result
...:
In [2]: fib2(10)
Out[2]: [1, 1, 2, 3, 5, 8]
The >>> and ... are stripped from the input so that the python interpreter
only sees the real part of the code.
All other input is processed normally.
Notes
=====
* You can even paste code that has extra initial spaces, such as is common in
doctests:
In [3]: >>> a = ['Mary', 'had', 'a', 'little', 'lamb']
In [4]: >>> for i in range(len(a)):
...: ... print i, a[i]
...: ...
0 Mary
1 had
2 a
3 little
4 lamb
"""
#*****************************************************************************
# Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#*****************************************************************************
from IPython import Release
__author__ = '%s <%s>' % Release.authors['Fernando']
__license__ = Release.license
# This file is an example of how to modify IPython's line-processing behavior
# without touching the internal code. We'll define an alternate pre-processing
# stage which allows a special form of input (which is invalid Python syntax)
# for certain quantities, rewrites a line of proper Python in those cases, and
# then passes it off to IPython's normal processor for further work.
# With this kind of customization, IPython can be adapted for many
# special-purpose scenarios providing alternate input syntaxes.
# This file can be imported like a regular module.
# IPython has a prefilter() function that analyzes each input line. We redefine
# it here to first pre-process certain forms of input
# The prototype of any alternate prefilter must be like this one (the name
# doesn't matter):
# - line is a string containing the user input line.
# - continuation is a parameter which tells us if we are processing a first
# line of user input or the second or higher of a multi-line statement.
import re
from IPython.iplib import InteractiveShell
PROMPT_RE = re.compile(r'(^[ \t]*>>> |^[ \t]*\.\.\. )')
def prefilter_paste(self, line, continuation):
    """Alternate prefilter for input of pasted code from an interpreter.

    Strips leading '>>> ' / '... ' prompts so whole interpreter sessions
    (e.g. tutorial examples or doctests) can be pasted in one chunk.
    """
    if not line:
        return ''
    prompt = PROMPT_RE.match(line)
    if prompt is not None:
        # Drop the prompt prefix and hand the remainder to IPython's default
        # prefilter.  self is passed explicitly because we call the unbound
        # class method (this function will overwrite the instance prefilter()).
        return self._prefilter(line[len(prompt.group(0)):], continuation)
    if line.strip() == '...':
        return self._prefilter('', continuation)
    if line.isspace():
        # Swallow blank lines between pasted prompts; this lets multiple
        # input prompts separated by blank lines (common in doctests or long
        # tutorial passages) be pasted as a single chunk.
        return ''
    # All other input is processed normally.
    return self._prefilter(line, continuation)
def activate_prefilter():
    """Install the paste-aware filter as IPython's input prefilter."""
    setattr(InteractiveShell, 'prefilter', prefilter_paste)
def deactivate_prefilter():
    """Restore IPython's original default input prefilter."""
    setattr(InteractiveShell, 'prefilter', InteractiveShell._prefilter)
# Just a heads up at the console
# Importing this module immediately activates the paste-aware prefilter.
activate_prefilter()
print '*** Pasting of code with ">>>" or "..." has been enabled.'
| {
"content_hash": "cfc72aa547a037f8db26193d18879239",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 79,
"avg_line_length": 36.553719008264466,
"alnum_prop": 0.6520461225412616,
"repo_name": "santisiri/popego",
"id": "bcedda2483d0cb7c632cb708e6c5491112fd399a",
"size": "4447",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "envs/ALPHA-POPEGO/lib/python2.5/site-packages/ipython-0.8.2-py2.5.egg/IPython/Extensions/InterpreterPasteInput.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1246"
},
{
"name": "C",
"bytes": "504141"
},
{
"name": "C++",
"bytes": "26125"
},
{
"name": "CSS",
"bytes": "342653"
},
{
"name": "FORTRAN",
"bytes": "4872"
},
{
"name": "GAP",
"bytes": "13267"
},
{
"name": "Genshi",
"bytes": "407"
},
{
"name": "Groff",
"bytes": "17116"
},
{
"name": "HTML",
"bytes": "383181"
},
{
"name": "JavaScript",
"bytes": "1090769"
},
{
"name": "Makefile",
"bytes": "2441"
},
{
"name": "Mako",
"bytes": "376944"
},
{
"name": "Python",
"bytes": "20895618"
},
{
"name": "Ruby",
"bytes": "3380"
},
{
"name": "Shell",
"bytes": "23581"
},
{
"name": "Smarty",
"bytes": "522"
},
{
"name": "TeX",
"bytes": "35712"
}
],
"symlink_target": ""
} |
from peewee import Model, PrimaryKeyField
from model import database
class BaseEntity(Model):
    """Common base for all peewee models: explicit PK + shared database binding."""
    id = PrimaryKeyField()  # explicit surrogate integer primary key

    class Meta:
        database = database  # bind every subclass to the app-wide database instance
| {
"content_hash": "06ad4533ad41b6855a418cb4f3fc272b",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 41,
"avg_line_length": 17.3,
"alnum_prop": 0.6994219653179191,
"repo_name": "Djaler/ZloyBot",
"id": "de3e4d336558f06a30e00d84d66afcaf10747cd5",
"size": "173",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model/base_entity.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "43497"
}
],
"symlink_target": ""
} |
import inspect
import urllib.request
import json
import pytest
from gemstone.util import as_completed
from gemstone.client.remote_service import RemoteService
from gemstone.client.structs import Result, MethodCall, BatchResult, AsyncMethodCall
# Placeholder endpoint used by all tests; never actually contacted because
# urllib.request.urlopen is monkeypatched with the dummy functions below.
DUMMY_SERVICE_URL = "http://example.com/api"
def test_remote_service_initialize():
    """The constructor should store the endpoint URL verbatim."""
    remote = RemoteService(DUMMY_SERVICE_URL)
    assert remote.url == DUMMY_SERVICE_URL
def test_remote_service_make_request_obj():
    """build_http_request_obj should produce a JSON POST Request object."""
    remote = RemoteService(DUMMY_SERVICE_URL)
    payload = {"test": "ok"}

    request = remote.build_http_request_obj(payload)

    assert isinstance(request, urllib.request.Request)
    assert request.get_full_url() == DUMMY_SERVICE_URL
    assert request.method == "POST"
    assert request.get_header("Content-Type".capitalize()) == 'application/json'
    assert request.data == b'{"test": "ok"}'
def dummy_urlopen(url, *args, **kwargs):
    """Stand-in for urllib.request.urlopen returning a canned JSON-RPC reply.

    If *url* is a urllib Request, the reply echoes the request's "id";
    otherwise the reply carries a null id.
    """

    class _CannedResponse:
        def __init__(self, request_id=None, *a, **k):
            self.id = request_id

        def read(self):
            payload = {"jsonrpc": "2.0", "error": None, "result": None,
                       "id": self.id}
            return json.dumps(payload).encode()

    if not isinstance(url, urllib.request.Request):
        return _CannedResponse()
    body = json.loads(url.data.decode())
    return _CannedResponse(body.get('id'))
def dummy_urlopen_batch(url, *args, **kwargs):
    """Stand-in for urlopen that echoes a JSON-RPC *batch* response.

    Returns one null-result reply per call in the request body, preserving
    each call's "id"; non-Request inputs yield an empty batch.
    """

    class _BatchResponse:
        def __init__(self, *ids):
            self.ids = ids

        def read(self):
            replies = [
                {"jsonrpc": "2.0", "error": None, "result": None, "id": reply_id}
                for reply_id in self.ids
            ]
            return json.dumps(replies).encode()

    if not isinstance(url, urllib.request.Request):
        return _BatchResponse()
    request_ids = [call["id"] for call in json.loads(url.data.decode())]
    return _BatchResponse(*request_ids)
def test_simple_call(monkeypatch):
    """A single RPC call should yield a Result tied to its method call."""
    remote = RemoteService(DUMMY_SERVICE_URL)
    monkeypatch.setattr(urllib.request, 'urlopen', dummy_urlopen)

    response = remote.call_method("test", [])

    assert isinstance(response, Result)
    assert response.id == response.method_call.id
def test_simple_call_notify(monkeypatch):
    """Notifications carry no id, so the client returns nothing."""
    remote = RemoteService(DUMMY_SERVICE_URL)
    monkeypatch.setattr(urllib.request, 'urlopen', dummy_urlopen)

    assert remote.notify("test", []) is None
def test_simple_batch_call(monkeypatch):
    """Batch calls should map each response back to its originating call."""
    remote = RemoteService(DUMMY_SERVICE_URL)
    monkeypatch.setattr(urllib.request, 'urlopen', dummy_urlopen_batch)

    batch = [
        MethodCall("test", []),
        MethodCall("test2", []),
        MethodCall("test3", []),
    ]
    outcome = remote.call_batch(*batch)

    assert isinstance(outcome, BatchResult)
    assert len(outcome) == 3
    for call in batch:
        assert outcome.get_response_for_call(call).id == call.id
    # A call that was never part of the batch has no matching response.
    assert outcome.get_response_for_call(MethodCall("invalid")) is None
def test_async_call(monkeypatch):
    """Async calls return an AsyncMethodCall that resolves to a Result."""
    remote = RemoteService(DUMMY_SERVICE_URL)
    monkeypatch.setattr(urllib.request, 'urlopen', dummy_urlopen)

    pending = remote.call_method_async("test", [])
    assert isinstance(pending, AsyncMethodCall)

    resolved = pending.result(wait=True)
    assert isinstance(resolved, Result)
    assert resolved.id == resolved.method_call.id
def test_batch_call_errors():
    """Non-MethodCall arguments to call_batch must raise TypeError."""
    remote = RemoteService(DUMMY_SERVICE_URL)
    with pytest.raises(TypeError):
        remote.call_batch(1, 2, 3)
def test_as_completed(monkeypatch):
    """as_completed should lazily yield every pending async call."""
    remote = RemoteService(DUMMY_SERVICE_URL)
    monkeypatch.setattr(urllib.request, 'urlopen', dummy_urlopen)

    pending = [remote.call_method_async("test", []) for _ in range(10)]
    completed = as_completed(*pending)

    # It is a generator (lazy), and exhausting it yields one item per call.
    assert inspect.isgenerator(completed)
    assert len(list(completed)) == 10
| {
"content_hash": "a4986755ec414d27d07016bb3038c8a5",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 98,
"avg_line_length": 27.75,
"alnum_prop": 0.6597168597168597,
"repo_name": "vladcalin/gemstone",
"id": "cca0600d8c56024723b0e641b3dbdb1bf3a7a569",
"size": "3885",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "32"
},
{
"name": "Python",
"bytes": "130541"
}
],
"symlink_target": ""
} |
'''
:codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import skipIf, TestCase
from salttesting.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import Salt Libs
from salt.states import keyboard
# Seed the module's loader dunders with empty dicts so the tests below can
# patch them via patch.dict() without a real salt loader in place.
keyboard.__salt__ = {}
keyboard.__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class KeyboardTestCase(TestCase):
    '''
    Test cases for salt.states.keyboard
    '''
    # 'system' function tests: 1

    def test_system(self):
        '''
        Test to set the keyboard layout for the system.

        Exercises four scenarios in order: layout already set, dry run
        (test=True), successful set, and failed set.
        '''
        name = 'salt'

        ret = {'name': name,
               'result': True,
               'comment': '',
               'changes': {}}

        # get_sys returns the current layout: 'salt' for the first call
        # (already set), then '' for the remaining three calls.
        mock = MagicMock(side_effect=[name, '', '', ''])
        # set_sys succeeds on the first real set, fails on the second.
        mock_t = MagicMock(side_effect=[True, False])
        with patch.dict(keyboard.__salt__, {'keyboard.get_sys': mock,
                                            'keyboard.set_sys': mock_t}):
            # Scenario 1: current layout already matches -> no-op success.
            comt = ('System layout {0} already set'.format(name))
            ret.update({'comment': comt})
            self.assertDictEqual(keyboard.system(name), ret)

            # Scenario 2: dry run reports the pending change with result None.
            with patch.dict(keyboard.__opts__, {'test': True}):
                comt = ('System layout {0} needs to be set'.format(name))
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(keyboard.system(name), ret)

            with patch.dict(keyboard.__opts__, {'test': False}):
                # Scenario 3: set_sys returns True -> success with changes.
                comt = ('Set system keyboard layout {0}'.format(name))
                ret.update({'comment': comt, 'result': True,
                            'changes': {'layout': name}})
                self.assertDictEqual(keyboard.system(name), ret)

                # Scenario 4: set_sys returns False -> failure, no changes.
                comt = ('Failed to set system keyboard layout')
                ret.update({'comment': comt, 'result': False, 'changes': {}})
                self.assertDictEqual(keyboard.system(name), ret)

    # 'xorg' function tests: 1

    def test_xorg(self):
        '''
        Test to set the keyboard layout for XOrg.

        Mirrors test_system: already set, dry run, success, then failure.
        '''
        name = 'salt'

        ret = {'name': name,
               'result': True,
               'comment': '',
               'changes': {}}

        # get_x: current layout 'salt' once, then '' three times.
        mock = MagicMock(side_effect=[name, '', '', ''])
        # set_x: one success, then one failure.
        mock_t = MagicMock(side_effect=[True, False])
        with patch.dict(keyboard.__salt__, {'keyboard.get_x': mock,
                                            'keyboard.set_x': mock_t}):
            comt = ('XOrg layout {0} already set'.format(name))
            ret.update({'comment': comt})
            self.assertDictEqual(keyboard.xorg(name), ret)

            with patch.dict(keyboard.__opts__, {'test': True}):
                comt = ('XOrg layout {0} needs to be set'.format(name))
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(keyboard.xorg(name), ret)

            with patch.dict(keyboard.__opts__, {'test': False}):
                comt = ('Set XOrg keyboard layout {0}'.format(name))
                ret.update({'comment': comt, 'result': True,
                            'changes': {'layout': name}})
                self.assertDictEqual(keyboard.xorg(name), ret)

                comt = ('Failed to set XOrg keyboard layout')
                ret.update({'comment': comt, 'result': False, 'changes': {}})
                self.assertDictEqual(keyboard.xorg(name), ret)
# Allow running this test module directly via the salt test harness.
if __name__ == '__main__':
    from integration import run_tests
    run_tests(KeyboardTestCase, needs_daemon=False)
| {
"content_hash": "cac231ea556bf39b2a7e48959107dd92",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 77,
"avg_line_length": 34.84905660377358,
"alnum_prop": 0.531672983216026,
"repo_name": "stephane-martin/salt-debian-packaging",
"id": "bd5af7a438edcb202cfbd55ad151d96e7c1cdb1b",
"size": "3718",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "salt-2016.3.2/tests/unit/states/keyboard_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "13798"
},
{
"name": "C",
"bytes": "986"
},
{
"name": "Groff",
"bytes": "13634346"
},
{
"name": "HTML",
"bytes": "39558"
},
{
"name": "Makefile",
"bytes": "20902"
},
{
"name": "NSIS",
"bytes": "22316"
},
{
"name": "PowerShell",
"bytes": "38719"
},
{
"name": "Python",
"bytes": "40857506"
},
{
"name": "SaltStack",
"bytes": "58278"
},
{
"name": "Scheme",
"bytes": "1790"
},
{
"name": "Shell",
"bytes": "829927"
},
{
"name": "Tcl",
"bytes": "6532"
},
{
"name": "TeX",
"bytes": "11632"
}
],
"symlink_target": ""
} |
# Demo script (Python 2): compares predictions from a random forest, a single
# decision tree, bagging, and the true class labels on the "party" dataset.
import numpy as np
#import dtree
import dtw
import bagging
import randomforest

# dtw.dtree is the weight-aware tree variant (plain dtree kept commented out).
tree = dtw.dtree()
#tree = dtree.dtree()
bagger = bagging.bagger()
forest = randomforest.randomforest()

party,classes,features = tree.read_data('../6 Trees/party.data')

# Uniform per-example weights summing to 1; the random-weight alternative is
# kept commented out for experimentation.
#w = np.random.rand((np.shape(party)[0]))/np.shape(party)[0]
w = np.ones((np.shape(party)[0]),dtype = float)/np.shape(party)[0]

# NOTE(review): positional argument meanings (10, 7, 2) inferred from call
# order only — confirm against randomforest.rf's signature.
f = forest.rf(party,classes,features,10,7,2,maxlevel=2)
print "RF prediction"
print forest.rfclass(f,party)

#t=tree.make_tree(party,classes,features)
t=tree.make_tree(party,w,classes,features)
#tree.printTree(t,' ')
print "Decision Tree prediction"
print tree.classifyAll(t,party)

# NOTE(review): this reprints the same full-tree predictions as above; a
# depth-limited "stump" tree was probably intended here — confirm.
print "Tree Stump Prediction"
print tree.classifyAll(t,party)

c=bagger.bag(party,classes,features,20)
print "Bagged Results"
print bagger.bagclass(c,party)

print "True Classes"
print classes
| {
"content_hash": "b0695cae30e259e527c148a371387e11",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 66,
"avg_line_length": 25.147058823529413,
"alnum_prop": 0.7485380116959064,
"repo_name": "Anderson-Lab/anderson-lab.github.io",
"id": "dc3e74a2ac7adab3a29050d7d46bcc1552530de4",
"size": "1290",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "csc_466_2021_spring/MLCode/Ch13/party.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "79604"
},
{
"name": "JavaScript",
"bytes": "53016"
},
{
"name": "Jupyter Notebook",
"bytes": "64098"
},
{
"name": "Python",
"bytes": "557510"
},
{
"name": "Ruby",
"bytes": "681"
},
{
"name": "SCSS",
"bytes": "64925"
},
{
"name": "Shell",
"bytes": "25"
}
],
"symlink_target": ""
} |
from ecpy.fields.FractionField import FractionField, FractionFieldElement
from ecpy.rings.Integer import ZZ
from ecpy.utils import gcd
class RationalField(FractionField):
  """The field of rational numbers, i.e. the fraction field of ZZ."""
  def __init__(s):
    """Construct the rationals as the fraction field over the integers."""
    super(RationalField, s).__init__(ZZ)
    s.element_class = RationalFieldElement
class RationalFieldElement(FractionFieldElement):
  """A rational number p/q, reduced to lowest terms on construction."""
  def __init__(s, *args):
    """Initialize from FractionFieldElement args and reduce p/q.

    The numerator/denominator pair (s.x, s.y) from the base class is
    divided by its gcd, computed once instead of twice as before.
    """
    FractionFieldElement.__init__(s, *args)
    p, q = s.x, s.y
    g = gcd(p, q)
    s.p = int(p // g)
    s.q = int(q // g)

  def __int__(s):
    """Convert to int.

    Raises:
      ValueError: if the value is not an exact integer (q does not divide p).
    """
    if s.p % s.q != 0:
      raise ValueError('Can\'t divide a value: %s' % s)
    return s.p // s.q
QQ = RationalField()  # module-level singleton for the field of rationals
| {
"content_hash": "cc84ad29cf821c572f753eaaf92e1715",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 73,
"avg_line_length": 27.772727272727273,
"alnum_prop": 0.6448445171849427,
"repo_name": "elliptic-shiho/ecpy",
"id": "0f0d160734badce6000592e7e3740e83de0a1192",
"size": "611",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ecpy/fields/RationalField.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "41935"
},
{
"name": "Makefile",
"bytes": "1666"
},
{
"name": "Python",
"bytes": "92179"
},
{
"name": "Shell",
"bytes": "101"
}
],
"symlink_target": ""
} |
"""
Provides an asynchronous, crypto and compression aware socket for connecting to
servers and processing incoming packet data.
Coordinates with the Timers plugin to honor wall-clock timers
"""
import logging
import select
import socket
import time
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import ciphers
from cryptography.hazmat.primitives.ciphers import algorithms, modes
from spock import utils
from spock.mcp import mcdata, mcpacket
from spock.plugins.base import PluginBase
from spock.utils import pl_announce
# Shared module logger and the process-wide cryptography backend instance.
logger = logging.getLogger('spock')
backend = default_backend()
class AESCipher(object):
    """AES/CFB8 stream cipher; the shared secret serves as both key and IV,
    matching the code that originally constructed the Cipher this way."""

    def __init__(self, shared_secret):
        algorithm = algorithms.AES(shared_secret)
        mode = modes.CFB8(shared_secret)
        cipher = ciphers.Cipher(algorithm, mode, backend)
        # Name courtesy of dx
        self.encryptifier = cipher.encryptor()
        self.decryptifier = cipher.decryptor()

    def encrypt(self, data):
        """Encrypt outgoing bytes, returning the ciphertext produced so far."""
        return self.encryptifier.update(data)

    def decrypt(self, data):
        """Decrypt incoming bytes, returning the plaintext produced so far."""
        return self.decryptifier.update(data)
class SelectSocket(socket.socket):
    """
    Provides an asynchronous socket with a poll method built on
    top of select.select for cross-platform compatibility
    """

    def __init__(self, timer):
        # timer is expected to expose get_timeout(); it bounds how long
        # poll() blocks so wall-clock timers still fire on time.
        super(SelectSocket, self).__init__(socket.AF_INET, socket.SOCK_STREAM)
        self.sending = False
        self.timer = timer

    def poll(self):
        """Block until the socket is ready (or the timer expires) and return
        a list of 'SOCKET_RECV' / 'SOCKET_SEND' / 'SOCKET_ERR' flags."""
        flags = []
        # Only ask select about writability when something queued a send;
        # the flag is one-shot and re-armed by NetCore.push().
        if self.sending:
            self.sending = False
            slist = [(self,), (self,), (self,)]
        else:
            slist = [(self,), (), (self,)]
        # A negative timeout means "no pending timer": block indefinitely.
        timeout = self.timer.get_timeout()
        if timeout >= 0:
            slist.append(timeout)
        try:
            rlist, wlist, xlist = select.select(*slist)
        except select.error as e:
            logger.error("SELECTSOCKET: Socket Error: %s", str(e))
            rlist, wlist, xlist = [], [], []
        if rlist:
            flags.append('SOCKET_RECV')
        if wlist:
            flags.append('SOCKET_SEND')
        if xlist:
            flags.append('SOCKET_ERR')
        return flags
class NetCore(object):
    """Protocol codec: tracks connection/compression/encryption state and
    translates between raw socket bytes and decoded packets, emitting an
    event per packet in both directions."""

    def __init__(self, sock, event):
        self.sock = sock
        self.event = event
        self.host = None
        self.port = None
        self.connected = False
        # NOTE: self.cipher only exists after enable_crypto(); all uses are
        # guarded by self.encrypted.
        self.encrypted = False
        self.proto_state = mcdata.HANDSHAKE_STATE
        self.comp_state = mcdata.PROTO_COMP_OFF
        self.comp_threshold = -1
        self.sbuff = b''  # outgoing byte buffer, drained by the send handler
        self.rbuff = utils.BoundBuffer()  # incoming bytes pending decode

    def connect(self, host='localhost', port=25565):
        """Connect the socket (blocking just for the handshake), then return
        it to non-blocking mode and emit a 'connect' event."""
        self.host = host
        self.port = port
        try:
            logger.info("NETCORE: Attempting to connect to host: %s port: %s",
                        host, port)
            # Set the connect to be a blocking operation
            self.sock.setblocking(True)
            self.sock.connect((self.host, self.port))
            self.sock.setblocking(False)
            self.connected = True
            self.event.emit('connect', (self.host, self.port))
            logger.info("NETCORE: Connected to host: %s port: %s", host, port)
        except socket.error as error:
            # Failure is logged only; self.connected stays False.
            logger.error("NETCORE: Error on Connect: %s", str(error))

    def set_proto_state(self, state):
        """Switch protocol state and announce it (e.g. 'PLAY_STATE')."""
        self.proto_state = state
        self.event.emit(mcdata.state_lookup[state] + '_STATE')

    def set_comp_state(self, threshold):
        """Record the compression threshold; negative leaves it disabled."""
        self.comp_threshold = threshold
        if threshold >= 0:
            self.comp_state = mcdata.PROTO_COMP_ON

    def push(self, packet):
        """Encode (and encrypt, if enabled) a packet onto the send buffer,
        emit its events, and arm the socket's send flag."""
        data = packet.encode(self.comp_state, self.comp_threshold)
        self.sbuff += (self.cipher.encrypt(data) if self.encrypted else data)
        self.event.emit(packet.ident, packet)
        self.event.emit(packet.str_ident, packet)
        self.sock.sending = True

    def push_packet(self, ident, data):
        """Convenience wrapper: build a Packet from ident/data and push it."""
        self.push(mcpacket.Packet(ident, data))

    def read_packet(self, data=b''):
        """Append received bytes (decrypting if enabled) and decode as many
        complete packets as the buffer holds, emitting events for each."""
        self.rbuff.append(
            self.cipher.decrypt(data) if self.encrypted else data)
        while self.rbuff:
            self.rbuff.save()
            try:
                packet = mcpacket.Packet(ident=(
                    self.proto_state,
                    mcdata.SERVER_TO_CLIENT
                )).decode(self.rbuff, self.comp_state)
            except utils.BufferUnderflowException:
                # Partial packet: rewind and wait for more bytes.
                self.rbuff.revert()
                break
            except mcpacket.PacketDecodeFailure as err:
                logger.warning('NETCORE: Packet decode failed')
                logger.warning(
                    'NETCORE: Failed packet ident is probably: %s',
                    err.packet.str_ident
                )
                self.event.emit('PACKET_ERR', err)
                break
            self.event.emit(packet.ident, packet)
            self.event.emit(packet.str_ident, packet)

    def enable_crypto(self, secret_key):
        """Turn on AES encryption for all subsequent traffic."""
        self.cipher = AESCipher(secret_key)
        self.encrypted = True

    def disable_crypto(self):
        """Turn off encryption and drop the cipher."""
        self.cipher = None
        self.encrypted = False

    def reset(self, sock):
        """Re-initialize all state around a fresh socket, keeping the
        existing event emitter."""
        self.__init__(sock, self.event)
@pl_announce('Net')
class NetPlugin(PluginBase):
    """Plugin that owns the socket, pumps it each tick, and routes socket
    and protocol events to the NetCore codec."""
    requires = ('Event', 'Timers')
    defaults = {
        'bufsize': 4096,
        'sock_quit': True,
    }
    # Event name -> handler method name, wired up by PluginBase.
    events = {
        'event_tick': 'tick',
        'SOCKET_RECV': 'handle_recv',
        'SOCKET_SEND': 'handle_send',
        'SOCKET_ERR': 'handle_err',
        'SOCKET_HUP': 'handle_hup',
        'PLAY<Disconnect': 'handle_disconnect',
        'HANDSHAKE>Handshake': 'handle_handshake',
        'LOGIN<Login Success': 'handle_login_success',
        'LOGIN<Set Compression': 'handle_comp',
        'PLAY<Set Compression': 'handle_comp',
        'kill': 'handle_kill',
    }

    def __init__(self, ploader, settings):
        super(NetPlugin, self).__init__(ploader, settings)
        self.bufsize = self.settings['bufsize']
        # When True, a fatal socket error/hangup kills the whole client.
        self.sock_quit = self.settings['sock_quit']
        self.sock = SelectSocket(self.timers)
        self.net = NetCore(self.sock, self.event)
        self.sock_dead = False
        ploader.provides('Net', self.net)

    def tick(self, name, data):
        """Per-tick pump: poll the socket when connected, otherwise sleep
        until the next timer is due."""
        if self.net.connected:
            for flag in self.sock.poll():
                self.event.emit(flag)
        else:
            timeout = self.timers.get_timeout()
            if timeout == -1:
                time.sleep(1)
            else:
                time.sleep(timeout)

    # SOCKET_RECV - Socket is ready to recieve data
    def handle_recv(self, name, data):
        """Read up to bufsize bytes; an empty read means the peer hung up."""
        if self.net.connected:
            try:
                data = self.sock.recv(self.bufsize)
                if not data:
                    self.event.emit('SOCKET_HUP')
                    return
                self.net.read_packet(data)
            except socket.error as error:
                self.event.emit('SOCKET_ERR', error)

    # SOCKET_SEND - Socket is ready to send data and Send buffer contains
    # data to send
    def handle_send(self, name, data):
        """Flush as much of the send buffer as the socket accepts; re-arm
        the send flag if bytes remain."""
        if self.net.connected:
            try:
                sent = self.sock.send(self.net.sbuff)
                self.net.sbuff = self.net.sbuff[sent:]
                if self.net.sbuff:
                    self.sock.sending = True
            except socket.error as error:
                logger.error(str(error))
                self.event.emit('SOCKET_ERR', error)

    # SOCKET_ERR - Socket Error has occured
    def handle_err(self, name, data):
        """Tear down and rebuild the socket after an error; optionally kill
        the client (sock_quit)."""
        self.sock.close()
        self.sock = SelectSocket(self.timers)
        self.net.reset(self.sock)
        logger.error("NETPLUGIN: Socket Error: %s", data)
        self.event.emit('disconnect', data)
        if self.sock_quit and not self.event.kill_event:
            self.sock_dead = True
            self.event.kill()

    # SOCKET_HUP - Socket has hung up
    def handle_hup(self, name, data):
        """Same recovery path as handle_err, for a peer hangup."""
        self.sock.close()
        self.sock = SelectSocket(self.timers)
        self.net.reset(self.sock)
        logger.error("NETPLUGIN: Socket has hung up")
        self.event.emit('disconnect', "Socket Hung Up")
        if self.sock_quit and not self.event.kill_event:
            self.sock_dead = True
            self.event.kill()

    # Handshake - Change to whatever the next state is going to be
    def handle_handshake(self, name, packet):
        self.net.set_proto_state(packet.data['next_state'])

    # Login Success - Change to Play state
    def handle_login_success(self, name, packet):
        self.net.set_proto_state(mcdata.PLAY_STATE)

    # Handle Set Compression packets
    def handle_comp(self, name, packet):
        self.net.set_comp_state(packet.data['threshold'])

    def handle_disconnect(self, name, packet):
        """Log and re-emit a server-initiated disconnect."""
        logger.info("NETPLUGIN: Disconnected: %s", packet.data['reason'])
        self.event.emit('disconnect', packet.data['reason'])

    # Kill event - Try to shutdown the socket politely
    def handle_kill(self, name, data):
        # Skip the write-side shutdown if the socket already died; it would
        # raise on a closed/errored socket.
        logger.info("NETPLUGIN: Kill event recieved, shutting down socket")
        if not self.sock_dead:
            self.sock.shutdown(socket.SHUT_WR)
        self.sock.close()
| {
"content_hash": "6760387f5edf7f24a830202fd9d486f7",
"timestamp": "",
"source": "github",
"line_count": 268,
"max_line_length": 79,
"avg_line_length": 34.42910447761194,
"alnum_prop": 0.5868646363931939,
"repo_name": "MrSwiss/SpockBot",
"id": "4e3e03e2c75e30ff6254abd6d1934a79cc182967",
"size": "9227",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spock/plugins/core/net.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "283412"
}
],
"symlink_target": ""
} |
from abc import ABC, abstractmethod
from .component import Component
class Session(ABC, Component):
    """A model session links a database with a lux application and its models.

    Concrete subclasses implement the unit-of-work primitives below
    (add/delete/flush/commit/close); rollback is a no-op by default.
    """
    def __init__(self, app, request=None):
        # request is optional: a session may exist outside an HTTP cycle.
        self.request = request
        self.init_app(app)

    @property
    def models(self):
        return self.app.models

    @property
    def auth(self):
        return self.app.auth

    @property
    def config(self):
        return self.app.config

    @abstractmethod
    def add(self, obj):
        """Stage *obj* for insertion/update in the underlying store."""
        pass

    @abstractmethod
    def delete(self, obj):
        """Stage *obj* for removal from the underlying store."""
        pass

    @abstractmethod
    def flush(self):
        """Push pending changes to the store without committing."""
        pass

    @abstractmethod
    def commit(self):
        """Commit the current transaction."""
        pass

    @abstractmethod
    def close(self):
        """Release the session's resources."""
        pass

    def rollback(self):
        # Default: nothing to roll back; override in transactional backends.
        pass
class Query(ABC):
    """Interface for a Query
    """
    def __init__(self, model, session):
        self.model = model
        self.session = session
        # When set (via load_only), restricts which fields are loaded.
        self.fields = None
        # Custom per-field filter callables, keyed by field name.
        self.filters = {}
        self.app.fire_event('on_query', data=self)

    @property
    def app(self):
        return self.model.app

    @property
    def request(self):
        return self.session.request

    @property
    def logger(self):
        return self.request.logger

    @abstractmethod
    def one(self):
        """Return a single element from this query.

        If the query has more than one element it should return the first
        and log an error. If no element found it should raise 404"""

    @abstractmethod
    def all(self):
        """Aggregate results of this query.

        :return: an iterable over models
        """

    @abstractmethod
    def count(self):
        """Return the number of elements in this query"""

    @abstractmethod
    def delete(self):
        """Delete all elements in this query"""

    @property
    def query_fields(self):
        # NOTE(review): returns the schema's fields mapping, or an empty
        # tuple when no schema exists — callers index it only after a
        # membership test, so the tuple fallback is never subscripted.
        schema = self.model.get_schema(
            self.model.query_schema or self.model.model_schema
        )
        return schema.fields if schema else ()

    def limit(self, limit):
        raise NotImplementedError

    def offset(self, offset):
        raise NotImplementedError

    def sortby_field(self, entry, direction):
        raise NotImplementedError

    def filter_args(self, *args):
        raise NotImplementedError

    def filter_field(self, field, op, value):
        raise NotImplementedError

    def search(self, search):
        return self

    def load_only(self, *fields):
        # Intersect with any previously requested fields so repeated calls
        # only ever narrow the selection.
        if self.fields is None:
            self.fields = set(fields)
        else:
            self.fields = self.fields.intersection(fields)
        return self

    def filter(self, *filters, search=None, **params):
        """Apply positional filters, 'field:op=value' keyword filters and an
        optional full-text search; returns self for chaining."""
        if filters:
            self.filter_args(*filters)
        fields = self.query_fields
        for key, value in params.items():
            # Keys look like 'name' or 'name:op'; anything after a second
            # ':' is ignored and the operator falls back to 'eq'.
            bits = key.split(':')
            field = bits[0]
            op = bits[1] if len(bits) == 2 else 'eq'
            # A name present in both custom filters and schema fields is
            # handled by both — intentional per the original code.
            if field in self.filters:
                self.filters[field](self, op, value)
            if field in fields:
                self.filter_field(fields[field], op, value)
        if search:
            self.search(search)
        return self

    def sortby(self, sortby=None):
        """Apply one or more 'field' or 'field:direction' sort entries."""
        if sortby:
            if not isinstance(sortby, list):
                sortby = (sortby,)
            for entry in sortby:
                direction = None
                if ':' in entry:
                    entry, direction = entry.split(':')
                self.sortby_field(entry, direction)
        return self
| {
"content_hash": "925206430b0cfb1d037bbbedf826dd09",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 73,
"avg_line_length": 23.246753246753247,
"alnum_prop": 0.5740223463687151,
"repo_name": "quantmind/lux",
"id": "fa5bb8408cbd1c1ecd05f437616a0e15f80e171e",
"size": "3580",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lux/models/query.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "906"
},
{
"name": "HTML",
"bytes": "5107"
},
{
"name": "JavaScript",
"bytes": "219127"
},
{
"name": "Makefile",
"bytes": "422"
},
{
"name": "Mako",
"bytes": "1050"
},
{
"name": "PLpgSQL",
"bytes": "140"
},
{
"name": "Python",
"bytes": "615221"
},
{
"name": "Shell",
"bytes": "196"
}
],
"symlink_target": ""
} |
from __future__ import annotations
import json
import sys
import timeit
from collections import Counter
import iotbx.phil
from dials.util.options import ArgumentParser, flatten_experiments
from libtbx import easy_mp
from scitbx.array_family import flex
help_message = """
Examples::
xia2.overload (data_master.h5|integrated.expt) [nproc=8]
"""
phil_scope = iotbx.phil.parse(
"""
nproc = 1
.type = int(value_min=1)
.help = "The number of processes to use."
output {
filename = overload.json
.type = path
.help = "Histogram output file name"
}
"""
)
def run(args=None):
    """Command-line entry point: parse arguments and build the histogram.

    Parameters
    ----------
    args : list[str] | None
        Command-line arguments; ``None`` means ``sys.argv[1:]``.
    """
    usage = "xia2.overload (data_master.h5|integrated.expt) [nproc=8]"

    parser = ArgumentParser(
        usage=usage,
        phil=phil_scope,
        read_experiments=True,
        read_experiments_from_images=True,
        epilog=help_message,
    )

    params, _ = parser.parse_args(args=args, show_diff_phil=True)
    experiments = flatten_experiments(params.input.experiments)

    if len(experiments) != 1:
        parser.print_help()
        # sys.exit raises SystemExit, so no further statement is reachable
        # here (the dead `return` that followed has been removed).
        sys.exit("Please pass an experiment list\n")

    build_hist(experiments, params)
def build_hist(experiment_list, params):
    """Iterate through the images in experiment_list and generate a pixel
    histogram, which is written to params.output.filename.

    One output file is written per experiment; with more than one
    experiment each write overwrites the previous file.
    """
    nproc = params.nproc

    for experiment in experiment_list:
        imageset = experiment.imageset
        limit = experiment.detector[0].get_trusted_range()[1]
        n0, n1 = experiment.scan.get_image_range()
        image_count = n1 - n0 + 1

        binfactor = 5  # register up to 500% counts
        histmax = (limit * binfactor) + 0.0
        histbins = int(limit * binfactor) + 1
        # Very large histograms exceed what flex handles comfortably, so
        # fall back to a (slower) pure-python Counter.
        use_python_counter = histbins > 90000000  # empirically determined

        print(
            "Processing %d images in %d processes using %s\n"
            % (
                image_count,
                nproc,
                "python Counter" if use_python_counter else "flex arrays",
            )
        )

        def process_image(process):
            """Histogram every nproc-th image starting at index `process`;
            process 0 also prints periodic progress."""
            last_update = start = timeit.default_timer()

            i = process
            if use_python_counter:
                local_hist = Counter()
            else:
                local_hist = flex.histogram(
                    flex.double(), data_min=0.0, data_max=histmax, n_slots=histbins
                )

            while i < image_count:
                data = imageset.get_raw_data(i)[0]
                if not use_python_counter:
                    data = flex.histogram(
                        data.as_double().as_1d(),
                        data_min=0.0,
                        data_max=histmax,
                        n_slots=histbins,
                    )
                local_hist.update(data)
                i = i + nproc
                if process == 0:
                    # Throttle progress output to at most every 3 seconds.
                    if timeit.default_timer() > (last_update + 3):
                        last_update = timeit.default_timer()
                        if sys.stdout.isatty():
                            sys.stdout.write("\033[A")
                        print(
                            "Processed %d%% (%d seconds remain) "
                            % (
                                100 * i // image_count,
                                round((image_count - i) * (last_update - start) / (i + 1)),
                            )
                        )
            return local_hist

        results = easy_mp.parallel_map(
            func=process_image,
            iterable=range(nproc),
            processes=nproc,
            preserve_exception_message=True,
        )

        print("Merging results")
        result_hist = None
        for hist in results:
            if result_hist is None:
                result_hist = hist
            else:
                result_hist.update(hist)

        if not use_python_counter:
            # reformat histogram into dictionary
            result = list(result_hist.slots())
            result_hist = {b: count for b, count in enumerate(result) if count > 0}

        results = {
            "scale_factor": 1 / limit,
            "overload_limit": limit,
            "counts": result_hist,
        }

        # Honour the output.filename phil parameter instead of the
        # previously hard-coded "overload.json".
        print("Writing results to %s" % params.output.filename)
        with open(params.output.filename, "w") as fh:
            json.dump(results, fh, indent=1, sort_keys=True)
# Allow running this CLI module directly as a script.
if __name__ == "__main__":
    run()
| {
"content_hash": "29603ac25bd8c0967c16cf214eedc915",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 87,
"avg_line_length": 29.193548387096776,
"alnum_prop": 0.5250828729281768,
"repo_name": "xia2/xia2",
"id": "ee8337285ef6bcaad53f2432ff3c24f9328677dc",
"size": "4525",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/xia2/cli/overload.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "3958"
},
{
"name": "HTML",
"bytes": "35830"
},
{
"name": "Python",
"bytes": "1857098"
}
],
"symlink_target": ""
} |
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from .._common import strip_ansi_escape
from .._const import DefaultValue, ParamKey
from ..error import TypeConversionError
from ._interface import AbstractValueConverter
class IpAddressConverter(AbstractValueConverter):
    """Converter that coerces a value to an IPv4Address/IPv6Address."""

    def force_convert(self):
        """Return ``ipaddress.ip_address(self._value)``, optionally retrying
        with ANSI escape sequences stripped.

        Raises:
            TypeConversionError: if the value cannot be parsed as an
                IP address either way.
        """
        import ipaddress

        value = str(self._value)

        try:
            return ipaddress.ip_address(value)
        except ValueError:
            pass

        if self._params.get(ParamKey.STRIP_ANSI_ESCAPE, DefaultValue.STRIP_ANSI_ESCAPE):
            try:
                return ipaddress.ip_address(strip_ansi_escape(value))
            except ValueError:
                pass

        # Fixed copy-paste error: the message previously said "dictionary".
        raise TypeConversionError(
            f"failed to force_convert to ipaddress: type={type(self._value)}"
        )
| {
"content_hash": "2ca6b043861100501283ae445d11be3b",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 88,
"avg_line_length": 28.1,
"alnum_prop": 0.6465005931198102,
"repo_name": "thombashi/typepy",
"id": "5b8306b48560efd7cbd19f2d2abac50c3adb3c97",
"size": "843",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "typepy/converter/_ipaddress.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1107"
},
{
"name": "Python",
"bytes": "103291"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import swapper
from django.conf import settings
from django.db import models
from accelerator_abstract.models.accelerator_model import AcceleratorModel
class BasePartnerTeamMember(AcceleratorModel):
    """Abstract through-model linking a user to a Partner organization,
    with per-membership admin flags."""
    # Swappable FK so downstream projects can substitute their Partner model.
    partner = models.ForeignKey(
        swapper.get_model_name(AcceleratorModel.Meta.app_label, "Partner"),
        on_delete=models.CASCADE)
    team_member = models.ForeignKey(settings.AUTH_USER_MODEL,
                                    on_delete=models.CASCADE)
    # Grants administrative rights over the partner organization.
    partner_administrator = models.BooleanField(default=False)
    # Grants champion-admin rights for this member.
    champion_admin = models.BooleanField(default=False)

    class Meta(AcceleratorModel.Meta):
        db_table = 'accelerator_partnerteammember'
        abstract = True
        verbose_name_plural = 'Partner Team Members'
        ordering = ['team_member__last_name', 'team_member__first_name', ]
        # A user may appear at most once per partner.
        unique_together = ('partner', 'team_member')

    def __str__(self):
        return "Member %s from %s" % (self.team_member,
                                      self.partner.name)
| {
"content_hash": "8fde73bbd0de2262c00c9b5fda1a3944",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 75,
"avg_line_length": 37.92857142857143,
"alnum_prop": 0.6666666666666666,
"repo_name": "masschallenge/django-accelerator",
"id": "d6c370c288dc960f0ecf9b1e6eadcb15fa704c8b",
"size": "1062",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "accelerator_abstract/models/base_partner_team_member.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1848"
},
{
"name": "Makefile",
"bytes": "6817"
},
{
"name": "Python",
"bytes": "996767"
},
{
"name": "Shell",
"bytes": "2453"
}
],
"symlink_target": ""
} |
"""
(C) Copyright [2014] Avery Rozar
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__author__ = 'Avery Rozar'
| {
"content_hash": "34cd7869250d226c9331d5137b59d227",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 72,
"avg_line_length": 34.8235294117647,
"alnum_prop": 0.7702702702702703,
"repo_name": "asrozar/Cisco_SNMPv3_tool",
"id": "3e35ad4be1622cb09431816ee98630c5a060fab5",
"size": "641",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16709"
}
],
"symlink_target": ""
} |
"""Load a file resource and return the contents."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
# pylint: disable=g-import-not-at-top
# pylint: disable=wildcard-import
from . import control_imports
import tensorflow.python.platform
# Select the platform implementation at import time: OSS builds re-export the
# default implementation, internal builds the google one.
if control_imports.USE_OSS:
  from tensorflow.python.platform.default._resource_loader import *
else:
  from tensorflow.python.platform.google._resource_loader import *
| {
"content_hash": "d17c78958df76a4d716056e614f279e7",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 67,
"avg_line_length": 35.642857142857146,
"alnum_prop": 0.779559118236473,
"repo_name": "brodyh/tensorflow",
"id": "46f0ab654bbb46437551ae676c8ffdf6f0303d3f",
"size": "499",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tensorflow/python/platform/resource_loader.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "165934"
},
{
"name": "C++",
"bytes": "4901913"
},
{
"name": "CSS",
"bytes": "107"
},
{
"name": "HTML",
"bytes": "637241"
},
{
"name": "Java",
"bytes": "44388"
},
{
"name": "JavaScript",
"bytes": "5067"
},
{
"name": "Objective-C",
"bytes": "630"
},
{
"name": "Protocol Buffer",
"bytes": "45213"
},
{
"name": "Python",
"bytes": "2473570"
},
{
"name": "Shell",
"bytes": "7535"
},
{
"name": "TypeScript",
"bytes": "237446"
}
],
"symlink_target": ""
} |
import numpy as np
from ..layers import Layer, ParamMixin
from .conv import conv_bc01, bprop_conv_bc01
from .pool import pool_bc01, bprop_pool_bc01
class Conv(Layer, ParamMixin):
    """2D convolution layer over bc01 (batch, channel, height, width) arrays,
    delegating the heavy lifting to conv_bc01/bprop_conv_bc01."""

    def __init__(self, n_feats, filter_shape, strides, weight_scale,
                 weight_decay=0.0, padding_mode='same', border_mode='nearest'):
        # NOTE(review): strides and border_mode are stored but never used in
        # this class — confirm whether the C kernels consume them.
        self.n_feats = n_feats
        self.filter_shape = filter_shape
        self.strides = strides
        self.weight_scale = weight_scale
        self.weight_decay = weight_decay
        self.padding_mode = padding_mode
        self.border_mode = border_mode

    def _setup(self, input_shape, rng):
        # Weights: (in_channels, out_feats) + filter_shape, gaussian init.
        n_channels = input_shape[1]
        W_shape = (n_channels, self.n_feats) + self.filter_shape
        self.W = rng.normal(size=W_shape, scale=self.weight_scale)
        self.b = np.zeros(self.n_feats)

    def fprop(self, input):
        # Cache the input for the backward pass.
        self.last_input = input
        self.last_input_shape = input.shape
        convout = np.empty(self.output_shape(input.shape))
        conv_bc01(input, self.W, convout)
        # Broadcast the per-feature bias over batch and spatial dims.
        return convout + self.b[np.newaxis, :, np.newaxis, np.newaxis]

    def bprop(self, output_grad):
        input_grad = np.empty(self.last_input_shape)
        self.dW = np.empty(self.W.shape)
        bprop_conv_bc01(self.last_input, output_grad, self.W, input_grad,
                        self.dW)
        n_imgs = output_grad.shape[0]
        # NOTE(review): db is averaged over the batch here while dW is
        # presumably averaged inside bprop_conv_bc01 — confirm consistency.
        self.db = np.sum(output_grad, axis=(0, 2, 3)) / (n_imgs)
        # Fold L2 weight decay into the update direction (undone in
        # param_grads below).
        self.dW -= self.weight_decay*self.W
        return input_grad

    def params(self):
        return self.W, self.b

    def param_incs(self):
        return self.dW, self.db

    def param_grads(self):
        # undo weight decay
        gW = self.dW+self.weight_decay*self.W
        return gW, self.db

    def output_shape(self, input_shape):
        # NOTE(review): filter_shape[2] is out of range for an (h, w)
        # 2-tuple (cf. _setup), and the 'full' branch *shrinks* the output
        # while the fallback grows it — names look swapped relative to
        # conventional conv terminology. Only 'same' appears safe; confirm
        # against conv_bc01 before using other padding modes.
        if self.padding_mode == 'same':
            h = input_shape[2]
            w = input_shape[3]
        elif self.padding_mode == 'full':
            h = input_shape[2]-self.filter_shape[1]+1
            w = input_shape[3]-self.filter_shape[2]+1
        else:
            h = input_shape[2]+self.filter_shape[1]-1
            w = input_shape[3]+self.filter_shape[2]-1
        shape = (input_shape[0], self.n_feats, h, w)
        return shape
class Pool(Layer):
    """Spatial pooling layer for bc01 arrays backed by pool_bc01."""

    def __init__(self, pool_shape=(3, 3), strides=(1, 1), mode='max'):
        # NOTE(review): mode is stored but never passed to pool_bc01 —
        # presumably only max pooling is implemented; confirm.
        self.mode = mode
        self.pool_h, self.pool_w = pool_shape
        self.stride_y, self.stride_x = strides

    def fprop(self, input):
        self.last_input_shape = input.shape
        # switches holds the (y, x) argmax coordinates per output cell so
        # bprop can route gradients back to the winning inputs.
        # NOTE(review): np.int was removed in NumPy 1.24+; this requires an
        # older NumPy — confirm the project's pin.
        self.last_switches = np.empty(self.output_shape(input.shape)+(2,),
                                      dtype=np.int)
        poolout = np.empty(self.output_shape(input.shape))
        pool_bc01(input, poolout, self.last_switches, self.pool_h, self.pool_w,
                  self.stride_y, self.stride_x)
        return poolout

    def bprop(self, output_grad):
        input_grad = np.empty(self.last_input_shape)
        bprop_pool_bc01(output_grad, self.last_switches, input_grad)
        return input_grad

    def output_shape(self, input_shape):
        # Spatial dims shrink by the stride (floor division).
        shape = (input_shape[0],
                 input_shape[1],
                 input_shape[2]//self.stride_y,
                 input_shape[3]//self.stride_x)
        return shape
class Flatten(Layer):
    """Collapse every non-batch dimension into one feature axis."""

    def fprop(self, input):
        # Remember the incoming shape so bprop can restore it exactly.
        self.last_input_shape = input.shape
        batch_size = input.shape[0]
        return input.reshape(batch_size, -1)

    def bprop(self, output_grad):
        return output_grad.reshape(self.last_input_shape)

    def output_shape(self, input_shape):
        return (input_shape[0], np.prod(input_shape[1:]))
| {
"content_hash": "b25e2b1468b39e04214f73fec08caf01",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 79,
"avg_line_length": 34.98076923076923,
"alnum_prop": 0.5882352941176471,
"repo_name": "andersbll/nnet",
"id": "c666de7de28f4395d6deed3fd154a4d41b44ed96",
"size": "3638",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nnet/convnet/layers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "29804"
}
],
"symlink_target": ""
} |
"""Tools to help with mapping/geographic applications.
Currently this includes tools for working with CartoPy projections.
"""
import cartopy.crs as ccrs
from ..cbook import Registry
class CFProjection(object):
    """Handle parsing CF projection metadata."""

    # CartoPy kwarg name -> CF attribute name pairs shared by every
    # projection; handlers append their projection-specific pairs.
    _default_attr_mapping = [('false_easting', 'false_easting'),
                             ('false_northing', 'false_northing'),
                             ('central_latitude', 'latitude_of_projection_origin'),
                             ('central_longitude', 'longitude_of_projection_origin')]

    projection_registry = Registry()

    def __init__(self, attrs):
        """Initialize the CF Projection handler with a set of metadata attributes."""
        self._attrs = attrs

    @classmethod
    def register(cls, name):
        """Register a new projection to handle."""
        return cls.projection_registry.register(name)

    @classmethod
    def build_projection_kwargs(cls, source, mapping):
        """Handle mapping a dictionary of metadata to keyword arguments."""
        return cls._map_arg_names(source, cls._default_attr_mapping + mapping)

    @staticmethod
    def _map_arg_names(source, mapping):
        """Map one set of keys to another, skipping keys absent in source."""
        return {cartopy_name: source[cf_name] for cartopy_name, cf_name in mapping
                if cf_name in source}

    @property
    def cartopy_globe(self):
        """Initialize a `cartopy.crs.Globe` from the metadata."""
        if 'earth_radius' in self._attrs:
            kwargs = {'ellipse': 'sphere', 'semimajor_axis': self._attrs['earth_radius'],
                      'semiminor_axis': self._attrs['earth_radius']}
        else:
            attr_mapping = [('semimajor_axis', 'semi_major_axis'),
                            ('semiminor_axis', 'semi_minor_axis'),
                            ('inverse_flattening', 'inverse_flattening')]
            kwargs = self._map_arg_names(self._attrs, attr_mapping)
            # WGS84 with semi_major==semi_minor is NOT the same as spherical Earth
            # Also need to handle the case where we're not given any spheroid
            kwargs['ellipse'] = None if kwargs else 'sphere'

        return ccrs.Globe(**kwargs)

    def to_cartopy(self):
        """Convert to a CartoPy projection."""
        globe = self.cartopy_globe
        proj_name = self._attrs['grid_mapping_name']
        try:
            proj_handler = self.projection_registry[proj_name]
        except KeyError:
            raise ValueError('Unhandled projection: {}'.format(proj_name))

        return proj_handler(self._attrs, globe)

    def to_dict(self):
        """Get the dictionary of metadata attributes."""
        return self._attrs.copy()

    def __str__(self):
        """Get a string representation of the projection."""
        return 'Projection: ' + self._attrs['grid_mapping_name']

    def __getitem__(self, item):
        """Return a given attribute."""
        return self._attrs[item]

    # NOTE(review): defining __eq__ without __hash__ makes instances
    # unhashable on Python 3 — confirm that is intended.
    def __eq__(self, other):
        """Test equality (CFProjection with matching attrs)."""
        return self.__class__ == other.__class__ and self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Test inequality (not equal to)."""
        return not self.__eq__(other)
@CFProjection.register('geostationary')
def make_geo(attrs_dict, globe):
    """Build a CartoPy Geostationary projection from CF metadata."""
    kwargs = CFProjection.build_projection_kwargs(
        attrs_dict,
        [('satellite_height', 'perspective_point_height'),
         ('sweep_axis', 'sweep_angle_axis')])
    # CartoPy can't handle central latitude for Geostationary (nor should it);
    # drop it when it is 0 or absent.
    if not kwargs.get('central_latitude'):
        kwargs.pop('central_latitude', None)
    # If sweep_angle_axis is missing, derive it from fixed_angle_axis: the
    # sweep axis is whichever axis is not the fixed one.
    if 'sweep_axis' not in kwargs:
        fixed = attrs_dict['fixed_angle_axis']
        kwargs['sweep_axis'] = 'x' if fixed == 'y' else 'y'
    return ccrs.Geostationary(globe=globe, **kwargs)
@CFProjection.register('lambert_conformal_conic')
def make_lcc(attrs_dict, globe):
    """Build a CartoPy LambertConformal projection from CF metadata."""
    kwargs = CFProjection.build_projection_kwargs(
        attrs_dict,
        [('central_longitude', 'longitude_of_central_meridian'),
         ('standard_parallels', 'standard_parallel')])
    # CartoPy expects a sequence of standard parallels; CF metadata may give
    # a scalar, so wrap anything without a length in a list.
    if 'standard_parallels' in kwargs:
        parallels = kwargs['standard_parallels']
        try:
            len(parallels)
        except TypeError:
            kwargs['standard_parallels'] = [parallels]
    return ccrs.LambertConformal(globe=globe, **kwargs)
@CFProjection.register('latitude_longitude')
def make_latlon(attrs_dict, globe):
    """Handle plain latitude/longitude mapping."""
    # TODO: Really need to use Geodetic to pass the proper globe
    # NOTE: the globe argument is currently ignored; PlateCarree is built
    # with its default globe.
    return ccrs.PlateCarree()
@CFProjection.register('mercator')
def make_mercator(attrs_dict, globe):
    """Build a CartoPy Mercator projection from CF metadata."""
    kwargs = CFProjection.build_projection_kwargs(
        attrs_dict,
        [('latitude_true_scale', 'standard_parallel'),
         ('scale_factor', 'scale_factor_at_projection_origin')])
    # Work around the fact that in CartoPy <= 0.16 can't handle the
    # easting/northing in Mercator: drop either offset when it is 0/absent.
    for offset in ('false_easting', 'false_northing'):
        if not kwargs.get(offset):
            kwargs.pop(offset, None)
    return ccrs.Mercator(globe=globe, **kwargs)
@CFProjection.register('stereographic')
def make_stereo(attrs_dict, globe):
    """Build a CartoPy Stereographic projection from CF metadata."""
    kwargs = CFProjection.build_projection_kwargs(
        attrs_dict, [('scale_factor', 'scale_factor_at_projection_origin')])
    return ccrs.Stereographic(globe=globe, **kwargs)
@CFProjection.register('polar_stereographic')
def make_polar_stereo(attrs_dict, globe):
    """Build a CartoPy Stereographic projection for the polar CF variant."""
    kwargs = CFProjection.build_projection_kwargs(
        attrs_dict,
        [('central_longitude', 'straight_vertical_longitude_from_pole'),
         ('true_scale_latitude', 'standard_parallel'),
         ('scale_factor', 'scale_factor_at_projection_origin')])
    return ccrs.Stereographic(globe=globe, **kwargs)
| {
"content_hash": "421a21c5213bbdd146ae35b57f243dca",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 89,
"avg_line_length": 38.61818181818182,
"alnum_prop": 0.6377903327055869,
"repo_name": "jrleeman/MetPy",
"id": "95ac1711947ecdbc6bf78b18c24b631782ea86dc",
"size": "6510",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metpy/plots/mapping.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1241442"
},
{
"name": "Shell",
"bytes": "1600"
}
],
"symlink_target": ""
} |
"""
WSGI config for sheeshmohsin project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "sheeshmohsin.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sheeshmohsin.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| {
"content_hash": "f949e15a701770edc9d5fea286e371ca",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 79,
"avg_line_length": 44.90625,
"alnum_prop": 0.7954070981210856,
"repo_name": "sheeshmohsin/sheeshmohsin",
"id": "98ec794528dde31a72f93670c1b8cafdfc691ed3",
"size": "1437",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sheeshmohsin/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8175"
}
],
"symlink_target": ""
} |
from django.db import models, migrations
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
    # Initial schema for the auditlog app: one LogEntry table recording
    # create/update/delete actions against arbitrary models (via contenttypes).
    # Auto-generated migration; avoid editing by hand.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('contenttypes', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='LogEntry',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # object_pk stores the target's PK as text so non-integer keys
                # work; object_id is an indexed integer copy when available.
                ('object_pk', models.TextField(verbose_name='object pk')),
                ('object_id', models.PositiveIntegerField(db_index=True, null=True, verbose_name='object id', blank=True)),
                ('object_repr', models.TextField(verbose_name='object representation')),
                ('action', models.PositiveSmallIntegerField(verbose_name='action', choices=[(0, 'create'), (1, 'update'), (2, 'delete')])),
                ('changes', models.TextField(verbose_name='change message', blank=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True, verbose_name='timestamp')),
                # SET_NULL keeps audit entries alive when the acting user is deleted.
                ('actor', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, verbose_name='actor', blank=True, to=settings.AUTH_USER_MODEL, null=True)),
                ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', verbose_name='content type', to='contenttypes.ContentType')),
            ],
            options={
                'ordering': ['-timestamp'],
                'get_latest_by': 'timestamp',
                'verbose_name': 'log entry',
                'verbose_name_plural': 'log entries',
            },
            bases=(models.Model,),
        ),
    ]
| {
"content_hash": "d6b7e2ef16fae5a3a50446040e9a6e6e",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 183,
"avg_line_length": 50.6,
"alnum_prop": 0.5990965556182948,
"repo_name": "kbussell/django-auditlog",
"id": "f7346ec6cfeb4caaf0a0c65e80a7451eea888dc3",
"size": "1795",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/auditlog/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "80631"
}
],
"symlink_target": ""
} |
from django.urls import reverse
from django.utils.translation import gettext_noop
from corehq.apps.locations.util import (
load_locs_json,
location_hierarchy_config,
)
from corehq.apps.reports.filters.base import BaseReportFilter
class AsyncLocationFilter(BaseReportFilter):
    """Report filter that lets the user pick a location asynchronously."""
    # todo: cleanup template
    label = gettext_noop("Location")
    slug = "location_async"
    template = "reports/filters/location_async.html"
    make_optional = False
    auto_drill = True
    @property
    def api_root(self):
        url_kwargs = {
            'domain': self.domain,
            'resource_name': 'location_internal',
            'api_name': 'v0.5',
        }
        return reverse('api_dispatch_list', kwargs=url_kwargs)
    def load_locations_json(self, loc_id):
        return load_locs_json(self.domain, loc_id, user=self.request.couch_user)
    @property
    def location_hierarchy_config(self):
        return location_hierarchy_config(self.domain)
    @property
    def filter_context(self):
        couch_user = self.request.couch_user
        selected_loc_id = self.request.GET.get('location_id')
        if not selected_loc_id:
            # Don't use enterprise permissions, because any location not in the current domain won't exist
            membership = couch_user.get_domain_membership(self.domain, allow_enterprise=False)
            if membership:
                selected_loc_id = membership.location_id
        return {
            'api_root': self.api_root,
            'control_name': self.label,  # todo: cleanup, don't follow this structure
            'control_slug': self.slug,  # todo: cleanup, don't follow this structure
            'auto_drill': self.auto_drill,
            'loc_id': selected_loc_id,
            'locations': self.load_locations_json(selected_loc_id),
            'make_optional': self.make_optional,
            'hierarchy': self.location_hierarchy_config,
            'path': self.request.path,
        }
    @classmethod
    def get_value(cls, request, domain):
        return request.GET.get('location_id')
class OptionalAsyncLocationFilter(AsyncLocationFilter):
    """
    This is the same as the AsyncLocationFilter, only when the template is
    rendered, it will give the user the option of filtering by location or
    not. If the user chooses to not filter by location, the location_id
    value will be blank.
    """
    # The template keys off this flag to offer a "no location filter" choice.
    make_optional = True
| {
"content_hash": "4df14c8e2288d4e638cf3788b5b3194a",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 106,
"avg_line_length": 36.93939393939394,
"alnum_prop": 0.6296144380639869,
"repo_name": "dimagi/commcare-hq",
"id": "b2bce9aa86952f942dc210d05c5c911193d82ede",
"size": "2438",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/reports/filters/fixtures.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "82928"
},
{
"name": "Dockerfile",
"bytes": "2341"
},
{
"name": "HTML",
"bytes": "2589268"
},
{
"name": "JavaScript",
"bytes": "5889543"
},
{
"name": "Jinja",
"bytes": "3693"
},
{
"name": "Less",
"bytes": "176180"
},
{
"name": "Makefile",
"bytes": "1622"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "66704"
},
{
"name": "Python",
"bytes": "21779773"
},
{
"name": "Roff",
"bytes": "150"
},
{
"name": "Shell",
"bytes": "67473"
}
],
"symlink_target": ""
} |
from django.views.generic import View, TemplateView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.shortcuts import render_to_response
from django.conf import settings
from denguealert.forms import DengueCaseForm
class ReportView(TemplateView):
    """Render the dengue-case report form; requires an authenticated user."""
    template_name = 'report.html'
    @method_decorator(login_required(login_url='login'))
    def dispatch(self, *args, **kwargs):
        # Gate every HTTP method behind login.
        return super(ReportView, self).dispatch(*args, **kwargs)
    def get_context_data(self, **kwargs):
        """Add the report form and the Google Maps API key to the context.

        Overriding get_context_data (instead of get() + render_to_response,
        as before) lets TemplateView render with a proper RequestContext, so
        context processors run; render_to_response is deprecated/removed in
        modern Django.
        """
        context = super(ReportView, self).get_context_data(**kwargs)
        context['form'] = DengueCaseForm()
        context['GMAPS_API_KEY'] = settings.GMAPS_API_KEY
        return context
| {
"content_hash": "28841aadeeac56e66ac2a261ee599f10",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 70,
"avg_line_length": 39.5,
"alnum_prop": 0.7077100115074798,
"repo_name": "ray-dino/DengueAlert",
"id": "ca3c94b65abc4cedfcbfc6f1f4f9194ba4c60be5",
"size": "869",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "denguealert/denguealert/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15786"
}
],
"symlink_target": ""
} |
from contextlib import contextmanager
import warnings
import gossip
def test_deprecated_hook():
    """Registering a handler on a deprecated hook warns at the registration site."""
    with python_warnings_recording() as recorded:
        hook = gossip.define('hook', deprecated=True)
        # Merely defining the deprecated hook should not warn...
        assert recorded == []
        # ...but registering a handler on it should.
        @hook.register
        def handler(): # pylint: disable=unused-variable
            pass
    [rec] = recorded
    # The warning should point at this file (the registration call site).
    assert rec.filename == __file__
@contextmanager
def python_warnings_recording():
    """Context manager yielding the list of warnings issued inside the block.

    The ``simplefilter('always')`` call is made *inside*
    ``warnings.catch_warnings`` so the process-wide warning filters are
    restored on exit. (Previously it ran before entering the context, which
    permanently mutated the global filter state.)
    """
    with warnings.catch_warnings(record=True) as recorded:
        # Record every warning, including duplicates that the default
        # filters would suppress.
        warnings.simplefilter('always')
        yield recorded
| {
"content_hash": "21b9a72ee5fa7831d40cb0398665a01b",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 58,
"avg_line_length": 21.884615384615383,
"alnum_prop": 0.6590509666080844,
"repo_name": "vmalloc/gossip",
"id": "95a9cb08eee59d444da99beca7f701b56ce93ab2",
"size": "569",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/test_deprecation.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "369"
},
{
"name": "Python",
"bytes": "62133"
}
],
"symlink_target": ""
} |
import os
# import sys
import openstackdocstheme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# TODO(ajaeger): enable PDF building, for example add 'rst2pdf.pdfbuilder'
# extensions =
# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Installation Guide for Bare Metal Service'
bug_tag = u'install-guide'
copyright = u'2016, OpenStack contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# A few variables have to be set for the log-a-bug feature.
# giturl: The location of conf.py on Git. Must be set manually.
# gitsha: The SHA checksum of the bug description. Automatically extracted
# from git log.
# bug_tag: Tag for categorizing the bug. Must be set manually.
# These variables are passed to the logabug code via html_context.
giturl = u'https://git.openstack.org/cgit/openstack/ironic/tree/install-guide/source' # noqa
git_cmd = "/usr/bin/git log | head -n1 | cut -f2 -d' '"
# NOTE(review): os.popen leaves the pipe object to be closed by the GC;
# subprocess.check_output would be the tidier replacement.
gitsha = os.popen(git_cmd).read().strip('\n')
html_context = {"gitsha": gitsha, "bug_tag": bug_tag,
                "giturl": giturl,
                "bug_project": "ironic"}
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['include']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [openstackdocstheme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# So that we can enable "log-a-bug" links from each output HTML page, this
# variable must be set to a format that includes year, month, day, hours and
# minutes.
html_last_updated_fmt = '%Y-%m-%d %H:%M'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'install-guide'
# If true, publish source files
html_copy_source = False
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'InstallGuide.tex', u'Install Guide',
     u'OpenStack contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'installguide', u'Install Guide',
     [u'OpenStack contributors'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    ('index', 'InstallGuide', u'Install Guide',
     u'OpenStack contributors', 'InstallGuide',
     'This guide shows OpenStack end users how to install '
     'an OpenStack cloud.', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']
# -- Options for PDF output --------------------------------------------------
pdf_documents = [
    ('index', u'InstallGuide', u'Install Guide',
     u'OpenStack contributors')
]
| {
"content_hash": "1a3a247246d88903eb48480e27a07a1e",
"timestamp": "",
"source": "github",
"line_count": 279,
"max_line_length": 93,
"avg_line_length": 33.272401433691755,
"alnum_prop": 0.6934180760530001,
"repo_name": "SauloAislan/ironic",
"id": "1ea0708b51d01d39911799c685cfd72dd501a5b2",
"size": "10111",
"binary": false,
"copies": "6",
"ref": "refs/heads/SauloAislan-WIP",
"path": "doc/source/install/conf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "349"
},
{
"name": "Python",
"bytes": "5596702"
},
{
"name": "Shell",
"bytes": "119832"
}
],
"symlink_target": ""
} |
from __future__ import print_function
"""
Copyright (c) 2012, Cody Schafer <cpschafer --- gmail.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import threading
import time

from collections import deque
class Signal:
    # NOTE(review): __init__ sets `last` to None, yet attach() immediately
    # calls self.last.attach(...) — as written, attach() raises AttributeError
    # unless `last` is assigned elsewhere first. Looks unfinished; confirm the
    # intended first/last linkage before relying on this class.
    def __init__(self):
        # Endpoints of an (apparently intended) chain of connections.
        self.last = None
        self.first = None
    def attach(self, connection):
        # Delegates to the most recently attached element; assumes `last` has
        # already been set to an object exposing attach() — TODO confirm.
        self.last.attach(connection)
class Event:
    """A generic event: a ``type`` tag plus arbitrary keyword attributes."""

    def __init__(self, etype, **kws):
        """Store *etype* as ``self.type`` and each keyword as an attribute."""
        self.type = etype
        for name, value in kws.items():
            setattr(self, name, value)
class EventQueue:
    """Thread-safe FIFO queue of events with an optional blocking timeout.

    Fixes the original's use of an undefined ``_time()`` helper, which made
    every timed ``wait`` raise NameError; also uses ``with`` on the condition
    so the lock is always released, and waits only the *remaining* time after
    spurious wakeups instead of the full timeout again.
    """

    def __init__(self):
        self.cond = threading.Condition()
        self.q = deque()

    def post(self, event):
        """Append *event* and wake one waiter."""
        with self.cond:
            self.q.appendleft(event)
            self.cond.notify()

    def wait(self, timeout=None):
        """Pop the oldest event, blocking up to *timeout* seconds.

        Returns the event, or None if *timeout* elapsed with the queue
        still empty (matching the original contract).
        """
        with self.cond:
            if timeout is None:
                while not self.q:
                    self.cond.wait()
            else:
                # time.time() keeps Python 2 compatibility (the module uses
                # a __future__ import); time.monotonic would be preferable
                # on Python 3 to be immune to clock adjustments.
                deadline = time.time() + timeout
                while not self.q:
                    remaining = deadline - time.time()
                    if remaining <= 0:
                        return None
                    self.cond.wait(remaining)
            return self.q.pop()
| {
"content_hash": "8fbb8de3c00ce2c5217bcfae55c81534",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 79,
"avg_line_length": 32.75,
"alnum_prop": 0.734171531207903,
"repo_name": "mkoval/FieldforceTCM",
"id": "ef74796ba74bccaad1902fc1bc404bd06ada4fe1",
"size": "2281",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/event.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "50715"
}
],
"symlink_target": ""
} |
"""
Configuration settings for the Flask application. Every variable in capital
letters is available inside the flask instance.
"""
DEBUG = True
| {
"content_hash": "1ef96ae39d0c0771593337ca13be4043",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 75,
"avg_line_length": 24.333333333333332,
"alnum_prop": 0.7808219178082192,
"repo_name": "infothrill/flask-socketio-dbus-demo",
"id": "07d16aee5ec12eb3e776f9cd5431257c7b4c3815",
"size": "171",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "7509"
},
{
"name": "Python",
"bytes": "15382"
},
{
"name": "Shell",
"bytes": "377"
}
],
"symlink_target": ""
} |
from google.cloud import edgecontainer_v1
def sample_list_vpn_connections():
    """List all VPN connections under a parent resource and print each one."""
    # Create a client
    client = edgecontainer_v1.EdgeContainerClient()
    # Initialize request argument(s)
    request = edgecontainer_v1.ListVpnConnectionsRequest(
        parent="parent_value",  # placeholder: replace with a real parent path
    )
    # Make the request (returns a pager that transparently fetches pages)
    page_result = client.list_vpn_connections(request=request)
    # Handle the response
    for response in page_result:
        print(response)
# [END edgecontainer_v1_generated_EdgeContainer_ListVpnConnections_sync]
| {
"content_hash": "217fa8208a2503fe04537dd7273fc429",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 72,
"avg_line_length": 26.55,
"alnum_prop": 0.7212806026365348,
"repo_name": "googleapis/python-edgecontainer",
"id": "49f255f0137ab6d9c0802250c62a7c1d3ef7635e",
"size": "1938",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/generated_samples/edgecontainer_v1_generated_edge_container_list_vpn_connections_sync.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "607901"
},
{
"name": "Shell",
"bytes": "30681"
}
],
"symlink_target": ""
} |
from setuptools import setup
from setuptools import find_packages
# Packaging metadata for the cloudnsapi distribution.
setup(
    name='cloudnsapi',
    version='0.0.2',
    description="ClouDNS API Wrapper",
    author="R Franks",
    author_email='git@rf152.co.uk',
    license='MIT',
    classifiers=[
        # See https://pypi.org/classifiers/ for the full list.
        'Development Status :: 3 - Alpha',
        'Intended Audience :: System Administrators',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Internet :: WWW/HTTP',
    ],
    # NOTE(review): find_packages is imported above but unused; the package
    # list is hard-coded instead — confirm that is intentional.
    packages=['cloudnsapi'],
    install_requires=['requests']
)
"content_hash": "fbb00124e8990b31b1ccd6b2706168e3",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 53,
"avg_line_length": 32.37931034482759,
"alnum_prop": 0.5910543130990416,
"repo_name": "rf152/python-cloudns",
"id": "647c3d66cdeaa17e014dcfd1e95d497b79ffe94f",
"size": "939",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13613"
}
],
"symlink_target": ""
} |
"""
Docstrings are another source of information for functions and classes.
:mod:`jedi.evaluate.dynamic` tries to find all executions of functions, while
the docstring parsing is much easier. There are two different types of
docstrings that |jedi| understands:
- `Sphinx <http://sphinx-doc.org/markup/desc.html#info-field-lists>`_
- `Epydoc <http://epydoc.sourceforge.net/manual-fields.html>`_
For example, the sphinx annotation ``:type foo: str`` clearly states that the
type of ``foo`` is ``str``.
As an addition to parameter searching, this module also provides return
annotations.
"""
from ast import literal_eval
import re
from itertools import chain
from textwrap import dedent
from jedi.evaluate.cache import memoize_default
from jedi.parser import ParserWithRecovery, load_grammar
from jedi.parser.tree import Class
from jedi.common import indent_block
from jedi.evaluate.iterable import Array, FakeSequence, AlreadyEvaluated
# Regex templates (one per docstring convention) for locating the declared
# type of a parameter; %s is substituted with the escaped parameter name.
DOCSTRING_PARAM_PATTERNS = [
    r'\s*:type\s+%s:\s*([^\n]+)',  # Sphinx
    r'\s*:param\s+(\w+)\s+%s:[^\n]+',  # Sphinx param with type
    r'\s*@type\s+%s:\s*([^\n]+)',  # Epydoc
]
# Compiled patterns for extracting the documented return type.
DOCSTRING_RETURN_PATTERNS = [
    re.compile(r'\s*:rtype:\s*([^\n]+)', re.M),  # Sphinx
    re.compile(r'\s*@rtype:\s*([^\n]+)', re.M),  # Epydoc
]
# Matches a ReST role such as :class:`Name` and captures its target.
REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`')
try:
    from numpydoc.docscrape import NumpyDocString
except ImportError:
    # numpydoc is an optional dependency; without it, numpy-style docstrings
    # simply contribute no type information.
    def _search_param_in_numpydocstr(docstr, param_str):
        return []
else:
    def _search_param_in_numpydocstr(docstr, param_str):
        """Search `docstr` (in numpydoc format) for type(-s) of `param_str`."""
        params = NumpyDocString(docstr)._parsed_data['Parameters']
        for p_name, p_type, p_descr in params:
            if p_name == param_str:
                # Strip a trailing ", optional" marker from the type spec.
                m = re.match('([^,]+(,[^,]+)*?)(,[ ]*optional)?$', p_type)
                if m:
                    p_type = m.group(1)
                if p_type.startswith('{'):
                    # A set literal like {'a', 1} enumerates allowed values;
                    # report the union of the Python types of those values.
                    types = set(type(x).__name__ for x in literal_eval(p_type))
                    return list(types)
                else:
                    return [p_type]
        return []
def _search_param_in_docstr(docstr, param_str):
    """
    Search `docstr` for type(-s) of `param_str`.

    >>> _search_param_in_docstr(':type param: int', 'param')
    ['int']
    >>> _search_param_in_docstr('@type param: int', 'param')
    ['int']
    >>> _search_param_in_docstr(
    ... ':type param: :class:`threading.Thread`', 'param')
    ['threading.Thread']
    >>> bool(_search_param_in_docstr('no document', 'param'))
    False
    >>> _search_param_in_docstr(':param int param: some description', 'param')
    ['int']
    """
    # look at #40 to see definitions of those params
    escaped_name = re.escape(param_str)
    for template in DOCSTRING_PARAM_PATTERNS:
        found = re.search(template % escaped_name, docstr)
        if found:
            return [_strip_rst_role(found.group(1))]
    # Fall back to numpy-style docstrings when no Sphinx/Epydoc tag matched.
    return _search_param_in_numpydocstr(docstr, param_str) or []
def _strip_rst_role(type_str):
    """
    Strip off the part looks like a ReST role in `type_str`.

    >>> _strip_rst_role(':class:`ClassName`') # strip off :class:
    'ClassName'
    >>> _strip_rst_role(':py:obj:`module.Object`') # works with domain
    'module.Object'
    >>> _strip_rst_role('ClassName') # do nothing when not ReST role
    'ClassName'

    See also:
    http://sphinx-doc.org/domains.html#cross-referencing-python-objects
    """
    found = REST_ROLE_PATTERN.match(type_str)
    return found.group(1) if found else type_str
def _evaluate_for_statement_string(evaluator, string, module):
    """
    Evaluate a docstring type description (e.g. ``"threading.Thread"``) by
    compiling it inside a throwaway pseudo function and executing the
    resulting statement with the evaluator.

    :param string: The type string extracted from a docstring, or None.
    :param module: The module the docstring belongs to; it becomes the
        parent scope of the pseudo function.
    :return: A list of evaluated types; empty when `string` is None or
        cannot be parsed.
    """
    code = dedent("""
    def pseudo_docstring_stuff():
        # Create a pseudo function for docstring statements.
    %s
    """)
    if string is None:
        return []

    # Fix: the pattern contains ``\w`` and ``\.`` escapes, which are invalid
    # escape sequences in a plain string literal — use a raw string.
    for element in re.findall(r'((?:\w+\.)*\w+)\.', string):
        # Try to import module part in dotted name.
        # (e.g., 'threading' in 'threading.Thread').
        string = 'import %s\n' % element + string

    # Take the default grammar here, if we load the Python 2.7 grammar here, it
    # will be impossible to use `...` (Ellipsis) as a token. Docstring types
    # don't need to conform with the current grammar.
    p = ParserWithRecovery(load_grammar(), code % indent_block(string))
    try:
        pseudo_cls = p.module.subscopes[0]
        # First pick suite, then simple_stmt (-2 for DEDENT) and then the node,
        # which is also not the last item, because there's a newline.
        stmt = pseudo_cls.children[-1].children[-2].children[-2]
    except (AttributeError, IndexError):
        return []

    # Use the module of the param.
    # TODO this module is not the module of the param in case of a function
    # call. In that case it's the module of the function call.
    # stuffed with content from a function call.
    pseudo_cls.parent = module
    return list(_execute_types_in_stmt(evaluator, stmt))
def _execute_types_in_stmt(evaluator, stmt):
    """
    Execute all types or general elements found in a statement.

    Tuple, list and dict literals are not returned as-is; the values they
    contain are executed instead (they are used as type information).
    """
    return chain.from_iterable(
        _execute_array_values(evaluator, definition)
        for definition in evaluator.eval_element(stmt))
def _execute_array_values(evaluator, array):
    """
    Tuples indicate that there's not just one return value, but the listed
    ones. `(str, int)` means that it returns a tuple with both types.
    """
    if isinstance(array, Array):
        # Recursively execute every element of the literal and wrap the
        # results, so the returned fake sequence keeps the same sequence
        # type as the original literal.
        values = []
        for types in array.py__iter__():
            objects = set(chain.from_iterable(_execute_array_values(evaluator, typ) for typ in types))
            values.append(AlreadyEvaluated(objects))
        return [FakeSequence(evaluator, values, array.type)]
    else:
        # Not a sequence literal: let the evaluator execute it directly.
        return evaluator.execute(array)
@memoize_default(None, evaluator_is_first_arg=True)
def follow_param(evaluator, param):
    """Infer the types of `param` from the docstring of its function.

    For ``__init__`` parameters, the docstring of the enclosing class is
    searched as well. Results are memoized per evaluator.
    """
    def eval_docstring(docstring):
        # Evaluate every type string documented for this parameter name and
        # merge the results into one set.
        return set(
            [p for param_str in _search_param_in_docstr(docstring, str(param.name))
             for p in _evaluate_for_statement_string(evaluator, param_str, module)]
        )
    func = param.parent_function
    module = param.get_parent_until()
    types = eval_docstring(func.raw_doc)
    if func.name.value == '__init__':
        cls = func.get_parent_until(Class)
        if cls.type == 'classdef':
            types |= eval_docstring(cls.raw_doc)
    return types
@memoize_default(None, evaluator_is_first_arg=True)
def find_return_types(evaluator, func):
    """Infer the return types of `func` from its docstring.

    The first pattern in DOCSTRING_RETURN_PATTERNS (e.g. the Epydoc
    ``@rtype:`` field) that matches wins; the extracted type string is then
    evaluated. Results are memoized per evaluator.
    """
    def search_return_in_docstr(code):
        # Returns the stripped type string of the first matching pattern,
        # or None implicitly when nothing matches.
        for p in DOCSTRING_RETURN_PATTERNS:
            match = p.search(code)
            if match:
                return _strip_rst_role(match.group(1))
    type_str = search_return_in_docstr(func.raw_doc)
    return _evaluate_for_statement_string(evaluator, type_str, func.get_parent_until())
| {
"content_hash": "e6232341e933055455db2555c38ad327",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 102,
"avg_line_length": 34.588235294117645,
"alnum_prop": 0.6302437641723356,
"repo_name": "adammenges/atomconfig",
"id": "d2ab34ed10c672df18712b7b9df582dcd583a6cc",
"size": "7056",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "packages/autocomplete-python/lib/jedi/evaluate/docstrings.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Apex",
"bytes": "1010"
},
{
"name": "Batchfile",
"bytes": "1842"
},
{
"name": "C",
"bytes": "1104724"
},
{
"name": "C#",
"bytes": "3424"
},
{
"name": "C++",
"bytes": "679558"
},
{
"name": "CSS",
"bytes": "164756"
},
{
"name": "Clojure",
"bytes": "3693"
},
{
"name": "CoffeeScript",
"bytes": "335025"
},
{
"name": "ColdFusion",
"bytes": "387"
},
{
"name": "Common Lisp",
"bytes": "1913"
},
{
"name": "Crystal",
"bytes": "1780"
},
{
"name": "D",
"bytes": "246"
},
{
"name": "Elm",
"bytes": "118"
},
{
"name": "Fortran",
"bytes": "131"
},
{
"name": "Gherkin",
"bytes": "1644"
},
{
"name": "Go",
"bytes": "273"
},
{
"name": "HTML",
"bytes": "705712"
},
{
"name": "Haskell",
"bytes": "300"
},
{
"name": "Java",
"bytes": "1962"
},
{
"name": "JavaScript",
"bytes": "497845"
},
{
"name": "Lua",
"bytes": "1773"
},
{
"name": "OCaml",
"bytes": "1348"
},
{
"name": "Objective-C",
"bytes": "8993"
},
{
"name": "PHP",
"bytes": "1938"
},
{
"name": "Perl",
"bytes": "2333"
},
{
"name": "PowerShell",
"bytes": "5987"
},
{
"name": "Puppet",
"bytes": "5650"
},
{
"name": "Python",
"bytes": "545739"
},
{
"name": "R",
"bytes": "1474"
},
{
"name": "Ruby",
"bytes": "1646"
},
{
"name": "Rust",
"bytes": "86"
},
{
"name": "Shell",
"bytes": "4673"
},
{
"name": "TeX",
"bytes": "145"
},
{
"name": "TypeScript",
"bytes": "1565"
},
{
"name": "Vue",
"bytes": "1298"
}
],
"symlink_target": ""
} |
from netforce.model import Model, fields, get_model
import time
from netforce import database
from netforce.access import get_active_user, set_active_user
from netforce.access import get_active_company
class Move(Model):
    """Extension of stock.move.

    Adds a generic "Related To" reference field and a helper that finds the
    production orders consuming the moved products as components.
    """
    _inherit = "stock.move"
    _fields = {
        "related_id": fields.Reference(
            [
                ["sale.order", "Sales Order"],
                ["purchase.order", "Purchase Order"],
                ["production.order", "Production Order"],
                ["job", "Service Order"],
                ["account.invoice", "Invoice"],
                ["pawn.loan", "Loan"],
            ],
            "Related To"),
    }

    def get_production_orders(self, ids, context={}):
        """Return the ids of production orders that use any product moved
        by the given stock moves as a component."""
        product_ids = list({move.product_id.id for move in self.browse(ids)})
        component_model = get_model("production.component")
        order_ids = {
            comp.order_id.id
            for comp in component_model.search_browse(
                [["product_id", "in", product_ids]])
        }
        return list(order_ids)

Move.register()
| {
"content_hash": "07bea98587b97e78352ac9df54cd1026",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 70,
"avg_line_length": 34.53125,
"alnum_prop": 0.5619909502262443,
"repo_name": "nfco/netforce",
"id": "4333d33096e13a5da1bc3f3b747d3208f3473124",
"size": "2207",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "netforce_mfg/netforce_mfg/models/stock_move.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "73"
},
{
"name": "CSS",
"bytes": "407336"
},
{
"name": "HTML",
"bytes": "478918"
},
{
"name": "Java",
"bytes": "11870"
},
{
"name": "JavaScript",
"bytes": "3712147"
},
{
"name": "Makefile",
"bytes": "353"
},
{
"name": "PHP",
"bytes": "2274"
},
{
"name": "Python",
"bytes": "3469515"
},
{
"name": "Roff",
"bytes": "15858"
},
{
"name": "Shell",
"bytes": "117"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import unicode_literals
import six
from acos_client.v30 import base
class SLBCommon(base.BaseV30):
    """Client wrapper for the ACOS ``/slb/common`` configuration endpoint."""

    url_prefix = "/slb/common"

    def _underscore_to_dash(self, val):
        """Translate a python_style key into the dash-style used by the API."""
        return val.replace("_", "-")

    def create(self, **kwargs):
        """POST the given settings, converting every key to dash style."""
        payload = {
            "common": {
                self._underscore_to_dash(key): value
                for key, value in six.iteritems(kwargs)
            }
        }
        return self._post(self.url_prefix, payload)
| {
"content_hash": "d5df7b14c080190fc9d145bf7634b77e",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 61,
"avg_line_length": 23.304347826086957,
"alnum_prop": 0.5988805970149254,
"repo_name": "mdurrant-b3/acos-client",
"id": "521157f2b40fba6c5c74d9c6f9cf7dcd554dd156",
"size": "1146",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "acos_client/v30/slb/common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "477240"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from __future__ import unicode_literals
# Python 2/3 compatibility: on Python 2 use raw_input() so the prompt returns
# a plain string instead of evaluating the typed text.
try:
    input = raw_input # Python 2.7
except NameError:
    pass
import requests
import json
import time
# Read connection settings from settings.ini when available, falling back to
# interactive prompts. The ConfigParser module name differs between
# Python 2 (ConfigParser) and Python 3 (configparser).
try:
    from ConfigParser import ConfigParser # Python 2.7
    config = ConfigParser()
    config.read("settings.ini")
    server = config.get("account", "server")
    login_id = config.get("account", "login_id")
    login_password = config.get("account", "login_password")
except:
    try:
        from configparser import ConfigParser # Python 3
        config = ConfigParser()
        config.read("settings.ini")
        server = config["account"]["server"]
        login_id = config["account"]["login_id"]
        login_password = config["account"]["login_password"]
    except:
        # No (readable) settings.ini: ask the user directly.
        server = input("server ip and port: ")
        login_id = input("username: ")
        login_password = input("password: ")
        print()
# Authenticate; the returned token is passed to all subsequent API calls.
r = requests.post("http://{}/api/login".format(server),
    headers={"User-Agent": "ZDCLIENT", "Cache-Control": "no-cache"},
    data={"username": login_id, "password": login_password})
if r.status_code == 200:
    print("/api/login OK")
else:
    print("/api/login ERR")
    exit(1)
token = r.json()["message"]
# Fetch the VM list and pull the credentials and ids out of the first VM.
# NOTE(review): assumes at least one VM with this exact clientPreferences
# layout — confirm against the server API.
r = requests.get("http://{}/api/client/{}/vms".format(server, login_id),
    headers={"User-Agent": "ZDCLIENT", "Content-Type": "plain/text"},
    params={"token": token})
try:
    login = r.json()[0]["clientPreferences"][1]["value"]
    password = r.json()[0]["clientPreferences"][0]["value"]
    client_id = r.json()[0]["clientPreferences"][0]["clientId"]
    vm_id = r.json()[0]["clientPreferences"][0]["vmId"]
    service_host_id = r.json()[0]["serviceHostId"]
    print("/api/client/{}/vms OK".format(login_id))
except:
    print("/api/client/{}/vms ERR".format(login_id))
    exit(1)
# Ask the server to start the VM on its service host.
r = requests.post("http://{}/api/vm/{}/start".format(server, vm_id),
    headers={"User-Agent": "ZDCLIENT", "Content-Type": "application/json"},
    params={"token": token, "endPointId": str(service_host_id)}, data="{}")
if r.status_code == 200:
    print("/api/vm/{}/start OK".format(vm_id))
else:
    print("/api/vm/{}/start ERR".format(vm_id))
    exit(1)
# Poll until an RDP session can be established (the VM may still be
# booting); retry every 10 seconds on failure.
while True:
    r = requests.post("http://{}/api/session/connect".format(server),
        headers={"User-Agent": "ZDCLIENT", "Content-Type": "application/json", "Cache-Control": "no-cache"},
        params={"token": token},
        data=json.dumps({"clientId": client_id, "vmId": vm_id, "type": "RDP"}, separators=(',', ':')))
    if r.status_code != 200:
        print("/api/session/connect ERR ({}) retry after 10 seconds ..".format(r.json()["message"]))
        time.sleep(10)
    else:
        break
# Print the final RDP connection details for the user.
print()
print("ip address: {}:{}".format(r.json()["ip"], r.json()["port"]))
print("windows id: {}".format(login))
print("windows password: {}".format(password))
| {
"content_hash": "b33c98ab8e5550f5844c7707af1b1566",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 122,
"avg_line_length": 33.225806451612904,
"alnum_prop": 0.5660194174757281,
"repo_name": "youngminz/zdesktop.py",
"id": "48ef3d0beb82c8303672409cd6dab8bc8b82f97a",
"size": "3113",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/zdesktop.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3113"
}
],
"symlink_target": ""
} |
"""Create high-resolution head surfaces for coordinate alignment.
Examples
--------
.. code-block:: console
$ mne make_scalp_surfaces --overwrite --subject sample
"""
import os
import sys
import mne
from mne.bem import make_scalp_surfaces
def run():
    """Run command."""
    from mne.commands.utils import get_optparser, _add_verbose_flag

    parser = get_optparser(__file__)
    subjects_dir = mne.get_config('SUBJECTS_DIR')

    parser.add_option('-o', '--overwrite', dest='overwrite',
                      action='store_true',
                      help='Overwrite previously computed surface')
    parser.add_option('-s', '--subject', dest='subject',
                      help='The name of the subject', type='str')
    parser.add_option('-f', '--force', dest='force', action='store_true',
                      help='Force creation of the surface even if it has '
                      'some topological defects.')
    parser.add_option("-d", "--subjects-dir", dest="subjects_dir",
                      help="Subjects directory", default=subjects_dir)
    parser.add_option("-n", "--no-decimate", dest="no_decimate",
                      help="Disable medium and sparse decimations "
                      "(dense only)", action='store_true')
    _add_verbose_flag(parser)
    options, args = parser.parse_args()

    # Fix: optparse stores every declared option on the Values object, so
    # ``vars(options).get('subject', fallback)`` always finds the key (with
    # value None when -s was not given) and the fallback was never used.
    # Check for None explicitly so the SUBJECT environment variable works.
    subject = options.subject
    if subject is None:
        subject = os.getenv('SUBJECT')
    subjects_dir = options.subjects_dir
    if subject is None or subjects_dir is None:
        parser.print_help()
        sys.exit(1)

    make_scalp_surfaces(
        subject=subject,
        subjects_dir=subjects_dir,
        force=options.force,
        overwrite=options.overwrite,
        no_decimate=options.no_decimate,
        verbose=options.verbose)


mne.utils.run_command_if_main()
| {
"content_hash": "8c7153aafa1a8297adba29762c876610",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 74,
"avg_line_length": 33.351851851851855,
"alnum_prop": 0.6046640755136036,
"repo_name": "bloyl/mne-python",
"id": "1e0e4cbcd8a5e03d02e0dc9c6703a1e33c24f1b0",
"size": "2037",
"binary": false,
"copies": "3",
"ref": "refs/heads/placeholder",
"path": "mne/commands/mne_make_scalp_surfaces.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Csound Document",
"bytes": "24999"
},
{
"name": "Makefile",
"bytes": "4450"
},
{
"name": "Python",
"bytes": "8190297"
},
{
"name": "Shell",
"bytes": "936"
}
],
"symlink_target": ""
} |
from base import eigenvalue_decomposition, principal_component_decomposition
| {
"content_hash": "550a8dfe9664478d369096edf5475b1e",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 76,
"avg_line_length": 77,
"alnum_prop": 0.8831168831168831,
"repo_name": "jabooth/menpo-archive",
"id": "6c00970d9d2faa579e435ad05fee315af39790b6",
"size": "77",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "menpo/decomposition/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "101730"
},
{
"name": "C++",
"bytes": "169304"
},
{
"name": "Python",
"bytes": "818217"
},
{
"name": "Shell",
"bytes": "776"
}
],
"symlink_target": ""
} |
import sys
import os
import re
import OpenSSL.crypto as crypto
import json
from jsonpointer import resolve_pointer
from jsonpath_rw import jsonpath, parse
import argparse
from datetime import datetime
import ssltools.certificates as certificates
from ssltools.openssl import call_openssl
from ssltools.json import to_json
if __name__ == "__main__":
cli = argparse.ArgumentParser(description = "Get and process SSL certificate chains.")
cli.add_argument("--host", dest = "host", nargs = 1, type = str, required = True,
help = "Hostname/IP to connect to.")
cli.add_argument("-p", "--port", dest = "port", type = int, nargs = 1, default = 443,
help = "Port to connect to (defaults to 443)")
cli.add_argument("-s", "--sni-name", dest = "sni_name", type = str, nargs = 1,
help = "SNI name to send to the server. Use this if the host supports multiple SSL certificates.")
cli.add_argument("--json-pointer", dest = "json_pointer", type = str, nargs = 1,
help = "JSON pointer query string (RFC6901) to get a specific attribute from the certificate data.")
cli.add_argument("--json-path", dest = "json_path", nargs = "+",
help = "JSON path (http://goessner.net/articles/JsonPath/) filter string " +
"to query a subset of the certificate data. Multiple queries can be specified that are executed in " +
"order on the result of the previous query.")
cli.add_argument("-u", "--unwrap", dest = "unwrap", action = "store_true",
help = "Unwrap transforms different data types into a simpler format. If a result is a simple string, " +
"or a datetime the quotes are removed. If the result is a X509 name, its parts are joined to a string " +
"in the way used by openssl (C=..., O=..., OU=..., CN=...)")
cli.add_argument("-r", "--raw", dest = "raw", action = "store_true",
help = "Just get the certificate chain in PEM format and print it to standard output.")
args = cli.parse_args()
opensslCommandLine = ["s_client", "-connect", "%s:%i" % (args.host[0], args.port[0]), "-showcerts"]
if args.sni_name != None and len(args.sni_name) > 0:
opensslCommandLine.append("-servername")
opensslCommandLine.append(args.sni_name[0])
openssl = call_openssl(opensslCommandLine, "Q\n")
if openssl['code'] != 0:
print >> sys.stderr, "Error: Failure executing openssl command.\n"
print >> sys.stderr, openssl['err']
sys.exit(1)
if openssl['out'] != None and openssl['out'] != "":
plainCerts = certificates.find_certificates(openssl['out'])
if args.raw:
for cert in plainCerts:
print cert
sys.exit(0)
certs = []
jsonCerts = []
for cert in plainCerts:
certs.append(crypto.load_certificate(crypto.FILETYPE_PEM, cert))
for cert in certs:
jsonCerts.append(certificates.certificate_to_dict(cert))
if args.json_path != None and len(args.json_path) > 0:
for pathExpression in args.json_path:
expr = parse(pathExpression)
jsonCerts = [match.value for match in expr.find(jsonCerts)]
if args.json_pointer != None and len(args.json_pointer) > 0:
pointer = args.json_pointer[0]
jsonCerts = resolve_pointer(jsonCerts, pointer)
if args.unwrap and isinstance(jsonCerts, str):
jsonData = jsonCerts
elif args.unwrap and isinstance(jsonCerts, datetime):
jsonData = jsonCerts.isoformat()
elif args.unwrap and isinstance(jsonCerts, dict):
jsonData = ""
for key in jsonCerts:
jsonData += key + "=" + jsonCerts[key] + ", "
if len(jsonData) > 0: jsonData = jsonData[0:-2]
else:
jsonData = to_json(jsonCerts, pretty = True)
print jsonData
else:
print >> sys.stderr, "no output!"
| {
"content_hash": "ab636341fce94012d14c89e888e8fa22",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 113,
"avg_line_length": 48.74074074074074,
"alnum_prop": 0.6223404255319149,
"repo_name": "nanobot248/ssltools",
"id": "64cbee6e9e431ce3e08537f642387fb260c5f26d",
"size": "3971",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "get-ssl-certificate-chain.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14702"
}
],
"symlink_target": ""
} |
import re
import Gaffer
import GafferUI
from Qt import QtCore
from Qt import QtGui
class NameWidget( GafferUI.TextWidget ) :
	"""A TextWidget bound to the name of a GraphComponent.

	Edits made in the widget rename the component (via an undoable
	`setName()` call), and external renames update the widget text.
	Editability tracks the component's read-only state and its
	"renameable" metadata.
	"""

	def __init__( self, graphComponent, **kw ) :
		GafferUI.TextWidget.__init__( self, **kw )
		# Restrict typed input to valid GraphComponent names.
		self._qtWidget().setValidator( _Validator( self._qtWidget() ) )
		self.__graphComponent = None
		self.setGraphComponent( graphComponent )
		self.editingFinishedSignal().connect( Gaffer.WeakMethod( self.__setName ), scoped = False )

	def setGraphComponent( self, graphComponent ) :
		# Bind the widget to a (possibly None) GraphComponent, replacing any
		# connections made for the previous component.
		if self.__graphComponent == graphComponent :
			return
		self.__graphComponent = graphComponent
		if self.__graphComponent is not None :
			# Keep the displayed text in sync with external renames.
			self.__nameChangedConnection = self.__graphComponent.nameChangedSignal().connect( Gaffer.WeakMethod( self.__setText ) )
			# Watch metadata changes so editability follows read-only /
			# "renameable" updates for nodes and plugs.
			if isinstance( self.__graphComponent, Gaffer.Node ) :
				self.__metadataChangedConnection = Gaffer.Metadata.nodeValueChangedSignal().connect( Gaffer.WeakMethod( self.__nodeMetadataChanged ) )
			elif isinstance( self.__graphComponent, Gaffer.Plug ) :
				self.__metadataChangedConnection = Gaffer.Metadata.plugValueChangedSignal().connect( Gaffer.WeakMethod( self.__plugMetadataChanged ) )
			else :
				self.__metadataChangedConnection = None
		else :
			# Dropping the stored connections disconnects them.
			self.__nameChangedConnection = None
			self.__metadataChangedConnection = None
		self.__setText()
		self.__updateEditability()

	def getGraphComponent( self ) :
		return self.__graphComponent

	def __setName( self, *unwantedArgs ) :
		# Slot for editingFinishedSignal() : applies the edited text as an
		# undoable rename. setName() may return an adjusted name, which is
		# written back into the widget.
		if self.__graphComponent is None :
			return
		with Gaffer.UndoScope( self.__graphComponent.ancestor( Gaffer.ScriptNode ) ) :
			self.setText( self.__graphComponent.setName( self.getText() ) )

	def __setText( self, *unwantedArgs ) :
		# Slot for nameChangedSignal() : mirrors the component name into the widget.
		self.setText( self.__graphComponent.getName() if self.__graphComponent is not None else "" )

	def __updateEditability( self ) :
		editable = False
		if self.__graphComponent is not None :
			# Editable only when not read-only and flagged "renameable" in metadata.
			editable = not Gaffer.MetadataAlgo.readOnly( self.__graphComponent ) and Gaffer.Metadata.value( self.__graphComponent, "renameable" )
		self.setEditable( editable )

	def __nodeMetadataChanged( self, nodeTypeId, key, node ) :
		# Refresh editability when the change affects our node's read-only
		# state or its "renameable" key.
		if (
			Gaffer.MetadataAlgo.readOnlyAffectedByChange( self.__graphComponent, nodeTypeId, key, node ) or
			node == self.__graphComponent and key == "renameable"
		) :
			self.__updateEditability()

	def __plugMetadataChanged( self, nodeTypeId, plugPath, key, plug ) :
		# As above, but for plug metadata.
		if (
			Gaffer.MetadataAlgo.readOnlyAffectedByChange( self.__graphComponent, nodeTypeId, plugPath, key, plug ) or
			plug == self.__graphComponent and key == "renameable"
		) :
			self.__updateEditability()
class _Validator( QtGui.QValidator ) :
	"""Restricts widget text to valid GraphComponent names.

	Spaces are converted to underscores, and only identifier-style names
	are accepted.
	"""

	def __init__( self, parent ) :
		QtGui.QValidator.__init__( self, parent )

	def validate( self, input, pos ) :
		input = input.replace( " ", "_" )
		if not len( input ) :
			state = QtGui.QValidator.Intermediate
		elif re.match( "^[A-Za-z_]+[A-Za-z_0-9]*$", input ) :
			state = QtGui.QValidator.Acceptable
		else :
			state = QtGui.QValidator.Invalid
		if hasattr( QtCore, "QString" ) and isinstance( input, QtCore.QString ) :
			# PyQt API, where QString type is exposed and we modify it in place
			return state, pos
		else :
			# PySide API, where QString is mapped automatically to python string
			# and we return a new string.
			return state, input, pos
| {
"content_hash": "fb2b39b9722fb2eb5eb43d53d8fb68e9",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 138,
"avg_line_length": 31.40566037735849,
"alnum_prop": 0.707720036046861,
"repo_name": "lucienfostier/gaffer",
"id": "d0f99fe83999e132ca66d8458f60da117011b303",
"size": "5194",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python/GafferUI/NameWidget.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "41979"
},
{
"name": "C++",
"bytes": "7610953"
},
{
"name": "CMake",
"bytes": "85201"
},
{
"name": "GLSL",
"bytes": "6236"
},
{
"name": "Python",
"bytes": "7892655"
},
{
"name": "Shell",
"bytes": "15031"
}
],
"symlink_target": ""
} |
"""
Import from public cell data into a local dev environment.
Download from https://location.services.mozilla.com/downloads
This has been tested with a differential cell export (~400kB compressed).
A full cell export (~370,000kB) contains unexpected data that will
require code changes to handle gracefully, and may require adjusting
the resources of the development environment.
"""
import argparse
import logging
import os
import os.path
import sys
from ichnaea.conf import settings
from ichnaea.db import db_worker_session
from ichnaea.log import configure_logging
from ichnaea.data.public import read_stations_from_csv
from ichnaea.taskapp.config import init_worker
from ichnaea.util import gzip_open
LOGGER = logging.getLogger(__name__)
def get_eager_celery_app():
    """Returns an eagerly configured celery app."""
    # We have to import and fix celery settings before importing the celery_app
    # module since that has a side effect of creating the celery app
    from ichnaea.taskapp import settings as celery_settings

    # Eager mode runs tasks synchronously in-process and propagates their
    # exceptions — what we want for a one-shot import script.
    celery_settings.task_always_eager = True
    celery_settings.task_eager_propagates = True

    from ichnaea.taskapp.app import celery_app

    return celery_app
def main(argv, _db=None):
    """Parse arguments and run the public cell data import.

    Returns a process exit code: 0 on success, 1 on a usage or environment
    error. ``_db`` is accepted for injection in tests and otherwise unused.
    """
    arg_parser = argparse.ArgumentParser(
        prog=argv[0],
        description=(
            "Import from public cell data into a local dev environment. "
            "See https://location.services.mozilla.com/downloads"
        ),
    )
    arg_parser.add_argument("filename", help="Path to the csv.gz import file.")
    options = arg_parser.parse_args(argv[1:])

    # Guard: importing public data is only meant for development setups.
    if not settings("local_dev_env"):
        print("This script can only be run in a local dev environment.")
        print("Set LOCAL_DEV_ENV=True in your environment.")
        return 1

    import_path = os.path.abspath(os.path.expanduser(options.filename))
    if not os.path.isfile(import_path):
        print("File %s not found." % import_path)
        return 1

    configure_logging()

    celery_app = get_eager_celery_app()
    init_worker(celery_app)
    cellarea_queue = celery_app.data_queues["update_cellarea"]
    with db_worker_session(celery_app.db, commit=False) as session:
        with gzip_open(import_path, "r") as file_handle:
            read_stations_from_csv(
                session, file_handle, celery_app.redis_client, cellarea_queue
            )

    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv))
| {
"content_hash": "ef345486cad61695a6487c8f4a928696",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 79,
"avg_line_length": 30.696202531645568,
"alnum_prop": 0.6989690721649484,
"repo_name": "mozilla/ichnaea",
"id": "f15d6fdadaa96d4414742a7b8855eaf373f4195f",
"size": "2447",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "ichnaea/scripts/load_cell_data.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "34767"
},
{
"name": "Cython",
"bytes": "16678"
},
{
"name": "Dockerfile",
"bytes": "2819"
},
{
"name": "HTML",
"bytes": "32679"
},
{
"name": "JavaScript",
"bytes": "139102"
},
{
"name": "Makefile",
"bytes": "11673"
},
{
"name": "Mako",
"bytes": "432"
},
{
"name": "Python",
"bytes": "1007139"
},
{
"name": "Shell",
"bytes": "8899"
}
],
"symlink_target": ""
} |
import sys
import yaml
import string
from yamllint.linter import LintProblem
# Require at least one file argument on the command line.
if len(sys.argv) < 2:
    print 'Missing file to lint'
    sys.exit(1)
# Rule identification constants in yamllint's rule-module style.
ID = 'trailing-spaces'
TYPE = 'line'
def check(conf, line):
    """Yield a LintProblem when `line` ends in trailing spaces or tabs.

    `conf` is accepted for yamllint rule-signature compatibility and is
    unused.
    """
    if line.end == 0:
        return
    # Walk left from the end of the line over any whitespace.
    cursor = line.end
    while line.buffer[cursor - 1] in string.whitespace and cursor > line.start:
        cursor -= 1
    # Report only when actual spaces/tabs (not just a newline) were skipped.
    if cursor != line.end and line.buffer[cursor] in ' \t':
        yield LintProblem(line.line_no, cursor - line.start + 1,
                          'trailing spaces')
# Validate each file argument: a successful yaml.load means the syntax is OK.
i = 1
while i < len(sys.argv):
    try:
        yaml.load( open(sys.argv[i], 'r'), Loader=yaml.CLoader)
        print 'YAML: Syntax OK in file: '+ sys.argv[i]
    except:
        # Any failure (unreadable file or invalid YAML) is reported as invalid.
        print "YAML: Invalid FAIL in file " + sys.argv[i]
    i+=1
| {
"content_hash": "5aca6903f9f4a929409f885625af3922",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 73,
"avg_line_length": 25.266666666666666,
"alnum_prop": 0.5897097625329816,
"repo_name": "open-switch/infra_project-config",
"id": "7cf878d497d371da1a283d4ee58b5d884107f91b",
"size": "781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jenkins/scripts/yamllint.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "222592"
},
{
"name": "Shell",
"bytes": "164489"
}
],
"symlink_target": ""
} |
# Legacy Django (<1.4) manage.py: delegates command handling to
# execute_manager with the project's settings module.
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("""Error: Can't find the file 'settings.py' in the
directory containing %r. It appears you've customized things. You'll
have to run django-admin.py, passing it your settings module.
(If the file settings.py does indeed exist, it's causing an ImportError
somehow.)\n""" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    execute_manager(settings)
| {
"content_hash": "472e03c748b274cc2e406fbccaf4c3ee",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 71,
"avg_line_length": 37.57142857142857,
"alnum_prop": 0.7186311787072244,
"repo_name": "MapofLife/MOL",
"id": "0b932dae01b91464e63babc16af38d1b0f182c93",
"size": "544",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "earthengine/google-api-python-client/samples/django_sample/manage.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "83354"
},
{
"name": "CSS",
"bytes": "245523"
},
{
"name": "JavaScript",
"bytes": "1302309"
},
{
"name": "PHP",
"bytes": "613"
},
{
"name": "Perl",
"bytes": "2100"
},
{
"name": "Python",
"bytes": "1953387"
},
{
"name": "R",
"bytes": "52"
},
{
"name": "SQL",
"bytes": "21299"
},
{
"name": "Shell",
"bytes": "3146"
}
],
"symlink_target": ""
} |
"""Generally useful utilities for AWS web services not specific to a service.
New things in this module should be of relevance to more than one of Amazon's
services.
"""
from base64 import b64encode
from hashlib import sha1, md5, sha256
import hmac
from urlparse import urlparse, urlunparse
import time
# Import XMLTreeBuilder from somewhere; here in one place to prevent
# duplication.
try:
from xml.etree.ElementTree import XMLTreeBuilder
except ImportError:
from elementtree.ElementTree import XMLTreeBuilder
__all__ = ["hmac_sha1", "hmac_sha256", "iso8601time", "calculate_md5", "XML"]
def calculate_md5(data):
    """Return the base64-encoded MD5 digest of `data`."""
    return b64encode(md5(data).digest())
def hmac_sha1(secret, data):
    """Return the base64-encoded HMAC-SHA1 of `data`, keyed with `secret`."""
    return b64encode(hmac.new(secret, data, sha1).digest())
def hmac_sha256(secret, data):
    """Return the base64-encoded HMAC-SHA256 of `data`, keyed with `secret`."""
    return b64encode(hmac.new(secret, data, sha256).digest())
def iso8601time(time_tuple):
    """Format time_tuple as a ISO8601 time string.

    :param time_tuple: Either None, to use the current UTC time, or a time
        tuple to format.
    """
    if not time_tuple:
        time_tuple = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time_tuple)
class NamespaceFixXmlTreeBuilder(XMLTreeBuilder):
    """Tree builder that strips XML namespaces from element names.

    ElementTree encodes namespaced tags as ``{uri}local``; overriding
    ``_fixname`` keeps only the local part so elements can be addressed
    by their bare tag name.
    """

    def _fixname(self, key):
        if "}" in key:
            key = key.split("}", 1)[1]
        return key
def XML(text):
    """Parse `text` into an element tree whose tag names carry no namespaces."""
    builder = NamespaceFixXmlTreeBuilder()
    builder.feed(text)
    return builder.close()
def parse(url, defaultPort=True):
    """
    Split the given URL into the scheme, host, port, and path.

    @type url: C{str}
    @param url: An URL to parse.

    @type defaultPort: C{bool}
    @param defaultPort: Whether to return the default port associated with
        the scheme in the given url, when the url doesn't specify one.

    @return: A four-tuple of the scheme, host, port, and path of the URL.
        All of these are C{str} instances except for port, which is an
        C{int}.
    """
    parsed = urlparse(url.strip())
    scheme = parsed[0]
    netloc = parsed[1]
    # Rebuild everything after the netloc (path, query, fragment, ...).
    path = urlunparse(("", "") + parsed[2:])

    host = netloc
    port = None
    if ":" in netloc:
        host, port_text = netloc.split(":")
        try:
            port = int(port_text)
        except ValueError:
            # A non-numeric port was given; leave it as None so it can be
            # replaced by the scheme default below when defaultPort is True.
            port = None

    if defaultPort and port is None:
        port = 443 if scheme == "https" else 80

    if not path:
        path = "/"
    return (str(scheme), str(host), port, str(path))
| {
"content_hash": "6cd4f422a1c5329d1d93eb31b5822f3e",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 78,
"avg_line_length": 26.135922330097088,
"alnum_prop": 0.6341010401188707,
"repo_name": "daira/txaws",
"id": "333097542cede183b51711ecdd7a09d48dc1de9c",
"size": "2692",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "txaws/util.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Perl",
"bytes": "2140"
},
{
"name": "Python",
"bytes": "410616"
},
{
"name": "Racket",
"bytes": "448"
}
],
"symlink_target": ""
} |
import subprocess
import tempfile
import os
import shutil
# Options
# Beamer presentation themes to sample.
themes = ['default', 'AnnArbor', 'Antibes', 'Bergen', 'Berkeley', 'Berlin',
          'Boadilla', 'CambridgeUS', 'Copenhagen', 'Darmstadt', 'Dresden',
          'EastLansing', 'Frankfurt', 'Goettingen', 'Hannover', 'Ilmenau',
          'JuanLesPins', 'Luebeck', 'Madrid', 'Malmoe', 'Marburg', 'Montpellier',
          'PaloAlto', 'Pittsburgh', 'Rochester', 'Singapore', 'Szeged', 'Warsaw']
# Beamer color themes sampled for each theme above.
colorThemes = ['default', 'albatross', 'beaver', 'beetle', 'crane', 'dolphin',
               'dove', 'fly', 'lily', 'monarca', 'orchid', 'rose', 'seagull', 'seahorse',
               'spruce', 'whale', 'wolverine']
thumbSize = 200 # Changing thumbnail size requires additional CSS changes
fullSize = 1000 # Width in pixels of the full-size preview images
# Always behave the same no matter where script was called from
scriptDir = os.path.dirname(os.path.realpath(__file__))
#
# Generate LaTeX outputs
#
# Read the LaTeX template that gets instantiated per theme/color pair.
with open(os.path.join(scriptDir, 'beamer.tex')) as inFile:
    texSource = inFile.read()
outputDir = os.path.join(scriptDir, 'output')
try:
    os.mkdir(outputDir)
except OSError:
    pass # Directory already exists
os.chdir(outputDir)
# Scratch directory for intermediate LaTeX build artefacts.
tempDir = tempfile.mkdtemp()
# Create PDF for given theme / color combination and optionally copy result
def createPDF(theme, colorTheme, copy=True):
    """Build the sample PDF for one theme/color combination.

    Writes the instantiated template into the temp directory, runs pdflatex
    on it and (optionally) copies the PDF into the current directory.
    Returns the file name of the (copied) PDF.
    """
    texPath = os.path.join(tempDir, theme + '-' + colorTheme + '.tex')
    instantiated = texSource.replace('#THEME', theme).\
        replace('#COLOR_THEME', colorTheme)
    with open(texPath, 'w') as outFile:
        outFile.write(instantiated)
    subprocess.call(['pdflatex', '-output-directory=' + tempDir, texPath])
    pdfPath = texPath[:-3] + 'pdf'
    target = colorTheme + '.pdf'
    if copy:
        shutil.copy(pdfPath, target)
    return target
# Create PNG from PDF
def createImage(pdf, prefix, width):
    """Render every page of ``pdf`` as PNGs named ``<prefix>-<page>.png``,
    scaled so the larger dimension is ``width`` pixels (via pdftoppm)."""
    command = ['pdftoppm', '-scale-to', str(width), '-png', pdf, prefix]
    subprocess.call(command)
# First LaTeX run
# Warm-up compile (result discarded); presumably needed so later runs have
# consistent aux state — TODO confirm intent.
createPDF('default', 'default', False)
# Create samples
for theme in themes:
    themeDir = os.path.join(outputDir, theme)
    try:
        os.mkdir(themeDir)
    except OSError:
        pass # Directory already exists
    os.chdir(themeDir)
    thumbs = []
    for colorTheme in colorThemes:
        pdf = createPDF(theme, colorTheme)
        # Per color theme: thumbnail and full-size renderings of both slides.
        createImage(pdf, pdf[:-4] + '-thumb', thumbSize)
        createImage(pdf, pdf[:-4] + '-full', fullSize)
        thumbs.append(pdf[:-4] + '-thumb-1.png')
        thumbs.append(pdf[:-4] + '-thumb-2.png')
        os.remove(pdf) # Clean up
    # Stitch all thumbnails for this theme into a single sprite sheet
    # (ImageMagick), then shrink it with pngquant.
    subprocess.call(['convert'] + thumbs + ['+append', 'thumbs.png'])
    subprocess.call(['pngquant', '-f', '--ext', '.png', 'thumbs.png'])
    # Clean up
    for thumb in thumbs:
        os.remove(thumb)
    # Optimize
    subprocess.call('optipng *.png', shell=True)
#
# Create web page
#
# Each matrix cell holds two thumbnails (slide 1 and slide 2) cut out of the
# theme's sprite sheet via a negative background-position offset, each
# linking to the corresponding full-size PNG.
htmlTable = '<table class="theme-grid">'
for theme in themes:
    htmlTable += '<tr>'
    for i in range(len(colorThemes)):
        colorTheme = colorThemes[i]
        htmlTable += '<td><div class="iblock"><div class="table">' \
            + '<div class="table-row"><div class="table-cell">' \
            + '<a href="' + theme + '/' + colorTheme \
            + '-full-1.png" data-sbox=' + theme + colorTheme \
            + ' title="Theme: ' + theme + ', Color Theme: ' \
            + colorTheme + '">' \
            + '<div class="beamer-thumb" style="background: url(\'' + theme \
            + '/thumbs.png\') -' + str(i * 2 * thumbSize) \
            + 'px 0;"></div></a></div><div class="table-cell">' \
            + '<a href="' + theme + '/' + colorTheme \
            + '-full-2.png" data-sbox=' + theme + colorTheme \
            + ' title="Theme: ' + theme + ', Color Theme: ' + colorTheme \
            + '"><div class="beamer-thumb beamer-right" ' \
            + 'style="background: url(\'' + theme + '/thumbs.png\') -' \
            + str((i * 2 + 1) * thumbSize) \
            + 'px 0;"></div></a></div></div></div></div></td>\n'
    htmlTable += '</tr>'
htmlTable += '</table>'
# Column headers (color themes) and row headers (themes) for the matrix.
topHeader = ''
for colorTheme in colorThemes:
    topHeader += '<td>' + colorTheme + '</td>\n'
leftHeader = ''
for theme in themes:
    leftHeader += '<tr><td><div>' + theme + '</div></td></tr>\n'
os.chdir(outputDir)
# Fill the HTML/CSS templates' placeholders and write the final page.
with open(os.path.join(scriptDir, 'matrix.html')) as inFile:
    htmlSource = inFile.read()
with open('index.html', 'w') as outFile:
    out = htmlSource.replace('#TABLE', htmlTable).\
        replace('#TOP_HEADER', topHeader).replace('#LEFT_HEADER', leftHeader)
    outFile.write(out)
with open(os.path.join(scriptDir, 'style.css')) as inFile:
    cssSource = inFile.read()
with open('style.css', 'w') as outFile:
    out = cssSource.replace('#TABLE_WIDTH', str(425 * len(colorThemes)) + 'px')
    outFile.write(out)
# Copy static assets shipped with the generator next to the page.
includesDir = os.path.join(scriptDir, 'includes')
shutil.copy(os.path.join(includesDir, 'bootstrap.min.css'), '.')
shutil.copy(os.path.join(includesDir, 'slenderbox.css'), '.')
shutil.copy(os.path.join(includesDir, 'slenderbox.js'), '.')
| {
"content_hash": "7af208a5fe85658460ae96b425dd0d69",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 79,
"avg_line_length": 34.87323943661972,
"alnum_prop": 0.60359450726979,
"repo_name": "mpetroff/beamer-theme-matrix",
"id": "9806c8fde41a618bfa5cd2b461cf3eb2edba44c4",
"size": "6118",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "generate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3609"
},
{
"name": "Python",
"bytes": "6118"
},
{
"name": "TeX",
"bytes": "1095"
}
],
"symlink_target": ""
} |
import json
from passrotate.provider import Provider, ProviderOption, PromptType, register_provider
from passrotate.forms import get_form
from urllib.parse import urlparse
import requests
from bs4 import BeautifulSoup
class GitLab(Provider):
    """Password-rotation provider for gitlab.com.

    [gitlab.com]
    username=Your GitLab username
    """
    name = "GitLab"
    domains = [
        "gitlab.com",
    ]
    options = {
        "username": ProviderOption(str, "Your GitLab username")
    }

    def __init__(self, options):
        self.username = options["username"]

    def _read_userid(self):
        """Fetch the numeric id of the authenticated user from the API.

        The id is needed because the password form's HTML id embeds it
        (see _set_form). Raises if the API call or response parsing fails.
        """
        try:
            r = self._session.get("https://gitlab.com/api/v4/user")
            self.user_id = json.loads(r.text)["id"]
        except Exception as exc:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed; chain the cause for easier debugging.
            raise Exception("Can't read user id from API") from exc

    def _handle_two_factor_auth(self, r):
        """Complete the TOTP step of the login flow, if the account uses 2FA.

        ``r`` is the response to the initial sign-in POST; when it contains
        the OTP input field, prompt the user for a code and submit it.
        """
        soup = BeautifulSoup(r.text, "html5lib")
        # look for the OTP input field
        otp_input = soup.find("input", attrs={ 'id': 'user_otp_attempt' })
        # if we didn't find it its probably not enabled, great!
        if otp_input is None:
            return
        # else we ask the user to provide its token and send it
        code = self.prompt("Enter your two factor (TOTP) code", PromptType.totp)
        form = get_form(r.text)
        form.update({
            "user[otp_attempt]": code
        })
        r = self._session.post("https://gitlab.com/users/sign_in", data=form)
        if r.status_code != 200:
            raise Exception("Unable to login via OTP")

    def _login(self, old_password):
        """Sign in with the current password; return the sign-in response."""
        r = self._session.get("https://gitlab.com/users/sign_in")
        form = get_form(r.text)
        form.update({
            "user[login]": self.username,
            "user[password]": old_password
        })
        r = self._session.post("https://gitlab.com/users/sign_in", data=form)
        if r.status_code != 200:
            raise Exception("Unable to log into GitLab account with current password")
        return r

    def _set_form(self):
        """Scrape the password-change form (CSRF token included) for later submission."""
        r = self._session.get("https://gitlab.com/profile/password/edit")
        self._form = get_form(r.text, id="edit_user_{}".format(self.user_id))

    def prepare(self, old_password):
        """Log in and collect everything needed to rotate the password."""
        self._session = requests.Session()
        r = self._login(old_password)
        self._handle_two_factor_auth(r)
        self._read_userid()
        self._set_form()

    def execute(self, old_password, new_password):
        """Submit the password-change form prepared by prepare()."""
        self._form.update({
            "user[current_password]": old_password,
            "user[password]": new_password,
            "user[password_confirmation]": new_password,
        })
        r = self._session.post("https://gitlab.com/profile/password", data=self._form)
        # Previously the response was ignored, so a failed rotation went
        # unnoticed; fail loudly instead, consistent with _login above.
        if r.status_code != 200:
            raise Exception("Unable to change GitLab password")

register_provider(GitLab)
| {
"content_hash": "68859d2a75111ae8c42cc19d7dfa9db3",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 87,
"avg_line_length": 31.056818181818183,
"alnum_prop": 0.5934870106110501,
"repo_name": "SirCmpwn/pass-rotate",
"id": "9ed0dd246f609dd724dc3bd563fe2ee80717e499",
"size": "2733",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "passrotate/providers/gitlab.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "44610"
}
],
"symlink_target": ""
} |
"""variables checkers for Python code
"""
import os
import sys
from copy import copy
import astroid
from astroid import are_exclusive, builtin_lookup, AstroidBuildingException
from logilab.common.modutils import file_from_modpath
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers.utils import (PYMETHODS, is_ancestor_name, is_builtin,
is_defined_before, is_error, is_func_default, is_func_decorator,
assign_parent, check_messages, is_inside_except, clobber_in_except,
get_all_elements)
def in_for_else_branch(parent, stmt):
    """Return True when *stmt* lives inside the ``else`` clause of *parent*
    (which must be a ``for`` statement for this to be possible)."""
    if not isinstance(parent, astroid.For):
        return False
    return any(orelse_stmt.parent_of(stmt) for orelse_stmt in parent.orelse)
def overridden_method(klass, name):
    """Return the ancestor method that *name* overrides in *klass*, or None."""
    try:
        ancestor = klass.local_attr_ancestors(name).next()
    except (StopIteration, KeyError):
        return None  # no ancestor defines this name
    try:
        candidate = ancestor[name]
    except KeyError:
        # An ancestor defines <name> but it is missing from its local
        # dictionary; happens with astroid trees built from living objects.
        return None
    if isinstance(candidate, astroid.Function):
        return candidate
    return None
def _get_unpacking_extra_info(node, infered):
    """Build the location suffix appended to unpacking-non-sequence and
    unbalanced-tuple-unpacking messages (empty string when the inferred
    value has no usable line information)."""
    infered_module = infered.root().name
    if node.root().name == infered_module:
        # Same module: quote the value itself when it sits on the same line,
        # otherwise point at its definition line.
        if node.lineno == infered.lineno:
            return ' %s' % infered.as_string()
        if infered.lineno:
            return ' defined at line %s' % infered.lineno
        return ''
    if infered.lineno:
        return ' defined at line %s of %s' % (infered.lineno, infered_module)
    return ''
MSGS = {
'E0601': ('Using variable %r before assignment',
'used-before-assignment',
'Used when a local variable is accessed before it\'s \
assignment.'),
'E0602': ('Undefined variable %r',
'undefined-variable',
'Used when an undefined variable is accessed.'),
'E0603': ('Undefined variable name %r in __all__',
'undefined-all-variable',
'Used when an undefined variable name is referenced in __all__.'),
'E0604': ('Invalid object %r in __all__, must contain only strings',
'invalid-all-object',
'Used when an invalid (non-string) object occurs in __all__.'),
'E0611': ('No name %r in module %r',
'no-name-in-module',
'Used when a name cannot be found in a module.'),
'W0601': ('Global variable %r undefined at the module level',
'global-variable-undefined',
'Used when a variable is defined through the "global" statement \
but the variable is not defined in the module scope.'),
'W0602': ('Using global for %r but no assignment is done',
'global-variable-not-assigned',
'Used when a variable is defined through the "global" statement \
but no assignment to this variable is done.'),
'W0603': ('Using the global statement', # W0121
'global-statement',
'Used when you use the "global" statement to update a global \
variable. PyLint just try to discourage this \
usage. That doesn\'t mean you can not use it !'),
'W0604': ('Using the global statement at the module level', # W0103
'global-at-module-level',
'Used when you use the "global" statement at the module level \
since it has no effect'),
'W0611': ('Unused import %s',
'unused-import',
'Used when an imported module or variable is not used.'),
'W0612': ('Unused variable %r',
'unused-variable',
'Used when a variable is defined but not used.'),
'W0613': ('Unused argument %r',
'unused-argument',
'Used when a function or method argument is not used.'),
'W0614': ('Unused import %s from wildcard import',
'unused-wildcard-import',
'Used when an imported module or variable is not used from a \
\'from X import *\' style import.'),
'W0621': ('Redefining name %r from outer scope (line %s)',
'redefined-outer-name',
'Used when a variable\'s name hide a name defined in the outer \
scope.'),
'W0622': ('Redefining built-in %r',
'redefined-builtin',
'Used when a variable or function override a built-in.'),
'W0623': ('Redefining name %r from %s in exception handler',
'redefine-in-handler',
'Used when an exception handler assigns the exception \
to an existing name'),
'W0631': ('Using possibly undefined loop variable %r',
'undefined-loop-variable',
'Used when an loop variable (i.e. defined by a for loop or \
a list comprehension or a generator expression) is used outside \
the loop.'),
'W0632': ('Possible unbalanced tuple unpacking with '
'sequence%s: '
'left side has %d label(s), right side has %d value(s)',
'unbalanced-tuple-unpacking',
'Used when there is an unbalanced tuple unpacking in assignment'),
'W0633': ('Attempting to unpack a non-sequence%s',
'unpacking-non-sequence',
'Used when something which is not '
'a sequence is used in an unpack assignment'),
'W0640': ('Cell variable %s defined in loop',
'cell-var-from-loop',
'A variable used in a closure is defined in a loop. '
'This will result in all closures using the same value for '
'the closed-over variable.'),
}
class VariablesChecker(BaseChecker):
    """checks for
    * unused variables / imports
    * undefined variables
    * redefinition of variable from builtins or from an outer scope
    * use of variable before assignment
    * __all__ consistency
    """

    __implements__ = IAstroidChecker

    name = 'variables'
    msgs = MSGS
    priority = -1
    options = (
        ("init-import",
         {'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>',
          'help' : 'Tells whether we should check for unused import in \
__init__ files.'}),
        ("dummy-variables-rgx",
         {'default': ('_$|dummy'),
          'type' :'regexp', 'metavar' : '<regexp>',
          'help' : 'A regular expression matching the name of dummy \
variables (i.e. expectedly not used).'}),
        ("additional-builtins",
         {'default': (), 'type' : 'csv',
          'metavar' : '<comma separated list>',
          'help' : 'List of additional names supposed to be defined in \
builtins. Remember that you should avoid to define new builtins when possible.'
          }),
        )

    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        # Stack of (to_consume, consumed, scope_type) triples, one entry per
        # open scope. Names move from the first dict to the second as they
        # are used; whatever remains in to_consume when the scope is left is
        # reported as unused.
        self._to_consume = None
        self._checking_mod_attr = None

    def visit_module(self, node):
        """visit module : update consumption analysis variable
        checks globals doesn't overrides builtins
        """
        self._to_consume = [(copy(node.locals), {}, 'module')]
        for name, stmts in node.locals.iteritems():
            if is_builtin(name) and not is_inside_except(stmts[0]):
                # do not print Redefining builtin for additional builtins
                self.add_message('redefined-builtin', args=name, node=stmts[0])

    @check_messages('unused-import', 'unused-wildcard-import', 'redefined-builtin', 'undefined-all-variable', 'invalid-all-object')
    def leave_module(self, node):
        """leave module: check globals
        """
        assert len(self._to_consume) == 1
        not_consumed = self._to_consume.pop()[0]
        # attempt to check for __all__ if defined
        if '__all__' in node.locals:
            assigned = node.igetattr('__all__').next()
            if assigned is not astroid.YES:
                # Every entry of __all__ must be a string naming something
                # that is either defined in (or importable from) this module.
                for elt in getattr(assigned, 'elts', ()):
                    try:
                        elt_name = elt.infer().next()
                    except astroid.InferenceError:
                        continue

                    if not isinstance(elt_name, astroid.Const) \
                             or not isinstance(elt_name.value, basestring):
                        self.add_message('invalid-all-object', args=elt.as_string(), node=elt)
                        continue
                    elt_name = elt_name.value
                    # If elt is in not_consumed, remove it from not_consumed
                    if elt_name in not_consumed:
                        del not_consumed[elt_name]
                        continue
                    if elt_name not in node.locals:
                        if not node.package:
                            self.add_message('undefined-all-variable',
                                             args=elt_name,
                                             node=elt)
                        else:
                            # For packages the name may refer to a submodule;
                            # see whether it resolves to a file on disk.
                            basename = os.path.splitext(node.file)[0]
                            if os.path.basename(basename) == '__init__':
                                name = node.name + "." + elt_name
                                try:
                                    file_from_modpath(name.split("."))
                                except ImportError:
                                    self.add_message('undefined-all-variable',
                                                     args=elt_name,
                                                     node=elt)
                                except SyntaxError, exc:
                                    # don't yield an syntax-error warning,
                                    # because it will be later yielded
                                    # when the file will be checked
                                    pass
        # don't check unused imports in __init__ files
        if not self.config.init_import and node.package:
            return
        for name, stmts in not_consumed.iteritems():
            # Augmented assignment both reads and writes the name, so it
            # should not be reported as unused.
            if any(isinstance(stmt, astroid.AssName)
                   and isinstance(stmt.ass_type(), astroid.AugAssign)
                   for stmt in stmts):
                continue
            stmt = stmts[0]
            if isinstance(stmt, astroid.Import):
                self.add_message('unused-import', args=name, node=stmt)
            elif isinstance(stmt, astroid.From) and stmt.modname != '__future__':
                if stmt.names[0][0] == '*':
                    self.add_message('unused-wildcard-import', args=name, node=stmt)
                else:
                    self.add_message('unused-import', args=name, node=stmt)
        del self._to_consume

    def visit_class(self, node):
        """visit class: update consumption analysis variable
        """
        self._to_consume.append((copy(node.locals), {}, 'class'))

    def leave_class(self, _):
        """leave class: update consumption analysis variable
        """
        # do not check for not used locals here (no sense)
        self._to_consume.pop()

    def visit_lambda(self, node):
        """visit lambda: update consumption analysis variable
        """
        self._to_consume.append((copy(node.locals), {}, 'lambda'))

    def leave_lambda(self, _):
        """leave lambda: update consumption analysis variable
        """
        # do not check for not used locals here
        self._to_consume.pop()

    def visit_genexpr(self, node):
        """visit genexpr: update consumption analysis variable
        """
        self._to_consume.append((copy(node.locals), {}, 'comprehension'))

    def leave_genexpr(self, _):
        """leave genexpr: update consumption analysis variable
        """
        # do not check for not used locals here
        self._to_consume.pop()

    def visit_dictcomp(self, node):
        """visit dictcomp: update consumption analysis variable
        """
        self._to_consume.append((copy(node.locals), {}, 'comprehension'))

    def leave_dictcomp(self, _):
        """leave dictcomp: update consumption analysis variable
        """
        # do not check for not used locals here
        self._to_consume.pop()

    def visit_setcomp(self, node):
        """visit setcomp: update consumption analysis variable
        """
        self._to_consume.append((copy(node.locals), {}, 'comprehension'))

    def leave_setcomp(self, _):
        """leave setcomp: update consumption analysis variable
        """
        # do not check for not used locals here
        self._to_consume.pop()

    def visit_function(self, node):
        """visit function: update consumption analysis variable and check locals
        """
        self._to_consume.append((copy(node.locals), {}, 'function'))
        if not (self.linter.is_message_enabled('redefined-outer-name') or
                self.linter.is_message_enabled('redefined-builtin')):
            return
        globs = node.root().globals
        for name, stmt in node.items():
            if is_inside_except(stmt):
                continue
            if name in globs and not isinstance(stmt, astroid.Global):
                line = globs[name][0].fromlineno
                dummy_rgx = self.config.dummy_variables_rgx
                if not dummy_rgx.match(name):
                    self.add_message('redefined-outer-name', args=(name, line), node=stmt)
            elif is_builtin(name):
                # do not print Redefining builtin for additional builtins
                self.add_message('redefined-builtin', args=name, node=stmt)

    def leave_function(self, node):
        """leave function: check function's locals are consumed"""
        not_consumed = self._to_consume.pop()[0]
        if not (self.linter.is_message_enabled('unused-variable') or
                self.linter.is_message_enabled('unused-argument')):
            return
        # don't check arguments of function which are only raising an exception
        if is_error(node):
            return
        # don't check arguments of abstract methods or within an interface
        is_method = node.is_method()
        klass = node.parent.frame()
        if is_method and (klass.type == 'interface' or node.is_abstract()):
            return
        authorized_rgx = self.config.dummy_variables_rgx
        called_overridden = False
        argnames = node.argnames()
        for name, stmts in not_consumed.iteritems():
            # ignore some special names specified by user configuration
            if authorized_rgx.match(name):
                continue
            # ignore names imported by the global statement
            # FIXME: should only ignore them if it's assigned latter
            stmt = stmts[0]
            if isinstance(stmt, astroid.Global):
                continue
            # care about functions with unknown argument (builtins)
            if name in argnames:
                if is_method:
                    # don't warn for the first argument of a (non static) method
                    if node.type != 'staticmethod' and name == argnames[0]:
                        continue
                    # don't warn for argument of an overridden method
                    if not called_overridden:
                        # Resolve the overridden method lazily, at most once
                        # per function, since the lookup is costly.
                        overridden = overridden_method(klass, node.name)
                        called_overridden = True
                    if overridden is not None and name in overridden.argnames():
                        continue
                    if node.name in PYMETHODS and node.name not in ('__init__', '__new__'):
                        continue
                # don't check callback arguments XXX should be configurable
                if node.name.startswith('cb_') or node.name.endswith('_cb'):
                    continue
                self.add_message('unused-argument', args=name, node=stmt)
            else:
                self.add_message('unused-variable', args=name, node=stmt)

    @check_messages('global-variable-undefined', 'global-variable-not-assigned', 'global-statement',
                    'global-at-module-level', 'redefined-builtin')
    def visit_global(self, node):
        """check names imported exists in the global scope"""
        frame = node.frame()
        if isinstance(frame, astroid.Module):
            self.add_message('global-at-module-level', node=node)
            return
        module = frame.root()
        default_message = True
        for name in node.names:
            try:
                assign_nodes = module.getattr(name)
            except astroid.NotFoundError:
                # unassigned global, skip
                assign_nodes = []
            for anode in assign_nodes:
                if anode.parent is None:
                    # node returned for builtin attribute such as __file__,
                    # __doc__, etc...
                    continue
                if anode.frame() is frame:
                    # same scope level assignment
                    break
            else:
                # global but no assignment
                self.add_message('global-variable-not-assigned', args=name, node=node)
                default_message = False
            if not assign_nodes:
                continue
            for anode in assign_nodes:
                if anode.parent is None:
                    self.add_message('redefined-builtin', args=name, node=node)
                    break
                if anode.frame() is module:
                    # module level assignment
                    break
            else:
                # global undefined at the module scope
                self.add_message('global-variable-undefined', args=name, node=node)
                default_message = False
        if default_message:
            self.add_message('global-statement', node=node)

    def _check_late_binding_closure(self, node, assignment_node, scope_type):
        """Emit cell-var-from-loop when *node* closes over a variable whose
        assignment (*assignment_node*) happens inside a loop."""
        node_scope = node.scope()
        if not isinstance(node_scope, (astroid.Lambda, astroid.Function)):
            return
        if isinstance(assignment_node, astroid.Comprehension):
            if assignment_node.parent.parent_of(node.scope()):
                self.add_message('cell-var-from-loop', node=node, args=node.name)
        else:
            assign_scope = assignment_node.scope()
            maybe_for = assignment_node
            # Walk up from the assignment until a For statement or the
            # assignment's own scope is reached.
            while not isinstance(maybe_for, astroid.For):
                if maybe_for is assign_scope:
                    break
                maybe_for = maybe_for.parent
            else:
                if maybe_for.parent_of(node_scope) and not isinstance(node_scope.statement(), astroid.Return):
                    self.add_message('cell-var-from-loop', node=node, args=node.name)

    def _loopvar_name(self, node, name):
        """Warn (undefined-loop-variable) when *name* is a loop variable used
        outside the loop that defines it."""
        # filter variables according to node's scope
        # XXX used to filter parents but don't remember why, and removing this
        # fixes a W0631 false positive reported by Paul Hachmann on 2008/12 on
        # python-projects (added to func_use_for_or_listcomp_var test)
        #astmts = [stmt for stmt in node.lookup(name)[1]
        #          if hasattr(stmt, 'ass_type')] and
        #          not stmt.statement().parent_of(node)]
        if not self.linter.is_message_enabled('undefined-loop-variable'):
            return
        astmts = [stmt for stmt in node.lookup(name)[1]
                  if hasattr(stmt, 'ass_type')]
        # filter variables according their respective scope test is_statement
        # and parent to avoid #74747. This is not a total fix, which would
        # introduce a mechanism similar to special attribute lookup in
        # modules. Also, in order to get correct inference in this case, the
        # scope lookup rules would need to be changed to return the initial
        # assignment (which does not exist in code per se) as well as any later
        # modifications.
        if not astmts or (astmts[0].is_statement or astmts[0].parent) \
                and astmts[0].statement().parent_of(node):
            _astmts = []
        else:
            _astmts = astmts[:1]
        for i, stmt in enumerate(astmts[1:]):
            if (astmts[i].statement().parent_of(stmt)
                and not in_for_else_branch(astmts[i].statement(), stmt)):
                continue
            _astmts.append(stmt)
        astmts = _astmts
        if len(astmts) == 1:
            ass = astmts[0].ass_type()
            if isinstance(ass, (astroid.For, astroid.Comprehension, astroid.GenExpr)) \
                   and not ass.statement() is node.statement():
                self.add_message('undefined-loop-variable', args=name, node=node)

    @check_messages('redefine-in-handler')
    def visit_excepthandler(self, node):
        # `except Exc, name` must not rebind an existing name.
        for name in get_all_elements(node.name):
            clobbering, args = clobber_in_except(name)
            if clobbering:
                self.add_message('redefine-in-handler', args=args, node=name)

    def visit_assname(self, node):
        if isinstance(node.ass_type(), astroid.AugAssign):
            # augmented assignment reads the name too, so treat it as a use
            self.visit_name(node)

    def visit_delname(self, node):
        self.visit_name(node)

    @check_messages(*(MSGS.keys()))
    def visit_name(self, node):
        """check that a name is defined if the current scope and doesn't
        redefine a built-in
        """
        stmt = node.statement()
        if stmt.fromlineno is None:
            # name node from a astroid built from live code, skip
            assert not stmt.root().file.endswith('.py')
            return
        name = node.name
        frame = stmt.scope()
        # if the name node is used as a function default argument's value or as
        # a decorator, then start from the parent frame of the function instead
        # of the function frame - and thus open an inner class scope
        if (is_func_default(node) or is_func_decorator(node)
            or is_ancestor_name(frame, node)):
            start_index = len(self._to_consume) - 2
        else:
            start_index = len(self._to_consume) - 1
        # iterates through parent scopes, from the inner to the outer
        base_scope_type = self._to_consume[start_index][-1]
        for i in range(start_index, -1, -1):
            to_consume, consumed, scope_type = self._to_consume[i]
            # if the current scope is a class scope but it's not the inner
            # scope, ignore it. This prevents to access this scope instead of
            # the globals one in function members when there are some common
            # names. The only exception is when the starting scope is a
            # comprehension and its direct outer scope is a class
            if scope_type == 'class' and i != start_index and not (
                base_scope_type == 'comprehension' and i == start_index-1):
                # XXX find a way to handle class scope in a smoother way
                continue
            # the name has already been consumed, only check it's not a loop
            # variable used outside the loop
            if name in consumed:
                defnode = assign_parent(consumed[name][0])
                self._check_late_binding_closure(node, defnode, scope_type)
                self._loopvar_name(node, name)
                break
            # mark the name as consumed if it's defined in this scope
            # (i.e. no KeyError is raised by "to_consume[name]")
            try:
                consumed[name] = to_consume[name]
            except KeyError:
                continue
            # checks for use before assignment
            defnode = assign_parent(to_consume[name][0])
            if defnode is not None:
                self._check_late_binding_closure(node, defnode, scope_type)
                defstmt = defnode.statement()
                defframe = defstmt.frame()
                maybee0601 = True
                if not frame is defframe:
                    maybee0601 = False
                elif defframe.parent is None:
                    # we are at the module level, check the name is not
                    # defined in builtins
                    if name in defframe.scope_attrs or builtin_lookup(name)[1]:
                        maybee0601 = False
                else:
                    # we are in a local scope, check the name is not
                    # defined in global or builtin scope
                    if defframe.root().lookup(name)[1]:
                        maybee0601 = False
                    else:
                        # check if we have a nonlocal
                        if name in defframe.locals:
                            maybee0601 = not any(isinstance(child, astroid.Nonlocal)
                                                 and name in child.names
                                                 for child in defframe.get_children())
                if (maybee0601
                    and stmt.fromlineno <= defstmt.fromlineno
                    and not is_defined_before(node)
                    and not are_exclusive(stmt, defstmt, ('NameError', 'Exception', 'BaseException'))):
                    if defstmt is stmt and isinstance(node, (astroid.DelName,
                                                             astroid.AssName)):
                        self.add_message('undefined-variable', args=name, node=node)
                    elif self._to_consume[-1][-1] != 'lambda':
                        # E0601 may *not* occurs in lambda scope
                        self.add_message('used-before-assignment', args=name, node=node)
            if isinstance(node, astroid.AssName): # Aug AssName
                del consumed[name]
            else:
                del to_consume[name]
            # check it's not a loop variable used outside the loop
            self._loopvar_name(node, name)
            break
        else:
            # we have not found the name, if it isn't a builtin, that's an
            # undefined name !
            if not (name in astroid.Module.scope_attrs or is_builtin(name)
                    or name in self.config.additional_builtins):
                self.add_message('undefined-variable', args=name, node=node)

    @check_messages('no-name-in-module')
    def visit_import(self, node):
        """check modules attribute accesses"""
        for name, _ in node.names:
            parts = name.split('.')
            try:
                module = node.infer_name_module(parts[0]).next()
            except astroid.ResolveError:
                continue
            self._check_module_attrs(node, module, parts[1:])

    @check_messages('no-name-in-module')
    def visit_from(self, node):
        """check modules attribute accesses"""
        name_parts = node.modname.split('.')
        level = getattr(node, 'level', None)
        try:
            module = node.root().import_module(name_parts[0], level=level)
        except AstroidBuildingException:
            return
        except Exception, exc:
            print 'Unhandled exception in VariablesChecker:', exc
            return
        module = self._check_module_attrs(node, module, name_parts[1:])
        if not module:
            return
        for name, _ in node.names:
            if name == '*':
                continue
            self._check_module_attrs(node, module, name.split('.'))

    @check_messages('unbalanced-tuple-unpacking', 'unpacking-non-sequence')
    def visit_assign(self, node):
        """Check unbalanced tuple unpacking for assignments
        and unpacking non-sequences.
        """
        if not isinstance(node.targets[0], (astroid.Tuple, astroid.List)):
            return

        targets = node.targets[0].itered()
        try:
            for infered in node.value.infer():
                self._check_unpacking(infered, node, targets)
        except astroid.InferenceError:
            return

    def _check_unpacking(self, infered, node, targets):
        """ Check for unbalanced tuple unpacking
        and unpacking non sequences.
        """
        if infered is astroid.YES:
            return
        if isinstance(infered, (astroid.Tuple, astroid.List)):
            # attempt to check unpacking is properly balanced
            values = infered.itered()
            if len(targets) != len(values):
                self.add_message('unbalanced-tuple-unpacking', node=node,
                                 args=(_get_unpacking_extra_info(node, infered),
                                       len(targets),
                                       len(values)))
        # attempt to check unpacking may be possible (ie RHS is iterable)
        elif isinstance(infered, astroid.Instance):
            for meth in ('__iter__', '__getitem__'):
                try:
                    infered.getattr(meth)
                    break
                except astroid.NotFoundError:
                    continue
            else:
                self.add_message('unpacking-non-sequence', node=node,
                                 args=(_get_unpacking_extra_info(node, infered),))
        else:
            self.add_message('unpacking-non-sequence', node=node,
                             args=(_get_unpacking_extra_info(node, infered),))

    def _check_module_attrs(self, node, module, module_names):
        """check that module_names (list of string) are accessible through the
        given module
        if the latest access name corresponds to a module, return it
        """
        assert isinstance(module, astroid.Module), module
        while module_names:
            name = module_names.pop(0)
            if name == '__dict__':
                module = None
                break
            try:
                module = module.getattr(name)[0].infer().next()
                if module is astroid.YES:
                    return None
            except astroid.NotFoundError:
                self.add_message('no-name-in-module', args=(name, module.name), node=node)
                return None
            except astroid.InferenceError:
                return None
        if module_names:
            # FIXME: other message if name is not the latest part of
            # module_names ?
            modname = module and module.name or '__dict__'
            self.add_message('no-name-in-module', node=node,
                             args=('.'.join(module_names), modname))
            return None
        if isinstance(module, astroid.Module):
            return module
        return None
class VariablesChecker3k(VariablesChecker):
    '''Modified variables checker for 3k'''
    # listcomp have now also their scope

    def visit_listcomp(self, node):
        """visit listcomp: update consumption analysis variable
        """
        self._to_consume.append((copy(node.locals), {}, 'comprehension'))

    def leave_listcomp(self, _):
        """leave listcomp: update consumption analysis variable
        """
        # do not check for not used locals here
        self._to_consume.pop()

    def leave_module(self, node):
        """ Update consumption analysis variable
        for metaclasses.
        """
        # Names used only as a class's metaclass would otherwise be reported
        # unused: drop them from the module-scope to-consume dict before the
        # base class performs its unused-name checks.
        for klass in node.nodes_of_class(astroid.Class):
            if klass._metaclass:
                metaclass = klass.metaclass()

                module_locals = self._to_consume[0][0]

                if isinstance(klass._metaclass, astroid.Name):
                    module_locals.pop(klass._metaclass.name, None)
                if metaclass:
                    # if it uses a `metaclass=module.Class`
                    module_locals.pop(metaclass.root().name, None)
        super(VariablesChecker3k, self).leave_module(node)
if sys.version_info >= (3, 0):
    # On Python 3 list comprehensions get their own scope and classes may
    # take a metaclass keyword argument; use the extended checker there.
    VariablesChecker = VariablesChecker3k


def register(linter):
    """required method to auto register this checker"""
    linter.register_checker(VariablesChecker(linter))
| {
"content_hash": "ab03c4f34d8c69eef4c3b794f4b92ffa",
"timestamp": "",
"source": "github",
"line_count": 726,
"max_line_length": 131,
"avg_line_length": 44.544077134986225,
"alnum_prop": 0.5530164816475464,
"repo_name": "bdastur/pyvim",
"id": "dc8d11154747774aeb0c546bbb48765d46424e6d",
"size": "33153",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": ".vim/bundle/python-mode/pymode/libs/pylama/lint/pylama_pylint/pylint/checkers/variables.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Erlang",
"bytes": "2183"
},
{
"name": "JavaScript",
"bytes": "1064"
},
{
"name": "Makefile",
"bytes": "1563"
},
{
"name": "Python",
"bytes": "2373033"
},
{
"name": "Ruby",
"bytes": "3957"
},
{
"name": "Vim script",
"bytes": "1391935"
}
],
"symlink_target": ""
} |
"""Tests hal.help implementation"""
def test_main():
    """Tests hal.help.main method"""
    # Placeholder generated by the test scaffolder; fill in real assertions.
    pass  # todo auto generated method stub
class TestBugReporter:
    """Tests BugReporter class"""
    # All methods below are auto-generated placeholders awaiting real
    # assertions against hal.help.BugReporter.

    @staticmethod
    def test_get_platform_info():
        """Tests hal.help.BugReporter.get_platform_info method"""
        pass  # todo auto generated method stub

    @staticmethod
    def test_get_bug_report():
        """Tests hal.help.BugReporter.get_bug_report method"""
        pass  # todo auto generated method stub

    @staticmethod
    def test__get_table():
        """Tests hal.help.BugReporter._get_table method"""
        pass  # todo auto generated method stub

    @staticmethod
    def test_as_json():
        """Tests hal.help.BugReporter.as_json method"""
        pass  # todo auto generated method stub

    @staticmethod
    def test_as_sql():
        """Tests hal.help.BugReporter.as_sql method"""
        pass  # todo auto generated method stub

    @staticmethod
    def test_as_markdown():
        """Tests hal.help.BugReporter.as_markdown method"""
        pass  # todo auto generated method stub
| {
"content_hash": "afd22d01b4b526d8dc55e9981d2fb67d",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 65,
"avg_line_length": 24.04255319148936,
"alnum_prop": 0.6345132743362832,
"repo_name": "sirfoga/hal",
"id": "6053bc2fe2c329ededfea27cb8decec13172c36b",
"size": "1156",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_hal_help.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "101879"
}
],
"symlink_target": ""
} |
import json
from sahara.plugins import provisioning as p
from sahara.utils import files as f
# Default apt/yum repository definitions for CDH 5 (the Hadoop distribution)
# and CM 5 (Cloudera Manager). Used when the user does not override the
# repo URLs via the cluster configs declared below.
CDH5_UBUNTU_REPO = ('deb [arch=amd64] http://archive.cloudera.com/cdh5'
                    '/ubuntu/precise/amd64/cdh precise-cdh5.0.0 contrib'
                    '\ndeb-src http://archive.cloudera.com/cdh5/ubuntu'
                    '/precise/amd64/cdh precise-cdh5.0.0 contrib')
DEFAULT_CDH5_UBUNTU_REPO_KEY_URL = ('http://archive.cloudera.com/cdh5/ubuntu'
                                    '/precise/amd64/cdh/archive.key')
CM5_UBUNTU_REPO = ('deb [arch=amd64] http://archive.cloudera.com/cm5'
                   '/ubuntu/precise/amd64/cm precise-cm5.0.0 contrib'
                   '\ndeb-src http://archive.cloudera.com/cm5/ubuntu'
                   '/precise/amd64/cm precise-cm5.0.0 contrib')
DEFAULT_CM5_UBUNTU_REPO_KEY_URL = ('http://archive.cloudera.com/cm5/ubuntu'
                                   '/precise/amd64/cm/archive.key')
CDH5_CENTOS_REPO = ('[cloudera-cdh5]'
                    '\nname=Cloudera\'s Distribution for Hadoop, Version 5'
                    '\nbaseurl=http://archive.cloudera.com/cdh5/redhat/6'
                    '/x86_64/cdh/5.0.0/'
                    '\ngpgkey = http://archive.cloudera.com/cdh5/redhat/6'
                    '/x86_64/cdh/RPM-GPG-KEY-cloudera'
                    '\ngpgcheck = 1')
CM5_CENTOS_REPO = ('[cloudera-manager]'
                   '\nname=Cloudera Manager'
                   '\nbaseurl=http://archive.cloudera.com/cm5/redhat/6'
                   '/x86_64/cm/5.0.0/'
                   '\ngpgkey = http://archive.cloudera.com/cm5/redhat/6'
                   '/x86_64/cm/RPM-GPG-KEY-cloudera'
                   '\ngpgcheck = 1')
# Default download locations for the Swift-support jar and the ExtJS
# library (required by the Oozie web console).
DEFAULT_SWIFT_LIB_URL = ('https://repository.cloudera.com/artifactory/repo/org'
                         '/apache/hadoop/hadoop-openstack/2.3.0-cdh5.0.0'
                         '/hadoop-openstack-2.3.0-cdh5.0.0.jar')
DEFAULT_EXTJS_LIB_URL = 'http://extjs.com/deploy/ext-2.2.zip'
# Cluster-scope plugin configs exposed to the user ("general" section).
CDH5_REPO_URL = p.Config(
    'CDH5 repo list URL', 'general', 'cluster', priority=1,
    default_value="")
CDH5_REPO_KEY_URL = p.Config(
    'CDH5 repo key URL (for debian-based only)', 'general', 'cluster',
    priority=1, default_value="")
CM5_REPO_URL = p.Config(
    'CM5 repo list URL', 'general', 'cluster', priority=1,
    default_value="")
CM5_REPO_KEY_URL = p.Config(
    'CM5 repo key URL (for debian-based only)', 'general', 'cluster',
    priority=1, default_value="")
ENABLE_SWIFT = p.Config('Enable Swift', 'general', 'cluster',
                        config_type='bool', priority=1,
                        default_value=True)
ENABLE_HBASE_COMMON_LIB = p.Config('Enable HBase Common Lib',
                                   'general', 'cluster', config_type='bool',
                                   priority=1, default_value=True)
SWIFT_LIB_URL = p.Config(
    'Hadoop OpenStack library URL', 'general', 'cluster', priority=1,
    default_value=DEFAULT_SWIFT_LIB_URL,
    description=("Library that adds Swift support to CDH. The file will be "
                 "downloaded from VM."))
EXTJS_LIB_URL = p.Config(
    "ExtJS library URL", 'general', 'cluster', priority=1,
    default_value=DEFAULT_EXTJS_LIB_URL,
    description=("Ext 2.2 library is required for Oozie Web Console. "
                 "The file will be downloaded from VM with oozie."))
# Timeouts (seconds) for waiting on Cloudera Manager / agent startup.
AWAIT_AGENTS_TIMEOUT = p.Config(
    'Await Cloudera agents timeout', 'general', 'cluster', config_type='int',
    priority=1, default_value=300, is_optional=True,
    # Fixed typo in the user-visible description: "Coudera" -> "Cloudera".
    description="Timeout for Cloudera agents connecting to Cloudera Manager, "
                "in seconds")
AWAIT_MANAGER_STARTING_TIMEOUT = p.Config(
    'Timeout for Cloudera Manager starting', 'general', 'cluster',
    config_type='int', priority=1, default_value=300, is_optional=True,
    description='Timeout for Cloudera Manager starting, in seconds')
def _get_cluster_plugin_configs():
    """Return the cluster-scoped plugin Config objects (order preserved)."""
    return [
        CDH5_REPO_URL,
        CDH5_REPO_KEY_URL,
        CM5_REPO_URL,
        CM5_REPO_KEY_URL,
        ENABLE_SWIFT,
        ENABLE_HBASE_COMMON_LIB,
        SWIFT_LIB_URL,
        EXTJS_LIB_URL,
        AWAIT_MANAGER_STARTING_TIMEOUT,
        AWAIT_AGENTS_TIMEOUT,
    ]
# ng wide configs
def _load_json(path_to_file):
    """Read the resource file at *path_to_file* and parse it as JSON."""
    return json.loads(f.get_file_text(path_to_file))
# Base path (relative to the sahara package) of the JSON resource files
# describing each CDH service's configuration parameters.
path_to_config = 'plugins/cdh/v5/resources/'
# HDFS service- and role-level configs.
hdfs_confs = _load_json(path_to_config + 'hdfs-service.json')
namenode_confs = _load_json(path_to_config + 'hdfs-namenode.json')
datanode_confs = _load_json(path_to_config + 'hdfs-datanode.json')
secnamenode_confs = _load_json(path_to_config + 'hdfs-secondarynamenode.json')
# YARN service- and role-level configs.
yarn_confs = _load_json(path_to_config + 'yarn-service.json')
resourcemanager_confs = _load_json(
    path_to_config + 'yarn-resourcemanager.json')
nodemanager_confs = _load_json(path_to_config + 'yarn-nodemanager.json')
jobhistory_confs = _load_json(path_to_config + 'yarn-jobhistory.json')
# Oozie, Hive, Hue, Spark, ZooKeeper and HBase configs.
oozie_service_confs = _load_json(path_to_config + 'oozie-service.json')
oozie_role_confs = _load_json(path_to_config + 'oozie-oozie.json')
hive_service_confs = _load_json(path_to_config + 'hive-service.json')
hive_metastore_confs = _load_json(path_to_config + 'hive-metastore.json')
hive_hiveserver_confs = _load_json(path_to_config + 'hive-hiveserver2.json')
hive_webhcat_confs = _load_json(path_to_config + 'hive-webhcat.json')
hue_service_confs = _load_json(path_to_config + 'hue-service.json')
hue_role_confs = _load_json(path_to_config + 'hue-hue.json')
spark_service_confs = _load_json(path_to_config + 'spark-service.json')
spark_role_confs = _load_json(path_to_config + 'spark-history.json')
zookeeper_server_confs = _load_json(path_to_config + 'zookeeper-server.json')
zookeeper_service_confs = _load_json(path_to_config + 'zookeeper-service.json')
hbase_confs = _load_json(path_to_config + 'hbase-service.json')
master_confs = _load_json(path_to_config + 'hbase-master.json')
regionserver_confs = _load_json(path_to_config + 'hbase-regionserver.json')
# Names of configs shown with the highest priority; _init_configs tests
# membership against this collection.
priority_one_confs = _load_json(path_to_config + 'priority-one-confs.json')
def _prepare_value(value):
if not value:
return ""
return value.replace('\n', ' ')
def _init_configs(confs, app_target, scope):
    """Turn raw JSON config descriptions into p.Config objects.

    Configs whose name appears in ``priority_one_confs`` get priority 1,
    everything else priority 2.
    """
    return [
        p.Config(cfg['name'], app_target, scope,
                 priority=(1 if cfg['name'] in priority_one_confs else 2),
                 default_value=_prepare_value(cfg['value']),
                 description=cfg['desc'], is_optional=True)
        for cfg in confs
    ]
def _get_ng_plugin_configs():
    """Build the node-group plugin configs for every CDH service.

    Each (raw confs, application target, scope) triple below produces one
    batch of Config objects; the resulting order matters to callers, so
    the table must not be reordered.
    """
    sections = [
        (hdfs_confs, 'HDFS', 'cluster'),
        (namenode_confs, 'NAMENODE', 'node'),
        (datanode_confs, 'DATANODE', 'node'),
        (secnamenode_confs, 'SECONDARYNAMENODE', 'node'),
        (yarn_confs, 'YARN', 'cluster'),
        (resourcemanager_confs, 'RESOURCEMANAGER', 'node'),
        (nodemanager_confs, 'NODEMANAGER', 'node'),
        (jobhistory_confs, 'JOBHISTORY', 'node'),
        (oozie_service_confs, 'OOZIE', 'cluster'),
        (oozie_role_confs, 'OOZIE', 'node'),
        (hive_service_confs, 'HIVE', 'cluster'),
        (hive_metastore_confs, 'HIVEMETASTORE', 'node'),
        (hive_hiveserver_confs, 'HIVESERVER', 'node'),
        (hive_webhcat_confs, 'WEBHCAT', 'node'),
        (hue_service_confs, 'HUE', 'cluster'),
        (hue_role_confs, 'HUE', 'node'),
        (spark_service_confs, 'SPARK_ON_YARN', 'cluster'),
        (spark_role_confs, 'SPARK_ON_YARN', 'node'),
        (zookeeper_service_confs, 'ZOOKEEPER', 'cluster'),
        (zookeeper_server_confs, 'ZOOKEEPER', 'node'),
        (hbase_confs, 'HBASE', 'cluster'),
        (master_confs, 'MASTER', 'node'),
        (regionserver_confs, 'REGIONSERVER', 'node'),
    ]
    cfg = []
    for confs, target, scope in sections:
        cfg.extend(_init_configs(confs, target, scope))
    return cfg
def get_plugin_configs():
    """Return every plugin config: cluster-wide first, then node-group."""
    return _get_cluster_plugin_configs() + _get_ng_plugin_configs()
def _get_config_value(cluster, key):
return cluster.cluster_configs.get(
'general', {}).get(key.name, key.default_value)
def get_cdh5_repo_url(cluster):
    """Return the CDH5 package repository URL configured for *cluster*."""
    return _get_config_value(cluster, CDH5_REPO_URL)
def get_cdh5_key_url(cluster):
    """Return the CDH5 repo signing-key URL configured for *cluster*."""
    return _get_config_value(cluster, CDH5_REPO_KEY_URL)
def get_cm5_repo_url(cluster):
    """Return the Cloudera Manager repo URL configured for *cluster*."""
    return _get_config_value(cluster, CM5_REPO_URL)
def get_cm5_key_url(cluster):
    """Return the CM5 repo signing-key URL configured for *cluster*."""
    return _get_config_value(cluster, CM5_REPO_KEY_URL)
def is_swift_enabled(cluster):
    """Return whether Swift support is enabled for *cluster*."""
    return _get_config_value(cluster, ENABLE_SWIFT)
def is_hbase_common_lib_enabled(cluster):
    """Return whether the HBase common lib is enabled for *cluster*."""
    return _get_config_value(cluster, ENABLE_HBASE_COMMON_LIB)
def get_swift_lib_url(cluster):
    """Return the Hadoop-OpenStack (Swift) jar URL for *cluster*."""
    return _get_config_value(cluster, SWIFT_LIB_URL)
def get_extjs_lib_url(cluster):
    """Return the ExtJS library URL (Oozie web console) for *cluster*."""
    return _get_config_value(cluster, EXTJS_LIB_URL)
| {
"content_hash": "626231d1b08dab7af262243ca49b64b3",
"timestamp": "",
"source": "github",
"line_count": 222,
"max_line_length": 79,
"avg_line_length": 40.945945945945944,
"alnum_prop": 0.6425742574257426,
"repo_name": "bigfootproject/sahara",
"id": "f50c7efdefb9744b7e403b61232f790419f39efe",
"size": "9673",
"binary": false,
"copies": "3",
"ref": "refs/heads/spark-plugin",
"path": "sahara/plugins/cdh/v5/config_helper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "3609"
},
{
"name": "Mako",
"bytes": "1528"
},
{
"name": "PigLatin",
"bytes": "792"
},
{
"name": "Python",
"bytes": "2844588"
},
{
"name": "Shell",
"bytes": "45605"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.