Dataset schema (one row per source file; ranges give the observed minimum and maximum, "nullable" marks columns that may be null):

hexsha: string (length 40)
size: int64 (4 to 1.02M)
ext: string (8 classes)
lang: string (1 class)
max_stars_repo_path: string (length 4 to 209)
max_stars_repo_name: string (length 5 to 121)
max_stars_repo_head_hexsha: string (length 40)
max_stars_repo_licenses: list (length 1 to 10)
max_stars_count: int64 (1 to 191k, nullable)
max_stars_repo_stars_event_min_datetime: string (length 24, nullable)
max_stars_repo_stars_event_max_datetime: string (length 24, nullable)
max_issues_repo_path: string (length 4 to 209)
max_issues_repo_name: string (length 5 to 121)
max_issues_repo_head_hexsha: string (length 40)
max_issues_repo_licenses: list (length 1 to 10)
max_issues_count: int64 (1 to 67k, nullable)
max_issues_repo_issues_event_min_datetime: string (length 24, nullable)
max_issues_repo_issues_event_max_datetime: string (length 24, nullable)
max_forks_repo_path: string (length 4 to 209)
max_forks_repo_name: string (length 5 to 121)
max_forks_repo_head_hexsha: string (length 40)
max_forks_repo_licenses: list (length 1 to 10)
max_forks_count: int64 (1 to 105k, nullable)
max_forks_repo_forks_event_min_datetime: string (length 24, nullable)
max_forks_repo_forks_event_max_datetime: string (length 24, nullable)
content: string (length 4 to 1.02M)
avg_line_length: float64 (1.07 to 66.1k)
max_line_length: int64 (4 to 266k)
alphanum_fraction: float64 (0.01 to 1)

Each record below lists its metadata values in this column order on a single pipe-separated line, followed by the file content and a trailing line with avg_line_length, max_line_length and alphanum_fraction.
0671defc3ce000eb89e98a0c75b3956348db13fe | 351 | py | Python | easy/Palindrome Number/solution.py | ashutosh1919/leetcode-problems | 65f99a3694549af88c7702b598de1a8ccb7db5fb | ["MIT"] | 8 | 2021-08-21T19:10:04.000Z | 2022-03-11T14:30:02.000Z | easy/Palindrome Number/solution.py | ashutosh1919/leetcode-problems | 65f99a3694549af88c7702b598de1a8ccb7db5fb | ["MIT"] | null | null | null | easy/Palindrome Number/solution.py | ashutosh1919/leetcode-problems | 65f99a3694549af88c7702b598de1a8ccb7db5fb | ["MIT"] | 1 | 2021-08-24T06:29:02.000Z | 2021-08-24T06:29:02.000Z |
# Time complexity: O(log10(x))
# Approach: Reverse the number and check if it is same as original number.
class Solution:
def isPalindrome(self, x: int) -> bool:
if x < 0 or x == -2**31:
return False
dup, ans = x, 0
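        # Build the reversed number digit by digit: peel the last decimal digit
        # off dup and append it to ans until dup is exhausted.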
while dup!=0:
ans = ans*10 + dup%10
dup //= 10
return ans == x
| 29.25 | 74 | 0.527066 |

b06d49e3afeaebd93dd71e01256b6424b019edf0 | 28,587 | py | Python | tests/graph/test_edges.py | enavarro51/retworkx | 71e34d111623d1de2e4870a8227eddacfb3ade4c | ["Apache-2.0"] | null | null | null | tests/graph/test_edges.py | enavarro51/retworkx | 71e34d111623d1de2e4870a8227eddacfb3ade4c | ["Apache-2.0"] | null | null | null | tests/graph/test_edges.py | enavarro51/retworkx | 71e34d111623d1de2e4870a8227eddacfb3ade4c | ["Apache-2.0"] | 1 | 2022-03-24T05:00:30.000Z | 2022-03-24T05:00:30.000Z |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
import retworkx
class TestEdges(unittest.TestCase):
def test_get_edge_data(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "Edgy")
res = graph.get_edge_data(node_a, node_b)
self.assertEqual("Edgy", res)
def test_get_all_edge_data(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "Edgy")
graph.add_edge(node_a, node_b, "b")
res = graph.get_all_edge_data(node_a, node_b)
self.assertIn("b", res)
self.assertIn("Edgy", res)
def test_no_edge(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
self.assertRaises(retworkx.NoEdgeBetweenNodes, graph.get_edge_data, node_a, node_b)
def test_num_edges(self):
graph = retworkx.PyGraph()
graph.add_node(1)
graph.add_node(42)
graph.add_node(146)
graph.add_edges_from_no_data([(0, 1), (1, 2)])
self.assertEqual(2, graph.num_edges())
def test_num_edges_no_edges(self):
graph = retworkx.PyGraph()
graph.add_node(1)
graph.add_node(42)
self.assertEqual(0, graph.num_edges())
def test_update_edge(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "not edgy")
graph.update_edge(node_a, node_b, "Edgy")
self.assertEqual([(0, 1, "Edgy")], graph.weighted_edge_list())
def test_update_edge_no_edge(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
self.assertRaises(retworkx.NoEdgeBetweenNodes, graph.update_edge, node_a, node_b, None)
def test_update_edge_by_index(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
edge_index = graph.add_edge(node_a, node_b, "not edgy")
graph.update_edge_by_index(edge_index, "Edgy")
self.assertEqual([(0, 1, "Edgy")], graph.weighted_edge_list())
def test_update_edge_invalid_index(self):
graph = retworkx.PyGraph()
graph.add_node("a")
graph.add_node("b")
self.assertRaises(IndexError, graph.update_edge_by_index, 0, None)
def test_update_edge_parallel_edges(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "not edgy")
edge_index = graph.add_edge(node_a, node_b, "not edgy")
graph.update_edge_by_index(edge_index, "Edgy")
self.assertEqual(
[(0, 1, "not edgy"), (0, 1, "Edgy")],
list(graph.weighted_edge_list()),
)
def test_no_edge_get_all_edge_data(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
self.assertRaises(retworkx.NoEdgeBetweenNodes, graph.get_all_edge_data, node_a, node_b)
def test_has_edge(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, {})
self.assertTrue(graph.has_edge(node_a, node_b))
self.assertTrue(graph.has_edge(node_b, node_a))
def test_has_edge_no_edge(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
self.assertFalse(graph.has_edge(node_a, node_b))
def test_edges(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "Edgy")
node_c = graph.add_node("c")
graph.add_edge(node_b, node_c, "Super edgy")
self.assertEqual(["Edgy", "Super edgy"], graph.edges())
def test_edges_empty(self):
graph = retworkx.PyGraph()
graph.add_node("a")
self.assertEqual([], graph.edges())
def test_edge_indices(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "Edgy")
node_c = graph.add_node("c")
graph.add_edge(node_b, node_c, "Super edgy")
self.assertEqual([0, 1], graph.edge_indices())
def test_get_edge_indices_empty(self):
graph = retworkx.PyGraph()
graph.add_node("a")
self.assertEqual([], graph.edge_indices())
def test_add_duplicates(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("a")
graph.add_edge(node_a, node_b, "a")
graph.add_edge(node_a, node_b, "b")
self.assertEqual(["a", "b"], graph.edges())
def test_remove_no_edge(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
self.assertRaises(retworkx.NoEdgeBetweenNodes, graph.remove_edge, node_a, node_b)
def test_remove_edge_single(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "edgy")
graph.remove_edge(node_a, node_b)
self.assertEqual([], graph.edges())
def test_remove_multiple(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "edgy")
graph.add_edge(node_a, node_b, "super_edgy")
graph.remove_edge_from_index(0)
self.assertEqual(["super_edgy"], graph.edges())
def test_remove_edge_from_index(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "edgy")
graph.remove_edge_from_index(0)
self.assertEqual([], graph.edges())
def test_remove_edge_no_edge(self):
graph = retworkx.PyGraph()
graph.add_node("a")
graph.remove_edge_from_index(0)
self.assertEqual([], graph.edges())
def test_remove_edges_from(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
node_c = graph.add_node("c")
graph.add_edge(node_a, node_b, "edgy")
graph.add_edge(node_a, node_c, "super_edgy")
graph.remove_edges_from([(node_a, node_b), (node_a, node_c)])
self.assertEqual([], graph.edges())
def test_remove_edges_from_invalid(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
node_c = graph.add_node("c")
graph.add_edge(node_a, node_b, "edgy")
graph.add_edge(node_a, node_c, "super_edgy")
with self.assertRaises(retworkx.NoEdgeBetweenNodes):
graph.remove_edges_from([(node_b, node_c), (node_a, node_c)])
def test_degree(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "Edgy")
node_c = graph.add_node("c")
graph.add_edge(node_b, node_c, "Super edgy")
self.assertEqual(2, graph.degree(node_b))
def test_degree_with_self_loops(self):
graph = retworkx.PyGraph()
graph.extend_from_edge_list([(0, 0), (0, 1), (0, 0)])
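        # Each self-loop adds 2 to a node's degree, so two self-loops plus one
        # ordinary edge give node 0 a degree of 5.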
self.assertEqual(5, graph.degree(0))
def test_add_edge_from(self):
graph = retworkx.PyGraph()
nodes = list(range(4))
graph.add_nodes_from(nodes)
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
]
res = graph.add_edges_from(edge_list)
self.assertEqual(len(res), 5)
self.assertEqual(["a", "b", "c", "d", "e"], graph.edges())
self.assertEqual(3, graph.degree(0))
self.assertEqual(2, graph.degree(1))
self.assertEqual(3, graph.degree(2))
self.assertEqual(2, graph.degree(3))
def test_add_edge_from_empty(self):
graph = retworkx.PyGraph()
res = graph.add_edges_from([])
self.assertEqual([], res)
def test_add_edge_from_no_data(self):
graph = retworkx.PyGraph()
nodes = list(range(4))
graph.add_nodes_from(nodes)
edge_list = [(0, 1), (1, 2), (0, 2), (2, 3), (0, 3)]
res = graph.add_edges_from_no_data(edge_list)
self.assertEqual(len(res), 5)
self.assertEqual([None, None, None, None, None], graph.edges())
self.assertEqual(3, graph.degree(0))
self.assertEqual(2, graph.degree(1))
self.assertEqual(3, graph.degree(2))
self.assertEqual(2, graph.degree(3))
def test_add_edge_from_empty_no_data(self):
graph = retworkx.PyGraph()
res = graph.add_edges_from_no_data([])
self.assertEqual([], res)
def test_extend_from_weighted_edge_list_empty(self):
graph = retworkx.PyGraph()
graph.extend_from_weighted_edge_list([])
self.assertEqual(0, len(graph))
def test_extend_from_weighted_edge_list_nodes_exist(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(4)))
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
]
graph.extend_from_weighted_edge_list(edge_list)
self.assertEqual(len(graph), 4)
self.assertEqual(["a", "b", "c", "d", "e"], graph.edges())
def test_extend_from_weighted_edge_list_edges_exist(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(4)))
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
(0, 1, "not_a"),
]
graph.extend_from_weighted_edge_list(edge_list)
self.assertEqual(len(graph), 4)
self.assertEqual(["a", "b", "c", "d", "e", "not_a"], graph.edges())
def test_edge_list(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(4)))
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
]
graph.add_edges_from(edge_list)
self.assertEqual([(x[0], x[1]) for x in edge_list], graph.edge_list())
def test_edge_list_empty(self):
graph = retworkx.PyGraph()
self.assertEqual([], graph.edge_list())
def test_weighted_edge_list(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(4)))
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
]
graph.add_edges_from(edge_list)
self.assertEqual(edge_list, graph.weighted_edge_list())
def test_weighted_edge_list_empty(self):
graph = retworkx.PyGraph()
self.assertEqual([], graph.weighted_edge_list())
def test_extend_from_edge_list(self):
graph = retworkx.PyGraph()
edge_list = [(0, 1), (1, 2), (0, 2), (2, 3), (0, 3)]
graph.extend_from_edge_list(edge_list)
self.assertEqual(len(graph), 4)
self.assertEqual([None] * 5, graph.edges())
self.assertEqual(3, graph.degree(0))
self.assertEqual(2, graph.degree(1))
self.assertEqual(3, graph.degree(2))
self.assertEqual(2, graph.degree(3))
def test_extend_from_edge_list_empty(self):
graph = retworkx.PyGraph()
graph.extend_from_edge_list([])
self.assertEqual(0, len(graph))
def test_extend_from_edge_list_nodes_exist(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(4)))
edge_list = [(0, 1), (1, 2), (0, 2), (2, 3), (0, 3)]
graph.extend_from_edge_list(edge_list)
self.assertEqual(len(graph), 4)
self.assertEqual([None] * 5, graph.edges())
self.assertEqual(3, graph.degree(0))
self.assertEqual(2, graph.degree(1))
self.assertEqual(3, graph.degree(2))
self.assertEqual(2, graph.degree(3))
def test_extend_from_edge_list_existing_edge(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(4)))
edge_list = [(0, 1), (1, 2), (0, 2), (2, 3), (0, 3), (0, 1)]
graph.extend_from_edge_list(edge_list)
self.assertEqual(len(graph), 4)
self.assertEqual([None] * 6, graph.edges())
def test_extend_from_weighted_edge_list(self):
graph = retworkx.PyGraph()
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
]
graph.extend_from_weighted_edge_list(edge_list)
self.assertEqual(len(graph), 4)
def test_add_edges_from_parallel_edges(self):
graph = retworkx.PyGraph()
graph.add_nodes_from([0, 1])
res = graph.add_edges_from([(0, 1, False), (1, 0, True)])
self.assertEqual([0, 1], res)
self.assertEqual([False, True], graph.edges())
def test_add_edges_from_no_data_parallel_edges(self):
graph = retworkx.PyGraph()
graph.add_nodes_from([0, 1])
res = graph.add_edges_from_no_data([(0, 1), (1, 0)])
self.assertEqual([0, 1], res)
self.assertEqual([None, None], graph.edges())
def test_multigraph_attr(self):
graph = retworkx.PyGraph()
self.assertTrue(graph.multigraph)
def test_has_parallel_edges(self):
graph = retworkx.PyGraph()
graph.add_nodes_from([0, 1])
graph.add_edge(0, 1, None)
graph.add_edge(1, 0, 0)
self.assertTrue(graph.has_parallel_edges())
def test_has_parallel_edges_no_parallel_edges(self):
graph = retworkx.PyGraph()
graph.add_nodes_from([0, 1])
graph.add_edge(0, 1, None)
self.assertFalse(graph.has_parallel_edges())
def test_has_parallel_edges_empty(self):
graph = retworkx.PyGraph()
self.assertFalse(graph.has_parallel_edges())
def test_edge_index_map(self):
graph = retworkx.PyGraph()
node_a = graph.add_node(0)
node_b = graph.add_node(1)
node_c = graph.add_node("c")
node_d = graph.add_node("d")
graph.add_edge(node_a, node_c, "edge a")
graph.add_edge(node_b, node_d, "edge_b")
graph.add_edge(node_c, node_d, "edge c")
self.assertEqual(
{
0: (node_a, node_c, "edge a"),
1: (node_b, node_d, "edge_b"),
2: (node_c, node_d, "edge c"),
},
graph.edge_index_map(),
)
def test_incident_edges(self):
graph = retworkx.PyGraph()
node_a = graph.add_node(0)
node_b = graph.add_node(1)
node_c = graph.add_node("c")
node_d = graph.add_node("d")
graph.add_edge(node_a, node_c, "edge a")
graph.add_edge(node_b, node_d, "edge_b")
graph.add_edge(node_c, node_d, "edge c")
res = graph.incident_edges(node_d)
self.assertEqual({1, 2}, set(res))
def test_incident_edges_invalid_node(self):
graph = retworkx.PyGraph()
res = graph.incident_edges(42)
self.assertEqual([], res)
def test_incident_edge_index_map(self):
graph = retworkx.PyGraph()
node_a = graph.add_node(0)
node_b = graph.add_node(1)
node_c = graph.add_node("c")
node_d = graph.add_node("d")
graph.add_edge(node_a, node_c, "edge a")
graph.add_edge(node_b, node_d, "edge_b")
graph.add_edge(node_c, node_d, "edge c")
res = graph.incident_edge_index_map(node_d)
self.assertEqual({2: (3, 2, "edge c"), 1: (3, 1, "edge_b")}, res)
def test_incident_edge_index_map_invalid_node(self):
graph = retworkx.PyGraph()
res = graph.incident_edge_index_map(42)
self.assertEqual({}, res)
def test_single_neighbor_out_edges(self):
g = retworkx.PyGraph()
node_a = g.add_node("a")
node_b = g.add_node("b")
g.add_edge(node_a, node_b, {"a": 1})
node_c = g.add_node("c")
g.add_edge(node_a, node_c, {"a": 2})
res = g.out_edges(node_a)
self.assertEqual([(node_a, node_c, {"a": 2}), (node_a, node_b, {"a": 1})], res)
def test_neighbor_surrounded_in_out_edges(self):
g = retworkx.PyGraph()
node_a = g.add_node("a")
node_b = g.add_node("b")
node_c = g.add_node("c")
g.add_edge(node_a, node_b, {"a": 1})
g.add_edge(node_b, node_c, {"a": 2})
res = g.out_edges(node_b)
self.assertEqual([(node_b, node_c, {"a": 2}), (node_b, node_a, {"a": 1})], res)
res = g.in_edges(node_b)
self.assertEqual([(node_c, node_b, {"a": 2}), (node_a, node_b, {"a": 1})], res)
def test_edge_index_map_empty(self):
graph = retworkx.PyGraph()
self.assertEqual({}, graph.edge_index_map())
def test_get_edge_data_by_index(self):
graph = retworkx.PyGraph()
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
]
graph.extend_from_weighted_edge_list(edge_list)
res = graph.get_edge_data_by_index(2)
self.assertEqual("c", res)
def test_get_edge_data_by_index_invalid_index(self):
graph = retworkx.PyGraph()
with self.assertRaisesRegex(
IndexError, "Provided edge index 2 is not present in the graph"
):
graph.get_edge_data_by_index(2)
def test_get_edge_endpoints_by_index(self):
graph = retworkx.PyGraph()
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
]
graph.extend_from_weighted_edge_list(edge_list)
res = graph.get_edge_endpoints_by_index(2)
self.assertEqual((0, 2), res)
def test_get_edge_endpoints_by_index_invalid_index(self):
graph = retworkx.PyGraph()
with self.assertRaisesRegex(
IndexError, "Provided edge index 2 is not present in the graph"
):
graph.get_edge_endpoints_by_index(2)
class TestEdgesMultigraphFalse(unittest.TestCase):
def test_multigraph_attr(self):
graph = retworkx.PyGraph(multigraph=False)
self.assertFalse(graph.multigraph)
def test_has_parallel_edges(self):
graph = retworkx.PyGraph(multigraph=False)
graph.add_nodes_from([0, 1])
graph.add_edge(0, 1, None)
graph.add_edge(1, 0, 0)
self.assertFalse(graph.has_parallel_edges())
def test_get_edge_data(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "Edgy")
res = graph.get_edge_data(node_a, node_b)
self.assertEqual("Edgy", res)
def test_get_all_edge_data(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "Edgy")
graph.add_edge(node_a, node_b, "b")
res = graph.get_all_edge_data(node_a, node_b)
self.assertIn("b", res)
self.assertNotIn("Edgy", res)
def test_no_edge(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("b")
self.assertRaises(retworkx.NoEdgeBetweenNodes, graph.get_edge_data, node_a, node_b)
def test_no_edge_get_all_edge_data(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("b")
self.assertRaises(retworkx.NoEdgeBetweenNodes, graph.get_all_edge_data, node_a, node_b)
def test_has_edge(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, {})
self.assertTrue(graph.has_edge(node_a, node_b))
self.assertTrue(graph.has_edge(node_b, node_a))
def test_has_edge_no_edge(self):
graph = retworkx.PyGraph()
node_a = graph.add_node("a")
node_b = graph.add_node("b")
self.assertFalse(graph.has_edge(node_a, node_b))
def test_edges(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "Edgy")
node_c = graph.add_node("c")
graph.add_edge(node_b, node_c, "Super edgy")
self.assertEqual(["Edgy", "Super edgy"], graph.edges())
def test_edges_empty(self):
graph = retworkx.PyGraph(False)
graph.add_node("a")
self.assertEqual([], graph.edges())
def test_add_duplicates(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("a")
graph.add_edge(node_a, node_b, "a")
graph.add_edge(node_a, node_b, "b")
self.assertEqual(["b"], graph.edges())
def test_remove_no_edge(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("b")
self.assertRaises(retworkx.NoEdgeBetweenNodes, graph.remove_edge, node_a, node_b)
def test_remove_edge_single(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "edgy")
graph.remove_edge(node_a, node_b)
self.assertEqual([], graph.edges())
def test_remove_multiple(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "edgy")
graph.add_edge(node_a, node_b, "super_edgy")
graph.remove_edge_from_index(0)
self.assertEqual([], graph.edges())
def test_remove_edge_from_index(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "edgy")
graph.remove_edge_from_index(0)
self.assertEqual([], graph.edges())
def test_remove_edge_no_edge(self):
graph = retworkx.PyGraph(False)
graph.add_node("a")
graph.remove_edge_from_index(0)
self.assertEqual([], graph.edges())
def test_degree(self):
graph = retworkx.PyGraph(False)
node_a = graph.add_node("a")
node_b = graph.add_node("b")
graph.add_edge(node_a, node_b, "Edgy")
node_c = graph.add_node("c")
graph.add_edge(node_b, node_c, "Super edgy")
self.assertEqual(2, graph.degree(node_b))
def test_add_edge_from(self):
graph = retworkx.PyGraph(False)
nodes = list(range(4))
graph.add_nodes_from(nodes)
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
]
res = graph.add_edges_from(edge_list)
self.assertEqual(len(res), 5)
self.assertEqual(["a", "b", "c", "d", "e"], graph.edges())
self.assertEqual(3, graph.degree(0))
self.assertEqual(2, graph.degree(1))
self.assertEqual(3, graph.degree(2))
self.assertEqual(2, graph.degree(3))
def test_add_edge_from_empty(self):
graph = retworkx.PyGraph(False)
res = graph.add_edges_from([])
self.assertEqual([], res)
def test_add_edge_from_no_data(self):
graph = retworkx.PyGraph(False)
nodes = list(range(4))
graph.add_nodes_from(nodes)
edge_list = [(0, 1), (1, 2), (0, 2), (2, 3), (0, 3)]
res = graph.add_edges_from_no_data(edge_list)
self.assertEqual(len(res), 5)
self.assertEqual([None, None, None, None, None], graph.edges())
self.assertEqual(3, graph.degree(0))
self.assertEqual(2, graph.degree(1))
self.assertEqual(3, graph.degree(2))
self.assertEqual(2, graph.degree(3))
def test_add_edge_from_empty_no_data(self):
graph = retworkx.PyGraph(False)
res = graph.add_edges_from_no_data([])
self.assertEqual([], res)
def test_add_edges_from_parallel_edges(self):
graph = retworkx.PyGraph(False)
graph.add_nodes_from([0, 1])
res = graph.add_edges_from([(0, 1, False), (1, 0, True)])
self.assertEqual([0, 0], res)
self.assertEqual([True], graph.edges())
def test_add_edges_from_no_data_parallel_edges(self):
graph = retworkx.PyGraph(False)
graph.add_nodes_from([0, 1])
res = graph.add_edges_from_no_data([(0, 1), (1, 0)])
self.assertEqual([0, 0], res)
self.assertEqual([None], graph.edges())
def test_extend_from_weighted_edge_list_empty(self):
graph = retworkx.PyGraph()
graph.extend_from_weighted_edge_list([])
self.assertEqual(0, len(graph))
def test_extend_from_weighted_edge_list_nodes_exist(self):
graph = retworkx.PyGraph()
graph.add_nodes_from(list(range(4)))
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
]
graph.extend_from_weighted_edge_list(edge_list)
self.assertEqual(len(graph), 4)
self.assertEqual(["a", "b", "c", "d", "e"], graph.edges())
def test_extend_from_weighted_edge_list_edges_exist(self):
graph = retworkx.PyGraph(False)
graph.add_nodes_from(list(range(4)))
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
(0, 1, "not_a"),
]
graph.extend_from_weighted_edge_list(edge_list)
self.assertEqual(len(graph), 4)
self.assertEqual(["not_a", "b", "c", "d", "e"], graph.edges())
def test_extend_from_edge_list(self):
graph = retworkx.PyGraph(False)
edge_list = [(0, 1), (1, 2), (0, 2), (2, 3), (0, 3)]
graph.extend_from_edge_list(edge_list)
self.assertEqual(len(graph), 4)
self.assertEqual([None] * 5, graph.edges())
def test_extend_from_edge_list_empty(self):
graph = retworkx.PyGraph(False)
graph.extend_from_edge_list([])
self.assertEqual(0, len(graph))
def test_extend_from_edge_list_nodes_exist(self):
graph = retworkx.PyGraph(False)
graph.add_nodes_from(list(range(4)))
edge_list = [(0, 1), (1, 2), (0, 2), (2, 3), (0, 3)]
graph.extend_from_edge_list(edge_list)
self.assertEqual(len(graph), 4)
self.assertEqual([None] * 5, graph.edges())
self.assertEqual(3, graph.degree(0))
self.assertEqual(2, graph.degree(1))
self.assertEqual(3, graph.degree(2))
self.assertEqual(2, graph.degree(3))
def test_extend_from_edge_list_existing_edge(self):
graph = retworkx.PyGraph(False)
graph.add_nodes_from(list(range(4)))
edge_list = [(0, 1), (1, 2), (0, 2), (2, 3), (0, 3), (0, 1)]
graph.extend_from_edge_list(edge_list)
self.assertEqual(len(graph), 4)
self.assertEqual([None] * 5, graph.edges())
def test_extend_from_weighted_edge_list(self):
graph = retworkx.PyGraph(False)
edge_list = [
(0, 1, "a"),
(1, 2, "b"),
(0, 2, "c"),
(2, 3, "d"),
(0, 3, "e"),
]
graph.extend_from_weighted_edge_list(edge_list)
self.assertEqual(len(graph), 4)
self.assertEqual(["a", "b", "c", "d", "e"], graph.edges())
| 36.003778 | 95 | 0.593382 |

ce53d7bae6711e12a2f19d37bbf8a44d5352f351 | 2,307 | py | Python | apps/help/get_sensego_class.py | Chise1/my_audit_monitor | e302c339be4083cc03349096142bcff85b6947e5 | ["BSD-3-Clause"] | null | null | null | apps/help/get_sensego_class.py | Chise1/my_audit_monitor | e302c339be4083cc03349096142bcff85b6947e5 | ["BSD-3-Clause"] | null | null | null | apps/help/get_sensego_class.py | Chise1/my_audit_monitor | e302c339be4083cc03349096142bcff85b6947e5 | ["BSD-3-Clause"] | null | null | null |
import base64
import hashlib
import hmac
import time
import uuid
class Nonce():
def __init__(self, ak, sk, group_id, face_image):
self.face_image = base64.b64encode(face_image).decode()
self.id_image = None
self.ak = ak
self.sk = sk
self.group_id = group_id
self.sign = self.get_sign()
def get_sign(self):
self.ts = str(int(time.time()))
# self.nonce = str(uuid.uuid1()).replace("-", "")
self.nonce = str(uuid.uuid1()).replace("-", "")
a = [self.ak, self.nonce, self.ts]
# a = [self.ak, 'ZPMxNpPhmrPzQj27AGKijM3FmEcHW4BY', '1550032562']
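        # The three pairwise swaps below sort [ak, nonce, ts] into ascending
        # (lexicographic) order before joining, equivalent to a.sort(), so the
        # HMAC-SHA256 signature does not depend on argument order.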
if a[0] > a[1]:
a[0], a[1] = a[1], a[0]
if a[0] > a[2]:
a[0], a[2] = a[2], a[0]
if a[1] > a[2]:
a[1], a[2] = a[2], a[1]
join_str = "".join(a)
return hmac.new(self.sk.encode(), join_str.encode(), hashlib.sha256).hexdigest()
def get_dic(self):
dic = {
'ak': self.ak,
'nonce': self.nonce,
'sign': self.sign,
'ts': self.ts,
'group_id': self.group_id,
'face_image': self.face_image
}
        if self.id_image is not None:
dic["id_image"] = self.id_image
return dic
class NonceBase64(Nonce):
    '''Base64-encoded image input.'''
def __init__(self, ak, sk, group_id, face_image, id_image=None):
self.face_image = face_image
self.id_image = None
if id_image is not None:
self.id_image = id_image
self.ak = ak
self.sk = sk
self.group_id = group_id
self.sign = self.get_sign()
class Trace(Nonce):
def __init__(self, ak, sk, group_id, person_id, limit=None):
self.limit = None
if limit is not None:
self.limit = limit
self.person_id = person_id
self.ak = ak
self.sk = sk
self.group_id = group_id
self.sign = self.get_sign()
def get_dic(self):
dic = {
'ak': self.ak,
'nonce': self.nonce,
'sign': self.sign,
'ts': self.ts,
'group_id': self.group_id,
'person_id': self.person_id
}
if self.limit is not None:
dic["limit"] = self.limit
return dic
| 26.825581 | 88 | 0.516255 |

f32a0ca2966e28e56553b6f91dc80a957d284fd4 | 3,083 | py | Python | src/express-route-cross-connection/azext_expressroutecrossconnection/vendored_sdks/v2018_04_01/models/route_filter_rule.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | ["MIT"] | 207 | 2017-11-29T06:59:41.000Z | 2022-03-31T10:00:53.000Z | src/express-route-cross-connection/azext_expressroutecrossconnection/vendored_sdks/v2018_04_01/models/route_filter_rule.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | ["MIT"] | 4,061 | 2017-10-27T23:19:56.000Z | 2022-03-31T23:18:30.000Z | src/express-route-cross-connection/azext_expressroutecrossconnection/vendored_sdks/v2018_04_01/models/route_filter_rule.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | ["MIT"] | 802 | 2017-10-11T17:36:26.000Z | 2022-03-31T22:24:32.000Z |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class RouteFilterRule(SubResource):
"""Route Filter Rule Resource.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:param access: Required. The access type of the rule. Valid values are:
'Allow', 'Deny'. Possible values include: 'Allow', 'Deny'
:type access: str or ~azure.mgmt.network.v2018_04_01.models.Access
:ivar route_filter_rule_type: Required. The rule type of the rule. Valid
value is: 'Community'. Default value: "Community" .
:vartype route_filter_rule_type: str
:param communities: Required. The collection for bgp community values to
filter on. e.g. ['12076:5010','12076:5020']
:type communities: list[str]
:ivar provisioning_state: The provisioning state of the resource. Possible
values are: 'Updating', 'Deleting', 'Succeeded' and 'Failed'.
:vartype provisioning_state: str
:param name: The name of the resource that is unique within a resource
group. This name can be used to access the resource.
:type name: str
:param location: Resource location.
:type location: str
:ivar etag: A unique read-only string that changes whenever the resource
is updated.
:vartype etag: str
"""
_validation = {
'access': {'required': True},
'route_filter_rule_type': {'required': True, 'constant': True},
'communities': {'required': True},
'provisioning_state': {'readonly': True},
'etag': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'access': {'key': 'properties.access', 'type': 'str'},
'route_filter_rule_type': {'key': 'properties.routeFilterRuleType', 'type': 'str'},
'communities': {'key': 'properties.communities', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
route_filter_rule_type = "Community"
def __init__(self, **kwargs):
super(RouteFilterRule, self).__init__(**kwargs)
self.access = kwargs.get('access', None)
self.communities = kwargs.get('communities', None)
self.provisioning_state = None
self.name = kwargs.get('name', None)
self.location = kwargs.get('location', None)
self.etag = None
| 40.565789 | 91 | 0.622121 |

ce67665764b54abbd799fd63129019bd8bb4908b | 3,626 | py | Python | python/cugraph/cores/k_core.py | mike-wendt/cugraph | 1f0f14eba2e6253423b1a58ca38989261308df6c | ["Apache-2.0"] | null | null | null | python/cugraph/cores/k_core.py | mike-wendt/cugraph | 1f0f14eba2e6253423b1a58ca38989261308df6c | ["Apache-2.0"] | 1 | 2020-12-01T17:34:57.000Z | 2020-12-01T17:34:57.000Z | python/cugraph/cores/k_core.py | mike-wendt/cugraph | 1f0f14eba2e6253423b1a58ca38989261308df6c | ["Apache-2.0"] | null | null | null |
# Copyright (c) 2019-2020, NVIDIA CORPORATION.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cugraph.cores import k_core_wrapper, core_number_wrapper
from cugraph.utilities import cugraph_to_nx
from cugraph.utilities import check_nx_graph
from cugraph.structure.graph import Graph
def k_core(G, k=None, core_number=None):
"""
Compute the k-core of the graph G based on the out degree of its nodes. A
k-core of a graph is a maximal subgraph that contains nodes of degree k or
more. This call does not support a graph with self-loops and parallel
edges.
Parameters
----------
G : cuGraph.Graph or networkx.Graph
cuGraph graph descriptor with connectivity information. The graph
should contain undirected edges where undirected edges are represented
as directed edges in both directions. While this graph can contain edge
weights, they don't participate in the calculation of the k-core.
k : int, optional
Order of the core. This value must not be negative. If set to None, the
main core is returned.
core_number : cudf.DataFrame, optional
Precomputed core number of the nodes of the graph G containing two
cudf.Series of size V: the vertex identifiers and the corresponding
core number values. If set to None, the core numbers of the nodes are
calculated internally.
core_number['vertex'] : cudf.Series
Contains the vertex identifiers
core_number['values'] : cudf.Series
Contains the core number of vertices
Returns
-------
KCoreGraph : cuGraph.Graph
K Core of the input graph
Examples
--------
>>> gdf = cudf.read_csv('datasets/karate.csv', delimiter=' ',
>>> dtype=['int32', 'int32', 'float32'], header=None)
>>> G = cugraph.Graph()
>>> G.from_cudf_edgelist(gdf, source='0', destination='1')
>>> KCoreGraph = cugraph.k_core(G)
"""
G, isNx = check_nx_graph(G)
mytype = type(G)
KCoreGraph = mytype()
if mytype is not Graph:
raise Exception("directed graph not supported")
if core_number is not None:
if G.renumbered is True:
core_number = G.add_internal_vertex_id(
core_number, "vertex", "vertex", drop=True
)
else:
core_number = core_number_wrapper.core_number(G)
core_number = core_number.rename(
columns={"core_number": "values"}, copy=False
)
print(core_number)
if k is None:
k = core_number["values"].max()
k_core_df = k_core_wrapper.k_core(G, k, core_number)
if G.renumbered:
k_core_df = G.unrenumber(k_core_df, "src")
k_core_df = G.unrenumber(k_core_df, "dst")
if G.edgelist.weights:
KCoreGraph.from_cudf_edgelist(
k_core_df, source="src", destination="dst", edge_attr="weight"
)
else:
KCoreGraph.from_cudf_edgelist(
k_core_df, source="src", destination="dst"
)
if isNx is True:
KCoreGraph = cugraph_to_nx(KCoreGraph)
return KCoreGraph
| 35.203883 | 79 | 0.666575 |

f8912addb31be75998362bb95e8c2731a63e31c3 | 1,926 | py | Python | pylib/laserviz.py | jheidel/ece4760-lab5 | c1ab262e40c22ee77e54067fe6dbfa2b7ee78c89 | ["MIT"] | 4 | 2016-02-05T08:04:17.000Z | 2021-08-30T15:44:46.000Z | pylib/laserviz.py | jheidel/ece4760-lab5 | c1ab262e40c22ee77e54067fe6dbfa2b7ee78c89 | ["MIT"] | null | null | null | pylib/laserviz.py | jheidel/ece4760-lab5 | c1ab262e40c22ee77e54067fe6dbfa2b7ee78c89 | ["MIT"] | 4 | 2016-02-05T08:04:18.000Z | 2019-06-04T15:48:49.000Z |
from gi.repository import Gtk, Gdk, Pango, PangoCairo
from threading import Thread, Event
from time import sleep
import cairo
import math
class LaserViz(Gtk.DrawingArea):
def __init__(self, parent):
Gtk.DrawingArea.__init__(self)
self.connect('draw', self._do_expose)
self.parent = parent
self.show()
self.container = parent.builder.get_object("laserVizBox")
self.container.pack_start(self, True, True, 0)
self.ildaframe = None
def set_frame(self, frame):
self.ildaframe = frame
self.queue_draw()
def _do_expose(self, widget, cr):
allocation = self.get_allocation()
width = allocation.width
height = allocation.height
#draw background
cr.set_source_rgb(0,0,0)
cr.rectangle(0,0, width, height)
cr.fill()
if self.ildaframe is not None:
cr.set_source_rgb(255,0,255)
cr.move_to(20,20)
cr.show_text("Frame points: %d" % self.ildaframe.get_length())
cr.set_line_width(1.0)
past_xy = None
for i,pt in enumerate(self.ildaframe.get_points()):
radius = 1
if (pt["blank"]):
cr.set_source_rgb(0.4,0.1,0) #dim laser (blank)
else:
cr.set_source_rgb(0,1.0,0) #green laser!
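                # ILDA points use signed 16-bit coordinates, so dividing by 2**16
                # maps them to roughly [-0.5, 0.5]; scale to the widget size and
                # flip y so positive y points up on screen.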
draw_x = width / 2 + float(pt["x"]) / 2**16 * width
draw_y = height / 2 - float(pt["y"]) / 2**16 * height
cr.arc(draw_x, draw_y, radius, 0, 2 * math.pi)
cr.fill()
if past_xy is not None:
#Draw line from past to present
(px, py) = past_xy
cr.move_to(px, py)
cr.line_to(draw_x, draw_y)
cr.stroke()
past_xy = (draw_x, draw_y)
| 31.064516 | 74 | 0.529595 |

ecdfde9045f2b897fb953935b704136289a7b9be | 4,564 | py | Python | tests/django/test_oauth2/models.py | jonathanunderwood/authlib | 3834a2a80876a87cdaab4240d77185179970c3ab | ["BSD-3-Clause"] | 1 | 2019-10-26T20:23:28.000Z | 2019-10-26T20:23:28.000Z | tests/django/test_oauth2/models.py | jonathanunderwood/authlib | 3834a2a80876a87cdaab4240d77185179970c3ab | ["BSD-3-Clause"] | null | null | null | tests/django/test_oauth2/models.py | jonathanunderwood/authlib | 3834a2a80876a87cdaab4240d77185179970c3ab | ["BSD-3-Clause"] | 2 | 2021-05-24T20:34:12.000Z | 2022-03-26T07:46:17.000Z |
import time
from django.db.models import (
Model,
CharField,
TextField,
BooleanField,
IntegerField,
)
from django.db.models import ForeignKey, CASCADE
from django.contrib.auth.models import User
from authlib.common.security import generate_token
from authlib.oauth2.rfc6749 import (
ClientMixin,
TokenMixin,
AuthorizationCodeMixin,
)
from authlib.oauth2.rfc6749.util import scope_to_list, list_to_scope
def now_timestamp():
return int(time.time())
class Client(Model, ClientMixin):
user = ForeignKey(User, on_delete=CASCADE)
client_id = CharField(max_length=48, unique=True, db_index=True)
client_secret = CharField(max_length=48, blank=True)
redirect_uris = TextField(default='')
default_redirect_uri = TextField(blank=False, default='')
scope = TextField(default='')
response_type = TextField(default='')
grant_type = TextField(default='')
token_endpoint_auth_method = CharField(max_length=120, default='')
def get_client_id(self):
return self.client_id
def get_default_redirect_uri(self):
return self.default_redirect_uri
def get_allowed_scope(self, scope):
if not scope:
return ''
allowed = set(scope_to_list(self.scope))
return list_to_scope([s for s in scope.split() if s in allowed])
def check_redirect_uri(self, redirect_uri):
if redirect_uri == self.default_redirect_uri:
return True
return redirect_uri in self.redirect_uris
def has_client_secret(self):
return bool(self.client_secret)
def check_client_secret(self, client_secret):
return self.client_secret == client_secret
def check_token_endpoint_auth_method(self, method):
return self.token_endpoint_auth_method == method
def check_response_type(self, response_type):
allowed = self.response_type.split()
return response_type in allowed
def check_grant_type(self, grant_type):
allowed = self.grant_type.split()
return grant_type in allowed
class OAuth2Token(Model, TokenMixin):
user = ForeignKey(User, on_delete=CASCADE)
client_id = CharField(max_length=48, db_index=True)
token_type = CharField(max_length=40)
access_token = CharField(max_length=255, unique=True, null=False)
refresh_token = CharField(max_length=255, db_index=True)
scope = TextField(default='')
revoked = BooleanField(default=False)
issued_at = IntegerField(null=False, default=now_timestamp)
expires_in = IntegerField(null=False, default=0)
def get_client_id(self):
return self.client_id
def get_scope(self):
return self.scope
def get_expires_in(self):
return self.expires_in
def get_expires_at(self):
return self.issued_at + self.expires_in
def is_refresh_token_active(self):
if self.revoked:
return False
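        # Treat the refresh token as usable for twice the access token lifetime.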
expired_at = self.issued_at + self.expires_in * 2
return expired_at >= time.time()
class OAuth2Code(Model, AuthorizationCodeMixin):
user = ForeignKey(User, on_delete=CASCADE)
client_id = CharField(max_length=48, db_index=True)
code = CharField(max_length=120, unique=True, null=False)
redirect_uri = TextField(default='', null=True)
response_type = TextField(default='')
scope = TextField(default='', null=True)
auth_time = IntegerField(null=False, default=now_timestamp)
def is_expired(self):
return self.auth_time + 300 < time.time()
def get_redirect_uri(self):
return self.redirect_uri
def get_scope(self):
return self.scope or ''
def get_auth_time(self):
return self.auth_time
class CodeGrantMixin(object):
def parse_authorization_code(self, code, client):
try:
item = OAuth2Code.objects.get(code=code, client_id=client.client_id)
except OAuth2Code.DoesNotExist:
return None
if not item.is_expired():
return item
def delete_authorization_code(self, authorization_code):
authorization_code.delete()
def authenticate_user(self, authorization_code):
return authorization_code.user
def generate_authorization_code(client, grant_user, request, **extra):
code = generate_token(48)
item = OAuth2Code(
code=code,
client_id=client.client_id,
redirect_uri=request.redirect_uri,
response_type=request.response_type,
scope=request.scope,
user=grant_user,
**extra
)
item.save()
return code
| 29.830065 | 80 | 0.696976 |

219b112a687f116ba68a4bc87ce79038eb0d3f88 | 1,855 | py | Python | userbot/__main__.py | Rewtio/Mikoo-Userbot | 418f0017241fa65bdf7f99c84381317cb4dbeb55 | ["Naumen", "Condor-1.1", "MS-PL"] | 4 | 2022-03-03T01:31:48.000Z | 2022-03-26T00:15:41.000Z | userbot/__main__.py | Rewtio/Mikoo-Userbot | 418f0017241fa65bdf7f99c84381317cb4dbeb55 | ["Naumen", "Condor-1.1", "MS-PL"] | 1 | 2022-03-16T02:54:27.000Z | 2022-03-17T09:17:12.000Z | userbot/__main__.py | Rewtio/Mikoo-Userbot | 418f0017241fa65bdf7f99c84381317cb4dbeb55 | ["Naumen", "Condor-1.1", "MS-PL"] | 1 | 2022-03-16T02:41:38.000Z | 2022-03-16T02:41:38.000Z |
# Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.c (the "License");
# you may not use this file except in compliance with the License.
#
# Copyright (C) 2021 TeamUltroid for autobot
# Recode by @divarvian
# FROM Mikoo-Userbot <https://github.com/divarvian/Mikoo-Userbot>
# t.me/MikooUserbot & t.me/MikooUserbot
#
""" Userbot start point """
import sys
from importlib import import_module
from platform import python_version
from pytgcalls import __version__ as pytgcalls
from pytgcalls import idle
from telethon import version
from userbot import BOT_TOKEN
from userbot import BOT_VER as ubotversion
from userbot import BOTLOG_CHATID, LOGS, LOOP, bot
from userbot.clients import man_userbot_on, multiman
from userbot.core.git import git
from userbot.modules import ALL_MODULES
from userbot.utils import autobot, autopilot
try:
for module_name in ALL_MODULES:
imported_module = import_module(f"userbot.modules.{module_name}")
client = multiman()
total = 5 - client
git()
LOGS.info(f"Total Clients = {total} User")
LOGS.info(f"Python Version - {python_version()}")
LOGS.info(f"Telethon Version - {version.__version__}")
LOGS.info(f"PyTgCalls Version - {pytgcalls.__version__}")
LOGS.info(f"Mikoo-Userbot Version - {ubotversion} [🔥 BERHASIL DIAKTIFKAN! 🔥]")
except (ConnectionError, KeyboardInterrupt, NotImplementedError, SystemExit):
pass
except BaseException as e:
LOGS.info(str(e), exc_info=True)
sys.exit(1)
LOOP.run_until_complete(man_userbot_on())
if not BOTLOG_CHATID:
LOOP.run_until_complete(autopilot())
if not BOT_TOKEN:
LOOP.run_until_complete(autobot())
idle()
if len(sys.argv) not in (1, 3, 4):
bot.disconnect()
else:
try:
bot.run_until_disconnected()
except ConnectionError:
pass
| 30.409836 | 82 | 0.745553 |

cceb75df205cbc3b20c39f8de66d872539db1aa8 | 1,487 | py | Python | vendor/github.com/elastic/beats/packetbeat/tests/system/test_0012_http_basicauth.py | luckydonald/dockbeat | 806a70b38d4220a21b1de029443da25f77749c90 | ["Apache-2.0"] | 201 | 2016-09-14T10:35:16.000Z | 2022-02-26T12:03:03.000Z | vendor/github.com/elastic/beats/packetbeat/tests/system/test_0012_http_basicauth.py | luckydonald/dockbeat | 806a70b38d4220a21b1de029443da25f77749c90 | ["Apache-2.0"] | 107 | 2015-09-30T12:30:49.000Z | 2016-09-05T11:36:41.000Z | vendor/github.com/elastic/beats/packetbeat/tests/system/test_0012_http_basicauth.py | luckydonald/dockbeat | 806a70b38d4220a21b1de029443da25f77749c90 | ["Apache-2.0"] | 69 | 2016-09-20T20:42:02.000Z | 2021-12-06T02:21:57.000Z |
from pbtests.packetbeat import TestCase
import re
"""
Tests for removing base64-encoded authentication information
"""
class Test(TestCase):
def test_http_auth_headers(self):
self.render_config_template(
dns_ports=[], # disable dns because the pcap
# contains the DNS query
http_send_all_headers=1,
http_redact_authorization=1,
http_ports=[80]
)
self.run_packetbeat(pcap="http_basicauth.pcap",
debug_selectors=["http", "httpdetailed"])
objs = self.read_output()
assert len(objs) >= 1
assert all([o["type"] == "http" for o in objs])
assert all([o["http.request_headers"]["authorization"] == "*"
is not None for o in objs])
def test_http_auth_raw(self):
self.render_config_template(
dns_ports=[], # disable dns because the pcap
# contains the DNS query
http_redact_authorization=1,
http_send_request=1,
http_ports=[80]
)
self.run_packetbeat(pcap="http_basicauth.pcap",
debug_selectors=["http", "httpdetailed"])
objs = self.read_output()
assert len(objs) >= 1
assert all([o["type"] == "http" for o in objs])
assert all([re.search("[Aa]uthorization:\*+", o["request"])
is not None for o in objs])
| 33.795455 | 69 | 0.553463 |

d78a536e95c645a2bf4285b45d4eb9a63eed873d | 2,025 | py | Python | tests/components/vera/common.py | tuxbox/home-assistant | df74272ba6311d527fd07198929c80a45d9fed15 | ["Apache-2.0"] | null | null | null | tests/components/vera/common.py | tuxbox/home-assistant | df74272ba6311d527fd07198929c80a45d9fed15 | ["Apache-2.0"] | null | null | null | tests/components/vera/common.py | tuxbox/home-assistant | df74272ba6311d527fd07198929c80a45d9fed15 | ["Apache-2.0"] | null | null | null |
"""Common code for tests."""
from typing import Callable, NamedTuple, Tuple
from mock import MagicMock
from pyvera import VeraController, VeraDevice, VeraScene
from homeassistant.components.vera import CONF_CONTROLLER, DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
ComponentData = NamedTuple("ComponentData", (("controller", VeraController),))
class ComponentFactory:
"""Factory class."""
def __init__(self, init_controller_mock):
"""Constructor."""
self.init_controller_mock = init_controller_mock
async def configure_component(
self,
hass: HomeAssistant,
devices: Tuple[VeraDevice] = (),
scenes: Tuple[VeraScene] = (),
setup_callback: Callable[[VeraController], None] = None,
) -> ComponentData:
"""Configure the component with specific mock data."""
controller_url = "http://127.0.0.1:123"
hass_config = {
DOMAIN: {CONF_CONTROLLER: controller_url},
}
controller = MagicMock(spec=VeraController) # type: VeraController
controller.base_url = controller_url
controller.register = MagicMock()
controller.get_devices = MagicMock(return_value=devices or ())
controller.get_scenes = MagicMock(return_value=scenes or ())
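        # Give every mocked device/scene a reference back to the mocked controller,
        # as real pyvera objects would have.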
for vera_obj in controller.get_devices() + controller.get_scenes():
vera_obj.vera_controller = controller
controller.get_devices.reset_mock()
controller.get_scenes.reset_mock()
if setup_callback:
setup_callback(controller, hass_config)
def init_controller(base_url: str) -> list:
nonlocal controller
return [controller, True]
self.init_controller_mock.side_effect = init_controller
# Setup home assistant.
assert await async_setup_component(hass, DOMAIN, hass_config)
await hass.async_block_till_done()
return ComponentData(controller=controller)
| 32.66129 | 78 | 0.688395 |

c290f624a4d0815e1b6a5fccf9e8ae8233c260e0 | 5,445 | py | Python | paperPlots.py | andersdot/LyA-InvertPhase | 142a3c8e11be04c82d1cf61020e8fd54536ecc18 | ["MIT"] | null | null | null | paperPlots.py | andersdot/LyA-InvertPhase | 142a3c8e11be04c82d1cf61020e8fd54536ecc18 | ["MIT"] | null | null | null | paperPlots.py | andersdot/LyA-InvertPhase | 142a3c8e11be04c82d1cf61020e8fd54536ecc18 | ["MIT"] | null | null | null |
import numpy as np
import matplotlib as mpl
#mpl.use('pdf')
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from astropy.io import ascii
from scipy.interpolate import interp1d
def lnMeanFlux(z):
return np.log(0.8)*((1. + z)/3.25)**3.2
def rms(x):
    return np.sqrt(np.sum(x**2.)/len(x))
def plotAllPS(power, k, ax, color='black', alpha=0.1, label=None):
for i, p, in enumerate(power):
if i == 0: label = label
else: label = None
ax.loglog(k, p, color=color, lw=0.5, alpha=alpha, label=label)
def variance(power, nPaired=25., nTrad=50.):
mean = np.mean(power, axis=0)
var = np.sum((power - mean)**2., axis=0)/(len(power) - 1.)
return var
#meanT = np.mean(np.vstack(dataT[pkey]), axis=0)
#meanP = np.mean(np.vstack(paired_p), axis=0)
#meanT = np.mean(powerT, axis=0)
#meanP = np.mean(powerP, axis=0)
#varP = np.sum((powerP - meanP)**2., axis=0)/len(powerP)
#varT = np.sum((powerT - meanT)**2., axis=0)/len(powerT)
def bias(k, powerT, powerP, nPaired=25., nTrad = 50.):
meanT = np.mean(powerT, axis=0)
meanP = np.mean(powerP, axis=0)
varP = variance(powerP)
varT = variance(powerT)
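    # z-score of the difference of the two sample means, normalised by the
    # combined standard error of the (independent) traditional and paired sets.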
bias = (meanT - meanP)/np.sqrt(varP/nPaired + varT/nTrad)
return bias
snap_nums = [0, 1, 2]
#snap_nums = [0, 1, 2]
zz = [4, 3, 2]#, 3, 2]
colors = ['C0', 'C1', 'C2']
spectral_res = 10
xlim1d = (0.3, 30)
xlim3d = (0.3, 30)
ylim_avg = (0.3, 2)
savenpz_pre = ['T', 'NCV_0', 'NCV_1']
snap_pre = ['', 'NCV_0_', 'NCV_1_']
bigbox = False
dimension = '3d'
if bigbox:
grid_width = 400
boxsize = 40 #Mpc/h
filenamePostfix = '_1mubin.npz'
else:
grid_width = 200
boxsize = 20
filenamePostfix = '_1mubin_20Mpc.npz'
if dimension == '1d':
normalization = boxsize
plotPostFix = ''
else:
normalization = boxsize**3.
plotPostFix = '_1mubin'
alpha_line = 0.75
alpha_fill = 0.75
tradColor = 'C0'
pairColor = 'C1'
fig, ax = plt.subplots(3, sharex=True) #, figsize=(6, 6))
for sn, z, c in zip(snap_nums, zz, colors):
fig.subplots_adjust(hspace=0)
plt.setp([a.get_xticklabels() for a in fig.axes[:-1]], visible=False)
dataT = np.load('/Users/landerson/LyA-InvertPhase/goodspec{0}/spec{0}{1}_{2}'.format(dimension, grid_width, sn) + filenamePostfix)
data0 = np.load('/Users/landerson/LyA-InvertPhase/goodspec{0}/spec{0}{1}_NCV_0_{2}'.format(dimension, grid_width, sn) + filenamePostfix)
data1 = np.load('/Users/landerson/LyA-InvertPhase/goodspec{0}/spec{0}{1}_NCV_1_{2}'.format(dimension, grid_width, sn) + filenamePostfix)
shapeT = dataT['power'].shape
powerT = dataT['power'].reshape(shapeT[0:2])*normalization
nTrad = shapeT[0]
if dimension == '1d':
kT = np.arange(shapeT[1])[np.newaxis, :]*2*np.pi/boxsize
else:
kT = dataT['k'].reshape(shapeT[0:2])
shapeP = data0['power'].shape
powerP = 0.5*(data0['power'].reshape(shapeP[0:2]) + data1['power'].reshape(shapeP[0:2]))*normalization
if dimension == '1d':
kP = np.arange(shapeP[1])[np.newaxis, :]*2*np.pi/boxsize
else:
kP = data0['k'].reshape(shapeP[0:2])
nPaired = shapeP[0]
print(nTrad, nPaired)
biasValues = bias(kT, powerT, powerP, nPaired=nPaired, nTrad = nTrad)
ax[1].semilogx(kP[0], biasValues, color=c, lw=2, label='z='+str(z))
varT = variance(powerT) # nPaired=25., nTrad=50.):
varP = variance(powerP) #, nPaired=25., nTrad=50.)
#import pdb; pdb.set_trace()
uncert = 1/2.*np.sqrt(varT/varP)*np.sqrt(2./nTrad + 2./nPaired)
yp = varT/varP + uncert
ym = varT/varP - uncert
print(yp, ym)
ax[2].semilogx(kT[0], varT/varP, color=c, lw=2)
ax[2].fill_between(kT[0], ym, yp, color=c, alpha=0.75)
#ax[2].fill_between(kT[0][:,0], ym[:,0], yp[:,0], color=c, alpha=0.5)
plotAllPS(powerT, kT[0], ax[0], color=c, alpha=0.1)
plotAllPS(powerP, kP[0], ax[0], color=c, alpha=0.1)
#ax[0].set_title('spectral resolution {0}km/s\n'.format(spectral_res), y=1.25)
if dimension == '1d':
ax[0].set_ylabel('$\mathrm{P_F(k) \; [h/Mpc]}$', fontsize=12)
else:
ax[0].set_ylabel('$\mathrm{3D \; P_F(k) \;[h/Mpc]^3}$', fontsize=12)
ax[1].set_ylabel(r'$\mathrm{(\overline{P_T} - \overline{P_P})/\sigma_{\bar{P_T} - \bar{P_P}}}$', fontsize=11)
ax[2].set_ylabel('$\mathrm{\sigma_T^2/\sigma_P^2}$', fontsize=12)
ax[2].set_xlabel('$\mathrm{k_{\parallel} \; [h/Mpc]}$', fontsize=12)
ax[1].axhline(0, linestyle='--', color='black')
ax[1].axhline(-3, linestyle='--', color='black', lw=0.5)
ax[1].axhline(3, linestyle='--', color='black', lw=0.5)
ax[2].axhline(1, linestyle='--', color='black')
ax[0].set_xlim(0.2, 30)
if dimension == '1d':
ax[0].set_ylim(1e-6, 1e0)
ax[2].set_xlabel('$\mathrm{k_{\parallel} \; [h/Mpc]}$', fontsize=12)
else:
ax[0].set_ylim(1e-6, 1e2)
ax[2].set_xlabel('$\mathrm{k \;[h/Mpc]}$', fontsize=12)
ax[1].set_xlim(0.2, 30)
ax[1].set_ylim(-5, 5)
ax[2].set_xlim(0.2, 30)
ax[2].set_ylim(0.1, 300)
ax[2].set_yscale('log')
#lgd = pylab.legend(loc=9, bbox_to_anchor=(0.5, -0.1), ncol=2)
lgd = [fig.legend(loc=9, bbox_to_anchor=(0.5, 1.02), ncol=4)]
#labels = ['traditional', 'paired']
#lT3 = mpatches.Patch(color='red', alpha=0.5, label='Traditional')
#lP3 = mpatches.Patch(color='black', alpha=0.5, label='Paired')
#legend3 = [lT3, lP3]
fig.tight_layout()
fig.savefig('varRatio{0}_{1}Mpc{2}.pdf'.format(dimension, boxsize, plotPostFix), additional_artists=lgd,
bbox_inches="tight")
| 31.293103 | 140 | 0.627548 |

7062016ce5fb75662b8af569c072e966f8932336 | 2,379 | py | Python | DoublyLinkedBase.py | ivaste/Algorithms | 04580d7990af5cc91977b96b987871ccfd89cfaf | ["MIT"] | null | null | null | DoublyLinkedBase.py | ivaste/Algorithms | 04580d7990af5cc91977b96b987871ccfd89cfaf | ["MIT"] | null | null | null | DoublyLinkedBase.py | ivaste/Algorithms | 04580d7990af5cc91977b96b987871ccfd89cfaf | ["MIT"] | null | null | null |
# Exception when attempting to access an element from an empty container.
class Empty(Exception):
pass
class _DoublyLinkedBase:
"""A base class providing a doubly linked list representation."""
#-------------------------- nested _Node class --------------------------
# nested _Node class
class _Node:
"""Lightweight, nonpublic class for storing a doubly linked node."""
__slots__ = '_element', '_prev', '_next' # streamline memory
def __init__(self, element, prev, next): # initialize node's fields
self._element = element # user's element
self._prev = prev # previous node reference
self._next = next # next node reference
#-------------------------- list constructor --------------------------
def __init__(self):
"""Create an empty list."""
self._header = self._Node(None, None, None)
self._trailer = self._Node(None, None, None)
self._header._next = self._trailer # trailer is after header
self._trailer._prev = self._header # header is before trailer
self._size = 0 # number of elements
#-------------------------- public accessors --------------------------
def __len__(self):
"""Return the number of elements in the list."""
return self._size
def is_empty(self):
"""Return True if list is empty."""
return self._size == 0
#-------------------------- nonpublic utilities --------------------------
def _insert_between(self, e, predecessor, successor):
"""Add element e between two existing nodes and return new node."""
newest = self._Node(e, predecessor, successor) # linked to neighbors
predecessor._next = newest
successor._prev = newest
self._size += 1
return newest
def _delete_node(self, node):
"""Delete nonsentinel node from the list and return its element."""
predecessor = node._prev
successor = node._next
predecessor._next = successor
successor._prev = predecessor
self._size -= 1
element = node._element # record deleted element
node._prev = node._next = node._element = None # deprecate node
return element # return deleted element
| 39.65 | 82 | 0.556536 |

cb677faf181202fa11c2772a84a0af6b5489be72 | 509 | py | Python | src/main/resources/ee/ut/similaritydetector/python/SyntaxHighlighter.py | stargateprovider/source-code-similarity-detector | a24310bbe506d0bac240483ffbcfc0cc1bd33867 | ["MIT"] | 1 | 2021-11-10T22:55:37.000Z | 2021-11-10T22:55:37.000Z | src/main/resources/ee/ut/similaritydetector/python/SyntaxHighlighter.py | stargateprovider/source-code-similarity-detector | a24310bbe506d0bac240483ffbcfc0cc1bd33867 | ["MIT"] | null | null | null | src/main/resources/ee/ut/similaritydetector/python/SyntaxHighlighter.py | stargateprovider/source-code-similarity-detector | a24310bbe506d0bac240483ffbcfc0cc1bd33867 | ["MIT"] | 1 | 2021-09-17T08:13:09.000Z | 2021-09-17T08:13:09.000Z |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from pygments import highlight
from pygments.formatters.html import HtmlFormatter
from pygments.lexers.python import PythonLexer
# Note: source_code_filepath, style and html_file_path are not defined in this script;
# the embedding application is expected to bind them before the script is executed.
with open(source_code_filepath, 'rb') as source_code_file:
    source_code = source_code_file.read()
lexer = PythonLexer(encoding='utf-8')
html_formatter = HtmlFormatter(full=True, linenos=True, style=style, encoding="utf-8")
with open(html_file_path, 'wb') as html_file:
    highlight(source_code, lexer, html_formatter, html_file)
| 33.933333
| 88
| 0.772102
|
4d195c36aed24602945acccd3be4a7c4509bd3ad
| 865
|
py
|
Python
|
predict/bin/model.py
|
antkak/deeparg-keras
|
0f3b640249d22a0c5aea7e8ad9c9af0b667d2176
|
[
"MIT"
] | null | null | null |
predict/bin/model.py
|
antkak/deeparg-keras
|
0f3b640249d22a0c5aea7e8ad9c9af0b667d2176
|
[
"MIT"
] | null | null | null |
predict/bin/model.py
|
antkak/deeparg-keras
|
0f3b640249d22a0c5aea7e8ad9c9af0b667d2176
|
[
"MIT"
] | null | null | null |
from lasagne import layers
import lasagne
def model(input_size, output_size):
# input size: x_train.shape[1]
    # output size: len(set(Y))
return [
(layers.InputLayer, {'shape':(None, input_size)}),
(layers.DenseLayer, {'num_units':2000}),
(layers.DropoutLayer, {'p':0.5}),
(layers.DenseLayer, {'num_units':1000}),
(layers.DropoutLayer, {'p':0.5}),
(layers.DenseLayer, {'num_units':500}),
(layers.DropoutLayer, {'p':0.5}),
(layers.DenseLayer, {'num_units':100}),
(layers.DenseLayer, {'num_units':output_size,
'nonlinearity':lasagne.nonlinearities.softmax
}),
]
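# -------------------- hedged example (not in the original file) --------------------
# The (layer class, kwargs) tuple format returned by model() matches what nolearn's
# NeuralNet accepts; this sketch assumes the nolearn package is available, and the
# optimiser and training parameters below are illustrative, not taken from this repository.
def build_net(input_size, output_size):
    from nolearn.lasagne import NeuralNet           # assumed companion package
    return NeuralNet(
        layers=model(input_size, output_size),      # the (class, kwargs) list defined above
        update=lasagne.updates.adam,                 # illustrative optimiser choice
        update_learning_rate=0.001,
        max_epochs=10,
        verbose=1,
    )
# Typical use, following the shape comments inside model():
# net = build_net(x_train.shape[1], len(set(Y)))
# net.fit(x_train, y_train)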
| 28.833333
| 81
| 0.460116
|
5e96b3afa89dd2df46a5d570209378c30b6b488d
| 19,336
|
py
|
Python
|
ansys-grantami-bomanalytics-openapi/src/ansys/grantami/bomanalytics_openapi/api/impacted_substances_api.py
|
pyansys/grantami-bomanalytics-openapi
|
2879348388d6d7bbdd3b5082c5b7759b4ed28ab7
|
[
"MIT"
] | null | null | null |
ansys-grantami-bomanalytics-openapi/src/ansys/grantami/bomanalytics_openapi/api/impacted_substances_api.py
|
pyansys/grantami-bomanalytics-openapi
|
2879348388d6d7bbdd3b5082c5b7759b4ed28ab7
|
[
"MIT"
] | 2
|
2022-02-18T15:56:36.000Z
|
2022-02-24T11:44:59.000Z
|
ansys-grantami-bomanalytics-openapi/src/ansys/grantami/bomanalytics_openapi/api/impacted_substances_api.py
|
pyansys/grantami-bomanalytics-openapi
|
2879348388d6d7bbdd3b5082c5b7759b4ed28ab7
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Granta.BomAnalyticsServices
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: V1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import re # noqa: F401
from . import ApiBase
class ImpactedSubstancesApi(ApiBase):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def post_impactedsubstances_bom1711(self, body, **kwargs): # noqa: E501
"""Get the impacted substances for a Bill of Materials # noqa: E501
Examines the substances contained within a 17/11 Bill of Materials (BoM) by following links to substances, materials, specifications, and parts, and reports those that are impacted by the specified legislations. Each substance includes the quantity of that substance in the parent and the quantity threshold imposed on that substance by the relevant legislation. Both the quantity and threshold are only reported if present in Granta MI, otherwise they are null. If the same substance is impacted in more than one place, e.g. once in a linked material and once in a linked coating, the one with the higher percentage amount will be reported. References to Granta MI records are constructed as 'GrantaBaseType' RecordReferences; see the 17/11 BoM schema for more details on how to construct a valid BoM. Legislations are specified by the legislation 'Short title' attribute. # noqa: E501
This method makes a synchronous HTTP request.
:param GetImpactedSubstancesForBom1711Request body: (required)
:return: GetImpactedSubstancesForBom1711Response
"""
kwargs['_return_http_data_only'] = True
(data) = self.post_impactedsubstances_bom1711_with_http_info(body, **kwargs) # noqa: E501
return data
def post_impactedsubstances_bom1711_with_http_info(self, body, **kwargs): # noqa: E501
"""Get the impacted substances for a Bill of Materials # noqa: E501
Examines the substances contained within a 17/11 Bill of Materials (BoM) by following links to substances, materials, specifications, and parts, and reports those that are impacted by the specified legislations. Each substance includes the quantity of that substance in the parent and the quantity threshold imposed on that substance by the relevant legislation. Both the quantity and threshold are only reported if present in Granta MI, otherwise they are null. If the same substance is impacted in more than one place, e.g. once in a linked material and once in a linked coating, the one with the higher percentage amount will be reported. References to Granta MI records are constructed as 'GrantaBaseType' RecordReferences; see the 17/11 BoM schema for more details on how to construct a valid BoM. Legislations are specified by the legislation 'Short title' attribute. # noqa: E501
This method makes a synchronous HTTP request.
:param GetImpactedSubstancesForBom1711Request body: (required)
:return: GetImpactedSubstancesForBom1711Response
"""
all_params = ['body'] # noqa: E501
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in params['kwargs'].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '{}' to method post_impactedsubstances_bom1711".format(key)
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_impactedsubstances_bom1711`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/impacted-substances/bom1711', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetImpactedSubstancesForBom1711Response', # noqa: E501
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def post_impactedsubstances_materials(self, body, **kwargs): # noqa: E501
"""Get the impacted substances for materials # noqa: E501
Examines the substances contained within one or more materials and reports those that are impacted by the specified legislations. Each substance includes the quantity of that substance in the material and the quantity threshold imposed on that substance by the relevant legislation. Both the quantity and threshold are only reported if present in Granta MI, otherwise they are null. A material can be referenced by one of four different identifiers: record GUID, record history GUID, record history identity, or material ID. The table that contains the material of interest is not required, materials will be discovered if they are present in either in the \"Materials in-house\" or \"MaterialUniverse\" tables. Legislations are specified by the legislation 'Short title' attribute. # noqa: E501
This method makes a synchronous HTTP request.
:param GetImpactedSubstancesForMaterialsRequest body: (required)
:return: GetImpactedSubstancesForMaterialsResponse
"""
kwargs['_return_http_data_only'] = True
(data) = self.post_impactedsubstances_materials_with_http_info(body, **kwargs) # noqa: E501
return data
def post_impactedsubstances_materials_with_http_info(self, body, **kwargs): # noqa: E501
"""Get the impacted substances for materials # noqa: E501
Examines the substances contained within one or more materials and reports those that are impacted by the specified legislations. Each substance includes the quantity of that substance in the material and the quantity threshold imposed on that substance by the relevant legislation. Both the quantity and threshold are only reported if present in Granta MI, otherwise they are null. A material can be referenced by one of four different identifiers: record GUID, record history GUID, record history identity, or material ID. The table that contains the material of interest is not required, materials will be discovered if they are present in either in the \"Materials in-house\" or \"MaterialUniverse\" tables. Legislations are specified by the legislation 'Short title' attribute. # noqa: E501
This method makes a synchronous HTTP request.
:param GetImpactedSubstancesForMaterialsRequest body: (required)
:return: GetImpactedSubstancesForMaterialsResponse
"""
all_params = ['body'] # noqa: E501
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in params['kwargs'].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '{}' to method post_impactedsubstances_materials".format(key)
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_impactedsubstances_materials`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/impacted-substances/materials', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetImpactedSubstancesForMaterialsResponse', # noqa: E501
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def post_impactedsubstances_parts(self, body, **kwargs): # noqa: E501
"""Get the impacted substances for parts # noqa: E501
Examines the substances contained within one or more parts by following links to substances, materials, specifications and other parts, and reports those that are impacted by the specified legislations. Each substance includes the quantity of that substance in the parent and the quantity threshold imposed on that substance by the relevant legislation. Both the quantity and threshold are only reported if present in Granta MI, otherwise they are null. If the same substance is impacted in more than one place, e.g. once in a linked material and once in a linked coating, the one with the higher percentage amount will be reported. A part can be referenced by one of four different identifiers: record GUID, record history GUID, record history identity, or part number. Legislations are specified by the legislation 'Short title' attribute. # noqa: E501
This method makes a synchronous HTTP request.
:param GetImpactedSubstancesForPartsRequest body: (required)
:return: GetImpactedSubstancesForPartsResponse
"""
kwargs['_return_http_data_only'] = True
(data) = self.post_impactedsubstances_parts_with_http_info(body, **kwargs) # noqa: E501
return data
def post_impactedsubstances_parts_with_http_info(self, body, **kwargs): # noqa: E501
"""Get the impacted substances for parts # noqa: E501
Examines the substances contained within one or more parts by following links to substances, materials, specifications and other parts, and reports those that are impacted by the specified legislations. Each substance includes the quantity of that substance in the parent and the quantity threshold imposed on that substance by the relevant legislation. Both the quantity and threshold are only reported if present in Granta MI, otherwise they are null. If the same substance is impacted in more than one place, e.g. once in a linked material and once in a linked coating, the one with the higher percentage amount will be reported. A part can be referenced by one of four different identifiers: record GUID, record history GUID, record history identity, or part number. Legislations are specified by the legislation 'Short title' attribute. # noqa: E501
This method makes a synchronous HTTP request.
:param GetImpactedSubstancesForPartsRequest body: (required)
:return: GetImpactedSubstancesForPartsResponse
"""
all_params = ['body'] # noqa: E501
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in params['kwargs'].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '{}' to method post_impactedsubstances_parts".format(key)
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_impactedsubstances_parts`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/impacted-substances/parts', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetImpactedSubstancesForPartsResponse', # noqa: E501
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def post_impactedsubstances_specifications(self, body, **kwargs): # noqa: E501
"""Get the impacted substances for specifications # noqa: E501
Examines the substances contained within one or more specifications by following links to substances, materials, coatings and other specifications, and reports those that are impacted by the specified legislations. Each substance includes the quantity of that substance in the parent and the quantity threshold imposed on that substance by the relevant legislation. Both the quantity and threshold are only reported if present in Granta MI, otherwise they are null. If the same substance is impacted in more than one place, e.g. once in a linked material and once in a linked coating, the one with the higher percentage amount will be reported. A specification can be referenced by one of four different identifiers: record GUID, record history GUID, record history identity, or specification ID. Legislations are specified by the legislation 'Short title' attribute. # noqa: E501
This method makes a synchronous HTTP request.
:param GetImpactedSubstancesForSpecificationsRequest body: (required)
:return: GetImpactedSubstancesForSpecificationsResponse
"""
kwargs['_return_http_data_only'] = True
(data) = self.post_impactedsubstances_specifications_with_http_info(body, **kwargs) # noqa: E501
return data
def post_impactedsubstances_specifications_with_http_info(self, body, **kwargs): # noqa: E501
"""Get the impacted substances for specifications # noqa: E501
Examines the substances contained within one or more specifications by following links to substances, materials, coatings and other specifications, and reports those that are impacted by the specified legislations. Each substance includes the quantity of that substance in the parent and the quantity threshold imposed on that substance by the relevant legislation. Both the quantity and threshold are only reported if present in Granta MI, otherwise they are null. If the same substance is impacted in more than one place, e.g. once in a linked material and once in a linked coating, the one with the higher percentage amount will be reported. A specification can be referenced by one of four different identifiers: record GUID, record history GUID, record history identity, or specification ID. Legislations are specified by the legislation 'Short title' attribute. # noqa: E501
This method makes a synchronous HTTP request.
:param GetImpactedSubstancesForSpecificationsRequest body: (required)
:return: GetImpactedSubstancesForSpecificationsResponse
"""
all_params = ['body'] # noqa: E501
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in params['kwargs'].items():
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '{}' to method post_impactedsubstances_specifications".format(key)
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_impactedsubstances_specifications`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/impacted-substances/specifications', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetImpactedSubstancesForSpecificationsResponse', # noqa: E501
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
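# -------------------- hedged example (not in the generated file) --------------------
# Sketch of invoking one of the endpoints above. The ApiClient construction and the
# request-model import follow common swagger-codegen layouts and are assumptions rather
# than definitions from this file, so the sketch is left entirely in comments.
#
# from ansys.grantami.bomanalytics_openapi import ApiClient, models
#
# client = ApiClient(...)                                          # configured with the Granta MI service URL and credentials
# api = ImpactedSubstancesApi(client)
# request = models.GetImpactedSubstancesForMaterialsRequest(...)   # materials + legislations of interest
# response = api.post_impactedsubstances_materials(request)
# # response is a GetImpactedSubstancesForMaterialsResponse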
| 56.373178
| 897
| 0.694197
|
7c442eb13fea04fd0fd573669a1653d110b4d8d6
| 8,122
|
py
|
Python
|
src/oci/core/models/create_boot_volume_backup_details.py
|
Manny27nyc/oci-python-sdk
|
de60b04e07a99826254f7255e992f41772902df7
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 249
|
2017-09-11T22:06:05.000Z
|
2022-03-04T17:09:29.000Z
|
src/oci/core/models/create_boot_volume_backup_details.py
|
Manny27nyc/oci-python-sdk
|
de60b04e07a99826254f7255e992f41772902df7
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 228
|
2017-09-11T23:07:26.000Z
|
2022-03-23T10:58:50.000Z
|
src/oci/core/models/create_boot_volume_backup_details.py
|
Manny27nyc/oci-python-sdk
|
de60b04e07a99826254f7255e992f41772902df7
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 224
|
2017-09-27T07:32:43.000Z
|
2022-03-25T16:55:42.000Z
|
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class CreateBootVolumeBackupDetails(object):
"""
CreateBootVolumeBackupDetails model.
"""
#: A constant which can be used with the type property of a CreateBootVolumeBackupDetails.
#: This constant has a value of "FULL"
TYPE_FULL = "FULL"
#: A constant which can be used with the type property of a CreateBootVolumeBackupDetails.
#: This constant has a value of "INCREMENTAL"
TYPE_INCREMENTAL = "INCREMENTAL"
def __init__(self, **kwargs):
"""
Initializes a new CreateBootVolumeBackupDetails object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param boot_volume_id:
The value to assign to the boot_volume_id property of this CreateBootVolumeBackupDetails.
:type boot_volume_id: str
:param defined_tags:
The value to assign to the defined_tags property of this CreateBootVolumeBackupDetails.
:type defined_tags: dict(str, dict(str, object))
:param display_name:
The value to assign to the display_name property of this CreateBootVolumeBackupDetails.
:type display_name: str
:param freeform_tags:
The value to assign to the freeform_tags property of this CreateBootVolumeBackupDetails.
:type freeform_tags: dict(str, str)
:param type:
The value to assign to the type property of this CreateBootVolumeBackupDetails.
Allowed values for this property are: "FULL", "INCREMENTAL"
:type type: str
"""
self.swagger_types = {
'boot_volume_id': 'str',
'defined_tags': 'dict(str, dict(str, object))',
'display_name': 'str',
'freeform_tags': 'dict(str, str)',
'type': 'str'
}
self.attribute_map = {
'boot_volume_id': 'bootVolumeId',
'defined_tags': 'definedTags',
'display_name': 'displayName',
'freeform_tags': 'freeformTags',
'type': 'type'
}
self._boot_volume_id = None
self._defined_tags = None
self._display_name = None
self._freeform_tags = None
self._type = None
@property
def boot_volume_id(self):
"""
**[Required]** Gets the boot_volume_id of this CreateBootVolumeBackupDetails.
The OCID of the boot volume that needs to be backed up.
:return: The boot_volume_id of this CreateBootVolumeBackupDetails.
:rtype: str
"""
return self._boot_volume_id
@boot_volume_id.setter
def boot_volume_id(self, boot_volume_id):
"""
Sets the boot_volume_id of this CreateBootVolumeBackupDetails.
The OCID of the boot volume that needs to be backed up.
:param boot_volume_id: The boot_volume_id of this CreateBootVolumeBackupDetails.
:type: str
"""
self._boot_volume_id = boot_volume_id
@property
def defined_tags(self):
"""
Gets the defined_tags of this CreateBootVolumeBackupDetails.
Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see `Resource Tags`__.
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:return: The defined_tags of this CreateBootVolumeBackupDetails.
:rtype: dict(str, dict(str, object))
"""
return self._defined_tags
@defined_tags.setter
def defined_tags(self, defined_tags):
"""
Sets the defined_tags of this CreateBootVolumeBackupDetails.
Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see `Resource Tags`__.
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:param defined_tags: The defined_tags of this CreateBootVolumeBackupDetails.
:type: dict(str, dict(str, object))
"""
self._defined_tags = defined_tags
@property
def display_name(self):
"""
Gets the display_name of this CreateBootVolumeBackupDetails.
A user-friendly name for the boot volume backup. Does not have to be unique and it's changeable.
Avoid entering confidential information.
:return: The display_name of this CreateBootVolumeBackupDetails.
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""
Sets the display_name of this CreateBootVolumeBackupDetails.
A user-friendly name for the boot volume backup. Does not have to be unique and it's changeable.
Avoid entering confidential information.
:param display_name: The display_name of this CreateBootVolumeBackupDetails.
:type: str
"""
self._display_name = display_name
@property
def freeform_tags(self):
"""
Gets the freeform_tags of this CreateBootVolumeBackupDetails.
Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see `Resource Tags`__.
Example: `{\"Department\": \"Finance\"}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:return: The freeform_tags of this CreateBootVolumeBackupDetails.
:rtype: dict(str, str)
"""
return self._freeform_tags
@freeform_tags.setter
def freeform_tags(self, freeform_tags):
"""
Sets the freeform_tags of this CreateBootVolumeBackupDetails.
Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see `Resource Tags`__.
Example: `{\"Department\": \"Finance\"}`
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
:param freeform_tags: The freeform_tags of this CreateBootVolumeBackupDetails.
:type: dict(str, str)
"""
self._freeform_tags = freeform_tags
@property
def type(self):
"""
Gets the type of this CreateBootVolumeBackupDetails.
The type of backup to create. If omitted, defaults to incremental.
Allowed values for this property are: "FULL", "INCREMENTAL"
:return: The type of this CreateBootVolumeBackupDetails.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this CreateBootVolumeBackupDetails.
The type of backup to create. If omitted, defaults to incremental.
:param type: The type of this CreateBootVolumeBackupDetails.
:type: str
"""
allowed_values = ["FULL", "INCREMENTAL"]
if not value_allowed_none_or_none_sentinel(type, allowed_values):
raise ValueError(
"Invalid value for `type`, must be None or one of {0}"
.format(allowed_values)
)
self._type = type
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
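# -------------------- hedged usage sketch (not part of the generated model file) --------------------
# Shows how this model is typically passed to the Block Storage client; the OCID below is a
# placeholder and the client construction assumes a standard ~/.oci/config profile.
def _example_create_boot_volume_backup():
    import oci                                           # the SDK that ships this model
    config = oci.config.from_file()                      # default profile from ~/.oci/config
    blockstorage = oci.core.BlockstorageClient(config)
    details = CreateBootVolumeBackupDetails(
        boot_volume_id='ocid1.bootvolume.oc1..example',  # placeholder OCID
        display_name='nightly-backup',
        type='INCREMENTAL',                              # or 'FULL'
    )
    response = blockstorage.create_boot_volume_backup(details)
    return response.data                                 # BootVolumeBackup; e.g. inspect lifecycle_state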
| 34.709402
| 245
| 0.658459
|
ec7a3ffc6f2980abf4a59f37026279654eb0197a
| 1,533
|
py
|
Python
|
calvin/actorstore/systemactors/io/LogWarning.py
|
gabrielcercel/calvin-base
|
c0315f100643230d65aed1745e1c22df3e7a7c2c
|
[
"Apache-2.0"
] | 334
|
2015-06-04T15:14:28.000Z
|
2022-02-09T11:14:17.000Z
|
calvin/actorstore/systemactors/io/LogWarning.py
|
gabrielcercel/calvin-base
|
c0315f100643230d65aed1745e1c22df3e7a7c2c
|
[
"Apache-2.0"
] | 89
|
2015-06-13T19:15:35.000Z
|
2019-12-03T19:23:20.000Z
|
calvin/actorstore/systemactors/io/LogWarning.py
|
gabrielcercel/calvin-base
|
c0315f100643230d65aed1745e1c22df3e7a7c2c
|
[
"Apache-2.0"
] | 112
|
2015-06-06T19:16:54.000Z
|
2020-10-19T01:27:55.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, manage, condition, calvinsys, stateguard
class LogWarning(Actor):
"""
Write data to system log at loglevel "warning"
Input:
data : data to be logged
"""
def exception_handler(self, action_function, args):
# The action 'log' takes a single token
exception_token = args[0]
return action_function(self, "Exception '%s'" % (exception_token,))
@manage(["log"])
def init(self):
self.log = calvinsys.open(self, "log.warning")
@stateguard(lambda self: calvinsys.can_write(self.log))
@condition(action_input=['data'])
def write(self, data):
calvinsys.write(self.log, data)
action_priority = (write, )
requires = ["log.warning"]
test_calvinsys = {'log.warning': {'write': ['a', 'b', 'c', 'd']}}
test_set = [
{
'inports': {'data': ['a', 'b', 'c', 'd']},
}
]
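# -------------------- hedged illustration (not part of the actor file) --------------------
# How this actor is typically wired in a CalvinScript application. Kept as a comment because
# it is not Python, and std.Trigger with its ports is an assumption based on the standard
# actor store rather than something defined in this file.
#
#   src  : std.Trigger(tick=1, data="low disk space")
#   warn : io.LogWarning()
#   src.data > warn.data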
| 29.480769
| 78
| 0.649706
|
543d00868b701549f616e258d78953402d7d63e4
| 552
|
py
|
Python
|
demo/main/urls.py
|
9dev/django-fb-rank
|
2edaf50b6e43231806c3adb9e7ae2a97ddff4006
|
[
"MIT"
] | 2
|
2016-02-03T11:05:57.000Z
|
2020-09-29T22:41:29.000Z
|
demo/main/urls.py
|
9dev/django-fb-rank
|
2edaf50b6e43231806c3adb9e7ae2a97ddff4006
|
[
"MIT"
] | null | null | null |
demo/main/urls.py
|
9dev/django-fb-rank
|
2edaf50b6e43231806c3adb9e7ae2a97ddff4006
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url
from django.views.decorators.cache import cache_page
from main import views
CACHE_TIME = 60
urlpatterns = [
url(
r'^top$',
cache_page(CACHE_TIME)(views.TopItemListView.as_view()),
name='top_item_list'
),
url(
r'^trending$',
cache_page(CACHE_TIME)(views.TrendingItemListView.as_view()),
name='trending_item_list'
),
url(
r'^(?P<slug>.*)',
cache_page(CACHE_TIME)(views.ItemDetailView.as_view()),
name='item_detail'
),
]
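# -------------------- hedged example (not part of the original urls module) --------------------
# Resolving the named routes above, e.g. from a view or a test. The slug value is a placeholder,
# the resulting paths assume these patterns are included at the project root, and `reverse` is
# imported from django.urls on Django 1.10+ (older projects use django.core.urlresolvers).
#
# from django.urls import reverse
# top_url = reverse('top_item_list')                               # e.g. '/top'
# trending_url = reverse('trending_item_list')                     # e.g. '/trending'
# detail_url = reverse('item_detail', kwargs={'slug': 'my-item'})  # e.g. '/my-item'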
| 20.444444
| 69
| 0.619565
|
9d8fcb1399aa5ecdf05ac05b418b817152137b15
| 2,813
|
py
|
Python
|
tests/providers/microsoft/azure/operators/test_file_to_wasb.py
|
wileeam/airflow
|
f46be8152a4d89c57db4ca46f5b3339e4876b723
|
[
"Apache-2.0"
] | 8
|
2017-04-20T16:15:44.000Z
|
2020-10-11T13:44:10.000Z
|
tests/providers/microsoft/azure/operators/test_file_to_wasb.py
|
devlocalca/airflow
|
58c3542ed25061320ce61dbe0adf451a44c738dd
|
[
"Apache-2.0"
] | 219
|
2017-03-15T18:40:16.000Z
|
2022-02-28T22:52:43.000Z
|
tests/providers/microsoft/azure/operators/test_file_to_wasb.py
|
devlocalca/airflow
|
58c3542ed25061320ce61dbe0adf451a44c738dd
|
[
"Apache-2.0"
] | 3
|
2016-07-14T21:51:10.000Z
|
2020-10-12T13:26:36.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import datetime
import unittest
import mock
from airflow import DAG
from airflow.providers.microsoft.azure.operators.file_to_wasb import FileToWasbOperator
class TestFileToWasbOperator(unittest.TestCase):
_config = {
'file_path': 'file',
'container_name': 'container',
'blob_name': 'blob',
'wasb_conn_id': 'wasb_default',
'retries': 3,
}
def setUp(self):
args = {
'owner': 'airflow',
'start_date': datetime.datetime(2017, 1, 1)
}
self.dag = DAG('test_dag_id', default_args=args)
def test_init(self):
operator = FileToWasbOperator(
task_id='wasb_operator',
dag=self.dag,
**self._config
)
self.assertEqual(operator.file_path, self._config['file_path'])
self.assertEqual(operator.container_name,
self._config['container_name'])
self.assertEqual(operator.blob_name, self._config['blob_name'])
self.assertEqual(operator.wasb_conn_id, self._config['wasb_conn_id'])
self.assertEqual(operator.load_options, {})
self.assertEqual(operator.retries, self._config['retries'])
operator = FileToWasbOperator(
task_id='wasb_operator',
dag=self.dag,
load_options={'timeout': 2},
**self._config
)
self.assertEqual(operator.load_options, {'timeout': 2})
@mock.patch('airflow.providers.microsoft.azure.operators.file_to_wasb.WasbHook',
autospec=True)
def test_execute(self, mock_hook):
mock_instance = mock_hook.return_value
operator = FileToWasbOperator(
task_id='wasb_sensor',
dag=self.dag,
load_options={'timeout': 2},
**self._config
)
operator.execute(None)
mock_instance.load_file.assert_called_once_with(
'file', 'container', 'blob', timeout=2
)
if __name__ == '__main__':
unittest.main()
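# -------------------- hedged example (not part of the test module) --------------------
# Sketch of how FileToWasbOperator is typically declared inside a DAG, reusing the imports at the
# top of this file; the file path, container, blob name and schedule are placeholders, not values
# used by these tests.
def _example_dag():
    with DAG('upload_to_wasb_example',
             default_args={'owner': 'airflow', 'start_date': datetime.datetime(2020, 1, 1)},
             schedule_interval=None) as dag:
        FileToWasbOperator(
            task_id='upload_file',
            file_path='/tmp/report.csv',      # local file to upload
            container_name='reports',         # target blob container
            blob_name='report.csv',           # name of the blob to create
            wasb_conn_id='wasb_default',
            load_options={'timeout': 120},    # passed through to WasbHook.load_file
        )
    return dag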
| 32.709302
| 87
| 0.653395
|
e1df91205a6f1731d71728a6b49de96d44fe75f3
| 1,121
|
py
|
Python
|
brawlbracket/routes/login.py
|
TheLastBanana/BrawlBracket
|
1cad26b6499352b1b282388f4f76bfb4b2b6b4fe
|
[
"BSD-3-Clause"
] | null | null | null |
brawlbracket/routes/login.py
|
TheLastBanana/BrawlBracket
|
1cad26b6499352b1b282388f4f76bfb4b2b6b4fe
|
[
"BSD-3-Clause"
] | null | null | null |
brawlbracket/routes/login.py
|
TheLastBanana/BrawlBracket
|
1cad26b6499352b1b282388f4f76bfb4b2b6b4fe
|
[
"BSD-3-Clause"
] | null | null | null |
import re
from flask import session
from flask import redirect
from flask import url_for
from flask import g
from brawlbracket.app import app
from brawlbracket.app import oid
from brawlbracket import usermanager as um
print('Registering login routes...')
_steam_id_re = re.compile('steamcommunity.com/openid/id/(.*?)$')
@app.route('/login/')
@oid.loginhandler
def login():
if g.user is None:
if g.userId is not None:
session.pop('userId')
return oid.try_login('http://steamcommunity.com/openid')
else:
return redirect(oid.get_next_url())
@app.route('/logout/')
def logout():
session.pop('userId')
return redirect(oid.get_next_url())
@oid.after_login
def createOrLogin(resp):
    match = _steam_id_re.search(resp.identity_url)
    steam_id = int(match.group(1))
    user = um.getUserBySteamId(steam_id)
    if user is None:
        user = um.createUser(steam_id)
if user is None:
print('Couldn\'t make user!')
return
print(user) # DEBUG
session['userId'] = user.id
return redirect(oid.get_next_url())
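# -------------------- hedged illustration (not part of the original module) --------------------
# What _steam_id_re extracts from the OpenID identity URL returned after login; the SteamID
# below is a made-up example value.
#
#   >>> _steam_id_re.search('https://steamcommunity.com/openid/id/76561197960287930').group(1)
#   '76561197960287930'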
| 23.851064
| 64
| 0.658341
|
34904a224e3b611ade0ef3636caf3759f6e980b4
| 34,421
|
py
|
Python
|
gamestonk_terminal/stocks/comparison_analysis/ca_controller.py
|
minhhoang1023/GamestonkTerminal
|
195dc19b491052df080178c0cc6a9d535a91a704
|
[
"MIT"
] | null | null | null |
gamestonk_terminal/stocks/comparison_analysis/ca_controller.py
|
minhhoang1023/GamestonkTerminal
|
195dc19b491052df080178c0cc6a9d535a91a704
|
[
"MIT"
] | null | null | null |
gamestonk_terminal/stocks/comparison_analysis/ca_controller.py
|
minhhoang1023/GamestonkTerminal
|
195dc19b491052df080178c0cc6a9d535a91a704
|
[
"MIT"
] | null | null | null |
"""Comparison Analysis Controller Module"""
__docformat__ = "numpy"
import argparse
import random
from typing import List
from datetime import datetime, timedelta
import yfinance as yf
from prompt_toolkit.completion import NestedCompleter
from gamestonk_terminal.rich_config import console
from gamestonk_terminal.parent_classes import BaseController
from gamestonk_terminal import feature_flags as gtff
from gamestonk_terminal.helper_funcs import (
check_non_negative,
check_positive,
parse_known_args_and_warn,
valid_date,
EXPORT_ONLY_RAW_DATA_ALLOWED,
)
from gamestonk_terminal.menu import session
from gamestonk_terminal.portfolio.portfolio_optimization import po_controller
from gamestonk_terminal.stocks.comparison_analysis import (
finbrain_view,
finnhub_model,
finviz_compare_model,
finviz_compare_view,
marketwatch_view,
polygon_model,
yahoo_finance_view,
yahoo_finance_model,
)
# pylint: disable=E1121,C0302,R0904
# TODO: HELP WANTED! This controller still has some view functionality that should be
# refactored in order to implement an API wrapper. Use the discovery controller
# as an example.
class ComparisonAnalysisController(BaseController):
"""Comparison Analysis Controller class"""
CHOICES_COMMANDS = [
"ticker",
"getpoly",
"getfinnhub",
"getfinviz",
"set",
"add",
"rmv",
"historical",
"hcorr",
"volume",
"income",
"balance",
"cashflow",
"sentiment",
"scorr",
"overview",
"valuation",
"financial",
"ownership",
"performance",
"technical",
"tsne",
]
CHOICES_MENUS = [
"po",
]
PATH = "/stocks/ca/"
def __init__(self, similar: List[str] = None, queue: List[str] = None):
"""Constructor"""
super().__init__(queue)
self.ticker = ""
self.user = ""
if similar:
self.similar = similar
if len(similar) == 1:
self.ticker = self.similar[0].upper()
else:
self.similar = []
if session and gtff.USE_PROMPT_TOOLKIT:
choices: dict = {c: {} for c in self.controller_choices}
self.completer = NestedCompleter.from_nested_dict(choices)
def print_help(self):
"""Print help"""
has_ticker_start = "" if self.ticker else "[unvl]"
has_ticker_end = "" if self.ticker else "[/unvl]"
has_similar_start = "" if self.similar and len(self.similar) > 1 else "[unvl]"
has_similar_end = "" if self.similar and len(self.similar) > 1 else "[/unvl]"
help_text = f"""[cmds]
ticker set ticker to get similar companies from[/cmds]
[param]Ticker to get similar companies from: [/param]{self.ticker}
[cmds]{has_ticker_start}
tsne run TSNE on all SP500 stocks and returns closest tickers
getpoly get similar stocks from polygon API
getfinnhub get similar stocks from finnhub API
getfinviz get similar stocks from finviz API{has_ticker_end}
set reset and set similar companies
add add more similar companies
rmv remove similar companies individually or all[/cmds]
{has_similar_start}
[param]Similar Companies: [/param]{', '.join(self.similar) if self.similar else ''}
[src][Yahoo Finance][/src]
historical historical price data comparison
hcorr historical price correlation
volume historical volume data comparison
[src][Market Watch][/src]
income income financials comparison
balance balance financials comparison
cashflow cashflow comparison
[src][Finbrain][/src]
sentiment sentiment analysis comparison
scorr sentiment correlation
[src][Finviz][/src]
overview brief overview comparison
valuation brief valuation comparison
financial brief financial comparison
ownership brief ownership comparison
performance brief performance comparison
technical brief technical comparison
[menu]> po portfolio optimization for selected tickers[/menu]{has_similar_end}
"""
console.print(text=help_text, menu="Stocks - Comparison Analysis")
def custom_reset(self):
"""Class specific component of reset command"""
if self.similar:
return ["stocks", "ca", f"set {','.join(self.similar)}"]
return []
# TODO: Figure out if this function is actually needed here
def call_ticker(self, other_args: List[str]):
"""Process ticker command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="ticker",
description="""Set ticker to extract similar from""",
)
parser.add_argument(
"-t",
"--ticker",
dest="ticker",
type=str,
required="-h" not in other_args,
help="Ticker get similar tickers from",
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-t")
ns_parser = parse_known_args_and_warn(parser, other_args)
if ns_parser:
if "," in ns_parser.ticker:
console.print("Only one ticker must be selected!")
else:
stock_data = yf.download(
ns_parser.ticker,
progress=False,
)
if stock_data.empty:
console.print(
f"The ticker '{ns_parser.ticker}' provided does not exist!"
)
else:
self.ticker = ns_parser.ticker.upper()
console.print("")
def call_tsne(self, other_args: List[str]):
"""Process tsne command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="tsne",
description="""Get similar companies to compare with using sklearn TSNE.""",
)
parser.add_argument(
"-r",
"--learnrate",
default=200,
dest="lr",
type=check_non_negative,
help="TSNE Learning rate. Typical values are between 50 and 200",
)
parser.add_argument(
"-l",
"--limit",
default=10,
dest="limit",
type=check_positive,
help="Limit of stocks to retrieve. The subsample will occur randomly.",
)
parser.add_argument(
"-p", "--no_plot", action="store_true", default=False, dest="no_plot"
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-l")
ns_parser = parse_known_args_and_warn(parser, other_args)
if ns_parser:
if self.ticker:
self.similar = yahoo_finance_model.get_sp500_comps_tsne(
self.ticker,
lr=ns_parser.lr,
no_plot=ns_parser.no_plot,
num_tickers=ns_parser.limit,
)
self.similar = [self.ticker] + self.similar
console.print(
f"[ML] Similar Companies: {', '.join(self.similar)}", "\n"
)
else:
console.print(
"You need to 'set' a ticker to get similar companies from first!"
)
def call_getfinviz(self, other_args: List[str]):
"""Process getfinviz command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="getfinviz",
description="""Get similar companies from finviz to compare with.""",
)
parser.add_argument(
"-n",
"--nocountry",
action="store_true",
default=False,
dest="b_no_country",
help="Similar stocks from finviz using only Industry and Sector.",
)
parser.add_argument(
"-l",
"--limit",
default=10,
dest="limit",
type=check_positive,
help="Limit of stocks to retrieve.",
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-l")
ns_parser = parse_known_args_and_warn(parser, other_args)
if ns_parser:
if self.ticker:
if ns_parser.b_no_country:
compare_list = ["Sector", "Industry"]
else:
compare_list = ["Sector", "Industry", "Country"]
self.similar, self.user = finviz_compare_model.get_similar_companies(
self.ticker, compare_list
)
if self.ticker.upper() in self.similar:
self.similar.remove(self.ticker.upper())
if len(self.similar) > ns_parser.limit:
random.shuffle(self.similar)
self.similar = sorted(self.similar[: ns_parser.limit])
console.print(
f"The limit of stocks to compare are {ns_parser.limit}. The subsample will occur randomly.\n",
)
if self.similar:
self.similar = [self.ticker] + self.similar
console.print(
f"[{self.user}] Similar Companies: {', '.join(self.similar)}",
"\n",
)
else:
console.print(
"You need to 'set' a ticker to get similar companies from first!"
)
def call_getpoly(self, other_args: List[str]):
"""Process get command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="getpoly",
description="""Get similar companies from polygon to compare with.""",
)
parser.add_argument(
"-u",
"--us_only",
action="store_true",
default=False,
dest="us_only",
help="Show only stocks from the US stock exchanges",
)
parser.add_argument(
"-l",
"--limit",
default=10,
dest="limit",
type=check_positive,
help="Limit of stocks to retrieve.",
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-l")
ns_parser = parse_known_args_and_warn(parser, other_args)
if ns_parser:
if self.ticker:
self.similar, self.user = polygon_model.get_similar_companies(
self.ticker, ns_parser.us_only
)
if self.ticker.upper() in self.similar:
self.similar.remove(self.ticker.upper())
if len(self.similar) > ns_parser.limit:
random.shuffle(self.similar)
self.similar = sorted(self.similar[: ns_parser.limit])
console.print(
f"The limit of stocks to compare are {ns_parser.limit}. The subsample will occur randomly.\n",
)
self.similar = [self.ticker] + self.similar
if self.similar:
console.print(
f"[{self.user}] Similar Companies: {', '.join(self.similar)}",
"\n",
)
else:
console.print(
"You need to 'set' a ticker to get similar companies from first!"
)
def call_getfinnhub(self, other_args: List[str]):
"""Process get command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="getfinnhub",
description="""Get similar companies from finnhub to compare with.""",
)
parser.add_argument(
"-l",
"--limit",
default=10,
dest="limit",
type=check_positive,
help="Limit of stocks to retrieve.",
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-l")
ns_parser = parse_known_args_and_warn(parser, other_args)
if ns_parser:
if self.ticker:
self.similar, self.user = finnhub_model.get_similar_companies(
self.ticker
)
if self.ticker.upper() in self.similar:
self.similar.remove(self.ticker.upper())
if len(self.similar) > ns_parser.limit:
random.shuffle(self.similar)
self.similar = sorted(self.similar[: ns_parser.limit])
console.print(
f"The limit of stocks to compare are {ns_parser.limit}. The subsample will occur randomly.\n",
)
self.similar = [self.ticker] + self.similar
if self.similar:
console.print(
f"[{self.user}] Similar Companies: {', '.join(self.similar)}",
"\n",
)
else:
console.print(
"You need to 'set' a ticker to get similar companies from first!"
)
def call_add(self, other_args: List[str]):
"""Process add command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="add",
description="""Add similar tickers to compare with.""",
)
parser.add_argument(
"-s",
"--similar",
dest="l_similar",
type=lambda s: [str(item).upper() for item in s.split(",")],
default=[],
help="Tickers to add to similar list",
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-s")
ns_parser = parse_known_args_and_warn(parser, other_args)
if ns_parser:
if self.similar:
self.similar = list(set(self.similar + ns_parser.l_similar))
else:
self.similar = ns_parser.l_similar
self.user = "Custom"
console.print(
f"[{self.user}] Similar Companies: {', '.join(self.similar)}", "\n"
)
def call_rmv(self, other_args: List[str]):
"""Process rmv command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="rmv",
description="""Remove similar tickers to compare with.""",
)
parser.add_argument(
"-s",
"--similar",
dest="l_similar",
type=lambda s: [str(item).upper() for item in s.split(",")],
default=[],
help="Tickers to remove from similar list",
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-s")
ns_parser = parse_known_args_and_warn(parser, other_args)
if ns_parser:
if ns_parser.l_similar:
for symbol in ns_parser.l_similar:
if symbol in self.similar:
self.similar.remove(symbol)
else:
console.print(
f"Ticker {symbol} does not exist in similar list to be removed"
)
console.print(
f"[{self.user}] Similar Companies: {', '.join(self.similar)}"
)
else:
self.similar = []
console.print("")
self.user = "Custom"
def call_set(self, other_args: List[str]):
"""Process set command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="set",
description="""Select similar companies to compare with.""",
)
parser.add_argument(
"-s",
"--similar",
dest="l_similar",
type=lambda s: [str(item).upper() for item in s.split(",")],
default=[],
help="similar companies to compare with.",
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-s")
ns_parser = parse_known_args_and_warn(parser, other_args)
if ns_parser:
self.similar = list(set(ns_parser.l_similar))
self.user = "Custom"
console.print(
f"[{self.user}] Similar Companies: {', '.join(self.similar)}", "\n"
)
def call_historical(self, other_args: List[str]):
"""Process historical command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="historical",
description="""Historical price comparison between similar companies.""",
)
parser.add_argument(
"-t",
"--type",
action="store",
dest="type_candle",
type=str,
choices=["o", "h", "l", "c", "a"],
default="a", # in case it's adjusted close
help="Candle data to use: o-open, h-high, l-low, c-close, a-adjusted close.",
)
parser.add_argument(
"-n",
"--no-scale",
action="store_false",
dest="no_scale",
default=False,
help="Flag to not put all prices on same 0-1 scale",
)
parser.add_argument(
"-s",
"--start",
type=valid_date,
default=(datetime.now() - timedelta(days=366)).strftime("%Y-%m-%d"),
dest="start",
help="The starting date (format YYYY-MM-DD) of the stock",
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-t")
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
if self.similar and len(self.similar) > 1:
yahoo_finance_view.display_historical(
similar_tickers=self.similar,
start=ns_parser.start.strftime("%Y-%m-%d"),
candle_type=ns_parser.type_candle,
normalize=not ns_parser.no_scale,
export=ns_parser.export,
)
else:
console.print(
"Please make sure there are more than 1 similar tickers selected. \n"
)
def call_hcorr(self, other_args: List[str]):
"""Process historical correlation command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="hcorr",
description=""" Correlation heatmap based on historical price comparison between similar
companies.
""",
)
parser.add_argument(
"-t",
"--type",
action="store",
dest="type_candle",
type=str,
choices=["o", "h", "l", "c", "a"],
default="a", # in case it's adjusted close
help="Candle data to use: o-open, h-high, l-low, c-close, a-adjusted close.",
)
parser.add_argument(
"-s",
"--start",
type=valid_date,
default=(datetime.now() - timedelta(days=366)).strftime("%Y-%m-%d"),
dest="start",
help="The starting date (format YYYY-MM-DD) of the stock",
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-t")
ns_parser = parse_known_args_and_warn(parser, other_args)
if ns_parser:
if self.similar and len(self.similar) > 1:
yahoo_finance_view.display_correlation(
similar_tickers=self.similar,
start=ns_parser.start.strftime("%Y-%m-%d"),
candle_type=ns_parser.type_candle,
)
else:
console.print("Please make sure there are similar tickers selected. \n")
def call_income(self, other_args: List[str]):
"""Process income command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="income",
description="""
                Prints either yearly or quarterly income statement of the company, and compares
it against similar companies.
""",
)
parser.add_argument(
"-q",
"--quarter",
action="store_true",
default=False,
dest="b_quarter",
help="Quarter financial data flag.",
)
parser.add_argument(
"-t",
"--timeframe",
dest="s_timeframe",
type=str,
default=None,
help="Specify yearly/quarterly timeframe. Default is last.",
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-t")
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
marketwatch_view.display_income_comparison(
similar=self.similar,
timeframe=ns_parser.s_timeframe,
quarter=ns_parser.b_quarter,
)
def call_volume(self, other_args: List[str]):
"""Process volume command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="volume",
description="""Historical volume comparison between similar companies.
""",
)
parser.add_argument(
"-s",
"--start",
type=valid_date,
default=(datetime.now() - timedelta(days=366)).strftime("%Y-%m-%d"),
dest="start",
help="The starting date (format YYYY-MM-DD) of the stock",
)
if other_args and "-" not in other_args[0][0]:
other_args.insert(0, "-s")
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
if self.similar and len(self.similar) > 1:
yahoo_finance_view.display_volume(
similar_tickers=self.similar,
start=ns_parser.start.strftime("%Y-%m-%d"),
export=ns_parser.export,
)
else:
console.print("Please make sure there are similar tickers selected. \n")
def call_balance(self, other_args: List[str]):
"""Process balance command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="balance",
description="""
                Prints either yearly or quarterly balance statement of the company, and compares
it against similar companies.
""",
)
parser.add_argument(
"-q",
"--quarter",
action="store_true",
default=False,
dest="b_quarter",
help="Quarter financial data flag.",
)
parser.add_argument(
"-t",
"--timeframe",
dest="s_timeframe",
type=str,
default=None,
help="Specify yearly/quarterly timeframe. Default is last.",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
marketwatch_view.display_balance_comparison(
similar=self.similar,
timeframe=ns_parser.s_timeframe,
quarter=ns_parser.b_quarter,
)
def call_cashflow(self, other_args: List[str]):
"""Process cashflow command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="cashflow",
description="""
                Prints either yearly or quarterly cashflow statement of the company, and compares
it against similar companies.
""",
)
parser.add_argument(
"-q",
"--quarter",
action="store_true",
default=False,
dest="b_quarter",
help="Quarter financial data flag.",
)
parser.add_argument(
"-t",
"--timeframe",
dest="s_timeframe",
type=str,
default=None,
help="Specify yearly/quarterly timeframe. Default is last.",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
marketwatch_view.display_cashflow_comparison(
similar=self.similar,
timeframe=ns_parser.s_timeframe,
quarter=ns_parser.b_quarter,
)
def call_sentiment(self, other_args: List[str]):
"""Process sentiment command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="sentiment_compare",
description="""
FinBrain's sentiment comparison across similar tickers.
""",
)
parser.add_argument(
"-r",
"--raw",
action="store_true",
default=False,
help="Display raw sentiment data",
dest="raw",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
if self.similar and len(self.similar) > 1:
finbrain_view.display_sentiment_compare(
similar=self.similar,
raw=ns_parser.raw,
export=ns_parser.export,
)
else:
console.print(
"Please make sure there are more than 1 similar tickers selected. \n"
)
def call_scorr(self, other_args: List[str]):
"""Process sentiment correlation command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="sentiment_compare",
description="""
FinBrain's sentiment correlation across similar tickers.
""",
)
parser.add_argument(
"-r",
"--raw",
action="store_true",
default=False,
help="Display raw sentiment data",
dest="raw",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
if self.similar and len(self.similar) > 1:
finbrain_view.display_sentiment_correlation(
similar=self.similar,
raw=ns_parser.raw,
export=ns_parser.export,
)
else:
console.print("Please make sure there are similar tickers selected. \n")
def call_overview(self, other_args: List[str]):
"""Process overview command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="overview",
description="""
Prints screener data of similar companies. [Source: Finviz]
""",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
if self.similar and len(self.similar) > 1:
finviz_compare_view.screener(
similar=self.similar,
data_type="overview",
export=ns_parser.export,
)
else:
console.print(
"Please make sure there are more than 1 similar tickers selected. \n"
)
def call_valuation(self, other_args: List[str]):
"""Process valuation command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="valuation",
description="""
Prints screener data of similar companies. [Source: Finviz]
""",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
if self.similar and len(self.similar) > 1:
finviz_compare_view.screener(
similar=self.similar,
data_type="valuation",
export=ns_parser.export,
)
else:
console.print(
"Please make sure there are more than 1 similar tickers selected. \n"
)
def call_financial(self, other_args: List[str]):
"""Process financial command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="financial",
description="""
Prints screener data of similar companies. [Source: Finviz]
""",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
if self.similar and len(self.similar) > 1:
finviz_compare_view.screener(
similar=self.similar,
data_type="financial",
export=ns_parser.export,
)
else:
console.print(
"Please make sure there are more than 1 similar tickers selected. \n"
)
def call_ownership(self, other_args: List[str]):
"""Process ownership command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="ownership",
description="""
Prints screener data of similar companies. [Source: Finviz]
""",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
if self.similar and len(self.similar) > 1:
finviz_compare_view.screener(
similar=self.similar,
data_type="ownership",
export=ns_parser.export,
)
else:
console.print(
"Please make sure there are more than 1 similar tickers selected. \n"
)
def call_performance(self, other_args: List[str]):
"""Process performance command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="performance",
description="""
Prints screener data of similar companies. [Source: Finviz]
""",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
if self.similar and len(self.similar) > 1:
finviz_compare_view.screener(
similar=self.similar,
data_type="performance",
export=ns_parser.export,
)
else:
console.print(
"Please make sure there are more than 1 similar tickers selected. \n"
)
def call_technical(self, other_args: List[str]):
"""Process technical command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="technical",
description="""
Prints screener data of similar companies. [Source: Finviz]
""",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
if self.similar and len(self.similar) > 1:
finviz_compare_view.screener(
similar=self.similar,
data_type="technical",
export=ns_parser.export,
)
else:
console.print(
"Please make sure there are more than 1 similar tickers selected. \n"
)
def call_po(self, _):
"""Call the portfolio optimization menu with selected tickers"""
if self.similar and len(self.similar) > 1:
self.queue = po_controller.PortfolioOptimization(
self.similar, self.queue
).menu(custom_path_menu_above="/portfolio/")
else:
console.print(
"Please make sure there are more than 1 similar tickers selected. \n"
)
| 36.080713
| 118
| 0.537056
|
4248b1a29d7c5bf8468a3c313bf32b6c81d8a454
| 1,518
|
py
|
Python
|
test/test_zhengshiban_dialog.py
|
tudoulei/zhengshiban
|
a6acd1b21605c25dc4f6d8a00b8297038d913393
|
[
"Unlicense"
] | null | null | null |
test/test_zhengshiban_dialog.py
|
tudoulei/zhengshiban
|
a6acd1b21605c25dc4f6d8a00b8297038d913393
|
[
"Unlicense"
] | null | null | null |
test/test_zhengshiban_dialog.py
|
tudoulei/zhengshiban
|
a6acd1b21605c25dc4f6d8a00b8297038d913393
|
[
"Unlicense"
] | null | null | null |
# coding=utf-8
"""Dialog test.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'zhengshiban'
__date__ = '2019-11-07'
__copyright__ = 'Copyright 2019, zhengshiban'
import unittest
from qgis.PyQt.QtWidgets import QDialogButtonBox, QDialog
from zhengshiban_dialog import zhengshibanDialog
from utilities import get_qgis_app
QGIS_APP = get_qgis_app()
class zhengshibanDialogTest(unittest.TestCase):
"""Test dialog works."""
def setUp(self):
"""Runs before each test."""
self.dialog = zhengshibanDialog(None)
def tearDown(self):
"""Runs after each test."""
self.dialog = None
def test_dialog_ok(self):
"""Test we can click OK."""
button = self.dialog.button_box.button(QDialogButtonBox.Ok)
button.click()
result = self.dialog.result()
self.assertEqual(result, QDialog.Accepted)
def test_dialog_cancel(self):
"""Test we can click cancel."""
button = self.dialog.button_box.button(QDialogButtonBox.Cancel)
button.click()
result = self.dialog.result()
self.assertEqual(result, QDialog.Rejected)
if __name__ == "__main__":
suite = unittest.makeSuite(zhengshibanDialogTest)
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite)
| 27.107143
| 78
| 0.689723
|
94358f49d733be6e7472ff4216fc5818c31924da
| 39,693
|
py
|
Python
|
vyper/parser/expr.py
|
MichielCuijpers/vyper
|
54b8dc06f258eb0e1815ea222eb5f875aae443f2
|
[
"MIT"
] | null | null | null |
vyper/parser/expr.py
|
MichielCuijpers/vyper
|
54b8dc06f258eb0e1815ea222eb5f875aae443f2
|
[
"MIT"
] | 1
|
2018-07-17T08:14:44.000Z
|
2018-07-17T08:14:44.000Z
|
vyper/parser/expr.py
|
MichielCuijpers/vyper
|
54b8dc06f258eb0e1815ea222eb5f875aae443f2
|
[
"MIT"
] | 1
|
2018-09-11T11:47:13.000Z
|
2018-09-11T11:47:13.000Z
|
import ast
from vyper.exceptions import (
ConstancyViolationException,
InvalidLiteralException,
NonPayableViolationException,
StructureException,
TypeMismatchException,
VariableDeclarationException,
FunctionDeclarationException,
ParserException
)
from .parser_utils import LLLnode
from .parser_utils import (
getpos,
unwrap_location,
get_original_if_0_prefixed,
get_number_as_fraction,
add_variable_offset,
)
from vyper.utils import (
MemoryPositions,
SizeLimits,
bytes_to_int,
string_to_bytes,
DECIMAL_DIVISOR,
checksum_encode,
is_varname_valid,
)
from vyper.types import (
BaseType,
ByteArrayType,
ContractType,
ListType,
MappingType,
NullType,
StructType,
TupleType,
)
from vyper.types import (
get_size_of_type,
is_base_type,
)
from vyper.types import (
are_units_compatible,
is_numeric_type,
combine_units
)
class Expr(object):
# TODO: Once other refactors are made reevaluate all inline imports
def __init__(self, expr, context):
self.expr = expr
self.context = context
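        # Dispatch table mapping supported AST node classes to the methods that lower them to LLL nodes.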
self.expr_table = {
LLLnode: self.get_expr,
ast.Num: self.number,
ast.Str: self.string,
ast.NameConstant: self.constants,
ast.Name: self.variables,
ast.Attribute: self.attribute,
ast.Subscript: self.subscript,
ast.BinOp: self.arithmetic,
ast.Compare: self.compare,
ast.BoolOp: self.boolean_operations,
ast.UnaryOp: self.unary_operations,
ast.Call: self.call,
ast.List: self.list_literals,
ast.Dict: self.struct_literals,
ast.Tuple: self.tuple_literals,
}
expr_type = self.expr.__class__
if expr_type in self.expr_table:
self.lll_node = self.expr_table[expr_type]()
else:
raise Exception("Unsupported operator: %r" % ast.dump(self.expr))
def get_expr(self):
return self.expr
def number(self):
orignum = get_original_if_0_prefixed(self.expr, self.context)
if orignum is None and isinstance(self.expr.n, int):
# Literal becomes int128
if SizeLimits.in_bounds('int128', self.expr.n):
return LLLnode.from_list(self.expr.n, typ=BaseType('int128', None, is_literal=True), pos=getpos(self.expr))
# Literal is large enough, becomes uint256.
elif SizeLimits.in_bounds('uint256', self.expr.n):
return LLLnode.from_list(self.expr.n, typ=BaseType('uint256', None, is_literal=True), pos=getpos(self.expr))
else:
raise InvalidLiteralException("Number out of range: " + str(self.expr.n), self.expr)
elif isinstance(self.expr.n, float):
numstring, num, den = get_number_as_fraction(self.expr, self.context)
if not (SizeLimits.MINNUM * den < num < SizeLimits.MAXNUM * den):
raise InvalidLiteralException("Number out of range: " + numstring, self.expr)
if DECIMAL_DIVISOR % den:
raise InvalidLiteralException("Too many decimal places: " + numstring, self.expr)
return LLLnode.from_list(num * DECIMAL_DIVISOR // den, typ=BaseType('decimal', None), pos=getpos(self.expr))
# Binary literal.
elif orignum[:2] == '0b':
str_val = orignum[2:]
total_bits = len(orignum[2:])
total_bits = total_bits if total_bits % 8 == 0 else total_bits + 8 - (total_bits % 8) # ceil8 to get byte length.
if len(orignum[2:]) != total_bits: # Support only full formed bit definitions.
raise InvalidLiteralException("Bit notation requires a multiple of 8 bits / 1 byte. {} bit(s) are missing.".format(total_bits - len(orignum[2:])), self.expr)
byte_len = int(total_bits / 8)
placeholder = self.context.new_placeholder(ByteArrayType(byte_len))
seq = []
seq.append(['mstore', placeholder, byte_len])
for i in range(0, total_bits, 256):
section = str_val[i:i + 256]
int_val = int(section, 2) << (256 - len(section)) # bytes are right padded.
seq.append(
['mstore', ['add', placeholder, i + 32], int_val])
return LLLnode.from_list(['seq'] + seq + [placeholder],
typ=ByteArrayType(byte_len), location='memory', pos=getpos(self.expr), annotation='Create ByteArray (Binary literal): %s' % str_val)
elif len(orignum) == 42:
if checksum_encode(orignum) != orignum:
raise InvalidLiteralException("Address checksum mismatch. If you are sure this is the "
"right address, the correct checksummed form is: " +
checksum_encode(orignum), self.expr)
return LLLnode.from_list(self.expr.n, typ=BaseType('address', is_literal=True), pos=getpos(self.expr))
elif len(orignum) == 66:
return LLLnode.from_list(self.expr.n, typ=BaseType('bytes32', is_literal=True), pos=getpos(self.expr))
else:
raise InvalidLiteralException("Cannot read 0x value with length %d. Expecting 42 (address incl 0x) or 66 (bytes32 incl 0x)"
% len(orignum), self.expr)
# Byte array literals
def string(self):
bytez, bytez_length = string_to_bytes(self.expr.s)
placeholder = self.context.new_placeholder(ByteArrayType(bytez_length))
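        # In-memory byte arrays are laid out as a 32-byte length word followed by the data, right-padded to 32-byte words.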
seq = []
seq.append(['mstore', placeholder, bytez_length])
for i in range(0, len(bytez), 32):
seq.append(['mstore', ['add', placeholder, i + 32], bytes_to_int((bytez + b'\x00' * 31)[i: i + 32])])
return LLLnode.from_list(['seq'] + seq + [placeholder],
typ=ByteArrayType(bytez_length), location='memory', pos=getpos(self.expr), annotation='Create ByteArray: %s' % bytez)
# True, False, None constants
def constants(self):
if self.expr.value is True:
return LLLnode.from_list(1, typ='bool', pos=getpos(self.expr))
elif self.expr.value is False:
return LLLnode.from_list(0, typ='bool', pos=getpos(self.expr))
elif self.expr.value is None:
return LLLnode.from_list(None, typ=NullType(), pos=getpos(self.expr))
else:
raise Exception("Unknown name constant: %r" % self.expr.value.value)
# Variable names
def variables(self):
constants = {
'ZERO_ADDRESS': LLLnode.from_list([0], typ=BaseType('address', None, is_literal=True), pos=getpos(self.expr)),
'MAX_INT128': LLLnode.from_list(['mload', MemoryPositions.MAXNUM], typ=BaseType('int128', None, is_literal=True), pos=getpos(self.expr)),
'MIN_INT128': LLLnode.from_list(['mload', MemoryPositions.MINNUM], typ=BaseType('int128', None, is_literal=True), pos=getpos(self.expr)),
'MAX_DECIMAL': LLLnode.from_list(['mload', MemoryPositions.MAXDECIMAL], typ=BaseType('decimal', None, is_literal=True), pos=getpos(self.expr)),
'MIN_DECIMAL': LLLnode.from_list(['mload', MemoryPositions.MINDECIMAL], typ=BaseType('decimal', None, is_literal=True), pos=getpos(self.expr)),
'MAX_UINT256': LLLnode.from_list([2**256 - 1], typ=BaseType('uint256', None, is_literal=True), pos=getpos(self.expr)),
}
if self.expr.id == 'self':
return LLLnode.from_list(['address'], typ='address', pos=getpos(self.expr))
elif self.expr.id in self.context.vars:
var = self.context.vars[self.expr.id]
return LLLnode.from_list(var.pos, typ=var.typ, location='memory', pos=getpos(self.expr), annotation=self.expr.id, mutable=var.mutable)
elif self.expr.id in constants:
return constants[self.expr.id]
else:
raise VariableDeclarationException("Undeclared variable: " + self.expr.id, self.expr)
# x.y or x[5]
def attribute(self):
# x.balance: balance of address x
if self.expr.attr == 'balance':
addr = Expr.parse_value_expr(self.expr.value, self.context)
if not is_base_type(addr.typ, 'address'):
raise TypeMismatchException("Type mismatch: balance keyword expects an address as input", self.expr)
return LLLnode.from_list(['balance', addr], typ=BaseType('uint256', {'wei': 1}), location=None, pos=getpos(self.expr))
# x.codesize: codesize of address x
elif self.expr.attr == 'codesize' or self.expr.attr == 'is_contract':
addr = Expr.parse_value_expr(self.expr.value, self.context)
if not is_base_type(addr.typ, 'address'):
raise TypeMismatchException("Type mismatch: codesize keyword expects an address as input", self.expr)
if self.expr.attr == 'codesize':
eval_code = ['extcodesize', addr]
output_type = 'int128'
else:
eval_code = ['gt', ['extcodesize', addr], 0]
output_type = 'bool'
return LLLnode.from_list(eval_code, typ=BaseType(output_type), location=None, pos=getpos(self.expr))
# self.x: global attribute
elif isinstance(self.expr.value, ast.Name) and self.expr.value.id == "self":
if self.expr.attr not in self.context.globals:
raise VariableDeclarationException("Persistent variable undeclared: " + self.expr.attr, self.expr)
var = self.context.globals[self.expr.attr]
return LLLnode.from_list(var.pos, typ=var.typ, location='storage', pos=getpos(self.expr), annotation='self.' + self.expr.attr)
# Reserved keywords
elif isinstance(self.expr.value, ast.Name) and self.expr.value.id in ("msg", "block", "tx"):
key = self.expr.value.id + "." + self.expr.attr
if key == "msg.sender":
return LLLnode.from_list(['caller'], typ='address', pos=getpos(self.expr))
elif key == "msg.value":
if not self.context.is_payable:
raise NonPayableViolationException("Cannot use msg.value in a non-payable function", self.expr)
return LLLnode.from_list(['callvalue'], typ=BaseType('uint256', {'wei': 1}), pos=getpos(self.expr))
elif key == "msg.gas":
return LLLnode.from_list(['gas'], typ='uint256', pos=getpos(self.expr))
elif key == "block.difficulty":
return LLLnode.from_list(['difficulty'], typ='uint256', pos=getpos(self.expr))
elif key == "block.timestamp":
return LLLnode.from_list(['timestamp'], typ=BaseType('uint256', {'sec': 1}, True), pos=getpos(self.expr))
elif key == "block.coinbase":
return LLLnode.from_list(['coinbase'], typ='address', pos=getpos(self.expr))
elif key == "block.number":
return LLLnode.from_list(['number'], typ='uint256', pos=getpos(self.expr))
elif key == "block.prevhash":
return LLLnode.from_list(['blockhash', ['sub', 'number', 1]], typ='bytes32', pos=getpos(self.expr))
elif key == "tx.origin":
return LLLnode.from_list(['origin'], typ='address', pos=getpos(self.expr))
else:
raise Exception("Unsupported keyword: " + key)
# Other variables
else:
sub = Expr.parse_variable_location(self.expr.value, self.context)
# contract type
if isinstance(sub.typ, ContractType):
return sub
if not isinstance(sub.typ, StructType):
raise TypeMismatchException("Type mismatch: member variable access not expected", self.expr.value)
attrs = sorted(sub.typ.members.keys())
if self.expr.attr not in attrs:
raise TypeMismatchException("Member %s not found. Only the following available: %s" % (self.expr.attr, " ".join(attrs)), self.expr)
return add_variable_offset(sub, self.expr.attr, pos=getpos(self.expr))
def subscript(self):
sub = Expr.parse_variable_location(self.expr.value, self.context)
if isinstance(sub.typ, (MappingType, ListType)):
if 'value' not in vars(self.expr.slice):
raise StructureException("Array access must access a single element, not a slice", self.expr)
index = Expr.parse_value_expr(self.expr.slice.value, self.context)
elif isinstance(sub.typ, TupleType):
if not isinstance(self.expr.slice.value, ast.Num) or self.expr.slice.value.n < 0 or self.expr.slice.value.n >= len(sub.typ.members):
raise TypeMismatchException("Tuple index invalid", self.expr.slice.value)
index = self.expr.slice.value.n
else:
raise TypeMismatchException("Bad subscript attempt", self.expr.value)
o = add_variable_offset(sub, index, pos=getpos(self.expr))
o.mutable = sub.mutable
return o
def arithmetic(self):
left = Expr.parse_value_expr(self.expr.left, self.context)
right = Expr.parse_value_expr(self.expr.right, self.context)
if not is_numeric_type(left.typ) or not is_numeric_type(right.typ):
raise TypeMismatchException("Unsupported types for arithmetic op: %r %r" % (left.typ, right.typ), self.expr)
arithmetic_pair = {left.typ.typ, right.typ.typ}
# Special Case: Simplify any literal to literal arithmetic at compile time.
if left.typ.is_literal and right.typ.is_literal and \
isinstance(right.value, int) and isinstance(left.value, int):
if isinstance(self.expr.op, ast.Add):
val = left.value + right.value
elif isinstance(self.expr.op, ast.Sub):
val = left.value - right.value
elif isinstance(self.expr.op, ast.Mult):
val = left.value * right.value
elif isinstance(self.expr.op, ast.Div):
val = left.value // right.value
elif isinstance(self.expr.op, ast.Mod):
val = left.value % right.value
elif isinstance(self.expr.op, ast.Pow):
val = left.value ** right.value
else:
raise ParserException('Unsupported literal operator: %s' % str(type(self.expr.op)), self.expr)
num = ast.Num(val)
num.source_code = self.expr.source_code
num.lineno = self.expr.lineno
num.col_offset = self.expr.col_offset
return Expr.parse_value_expr(num, self.context)
# Special case with uint256 were int literal may be casted.
if arithmetic_pair == {'uint256', 'int128'}:
# Check right side literal.
if right.typ.is_literal and SizeLimits.in_bounds('uint256', right.value):
right = LLLnode.from_list(right.value, typ=BaseType('uint256', None, is_literal=True), pos=getpos(self.expr))
arithmetic_pair = {left.typ.typ, right.typ.typ}
# Check left side literal.
elif left.typ.is_literal and SizeLimits.in_bounds('uint256', left.value):
left = LLLnode.from_list(left.value, typ=BaseType('uint256', None, is_literal=True), pos=getpos(self.expr))
arithmetic_pair = {left.typ.typ, right.typ.typ}
# Only allow explicit conversions to occur.
if left.typ.typ != right.typ.typ:
raise TypeMismatchException("Cannot implicitly convert {} to {}.".format(left.typ.typ, right.typ.typ), self.expr)
ltyp, rtyp = left.typ.typ, right.typ.typ
if isinstance(self.expr.op, (ast.Add, ast.Sub)):
if left.typ.unit != right.typ.unit and left.typ.unit is not None and right.typ.unit is not None:
raise TypeMismatchException("Unit mismatch: %r %r" % (left.typ.unit, right.typ.unit), self.expr)
if left.typ.positional and right.typ.positional and isinstance(self.expr.op, ast.Add):
raise TypeMismatchException("Cannot add two positional units!", self.expr)
new_unit = left.typ.unit or right.typ.unit
new_positional = left.typ.positional ^ right.typ.positional # xor, as subtracting two positionals gives a delta
op = 'add' if isinstance(self.expr.op, ast.Add) else 'sub'
if ltyp == 'uint256' and isinstance(self.expr.op, ast.Add):
o = LLLnode.from_list(['seq',
# Checks that: a + b >= a
['assert', ['ge', ['add', left, right], left]],
['add', left, right]], typ=BaseType('uint256', new_unit, new_positional), pos=getpos(self.expr))
elif ltyp == 'uint256' and isinstance(self.expr.op, ast.Sub):
o = LLLnode.from_list(['seq',
# Checks that: a >= b
['assert', ['ge', left, right]],
['sub', left, right]], typ=BaseType('uint256', new_unit, new_positional), pos=getpos(self.expr))
elif ltyp == rtyp:
o = LLLnode.from_list([op, left, right], typ=BaseType(ltyp, new_unit, new_positional), pos=getpos(self.expr))
else:
raise Exception("Unsupported Operation '%r(%r, %r)'" % (op, ltyp, rtyp))
elif isinstance(self.expr.op, ast.Mult):
if left.typ.positional or right.typ.positional:
raise TypeMismatchException("Cannot multiply positional values!", self.expr)
new_unit = combine_units(left.typ.unit, right.typ.unit)
if ltyp == rtyp == 'uint256':
o = LLLnode.from_list(['seq',
                                   # Checks that: a == 0 || (a * b) / a == b
['assert', ['or', ['iszero', left],
['eq', ['div', ['mul', left, right], left], right]]],
['mul', left, right]], typ=BaseType('uint256', new_unit), pos=getpos(self.expr))
elif ltyp == rtyp == 'int128':
o = LLLnode.from_list(['mul', left, right], typ=BaseType('int128', new_unit), pos=getpos(self.expr))
elif ltyp == rtyp == 'decimal':
o = LLLnode.from_list(['with', 'r', right, ['with', 'l', left,
['with', 'ans', ['mul', 'l', 'r'],
['seq',
['assert', ['or', ['eq', ['sdiv', 'ans', 'l'], 'r'], ['iszero', 'l']]],
['sdiv', 'ans', DECIMAL_DIVISOR]]]]], typ=BaseType('decimal', new_unit), pos=getpos(self.expr))
else:
raise Exception("Unsupported Operation 'mul(%r, %r)'" % (ltyp, rtyp))
elif isinstance(self.expr.op, ast.Div):
if left.typ.positional or right.typ.positional:
raise TypeMismatchException("Cannot divide positional values!", self.expr)
new_unit = combine_units(left.typ.unit, right.typ.unit, div=True)
if ltyp == rtyp == 'uint256':
o = LLLnode.from_list(['seq',
# Checks that: b != 0
['assert', right],
['div', left, right]], typ=BaseType('uint256', new_unit), pos=getpos(self.expr))
elif ltyp == rtyp == 'int128':
o = LLLnode.from_list(['sdiv', left, ['clamp_nonzero', right]], typ=BaseType('int128', new_unit), pos=getpos(self.expr))
elif ltyp == rtyp == 'decimal':
o = LLLnode.from_list(['with', 'l', left, ['with', 'r', ['clamp_nonzero', right],
['sdiv', ['mul', 'l', DECIMAL_DIVISOR], 'r']]],
typ=BaseType('decimal', new_unit), pos=getpos(self.expr))
else:
raise Exception("Unsupported Operation 'div(%r, %r)'" % (ltyp, rtyp))
elif isinstance(self.expr.op, ast.Mod):
if left.typ.positional or right.typ.positional:
raise TypeMismatchException("Cannot use positional values as modulus arguments!", self.expr)
if left.typ.unit != right.typ.unit and left.typ.unit is not None and right.typ.unit is not None:
raise TypeMismatchException("Modulus arguments must have same unit", self.expr)
new_unit = left.typ.unit or right.typ.unit
if ltyp == rtyp == 'uint256':
o = LLLnode.from_list(['seq',
['assert', right],
['mod', left, right]], typ=BaseType('uint256', new_unit), pos=getpos(self.expr))
elif ltyp == rtyp:
o = LLLnode.from_list(['smod', left, ['clamp_nonzero', right]], typ=BaseType(ltyp, new_unit), pos=getpos(self.expr))
else:
raise Exception("Unsupported Operation 'mod(%r, %r)'" % (ltyp, rtyp))
elif isinstance(self.expr.op, ast.Pow):
if left.typ.positional or right.typ.positional:
raise TypeMismatchException("Cannot use positional values as exponential arguments!", self.expr)
if right.typ.unit:
raise TypeMismatchException("Cannot use unit values as exponents", self.expr)
if ltyp != 'int128' and ltyp != 'uint256' and isinstance(self.expr.right, ast.Name):
raise TypeMismatchException("Cannot use dynamic values as exponents, for unit base types", self.expr)
if ltyp == rtyp == 'uint256':
o = LLLnode.from_list(['seq',
['assert', ['or', ['or', ['eq', right, 1], ['iszero', right]],
['lt', left, ['exp', left, right]]]],
['exp', left, right]], typ=BaseType('uint256'), pos=getpos(self.expr))
elif ltyp == rtyp == 'int128':
new_unit = left.typ.unit
if left.typ.unit and not isinstance(self.expr.right, ast.Name):
new_unit = {left.typ.unit.copy().popitem()[0]: self.expr.right.n}
o = LLLnode.from_list(['exp', left, right], typ=BaseType('int128', new_unit), pos=getpos(self.expr))
else:
raise TypeMismatchException('Only whole number exponents are supported', self.expr)
else:
raise Exception("Unsupported binop: %r" % self.expr.op)
if o.typ.typ == 'int128':
return LLLnode.from_list(['clamp', ['mload', MemoryPositions.MINNUM], o, ['mload', MemoryPositions.MAXNUM]], typ=o.typ, pos=getpos(self.expr))
elif o.typ.typ == 'decimal':
return LLLnode.from_list(['clamp', ['mload', MemoryPositions.MINDECIMAL], o, ['mload', MemoryPositions.MAXDECIMAL]], typ=o.typ, pos=getpos(self.expr))
if o.typ.typ == 'uint256':
return o
else:
raise Exception("%r %r" % (o, o.typ))
def build_in_comparator(self):
from vyper.parser.parser import make_setter
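        # Strategy: copy a literal list into memory if needed, then run a `repeat` loop that
        # stores 1 in a result placeholder and breaks as soon as an element equals the left operand.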
left = Expr(self.expr.left, self.context).lll_node
right = Expr(self.expr.comparators[0], self.context).lll_node
if left.typ.typ != right.typ.subtype.typ:
raise TypeMismatchException("%s cannot be in a list of %s" % (left.typ.typ, right.typ.subtype.typ))
result_placeholder = self.context.new_placeholder(BaseType('bool'))
setter = []
# Load nth item from list in memory.
if right.value == 'multi':
# Copy literal to memory to be compared.
tmp_list = LLLnode.from_list(
obj=self.context.new_placeholder(ListType(right.typ.subtype, right.typ.count)),
typ=ListType(right.typ.subtype, right.typ.count),
location='memory'
)
setter = make_setter(tmp_list, right, 'memory', pos=getpos(self.expr))
load_i_from_list = ['mload', ['add', tmp_list, ['mul', 32, ['mload', MemoryPositions.FREE_LOOP_INDEX]]]]
elif right.location == "storage":
load_i_from_list = ['sload', ['add', ['sha3_32', right], ['mload', MemoryPositions.FREE_LOOP_INDEX]]]
else:
load_i_from_list = ['mload', ['add', right, ['mul', 32, ['mload', MemoryPositions.FREE_LOOP_INDEX]]]]
# Condition repeat loop has to break on.
break_loop_condition = [
'if',
['eq', unwrap_location(left), load_i_from_list],
['seq',
['mstore', '_result', 1], # store true.
'break']
]
# Repeat loop to loop-compare each item in the list.
for_loop_sequence = [
['mstore', result_placeholder, 0],
['with', '_result', result_placeholder,
['repeat', MemoryPositions.FREE_LOOP_INDEX, 0, right.typ.count, break_loop_condition]],
['mload', result_placeholder]
]
# Save list to memory, so one can iterate over it,
# used when literal was created with tmp_list.
if setter:
compare_sequence = ['seq', setter] + for_loop_sequence
else:
compare_sequence = ['seq'] + for_loop_sequence
# Compare the result of the repeat loop to 1, to know if a match was found.
o = LLLnode.from_list([
'eq', 1,
compare_sequence],
typ='bool',
annotation="in comporator"
)
return o
@staticmethod
    def _signed_to_unsigned_comparison_op(op):
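        # uint256 values must be compared with unsigned EVM opcodes, so map the signed comparison ops to their unsigned counterparts.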
translation_map = {
'sgt': 'gt',
'sge': 'ge',
'sle': 'le',
'slt': 'lt',
}
if op in translation_map:
return translation_map[op]
else:
return op
def compare(self):
left = Expr.parse_value_expr(self.expr.left, self.context)
right = Expr.parse_value_expr(self.expr.comparators[0], self.context)
if isinstance(left.typ, ByteArrayType) and isinstance(right.typ, ByteArrayType):
if left.typ.maxlen != right.typ.maxlen:
raise TypeMismatchException('Can only compare bytes of the same length', self.expr)
if left.typ.maxlen > 32 or right.typ.maxlen > 32:
raise ParserException('Can only compare bytes of length shorter than 32 bytes', self.expr)
elif isinstance(self.expr.ops[0], ast.In) and \
isinstance(right.typ, ListType):
if not are_units_compatible(left.typ, right.typ.subtype) and not are_units_compatible(right.typ.subtype, left.typ):
raise TypeMismatchException("Can't use IN comparison with different types!", self.expr)
return self.build_in_comparator()
else:
if not are_units_compatible(left.typ, right.typ) and not are_units_compatible(right.typ, left.typ):
raise TypeMismatchException("Can't compare values with different units!", self.expr)
if len(self.expr.ops) != 1:
raise StructureException("Cannot have a comparison with more than two elements", self.expr)
if isinstance(self.expr.ops[0], ast.Gt):
op = 'sgt'
elif isinstance(self.expr.ops[0], ast.GtE):
op = 'sge'
elif isinstance(self.expr.ops[0], ast.LtE):
op = 'sle'
elif isinstance(self.expr.ops[0], ast.Lt):
op = 'slt'
elif isinstance(self.expr.ops[0], ast.Eq):
op = 'eq'
elif isinstance(self.expr.ops[0], ast.NotEq):
op = 'ne'
else:
raise Exception("Unsupported comparison operator")
# Compare (limited to 32) byte arrays.
        if isinstance(left.typ, ByteArrayType) and isinstance(right.typ, ByteArrayType):
left = Expr(self.expr.left, self.context).lll_node
right = Expr(self.expr.comparators[0], self.context).lll_node
def load_bytearray(side):
if side.location == 'memory':
return ['mload', ['add', 32, side]]
elif side.location == 'storage':
return ['sload', ['add', 1, ['sha3_32', side]]]
return LLLnode.from_list(
[op, load_bytearray(left), load_bytearray(right)], typ='bool', pos=getpos(self.expr))
# Compare other types.
if not is_numeric_type(left.typ) or not is_numeric_type(right.typ):
if op not in ('eq', 'ne'):
raise TypeMismatchException("Invalid type for comparison op", self.expr)
left_type, right_type = left.typ.typ, right.typ.typ
# Special Case: comparison of a literal integer. If in valid range allow it to be compared.
if {left_type, right_type} == {'int128', 'uint256'} and {left.typ.is_literal, right.typ.is_literal} == {True, False}:
comparison_allowed = False
if left.typ.is_literal and SizeLimits.in_bounds(right_type, left.value):
comparison_allowed = True
elif right.typ.is_literal and SizeLimits.in_bounds(left_type, right.value):
comparison_allowed = True
            op = self._signed_to_unsigned_comparison_op(op)
if comparison_allowed:
return LLLnode.from_list([op, left, right], typ='bool', pos=getpos(self.expr))
elif {left_type, right_type} == {'uint256', 'uint256'}:
            op = self._signed_to_unsigned_comparison_op(op)
elif (left_type in ('decimal', 'int128') or right_type in ('decimal', 'int128')) and left_type != right_type:
raise TypeMismatchException(
'Implicit conversion from {} to {} disallowed, please convert.'.format(left_type, right_type),
self.expr
)
if left_type == right_type:
return LLLnode.from_list([op, left, right], typ='bool', pos=getpos(self.expr))
else:
raise TypeMismatchException("Unsupported types for comparison: %r %r" % (left_type, right_type), self.expr)
def boolean_operations(self):
if len(self.expr.values) != 2:
raise StructureException("Expected two arguments for a bool op", self.expr)
if self.context.in_assignment and (isinstance(self.expr.values[0], ast.Call) or isinstance(self.expr.values[1], ast.Call)):
raise StructureException("Boolean operations with calls may not be performed on assignment", self.expr)
left = Expr.parse_value_expr(self.expr.values[0], self.context)
right = Expr.parse_value_expr(self.expr.values[1], self.context)
if not is_base_type(left.typ, 'bool') or not is_base_type(right.typ, 'bool'):
raise TypeMismatchException("Boolean operations can only be between booleans!", self.expr)
if isinstance(self.expr.op, ast.And):
op = 'and'
elif isinstance(self.expr.op, ast.Or):
op = 'or'
else:
raise Exception("Unsupported bool op: " + self.expr.op)
return LLLnode.from_list([op, left, right], typ='bool', pos=getpos(self.expr))
# Unary operations (only "not" supported)
def unary_operations(self):
operand = Expr.parse_value_expr(self.expr.operand, self.context)
if isinstance(self.expr.op, ast.Not):
# Note that in the case of bool, num, address, decimal, uint256 AND bytes32,
# a zero entry represents false, all others represent true
return LLLnode.from_list(["iszero", operand], typ='bool', pos=getpos(self.expr))
elif isinstance(self.expr.op, ast.USub):
if not is_numeric_type(operand.typ):
raise TypeMismatchException("Unsupported type for negation: %r" % operand.typ, operand)
return LLLnode.from_list(["sub", 0, operand], typ=operand.typ, pos=getpos(self.expr))
else:
raise StructureException("Only the 'not' unary operator is supported")
def _get_external_contract_keywords(self):
value, gas = None, None
for kw in self.expr.keywords:
if kw.arg not in ('value', 'gas'):
raise TypeMismatchException('Invalid keyword argument, only "gas" and "value" supported.', self.expr)
elif kw.arg == 'gas':
gas = Expr.parse_value_expr(kw.value, self.context)
elif kw.arg == 'value':
value = Expr.parse_value_expr(kw.value, self.context)
return value, gas
# Function calls
def call(self):
from .parser import (
external_contract_call,
pack_arguments,
)
from vyper.functions import (
dispatch_table,
)
if isinstance(self.expr.func, ast.Name):
function_name = self.expr.func.id
if function_name in dispatch_table:
return dispatch_table[function_name](self.expr, self.context)
else:
err_msg = "Not a top-level function: {}".format(function_name)
if function_name in self.context.sigs['self']:
err_msg += ". Did you mean self.{}?".format(function_name)
raise StructureException(err_msg, self.expr)
elif isinstance(self.expr.func, ast.Attribute) and isinstance(self.expr.func.value, ast.Name) and self.expr.func.value.id == "self":
method_name = self.expr.func.attr
if method_name not in self.context.sigs['self']:
raise FunctionDeclarationException("Function not declared yet (reminder: functions cannot "
"call functions later in code than themselves): %s" % self.expr.func.attr)
sig = self.context.sigs['self'][method_name]
if self.context.is_constant and not sig.const:
raise ConstancyViolationException(
"May not call non-constant function '%s' within a constant function." % (method_name),
getpos(self.expr)
)
add_gas = self.context.sigs['self'][method_name].gas # gas of call
inargs, inargsize = pack_arguments(sig, [Expr(arg, self.context).lll_node for arg in self.expr.args], self.context, pos=getpos(self.expr))
output_placeholder = self.context.new_placeholder(typ=sig.output_type)
multi_arg = []
if isinstance(sig.output_type, BaseType):
returner = output_placeholder
elif isinstance(sig.output_type, ByteArrayType):
returner = output_placeholder + 32
elif self.context.in_assignment and isinstance(sig.output_type, TupleType):
returner = output_placeholder
else:
raise TypeMismatchException("Invalid output type: %r" % sig.output_type, self.expr)
o = LLLnode.from_list(multi_arg +
['seq',
['assert', ['call', ['gas'], ['address'], 0,
inargs, inargsize,
output_placeholder, get_size_of_type(sig.output_type) * 32]], returner],
typ=sig.output_type, location='memory',
pos=getpos(self.expr), add_gas_estimate=add_gas, annotation='Internal Call: %s' % method_name)
o.gas += sig.gas
return o
elif isinstance(self.expr.func, ast.Attribute) and isinstance(self.expr.func.value, ast.Call):
contract_name = self.expr.func.value.func.id
contract_address = Expr.parse_value_expr(self.expr.func.value.args[0], self.context)
value, gas = self._get_external_contract_keywords()
return external_contract_call(self.expr, self.context, contract_name, contract_address, pos=getpos(self.expr), value=value, gas=gas)
elif isinstance(self.expr.func.value, ast.Attribute) and self.expr.func.value.attr in self.context.sigs:
contract_name = self.expr.func.value.attr
var = self.context.globals[self.expr.func.value.attr]
contract_address = unwrap_location(LLLnode.from_list(var.pos, typ=var.typ, location='storage', pos=getpos(self.expr), annotation='self.' + self.expr.func.value.attr))
value, gas = self._get_external_contract_keywords()
return external_contract_call(self.expr, self.context, contract_name, contract_address, pos=getpos(self.expr), value=value, gas=gas)
elif isinstance(self.expr.func.value, ast.Attribute) and self.expr.func.value.attr in self.context.globals:
contract_name = self.context.globals[self.expr.func.value.attr].typ.unit
var = self.context.globals[self.expr.func.value.attr]
contract_address = unwrap_location(LLLnode.from_list(var.pos, typ=var.typ, location='storage', pos=getpos(self.expr), annotation='self.' + self.expr.func.value.attr))
value, gas = self._get_external_contract_keywords()
return external_contract_call(self.expr, self.context, contract_name, contract_address, pos=getpos(self.expr), value=value, gas=gas)
else:
raise StructureException("Unsupported operator: %r" % ast.dump(self.expr), self.expr)
def list_literals(self):
if not len(self.expr.elts):
raise StructureException("List must have elements", self.expr)
o = []
out_type = None
for elt in self.expr.elts:
o.append(Expr(elt, self.context).lll_node)
if not out_type:
out_type = o[-1].typ
previous_type = o[-1].typ.subtype.typ if hasattr(o[-1].typ, 'subtype') else o[-1].typ
current_type = out_type.subtype.typ if hasattr(out_type, 'subtype') else out_type
if len(o) > 1 and previous_type != current_type:
raise TypeMismatchException("Lists may only contain one type", self.expr)
return LLLnode.from_list(["multi"] + o, typ=ListType(out_type, len(o)), pos=getpos(self.expr))
def struct_literals(self):
o = {}
members = {}
for key, value in zip(self.expr.keys, self.expr.values):
if not isinstance(key, ast.Name) or not is_varname_valid(key.id, self.context.custom_units):
raise TypeMismatchException("Invalid member variable for struct: %r" % vars(key).get('id', key), key)
if key.id in o:
raise TypeMismatchException("Member variable duplicated: " + key.id, key)
o[key.id] = Expr(value, self.context).lll_node
members[key.id] = o[key.id].typ
return LLLnode.from_list(["multi"] + [o[key] for key in sorted(list(o.keys()))], typ=StructType(members), pos=getpos(self.expr))
def tuple_literals(self):
if not len(self.expr.elts):
raise StructureException("Tuple must have elements", self.expr)
o = []
for elt in self.expr.elts:
o.append(Expr(elt, self.context).lll_node)
return LLLnode.from_list(["multi"] + o, typ=TupleType(o), pos=getpos(self.expr))
# Parse an expression that results in a value
def parse_value_expr(expr, context):
return unwrap_location(Expr(expr, context).lll_node)
# Parse an expression that represents an address in memory or storage
def parse_variable_location(expr, context):
o = Expr(expr, context).lll_node
if not o.location:
raise Exception("Looking for a variable location, instead got a value")
return o
| 55.052705
| 178
| 0.596982
|
558017d9d00821f6535f0e658f66270cb992aa32
| 17,929
|
py
|
Python
|
tests/wallet/cc_wallet/test_trades.py
|
ChivesWorld/chives-blockchain
|
56734ef0719f7bf844213823bb95b0fcc642d222
|
[
"Apache-2.0"
] | 1
|
2021-08-01T17:14:54.000Z
|
2021-08-01T17:14:54.000Z
|
tests/wallet/cc_wallet/test_trades.py
|
ChivesWorld/chives-blockchain
|
56734ef0719f7bf844213823bb95b0fcc642d222
|
[
"Apache-2.0"
] | null | null | null |
tests/wallet/cc_wallet/test_trades.py
|
ChivesWorld/chives-blockchain
|
56734ef0719f7bf844213823bb95b0fcc642d222
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
from pathlib import Path
from secrets import token_bytes
import pytest
from chives.simulator.simulator_protocol import FarmNewBlockProtocol
from chives.types.peer_info import PeerInfo
from chives.util.ints import uint16, uint64
from chives.wallet.cc_wallet.cc_wallet import CCWallet
from chives.wallet.trade_manager import TradeManager
from chives.wallet.trading.trade_status import TradeStatus
from tests.setup_nodes import setup_simulators_and_wallets
from tests.time_out_assert import time_out_assert
from tests.wallet.sync.test_wallet_sync import wallet_height_at_least
@pytest.fixture(scope="module")
def event_loop():
loop = asyncio.get_event_loop()
yield loop
@pytest.fixture(scope="module")
async def two_wallet_nodes():
async for _ in setup_simulators_and_wallets(1, 2, {}):
yield _
buffer_blocks = 4
@pytest.fixture(scope="module")
async def wallets_prefarm(two_wallet_nodes):
"""
    Farms blocks to two funded wallets and returns both wallet nodes and the full node API.
"""
farm_blocks = 10
buffer = 4
full_nodes, wallets = two_wallet_nodes
full_node_api = full_nodes[0]
full_node_server = full_node_api.server
wallet_node_0, wallet_server_0 = wallets[0]
wallet_node_1, wallet_server_1 = wallets[1]
wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
ph0 = await wallet_0.get_new_puzzlehash()
ph1 = await wallet_1.get_new_puzzlehash()
await wallet_server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
await wallet_server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
for i in range(0, farm_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph0))
for i in range(0, farm_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
for i in range(0, buffer):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
return wallet_node_0, wallet_node_1, full_node_api
class TestCCTrades:
@pytest.mark.asyncio
async def test_cc_trade(self, wallets_prefarm):
wallet_node_0, wallet_node_1, full_node = wallets_prefarm
wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
cc_wallet: CCWallet = await CCWallet.create_new_cc(wallet_node_0.wallet_state_manager, wallet_0, uint64(100))
for i in range(1, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
await time_out_assert(15, wallet_height_at_least, True, wallet_node_0, 27)
await time_out_assert(15, cc_wallet.get_confirmed_balance, 100)
await time_out_assert(15, cc_wallet.get_unconfirmed_balance, 100)
assert cc_wallet.cc_info.my_genesis_checker is not None
colour = cc_wallet.get_colour()
cc_wallet_2: CCWallet = await CCWallet.create_wallet_for_cc(
wallet_node_1.wallet_state_manager, wallet_1, colour
)
assert cc_wallet.cc_info.my_genesis_checker == cc_wallet_2.cc_info.my_genesis_checker
for i in range(0, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
await time_out_assert(15, wallet_height_at_least, True, wallet_node_0, 31)
# send cc_wallet 2 a coin
cc_hash = await cc_wallet_2.get_new_inner_hash()
tx_record = await cc_wallet.generate_signed_transaction([uint64(1)], [cc_hash])
await wallet_0.wallet_state_manager.add_pending_transaction(tx_record)
for i in range(0, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
await time_out_assert(15, wallet_height_at_least, True, wallet_node_0, 35)
trade_manager_0 = wallet_node_0.wallet_state_manager.trade_manager
trade_manager_1 = wallet_node_1.wallet_state_manager.trade_manager
file = "test_offer_file.offer"
file_path = Path(file)
if file_path.exists():
file_path.unlink()
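        # Offer is keyed by wallet id (1 = the standard chives wallet): positive amounts are requested by the maker, negative amounts are offered.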
offer_dict = {1: 10, 2: -30}
success, trade_offer, error = await trade_manager_0.create_offer_for_ids(offer_dict, file)
assert success is True
assert trade_offer is not None
success, offer, error = await trade_manager_1.get_discrepancies_for_offer(file_path)
assert error is None
assert success is True
assert offer is not None
assert offer["chives"] == -10
assert offer[colour] == 30
success, trade, reason = await trade_manager_1.respond_to_offer(file_path)
assert success is True
for i in range(0, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
await time_out_assert(15, wallet_height_at_least, True, wallet_node_0, 39)
await time_out_assert(15, cc_wallet_2.get_confirmed_balance, 31)
await time_out_assert(15, cc_wallet_2.get_unconfirmed_balance, 31)
trade_2 = await trade_manager_0.get_trade_by_id(trade_offer.trade_id)
assert TradeStatus(trade_2.status) is TradeStatus.CONFIRMED
@pytest.mark.asyncio
async def test_cc_trade_accept_with_zero(self, wallets_prefarm):
wallet_node_0, wallet_node_1, full_node = wallets_prefarm
wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
cc_wallet: CCWallet = await CCWallet.create_new_cc(wallet_node_0.wallet_state_manager, wallet_0, uint64(100))
for i in range(1, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
await time_out_assert(15, cc_wallet.get_confirmed_balance, 100)
await time_out_assert(15, cc_wallet.get_unconfirmed_balance, 100)
assert cc_wallet.cc_info.my_genesis_checker is not None
colour = cc_wallet.get_colour()
cc_wallet_2: CCWallet = await CCWallet.create_wallet_for_cc(
wallet_node_1.wallet_state_manager, wallet_1, colour
)
assert cc_wallet.cc_info.my_genesis_checker == cc_wallet_2.cc_info.my_genesis_checker
ph = await wallet_1.get_new_puzzlehash()
for i in range(0, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(ph))
trade_manager_0 = wallet_node_0.wallet_state_manager.trade_manager
trade_manager_1 = wallet_node_1.wallet_state_manager.trade_manager
file = "test_offer_file.offer"
file_path = Path(file)
if file_path.exists():
file_path.unlink()
offer_dict = {1: 10, 3: -30}
success, trade_offer, error = await trade_manager_0.create_offer_for_ids(offer_dict, file)
assert success is True
assert trade_offer is not None
success, offer, error = await trade_manager_1.get_discrepancies_for_offer(file_path)
assert error is None
assert success is True
assert offer is not None
assert cc_wallet.get_colour() == cc_wallet_2.get_colour()
assert offer["chives"] == -10
assert offer[colour] == 30
success, trade, reason = await trade_manager_1.respond_to_offer(file_path)
assert success is True
for i in range(0, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
await time_out_assert(15, cc_wallet_2.get_confirmed_balance, 30)
await time_out_assert(15, cc_wallet_2.get_unconfirmed_balance, 30)
trade_2 = await trade_manager_0.get_trade_by_id(trade_offer.trade_id)
assert TradeStatus(trade_2.status) is TradeStatus.CONFIRMED
@pytest.mark.asyncio
async def test_cc_trade_with_multiple_colours(self, wallets_prefarm):
        # This test starts with a CCWallet in both wallets.
# wallet1 {wallet_id: 2 = 70}
# wallet2 {wallet_id: 2 = 30}
wallet_node_a, wallet_node_b, full_node = wallets_prefarm
wallet_a = wallet_node_a.wallet_state_manager.main_wallet
wallet_b = wallet_node_b.wallet_state_manager.main_wallet
# cc_a_2 = coloured coin, Alice, wallet id = 2
cc_a_2 = wallet_node_a.wallet_state_manager.wallets[2]
cc_b_2 = wallet_node_b.wallet_state_manager.wallets[2]
cc_a_3: CCWallet = await CCWallet.create_new_cc(wallet_node_a.wallet_state_manager, wallet_a, uint64(100))
for i in range(0, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
await time_out_assert(15, cc_a_3.get_confirmed_balance, 100)
await time_out_assert(15, cc_a_3.get_unconfirmed_balance, 100)
# store these for asserting change later
cc_balance = await cc_a_2.get_unconfirmed_balance()
cc_balance_2 = await cc_b_2.get_unconfirmed_balance()
assert cc_a_3.cc_info.my_genesis_checker is not None
red = cc_a_3.get_colour()
for i in range(0, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
cc_b_3: CCWallet = await CCWallet.create_wallet_for_cc(wallet_node_b.wallet_state_manager, wallet_b, red)
assert cc_a_3.cc_info.my_genesis_checker == cc_b_3.cc_info.my_genesis_checker
for i in range(0, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
trade_manager_0 = wallet_node_a.wallet_state_manager.trade_manager
trade_manager_1 = wallet_node_b.wallet_state_manager.trade_manager
file = "test_offer_file.offer"
file_path = Path(file)
if file_path.exists():
file_path.unlink()
        # Maker requests 1000 chives and offers 20 of one colour and 50 of the other.
offer_dict = {1: 1000, 2: -20, 4: -50}
success, trade_offer, error = await trade_manager_0.create_offer_for_ids(offer_dict, file)
assert success is True
assert trade_offer is not None
success, offer, error = await trade_manager_1.get_discrepancies_for_offer(file_path)
assert error is None
assert success is True
assert offer is not None
assert offer["chives"] == -1000
colour_2 = cc_a_2.get_colour()
colour_3 = cc_a_3.get_colour()
assert offer[colour_2] == 20
assert offer[colour_3] == 50
success, trade, reason = await trade_manager_1.respond_to_offer(file_path)
assert success is True
for i in range(0, 10):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
await time_out_assert(15, cc_b_3.get_confirmed_balance, 50)
await time_out_assert(15, cc_b_3.get_unconfirmed_balance, 50)
await time_out_assert(15, cc_a_3.get_confirmed_balance, 50)
await time_out_assert(15, cc_a_3.get_unconfirmed_balance, 50)
await time_out_assert(15, cc_a_2.get_unconfirmed_balance, cc_balance - offer[colour_2])
await time_out_assert(15, cc_b_2.get_unconfirmed_balance, cc_balance_2 + offer[colour_2])
trade = await trade_manager_0.get_trade_by_id(trade_offer.trade_id)
status: TradeStatus = TradeStatus(trade.status)
assert status is TradeStatus.CONFIRMED
@pytest.mark.asyncio
async def test_create_offer_with_zero_val(self, wallets_prefarm):
# Wallet A Wallet B
# CCWallet id 2: 50 CCWallet id 2: 50
        # CCWallet id 3: 50 CCWallet id 3: 50
        # Wallet A will create a new CC and wallet B will create an offer to buy that coin
wallet_node_a, wallet_node_b, full_node = wallets_prefarm
wallet_a = wallet_node_a.wallet_state_manager.main_wallet
wallet_b = wallet_node_b.wallet_state_manager.main_wallet
trade_manager_a: TradeManager = wallet_node_a.wallet_state_manager.trade_manager
trade_manager_b: TradeManager = wallet_node_b.wallet_state_manager.trade_manager
cc_a_4: CCWallet = await CCWallet.create_new_cc(wallet_node_a.wallet_state_manager, wallet_a, uint64(100))
for i in range(0, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
await time_out_assert(15, cc_a_4.get_confirmed_balance, 100)
colour = cc_a_4.get_colour()
cc_b_4: CCWallet = await CCWallet.create_wallet_for_cc(wallet_node_b.wallet_state_manager, wallet_b, colour)
cc_balance = await cc_a_4.get_confirmed_balance()
cc_balance_2 = await cc_b_4.get_confirmed_balance()
offer_dict = {1: -30, cc_a_4.id(): 50}
file = "test_offer_file.offer"
file_path = Path(file)
if file_path.exists():
file_path.unlink()
success, offer, error = await trade_manager_b.create_offer_for_ids(offer_dict, file)
success, trade_a, reason = await trade_manager_a.respond_to_offer(file_path)
for i in range(0, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
await time_out_assert(15, cc_a_4.get_confirmed_balance, cc_balance - 50)
await time_out_assert(15, cc_b_4.get_confirmed_balance, cc_balance_2 + 50)
async def assert_func():
assert trade_a is not None
trade = await trade_manager_a.get_trade_by_id(trade_a.trade_id)
assert trade is not None
return trade.status
async def assert_func_b():
assert offer is not None
trade = await trade_manager_b.get_trade_by_id(offer.trade_id)
assert trade is not None
return trade.status
await time_out_assert(15, assert_func, TradeStatus.CONFIRMED.value)
await time_out_assert(15, assert_func_b, TradeStatus.CONFIRMED.value)
@pytest.mark.asyncio
async def test_cc_trade_cancel_insecure(self, wallets_prefarm):
# Wallet A Wallet B
# CCWallet id 2: 50 CCWallet id 2: 50
# CCWallet id 3: 50 CCWallet id 3: 50
# CCWallet id 4: 40 CCWallet id 4: 60
# Wallet A will create offer, cancel it by deleting from db only
wallet_node_a, wallet_node_b, full_node = wallets_prefarm
wallet_a = wallet_node_a.wallet_state_manager.main_wallet
trade_manager_a: TradeManager = wallet_node_a.wallet_state_manager.trade_manager
file = "test_offer_file.offer"
file_path = Path(file)
if file_path.exists():
file_path.unlink()
spendable_chives = await wallet_a.get_spendable_balance()
offer_dict = {1: 10, 2: -30, 3: 30}
success, trade_offer, error = await trade_manager_a.create_offer_for_ids(offer_dict, file)
spendable_chives_after = await wallet_a.get_spendable_balance()
locked_coin = await trade_manager_a.get_locked_coins(wallet_a.id())
locked_sum = 0
for name, record in locked_coin.items():
locked_sum += record.coin.amount
assert spendable_chives == spendable_chives_after + locked_sum
assert success is True
assert trade_offer is not None
# Cancel offer 1 by just deleting from db
await trade_manager_a.cancel_pending_offer(trade_offer.trade_id)
spendable_after_cancel_1 = await wallet_a.get_spendable_balance()
# Spendable should be the same as it was before making offer 1
assert spendable_chives == spendable_after_cancel_1
trade_a = await trade_manager_a.get_trade_by_id(trade_offer.trade_id)
assert trade_a is not None
assert trade_a.status == TradeStatus.CANCELED.value
@pytest.mark.asyncio
async def test_cc_trade_cancel_secure(self, wallets_prefarm):
# Wallet A Wallet B
# CCWallet id 2: 50 CCWallet id 2: 50
# CCWallet id 3: 50 CCWallet id 3: 50
# CCWallet id 4: 40 CCWallet id 4: 60
# Wallet A will create offer, cancel it by spending coins back to self
wallet_node_a, wallet_node_b, full_node = wallets_prefarm
wallet_a = wallet_node_a.wallet_state_manager.main_wallet
trade_manager_a: TradeManager = wallet_node_a.wallet_state_manager.trade_manager
file = "test_offer_file.offer"
file_path = Path(file)
if file_path.exists():
file_path.unlink()
spendable_chives = await wallet_a.get_spendable_balance()
offer_dict = {1: 10, 2: -30, 3: 30}
success, trade_offer, error = await trade_manager_a.create_offer_for_ids(offer_dict, file)
spendable_chives_after = await wallet_a.get_spendable_balance()
locked_coin = await trade_manager_a.get_locked_coins(wallet_a.id())
locked_sum = 0
for name, record in locked_coin.items():
locked_sum += record.coin.amount
assert spendable_chives == spendable_chives_after + locked_sum
assert success is True
assert trade_offer is not None
# Cancel offer 1 by spending coins that were offered
await trade_manager_a.cancel_pending_offer_safely(trade_offer.trade_id)
for i in range(0, buffer_blocks):
await full_node.farm_new_transaction_block(FarmNewBlockProtocol(token_bytes()))
await time_out_assert(15, wallet_a.get_spendable_balance, spendable_chives)
# Spendable should be the same as it was before making offer 1
async def get_status():
assert trade_offer is not None
trade_a = await trade_manager_a.get_trade_by_id(trade_offer.trade_id)
assert trade_a is not None
return trade_a.status
await time_out_assert(15, get_status, TradeStatus.CANCELED.value)
| 40.020089
| 117
| 0.713258
|
6700d0a71356f861641e3d6af88928869bdf61b4
| 10,649
|
pyw
|
Python
|
source/nvda.pyw
|
SWEN-712/screen-reader-brandonp728
|
e30c25ad2d10ce632fac0548696a61a872328f59
|
[
"bzip2-1.0.6"
] | null | null | null |
source/nvda.pyw
|
SWEN-712/screen-reader-brandonp728
|
e30c25ad2d10ce632fac0548696a61a872328f59
|
[
"bzip2-1.0.6"
] | null | null | null |
source/nvda.pyw
|
SWEN-712/screen-reader-brandonp728
|
e30c25ad2d10ce632fac0548696a61a872328f59
|
[
"bzip2-1.0.6"
] | null | null | null |
# -*- coding: UTF-8 -*-
#nvda.pyw
#A part of NonVisual Desktop Access (NVDA)
#Copyright (C) 2006-2019 NV Access Limited, Aleksey Sadovoy, Babbage B.V., Joseph Lee, Łukasz Golonka
#This file is covered by the GNU General Public License.
#See the file COPYING for more details.
"""The NVDA launcher. It can handle some command-line arguments (including help). It sets up logging, and then starts the core."""
import sys
import os
if getattr(sys, "frozen", None):
# We are running as an executable.
# Append the path of the executable to sys so we can import modules from the dist dir.
sys.path.append(sys.prefix)
os.chdir(sys.prefix)
else:
import sourceEnv
#We should always change directory to the location of this module (nvda.pyw), don't rely on sys.path[0]
os.chdir(os.path.normpath(os.path.dirname(__file__)))
import ctypes
import locale
import gettext
try:
gettext.translation('nvda',localedir='locale',languages=[locale.getdefaultlocale()[0]]).install(True)
except:
gettext.install('nvda')
import time
import argparse
import globalVars
import config
import logHandler
from logHandler import log
import winUser
import winKernel
# Find out if NVDA is running as a Windows Store application
bufLen=ctypes.c_int()
try:
GetCurrentPackageFullName=ctypes.windll.kernel32.GetCurrentPackageFullName
except AttributeError:
config.isAppX=False
else:
bufLen=ctypes.c_int()
# Use GetCurrentPackageFullName to detect if we are running as a store app.
# #8362: error 15700 (not a package) error is returned if this is not a Windows Store package.
config.isAppX=(GetCurrentPackageFullName(ctypes.byref(bufLen),None)!=15700)
class NoConsoleOptionParser(argparse.ArgumentParser):
"""A commandline option parser that shows its messages using dialogs, as this pyw file has no dos console window associated with it"""
def print_help(self, file=None):
"""Shows help in a standard Windows message dialog"""
winUser.MessageBox(0, self.format_help(), u"Help", 0)
def error(self, message):
"""Shows an error in a standard Windows message dialog, and then exits NVDA"""
out = ""
out = self.format_usage()
out += "\nerror: %s" % message
winUser.MessageBox(0, out, u"Error", 0)
sys.exit(2)
globalVars.startTime=time.time()
# Check OS version requirements
import winVersion
if not winVersion.isSupportedOS():
winUser.MessageBox(0, ctypes.FormatError(winUser.ERROR_OLD_WIN_VERSION), None, winUser.MB_ICONERROR)
sys.exit(1)
def stringToBool(string):
"""Wrapper for configobj.validate.is_boolean to raise the proper exception for wrong values."""
from configobj.validate import is_boolean, ValidateError
try:
return is_boolean(string)
except ValidateError as e:
raise argparse.ArgumentTypeError(e.message)
#Process option arguments
parser=NoConsoleOptionParser()
quitGroup = parser.add_mutually_exclusive_group()
quitGroup.add_argument('-q','--quit',action="store_true",dest='quit',default=False,help="Quit already running copy of NVDA")
parser.add_argument('-k','--check-running',action="store_true",dest='check_running',default=False,help="Report whether NVDA is running via the exit code; 0 if running, 1 if not running")
parser.add_argument('-f','--log-file',dest='logFileName',type=str,help="The file where log messages should be written to")
parser.add_argument('-l','--log-level',dest='logLevel',type=int,default=0,choices=[10, 12, 15, 20, 30, 40, 50, 100],help="The lowest level of message logged (debug 10, input/output 12, debugwarning 15, info 20, warning 30, error 40, critical 50, off 100), default is info")
parser.add_argument('-c','--config-path',dest='configPath',default=None,type=str,help="The path where all settings for NVDA are stored")
parser.add_argument('-m','--minimal',action="store_true",dest='minimal',default=False,help="No sounds, no interface, no start message etc")
parser.add_argument('-s','--secure',action="store_true",dest='secure',default=False,help="Secure mode (disable Python console)")
parser.add_argument('--disable-addons',action="store_true",dest='disableAddons',default=False,help="Disable all add-ons")
parser.add_argument('--debug-logging',action="store_true",dest='debugLogging',default=False,help="Enable debug level logging just for this run. This setting will override any other log level (--loglevel, -l) argument given, as well as no logging option.")
parser.add_argument('--no-logging',action="store_true",dest='noLogging',default=False,help="Disable logging completely for this run. This setting can be overwritten with other log level (--loglevel, -l) switch or if debug logging is specified.")
parser.add_argument('--no-sr-flag',action="store_false",dest='changeScreenReaderFlag',default=True,help="Don't change the global system screen reader flag")
installGroup = parser.add_mutually_exclusive_group()
installGroup.add_argument('--install',action="store_true",dest='install',default=False,help="Installs NVDA (starting the new copy after installation)")
installGroup.add_argument('--install-silent',action="store_true",dest='installSilent',default=False,help="Installs NVDA silently (does not start the new copy after installation).")
installGroup.add_argument('--create-portable',action="store_true",dest='createPortable',default=False,help="Creates a portable copy of NVDA (starting the new copy after installation)")
installGroup.add_argument('--create-portable-silent',action="store_true",dest='createPortableSilent',default=False,help="Creates a portable copy of NVDA silently (does not start the new copy after installation).")
parser.add_argument('--portable-path',dest='portablePath',default=None,type=str,help="The path where a portable copy will be created")
parser.add_argument('--launcher',action="store_true",dest='launcher',default=False,help="Started from the launcher")
parser.add_argument('--enable-start-on-logon',metavar="True|False",type=stringToBool,dest='enableStartOnLogon',default=None,
help="When installing, enable NVDA's start on the logon screen")
# This option is passed by Ease of Access so that if someone downgrades without uninstalling
# (despite our discouragement), the downgraded copy won't be started in non-secure mode on secure desktops.
# (Older versions always required the --secure option to start in secure mode.)
# If this occurs, the user will see an obscure error,
# but that's far better than a major security hazard.
# If this option is provided, NVDA will not replace an already running instance (#10179)
parser.add_argument('--ease-of-access',action="store_true",dest='easeOfAccess',default=False,help="Started by Windows Ease of Access")
(globalVars.appArgs,globalVars.appArgsExtra)=parser.parse_known_args()
def terminateRunningNVDA(window):
processID,threadID=winUser.getWindowThreadProcessID(window)
winUser.PostMessage(window,winUser.WM_QUIT,0,0)
h=winKernel.openProcess(winKernel.SYNCHRONIZE,False,processID)
if not h:
# The process is already dead.
return
try:
res=winKernel.waitForSingleObject(h,4000)
if res==0:
# The process terminated within the timeout period.
return
finally:
winKernel.closeHandle(h)
# The process is refusing to exit gracefully, so kill it forcefully.
h = winKernel.openProcess(winKernel.PROCESS_TERMINATE | winKernel.SYNCHRONIZE, False, processID)
if not h:
raise OSError("Could not open process for termination")
try:
winKernel.TerminateProcess(h, 1)
winKernel.waitForSingleObject(h, 2000)
finally:
winKernel.closeHandle(h)
#Handle running multiple instances of NVDA
try:
oldAppWindowHandle=winUser.FindWindow(u'wxWindowClassNR',u'NVDA')
except:
oldAppWindowHandle=0
if not winUser.isWindow(oldAppWindowHandle):
oldAppWindowHandle=0
if oldAppWindowHandle and not globalVars.appArgs.easeOfAccess:
if globalVars.appArgs.check_running:
# NVDA is running.
sys.exit(0)
try:
terminateRunningNVDA(oldAppWindowHandle)
except:
sys.exit(1)
if globalVars.appArgs.quit or (oldAppWindowHandle and globalVars.appArgs.easeOfAccess):
sys.exit(0)
elif globalVars.appArgs.check_running:
# NVDA is not running.
sys.exit(1)
UOI_NAME = 2
def getDesktopName():
desktop = ctypes.windll.user32.GetThreadDesktop(ctypes.windll.kernel32.GetCurrentThreadId())
name = ctypes.create_unicode_buffer(256)
ctypes.windll.user32.GetUserObjectInformationW(desktop, UOI_NAME, ctypes.byref(name), ctypes.sizeof(name), None)
return name.value
#Ensure multiple instances are not fully started by using a mutex
ERROR_ALREADY_EXISTS=0XB7
desktopName=getDesktopName()
mutex=ctypes.windll.kernel32.CreateMutexW(None,True,u"Local\\NVDA_%s"%desktopName)
if not mutex or ctypes.windll.kernel32.GetLastError()==ERROR_ALREADY_EXISTS:
if mutex: ctypes.windll.kernel32.CloseHandle(mutex)
sys.exit(1)
isSecureDesktop = desktopName == "Winlogon"
if isSecureDesktop:
import winreg
try:
k = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r"SOFTWARE\NVDA")
if not winreg.QueryValueEx(k, u"serviceDebug")[0]:
globalVars.appArgs.secure = True
except WindowsError:
globalVars.appArgs.secure = True
globalVars.appArgs.changeScreenReaderFlag = False
globalVars.appArgs.minimal = True
globalVars.appArgs.configPath = os.path.join(sys.prefix, "systemConfig")
#os.environ['PYCHECKER']="--limit 10000 -q --changetypes"
#import pychecker.checker
#Initial logging and logging code
# #8516: because config manager isn't ready yet, we must let start and exit messages be logged unless disabled via --no-logging switch.
# However, do log things if debug logging or log level other than 0 (not set) is requested from command line switches.
logHandler.initialize()
if logHandler.log.getEffectiveLevel() == log.DEBUG:
log.debug("Provided arguments: {}".format(sys.argv[1:]))
import buildVersion
log.info("Starting NVDA version %s" % buildVersion.version)
log.debug("Debug level logging enabled")
if globalVars.appArgs.changeScreenReaderFlag:
winUser.setSystemScreenReaderFlag(True)
#Accept WM_QUIT from other processes, even if running with higher privileges
if not ctypes.windll.user32.ChangeWindowMessageFilter(winUser.WM_QUIT,1):
	raise ctypes.WinError()
# Make this the last application to be shut down and don't display a retry dialog box.
winKernel.SetProcessShutdownParameters(0x100, winKernel.SHUTDOWN_NORETRY)
if not isSecureDesktop and not config.isAppX:
import easeOfAccess
easeOfAccess.notify(3)
try:
import core
core.main()
except:
log.critical("core failure",exc_info=True)
sys.exit(1)
finally:
if not isSecureDesktop and not config.isAppX:
easeOfAccess.notify(2)
if globalVars.appArgs.changeScreenReaderFlag:
winUser.setSystemScreenReaderFlag(False)
ctypes.windll.kernel32.CloseHandle(mutex)
log.info("NVDA exit")
sys.exit(globalVars.exitCode)
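# Illustrative invocations based on the options defined above (the exact interpreter or
# launcher used to run nvda.pyw may differ per installation):
#   nvda.pyw --debug-logging -f debug.log
#   nvda.pyw -q                                  (ask an already running copy to quit)
#   nvda.pyw --create-portable --portable-path D:\portableNVDA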
| 46.70614
| 273
| 0.78289
|
6d934ad2474618683f2401c3b9d4b6c24abda169
| 324
|
py
|
Python
|
daily_coding_problem/931.py
|
mhetrerajat/ds-challenge
|
3208df5c29612b0dfe60c1c082da1f31ad220b49
|
[
"MIT"
] | null | null | null |
daily_coding_problem/931.py
|
mhetrerajat/ds-challenge
|
3208df5c29612b0dfe60c1c082da1f31ad220b49
|
[
"MIT"
] | 1
|
2021-05-18T07:30:16.000Z
|
2021-05-18T07:30:16.000Z
|
daily_coding_problem/931.py
|
mhetrerajat/ds-challenge
|
3208df5c29612b0dfe60c1c082da1f31ad220b49
|
[
"MIT"
] | null | null | null |
from typing import List
def get_gcd(arr: List[int]) -> int:
def gcd(a, b):
while b:
a, b = b, a % b
return a
a = arr[0]
for b in arr[1:]:
a = gcd(a, b)
return a
if __name__ == "__main__":
assert get_gcd([42, 56, 14]) == 14
assert get_gcd([2, 4, 6, 8]) == 2
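    # Worked example: gcd(42, 56) = 14 and gcd(14, 14) = 14, so get_gcd([42, 56, 14]) == 14,
    # which is exactly what the first assertion above checks.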
| 16.2
| 38
| 0.475309
|
ca509d87fe9940938fcc93383f847d5a401b56bc
| 6,289
|
py
|
Python
|
py3status/modules/diskdata.py
|
laxd/py3status
|
749e31419e65b5133b9c6b4a4ef4a4904d5cfe91
|
[
"BSD-3-Clause"
] | null | null | null |
py3status/modules/diskdata.py
|
laxd/py3status
|
749e31419e65b5133b9c6b4a4ef4a4904d5cfe91
|
[
"BSD-3-Clause"
] | null | null | null |
py3status/modules/diskdata.py
|
laxd/py3status
|
749e31419e65b5133b9c6b4a4ef4a4904d5cfe91
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Display advanced disk usage information
Configuration parameters:
cache_timeout: how often we refresh this module in seconds.
(default 10)
disk: disk or partition whose stat to check. Set to None to get global stats.
(default None)
format: format of the output.
(default "{disk}: {used_percent}% ({total})")
format_rate: format for the rates value
(default "[\?min_length=11 {value:.1f} {unit}]")
format_space: format for the disk space values
(default "[\?min_length=5 {value:.1f}]")
sector_size: size of the disk's sectors.
(default 512)
si_units: use SI units
(default False)
thresholds: thresholds to use for color changes
*(default {'free': [(0, 'bad'), (10, 'degraded'), (100, 'good')],
'total': [(0, "good"), (1024, 'degraded'), (1024 * 1024, 'bad')]})*
unit: unit to use. If the unit contains a multiplier prefix, only this
exact unit will ever be used
(default "B/s")
Format placeholders:
{disk} the selected disk
{free} free space on disk in GB
{used} used space on disk in GB
{used_percent} used space on disk in %
{read} reading rate
{total} total IO rate
{write} writing rate
format_rate placeholders:
{unit} name of the unit
{value} numeric value of the rate
format_space placeholders:
{value} numeric value of the free/used space on the device
Color thresholds:
{free} Change color based on the value of free
{used} Change color based on the value of used_percent
{read} Change color based on the value of read
{total} Change color based on the value of total
{write} Change color based on the value of write
@author guiniol
@license BSD
"""
from __future__ import division # python2 compatibility
from time import time
import subprocess
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 10
disk = None
format = "{disk}: {used_percent}% ({total})"
format_rate = "[\?min_length=11 {value:.1f} {unit}]"
format_space = "[\?min_length=5 {value:.1f}]"
sector_size = 512
si_units = False
thresholds = {
'free': [(0, "bad"), (10, "degraded"), (100, "good")],
'total': [(0, "good"), (1024, "degraded"), (1024 * 1024, "bad")]
}
unit = "B/s"
def __init__(self, *args, **kwargs):
"""
        Initialise rate tracking for this module: remember the current disk I/O
        counters and the current time so that the read/write/total rates can be
        computed on the first refresh.
"""
self.last_interface = None
self.last_stat = self._get_io_stats(self.disk)
self.last_time = time()
def space_and_io(self):
self.values = {'disk': self.disk if self.disk else 'all'}
if '{read}' in self.format or '{write}' in self.format or '{total}' in self.format:
# time from previous check
ios = self._get_io_stats(self.disk)
timedelta = time() - self.last_time
read = ios[0] - self.last_stat[0]
write = ios[1] - self.last_stat[1]
# update last_ info
self.last_stat = self._get_io_stats(self.disk)
self.last_time = time()
read /= timedelta
write /= timedelta
total = read + write
self.values['read'] = self._format_rate(read)
self.values['total'] = self._format_rate(total)
self.values['write'] = self._format_rate(write)
self.py3.threshold_get_color(read, 'read')
self.py3.threshold_get_color(total, 'total')
self.py3.threshold_get_color(write, 'write')
if '{free}' in self.format or '{used' in self.format:
free, used, used_percent = self._get_free_space(self.disk)
self.values['free'] = self.py3.safe_format(self.format_space, {'value': free})
self.values['used'] = self.py3.safe_format(self.format_space, {'value': used})
self.values['used_percent'] = self.py3.safe_format(self.format_space,
{'value': used_percent})
self.py3.threshold_get_color(free, 'free')
self.py3.threshold_get_color(used, 'used')
return {'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': self.py3.safe_format(self.format, self.values)}
def _get_free_space(self, disk):
if disk and not disk.startswith('/dev/'):
disk = '/dev/' + disk
total = 0
used = 0
free = 0
df = subprocess.check_output(['df']).decode('utf-8')
for line in df.splitlines():
if (disk and line.startswith(disk)) or (disk is None and line.startswith('/dev/')):
data = line.split()
total += int(data[1]) / 1024 / 1024
used += int(data[2]) / 1024 / 1024
free += int(data[3]) / 1024 / 1024
if total == 0:
return free, used, 'err'
return free, used, 100 * used / total
def _get_io_stats(self, disk):
if disk and disk.startswith('/dev/'):
disk = disk[5:]
read = 0
write = 0
with open('/proc/diskstats', 'r') as fd:
for line in fd:
if disk and disk in line:
data = line.split()
read += int(data[5]) * self.sector_size
write += int(data[9]) * self.sector_size
                elif not disk:  # no specific disk requested: aggregate whole-disk entries only
data = line.split()
if data[1] == '0':
read += int(data[5]) * self.sector_size
write += int(data[9]) * self.sector_size
return read, write
def _format_rate(self, value):
"""
Return formatted string
"""
value, unit = self.py3.format_units(value, unit=self.unit, si=self.si_units)
return self.py3.safe_format(self.format_rate, {'value': value, 'unit': unit})
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
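# A possible user configuration sketch for this module (parameter names follow the
# module docstring above; the values are only an example):
#
# diskdata {
#     disk = "sda"
#     format = "{disk}: {used_percent}% ({total})"
# }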
| 34.745856
| 95
| 0.57529
|
a2748af3cd8fc7e0a27b104009a95a46335189ba
| 28,004
|
py
|
Python
|
rich/pretty.py
|
msaroufim/rich
|
51165f4b21e2b3af896eca750ae6a080fc4d7cf5
|
[
"MIT"
] | 7
|
2021-09-29T09:46:36.000Z
|
2022-03-24T08:30:41.000Z
|
rich/pretty.py
|
msaroufim/rich
|
51165f4b21e2b3af896eca750ae6a080fc4d7cf5
|
[
"MIT"
] | 23
|
2021-07-05T01:25:33.000Z
|
2022-03-28T01:30:36.000Z
|
rich/pretty.py
|
ewels/rich
|
03b2c71193bc62a6e25bfff48fe7ea218c2cb57c
|
[
"MIT"
] | 1
|
2021-09-29T22:34:05.000Z
|
2021-09-29T22:34:05.000Z
|
import builtins
import os
import sys
from array import array
from collections import Counter, defaultdict, deque, UserDict, UserList
from dataclasses import dataclass, fields, is_dataclass
from inspect import isclass
from itertools import islice
from typing import (
DefaultDict,
TYPE_CHECKING,
Any,
Callable,
Dict,
Iterable,
List,
Optional,
Set,
Union,
Tuple,
)
from types import MappingProxyType
try:
import attr as _attr_module
except ImportError: # pragma: no cover
_attr_module = None # type: ignore
def _is_attr_object(obj: Any) -> bool:
"""Check if an object was created with attrs module."""
return _attr_module is not None and _attr_module.has(type(obj))
def _get_attr_fields(obj: Any) -> Iterable["_attr_module.Attribute[Any]"]:
"""Get fields for an attrs object."""
return _attr_module.fields(type(obj)) if _attr_module is not None else []
from .highlighter import ReprHighlighter
from . import get_console
from ._loop import loop_last
from ._pick import pick_bool
from .abc import RichRenderable
from .cells import cell_len
from .highlighter import ReprHighlighter
from .jupyter import JupyterMixin, JupyterRenderable
from .measure import Measurement
from .text import Text
if TYPE_CHECKING:
from .console import (
Console,
ConsoleOptions,
HighlighterType,
JustifyMethod,
OverflowMethod,
RenderResult,
)
def install(
console: Optional["Console"] = None,
overflow: "OverflowMethod" = "ignore",
crop: bool = False,
indent_guides: bool = False,
max_length: Optional[int] = None,
max_string: Optional[int] = None,
expand_all: bool = False,
) -> None:
"""Install automatic pretty printing in the Python REPL.
Args:
console (Console, optional): Console instance or ``None`` to use global console. Defaults to None.
overflow (Optional[OverflowMethod], optional): Overflow method. Defaults to "ignore".
crop (Optional[bool], optional): Enable cropping of long lines. Defaults to False.
indent_guides (bool, optional): Enable indentation guides. Defaults to False.
max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
Defaults to None.
max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to None.
expand_all (bool, optional): Expand all containers. Defaults to False
"""
from rich import get_console
from .console import ConsoleRenderable # needed here to prevent circular import
console = console or get_console()
assert console is not None
def display_hook(value: Any) -> None:
"""Replacement sys.displayhook which prettifies objects with Rich."""
if value is not None:
assert console is not None
builtins._ = None # type: ignore
console.print(
value
if isinstance(value, RichRenderable)
else Pretty(
value,
overflow=overflow,
indent_guides=indent_guides,
max_length=max_length,
max_string=max_string,
expand_all=expand_all,
),
crop=crop,
)
builtins._ = value # type: ignore
def ipy_display_hook(value: Any) -> None: # pragma: no cover
assert console is not None
# always skip rich generated jupyter renderables or None values
if isinstance(value, JupyterRenderable) or value is None:
return
        # on jupyter rich display, if using one of the special representations don't use rich
if console.is_jupyter and any(attr.startswith("_repr_") for attr in dir(value)):
return
if hasattr(value, "_repr_mimebundle_"):
return
# certain renderables should start on a new line
if isinstance(value, ConsoleRenderable):
console.line()
console.print(
value
if isinstance(value, RichRenderable)
else Pretty(
value,
overflow=overflow,
indent_guides=indent_guides,
max_length=max_length,
max_string=max_string,
expand_all=expand_all,
margin=12,
),
crop=crop,
)
try: # pragma: no cover
ip = get_ipython() # type: ignore
from IPython.core.formatters import BaseFormatter
# replace plain text formatter with rich formatter
rich_formatter = BaseFormatter()
rich_formatter.for_type(object, func=ipy_display_hook)
ip.display_formatter.formatters["text/plain"] = rich_formatter
except Exception:
sys.displayhook = display_hook
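# A minimal usage sketch for the hook above, assuming this module is importable as
# rich.pretty (values echoed at the REPL are then pretty printed automatically):
#
#     >>> from rich.pretty import install
#     >>> install(indent_guides=True, max_length=10)
#     >>> ["Rich"] * 3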
class Pretty(JupyterMixin):
"""A rich renderable that pretty prints an object.
Args:
_object (Any): An object to pretty print.
highlighter (HighlighterType, optional): Highlighter object to apply to result, or None for ReprHighlighter. Defaults to None.
indent_size (int, optional): Number of spaces in indent. Defaults to 4.
justify (JustifyMethod, optional): Justify method, or None for default. Defaults to None.
overflow (OverflowMethod, optional): Overflow method, or None for default. Defaults to None.
no_wrap (Optional[bool], optional): Disable word wrapping. Defaults to False.
indent_guides (bool, optional): Enable indentation guides. Defaults to False.
max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
Defaults to None.
max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to None.
expand_all (bool, optional): Expand all containers. Defaults to False.
        margin (int, optional): Subtract a margin from the width to force containers to expand earlier. Defaults to 0.
insert_line (bool, optional): Insert a new line if the output has multiple new lines. Defaults to False.
"""
def __init__(
self,
_object: Any,
highlighter: Optional["HighlighterType"] = None,
*,
indent_size: int = 4,
justify: Optional["JustifyMethod"] = None,
overflow: Optional["OverflowMethod"] = None,
no_wrap: Optional[bool] = False,
indent_guides: bool = False,
max_length: Optional[int] = None,
max_string: Optional[int] = None,
expand_all: bool = False,
margin: int = 0,
insert_line: bool = False,
) -> None:
self._object = _object
self.highlighter = highlighter or ReprHighlighter()
self.indent_size = indent_size
self.justify = justify
self.overflow = overflow
self.no_wrap = no_wrap
self.indent_guides = indent_guides
self.max_length = max_length
self.max_string = max_string
self.expand_all = expand_all
self.margin = margin
self.insert_line = insert_line
def __rich_console__(
self, console: "Console", options: "ConsoleOptions"
) -> "RenderResult":
pretty_str = pretty_repr(
self._object,
max_width=options.max_width - self.margin,
indent_size=self.indent_size,
max_length=self.max_length,
max_string=self.max_string,
expand_all=self.expand_all,
)
pretty_text = Text(
pretty_str,
justify=self.justify or options.justify,
overflow=self.overflow or options.overflow,
no_wrap=pick_bool(self.no_wrap, options.no_wrap),
style="pretty",
)
pretty_text = (
self.highlighter(pretty_text)
if pretty_text
else Text(
f"{type(self._object)}.__repr__ returned empty string",
style="dim italic",
)
)
if self.indent_guides and not options.ascii_only:
pretty_text = pretty_text.with_indent_guides(
self.indent_size, style="repr.indent"
)
if self.insert_line and "\n" in pretty_text:
yield ""
yield pretty_text
def __rich_measure__(
self, console: "Console", options: "ConsoleOptions"
) -> "Measurement":
pretty_str = pretty_repr(
self._object,
max_width=options.max_width,
indent_size=self.indent_size,
max_length=self.max_length,
max_string=self.max_string,
)
text_width = (
max(cell_len(line) for line in pretty_str.splitlines()) if pretty_str else 0
)
return Measurement(text_width, text_width)
def _get_braces_for_defaultdict(_object: DefaultDict[Any, Any]) -> Tuple[str, str, str]:
return (
f"defaultdict({_object.default_factory!r}, {{",
"})",
f"defaultdict({_object.default_factory!r}, {{}})",
)
def _get_braces_for_array(_object: "array[Any]") -> Tuple[str, str, str]:
return (f"array({_object.typecode!r}, [", "])", "array({_object.typecode!r})")
_BRACES: Dict[type, Callable[[Any], Tuple[str, str, str]]] = {
os._Environ: lambda _object: ("environ({", "})", "environ({})"),
array: _get_braces_for_array,
defaultdict: _get_braces_for_defaultdict,
Counter: lambda _object: ("Counter({", "})", "Counter()"),
deque: lambda _object: ("deque([", "])", "deque()"),
dict: lambda _object: ("{", "}", "{}"),
UserDict: lambda _object: ("{", "}", "{}"),
frozenset: lambda _object: ("frozenset({", "})", "frozenset()"),
list: lambda _object: ("[", "]", "[]"),
UserList: lambda _object: ("[", "]", "[]"),
set: lambda _object: ("{", "}", "set()"),
tuple: lambda _object: ("(", ")", "()"),
MappingProxyType: lambda _object: ("mappingproxy({", "})", "mappingproxy({})"),
}
_CONTAINERS = tuple(_BRACES.keys())
_MAPPING_CONTAINERS = (dict, os._Environ, MappingProxyType, UserDict)
def is_expandable(obj: Any) -> bool:
"""Check if an object may be expanded by pretty print."""
return (
isinstance(obj, _CONTAINERS)
or (is_dataclass(obj))
or (hasattr(obj, "__rich_repr__"))
or _is_attr_object(obj)
) and not isclass(obj)
@dataclass
class Node:
"""A node in a repr tree. May be atomic or a container."""
key_repr: str = ""
value_repr: str = ""
open_brace: str = ""
close_brace: str = ""
empty: str = ""
last: bool = False
is_tuple: bool = False
children: Optional[List["Node"]] = None
key_separator = ": "
separator: str = ", "
def iter_tokens(self) -> Iterable[str]:
"""Generate tokens for this node."""
if self.key_repr:
yield self.key_repr
yield self.key_separator
if self.value_repr:
yield self.value_repr
elif self.children is not None:
if self.children:
yield self.open_brace
if self.is_tuple and len(self.children) == 1:
yield from self.children[0].iter_tokens()
yield ","
else:
for child in self.children:
yield from child.iter_tokens()
if not child.last:
yield self.separator
yield self.close_brace
else:
yield self.empty
def check_length(self, start_length: int, max_length: int) -> bool:
"""Check the length fits within a limit.
Args:
start_length (int): Starting length of the line (indent, prefix, suffix).
max_length (int): Maximum length.
Returns:
bool: True if the node can be rendered within max length, otherwise False.
"""
total_length = start_length
for token in self.iter_tokens():
total_length += cell_len(token)
if total_length > max_length:
return False
return True
def __str__(self) -> str:
repr_text = "".join(self.iter_tokens())
return repr_text
def render(
self, max_width: int = 80, indent_size: int = 4, expand_all: bool = False
) -> str:
"""Render the node to a pretty repr.
Args:
max_width (int, optional): Maximum width of the repr. Defaults to 80.
indent_size (int, optional): Size of indents. Defaults to 4.
expand_all (bool, optional): Expand all levels. Defaults to False.
Returns:
str: A repr string of the original object.
"""
lines = [_Line(node=self, is_root=True)]
line_no = 0
while line_no < len(lines):
line = lines[line_no]
if line.expandable and not line.expanded:
if expand_all or not line.check_length(max_width):
lines[line_no : line_no + 1] = line.expand(indent_size)
line_no += 1
repr_str = "\n".join(str(line) for line in lines)
return repr_str
@dataclass
class _Line:
"""A line in repr output."""
parent: Optional["_Line"] = None
is_root: bool = False
node: Optional[Node] = None
text: str = ""
suffix: str = ""
whitespace: str = ""
expanded: bool = False
last: bool = False
@property
def expandable(self) -> bool:
"""Check if the line may be expanded."""
return bool(self.node is not None and self.node.children)
def check_length(self, max_length: int) -> bool:
"""Check this line fits within a given number of cells."""
start_length = (
len(self.whitespace) + cell_len(self.text) + cell_len(self.suffix)
)
assert self.node is not None
return self.node.check_length(start_length, max_length)
def expand(self, indent_size: int) -> Iterable["_Line"]:
"""Expand this line by adding children on their own line."""
node = self.node
assert node is not None
whitespace = self.whitespace
assert node.children
if node.key_repr:
new_line = yield _Line(
text=f"{node.key_repr}{node.key_separator}{node.open_brace}",
whitespace=whitespace,
)
else:
new_line = yield _Line(text=node.open_brace, whitespace=whitespace)
child_whitespace = self.whitespace + " " * indent_size
tuple_of_one = node.is_tuple and len(node.children) == 1
for last, child in loop_last(node.children):
separator = "," if tuple_of_one else node.separator
line = _Line(
parent=new_line,
node=child,
whitespace=child_whitespace,
suffix=separator,
last=last and not tuple_of_one,
)
yield line
yield _Line(
text=node.close_brace,
whitespace=whitespace,
suffix=self.suffix,
last=self.last,
)
def __str__(self) -> str:
if self.last:
return f"{self.whitespace}{self.text}{self.node or ''}"
else:
return (
f"{self.whitespace}{self.text}{self.node or ''}{self.suffix.rstrip()}"
)
def traverse(
_object: Any, max_length: Optional[int] = None, max_string: Optional[int] = None
) -> Node:
"""Traverse object and generate a tree.
Args:
_object (Any): Object to be traversed.
max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
Defaults to None.
max_string (int, optional): Maximum length of string before truncating, or None to disable truncating.
Defaults to None.
Returns:
Node: The root of a tree structure which can be used to render a pretty repr.
"""
def to_repr(obj: Any) -> str:
"""Get repr string for an object, but catch errors."""
if (
max_string is not None
and isinstance(obj, (bytes, str))
and len(obj) > max_string
):
truncated = len(obj) - max_string
obj_repr = f"{obj[:max_string]!r}+{truncated}"
else:
try:
obj_repr = repr(obj)
except Exception as error:
obj_repr = f"<repr-error '{error}'>"
return obj_repr
visited_ids: Set[int] = set()
push_visited = visited_ids.add
pop_visited = visited_ids.remove
def _traverse(obj: Any, root: bool = False) -> Node:
"""Walk the object depth first."""
obj_type = type(obj)
py_version = (sys.version_info.major, sys.version_info.minor)
children: List[Node]
def iter_rich_args(rich_args: Any) -> Iterable[Union[Any, Tuple[str, Any]]]:
for arg in rich_args:
if isinstance(arg, tuple):
if len(arg) == 3:
key, child, default = arg
if default == child:
continue
yield key, child
elif len(arg) == 2:
key, child = arg
yield key, child
elif len(arg) == 1:
yield arg[0]
else:
yield arg
if hasattr(obj, "__rich_repr__") and not isclass(obj):
angular = getattr(obj.__rich_repr__, "angular", False)
args = list(iter_rich_args(obj.__rich_repr__()))
class_name = obj.__class__.__name__
if args:
children = []
append = children.append
if angular:
node = Node(
open_brace=f"<{class_name} ",
close_brace=">",
children=children,
last=root,
separator=" ",
)
else:
node = Node(
open_brace=f"{class_name}(",
close_brace=")",
children=children,
last=root,
)
for last, arg in loop_last(args):
if isinstance(arg, tuple):
key, child = arg
child_node = _traverse(child)
child_node.last = last
child_node.key_repr = key
child_node.key_separator = "="
append(child_node)
else:
child_node = _traverse(arg)
child_node.last = last
append(child_node)
else:
node = Node(
value_repr=f"<{class_name}>" if angular else f"{class_name}()",
children=[],
last=root,
)
elif _is_attr_object(obj):
children = []
append = children.append
attr_fields = _get_attr_fields(obj)
if attr_fields:
node = Node(
open_brace=f"{obj.__class__.__name__}(",
close_brace=")",
children=children,
last=root,
)
def iter_attrs() -> Iterable[
Tuple[str, Any, Optional[Callable[[Any], str]]]
]:
"""Iterate over attr fields and values."""
for attr in attr_fields:
if attr.repr:
try:
value = getattr(obj, attr.name)
except Exception as error:
# Can happen, albeit rarely
yield (attr.name, error, None)
else:
yield (
attr.name,
value,
attr.repr if callable(attr.repr) else None,
)
for last, (name, value, repr_callable) in loop_last(iter_attrs()):
if repr_callable:
child_node = Node(value_repr=str(repr_callable(value)))
else:
child_node = _traverse(value)
child_node.last = last
child_node.key_repr = name
child_node.key_separator = "="
append(child_node)
else:
node = Node(
value_repr=f"{obj.__class__.__name__}()", children=[], last=root
)
elif (
is_dataclass(obj)
and not isinstance(obj, type)
and (
"__create_fn__" in obj.__repr__.__qualname__ or py_version == (3, 6)
) # Check if __repr__ wasn't overridden
):
obj_id = id(obj)
if obj_id in visited_ids:
# Recursion detected
return Node(value_repr="...")
push_visited(obj_id)
children = []
append = children.append
node = Node(
open_brace=f"{obj.__class__.__name__}(",
close_brace=")",
children=children,
last=root,
)
for last, field in loop_last(fields(obj)):
if field.repr:
child_node = _traverse(getattr(obj, field.name))
child_node.key_repr = field.name
child_node.last = last
child_node.key_separator = "="
append(child_node)
pop_visited(obj_id)
elif isinstance(obj, _CONTAINERS):
for container_type in _CONTAINERS:
if isinstance(obj, container_type):
obj_type = container_type
break
obj_id = id(obj)
if obj_id in visited_ids:
# Recursion detected
return Node(value_repr="...")
push_visited(obj_id)
open_brace, close_brace, empty = _BRACES[obj_type](obj)
if obj_type.__repr__ != type(obj).__repr__:
node = Node(value_repr=to_repr(obj), last=root)
elif obj:
children = []
node = Node(
open_brace=open_brace,
close_brace=close_brace,
children=children,
last=root,
)
append = children.append
num_items = len(obj)
last_item_index = num_items - 1
if isinstance(obj, _MAPPING_CONTAINERS):
iter_items = iter(obj.items())
if max_length is not None:
iter_items = islice(iter_items, max_length)
for index, (key, child) in enumerate(iter_items):
child_node = _traverse(child)
child_node.key_repr = to_repr(key)
child_node.last = index == last_item_index
append(child_node)
else:
iter_values = iter(obj)
if max_length is not None:
iter_values = islice(iter_values, max_length)
for index, child in enumerate(iter_values):
child_node = _traverse(child)
child_node.last = index == last_item_index
append(child_node)
if max_length is not None and num_items > max_length:
append(Node(value_repr=f"... +{num_items-max_length}", last=True))
else:
node = Node(empty=empty, children=[], last=root)
pop_visited(obj_id)
else:
node = Node(value_repr=to_repr(obj), last=root)
node.is_tuple = isinstance(obj, tuple)
return node
node = _traverse(_object, root=True)
return node
def pretty_repr(
_object: Any,
*,
max_width: int = 80,
indent_size: int = 4,
max_length: Optional[int] = None,
max_string: Optional[int] = None,
expand_all: bool = False,
) -> str:
"""Prettify repr string by expanding on to new lines to fit within a given width.
Args:
_object (Any): Object to repr.
max_width (int, optional): Desired maximum width of repr string. Defaults to 80.
indent_size (int, optional): Number of spaces to indent. Defaults to 4.
max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
Defaults to None.
max_string (int, optional): Maximum length of string before truncating, or None to disable truncating.
Defaults to None.
expand_all (bool, optional): Expand all containers regardless of available width. Defaults to False.
Returns:
str: A possibly multi-line representation of the object.
"""
if isinstance(_object, Node):
node = _object
else:
node = traverse(_object, max_length=max_length, max_string=max_string)
repr_str = node.render(
max_width=max_width, indent_size=indent_size, expand_all=expand_all
)
return repr_str
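# A small usage sketch (illustrative): pretty_repr expands containers that do not fit
# within max_width onto new, indented lines and returns the result as a string.
#
#     >>> from rich.pretty import pretty_repr
#     >>> print(pretty_repr(list(range(30)), max_width=40))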
def pprint(
_object: Any,
*,
console: Optional["Console"] = None,
indent_guides: bool = True,
max_length: Optional[int] = None,
max_string: Optional[int] = None,
expand_all: bool = False,
) -> None:
"""A convenience function for pretty printing.
Args:
_object (Any): Object to pretty print.
console (Console, optional): Console instance, or None to use default. Defaults to None.
max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation.
Defaults to None.
max_string (int, optional): Maximum length of strings before truncating, or None to disable. Defaults to None.
indent_guides (bool, optional): Enable indentation guides. Defaults to True.
expand_all (bool, optional): Expand all containers. Defaults to False.
"""
_console = get_console() if console is None else console
_console.print(
Pretty(
_object,
max_length=max_length,
max_string=max_string,
indent_guides=indent_guides,
expand_all=expand_all,
overflow="ignore",
),
soft_wrap=True,
)
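# Equivalent convenience call (illustrative): pprint(data, max_length=3, max_string=20)
# prints the pretty representation via the global Console instead of returning a string.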
if __name__ == "__main__": # pragma: no cover
class BrokenRepr:
def __repr__(self) -> str:
1 / 0
return "this will fail"
d = defaultdict(int)
d["foo"] = 5
data = {
"foo": [
1,
"Hello World!",
100.123,
323.232,
432324.0,
{5, 6, 7, (1, 2, 3, 4), 8},
],
"bar": frozenset({1, 2, 3}),
"defaultdict": defaultdict(
list, {"crumble": ["apple", "rhubarb", "butter", "sugar", "flour"]}
),
"counter": Counter(
[
"apple",
"orange",
"pear",
"kumquat",
"kumquat",
"durian" * 100,
]
),
"atomic": (False, True, None),
"Broken": BrokenRepr(),
}
data["foo"].append(data) # type: ignore
from rich import print
print(Pretty(data, indent_guides=True, max_string=20))
| 35.493029
| 134
| 0.553064
|
d415690c5f2862e432e10f9c3eb77f4ae650a1ab
| 13,435
|
py
|
Python
|
pkgs/ipython-1.2.1-py27_0/lib/python2.7/site-packages/IPython/kernel/manager.py
|
wangyum/anaconda
|
6e5a0dbead3327661d73a61e85414cf92aa52be6
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
pkgs/ipython-1.2.1-py27_0/lib/python2.7/site-packages/IPython/kernel/manager.py
|
wangyum/anaconda
|
6e5a0dbead3327661d73a61e85414cf92aa52be6
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
pkgs/ipython-1.2.1-py27_0/lib/python2.7/site-packages/IPython/kernel/manager.py
|
wangyum/anaconda
|
6e5a0dbead3327661d73a61e85414cf92aa52be6
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
"""Base class to manage a running kernel
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import
# Standard library imports
import re
import signal
import sys
import time
import zmq
# Local imports
from IPython.config.configurable import LoggingConfigurable
from IPython.utils.importstring import import_item
from IPython.utils.localinterfaces import LOCAL_IPS
from IPython.utils.traitlets import (
Any, Instance, Unicode, List, Bool, Type, DottedObjectName
)
from IPython.kernel import (
make_ipkernel_cmd,
launch_kernel,
)
from .connect import ConnectionFileMixin
from .zmq.session import Session
from .managerabc import (
KernelManagerABC
)
#-----------------------------------------------------------------------------
# Main kernel manager class
#-----------------------------------------------------------------------------
class KernelManager(LoggingConfigurable, ConnectionFileMixin):
"""Manages a single kernel in a subprocess on this host.
This version starts kernels with Popen.
"""
# The PyZMQ Context to use for communication with the kernel.
context = Instance(zmq.Context)
def _context_default(self):
return zmq.Context.instance()
# The Session to use for communication with the kernel.
session = Instance(Session)
def _session_default(self):
return Session(parent=self)
# the class to create with our `client` method
client_class = DottedObjectName('IPython.kernel.blocking.BlockingKernelClient')
client_factory = Type()
def _client_class_changed(self, name, old, new):
self.client_factory = import_item(str(new))
# The kernel process with which the KernelManager is communicating.
# generally a Popen instance
kernel = Any()
kernel_cmd = List(Unicode, config=True,
help="""The Popen Command to launch the kernel.
        Override this if you have a custom kernel.
"""
)
def _kernel_cmd_changed(self, name, old, new):
self.ipython_kernel = False
ipython_kernel = Bool(True)
# Protected traits
_launch_args = Any()
_control_socket = Any()
_restarter = Any()
autorestart = Bool(False, config=True,
help="""Should we autorestart the kernel if it dies."""
)
def __del__(self):
self._close_control_socket()
self.cleanup_connection_file()
#--------------------------------------------------------------------------
# Kernel restarter
#--------------------------------------------------------------------------
def start_restarter(self):
pass
def stop_restarter(self):
pass
def add_restart_callback(self, callback, event='restart'):
"""register a callback to be called when a kernel is restarted"""
if self._restarter is None:
return
self._restarter.add_callback(callback, event)
def remove_restart_callback(self, callback, event='restart'):
"""unregister a callback to be called when a kernel is restarted"""
if self._restarter is None:
return
self._restarter.remove_callback(callback, event)
#--------------------------------------------------------------------------
# create a Client connected to our Kernel
#--------------------------------------------------------------------------
def client(self, **kwargs):
"""Create a client configured to connect to our kernel"""
if self.client_factory is None:
self.client_factory = import_item(self.client_class)
kw = {}
kw.update(self.get_connection_info())
kw.update(dict(
connection_file=self.connection_file,
session=self.session,
parent=self,
))
# add kwargs last, for manual overrides
kw.update(kwargs)
return self.client_factory(**kw)
#--------------------------------------------------------------------------
# Kernel management
#--------------------------------------------------------------------------
def format_kernel_cmd(self, **kw):
"""replace templated args (e.g. {connection_file})"""
if self.kernel_cmd:
cmd = self.kernel_cmd
else:
cmd = make_ipkernel_cmd(
'from IPython.kernel.zmq.kernelapp import main; main()',
**kw
)
ns = dict(connection_file=self.connection_file)
ns.update(self._launch_args)
pat = re.compile(r'\{([A-Za-z0-9_]+)\}')
def from_ns(match):
"""Get the key out of ns if it's there, otherwise no change."""
return ns.get(match.group(1), match.group())
return [ pat.sub(from_ns, arg) for arg in cmd ]
def _launch_kernel(self, kernel_cmd, **kw):
"""actually launch the kernel
override in a subclass to launch kernel subprocesses differently
"""
return launch_kernel(kernel_cmd, **kw)
# Control socket used for polite kernel shutdown
def _connect_control_socket(self):
if self._control_socket is None:
self._control_socket = self.connect_control()
self._control_socket.linger = 100
def _close_control_socket(self):
if self._control_socket is None:
return
self._control_socket.close()
self._control_socket = None
def start_kernel(self, **kw):
"""Starts a kernel on this host in a separate process.
If random ports (port=0) are being used, this method must be called
before the channels are created.
Parameters:
-----------
**kw : optional
keyword arguments that are passed down to build the kernel_cmd
and launching the kernel (e.g. Popen kwargs).
"""
if self.transport == 'tcp' and self.ip not in LOCAL_IPS:
raise RuntimeError("Can only launch a kernel on a local interface. "
"Make sure that the '*_address' attributes are "
"configured properly. "
"Currently valid addresses are: %s"%LOCAL_IPS
)
# write connection file / get default ports
self.write_connection_file()
# save kwargs for use in restart
self._launch_args = kw.copy()
# build the Popen cmd
kernel_cmd = self.format_kernel_cmd(**kw)
# launch the kernel subprocess
self.kernel = self._launch_kernel(kernel_cmd,
ipython_kernel=self.ipython_kernel,
**kw)
self.start_restarter()
self._connect_control_socket()
def _send_shutdown_request(self, restart=False):
"""TODO: send a shutdown request via control channel"""
content = dict(restart=restart)
msg = self.session.msg("shutdown_request", content=content)
self.session.send(self._control_socket, msg)
def shutdown_kernel(self, now=False, restart=False):
"""Attempts to the stop the kernel process cleanly.
This attempts to shutdown the kernels cleanly by:
1. Sending it a shutdown message over the shell channel.
2. If that fails, the kernel is shutdown forcibly by sending it
a signal.
Parameters:
-----------
now : bool
            Should the kernel be forcibly killed *now*. This skips the
first, nice shutdown attempt.
restart: bool
Will this kernel be restarted after it is shutdown. When this
is True, connection files will not be cleaned up.
"""
# Stop monitoring for restarting while we shutdown.
self.stop_restarter()
# FIXME: Shutdown does not work on Windows due to ZMQ errors!
if sys.platform == 'win32':
self._kill_kernel()
return
if now:
if self.has_kernel:
self._kill_kernel()
else:
# Don't send any additional kernel kill messages immediately, to give
# the kernel a chance to properly execute shutdown actions. Wait for at
# most 1s, checking every 0.1s.
self._send_shutdown_request(restart=restart)
for i in range(10):
if self.is_alive():
time.sleep(0.1)
else:
break
else:
# OK, we've waited long enough.
if self.has_kernel:
self._kill_kernel()
if not restart:
self.cleanup_connection_file()
self.cleanup_ipc_files()
else:
self.cleanup_ipc_files()
def restart_kernel(self, now=False, **kw):
"""Restarts a kernel with the arguments that were used to launch it.
If the old kernel was launched with random ports, the same ports will be
used for the new kernel. The same connection file is used again.
Parameters
----------
now : bool, optional
If True, the kernel is forcefully restarted *immediately*, without
having a chance to do any cleanup action. Otherwise the kernel is
given 1s to clean up before a forceful restart is issued.
        In all cases the kernel is restarted; the only difference is whether
it is given a chance to perform a clean shutdown or not.
**kw : optional
Any options specified here will overwrite those used to launch the
kernel.
"""
if self._launch_args is None:
raise RuntimeError("Cannot restart the kernel. "
"No previous call to 'start_kernel'.")
else:
# Stop currently running kernel.
self.shutdown_kernel(now=now, restart=True)
# Start new kernel.
self._launch_args.update(kw)
self.start_kernel(**self._launch_args)
# FIXME: Messages get dropped in Windows due to probable ZMQ bug
# unless there is some delay here.
if sys.platform == 'win32':
time.sleep(0.2)
@property
def has_kernel(self):
"""Has a kernel been started that we are managing."""
return self.kernel is not None
def _kill_kernel(self):
"""Kill the running kernel.
This is a private method, callers should use shutdown_kernel(now=True).
"""
if self.has_kernel:
# Signal the kernel to terminate (sends SIGKILL on Unix and calls
# TerminateProcess() on Win32).
try:
self.kernel.kill()
except OSError as e:
# In Windows, we will get an Access Denied error if the process
# has already terminated. Ignore it.
if sys.platform == 'win32':
if e.winerror != 5:
raise
# On Unix, we may get an ESRCH error if the process has already
# terminated. Ignore it.
else:
from errno import ESRCH
if e.errno != ESRCH:
raise
# Block until the kernel terminates.
self.kernel.wait()
self.kernel = None
else:
raise RuntimeError("Cannot kill kernel. No kernel is running!")
def interrupt_kernel(self):
"""Interrupts the kernel by sending it a signal.
Unlike ``signal_kernel``, this operation is well supported on all
platforms.
"""
if self.has_kernel:
if sys.platform == 'win32':
from .zmq.parentpoller import ParentPollerWindows as Poller
Poller.send_interrupt(self.kernel.win32_interrupt_event)
else:
self.kernel.send_signal(signal.SIGINT)
else:
raise RuntimeError("Cannot interrupt kernel. No kernel is running!")
def signal_kernel(self, signum):
"""Sends a signal to the kernel.
Note that since only SIGTERM is supported on Windows, this function is
only useful on Unix systems.
"""
if self.has_kernel:
self.kernel.send_signal(signum)
else:
raise RuntimeError("Cannot signal kernel. No kernel is running!")
def is_alive(self):
"""Is the kernel process still running?"""
if self.has_kernel:
if self.kernel.poll() is None:
return True
else:
return False
else:
# we don't have a kernel
return False
#-----------------------------------------------------------------------------
# ABC Registration
#-----------------------------------------------------------------------------
KernelManagerABC.register(KernelManager)
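# A typical lifecycle sketch (illustrative; client() returns the blocking client class
# configured above):
#
#     km = KernelManager()
#     km.start_kernel()
#     kc = km.client()
#     kc.start_channels()
#     ...                      # exchange messages with the kernel
#     kc.stop_channels()
#     km.shutdown_kernel()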
| 34.715762
| 83
| 0.553554
|
f68165f0308efe3ba2b1ed8a26cd1d16e9461a95
| 7,123
|
py
|
Python
|
airflow/utils/log/gcs_task_handler.py
|
snjypl/incubator-airflow
|
a3c5783f51763901ffba399aaad6dd485b4eda14
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1
|
2019-05-09T08:59:41.000Z
|
2019-05-09T08:59:41.000Z
|
airflow/utils/log/gcs_task_handler.py
|
snjypl/incubator-airflow
|
a3c5783f51763901ffba399aaad6dd485b4eda14
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
airflow/utils/log/gcs_task_handler.py
|
snjypl/incubator-airflow
|
a3c5783f51763901ffba399aaad6dd485b4eda14
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from airflow import configuration
from airflow.exceptions import AirflowException
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.log.file_task_handler import FileTaskHandler
class GCSTaskHandler(FileTaskHandler, LoggingMixin):
"""
GCSTaskHandler is a python log handler that handles and reads
task instance logs. It extends airflow FileTaskHandler and
uploads to and reads from GCS remote storage. Upon log reading
failure, it reads from host machine's local disk.
"""
def __init__(self, base_log_folder, gcs_log_folder, filename_template):
super(GCSTaskHandler, self).__init__(base_log_folder, filename_template)
self.remote_base = gcs_log_folder
self.log_relative_path = ''
self._hook = None
self.closed = False
self.upload_on_close = True
def _build_hook(self):
remote_conn_id = configuration.get('core', 'REMOTE_LOG_CONN_ID')
try:
from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook
return GoogleCloudStorageHook(
google_cloud_storage_conn_id=remote_conn_id
)
except Exception as e:
self.log.error(
'Could not create a GoogleCloudStorageHook with connection id '
'"{}". {}\n\nPlease make sure that airflow[gcp_api] is installed '
'and the GCS connection exists.'.format(remote_conn_id, str(e))
)
@property
def hook(self):
if self._hook is None:
self._hook = self._build_hook()
return self._hook
def set_context(self, ti):
super(GCSTaskHandler, self).set_context(ti)
# Log relative path is used to construct local and remote
# log path to upload log files into GCS and read from the
# remote location.
self.log_relative_path = self._render_filename(ti, ti.try_number)
self.upload_on_close = not ti.is_raw
def close(self):
"""
        Close and upload the local log file to remote GCS storage.
"""
# When application exit, system shuts down all handlers by
# calling close method. Here we check if logger is already
# closed to prevent uploading the log to remote storage multiple
# times when `logging.shutdown` is called.
if self.closed:
return
super(GCSTaskHandler, self).close()
if not self.upload_on_close:
return
local_loc = os.path.join(self.local_base, self.log_relative_path)
remote_loc = os.path.join(self.remote_base, self.log_relative_path)
if os.path.exists(local_loc):
# read log and remove old logs to get just the latest additions
with open(local_loc, 'r') as logfile:
log = logfile.read()
self.gcs_write(log, remote_loc)
# Mark closed so we don't double write if close is called twice
self.closed = True
def _read(self, ti, try_number):
"""
Read logs of given task instance and try_number from GCS.
If failed, read the log from task instance host machine.
:param ti: task instance object
:param try_number: task instance try_number to read logs from
"""
        # Explicitly getting the log relative path is necessary because the given
        # task instance might be different from the task instance passed in to
        # the set_context method.
log_relative_path = self._render_filename(ti, try_number)
remote_loc = os.path.join(self.remote_base, log_relative_path)
try:
remote_log = self.gcs_read(remote_loc)
log = '*** Reading remote log from {}.\n{}\n'.format(
remote_loc, remote_log)
except Exception as e:
log = '*** Unable to read remote log from {}\n*** {}\n\n'.format(
remote_loc, str(e))
self.log.error(log)
log += super(GCSTaskHandler, self)._read(ti, try_number)
return log
def gcs_read(self, remote_log_location):
"""
Returns the log found at the remote_log_location.
:param remote_log_location: the log's location in remote storage
:type remote_log_location: string (path)
"""
bkt, blob = self.parse_gcs_url(remote_log_location)
return self.hook.download(bkt, blob).decode()
def gcs_write(self, log, remote_log_location, append=True):
"""
Writes the log to the remote_log_location. Fails silently if no hook
was created.
:param log: the log to write to the remote_log_location
:type log: string
:param remote_log_location: the log's location in remote storage
:type remote_log_location: string (path)
:param append: if False, any existing log file is overwritten. If True,
the new log is appended to any existing logs.
:type append: bool
"""
        if append:
            old_log = ''
            try:
                old_log = self.gcs_read(remote_log_location)
            except Exception as e:
                # A 404 simply means there is no existing log to append to;
                # any other error is noted inline so it is not lost.
                if not hasattr(e, 'resp') or e.resp.get('status') != '404':
                    old_log = '*** Previous log discarded: {}\n\n'.format(str(e))
            log = '\n'.join([old_log, log]) if old_log else log
try:
bkt, blob = self.parse_gcs_url(remote_log_location)
from tempfile import NamedTemporaryFile
with NamedTemporaryFile(mode='w+') as tmpfile:
tmpfile.write(log)
# Force the file to be flushed, since we're doing the
# upload from within the file context (it hasn't been
# closed).
tmpfile.flush()
self.hook.upload(bkt, blob, tmpfile.name)
except Exception as e:
self.log.error('Could not write logs to %s: %s', remote_log_location, e)
def parse_gcs_url(self, gsurl):
"""
Given a Google Cloud Storage URL (gs://<bucket>/<blob>), returns a
tuple containing the corresponding bucket and blob.
"""
# Python 3
try:
from urllib.parse import urlparse
# Python 2
except ImportError:
from urlparse import urlparse
parsed_url = urlparse(gsurl)
if not parsed_url.netloc:
raise AirflowException('Please provide a bucket name')
else:
bucket = parsed_url.netloc
blob = parsed_url.path.strip('/')
return bucket, blob
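# For example (illustrative), parse_gcs_url("gs://my-bucket/dag/task/1.log") returns
# ("my-bucket", "dag/task/1.log"); gcs_read and gcs_write use that pair with the
# hook's download and upload calls.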
| 40.016854
| 84
| 0.633581
|
8ec015b75a3a319f98ba4312f332f43671bb2ed3
| 1,690
|
py
|
Python
|
Sequential LDA Runs/Volatility_SeqLDA.py
|
mehasadasivam/NLP
|
174ea8c4cac1db53613bd7625962b0c087d0004c
|
[
"MIT"
] | null | null | null |
Sequential LDA Runs/Volatility_SeqLDA.py
|
mehasadasivam/NLP
|
174ea8c4cac1db53613bd7625962b0c087d0004c
|
[
"MIT"
] | null | null | null |
Sequential LDA Runs/Volatility_SeqLDA.py
|
mehasadasivam/NLP
|
174ea8c4cac1db53613bd7625962b0c087d0004c
|
[
"MIT"
] | null | null | null |
#!/apps/anaconda3/bin/python
from config import *
import gensim
import glob
import os
import re
import numpy as np
import pandas as pd
import time
import json
import logging
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
from gensim.models import ldaseqmodel
def main():
THEME = 'Volatility'
time_slices = pd.read_csv('Summary Stat Tables/%s_Article_Count.csv' % THEME, index_col=0)
time_slices.sort_index(inplace=True)
time_slices.index = pd.to_datetime(time_slices.index)
# time_slices.groupby(time_slices.index.year)['No. of Volatility Articles'].sum()
# Only upto 2018
yearly_slices = time_slices.groupby(time_slices.index.year)['No. of %s Articles' % THEME].sum().values
# Load dictionary and corpus
dictionary_all = gensim.corpora.Dictionary.load(TEMP_PATH + '/%s/%s_clean.dict' % (THEME, THEME))
corpus_all = gensim.corpora.MmCorpus(TEMP_PATH + '/%s/%s_clean.mm' % (THEME, THEME))
tic = time.time()
ldaseq = ldaseqmodel.LdaSeqModel(corpus=corpus_all,
id2word=dictionary_all,
time_slice=yearly_slices,
passes=2,
num_topics=10,
em_min_iter=1,
em_max_iter=1,
chunksize=12000)
print('LDA Seq Model Created. Time Taken: %d seconds' % int(time.time() - tic))
# Save the model
ldaseq.save(TEMP_PATH + '/%s/%s_LDASeqModel_yearly_10_Topics' % (THEME, THEME))
if __name__ == '__main__':
main()
| 30.727273
| 106
| 0.604142
|
d51ac1857679b14aad7f6736ac1a50d07b4be515
| 3,910
|
py
|
Python
|
test/mitmproxy/tutils.py
|
ning1022/mitmproxy
|
66267ad2768686f6af0ec20dfa89d1a281fc7f83
|
[
"MIT"
] | 1
|
2019-08-20T11:54:06.000Z
|
2019-08-20T11:54:06.000Z
|
test/mitmproxy/tutils.py
|
ning1022/mitmproxy
|
66267ad2768686f6af0ec20dfa89d1a281fc7f83
|
[
"MIT"
] | null | null | null |
test/mitmproxy/tutils.py
|
ning1022/mitmproxy
|
66267ad2768686f6af0ec20dfa89d1a281fc7f83
|
[
"MIT"
] | null | null | null |
import os
import shutil
import tempfile
import argparse
import sys
from six.moves import cStringIO as StringIO
from contextlib import contextmanager
from unittest.case import SkipTest
import netlib.tutils
from mitmproxy import utils, controller
from mitmproxy.models import (
ClientConnection, ServerConnection, Error, HTTPRequest, HTTPResponse, HTTPFlow
)
def _skip_windows(*args):
raise SkipTest("Skipped on Windows.")
def skip_windows(fn):
if os.name == "nt":
return _skip_windows
else:
return fn
def skip_not_windows(fn):
if os.name == "nt":
return fn
else:
return _skip_windows
def _skip_appveyor(*args):
raise SkipTest("Skipped on AppVeyor.")
def skip_appveyor(fn):
if "APPVEYOR" in os.environ:
return _skip_appveyor
else:
return fn
def tflow(client_conn=True, server_conn=True, req=True, resp=None, err=None):
"""
@type client_conn: bool | None | mitmproxy.proxy.connection.ClientConnection
@type server_conn: bool | None | mitmproxy.proxy.connection.ServerConnection
@type req: bool | None | mitmproxy.protocol.http.HTTPRequest
@type resp: bool | None | mitmproxy.protocol.http.HTTPResponse
@type err: bool | None | mitmproxy.protocol.primitives.Error
@return: bool | None | mitmproxy.protocol.http.HTTPFlow
"""
if client_conn is True:
client_conn = tclient_conn()
if server_conn is True:
server_conn = tserver_conn()
if req is True:
req = netlib.tutils.treq()
if resp is True:
resp = netlib.tutils.tresp()
if err is True:
err = terr()
if req:
req = HTTPRequest.wrap(req)
if resp:
resp = HTTPResponse.wrap(resp)
f = HTTPFlow(client_conn, server_conn)
f.request = req
f.response = resp
f.error = err
f.reply = controller.DummyReply()
return f
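# For example (illustrative): tflow(resp=True) yields an HTTPFlow carrying both a request
# and a response, while tflow(err=True) attaches an Error to the flow instead.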
def tclient_conn():
"""
@return: mitmproxy.proxy.connection.ClientConnection
"""
c = ClientConnection.from_state(dict(
address=dict(address=("address", 22), use_ipv6=True),
clientcert=None,
ssl_established=False,
timestamp_start=1,
timestamp_ssl_setup=2,
timestamp_end=3,
))
c.reply = controller.DummyReply()
return c
def tserver_conn():
"""
@return: mitmproxy.proxy.connection.ServerConnection
"""
c = ServerConnection.from_state(dict(
address=dict(address=("address", 22), use_ipv6=True),
source_address=dict(address=("address", 22), use_ipv6=True),
ip_address=None,
cert=None,
timestamp_start=1,
timestamp_tcp_setup=2,
timestamp_ssl_setup=3,
timestamp_end=4,
ssl_established=False,
sni="address",
via=None
))
c.reply = controller.DummyReply()
return c
def terr(content="error"):
"""
@return: mitmproxy.protocol.primitives.Error
"""
err = Error(content)
return err
def get_body_line(last_displayed_body, line_nb):
return last_displayed_body.contents()[line_nb + 2]
@contextmanager
def chdir(dir):
orig_dir = os.getcwd()
os.chdir(dir)
yield
os.chdir(orig_dir)
@contextmanager
def tmpdir(*args, **kwargs):
temp_workdir = tempfile.mkdtemp(*args, **kwargs)
with chdir(temp_workdir):
yield temp_workdir
shutil.rmtree(temp_workdir)
class MockParser(argparse.ArgumentParser):
"""
argparse.ArgumentParser sys.exits() by default.
Make it more testable by throwing an exception instead.
"""
def error(self, message):
raise Exception(message)
raises = netlib.tutils.raises
@contextmanager
def capture_stderr(command, *args, **kwargs):
out, sys.stderr = sys.stderr, StringIO()
command(*args, **kwargs)
yield sys.stderr.getvalue()
sys.stderr = out
test_data = utils.Data(__name__)
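# Hedged usage sketch, added for illustration and not part of the upstream
# module: it exercises only the helpers defined above, so every name is local
# to this file.
def _example_build_flow():
    # Build a flow carrying a request, a response and an error, as a test would.
    f = tflow(resp=True, err=True)
    assert f.request is not None
    assert f.response is not None
    assert f.error is not None
    return f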
| 23.413174
| 82
| 0.659335
|
c879813447387dbdb13e6d2c5edd82685a42dfc7
| 1,665
|
py
|
Python
|
homeassistant/components/nws/const.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 6
|
2020-07-18T16:33:25.000Z
|
2021-09-26T09:52:04.000Z
|
homeassistant/components/nws/const.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 47
|
2020-07-23T07:13:11.000Z
|
2022-03-31T06:01:46.000Z
|
homeassistant/components/nws/const.py
|
klauern/home-assistant-core
|
c18ba6aec0627e6afb6442c678edb5ff2bb17db6
|
[
"Apache-2.0"
] | 5
|
2020-03-29T00:29:13.000Z
|
2021-09-06T20:58:40.000Z
|
"""Constants for National Weather Service Integration."""
DOMAIN = "nws"
CONF_STATION = "station"
ATTRIBUTION = "Data from National Weather Service/NOAA"
ATTR_FORECAST_DETAILED_DESCRIPTION = "detailed_description"
ATTR_FORECAST_PRECIP_PROB = "precipitation_probability"
ATTR_FORECAST_DAYTIME = "daytime"
CONDITION_CLASSES = {
"exceptional": [
"Tornado",
"Hurricane conditions",
"Tropical storm conditions",
"Dust",
"Smoke",
"Haze",
"Hot",
"Cold",
],
"snowy": ["Snow", "Sleet", "Blizzard"],
"snowy-rainy": [
"Rain/snow",
"Rain/sleet",
"Freezing rain/snow",
"Freezing rain",
"Rain/freezing rain",
],
"hail": [],
"lightning-rainy": [
"Thunderstorm (high cloud cover)",
"Thunderstorm (medium cloud cover)",
"Thunderstorm (low cloud cover)",
],
"lightning": [],
"pouring": [],
"rainy": [
"Rain",
"Rain showers (high cloud cover)",
"Rain showers (low cloud cover)",
],
"windy-variant": ["Mostly cloudy and windy", "Overcast and windy"],
"windy": [
"Fair/clear and windy",
"A few clouds and windy",
"Partly cloudy and windy",
],
"fog": ["Fog/mist"],
"clear": ["Fair/clear"], # sunny and clear-night
"cloudy": ["Mostly cloudy", "Overcast"],
"partlycloudy": ["A few clouds", "Partly cloudy"],
}
DAYNIGHT = "daynight"
HOURLY = "hourly"
NWS_DATA = "nws data"
COORDINATOR_OBSERVATION = "coordinator_observation"
COORDINATOR_FORECAST = "coordinator_forecast"
COORDINATOR_FORECAST_HOURLY = "coordinator_forecast_hourly"
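# Illustrative sketch, not part of the original constants module: one way the
# CONDITION_CLASSES table above could be inverted to find the condition key for
# a raw NWS weather string. The helper name and fallback behaviour are assumptions.
def _lookup_condition(nws_weather):
    """Return the condition key whose list contains the given NWS string."""
    for condition, weather_strings in CONDITION_CLASSES.items():
        if nws_weather in weather_strings:
            return condition
    return None  # unmapped strings are left to the caller to handle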
| 26.428571
| 71
| 0.603003
|
9f6260fce4299eff6c005a82cf6fa289376665e6
| 45,039
|
py
|
Python
|
hvac/api/secrets_engines/transit.py
|
lenaing/hvac
|
7911a01a9d1774cba61d8b3d654893fa05e80479
|
[
"Apache-2.0"
] | null | null | null |
hvac/api/secrets_engines/transit.py
|
lenaing/hvac
|
7911a01a9d1774cba61d8b3d654893fa05e80479
|
[
"Apache-2.0"
] | null | null | null |
hvac/api/secrets_engines/transit.py
|
lenaing/hvac
|
7911a01a9d1774cba61d8b3d654893fa05e80479
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Transit methods module."""
from hvac import exceptions, utils
from hvac.api.vault_api_base import VaultApiBase
from hvac.constants import transit as transit_constants
DEFAULT_MOUNT_POINT = 'transit'
class Transit(VaultApiBase):
"""Transit Secrets Engine (API).
Reference: https://www.vaultproject.io/api/secret/transit/index.html
"""
def create_key(self, name, convergent_encryption=None, derived=None, exportable=None, allow_plaintext_backup=None,
key_type=None, mount_point=DEFAULT_MOUNT_POINT):
"""Create a new named encryption key of the specified type.
The values set here cannot be changed after key creation.
Supported methods:
POST: /{mount_point}/keys/{name}. Produces: 204 (empty body)
:param name: Specifies the name of the encryption key to create. This is specified as part of the URL.
:type name: str | unicode
:param convergent_encryption: If enabled, the key will support convergent encryption, where the same plaintext
creates the same ciphertext. This requires derived to be set to true. When enabled, each
encryption(/decryption/rewrap/datakey) operation will derive a nonce value rather than randomly generate it.
:type convergent_encryption: bool
:param derived: Specifies if key derivation is to be used. If enabled, all encrypt/decrypt requests to this
named key must provide a context which is used for key derivation.
:type derived: bool
:param exportable: Enables keys to be exportable. This allows for all the valid keys in the key ring to be
exported. Once set, this cannot be disabled.
:type exportable: bool
:param allow_plaintext_backup: If set, enables taking backup of named key in the plaintext format. Once set,
this cannot be disabled.
:type allow_plaintext_backup: bool
:param key_type: Specifies the type of key to create. The currently-supported types are:
* **aes256-gcm96**: AES-256 wrapped with GCM using a 96-bit nonce size AEAD
* **chacha20-poly1305**: ChaCha20-Poly1305 AEAD (symmetric, supports derivation and convergent encryption)
* **ed25519**: ED25519 (asymmetric, supports derivation).
* **ecdsa-p256**: ECDSA using the P-256 elliptic curve (asymmetric)
* **rsa-2048**: RSA with bit size of 2048 (asymmetric)
* **rsa-4096**: RSA with bit size of 4096 (asymmetric)
:type key_type: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
if convergent_encryption and not derived:
raise exceptions.ParamValidationError('derived must be set to True when convergent_encryption is True')
if key_type is not None and key_type not in transit_constants.ALLOWED_KEY_TYPES:
error_msg = 'invalid key_type argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=key_type,
allowed_types=', '.join(transit_constants.ALLOWED_KEY_TYPES),
))
params = utils.remove_nones({
'convergent_encryption': convergent_encryption,
'derived': derived,
'exportable': exportable,
'allow_plaintext_backup': allow_plaintext_backup,
'type': key_type,
})
api_path = utils.format_url(
'/v1/{mount_point}/keys/{name}',
mount_point=mount_point,
name=name,
)
return self._adapter.post(
url=api_path,
json=params,
)
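# Hedged usage sketch (illustration only, not part of the upstream client):
# assuming an authenticated hvac.Client named `client`, creating an exportable
# ed25519 signing key would look roughly like:
#
#     client.secrets.transit.create_key(
#         name='my-signing-key',
#         exportable=True,
#         key_type='ed25519',
#     )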
def read_key(self, name, mount_point=DEFAULT_MOUNT_POINT):
"""Read information about a named encryption key.
The keys object shows the creation time of each key version; the values are not the keys themselves. Depending
on the type of key, different information may be returned, e.g. an asymmetric key will return its public key in
a standard format for the type.
Supported methods:
GET: /{mount_point}/keys/{name}. Produces: 200 application/json
:param name: Specifies the name of the encryption key to read. This is specified as part of the URL.
:type name: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the read_key request.
:rtype: requests.Response
"""
api_path = utils.format_url(
'/v1/{mount_point}/keys/{name}',
mount_point=mount_point,
name=name,
)
response = self._adapter.get(
url=api_path,
)
return response.json()
def list_keys(self, mount_point=DEFAULT_MOUNT_POINT):
"""List keys.
Only the key names are returned (not the actual keys themselves).
Supported methods:
LIST: /{mount_point}/keys. Produces: 200 application/json
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
api_path = utils.format_url('/v1/{mount_point}/keys', mount_point=mount_point)
response = self._adapter.list(
url=api_path
)
return response.json()
def delete_key(self, name, mount_point=DEFAULT_MOUNT_POINT):
"""Delete a named encryption key.
It will no longer be possible to decrypt any data encrypted with the named key. Because this is a potentially
catastrophic operation, the deletion_allowed tunable must be set in the key's /config endpoint.
Supported methods:
DELETE: /{mount_point}/keys/{name}. Produces: 204 (empty body)
:param name: Specifies the name of the encryption key to delete. This is specified as part of the URL.
:type name: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
api_path = utils.format_url(
'/v1/{mount_point}/keys/{name}',
mount_point=mount_point,
name=name,
)
return self._adapter.delete(
url=api_path,
)
def update_key_configuration(self, name, min_decryption_version=None, min_encryption_version=None, deletion_allowed=None,
exportable=None, allow_plaintext_backup=None, mount_point=DEFAULT_MOUNT_POINT):
"""Tune configuration values for a given key.
These values are returned during a read operation on the named key.
Supported methods:
POST: /{mount_point}/keys/{name}/config. Produces: 204 (empty body)
:param name: Specifies the name of the encryption key to update configuration for.
:type name: str | unicode
:param min_decryption_version: Specifies the minimum version of ciphertext allowed to be decrypted. Adjusting
this as part of a key rotation policy can prevent old copies of ciphertext from being decrypted, should they
fall into the wrong hands. For signatures, this value controls the minimum version of signature that can be
verified against. For HMACs, this controls the minimum version of a key allowed to be used as the key for
verification.
:type min_decryption_version: int
:param min_encryption_version: Specifies the minimum version of the key that can be used to encrypt plaintext,
sign payloads, or generate HMACs. Must be 0 (which will use the latest version) or a value greater or equal
to min_decryption_version.
:type min_encryption_version: int
:param deletion_allowed: Specifies if the key is allowed to be deleted.
:type deletion_allowed: bool
:param exportable: Enables keys to be exportable. This allows for all the valid keys in the key ring to be
exported. Once set, this cannot be disabled.
:type exportable: bool
:param allow_plaintext_backup: If set, enables taking backup of named key in the plaintext format. Once set,
this cannot be disabled.
:type allow_plaintext_backup: bool
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
if min_encryption_version is not None and min_decryption_version is not None:
if min_encryption_version != 0 and min_encryption_version <= min_decryption_version:
raise exceptions.ParamValidationError('min_encryption_version must be 0 or > min_decryption_version')
params = utils.remove_nones({
'min_decryption_version': min_decryption_version,
'min_encryption_version': min_encryption_version,
'deletion_allowed': deletion_allowed,
'exportable': exportable,
'allow_plaintext_backup': allow_plaintext_backup,
})
api_path = utils.format_url(
'/v1/{mount_point}/keys/{name}/config',
mount_point=mount_point,
name=name,
)
return self._adapter.post(
url=api_path,
json=params,
)
def rotate_key(self, name, mount_point=DEFAULT_MOUNT_POINT):
"""Rotate the version of the named key.
After rotation, new plaintext requests will be encrypted with the new version of the key. To upgrade ciphertext
to be encrypted with the latest version of the key, use the rewrap endpoint. This is only supported with keys
that support encryption and decryption operations.
Supported methods:
POST: /{mount_point}/keys/{name}/rotate. Produces: 204 (empty body)
:param name: Specifies the name of the key to read information about. This is specified as part of the URL.
:type name: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
api_path = utils.format_url(
'/v1/{mount_point}/keys/{name}/rotate',
mount_point=mount_point,
name=name,
)
return self._adapter.post(
url=api_path,
)
def export_key(self, name, key_type, version=None, mount_point=DEFAULT_MOUNT_POINT):
"""Return the named key.
The keys object shows the value of the key for each version. If version is specified, the specific version will
be returned. If latest is provided as the version, the current key will be provided. Depending on the type of
key, different information may be returned. The key must be exportable to support this operation and the version
must still be valid.
Supported methods:
GET: /{mount_point}/export/{key_type}/{name}(/{version}). Produces: 200 application/json
:param name: Specifies the name of the key to read information about. This is specified as part of the URL.
:type name: str | unicode
:param key_type: Specifies the type of the key to export. This is specified as part of the URL. Valid values are:
encryption-key
signing-key
hmac-key
:type key_type: str | unicode
:param version: Specifies the version of the key to read. If omitted, all versions of the key will be returned.
If the version is set to latest, the current key will be returned.
:type version: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
if key_type not in transit_constants.ALLOWED_EXPORT_KEY_TYPES:
error_msg = 'invalid key_type argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=key_type,
allowed_types=', '.join(transit_constants.ALLOWED_EXPORT_KEY_TYPES),
))
api_path = utils.format_url(
'/v1/{mount_point}/export/{key_type}/{name}',
mount_point=mount_point,
key_type=key_type,
name=name,
)
if version is not None:
api_path = self._adapter.urljoin(api_path, version)
response = self._adapter.get(
url=api_path,
)
return response.json()
def encrypt_data(self, name, plaintext, context=None, key_version=None, nonce=None, batch_input=None, type=None,
convergent_encryption=None, mount_point=DEFAULT_MOUNT_POINT):
"""Encrypt the provided plaintext using the named key.
This path supports the create and update policy capabilities as follows: if the user has the create capability
for this endpoint in their policies, and the key does not exist, it will be upserted with default values
(whether the key requires derivation depends on whether the context parameter is empty or not). If the user only
has update capability and the key does not exist, an error will be returned.
Supported methods:
POST: /{mount_point}/encrypt/{name}. Produces: 200 application/json
:param name: Specifies the name of the encryption key to encrypt against. This is specified as part of the URL.
:type name: str | unicode
:param plaintext: Specifies the base64 encoded plaintext to be encrypted.
:type plaintext: str | unicode
:param context: Specifies the base64 encoded context for key derivation. This is required if key derivation is
enabled for this key.
:type context: str | unicode
:param key_version: Specifies the version of the key to use for encryption. If not set, uses the latest version.
Must be greater than or equal to the key's min_encryption_version, if set.
:type key_version: int
:param nonce: Specifies the base64 encoded nonce value. This must be provided if convergent encryption is
enabled for this key and the key was generated with Vault 0.6.1. Not required for keys created in 0.6.2+.
The value must be exactly 96 bits (12 bytes) long and the user must ensure that for any given context (and
thus, any given encryption key) this nonce value is never reused.
:type nonce: str | unicode
:param batch_input: Specifies a list of items to be encrypted in a single batch. When this parameter is set, if
the parameters 'plaintext', 'context' and 'nonce' are also set, they will be ignored. The format for the
input is: [dict(context="b64_context", plaintext="b64_plaintext"), ...]
:type batch_input: List[dict]
:param type: This parameter is required when encryption key is expected to be created. When performing an
upsert operation, the type of key to create.
:type type: str | unicode
:param convergent_encryption: This parameter will only be used when a key is expected to be created. Whether to
support convergent encryption. This is only supported when using a key with key derivation enabled and will
require all requests to carry both a context and 96-bit (12-byte) nonce. The given nonce will be used in
place of a randomly generated nonce. As a result, when the same context and nonce are supplied, the same
ciphertext is generated. It is very important when using this mode that you ensure that all nonces are
unique for a given context. Failing to do so will severely impact the ciphertext's security.
:type convergent_encryption: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
params = {
'plaintext': plaintext,
}
params.update(
utils.remove_nones({
'context': context,
'key_version': key_version,
'nonce': nonce,
'batch_input': batch_input,
'type': type,
'convergent_encryption': convergent_encryption,
})
)
api_path = utils.format_url(
'/v1/{mount_point}/encrypt/{name}',
mount_point=mount_point,
name=name,
)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json()
def decrypt_data(self, name, ciphertext, context=None, nonce=None, batch_input=None, mount_point=DEFAULT_MOUNT_POINT):
"""Decrypt the provided ciphertext using the named key.
Supported methods:
POST: /{mount_point}/decrypt/{name}. Produces: 200 application/json
:param name: Specifies the name of the encryption key to decrypt against. This is specified as part of the URL.
:type name: str | unicode
:param ciphertext: the ciphertext to decrypt.
:type ciphertext: str | unicode
:param context: Specifies the base64 encoded context for key derivation. This is required if key derivation is
enabled.
:type context: str | unicode
:param nonce: Specifies a base64 encoded nonce value used during encryption. Must be provided if convergent
encryption is enabled for this key and the key was generated with Vault 0.6.1. Not required for keys created
in 0.6.2+.
:type nonce: str | unicode
:param batch_input: Specifies a list of items to be decrypted in a single batch. When this parameter is set, if
the parameters 'ciphertext', 'context' and 'nonce' are also set, they will be ignored. The format for the
input is: [dict(context="b64_context", ciphertext="b64_ciphertext"), ...]
:type batch_input: List[dict]
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
params = {
'ciphertext': ciphertext,
}
params.update(
utils.remove_nones({
'context': context,
'nonce': nonce,
'batch_input': batch_input,
})
)
api_path = utils.format_url(
'/v1/{mount_point}/decrypt/{name}',
mount_point=mount_point,
name=name,
)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json()
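# Hedged round-trip sketch (illustration only): the transit engine expects
# base64 encoded plaintext, so an encrypt/decrypt cycle with an assumed
# authenticated `client` looks roughly like:
#
#     import base64
#     plaintext_b64 = base64.b64encode(b'attack at dawn').decode('ascii')
#     ciphertext = client.secrets.transit.encrypt_data(
#         name='my-key', plaintext=plaintext_b64,
#     )['data']['ciphertext']
#     decoded_b64 = client.secrets.transit.decrypt_data(
#         name='my-key', ciphertext=ciphertext,
#     )['data']['plaintext']
#     assert base64.b64decode(decoded_b64) == b'attack at dawn'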
def rewrap_data(self, name, ciphertext, context=None, key_version=None, nonce=None, batch_input=None,
mount_point=DEFAULT_MOUNT_POINT):
"""Rewrap the provided ciphertext using the latest version of the named key.
Because this never returns plaintext, it is possible to delegate this functionality to untrusted users or scripts.
Supported methods:
POST: /{mount_point}/rewrap/{name}. Produces: 200 application/json
:param name: Specifies the name of the encryption key to re-encrypt against. This is specified as part of the URL.
:type name: str | unicode
:param ciphertext: Specifies the ciphertext to re-encrypt.
:type ciphertext: str | unicode
:param context: Specifies the base64 encoded context for key derivation. This is required if key derivation is
enabled.
:type context: str | unicode
:param key_version: Specifies the version of the key to use for the operation. If not set, uses the latest
version. Must be greater than or equal to the key's min_encryption_version, if set.
:type key_version: int
:param nonce: Specifies a base64 encoded nonce value used during encryption. Must be provided if convergent
encryption is enabled for this key and the key was generated with Vault 0.6.1. Not required for keys created
in 0.6.2+.
:type nonce: str | unicode
:param batch_input: Specifies a list of items to be decrypted in a single batch. When this parameter is set, if
the parameters 'ciphertext', 'context' and 'nonce' are also set, they will be ignored. The format for the
input is: [dict(context="b64_context", ciphertext="b64_ciphertext"), ...]
:type batch_input: List[dict]
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
params = {
'ciphertext': ciphertext,
}
params.update(
utils.remove_nones({
'context': context,
'key_version': key_version,
'nonce': nonce,
'batch_input': batch_input,
})
)
api_path = utils.format_url(
'/v1/{mount_point}/rewrap/{name}',
mount_point=mount_point,
name=name,
)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json()
def generate_data_key(self, name, key_type, context=None, nonce=None, bits=None, mount_point=DEFAULT_MOUNT_POINT):
"""Generates a new high-entropy key and the value encrypted with the named key.
Optionally return the plaintext of the key as well. Whether plaintext is returned depends on the path; as a
result, you can use Vault ACL policies to control whether a user is allowed to retrieve the plaintext value of a
key. This is useful if you want an untrusted user or operation to generate keys that are then made available to
trusted users.
Supported methods:
POST: /{mount_point}/datakey/{key_type}/{name}. Produces: 200 application/json
:param name: Specifies the name of the encryption key to use to encrypt the datakey. This is specified as part
of the URL.
:type name: str | unicode
:param key_type: Specifies the type of key to generate. If plaintext, the plaintext key will be returned along
with the ciphertext. If wrapped, only the ciphertext value will be returned. This is specified as part of
the URL.
:type key_type: str | unicode
:param context: Specifies the key derivation context, provided as a base64-encoded string. This must be provided
if derivation is enabled.
:type context: str | unicode
:param nonce: Specifies a nonce value, provided as base64 encoded. Must be provided if convergent encryption is
enabled for this key and the key was generated with Vault 0.6.1. Not required for keys created in 0.6.2+.
The value must be exactly 96 bits (12 bytes) long and the user must ensure that for any given context (and
thus, any given encryption key) this nonce value is never reused.
:type nonce: str | unicode
:param bits: Specifies the number of bits in the desired key. Can be 128, 256, or 512.
:type bits: int
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
if key_type not in transit_constants.ALLOWED_DATA_KEY_TYPES:
error_msg = 'invalid key_type argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=key_type,
allowed_types=', '.join(transit_constants.ALLOWED_DATA_KEY_TYPES),
))
if bits is not None and bits not in transit_constants.ALLOWED_DATA_KEY_BITS:
error_msg = 'invalid bits argument provided "{arg}", supported values: "{allowed_values}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=bits,
allowed_values=', '.join([str(b) for b in transit_constants.ALLOWED_DATA_KEY_BITS]),
))
params = utils.remove_nones({
'context': context,
'nonce': nonce,
'bits': bits,
})
api_path = utils.format_url(
'/v1/{mount_point}/datakey/{key_type}/{name}',
mount_point=mount_point,
key_type=key_type,
name=name,
)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json()
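# Hedged sketch (illustration only): generating a 256-bit data key for envelope
# encryption with an assumed authenticated `client`. With key_type='plaintext'
# the response carries both the local key and its wrapped form:
#
#     resp = client.secrets.transit.generate_data_key(
#         name='my-key', key_type='plaintext', bits=256,
#     )
#     local_key_b64 = resp['data']['plaintext']   # use in memory, never persist
#     wrapped_key = resp['data']['ciphertext']    # safe to store alongside data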
def generate_random_bytes(self, n_bytes=None, output_format=None, mount_point=DEFAULT_MOUNT_POINT):
"""Return high-quality random bytes of the specified length.
Supported methods:
POST: /{mount_point}/random(/{bytes}). Produces: 200 application/json
:param n_bytes: Specifies the number of bytes to return. This value can be specified either in the request body,
or as a part of the URL.
:type n_bytes: int
:param output_format: Specifies the output encoding. Valid options are hex or base64.
:type output_format: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
params = utils.remove_nones({
'bytes': n_bytes,
'format': output_format,
})
api_path = utils.format_url('/v1/{mount_point}/random', mount_point=mount_point)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json()
def hash_data(self, hash_input, algorithm=None, output_format=None, mount_point=DEFAULT_MOUNT_POINT):
"""Return the cryptographic hash of given data using the specified algorithm.
Supported methods:
POST: /{mount_point}/hash(/{algorithm}). Produces: 200 application/json
:param hash_input: Specifies the base64 encoded input data.
:type hash_input: str | unicode
:param algorithm: Specifies the hash algorithm to use. This can also be specified as part of the URL.
Currently-supported algorithms are: sha2-224, sha2-256, sha2-384, sha2-512
:type algorithm: str | unicode
:param output_format: Specifies the output encoding. This can be either hex or base64.
:type output_format: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
if algorithm is not None and algorithm not in transit_constants.ALLOWED_HASH_DATA_ALGORITHMS:
error_msg = 'invalid algorithm argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=algorithm,
allowed_types=', '.join(transit_constants.ALLOWED_HASH_DATA_ALGORITHMS),
))
if output_format is not None and output_format not in transit_constants.ALLOWED_HASH_DATA_FORMATS:
error_msg = 'invalid output_format argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=output_format,
allowed_types=', '.join(transit_constants.ALLOWED_HASH_DATA_FORMATS),
))
params = {
'input': hash_input,
}
params.update(
utils.remove_nones({
'algorithm': algorithm,
'format': output_format,
})
)
api_path = utils.format_url('/v1/{mount_point}/hash', mount_point=mount_point)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json()
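# Hedged sketch (illustration only): the hash endpoint also expects base64
# encoded input; with an assumed authenticated `client`:
#
#     import base64
#     digest = client.secrets.transit.hash_data(
#         hash_input=base64.b64encode(b'some data').decode('ascii'),
#         algorithm='sha2-256',
#         output_format='hex',
#     )['data']['sum']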
def generate_hmac(self, name, hash_input, key_version=None, algorithm=None, mount_point=DEFAULT_MOUNT_POINT):
"""Return the digest of given data using the specified hash algorithm and the named key.
The key can be of any type supported by transit; the raw key will be marshaled into bytes to be used for the
HMAC function. If the key is of a type that supports rotation, the latest (current) version will be used.
Supported methods:
POST: /{mount_point}/hmac/{name}(/{algorithm}). Produces: 200 application/json
:param name: Specifies the name of the encryption key to generate hmac against. This is specified as part of the
URL.
:type name: str | unicode
:param hash_input: Specifies the base64 encoded input data.
:type hash_input: str | unicode
:param key_version: Specifies the version of the key to use for the operation. If not set, uses the latest
version. Must be greater than or equal to the key's min_encryption_version, if set.
:type key_version: int
:param algorithm: Specifies the hash algorithm to use. This can also be specified as part of the URL.
Currently-supported algorithms are: sha2-224, sha2-256, sha2-384, sha2-512
:type algorithm: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
if algorithm is not None and algorithm not in transit_constants.ALLOWED_HASH_DATA_ALGORITHMS:
error_msg = 'invalid algorithm argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=algorithm,
allowed_types=', '.join(transit_constants.ALLOWED_HASH_DATA_ALGORITHMS),
))
params = {
'input': hash_input,
}
params.update(
utils.remove_nones({
'key_version': key_version,
'algorithm': algorithm,
})
)
api_path = utils.format_url(
'/v1/{mount_point}/hmac/{name}',
mount_point=mount_point,
name=name,
)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json()
def sign_data(self, name, hash_input, key_version=None, hash_algorithm=None, context=None, prehashed=None,
signature_algorithm=None, marshaling_algorithm=None, mount_point=DEFAULT_MOUNT_POINT):
"""Return the cryptographic signature of the given data using the named key and the specified hash algorithm.
The key must be of a type that supports signing.
Supported methods:
POST: /{mount_point}/sign/{name}(/{hash_algorithm}). Produces: 200 application/json
:param name: Specifies the name of the encryption key to use for signing. This is specified as part of the URL.
:type name: str | unicode
:param hash_input: Specifies the base64 encoded input data.
:type hash_input: str | unicode
:param key_version: Specifies the version of the key to use for signing. If not set, uses the latest version.
Must be greater than or equal to the key's min_encryption_version, if set.
:type key_version: int
:param hash_algorithm: Specifies the hash algorithm to use for supporting key types (notably, not including
ed25519 which specifies its own hash algorithm). This can also be specified as part of the URL.
Currently-supported algorithms are: sha2-224, sha2-256, sha2-384, sha2-512
:type hash_algorithm: str | unicode
:param context: Base64 encoded context for key derivation. Required if key derivation is enabled; currently only
available with ed25519 keys.
:type context: str | unicode
:param prehashed: Set to true when the input is already hashed. If the key type is rsa-2048 or rsa-4096, then
the algorithm used to hash the input should be indicated by the hash_algorithm parameter. Just as the value
to sign should be the base64-encoded representation of the exact binary data you want signed, when set, input
is expected to be base64-encoded binary hashed data, not hex-formatted. (As an example, on the command line,
you could generate a suitable input via openssl dgst -sha256 -binary | base64.)
:type prehashed: bool
:param signature_algorithm: When using an RSA key, specifies the RSA signature algorithm to use for signing.
Supported signature types are: pss, pkcs1v15
:type signature_algorithm: str | unicode
:param marshaling_algorithm: Specifies the way in which the signature should be marshaled. This currently only applies to ECDSA keys.
Supported types are: asn1, jws
:type marshaling_algorithm: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
if hash_algorithm is not None and hash_algorithm not in transit_constants.ALLOWED_HASH_DATA_ALGORITHMS:
error_msg = 'invalid hash_algorithm argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=hash_algorithm,
allowed_types=', '.join(transit_constants.ALLOWED_HASH_DATA_ALGORITHMS),
))
if signature_algorithm is not None and signature_algorithm not in transit_constants.ALLOWED_SIGNATURE_ALGORITHMS:
error_msg = 'invalid signature_algorithm argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=signature_algorithm,
allowed_types=', '.join(transit_constants.ALLOWED_SIGNATURE_ALGORITHMS),
))
if marshaling_algorithm is not None and marshaling_algorithm not in transit_constants.ALLOWED_MARSHALING_ALGORITHMS:
error_msg = 'invalid marshaling_algorithm argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=marshaling_algorithm,
allowed_types=', '.join(transit_constants.ALLOWED_MARSHALING_ALGORITHMS),
))
params = {
'input': hash_input,
}
params.update(
utils.remove_nones({
'key_version': key_version,
'hash_algorithm': hash_algorithm,
'context': context,
'prehashed': prehashed,
'signature_algorithm': signature_algorithm,
'marshaling_algorithm': marshaling_algorithm,
})
)
api_path = utils.format_url(
'/v1/{mount_point}/sign/{name}',
mount_point=mount_point,
name=name,
)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json()
def verify_signed_data(self, name, hash_input, signature=None, hmac=None, hash_algorithm=None, context=None,
prehashed=None, signature_algorithm=None, marshaling_algorithm=None, mount_point=DEFAULT_MOUNT_POINT):
"""Return whether the provided signature is valid for the given data.
Supported methods:
POST: /{mount_point}/verify/{name}(/{hash_algorithm}). Produces: 200 application/json
:param name: Specifies the name of the encryption key that was used to generate the signature or HMAC.
:type name: str | unicode
:param hash_input: Specifies the base64 encoded input data.
:type hash_input: str | unicode
:param signature: Specifies the signature output from the /transit/sign function. Either this must be supplied
or hmac must be supplied.
:type signature: str | unicode
:param hmac: Specifies the signature output from the /transit/hmac function. Either this must be supplied or
signature must be supplied.
:type hmac: str | unicode
:param hash_algorithm: Specifies the hash algorithm to use. This can also be specified as part of the URL.
Currently-supported algorithms are: sha2-224, sha2-256, sha2-384, sha2-512
:type hash_algorithm: str | unicode
:param context: Base64 encoded context for key derivation. Required if key derivation is enabled; currently only
available with ed25519 keys.
:type context: str | unicode
:param prehashed: Set to true when the input is already hashed. If the key type is rsa-2048 or rsa-4096, then
the algorithm used to hash the input should be indicated by the hash_algorithm parameter.
:type prehashed: bool
:param signature_algorithm: When using an RSA key, specifies the RSA signature algorithm to use for signature
verification. Supported signature types are: pss, pkcs1v15
:type signature_algorithm: str | unicode
:param marshaling_algorithm: Specifies the way in which the signature should be marshaled. This currently only applies to ECDSA keys.
Supported types are: asn1, jws
:type marshaling_algorithm: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
if (signature is None and hmac is None) or (signature is not None and hmac is not None):
error_msg = 'either "signature" or "hmac" argument (but not both) must be provided to verify signature'
raise exceptions.ParamValidationError(error_msg)
if hash_algorithm is not None and hash_algorithm not in transit_constants.ALLOWED_HASH_DATA_ALGORITHMS:
error_msg = 'invalid hash_algorithm argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=hash_algorithm,
allowed_types=', '.join(transit_constants.ALLOWED_HASH_DATA_ALGORITHMS),
))
if signature_algorithm is not None and signature_algorithm not in transit_constants.ALLOWED_SIGNATURE_ALGORITHMS:
error_msg = 'invalid signature_algorithm argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=signature_algorithm,
allowed_types=', '.join(transit_constants.ALLOWED_SIGNATURE_ALGORITHMS),
))
if marshaling_algorithm is not None and marshaling_algorithm not in transit_constants.ALLOWED_MARSHALING_ALGORITHMS:
error_msg = 'invalid marshaling_algorithm argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=marshaling_algorithm,
allowed_types=', '.join(transit_constants.ALLOWED_MARSHALING_ALGORITHMS),
))
params = {
'name': name,
'input': hash_input,
}
params.update(
utils.remove_nones({
'hash_algorithm': hash_algorithm,
'signature': signature,
'hmac': hmac,
'context': context,
'prehashed': prehashed,
'signature_algorithm': signature_algorithm,
'marshaling_algorithm': marshaling_algorithm,
})
)
api_path = utils.format_url('/v1/{mount_point}/verify/{name}', mount_point=mount_point, name=name)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json()
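# Hedged sign/verify sketch (illustration only), assuming an authenticated
# `client` and an existing ed25519 key named 'my-signing-key':
#
#     import base64
#     payload = base64.b64encode(b'important message').decode('ascii')
#     signature = client.secrets.transit.sign_data(
#         name='my-signing-key', hash_input=payload,
#     )['data']['signature']
#     verified = client.secrets.transit.verify_signed_data(
#         name='my-signing-key', hash_input=payload, signature=signature,
#     )['data']['valid']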
def backup_key(self, name, mount_point=DEFAULT_MOUNT_POINT):
"""Return a plaintext backup of a named key.
The backup contains all the configuration data and keys of all the versions along with the HMAC key. The
response from this endpoint can be used with the /restore endpoint to restore the key.
Supported methods:
GET: /{mount_point}/backup/{name}. Produces: 200 application/json
:param name: Name of the key.
:type name: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
api_path = utils.format_url(
'/v1/{mount_point}/backup/{name}',
mount_point=mount_point,
name=name,
)
response = self._adapter.get(
url=api_path,
)
return response.json()
def restore_key(self, backup, name=None, force=None, mount_point=DEFAULT_MOUNT_POINT):
"""Restore the backup as a named key.
This will restore the key configurations and all the versions of the named key along with HMAC keys. The input
to this endpoint should be the output of /backup endpoint. For safety, by default the backend will refuse to
restore to an existing key. If you want to reuse a key name, it is recommended you delete the key before
restoring. It is a good idea to attempt restoring to a different key name first to verify that the operation
successfully completes.
Supported methods:
POST: /{mount_point}/restore(/name). Produces: 204 (empty body)
:param backup: Backed up key data to be restored. This should be the output from the /backup endpoint.
:type backup: str | unicode
:param name: If set, this will be the name of the restored key.
:type name: str | unicode
:param force: If set, force the restore to proceed even if a key by this name already exists.
:type force: bool
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
params = {
'backup': backup,
}
params.update(
utils.remove_nones({
'force': force,
})
)
api_path = utils.format_url('/v1/{mount_point}/restore', mount_point=mount_point)
if name is not None:
api_path = self._adapter.urljoin(api_path, name)
return self._adapter.post(
url=api_path,
json=params,
)
def trim_key(self, name, min_version, mount_point=DEFAULT_MOUNT_POINT):
"""Trims older key versions setting a minimum version for the keyring.
Once trimmed, previous versions of the key cannot be recovered.
Supported methods:
POST: /{mount_point}/keys/{name}/trim. Produces: 200 application/json
:param name: Specifies the name of the key to be trimmed.
:type name: str | unicode
:param min_version: The minimum version for the key ring. All versions before this version will be permanently
deleted. This value can at most be equal to the lesser of min_decryption_version and min_encryption_version.
This is not allowed to be set when either min_encryption_version or min_decryption_version is set to zero.
:type min_version: int
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
"""
params = {
'min_available_version': min_version,
}
api_path = utils.format_url(
'/v1/{mount_point}/keys/{name}/trim',
mount_point=mount_point,
name=name,
)
return self._adapter.post(
url=api_path,
json=params,
)
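# Hedged end-to-end sketch, added for illustration and not part of the upstream
# module. It assumes a reachable Vault server, a valid token, and the transit
# engine mounted at the default path; the helper name is hypothetical.
def _example_key_lifecycle(vault_addr, token):
    import hvac  # imported lazily; assumed to be the installed package
    client = hvac.Client(url=vault_addr, token=token)
    transit = client.secrets.transit
    transit.create_key(name='demo-key')                # creates version 1
    transit.rotate_key(name='demo-key')                # adds version 2
    transit.update_key_configuration(
        name='demo-key',
        min_decryption_version=2,
        min_encryption_version=2,
    )
    transit.trim_key(name='demo-key', min_version=2)   # drops version 1
    return transit.read_key(name='demo-key')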
| 50.662542
| 141
| 0.648349
|
29b44c6fda350fc8267f6017e744b23501eec88c
| 278
|
py
|
Python
|
bindings/python-boost/test-service-server.py
|
DanialLiu/bluezero
|
3747a57b965e63243b1777d8d993dbc032ebbadd
|
[
"Apache-2.0"
] | 13
|
2020-02-13T16:20:04.000Z
|
2022-01-20T10:00:40.000Z
|
bindings/python-boost/test-service-server.py
|
DanialLiu/bluezero
|
3747a57b965e63243b1777d8d993dbc032ebbadd
|
[
"Apache-2.0"
] | 3
|
2020-01-26T17:48:17.000Z
|
2021-09-28T06:54:44.000Z
|
bindings/python-boost/test-service-server.py
|
DanialLiu/bluezero
|
3747a57b965e63243b1777d8d993dbc032ebbadd
|
[
"Apache-2.0"
] | 2
|
2020-10-20T01:32:11.000Z
|
2021-06-04T10:31:26.000Z
|
import pyb0 as b0
def callback(req):
print('Received request "%s"' % req)
rep = 'hi'
print('Sending reply "%s"...' % rep)
return rep
node = b0.Node('python-service-server')
srv = b0.ServiceServer(node, 'control', callback)
node.init()
node.spin()
node.cleanup()
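# Hedged counterpart sketch, not part of this test file: assuming the binding
# also exposes a ServiceClient with a blocking call() method, a matching client
# for the 'control' service might look roughly like this.
# client_node = b0.Node('python-service-client')
# client = b0.ServiceClient(client_node, 'control')
# client_node.init()
# print('Reply: "%s"' % client.call('hello'))
# client_node.cleanup()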
| 23.166667
| 49
| 0.647482
|
43a5b92d5054b3fd65b7361b36d77932292e665d
| 2,431
|
py
|
Python
|
examples/customization/tst-introspect.py
|
eLBati/pyxb
|
14737c23a125fd12c954823ad64fc4497816fae3
|
[
"Apache-2.0"
] | 123
|
2015-01-12T06:43:22.000Z
|
2022-03-20T18:06:46.000Z
|
examples/customization/tst-introspect.py
|
eLBati/pyxb
|
14737c23a125fd12c954823ad64fc4497816fae3
|
[
"Apache-2.0"
] | 103
|
2015-01-08T18:35:57.000Z
|
2022-01-18T01:44:14.000Z
|
examples/customization/tst-introspect.py
|
eLBati/pyxb
|
14737c23a125fd12c954823ad64fc4497816fae3
|
[
"Apache-2.0"
] | 54
|
2015-02-15T17:12:00.000Z
|
2022-03-07T23:02:32.000Z
|
import pyxb
import introspect as custom
import raw.custom as raw_custom
import unittest
class TestComplex (unittest.TestCase):
def setUp (self):
xmls = open('test.xml').read()
self.instance = custom.CreateFromDocument(xmls)
def testRawSubclassHierarchy (self):
self.assertTrue(issubclass(raw_custom.tc01, raw_custom.ta0))
self.assertTrue(issubclass(raw_custom.tc02, raw_custom.ta0))
self.assertTrue(issubclass(raw_custom.tc03, raw_custom.ta0))
self.assertTrue(issubclass(raw_custom.ta04, raw_custom.ta0))
self.assertTrue(issubclass(raw_custom.tc041, raw_custom.ta04))
self.assertTrue(issubclass(raw_custom.tc042, raw_custom.ta04))
def testCustomSubclassesRaw (self):
self.assertTrue(issubclass(custom.tc01, raw_custom.tc01))
self.assertTrue(issubclass(custom.tc02, raw_custom.tc02))
self.assertTrue(issubclass(custom.tc03, raw_custom.tc03))
self.assertTrue(issubclass(custom.ta04, raw_custom.ta04))
self.assertTrue(issubclass(custom.tc041, raw_custom.tc041))
self.assertTrue(issubclass(custom.tc042, raw_custom.tc042))
def testCustomConcreteHierarchy (self):
self.assertTrue(issubclass(custom.tc041, custom.ta04))
self.assertTrue(issubclass(custom.tc042, custom.ta04))
def testSupersedureReplacement (self):
self.assertEqual(custom.ta0, raw_custom.ta0)
self.assertEqual(custom.tc01, raw_custom.tc01)
self.assertEqual(custom.tc02, raw_custom.tc02)
self.assertEqual(custom.tc03, raw_custom.tc03)
self.assertNotEqual(custom.ta04, raw_custom.ta04)
self.assertNotEqual(custom.tc041, raw_custom.tc041)
self.assertNotEqual(custom.tc042, raw_custom.tc042)
def test_c041 (self):
ec041 = self.instance.ec041
self.assertTrue(isinstance(ec041, custom.tc041))
self.assertEqual(ec041.ea0, 'ec041')
self.assertEqual(ec041.ea04_s, 'a04')
self.assertEqual(ec041.ec041_s, 'c041')
self.assertEqual(ec041.xa04(), 'extend ta04')
def test_c042 (self):
ec042 = self.instance.ec042
self.assertTrue(isinstance(ec042, custom.tc042))
self.assertEqual(ec042.ea0, 'ec042')
self.assertEqual(ec042.ea04_s, 'a04')
self.assertEqual(ec042.ec042_i, 42)
self.assertEqual(ec042.xa04(), 'extend ta04')
if __name__ == '__main__':
unittest.main()
| 39.209677
| 70
| 0.703414
|
7704d290d140019f8ef06f81bfed7ea79311cf6f
| 6,586
|
py
|
Python
|
invoices/migrations/0001_initial.py
|
callmewind/billdev
|
fcd53cb98284677fb619abeafb17a88035aabfd6
|
[
"MIT"
] | null | null | null |
invoices/migrations/0001_initial.py
|
callmewind/billdev
|
fcd53cb98284677fb619abeafb17a88035aabfd6
|
[
"MIT"
] | null | null | null |
invoices/migrations/0001_initial.py
|
callmewind/billdev
|
fcd53cb98284677fb619abeafb17a88035aabfd6
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.7 on 2018-07-05 10:20
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_countries.fields
import localflavor.generic.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='BillingProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=300, verbose_name='name')),
('billing_id', models.CharField(blank=True, max_length=50, verbose_name='billing id')),
('email', models.EmailField(max_length=254, verbose_name='email')),
('address', models.CharField(max_length=500, verbose_name='address')),
('zip_code', models.CharField(max_length=50, verbose_name='zip code')),
('city', models.CharField(max_length=100, verbose_name='city')),
('country', django_countries.fields.CountryField(max_length=2)),
('vat_id', models.CharField(blank=True, max_length=50, verbose_name='vat id')),
('bic', localflavor.generic.models.BICField(blank=True, max_length=11)),
('iban', localflavor.generic.models.IBANField(blank=True, include_countries=None, max_length=34, use_nordea_extensions=False)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Client',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=300, verbose_name='name')),
('billing_id', models.CharField(blank=True, max_length=50, verbose_name='billing id')),
('email', models.EmailField(max_length=254, verbose_name='email')),
('address', models.CharField(max_length=500, verbose_name='address')),
('zip_code', models.CharField(max_length=50, verbose_name='zip code')),
('city', models.CharField(max_length=100, verbose_name='city')),
('country', django_countries.fields.CountryField(max_length=2)),
('vat_id', models.CharField(blank=True, max_length=50, verbose_name='vat id')),
('internal_notes', models.TextField(blank=True, verbose_name='description')),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='clients', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='IssuedInvoice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=300, verbose_name='name')),
('billing_id', models.CharField(blank=True, max_length=50, verbose_name='billing id')),
('email', models.EmailField(max_length=254, verbose_name='email')),
('address', models.CharField(max_length=500, verbose_name='address')),
('zip_code', models.CharField(max_length=50, verbose_name='zip code')),
('city', models.CharField(max_length=100, verbose_name='city')),
('country', django_countries.fields.CountryField(max_length=2)),
('vat_id', models.CharField(blank=True, max_length=50, verbose_name='vat id')),
('date', models.DateField(default=datetime.date.today, verbose_name='date')),
('series', models.CharField(max_length=20, verbose_name='series')),
('number', models.PositiveIntegerField(editable=False, verbose_name='number')),
('irpf_rate', models.DecimalField(decimal_places=2, default=0, max_digits=5, verbose_name='irpf rate')),
('net', models.DecimalField(decimal_places=2, default=0, editable=False, max_digits=10, verbose_name='net')),
('vat', models.DecimalField(decimal_places=2, default=0, editable=False, max_digits=10, verbose_name='vat')),
('irpf', models.DecimalField(decimal_places=2, default=0, editable=False, max_digits=10, verbose_name='irpf')),
('amount', models.DecimalField(decimal_places=2, default=0, editable=False, max_digits=10, verbose_name='amount')),
('client', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='invoices', to='invoices.Client')),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='IssuedInvoiceLine',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('concept', models.CharField(max_length=300, verbose_name='concept')),
('quantity', models.DecimalField(decimal_places=2, default=1, max_digits=10, verbose_name='quantity')),
('unit_price', models.DecimalField(blank=True, decimal_places=2, help_text='Leave it blank for auto-calculate', max_digits=10, verbose_name='unit_price')),
('net', models.DecimalField(blank=True, decimal_places=2, help_text='Leave it blank for auto-calculate', max_digits=10, verbose_name='net')),
('vat_rate', models.DecimalField(decimal_places=2, max_digits=5, verbose_name='vat rate')),
('vat', models.DecimalField(blank=True, decimal_places=2, help_text='Leave it blank for auto-calculate', max_digits=10, verbose_name='vat')),
('amount', models.DecimalField(blank=True, decimal_places=2, help_text='Leave it blank for auto-calculate', max_digits=10, verbose_name='amount')),
('invoice', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines', to='invoices.IssuedInvoice')),
],
),
migrations.AlterUniqueTogether(
name='issuedinvoice',
unique_together={('series', 'number')},
),
]
| 64.568627
| 171
| 0.631187
|
144cc17f6632fc77ee57c48740622d3a53f403f2
| 1,885
|
py
|
Python
|
setup.py
|
snavalm/torchio
|
3e07b78da16d6db4da7193325b3f9cb31fc0911a
|
[
"Apache-2.0"
] | 1
|
2021-07-08T09:24:00.000Z
|
2021-07-08T09:24:00.000Z
|
setup.py
|
snavalm/torchio
|
3e07b78da16d6db4da7193325b3f9cb31fc0911a
|
[
"Apache-2.0"
] | 5
|
2020-02-07T15:29:56.000Z
|
2021-01-06T14:46:32.000Z
|
setup.py
|
snavalm/torchio
|
3e07b78da16d6db4da7193325b3f9cb31fc0911a
|
[
"Apache-2.0"
] | 4
|
2020-01-13T16:03:40.000Z
|
2022-01-04T10:22:00.000Z
|
#!/usr/bin/env python
"""The setup script."""
from setuptools import setup, find_packages
with open('README.md', encoding='utf8') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst', encoding='utf8') as history_file:
history = history_file.read()
requirements = [
'Deprecated',
'SimpleITK!=2.0.*', # https://github.com/SimpleITK/SimpleITK/issues/1239
'click',
'humanize',
'nibabel',
'numpy>=1.15',
'scipy',
'torch>=1.1',
'tqdm',
]
setup(
author='Fernando Perez-Garcia',
author_email='fepegar@gmail.com',
python_requires='>=3.6',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
description=(
'Tools for loading, augmenting and writing 3D medical images'
' on PyTorch.'
),
entry_points={
'console_scripts': [
'torchio-transform=torchio.cli.apply_transform:main',
'tiotr=torchio.cli.apply_transform:main',
'tiohd=torchio.cli.print_info:main',
],
},
extras_require={
'plot': ['matplotlib'],
},
install_requires=requirements,
license='Apache license',
long_description=readme + '\n\n' + history,
long_description_content_type='text/markdown',
include_package_data=True,
keywords='torchio',
name='torchio',
packages=find_packages(include=['torchio', 'torchio.*']),
setup_requires=[],
test_suite='tests',
tests_require=[],
url='https://github.com/fepegar/torchio',
version='0.18.71',
zip_safe=False,
)
| 27.318841
| 77
| 0.614324
|
e5c47ee2b6b1f62c0c9e84b13d9f012167bb988d
| 1,061
|
py
|
Python
|
tfx/experimental/templates/taxi/preprocessing_test.py
|
BioGeek/tfx
|
3d30ae8a1e2f33367c592ca86562cf555193cfb6
|
[
"Apache-2.0"
] | null | null | null |
tfx/experimental/templates/taxi/preprocessing_test.py
|
BioGeek/tfx
|
3d30ae8a1e2f33367c592ca86562cf555193cfb6
|
[
"Apache-2.0"
] | null | null | null |
tfx/experimental/templates/taxi/preprocessing_test.py
|
BioGeek/tfx
|
3d30ae8a1e2f33367c592ca86562cf555193cfb6
|
[
"Apache-2.0"
] | null | null | null |
# Lint as: python2, python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tfx.experimental.templates.taxi import preprocessing
class PreprocessingTest(tf.test.TestCase):
def testPreprocessingFn(self):
self.assertTrue(callable(preprocessing.preprocessing_fn))
if __name__ == '__main__':
tf.test.main()
| 32.151515
| 79
| 0.781338
|
2708cedcc8b3deeb13a14cbf4c3f108f5523769f
| 876
|
py
|
Python
|
dsb3_networks/nodule_segmentation/LIDC-annotations_2_nodule-seg_annotations/LIDC_preprocessing/1_get_xml_paths/write_xml_path_lst.py
|
NDKoehler/DataScienceBowl2017_7th_place
|
638542c3cde5af45bf34d0391695ab0e54ce78b8
|
[
"MIT"
] | 8
|
2017-05-19T10:30:20.000Z
|
2022-03-12T05:17:19.000Z
|
dsb3a_assets/LIDC-annotations_2_nodule-seg_annotations/LIDC_preprocessing/1_get_xml_paths/write_xml_path_lst.py
|
NDKoehler/DataScienceBowl2017_7th_place
|
638542c3cde5af45bf34d0391695ab0e54ce78b8
|
[
"MIT"
] | 5
|
2017-07-03T10:55:29.000Z
|
2018-09-10T18:05:14.000Z
|
dsb3a_assets/LIDC-annotations_2_nodule-seg_annotations/LIDC_preprocessing/1_get_xml_paths/write_xml_path_lst.py
|
NDKoehler/DataScienceBowl2017_7th_place
|
638542c3cde5af45bf34d0391695ab0e54ce78b8
|
[
"MIT"
] | 6
|
2017-05-12T00:58:05.000Z
|
2019-01-22T05:08:09.000Z
|
import os,sys
import numpy as np
doi_root_path = "/media/philipp/qnap/LIDC/download/DOI/"
lidc_idri_folders = []
study_instances = []
series_instances = []
xml_path_lst = []
for doi_folder in os.listdir(doi_root_path):
lidc_idri_folder = doi_root_path+doi_folder+"/"
lidc_idri_folders += [lidc_idri_folder]
for lidc_folder in os.listdir(lidc_idri_folder):
study_instance = lidc_idri_folder+lidc_folder+"/"
study_instances += [study_instance]
for study_folder in os.listdir(study_instance):
series_instance = study_instance+study_folder+"/"
for file in os.listdir(series_instance): # check if folder contains valid xml file
if(file.endswith(".xml")):
series_instances += [series_instance]
xml_path_lst += [series_instance+file]
break
print(len(xml_path_lst))
np.savetxt("xml_path_lst.csv", xml_path_lst,fmt="%s")
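# Hedged follow-up sketch, not part of the original script: a later stage could
# read the generated list back with numpy before locating the DICOM series.
# xml_paths = np.loadtxt("xml_path_lst.csv", dtype=str)
# print(len(xml_paths))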
| 31.285714
| 88
| 0.731735
|
3171200d6176145b9bd402e9637a3791e7ac4f5f
| 9,802
|
bzl
|
Python
|
kythe/go/indexer/testdata/go_indexer_test.bzl
|
rob-opsi/kythe
|
b3377d4b28566eb36c075187d57046e6aed2ff11
|
[
"Apache-2.0"
] | 1
|
2019-01-20T19:57:57.000Z
|
2019-01-20T19:57:57.000Z
|
kythe/go/indexer/testdata/go_indexer_test.bzl
|
DalavanCloud/kythe
|
b3377d4b28566eb36c075187d57046e6aed2ff11
|
[
"Apache-2.0"
] | 1
|
2022-02-27T23:39:06.000Z
|
2022-02-27T23:39:06.000Z
|
kythe/go/indexer/testdata/go_indexer_test.bzl
|
DalavanCloud/kythe
|
b3377d4b28566eb36c075187d57046e6aed2ff11
|
[
"Apache-2.0"
] | 2
|
2019-01-20T19:57:48.000Z
|
2021-05-11T00:57:45.000Z
|
#
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Bazel rules to extract Go compilations from library targets for testing the
# Go cross-reference indexer.
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
"GoSource",
)
load(
"//tools/build_rules/verifier_test:verifier_test.bzl",
"kythe_integration_test",
"verifier_test",
"KytheEntries",
)
# Emit a shell script that sets up the environment needed by the extractor to
# capture dependencies and runs the extractor.
def _emit_extractor_script(ctx, mode, script, output, srcs, deps, ipath, data):
tmpdir = output.dirname + '/tmp'
srcdir = tmpdir + '/src/' + ipath
pkgdir = tmpdir + '/pkg/%s_%s' % (mode.goos, mode.goarch)
outdir = output.path + '_dir'
extras = []
cmds = ['set -e', 'mkdir -p ' + pkgdir, 'mkdir -p ' + srcdir]
# Link the source files and dependencies into a common temporary directory.
# Source files need to be made relative to the temp directory.
ups = srcdir.count('/') + 1
cmds += ['ln -s "%s%s" "%s"' % ('../'*ups, src.path, srcdir)
for src in srcs]
for path, dpath in deps.items():
fullpath = '/'.join([pkgdir, dpath])
tups = fullpath.count('/')
cmds += [
'mkdir -p ' + fullpath.rsplit('/', 1)[0],
"ln -s '%s%s' '%s.a'" % ('../'*tups, path, fullpath),
]
# Gather any extra data dependencies.
for target in data:
for f in target.files:
cmds.append('ln -s "%s%s" "%s"' % ('../'*ups, f.path, srcdir))
extras.append(srcdir + '/' + f.path.rsplit('/', 1)[-1])
# Invoke the extractor on the temp directory.
goroot = '/'.join(ctx.files._sdk_files[0].path.split('/')[:-2])
cmds.append(' '.join([
ctx.files._extractor[-1].path,
'-kindex',
'-output', outdir,
'-goroot', goroot,
'-gopath', tmpdir,
'-extra_files', "'%s'" % ','.join(extras),
ipath,
]))
# Pack the results into a ZIP archive, so we have a single output.
cmds += [
'cd ' + output.dirname,
"mv '%s'/*.kindex '%s'" % (output.basename+'_dir', output.basename),
]
f = ctx.new_file(ctx.configuration.bin_dir, script)
ctx.file_action(output=f, content='\n'.join(cmds), executable=True)
return f
def _go_extract(ctx):
gosrc = ctx.attr.library[GoSource]
mode = gosrc.mode
srcs = gosrc.srcs
deps = {} # TODO(schroederc): support dependencies
ipath = gosrc.library.importpath
data = ctx.attr.data
output = ctx.outputs.kindex
script = _emit_extractor_script(ctx, mode, ctx.label.name+'-extract.sh',
output, srcs, deps, ipath, data)
extras = []
for target in data:
extras += target.files.to_list()
tools = ctx.files._extractor + ctx.files._sdk_files
ctx.action(
mnemonic = 'GoExtract',
executable = script,
outputs = [output],
inputs = srcs + extras + [] + tools,
)
return struct(kindex = output)
# Generate a kindex with the compilations captured from a single Go
# library or binary rule.
go_extract = rule(
_go_extract,
attrs = {
"library": attr.label(
providers = [GoSource],
mandatory = True,
),
# Additional data files to include in each compilation.
"data": attr.label_list(
allow_empty = True,
allow_files = True,
),
"_extractor": attr.label(
default = Label("//kythe/go/extractors/cmd/gotool"),
executable = True,
cfg = "host",
),
"_sdk_files": attr.label(
allow_files = True,
default = "@go_sdk//:files",
),
},
outputs = {"kindex": "%{name}.kindex"},
toolchains = ["@io_bazel_rules_go//go:toolchain"],
)
def _go_entries(ctx):
kindex = ctx.attr.kindex.kindex
indexer = ctx.files._indexer[-1]
iargs = [indexer.path]
output = ctx.outputs.entries
# If the test wants marked source, enable support for it in the indexer.
if ctx.attr.has_marked_source:
iargs.append('-code')
# If the test wants linkage metadata, enable support for it in the indexer.
if ctx.attr.metadata_suffix:
iargs += ['-meta', ctx.attr.metadata_suffix]
iargs += [kindex.path, '| gzip >'+output.path]
cmds = ['set -e', 'set -o pipefail', ' '.join(iargs), '']
ctx.action(
mnemonic = 'GoIndexer',
command = '\n'.join(cmds),
outputs = [output],
inputs = [kindex] + ctx.files._indexer,
)
return [KytheEntries(files=depset(), compressed=depset([output]))]
# Run the Kythe indexer on the output that results from a go_extract rule.
go_entries = rule(
_go_entries,
attrs = {
# The go_indexpack output to pass to the indexer.
"kindex": attr.label(
providers = ["kindex"],
mandatory = True,
),
# Whether to enable explosion of MarkedSource facts.
"has_marked_source": attr.bool(default = False),
# The suffix used to recognize linkage metadata files, if non-empty.
"metadata_suffix": attr.string(default = ""),
# The location of the Go indexer binary.
"_indexer": attr.label(
default = Label("//kythe/go/indexer/cmd/go_indexer"),
executable = True,
cfg = "data",
),
},
outputs = {"entries": "%{name}.entries.gz"},
)
def _go_verifier_test(ctx):
entries = ctx.attr.entries.kythe_entries
verifier = ctx.file._verifier
vargs = [verifier.short_path,
'--use_file_nodes', '--show_goals', '--check_for_singletons']
if ctx.attr.log_entries:
vargs.append('--show_protos')
if ctx.attr.allow_duplicates:
vargs.append('--ignore_dups')
# If the test wants marked source, enable support for it in the verifier.
if ctx.attr.has_marked_source:
vargs.append('--convert_marked_source')
cmds = ['set -e', 'set -o pipefail', ' '.join(
['zcat', entries.short_path, '|'] + vargs),
'']
ctx.file_action(output=ctx.outputs.executable,
content='\n'.join(cmds), executable=True)
return struct(
runfiles = ctx.runfiles([verifier, entries]),
)
def go_verifier_test(name, entries, size="small", tags=[],
log_entries=False, has_marked_source=False,
allow_duplicates=False):
opts = ['--use_file_nodes', '--show_goals', '--check_for_singletons']
if log_entries:
opts.append('--show_protos')
if allow_duplicates:
opts.append('--ignore_dups')
# If the test wants marked source, enable support for it in the verifier.
if has_marked_source:
opts.append('--convert_marked_source')
return verifier_test(
name = name,
size = size,
tags = tags,
deps = [entries],
opts=opts
)
# Shared extract/index logic for the go_indexer_test/go_integration_test rules.
def _go_indexer(name, srcs, deps=[], importpath=None,
data=None,
has_marked_source=False,
allow_duplicates=False,
metadata_suffix=''):
if len(deps) > 0:
# TODO(schroederc): support dependencies
fail("ERROR: go_indexer_test.deps not supported")
if importpath == None:
importpath = native.package_name() + '/' + name
lib = name + '_lib'
go_library(
name = lib,
srcs = srcs,
deps = deps,
importpath = importpath,
)
kindex = name + '_kindex'
go_extract(
name = kindex,
data = data,
library = lib,
)
entries = name+'_entries'
go_entries(
name = entries,
kindex = ':'+kindex,
has_marked_source = has_marked_source,
metadata_suffix = metadata_suffix,
)
return entries
# A convenience macro to generate a test library, pass it to the Go indexer,
# and feed the output of indexing to the Kythe schema verifier.
def go_indexer_test(name, srcs, deps=[], import_path=None,
size=None, tags=None,
log_entries=False, data=None,
has_marked_source=False,
allow_duplicates=False,
metadata_suffix=''):
entries = _go_indexer(
name = name,
srcs = srcs,
deps = deps,
data = data,
importpath = import_path,
has_marked_source = has_marked_source,
metadata_suffix = metadata_suffix,
)
go_verifier_test(
name = name,
size = size,
tags = tags,
entries = ':'+entries,
log_entries = log_entries,
has_marked_source = has_marked_source,
allow_duplicates = allow_duplicates,
)
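# Hypothetical usage sketch (target and file names are illustrative, not from the
# repository): a testdata BUILD file might invoke the macro roughly as
#
#   go_indexer_test(
#       name = "basic_test",
#       srcs = ["basic.go"],
#       import_path = "kythe/indexer/testdata/basic",
#       has_marked_source = True,
#   )
#
# which expands to the go_library, go_extract and go_entries targets defined above,
# plus a verifier_test over the resulting entries.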
# A convenience macro to generate a test library, pass it to the Go indexer,
# and feed the output of indexing to the Kythe integration test pipeline.
def go_integration_test(name, srcs, deps=[], data=None,
file_tickets=[],
import_path=None, size='small',
has_marked_source=False,
metadata_suffix=''):
entries = _go_indexer(
name = name,
srcs = srcs,
deps = deps,
data = data,
import_path = import_path,
has_marked_source = has_marked_source,
metadata_suffix = metadata_suffix,
)
kythe_integration_test(
name = name,
size = size,
srcs = [':'+entries],
file_tickets = file_tickets,
)
| 31.517685
| 79
| 0.617833
|
06879171fdcd6c4b49ee4d8001681451b5fa8947
| 756
|
py
|
Python
|
dp4py_sanic/api/request/__init__.py
|
ONSdigital/dp4py-sanic
|
5e6f77e7eb326bb5b355425f635519295a9e146f
|
[
"MIT"
] | null | null | null |
dp4py_sanic/api/request/__init__.py
|
ONSdigital/dp4py-sanic
|
5e6f77e7eb326bb5b355425f635519295a9e146f
|
[
"MIT"
] | null | null | null |
dp4py_sanic/api/request/__init__.py
|
ONSdigital/dp4py-sanic
|
5e6f77e7eb326bb5b355425f635519295a9e146f
|
[
"MIT"
] | 2
|
2021-04-11T08:11:27.000Z
|
2022-01-04T19:04:11.000Z
|
"""
Wraps the Sanic request and adds request context
"""
from sanic import request
from uuid import uuid4
class Request(request.Request):
request_id_header = "X-Request-Id"
def __init__(self, *args, **kwargs):
"""
Initialise the request object with a unique ID (either supplied as a header or generated)
:param args:
:param kwargs:
"""
super(Request, self).__init__(*args, **kwargs)
# Init empty request ID
self.request_id = None
# Check for existing ID
if self.request_id_header in self.headers:
self.request_id = self.headers.get(self.request_id_header)
else:
# Generate a random uuid4
self.request_id = str(uuid4())
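# Hypothetical usage sketch (the app name is illustrative, not part of this package):
# Sanic applications can typically be given a custom request class, so this wrapper
# would be wired up along the lines of
#   app = sanic.Sanic("my-service", request_class=Request)
# after which handlers can read request.request_id for log correlation.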
| 27
| 97
| 0.624339
|
eff997ba4e17833e1a47da842eb3c30a9b295c08
| 4,436
|
py
|
Python
|
etlt/helper/Type2JoinHelper.py
|
SetBased/py-etlt
|
542e4d1dc974dad60f4e338a334c932a40b45ee2
|
[
"MIT"
] | 4
|
2016-06-21T21:13:54.000Z
|
2017-11-30T21:14:14.000Z
|
etlt/helper/Type2JoinHelper.py
|
PyETLT/etlt
|
542e4d1dc974dad60f4e338a334c932a40b45ee2
|
[
"MIT"
] | null | null | null |
etlt/helper/Type2JoinHelper.py
|
PyETLT/etlt
|
542e4d1dc974dad60f4e338a334c932a40b45ee2
|
[
"MIT"
] | 4
|
2016-06-21T21:02:24.000Z
|
2019-04-24T14:59:36.000Z
|
from etlt.helper.Type2Helper import Type2Helper
class Type2JoinHelper(Type2Helper):
"""
A helper class for joining data sets with date intervals.
"""
# ------------------------------------------------------------------------------------------------------------------
@staticmethod
def _intersect(start1, end1, start2, end2):
"""
Returns the intersection of two intervals. Returns (None,None) if the intersection is empty.
:param int start1: The start date of the first interval.
:param int end1: The end date of the first interval.
:param int start2: The start date of the second interval.
:param int end2: The end date of the second interval.
:rtype: tuple[int|None,int|None]
"""
start = max(start1, start2)
end = min(end1, end2)
if start > end:
return None, None
return start, end
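        # Illustrative examples (not part of the original code): with integer-encoded
        # dates, _intersect(100, 200, 150, 300) returns (150, 200), while the disjoint
        # intervals in _intersect(100, 120, 150, 300) return (None, None).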
# ------------------------------------------------------------------------------------------------------------------
def _additional_rows_date2int(self, keys, rows):
"""
Replaces start and end dates of the additional date intervals in the row set with their integer representation
:param list[tuple[str,str]] keys: The other keys with start and end date.
:param list[dict[str,T]] rows: The list of rows.
:rtype: list[dict[str,T]]
"""
for row in rows:
for key_start_date, key_end_date in keys:
if key_start_date not in [self._key_start_date, self._key_end_date]:
row[key_start_date] = self._date2int(row[key_start_date])
if key_end_date not in [self._key_start_date, self._key_end_date]:
row[key_end_date] = self._date2int(row[key_end_date])
# ------------------------------------------------------------------------------------------------------------------
def _intersection(self, keys, rows):
"""
Computes the intersection of the date intervals of two or more reference data sets. If the intersection is empty
the row is removed from the group.
:param list[tuple[str,str]] keys: The other keys with start and end date.
:param list[dict[str,T]] rows: The list of rows.
:rtype: list[dict[str,T]]
"""
# If there are no other keys with start and end date (i.e. nothing to merge) return immediately.
if not keys:
return rows
ret = list()
for row in rows:
start_date = row[self._key_start_date]
end_date = row[self._key_end_date]
for key_start_date, key_end_date in keys:
start_date, end_date = Type2JoinHelper._intersect(start_date,
end_date,
row[key_start_date],
row[key_end_date])
if not start_date:
break
if key_start_date not in [self._key_start_date, self._key_end_date]:
del row[key_start_date]
if key_end_date not in [self._key_start_date, self._key_end_date]:
del row[key_end_date]
if start_date:
row[self._key_start_date] = start_date
row[self._key_end_date] = end_date
ret.append(row)
return ret
# ------------------------------------------------------------------------------------------------------------------
def merge(self, keys):
"""
Merges the join on pseudo keys of two or more reference data sets.
:param list[tuple[str,str]] keys: For each data set the keys of the start and end date.
"""
deletes = []
for pseudo_key, rows in self._rows.items():
self._additional_rows_date2int(keys, rows)
rows = self._intersection(keys, rows)
if rows:
rows = self._rows_sort(rows)
self._rows[pseudo_key] = self._merge_adjacent_rows(rows)
else:
deletes.append(pseudo_key)
for pseudo_key in deletes:
del self._rows[pseudo_key]
# ----------------------------------------------------------------------------------------------------------------------
| 41.849057
| 120
| 0.495041
|
bb43cd8577e44bcecb4e5b57918445f538fdf754
| 2,696
|
py
|
Python
|
src/aoc2015/day14.py
|
emauton/aoc2015
|
f321571b623a0e7acaa173be57506e64bd32765f
|
[
"MIT"
] | null | null | null |
src/aoc2015/day14.py
|
emauton/aoc2015
|
f321571b623a0e7acaa173be57506e64bd32765f
|
[
"MIT"
] | null | null | null |
src/aoc2015/day14.py
|
emauton/aoc2015
|
f321571b623a0e7acaa173be57506e64bd32765f
|
[
"MIT"
] | null | null | null |
'''Advent of Code 2015 day 14: Reindeer Olympics
https://adventofcode.com/2015/day/14'''
import re
class Reindeer(object):
def __init__(self, name, speed, duration, rest):
self.name = name
self.speed = speed
self.duration = duration
self.rest = rest
self.distance = 0
self.time = 0
self.traveling = duration
self.resting = 0
self.points = 0
def __eq__(self, other):
return (self.name == other.name and
self.speed == other.speed and
self.duration == other.duration and
self.rest == other.rest)
def tick(self):
# On each tick, we are either traveling or resting.
# We decrement the appropriate counter, and "flip" to the other state
# when we hit zero. Using fewer state variables here would be fine but
# not more readable, I think.
self.time += 1
if self.traveling:
self.distance += self.speed
self.traveling -= 1
if self.traveling == 0:
self.resting = self.rest
else:
self.resting -= 1
if self.resting == 0:
self.traveling = self.duration
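        # Worked example (illustrative): Comet from the comment below (14 km/s for
        # 10 s, then 127 s of rest) reaches distance 140 after ten ticks, keeps that
        # distance while resting for the next 127 ticks, then starts flying again.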
def score(self):
self.points += 1
# E.g. 'Comet can fly 14 km/s for 10 seconds, but then must rest for 127
# seconds.'
REINDEER_PATTERN = re.compile(r'^(.+) can fly (\d+) km/s for (\d+) seconds, '
r'but then must rest for (\d+) seconds\.$')
def parse(reindeer):
'''Return Reindeer tuple for a reindeer entry as above'''
m = REINDEER_PATTERN.match(reindeer)
return Reindeer(m.group(1), int(m.group(2)),
int(m.group(3)), int(m.group(4)))
def race(lines, seconds):
'''Run a race for the reindeer defined by lines over seconds'''
reindeer = [parse(line) for line in lines]
for i in range(0, seconds):
for r in reindeer:
r.tick()
distance = 0
for r in reindeer:
if r.distance > distance:
distance = r.distance
for r in reindeer:
if r.distance == distance:
r.score()
return reindeer
def run(args): # pragma: no cover
filename = args[0]
with open(filename) as f:
lines = [line.strip() for line in f.readlines()]
reindeer = race(lines, 2503)
distance = 0
for r in reindeer:
distance = max(distance, r.distance)
print(f'The winning reindeer travels {distance} km')
points = 0
for r in reindeer:
points = max(points, r.points)
print(f'After changing the rules, the winner has {points} points')
| 27.793814
| 78
| 0.567507
|
cf6f865cd1876c9b81645ca3b6aa047ec5afc97a
| 1,511
|
py
|
Python
|
tests/mapping/test_map.py
|
davidbailey/dpd
|
29bce937e34afa2161788a5c4a911e590a388229
|
[
"MIT"
] | 6
|
2020-08-13T22:21:25.000Z
|
2021-09-15T19:12:51.000Z
|
tests/mapping/test_map.py
|
davidbailey/dpd
|
29bce937e34afa2161788a5c4a911e590a388229
|
[
"MIT"
] | 3
|
2018-01-25T09:11:01.000Z
|
2020-12-22T17:31:24.000Z
|
tests/mapping/test_map.py
|
davidbailey/dpd
|
29bce937e34afa2161788a5c4a911e590a388229
|
[
"MIT"
] | null | null | null |
from itertools import combinations
import unittest
from astropy import units
from shapely.geometry import Point, LineString
from dpd.mapping import Intersection, Map, Link
def build_test_map():
map_ = Map()
for x in range(2):
for y in range(2):
map_.intersections[str([x, y])] = Intersection(str([x, y]), Point(x, y))
for input_intersection, output_intersection in combinations(
map_.intersections.values(), 2
):
name = input_intersection.name + " to " + output_intersection.name
geometry = LineString(
[input_intersection.geometry, output_intersection.geometry]
)
segments = []
map_.links[name] = Link(
name,
geometry,
segments,
input_intersection,
output_intersection,
number_of_lanes=1,
sidewalk=True,
cycleway="track",
max_speed=25 * units.imperial.mile / units.hour,
)
map_.links[name].update_segments_from_osm(
number_of_lanes=1, sidewalk=True, cycleway="track"
)
return map_
class TestMap(unittest.TestCase):
def test_map(self):
self.map_ = build_test_map()
self.map_.plot(include_intersections=True)
self.map_.links.crs = "EPSG:4326"
self.map_.intersections.crs = "EPSG:4326"
self.map_.plot_folium(include_intersections=True)
self.map_.to_geodigraph()
if __name__ == "__main__":
unittest.main()
| 29.057692
| 84
| 0.622766
|
8ddf3022c68a9546b1221ebace078525953a74e0
| 5,320
|
py
|
Python
|
boto/dynamodb2/results.py
|
smartdec/boto
|
8ee4b1f45be3c9e0ece49079b00cd09bf0930b1e
|
[
"MIT"
] | 1
|
2019-07-29T02:53:51.000Z
|
2019-07-29T02:53:51.000Z
|
boto/dynamodb2/results.py
|
premanandchandrasekar/boto
|
a00851636307f971b03e72e1ce812cd3242816f3
|
[
"MIT"
] | 1
|
2021-09-11T14:30:32.000Z
|
2021-09-11T14:30:32.000Z
|
boto/dynamodb2/results.py
|
premanandchandrasekar/boto
|
a00851636307f971b03e72e1ce812cd3242816f3
|
[
"MIT"
] | 2
|
2016-12-19T02:27:46.000Z
|
2019-07-29T02:53:54.000Z
|
class ResultSet(object):
"""
A class used to lazily handle page-to-page navigation through a set of
results.
It presents a transparent iterator interface, so that all the user has
to do is use it in a typical ``for`` loop (or list comprehension, etc.)
to fetch results, even if they weren't present in the current page of
results.
This is used by the ``Table.query`` & ``Table.scan`` methods.
Example::
>>> users = Table('users')
>>> results = ResultSet()
>>> results.to_call(users.query, username__gte='johndoe')
# Now iterate. When it runs out of results, it'll fetch the next page.
>>> for res in results:
... print res['username']
"""
def __init__(self):
super(ResultSet, self).__init__()
self.the_callable = None
self.call_args = []
self.call_kwargs = {}
self._results = []
self._offset = -1
self._results_left = True
self._last_key_seen = None
@property
def first_key(self):
return 'exclusive_start_key'
def _reset(self):
"""
Resets the internal state of the ``ResultSet``.
This prevents results from being cached long-term & consuming
excess memory.
Largely internal.
"""
self._results = []
self._offset = 0
def __iter__(self):
return self
def next(self):
self._offset += 1
if self._offset >= len(self._results):
if self._results_left is False:
raise StopIteration()
self.fetch_more()
# It's possible that previous call to ``fetch_more`` may not return
# anything useful but there may be more results. Loop until we get
# something back, making sure we guard for no results left.
while not len(self._results) and self._results_left:
self.fetch_more()
if self._offset < len(self._results):
return self._results[self._offset]
else:
raise StopIteration()
def to_call(self, the_callable, *args, **kwargs):
"""
Sets up the callable & any arguments to run it with.
This is stored for subsequent calls so that those queries can be
run without requiring user intervention.
Example::
# Just an example callable.
>>> def squares_to(y):
... for x in range(1, y):
... yield x**2
>>> rs = ResultSet()
# Set up what to call & arguments.
>>> rs.to_call(squares_to, y=3)
"""
if not callable(the_callable):
raise ValueError(
'You must supply an object or function to be called.'
)
self.the_callable = the_callable
self.call_args = args
self.call_kwargs = kwargs
def fetch_more(self):
"""
When the iterator runs out of results, this method is run to re-execute
the callable (& arguments) to fetch the next page.
Largely internal.
"""
self._reset()
args = self.call_args[:]
kwargs = self.call_kwargs.copy()
if self._last_key_seen is not None:
kwargs[self.first_key] = self._last_key_seen
results = self.the_callable(*args, **kwargs)
new_results = results.get('results', [])
self._last_key_seen = results.get('last_key', None)
if len(new_results):
self._results.extend(results['results'])
# Decrease the limit, if it's present.
if self.call_kwargs.get('limit'):
self.call_kwargs['limit'] -= len(results['results'])
# and if limit hits zero, we don't have any more
# results to look for
if 0 == self.call_kwargs['limit']:
self._results_left = False
if self._last_key_seen is None:
self._results_left = False
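        # Illustrative example (not in the original source): with call_kwargs
        # {'limit': 5} and a page of 3 results, the block above reduces the limit
        # to 2 for the following page and stops fetching once it reaches zero.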
class BatchGetResultSet(ResultSet):
def __init__(self, *args, **kwargs):
self._keys_left = kwargs.pop('keys', [])
self._max_batch_get = kwargs.pop('max_batch_get', 100)
super(BatchGetResultSet, self).__init__(*args, **kwargs)
def fetch_more(self):
self._reset()
args = self.call_args[:]
kwargs = self.call_kwargs.copy()
# Slice off the max we can fetch.
kwargs['keys'] = self._keys_left[:self._max_batch_get]
self._keys_left = self._keys_left[self._max_batch_get:]
results = self.the_callable(*args, **kwargs)
if not len(results.get('results', [])):
self._results_left = False
return
self._results.extend(results['results'])
for offset, key_data in enumerate(results.get('unprocessed_keys', [])):
# We've got an unprocessed key. Reinsert it into the list.
# DynamoDB only returns valid keys, so there should be no risk of
# missing keys ever making it here.
self._keys_left.insert(offset, key_data)
if len(self._keys_left) <= 0:
self._results_left = False
# Decrease the limit, if it's present.
if self.call_kwargs.get('limit'):
self.call_kwargs['limit'] -= len(results['results'])
| 31.47929
| 79
| 0.583271
|
69496bee973f892d7e8b80ba3f379b525271bb87
| 1,203
|
py
|
Python
|
cogs/create.py
|
ParthJinturkar/TeachersPetBot
|
b157b350574ad3e8db93c3e81710edb83de9c862
|
[
"MIT"
] | null | null | null |
cogs/create.py
|
ParthJinturkar/TeachersPetBot
|
b157b350574ad3e8db93c3e81710edb83de9c862
|
[
"MIT"
] | 32
|
2021-11-12T00:11:51.000Z
|
2021-12-03T22:31:43.000Z
|
cogs/create.py
|
ParthJinturkar/TeachersPetBot
|
b157b350574ad3e8db93c3e81710edb83de9c862
|
[
"MIT"
] | 2
|
2021-11-27T01:21:17.000Z
|
2022-02-01T01:08:22.000Z
|
import csv
import os
import discord
from discord.ext import commands
# ----------------------------------------------------------------------------------------------
# Cog providing the 'create' command, which hands event creation off to the event_creation module
# ----------------------------------------------------------------------------------------------
from src import event_creation
class Create(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name='create', help='Create a new event.')
# @commands.dm_only()
@commands.has_role('Instructor')
async def create_event(self, ctx):
"""
Function:
create_event
Description:
        Command to create an event and send it to the event_creation module
Inputs:
- ctx: context of the command
Outputs:
Options to create event
"""
TESTING_MODE = False
await event_creation.create_event(ctx, self.bot, False)
# -------------------------------------
# add the file to the bot's cog system
# -------------------------------------
def setup(bot):
bot.add_cog(Create(bot))
# Copyright (c) 2021 War-Keeper
| 27.976744
| 96
| 0.499584
|
18abd8b86c05ba265dd35915d881d3e9a9a7bb1f
| 3,457
|
py
|
Python
|
lcd/i2c_pcf8574_interface.py
|
DoctorShields/CircuitPython_LCD
|
d8faaf042df2e1bf43dae91365abae2cfd54f504
|
[
"MIT"
] | null | null | null |
lcd/i2c_pcf8574_interface.py
|
DoctorShields/CircuitPython_LCD
|
d8faaf042df2e1bf43dae91365abae2cfd54f504
|
[
"MIT"
] | null | null | null |
lcd/i2c_pcf8574_interface.py
|
DoctorShields/CircuitPython_LCD
|
d8faaf042df2e1bf43dae91365abae2cfd54f504
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2017 Dan Halbert
# Adapted from https://github.com/dbrgn/RPLCD, Copyright (C) 2013-2016 Danilo Bargen
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Low-level interface to PCF8574."""
import busio
import board
import microcontroller  # needed for the delay_us() calls in _write4bits below
from adafruit_bus_device.i2c_device import I2CDevice
from .lcd import LCD_4BITMODE, LCD_BACKLIGHT, LCD_NOBACKLIGHT, PIN_ENABLE
class I2CPCF8574Interface:
# Bit values to turn backlight on/off. Indexed by a boolean.
_BACKLIGHT_VALUES = (LCD_NOBACKLIGHT, LCD_BACKLIGHT)
def __init__(self, address):
"""
CharLCD via PCF8574 I2C port expander.
Pin mapping::
7 | 6 | 5 | 4 | 3 | 2 | 1 | 0
D7 | D6 | D5 | D4 | BL | EN | RW | RS
:param address: The I2C address of your LCD.
"""
self.address = address
self._backlight_pin_state = LCD_BACKLIGHT
self.i2c = busio.I2C(board.SCL, board.SDA)
self.i2c_device = I2CDevice(self.i2c, self.address)
self.data_buffer = bytearray(1)
def deinit(self):
self.i2c.deinit()
@property
def data_bus_mode(self):
return LCD_4BITMODE
@property
def backlight(self):
return self._backlight_pin_state == LCD_BACKLIGHT
@backlight.setter
def backlight(self, value):
        self._backlight_pin_state = self._BACKLIGHT_VALUES[value]
self._i2c_write(self._backlight_pin_state)
# Low level commands
def send(self, value, rs_mode):
"""Send the specified value to the display in 4-bit nibbles.
The rs_mode is either ``_RS_DATA`` or ``_RS_INSTRUCTION``."""
self._write4bits(rs_mode | (value & 0xF0) | self._backlight_pin_state)
self._write4bits(rs_mode | ((value << 4) & 0xF0) | self._backlight_pin_state)
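        # Illustrative example (not in the original source): send(0x41, rs_mode)
        # performs two 4-bit writes, first the high nibble (rs_mode | 0x40 | backlight),
        # then the low nibble (rs_mode | 0x10 | backlight).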
def _write4bits(self, value):
"""Pulse the `enable` flag to process value."""
with self.i2c_device:
self._i2c_write(value & ~PIN_ENABLE)
# This 1us delay is probably unnecessary, given the time needed
# to execute the statements.
microcontroller.delay_us(1)
self._i2c_write(value | PIN_ENABLE)
microcontroller.delay_us(1)
self._i2c_write(value & ~PIN_ENABLE)
# Wait for command to complete.
microcontroller.delay_us(100)
def _i2c_write(self, value):
self.data_buffer[0] = value
self.i2c_device.write(self.data_buffer)
| 36.776596
| 85
| 0.685276
|
7ccc3c868ed44c825efa968ae34baaae94b79b6c
| 446
|
py
|
Python
|
backend/l11n/admin.py
|
tughi/localizappion
|
5c963dc8f7262cdb411cfbcc29ab4fac8fffc6c8
|
[
"BSD-3-Clause"
] | null | null | null |
backend/l11n/admin.py
|
tughi/localizappion
|
5c963dc8f7262cdb411cfbcc29ab4fac8fffc6c8
|
[
"BSD-3-Clause"
] | 1
|
2018-05-15T19:27:51.000Z
|
2018-05-15T19:29:30.000Z
|
backend/l11n/admin.py
|
tughi/localizappion
|
5c963dc8f7262cdb411cfbcc29ab4fac8fffc6c8
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import admin
from .models import Project
from .models import Session
from .models import String
from .models import Suggestion
from .models import Translation
from .models import Translator
from .models import Vote
admin.site.register(Project)
admin.site.register(Session)
admin.site.register(String)
admin.site.register(Suggestion)
admin.site.register(Translation)
admin.site.register(Translator)
admin.site.register(Vote)
| 24.777778
| 32
| 0.827354
|
f95e9432bd866e9fec1f3bf345925c00ea29c1ab
| 17,297
|
py
|
Python
|
neutron/cmd/sanity/checks.py
|
knodir/neutron
|
ac4e28478ac8a8a0c9f5c5785f6a6bcf532c66b8
|
[
"Apache-2.0"
] | null | null | null |
neutron/cmd/sanity/checks.py
|
knodir/neutron
|
ac4e28478ac8a8a0c9f5c5785f6a6bcf532c66b8
|
[
"Apache-2.0"
] | 5
|
2019-08-14T06:46:03.000Z
|
2021-12-13T20:01:25.000Z
|
neutron/cmd/sanity/checks.py
|
knodir/neutron
|
ac4e28478ac8a8a0c9f5c5785f6a6bcf532c66b8
|
[
"Apache-2.0"
] | 2
|
2020-03-15T01:24:15.000Z
|
2020-07-22T20:34:26.000Z
|
# Copyright (c) 2014 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import shutil
import tempfile
import netaddr
from neutron_lib import constants as n_consts
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import uuidutils
from neutron.agent.common import ovs_lib
from neutron.agent.l3 import ha_router
from neutron.agent.l3 import namespaces
from neutron.agent.linux import external_process
from neutron.agent.linux import ip_lib
from neutron.agent.linux import ip_link_support
from neutron.agent.linux import keepalived
from neutron.agent.linux import utils as agent_utils
from neutron.cmd import runtime_checks
from neutron.common import constants
from neutron.common import utils as common_utils
from neutron.plugins.ml2.drivers.openvswitch.agent.common \
import constants as ovs_const
LOG = logging.getLogger(__name__)
MINIMUM_DNSMASQ_VERSION = 2.67
DNSMASQ_VERSION_DHCP_RELEASE6 = 2.76
MINIMUM_DIBBLER_VERSION = '1.0.1'
def ovs_vxlan_supported(from_ip='192.0.2.1', to_ip='192.0.2.2'):
name = common_utils.get_rand_device_name(prefix='vxlantest-')
with ovs_lib.OVSBridge(name) as br:
port = br.add_tunnel_port(from_ip, to_ip, n_consts.TYPE_VXLAN)
return port != ovs_lib.INVALID_OFPORT
def ovs_geneve_supported(from_ip='192.0.2.3', to_ip='192.0.2.4'):
name = common_utils.get_rand_device_name(prefix='genevetest-')
with ovs_lib.OVSBridge(name) as br:
port = br.add_tunnel_port(from_ip, to_ip, n_consts.TYPE_GENEVE)
return port != ovs_lib.INVALID_OFPORT
def iproute2_vxlan_supported():
ip = ip_lib.IPWrapper()
name = common_utils.get_rand_device_name(prefix='vxlantest-')
port = ip.add_vxlan(name, 3000)
ip.del_veth(name)
return name == port.name
def patch_supported():
name, peer_name, patch_name = common_utils.get_related_rand_device_names(
['patchtest-', 'peertest0-', 'peertest1-'])
with ovs_lib.OVSBridge(name) as br:
port = br.add_patch_port(patch_name, peer_name)
return port != ovs_lib.INVALID_OFPORT
def nova_notify_supported():
try:
import neutron.notifiers.nova # noqa since unused
return True
except ImportError:
return False
def ofctl_arg_supported(cmd, **kwargs):
"""Verify if ovs-ofctl binary supports cmd with **kwargs.
:param cmd: ovs-ofctl command to use for test.
:param **kwargs: arguments to test with the command.
:returns: a boolean if the supplied arguments are supported.
"""
br_name = common_utils.get_rand_device_name(prefix='br-test-')
with ovs_lib.OVSBridge(br_name) as test_br:
full_args = ["ovs-ofctl", cmd, test_br.br_name,
ovs_lib._build_flow_expr_str(kwargs, cmd.split('-')[0],
False)]
try:
agent_utils.execute(full_args, run_as_root=True)
except RuntimeError as e:
LOG.debug("Exception while checking supported feature via "
"command %s. Exception: %s", full_args, e)
return False
except Exception:
LOG.exception("Unexpected exception while checking supported"
" feature via command: %s", full_args)
return False
else:
return True
def arp_responder_supported():
mac = netaddr.EUI('dead:1234:beef', dialect=netaddr.mac_unix)
ip = netaddr.IPAddress('240.0.0.1')
actions = ovs_const.ARP_RESPONDER_ACTIONS % {'mac': mac, 'ip': ip}
return ofctl_arg_supported(cmd='add-flow',
table=21,
priority=1,
proto='arp',
dl_vlan=42,
nw_dst='%s' % ip,
actions=actions)
def arp_header_match_supported():
return ofctl_arg_supported(cmd='add-flow',
table=24,
priority=1,
proto='arp',
arp_op='0x2',
arp_spa='1.1.1.1',
actions="NORMAL")
def icmpv6_header_match_supported():
return ofctl_arg_supported(cmd='add-flow',
table=ovs_const.ARP_SPOOF_TABLE,
priority=1,
dl_type=constants.ETHERTYPE_IPV6,
nw_proto=n_consts.PROTO_NUM_IPV6_ICMP,
icmp_type=n_consts.ICMPV6_TYPE_NA,
nd_target='fdf8:f53b:82e4::10',
actions="NORMAL")
def _vf_management_support(required_caps):
is_supported = True
try:
vf_section = ip_link_support.IpLinkSupport.get_vf_mgmt_section()
for cap in required_caps:
if not ip_link_support.IpLinkSupport.vf_mgmt_capability_supported(
vf_section, cap):
is_supported = False
LOG.debug("ip link command does not support "
"vf capability '%(cap)s'", {'cap': cap})
except ip_link_support.UnsupportedIpLinkCommand:
LOG.exception("Unexpected exception while checking supported "
"ip link command")
return False
return is_supported
def vf_management_supported():
required_caps = (
ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_STATE,
ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_SPOOFCHK,
ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_RATE)
return _vf_management_support(required_caps)
def vf_extended_management_supported():
required_caps = (
ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_STATE,
ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_SPOOFCHK,
ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_RATE,
ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_MIN_TX_RATE,
)
return _vf_management_support(required_caps)
def netns_read_requires_helper():
nsname = "netnsreadtest-" + uuidutils.generate_uuid()
ip_lib.create_network_namespace(nsname)
try:
# read without root_helper. if exists, not required.
exists = ip_lib.network_namespace_exists(nsname)
finally:
ip_lib.delete_network_namespace(nsname)
return not exists
def get_minimal_dnsmasq_version_supported():
return MINIMUM_DNSMASQ_VERSION
def get_dnsmasq_version_with_dhcp_release6():
return DNSMASQ_VERSION_DHCP_RELEASE6
def dnsmasq_local_service_supported():
cmd = ['dnsmasq', '--test', '--local-service']
env = {'LC_ALL': 'C'}
obj, cmd = agent_utils.create_process(cmd, addl_env=env)
_stdout, _stderr = obj.communicate()
returncode = obj.returncode
if returncode == 127:
LOG.debug("Exception while checking dnsmasq version. "
"dnsmasq: No such file or directory")
return False
elif returncode == 1:
return False
return True
def dnsmasq_version_supported():
try:
cmd = ['dnsmasq', '--version']
env = {'LC_ALL': 'C'}
out = agent_utils.execute(cmd, addl_env=env)
m = re.search(r"version (\d+\.\d+)", out)
ver = float(m.group(1)) if m else 0
if ver < MINIMUM_DNSMASQ_VERSION:
return False
except (OSError, RuntimeError, IndexError, ValueError) as e:
LOG.debug("Exception while checking minimal dnsmasq version. "
"Exception: %s", e)
return False
return True
def dhcp_release6_supported():
return runtime_checks.dhcp_release6_supported()
def bridge_firewalling_enabled():
for proto in ('arp', 'ip', 'ip6'):
knob = 'net.bridge.bridge-nf-call-%stables' % proto
cmd = ['sysctl', '-b', knob]
try:
out = agent_utils.execute(cmd)
except (OSError, RuntimeError, IndexError, ValueError) as e:
LOG.debug("Exception while extracting %(knob)s. "
"Exception: %(e)s", {'knob': knob, 'e': e})
return False
if out == '0':
return False
return True
class KeepalivedIPv6Test(object):
def __init__(self, ha_port, gw_port, gw_vip, default_gw):
self.ha_port = ha_port
self.gw_port = gw_port
self.gw_vip = gw_vip
self.default_gw = default_gw
self.manager = None
self.config = None
self.config_path = None
self.nsname = "keepalivedtest-" + uuidutils.generate_uuid()
self.pm = None
self.orig_interval = cfg.CONF.AGENT.check_child_processes_interval
def configure(self):
config = keepalived.KeepalivedConf()
instance1 = keepalived.KeepalivedInstance('MASTER', self.ha_port, 1,
['169.254.192.0/18'],
advert_int=5)
instance1.track_interfaces.append(self.ha_port)
# Configure keepalived with an IPv6 address (gw_vip) on gw_port.
vip_addr1 = keepalived.KeepalivedVipAddress(self.gw_vip, self.gw_port)
instance1.vips.append(vip_addr1)
# Configure keepalived with an IPv6 default route on gw_port.
gateway_route = keepalived.KeepalivedVirtualRoute(n_consts.IPv6_ANY,
self.default_gw,
self.gw_port)
instance1.virtual_routes.gateway_routes = [gateway_route]
config.add_instance(instance1)
self.config = config
def start_keepalived_process(self):
# Disable process monitoring for Keepalived process.
cfg.CONF.set_override('check_child_processes_interval', 0, 'AGENT')
self.pm = external_process.ProcessMonitor(cfg.CONF, 'router')
# Create a temp directory to store keepalived configuration.
self.config_path = tempfile.mkdtemp()
# Instantiate keepalived manager with the IPv6 configuration.
self.manager = keepalived.KeepalivedManager(
'router1', self.config,
namespace=self.nsname, process_monitor=self.pm,
conf_path=self.config_path)
self.manager.spawn()
def verify_ipv6_address_assignment(self, gw_dev):
process = self.manager.get_process()
common_utils.wait_until_true(lambda: process.active)
def _gw_vip_assigned():
iface_ip = gw_dev.addr.list(ip_version=6, scope='global')
if iface_ip:
return self.gw_vip == iface_ip[0]['cidr']
common_utils.wait_until_true(_gw_vip_assigned)
def __enter__(self):
ip_lib.create_network_namespace(self.nsname)
return self
def __exit__(self, exc_type, exc_value, exc_tb):
if self.pm:
self.pm.stop()
if self.manager:
self.manager.disable()
if self.config_path:
shutil.rmtree(self.config_path, ignore_errors=True)
ip_lib.delete_network_namespace(self.nsname)
cfg.CONF.set_override('check_child_processes_interval',
self.orig_interval, 'AGENT')
def keepalived_ipv6_supported():
"""Check if keepalived supports IPv6 functionality.
Validation is done as follows.
1. Create a namespace.
2. Create OVS bridge with two ports (ha_port and gw_port)
3. Move the ovs ports to the namespace.
4. Spawn keepalived process inside the namespace with IPv6 configuration.
5. Verify if IPv6 address is assigned to gw_port.
6. Verify if IPv6 default route is configured by keepalived.
"""
br_name, ha_port, gw_port = common_utils.get_related_rand_device_names(
['ka-test-', ha_router.HA_DEV_PREFIX, namespaces.INTERNAL_DEV_PREFIX])
gw_vip = 'fdf8:f53b:82e4::10/64'
expected_default_gw = 'fe80:f816::1'
with ovs_lib.OVSBridge(br_name) as br:
with KeepalivedIPv6Test(ha_port, gw_port, gw_vip,
expected_default_gw) as ka:
br.add_port(ha_port, ('type', 'internal'))
br.add_port(gw_port, ('type', 'internal'))
ha_dev = ip_lib.IPDevice(ha_port)
gw_dev = ip_lib.IPDevice(gw_port)
ha_dev.link.set_netns(ka.nsname)
gw_dev.link.set_netns(ka.nsname)
ha_dev.link.set_up()
gw_dev.link.set_up()
ha_dev.addr.add('169.254.192.8/18')
ka.configure()
ka.start_keepalived_process()
ka.verify_ipv6_address_assignment(gw_dev)
default_gw = gw_dev.route.get_gateway(ip_version=6)
if default_gw:
default_gw = default_gw['gateway']
return expected_default_gw == default_gw
def ovsdb_native_supported():
# Running the test should ensure we are configured for OVSDB native
try:
ovs = ovs_lib.BaseOVS()
ovs.get_bridges()
return True
except ImportError as ex:
LOG.error("Failed to import required modules. Ensure that the "
"python-openvswitch package is installed. Error: %s",
ex)
except Exception:
LOG.exception("Unexpected exception occurred.")
return False
def ovs_conntrack_supported():
br_name = common_utils.get_rand_device_name(prefix="ovs-test-")
with ovs_lib.OVSBridge(br_name) as br:
try:
br.add_protocols(*["OpenFlow%d" % i for i in range(10, 15)])
except RuntimeError as e:
LOG.debug("Exception while checking ovs conntrack support: %s", e)
return False
return ofctl_arg_supported(cmd='add-flow', ct_state='+trk', actions='drop')
def ebtables_supported():
try:
cmd = ['ebtables', '--version']
agent_utils.execute(cmd)
return True
except (OSError, RuntimeError, IndexError, ValueError) as e:
LOG.debug("Exception while checking for installed ebtables. "
"Exception: %s", e)
return False
def ipset_supported():
try:
cmd = ['ipset', '--version']
agent_utils.execute(cmd)
return True
except (OSError, RuntimeError, IndexError, ValueError) as e:
LOG.debug("Exception while checking for installed ipset. "
"Exception: %s", e)
return False
def ip6tables_supported():
try:
cmd = ['ip6tables', '--version']
agent_utils.execute(cmd)
return True
except (OSError, RuntimeError, IndexError, ValueError) as e:
LOG.debug("Exception while checking for installed ip6tables. "
"Exception: %s", e)
return False
def conntrack_supported():
try:
cmd = ['conntrack', '--version']
agent_utils.execute(cmd)
return True
except (OSError, RuntimeError, IndexError, ValueError) as e:
LOG.debug("Exception while checking for installed conntrack. "
"Exception: %s", e)
return False
def get_minimal_dibbler_version_supported():
return MINIMUM_DIBBLER_VERSION
def dibbler_version_supported():
try:
cmd = ['dibbler-client',
'help']
out = agent_utils.execute(cmd)
return '-w' in out
except (OSError, RuntimeError, IndexError, ValueError) as e:
LOG.debug("Exception while checking minimal dibbler version. "
"Exception: %s", e)
return False
def _fix_ip_nonlocal_bind_root_value(original_value):
current_value = ip_lib.get_ip_nonlocal_bind(namespace=None)
if current_value != original_value:
ip_lib.set_ip_nonlocal_bind(value=original_value, namespace=None)
def ip_nonlocal_bind():
nsname1 = "ipnonlocalbind1-" + uuidutils.generate_uuid()
nsname2 = "ipnonlocalbind2-" + uuidutils.generate_uuid()
ip_lib.create_network_namespace(nsname1)
try:
ip_lib.create_network_namespace(nsname2)
try:
original_value = ip_lib.get_ip_nonlocal_bind(namespace=None)
try:
ip_lib.set_ip_nonlocal_bind(value=0, namespace=nsname1)
ip_lib.set_ip_nonlocal_bind(value=1, namespace=nsname2)
ns1_value = ip_lib.get_ip_nonlocal_bind(namespace=nsname1)
finally:
_fix_ip_nonlocal_bind_root_value(original_value)
except RuntimeError as e:
LOG.debug("Exception while checking ip_nonlocal_bind. "
"Exception: %s", e)
return False
finally:
ip_lib.delete_network_namespace(nsname2)
finally:
ip_lib.delete_network_namespace(nsname1)
return ns1_value == 0
| 35.372188
| 79
| 0.637683
|
15aa592786bd5bef2e7c5037a2b080c153f469d2
| 2,700
|
py
|
Python
|
ln-tldr/main.py
|
kennyzha/light-novel-tldr
|
b68f7034f8613855a11fcaf18fb0474a170f4f50
|
[
"MIT"
] | null | null | null |
ln-tldr/main.py
|
kennyzha/light-novel-tldr
|
b68f7034f8613855a11fcaf18fb0474a170f4f50
|
[
"MIT"
] | null | null | null |
ln-tldr/main.py
|
kennyzha/light-novel-tldr
|
b68f7034f8613855a11fcaf18fb0474a170f4f50
|
[
"MIT"
] | null | null | null |
import praw
import re
import requests
import json
import os
from urllib import parse
import psycopg2
## Connecting to reddit
reddit = praw.Reddit(client_id=os.environ['CLIENT_ID'],
client_secret=os.environ['CLIENT_SECRET'],
password=os.environ['REDDIT_PASSWORD'],
user_agent='light novel chapter summarizer',
username=os.environ['REDDIT_USERNAME'])
## conn = sqlite3.connect('database.db')
## Connecting to database
parse.uses_netloc.append("postgres")
url = parse.urlparse(os.environ["DATABASE_URL"])
conn = psycopg2.connect(
database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port
)
c = conn.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS posts(
id text, title text, submission text)''')
subreddit = reddit.subreddit("noveltranslations")
API_KEY = os.environ['API_KEY']
def parse_text(text):
chapter_regex_pattern = r'\[.*Chapter [0-9]+.*\]\(.*\)'
chapter_iters = re.finditer(chapter_regex_pattern, text)
return chapter_iters
def get_url(matchObj):
url_regex_pattern = r'(http|ftp|https):\/\/([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?'
return re.search(url_regex_pattern, matchObj.group())
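# Illustrative example (made up, not from a real post): a line such as
# "[Chapter 12 - The Journey](https://example.com/novel/chapter-12)" is picked up by
# parse_text(), and get_url() then extracts "https://example.com/novel/chapter-12".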
smmry_url = "http://api.smmry.com/"
def summarize(api_key, sm_length, chapter_url):
payload = {"SM_API_KEY" : api_key, "SM_LENGTH" : sm_length, "SM_URL" : chapter_url}
return requests.get(smmry_url, payload)
for submission in subreddit.new(limit=10):
c.execute("SELECT count(*) FROM posts WHERE id = ?", (submission.id,))
data = c.fetchone()[0]
if data == 1:
print("Post id %s with title %s has been looked at." % (submission.id, submission.title))
continue
print("Post id %s with title %s has not been summarized." % (submission.id, submission.title))
print("Title: ", submission.title)
chapter_urls = parse_text(submission.selftext)
submission_post = ''
if not chapter_urls:
continue
for chapter_url in chapter_urls:
submission_post += '[Chapter Preview:](/s "'
url = get_url(chapter_url)
print("Link:", url.group())
summary = summarize(API_KEY, 7, url.group()).json()
if summary.get("smi_api_error") is None and summary.get("smi_api_content") is not None:
            submission_post += (summary.get("sm_api_content") + '")' + '\n\n')
else:
print("There was an error summarizing link ", url.group(), "from post", submission.title)
submission_post = "Error"
print("Submission_post is ", submission_post)
post = (submission.id, submission.title, submission_post,)
c.execute("INSERT INTO posts VALUES (?,?,?)", post)
conn.commit()
submission.reply(submission_post)
conn.close()
| 30.681818
| 109
| 0.682593
|
bf90e0af5667e19e4c2aec899e8cbd7e08416e99
| 981
|
py
|
Python
|
src/napari_error_dialogs/_tests/test_reader.py
|
dpshelio/napari-error-dialogs
|
29a6bbfa40ed445c9b60cd9752f8d62ecfab35aa
|
[
"BSD-3-Clause"
] | null | null | null |
src/napari_error_dialogs/_tests/test_reader.py
|
dpshelio/napari-error-dialogs
|
29a6bbfa40ed445c9b60cd9752f8d62ecfab35aa
|
[
"BSD-3-Clause"
] | null | null | null |
src/napari_error_dialogs/_tests/test_reader.py
|
dpshelio/napari-error-dialogs
|
29a6bbfa40ed445c9b60cd9752f8d62ecfab35aa
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
from napari_error_dialogs import napari_get_reader
# tmp_path is a pytest fixture
def test_reader(tmp_path):
"""An example of how you might test your plugin."""
# write some fake data using your supported file format
my_test_file = str(tmp_path / "myfile.npy")
original_data = np.random.rand(20, 20)
np.save(my_test_file, original_data)
# try to read it back in
reader = napari_get_reader(my_test_file)
assert callable(reader)
# make sure we're delivering the right format
layer_data_list = reader(my_test_file)
assert isinstance(layer_data_list, list) and len(layer_data_list) > 0
layer_data_tuple = layer_data_list[0]
assert isinstance(layer_data_tuple, tuple) and len(layer_data_tuple) > 0
# make sure it's the same as it started
np.testing.assert_allclose(original_data, layer_data_tuple[0])
def test_get_reader_pass():
reader = napari_get_reader("fake.file")
assert reader is None
| 31.645161
| 76
| 0.740061
|
1bd9a2c53ba7c40703c395c02c2c6806fe806d15
| 2,098
|
py
|
Python
|
smartnlp/topic/ida_topic.py
|
msgi/nlp-tour
|
ffed8c32da69c2427c92a7043f47bfc91e7feb64
|
[
"Apache-2.0"
] | 1,559
|
2019-05-27T03:43:29.000Z
|
2022-03-31T05:35:04.000Z
|
smartnlp/topic/ida_topic.py
|
msgi/nlp-tour
|
ffed8c32da69c2427c92a7043f47bfc91e7feb64
|
[
"Apache-2.0"
] | 5
|
2019-07-10T11:55:05.000Z
|
2020-05-08T12:01:31.000Z
|
smartnlp/topic/ida_topic.py
|
msgi/nlp-tour
|
ffed8c32da69c2427c92a7043f47bfc91e7feb64
|
[
"Apache-2.0"
] | 403
|
2019-06-14T03:36:17.000Z
|
2022-03-30T08:09:08.000Z
|
import pickle
from gensim.corpora import Dictionary
from gensim.models import LdaModel, TfidfModel
import jieba
import os
class LdaTopicModel(object):
def __init__(self, model_path,
config_path,
train=False,
file_path=None):
self.model_path = model_path
self.config_path = config_path
if not train:
self.dictionary, self.tf_idf = self.load_config()
self.model = self.load_model()
else:
self.file_path = file_path
self.dictionary, self.tf_idf, self.model = self.train()
def train(self):
corpus = self.preprocess()
dictionary = Dictionary(corpus)
doc2bow = [dictionary.doc2bow(text) for text in corpus]
tf_idf = TfidfModel(doc2bow)
corpus_tf_idf = tf_idf[doc2bow]
model = LdaModel(corpus_tf_idf, num_topics=2)
return dictionary, tf_idf, model
def save_model(self):
self.model.save(self.model_path)
def load_model(self):
try:
model = LdaModel.load(self.model_path)
except FileNotFoundError:
model = None
return model
def predict(self, text):
line_cut = list(jieba.cut(text))
doc2bow = self.dictionary.doc2bow(line_cut)
corpus_tf_idf = self.tf_idf[doc2bow]
return self.model[corpus_tf_idf]
def save_config(self):
with open(self.config_path, 'wb') as file:
pickle.dump((self.dictionary, self.tf_idf), file)
def load_config(self):
with open(self.config_path, 'rb') as file:
dictionary, tf_idf = pickle.load(file)
return dictionary, tf_idf
def preprocess(self):
        # Read all file names in the corpus directory
files = os.listdir(self.file_path)
corpus = []
for file in files:
dir_ = os.path.join(self.file_path, file)
with open(dir_, 'r', encoding='utf-8') as file_:
line = file_.read()
line_cut = list(jieba.cut(line))
corpus.append(line_cut)
return corpus
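# Hypothetical usage sketch (file and directory names are illustrative, not part of
# this module):
#   model = LdaTopicModel('lda.model', 'lda.conf', train=True, file_path='corpus_dir/')
#   model.save_model()
#   model.save_config()
#   print(model.predict('some text to score'))  # topic distribution for the text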
| 29.549296
| 67
| 0.601049
|
05b7403d2d2cd86f12fb368ca5e978943b45697e
| 277
|
py
|
Python
|
setup.py
|
OEP/bigdir
|
12b86a5e39a03bfd99bf7830171c31ef0392fb21
|
[
"MIT"
] | null | null | null |
setup.py
|
OEP/bigdir
|
12b86a5e39a03bfd99bf7830171c31ef0392fb21
|
[
"MIT"
] | null | null | null |
setup.py
|
OEP/bigdir
|
12b86a5e39a03bfd99bf7830171c31ef0392fb21
|
[
"MIT"
] | null | null | null |
from setuptools import setup, Extension
ext_modules = [
Extension('_bigdir', sources=['_bigdir.c']),
]
setup(
name='bigdir',
version='0.1.0',
description='bigdir lets you read really big directories',
py_modules=['bigdir'],
ext_modules=ext_modules,
)
| 19.785714
| 62
| 0.67509
|
2d3d4fd2583f933e2374e518af04fd8a06e6cade
| 41
|
py
|
Python
|
tator_online/__init__.py
|
kristianmk/tator
|
0eb75ee9333316b06f773de2b75e8e797a98ffdb
|
[
"MIT"
] | 50
|
2019-09-18T14:32:18.000Z
|
2022-03-31T16:26:07.000Z
|
tator_online/__init__.py
|
kristianmk/tator
|
0eb75ee9333316b06f773de2b75e8e797a98ffdb
|
[
"MIT"
] | 566
|
2019-09-18T16:33:40.000Z
|
2022-03-31T20:01:38.000Z
|
tator_online/__init__.py
|
kristianmk/tator
|
0eb75ee9333316b06f773de2b75e8e797a98ffdb
|
[
"MIT"
] | 19
|
2019-09-21T20:08:12.000Z
|
2022-03-17T14:53:11.000Z
|
from .middleware import StatsdMiddleware
| 20.5
| 40
| 0.878049
|
eb23dbf6291d212c98aec0d467476d9e59851a85
| 8,453
|
py
|
Python
|
voltha/adapters/microsemi_olt/PAS5211_constants.py
|
sathishms77/test
|
bf8df6fc16c41720c7d99ed1ff17a64b543e9672
|
[
"Apache-2.0"
] | 72
|
2017-01-18T02:36:34.000Z
|
2022-02-12T15:28:30.000Z
|
voltha/adapters/microsemi_olt/PAS5211_constants.py
|
sathishms77/test
|
bf8df6fc16c41720c7d99ed1ff17a64b543e9672
|
[
"Apache-2.0"
] | 11
|
2017-03-03T17:56:09.000Z
|
2022-02-11T03:38:03.000Z
|
voltha/adapters/microsemi_olt/PAS5211_constants.py
|
sathishms77/test
|
bf8df6fc16c41720c7d99ed1ff17a64b543e9672
|
[
"Apache-2.0"
] | 120
|
2017-02-02T23:26:11.000Z
|
2022-03-13T05:30:23.000Z
|
#
# Copyright 2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from enum import Enum
"""
PAS5211 Constants
"""
# from enum PON_true_false_t
PON_FALSE = 0
PON_TRUE = 1
# from enum PON_enable_disable_t
PON_DISABLE = 0
PON_ENABLE = 1
# from enum PON_mac_t
PON_MII = 0
PON_GMII = 1
PON_TBI = 2
PON_POLARITY_ACTIVE_LOW = 0
PON_POLARITY_ACTIVE_HIGH = 1
PON_OPTICS_VOLTAGE_IF_UNDEFINED = 0
PON_OPTICS_VOLTAGE_IF_CML = 1
PON_OPTICS_VOLTAGE_IF_LVPECL = 2
PON_SD_SOURCE_LASER_SD = 0
PON_SD_SOURCE_BCDR_LOCK = 1
PON_SD_SOURCE_BCDR_SD = 2
PON_RESET_TYPE_DELAY_BASED = 0
PON_RESET_TYPE_SINGLE_RESET = 1
PON_RESET_TYPE_DOUBLE_RESET = 2
PON_RESET_TYPE_NORMAL_START_BURST_BASED = 0
PON_RESET_TYPE_NORMAL_END_BURST_BASED = 1
PON_GPIO_LINE_0 = 0
PON_GPIO_LINE_1 = 1
PON_GPIO_LINE_2 = 2
PON_GPIO_LINE_3 = 3
PON_GPIO_LINE_4 = 4
PON_GPIO_LINE_5 = 5
PON_GPIO_LINE_6 = 6
PON_GPIO_LINE_7 = 7
def PON_EXT_GPIO_LINE(line):
return line + 8
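# Illustrative examples: PON_EXT_GPIO_LINE(0) == 8 and PON_EXT_GPIO_LINE(3) == 11,
# i.e. the extended GPIO lines continue after the eight PON_GPIO_LINE_* values above.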
PON_ALARM_SOFTWARE_ERROR = 0
PON_ALARM_LOS = 1
PON_ALARM_LOSI = 2
PON_ALARM_DOWI = 3
PON_ALARM_LOFI = 4
PON_ALARM_RDII = 5
PON_ALARM_LOAMI = 6
PON_ALARM_LCDGI = 7
PON_ALARM_LOAI = 8
PON_ALARM_SDI = 9
PON_ALARM_SFI = 10
PON_ALARM_PEE = 11
PON_ALARM_DGI = 12
PON_ALARM_LOKI = 13
PON_ALARM_TIWI = 14
PON_ALARM_TIA = 15
PON_ALARM_AUTH_FAILED_IN_REGISTRATION_ID_MODE = 20
PON_ALARM_SUFI = 21
PON_DOWNSTREAM_PLOAM_MESSAGE_ENCRYPTED_PORT_ID = 8
PON_DOWNSTREAM_PLOAM_MESSAGE_ASSIGN_ALLOC_ID = 10
PON_DOWNSTREAM_PLOAM_MESSAGE_CONFIGURE_PORT_ID = 14
PON_DOWNSTREAM_PLOAM_MESSAGE_BER_INTERVAL = 18
PON_DOWNSTREAM_PLOAM_MESSAGE_KEY_SWITCHING = 19
PON_ALARM_SDI_RAISE = 0
PON_ALARM_SDI_CLEAR = 1
PON_ALARM_RAISE = 0
PON_ALARM_CLEAR = 1
PON_ALARM_VIRTUAL_SCOPE_ONU_LASER_ALWAYS_ON = 16
PON_ALARM_VIRTUAL_SCOPE_ONU_SIGNAL_DEGRADATION = 17
PON_ALARM_VIRTUAL_SCOPE_ONU_EOL = 18
PON_ALARM_VIRTUAL_SCOPE_ONU_EOL_DATABASE_IS_FULL = 19
PON_ALARM_LAST_ALARM = 22
# from enum PON_general_parameters_type_t
PON_COMBINED_LOSI_LOFI = 1000
PON_TX_ENABLE_DEFAULT = 1001
# Enable or disable False queue full event from DBA
PON_FALSE_Q_FULL_EVENT_MODE = 1002
# Set PID_AID_MISMATCH min silence period. 0 - disable, Else - period in secs
PON_PID_AID_MISMATCH_MIN_SILENCE_PERIOD = 1003
# Set if FW generate clear alarm. 0 - generate clear alarm, Else - don't
# generate clear alarm
PON_ENABLE_CLEAR_ALARM = 1004
# Enable or disable sending the assign alloc id PLOAM. 0 - disable, 1 - enable
PON_ASSIGN_ALLOC_ID_PLOAM = 1005
# BIP error polling period, 200 - 65000, 0 - Disabled, Recommended: 5000
# (default)
PON_BIP_ERR_POLLING_PERIOD_MS = 1006
# Ignore SN when deactivated. 0 - consider SN (deactivate the onu if received
# same SN when activated (default) 1 - Ignore
PON_IGNORE_SN_WHEN_ACTIVE = 1007
# 0xffffffff - Disabled (default). Any other value (0 - 0xfffe) indicates
# that PA delay is enabled, with the specified delay value and included in
# the US_OVERHEAD PLOAM
PON_ONU_PRE_ASSIGNED_DELAY = 1008
# Enable or disable DS fragmentation, 0 disable, 1 enable
PON_DS_FRAGMENTATION = 1009
# Set whether FW reports an REI alarm when the error count is 0: 0 - disable (default), 1 - enable
PON_REI_ERRORS_REPORT_ALL = 1010
# Set whether to ignore SFI deactivation: 0 - disable (default), 1 - enable
PON_IGNORE_SFI_DEACTIVATION = 1011
# Allows to override the allocation overhead set by optic-params
# configuration. This configuration is only allowed when the pon channel
# is disabled
PON_OVERRIDE_ALLOCATION_OVERHEAD = 1012
# Optics timeline offset, -128-127, : this parameter is very sensitive and
# requires coordination with PMC
PON_OPTICS_TIMELINE_OFFSET = 1013
# Last general parameter
PON_LAST_GENERAL_PARAMETER = PON_OPTICS_TIMELINE_OFFSET
# from enum PON_dba_mode_t
PON_DBA_MODE_NOT_LOADED = 0
PON_DBA_MODE_LOADED_NOT_RUNNING = 1
PON_DBA_MODE_RUNNING = 2
PON_DBA_MODE_LAST = 3
# from enum PON_port_frame_destination_t
PON_PORT_PON = 0
PON_PORT_SYSTEM = 1
PON_PORT_TYPE_GEM = 0
PON_PORT_TYPE_TDM = 1
PON_PORT_TYPE_OMCI = 2
PON_PORT_DESTINATION_CNI0 = 0
PON_PORT_DESTINATION_CNI1 = 1
PON_PORT_DESTINATION_CNI2 = 2
PON_PORT_DESTINATION_CPU = 3
# from enum PON_olt_hw_classification_t
PON_OLT_HW_CLASSIFICATION_PAUSE = 0
PON_OLT_HW_CLASSIFICATION_LINK_CONSTRAINT = 1
PON_OLT_HW_CLASSIFICATION_IGMP = 2
PON_OLT_HW_CLASSIFICATION_MPCP = 3
PON_OLT_HW_CLASSIFICATION_OAM = 4
PON_OLT_HW_CLASSIFICATION_802_1X = 5
PON_OLT_HW_CLASSIFICATION_PPPOE_DISCOVERY = 6
PON_OLT_HW_CLASSIFICATION_PPPOE_SESSION = 7
PON_OLT_HW_CLASSIFICATION_DHCP_V4 = 8
PON_OLT_HW_CLASSIFICATION_PIM = 9
PON_OLT_HW_CLASSIFICATION_DHCP_V6 = 10
PON_OLT_HW_CLASSIFICATION_ICMP_V4 = 11
PON_OLT_HW_CLASSIFICATION_MLD = 12
PON_OLT_HW_CLASSIFICATION_ARP = 13
PON_OLT_HW_CLASSIFICATION_CONF_DA = 14
PON_OLT_HW_CLASSIFICATION_CONF_RULE = 15
PON_OLT_HW_CLASSIFICATION_DA_EQ_SA = 16
PON_OLT_HW_CLASSIFICATION_DA_EQ_MAC = 17
PON_OLT_HW_CLASSIFICATION_DA_EQ_SEC_MAC = 18
PON_OLT_HW_CLASSIFICATION_SA_EQ_MAC = 19
PON_OLT_HW_CLASSIFICATION_SA_EQ_SEC_MAC = 20
PON_OLT_HW_CLASSIFICATION_ETHERNET_MANAGEMENT = 100
PON_OLT_HW_CLASSIFICATION_IPV4_LOCAL_MULTICAST = 101
PON_OLT_HW_CLASSIFICATION_IPV4_MANAGEMENT = 102
PON_OLT_HW_CLASSIFICATION_ALL_IPV4_MULTICAST = 103
PON_OLT_HW_CLASSIFICATION_IPV6_LOCAL_MULTICAST = 104
PON_OLT_HW_CLASSIFICATION_IPV6_MANAGEMENT = 105
PON_OLT_HW_CLASSIFICATION_ALL_IPV6_MULTICAST = 106
PON_OLT_HW_CLASSIFICATION_OTHER = 107
PON_OLT_HW_CLASSIFICATION_LAST_RULE = 108
PON_ACTIVATION_AUTH_AUTO = 0
PON_ACTIVATION_AUTH_HOST_CONTROLLED_SEPARATE_EVENTS = 1 # Host controlled: Separate events
PON_ACTIVATION_AUTH_HOST_CONTROLLED_LUMPED_SN = 2 # Host controlled: Lumped-SN-Response
PON_ACTIVATION_AUTH_REGISTRATION_ID_RAW = 3 # Registration-ID Raw
PON_ACTIVATION_AUTH_REGISTRATION_ID_LEARN = 4 # Registration-ID Learn
PON_LOGICAL_OBJECT_TYPE_ALLOC_ID = 0
PON_LOGICAL_OBJECT_TYPE_ONU_ID_BY_ALLOC_ID = 1
PON_LOGICAL_OBJECT_TYPE_MAX = PON_LOGICAL_OBJECT_TYPE_ONU_ID_BY_ALLOC_ID
PMC_OFAL_MAX_BI_DIRECTIONAL_FLOW_PER_ONU = 16
PMC_OFAL_START_FLOW_ID_BASE = 1000
PON_DL_VLAN_SVLAN_REMOVE = 3
PON_DL_VLAN_CVLAN_NO_CHANGE = 0
PON_VLAN_DONT_CHANGE_TAG = 0
PON_VLAN_CHANGE_TAG = 1
PON_VLAN_CHANGE_OTHER_TAG = 2
PON_VLAN_CHANGE_REMOVE_TAG = 3
PON_VLAN_UNUSED_TAG = -1
PON_VLAN_DEST_DATAPATH = 1
PON_VLAN_UNCHANGED_PRIORITY = -1
PON_VLAN_REPLACE_PRIORITY = -2
PON_VLAN_UNUSED_PRIORITY = -1
PON_OUTPUT_VLAN_PRIO_HANDLE_DONT_CHANGE = 0
PON_OUTPUT_VLAN_PRIO_HANDLE_INCOMING_VLAN = 1
PON_OUTPUT_VLAN_PRIO_HANDLE_DL_VLAN_TABLE = 2
PON_N_TO_1_VLAN_MODE = 0
PON_1_TO_1_VLAN_MODE = 1
# OMCI
OMCI_8021P_MSP_UNMARKED_FRAME_TAG_FRAME = 1
OMCI_8021P_MSP_TP_TYPE_NULL = 0
# GEM interworking option for MAC Bridge LAN
OMCI_GEM_IWTP_IW_OPT_8021P_MAPPER = 5
# LLC frame encapsulation method
OMCI_MAC_BRIDGE_PCD_ENCAP_METHOD_LLC = 1
# Frame Check Sequence will be forwarded
OMCI_MAC_BRIDGE_PCD_LANFCS_FORWARDED = 0
# in DS, perform the inverse of US operation
OMCI_EX_VLAN_TAG_OCD_DS_MODE_US_INVERSE = 0
# Physical path termination point Ethernet UNI
OMCI_EX_VLAN_TAG_OCD_ASSOCIATION_TYPE_PPTP_ETH_UNI = 2
# GEM Port Network CTP
# Direction definitions
GEM_DIR_BIDIRECT = 3 # bi-directional
# PMC
PMC_UPSTREAM_PORT = 129
# SLA
PYTHAGORAS_UPDATE_AID_SLA = 21
PYTHAGORAS_DBA_DATA_COS = 0
PYTHAGORAS_DBA_STATUS_REPORT_NSR = 0
PYTHAGORAS_SET_SLA_RESP_SIZE = 10
# Random values found in PMC code
SLA_gr_bw_gros = 30
SLA_be_bw_gros = 300
SLA_gr_bw_fine = 0
SLA_be_bw_fine = 0
PON_MAX_DS_POLICING_CONFIGURATION_INDEX = 255
PMC_OFAL_NO_POLICY = PON_MAX_DS_POLICING_CONFIGURATION_INDEX+1
# Enum classes to set alarm parameters
class AlarmEventType(Enum):
COMMUNICATION = 0
ENVIRONMENT = 1
EQUIPTMENT = 2
SERVICE = 3
PROCESSING = 4
SECURITY = 5
class AlarmEventCategory(Enum):
PON = 0
class AlarmEventState(Enum):
RAISED = 1
CLEARED = 0
class AlarmEventSeverity(Enum):
INDETERMINATE = 0
WARNING = 1
MINOR = 2
MAJOR = 3
CRITICAL = 4
AlarmEventStateDefault = 1
UPSTREAM = 1
DOWNSTREAM = 2
PASCOMM_RETRIES = 5
| 27.355987
| 90
| 0.827399
|
8ec68f00430ad6d29d8526aae66a01897e6071ef
| 3,877
|
py
|
Python
|
server/CloudantDB.py
|
AnsgarSchmidt/sensomatic
|
f7a0040df0a7ad278cdb2986982b60747763e119
|
[
"Apache-2.0"
] | 4
|
2017-02-15T17:33:47.000Z
|
2021-03-18T19:01:44.000Z
|
server/CloudantDB.py
|
AnsgarSchmidt/sensomatic
|
f7a0040df0a7ad278cdb2986982b60747763e119
|
[
"Apache-2.0"
] | 1
|
2015-07-05T09:27:45.000Z
|
2016-10-30T17:37:38.000Z
|
server/CloudantDB.py
|
AnsgarSchmidt/sensomatic
|
f7a0040df0a7ad278cdb2986982b60747763e119
|
[
"Apache-2.0"
] | 3
|
2016-08-15T19:06:50.000Z
|
2020-10-06T13:28:34.000Z
|
import os
import time
import redis
import threading
import ConfigParser
from cloudant.client import Cloudant
class CloudantDB(threading.Thread):
def _readConfig(self):
update = False
if not os.path.isdir(self._homeDir):
print "Creating homeDir"
os.makedirs(self._homeDir)
if os.path.isfile(self._configFileName):
self._config.read(self._configFileName)
else:
print "Config file not found"
update = True
if not self._config.has_section('REDIS'):
print "Adding Redis part"
update = True
self._config.add_section("REDIS")
if not self._config.has_option("REDIS", "ServerAddress"):
print "No Server Address"
update = True
self._config.set("REDIS", "ServerAddress", "<ServerAddress>")
if not self._config.has_option("REDIS", "ServerPort"):
print "No Server Port"
update = True
self._config.set("REDIS", "ServerPort", "6379")
if not self._config.has_section('CLOUDANT'):
print "Adding Cloudant part"
update = True
self._config.add_section("CLOUDANT")
if not self._config.has_option("CLOUDANT", "ServerAddress"):
print "No Server Address"
update = True
self._config.set("CLOUDANT", "ServerAddress", "<ServerAddress>")
if not self._config.has_option("CLOUDANT", "Username"):
print "No Username"
update = True
self._config.set("CLOUDANT", "Username", "Didditulle")
if not self._config.has_option("CLOUDANT", "Password"):
print "No Password"
update = True
self._config.set("CLOUDANT", "Password", "geheim")
if update:
with open(self._configFileName, 'w') as f:
self._config.write(f)
def __init__(self):
threading.Thread.__init__(self)
self.setDaemon(True)
self._homeDir = os.path.expanduser("~/.sensomatic")
self._configFileName = self._homeDir + '/config.ini'
self._config = ConfigParser.ConfigParser()
self._readConfig()
self._redis = redis.StrictRedis(host=self._config.get("REDIS", "ServerAddress"), port=self._config.get("REDIS", "ServerPort"), db=0)
self._cloudant = Cloudant(self._config.get("CLOUDANT", "Username"), self._config.get("CLOUDANT", "Password"), url=self._config.get("CLOUDANT", "ServerAddress"))
self.checkDB()
def checkDB(self):
self._cloudant.connect()
if "usshorizon" in self._cloudant.all_dbs():
self._database = self._cloudant['usshorizon']
return True
else:
print "Create DB"
self._database = self._cloudant.create_database('usshorizon')
if self._database.exists():
print "Success"
return True
else:
print "Error"
return False
def getData(self):
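        # Build a nested dict from the flat Redis keys: a key such as
        # "room/sensor/temp" ends up as data["room"]["sensor"]["temp"]["value"].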
data = {'timestamp':time.time()}
for key in self._redis.keys():
k = key.split('/')
l = len(k)
w = data
for i in range(l):
if k[i] not in w:
w[k[i]] = {}
w = w[k[i]]
if i == l - 1:
w['value'] = self._redis.get(key)
return data
def run(self):
while True:
d = self.getData()
# print d
try:
if self.checkDB():
self._database.create_document(d)
except Exception as e:
print e
time.sleep(60)
time.sleep(10)
if __name__ == '__main__':
c = CloudantDB()
c.start()
time.sleep(5)
| 32.579832
| 174
| 0.542946
|
005ddfde1c6c58dd848360517277f3da64cfbde5
| 1,650
|
py
|
Python
|
cohesity_management_sdk/models/ssl_verification.py
|
nick6655/management-sdk-python
|
88e792cb83e5c24a22af495b220c145d0c45841d
|
[
"Apache-2.0"
] | 18
|
2019-09-24T17:35:53.000Z
|
2022-03-25T08:08:47.000Z
|
cohesity_management_sdk/models/ssl_verification.py
|
nick6655/management-sdk-python
|
88e792cb83e5c24a22af495b220c145d0c45841d
|
[
"Apache-2.0"
] | 18
|
2019-03-29T19:32:29.000Z
|
2022-01-03T23:16:45.000Z
|
cohesity_management_sdk/models/ssl_verification.py
|
nick6655/management-sdk-python
|
88e792cb83e5c24a22af495b220c145d0c45841d
|
[
"Apache-2.0"
] | 16
|
2019-02-27T06:54:12.000Z
|
2021-11-16T18:10:24.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2021 Cohesity Inc.
class SslVerification(object):
"""Implementation of the 'SslVerification' model.
Specifies information about SSL verification when registering certain
sources.
Attributes:
ca_certificate (string): Contains the contents of CA cert/cert chain.
is_enabled (bool): Whether SSL verification should be performed.
"""
# Create a mapping from Model property names to API property names
_names = {
"ca_certificate":'caCertificate',
"is_enabled":'isEnabled'
}
def __init__(self,
ca_certificate=None,
is_enabled=None):
"""Constructor for the SslVerification class"""
# Initialize members of the class
self.ca_certificate = ca_certificate
self.is_enabled = is_enabled
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
ca_certificate = dictionary.get('caCertificate')
is_enabled = dictionary.get('isEnabled')
# Return an object of this model
return cls(ca_certificate,
is_enabled)
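# Illustrative usage only (not part of the original module); the values below
# are made-up placeholders, but the keys follow the _names mapping above.
if __name__ == '__main__':
    example = SslVerification.from_dictionary(
        {'caCertificate': '<ca-certificate-pem>', 'isEnabled': True})
    print(example.is_enabled, example.ca_certificate)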
| 27.5
| 81
| 0.629697
|
2121e152d4dc122535e3396b64787e0da5277147
| 659
|
py
|
Python
|
salemali/models.py
|
mablue/Specialized-Procurement-and-Sales-Management-System-for-East-Azarbaijan-Gas-Company
|
da071bd199a92d571228a1d823a7fa1f52275604
|
[
"MIT"
] | 30
|
2019-08-18T14:57:39.000Z
|
2022-03-15T22:09:09.000Z
|
salemali/models.py
|
mablue/Specialized-Procurement-and-Sales-Management-System-for-East-Azarbaijan-Gas-Company
|
da071bd199a92d571228a1d823a7fa1f52275604
|
[
"MIT"
] | 3
|
2021-03-19T00:46:32.000Z
|
2021-09-22T01:29:08.000Z
|
salemali/models.py
|
mablue/Specialized-Procurement-and-Sales-Management-System-for-East-Azarbaijan-Gas-Company
|
da071bd199a92d571228a1d823a7fa1f52275604
|
[
"MIT"
] | 7
|
2019-08-20T14:04:28.000Z
|
2021-06-29T06:43:08.000Z
|
from django.db import models
# Create your models here.
# Fiscal year
class Salemali(models.Model):
sal = models.IntegerField(verbose_name="سال")
arzeshe_afzude = models.IntegerField(verbose_name="درصد ارزش افزوده", default=0)
nerkhe_tafkik = models.IntegerField(verbose_name='نرخ تفکیک معامله ها ', )
# is_active = models.BooleanField(default=False, verbose_name="فعال")
class Meta:
verbose_name = "سال مالی"
verbose_name_plural = "سالهای مالی"
# app_label = 'My APP name'
def __str__(self):
return "سال {} با ارزش افزوده {} و نرخ تفکیک {}".format(self.sal, self.arzeshe_afzude, self.nerkhe_tafkik)
| 32.95
| 114
| 0.694992
|
0185df70f21d9c4c1d308cb2a5a598cada91a759
| 5,275
|
py
|
Python
|
broadface/loss.py
|
kakaoenterprise/BroadFace
|
e6a021dc2993ef94fe58a4f5a4a11a1fa19d289b
|
[
"Apache-2.0"
] | 20
|
2021-02-16T09:04:35.000Z
|
2021-11-05T12:01:12.000Z
|
broadface/loss.py
|
kakaoenterprise/BroadFace
|
e6a021dc2993ef94fe58a4f5a4a11a1fa19d289b
|
[
"Apache-2.0"
] | 3
|
2021-05-19T17:08:58.000Z
|
2021-07-11T21:51:04.000Z
|
broadface/loss.py
|
kakaoenterprise/BroadFace
|
e6a021dc2993ef94fe58a4f5a4a11a1fa19d289b
|
[
"Apache-2.0"
] | 2
|
2021-02-16T09:04:39.000Z
|
2021-08-25T10:01:59.000Z
|
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
class ArcFace(nn.Module):
def __init__(self, in_features, out_features, scale_factor=64.0, margin=0.50):
super(ArcFace, self).__init__()
self.in_features = in_features
self.out_features = out_features
self.criterion = nn.CrossEntropyLoss()
self.margin = margin
self.scale_factor = scale_factor
self.weight = nn.Parameter(torch.FloatTensor(out_features, in_features))
nn.init.xavier_uniform_(self.weight)
self.cos_m = math.cos(margin)
self.sin_m = math.sin(margin)
self.th = math.cos(math.pi - margin)
self.mm = math.sin(math.pi - margin) * margin
def forward(self, input, label):
# input is not l2 normalized
cosine = F.linear(F.normalize(input), F.normalize(self.weight))
sine = torch.sqrt(1.0 - torch.pow(cosine, 2))
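        # additive angular margin: phi = cos(theta + m), expanded with the
        # identity cos(a + b) = cos(a)*cos(b) - sin(a)*sin(b)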
phi = cosine * self.cos_m - sine * self.sin_m
phi = torch.where(cosine > self.th, phi, cosine - self.mm)
one_hot = torch.zeros(cosine.size(), device=input.device)
one_hot.scatter_(1, label.view(-1, 1).long(), 1)
logit = (one_hot * phi) + ((1.0 - one_hot) * cosine)
logit *= self.scale_factor
loss = self.criterion(logit, label)
return loss
class BroadFaceArcFace(nn.Module):
def __init__(
self,
in_features,
out_features,
scale_factor=64.0,
margin=0.50,
queue_size=10000,
compensate=False,
):
super(BroadFaceArcFace, self).__init__()
self.in_features = in_features
self.out_features = out_features
self.criterion = nn.CrossEntropyLoss(reduction="none")
self.margin = margin
self.scale_factor = scale_factor
self.weight = nn.Parameter(torch.FloatTensor(out_features, in_features))
nn.init.xavier_uniform_(self.weight)
self.cos_m = math.cos(margin)
self.sin_m = math.sin(margin)
self.th = math.cos(math.pi - margin)
self.mm = math.sin(math.pi - margin) * margin
feature_mb = torch.zeros(0, in_features)
label_mb = torch.zeros(0, dtype=torch.int64)
proxy_mb = torch.zeros(0, in_features)
self.register_buffer("feature_mb", feature_mb)
self.register_buffer("label_mb", label_mb)
self.register_buffer("proxy_mb", proxy_mb)
self.queue_size = queue_size
self.compensate = compensate
def update(self, input, label):
self.feature_mb = torch.cat([self.feature_mb, input.data], dim=0)
self.label_mb = torch.cat([self.label_mb, label.data], dim=0)
self.proxy_mb = torch.cat(
[self.proxy_mb, self.weight.data[label].clone()], dim=0
)
over_size = self.feature_mb.shape[0] - self.queue_size
if over_size > 0:
self.feature_mb = self.feature_mb[over_size:]
self.label_mb = self.label_mb[over_size:]
self.proxy_mb = self.proxy_mb[over_size:]
assert (
self.feature_mb.shape[0] == self.label_mb.shape[0] == self.proxy_mb.shape[0]
)
def compute_arcface(self, x, y, w):
cosine = F.linear(F.normalize(x), F.normalize(w))
sine = torch.sqrt(1.0 - torch.pow(cosine, 2))
phi = cosine * self.cos_m - sine * self.sin_m
phi = torch.where(cosine > self.th, phi, cosine - self.mm)
one_hot = torch.zeros(cosine.size(), device=x.device)
one_hot.scatter_(1, y.view(-1, 1).long(), 1)
logit = (one_hot * phi) + ((1.0 - one_hot) * cosine)
logit *= self.scale_factor
ce_loss = self.criterion(logit, y)
return ce_loss.mean()
def forward(self, input, label):
# input is not l2 normalized
weight_now = self.weight.data[self.label_mb]
delta_weight = weight_now - self.proxy_mb
if self.compensate:
update_feature_mb = (
self.feature_mb
+ (
self.feature_mb.norm(p=2, dim=1, keepdim=True)
/ self.proxy_mb.norm(p=2, dim=1, keepdim=True)
)
* delta_weight
)
else:
update_feature_mb = self.feature_mb
large_input = torch.cat([update_feature_mb, input.data], dim=0)
large_label = torch.cat([self.label_mb, label], dim=0)
batch_loss = self.compute_arcface(input, label, self.weight.data)
broad_loss = self.compute_arcface(large_input, large_label, self.weight)
self.update(input, label)
return batch_loss + broad_loss
if __name__ == "__main__":
batch_size = 128
embedding_size = 512
num_classes = 10000
arcface_criterion = ArcFace(embedding_size, num_classes)
broadface_criterion = BroadFaceArcFace(embedding_size, num_classes, queue_size=2000, compensate=True)
for _ in range(100):
embedding = torch.randn(batch_size, embedding_size)
labels = torch.randint(0, num_classes, (batch_size,))
arcface_loss = arcface_criterion(embedding, labels)
broadface_loss = broadface_criterion(embedding, labels)
print(arcface_loss.detach())
print(broadface_loss.detach())
| 33.814103
| 105
| 0.620664
|
ae8fed8f3e86351054f921f027a31bb5c7528fd0
| 912
|
py
|
Python
|
dbaas/physical/admin/replication_topology.py
|
didindinn/database-as-a-service
|
747de31ff8546f7874ddd654af860e130afd17a0
|
[
"BSD-3-Clause"
] | 303
|
2015-01-08T10:35:54.000Z
|
2022-02-28T08:54:06.000Z
|
dbaas/physical/admin/replication_topology.py
|
nouraellm/database-as-a-service
|
5e655c9347bea991b7218a01549f5e44f161d7be
|
[
"BSD-3-Clause"
] | 124
|
2015-01-14T12:56:15.000Z
|
2022-03-22T20:45:11.000Z
|
dbaas/physical/admin/replication_topology.py
|
nouraellm/database-as-a-service
|
5e655c9347bea991b7218a01549f5e44f161d7be
|
[
"BSD-3-Clause"
] | 110
|
2015-01-02T11:59:48.000Z
|
2022-02-28T08:54:06.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.contrib import admin
from ..forms.replication_topology import ReplicationTopologyForm
from ..models import TopologyParameterCustomValue
class ParamCustomValueInline(admin.TabularInline):
model = TopologyParameterCustomValue
class ReplicationTopologyAdmin(admin.ModelAdmin):
form = ReplicationTopologyForm
list_filter = ("has_horizontal_scalability", "engine")
search_fields = ("name",)
list_display = ("name", "versions", "has_horizontal_scalability")
filter_horizontal = ("parameter",)
save_on_top = True
inlines = [ParamCustomValueInline]
change_form_template = "admin/physical/replicationtopology/change_form.html"
add_form_template = "admin/change_form.html"
def versions(self, obj):
return ", ".join([str(engine.version) for engine in obj.engine.all()])
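# Illustrative only (not part of the original file): a ModelAdmin like this is
# normally hooked up with admin.site.register; the actual project may perform
# this registration elsewhere.
# from ..models import ReplicationTopology
# admin.site.register(ReplicationTopology, ReplicationTopologyAdmin)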
| 32.571429
| 80
| 0.757675
|
fd997d087677f50868070b3cde675da2267006b5
| 1,225
|
py
|
Python
|
fdi/migrations/0029_auto_20180214_1451.py
|
uktrade/export-wins-data
|
46caa444812e89abe504bec8c15aa7f7ba1a247e
|
[
"MIT"
] | 5
|
2016-09-12T12:52:45.000Z
|
2020-03-24T14:43:13.000Z
|
fdi/migrations/0029_auto_20180214_1451.py
|
uktrade/export-wins-data
|
46caa444812e89abe504bec8c15aa7f7ba1a247e
|
[
"MIT"
] | 435
|
2016-10-18T12:51:39.000Z
|
2021-06-09T17:22:08.000Z
|
fdi/migrations/0029_auto_20180214_1451.py
|
uktrade/export-wins-data
|
46caa444812e89abe504bec8c15aa7f7ba1a247e
|
[
"MIT"
] | 2
|
2016-12-06T10:37:21.000Z
|
2017-02-22T17:27:43.000Z
|
# Generated by Django 2.0.2 on 2018-02-14 14:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('fdi', '0028_auto_20180213_1050'),
]
operations = [
migrations.CreateModel(
name='OverseasRegion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='OverseasRegionMarket',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('market', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='fdi.Market')),
('overseas_region', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='fdi.OverseasRegion')),
],
),
migrations.AddField(
model_name='overseasregion',
name='markets',
field=models.ManyToManyField(through='fdi.OverseasRegionMarket', to='fdi.Market'),
),
]
| 35
| 125
| 0.600816
|
74ae2f974420ea3cbbb1b46cd8d611490cf4dcc3
| 1,568
|
py
|
Python
|
test_machine.py
|
raspihats/pipnp
|
2ffc2f083d1301966066d34595a9e3d0391e9c65
|
[
"MIT"
] | null | null | null |
test_machine.py
|
raspihats/pipnp
|
2ffc2f083d1301966066d34595a9e3d0391e9c65
|
[
"MIT"
] | null | null | null |
test_machine.py
|
raspihats/pipnp
|
2ffc2f083d1301966066d34595a9e3d0391e9c65
|
[
"MIT"
] | null | null | null |
from app.machine import machine
import logging
logging.basicConfig(
format='%(asctime)s %(levelname)s %(message)s',
level=logging.DEBUG,
datefmt='%m/%d/%Y %I:%M:%S %p'
)
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def main():
# offset = {'x': 95.8, 'y': 23.5}
# components = """LD1,47.47,3.97,180,OSG50603C1E,CHIPLED_0603
# LD2,48.97,3.97,180,OSG50603C1E,CHIPLED_0603
# LD3,50.48,3.97,180,OSG50603C1E,CHIPLED_0603
# LD4,51.99,3.97,180,OSG50603C1E,CHIPLED_0603
# LD5,53.50,3.97,180,OSG50603C1E,CHIPLED_0603
# LD6,55.01,3.97,180,OSG50603C1E,CHIPLED_0603
# LD7,56.52,3.97,180,OSG50603C1E,CHIPLED_0603
# LD8,58.02,3.97,180,OSG50603C1E,CHIPLED_0603
# LD9,65.09,3.97,180,OSG50603C1E,CHIPLED_0603
# LD10,66.60,3.97,180,OSG50603C1E,CHIPLED_0603
# LD11,68.10,3.97,180,OSG50603C1E,CHIPLED_0603
# LD12,69.61,3.97,180,OSG50603C1E,CHIPLED_0603
# LD13,71.12,3.97,180,OSG50603C1E,CHIPLED_0603
# LD14,72.63,3.97,180,OSG50603C1E,CHIPLED_0603
# LD15,74.14,3.97,180,OSG50603C1E,CHIPLED_0603
# LD16,75.64,3.97,180,OSG50603C1E,CHIPLED_0603"""
# job = []
# lines = components.split('\n')
# for line in lines:
# data = line.split(',')
# job.append({'name': data[0], 'x': float(data[1]), 'y': float(data[2])})
try:
machine.logger = logger
machine.open()
machine.home()
machine.run_job("DI16ac-top")
finally:
machine.close()
if __name__ == '__main__':
main()
| 29.584906
| 81
| 0.63648
|
8feb4a1656f5250e2e07cab701b6ce47540cee34
| 3,277
|
py
|
Python
|
setup.py
|
twitty-onica/runway
|
e292c7b770fa639d3ed834cf041a7d7641f7f160
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
twitty-onica/runway
|
e292c7b770fa639d3ed834cf041a7d7641f7f160
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
twitty-onica/runway
|
e292c7b770fa639d3ed834cf041a7d7641f7f160
|
[
"Apache-2.0"
] | null | null | null |
"""Packaging settings."""
from codecs import open as codecs_open
from os.path import abspath, dirname, join
from setuptools import find_packages, setup
THIS_DIR = abspath(dirname(__file__))
def local_scheme(version): # pylint: disable=unused-argument
# type: (str) -> str
"""Skip the local version (eg. +xyz) to upload to Test PyPI."""
return ""
with codecs_open(join(THIS_DIR, "README.md"), encoding="utf-8") as readfile:
LONG_DESCRIPTION = readfile.read()
INSTALL_REQUIRES = [
"Send2Trash",
"awacs", # for embedded hooks
    # awscli included for embedded hooks and aws subcommand
    "awscli>=1.16.308,<2.0",
    'backports.tempfile; python_version < "3.2"',
    "botocore>=1.12.111", # matching awscli/boto3 requirement
    "boto3>=1.9.111,<2.0",
"cfn_flip>=1.2.1", # 1.2.1+ require PyYAML 4.1+
"cfn-lint",
"click>=7.1",
"coloredlogs",
"docker",
"requests",
"future",
"pyhcl~=0.4", # does not support HCL2, possibly move to extras_require in the future
'python-hcl2>=0.3.0,<1.0.0; python_version >= "3.6"', # only support >=3.6
"gitpython",
'importlib-metadata; python_version < "3.8"',
"packaging", # component of setuptools needed for version compare
"pyOpenSSL", # For embedded hook & associated script usage
"PyYAML>=4.1,<5.3", # match awscli top-end
"six>=1.13.0",
'typing;python_version<"3.5"',
"yamllint",
"zgitignore", # for embedded hooks
"troposphere>=2.4.2",
# botocore pins its urllib3 dependency like this, so we need to do the
# same to ensure v1.25+ isn't pulled in by pip
"urllib3>=1.20,<1.25",
# dependency of importlib-metadata, dependency of pytest, cfn-lint, & others
# 2.0.0 drops support for python 3.5
"zipp~=1.0.0",
# inherited from stacker 1.7.0 requirements
"jinja2>=2.7,<3.0",
"schematics>=2.0.1,<2.1.0",
"formic2",
]
setup(
name="runway",
description="Simplify infrastructure/app testing/deployment",
long_description=LONG_DESCRIPTION,
long_description_content_type="text/markdown",
url="https://github.com/onicagroup/runway",
author="Onica Group LLC",
author_email="opensource@onica.com",
license="Apache License 2.0",
classifiers=[
"Intended Audience :: Developers",
"Topic :: Utilities",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
python_requires=">=2.6",
keywords="cli",
packages=find_packages(exclude=("integration*", "tests*")),
install_requires=INSTALL_REQUIRES,
setup_requires=["setuptools_scm"],
use_scm_version={"local_scheme": local_scheme},
entry_points={"console_scripts": ["runway=runway._cli.main:cli"]},
scripts=[
"scripts/stacker-runway",
"scripts/stacker-runway.cmd",
"scripts/tf-runway",
"scripts/tf-runway.cmd",
"scripts/tfenv-runway",
"scripts/tfenv-runway.cmd",
],
include_package_data=True, # needed for templates,blueprints,hooks
)
| 33.783505
| 89
| 0.643271
|
9d14a559b6674731323c0963af1326ea216bed45
| 6,227
|
py
|
Python
|
LunarLander/Listing_24-1.py
|
PrinceChou/Play-Python-with-Alisa
|
808ab2744a99c548de4633b5707af27112bcdccf
|
[
"Apache-2.0"
] | null | null | null |
LunarLander/Listing_24-1.py
|
PrinceChou/Play-Python-with-Alisa
|
808ab2744a99c548de4633b5707af27112bcdccf
|
[
"Apache-2.0"
] | null | null | null |
LunarLander/Listing_24-1.py
|
PrinceChou/Play-Python-with-Alisa
|
808ab2744a99c548de4633b5707af27112bcdccf
|
[
"Apache-2.0"
] | null | null | null |
# Listing_24-1.py
# Copyright Warren & Carter Sande, 2013
# Released under MIT license http://www.opensource.org/licenses/mit-license.php
# Version $version ----------------------------
# Lunar Lander
# simulation game of landing a spacecraft
# initialize - get everything ready
import pygame, sys
pygame.init()
screen = pygame.display.set_mode([400,600])
screen.fill([0, 0, 0])
ship = pygame.image.load('lunarlander.png')
moon = pygame.image.load('moonsurface.png')
ground = 540 #landing pad is y = 540
start = 90
clock = pygame.time.Clock()
ship_mass = 5000.0
fuel = 5000.0
velocity = -100.0
gravity = 10
height = 2000
thrust = 0
delta_v = 0
y_pos = 90
held_down = False
# make the throttle
class ThrottleClass(pygame.sprite.Sprite):
def __init__(self, location = [0,0]):
pygame.sprite.Sprite.__init__(self)
image_surface = pygame.surface.Surface([30, 10])
image_surface.fill([128,128,128])
self.image = image_surface.convert()
self.rect = self.image.get_rect()
self.rect.left, self.rect.centery = location
# calculate position, motion, acceleration, fuel
def calculate_velocity():
global thrust, fuel, velocity, delta_v, height, y_pos
delta_t = 1/fps
thrust = (500 - myThrottle.rect.centery) * 5.0 # turn throttle position
# into amount of thrust
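    # (illustrative check of the mapping above: throttle centery 500 gives
    #  thrust 0, centery 300 gives thrust (500 - 300) * 5 = 1000, the maximum)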
fuel -= thrust /(10 * fps) # use up fuel
if fuel < 0: fuel = 0.0
if fuel < 0.1: thrust = 0.0
delta_v = delta_t * (-gravity + 200 * thrust / (ship_mass + fuel))
velocity = velocity + delta_v
delta_h = velocity * delta_t
height = height + delta_h
y_pos = ground - (height * (ground - start) / 2000) - 90
# display the text with the speed, height, etc.
def display_stats():
v_str = "velocity: %i m/s" % velocity
h_str = "height: %.1f" % height
t_str = "thrust: %i" % thrust
a_str = "acceleration: %.1f" % (delta_v * fps)
f_str = "fuel: %i" % fuel
v_font = pygame.font.Font(None, 26)
v_surf = v_font.render(v_str, 1, (255, 255, 255))
screen.blit(v_surf, [10, 50])
a_font = pygame.font.Font(None, 26)
a_surf = a_font.render(a_str, 1, (255, 255, 255))
screen.blit(a_surf, [10, 100])
h_font = pygame.font.Font(None, 26)
h_surf = h_font.render(h_str, 1, (255, 255, 255))
screen.blit(h_surf, [10, 150])
t_font = pygame.font.Font(None, 26)
t_surf = t_font.render(t_str, 1, (255, 255, 255))
screen.blit(t_surf, [10, 200])
f_font = pygame.font.Font(None, 26)
f_surf = f_font.render(f_str, 1, (255, 255, 255))
screen.blit(f_surf, [60, 300])
# display the ship's flames - size depends on the amount of thrust
def display_flames():
flame_size = thrust / 15
for i in range (2):
startx = 252 - 10 + i * 19
starty = y_pos + 83
pygame.draw.polygon(screen, [255, 109, 14], [(startx, starty),
(startx + 4, starty + flame_size),
(startx + 8, starty)], 0)
# display final stats when the game is over
def display_final():
final1 = "Game over"
final2 = "You landed at %.1f m/s" % velocity
if velocity > -5:
final3 = "Nice landing!"
final4 = "I hear NASA is hiring!"
elif velocity > -15:
final3 = "Ouch! A bit rough, but you survived."
final4 = "You'll do better next time."
else:
final3 = "Yikes! You crashed a 30 Billion dollar ship."
final4 = "How are you getting home?"
pygame.draw.rect(screen, [0, 0, 0], [5, 5, 350, 280],0)
f1_font = pygame.font.Font(None, 70)
f1_surf = f1_font.render(final1, 1, (255, 255, 255))
screen.blit(f1_surf, [20, 50])
f2_font = pygame.font.Font(None, 40)
f2_surf = f2_font.render(final2, 1, (255, 255, 255))
screen.blit(f2_surf, [20, 110])
f3_font = pygame.font.Font(None, 26)
f3_surf = f3_font.render(final3, 1, (255, 255, 255))
screen.blit(f3_surf, [20, 150])
f4_font = pygame.font.Font(None, 26)
f4_surf = f4_font.render(final4, 1, (255, 255, 255))
screen.blit(f4_surf, [20, 180])
pygame.display.flip()
myThrottle = ThrottleClass([15, 500])
# main loop
running = True
while running:
clock.tick(30)
fps = clock.get_fps()
if fps < 1: fps = 30
if height > 0.01:
calculate_velocity()
screen.fill([0, 0, 0])
display_stats()
pygame.draw.rect(screen, [0, 0, 255], [80, 350, 24, 100], 2)
fuelbar = 96 * fuel / 5000
pygame.draw.rect(screen, [0,255,0], [84,448-fuelbar,18, fuelbar], 0)
pygame.draw.rect(screen, [255, 0, 0], [25, 300, 10, 200],0) #thrust bar
screen.blit(moon, [0, 500, 400, 100]) #moon
pygame.draw.rect(screen, [60, 60, 60], [220, 535, 70, 5],0) #landing pad
screen.blit(myThrottle.image, myThrottle.rect) #thrust handle
display_flames() #flames
screen.blit(ship, [230, y_pos, 50, 90]) #ship
instruct1 = "Land softly without running out of fuel"
instruct2 = "Good landing: < 15 m/s Great landing: < 5m/s"
inst1_font = pygame.font.Font(None, 24)
inst1_surf = inst1_font.render(instruct1, 1, (255, 255, 255))
screen.blit(inst1_surf, [50, 550])
inst2_font = pygame.font.Font(None, 24)
        inst2_surf = inst2_font.render(instruct2, 1, (255, 255, 255))
screen.blit(inst2_surf, [20, 575])
pygame.display.flip()
else: #game over - print final score
display_final()
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
elif event.type == pygame.MOUSEBUTTONDOWN:
held_down = True
elif event.type == pygame.MOUSEBUTTONUP:
held_down = False
elif event.type == pygame.MOUSEMOTION:
if held_down:
myThrottle.rect.centery = event.pos[1]
if myThrottle.rect.centery < 300:
myThrottle.rect.centery = 300
if myThrottle.rect.centery > 500:
myThrottle.rect.centery = 500
pygame.quit()
| 37.969512
| 83
| 0.591617
|
ebb123de6b961e067de53c7e7fa15171ad8ab766
| 9,995
|
py
|
Python
|
segment_trainer.py
|
FeiLyu/SR-TTT
|
802795fa63bd536ab1ff82e94f149e3291adbb7f
|
[
"Apache-2.0"
] | null | null | null |
segment_trainer.py
|
FeiLyu/SR-TTT
|
802795fa63bd536ab1ff82e94f149e3291adbb7f
|
[
"Apache-2.0"
] | null | null | null |
segment_trainer.py
|
FeiLyu/SR-TTT
|
802795fa63bd536ab1ff82e94f149e3291adbb7f
|
[
"Apache-2.0"
] | null | null | null |
import itertools
import os
import time
import datetime
import numpy as np
import cv2
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
from torch.utils.data import DataLoader
from torch.nn import functional as F
#import network
import segment_network
#import resnet_version
import segment_train_dataset
import segment_test_dataset
import segment_utils
import torch.autograd as autograd
from torch.autograd import Variable
import segment_tester
import random
from torch import optim
import math
# Save the model if pre_train == True
def save_model(net, epoch, opt, name, save_folder):
model_name = name + '_epoch%d.pth' % (epoch+1)
model_name = os.path.join(save_folder, model_name)
torch.save(net.state_dict(), model_name)
print('The trained model is successfully saved at epoch %d' % (epoch))
# baseline: directly train a segmentation network
def seg_trainer_baseline(opt):
# ----------------------------------------
# Initialize training parameters
# ----------------------------------------
cv2.setNumThreads(0)
cv2.ocl.setUseOpenCL(False)
seed = 66
print("[ Using Seed : ", seed, " ]")
torch.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
torch.cuda.manual_seed(seed)
np.random.seed(seed)
random.seed(seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
os.environ["PYTHONHASHSEED"] = str(seed)
# configurations
save_folder = opt.save_path
sample_folder = opt.sample_path
if not os.path.exists(save_folder):
os.makedirs(save_folder)
if not os.path.exists(sample_folder):
os.makedirs(sample_folder)
# Build networks
segmentor = segment_utils.create_Unet(opt)
# To device
segmentor = segmentor.cuda()
# Optimizers
optimizer_s = torch.optim.Adam(segmentor.parameters(), lr = opt.lr, betas=(0.9, 0.99))
# ----------------------------------------
# Initialize training dataset
# ----------------------------------------
# Define the dataset
trainset = segment_train_dataset.SegmentTrainDataset(opt)
print('The overall number of images equals to %d' % len(trainset))
# Define the dataloader
dataloader = DataLoader(trainset, batch_size = opt.batch_size, shuffle = True, num_workers = opt.num_workers, pin_memory = True)
# ----------------------------------------
# Training
# ----------------------------------------
# Initialize start time
prev_time = time.time()
# Training loop
for epoch in range(opt.epochs):
print("Start epoch ", epoch+1, "!")
for batch_idx, (img, synthesis_img, synthesis_mask, liver_mask) in enumerate(dataloader):
# sent images to cuda
img = img.cuda()
synthesis_img = synthesis_img.cuda()
synthesis_mask = synthesis_mask.cuda()
# sent to network
seg_input = synthesis_img
seg_output = segmentor(seg_input)
# loss and optimizer
optimizer_s.zero_grad()
pos_weight = (opt.loss_weight*torch.ones([1])).cuda()
loss_criterion = torch.nn.BCEWithLogitsLoss(pos_weight=pos_weight)
loss_S = loss_criterion(seg_output, synthesis_mask)
loss_S.backward()
optimizer_s.step()
# Determine approximate time left
batches_done = epoch * len(dataloader) + batch_idx
batches_left = opt.epochs * len(dataloader) - batches_done
time_left = datetime.timedelta(seconds=batches_left * (time.time() - prev_time))
prev_time = time.time()
# Print log
print("\r[Epoch %d/%d] [Batch %d/%d] [S Loss: %.5f] " % ((epoch + 1), opt.epochs, (batch_idx+1), len(dataloader), loss_S.item()))
# Save the model
        save_model(segmentor, epoch, opt, 'segmentor', save_folder)
# our method
def seg_trainer_ttt(opt):
# ----------------------------------------
# Initialize training parameters
# ----------------------------------------
cv2.setNumThreads(0)
cv2.ocl.setUseOpenCL(False)
seed = 66
print("[ Using Seed : ", seed, " ]")
torch.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
torch.cuda.manual_seed(seed)
np.random.seed(seed)
random.seed(seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
os.environ["PYTHONHASHSEED"] = str(seed)
# configurations
save_folder = opt.save_path
sample_folder = opt.sample_path
if not os.path.exists(save_folder):
os.makedirs(save_folder)
if not os.path.exists(sample_folder):
os.makedirs(sample_folder)
# Build networks
generator = segment_utils.create_Unet(opt)
reconstructor = segment_utils.create_Unet(opt, in_channels=2)
segmentor = segment_utils.create_Unet(opt)
# To device
generator = generator.cuda()
segmentor = segmentor.cuda()
reconstructor = reconstructor.cuda()
# Optimizers
parameterg = list(generator.parameters())
optimizer_g = torch.optim.Adam(parameterg, lr = opt.lr, betas=(0.9, 0.99))
parameters = list(segmentor.parameters()) + list(reconstructor.parameters())
optimizer_s = torch.optim.Adam(parameters, lr = opt.lr, betas=(0.9, 0.99))
# ----------------------------------------
# Initialize training dataset
# ----------------------------------------
# Define the dataset
trainset = segment_train_dataset.SegmentTrainDataset(opt)
print('The overall number of images equals to %d' % len(trainset))
# Define the dataloader
dataloader = DataLoader(trainset, batch_size = opt.batch_size, shuffle = True, num_workers = opt.num_workers, pin_memory = True)
# ----------------------------------------
# Training and Testing
# ----------------------------------------
# Initialize start time
prev_time = time.time()
# Training loop
for epoch in range(opt.epochs):
print("Start epoch ", epoch+1, "!")
for batch_idx, (img, synthesis_img, synthesis_mask, liver_mask) in enumerate(dataloader):
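            # teacher forcing ratio decays linearly from 1 to 0 over the first
            # two epochs and is clamped at 0 afterwards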
teacher_forcing_ratio = 1-1.0/2.0*(epoch + (batch_idx+1.0)/len(dataloader))
if teacher_forcing_ratio<0:
teacher_forcing_ratio = 0
print("teacher_forcing_ratio ", str(teacher_forcing_ratio), "!")
# sent images to cuda
img = img.cuda()
liver_mask = liver_mask.cuda()
synthesis_img = synthesis_img.cuda()
synthesis_mask = synthesis_mask.cuda()
# step 1: update segmentor and reconstructor
optimizer_s.zero_grad()
# segmentor
seg_output_tumor = segmentor(synthesis_img)
seg_output_healthy = segmentor(img)
# generator
gen_output = torch.sigmoid(generator(synthesis_img))
#---------------------------------------------------------------------------------------------------------
# different input to reconstructor
re_input = teacher_forcing_ratio*img + (1-teacher_forcing_ratio)*gen_output.detach()
re_output = torch.sigmoid(reconstructor(torch.cat((re_input, torch.sigmoid(seg_output_tumor)), 1)))
            # refine the reconstructor output
re_output_liver = img * (1 - liver_mask) + re_output * liver_mask
# calculate reconstruction loss
loss_criterion_L1 = torch.nn.L1Loss()
loss_r = loss_criterion_L1(re_output_liver, synthesis_img)
# calculate segmentation loss
pos_weight = (opt.loss_weight*torch.ones([1])).cuda()
loss_criterion_s = torch.nn.BCEWithLogitsLoss(pos_weight=pos_weight)
loss_s_tumor = loss_criterion_s(seg_output_tumor, synthesis_mask)
loss_s_healthy = loss_criterion_s(seg_output_healthy, 0*synthesis_mask)
# total loss
w_r = 1
w_st = 2
w_sh = 1
loss_total_s = w_r*loss_r + w_st*loss_s_tumor + w_sh*loss_s_healthy
loss_total_s.backward()
optimizer_s.step()
# step 2: update generator
optimizer_g.zero_grad()
# generator
gen_output = torch.sigmoid(generator(synthesis_img))
            # refine the generator output
gen_output_liver = img * (1 - liver_mask) + gen_output * liver_mask
# calculate generation loss
loss_g = loss_criterion_L1(gen_output_liver, img)
# calculate segmentation loss
seg_output_gen = segmentor(gen_output_liver)
loss_gen_s = loss_criterion_s(seg_output_gen, 0*synthesis_mask)
# total loss
w_g = 1
w_gs = 0.1
loss_total_g = w_g*loss_g + w_gs*loss_gen_s
loss_total_g.backward()
optimizer_g.step()
# Determine approximate time left
batches_done = epoch * len(dataloader) + batch_idx
batches_left = opt.epochs * len(dataloader) - batches_done
time_left = datetime.timedelta(seconds=batches_left * (time.time() - prev_time))
prev_time = time.time()
# Print log
print("\r[Epoch %d/%d] [Batch %d/%d] [Ratio:%d/%d/%d/%d/%d][loss_r: %.5f] [loss_s_tumor: %.5f] [loss_s_healthy: %.5f] [loss_g: %.5f] [loss_gen_s: %.5f]" %
((epoch + 1), opt.epochs, (batch_idx+1), len(dataloader), w_r, w_st, w_sh, w_g, w_gs, loss_r.item(), loss_s_tumor.item(), loss_s_healthy.item(), loss_g.item(), loss_gen_s.item()))
# Save the model
save_model(generator, epoch , opt, 'generator', save_folder)
save_model(segmentor, epoch , opt, 'segmentor', save_folder)
save_model(reconstructor, epoch , opt, 'reconstructor', save_folder)
| 36.213768
| 195
| 0.597099
|
555f293be54b50383bc03d41952b3a353b46aa54
| 770
|
py
|
Python
|
2021/day11/test.py
|
MartinSeeler/Advent-of-Code
|
efc1972ba0a3e17177d61d163ac79ce7880fab55
|
[
"MIT"
] | 5
|
2020-12-18T15:37:20.000Z
|
2021-12-18T20:01:21.000Z
|
2021/day11/test.py
|
MartinSeeler/Advent-of-Code-2020
|
efc1972ba0a3e17177d61d163ac79ce7880fab55
|
[
"MIT"
] | null | null | null |
2021/day11/test.py
|
MartinSeeler/Advent-of-Code-2020
|
efc1972ba0a3e17177d61d163ac79ce7880fab55
|
[
"MIT"
] | 1
|
2021-12-18T05:29:35.000Z
|
2021-12-18T05:29:35.000Z
|
import pytest
from solution import solve_part_1, solve_part_2
@pytest.mark.parametrize(
"quiz_input,expected_result",
[
(
"""5483143223
2745854711
5264556173
6141336146
6357385478
4167524645
2176841721
6882881134
4846848554
5283751526""",
1656,
)
],
)
def test_part_1_solution(quiz_input, expected_result):
assert solve_part_1(quiz_input) == expected_result
@pytest.mark.parametrize(
"quiz_input,expected_result",
[
(
"""5483143223
2745854711
5264556173
6141336146
6357385478
4167524645
2176841721
6882881134
4846848554
5283751526""",
195,
)
],
)
def test_part_2_solution(quiz_input, expected_result):
assert solve_part_2(quiz_input) == expected_result
| 16.041667
| 54
| 0.694805
|
ac2872729ff51271869ecfdcd716af409a9fe355
| 5,371
|
py
|
Python
|
daemon/core/enumerations.py
|
shanv82/core
|
70abb8cc1426ffceb53a03e84edc26f56f9ed4c0
|
[
"BSD-2-Clause"
] | 1
|
2020-10-13T12:44:12.000Z
|
2020-10-13T12:44:12.000Z
|
daemon/core/emulator/enumerations.py
|
lmerat46/core
|
e11ec020ebbd8df0d1c78d4be249de3c87190587
|
[
"BSD-2-Clause"
] | null | null | null |
daemon/core/emulator/enumerations.py
|
lmerat46/core
|
e11ec020ebbd8df0d1c78d4be249de3c87190587
|
[
"BSD-2-Clause"
] | null | null | null |
"""
Contains all legacy enumerations for interacting with legacy CORE code.
"""
from enum import Enum
CORE_API_VERSION = "1.23"
CORE_API_PORT = 4038
class MessageTypes(Enum):
"""
CORE message types.
"""
NODE = 0x01
LINK = 0x02
EXECUTE = 0x03
REGISTER = 0x04
CONFIG = 0x05
FILE = 0x06
INTERFACE = 0x07
EVENT = 0x08
SESSION = 0x09
EXCEPTION = 0x0A
class MessageFlags(Enum):
"""
CORE message flags.
"""
ADD = 0x01
DELETE = 0x02
CRI = 0x04
LOCAL = 0x08
STRING = 0x10
TEXT = 0x20
TTY = 0x40
class NodeTlvs(Enum):
"""
Node type, length, value enumerations.
"""
NUMBER = 0x01
TYPE = 0x02
NAME = 0x03
IP_ADDRESS = 0x04
MAC_ADDRESS = 0x05
IP6_ADDRESS = 0x06
MODEL = 0x07
EMULATION_SERVER = 0x08
SESSION = 0x0A
X_POSITION = 0x20
Y_POSITION = 0x21
CANVAS = 0x22
EMULATION_ID = 0x23
NETWORK_ID = 0x24
SERVICES = 0x25
LATITUDE = 0x30
LONGITUDE = 0x31
ALTITUDE = 0x32
ICON = 0x42
OPAQUE = 0x50
class NodeTypes(Enum):
"""
Node types.
"""
DEFAULT = 0
PHYSICAL = 1
TBD = 3
SWITCH = 4
HUB = 5
WIRELESS_LAN = 6
RJ45 = 7
TUNNEL = 8
KTUNNEL = 9
EMANE = 10
TAP_BRIDGE = 11
PEER_TO_PEER = 12
CONTROL_NET = 13
EMANE_NET = 14
class Rj45Models(Enum):
"""
RJ45 model types.
"""
LINKED = 0
WIRELESS = 1
INSTALLED = 2
# Link Message TLV Types
class LinkTlvs(Enum):
"""
Link type, length, value enumerations.
"""
N1_NUMBER = 0x01
N2_NUMBER = 0x02
DELAY = 0x03
BANDWIDTH = 0x04
PER = 0x05
DUP = 0x06
JITTER = 0x07
MER = 0x08
BURST = 0x09
SESSION = 0x0A
MBURST = 0x10
TYPE = 0x20
GUI_ATTRIBUTES = 0x21
UNIDIRECTIONAL = 0x22
EMULATION_ID = 0x23
NETWORK_ID = 0x24
KEY = 0x25
INTERFACE1_NUMBER = 0x30
INTERFACE1_IP4 = 0x31
INTERFACE1_IP4_MASK = 0x32
INTERFACE1_MAC = 0x33
INTERFACE1_IP6 = 0x34
INTERFACE1_IP6_MASK = 0x35
INTERFACE2_NUMBER = 0x36
INTERFACE2_IP4 = 0x37
INTERFACE2_IP4_MASK = 0x38
INTERFACE2_MAC = 0x39
INTERFACE2_IP6 = 0x40
INTERFACE2_IP6_MASK = 0x41
INTERFACE1_NAME = 0x42
INTERFACE2_NAME = 0x43
OPAQUE = 0x50
class LinkTypes(Enum):
"""
Link types.
"""
WIRELESS = 0
WIRED = 1
class ExecuteTlvs(Enum):
"""
Execute type, length, value enumerations.
"""
NODE = 0x01
NUMBER = 0x02
TIME = 0x03
COMMAND = 0x04
RESULT = 0x05
STATUS = 0x06
SESSION = 0x0A
class RegisterTlvs(Enum):
"""
Register type, length, value enumerations.
"""
WIRELESS = 0x01
MOBILITY = 0x02
UTILITY = 0x03
EXECUTE_SERVER = 0x04
GUI = 0x05
EMULATION_SERVER = 0x06
SESSION = 0x0A
class ConfigTlvs(Enum):
"""
Configuration type, length, value enumerations.
"""
NODE = 0x01
OBJECT = 0x02
TYPE = 0x03
DATA_TYPES = 0x04
VALUES = 0x05
CAPTIONS = 0x06
BITMAP = 0x07
POSSIBLE_VALUES = 0x08
GROUPS = 0x09
SESSION = 0x0A
INTERFACE_NUMBER = 0x0B
NETWORK_ID = 0x24
OPAQUE = 0x50
class ConfigFlags(Enum):
"""
Configuration flags.
"""
NONE = 0x00
REQUEST = 0x01
UPDATE = 0x02
RESET = 0x03
class ConfigDataTypes(Enum):
"""
Configuration data types.
"""
UINT8 = 0x01
UINT16 = 0x02
UINT32 = 0x03
UINT64 = 0x04
INT8 = 0x05
INT16 = 0x06
INT32 = 0x07
INT64 = 0x08
FLOAT = 0x09
STRING = 0x0A
BOOL = 0x0B
class FileTlvs(Enum):
"""
File type, length, value enumerations.
"""
NODE = 0x01
NAME = 0x02
MODE = 0x03
NUMBER = 0x04
TYPE = 0x05
SOURCE_NAME = 0x06
SESSION = 0x0A
DATA = 0x10
COMPRESSED_DATA = 0x11
class InterfaceTlvs(Enum):
"""
Interface type, length, value enumerations.
"""
NODE = 0x01
NUMBER = 0x02
NAME = 0x03
IP_ADDRESS = 0x04
MASK = 0x05
MAC_ADDRESS = 0x06
IP6_ADDRESS = 0x07
IP6_MASK = 0x08
TYPE = 0x09
SESSION = 0x0A
STATE = 0x0B
EMULATION_ID = 0x23
NETWORK_ID = 0x24
class EventTlvs(Enum):
"""
Event type, length, value enumerations.
"""
NODE = 0x01
TYPE = 0x02
NAME = 0x03
DATA = 0x04
TIME = 0x05
SESSION = 0x0A
class EventTypes(Enum):
"""
Event types.
"""
NONE = 0
DEFINITION_STATE = 1
CONFIGURATION_STATE = 2
INSTANTIATION_STATE = 3
RUNTIME_STATE = 4
DATACOLLECT_STATE = 5
SHUTDOWN_STATE = 6
START = 7
STOP = 8
PAUSE = 9
RESTART = 10
FILE_OPEN = 11
FILE_SAVE = 12
SCHEDULED = 13
RECONFIGURE = 14
INSTANTIATION_COMPLETE = 15
class SessionTlvs(Enum):
"""
Session type, length, value enumerations.
"""
NUMBER = 0x01
NAME = 0x02
FILE = 0x03
NODE_COUNT = 0x04
DATE = 0x05
THUMB = 0x06
USER = 0x07
OPAQUE = 0x0A
class ExceptionTlvs(Enum):
"""
Exception type, length, value enumerations.
"""
NODE = 0x01
SESSION = 0x02
LEVEL = 0x03
SOURCE = 0x04
DATE = 0x05
TEXT = 0x06
OPAQUE = 0x0A
class ExceptionLevels(Enum):
"""
Exception levels.
"""
NONE = 0
FATAL = 1
ERROR = 2
WARNING = 3
NOTICE = 4
| 16.889937
| 71
| 0.587042
|
7d7744ded7bed8aaa298b4161286ae2a053915bd
| 732
|
py
|
Python
|
setup.py
|
naegelejd/pyasteroid
|
8c4a5d200457e6ea8d050d0a755f3ed582f2edef
|
[
"MIT"
] | null | null | null |
setup.py
|
naegelejd/pyasteroid
|
8c4a5d200457e6ea8d050d0a755f3ed582f2edef
|
[
"MIT"
] | null | null | null |
setup.py
|
naegelejd/pyasteroid
|
8c4a5d200457e6ea8d050d0a755f3ed582f2edef
|
[
"MIT"
] | null | null | null |
#from distutils.core import setup, Extension
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Extension
vector = Extension("asteroids.vector", sources=["src/vector/vectormodule.c"])
setup(
name = "pyasteroid",
version = '0.0.2',
author = "Joseph Naegele",
author_email = "joseph.naegele@gmail.com",
description = "A basic 2D vector class.",
license = "MIT",
keywords = "asteroids vector 2D",
url="https://github.com/naegelejd/pyasteroid",
install_requires = ['Pygame>=1.8'],
packages=['asteroids'],
package_data= {'':['*.png']},
entry_points = {'console_scripts': ['asteroids = asteroids.main:main']},
ext_modules=[vector],
)
| 29.28
| 77
| 0.678962
|
ab0c57a2271894667059d28a3cd479db33ba0c8f
| 5,124
|
py
|
Python
|
run.py
|
Alvin-21/password-manager
|
a2c21f92f81d9f82a39ffc99c8cebf0489e2a7c4
|
[
"Unlicense"
] | null | null | null |
run.py
|
Alvin-21/password-manager
|
a2c21f92f81d9f82a39ffc99c8cebf0489e2a7c4
|
[
"Unlicense"
] | null | null | null |
run.py
|
Alvin-21/password-manager
|
a2c21f92f81d9f82a39ffc99c8cebf0489e2a7c4
|
[
"Unlicense"
] | null | null | null |
from user_credentials import User, Credentials
def create_user(fname, lname, username, password):
"""
Creates a new user.
"""
return User(fname, lname, username, password)
def create_credentials(app, username, password):
"""
Creates new user credentials.
"""
return Credentials(app, username, password)
def main():
print("Hello, welcome to Password Manager")
while True:
print("\nPlease use these short codes to execute your desired task: ca - create new account, li - login to account, ex - exit program")
short_code = input().lower().strip()
if short_code == "ca":
fname = input("Enter your first name: ").strip()
lname = input("Enter your last name: ").strip()
username = input("Enter your preferred username: ").strip()
if User.user_exist(username):
print("\nThat username has already been taken. Please enter a different username.")
username = input("Enter your preferred username: ").strip()
password = input("Enter your password: ")
new_user = create_user(fname, lname, username, password)
new_user.save_user()
print(f"\nYour new account has been created with the following details:\nName: {fname} {lname}\nUsername: {username}\nPassword: {password}\n")
elif short_code == "li":
print("\nPlease enter your user details to login.")
login_username = input("Enter your username: ").strip()
login_password = input("Enter your password: ")
user_verification = User.verify_user(login_username, login_password)
if user_verification:
print(f"\nHello {login_username}, please use the following codes to select a task.")
while True:
                    print("\ncc - Create Credentials\ndelc - Delete Credential\ndc - Display Credentials\ndsc - Display Specific Credential\nex - Exit")
code = input().lower().strip()
if code == "cc":
app = input("\nEnter the name of the app: ")
credential_username = input("Enter your username: ")
while True:
print("\nUse the following options to select which password you prefer\ncp - Custom Password\nap - Auto-Generated Password\nex - Exit")
option = input().lower().strip()
if option == "cp":
credential_password = input("\nEnter your password: ")
break
elif option == "ap":
credential_password = Credentials.password()
break
elif option == "ex":
break
else:
print("\nInvalid input. Check options and try again.")
new_credential = create_credentials(app, credential_username, credential_password)
new_credential.save_credentials()
print(f"\nNewly created credential details:\nApp Name: {app}\nUsername: {credential_username}\nPassword: {credential_password}")
elif code == "delc":
delete_app = input("\nEnter the app name of the credential you wish to delete: ")
Credentials.delete_credential(delete_app)
print(f"{delete_app} Credentials has been deleted.")
elif code == "dc":
if Credentials.display_credentials():
for credential in Credentials.display_credentials():
print(f"\nApp: {credential.app}\nUsername: {credential.username}\nPassword: {credential.password}\n")
else:
print("\nYou haven't created any credentials yet.")
elif code == "dsc":
app_credential = input("\nEnter app name of the credential you wish to be displayed: ")
credential_information = Credentials.display_app_credential(app_credential)
if credential_information:
print(f"\nApp: {credential_information.app}\nUsername: {credential_information.username}\nPassword: {credential_information.password}")
else:
print("\nThat credential cannot be found. Please try again")
elif code == "ex":
break
else:
print("\nInvalid input. Please check the code and try again.")
elif short_code == "ex":
break
else:
print("\nInvalid input. Please check your entry and try again.")
if __name__ == '__main__':
main()
| 45.345133
| 163
| 0.531616
|
ba426144b394e8d020eafa22fa30dc8e53d22120
| 21,642
|
py
|
Python
|
ludwig/models/modules/recurrent_modules.py
|
JakeConnors376W/ludwig
|
d16488aed4821cf89642c967e06f8961f2ab53bd
|
[
"Apache-2.0"
] | 90
|
2019-03-05T08:52:28.000Z
|
2020-09-25T10:27:23.000Z
|
ludwig/models/modules/recurrent_modules.py
|
wjapollo/ludwig
|
181d435e8c9698e2c8da23355608e997c9361f2c
|
[
"Apache-2.0"
] | 2
|
2019-02-15T08:32:24.000Z
|
2021-01-30T19:35:51.000Z
|
ludwig/models/modules/recurrent_modules.py
|
wjapollo/ludwig
|
181d435e8c9698e2c8da23355608e997c9361f2c
|
[
"Apache-2.0"
] | 29
|
2019-03-05T08:59:22.000Z
|
2020-09-26T22:52:05.000Z
|
# coding=utf-8
# Copyright (c) 2019 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import collections
import logging
import tensorflow as tf
from tensorflow.contrib.rnn import MultiRNNCell, LSTMStateTuple
from tensorflow.python.framework import dtypes, tensor_shape
from tensorflow.python.framework import ops
from tensorflow.python.util import nest
from ludwig.models.modules.fully_connected_modules import fc_layer
from ludwig.models.modules.initializer_modules import get_initializer
from ludwig.models.modules.reduction_modules import reduce_sequence
from ludwig.utils.tf_utils import sequence_length_3D, sequence_length_2D
def get_cell_fun(cell_type):
if cell_type == 'rnn':
cell_fn = tf.nn.rnn_cell.BasicRNNCell
elif cell_type == 'lstm':
# allows for optional peephole connections and cell clipping
cell_fn = tf.nn.rnn_cell.LSTMCell
elif cell_type == 'lstm_block':
# Faster version of basic LSTM
cell_fn = tf.contrib.rnn.LSTMBlockCell
elif cell_type == 'lstm_ln':
cell_fn = tf.contrib.rnn.LayerNormBasicLSTMCell
elif cell_type == 'lstm_cudnn':
cell_fn = tf.contrib.cudnn_rnn.CudnnCompatibleLSTMCell
elif cell_type == 'gru':
cell_fn = tf.nn.rnn_cell.GRUCell
elif cell_type == 'gru_block':
# Faster version of GRU (25% faster in my tests)
cell_fn = tf.contrib.rnn.GRUBlockCell
elif cell_type == 'gru_cudnn':
# Faster version of GRU (25% faster in my tests)
cell_fn = tf.contrib.cudnn_rnn.CudnnCompatibleGRUCell
else:
cell_fn = tf.nn.rnn_cell.BasicRNNCell
return cell_fn
class Projection(tf.layers.Layer):
def __init__(self, projection_weights, projection_biases, name=None,
**kwargs):
super(Projection, self).__init__(name=name, **kwargs)
self.projection_weights = projection_weights
self.projection_biases = projection_biases
def call(self, inputs, **kwargs):
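        # Flatten any leading dimensions, apply the dense projection (matmul plus
        # optional bias), then restore the original shape with the last axis set
        # to the projection's output size.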
inputs_shape = inputs.shape.as_list()
weights_shape = self.projection_weights.shape.as_list()
assert inputs_shape[-1] == weights_shape[0]
inputs = tf.reshape(inputs, [-1, inputs_shape[-1]])
outputs = tf.matmul(inputs, self.projection_weights)
if self.projection_biases is not None:
outputs = tf.nn.bias_add(outputs, self.projection_biases)
outputs_shape = inputs_shape
outputs_shape[0] = -1 # batch_size
outputs_shape[-1] = weights_shape[1]
outputs = tf.reshape(outputs, outputs_shape)
return outputs
def compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
output_shape = input_shape
output_shape[-1] = self.projection_biases.shape.as_list()[0]
# output_shape = [input_shape[0], self.projection_biases.shape.as_list()[0]]
return tensor_shape.TensorShape(output_shape)
class BasicDecoderOutput(
collections.namedtuple('BasicDecoderOutput',
('rnn_output', 'sample_id', 'projection_input'))):
pass
class BasicDecoder(tf.contrib.seq2seq.BasicDecoder):
def _projection_input_size(self):
return self._cell.output_size
@property
def output_size(self):
return BasicDecoderOutput(
rnn_output=self._rnn_output_size(),
sample_id=self._helper.sample_ids_shape,
projection_input=self._projection_input_size())
@property
def output_dtype(self):
dtype = nest.flatten(self._initial_state)[0].dtype
return BasicDecoderOutput(
nest.map_structure(lambda _: dtype, self._rnn_output_size()),
self._helper.sample_ids_dtype,
nest.map_structure(lambda _: dtype, self._projection_input_size()))
def step(self, time, inputs, state, name=None):
with ops.name_scope(name, 'BasicDecoderStep', (time, inputs, state)):
cell_outputs, cell_state = self._cell(inputs, state)
projection_inputs = cell_outputs # get projection_inputs to compute sampled_softmax_cross_entropy_loss
if self._output_layer is not None:
cell_outputs = self._output_layer(cell_outputs)
sample_ids = self._helper.sample(
time=time, outputs=cell_outputs, state=cell_state)
(finished, next_inputs, next_state) = self._helper.next_inputs(
time=time,
outputs=cell_outputs,
state=cell_state,
sample_ids=sample_ids)
outputs = BasicDecoderOutput(cell_outputs, sample_ids,
projection_inputs)
return (outputs, next_state, next_inputs, finished)
class TimeseriesTrainingHelper(tf.contrib.seq2seq.TrainingHelper):
def sample(self, time, outputs, name=None, **unused_kwargs):
with ops.name_scope(name, 'TrainingHelperSample', [time, outputs]):
return tf.zeros(tf.shape(outputs)[:-1], dtype=dtypes.int32)
class RecurrentStack:
def __init__(
self,
state_size=256,
cell_type='rnn',
num_layers=1,
bidirectional=False,
dropout=False,
regularize=True,
reduce_output='last',
**kwargs
):
self.state_size = state_size
self.cell_type = cell_type
self.num_layers = num_layers
self.bidirectional = bidirectional
self.dropout = dropout
self.regularize = regularize
self.reduce_output = reduce_output
def __call__(
self,
input_sequence,
regularizer,
dropout_rate,
is_training=True
):
if not self.regularize:
regularizer = None
# Calculate the length of input_sequence and the batch size
sequence_length = sequence_length_3D(input_sequence)
# RNN cell
cell_fn = get_cell_fun(self.cell_type)
# initial state
# init_state = tf.get_variable(
# 'init_state',
# [1, state_size],
# initializer=tf.constant_initializer(0.0),
# )
# init_state = tf.tile(init_state, [batch_size, 1])
# main RNN operation
with tf.variable_scope('rnn_stack', reuse=tf.AUTO_REUSE,
regularizer=regularizer) as vs:
if self.bidirectional:
# forward direction cell
fw_cell = lambda state_size: cell_fn(state_size)
bw_cell = lambda state_size: cell_fn(state_size)
fw_cells = [fw_cell(self.state_size) for _ in
range(self.num_layers)]
bw_cells = [bw_cell(self.state_size) for _ in
range(self.num_layers)]
rnn_outputs, final_state_fw, final_state_bw = tf.contrib.rnn.stack_bidirectional_dynamic_rnn(
cells_fw=fw_cells,
cells_bw=bw_cells,
dtype=tf.float32,
sequence_length=sequence_length,
inputs=input_sequence
)
else:
cell = lambda state_size: cell_fn(state_size)
cells = MultiRNNCell(
[cell(self.state_size) for _ in range(self.num_layers)],
state_is_tuple=True)
rnn_outputs, final_state = tf.nn.dynamic_rnn(
cells,
input_sequence,
sequence_length=sequence_length,
dtype=tf.float32)
# initial_state=init_state)
for v in tf.global_variables():
if v.name.startswith(vs.name):
logging.debug(' {}: {}'.format(v.name, v))
logging.debug(' rnn_outputs: {0}'.format(rnn_outputs))
rnn_output = reduce_sequence(rnn_outputs, self.reduce_output)
logging.debug(' reduced_rnn_output: {0}'.format(rnn_output))
# dropout
if self.dropout and dropout_rate is not None:
rnn_output = tf.layers.dropout(
rnn_output,
rate=dropout_rate,
training=is_training
)
logging.debug(' dropout_rnn: {0}'.format(rnn_output))
return rnn_output, rnn_output.shape.as_list()[-1]
def recurrent_decoder(encoder_outputs, targets, max_sequence_length, vocab_size,
cell_type='rnn', state_size=256, embedding_size=50,
num_layers=1,
attention_mechanism=None, beam_width=1, projection=True,
tied_target_embeddings=True, embeddings=None,
initializer=None, regularizer=None,
is_timeseries=False):
with tf.variable_scope('rnn_decoder', reuse=tf.AUTO_REUSE,
regularizer=regularizer):
# ================ Setup ================
if beam_width > 1 and is_timeseries:
raise ValueError('Invalid beam_width: {}'.format(beam_width))
GO_SYMBOL = vocab_size
END_SYMBOL = 0
batch_size = tf.shape(encoder_outputs)[0]
# ================ Projection ================
# Project the encoder outputs to the size of the decoder state
encoder_outputs_size = encoder_outputs.shape[-1]
if projection and encoder_outputs_size != state_size:
with tf.variable_scope('projection'):
encoder_output_rank = len(encoder_outputs.shape)
if encoder_output_rank > 2:
sequence_length = tf.shape(encoder_outputs)[1]
encoder_outputs = tf.reshape(encoder_outputs,
[-1, encoder_outputs_size])
encoder_outputs = fc_layer(encoder_outputs,
encoder_outputs.shape[-1],
state_size,
activation=None,
initializer=initializer)
encoder_outputs = tf.reshape(encoder_outputs,
[-1, sequence_length,
state_size])
else:
encoder_outputs = fc_layer(encoder_outputs,
encoder_outputs.shape[-1],
state_size,
activation=None,
initializer=initializer)
# ================ Targets sequence ================
# Calculate the length of inputs and the batch size
with tf.variable_scope('sequence'):
targets_sequence_length = sequence_length_2D(targets)
start_tokens = tf.tile([GO_SYMBOL], [batch_size])
end_tokens = tf.tile([END_SYMBOL], [batch_size])
if is_timeseries:
start_tokens = tf.cast(start_tokens, tf.float32)
end_tokens = tf.cast(end_tokens, tf.float32)
targets_with_go = tf.concat([
tf.expand_dims(start_tokens, 1),
targets,
tf.expand_dims(end_tokens, 1)], 1)
logging.debug(' targets_with_go: {0}'.format(targets_with_go))
targets_sequence_length_with_eos = targets_sequence_length + 1 # the EOS symbol is 0 so it's not increasing the real length of the sequence
# ================ Embeddings ================
if is_timeseries:
targets_embedded = tf.expand_dims(targets_with_go, -1)
targets_embeddings = None
else:
with tf.variable_scope('embedding'):
if embeddings is not None:
embedding_size = embeddings.shape.as_list()[-1]
if tied_target_embeddings:
state_size = embedding_size
elif tied_target_embeddings:
embedding_size = state_size
if embeddings is not None:
embedding_go = tf.get_variable('embedding_GO',
initializer=tf.random_uniform(
[1, embedding_size],
-1.0, 1.0))
targets_embeddings = tf.concat([embeddings, embedding_go],
axis=0)
else:
initializer_obj = get_initializer(initializer)
targets_embeddings = tf.get_variable(
'embeddings',
initializer=initializer_obj(
[vocab_size + 1, embedding_size]),
regularizer=regularizer
)
logging.debug(
' targets_embeddings: {0}'.format(targets_embeddings))
targets_embedded = tf.nn.embedding_lookup(targets_embeddings,
targets_with_go,
name='decoder_input_embeddings')
logging.debug(' targets_embedded: {0}'.format(targets_embedded))
# ================ Class prediction ================
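        # When tied_target_embeddings is set, the output projection reuses the
        # (transposed) target embedding matrix, so logits = state . E^T + biases;
        # otherwise a separate [state_size, vocab_size + 1] weight matrix is learned.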
if tied_target_embeddings:
class_weights = tf.transpose(targets_embeddings)
else:
initializer_obj = get_initializer(initializer)
class_weights = tf.get_variable(
'class_weights',
initializer=initializer_obj([state_size, vocab_size + 1]),
regularizer=regularizer
)
logging.debug(' class_weights: {0}'.format(class_weights))
class_biases = tf.get_variable('class_biases', [vocab_size + 1])
logging.debug(' class_biases: {0}'.format(class_biases))
projection_layer = Projection(class_weights, class_biases)
# ================ RNN ================
initial_state = encoder_outputs
with tf.variable_scope('rnn_cells') as vs:
# Cell
cell_fun = get_cell_fun(cell_type)
if num_layers == 1:
cell = cell_fun(state_size)
if cell_type.startswith('lstm'):
initial_state = LSTMStateTuple(c=initial_state,
h=initial_state)
elif num_layers > 1:
cell = MultiRNNCell(
[cell_fun(state_size) for _ in range(num_layers)],
state_is_tuple=True)
if cell_type.startswith('lstm'):
initial_state = LSTMStateTuple(c=initial_state,
h=initial_state)
initial_state = tuple([initial_state] * num_layers)
else:
                raise ValueError('num_layers in recurrent decoder: {}. '
                                 'Number of layers in a recurrent decoder cannot be <= 0'.format(
                    num_layers))
# Attention
if attention_mechanism is not None:
if attention_mechanism == 'bahdanau':
attention_mechanism = tf.contrib.seq2seq.BahdanauAttention(
num_units=state_size, memory=encoder_outputs,
memory_sequence_length=sequence_length_3D(
encoder_outputs))
elif attention_mechanism == 'luong':
attention_mechanism = tf.contrib.seq2seq.LuongAttention(
num_units=state_size, memory=encoder_outputs,
memory_sequence_length=sequence_length_3D(
encoder_outputs))
else:
raise ValueError(
'Attention mechanism {} not supported'.format(
attention_mechanism))
cell = tf.contrib.seq2seq.AttentionWrapper(
cell, attention_mechanism, attention_layer_size=state_size)
initial_state = cell.zero_state(dtype=tf.float32,
batch_size=batch_size)
for v in tf.global_variables():
if v.name.startswith(vs.name):
logging.debug(' {}: {}'.format(v.name, v))
# ================ Decoding ================
def decode(initial_state, cell, helper, beam_width=1,
projection_layer=None):
# The decoder itself
if beam_width > 1:
# Tile inputs for beam search decoder
beam_initial_state = tf.contrib.seq2seq.tile_batch(
initial_state, beam_width)
decoder = tf.contrib.seq2seq.BeamSearchDecoder(
cell=cell,
embedding=targets_embeddings,
start_tokens=start_tokens,
end_token=END_SYMBOL,
initial_state=beam_initial_state,
beam_width=beam_width,
output_layer=projection_layer)
else:
decoder = BasicDecoder(
cell=cell, helper=helper,
initial_state=initial_state,
output_layer=projection_layer)
# The decoding operation
outputs = tf.contrib.seq2seq.dynamic_decode(
decoder=decoder,
output_time_major=False,
impute_finished=False if beam_width > 1 else True,
maximum_iterations=max_sequence_length
)
return outputs
# ================ Decoding helpers ================
if is_timeseries:
train_helper = TimeseriesTrainingHelper(
inputs=targets_embedded,
sequence_length=targets_sequence_length_with_eos)
final_outputs_pred, final_state_pred, final_sequence_lengths_pred = decode(
initial_state,
cell,
train_helper,
projection_layer=projection_layer)
eval_logits = final_outputs_pred.rnn_output
train_logits = final_outputs_pred.projection_input
predictions_sequence = tf.reshape(eval_logits, [batch_size, -1])
predictions_sequence_length_with_eos = final_sequence_lengths_pred
else:
train_helper = tf.contrib.seq2seq.TrainingHelper(
inputs=targets_embedded,
sequence_length=targets_sequence_length_with_eos)
            final_outputs_train, final_state_train, final_sequence_lengths_train = decode(
initial_state,
cell,
train_helper,
projection_layer=projection_layer)
eval_logits = final_outputs_train.rnn_output
train_logits = final_outputs_train.projection_input
# train_predictions = final_outputs_train.sample_id
pred_helper = tf.contrib.seq2seq.GreedyEmbeddingHelper(
embedding=targets_embeddings,
start_tokens=start_tokens,
end_token=END_SYMBOL)
final_outputs_pred, final_state_pred, final_sequence_lengths_pred = decode(
initial_state,
cell,
pred_helper,
beam_width,
projection_layer=projection_layer)
if beam_width > 1:
predictions_sequence = final_outputs_pred.beam_search_decoder_output.predicted_ids[
:, :, 0]
            # final_outputs_pred.predicted_ids[:, :, 0] would work too, but it contains -1s for padding
predictions_sequence_scores = final_outputs_pred.beam_search_decoder_output.scores[
:, :, 0]
predictions_sequence_length_with_eos = final_sequence_lengths_pred[
:, 0]
else:
predictions_sequence = final_outputs_pred.sample_id
predictions_sequence_scores = final_outputs_pred.rnn_output
predictions_sequence_length_with_eos = final_sequence_lengths_pred
logging.debug(' train_logits: {0}'.format(train_logits))
logging.debug(' eval_logits: {0}'.format(eval_logits))
logging.debug(' predictions_sequence: {0}'.format(predictions_sequence))
logging.debug(' predictions_sequence_scores: {0}'.format(
predictions_sequence_scores))
return predictions_sequence, predictions_sequence_scores, predictions_sequence_length_with_eos, \
targets_sequence_length_with_eos, eval_logits, train_logits, class_weights, class_biases
| 44.807453
| 152
| 0.570419
|
93f4103d4c1d0451cd3bc88fef28854430753c22
| 29,606
|
py
|
Python
|
library/azure_rm_deployment.py
|
joaocc/azure_preview_modules
|
2413dafa6f979a2070843b073830901cc1b1d868
|
[
"MIT"
] | 46
|
2018-01-24T08:39:15.000Z
|
2021-08-20T04:41:16.000Z
|
library/azure_rm_deployment.py
|
joaocc/azure_preview_modules
|
2413dafa6f979a2070843b073830901cc1b1d868
|
[
"MIT"
] | 226
|
2017-12-12T21:46:31.000Z
|
2022-02-18T05:17:03.000Z
|
library/azure_rm_deployment.py
|
joaocc/azure_preview_modules
|
2413dafa6f979a2070843b073830901cc1b1d868
|
[
"MIT"
] | 60
|
2018-01-25T10:03:59.000Z
|
2022-03-08T10:19:54.000Z
|
#!/usr/bin/python
#
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_deployment
short_description: Create or destroy Azure Resource Manager template deployments
version_added: "2.1"
description:
- Create or destroy Azure Resource Manager template deployments via the Azure SDK for Python.
- You can find some quick start templates in GitHub here U(https://github.com/azure/azure-quickstart-templates).
- For more information on Azure Resource Manager templates see U(https://azure.microsoft.com/en-us/documentation/articles/resource-group-template-deploy/).
options:
resource_group:
description:
- The resource group name to use or create to host the deployed template.
required: true
aliases:
- resource_group_name
name:
description:
- The name of the deployment to be tracked in the resource group deployment history.
- Re-using a deployment name will overwrite the previous value in the resource group's deployment history.
default: ansible-arm
aliases:
- deployment_name
location:
description:
- The geo-locations in which the resource group will be located.
default: westus
deployment_mode:
description:
- In incremental mode, resources are deployed without deleting existing resources that are not included in the template.
- In complete mode resources are deployed and existing resources in the resource group not included in the template are deleted.
default: incremental
choices:
- complete
- incremental
template:
description:
- A hash containing the templates inline. This parameter is mutually exclusive with I(template_link).
- Either I(template) or I(template_link) is required if I(state=present).
type: dict
template_link:
description:
- Uri of file containing the template body. This parameter is mutually exclusive with I(template).
- Either I(template) or I(template_link) is required if I(state=present).
parameters:
description:
- A hash of all the required template variables for the deployment template. This parameter is mutually exclusive with I(parameters_link).
- Either I(parameters_link) or I(parameters) is required if I(state=present).
type: dict
parameters_link:
description:
- Uri of file containing the parameters body. This parameter is mutually exclusive with I(parameters).
- Either I(parameters_link) or I(parameters) is required if I(state=present).
wait_for_deployment_completion:
description:
- Whether or not to block until the deployment has completed.
type: bool
default: 'yes'
wait_for_deployment_polling_period:
description:
- Time (in seconds) to wait between polls when waiting for deployment completion.
default: 10
state:
description:
- If I(state=present), template will be created.
- If I(state=present) and deployment exists, it will be updated.
- If I(state=absent), stack will be removed.
default: present
choices:
- present
- absent
extends_documentation_fragment:
- azure
- azure_tags
author:
- David Justice (@devigned)
- Laurent Mazuel (@lmazuel)
- Andre Price (@obsoleted)
'''
EXAMPLES = '''
# Destroy a template deployment
- name: Destroy Azure Deploy
azure_rm_deployment:
resource_group: myResourceGroup
name: myDeployment
state: absent
# Create or update a template deployment based on uris using parameter and template links
- name: Create Azure Deploy
azure_rm_deployment:
resource_group: myResourceGroup
name: myDeployment
template_link: 'https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/101-vm-simple-linux/azuredeploy.json'
parameters_link: 'https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/101-vm-simple-linux/azuredeploy.parameters.json'
# Create or update a template deployment based on a uri to the template and parameters specified inline.
# This deploys a VM with SSH support for a given public key, then stores the result in 'azure_vms'. The result is then
# used to create a new host group. This host group is then used to wait for each instance to respond to the public IP SSH.
---
- name: Create Azure Deploy
azure_rm_deployment:
resource_group: myResourceGroup
name: myDeployment
parameters:
newStorageAccountName:
value: devopsclestorage1
adminUsername:
value: devopscle
dnsNameForPublicIP:
value: devopscleazure
location:
value: West US
vmSize:
value: Standard_A2
vmName:
value: ansibleSshVm
sshKeyData:
value: YOUR_SSH_PUBLIC_KEY
template_link: 'https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/101-vm-sshkey/azuredeploy.json'
register: azure
- name: Add new instance to host group
add_host:
hostname: "{{ item['ips'][0].public_ip }}"
groupname: azure_vms
loop: "{{ azure.deployment.instances }}"
# Deploy an Azure WebApp running a hello world'ish node app
- name: Create Azure WebApp Deployment at http://devopscleweb.azurewebsites.net/hello.js
azure_rm_deployment:
resource_group: myResourceGroup
name: myDeployment
parameters:
repoURL:
value: 'https://github.com/devigned/az-roadshow-oss.git'
siteName:
value: devopscleweb
hostingPlanName:
value: someplan
siteLocation:
value: westus
sku:
value: Standard
template_link: 'https://raw.githubusercontent.com/azure/azure-quickstart-templates/master/201-web-app-github-deploy/azuredeploy.json'
# Create or update a template deployment based on an inline template and parameters
- name: Create Azure Deploy
azure_rm_deployment:
resource_group: myResourceGroup
name: myDeployment
template:
$schema: "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"
contentVersion: "1.0.0.0"
parameters:
newStorageAccountName:
type: "string"
metadata:
description: "Unique DNS Name for the Storage Account where the Virtual Machine's disks will be placed."
adminUsername:
type: "string"
metadata:
description: "User name for the Virtual Machine."
adminPassword:
type: "securestring"
metadata:
description: "Password for the Virtual Machine."
dnsNameForPublicIP:
type: "string"
metadata:
description: "Unique DNS Name for the Public IP used to access the Virtual Machine."
ubuntuOSVersion:
type: "string"
defaultValue: "14.04.2-LTS"
allowedValues:
- "12.04.5-LTS"
- "14.04.2-LTS"
- "15.04"
metadata:
description: >
The Ubuntu version for the VM. This will pick a fully patched image of this given Ubuntu version.
Allowed values: 12.04.5-LTS, 14.04.2-LTS, 15.04."
variables:
location: "West US"
imagePublisher: "Canonical"
imageOffer: "UbuntuServer"
OSDiskName: "osdiskforlinuxsimple"
nicName: "myVMNic"
addressPrefix: "192.0.2.0/24"
subnetName: "Subnet"
subnetPrefix: "10.0.0.0/24"
storageAccountType: "Standard_LRS"
publicIPAddressName: "myPublicIP"
publicIPAddressType: "Dynamic"
vmStorageAccountContainerName: "vhds"
vmName: "MyUbuntuVM"
vmSize: "Standard_D1"
virtualNetworkName: "MyVNET"
vnetID: "[resourceId('Microsoft.Network/virtualNetworks',variables('virtualNetworkName'))]"
subnetRef: "[concat(variables('vnetID'),'/subnets/',variables('subnetName'))]"
resources:
- type: "Microsoft.Storage/storageAccounts"
name: "[parameters('newStorageAccountName')]"
apiVersion: "2015-05-01-preview"
location: "[variables('location')]"
properties:
accountType: "[variables('storageAccountType')]"
- apiVersion: "2015-05-01-preview"
type: "Microsoft.Network/publicIPAddresses"
name: "[variables('publicIPAddressName')]"
location: "[variables('location')]"
properties:
publicIPAllocationMethod: "[variables('publicIPAddressType')]"
dnsSettings:
domainNameLabel: "[parameters('dnsNameForPublicIP')]"
- type: "Microsoft.Network/virtualNetworks"
apiVersion: "2015-05-01-preview"
name: "[variables('virtualNetworkName')]"
location: "[variables('location')]"
properties:
addressSpace:
addressPrefixes:
- "[variables('addressPrefix')]"
subnets:
-
name: "[variables('subnetName')]"
properties:
addressPrefix: "[variables('subnetPrefix')]"
- type: "Microsoft.Network/networkInterfaces"
apiVersion: "2015-05-01-preview"
name: "[variables('nicName')]"
location: "[variables('location')]"
dependsOn:
- "[concat('Microsoft.Network/publicIPAddresses/', variables('publicIPAddressName'))]"
- "[concat('Microsoft.Network/virtualNetworks/', variables('virtualNetworkName'))]"
properties:
ipConfigurations:
-
name: "ipconfig1"
properties:
privateIPAllocationMethod: "Dynamic"
publicIPAddress:
id: "[resourceId('Microsoft.Network/publicIPAddresses',variables('publicIPAddressName'))]"
subnet:
id: "[variables('subnetRef')]"
- type: "Microsoft.Compute/virtualMachines"
apiVersion: "2015-06-15"
name: "[variables('vmName')]"
location: "[variables('location')]"
dependsOn:
- "[concat('Microsoft.Storage/storageAccounts/', parameters('newStorageAccountName'))]"
- "[concat('Microsoft.Network/networkInterfaces/', variables('nicName'))]"
properties:
hardwareProfile:
vmSize: "[variables('vmSize')]"
osProfile:
computername: "[variables('vmName')]"
adminUsername: "[parameters('adminUsername')]"
adminPassword: "[parameters('adminPassword')]"
storageProfile:
imageReference:
publisher: "[variables('imagePublisher')]"
offer: "[variables('imageOffer')]"
sku: "[parameters('ubuntuOSVersion')]"
version: "latest"
osDisk:
name: "osdisk"
vhd:
uri: >
[concat('http://',parameters('newStorageAccountName'),'.blob.core.windows.net/',variables('vmStorageAccountContainerName'),'/',
variables('OSDiskName'),'.vhd')]
caching: "ReadWrite"
createOption: "FromImage"
networkProfile:
networkInterfaces:
-
id: "[resourceId('Microsoft.Network/networkInterfaces',variables('nicName'))]"
diagnosticsProfile:
bootDiagnostics:
enabled: "true"
storageUri: "[concat('http://',parameters('newStorageAccountName'),'.blob.core.windows.net')]"
parameters:
newStorageAccountName:
value: devopsclestorage
adminUsername:
value: devopscle
adminPassword:
value: Password1!
dnsNameForPublicIP:
value: devopscleazure
'''
RETURN = '''
deployment:
description: Deployment details.
type: complex
returned: always
contains:
group_name:
description:
- Name of the resource group.
type: str
returned: always
sample: myResourceGroup
id:
description:
- The Azure ID of the deployment.
type: str
returned: always
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Resources/deployments/myDeployment"
instances:
description:
- Provides the public IP addresses for each VM instance.
type: list
returned: always
contains:
ips:
description:
- List of Public IP addresses.
type: list
returned: always
contains:
dns_settings:
description:
- DNS Settings.
type: complex
returned: always
contains:
domain_name_label:
description:
- Domain Name Label.
type: str
returned: always
sample: myvirtualmachine
fqdn:
description:
- Fully Qualified Domain Name.
type: str
returned: always
sample: myvirtualmachine.eastus2.cloudapp.azure.com
id:
description:
- Public IP resource id.
returned: always
type: str
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Network/p
ublicIPAddresses/myPublicIP"
name:
description:
- Public IP resource name.
returned: always
type: str
sample: myPublicIP
public_ip:
description:
- Public IP address value.
returned: always
type: str
sample: 104.209.244.123
public_ip_allocation_method:
description:
- Public IP allocation method.
returned: always
type: str
sample: Dynamic
vm_name:
description:
- Virtual machine name.
returned: always
type: str
sample: myvirtualmachine
name:
description:
- Name of the deployment.
type: str
returned: always
sample: myDeployment
outputs:
description:
- Dictionary of outputs received from the deployment.
type: complex
returned: always
sample: { "hostname": { "type": "String", "value": "myvirtualmachine.eastus2.cloudapp.azure.com" } }
'''
import time
try:
from azure.common.credentials import ServicePrincipalCredentials
import time
import yaml
except ImportError as exc:
IMPORT_ERROR = "Error importing module prerequisites: %s" % exc
try:
from itertools import chain
from azure.common.exceptions import CloudError
from azure.mgmt.resource.resources import ResourceManagementClient
from azure.mgmt.network import NetworkManagementClient
except ImportError:
# This is handled in azure_rm_common
pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
class AzureRMDeploymentManager(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(type='str', required=True, aliases=['resource_group_name']),
name=dict(type='str', default="ansible-arm", aliases=['deployment_name']),
state=dict(type='str', default='present', choices=['present', 'absent']),
template=dict(type='dict', default=None),
parameters=dict(type='dict', default=None),
template_link=dict(type='str', default=None),
parameters_link=dict(type='str', default=None),
location=dict(type='str', default="westus"),
deployment_mode=dict(type='str', default='incremental', choices=['complete', 'incremental']),
wait_for_deployment_completion=dict(type='bool', default=True),
wait_for_deployment_polling_period=dict(type='int', default=10)
)
mutually_exclusive = [('template', 'template_link'),
('parameters', 'parameters_link')]
self.resource_group = None
self.state = None
self.template = None
self.parameters = None
self.template_link = None
self.parameters_link = None
self.location = None
self.deployment_mode = None
self.name = None
self.wait_for_deployment_completion = None
self.wait_for_deployment_polling_period = None
self.tags = None
self.append_tags = None
self.results = dict(
deployment=dict(),
changed=False,
msg=""
)
super(AzureRMDeploymentManager, self).__init__(derived_arg_spec=self.module_arg_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=False)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()) + ['append_tags', 'tags']:
setattr(self, key, kwargs[key])
if self.state == 'present':
deployment = self.deploy_template()
if deployment is None:
self.results['deployment'] = dict(
name=self.name,
group_name=self.resource_group,
id=None,
outputs=None,
instances=None
)
else:
self.results['deployment'] = dict(
name=deployment.name,
group_name=self.resource_group,
id=deployment.id,
outputs=deployment.properties.outputs,
instances=self._get_instances(deployment)
)
self.results['changed'] = True
self.results['msg'] = 'deployment succeeded'
else:
try:
if self.get_resource_group(self.resource_group):
self.destroy_resource_group()
self.results['changed'] = True
self.results['msg'] = "deployment deleted"
except CloudError:
# resource group does not exist
pass
return self.results
def deploy_template(self):
"""
Deploy the targeted template and parameters
        Uses the module options already validated onto this instance (template or
        template_link, parameters or parameters_link, deployment_mode, tags, ...).
        :return: the deployment result object, or None when not waiting for deployment completion
"""
deploy_parameter = self.rm_models.DeploymentProperties(mode=self.deployment_mode)
if not self.parameters_link:
deploy_parameter.parameters = self.parameters
else:
deploy_parameter.parameters_link = self.rm_models.ParametersLink(
uri=self.parameters_link
)
if not self.template_link:
deploy_parameter.template = self.template
else:
deploy_parameter.template_link = self.rm_models.TemplateLink(
uri=self.template_link
)
if self.append_tags and self.tags:
try:
# fetch the RG directly (instead of using the base helper) since we don't want to exit if it's missing
rg = self.rm_client.resource_groups.get(self.resource_group)
if rg.tags:
self.tags = dict(self.tags, **rg.tags)
except CloudError:
# resource group does not exist
pass
params = self.rm_models.ResourceGroup(location=self.location, tags=self.tags)
try:
self.rm_client.resource_groups.create_or_update(self.resource_group, params)
except CloudError as exc:
self.fail("Resource group create_or_update failed with status code: %s and message: %s" %
(exc.status_code, exc.message))
try:
result = self.rm_client.deployments.create_or_update(self.resource_group,
self.name,
deploy_parameter)
deployment_result = None
if self.wait_for_deployment_completion:
deployment_result = self.get_poller_result(result)
while deployment_result.properties is None or deployment_result.properties.provisioning_state not in ['Canceled', 'Failed', 'Deleted',
'Succeeded']:
time.sleep(self.wait_for_deployment_polling_period)
deployment_result = self.rm_client.deployments.get(self.resource_group, self.name)
except CloudError as exc:
failed_deployment_operations = self._get_failed_deployment_operations(self.name)
self.log("Deployment failed %s: %s" % (exc.status_code, exc.message))
self.fail("Deployment failed with status code: %s and message: %s" % (exc.status_code, exc.message),
failed_deployment_operations=failed_deployment_operations)
if self.wait_for_deployment_completion and deployment_result.properties.provisioning_state != 'Succeeded':
self.log("provisioning state: %s" % deployment_result.properties.provisioning_state)
failed_deployment_operations = self._get_failed_deployment_operations(self.name)
self.fail('Deployment failed. Deployment id: %s' % deployment_result.id,
failed_deployment_operations=failed_deployment_operations)
return deployment_result
def destroy_resource_group(self):
"""
Destroy the targeted resource group
"""
try:
result = self.rm_client.resource_groups.delete(self.resource_group)
result.wait() # Blocking wait till the delete is finished
except CloudError as e:
if e.status_code == 404 or e.status_code == 204:
return
else:
self.fail("Delete resource group and deploy failed with status code: %s and message: %s" %
(e.status_code, e.message))
def _get_failed_nested_operations(self, current_operations):
new_operations = []
for operation in current_operations:
if operation.properties.provisioning_state == 'Failed':
new_operations.append(operation)
if operation.properties.target_resource and \
'Microsoft.Resources/deployments' in operation.properties.target_resource.id:
nested_deployment = operation.properties.target_resource.resource_name
try:
nested_operations = self.rm_client.deployment_operations.list(self.resource_group,
nested_deployment)
except CloudError as exc:
self.fail("List nested deployment operations failed with status code: %s and message: %s" %
(exc.status_code, exc.message))
new_nested_operations = self._get_failed_nested_operations(nested_operations)
new_operations += new_nested_operations
return new_operations
def _get_failed_deployment_operations(self, name):
results = []
# time.sleep(15) # there is a race condition between when we ask for deployment status and when the
# # status is available.
try:
operations = self.rm_client.deployment_operations.list(self.resource_group, name)
except CloudError as exc:
self.fail("Get deployment failed with status code: %s and message: %s" %
(exc.status_code, exc.message))
try:
results = [
dict(
id=op.id,
operation_id=op.operation_id,
status_code=op.properties.status_code,
status_message=op.properties.status_message,
target_resource=dict(
id=op.properties.target_resource.id,
resource_name=op.properties.target_resource.resource_name,
resource_type=op.properties.target_resource.resource_type
) if op.properties.target_resource else None,
provisioning_state=op.properties.provisioning_state,
)
for op in self._get_failed_nested_operations(operations)
]
except Exception:
            # If we fail here, the original error gets lost and the user receives the wrong error message/stacktrace
pass
self.log(dict(failed_deployment_operations=results), pretty_print=True)
return results
def _get_instances(self, deployment):
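        # Walks the deployment dependency tree (VMs -> attached NICs -> public IPs)
        # and returns one entry per VM with its name and public IP details, i.e.
        # the structure documented under RETURN.instances above.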
dep_tree = self._build_hierarchy(deployment.properties.dependencies)
vms = self._get_dependencies(dep_tree, resource_type="Microsoft.Compute/virtualMachines")
vms_and_nics = [(vm, self._get_dependencies(vm['children'], "Microsoft.Network/networkInterfaces"))
for vm in vms]
vms_and_ips = [(vm['dep'], self._nic_to_public_ips_instance(nics))
for vm, nics in vms_and_nics]
return [dict(vm_name=vm.resource_name, ips=[self._get_ip_dict(ip)
for ip in ips]) for vm, ips in vms_and_ips if len(ips) > 0]
def _get_dependencies(self, dep_tree, resource_type):
matches = [value for value in dep_tree.values() if value['dep'].resource_type == resource_type]
for child_tree in [value['children'] for value in dep_tree.values()]:
matches += self._get_dependencies(child_tree, resource_type)
return matches
def _build_hierarchy(self, dependencies, tree=None):
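        # Builds a nested dict keyed by resource_name, where each node stores the
        # raw dependency object under 'dep' and its children under 'children'; the
        # second pass below re-parents top-level entries that are really children
        # of other top-level entries.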
tree = dict(top=True) if tree is None else tree
for dep in dependencies:
if dep.resource_name not in tree:
tree[dep.resource_name] = dict(dep=dep, children=dict())
if isinstance(dep, self.rm_models.Dependency) and dep.depends_on is not None and len(dep.depends_on) > 0:
self._build_hierarchy(dep.depends_on, tree[dep.resource_name]['children'])
if 'top' in tree:
tree.pop('top', None)
keys = list(tree.keys())
for key1 in keys:
for key2 in keys:
if key2 in tree and key1 in tree[key2]['children'] and key1 in tree:
tree[key2]['children'][key1] = tree[key1]
tree.pop(key1)
return tree
def _get_ip_dict(self, ip):
ip_dict = dict(name=ip.name,
id=ip.id,
public_ip=ip.ip_address,
public_ip_allocation_method=str(ip.public_ip_allocation_method)
)
if ip.dns_settings:
ip_dict['dns_settings'] = {
'domain_name_label': ip.dns_settings.domain_name_label,
'fqdn': ip.dns_settings.fqdn
}
return ip_dict
def _nic_to_public_ips_instance(self, nics):
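        # For each NIC dependency: fetch the NIC, collect the public IP ids from its
        # ip configurations, then resolve each id to a full public IP object. In an
        # Azure resource id, path segment 4 is the resource group and the last
        # segment is the resource (public IP) name.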
return [self.network_client.public_ip_addresses.get(public_ip_id.split('/')[4], public_ip_id.split('/')[-1])
for nic_obj in (self.network_client.network_interfaces.get(self.resource_group,
nic['dep'].resource_name) for nic in nics)
for public_ip_id in [ip_conf_instance.public_ip_address.id
for ip_conf_instance in nic_obj.ip_configurations
if ip_conf_instance.public_ip_address]]
def main():
AzureRMDeploymentManager()
if __name__ == '__main__':
main()
| 42.113798
| 159
| 0.585591
|
5e383ff32866bc18159d3ca0d169464a7ce081d0
| 12,840
|
py
|
Python
|
torchvision/transforms/functional_pil.py
|
xuzhao9/vision
|
71d2bb0bc67044f55d38bfddf04e05be0343deab
|
[
"BSD-3-Clause"
] | 1
|
2022-03-04T07:33:40.000Z
|
2022-03-04T07:33:40.000Z
|
torchvision/transforms/functional_pil.py
|
xuzhao9/vision
|
71d2bb0bc67044f55d38bfddf04e05be0343deab
|
[
"BSD-3-Clause"
] | null | null | null |
torchvision/transforms/functional_pil.py
|
xuzhao9/vision
|
71d2bb0bc67044f55d38bfddf04e05be0343deab
|
[
"BSD-3-Clause"
] | null | null | null |
import numbers
from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
import numpy as np
import torch
from PIL import Image, ImageOps, ImageEnhance
from typing_extensions import Literal
try:
import accimage
except ImportError:
accimage = None
@torch.jit.unused
def _is_pil_image(img: Any) -> bool:
if accimage is not None:
return isinstance(img, (Image.Image, accimage.Image))
else:
return isinstance(img, Image.Image)
@torch.jit.unused
def get_dimensions(img: Any) -> List[int]:
if _is_pil_image(img):
channels = len(img.getbands())
width, height = img.size
return [channels, height, width]
raise TypeError(f"Unexpected type {type(img)}")
@torch.jit.unused
def get_image_size(img: Any) -> List[int]:
if _is_pil_image(img):
return list(img.size)
raise TypeError(f"Unexpected type {type(img)}")
@torch.jit.unused
def get_image_num_channels(img: Any) -> int:
if _is_pil_image(img):
return len(img.getbands())
raise TypeError(f"Unexpected type {type(img)}")
@torch.jit.unused
def hflip(img: Image.Image) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
return img.transpose(Image.FLIP_LEFT_RIGHT)
@torch.jit.unused
def vflip(img: Image.Image) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
return img.transpose(Image.FLIP_TOP_BOTTOM)
@torch.jit.unused
def adjust_brightness(img: Image.Image, brightness_factor: float) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
enhancer = ImageEnhance.Brightness(img)
img = enhancer.enhance(brightness_factor)
return img
@torch.jit.unused
def adjust_contrast(img: Image.Image, contrast_factor: float) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
enhancer = ImageEnhance.Contrast(img)
img = enhancer.enhance(contrast_factor)
return img
@torch.jit.unused
def adjust_saturation(img: Image.Image, saturation_factor: float) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
enhancer = ImageEnhance.Color(img)
img = enhancer.enhance(saturation_factor)
return img
@torch.jit.unused
def adjust_hue(img: Image.Image, hue_factor: float) -> Image.Image:
if not (-0.5 <= hue_factor <= 0.5):
raise ValueError(f"hue_factor ({hue_factor}) is not in [-0.5, 0.5].")
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
input_mode = img.mode
if input_mode in {"L", "1", "I", "F"}:
return img
h, s, v = img.convert("HSV").split()
np_h = np.array(h, dtype=np.uint8)
    # uint8 addition takes care of rotation across boundaries
with np.errstate(over="ignore"):
np_h += np.uint8(hue_factor * 255)
h = Image.fromarray(np_h, "L")
img = Image.merge("HSV", (h, s, v)).convert(input_mode)
return img
@torch.jit.unused
def adjust_gamma(
img: Image.Image,
gamma: float,
gain: float = 1.0,
) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
if gamma < 0:
raise ValueError("Gamma should be a non-negative real number")
input_mode = img.mode
img = img.convert("RGB")
gamma_map = [int((255 + 1 - 1e-3) * gain * pow(ele / 255.0, gamma)) for ele in range(256)] * 3
img = img.point(gamma_map) # use PIL's point-function to accelerate this part
img = img.convert(input_mode)
return img
@torch.jit.unused
def pad(
img: Image.Image,
padding: Union[int, List[int], Tuple[int, ...]],
fill: Optional[Union[float, List[float], Tuple[float, ...]]] = 0,
padding_mode: Literal["constant", "edge", "reflect", "symmetric"] = "constant",
) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
if not isinstance(padding, (numbers.Number, tuple, list)):
raise TypeError("Got inappropriate padding arg")
if not isinstance(fill, (numbers.Number, str, tuple)):
raise TypeError("Got inappropriate fill arg")
if not isinstance(padding_mode, str):
raise TypeError("Got inappropriate padding_mode arg")
if isinstance(padding, list):
padding = tuple(padding)
if isinstance(padding, tuple) and len(padding) not in [1, 2, 4]:
raise ValueError(f"Padding must be an int or a 1, 2, or 4 element tuple, not a {len(padding)} element tuple")
if isinstance(padding, tuple) and len(padding) == 1:
# Compatibility with `functional_tensor.pad`
padding = padding[0]
if padding_mode not in ["constant", "edge", "reflect", "symmetric"]:
raise ValueError("Padding mode should be either constant, edge, reflect or symmetric")
if padding_mode == "constant":
opts = _parse_fill(fill, img, name="fill")
if img.mode == "P":
palette = img.getpalette()
image = ImageOps.expand(img, border=padding, **opts)
image.putpalette(palette)
return image
return ImageOps.expand(img, border=padding, **opts)
else:
if isinstance(padding, int):
pad_left = pad_right = pad_top = pad_bottom = padding
if isinstance(padding, tuple) and len(padding) == 2:
pad_left = pad_right = padding[0]
pad_top = pad_bottom = padding[1]
if isinstance(padding, tuple) and len(padding) == 4:
pad_left = padding[0]
pad_top = padding[1]
pad_right = padding[2]
pad_bottom = padding[3]
p = [pad_left, pad_top, pad_right, pad_bottom]
cropping = -np.minimum(p, 0)
if cropping.any():
crop_left, crop_top, crop_right, crop_bottom = cropping
img = img.crop((crop_left, crop_top, img.width - crop_right, img.height - crop_bottom))
pad_left, pad_top, pad_right, pad_bottom = np.maximum(p, 0)
if img.mode == "P":
palette = img.getpalette()
img = np.asarray(img)
img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right)), mode=padding_mode)
img = Image.fromarray(img)
img.putpalette(palette)
return img
img = np.asarray(img)
# RGB image
if len(img.shape) == 3:
img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right), (0, 0)), padding_mode)
# Grayscale image
if len(img.shape) == 2:
img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right)), padding_mode)
return Image.fromarray(img)
@torch.jit.unused
def crop(
img: Image.Image,
top: int,
left: int,
height: int,
width: int,
) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
return img.crop((left, top, left + width, top + height))
@torch.jit.unused
def resize(
img: Image.Image,
size: Union[Sequence[int], int],
interpolation: int = Image.BILINEAR,
max_size: Optional[int] = None,
) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
if not (isinstance(size, int) or (isinstance(size, Sequence) and len(size) in (1, 2))):
raise TypeError(f"Got inappropriate size arg: {size}")
if isinstance(size, Sequence) and len(size) == 1:
size = size[0]
if isinstance(size, int):
w, h = img.size
short, long = (w, h) if w <= h else (h, w)
new_short, new_long = size, int(size * long / short)
if max_size is not None:
if max_size <= size:
raise ValueError(
f"max_size = {max_size} must be strictly greater than the requested "
f"size for the smaller edge size = {size}"
)
if new_long > max_size:
new_short, new_long = int(max_size * new_short / new_long), max_size
new_w, new_h = (new_short, new_long) if w <= h else (new_long, new_short)
if (w, h) == (new_w, new_h):
return img
else:
return img.resize((new_w, new_h), interpolation)
else:
if max_size is not None:
raise ValueError(
"max_size should only be passed if size specifies the length of the smaller edge, "
"i.e. size should be an int or a sequence of length 1 in torchscript mode."
)
return img.resize(size[::-1], interpolation)
@torch.jit.unused
def _parse_fill(
fill: Optional[Union[float, List[float], Tuple[float, ...]]],
img: Image.Image,
name: str = "fillcolor",
) -> Dict[str, Optional[Union[float, List[float], Tuple[float, ...]]]]:
# Process fill color for affine transforms
num_bands = len(img.getbands())
if fill is None:
fill = 0
if isinstance(fill, (int, float)) and num_bands > 1:
fill = tuple([fill] * num_bands)
if isinstance(fill, (list, tuple)):
if len(fill) != num_bands:
msg = "The number of elements in 'fill' does not match the number of bands of the image ({} != {})"
raise ValueError(msg.format(len(fill), num_bands))
fill = tuple(fill)
return {name: fill}
@torch.jit.unused
def affine(
img: Image.Image,
matrix: List[float],
interpolation: int = Image.NEAREST,
fill: Optional[Union[float, List[float], Tuple[float, ...]]] = 0,
) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
output_size = img.size
opts = _parse_fill(fill, img)
return img.transform(output_size, Image.AFFINE, matrix, interpolation, **opts)
@torch.jit.unused
def rotate(
img: Image.Image,
angle: float,
interpolation: int = Image.NEAREST,
expand: bool = False,
center: Optional[Tuple[int, int]] = None,
fill: Optional[Union[float, List[float], Tuple[float, ...]]] = 0,
) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
opts = _parse_fill(fill, img)
return img.rotate(angle, interpolation, expand, center, **opts)
@torch.jit.unused
def perspective(
img: Image.Image,
perspective_coeffs: float,
interpolation: int = Image.BICUBIC,
fill: Optional[Union[float, List[float], Tuple[float, ...]]] = 0,
) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
opts = _parse_fill(fill, img)
return img.transform(img.size, Image.PERSPECTIVE, perspective_coeffs, interpolation, **opts)
@torch.jit.unused
def to_grayscale(img: Image.Image, num_output_channels: int) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
if num_output_channels == 1:
img = img.convert("L")
elif num_output_channels == 3:
img = img.convert("L")
np_img = np.array(img, dtype=np.uint8)
np_img = np.dstack([np_img, np_img, np_img])
img = Image.fromarray(np_img, "RGB")
else:
raise ValueError("num_output_channels should be either 1 or 3")
return img
@torch.jit.unused
def invert(img: Image.Image) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
return ImageOps.invert(img)
@torch.jit.unused
def posterize(img: Image.Image, bits: int) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
return ImageOps.posterize(img, bits)
@torch.jit.unused
def solarize(img: Image.Image, threshold: int) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
return ImageOps.solarize(img, threshold)
@torch.jit.unused
def adjust_sharpness(img: Image.Image, sharpness_factor: float) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
enhancer = ImageEnhance.Sharpness(img)
img = enhancer.enhance(sharpness_factor)
return img
@torch.jit.unused
def autocontrast(img: Image.Image) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
return ImageOps.autocontrast(img)
@torch.jit.unused
def equalize(img: Image.Image) -> Image.Image:
if not _is_pil_image(img):
raise TypeError(f"img should be PIL Image. Got {type(img)}")
return ImageOps.equalize(img)
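# Minimal usage sketch of the helpers above (illustrative only: "example.jpg" is a
# placeholder path, and these private helpers are normally reached through
# torchvision.transforms.functional rather than called directly):
#
#   from PIL import Image
#   img = Image.open("example.jpg")
#   img = resize(img, 256)                          # smaller edge becomes 256
#   img = pad(img, (4, 8), padding_mode="reflect")  # 4 px left/right, 8 px top/bottom
#   img = adjust_brightness(hflip(img), 1.2)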
| 31.240876
| 117
| 0.636526
|
4e8dc9ed113322c29144a75eb1c89731b9ef220e
| 12,728
|
py
|
Python
|
daal4py/sklearn/decomposition/_pca.py
|
cmsxbc/scikit-learn-intelex
|
a442e84efdf3e2baf7fb7c0cdd114b2698e82d90
|
[
"Apache-2.0"
] | 1
|
2021-08-13T13:39:17.000Z
|
2021-08-13T13:39:17.000Z
|
daal4py/sklearn/decomposition/_pca.py
|
raoberman/daal4py
|
65e74dd90342bebbfbb51f1057db9a78ec818b9c
|
[
"Apache-2.0"
] | null | null | null |
daal4py/sklearn/decomposition/_pca.py
|
raoberman/daal4py
|
65e74dd90342bebbfbb51f1057db9a78ec818b9c
|
[
"Apache-2.0"
] | null | null | null |
#===============================================================================
# Copyright 2014-2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================
import numpy as np
import numbers
from math import sqrt
from scipy.sparse import issparse
from sklearn.utils import check_array
from sklearn.utils.validation import check_is_fitted
from sklearn.utils.extmath import stable_cumsum
import daal4py
from .._utils import getFPType, get_patch_message, sklearn_check_version
import logging
if sklearn_check_version('0.22'):
from sklearn.decomposition._pca import PCA as PCA_original
else:
from sklearn.decomposition.pca import PCA as PCA_original
if sklearn_check_version('0.23'):
from sklearn.decomposition._pca import _infer_dimension
elif sklearn_check_version('0.22'):
from sklearn.decomposition._pca import _infer_dimension_
else:
from sklearn.decomposition.pca import _infer_dimension_
class PCA(PCA_original):
def __init__(self, n_components=None, copy=True, whiten=False,
svd_solver='auto', tol=0.0, iterated_power='auto',
random_state=None):
self.n_components = n_components
self.copy = copy
self.whiten = whiten
self.svd_solver = svd_solver
self.tol = tol
self.iterated_power = iterated_power
self.random_state = random_state
def _validate_n_components(self, n_components, n_samples, n_features):
if n_components == 'mle':
if n_samples < n_features:
raise ValueError("n_components='mle' is only supported "
"if n_samples >= n_features")
elif not 0 <= n_components <= min(n_samples, n_features):
raise ValueError("n_components=%r must be between 0 and "
"min(n_samples, n_features)=%r with "
"svd_solver='full'"
% (n_components, min(n_samples, n_features)))
elif n_components >= 1:
if not isinstance(n_components, numbers.Integral):
raise ValueError("n_components=%r must be of type int "
"when greater than or equal to 1, "
"was of type=%r"
% (n_components, type(n_components)))
def _fit_full_daal4py(self, X, n_components):
n_samples, n_features = X.shape
n_sf_min = min(n_samples, n_features)
if n_components == 'mle':
daal_n_components = n_features
elif n_components < 1:
daal_n_components = n_sf_min
else:
daal_n_components = n_components
fpType = getFPType(X)
covariance_algo = daal4py.covariance(
fptype=fpType, outputMatrixType='covarianceMatrix')
covariance_res = covariance_algo.compute(X)
self.mean_ = covariance_res.mean.ravel()
covariance = covariance_res.covariance
variances_ = np.array([covariance[i, i] for i in range(n_features)])
pca_alg = daal4py.pca(
fptype=fpType,
method='correlationDense',
resultsToCompute='eigenvalue',
isDeterministic=True,
nComponents=daal_n_components
)
pca_res = pca_alg.compute(X, covariance)
components_ = pca_res.eigenvectors
explained_variance_ = np.maximum(pca_res.eigenvalues.ravel(), 0)
tot_var = explained_variance_.sum()
explained_variance_ratio_ = explained_variance_ / tot_var
if n_components == 'mle':
if sklearn_check_version('0.23'):
n_components = _infer_dimension(explained_variance_, n_samples)
else:
n_components = \
_infer_dimension_(explained_variance_, n_samples, n_features)
elif 0 < n_components < 1.0:
ratio_cumsum = stable_cumsum(explained_variance_ratio_)
n_components = np.searchsorted(ratio_cumsum, n_components,
side='right') + 1
if n_components < n_sf_min:
if explained_variance_.shape[0] == n_sf_min:
self.noise_variance_ = explained_variance_[n_components:].mean()
else:
resid_var_ = variances_.sum()
resid_var_ -= explained_variance_[:n_components].sum()
self.noise_variance_ = resid_var_ / (n_sf_min - n_components)
else:
self.noise_variance_ = 0.
self.n_samples_, self.n_features_ = n_samples, n_features
self.components_ = components_[:n_components]
self.n_components_ = n_components
self.explained_variance_ = explained_variance_[:n_components]
self.explained_variance_ratio_ = explained_variance_ratio_[:n_components]
self.singular_values_ = np.sqrt((n_samples - 1) * self.explained_variance_)
def _fit_full(self, X, n_components):
n_samples, n_features = X.shape
self._validate_n_components(n_components, n_samples, n_features)
self._fit_full_daal4py(X, min(X.shape))
U = None
V = self.components_
S = self.singular_values_
if n_components == 'mle':
if sklearn_check_version('0.23'):
n_components = _infer_dimension(self.explained_variance_, n_samples)
else:
n_components = \
_infer_dimension_(self.explained_variance_, n_samples, n_features)
elif 0 < n_components < 1.0:
ratio_cumsum = stable_cumsum(self.explained_variance_ratio_)
n_components = np.searchsorted(ratio_cumsum, n_components,
side='right') + 1
if n_components < min(n_features, n_samples):
self.noise_variance_ = self.explained_variance_[n_components:].mean()
else:
self.noise_variance_ = 0.
self.n_samples_, self.n_features_ = n_samples, n_features
self.components_ = self.components_[:n_components]
self.n_components_ = n_components
self.explained_variance_ = self.explained_variance_[:n_components]
self.explained_variance_ratio_ = self.explained_variance_ratio_[:n_components]
self.singular_values_ = self.singular_values_[:n_components]
return U, S, V
def _fit(self, X):
if issparse(X):
raise TypeError('PCA does not support sparse input. See '
'TruncatedSVD for a possible alternative.')
if sklearn_check_version('0.23'):
X = self._validate_data(X, dtype=[np.float64, np.float32],
ensure_2d=True, copy=False)
else:
X = check_array(X, dtype=[np.float64, np.float32], ensure_2d=True, copy=False)
if self.n_components is None:
if self.svd_solver != 'arpack':
n_components = min(X.shape)
else:
n_components = min(X.shape) - 1
else:
n_components = self.n_components
self._fit_svd_solver = self.svd_solver
shape_good_for_daal = X.shape[1] / X.shape[0] < 2
if self._fit_svd_solver == 'auto':
if n_components == 'mle':
self._fit_svd_solver = 'full'
else:
n, p, k = X.shape[0], X.shape[1], n_components
                # These coefficients are the result of training a Logistic Regression
                # (max_iter=10000, solver="liblinear", fit_intercept=False)
                # on different datasets and numbers of components. X is a dataset with
                # n*p*k, n*p^2 and n^2 columns, and y is the speedup of patched
                # scikit-learn's full PCA over stock scikit-learn's randomized PCA.
regression_coefs = np.array([
[9.779873e-11, n * p * k],
[-1.122062e-11, n * p * p],
[1.127905e-09, n ** 2],
])
if n_components >= 1 \
and np.dot(regression_coefs[:, 0], regression_coefs[:, 1]) <= 0:
self._fit_svd_solver = 'randomized'
else:
self._fit_svd_solver = 'full'
if not shape_good_for_daal or self._fit_svd_solver != 'full':
if sklearn_check_version('0.23'):
X = self._validate_data(X, copy=self.copy)
else:
X = check_array(X, copy=self.copy)
if self._fit_svd_solver == 'full':
if shape_good_for_daal:
logging.info(
"sklearn.decomposition.PCA."
"fit: " + get_patch_message("daal"))
result = self._fit_full(X, n_components)
else:
logging.info(
"sklearn.decomposition.PCA."
"fit: " + get_patch_message("sklearn"))
result = PCA_original._fit_full(self, X, n_components)
elif self._fit_svd_solver in ['arpack', 'randomized']:
logging.info("sklearn.decomposition.PCA.fit: " + get_patch_message("sklearn"))
result = self._fit_truncated(X, n_components, self._fit_svd_solver)
else:
raise ValueError("Unrecognized svd_solver='{0}'"
"".format(self._fit_svd_solver))
return result
def _transform_daal4py(self, X, whiten=False, scale_eigenvalues=True, check_X=True):
if sklearn_check_version('0.22'):
check_is_fitted(self)
else:
check_is_fitted(self, ['mean_', 'components_'], all_or_any=all)
X = check_array(X, dtype=[np.float64, np.float32], force_all_finite=check_X)
fpType = getFPType(X)
tr_data = dict()
if self.mean_ is not None:
tr_data['mean'] = self.mean_.reshape((1, -1))
if whiten:
if scale_eigenvalues:
tr_data['eigenvalue'] = \
(self.n_samples_ - 1) * self.explained_variance_.reshape((1, -1))
else:
tr_data['eigenvalue'] = self.explained_variance_.reshape((1, -1))
elif scale_eigenvalues:
tr_data['eigenvalue'] = np.full(
(1, self.explained_variance_.shape[0]),
self.n_samples_ - 1.0, dtype=X.dtype)
if X.shape[1] != self.n_features_:
raise ValueError(
(f'X has {X.shape[1]} features, '
f'but PCA is expecting {self.n_features_} features as input'))
tr_res = daal4py.pca_transform(
fptype=fpType
).compute(X, self.components_, tr_data)
return tr_res.transformedData
def transform(self, X):
if self.n_components_ > 0:
logging.info(
"sklearn.decomposition.PCA."
"transform: " + get_patch_message("daal"))
return self._transform_daal4py(X, whiten=self.whiten,
check_X=True, scale_eigenvalues=False)
else:
logging.info(
"sklearn.decomposition.PCA."
"transform: " + get_patch_message("sklearn"))
return PCA_original.transform(self, X)
def fit_transform(self, X, y=None):
U, S, _ = self._fit(X)
if U is None:
if self.n_components_ > 0:
logging.info(
"sklearn.decomposition.PCA."
"fit_transform: " + get_patch_message("daal"))
result = self._transform_daal4py(
X, whiten=self.whiten, check_X=False, scale_eigenvalues=False)
else:
result = np.empty((self.n_samples_, 0), dtype=X.dtype)
else:
logging.info(
"sklearn.decomposition.PCA."
"fit_transform: " + get_patch_message("sklearn"))
U = U[:, :self.n_components_]
if self.whiten:
U *= sqrt(X.shape[0] - 1)
else:
U *= S[:self.n_components_]
result = U
return result
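# Minimal usage sketch (hypothetical data; the class mirrors scikit-learn's PCA API,
# so the call pattern below is the standard scikit-learn one, not anything specific
# to this file):
#
#   import numpy as np
#   X = np.random.rand(1000, 20)
#   pca = PCA(n_components=5, svd_solver='full')
#   X_reduced = pca.fit_transform(X)   # dispatches to the daal4py path when shapes allow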
| 40.406349
| 90
| 0.583124
|
37d07dbb1ab0401171906916c0ecc29f867b5cfc
| 389
|
py
|
Python
|
Problems/Patients/task.py
|
gabrielizalo/jetbrains-academy-python-coffee-machine
|
e22cb502f7998855ef4afbc4ef7ecb8226418225
|
[
"MIT"
] | null | null | null |
Problems/Patients/task.py
|
gabrielizalo/jetbrains-academy-python-coffee-machine
|
e22cb502f7998855ef4afbc4ef7ecb8226418225
|
[
"MIT"
] | null | null | null |
Problems/Patients/task.py
|
gabrielizalo/jetbrains-academy-python-coffee-machine
|
e22cb502f7998855ef4afbc4ef7ecb8226418225
|
[
"MIT"
] | null | null | null |
class Patient:
def __init__(self, name, last_name, age):
self.name = name
self.last_name = last_name
self.age = age
# create methods here
def __str__(self):
return f"{self.name} {self.last_name}. {self.age}"
def __repr__(self):
return f"Object of the class Patient. name: {self.name}, last_name: {self.last_name}, age: {self.age}"
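# Example usage (not part of the task stub above):
#   p = Patient("John", "Doe", 42)
#   print(p)         # -> John Doe. 42
#   print(repr(p))   # -> Object of the class Patient. name: John, last_name: Doe, age: 42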
| 29.923077
| 110
| 0.622108
|
9e2a4dff21d0902fc6d929c0589aecf0c5b92d86
| 615
|
py
|
Python
|
Interview-Preparation/Facebook/ArraysStrings-multiply-strings.py
|
shoaibur/SWE
|
1e114a2750f2df5d6c50b48c8e439224894d65da
|
[
"MIT"
] | 1
|
2020-11-14T18:28:13.000Z
|
2020-11-14T18:28:13.000Z
|
Interview-Preparation/Facebook/ArraysStrings-multiply-strings.py
|
shoaibur/SWE
|
1e114a2750f2df5d6c50b48c8e439224894d65da
|
[
"MIT"
] | null | null | null |
Interview-Preparation/Facebook/ArraysStrings-multiply-strings.py
|
shoaibur/SWE
|
1e114a2750f2df5d6c50b48c8e439224894d65da
|
[
"MIT"
] | null | null | null |
class Solution:
def multiply(self, num1: str, num2: str) -> str:
def str2int(s):
num = 0
for i in range(len(s)-1,-1,-1):
num += int(s[i]) * 10**(len(s)-1-i)
return num
def int2str(num):
s = []
while num:
num, digit = divmod(num, 10)
s.append(str(digit))
return ''.join(s[::-1])
num1 = str2int(num1)
num2 = str2int(num2)
num = num1 * num2
num = str(num)
return num
# return int2str(num)
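# Example:
#   Solution().multiply("123", "45")   # -> "5535"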
| 24.6
| 52
| 0.4
|
eb31c7e8c2f0f06b54ce1a2e16478c267249b5b7
| 7,101
|
py
|
Python
|
cfi-lowering/AsmEditorBase.py
|
dbrumley/recfi
|
39b32ad9e406a79e37ddf94dfcdc4f00acf0c23d
|
[
"MIT"
] | 17
|
2015-02-04T05:21:14.000Z
|
2021-05-30T21:03:48.000Z
|
cfi-lowering/AsmEditorBase.py
|
dbrumley/recfi
|
39b32ad9e406a79e37ddf94dfcdc4f00acf0c23d
|
[
"MIT"
] | null | null | null |
cfi-lowering/AsmEditorBase.py
|
dbrumley/recfi
|
39b32ad9e406a79e37ddf94dfcdc4f00acf0c23d
|
[
"MIT"
] | 3
|
2018-03-18T23:11:44.000Z
|
2019-09-05T11:47:19.000Z
|
'''
#------------------------------ AsmEditorBase.py ------------------------------#
# #
# This is the abstract base class for a ReCFI Asm Editor object. The #
# point of this class is to take in annotated asm and operate on it. #
#   Two main operations are exposed: id lowering and check lowering.           #
# Before lowering each annotation to valid asm, it needs to be shifted #
# back into place, because during llvm code generation it might have #
# moved. #
# #
#------------------------------------------------------------------------------#
'''
import re
from abc import ABCMeta, abstractmethod
class AsmEditorBase:
''' Base class for lowering cfi asm annotations '''
__metaclass__ = ABCMeta
def __init__(self, asm, encode_type, intrinsics):
self.asm = asm
self.encode_type = encode_type
self.intrinsics = intrinsics
@abstractmethod
def error(self, msg):
''' throw error '''
raise NotImplementedError()
@abstractmethod
def is_transfer_instr(self, instr_split):
''' determine if instruction is a control transfer instruction.
instr_split: instruction in stripped, split, string format
return: True if instruction is a transfer, else false
'''
raise NotImplementedError()
@abstractmethod
def insert_id(self, asm_new, id_str):
''' insert a cfi id into the assembly.
asm_new: assembly instructions to insert ID into
id_str: id to insert, in format "#12345"
No return value
'''
raise NotImplementedError()
@abstractmethod
def insert_check(self, line, split, asm_new, ids, check_tar):
''' try to insert cfi id checking code into the asm.
line: asm line to insert check before
split: split version of asm line
asm_new: assembly instructions to insert check into
ids: list of IDs that need to be checked
check_tar: flag for type of check (check_tar vs check_ret)
return: True on sucessful insertion, else false
'''
raise NotImplementedError()
def is_cfi_intrinsic(self, instr_split, int_name):
''' determine if an instruction is an intrinsic annotation.
            shouldn't need to be re-implemented in subclass
instr_split: instruction in stripped, split, string format
int_name: intrinsic key to check against
return: True if cfi intrinsic, else false
'''
if len(instr_split) >= 2 and instr_split[0] == self.intrinsics[int_name]:
return True
return False
def is_func_begin(self, instr_split):
''' determine if instruction marks a function beginning.
This is done by looking for function start labels.
if our label starts with ".L", it is an LLVM inserted
label, and (hopefully) can't correspond to a function start.
"@" denotes a comment, and a label without a colon isn't
really a label. Might need to be re-implemented in subclass
instr_split: instruction in stripped, split, string format
return: True if function start, else false
'''
#ignore empty lines
if len(instr_split) < 1:
return False
label = instr_split[0]
if not label.startswith(".l") \
and not label.startswith("@") \
and ":" in label:
return True
return False
def lower_ids(self):
''' algorithm for lowering cfi ids from annotations to valid asm
Iterate backward over entire asm program.
When encountering an id annotation:
-- pick it up
-- slide it backward to function begin or callsite
-- lower it
-- ... continue
Done in a single pass over the asm.
input: self.asm, self.intrinsics
modifies: self.asm
'''
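# Illustrative effect (hypothetical asm; the directive actually emitted comes
# from the subclass's insert_id()):
#
#   before:                     after lower_ids():
#     foo:                        foo:
#       push {lr}                   <id lowered here by insert_id("#42")>
#       cfi_insertid #42            push {lr}
#       ...                         ...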
# transformed asm
asm_new = []
id_found = False
id_str = ''
sep = re.compile(r"\s*[,{}\[\]!^]\s*|\s*")
# ids need to be moved _backward_, so iterate in reverse
self.asm.reverse()
# for each line in the asm
for asm_line in self.asm:
# eliminate empty lines
split = filter(lambda x: x != '', sep.split(asm_line))
# make all chars lower-case
split = map(lambda x: x.lower(), split)
# if holding an ID
if id_found:
# check if line is beginning of basic block, if so, insert ID
if self.is_transfer_instr(split) or self.is_func_begin(split):
self.insert_id(asm_new, id_str)
id_found = False
# try to pick up an ID
elif self.is_cfi_intrinsic(split, "insert"):
id_found = True
id_str = split[1]
continue
asm_new.append(asm_line)
# undo reverse
asm_new.reverse()
# replace original asm with modified asm
self.asm = asm_new
def lower_checks(self):
''' algorithm for lowering cfi checks from annotations to valid asm
Iterate forwards over asm program, looking for checkret, checktar
When encountering a check annotation:
-- pick it up
-- slide it forwards to transfer site
-- lower it to monitor code
-- ... continue
Done in a single pass over the asm.
input: self.asm, self.encode_type, self.intrinsics
modifies: self.asm
'''
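# Illustrative effect (hypothetical asm; the monitor code actually emitted comes
# from the subclass's insert_check()):
#
#   before:                     after lower_checks():
#     cfi_checkret #42            <check of id "#42" lowered here by insert_check()>
#     bx lr                       bx lr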
check_found = False
check_tar = False
ids = []
asm_new = []
sep = re.compile(r"\s*[,{}\[\]!^]\s*|\s*")
for line in self.asm:
split = filter(lambda x: x != '', sep.split(line))
split = map(lambda x: x.lower(), split)
if self.is_cfi_intrinsic(split, "check_ret"):
check_found = True
check_tar = False
ids.append(split[1])
elif self.is_cfi_intrinsic(split, "check_tar"):
check_found = True
check_tar = True
ids.append(split[1])
elif check_found:
if self.insert_check(line, split, asm_new, ids, check_tar):
check_found = False
ids = []
else:
asm_new.append(line)
else:
asm_new.append(line)
self.asm = asm_new
| 35.863636
| 81
| 0.522602
|
7435aee8c22d86354004021b4b4c69c45916b45a
| 61,580
|
py
|
Python
|
Plugins/ActionEngine/Action.py
|
davidhstocker/Tioga
|
f7b66b4b94ca2b7e5eb845ce11162b1053715da8
|
[
"Apache-2.0"
] | 1
|
2020-09-07T13:33:32.000Z
|
2020-09-07T13:33:32.000Z
|
Plugins/ActionEngine/Action.py
|
davidhstocker/Tioga
|
f7b66b4b94ca2b7e5eb845ce11162b1053715da8
|
[
"Apache-2.0"
] | null | null | null |
Plugins/ActionEngine/Action.py
|
davidhstocker/Tioga
|
f7b66b4b94ca2b7e5eb845ce11162b1053715da8
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python2
"""Angela RML Interpreter - Action Queue (aQ) management engine service plugin
Created by the project angela team
http://sourceforge.net/projects/projectangela/
http://www.projectangela.org"""
__license__ = "GPL"
__version__ = "$Revision: 0.1 $"
__author__ = 'David Stocker'
# ***** BEGIN GPL LICENSE BLOCK *****
#
# Module copyright (C) David Stocker
#
# This module is part of the Angela RML Engine.
# Angela is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Angela is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Angela. If not, see <http://www.gnu.org/licenses/>.
#
# ***** END GPL LICENCE BLOCK *****
# --------------------------------------------------------------------------
import copy
import uuid
import Graphyne.Graph as Graph
import Graphyne.Scripting
from ... import Engine
from ... import Angela
from ... import Exceptions
#remote debugger support for pydev
#import pydevd
#globals
moduleName = 'ActionEngine.Action'
logType = Graph.logTypes.CONTENT
logLevel = Graph.LogLevel()
actionInsertionTypes = Angela.ActionInsertionType()
api = None
class Action(object):
className = 'Action'
actionIndex = {} # parameter is the action engine's action index and is used later to inflate member lists
def initialize(self, script, uuid, actionID):
method = moduleName + '.' + self.className + '.' + 'initialize'
"""
uuid = the uuid of the child action element (KeyFrame, Catch, Throw, etc.)
actionID = the uuid of the parent Action element
"""
Graph.logQ.put( [logType , logLevel.DEBUG , method , "entering"])
try:
self.uuid = uuid
self.meme = script.getEntityMemeType(actionID)
self.actionID = actionID
self.instanceID = None
except Exception as e:
errorMsg = "Unknown error initializing action %s. Traceback = %s" %(actionID, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
#Graph.logQ.put( [logType , logLevel.DEBUG , method , "exiting"])
def refreshInstanceID(self):
"""
Actions are singletons and self.uuid points back to the uuid of the memetic entity in the entity repository.
Actions are initialized as singletons for performance reasons (to frontload the initialization overhead to server startup)
and because actions of a given type are fungible. However, we still want to have each instance of an action to have a
unique tracking ID for the lag-log's action life cycle tracking.
Calling this method will generate a new UUID
"""
method = moduleName + '.' + self.className + '.' + 'refreshInstanceID'
try:
self.instanceID = uuid.uuid1()
except Exception as e:
errorMsg = "Unknown error refreshing instance UUID on action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
def getInflatedMemberList(self, unusedScript):
method = moduleName + '.' + self.className + '.' + 'getInflatedMemberList'
try:
return [self.meme]
except:
errorMsg = "Can't run getInflatedMemberList() on an uninitialized action"
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
return []
def inflateMembers(self, script):
#this method is only relevant for sets
pass
def addLandMarks(self, script):
"""
Find all of the landmarks attached to the keyframe
"""
method = moduleName + '.' + self.className + '.' + 'addLandMarks'
#Graph.logQ.put( [logType , logLevel.DEBUG , method , "entering"])
try:
# The template paths of the various types of landmarks
lmExPath = "Action.RequiredLandmarks::Action.RequiredlandmarksExclusive::Action.RequiredLandmark::Agent.Landmark"
lmMPath = "Action.RequiredLandmarks::Action.MasterLandmark::Action.RequiredLandmark::Agent.Landmark"
lmNoExPath = "Action.RequiredLandmarks::Action.RequiredLandmark::Agent.Landmark"
# Get the actual uuids of the various landmarks
self.landmarksNonExclusive = script.getLinkCounterpartsByMetaMemeType(self.uuid, lmNoExPath)
self.landmarksExclusive = script.getLinkCounterpartsByMetaMemeType(self.uuid, lmExPath)
masterLandmarkList = script.getLinkCounterpartsByMetaMemeType(self.uuid, lmMPath)
try:
self.masterLandmark = masterLandmarkList[0]
except:
errorMsg = "Action %s has no master landmark defined" %self.meme
raise Exceptions.MemeMembershipValidationError(errorMsg)
#Remote Debugger
#pydevd.settrace()
self.landmarkTransforms = []
reqLMRootPath = "**::Action.RequiredLandmark"
reqLMPath = "Agent.Landmark"
reqLMTransformPath = "Action.LandmarkTransform"
reqLMRoots = script.getLinkCounterpartsByMetaMemeType(self.uuid, reqLMRootPath)
for reqLMRoot in reqLMRoots:
reqLMs = script.getLinkCounterpartsByMetaMemeType(reqLMRoot, reqLMPath)
reqLMTransforms = script.getLinkCounterpartsByMetaMemeType(reqLMRoot, reqLMTransformPath)
# Action.LandmarkTransform is optional, but a transform element only makes sense if one exists
if len(reqLMTransforms) > 0:
#Agent.Offset
deltaX = None
deltaY = None
deltaZ = None
offsetDelta = script.getLinkCounterpartsByMetaMemeType(reqLMTransforms[0], "Agent.Offset")
if len(offsetDelta) > 0:
deltaX = script.getEntityPropertyValue(offsetDelta[0], "x")
deltaY = script.getEntityPropertyValue(offsetDelta[0], "y")
deltaZ = script.getEntityPropertyValue(offsetDelta[0], "z")
#Agent.EuerAngles
rotationX = None
rotationY = None
rotationZ = None
euerAngles = script.getLinkCounterpartsByMetaMemeType(reqLMTransforms[0], "Agent.EuerAngles")
if len(euerAngles) > 0:
rotationXList = script.getLinkCounterpartsByMetaMemeType(euerAngles[0], "Agent.RotationX")
rotationYList = script.getLinkCounterpartsByMetaMemeType(euerAngles[0], "Agent.RotationY")
rotationZList = script.getLinkCounterpartsByMetaMemeType(euerAngles[0], "Agent.RotationZ")
rotationX = script.getEntityPropertyValue(rotationXList[0], "Angle")
rotationY = script.getEntityPropertyValue(rotationYList[0], "Angle")
rotationZ = script.getEntityPropertyValue(rotationZList[0], "Angle")
transformDict = {"deltaX" : deltaX, "deltaY" : deltaY, "deltaZ" : deltaZ, "rotationX" : rotationX, "rotationY" : rotationY, "rotationZ" : rotationZ}
self.landmarkTransforms.append([reqLMs[0], transformDict])
except Exceptions.MemeMembershipValidationError as e:
Graph.logQ.put( [logType , logLevel.WARNING , method , e])
except Exception as e:
errorMsg = "Unknown error adding landmarks to keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
#Graph.logQ.put( [logType , logLevel.DEBUG , method , "exiting"])
def checkLandmarks(self, script, agentUUID):
method = moduleName + '.' + self.className + '.' + 'checkLandmarks'
allTrue = False
try:
exTrue = self.checkExLists(script, agentUUID)
nonExTrue = script.map(self.mapFunctionLandmarks, self.landmarksNonExclusive, agentUUID)
masterTrue = script.map(self.mapFunctionLandmarks, [self.masterLandmark], agentUUID)
allLandmarks = []
allLandmarks.extend(exTrue)
allLandmarks.extend(nonExTrue)
allLandmarks.extend(masterTrue)
if False not in allLandmarks:
allTrue = True
except Exception as e:
errorMsg = "Unknown error checking landmarks for keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
finally: return allTrue
def checkExLists(self, script, agentUUID):
method = moduleName + '.' + self.className + '.' + 'checkExLists'
try:
exTrue = script.map(self.mapFunctionLandmarks, self.landmarksExclusive, agentUUID)
return exTrue
except Exception as e:
errorMsg = "Unknown error checking exclusive landmarks for keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
return False
def mapFunctionLandmarks(self, landMarkID, agentUUID):
method = moduleName + '.' + self.className + '.' + 'mapFunctionLandmarks'
try:
api = Graph.api.getAPI()
landMarkPath = api.getEntityMemeType(landMarkID)
localResult = api.getHasCounterpartsByType(agentUUID, landMarkPath)
return localResult
except Exception as e:
errorMsg = "Unknown error mapping landmark %s for keyframe object of action %s. Traceback = %s" %(landMarkPath, self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
return False
def bootstrap(self):
pass
class ConditionalAction(object):
className = 'ConditionalAction'
def addConditions(self, script):
method = moduleName + '.' + self.className + '.' + 'addConditions'
#Graph.logQ.put( [logType , logLevel.DEBUG , method , "entering"])
try:
self.conditions = []
""" Adds conditions to those actions (KeyFrame, Throw) that require them """
conditionPath = "Graphyne.Condition.Condition"
conditionElements = script.getLinkCounterpartsByMetaMemeType(self.uuid, conditionPath)
for conditionElement in conditionElements:
Graph.logQ.put( [logType , logLevel.DEBUG , method , "adding condition %s to action %s" %(conditionElement, self.uuid)])
self.conditions.append(conditionElement)
except Exception as e:
actionID = None
try: actionID = self.meme
except: pass
errorMsg = "Unknown error adding conditions to action %s. Traceback = %s" %(actionID, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
#Graph.logQ.put( [logType , logLevel.DEBUG , method , "exiting"])
def mapFunctionConditions(self, script, conditionUUID, argumentMap):
method = moduleName + '.' + self.className + '.' + 'mapFunctionConditions'
try:
localResult = script.evaluateEntity(conditionUUID, argumentMap, argumentMap["actionID"], argumentMap["subjectID"], argumentMap["controllerID"])
return localResult
except Exception as e:
actionID = None
try: actionID = self.meme
except: pass
errorMsg = "Unknown error testing individual condition on action %s. Traceback = %s" %(actionID, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
return False
def checkConditions(self, script, argumentMap):
method = moduleName + '.' + self.className + '.' + 'checkConditions'
try:
conditionResults = script.map(self.mapFunctionConditions, self.conditions, argumentMap)
conditionsTrue = True
if False in conditionResults:
conditionsTrue = False
return conditionsTrue
except Exception as e:
actionID = None
try: actionID = self.meme
except: pass
errorMsg = "Unknown error testing conditions on action %s. Traceback = %s" %(actionID, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
return False
class ActionSet(Action):
className = 'ActionSet'
def bootstrap(self, script):
method = moduleName + '.' + self.className + '.' + 'bootstrap'
try:
self.memberList = []
self.packedMemberList = []
self.addLandMarks(script)
actionSetChildren = script.getLinkCounterpartsByMetaMemeType(self.uuid, "Action.ChoreographyStep")
tempPrio = {}
try: #lv2
for actionSetChild in actionSetChildren:
priority = script.getEntityPropertyValue(actionSetChild, "Priority")
action = script.getLinkCounterpartsByMetaMemeType(actionSetChild, "Action.Action")
tempPrio[priority] = action[0]#there should only be one action counterpart per ChoreographyStep
try: #lv3
implicitCatch = script.getEntityPropertyValue(self.uuid, "ImplicitCatch")
if implicitCatch == True:
#If implicitCatch is true, then create a Action.DefaultCatch
# and append it to self.packedMemberList before adding any other members
landmarkPath = "Action.RequiredLandmarks::Action.MasterLandmark::Action.RequiredLandmark::Agent.Landmark"
landmarkID = script.getLinkCounterpartsByMetaMemeType(self.uuid, landmarkPath)
defaultCatchID = script.getEntityPropertyValue(landmarkID[0], 'DefaultCatch')
defaultCatchUUID = uuid.UUID(defaultCatchID)
defaultCatchMeme = script.getEntityMemeType(defaultCatchUUID)
self.packedMemberList.append(defaultCatchMeme)
except Exception as e:
#level 3
pass
try: #lv4
prioList = sorted(tempPrio.keys())
for prio in prioList:
sortedMemberUUID = tempPrio[prio]
sortedMember = script.getEntityMemeType(sortedMemberUUID)
#debug
#errorMsg = "Entity meme %s uuid = %s" %(sortedMemberUUID, tempPrio[prio])
#Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
#/debug
self.packedMemberList.append(sortedMember)
except Exception as e:
errorMsg = "Unknown error setting up ChoreographyStep members on action %s.Traceback = %s" %(self.meme, e)
sortedMember = script.getEntityMemeType(sortedMemberUUID)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
except Exception as e:
#level 2
pass
except Exception as e:
errorMsg = "Unknown error bootstrapping choreography %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
#debug
try:
self.addLandMarks(script)
actionSetChildren = script.getLinkCounterpartsByMetaMemeType(self.uuid, "Action.ChoreographyStep")
tempPrio = {}
for actionSetChild in actionSetChildren:
priority = script.getEntityPropertyValue(actionSetChild, "Priority")
action = script.getLinkCounterpartsByMetaMemeType(actionSetChild, "Action.Action")
tempPrio[priority] = action
implicitCatch = script.getEntityPropertyValue(self.uuid, "ImplicitCatch")
if implicitCatch == True:
#If implicitCatch is true, then create a Action.DefaultCatch
# and append it to self.packedMemberList before adding any other members
landmarkPath = "Action.RequiredLandmarks::Action.MasterLandmark::Action.RequiredLandmark::Agent.Landmark"
landmarkID = script.getLinkCounterpartsByMetaMemeType(self.uuid, landmarkPath)
defaultCatchID = script.getEntityPropertyValue(landmarkID[0], 'DefaultCatch')
defaultCatchUUID = uuid.UUID(defaultCatchID)
defaultCatchMeme = script.getEntityMemeType(defaultCatchUUID)
self.packedMemberList.append(defaultCatchMeme)
prioList = sorted(tempPrio)
for prio in prioList:
sortedMemberUUID = uuid.UUID(tempPrio[prio])
sortedMember = script.getEntityMemeType(sortedMemberUUID)
self.packedMemberList.append(sortedMember)
except:
pass
def getInflatedMemberList(self, script):
method = moduleName + '.' + self.className + '.' + 'getInflatedMemberList'
returnList = []
for taskItem in self.packedMemberList:
#First, assert that we even have this action indexed
try:
assert taskItem in self.actionIndex
memberEntity = self.actionIndex[taskItem]
memberEntityMembers = memberEntity.getInflatedMemberList(script)
returnList.extend(memberEntityMembers)
except AssertionError:
errorMessage = "Action set %s has member %s, which is not indexed in action engine" %(self.meme, taskItem)
Graph.logQ.put( [logType , logLevel.ERROR , method , errorMessage])
#debug
#debugMessage = "Action set %s has the following members: %s" %(self.meme, returnList)
#Graph.logQ.put( [logType , logLevel.DEBUG , method , debugMessage])
#/debug
return returnList
def inflateMembers(self, script):
inflatedmemberList = self.getInflatedMemberList(script)
self.memberList = inflatedmemberList
class KeyFrame(Action, ConditionalAction):
className = 'KeyFrame'
def bootstrap(self, script):
self.addLandMarks(script)
self.addConditions(script)
self.addObjectSelectionConditions(script)
self.addStateChanges(script)
self.addStimuli(script)
self.addControllers(script)
self.addRestrictedView(script)
self.addTimescale(script)
def addObjectSelectionConditions(self, script):
method = moduleName + '.' + self.className + '.' + 'addObjectSelectionConditions'
#Graph.logQ.put( [logType , logLevel.DEBUG , method , "entering"])
try:
conditionPath = "Action.ObjectSelectionCondition::Graphyne.Condition.Condition"
self.objectSelectionConditions = script.getLinkCounterpartsByMetaMemeType(self.uuid, conditionPath)
except Exception as e:
errorMsg = "Unknown error adding object selection conditions to keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
def addStateChanges(self, script):
method = moduleName + '.' + self.className + '.' + 'addStateChanges'
#Action.StateChangeSet
#Graph.logQ.put( [logType , logLevel.DEBUG , method , "entering"])
try:
self.stateChangesSimple = []
self.stateChangesJoin = []
self.stateChangesBreak = []
self.stateChangeSuccessor = []
stateChangeElements = script.getLinkCounterpartsByMetaMemeType(self.uuid, "Action.StateChangeSet")
if len(stateChangeElements) > 0:
#StateChangeSet is a switch and will have one of the following children:
# SimpleStateChange, LinkJoin, LinkBreak or SuccessorAction
scElements = script.getLinkCounterpartsByMetaMemeType(stateChangeElements[0], "Action.SimpleStateChange")
ljElements = script.getLinkCounterpartsByMetaMemeType(stateChangeElements[0], "Action.LinkJoin")
lbElements = script.getLinkCounterpartsByMetaMemeType(stateChangeElements[0], "Action.LinkBreak")
saElements = script.getLinkCounterpartsByMetaMemeType(stateChangeElements[0], "Action.SuccessorAction")
for scElement in scElements:
#SimpleStateChange have two mandatory elements, a Change and a State, the latter of which extends Tioga.Condition.AgentAttributeArgument
changeElements = script.getLinkCounterpartsByMetaMemeType(scElement, "Action.Change")
conditionIDs = script.getLinkCounterpartsByMetaMemeType(scElement, "Graphyne.Condition.Condition")
stateElements = script.getLinkCounterpartsByMetaMemeType(scElement, "Action.State")
statePath = script.getEntityPropertyValue(stateElements[0], "SubjectArgumentPath")
conditionalStimuli = self.getConditionalStimuli(script, scElement)
stateChange = StateChangeSimple(conditionIDs[0], conditionalStimuli)
stateChange.prime(changeElements[0], statePath)
self.stateChangesSimple.append(stateChange)
for ljElement in ljElements:
conditionIDs = script.getLinkCounterpartsByMetaMemeType(ljElement, "Graphyne.Condition.Condition")
subjectPath = script.getEntityPropertyValue(ljElement, "SubjectArgumentPath")
objectPath = script.getEntityPropertyValue(ljElement, "ObjectArgumentPath")
linkTypeStr = script.getEntityPropertyValue(ljElement, "LinkType")
linkType = 0
if linkTypeStr == "SubAtomic":
linkType = 1
conditionalStimuli = self.getConditionalStimuli(script, ljElement)
stateChange = StateChangeJoin(conditionIDs[0], conditionalStimuli)
stateChange.prime(subjectPath, objectPath, linkType)
self.stateChangesJoin.append(stateChange)
for lbElement in lbElements:
conditionIDs = script.getLinkCounterpartsByMetaMemeType(lbElement, "Graphyne.Condition.Condition")
subjectPath = script.getEntityPropertyValue(lbElement, "SubjectArgumentPath")
objectPath = script.getEntityPropertyValue(lbElement, "ObjectArgumentPath")
conditionalStimuli = self.getConditionalStimuli(script, lbElement)
stateChange = StateChangeBreak(conditionIDs[0], conditionalStimuli)
stateChange.prime(subjectPath, objectPath)
self.stateChangesBreak.append(stateChange)
for saElement in saElements:
conditionIDs = script.getLinkCounterpartsByMetaMemeType(saElement, "Graphyne.Condition.Condition")
priority = script.getEntityPropertyValue(conditionIDs[0], "priority")
followOnActions = script.getLinkCounterpartsByMetaMemeType(saElement, "Action.Action")
insertionTypeStr = script.getEntityPropertyValue(saElement, "InsertionType")
insertionType = actionInsertionTypes.APPEND
if insertionTypeStr == "Head":
linkType = 1
elif insertionTypeStr == "HeadClear":
linkType = 2
conditionalStimuli = self.getConditionalStimuli(script, saElement)
stateChange = StateChangeSuccessorAction(conditionIDs[0], conditionalStimuli)
stateChange.prime(followOnActions[0], insertionType, priority)
self.stateChangeSuccessor.append(stateChange)
#Lastly, resort the successor action list to ensure that the new SA is positioned by priority
tempMap = {}
for currentEntry in self.stateChangeSuccessor:
tempMap[currentEntry.priority] = currentEntry
prioList = sorted(tempMap)
prioList.reverse()
self.stateChangeSuccessor = []
for prio in prioList:
self.stateChangeSuccessor.append(tempMap[prio])
except Exception as e:
errorMsg = "Unknown error adding state change information to keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
def addStimuli(self, script):
method = moduleName + '.' + self.className + '.' + 'addStimuli'
#Graph.logQ.put( [logType , logLevel.DEBUG , method , "entering"])
#Stimulus.ConditionalStimulus
try:
self.conditionalStimuli = self.getConditionalStimuli(script, self.uuid)
except Exception as e:
errorMsg = "Unknown error adding stimuli information to keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
def getConditionalStimuli(self, script, rootNodeID):
"""
Keyframes may link to ConditionalStimulus elements directly, or indirectly via StateChange.
Also, general keyframe conditional stimuli are stored directly on the keyframe, while
those associated with a state change belong to the state change and are only added
to self.conditionalStimuli immediately prior to stimuli distribution, which follows state changes.
"""
method = moduleName + '.' + self.className + '.' + 'getConditionalStimuli'
try:
#Stimulus.StimulusChoice
conditionalStimuli = []
conditionalStimuli = script.getLinkCounterpartsByMetaMemeType(rootNodeID, "Stimulus.StimulusChoice")
return conditionalStimuli
except Exception as e:
errorMsg = "Unknown error getting conditional stimuli for keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
return []
def addRequiredCondition(self):
#todo
pass
def addControllers(self, script):
#Todo
method = moduleName + '.' + self.className + '.' + 'addControllers'
#Graph.logQ.put( [logType , logLevel.DEBUG , method , "entering"])
try:
controllerBlacklist = None
controllerWhitelist = None
self.controllerBlacklist = controllerBlacklist
self.controllerWhitelist = controllerWhitelist
except Exception as e:
errorMsg = "Unknown error adding controllers to keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
def addTimescale(self, script):
method = moduleName + '.' + self.className + '.' + 'addTimescale'
#Graph.logQ.put( [logType , logLevel.DEBUG , method , "entering"])
try:
self.timescale = None
timescaleElem = script.getLinkCounterpartsByMetaMemeType(self.uuid, "Action.Timescale")
if len(timescaleElem) > 0:
self.timescale = timescaleElem[0]
except Exception as e:
errorMsg = "Unknown error adding timescale to keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
def addRestrictedView(self, script):
method = moduleName + '.' + self.className + '.' + 'addRestrictedView'
#Graph.logQ.put( [logType , logLevel.DEBUG , method , "entering"])
try:
self.view = None
viewElem = script.getLinkCounterpartsByMetaMemeType(self.uuid, "Action.View::Agent.Page")
if len(viewElem) > 0:
self.view = viewElem[0]
except Exception as e:
errorMsg = "Unknown error adding view to keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
def mapFunctionObjects(self, script, objectID, rtParams):
#We'll be adding objectID, passing on to script.map and really don't need any concurrency nonsense
# Hence the deepcopy
method = moduleName + '.' + self.className + '.' + 'mapFunctionObjects'
try:
argumentMap = {}
try:
#If _angela_actionEngineModTest_responseQueue is a key in rtParams, then we are running in test mode.
# The key in question holds a queue object for the test action script. Queue objects can't be copied!
# So we need to remove it from rtParams before making the copy and then re-add it to the copy.
assert '_angela_actionEngineModTest_responseQueue' in rtParams
responseQueue = rtParams['_angela_actionEngineModTest_responseQueue']
del rtParams['_angela_actionEngineModTest_responseQueue']
argumentMap = copy.deepcopy(rtParams)
#now add the queue back to rtParams and to argumentMap...
argumentMap['_angela_actionEngineModTest_responseQueue'] = responseQueue
rtParams['_angela_actionEngineModTest_responseQueue'] = responseQueue
except AssertionError:
#We are not in test mode and can blindly take rtParams
argumentMap = copy.deepcopy(rtParams)
except copy.Error as e:
raise e
except Exception as e:
errorMsg = "Copy Error. Traceback = %s" %(e)
raise Exception(errorMsg)
argumentMap["objectID"] = objectID
localResult = None
conditionResultSet = script.map(self.mapFunctionConditions, self.childConditions, argumentMap)
if False not in conditionResultSet:
localResult = objectID
return localResult
except Exception as e:
errorMsg = "Unknown error mapping objects for keyframe object of action %s. rtparams = %s Traceback = %s" %(self.meme, rtParams, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
return None
def mapFunctionCheckEulerTransforms(self, landmarkTransform):
method = moduleName + '.' + self.className + '.' + 'mapFunctionCheckEulerTransforms'
try:
transformDict = landmarkTransform[1]
transformResult = self.checkEulerAngles(landmarkTransform[0], transformDict["rotationX"], transformDict["rotationY"], transformDict["rotationZ"])
return transformResult
except Exception as e:
errorMsg = "Unknown error mapping euler transforms for keyframe object of action %s. landmarkTransform = %s Traceback = %s" %(self.meme, landmarkTransform[1], e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
return False
def mapFunctionCheckDeltaTransforms(self, landmarkTransform):
method = moduleName + '.' + self.className + '.' + 'mapFunctionCheckDeltaTransforms'
try:
transformDict = landmarkTransform[1]
transformResult = self.checkDeltas(landmarkTransform[0], transformDict["deltaX"], transformDict["deltaY"], transformDict["deltaZ"])
return transformResult
except Exception as e:
errorMsg = "Unknown error mapping transform deltas for keyframe object of action %s. landmarkTransform = %s Traceback = %s" %(self.meme, landmarkTransform[1], e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
return False
def mapFunctionStateChangesInner(self, script, stateChange, argumentMap):
#self.conditionID = conditionID
#self.stateChangeStimuli = stateChangeStimuli
method = moduleName + '.' + self.className + '.' + 'mapFunctionStateChangesInner'
try:
conditionResult = script.evaluateEntity(stateChange.conditionID, argumentMap, argumentMap["actionID"], argumentMap["subjectID"], argumentMap["controllerID"])
if conditionResult == True:
stateChange.execute(argumentMap["subjectID"], argumentMap["objectID"])
self.conditionalStimuli.extend(stateChange.stateChangeStimuli)
except Exception as e:
errorMsg = "Unknown error mapping state change for keyframe object of action %s. argumentMap = %s Traceback = %s" %(self.meme, argumentMap, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
finally: return None
def mapFunctionStateChangesOuter(self, objectID, rtParams):
method = moduleName + '.' + self.className + '.' + 'mapFunctionStateChangesOuter'
try:
argumentMap = {}
try:
#If _angela_actionEngineModTest_responseQueue is a key in rtParams, then we are running in test mode.
# The key in question holds a queue object for the test action script. Queue objects can't be copied!
# So we need to remove it from rtParams before making the copy and then re-add it to the copy.
assert '_angela_actionEngineModTest_responseQueue' in rtParams
responseQueue = rtParams['_angela_actionEngineModTest_responseQueue']
del rtParams['_angela_actionEngineModTest_responseQueue']
argumentMap = copy.deepcopy(rtParams)
#now add the queue back to rtParams and to argumentMap...
argumentMap['_angela_actionEngineModTest_responseQueue'] = responseQueue
rtParams['_angela_actionEngineModTest_responseQueue'] = responseQueue
except AssertionError:
#We are not in test mode and can blindly take rtParams
argumentMap = copy.deepcopy(rtParams)
except copy.Error as e:
raise e
except Exception as e:
errorMsg = "Copy Error. Traceback = %s" %(e)
raise Exception(errorMsg)
#argumentMap = copy.deepcopy(rtParams)
argumentMap["objectID"] = objectID
unusedReturn = self.script.map(self.mapFunctionStateChangesInner, self.stateChangesBreak, argumentMap)
unusedReturn = self.script.map(self.mapFunctionStateChangesInner, self.stateChangesJoin, argumentMap)
unusedReturn = self.script.map(self.mapFunctionStateChangesInner, self.stateChangesSimple, argumentMap)
unusedReturn = self.script.map(self.mapFunctionStateChangesInner, self.stateChangeSuccessor, argumentMap)
except copy.Error as e:
#Logged as error instead of warning because an uncopyable paramater payload from a client may be indicative of an attempted attack.
errorMsg = "Unable to map state change for keyframe object of action %s because runtime parameters contains an uncopyable object! rtParams = %s" %(self.meme, rtParams)
Graph.logQ.put( [logType , logLevel.ERROR , method , errorMsg])
except Exception as e:
errorMsg = "Unknown error mapping state change for keyframe object of action %s. rtParams = %s Traceback = %s" %(self.meme, rtParams, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
finally: return None
def mapFunctionSetEulerTransforms(self, script, landmarkTransform):
method = moduleName + '.' + self.className + '.' + 'mapFunctionSetEulerTransforms'
try:
transformDict = landmarkTransform[1]
landmarkID = landmarkTransform[0]
eulerElem = script.getLinkCounterpartsByMetaMemeType(landmarkID, "Agent.Offset::Agent.EuerAngles")
if len(eulerElem) > 0:
eulerXElem = script.getLinkCounterpartsByMetaMemeType(eulerElem, "Agent.RotationX")
eulerYElem = script.getLinkCounterpartsByMetaMemeType(eulerElem, "Agent.RotationY")
eulerZElem = script.getLinkCounterpartsByMetaMemeType(eulerElem, "Agent.RotationZ")
unusedEulerX = script.setEntityPropertyValue(eulerXElem[0], "Angle", transformDict["rotationX"])
unusedEulerY = script.setEntityPropertyValue(eulerYElem[0], "Angle", transformDict["rotationY"])
unusedEulerZ = script.setEntityPropertyValue(eulerZElem[0], "Angle", transformDict["rotationZ"])
except Exception as e:
errorMsg = "Unknown error mapping euler transforms for keyframe object of action %s. landmarkTransform = %s Traceback = %s" %(self.meme, landmarkTransform[1], e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
finally: return True
def mapFunctionSetDeltaTransforms(self, script, landmarkTransform):
method = moduleName + '.' + self.className + '.' + 'mapFunctionSetDeltaTransforms'
try:
transformDict = landmarkTransform[1]
landmarkID = landmarkTransform[0]
offsetElem = script.getLinkCounterpartsByMetaMemeType(landmarkID, "Agent.Offset")
if len(offsetElem) > 0:
unusedDeltaX = script.setEntityPropertyValue(offsetElem[0], "x", transformDict["deltaX"])
unusedDeltaY = script.setEntityPropertyValue(offsetElem[0], "y", transformDict["deltaY"])
unusedDeltaZ = script.setEntityPropertyValue(offsetElem[0], "z", transformDict["deltaZ"])
except Exception as e:
errorMsg = "Unknown error mapping delta transforms for keyframe object of action %s. landmarkTransform = %s Traceback = %s" %(self.meme, landmarkTransform[1], e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
finally: return True
def checkEulerAngles(self, script, landmarkUUID, rotationX, rotationY, rotationZ):
"""
If the euler angles of the keyframe are not all None, then check them against constraints
"""
method = moduleName + '.' + self.className + '.' + 'checkEulerAngles'
try:
if (rotationX is None) and (rotationY is None) and (rotationZ is None):
return
anglesMax = script.getLinkCounterpartsByMetaMemeType(landmarkUUID, "Agent.EulerAnglesMax")
#First the max
if len(anglesMax) > 0:
maxRotationXElem = script.getLinkCounterpartsByMetaMemeType(anglesMax[0], "Agent.RotationX")
maxRotationYElem = script.getLinkCounterpartsByMetaMemeType(anglesMax[0], "Agent.RotationY")
maxRotationZElem = script.getLinkCounterpartsByMetaMemeType(anglesMax[0], "Agent.RotationZ")
maxRotationX = script.getEntityPropertyValue(maxRotationXElem[0], "Angle")
maxRotationY = script.getEntityPropertyValue(maxRotationYElem[0], "Angle")
maxRotationZ = script.getEntityPropertyValue(maxRotationZElem[0], "Angle")
if (rotationX is not None) and (maxRotationX is not None):
if rotationX > maxRotationX:
raise Exceptions.EntityMaxXAngleExceeded("%s > %s" %(rotationX, maxRotationX))
if (rotationY is not None) and (maxRotationY is not None):
if rotationY > maxRotationY:
raise Exceptions.EntityMaxYAngleExceeded("%s > %s" %(rotationY, maxRotationY))
if (rotationZ is not None) and (maxRotationZ is not None):
if rotationZ > maxRotationZ:
raise Exceptions.EntityMaxZAngleExceeded("%s > %s" %(rotationZ, maxRotationZ))
#Then the min
anglesMin = script.getLinkCounterpartsByMetaMemeType(landmarkUUID, "Agent.EulerAnglesMin")
if len(anglesMin) > 0:
minRotationXElem = script.getLinkCounterpartsByMetaMemeType(anglesMin[0], "Agent.RotationX")
minRotationYElem = script.getLinkCounterpartsByMetaMemeType(anglesMin[0], "Agent.RotationY")
minRotationZElem = script.getLinkCounterpartsByMetaMemeType(anglesMin[0], "Agent.RotationZ")
minRotationX = script.getEntityPropertyValue(minRotationXElem[0], "Angle")
minRotationY = script.getEntityPropertyValue(minRotationYElem[0], "Angle")
minRotationZ = script.getEntityPropertyValue(minRotationZElem[0], "Angle")
if (rotationX is not None) and (minRotationX is not None):
if rotationX < minRotationX:
raise Exceptions.EntityMaxXAngleExceeded("%s < %s" %(rotationX, minRotationX))
if (rotationY is not None) and (minRotationY is not None):
if rotationY < minRotationY:
raise Exceptions.EntityMaxYAngleExceeded("%s < %s" %(rotationY, minRotationY))
if (rotationZ is not None) and (minRotationZ is not None):
if rotationZ < minRotationZ:
raise Exceptions.EntityMaxZAngleExceeded("%s < %s" %(rotationZ, minRotationZ))
except Exception as e:
errorMsg = "Unknown error checking euler angles for keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
def checkDeltas(self, script, landmarkUUID, deltaX, deltaY, deltaZ):
method = moduleName + '.' + self.className + '.' + 'checkDeltas'
try:
#First the max
deltasMax = script.getLinkCounterpartsByMetaMemeType(landmarkUUID, "Agent.OffsetMax")
if len(deltasMax) > 0:
maxDeltaX = script.getEntityPropertyValue(deltasMax[0], "x")
maxDeltaY = script.getEntityPropertyValue(deltasMax[0], "y")
maxDeltaZ = script.getEntityPropertyValue(deltasMax[0], "z")
if (deltaX is not None) and (maxDeltaX is not None):
if deltaX > maxDeltaX:
raise Exceptions.EntityMaxXOffsetExceeded("%s > %s" %(deltaX, maxDeltaX))
if (deltaY is not None) and (maxDeltaY is not None):
if deltaY > maxDeltaY:
raise Exceptions.EntityMaxYOffsetExceeded("%s > %s" %(deltaY, maxDeltaY))
if (deltaZ is not None) and (maxDeltaZ is not None):
if deltaZ > maxDeltaZ:
raise Exceptions.EntityMaxZOffsetExceeded("%s > %s" %(deltaZ, maxDeltaZ))
#Then the min
deltasMin = script.getLinkCounterpartsByMetaMemeType(landmarkUUID, "Agent.OffsetMin")
if len(deltasMin) > 0:
minDeltaX = script.getEntityPropertyValue(deltasMin[0], "x")
minDeltaY = script.getEntityPropertyValue(deltasMin[0], "y")
minDeltaZ = script.getEntityPropertyValue(deltasMin[0], "z")
if (deltaX is not None) and (minDeltaX is not None):
if deltaX < minDeltaX:
raise Exceptions.EntityMinXOffsetExceeded("%s < %s" %(deltaX, minDeltaX))
if (deltaY is not None) and (minDeltaY is not None):
if deltaY < minDeltaY:
raise Exceptions.EntityMinYOffsetExceeded("%s < %s" %(deltaY, minDeltaY))
if (deltaZ is not None) and (minDeltaZ is not None):
if deltaZ < minDeltaZ:
raise Exceptions.EntityMinZOffsetExceeded("%s < %s" %(deltaZ, minDeltaZ))
except Exception as e:
errorMsg = "Unknown error checking entity displacement (deltas) for keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
def checkTransforms(self, script, agentID):
method = moduleName + '.' + self.className + '.' + 'checkTransforms'
try:
#transformDict = {"deltaX" : deltaX, "deltaY" : deltaY, "deltaZ" : deltaZ, "rotationX" : rotationX, "rotationY" : rotationY, "rotationZ" : rotationZ}
#self.landmarkTransforms.append([reqLMs[0], transformDict])
transformsOK = []
eulerOK = script.map(self.mapFunctionCheckEulerTransforms, self.landmarkTransforms, agentID)
deltaOK = script.map(self.mapFunctionCheckDeltaTransforms, self.landmarkTransforms, agentID)
transformsOK.extend(eulerOK)
transformsOK.extend(deltaOK)
if False in transformsOK:
return False
else:
return True
except Exception as e:
errorMsg = "Unknown error checking transforms for keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
return False
# /Landmarks
# objects
def selectObjects(self, script, rtParams, objectID = None):
"""
Select all object agents in scope of view that also meet the conditions required for selection:
'Action.ObjectSelectionCondition::Graphyne.Condition.Condition'
Here are the rules:
If there is a view with an action perspective, we limit ourselves to that scope
If there are no selection conditions and no objectID, all agents in scope are selected
If objectID is selected and it is not in scope, the action is dropped
If objectID is selected and in scope, the action goes to that object, plus others in scope meeting conditions
If objectID is not in scope, but other objects are and meet the conditions, they get the action, but not objectID
If there is no action perspective (View directly off of KeyFrame instead of via Landmark on subject)
If there are no selection conditions and no objectID; dropped
If there are no selection conditions, but objectID; the action goes to that object
"""
method = moduleName + '.' + self.className + '.' + 'selectObjects'
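#Summary of the branches below (view = self.view, conds = self.objectSelectionConditions):
# view set, no conds, no objectID -> every agent in the view's page
# view set, no conds, objectID    -> [objectID] if it is in the page, else []
# view set, conds present         -> agents in the subject's view that pass the conditions
# no view, no conds, no objectID  -> []
# no view, no conds, objectID     -> [objectID]
# no view, conds, objectID        -> agents in the subject's view passing the conditions, plus objectID
# no view, conds, no objectID     -> agents in the subject's view passing the conditions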
try:
if self.view is not None:
#Use 'action perspective' view
if (len(self.objectSelectionConditions) < 1) and (objectID is None):
viewList = script.getAllAgentsInSpecifiedPage(self.view)
return viewList
elif (len(self.objectSelectionConditions) < 1) and (objectID is not None):
viewList = script.getAllAgentsInSpecifiedPage(self.view)
if objectID in viewList:
return [objectID]
else:
return []
else:
intersectedObjects = script.getAllAgentsInAgentView(rtParams["subjectID"])
viewList = script.map(self.mapFunctionObjects, intersectedObjects, rtParams)
viewList.remove(None)
return viewList
else:
#Use 'subject perspective' view
if (len(self.objectSelectionConditions) < 1) and (objectID is None):
return []
elif (len(self.objectSelectionConditions) < 1) and (objectID is not None):
return [objectID]
elif objectID is not None:
intersectedObjects = script.getAllAgentsInAgentView(rtParams["subjectID"])
viewList = script.map(self.mapFunctionObjects, intersectedObjects, rtParams)
viewList.remove(None)
if objectID not in viewList:
viewList.append(objectID)
return viewList
else:
intersectedObjects = script.getAllAgentsInAgentView(rtParams["subjectID"])
viewList = script.map(self.mapFunctionObjects, intersectedObjects, rtParams)
viewList.remove(None)
return viewList
except Exception as e:
errorMsg = "Unknown error selecting object entities for keyframe object of action %s. rtParams = %s Traceback = %s" %(self.meme, rtParams, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
return []
# /objects
# State Changes
def changeStates(self, script, rtParams):
method = moduleName + '.' + self.className + '.' + 'changeStates'
try:
self.script = script
stateChangeStimuli = script.map(self.mapFunctionStateChangesOuter, rtParams["objectID"], rtParams)
self.conditionalStimuli.extend(stateChangeStimuli)
except Exception as e:
errorMsg = "Unknown error changing states for keyframe object of action %s. rtParams = %s Traceback = %s" %(self.meme, rtParams, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
#/ State Changes
#Transforms
def setTransforms(self, script, objectIDs):
method = moduleName + '.' + self.className + '.' + 'setTransforms'
try:
unusedEulerDone = script.map(self.mapFunctionSetEulerTransforms, self.landmarkTransforms, objectIDs)
unusedDeltaDone = script.map(self.mapFunctionSetDeltaTransforms, self.landmarkTransforms, objectIDs)
except Exception as e:
errorMsg = "Unknown error setting transforms for keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
#/Transforms
# Stimuli
def broadcastStimuli(self, script, rtParams):
method = moduleName + '.' + self.className + '.' + 'broadcastStimuli'
try:
for conditionalStimulus in self.conditionalStimuli:
if conditionalStimulus is not None:
stimulusMessage = None
#Angela.StimulusMessage def __init__(self, stimulusID, argumentMap, targetAgents = []):
if ("stimuliRecipients" in rtParams) == True:
targets = rtParams["stimuliRecipients"]
stimulusMessage = Angela.StimulusMessage(conditionalStimulus, rtParams, targets)
else:
stimulusMessage = Angela.StimulusMessage(conditionalStimulus, rtParams, [])
Engine.siQ.put(stimulusMessage)
except Exception as e:
errorMsg = "Unknown error broadcasting stimuli for keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
def invoke(self, script, rtParams):
method = moduleName + '.' + self.className + '.' + 'invoke'
try:
#todo - refactor script.evaluateEntity to add objects
script.evaluateEntity(self.uuid, rtParams, rtParams['actionID'], rtParams['subjectID'], rtParams['objectID'])
except Exception as e:
errorMsg = "Unknown error invoking keyframe object of action %s. Traceback = %s" %(self.meme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method , errorMsg])
class Catch(Action, ConditionalAction):
className = 'Catch'
def bootstrap(self, script):
self.addConditions(script)
self.addLandMarks(script)
class Throw(Action, ConditionalAction):
className = 'Throw'
def bootstrap(self, script):
self.addConditions(script)
self.addLandMarks(script)
class StateChange(object):
def __init__(self, conditionID, stateChangeStimuli = []):
self.conditionID = conditionID
self.stateChangeStimuli = stateChangeStimuli
class StateChangeBreak(StateChange):
def prime(self, subjectPath, objectPath):
self.subjectPath = subjectPath
self.objectPath = objectPath
def execute(self, script, subjectID, objectID):
script.removeEntityLink(subjectID, objectID)
class StateChangeJoin(StateChange):
def prime(self, subjectPath, objectPath, linkType):
self.linkType = linkType
self.subjectPath = subjectPath
self.objectPath = objectPath
def execute(self, script, subjectID, objectID):
subjectMountPoint = script.getLinkCounterpartsByMetaMemeType(subjectID, self.subjectPath)
objectMountPoint = script.getLinkCounterpartsByMetaMemeType(subjectID, self.objectPath)
script.addEntityLink(subjectMountPoint[0], objectMountPoint[0], {}, self.linkType)
class StateChangeSimple(StateChange):
def prime(self, changeID, path):
#changeID is the uuid of the relevant Numeric.Function entity
#path is the property path to be changed
self.changeID = changeID
self.path = path
def execute(self, script, subjectID, objectID):
#todo - this requires an overhaul of how Numeric.Function is processed
delta = script.evaluateEntity(self.changeID)
oldPropValue = script.getEntityPropertyValue(objectID, self.path)
newPropValue = oldPropValue + delta
script.setEntityPropertyValue(objectID, self.path, newPropValue)
class StateChangeSuccessorAction(StateChange):
def prime(self, actionID, insertionType, priority):
self.actionID = actionID
self.insertionType = insertionType
self.priority = priority
def execute(self, subjectID, objectID):
#todo -
actionInvoc = {"actionID" : self.actionID, "subjectID" : subjectID, "objectID" : objectID, "controllerID" : None, "insertionType" : self.insertionType, "rtparams" : {}}
Engine.aQ.put(actionInvoc)
#globals
def getActionIndexItem(script, toBeIndexed):
method = moduleName + '.' + 'getActionIndexItem'
Graph.logQ.put( [logType , logLevel.DEBUG , method, " - entering"])
try:
actionMemes = []
action = None
actionMemes = script.getLinkCounterpartsByMetaMemeType(toBeIndexed, "Action.Throw")
if len(actionMemes) > 0:
memeName = script.getEntityMemeType(toBeIndexed)
Graph.logQ.put( [logType , logLevel.DEBUG , method, "Action %s is a Throw" %memeName])
try:
action = Throw()
action.initialize(script, actionMemes[0], toBeIndexed)
except Exception as e:
actionMeme = None
try: actionMeme = actionMemes[0]
except: pass
errorMsg = "Member Action.Throw entity %s is invalid" %actionMeme
raise Exceptions.TemplatePathError(errorMsg)
else:
actionMemes = script.getLinkCounterpartsByMetaMemeType(toBeIndexed, "Action.Catch")
if len(actionMemes) > 0:
Graph.logQ.put( [logType , logLevel.DEBUG , method, "Action %s is a Catch" %toBeIndexed])
try:
action = Catch()
action.initialize(script, actionMemes[0], toBeIndexed)
except Exception as e:
actionMeme = None
try: actionMeme = actionMemes[0]
except: pass
errorMsg = "Member Action.Catch entity %s is invalid" %actionMeme
raise Exceptions.TemplatePathError(errorMsg)
else:
memeName = Graph.api.getEntityMemeType(toBeIndexed)
actionMemes = script.getLinkCounterpartsByMetaMemeType(toBeIndexed, "Action.Choreography")
if len(actionMemes) > 0:
Graph.logQ.put( [logType , logLevel.DEBUG , method, "Action %s is a Choreography" %memeName])
try:
action = ActionSet()
action.initialize(script, actionMemes[0], toBeIndexed)
except Exception as e:
actionMeme = None
try: actionMeme = actionMemes[0]
except: pass
errorMsg = "Member Action.Choreography entity %s is invalid" %actionMeme
raise Exceptions.TemplatePathError(errorMsg)
else:
actionMemes = script.getLinkCounterpartsByMetaMemeType(toBeIndexed, "Action.KeyFrame")
if len(actionMemes) > 0:
Graph.logQ.put( [logType , logLevel.DEBUG , method, "Action %s is a KeyFrame" %memeName])
try:
action = KeyFrame()
action.initialize(script, actionMemes[0], toBeIndexed)
except Exception as e:
actionMeme = None
try: actionMeme = actionMemes[0]
except: pass
errorMsg = "Member Action.KeyFrame entity %s is invalid" %actionMeme
raise Exceptions.TemplatePathError(errorMsg)
else:
linkOverview = script.getEntityCounterparts(toBeIndexed)
errorMsg = "Action %s has no valid child type. Link overview = %s" %(memeName, linkOverview)
Graph.logQ.put( [logType , logLevel.WARNING , method, errorMsg])
#now finish creating the action object
action.bootstrap(script)
Graph.logQ.put( [logType , logLevel.DEBUG , method, "Bootstrapped %s %s" %(type(action), action.meme)])
Graph.logQ.put( [logType , logLevel.DEBUG , method, " - exiting"])
return action
except Exceptions.ScriptError as e:
actionMeme = script.getEntityMemeType(toBeIndexed)
errorMsg = "Error in method while creating action index item %s. Traceback = %s" %(actionMeme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method, errorMsg])
raise e
except Exception as e:
actionMeme = script.getEntityMemeType(toBeIndexed)
errorMsg = "Error creating action index item %s. Traceback = %s" %(actionMeme, e)
Graph.logQ.put( [logType , logLevel.WARNING , method, errorMsg])
raise e
def usage():
print(__doc__)
def main(argv):
pass
if __name__ == "__main__":
pass
| 54.495575
| 181
| 0.599968
|
be62c7098cbe3ce95d1a60eb0451c63426055ea2
| 1,109
|
py
|
Python
|
pepys_import/utils/sqlite_utils.py
|
debrief/pepys-import
|
12d29c0e0f69e1119400334983947893e7679b6b
|
[
"Apache-2.0"
] | 4
|
2021-05-14T08:22:47.000Z
|
2022-02-04T19:48:25.000Z
|
pepys_import/utils/sqlite_utils.py
|
debrief/pepys-import
|
12d29c0e0f69e1119400334983947893e7679b6b
|
[
"Apache-2.0"
] | 1,083
|
2019-11-06T17:01:07.000Z
|
2022-03-25T10:26:51.000Z
|
pepys_import/utils/sqlite_utils.py
|
debrief/pepys-import
|
12d29c0e0f69e1119400334983947893e7679b6b
|
[
"Apache-2.0"
] | 4
|
2019-11-06T12:00:45.000Z
|
2021-06-09T04:18:28.000Z
|
import os
import platform
SYSTEM = platform.system()
if SYSTEM == "Linux":
PLATFORM_EXTENSION_PATH = "mod_spatialite"
elif SYSTEM == "Darwin":  # Darwin is MacOS; pragma: no cover (tests only run on Linux)
PLATFORM_EXTENSION_PATH = "mod_spatialite"
elif SYSTEM == "Windows": # pragma: no cover (tests only run on Linux)
PLATFORM_EXTENSION_PATH = "mod_spatialite"
def load_spatialite(connection, connection_record):
"""
Loads the spatialite library into the SQLite database
Tries to load the library located in the PEPYS_SPATIALITE_PATH environment variable first
and otherwise falls back to the platform-specific paths defined in this file
"""
connection.enable_load_extension(True)
environment_path = os.environ.get("PEPYS_SPATIALITE_PATH")
if environment_path:
connection.load_extension(environment_path)
else:
connection.load_extension(PLATFORM_EXTENSION_PATH)
def set_sqlite_foreign_keys_on(dbapi_connection, connection_record):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
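# Illustrative wiring (a sketch, not part of this module's behaviour): both
# functions above have the (dbapi_connection, connection_record) signature
# expected by SQLAlchemy "connect" event listeners, so they would typically be
# registered like this (the engine URL below is hypothetical):
#
#     from sqlalchemy import create_engine, event
#
#     engine = create_engine("sqlite:///pepys.sqlite")
#     event.listen(engine, "connect", load_spatialite)
#     event.listen(engine, "connect", set_sqlite_foreign_keys_on)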
| 32.617647
| 93
| 0.753832
|
2418d3b5095a43d66e829a5915407433354e796d
| 2,837
|
py
|
Python
|
test/snake/test_object_collisions.py
|
TeamSerpentine/retro-baselines
|
9b2c725604496aca9c382a53f456d31fdbcaa5b1
|
[
"BSD-3-Clause"
] | 2
|
2019-12-09T08:41:13.000Z
|
2020-10-22T02:29:22.000Z
|
test/snake/test_object_collisions.py
|
TeamSerpentine/retro-baselines
|
9b2c725604496aca9c382a53f456d31fdbcaa5b1
|
[
"BSD-3-Clause"
] | null | null | null |
test/snake/test_object_collisions.py
|
TeamSerpentine/retro-baselines
|
9b2c725604496aca9c382a53f456d31fdbcaa5b1
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
from snake import constants
from snake.objects.constants import LEN_SNAKE_START
from snake.boards.classic import Board
from snake.objects.wall import Wall
from snake.objects.ground import Ground
from snake.objects.apple import Apple
from snake.objects.snake import Snake
from snake.objects.utils import Point
class TestCollision(unittest.TestCase):
def setUp(self) -> None:
self.objects_name = ["wall", "ground", "apple", "snake"]
self.wall = Wall(Point(0, 0))
self.ground = Ground(Point(10, 10))
self.apple = Apple(Point(20, 20))
self.snake = Snake(Point(30, 30))
self.wall_same = Wall(Point(0, 0))
self.ground_same = Ground(Point(0, 0))
self.apple_same = Apple(Point(0, 0))
self.snake_same = Snake(Point(0, 0))
self.objects_diff = [getattr(self, name) for name in self.objects_name]
self.objects_same = [getattr(self, name + "_same") for name in self.objects_name]
def test_collisions(self):
for each in self.objects_name:
test_objects_diff = [item.clone() for item in self.objects_diff]
test_objects_diff.pop(self.objects_name.index(each))
result = [getattr(self, each).collide(item) for item in test_objects_diff]
self.assertEqual([False, False, False], result, "Incorrect collision detected")
test_objects_same = [item.clone() for item in self.objects_same]
test_objects_same.pop(self.objects_name.index(each))
result = [getattr(self, each + "_same").collide(item) for item in test_objects_same]
self.assertEqual([True, True, True], result, "Incorrect collision detected")
def test_snake_eat_apple(self):
self.board = Board(width=50, height=50)
self.board.add_object("snake", Point(x=20, y=33))
self.board.add_object("apple", Point(x=20, y=32))
snake = self.board.objects['snake'][0]
_, reward, _, _ = self.board.step(constants.GET_ACTION_MEANING.index("UP"))
self.assertEqual(constants.DEFAULT_REWARD_PER_APPLE + constants.DEFAULT_REWARD_PER_STEP, reward,
"Snake eating apple incorrect points awarded")
self.assertEqual(LEN_SNAKE_START+1, len(snake),
"Snake length is not increased after eating apple")
def test_snake_wall(self):
self.board = Board(width=50, height=50)
self.board.add_object("snake", Point(x=1, y=30))
snake = self.board.objects['snake'][0]
_, _, done, _ = self.board.step(constants.GET_ACTION_MEANING.index("LEFT"))
self.board.step(1)
self.assertEqual(True, done, "Game over is not detected upon dying")
self.assertEqual(False, snake.alive,
"Snake didn't die upon hitting the wall")
| 39.957746
| 104
| 0.657032
|
85796a3618e57a47a527ef93fa03ee7a6d3bcf19
| 454
|
py
|
Python
|
shadowrun_prototype/defs/mach.py
|
holy-crust/reclaimer
|
0aa693da3866ce7999c68d5f71f31a9c932cdb2c
|
[
"MIT"
] | null | null | null |
shadowrun_prototype/defs/mach.py
|
holy-crust/reclaimer
|
0aa693da3866ce7999c68d5f71f31a9c932cdb2c
|
[
"MIT"
] | null | null | null |
shadowrun_prototype/defs/mach.py
|
holy-crust/reclaimer
|
0aa693da3866ce7999c68d5f71f31a9c932cdb2c
|
[
"MIT"
] | null | null | null |
from ...hek.defs.mach import *
from .obje import *
# replace the object_type enum with one that uses
# the correct default value for this object
obje_attrs = dict(obje_attrs)
obje_attrs[0] = dict(obje_attrs[0], DEFAULT=7)
mach_body = dict(mach_body)
mach_body[0] = obje_attrs
def get():
return mach_def
mach_def = TagDef("mach",
blam_header('mach'),
mach_body,
ext=".device_machine", endian=">", tag_cls=MachTag
)
| 21.619048
| 55
| 0.671806
|
96beaba21dfc7ed4699c76f171e32e5cdbe15346
| 22,874
|
py
|
Python
|
tests/integration/cattletest/core/common_fixtures.py
|
mbrukman/rancher-cattle
|
ac7caffb97346f601043458411391d2d00fd6129
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/cattletest/core/common_fixtures.py
|
mbrukman/rancher-cattle
|
ac7caffb97346f601043458411391d2d00fd6129
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/cattletest/core/common_fixtures.py
|
mbrukman/rancher-cattle
|
ac7caffb97346f601043458411391d2d00fd6129
|
[
"Apache-2.0"
] | null | null | null |
import gdapi
import cattle
import os
import pytest
import random
import time
import inspect
from datetime import datetime, timedelta
NOT_NONE = object()
DEFAULT_TIMEOUT = 90
DEFAULT_AGENT_URI = 'ssh://root@localhost:22'
DEFAULT_AGENT_UUID = 'test-agent'
SLEEP_DELAY = 0.5
ACCOUNT_LIST = ['admin', 'agent', 'user', 'agentRegister',
'readAdmin', 'token', 'superadmin', 'service', 'project']
ADMIN_HEADERS = dict(gdapi.HEADERS)
ADMIN_HEADERS['X-API-Project-Id'] = 'USER'
@pytest.fixture(scope='session')
def cattle_url():
default_url = 'http://localhost:8080/v1/schemas'
return os.environ.get('CATTLE_URL', default_url)
def _admin_client():
return cattle.from_env(url=cattle_url(),
headers=ADMIN_HEADERS,
cache=False,
access_key='admin',
secret_key='adminpass')
def _client_for_user(name, accounts):
return cattle.from_env(url=cattle_url(),
cache=False,
access_key=accounts[name][0],
secret_key=accounts[name][1])
def client_for_project(project):
access_key = random_str()
secret_key = random_str()
admin_client = _admin_client()
active_cred = None
account = project
for cred in account.credentials():
if cred.kind == 'apiKey' and cred.publicValue == access_key\
and cred.secretValue == secret_key:
active_cred = cred
break
if active_cred is None:
active_cred = admin_client.create_api_key({
'accountId': account.id,
'publicValue': access_key,
'secretValue': secret_key
})
active_cred = wait_success(admin_client, active_cred)
if active_cred.state != 'active':
wait_success(admin_client, active_cred.activate())
return cattle.from_env(url=cattle_url(),
cache=False,
access_key=access_key,
secret_key=secret_key)
def create_user(admin_client, user_name, kind=None):
if kind is None:
kind = user_name
password = user_name + 'pass'
account = create_type_by_uuid(admin_client, 'account', user_name,
kind=kind,
name=user_name)
active_cred = None
for cred in account.credentials():
if cred.kind == 'apiKey' and cred.publicValue == user_name \
and cred.secretValue == password:
active_cred = cred
break
if active_cred is None:
active_cred = admin_client.create_api_key({
'accountId': account.id,
'publicValue': user_name,
'secretValue': password
})
active_cred = wait_success(admin_client, active_cred)
if active_cred.state != 'active':
wait_success(admin_client, active_cred.activate())
return [user_name, password, account]
@pytest.fixture(scope='session')
def accounts():
result = {}
admin_client = _admin_client()
for user_name in ACCOUNT_LIST:
result[user_name] = create_user(admin_client,
user_name,
kind=user_name)
result['admin'] = create_user(admin_client, 'admin')
system_account = admin_client.list_account(kind='system', uuid='system')[0]
result['system'] = [None, None, system_account]
return result
@pytest.fixture(scope='session')
def clients(accounts):
clients = {}
for account in ACCOUNT_LIST:
clients[account] = _client_for_user(account, accounts)
return clients
@pytest.fixture(scope='session')
def system_account(accounts):
return accounts['system'][2]
@pytest.fixture(scope='session')
def admin_account(accounts):
return accounts['admin'][2]
@pytest.fixture(scope='session')
def user_account(accounts):
return accounts['user'][2]
@pytest.fixture(scope='session')
def token_account(accounts):
return accounts['token'][2]
@pytest.fixture(scope='session')
def super_account(accounts):
return accounts['superadmin'][2]
@pytest.fixture(scope='session')
def client(accounts):
return _client_for_user('user', accounts)
@pytest.fixture(scope='session')
def super_admin_client(accounts):
return _client_for_user('superadmin', accounts)
@pytest.fixture(scope='session')
def admin_client(accounts):
return _client_for_user('admin', accounts)
@pytest.fixture(scope='session')
def super_client(request, accounts):
ret = _client_for_user('superadmin', accounts)
request.addfinalizer(
lambda: delete_sim_instances(ret))
return ret
@pytest.fixture(scope='session')
def token_client(accounts):
return _client_for_user('token', accounts)
@pytest.fixture(scope='session')
def agent_client(accounts):
return _client_for_user('agent', accounts)
@pytest.fixture(scope='session')
def service_client(accounts):
return _client_for_user('service', accounts)
def create_sim_context(super_client, uuid, ip=None, account=None,
public=False):
context = kind_context(super_client,
'sim',
external_pool=True,
account=account,
uri='sim://' + uuid,
uuid=uuid,
host_public=public)
context['imageUuid'] = 'sim:{}'.format(random_num())
host = context['host']
if len(host.ipAddresses()) == 0 and ip is not None:
ip = create_and_activate(super_client, 'ipAddress',
address=ip,
isPublic=public)
map = super_client.create_host_ip_address_map(hostId=host.id,
ipAddressId=ip.id)
map = super_client.wait_success(map)
assert map.state == 'active'
if len(host.ipAddresses()):
context['hostIp'] = host.ipAddresses()[0]
return context
@pytest.fixture(scope='session')
def sim_context(request, super_client):
context = create_sim_context(super_client, 'simagent1', ip='192.168.10.10',
public=True)
return context
@pytest.fixture(scope='session')
def sim_context2(super_client):
return create_sim_context(super_client, 'simagent2', ip='192.168.10.11',
public=True)
@pytest.fixture(scope='session')
def sim_context3(super_client):
return create_sim_context(super_client, 'simagent3', ip='192.168.10.12',
public=True)
@pytest.fixture
def new_sim_context(super_client):
uri = 'sim://' + random_str()
sim_context = kind_context(super_client, 'sim', uri=uri, uuid=uri)
sim_context['imageUuid'] = 'sim:{}'.format(random_num())
for i in ['host', 'pool', 'agent']:
sim_context[i] = super_client.wait_success(sim_context[i])
host = sim_context['host']
pool = sim_context['pool']
agent = sim_context['agent']
assert host is not None
assert pool is not None
assert agent is not None
return sim_context
@pytest.fixture(scope='session')
def user_sim_context(super_client, user_account):
return create_sim_context(super_client, 'usersimagent', ip='192.168.11.1',
account=user_account)
@pytest.fixture(scope='session')
def user_sim_context2(super_client, user_account):
return create_sim_context(super_client, 'usersimagent2', ip='192.168.11.2',
account=user_account)
@pytest.fixture(scope='session')
def user_sim_context3(super_client, user_account):
return create_sim_context(super_client, 'usersimagent3', ip='192.168.11.3',
account=user_account)
def activate_resource(admin_client, obj):
if obj.state == 'inactive':
obj = wait_success(admin_client, obj.activate())
return obj
def find_by_uuid(admin_client, type, uuid, activate=True, **kw):
objs = admin_client.list(type, uuid=uuid)
assert len(objs) == 1
obj = wait_success(admin_client, objs[0])
if activate:
return activate_resource(admin_client, obj)
return obj
def create_type_by_uuid(admin_client, type, uuid, activate=True, validate=True,
**kw):
opts = dict(kw)
opts['uuid'] = uuid
objs = admin_client.list(type, uuid=uuid)
obj = None
if len(objs) == 0:
obj = admin_client.create(type, **opts)
else:
obj = objs[0]
obj = wait_success(admin_client, obj)
if activate and obj.state == 'inactive':
obj.activate()
obj = wait_success(admin_client, obj)
if validate:
for k, v in opts.items():
assert getattr(obj, k) == v
return obj
def random_num():
return random.randint(0, 1000000)
@pytest.fixture
def random_str():
return 'random-{0}'.format(random_num())
def wait_all_success(client, objs, timeout=DEFAULT_TIMEOUT):
ret = []
for obj in objs:
obj = wait_success(client, obj, timeout)
ret.append(obj)
return ret
def wait_success(client, obj, timeout=DEFAULT_TIMEOUT):
return client.wait_success(obj, timeout=timeout)
def wait_transitioning(client, obj, timeout=DEFAULT_TIMEOUT):
return client.wait_transitioning(obj, timeout=timeout)
@pytest.fixture
def wait_for_condition(client, resource, check_function, fail_handler=None,
timeout=DEFAULT_TIMEOUT):
start = time.time()
resource = client.reload(resource)
while not check_function(resource):
if time.time() - start > timeout:
exceptionMsg = 'Timeout waiting for ' + resource.kind + \
' to satisfy condition: ' + \
inspect.getsource(check_function)
if (fail_handler):
exceptionMsg = exceptionMsg + fail_handler(resource)
raise Exception(exceptionMsg)
time.sleep(.5)
resource = client.reload(resource)
return resource
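# Hedged usage sketch (not part of the original fixtures; the resource and
# field names below are assumed examples): wait_for_condition polls the
# reloaded resource until the check function passes or the timeout is hit,
# appending the fail_handler's text to the timeout message.
#
#   container = wait_for_condition(
#       client, container,
#       lambda c: c.state == 'running',
#       lambda c: ' (last observed state: {})'.format(c.state))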
def assert_fields(obj, fields):
assert obj is not None
for k, v in fields.items():
assert k in obj
if v is None:
assert obj[k] is None
elif v is NOT_NONE:
assert obj[k] is not None
else:
assert obj[k] == v
def assert_removed_fields(obj):
assert obj.removed is not None
assert obj.removeTime is not None
assert obj.removeTimeTS > obj.removedTS
def assert_restored_fields(obj):
assert obj.removed is None
assert obj.removeTime is None
def now():
return datetime.utcnow()
def format_time(time):
return (time - timedelta(microseconds=time.microsecond)).isoformat() + 'Z'
def get_agent(admin_client, name, default_uri=DEFAULT_AGENT_URI,
default_agent_uuid=DEFAULT_AGENT_UUID, account=None):
name = name.upper()
uri_name = '{0}_URI'.format(name.upper())
uuid_name = '{0}_AGENT_UUID'.format(name.upper())
uri = os.getenv(uri_name, default_uri)
uuid = os.getenv(uuid_name, default_agent_uuid)
data = {}
if account is not None:
account_id = get_plain_id(admin_client, account)
data['agentResourcesAccountId'] = account_id
agent = create_type_by_uuid(admin_client, 'agent', uuid, validate=False,
uri=uri, data=data)
if account is not None:
assert agent.data.agentResourcesAccountId == account_id
while len(agent.hosts()) == 0:
time.sleep(SLEEP_DELAY)
return agent
def kind_context(admin_client, kind, external_pool=False,
uri=DEFAULT_AGENT_URI,
uuid=DEFAULT_AGENT_UUID,
host_public=False,
agent=None,
account=None):
if agent is None:
kind_agent = get_agent(admin_client, kind, default_agent_uuid=uuid,
default_uri=uri, account=account)
else:
kind_agent = agent
hosts = filter(lambda x: x.kind == kind and x.removed is None,
kind_agent.hosts())
assert len(hosts) == 1
kind_host = activate_resource(admin_client, hosts[0])
if kind_host.isPublic != host_public:
kind_host = admin_client.update(kind_host, isPublic=host_public)
assert kind_host.isPublic == host_public
assert kind_host.accountId == kind_agent.accountId or \
get_plain_id(admin_client, kind_host.account()) == \
str(kind_agent.data.agentResourcesAccountId)
pools = kind_host.storagePools()
assert len(pools) == 1
kind_pool = activate_resource(admin_client, pools[0])
assert kind_pool.accountId == kind_agent.accountId or \
get_plain_id(admin_client, kind_pool.account()) == \
str(kind_agent.data.agentResourcesAccountId)
context = {
'host': kind_host,
'pool': kind_pool,
'agent': kind_agent
}
if external_pool:
pools = admin_client.list_storagePool(kind=kind, external=True)
assert len(pools) == 1
context['external_pool'] = activate_resource(admin_client, pools[0])
assert pools[0].accountId is not None
return context
def assert_required_fields(method, **kw):
method(**kw)
for k in kw.keys():
args = dict(kw)
del args[k]
try:
method(**args)
# This is supposed to fail
assert k == ''
except cattle.ApiError as e:
assert e.error.code == 'MissingRequired'
assert e.error.fieldName == k
def get_plain_id(admin_client, obj):
ret = admin_client.list(obj.type, uuid=obj.uuid, _plainId='true')
assert len(ret) == 1
return ret[0].id
def get_by_plain_id(super_client, type, id):
obj = super_client.by_id(type, id, _plainId='true')
if obj is None:
return None
objs = super_client.list(type, uuid=obj.uuid)
if len(objs) == 0:
return None
return objs[0]
def create_and_activate(client, type, **kw):
obj = client.create(type, **kw)
obj = client.wait_success(obj)
if obj.state == 'inactive':
obj = client.wait_success(obj.activate())
assert obj.state == 'active'
return obj
def delete_sim_instances(admin_client):
to_delete = []
to_delete.extend(admin_client.list_instance(state='running', limit=1000))
to_delete.extend(admin_client.list_instance(state='starting', limit=1000))
to_delete.extend(admin_client.list_instance(state='stopped', limit=1000))
for c in to_delete:
hosts = c.hosts()
if len(hosts) and hosts[0].kind == 'sim':
nsps = c.networkServiceProviders()
if len(nsps) > 0 and nsps[0].uuid == 'nsp-test-nsp':
continue
try:
admin_client.delete(c)
except:
pass
for state in ['active', 'reconnecting']:
for a in admin_client.list_agent(state=state, include='instances',
uri_like='delegate%'):
if not callable(a.instances):
for i in a.instances:
if i.state != 'running' and len(i.hosts()) > 0 and \
i.hosts()[0].agent().uri.startswith('sim://'):
a.deactivate()
break
def one(method, *args, **kw):
ret = method(*args, **kw)
assert len(ret) == 1
return ret[0]
def process_instances(admin_client, obj, id=None, type=None):
if id is None:
id = get_plain_id(admin_client, obj)
if type is None:
type = obj.type
return admin_client.list_process_instance(resourceType=type, resourceId=id,
sort='startTime')
def auth_check(schema, id, access, props=None):
type = schema.types[id]
access_actual = set()
try:
if 'GET' in type.collectionMethods:
access_actual.add('r')
except AttributeError:
pass
try:
if 'GET' in type.resourceMethods:
access_actual.add('r')
except AttributeError:
pass
try:
if 'POST' in type.collectionMethods:
access_actual.add('c')
except AttributeError:
pass
try:
if 'DELETE' in type.resourceMethods:
access_actual.add('d')
except AttributeError:
pass
try:
if 'PUT' in type.resourceMethods:
access_actual.add('u')
except AttributeError:
pass
assert access_actual == set(access)
if props is None:
return 1
for i in ['name', 'description']:
if i not in props and i in type.resourceFields:
acl = set('r')
if 'c' in access_actual:
acl.add('c')
if 'u' in access_actual:
acl.add('u')
props[i] = ''.join(acl)
for i in ['created', 'removed', 'transitioning', 'transitioningProgress',
'removeTime', 'transitioningMessage', 'id', 'uuid', 'kind',
'state']:
if i not in props and i in type.resourceFields:
props[i] = 'r'
prop = set(props.keys())
prop_actual = set(type.resourceFields.keys())
assert prop_actual == prop
for name, field in type.resourceFields.items():
assert name in props
prop = set(props[name])
prop_actual = set('r')
prop.add(name)
prop_actual.add(name)
if field.create and 'c' in access_actual:
prop_actual.add('c')
if field.update and 'u' in access_actual:
prop_actual.add('u')
assert prop_actual == prop
return 1
def resource_action_check(schema, id, actions):
action_keys = set(actions)
keys_received = set(schema.types[id].resourceActions.keys())
assert keys_received == action_keys
def wait_for(callback, timeout=DEFAULT_TIMEOUT):
start = time.time()
ret = callback()
while ret is None or ret is False:
time.sleep(.5)
if time.time() - start > timeout:
raise Exception('Timeout waiting for condition')
ret = callback()
return ret
def find_one(method, *args, **kw):
return find_count(1, method, *args, **kw)[0]
def find_count(count, method, *args, **kw):
ret = method(*args, **kw)
assert len(ret) == count
return ret
def create_sim_container(admin_client, sim_context, *args, **kw):
c = admin_client.create_container(*args,
imageUuid=sim_context['imageUuid'],
**kw)
c = admin_client.wait_success(c)
assert c.state == 'running'
return c
def create_agent_instance_nsp(admin_client, sim_context):
accountId = sim_context['host'].accountId
network = create_and_activate(admin_client, 'hostOnlyNetwork',
hostVnetUri='test:///',
dynamicCreateVnet=True,
accountId=accountId)
create_and_activate(admin_client, 'subnet',
networkAddress='192.168.0.0',
networkId=network.id,
accountId=accountId)
return create_and_activate(admin_client, 'agentInstanceProvider',
networkId=network.id,
agentInstanceImageUuid=sim_context['imageUuid'],
accountId=accountId)
@pytest.fixture(scope='session')
def test_network(super_client, sim_context):
network = create_type_by_uuid(super_client, 'hostOnlyNetwork',
'nsp-test-network',
hostVnetUri='test:///',
dynamicCreateVnet=True)
create_type_by_uuid(super_client, 'subnet',
'nsp-test-subnet',
networkAddress='192.168.0.0',
networkId=network.id)
nsp = create_type_by_uuid(super_client, 'agentInstanceProvider',
'nsp-test-nsp',
networkId=network.id,
agentInstanceImageUuid='sim:test-nsp')
create_type_by_uuid(super_client, 'portService',
'nsp-test-port-service',
networkId=network.id,
networkServiceProviderId=nsp.id)
for i in nsp.instances():
i = super_client.wait_success(i)
if i.state != 'running':
super_client.wait_success(i.start())
agent = super_client.wait_success(i.agent())
if agent.state != 'active':
super_client.wait_success(agent.activate())
return network
def resource_pool_items(admin_client, obj, type=None, qualifier=None):
id = get_plain_id(admin_client, obj)
if type is None:
type = obj.type
if qualifier is None:
return admin_client.list_resource_pool(ownerType=type,
ownerId=id)
else:
return admin_client.list_resource_pool(ownerType=type,
ownerId=id,
qualifier=qualifier)
@pytest.fixture(scope='session')
def network(super_client):
network = create_type_by_uuid(super_client, 'network', 'test_vm_network',
isPublic=True)
subnet = create_type_by_uuid(super_client, 'subnet', 'test_vm_subnet',
isPublic=True,
networkId=network.id,
networkAddress='192.168.0.0',
cidrSize=24)
vnet = create_type_by_uuid(super_client, 'vnet', 'test_vm_vnet',
networkId=network.id,
uri='fake://')
create_type_by_uuid(super_client, 'subnetVnetMap', 'test_vm_vnet_map',
subnetId=subnet.id,
vnetId=vnet.id)
return network
@pytest.fixture(scope='session')
def subnet(admin_client, network):
subnets = network.subnets()
assert len(subnets) == 1
return subnets[0]
@pytest.fixture(scope='session')
def vnet(admin_client, subnet):
vnets = subnet.vnets()
assert len(vnets) == 1
return vnets[0]
def wait_setting_active(api_client, setting, timeout=45):
start = time.time()
setting = api_client.by_id('setting', setting.id)
while setting.value != setting.activeValue:
time.sleep(.5)
setting = api_client.by_id('setting', setting.id)
if time.time() - start > timeout:
msg = 'Timeout waiting for [{0}] to be done'.format(setting)
raise Exception(msg)
return setting
| 28.991128
| 79
| 0.59972
|
e8daaf516e404a7a66da0c1a71f341a4de61e5f2
| 678
|
py
|
Python
|
setup.py
|
Kiracorp/data-structures
|
33cc5387a4636031193a8214ff6f737689e304c4
|
[
"MIT"
] | null | null | null |
setup.py
|
Kiracorp/data-structures
|
33cc5387a4636031193a8214ff6f737689e304c4
|
[
"MIT"
] | null | null | null |
setup.py
|
Kiracorp/data-structures
|
33cc5387a4636031193a8214ff6f737689e304c4
|
[
"MIT"
] | null | null | null |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="data-structures-kiracorp",
version="0.0.1",
author="Kyler Green",
author_email="kairaanomeiru@gmail.com",
description="Implementation for common data structues",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/Kiracorp/data-structures",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
)
| 30.818182
| 59
| 0.676991
|
e0d1cf8573f776730d90b69474e28765ddef3bad
| 5,451
|
py
|
Python
|
tools/bin/query-recipe.py
|
jxtxinbing/ops-build
|
9008de2d8e100f3f868c66765742bca9fa98f3f9
|
[
"Apache-2.0"
] | 16
|
2017-01-17T15:20:43.000Z
|
2021-03-19T05:45:14.000Z
|
tools/bin/query-recipe.py
|
jxtxinbing/ops-build
|
9008de2d8e100f3f868c66765742bca9fa98f3f9
|
[
"Apache-2.0"
] | 415
|
2016-12-20T17:20:45.000Z
|
2018-09-23T07:59:23.000Z
|
tools/bin/query-recipe.py
|
jxtxinbing/ops-build
|
9008de2d8e100f3f868c66765742bca9fa98f3f9
|
[
"Apache-2.0"
] | 10
|
2016-12-20T13:24:50.000Z
|
2021-03-19T05:46:43.000Z
|
#!/usr/bin/env python
# Copyright (C) 2016 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Parse the appropriate bitbake files for a recipe and return the value for the
requested variables
"""
from __future__ import print_function
import os
import sys
import argparse
import re
# tell python where to find poky devtool python libraries
this_path = os.path.dirname(os.path.realpath(__file__))
poky_path = os.path.realpath(os.path.join(this_path, '..', '..',
'yocto', 'poky'))
sys.path.append(os.path.join(poky_path, 'scripts', 'lib'))
def _parse_recipe(workspace_path, tinfoil, pn, appends=True):
"""Parse package recipe - adapted from poky devtool scripts"""
import oe.recipeutils
# Find top recipe file
recipefile = oe.recipeutils.pn_to_recipe(tinfoil.cooker, pn)
if not recipefile:
skipreasons = oe.recipeutils.get_unavailable_reasons(tinfoil.cooker, pn)
if skipreasons:
sys.exit('\n'.join(skipreasons))
else:
sys.exit("Unable to find any recipe file matching %s" % pn)
# gather append files
if appends:
append_files = tinfoil.cooker.collection.get_file_appends(recipefile)
else:
append_files = []
return oe.recipeutils.parse_recipe(recipefile, append_files,
tinfoil.config_data)
def _get_git_uri(rd):
"""Extract git url & extras from SCI_URL"""
for piece in rd.getVar('SRC_URI', expand=True).split():
if piece.startswith('git://'):
parts = re.split(r';', piece)
base = parts.pop(0)
extras = {}
for part in parts:
key, value = re.split(r'=', part)
extras[key] = value
if 'protocol' in extras:
base = re.sub(r'^git', extras['protocol'], base)
return base, extras
return None, None
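# Hedged illustration (the URL and options are assumed examples, not taken
# from any real recipe): a SRC_URI entry such as
#   git://git.example.com/foo.git;protocol=https;branch=devel
# would be split into the base "https://git.example.com/foo.git" and the
# extras dict {'protocol': 'https', 'branch': 'devel'}.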
def _getvar(rd, var):
if var == 'gitrepo':
uri, _ = _get_git_uri(rd)
return uri
elif var == 'gitbranch':
        uri, extras = _get_git_uri(rd)
if extras and 'branch' in extras:
return extras['branch']
if uri:
return 'master'
else:
return None
else:
return rd.getVar(var, expand=True)
def main():
# get and validate commandline args
parser = argparse.ArgumentParser(description=
'Query one or more variable values from a yocto package recipe.')
parser.add_argument('-v', '--var',
action='append',
dest='var',
help='Variable to fetch.')
parser.add_argument('--gitrepo',
action='append_const',
dest='var',
const='gitrepo',
help='Extract the git repo from SRC_URI.')
parser.add_argument('--gitbranch',
action='append_const',
dest='var',
const='gitbranch',
help='Extract the git branch from SRC_URI. (default: "master" if SRC_URI is git, but no branch specified)')
parser.add_argument('packages', nargs='+',
help='Name of package to query')
parser.add_argument('-s', '--shellsyntax', action='store_true',
help='Print the output in shell syntax')
global args
args = parser.parse_args()
if not args.var:
sys.exit("You must specify at least one var to get")
    # validate running environment
basepath = os.environ.get('BUILDDIR')
if not basepath:
sys.exit("BUILDDIR must be set in the environment")
os.chdir(basepath)
workspace_path = os.path.join(basepath, 'workspace')
# setting up tinfoil is very chatty to console
old_stdout = sys.stdout
old_stderr = sys.stderr
sys.stdout = open(os.devnull, 'w')
sys.stderr = sys.stdout
import devtool
tinfoil = devtool.setup_tinfoil()
# restore console streams
sys.stdout = old_stdout
sys.stderr = old_stderr
# parse the recipe file and print the requested variables
for package in args.packages:
rd = _parse_recipe(workspace_path, tinfoil, package)
if len(args.var) == 1:
val = _getvar(rd, args.var[0])
            if val is not None:
                if args.shellsyntax:
print("export {}_{}={}".format(args.var[0],package.replace("-","_"),val))
else:
print("{}".format(val))
else:
for var in args.var:
val = _getvar(rd, var)
                if args.shellsyntax:
print("export {}={}".format(var, '' if val is None else val))
else:
print("{}={}".format(var, '' if val is None else val))
if __name__ == "__main__":
main()
| 32.640719
| 131
| 0.588883
|
e1ccf4066c1c3d75ad06001a3fd24d32e45e89df
| 1,010
|
py
|
Python
|
hpt/inventory/admin.py
|
hollowpoint/hollowpoint
|
6f9d6c6f7147a3cec20d6e772567a29cebe2a365
|
[
"Apache-2.0"
] | 1
|
2016-03-31T19:52:03.000Z
|
2016-03-31T19:52:03.000Z
|
hpt/inventory/admin.py
|
hollowpoint/hollowpoint
|
6f9d6c6f7147a3cec20d6e772567a29cebe2a365
|
[
"Apache-2.0"
] | null | null | null |
hpt/inventory/admin.py
|
hollowpoint/hollowpoint
|
6f9d6c6f7147a3cec20d6e772567a29cebe2a365
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from . import models
# Register your models here.
class NetDeviceAdmin(admin.ModelAdmin):
"""Customizing the view of devices in the admin"""
list_display = ('nodeName', 'manufacturer', 'deviceType', 'make', 'model')
list_filter = ('manufacturer', 'deviceType', 'site')
search_fields = ('node_name',)
fieldsets = (
('Basics', {
'fields': ('node_name', 'nodePort'),
}),
('Hardware Info', {
'fields': ('manufacturer', 'deviceType', 'make', 'model', 'serialNumber')
}),
('Administrivia', {
'fields': (
'adminStatus', 'assetID', 'budgetCode', 'budgetName',
'enablePW', 'owningTeam', 'owner', 'onCallName',
'operationStatus', 'lifecycleStatus',
'projectName'),
}),
('Location', {
'fields': ('site', 'room', 'coordinate'),
}),
)
admin.site.register(models.NetDevice, NetDeviceAdmin)
| 34.827586
| 85
| 0.549505
|
2ebffbe91a22a7b6f751e844336d1cd788710162
| 5,412
|
py
|
Python
|
dags/dbm_to_storage_dag.py
|
danieldjewell/starthinker
|
3327d5874f01d7563603b8a82c1ecd6615b9768d
|
[
"Apache-2.0"
] | 1
|
2020-12-04T17:13:35.000Z
|
2020-12-04T17:13:35.000Z
|
dags/dbm_to_storage_dag.py
|
hgrias/starthinker
|
b9ed33e23b4ffd72565a31ebb8a8041d346bfca2
|
[
"Apache-2.0"
] | null | null | null |
dags/dbm_to_storage_dag.py
|
hgrias/starthinker
|
b9ed33e23b4ffd72565a31ebb8a8041d346bfca2
|
[
"Apache-2.0"
] | null | null | null |
###########################################################################
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
'''
--------------------------------------------------------------
Before running this Airflow module...
Install StarThinker in cloud composer ( recommended ):
From Release: pip install starthinker
From Open Source: pip install git+https://github.com/google/starthinker
Or push local code to the cloud composer plugins directory ( if pushing local code changes ):
source install/deploy.sh
4) Composer Menu
l) Install All
--------------------------------------------------------------
If any recipe task has "auth" set to "user" add user credentials:
    1. Ensure RECIPE['setup']['auth']['user'] = [User Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_user", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/deploy_commandline.md#optional-setup-user-credentials
--------------------------------------------------------------
If any recipe task has "auth" set to "service" add service credentials:
    1. Ensure RECIPE['setup']['auth']['service'] = [Service Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_service", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/cloud_service.md
--------------------------------------------------------------
DV360 To Storage
Move existing DV360 report into a Storage bucket.
- Specify either report name or report id to move a report.
- The most recent valid file will be moved to the bucket.
--------------------------------------------------------------
This StarThinker DAG can be extended with any additional tasks from the following sources:
- https://google.github.io/starthinker/
- https://github.com/google/starthinker/tree/master/dags
'''
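# Hedged sketch (not generated by StarThinker; the path below is an assumed
# example): if user credentials are provided inline instead of through an
# Airflow connection, the recipe's setup block could be filled in before the
# DAG is built, e.g.
#
#   RECIPE.setdefault('setup', {}).setdefault('auth', {})['user'] = \
#       '/home/airflow/gcs/data/user_credentials.json'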
from starthinker.airflow.factory import DAG_Factory
INPUTS = {
'auth_read': 'user', # Credentials used for reading data.
'dbm_report_id': '', # DV360 report ID given in UI, not needed if name used.
'auth_write': 'service', # Credentials used for writing data.
'dbm_report_name': '', # Name of report, not needed if ID used.
'dbm_bucket': '', # Google cloud bucket.
'dbm_path': '', # Path and filename to write to.
}
RECIPE = {
'tasks': [
{
'dbm': {
'auth': {
'field': {
'name': 'auth_read',
'kind': 'authentication',
'order': 1,
'default': 'user',
'description': 'Credentials used for reading data.'
}
},
'report': {
'report_id': {
'field': {
'name': 'dbm_report_id',
'kind': 'integer',
'order': 1,
'default': '',
'description': 'DV360 report ID given in UI, not needed if name used.'
}
},
'name': {
'field': {
'name': 'dbm_report_name',
'kind': 'string',
'order': 2,
'default': '',
'description': 'Name of report, not needed if ID used.'
}
}
},
'out': {
'storage': {
'auth': {
'field': {
'name': 'auth_write',
'kind': 'authentication',
'order': 1,
'default': 'service',
'description': 'Credentials used for writing data.'
}
},
'bucket': {
'field': {
'name': 'dbm_bucket',
'kind': 'string',
'order': 3,
'default': '',
'description': 'Google cloud bucket.'
}
},
'path': {
'field': {
'name': 'dbm_path',
'kind': 'string',
'order': 4,
'default': '',
'description': 'Path and filename to write to.'
}
}
}
}
}
}
]
}
DAG_FACTORY = DAG_Factory('dbm_to_storage', RECIPE, INPUTS)
DAG = DAG_FACTORY.generate()
if __name__ == "__main__":
DAG_FACTORY.print_commandline()
| 32.8
| 145
| 0.532151
|
ea84f4d18f3bd48ebcd4eb56c8d74626f0a0063b
| 7,382
|
py
|
Python
|
ingest/subscription/subscription_sage.py
|
ourresearch/journalsdb
|
169feb9be684eac59f3294dccdb319eb10fe1958
|
[
"MIT"
] | 8
|
2021-02-01T21:00:20.000Z
|
2022-01-25T09:51:24.000Z
|
ingest/subscription/subscription_sage.py
|
ourresearch/journalsdb
|
169feb9be684eac59f3294dccdb319eb10fe1958
|
[
"MIT"
] | 43
|
2021-04-28T00:20:53.000Z
|
2022-03-09T00:39:56.000Z
|
ingest/subscription/subscription_sage.py
|
ourresearch/journalsdb
|
169feb9be684eac59f3294dccdb319eb10fe1958
|
[
"MIT"
] | null | null | null |
import pandas as pd
from app import db
import regex as re
from models.journal import Journal
from models.location import Region
from models.price import MiniBundle, SubscriptionPrice
from ingest.subscription.subscription_base import SubscriptionImport
from ingest.utils import get_or_create
class Sage(SubscriptionImport):
"""
Takes a CSV of sage prices and adds them into the database.
"""
def __init__(self, year):
self.data_source = (
"https://us.sagepub.com/en-us/nam/sage-journals-and-subscription-info"
)
regions_and_currencies = [("USA", "USD"), ("GBR", "GBP")]
super().__init__(
year,
None,
regions_and_currencies,
"SAGE",
)
self.in_electronic_price = True
def format_sage_dataframe(self, excel_file_path):
"""
Loads the Sage Price List into a parsable dataframe.
"""
xls = pd.ExcelFile(excel_file_path)
self.df = pd.read_excel(xls, "List Price")
def set_issn(self, cell):
if (
pd.isnull(cell)
or not isinstance(cell, str)
or not re.match(r"^\s?\w{4}-\w{4}\s?\s?$", cell)
):
self.issn = None
else:
self.issn = cell.split(",")[0].strip()
def import_prices(self):
"""
Iterate through the dataframe and import the Sage Price List into the
SubscriptionPrice model.
"""
for index, row in self.df.iterrows():
self.set_journal_name(row["Title"])
self.set_issn(row["E-ISSN"])
self.set_journal()
self.set_product_id(row["Product"])
self.in_electronic_price = False
for region, currency_acronym in self.regions_and_currencies:
self.set_currency(currency_acronym)
self.set_country(region)
column = currency_acronym + " Price " + str(self.year)
self.set_price(row[column])
media_type = row["Product Description"]
price_category = row["Price Category Description"]
self.add_prices(media_type, price_category)
db.session.commit()
def add_prices(self, media_type, price_category):
if (
self.journal
and media_type == "Electronic Only"
and price_category == "Inst-Standard"
):
self.add_price_to_db()
self.in_electronic_price = True
def set_region(self, region):
"""
Queries the region from the database and sets this as a class variable.
"""
try:
self.current_region = (
db.session.query(Region)
.filter_by(name=region, publisher_id=self.publisher.id)
.first()
)
except:
print("Could not find region:", region)
class SageMiniBundle(SubscriptionImport):
"""
Takes a CSV of sage mini bundle prices and adds them into the database.
"""
def __init__(self, year):
regions_and_currencies = [("USA", "USD"), ("GBR", "GBP")]
super().__init__(
year,
None,
regions_and_currencies,
"SAGE",
)
self.mini_bundle_name = None
self.issns = []
self.in_electronic_price = True
def format_sage_dataframe(self, excel_file_path):
"""
Loads the Sage Price List into a parsable dataframe.
"""
xls = pd.ExcelFile(excel_file_path)
self.df = pd.read_excel(xls, "List Price")
def set_mini_bundle_name(self, cell):
"""
Sets the mini bundle name given a dataframe cell
"""
if not pd.isnull(cell):
self.mini_bundle_name = cell
def set_issns(self, cell):
cell = cell if cell else ""
issns = re.findall(r"\w{4}-\w{4}", cell)
        self.issns.extend(issns)
def import_prices(self):
"""
Iterate through the dataframe and import the Sage Price List into the
SubscriptionPrice model.
"""
for index, row in self.df.iterrows():
self.set_mini_bundle_name(row["Title"])
self.set_issns(row["E-ISSN"])
self.set_product_id(row["Product"])
self.in_electronic_price = False
for region, currency_acronym in self.regions_and_currencies:
self.set_currency(currency_acronym)
self.set_country(region)
column = currency_acronym + " Price " + str(self.year)
self.set_price(row[column])
media_type = row["Product Description"]
price_category = row["Price Category Description"]
self.add_prices(media_type, price_category)
self.issns = []
db.session.commit()
def add_prices(self, media_type, price_category):
if (
len(self.issns) > 1
and media_type == "Electronic Only"
and price_category == "Inst-Standard"
and self.price
):
self.add_price_to_db()
self.in_electronic_price = True
def add_price_to_db(self):
"""
Adds a SubscriptionPrice entry into the database.
"""
mb = get_or_create(
db.session,
MiniBundle,
name=self.mini_bundle_name,
publisher_id=self.publisher.id,
)
# create price if it does not exist
currency = self.currency
country = self.country
price_found = False
for p in mb.subscription_prices:
if (
p.price == self.price
and p.country == country
and p.currency == currency
):
print(
"Price already exists for mini bundle {} with price {}".format(
self.mini_bundle_name, self.price
)
)
price_found = True
if not price_found:
new_price = SubscriptionPrice(
price=self.price,
country=country,
currency=currency,
year=self.year,
)
db.session.add(new_price)
# match price to mini bundle
mb.subscription_prices.append(new_price)
print(
"Adding price {} {} to mini bundle {}".format(
self.price, self.currency.acronym, self.mini_bundle_name
)
)
db.session.commit()
# assign journals to mini bundle
for issn in self.issns:
j = Journal.find_by_issn(issn)
if j and j not in mb.journals:
print(
"assigning journal with issn {} to mini bundle {}".format(
j.issn_l, self.mini_bundle_name
)
)
mb.journals.append(j)
elif j:
print(
"Journal with issn {} already assigned to mini bundle {}".format(
j.issn_l, self.mini_bundle_name
)
)
else:
print("Journal does not exist for issn {}".format(issn))
db.session.commit()
| 32.808889
| 85
| 0.5424
|
06d2a7d8de218a287fa9de01d3f14ce5da491670
| 393
|
py
|
Python
|
app/fangnaer/wsgi.py
|
sylardie/fangnaer
|
44192e03977ac2c53c8f67027ce93dea297dfb41
|
[
"MIT"
] | 2
|
2018-06-22T00:59:02.000Z
|
2018-06-22T15:43:10.000Z
|
app/fangnaer/wsgi.py
|
sylardie/fangnaer
|
44192e03977ac2c53c8f67027ce93dea297dfb41
|
[
"MIT"
] | 2
|
2020-07-17T15:07:30.000Z
|
2021-05-09T21:49:14.000Z
|
app/fangnaer/wsgi.py
|
sylardie/fangnaer
|
44192e03977ac2c53c8f67027ce93dea297dfb41
|
[
"MIT"
] | 1
|
2018-06-22T00:55:25.000Z
|
2018-06-22T00:55:25.000Z
|
"""
WSGI config for fangnaer project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fangnaer.settings")
application = get_wsgi_application()
| 23.117647
| 78
| 0.78626
|
4698e3ca0670fa4d739bf474712d688092eb7b34
| 1,165
|
py
|
Python
|
tests/test_nn_activation.py
|
frgfm/torch-zoo
|
c97beacf3d49eaa34398abf47f378ea6b48a70f3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_nn_activation.py
|
frgfm/torch-zoo
|
c97beacf3d49eaa34398abf47f378ea6b48a70f3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_nn_activation.py
|
frgfm/torch-zoo
|
c97beacf3d49eaa34398abf47f378ea6b48a70f3
|
[
"Apache-2.0"
] | null | null | null |
import inspect
import torch
from holocron.nn import functional as F
from holocron.nn.modules import activation
def _test_activation_function(fn, input_shape):
# Optional testing
fn_args = inspect.signature(fn).parameters.keys()
cfg = {}
if "inplace" in fn_args:
cfg["inplace"] = [False, True]
# Generate inputs
x = torch.rand(input_shape)
# Optional argument testing
kwargs = {}
for inplace in cfg.get("inplace", [None]):
if isinstance(inplace, bool):
kwargs["inplace"] = inplace
out = fn(x, **kwargs)
assert out.shape == x.shape
if kwargs.get("inplace", False):
assert x.data_ptr() == out.data_ptr()
def test_hard_mish():
_test_activation_function(F.hard_mish, (4, 3, 32, 32))
assert repr(activation.HardMish()) == "HardMish()"
def test_nl_relu():
_test_activation_function(F.nl_relu, (4, 3, 32, 32))
assert repr(activation.NLReLU()) == "NLReLU()"
def test_frelu():
mod = activation.FReLU(8).eval()
with torch.no_grad():
_test_activation_function(mod.forward, (4, 8, 32, 32))
assert len(repr(mod).split("\n")) == 4
| 25.326087
| 62
| 0.63691
|
55cd47caddb5c15e11f6a6a38a57db992155de07
| 1,276
|
py
|
Python
|
src/linux/snipeFile.py
|
EH30/snipeFiles
|
11eed6f0387e3cb4afdcb94dfae0b5f874b8cafe
|
[
"Unlicense"
] | null | null | null |
src/linux/snipeFile.py
|
EH30/snipeFiles
|
11eed6f0387e3cb4afdcb94dfae0b5f874b8cafe
|
[
"Unlicense"
] | null | null | null |
src/linux/snipeFile.py
|
EH30/snipeFiles
|
11eed6f0387e3cb4afdcb94dfae0b5f874b8cafe
|
[
"Unlicense"
] | null | null | null |
import os
import sys
import argparse
#Created By: EH
def file_writer(filename, data):
opn = open(filename, "a")
opn.write(data+"\n")
opn.close()
def getData(paths, name, multi):
print("[+]Searching for: ", name)
found = 0
for root, dirname, filename in os.walk(paths):
for x in filename:
if x != []:
if name == x:
file_writer("Found.txt", "[+]Found {0} in {1}".format(name, root))
print("[+]Found {0} in {1}".format(name, root))
found = 1
if multi == "y" or multi.upper() == "Y":
continue
return 0
if found != 0:
return 0
return 1
parser = argparse.ArgumentParser(description="Enter You're Path then the File Name")
parser.add_argument("path", type=str, help="Enter The Path To Search For Files")
parser.add_argument("filename", type=str, help="Enter The File Name To Search")
parser.add_argument("multi", type=str, help="This is to Search multiple Files With Same Name Enter y/n")
args = parser.parse_args()
if getData(args.path, args.filename, args.multi) == 1:
print("[-]File Not Found")
| 29.674419
| 105
| 0.544671
|
139fc141d936c57f86cb8e854699266458a6b45d
| 1,423
|
py
|
Python
|
rqalpha/examples/IF_macd.py
|
terencehk/rqalpha
|
349e6a0a8e45449646acd6063cdec06df3bc1171
|
[
"Apache-2.0"
] | 44
|
2017-06-17T09:43:17.000Z
|
2022-03-02T11:08:03.000Z
|
rqalpha/examples/IF_macd.py
|
terencehk/rqalpha
|
349e6a0a8e45449646acd6063cdec06df3bc1171
|
[
"Apache-2.0"
] | 2
|
2017-08-26T13:10:17.000Z
|
2018-06-14T12:28:08.000Z
|
rqalpha/examples/IF_macd.py
|
terencehk/rqalpha
|
349e6a0a8e45449646acd6063cdec06df3bc1171
|
[
"Apache-2.0"
] | 28
|
2017-06-26T10:06:29.000Z
|
2021-12-17T04:26:04.000Z
|
from rqalpha.api import *
import talib
# Write any initialization logic in this method. The context object is passed
# between all the methods of your strategy.
def init(context):
    # store the target contract in the global variable s1 on context
    context.s1 = 'IF1606'
    # MACD needs the short/long moving-average periods and the smoothing period
    context.SHORTPERIOD = 12
    context.LONGPERIOD = 26
    context.SMOOTHPERIOD = 9
    context.OBSERVATION = 50
    # subscribe to the contract at initialization; subscribed quotes are then
    # updated in handle_bar
    subscribe(context.s1)
# This logic is triggered whenever the futures data you selected updates,
# e.g. on every daily or minute bar.
def handle_bar(context, bar_dict):
    # main strategy logic starts here
    # fetch the historical close series; history_bars returns an ndarray,
    # which is convenient for the indicator calculation below
    prices = history_bars(context.s1, context.OBSERVATION, '1d', 'close')
    # compute MACD with TA-Lib; it returns three time series: macd, signal and hist
    macd, signal, hist = talib.MACD(prices, context.SHORTPERIOD,
                                    context.LONGPERIOD, context.SMOOTHPERIOD)
    # macd is the difference between the short and long averages and signal is
    # its moving average; a cross from below to above is an entry signal, so
    # open a long position
    if macd[-1] - signal[-1] > 0 and macd[-2] - signal[-2] < 0:
        sell_qty = context.portfolio.positions[context.s1].sell_quantity
        # if there is an existing short position, close it first
        if sell_qty > 0:
            buy_close(context.s1, 1)
        # open a long position
        buy_open(context.s1, 1)
    if macd[-1] - signal[-1] < 0 and macd[-2] - signal[-2] > 0:
        buy_qty = context.portfolio.positions[context.s1].buy_quantity
        # if there is an existing long position, close it first
        if buy_qty > 0:
            sell_close(context.s1, 1)
        # open a short position
        sell_open(context.s1, 1)
| 30.276596
| 77
| 0.662684
|
0ce8a9e492fcee7fa9706ded2452df88d112b748
| 1,238
|
py
|
Python
|
setup.py
|
drpresq/sshepherd
|
e730b6b12a11f91a843e4e4cb2fdc606fa75ea47
|
[
"BSD-2-Clause"
] | null | null | null |
setup.py
|
drpresq/sshepherd
|
e730b6b12a11f91a843e4e4cb2fdc606fa75ea47
|
[
"BSD-2-Clause"
] | null | null | null |
setup.py
|
drpresq/sshepherd
|
e730b6b12a11f91a843e4e4cb2fdc606fa75ea47
|
[
"BSD-2-Clause"
] | null | null | null |
from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_desc = fh.read()
setup(
name='sshepherd',
version="0.2",
packages=["sshepherd"],
package_dir={'': "src"},
scripts=['scripts/sshepherd'],
author="George",
author_email="drpresq@gmail.com",
description="SSHepherd: Automated SSH User Management",
long_description=long_desc,
long_description_content_type="text/markdown",
url="https://github.com/drpresq/sshepherd",
install_requires=[
"paramiko>=2.7.2",
"scp==0.13.6"
],
extras_require={
'dev': [
'pytest>=6.2.4'
]
},
keywords="",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Utilities'
],
)
| 29.47619
| 59
| 0.590468
|
88676be69b4e5d677e93bb1afc12fbf23ad51615
| 2,857
|
py
|
Python
|
Model/lib/logger.py
|
uetke/UUTrap
|
38d1f954c89df1f8b68ad9f155ffc611cdb92004
|
[
"MIT"
] | 1
|
2019-08-20T19:44:43.000Z
|
2019-08-20T19:44:43.000Z
|
Model/lib/logger.py
|
uetke/UUTrap
|
38d1f954c89df1f8b68ad9f155ffc611cdb92004
|
[
"MIT"
] | 1
|
2018-03-13T14:06:43.000Z
|
2018-03-13T14:06:43.000Z
|
Model/lib/logger.py
|
uetke/UUTrap
|
38d1f954c89df1f8b68ad9f155ffc611cdb92004
|
[
"MIT"
] | 1
|
2018-08-14T14:46:22.000Z
|
2018-08-14T14:46:22.000Z
|
from collections import OrderedDict
import inspect
import logging
import logging.config
def logger(name='logfile.log',filelevel=logging.INFO, streamlevel=logging.WARNING):
# create logger with 'spam_application'
logger = logging.getLogger('__main__')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler(name)
fh.setLevel(filelevel)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(streamlevel)
# create formatter and add it to the handlers
if filelevel>=logging.INFO:
formatter = logging.Formatter('%(asctime)s - %(levelname)-8s - %(message)-60s - %(funcName)s')
elif filelevel<logging.INFO:
formatter = logging.Formatter('%(asctime)s - %(levelname)-8s - %(message)-60s - %(name)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
return logger
def get_all_caller():
cont = True
i=2
name = []
while cont:
name.append(caller_name(i))
if name[-1] == '':
cont = False
#print(i)
i += 1
name = ".".join(name[::-1])
return '.'.join(list(OrderedDict.fromkeys(name.split('.')))[1:])
def caller_name(skip=2):
"""Get a name of a caller in the format module.class.method
`skip` specifies how many levels of stack to skip while getting caller
name. skip=1 means "who calls me", skip=2 "who calls my caller" etc.
An empty string is returned if skipped levels exceed stack height
"""
stack = inspect.stack()
start = 0 + skip
if len(stack) < start+1:
return ''
parentframe = stack[start][0]
name = []
module = inspect.getmodule(parentframe)
# `modname` can be None when frame is executed directly in console
# TODO(techtonik): consider using __main__
if module:
name.append(module.__name__)
# detect classname
if 'self' in parentframe.f_locals:
# I don't know any way to detect call from the object method
# XXX: there seems to be no way to detect static method call - it will
# be just a function call
name.append(parentframe.f_locals['self'].__class__.__name__)
codename = parentframe.f_code.co_name
if codename != '<module>': # top level usually
name.append( codename ) # function or a method
del parentframe
return ".".join(name)
if __name__ == '__main__':
filename = 'test'
import os
cwd = os.getcwd()
savedir = cwd + '\\' + 'test' + '\\'
if not os.path.exists(savedir):
os.makedirs(savedir)
print(savedir)
logger = logger('%s\\%s.log' %(savedir,filename))
logger.critical('test')
| 32.101124
| 102
| 0.635982
|
f78af8042882d289d1ee2376039ba417133acf45
| 5,291
|
py
|
Python
|
databench_py/singlethread/meta.py
|
springcoil/databench
|
dba2cb412da099a3eb970e4b0cf257a62b012958
|
[
"MIT"
] | null | null | null |
databench_py/singlethread/meta.py
|
springcoil/databench
|
dba2cb412da099a3eb970e4b0cf257a62b012958
|
[
"MIT"
] | null | null | null |
databench_py/singlethread/meta.py
|
springcoil/databench
|
dba2cb412da099a3eb970e4b0cf257a62b012958
|
[
"MIT"
] | null | null | null |
"""Meta class for Databench Python kernel."""
import zmq
import time
import logging
class Meta(object):
"""Class providing Meta information about analyses.
For Python kernels.
Args:
name (str): Name of this analysis.
import_name (str): Usually the file name ``__name__`` where this
analysis is instantiated.
description (str): Usually the ``__doc__`` string of the analysis.
analysis (Analysis): Analysis class.
"""
def __init__(self, name, import_name, description, analysis):
self.name = name
self.import_name = import_name
self.description = description
self.analysis_class = analysis
self.analysis_instances = {}
self._init_zmq()
print 'Language kernel for '+self.name+' initialized.'
def _init_zmq(self, sub_port=8041):
"""Initialize zmq messaging. Listen on sub_port. This port might at
some point receive the message to start publishing on a certain
port, but until then, no publishing."""
self.zmq_publish = None
self.zmq_sub = zmq.Context().socket(zmq.SUB)
self.zmq_sub.connect('tcp://127.0.0.1:'+str(sub_port))
self.zmq_sub.setsockopt(zmq.SUBSCRIBE, '')
@staticmethod
def run_action(analysis, fn_name, message):
"""Executes an action in the analysis with the given message. It
also handles the start and stop signals in case an action_id
is given.
This method is exactly the same as in databench.Analysis.
"""
# detect action_id
action_id = None
if isinstance(message, dict) and '__action_id' in message:
action_id = message['__action_id']
del message['__action_id']
if action_id:
analysis.emit('__action', {'id': action_id, 'status': 'start'})
fn = getattr(analysis, fn_name)
# Check whether this is a list (positional arguments)
# or a dictionary (keyword arguments).
if isinstance(message, list):
fn(*message)
elif isinstance(message, dict):
fn(**message)
else:
fn(message)
if action_id:
analysis.emit('__action', {'id': action_id, 'status': 'end'})
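    # Hedged illustration (signal and payload are assumed examples): a call
    # such as run_action(analysis, 'on_run', {'__action_id': 3, 'iterations': 100})
    # strips '__action_id', emits {'id': 3, 'status': 'start'}, calls
    # analysis.on_run(iterations=100), then emits {'id': 3, 'status': 'end'}.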
def event_loop(self):
"""Event loop."""
while True:
msg = self.zmq_sub.recv_json()
logging.debug('kernel msg: '+str(msg))
if 'analysis' not in msg or \
msg['analysis'] != self.name:
continue
del msg['analysis']
logging.debug('kernel processing msg')
if 'instance_id' in msg and \
msg['instance_id'] not in self.analysis_instances:
# instance does not exist yet
logging.debug('kernel creating analysis instance ' +
str(msg['instance_id']))
i = self.analysis_class()
                def emit(signal, message, _instance_id=msg['instance_id']):
                    # bind the instance id at definition time; otherwise the
                    # closure would pick up whatever message arrived last
                    self.emit(signal, message, _instance_id)
                i.set_emit_fn(emit)
self.analysis_instances[msg['instance_id']] = i
# init message
if 'publish_on_port' in msg and not self.zmq_publish:
port = msg['publish_on_port']
self.zmq_publish = zmq.Context().socket(zmq.PUB)
self.zmq_publish.bind('tcp://127.0.0.1:'+str(port))
logging.debug('kernel publishing on: tcp://127.0.0.1:' +
str(port))
# wait for slow tcp bind
time.sleep(0.5)
# sending hello
self.zmq_publish.send_json({
'analysis': self.name,
'description': self.description,
})
if 'instance_id' not in msg:
continue
instance_id = msg['instance_id']
i = self.analysis_instances[instance_id]
if 'frame' not in msg or \
'signal' not in msg['frame'] or \
'load' not in msg['frame'] or \
not hasattr(i, 'on_'+msg['frame']['signal']):
continue
# standard message
fn_name = 'on_'+msg['frame']['signal']
logging.debug('kernel processing '+fn_name)
Meta.run_action(i, fn_name, msg['frame']['load'])
def emit(self, signal, message, instance_id):
"""Emit signal to main.
Args:
signal (str): Name of the signal to be emitted.
message: Message to be sent.
instance_id: Identifies the instance of this analysis.
"""
logging.debug(
'backend (namespace='+self.name+', analysis='+str(instance_id) +
', signal='+signal + '): ' + (
(str(message)[:60] + '...')
if len(str(message)) > 65
else str(message)
)
)
if self.zmq_publish:
self.zmq_publish.send_json({
'analysis': self.name,
'instance_id': instance_id,
'frame': {'signal': signal, 'load': message},
})
else:
logging.debug('zmq_socket_pub not defined yet.')
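# Hedged usage sketch (names are assumed examples, not part of databench):
# a Python language kernel typically instantiates Meta with its Analysis
# class and then blocks in the event loop.
#
#   if __name__ == '__main__':
#       meta = Meta('mypi', __name__, 'Monte-Carlo pi demo', MyPiAnalysis)
#       meta.event_loop()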
| 33.27673
| 76
| 0.543754
|
35bf0eaee1ae4ae5df8be759a5256da3238b73b3
| 12,132
|
bzl
|
Python
|
internal/pkg_npm/pkg_npm.bzl
|
Aghassi/rules_nodejs
|
3eb42603c440f7e8496f2e6812337eb47827ff6a
|
[
"Apache-2.0"
] | null | null | null |
internal/pkg_npm/pkg_npm.bzl
|
Aghassi/rules_nodejs
|
3eb42603c440f7e8496f2e6812337eb47827ff6a
|
[
"Apache-2.0"
] | null | null | null |
internal/pkg_npm/pkg_npm.bzl
|
Aghassi/rules_nodejs
|
3eb42603c440f7e8496f2e6812337eb47827ff6a
|
[
"Apache-2.0"
] | null | null | null |
"""npm packaging
Note, this is intended for sharing library code with non-Bazel consumers.
If all users of your library code use Bazel, they should just add your library
to the `deps` of one of their targets.
"""
load("//:providers.bzl", "DeclarationInfo", "JSModuleInfo", "LinkablePackageInfo", "NODE_CONTEXT_ATTRS", "NodeContextInfo")
_DOC = """The pkg_npm rule creates a directory containing a publishable npm artifact.
Example:
```python
load("@build_bazel_rules_nodejs//:index.bzl", "pkg_npm")
pkg_npm(
name = "my_package",
srcs = ["package.json"],
deps = [":my_typescript_lib"],
substitutions = {"//internal/": "//"},
)
```
You can use a pair of `// BEGIN-INTERNAL ... // END-INTERNAL` comments to mark regions of files that should be elided during publishing.
For example:
```javascript
function doThing() {
// BEGIN-INTERNAL
// This is a secret internal-only comment
doInternalOnlyThing();
// END-INTERNAL
}
```
With the Bazel stamping feature, pkg_npm will replace any placeholder version in your package with the actual version control tag.
See the [stamping documentation](https://github.com/bazelbuild/rules_nodejs/blob/master/docs/index.md#stamping)
Usage:
`pkg_npm` yields four labels. Build the package directory using the default label:
```sh
$ bazel build :my_package
Target //:my_package up-to-date:
bazel-out/fastbuild/bin/my_package
$ ls -R bazel-out/fastbuild/bin/my_package
```
Dry-run of publishing to npm, calling `npm pack` (it builds the package first if needed):
```sh
$ bazel run :my_package.pack
INFO: Running command line: bazel-out/fastbuild/bin/my_package.pack
my-package-name-1.2.3.tgz
$ tar -tzf my-package-name-1.2.3.tgz
```
Actually publish the package with `npm publish` (also builds first):
```sh
# Check login credentials
$ bazel run @nodejs//:npm_node_repositories who
# Publishes the package
$ bazel run :my_package.publish
```
You can pass arguments to npm by escaping them from Bazel using a double-hyphen, for example:
`bazel run my_package.publish -- --tag=next`
It is also possible to use the resulting tar file file from the `.pack` as an action input via the `.tar` label:
```python
my_rule(
name = "foo",
srcs = [
"//:my_package.tar",
],
)
```
"""
# Used in angular/angular /packages/bazel/src/ng_package/ng_package.bzl
PKG_NPM_ATTRS = dict(NODE_CONTEXT_ATTRS, **{
"deps": attr.label_list(
doc = """Other targets which produce files that should be included in the package, such as `rollup_bundle`""",
allow_files = True,
),
"nested_packages": attr.label_list(
doc = """Other pkg_npm rules whose content is copied into this package.""",
allow_files = True,
),
"package_name": attr.string(
doc = """Optional package_name that this npm package may be imported as.""",
),
"srcs": attr.label_list(
doc = """Files inside this directory which are simply copied into the package.""",
allow_files = True,
),
"substitutions": attr.string_dict(
doc = """Key-value pairs which are replaced in all the files while building the package.
You can use values from the workspace status command using curly braces, for example
`{"0.0.0-PLACEHOLDER": "{STABLE_GIT_VERSION}"}`.
See the section on stamping in the [README](stamping)
""",
),
"vendor_external": attr.string_list(
doc = """External workspaces whose contents should be vendored into this workspace.
Avoids `external/foo` path segments in the resulting package.""",
),
"_npm_script_generator": attr.label(
default = Label("//internal/pkg_npm:npm_script_generator"),
cfg = "host",
executable = True,
),
"_packager": attr.label(
default = Label("//internal/pkg_npm:packager"),
cfg = "host",
executable = True,
),
"_run_npm_bat_template": attr.label(
default = Label("@nodejs//:run_npm.bat.template"),
allow_single_file = True,
),
"_run_npm_template": attr.label(
default = Label("@nodejs//:run_npm.sh.template"),
allow_single_file = True,
),
})
# Used in angular/angular /packages/bazel/src/ng_package/ng_package.bzl
PKG_NPM_OUTPUTS = {
"pack_bat": "%{name}.pack.bat",
"pack_sh": "%{name}.pack.sh",
"publish_bat": "%{name}.publish.bat",
"publish_sh": "%{name}.publish.sh",
}
# Takes a depset of files and returns a corresponding list of file paths without any files
# that aren't part of the specified package path. Also include files from external repositories
# that explicitly specified in the vendor_external list.
def _filter_out_external_files(ctx, files, package_path):
result = []
for file in files:
# NB: package_path may be an empty string
if file.short_path.startswith(package_path) and not file.short_path.startswith("../"):
result.append(file.path)
else:
for v in ctx.attr.vendor_external:
if file.short_path.startswith("../%s/" % v):
result.append(file.path)
return result
# Used in angular/angular /packages/bazel/src/ng_package/ng_package.bzl
def create_package(ctx, deps_files, nested_packages):
"""Creates an action that produces the npm package.
It copies srcs and deps into the artifact and produces the .pack and .publish
scripts.
Args:
ctx: the skylark rule context
deps_files: list of files to include in the package which have been
specified as dependencies
nested_packages: list of TreeArtifact outputs from other actions which are
to be nested inside this package
Returns:
The tree artifact which is the publishable directory.
"""
stamp = ctx.attr.node_context_data[NodeContextInfo].stamp
all_files = deps_files + ctx.files.srcs
if not stamp and len(all_files) == 1 and all_files[0].is_directory and len(ctx.files.nested_packages) == 0:
# Special case where these is a single dep that is a directory artifact and there are no
# source files or nested_packages; in that case we assume the package is contained within
# that single directory and there is no work to do
package_dir = all_files[0]
_create_npm_scripts(ctx, package_dir)
return package_dir
package_dir = ctx.actions.declare_directory(ctx.label.name)
package_path = ctx.label.package
# List of dependency sources which are local to the package that defines the current
# target. Also include files from external repositories that explicitly specified in
# the vendor_external list. We only want to package deps files which are inside of the
# current package unless explicitely specified.
filtered_deps_sources = _filter_out_external_files(ctx, deps_files, package_path)
args = ctx.actions.args()
inputs = ctx.files.srcs + deps_files + nested_packages
args.use_param_file("%s", use_always = True)
args.add(package_dir.path)
args.add(package_path)
args.add_joined([s.path for s in ctx.files.srcs], join_with = ",", omit_if_empty = False)
args.add(ctx.bin_dir.path)
args.add(ctx.genfiles_dir.path)
args.add_joined(filtered_deps_sources, join_with = ",", omit_if_empty = False)
args.add_joined([p.path for p in nested_packages], join_with = ",", omit_if_empty = False)
args.add(ctx.attr.substitutions)
if stamp:
# The version_file is an undocumented attribute of the ctx that lets us read the volatile-status.txt file
# produced by the --workspace_status_command.
# Similarly info_file reads the stable-status.txt file.
# That command will be executed whenever
# this action runs, so we get the latest version info on each execution.
# See https://github.com/bazelbuild/bazel/issues/1054
args.add(ctx.version_file.path)
inputs.append(ctx.version_file)
args.add(ctx.info_file.path)
inputs.append(ctx.info_file)
else:
args.add_all(["", ""])
args.add_joined(ctx.attr.vendor_external, join_with = ",", omit_if_empty = False)
ctx.actions.run(
progress_message = "Assembling npm package %s" % package_dir.short_path,
mnemonic = "AssembleNpmPackage",
executable = ctx.executable._packager,
inputs = inputs,
outputs = [package_dir],
arguments = [args],
)
_create_npm_scripts(ctx, package_dir)
return package_dir
def _create_npm_scripts(ctx, package_dir):
args = ctx.actions.args()
args.add_all([
package_dir.path,
ctx.outputs.pack_sh.path,
ctx.outputs.publish_sh.path,
ctx.file._run_npm_template.path,
ctx.outputs.pack_bat.path,
ctx.outputs.publish_bat.path,
ctx.file._run_npm_bat_template.path,
])
ctx.actions.run(
progress_message = "Generating npm pack & publish scripts",
mnemonic = "GenerateNpmScripts",
executable = ctx.executable._npm_script_generator,
inputs = [ctx.file._run_npm_template, ctx.file._run_npm_bat_template, package_dir],
outputs = [ctx.outputs.pack_sh, ctx.outputs.publish_sh, ctx.outputs.pack_bat, ctx.outputs.publish_bat],
arguments = [args],
# Must be run local (no sandbox) so that the pwd is the actual execroot
# in the script which is used to generate the path in the pack & publish
# scripts.
execution_requirements = {"local": "1"},
)
def _pkg_npm(ctx):
deps_files_depsets = []
for dep in ctx.attr.deps:
# Collect whatever is in the "data"
deps_files_depsets.append(dep.data_runfiles.files)
# Only collect DefaultInfo files (not transitive)
deps_files_depsets.append(dep.files)
# All direct & transitive JavaScript-producing deps
if JSModuleInfo in dep:
deps_files_depsets.append(dep[JSModuleInfo].sources)
        # Include all transitive declarations
if DeclarationInfo in dep:
deps_files_depsets.append(dep[DeclarationInfo].transitive_declarations)
# Note: to_list() should be called once per rule!
deps_files = depset(transitive = deps_files_depsets).to_list()
package_dir = create_package(ctx, deps_files, ctx.files.nested_packages)
package_dir_depset = depset([package_dir])
result = [
DefaultInfo(
files = package_dir_depset,
runfiles = ctx.runfiles([package_dir]),
),
]
if ctx.attr.package_name:
result.append(LinkablePackageInfo(
package_name = ctx.attr.package_name,
path = package_dir.path,
files = package_dir_depset,
))
return result
pkg_npm = rule(
implementation = _pkg_npm,
attrs = PKG_NPM_ATTRS,
doc = _DOC,
outputs = PKG_NPM_OUTPUTS,
)
def pkg_npm_macro(name, **kwargs):
pkg_npm(
name = name,
**kwargs
)
native.alias(
name = name + ".pack",
actual = select({
"@bazel_tools//src/conditions:host_windows": name + ".pack.bat",
"//conditions:default": name + ".pack.sh",
}),
)
native.alias(
name = name + ".publish",
actual = select({
"@bazel_tools//src/conditions:host_windows": name + ".publish.bat",
"//conditions:default": name + ".publish.sh",
}),
)
native.genrule(
name = "%s.tar" % name,
outs = ["%s.tgz" % name],
cmd = "$(location :%s.pack) | xargs -I {} cp {} $@" % name,
        # NOTE(mattem): on windows, it seems to output a bunch of other stuff on stdout when piping, so pipe to tail
# and grab the last line
cmd_bat = "$(location :%s.pack) | tail -1 | xargs -I {} cp {} $@" % name,
tools = [
":%s.pack" % name,
],
        # tagged as manual so this doesn't cause two actions for each input when building for "host" (as used as a tool)
tags = [
"local",
"manual",
],
visibility = kwargs.get("visibility"),
)
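# A minimal BUILD-file sketch of the macro above, hedged: the load path assumes the usual
# rules_nodejs entry point, and the target and dep names are purely illustrative. It shows how
# the generated ".pack", ".publish", and ".tar" outputs become runnable aliases:
#
#   load("@build_bazel_rules_nodejs//:index.bzl", "pkg_npm")
#
#   pkg_npm(
#       name = "my_package",            # hypothetical package target
#       srcs = ["package.json", "README.md"],
#       deps = [":my_js_library"],      # hypothetical JS-producing dep
#   )
#
# `bazel run //path/to/pkg:my_package.publish` would then publish the assembled directory,
# and `bazel build //path/to/pkg:my_package.tar` produces the .tgz archive.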
| 34.174648
| 136
| 0.662463
|
928d07e3d3b8ce38d242ea72aeef648210099d07
| 9,023
|
py
|
Python
|
tenable/io/assets.py
|
allenmichael/pyTenable
|
8372cfdf3ced99de50227f6fbb37d6db2b26291e
|
[
"MIT"
] | null | null | null |
tenable/io/assets.py
|
allenmichael/pyTenable
|
8372cfdf3ced99de50227f6fbb37d6db2b26291e
|
[
"MIT"
] | 1
|
2021-08-18T17:26:30.000Z
|
2021-08-18T17:26:30.000Z
|
tenable/io/assets.py
|
allenmichael/pyTenable
|
8372cfdf3ced99de50227f6fbb37d6db2b26291e
|
[
"MIT"
] | null | null | null |
'''
assets
======
The following methods allow for interaction into the Tenable.io
:devportal:`assets <assets>` API endpoints.
Methods available on ``tio.assets``:
.. rst-class:: hide-signature
.. autoclass:: AssetsAPI
.. automethod:: asset_import
.. automethod:: delete
.. automethod:: details
.. automethod:: import_job_details
.. automethod:: list
.. automethod:: list_import_jobs
.. automethod:: tags
.. automethod:: bulk_delete
'''
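# A minimal connection sketch for the examples below, hedged: ``TenableIO`` is exported from
# the package root and the key strings are placeholders, not working credentials.
#
#   from tenable.io import TenableIO
#   tio = TenableIO('ACCESS_KEY', 'SECRET_KEY')
#   for asset in tio.assets.list():
#       print(asset['id'])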
from tenable.io.base import TIOEndpoint
class AssetsAPI(TIOEndpoint):
'''
This will contain all methods related to Assets
'''
def list(self):
'''
Returns a list of assets.
:devportal:`assets: list-assets <assets-list-assets>`
Returns:
:obj:`list`:
List of asset records.
Examples:
>>> for asset in tio.assets.list():
... pprint(asset)
'''
return self._api.get('assets').json()['assets']
def delete(self, uuid):
'''
Deletes the asset.
:devportal:`workbenches: asset-delete <workbenches-asset-delete>`
Args:
            uuid (str): The unique identifier for the asset.
Returns:
:obj:`None`:
Examples:
>>> asset_id = '00000000-0000-0000-0000-000000000000'
            >>> tio.assets.delete(asset_id)
'''
self._api.delete('workbenches/assets/{}'.format(
self._check('uuid', uuid, 'uuid')))
def details(self, uuid):
'''
Retrieves the details about a specific asset.
:devportal:`assets: asset-info <assets-asset-info>`
Args:
uuid (str):
The UUID (unique identifier) for the asset.
Returns:
:obj:`dict`:
Asset resource definition.
Examples:
>>> asset = tio.assets.details(
... '00000000-0000-0000-0000-000000000000')
'''
return self._api.get(
'assets/{}'.format(
self._check('uuid', uuid, str)
)).json()
def assign_tags(self, action, assets, tags):
'''
Add/remove tags for asset(s).
:devportal:`tags: assign-asset-tags <tags-assign-asset-tags>`
Args:
action (str):
Specifies whether to add or remove tags. Valid values: add, remove.
assets (List[str]):
An array of asset UUIDs.
tags (List[str]):
An array of tag value UUIDs.
Returns:
:obj:`dict`:
The job Resource record.
Examples:
>>> asset = tio.assets.assign_tags(
... 'add', ['00000000-0000-0000-0000-000000000000'],
... ['00000000-0000-0000-0000-000000000000'])
'''
return self._api.post(
'tags/assets/assignments', json={
'action': self._check('action', action, str, choices=['add', 'remove']),
'assets': [self._check('asset', i, 'uuid') for i in assets],
                'tags': [self._check('tag', i, 'uuid') for i in tags]
}).json()
def tags(self, uuid):
'''
        Retrieves the tags assigned to a specific asset.
:devportal:`tags: asset-tags <tags-list-asset-tags>`
Args:
uuid (str):
The UUID (unique identifier) for the asset.
Returns:
:obj:`dict`:
                Tag assignment records for the asset.
Examples:
>>> asset = tio.assets.tags(
... '00000000-0000-0000-0000-000000000000')
'''
return self._api.get(
'tags/assets/{}/assignments'.format(
self._check('uuid', uuid, 'uuid')
)).json()
def asset_import(self, source, *assets):
'''
Imports asset information into Tenable.io from an external source.
:devportal:`assets: import <assets-import>`
Imports a list of asset definition dictionaries. Each asset record must
contain at least one of the following attributes: ``fqdn``, ``ipv4``,
``netbios_name``, ``mac_address``. Each record may also contain
additional properties.
Args:
*assets (list):
The list of asset dictionaries
source (str):
An identifier to be used to upload the assets.
Returns:
:obj:`str`:
The job UUID.
Examples:
>>> tio.assets.asset_import('example_source', {
... 'fqdn': ['example.py.test'],
... 'ipv4': ['192.168.254.1'],
... 'netbios_name': 'example',
... 'mac_address': ['00:00:00:00:00:00']
... })
'''
# We will likely want to perform some more stringent checking of the
# asset resources that are being defined, however a simple type check
# should suffice for now.
return self._api.post(
'import/assets', json={
'assets': [self._check('asset', i, dict) for i in assets],
'source': self._check('source', source, str)
}).json()['asset_import_job_uuid']
def list_import_jobs(self):
'''
Returns a list of asset import jobs.
:devportal:`assets: list-import-jobs <assets-list-import-jobs>`
Returns:
:obj:`list`:
List of job records.
Examples:
>>> for job in tio.assets.list_import_jobs():
... pprint(job)
'''
return self._api.get('import/asset-jobs').json()['asset_import_jobs']
def import_job_details(self, uuid):
'''
Returns the details about a specific asset import job.
:devportal:`assets: import-job-info <assets-import-job-info>`
        Args:
            uuid (str):
                The UUID (unique identifier) for the job.
Returns:
:obj:`dict`:
The job Resource record.
Examples:
>>> job = tio.assets.import_job_details(
... '00000000-0000-0000-0000-000000000000')
>>> pprint(job)
'''
return self._api.get(
'import/asset-jobs/{}'.format(
self._check('uuid', uuid, str)
)).json()
def move_assets(self, source, destination, targets):
'''
Moves assets from the specified network to another network.
:devportal:`assets: move-assets <assets-bulk-move>`
        Args:
            source (str):
                The UUID of the network currently associated with the assets.
            destination (str):
                The UUID of the network to associate with the specified assets.
            targets (list):
                The IPv4 addresses of the assets to move.
Returns:
:obj:`int`:
Returns the number of moved assets.
Examples:
>>> asset = tio.assets.move_assets('00000000-0000-0000-0000-000000000000',
... '10000000-0000-0000-0000-000000000001', ["127.0.0.1"])
>>> pprint(asset)
'''
payload = {
'source': self._check('source', source, 'uuid'),
'destination': self._check('destination', destination, 'uuid'),
'targets': ','.join(self._check('targets', targets, list))
}
return self._api.post('api/v2/assets/bulk-jobs/move-to-network', json=payload).json()
def bulk_delete(self, *filters, filter_type=None):
'''
Deletes the specified assets.
:devportal:`assets: bulk_delete <assets-bulk-delete>`
Args:
*filters (tuple):
A defined filter tuple consisting of the name, operator, and
value. Example: ``('host.hostname', 'match', 'asset.com')``.
filter_type (str, optional):
If multiple filters are defined, the filter_type toggles the
behavior as to how these filters are used. Either all of the
filters have to match (``AND``) or any of the filters have to
match (``OR``). If not specified, the default behavior is to
assume filter_type is ``AND``.
Returns:
:obj:`dict`:
Returns the number of deleted assets.
Examples:
>>> asset = tio.assets.bulk_delete(
... ('host.hostname', 'match', 'asset.com'), filter_type='or')
>>> pprint(asset)
'''
payload = dict()
# run the rules through the filter parser...
filter_type = self._check('filter_type', filter_type, str,
choices=['and', 'or'], default='and', case='lower')
parsed = self._parse_filters(
filters, self._api.filters.workbench_asset_filters(), rtype='assets')['asset']
payload['query'] = {filter_type: parsed}
return self._api.post('api/v2/assets/bulk-jobs/delete', json=payload).json()
| 31.883392
| 93
| 0.537959
|
eb49d73535098c0a5ac7f1ebbd99a09011f1e3d8
| 1,706
|
py
|
Python
|
profiles_api/migrations/0001_initial.py
|
tekrajchhetri/django-rest-api
|
33da0a912d197cf104fe8eeb5a56728ac67e9784
|
[
"MIT"
] | null | null | null |
profiles_api/migrations/0001_initial.py
|
tekrajchhetri/django-rest-api
|
33da0a912d197cf104fe8eeb5a56728ac67e9784
|
[
"MIT"
] | 1
|
2020-02-12T07:24:12.000Z
|
2020-02-12T07:24:12.000Z
|
profiles_api/migrations/0001_initial.py
|
tekrajchhetri/django-rest-api
|
33da0a912d197cf104fe8eeb5a56728ac67e9784
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2 on 2020-02-08 10:22
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(max_length=255, unique=True)),
('name', models.CharField(max_length=255)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
]
| 50.176471
| 266
| 0.638921
|
8f65a154e2bee6b539c2b1fa066bfcd3c19b64db
| 2,078
|
tac
|
Python
|
Data/TACs/pabloModel_0vb/0noise/fullTAC0.2sigma.tac
|
ExampleOne/Fields-Project-11
|
427d7fd0314106d1ffcee99a4d5e2f5515cb6f66
|
[
"MIT"
] | 5
|
2018-08-07T15:40:38.000Z
|
2018-08-21T15:01:18.000Z
|
Data/TACs/pabloModel_0vb/0noise/fullTAC0.2sigma.tac
|
ExampleOne/Fields-Project-11
|
427d7fd0314106d1ffcee99a4d5e2f5515cb6f66
|
[
"MIT"
] | null | null | null |
Data/TACs/pabloModel_0vb/0noise/fullTAC0.2sigma.tac
|
ExampleOne/Fields-Project-11
|
427d7fd0314106d1ffcee99a4d5e2f5515cb6f66
|
[
"MIT"
] | null | null | null |
Start End Cerebellar Temporal Frontal Caudate Putamen Thalamus Anterior_cingulate Pons
0.00 76.00 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.02
76.00 106.00 88.27 99.01 97.30 94.73 105.24 108.87 89.92 72.03
106.00 136.00 253.21 283.11 278.68 275.32 303.57 313.93 257.64 207.85
136.00 166.00 396.86 442.55 436.41 438.05 479.13 495.50 403.97 328.09
166.00 196.00 480.20 534.33 528.07 539.98 585.16 605.47 489.99 400.66
196.00 226.00 519.37 577.28 571.85 596.37 640.03 662.97 532.57 438.06
226.00 271.00 534.67 594.65 590.84 631.40 669.63 695.13 553.69 457.90
271.00 331.00 529.30 591.30 589.87 650.59 679.69 708.44 558.75 463.91
331.00 391.00 510.59 575.75 576.67 656.12 675.61 707.98 553.68 459.92
391.00 481.00 485.59 556.28 559.35 656.11 666.29 702.90 545.85 452.02
481.00 601.00 457.21 536.67 541.58 655.05 656.15 697.78 538.61 443.32
601.00 811.00 427.71 519.24 525.41 655.42 647.56 694.74 533.24 435.74
811.00 1111.00 400.95 504.43 510.98 657.93 640.76 693.31 528.64 430.16
1111.00 1411.00 381.04 491.78 498.08 660.82 634.67 691.65 523.58 426.01
1411.00 1711.00 365.52 480.13 486.03 662.98 628.49 689.22 518.11 422.15
1711.00 2011.00 351.61 468.73 474.22 664.43 621.94 686.16 512.29 418.11
2011.00 2311.00 338.54 457.49 462.58 665.25 615.07 682.56 506.22 413.86
2311.00 2611.00 326.08 446.40 451.12 665.51 607.91 678.48 499.93 409.40
2611.00 2911.00 314.14 435.48 439.85 665.24 600.50 673.97 493.44 404.76
2911.00 3211.00 302.70 424.73 428.76 664.46 592.86 669.06 486.79 399.95
3211.00 3511.00 291.71 414.15 417.87 663.21 585.03 663.77 479.99 394.98
3511.00 3811.00 281.17 403.75 407.17 661.52 577.01 658.15 473.07 389.89
3811.00 4111.00 271.04 393.53 396.67 659.40 568.85 652.21 466.03 384.68
4111.00 4411.00 261.32 383.49 386.36 656.90 560.55 645.98 458.90 379.36
4411.00 4711.00 251.98 373.63 376.26 654.02 552.13 639.49 451.69 373.95
4711.00 5011.00 243.00 363.96 366.36 650.80 543.62 632.75 444.42 368.46
5011.00 5311.00 234.37 354.48 356.65 647.25 535.03 625.80 437.10 362.91
5311.00 5611.00 226.08 345.18 347.15 643.39 526.37 618.65 429.74 357.30
| 69.266667
| 86
| 0.713186
|
8ed3382bceaea539e4825a518b903ef277914a5e
| 1,498
|
py
|
Python
|
freeproxy/modules/proxies/fatezero.py
|
bibilii/FreeProxy
|
b39e0c64fd2b356d18e23479a274ead125a7dfbb
|
[
"MIT"
] | 15
|
2022-02-21T00:47:11.000Z
|
2022-03-25T02:31:37.000Z
|
freeproxy/modules/proxies/fatezero.py
|
bibilii/FreeProxy
|
b39e0c64fd2b356d18e23479a274ead125a7dfbb
|
[
"MIT"
] | 1
|
2022-03-09T07:55:57.000Z
|
2022-03-09T08:25:17.000Z
|
freeproxy/modules/proxies/fatezero.py
|
bibilii/FreeProxy
|
b39e0c64fd2b356d18e23479a274ead125a7dfbb
|
[
"MIT"
] | 5
|
2022-02-21T00:47:13.000Z
|
2022-03-18T01:52:07.000Z
|
'''
Function:
    fatezero proxy
Author:
    Charles
WeChat Official Account:
    Charles的皮卡丘
'''
import json
import requests
from .base import BaseProxy
'''fatezero proxy'''
class FatezeroProxy(BaseProxy):
def __init__(self, **kwargs):
super(FatezeroProxy, self).__init__(**kwargs)
self.http_proxies = []
self.https_proxies = []
self.http_https_proxies = []
    '''Refresh the proxy lists'''
def refreshproxies(self):
        # Initialize the proxy lists
self.http_proxies = []
self.https_proxies = []
proxies_format = '{ip}:{port}'
        # Fetch the raw proxy list
url = 'http://proxylist.fatezero.org/proxy.list'
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36',
}
response = requests.get(url, headers=headers, verify=False)
for item in response.text.split('\n'):
if not item.strip(): continue
item = json.loads(item)
ip = item['host']
port = item['port']
proxy_type = item['type']
if proxy_type.lower() == 'http':
self.http_proxies.append({'http': proxies_format.format(ip=ip, port=port)})
else:
self.https_proxies.append({'https': proxies_format.format(ip=ip, port=port)})
self.http_https_proxies = self.http_proxies.copy() + self.https_proxies.copy()
        # Return the collected proxies
return self.http_proxies, self.https_proxies, self.http_https_proxies
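# A minimal usage sketch, hedged: ``BaseProxy`` is defined elsewhere in this package and may
# accept extra keyword arguments; only what this class itself exposes is shown here.
#
#   proxy = FatezeroProxy()
#   http_list, https_list, all_list = proxy.refreshproxies()
#   # entries look like {'http': '1.2.3.4:8080'} or {'https': '1.2.3.4:8080'}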
| 33.288889
| 144
| 0.59279
|
ae3c69d0182e2eb4cfb2b6560d6cf522b01c8dbd
| 4,682
|
py
|
Python
|
train-mtcnn-zq-mxnet/core/metric_cls_bbox_landmark.py
|
zzzkk2009/anti-spoofing
|
ac3992547c430619e236b338575109d7ecbba654
|
[
"MIT"
] | 13
|
2018-12-19T07:43:46.000Z
|
2020-06-30T13:10:08.000Z
|
train-mtcnn-zq-mxnet/core/metric_cls_bbox_landmark.py
|
zzzkk2009/anti-spoofing
|
ac3992547c430619e236b338575109d7ecbba654
|
[
"MIT"
] | 1
|
2020-04-28T02:18:29.000Z
|
2020-04-28T02:18:29.000Z
|
train-mtcnn-zq-mxnet/core/metric_cls_bbox_landmark.py
|
zzzkk2009/anti-spoofing
|
ac3992547c430619e236b338575109d7ecbba654
|
[
"MIT"
] | 5
|
2018-12-19T07:43:48.000Z
|
2020-06-15T12:14:41.000Z
|
import mxnet as mx
import numpy as np
from config import config
class Accuracy(mx.metric.EvalMetric):
def __init__(self):
super(Accuracy, self).__init__('acc')
def update(self, labels, preds):
# output: cls_prob_output, cls_keep_inds, bbox_pred_output,bbox_keep_inds, landmark_pred_output, landmark_keep_inds
# label: type_label, label, bbox_target, landmark_target
pred_label = mx.ndarray.argmax_channel(preds[0]).asnumpy().astype('int32')
label = labels[1].asnumpy()
# negative mining
cls_keep = preds[1].asnumpy()
keep = np.where(cls_keep == 1)[0]
pred_label = pred_label[keep]
label = label[keep]
self.sum_metric += (pred_label.flat == label.flat).sum()
self.num_inst += len(pred_label.flat)
class LogLoss(mx.metric.EvalMetric):
def __init__(self):
super(LogLoss, self).__init__('loss')
def update(self, labels, preds):
# output: cls_prob_output, cls_keep_inds, bbox_pred_output,bbox_keep_inds, landmark_pred_output, landmark_keep_inds
# label: type_label, label, bbox_target, landmark_target
pred_cls = preds[0].asnumpy()
label = labels[1].asnumpy().astype('int32')
cls_keep = preds[1].asnumpy()
keep = np.where(cls_keep == 1)[0]
pred_cls = pred_cls[keep].reshape(-1, 2)
label = label[keep]
cls = pred_cls[np.arange(label.shape[0]), label.flat]
cls += config.EPS
cls_loss = -1 * np.log(cls)
cls_loss = np.sum(cls_loss)
self.sum_metric += cls_loss
self.num_inst += label.shape[0]
class BBOX_MSE(mx.metric.EvalMetric):
def __init__(self):
super(BBOX_MSE, self).__init__('boxL2')
def update(self,labels, preds):
# output: cls_prob_output, cls_keep_inds, bbox_pred_output,bbox_keep_inds, landmark_pred_output, landmark_keep_inds
# label: type_label, label, bbox_target, landmark_target
pred_delta = preds[2].asnumpy()
bbox_target = labels[2].asnumpy()
bbox_keep = preds[3].asnumpy()
keep = np.where(bbox_keep == 1)[0]
pred_delta = pred_delta[keep]
bbox_target = bbox_target[keep]
#print(preds)
#print(labels)
e = (pred_delta - bbox_target)**2
error = np.sum(e)
self.sum_metric += error
self.num_inst += e.size
class BBOX_L1(mx.metric.EvalMetric):
def __init__(self):
super(BBOX_L1, self).__init__('boxL1')
def update(self,labels, preds):
# output: cls_prob_output, cls_keep_inds, bbox_pred_output,bbox_keep_inds, landmark_pred_output, landmark_keep_inds
# label: type_label, label, bbox_target, landmark_target
pred_delta = preds[2].asnumpy()
bbox_target = labels[2].asnumpy()
bbox_keep = preds[3].asnumpy()
keep = np.where(bbox_keep == 1)[0]
pred_delta = pred_delta[keep]
bbox_target = bbox_target[keep]
#print(preds)
#print(labels)
e = abs(pred_delta - bbox_target)
error = np.sum(e)
self.sum_metric += error
self.num_inst += e.size
class LANDMARK_MSE(mx.metric.EvalMetric):
def __init__(self):
super(LANDMARK_MSE, self).__init__('lmL2')
def update(self,labels, preds):
# output: cls_prob_output, cls_keep_inds, bbox_pred_output,bbox_keep_inds, landmark_pred_output, landmark_keep_inds
# label: type_label, label, bbox_target, landmark_target
pred_delta = preds[4].asnumpy()
landmark_target = labels[3].asnumpy()
landmark_keep = preds[5].asnumpy()
keep = np.where(landmark_keep == 1)[0]
pred_delta = pred_delta[keep]
landmark_target = landmark_target[keep]
#print(preds)
#print(labels)
e = (pred_delta - landmark_target)**2
error = np.sum(e)
self.sum_metric += error
self.num_inst += e.size
class LANDMARK_L1(mx.metric.EvalMetric):
def __init__(self):
super(LANDMARK_L1, self).__init__('lmL1')
def update(self,labels, preds):
# output: cls_prob_output, cls_keep_inds, bbox_pred_output,bbox_keep_inds, landmark_pred_output, landmark_keep_inds
# label: type_label, label, bbox_target, landmark_target
pred_delta = preds[4].asnumpy()
landmark_target = labels[3].asnumpy()
landmark_keep = preds[5].asnumpy()
keep = np.where(landmark_keep == 1)[0]
pred_delta = pred_delta[keep]
landmark_target = landmark_target[keep]
e = abs(pred_delta - landmark_target)
error = np.sum(e)
self.sum_metric += error
self.num_inst += e.size
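# A minimal sketch of wiring these metrics together for training, hedged: the module and data
# iterator are hypothetical; only the mx.metric.CompositeEvalMetric composition is shown.
#
#   eval_metric = mx.metric.CompositeEvalMetric()
#   for child in [Accuracy(), LogLoss(), BBOX_MSE(), BBOX_L1(), LANDMARK_MSE(), LANDMARK_L1()]:
#       eval_metric.add(child)
#   # mod.fit(train_iter, eval_metric=eval_metric, num_epoch=..., ...)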
| 33.927536
| 123
| 0.644383
|