repo_name
stringlengths 6
97
| path
stringlengths 3
341
| text
stringlengths 8
1.02M
|
|---|---|---|
dachrillz/Python-Rags
|
tests/caching/test_cache.py
|
<reponame>dachrillz/Python-Rags
from unittest import TestCase
from abc import ABC
from src.library import inh, eq, syn, Weaver
#################################################
# Grammar Specification
#################################################
class MinTree:
    """Reference attribute grammar for the min-tree example.

    Instantiating this class declares the attributes and registers their
    defining equations on the tree classes below.  This is the class a user
    of the library is expected to write.
    """

    def __init__(self):
        # Inherited attribute: every Node can ask for the tree-wide minimum.
        inh(Node, "globalmin")
        # The root supplies the value: the local minimum of its whole subtree.
        eq(Program, 'globalmin', lambda node: node.node.localmin())

        # Synthesized attribute: the minimum of a node's own subtree.
        syn(Node, "localmin")
        eq(Leaf, 'localmin', lambda leaf: leaf.value)
        eq(Pair, 'localmin',
           lambda pair: min(pair.left.localmin(), pair.right.localmin()))
#################################################
# Tree Specification
#################################################
class Program:
    """Root of a min-tree; wraps a single Node subtree."""

    def __init__(self, node):
        self.node = node

    def traverse_tree_aux(self, node, result):
        """Append *node* and, recursively, all of its descendants to *result*.

        Leaves are appended directly; inner Pair nodes are recursed into.
        """
        result.append(node)
        if isinstance(node.left, Leaf):
            result.append(node.left)
        else:
            # BUG FIX: was self.traverse_tre_aux (typo) -> NameError on any
            # tree whose left child is a Pair.
            self.traverse_tree_aux(node.left, result)
        if isinstance(node.right, Leaf):
            result.append(node.right)
        else:
            self.traverse_tree_aux(node.right, result)

    def traverse(self):
        """Pre-order traversal; returns all nodes (including self) in a list."""
        result = [self]
        first_node = self.node
        self.traverse_tree_aux(first_node, result)
        return result

    def get_children(self):
        # Single child: the root node of the tree.
        return [self.node]
class Node(ABC):
    """Abstract base class for the nodes of a min-tree."""

    def __init__(self):
        super().__init__()


class Pair(Node):
    """Inner node holding exactly two subtrees."""

    def __init__(self, left, right):
        super().__init__()
        self.left = left
        self.right = right

    def get_children(self):
        return [self.left, self.right]


class Leaf(Node):
    """Terminal node holding a single value."""

    def __init__(self, value):
        super().__init__()
        self.value = value

    def get_children(self):
        # Leaves never have children.
        return []
#################################################
# Test Classes
#################################################
class MinTreeTest(TestCase):
    """Timing tests for cached attribute evaluation."""

    def setUp(self):
        # Weave the grammar into the tree classes, build a small tree and
        # collect all of its nodes for the individual tests.
        Weaver(MinTree)
        instance = Program(Pair(Leaf(1), Pair(Leaf(2), Leaf(3))))
        Weaver.infer_parents(instance)
        self.allnodes = instance.traverse()

    def test_time_inherited_attributes(self):
        import time
        start = time.time()
        node = self.allnodes[2]
        for _ in range(int(1e6)):
            node.globalmin()
        print("Inherited attributes: --- %s seconds ---" % (time.time() - start))

    def test_local_min(self):
        import time
        start = time.time()
        node = self.allnodes[2]
        for _ in range(int(1e6)):
            node.localmin()
        print("Synthesized attributes: --- %s seconds ---" % (time.time() - start))
|
dachrillz/Python-Rags
|
tests/modularization/rag_first_module.py
|
"""
The files in this folder tests so that the library can be modularized.
That is, can the RAG be defined in multiple files.
This file defines the circular attributes
"""
from src.library import eq, syn
#################################################
# Grammar Specification
#################################################
from tests.modularization.tree_definition import Program, Leaf, Pair
class MinTreeFirst:
    """First half of a modularized RAG: the synthesized 'localmin' attribute.

    The inherited half lives in rag_second_module; together they demonstrate
    that a grammar may be split across several files.
    """

    def __init__(self):
        # Declare 'localmin' on every tree class...
        syn(Program, "localmin")
        syn(Leaf, "localmin")
        syn(Pair, "localmin")
        # ...and supply one equation per class.
        eq(Leaf, 'localmin', lambda leaf: leaf.value)
        eq(Pair, 'localmin',
           lambda pair: min(pair.left.localmin(), pair.right.localmin()))
        eq(Program, 'localmin', lambda _: 0)
|
dachrillz/Python-Rags
|
tests/inh/test_inh_basic_2.py
|
<gh_stars>1-10
from unittest import TestCase
from src.library import inh, eq, Weaver
#################################################
# Grammar Specification
#################################################
class RAG:
    """Grammar: every Node inherits a 'root' attribute pointing at the Root."""

    def __init__(self):
        inh(Node, 'root')
        # The Root defines itself as the value of the attribute.
        eq(Root, 'root', lambda node: node)
#################################################
# Tree Specification
#################################################
class Root:
    """Top of a singly-linked chain of nodes."""

    def __init__(self):
        self.node = None

    def set_node(self, node):
        self.node = node

    def get_children(self):
        # Either no child at all or exactly one.
        return [] if self.node is None else [self.node]


class Node:
    """Chain link; may hold at most one child node."""

    def __init__(self):
        self.node = None

    def set_node(self, node):
        self.node = node

    def get_children(self):
        return [] if self.node is None else [self.node]
#################################################
# Test Classes
#################################################
class TestClass(TestCase):
    """The inherited 'root' attribute resolves through two levels of nodes."""

    def setUp(self):
        Weaver(RAG)
        self.root = Root()
        self.root.set_node(Node())
        self.root.node.set_node(Node())
        Weaver.infer_parents(self.root)
        # Warm the attribute cache and show the resolved values.
        self.root.node.node.root()
        print(self.root.node.node.root())
        print(self.root)

    def test_basic_inheritance1(self):
        self.assertEqual(self.root.node.node.root(), self.root)
|
dachrillz/Python-Rags
|
tests/library_functionality/test_infer_parents.py
|
<gh_stars>1-10
from unittest import TestCase
from abc import ABC
from src.library import Weaver
#################################################
# Tree Specification
#################################################
class Program:
    """Root of a test tree; holds a value and a single Node subtree."""

    def __init__(self, value, node):
        self.value = value
        self.node = node

    def get_children(self):
        return [self.node]

    def traverse_tree_aux(self, node, result):
        """Append *node* and, recursively, all of its descendants to *result*."""
        result.append(node)
        if isinstance(node.left, Leaf):
            result.append(node.left)
        else:
            # BUG FIX: was self.traverse_tre_aux (typo) -> NameError whenever
            # the left child is a Pair.
            self.traverse_tree_aux(node.left, result)
        if isinstance(node.right, Leaf):
            result.append(node.right)
        else:
            self.traverse_tree_aux(node.right, result)

    def traverse(self):
        """Pre-order traversal; returns all nodes (including self) in a list."""
        result = [self]
        first_node = self.node
        self.traverse_tree_aux(first_node, result)
        return result
class Node(ABC):
    """Abstract base class for tree nodes."""

    def __init__(self):
        super().__init__()


class Pair(Node):
    """Inner node: a value plus two subtrees."""

    def __init__(self, value, left, right):
        super().__init__()
        self.value = value
        self.left = left
        self.right = right

    def get_children(self):
        return [self.left, self.right]


class Leaf(Node):
    """Terminal node holding a single value."""

    def __init__(self, value):
        super().__init__()
        self.value = value

    def get_children(self):
        # FIX: the original defined get_children twice on Leaf; the duplicate
        # (identical) definition has been removed.
        return []
#################################################
# Test Classes
#################################################
class RAG:
    """Empty grammar: infer_parents needs no attributes to be declared."""
    pass


class MinTreeTest(TestCase):
    """Weaver.infer_parents wires a get_parent() onto every tree node."""

    def setUp(self):
        self.weaver = Weaver(RAG)  # Just give the reference to the RAG class
        self.instance = Program(0, Pair(1, Leaf(2), Pair(3, Leaf(4), Leaf(5))))

    def test_infer_parents(self):
        self.weaver.infer_parents(self.instance)

        pair1 = self.instance.get_children()[0]
        self.assertEqual(pair1.get_parent(), self.instance)

        leaf2, pair3 = pair1.get_children()
        self.assertEqual(leaf2.get_parent(), pair1)
        self.assertEqual(pair3.get_parent(), pair1)

        leaf4, leaf5 = pair3.get_children()
        self.assertEqual(leaf4.get_parent(), pair3)
        self.assertEqual(leaf5.get_parent(), pair3)
|
dachrillz/Python-Rags
|
main.py
|
<reponame>dachrillz/Python-Rags
import sys
from pprint import pprint
import example.min_tree as example
import example.state_machine as state_example
import example.calc as calc_example
def display_help():
    """Print the list of accepted command line arguments."""
    # FIX: the summary line listed arguments ("example") that the dispatcher
    # below does not accept; list the real ones.
    pprint("Possible args -- test, exampletree, examplestate, examplecalc, help")
    pprint("test -- Run tests")
    pprint("exampletree -- run example Min Tree in example folder")
    pprint("examplestate -- run example State Machine in example folder")
    pprint("examplecalc -- run example Calc in example folder")
    # FIX: typo "Diplsay" -> "Display".
    pprint("help -- Display this text")
# Tiny command line driver: exactly one argument selects what to run.
if len(sys.argv) != 2:
    display_help()
else:
    first_arg = sys.argv[1]
    if first_arg == 'test':
        # Run the test suite via pytest in a child process.
        import subprocess
        subprocess.Popen("pytest")
    elif first_arg == 'exampletree':
        example.run_example()
    elif first_arg == 'examplestate':
        state_example.run_example()
    elif first_arg == 'examplecalc':
        calc_example.run_example()
    elif first_arg == 'help':
        display_help()
    else:
        # Unknown argument: fall back to the help text.
        display_help()
|
dachrillz/Python-Rags
|
tests/modularization/test_modularization.py
|
<reponame>dachrillz/Python-Rags<filename>tests/modularization/test_modularization.py
"""
The files in this folder tests so that the library can be modularized.
That is, can the RAG be defined in multiple files.
This is the main file that binds all the other files together.
"""
from unittest import TestCase
from src.library import Weaver
from tests.modularization.rag_first_module import MinTreeFirst
from tests.modularization.rag_second_module import MinTreeSecond
from tests.modularization.tree_definition import Program, Pair, Leaf
#################################################
# Test Classes
#################################################
class MinTreeTest(TestCase):
    """Checks that a RAG split over two modules weaves correctly."""

    # Expected localmin per node in traversal order:
    # Program, Pair, Leaf(1), Pair, Leaf(2), Leaf(3).
    EXPECTED_LOCALMIN = [0, 1, 1, 2, 2, 3]

    def setUp(self):
        # Weave both halves of the grammar before building the tree.
        Weaver(MinTreeFirst)
        Weaver(MinTreeSecond)
        instance = Program(Pair(Leaf(1), Pair(Leaf(2), Leaf(3))))
        Weaver.infer_parents(instance)
        # All nodes of the attributed tree, for the individual tests.
        self.allnodes = instance.traverse()

    def test_global_min(self):
        for item in self.allnodes:
            if not isinstance(item, Program):
                self.assertEqual(item.globalmin(), 42)

    def test_local_min(self):
        # BUG FIX: the original compared indices with "is" (identity), which
        # only works by accident for small interned ints and emits a
        # SyntaxWarning on modern CPython; use "==" via a lookup table.
        for i, expected in enumerate(self.EXPECTED_LOCALMIN):
            self.assertEqual(expected, self.allnodes[i].localmin())
|
dachrillz/Python-Rags
|
example/calc.py
|
<filename>example/calc.py
from abc import ABC
from src.library import inh, eq, syn, Weaver
#################################################
# Grammar Specification
#################################################
class Calc:
    """Reference attribute grammar for the calculator example."""

    def __init__(self):
        # Every expression inherits the value of the whole computation.
        inh(Expr, "finalCalc")
        eq(Program, 'finalCalc', lambda node: node.node.localCalc())

        # Each operator synthesizes the value of its own subtree.
        syn(Mul, "localCalc")
        syn(Add, "localCalc")
        syn(Div, "localCalc")
        syn(Sub, "localCalc")
        syn(Numeral, "localCalc")
        eq(Mul, 'localCalc', lambda e: e.left.localCalc() * e.right.localCalc())
        eq(Add, 'localCalc', lambda e: e.left.localCalc() + e.right.localCalc())
        eq(Div, 'localCalc', lambda e: e.left.localCalc() / e.right.localCalc())
        eq(Sub, 'localCalc', lambda e: e.left.localCalc() - e.right.localCalc())
        eq(Numeral, 'localCalc', lambda e: e.value)
#################################################
# Tree Specification
#################################################
class Program:
    """Root of an expression tree; wraps a single Expr subtree."""

    def __init__(self, node):
        self.node = node

    def traverse_tree_aux(self, node, result):
        """Append *node* and, recursively, all of its descendants to *result*."""
        result.append(node)
        # BUG FIX: the original tested isinstance(..., Expr), which is true
        # for *every* node in this tree, so inner operators were never
        # recursed into and their operands were missing from the traversal.
        # Only Numerals are terminals here.
        if isinstance(node.left, Numeral):
            result.append(node.left)
        else:
            # BUG FIX: was self.traverse_tre_aux (typo) -> NameError.
            self.traverse_tree_aux(node.left, result)
        if isinstance(node.right, Numeral):
            result.append(node.right)
        else:
            self.traverse_tree_aux(node.right, result)

    def traverse(self):
        """Pre-order traversal; returns all nodes (including self) in a list."""
        result = [self]
        first_node = self.node
        self.traverse_tree_aux(first_node, result)
        return result

    def get_children(self):
        return [self.node]
class Expr(ABC):
    """Abstract base class for all expression nodes."""

    def __init__(self):
        super().__init__()


class Mul(Expr):
    """Product of two subexpressions."""

    def __init__(self, left, right):
        super().__init__()
        self.left = left
        self.right = right

    def get_children(self):
        return [self.left, self.right]


class Sub(Expr):
    """Difference of two subexpressions."""

    def __init__(self, left, right):
        super().__init__()
        self.left = left
        self.right = right

    def get_children(self):
        return [self.left, self.right]


class Add(Expr):
    """Sum of two subexpressions."""

    def __init__(self, left, right):
        super().__init__()
        self.left = left
        self.right = right

    def get_children(self):
        return [self.left, self.right]


class Div(Expr):
    """Quotient of two subexpressions."""

    def __init__(self, left, right):
        super().__init__()
        self.left = left
        self.right = right

    def get_children(self):
        return [self.left, self.right]


class Numeral(Expr):
    """Terminal node holding a literal value."""

    def __init__(self, value):
        super().__init__()
        self.value = value

    def get_children(self):
        return []
#################################################
# Driver Code
#################################################
class CalcExample:
    """Builds (1 + 3) * 2 and weaves the Calc grammar onto it."""

    def __init__(self):
        Weaver(Calc)  # Just give the reference to the RAG class
        instance = Program(Mul(Add(Numeral(1), Numeral(3)), Numeral(2)))
        Weaver.infer_parents(instance)
        # Collect every node so the print helpers can iterate over them.
        self.allnodes = instance.traverse()

    def print_final_Calc(self):
        for node in self.allnodes:
            if not isinstance(node, Program):
                print(node.finalCalc())

    def print_local_Calc(self):
        for node in self.allnodes:
            if not isinstance(node, Program):
                print(node.localCalc())


def run_example():
    example = CalcExample()
    print("Print the local calc in each node.")
    example.print_local_Calc()
    print("Print the final calc in each node.")
    example.print_final_Calc()
|
dachrillz/Python-Rags
|
tests/modularization/tree_definition.py
|
<reponame>dachrillz/Python-Rags<gh_stars>1-10
"""
The files in this folder tests so that the library can be modularized.
That is, can the RAG be defined in multiple files.
This file defines the tree that is to be attributed.
It has to be defined in a separate file in order to avoid circular imports.
"""
from abc import ABC
class Program:
    """Root of a min-tree; wraps a single Node subtree.

    Defined in its own module so the modularized grammar files can import it
    without circular imports.
    """

    def __init__(self, node):
        self.node = node

    def traverse_tree_aux(self, node, result):
        """Append *node* and, recursively, all of its descendants to *result*."""
        result.append(node)
        if isinstance(node.left, Leaf):
            result.append(node.left)
        else:
            # BUG FIX: was self.traverse_tre_aux (typo) -> NameError on any
            # tree whose left child is a Pair.
            self.traverse_tree_aux(node.left, result)
        if isinstance(node.right, Leaf):
            result.append(node.right)
        else:
            self.traverse_tree_aux(node.right, result)

    def traverse(self):
        """Pre-order traversal; returns all nodes (including self) in a list."""
        result = [self]
        first_node = self.node
        self.traverse_tree_aux(first_node, result)
        return result

    def get_children(self):
        return [self.node]
class Node(ABC):
    """Abstract base class for min-tree nodes."""

    def __init__(self):
        super().__init__()


class Pair(Node):
    """Inner node holding exactly two subtrees."""

    def __init__(self, left, right):
        super().__init__()
        self.left = left
        self.right = right

    def get_children(self):
        return [self.left, self.right]


class Leaf(Node):
    """Terminal node holding a single value."""

    def __init__(self, value):
        super().__init__()
        self.value = value

    def get_children(self):
        return []
|
dachrillz/Python-Rags
|
tests/modularization/rag_second_module.py
|
"""
The files in this folder tests so that the library can be modularized.
That is, can the RAG be defined in multiple files.
This file defines the inherited attributes
"""
from src.library import inh, eq
from tests.modularization.tree_definition import Node, Program
#################################################
# Grammar Specification
#################################################
class MinTreeSecond:
    """Second half of a modularized RAG: the inherited 'globalmin' attribute.

    The synthesized half lives in rag_first_module.
    """

    def __init__(self):
        # Every Node inherits 'globalmin'; the Program defines it as 42.
        inh(Node, "globalmin")
        eq(Program, 'globalmin', lambda _: 42)
|
dachrillz/Python-Rags
|
example/min_tree.py
|
<reponame>dachrillz/Python-Rags
from abc import ABC
from src.library import inh, eq, syn, Weaver
#################################################
# Grammar Specification
#################################################
class MinTree:
    """Reference attribute grammar for the min-tree example."""

    def __init__(self):
        # Inherited attribute: every Node can ask for the tree-wide minimum.
        inh(Node, "globalmin")
        # BUG FIX: the equation read Program.node.localmin(), but 'node' is an
        # *instance* attribute -- accessing it on the class raises
        # AttributeError.  Evaluate on the node the equation receives, as the
        # equivalent grammar in tests/caching does.
        eq(Program, 'globalmin', lambda n: n.node.localmin())

        # Synthesized attribute: the minimum of a node's own subtree.
        syn(Program, "localmin")
        syn(Leaf, "localmin")
        syn(Pair, "localmin")
        eq(Leaf, 'localmin', lambda x: x.value)
        eq(Pair, 'localmin', lambda x: min(x.left.localmin(), x.right.localmin()))
        eq(Program, 'localmin', lambda x: 0)
#################################################
# Tree Specification
#################################################
class Program:
    """Root of a min-tree; wraps a single Node subtree."""

    def __init__(self, node):
        self.node = node

    def traverse_tree_aux(self, node, result):
        """Append *node* and, recursively, all of its descendants to *result*."""
        result.append(node)
        if isinstance(node.left, Leaf):
            result.append(node.left)
        else:
            # BUG FIX: was self.traverse_tre_aux (typo) -> NameError on any
            # tree whose left child is a Pair.
            self.traverse_tree_aux(node.left, result)
        if isinstance(node.right, Leaf):
            result.append(node.right)
        else:
            self.traverse_tree_aux(node.right, result)

    def traverse(self):
        """Pre-order traversal; returns all nodes (including self) in a list."""
        result = [self]
        first_node = self.node
        self.traverse_tree_aux(first_node, result)
        return result

    def get_children(self):
        return [self.node]
class Node(ABC):
    """Abstract base class for min-tree nodes."""

    def __init__(self):
        super().__init__()


class Pair(Node):
    """Inner node holding exactly two subtrees."""

    def __init__(self, left, right):
        super().__init__()
        self.left = left
        self.right = right

    def get_children(self):
        return [self.left, self.right]


class Leaf(Node):
    """Terminal node holding a single value."""

    def __init__(self, value):
        super().__init__()
        self.value = value

    def get_children(self):
        return []
#################################################
# Driver Code
#################################################
class MinTreeExample:
    """Builds the example tree and weaves the MinTree grammar onto it."""

    def __init__(self):
        Weaver(MinTree)  # Just give the reference to the RAG class
        instance = Program(Pair(Leaf(1), Pair(Leaf(2), Leaf(3))))
        Weaver.infer_parents(instance)
        # Collect every node so the print helpers can iterate over them.
        self.allnodes = instance.traverse()

    def print_global_min(self):
        for node in self.allnodes:
            if not isinstance(node, Program):
                print(node.globalmin())

    def print_local_min(self):
        for node in self.allnodes:
            print(node.localmin())


def run_example():
    example = MinTreeExample()
    print("This is the same tree as in Assignment 3, from the Compiler Course")
    print("Printing the global mins")
    example.print_global_min()
    print("Printing the local mins")
    example.print_local_min()
|
dachrillz/Python-Rags
|
src/library.py
|
########################################################
#
# LIBRARY STUFF!
#
########################################################
from functools import lru_cache
def syn(type_of_class, attribute_name, equation=None):
    """Declare a synthesized attribute on *type_of_class*.

    When *equation* is given it is installed directly as the attribute.
    Otherwise a cached lookup function is installed that, when called,
    evaluates the equation registered by eq() under the mangled name
    '__eq__<attribute_name>' -- preferring the receiver's own class and
    falling back to *type_of_class* (the class the attribute was declared on).

    @TODO: this is somewhat inconsistent when one gives the equation directly,
    rewrite that part
    :param type_of_class: class the attribute is attached to
    :param attribute_name: name of the synthesized attribute
    :param equation: optional function used directly as the attribute
    :return: None
    """
    # NOTE(review): lru_cache keys on `self`, so every node the attribute is
    # ever evaluated on is kept alive for the lifetime of the cache.
    @lru_cache(maxsize=None)
    def lookup_function(self):
        # Equations are stored under a mangled '__eq__' prefix by eq().
        closure_name = '__eq__' + attribute_name
        if hasattr(self.__class__, closure_name):
            return getattr(self.__class__, closure_name)(self)
        else:
            return getattr(type_of_class, closure_name)(self)
    if equation is None:
        setattr(type_of_class, attribute_name, lookup_function)
    else:
        setattr(type_of_class, attribute_name, equation)
def inh(type_of_class, attribute_name):
    """Declare an inherited attribute on *type_of_class*.

    The installed lookup walks up the parent chain (via get_parent()) until it
    finds an ancestor whose class defines an equation for the attribute, then
    evaluates that equation with the given arguments.  Implicitly returns
    None when no ancestor defines the attribute.
    """
    # We use closures to be able to pass the attribute_name into the function
    # later.  lru_cache keys on (self, *args), so repeated lookups on the same
    # node are answered from the cache.
    @lru_cache(maxsize=None)
    def get_function_from_parent(self, *args):
        closure_attribute = "__eq__" + attribute_name
        parent = self.get_parent()
        if parent is not None:
            if hasattr(parent, "defines") and parent.defines(closure_attribute):
                attribute = getattr(parent, closure_attribute)
                return attribute(*args)
            else:
                # BUG FIX: the original dropped *args when recursing, so a
                # parameterized inherited attribute (e.g. lookup(label)) lost
                # its arguments once the search went more than one level up.
                return get_function_from_parent(parent, *args)
    setattr(type_of_class, attribute_name, get_function_from_parent)
def eq(type_of_class, attribute_name, equation):
    """Register *equation* as the defining equation of an attribute.

    Equations are stored on the class under the mangled name
    '__eq__<attribute_name>', where the lookup functions installed by
    syn()/inh() find them.  Also installs a 'defines' helper used by
    inherited-attribute lookup to test whether a node supplies an equation.
    """
    attribute_name = '__eq__' + attribute_name
    # If the parent class defines the same equation name, re-declare it via
    # syn() before installing the equation.
    # NOTE(review): syn() is called here with the *already-mangled* name, so
    # its lookup function would search for '__eq____eq__<name>'; moreover the
    # setattr on the next line immediately overwrites what syn() installed,
    # so both branches currently end up doing the same thing -- verify intent.
    if hasattr(type_of_class.__bases__[0], attribute_name):
        syn(type_of_class, attribute_name)
        setattr(type_of_class, attribute_name, equation)
    else:
        setattr(type_of_class, attribute_name, equation)
    setattr(type_of_class, "defines", lambda self, n: hasattr(self, n))
class Weaver:
    """Weaves an attribute grammar (a user-defined class) into a tree.

    Instantiating the user's grammar class runs its __init__, which performs
    all of the inh/syn/eq declarations as a side effect.
    """

    def __init__(self, attribute_class):
        # An instance of the user defined attribute class.
        self.attribute_class = attribute_class()

    @staticmethod
    def inheritors(class_):
        """Return the set of all (transitive) subclasses of *class_*."""
        subclasses = set()
        work = [class_]
        while work:
            parent = work.pop()
            for child in parent.__subclasses__():
                if child not in subclasses:
                    subclasses.add(child)
                    work.append(child)
        return subclasses

    @staticmethod
    def infer_parents(root_of_tree):
        """Attach a get_parent() callable to every node of the tree.

        Assumes that each node has a function called get_children, which
        returns a list of each child.  The root's get_parent() returns None.

        :param root_of_tree: root node of the tree to annotate
        :return: None
        """
        setattr(root_of_tree, "get_parent", lambda: None)

        def inorder_traversal(node):
            # BUG FIX: the original guarded with `len(children) is not 0`, an
            # identity comparison on an int (SyntaxWarning on modern CPython);
            # plain iteration handles the empty case for free.
            for child in node.get_children():
                # Each recursion frame has its own `node`, so the lambda
                # captures the correct parent for every child.
                setattr(child, "get_parent", lambda: node)
                inorder_traversal(child)
        inorder_traversal(root_of_tree)
|
dachrillz/Python-Rags
|
example/state_machine.py
|
from abc import ABC
from src.library import inh, eq, syn, Weaver
#################################################
# Grammar Specification
#################################################
class MinTree:
    """Reference attribute grammar for the state machine example."""

    @staticmethod
    def lookup_equation(reference_to_self, label):
        """Find the declaration with the given label, or None."""
        for item in reference_to_self.declaration_list:
            if item.label == label:
                return item
        return None

    @staticmethod
    def local_lookup_equation(reference_to_self, label):
        """Return this declaration if its label matches, else None."""
        if label == reference_to_self.label:
            # BUG FIX: the original returned the undefined name
            # 'reference_to_label', raising NameError on any match.
            return reference_to_self
        else:
            return None

    def __init__(self):
        # Transitions expose their endpoints as synthesized attributes.
        syn(Transition, "source_attribute", lambda n: n.source)
        syn(Transition, "target_attribute", lambda n: n.target)
        # Declarations inherit a label lookup resolved at the StateMachine.
        inh(Declaration, "lookup")
        eq(StateMachine, "lookup", lambda n, label: MinTree.lookup_equation(n, label))
        # syn(State, "localLookup", lambda n: None)
        eq(State, "localLookup", lambda n, label: MinTree.local_lookup_equation(n, label))
#################################################
# Tree Specification
#################################################
class StateMachine:
    """Root node: a flat list of State and Transition declarations."""

    def __init__(self):
        self.declaration_list = []

    def add_declaration(self, decl):
        # Each declaration is also exposed as an attribute named after its
        # label (e.g. machine.S1), in addition to the list entry.
        setattr(self, decl.label, decl)
        self.declaration_list.append(getattr(self, decl.label))

    def traverse_tree_aux(self, node, result):
        """
        Support Function for the tree traversal

        NOTE(review): this method looks copy-pasted from the min-tree
        example -- it references node.left/node.right and the name 'Leaf',
        none of which exist in this module, so it would raise NameError if
        called.  Verify whether it (and traverse below) should be removed.
        """
        result.append(node)
        if isinstance(node.left, Leaf):
            result.append(node.left)
        else:
            self.traverse_tre_aux(node.left, result)
        if isinstance(node.right, Leaf):
            result.append(node.right)
        else:
            self.traverse_tree_aux(node.right, result)

    def traverse(self):
        """
        In order traversal of the tree.
        Returns all the nodes in a list.

        NOTE(review): StateMachine has no 'node' attribute, so this would
        raise AttributeError if called; see traverse_tree_aux above.
        """
        result = [self]
        first_node = self.node
        self.traverse_tree_aux(first_node, result)
        return result

    def get_children(self):
        # The declarations are the machine's children in the RAG tree.
        return self.declaration_list
class Declaration(ABC):
    """Abstract base class for state machine declarations."""

    def __init__(self):
        super().__init__()


class State(Declaration):
    """A named state; a leaf in the RAG tree."""

    def __init__(self, label):
        super().__init__()
        self.label = label

    def get_children(self):
        return []


class Transition(Declaration):
    """A labeled edge between two states, identified by their labels."""

    def __init__(self, label, source, target):
        super().__init__()
        self.label = label
        self.source = source
        self.target = target

    def get_children(self):
        return []
#################################################
# Driver Code
#################################################
class MinTreeExample:
    """Builds a small state machine and weaves the grammar onto it."""

    def __init__(self):
        Weaver(MinTree)  # Just give the reference to the RAG class
        self.m = StateMachine()
        # Three states and three labeled transitions between them.
        for state_label in ("S1", "S2", "S3"):
            self.m.add_declaration(State(state_label))
        self.m.add_declaration(Transition("a", "S1", "S2"))
        self.m.add_declaration(Transition("b", "S2", "S1"))
        self.m.add_declaration(Transition("a", "S2", "S3"))
        Weaver.infer_parents(self.m)  # Infer parents for the tree
def run_example():
    """Print every transition and demonstrate label lookup on it."""
    example = MinTreeExample()
    for item in example.m.declaration_list:
        if isinstance(item, State):
            # States are skipped; only transitions are printed.
            pass
            # print(item.label)
        else:
            print(str(item.source_attribute()), end='')
            print(" - > ", end='')
            print(str(item.target_attribute()))
            print("Doing some lookup")
            print(item.lookup("S1"))
            print(item.lookup("S2"))
            print(item.lookup("S3"))
            print(item.lookup("S4"))
|
dachrillz/Python-Rags
|
tests/inh/test_inh_basic_1.py
|
<gh_stars>1-10
from unittest import TestCase
from src.library import inh, eq, Weaver
#################################################
# Grammar Specification
#################################################
class RAG:
    """Grammar: Node inherits 'inhAttr'; Root and Node each define it."""

    def __init__(self):
        inh(Node, "inhAttr")
        # A node's inhAttr comes from its *parent*: children of the Root
        # observe -1, children of a Node observe 0.
        eq(Root, "inhAttr", lambda n: -1)
        eq(Node, "inhAttr", lambda n: 0)
#################################################
# Tree Specification
#################################################
class Root:
    """Top of the chain; always reports its (possibly None) child."""

    def __init__(self):
        self.node = None

    def set_node(self, node):
        self.node = node

    def get_children(self):
        # NOTE: unlike Node below, Root reports [None] when no child is set.
        return [self.node]


class Node:
    """Chain link; may hold at most one child node."""

    def __init__(self):
        self.node = None

    def set_node(self, node):
        self.node = node

    def get_children(self):
        return [] if self.node is None else [self.node]
#################################################
# Test Classes
#################################################
class TestClass(TestCase):
    """
    Test inspiration: https://bitbucket.org/jastadd/jastadd-test/src/master/tests/inh/basic_01p/
    """

    def setUp(self):
        Weaver(RAG)
        self.root = Root()
        self.root.set_node(Node())
        self.root.node.set_node(Node())
        Weaver.infer_parents(self.root)
        # Warm the attribute cache before the assertions run.
        self.root.node.node.inhAttr()

    def test_basic_inheritance1(self):
        # The grandchild's parent is a Node, whose equation yields 0.
        self.assertEqual(self.root.node.node.inhAttr(), 0)

    def test_basic_inheritance2(self):
        # The child's parent is the Root, whose equation yields -1.
        self.assertEqual(self.root.node.inhAttr(), -1)
|
dachrillz/Python-Rags
|
tests/inh/test_inh_basic_3.py
|
from unittest import TestCase
from src.library import inh, eq, Weaver
#################################################
# Grammar Specification
#################################################
class RAG:
    """Grammar: A inherits 'value'; the second Node equation wins."""

    def __init__(self):
        inh(A, 'value')
        # The second eq() call overwrites the first, so children of a Node
        # observe -1, not 77.
        eq(Node, 'value', lambda n: 77)
        eq(Node, 'value', lambda n: -1)
#################################################
# Tree Specification
#################################################
class Node:
    """Inner node with exactly two children."""

    def __init__(self, left, right):
        self.left = left
        self.right = right

    @staticmethod
    def get_parent_class():
        return []

    def get_children(self):
        return [self.left, self.right]


class A:
    """Base class for leaf positions; may optionally hold a child node."""

    def __init__(self):
        self.node = None

    def get_children(self):
        return [] if self.node is None else [self.node]


class Left(A):
    """Left leaf; never has children."""

    def __init__(self):
        super().__init__()

    def get_children(self):
        return []


class Right(A):
    """Right leaf; never has children."""

    def __init__(self):
        super().__init__()

    def get_children(self):
        return []
#################################################
# Test Classes
#################################################
class TestClass(TestCase):
    """Both children of the Node inherit 'value' from its last equation."""

    def setUp(self):
        Weaver(RAG)
        self.node = Node(Left(), Right())
        Weaver.infer_parents(self.node)

    def test_basic_inheritance1(self):
        self.assertEqual(-1, self.node.left.value())
        self.assertEqual(-1, self.node.right.value())
|
g-dolphin/ECP
|
plots.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 30 13:40:15 2021

@author: GD

Script producing the ECP coverage heatmap and carbon-price plots.
Reads pre-computed CSVs from a hard-coded local checkout of the repo.
"""
# Script for plots
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns

plt.style.use('ggplot')
plt.rcParams['figure.figsize'] = (8,6)
plt.rcParams['font.size'] = 12
pd.set_option('display.max_columns', None)

# Machine-specific paths -- adjust before running elsewhere.
user_root_path = "/Users/gd/GitHub"
git_repo_path = "/ECP"
# NOTE(review): output_dir is empty, so the savefig call below writes to
# "/cov_hm.pdf" (the filesystem root) -- likely unintended; confirm.
output_dir = ""

# COVERAGE
df_cov = pd.read_csv(user_root_path+git_repo_path+"/coverage/total_coverage.csv")
# Restrict to the 1990-2018 sample window.
df_cov = df_cov.loc[df_cov['Year']>=1990]
df_cov = df_cov.loc[df_cov['Year']<=2018]
# only keep countries for which coverage becomes >0 at least once in the sample
jur_list = df_cov.loc[df_cov['cov_tax_ets_share_jurGHG']>0, "Jurisdiction"].unique()
df_cov = df_cov.loc[df_cov.Jurisdiction.isin(jur_list), :]

## Heatmap
# Jurisdiction x Year matrix of covered-emission shares.
df_cov_hm = df_cov.pivot(index='Jurisdiction', columns='Year', values='cov_tax_ets_share_jurGHG')
plt.subplots(figsize=(20,15))
#ax=subplot(111)
# Reversed rocket palette: darker = higher coverage.
cmap = sns.cm.rocket_r
#sns.set(font_scale=1)
sns.heatmap(df_cov_hm, vmin=0, vmax=1,cmap=cmap,annot_kws={"size": 36})
plt.savefig(output_dir+"/cov_hm.pdf")
plt.close()

# PRICES
df_prices_econ = pd.read_csv(user_root_path+git_repo_path+"/price/ecp_economy/ecp_vw/ecp.csv")
df_prices_sect = pd.read_csv(user_root_path+git_repo_path+"/price/ecp_sectors/ecp_vw/ecp_sector.csv")

# Jurisdictions shown in the line and bar charts below.
cntries_list = ['Argentina', 'Austria', 'Belgium', 'Bulgaria', 'Chile', 'Colombia',
                'Croatia', 'Cyprus', 'Czech Republic', 'Denmark', 'Estonia',
                'Finland', 'France', 'Germany', 'Greece', 'Hungary', 'Iceland',
                'Ireland', 'Italy', 'Japan', 'Latvia', 'Lithuania', 'Luxembourg',
                'Malta', 'Mexico', 'Netherlands', 'Norway', 'Poland', 'Portugal',
                'Romania', 'Slovak Republic', 'Slovenia', 'Spain', 'Sweden',
                'Switzerland', 'Ukraine', 'United Kingdom']

## Jurisdiction
# One line per country: economy-wide emission-weighted carbon price over time.
fig = plt.figure(figsize=(18,12))
for ctry in cntries_list:
    temp_econ = df_prices_econ.loc[df_prices_econ.Jurisdiction==ctry, :]
    plt.plot(temp_econ.Year, temp_econ.ECP_tax_ets_jurGHG_2019USD, label=ctry)
plt.title("Emission coverage by carbon pricing schemes, by jurisdiction", fontsize=26)
plt.ylabel("% of jurisdiction's total GHG emissions", fontsize=24)
plt.xlim([1990, 2018])
plt.legend(loc='upper center', bbox_to_anchor=(0.5, -0.2), fancybox=True,
           shadow=False, ncol=6)
plt.tight_layout()
plt.show()
plt.close()

## Sectors
# Grouped bar chart of 2018 sector prices.  NOTE(review): sector codes are
# presumed to map as ABFLOW003=Power, ABFLOW012=Industry, ABFLOW028=Transport
# (per the bar labels below) -- confirm against the ECP code book.
sectors = ["ABFLOW003", "ABFLOW012", "ABFLOW028"]
selec_conditions = (df_prices_sect.Jurisdiction.isin(cntries_list)) & (df_prices_sect.Flow.isin(sectors)) & (df_prices_sect.Year==2018)
temp_sect = df_prices_sect.loc[selec_conditions, ["Jurisdiction", "Year", "Flow", "Total_ew_price_sector_2019USD"]]

fig = plt.figure(figsize=(18,12))
# set width of bars
barWidth = 0.25
# set heights of bars
bars1 = np.array(temp_sect.loc[temp_sect.Flow=="ABFLOW003", "Total_ew_price_sector_2019USD"])
bars2 = np.array(temp_sect.loc[temp_sect.Flow=="ABFLOW012", "Total_ew_price_sector_2019USD"])
bars3 = np.array(temp_sect.loc[temp_sect.Flow=="ABFLOW028", "Total_ew_price_sector_2019USD"])
# Set position of bar on X axis
r1 = np.arange(len(bars1))
r2 = [x + barWidth for x in r1]
r3 = [x + barWidth for x in r2]
# Make the plot
plt.bar(r1, bars1, color='#7f6d5f', width=barWidth, edgecolor='white', label='Power')
plt.bar(r2, bars2, color='#557f2d', width=barWidth, edgecolor='white', label='Industry')
plt.bar(r3, bars3, color='#2d7f5e', width=barWidth, edgecolor='white', label='Transport')
# Add xticks on the middle of the group bars
plt.xlabel('group', fontweight='bold')
plt.xticks([r + barWidth for r in range(len(bars1))], cntries_list, rotation=90)
# Create legend & Show graphic
plt.legend()
plt.show()
|
Jiafauser/News_blog
|
apps/doc/urls.py
|
<filename>apps/doc/urls.py
from django.urls import path
from . import views
# Namespace for reversing, e.g. reverse('docs:doc_list').
app_name = 'docs'
urlpatterns = [
    # Document download landing page.
    path('', views.doc_list, name='doc_list'),
]
|
Jiafauser/News_blog
|
apps/verifications/urls.py
|
<filename>apps/verifications/urls.py
from django.urls import path
from . import views
# Namespace for reversing, e.g. reverse('verifications:image_codes', ...).
app_name = 'verifications'
urlpatterns = [
    # CAPTCHA image endpoint; the client supplies a UUID identifying the code.
    path('image_codes/<uuid:image_code_id>/', views.ImageCode.as_view(), name='image_codes'),
]
|
Jiafauser/News_blog
|
apps/doc/views.py
|
from django.shortcuts import render
# Create your views here.
def doc_list(request):
    """Render the document download page."""
    template_name = 'doc/docDownload.html'
    return render(request, template_name)
|
Jiafauser/News_blog
|
apps/user/models.py
|
<filename>apps/user/models.py<gh_stars>0
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.contrib.auth.models import UserManager as _UserManager
# Create your models here.
# 方法重写
class UserManager(_UserManager):
    """Manager for the custom user model.

    Overrides superuser creation so that ``email`` becomes optional,
    delegating the real work to Django's built-in ``UserManager``.
    """

    def create_superuser(self, username, password, email=None, **extra_fields):
        # Bug fix: propagate the created user object back to the caller —
        # Django's manager returns the new user, and the original override
        # silently dropped it.
        return super().create_superuser(username=username, password=password, email=email, **extra_fields)
class Users(AbstractUser):
    """Custom user model: adds a unique, required mobile number and an
    email-confirmation flag on top of Django's AbstractUser."""

    objects = UserManager()
    # Extra fields prompted for by `createsuperuser` besides USERNAME_FIELD.
    REQUIRED_FIELDS = ['mobile']
    # Mobile phone number; verbose_name/help_text are user-facing (Chinese).
    mobile = models.CharField(max_length=11, unique=True, verbose_name='手机号', help_text='手机号', error_messages={
        'unique': '此手机号不可使用'
    })
    # True once the user's email address has been confirmed.
    email_ac = models.BooleanField(default=False, verbose_name='邮箱状态')

    class Meta:
        db_table = 'tb_users'
        verbose_name = '用户'

    def __str__(self):
        return self.username
|
Jiafauser/News_blog
|
apps/course/views.py
|
from django.shortcuts import render
from django.views import View
# Create your views here.
def course_list(request):
    """Render the course overview page."""
    template_name = 'course/course.html'
    return render(request, template_name)
|
Jiafauser/News_blog
|
apps/course/urls.py
|
<gh_stars>0
from django.urls import path
from . import views
# Namespace for reversing, e.g. reverse('course:course_list').
app_name = 'course'
urlpatterns = [
    # Course listing landing page.
    path('', views.course_list, name='course_list'),
]
|
Jiafauser/News_blog
|
apps/news/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.views import View
# Create your views here.
class IndexView(View):
    """Render the news site's front page."""

    def get(self, request):
        return render(request, 'news/index.html')
class SearchView(View):
    """Render the news search page."""

    def get(self, request):
        return render(request, 'news/search.html')
|
Jiafauser/News_blog
|
apps/news/urls.py
|
from django.urls import path
from . import views
# Namespace for reversing, e.g. reverse('news:index').
app_name = 'news'
urlpatterns = [
    path('', views.IndexView.as_view(), name='index'),
    path('search/', views.SearchView.as_view(), name='search'),
]
|
Jiafauser/News_blog
|
apps/verifications/views.py
|
<filename>apps/verifications/views.py<gh_stars>0
from django.shortcuts import render
from utils.captcha.captcha import captcha
from django.http import HttpResponse
# Create your views here.
from django.views import View
class ImageCode(View):
    """Serve a freshly generated CAPTCHA image."""

    def get(self, request, image_code_id):
        """Generate a CAPTCHA and return the raw image bytes.

        `image_code_id` is the client-generated UUID from the URL.
        """
        # `text` is the CAPTCHA answer. TODO(review): it is currently
        # discarded — persist it (e.g. keyed by image_code_id in the session
        # or redis) so the answer can be checked when the form is submitted.
        text, image = captcha.generate_captcha()
        # Bug fix: 'image/jpeg' is the registered MIME type; 'image/jpg' is not.
        return HttpResponse(content=image, content_type='image/jpeg')
|
Camnooten/django-ban
|
fts/Ban.py
|
<filename>fts/Ban.py
from ._base import BaseTestCase
from datetime import datetime, timedelta, timezone
import dateutil.parser
from django.conf import settings
from django.contrib.auth.models import User
from django.core.management import call_command
from django.test import override_settings
from ban.models import Ban, Warn
class TestBan(BaseTestCase):
    """Functional (Selenium-driven) tests for the ban/warn admin actions.

    Fixtures provide Harriet (pk=1, acting admin) and Florence (pk=2, the
    user being banned or warned). Assumes BaseTestCase supplies the browser,
    login helpers and admin-panel helpers — see fts/_base.py.
    """

    def setUp(self):
        super(TestBan, self).setUp()
        self.harriet = User.objects.get(pk=1)
        self.florence = User.objects.get(pk=2)

    def assert_can_ban_user_for_period(self, period_name, period_length):
        """Shared scenario: ban Florence via the admin action named for
        `period_name` and assert the resulting ban ends `period_length`
        days from now (within a 60-second tolerance)."""
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She bans Florence for requested period of time.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Ban selected users for {}'.format(period_name))
        # She goes to the admin panel for bans.
        self.get('/admin/ban/ban')
        # She sees a ban for Florence ending after specified period.
        row = self.browser.find_element_by_class_name('row1').text
        # Strip both usernames from the row; what remains is the end date.
        found_end_text = row.replace(self.florence.username, '').replace(self.harriet.username, '')
        found_end_ts = dateutil.parser.parse(found_end_text).replace(tzinfo=timezone.utc).timestamp()
        expected_end_ts = (datetime.now(timezone.utc) + timedelta(days=period_length)).timestamp()
        self.assertTrue(row.startswith(self.florence.username))
        self.assertTrue(row.endswith(self.harriet.username))
        self.assertAlmostEqual(expected_end_ts, found_end_ts, delta=60)

    def test_can_ban_user_permanently(self):
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She bans Florence permanently.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Ban selected users permanently')
        # She goes to the admin panel for bans.
        self.get('/admin/ban/ban')
        # She sees a ban for Florence with no end date.
        self.assertEqual(
            self.browser.find_element_by_class_name('row1').text,
            'test_user1 (None) admin',
        )

    def test_can_ban_user_for_month(self):
        self.assert_can_ban_user_for_period('month', 30)

    def test_can_ban_user_for_week(self):
        self.assert_can_ban_user_for_period('week', 7)

    def test_can_ban_user_for_day(self):
        self.assert_can_ban_user_for_period('day', 1)

    def test_can_warn_user(self):
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She warns Florence.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Warn selected users')
        # She goes to the admin panel for warns.
        self.get('/admin/ban/warn')
        # She sees a warn for Florence.
        self.assertEqual(
            self.browser.find_element_by_class_name('row1').text,
            'test_user1 admin',
        )

    def test_banned_user_cannot_log_in(self):
        # Florence was banned some time ago.
        Ban.objects.create(creator=self.harriet, receiver=self.florence)
        # She tries to log in.
        self.login_as_test_user()
        # She is redirected to the login page.
        self.assertEqual(self.browser.current_url, '{}{}'.format(self.live_server_url, settings.LOGIN_URL))
        # She sees a message that she was banned.
        self.assertIn('This account has been banned.', self.get_text())

    def test_banned_user_can_log_in_after_ban_period(self):
        # Florence was banned some time ago, but is active now.
        end_date = datetime.now(timezone.utc) - timedelta(days=1)
        Ban.objects.create(creator=self.harriet, receiver=self.florence, end_date=end_date)
        # She logs in.
        self.login_as_test_user()
        # She is redirected to the login redirect url.
        self.assertEqual(self.browser.current_url, '{}{}'.format(self.live_server_url, settings.LOGIN_REDIRECT_URL))
        # She doesn't see a message that she was banned.
        self.assertNotIn('This account has been banned.', self.get_text())

    @override_settings(WARNS_THRESHOLD=3)
    def test_user_gets_banned_after_too_many_warnings(self):
        # Florence has been warned two times already.
        Warn.objects.create(receiver=self.florence, creator=self.harriet)
        Warn.objects.create(receiver=self.florence, creator=self.harriet)
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She warns Florence.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Warn selected users')
        # She goes to the admin panel for warns.
        self.get('/admin/ban/warn')
        # She sees no warns there.
        self.assertIn('0 warns', self.get_text())
        # She goes to the admin panel for bans.
        self.get('/admin/ban/ban')
        # She sees a ban for Florence with no end date.
        self.assertEqual(
            self.browser.find_element_by_class_name('row1').text,
            'test_user1 (None) (None)',
        )

    def test_multiple_bans_merge_into_one(self):
        # Florence was banned some time ago.
        end_date = datetime.now(timezone.utc) + timedelta(days=1)
        Ban.objects.create(creator=self.harriet, receiver=self.florence, end_date=end_date)
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She bans Florence permanently.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Ban selected users permanently')
        # She goes to the admin panel for bans.
        self.get('/admin/ban/ban')
        # She sees a permanent ban for Florence with no end date.
        self.assertEqual(
            self.browser.find_element_by_class_name('row1').text,
            'test_user1 (None) admin',
        )
        # She does not see any other bans for Florence as they were merged into one.
        self.assertIn('1 ban', self.get_text())

    def test_cannot_warn_banned_user(self):
        # Florence was banned some time ago.
        Ban.objects.create(creator=self.harriet, receiver=self.florence)
        # Harriet logs in as an admin.
        self.login_as_admin()
        # She hits the admin panel for users.
        self.get('/admin/auth/user')
        # She warns Florence.
        self.select_admin_object(self.florence.pk)
        self.admin_action('Warn selected users')
        # She goes to the admin panel for warns.
        self.get('/admin/ban/warn')
        # She sees no warns there.
        self.assertIn('0 warns', self.get_text())

    def test_can_clean_inactive_bans(self):
        # There are some inactive bans.
        end_date = datetime.now(timezone.utc) - timedelta(days=1)
        Ban.objects.create(creator=self.harriet, receiver=User.objects.get(pk=3), end_date=end_date)
        Ban.objects.create(creator=self.harriet, receiver=User.objects.get(pk=4), end_date=end_date)
        # And also one active one.
        Ban.objects.create(creator=self.harriet, receiver=self.florence)
        # Harriet calls management command to clean up inactive bans.
        call_command('clean_inactive_bans')
        # She logs in as an admin.
        self.login_as_admin()
        # She goes to the admin panel for bans.
        self.get('/admin/ban/ban')
        # She sees only one ban there.
        self.assertIn('1 ban', self.get_text())
|
Camnooten/django-ban
|
ban/admin.py
|
from datetime import datetime, timedelta, timezone
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth import get_user_model
from ban.models import Ban, Warn
USER_MODEL = get_user_model()
class ExtendedUserAdmin(UserAdmin):
    """UserAdmin extended with bulk ban/warn admin actions."""

    actions = [
        'ban_selected_users_permanently',
        'ban_selected_users_for_month',
        'ban_selected_users_for_week',
        'ban_selected_users_for_day',
        'warn_selected_users',
    ]

    def warn_selected_users(self, request, queryset):
        """Create a Warn for each selected user, issued by the current admin."""
        for user in queryset:
            Warn.objects.create(receiver=user, creator=request.user)
        self.message_user(request, "Successfully warned selected users.")

    def ban_selected_users_permanently(self, request, queryset):
        """Ban each selected user with no end date (permanent)."""
        for user in queryset:
            Ban.objects.create(receiver=user, creator=request.user)
        self.message_user(request, "Successfully banned selected users permanently.")

    def ban_selected_users_for_month(self, request, queryset):
        self._ban(request, queryset, 30)
        self.message_user(request, "Successfully banned selected users for a month.")

    def ban_selected_users_for_week(self, request, queryset):
        self._ban(request, queryset, 7)
        self.message_user(request, "Successfully banned selected users for a week.")

    def ban_selected_users_for_day(self, request, queryset):
        self._ban(request, queryset, 1)
        self.message_user(request, "Successfully banned selected users for a day.")

    def _ban(self, request, queryset, days):
        """Ban each selected user until `days` days from now (UTC)."""
        end_date = datetime.now(timezone.utc) + timedelta(days=days)
        for user in queryset:
            Ban.objects.create(receiver=user, creator=request.user, end_date=end_date)
class BanAdmin(admin.ModelAdmin):
    """Admin list for bans: who is banned, until when, and by whom."""
    list_display = ('receiver', 'end_date', 'creator')
class WarnAdmin(admin.ModelAdmin):
    """Admin list for warnings: who was warned and by whom."""
    list_display = ('receiver', 'creator')
# Swap Django's stock user admin for the extended one with ban/warn actions.
admin.site.unregister(USER_MODEL)
admin.site.register(USER_MODEL, ExtendedUserAdmin)
admin.site.register(Ban, BanAdmin)
admin.site.register(Warn, WarnAdmin)
|
Camnooten/django-ban
|
demo/main/urls.py
|
<gh_stars>1-10
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
    # Demo root: minimal placeholder view.
    url(
        r'^$',
        views.dummy,
        name='dummy',
    ),
    url(
        # NOTE(review): the function-based `auth_views.login` was removed in
        # Django 2.1; on modern Django this must become
        # auth_views.LoginView.as_view(template_name='main/login.html') — confirm
        # the pinned Django version.
        '^login/',
        auth_views.login,
        {'template_name': 'main/login.html'},
        name='login',
    ),
]
|
Camnooten/django-ban
|
ban/urls.py
|
from django.conf.urls import patterns, url
# NOTE(review): `patterns()` was deprecated in Django 1.8 and removed in 1.10;
# on modern Django, urlpatterns must be a plain list — confirm the pinned version.
urlpatterns = patterns('',
    # url(r'^$', someview, name='someview'),
)
|
Camnooten/django-ban
|
demo/main/views.py
|
<gh_stars>1-10
from django.http import HttpResponse
def dummy(request):
    """Placeholder view for the demo app's root URL."""
    body = 'DummyResponse'
    return HttpResponse(body)
|
Camnooten/django-ban
|
fabfile.py
|
from contextlib import contextmanager
from fabric.api import cd, local, prefix, shell_env
# Local dev environment layout used by the Fabric tasks below.
VENV_PATH = '~/.virtualenvs/ban'
PYTHON_PATH = '/usr/bin/python3.4'
SETTINGS_MODULE = 'demo.settings'
DEMO_PATH = 'demo'
def install():
    """Create the virtualenv, install demo requirements, and set up the DB."""
    local('virtualenv -p {} {}'.format(PYTHON_PATH, VENV_PATH))
    with _venv_local():
        with cd(DEMO_PATH):
            # NOTE(review): fabric's `cd` affects `run`, not `local`, so this
            # block is likely a no-op for `local` and the repo-relative path
            # below is what actually makes the command work — confirm.
            local('pip install -r {}/requirements.txt'.format(DEMO_PATH))
        _django_local('makemigrations')
        _django_local('migrate')
        _django_local('loaddata base.json')
def runserver():
    """Run the Django development server inside the virtualenv."""
    with _venv_local():
        _django_local('runserver')
def updatedb():
    """Generate and apply any pending database migrations."""
    with _venv_local():
        _django_local('makemigrations')
        _django_local('migrate')
def ftest(target):
    """Run a single functional test module, e.g. `fab ftest:Ban`."""
    with _venv_local():
        _django_local('test fts.{} -v 2'.format(target))
def utest():
    """Run the unit test suite."""
    with _venv_local():
        _django_local('test ban.tests -v 2')
def testall():
    """Run unit + functional tests under coverage and print a combined report."""
    with _venv_local():
        # Clear stale parallel-coverage data files before the runs.
        local('rm -f {}/.coverage*'.format(DEMO_PATH))
        local('coverage run -p {}/manage.py test ban.tests -v 2'.format(DEMO_PATH))
        local('coverage run -p {}/manage.py test fts --pattern="*" -v 2'.format(DEMO_PATH))
        local('coverage combine')
        # Exclude virtualenv sources from the report.
        local('coverage report -m --omit="{}/*"'.format(VENV_PATH))
        local('rm -f {}/.coverage*'.format(DEMO_PATH))
def _django_local(command):
    """Run a `manage.py` subcommand locally inside the demo project."""
    return local(
        'python {}/manage.py {}'.format(DEMO_PATH, command)
    )
@contextmanager
def _venv_local():
    """Context manager: activate the virtualenv and export DJANGO_SETTINGS_MODULE
    for every command issued inside the block."""
    with shell_env(DJANGO_SETTINGS_MODULE=SETTINGS_MODULE):
        with prefix('. %s/bin/activate' % VENV_PATH):
            yield
|
Camnooten/django-ban
|
ban/management/commands/clean_inactive_bans.py
|
from datetime import datetime, timezone
from django.core.management.base import BaseCommand
from ban.models import Ban
class Command(BaseCommand):
    """Management command that deletes bans whose end date has passed."""

    help = 'Cleans up inactive bans.'

    def handle(self, *args, **options):
        cutoff = datetime.now(timezone.utc)
        # A ban whose end_date is at or before `cutoff` is no longer in force.
        Ban.objects.filter(end_date__lte=cutoff).delete()
        self.stdout.write('Successfully cleaned up inactive bans.')
|
Chatha-Sphere/mousa
|
train.py
|
from helpers import prepare_batches, get_target_tensor
import torch
import torch.nn as nn
import time
from random import shuffle
def train(model, training_data, validation_data,
          epochs, lr, evaluate_per, batch_size):
    """Train a character-level RNN with Adam + cross-entropy loss.

    Args:
        model (nn.Module): network called as model(input_sequences, hx)
            returning (logits, hidden_state).
        training_data / validation_data (list): lists of integer sequences;
            batched via helpers.prepare_batches.
        epochs (int): number of passes over the training data.
        lr (float): Adam learning rate.
        evaluate_per (int): run validation every this many epochs.
        batch_size (int): fixed batch size; undersized tail batches are skipped.
    """
    model.train()  # enable training mode (dropout etc.) and gradient tracking
    optimizer = torch.optim.Adam(model.parameters(), lr = lr)
    # CrossEntropyLoss == log-softmax + negative log likelihood on raw logits.
    loss_function = nn.CrossEntropyLoss()
    if torch.cuda.is_available():
        model.cuda()
        print("GPU is available")
    else:
        print("GPU not available, CPU used")
    for e in range(epochs):
        start_time = time.time()
        training_batches = prepare_batches(training_data, batch_size)
        # hx is the LSTM hidden state; None lets torch initialize it to zeros.
        hx = None
        for input_sequences, target_sequences in training_batches:
            # Skip batches that are undersized.
            if len(input_sequences) != batch_size:
                continue
            y_hat, hx = model(input_sequences, hx)
            y = get_target_tensor(target_sequences)
            loss = loss_function(y_hat.flatten(0,1), y)
            # Detach the hidden state so backprop is truncated at batch
            # boundaries instead of flowing through every past timestep.
            hx = tuple(h.detach() for h in hx)
            model.zero_grad()
            loss.backward()
            optimizer.step()
            training_loss = loss.item()
        print(f"epoch: {e+1}/{epochs} | time: {time.time() - start_time:.0f}s")
        print(f"training loss: {training_loss :.2f}")
        shuffle(training_data)
        if (e + 1) % evaluate_per == 0:
            model.eval()
            validation_batches = prepare_batches(validation_data,
                                                 batch_size)
            val_loss = 0
            n_batches = 0
            # Bug fix: evaluate under no_grad. Previously each validation
            # batch extended the autograd graph (hx was never detached here),
            # leaking memory across the validation pass.
            with torch.no_grad():
                for input_sequences, target_sequences in validation_batches:
                    if len(input_sequences) != batch_size:
                        continue
                    y_hat, hx = model(input_sequences, hx)
                    y = get_target_tensor(target_sequences)
                    loss = loss_function(y_hat.flatten(0,1), y)
                    val_loss += loss.item()
                    n_batches += 1
            model.train()
            print(f"validation loss: {val_loss / n_batches:.2f}")
            shuffle(validation_data)
#TODO
#exception for keyboard interrupt
#anneal learning rate if no improvement has been observed?
#save model with best validation loss
|
Chatha-Sphere/mousa
|
model.py
|
import torch.nn as nn
import torch.nn.functional as F
from helpers import one_hot
from torch import stack
class CharRNN(nn.Module):
    """Character-level LSTM language model with a linear decoder head."""

    def __init__(self, n_chars, hidden_size, n_rnn_layers=1, dropout=0):
        """
        Args:
            n_chars (int): vocabulary size (number of unique characters);
                also the one-hot input width and the output width.
            hidden_size (int): LSTM hidden state size.
            n_rnn_layers (int): number of stacked LSTM layers.
            dropout (float): inter-layer LSTM dropout (only applies if
                n_rnn_layers > 1).
        """
        super().__init__()
        self.n_layers = n_rnn_layers
        self.n_hidden = hidden_size
        self.n_chars = n_chars
        # input size corresponds to the number of unique characters
        self.lstm = nn.LSTM(n_chars, hidden_size, n_rnn_layers, dropout=dropout,
                            batch_first=True)
        # decoder layer: projects hidden states back to character logits
        self.dense = nn.Linear(hidden_size, n_chars)

    def forward(self, input_sequences, hx):
        """Run a batch of integer sequences through the network.

        Args:
            input_sequences (list of list of int): equal-length sequences.
            hx: LSTM hidden state tuple, or None to start from zeros.
        Returns:
            (logits, hidden): logits of shape (batch, seq_len, n_chars)
            and the updated hidden state.
        """
        # one hot encode a list of sequences; each becomes (seq_len, n_chars)
        encoded_sequences = [one_hot(sequence, self.n_chars)
                             for sequence in input_sequences]
        # stacking on dim 0 gives (batch_size, seq_len, n_chars),
        # matching batch_first=True above
        batch = stack(encoded_sequences, dim=0)
        # pass into the LSTM
        recurrent_output, hidden = self.lstm(batch, hx)
        # "dense" layer projects back down to the space of available characters
        linear_output = self.dense(recurrent_output)
        return linear_output, hidden
|
Chatha-Sphere/mousa
|
helpers.py
|
<gh_stars>1-10
import torch
import torch.nn.functional as F
from torch.distributions.multinomial import Multinomial
def one_hot(sequence, n_states):
    """
    One-hot encode a list of integer labels.

    Given a list of integers and the number of unique states, return a
    float tensor of shape (len(sequence), n_states) with a single 1.0 per row.
    Moved to the GPU when one is available.
    """
    encoded = torch.eye(n_states)[sequence, :]
    if torch.cuda.is_available():
        encoded = encoded.cuda()
    return encoded
def decode_one_hot(vector):
    """Given a one-hot encoded vector, return the index of its non-zero entry."""
    hot_index = vector.nonzero()
    return hot_index.item()
def prepare_batches(sequences, batch_size):
    """
    Yield (input, target) batches from a list of sequences.

    Each sequence contributes an input (all but its last element) and a
    target (all but its first element), i.e. the target is the input shifted
    one step ahead. Batches are consecutive slices of at most `batch_size`
    sequences; the final batch may be smaller.
    """
    for start in range(0, len(sequences), batch_size):
        chunk = sequences[start:start + batch_size]
        inputs = [seq[:-1] for seq in chunk]
        targets = [seq[1:] for seq in chunk]
        yield inputs, targets
def get_target_tensor(target_sequences):
    """
    Flatten a batch of target sequences into one long 1-D tensor of length
    (number_of_sequences * sequence_length). Moved to the GPU when available.
    """
    stacked = torch.stack([torch.tensor(seq) for seq in target_sequences])
    flat = stacked.flatten()
    if torch.cuda.is_available():
        flat = flat.cuda()
    return flat
def make_sequences(text, sequence_length=100):
    """
    Split `text` into consecutive chunks of exactly `sequence_length`
    characters; any trailing remainder shorter than that is dropped.
    """
    n_full = len(text) // sequence_length
    return [text[k * sequence_length:(k + 1) * sequence_length]
            for k in range(n_full)]
def sample(model, encoder, size, prime_str, temperature, topk=None):
    """
    Randomly generate text from a trained model.
    Args:
        model (nn.Module): trained LSTM neural network
        encoder (CharEncoder object): contains character-to-integer mappings
        size (int): length of generated sample
        prime_str (str): input to initialize the network's hidden state
        temperature (float in range (0,1]): dampens the model's character probability distribution:
            values less than 1 make the model more conservative, giving additional weight to
            high-probability guesses and penalizing low-probability guesses. As temperature goes to
            zero, softmax becomes "argmax," and the most probable character is picked almost certainly.
        topk (int or None): if given, sample only among the k most probable
            characters at each step.
    Returns:
        output_str (str): Randomly-generated text sample of length (size)
    """
    # deactivate training mode
    model.eval()
    # initialize output string
    output_str = prime_str
    # vectorize input string as a sequence of ints
    input_sequence = [encoder.char2int[char] for char in prime_str]
    # initialize hidden state to None (the model starts from zeros)
    hx = None
    for i in range(size):  # generate characters of output string one at a time
        # get model output and hidden state (short-term and long-term memory)
        out, hx = model([input_sequence], hx)
        # detach so the generation loop does not accumulate an autograd graph
        hx = tuple(h.detach() for h in hx)
        # ignore batch dimension because our batch is of size 1
        out = out.squeeze()
        # interpreting output as unnormalized logits, obtain probabilities of the next character, scaled
        # by temperature, conditioned on the input sequence.
        # a higher temperature means a softer probability distribution, i.e. less conservative predictions.
        probs = F.softmax(out/ temperature, dim=-1)
        # If probs are generated on a string of multiple characters (the first
        # iteration, primed with prime_str), keep prediction of next character only
        if len(probs.shape) > 1:
            probs = probs[-1,:]
        if topk is not None:
            # sample from only the top k most probable characters:
            # zero out everything, then scatter the top-k probabilities back
            values, indices = probs.topk(topk)
            if torch.cuda.is_available():
                zeros = torch.zeros(encoder.n_chars).cuda()
            else:
                zeros = torch.zeros(encoder.n_chars)
            probs = torch.scatter(zeros, 0, indices, values)
        # sample a random character from the (possibly truncated) distribution
        next_char_ix = torch.multinomial(probs,1).item()
        # set the new input sequence as just the next predicted character while retaining hidden state
        input_sequence = [next_char_ix]
        # add the next character to the output string
        output_str += encoder.int2char[next_char_ix]
    return output_str
|
Chatha-Sphere/mousa
|
shakespeare.py
|
<gh_stars>1-10
from corpora import CharEncoder, extract_shakespeare_data
from random import seed, shuffle
from model import CharRNN
from train import train
from helpers import make_sequences, sample
import argparse
def main():
    """Train a character-level RNN on the Shakespeare corpus and print a sample."""
    parser = argparse.ArgumentParser("Char-RNN on the complete works of Shakespeare")
    # NOTE(review): argparse `type=bool` is truthy for ANY non-empty string,
    # so `--test False` still enables test mode; `action="store_true"` would
    # be the conventional fix — confirm intended CLI usage.
    parser.add_argument("--test", type=bool, default=False,
                        help = "if true, keep only a thousand lines from the Shakespeare corpus")
    args = parser.parse_args()
    seed(1616)
    text = extract_shakespeare_data("data/t8.shakespeare.txt")
    char_encoder = CharEncoder(text)
    # get sequences of 100 characters
    sequences = make_sequences(text)
    # vectorize with numeric labeling:
    # each character gets mapped to an integer & vice versa
    sequences = char_encoder.label_sequences(sequences)
    if args.test:
        print("Test: downsizing data to 1,000 sequences...")
        sequences = sequences[:1000]
    shuffle(sequences)
    # 90/10 train/validation split
    n_training_sequences = int(.9 * len(sequences))
    # split the dataset into training and validation sets
    training = sequences[:n_training_sequences]
    validation = sequences[n_training_sequences:]
    hidden_size = 128
    rnn = CharRNN(char_encoder.n_chars, hidden_size)
    train(rnn, training, validation, epochs = 4, lr = 0.01, evaluate_per = 2, batch_size = 20)
    print(sample(rnn, prime_str = "Macbeth", size = 100, encoder = char_encoder,
                 temperature=.9))

if __name__ == "__main__":
    main()
|
Chatha-Sphere/mousa
|
corpora.py
|
"""Contains classes to extract text data and to encode text corpora"""
import pdb
class CharEncoder():
    """
    Character-level encoder for a text corpus: stores the set of unique
    characters plus bidirectional char<->int mappings.
    """

    def __init__(self, corpus):
        """
        Args:
            corpus (iterable of str): the full text, duplicates included
        """
        self.chars = tuple(set(corpus))
        self.n_chars = len(self.chars)
        self.int2char = {index: char for index, char in enumerate(self.chars)}
        self.char2int = {char: index for index, char in self.int2char.items()}

    def label_sequences(self, text_sequences):
        """Map every character of every sequence to its integer label
        (sometimes called vectorizing)."""
        labelled = []
        for sequence in text_sequences:
            labelled.append([self.char2int[char] for char in sequence])
        return labelled
def extract_shakespeare_data(path = "data/t8.shakespeare.txt"):
    """
    Load the MIT online Shakespeare corpus from a text file.

    Lines [0:244] (the license header) and the final line are dropped; blank
    lines and `<<...>>` annotation blocks are stripped from the body.

    Args:
        path (str): path to Shakespeare text file
    Returns:
        cleaned_text (str): entire cleaned text stripped of header/notes
    """
    with open(path) as f:
        text = f.read()
    cleaned_text = ""
    skip = False
    for line in text.split("\n")[244:-1]:
        if line[:2] == "<<":
            skip = True
        if line[-2:] == ">>":
            # Bug fix: this was an `elif`, so an annotation that opens and
            # closes on the SAME line (`<<...>>`) left `skip` stuck at True
            # and silently discarded the rest of the corpus.
            skip = False
            continue
        if skip or line == "":
            continue
        line = line+"\n"
        cleaned_text += line
    return cleaned_text
def extract_kjv_data(path = "data/kjv.txt"):
    """
    Load the King James Version of the Bible, trimming the first 996
    characters (header) and the last 18730 characters (footer).
    """
    with open(path) as f:
        raw = f.read()
    return raw[996:-18730]
def make_sequences(text, sequence_length=100):
    """
    Split a text into consecutive same-length character chunks; a trailing
    remainder shorter than `sequence_length` is discarded.
    """
    chunk_count = len(text) // sequence_length
    chunks = []
    start = 0
    for _ in range(chunk_count):
        chunks.append(text[start:start + sequence_length])
        start += sequence_length
    return chunks
|
SMSEdge/API-Python
|
smsedge_api_python/__init__.py
|
<filename>smsedge_api_python/__init__.py
from smsedge_api_python.sms_edge_api import SmsEdgeApi
|
SMSEdge/API-Python
|
smsedge_api_python/sms_edge_api.py
|
<filename>smsedge_api_python/sms_edge_api.py<gh_stars>0
import requests
from cerberus import Validator
class SmsEdgeApi(object):
    """Thin client for the SMSEdge REST API (v1).

    Each public method validates its `fields` dict against a cerberus rule
    set and POSTs it (plus the API key) to the matching endpoint, returning
    the decoded JSON response, a validation-error dict, or None on transport
    failure.
    """

    # MAIN
    endpoint = 'https://api.smsedge.io/v1/'
    # NOTE(review): `apiKey` appears unused (the instance attribute `api_key`
    # set in __init__ is what requests use); kept for backward compatibility.
    apiKey = ''

    def __init__(self, api_key):
        self.api_key = api_key

    # REFERENCES
    def get_functions(self):
        """
        This function returns all available API functions
        :return:
        """
        return self._validate_and_run('references/functions/')

    def get_http_statuses(self):
        """
        This function returns all HTTP response status codes
        :return:
        """
        return self._validate_and_run('references/statuses/')

    def get_countries(self):
        """
        This function returns list of countries
        :return:
        """
        return self._validate_and_run('references/countries/')

    # SMS
    def send_single_sms(self, fields):
        """
        Send a single SMS message
        :param fields:
        :return:
        """
        rules = {
            'from': {'required': True, 'type': 'string'},
            'to': {'required': True, 'type': 'integer', 'maxlength': 64},
            'text': {'required': True, 'type': 'string'},
            'name': {'type': 'string'},
            # Bug fix: raw string — '\.' in a normal string literal is an
            # invalid escape sequence (DeprecationWarning, SyntaxWarning on
            # newer Pythons).
            'email': {'type': 'string', 'regex': r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$'},
            'country_id': {'type': 'integer', 'maxlength': 32},
            'reference': {'type': 'string'},
            'shorten_url': {'type': 'integer'},
            'list_id': {'type': 'integer', 'maxlength': 32},
            'transactional': {'type': 'integer', 'maxlength': 32},
            'preferred_route_id': {'type': 'integer', 'maxlength': 32},
            'delay': {'type': 'integer', 'maxlength': 32},
        }
        return self._validate_and_run('sms/send-single/', fields, rules)

    def send_list(self, fields):
        """
        Send SMS messages to all good numbers in a list
        :param fields:
        :return:
        """
        rules = {
            'list_id': {'required': True, 'type': 'integer', 'maxlength': 32},
            'from': {'required': True, 'type': 'string'},
            'text': {'required': True, 'type': 'string'},
            'shorten_url': {'type': 'integer'},
            'preferred_route_id': {'type': 'integer', 'maxlength': 32}
        }
        return self._validate_and_run('sms/send-list/', fields, rules)

    def get_sms_info(self, fields):
        """
        Get information about sent SMS messages
        :param fields:
        :return:
        """
        rules = {
            'ids': {'required': True, 'type': 'string'}
        }
        return self._validate_and_run('sms/get/', fields, rules)

    # LISTS OF NUMBERS
    def create_list(self, fields):
        """
        Creating a new list
        :param fields:
        :return:
        """
        rules = {
            'name': {'required': True, 'type': 'string'}
        }
        return self._validate_and_run('lists/create/', fields, rules)

    def delete_list(self, fields):
        """
        Deleting an existing list
        :param fields:
        :return:
        """
        rules = {
            'id': {'required': True, 'type': 'integer'}
        }
        return self._validate_and_run('lists/delete/', fields, rules)

    def get_list_info(self, fields):
        """
        Get all info about a list, including sending stats and numbers segmentation
        :param fields:
        :return:
        """
        rules = {
            'id': {'required': True, 'type': 'integer'}
        }
        return self._validate_and_run('lists/info/', fields, rules)

    def get_all_lists(self):
        """
        Get all the lists that user created, with information about stored numbers
        :return:
        """
        return self._validate_and_run('lists/getall/')

    # PHONE NUMBERS
    def create_number(self, fields):
        """
        Create a new contact in a list
        :param fields:
        :return:
        """
        rules = {
            'number': {'required': True, 'type': 'string'},
            'list_id': {'required': True, 'type': 'integer', 'maxlength': 32},
            'country_id': {'type': 'integer', 'maxlength': 32},
            'name': {'type': 'string'},
            # Raw string for the same reason as in send_single_sms.
            'email': {'type': 'string', 'regex': r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$'},
        }
        return self._validate_and_run('numbers/create/', fields, rules)

    def delete_numbers(self, fields):
        """
        Delete a record (contact) from an existing list
        :param fields:
        :return:
        """
        rules = {
            'ids': {'required': True, 'type': 'string'}
        }
        return self._validate_and_run('numbers/delete/', fields, rules)

    def get_numbers(self, fields):
        """
        Get extended information about numbers
        :param fields:
        :return:
        """
        rules = {
            'list_id': {'type': 'integer', 'maxlength': 32},
            'ids': {'type': 'string'},
            'limit': {'type': 'integer', 'maxlength': 32},
            'offset': {'type': 'integer', 'maxlength': 32}
        }
        return self._validate_and_run('numbers/get/', fields, rules)

    def get_unsubscribers(self):
        """
        Get list of unsubscribed numbers
        :return:
        """
        return self._validate_and_run('numbers/unsubscribers/')

    # ROUTES
    def get_routes(self, fields=None):
        """
        Get all available Routes with prices for different countries
        :param fields:
        :return:
        """
        rules = {
            'country_id': {'type': 'integer', 'maxlength': 32},
            'transactional': {'type': 'boolean'},
        }
        return self._validate_and_run('routes/getall/', fields, rules)

    # AUXILIARY TOOLS
    def number_simple_verify(self, fields):
        """
        Logical verification of number
        :param fields:
        :return:
        """
        rules = {
            'number': {'required': True, 'type': 'string'},
            'country_id': {'type': 'integer', 'maxlength': 32}
        }
        return self._validate_and_run('verify/number-simple/', fields, rules)

    def number_hlr_verify(self, fields):
        """
        Verifying number by request to Home Location Register
        :param fields:
        :return:
        """
        rules = {
            'number': {'required': True, 'type': 'string'},
            'country_id': {'type': 'integer', 'maxlength': 32}
        }
        return self._validate_and_run('verify/number-hlr/', fields, rules)

    def text_analyzing(self, fields):
        """
        Verification of text before sending an SMS
        :param fields:
        :return:
        """
        rules = {
            'text': {'required': True, 'type': 'string'}
        }
        return self._validate_and_run('text/analyze/', fields, rules)

    def get_sending_report(self, fields):
        """
        This function returns a report about SMS sending process
        :param fields:
        :return:
        """
        rules = {
            'status': {'type': 'string'},
            'date_from': {'type': 'date'},
            'date_to': {'type': 'date'},
            'limit': {'type': 'integer', 'maxlength': 32},
            'offset': {'type': 'integer', 'maxlength': 32}
        }
        return self._validate_and_run('reports/sending/', fields, rules)

    def get_sending_stats(self, fields):
        """
        This function returns statistics about SMS sending
        :param fields:
        :return:
        """
        rules = {
            'country_id': {'required': True, 'type': 'integer', 'maxlength': 32},
            'date_from': {'required': True, 'type': 'date'},
            'date_to': {'required': True, 'type': 'date'},
            'route_id': {'type': 'integer', 'maxlength': 32},
        }
        return self._validate_and_run('reports/stats/', fields, rules)

    # USER
    def get_user_details(self):
        """
        This function returns API user details
        :return:
        """
        return self._validate_and_run('user/details/')

    # MAIN CORE FUNCTIONS
    def _validate_and_run(self, path, fields=None, rules=None):
        """
        Validate `fields` against `rules` (if given) and run _make_request
        when validation passes; otherwise return the validation errors.
        :param path:
        :param fields:
        :param rules:
        :return:
        """
        if fields is None:
            fields = {}
        if rules is not None:
            v = Validator(rules)
            is_valid = v.validate(fields)
            errors = v.errors
        else:
            is_valid = True
            errors = 'Unknown error'
        if is_valid:
            return self._make_request(path, fields)
        else:
            return errors

    def _make_request(self, path, fields=None):
        """
        POST `fields` (plus the API key) to the endpoint and return the
        decoded JSON, or None on transport/decoding failure.
        :param path:
        :param fields:
        :return:
        """
        if fields is None:
            fields = {}
        fields['api_key'] = self.api_key
        try:
            r = requests.post(self.endpoint + path, data=fields)
            return r.json()
        except (requests.exceptions.RequestException, ValueError):
            # Bug fix: a bare `except:` also swallowed KeyboardInterrupt and
            # SystemExit. Only network errors and JSON-decoding failures
            # (ValueError) are best-effort here.
            print('Can\'t proceed request')
            return None
|
Stepland/marshmallow-jsonschema
|
example/example.py
|
from flask import Flask, jsonify
from marshmallow import Schema, fields
from marshmallow_jsonschema import JSONSchema
app = Flask(__name__)
class UserSchema(Schema):
    # Minimal example schema: two plain string fields, used to
    # demonstrate JSON-Schema generation below.
    name = fields.String()
    address = fields.String()
@app.route("/schema")
def schema():
    """Serve the JSON Schema generated from UserSchema."""
    user_schema = UserSchema()
    generated = JSONSchema().dump(user_schema).data
    return jsonify(generated)
@app.route("/")
def home():
    """Serve a static demo page that fetches /schema and renders it as a form."""
    return """<!DOCTYPE html>
<head>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/brutusin.json-forms/1.3.0/css/brutusin-json-forms.css"><Paste>
<script src="https://code.jquery.com/jquery-1.12.1.min.js" integrity="sha256-I1nTg78tSrZev3kjvfdM5A5Ak/blglGzlaZANLPDl3I=" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/underscore.string/3.3.4/underscore.string.min.js"></script>
<script src="https://cdn.jsdelivr.net/brutusin.json-forms/1.3.0/js/brutusin-json-forms.min.js"></script>
<script>
$(document).ready(function() {
    $.ajax({
        url: '/schema'
        , success: function(data) {
            var container = document.getElementById('myform');
            var BrutusinForms = brutusin["json-forms"];
            var bf = BrutusinForms.create(data);
            bf.render(container);
        }
    });
});
</script>
</head>
<body>
<div id="myform"></div>
</body>
</html>
"""
# Run the demo server on all interfaces when executed directly.
if __name__ == "__main__":
    app.run(host="0.0.0.0", debug=True)
|
Stepland/marshmallow-jsonschema
|
tests/test_validation.py
|
<filename>tests/test_validation.py
import pytest
from marshmallow import Schema, fields, validate
from marshmallow.validate import OneOf, Range
from marshmallow_jsonschema import JSONSchema, UnsupportedValueError
from marshmallow_jsonschema.compat import MARSHMALLOW_2, MARSHMALLOW_3
from . import UserSchema, validate_and_dump
def test_length_validator():
    """Length limits on string/list fields map to min/max Length/Items bounds."""
    schema = UserSchema()
    dumped = validate_and_dump(schema)
    props = dumped["definitions"]["UserSchema"]["properties"]
    assert props["name"]["minLength"] == 1
    assert props["name"]["maxLength"] == 255
    assert props["addresses"]["minItems"] == 1
    assert props["addresses"]["maxItems"] == 3
    # equal min/max encodes a fixed-length string
    assert props["const"]["minLength"] == 50
    assert props["const"]["maxLength"] == 50
def test_length_validator_error():
    """A Length validator on a non-sequence field (Integer) must be rejected."""
    class BadSchema(Schema):
        bob = fields.Integer(validate=validate.Length(min=1, max=3))
        class Meta:
            strict = True
    schema = BadSchema()
    json_schema = JSONSchema()
    with pytest.raises(UnsupportedValueError):
        json_schema.dump(schema)
def test_one_of_validator():
    """OneOf choices and labels surface as JSON-Schema enum / enumNames."""
    schema = UserSchema()
    dumped = validate_and_dump(schema)
    assert dumped["definitions"]["UserSchema"]["properties"]["sex"]["enum"] == [
        "male",
        "female",
        "non_binary",
        "other",
    ]
    assert dumped["definitions"]["UserSchema"]["properties"]["sex"]["enumNames"] == [
        "Male",
        "Female",
        "Non-binary/fluid",
        "Other",
    ]
def test_one_of_empty_enum():
    """An empty OneOf produces empty (not missing) enum / enumNames lists."""
    class TestSchema(Schema):
        foo = fields.String(validate=OneOf([]))
    schema = TestSchema()
    dumped = validate_and_dump(schema)
    foo_property = dumped["definitions"]["TestSchema"]["properties"]["foo"]
    assert foo_property["enum"] == []
    assert foo_property["enumNames"] == []
@pytest.mark.skipif(MARSHMALLOW_3, reason="marshmallow 2 only")
def test_range_marshmallow_2():
    """Under marshmallow 2, Range bounds map to inclusive minimum/maximum."""
    class TestSchema(Schema):
        foo = fields.Integer(validate=Range(min=1, max=3))
    schema = TestSchema()
    dumped = validate_and_dump(schema)
    props = dumped["definitions"]["TestSchema"]["properties"]
    assert props["foo"]["minimum"] == 1
    assert props["foo"]["maximum"] == 3
@pytest.mark.skipif(MARSHMALLOW_2, reason="marshmallow 3 only")
def test_range_marshmallow_3():
    """marshmallow 3's exclusive bounds map to exclusiveMinimum/Maximum."""
    class TestSchema(Schema):
        foo = fields.Integer(
            validate=Range(min=1, min_inclusive=False, max=3, max_inclusive=False)
        )
        bar = fields.Integer(validate=Range(min=2, max=4))
    schema = TestSchema()
    dumped = validate_and_dump(schema)
    props = dumped["definitions"]["TestSchema"]["properties"]
    assert props["foo"]["exclusiveMinimum"] == 1
    assert props["foo"]["exclusiveMaximum"] == 3
    assert props["bar"]["minimum"] == 2
    assert props["bar"]["maximum"] == 4
def test_range_no_min_or_max():
    """A one-sided Range emits only the bound that was actually given."""
    class SchemaNoMin(Schema):
        foo = fields.Integer(validate=validate.Range(max=4))
    class SchemaNoMax(Schema):
        foo = fields.Integer(validate=validate.Range(min=0))
    schema1 = SchemaNoMin()
    schema2 = SchemaNoMax()
    dumped1 = validate_and_dump(schema1)
    dumped2 = validate_and_dump(schema2)
    assert dumped1["definitions"]["SchemaNoMin"]["properties"]["foo"]["maximum"] == 4
    assert dumped2["definitions"]["SchemaNoMax"]["properties"]["foo"]["minimum"] == 0
def test_range_non_number_error():
    """A Range validator on a non-numeric field (String) must be rejected."""
    class TestSchema(Schema):
        foo = fields.String(validate=validate.Range(max=4))
    schema = TestSchema()
    json_schema = JSONSchema()
    with pytest.raises(UnsupportedValueError):
        json_schema.dump(schema)
|
Stepland/marshmallow-jsonschema
|
marshmallow_jsonschema/compat.py
|
import sys
import marshmallow
# Feature flags for the running interpreter and the installed marshmallow.
PY2 = int(sys.version_info[0]) == 2
MARSHMALLOW_MAJOR_VERSION = int(marshmallow.__version__.split(".", 1)[0])
MARSHMALLOW_2 = MARSHMALLOW_MAJOR_VERSION == 2
MARSHMALLOW_3 = MARSHMALLOW_MAJOR_VERSION == 3
if PY2:
    # Python 2 aliases; these names only resolve when actually running PY2.
    text_type = unicode
    binary_type = str
    basestring = basestring
else:
    text_type = str
    binary_type = bytes
    basestring = (str, bytes)
if MARSHMALLOW_2:
    # marshmallow 2 has no RAISE/INCLUDE/EXCLUDE constants; mimic them.
    RAISE = "raise"
    INCLUDE = "include"
    EXCLUDE = "exclude"
    def dot_data_backwards_compatible(json_schema):
        # marshmallow 2 dump() returns a MarshalResult; payload lives in .data
        return json_schema.data
    def list_inner(list_field):
        # marshmallow 2 names a List's element field "container"
        return list_field.container
else:
    from marshmallow import RAISE, INCLUDE, EXCLUDE
    def dot_data_backwards_compatible(json_schema):
        # marshmallow 3 dump() returns the payload directly
        return json_schema
    def list_inner(list_field):
        # marshmallow 3 renamed "container" to "inner"
        return list_field.inner
__all__ = (
    "text_type",
    "binary_type",
    "basestring",
    "list_inner",
    "dot_data_backwards_compatible",
    "MARSHMALLOW_MAJOR_VERSION",
)
|
NivNayman/XNAS
|
parameters.py
|
<gh_stars>10-100
from collections import namedtuple
# Immutable per-dataset constants consumed by the model builders and
# input pipelines (mean/std are per-channel normalization statistics).
Dataset = namedtuple('Dataset',
                     ['num_classes',
                      'num_channels',
                      'hw',
                      'mean',
                      'std',
                      'initial_channels_factor',
                      'is_ImageFolder',
                      'def_resize'])
datasets = {'CIFAR10':
            Dataset(10, 3, [32, 32],
                    [0.49139968, 0.48215827, 0.44653124],
                    [0.24703233, 0.24348505, 0.26158768],
                    1,
                    False,
                    None),
            'ImageNet':
            Dataset(1000, 3, [224, 224],
                    [0.485, 0.456, 0.406],
                    [0.229, 0.224, 0.225],
                    1,
                    True,
                    None),
            }
|
NivNayman/XNAS
|
model_imagenet.py
|
<reponame>NivNayman/XNAS
import torch
import torch.nn as nn
import operations as ops
from genotypes import Genotype
from model import Cell
from utils import SELayer
class NetworkImageNet(nn.Module):
    """Evaluation network for ImageNet assembled from a fixed genotype."""
    def __init__(self, C, num_classes, layers, genotype, do_SE=True, C_stem=56):
        # C: base channel count for the first cell stage; layers: number of
        # cells; do_SE: attach Squeeze-and-Excitation to the first 2/3 of
        # cells; C_stem: channels produced by the two stem convolutions.
        stem_activation = nn.ReLU
        super(NetworkImageNet, self).__init__()
        self._layers = layers
        self.drop_path_prob = 0
        self.do_SE = do_SE
        self.C_stem = C_stem
        # Two-stage stem: each conv has stride 2, so input resolution is
        # divided by 8 before the first cell.
        self.stem0 = nn.Sequential(
            nn.Conv2d(3, C_stem // 2, kernel_size=3, stride=2, padding=1, bias=False),
            nn.BatchNorm2d(C_stem // 2),
            stem_activation(inplace=True),
            nn.Conv2d(C_stem // 2, C_stem, 3, stride=2, padding=1, bias=False),
            nn.BatchNorm2d(C_stem),
        )
        self.stem1 = nn.Sequential(
            stem_activation(inplace=True),
            nn.Conv2d(C_stem, C_stem, 3, stride=2, padding=1, bias=False),
            nn.BatchNorm2d(C_stem),
        )
        C_prev_prev, C_prev, C_curr = C_stem, C_stem, C
        self.cells = nn.ModuleList()
        self.cells_SE = nn.ModuleList()
        reduction_prev = True
        for i in range(layers):
            # Reduction cells (double channels, halve resolution) at 1/3 and 2/3 depth.
            if i in [layers // 3, 2 * layers // 3]:
                C_curr *= 2
                reduction = True
            else:
                reduction = False
            cell = Cell(genotype, C_prev_prev, C_prev, C_curr, reduction, reduction_prev)
            reduction_prev = reduction
            self.cells += [cell]
            C_prev_prev, C_prev = C_prev, cell.multiplier * C_curr
            if self.do_SE and i <= layers * 2 / 3:
                # Weaker squeeze for early (narrow) stages, stronger later.
                if C_curr == C:
                    reduction_factor_SE = 4
                else:
                    reduction_factor_SE = 8
                self.cells_SE += [SELayer(C_curr * 4, reduction=reduction_factor_SE)]
        self.global_pooling = nn.AvgPool2d(7)
        self.classifier = nn.Linear(C_prev, num_classes)
    def forward(self, input):
        """Return class logits for an input batch."""
        s0 = self.stem0(input)
        s1 = self.stem1(s0)
        for i, cell in enumerate(self.cells):
            cell_output = cell(s0, s1, self.drop_path_prob)
            # SE modules exist only for the first 2/3 of cells; indices align
            # because they were appended consecutively from i == 0.
            if self.do_SE and i <= len(self.cells) * 2 / 3:
                cell_output = self.cells_SE[i](cell_output)
            s0, s1 = s1, cell_output
        out = self.global_pooling(s1)
        logits = self.classifier(out.view(out.size(0), -1))
        return logits
|
NivNayman/XNAS
|
utils.py
|
<gh_stars>10-100
import os
import torch
import shutil
import time
import logging
import numpy as np
import torchvision.transforms as transforms
from torch.autograd import Variable
import torch.nn as nn
import parameters as params
class AvgrageMeter(object):
    """Running average of a scalar metric (e.g. accuracy per batch)."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.avg = 0
        self.sum = 0
        self.cnt = 0

    def update(self, val, n=1):
        """Fold in *val* observed *n* times and refresh the running average."""
        self.sum = self.sum + val * n
        self.cnt = self.cnt + n
        self.avg = self.sum / self.cnt
def accuracy(output, target, topk=(1,)):
    """Compute top-k accuracy (in percent) for a batch of logits.

    :param output: (batch, classes) score tensor.
    :param target: (batch,) ground-truth class indices.
    :param topk: tuple of k values to evaluate.
    :return: list of scalar tensors, one accuracy per requested k.
    """
    maxk = max(topk)
    batch_size = target.size(0)
    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))
    res = []
    for k in topk:
        # Fix: reshape(-1) instead of view(-1) — the slice of the transposed
        # tensor is non-contiguous and view() raises on it in modern PyTorch.
        correct_k = correct[:k].reshape(-1).float().sum(0)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res
def infer(test_queue, model, report_freq=50):
    """Evaluate *model* on a test loader.

    :param test_queue: iterable of (input, target) batches.
    :param model: network to evaluate (switched to eval mode here).
    :param report_freq: log running accuracy every this many steps.
    :return: (top-1 accuracy, mean inference time per sample in seconds).
    """
    top1 = AvgrageMeter()
    top5 = AvgrageMeter()
    model.eval()
    samples = 0
    infer_time = 0
    for step, (input, target) in enumerate(test_queue):
        input = Variable(input, requires_grad=False).cuda()
        # Fix: ``.cuda(async=True)`` is a SyntaxError on Python >= 3.7
        # (``async`` became a keyword); PyTorch renamed it ``non_blocking``.
        target = Variable(target, requires_grad=False).cuda(non_blocking=True)
        ts = time.time()
        logits = model(input)
        te = time.time()
        infer_time += (te - ts)
        prec1, prec5 = accuracy(logits, target, topk=(1, 5))
        n = input.size(0)
        top1.update(prec1.data.item(), n)
        top5.update(prec5.data.item(), n)
        samples += n
        if step % report_freq == 0:
            logging.info('test %03d %f %f', step, top1.avg, top5.avg)
    infer_time = infer_time / samples
    return top1.avg, infer_time
class Cutout(object):
    """Zero out one randomly-placed square patch of an image tensor (augmentation)."""

    def __init__(self, length):
        self.length = length

    def __call__(self, img):
        """Return *img* (modified in place) with a random square masked to zero."""
        height, width = img.size(1), img.size(2)
        center_y = np.random.randint(height)
        center_x = np.random.randint(width)
        half = self.length // 2
        top = np.clip(center_y - half, 0, height)
        bottom = np.clip(center_y + half, 0, height)
        left = np.clip(center_x - half, 0, width)
        right = np.clip(center_x + half, 0, width)
        mask = np.ones((height, width), np.float32)
        mask[top: bottom, left: right] = 0.
        img *= torch.from_numpy(mask).expand_as(img)
        return img
def data_transforms_cifar10():
    """Test-time CIFAR-10 pipeline: to-tensor then per-channel normalization."""
    stats = params.datasets['CIFAR10']
    pipeline = [
        transforms.ToTensor(),
        transforms.Normalize(stats.mean, stats.std),
    ]
    return transforms.Compose(pipeline)
def data_transforms_imagenet_valid():
    """ImageNet validation pipeline: resize-256, center-crop-224, tensor, normalize."""
    stats = params.datasets['ImageNet']
    pipeline = [
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize(stats.mean, stats.std),
    ]
    return transforms.Compose(pipeline)
def count_parameters_in_MB(model):
    """Return the number of non-auxiliary model parameters, in millions.

    Uses the builtin ``sum`` — ``np.sum`` over a generator is deprecated
    and silently falls back to the builtin anyway.
    """
    return sum(np.prod(v.size()) for name, v in model.named_parameters()
               if "auxiliary" not in name) / 1e6
def save_checkpoint(state, is_best, save):
    """Write *state* to <save>/checkpoint.pth.tar; copy it to model_best.pth.tar when *is_best*."""
    filename = os.path.join(save, 'checkpoint.pth.tar')
    torch.save(state, filename)
    if is_best:
        best_filename = os.path.join(save, 'model_best.pth.tar')
        shutil.copyfile(filename, best_filename)
def save(model, model_path):
    """Serialize only the model's state dict (not the module) to *model_path*."""
    torch.save(model.state_dict(), model_path)
def load(model, model_path):
    """Load weights into *model* from *model_path*.

    Accepts checkpoints saved either as a raw state dict or wrapped in a
    ``{'model': ...}`` / ``{'state_dict': ...}`` container. ``strict=False``
    tolerates missing/unexpected keys.
    """
    checkpoint = torch.load(model_path)
    # Idiomatic membership tests instead of checkpoint.__contains__(...)
    if 'model' in checkpoint:
        model.load_state_dict(checkpoint['model'], strict=False)
    elif 'state_dict' in checkpoint:
        model.load_state_dict(checkpoint['state_dict'], strict=False)
    else:
        model.load_state_dict(checkpoint, strict=False)
def create_exp_dir(path, scripts_to_save=None):
    """Create an experiment directory and optionally snapshot scripts into it.

    :param path: directory to create (missing parents are created too).
    :param scripts_to_save: optional list of file paths copied into
        <path>/scripts for reproducibility.
    """
    if not os.path.exists(path):
        # makedirs (vs mkdir) also creates missing parent directories.
        os.makedirs(path)
    print('Experiment dir : {}'.format(path))
    if scripts_to_save is not None:
        os.mkdir(os.path.join(path, 'scripts'))
        for script in scripts_to_save:
            dst_file = os.path.join(path, 'scripts', os.path.basename(script))
            shutil.copyfile(script, dst_file)
# For uniform_end mode: reduction_indices = num_layers - place_reduction_cells(...)
def place_reduction_cells(num_layers, num_reductions, mode='original', bias=0,
                          verbose=False):
    """Choose which cell indices become reduction cells.

    'original' spreads them evenly (index = num_layers * j // (R + 1));
    'uniform_start' makes the normal-cell runs as uniform as possible and,
    when that is impossible, biases the shorter runs toward the start of
    the network by recursing over the remaining suffix.
    Returns a numpy array (or scalar-like) of cell indices.
    """
    if mode == 'original':
        reduction_indices = np.array(
            num_layers * np.arange(1, num_reductions + 1) // (num_reductions + 1))
    elif mode == 'uniform_start':  # When cant be fully-uniform, bias towards start/end of net.
        normal_len = (num_layers - num_reductions) / (
            num_reductions + 1)  # (real) number of normal cells
        # between consecutive reduction cells.
        if normal_len == int(normal_len):
            # Evenly divisible: place every (normal_len + 1)-th cell.
            reduction_indices = bias + (
                (int(normal_len) + 1) * np.arange(1, num_reductions + 1) - 1)
        else:
            # print("num_layers: ", num_layers, "num_reductions: ", num_reductions, "normal_len: ", normal_len, "bias: ", bias)
            if num_reductions == 1:
                # print("num_reductions == 1")
                reduction_indices = bias + int(normal_len)
            else:
                # Fix the first index, then recurse on the remaining layers.
                reduction_indices = np.concatenate((int(normal_len + bias),
                                                    place_reduction_cells(
                                                        num_layers - int(normal_len + 1),
                                                        num_reductions - 1, mode,
                                                        bias + int(normal_len) + 1)),
                                                   axis=None)
    else:
        assert False, "No such mode."
    if verbose:
        print("Network's reduction cell indices: ", reduction_indices)
    return reduction_indices
class SELayer(nn.Module):
    """Squeeze-and-Excitation block: channel-wise reweighting via a bottleneck MLP."""
    def __init__(self, channel, reduction=16):
        super(SELayer, self).__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Sequential(
            nn.Linear(channel, channel // reduction, bias=False),
            nn.ReLU(inplace=True),
            nn.Linear(channel // reduction, channel, bias=False),
            nn.Sigmoid()
        )
    def forward(self, x):
        """Scale each channel of x by its learned gate in (0, 1)."""
        b, c, _, _ = x.size()
        y = self.avg_pool(x).view(b, c)  # squeeze: global average per channel
        y = self.fc(y).view(b, c, 1, 1)  # excite: per-channel gate
        return x * y.expand_as(x)
|
NivNayman/XNAS
|
test.py
|
import os
import sys
import time
import glob
import numpy as np
import torch
import utils
import logging
import argparse
import torch.nn as nn
import genotypes
import torch.utils
import torchvision.datasets as dset
import torch.backends.cudnn as cudnn
from torch.autograd import Variable
from model import Network
import parameters as params
from utils import infer, data_transforms_cifar10, count_parameters_in_MB
# Command-line interface for CIFAR-10 evaluation of a trained XNAS model.
parser = argparse.ArgumentParser("cifar10")
parser.add_argument('--dset_name', type=str, default='CIFAR10', help='data set name')
parser.add_argument('--data', type=str, default='../data', help='location of the data corpus')
parser.add_argument('--batch_size', type=int, default=64, help='batch size')
parser.add_argument('--report_freq', type=float, default=50, help='report frequency')
parser.add_argument('--gpu', type=int, default=0, help='gpu device id')
parser.add_argument('--model_path', type=str, default='trained_models/xnas_small_cifar10.t7',
                    help='path of pretrained model')
parser.add_argument('--arch', type=str, default='XNAS', help='which architecture to use')
parser.add_argument('--calc_flops', action='store_true', default=False, help='calc_flops')
# Model Design
parser.add_argument('--init_channels', type=int, default=36, help='num of init channels')
parser.add_argument('--layers', type=int, default=20, help='total number of layers')
parser.add_argument('--num_reductions', type=int, default=2, help='Number of reduction cells')
parser.add_argument('--reduction_location_mode', type=str, default='uniform_start', help='reduction cells allocation.')
parser.add_argument('--do_SE', action='store_true', default=False)
args = parser.parse_args()
# Timestamped logging to stdout.
log_format = '%(asctime)s %(message)s'
logging.basicConfig(stream=sys.stdout, level=logging.INFO,
                    format=log_format, datefmt='%m/%d %I:%M:%S %p')
def main():
    """Rebuild the network from its genotype, load weights, and evaluate on CIFAR-10."""
    if not torch.cuda.is_available():
        logging.info('no gpu device available')
        sys.exit(1)
    torch.cuda.set_device(args.gpu)
    cudnn.benchmark = True
    cudnn.enabled = True
    logging.info('gpu device = %d' % args.gpu)
    logging.info("args = %s", args)
    # NOTE(review): eval() on --arch executes arbitrary input; acceptable
    # for a research CLI, but getattr(genotypes, args.arch) would be safer.
    genotype = eval("genotypes.%s" % args.arch)
    logging.info(genotype)
    dataset = params.datasets[args.dset_name]
    network_params = {'C': args.init_channels,
                      'num_classes': dataset.num_classes,
                      'layers': args.layers,
                      'num_reductions': args.num_reductions,
                      'reduction_location_mode': args.reduction_location_mode,
                      'genotype': genotype,
                      'stem_multiplier': dataset.num_channels,
                      'do_SE': args.do_SE}
    model = Network(**network_params)
    logging.info("Loading model parameters from %s", args.model_path)
    utils.load(model, args.model_path)
    flops, num_params = None, None
    if args.calc_flops:
        # thop is imported lazily so it is only required when profiling.
        from thop import profile, clever_format
        input = torch.randn(1, dataset.num_channels, dataset.hw[0], dataset.hw[1])
        flops, num_params = profile(model, inputs=(input, ))
        flops, num_params = clever_format([flops, num_params], "%.2f")
    model = model.cuda()
    test_transform = data_transforms_cifar10()
    test_data = dset.CIFAR10(root=args.data, train=False, download=True, transform=test_transform)
    test_queue = torch.utils.data.DataLoader(
        test_data, batch_size=args.batch_size, shuffle=False, pin_memory=True, num_workers=0)
    with torch.no_grad():
        test_acc, infer_time = infer(test_queue, model, args.report_freq)
    if args.calc_flops:
        logging.info('Test Accuracy: %.2f%% | Number of parameters: %s | Inference time: %2.2fms | Flops: %s',
                     test_acc, num_params, infer_time * 1000, flops)
    else:
        logging.info('Test Accuracy: %.2f%% | Inference time: %2.2fms', test_acc, infer_time * 1000)
if __name__ == '__main__':
    main()
|
NivNayman/XNAS
|
test_imagenet.py
|
<filename>test_imagenet.py
import os
import sys
import time
import numpy as np
import torch
import utils
import glob
import random
import logging
import argparse
import torch.nn as nn
import genotypes
import torch.utils
import torchvision.datasets as dset
import torchvision.transforms as transforms
import torch.backends.cudnn as cudnn
from torch.autograd import Variable
from model_imagenet import NetworkImageNet as Network
import parameters as params
from utils import infer, data_transforms_imagenet_valid, count_parameters_in_MB
# Command-line interface for ImageNet validation of a trained XNAS model.
parser = argparse.ArgumentParser("imagenet")
parser.add_argument('--data', type=str, default='../datasets/imagenet/', help='location of the data corpus')
parser.add_argument('--batch_size', type=int, default=32, help='batch size')
parser.add_argument('--report_freq', type=float, default=100, help='report frequency')
parser.add_argument('--gpu', type=int, default=0, help='gpu device id')
parser.add_argument('--model_path', type=str, default='trained_models/imagenet.path.tar',
                    help='path of pretrained model')
parser.add_argument('--arch', type=str, default='XNAS', help='which architecture to use')
parser.add_argument('--calc_flops', action='store_true', default=False, help='calc_flops')
# Network design
parser.add_argument('--layers', type=int, default=14, help='total number of layers')
parser.add_argument('--init_channels', type=int, default=46, help='init_channels')
args = parser.parse_args()
# Timestamped logging to stdout.
log_format = '%(asctime)s %(message)s'
logging.basicConfig(stream=sys.stdout, level=logging.INFO,
                    format=log_format, datefmt='%m/%d %I:%M:%S %p')
def main():
    """Rebuild the ImageNet network from its genotype, load weights, and validate."""
    if not torch.cuda.is_available():
        logging.info('no gpu device available')
        sys.exit(1)
    torch.cuda.set_device(args.gpu)
    cudnn.benchmark = True
    cudnn.enabled = True
    logging.info('gpu device = %d' % args.gpu)
    logging.info("args = %s", args)
    # NOTE(review): eval() on --arch executes arbitrary input; acceptable
    # for a research CLI, but getattr(genotypes, args.arch) would be safer.
    genotype = eval("genotypes.%s" % args.arch)
    logging.info(genotype)
    dataset = params.datasets['ImageNet']
    network_params = {'C': args.init_channels,
                      'num_classes': dataset.num_classes,
                      'layers': args.layers,
                      'genotype': genotype,
                      }
    model = Network(**network_params)
    if args.calc_flops:
        # thop is imported lazily so it is only required when profiling.
        from thop import profile, clever_format
        input = torch.randn(1, dataset.num_channels, dataset.hw[0], dataset.hw[1])
        flops, num_params = profile(model, inputs=(input, ))
        flops, num_params = clever_format([flops, num_params], "%.2f")
    utils.load(model, args.model_path)
    model = model.cuda()
    val_transform = data_transforms_imagenet_valid()
    validdir = os.path.join(args.data, 'val')
    valid_data = dset.ImageFolder(validdir, val_transform)
    valid_queue = torch.utils.data.DataLoader(
        valid_data, batch_size=args.batch_size, shuffle=False, pin_memory=True, num_workers=0)
    with torch.no_grad():
        val_acc, infer_time = infer(valid_queue, model, args.report_freq)
    if args.calc_flops:
        logging.info('Validation Accuracy: %.2f%% | Number of parameters: %s | Inference time: %2.2fms | Flops: %s',
                     val_acc, num_params, infer_time * 1000, flops)
    else:
        logging.info('Validation Accuracy: %.2f%% | Inference time: %2.2fms', val_acc, infer_time * 1000)
if __name__ == '__main__':
    main()
|
NivNayman/XNAS
|
model.py
|
<reponame>NivNayman/XNAS<gh_stars>10-100
import torch
import torch.nn as nn
import utils as utils
import operations as ops
from genotypes import Genotype
from utils import SELayer
class Cell(nn.Module):
    """One DARTS-style cell instantiated from a fixed genotype."""
    def __init__(self, genotype, C_prev_prev, C_prev, C, reduction, reduction_prev):
        super(Cell, self).__init__()
        # If the previous cell reduced, s0 is at double stride; align it.
        if reduction_prev:
            self.preprocess0 = ops.FactorizedReduce(C_prev_prev, C)
        else:
            self.preprocess0 = ops.ReLUConvBN(C_prev_prev, C, 1, 1, 0)
        self.preprocess1 = ops.ReLUConvBN(C_prev, C, 1, 1, 0)
        if reduction:
            op_names, indices = zip(*genotype.reduce)
            concat = genotype.reduce_concat
        else:
            op_names, indices = zip(*genotype.normal)
            concat = genotype.normal_concat
        self._compile(C, op_names, indices, concat, reduction)
    def _compile(self, C, op_names, indices, concat, reduction):
        # Instantiate the genotype's ops; two ops feed each intermediate node.
        assert len(op_names) == len(indices)
        self._steps = len(op_names) // 2
        self._concat = concat
        self.multiplier = len(concat)
        self._ops = nn.ModuleList()
        for name, index in zip(op_names, indices):
            # Only edges originating at the two input nodes get stride 2
            # inside a reduction cell.
            stride = 2 if reduction and index < 2 else 1
            op = ops.OPS[name](C, stride, True)
            self._ops += [op]
        self._indices = indices
    def forward(self, s0_initial, s1_initial, drop_prob):
        """Apply the cell; concatenate the selected states along channels."""
        s0 = self.preprocess0(s0_initial)
        s1 = self.preprocess1(s1_initial)
        states = [s0, s1]
        for i in range(self._steps):
            h1 = states[self._indices[2 * i]]
            h2 = states[self._indices[2 * i + 1]]
            op1 = self._ops[2 * i]
            op2 = self._ops[2 * i + 1]
            h1 = op1(h1)
            h2 = op2(h2)
            if self.training and drop_prob > 0.:
                # NOTE(review): drop_path_inplace is not defined in this
                # module's visible scope — confirm it is defined/imported
                # elsewhere in the file.
                if not isinstance(op1, ops.Identity):
                    drop_path_inplace(h1, drop_prob)
                if not isinstance(op2, ops.Identity):
                    drop_path_inplace(h2, drop_prob)
            s = h1 + h2
            states += [s]
        return torch.cat([states[i] for i in self._concat], dim=1)
class Network(nn.Module):
    """CIFAR-style evaluation network assembled from a fixed genotype."""
    def __init__(self, C: int, num_classes, layers, num_reductions,
                 reduction_location_mode, genotype: Genotype, stem_multiplier, do_SE=False):
        super(Network, self).__init__()
        self._layers = layers
        self.do_SE = do_SE
        self.drop_path_prob = 0
        C_curr = stem_multiplier * C
        # NOTE(review): the stem conv's in_channels is stem_multiplier —
        # callers pass dataset.num_channels here (see test.py), so this is
        # the image channel count; confirm for other call sites.
        self.stem = nn.Sequential(
            nn.Conv2d(stem_multiplier, C_curr, 3, padding=1, bias=False),
            nn.BatchNorm2d(C_curr)
        )
        C_prev_prev, C_prev, C_curr = C_curr, C_curr, C
        self.cells = nn.ModuleList()
        self.cells_SE = nn.ModuleList()
        reduction_prev = False
        # Reduction-cell positions are computed by the configurable policy.
        self.reduction_indices = utils.place_reduction_cells(num_layers=layers,
                                                             num_reductions=num_reductions,
                                                             mode=reduction_location_mode)
        for i in range(layers):
            if i in self.reduction_indices:
                C_curr *= 2
                reduction = True
            else:
                reduction = False
            cell = Cell(genotype, C_prev_prev, C_prev, C_curr, reduction,
                        reduction_prev)
            reduction_prev = reduction
            self.cells += [cell]
            C_prev_prev, C_prev = C_prev, cell.multiplier * C_curr
            if self.do_SE and i <= layers * 2 / 3:
                # Weaker squeeze for early (narrow) stages, stronger later.
                if C_curr == C:
                    reduction_factor_SE = 4
                else:
                    reduction_factor_SE = 8
                self.cells_SE += [SELayer(C_curr * 4, reduction=reduction_factor_SE)]
        self.global_pooling = nn.AdaptiveAvgPool2d(1)
        self.classifier = nn.Linear(C_prev, num_classes)
    def forward(self, input):
        """Return class logits for an input batch."""
        s0 = s1 = self.stem(input)
        for i, cell in enumerate(self.cells):
            cell_output = cell(s0, s1, self.drop_path_prob)
            # SE
            if self.do_SE and i <= len(self.cells) * 2 / 3:
                cell_output = self.cells_SE[i](cell_output)
            s0, s1 = s1, cell_output
        out = self.global_pooling(s1)
        logits = self.classifier(out.view(out.size(0), -1))
        return logits
|
mlinhard/wifi-analysis
|
analyse.py
|
#!/usr/bin/python3
"""Analyse captured packets from Wi-Fi interface
Usage:
analyse.py <pcap-file> <config-file>
Options:
-h --help Show this screen
--version Show version
"""
from docopt import docopt
from scapy.layers.dot11 import Dot11
from scapy.utils import rdpcap
import json
from builtins import set
class Config(object):
    """Analyzer configuration loaded from a JSON file.

    Expected keys: ``base_stations`` (required list of BSSIDs) and
    ``known_addresses`` (optional MAC -> friendly-name mapping).
    """
    def __init__(self, file):
        with open(file, "r") as f:
            config_json = json.load(f)
        base_stations = config_json.get("base_stations")
        if not base_stations:
            # Fix: raising a bare string is a TypeError in Python 3;
            # raise a proper exception instead.
            raise ValueError("No base stations defined")
        self._bssid_set = set(base_stations)
        self._known_addrs = config_json.get("known_addresses") or {}

    def get_name(self, addr):
        """Return the friendly name for *addr*, or *addr* itself if unknown."""
        name = self._known_addrs.get(addr)
        return name if name else addr

    def is_base(self, packet):
        """True when any of the packet's 802.11 addresses is a configured base station."""
        if packet.addr1 in self._bssid_set:
            return True
        if packet.addr2 and packet.addr2 in self._bssid_set:
            return True
        if packet.addr3 and packet.addr3 in self._bssid_set:
            return True
        return False
if __name__ == '__main__':
    args = docopt(__doc__, version="0.1.0")
    config = Config(args['<config-file>'])
    print("Loading file {}".format(args['<pcap-file>']))
    packets = rdpcap(args['<pcap-file>'])
    print("Loaded {} packets".format(len(packets)))
    non11_count = 0
    # Count occurrences of each (addr1, addr2, addr3, type, subtype) tuple
    # for packets involving a configured base station.
    tuples = {}
    for packet in packets:
        if packet.haslayer(Dot11):
            if config.is_base(packet):
                t = (packet.addr1, packet.addr2, packet.addr3, packet.type, packet.subtype)
                tuple_count = tuples.get(t)
                tuples[t] = (tuple_count if tuple_count else 0) + 1
        else:
            non11_count += 1
    # Report per-base-station traffic, classified by which address slot
    # matched: addr1 = receiver, addr2 = transmitter, addr3 = BSSID.
    for bssid in config._bssid_set:
        print(config.get_name(bssid) + "\n")
        for t, c in tuples.items():
            if bssid == t[0]:
                print("> {} typ {} sub {} cnt {}".format(t[1], t[3], t[4], c))
            elif bssid == t[1]:
                print("< {} typ {} sub {} cnt {}".format(t[0], t[3], t[4], c))
            elif bssid == t[2]:
                print("{} > {} typ {} sub {} cnt {}".format(t[1], t[0], t[3], t[4], c))
            else:
                print("ERROR typ {} sub {} cnt {}".format(t[3], t[4], c))
|
avashisth/ML-App
|
data_visualization_app.py
|
import streamlit as st
import plotly_express as px
import pandas as pd
from sklearn.experimental import enable_iterative_imputer
from sklearn.impute import KNNImputer
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np
from numpy import percentile
from scipy import stats
# configuration
st.set_option('deprecation.showfileUploaderEncoding', False)
# title of the app
st.title("ML Process Guide")
# Setup file upload
uploaded_file = st.sidebar.file_uploader(
    label="Upload your CSV or Excel file. (200MB max)",
    type=['csv', 'xlsx'])
# NOTE(review): a bare `global` at module level is a no-op; df and the
# column lists are plain module globals either way.
global df
if uploaded_file is not None:
    print(uploaded_file)
    print("hello")
    try:
        # Try CSV first; fall back to Excel on any parse failure.
        df = pd.read_csv(uploaded_file)
    except Exception as e:
        print(e)
        df = pd.read_excel(uploaded_file)
global numeric_columns
global non_numeric_columns
try:
    numeric_columns = list(df.select_dtypes(['float', 'int']).columns)
    non_numeric_columns = list(df.select_dtypes(['object']).columns)
    # None lets the user pick "no color grouping" in the chart selectors.
    non_numeric_columns.append(None)
    #print(non_numeric_columns)
except Exception as e:
    # df is undefined until a file is uploaded; prompt the user instead.
    print(e)
    st.write("Please upload file to the application.")
def showPlots(chart_select):
    """Render sidebar controls and the Plotly chart for *chart_select*.

    Reads the module-level df / numeric_columns / non_numeric_columns;
    chart errors are printed (not surfaced) so the app keeps running.
    """
    if chart_select == 'Scatterplots':
        st.sidebar.subheader("Scatterplot Settings")
        try:
            x_values = st.sidebar.selectbox('X axis', options=numeric_columns)
            y_values = st.sidebar.selectbox('Y axis', options=numeric_columns)
            color_value = st.sidebar.selectbox("Color", options=non_numeric_columns)
            plot = px.scatter(data_frame=df, x=x_values, y=y_values, color=color_value)
            # display the chart
            st.plotly_chart(plot)
        except Exception as e:
            print(e)
    if chart_select == 'Lineplots':
        st.sidebar.subheader("Line Plot Settings")
        try:
            x_values = st.sidebar.selectbox('X axis', options=numeric_columns)
            y_values = st.sidebar.selectbox('Y axis', options=numeric_columns)
            color_value = st.sidebar.selectbox("Color", options=non_numeric_columns)
            plot = px.line(data_frame=df, x=x_values, y=y_values, color=color_value)
            st.plotly_chart(plot)
        except Exception as e:
            print(e)
    if chart_select == 'Histogram':
        st.sidebar.subheader("Histogram Settings")
        try:
            x = st.sidebar.selectbox('Feature', options=numeric_columns)
            # NOTE(review): bin_size is collected but never passed to
            # px.histogram — confirm whether nbins=bin_size was intended.
            bin_size = st.sidebar.slider("Number of Bins", min_value=10,
                                         max_value=100, value=40)
            color_value = st.sidebar.selectbox("Color", options=non_numeric_columns)
            plot = px.histogram(x=x, data_frame=df, color=color_value)
            st.plotly_chart(plot)
        except Exception as e:
            print(e)
    if chart_select == 'Boxplot':
        st.sidebar.subheader("Boxplot Settings")
        try:
            y = st.sidebar.selectbox("Y axis", options=numeric_columns)
            x = st.sidebar.selectbox("X axis", options=non_numeric_columns)
            color_value = st.sidebar.selectbox("Color", options=non_numeric_columns)
            plot = px.box(data_frame=df, y=y, x=x, color=color_value)
            st.plotly_chart(plot)
        except Exception as e:
            print(e)
def identifyData():
    """Show basic dataframe properties and optionally the chart builder."""
    descVal = st.sidebar.selectbox(label="select the property",
                                   options=['Describe Data', 'Data Types', 'Data Shape']
                                   )
    if descVal == 'Describe Data':
        st.write(df.describe())
    elif descVal == 'Data Types':
        st.write(df.dtypes)
    elif descVal == 'Data Shape':
        st.write(df.shape)
    check_box = st.sidebar.checkbox(label="Display charts")
    if check_box:
        chart_select = st.sidebar.selectbox(
            label="Select the chart type",
            options=['Scatterplots', 'Lineplots', 'Histogram', 'Boxplot']
        )
        showPlots(chart_select)
def corrAnalysis():
    """Display the correlation matrix as a table and a density heatmap."""
    st.write("Correlation Analysis")
    st.write(df.corr())
    fig = px.density_heatmap(df.corr())
    st.plotly_chart(fig)
def anamolyHandle():
    """Placeholder page for anomaly handling (only prints a heading so far)."""
    st.write("Anamolous data depiction")
def handleNull():
    """Split df into categorical/numeric views and impute nulls or flag outliers."""
    st.write(df.head())
    # NOTE(review): st.beta_columns was removed in newer Streamlit
    # releases in favor of st.columns — confirm the pinned version.
    col1, col2 = st.beta_columns(2)
    cat_data = df.select_dtypes(include=['object']).copy()
    col1.header("Categorical data: ")
    col1.write(cat_data.head())
    col1.write('Null values: ')
    col1.write(cat_data.isna().sum())
    num_data = df.select_dtypes(include=['int64', 'float64']).copy()
    col2.header("Numerical data: ")
    col2.write(num_data.head())
    action = st.sidebar.selectbox(label="Select the action",
                                  options=['Handle null values', 'Handle outliers'])
    if action == 'Handle null values':
        col2.write('Null values: ')
        col2.write(num_data.isna().sum())
        # KNN imputation over the numeric columns only.
        imputer = KNNImputer(n_neighbors=4)
        imputer.fit(num_data)
        Xtrans = imputer.transform(num_data)
        st.write("Imputed values: ")
        st.dataframe(Xtrans)
    elif action == 'Handle outliers':
        # Flag values more than 3 standard deviations from the column mean.
        outliers = []
        for (columnName, columnData) in num_data.iteritems():
            z = np.abs(stats.zscore(columnData.values))
            outliers.append(np.where(z > 3))
        st.write(outliers)
#main method front page:
# Route the selected process to its handler function.
usr_model = st.selectbox ('Chose your process: ',('Select a process','Missing values and outlier handling','Identification of variables and data types', 'Correlation Analysis','Outlier and anamoly handling'))
if usr_model == 'Missing values and outlier handling' :
    handleNull()
elif usr_model == 'Identification of variables and data types' :
    identifyData()
elif usr_model == 'Correlation Analysis' :
    corrAnalysis()
elif usr_model == 'Outlier and anamoly handling' :
    # Fix: this branch previously compared against '' (unreachable, since
    # '' is not a selectbox option) and called the undefined name
    # handleAnamoly; the defined handler is anamolyHandle.
    anamolyHandle()
|
matanwrites/AlgoGist
|
Array-Pascal.py
|
<reponame>matanwrites/AlgoGist
def pascal(numRows):
    """Return the first *numRows* rows of Pascal's triangle as lists of ints."""
    triangle = []
    row = []
    for _ in range(numRows):
        if row:
            # Each interior entry is the sum of the two entries above it.
            row = [1] + [row[j] + row[j + 1] for j in range(len(row) - 1)] + [1]
        else:
            row = [1]
        triangle.append(row)
    return triangle
def getRow(rowIndex):
    """Return row *rowIndex* (0-based) of Pascal's triangle."""
    row = [1]
    for _ in range(rowIndex):
        # Build the next row from the current one.
        row = [1] + [row[j] + row[j + 1] for j in range(len(row) - 1)] + [1]
    return row
class TestPascal(object):
    """pascal(k) should equal the first k rows produced by getRow."""
    def common_assert(self, k, expected):
        r = pascal(k)
        assert r == expected
    def test_1(self):
        self.common_assert(1, [ getRow(0) ])
    def test_2(self):
        self.common_assert(2,[ getRow(0), getRow(1) ])
    def test_3(self):
        self.common_assert(3,[ getRow(0), getRow(1), getRow(2) ])
class TestGetRow(object):
    """getRow(k) should match the known k-th row of Pascal's triangle."""
    def common_assert(self, k, expected):
        r = getRow(k)
        assert r == expected
    def test_0(self):
        self.common_assert(0, [1])
    def test_1(self):
        self.common_assert(1, [1,1])
    def test_2(self):
        self.common_assert(2, [1, 2, 1])
    def test_3(self):
        self.common_assert(3, [1, 3, 3, 1])
    def test_4(self):
        self.common_assert(4, [1, 4, 6, 4, 1])
    def test_5(self):
        self.common_assert(5, [1, 5, 10, 10, 5, 1])
|
matanwrites/AlgoGist
|
Sequence-Collatz.py
|
<gh_stars>0
class RefValue(object):
    """Mutable box around a value so callees can update a counter in place."""
    def __init__(self, val):
        self.val = val
def collatz(n, steps):
    """Iterate the Collatz map from *n* down to 1, counting steps in *steps.val*."""
    while n > 1:
        steps.val += 1
        # Even: halve; odd: 3n + 1.
        n = n // 2 if n % 2 == 0 else 3 * n + 1
    return n
def collatz_r(n, steps):
    """Recursive Collatz descent to 1, counting steps in *steps.val*.

    Fix: uses floor division so *n* stays an exact int — the original
    ``n / 2`` silently turned it into a float.
    """
    if n == 1:
        return 1
    steps.val += 1
    if n % 2 == 0:
        return collatz_r(n // 2, steps)
    else:
        return collatz_r(3 * n + 1, steps)
class TestCollatz(object):
    """Checks known step counts and the max-steps record below 100,000."""
    def common_assert(self, inp, expected):
        print("")
        steps = RefValue(0)
        r = collatz_r(inp, steps)
        print("steps = {}".format(steps.val))
        assert r == 1
        assert steps.val == expected
    def test_9(self):
        self.common_assert(9, 19)
    def test_12(self):
        self.common_assert(12, 9)
    def test_max(self):
        # Brute-force search for the longest Collatz trajectory under 100k.
        max_steps = 0
        starting_number_max_steps = 0
        for n in range(1, 100_000):
            steps = RefValue(0)
            r = collatz_r(n, steps)
            if steps.val > max_steps:
                max_steps = steps.val
                starting_number_max_steps = n
            assert r == 1
        assert starting_number_max_steps == 77_031
        assert max_steps == 350
|
TNRIS/lake-gallery
|
lakegallery/map/admin.py
|
<reponame>TNRIS/lake-gallery
from django.contrib.gis import admin
from .models import (MajorReservoirs, HistoricalAerialLinks,
StoryContent, LakeStatistics, SignificantEvents,
BoatRamps, ChannelMarkers, Hazards, Parks)
class MajorReservoirsAdmin(admin.OSMGeoAdmin):
    """Admin for MajorReservoirs: OSM map widget plus change-list config."""
    list_display = ['res_lbl', 'story']  # columns shown in the change list
    ordering = ['res_lbl']
    list_per_page = 50
    list_filter = ['story']  # sidebar filter: story enabled/disabled
    search_fields = ['res_lbl']
class HistoricalAerialLinksAdmin(admin.ModelAdmin):
    """Admin for historical aerial photo links, grouped under their lake."""
    list_display = ('lake', 'year', 'link')
    ordering = ('lake', 'year', 'link')
    list_per_page = 50
    search_fields = ['lake__res_lbl', 'year']  # searches the related lake label

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Present the lake dropdown alphabetically by reservoir label.
        if db_field.name == "lake":
            kwargs["queryset"] = MajorReservoirs.objects.all().order_by('res_lbl')
        return super().formfield_for_foreignkey(db_field, request, **kwargs)
class StoryContentAdmin(admin.ModelAdmin):
    """Admin for StoryContent: one fieldset per page of the story UI."""
    list_display = ['lake']
    ordering = ['lake']
    # Each photo field is paired with its generated read-only preview tag.
    fieldsets = [
        ('Main "Story" Page', {'fields': ['lake', 'summary',
                                          ('summary_photo_main',
                                           'summ_main_tag')]}),
        ('History Page', {'fields': ['history',
                                     ('history_photo_main', 'hist_main_tag'),
                                     ('history_photo', 'hist_tag')]}),
        ('Additional Page 1', {'fields':
                               ['section_one_nav',
                                'section_one_header',
                                ('section_one_photo_main',
                                 'one_main_tag'),
                                'section_one_content',
                                ('section_one_photo', 'one_tag')]}),
        ('Additional Page 2', {'fields':
                               ['section_two_nav',
                                'section_two_header',
                                ('section_two_photo_main',
                                 'two_main_tag'),
                                'section_two_content',
                                ('section_two_photo', 'two_tag')]}),
        ('Additional Page 3', {'fields':
                               ['section_three_nav',
                                'section_three_header',
                                ('section_three_photo_main',
                                 'three_main_tag'),
                                'section_three_content',
                                ('section_three_photo',
                                 'three_tag')]}),
    ]
    # The *_tag fields render generated HTML previews, so they must be read-only.
    readonly_fields = ('summ_main_tag', 'hist_main_tag', 'hist_tag',
                       'one_main_tag', 'one_tag', 'two_main_tag', 'two_tag',
                       'three_main_tag', 'three_tag')
    search_fields = ['lake__res_lbl']  # search related res_lbl field

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Sort the lake selector alphabetically by reservoir label.
        if db_field.name == "lake":
            kwargs["queryset"] = (MajorReservoirs.objects.all()
                                  .order_by('res_lbl'))
        return (super(StoryContentAdmin, self)
                .formfield_for_foreignkey(db_field, request, **kwargs))
class LakeStatisticsAdmin(admin.ModelAdmin):
    """Admin for LakeStatistics: general and dam statistics in fieldsets."""
    list_display = ['lake']
    ordering = ['lake']
    fieldsets = [
        (None, {'fields': ['lake', 'wdft_link']}),
        ('General Statistics', {'fields': ['original_name',
                                           'primary_purposes',
                                           'location',
                                           'construction_dates',
                                           'length_of_lake',
                                           'miles_of_shoreline',
                                           'maximum_width',
                                           'lake_area',
                                           'lake_capacity',
                                           'full_elevation_msl',
                                           'full_elevation_gal',
                                           'maximum_depth',
                                           'average_depth',
                                           'historic_high_msl',
                                           'historic_high_date',
                                           'historic_low_msl',
                                           'historic_low_date']}),
        ('Dam Statistics', {'fields': ['dam_height',
                                       'dam_width',
                                       'spillway_elevation',
                                       'top_of_dam',
                                       'num_of_floodgates',
                                       'discharge_capacity']})
    ]
    list_filter = ['primary_purposes', 'location']
    search_fields = ['lake__res_lbl']  # search related res_lbl field & location

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Sort the lake selector alphabetically by reservoir label.
        if db_field.name == "lake":
            kwargs["queryset"] = (MajorReservoirs.objects.all()
                                  .order_by('res_lbl'))
        return (super(LakeStatisticsAdmin, self)
                .formfield_for_foreignkey(db_field, request, **kwargs))
class SignificantEventsAdmin(admin.ModelAdmin):
    """Admin for significant lake events (highs/lows), filterable by type."""
    list_display = ('lake', 'event_type', 'date', 'height')
    ordering = ('lake', 'event_type', 'date')
    list_per_page = 50
    list_filter = ['event_type']
    search_fields = ['lake__res_lbl']  # searches the related lake label

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Present the lake dropdown alphabetically by reservoir label.
        if db_field.name == "lake":
            kwargs["queryset"] = MajorReservoirs.objects.all().order_by('res_lbl')
        return super().formfield_for_foreignkey(db_field, request, **kwargs)
"""
Overlay Layers for stories (points of interest)
"""
class BoatRampsAdmin(admin.OSMGeoAdmin):
    """Admin for boat ramp overlay points; map widget, filterable by operator."""
    list_display = ('lake', 'name', 'operator')
    ordering = ('lake', 'name', 'operator')
    list_per_page = 50
    list_filter = ['operator']
    search_fields = ['lake__res_lbl', 'name']  # lake label and ramp name

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Present the lake dropdown alphabetically by reservoir label.
        if db_field.name == "lake":
            kwargs["queryset"] = MajorReservoirs.objects.all().order_by('res_lbl')
        return super().formfield_for_foreignkey(db_field, request, **kwargs)
class ChannelMarkersAdmin(admin.OSMGeoAdmin):
    """Admin for channel marker overlay points, filterable by survey year."""
    list_display = ('lake', 'marker_id', 'year')
    ordering = ('lake', 'marker_id', 'year')
    list_per_page = 50
    list_filter = ['year']
    search_fields = ['lake__res_lbl']  # searches the related lake label

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Present the lake dropdown alphabetically by reservoir label.
        if db_field.name == "lake":
            kwargs["queryset"] = MajorReservoirs.objects.all().order_by('res_lbl')
        return super().formfield_for_foreignkey(db_field, request, **kwargs)
class HazardsAdmin(admin.OSMGeoAdmin):
    """Admin for hazard overlay polygons, filterable by hazard type."""
    list_display = ('lake', 'hazard_type', 'num_buoys')
    ordering = ('lake', 'hazard_type')
    list_per_page = 50
    list_filter = ['hazard_type']
    search_fields = ['lake__res_lbl']  # searches the related lake label

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Present the lake dropdown alphabetically by reservoir label.
        if db_field.name == "lake":
            kwargs["queryset"] = MajorReservoirs.objects.all().order_by('res_lbl')
        return super().formfield_for_foreignkey(db_field, request, **kwargs)
class ParksAdmin(admin.OSMGeoAdmin):
    """Admin for park overlay polygons, filterable by park type."""
    list_display = ('lake', 'name', 'park_type')
    ordering = ('lake', 'name')
    list_per_page = 50
    list_filter = ['park_type']
    search_fields = ['lake__res_lbl']  # searches the related lake label

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        # Present the lake dropdown alphabetically by reservoir label.
        if db_field.name == "lake":
            kwargs["queryset"] = MajorReservoirs.objects.all().order_by('res_lbl')
        return super().formfield_for_foreignkey(db_field, request, **kwargs)
# Wire each model to its customised admin class and label the index page.
admin.site.register(MajorReservoirs, MajorReservoirsAdmin)
admin.site.register(HistoricalAerialLinks, HistoricalAerialLinksAdmin)
admin.site.register(StoryContent, StoryContentAdmin)
admin.site.register(LakeStatistics, LakeStatisticsAdmin)
admin.site.register(SignificantEvents, SignificantEventsAdmin)
admin.site.register(BoatRamps, BoatRampsAdmin)
admin.site.register(ChannelMarkers, ChannelMarkersAdmin)
admin.site.register(Hazards, HazardsAdmin)
admin.site.register(Parks, ParksAdmin)
admin.site.index_title = "Database Tables"
|
TNRIS/lake-gallery
|
lakegallery/api/renderers.py
|
from rest_framework.renderers import BrowsableAPIRenderer
class BrowsableAPIRendererFormatted(BrowsableAPIRenderer):
    """Renders the browsable api, but formats various details."""

    def get_context(self, *args, **kwargs):
        ctx = super().get_context(*args, **kwargs)
        # Expand the auto-generated model names into human-readable titles.
        title = ctx['name']
        title = title.replace('Rwp As', 'Regional Water Planning Areas')
        title = title.replace('Api', 'API')
        ctx['name'] = title
        if ctx['name'] == 'API Root':
            ctx['description'] = ("Root access to the Major Reservoirs "
                                  "(Lakes) and Regional Water Planning Areas "
                                  "datasets used within TNRIS' Texas Lake "
                                  "Gallery application.")
        # Apply the same renaming to each breadcrumb label, keeping its link.
        for idx, (label, link) in enumerate(ctx['breadcrumblist']):
            label = label.replace('Rwp As', 'RWPAs')
            label = label.replace('Api', 'API')
            ctx['breadcrumblist'][idx] = (label, link)
        return ctx
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0025_boatramps_channelmarkers_hazards_parks.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-10 20:04
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: creates the four story-overlay models (BoatRamps,
    ChannelMarkers, Hazards, Parks), each with a FK to MajorReservoirs."""
    dependencies = [
        ('map', '0024_auto_20171010_0847'),
    ]
    operations = [
        migrations.CreateModel(
            name='BoatRamps',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=30)),
                ('operator', models.CharField(blank=True, max_length=50)),
                ('geom', django.contrib.gis.db.models.fields.PointField(srid=4326)),
                ('lake', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='map.MajorReservoirs')),
            ],
            options={
                'verbose_name': 'Boat Ramp',
                'verbose_name_plural': 'Boat Ramps',
            },
        ),
        migrations.CreateModel(
            name='ChannelMarkers',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('odd', models.IntegerField(blank=True)),
                ('marker_id', models.IntegerField(blank=True)),
                ('year', models.IntegerField(blank=True)),
                ('geom', django.contrib.gis.db.models.fields.PointField(srid=4326)),
                ('lake', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='map.MajorReservoirs')),
            ],
            options={
                'verbose_name': 'Channel Marker',
                'verbose_name_plural': 'Channel Markers',
            },
        ),
        migrations.CreateModel(
            name='Hazards',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('hazard_type', models.CharField(blank=True, max_length=35)),
                ('num_buoys', models.CharField(blank=True, max_length=10)),
                ('geom', django.contrib.gis.db.models.fields.PolygonField(srid=4326)),
                ('lake', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='map.MajorReservoirs')),
            ],
            options={
                'verbose_name': 'Hazard',
                'verbose_name_plural': 'Hazards',
            },
        ),
        migrations.CreateModel(
            name='Parks',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('park_type', models.CharField(choices=[('Park', 'Park'), ('Undeveloped Recreation Area', 'Undeveloped Recreation Area'), ('Preserve', 'Preserve'), ('Park/Preserve', 'Park/Preserve')], max_length=50)),
                ('name', models.CharField(blank=True, max_length=100)),
                ('acres', models.FloatField(blank=True, default=0)),
                ('area', models.FloatField(blank=True, default=0)),
                ('perimeter', models.FloatField(blank=True, default=0)),
                ('geom', django.contrib.gis.db.models.fields.PolygonField(srid=4326)),
                ('lake', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='map.MajorReservoirs')),
            ],
            options={
                'verbose_name': 'Park',
                'verbose_name_plural': 'Parks',
            },
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/api/views.py
|
<gh_stars>1-10
from django.contrib.auth.models import User, Group
from rest_framework import viewsets
from .serializers import (ReservoirsSerializer,
ReservoirURLSerializer)
from .filters import URLFilter
from rest_framework_extensions.mixins import NestedViewSetMixin
from map.models import MajorReservoirs
class ReservoirsViewSet(NestedViewSetMixin, viewsets.ReadOnlyModelViewSet):
    """
    API endpoint that allows Major Reservoirs to be listed or detailed
    """
    serializer_class = ReservoirsSerializer
    lookup_field = 'res_lbl'
    queryset = MajorReservoirs.objects.all().order_by('res_lbl')
    # Bug fix: ('res_lbl') is just a parenthesised string, not a tuple — DRF
    # would iterate it character by character. The trailing comma makes it a
    # real 1-tuple of filterable field names.
    filter_fields = ('res_lbl',)
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0027_majorreservoirs_story.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-16 16:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds MajorReservoirs.story, the enabled/disabled flag
    that signals.py toggles when StoryContent is saved or deleted."""
    dependencies = [
        ('map', '0026_auto_20171011_0905'),
    ]
    operations = [
        migrations.AddField(
            model_name='majorreservoirs',
            name='story',
            field=models.CharField(choices=[('disabled', 'disabled'), ('enabled', 'enabled')], default='disabled', max_length=8),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0014_auto_20170929_1000.py
|
<reponame>TNRIS/lake-gallery<filename>lakegallery/map/migrations/0014_auto_20170929_1000.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-29 15:00
from __future__ import unicode_literals
from django.db import migrations, models
import map.models
class Migration(migrations.Migration):
    """Auto-generated: points section_one_photo uploads at the per-lake
    get_upload_path helper (replacing the bad literal from migration 0012)."""
    dependencies = [
        ('map', '0013_auto_20170929_0952'),
    ]
    operations = [
        migrations.AlterField(
            model_name='storycontent',
            name='section_one_photo',
            field=models.ImageField(blank=True, upload_to=map.models.get_upload_path),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/api/serializers.py
|
<filename>lakegallery/api/serializers.py
from django.contrib.auth.models import User, Group
from rest_framework import serializers
from rest_framework_gis.serializers import GeoFeatureModelSerializer
from rest_framework.reverse import reverse
from map.models import MajorReservoirs
class ReservoirsSerializer(GeoFeatureModelSerializer):
    """GeoJSON serializer for MajorReservoirs: one feature per reservoir."""
    class Meta:
        model = MajorReservoirs
        geo_field = 'geom'  # geometry source column for the GeoJSON output
        auto_bbox = True  # include a bounding box computed from geom
        fields = ('res_lbl',)
class ReservoirURLSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes a reservoir as its label plus a link to its detail view."""

    url = serializers.SerializerMethodField('get_lake_url')

    def get_lake_url(self, obj):
        # Build the absolute detail URL for this reservoir's label.
        request = self.context.get('request')
        return reverse('api_reservoirs-detail', args=[obj.res_lbl],
                       request=request)

    class Meta:
        model = MajorReservoirs
        fields = ('res_lbl', 'url')
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0012_storycontent_section_one_photo.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-29 14:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds StoryContent.section_one_photo.

    NOTE(review): the generated upload_to value is the repr of a field class,
    not a path; it is corrected to get_upload_path in migration 0014, so it
    is left as generated here.
    """
    dependencies = [
        ('map', '0011_auto_20170929_0857'),
    ]
    operations = [
        migrations.AddField(
            model_name='storycontent',
            name='section_one_photo',
            field=models.ImageField(blank=True, upload_to='<django.db.models.fields.related.OneToOneField>'),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/signals.py
|
<reponame>TNRIS/lake-gallery
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from map.models import MajorReservoirs, StoryContent
@receiver(post_save, sender=StoryContent)
def enable_lake(sender, **kwargs):
    """Mark a reservoir's story 'enabled' whenever its StoryContent is saved."""
    content = kwargs['instance']
    reservoir = MajorReservoirs.objects.get(res_lbl=str(content.lake))
    reservoir.story = 'enabled'
    reservoir.save()
@receiver(post_delete, sender=StoryContent)
def disable_lake(sender, **kwargs):
    """Mark a reservoir's story 'disabled' when its StoryContent is deleted."""
    content = kwargs['instance']
    reservoir = MajorReservoirs.objects.get(res_lbl=str(content.lake))
    reservoir.story = 'disabled'
    reservoir.save()
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0015_auto_20170929_1507.py
|
<reponame>TNRIS/lake-gallery
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-29 20:07
from __future__ import unicode_literals
from django.db import migrations, models
import map.models
class Migration(migrations.Migration):
    """Auto-generated: adds the remaining section photo fields to
    StoryContent, all uploading via the per-lake get_upload_path helper."""
    dependencies = [
        ('map', '0014_auto_20170929_1000'),
    ]
    operations = [
        migrations.AddField(
            model_name='storycontent',
            name='history_photo',
            field=models.ImageField(blank=True, upload_to=map.models.get_upload_path),
        ),
        migrations.AddField(
            model_name='storycontent',
            name='section_three_photo',
            field=models.ImageField(blank=True, upload_to=map.models.get_upload_path),
        ),
        migrations.AddField(
            model_name='storycontent',
            name='section_two_photo',
            field=models.ImageField(blank=True, upload_to=map.models.get_upload_path),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-12 19:32
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial schema: MajorReservoirs (lake multipolygons)
    and RWPAs (regional water planning area multipolygons)."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='MajorReservoirs',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('res_name', models.CharField(max_length=50)),
                ('type', models.CharField(max_length=50)),
                ('status', models.CharField(max_length=50)),
                ('res_lbl', models.CharField(max_length=100)),
                ('region', models.CharField(max_length=50)),
                ('geom', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326)),
            ],
        ),
        migrations.CreateModel(
            name='RWPAs',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('objectid', models.BigIntegerField()),
                ('reg_name', models.CharField(max_length=25)),
                ('letter', models.CharField(max_length=1)),
                ('shape_leng', models.FloatField()),
                ('shape_area', models.FloatField()),
                ('geom', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326)),
            ],
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0035_remove_majorreservoirs_region.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-11 19:24
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: drops the unused MajorReservoirs.region column."""
    dependencies = [
        ('map', '0034_delete_rwpas'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='majorreservoirs',
            name='region',
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/tests.py
|
# from django.test import Client, TestCase
# from django.urls import reverse
# from django.conf import settings
# from django.core.exceptions import ValidationError
# from bs4 import BeautifulSoup
# from django.http import HttpRequest
# from django.contrib.staticfiles import finders
# from django.contrib.gis.geos import Point, Polygon, MultiPolygon
# from .models import (MajorReservoirs, RWPAs, HistoricalAerialLinks,
# StoryContent, LakeStatistics, SignificantEvents,
# BoatRamps, ChannelMarkers, Hazards, Parks,
# get_upload_path)
# from .views import (get_region_header_list, get_lake_header_list)
# from .config import overlays
# from .validators import validate_past_dates
# from middleware import MobileDetectionMiddleware
# import string
# import os
# import datetime
# test_point = Point((1, 1), (1, 2))
# p1 = Polygon(((0, 0), (0, 1), (1, 1), (0, 0)))
# p2 = Polygon(((1, 1), (1, 2), (2, 2), (1, 1)))
# test_geom = MultiPolygon(p1, p2)
# class MajorReservoirsModelTests(TestCase):
# def test_string_representation(self):
# """
# Test the string representation of the model return the Reservoir Label
# """
# response = MajorReservoirs(res_lbl="Brand New Reservoir")
# self.assertEqual(str(response), response.res_lbl)
# def test_verbose_name_representations(self):
# """
# Test the name representations are formatted correctly
# """
# self.assertEqual(str(MajorReservoirs._meta.verbose_name),
# "Major Reservoir")
# self.assertEqual(str(MajorReservoirs._meta.verbose_name_plural),
# "Major Reservoirs")
# class HistoricalAerialLinksModelTests(TestCase):
# def test_string_representation(self):
# """
# Test the string representation of the model returns the link URL
# """
# response = HistoricalAerialLinks(link="http://google.com")
# self.assertEqual(str(response), response.link)
# def test_verbose_name_representations(self):
# """
# Test the name representations are formatted correctly
# """
# self.assertEqual(str(HistoricalAerialLinks._meta.verbose_name),
# "Historical Aerial Link")
# self.assertEqual(str(HistoricalAerialLinks._meta.verbose_name_plural),
# "Historical Aerial Links")
# def test_dictionary_method(self):
# """
# Test the dictionary method as a response with the link & year
# """
# response = HistoricalAerialLinks(link="http://google.com", year=1970)
# dictionary = response.as_dict()
# self.assertIs(isinstance(dictionary, dict), True)
# self.assertIs(isinstance(dictionary['link'], str), True)
# self.assertIs(isinstance(dictionary['year'], int), True)
# self.assertEqual(response.link, "http://google.com")
# self.assertEqual(response.year, 1970)
# def test_field_types(self):
# """
# Test the fields will error if not the correct types
# """
# MajorReservoirs(res_lbl="Lake Travis", geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl="Lake Travis")
# response = HistoricalAerialLinks(link="not a URL",
# year="string", lake=m)
# try:
# response.full_clean()
# except ValidationError as e:
# error = dict(e)
# self.assertEqual(error['year'], ["'string' value must be an "
# "integer."])
# self.assertEqual(error['link'], ['Enter a valid URL.'])
# def test_key_relationship_requirement(self):
# """
# Test the 'lake' ForeignKey field requirement
# """
# with self.assertRaises(ValueError) as e:
# HistoricalAerialLinks(link='http://google.com', year=1970,
# lake="lake")
# assert ('"HistoricalAerialLinks.lake" must be a "MajorReservoirs"'
# ' instance' in str(e.exception))
# class StoryContentModelTests(TestCase):
# def test_string_representation(self):
# """
# Test the string representation of the model return the Lake name
# """
# lake_name = "<NAME>"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# response = StoryContent(lake=m)
# self.assertEqual(str(response), lake_name)
# def test_verbose_name_representations(self):
# """
# Test the name representations are formatted correctly
# """
# self.assertEqual(str(StoryContent._meta.verbose_name),
# "Story Content")
# self.assertEqual(str(StoryContent._meta.verbose_name_plural),
# "Story Content")
# def test_tags(self):
# """
# Test the photo tag image sources
# """
# lake_name = "<NAME>"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# test_file = 'test.png'
# good_url = settings.MEDIA_URL + os.path.join(lake_name, test_file)
# s = StoryContent(lake=m)
# # summary main tag
# s.summary_photo_main = get_upload_path(s, test_file)
# soup = BeautifulSoup(s.summ_main_tag(), "html.parser")
# src = soup.findAll('img')[0]['src']
# self.assertEqual(src, good_url)
# # history main tag
# s.history_photo_main = get_upload_path(s, test_file)
# soup = BeautifulSoup(s.hist_main_tag(), "html.parser")
# src = soup.findAll('img')[0]['src']
# self.assertEqual(src, good_url)
# # history tag
# s.history_photo = get_upload_path(s, test_file)
# soup = BeautifulSoup(s.hist_tag(), "html.parser")
# src = soup.findAll('img')[0]['src']
# self.assertEqual(src, good_url)
# # section one main tag
# s.section_one_photo_main = get_upload_path(s, test_file)
# soup = BeautifulSoup(s.one_main_tag(), "html.parser")
# src = soup.findAll('img')[0]['src']
# self.assertEqual(src, good_url)
# # section one tag
# s.section_one_photo = get_upload_path(s, test_file)
# soup = BeautifulSoup(s.one_tag(), "html.parser")
# src = soup.findAll('img')[0]['src']
# self.assertEqual(src, good_url)
# # section two main tag
# s.section_two_photo_main = get_upload_path(s, test_file)
# soup = BeautifulSoup(s.two_main_tag(), "html.parser")
# src = soup.findAll('img')[0]['src']
# self.assertEqual(src, good_url)
# # section two tag
# s.section_two_photo = get_upload_path(s, test_file)
# soup = BeautifulSoup(s.two_tag(), "html.parser")
# src = soup.findAll('img')[0]['src']
# self.assertEqual(src, good_url)
# # section three main tag
# s.section_three_photo_main = get_upload_path(s, test_file)
# soup = BeautifulSoup(s.three_main_tag(), "html.parser")
# src = soup.findAll('img')[0]['src']
# self.assertEqual(src, good_url)
# # section three tag
# s.section_three_photo = get_upload_path(s, test_file)
# soup = BeautifulSoup(s.three_tag(), "html.parser")
# src = soup.findAll('img')[0]['src']
# self.assertEqual(src, good_url)
# def test_key_relationship_requirement(self):
# """
# Test the 'lake' OneToOne relationship field requirement
# """
# with self.assertRaises(ValueError) as e:
# StoryContent(summary="text here", history="text there",
# lake="lake")
# assert ('"StoryContent.lake" must be a "MajorReservoirs" instance' in
# str(e.exception))
# class LakeStatisticsModelTests(TestCase):
# def test_string_representation(self):
# """
# Test the string representation of the model return the Lake name
# """
# lake_name = "Lake Travis"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# response = LakeStatistics(lake=m)
# self.assertEqual(str(response), lake_name)
# def test_verbose_name_representations(self):
# """
# Test the name representations are formatted correctly
# """
# self.assertEqual(str(LakeStatistics._meta.verbose_name),
# "Lake Statistics")
# self.assertEqual(str(LakeStatistics._meta.verbose_name_plural),
# "Lake Statistics")
# def test_lake_only_requirement(self):
# """
# Test the lake property will error if not designated
# """
# response = LakeStatistics()
# try:
# response.full_clean()
# except ValidationError as e:
# error = dict(e)
# self.assertEqual(error['lake'], ['This field cannot be null.'])
# def test_key_relationship_requirement(self):
# """
# Test the 'lake' OneToOne relationship field requirement
# """
# with self.assertRaises(ValueError) as e:
# LakeStatistics(lake="lake")
# assert ('"LakeStatistics.lake" must be a "MajorReservoirs" instance' in
# str(e.exception))
# def test_string_numbers_method(self):
# """
# Test the float numbers to strings method
# """
# lake_name = "<NAME>"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# response = LakeStatistics(lake=m, dam_height=4.3)
# str_res = response.string_numbers()
# flds = str_res._meta.get_fields()
# for f in flds:
# attr = getattr(response, f.name)
# attr_type = type(attr)
# self.assertNotEqual(str(attr_type), 'float')
# self.assertNotEqual(str(attr)[-2:], '.0')
# def test_set_displays_method(self):
# """
# Test the set displays formatting method
# """
# lake_name = "<NAME>"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# # test display no statistics
# response = LakeStatistics(lake=m)
# dis_res = response.set_displays()
# self.assertIs(dis_res.general_stats, False)
# self.assertIs(dis_res.dam_stats, False)
# self.assertEqual(type(dis_res.primary_purposes), str)
# minimum_defaults = [0.0, 0, "0", "0.0", "", None, "None"]
# for s in dis_res.stat_defaults:
# self.assertTrue(s in minimum_defaults)
# # test display only general stats
# response = LakeStatistics(lake=m, original_name="Lake Water")
# dis_res = response.set_displays()
# self.assertIs(dis_res.general_stats, True)
# self.assertIs(dis_res.dam_stats, False)
# # test display dam stats also displays general stats header
# response = LakeStatistics(lake=m, top_of_dam=32.4)
# dis_res = response.set_displays()
# self.assertIs(dis_res.general_stats, True)
# self.assertIs(dis_res.dam_stats, True)
# class SignificantEventsModelTests(TestCase):
# def test_string_representation(self):
# """
# Test the string representation of the model returns the binary
# event type with the date
# """
# lake_name = "Lake Travis"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# today = datetime.datetime.today()
# dt = str(today.year) + "-" + str(today.month) + "-" + str(today.day)
# response = SignificantEvents(lake=m, event_type='High',
# date=dt, height=99.99)
# expected = lake_name + " " + response.event_type + " " + dt
# self.assertEqual(str(response), expected)
# def test_verbose_name_representations(self):
# """
# Test the name representations are formatted correctly
# """
# self.assertEqual(str(SignificantEvents._meta.verbose_name),
# "Significant Event")
# self.assertEqual(str(SignificantEvents._meta.verbose_name_plural),
# "Significant Events")
# def test_key_relationship_requirement(self):
# """
# Test the 'lake' ForeignKey field requirement
# """
# today = datetime.datetime.today()
# dt = str(today.year) + "-" + str(today.month) + "-" + str(today.day)
# with self.assertRaises(ValueError) as e:
# SignificantEvents(lake="lake", event_type='High',
# date=dt, height=99.99)
# assert ('"SignificantEvents.lake" must be a "MajorReservoirs"'
# ' instance' in str(e.exception))
# def test_requirements(self):
# """
# Test the lake, event type, date, and height required
# """
# response = SignificantEvents()
# try:
# response.full_clean()
# except ValidationError as e:
# error = dict(e)
# self.assertEqual(error['lake'], ['This field cannot be null.'])
# self.assertEqual(error['date'], ['This field cannot be null.'])
# self.assertEqual(error['height'], ['This field cannot be null.'])
# def test_dictionary_method(self):
# """
# Test the dictionary method as a response with date, height, and drought
# """
# today = datetime.datetime.today()
# dt = str(today.year) + "-" + str(today.month) + "-" + str(today.day)
# lake_name = "Lake Travis"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# response = SignificantEvents(lake=m, date=dt, height=99.99)
# dictionary = response.as_dict()
# self.assertIs(isinstance(dictionary, dict), True)
# self.assertIs(isinstance(dictionary['date'], str), True)
# self.assertIs(isinstance(dictionary['height'], float), True)
# self.assertIs(isinstance(dictionary['drought'], str), True)
# self.assertEqual(dictionary['date'], dt)
# self.assertEqual(dictionary['height'], 99.99)
# self.assertEqual(dictionary['drought'], "")
# # since we're here, might as well test event_type defaults to 'High'
# self.assertEqual(response.event_type, 'High')
# class BoatRampsModelTests(TestCase):
# def test_string_representation(self):
# """
# Test the string representation of the model return boat ramp name
# """
# lake_name = "Lake Travis"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# response = BoatRamps(lake=m, name="Rampage")
# self.assertEqual(str(response), response.name)
# def test_verbose_name_representations(self):
# """
# Test the name representations are formatted correctly
# """
# self.assertEqual(str(BoatRamps._meta.verbose_name), "Boat Ramp")
# self.assertEqual(str(BoatRamps._meta.verbose_name_plural),
# "Boat Ramps")
# def test_key_relationship_requirement(self):
# """
# Test the 'lake' ForeignKey field requirement
# """
# with self.assertRaises(ValueError) as e:
# BoatRamps(lake="lake", geom=test_point)
# assert ('"BoatRamps.lake" must be a "MajorReservoirs"'
# ' instance' in str(e.exception))
# class ChannelMarkersModelTests(TestCase):
# def test_string_representation(self):
# """
# Test the string representation of the model return lake & marker id
# """
# lake_name = "<NAME>"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# marker_id = 21
# response = ChannelMarkers(lake=m, marker_id=marker_id)
# expected = str(response.lake) + " " + str(response.marker_id)
# self.assertEqual(str(response), expected)
# def test_verbose_name_representations(self):
# """
# Test the name representations are formatted correctly
# """
# self.assertEqual(str(ChannelMarkers._meta.verbose_name),
# "Channel Marker")
# self.assertEqual(str(ChannelMarkers._meta.verbose_name_plural),
# "Channel Markers")
# def test_key_relationship_requirement(self):
# """
# Test the 'lake' ForeignKey field requirement
# """
# with self.assertRaises(ValueError) as e:
# ChannelMarkers(lake="lake", geom=test_point)
# assert ('"ChannelMarkers.lake" must be a "MajorReservoirs"'
# ' instance' in str(e.exception))
# class HazardsModelTests(TestCase):
# def test_string_representation(self):
# """
# Test the string representation of the model return lake & hazard type
# """
# lake_name = "<NAME>"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# hzd = 'No Wake'
# response = Hazards(lake=m, hazard_type=hzd)
# expected = str(response.lake) + " " + response.hazard_type
# self.assertEqual(str(response), expected)
# def test_verbose_name_representations(self):
# """
# Test the name representations are formatted correctly
# """
# self.assertEqual(str(Hazards._meta.verbose_name), "Hazard")
# self.assertEqual(str(Hazards._meta.verbose_name_plural),
# "Hazards")
# def test_key_relationship_requirement(self):
# """
# Test the 'lake' ForeignKey field requirement
# """
# with self.assertRaises(ValueError) as e:
# Hazards(lake="lake", geom=test_point)
# assert ('"Hazards.lake" must be a "MajorReservoirs"'
# ' instance' in str(e.exception))
# class ParksModelTests(TestCase):
# def test_string_representation(self):
# """
# Test the string representation of the model returns park name
# """
# lake_name = "Lake Travis"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# park_type = 'Preserve'
# nm = "Parky-Park"
# response = Parks(lake=m, park_type=park_type, name=nm)
# self.assertEqual(str(response), response.name)
# def test_verbose_name_representations(self):
# """
# Test the name representations are formatted correctly
# """
# self.assertEqual(str(Hazards._meta.verbose_name), "Hazard")
# self.assertEqual(str(Hazards._meta.verbose_name_plural),
# "Hazards")
# def test_key_relationship_requirement(self):
# """
# Test the 'lake' ForeignKey field requirement
# """
# with self.assertRaises(ValueError) as e:
# Hazards(lake="lake", geom=test_point)
# assert ('"Hazards.lake" must be a "MajorReservoirs"'
# ' instance' in str(e.exception))
# class functionTests(TestCase):
# def test_upload_path(self):
# """
# Test the upload path generator function
# """
# lake_name = "<NAME>"
# MajorReservoirs(res_lbl=lake_name, geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# response = StoryContent(lake=m)
# path = get_upload_path(response, "photo.png")
# self.assertEqual(path, os.path.join(lake_name, "photo.png"))
# def test_date_validator(self):
# """
# Test the past date validator function doesn't allow future dates
# """
# # past date should pass
# yesterday = datetime.date.today() - datetime.timedelta(days=1)
# self.assertEqual(validate_past_dates(yesterday), yesterday)
# # current date should pass
# today = datetime.date.today()
# self.assertEqual(validate_past_dates(today), today)
# # future date should fail
# tomorrow = datetime.date.today() + datetime.timedelta(days=1)
# with self.assertRaises(ValidationError) as e:
# validate_past_dates(tomorrow)
# assert ('The date cannot be in the future!' in str(e.exception))
# class URLTests(TestCase):
# def test_homepage(self):
# """
# Test the homepage URL
# """
# response = self.client.get('/')
# self.assertEqual(response.status_code, 200)
# def test_region_pages(self):
# """
# Test the correct region URLs /A - /P
# """
# good_letters = string.ascii_uppercase[:16]
# for l in good_letters:
# response = self.client.get('/' + l)
# self.assertEqual(response.status_code, 301)
# """
# Test the incorrect region URLs /Q - /Z
# """
# bad_letters = string.ascii_uppercase[16:]
# for l in bad_letters:
# response = self.client.get('/' + l)
# self.assertEqual(response.status_code, 404)
# """
# Test the correct region redirect URLs /a - /p
# """
# good_letters = string.ascii_lowercase[:16]
# for l in good_letters:
# response = self.client.get('/' + l)
# self.assertEqual(response.status_code, 301)
# """
# Test the incorrect region redirect URLs /q - /z
# """
# bad_letters = string.ascii_lowercase[16:]
# for l in bad_letters:
# response = self.client.get('/' + l)
# self.assertEqual(response.status_code, 404)
# def test_story_pages(self):
# """
# Test lake story page urls
# """
# lake_name = "<NAME>"
# lake_region = "K"
# MajorReservoirs(res_lbl=lake_name, region=lake_region,
# geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# StoryContent(lake=m)
# # test successful query for lake
# response = self.client.get('/' + lake_region + '/' + lake_name)
# self.assertEqual(response.status_code, 200)
# # test bad region for lake in URL
# response = self.client.get('/A/' + lake_name)
# self.assertEqual(response.status_code, 404)
# # test redirect if lowercase region supplied
# response = self.client.get('/' + lake_region.lower() + '/' + lake_name)
# self.assertEqual(response.status_code, 302)
# def test_about(self):
# """
# Test the about page URL
# """
# response = self.client.get('/about/')
# self.assertEqual(response.status_code, 200)
# response = self.client.get('/about')
# self.assertEqual(response.status_code, 301)
# class ViewTests(TestCase):
# def test_header_lists(self):
# """
# Test the header lists generated
# """
# region_nm_1, region_lt_1 = 'rwpa 1', 'A'
# region_nm_2, region_lt_2 = 'rwpa 2', 'B'
# RWPAs(objectid=1, reg_name=region_nm_1, letter=region_lt_1,
# shape_leng=10, shape_area=2, geom=test_geom).save()
# RWPAs(objectid=2, reg_name=region_nm_2, letter=region_lt_2,
# shape_leng=10, shape_area=2, geom=test_geom).save()
# res_nm_1, res_lt_1 = 'mr 1', 'A'
# res_nm_2, res_lt_2 = 'mr 2', 'B'
# MajorReservoirs(res_lbl=res_nm_1, region=res_lt_1,
# geom=test_geom).save()
# MajorReservoirs(res_lbl=res_nm_2, region=res_lt_2,
# geom=test_geom).save()
# reg_list = get_region_header_list()
# self.assertEqual(reg_list, [{'name': region_nm_1,
# 'letter': region_lt_1},
# {'name': region_nm_2,
# 'letter': region_lt_2}])
# res_list = get_lake_header_list()
# self.assertEqual(res_list, [{'name': res_nm_1,
# 'region': res_lt_1,
# 'class': 'disabled'},
# {'name': res_nm_2,
# 'region': res_lt_2,
# 'class': 'disabled'}])
# def test_index_context(self):
# """
# Test the index template context; config & header lists
# """
# response = self.client.get(reverse('map:index'))
# config = response.context['layers']
# check_layers = ['rwpas', 'reservoirs']
# check_keys = ['table_name', 'label_field', 'carto_css',
# 'carto_lbl', 'interactivity']
# # check that the 2 layers are in config with proper keys
# for layer in check_layers:
# self.assertIs(layer in config.keys(), True)
# # check the keys of those 2 layers
# layer_info = config[layer]
# for key in check_keys:
# self.assertIs(key in layer_info.keys(), True)
# # check the value type for each key
# self.assertIs(isinstance(layer_info['table_name'], str), True)
# self.assertIs(isinstance(layer_info['label_field'], str), True)
# self.assertIs(isinstance(layer_info['carto_css'], str), True)
# self.assertIs(isinstance(layer_info['carto_lbl'], str), True)
# self.assertIs(isinstance(layer_info['interactivity'], list), True)
# # verify 2 layer fields: 1 for region letter, 1 for name
# self.assertEqual(len(layer_info['interactivity']), 2)
# # although the header functions are tested above, we will retest them
# # here to verify they are in the context
# region_nm_1, region_lt_1 = 'rwpa 1', 'A'
# region_nm_2, region_lt_2 = 'rwpa 2', 'B'
# RWPAs(objectid=1, reg_name=region_nm_1, letter=region_lt_1,
# shape_leng=10, shape_area=2, geom=test_geom).save()
# RWPAs(objectid=2, reg_name=region_nm_2, letter=region_lt_2,
# shape_leng=10, shape_area=2, geom=test_geom).save()
# res_nm_1, res_lt_1 = 'mr 1', 'A'
# res_nm_2, res_lt_2 = 'mr 2', 'B'
# MajorReservoirs(res_lbl=res_nm_1, region=res_lt_1,
# geom=test_geom).save()
# MajorReservoirs(res_lbl=res_nm_2, region=res_lt_2,
# geom=test_geom).save()
# response = self.client.get(reverse('map:index'))
# hdr_reg = response.context['header_regions']
# self.assertEqual(hdr_reg, [{'name': region_nm_1,
# 'letter': region_lt_1},
# {'name': region_nm_2,
# 'letter': region_lt_2}])
# hdr_lks = response.context['header_lakes']
# self.assertEqual(hdr_lks, [{'name': res_nm_1,
# 'region': res_lt_1,
# 'class': 'disabled'},
# {'name': res_nm_2,
# 'region': res_lt_2,
# 'class': 'disabled'}])
# # check the version number in the context
# self.assertIs('version' in response.context, True)
# self.assertEqual(response.context['version'], settings.VERSION)
# def test_region_context(self):
# """
# Test the region template context; config & header lists
# """
# # although the header functions are tested above, we will retest them
# # here to verify they are in the context
# region_nm_1, region_lt_1 = 'rwpa 1', 'A'
# region_nm_2, region_lt_2 = 'rwpa 2', 'B'
# RWPAs(objectid=1, reg_name=region_nm_1, letter=region_lt_1,
# shape_leng=10, shape_area=2, geom=test_geom).save()
# RWPAs(objectid=2, reg_name=region_nm_2, letter=region_lt_2,
# shape_leng=10, shape_area=2, geom=test_geom).save()
# res_nm_1, res_lt_1 = 'mr 1', 'A'
# res_nm_2, res_lt_2 = 'mr 2', 'B'
# MajorReservoirs(res_lbl=res_nm_1, region=res_lt_1,
# geom=test_geom).save()
# MajorReservoirs(res_lbl=res_nm_2, region=res_lt_2,
# geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl='mr 1')
# StoryContent(lake=m, summary="text here", history="text there").save()
# request_region = 'A'
# response = self.client.get(reverse('map:region',
# args=[request_region]))
# hdr_reg = response.context['header_regions']
# self.assertEqual(hdr_reg, [{'name': region_nm_1,
# 'letter': region_lt_1},
# {'name': region_nm_2,
# 'letter': region_lt_2}])
# hdr_lks = response.context['header_lakes']
# # this also doubles down by testing the 'post_save' signal sent by
# # editing a StoryContent record and it auto editing the associated
# # major reservoir to be class=enabled
# self.assertEqual(hdr_lks, [{'name': res_nm_1,
# 'region': res_lt_1,
# 'class': 'enabled'},
# {'name': res_nm_2,
# 'region': res_lt_2,
# 'class': 'disabled'}])
# config = response.context['layers']
# check_layers = ['rwpas', 'reservoirs']
# check_keys = ['table_name', 'label_field', 'carto_css',
# 'carto_lbl', 'interactivity']
# # check that the 2 layers are in config with proper keys
# for layer in check_layers:
# self.assertIs(layer in config.keys(), True)
# # check the keys of those 2 layers
# layer_info = config[layer]
# for key in check_keys:
# self.assertIs(key in layer_info.keys(), True)
# # check the value type for each key
# self.assertIs(isinstance(layer_info['table_name'], str), True)
# self.assertIs(isinstance(layer_info['label_field'], str), True)
# self.assertIs(isinstance(layer_info['carto_css'], str), True)
# self.assertIs(isinstance(layer_info['carto_lbl'], str), True)
# self.assertIs(isinstance(layer_info['interactivity'], list), True)
# # verify 2 layer fields: 1 for region letter, 1 for name
# self.assertEqual(len(layer_info['interactivity']), 2)
# # check context region passed through view function
# self.assertIs('region' in response.context, True)
# self.assertEqual(response.context['region'], request_region)
# # check the extent in context
# self.assertIs('extent' in response.context, True)
# self.assertIs(isinstance(response.context['extent'], list), True)
# self.assertEqual(len(response.context['extent']), 4)
# for coordinate in response.context['extent']:
# self.assertIs(type(coordinate), float)
# def test_story_context(self):
# """
# Test the story template context; header lists
# """
# # although the header functions are tested above, we will retest them
# # here to verify they are in the context
# region_nm_1, region_lt_1 = 'rwpa 1', 'A'
# region_nm_2, region_lt_2 = 'rwpa 2', 'B'
# RWPAs(objectid=1, reg_name=region_nm_1, letter=region_lt_1,
# shape_leng=10, shape_area=2, geom=test_geom).save()
# RWPAs(objectid=2, reg_name=region_nm_2, letter=region_lt_2,
# shape_leng=10, shape_area=2, geom=test_geom).save()
# res_nm_1, res_lt_1 = 'mr one', 'A'
# res_nm_2, res_lt_2 = 'mr two', 'B'
# MajorReservoirs(res_lbl=res_nm_1, region=res_lt_1,
# geom=test_geom).save()
# MajorReservoirs(res_lbl=res_nm_2, region=res_lt_2,
# geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl='mr one')
# StoryContent(lake=m, summary="text here", history="text there").save()
# LakeStatistics(lake=m, dam_height=4.3).save()
# # load up a bunch of significant high and low events
# # more than the max displayed (10) each
# today = datetime.datetime.today()
# dt = str(today.year) + "-" + str(today.month) + "-" + str(today.day)
# counter = 0
# while counter < 15:
# SignificantEvents(lake=m, event_type='High', date=dt,
# height=99.99).save()
# SignificantEvents(lake=m, event_type='Low', date=dt,
# drought="1970-71", height=99.99).save()
# counter += 1
# response = self.client.get(reverse('map:story', args=['A', 'mr one']))
# hdr_reg = response.context['header_regions']
# self.assertEqual(hdr_reg, [{'name': region_nm_1,
# 'letter': region_lt_1},
# {'name': region_nm_2,
# 'letter': region_lt_2}])
# hdr_lks = response.context['header_lakes']
# self.assertEqual(hdr_lks, [{'name': res_nm_1,
# 'region': res_lt_1,
# 'class': 'enabled'},
# {'name': res_nm_2,
# 'region': res_lt_2,
# 'class': 'disabled'}])
# # check the extent in context
# self.assertIs('extent' in response.context, True)
# self.assertIs(isinstance(response.context['extent'], list), True)
# self.assertEqual(len(response.context['extent']), 4)
# for coordinate in response.context['extent']:
# self.assertIs(type(coordinate), float)
# # check the layer info
# config = response.context['layer']
# check_keys = ['table_name', 'label_field', 'carto_css',
# 'carto_lbl', 'interactivity', 'carto_story_css']
# # check the keys of the layer config
# for key in check_keys:
# self.assertIs(key in config.keys(), True)
# # check the value type for each key
# self.assertIs(isinstance(config['table_name'], str), True)
# self.assertIs(isinstance(config['label_field'], str), True)
# self.assertIs(isinstance(config['carto_css'], str), True)
# self.assertIs(isinstance(config['carto_lbl'], str), True)
# self.assertIs(isinstance(config['interactivity'], list), True)
# self.assertIs(isinstance(config['carto_story_css'], str), True)
# # verify 2 layer fields: 1 for region letter, 1 for name
# self.assertEqual(len(config['interactivity']), 2)
# # check the story content in context referencing the table
# c = StoryContent.objects.get(lake=m)
# self.assertEqual(response.context['story'], c)
# # check the lake in context is referencing the url lake
# lake_in_url = response.request['PATH_INFO'].split("/")[2]
# self.assertEqual(response.context['lake'], lake_in_url)
# # check that links is a key in the context
# self.assertIs('links' in response.context, True)
# self.assertIs(isinstance(response.context['links'], list),
# True)
# # check that stats is a key in the context
# self.assertIs('stats' in response.context, True)
# self.assertIs(isinstance(response.context['stats'],
# type(LakeStatistics())), True)
# ls = LakeStatistics.objects.get(lake=m)
# ls = ls.string_numbers()
# ls = ls.set_displays()
# self.assertEqual(response.context['stats'], ls)
# # check that high events is a key in the context
# self.assertIs('high_events' in response.context, True)
# self.assertIs(isinstance(response.context['high_events'], list),
# True)
# # test high events list isn't larger than 10
# self.assertEqual(len(response.context['high_events']), 10)
# # test high events list objects don't have drought but does have rank
# for i in response.context['high_events']:
# self.assertIs('drought' in i, False)
# self.assertIs('rank' in i, True)
# # check that low events is a key in the context
# self.assertIs('low_events' in response.context, True)
# self.assertIs(isinstance(response.context['low_events'], list),
# True)
# # test low events list isn't larger than 10
# self.assertEqual(len(response.context['low_events']), 10)
# # test low events list objects have drought and rank keys
# for i in response.context['low_events']:
# self.assertIs('drought' in i, True)
# self.assertIs('rank' in i, True)
# # check that overlays and overlay_order are context keys
# self.assertIs('overlays' in response.context, True)
# self.assertIs('overlay_order' in response.context, True)
# # test that overlay_order list matches keys in overlays
# self.assertEqual(len(response.context['overlay_order']),
# len(response.context['overlays'].keys()))
# for k in response.context['overlays'].keys():
# self.assertIs(k in response.context['overlay_order'], True)
# for k in response.context['overlay_order']:
# self.assertIs(k in response.context['overlays'].keys(), True)
# # test each overlay at least has table_name, toc_label, carto_css
# for k in response.context['overlays'].keys():
# overlay_config = response.context['overlays'][k]
# self.assertIs('toc_label' in overlay_config.keys(), True)
# self.assertIs('table_name' in overlay_config.keys(), True)
# self.assertIs('carto_css' in overlay_config.keys(), True)
# # test overlay toc_label matches key name
# self.assertEqual(k, overlay_config['toc_label'])
# # check that overlay query in context equaling lake id
# self.assertIs('overlay_query' in response.context, True)
# self.assertIs(isinstance(response.context['overlay_query'], int),
# True)
# self.assertEqual(response.context['overlay_query'], m.id)
# def test_about_context(self):
# """
# Test the about template context; config & header lists
# """
# # although the header functions are tested above, we will retest them
# # here to verify they are in the context
# region_nm_1, region_lt_1 = 'rwpa 1', 'A'
# region_nm_2, region_lt_2 = 'rwpa 2', 'B'
# RWPAs(objectid=1, reg_name=region_nm_1, letter=region_lt_1,
# shape_leng=10, shape_area=2, geom=test_geom).save()
# RWPAs(objectid=2, reg_name=region_nm_2, letter=region_lt_2,
# shape_leng=10, shape_area=2, geom=test_geom).save()
# res_nm_1, res_lt_1 = 'mr 1', 'A'
# res_nm_2, res_lt_2 = 'mr 2', 'B'
# MajorReservoirs(res_lbl=res_nm_1, region=res_lt_1,
# geom=test_geom).save()
# MajorReservoirs(res_lbl=res_nm_2, region=res_lt_2,
# geom=test_geom).save()
# response = self.client.get(reverse('map:about'))
# hdr_reg = response.context['header_regions']
# self.assertEqual(hdr_reg, [{'name': region_nm_1,
# 'letter': region_lt_1},
# {'name': region_nm_2,
# 'letter': region_lt_2}])
# hdr_lks = response.context['header_lakes']
# self.assertEqual(hdr_lks, [{'name': res_nm_1,
# 'region': res_lt_1,
# 'class': 'disabled'},
# {'name': res_nm_2,
# 'region': res_lt_2,
# 'class': 'disabled'}])
# # check the version number in the context
# self.assertIs('version' in response.context, True)
# self.assertEqual(response.context['version'], settings.VERSION)
# def test_templates(self):
# """
# Test view templates include required leaflet and html
# """
# leaflet_templates = ['leaflet/js.html', 'leaflet/css.html',
# 'leaflet/_leaflet_map.html']
# base_template = 'map/base.html'
# index_template = 'map/index.html'
# region_template = 'map/region.html'
# story_template = 'map/story.html'
# story_mobile_template = 'map/story_mobile.html'
# about_template = 'map/about.html'
# # index template
# response = self.client.get('/')
# template_names = []
# for t in response.templates:
# template_names.append(t.name)
# for lt in leaflet_templates:
# self.assertIs(lt in template_names, True)
# self.assertIs(index_template in template_names, True)
# self.assertIs(base_template in template_names, True)
# # region template
# RWPAs(objectid=1, reg_name='Test Region', letter='A',
# shape_leng=10, shape_area=2, geom=test_geom).save()
# response = self.client.get('/A/')
# template_names = []
# for t in response.templates:
# template_names.append(t.name)
# for lt in leaflet_templates:
# self.assertIs(lt in template_names, True)
# self.assertIs(region_template in template_names, True)
# self.assertIs(base_template in template_names, True)
# # story template
# MajorReservoirs(res_lbl='Lake Tester', region='A',
# geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl='Lake Tester')
# StoryContent(lake=m, summary="text here", history="text there").save()
# response = self.client.get('/A/Lake%20Tester')
# template_names = []
# for t in response.templates:
# template_names.append(t.name)
# for lt in leaflet_templates:
# self.assertIs(lt in template_names, True)
# self.assertIs(story_template in template_names, True)
# self.assertIs(base_template in template_names, True)
# # story mobile template
# ipad_user_agent = ('Mozilla/5.0 (iPad; CPU OS 7_0_6 like Mac OS X) '
# 'AppleWebKit/537.51.1 (KHTML, like Gecko) Coast/2'
# '.0.5.71150 Mobile/11B651 Safari/7534.48.3')
# response = self.client.get('/A/Lake%20Tester',
# HTTP_USER_AGENT=ipad_user_agent,
# HTTP_ACCEPT="*/*")
# template_names = []
# for t in response.templates:
# template_names.append(t.name)
# for lt in leaflet_templates:
# self.assertIs(lt in template_names, True)
# self.assertIs(story_mobile_template in template_names, True)
# self.assertIs(base_template in template_names, True)
# # about template
# response = self.client.get('/about/')
# template_names = []
# for t in response.templates:
# template_names.append(t.name)
# self.assertIs(about_template in template_names, True)
# self.assertIs(base_template in template_names, True)
# class SignalTests(TestCase):
# def test_story_content_post_save(self):
# """
# Test StoryContent post_save enables and post_delete
# disables MajorReservoir
# """
# res_nm_1, res_lt_1 = 'mr 1', 'A'
# MajorReservoirs(res_lbl=res_nm_1, region=res_lt_1,
# geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=res_nm_1)
# self.assertEqual(m.story, 'disabled')
# StoryContent(lake=m, summary="text here", history="text there").save()
# m = MajorReservoirs.objects.get(res_lbl=res_nm_1)
# self.assertEqual(m.story, 'enabled')
# s = StoryContent.objects.get(summary="text here")
# self.assertEqual(s.lake, m)
# s.delete()
# m = MajorReservoirs.objects.get(res_lbl=res_nm_1)
# self.assertEqual(m.story, 'disabled')
# class StaticFileTests(TestCase):
# def test_legend_images_exist(self):
# """
# Test the legend images exist
# """
# # iterate the overlays in config, they are the ones to be added
# # to the map
# for k in overlays.keys():
# filename = "map/images/{0}.png".format(k)
# result = finders.find(filename)
# self.assertIs(isinstance(result, str), True)
# # test Lake overlay image as it is not part of the config
# result = finders.find("map/images/Lake.png")
# self.assertIs(isinstance(result, str), True)
# def test_css_exists(self):
# """
# Test the css file exists
# """
# result = finders.find("map/style.css")
# self.assertIs(isinstance(result, str), True)
# def test_base_images(self):
# """
# Test the base html images (logos and such)
# """
# response = self.client.get('/')
# soup = BeautifulSoup(response.content, "html.parser")
# imgs = soup.findAll('img')
# img_srcs = [i['src'] for i in imgs]
# for s in img_srcs:
# fixed_path = s.replace(settings.STATIC_URL, "")
# result = finders.find(fixed_path)
# self.assertIs(isinstance(result, str), True)
# class MiddlewareTests(TestCase):
# def test_mobile_detection(self):
# """
# Test the mobile detection middleware for story template
# """
# lake_name = "Lake Travis"
# lake_region = "K"
# MajorReservoirs(res_lbl=lake_name, region=lake_region,
# geom=test_geom).save()
# m = MajorReservoirs.objects.get(res_lbl=lake_name)
# StoryContent(lake=m)
# ipad_user_agent = ('Mozilla/5.0 (iPad; CPU OS 7_0_6 like Mac OS X) '
# 'AppleWebKit/537.51.1 (KHTML, like Gecko) Coast/2'
# '.0.5.71150 Mobile/11B651 Safari/7534.48.3')
# r = '/' + lake_region + '/' + lake_name
# req = HttpRequest()
# req.path = r
# req.META['HTTP_USER_AGENT'] = ipad_user_agent
# req.META['HTTP_ACCEPT'] = '*/*'
# middleware = MobileDetectionMiddleware()
# req = middleware.process_request(req)
# self.assertIs(req.is_mobile, True)
# req = HttpRequest()
# req.path = r
# req.META['HTTP_USER_AGENT'] = 'Mozilla/5.0'
# req.META['HTTP_ACCEPT'] = '*/*'
# middleware = MobileDetectionMiddleware()
# req = middleware.process_request(req)
# self.assertIs(req.is_mobile, False)
# ======================================================================
# repo: TNRIS/lake-gallery
# file: lakegallery/map/migrations/0028_auto_20171017_1443.py
# ======================================================================
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-17 19:43
from __future__ import unicode_literals
from django.db import migrations, models
import map.validators
class Migration(migrations.Migration):
    """Auto-generated Django migration (2017-10-17).

    * Adds ``LakeStatistics.wdft_link`` — URL of the lake's Water Data
      For Texas page.
    * Attaches the past-date validator to ``SignificantEvents.date``.
    """

    # must apply after the migration that added MajorReservoirs.story
    dependencies = [
        ('map', '0027_majorreservoirs_story'),
    ]

    operations = [
        migrations.AddField(
            model_name='lakestatistics',
            name='wdft_link',
            field=models.URLField(help_text='WDFT lake page link. Ex: https://waterdatafortexas.org/reservoirs/individual/<lake name>', null=True),
        ),
        migrations.AlterField(
            model_name='significantevents',
            name='date',
            field=models.DateField(validators=[map.validators.validate_past_dates]),
        ),
    ]
# ======================================================================
# repo: TNRIS/lake-gallery
# file: lakegallery/map/migrations/0018_auto_20171004_0832.py
# ======================================================================
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-04 13:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration (2017-10-04).

    * Gives LakeStatistics a correct plural verbose name ("Lake
      Statistics" for both singular and plural).
    * Makes ``num_of_floodgates`` a nullable PositiveIntegerField with a
      human-friendly verbose name.
    """

    dependencies = [
        ('map', '0017_auto_20171003_1556'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='lakestatistics',
            options={'verbose_name': 'Lake Statistics', 'verbose_name_plural': 'Lake Statistics'},
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='num_of_floodgates',
            field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Number of Floodgates'),
        ),
    ]
# ======================================================================
# repo: TNRIS/lake-gallery
# file: lakegallery/map/models.py
# ======================================================================
from django.conf import settings
from django.db import models
from django.contrib.gis.db import models as gismodels
from django.utils.safestring import mark_safe
from multiselectfield import MultiSelectField
from .validators import validate_past_dates
import os
import boto3
import datetime
# Dropdown options for year fields: (value, label) pairs covering 1920
# through the current year, inclusive. Evaluated once at import time.
YEAR_CHOICES = [(yr, yr)
                for yr in range(1920, datetime.datetime.now().year + 1)]
class MajorReservoirs(gismodels.Model):
    """GeoDjango model for a major reservoir polygon (WGS84).

    ``story`` marks whether the lake has story content to display;
    it appears to be toggled by save/delete signals on StoryContent —
    TODO confirm against the app's signal handlers (not visible here).
    """
    story_choices = [('disabled', 'disabled'), ('enabled', 'enabled')]

    res_name = models.CharField(max_length=50)
    # NOTE(review): field name shadows the builtin `type` in class scope
    type = models.CharField(max_length=50)
    status = models.CharField(max_length=50)
    # display label — used by __str__ and the default ordering below
    res_lbl = models.CharField(max_length=100)
    story = models.CharField(max_length=8, choices=story_choices,
                             default='disabled')
    geom = gismodels.MultiPolygonField(srid=4326)  # srid 4326 = WGS84
    objects = gismodels.GeoManager()

    def __str__(self):
        return self.res_lbl

    class Meta:
        verbose_name = "Major Reservoir"
        verbose_name_plural = "Major Reservoirs"
        ordering = ['res_lbl']
class HistoricalAerialLinks(models.Model):
    """A URL to a historical aerial image set for one reservoir."""

    link = models.URLField()
    # capture year, restricted to YEAR_CHOICES (1920..current year).
    # NOTE(review): `default=datetime.datetime.now().year` is evaluated
    # once at import time, not per save — consider a callable default.
    year = models.IntegerField(choices=YEAR_CHOICES,
                               default=datetime.datetime.now().year)
    lake = models.ForeignKey(MajorReservoirs)
    # 36-char collection id — presumably a DataHub UUID string; TODO confirm
    datahub_collection_id = models.CharField(default="", max_length=36, blank=True)

    def __str__(self):
        return self.link

    def as_dict(self):
        """Return the link/year pair as a plain dict (for JSON contexts)."""
        return {
            'link': self.link,
            'year': self.year
        }

    class Meta:
        verbose_name = "Historical Aerial Link"
        verbose_name_plural = "Historical Aerial Links"
def get_upload_path(instance, filename):
    """Build the media upload path ``<lake>/<filename>``.

    Used as the ``upload_to`` callable of the ImageFields below; the
    folder name is the string form of the record's related lake.
    """
    lake_folder = str(instance.lake)
    return os.path.join(lake_folder, filename)
def remove_s3_media(file):
    """Delete the S3 object backing *file* from the media bucket.

    A no-op when *file* stringifies to "" (i.e. no photo is stored).
    """
    name = str(file)
    if not name:
        return
    s3 = boto3.client('s3')
    s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME,
                     Key=os.path.join('media', name))
class StoryContent(models.Model):
    """Narrative story content (summary, history, up to three extra
    sections) attached one-to-one to a reservoir.

    Each section may carry one or two photos stored on S3. The ``*_tag``
    methods render small HTML previews for the Django admin (hence
    ``allow_tags``). ``save`` purges replaced photos from S3.
    """
    lake = models.OneToOneField(MajorReservoirs, primary_key=True)
    summary = models.TextField()
    summary_photo_main = models.ImageField(upload_to=get_upload_path,
                                           blank=True)
    history = models.TextField(blank=True)
    history_photo_main = models.ImageField(upload_to=get_upload_path,
                                           blank=True)
    history_photo = models.ImageField(upload_to=get_upload_path,
                                      blank=True)
    section_one_nav = models.CharField(max_length=25, blank=True)
    section_one_header = models.CharField(max_length=50, blank=True)
    section_one_photo_main = models.ImageField(upload_to=get_upload_path,
                                               blank=True)
    section_one_content = models.TextField(blank=True)
    section_one_photo = models.ImageField(upload_to=get_upload_path,
                                          blank=True)
    section_two_nav = models.CharField(max_length=25, blank=True)
    section_two_header = models.CharField(max_length=50, blank=True)
    section_two_photo_main = models.ImageField(upload_to=get_upload_path,
                                               blank=True)
    section_two_content = models.TextField(blank=True)
    section_two_photo = models.ImageField(upload_to=get_upload_path,
                                          blank=True)
    section_three_nav = models.CharField(max_length=25, blank=True)
    section_three_header = models.CharField(max_length=50, blank=True)
    section_three_photo_main = models.ImageField(upload_to=get_upload_path,
                                                 blank=True)
    section_three_content = models.TextField(blank=True)
    section_three_photo = models.ImageField(upload_to=get_upload_path,
                                            blank=True)

    # every photo field name; save() checks these for replaced S3 files
    _photo_fields = ['summary_photo_main',
                     'history_photo_main', 'history_photo',
                     'section_one_photo_main', 'section_one_photo',
                     'section_two_photo_main', 'section_two_photo',
                     'section_three_photo_main', 'section_three_photo']

    def _photo_preview(self, photo):
        """Return an admin-safe <img> preview for *photo*, or the
        (empty) field value itself when no photo is set."""
        if photo != "":
            src = "%s%s" % (settings.MEDIA_URL, photo)
            return mark_safe('<img src="%s" style="max-height:150px;" />'
                             '<p>%s</p>' % (src, src))
        return photo

    # Admin preview methods: one per photo field, all delegating to
    # _photo_preview (previously nine copy-pasted bodies).
    def summ_main_tag(self):
        return self._photo_preview(self.summary_photo_main)
    summ_main_tag.allow_tags = True

    def hist_main_tag(self):
        return self._photo_preview(self.history_photo_main)
    hist_main_tag.allow_tags = True

    def hist_tag(self):
        return self._photo_preview(self.history_photo)
    hist_tag.allow_tags = True

    def one_main_tag(self):
        return self._photo_preview(self.section_one_photo_main)
    one_main_tag.allow_tags = True

    def one_tag(self):
        return self._photo_preview(self.section_one_photo)
    one_tag.allow_tags = True

    def two_main_tag(self):
        return self._photo_preview(self.section_two_photo_main)
    two_main_tag.allow_tags = True

    def two_tag(self):
        return self._photo_preview(self.section_two_photo)
    two_tag.allow_tags = True

    def three_main_tag(self):
        return self._photo_preview(self.section_three_photo_main)
    three_main_tag.allow_tags = True

    def three_tag(self):
        return self._photo_preview(self.section_three_photo)
    three_tag.allow_tags = True

    def save(self, *args, **kw):
        """Save the record, then delete from S3 any photo that this
        save replaced with a different file."""
        try:
            old = type(self).objects.get(pk=self.pk) if self.pk else None
        except Exception:  # e.g. DoesNotExist on the first save of this pk
            old = None
        super(StoryContent, self).save(*args, **kw)
        if old is not None:
            for p in self._photo_fields:
                old_attr = getattr(old, p)
                if old_attr != getattr(self, p):
                    remove_s3_media(old_attr)

    def __str__(self):
        return str(self.lake)

    class Meta:
        verbose_name = "Story Content"
        verbose_name_plural = "Story Content"
class LakeStatistics(models.Model):
purpose_choices = (('Flood Management', 'Flood Management'),
('Water Storage', 'Water Storage'),
('Hydroelectric Power', 'Hydroelectric Power'))
lake = models.OneToOneField(MajorReservoirs, primary_key=True)
# general stats
original_name = models.CharField(max_length=50, blank=True)
primary_purposes = MultiSelectField(choices=purpose_choices, null=True,
blank=True)
location = models.CharField(max_length=50, blank=True,
help_text="Ex. Travis County, Texas")
construction_dates = models.CharField(max_length=50, blank=True,
help_text="Ex. 1937 to 1942")
length_of_lake = models.FloatField(default=0, blank=True,
help_text="Miles")
miles_of_shoreline = models.FloatField(default=0, blank=True,
help_text="Miles")
maximum_width = models.FloatField(default=0, blank=True, help_text="Miles")
lake_area = models.FloatField(default=0, blank=True, help_text="Acres")
lake_capacity = models.FloatField(default=0, blank=True,
help_text="Acre-feet")
full_elevation_msl = models.FloatField(default=0, blank=True,
help_text="Mean Sea Level")
full_elevation_gal = models.FloatField(default=0, blank=True,
help_text="Gallons of Water")
maximum_depth = models.FloatField(default=0, blank=True, help_text="Feet")
average_depth = models.FloatField(default=0, blank=True, help_text="Feet")
historic_high_msl = models.FloatField(default=0, blank=True,
help_text="Feet above Mean Sea"
" Level")
historic_high_date = models.DateField(null=True, blank=True,
validators=[validate_past_dates])
historic_low_msl = models.FloatField(default=0, blank=True,
help_text="Feet above Mean Sea Level")
historic_low_date = models.DateField(null=True, blank=True,
validators=[validate_past_dates])
# dam stats
dam_height = models.FloatField(default=0, blank=True, help_text="Feet")
dam_width = models.FloatField(default=0, blank=True, help_text="Feet")
spillway_elevation = models.FloatField(default=0, blank=True,
help_text="Feet above Mean Sea"
" Level")
top_of_dam = models.FloatField(default=0, blank=True,
help_text="Feet above Mean Sea Level")
num_of_floodgates = models.PositiveIntegerField(default=0, blank=True,
verbose_name='Number of '
'Floodgates')
discharge_capacity = models.TextField(blank=True, help_text="typically - "
"Cubic Feet per Second")
# current conditions
wdft_link = models.URLField(null=True, help_text="WDFT lake page link. "
"Ex: https://waterdatafortexas.org/reservoirs"
"/individual/travis", blank=True)
def string_numbers(self):
    """Convert every FloatField value on this instance to a display string.

    Each float is formatted with thousands separators and a trailing
    ``.0`` is trimmed (e.g. ``1234.0`` -> ``'1,234'``).  Mutates ``self``
    in place and returns it for chaining.
    """
    for field in self._meta.get_fields():
        if field.get_internal_type() != 'FloatField':
            continue
        formatted = "{:,}".format(getattr(self, field.name))
        if formatted.endswith(".0"):
            formatted = formatted[:-2]
        setattr(self, field.name, formatted)
    return self
def set_displays(self):
    """Set boolean display flags for the story-page template.

    ``self.general_stats`` / ``self.dam_stats`` become True when at least
    one field in the corresponding group holds a non-default value.
    Intended to run after string_numbers(), so the defaults list contains
    both raw defaults and their stringified forms.  Returns ``self``.
    """
    # Values considered "unset": raw defaults plus what string_numbers()
    # turns them into ("0", "0.0") plus empty/None placeholders.
    self.stat_defaults = [0.0, 0, "0", "0.0", "", None, "None"]
    self.primary_purposes = str(self.primary_purposes)
    self.general_stats = False
    general_stats = [self.original_name,
                     self.primary_purposes,
                     self.location,
                     self.construction_dates,
                     self.length_of_lake,
                     self.miles_of_shoreline,
                     self.maximum_width,
                     self.lake_area,
                     self.lake_capacity,
                     self.full_elevation_msl,
                     self.full_elevation_gal,
                     self.maximum_depth,
                     self.average_depth,
                     self.historic_high_msl,
                     self.historic_high_date,
                     self.historic_low_msl,
                     self.historic_low_date]
    for g in general_stats:
        if g not in self.stat_defaults:
            self.general_stats = True
    self.dam_stats = False
    dam_stats = [self.dam_height,
                 self.dam_width,
                 self.spillway_elevation,
                 self.top_of_dam,
                 self.num_of_floodgates,
                 self.discharge_capacity]
    for d in dam_stats:
        if d not in self.stat_defaults:
            # NOTE(review): general_stats is also set True here — presumably
            # intentional so the stats section renders whenever any dam stat
            # exists, but it mirrors the loop above; confirm not a copy-paste.
            self.general_stats = True
            self.dam_stats = True
    return self
def __str__(self):
    # Display as the related lake's string form (delegates to the FK target).
    return str(self.lake)
class Meta:
    # "Statistics" is already plural, so singular and plural labels match.
    verbose_name = "Lake Statistics"
    verbose_name_plural = "Lake Statistics"
class SignificantEvents(models.Model):
    """A historic high- or low-water event recorded for a reservoir."""

    lake = models.ForeignKey(MajorReservoirs)
    event_type = models.CharField(max_length=4,
                                  choices=[('High', 'High'), ('Low', 'Low')],
                                  default='High')
    # Must be in the past (see map.validators.validate_past_dates).
    date = models.DateField(validators=[validate_past_dates])
    height = models.FloatField(help_text="Feet above mean sea level")
    drought = models.CharField(max_length=9, blank=True,
                               help_text="Year range of drought (low "
                               "events only). Example: '1947-57'")

    def __str__(self):
        return str(self.lake) + " " + self.event_type + " " + str(self.date)

    def as_dict(self):
        # Serialized form consumed by the story view's high/low event tables.
        return {
            'date': self.date,
            'height': self.height,
            'drought': self.drought
        }

    class Meta:
        verbose_name = "Significant Event"
        verbose_name_plural = "Significant Events"
"""
Overlay Layers for stories (points of interest)
"""
class BoatRamps(gismodels.Model):
    """Point-of-interest overlay: boat ramp locations for a lake."""

    lake = models.ForeignKey(MajorReservoirs)
    name = models.CharField(max_length=30, blank=True)
    operator = models.CharField(max_length=50, blank=True)
    geom = gismodels.PointField(srid=4326)  # WGS84 point
    objects = gismodels.GeoManager()

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = "Boat Ramp"
        verbose_name_plural = "Boat Ramps"
class ChannelMarkers(gismodels.Model):
    """Point-of-interest overlay: navigation channel markers for a lake."""

    lake = models.ForeignKey(MajorReservoirs)
    # NOTE(review): `odd` appears to be a flag from the source shapefile
    # (see load.py mapping) — exact semantics not evident here; confirm.
    odd = models.IntegerField(blank=True)
    marker_id = models.IntegerField(blank=True)
    year = models.IntegerField(blank=True)
    geom = gismodels.PointField(srid=4326)  # WGS84 point
    objects = gismodels.GeoManager()

    def __str__(self):
        return str(self.lake) + " " + str(self.marker_id)

    class Meta:
        verbose_name = "Channel Marker"
        verbose_name_plural = "Channel Markers"
class Hazards(gismodels.Model):
    """Polygon overlay: hazard / restricted areas on a lake."""

    hazard_choices = (('Hazard', 'Hazard'),
                      ('No Boats', 'No Boats'),
                      ('No Wake', 'No Wake'),
                      ('Rocks', 'Rocks'))
    lake = models.ForeignKey(MajorReservoirs)
    hazard_type = models.CharField(max_length=35, default='Hazard',
                                   choices=hazard_choices)
    num_buoys = models.CharField(max_length=10, blank=True)
    geom = gismodels.PolygonField(srid=4326)  # WGS84 polygon
    objects = gismodels.GeoManager()

    def __str__(self):
        return str(self.lake) + " " + self.hazard_type

    class Meta:
        verbose_name = "Hazard"
        verbose_name_plural = "Hazards"
class Parks(gismodels.Model):
    """Polygon overlay: parks and recreation/preserve areas around a lake."""

    type_choices = (('Park', 'Park'),
                    ('Undeveloped Recreation Area',
                     'Undeveloped Recreation Area'),
                    ('Preserve', 'Preserve'),
                    ('Park/Preserve', 'Park/Preserve'))
    lake = models.ForeignKey(MajorReservoirs)
    park_type = models.CharField(max_length=50, choices=type_choices)
    name = models.CharField(max_length=100, blank=True)
    # Area figures carried over from the source shapefile (see load.py).
    acres = models.FloatField(default=0, blank=True)
    area = models.FloatField(default=0, blank=True)
    perimeter = models.FloatField(default=0, blank=True)
    geom = gismodels.PolygonField(srid=4326)  # WGS84 polygon
    objects = gismodels.GeoManager()

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = "Park"
        verbose_name_plural = "Parks"
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0029_auto_20171018_0822.py
|
<reponame>TNRIS/lake-gallery<gh_stars>1-10
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-18 13:22
from __future__ import unicode_literals
from django.db import migrations, models
import map.models
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Adds "main photo" image fields to StoryContent and tightens wdft_link.

    dependencies = [
        ('map', '0028_auto_20171017_1443'),
    ]

    operations = [
        migrations.AddField(
            model_name='storycontent',
            name='history_photo_main',
            field=models.ImageField(blank=True, upload_to=map.models.get_upload_path),
        ),
        migrations.AddField(
            model_name='storycontent',
            name='section_one_photo_main',
            field=models.ImageField(blank=True, upload_to=map.models.get_upload_path),
        ),
        migrations.AddField(
            model_name='storycontent',
            name='section_three_photo_main',
            field=models.ImageField(blank=True, upload_to=map.models.get_upload_path),
        ),
        migrations.AddField(
            model_name='storycontent',
            name='section_two_photo_main',
            field=models.ImageField(blank=True, upload_to=map.models.get_upload_path),
        ),
        migrations.AddField(
            model_name='storycontent',
            name='summary_photo_main',
            field=models.ImageField(blank=True, upload_to=map.models.get_upload_path),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='wdft_link',
            field=models.URLField(help_text='WDFT lake page link. Ex: https://waterdatafortexas.org/reservoirs/individual/travis', null=True),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0017_auto_20171003_1556.py
|
<filename>lakegallery/map/migrations/0017_auto_20171003_1556.py<gh_stars>1-10
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-03 20:56
from __future__ import unicode_literals
from django.db import migrations, models
import multiselectfield.db.fields
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Adds the bulk of the LakeStatistics numeric/date fields.

    dependencies = [
        ('map', '0016_lakestatistics'),
    ]

    operations = [
        migrations.AddField(
            model_name='lakestatistics',
            name='average_depth',
            field=models.FloatField(blank=True, help_text='Feet', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='construction_dates',
            field=models.CharField(blank=True, help_text='Ex. 1937 to 1942', max_length=50),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='dam_height',
            field=models.FloatField(blank=True, help_text='Feet', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='dam_width',
            field=models.FloatField(blank=True, help_text='Feet', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='discharge_capacity',
            field=models.TextField(blank=True, help_text='typically - Cubic Feet per Second'),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='full_elevation_gal',
            field=models.FloatField(blank=True, help_text='Gallons of Water', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='full_elevation_msl',
            field=models.FloatField(blank=True, help_text='Mean Sea Level', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='historic_high_date',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='historic_high_msl',
            field=models.FloatField(blank=True, help_text='Feet above Mean Sea Level', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='historic_low_date',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='historic_low_msl',
            field=models.FloatField(blank=True, help_text='Feet above Mean Sea Level', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='lake_area',
            field=models.FloatField(blank=True, help_text='Acres', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='lake_capacity',
            field=models.FloatField(blank=True, help_text='Acre-feet', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='length_of_lake',
            field=models.FloatField(blank=True, help_text='Miles', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='location',
            field=models.CharField(blank=True, help_text='Ex. Travis County, Texas', max_length=50),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='maximum_depth',
            field=models.FloatField(blank=True, help_text='Feet', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='maximum_width',
            field=models.FloatField(blank=True, help_text='Miles', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='miles_of_shoreline',
            field=models.FloatField(blank=True, help_text='Miles', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='num_of_floodgates',
            field=models.FloatField(blank=True, null=True, verbose_name='Number of Floodgates'),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='spillway_elevation',
            field=models.FloatField(blank=True, help_text='Feet above Mean Sea Level', null=True),
        ),
        migrations.AddField(
            model_name='lakestatistics',
            name='top_of_dam',
            field=models.FloatField(blank=True, help_text='Feet above Mean Sea Level', null=True),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='primary_purposes',
            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Flood Management', 'Flood Management'), ('Water Storage', 'Water Storage'), ('Hydroelectric Power', 'Hydroelectric Power')], max_length=50),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0020_auto_20171004_1050.py
|
<reponame>TNRIS/lake-gallery
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-04 15:50
from __future__ import unicode_literals
from django.db import migrations
import multiselectfield.db.fields
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Makes primary_purposes nullable.

    dependencies = [
        ('map', '0019_auto_20171004_0935'),
    ]

    operations = [
        migrations.AlterField(
            model_name='lakestatistics',
            name='primary_purposes',
            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[('Flood Management', 'Flood Management'), ('Water Storage', 'Water Storage'), ('Hydroelectric Power', 'Hydroelectric Power')], max_length=50, null=True),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/validators.py
|
from django.core.exceptions import ValidationError
import datetime
def validate_past_dates(value):
    """Model-field validator: reject any date later than today.

    Returns the date unchanged when it is today or earlier; raises
    Django's ValidationError otherwise.
    """
    today = datetime.date.today()
    if value <= today:
        return value
    raise ValidationError("The date cannot be in the future!")
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0021_significantevents.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-09 16:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Creates the SignificantEvents model.

    dependencies = [
        ('map', '0020_auto_20171004_1050'),
    ]

    operations = [
        migrations.CreateModel(
            name='SignificantEvents',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('event_type', models.CharField(choices=[('High', 'High'), ('Low', 'Low')], default='High', max_length=4)),
                ('date', models.DateField()),
                ('height', models.FloatField(help_text='Feet above mean sea level')),
                ('drought', models.CharField(blank=True, help_text="Year range of drought (low events only). Example: '1947-57'", max_length=9)),
                ('lake', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='map.MajorReservoirs')),
            ],
            options={
                'verbose_name': 'Significant Event',
                'verbose_name_plural': 'Significant Events',
            },
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/views.py
|
<reponame>TNRIS/lake-gallery
from django.shortcuts import render, redirect
from django.http import Http404
from django.shortcuts import render_to_response
# from django.template import RequestContext
from .models import (MajorReservoirs, HistoricalAerialLinks,
StoryContent, LakeStatistics, SignificantEvents)
from django.contrib.gis.geos import GEOSGeometry
from django.conf import settings
"""
utility functions
"""
def get_lake_header_list():
    """Return one {'name', 'class'} dict per reservoir, sorted by name.

    'name' is the reservoir label and 'class' its story flag, both pulled
    straight from MajorReservoirs; used to build the page header menu.
    """
    pairs = MajorReservoirs.objects.values_list('res_lbl', 'story')
    header = [{'name': label, 'class': story} for label, story in pairs]
    return sorted(header, key=lambda entry: entry['name'])
"""
views/templates & redirects
"""
def index(request):
    """Render the landing page with the lake header menu."""
    context = {
        'header_lakes': get_lake_header_list(),
        'version': settings.VERSION,
    }
    return render(request, 'map/index.html', context)
def story(request, lake):
    """Render the story page (desktop or mobile template) for one reservoir.

    Assembles the map extent, historical aerial imagery WMS links, datahub
    collection ids, story content, formatted lake statistics, and the
    top-10 ranked high/low water events.  Failures while loading optional
    related records degrade to empty context entries rather than erroring
    the whole page.
    """
    res = get_lake_header_list()
    m = MajorReservoirs.objects.get(res_lbl=lake)
    ext = list(GEOSGeometry(m.geom).extent)
    n = HistoricalAerialLinks.objects.filter(lake=m)

    # Historical aerial WMS links plus two fixed statewide mosaics.
    links = [obj.as_dict() for obj in n]
    links.append({'year': 2015, 'link': 'https://webservices.tnris.org/arcgis/services/TOP/TOP15_NC_CIR_50cm/ImageServer/WMSServer'})
    links.append({'year': 2016, 'link': 'https://webservices.tnris.org/arcgis/services/NAIP/NAIP16_NC_CIR_1m/ImageServer/WMSServer'})
    links.sort(key=lambda x: x['year'])

    datahub = [{'year': obj.year, 'id': obj.datahub_collection_id}
               for obj in n]
    datahub.append({'year': 2015, 'id': 'b7e5b638-99f0-4676-9411-c88d06d49943'})
    datahub.append({'year': 2016, 'id': 'a40c2ff9-ccac-4c76-99a1-2382c09cf716'})
    datahub.sort(key=lambda x: x['year'])

    # Bare `except:` narrowed to `except Exception:` throughout so that
    # SystemExit/KeyboardInterrupt are no longer swallowed.
    try:
        c = StoryContent.objects.get(lake=m)
    except Exception:
        c = {}
    try:
        s = LakeStatistics.objects.get(lake=m)
        s = s.string_numbers()
        s = s.set_displays()
    except Exception:
        s = {}
    try:
        highs = SignificantEvents.objects.filter(lake=m, event_type='High')
        high_list = [obj.as_dict() for obj in highs]
        # Highest water first.
        high_list.sort(key=lambda x: x['height'])
        high_list.reverse()
        # enumerate replaces the former O(n^2) list.index() ranking, which
        # also mis-ranked events sharing the same height.
        for rank, event in enumerate(high_list, start=1):
            event['rank'] = rank
            del event['drought']  # drought applies to low events only
        high_list = high_list[:10]
    except Exception:
        high_list = []
    try:
        lows = SignificantEvents.objects.filter(lake=m, event_type='Low')
        low_list = [obj.as_dict() for obj in lows]
        # Lowest water first.
        low_list.sort(key=lambda x: x['height'])
        for rank, event in enumerate(low_list, start=1):
            event['rank'] = rank
        low_list = low_list[:10]
    except Exception:
        low_list = []
    context = {'header_lakes': res, 'extent': ext,
               'lake': lake, 'links': links,
               'story': c, 'stats': s, 'high_events': high_list,
               'low_events': low_list,
               'data_hub_ids': datahub,
               'version': settings.VERSION}
    if request.is_mobile is False:
        return render(request, 'map/story.html', context)
    return render(request, 'map/story_mobile.html', context)
def about(request):
    """Render the about page, including contact-form configuration."""
    context = {
        'header_lakes': get_lake_header_list(),
        'version': settings.VERSION,
        'contact_submit_url': settings.CONTACT_SUBMIT_URL,
        'recaptcha_site_key': settings.RECAPTCHA_SITE_KEY,
    }
    return render(request, 'map/about.html', context)
"""
error code handling
"""
def _error_page(code, text, snark):
    """Build the themed error response shared by all four handlers."""
    context = {'header_lakes': get_lake_header_list(),
               'code': code, 'text': text,
               'snark': snark}
    response = render_to_response('map/error.html', context)
    response.status_code = code
    return response


def bad_request(request):
    """400 handler."""
    return _error_page(400, 'Bad Request',
                       "Maybe this story book is in a different language?")


def permission_denied(request):
    """403 handler."""
    return _error_page(403, 'Permission Denied',
                       "Sorry, this story book is locked down.")


def page_not_found(request):
    """404 handler."""
    return _error_page(404, 'Page Not Found',
                       "Seems like you're looking for a page that's not in "
                       "this story book.")


def server_error(request):
    """500 handler."""
    return _error_page(500, 'Server Error',
                       "Looks like the bookshelf fell out from under this "
                       "story book.")
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0004_auto_20170925_1138.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-25 16:38
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Sets human-readable verbose names on the two base models.

    dependencies = [
        ('map', '0003_auto_20170925_1126'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='majorreservoirs',
            options={'verbose_name': 'Major Reservoir', 'verbose_name_plural': 'Major Reservoirs'},
        ),
        migrations.AlterModelOptions(
            name='rwpas',
            options={'verbose_name': 'RWPA', 'verbose_name_plural': 'RWPAs'},
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0026_auto_20171011_0905.py
|
<filename>lakegallery/map/migrations/0026_auto_20171011_0905.py<gh_stars>1-10
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-11 14:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Adds fixed choices to Hazards.hazard_type.

    dependencies = [
        ('map', '0025_boatramps_channelmarkers_hazards_parks'),
    ]

    operations = [
        migrations.AlterField(
            model_name='hazards',
            name='hazard_type',
            field=models.CharField(choices=[('Hazard', 'Hazard'), ('No Boats', 'No Boats'), ('No Wake', 'No Wake'), ('Rocks', 'Rocks')], default='Hazard', max_length=35),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0030_auto_20171114_1430.py
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-14 20:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Orders reservoirs by label and allows a blank wdft_link.

    dependencies = [
        ('map', '0029_auto_20171018_0822'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='majorreservoirs',
            options={'ordering': ['res_lbl'], 'verbose_name': 'Major Reservoir', 'verbose_name_plural': 'Major Reservoirs'},
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='wdft_link',
            field=models.URLField(blank=True, help_text='WDFT lake page link. Ex: https://waterdatafortexas.org/reservoirs/individual/travis', null=True),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/lakegallery/settings.py
|
"""
Django settings for lakegallery project.
Generated by 'django-admin startproject' using Django 1.11.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
VERSION = '2.5.6'
ALLOWED_HOSTS = ['localhost', '0.0.0.0', '.tnris.org', '.tnris.org.']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.gis',
'django_filters',
'rest_framework',
'rest_framework_gis',
'api',
'map',
'leaflet',
'djgeojson',
'bootstrap4',
'storages',
'multiselectfield',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'mobileesp.middleware.MobileDetectionMiddleware',
]
ROOT_URLCONF = 'lakegallery.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'api', 'templates'),
os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'lakegallery.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASSWORD'),
'HOST': os.environ.get('DB_HOST'),
'PORT': os.environ.get('DB_PORT'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Chicago'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
# AWS Bucket Settings for Static & Media
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_REGION_NAME = os.environ.get('AWS_S3_REGION_NAME')
# AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
AWS_S3_CUSTOM_DOMAIN = 'cdn.tnris.org'
CONTACT_SUBMIT_URL = os.environ.get('CONTACT_SUBMIT_URL')
RECAPTCHA_SITE_KEY = os.environ.get('RECAPTCHA_SITE_KEY')
MEDIAFILES_LOCATION = 'media'
DEFAULT_FILE_STORAGE = 'custom_storages.MediaStorage'
MEDIA_URL = 'https://%s/%s/' % (AWS_S3_CUSTOM_DOMAIN, MEDIAFILES_LOCATION)
STATICFILES_LOCATION = 'static'
LAKE_GALLERY_MODE = os.environ.get('LAKE_GALLERY_MODE')
print('**** Project running in %s mode ****' % LAKE_GALLERY_MODE)
if LAKE_GALLERY_MODE == 'PRODUCTION':
# For PROD - References prod static files in AWS
STATICFILES_STORAGE = 'custom_storages.StaticStorage'
STATIC_URL = 'https://%s/%s/' % (AWS_S3_CUSTOM_DOMAIN,
STATICFILES_LOCATION)
else:
# For DEV - Local Development
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
# 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
# 'rest_framework.permissions.IsAdminUser',
'rest_framework.permissions.IsAuthenticatedOrReadOnly'
],
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 10,
'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',),
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'api.renderers.BrowsableAPIRendererFormatted',
)
}
LEAFLET_CONFIG = {
'DEFAULT_CENTER': (31.185216, -99.9),
'DEFAULT_ZOOM': 6,
'MIN_ZOOM': 2,
'RESET_VIEW': False,
'PLUGINS': {
'SIDEBYSIDE': {
'js': 'map/leaflet-side-by-side.js',
'auto-include': True
},
'ESRI': {
'js': 'map/esri/esri-leaflet.js',
'auto-include': True
},
'WMTS': {
'js': 'map/leaflet_wmts/leaflet-tilelayer-wmts.js',
'auto-include': True
},
'EASYBUTTON': {
'js': 'map/leaflet_easybutton/easy-button.js',
'css': 'map/leaflet_easybutton/easy-button.css',
'auto-include': True
}
},
'TILES': []
}
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0034_delete_rwpas.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-11 13:37
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Drops the retired RWPAs model.

    dependencies = [
        ('map', '0033_auto_20190613_1206'),
    ]

    operations = [
        migrations.DeleteModel(
            name='RWPAs',
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/lakegallery/urls.py
|
"""lakegallery URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import (include, url, handler400, handler403,
handler404, handler500)
from django.contrib.gis import admin
from django.conf import settings
from api.api import router
from django.views.generic.base import RedirectView
# Custom error handlers — the map app renders themed error pages.
handler400 = 'map.views.bad_request'
handler403 = 'map.views.permission_denied'
handler404 = 'map.views.page_not_found'
handler500 = 'map.views.server_error'

urlpatterns = [
    url(r'^api/', include(router.urls)),
    # disabled browsable api authentication since nothing is editable
    # url(r'^api-auth/', include('rest_framework.urls',
    #                            namespace='rest_framework')),
    url(r'^admin/', admin.site.urls),
    # Redirect bare '/admin' (no trailing slash) to the admin index.
    url(r'^admin', RedirectView.as_view(url='admin/')),
    url(r'^', include('map.urls'))
]
|
TNRIS/lake-gallery
|
lakegallery/api/api.py
|
<filename>lakegallery/api/api.py
from rest_framework import routers
from .views import ReservoirsViewSet
from rest_framework_extensions.routers import NestedRouterMixin
class NestedDefaultRouter(NestedRouterMixin, routers.DefaultRouter):
    """DRF DefaultRouter with nested-route support from rest_framework_extensions."""
    pass


router = NestedDefaultRouter()
# main routes
reservoirs_router = router.register(r'reservoirs', ReservoirsViewSet,
                                    'api_reservoirs')
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0022_auto_20171009_1430.py
|
<reponame>TNRIS/lake-gallery
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-09 19:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Adds default ordering by reservoir label.

    dependencies = [
        ('map', '0021_significantevents'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='majorreservoirs',
            options={'ordering': ['res_lbl'], 'verbose_name': 'Major Reservoir', 'verbose_name_plural': 'Major Reservoirs'},
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0033_auto_20190613_1206.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.21 on 2019-06-13 17:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Gives datahub_collection_id a blank default.

    dependencies = [
        ('map', '0032_auto_20190613_0802'),
    ]

    operations = [
        migrations.AlterField(
            model_name='historicalaeriallinks',
            name='datahub_collection_id',
            field=models.CharField(blank=True, default='', max_length=36),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0008_auto_20170928_1323.py
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-28 18:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Makes StoryContent.lake the primary key (one-to-one with reservoir).

    dependencies = [
        ('map', '0007_storycontent'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='storycontent',
            name='id',
        ),
        migrations.AlterField(
            model_name='storycontent',
            name='lake',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='map.MajorReservoirs'),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/load.py
|
import os
from django.contrib.gis.utils import LayerMapping
from .models import (MajorReservoirs, BoatRamps, ChannelMarkers,
Hazards, Parks)
# Field mappings: model attribute -> shapefile attribute for LayerMapping.
# The nested {'res_lbl': 'Lake'} dicts map the ForeignKey through the
# reservoir's res_lbl field.
majorreservoirs_mapping = {
    'res_name': 'RES_NAME',
    'type': 'TYPE',
    'status': 'STATUS',
    'res_lbl': 'RES_LBL',
    'geom': 'MULTIPOLYGON25D',
}

majorreservoirs_shp = os.path.abspath(
    os.path.join(os.path.dirname(
        __file__), 'data',
        '2017_Major_Reservoirs_With_Regions_WGS84.shp'),
)

boatramps_mapping = {
    'lake': {'res_lbl': 'Lake'},
    'name': 'NAME',
    'operator': 'Operator',
    'geom': 'POINT',
}

boatramps_shp = os.path.abspath(
    os.path.join(os.path.dirname(
        __file__), 'data', 'points_of_interest',
        'lake_boat_ramps.shp'),
)

channelmarkers_mapping = {
    'lake': {'res_lbl': 'Lake'},
    'odd': 'ODD',
    'marker_id': 'MarkerID',
    'year': 'Year',
    'geom': 'POINT',
}

channelmarkers_shp = os.path.abspath(
    os.path.join(os.path.dirname(
        __file__), 'data', 'points_of_interest',
        'lake_channel_markers.shp'),
)

hazards_mapping = {
    'lake': {'res_lbl': 'Lake'},
    'hazard_type': 'TYPE',
    'num_buoys': 'NUM_BUOYS',
    'geom': 'POLYGON',
}

hazards_shp = os.path.abspath(
    os.path.join(os.path.dirname(
        __file__), 'data', 'points_of_interest',
        'lake_hazards.shp'),
)

parks_mapping = {
    'lake': {'res_lbl': 'Lake'},
    'park_type': 'TYPE',
    'name': 'NAME',
    'acres': 'ACRES',
    'area': 'AREA',
    'perimeter': 'PERIMETER',
    'geom': 'POLYGON',
}

parks_shp = os.path.abspath(
    os.path.join(os.path.dirname(
        __file__), 'data', 'points_of_interest',
        'lake_parks.shp'),
)
def run(verbose=True):
    """Load each point-of-interest shapefile into its model via LayerMapping.

    All four layers use the same WGS84 data (transform=False) and Latin-1
    attribute encoding, so the previously copy-pasted calls are collapsed
    into one data-driven loop (same order, same behavior).

    Note: majorreservoirs_mapping above is not loaded here; presumably the
    reservoirs layer is imported separately — confirm before adding it.
    """
    layers = [
        (BoatRamps, boatramps_shp, boatramps_mapping),
        (ChannelMarkers, channelmarkers_shp, channelmarkers_mapping),
        (Hazards, hazards_shp, hazards_mapping),
        (Parks, parks_shp, parks_mapping),
    ]
    for model, shp_path, mapping in layers:
        lm = LayerMapping(
            model, shp_path, mapping,
            transform=False, encoding='iso-8859-1',
        )
        lm.save(strict=True, verbose=verbose)
|
TNRIS/lake-gallery
|
lakegallery/map/urls.py
|
from django.conf.urls import url
from . import views
from django.views.generic.base import RedirectView
app_name = 'map'

urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^about/$', views.about, name='about'),
    # Redirect bare '/about' (no trailing slash) to the about page.
    url(r'^about', RedirectView.as_view(url='about/')),
    # Catch-all: any other path is treated as a lake name for the story page.
    url(r'^([\w|\W]+)$', views.story, name='story')
]
|
TNRIS/lake-gallery
|
lakegallery/api/filters.py
|
from django_filters import rest_framework as filters
from map.models import MajorReservoirs
class URLFilter(filters.FilterSet):
    """FilterSet for MajorReservoirs with a free-text ``url`` parameter."""

    url = filters.CharFilter(method='filter_url_string')

    class Meta:
        model = MajorReservoirs
        fields = ('res_lbl', )

    # querying based on URL doesn't really make sense - see comments
    # in view.
    # filters just based on if the lake name is in the URL. Not the
    # best way to do this but not many other options.
    def filter_url_string(self, queryset, name, value):
        """Return reservoirs whose label appears in *value*; first match wins."""
        for reservoir in queryset:
            label = reservoir.res_lbl
            if label in str(value):
                return queryset.filter(res_lbl=label)
        # Fix: previously fell through returning None, which is not a valid
        # return for a django-filter method (must return a queryset).
        return queryset.none()
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0016_lakestatistics.py
|
<reponame>TNRIS/lake-gallery
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-03 19:09
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import multiselectfield.db.fields
class Migration(migrations.Migration):
    # Auto-generated by Django makemigrations — do not hand-edit operations.
    # Creates the LakeStatistics model keyed one-to-one on the reservoir.

    dependencies = [
        ('map', '0015_auto_20170929_1507'),
    ]

    operations = [
        migrations.CreateModel(
            name='LakeStatistics',
            fields=[
                ('lake', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='map.MajorReservoirs')),
                ('original_name', models.CharField(blank=True, max_length=50)),
                ('primary_purposes', multiselectfield.db.fields.MultiSelectField(choices=[('Flood Management', 'Flood Management'), ('Water Storage', 'Water Storage'), ('Hydroelectric Power', 'Hydroelectric Power')], max_length=50)),
            ],
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0007_storycontent.py
|
<filename>lakegallery/map/migrations/0007_storycontent.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-28 18:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds the StoryContent model: narrative text sections for one lake.

    dependencies = [
        ('map', '0006_auto_20170927_0944'),
    ]

    operations = [
        migrations.CreateModel(
            name='StoryContent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('summary', models.TextField()),
                ('history', models.TextField()),
                # Three free-form header/content section pairs.
                ('section_one_header', models.CharField(max_length=50)),
                ('section_one_content', models.TextField()),
                ('section_two_header', models.CharField(max_length=50)),
                ('section_two_content', models.TextField()),
                ('section_three_header', models.CharField(max_length=50)),
                ('section_three_content', models.TextField()),
                ('lake', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='map.MajorReservoirs')),
            ],
            options={
                'verbose_name_plural': 'Story Content',
                'verbose_name': 'Story Content',
            },
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0024_auto_20171010_0847.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-10 13:47
from __future__ import unicode_literals
from django.db import migrations, models
import map.validators
class Migration(migrations.Migration):
    # Attaches the past-dates validator to both historic date fields.

    dependencies = [
        ('map', '0023_auto_20171009_1436'),
    ]

    operations = [
        migrations.AlterField(
            model_name='lakestatistics',
            name='historic_high_date',
            field=models.DateField(blank=True, null=True, validators=[map.validators.validate_past_dates]),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='historic_low_date',
            field=models.DateField(blank=True, null=True, validators=[map.validators.validate_past_dates]),
        ),
    ]
|
TNRIS/lake-gallery
|
lakegallery/map/migrations/0019_auto_20171004_0935.py
|
<filename>lakegallery/map/migrations/0019_auto_20171004_0935.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-04 14:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Makes every LakeStatistics numeric field optional (blank, default 0)
    # and documents its unit via help_text.

    dependencies = [
        ('map', '0018_auto_20171004_0832'),
    ]

    operations = [
        migrations.AlterField(
            model_name='lakestatistics',
            name='average_depth',
            field=models.FloatField(blank=True, default=0, help_text='Feet'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='dam_height',
            field=models.FloatField(blank=True, default=0, help_text='Feet'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='dam_width',
            field=models.FloatField(blank=True, default=0, help_text='Feet'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='full_elevation_gal',
            field=models.FloatField(blank=True, default=0, help_text='Gallons of Water'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='full_elevation_msl',
            field=models.FloatField(blank=True, default=0, help_text='Mean Sea Level'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='historic_high_msl',
            field=models.FloatField(blank=True, default=0, help_text='Feet above Mean Sea Level'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='historic_low_msl',
            field=models.FloatField(blank=True, default=0, help_text='Feet above Mean Sea Level'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='lake_area',
            field=models.FloatField(blank=True, default=0, help_text='Acres'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='lake_capacity',
            field=models.FloatField(blank=True, default=0, help_text='Acre-feet'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='length_of_lake',
            field=models.FloatField(blank=True, default=0, help_text='Miles'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='maximum_depth',
            field=models.FloatField(blank=True, default=0, help_text='Feet'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='maximum_width',
            field=models.FloatField(blank=True, default=0, help_text='Miles'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='miles_of_shoreline',
            field=models.FloatField(blank=True, default=0, help_text='Miles'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='num_of_floodgates',
            field=models.PositiveIntegerField(blank=True, default=0, verbose_name='Number of Floodgates'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='spillway_elevation',
            field=models.FloatField(blank=True, default=0, help_text='Feet above Mean Sea Level'),
        ),
        migrations.AlterField(
            model_name='lakestatistics',
            name='top_of_dam',
            field=models.FloatField(blank=True, default=0, help_text='Feet above Mean Sea Level'),
        ),
    ]
|
destroyer92/pyogame
|
setup.py
|
"""Packaging script for the ogame library."""
# distutils is deprecated (PEP 632) and removed in Python 3.12; prefer
# setuptools when present and fall back for old environments.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

setup(
    name='ogame',
    packages=['ogame'],
    version='8.1.0.21',
    license='MIT',
    description='lib for the popular browsergame ogame',
    author='PapeprPieceCode',
    author_email='<EMAIL>',
    url='https://github.com/alaingilbert/pyogame',
    download_url='https://github.com/alaingilbert/pyogame.git',
    keywords=['OGame', 'lib', 'for bots', 'bot'],
    install_requires=['requests', 'bs4', 'html5lib'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.9',
    ],
)
|
destroyer92/pyogame
|
ogame/test.py
|
<reponame>destroyer92/pyogame
import unittest
from random import randint
from ogame.constants import *
class UnittestOgame(unittest.TestCase):
    """Integration tests run against a live, logged-in OGame session.

    ``empire`` must be assigned (see ``OGame.test``) before the suite runs.

    Bug fix: ``test_celestial`` called ``self.empire.celestial(id)`` with
    the *builtin* ``id`` instead of a real celestial id; it now exercises
    every collected planet/moon id.
    """
    empire = None
    # ids of all planets and moons, filled by collect_all_ids()
    ids = []

    def collect_all_ids(self):
        """Cache every planet and moon id for the other tests."""
        self.ids.extend(self.empire.planet_ids())
        self.ids.extend(self.empire.moon_ids())

    def test_Vars(self):
        self.assertTrue(isinstance(self.empire.token, str))

    def test_Events(self):
        self.assertIsInstance(self.empire.attacked(), bool)
        self.assertIsInstance(self.empire.neutral(), bool)
        self.assertIsInstance(self.empire.friendly(), bool)

    def test_Constants(self):
        speed = self.empire.server().Speed
        self.assertGreater(speed.universe, 0)
        self.assertGreater(speed.fleet, 0)
        self.assertIsInstance(self.empire.character_class(), str)
        self.assertIsInstance(self.empire.rank(), int)
        self.assertGreater(len(self.empire.planet_ids()), 0)
        planets_names = self.empire.planet_names()
        self.assertGreater(len(planets_names), 0)
        self.assertIsInstance(self.empire.id_by_planet_name(planets_names[0]), int)
        self.assertGreater(len(self.empire.moon_ids()), -1)
        self.assertGreater(len(self.empire.moon_names()), -1)
        self.collect_all_ids()
        self.assertTrue(buildings.is_supplies(buildings.metal_mine))
        self.assertTrue(buildings.is_facilities(buildings.shipyard))
        self.assertTrue(buildings.is_defenses(buildings.rocket_launcher(10)))
        self.assertTrue(research.is_research(research.energy))
        self.assertTrue(ships.is_ship(ships.small_transporter(99)))
        self.assertTrue(
            ships.ship_name(ships.light_fighter()) == 'light_fighter'
        )
        self.assertTrue(ships.ship_amount(ships.light_fighter(99)) == 99)
        self.assertEqual(resources(99, 99, 99), [99, 99, 99])
        self.assertEqual([3459, 864, 0], price(buildings.metal_mine, level=10))

    def test_slot_celestial(self):
        slot = self.empire.slot_celestial()
        self.assertGreater(slot.total, 0)

    def test_celestial(self):
        # Exercise every known planet/moon id (the original passed the
        # builtin ``id``, which is not a valid celestial id).
        for id in self.ids:
            celestial = self.empire.celestial(id)
            self.assertGreater(celestial.diameter, 0)
            self.assertGreater(celestial.free, -1)
            self.assertIsInstance(celestial.temperature, list)

    def test_celestial_coordinates(self):
        for id in self.ids:
            celestial_coordinates = self.empire.celestial_coordinates(id)
            self.assertIsInstance(celestial_coordinates, list)
            self.assertEqual(len(celestial_coordinates), 4)

    def test_resources(self):
        for id in self.ids:
            res = self.empire.resources(id)
            self.assertIsInstance(res.resources, list)
            self.assertGreater(res.darkmatter, 0)
            self.assertIsInstance(res.energy, int)

    def test_supply(self):
        sup = self.empire.supply(self.empire.planet_ids()[0])
        self.assertTrue(0 < sup.metal_mine.level)

    def test_facilities(self):
        for id in self.empire.planet_ids():
            fac = self.empire.facilities(id)
            self.assertGreater(fac.robotics_factory.level, -1)

    def test_moon_facilities(self):
        for id in self.empire.moon_ids():
            fac = self.empire.moon_facilities(id)
            self.assertGreater(fac.robotics_factory.level, -1)

    def test_research(self):
        res = self.empire.research()
        self.assertGreater(res.energy.level, -1)

    def test_ships(self):
        ship = self.empire.ships(self.ids[0])
        self.assertGreater(ship.light_fighter.amount, -1)

    def test_defences(self):
        defence = self.empire.defences(self.ids[0])
        self.assertGreater(defence.rocket_launcher.amount, -1)

    def test_galaxy(self):
        for position in self.empire.galaxy(coordinates(1, 1)):
            self.assertIsInstance(position.player, str)
            self.assertIsInstance(position.list, list)
            self.assertIsInstance(position.moon, bool)

    def test_ally(self):
        self.assertIsInstance(self.empire.ally(), list)

    def test_slot_fleet(self):
        slot = self.empire.slot_fleet()
        self.assertGreater(slot.fleet.total, 0)

    def test_fleet(self):
        UnittestOgame.test_send_fleet(self)
        for fleet in self.empire.fleet():
            self.assertIsInstance(fleet.id, int)
            if fleet.mission == mission.spy:
                self.assertTrue(fleet.mission == mission.spy)

    def test_return_fleet(self):
        UnittestOgame.test_send_fleet(self)
        for fleet in self.empire.fleet():
            if fleet.mission == mission.spy and not fleet.returns:
                fleet_returning = self.empire.return_fleet(fleet.id)
                self.assertTrue(fleet_returning)

    def test_build(self):
        before = self.empire.defences(
            self.ids[0]
        ).rocket_launcher.amount
        self.empire.build(
            what=buildings.rocket_launcher(),
            id=self.empire.planet_ids()[0]
        )
        after = self.empire.defences(
            self.ids[0]
        ).rocket_launcher
        self.assertTrue(before < after.amount or after.in_construction)

    def test_phalanx(self):
        # Deliberately not exercised - see OGame.phalanx.
        Super_Dangereous_TO_test = 'You will get Banned'

    def test_send_message(self):
        send_message = False
        while not send_message:
            for position in self.empire.galaxy(
                    coordinates(randint(1, 6), randint(1, 499))
            ):
                if status.inactive in position.status:
                    send_message = self.empire.send_message(
                        position.player_id,
                        'Hello'
                    )
                    break
        self.assertEqual(send_message, True)

    def test_spyreports(self):
        UnittestOgame.test_send_fleet(self)
        for report in self.empire.spyreports():
            self.assertIsInstance(report.fright, list)

    def test_send_fleet(self):
        espionage_probe = self.empire.ships(self.ids[0]).espionage_probe.amount
        if not 0 < espionage_probe:
            self.empire.build(ships.espionage_probe(), self.ids[0])
            # busy-wait until the probe leaves the build queue
            while self.empire.ships(self.ids[0]).espionage_probe.amount <= 0:
                continue
        fleet_send = True
        while fleet_send:
            for planet in self.empire.galaxy(
                    coordinates(randint(1, 6), randint(1, 499))
            ):
                if status.inactive in planet.status \
                        and status.vacation not in planet.status:
                    fleet_send = not self.empire.send_fleet(
                        mission.spy,
                        self.ids[0],
                        where=planet.position,
                        ships=fleet(espionage_probe=1)
                    )
                    break
        self.assertTrue(not fleet_send)

    def test_collect_rubble_field(self):
        self.empire.collect_rubble_field(self.ids[0])

    def test_relogin(self):
        self.empire.logout()
        self.empire.keep_going(self.empire.relogin)
        self.assertTrue(self.empire.is_logged_in())
|
destroyer92/pyogame
|
ogame/__init__.py
|
import re
import requests
import unittest
from bs4 import BeautifulSoup
from datetime import datetime
try:
import constants as const
except ImportError:
import ogame.constants as const
class OGame(object):
def __init__(
self,
universe,
username,
password,
token=None, user_agent=None, proxy='',
language=None, server_number=None
):
self.universe = universe
self.username = username
self.password = password
self.user_agent = user_agent
self.proxy = proxy
self.language = language
self.server_number = server_number
self.session = requests.Session()
self.session.proxies.update({'https': self.proxy})
self.token = token
if self.user_agent is None:
self.user_agent = {
'User-Agent':
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
'(KHTML, like Gecko) Chrome/88.0.4324.182 Safari/537.36'
}
self.session.headers.update(self.user_agent)
if token is None:
self.login()
else:
self.session.headers.update(
{'authorization': 'Bearer {}'.format(token)}
)
accounts = self.session.get(
url='https://lobby.ogame.gameforge.com'
'/api/users/me/accounts'
).json()
if 'error' in accounts:
del self.session.headers['authorization']
self.login()
servers = self.session.get(
url='https://lobby.ogame.gameforge.com/api/servers'
).json()
for server in servers:
if server['name'] == self.universe:
self.server_number = server['number']
break
elif server['name'] == self.universe and self.language is None:
self.server_number = server['number']
break
assert self.server_number is not None, "Universe not found"
accounts = self.session.get(
url='https://lobby.ogame.gameforge.com/api/users/me/accounts'
).json()
for account in accounts:
if account['server']['number'] == self.server_number \
and account['server']['language'] == self.language:
self.server_id = account['id']
break
elif account['server']['number'] == self.server_number \
and self.language is None:
self.server_id = account['id']
self.language = account['server']['language']
break
self.index_php = 'https://s{}-{}.ogame.gameforge.com/game/index.php?' \
.format(self.server_number, self.language)
login_link = self.session.get(
url='https://lobby.ogame.gameforge.com/api/users/me/loginLink?',
params={'id': self.server_id,
'server[language]': self.language,
'server[number]': self.server_number,
'clickedButton': 'account_list'}
).json()
self.landing_page = self.session.get(login_link['url']).text
self.landing_page = self.session.get(
self.index_php + 'page=ingame'
).text
self.landing_page = BeautifulSoup4(self.landing_page)
self.player = self.landing_page.find(
'meta', {'name': 'ogame-player-name'}
)['content']
self.player_id = int(self.landing_page.find(
'meta', {'name': 'ogame-player-id'}
)['content'])
def login(self):
self.session.get('https://lobby.ogame.gameforge.com/')
login_data = {
'identity': self.username,
'password': <PASSWORD>,
'locale': 'en_EN',
'gfLang': 'en',
'platformGameId': '1dfd8e7e-6e1a-4eb1-8c64-03c3b62efd2f',
'gameEnvironmentId': '0a31d605-ffaf-43e7-aa02-d06df7116fc8',
'autoGameAccountCreation': False
}
response = self.session.post(
'https://gameforge.com/api/v1/auth/thin/sessions',
json=login_data
)
if response.status_code == 409:
self.solve_captcha(
response.headers['gf-challenge-id']
.replace(';https://challenge.gameforge.com', '')
)
self.login()
return True
assert response.status_code != 409, 'Resolve the Captcha'
assert response.status_code == 201, 'Bad Login'
self.token = response.json()['token']
self.session.headers.update(
{'authorization': 'Bearer {}'.format(self.token)}
)
def solve_captcha(self, challenge):
response = self.session.get(
url='https://image-drop-challenge.gameforge.com/challenge/{}/en-GB'
.format(challenge)
).json()
assert response['status'] == 'presented'
response = self.session.post(
url='https://image-drop-challenge.gameforge.com/challenge/{}/en-GB'
.format(challenge),
json={"answer": 0}
).json()
if response['status'] == 'solved':
return True
else:
self.solve_captcha(challenge)
def test(self):
import ogame.test
ogame.test.UnittestOgame.empire = self
suite = unittest.TestLoader().loadTestsFromModule(ogame.test)
return unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful()
    def server(self):
        """Return a Server namespace (version, Speed, Donut) parsed from the
        landing page's ogame-* meta tags."""
        class Server:
            version = self.landing_page.find('meta', {'name': 'ogame-version'})

            class Speed:
                # universe (economy) speed multiplier
                universe = self.landing_page.find(
                    'meta', {'name': 'ogame-universe-speed'}
                )
                universe = int(universe['content'])
                # fleet speed multiplier (peaceful missions)
                fleet = self.landing_page.find(
                    'meta', {'name': 'ogame-universe-speed-fleet-peaceful'}
                )
                fleet = int(fleet['content'])

            class Donut:
                # whether galaxy coordinates wrap around ("donut" topology)
                galaxy = self.landing_page.find(
                    'meta', {'name': 'ogame-donut-galaxy'}
                )['content']
                if 1 == int(galaxy):
                    galaxy = True
                else:
                    galaxy = False
                # whether system coordinates wrap around
                system = self.landing_page.find(
                    'meta', {'name': 'ogame-donut-system'}
                )['content']
                if 1 == int(system):
                    system = True
                else:
                    system = False
        return Server
def attacked(self):
response = self.session.get(
url=self.index_php + 'page=componentOnly'
'&component=eventList&action=fetchEventBox&ajax=1&asJson=1',
headers={'X-Requested-With': 'XMLHttpRequest'}
).json()
if 0 < response['hostile']:
return True
else:
return False
def neutral(self):
response = self.session.get(
url=self.index_php + 'page=componentOnly'
'&component=eventList&action=fetchEventBox&ajax=1&asJson=1',
headers={'X-Requested-With': 'XMLHttpRequest'}
).json()
if 0 < response['neutral']:
return True
else:
return False
def friendly(self):
response = self.session.get(
url=self.index_php + 'page=componentOnly'
'&component=eventList&action=fetchEventBox&ajax=1&asJson=1',
headers={'X-Requested-With': 'XMLHttpRequest'}
).json()
if 0 < response['friendly']:
return True
else:
return False
def character_class(self):
character = self.landing_page.find_partial(
class_='sprite characterclass medium')
return character['class'][3]
def rank(self):
rank = self.landing_page.find(id='bar')
rank = rank.find_all('li')[1].text
rank = re.search(r'\((.*)\)', rank).group(1)
return int(rank)
def planet_ids(self):
ids = []
for celestial in self.landing_page.find_all(class_='smallplanet'):
ids.append(int(celestial['id'].replace('planet-', '')))
return ids
def planet_names(self):
return [planet.text for planet in
self.landing_page.find_all(class_='planet-name')]
def id_by_planet_name(self, name):
for planet_name, id in zip(
OGame.planet_names(self), OGame.planet_ids(self)
):
if planet_name == name:
return id
def name_by_planet_id(self, id):
for _id, planet_name in zip(
OGame.planet_ids(self), OGame.planet_names(self)
):
if id == _id:
return planet_name
def moon_ids(self):
moons = []
for moon in self.landing_page.find_all(class_='moonlink'):
moon = moon['href']
moon = re.search('cp=(.*)', moon).group(1)
moons.append(int(moon))
return moons
def moon_names(self):
names = []
for name in self.landing_page.find_all(class_='moonlink'):
name = name['title']
names.append(re.search(r'<b>(.*) \[', name).group(1))
return names
    def slot_celestial(self):
        """Return a Slot namespace with total and free planet slots,
        parsed from the "x/y" counter on the landing page."""
        class Slot:
            planets = self.landing_page.find(
                'p',
                attrs={'class': 'textCenter'}
            ).find('span').text.split('/')
            planets = [int(planet) for planet in planets]
            # planets == [used, total]
            free = planets[1] - planets[0]
            total = planets[1]
        return Slot
    def celestial(self, id):
        """Return a Celestial namespace (diameter, used/free fields,
        temperature, coordinates) scraped from the overview page of ``id``."""
        response = self.session.get(
            url=self.index_php + 'page=ingame&component=overview',
            params={'cp': id}
        ).text
        # textContent[1] holds "<diameter>km (<used>/<total>)".
        textContent1 = re.search(
            r'textContent\[1] = "(.*)km \(<span>(.*)<(.*)<span>(.*)<',
            response
        )
        # The temperature string format varies by server locale; try each
        # known pattern until one matches.
        querys = [
            re.compile(r'textContent\[3] = "(.*) \\u00b0C \\u00e0(.*)(.*)\\'),
            re.compile(r'textContent\[3] = "(.*)\\u00b0C to (.*)\\u00b0C"'),
            re.compile(r'textContent\[3] = "(.*) \\u00b0C (.*) (.*) \\u00b0C"'),
        ]
        textContent3 = None
        for query in querys:
            textContent3 = query.search(
                response
            )
            if textContent3 is not None:
                break

        class Celestial:
            # '.' is a thousands separator in the scraped diameter.
            diameter = int(textContent1.group(1).replace('.', ''))
            used = int(textContent1.group(2))
            total = int(textContent1.group(4))
            free = total - used
            temperature = [
                int(textContent3.group(1)),
                int(textContent3.group(2))
            ]
            coordinates = OGame.celestial_coordinates(self, id)
        return Celestial
    def celestial_coordinates(self, id):
        """Return [galaxy, system, position, type] for the planet or moon
        with ``id`` (None when the id is not in the planet bar)."""
        for celestial in self.landing_page.find_all(class_='smallplanet'):
            planet = celestial.find(class_='planetlink')
            if str(id) in planet['href']:
                # Coordinates appear as "[g:s:p]" in the link tooltip.
                coordinates = re.search(r'\[(.*)]', planet['title']).group(1)
                coordinates = [int(coords) for coords in coordinates.split(':')]
                coordinates.append(const.destination.planet)
                return coordinates
            moon = celestial.find(class_='moonlink')
            if moon and str(id) in moon['href']:
                coordinates = re.search(r'\[(.*)]', moon['title']).group(1)
                coordinates = [int(coords) for coords in coordinates.split(':')]
                coordinates.append(const.destination.moon)
                return coordinates
    def resources(self, id):
        """Return a Resources namespace (amounts, daily production, storage,
        dark matter, energy) for the celestial ``id``."""
        response = self.session.get(
            self.index_php + 'page=resourceSettings&cp={}'.format(id)
        ).text
        bs4 = BeautifulSoup4(response)

        def to_int(string):
            # Raw values may carry an 'M' suffix and stray 'n' characters.
            return int(float(string.replace('M', '000').replace('n', '')))

        class Resources:
            resources = [bs4.find(id='resources_metal')['data-raw'],
                         bs4.find(id='resources_crystal')['data-raw'],
                         bs4.find(id='resources_deuterium')['data-raw']]
            resources = [to_int(resource) for resource in resources]
            metal = resources[0]
            crystal = resources[1]
            deuterium = resources[2]
            # Per-day production from the summary row's 'undermark' cells.
            day_production = bs4.find(
                'tr',
                attrs={'class': 'summary'}
            ).find_all(
                'td',
                attrs={'class': 'undermark'}
            )
            day_production = [
                int(day_production[0].span['title'].replace('.', '')),
                int(day_production[1].span['title'].replace('.', '')),
                int(day_production[2].span['title'].replace('.', ''))
            ]
            # Storage capacities: first table row that has 'left2' cells.
            storage = bs4.find_all('tr')
            for stor in storage:
                if len(stor.find_all('td', attrs={'class': 'left2'})) != 0:
                    storage = stor.find_all('td', attrs={'class': 'left2'})
                    break
            storage = [
                int(storage[0].span['title'].replace('.', '')),
                int(storage[1].span['title'].replace('.', '')),
                int(storage[2].span['title'].replace('.', ''))
            ]
            darkmatter = to_int(bs4.find(id='resources_darkmatter')['data-raw'])
            energy = to_int(bs4.find(id='resources_energy')['data-raw'])
        return Resources
def isPossible(self: str):
if self == 'on':
return True
else:
return False
def inConstruction(self):
if self == 'active':
return True
else:
return False
    def supply(self, id):
        """Return a Supplies namespace with level/status of every supply
        building on the planet ``id``."""
        response = self.session.get(
            url=self.index_php + 'page=ingame&component=supplies&cp={}'
            .format(id)
        ).text
        bs4 = BeautifulSoup4(response)
        levels = [
            int(level['data-value'])
            for level in bs4.find_all('span', {'data-value': True})
        ]
        technologyStatus = [
            status['data-status']
            for status in bs4.find_all('li', {'class': 'technology'})
        ]

        class Supply:
            def __init__(self, i):
                # Level plus whether the building can be / is being built.
                self.level = levels[i]
                self.is_possible = OGame.isPossible(technologyStatus[i])
                self.in_construction = OGame.inConstruction(technologyStatus[i])

        class Supplies(object):
            metal_mine = Supply(0)
            crystal_mine = Supply(1)
            deuterium_mine = Supply(2)
            solar_plant = Supply(3)
            fusion_plant = Supply(4)
            # NOTE(review): indexes 5-6 are skipped - presumably the solar
            # satellite / crawler entries on this page; confirm.
            metal_storage = Supply(7)
            crystal_storage = Supply(8)
            deuterium_storage = Supply(9)
        return Supplies
    def facilities(self, id):
        """Return a Facilities namespace with level/status of every facility
        on the planet ``id``."""
        response = self.session.get(
            self.index_php + 'page=ingame&component=facilities&cp={}'
            .format(id)
        ).text
        bs4 = BeautifulSoup4(response)
        levels = [
            int(level['data-value'])
            for level in bs4.find_all(
                'span', {'class': 'level', 'data-value': True}
            )
        ]
        technologyStatus = [
            status['data-status']
            for status in bs4.find_all('li', {'class': 'technology'})
        ]

        class Facility:
            def __init__(self, i):
                # Level plus whether the facility can be / is being built.
                self.level = levels[i]
                self.is_possible = OGame.isPossible(technologyStatus[i])
                self.in_construction = OGame.inConstruction(technologyStatus[i])

        class Facilities(object):
            robotics_factory = Facility(0)
            shipyard = Facility(1)
            research_laboratory = Facility(2)
            alliance_depot = Facility(3)
            missile_silo = Facility(4)
            nanite_factory = Facility(5)
            terraformer = Facility(6)
            repair_dock = Facility(7)
        return Facilities
    def moon_facilities(self, id):
        """Return a Facilities namespace with level/status of every facility
        on the moon ``id`` (moons have their own facility set)."""
        response = self.session.get(
            url='{}page=ingame&component=facilities&cp={}'
            .format(self.index_php, id)
        ).text
        bs4 = BeautifulSoup4(response)
        levels = [
            int(level['data-value'])
            for level in bs4.find_all(class_=['targetlevel', 'level']) if level.get('data-value')
        ]
        technologyStatus = [
            status['data-status']
            for status in bs4.find_all('li', {'class': 'technology'})
        ]

        class Facility:
            def __init__(self, i):
                # Level plus whether the facility can be / is being built.
                self.level = levels[i]
                self.is_possible = OGame.isPossible(technologyStatus[i])
                self.in_construction = OGame.inConstruction(technologyStatus[i])

        class Facilities(object):
            robotics_factory = Facility(0)
            shipyard = Facility(1)
            moon_base = Facility(2)
            sensor_phalanx = Facility(3)
            jump_gate = Facility(4)
        return Facilities
    def traider(self, id):
        """Trader screen scraping - not supported yet."""
        raise NotImplementedError("function not implemented yet PLS contribute")
    def research(self, id=None):
        """Return a Researches namespace with level/status of every research.

        ``id`` selects the planet whose research page is scraped; defaults
        to the first planet.
        """
        if id is None:
            id = self.planet_ids()[0]
        response = self.session.get(
            url=self.index_php,
            params={'page': 'ingame', 'component': 'research',
                    'cp': id}
        ).text
        bs4 = BeautifulSoup4(response)
        levels = [
            int(level['data-value'])
            for level in bs4.find_all(
                'span', {'class': 'level', 'data-value': True}
            )
        ]
        technologyStatus = [
            status['data-status']
            for status in bs4.find_all('li', {'class': 'technology'})
        ]

        class Research:
            def __init__(self, i):
                # Level plus whether the research can be / is being studied.
                self.level = levels[i]
                self.is_possible = OGame.isPossible(technologyStatus[i])
                self.in_construction = OGame.inConstruction(technologyStatus[i])

        class Researches(object):
            energy = Research(0)
            laser = Research(1)
            ion = Research(2)
            hyperspace = Research(3)
            plasma = Research(4)
            combustion_drive = Research(5)
            impulse_drive = Research(6)
            hyperspace_drive = Research(7)
            espionage = Research(8)
            computer = Research(9)
            astrophysics = Research(10)
            research_network = Research(11)
            graviton = Research(12)
            weapons = Research(13)
            shielding = Research(14)
            armor = Research(15)
        return Researches
def ships(self, id):
response = self.session.get(
self.index_php + 'page=ingame&component=shipyard&cp={}'
.format(id)
).text
bs4 = BeautifulSoup4(response)
ships_amount = [
int(level['data-value'])
for level in bs4.find_all(class_='amount')
]
technologyStatus = [
status['data-status']
for status in bs4.find_all('li', {'class': 'technology'})
]
class Ship:
def __init__(self, i):
self.amount = ships_amount[i]
self.is_possible = OGame.isPossible(technologyStatus[i])
self.in_construction = OGame.inConstruction(technologyStatus[i])
class Crawler:
if id not in OGame.moon_ids(self):
amount = ships_amount[16]
self.is_possible = OGame.isPossible(technologyStatus[16])
self.in_construction = OGame.inConstruction(
technologyStatus[16]
)
else:
amount = 0
is_possible = False
in_construction = False
class Ships(object):
light_fighter = Ship(0)
heavy_fighter = Ship(1)
cruiser = Ship(2)
battleship = Ship(3)
interceptor = Ship(4)
bomber = Ship(5)
destroyer = Ship(6)
deathstar = Ship(7)
reaper = Ship(8)
explorer = Ship(9)
small_transporter = Ship(10)
large_transporter = Ship(11)
colonyShip = Ship(12)
recycler = Ship(13)
espionage_probe = Ship(14)
solarSatellite = Ship(15)
crawler = Crawler
return Ships
    def defences(self, id):
        """Return a Defences namespace with amount/status of every defence
        structure on the celestial ``id``."""
        response = self.session.get(
            self.index_php + 'page=ingame&component=defenses&cp={}'
            .format(id)
        ).text
        bs4 = BeautifulSoup4(response)
        defences_amount = [
            int(level['data-value'])
            for level in bs4.find_all(class_='amount')
        ]
        technologyStatus = [
            status['data-status']
            for status in bs4.find_all('li', {'class': 'technology'})
        ]

        class Defence:
            def __init__(self, i):
                # Amount plus whether the defence can be / is being built.
                self.amount = defences_amount[i]
                self.is_possible = OGame.isPossible(technologyStatus[i])
                self.in_construction = OGame.inConstruction(technologyStatus[i])

        class Defences(object):
            rocket_launcher = Defence(0)
            laser_cannon_light = Defence(1)
            laser_cannon_heavy = Defence(2)
            gauss_cannon = Defence(3)
            ion_cannon = Defence(4)
            plasma_cannon = Defence(5)
            shield_dome_small = Defence(6)
            shield_dome_large = Defence(7)
            missile_interceptor = Defence(8)
            missile_interplanetary = Defence(9)
        return Defences
    def galaxy(self, coords):
        """Return a list of Position namespaces for every occupied slot in
        the galaxy/system given by ``coords``."""
        response = self.session.post(
            url=self.index_php + 'page=ingame&component=galaxyContent&ajax=1',
            data={'galaxy': coords[0], 'system': coords[1]},
            headers={'X-Requested-With': 'XMLHttpRequest'}
        ).json()
        bs4 = BeautifulSoup4(response['galaxy'])

        def playerId(tag):
            # Extract the numeric id from ids like "player12345".
            numbers = re.search(r'[0-9]+', tag).group()
            return int(numbers)

        players = bs4.find_all_partial(id='player')
        player_name = {
            playerId(player['id']): player.h1.span.text
            for player in players
        }
        player_rank = {
            playerId(player['id']): int(player.a.text)
            for player in players if player.a.text.isdigit()
        }
        alliances = bs4.find_all_partial(id='alliance')
        alliance_name = {
            playerId(alliance['id']): alliance.h1.text.strip()
            for alliance in alliances
        }
        planets = []
        for row in bs4.select('#galaxytable .row'):
            status = row['class']
            status.remove('row')
            if 'empty_filter' in status:
                continue
            elif len(status) == 0:
                # No status classes at all means it's the player's own slot.
                planet_status = [const.status.yourself]
                pid = self.player_id
                player_name[pid] = self.player
            else:
                # Status classes look like "<status>_filter".
                planet_status = [
                    re.search('(.*)_filter', sta).group(1)
                    for sta in status
                ]
                player = row.find(rel=re.compile(r'player[0-9]+'))
                if not player:
                    continue
                pid = playerId(player['rel'][0])
                if pid == const.status.destroyed:
                    continue
            planet = int(row.find(class_='position').text)
            planet_cord = const.coordinates(coords[0], coords[1], int(planet))
            moon_pos = row.find(rel=re.compile(r'moon[0-9]*'))
            alliance_id = row.find(rel=re.compile(r'alliance[0-9]+'))
            alliance_id = playerId(
                alliance_id['rel']) if alliance_id else None

            class Position:
                position = planet_cord
                name = row.find(id=re.compile(r'planet[0-9]+')).h1.span.text
                player = player_name[pid]
                player_id = pid
                rank = player_rank.get(pid)
                status = planet_status
                moon = moon_pos is not None
                alliance = alliance_name.get(alliance_id)
                list = [
                    name, position, player,
                    player_id, rank, status, moon, alliance
                ]
            planets.append(Position)
        return planets
def ally(self):
alliance = self.landing_page.find(name='ogame-alliance-name')
if alliance:
return alliance
else:
return []
    def officers(self):
        """Officer management - not supported yet."""
        raise NotImplementedError("function not implemented yet PLS contribute")
    def shop(self):
        """In-game shop - not supported yet."""
        raise NotImplementedError("function not implemented yet PLS contribute")
def fleet_coordinates(self, event, Coords):
coordinate = [
coords.find(class_=Coords).a.text
for coords in event
]
coordinate = [
const.convert_to_coordinates(coords)
for coords in coordinate
]
destination = [
dest.find('figure', {'class': 'planetIcon'})
for dest in event
]
destination = [
const.convert_to_destinations(dest['class'])
for dest in destination
]
coordinates = []
for coords, dest in zip(coordinate, destination):
coords.append(dest)
coordinates.append(coords)
return coordinates
    def slot_fleet(self):
        """Return a Slot namespace with total/free fleet and expedition
        slots parsed from the fleet-dispatch page."""
        response = self.session.get(
            self.index_php + 'page=ingame&component=fleetdispatch'
        ).text
        bs4 = BeautifulSoup4(response)
        slots = bs4.find('div', attrs={'id': 'slots', 'class': 'fleft'})
        slots = [
            slot.text
            for slot in slots.find_all(class_="fleft")
        ]
        # Fleet slot text looks like "...: used/total".
        fleet = re.search(':(.*)/(.*)', slots[0])
        fleet = [fleet.group(1), fleet.group(2)]
        expedition = re.search(' (.*)/(.*)\\n', slots[1])
        expedition = [
            expedition.group(1).replace(' ', ''),
            expedition.group(2)
        ]

        class Fleet:
            total = int(fleet[1])
            free = total - int(fleet[0])

        class Expedition:
            total = int(expedition[1])
            free = total - int(expedition[0])

        class Slot:
            fleet = Fleet
            expedition = Expedition
        return Slot
def fleet(self):
fleets = []
fleets.extend(self.hostile_fleet())
fleets.extend(self.friendly_fleet())
return fleets
    def friendly_fleet(self):
        """Return a list of Fleets namespaces for the player's own fleet
        movements (empty when nothing is in flight)."""
        if not self.friendly():
            return []
        response = self.session.get(
            self.index_php + 'page=ingame&component=movement'
        ).text
        bs4 = BeautifulSoup4(response)
        fleetDetails = bs4.find_all(class_='fleetDetails')
        fleet_ids = bs4.find_all_partial(id="fleet")
        fleet_ids = [id['id'] for id in fleet_ids]
        # Element ids look like "fleet<number>".
        fleet_ids = [
            int(re.search('fleet(.*)', id).group(1))
            for id in fleet_ids
        ]
        mission_types = [
            int(event['data-mission-type'])
            for event in fleetDetails
        ]
        return_flights = [
            bool(event['data-return-flight'])
            for event in fleetDetails
        ]
        arrival_times = [
            int(event['data-arrival-time'])
            for event in fleetDetails
        ]
        arrival_times = [
            datetime.fromtimestamp(timestamp)
            for timestamp in arrival_times
        ]
        destinations = self.fleet_coordinates(fleetDetails, 'destinationCoords')
        origins = self.fleet_coordinates(fleetDetails, 'originCoords')
        fleets = []
        for i in range(len(fleet_ids)):
            # One namespace class per movement row.
            class Fleets:
                id = fleet_ids[i]
                mission = mission_types[i]
                diplomacy = const.diplomacy.friendly
                player_name = self.player
                player_id = self.player_id
                returns = return_flights[i]
                arrival = arrival_times[i]
                origin = origins[i]
                destination = destinations[i]
                list = [id, mission, diplomacy, player_name, player_id, returns,
                        arrival, origin, destination]
            fleets.append(Fleets)
        return fleets
    def hostile_fleet(self):
        """Return a list of Fleets namespaces for incoming hostile fleets
        (empty when not under attack)."""
        if not self.attacked():
            return []
        response = self.session.get(
            url=self.index_php + 'page=componentOnly&component=eventList'
        ).text
        bs4 = BeautifulSoup4(response)
        eventFleet = bs4.find_all('span', class_='hostile')
        # Walk up from the hostile marker to the full event row.
        eventFleet = [child.parent.parent for child in eventFleet]
        fleet_ids = [id['id'] for id in eventFleet]
        # Row ids look like "eventRow-<number>".
        fleet_ids = [
            int(re.search('eventRow-(.*)', id).group(1))
            for id in fleet_ids
        ]
        arrival_times = [
            int(event['data-arrival-time'])
            for event in eventFleet
        ]
        arrival_times = [
            datetime.fromtimestamp(timestamp)
            for timestamp in arrival_times
        ]
        destinations = self.fleet_coordinates(eventFleet, 'destCoords')
        origins = self.fleet_coordinates(eventFleet, 'coordsOrigin')
        player_ids = [
            int(id.find(class_='sendMail').a['data-playerid'])
            for id in eventFleet
        ]
        player_names = [
            name.find(class_='sendMail').a['title']
            for name in eventFleet
        ]
        fleets = []
        for i in range(len(fleet_ids)):
            # One namespace class per incoming attack row.
            class Fleets:
                id = fleet_ids[i]
                mission = 1
                diplomacy = const.diplomacy.hostile
                player_name = player_names[i]
                player_id = player_ids[i]
                returns = False
                arrival = arrival_times[i]
                origin = origins[i]
                destination = destinations[i]
                list = [id, mission, diplomacy, player_name, player_id, returns,
                        arrival, origin, destination]
            fleets.append(Fleets)
        return fleets
def phalanx(self, coordinates, id):
    """Deliberately unsupported: sensor-phalanx scans with invalid
    parameters can get the account banned, so this client refuses to
    implement them.

    Raises:
        NotImplementedError: always.
    """
    # Fixed: 'NotImplemented' is a comparison sentinel, not an exception
    # class -- raising it is a TypeError.  NotImplementedError is correct.
    raise NotImplementedError(
        'Phalanx get you banned if used with invalid parameters')
def send_message(self, player_id, msg):
    """Send an in-game chat message *msg* to the player *player_id*.

    Fetches a fresh ajax chat token from the chat page, then posts the
    message.  Returns True when the server reports an OK status.
    """
    response = self.session.get(self.index_php + 'page=chat').text
    # Token is embedded in the chat page's inline javascript.
    chat_token = re.search('var ajaxChatToken = "(.*)"', response).group(1)
    response = self.session.post(
        url=self.index_php + 'page=ajaxChat',
        data={'playerId': player_id,
              'text': msg,
              'mode': 1,
              'ajax': 1,
              'token': chat_token},
        headers={'X-Requested-With': 'XMLHttpRequest'}
    ).json()
    # Return the membership test directly instead of if/else True/False.
    return 'OK' in response['status']
def rename_planet(self, id, new_name):
    """Rename planet *id* to *new_name*; return the server status string.

    Visits the planet first (cp param) so the rename token is issued for
    the right planet, then posts the rename form.
    """
    self.session.get(
        url=self.index_php,
        params={'cp': id})
    response = self.session.get(self.index_php,
                                params={'page': 'planetlayer'},
                                headers={'Referer': f'{self.index_php}page=ingame&component=overview&cp={id}'}).text
    # Token is embedded in the planet-layer HTML form.
    token_rename = re.search("name='token' value='(.*)'", response).group(1)
    param = {'page': 'planetRename'}
    data = {
        'newPlanetName': new_name,
        'token': token_rename}
    response = self.session.post(
        url=self.index_php,
        params=param,
        data=data,
        headers={'Referer': f'{self.index_php}page=ingame&component=overview&cp={id}'}
    ).json()
    return response['status']
def abandon_planet(self, id):
    """Give up the planet *id*.  Return True on success, False otherwise.

    Mirrors the browser flow: open the planet, fetch the abandon code and
    token from the planet layer, confirm the account password, then post
    the actual give-up request with the refreshed token.
    """
    self.session.get(
        url=self.index_php,
        params={'cp': id})
    header = {'Referer': f'{self.index_php}page=ingame&component=overview&cp={id}'}
    response = self.session.get(
        self.index_php,
        params={'page': 'planetlayer'},
        headers=header
    ).text
    # Narrow to the abandon form so the generic token regex matches the
    # right occurrence.
    response = response[response.find('input type="hidden" name="abandon" value="'):]
    code_abandon = re.search('name="abandon" value="(.*)"', response).group(1)
    token_abandon = re.search("name='token' value='(.*)'", response).group(1)
    response = self.session.post(
        url=self.index_php,
        params={'page': 'checkPassword'},
        data={
            'abandon': code_abandon,
            'token': token_abandon,
            # Fixed: restored the account password (the source had a
            # redacted placeholder here, which is a syntax error).
            'password': self.password,
        },
        headers=header).json()
    new_token = None
    if response.get("password_checked") and response["password_checked"]:
        new_token = response["newToken"]
    if new_token:
        self.session.post(
            url=self.index_php,
            params={
                'page': 'planetGiveup'
            },
            data={
                'abandon': code_abandon,
                # Fixed: use the refreshed token returned by checkPassword.
                'token': new_token,
                'password': self.password,
            },
            headers=header).json()
        # Reload the index so the session reflects the removed planet.
        self.session.get(url=self.index_php)
        return True
    else:
        return False
def spyreports(self, firstpage=1, lastpage=30):
    """Collect espionage reports from the messages pages.

    Args:
        firstpage: first pagination index to fetch (inclusive).
        lastpage: last pagination index to fetch (inclusive).

    Returns:
        A list of per-report class objects whose 'fright' attribute is a
        list of (technology, amount) pairs scraped from the report.
    """
    # Gather unique report links across the requested pages.
    report_links = []
    while firstpage <= lastpage:
        try:
            response = self.session.get(
                url=self.index_php,
                params={'page': 'messages',
                        'tab': 20,
                        'action': 107,
                        'messageId': -1,
                        'pagination': firstpage,
                        'ajax': 1}
            ).text
        except Exception:
            # Network/session failure: stop paging, keep what we have.
            break
        bs4 = BeautifulSoup4(response)
        for link in bs4.find_all_partial(href='page=messages&messageId'):
            if link['href'] not in report_links:
                # Fixed: append only the new link.  The original re-extended
                # the whole page's link list on every match, producing large
                # numbers of duplicate report fetches.
                report_links.append(link['href'])
        firstpage += 1
    reports = []
    for link in report_links:
        response = self.session.get(link).text
        bs4 = BeautifulSoup4(response)
        technologys = [tech['class'][0] for tech in bs4.find_all('img')]
        # The amount sits in the second span of the image's grandparent.
        amounts = [
            tech.parent.parent.find_all('span')[1].text
            for tech in bs4.find_all('img')
        ]
        # NOTE: a fresh class object per report; callers read 'fright' as a
        # class attribute (interface kept from the original).
        class Report:
            fright = [
                (tech, amount)
                for tech, amount in zip(technologys, amounts)
            ]
        reports.append(Report)
    return reports
def send_fleet(
    self,
    mission,
    id,
    where,
    ships,
    resources=(0, 0, 0), speed=10, holdingtime=0
):
    """Dispatch a fleet from planet *id*.

    Args:
        mission: mission code to fly.
        id: origin planet id (cp parameter).
        where: target coordinates [galaxy, system, position, type].
        ships: iterable of (ship_id, amount) pairs.
        resources: (metal, crystal, deuterium) cargo; defaults to none.
        speed: fleet speed setting (10 presumably = 100% -- TODO confirm).
        holdingtime: hold duration for missions that support it.

    Returns:
        The server's 'success' flag from the dispatch response.
    """
    response = self.session.get(
        url=self.index_php + 'page=ingame&component=fleetdispatch&cp={}'
        .format(id)
    ).text
    # The token variable name differs between game versions; try both.
    send_fleet_token = re.search('var fleetSendingToken = "(.*)"', response)
    if send_fleet_token is None:
        send_fleet_token = re.search('var token = "(.*)"', response)
    form_data = {'token': send_fleet_token.group(1)}
    # Ship amounts are posted as am<ship id>=<count>.
    for ship in ships:
        ship_type = 'am{}'.format(ship[0])
        form_data.update({ship_type: ship[1]})
    form_data.update(
        {
            'galaxy': where[0],
            'system': where[1],
            'position': where[2],
            'type': where[3],
            'metal': resources[0],
            'crystal': resources[1],
            'deuterium': resources[2],
            'prioMetal': 1,
            'prioCrystal': 2,
            'prioDeuterium': 3,
            'mission': mission,
            'speed': speed,
            'retreatAfterDefenderRetreat': 0,
            'union': 0,
            'holdingtime': holdingtime
        }
    )
    response = self.session.post(
        url=self.index_php + 'page=ingame&component=fleetdispatch'
        '&action=sendFleet&ajax=1&asJson=1',
        data=form_data,
        headers={'X-Requested-With': 'XMLHttpRequest'}
    ).json()
    return response['success']
def return_fleet(self, fleet_id):
    """Recall fleet *fleet_id*.  Return True if a recall link existed.

    The movement page only contains a return link (with its token) for
    fleets that can still be recalled.
    """
    response = self.session.get(
        url=self.index_php + 'page=ingame&component=movement'
    ).text
    if "return={}".format(fleet_id) in response:
        # Extract the token between the return link and the closing quote.
        token = re.search(
            'return={}'.format(fleet_id)+'&token=(.*)" ', response
        ).group(1).split('"')[0]
        self.session.get(
            url=''.join([
                self.index_php,
                'page=ingame&component=movement&return={}&token={}'
                .format(fleet_id, token)
            ])
        )
        return True
    else:
        return False
def build(self, what, id):
    """Queue a build order on planet *id*.

    Args:
        what: triple (type, amount, component) -- the entity id, the
              quantity, and the page component name it is built from.
        id: planet id (cp parameter).
    """
    type = what[0]  # NOTE: shadows the builtin 'type'
    amount = what[1]
    component = what[2]
    response = self.session.get(
        url=self.index_php +
        'page=ingame&component={}&cp={}'
        .format(component, id)
    ).text
    # The build token is embedded in the page's queue-add URL.
    build_token = re.search(
        "var urlQueueAdd = (.*)token=(.*)';",
        response
    ).group(2)
    self.session.get(
        url=self.index_php,
        params={'page': 'ingame',
                'component': component,
                'modus': 1,
                'token': build_token,
                'type': type,
                'menge': amount}
    )
def collect_rubble_field(self, id):
    """Start repairs on planet *id*'s rubble field (fire-and-forget)."""
    repair_url = (
        self.index_php
        + 'page=ajax&component=repairlayer&component=repairlayer&ajax=1'
          '&action=startRepairs&asJson=1&cp={}'.format(id)
    )
    self.session.get(url=repair_url,
                     headers={'X-Requested-With': 'XMLHttpRequest'})
def is_logged_in(self):
    """Return True when the lobby API accepts the current session cookies."""
    response = self.session.get(
        url='https://lobby.ogame.gameforge.com/api/users/me/accounts'
    ).json()
    # The lobby returns an 'error' key on an unauthenticated request, so
    # return the membership test directly instead of if/else True/False.
    return 'error' not in response
def relogin(self, universe=None):
    """Re-authenticate, optionally against a different universe.

    Re-runs the full OGame constructor with the stored credentials and
    returns whether the new session is logged in.
    """
    target_universe = self.universe if universe is None else universe
    OGame.__init__(self, target_universe, self.username, self.password,
                   self.user_agent, self.proxy)
    return OGame.is_logged_in(self)
def keep_going(self, function):
    """Run *function*; if it fails, re-login once and retry it.

    Useful for long-running bots whose game session may expire mid-task.
    """
    try:
        function()
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate; any runtime error triggers one relogin + retry.
        self.relogin()
        function()
def logout(self):
    """Log out of both the game server and the gameforge lobby.

    Returns:
        True when the session is no longer authenticated.
    """
    self.session.get(self.index_php + 'page=logout')
    self.session.put(
        'https://lobby.ogame.gameforge.com/api/users/me/logout'
    )
    return not OGame.is_logged_in(self)
def BeautifulSoup4(response):
    """Parse *response* HTML and attach partial-match find helpers.

    The returned soup gains find_partial / find_all_partial, which behave
    like find / find_all but treat every keyword value as a regex so
    substring matches work (e.g. id="fleet" matches id="fleet123").
    """
    soup = BeautifulSoup(response, features="html5lib")

    def _with_compiled(finder, kwargs):
        # Compile every keyword value into a regex before delegating.
        compiled = {key: re.compile(value) for key, value in kwargs.items()}
        return finder(**compiled)

    soup.find_partial = lambda **kwargs: _with_compiled(soup.find, kwargs)
    soup.find_all_partial = lambda **kwargs: _with_compiled(soup.find_all, kwargs)
    return soup
|
fxdemolisher/frano
|
frano/settings.py
|
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
import datetime, os, glob

# build info
BUILD_VERSION = '0.12'
BUILD_DATETIME = datetime.datetime(2011, 9, 27, 7, 44, 0)

# base db set up, the rest is in environment specific setting files
DATABASE_ENGINE = 'mysql'
# Force InnoDB so transactional behavior is available.
DATABASE_OPTIONS = { "init_command" : "SET storage_engine=INNODB" }

# locale set up
TIME_ZONE = 'America/New_York'
LANGUAGE_CODE = 'en-us'
USE_I18N = False

# template set up
TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.load_template_source', )
TEMPLATE_DIRS = ( )
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.request',
)

# middleware and app set up
ROOT_URLCONF = 'frano.urls'
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
)
INSTALLED_APPS = (
    'django.contrib.sessions',
    'frano',
    'frano.main',
    'frano.quotes',
    'frano.transactions',
    'frano.account',
    'frano.positions',
)

# session settings
SESSION_ENGINE = 'django.contrib.sessions.backends.db'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True

# load external settings
# Overlay every file in settings/ (sorted, so later files win) on top of
# the defaults above.
settings_dir = os.path.realpath(os.path.dirname(__file__))
settings_files = glob.glob(os.path.join(settings_dir, 'settings/*.py'))
settings_files.sort()
for f in settings_files:
    # NOTE(review): execfile is Python 2 only -- this module predates Python 3.
    execfile(os.path.abspath(f))
|
fxdemolisher/frano
|
frano/main/view_utils.py
|
<reponame>fxdemolisher/frano
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from datetime import datetime
from django.shortcuts import redirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from models import Portfolio
from models import User
from models import create_user
from settings import BUILD_VERSION
from settings import BUILD_DATETIME
#-------------\
# CONSTANTS |
#-------------/
DEMO_USER_OPEN_ID = 'SAMPLE_USER_ONLY'
#---------------------\
# EXPOSED FUNCTIONS |
#---------------------/
def get_demo_user():
    """Fetch the shared demo user, creating it on first use."""
    existing = User.objects.filter(open_id = DEMO_USER_OPEN_ID)
    if existing.count() != 1:
        return create_user(DEMO_USER_OPEN_ID, DEMO_USER_OPEN_ID)
    return existing[0]
def render_page(template_name, request, user = None, portfolio = None, portfolios = None, extra_dictionary = None):
    """Render *template_name* with the standard frano context.

    Resolves the logged-in user from the session when not supplied, loads
    the user's portfolios, and adds build info plus today's date before
    rendering with a RequestContext.
    """
    dictionary = extra_dictionary
    if dictionary == None:
        dictionary = { }
    # Resolve the user from the session unless the caller supplied one.
    if user == None:
        user_id = request.session.get('user_id')
        if user_id != None:
            user = User.objects.filter(id = user_id)[0]
    if user != None and portfolios == None:
        portfolios = Portfolio.objects.filter(user__id__exact = user.id)
    dictionary['BUILD_VERSION'] = BUILD_VERSION
    dictionary['BUILD_DATETIME'] = BUILD_DATETIME
    dictionary['user'] = user
    dictionary['portfolio'] = portfolio
    dictionary['portfolios'] = portfolios
    dictionary['today'] = datetime.now()
    return render_to_response(template_name, dictionary, context_instance = RequestContext(request))
def redirect_to_portfolio_action(action, portfolio, query_string = None):
    """Redirect to /<portfolio id>/<action>.html, with an optional query string."""
    suffix = '' if query_string == None else "?%s" % query_string
    return redirect("/%d/%s.html%s" % (portfolio.id, action, suffix))
def logout_user(request):
    """Clear the session's user id and send the visitor to the home page."""
    # Assign first so the del below can never raise KeyError.
    request.session['user_id'] = None
    del(request.session['user_id'])
    return redirect("/index.html")
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
|
fxdemolisher/frano
|
frano/quotes/views.py
|
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from datetime import date
from datetime import datetime
from django.http import HttpResponse
from models import price_as_of
from models import quote_by_symbol
#-------------\
# CONSTANTS |
#-------------/
#---------\
# VIEWS |
#---------/
def price_quote(request):
    """JSON endpoint returning a symbol's price on a given date.

    GET params: symbol (required); year/month/day (default: today).
    """
    today = datetime.now().date()
    year = int(request.GET.get('year', today.year))
    month = int(request.GET.get('month', today.month))
    day = int(request.GET.get('day', today.day))
    quote = quote_by_symbol(request.GET.get('symbol'))
    # Hand-built JSON body; keep the format in sync with the JS caller.
    return HttpResponse("{ \"price\": %f }" % price_as_of(quote, date(year, month, day)), mimetype="application/json")
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
|
fxdemolisher/frano
|
frano/management/commands/refresh_price_history.py
|
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from sys import stdout
from django.core.management.base import BaseCommand
from frano.quotes.models import Quote
from frano.quotes.models import refresh_price_history
class Command(BaseCommand):
    """Management command: refresh price history for every known quote."""
    help = 'Refreshes the price history for all quotes'
    def handle(self, *args, **options):
        """Iterate all quotes and refresh each one's price history."""
        quotes = Quote.objects.all()
        stdout.write('Found %d quotes to refresh price history\nStarting...\n' % quotes.count())
        for quote in quotes:
            stdout.write('Refreshing price history for: %s\n' % quote.symbol)
            refresh_price_history(quote)
        stdout.write('Successfully refreshed priced history\n')
|
fxdemolisher/frano
|
frano/account/models.py
|
<filename>frano/account/models.py
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from django.db import models
#-------------\
# CONSTANTS |
#-------------/
#----------\
# MODELS |
#----------/
#------------\
# SERVICES |
#------------/
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
|
fxdemolisher/frano
|
frano/transactions/views.py
|
<filename>frano/transactions/views.py
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from datetime import datetime
from exceptions import Exception
from django import forms
from django.core.mail import EmailMessage
from django.forms.formsets import formset_factory
from django.http import HttpResponse
from django.template.loader import render_to_string
from main.decorators import portfolio_manipulation_decorator
from main.view_utils import redirect_to_portfolio_action
from main.view_utils import render_page
from models import TRANSACTION_TYPES
from models import Transaction
from models import detect_transaction_file_type
from models import parse_transactions
from models import transactions_as_csv
from positions.models import refresh_positions
from quotes.models import CASH_SYMBOL
from quotes.models import quote_by_symbol
#-------------\
# CONSTANTS |
#-------------/
TRANSACTIONS_BEFORE_SEE_ALL = 20
#---------\
# VIEWS |
#---------/
@portfolio_manipulation_decorator
def transactions(request, portfolio, is_sample, read_only):
    """Transactions tab: list the portfolio's transactions, optionally
    filtered to a single symbol via the 'filter' GET parameter."""
    transactions = Transaction.objects.filter(portfolio__id__exact = portfolio.id).order_by('-as_of_date', '-id')
    symbols = set([t.symbol for t in transactions])
    symbol_filter = request.GET.get('filter')
    if symbol_filter != None and symbol_filter != '':
        transactions = transactions.filter(symbol = symbol_filter)
    # Fees are not stored; derive them as |total - price * quantity|.
    for transaction in transactions:
        transaction.fees = abs(transaction.total - (transaction.price * transaction.quantity))
    context = {
        'symbols' : symbols.difference([CASH_SYMBOL]),
        # First N shown by default, the remainder behind a "see all" link.
        'transaction_sets' : [ transactions[0:TRANSACTIONS_BEFORE_SEE_ALL], transactions[TRANSACTIONS_BEFORE_SEE_ALL:transactions.count()] ],
        'current_tab' : 'transactions',
        'symbol_filter' : symbol_filter,
    }
    return render_page('transactions.html', request, portfolio = portfolio, extra_dictionary = context)
@portfolio_manipulation_decorator
def add(request, portfolio, is_sample, read_only):
    """Create one transaction from the add-transaction form.

    Cash-like types (DEPOSIT/WITHDRAW/ADJUST) are recorded against the
    cash symbol; ADJUST additionally links back to the original symbol.
    """
    form = TransactionForm(request.POST)
    if form.is_valid():
        commission = form.cleaned_data.get('commission')
        if commission == None:
            commission = 0.0
        type = form.cleaned_data.get('type').encode('UTF-8')
        symbol = form.cleaned_data.get('symbol').encode('UTF-8').upper()
        linked_symbol = None
        if type == 'ADJUST':
            linked_symbol = symbol
        if type in ['DEPOSIT', 'WITHDRAW', 'ADJUST']:
            symbol = CASH_SYMBOL
        if symbol != None and len(symbol) > 0:
            transaction = Transaction()
            transaction.portfolio = portfolio
            transaction.type = type
            transaction.as_of_date = form.cleaned_data.get('as_of_date')
            transaction.symbol = symbol
            transaction.quantity = form.cleaned_data.get('quantity')
            transaction.price = form.cleaned_data.get('price')
            # Stored total includes the commission on top of price * quantity.
            transaction.total = (transaction.quantity * transaction.price) + commission
            transaction.linked_symbol = linked_symbol
            transaction.save()
            refresh_positions(portfolio, force = True)
    return redirect_to_portfolio_action('transactions', portfolio)
@portfolio_manipulation_decorator
def remove(request, portfolio, is_sample, read_only, transaction_id):
    """Delete one transaction, but only if it belongs to *portfolio*."""
    transaction = Transaction.objects.filter(id = transaction_id)[0]
    if transaction.portfolio.id == portfolio.id:
        transaction.delete()
        refresh_positions(portfolio, force = True)
    return redirect_to_portfolio_action('transactions', portfolio)
@portfolio_manipulation_decorator
def remove_all(request, portfolio, is_sample, read_only):
    """Delete every transaction in the portfolio and rebuild positions."""
    Transaction.objects.filter(portfolio__id__exact = portfolio.id).delete()
    refresh_positions(portfolio, force = True)
    return redirect_to_portfolio_action('importTransactions', portfolio)
@portfolio_manipulation_decorator
def update(request, portfolio, is_sample, read_only, transaction_id):
    """AJAX endpoint: apply a partial update to one transaction.

    Only fields actually present in the POST are changed (see
    UpdateTransactionForm.get_if_present).  Returns a small JSON body
    with a success flag.
    """
    transaction = Transaction.objects.filter(id = transaction_id)[0]
    success = False
    if transaction.portfolio.id == portfolio.id:
        form = UpdateTransactionForm(request.POST)
        if form.is_valid():
            # Preserve the implied commission when price/quantity change.
            current_commission = transaction.total - (transaction.price * transaction.quantity)
            type = form.get_if_present('type')
            if type != None and type != '':
                transaction.type = type.encode('UTF-8')
            as_of_date = form.get_if_present('date')
            if as_of_date != None:
                transaction.as_of_date = as_of_date
            symbol = form.get_if_present('symbol')
            if symbol != None and symbol != '':
                transaction.symbol = symbol.encode('UTF-8').upper()
            quantity = form.get_if_present('quantity')
            if quantity != None:
                transaction.quantity = quantity
                transaction.total = (transaction.price * transaction.quantity) + current_commission
            price = form.get_if_present('price')
            if price != None:
                transaction.price = price
                transaction.total = (transaction.price * transaction.quantity) + current_commission
            total = form.get_if_present('total')
            if total != None:
                transaction.total = total
                # Cash rows keep quantity in lockstep with total.
                if transaction.symbol == CASH_SYMBOL:
                    transaction.quantity = transaction.total
            linked_symbol = form.get_if_present('linkedsymbol')
            if linked_symbol != None:
                transaction.linked_symbol = (linked_symbol.encode('UTF-8').upper() if linked_symbol.strip() != '' else None)
            transaction.save()
            refresh_positions(portfolio, force = True)
            success = True
    return HttpResponse("{ \"success\": \"%s\" }" % success)
@portfolio_manipulation_decorator
def export(request, portfolio, is_sample, read_only, format):
    """Download the portfolio's transactions as a CSV or OFX attachment."""
    format = format.lower()
    name = ('DEMO' if is_sample else portfolio.name)
    response = HttpResponse(mimetype = ('text/%s' % format))
    response['Content-Disposition'] = 'attachment; filename=transactions-%s-%s.%s' % (name, datetime.now().strftime('%Y%m%d'), format)
    if format == 'csv':
        transactions_as_csv(response, portfolio)
    elif format == 'ofx':
        transactions = Transaction.objects.filter(portfolio__id__exact = portfolio.id).order_by('-as_of_date', '-id')
        # OFX sign conventions: sells carry negative quantity; buys and
        # withdrawals carry negative totals.
        for transaction in transactions:
            transaction.commission = abs(transaction.total - (transaction.price * transaction.quantity))
            transaction.quantity = ((-transaction.quantity) if transaction.type == 'SELL' else transaction.quantity)
            transaction.total = ((-transaction.total) if transaction.type == 'BUY' or transaction.type == 'WITHDRAW' else transaction.total)
        quotes = [ quote_by_symbol(symbol) for symbol in set([t.symbol for t in transactions]).difference([CASH_SYMBOL]) ]
        response.write(render_to_string('transactions.ofx', {
            'portfolio' : portfolio,
            'transactions': transactions,
            'start_date' : min([t.as_of_date for t in transactions]),
            'end_date' : max([t.as_of_date for t in transactions]),
            'quotes' : quotes,
        }))
    return response
@portfolio_manipulation_decorator
def import_form(request, portfolio, is_sample, read_only):
    """Show the import page; on POST, parse the uploaded file and preview
    its transactions with probable duplicates flagged."""
    transactions = None
    auto_detect_error = False
    if request.method == 'POST':
        form = ImportForm(request.POST, request.FILES)
        if form.is_valid():
            type = request.POST.get('type')
            if type == 'AUTO':
                type = detect_transaction_file_type(request.FILES['file'])
                auto_detect_error = (True if type == None else False);
            if not auto_detect_error:
                transactions = parse_transactions(type, request.FILES['file'])
                # Index existing transactions by date for duplicate checks.
                existing_transactions = Transaction.objects.filter(portfolio__id__exact = portfolio.id)
                by_date_map = dict([ (as_of_date, []) for as_of_date in set([ transaction.as_of_date for transaction in existing_transactions]) ])
                for transaction in existing_transactions:
                    by_date_map.get(transaction.as_of_date).append(transaction)
                for transaction in transactions:
                    if len(transaction.symbol) < 1 or len(transaction.symbol) > 10:
                        raise Exception("Invalid symbol: %s" % transaction.symbol)
                    # A duplicate matches type+symbol on the same date with
                    # quantity and price within a cent.
                    is_duplicate = False
                    possibles = by_date_map.get(transaction.as_of_date)
                    if possibles != None:
                        for possible in possibles:
                            if possible.type == transaction.type and possible.symbol == transaction.symbol and abs(possible.quantity - transaction.quantity) < 0.01 and abs(possible.price - transaction.price) < 0.01:
                                is_duplicate = True
                    transaction.is_duplicate = is_duplicate
    context = {
        'transactions' : transactions,
        'current_tab' : 'transactions',
        'auto_detect_error' : auto_detect_error
    }
    return render_page('importTransactions.html', request, portfolio = portfolio, extra_dictionary = context)
@portfolio_manipulation_decorator
def process_import(request, portfolio, is_sample, read_only):
    """Persist the previewed import: save every non-excluded row, then
    rebuild positions once at the end."""
    formset = ImportTransactionFormSet(request.POST)
    if not formset.is_valid():
        raise Exception('Invalid import set');
    for form in formset.forms:
        cd = form.cleaned_data
        # Rows the user ticked as 'exclude' are skipped entirely.
        if not cd.get('exclude'):
            transaction = Transaction()
            transaction.portfolio = portfolio
            transaction.type = cd.get('type').encode('UTF-8')
            transaction.as_of_date = cd.get('as_of_date')
            transaction.symbol = cd.get('symbol').encode('UTF-8').upper()
            transaction.quantity = cd.get('quantity')
            transaction.price = cd.get('price')
            transaction.total = cd.get('total')
            linked_symbol = cd.get('linked_symbol').encode('UTF-8')
            if linked_symbol != None and linked_symbol != '':
                transaction.linked_symbol = linked_symbol
            transaction.save()
    refresh_positions(portfolio, force = True)
    return redirect_to_portfolio_action('transactions', portfolio)
@portfolio_manipulation_decorator
def request_import_type(request, portfolio, is_sample, read_only):
    """Email the uploaded sample file to the maintainers as a request to
    support a new import format."""
    form = RequestImportForm(request.POST, request.FILES)
    if not form.is_valid():
        raise Exception('Bad file for request');
    type = request.POST.get('type')
    uploaded_file = request.FILES['file']
    body = "Request for import for type: %s\nRequest for portfolio: %s (%d)\nRequest made from:%s" % (
        type,
        ('Demo' if is_sample else portfolio.name),
        portfolio.id,
        ('Demo user' if is_sample else portfolio.user.email)
    )
    email = EmailMessage("Import type requested",
                         body,
                         "<EMAIL>",
                         [ "<EMAIL>" ],
                         [ ])
    # Forward the user's file as an attachment so the format can be studied.
    email.attach(uploaded_file.name, uploaded_file.read(), uploaded_file.content_type)
    email.send(fail_silently = False)
    return redirect_to_portfolio_action('importTransactions', portfolio, 'requestSent=true')
#---------\
# FORMS |
#---------/
class TransactionForm(forms.Form):
    """Form backing the add-transaction view (see add())."""
    type = forms.ChoiceField(choices = TRANSACTION_TYPES)
    as_of_date = forms.DateField()
    # Optional: cash-like types have no symbol of their own.
    symbol = forms.CharField(min_length = 1, max_length = 10, required = False)
    quantity = forms.FloatField()
    price = forms.FloatField(min_value = 0.01)
    commission = forms.FloatField(min_value = 0.01, required = False)
class UpdateTransactionForm(forms.Form):
    """Partial-update form: every field is optional, and only fields
    actually present in the submitted data are applied (see
    get_if_present), so absent and empty can be told apart."""
    type = forms.ChoiceField(choices = TRANSACTION_TYPES, required = False)
    date = forms.DateField(required = False)
    symbol = forms.CharField(required = False, min_length = 1, max_length = 10)
    quantity = forms.FloatField(required = False)
    price = forms.FloatField(required = False, min_value = 0.01)
    total = forms.FloatField(required = False)
    linkedsymbol = forms.CharField(required = False, max_length = 10) # underscore removed due to JS split issue
    def __init__(self, data):
        # Keep the raw submission so get_if_present can check membership.
        forms.Form.__init__(self, data)
        self.original_data = data
    def get_if_present(self, name):
        """Return the cleaned value for *name* only if it was submitted."""
        return (self.cleaned_data.get(name) if name in self.original_data else None)
class ImportForm(forms.Form):
    """Upload form for transaction imports; AUTO triggers file-type detection."""
    TYPE_CHOICES = [
        ('AUTO', u'AUTO'),
        ('FRANO', u'FRANO'),
        ('CHARLES', u'CHARLES'),
        ('GOOGLE', u'GOOGLE'),
        ('SCOTTRADE', u'SCOTTRADE'),
        ('AMERITRADE', u'AMERITRADE'),
        ('ZECCO', u'ZECCO'),
        ('FIDELITY', u'FIDELITY'),
        ('MERCER_401', u'MERCER_401'),
    ]
    type = forms.ChoiceField(choices = TYPE_CHOICES)
    file = forms.FileField()
class RequestImportForm(forms.Form):
    """Upload form used to request support for a new import file type."""
    type = forms.CharField(max_length = 255)
    file = forms.FileField()
class ImportTransactionForm(forms.Form):
    """One previewed row of an import; 'exclude' lets the user skip it."""
    type = forms.ChoiceField(choices = TRANSACTION_TYPES)
    as_of_date = forms.DateField()
    symbol = forms.CharField(min_length = 1, max_length = 10)
    quantity = forms.FloatField()
    price = forms.FloatField(min_value = 0.01)
    total = forms.FloatField()
    linked_symbol = forms.CharField(max_length = 10, required = False)
    exclude = forms.BooleanField(required = False)
# Formset wrapping one ImportTransactionForm per previewed row.
ImportTransactionFormSet = formset_factory(ImportTransactionForm)
#-------------------\
# LOCAL FUNCTIONS |
#-------------------/
|
fxdemolisher/frano
|
frano/management/commands/cleanup_sessions.py
|
<reponame>fxdemolisher/frano
# Copyright (c) 2011 <NAME>
# Licensed under the MIT license
# see LICENSE file for copying permission.
from datetime import datetime
from sys import stdout
from django.contrib.sessions.models import Session
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    """Management command: delete expired Django sessions from the DB."""
    help = 'Cleanup any expired sessions'
    def handle(self, *args, **options):
        """Find sessions past their expiry date and remove them."""
        expired_sessions = Session.objects.filter(expire_date__lte = datetime.now())
        stdout.write('Found %d expired sessions\n' % expired_sessions.count())
        expired_sessions.delete()
        stdout.write('Successfully removed expired sessions\n')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.