text stringlengths 8 6.05M |
|---|
# Rebind `a` several times; only the final assignment (80) survives.
a = 10
a = 4
a = 80
# NOTE(review): this prints the literal string "a", not the value of the
# variable `a` -- confirm that is intended.
print("a")
|
from flask import Flask, render_template, request
import xmlrpc.client

# Proxy to the local XML-RPC voting backend.
server = xmlrpc.client.ServerProxy('http://127.0.0.1:8080')

app = Flask(__name__)


@app.route('/submit', methods=['post', 'get'])
def submit():
    """Render the submit page; on POST, forward the form data to the backend."""
    result = {}
    messages = [1, 2, 3]
    if request.method == 'POST':
        # Collect the submitted form fields.
        result['nama'] = request.form.get('nama')
        result['gula_darah'] = request.form.get('gula_darah')
        result['status'] = True
        # Register the vote, then query the backend twice: once for the
        # console log, once for the status shown to the user.
        server.vote(result['nama'])
        print(server.querry())
        result['status'] = server.querry()
    return render_template('submit.html', data=result, n_pesan=len(messages))


if __name__ == '__main__':
    app.run(debug=True)
|
'''
Contains the L{Parser} class for parsing a header file for
function definitions and user-defined types.
@author: Erik Schmidt
@contact: emschmitty@gmail.com
@organization: Carnegie Mellon University
@since: October 23, 2011
'''
import xml.dom.minidom as xml
import logging
import os
import dllexp
import sys
import re
from morpher.misc import status_reporter
from subprocess import Popen, PIPE
from morpher.pycparser.c_parser import CParser
CPPPATH = r'../../tools/tcc/tcc.exe' if sys.platform == 'win32' else 'cpp'
class Parser(object):
    '''
    Class documentation
    @ivar cfg: The L{Config} object
    @ivar log: The L{logging} object
    @ivar dllexp: The L{DllExp} object
    @ivar targetfile: An array of strings pointing to the header file paths (splits the string from the config file into separate strings delimited by a ';'
    @ivar compiler: The address of the pre-processing compiler
    @ivar compilerflags: The associated flags for the pre-processing compiler
    @ivar numfuncincluded: The counter for recording the coverage of the parser
    @ivar doc: An XML document object for the outputted model file
    @ivar top: A pointer to the root of the XML tree
    @ivar text: A list of the functions that DllExplorer outputs
    @ivar xmlMap: A list of pointers into the AST that allow for dynamic XML generation
    '''
    def __init__ (self, cfg):
        '''
        Stores the configuration object and initializes the internal data
        @param cfg: The configuration object to use
        @type cfg: L{Config} object
        '''
        # The Config object used for configuration info
        self.cfg = cfg
        # The logging object used for reporting
        self.log = logging.getLogger(__name__)
        # The dllexp.exe wrapper object for getting export data
        self.dllexp = dllexp.DllExp(cfg)
        # split header files separated by ';'
        tmp = self.cfg.get('parser', 'headerpath')
        self.targetfile = tmp.split(';')
        # Path of the C preprocessor used before pycparser runs
        self.compiler = self.cfg.get('parser', 'precomppath')
        # Preprocessor flags, also ';'-separated in the config
        tmp = self.cfg.get('parser', 'compflags')
        self.compilerflags = tmp.split(';')
        #self.compilerflags = self.cfg.get('parser', 'compflags')
def parse_file(self):
'''
Parse a C file using pycparser.
If parsing is disabled according to the configuration object,
a message saying so is printed to the console and the method exits.
Otherwise, the function uses the C preprocessor passed in to
resolve any other header file dependencies (#includes), macro
definitions (#defines and #pragmas), and merges multiple header files
into one file. It then creates a L{CParser} object, which uses the
Ply parsing engine to parse the file into an Abstract Syntax Tree in
the form of L{Node} objects. The L{Node} object corresponding to the
head of the AST is returned.
@return: The root of the Parsed Abstract Syntax Tree, or None if there is a parsing error
@rtype: L{Node} object
'''
# Generate the command line string to generate the preprocessed data
path_list = [self.compiler]
#if isinstance(self.compilerflags, list):
# path_list += self.compilerflags
#elif self.compilerflags != '':
# path_list += [self.compilerflags]
path_list += self.compilerflags
path_list += self.targetfile
# Open a pipe and generate the preprocessed data
self.log.info("Retrieve the preprocessed code from TCC")
pipe = Popen(path_list, stdout=PIPE, universal_newlines=True)
text = pipe.communicate()[0]
# Make the output pycparser compatible by removing __stdcall instances and attributes
text = re.sub('__stdcall',"",text)
text = re.sub('__attribute__\(\(.*?\)\)*',"",text)
# Using pycparser, generate the preprocessed code and return the AST
self.log.info("Parsing the preprocessed code using pycparser")
parser = CParser(lex_optimize=False, yacc_debug=False, yacc_optimize=False)
return parser.parse(text, self.targetfile)
    def parseXML(self, ast, element, name, printflag):
        '''
        Parse the Abstract Syntax Tree and generate the XML Model
        This is a recursive function that iteratively traverses the abstract syntax tree. The
        current L{Node}, defined by the ast argument, is used to gather the type info if
        appropriate, and the name of the node.
        If the current node is a Struct or a Union, it saves the L{Node} to return later if
        the Struct or Union is actually used in the target file.
        If the current L{Node} is a function definition, and the function is located in the
        target header file, it sets the printflag to true, and recalls the parseXML file for the
        stored instance of the struct or union definition. It then adds the user defined instances,
        and all other newly defined user defined instances to the XML model.
        If the current L{Node} is a typedef definition, it maps the definition to the type it
        points to, and will return the resolved type on any future calls. Typedefs are not added
        to the XML model. They are handled internally.
        @param ast: The current L{Node} of the Abstract Syntax Tree
        @type ast: L{Node} object
        @param element: The current XML instance pertaining to the sub-tree
        @type element: XML element object
        @param name: Only relevant for function definitions, as the names are defined in a parent node. Used to assign the name of the function definition in the XML Model
        @type name: string
        @param printflag: Flag defining whether to add XML instance to the XML model
        @type printflag: int
        @return: A string containing relevant data for the parent node of the AST
        @rtype: string
        '''
        # Dispatch on the pycparser node class name.
        funcName = ast.__class__.__name__
        if funcName == "Decl":
            # Declaration of an object - get the name and pass it on!
            for c in ast.children():
                val = self.parseXML(c, element, getattr(ast, ast.attr_names[0]), printflag)
            return val
        elif funcName == "FuncDecl":
            # Function Declaration - Take input from the Decl node for the name, and explore all sub-nodes
            # The parameters are added to the XML map, but the return value is ignored
            func = self.doc.createElement("function")
            func.setAttribute("name", name)
            # Set the printflag if the function is in the target header file
            if str(name) in self.text:
                printflag |= 1
            else:
                printflag |= 0
            # Explore the child nodes (to get the parameters)
            for c in ast.children():
                val = self.parseXML(c, func, None, printflag)
            # If the printflag is set, add the function to the XML model
            if printflag == 1:
                self.top.appendChild(func)
                self.numFuncIncluded = self.numFuncIncluded + 1
            return ""
        elif funcName == "ParamList":
            # The parameter list! List all the parameters!!
            for c in ast.children():
                param = self.doc.createElement("param")
                # Get the string representation for the type of the current parameter
                # This will also implicitly add the new user defined types for
                # the function if the printflag is enabled
                val = self.parseXML(c, param, name, printflag)
                if val != None:
                    if val != "":
                        # Remove the array size if applicable
                        if val.find("[") != -1:
                            val = val[:val.find("[")]
                        param.setAttribute("type", val)
                        element.appendChild(param)
        elif funcName == "PtrDecl":
            # A pointer definition. Get the string representation of the children,
            # and append a P to the string!
            for c in ast.children():
                val = self.parseXML(c, element, name, printflag)
                if val != None:
                    return "P" + val
        elif funcName == "TypeDecl":
            # Definition of a type - pass through this function
            for c in ast.children():
                val = self.parseXML(c, element, name, printflag)
                if val != None:
                    return val
        elif funcName == "Typename":
            # Name of a type - pass through this function
            for c in ast.children():
                val = self.parseXML(c, element, name, printflag)
                if val != None:
                    return val
        elif funcName == "IdentifierType":
            # The identifier of a basic data type (or previously defined user defined type)
            val = getattr(ast, ast.attr_names[0])
            # Return the character code for a basic type
            # (codes mirror the struct-module style: B/H/I/L unsigned, c/h/i/l signed)
            if len(val) > 2:
                if val[0] == "unsigned" and val[1] == "long" and val[2] == "long":
                    return "L"
            if len(val) > 1:
                if val[0] == "char" and val[1] == "unsigned":
                    return "B"
                elif val[0] == "short" and val[1] == "unsigned":
                    return "H"
                elif val[0] == "int" and val[1] == "unsigned":
                    return "I"
                elif val[0] == "long" and (val[1] == "unsigned"):
                    return "L"
                elif val[0] == "char" and val[1] == "signed":
                    return "c"
                elif val[0] == "short" and val[1] == "signed":
                    return "h"
                elif val[0] == "int" and val[1] == "signed":
                    return "i"
                elif val[0] == "long" and (val[1] == "signed"):
                    return "l"
                elif val[0] == "long" and val[1] == "long":
                    return "l"
                else:
                    return ""
            else:
                if val[0] == "char":
                    return "c"
                elif val[0] == "short":
                    return "h"
                elif val[0] == "int":
                    return "i"
                elif val[0] == "long":
                    return "l"
                elif val[0] == "double":
                    return "d"
                elif val[0] == "float":
                    return "f"
                # If a user defined type, get the character code, add to the XML
                # model if pertinent, and add any new unique user defined types
                # defined in the user defined type.
                elif val[0] in self.typeMap:
                    iterMap = self.typeMap[val[0]]
                    # Strip leading pointer markers to find the base usertype id
                    iterPMap = iterMap
                    if iterMap.rfind("P") != -1:
                        iterPMap = iterMap[iterMap.rfind("P")+1:]
                    # Emit the deferred struct/union definition on first use
                    if iterPMap in self.xmlMap and printflag == 1:
                        c = self.xmlMap[iterPMap]
                        del self.xmlMap[iterPMap]
                        val = self.parseXML(c, None, None, printflag)
                    return str(iterMap)
                # If not defined anywhere, then return no value
                else:
                    return ""
        elif funcName == "Typedef":
            # If a typedef, store the string code associated to the value to the
            # typeMap for future resolutions.
            for c in ast.children():
                val = self.parseXML(c, element, name, printflag)
                if val != None:
                    self.typeMap[getattr(ast, ast.attr_names[0])] = val
            return None
        elif funcName == "Union" or funcName == "Struct":
            # Union is pretty much the same as Struct. To see how it works, check
            # out the struct and its comments
            # Generate a unique usertype id if new, or set the index to the previously
            # assigned usertype id
            if name == None:
                curname = getattr(ast, ast.attr_names[0])
            else:
                curname = name
            if curname == None:
                return None
            if curname in self.typeMap:
                ind = self.typeMap[curname]
            elif curname != None and "//" + curname in self.typeMap:
                ind = self.typeMap["//" + curname]
            else:
                # '#!@#index' is a reserved key holding the next fresh id
                ind = self.typeMap['#!@#index']
                self.typeMap['#!@#index'] = self.typeMap['#!@#index'] + 1
                self.typeMap[curname] = str(ind)
            changed = 0
            # Set the parameters for the XML model for a user defined type
            typex = self.doc.createElement("usertype")
            typex.setAttribute("id", str(ind))
            if funcName == "Union":
                typex.setAttribute("type", "union")
            elif funcName == "Struct":
                typex.setAttribute("type", "struct")
            if str(curname) in self.text:
                printflag |= 1
            else:
                printflag |= 0
            # Iterate through the children of the struct to add them to the XML model
            for c in ast.children():
                val = self.parseXML(c, typex, name, printflag)
                if val != None:
                    if val != "":
                        # Expand array members into `total` repeated params
                        total = 1
                        arrays = val.split("[")
                        if len(arrays) > 1:
                            for i in range(len(arrays) - 1):
                                total *= int(arrays[i+1][:-1])
                            val = arrays[0]
                        for i in range(total):
                            param = self.doc.createElement("param")
                            param.setAttribute("type", val)
                            typex.appendChild(param)
                        changed = 1
            # If the current struct isn't supposed to be printed, add it to the
            # map which contains pointers to structs to print later. Otherwise,
            # if the printflag is set, add to the XML model
            if changed == 1 and printflag == 0:
                self.xmlMap[str(ind)] = ast
            if printflag == 1:
                self.top.appendChild(typex)
            # Return the string represntation of the struct index
            if changed == 1:
                return str(ind)
            else:
                return ""
        elif funcName == "Enum":
            # Enum type - only the size of an integer
            return "i"
        elif funcName == "Constant":
            # Constant type - used only to define sizes of arrays
            for c in ast.children():
                val = self.parseXML(c, element, None, printflag)
            return "[" + getattr(ast, ast.attr_names[1]) + "]"
        elif funcName == "ArrayDecl":
            # Array declaration. Get the array type and size, and return string
            # representation to parent
            val = ""
            for c in ast.children():
                getVal = self.parseXML(c, element, None, printflag)
                if getVal != None:
                    val += getVal
            return val
        else:
            # Any other node kind: recurse and pass the last child's value up
            val = ""
            for c in ast.children():
                val = self.parseXML(c, element, None, printflag)
            return val
    def parse(self):
        '''
        Analyzes the target DLL and header file to retrieve function prototypes.
        Outputs a XML file containing a model of the exported prototypes.
        It will start by generating an Abstract Syntax Tree (AST) representation of the
        target header files. Then iterate through the AST and pull out relevant
        function and user defined type definitions and add them to an XML model. It will
        then export the XML model to a file and terminate.
        @raise Exception: if the model file cannot be opened for writing
        '''
        # Get relevant configuration information
        datadir = self.cfg.get('directories', 'data')
        modelpath = os.path.join(datadir, 'model.xml')
        # Check if parsing is enabled
        if not self.cfg.getboolean('parser', 'enabled') :
            self.log.info("Parsing is disabled")
            print " Parser DISABLED\n"
            return
        # Progress reporter: 5 pulses over the whole routine
        sr = status_reporter.StatusReporter(total=5)
        sr.start(" Parser is running...")
        # Parsing is enabled
        self.log.info("Beginning parse routine")
        self.numFuncIncluded = 0
        self.log.info("Getting DLL Explorer functions")
        # Retrieve the export table from the DLL
        exportlist = self.dllexp.getFunctions()
        sr.pulse()
        ast = self.parse_file()
        sr.pulse()
        # Create the XML tree
        self.log.info("Creating the XML model")
        self.doc = xml.getDOMImplementation().createDocument(None, "dll", None)
        self.top = self.doc.documentElement
        self.typeMap = {}
        # Reserved key: next fresh usertype id handed out by parseXML
        self.typeMap['#!@#index'] = 1
        self.text = {}
        # Create a map of the exported function names of the DLL
        for (fname) in exportlist :
            self.text[fname] = 1
        sr.pulse()
        self.xmlMap = {}
        # Iterate through the AST and generate the XML model
        self.log.info("Iterating through the AST")
        self.parseXML(ast, self.top, None, 0)
        sr.pulse()
        self.log.info("Finished iterating through AST and generating XML content")
        self.log.info("Added %d functions out of %d possible functions in the DLL to the XML file", self.numFuncIncluded, len(exportlist))
        basedir = self.cfg.get('directories', 'basedir')
        yaccpath = os.path.join(basedir, 'yacctab.py')
        # Remove temporary files (Ply's generated parser table)
        try:
            os.remove(yaccpath)
        except:
            pass
        # Write out the model file
        self.log.info("Writing XML tree to model file")
        if self.log.isEnabledFor(logging.DEBUG) :
            xmlstr = self.top.toprettyxml(indent=" ", newl="\n")
            self.log.debug("\n\nXML Tree:\n%s\n", xmlstr)
        try :
            f = open(modelpath, mode="w")
        except :
            msg = "Couldn't open %s"
            self.log.exception(msg, modelpath)
            raise Exception(msg % modelpath)
        self.top.writexml(f, addindent=" ", newl="\n")
        f.close()
        sr.done()
# coding:utf-8
import threading, Queue, sys
import requests, re
class RedisUN(threading.Thread):
    """Worker thread: for each IP taken from the queue, look up domains
    hosted on it via site.ip138.com and append live ones to a result file.
    """

    def __init__(self, queue):
        threading.Thread.__init__(self)
        # Shared work queue of IP addresses (strings).
        self._queue = queue

    def run(self):
        headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36',}
        while True:
            if self._queue.empty():
                break
            # Another worker may drain the queue between empty() and get(),
            # so guard the get() itself instead of letting the thread die.
            try:
                number = self._queue.get(timeout=0.5)
            except Queue.Empty:
                break
            response = requests.get('https://site.ip138.com/' + str(number), headers=headers)
            t = response.content
            b = re.findall(r'<li><span class="date">.*</span><a href="/(.*)/" target="_blank">.*</a></li>', t)
            # Bug fix: the original `continue` inside the except clause skipped
            # file.close(), leaking one handle per failed lookup; `with`
            # guarantees the file is closed on every path.
            with open('C:/Users/zhx/Desktop/YM.txt', 'a+') as out:
                try:
                    # b[0]: first domain found for this IP (IndexError if none,
                    # handled below like any other per-IP failure).
                    u = 'http://' + b[0]
                    r = requests.get(url=u, headers=headers, timeout=5).status_code
                    if r == 200:
                        out.write(str(b[0]) + ' : ' + str(number) + '\n')
                except Exception:
                    continue
def main():
    """Build the list of IPv4 addresses between sys.argv[1] and sys.argv[2]
    (assumed to share the first two octets) and scan them with 20 workers.
    """
    # IP1 = input('...')  # interactive variant, superseded by argv
    IP1 = sys.argv[1]
    IP2 = sys.argv[2]
    # Octets: e/f = common first two, a..b = third-octet range, c..d = fourth.
    a = IP1.split('.')[2]
    b = IP2.split('.')[2]
    c = IP1.split('.')[3]
    d = IP2.split('.')[3]
    e = IP1.split('.')[0]
    f = IP1.split('.')[1]
    ipd = []
    # Bug fix: the original compared octets as *strings*, so e.g. '9' < '10'
    # was False and whole ranges were silently skipped. Compare as integers.
    if int(a) == int(b):
        for i in range(int(c), int(d) + 1):
            ip = str(e) + '.' + str(f) + '.' + str(a) + '.' + str(i)
            ipd.append(ip)
    elif int(a) < int(b):
        for i in range(int(a), int(b) + 1):
            for j in range(int(c), int(d) + 1):
                ip = str(e) + '.' + str(f) + '.' + str(i) + '.' + str(j)
                ipd.append(ip)
    thread_count = 20
    threads = []
    queue = Queue.Queue()
    for i in ipd:
        queue.put(i)
    for i in xrange(thread_count):
        threads.append(RedisUN(queue))
    for t in threads:
        t.start()
    for t in threads:
        t.join()


if __name__ == '__main__':
    main()
# Print the installed versions of the core PyTorch family of packages,
# to verify that a mutually compatible set of builds is installed.
import torch
import torchaudio
import torchvision
import torchtext
# import torchcsprng
print(torch.__version__)
print(torchaudio.__version__)
print(torchvision.__version__)
print(torchtext.__version__)
# print(torchcsprng.__version__)
|
import requests
import json

# Fetch the public repository names for a GitHub user and dump them to
# data.json as {"user": <name>, "repo": [<repo names>]}.
name = input('User name: ')
r = requests.get(f'https://api.github.com/users/{name}/repos', timeout=10)
payload = r.json()

# The API returns a *list* of repo objects on success, but an error *dict*
# (e.g. {"message": "Not Found"}) for unknown users or rate limits; the
# original crashed with a TypeError in that case.
if not isinstance(payload, list):
    raise SystemExit(f"GitHub API error: {payload.get('message', 'unknown error')}")

list_of_repo = [el['name'] for el in payload]
my_dict = {"user": name, "repo": list_of_repo}
json_obj = json.dumps(my_dict)
with open('data.json', 'w') as f:
    f.write(json_obj)
|
#!/usr/bin/python
import pprint
# Shared pretty-printer for dumping GA results (depth-limited to 3).
pp = pprint.PrettyPrinter(indent=4, width=120, depth=3)
import sys
import numpy as np
import pygame as pg
from genotype import *
from phenotype import *
from neat import *
# from numeric_3d
def in2pi(a):
    """Wrap the angle *a* into the interval [-pi, +pi] and return it."""
    two_pi = 2 * np.pi
    # Subtract the whole number of turns that lie beyond the boundary;
    # int() truncates toward zero, matching the wrap direction on each side.
    if a > np.pi:
        a -= int((a + np.pi) / two_pi) * two_pi
    if a < -np.pi:
        a -= int((a - np.pi) / two_pi) * two_pi
    return a


# Element-wise version for numpy arrays.
in2piV = np.vectorize(in2pi)
class PoleBalanceTask(object):
    """Cart-pole balancing task: physics simulation plus fitness evaluation."""

    def __init__(self):
        # Physical constants (SI units); poles are stored as arrays so the
        # same code handles one or several poles.
        self.g = 9.81 # gravity
        self.mc = 1.0 # cart_mass
        self.mp = np.array([0.1]) # pole_mass
        self.l = np.array([0.5]) # pole_length
        self.h = 2.4 # track_limit
        self.r = 0.628 # failure_angle = 2*pi/10
        self.f = 10.0 # force_magnitude
        self.dt = 0.01 # timestep
        # Whether velocities are included in the network input.
        self.velocities = True # False
        self.penalize_oscillation = True
        self.max_steps = 1000
        # Start at track centre with a small random pole angle.
        x, dx, theta, dtheta = 0.0, 0.0, np.random.normal(0, 0.02, self.l.size), np.zeros(self.l.size)
        self.initial_state = (x, dx, theta, dtheta)

    @property
    def n_inputs(self):
        # (cart + one entry per pole) * (position [+ velocity])
        return (1+self.l.shape[0])*(1+int(self.velocities))

    @property
    def n_outputs(self):
        # Single scalar output: the force applied to the cart.
        return 1
    def _simulation_step_multipole(self, action, state):
        """Advance the multi-pole simulation by one Euler timestep.

        NOTE(review): every candidate equation set below is commented out,
        so `ddx` and `ddtheta` are never assigned and calling this method
        raises NameError. One of the commented blocks must be restored
        (and verified) before this method is usable.
        """
        # state is a tuple of (x, dx, (p1, p2), (dp1, dp2))
        x, dx, theta, dtheta = state
        #f = (min(1.0, max(-1.0, action)) - 0.5) * self.f * 2.0;
        # Alternate equations
        # fi = self.mp * self.l * dtheta**2 * np.sin(theta) + (3.0/4) * self.mp * np.cos(theta) * self.g * np.sin(theta)
        # mi = self.mp * (1 - (3.0/4) * np.cos(theta)**2)
        # ddx = f + np.sum(fi) / (self.mc + np.sum(mi))
        # ddtheta = (- 3.0 / (4 * self.l)) * (ddx * np.cos(theta) + self.g * np.sin(theta))
        # Equations from "THE POLE BALANCING PROBLEM"
        # _ni = (-f - self.mp * self.l * dtheta**2 * np.sin(theta))
        # m = self.mc + np.sum(self.mp)
        # _n = self.g * np.sin(theta) + np.cos(theta) * (_ni / m)
        # _d = self.l * (4./3. - (self.mp * np.cos(theta)**2) / m)
        # ddtheta = (_n / _d)
        # ddx = (f + np.sum(self.mp * self.l * np.floor(dtheta**2 * np.sin(theta) - ddtheta * np.cos(theta)))) / m
        # Explicit Euler integration of the state.
        x += self.dt * dx
        dx += self.dt * ddx
        theta += self.dt * dtheta
        dtheta += self.dt * ddtheta
        return (x, dx, theta, dtheta)
def _simulation_step(self, action, state):
# single pole, no friction, point mass at the end
# -f + (mc+mp)*ddx - mp*l*sin(theta)*dtheta**2 + mp*l*cos(theta)*ddtheta
# -mp*l**2*ddtheta + mp*g*l*sin(theta) - mp*l*cos(theta)*ddx
# leads to
# ddx = (dtheta**2*l*mp*sin(theta) + f - g*mp*sin(2*theta)/2)/(mc + mp*sin(theta)**2)
# ddtheta = (g*(mc + mp)*sin(theta) - (dtheta**2*l*mp*sin(theta) + f)*cos(theta))/(l*(mc + mp*sin(theta)**2))
x, dx, theta, dtheta = state
f = action
mc = self.mc
mp = self.mp
l = self.l
g = self.g
s = np.sin(theta)
c = np.cos(theta)
ddx = (dtheta**2*l*mp*s + f - g*mp*s*c)/(mc + mp*s**2)
ddtheta = (g*(mc + mp)*s - (dtheta**2*l*mp*s + f)*c)/(l*(mc + mp*s**2))
state_new = state + np.hstack([dx, ddx, dtheta, ddtheta]) * self.dt
state_new[2] = in2pi(state_new[2])
return state_new
    def _step(self, network, state): # evaluate network and simulate one step
        """Feed the normalized state to *network* and advance one timestep.

        Returns (action, new_state).
        """
        if self.velocities:
            # Divide velocities by 2.0 because that is what neat-python does
            #net_input = np.hstack((x/self.h, dx/2.0, theta/self.r, dtheta/2.0))
            net_input = state / np.array([self.h, 2.0, self.r, 2.0])
        else:
            # Positions only: every other state entry (x, theta, ...).
            #net_input = np.hstack((x/self.h, theta/self.r))
            net_input = state[::2]
        net_output = network.feed(net_input)
        #print('net_input ' + str(net_input))
        #print('net_output ' + str(net_output))
        # Scale the network output to the task's force magnitude.
        action = net_output * self.f
        state = self._simulation_step(action, state)
        return (action, state)
    def _loop(self, network, initial_state, max_steps): # evaluate network and simulate all steps
        """Run the simulation until failure or *max_steps* steps.

        Returns (steps, states, actions) with states/actions as arrays.
        """
        # state = [x, dx, theta1, dtheta1, thetat2, dthetat2...]
        # Cache the per-step time/tolerance arrays; rebuild only if
        # max_steps grew beyond the cached length.
        if not hasattr(self,'steps_all') or self.steps_all.shape[0] < max_steps:
            self.steps_all = np.arange(max_steps)
            self.time_all = self.steps_all*self.dt
            # Angular tolerance decays exponentially towards the failure angle.
            self.tolerance_all = 2*np.pi*np.exp(-0.4*self.steps_all*self.dt) + self.r
        steps = 0
        states = []
        actions = []
        state = initial_state
        #while (steps < max_steps and np.abs(x) < self.h and ((np.abs(theta) < self.r).all() or steps < 200)):
        #while steps < max_steps and np.abs(state[0]) < self.h and (np.abs(state[2::2]) < self.tolerance_all[steps]).all():
        # Only the track limit terminates the run; angle checks are disabled.
        while steps < max_steps and np.abs(state[0]) < self.h:
            steps += 1
            action, state = self._step(network, state)
            states.append(state)
            actions.append(action)
            #print(states[-1])
        return steps, np.array(states), np.array(actions)
    def evaluate(self, network):
        """Run one full simulation and score *network*.

        Returns [score, solved] where solved is 1 when the run survived
        the whole max_steps horizon.
        """
        initial_state = np.hstack(self.initial_state)
        steps, states, actions = self._loop(network, initial_state, self.max_steps)
        x = states[:,0]
        theta = states[:,2]
        #score= np.sum( np.abs(theta) < np.pi/4. ) / float(self.max_steps)
        # Reward staying near the track centre (score_x) and near upright
        # (score_theta); each term saturates exponentially and is averaged
        # over the full horizon, then combined with theta dominating.
        score_x = np.sum(1.-np.exp(-0.4*self.h/np.abs(x))) / float(self.max_steps)
        score_theta = np.sum(1.-np.exp(-0.12*np.pi/np.abs(theta))) / float(self.max_steps)
        score = score_theta * (1+0.5*score_x)
        #print('score_x %f score_theta %f score %f' % (score_x, score_theta, score) )
        #score = steps/float(self.max_steps)
        #if self.penalize_oscillation:
        #    #penalty = 1.0e3/(sum( abs(dx)/self.dt for (x, dx, theta, dtheta) in states))
        #    #score = steps/float(self.max_steps) - penalty
        #    dx = states[:,0]
        #    ddx = states[:,1]
        #    dx_mean = np.mean(np.abs(np.array(dx)))/self.dt
        #    ddx_mean = np.mean(np.abs(np.diff(np.array(ddx))))/(self.dt**2)
        #    bonus_dx = (2.-np.exp(-1e2/dx_mean)) # bonus for low velocity
        #    bonus_ddx = (2.-np.exp(-1e2/ddx_mean)) # bonus for low acceleration
        #    score = score * bonus_dx * bonus_ddx
        #    #print('raw_score %f bonus_dx %f bonus_ddx %f score %f dx_mean %f ddx_mean %f' % (steps/float(self.max_steps), bonus_dx, bonus_ddx, score, dx_mean, ddx_mean) )
        solved = int(steps >= self.max_steps)
        return [score, solved]
# based on code from PEAS
def visualize(self, network, filename):
""" Visualize a solution strategy by the given individual
"""
import matplotlib
matplotlib.use('Agg',warn=False)
import matplotlib.pyplot as plt
initial_state = np.hstack(self.initial_state)
steps, states, actions = self._loop(network, initial_state, self.max_steps)
actions = np.array(actions)
#print('%5d'%actions.size, np.histogram(actions)[0], ' min %s max %s'%(min(actions), max(actions)))
g = network.genotype
x = states[:,0]
dx = states[:,1]
theta = states[:,2::2]
dtheta = states[:,3::2]
setps_all = self.steps_all[:x.shape[0]]
toleranc_all = self.tolerance_all[:x.shape[0]]
fig = plt.figure()
top = fig.add_subplot(211) # The top plot (cart position)
top.fill_between(setps_all, -self.h, self.h, facecolor='green', alpha=0.3)
top.plot(x, label=r'$x$')
top.plot(dx, label=r'$\delta x$')
top.legend(loc='lower left', ncol=4, bbox_to_anchor=(0, 0, 1, 1))
foo = 1.-np.exp(-0.4*self.h/np.abs(x))
top.plot(foo*self.h,'k')
bottom = fig.add_subplot(212) # The bottom plot (pole angles)
bottom.plot((0,steps),(0,0), 'c--' )
#bottom.plot((0,steps),(2*np.pi,2*np.pi), 'c--' )
#bottom.plot((0,steps),(-2*np.pi,-2*np.pi), 'c--' )
bottom.plot((0,steps),(np.pi,np.pi), 'r--' )
bottom.plot((0,steps),(-np.pi,-np.pi), 'r--' )
#bottom.fill_between(setps_all, -toleranc_all, toleranc_all, facecolor='green', alpha=0.3)
for i in range(1):
bottom.plot(theta, label=r'$\theta_%d$'%i)
bottom.plot(dtheta, ls='--', label=r'$\delta \theta_%d$'%i)
bottom.legend(loc='lower left', ncol=4, bbox_to_anchor=(0, 0, 1, 1))
bottom.plot(np.abs(theta) < np.pi/4., 'r' )
foo = 1.-np.exp(-0.12*np.pi/np.abs(theta))
bottom.plot(foo,'k')
#bottom.plot(np.cumsum(foo),'b')
fig.text(0.02,0.02,'genome_id %04d steps %d fitness %0.4f solved %d' % (g.id, steps, g.fitness, g.solved))
fig.savefig(filename)
plt.close()
class World:
    """Interactive pygame viewer running a network-controlled cart-pole."""

    def __init__(self, task, network):
        # Task physics and the controller network to visualise.
        self.task = task
        self.network = network
        self.fps = 30
        self.display_width = 1200
        self.display_height = 400
        # Pixels per simulated metre.
        self.meter_pixel_ratio = 100
        self.cart_size = np.array([0.3, 0.1]) * self.meter_pixel_ratio
        if not hasattr(self.task,'name'):
            self.task.name = type(task).__name__
        # initialize pygame
        pg.init()
        self.display_size = np.array([self.display_width, self.display_height])
        self.init_pos = self.display_size/2
        self.display = pg.display.set_mode(self.display_size)
        pg.display.set_caption(self.task.name)
        self.clock = pg.time.Clock()
        self.background_color = (255, 255, 255)
        # initialize world
        self.exit = False
        self.reset = True
        # NOTE: the constructor blocks here until the window is closed.
        self.world_loop()
    def world_loop(self):
        """Event/physics/render loop; runs until quit is requested.

        Keys: Escape/Q quit, R resets the cart with a random pole angle.
        """
        while not self.exit:
            if self.reset:
                #state = np.hstack(self.task.initial_state)
                # Restart at track centre with a uniformly random pole angle.
                state = np.array([0.0, 0.0, 2*np.pi*np.random.random(), 0.0])
                self.reset = False
            # handle events per frame
            for event in pg.event.get():
                #print(event)
                if event.type == pg.QUIT:
                    self.exit = True
                if event.type == pg.KEYDOWN:
                    if event.key == pg.K_ESCAPE or event.key == pg.K_q:
                        self.exit = True
                    if event.key == pg.K_r:
                        self.reset = True
            # update physics
            action, state = self.task._step(self.network, state)
            #print('action ' + str(action))
            #print('state ' + str(state))
            (x, dx, theta, dtheta) = state
            # Reset when the cart runs off the track.
            if abs(x) > self.task.h:
                self.reset = True
            # redraw world
            self.display.fill(self.background_color)
            pos = (self.init_pos[0] + x*self.meter_pixel_ratio, self.init_pos[1])
            rect = pg.draw.rect(self.display, (0,255,0), (pos[0] - self.cart_size[0]/2, pos[1], self.cart_size[0], self.cart_size[1]), 3)
            # Pole drawn from the cart anchor towards its tip.
            l = self.task.l[0] * self.meter_pixel_ratio
            end_pos = (pos[0]+l*np.sin(theta), pos[1]-l*np.cos(theta) )
            pg.draw.line(self.display, (255,0,0), pos, end_pos, 3)
            # Track limits as a horizontal blue line.
            xb = self.task.h*self.meter_pixel_ratio
            pg.draw.line(self.display, (0,0,255), (self.init_pos[0]-xb, self.init_pos[1]+self.cart_size[1]), (self.init_pos[0]+xb, self.init_pos[1]+self.cart_size[1]) )
            #pg.draw.line(self.display, (255,0,255), pos, (pos[0]+100,pos[1]-100), 3)
            pg.display.update()
            self.clock.tick(self.fps)
        if self.exit:
            pg.quit()
            quit()
if __name__ == '__main__':
    # CLI: argv[1] selects a task variant (single/double/tumbler),
    # argv[2] an action (simulate/visualize), argv[3] an optional net file.
    # With no action, a NEAT run of 500 epochs is started.
    task = PoleBalanceTask()
    if len(sys.argv) > 1:
        if sys.argv[1] == 'single':
            # Single pole starting far from upright, on a longer track.
            task.max_steps = 2000
            task.initial_state = np.array([0.0, 0.0, 1.6, 0.01])
            #task.initial_state = np.array([0.0, 0.0, 1.0, 0.01])
            #task.initial_state = np.array([0.0, 0.0, 0.517, 0.01])
            task.h = 5.0
        if sys.argv[1] == 'double':
            task.name = 'DoublePoleBalanceTask'
            task.mp = np.array([0.1, 0.01])
            task.l = np.array([0.5, 0.05])
            task.max_steps = 1000
            x, dx = 0.0, 0.0
            theta = np.array([0.017, 0.0]) # Long pole starts at a fixed 1 degree angle.
            dtheta = np.array([0.0, 0.0])
            task.initial_state = (x, dx, theta, dtheta)
        if sys.argv[1] == 'tumbler':
            # Pole starts hanging straight down.
            task.name = 'TumblerPoleBalanceTask'
            task.dt = 0.01
            task.max_steps = 2000
            x, dx, theta, dtheta = 0.0, 0.0, np.array([np.pi]), np.array([0.0])
            task.initial_state = (x, dx, theta, dtheta)
        if len(sys.argv) > 2:
            if len(sys.argv) > 3:
                filename = sys.argv[3]
            else:
                filename = './results/PoleBalanceTask/net-001-002.json'
            if sys.argv[2] == 'simulate':
                # Load a saved network and run the interactive pygame viewer.
                network = Network(None,filename=filename)
                world = World(task, network)
                sys.exit()
            if sys.argv[2] == 'visualize':
                # Load a saved network, score it, and save plots to disk.
                network = Network(None,filename=filename)
                network.genotype = Object()
                fitness, solved = task.evaluate(network)
                network.genotype.fitness = fitness
                network.genotype.solved = solved
                network.visualize('net.png')
                task.visualize(network, 'sim.png')
                sys.exit()
    # Default mode: evolve controllers with NEAT for 500 generations.
    ga = GeneticAlgorithm(task)
    ga.visualization_type = VisualizationType.BEST
    for i in range(500):
        #import cProfile
        #p = cProfile.Profile()
        #p.enable()
        ga.epoch()
        #p.disable()
        #p.print_stats('tottime')
        pp.pprint(ga.best_ever.__dict__)
    sys.exit()
# TODO
# double pole, serial and parallel
# draw parallel side by side
# use friction
# plot energy
# show fitness
# js evolution tree
# disturbance, force
# norm input for tumbler into 2*pi
# compare to human
# test sets
# visualize without extra calculation
# run multiple simulation and use min score as fitness
|
import _lib
import re
import time
def StartNodeInteractive(datadir, address, port,comment = ""):
    """Start a node in interactive/debug mode and assert it launched."""
    _lib.StartTest("Start node (debug) "+comment)
    res = _lib.ExecuteHangNode(['startintnode','-datadir',datadir,'-port',port,'-minter',address],datadir)
    # The node prints this marker once its process is up.
    _lib.FatalAssertSubstr(res,"Process started","No process start marker")
def GetWallets(datadir):
    """Return the list of wallet addresses reported by the node."""
    _lib.StartTest("Get node wallets")
    res = _lib.ExecuteNode(['listaddresses','-datadir',datadir])
    _lib.FatalAssertSubstr(res,"Wallets (addresses)","No list of wallets")
    # Addresses start with '1' followed by 30-100 alphanumerics. Fix: the
    # ur"" prefix is Python-2-only syntax (SyntaxError on Python 3); a plain
    # r"" pattern matches identically here.
    regex = r"(1[a-zA-Z0-9]{30,100})"
    addresses = re.findall(regex, res)
    return addresses
def NodeState(datadir):
    """Query the node's state and return its counters as a dict.

    Keys: 'blocks', 'unapproved', 'unspent' (digit strings), 'inprogress'
    (bool) and, while the chain is still loading, 'totalnumber'.
    """
    _lib.StartTest("Check node state")
    res = _lib.ExecuteNode(['nodestate','-datadir',datadir])
    _lib.FatalAssertSubstr(res,"Number of blocks","No info about blocks")
    state = {}
    match = re.search( r'Number of blocks - (\d+)', res)
    if not match:
        _lib.Fatal("Number of blocks is not found "+res)
    state['blocks'] = match.group(1)
    match = re.search( r'Number of unapproved transactions - (\d+)', res)
    if not match:
        # Fix: error message typo ("Numberof" -> "Number of").
        _lib.Fatal("Number of unapproved transactions not found "+res)
    state['unapproved'] = match.group(1)
    match = re.search( r'Number of unspent transactions outputs - (\d+)', res)
    if not match:
        _lib.Fatal("Number of unspent transactions outputs - not found "+res)
    state['unspent'] = match.group(1)
    state['inprogress'] = False
    # While still syncing, the node reports "Loaded X of Y blocks".
    match = re.search( r'Loaded (\d+) of (\d+) blocks', res)
    if match:
        state['totalnumber'] = match.group(2)
        state['inprogress'] = True
    return state
def WaitBlocksInState(datadir, explen, maxtime = 10):
    """Poll the node until it reports at least *explen* blocks.

    Gives up after *maxtime* one-second polls; returns the last state seen.
    """
    state = NodeState(datadir)
    elapsed = 0
    # Keep polling while the chain is short and the time budget remains.
    while int(state['blocks']) < explen and elapsed < maxtime:
        time.sleep(1)
        elapsed += 1
        state = NodeState(datadir)
    return state
import os
import shutil

# Scan all files under the current directory and copy those containing a
# 'SystemAnalysis-Snapshot' marker into ../logswithexp.
directory = os.path.join('..','logswithexp')
# Fix: the original tested/created `d`, an undefined name (NameError).
if not os.path.exists(directory):
    os.makedirs(directory)
for root,dirs,files in os.walk('.'):
    for f in files:
        # Fix: files must be opened via their full path; os.walk yields
        # bare names, so open(f) failed for anything outside the cwd.
        path = os.path.join(root, f)
        log = open(path,'r')
        try:
            for line in log:
                if 'SystemAnalysis-Snapshot' in line:
                    print(os.path.basename(f))
                    # Fix: os.system received the literal text
                    # 'cp os.path.basename(f) ...' (no interpolation);
                    # shutil.copy copies the actual file portably.
                    shutil.copy(path, directory)
                    break
        finally:
            # Fix: the handle was never closed in the original.
            log.close()
|
"""
Stuff
"""
import sys
import os
import fbx
from brenpy.qt.bpQtImportUtils import QtCore
from brenpy.qt.bpQtImportUtils import QtWidgets
from brenfbx.utils import bfFbxUtils
from brenpy.qt import bpQtCore
# from brenrig.sandbox import fbx_prototype_01
from brenfbx.fbxsdk.core import bfProperty
from brenpy.qt.item import bpQtItemsModels
from brenfbx.items import bfPropertyItems
class BfFbxPropertyModel(
    bpQtItemsModels.BpItemsUndoModel
    # bfQtItemsModels.BfItemsModel
):
    """Qt item model exposing the properties of an FBX object.

    TODO check FbxProperty still exists before get or set data!
    """
    # Custom item-data roles, offset from Qt.UserRole.
    kSortRole = QtCore.Qt.UserRole
    kFilterRole = QtCore.Qt.UserRole + 1
    kFbxDataTypeRole = QtCore.Qt.UserRole + 2
    kFbxPropertyRole = QtCore.Qt.UserRole + 3
    kNamePathRole = QtCore.Qt.UserRole + 4
    kNamePathStrRole = QtCore.Qt.UserRole + 5
    kValueRole = QtCore.Qt.UserRole + 6

    # Column layout of the model, in display order.
    COLUMNS = [
        "name",
        "value",
        "data type name",
    ]
    def __init__(self, parent=None):
        """Create the model; items are rebuilt from scratch on every refresh."""
        super(BfFbxPropertyModel, self).__init__(parent=parent)
        self.set_rebuild_on_refresh(True)
def get_fbx_manager(self):
fbx_manager = self.item_manager().fbx_manager()
return fbx_manager
def get_fbx_object(self):
fbx_object = self.item_manager().fbx_object()
return fbx_object
def set_fbx_object(self, value):
self.beginResetModel()
res = self.item_manager().set_fbx_object(value)
self.endResetModel()
return res
def set_root_fbx_property(self, value):
self.beginResetModel()
res = self.item_manager().set_root_fbx_property(value)
self.endResetModel()
return res
def get_fbx_property(self, index):
item = self.get_item(index)
fbx_object = item.fbx_property()
return fbx_object
def get_fbx_scene(self):
fbx_object = self.get_fbx_object()
return fbx_object.GetScene()
def fbx_property_valid(self):
"""TODO check FbxProperty still exists etc
"""
return True
def _get_item_data(cls, item, column, role, deligate_mode=False, icon_manager=None):
"""Overridable method
"""
fbx_property = item.fbx_property()
if role == cls.kFbxPropertyRole:
return fbx_property
bf_environment = item.item_manager().bf_environment()
if fbx_property.IsRoot():
data = cls.get_fbx_root_property_data(fbx_property, column, role)
else:
data = cls.get_fbx_property_data(bf_environment, fbx_property, column, role, deligate_mode=deligate_mode)
return data
def get_fbx_root_property_data(self, fbx_property, column, role):
if role in [QtCore.Qt.DisplayRole, QtCore.Qt.EditRole]:
if column == 0:
return str(fbx_property.GetName())
elif role == self.kNamePathRole:
return [fbx_property.GetFbxObject().GetName()]
elif role == self.kNamePathStrRole:
return fbx_property.GetFbxObject().GetName()
return None
@classmethod
def get_fbx_property_data(cls, bf_environment, fbx_property, column, role, deligate_mode=False):
if role == cls.kNamePathRole:
return [
str(i.GetName()) for i in
fbx_property.GetFbxObject(), fbx_property
]
elif role == cls.kNamePathStrRole:
name_path = cls.get_fbx_object_data(
fbx_property, column, cls.kNamePathRole
)
return ".".join(name_path)
# TODO (find specific Fn class)
# fbx_manager = fbx_property.GetFbxObject().GetScene().GetFbxManager()
property_fn = bfProperty.BfProperty(bf_environment, fbx_property)
if not property_fn.is_valid():
return None
if role in [QtCore.Qt.DisplayRole, QtCore.Qt.EditRole]:
if column == 0:
return str(fbx_property.GetName())
elif column == 1:
# if isinstance(property_fn, bfProperty.FSEnumProperty):
# return property_fn.GetStr()
data_type_enum = fbx_property.GetPropertyDataType().GetType()
if deligate_mode:
if data_type_enum == fbx.eFbxEnum:
enum_value = fbx_property.GetEnumValue(property_fn.get_value())
if enum_value is None:
# sometimes fbx property enums don't have values for some reason
# in these cases fall back to str
return bfFbxUtils.get_property_value_as_str(
property_fn.cast_property()
)
else:
return str(enum_value)
else:
return bfFbxUtils.get_property_value_as_str(
property_fn.cast_property()
)
else:
return bfFbxUtils.get_property_value_as_str(
property_fn.cast_property()
)
elif column == 2:
data_type = fbx_property.GetPropertyDataType()
return data_type.GetName()
# return str(type(prop_fn))
# return property_fn.__class__.__name__
elif column == 3:
return property_fn.get_type_str()
else:
return None
elif role in [QtCore.Qt.DecorationRole]:
if column == 0:
pass
elif role in [cls.kValueRole]:
return property_fn.get_value()
elif role == QtCore.Qt.CheckStateRole:
if column == 1:
if fbx_property.GetPropertyDataType() == fbx.FbxBoolDT:
return QtCore.Qt.Checked if property_fn.get_value() else QtCore.Qt.Unchecked
return None
@classmethod
def _set_item_data(cls, item, column, value, role=QtCore.Qt.EditRole):
"""Overridale method"""
fbx_property = item.fbx_property()
if fbx_property.IsRoot():
# cannot set data on root property
return False
bf_environment = item.item_manager().bf_environment()
res = cls.set_fbx_property_data(bf_environment, fbx_property, column, value, role)
return res
@classmethod
def set_fbx_property_data(cls, bf_environment, fbx_property, column, value, role):
"""
** WIP refactoring **
TODO check if fbx_property is rotation order and use SetRotationOrder
check if fbx_property is preRotation/postRotation and use SetPreRotation etc.
"""
# TODO (find specific Fn class)
# fbx_manager = fbx_property.GetFbxObject().GetFbxManager()
property_fn = bfProperty.BfProperty(bf_environment, fbx_property)
if not property_fn.is_valid():
return False
# note setting property name is not supported and will return False
if column == 0:
if role == QtCore.Qt.EditRole:
res = property_fn.set_name(value)
return res
# for now limit editing data to value only (column 1)
if column == 1:
if role == QtCore.Qt.EditRole:
if False: # TODO isinstance(property_fn, bfProperty.FSEnumProperty):
# enums should be settable from enum str value
property_fn.set_value(value)
return True
else:
value = bfFbxUtils.get_property_value_from_str(
fbx_property,
value
)
property_fn.set_value(value)
return True
elif role == cls.kValueRole:
property_fn.set_value(value)
return True
elif role == QtCore.Qt.CheckStateRole:
if fbx_property.GetPropertyDataType() == fbx.FbxBoolDT:
property_fn.set_value(value == QtCore.Qt.Checked)
return True
return False
def flags(self, index):
""" hard-coded item flags """
if not index.isValid():
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
if self.get_fbx_property(index).IsRoot():
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
if index.column() == 0:
# renaming properties doesn't seem to be possible (yet)
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
# return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEditable
elif index.column() == 1:
data_type = self.get_fbx_property(index).GetPropertyDataType()
if data_type == fbx.FbxBoolDT:
return QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
# return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEditable
else:
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEditable
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
def create_property(self, fbx_data_type, name=None, parent_index=None):
"""
"""
self.beginResetModel()
if parent_index is not None:
parent_item = self.get_item(parent_index)
else:
parent_item = None
fbx_object = self.item_manager().create_fbx_property(
fbx_data_type, name=name, parent_item=parent_item
)
self.endResetModel()
return fbx_object
# def destroy_item(self, item, recursive=True):
# """
# TODO/WIP
# """
#
# print "destroying item: {}".format(item)
# res = self.item_manager().destroy_item(item, recursive=recursive)
# return res
# def removeIndices(self, indices, recursive=False):
# """
# TODO/WIP
# TODO recursive
# """
#
# # get objects to delete
# items = set(self.get_items(indices))
#
# print "DEBUG REMOVE INDICES", items
#
# self.beginResetModel()
#
# for item in items:
# self.destroy_item(item, recursive=recursive)
#
# # self.reset_model()
# self.endResetModel()
# print "done"
# def refresh(self):
# print "MODEL REFRESH"
# self.beginResetModel()
# self.item_manager().rebuild()
# self.endResetModel()
def clear(self):
self.beginResetModel()
self.item_manager().clear(clear_properties=True)
self.endResetModel()
class BFbxPropertyTypeFilterModel(bpQtCore.BpSortFilterProxyModel):
    """Proxy model that only accepts rows whose FBX data type is whitelisted."""
    def __init__(self, *args, **kwargs):
        super(BFbxPropertyTypeFilterModel, self).__init__(*args, **kwargs)
        self._fbx_data_types = []
    def set_fbx_data_types(self, fbx_data_types):
        """Replace the accepted data-type whitelist, resetting the model."""
        self.beginResetModel()
        self._fbx_data_types = fbx_data_types
        self.endResetModel()
        return True
    def filterAcceptsRow(self, source_row, source_parent):
        """Accept a source row only when its data type is in the whitelist."""
        src_index = self.sourceModel().index(
            source_row, 0, source_parent
        )
        if src_index.isValid():
            row_type = self.sourceModel().data(
                src_index, BfFbxPropertyModel.kFbxDataTypeRole
            )
            return row_type in self._fbx_data_types
        # Invalid index: defer to the base implementation.
        return super(BFbxPropertyTypeFilterModel, self).filterAcceptsRow(
            source_row, source_parent
        )
class Test1(object):
    """Manual smoke test: opens property tree/list/scene-tree views for a test scene."""
    def __init__(self, base):
        super(Test1, self).__init__()
        # Tree of one object's properties, plus a freshly created property.
        item_tree = bfPropertyItems.BfFbxPropertyTreeItemManager(base.bf_environment())
        fbx_object = base._scene.FindSrcObject("object1")
        item_tree.set_fbx_object(fbx_object)
        item_tree.create_fbx_property(
            fbx.FbxDoubleDT, "poopProperty", parent_item=item_tree.root_item().get_child(2)
        )
        item_tree.rebuild()
        self._model = BfFbxPropertyModel()
        self._model.set_item_manager(item_tree)
        self._view = QtWidgets.QTreeView()
        self._view.setModel(self._model)
        self._view.expandAll()
        self._view.show()
        # Flat list of the same object's properties.
        item_list = bfPropertyItems.BfFbxPropertyItemManager(base.bf_environment())
        item_list.set_fbx_object(fbx_object)
        self._list_model = BfFbxPropertyModel()
        self._list_model.set_item_manager(item_list)
        self._list_view = QtWidgets.QTreeView()
        self._list_view.setModel(self._list_model)
        self._list_view.expandAll()
        self._list_view.show()
        # Property tree covering the whole scene.
        full_tree = bfPropertyItems.BfFbxPropertySceneTreeItemManager(base.bf_environment())
        full_tree.set_fbx_scene(base._scene)
        self._full_tree_model = BfFbxPropertyModel()
        self._full_tree_model.set_item_manager(full_tree)
        self._full_tree_view = QtWidgets.QTreeView()
        self._full_tree_view.setModel(self._full_tree_model)
        self._full_tree_view.expandAll()
        self._full_tree_view.show()
if __name__ == "__main__":
    # Manual test entry point: loads a dumped scene and opens the three views.
    DUMP_DIR = r"D:\Repos\dataDump\brenfbx"
    TEST_FILE = "brenfbx_test_scene_01.fbx"
    TEST_PATH = os.path.join(DUMP_DIR, TEST_FILE)  # NOTE(review): unused below
    app = QtWidgets.QApplication(sys.argv)
    from brenfbx.utils import bfEnvironmentUtils
    base = bfEnvironmentUtils.BfTestBase(file_path=os.path.join(DUMP_DIR, TEST_FILE))
    test_1 = Test1(base)
    sys.exit(app.exec_())
|
#coding=utf-8
# 2: how do we determine which version number is bigger?
version1 = [1,22,2,6,3,1]
version2 = [1,22,2,4,5]
def cmp(s1, s2):
    """Three-way compare: 1 if s1 > s2, -1 if s1 < s2, 0 if equal."""
    if s1 > s2:
        return 1
    if s1 < s2:
        return -1
    if s1 == s2:
        return 0
def check_ver(v1, v2):
    """Compare two version lists element by element and print which is bigger."""
    # zip stops at the shorter list, i.e. the common prefix.
    for a, b in zip(v1, v2):
        diff = cmp(a, b)
        if diff > 0:
            print("s1 is bigger")
            return
        if diff < 0:
            print("s2 is bigger")
            return
    # Common prefix is identical: decide by length.
    if len(v1) == len(v2):
        print("really same!")
    else:
        print("max len list %s is bigger" % (v1 if len(v1) > len(v2) else v2))
# Case 1: versions differ inside the common prefix.
version1 = [1,22,2,6,3,1]
version2 = [1,22,2,4,5]
check_ver(version1,version2)
# Case 2: one version is a prefix of the other.
version1 = [1,22,2]
version2 = [1,22,2,3]
check_ver(version1,version2)
# Case 3: identical versions.
version1 = [1,22,2]
version2 = [1,22,2]
check_ver(version1,version2)
# #1 ๆญฃๅ่กจ่พพๅผ๏ผ่ฟไธๅๅค็่พๅฐ๏ผๅฟๅพๅทฎไธๅคไบ
# codeNum = '44012'
# #get 12 from str by re
# import re
# patten = re.compile(r'\d5')
# match = patten.match(codeNum)
# if match:
# print(match.group())
#
# p = re.compile(r'(\w+) (\w+)')
# s = 'i say, hello world!'
#
# match = p.match(s)
# if match:
# print(match.group())
#
# print(re.match('www', 'www.runoob.com').span()) # ๅจ่ตทๅงไฝ็ฝฎๅน้
|
import sys
import collections
sys.path.append('../')
from leetCodeUtil import ListNode
from leetCodeUtil import TreeNode
class Interview(object):
    """Collection of interview-practice algorithm implementations."""

    def normalizeString(self, s):
        """Collapse runs of whitespace in *s* to single spaces and strip the ends."""
        return ' '.join(s.split())

    def checkPairsIntervalOverlap(self, nums):
        """Return True if any two [start, end] intervals in *nums* overlap.

        NOTE: sorts *nums* in place by interval start.
        """
        nums.sort(key=lambda x: x[0])
        for i in range(len(nums) - 1):
            # After sorting by start, overlaps can only occur between neighbours.
            if nums[i][1] > nums[i + 1][0]:
                return True
        return False

    ### First Subarray Sums to Target
    def combination(self, nums, target):
        """Return the first combination of values from *nums* summing to *target*.

        Values are taken in index order without reuse; returns [-1, -1] when
        no combination exists.
        """
        ret = []

        def dfs(start, remaining, chosen):
            if remaining == 0 and chosen not in ret:
                ret.append(chosen)
                return
            for i in range(start, len(nums)):
                if ret:
                    return  # first solution found; stop searching
                dfs(i + 1, remaining - nums[i], chosen + [nums[i]])

        dfs(0, target, [])
        return ret[0] if ret else [-1, -1]

    def constructBST(self, head):
        """Convert a sorted singly linked list into a height-balanced BST.

        Destroys the input list (it is split around the middle node).
        Requires the project's ListNode/TreeNode classes.
        """
        if not head:
            return None
        slow = fast = last = head
        # fast moves two steps per slow step, so slow ends at the middle node.
        while fast.next and fast.next.next:
            last = slow
            slow = slow.next
            fast = fast.next.next
        fast = slow.next   # right half starts after the middle
        last.next = None   # detach the left half
        cur = TreeNode(slow.val)
        if head != slow:
            cur.left = self.constructBST(head)
        cur.right = self.constructBST(fast)
        return cur

    def isHappyNumber(self, num):
        """Return True if repeatedly summing squared digits of *num* reaches 1."""
        seen, res = [], num
        while res not in seen:
            if res == 1:
                return True
            seen.append(res)
            digits = res
            res = 0
            while digits > 0:
                res += (digits % 10) ** 2
                # BUGFIX: use floor division — on Python 3, '/' produced floats
                # and the digit loop spun on fractional values.
                digits //= 10
        return False

    def longestConsecutive(self, root):
        """Return the longest run of equal values down a path in tree *root*.

        NOTE(review): reads node.value — assumes the project TreeNode exposes
        `value` (not `val`); confirm against leetCodeUtil.
        """
        def dfs(node, value, run, best):
            if not node:
                return
            run = run + 1 if value == node.value else 1
            best[0] = max(run, best[0])
            dfs(node.left, node.value, run, best)
            dfs(node.right, node.value, run, best)

        best = [0]
        dfs(root, root.value, 1, best)
        return best[0]

    def findModeInBST(self, root):
        """Return all values that occur most frequently in the tree *root*."""
        from collections import Counter

        def inorder(node, counts):
            if not node:
                return
            inorder(node.left, counts)
            counts[node.value] += 1
            inorder(node.right, counts)

        counts = Counter()
        inorder(root, counts)
        top = max(counts.values())
        # BUGFIX: dict.iteritems() is Python 2 only; items() works everywhere.
        return [key for key, value in counts.items() if value == top]

    def findLIS(self, nums):
        """Return the length of the longest strictly increasing subsequence (O(n^2) DP)."""
        if not nums:
            return 0  # BUGFIX: the original reported 1 for an empty input
        length = len(nums)
        dp = [1 for _ in range(length + 1)]
        for i in range(length):
            for j in range(0, i):
                if nums[j] < nums[i]:
                    dp[i] = max(dp[j] + 1, dp[i])
        return max(dp)

    def fib(self, n):
        """Return the n-th Fibonacci number (1, 1, 2, 3, ...) iteratively."""
        if n < 2:
            return 1  # BUGFIX: dp[1] raised IndexError when n == 0
        dp = [0 for i in range(n + 1)]
        dp[0], dp[1] = 1, 1
        for i in range(2, n + 1):
            dp[i] = dp[i - 1] + dp[i - 2]
        return dp[n]

    def fibRecur(self, n):
        """Return the n-th Fibonacci number recursively (exponential time)."""
        if n == 0 or n == 1:
            return 1
        return self.fibRecur(n - 1) + self.fibRecur(n - 2)

    def firstRepeatChar(self, s):
        """Return the first character that repeats in *s*, or None."""
        seen = set()
        for ch in s:
            if ch in seen:
                return ch
            seen.add(ch)
        return None

    def firstNonRepeatChar(self, s):
        """Return the first character occurring exactly once in *s*, or None."""
        from collections import OrderedDict
        counts = OrderedDict()
        for ch in s:
            counts[ch] = counts.get(ch, 0) + 1
        for key, value in counts.items():
            if value == 1:
                return key
        return None

    def longestPalindromSubseq(self, s):
        """Return the length of the longest palindromic subsequence of *s* (O(n^2) DP)."""
        if len(s) <= 1:
            return len(s)
        n = len(s)
        dp = [[0 for _ in range(n)] for _ in range(n)]
        for i in range(n):
            dp[i][i] = 1  # every single character is a palindrome
        for size in range(2, n + 1):
            for i in range(n - size + 1):
                j = i + size - 1
                if s[i] == s[j]:
                    dp[i][j] = dp[i + 1][j - 1] + 2
                else:
                    dp[i][j] = max(dp[i][j - 1], dp[i + 1][j])
        return dp[0][n - 1]
if __name__ == '__main__':
    # Smoke tests for Interview (requires leetCodeUtil's ListNode/TreeNode).
    sol = Interview()
    assert sol.normalizeString(' hello   world ') == 'hello world'
    nums = [[1,3],[7,9],[2,6]]
    assert sol.checkPairsIntervalOverlap(nums) == True
    nums = [[1,3],[7,9],[10,16]]
    assert sol.checkPairsIntervalOverlap(nums) == False
    nums=[4, 3, 5, 7, 8]
    exp = sol.combination(nums, 12)
    assert exp == [4, 3, 5]
    nums=[1, 2, 3, 4]
    exp = sol.combination(nums, 15)
    assert exp == [-1,-1]
    # Build the sorted list 1->2->3->4->5 and convert it to a balanced BST.
    node1 = ListNode(1)
    node2 = ListNode(2)
    node3 = ListNode(3)
    node4 = ListNode(4)
    node5 = ListNode(5)
    node1.next = node2
    node2.next = node3
    node3.next = node4
    node4.next = node5
    result = sol.constructBST(node1)
    res = result.getDFS(result)
    exp1 = [1, 2, 3, 4, 5]
    assert res == exp1
    assert sol.isHappyNumber(100) == True
    assert sol.isHappyNumber(22) == False
    # Chain 2 -> 3 -> 2 -> 1: longest run of equal values is 2.
    sample2 = TreeNode(2)
    sample3 = TreeNode(3)
    sample4 = TreeNode(2)
    sample5 = TreeNode(1)
    sample2.right = sample3
    sample3.left = sample4
    sample4.left = sample5
    assert sol.longestConsecutive(sample2) == 2
    # Tree with value 2 appearing twice: 2 is the mode.
    root = TreeNode(1)
    node1 = TreeNode(2)
    node2 = TreeNode(2)
    root.right = node1
    node1.left = node2
    assert sol.findModeInBST(root) == [2]
    nums = [10, 9, 2, 5, 3, 7, 101, 18]
    assert sol.findLIS(nums) == 4
    assert sol.fib(5) == sol.fibRecur(5)
    assert sol.firstRepeatChar('abca') == 'a'
    assert sol.firstRepeatChar('bcaba') == 'b'
    assert sol.firstRepeatChar('abc') == None
    assert sol.firstRepeatChar('') == None
    assert sol.firstNonRepeatChar('ababcad') == 'c'
    assert sol.firstNonRepeatChar('abcdefg') == 'a'
    assert sol.firstNonRepeatChar('abab') == None
    assert sol.firstNonRepeatChar('') == None
    assert sol.longestPalindromSubseq('bbbab') == 4
|
from django.shortcuts import render_to_response
from chaos import settings
from core import SWARM
import datetime
import os
def render_chaos(request):
    """Render a SWARM chaos-map image for the time window given in the query string."""

    def decode_dt_param(str_dt):
        # 'YYYY-MM-DDTHH:MM:SS' -> datetime
        date_part, time_part = str_dt.split('T')
        return datetime.datetime.strptime(
            '%s %s' %
            (date_part, time_part), '%Y-%m-%d %H:%M:%S')

    original_umask = os.umask(0)
    # Required GET parameters (KeyError -> 500 if any is missing).
    params = {
        name: request.GET[name]
        for name in ('swchar', 'mod', 'timeFrom', 'timeTo', 'delta')
    }
    dt_from = decode_dt_param(params['timeFrom'])
    dt_to = decode_dt_param(params['timeTo'])
    swarm = SWARM(char=params['swchar'], dt_from=dt_from, dt_to=dt_to,
                  delta=int(params['delta']))
    print('data prepared...')
    led = swarm.plot_map()
    os.umask(original_umask)
    return render_to_response(
        'image_view.html',
        {'MEDIA_URL': settings.IMAGES_URL, 'IMAGE_NAME': led})
def chaos_form(request):
    """Render the (English) chaos request form page."""
    return render_to_response('dataserv-chaos-eng.html')
|
# Generated by Django 2.0.7 on 2019-01-05 18:44
from django.db import migrations, models
class Migration(migrations.Migration):
    # Updates workflow-node choices on `device` and the upload field on `device_form`.

    dependencies = [
        ('basedata', '0029_auto_20190104_1627'),
    ]

    operations = [
        migrations.AlterField(
            model_name='device',
            name='workflow_node',
            field=models.IntegerField(blank=True, choices=[(0, 'ๅๅ็ญพ็บฆไบบ'), (1, 'ๅๅก็ป็'), (2, '่ดขๅก็ป็'), (3, '่ฅ้็ป็'), (4, 'ๆป็ป็'), (5, 'ๅทฅ็จ็ป็'), (6, '้กน็ฎ็ป็'), (7, 'ๅทฅ็จ็ป็'), (8, 'ๆๆฏ็ป็'), (9, 'ๆป็ป็')], default=0, null=True, verbose_name='ๅทฅไฝๆต่็น'),
        ),
        migrations.AlterField(
            model_name='device_form',
            name='file',
            # BUGFIX: the verbose_name literal was split by a raw line break
            # inside the string (a syntax error); rejoined onto one line.
            field=models.FileField(blank=True, null=True, upload_to='file/', verbose_name='ๅฏผๅฅEXCELๆไปถ'),
        ),
    ]
|
import os
import sys
# Resolve the repository root (parent of the current working directory) and
# make it importable so the `data` package can be loaded.
main_dir = os.path.split(os.getcwd())[0]
result_dir = main_dir + '/results'
sys.path.append(main_dir)
from data import fmri_data_cv as fmril
from data import fmri_data_cv_rh as fmrir
from data import meg_data_cv as meg
import scipy.io
main_dir = os.path.split(os.getcwd())[0]
# Export the left/right hemisphere fMRI datasets to MATLAB .mat files.
scipy.io.savemat(main_dir + '/fmri_left.mat',
                 dict(x=fmril.x_data,y=fmril.y_target,subjects=fmril.subjects))
scipy.io.savemat(main_dir + '/fmri_right.mat',
                 dict(x=fmrir.x_data,y=fmrir.y_target,subjects=fmrir.subjects))
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
if (sys.version_info > (3,)):
import http.client
from http.client import BAD_REQUEST, CONFLICT, NOT_FOUND, OK
else:
import httplib
from httplib import BAD_REQUEST, CONFLICT, NOT_FOUND, OK
from flask import session, request, make_response
from flask_restful import Resource
from cairis.daemon.CairisHTTPError import MalformedJSONHTTPError, ARMHTTPError, ObjectNotFoundHTTPError
from cairis.data.RiskDAO import RiskDAO
from cairis.tools.JsonConverter import json_serialize
from cairis.tools.MessageDefinitions import RiskMessage
from cairis.tools.PseudoClasses import RiskScore
from cairis.tools.SessionValidator import get_session_id, get_model_generator
__author__ = 'Robin Quetin, Shamal Faily'
class RisksAPI(Resource):
  """REST endpoints for the risk collection."""
  def get(self):
    """Return all risks, optionally filtered by constraint_id."""
    session_id = get_session_id(session, request)
    constraint_id = request.args.get('constraint_id', -1)
    dao = RiskDAO(session_id)
    risks = dao.get_risks(constraint_id)
    resp = make_response(json_serialize(risks, session_id=session_id), OK)
    # BUGFIX: `resp.contenttype` is not a Flask Response attribute and was
    # silently ignored; set the Content-Type header instead (as the other
    # resource classes in this module already do).
    resp.headers['Content-Type'] = 'application/json'
    return resp
  def post(self):
    """Create a new risk from the JSON request body."""
    session_id = get_session_id(session, request)
    dao = RiskDAO(session_id)
    risk = dao.from_json(request)
    dao.add_risk(risk)
    resp_dict = {'message': risk.name() + ' created'}
    resp = make_response(json_serialize(resp_dict), OK)
    resp.headers['Content-Type'] = 'application/json'
    return resp
class RiskByNameAPI(Resource):
  """REST endpoints for a single risk addressed by name."""
  def get(self, name):
    """Fetch one risk by name."""
    session_id = get_session_id(session, request)
    risk_dao = RiskDAO(session_id)
    risk = risk_dao.get_risk_by_name(name)
    risk_dao.close()
    response = make_response(json_serialize(risk, session_id=session_id), OK)
    response.headers['Content-type'] = 'application/json'
    return response
  def put(self, name):
    """Replace the named risk with the JSON payload."""
    session_id = get_session_id(session, request)
    risk_dao = RiskDAO(session_id)
    updated = risk_dao.from_json(request)
    risk_dao.update_risk(name, updated)
    risk_dao.close()
    response = make_response(
      json_serialize({'message': updated.name() + ' updated'}), OK)
    response.headers['Content-type'] = 'application/json'
    return response
  def delete(self, name):
    """Delete the named risk."""
    session_id = get_session_id(session, request)
    risk_dao = RiskDAO(session_id)
    risk_dao.delete_risk(name)
    risk_dao.close()
    response = make_response(
      json_serialize({'message': name + ' deleted'}), OK)
    response.headers['Content-type'] = 'application/json'
    return response
class RiskAnalysisModelAPI(Resource):
  """REST endpoint rendering the risk analysis model for an environment."""
  def get(self, environment):
    """Render the model as SVG (or plain text if the client prefers it)."""
    session_id = get_session_id(session, request)
    model_generator = get_model_generator()
    dim_name = request.args.get('dimension_name', '')
    obj_name = request.args.get('object_name', '')
    isTagged = request.args.get('tagged', '0')
    orientation = request.args.get('orientation','Vertical')
    isTagged = isTagged == '1'
    model_layout = request.args.get('layout','Hierarchical')
    # 'all' means no filter on either dimension.
    if dim_name == 'all':
      dim_name = ''
    if obj_name == 'all':
      obj_name = ''
    # Map the requested layout onto a graphviz renderer ('circo' is the fallback).
    renderers = {'Hierarchical': 'dot', 'Spring': 'fdp', 'Radial': 'twopi'}
    renderer = renderers.get(model_layout, 'circo')
    rankDir = 'LR' if orientation == 'Horizontal' else 'TB'
    dao = RiskDAO(session_id)
    dot_code = dao.get_risk_analysis_model(environment, dim_name, obj_name, renderer, isTagged, rankDir)
    dao.close()
    resp = make_response(model_generator.generate(dot_code, model_type='risk', renderer=renderer), OK)
    accept_header = request.headers.get('Accept', 'image/svg+xml')
    if 'text/plain' in accept_header:
      resp.headers['Content-type'] = 'text/plain'
    else:
      resp.headers['Content-type'] = 'image/svg+xml'
    return resp
class RisksScoreByNameAPI(Resource):
  """REST endpoint returning scores for a risk/threat/vulnerability/environment tuple."""
  def get(self, name, threat, vulnerability, environment):
    session_id = get_session_id(session, request)
    dao = RiskDAO(session_id)
    risk_scores = dao.get_scores_by_rtve(name, threat, vulnerability, environment)
    resp = make_response(json_serialize(risk_scores, session_id=session_id), OK)
    # BUGFIX: `resp.contenttype` is not a Flask Response attribute and was
    # silently ignored; set the Content-Type header instead.
    resp.headers['Content-Type'] = 'application/json'
    return resp
class RisksRatingByNameAPI(Resource):
  """REST endpoint returning the rating for a threat/vulnerability/environment tuple."""
  def get(self, threat, vulnerability, environment):
    session_id = get_session_id(session, request)
    dao = RiskDAO(session_id)
    risk_rating = dao.get_risk_rating_by_tve(threat, vulnerability, environment)
    resp = make_response(json_serialize(risk_rating, session_id=session_id), OK)
    # BUGFIX: `resp.contenttype` is not a Flask Response attribute and was
    # silently ignored; set the Content-Type header instead.
    resp.headers['Content-Type'] = 'application/json'
    return resp
class RiskAnalysisModelNamesAPI(Resource):
  """REST endpoint listing the element names in an environment's risk model."""
  def get(self, environment):
    session_id = get_session_id(session, request)
    dao = RiskDAO(session_id)
    element_names = dao.risk_model_elements(environment)
    resp = make_response(json_serialize(element_names, session_id=session_id), OK)
    # BUGFIX: `resp.contenttype` is not a Flask Response attribute and was
    # silently ignored; set the Content-Type header instead.
    resp.headers['Content-Type'] = 'application/json'
    return resp
class RisksSummaryAPI(Resource):
  """REST endpoint returning a summary of all risks."""
  def get(self):
    """Return the risks summary as JSON."""
    session_id = get_session_id(session, request)
    risk_dao = RiskDAO(session_id)
    summary = risk_dao.get_risks_summary()
    risk_dao.close()
    response = make_response(json_serialize(summary, session_id=session_id))
    response.headers['Content-Type'] = "application/json"
    return response
|
# Definition for a binary tree node.
class TreeNode:
    """Binary tree node: a value plus left/right child references."""
    def __init__(self, x):
        self.val = x
        self.left = self.right = None
class Solution:
    def findTilt(self, root):
        """Return the sum over all nodes of |left subtree sum - right subtree sum|.

        :type root: TreeNode
        :rtype: int
        """
        if root is None:
            return 0
        self.tilt = 0

        def subtree_sum(node):
            # Post-order: accumulate this node's tilt, return its subtree sum.
            left = subtree_sum(node.left) if node.left is not None else 0
            right = subtree_sum(node.right) if node.right is not None else 0
            self.tilt += abs(left - right)
            return left + right + node.val

        subtree_sum(root)
        return self.tilt
# Demo: the tilt of the tree 1 -> (2 -> (4, _), 3) is 7.
root = TreeNode(1)
left_child = TreeNode(2)
right_child = TreeNode(3)
grandchild = TreeNode(4)
root.left = left_child
root.right = right_child
left_child.left = grandchild
print(Solution().findTilt(root))
from PIL import Image
def numLivingNeighbours(board, i, j, xDim, yDim):
    """Count live cells adjacent to (i, j).

    xDim is the number of columns and yDim the number of rows of *board*.
    """
    count = 0
    for ni, nj in ((i - 1, j - 1), (i - 1, j), (i - 1, j + 1),
                   (i, j - 1), (i, j + 1),
                   (i + 1, j - 1), (i + 1, j), (i + 1, j + 1)):
        if 0 <= ni < yDim and 0 <= nj < xDim:
            count += board[ni][nj]
    return count
def evaluate(board, visualise=False):
    """Run Conway's Game of Life until the board dies out or repeats a recent state.

    board: rectangular list of lists of bools (True = live cell).
    visualise: when True, also return a list of PIL Image frames (requires PIL).
    Returns the number of generations simulated (plus the frames if visualise).
    """
    if visualise:
        frames = []
    count = 0
    boardHistory = []
    boardHash = hash(str(board))
    # BUGFIX: xDim must be the column count and yDim the row count, as expected
    # by numLivingNeighbours; the original swapped them, which miscounted or
    # crashed (IndexError) on non-square boards.
    xDim = len(board[0])
    yDim = len(board)
    while sum(map(sum, board)) > 0 and boardHash not in boardHistory:
        if visualise:
            im = Image.new('RGB', (len(board), len(board[0])))
            im.putdata(list(sum(map(lambda x: list(map(lambda y: (int(y)*255, int(y)*255, int(y)*255), x)), board), [])))
            frames.append(im)
        boardHistory.append(boardHash)
        if len(boardHistory) > 150:
            # BUGFIX: keep the most RECENT 100 states; the original kept the
            # oldest ones ([:-100]), so recently repeated states went undetected.
            boardHistory = boardHistory[-100:]
        newBoard = []
        for i, row in enumerate(board):
            newRow = []
            for j in range(len(row)):
                numLiving = numLivingNeighbours(board, i, j, xDim, yDim)
                if board[i][j]:
                    # Survival rule: a live cell needs 2 or 3 live neighbours.
                    newRow.append(2 <= numLiving <= 3)
                else:
                    # Birth rule: a dead cell with exactly 3 live neighbours lives.
                    newRow.append(numLiving == 3)
            newBoard.append(newRow)
        board = newBoard
        boardHash = hash(str(board))
        count += 1
    if visualise:
        return count, frames
    return count
|
lista1 = ["abacate", "melancia", "abacaxi"]
lista2 = [1, 2, 3, 4, 5]
lista3 = ["abacaxi", 1, 9.98, True]
# length of the list
tamanho = len(lista2)
print(tamanho, "\n")
# append() method -> add items
lista1.append("limao")
print(lista1, "\n")
# check whether a given item exists in the list
# using the reserved word "in"
if 3 in lista2:
    print("3 estรก na lista", "\n")
# remove items from the list
# using the reserved word "del"
del lista1[1:3]
print(lista1, "\n")
""" ======================================= """
"""vamos ordenar uma nova lista"""
vetor = [124,345,5,72,46,6,7,3,1,7,0]
vetor2 = [124,345,5,72,46,6,7,3,1,7,0]
# two ways to sort a list
# sort() method -> sorts the list in place, no variable needed
print("lista original ->", vetor)
vetor.sort()
print("mรฉtodo sort()->", vetor)
# sorted() function sorts the list but its result must be stored in a variable
print("lista original 2 ->", vetor2)
ordenada = sorted(vetor2)
print("funรงรฃo 2 sorted()->", ordenada, "\n")
# other orderings are also possible
'''decrescente'''
print("vetor base ->", vetor)
vetor.sort(reverse=True)
print("mรฉtodo sort(reverse=True) ->", vetor, "\n")
'''inverter os valores'''
print("vetor base ->", vetor2)
vetor2.reverse()
print("mรฉtodo reverse() ->", vetor2, "\n")
''' ordem alfabรฉtica '''
lista = ["abacate", "melancia", "abacaxi", "beijo", "zebra", "tentรกculo"]
print("lista base ->", lista)
lista.sort()
print("mรฉtodo sort() ->", lista)
lista.sort(reverse=True)
print("mรฉtodo sort(reverse=True) ->", lista) |
from aiogram import Bot, Dispatcher
from aiogram.contrib.fsm_storage.memory import MemoryStorage
from .configs import bot as config
from .configs import messages
from . import routes
# Fail fast when the bot token is missing from the environment.
if not config.API_TOKEN:
    msg = messages.SPECIFY_TOKEN_TEMPLATE.format(
        config.API_TOKEN_ENV,
    )
    raise RuntimeError(msg)
bot = Bot(token=config.API_TOKEN, proxy=config.PROXY_URL)
# In-memory FSM storage: conversation state is lost on restart.
storage = MemoryStorage()
dispatcher = Dispatcher(bot, storage=storage)
messengers = {}
routes.apply_routes(dispatcher)
|
# Generated by Django 3.2.5 on 2021-07-12 13:57
from django.db import migrations, models
class Migration(migrations.Migration):
    # Redefines `post.is_featured` as a BooleanField with an explicit default.
    dependencies = [
        ('blog', '0005_post_is_featured'),
    ]
    operations = [
        migrations.AlterField(
            model_name='post',
            name='is_featured',
            field=models.BooleanField(default=False),
        ),
    ]
|
# Load pretrained weights
# NOTE(review): `torch`, `pretrained_path` and `model` must be defined by the
# surrounding script; no imports are visible in this snippet — confirm.
# SECURITY: torch.load unpickles the file — only load checkpoints you trust.
pretrained_dict = torch.load(pretrained_path)
# Get model state dicts
model_dict = model.state_dict()
# Filter out unnecessary keys (keep only weights the model actually has)
pretrained_dict = {k: v for k, v in pretrained_dict.items() if k in model_dict}
# Overwrite entries in the existing state dict
model_dict.update(pretrained_dict)
# Load the new state dict
model.load_state_dict(model_dict)
|
# -*- coding: utf-8 -*-
import logging
from datetime import datetime
import json
import os
import random
import numpy as np
import torch
from torch.utils.data import (DataLoader, SequentialSampler,TensorDataset)
from torch.utils.data.distributed import DistributedSampler
from tqdm import tqdm
from pytorch_transformers import (WEIGHTS_NAME, BertConfig, BertForSequenceClassification, BertTokenizer)
from modules.request_es import request_from_db
from modules.request_es import gen_sentence_for_classifier
from modules.qsc import qsc
from modules.qsc import load_model
from modules.qsc import predict_ontime
from modules.functions import build_keywords_list
from modules.functions import replace_special_characters
# from modules.functions import build_keywords_list_spacy
logger = logging.getLogger(__name__)
# declare the IR parameters in a dict
arg_dict = {}
arg_dict['stopwords_csv'] = '../stopwords.csv'
arg_dict['model_type'] = 'bert'
arg_dict['model_name_or_path'] = 'bert-base-multilingual-uncased'
arg_dict['task_name'] = 'QSC'
arg_dict['max_seq_length'] = 512
arg_dict['model_path'] = 'target_link'
arg_dict['per_gpu_eval_batch_size'] = 32
arg_dict['BM25_threshold'] = 0
arg_dict['BM25_nb_max_result'] = 5
arg_dict['tolerance'] = 0.01
arg_dict['language'] = 'french'
arg_dict['user_config_json'] = 'user_config.json'
# Use the GPU when available, otherwise fall back to CPU.
arg_dict['device'] = torch.device("cuda" if torch.cuda.is_available() else "cpu")
arg_dict['n_gpu'] = torch.cuda.device_count()
arg_dict['db_env'] = 'discovery_dev_clean'
# Setup logging
logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
                    datefmt = '%m/%d/%Y %H:%M:%S',
                    level = logging.INFO)
logger.warning("Process rank: %s, device: %s, distributed training: %s, 16-bits training: %s",
               -1, arg_dict['device'], bool(-1 != -1), '')
# (my_model, my_tokenizer) = load_model(arg_dict)
# run the script in a REPL loop until the user types 'exit'
query = ''
while(query!='exit'):
    query = input("Type something to test this out: ")
    print("main:: Ma requete: ", query)
    # pass the full query through the stopword filter
    keywords_list = build_keywords_list(arg_dict['stopwords_csv'], arg_dict['language'], query)
    print('main:: keywords_list {}'.format(keywords_list))
    # query the IR and fetch the database outputs for the filtered query
    my_es_dict = request_from_db(keywords_list.strip(), arg_dict['db_env'])
    # df = gen_sentence_for_classifier(my_es_dict, query)
    # print(df['seed_url'])
    # if(my_es_dict['name']):
    # my_predict = predict_ontime(arg_dict, my_model, my_tokenizer, gen_sentence_for_classifier(my_es_dict, query), prefix="")
    # for i in range(len(my_predict['pred_explicit_val'])):
    # print('main:: my_predict[pred_explicit_val][{}]: {} / my_predict[seed_value][{}]: {}\n'.format(i, my_predict['pred_explicit_val'][i], i, my_predict['seed_value'][i]))
    for i in range(len(my_es_dict['seed_url'])):
        print('main:: my_es_dict[name][{}]: {} / my_es_dict[seed_url][{}]: {}\n'.format(i, my_es_dict['name'][i], i, my_es_dict['seed_url'][i]))
|
def sqroot(num):
    """Print the square root of *num*; integers only (rejects floats/bools)."""
    # type() check (not isinstance) deliberately rejects bool as well.
    if type(num) == int:
        # Ported from Python 2 print statements to the print() function.
        print('The square root of the number is ', num**0.5)
    else:
        print('Please Enter integers')
def addition(num1, num2):
    """Print the sum of two integers; rejects non-int arguments."""
    if type(num1) == int and type(num2) == int:
        # Ported from Python 2 print statements to the print() function.
        print('The sum of the numbers is :', num1 + num2)
    else:
        print('Please Enter integers')
def subtraction(num1, num2):
    """Print num1 - num2, warning when the result is negative; integers only."""
    if type(num1) == int and type(num2) == int:
        # BUGFIX: the original's `else` was bound to the inner `if(num1<num2)`,
        # so every non-negative result ALSO printed 'Please Enter integers'.
        if num1 >= num2:
            print('The difference of the numbers is :', num1 - num2)
        else:
            print('The difference of the numbers is negative:', num1 - num2)
    else:
        print('Please Enter integers')
def multiplication(num1, num2):
    """Print the product of two integers; rejects non-int arguments."""
    if type(num1) == int and type(num2) == int:
        # Ported from Python 2 print statements to the print() function.
        print('The multiplication of the numbers is :', num1 * num2)
    else:
        print('Please Enter integers')
def division(num1, num2):
    """Print num1 / num2 as a float, guarding against division by zero; integers only."""
    if type(num1) == int and type(num2) == int:
        if num2 == 0:
            print('Second number is zero, Please Enter a different number to avoid ZeroDivisionError')
        else:
            # Ported from Python 2 print statements to the print() function.
            print('The division of the numbers is :', (num1 / float(num2)))
    else:
        print('Please Enter integers')
|
"""
Algoritmo para realizar la suma aritmetica del 1 al n
"""
"""
Aqui puedes cambiar el valor de n
"""
n = 1000000000
suma = 00
suma2 = 00
"""
Algoritmo 1. Se utiliza la formula (n * (n + 1))/2 para realizar la suma
"""
suma = (n * (n + 1)) / 2
print 'La suma es', suma
"""
Algoritmo 2. Se utiliza un ciclo for para realizar la suma
"""
for x in range(1, n + 1):
suma2 += x
print 'La suma es', suma2
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-11-30 06:34
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Alter DeployServer.deploy FK and DeployServer.status choices."""

    dependencies = [
        ('web', '0004_remove_deploytask_deploy_servers'),
    ]

    operations = [
        migrations.AlterField(
            model_name='deployserver',
            name='deploy',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.DeployTask', verbose_name='ๅๅธไปปๅก'),
        ),
        migrations.AlterField(
            model_name='deployserver',
            name='status',
            # NOTE(review): these labels are mojibake (mis-decoded UTF-8).
            # The first choice's literal had been split across two source
            # lines (a syntax error); it is re-joined here. Recover the
            # original text from upstream if possible.
            field=models.PositiveSmallIntegerField(choices=[(1, 'ๅพๅๅธ'), (2, 'ๅๅธไธญ'), (3, 'ๅคฑ่ดฅ'), (4, 'ๆๅ')], verbose_name='็ถๆ'),
        ),
    ]
|
import os
import numpy as np
from sklearn.ensemble import BaggingClassifier
from sklearn.model_selection import GridSearchCV, StratifiedKFold, train_test_split
from sklearn.neighbors import KNeighborsClassifier
from machine_learning.aux import directories
from machine_learning.aux.persist import save_model
from machine_learning.metrics import model_stats
def learn_bagging(stratified_data_csv_file, save_filepath):
    """Grid-search a BaggingClassifier, report held-out metrics, then refit
    on the full dataset and persist the model.

    :param stratified_data_csv_file: CSV with features in all columns but the
        last, label in the last column; header row is skipped.
    :param save_filepath: destination passed to save_model().
    """
    # read the stratified dataset
    data = np.genfromtxt(stratified_data_csv_file, delimiter = ',', skip_header = 1)
    X, y = data[:, :-1], data[:, -1]
    # do a 70-30 train-test split.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.30, random_state = 10)
    ############
    # testing parameters
    params = {
        'base_estimator': [None, KNeighborsClassifier()],
        'n_estimators': [10, 20, 30],
        'max_samples': [0.50, 0.75],
        'max_features': [0.50, 0.75]
    }
    # 10-fold stratified CV for the grid search; 3 parallel workers.
    stratified_k_fold = StratifiedKFold(n_splits = 10)
    classifier = GridSearchCV(BaggingClassifier(), params, cv = stratified_k_fold, verbose = 5, n_jobs = 3)
    classifier.fit(X_train, y_train)
    # Evaluate the best estimator on the held-out 30%.
    best_classifier = classifier.best_estimator_
    y_pred = best_classifier.predict(X_test)
    print('Bagging Classifier Statistics')
    print('Best params: {}'.format(classifier.best_params_))
    model_stats.compute_basic_stats(y_test, y_pred)
    model_stats.compute_roc_score(y_test, y_pred)
    model_stats.plot_normalized_confusion_matrix(
        y_test, y_pred, 'Bagging Classifier Normalized Confusion Matrix'
    )
    # fit the classifier on the complete dataset once we get best parameters
    best_classifier = BaggingClassifier(**classifier.best_params_)
    best_classifier.fit(X, y)
    # save the model
    save_model(best_classifier, save_filepath)
if __name__ == '__main__':
    # stage 1
    # learn_bagging(
    #     '/Users/gursimran/Workspace/active-scanning-cause-analysis/codebase/machine_learning/data/classifier_stage_1/training_dataset.csv',
    #     os.path.join(directories.stage_1_saved_models, 'bagging.pkl')
    # )
    # stage 2
    # NOTE(review): absolute, user-specific dataset path -- this only runs on
    # the original author's machine; consider a CLI argument.
    learn_bagging(
        '/Users/gursimran/Workspace/active-scanning-cause-analysis/codebase/machine_learning/data/classifier_stage_2/stratified_training_dataset.csv',
        os.path.join(directories.stage_2_saved_models, 'bagging.pkl')
    )
|
import os
from flask import render_template, url_for, flash,redirect, request
from flaskblog2 import app, db, bcrypt
from flaskblog2.forms import RegistrationForm, LoginForm,UpdateAccountForm,PostForm
from flaskblog2.models import User, Post
from flask_login import login_user, logout_user, current_user, login_required
@app.route('/')
@app.route('/home')
def home():
    """Render the landing page with every post."""
    posts = Post.query.all()
    context = {
        'posts': posts,
        'title': 'Home page',
        'home_title': 'Welcome to home tours',
        'content': 'Welcome to our site get the experience that you have never had',
    }
    return render_template('home.html', **context)
@app.route('/about')
def about():
    """Render the static about page."""
    context = {
        'title': 'About page',
        'home_title': 'Welcome to About page',
        'content': 'Welcome to our learn about us',
    }
    return render_template('about.html', **context)
@app.route('/register', methods=["GET", "POST"])
def register():
    """Sign-up: hash the password, create the User, then redirect to login."""
    # Already signed in -> nothing to register.
    if current_user.is_authenticated:
        return redirect(url_for('home'))
    form = RegistrationForm()
    if form.validate_on_submit():
        # bcrypt returns bytes; decode so the DB column stores text.
        pw_hash = bcrypt.generate_password_hash(form.password.data).decode("utf-8")
        user = User(username=form.username.data, email=form.email.data, password=pw_hash)
        db.session.add(user)
        db.session.commit()
        flash("Account created suceessful.You can now login",'success')
        return redirect(url_for('login'))
    # GET, or failed validation: (re-)render the form.
    return render_template('register.html', title='Registration page', form=form,home_title='Welcome !!Register here', content='Create an account and have an exclusive experience')
@app.route('/login', methods=["GET", "POST"])
def login():
    """Authenticate by email + bcrypt-checked password."""
    if current_user.is_authenticated:
        return redirect(url_for('home'))
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user and bcrypt.check_password_hash(user.password, form.password.data):
            login_user(user, remember=form.remember.data)
            flash('You have been logged in', 'success')
            return redirect(url_for('home'))
        else:
            # Same message for unknown email and wrong password.
            flash('login not successful. PLease check username and password', 'danger')
            return redirect(url_for('login'))
    return render_template('login.html', title='Login page', form=form,home_title='Welcome !!Login here', content='Get to learn many of the packages that we offer')
@app.route('/logout')
def logout():
    """End the current session and redirect to the home page."""
    logout_user()
    flash('Logout successful ', 'success')
    return redirect(url_for('home'))
def save_picture(form_picture):
    """Save an uploaded profile image under static/images/profile_imgs.

    Returns the filename stored on disk (persisted as User.image_file).
    """
    # Security fix: keep only the basename so a crafted upload name such as
    # '../../evil.png' cannot escape the upload directory (path traversal).
    base = os.path.basename(form_picture.filename)
    root, f_ext = os.path.splitext(base)
    pic_name = root + f_ext
    print(pic_name)
    # Join with os.path.join instead of string '+' concatenation.
    picture_path = os.path.join(app.root_path, "static/images/profile_imgs", pic_name)
    form_picture.save(picture_path)
    return pic_name
@app.route('/account', methods=['GET','POST'])
def account():
    """View/update the current user's profile (username, email, avatar).

    NOTE(review): unlike new_post, this route has no @login_required, so an
    anonymous visitor reaches current_user attribute access -- confirm
    whether that is intended.
    """
    form = UpdateAccountForm()
    if form.validate_on_submit():
        if form.picture.data:
            picture_file = save_picture(form.picture.data)
            current_user.image_file = picture_file
        current_user.username = form.username.data
        current_user.email = form.email.data
        db.session.commit()
        # POST/redirect/GET so a refresh does not resubmit the form.
        flash("Update account successful", 'success')
        return redirect(url_for('account'))
    # Pre-fill the form with the stored values (also runs on failed POST
    # validation, overwriting what the user typed -- TODO confirm intended).
    form.username.data = current_user.username
    form.email.data = current_user.email
    image_file = url_for('static', filename='images/profile_imgs/'+ current_user.image_file)
    print(current_user.image_file)
    return render_template('account.html', title='Account', image_file=image_file, form=form,home_title='As the admin what do have to offer', content='Make a good profile and market well')
def save_post_img(form_picture):
    """Save an uploaded post image under static/images/post_imgs.

    Returns the filename stored on disk.
    """
    # Security fix: keep only the basename so a crafted upload name such as
    # '../../evil.png' cannot escape the upload directory (path traversal).
    base = os.path.basename(form_picture.filename)
    root, f_ext = os.path.splitext(base)
    pic_name = root + f_ext
    # Join with os.path.join instead of string '+' concatenation.
    picture_path = os.path.join(app.root_path, "static/images/post_imgs", pic_name)
    form_picture.save(picture_path)
    return pic_name
@app.route('/post/new', methods=['GET','POST'])
@login_required
def new_post():
    """Create a post, optionally saving an uploaded image alongside it."""
    form = PostForm()
    if form.validate_on_submit():
        if form.post_image.data:
            # Save the upload under the client-supplied filename.
            # NOTE(review): duplicates save_post_img() and trusts the raw
            # upload name -- consider reusing the helper.
            picture = form.post_image.data
            _,f_ext = os.path.splitext(picture.filename)
            pic_name = _ + f_ext
            picture_path = os.path.join(app.root_path + "/static/images/post_imgs", pic_name)
            picture.save(picture_path)
            path = url_for('static', filename='images/post_imgs/' + pic_name )
            post = Post(title = form.title.data,content=form.content.data,post_image = path, author=current_user)
            db.session.add(post)
            db.session.commit()
            flash('Addition successful', 'success')
            return redirect(url_for('home'))
        else:
            # No image supplied: create the post with text only.
            post = Post(title = form.title.data,content=form.content.data, author=current_user)
            db.session.add(post)
            db.session.commit()
            flash('Addition successful', 'success')
            return redirect(url_for('home'))
    # NOTE(review): image_file is computed but never passed to the template.
    image_file = url_for('static', filename='images/profile_imgs/'+ current_user.image_file)
    return render_template('create_post.html', form=form, title="New post",home_title='Add content', content='Advertise the latest packages')
@app.route('/post/detail/<int:post_id>', methods=['GET','POST'])
def post_detail(post_id):
    """Show one post; on POST, update its title/content and optional image.

    NOTE(review): no existence check -- `post` is None for an unknown id --
    and no @login_required, so any visitor can edit; confirm both intended.
    """
    post = Post.query.filter_by(id=post_id).first()
    form = PostForm()
    if form.validate_on_submit():
        if form.post_image.data:
            # Replacement image, stored under the client-supplied name.
            picture_file = form.post_image.data
            _, f_ext = os.path.splitext(picture_file.filename)
            pic_name = _+f_ext
            picture_path = os.path.join(app.root_path + "/static/images/post_imgs", pic_name)
            picture_file.save(picture_path)
            path = url_for('static', filename='images/post_imgs/' + pic_name )
            post.post_image = path
            print(path)
        post.title = form.title.data
        post.content = form.content.data
        db.session.add(post)
        db.session.commit()
        flash('Update successful', 'success')
        return redirect(url_for('post_detail', post_id=post.id))
    else:
        # GET (or invalid POST): pre-fill the form from the stored post.
        form.title.data = post.title
        form.content.data = post.content
        # NOTE(review): image_file/pic_path are computed but never used.
        image_file = post.post_image
        _,f_ext = os.path.splitext(image_file)
        pic_path= _ +f_ext
    return render_template('post_detail.html',post=post, title="Detail page",form=form,home_title=post.title, content='Get to learn many of the packages that we offer')
@app.route('/post/delete/<int:post_id>')
def delete_post(post_id):
    """Delete the post with the given id and return to the home page."""
    # NOTE(review): no @login_required -- any visitor can delete; confirm.
    target = Post.query.filter_by(id=post_id).first()
    db.session.delete(target)
    db.session.commit()
    flash("Delete suceessful", 'success')
    return redirect(url_for('home'))
|
from django.shortcuts import render
from.serializers import TaskSerializers,UserSerializers
from.models import Task
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated,AllowAny
from django.contrib.auth import get_user_model
from rest_framework.generics import CreateAPIView
from rest_framework import generics
# Create your views here.
class TaskViewset(viewsets.ModelViewSet):
    """CRUD API for all tasks, newest first; requires authentication."""
    permission_classes = (IsAuthenticated,)
    queryset = Task.objects.all().order_by('-date_created')
    serializer_class = TaskSerializers
class DueTaskViewset(viewsets.ModelViewSet):
    """Tasks not yet completed, newest first.

    NOTE(review): no permission_classes here (unlike TaskViewset) -- this
    endpoint is open to anonymous users; confirm intended.
    """
    queryset = Task.objects.all().order_by('-date_created').filter(completed=False)
    serializer_class = TaskSerializers
class CreateuserView(CreateAPIView):
    """Open registration endpoint: create a user account (POST only)."""
    model = get_user_model()
    permission_classes = (AllowAny,)
    serializer_class = UserSerializers
class CompletedTaskViewset(viewsets.ModelViewSet):
    """Completed tasks, newest first.

    NOTE(review): no permission_classes -- open endpoint; confirm intended.
    """
    queryset = Task.objects.all().order_by('-date_created').filter(completed=True)
    serializer_class = TaskSerializers
class update(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve / update / delete a single Task by pk."""
    # Fixed: `queryset` must be a QuerySet, not the bare model class --
    # DRF's get_queryset()/filter backends expect a QuerySet and the bare
    # class only happens to work through get_object_or_404.
    queryset = Task.objects.all()
    serializer_class = TaskSerializers
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-


def build_words():
    """All repdigit strings in scan order: '1','11','111','1111','2',...,'9999'."""
    return [str(digit) * length for digit in range(1, 10) for length in range(1, 5)]


def position_value(s, words=None):
    """Cumulative character count up to and including the word equal to *s*.

    Returns None when *s* is not a repdigit of length 1..4 (the original
    script simply printed nothing in that case).
    """
    words = build_words() if words is None else words
    cnt = 0
    for k in words:
        cnt += len(k)
        if s == k:
            return cnt
    return None


def main():
    """Read n, then n strings; print each string's position value.

    Restructured behind a main() guard: the original ran input() at import
    time, which made the module unimportable/untestable.
    """
    n = int(input())
    words = build_words()
    for _ in range(n):
        value = position_value(input(), words)
        if value is not None:
            print(value)


if __name__ == "__main__":
    main()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration for the shop app: Collections, Goods, Images."""

    # First migration: nothing to depend on.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Collections',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=200)),
                # b'' literals: this migration was generated under Python 2.
                ('description', models.TextField(default=b'')),
                ('cover', models.ImageField(upload_to=b'covers/%Y_%m_%d')),
            ],
        ),
        migrations.CreateModel(
            name='Goods',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=200)),
                ('price', models.IntegerField(default=0)),
                # Each good belongs to exactly one collection.
                ('collection', models.ForeignKey(to='shop.Collections')),
            ],
        ),
        migrations.CreateModel(
            name='Images',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('alt', models.CharField(max_length=200)),
                ('src', models.ImageField(upload_to=b'goods/%Y_%m_%d')),
                # Each image belongs to exactly one good.
                ('good', models.ForeignKey(to='shop.Goods')),
            ],
        ),
    ]
|
#the application wrapper
# connect data grid with drawing context here...
import sys
import os
import time
import datetime
from progress.bar import Bar
from db import ConfUtil
from GraphContext import GraphCtx # plotting
from CsvContext import CsvContext #no ui mode
class Application:
    """
    Bring all blocks together.
    1. Load .json
    2. use arguments to select mode
    3. prepare csv builder
    4. begin loading data to DataGrid
    5. start plot or export needed data
    """
    def __init__(self, jsonfile):
        # Load configuration once; ConfUtil keeps it as module-level state.
        ConfUtil.load(jsonfile)
        self.headers = []       # not populated in this class
        self.graphs = []        # GraphCtx or CsvContext instances, one per file
        self.log_folder = None  # resolved in Run() from the loaded config
        ConfUtil.dumpall()      # presumably dumps the config for debugging -- TODO confirm

    def Run(self, uimode=True):
        """app runner: uimode=True plots via GraphCtx, else exports CSVs."""
        import Utils
        self.log_folder = ConfUtil.path()
        self.files = Utils.Utils.get_filesr(ConfUtil.path()) #array of files
        # Work relative to the log folder so contexts can use bare filenames.
        os.chdir(self.log_folder)
        # Two bar steps per file: one to load, one to plot/export.
        bar = Bar("Processing:", max=int(len(self.files) * 2))
        a = datetime.datetime.now()
        if uimode == True:
            GraphCtx.SIZE = len(self.files)
            GraphCtx.load()
            for l in self.files:
                bar.next()
                self.graphs.append(GraphCtx(l))
            for graph in self.graphs:
                bar.next()
                graph.plot_parsers()
            GraphCtx.plot_all()
        else: #no ui mode
            for l in self.files:
                bar.next()
                self.graphs.append(CsvContext(l))
            for gp in self.graphs:
                bar.next()
                gp.gen_csv()
        bar.finish()
        b = datetime.datetime.now()
        print ("\r\nTime elapsed : {}".format(b-a))
#application entry point - not unit test !
if __name__ == "__main__":
    print (os.name)  # show which platform we run on
    # The last CLI argument is the config file; fall back to the default.
    jsonfile = None
    if len(sys.argv) >= 2:
        jsonfile = sys.argv[len(sys.argv)-1]
    else:
        jsonfile = "db_ver1.json"
    app = Application(jsonfile)
    # NOTE(review): ORMMixer.UiMode comes from the loaded config; its
    # truthiness selects UI vs CSV mode -- confirm it is a bool.
    app.Run(ConfUtil.ORMMixer.UiMode)
|
import sys

# Advent of Code 2016 Day 1: input is one line of moves like "R2, L3".
input_file = 'Day 1\\Input.csv'
text_file = open(input_file)  # NOTE(review): never closed; fine for a one-shot script
lines = text_file.read().split(', ')
# lines = "R2, L3" # Answer = 5
# lines = "R2, R2, R2" # Answer = 2
# lines = "R5, L5, R5, R3" # Answer = 12
# lines = "R8, R4, R4, R8"

direction = []  # turn letters: 'L' or 'R'
blocks = []     # distance to walk after each turn
# for line in lines.split(', '): # For test cases
for line in lines: # For actual input
    direction.append(line[0])
    blocks.append(int(line[1:]))
def move_left(cardinal, x, y, blocks):
    """Turn left from *cardinal*, advance *blocks*; return (x, y, heading)."""
    # facing -> (dx, dy, new facing) after a left turn.
    left_turns = {
        "N": (-blocks, 0, "W"),
        "S": (blocks, 0, "E"),
        "E": (0, -blocks, "N"),
        "W": (0, blocks, "S"),
    }
    move = left_turns.get(cardinal)
    if move is None:
        # Unknown heading: leave position and heading unchanged.
        return x, y, cardinal
    dx, dy, heading = move
    return x + dx, y + dy, heading
def move_right(cardinal, x, y, blocks):
    """Turn right from *cardinal*, advance *blocks*; return (x, y, heading)."""
    # facing -> (dx, dy, new facing) after a right turn.
    right_turns = {
        "N": (blocks, 0, "E"),
        "S": (-blocks, 0, "W"),
        "E": (0, blocks, "S"),
        "W": (0, -blocks, "N"),
    }
    move = right_turns.get(cardinal)
    if move is None:
        # Unknown heading: leave position and heading unchanged.
        return x, y, cardinal
    dx, dy, heading = move
    return x + dx, y + dy, heading
def move_left_p2(cardinal, x, y, blocks):
    """Turn left and walk *blocks*, recording every square crossed.

    Mutates the module-level `visited` dict ((x, y) -> Manhattan distance);
    on the first square seen twice it prints the Part 2 answer and calls
    sys.exit(), so it never returns in that case.
    Returns (x, y, new_cardinal, visited).
    """
    if cardinal == "N":
        # Walk square by square so a crossing mid-move is detected.
        for i in range(x - 1, x - blocks - 1, -1):
            if (i, y) in visited:
                print("Part 2: " + str(abs(i) + abs(y)))
                sys.exit()
            else:
                visited[i, y] = abs(i) + abs(y)
        x -= blocks
        cardinal = "W"
    elif cardinal == "S":
        for i in range(x + 1, x + blocks + 1):
            if (i, y) in visited:
                print("Part 2: " + str(abs(i) + abs(y)))
                sys.exit()
            else:
                visited[i, y] = abs(i) + abs(y)
        x += blocks
        cardinal = "E"
    elif cardinal == "E":
        for i in range(y - 1, y - blocks - 1, -1):
            if (x, i) in visited:
                print("Part 2: " + str(abs(x) + abs(i)))
                sys.exit()
            else:
                visited[x, i] = abs(x) + abs(i)
        y -= blocks
        cardinal = "N"
    elif cardinal == "W":
        for i in range(y + 1, y + blocks + 1):
            if (x, i) in visited:
                print("Part 2: " + str(abs(x) + abs(i)))
                sys.exit()
            else:
                visited[x, i] = abs(x) + abs(i)
        y += blocks
        cardinal = "S"
    return x, y, cardinal, visited
def move_right_p2(cardinal, x, y, blocks):
    """Turn right and walk *blocks*, recording every square crossed.

    Mirror of move_left_p2: mutates the module-level `visited` dict and
    exits the program (after printing the answer) on the first revisit.
    Returns (x, y, new_cardinal, visited).
    """
    if cardinal == "N":
        # Walk square by square so a crossing mid-move is detected.
        for i in range(x + 1, x + blocks + 1):
            if (i, y) in visited:
                print("Part 2: " + str(abs(i) + abs(y)))
                sys.exit()
            else:
                visited[i, y] = abs(i) + abs(y)
            # print(i, y)
        x += blocks
        cardinal = "E"
    elif cardinal == "S":
        for i in range(x - 1, x - blocks - 1, -1):
            if (i, y) in visited:
                print("Part 2: " + str(abs(i) + abs(y)))
                sys.exit()
            else:
                visited[i, y] = abs(i) + abs(y)
            # print(i, y)
        x -= blocks
        cardinal = "W"
    elif cardinal == "E":
        for i in range(y + 1, y + blocks + 1):
            if (x, i) in visited:
                print("Part 2: " + str(abs(x) + abs(i)))
                sys.exit()
            else:
                visited[x, i] = abs(x) + abs(i)
            # print(x, i)
        y += blocks
        cardinal = "S"
    elif cardinal == "W":
        for i in range(y - 1, y - blocks - 1, -1):
            if (x, i) in visited:
                print("Part 2: " + str(abs(x) + abs(i)))
                sys.exit()
            else:
                visited[x, i] = abs(x) + abs(i)
            # print(x, i)
        y -= blocks
        cardinal = "N"
    return x, y, cardinal, visited
# Part 1
# Follow the whole instruction list; the answer is the Manhattan distance
# from the origin to the final position.
cur_x = 0
cur_y = 0
cardinal = "N"
for d in range(0, len(direction)):
    if direction[d] == "L":
        cur_x, cur_y, cardinal = move_left(cardinal, cur_x, cur_y, blocks[d])
    elif direction[d] == "R":
        cur_x, cur_y, cardinal = move_right(cardinal, cur_x, cur_y, blocks[d])
dist = abs(cur_x) + abs(cur_y)
print("Part 1: " + str(dist))
# Part 2
# Re-walk the path square by square; the *_p2 movers print the answer and
# sys.exit() as soon as any square is visited twice.
cur_x = 0
cur_y = 0
prev_x = 0  # NOTE(review): prev_x/prev_y are updated but never read
prev_y = 0
cardinal = "N"
visited = dict()  # (x, y) -> Manhattan distance from origin
for d in range(0, len(direction)):
    prev_x = cur_x
    prev_y = cur_y
    if direction[d] == "L":
        cur_x, cur_y, cardinal, visited = move_left_p2(cardinal, cur_x, cur_y, blocks[d])
    elif direction[d] == "R":
        cur_x, cur_y, cardinal, visited = move_right_p2(cardinal, cur_x, cur_y, blocks[d])
# Part 2 correct!
from .Section import *
from .ExperimentImages import *
class SectionImage(Section):
    """An experiment section of type IMAGE: a set of experiment images."""

    def __init__(self, api, data):
        """
        Internal use only: initialize section object
        """
        # Fixed guard: the original combined the checks with bitwise '&',
        # which binds tighter than 'not' and does not short-circuit, so
        # data.keys() was evaluated even when data was None (AttributeError)
        # and a dict missing "sectionType" fell through to the key access.
        if data is not None and isinstance(data, dict) and "sectionType" in data:
            if data["sectionType"]=="IMAGE":
                super().__init__(api, data)
            else:
                raise Exception("no image")
        else:
            raise Exception("no (valid) section data")

    def visualize(self):
        """
        Visualization: extend the base graph with one node per image.
        """
        g = super().visualize()
        with g.subgraph(name="cluster_content") as g_content:
            images = self.get().all()
            for imageID in images.index:
                # Fall back to the file name when no description was entered.
                image_description = images.loc[imageID]["description"] or images.loc[imageID]["realName"]
                image_name = images.loc[imageID]["realName"]
                with g_content.subgraph(name="cluster_image_"+str(imageID)) as g_image:
                    g_image.attr(labelloc="b", tooltip=image_description, label="experimentimageID "+str(imageID), style="filled", color="black", fillcolor="#EEEEEE")
                    g_image.node("image_"+str(imageID),image_name,{"tooltip": image_description, "shape":"rect", "style": "filled", "fillcolor": "white"})
                    g_content.edge("section_type","image_"+str(imageID),None,{})
        return(g)

    def show(self):
        """
        Show the content as inline HTML (base64-embedded JPEG thumbnails).
        """
        images = self.get().all()
        htmlCode = "<div style=\"border: 1px solid #067172; padding: 10px;\">"
        for imageID in images.index:
            image_description = images.loc[imageID]["description"] or images.loc[imageID]["realName"]
            image_name = images.loc[imageID]["realName"]
            # Render a 250px-wide thumbnail and embed it as base64 JPEG.
            buffered = BytesIO()
            self.image(imageID,250).save(buffered, format="JPEG")
            img = base64.b64encode(buffered.getvalue()).decode("ascii")
            htmlCode = htmlCode+"<figure style=\"display: inline-block; margin-right: 10px; margin-bottom: 10px;\">"
            htmlCode = htmlCode+"<img title=\""+html.escape(image_description)+"\" src=\"data:image/jpeg;base64,"+format(img)+"\">"
            htmlCode = htmlCode+"<figcaption>"+html.escape(image_description)+"</figcaption>"
            htmlCode = htmlCode+"</figure>"
        htmlCode = htmlCode + "</div>"
        return(HTML(htmlCode))

    def get(self):
        """
        Get the content of this section
        """
        return(ExperimentImages(self._eLABJournalObject__api, "Images", "/api/v1/experiments/sections/"+urllib.parse.quote(str(self.id()))+"/images", {}, "experimentFileID", 5, self.image))

    def image(self, id, *args, **kwargs):
        """
        Get image with the provided id (integer or string) for this section (only if section of type IMAGE).
        Parameters (key/value)
        ----------------------
        maxWidth : str, optional
            Maximum width image
        """
        # 'or' instead of bitwise '|' between booleans (same result, idiomatic).
        if isinstance(id,numbers.Integral) or isinstance(id,str):
            request = {}
            kwargs_special = []
            kwargs_keys = ["maxWidth"]
            if args is not None:
                for arg in args:
                    check_arg = arg
                    # A bare positional int/str is treated as maxWidth.
                    if isinstance(check_arg,numbers.Integral) or isinstance(check_arg,str):
                        request["maxWidth"] = str(check_arg)
                    else:
                        raise Exception("unsupported object '"+str(type(check_arg))+"'")
            if kwargs is not None:
                for key, value in kwargs.items():
                    if key in kwargs_special:
                        request["$"+key] = value
                    elif key in kwargs_keys:
                        request[key] = value
                    else:
                        raise Exception("unsupported key '"+key+"'")
            rp = self._eLABJournalObject__api._request("/api/v1/experiments/sections/"+urllib.parse.quote(str(self.id()))+"/images/"+urllib.parse.quote(str(id)), "get", request, stream=True)
            try:
                stream = BytesIO(rp.content)
                return(Image.open(stream))
            except:
                # Any decode failure yields None rather than propagating.
                return(None)
        else:
            raise Exception("incorrect call")
|
#!/usr/bin/env python
"""Run ./ml_por repeatedly and report how often its output says 'equal'."""
import subprocess


def main(n=100):
    """Execute the binary *n* times; count runs whose output contains 'equal'."""
    count = 0
    for i in range(n):
        # check_output returns *bytes* on Python 3, so search for a bytes
        # literal -- the original passed a str, raising TypeError.
        res = subprocess.check_output(["./ml_por", "a.param"])
        #print(res)
        if res.find(b"equal") != -1:
            # equals
            count += 1
        else:
            print("Failed at iteraction no. " + str(i))
    print("Worked: " + str(count) + "/" + str(n) + " (" + str(100.0*(0.0+count)/n) + "%)")


if __name__ == "__main__":
    main()
|
from pseudoQuicksort import quicksort

FILE = "file.txt"  # expected to contain one integer per line

# Read the integers from FILE into a list.
lst = []
with open(FILE) as file:
    for line in file:
        lst.append(int(line.replace('\n','')))
# Sort the list (presumably in place -- depends on pseudoQuicksort's
# contract; the result is not used afterwards here. TODO confirm).
quicksort(lst)
# SCons build script: builds the g4display support library.
Import('env')

# Source files that make up the library.
sources = Split("""
g4display.cc
utilities.cc
displayUI.cc
tabs/gcamera.cc
tabs/gslice.cc
""")

lib = env.Library(source = sources, target = "../lib/g4display")
|
class Solution(object):
    def getRow(self, rowIndex):
        """
        Return row *rowIndex* (0-indexed) of Pascal's triangle.

        :type rowIndex: int
        :rtype: List[int]
        """
        size = rowIndex + 1  # XXX: Quick and dirty fix. (off-by-one shim kept)
        # Build the row in place, updating right-to-left so every step only
        # reads values that still belong to the previous row.
        row = [1] * size
        for length in range(2, size):
            for pos in range(length - 1, 0, -1):
                row[pos] += row[pos - 1]
        return row
if __name__ == "__main__":
print(Solution().getRow(3))
|
def copy(L):
    """Return a shallow copy of *L* as a new list."""
    return [item for item in L]
def sort(N, L):
    """Bubble-sort the first *N* elements of *L* in place; return *L*."""
    pass_no = 0
    while pass_no < N:
        idx = 0
        # After each pass the largest of the first N floats to the top.
        while idx < N - pass_no - 1:
            if L[idx] > L[idx + 1]:
                L[idx], L[idx + 1] = L[idx + 1], L[idx]
            idx += 1
        pass_no += 1
    return L
def SynchronizingTables(N, ids, salary):
    """Reassign salaries so the i-th smallest salary goes to the employee
    with the i-th smallest id. Mutates *salary* in place and returns it.

    Rewritten from an O(N^2) bubble-sort/double-scan version (which also
    contained a no-op `continue` and mis-handled duplicate ids) to an
    O(N log N) argsort-based assignment with identical results for
    distinct ids.

    :param N: number of entries to process.
    :param ids: employee ids (length >= N).
    :param salary: salaries aligned with *ids*; modified in place.
    """
    # Indices of the first N entries, ordered by ascending id.
    order = sorted(range(N), key=lambda i: ids[i])
    # Salaries of the first N entries, ascending.
    ascending_salaries = sorted(salary[:N])
    # rank-th smallest salary -> employee with rank-th smallest id.
    for rank, idx in enumerate(order):
        salary[idx] = ascending_salaries[rank]
    return salary
#!/usr/bin/env python3
import os, sys
currentdir = os.path.dirname(os.path.realpath(__file__))
parentdir = os.path.dirname(currentdir)
sys.path.append(parentdir)
import json
from scripts.logger import log
from scripts import utils
def get_alerts_endpoint(namespace: str) -> (str):
    """Build the Prometheus /api/v1/alerts URL for *namespace*.

    Parses `oc get route` output: row 1, column 2 is taken as the route
    host -- assumes exactly that table layout; TODO confirm.
    """
    prometheus_route = utils.get_cmd_output(
        "oc get route -n" + namespace).split("\n")[1].split()[1]
    prometheus_alerts_endpoint = prometheus_route + "/api/v1/alerts"
    return prometheus_alerts_endpoint
def filter_alerts(severity:str, alerts:list) -> (list):
    """Return the alerts whose label severity matches; 'all' keeps everything."""
    if severity == "all":
        return alerts
    return [alert for alert in alerts if alert["labels"]["severity"] == severity]
def get_all_alerts(prometheus_alerts_endpoint:str) -> (list):
    """GET the alerts endpoint (via curl) and return the parsed alert list."""
    prometheus_json = json.loads(utils.get_cmd_output("curl -s " + prometheus_alerts_endpoint))
    return prometheus_json["data"]["alerts"]
def get_alerts(severity:str, namespace:str) -> (list):
    """Fetch the namespace's Prometheus alerts, filtered by *severity*."""
    log.info("Fetching alerts...")
    alerts = get_all_alerts(get_alerts_endpoint(namespace))
    if(len(alerts) == 0):
        return []
    return filter_alerts(severity, alerts)
def is_alert_present(alerts:list, alert_name:str):
    """Exit 0 if *alert_name* is among *alerts*, else log and exit 1.

    Always terminates the process; never returns normally.
    """
    for alert in alerts:
        if(alert["labels"]["alertname"] == alert_name):
            log.info("### SUCCESS ### Alert " + alert_name + " has been triggered")
            exit(0)
    log.error("### FAILED ### Alert " + alert_name + " has NOT been triggered")
    exit(1)
def main():
    """Entry point: count and log all alerts in the monitoring namespace."""
    alerts = get_alerts("all", "managed-services-monitoring-prometheus")
    log.info("Number of alerts: " + str(len(alerts)))

if __name__== "__main__":
    main()
|
from django.shortcuts import render
from django.views.generic import ListView, DetailView,CreateView,DeleteView,UpdateView
from .models import PnModel
from django.urls import reverse_lazy
# Create your views here.
class PnList(ListView):
    """List page for PnModel objects."""
    template_name = 'list.html'
    # Model to display, selected from models.py. (The original comment was
    # mis-encoded Japanese split across two lines, which made the module a
    # syntax error; it is rejoined and translated here.)
    model = PnModel
class PnDetail(DetailView):
    """Detail page for a single PnModel object."""
    template_name = 'detail.html'
    model = PnModel
class PnCreate(CreateView):
    """Create form for PnModel (title + cat); redirects to the list on success."""
    template_name = 'create.html'
    model = PnModel
    fields = ('title','cat')
    success_url = reverse_lazy('list')
class PnDelete(DeleteView):
    """Delete confirmation for PnModel; redirects to the list on success."""
    template_name = 'delete.html'
    model = PnModel
    success_url = reverse_lazy('list')
class PnTest(ListView):
    """Test/scratch list view rendering test.html."""
    template_name = 'test.html'
    model = PnModel
'''
Charlie has been given an assignment by his Professor to strip the links and the text name from the html pages.
A html link is of the form,
<a href="http://www.hackerrank.com">HackerRank</a>
Where a is the tag and href is an attribute which holds the link charlie is interested in. The text name is HackerRank.
Charlie notices that the text name can sometimes be hidden within multiple tags
<a href="http://www.hackerrank.com"><h1><b>HackerRank</b></h1></a>
Here, the text name is hidden inside the tags h1 and b.
Help Charlie in listing all the links and the text name of the links.
Input Format
The first line contains the number of lines in the fragment (N). This is followed by N lines from a valid HTML document or fragment.
Constraints
N < 100
Number of characters in the test fragments <= 10000 characters.
Characters will be restricted to ASCII. Fragments for the tests will be picked up from Wikipedia. Also, some tests might not have text or names on the links.
Output Format
If there are M links in the document, display each of them in a new line. The link and the text name must be separated by a "," (comma) with no spaces between them.
Strip out any extra spaces at the start and end position of both the link and the text name before printing.
link-1,text name-1
link-2,text name-2
link-3,text name-3
....
link-n,text name-M
Sample Input
Sample Input:1
2
<p><a href="http://www.quackit.com/html/tutorial/html_links.cfm">Example Link</a></p>
<div class="more-info"><a href="http://www.quackit.com/html/examples/html_links_examples.cfm">More Link Examples...</a></div>
Sample Input:2
13
<div class="portal" role="navigation" id='p-navigation'>
<h3>Navigation</h3>
<div class="body">
<ul>
<li id="n-mainpage-description"><a href="/wiki/Main_Page" title="Visit the main page [z]" accesskey="z">Main page</a></li>
<li id="n-contents"><a href="/wiki/Portal:Contents" title="Guides to browsing Wikipedia">Contents</a></li>
<li id="n-featuredcontent"><a href="/wiki/Portal:Featured_content" title="Featured content the best of Wikipedia">Featured content</a></li>
<li id="n-currentevents"><a href="/wiki/Portal:Current_events" title="Find background information on current events">Current events</a></li>
<li id="n-randompage"><a href="/wiki/Special:Random" title="Load a random article [x]" accesskey="x">Random article</a></li>
<li id="n-sitesupport"><a href="//donate.wikimedia.org/wiki/Special:FundraiserRedirector?utm_source=donate&utm_medium=sidebar&utm_campaign=C13_en.wikipedia.org&uselang=en" title="Support us">Donate to Wikipedia</a></li>
</ul>
</div>
</div>
Sample Output
Sample Output:1
http://www.quackit.com/html/tutorial/html_links.cfm,Example Link
http://www.quackit.com/html/examples/html_links_examples.cfm,More Link Examples...
Sample Output:2
/wiki/Main_Page,Main page
/wiki/Portal:Contents,Contents
/wiki/Portal:Featured_content,Featured content
/wiki/Portal:Current_events,Current events
/wiki/Special:Random,Random article
//donate.wikimedia.org/wiki/Special:FundraiserRedirector?utm_source=donate&utm_medium=sidebar&utm_campaign=C13_en.wikipedia.org&uselang=en,Donate to Wikipedia
'''
import re
def scrapeLinks(rawLinks):
    """Extract (scheme, rest-of-url, link text) tuples from each HTML line.

    Returns one list of regex matches per input line.
    """
    pattern = r"\ba href=\"([/]{1,2}|http://)(\S+)\".*?>(?:<[a-z]>)*([^<]+)?"
    return [re.findall(pattern, line) for line in rawLinks]
inpLen = int(input())  # number of HTML lines to read

# Collect the raw HTML fragment from stdin.
links = []
for i in range(0, inpLen):
    links.append(str(input()))

links = scrapeLinks(links)
# findall returns a list per line; flatten into one list of matches.
links = [x for sublist in links for x in sublist]
for i in links:
    # Each match is (scheme, rest-of-url, text); print "link,text".
    print(str(i[0]) + str(i[1]) + "," + str(i[2]).lstrip())
from django.db import models
from django.urls import reverse
# Create your models here.
class PageCategory(models.Model):
    """A named category grouping titles and blocks."""
    name = models.CharField(max_length=255, unique=True)

    def __str__(self):
        return self.name
class Title(models.Model):
    """A page title belonging to one category."""
    title = models.CharField(max_length=255)
    category = models.ForeignKey(PageCategory, on_delete=models.CASCADE)

    def __str__(self):
        return f"{self.title}: in {self.category} category"
class Block(models.Model):
    """A gallery tile (title, link, icon, background) in a page category."""
    title = models.CharField(max_length=255)
    url_to = models.CharField(max_length=255)
    icon = models.CharField(max_length=255)
    background = models.ImageField(upload_to="ImagesStudy/", null=True, blank=True)
    category = models.ForeignKey(PageCategory, on_delete=models.CASCADE)

    # CSS class names used as the tile's color choices.
    NOTHING = 'nnnttss'
    BLUE = 'gallery__item--purple'
    BLUESKY = 'gallery__item--blue'
    ORANGE = 'gallery__item--orange'
    # (Name inherited from a tutorial; these are color choices, not years.)
    YEAR_IN_SCHOOL_CHOICES = [
        (NOTHING, 'nnnttss'),
        (BLUE, 'gallery__item--purple'),
        (BLUESKY, 'gallery__item--blue'),
        (ORANGE, 'gallery__item--orange'),
    ]
    color = models.CharField(
        max_length=40,
        choices=YEAR_IN_SCHOOL_CHOICES,
        default=NOTHING,
    )

    def __str__(self):
        return f"{self.title}: in {self.category} category"
class Teacher(models.Model):
    """A teacher profile displayed on the site."""
    name = models.CharField(max_length=255)
    img = models.ImageField(upload_to="teacherImages/")
    experiense = models.CharField(max_length=255)  # (sic) renaming needs a migration
    education = models.CharField(max_length=255)
    description = models.TextField(null=True,blank=True)
    formAction = models.TextField(null=True, blank=True)

    def __str__(self):
        return f"{self.name}"

    def get_absolute_url(self):
        # Fixed: `args=(str(self.id))` is a plain string (no trailing comma),
        # and reverse() iterates it character by character -- breaking for
        # any id with more than one digit. args must be a sequence of args.
        return reverse('teacherUrl', args=(str(self.id),))
class Leeds(models.Model):
    """A sales lead: phone number plus optional name/notes/teacher link."""
    numberPhone = models.CharField(max_length=255)
    fullName = models.CharField(max_length=255,null=True,blank=True)
    description = models.CharField(max_length=255,null=True,blank=True)
    teacher = models.ForeignKey(Teacher, null=True, blank=True, on_delete=models.CASCADE)

    def __str__(self):
        return f"{self.numberPhone}: {self.fullName}"
class ClicksSocial(models.Model):
    """One row per click on a social-network link (category = which network)."""
    types_social = [
        ("instagram", 'instagram'),
        ("youtube", 'youtube'),
        ("telegram", 'telegram'),
        ("whatsapp", 'whatsapp'),
        ("facebook", 'facebook'),
    ]
    category_name = models.CharField(max_length=255, choices=types_social)

    def __str__(self):
        return self.category_name
|
import smtplib
#from twilio.rest import Client
#from twython import Twython
def gmail(msg, eml, pwd, teml):
    """Send *msg* through Gmail SMTP from account *eml*/*pwd* to *teml*."""
    connection = smtplib.SMTP('smtp.gmail.com', 587)
    connection.ehlo()
    connection.starttls()  # upgrade to TLS before sending credentials
    connection.login(eml, pwd)
    connection.sendmail('Notification', teml, msg)
    connection.close()
#def sms(sid,token,tno,fno,msg):
# account_sid = sid
# auth_token = token
# client = Client(account_sid, auth_token)
# message = client.api.account.messages.create(to=tno,from_=fno,body=msg)
#def twitter(msg,key,secret,token,token_secret):
# APP_KEY=key
# APP_SECRET=secret
# OAUTH_TOKEN=token
# OAUTH_TOKEN_SECRET=token_secret
#twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
#twitter.update_status(status=msg)
|
def make_tags(tag, word):
    """Wrap *word* in an opening and closing HTML *tag*."""
    opening = "<" + tag + ">"
    closing = "</" + tag + ">"
    return opening + word + closing
def cigar_party(cigars, is_weekend):
    """True when the party succeeds: 40+ cigars on a weekend, 40-60 otherwise."""
    if is_weekend == True:
        return cigars >= 40
    return 40 <= cigars <= 60
def sum2(nums):
    """Sum of the first two elements; single element or empty handled too."""
    if len(nums) >= 2:
        return nums[0] + nums[1]
    if nums:
        return nums[0]
    return 0
# Smoke-test calls, converted from Python 2 print statements (which are
# syntax errors under Python 3).
print(make_tags('i', 'Yay'))
print(cigar_party(50, False))
print(sum2([1, 2, 3]))
|
def f(a):
    """Print 'Hello World!' indented by *a* spaces, for a in 0..9 only."""
    if a in range(10):
        indent = ' ' * a
        print(indent + 'Hello World!')
# Print the greeting once per indent level 0-9.
for indent_level in range(10):
    f(indent_level)
|
import csv
import faker
import random
fake = faker.Faker()  # shared Faker instance used to generate employee names

# Closed value sets the generator draws from (indexed 0..4).
positions = ('Developer', 'Manager', 'Admin', 'Assistant', 'Analytic')
departments = ('Develop', 'QA', 'Maintenance', 'DevOps', 'EndUser service')
class Employee:
    """Represented single Employee object"""
    # Field annotations; order matches to_list()/to_object() and the CSV columns.
    fio: str          # full name
    Position: str
    Department: str
    Estimate: float
    Salary: int

    def __init__(self):
        """Constructor: fills every field with random data via the
        module-level `fake`, `positions` and `departments`."""
        self.fio = fake.name()
        self.Position = positions[random.randrange(0, 5, 1)]
        self.Department = departments[random.randrange(0, 5, 1)]
        self.Estimate = random.randrange(0, 5)
        self.Salary = random.randrange(50000, 150000, 100)

    def to_list(self):
        # Serialize in fixed column order for CSV output.
        return [self.fio, self.Position, self.Department, self.Estimate, self.Salary]

    def to_object(self, field_list: list):
        # Inverse of to_list(): populate this instance from one CSV row.
        self.fio = str(field_list[0])
        self.Position = str(field_list[1])
        self.Department = str(field_list[2])
        self.Estimate = float(field_list[3])
        self.Salary = int(field_list[4])
def get_report(employees_list: list) -> dict:
    """
    Gets list of employees and calculates metrics for every department as a dictionary record
    :param employees_list: list of employees to parse
    :return: dictionary with information about department metrics
    """
    # Each value: [name, headcount, min salary, max salary, salary sum (later avg)].
    report: dict = dict()
    for employ in employees_list:
        if employ.Department not in report.keys():
            # Seed with this employee's salary as both min and max.
            report.update({employ.Department: [employ.Department, 0, employ.Salary, employ.Salary, 0.0]})
        temp: list = report[employ.Department]
        temp = calculate_stats(temp, employ)
        report.update({employ.Department: temp})
    for k in report:
        # Turn the accumulated salary sum into a rounded average.
        report[k][4] /= report[k][1]
        report[k][4] = round(report[k][4], 2)
    return report
def calculate_stats(dep_stats: list, employ: Employee) -> list:
"""
Recalculate stats for department according with employee information
:param dep_stats: list with department stats to recalculate
:param employ: current employee
:return: recalculated stats for department
"""
dep_stats[1] += 1
if dep_stats[2] > employ.Salary:
dep_stats[2] = employ.Salary
if dep_stats[3] < employ.Salary:
dep_stats[3] = employ.Salary
dep_stats[4] = (dep_stats[4] + employ.Salary)
return dep_stats
def create_fake_data(size: int, path: str):
    """
    Generate Employee list of specified size and persist it as a CSV file
    :param path: path to create csv data file
    :param size: size of the list
    """
    staff = [Employee() for _ in range(size)]
    with open(path, 'w', newline='') as datafile:
        writer = csv.writer(datafile, delimiter=';')
        writer.writerows(member.to_list() for member in staff)
def csv_writer(data: dict, path: str, fieldnames: list):
    """
    Write selected data to CSV file by specified path
    :param fieldnames: headers of csv file
    :param data: data to write in file
    :param path: path to save file
    """
    with open(path, 'w', newline='') as output:
        writer = csv.writer(output, dialect='excel')
        writer.writerow(fieldnames)
        writer.writerows(data[key] for key in data)
def csv_reader(path: str):
    """
    Read CSV file and convert it to list of Employee objects
    :param path: path to datafile
    :return: list of employees
    """
    employees = []
    with open(path, 'r') as file_obj:
        for record in csv.reader(file_obj, delimiter=';'):
            employee = Employee()
            employee.to_object(field_list=record)
            employees.append(employee)
    return employees
def get_departments():
    """Print every known department name, one per line."""
    print('\n'.join(departments))
def chooser(var_list: list) -> str:
    """
    Prompt until the user enters one of the allowed variants
    :param var_list: variants of selection
    :return: selected variant
    """
    print('Select option: ')
    print(var_list)
    selected = input('Choose one:')
    while selected not in var_list:
        print('incorrect option')
        selected = input('Choose one:')
    return selected
if __name__ == '__main__':
    # Interactive driver. The menu steps are chained: to reach "Save to CSV"
    # the user must walk through the earlier options in order.
    variants = ['Departs', 'Create report', 'Save to CSV']
    headers = ['Department', 'Employees count', 'Minimal Salary', 'Maximal Salary', 'Average salary']
    report_dict: dict = dict()
    data_path = input('Input path to data:')
    # Generate 100 fake employees, persist them, then read them back in.
    create_fake_data(100, data_path)
    employee_list = csv_reader(data_path)
    ch = chooser(variants)
    if ch == variants[0]:
        get_departments()
        ch = chooser(variants)
    if ch == variants[1]:
        report_dict = get_report(employee_list)
        for r in report_dict:
            print(report_dict[r])
        ch = chooser(variants)
    if ch == variants[2]:
        # NOTE(review): if the report step was skipped, report_dict is empty here.
        report_path = input('Path to output file:')
        csv_writer(report_dict, report_path, headers)
    print('That`s All Folks!')
    exit(0)
|
import tensorflow as tf
import pandas as pd
import numpy as np
# Load a two-column, headerless CSV: column 0 is the input, column 1 the target.
data = pd.read_csv('indices.csv', delimiter=',', header=None).to_numpy()
x_train, y_train = data[:, 0], data[:, 1]
# Evaluate a previously saved Keras model on this data and print the result
# (loss, plus any metrics the model was compiled with).
model = tf.keras.models.load_model('model.h5')
print(model.evaluate(x_train, y_train))
|
"""scanless.exceptions"""
class ScannerNotFound(Exception):
    """Raised when a requested scanner is not available."""
    pass
class ScannerRequestError(Exception):
    """Raised when a request made to a scanner fails."""
    pass
|
# Doubly Linked List
# Triple: [value, triple_or_none, triple_or_none]
# Raymond <--> Rachel <--> Matthew
VALUE, PREV, NEXT = 0, 1, 2  # named indices into each three-element node
a = ['Raymond', None, None]
# Each chained assignment first builds the node (with its PREV back-link),
# then binds the name, then links it in as the NEXT of the previous node.
b = a[NEXT] = ['Rachel', a, None] # 1st make list, 2nd assign b, 3rd a[NEXT]
c = b[NEXT] = ['Matthew', b, None]
# TODO: Write a recursive algorithm that walks backward
|
import pyowm
# NOTE(review): hard-coded API key checked into source — move to an env var/config.
owm=pyowm.OWM('fa8be47e49ac1dc839e726120dda317b')
mgr=owm.weather_manager()
place=(input("Enter the city name : "))
obs=mgr.weather_at_place(place)
weather=obs.weather
# Same observation queried in both unit systems.
temp_f=weather.temperature(unit='fahrenheit')['temp']
temp_c=weather.temperature(unit='celsius')['temp']
print(f'The Temperature of {place} is {temp_f} Fahrenheit and {temp_c} Celsius')
#input-Enter the city name : Kolkata
#output-The Temperature of Kolkata is 91.4 Fahrenheit and 33.0 Celsius
from django import forms
from django.contrib.auth.models import User
from django.forms import inlineformset_factory
from .models import Post, Photo
from django.forms import formset_factory
MAX_PHOTOS = 10
class PostForm(forms.ModelForm):
    """ModelForm for Post; only the description field is user-editable."""
    class Meta:
        model = Post
        fields = ['description']
class UserForm(forms.ModelForm):
    """ModelForm for the built-in User model; renders the password masked."""
    password = forms.CharField(widget=forms.PasswordInput)
    class Meta:
        model = User
        fields = ['username', 'password']
|
from flask import Flask
from flask import Blueprint
from flask import request
from flask import jsonify
from flaskext.mysql import MySQL
from flask_cors import CORS, cross_origin
app= Flask(__name__)
mysql=MySQL()
# NOTE(review): credentials are hard-coded ('admi' looks like a truncated
# 'admin' — confirm) and belong in configuration, not in source control.
app.config['MYSQL_DATABASE_USER'] ='root'
app.config['MYSQL_DATABASE_PASSWORD'] ='admi'
app.config['MYSQL_DATABASE_DB'] ='proyecto'
app.config['MYSQL_DATABASE_HOST'] ='127.0.0.1'
mysql.init_app(app)
# NOTE(review): one module-level connection/cursor is shared by all requests;
# consider opening a connection per request for concurrency safety.
conn =mysql.connect()
cursor=conn.cursor()
# Blueprint registered by the main application for the 'profesor' endpoints.
profesor_blueprint = Blueprint('profesor_blueprint', __name__)
# CHANGE(2)  (original author's marker; Spanish "CAMBIAR")
@profesor_blueprint.route('/create_profesor',methods=['POST'])
def create_profesor():
    """Insert a new 'profesor' row from the posted JSON and echo it back."""
    print(request.json)
    field_names = ('idprofesor', 'Nombre', 'ApellidoPaterno',
                   'ApellidoMaterno', 'DNI', 'telefono')
    # Pull exactly the expected columns out of the request body.
    params = {name: request.json[name] for name in field_names}
    # Parameterised query: the driver binds the values, nothing is interpolated.
    query="""insert into profesor (idprofesor, Nombre, ApellidoPaterno, ApellidoMaterno, DNI, telefono)
    values (%(idprofesor)s, %(Nombre)s,%(ApellidoPaterno)s,%(ApellidoMaterno)s,%(DNI)s,%(telefono)s)"""
    cursor.execute(query,params)
    conn.commit()
    # Echo the inserted record back to the client.
    return jsonify(dict(params))
import os, sys
import rasterio
import pandas as pd
import geopandas as gpd
import numpy as np
from scipy import interpolate
from rasterio import features
from rasterio.mask import mask
from rasterio.features import rasterize
from rasterio.warp import reproject, Resampling
def rasterize_od_results(inD, outFile, field):
    ''' Convert gridded point data frame to raster of commensurate size and resolution
    INPUT
    inD [ geopandas data frame ] - OD matrix as point data frame
    outFile [ string ] - path to save output raster
    field [ string ] - field to rasterize
    RETURNS
    None
    '''
    #create grid from input shapefile
    # get xs, ys, and values from origin points
    xs = np.array(inD.geometry.apply(lambda p: p.x))
    ys = np.array(inD.geometry.apply(lambda p: p.y))
    vals = np.array(inD[field])
    # creates a full grid for the entire bounding box (all pairs of xs and ys)
    unique_xs = np.unique(xs)
    unique_ys = np.unique(ys)
    xx, yy = np.meshgrid(unique_xs, unique_ys)
    # this creates a new set of values to fill the grid
    grid_array = interpolate.griddata((xs,ys), vals, (xx, yy))
    x_pixels = grid_array.shape[1]
    y_pixels = grid_array.shape[0]
    # get the right transformation for raster file
    xRes = (xx.max() - xx.min()) / len(unique_xs)
    yRes = (yy.max() - yy.min()) / len(unique_ys)
    # get the right transformation for raster file
    # NOTE(review): both x bounds subtract xRes/2 (the right edge uses
    # max - xRes/2, not max + xRes/2); a half-pixel-centred extent would
    # normally add on the max side — confirm this is intentional.
    trans = rasterio.transform.from_bounds(xx.min() - (xRes/2), yy.min() - (yRes/2),
                                           xx.max() - (xRes/2), yy.max() - (yRes/2),
                                           x_pixels - 1, y_pixels - 1)
    new_dataset = rasterio.open(
            outFile, 'w', driver = 'GTiff',
            height = y_pixels, width = x_pixels,
            count=1, dtype=str(grid_array.dtype),
            crs=inD.crs,
            transform=trans
    )
    # Burn the raw point values onto the grid; the interpolated grid_array is
    # only used here for its shape and dtype.
    shapes = ((row.geometry,row[field]) for idx, row in inD.iterrows())
    burned = features.rasterize(shapes=shapes, fill=0, out_shape=grid_array.shape, transform=new_dataset.transform)
    burned = burned.astype(grid_array.dtype)
    new_dataset.write_band(1, burned)
    new_dataset.close()
#!/usr/bin/python2.7
import datetime
import json
import time
import webapp2
import models
class GetLatest(webapp2.RequestHandler):
    """Return a JSON array holding each reporter's most recent report."""
    def get(self):
        # Kick off one datastore query per reporter: newest report first,
        # limited to a single result.
        queries = []
        for reporter in models.Reporter.all(keys_only=True):
            queries.append(
                models.Report.all()
                .filter('reporter =', reporter)
                .order('-received')
                .run(limit=1))
        reports = []
        for query in queries:
            for value in query:
                reports.append(value.report)
        # Reports are stored as pre-serialised JSON strings, so the array is
        # assembled by string concatenation rather than json.dumps.
        self.response.out.write('[' + ','.join(reports) + ']')
class Put(webapp2.RequestHandler):
    """Accept a scan report, update per-reporter/per-BSSID counters, store it."""
    def post(self):
        parsed = json.loads(self.request.body)
        # Annotate the report with server-side receive metadata.
        parsed['server'] = {
            'received': int(time.time()),
            'from_ip': self.request.remote_addr,
        }
        reporter = models.Reporter.get_or_insert(parsed['local']['address'])
        reporter.num_reports += 1
        # Python 2: iteritems().
        for ssid, ssid_info in parsed['ssids'].iteritems():
            for bssid, bssid_info in ssid_info['bssids'].iteritems():
                # NOTE(review): the loop variable 'bssid' (the key) is rebound
                # here to the model entity, shadowing the original string.
                bssid = models.BSSID.get_or_insert(bssid)
                bssid.num_reports += 1
                if 'connection' in bssid_info:
                    connection = bssid_info['connection']
                    if connection['status'] == 'ok':
                        reporter.num_conns_success += 1
                        bssid.num_conns_success += 1
                        bssid.last_success = datetime.datetime.now()
                        bssid.mbit_rate = connection['mbit_rate']
                        bssid.connect_time = connection['connect_time']
                        bssid.dhcp_time = connection['dhcp_time']
                        bssid.ipv6_time = connection['ipv6_time']
                    else:
                        reporter.num_conns_failure += 1
                        bssid.num_conns_failure += 1
                bssid.save()
        reporter.save()
        # Keep the full raw report alongside the aggregated counters.
        models.Report(
            reporter=reporter,
            report=json.dumps(parsed)
        ).save()
        json.dump({
            'status': 'ok',
        }, self.response.out)
class PutMonitoring(webapp2.RequestHandler):
    """Accept a monitoring (ping statistics) report and store it."""
    def post(self):
        parsed = json.loads(self.request.body)
        # Annotate the report with server-side receive metadata.
        parsed['server'] = {
            'received': int(time.time()),
            'from_ip': self.request.remote_addr,
        }
        reporter = models.Reporter.get_or_insert(parsed['local']['address'])
        reporter.num_monitors += 1
        # Latest ping statistics overwrite the previous values on the reporter.
        reporter.rtt_min = parsed['ping']['rtt_min']
        reporter.rtt_max = parsed['ping']['rtt_max']
        reporter.rtt_mean = parsed['ping']['rtt_mean']
        reporter.rtt_median = parsed['ping']['rtt_median']
        reporter.ping_timeouts = parsed['ping']['timeouts']
        reporter.save()
        models.Monitoring(
            reporter=reporter,
            report=json.dumps(parsed)
        ).save()
        json.dump({
            'status': 'ok',
        }, self.response.out)
# URL routing table for the WSGI application.
app = webapp2.WSGIApplication([
    ('/api/getLatest', GetLatest),
    ('/api/put', Put),
    ('/api/putMonitoring', PutMonitoring),
])
|
import numpy as np
# numba (and the numba-backed isocut5) are optional; the test below becomes a
# no-op when they are missing.
try:
    import numba
    from spikeinterface.sortingcomponents.clustering.isocut5 import isocut5
    HAVE_NUMBA = True
except ImportError:
    HAVE_NUMBA = False
def test_isocut5():
    """Check isocut5 dip scores/cutpoints against the MATLAB reference values."""
    print("hi", HAVE_NUMBA)
    if not HAVE_NUMBA:
        return
    # test cases generated by calling the matlab implementation
    dipscore, cutpoint = isocut5(np.array([0, 1, 1, 2]))
    assert dipscore == 0
    assert cutpoint == 1.5
    # Unimodal sample: expect no dip (score 0).
    z = np.array(
        [
            0.3012,
            0.4709,
            0.2305,
            0.8443,
            0.1948,
            0.2259,
            0.1707,
            0.2277,
            0.4357,
            0.3111,
        ]
    )
    dipscore, cutpoint = isocut5(z)
    assert dipscore == 0
    assert np.abs(cutpoint - 0.2685) < 0.001
    # Clearly bimodal sample (two clusters around +5 and -5): expect a dip
    # with the cutpoint near zero.
    t = np.array(
        [
            5.32339539, 3.72514244, 6.06074439, 4.72039874, 5.28503105, 5.43523798,
            3.52389432, 4.90105762, 6.04214636, 5.22912762, 5.62638777, 4.37264862,
            5.35692268, 5.85697279, 5.90419288, 3.42153125, 4.83902449, 3.86442812,
            4.3487476, 5.02337161, 6.43031621, 5.20781653, 6.50916649, 3.49466062,
            6.10005906, 4.97149243, -6.68120837, -3.77983954, -4.48224226, -4.43533774,
            -5.28264468, -5.10141726, -4.72195037, -4.30517775, -3.61894388, -3.22391089,
            -4.93935508, -4.13938463, -5.71609654, -4.03895828, -4.86202822, -4.03284017,
            -6.78072313, -5.55439593, -4.5964243, -6.23230336, -5.28762918, -6.78708774,
            -4.2274206, -5.20307468, -6.86372755, -6.67979293,
        ]
    )
    dipscore, cutpoint = isocut5(t)
    assert np.abs(dipscore - 1.4456) < 0.001
    assert np.abs(cutpoint - 0.9920) < 0.001
if __name__ == "__main__":
    test_isocut5()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Image
from .widgets import CropExample
from opps.core.widgets import OppsEditor
class ImageModelForm(forms.ModelForm):
    """Admin form for Image: the CropExample widget drives four hidden
    crop-coordinate fields (x1/x2/y1/y2)."""
    crop_example = forms.CharField(label=_('Crop Example'), required=False,
                                   widget=CropExample())
    crop_x1 = forms.CharField(label=_(u'Crop X1'), required=False,
                              widget=forms.HiddenInput())
    crop_x2 = forms.CharField(label=_(u'Crop X2'), required=False,
                              widget=forms.HiddenInput())
    crop_y1 = forms.CharField(label=_(u'Crop Y1'), required=False,
                              widget=forms.HiddenInput())
    crop_y2 = forms.CharField(label=_(u'Crop Y2'), required=False,
                              widget=forms.HiddenInput())
    class Meta:
        model = Image
        # NOTE(review): no 'fields'/'exclude' declared; Django >= 1.8 raises
        # ImproperlyConfigured for this — confirm the targeted Django version.
        widgets = {'description': OppsEditor()}
|
#!/usr/bin/python3
""" test state file """
from models.state import State
import unittest
from models.city import City
from models.base_model import BaseModel
from sqlalchemy.orm.collections import InstrumentedList
from datetime import datetime
import models
from models.engine.db_storage import DBStorage
from models.engine.file_storage import FileStorage
import os
import pep8
from time import sleep
from tests.test_models.test_base_model import TestBaseModel
NoneType = type(None)  # convenient alias for type checks against None
class TestState(unittest.TestCase):
    """Unit tests for the State model."""
    @classmethod
    def setUpClass(cls):
        """Runs once before the tests in this class; starts from a clean store."""
        try:
            os.remove("file.json")
        except IOError:
            pass
        FileStorage._FileStorage__objects = {}
    @classmethod
    def tearDownClass(cls):
        """Runs once after all tests in this class."""
        try:
            os.remove("file.json")
        except IOError:
            pass
    def setUp(self):
        """Runs before each test."""
        pass
    def tearDown(self):
        """Runs after each test."""
        pass
    def test_name3(self):
        """A fresh State has no name set (None)."""
        new = State()
        self.assertEqual(type(new.name), NoneType)
    def test_pep8_Review(self):
        """Tests pep8 style"""
        style = pep8.StyleGuide(quiet=True)
        p = style.check_files(['models/state.py'])
        self.assertEqual(p.total_errors, 0, "fix pep8")
    def test_checking_for_docstring_State(self):
        """checking for docstrings"""
        self.assertIsNotNone(State.__doc__)
    def test_attributes_State(self):
        """test State have attributes"""
        st = State()
        self.assertTrue(hasattr(st, '__tablename__'))
        self.assertTrue('id' in st.__dict__)
        self.assertTrue('created_at' in st.__dict__)
        self.assertTrue('updated_at' in st.__dict__)
        self.assertTrue(hasattr(st, 'name'))
    def test_attributes(self):
        """test for attributes type"""
        st = State()
        self.assertEqual(str, type(st.id))
        self.assertEqual(datetime, type(st.created_at))
        self.assertEqual(datetime, type(st.updated_at))
        self.assertTrue(hasattr(st, "name"))
    def test_is_subclass_State(self):
        """test if State is subclass of BaseModel"""
        st = State()
        self.assertTrue(issubclass(st.__class__, BaseModel), True)
    @unittest.skipIf(type(models.storage) is DBStorage, "Filestorage")
    def test_save_State(self):
        """save() must bump updated_at past created_at"""
        st = State()
        sleep(0.6)
        st.save()
        self.assertNotEqual(st.created_at, st.updated_at)
    @unittest.skipIf(type(models.storage) is DBStorage, "Filestorage")
    def test_to_dict(self):
        """to_dict() serialises id, class name and ISO-format timestamps"""
        st = State()
        st.save()
        state_dict = st.to_dict()
        self.assertEqual(dict, type(state_dict))
        self.assertEqual(st.id, state_dict["id"])
        self.assertEqual("State", state_dict["__class__"])
        self.assertEqual(st.created_at.isoformat(),
                         state_dict["created_at"])
        self.assertEqual(st.updated_at.isoformat(),
                         state_dict["updated_at"])
if __name__ == "__main__":
    # NOTE(review): defaultTest points at the imported TestBaseModel, not this
    # class — confirm that is intentional.
    unittest.main(defaultTest="TestBaseModel", exit=False)
|
from os.path import join
import flowiz as fz
import glob
import matplotlib.pyplot as plt
from tqdm import tqdm
import sys
dataset='Mushroom'
datatype=['', 'orig', 'tip', 'inter', 'gdci', ]
typeindex=int(sys.argv[1])  # variant index passed in as a command-line argument
filepathFlo='./dataset/flo/inference/run.epoch-0-flow-field/'
filepathPng='./dataset/VideoR/'+dataset+'/png/'+datatype[typeindex]+'/'
# Convert every optical-flow (.flo) file into a colour-coded PNG,
# numbered sequentially from 00001.
files = sorted(glob.glob(join(filepathFlo + '*.flo')))
with tqdm(total=len(files), ncols=60) as pbar:
    for fi in range(len(files)):
        pbar.update(1)
        img = fz.convert_from_file(files[fi])
        # plt.savefig(img)
        plt.imsave(join(filepathPng + ('%05d' % (fi+1)) + '.png'), img)
|
'''
Problem:
    Given a string, print every permutation of its characters.
    For example, input "abc" yields abc, acb, bac, bca, cab and cba.

Approach:
    Finding all permutations of a string breaks into two steps:
    1. Choose each character in turn to occupy the first position
       (conceptually swapping it with the character currently in front).
    2. Fix that first character and recursively permute the remaining
       characters.
    Repeating this recursively handles every position; the recursion
    terminates when the substring to permute has length 1.
'''
def permutation(data):
    """Return a list of all permutations of the string (or sequence) data.

    Each character is chosen in turn as the head, then the remainder is
    permuted recursively. Inputs of length <= 1 permute to themselves.
    """
    if len(data) <= 1:
        return [data]
    # Renamed from 'list' to avoid shadowing the builtin of the same name.
    results = []
    for i in range(len(data)):
        for tail in permutation(data[0:i] + data[i + 1:]):
            results.append(data[i] + tail)
    return results
# Renamed from 'list' to stop shadowing the builtin for the rest of the module.
perms = permutation('abc')
print(perms)
a = range(10)
print(a)  # NOTE: on Python 3 this prints the range object, not its elements
import logging
import requests
import base64
import time
class Server(object):
    """Thin HTTP client for the mlb.praetorian.com machine-learning challenge.

    Tracks the running score (wins, accuracy) and the last challenge binary.
    """
    url = 'https://mlb.praetorian.com'
    def __init__(self, log=None):
        """:param log: optional logger; defaults to a module-level logger."""
        self.session = requests.session()
        self.binary = None    # decoded challenge binary
        self.bin_b64 = None   # same binary, base64-encoded as received
        self.hash = None      # completion hash returned by the server
        self.wins = 0
        self.targets = []     # candidate answers for the current challenge
        self.accuracy = 0.0
        if log is not None:
            self.log = log
        else:
            self.log = logging.getLogger(__name__)
    def _request(self, route, method='get', data=None):
        """GET/POST route, retrying forever; sleeps 60s after an HTTP 429."""
        while True:
            # BUG FIX: if the request call itself raises (e.g. a connection
            # error), 'r' was previously unbound in the except block below.
            r = None
            try:
                if method == 'get':
                    r = self.session.get(self.url + route)
                else:
                    r = self.session.post(self.url + route, data=data)
                if r.status_code == 429:
                    raise Exception('Rate Limit Exception')
                elif r.status_code == 500:
                    raise Exception('Unknown Server Exception')
                elif r.status_code != 200:
                    self.log.info('Status Code: ' + str(r.status_code))
                return r.json()
            except Exception as e:
                self.log.error(e)
                # go home, you're drunk (sleep off the aggression)
                if r is not None and r.status_code == 429:  # Rate Limit Exception
                    self.log.info('Waiting 60 seconds before next request')
                    time.sleep(60)
    def get(self):
        """Fetch the next challenge; caches the targets and decoded binary."""
        r = self._request("/challenge")
        self.targets = r.get('target', [])
        self.bin_b64 = r.get('binary', '')
        self.binary = base64.b64decode(self.bin_b64)
        return r
    def post(self, target):
        """Submit an answer and update the running score from the response."""
        r = self._request("/solve", method="post", data={"target": target})
        self.log.debug("RESPONSE: " + str(r))
        self.wins = r.get('correct', 0)
        self.hash = r.get('hash', self.hash)
        self.ans = r.get('target', 'unknown')
        self.accuracy = r.get('accuracy', -1)
        return r
|
#-*- encoding=utf8 -*-
#!/usr/bin/env python
import sys, operator, string
# Input corpora: a comma-separated stop-word list and the novel to analyse.
path_to_stop_words = '../BasicData/stop_words.txt'
path_to_text = '../BasicData/Pride_And_Prejudice.txt'
def characters(filename):
    """Yield the characters of the named text file one at a time.

    Opens the file in a context manager so the handle is closed
    deterministically (the original relied on the GC to close it).
    """
    with open(filename) as f:
        for line in f:
            for c in line:
                yield c
def all_words(filename):
    """Yield lowercased alphanumeric words from the file, one at a time.

    Character-level state machine: start_char is True while scanning between
    words. A word is yielded when the first non-alphanumeric character after
    it is seen, so (visible from the code: yield only runs in the delimiter
    branch) a word that ends exactly at end-of-file is never yielded.
    """
    start_char = True
    for c in characters(filename):
        if start_char ==True:
            word = ""
            if c.isalnum():
                # first character of a new word
                word = c.lower()
                start_char = False
            else:
                pass
        else:
            if c.isalnum():
                word += c.lower()
            else:
                # word boundary reached: emit the completed word
                start_char = True
                yield word
def non_stop_words(filename):
    """Yield words from the file that are neither stop words nor single letters.

    The stop-word file is read once per call, inside a context manager so the
    handle is closed (the original leaked it), and single ASCII letters are
    added to the stop set.
    """
    with open(path_to_stop_words) as stops:
        stop_words = set(stops.read().split(',') + list(string.ascii_lowercase))
    for w in all_words(filename):
        # 'w not in' is the idiomatic form of the original 'not w in'.
        if w not in stop_words:
            yield w
def count_and_sort(filename):
    """Yield a frequency-sorted (word, count) list every 5000 words, then once
    at the end.

    Uses dict.items()/dict.get, which work on both Python 2 and 3; the
    original's iteritems() is Python-2 only.
    """
    freqs, i = {}, 1
    for w in non_stop_words(filename):
        freqs[w] = freqs.get(w, 0) + 1
        if i % 5000 == 0:
            yield sorted(freqs.items(), key=operator.itemgetter(1), reverse=True)
        i = i + 1
    yield sorted(freqs.items(), key=operator.itemgetter(1), reverse=True)
# Python 2 driver: print the top 25 words at each 5000-word checkpoint.
for word_freqs in count_and_sort(path_to_text):
    print "---------------------------------"
    for (w,c) in word_freqs[0:25]:
        print w , ' - ',c
|
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from method import *
# Three unit vectors (rows) to be rotated.
a = np.array([[1,0,0],[0,0,1],[0,1,0]])
# Rotation by pi/4 about the x-axis.
P1 = np.array([[1, 0, 0],\
[0,np.cos(np.pi/4),np.sin(np.pi/4)],\
[0,-np.sin(np.pi/4),np.cos(np.pi/4)]])
# NOTE(review): for a proper y-axis rotation the two sine entries below should
# have opposite signs; as written both are -sin, so P2 is not orthogonal —
# confirm this is intentional.
P2 = np.array([[np.cos(np.pi/4), 0,-np.sin(np.pi/4)],\
[ 0, 1, 0],\
[-np.sin(np.pi/4), 0, np.cos(np.pi/4)]])
P = np.dot(P1, P2)
print(P)
#print(np.linalg.inv(P))
# Apply the rotations in stages (P2 first, then P1) and in one combined step.
new_a0 = np.dot(P2, a.T)
new_a1 = np.dot(P1, new_a0)
new_a = np.dot(P, a.T)
print(new_a)
# Create the figure/axes for the 3D plot.
fig = plt.figure()
ax = Axes3D(fig)
# Label the axes.
ax.set_xlabel("X")
ax.set_ylabel("Y")
ax.set_zlabel("Z")
# Disassemble (from method.py) splits each vector set into X/Y/Z components.
X0, Y0, Z0 = Disassemble(new_a0)
X1, Y1, Z1 = Disassemble(new_a1)
X, Y, Z = Disassemble(new_a)
O = [0,0,0]
# red: after P2 only; green: after P1*P2 in stages; blue: after combined P.
ax.quiver(O, O, O, X0, Y0, Z0, length=1,color='red', normalize=True)
ax.quiver(O, O, O, X1, Y1, Z1, length=1,color='green', normalize=True)
ax.quiver(O, O, O, X, Y, Z, length=1,color='blue', normalize=True)
plt.show()
from PyQt5.QtWidgets import QWidget, QDialog, QInputDialog, QFileDialog
from PyQt5.QtWidgets import QPushButton
from PyQt5.QtGui import QImage, QPalette, QBrush
from PyQt5.QtCore import QSize
import os
import shutil
import blank_displaying
SCREEN_SIZE = [700, 700]
class AddCustomLevel(QDialog, QWidget):
    """Level-editor dialog: creates a level directory, its rooms, buttons and
    the riddle/answer files."""
    def __init__(self):
        super().__init__()
        self.initUI()
        self.cnt = 0      # number of rooms created so far
        self.btn_num = 0  # number of buttons created in the current room
    def initUI(self):
        """Build the window, background and the four action buttons."""
        self.setWindowTitle('ะ ะตะดะฐะบัะพั ััะพะฒะฝะตะน')
        self.setGeometry(0, 0, *SCREEN_SIZE)
        self.setFixedSize(*SCREEN_SIZE)
        oImage = QImage('main_menu_background')
        sImage = oImage.scaled(QSize(*SCREEN_SIZE))
        palette = QPalette()
        palette.setBrush(QPalette.Window, QBrush(sImage))
        self.setPalette(palette)
        self.btn_make_room = QPushButton('ะกะะะะะขะฌ ะะะะะะขะฃ', self)
        self.btn_make_room.move(400, 370)
        self.btn_make_room.resize(200, 100)
        self.btn_make_room.clicked.connect(self.make_room)
        # BUG FIX: this button previously reassigned self.btn_make_room,
        # clobbering the reference to the room button above.
        self.btn_make_button = QPushButton('ะกะะะะะขะฌ ะะะะะะฃ', self)
        self.btn_make_button.move(400, 500)
        self.btn_make_button.resize(200, 100)
        self.btn_make_button.clicked.connect(self.make_button)
        # NOTE(review): y positions 630/760 fall at or beyond the fixed
        # 700-px window height — confirm these buttons are meant to be visible.
        self.btn_delete_room = QPushButton('ะะะะะ ะจะะะะ ะกะะะะะะะฏ', self)
        self.btn_delete_room.move(400, 630)
        self.btn_delete_room.resize(200, 100)
        self.btn_delete_room.clicked.connect(self.finish_add)
        self.btn_exit = QPushButton('ะะะะ ะซะขะฌ', self)
        self.btn_exit.move(400, 760)
        self.btn_exit.resize(200, 100)
        self.btn_exit.clicked.connect(self.exit)
        self.make_directory()
    def make_directory(self):
        """Ask the user for a level name and create its directory and markers."""
        path, ok_pressed = QInputDialog.getText(self, "ะะฒะตะดะธัะต ะฝะฐะทะฒะฐะฝะธะต",
                                                "ะะฐะบ ะฑัะดะตั ะฝะฐะทัะฒะฐัััั ััะพะฒะตะฝั?")
        if ok_pressed:
            os.mkdir(path)
            with open('new_levels_info/level.txt', 'w', encoding='utf8') as output_file:
                output_file.write(path)
            with open('new_levels_info/chall_num.txt', 'w', encoding='utf8') as output_file:
                output_file.write('1')
    def make_room(self):
        """Create the next room folder with its background and button stubs."""
        self.cnt += 1
        path = f'room_{self.cnt}'
        with open('new_levels_info/level.txt', 'r', encoding='utf8') as input_file:
            level = input_file.readline()
        os.mkdir(os.path.join(level, path))
        with open('new_levels_info/file.txt', 'w', encoding='utf8') as output_file:
            output_file.write(path)
        with open(f'{level}/{path}/room_history.txt', 'w', encoding='utf8') as output_file:
            output_file.write('This room has no history')
        self.choose_background(os.path.join(level, path))
        self.config_buttons(os.path.join(level, path))
        self.btn_num = 1
    def choose_background(self, path):
        """Let the user pick an image and copy it in as the room background."""
        fname = QFileDialog.getOpenFileName(self, 'ะัะฑะตัะธัะต ัะพะฝ ะบะพะผะฝะฐัั', '')[0]
        shutil.copy(fname, f'{path}/background.png')
    def make_button(self):
        """Record the next button number and open the blank-editing dialog."""
        with open('new_levels_info/num.txt', 'w', encoding='utf8') as output_file:
            output_file.write(str(self.btn_num))
        self.blank = blank_displaying.AddCustomLevel()
        self.show()
        self.btn_num += 1
    def config_buttons(self, path):
        """Write five empty button description files for the room."""
        for num in range(1, 6):
            with open(f'{path}/button_{num}.txt', 'w', encoding='utf8') as stub_file:
                stub_file.write('0 0\n0 0\n\n\n\n')
    def finish_add(self):
        """Collect the riddle parts, derive the answer and register the level.

        The answer is the digit occurring most often across all riddle parts
        (last one wins on ties, matching the original scan order).
        """
        with open('new_levels_info/level.txt', 'r', encoding='utf8') as input_file:
            level = input_file.readline()
        with open('new_levels_info/chall_num.txt', 'r', encoding='utf8') as input_file:
            chall_num = input_file.readline()
        # Create the empty bookkeeping files for the level.
        # (A duplicate truncation of inventory.txt was removed here.)
        output_file = open(f'{level}/inventory.txt', 'w', encoding='utf8')
        output_file.close()
        output_file = open(f'{level}/file.txt', 'w', encoding='utf8')
        output_file.close()
        output_file = open(f'{level}/completed_tasks.txt', 'w', encoding='utf8')
        output_file.close()
        output_file = open(f'{level}/sequence_text.txt', 'w', encoding='utf8')
        output_file.close()
        output_file = open(f'{level}/rooms_history_used.txt', 'w', encoding='utf8')
        output_file.close()
        answer_d = {'0': 0, '1': 0, '2': 0, '3': 0, '4': 0, '5': 0, '6': 0, '7': 0, '8': 0, '9': 0}
        with open(f'{level}/challenge.txt', 'a', encoding='utf8') as output_file:
            for _ in range(int(chall_num) - 1):
                text, ok_pressed = QInputDialog.getText(self, "ะะฒะตะดะธัะต ัััะพะบั",
                                                        "ะะฐะบัั ัะฐััั ะฟะฐัะพะปั ััั ะฝะฐะนะดะตั ะฟะพะปัะทะพะฒะฐัะตะปั?")
                if ok_pressed:
                    output_file.write(text + '\n')
                    for sym in text:
                        answer_d[sym] += 1
        ans = 0
        for key in answer_d:
            ans = max(ans, answer_d[key])
        key_ans = ''
        for key in answer_d:
            if answer_d[key] == ans:
                key_ans = key
        with open(f'{level}/answer.txt', 'w', encoding='utf8') as output_file:
            output_file.write(key_ans)
        with open('unlock_levels.txt', 'a', encoding='utf8') as output_file:
            output_file.write('\n' + level)
        self.exit()
    def exit(self):
        """Close the dialog."""
        self.close()
|
import numpy as np
import scipy as sp
import fitsio as fi
import glob
import tools.covariance as cv
import tools.n_of_z as nz
# Lookup tables keyed by short datavector name ("xip"/"xim"):
dirs={"xip":"shear_xi/", "xim":"shear_xi/"}  # directory holding each measurement
# (x-axis file stem, datavector file stem) for each statistic
datavector_names={"xip":("theta","xiplus"), "xim":("theta","ximinus")}
realspace_lookup={"xip":True, "xim":True}  # real-space (theta) vs harmonic (ell)
# (correlation type, covariance statistic name) per datavector
corr={"xip":("ee","xi+"), "xim":("ee","xi-")}
header_quantity_names = {"xip":("G+R","G+R"), "xim":("G-R","G-R"), "gammat":("G+R","GPR"), "wtheta":("GPR","GPR")}
class fits_object():
    """Loads 2-point datavectors (plus optional n(z) and covariance) from text
    files and exports them as a single FITS file. (Python 2: uses print
    statements and iterator methods of that era.)"""
    def __init__(self,datavectors,covariance=None, nofz_shear=None, nofz_density=None, kernel_shear=None, kernel_density=None, verbosity=1, nongaussian=True):
        #Expects a list of datavectors eg ['xi+','xi-','gammat','wtheta']
        # And optionally a covariance wrapper object
        self.datavectors = datavectors
        self.update_kernel_names(kernel_shear,kernel_density)
        self.realspace = realspace_lookup[datavectors[0]]
        print "loading datavectors:"
        statistics=[]
        correlations=[]
        for name in datavectors:
            if verbosity==1:
                print name
            xname,basename = datavector_names[name]
            xpath = "%s/%s.txt"%(dirs[name],xname)
            x = np.loadtxt(xpath)
            setattr(self,xname,x)
            files = glob.glob("%s/%s*.txt"%(dirs[name],basename))
            for f in files:
                data = np.loadtxt(f)
                # Get the bin pairing from the filename
                i = int(f.split(".txt")[0].split("_")[-2])
                j = int(f.split(".txt")[0].split("_")[-1])
                setattr(self,"%s_%d_%d"%(name,i,j),data)
                if verbosity>1:
                    print name, i, j
            c, st = corr[name]
            correlations.append(c)
            statistics.append(st)
        if (nofz_shear is not None):
            self.nofz_shear = nz.sv_pz(nofz_shear,"skynet")
            self.nofz_shear.load_from_txt()
            self.do_nz=True
        if (nofz_density is not None):
            self.nofz_density = nz.sv_pz(nofz_density,"skynet")
            self.nofz_density.load_from_txt()
            self.do_nz=True
        if covariance is not None:
            if verbosity>0:
                print "Loading covariance matrix"
            correlations = list(np.unique(correlations))
            # This function handles a lot of the fiddly details of the covariance matrix
            # You don't really want to delve into that
            # But the covariance wrapper should take care of all the symmetries, so self.cov should
            # have all elements, even where duplicated elsewhere in the matrix
            cov = covariance.extract_all(correlations,statistics=statistics, verbosity=verbosity, nongaussian=nongaussian)
            setattr(self,"cov",cov)
            setattr(self,"covariance_source", covariance)
            self.get_sampling()
    def get_sampling(self):
        """Interpolate the datavectors to the same sampling in scale as the
        covariance matrix."""
        print "Interpolating to match the covariance matrix"
        if self.covariance_source.realspace:
            # radians -> arcmin conversion factor for the stored x values
            k = 60.0 * 180.0/np.pi
        else:
            k=1.0
        for name in self.datavectors:
            xname,basename = datavector_names[name]
            files = glob.glob("%s/%s*.txt"%(dirs[name],basename))
            if self.covariance_source.realspace:
                xname="theta"
            else:
                xname="ell"
            for f in files:
                i = int(f.split(".txt")[0].split("_")[-2])
                j = int(f.split(".txt")[0].split("_")[-1])
                x = getattr(self.covariance_source,xname)
                x0 = getattr(self,xname) * k
                y0 = getattr(self,"%s_%d_%d"%(name,i,j))
                y = np.interp(x,x0,y0)
                setattr(self,"%s_%d_%d"%(name,i,j),y)
            setattr(self,xname,x)
    def export(self, filename, verbosity=1):
        """Write everything loaded (datavectors, n(z), covariance) to a FITS
        file, one HDU per observable plus COV and kernel extensions."""
        fits = fi.FITS(filename, "rw", clobber=True)
        order={}
        dimension={}
        done_nz=[]
        if verbosity>0:
            print "Reformatting data for export to fits file."
        for obs in self.datavectors:
            if verbosity>0:
                print obs
            # Rearrange the data into the columns expected
            bin1, bin2, xbin, y, x = self.format_data(obs, verbosity)
            # Store this for the ordering of the covariance matrix
            order[obs] = zip(bin1,bin2)
            dimension[obs] = y.size
            if verbosity>1:
                print "writing to file..."
            # Extract the header information
            h = self.create_data_header(x,bin1,bin2,obs)
            out = self.write_for_export(bin1, bin2, xbin, y, x)
            fits.write(out, header=h)
            fits[-1].write_key("EXTNAME",obs)
            if verbosity>1:
                print "data"
            if self.do_nz:
                # Write each n(z) kernel at most once, even if shared by
                # several observables.
                k1,k2 = self.header_kernel_names[obs]
                out1, out2 = self.construct_pz_hdus(obs)
                if k1 not in done_nz:
                    fits.write(out1)
                    fits[-1].write_key("EXTNAME",k1)
                    done_nz.append(k1)
                if k2 not in done_nz:
                    fits.write(out2)
                    fits[-1].write_key("EXTNAME",k2)
                    done_nz.append(k2)
        # Also include the covariance matrix if we have one loaded
        if hasattr(self, "cov"):
            if verbosity>1:
                print "covariance"
            cov = self.construct_covariance_matrix(order)
            h = self.create_covariance_header(dimension)
            fits.write(cov, header=h)
            fits[-1].write_key("EXTNAME","COV")
        if verbosity>1:
            print "done"
        fits.close()
        return 0
    def construct_pz_hdus(self, obs):
        """Build the two n(z) record arrays (one per kernel) for an observable."""
        if obs in ["xip","xim","cl_ee"]:
            pz1 = self.nofz_shear
            pz2 = self.nofz_shear
        elif obs in ["wtheta","cl_nn"]:
            pz1 = self.nofz_density
            pz2 = self.nofz_density
        elif obs in ["gammat","cl_ne"]:
            pz1 = self.nofz_shear
            pz2 = self.nofz_density
        dt1 = [ ('Z_LOW', '>f8'), ('Z_MID', '>f8'), ('Z_HIGH', '>f8')] + [("BIN%d"%(i+1), ">f8") for i in xrange(len(pz1.pz))]
        dt2 = [ ('Z_LOW', '>f8'), ('Z_MID', '>f8'), ('Z_HIGH', '>f8')] + [("BIN%d"%(i+1), ">f8") for i in xrange(len(pz2.pz))]
        out1 = np.zeros(pz1.z.size, dtype=dt1)
        out2 = np.zeros(pz2.z.size, dtype=dt2)
        # If no bin edges were supplied, reconstruct uniform edges centred on
        # the tabulated z values.
        if pz1.edges.size!=pz1.z.size+1:
            dz = pz1.z[1]-pz1.z[0]/2
            pz1.edges=np.linspace(pz1.z.min()-dz, pz1.z.max()+dz, pz1.z.size+1 )
        if pz2.edges.size!=pz2.z.size+1:
            dz = pz2.z[1]-pz2.z[0]/2
            pz2.edges=np.linspace(pz2.z.min()-dz, pz2.z.max()+dz, pz2.z.size + 1 )
        out1["Z_LOW"],out1["Z_HIGH"],out1["Z_MID"] = pz1.edges[:-1], pz1.edges[1:], pz1.z
        out2["Z_LOW"],out2["Z_HIGH"],out2["Z_MID"] = pz2.edges[:-1], pz2.edges[1:], pz2.z
        for i, p in enumerate(pz1.pz):
            out1["BIN%d"%(i+1)] = p
        for j, p in enumerate(pz2.pz):
            out2["BIN%d"%(j+1)] = p
        return out1,out2
    def construct_covariance_matrix(self, bin_pairs):
        """Put together a covariance matrix already loaded into memory,
        taking a row of blocks at a time"""
        for count1, obs in enumerate(self.datavectors):
            done=[]
            for count2, (i,j) in enumerate(bin_pairs[obs]):
                if [(i,j)] in done: continue
                row = self.get_row(bin_pairs, obs, i, j)
                if count1+count2==0:
                    full_matrix = row
                else:
                    full_matrix = np.vstack((full_matrix,row))
                done.append([(i,j)])
        return full_matrix
    def get_row(self, bin_pairs, obs1, i, j):
        """Assemble one block-row of the covariance for bin pair (i, j) of
        observable obs1 by stacking blocks against every other observable."""
        for count1, obs2 in enumerate(self.datavectors):
            done=[]
            for count2, (k,l) in enumerate(bin_pairs[obs2]):
                if [(k,l)] not in done:
                    correlation1, obs_name1 = corr[obs1]
                    correlation2, obs_name2 = corr[obs2]
                    c = tuple(list(correlation1)+list(correlation2))
                    block = self.cov[(obs_name1,obs_name2)][c][(i,j,k,l)]
                    if count1+count2==0:
                        row= block
                    else:
                        row = np.hstack((row,block))
                    done.append([(k,l)])
        return row
    def write_for_export(self, bin1, bin2, xbin, y, x):
        """Pack the column arrays into the structured array the FITS HDU expects."""
        #There must be a neater way to do this.
        # We'll go with this for the moment because it works
        dt = np.dtype([('BIN1', '>i8'), ('BIN2', '>i8'), ('ANGBIN', '>i8'), ('VALUE', '>f8'), ('ANG', '>f8')])
        out=np.zeros(bin1.size, dtype=dt)
        out["BIN1"] = bin1
        out["BIN2"] = bin2
        out["ANGBIN"] = xbin
        out["VALUE"] = y
        out["ANG"] = x
        return out
    def create_covariance_header(self, dimension_lookup):
        """Header mapping each datavector name to its start index in COV."""
        header={}
        start=0
        for i,dat in enumerate(self.datavectors):
            header["NAME_%d"%i] = dat
            header["STRT_%d"%i] = start
            start += dimension_lookup[dat]
        return header
    def create_data_header(self,x,b1,b2,obs):
        """Header keywords for one datavector HDU (quantities, kernels, ranges)."""
        units = "arcmin"
        # Assume arcmin for now
        nz1,nz2 = np.unique(b1).size, np.unique(b2).size
        return {"2PTDATA":True, "QUANT1": header_quantity_names[obs][0], "QUANT2": header_quantity_names[obs][1], "KERNEL_1": self.header_kernel_names[obs][0], "KERNEL_2": self.header_kernel_names[obs][1] ,"WINDOWS": "SAMPLE", "N_ZBIN_1": nz1, "N_ZBIN_2": nz2, "ANG_MIN": x.min(), "ANG_MAX": x.max(), "N_ANG": np.unique(x).size, "TUNIT5": units}
    def format_data(self,statistic,verbosity):
        """Flatten every loaded <statistic>_i_j attribute into the parallel
        column arrays (bin1, bin2, angbin, value, ang)."""
        bin1=[]
        bin2=[]
        angbin=[]
        value=[]
        ang=[]
        for datav_name in dir(self):
            if statistic not in datav_name:
                continue
            else:
                if verbosity>1:
                    print datav_name
                i,j = int(datav_name.split("_")[-2]), int(datav_name.split("_")[-1])
                data = getattr(self,datav_name)
                nx = data.size
                xname = datavector_names[statistic][0]
                x = getattr(self,xname)
                bin1.append([i]*nx)
                bin2.append([j]*nx)
                angbin.append(list(np.linspace(0,nx-1,nx)))
                value.append(list(data))
                ang.append(list(x))
        return np.concatenate(bin1), np.concatenate(bin2), np.concatenate(angbin), np.concatenate(value), np.concatenate(ang)
    def update_kernel_names(self, kernel1, kernel2):
        """Record which n(z) kernel name goes with each side ('e'/'n') of every
        observable; defaults are nz_shape (shear) and nz_pos (density)."""
        self.header_kernel_names={}
        if kernel1 is None:
            kernel1 = "nz_shape"
        if kernel2 is None:
            kernel2 = "nz_pos"
        k={"e":kernel1,"n":kernel2}
        for obs_type in self.datavectors:
            c1,c2 = corr[obs_type][0]
            self.header_kernel_names[obs_type] = (k[c1],k[c2])
class generator:
    """Helpers that write simulation input files."""
    @staticmethod
    def nz(z, bins, filename):
        """Write an n(z) table: lower/upper bin edges plus one column per bin.

        Edges are the midpoints between the (assumed uniformly spaced) z
        samples, extended half a step beyond each end; the table is saved
        column-wise with numpy.savetxt.
        """
        midpoints = (z[1:] + z[:-1]) / 2.
        step = midpoints[1] - midpoints[0]
        edges = np.concatenate(([z[0] - step / 2], midpoints, [z[-1] + step / 2]))
        columns = [edges[:-1], edges[1:]]
        for histogram in bins:
            columns.append(histogram)
        np.savetxt(filename, np.vstack(columns).T)
def compare_cls(ell, cl1, cl2):
    """Plot two C(l) curves and their fractional difference, marking the
    first multipoles where the deviation exceeds 1/5/15/25 percent.

    NOTE(review): uses Python 2 print statements — this file targets
    Python 2.
    """
    import pylab as plt
    import numpy as np
    plt.subplot(211)
    # Plot l^2 C(l) for both spectra on log axes.
    plt.plot(ell, ell*ell*cl1)
    plt.plot(ell, ell*ell*cl2)
    plt.xlim(0.1,2500.)
    plt.xscale('log') ; plt.yscale('log')
    plt.subplot(212)
    # Fractional deviation of cl1 from cl2.
    diff = 1. - cl1/cl2
    i = np.argwhere(abs(diff) >= 0.01 )[0]
    ell0 = ell[i]
    print "1 percent deviation at ell=%e"%ell0
    i = np.argwhere(abs(diff) >= 0.05 )[0]
    ell1 = ell[i]
    print "5 percent deviation at ell=%e"%ell1
    i = np.argwhere(abs(diff) >= 0.15 )[0]
    ell2 = ell[i]
    print "15 percent deviation at ell=%e"%ell2
    i = np.argwhere(abs(diff) >= 0.25)[0]
    ell3 = ell[i]
    print "25 percent deviation at ell=%e"%ell3
    plt.plot(ell, diff)
    # Mark the threshold crossings on the difference panel...
    plt.axvline(ell0,linestyle='--', color='k')
    plt.axvline(ell1,linestyle='--', color='k')
    plt.axvline(ell2,linestyle='--', color='k')
    plt.axvline(ell3,linestyle='--', color='k')
    plt.axhline(0., color='k')
    plt.xlim(0.1,2500) ; plt.ylim(-0.1,0.1)
    plt.xscale('log')
    # ...and repeat the markers on the spectra panel.
    plt.subplot(211)
    plt.axvline(ell0,linestyle='--', color='k')
    plt.axvline(ell1,linestyle='--', color='k')
    plt.axvline(ell2,linestyle='--', color='k')
    plt.axvline(ell3,linestyle='--', color='k')
    plt.show()
class clres:
    """Loads unbinned C(l) text outputs (GGL, shear-shear and
    position-position) from an output directory for comparison.

    NOTE(review): uses Python 2 print statements — this file targets
    Python 2.
    """
    def __init__(self, path=""):
        import glob, os
        # One bin_*.txt file per tomographic bin pair under each statistic.
        ggl = glob.glob(os.path.join(path+"galaxy_position_shape_cross_cl_unbinned/bin*.txt"))
        shear = glob.glob(os.path.join(path+"galaxy_shape_cl_unbinned/bin*.txt"))
        pos = glob.glob(os.path.join(path+"galaxy_position_cl_unbinned/bin*.txt"))
        self.ell = np.loadtxt(os.path.join(path+"galaxy_position_shape_cross_cl_unbinned/ell.txt"))
        self.ggl={}
        self.shear={}
        self.pos={}
        for p in ggl:
            bin_name = os.path.basename(p).replace(".txt", "")
            self.ggl[bin_name] = np.loadtxt(p)
            print "Loaded GGL C(l) %s"%bin_name
        for p in pos:
            bin_name = os.path.basename(p).replace(".txt", "")
            self.pos[bin_name] = np.loadtxt(p)
            print "Loaded position-position C(l) %s"%bin_name
        for p in shear:
            bin_name = os.path.basename(p).replace(".txt", "")
            self.shear[bin_name] = np.loadtxt(p)
            print "Loaded shear-shear C(l) %s"%bin_name
    def compare_cls(self, cl2, bin1, bin2, cltype="shear"):
        """Report the multipoles where this spectrum deviates from cl2's
        by 1/5/10/20 percent for the requested bin pair and statistic."""
        import pylab as plt
        import numpy as np
        # NOTE(review): this check only fires when EVERY ell differs;
        # .any() was probably intended — confirm.
        if (self.ell!=cl2.ell).all():
            print "ERROR: ell sampling does not match."
        sp1 = getattr(self, cltype)
        sp2 = getattr(cl2, cltype)
        bin = "bin_%d_%d"%(bin1,bin2)
        diff = 1. - sp1[bin]/sp2[bin]
        i = np.argwhere(abs(diff) >= 0.01 )[0]
        ell0 = self.ell[i]
        print "1 percent deviation at ell=%f"%ell0
        i = np.argwhere(abs(diff) >= 0.05 )[0]
        ell1 = self.ell[i]
        print "5 percent deviation at ell=%f"%ell1
        i = np.argwhere(abs(diff) >= 0.10 )[0]
        ell2 = self.ell[i]
        print "10 percent deviation at ell=%f"%ell2
        i = np.argwhere(abs(diff) >= 0.20)[0]
        ell3 = self.ell[i]
        print "20 percent deviation at ell=%f"%ell3
|
from django.db import models
from django.contrib.auth.models import BaseUserManager,AbstractBaseUser
# Create your models here.
class UserManager(BaseUserManager):
    """Manager providing email-keyed creation helpers for UserModel."""
    def create_user(self, email, password=None):
        """Create and save a regular user; email is required.

        Raises ValueError when email is falsy. Returns the saved user.
        """
        if not email:
            raise ValueError('User must have email address')
        user=self.model(
            email=self.normalize_email(email),
        )
        user.set_password(password)  # stores the hash, not the raw password
        user.save(using=self._db)
        return user
    def create_staffuser(self, email, password):
        """Create a staff (but NOT admin) user and return it.

        BUG FIX: previously also set admin=True, which made every staff
        user a full admin; it also never returned the created user.
        """
        user=self.create_user(
            email,
            password=password,
        )
        user.staff=True
        user.save(using=self._db)
        return user
    def create_superuser(self, email, password):
        """Create a user with both staff and admin privileges and return it.

        BUG FIX: previously did not return the created user (Django's
        createsuperuser flow expects the instance back).
        """
        user=self.create_user(
            email,
            password=password,
        )
        user.staff=True
        user.admin= True
        user.save(using=self._db)
        return user
class UserModel(AbstractBaseUser):
    """Custom user model keyed by email — there is no username field.

    Permission checks are permissive stubs (always True); the is_*
    properties simply expose the boolean flags below.
    """
    email= models.EmailField(
        max_length=255,
        verbose_name="Email address",
        unique=True,
    )
    # Account-state flags consumed by the is_* properties below.
    active= models.BooleanField(default=True)
    staff= models.BooleanField(default=False)
    admin= models.BooleanField(default=False)
    USERNAME_FIELD="email"  # users log in with their email address
    REQUIRED_FIELDS=[]      # email and password only
    objects= UserManager()
    def get_full_name(self):
        # The email is the only identity stored on the model.
        return self.email
    def get_short_name(self):
        return self.email
    def has_perm(self, perm,obj=None):
        # Permissive stub: every user has every object permission.
        return True
    def has_module_perms(self, app_label):
        # Permissive stub: every user can access every app.
        return True
    @property
    def is_staff(self):
        return self.staff
    @property
    def is_admin(self):
        return self.admin
    @property
    def is_active(self):
        return self.active
inp = 3017957
test = 5

def josephus_survivor(n):
    # Josephus part 1 closed form: with n = 2**m + l the survivor is
    # 2*l + 1, i.e. n with its leading binary 1 rotated to the end.
    return ((n - (1 << (n.bit_length() - 1))) << 1) | 1

print("Test: " + str(josephus_survivor(test))) # Correct!
print("Part 1: " + str(josephus_survivor(inp))) # Correct!

# Part 2: the survivor is n minus the largest power of 3 below n
# (valid here because inp - 3**13 <= 3**13).
i = 1
while i * 3 < inp:
    i *= 3
print("Part 2: " + str(inp - i))
## Module imports (Python 2 email API paths).
import smtplib
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
from email.MIMEBase import MIMEBase
from email import encoders
Fromadd = "matthieu.devalle@telecomnancy.net"
Toadd = "matthieu.devalle@telecomnancy.net" ## recipient address
message = MIMEMultipart() ## container object for the whole mail
message['From'] = Fromadd ## sender
message['To'] = Toadd ## recipient
message['Subject'] = "SUJET DE VOTRE MAIL" ## subject line
msg = "VOTRE MESSAGE" ## body text to send
## BUG FIX: was `messageattach(...)` (missing dot) -- a NameError at runtime.
message.attach(MIMEText(msg.encode('utf-8'), 'plain', 'utf-8')) ## attach the UTF-8 body
nom_fichier = "db.sqlite3" ## attachment file name
## BUG FIX: was open("", "rb") -- opened an empty path instead of the attachment.
piece = open(nom_fichier, "rb") ## open the attachment
part = MIMEBase('application', 'octet-stream') ## base64-encode the attachment
part.set_payload((piece).read())
piece.close() ## close the handle once the payload is read (was leaked)
encoders.encode_base64(part)
part.add_header('Content-Disposition', "piece; filename= %s" % nom_fichier)
## BUG FIX: was msg.attach(part) -- msg is a plain string; attach to the MIME container.
message.attach(part)
serveur = smtplib.SMTP('smtp.gmail.com', 587) ## connect to the outgoing SMTP server
serveur.starttls() ## upgrade the connection to TLS
serveur.login(Fromadd, "") ## authenticate (password intentionally blank here)
texte= message.as_string().encode('utf-8') ## serialise the message to bytes
serveur.sendmail(Fromadd, Toadd, texte) ## send the mail
serveur.quit() ## disconnect from the server
|
# Writing to files: the context manager guarantees the handle is closed.
with open('write.txt', 'w') as outfile:
    outfile.write('testing write ... ')
    print("testing123")
#!/usr/bin/env python3
#
# Development Order #3:
#
# Decides whether this tool can run a test for a given test spec.
# The supported spec options live in SUPPORTED_OPTIONS below.
#
# Exit statuses should be different based on error.

import pscheduler

json = pscheduler.json_load(exit_on_error=True)

try:
    if json['type'] != 'latency':
        pscheduler.succeed_json({
            "can-run": False,
            "reasons": [ "Unsupported test type" ]
        })
except KeyError:
    pscheduler.succeed_json({
        "can-run": False,
        "reasons": [ "Missing test type" ]
    })

spec = json['spec']

SUPPORTED_OPTIONS = [
    "schema",
    "source", "source-node", "dest", "dest-node",
    "packet-count", "packet-interval",
    "packet-timeout", "packet-padding",
    "ip-tos", "ip-version",
    "bucket-width",
]

# Any spec option we don't recognise is a reason we cannot run.
errors = [f"halfping does not support {option}"
          for option in spec
          if option not in SUPPORTED_OPTIONS]

result = {"can-run": not errors}
if errors:
    result["reasons"] = errors
pscheduler.succeed_json(result)
|
# Odoo addon manifest: metadata for the "Database restore" module.
{
    'name': "Database restore",
    'summary': 'Backups local restore',
    'description': "Restore backups from local files",
    'author': "Artem Shelest",
    'website': "http://www.lumirang.com",
    'category': 'Administration',
    'version': '13.0.1.0',  # Odoo 13.0 series
    'installable': True,
    'depends': ['base'],  # no dependencies beyond the base module
    'data': [],  # no XML/CSV data files shipped
}
|
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from starlette import applications
from starlette.responses import PlainTextResponse
from starlette.routing import Route
from starlette.testclient import TestClient
import opentelemetry.instrumentation.starlette as otel_starlette
from opentelemetry.test.test_base import TestBase
class TestStarletteManualInstrumentation(TestBase):
    """Starlette instrumentation tests using explicit instrument_app()."""
    def _create_app(self):
        # Instrument a freshly created app (overridden by the auto variant).
        app = self._create_starlette_app()
        self._instrumentor.instrument_app(app)
        return app
    def setUp(self):
        super().setUp()
        self._instrumentor = otel_starlette.StarletteInstrumentor()
        self._app = self._create_app()
        self._client = TestClient(self._app)
    def test_basic_starlette_call(self):
        """A plain request yields three spans, each named after the path."""
        self._client.get("/foobar")
        spans = self.memory_exporter.get_finished_spans()
        self.assertEqual(len(spans), 3)
        for span in spans:
            self.assertIn("/foobar", span.name)
    def test_starlette_route_attribute_added(self):
        """Ensure that starlette routes are used as the span name."""
        self._client.get("/user/123")
        spans = self.memory_exporter.get_finished_spans()
        self.assertEqual(len(spans), 3)
        for span in spans:
            self.assertIn("/user/{username}", span.name)
        self.assertEqual(
            spans[-1].attributes["http.route"], "/user/{username}"
        )
        # ensure that at least one attribute that is populated by
        # the asgi instrumentation is successfully feeding though.
        self.assertEqual(spans[-1].attributes["http.flavor"], "1.1")
    @staticmethod
    def _create_starlette_app():
        # Minimal app: two routes sharing one trivial handler.
        def home(_):
            return PlainTextResponse("hi")
        app = applications.Starlette(
            routes=[Route("/foobar", home), Route("/user/{username}", home)]
        )
        return app
class TestAutoInstrumentation(TestStarletteManualInstrumentation):
    """Test the auto-instrumented variant
    Extending the manual instrumentation as most test cases apply
    to both.
    """
    def _create_app(self):
        # instrumentation is handled by the instrument call
        self._instrumentor.instrument()
        return self._create_starlette_app()
    def tearDown(self):
        # Undo the global patch so later tests see the original class.
        self._instrumentor.uninstrument()
        super().tearDown()
class TestAutoInstrumentationLogic(unittest.TestCase):
    def test_instrumentation(self):
        """Verify that instrumentation methods are instrumenting and
        removing as expected.
        """
        instrumentor = otel_starlette.StarletteInstrumentor()
        original = applications.Starlette
        instrumentor.instrument()
        try:
            instrumented = applications.Starlette
            # instrument() must replace the Starlette class object.
            self.assertIsNot(original, instrumented)
        finally:
            instrumentor.uninstrument()
        # uninstrument() must restore the exact original class.
        should_be_original = applications.Starlette
        self.assertIs(original, should_be_original)
|
from autumn.settings import Region
# Calibration window — presumably day numbers within the modelled year
# (cf. the "start on 1 October" comment below); TODO confirm.
CALIBRATION_START = 100
CALIBRATION_END = 244
# Please keep this code in place
OPTI_REGIONS = [
    Region.BELGIUM,
    Region.FRANCE,
    Region.ITALY,
    Region.SPAIN,
    Region.SWEDEN,
    Region.UNITED_KINGDOM,
]
# ISO3 country codes (note: order differs from OPTI_REGIONS).
OPTI_ISO3S = [
    "BEL",
    "GBR",
    "ITA",
    "SWE",
    "FRA",
    "ESP",
]
# Human-readable display names keyed by region.
COUNTRY_TITLES = {
    Region.BELGIUM: "Belgium",
    Region.FRANCE: "France",
    Region.ITALY: "Italy",
    Region.SPAIN: "Spain",
    Region.SWEDEN: "Sweden",
    Region.UNITED_KINGDOM: "United Kingdom",
}
# Definitions of the three phases
PHASE_2_START_TIME = 275 # start on 1 October
DURATION_PHASES_2_AND_3 = 365 + 90
# Phase 2 length options, in days.
PHASE_2_DURATION = {
    "six_months": 183,
    "twelve_months": 365,
}
# Mixing factor bounds
MIXING_FACTOR_BOUNDS = [0.1, 1.0]
# Microdistancing
MICRODISTANCING_OPTI_PARAMS = {
    # apply mild microdistancing
    "behaviour": {
        "parameters": {
            "start_asymptote": 0.10,
            "end_asymptote": 0.10,
        }
    },
    # remove the behaviour adjuster's effect
    "behaviour_adjuster": {"parameters": {"start_asymptote": 1.0, "end_asymptote": 1.0}},
}
|
import utils
# Module-level state: initial facts and queries read from the input file
# (populated by Parsing.parsing_loop via `global`).
initial_facts = []
queries = []
def unpack_facts_to_list(facts):
    """
    :param facts: string to decompose into a list of chars
    :return: list of chars
    Split a facts/queries line into individual fact letters: anything
    after a '#' is dropped, as are whitespace and the '=' / '?' prefixes.
    """
    comment_free = facts.split('#', 1)[0]
    return [char for char in comment_free
            if not char.isspace() and char != "=" and char != "?"]
def update_initial_fact(line):
    # TODO: unimplemented stub — intended to update the initial facts from
    # a line, judging by the name; currently does nothing.
    pass
def check_initial_facts_cond(fact, initial):
    """
    :param fact: fact name (single-character string)
    :param initial: iterable of initially-true fact names
    :return: True if fact is inside initial, False otherwise
    """
    # `in` already yields a bool — the previous `True if ... else False`
    # wrapper was redundant.
    return fact in initial
def add_coord_to_class(instance, new_coord):
    """
    :param instance (class): instance whose coordinate list is updated
    :param new_coord (int, int): (x, y) pair to record — x is the position
        inside the line, y is the line number
    :return: the updated coordinates list
    Used by the Fact and Comment classes to track every place a name occurs.
    """
    coords = instance.coord
    coords.append(new_coord)
    return coords
class Parsing:
    """Reads an expert-system input file and splits it into initial facts,
    queries, comments and equations (rules)."""
    def __init__(self, file_path):
        """
        :param file_path(string) : Path to the input file
        raw_content(list of class instances) : all the raw file content
        queries(list of chars) : all the queries inside the file
        comments(list of Comment() instances) : all the comments encountered inside the file
        equations(list of Equation() instances) : all the equation encountered inside the file
        fact_names(list of Fact() instances) : each distinct fact name seen so far
        """
        self.raw_content = []
        self.queries = []
        self.comments = []
        self.equations = []
        self.fact_names = []
        self.read_input_file(file_path)
    def parsing_loop(self):
        """
        Main parsing loop: classifies each stored line via
        utils.check_line_type and dispatches it; initial facts and queries
        are written into the module-level globals.
        """
        global initial_facts
        global queries
        for (y, line) in enumerate(self.raw_content):
            line_type = utils.check_line_type(line)
            if line_type == "Initial Facts":
                initial_facts = unpack_facts_to_list(line)
            elif line_type == "Query":
                queries = unpack_facts_to_list(line)
            elif line_type == "Equation":
                self.handle_equation(line, y)
            elif line_type == "Comment":
                self.handle_comment(line, y)
    def handle_comment(self, line, y):
        # Whole line kept as comment content, anchored at column 0.
        self.comments.append(Comment((0, y), line, 0))
    def handle_equation(self, line, y):
        """
        :param line(string) : line content
        :param y(int) : line number, corresponds to a y position
        This method deals with equation by splitting the line in left and right parts, reversing it if needed
        and storing the facts name + dealing with left and right parts separately by calling
        Equation.parse_equation_side(side(left or rigt), lift_of_facts_names, number_of_line)
        """
        eq = Equation()
        # Strip spaces and drop any trailing '#' comment before splitting.
        split_line = line.replace(" ", "").split('#')
        if "<=>" in split_line[0]:
            eq.operator = "<=>"
            right, left = split_line[0].split('<=>')
        else:
            left, right = split_line[0].split('=>')
        self.fact_names, eq.left = eq.parse_equation_side(left, self.fact_names, y, "left")
        self.fact_names, eq.right = eq.parse_equation_side(right, self.fact_names, y, "right")
        # --------------------------------#
        # Tmp Part, will be removed later #
        print("LEFT : ")
        for elem in eq.left: print(elem)
        print("\n\nRIGHT :")
        for elem in eq.right: print(elem)
        # --------------------------------#
        self.equations.append(eq)
    def read_input_file(self, file_path):
        """
        :param file_path(string) : path to the file that we are going to read information from
        Reading input file to store every line into a list
        """
        self.raw_content = []
        with open(file_path, "r+") as fd:
            for line in fd:
                # Skipping empty lines
                if len(line) > 1:
                    self.raw_content.append(line)
        # Don't forget to remove spaces from the lines
        # -> We either do it here or inside every handle_type_of_equation method
class Comment:
    """A parsed comment with its position(s) in the source file."""
    def __init__(self, coord, line, start_pos):
        """
        :param coord(tuple(x, y)) : x, y position of the comment
        :param line(string) : full line content
        :param start_pos(int) : column at which the comment text begins
        """
        self.coord = []
        add_coord_to_class(self, coord)
        self.content = line[start_pos:]
class Query:
    """Container for the queries found in the input file."""
    def __init__(self, compact_queries):
        """
        :param compact_queries(string) : query letters glued together
        queries (list of chars) : the individual query letters
        """
        self.queries = unpack_facts_to_list(compact_queries)
class Equation:
    """One rule of the knowledge base: a left side, an implication
    operator ('=>' or '<=>') and a right side, each side a list of Facts."""
    def __init__(self):
        """
        neg_bool (tuple(bool, bool)) : if the left/right part of the equation has a negation operator '!'
        operator (char) : operator on the right side of the fact, related to the next fact
        left (list of class instances) : left side of the equation, list of facts
        right (list of class instances) : right side of the equation, list of facts
        """
        self.neg_bool = (False, False)
        self.operator = ''
        self.left = []
        self.right = []
    def parse_equation_side(self, side, fact_names, y, str_side):
        """
        :param side(string): string that cointains a side, left or right
        :param fact_names(list): list of fact names already encountered
        :param y(int): line number
        :param str_side(string): "left" or "right" (currently unused)
        :return: fact_names(list) updated, new_side(list) which corresponds to left or right side as
        a list of Fact instances
        """
        # Error parsing needs to be done
        prev = None
        tmp_negation = False  # a '!' seen before the side's first fact
        new_side = []
        for (x, elem) in enumerate(side):
            if elem.isalpha():
                # A fact letter: create a Fact and link it to the element
                # parsed just before it.
                new_side.append(Fact(elem, (x, y)))
                new_side[-1].previous = prev
                if utils.check_elem_not_in_fact_names(elem, fact_names):
                    fact_names.append(Fact(elem, (x, y)))
                else:
                    utils.find_fact_and_append_coord(elem, fact_names, (x, y))
                prev = elem
                if tmp_negation:
                    new_side[-1].negation = True
                    tmp_negation = False
            else:
                # An operator character: a '!' before the first fact flags
                # negation; anything else is attached to the previous fact.
                if elem == '!' and len(new_side) == 0:
                    tmp_negation = True
                    continue
                new_side[-1].operator = elem
        print(fact_names)
        return fact_names, new_side
class Fact:
    """A single named fact and everything known about where it occurs."""
    def __init__(self, c, coord):
        """
        cond (bool) : If the fact is true or not regarding to the initial_facts
        operator (char) : Operator (after the fact) associated with a fact
        name (char) : name of the fact -> ex: A
        coord (list of tuple(x, y)) : x = position inside the line, y = line number
        previous (class instance) : left connected element to the current fact
        """
        global initial_facts
        self.cond = check_initial_facts_cond(c, initial_facts)
        self.operator = None
        self.negation = False  # set later by Equation.parse_equation_side
        self.name = c
        self.coord = []
        self.sides = []
        self.coord = add_coord_to_class(self, coord)
        self.previous = [None]
    def __repr__(self):
        return "name : {} .. operator : {} .. negation : {} .. coord : {} .. previous : {}"\
            .format(self.name, self.operator, self.negation, self.coord, self.previous)
|
import sys,pickle
from itertools import *
# NOTE(review): Python 2 script (print statements, text-mode pickle read).
# Computes precision / recall / F1 of per-line binary predictions for the
# aspect given as the first CLI argument.
aspect = sys.argv[1]
f_actual = open('../data/input/test/'+aspect,'r')
predicted_labels = pickle.load(open('../data/predicted_labels.pkl','r'))
numerator = 0            # true positives
denominator_system = 0   # total positive predictions
denominator_gold = 0     # total gold positives
for lineno,line in enumerate(f_actual):
    # Gold label is the second tab-separated field of each line.
    actual_line = int(line.split('\t')[1].strip())
    predicted_line = predicted_labels[aspect][lineno]
    if predicted_line == 1:
        if actual_line == 1:
            numerator +=1
        denominator_system+=1
    if actual_line == 1:
        denominator_gold+=1
print denominator_system,denominator_gold
precision = numerator/float(denominator_system)
recall = numerator/float(denominator_gold)
f_measure = (2*precision*recall)/(precision+recall)
print precision, recall, f_measure
# -*- coding: utf8 -*-
import pandas as pd
import numpy as np
# Input CSV of indication texts plus the term-dictionary files (disease,
# symptom, body part) used for exact-match entity annotation below.
symPath='./prepareTrainSets/Indication.csv'
# disPath='datasets/prepareTrainSets/diseaseMatch.csv'
disDictName = "prepareTrainSets/disease_new2.dic"
symDictName = "prepareTrainSets/symptom_new2.dic"
bodyDictName = "prepareTrainSets/bodyไธญๆ่บซไฝ้จไฝๅ็งฐ.dic"
def loadDict(dicName, inType):
    """Load a one-term-per-line dictionary file.

    :param dicName: path to a UTF-8 term list, one entry per line
    :param inType: entity label assigned to every term (e.g. 'DISEASE')
    :return: dict mapping each cleaned term to inType
    """
    termDict = dict()
    # BUG FIX: the original iterated directly over open(...) and never
    # closed the file handle; a context manager closes it deterministically.
    with open(dicName, encoding='utf8') as fh:
        for item in fh:
            termDict[item.strip().replace('\n','')] = inType
    return termDict
###### Build the dictionaries.
# Disease dictionary: disease names, aliases and ICD10 names scraped from
# the web; 39615 entries after de-duplication.
disDict = loadDict(disDictName, 'DISEASE')
# Symptom descriptions scraped from the web; 7457 entries after de-duplication.
symDict = loadDict(symDictName, 'SYMPTOM')
# Body-part descriptions scraped from the web; 1929 entries after de-duplication.
bodyDict = loadDict(bodyDictName, 'BODY')
# For every normalised sentence, exact-match each dictionary term and record
# the matched term, its start index, end index and entity type.
'''row: the normalised sentence
name: a term from one of the dictionaries found inside the sentence
typeName: the label of the dictionary that matched'''
def row2ner(result, row, name, typeName):
    """Append a 'name start end type' record to result for every
    occurrence of `name` inside `row` (overlapping matches included)."""
    start = row.find(name, 0)
    while start != -1:
        end = start + len(name)
        result.append(name + ' ' + str(start) + ' ' + str(end) + ' ' + typeName)
        start = row.find(name, start + 1)
'''
Convert the detected entities into BIO-format labels.
'''
def ner2lable(bio, des, inResult, btype, itype):
    """Write BIO labels into `bio` for each entity record in inResult.

    Each record is indexable as (name, start, end, type, ...): position
    `start` receives '<btype>-<type>' and the rest of the span
    '<itype>-<type>'. `des` is unused but kept for interface compatibility.
    """
    for record in inResult:
        span_start = int(record[1])
        span_end = int(record[2])
        label = record[3]
        bio[span_start] = btype + "-" + label
        for pos in range(span_start + 1, span_end):
            bio[pos] = itype + '-' + label
# Column to extract (the indication/symptom text).
columnName = "้ๅบ็"
# Output path of the generated BIO training data.
trainPath = "prepareTrainSets/ner_train_data.txt"
# loadDiseaseDatasets(symPath, columnName, trainPath)
# loadDiseaseDatasets(disPath, columnName, trainPathDis)
df_dis = pd.read_csv(symPath)
df_dis = df_dis.dropna()
desList = df_dis[columnName].tolist() # symptom descriptions
f = open(trainPath, "w", encoding='utf8')
print(len(desList))
for i in range(len(desList)):
    print('reading the '+ str(i) +' data from deslist')
    des = desList[i] # the symptom text for this row
    # for des in desList[0:100]:
    result = []
    # print("des:", len(des),des)
    if not des:
        continue
    # Normalise des and initialise the BIO labels to all 'O'.
    des = des.replace(' ', '').replace('\t', '').replace('\n', '').replace('ใ', '').strip() # strip whitespace / separators
    # print(des)
    bio = ['O' for i in range(len(des))]
    # Match every disease term, recording start positions.
    typeName = 'DISEASE'
    for dis in disDict:
        row2ner(result, des, dis, typeName)
    # print(result)
    # ner2lable(bio, des, result, 'B-DIS','I-DIS')
    # Match every symptom term, recording start positions.
    result1 = []
    typeName = 'SYMPTOM'
    for sym in symDict:
        row2ner(result1, des, sym, typeName)
    # print(result1)
    # ner2lable(bio, des, result1, 'B-SYM','I-SYM')
    # Match every body-part term, recording start positions.
    result2 = []
    typeName = 'BODY'
    for body in bodyDict:
        row2ner(result2, des, body, typeName)
    # print(result2)
    # ner2lable(bio, des, result2, 'B-BODY','I-BODY')
    # print(len(bio),bio)
    result4 = result + result1 + result2
    # print("result4=",len(result4),result4)
    # Convert the string records into a 2-D list: [name, start, end, type, len].
    result5 = [[0 for i in range(5)] for j in range(len(result4))]
    for i in range(len(result4)):
        resArr = result4[i].split(' ')
        result5[i][0] = resArr[0]
        result5[i][1] = int(resArr[1])
        result5[i][2] = int(resArr[2])
        result5[i][3] = resArr[3]
        result5[i][4] = len(resArr[0])
    # idex=np.lexsort([result4[:,1]])
    # sorted_data = index[idex, :]
    # Sort by start position, then by entity length.
    result5.sort(key=lambda x: (x[1], x[4]))
    # print("5=",result5)
    # Greedy longest match: keep only the longest entity at each position.
    result6 = [[0 for i in range(5)] for j in range(len(result5))]
    maxIndexNum = 0
    maxIndexAll = 0
    i = 0
    # print("len 5 =", len(result5))
    # Scan the sorted entities: among entries sharing a start index keep the
    # longest (they are length-sorted), and require the next kept entity to
    # start after the previous kept entity's end index.
    while i < (len(result5) - 1):
        # print("i=",i, result5[i])
        indexNew = result5[i][1]
        # Skip entities that start inside the previously kept span.
        if indexNew < maxIndexAll:
            i = i + 1
            continue
        maxIndex = i
        # Find the longest entity among those sharing this start index and
        # remember where to resume the scan.
        for j in range(i + 1, len(result5)):
            # print("j=",j,result5[j])
            if result5[j][1] == indexNew:
                maxIndex = j
                i = maxIndex + 1
            else:
                maxIndexAll = result5[maxIndex][2]
                i = maxIndex + 1
                # print("up i=", i, maxIndex,maxIndexNum)
                break
        # print("maxindex=",maxIndex, result5[maxIndex])
        result6[maxIndexNum] = result5[maxIndex]
        maxIndexNum += 1
    result6 = result6[0:maxIndexNum]
    # print("===============6:===========",result6)
    ner2lable(bio, des, result6, 'B', 'I')
    # One "<char> <label>" line per character, blank line between sentences.
    for nerIndex in range(len(bio)):
        f.write(des[nerIndex] + " " + bio[nerIndex] + "\n")
    f.write("\n")
    f.flush()
f.close()
print('done')
from database import *
from datetime import *
import types
from copy import deepcopy
# MySQL connection settings for the shared module-level connection below.
config = {
    'user': 'root',
    'password': '123456',
    'host': '127.0.0.1',
    'charset': 'utf8',
    'db' : 'mtdb',
}
# Connection shared by all helper functions and the dbHelper class.
database = database.current(config)
def checkDataType(obj):
    """Return the printf-style placeholder ("%d," / "%s," / "%f,") for
    obj's type; dates format as strings; None for unsupported types."""
    attr_s = "%s,"
    attr_d = "%d,"
    attr_f = "%f,"
    if isinstance(obj, int):
        return attr_d
    elif isinstance(obj, str):
        return attr_s
    elif isinstance(obj, float):
        return attr_f
    # BUG FIX: with `from datetime import *` in scope, `datetime.date` is
    # the datetime class's .date() method, not a type, so isinstance()
    # raised TypeError. Test against the `date` class itself (datetime
    # instances also match, since datetime subclasses date).
    elif isinstance(obj, date):
        return attr_s
    else:
        return None
def generateValuesReplaceStr(values):
    """Concatenate the printf placeholder of every value, comma separated
    (the trailing comma produced by checkDataType is stripped)."""
    placeholders = [checkDataType(item) for item in values]
    return "".join(placeholders)[:-1]
def generateValue(item):
    """Render one row dict as a SQL tuple literal, mapping None -> NULL.

    NOTE(review): relies on Python 2 semantics — deepcopy(item.values())
    must yield an index-assignable list; under Python 3, dict.values() is
    a view and `values[i] = ...` would fail. Confirm the target interpreter.
    """
    i = 0
    values = deepcopy(item.values())
    for value in values:
        if value == None:
            values[i] = 'NULL'
        i += 1
    ret = str(tuple(values))
    # str(tuple) quotes the placeholder; strip the quotes so SQL sees bare NULL.
    ret = ret.replace("'NULL'", 'NULL')
    return ret
def generateValues(data):
    """Build the comma-separated run of value tuples for a multi-row INSERT."""
    rendered = [generateValue(item) for item in data]
    return ",".join(rendered)
class dbHelper():
    """Thin static wrapper around the module-level `database` connection:
    insert/update/delete/select builders plus raw execute helpers.

    NOTE(review): every method (including __init__) is a @staticmethod and
    all state lives in the module-level `database` object, so the class is
    effectively a namespace rather than an instantiable helper.
    """
    @staticmethod
    def __init__(config):
        database.current(config)
    @staticmethod
    def setAutoCommit(autoCommit = False):
        database.setAutoCommit(autoCommit)
    @staticmethod
    def insert(table=None, data=None):
        "single table insert, data like [{'a':1, 'b':'abc'}] or {'a':1, 'b':'abc'}"
        if table == None or data == None:
            return None
        if isinstance(data, dict):
            #if str(type(data)) == "<type 'dict'>":
            # Single-row insert: one "%s" placeholder per column.
            item0 = data
            lenKeys = len(item0.keys())
            keys = ",".join(item0.keys())
            ss = lenKeys*"%s,"
            ss = ss[:-1]
            statement = "INSERT INTO " + table + " (" + keys + ") VALUES (" + ss + ")"
            dd = tuple(item0.values())
            ret = database.execute(statement, dd)
            return ret
        else:
            # Multi-row insert: column names taken from the first row.
            item0 = data[0]
            lenKeys = len(item0.keys())
            keys = ",".join(item0.keys())
            ss = lenKeys*"%s,"
            ss = ss[:-1]
            statement = "INSERT INTO " + table + " (" + keys + ") VALUES (" + ss + ")"
            dd = []
            for item in data:
                dd.append(tuple(item.values()))
            ret = database.executeMany(statement, dd)
            return ret
    @staticmethod
    def update(table=None, data={}, condition=None):
        "single table update, data like {'a':1, 'b':'abc'}, condition like 'where id = 1"
        if table == None or not data or condition == None:
            return None
        st = ''
        # Build "col=%s," pairs; parameters are passed separately below.
        for key in data.keys():
            st += (key + '=' + '%s' + ',')
        st = st[:-1]
        statement = "UPDATE " + table + " SET " + st + " " + condition
        ret = database.execute(statement, data.values())
        return ret
    @staticmethod
    def delete(table=None, condition=None):
        "single table delete"
        if table == None:
            return None
        statement = ''
        if condition == None:
            # No condition deletes every row of the table.
            statement = "DELETE FROM "+ table
        else:
            statement = "DELETE FROM "+ table + " " + condition
        ret = database.execute(statement)
        return ret
    @staticmethod
    def select(table=None, elements=None, condition=None):
        "single table select, elements like ['a', 'b'] and None is '*', condition like 'where id = 1"
        if table == None:
            return None
        em = '*'
        if elements:
            em = ", ".join(elements)
        statement = "SELECT " + em + " FROM " + table
        if condition:
            statement += " " + condition
        ret = database.query(statement)
        return ret
    @staticmethod
    def execute(query, params=None):
        return database.execute(query, params)
    @staticmethod
    def executeMany(query, params=None):
        return database.executeMany(query, params)
    @staticmethod
    def query(query, params=None):
        # NOTE(review): delegates to database.execute, unlike select()
        # which uses database.query — confirm this is intentional.
        return database.execute(query, params)
    @staticmethod
    def rollback():
        database.rollback()
    @staticmethod
    def commit():
        database.commit()
# Initialise the helper's shared connection at import time.
dbHelper(config)
#dbHelper = dbHelper(config)
#data = [
# {'first_name':'Jane', 'hire_date':str(date(2005, 2, 12)), 'sex':0},
# {'first_name':'Joe', 'hire_date':str(date(2006, 5, 23)), 'sex':0},
# {'first_name':'John', 'hire_date':str(date(2010, 10, 3)), 'sex':1},
#]
#data = {'first_name':'jacksonpan', 'hire_date':date(2013, 2, 12), 'sex':1}
#where = 'where id = 2'
#elements = ['id', 'first_name']
#where = 'where id = 31'
#dbHelper(autoCommit=True)
#dbHelper.insert('employees', data)
#dbHelper.commit()
#dbHelper.update('employees', data, where)
#dbHelper.delete('employees', None)
#print dbHelper.select('employees', elements, where)
#print dbHelper.select('employees', None, None)
#data = [
# ('Jane', date(2005, 2, 12), 1),
# ('Joe', date(2006, 5, 23), 1),
# ('John', date(2010, 10, 3), 0),
#]
#stmt = "INSERT INTO employees (first_name, hire_date, sex) VALUES (%s, %s, %s)"
#dbHelper.executeMany(stmt, data)
#insert = "INSERT INTO employees (first_name, hire_date, sex) VALUES (%s, %s, %s)"
#data = ('Jane', date(2005, 2, 12), 1)
#dbHelper.execute(insert, data)
|
from flask import jsonify, session, g
from functools import wraps
from datetime import datetime
from app import redisClient
def check_permission(func):
    """Decorator: require a valid, non-exited Redis-backed session.

    On success the session data is stored on flask.g.user (with the
    session_id added) and the poll timestamp is refreshed; otherwise a
    JSON auth-failure payload is returned with the reason.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        session_id = session.get('session_id')
        if not session_id:
            return jsonify(error=False, auth=False, reason="No session_id found in cookies")
        user_data = get_user_data(session_id)
        if user_data is None:
            return jsonify(error=False, auth=False, reason="session_id not found in Redis")
        # Redis stores the flag as the string "true", not a boolean.
        if user_data['exited'] == 'true':
            return jsonify(error=False, auth=False, reason="User had previously clicked on logout")
        user_data['session_id'] = session_id
        renew_poll(session_id)  # keep the session alive
        g.user = user_data
        return func(*args, **kwargs)
    return wrapper
def get_user_data(session_id):
    """Fetch the session hash fields for session_id from Redis.

    Returns None when the session is unknown (no last_poll field),
    otherwise a dict with back / last_poll / max_date / username / exited.
    """
    key = "sessionManager:active:{}".format(session_id)
    fields = ("back", "last_poll", "max_date", "username", "exited")
    pipeline = redisClient.pipeline()
    for field in fields:
        pipeline.hget(key, field)
    values = dict(zip(fields, pipeline.execute()))
    if values["last_poll"] is None:
        return None
    return values
def get_last_poll():
    """Return the current time as a UNIX-timestamp string with
    microsecond precision.

    Uses datetime.timestamp() instead of strftime('%s'): '%s' is a
    platform-specific libc extension (absent on Windows), while
    timestamp() is portable and already includes the fractional part.
    """
    return "%.6f" % datetime.now().timestamp()
def renew_poll(session_id):
    """Refresh the session's last_poll timestamp.

    If the session no longer has a max_date field, the whole key is
    deleted as stale.
    """
    last_poll_int = get_last_poll()
    key = "sessionManager:active:{}".format(session_id)
    pipeline = redisClient.pipeline()
    pipeline.hget(key, "max_date")
    pipeline.hset(key, "last_poll", last_poll_int)
    max_date, _ = pipeline.execute()
    if max_date is None:
        # BUG FIX: the delete used to be queued on the already-executed
        # pipeline and never ran, so stale keys were kept forever.
        redisClient.delete(key)
def force_exited(session_id):
    """Mark the session as exited (user clicked logout).

    If the session no longer has a max_date field, the whole key is
    deleted as stale.
    """
    key = "sessionManager:active:{}".format(session_id)
    pipeline = redisClient.pipeline()
    pipeline.hget(key, "max_date")
    pipeline.hset(key, "exited", "true")
    max_date, _ = pipeline.execute()
    if max_date is None:
        # BUG FIX: the delete used to be queued on the already-executed
        # pipeline and never ran.
        redisClient.delete(key)
def get_time_left(session_id):
    """Return the seconds remaining until the session's max_date,
    or 0 when the session has no max_date."""
    # timestamp() replaces the non-portable strftime('%s') idiom
    # (a libc extension unavailable on Windows).
    now = datetime.now().timestamp()
    max_date = redisClient.hget("sessionManager:active:{}".format(session_id), "max_date")
    if max_date is None:
        return 0
    return float(max_date) - now
|
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from dtest import util as dtutil
import novaclient
import base
class IpGroupTest(base.BaseIntegrationTest):
    """Test that the IP groups API works as expected.

    The operations are not yet implemented in nova, so every call is
    currently expected to raise novaclient.OpenStackException.
    """
    def test_create(self):
        """Test that we can create an IP group."""
        # Start by creating a random IP group name
        name = self.randName()
        # Try to create the group--fails for now (operation not
        # implemented in nova)
        dtutil.assert_raises(novaclient.OpenStackException,
                             self.os.ipgroups.create, name)
    def test_delete(self):
        """Test that we can delete an IP group."""
        # In the actual test, we'll want to create a group for
        # delete() to act on here
        # Try to delete the group--fails for now (operation not
        # implemented in nova)
        dtutil.assert_raises(novaclient.OpenStackException,
                             self.os.ipgroups.delete, 1) # change 1 to group
    def test_get(self):
        """Test that we can get the details of an IP group."""
        # In the actual test, we'll want to create a group for get()
        # to act on here
        # Try to get the group--fails for now (operation not
        # implemented in nova)
        dtutil.assert_raises(novaclient.OpenStackException,
                             self.os.ipgroups.get, 1) # change 1 to group
    def test_list(self):
        """Test that we can list the details of an IP group."""
        # In the actual test, we'll want to create a group or two for
        # list() to act on here
        # Try to list the groups--fails for now (operation not
        # implemented in nova)
        dtutil.assert_raises(novaclient.OpenStackException,
                             self.os.ipgroups.list)
|
# Copyright 2016 Jochen Kursawe. See the LICENSE file at the top-level directory
# of this distribution and at https://github.com/kursawe/MCSTracker/blob/master/LICENSE.
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import sys
from os import path
from os.path import dirname
def make_all_plots():
    """Illustrate maximum-common-subgraph (MCS) cell tracking across a T1 swap.

    Loads the same stored mesh twice, applies a T1 swap near the centre of the
    second copy, finds maximum common subgraphs between the two meshes, and
    saves PDF figures for each of the largest mappings found.

    NOTE(review): depends on ``mesh`` and ``tracking`` names that are not
    imported in this file's visible header -- confirm they are made available
    elsewhere (e.g. via sys.path manipulation at package level).
    """
    # Both meshes start from the same saved file; the second is mutated below.
    first_mesh = mesh.load(path.join(dirname(__file__),
                           '..','..','..',
                           'test', 'test_tracking',
                           'output','multiple_mcs_before.mesh'))
    second_mesh = mesh.load(path.join(dirname(__file__),
                           '..','..','..',
                           'test', 'test_tracking',
                           'output','multiple_mcs_before.mesh'))
    mesh_centre = second_mesh.calculate_centre()
    # pick the node closest to the centre
    min_distance = 3*second_mesh.calculate_height()
    for node in second_mesh.nodes:
        distance = np.linalg.norm(node.position - mesh_centre)
        if distance < min_distance:
           min_distance = distance
           most_central_node = node
    # pick a node that shares an edge with this central node
    for local_index, element_node in enumerate(most_central_node.adjacent_elements[0].nodes):
        if element_node.id == most_central_node.id:
            num_nodes_this_element = most_central_node.adjacent_elements[0].get_num_nodes()
            one_edge_node = most_central_node.adjacent_elements[0].nodes[(local_index+1)%num_nodes_this_element]
            break
    second_mesh.perform_t1_swap( most_central_node.id, one_edge_node.id )
    first_mesh.plot('first_mesh.pdf')
    second_mesh.plot('second_mesh.pdf')
    subgraph_finder = tracking.KrissinelMaximumCommonSubgraphFinder( first_mesh,
                                                                    second_mesh,
                                                                    0.4 )
    subgraph_finder.find_maximum_common_subgraph()
    # One set of figures per equally-large mapping returned by the finder.
    for counter, largest_mapping in enumerate(subgraph_finder.largest_mappings):
        tracked_ids = []
        # Reset global ids so only the cells in this mapping get coloured.
        for element_one in first_mesh.elements:
            element_one.global_id = None
        for element_two in second_mesh.elements:
            element_two.global_id = None
        for global_id, frame_one_id in enumerate(largest_mapping):
            first_mesh.get_element_with_frame_id(frame_one_id).global_id = global_id
            second_mesh.get_element_with_frame_id(largest_mapping[frame_one_id]).global_id = global_id
            tracked_ids.append(global_id)
        first_mesh.index_global_ids()
        second_mesh.index_global_ids()
        first_mesh.plot('first_mesh_tracked_' + str(counter) + '.pdf', color_by_global_id = True,
                        total_number_of_global_ids = len( tracked_ids ) )
        second_mesh.plot('second_mesh_tracked_' + str(counter) + '.pdf', color_by_global_id = True,
                         total_number_of_global_ids = len( tracked_ids ) )
        # Bare-matplotlib rendering: grey out cells that belong to the mapping.
        first_mesh_polygon_list = []
        for element in first_mesh.elements:
            this_polygon = mpl.patches.Polygon([node.position for node in element.nodes],
                                               fill = True)
            if element.id_in_frame in largest_mapping:
                this_polygon.set_facecolor('slategrey')
            else:
                this_polygon.set_facecolor('white')
            first_mesh_polygon_list.append(this_polygon)
        first_mesh_polygon_collection = mpl.collections.PatchCollection(first_mesh_polygon_list,
                                                                        match_original = True)
        first_figure = plt.figure()
        first_figure.gca().add_collection(first_mesh_polygon_collection)
        first_figure.gca().set_aspect('equal')
        first_figure.gca().autoscale_view()
        plt.axis('off')
        first_figure.savefig('maping_nr_' + str(counter) +
                             '_first_mesh.pdf', bbox_inches = 'tight')
        plt.close(first_figure)
        second_mesh_polygon_list = []
        for element in second_mesh.elements:
            this_polygon = mpl.patches.Polygon([node.position for node in element.nodes],
                                               fill = True)
            # For the second mesh the mapping's *values* are its frame ids.
            if element.id_in_frame in largest_mapping.values():
                this_polygon.set_facecolor('slategrey')
            else:
                this_polygon.set_facecolor('white')
            second_mesh_polygon_list.append(this_polygon)
        second_mesh_polygon_collection = mpl.collections.PatchCollection( second_mesh_polygon_list,
                                                                          match_original = True)
        second_figure = plt.figure()
        second_figure.gca().add_collection(second_mesh_polygon_collection)
        second_figure.gca().set_aspect('equal')
        second_figure.gca().autoscale_view()
        plt.axis('off')
        second_figure.savefig('maping_nr_' + str(counter) +
                              '_second_mesh.pdf', bbox_inches = 'tight')
        plt.close(second_figure)
if __name__ == "__main__":
make_all_plots() |
#encoding: cinje
: from .. import table_args, caption_args
: # Template fragment: renders the "Top 10 Requestors" table.
: # Expects ctx to expose queries.get_top_requestors(), format_decimal()
: # and time_ago(); table_args / caption_args come from the parent package.
: def toprequestors ctx
<div class='table-responsive'>
<table #{table_args}>
<caption #{caption_args}>Top 10 Requestors</caption>
<tr><th># Requests</th><th>Requestor</th><th>Last Request</th></tr>
: for r in ctx.queries.get_top_requestors()
<tr>
<td>${ctx.format_decimal(r.request_count)}</td>
<td>${r.requestor}</td>
<td>${ctx.time_ago(r.last_request)}</td>
</tr>
: end
</table>
</div>
: end
|
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 5 18:10:25 2019
@author: hasee
"""
import cv2 as cv
# Load and display a single image until a key is pressed.
# Bug fix: "\p" in a plain string literal is an invalid escape sequence
# (a DeprecationWarning today, a SyntaxError in future Python); the raw
# string below keeps the exact same path bytes.
img = cv.imread(r"E:\py/test.jpg")
cv.namedWindow("Image")
cv.imshow("Image", img)
cv.waitKey(0)
cv.destroyAllWindows()
#!/usr/bin/python3
class Vertex:
    """A graph vertex identified by ``key`` holding weighted neighbour links."""

    def __init__(self, key):
        self.id = key
        # Maps neighbouring Vertex objects to edge weights.
        self.connectedTo = {}

    def addNeighbour(self, nbr, weight=0):
        """Record (or update) an edge to ``nbr`` with the given weight."""
        self.connectedTo[nbr] = weight

    def __str__(self):
        neighbour_ids = [x.id for x in self.connectedTo]
        return str(self.id) + ' connectedTo: ' + str(neighbour_ids)

    def getConnections(self):
        """Return a view of the neighbouring Vertex objects."""
        return self.connectedTo.keys()

    def getId(self):
        return self.id

    def getWeight(self, nbr):
        """Weight of the edge to ``nbr``; raises KeyError if absent."""
        return self.connectedTo[nbr]
class Graph:
    """Adjacency-map graph built from Vertex objects keyed by vertex id."""

    def __init__(self):
        self.vertList = {}      # key -> Vertex
        self.numVertices = 0    # running count of added vertices

    def addVertex(self, key):
        """Create, register and return a new Vertex for ``key``."""
        self.numVertices += 1
        newVertex = Vertex(key)
        self.vertList[key] = newVertex
        return newVertex

    def addEdge(self, f, t, weight=0):
        """Add a weighted edge f -> t, creating missing vertices on demand."""
        if f not in self.vertList:
            self.addVertex(f)
        if t not in self.vertList:
            self.addVertex(t)
        # Bug fix: Vertex defines addNeighbour (British spelling); the
        # original called the non-existent addNeighbor, so every edge
        # insertion raised AttributeError.
        self.vertList[f].addNeighbour(self.vertList[t], weight)

    def getVertex(self, key):
        # Direct O(1) lookup instead of the original O(n) generator scan
        # (which raised StopIteration for a missing key; this raises the
        # conventional KeyError instead).
        return self.vertList[key]

    def getVertices(self):
        return self.vertList.keys()
# Demo: build a graph with six numbered vertices and show the adjacency map.
g = Graph()
for vertex_key in range(6):
    g.addVertex(vertex_key)
print(g.vertList)
|
from collections import OrderedDict
import numpy as np
# This is pasted from massivepy.postprocess
def group_bins(bindata,n0=3):
    """
    Group bins into annuli. Does the obvious thing for outer annuli.
    Also groups the center single-fiber bins into annuli of approximately
    equal radius, based on having n0 as the number of bins in the "first"
    (center) annulus, and having the total number of bins enclosed in the nth
    annulus scale like n^2.
    E.g. for n0=3 the total number of bins enclosed goes 3, 12, 27, 48 etc,
    so the number in each annulus goes 3, 9, 15, 21, etc.
    Note that the final annulus in the center region will have slightly more
    or fewer bins/fibers than this.
    Returns a list of arrays, each containing the bin indexes for one annulus.
    NOTE, this requires the center bins to be sorted!!
    """
    # Center single-fiber bins are flagged by NaN rmin and are assumed to
    # occupy the leading rows of bindata (see the sorting note above).
    n_singlebins = np.sum(np.isnan(bindata['rmin']))
    if n_singlebins==0:
        ii_splits = []
    else:
        # Choose the number of center annuli so the enclosed count scales
        # roughly like n^2 * n0, then record the cumulative split points.
        n_centerannuli = int(np.rint(np.sqrt(n_singlebins/float(n0))))
        ii_splits = list(np.array(range(1,n_centerannuli))**2 * n0)
        ii_splits.append(n_singlebins)
    # Outer annuli: a new annulus starts wherever rmin changes value.
    jj_annuli = np.nonzero(np.diff(bindata['rmin'][n_singlebins:]))[0]
    ii_splits.extend(n_singlebins+jj_annuli+1)
    return np.split(range(len(bindata)),ii_splits)
def get_re_averages(moments, bininfo, re):
    # get fancy averages and error bars, a la sigma_e
    """Luminosity-weighted cumulative averages of the velocity moments,
    interpolated to the effective radius ``re``.

    ``bininfo`` is assumed to be a structured array with at least 'rmin',
    'rmax', 'r', 'flux' and 'nfibers' fields, and ``moments`` one with
    'sigma'/'h3'/'h4'/'h5'/'h6' plus matching '*err' fields -- TODO confirm
    against the callers.
    """
    re = float(re)
    bin_groups, bin_groups_en = group_bins(bininfo), []
    dt = {'names':['r','sigma','sigmaerr','h3','h3err','h4','h4err',
                   'h5','h5err','h6','h6err'],'formats':11*['f8']}
    endata = np.zeros(len(bin_groups),dtype=dt)
    for j, group in enumerate(bin_groups):
        # bin_groups_en accumulates across iterations, so row j averages over
        # everything *enclosed* by annulus j, not annulus j alone.
        bin_groups_en.extend(group)
        bd = bininfo[np.array(bin_groups_en)]
        bm = moments[np.array(bin_groups_en)]
        # Weight each bin by its total luminosity (flux per fiber * fibers).
        lum = bd['flux']*bd['nfibers']
        sqrtN = np.sqrt(len(lum))
        # Radius of the group: outer edge if known, else the largest bin radius
        # (the all-NaN case is the single-fiber center region).
        if all(np.isnan(bd['rmax'])): endata['r'][j] = np.max(bd['r'])
        else: endata['r'][j] = bd['rmax'][-1]
        for key in ['sigma','h3','h4','h5','h6']:
            endata[key][j] = np.average(bm[key],weights=lum)
            endata[key+'err'][j] = np.average(bm[key+'err'],weights=lum)/sqrtN
    items = OrderedDict()
    for key in ['sigma','h3','h4','h5','h6']:
        k = 'sig' if key == 'sigma' else key
        items[k+'avg'] = np.interp(re,endata['r'],endata[key])
        items[k+'avge'] = np.interp(re,endata['r'],endata[key+'err'])
    return items
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 31 14:49:00 2020
@author: Dell
"""
###############################################################################
###############################################################################
###############################################################################
"""####### multiphase fluid flow equation ######"""
###############################################################################
###############################################################################
###############################################################################
import numpy as np
import matplotlib.pyplot as plt
import math
import pandas as pd
# Banner describing the simulated problem.
print( "\t ########################################################################### \t" )
print( "\t ############# General solution FOR TWO PHASE SYSTEM ######## \t" )
print( "\t ############# Reservoir Simulation implicit solution to 1D ############### \t" )
print( "\t ######## closed boundary or zero flow rate Neumann boundary condition both side ############ \t" )
print( "\t ################## injection in first block(flow rate id given) ############### \t" )
print( "\t ################## production in third block (flow rate id given) ############### \t" )
print( "\t ########################################################################### \t" )
###############################################################################
""" importing data from csv file """
print("\n")
heterogeneous_reservoir_data = pd.read_csv('./General_reservoir_description.csv')
print(heterogeneous_reservoir_data.count())
print("\n")
#print(heterogeneous_reservoir_data)
# Per-block rock / well data.  NOTE: the CSV column name keeps the original
# 'gridblock_lenght(ft)' spelling -- it must match the file on disk.
Kx = np.array(heterogeneous_reservoir_data['permeability(md)'])
dx = np.array(heterogeneous_reservoir_data['gridblock_lenght(ft)'])
q_w = np.array(heterogeneous_reservoir_data['Water_flow_rate(ft3/d)'])
q_o = np.array(heterogeneous_reservoir_data['Oil_flow_rate(ft3/d)'])
print("\n absolute permeability in md \n",Kx)
print("\n distance in ft \n",dx)
print("\n water injection flow rate in ft3/d \n",q_w)
print("\n oil production flow rate in ft3/d \n",q_o)
###############################################################################
###############################################################################
""" Relative permeability data Water Saturation intitial and reduced saturation """
Swi = 0.2
print("\n\tinitial water saturation in the reservoir =", Swi)
Swr = 0.2
print("\n\tresidual water saturation in the reservoir =", Swr)
Sw = 0.2
print("\n\t Water saturation in the reservoir =", Sw)
###############################################################################
###############################################################################
number_nodes = len(Kx)-2
print("\n\tBlock node in the reservoir is ", str(number_nodes))
P0 = 1000
print("\n\tThe intial pressure of the reservoir is "+ str(P0)+ "psia")
P_left = 0
print("\n\tThe pressure at the left boundary of the reservoir is ", str(P_left))
P_right = 0
# Bug fix: this message previously said "left boundary" for P_right.
print("\n\tThe pressure at the right boundary of the reservoir is ", str(P_right))
porosity = 0.2
print("\n\tthe porosity value of the reservoir is ", str(porosity))
viscosity_Water = 1
print("\n\tthe viscosity of water value is "+ str(viscosity_Water) + "cp")
viscosity_oil = 1
# Bug fix: this message previously said "water" for the oil viscosity.
print("\n\tthe viscosity of oil value is " + str(viscosity_oil) + "cp")
area = 10000
print("\n\tCross sectional area of the reservoir " + str(area) + "ft2")
compressibility_w = 1*10**(-5)
print("\n\tcompressibility of water in the reservoir is ", str(compressibility_w))
compressibility_o = 1*10**(-5)
print("\n\tcompressibility of oil in the reservoir is ", str(compressibility_o))
Bw = 1
print("\n\t water formation volume factor is " +str(Bw)+ " rb/stb" )
Bo = 1
# Bug fix: this message previously said "water" for the oil FVF.
print("\n\t oil formation volume factor is " +str(Bo)+ " rb/stb" )
###############################################################################
###############################################################################
#################### final time for simulation is ############################
t_final = 3
print("\n\t the reservoir simulation time should be less than in days is " + str(t_final) + "days")
#################### time step ###############################################
dt_increment = 1
print("\n\t the reservoir simulation incremental time step in days is "+ str(dt_increment)+ "day")
inverse_dt = ( 1 / dt_increment )
###############################################################################
############ pressure and boundary condition #############
pressure_previous = np.ones([number_nodes,1])*P0
print("\n############## pressure distribution ################\n")
print("pressure distribution at day 0 is\n", str(pressure_previous))
###############################################################################
Saturation_previous = np.ones([number_nodes,1])*Sw
print("\n############## saturation distribution ################\n")
print("\nSaturation distribution at day 0 is\n", str(Saturation_previous))
def permeability(i, Kx, dx):
    """Harmonic-average (interface) permeability between blocks i-1 and i."""
    length_sum = dx[i - 1] + dx[i]
    flow_resistance = dx[i - 1] / Kx[i - 1] + dx[i] / Kx[i]
    return length_sum / flow_resistance
def relative_permeability_water(Sw):
    """Brooks-Corey style water relative permeability at water saturation Sw.

    Uses the module-level irreducible (Swi) and residual (Swr) saturations.
    """
    normalized_sw = (Sw - Swi) / (1 - Swi - Swr)
    return 0.2 * (normalized_sw ** 3)
"""#### relative permeabiity function from Brooks - Corey ####"""
def relative_permeability_oil(Sw):
S = ((Sw - Swi)/(1 - Swi-Swr))
relperm_o = ((1-S)**3)
#print("Relative permeability of Oil is = ", relperm_o )
return relperm_o
def Transmissibility_water(Saturation_previous):
    """Per-interface water transmissibilities (length number_nodes + 1).

    Entry i-1 couples grid blocks i-1 and i.  Boundary entries are zeroed
    when the corresponding boundary pressure (P_left / P_right) is 0,
    encoding the closed (no-flow) boundary condition.
    """
    water_trans = np.ones([number_nodes + 1, 1])
    for i in range(2, number_nodes + 2):
        Sw = Saturation_previous[i - 2]
        interface_perm = permeability(i, Kx, dx)
        krw = relative_permeability_water(Sw)
        half_spacing = (dx[i - 1] + dx[i]) / 2
        water_trans[i - 1] = (interface_perm * area * krw) / (Bw * viscosity_Water * half_spacing)
    # Closed boundaries: zero out the end transmissibilities.
    if P_left == 0:
        water_trans[0] = 0
    if P_right == 0:
        water_trans[number_nodes] = 0
    return water_trans
def Water_Transmissibility_matrix_water(Saturation_previous):
    """Assemble the tridiagonal water transmissibility matrix T_w."""
    water_trans = Transmissibility_water(Saturation_previous)
    matrix_w = np.zeros([number_nodes, number_nodes])
    for i in range(1, number_nodes, 1):
        # Diagonal: sum of the two adjacent interface transmissibilities;
        # off-diagonals: the shared interface value, negated.
        matrix_w[i][i] = water_trans[i] + water_trans[i + 1]
        matrix_w[i][i - 1] = -water_trans[i]
        matrix_w[i - 1][i] = -water_trans[i]
    matrix_w[0][0] = water_trans[0] + water_trans[1]
    return matrix_w
######################################################################################
def Transmissibility_oil(Saturation_previous):
    """Per-interface oil transmissibilities (length number_nodes + 1).

    Mirrors Transmissibility_water but with the oil relative permeability,
    viscosity and formation volume factor.
    """
    oil_trans = np.ones([number_nodes + 1, 1])
    for i in range(2, number_nodes + 2):
        Sw = Saturation_previous[i - 2]
        interface_perm = permeability(i, Kx, dx)
        kro = relative_permeability_oil(Sw)
        half_spacing = (dx[i - 1] + dx[i]) / 2
        oil_trans[i - 1] = (interface_perm * area * kro) / (Bo * viscosity_oil * half_spacing)
    # Closed boundaries: zero out the end transmissibilities.
    if P_left == 0:
        oil_trans[0] = 0
    if P_right == 0:
        oil_trans[number_nodes] = 0
    return oil_trans
def Oil_Transmissibility_matrix_oil(Saturation_previous):
    """Assemble the tridiagonal oil transmissibility matrix T_o."""
    oil_trans = Transmissibility_oil(Saturation_previous)
    matrix_o = np.zeros([number_nodes, number_nodes])
    for i in range(1, number_nodes, 1):
        # Diagonal: sum of the two adjacent interface transmissibilities;
        # off-diagonals: the shared interface value, negated.
        matrix_o[i][i] = oil_trans[i] + oil_trans[i + 1]
        matrix_o[i][i - 1] = -oil_trans[i]
        matrix_o[i - 1][i] = -oil_trans[i]
    matrix_o[0][0] = oil_trans[0] + oil_trans[1]
    return matrix_o
def Total_transmissibility_matrix(Saturation_previous):
    """Sum of the water and oil transmissibility matrices."""
    water_part = Water_Transmissibility_matrix_water(Saturation_previous)
    oil_part = Oil_Transmissibility_matrix_oil(Saturation_previous)
    return water_part + oil_part
###################################################################################
###################################################################################
###################################################################################
#print("\n############## B_matrix or accumulation matrix ################\n")
###################################################################################
###################################################################################
# Total (saturation-weighted) compressibility for the accumulation term.
Total_compressibility = (Sw)*compressibility_w + (1-Sw)*compressibility_o
#print("\n Total compressibility = ",Total_compressibility)
# B_matrix: accumulation term used in the implicit pressure solve;
# B_actual_matrix is inverted below for the explicit saturation update.
# NOTE(review): B_inverse_matrix is allocated but never used.
B_matrix = np.zeros([number_nodes , number_nodes])
B_inverse_matrix = np.zeros([number_nodes , number_nodes])
B_actual_matrix = np.zeros([number_nodes , number_nodes])
B = np.ones([number_nodes ,1])
for i in range (0,number_nodes):
    # Pore volume of block i (dx is offset by one to skip the ghost block).
    B[i] = (area*dx[i+1]*porosity)/Bw
    B_matrix[i][i] = B[i]*Total_compressibility
    B_actual_matrix[i][i] = B[i]/dt_increment
#print("\n B_matrix is\n", str(B_matrix))
B_actual_matrix = np.linalg.inv(B_actual_matrix)
#print("\nB_actual_matrix\n",B_actual_matrix)
###################################################################################
#print("\n############## Q_matrix ################\n")
######################################### injection water matrx
Q_matrix_w = np.zeros([number_nodes , 1])
for i in range (0,number_nodes):
    Q_matrix_w[i] = q_w[i+1]
print("\n Q_matrix_w injection flow rate matrix for Water is\n", str(Q_matrix_w))
######################################### injection water matrx
Q_matrix_o = np.zeros([number_nodes , 1])
for i in range (0,number_nodes):
    Q_matrix_o[i] = q_o[i+1]
#print("\n Q_matrix_o production flow rate matrix for oil is\n", str(Q_matrix_o))
Total_flow_rate_Matrix = (Bo/Bw)*Q_matrix_o + Q_matrix_w
print("\n Total flow rate matrix \n", Total_flow_rate_Matrix)
#print("\n##################################################################################")
#
#B1_Sw = np.ones([t_final , 1])
#B2_Sw = np.ones([t_final , 1])
#B3_Sw = np.ones([t_final , 1])
#
#row = np.ones([t_final , 1])
#
#Kr_Sw = np.ones([t_final , 1])
#Kr_So = np.ones([t_final , 1])
#
#Kr2_Sw = np.ones([t_final , 1])
#Kr2_So = np.ones([t_final , 1])
#
#Kr3_Sw = np.ones([t_final , 1])
#Kr3_So = np.ones([t_final , 1])
# Block-centre x coordinates (ft) for the pressure-profile plot.
x = [166.5,499.5,832.5]
print(x)
x = np.array(x)
# Implicit-pressure / explicit-saturation (IMPES-style) time loop.
for k in range(0, t_final, 1):
    plt.figure(1)
    plt.title('Pressure profile over grid block, Running time ' + str(t_final) + 'days' , fontsize=14 )
    plt.xlabel('Grid block distance ' , fontsize=16 )
    plt.ylabel('pressure profile ' , fontsize=16)
    plt.plot( x , pressure_previous)
    # NOTE(review): plt.show is referenced without parentheses, so this line
    # is a no-op -- decide whether plt.show() or plt.draw() was intended.
    plt.show
    # row[k] = pressure_previous[0]
    # B1_Sw[k] = Saturation_previous[0]
    # B2_Sw[k] = Saturation_previous[1]
    # B3_Sw[k] = Saturation_previous[2]
    # #print("\nB_actual_matrix\n",B_actual_matrix)
    # print("#################( RUNNING TIME ) ####### = " + str(k) + " DAY #############")
    # Implicit pressure solve.  The 6.33e-3 factor is presumably the
    # field-unit transmissibility conversion constant -- TODO confirm the
    # unit system against the course/reference material.
    pressure_previous = np.dot(np.linalg.inv(( 6.33*10**(-3)*Total_transmissibility_matrix(Saturation_previous) + inverse_dt*B_matrix )), (np.dot((inverse_dt*B_matrix ) , pressure_previous) + Total_flow_rate_Matrix))
    minus_water = - Water_Transmissibility_matrix_water(Saturation_previous)
    # print("\n\nminus_water", minus_water)
    # print(minus_water.shape)
    # print(pressure_previous.shape)
    # Explicit saturation update: dSw = B_actual^-1 * (-T_w * p + q_w).
    S_Matrix = np.dot( minus_water , pressure_previous)
    # print(S_Matrix.shape)
    #print("\n S_Matrix is\n = ",S_Matrix)
    S_Matrix_addition = S_Matrix + Q_matrix_w
    # print(S_Matrix_addition.shape)
    #print("\n S_Matrix_addition is = ", S_Matrix_addition )
    MM = np.dot(B_actual_matrix, S_Matrix_addition)
    # print(MM.shape)
    #print(" MM = \n", MM )
    Saturation_previous = Saturation_previous + MM
    Saturation_previous = np.around(Saturation_previous, decimals = 40)
    # print(" pressure and saturation after " + str(k) + " iteration is ")
    # print(" previous pressure \n",pressure_previous)
    # print("Saturation previous \n", Saturation_previous)
#print(Saturation_previous.shape)
#
# NOTE(review): this saves whatever figure is current after the loop.
plt.savefig('Pressure vs distanceday2000.png', dpi=1200, bbox_inches='tight')
#
##
"""
submission = pd.DataFrame({'id': data_new_test_file['id'], 'left ': prediction})
##Visualize the first 5 rows
submission.head()
filename = 'all_class742308.csv'
submission.to_csv(filename,index=False)
print('Saved file: ' + filename)
#
"""
"""
#print(Kr_Sw)
Kr_Sw = np.array(Kr_Sw)
Kr_So = np.array(Kr_So)
Kr2_Sw = np.array(Kr2_Sw)
Kr2_So = np.array(Kr2_So)
Kr3_Sw = np.array(Kr3_Sw)
Kr3_So = np.array(Kr3_So)
B1_Sw = np.array(B1_Sw)
B2_Sw = np.array(B2_Sw)
B3_Sw = np.array(B3_Sw)
plt.figure(figsize=(12,8))
plt.legend(["liquid density", "gas density"], prop={"size":20})
plt.plot(B3_Sw, Kr_Sw, color = 'r', label= 'Relative perm of water block 1 ')
plt.plot(B3_Sw, Kr_So, color = 'b', label= 'Relative perm of oil block 1 ')
plt.plot(B3_Sw, Kr2_Sw, color = 'c', label= 'Relative perm of water block 2 ')
plt.plot(B3_Sw, Kr2_So, color = 'g', label= 'Relative perm of oil block 2 ')
plt.plot(B3_Sw, Kr3_Sw, color = 'm', label= 'Relative perm of water block 3 ')
plt.plot(B3_Sw, Kr3_So, color = 'k', label= 'Relative perm of oil block 3 ')
plt.title('Saturation and relative permeability graph running time ' + str(t_final) + 'days' , fontsize=23 )
plt.xlabel('Water Saturation ' , fontsize=16 )
plt.ylabel('relative permeability of oil and water ' , fontsize=16)
plt.rc('xtick', labelsize=15)
plt.rc('ytick', labelsize=15)
plt.legend()
# save the figure
#plt.savefig('saturation-relative-permeability27.png', dpi=1200, bbox_inches='tight')
plt.show()
"""
###############################################################################
########## DON't DIsturb ######################
"""
row = np.array(row)
B1_Sw = np.array(B1_Sw)
B2_Sw = np.array(B2_Sw)
B3_Sw = np.array(B3_Sw)
B1_So = np.ones([t_final , 1])
B2_So = np.ones([t_final , 1])
B3_So = np.ones([t_final , 1])
for h in range(0, t_final, 1):
B1_So[h] = 1 - B1_Sw[h]
B2_So[h] = 1 - B2_Sw[h]
B3_So[h] = 1 - B3_Sw[h]
B1_So = np.array(B1_So)
B2_So = np.array(B2_So)
B3_So = np.array(B3_So)
#print(row)
#print(B1_Sw)
plt.figure(figsize=(8,6))
plt.legend(["liquid density", "gas density"], prop={"size":20})
plt.plot(row, B1_Sw, color = 'r', label= 'saturation profile block 1 ')
plt.plot(row, B2_Sw, color = 'g', label= 'saturation profile block 2 ')
plt.plot(row, B3_Sw, color = 'b', label= 'saturation profile block 3 ')
plt.plot(row, B1_So, color = 'c', label= ' oil saturation profile block 1 ')
plt.plot(row, B2_So, color = 'y', label= ' oil saturation profile block 2 ')
plt.plot(row, B3_So, color = 'm', label= ' oil saturation profile block 3 ')
plt.title('pressure and saturation graph', fontsize=23 )
plt.xlabel('pressure psi ' , fontsize=23 )
plt.ylabel('Saturation ', fontsize=23)
plt.rc('xtick', labelsize=15)
plt.rc('ytick', labelsize=15)
plt.legend()
plt.show()
"""
|
__author__ = "TUSHIT AGARWAL"
from requests import get
import json
from openpyxl import Workbook, load_workbook
import os
from random import shuffle, choice
from time import sleep
import gzip
import shutil
import urllib
import urllib.request
def kelvin_to_Celcius(k):
    """Convert a temperature from kelvin to degrees Celsius."""
    celsius = k - 273.15
    return celsius
def Celcius_to_Fahrenheit(n):
    """Convert a temperature from degrees Celsius to degrees Fahrenheit."""
    scaled = (n * 9.0) / 5.0
    return scaled + 32.0
def createExcelSheet():
    """Build task1.xlsx with three sheets, fill 'City Tokens' from
    city.list.json, and seed 'Weather' with the first 20 cities using a
    random unit and update flag per row."""
    workbook = Workbook()
    workbook.create_sheet(title = "Weather", index = 0)
    workbook.create_sheet(title = "City Tokens", index = 1)
    workbook.create_sheet(title = "Additional Details", index = 2)
    token_sheet = workbook['City Tokens']
    token_sheet.append(("CITY", "CITY_ID"))
    city_rows = []
    with open(os.path.join(os.getcwd(), "city.list.json"), 'r', encoding = 'utf-8') as f:
        for city in json.loads(f.read()):
            token_sheet.append((city['name'], city['id']))
            city_rows.append((city['name'], city['id']))
    weather_sheet = workbook["Weather"]
    weather_sheet.append(["CITY TOKEN", "TEMPERATURE", "HUMIDITY", "UNIT", "STATE UPDATE(0/1)"])
    for index in range(20):
        weather_sheet.append([city_rows[index][1], "", "", choice(["C", "F"]), choice([0,1])])
    workbook.save('task1.xlsx')
def wait(n):
    """Sleep for n+1 one-second ticks, animating a spinner on one line."""
    spinner = ["/", "-", "\\", "|"]
    elapsed = 0
    while elapsed <= n:
        sleep(1)
        print(f"Waiting for {n} seconds...{spinner[elapsed % 4]}", end = "\r")
        elapsed += 1
def main():
    """Main loop: ensure task1.xlsx exists, then periodically refresh
    temperature/humidity for every row whose STATE UPDATE flag is 1.

    Requires an OpenWeatherMap API key in key.txt and city.list.json on
    disk (downloaded by the __main__ block).  Saves the workbook after
    each pass and on exit/interrupt.
    """
    with open("key.txt", 'r') as f:
        key = f.read()
    if os.path.isfile(os.path.join(os.getcwd(), "task1.xlsx")):
        try:
            print("Loading Workbook...", end = "\r")
            wb = load_workbook("task1.xlsx")
        except Exception:
            # Corrupt workbook: recreate it from scratch.
            print("Failed to load Workbook due to some error. Deleting Existing Workbook...", end = "\r")
            sleep(2)
            os.remove("task1.xlsx")
            print("Creating Workbook..." + " " * 50, end = "\r")
            createExcelSheet()
        finally:
            wb = load_workbook("task1.xlsx")
            print("Workbook loaded successfully..." + " " * 50, end = "\r")
    else:
        print("No Workbook found... Creating a new Workbook" + " " * 50, end = "\r")
        createExcelSheet()
        wb = load_workbook("task1.xlsx")
    sheet = wb["Weather"]
    while True:
        print("Fetching API..." + " " * 30, end = "\r")
        try:
            for row in sheet.iter_rows():
                cityToken, temperature, humidity, unit, update = map(lambda x: x.value, row)
                # Header row has update == "STATE UPDATE(0/1)", so it is skipped.
                if update == 1:
                    d = json.loads(get(f"http://api.openweathermap.org/data/2.5/weather?id={cityToken}&appid={key}").text)
                    if unit == "F":
                        # Bug fix: the API returns kelvin, so convert
                        # kelvin -> Celsius -> Fahrenheit.  The original
                        # applied the two conversions in the wrong order.
                        temperature = "%.3f" %Celcius_to_Fahrenheit(kelvin_to_Celcius(d["main"]["temp"]))
                    elif unit == "C":
                        temperature = "%.3f" %kelvin_to_Celcius(d["main"]["temp"])
                    humidity = d["main"]["humidity"]
                    data = [cityToken, temperature, humidity, unit, update]
                    for i in range(5):
                        row[i].value = data[i]
            print("Updating Values to Workbook and saving file....", end = "\r")
            wb.save("task1.xlsx")
            print(" " * 100, end = "\r") # For clearing the output stream.
            wait(10)
        except KeyboardInterrupt:
            print("Exiting program...Saving the file..." + " " * 50, end = "\r")
            wb.save("task1.xlsx")
            break
        except Exception:
            # Narrowed from a bare except so SystemExit still propagates.
            print("Unknown error occurs...exiting the program..saving the file...", end = "\r")
            wb.save("task1.xlsx")
            break
    print('Workbook updated successfully...' + " " * 50)
if __name__ == "__main__":
    # Download and unpack the city index before starting the refresh loop.
    # Bug fix: urllib.request.URLopener has been deprecated since Python 3.3
    # and removed in 3.14; urlretrieve performs the same one-shot download.
    urllib.request.urlretrieve("http://bulk.openweathermap.org/sample/city.list.json.gz", "city.list.json.gz")
    with gzip.open('city.list.json.gz', 'rb') as f_in:
        with open('city.list.json', 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)
    main()
|
# -*- coding: utf-8 -*-
class dict_with_default(dict):
    """Dictionary that yields a fixed default value for missing keys.

    :param default: value returned whenever a key is absent
    """

    def __init__(self, default, *args, **kwargs):
        self.default = default
        super(dict_with_default, self).__init__(*args, **kwargs)

    def __missing__(self, key):
        # dict calls this instead of raising KeyError on a failed lookup.
        return self.default
def dict_factory(name, default):
    """Create and return a named dict subclass whose missing-key lookups
    yield ``default`` instead of raising KeyError.

    :param name: name of the subclass
    :type name: string
    :param default: the value returned for absent keys
    """
    namespace = {"__missing__": lambda self, key: default}
    return type(name, (dict,), namespace)
class dict_default(dict):
    """Dictionary that echoes back a missing key instead of raising KeyError."""

    def __missing__(self, absent_key):
        return absent_key
def bitwise_operator(frame, operator):
    """Returns the result of applying the bitwise ``|`` or ``&`` operator to a
    list of series

    :param frame: data with colums to apply the bitwise operator to
    :type frame: DataFrame
    :param operator: the operator to apply. 'and' and 'or' are acceptable
        values
    :raises ValueError: if ``operator`` is neither 'and' nor 'or'
    """
    # Bug fix: an unknown operator previously fell through silently and
    # returned the first column unchanged.  Validate up front, matching
    # bitwise_xor's ValueError behaviour for bad input.
    if operator not in ('and', 'or'):
        raise ValueError("operator must be 'and' or 'or'.")
    num_cols = frame.shape[1]
    # A single column needs no combining.
    series = frame.iloc[:, 0]
    for i in range(1, num_cols):
        if operator == 'and':
            series = series & frame.iloc[:, i]
        else:
            series = series | frame.iloc[:, i]
    return series
def bitwise_xor(frame):
    """Returns the result of applying the bitwise ``^`` operator to two Series
    in a DataFrame

    :param frame: data with colums to apply the bitwise or operator to
    :type frame: DataFrame
    :raises ValueError: if the frame does not have exactly two columns
    """
    if frame.shape[1] != 2:
        raise ValueError("DataFrame must have 2 columns.")
    return frame.iloc[:, 0] ^ frame.iloc[:, 1]
def bitwise_not(frame):
    """Returns the result of applying the bitwise ``~`` operator to the
    boolean conversion of the DataFrame

    :param frame: data with colums to apply the bitwise or operator to
    :type frame: DataFrame
    """
    as_bool = frame.astype('bool')
    return ~as_bool
|
import math
from tkinter import *
# Build the Tk window with a single grey 640x360 drawing canvas that the
# draw helpers below share via the module-level name ``c``.
window = Tk()
c = Canvas(window, width=640, height=360, bg='grey')
c.pack()
def drawPoint(x, y, color):
    """Plot a 1x1 point at (x, y) on the shared canvas."""
    left = x - 1
    top = y - 1
    c.create_rectangle(left, top, x, y, outline=color)
def drawTriangle(posX, posY, posX1, posY1, posX2, posY2, colour):
    """Draw a filled triangle through the three given vertices."""
    vertices = (posX, posY, posX1, posY1, posX2, posY2)
    c.create_polygon(*vertices, fill=colour)
def drawCircle(posX, posY, radiusX, pointCount, color):
    """Approximate a circle of radius ``radiusX`` centred on (posX, posY):
    plots sample points and joins consecutive samples with blue segments.

    NOTE(review): only ceil(pointCount / 4) samples are generated while the
    angular step still spans the full 2*pi, so the circle is drawn at a
    quarter of the requested resolution -- confirm the /4 is intentional.
    """
    pointCount2 = math.ceil(pointCount / 4)
    k = 2.0 * math.pi / pointCount2
    for i in range(0, pointCount2):
        phi = i * k
        drawPosX = radiusX * math.cos(phi)
        drawPosY = radiusX * math.sin(phi)
        drawPosX += posX
        drawPosY += posY
        drawPoint(drawPosX, drawPosY, color)
        # Connect this sample to the next so the outline appears continuous.
        nextphi = (i+1)*k
        nextx = radiusX*math.cos(nextphi)
        nexty = radiusX*math.sin(nextphi)
        nextx+=posX
        nexty+=posY
        c.create_line(drawPosX, drawPosY, nextx, nexty, fill='blue')
# Draw concentric rings, coloured by a normalised "height" band.
circleRadiuses = [90, 80, 70, 60, 50, 40, 30, 20, 10]
#drawTriangle(0, 0, 640, 0, 640, 360, 'red')
# Scale factor for the band spread; values > 1 compress the colour bands.
gradient = 1
for radius in circleRadiuses:
    # Normalised height for this ring, clamped to at most 1.0 below.
    yPosition = (circleRadiuses[0]-radius+(10*gradient)) /circleRadiuses[0]
    yPosition = yPosition * gradient
    if(yPosition > 1.0):
        yPosition = 1.0
    # Map the normalised height onto a colour band.
    if(yPosition < 0.2):
        color = 'black'
    if(yPosition >= 0.2 and yPosition < 0.4):
        color = 'red'
    if(yPosition >= 0.4 and yPosition < 0.6):
        color = 'blue'
    if(yPosition >= 0.6 and yPosition < 0.8):
        color = 'green'
    if(yPosition >= 0.8 and yPosition < 1.0):
        color = 'brown'
    if(yPosition == 1.0):
        color = 'yellow'
    drawPoint(320, 180, 'yellow')
    # Coarser rings get proportionally fewer sample points.
    currentPointCount = (round(radius / 4)) * 2
    # NOTE(review): yMultiply is assigned but never used in this script.
    yMultiply = yPosition
    drawCircle(320, 180, radius, currentPointCount, color)
# in blitz , to get Y position , multiply yPosition variable by mountain Z scale
# NOTE(review): there is no window.mainloop() call, so the window may close
# immediately when run as a plain script -- confirm intended usage.
|
# Read four weights, report the average and maximum, convert one chosen
# entry to kilograms, then print the sorted list.
one = input('Enter weight 1:\n')
two = input('Enter weight 2:\n')
three = input('Enter weight 3:\n')
four = input('Enter weight 4:\n')
t = [float(value) for value in (one, two, three, four)]
print('Weights:', t)
n = len(t)
avg = sum(t) / n
print()
print('Average weight:', '%0.2f' % (avg))
# Bug fix: the original shadowed the builtin max and seeded it with t[1].
print('Max weight:', '%0.2f' % (max(t)))
print()
user = int(input('Enter a list location (1 - 4):\n'))
# Bug fix: the original compared the 1-based location against 0-based loop
# indices, so location 4 printed nothing and location 0 aliased the last
# element.  Validate the range and index directly.
if 1 <= user <= n:
    print('Weight in pounds:', '%0.2f' % (t[user - 1]))
    kilograms = t[user - 1] / 2.2
    print('Weight in kilograms:', '%0.2f' % (kilograms))
print()
t.sort()
print('Sorted list:', t)
|
from django.conf.urls import url
from . import views
# URL routes for this app: the map view, the submission form, the form's
# POST endpoint, and the about page.
# NOTE(review): django.conf.urls.url() is deprecated (removed in Django 4.0);
# migrate to django.urls.re_path/path when the project's Django version allows.
urlpatterns = [
    url(r'^map/$', views.index, name='index'),
    url(r'^form/$', views.form, name='form'),
    url(r'^form/post/$', views.post, name='post'),
    url(r'^about/$', views.about, name='about')
]
#!/usr/bin/env python3
# -*-coding: utf-8-*-
# Author : Christopher Lee
# License: MIT License
# File : downloader.py
# Date : 2016-12-29 22:41
# Version: 0.0.1
# Description: description of this file.
from time import sleep
import requests
from concurrent.futures import ThreadPoolExecutor
import pika
__version__ = '0.0.1'
__author__ = 'Chris'
class Downloader(object):
    """Download URLs on a shared thread pool, firing a callback per success."""

    def __init__(self, max_workers=15):
        self._executor = ThreadPoolExecutor(max_workers)

    def submit(self, url, callback=None):
        """Schedule `url` for download; `callback` (if callable) runs on HTTP 200."""
        self._executor.submit(self._download, url, callback)

    @staticmethod
    def _download(url, callback=None):
        """Fetch `url`, log the outcome, invoke `callback` on success."""
        try:
            headers = {
                'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) '
                'Chrome/50.0.2661.102 Safari/537.36',
                'Referer': 'https://movie.douban.com'
            }
            # BUG FIX: headers was passed positionally, which requests.get()
            # interprets as the `params` argument (query string) -- so no
            # custom headers were ever sent.  Pass it as the keyword.
            response = requests.get(url, headers=headers)
            if response.status_code == 200:
                print('[{}] Downloaded url {}...OK'.format(response, url))
                if callback and callable(callback):
                    callback()
            else:
                print('[{}] Downloaded url {}...Failed'.format(response, url))
        except Exception as err:
            print(err)
            print('Downloaded url {}...Failed'.format(url))
        # Crude politeness delay between downloads on each worker thread.
        sleep(1)
# Single shared downloader instance used by the queue callback below.
_downloader = Downloader()


def message_received(channel, method, properties, body):
    """Pika delivery callback: hand the URL to the downloader, ack on success."""
    declared = channel.queue_declare(queue='urls_queue', durable=True)
    print(declared.method)

    def _ack():
        channel.basic_ack(delivery_tag=method.delivery_tag)

    _downloader.submit(body.decode(), _ack)
def main():
    """Connect to the local broker and consume 'urls_queue' forever."""
    params = pika.ConnectionParameters('localhost')
    connection = pika.BlockingConnection(params)
    channel = connection.channel()
    # Durable queue so pending URLs survive a broker restart.
    channel.queue_declare(queue='urls_queue', durable=True)
    channel.basic_consume(message_received, queue='urls_queue')
    print('Downloader started.')
    # One unacked message per consumer at a time.
    channel.basic_qos(prefetch_count=1)
    channel.start_consuming()


if __name__ == '__main__':
    main()
|
# Python 2 module (print statement below).
# Announce and create the package-wide Reddit client instance.
from . import reddit
print "Creating new reddit instance"
red = reddit.Reddit()
import io
import json
import os
import sys
# Aggregate per-month tweet counts for the users listed in succ.txt and
# dump them as a MATLAB-style row vector to succ_month_stat.txt.
#time_container=[0]*60*24
month_container = [0] * (12 * 8)  # one bucket per month, Jan 2006 .. Dec 2013
# Three-letter month abbreviation -> month number.
mk = {
    'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr': 4, 'May': 5, 'Jun': 6,
    'Jul': 7, 'Aug': 8, 'Sep': 9, 'Oct': 10, 'Nov': 11, 'Dec': 12,
}
with open('succ.txt', 'r') as f:
    for line in f:
        if line == '\n':
            continue
        usr_id = int(line)
        # Skip users whose timeline dump is empty (json.load would fail).
        if os.stat(str(usr_id) + '/user_timeline.json').st_size == 0:
            continue
        # `with` so each timeline file is closed (the old code leaked them).
        with open(str(usr_id) + '/user_timeline.json') as usr_timeline_data:
            usr_timeline = json.load(usr_timeline_data)
        for tweet in usr_timeline:
            # created_at format: "Mon Oct 10 12:34:56 +0000 2011"
            week, month, date, time, time_zone, yr = tweet['created_at'].split()
            hour, minute, second = [int(x) for x in time.split(':')]  # parsed, currently unused
            # Months since Jan 2006, zero-based.
            # NOTE(review): a tweet outside 2006-2013 would raise IndexError.
            total_month = (int(yr) - 2006) * 12 + mk[month] - 1
            month_container[total_month] = month_container[total_month] + 1
with open('succ_month_stat.txt', 'w') as f_succ:
    # BUG FIX: the old loop wrote indexes 0..12*8-2 minus one (range stop was
    # 12*8-2) and then emitted index 12*8-1, silently dropping bucket 94.
    # Join over the whole list so every month is written.
    f_succ.write('[' + ','.join(str(count) for count in month_container) + '];')
|
#!/usr/bin/python
# Update IATA & ICAO code for planes from Wikipedia
#
# Prereqs:
# virtualenv env
# source env/bin/activate
# curl https://bootstrap.pypa.io/get-pip.py | python
# pip install mysql-connector unittest
import argparse
import codecs
import mysql.connector
import sys
import urllib2
from collections import defaultdict
import database_connector
class OpenFlightsAirlines(object):
    """In-memory index of OpenFlights airline rows, keyed by IATA and ICAO."""

    def __init__(self, aldb):
        self.aldb = aldb
        self.of_iata = defaultdict(list)
        self.of_icao = defaultdict(list)

    def load_all_airlines(self):
        """Load every airline row from the database into both code indexes."""
        # BUG FIX: this method used the module-global `aldb` instead of the
        # handle passed to __init__, silently coupling the class to script
        # globals.  Use self.aldb.
        self.aldb.cursor.execute('SELECT * FROM airlines')
        for row in self.aldb.cursor:
            if row['iata'] == "":
                row['iata'] = None  # normalize empty IATA codes to None
            self.of_iata[row['iata']].append(row)
            self.of_icao[row['icao']].append(row)

    def match(self, wp):
        """Return the OpenFlights airline matching Wikipedia row `wp`, or None.

        An ICAO hit must be corroborated by IATA, callsign or country;
        an IATA-only hit must be corroborated by callsign or country.
        """
        icao, iata, callsign, country = wp['icao'], wp['iata'], wp['callsign'], wp['country']
        if icao and icao in self.of_icao:
            for airline in self.of_icao[icao]:
                if (iata and airline['iata'] == iata) or airline['callsign'] == callsign or airline['country'] == country:
                    return airline
        if iata and iata in self.of_iata:
            for airline in self.of_iata[iata]:
                if airline['callsign'] == callsign or airline['country'] == country:
                    # parenthesized print works on both Python 2 and 3
                    print("IATA MATCH %s, %s" % (airline, wp))
                    return airline
        return None

    def diff(self, of, wp):
        """Return {field: wp value} for fields where Wikipedia differs."""
        fields = {}
        for field in ['name', 'callsign', 'icao', 'iata']:
            if wp[field] and wp[field] != of[field]:
                fields[field] = wp[field]
        return fields

    def update_from_wp(self, of, wp):
        """Push any differing Wikipedia fields into the database."""
        fields = self.diff(of, wp)
        if fields:
            self.aldb.update_from_wp(of['apid'], fields)
class AirlineDB(database_connector.DatabaseConnector):
    """Database connector with airline-specific update helpers."""

    def update_from_wp(self, of_apid, fields):
        """Apply Wikipedia-sourced field changes to one row.

        of_apid: OpenFlights id of the row to update.
        fields:  dict of column name -> new value.
        """
        # BUG FIX: the old code formatted the *values* straight into the SQL
        # string, then passed that whole "a=b, c=d" text as a single query
        # parameter for the SET clause -- which the driver would quote as one
        # string (broken SQL) and which was injection-prone.  Build the SET
        # clause from column names only and pass values as parameters.
        pairs = list(fields.items())
        set_clause = ', '.join('%s=%%s' % column for column, _ in pairs)
        # NOTE(review): table/column 'airports'/'apid' in an *airline* updater
        # looks like a copy-paste from the airport tool -- confirm it should
        # not be 'airlines'/'alid'.
        self.safe_execute(
            'UPDATE airports SET %s WHERE apid=%%s' % set_clause,
            tuple(value for _, value in pairs) + (of_apid,))
class WikipediaArticle(object):
    # Fetches the "List of airline codes (X)" Wikipedia article wikitext and
    # parses each table row into an airline dict.  Python 2 only (urllib2,
    # unicode, str.translate with a deletion argument).

    def __init__(self):
        self.airlines = []

    def load(self, letter):
        # Download the raw wikitext for one letter's article and split it on
        # '|-' row separators; the first two separators belong to the table
        # header, so `header` counts them down before rows are parsed.
        airline_url = 'https://en.wikipedia.org/w/api.php?action=query&titles=List_of_airline_codes_(%s)&prop=revisions&rvprop=content&format=php'
        response = urllib2.urlopen(airline_url % letter).read()
        block = []
        header = 2
        for line in response.splitlines():
            if line.startswith('|-'):
                if header > 0:
                    header -= 1
                else:
                    self.airlines.append(self.parse_airline(block))
                    block = []
            else:
                block.append(line)

    # |-
    # ! IATA
    # ! ICAO
    # ! Name
    # ! Call sign
    # ! Country
    # ! Comments
    def parse_airline(self, block):
        # The first five cells of a row are IATA, ICAO, name, call sign,
        # country (comments cell is ignored).
        iata, icao, name, callsign, country = [self.clean(x) for x in block[0:5]]
        return {'icao': icao, 'iata': iata, 'name': name, 'callsign': callsign, 'country': country}

    def clean(self, x):
        # | ''[[Foo|Bar]]'' -> Bar
        # Keep the text after the last '|' (wiki-link display text), delete
        # bracket/pipe characters (py2 str.translate deletion form), strip
        # italics quotes, and decode to unicode.  Empty cells become None.
        x = unicode(x.split('|')[-1].translate(None, "[|]").replace("''", ""), 'utf-8')
        if x == '':
            return None
        return x
if __name__ == "__main__":
    # Needed to allow piping UTF-8 (srsly Python wtf)
    sys.stdout = codecs.getwriter('utf8')(sys.stdout)
    parser = argparse.ArgumentParser()
    parser.add_argument('--live_run', default=False, action='store_true')
    parser.add_argument('--local', default=False, action='store_true')
    args = parser.parse_args()
    aldb = AirlineDB(args)
    ofa = OpenFlightsAirlines(aldb)
    ofa.load_all_airlines()
    wpa = WikipediaArticle()
    # Only the "(A)" article is processed for now.
    wpa.load('A')
    count = 0
    updated = 0
    added = 0
    # Match each Wikipedia airline against OpenFlights: update matches,
    # report the rest as new.
    for airline in wpa.airlines:
        of_airline = ofa.match(airline)
        if of_airline:
            ofa.update_from_wp(of_airline, airline)
        else:
            print 'NEW', airline
            added += 1
        count += 1
    # NOTE(review): `updated` is never incremented anywhere, so this summary
    # always reports "0 updated" regardless of how many rows changed.
    print "%s new, %s updated, %s total" % (added, updated, count)
|
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 20 11:09:53 2021
@author: user
"""
#key 1: use the middle rating as the pivot of the outer loop
#key 2: count teams by multiplying the left/right counts
class Solution(object):
    def numTeams(self, rating):
        """
        :type rating: List[int]
        :rtype: int

        Count index triples i < j < k whose ratings are strictly increasing
        or strictly decreasing.  Each element is treated as the *middle* of a
        team; the answer is the product of the matching counts on each side.
        """
        total = 0
        for mid, pivot in enumerate(rating):
            smaller_left = sum(1 for r in rating[:mid] if r < pivot)
            larger_left = sum(1 for r in rating[:mid] if r > pivot)
            smaller_right = sum(1 for r in rating[mid:] if r < pivot)
            larger_right = sum(1 for r in rating[mid:] if r > pivot)
            # ascending teams: smaller on the left * larger on the right;
            # descending teams: larger on the left * smaller on the right.
            total += smaller_left * larger_right + larger_left * smaller_right
        return total
from django.shortcuts import render
from django.shortcuts import get_object_or_404, render, render_to_response
from django.contrib.auth import authenticate, login, logout
from django.http import HttpResponseRedirect, HttpResponse, Http404,JsonResponse
from django.core.mail import send_mail, EmailMessage
from django.contrib.auth.models import User, Permission
from django.utils import timezone
from django.conf import settings
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.files.storage import FileSystemStorage
from django.core.files import File
from django.db.models import Q
from django.core.exceptions import SuspiciousOperation
from django.contrib.auth.decorators import login_required, permission_required
from xhtml2pdf import pisa
from django.template.loader import get_template
from rapidsignnow.settings import CONSTANCE_CONFIG_FIELDSETS
from rapidsignnow.settings import INVOICE_HARDCODED_TO_EMAIL
from constance import config
from io import BytesIO
import datetime
import StringIO
import zipfile
import itertools
import xlsxwriter
import boto
import os
import requests
import mimetypes
import magic
import urllib
import numpy
from boto.s3.key import Key
from datetime import timedelta
from django.utils import timezone
from dateutil import relativedelta
from reportlab.lib import colors
from reportlab.lib.enums import TA_CENTER, TA_RIGHT
from reportlab.lib.pagesizes import A4, landscape
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.units import inch
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle
PAGE_WIDTH = A4[0]
PAGE_HEIGHT = A4[1]
styles = getSampleStyleSheet()
from system_admin.models import SystemAdmin
from law_firm.models import LawFirm, LawFirmRates
from broker.models import Broker
from master_broker.models import MasterBroker
from investigator.models import Investigator, InvestigatorRates
from address.models import Address
from case.models import Case
from document.models import Document
from attached_document.models import AttachedDocument
from invoice.models import Invoice
from invoice_line.models import InvoiceLine
from status_update.models import StatusUpdate
# Language choices offered in forms, roughly ordered by number of speakers.
# NOTE(review): "disputed" and "discuss" look like artifacts scraped from a
# Wikipedia language table rather than real languages -- confirm and remove.
all_languages = ["English","Mandarin","Spanish","Hindi","Arabic","Portuguese","Bengali","Russian","Japanese","Punjabi",
    "German","Javanese","Wu","Shanghainese","Malay","Malaysian","Indonesian","Telugu","Vietnamese","Korean",
    "French","Marathi","Tamil","Urdu","Turkish","Italian","Yue","Cantonese","Thai","Gujarati","Jin",
    "Southern Min","Hokkien","Teochew","Persian","Polish","Pashto","Kannada","Xiang","Malayalam","Sundanese",
    "Hausa","Odia","Burmese","Hakka","Ukrainian","Bhojpuri","Tagalog","Yoruba","Maithili","Uzbek","Sindhi",
    "Amharic","Fula","Romanian","Oromo","Igbo","Azerbaijani","Awadhi","Gan Chinese","Cebuano","Dutch",
    "Kurdish","Serbo-Croatian","Malagasy","Saraiki","Nepali","Sinhalese","Chittagonian","Zhuang","Khmer",
    "Turkmen","Assamese","Madurese","Somali","Marwari","Magahi","Haryanvi","Hungarian","Chhattisgarhi",
    "Greek","Chewa","Deccan","Akan","Kazakh","Northern Min","disputed","discuss","Sylheti","Zulu","Czech",
    "Kinyarwanda","Dhundhari","Haitian Creole","Eastern Min","Fuzhounese","Ilocano","Quechua","Kirundi",
    "Swedish","Hmong","Shona","Uyghur","Hiligaynon/Ilonggo","Mossi","Xhosa","Belarusian","Balochi","Konkani"]
# Country choices (ISO-style English names, USA first) used by address forms.
countries = ["United States of America","Afghanistan","Aland Islands","Albania","Algeria","American Samoa","Andorra",
    "Angola","Anguilla","Antarctica","Antigua and Barbuda","Argentina","Armenia","Aruba","Australia","Austria",
    "Azerbaijan","Bahamas","Bahrain","Bangladesh","Barbados","Belarus","Belgium","Belize","Benin","Bermuda",
    "Bhutan","Bolivia, Plurinational State of","Bonaire, Sint Eustatius and Saba","Bosnia and Herzegovina",
    "Botswana","Bouvet Island","Brazil","British Indian Ocean Territory","Brunei Darussalam","Bulgaria",
    "Burkina Faso","Burundi","Cambodia","Cameroon","Canada","Cape Verde","Cayman Islands",
    "Central African Republic","Chad","Chile","China","Christmas Island","Cocos (Keeling) Islands","Colombia",
    "Comoros","Congo","Congo, the Democratic Republic of the","Cook Islands","Costa Rica","Croatia","Cuba",
    "Cyprus","Czech Republic","Denmark","Djibouti","Dominica","Dominican Republic","Ecuador","Egypt",
    "El Salvador","Equatorial Guinea","Eritrea","Estonia","Ethiopia","Falkland Islands (Malvinas)",
    "Faroe Islands","Fiji","Finland","France","French Guiana","French Polynesia","French Southern Territories",
    "Gabon","Gambia","Georgia","Germany","Ghana","Gibraltar","Greece","Greenland","Grenada","Guadeloupe","Guam",
    "Guatemala","Guernsey","Guinea","Guinea-Bissau","Guyana","Haiti","Heard Island and McDonald Islands",
    "Honduras","Hong Kong","Hungary","Iceland","India","Indonesia","Iran, Islamic Republic of","Iraq","Ireland",
    "Isle of Man","Israel","Italy","Jamaica","Japan","Jersey","Jordan","Kazakhstan","Kenya","Kiribati",
    "Korea, Democratic People's Republic of","Korea, Republic of","Kuwait","Kyrgyzstan",
    "Lao People's Democratic Republic","Latvia","Lebanon","Lesotho","Liberia","Libya","Liechtenstein",
    "Lithuania","Luxembourg","Macao","Macedonia, the former Yugoslav Republic of","Madagascar","Malawi",
    "Malaysia","Maldives","Mali","Malta","Marshall Islands","Martinique","Mauritania","Mauritius","Mayotte",
    "Mexico","Micronesia, Federated States of","Moldova, Republic of","Monaco","Mongolia","Montenegro",
    "Montserrat","Morocco","Mozambique","Myanmar","Namibia","Nauru","Nepal","Netherlands","New Caledonia",
    "New Zealand","Nicaragua","Niger","Nigeria","Niue","Norfolk Island","Northern Mariana Islands","Norway",
    "Oman","Pakistan","Palau","Palestinian Territory, Occupied","Panama","Papua New Guinea","Paraguay","Peru",
    "Philippines","Pitcairn","Poland","Portugal","Puerto Rico","Qatar","Reunion","Romania",
    "Russian Federation","Rwanda","Saint Barthelemy","Saint Helena, Ascension and Tristan da Cunha",
    "Saint Kitts and Nevis","Saint Lucia","Saint Martin (French part)","Saint Pierre and Miquelon",
    "Saint Vincent and the Grenadines","Samoa","San Marino","Sao Tome and Principe","Saudi Arabia","Senegal",
    "Serbia","Seychelles","Sierra Leone","Singapore","Sint Maarten (Dutch part)","Slovakia","Slovenia",
    "Solomon Islands","Somalia","South Africa","South Georgia and the South Sandwich Islands","South Sudan",
    "Spain","Sri Lanka","Sudan","Suriname","Svalbard and Jan Mayen","Swaziland","Sweden","Switzerland",
    "Syrian Arab Republic","Taiwan, Province of China","Tajikistan","Tanzania, United Republic of","Thailand",
    "Timor-Leste","Togo","Tokelau","Tonga","Trinidad and Tobago","Tunisia","Turkey","Turkmenistan",
    "Turks and Caicos Islands","Tuvalu","Uganda","Ukraine","United Arab Emirates","United Kingdom",
    "United States Minor Outlying Islands","Uruguay","Uzbekistan","Vanuatu",
    "Venezuela, Bolivarian Republic of","Viet Nam","Virgin Islands, British","Virgin Islands, U.S.",
    "Wallis and Futuna","Western Sahara","Yemen","Zambia","Zimbabwe"]
# USPS state/territory abbreviation -> full name (includes DC, territories
# and the non-standard 'NA': 'National' entry).
states = {
    'AK': 'Alaska',
    'AL': 'Alabama',
    'AR': 'Arkansas',
    'AS': 'American Samoa',
    'AZ': 'Arizona',
    'CA': 'California',
    'CO': 'Colorado',
    'CT': 'Connecticut',
    'DC': 'District of Columbia',
    'DE': 'Delaware',
    'FL': 'Florida',
    'GA': 'Georgia',
    'GU': 'Guam',
    'HI': 'Hawaii',
    'IA': 'Iowa',
    'ID': 'Idaho',
    'IL': 'Illinois',
    'IN': 'Indiana',
    'KS': 'Kansas',
    'KY': 'Kentucky',
    'LA': 'Louisiana',
    'MA': 'Massachusetts',
    'MD': 'Maryland',
    'ME': 'Maine',
    'MI': 'Michigan',
    'MN': 'Minnesota',
    'MO': 'Missouri',
    'MP': 'Northern Mariana Islands',
    'MS': 'Mississippi',
    'MT': 'Montana',
    'NA': 'National',
    'NC': 'North Carolina',
    'ND': 'North Dakota',
    'NE': 'Nebraska',
    'NH': 'New Hampshire',
    'NJ': 'New Jersey',
    'NM': 'New Mexico',
    'NV': 'Nevada',
    'NY': 'New York',
    'OH': 'Ohio',
    'OK': 'Oklahoma',
    'OR': 'Oregon',
    'PA': 'Pennsylvania',
    'PR': 'Puerto Rico',
    'RI': 'Rhode Island',
    'SC': 'South Carolina',
    'SD': 'South Dakota',
    'TN': 'Tennessee',
    'TX': 'Texas',
    'UT': 'Utah',
    'VA': 'Virginia',
    'VI': 'Virgin Islands',
    'VT': 'Vermont',
    'WA': 'Washington',
    'WI': 'Wisconsin',
    'WV': 'West Virginia',
    'WY': 'Wyoming'
}
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def law_firms(request):
    """List all law firms (newest first) for a system administrator.

    Renders system_admin/law_firms.html with each firm zipped with its
    digits-only phone number (for tel: links).
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        # Existence check only: the user must be a system admin.
        SystemAdmin.objects.get(user=request.user)
    except Exception:
        return HttpResponseRedirect('/')
    context = dict()
    law_firms = LawFirm.objects.all().order_by('-pk')
    # Strip dashes/spaces so the template can build tel: links.
    phone_numbers = [firm.phone_number_one.replace('-', '').replace(' ', '')
                     for firm in law_firms]
    context['data'] = zip(law_firms, phone_numbers)
    context['law_firms'] = law_firms
    return render(request, 'system_admin/law_firms.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def profile(request):
    """System-admin profile page; POST changes the account password.

    POST returns a JSON {'success': "true"/"false"} flag; GET renders the
    profile template.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        # Existence check only: the user must be a system admin.
        SystemAdmin.objects.get(user=request.user)
    except Exception:
        return HttpResponseRedirect('/')
    context = dict()
    if request.POST:
        # .get() so a missing field yields None (and a JSON failure below)
        # instead of the KeyError/500 the old direct indexing produced --
        # direct indexing also meant the `is not None` checks could never
        # actually fire.
        old_password = request.POST.get("old_password")
        new_password = request.POST.get("new_password")
        new_password_confirm = request.POST.get("new_password_confirm")
        if( (old_password is not None) and (new_password is not None) and (new_password_confirm is not None) and (new_password == new_password_confirm)):
            if (request.user.check_password(old_password)):
                print("Password check successful")
                request.user.set_password(new_password)
                request.user.save()
                return JsonResponse({'success':"true"})
            else:
                print("Password check failed")
                # NOTE(review): this error string never reaches the client;
                # only the bare success flag is returned.
                context["error"] = "Your current password is incorrect"
                return JsonResponse({'success':"false"})
        else:
            return JsonResponse({'success':"false"})
    return render(request, 'system_admin/profile.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def new_law_firm(request):
    """Create a law firm plus its login user, address, rates and documents.

    POST creates everything and redirects to the law-firm list; GET (or a
    username collision) renders the creation form.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        # Existence check only: the user must be a system admin.
        SystemAdmin.objects.get(user=request.user)
    except Exception:
        return HttpResponseRedirect('/')
    if request.POST:
        # --- login user for the firm -------------------------------------
        username = request.POST['email-1']
        password = request.POST['password']
        first_name = request.POST['name']
        try:
            user = User.objects.create_user(username=username, password=password, email=username, first_name=first_name)
            created_user = authenticate(username=username, password=password)
            permission = Permission.objects.get(name='Can View Law Firm')
            user.user_permissions.add(permission)
        except Exception:
            context = dict()
            context['error'] = 'A user with this username already exists'
            return render(request, 'system_admin/new_law_firm.html', context)
        # --- optional referring firm -------------------------------------
        # BUG FIX: the old code tested for the key 'referring_law_firm' but
        # read 'referring-law-firm', and passed the id *positionally* to
        # .get() (a TypeError, swallowed by the bare except).
        # NOTE(review): referring_law_firm is still never attached to the new
        # LawFirm below -- confirm whether that linkage was ever intended.
        referring_law_firm = None
        if 'referring-law-firm' in request.POST:
            try:
                referring_law_firm_id = request.POST['referring-law-firm']
                referring_law_firm = LawFirm.objects.get(pk=referring_law_firm_id)
            except Exception:
                return HttpResponseRedirect('/administrator/new-law-firm/')
        # --- address ------------------------------------------------------
        street_one = request.POST['street-1']
        street_two = request.POST.get('street-2')
        city = request.POST['city']
        state = request.POST['state']
        zip_code = request.POST['zip-code']
        country = request.POST['country']
        new_address = Address(street_one=street_one, street_two=street_two, city=city, state=state, zip_code=zip_code,
                              country=country)
        new_address.save()
        # Store a Google Maps link when the address geocodes successfully.
        coordinates = new_address.get_coordinates()
        if coordinates is not None:
            new_address.gmaps_link = 'https://www.google.com/maps/place/' + str(coordinates['latitude']) + ',' + \
                str(coordinates['longitude']) + 'z?hl=en'
            new_address.save()
        # --- the firm itself ---------------------------------------------
        name = request.POST['name']
        phone_number_one = request.POST['phone-1']
        phone_number_two = request.POST.get('phone-2')
        email_one = request.POST['email-1']
        email_two = request.POST.get('email-2')
        payment_plan = request.POST.get('payment-plan')
        new_law_firm_instance = LawFirm(user=created_user, name=name, phone_number_one=phone_number_one,
                                        phone_number_two=phone_number_two, email_one=email_one, email_two=email_two,
                                        address=new_address, payment_plan=payment_plan)
        new_law_firm_instance.save()
        # --- rates --------------------------------------------------------
        # Constance key 'SOME_RATE_LAW_FIRM' maps to form field
        # 'some-rate-law-firm' and to model attribute 'some_rate'.
        law_firm_rates_keys = [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Law firm In Area options']] + \
            [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Law firm Out of Area options']] + \
            [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Law firm miscellaneous options']]
        law_firm_rates = dict()
        for rate_key in law_firm_rates_keys:
            law_firm_rates[rate_key] = float(request.POST[rate_key.lower().replace('_', '-')])
        new_law_firm_rates = LawFirmRates()
        new_law_firm_rates.save()
        for rate_key in law_firm_rates_keys:
            setattr(new_law_firm_rates, rate_key.lower()[0:len(rate_key) - len('_LAW_FIRM')], law_firm_rates[rate_key])
        new_law_firm_rates.save()
        new_law_firm_instance.rates = new_law_firm_rates
        new_law_firm_instance.save()
        # --- uploaded documents ------------------------------------------
        if request.method == 'POST' and 'document' in request.FILES and request.FILES['document']:
            files = request.FILES.getlist('document')
            document_names = request.POST.getlist('document-name')
            for file, document_name in zip(files, document_names):
                fs = FileSystemStorage()
                filename, file_extension = os.path.splitext(file.name)
                file_name = str(document_name) + str(file_extension)
                # Stored name is prefixed with the firm name to avoid clashes.
                file.name = str(new_law_firm_instance.name) + "-" + str(document_name) + str(file_extension)
                uploaded_file_url = fs.url(file.name)
                new_document = Document(file_name=file_name, file=file, file_url=uploaded_file_url, law_firm=new_law_firm_instance)
                new_document.save()
        return HttpResponseRedirect('/administrator/law-firms/?created=True')
    context = dict()
    context['law_firms'] = LawFirm.objects.all()
    context['countries'] = countries
    return render(request, 'system_admin/new_law_firm.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def law_firm(request, law_firm_id):
    """Law-firm detail view for system admins.

    GET renders the detail/edit page.  POST is multiplexed on the 'context'
    field: AJAX suspend/resume/delete, document view/edit/download, or (no
    recognised context) a full update of the firm's details, rates and
    documents.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        return HttpResponseRedirect('/')
    try:
        law_firm_instance = LawFirm.objects.get(pk=law_firm_id)
    except:
        raise Http404
    documents = Document.objects.filter(law_firm = law_firm_instance)
    context = dict()
    context['law_firm'] = law_firm_instance
    context['law_firm_rates'] = law_firm_instance.rates
    context['countries'] = countries
    # Digits-only phone numbers for tel: links.
    # NOTE(review): raises AttributeError if phone_number_two is unset/None.
    phone_number_one = law_firm_instance.phone_number_one.replace('-', '').replace(' ', '')
    phone_number_two = law_firm_instance.phone_number_two.replace('-', '').replace(' ', '')
    context['phone_number_one'] = phone_number_one
    context['phone_number_two'] = phone_number_two
    if request.POST:
        if request.is_ajax() and request.POST:
            # AJAX activate/deactivate actions; each returns an empty 200.
            # NOTE(review): 'delete' is a soft delete -- identical to
            # 'suspend' (the hard-delete code below is commented out).
            if request.POST['context'] == 'suspend':
                law_firm_instance.is_active = False
                law_firm_instance.user.is_active = False
                law_firm_instance.user.save()
                law_firm_instance.save()
                return HttpResponse('')
            elif request.POST['context'] == 'resume':
                law_firm_instance.is_active = True
                law_firm_instance.user.is_active = True
                law_firm_instance.user.save()
                law_firm_instance.save()
                return HttpResponse('')
            elif request.POST['context'] == 'delete':
                # rates_instance = law_firm_instance.rates
                # law_firm_instance.rates = None
                # rates_instance.delete()
                # law_firm_instance.address.delete()
                law_firm_instance.is_active = False
                law_firm_instance.user.is_active = False
                law_firm_instance.user.save()
                law_firm_instance.save()
                return HttpResponse('')
            else:
                return HttpResponse('')
        elif request.POST.get('context') == 'view-document':
            # Serve the stored document inline with a sniffed MIME type.
            # NOTE(review): this also renders the invoice template into a
            # pisa PDF whose only apparent role is error detection -- the
            # response body is the downloaded document, not the PDF. Confirm.
            context = dict()
            context['pagesize'] = 'A4'
            template = get_template('system_admin/invoice.html')
            result = StringIO.StringIO()
            document_id = request.POST.get('document-id')
            document_instance = Document.objects.get(pk = document_id)
            mime = magic.Magic(mime=True)
            # Fetch the file to a local temp name so libmagic can sniff it.
            buffer = "output"
            urllib.urlretrieve(document_instance.file.url, buffer)
            mimes = mime.from_file(buffer)
            output = download_doc(request,document_id)
            result = output
            document = result.getvalue()
            # print (result)
            html = template.render(context)
            pdf = pisa.pisaDocument(
                StringIO.StringIO(html.encode("ISO-8859-1")),
                dest=result, link_callback=fetch_resources)
            if not pdf.err:
                return HttpResponse(document, content_type=mimes)
            return HttpResponse("Error: <pre>%s</pre>" % escape(html))
        elif request.POST.get('context') == 'edit-document':
            # Replace a document's file and/or display name; any change
            # bumps the version by 0.1.  Falls through to re-render below.
            if request.method == 'POST' or request.FILES.get('document'):
                document_id = request.POST.get('document-id')
                document_instance = Document.objects.get(pk=document_id)
                changes = 0
                try:
                    if request.FILES.get('new-document'):
                        file = request.FILES.get('new-document')
                        document_instance.file = file
                        changes = 1
                        if request.POST.get('new-document-name'):
                            document_name = request.POST.get('new-document-name')
                            fs = FileSystemStorage()
                            filename, file_extension = os.path.splitext(document_instance.file.name)
                            file_name = str(document_name) + str(file_extension)
                            document_instance.file.name = str(law_firm_instance.name) + "-" + str(document_name) + str(file_extension)
                            document_instance.file_name = file_name
                            uploaded_file_url = fs.url(document_instance.file.name)
                            document_instance.file_url = uploaded_file_url
                            changes= 1
                    elif request.POST.get('new-document-name'):
                        # Name-only change: keep the stored file, rename the label.
                        document_name = request.POST.get('new-document-name')
                        filename, file_extension = os.path.splitext(document_instance.file.name)
                        file_name = str(document_name) + str(file_extension)
                        document_instance.file_name = file_name
                        changes= 1
                    if changes != 0:
                        document_instance.version = float(document_instance.version) + 0.1
                        document_instance.save()
                except:
                    print "Could not Update Document "
        elif request.POST.get('context') == 'download-document':
            document_id = request.POST['document-id']
            response = download_doc(request,document_id)
            return response
        else:
            # Plain (non-AJAX, no special context) POST: full details update.
            street_one = request.POST['street-1']
            street_two = None
            if 'street-2' in request.POST:
                street_two = request.POST['street-2']
            city = request.POST['city']
            state = request.POST['state']
            zip_code = request.POST['zip-code']
            country = request.POST['country']
            law_firm_instance.address.street_one = street_one
            law_firm_instance.address.street_two = street_two
            law_firm_instance.address.city = city
            law_firm_instance.address.state = state
            law_firm_instance.address.country = country
            law_firm_instance.address.zip_code = zip_code
            law_firm_instance.address.save()
            # Refresh the Google Maps link when the address geocodes.
            coordinates = law_firm_instance.address.get_coordinates()
            if coordinates is not None:
                law_firm_instance.address.gmaps_link = 'https://www.google.com/maps/place/' + str(coordinates['latitude']) + ',' + \
                    str(coordinates['longitude']) + 'z?hl=en'
                law_firm_instance.address.save()
            name = request.POST['name']
            phone_number_one = request.POST['phone-1']
            phone_number_two = None
            if 'phone-2' in request.POST:
                phone_number_two = request.POST['phone-2']
            email_one = request.POST['email-1']
            email_two = None
            if 'email-2' in request.POST:
                email_two = request.POST['email-2']
            payment_plan = None
            if 'payment-plan' in request.POST:
                payment_plan = request.POST['payment-plan']
            number_of_free_miles = None
            try:
                if 'number-of-free-miles' in request.POST:
                    number_of_free_miles = int(request.POST['number-of-free-miles'])
            except:
                number_of_free_miles = None
            law_firm_instance.name = name
            law_firm_instance.phone_number_one = phone_number_one
            law_firm_instance.phone_number_two = phone_number_two
            law_firm_instance.email_one = email_one
            law_firm_instance.email_two = email_two
            law_firm_instance.payment_plan = payment_plan
            law_firm_instance.number_of_free_miles = number_of_free_miles
            law_firm_instance.save()
            # Constance key 'SOME_RATE_LAW_FIRM' maps to form field
            # 'some-rate-law-firm' and to model attribute 'some_rate'.
            law_firm_rates_keys = [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Law firm In Area options']] + \
                [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Law firm Out of Area options']] + \
                [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Law firm miscellaneous options']]
            law_firm_rates = dict()
            for rate_key in law_firm_rates_keys:
                law_firm_rates[rate_key] = float(request.POST[rate_key.lower().replace('_', '-')])
            law_firm_rates_instance = None
            if law_firm_instance.rates is not None:
                law_firm_rates_instance = law_firm_instance.rates
            else:
                law_firm_rates_instance = LawFirmRates()
                law_firm_rates_instance.save()
                context['law_firm_rates'] = law_firm_rates_instance
            for rate_key in law_firm_rates_keys:
                setattr(law_firm_rates_instance, rate_key.lower()[0:len(rate_key) - len('_LAW_FIRM')], law_firm_rates[rate_key])
            law_firm_rates_instance.save()
            law_firm_instance.rates = law_firm_rates_instance
            law_firm_instance.save()
    # Newly uploaded documents (applies to the full-update POST above).
    if request.method == 'POST' and request.FILES.getlist('document'):
        files = request.FILES.getlist('document')
        document_names = request.POST.getlist('document-name')
        for file,document_name in zip(files,document_names):
            fs = FileSystemStorage()
            # file_name = fs.save(file.name, file)
            filename, file_extension = os.path.splitext(file.name)
            file_name = str(document_name) + str(file_extension)
            # Stored name is prefixed with the firm name to avoid clashes.
            file.name = str(law_firm_instance.name) + "-" + str(document_name) + str(file_extension)
            uploaded_file_url = fs.url(file.name)
            new_document = Document(file_name=file_name, file = file, file_url=uploaded_file_url, law_firm = law_firm_instance )
            new_document.save()
    # Re-query so just-added documents/edits show up in the rendered page.
    documents = Document.objects.filter(law_firm = law_firm_instance)
    attached_documents = []
    for document in documents:
        attached_cases = AttachedDocument.objects.filter(document = document)
        for attached_case in attached_cases:
            attached_documents.append(attached_case)
    context['attached_documents'] = attached_documents
    context['documents'] = documents
    phone_number_one = law_firm_instance.phone_number_one.replace('-', '').replace(' ', '')
    phone_number_two = law_firm_instance.phone_number_two.replace('-', '').replace(' ', '')
    context['phone_number_one'] = phone_number_one
    context['phone_number_two'] = phone_number_two
    return render(request, 'system_admin/law_firm_details.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def brokers(request):
    """List all brokers (newest first) for a system administrator.

    Renders system_admin/brokers.html with each broker zipped with its
    digits-only phone number (for tel: links).
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        # Existence check only: the user must be a system admin.
        SystemAdmin.objects.get(user=request.user)
    except Exception:
        return HttpResponseRedirect('/')
    context = dict()
    brokers = Broker.objects.all().order_by('-pk')
    # Strip dashes/spaces so the template can build tel: links.
    phone_numbers = [broker.phone_number_one.replace('-', '').replace(' ', '')
                     for broker in brokers]
    context['data'] = zip(brokers, phone_numbers)
    context['brokers'] = brokers
    return render(request, 'system_admin/brokers.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def new_broker(request):
    """Create a broker plus their login user and address.

    POST creates everything and redirects to the broker list; GET (or a
    username collision) renders the creation form.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        # Existence check only: the user must be a system admin.
        SystemAdmin.objects.get(user=request.user)
    except Exception:
        return HttpResponseRedirect('/')
    if request.POST:
        # --- login user for the broker -----------------------------------
        username = request.POST['email-1']
        password = request.POST['password']
        first_name = request.POST['first-name']
        last_name = request.POST['last-name']
        try:
            # NOTE(review): unlike new_law_firm, no email= is set on the
            # user here -- confirm whether that asymmetry is intentional.
            user = User.objects.create_user(username=username, password=password, first_name=first_name, last_name=last_name)
            created_user = authenticate(username=username, password=password)
            permission = Permission.objects.get(name='Can View Broker')
            user.user_permissions.add(permission)
        except Exception:
            context = dict()
            context['error'] = 'A user with this username already exists'
            return render(request, 'system_admin/new_broker.html', context)
        # --- address ------------------------------------------------------
        street_one = request.POST['street-1']
        street_two = request.POST.get('street-2')
        city = request.POST['city']
        state = request.POST['state']
        zip_code = request.POST['zip-code']
        country = request.POST['country']
        new_address = Address(street_one=street_one, street_two=street_two, city=city, state=state, zip_code=zip_code,
                              country=country)
        new_address.save()
        # Store a Google Maps link when the address geocodes successfully.
        coordinates = new_address.get_coordinates()
        if coordinates is not None:
            new_address.gmaps_link = 'https://www.google.com/maps/place/' + str(coordinates['latitude']) + ',' + \
                str(coordinates['longitude']) + 'z?hl=en'
            new_address.save()
        # --- the broker itself -------------------------------------------
        phone_number_one = request.POST['phone-1']
        phone_number_two = request.POST.get('phone-2')
        email_one = request.POST['email-1']
        email_two = request.POST.get('email-2')
        more_info = request.POST.get('more-info')
        photograph = None
        if request.FILES and 'photograph' in request.FILES:
            photograph = request.FILES['photograph']
        new_broker_instance = Broker(user=created_user, address=new_address, phone_number_one=phone_number_one,
                                     phone_number_two=phone_number_two, email_one=email_one, email_two=email_two,
                                     more_info=more_info, photograph=photograph)
        new_broker_instance.save()
        return HttpResponseRedirect('/administrator/brokers/?created=True')
    context = dict()
    context['countries'] = countries
    return render(request, 'system_admin/new_broker.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def broker(request, broker_id):
    """Broker detail view: render, edit, and AJAX suspend/resume/delete.

    GET renders the detail template.  An AJAX POST carrying a 'context' key
    performs a state change (suspend / resume / soft delete) and returns an
    empty response.  Any other POST updates the broker's auth user, address
    and contact details, then re-renders the detail page.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may manage brokers.
        return HttpResponseRedirect('/')
    try:
        broker_instance = Broker.objects.get(pk=broker_id)
    except:
        raise Http404

    def _display_phone(number):
        # Strip dashes/spaces for display.  The secondary number is optional,
        # so guard against None (the original crashed with AttributeError).
        return number.replace('-', '').replace(' ', '') if number else ''

    context = dict()
    context['broker'] = broker_instance
    context['countries'] = countries
    context['phone_number_one'] = _display_phone(broker_instance.phone_number_one)
    context['phone_number_two'] = _display_phone(broker_instance.phone_number_two)
    # AJAX state-change actions are dispatched on the 'context' POST key.
    if request.is_ajax() and request.POST and 'context' in request.POST:
        action = request.POST['context']
        if action == 'suspend':
            # Deactivate both the Broker record and its login account.
            broker_instance.is_active = False
            broker_instance.user.is_active = False
            broker_instance.user.save()
            broker_instance.save()
        elif action == 'resume':
            broker_instance.is_active = True
            broker_instance.user.is_active = True
            broker_instance.user.save()
            broker_instance.save()
        elif action == 'delete':
            # Soft delete: only the login is disabled, data is retained.
            user_instance = broker_instance.user
            user_instance.is_active = False
            user_instance.save()
        return HttpResponse('')
    if request.POST:
        # Update the linked auth user.
        broker_instance.user.first_name = request.POST['first-name']
        broker_instance.user.last_name = request.POST['last-name']
        broker_instance.user.save()
        # Update the address; optional form fields fall back to None.
        address = broker_instance.address
        address.street_one = request.POST['street-1']
        address.street_two = request.POST.get('street-2')
        address.city = request.POST['city']
        address.state = request.POST['state']
        address.country = request.POST['country']
        address.zip_code = request.POST['zip-code']
        address.save()
        # Re-geocode after the address change (save first: get_coordinates
        # presumably reads the persisted address -- same order as elsewhere).
        coordinates = address.get_coordinates()
        if coordinates is not None:
            address.gmaps_link = ('https://www.google.com/maps/place/' +
                                  str(coordinates['latitude']) + ',' +
                                  str(coordinates['longitude']))
            address.save()
        # Contact details; secondary values are optional.
        broker_instance.phone_number_one = request.POST['phone-1']
        broker_instance.phone_number_two = request.POST.get('phone-2')
        broker_instance.email_one = request.POST['email-1']
        broker_instance.email_two = request.POST.get('email-2')
        broker_instance.more_info = request.POST.get('more-info')
        if request.FILES and 'photograph' in request.FILES:
            # Only replace the photograph when a new file was actually
            # uploaded (the original cleared it on every edit without one).
            broker_instance.photograph = request.FILES['photograph']
        broker_instance.save()
        # Refresh the display-friendly phone numbers after the edit.
        context['phone_number_one'] = _display_phone(broker_instance.phone_number_one)
        context['phone_number_two'] = _display_phone(broker_instance.phone_number_two)
    return render(request, 'system_admin/broker_details.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def master_brokers(request):
    """List all master brokers, newest first (system-admin only)."""
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may view this listing.
        return HttpResponseRedirect('/')
    context = dict()
    # Newest records first.  (A commented-out Paginator block was removed
    # here; pagination is intentionally not applied.)
    context['master_brokers'] = MasterBroker.objects.all().order_by('-pk')
    return render(request, 'system_admin/master_brokers.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def new_master_broker(request):
    """Render the new-master-broker form; on POST create the user, address
    and MasterBroker records, then redirect to the listing page.

    The primary email address doubles as the Django username.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may create master brokers.
        return HttpResponseRedirect('/')
    if request.POST:
        # The primary email is used as the login username.
        username = request.POST['email-1']
        password = request.POST['password']
        first_name = request.POST['first-name']
        last_name = request.POST['last-name']
        try:
            # NOTE(review): unlike new_broker / new_investigator, no Permission
            # is attached to the new user here -- confirm that is intended.
            User.objects.create_user(username=username, password=password, first_name=first_name, last_name=last_name)
            created_user = authenticate(username=username, password=password)
        except:
            # create_user presumably raises on a duplicate username; surface
            # the error back on the form.
            context = dict()
            context['error'] = 'A user with this username already exists'
            return render(request, 'system_admin/new_master_broker.html', context)
        # Optional form fields default to None when absent.
        street_one = request.POST['street-1']
        street_two = None
        if 'street-2' in request.POST:
            street_two = request.POST['street-2']
        city = request.POST['city']
        state = request.POST['state']
        zip_code = request.POST['zip-code']
        country = request.POST['country']
        new_address = Address(street_one=street_one, street_two=street_two, city=city, state=state, zip_code=zip_code,
                              country=country)
        new_address.save()
        # Geocode after the first save and store a Google Maps link.
        coordinates = new_address.get_coordinates()
        if coordinates is not None:
            new_address.gmaps_link = 'https://www.google.com/maps/place/' + str(coordinates['latitude']) + ',' + \
                                     str(coordinates['longitude']) + 'z?hl=en'
            new_address.save()
        phone_number_one = request.POST['phone-1']
        phone_number_two = None
        if 'phone-2' in request.POST:
            phone_number_two = request.POST['phone-2']
        email_one = request.POST['email-1']
        email_two = None
        if 'email-2' in request.POST:
            email_two = request.POST['email-2']
        more_info = None
        if 'more-info' in request.POST:
            more_info = request.POST['more-info']
        photograph = None
        if request.FILES and 'photograph' in request.FILES:
            photograph = request.FILES['photograph']
        new_master_broker_instance = MasterBroker(user=created_user, address=new_address, phone_number_one=phone_number_one,
                                  phone_number_two=phone_number_two, email_one=email_one, email_two=email_two,
                                  more_info=more_info, photograph=photograph)
        new_master_broker_instance.save()
        return HttpResponseRedirect('/administrator/master-brokers/?created=True')
    # GET: render the empty form.
    context = dict()
    context['countries'] = countries
    return render(request, 'system_admin/new_master_broker.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def master_broker(request,master_broker_id):
    """Master-broker detail view: render, edit, and AJAX suspend/resume/delete."""
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may manage master brokers.
        return HttpResponseRedirect('/')
    try:
        master_broker_instance = MasterBroker.objects.get(pk=master_broker_id)
    except:
        raise Http404
    context = dict()
    context['master_broker'] = master_broker_instance
    context['countries'] = countries
    # AJAX state-change actions are dispatched on the 'context' POST key.
    if request.is_ajax() and request.POST and 'context' in request.POST:
        if request.POST['context'] == 'suspend':
            # Deactivate both the record and the login account.
            # NOTE(review): the original comment said "broker can still login",
            # but user.is_active = False disables login -- verify intent.
            master_broker_instance.is_active = False
            master_broker_instance.user.is_active = False
            master_broker_instance.user.save()
            master_broker_instance.save()
            return HttpResponse('')
        elif request.POST['context'] == 'resume':
            master_broker_instance.is_active = True
            master_broker_instance.user.is_active = True
            master_broker_instance.user.save()
            master_broker_instance.save()
            return HttpResponse('')
        elif request.POST['context'] == 'delete':
            # restrict login (soft delete)
            user_instance = master_broker_instance.user
            user_instance.is_active = False
            user_instance.save()
            return HttpResponse('')
        else:
            return HttpResponse('')
    # Non-AJAX POST: full profile update.
    if request.POST:
        first_name = request.POST['first-name']
        last_name = request.POST['last-name']
        master_broker_instance.user.first_name = first_name
        master_broker_instance.user.last_name = last_name
        master_broker_instance.user.save()
        # Optional form fields default to None when absent.
        street_one = request.POST['street-1']
        street_two = None
        if 'street-2' in request.POST:
            street_two = request.POST['street-2']
        city = request.POST['city']
        state = request.POST['state']
        zip_code = request.POST['zip-code']
        country = request.POST['country']
        master_broker_instance.address.street_one = street_one
        master_broker_instance.address.street_two = street_two
        master_broker_instance.address.city = city
        master_broker_instance.address.state = state
        master_broker_instance.address.country = country
        master_broker_instance.address.zip_code = zip_code
        master_broker_instance.address.save()
        # Re-geocode after the address change.
        coordinates = master_broker_instance.address.get_coordinates()
        if coordinates is not None:
            master_broker_instance.address.gmaps_link = 'https://www.google.com/maps/place/' + str(coordinates['latitude']) + ',' + \
                                     str(coordinates['longitude'])
            master_broker_instance.address.save()
        phone_number_one = request.POST['phone-1']
        phone_number_two = None
        if 'phone-2' in request.POST:
            phone_number_two = request.POST['phone-2']
        email_one = request.POST['email-1']
        email_two = None
        if 'email-2' in request.POST:
            email_two = request.POST['email-2']
        more_info = None
        if 'more-info' in request.POST:
            more_info = request.POST['more-info']
        photograph = None
        if request.FILES and 'photograph' in request.FILES:
            photograph = request.FILES['photograph']
        # NOTE(review): when no new file is uploaded, photograph is None and
        # this assignment clears any existing photo -- confirm that is intended.
        master_broker_instance.photograph = photograph
        master_broker_instance.phone_number_one = phone_number_one
        master_broker_instance.phone_number_two = phone_number_two
        master_broker_instance.email_one = email_one
        master_broker_instance.email_two = email_two
        master_broker_instance.more_info = more_info
        master_broker_instance.save()
    return render(request, 'system_admin/master_broker_details.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def investigators(request):
    """List all investigators, newest first, with display-friendly phone numbers.

    The template receives both the raw queryset ('investigators') and a zipped
    (investigator, cleaned_phone) sequence ('data').
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may view this listing.
        return HttpResponseRedirect('/')
    investigators = Investigator.objects.all().order_by('-pk')
    # Strip dashes/spaces so the template can build tel: links.
    phone_numbers = [inv.phone_number_one.replace('-', '').replace(' ', '')
                     for inv in investigators]
    context = dict()
    context['data'] = zip(investigators, phone_numbers)
    # (A commented-out Paginator block was removed; no pagination applied.)
    context['investigators'] = investigators
    return render(request, 'system_admin/investigators.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def new_investigator(request):
    """Render the new-investigator form; on POST create the user, addresses,
    Investigator and InvestigatorRates records, then redirect to the listing.

    The primary email address doubles as the Django username.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may create investigators.
        return HttpResponseRedirect('/')
    context = dict()
    context['languages'] = all_languages
    context['countries'] = countries
    if request.POST:
        # The primary email is used as the login username.
        username = request.POST['email-1']
        password = request.POST['password']
        first_name = request.POST['first-name']
        last_name = request.POST['last-name']
        try:
            user = User.objects.create_user(username=username, password=password, first_name=first_name, last_name=last_name)
            created_user = authenticate(username=username, password=password)
            permission = Permission.objects.get(name='Can View Investigator')
            user.user_permissions.add(permission)
        except:
            # create_user presumably raises on a duplicate username.
            context['error'] = 'A user with this username already exists'
            return render(request, 'system_admin/new_investigator.html', context)
        # Primary address; optional fields default to None when absent.
        street_one = request.POST['street-1']
        street_two = None
        if 'street-2' in request.POST:
            street_two = request.POST['street-2']
        city = request.POST['city']
        state = request.POST['state']
        country = request.POST['country']
        zip_code = request.POST['zip-code']
        new_address = Address(street_one=street_one, street_two=street_two, city=city, state=state, zip_code=zip_code,
                              country=country)
        new_address.save()
        # Geocode after the first save and store a Google Maps link.
        coordinates = new_address.get_coordinates()
        if coordinates is not None:
            new_address.gmaps_link = 'https://www.google.com/maps/place/' + str(coordinates['latitude']) + ',' + \
                                     str(coordinates['longitude'])
            new_address.save()
        # Optional secondary address: any missing field raises KeyError and
        # the whole block is skipped.
        new_secondary_address = None
        try:
            street_one = request.POST['secondary-street-1']
            street_two = None
            # NOTE(review): this checks 'street-2' but reads
            # 'secondary-street-2' -- key mismatch; verify intended field name.
            if 'street-2' in request.POST:
                street_two = request.POST['secondary-street-2']
            city = request.POST['secondary-city']
            state = request.POST['secondary-state']
            country = request.POST['secondary-country']
            zip_code = request.POST['secondary-zip-code']
            new_secondary_address = Address(street_one=street_one, street_two=street_two, city=city, state=state, zip_code=zip_code,
                              country=country)
            new_secondary_address.save()
            coordinates = new_secondary_address.get_coordinates()
            if coordinates is not None:
                new_secondary_address.gmaps_link = 'https://www.google.com/maps/place/' + str(coordinates['latitude']) + ',' + \
                                     str(coordinates['longitude'])
                new_secondary_address.save()
        except:
            new_secondary_address = None
        nickname = request.POST['nickname']
        # Stored as the repr of a list of language names, e.g. "['English']".
        languages = repr([str(ele) for ele in request.POST.getlist('languages')])
        phone_number_one = request.POST['phone-1']
        phone_number_two = None
        if 'phone-2' in request.POST:
            phone_number_two = request.POST['phone-2']
        email_one = request.POST['email-1']
        email_two = None
        if 'email-2' in request.POST:
            email_two = request.POST['email-2']
        more_info = None
        if 'more-info' in request.POST:
            more_info = request.POST['more-info']
        photograph = None
        if request.FILES and 'photograph' in request.FILES:
            photograph = request.FILES['photograph']
        new_investigator_instance = Investigator(user=created_user, address=new_address, nickname=nickname,
                        languages=languages, phone_number_one=phone_number_one, phone_number_two=phone_number_two,
                        email_one=email_one, email_two=email_two, more_info=more_info, photograph=photograph,
                        secondary_address=new_secondary_address)
        new_investigator_instance.save()
        # Collect every investigator rate key declared in the constance
        # fieldsets; form field names are the lowercased, dash-separated keys.
        investigator_rates_keys = [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Investigator In Area options']] + \
                                  [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Investigator Out of Area options']] + \
                                  [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Investigator miscellaneous options']]
        investigator_rates = dict()
        for rate_key in investigator_rates_keys:
            investigator_rates[rate_key] = float(request.POST[rate_key.lower().replace('_', '-')])
        new_investigator_rates = InvestigatorRates()
        new_investigator_rates.save()
        for rate_key in investigator_rates_keys:
            # Model field name = config key lowercased with the trailing
            # '_INVESTIGATOR'-length suffix removed.
            setattr(new_investigator_rates, rate_key.lower()[0:len(rate_key) - len('_INVESTIGATOR')],
                    investigator_rates[rate_key])
        new_investigator_rates.save()
        new_investigator_instance.rates = new_investigator_rates
        new_investigator_instance.save()
        return HttpResponseRedirect('/administrator/investigators/?created=True')
    return render(request, 'system_admin/new_investigator.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def investigator(request, investigator_id):
    """Investigator detail view: render, edit, and AJAX suspend/resume/delete."""
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may manage investigators.
        return HttpResponseRedirect('/')
    try:
        investigator_instance = Investigator.objects.get(pk=investigator_id)
    except:
        raise Http404
    context = dict()
    context['investigator'] = investigator_instance
    context['investigator_rates'] = investigator_instance.rates
    context['languages'] = all_languages
    context['countries'] = countries
    # Display-friendly phone numbers (dashes/spaces stripped).
    phone_number_one = investigator_instance.phone_number_one.replace('-', '').replace(' ', '')
    # NOTE(review): phone_number_two can be None for other models in this file;
    # if it is nullable here too, this .replace() raises AttributeError.
    phone_number_two = investigator_instance.phone_number_two.replace('-', '').replace(' ', '')
    context['phone_number_one'] = phone_number_one
    context['phone_number_two'] = phone_number_two
    # AJAX state-change actions are dispatched on the 'context' POST key.
    if request.is_ajax() and request.POST and 'context' in request.POST:
        if request.POST['context'] == 'suspend':
            investigator_instance.is_active = False
            investigator_instance.user.is_active = False
            investigator_instance.user.save()
            investigator_instance.save()
            return HttpResponse('')
        elif request.POST['context'] == 'resume':
            investigator_instance.is_active = True
            investigator_instance.user.is_active = True
            investigator_instance.user.save()
            investigator_instance.save()
            return HttpResponse('')
        elif request.POST['context'] == 'delete':
            # Soft delete: disable login and unassign all open cases.
            user_instance = investigator_instance.user
            for case in Case.objects.filter(investigator=investigator_instance).exclude(status='Closed'):
                case.investigator = None
                case.save()
            user_instance.is_active = False
            user_instance.save()
            return HttpResponse('')
        else:
            return HttpResponse('')
    # Non-AJAX POST: full profile update.
    if request.POST:
        first_name = request.POST['first-name']
        last_name = request.POST['last-name']
        investigator_instance.user.first_name = first_name
        investigator_instance.user.last_name = last_name
        investigator_instance.user.save()
        # Primary address; optional fields default to None when absent.
        street_one = request.POST['street-1']
        street_two = None
        if 'street-2' in request.POST:
            street_two = request.POST['street-2']
        city = request.POST['city']
        state = request.POST['state']
        country = request.POST['country']
        zip_code = request.POST['zip-code']
        investigator_instance.address.street_one = street_one
        investigator_instance.address.street_two = street_two
        investigator_instance.address.city = city
        investigator_instance.address.state = state
        investigator_instance.address.country = country
        investigator_instance.address.zip_code = zip_code
        investigator_instance.address.save()
        # Re-geocode after the address change.
        coordinates = investigator_instance.address.get_coordinates()
        if coordinates is not None:
            investigator_instance.address.gmaps_link = 'https://www.google.com/maps/place/' + str(coordinates['latitude']) + ',' + \
                                     str(coordinates['longitude'])
            investigator_instance.address.save()
        # Optional secondary address: any missing field raises and the whole
        # block is silently skipped by the bare except below.
        new_secondary_address = None
        try:
            street_one = request.POST['secondary-street-1']
            street_two = None
            # NOTE(review): checks 'street-2' but reads 'secondary-street-2'
            # -- key mismatch; verify the intended field name.
            if 'street-2' in request.POST:
                street_two = request.POST['secondary-street-2']
            city = request.POST['secondary-city']
            state = request.POST['secondary-state']
            country = request.POST['secondary-country']
            zip_code = request.POST['secondary-zip-code']
            if investigator_instance.secondary_address:
                investigator_instance.secondary_address.street_one = street_one
                investigator_instance.secondary_address.street_two = street_two
                investigator_instance.secondary_address.city = city
                investigator_instance.secondary_address.state = state
                investigator_instance.secondary_address.country = country
                investigator_instance.secondary_address.zip_code = zip_code
                investigator_instance.secondary_address.save()
                coordinates = investigator_instance.secondary_address.get_coordinates()
                if coordinates is not None:
                    investigator_instance.secondary_address.gmaps_link = 'https://www.google.com/maps/place/' + str(coordinates['latitude']) + ',' + \
                                     str(coordinates['longitude'])
                    investigator_instance.secondary_address.save()
            else:
                new_secondary_address = Address(street_one=street_one, street_two=street_two, city=city, state=state, zip_code=zip_code,
                                country=country)
                new_secondary_address.save()
                # NOTE(review): BUG -- secondary_address is still None/unset in
                # this branch, so the next line raises AttributeError, which the
                # bare except below swallows; as a result new_secondary_address
                # is never geocoded and never attached to the investigator.
                # Should presumably use new_secondary_address here.
                coordinates = investigator_instance.secondary_address.get_coordinates()
                if coordinates is not None:
                    investigator_instance.secondary_address.gmaps_link = 'https://www.google.com/maps/place/' + str(coordinates['latitude']) + ',' + \
                                     str(coordinates['longitude'])
                    investigator_instance.secondary_address.save()
                investigator_instance.secondary_address = new_secondary_address
                investigator_instance.save()
        except:
            pass
        nickname = request.POST['nickname']
        # Stored as the repr of a list of language names, e.g. "['English']".
        languages = repr([str(ele) for ele in request.POST.getlist('languages')])
        phone_number_one = request.POST['phone-1']
        phone_number_two = None
        if 'phone-2' in request.POST:
            phone_number_two = request.POST['phone-2']
        email_one = request.POST['email-1']
        email_two = None
        if 'email-2' in request.POST:
            email_two = request.POST['email-2']
        more_info = None
        if 'more-info' in request.POST:
            more_info = request.POST['more-info']
        photograph = None
        if request.FILES and 'photograph' in request.FILES:
            photograph = request.FILES['photograph']
        # NOTE(review): when no new file is uploaded this assigns None and
        # clears any existing photograph -- confirm that is intended.
        investigator_instance.photograph = photograph
        investigator_instance.nickname = nickname
        investigator_instance.languages = languages
        investigator_instance.phone_number_one = phone_number_one
        investigator_instance.phone_number_two = phone_number_two
        investigator_instance.email_one = email_one
        investigator_instance.email_two = email_two
        investigator_instance.more_info = more_info
        investigator_instance.save()
        # Update (or lazily create) the per-investigator rates record from the
        # constance fieldset keys; form fields are lowercased, dash-separated.
        investigator_rates_keys = [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Investigator In Area options']] + \
                                  [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Investigator Out of Area options']] + \
                                  [rate for rate in CONSTANCE_CONFIG_FIELDSETS['Investigator miscellaneous options']]
        investigator_rates = dict()
        for rate_key in investigator_rates_keys:
            investigator_rates[rate_key] = float(request.POST[rate_key.lower().replace('_', '-')])
        investigator_rates_instance = None
        if investigator_instance.rates is not None:
            investigator_rates_instance = investigator_instance.rates
        else:
            investigator_rates_instance = InvestigatorRates()
            investigator_rates_instance.save()
        context['investigator_rates'] = investigator_rates_instance
        for rate_key in investigator_rates_keys:
            # Model field name = config key lowercased with the trailing
            # '_INVESTIGATOR'-length suffix removed.
            setattr(investigator_rates_instance, rate_key.lower()[0:len(rate_key) - len('_INVESTIGATOR')], investigator_rates[rate_key])
        investigator_rates_instance.save()
        investigator_instance.rates = investigator_rates_instance
        investigator_instance.save()
        # Refresh the display-friendly phone numbers after the edit.
        phone_number_one = investigator_instance.phone_number_one.replace('-', '').replace(' ', '')
        phone_number_two = investigator_instance.phone_number_two.replace('-', '').replace(' ', '')
        context['phone_number_one'] = phone_number_one
        context['phone_number_two'] = phone_number_two
    return render(request, 'system_admin/investigator_details.html', context)
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def rates(request):
    """Render the global rates page; on POST write every law-firm and
    investigator rate from the form into the constance config.

    Form field names are the lowercased, dash-separated versions of the
    constance config keys.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may edit rates.
        return HttpResponseRedirect('/')
    if request.POST:
        # Every rate key comes from these six constance fieldsets.
        fieldset_names = ('Law firm In Area options',
                          'Law firm Out of Area options',
                          'Law firm miscellaneous options',
                          'Investigator In Area options',
                          'Investigator Out of Area options',
                          'Investigator miscellaneous options')
        for fieldset_name in fieldset_names:
            for rate_key in CONSTANCE_CONFIG_FIELDSETS[fieldset_name]:
                # NOTE: values are stored as the raw POST strings, matching
                # the original behavior (no float() cast here, unlike the
                # per-investigator rates views).
                setattr(config, rate_key, request.POST[rate_key.lower().replace('_', '-')])
    return render(request, 'system_admin/rates.html')
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def delete_case_invoice(request):
    """AJAX endpoint: soft-delete the invoice attached to a case.

    Detaches the invoice from every case that shares it, zeroes their
    billed totals, and flags the invoice as deleted (the row is kept).
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may delete invoices.
        return HttpResponseRedirect('/')
    if not request.POST or 'case_id' not in request.POST:
        return HttpResponseRedirect('/')
    case_instance = Case.objects.get(pk=request.POST['case_id'])
    invoice_to_be_deleted = case_instance.invoice
    if invoice_to_be_deleted:
        # Several cases can reference the same invoice; detach them all.
        for case in Case.objects.filter(invoice=invoice_to_be_deleted):
            case.invoice = None
            case.total_amount_billed_to_law_firm = 0
            case.save()
        # Soft delete: keep the row for audit purposes.
        invoice_to_be_deleted.is_deleted = True
        invoice_to_be_deleted.save()
    return HttpResponse('')
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def delete_case_invoice_as_csv(request):
    """AJAX endpoint: soft-delete the CSV invoice attached to a case.

    Mirrors delete_case_invoice but operates on the invoice_as_csv field.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may delete invoices.
        return HttpResponseRedirect('/')
    if not request.POST or 'case_id' not in request.POST:
        return HttpResponseRedirect('/')
    case_instance = Case.objects.get(pk=request.POST['case_id'])
    invoice_to_be_deleted = case_instance.invoice_as_csv
    if invoice_to_be_deleted:
        # Several cases can reference the same invoice; detach them all.
        for case in Case.objects.filter(invoice_as_csv=invoice_to_be_deleted):
            case.invoice_as_csv = None
            case.total_amount_billed_to_law_firm = 0
            case.save()
        # Soft delete: keep the row for audit purposes.
        invoice_to_be_deleted.is_deleted = True
        invoice_to_be_deleted.save()
    return HttpResponse('')
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def delete_case_invoice_as_excel(request):
    """AJAX endpoint: soft-delete the Excel invoice attached to a case.

    Mirrors delete_case_invoice but operates on the invoice_as_excel field.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may delete invoices.
        return HttpResponseRedirect('/')
    if not request.POST or 'case_id' not in request.POST:
        return HttpResponseRedirect('/')
    case_instance = Case.objects.get(pk=request.POST['case_id'])
    invoice_to_be_deleted = case_instance.invoice_as_excel
    if invoice_to_be_deleted:
        # Several cases can reference the same invoice; detach them all.
        for case in Case.objects.filter(invoice_as_excel=invoice_to_be_deleted):
            case.invoice_as_excel = None
            case.total_amount_billed_to_law_firm = 0
            case.save()
        # Soft delete: keep the row for audit purposes.
        invoice_to_be_deleted.is_deleted = True
        invoice_to_be_deleted.save()
    return HttpResponse('')
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def delete_document(request):
    """Soft-delete a law-firm document (and its case attachments), then
    redirect back to the law firm's detail page.

    Expects POST keys 'document_id' and 'law_firm_id'.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    try:
        system_admin = SystemAdmin.objects.get(user=request.user)
    except:
        # Only system admins may delete documents.
        return HttpResponseRedirect('/')
    if not request.POST or 'document_id' not in request.POST:
        return HttpResponseRedirect('/')
    try:
        document_instance = Document.objects.get(pk=request.POST['document_id'])
        law_firm_id = request.POST['law_firm_id']
    except:
        # Bug fix: the original fell through with document_instance and
        # law_firm_id unbound, crashing with NameError on the redirect below.
        return HttpResponseRedirect('/')
    try:
        # Remove the attachment rows linking this document to cases.
        for attached_document in AttachedDocument.objects.filter(document=document_instance):
            attached_document.delete()
        # Soft delete: detach from the law firm and flag instead of removing.
        document_instance.law_firm = None
        document_instance.is_deleted = True
        document_instance.save()
    except:
        # Best-effort, matching the original behavior: log and still redirect.
        print("Error: Attached document not found and not deleted")
    return HttpResponseRedirect('/administrator/law-firm/' + law_firm_id + '/')
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def generate_report(request):
import datetime
import csv
if not request.user.is_authenticated():
return HttpResponseRedirect('/')
try:
system_admin = SystemAdmin.objects.get(user=request.user)
except:
return HttpResponseRedirect('/')
if request.POST:
if request.is_ajax() and 'context' in request.POST and request.POST['context'] == 'update-table':
# Handle table update
case_id = request.POST['case-id']
case_instance = Case.objects.get(pk=case_id)
case_instance.no_of_miles_travelled = float(request.POST['number-of-miles-travelled'])
case_instance.no_of_free_miles_law_firm = float(request.POST['number-of-free-miles'])
case_instance.basic_fee_law_firm = float(request.POST['basic-fee'])
case_instance.mileage_rate_law_firm = float(request.POST['mileage-rate'])
case_instance.additional_expenses = float(request.POST['additional-expenses'])
case_instance.save()
return HttpResponse('')
elif request.is_ajax() and 'context' in request.POST and request.POST['context'] == 'pay':
#Mark case as paid to investigator
try:
case_ids = request.POST['case_ids'].replace('"', '').replace("'", '').replace('[', '').replace(']', '')\
.replace(' ', '').split(',')
except:
return HttpResponseRedirect('/')
all_cases_in_range = []
for case_id in case_ids:
case_instance = Case.objects.get(pk=case_id)
case_instance.is_investigator_paid = True
case_instance.amount_paid_to_investigator = case_instance.get_investigator_price()
case_instance.save()
all_cases_in_range.append(case_instance)
return HttpResponse('')
elif request.is_ajax() and 'context' in request.POST and request.POST['context'] == 'unpay':
#Mark case as unpaid to investigator
try:
case_ids = request.POST['case_ids'].replace('"', '').replace("'", '').replace('[', '').replace(']', '')\
.replace(' ', '').split(',')
except:
return HttpResponseRedirect('/')
all_cases_in_range = []
for case_id in case_ids:
case_instance = Case.objects.get(pk=case_id)
case_instance.is_investigator_paid = False
case_instance.save()
all_cases_in_range.append(case_instance)
return HttpResponse('')
elif 'context' in request.POST and request.POST['context'] == 'download-csv-selected-cases':
# generate CSV for only specific cases
try:
case_ids = request.POST['case_ids'].replace('"', '').replace("'", '').replace('[', '').replace(']', '')\
.replace(' ', '').split(',')
except:
return HttpResponseRedirect('/')
all_cases_in_range = []
for case_id in case_ids:
try:
case_instance = Case.objects.get(pk=case_id)
all_cases_in_range.append(case_instance)
except :
context = dict()
context['error'] = 'An error occurred while generating payment report'
return render(request, 'system_admin/generate_report.html',context)
pass
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="Payment Report.csv"'
writer = csv.writer(response)
writer.writerow(['Case Name', 'Investigator name','Adult Cients','Child Clients', 'Location of client', 'No. of miles travelled',
'No. of free miles', 'Basic fee',
# 'No. of signatures',
'Mileage rate',
'Additional expenses','Additional Expenses description', 'Expected payment', 'Amount paid to law firm', 'Difference',
'Amount paid to investigator', 'Profit', 'Payment status'])
for case in all_cases_in_range:
investigator_name = case.investigator.user.first_name + ' ' + case.investigator.user.last_name
investigator_payment = 'PENDING'
if case.is_investigator_paid:
investigator_payment = 'PAID'
writer.writerow([case.name, investigator_name, case.adult_clients, case.child_clients,case.client_address.simple_address(), case.no_of_miles_travelled,
case.no_of_free_miles_investigator, case.basic_fee_investigator,
# case.number_of_signatures_required,
case.mileage_rate_investigator, case.additional_expenses,
case.additional_expenses_description, case.expected_payment, case.get_law_firm_price(),
case.difference_in_payment(), case.get_investigator_price(), case.profit(), investigator_payment])
return response
return HttpResponse('')
elif 'from' in request.POST and 'to' in request.POST and 'law-firm' in request.POST:
try:
from_date = request.POST.get('from')
to_date = request.POST.get('to')
from_components = from_date.split('/')
from_date = datetime.datetime(int(from_components[2]), int(from_components[0]), int(from_components[1]))
to_components = to_date.split('/')
to_date = datetime.datetime(int(to_components[2]), int(to_components[0]), int(to_components[1]))
if request.POST['law-firm'] == 'All Firms':
law_firm = 'All Firms'
else:
law_firm_id = request.POST['law-firm']
law_firm = LawFirm.objects.get(pk=law_firm_id)
except:
return HttpResponseRedirect('/administrator/generate-report/')
if law_firm != 'All Firms' :
all_cases_in_range = Case.objects.filter(created_at__gte=from_date)\
.filter(created_at__lt=to_date)\
.filter(law_firm = law_firm)\
.filter(status='Closed')
else:
all_cases_in_range = Case.objects.filter(created_at__gte=from_date)\
.filter(created_at__lt=to_date)\
.filter(status='Closed')
context = dict()
context['cases'] = all_cases_in_range
context['from'] = request.POST['from']
context['to'] = request.POST['to']
# context['law_firms'] = LawFirm.objects.all()
# context['selected_firm'] = law_firm
print "before download-csv context"
# for case in all_cases_in_range:
# print(case.name)
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
return render(request, 'system_admin/generate_report.html', context)
elif 'context' in request.POST and request.POST['context'] == 'download-csv':
# try:
# print "in context download-csv"
from_date = request.POST.get('from')
to_date = request.POST.get('to')
from_components = from_date.split('/')
from_date = datetime.datetime(int(from_components[2]), int(from_components[0]), int(from_components[1]))
to_components = to_date.split('/')
to_date = datetime.datetime(int(to_components[2]), int(to_components[0]), int(to_components[1]))
if request.POST.get('law_firm') == 'All Firms':
law_firm = 'All Firms'
else:
law_firm_id = request.POST.get('law_firm')
print (law_firm_id)
law_firm = LawFirm.objects.get(pk=law_firm_id)
# except:
# return HttpResponseRedirect('/administrator/generate-report/')
if law_firm != 'All Firms' :
all_cases_in_range = Case.objects.filter(created_at__gte=from_date)\
.filter(created_at__lt=to_date)\
.filter(law_firm = law_firm)\
.filter(status='Closed')
else:
all_cases_in_range = Case.objects.filter(created_at__gte=from_date)\
.filter(created_at__lt=to_date)\
.filter(status='Closed')
print "passes download-csv context"
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="Payment Report on '+str(datetime.datetime.now().strftime("%Y/%m/%d %H:%M:%S"))+'.csv"'
writer = csv.writer(response)
writer.writerow(['Case Name', 'Investigator name','Adult Clients','Child Clients', 'Location of client', 'No. of miles travelled',
'No. of free miles', 'Basic fee',
# 'No. of signatures',
'Mileage rate','Additional expenses','Additional Expenses description', 'Expected payment', 'Amount paid to law firm', 'Difference',
'Amount paid to investigator', 'Profit', 'Payment status'])
for case in all_cases_in_range:
investigator_name = case.investigator.user.first_name + ' ' + case.investigator.user.last_name
investigator_payment = 'PENDING'
if case.is_investigator_paid:
investigator_payment = 'PAID'
writer.writerow([case.name, investigator_name, case.adult_clients, case.child_clients ,case.client_address.simple_address(), case.no_of_miles_travelled,
case.no_of_free_miles_investigator, case.basic_fee_investigator,
# case.number_of_signatures_required,
case.mileage_rate_investigator, case.additional_expenses,
case.additional_expenses_description,
case.expected_payment, case.get_law_firm_price(),
case.difference_in_payment(), case.get_investigator_price(), case.profit(), investigator_payment])
return response
context = dict()
context = dict()
context['cases'] = all_cases_in_range
context['from'] = request.POST['from']
context['to'] = request.POST['to']
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
return render(request, 'system_admin/generate_report.html', context)
context = dict()
context['law_firms'] = LawFirm.objects.all()
return render(request, 'system_admin/generate_report.html',context)
# @login_required(login_url='/')
# @permission_required('system_admin.can_view_system_admin',raise_exception=True)
def my_first_page(canvas, doc):
    # ReportLab page callback for the first page: stamps a "Page N" footer
    # near the bottom-left corner, then restores the canvas state.
    footer = "Page %d" % (doc.page,)
    canvas.saveState()
    canvas.setFont('Times-Roman', 9)
    canvas.drawString(inch, 0.75 * inch, footer)
    canvas.restoreState()
def my_later_pages(canvas, doc):
    # ReportLab page callback for pages after the first; draws the same
    # "Page N" footer as my_first_page.
    page_label = "Page %d" % (doc.page,)
    canvas.saveState()
    canvas.setFont('Times-Roman', 9)
    canvas.drawString(inch, 0.75 * inch, page_label)
    canvas.restoreState()
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def case_details(request, case_id):
if not request.user.is_authenticated():
return HttpResponseRedirect('/')
try:
system_admin = SystemAdmin.objects.get(user=request.user)
except:
return HttpResponseRedirect('/')
try:
case_instance = Case.objects.get(pk=case_id)
except:
raise Http404
context = dict()
context['broker'] = broker
context['case'] = case_instance
context['law_firms'] = LawFirm.objects.all()
context['countries'] = countries
context['editable'] = case_instance.invoice is None and not case_instance.is_investigator_paid and case_instance.invoice_as_csv is None and case_instance.invoice_as_excel is None
context['amount_paid_to_investigator'] = case_instance.get_investigator_price()
context['amount_billed_to_law_firm'] = case_instance.get_law_firm_price()
context['all_documents'] = Document.objects.all()
client_mobile_phone = case_instance.client_mobile_phone.replace('-', '').replace(' ', '')
context['client_mobile_phone'] = client_mobile_phone
client_home_phone = case_instance.client_home_phone.replace('-', '').replace(' ', '')
context['client_home_phone'] = client_home_phone
if request.POST:
if request.POST.get('context') == 'view-document':
context = dict()
context['pagesize'] = 'A4'
template = get_template('document.html')
result = StringIO.StringIO()
document_id = request.POST.get('document-id')
document_instance = Document.objects.get(pk = document_id)
mime = magic.Magic(mime=True)
buffer = "output"
urllib.urlretrieve(document_instance.file.url, buffer)
mimes = mime.from_file(buffer)
output = download_doc(request,document_id)
result = output
document = result.getvalue()
# print (result)
html = template.render(context)
pdf = pisa.pisaDocument(
StringIO.StringIO(html.encode("ISO-8859-1")),
dest=result, link_callback=fetch_resources)
if not pdf.err:
return HttpResponse(document, content_type=mimes)
return HttpResponse("Error: <pre>%s</pre>" % escape(html))
elif request.POST.get('context') == 'download-document':
document_id = request.POST['document-id']
response = download_doc(request,document_id)
return response
elif request.POST['context'] == 'update-case-details':
if case_instance.is_investigator_paid:
raise SuspiciousOperation("Invalid request; Cannot edit case details after case is marked as Paid")
elif case_instance.invoice:
raise SuspiciousOperation("Invalid request; Cannot edit case details if an active invoice is associated")
print "Updating case: %s"%str(case_id)
# law_firm_id = request.POST['law-firm']
# law_firm = LawFirm.objects.get(pk=law_firm_id)
street_one = request.POST['street-1']
street_two = request.POST['street-2']
city = request.POST['city']
state = request.POST['state']
zip_code = request.POST['zip-code']
case_instance.client_address.street_one = street_one
case_instance.client_address.street_two = street_two
case_instance.client_address.city = city
case_instance.client_address.state = state
case_instance.client_address.zip_code = zip_code
case_instance.client_address.save()
coordinates = case_instance.client_address.get_coordinates()
if coordinates is not None:
case_instance.client_address.gmaps_link = 'https://www.google.com/maps/place/' + str(coordinates['latitude']) + ',' + \
str(coordinates['longitude'])
case_instance.client_address.save()
name = request.POST['case-name']
case_type = request.POST['case-type']
case_type_description = ''
if ('case-type' not in request.POST or type == ''):
case_type='default'
elif (case_type == 'Others'):
case_type_description = request.POST['case-type-description']
basic_fee_law_firm = request.POST['basic-fee-for-case-law-firm']
no_of_free_miles_law_firm = request.POST['no-of-free-miles-law-firm']
mileage_rate_law_firm = request.POST['mileage-rate-law-firm']
basic_fee_investigator = request.POST['basic-fee-for-case-investigator']
no_of_free_miles_investigator = request.POST['no-of-free-miles-investigator']
mileage_rate_investigator = request.POST['mileage-rate-investigator']
if 'dol' in request.POST and request.POST['dol']:
dol = request.POST['dol']
dol_components = dol.split('/')
dol = datetime.datetime(int(dol_components[2]), int(dol_components[0]), int(dol_components[1]))
case_instance.is_dol_provided = True
else:
dol = datetime.datetime(2017, 1, 1)
case_instance.is_dol_provided = False
if 'closing-date' in request.POST and request.POST['closing-date']:
doc = request.POST['closing-date']
doc_components = doc.split('/')
doc = datetime.datetime(int(doc_components[2]), int(doc_components[0]), int(doc_components[1]))
case_instance.closing_date = doc
# else:
# dos = None
# case_instance.closing_date = dos
if 'edos' in request.POST and request.POST['edos']:
edos = request.POST['edos']
edos_components = edos.split('/')
edos = datetime.datetime(int(edos_components[2]), int(edos_components[0]), int(edos_components[1]))
case_instance.expected_closing_date = edos
else:
edos = None
case_instance.expected_closing_date = edos
if 'dos' in request.POST and request.POST['dos']:
dos = request.POST['dos']
dos_components = dos.split('/')
dos = datetime.datetime(int(dos_components[2]), int(dos_components[0]), int(dos_components[1]))
case_instance.date_of_signup = dos
else:
dos = case.created_at
case_instance.date_of_signup = dos
locality = request.POST['locality']
adult_clients = request.POST['adult-clients']
child_clients = request.POST['child-clients']
no_of_miles_travelled = None
if 'no-of-miles-travelled' in request.POST:
no_of_miles_travelled = float(request.POST['no-of-miles-travelled'])
additional_expenses = None
if 'additional-expenses' in request.POST:
additional_expenses = float(request.POST['additional-expenses'])
rsn_extra_expenses = None
if 'rsn-extra-expenses' in request.POST:
rsn_extra_expenses = float(request.POST['rsn-extra-expenses'])
expected_payment = request.POST['expected-payment-for-case']
client_name = request.POST['client-name']
client_mobile_phone = request.POST['mobile-phone']
client_home_phone = request.POST['home-phone']
client_primary_email = request.POST['primary-email']
client_secondary_email = request.POST['secondary-email']
client_language = request.POST['language']
number_of_adult_signatures_required = int(request.POST['number-of-adult-signatures-required'])
number_of_child_signatures_required = int(request.POST['number-of-child-signatures-required'])
number_of_adult_signatures_obtained = int(request.POST['number-of-adult-signatures-obtained'])
number_of_child_signatures_obtained = int(request.POST['number-of-child-signatures-obtained'])
# case_instance.law_firm = law_firm
case_instance.name = name
case_instance.type = case_type
case_instance.type_description = case_type_description
case_instance.basic_fee_law_firm = basic_fee_law_firm
case_instance.no_of_free_miles_law_firm = no_of_free_miles_law_firm
case_instance.mileage_rate_law_firm = mileage_rate_law_firm
if not case_instance.is_investigator_paid:
case_instance.basic_fee_investigator = basic_fee_investigator
case_instance.no_of_free_miles_investigator = no_of_free_miles_investigator
case_instance.mileage_rate_investigator = mileage_rate_investigator
case_instance.dol = dol
case_instance.locality = locality
if no_of_miles_travelled:
case_instance.no_of_miles_travelled = no_of_miles_travelled
if additional_expenses:
case_instance.additional_expenses = additional_expenses
if rsn_extra_expenses:
case_instance.rsn_extra_expenses = rsn_extra_expenses
case_instance.expected_payment = expected_payment
case_instance.adult_clients = adult_clients
case_instance.child_clients = child_clients
case_instance.client_name = client_name
case_instance.client_mobile_phone = client_mobile_phone
case_instance.client_home_phone = client_home_phone
case_instance.client_primary_email = client_primary_email
case_instance.client_secondary_email = client_secondary_email
case_instance.client_language = client_language
case_instance.number_of_adult_signatures_required = number_of_adult_signatures_required
case_instance.number_of_child_signatures_required = number_of_child_signatures_required
case_instance.number_of_adult_signatures_obtained = number_of_adult_signatures_obtained
case_instance.number_of_child_signatures_obtained = number_of_child_signatures_obtained
# if 'documents' in request.FILES:
# uploaded_documents = request.FILES['documents']
# case_instance.documents = uploaded_documents
if 'document' in request.POST:
documents = request.POST.getlist('document')
for document in documents:
if AttachedDocument.objects.filter(document=document).filter(case=case_instance):
print "Already attached to this case"
else:
attached_document = Document.objects.get(pk=document)
new_attached_document = AttachedDocument(case=case_instance, document=attached_document)
new_attached_document.save()
case_instance.save()
context['updated'] = True
elif request.is_ajax() and request.POST['context'] == 'pay':
case_instance.is_investigator_paid = True
case_instance.amount_paid_to_investigator = case_instance.get_investigator_price()
case_instance.save()
return HttpResponse('')
elif request.is_ajax() and request.POST['context'] == 'unpay':
case_instance.is_investigator_paid = False
case_instance.save()
return HttpResponse('')
attached_documents = AttachedDocument.objects.filter(case = case_instance)
context['attached_documents'] = attached_documents
documents = Document.objects.filter(law_firm=case_instance.law_firm)
flag = 0
documents_available = []
for document in documents:
flag = 0
for attached_document in attached_documents:
if document.pk == attached_document.document.pk:
flag = 1
if flag == 0:
documents_available.append(document)
context['documents'] = documents_available
client_mobile_phone = case_instance.client_mobile_phone.replace('-', '').replace(' ', '')
context['client_mobile_phone'] = client_mobile_phone
client_home_phone = case_instance.client_home_phone.replace('-', '').replace(' ', '')
context['client_home_phone'] = client_home_phone
return render(request, 'system_admin/case_details.html', context)
# Complete rewrite of invoicing
@login_required(login_url='/')
@permission_required('system_admin.can_view_system_admin',raise_exception=True)
def generate_invoice_with_invoice_lines(request):
import datetime
if not request.user.is_authenticated():
return HttpResponseRedirect('/')
try:
system_admin = SystemAdmin.objects.get(user=request.user)
except:
return HttpResponseRedirect('/')
if request.POST:
law_firm_id = request.POST.get('law-firm')
law_firm = LawFirm.objects.get(pk=law_firm_id)
from_date = request.POST.get('from')
to_date = request.POST.get('to')
try:
from_components = from_date.split('/')
from_date = datetime.datetime(int(from_components[2]), int(from_components[0]), int(from_components[1]))
to_components = to_date.split('/')
to_date = datetime.datetime(int(to_components[2]), int(to_components[0]), int(to_components[1]))
except:
return HttpResponseRedirect('/administrator/generate-invoice/')
all_cases_in_range = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date)\
.filter(created_at__lte=to_date).filter(status='Closed')
if request.POST.get('context') == 'list-cases':
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
context['cases'] = all_cases_in_range
context['from'] = request.POST['from']
context['to'] = request.POST['to']
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST.get('context') == 'print-aggregate-invoice':
try:
case_ids = request.POST['case-ids'].replace('"', '').replace("'", '').replace('[', '').replace(']', '')\
.replace(' ', '').split(',')
except:
return HttpResponseRedirect('/')
all_cases_in_range = []
for case_id in case_ids:
case_instance = Case.objects.get(pk=case_id)
if case_instance.status.lower() != 'closed':
print "Invalid request; Invoice cannot be generated for cases which have not yet been closed. Case ID: %d"%case_instance.id
raise SuspiciousOperation("Invalid request; Invoice cannot be generated for cases which have not yet been closed. Case ID: %d"%case_instance.id)
all_cases_in_range.append(case_instance)
if request.POST['invoice-sending'] == 'download':
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="invoice.pdf"'
generate_the_aggregate_invoice(response, law_firm, all_cases_in_range)
return response
elif request.POST['invoice-sending'] == 'mail':
pdf_buffer = BytesIO()
generate_the_aggregate_invoice(pdf_buffer, law_firm, all_cases_in_range)
for case in all_cases_in_range:
case.is_invoice_mailed = True
case.save()
pdf = pdf_buffer.getvalue()
pdf_buffer.close()
law_firm_email = request.POST.get('email')
email_body = 'Find the attached invoice'
message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm_email])
message.attach('Invoice.pdf', pdf, 'application/pdf')
message.send()
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = all_cases_in_range
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST.get('context') == 'print-bulk-invoice':
try:
case_ids = request.POST['case-ids'].replace('"', '').replace("'", '').replace('[', '').replace(']', '')\
.replace(' ', '').split(',')
except:
return HttpResponseRedirect('/')
all_cases_in_range = []
for case_id in case_ids:
case_instance = Case.objects.get(pk=case_id)
all_cases_in_range.append(case_instance)
# ret_zip = None
if request.POST['invoice-sending'] == 'download':
response = HttpResponse(content_type='application/zip')
response['Content-Disposition'] = 'filename=all_invoices.zip'
buff = StringIO.StringIO()
archive = zipfile.ZipFile(buff, 'w' ,zipfile.ZIP_DEFLATED)
list_of_pdfs = []
for case_in_range in all_cases_in_range:
file_like_object = StringIO.StringIO()
generate_the_invoice(file_like_object, law_firm, [case_in_range])
archive.writestr('Invoice for ' + case_in_range.name + '.pdf', file_like_object.getvalue())
archive.close()
buff.flush()
ret_zip = buff.getvalue()
buff.close()
response.write(ret_zip)
if request.POST['download'] == 'true':
return response
else:
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date).filter(created_at__lt=to_date).filter(status='Closed')
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST['invoice-sending'] == 'mail':
buff = StringIO.StringIO()
archive = zipfile.ZipFile(buff, 'w' ,zipfile.ZIP_DEFLATED)
list_of_pdfs = []
for case_in_range in all_cases_in_range:
file_like_object = StringIO.StringIO()
generate_the_invoice(file_like_object, law_firm, [case_in_range])
case_in_range.is_invoice_mailed = True
case_in_range.save()
archive.writestr('Invoice for ' + case_in_range.name + '.pdf', file_like_object.getvalue())
archive.close()
buff.flush()
ret_zip = buff.getvalue()
buff.close()
law_firm_email = request.POST.get('email')
email_body = 'Find the attached invoice'
message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm_email])
message.attach('all_invoices.zip', ret_zip, 'application/zip')
message.send()
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = all_cases_in_range
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST.get('context') == 'print-single-invoice':
case_instance = None
try:
case_id = request.POST['case-id']
case_instance = Case.objects.get(pk=case_id)
except:
return HttpResponseRedirect('/')
all_cases_in_range = [case_instance]
if request.POST['invoice-sending'] == 'download':
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="Invoice - '+ case_instance.name +'.pdf"'
generate_the_invoice(response, law_firm, all_cases_in_range)
print (response)
if request.POST['download'] == 'true':
return response
else:
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date).filter(created_at__lt=to_date).filter(status='Closed')
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST['invoice-sending'] == 'mail':
pdf_buffer = BytesIO()
generate_the_invoice(pdf_buffer, law_firm, all_cases_in_range)
for case in all_cases_in_range:
case.is_invoice_mailed = True
case.save()
pdf = pdf_buffer.getvalue()
pdf_buffer.close()
email_body = 'Find the attached invoice'
law_firm_email = request.POST.get('email')
# Hardcoding David's Email
# message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm.email_one])
message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm_email])
message.attach('Invoice.pdf', pdf, 'application/pdf')
message.send()
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date).filter(created_at__lt=to_date).filter(status='Closed')
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST['invoice-sending'] == 'print':
context = dict()
context['pagesize'] = 'A4'
template = get_template('system_admin/invoice.html')
result = StringIO.StringIO()
generate_the_invoice(result, law_firm, all_cases_in_range)
invoice_pdf = result.getvalue()
context['cases'] = all_cases_in_range
html = template.render(context)
pdf = pisa.pisaDocument(
StringIO.StringIO(html.encode("ISO-8859-1")),
dest=result, link_callback=fetch_resources)
if not pdf.err:
return HttpResponse(invoice_pdf, content_type='application/pdf')
return HttpResponse("Error: <pre>%s</pre>" % escape(html))
elif request.POST.get('context') == 'print-combined-invoice-as-csv':
case_ids = []
case_instance = None
try:
case_ids = request.POST['case-ids'].replace('"', '').replace("'", '').replace('[', '').replace(']', '')\
.replace(' ', '').split(',')
print (case_ids)
except:
return HttpResponseRedirect('/')
# pass
all_cases_in_range = []
for case_id in case_ids:
try:
case_instance = Case.objects.get(pk=case_id)
all_cases_in_range.append(case_instance)
except :
context = dict()
context['error'] = 'An error occurred while generating payment report'
return render(request, 'system_admin/generate_report.html',context)
pass
if request.POST['invoice-sending'] == 'download':
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="Combined-invoice.csv"'
response = generate_bulk_invoice_as_csv(response, law_firm, all_cases_in_range)
# print "hello"
if request.POST['download'] == 'true':
# print "true"
return response
else:
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date).filter(created_at__lt=to_date).filter(status='Closed')
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST['invoice-sending'] == 'mail':
csv_buffer = BytesIO()
generate_bulk_invoice_as_csv(csv_buffer, law_firm, all_cases_in_range)
for case in all_cases_in_range:
case.is_invoice_as_csv_mailed = True
case.save()
csv = csv_buffer.getvalue()
csv_buffer.close()
law_firm_email = request.POST.get('email')
email_body = 'Find the attached invoice as csv'
# Hardcoding David's Email
# message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm.email_one])
message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm_email])
message.attach('Invoice.csv', csv, 'text/csv')
message.send()
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date).filter(created_at__lt=to_date).filter(status='Closed')
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST.get('context') == 'print-single-invoice-csv':
case_instance = None
try:
case_id = request.POST['case-id']
case_instance = Case.objects.get(pk=case_id)
except:
return HttpResponseRedirect('/')
all_cases_in_range = [case_instance]
if request.POST['invoice-sending'] == 'download':
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="Invoice - '+ case_instance.name +'.csv"'
generate_the_invoice_as_csv(response, law_firm, all_cases_in_range)
# print "hello"
if request.POST['download'] == 'true':
print "true"
return response
else:
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date).filter(created_at__lt=to_date).filter(status='Closed')
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST['invoice-sending'] == 'mail':
csv_buffer = BytesIO()
generate_the_invoice_as_csv(csv_buffer, law_firm, all_cases_in_range)
print "hello"
for case in all_cases_in_range:
print"in loop"
case.is_invoice_as_csv_mailed = True
case.save()
print (case.is_invoice_as_csv_mailed)
case_name = case.name
csv = csv_buffer.getvalue()
csv_buffer.close()
# law_firm_email = law_firm.email_one
law_firm_email = request.POST.get('email')
print str(law_firm_email)
email_body = 'Find the attached invoice as csv'
# Hardcoding David's Email
# message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm.email_one])
message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm_email])
message.attach('Invoice.csv', csv, 'text/csv')
message.send()
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date).filter(created_at__lt=to_date).filter(status='Closed')
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST.get('context') == 'print-single-invoice-excel':
case_instance = None
try:
case_id = request.POST['case-id']
case_instance = Case.objects.get(pk=case_id)
except:
return HttpResponseRedirect('/')
all_cases_in_range = [case_instance]
if request.POST['invoice-sending'] == 'download':
# response = HttpResponse(content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
# response['Content-Disposition'] = 'attachment; filename="Invoice - '+ case_instance.name +'.xlsx"'
# response = generate_the_invoice_as_excel(response, law_firm, all_cases_in_range)
output = StringIO.StringIO()
output = generate_the_invoice_as_excel(output, law_firm, all_cases_in_range)
output.seek(0)
response = HttpResponse(output.read(), content_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
response['Content-Disposition'] = 'attachment; filename="Invoice- '+ case_instance.name +'.xlsx"'
# return response
# print "hello"
if request.POST['download'] == 'true':
print "true"
return response
else:
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date).filter(created_at__lt=to_date).filter(status='Closed')
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST['invoice-sending'] == 'mail':
excel_buffer = StringIO.StringIO()
generate_the_invoice_as_excel(excel_buffer, law_firm, all_cases_in_range)
print "excel"
excel = excel_buffer.getvalue()
excel_buffer.close()
# law_firm_email = law_firm.email_one
law_firm_email = request.POST.get('email')
print str(law_firm_email)
email_body = 'Find the attached invoice as excel'
# Hardcoding David's Email
# message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm.email_one])
message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm_email])
message.attach('Invoice.xlsx', excel, 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
message.send()
for case in all_cases_in_range:
print"in loop"
case.is_invoice_as_csv_mailed = True
case.save()
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date).filter(created_at__lt=to_date).filter(status='Closed')
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST.get('context') == 'print-combined-invoice-as-excel':
case_ids = []
case_instance = None
try:
case_ids = request.POST['case-ids'].replace('"', '').replace("'", '').replace('[', '').replace(']', '')\
.replace(' ', '').split(',')
print (case_ids)
except:
return HttpResponseRedirect('/')
# pass
all_cases_in_range = []
for case_id in case_ids:
try:
case_instance = Case.objects.get(pk=case_id)
all_cases_in_range.append(case_instance)
except :
context = dict()
context['error'] = 'An error occurred while generating payment report'
return render(request, 'system_admin/generate_report.html',context)
pass
if request.POST['invoice-sending'] == 'download':
output = StringIO.StringIO()
output = generate_bulk_invoice_as_excel(output, law_firm, all_cases_in_range)
output.seek(0)
response = HttpResponse(output.read(), content_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
response['Content-Disposition'] = "attachment; filename=Combined-invoice.xlsx"
if request.POST['download'] == 'true':
return response
else:
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date).filter(created_at__lt=to_date).filter(status='Closed')
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
elif request.POST['invoice-sending'] == 'mail':
excel_buffer = StringIO.StringIO()
generate_bulk_invoice_as_excel(excel_buffer, law_firm, all_cases_in_range)
# for case in all_cases_in_range:
# case.is_invoice_as_excel_mailed = True
excel = excel_buffer.getvalue()
excel_buffer.close()
law_firm_email = request.POST.get('email')
email_body = 'Find the attached invoice as excel'
# Hardcoding David's Email
# message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm.email_one])
message = EmailMessage('Invoice', email_body, 'invoice@rapidsignnow.com', [law_firm_email])
message.attach('Combined-Invoice.xlsx', excel, 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
message.send()
for case in all_cases_in_range:
print"in loop"
case.is_invoice_as_csv_mailed = True
case.save()
context = dict()
context['law_firms'] = LawFirm.objects.all()
context['selected_firm'] = law_firm
context['cases'] = Case.objects.filter(law_firm=law_firm).filter(created_at__gte=from_date).filter(created_at__lt=to_date).filter(status='Closed')
context['from'] = request.POST['from']
context['to'] = request.POST['to']
law_firm_emails = []
if law_firm.email_one:
law_firm_emails.append(law_firm.email_one)
if law_firm.email_two:
law_firm_emails.append(law_firm.email_two)
context['law_firm_emails'] = law_firm_emails
return render(request, 'system_admin/generate_invoice.html', context)
context = dict()
context['law_firms'] = LawFirm.objects.all()
return render(request, 'system_admin/generate_invoice.html', context)
def generate_the_invoice(output, law_firm, cases):
    """Render an invoice PDF for ``cases`` of ``law_firm`` into ``output``.

    If any case already carries an invoice, that existing invoice is simply
    re-rendered and the function returns early (no new Invoice is created).
    Otherwise a new Invoice is persisted, one InvoiceLine is built per case
    from the firm's rate card, the grand total is accumulated, and the
    finished invoice is written with ``print_invoice_as_pdf``.

    :param output: file-like destination the PDF is written into
    :param law_firm: LawFirm supplying rates, name, address and email
    :param cases: non-empty sequence of Case objects to bill
    :raises ValueError: if ``cases`` is empty
    """
    if len(cases) < 1:
        raise ValueError('Cases cannot be zero for invoicing')
    # If a case was already invoiced, re-render that invoice and stop.
    for case in cases:
        if case.invoice is not None:
            print_invoice_as_pdf(output, case.invoice, law_firm)
            return
    entire_invoice_total = 0
    # Snapshot the firm's rate card once; reused for every case below.
    law_firm_rates = law_firm.rates
    default_in_area_payment_for_one_signature = law_firm_rates.default_in_area_payment_for_one_signature
    default_in_area_payment_for_each_additional_adult_signature = law_firm_rates.default_in_area_payment_for_each_additional_adult_signature
    default_in_area_payment_for_children = law_firm_rates.default_in_area_payment_for_children
    maximum_in_area_payment_for_any_number_of_signatures = law_firm_rates.maximum_in_area_payment_for_any_number_of_signatures
    default_out_of_area_payment_for_one_signature = law_firm_rates.default_out_of_area_payment_for_one_signature
    default_out_of_area_payment_for_each_additional_adult_signature = law_firm_rates.default_out_of_area_payment_for_each_additional_adult_signature
    default_out_of_area_payment_for_children = law_firm_rates.default_out_of_area_payment_for_children
    maximum_out_of_area_payment_for_any_number_of_signatures = law_firm_rates.maximum_out_of_area_payment_for_any_number_of_signatures
    invoice = Invoice()
    invoice.law_firm_name = law_firm.name
    invoice.law_firm_address = law_firm.address.simple_address()
    invoice.law_firm_email = law_firm.email_one
    invoice_lines = []
    # Save first so invoice.id exists for the FK on each InvoiceLine.
    invoice.save()
    print("Newly created Invoice ID: %d" % invoice.id)
    for case in cases:
        # Attach this case to the freshly created invoice.
        case.invoice = invoice
        case.save()
        print("Invoice for case no: %d" % case.id)
        # Create a new invoice line for this case.
        invoice_line = InvoiceLine()
        # FK assignments
        invoice_line.invoice = invoice
        invoice_line.case = case
        number_of_adult_signatures_required = 0
        number_of_child_signatures_required = 0
        number_of_adult_signatures_obtained = 0
        number_of_child_signatures_obtained = 0
        is_signature_obtained = False
        did_investigator_travel = False
        # Copy the case fields that go onto the invoice line verbatim.
        case_name = case.name
        case_created_at = case.created_at
        date_of_signup = case.date_of_signup
        investigator_name = case.investigator.user.first_name + ' ' + case.investigator.user.last_name
        client_name = case.client_name
        client_address = case.client_address.simple_address()
        dol = case.dol
        case_closing_date = case.closing_date
        is_dol_provided = case.is_dol_provided
        locality = case.locality
        additional_expenses_description = case.additional_expenses_description
        rsn_extra_expenses = case.rsn_extra_expenses
        rsn_extra_expenses_info = case.rsn_extra_expenses_description
        adult_clients = case.adult_clients
        child_clients = case.child_clients
        basic_fee_law_firm = case.basic_fee_law_firm
        no_of_free_miles_law_firm = case.no_of_free_miles_law_firm
        mileage_rate_law_firm = case.mileage_rate_law_firm
        cancelled_by = case.cancelled_by
        print("cancelled_by:%s" % case.cancelled_by)
        cancelled_reason_description = case.cancelled_reason_description
        additional_expenses = case.additional_expenses
        no_of_miles_travelled = case.no_of_miles_travelled
        # Need to calculate these
        travel_expenses = 0
        total_signature_fee_for_adults = 0
        total_signature_fee_for_children = 0
        total_signature_fee = 0
        total_amount_billed_to_law_firm = 0
        if case.is_signature_obtained:
            # Signatures were obtained: bill signature fees plus expenses.
            number_of_adult_signatures_required = case.number_of_adult_signatures_required
            number_of_child_signatures_required = case.number_of_child_signatures_required
            number_of_adult_signatures_obtained = case.number_of_adult_signatures_obtained
            number_of_child_signatures_obtained = case.number_of_child_signatures_obtained
            is_signature_obtained = True
            did_investigator_travel = True
            number_of_billed_adults = 0
            number_of_billed_children = 0
            # Mileage beyond the free allowance is billed per mile.
            if no_of_miles_travelled > no_of_free_miles_law_firm and int(no_of_miles_travelled) != 0:
                travel_expenses = ((no_of_miles_travelled - no_of_free_miles_law_firm) * mileage_rate_law_firm)
                print("Travel expenses:( %f free miles - %f miles travelled) * $%f per mile = $%f" % (float(no_of_free_miles_law_firm), float(no_of_miles_travelled), float(mileage_rate_law_firm), float(travel_expenses)))
            # With only child signatures, one child is billed at the adult rate.
            if number_of_adult_signatures_obtained < 1 and number_of_child_signatures_obtained > 0:
                number_of_billed_adults = 1
                number_of_billed_children = number_of_child_signatures_obtained - 1
            else:
                number_of_billed_adults = number_of_adult_signatures_obtained
                number_of_billed_children = number_of_child_signatures_obtained
            if locality.lower() == 'in area':
                # First adult at the base rate, additional adults at the add-on rate.
                total_signature_fee_for_adults = default_in_area_payment_for_one_signature
                if (number_of_billed_adults - 1) > 0:
                    total_signature_fee_for_adults += (number_of_billed_adults - 1) * default_in_area_payment_for_each_additional_adult_signature
                total_signature_fee_for_children = number_of_billed_children * default_in_area_payment_for_children
                total_signature_fee = total_signature_fee_for_adults + total_signature_fee_for_children
                # Cap the signature fee at the in-area maximum.
                if total_signature_fee > maximum_in_area_payment_for_any_number_of_signatures:
                    total_signature_fee = maximum_in_area_payment_for_any_number_of_signatures
                total_amount_billed_to_law_firm = total_signature_fee + travel_expenses + additional_expenses + rsn_extra_expenses
            else:
                # case out of area
                total_signature_fee_for_adults = default_out_of_area_payment_for_one_signature
                if (number_of_billed_adults - 1) > 0:
                    total_signature_fee_for_adults += (number_of_billed_adults - 1) * default_out_of_area_payment_for_each_additional_adult_signature
                total_signature_fee_for_children = number_of_billed_children * default_out_of_area_payment_for_children
                total_signature_fee = total_signature_fee_for_adults + total_signature_fee_for_children
                # Cap the signature fee at the out-of-area maximum.
                if total_signature_fee > maximum_out_of_area_payment_for_any_number_of_signatures:
                    total_signature_fee = maximum_out_of_area_payment_for_any_number_of_signatures
                total_amount_billed_to_law_firm = total_signature_fee + travel_expenses + additional_expenses + rsn_extra_expenses
        elif case.did_investigator_travel:
            # No signatures, but the investigator travelled: bill the basic fee plus expenses.
            number_of_adult_signatures_required = case.number_of_adult_signatures_required
            number_of_child_signatures_required = case.number_of_child_signatures_required
            number_of_adult_signatures_obtained = 0
            number_of_child_signatures_obtained = 0
            is_signature_obtained = False
            did_investigator_travel = True
            if no_of_miles_travelled > no_of_free_miles_law_firm and int(no_of_miles_travelled) != 0:
                travel_expenses = ((no_of_miles_travelled - no_of_free_miles_law_firm) * mileage_rate_law_firm)
                print("Travel expenses:( %f free miles - %f miles travelled) * $%f per mile = $%f" % (float(no_of_free_miles_law_firm), float(no_of_miles_travelled), float(mileage_rate_law_firm), float(travel_expenses)))
            else:
                print("Travel expenses is $0")
            total_amount_billed_to_law_firm = basic_fee_law_firm + travel_expenses + additional_expenses + rsn_extra_expenses
        else:
            # Nothing billable for this case.
            # BUGFIX: was `travel_expense = 0` (dead variable, typo) — reset the
            # real accumulator name instead.
            travel_expenses = 0
            total_signature_fee_for_adults = 0
            total_signature_fee_for_children = 0
            total_signature_fee = 0
            total_amount_billed_to_law_firm = 0
        # Just in case someone enters a negative value
        if total_signature_fee_for_children < 0:
            total_signature_fee_for_children = 0
        if total_signature_fee_for_adults < 0:
            total_signature_fee_for_adults = 0
        if travel_expenses < 0:
            travel_expenses = 0
        if additional_expenses < 0:
            additional_expenses = 0
        if no_of_miles_travelled < 0:
            no_of_miles_travelled = 0
        invoice_line.number_of_adult_signatures_required = number_of_adult_signatures_required
        invoice_line.number_of_child_signatures_required = number_of_child_signatures_required
        invoice_line.number_of_adult_signatures_obtained = number_of_adult_signatures_obtained
        invoice_line.number_of_child_signatures_obtained = number_of_child_signatures_obtained
        # other static assignments
        invoice_line.basic_fee_law_firm = basic_fee_law_firm
        invoice_line.no_of_free_miles_law_firm = no_of_free_miles_law_firm
        invoice_line.mileage_rate_law_firm = mileage_rate_law_firm
        invoice_line.case_name = case_name
        invoice_line.investigator_name = investigator_name
        invoice_line.client_name = client_name
        invoice_line.client_address = client_address
        invoice_line.case_created_at = case_created_at
        invoice_line.dol = dol
        invoice_line.is_dol_provided = is_dol_provided
        invoice_line.case_closing_date = case_closing_date
        invoice_line.date_of_signup = date_of_signup
        invoice_line.locality = locality
        invoice_line.adult_clients = adult_clients
        invoice_line.child_clients = child_clients
        invoice_line.cancelled_by = cancelled_by
        invoice_line.cancelled_reason_description = cancelled_reason_description
        invoice_line.is_signature_obtained = is_signature_obtained
        invoice_line.did_investigator_travel = did_investigator_travel
        invoice_line.additional_expenses = additional_expenses
        invoice_line.rsn_extra_expenses = rsn_extra_expenses
        invoice_line.no_of_miles_travelled = no_of_miles_travelled
        invoice_line.travel_expenses = travel_expenses
        invoice_line.total_signature_fee_for_adults = total_signature_fee_for_adults
        invoice_line.total_signature_fee_for_children = total_signature_fee_for_children
        invoice_line.total_signature_fee = total_signature_fee
        invoice_line.total_amount_billed_to_law_firm = total_amount_billed_to_law_firm
        invoice_line.additional_expenses_description = additional_expenses_description
        invoice_line.rsn_extra_expenses_description = rsn_extra_expenses_info
        print("is_signature_obtained: %r" % is_signature_obtained)
        print("did_investigator_travel: %r" % did_investigator_travel)
        print("travel_expense: %f" % travel_expenses)
        print("additional_expenses: %f" % additional_expenses)
        print("rsn_extra_expenses: %f" % rsn_extra_expenses)
        print("total_signature_fee_for_adults: %f" % total_signature_fee_for_adults)
        print("total_signature_fee_for_children: %f" % total_signature_fee_for_children)
        print("total_signature_fee: %f" % total_signature_fee)
        print("total_amount_billed_to_law_firm: %f" % total_amount_billed_to_law_firm)
        print("Cancelled_by:%s" % cancelled_by)
        # Save the invoice_line
        invoice_line.save()
        # add the case total to the invoice total
        entire_invoice_total += total_amount_billed_to_law_firm
        invoice_lines.append(invoice_line)
    invoice.total_billed_amount = entire_invoice_total
    invoice.save()
    print_invoice_as_pdf(output, invoice, law_firm)
def generate_the_aggregate_invoice(output, law_firm, cases):
    """Build a fresh aggregate Invoice for ``cases`` and write it as a PDF.

    Unlike ``generate_the_invoice``, this always creates a new Invoice
    (it never short-circuits on an existing one) and deliberately does NOT
    link the cases to the new invoice — only the InvoiceLines reference it.
    The result is rendered with ``print_aggregate_invoice_as_pdf``.

    :param output: file-like destination the PDF is written into
    :param law_firm: LawFirm supplying rates, name, address and email
    :param cases: non-empty sequence of Case objects to bill
    :raises ValueError: if ``cases`` is empty
    """
    if len(cases) < 1:
        raise ValueError('Cases cannot be zero for invoicing')
    # for case in cases:
    #     if case.invoice is not None:
    #         case.invoice = None
    #         case.total_amount_billed_to_law_firm = 0
    #         case.save()
    entire_invoice_total = 0
    # Snapshot the firm's rate card once; reused for every case below.
    law_firm_rates = law_firm.rates
    default_in_area_payment_for_one_signature = law_firm_rates.default_in_area_payment_for_one_signature
    default_in_area_payment_for_each_additional_adult_signature = law_firm_rates.default_in_area_payment_for_each_additional_adult_signature
    default_in_area_payment_for_children = law_firm_rates.default_in_area_payment_for_children
    maximum_in_area_payment_for_any_number_of_signatures = law_firm_rates.maximum_in_area_payment_for_any_number_of_signatures
    default_out_of_area_payment_for_one_signature = law_firm_rates.default_out_of_area_payment_for_one_signature
    default_out_of_area_payment_for_each_additional_adult_signature = law_firm_rates.default_out_of_area_payment_for_each_additional_adult_signature
    default_out_of_area_payment_for_children = law_firm_rates.default_out_of_area_payment_for_children
    maximum_out_of_area_payment_for_any_number_of_signatures = law_firm_rates.maximum_out_of_area_payment_for_any_number_of_signatures
    invoice = Invoice()
    invoice.law_firm_name = law_firm.name
    invoice.law_firm_address = law_firm.address.simple_address()
    invoice.law_firm_email = law_firm.email_one
    invoice_lines = []
    # Save first so invoice.id exists for the FK on each InvoiceLine.
    invoice.save()
    print("Newly created Invoice ID: %d" % invoice.id)
    for case in cases:
        # NOTE: the case is intentionally not linked to this aggregate invoice.
        # case.invoice = invoice
        case.save()
        print("Invoice for case no: %d" % case.id)
        # Create a new invoice line for this case.
        invoice_line = InvoiceLine()
        # FK assignments
        invoice_line.invoice = invoice
        invoice_line.case = case
        number_of_adult_signatures_required = 0
        number_of_child_signatures_required = 0
        number_of_adult_signatures_obtained = 0
        number_of_child_signatures_obtained = 0
        is_signature_obtained = False
        did_investigator_travel = False
        # Copy the case fields that go onto the invoice line verbatim.
        case_name = case.name
        case_created_at = case.created_at
        investigator_name = case.investigator.user.first_name + ' ' + case.investigator.user.last_name
        client_name = case.client_name
        client_address = case.client_address.simple_address()
        dol = case.dol
        case_closing_date = case.closing_date
        is_dol_provided = case.is_dol_provided
        locality = case.locality
        additional_expenses_description = case.additional_expenses_description
        date_of_signup = case.date_of_signup
        adult_clients = case.adult_clients
        child_clients = case.child_clients
        basic_fee_law_firm = case.basic_fee_law_firm
        no_of_free_miles_law_firm = case.no_of_free_miles_law_firm
        mileage_rate_law_firm = case.mileage_rate_law_firm
        cancelled_by = case.cancelled_by
        print("cancelled_by:%s" % case.cancelled_by)
        cancelled_reason_description = case.cancelled_reason_description
        additional_expenses = case.additional_expenses
        no_of_miles_travelled = case.no_of_miles_travelled
        rsn_extra_expenses = case.rsn_extra_expenses
        rsn_extra_expenses_info = case.rsn_extra_expenses_description
        # Need to calculate these
        travel_expenses = 0
        total_signature_fee_for_adults = 0
        total_signature_fee_for_children = 0
        total_signature_fee = 0
        total_amount_billed_to_law_firm = 0
        if case.is_signature_obtained:
            # Signatures were obtained: bill signature fees plus expenses.
            number_of_adult_signatures_required = case.number_of_adult_signatures_required
            number_of_child_signatures_required = case.number_of_child_signatures_required
            number_of_adult_signatures_obtained = case.number_of_adult_signatures_obtained
            number_of_child_signatures_obtained = case.number_of_child_signatures_obtained
            is_signature_obtained = True
            did_investigator_travel = True
            number_of_billed_adults = 0
            number_of_billed_children = 0
            # Mileage beyond the free allowance is billed per mile.
            if no_of_miles_travelled > no_of_free_miles_law_firm and int(no_of_miles_travelled) != 0:
                travel_expenses = ((no_of_miles_travelled - no_of_free_miles_law_firm) * mileage_rate_law_firm)
                print("Travel expenses:( %f free miles - %f miles travelled) * $%f per mile = $%f" % (float(no_of_free_miles_law_firm), float(no_of_miles_travelled), float(mileage_rate_law_firm), float(travel_expenses)))
            # With only child signatures, one child is billed at the adult rate.
            if number_of_adult_signatures_obtained < 1 and number_of_child_signatures_obtained > 0:
                number_of_billed_adults = 1
                number_of_billed_children = number_of_child_signatures_obtained - 1
            else:
                number_of_billed_adults = number_of_adult_signatures_obtained
                number_of_billed_children = number_of_child_signatures_obtained
            if locality.lower() == 'in area':
                # First adult at the base rate, additional adults at the add-on rate.
                total_signature_fee_for_adults = default_in_area_payment_for_one_signature
                if (number_of_billed_adults - 1) > 0:
                    total_signature_fee_for_adults += (number_of_billed_adults - 1) * default_in_area_payment_for_each_additional_adult_signature
                total_signature_fee_for_children = number_of_billed_children * default_in_area_payment_for_children
                total_signature_fee = total_signature_fee_for_adults + total_signature_fee_for_children
                # Cap the signature fee at the in-area maximum.
                if total_signature_fee > maximum_in_area_payment_for_any_number_of_signatures:
                    total_signature_fee = maximum_in_area_payment_for_any_number_of_signatures
                total_amount_billed_to_law_firm = total_signature_fee + travel_expenses + additional_expenses + rsn_extra_expenses
            else:
                # case out of area
                total_signature_fee_for_adults = default_out_of_area_payment_for_one_signature
                if (number_of_billed_adults - 1) > 0:
                    total_signature_fee_for_adults += (number_of_billed_adults - 1) * default_out_of_area_payment_for_each_additional_adult_signature
                total_signature_fee_for_children = number_of_billed_children * default_out_of_area_payment_for_children
                total_signature_fee = total_signature_fee_for_adults + total_signature_fee_for_children
                # Cap the signature fee at the out-of-area maximum.
                if total_signature_fee > maximum_out_of_area_payment_for_any_number_of_signatures:
                    total_signature_fee = maximum_out_of_area_payment_for_any_number_of_signatures
                total_amount_billed_to_law_firm = total_signature_fee + travel_expenses + additional_expenses + rsn_extra_expenses
        elif case.did_investigator_travel:
            # No signatures, but the investigator travelled: bill the basic fee plus expenses.
            number_of_adult_signatures_required = case.number_of_adult_signatures_required
            number_of_child_signatures_required = case.number_of_child_signatures_required
            number_of_adult_signatures_obtained = 0
            number_of_child_signatures_obtained = 0
            is_signature_obtained = False
            did_investigator_travel = True
            if no_of_miles_travelled > no_of_free_miles_law_firm and int(no_of_miles_travelled) != 0:
                travel_expenses = ((no_of_miles_travelled - no_of_free_miles_law_firm) * mileage_rate_law_firm)
                print("Travel expenses:( %f free miles - %f miles travelled) * $%f per mile = $%f" % (float(no_of_free_miles_law_firm), float(no_of_miles_travelled), float(mileage_rate_law_firm), float(travel_expenses)))
            else:
                print("Travel expenses is $0")
            total_amount_billed_to_law_firm = basic_fee_law_firm + travel_expenses + additional_expenses + rsn_extra_expenses
        else:
            # Nothing billable for this case.
            # BUGFIX: was `travel_expense = 0` (dead variable, typo) — reset the
            # real accumulator name instead.
            travel_expenses = 0
            total_signature_fee_for_adults = 0
            total_signature_fee_for_children = 0
            total_signature_fee = 0
            total_amount_billed_to_law_firm = 0
        # Just in case someone enters a negative value
        if total_signature_fee_for_children < 0:
            total_signature_fee_for_children = 0
        if total_signature_fee_for_adults < 0:
            total_signature_fee_for_adults = 0
        if travel_expenses < 0:
            travel_expenses = 0
        if additional_expenses < 0:
            additional_expenses = 0
        if no_of_miles_travelled < 0:
            no_of_miles_travelled = 0
        invoice_line.number_of_adult_signatures_required = number_of_adult_signatures_required
        invoice_line.number_of_child_signatures_required = number_of_child_signatures_required
        invoice_line.number_of_adult_signatures_obtained = number_of_adult_signatures_obtained
        invoice_line.number_of_child_signatures_obtained = number_of_child_signatures_obtained
        # other static assignments
        invoice_line.basic_fee_law_firm = basic_fee_law_firm
        invoice_line.no_of_free_miles_law_firm = no_of_free_miles_law_firm
        invoice_line.mileage_rate_law_firm = mileage_rate_law_firm
        invoice_line.case_name = case_name
        invoice_line.investigator_name = investigator_name
        invoice_line.client_name = client_name
        invoice_line.client_address = client_address
        invoice_line.case_created_at = case_created_at
        invoice_line.dol = dol
        invoice_line.is_dol_provided = is_dol_provided
        invoice_line.case_closing_date = case_closing_date
        invoice_line.date_of_signup = date_of_signup
        invoice_line.locality = locality
        invoice_line.adult_clients = adult_clients
        invoice_line.child_clients = child_clients
        invoice_line.cancelled_by = cancelled_by
        invoice_line.cancelled_reason_description = cancelled_reason_description
        invoice_line.is_signature_obtained = is_signature_obtained
        invoice_line.did_investigator_travel = did_investigator_travel
        invoice_line.rsn_extra_expenses = rsn_extra_expenses
        invoice_line.additional_expenses = additional_expenses
        invoice_line.no_of_miles_travelled = no_of_miles_travelled
        invoice_line.travel_expenses = travel_expenses
        invoice_line.total_signature_fee_for_adults = total_signature_fee_for_adults
        invoice_line.total_signature_fee_for_children = total_signature_fee_for_children
        invoice_line.total_signature_fee = total_signature_fee
        invoice_line.total_amount_billed_to_law_firm = total_amount_billed_to_law_firm
        invoice_line.additional_expenses_description = additional_expenses_description
        invoice_line.rsn_extra_expenses_description = rsn_extra_expenses_info
        print("is_signature_obtained: %r" % is_signature_obtained)
        print("did_investigator_travel: %r" % did_investigator_travel)
        print("travel_expense: %f" % travel_expenses)
        print("additional_expenses: %f" % additional_expenses)
        print("total_signature_fee_for_adults: %f" % total_signature_fee_for_adults)
        print("total_signature_fee_for_children: %f" % total_signature_fee_for_children)
        print("total_signature_fee: %f" % total_signature_fee)
        print("total_amount_billed_to_law_firm: %f" % total_amount_billed_to_law_firm)
        print("Cancelled_by:%s" % cancelled_by)
        # Save the invoice_line
        invoice_line.save()
        # add the case total to the invoice total
        entire_invoice_total += total_amount_billed_to_law_firm
        invoice_lines.append(invoice_line)
    invoice.total_billed_amount = entire_invoice_total
    invoice.save()
    print_aggregate_invoice_as_pdf(output, invoice, law_firm)
def generate_the_invoice_as_csv(output, law_firm, cases):
if len(cases) < 1:
raise ValueError('Cases cannot be zero for invoicing')
for case in cases:
if case.invoice_as_csv is not None:
print_invoice_as_csv(output,case.invoice_as_csv, law_firm)
return
entire_invoice_total = 0
law_firm_rates = law_firm.rates
default_in_area_payment_for_one_signature = law_firm_rates.default_in_area_payment_for_one_signature
default_in_area_payment_for_each_additional_adult_signature = law_firm_rates.default_in_area_payment_for_each_additional_adult_signature
default_in_area_payment_for_children = law_firm_rates.default_in_area_payment_for_children
maximum_in_area_payment_for_any_number_of_signatures = law_firm_rates.maximum_in_area_payment_for_any_number_of_signatures
default_out_of_area_payment_for_one_signature = law_firm_rates.default_out_of_area_payment_for_one_signature
default_out_of_area_payment_for_each_additional_adult_signature = law_firm_rates.default_out_of_area_payment_for_each_additional_adult_signature
default_out_of_area_payment_for_children = law_firm_rates.default_out_of_area_payment_for_children
maximum_out_of_area_payment_for_any_number_of_signatures = law_firm_rates.maximum_out_of_area_payment_for_any_number_of_signatures
invoice_as_csv = Invoice()
invoice_as_csv.law_firm_name = law_firm.name
invoice_as_csv.law_firm_address = law_firm.address.simple_address()
invoice_as_csv.law_firm_email = law_firm.email_one
invoice_lines = []
invoice_as_csv.save()
print "Newly created Invoice ID: %d"%invoice_as_csv.id
for case in cases:
case.invoice_as_csv = invoice_as_csv
case.save()
print "Invoice for case no: %d"%case.id
#create new invoice line
invoice_line = InvoiceLine()
#FK assignments
invoice_line.invoice = invoice_as_csv
invoice_line.case = case
number_of_adult_signatures_required = 0
number_of_child_signatures_required = 0
number_of_adult_signatures_obtained = 0
number_of_child_signatures_obtained = 0
is_signature_obtained = False
did_investigator_travel = False
case_name = case.name
case_created_at = case.created_at
investigator_name = case.investigator.user.first_name + ' '+ case.investigator.user.last_name
client_name = case.client_name
client_address = case.client_address.simple_address()
dol = case.dol
case_closing_date = case.closing_date
is_dol_provided = case.is_dol_provided
locality = case.locality
additional_expenses_description = case.additional_expenses_description
date_of_signup = case.date_of_signup
adult_clients = case.adult_clients
child_clients = case.child_clients
basic_fee_law_firm = case.basic_fee_law_firm
no_of_free_miles_law_firm = case.no_of_free_miles_law_firm
mileage_rate_law_firm = case.mileage_rate_law_firm
cancelled_by = case.cancelled_by
print "cancelled_by:%s"%case.cancelled_by
cancelled_reason_description = case.cancelled_reason_description
additional_expenses = case.additional_expenses
no_of_miles_travelled = case.no_of_miles_travelled
#Need to calculate these
travel_expenses = 0
total_signature_fee_for_adults = 0
total_signature_fee_for_children = 0
total_signature_fee = 0
total_amount_billed_to_law_firm = 0
if case.is_signature_obtained:
number_of_adult_signatures_required = case.number_of_adult_signatures_required
number_of_child_signatures_required = case.number_of_child_signatures_required
number_of_adult_signatures_obtained = case.number_of_adult_signatures_obtained
number_of_child_signatures_obtained = case.number_of_child_signatures_obtained
is_signature_obtained = True
did_investigator_travel = True
number_of_billed_adults = 0
number_of_billed_children = 0
if no_of_miles_travelled > no_of_free_miles_law_firm and int(no_of_miles_travelled) != 0:
travel_expenses = ((no_of_miles_travelled - no_of_free_miles_law_firm) * mileage_rate_law_firm)
print "Travel expenses:( %f free miles - %f miles travelled) * $%f per mile = $%f"%(float(no_of_free_miles_law_firm),float(no_of_miles_travelled), float(mileage_rate_law_firm),float(travel_expenses))
if number_of_adult_signatures_obtained < 1 and number_of_child_signatures_obtained >0:
number_of_billed_adults = 1
number_of_billed_children = number_of_child_signatures_obtained - 1
else:
number_of_billed_adults = number_of_adult_signatures_obtained
number_of_billed_children = number_of_child_signatures_obtained
if locality.lower() == 'in area':
total_signature_fee_for_adults = default_in_area_payment_for_one_signature
if (number_of_billed_adults - 1) > 0:
total_signature_fee_for_adults += (number_of_billed_adults - 1) * default_in_area_payment_for_each_additional_adult_signature
total_signature_fee_for_children = number_of_billed_children * default_in_area_payment_for_children
total_signature_fee = total_signature_fee_for_adults + total_signature_fee_for_children
if total_signature_fee > maximum_in_area_payment_for_any_number_of_signatures:
total_signature_fee = maximum_in_area_payment_for_any_number_of_signatures
total_amount_billed_to_law_firm = total_signature_fee + travel_expenses + additional_expenses
else:
# case out of area
total_signature_fee_for_adults = default_out_of_area_payment_for_one_signature
if (number_of_billed_adults - 1) > 0:
total_signature_fee_for_adults += (number_of_billed_adults - 1) * default_out_of_area_payment_for_each_additional_adult_signature
total_signature_fee_for_children = number_of_billed_children * default_out_of_area_payment_for_children
total_signature_fee = total_signature_fee_for_adults + total_signature_fee_for_children
if total_signature_fee > maximum_out_of_area_payment_for_any_number_of_signatures:
total_signature_fee = maximum_out_of_area_payment_for_any_number_of_signatures
total_amount_billed_to_law_firm = total_signature_fee + travel_expenses + additional_expenses
elif case.did_investigator_travel:
number_of_adult_signatures_required = case.number_of_adult_signatures_required
number_of_child_signatures_required = case.number_of_child_signatures_required
number_of_adult_signatures_obtained = 0
number_of_child_signatures_obtained = 0
is_signature_obtained = False
did_investigator_travel = True
if no_of_miles_travelled > no_of_free_miles_law_firm and int(no_of_miles_travelled) != 0:
travel_expenses = ((no_of_miles_travelled - no_of_free_miles_law_firm) * mileage_rate_law_firm)
print "Travel expenses:( %f free miles - %f miles travelled) * $%f per mile = $%f"%(float(no_of_free_miles_law_firm),float(no_of_miles_travelled), float(mileage_rate_law_firm),float(travel_expenses))
else:
print "Travel expenses is $0"
total_amount_billed_to_law_firm = basic_fee_law_firm + travel_expenses + additional_expenses
pass
else:
travel_expense = 0
total_signature_fee_for_adults = 0
total_signature_fee_for_children = 0
total_signature_fee = 0
total_amount_billed_to_law_firm = 0
pass
# Just in case someone enters a negative value
if total_signature_fee_for_children < 0:
total_signature_fee_for_children = 0
if total_signature_fee_for_adults < 0:
total_signature_fee_for_adults = 0
if travel_expenses< 0:
travel_expenses = 0
if additional_expenses < 0:
additional_expenses = 0
if no_of_miles_travelled < 0:
no_of_miles_travelled = 0
invoice_line.number_of_adult_signatures_required = number_of_adult_signatures_required
invoice_line.number_of_child_signatures_required = number_of_child_signatures_required
invoice_line.number_of_adult_signatures_obtained = number_of_adult_signatures_obtained
invoice_line.number_of_child_signatures_obtained = number_of_child_signatures_obtained
# other static assignments
invoice_line.basic_fee_law_firm = basic_fee_law_firm
invoice_line.no_of_free_miles_law_firm = no_of_free_miles_law_firm
invoice_line.mileage_rate_law_firm = mileage_rate_law_firm
invoice_line.case_name = case_name
invoice_line.investigator_name = investigator_name
invoice_line.client_name = client_name
invoice_line.client_address = client_address
invoice_line.case_created_at = case_created_at
invoice_line.dol = dol
invoice_line.is_dol_provided = is_dol_provided
invoice_line.case_closing_date = case_closing_date
invoice_line.date_of_signup = date_of_signup
invoice_line.locality = locality
invoice_line.adult_clients = adult_clients
invoice_line.child_clients = child_clients
invoice_line.cancelled_by = cancelled_by
invoice_line.cancelled_reason_description = cancelled_reason_description
invoice_line.is_signature_obtained = is_signature_obtained
invoice_line.did_investigator_travel = did_investigator_travel
invoice_line.additional_expenses = additional_expenses
invoice_line.no_of_miles_travelled = no_of_miles_travelled
invoice_line.travel_expenses = travel_expenses
invoice_line.total_signature_fee_for_adults = total_signature_fee_for_adults
invoice_line.total_signature_fee_for_children = total_signature_fee_for_children
invoice_line.total_signature_fee = total_signature_fee
invoice_line.total_amount_billed_to_law_firm = total_amount_billed_to_law_firm
invoice_line.additional_expenses_description = additional_expenses_description
print "is_signature_obtained: %r" %is_signature_obtained
print "did_investigator_travel: %r"%did_investigator_travel
print "travel_expense: %f"%travel_expenses
print "additional_expenses: %f"%additional_expenses
print "total_signature_fee_for_adults: %f"%total_signature_fee_for_adults
print "total_signature_fee_for_children: %f"%total_signature_fee_for_children
print "total_signature_fee: %f"%total_signature_fee
print "total_amount_billed_to_law_firm: %f"%total_amount_billed_to_law_firm
print "Cancelled_by:%s"%cancelled_by
#Save the invoice_line
invoice_line.save()
#add the case total to the invoice total
entire_invoice_total += total_amount_billed_to_law_firm
invoice_lines.append(invoice_line)
invoice_as_csv.total_billed_amount = entire_invoice_total
invoice_as_csv.save()
print_invoice_as_csv(output,invoice_as_csv, law_firm)
# --- PDF invoice rendering -------------------------------------------------
# Render one Invoice for `law_firm` as a PDF into `output` using ReportLab
# platypus flowables. Layout: banner row, firm/invoice-meta addresses,
# "BILL TO", a column-header row, then one row per InvoiceLine (case details
# left, fee breakdown right), a grand total, and per-client signature lines.
# SimpleDocTemplate/Paragraph/Table/TableStyle/Spacer/inch/TA_* , `styles`,
# `my_first_page`/`my_later_pages` and the InvoiceLine model all come from
# module scope outside this excerpt.
# NOTE(review): the original indentation was lost when this file was
# extracted; every statement below is kept byte-identical and flat, with
# comments only — block structure must be restored before this can run.
def print_invoice_as_pdf(output,invoice, law_firm):
# output: file-like target for the generated PDF.
# invoice: Invoice model instance being printed.
# law_firm: firm billed; supplies name, address and rate card.
invoice_number = invoice.id
# Lines are ordered chronologically by the originating case's creation date.
invoice_lines = InvoiceLine.objects.filter(invoice=invoice).order_by('case_created_at')
# Corporate colour scheme reused by the TableStyle background commands below.
light_peacock_green = '#dbf2f9'
dark_peacock_green = '#166a83'
doc = SimpleDocTemplate(output)
story = []
style = styles["Normal"]
# Row 0: firm banner (left cell) and "INVOICE" title (right cell).
table_data = [[[Paragraph('Rapid Sign Now', ParagraphStyle('heading', fontSize=15, textColor=dark_peacock_green)), Spacer(1,0.3*inch)],
[Paragraph('INVOICE', ParagraphStyle('heading', fontSize=13, textColor=light_peacock_green, alignment=TA_RIGHT)), Spacer(1,0.3*inch)]]]
# Row 1: hard-coded remit-to address vs. invoice number/date; due date is
# fixed at 10 days from "now".
table_data.append([Paragraph('8 Corporate park suite 300 <br /> Irvine, CA 92606 <br /> customerservice@rapidsignnow.com <br /> www.rapidsignnow.com <br /> P: 310-892-2043',# <br /> F: 123-555-0124',
ParagraphStyle('address', fontSize=7, textColor=dark_peacock_green, leading=12)),
Paragraph('Invoice No.:' + str(invoice_number) + '<br /> Invoice Date: ' + datetime.datetime.now().strftime('%m-%d-%y')
+ ' <br /> Due Date: ' + (datetime.datetime.now() + datetime.timedelta(days=10)).strftime('%m-%d-%y'),
ParagraphStyle('meta', fontSize=7, textColor=light_peacock_green, leading=12))])
# Row 2: bill-to block for the law firm.
table_data.append([Paragraph('<b>BILL TO:</b> ' + law_firm.name + '<br />' + law_firm.address.simple_address(), ParagraphStyle('address', fontSize=7, textColor='#000000', leading=12)), ''])
# Row 3: column headers for the per-case rows that follow.
table_data.append([Paragraph('<b>Case Details</b>', ParagraphStyle('table-header', fontSize=13, textColor=light_peacock_green, alignment=TA_CENTER )),
Paragraph('<b>Amount</b>', ParagraphStyle('table-header', fontSize=13, textColor=light_peacock_green, alignment=TA_CENTER ))])
case_style = ParagraphStyle('case-details', fontSize=8, textColor='#000000')
law_firm_style = ParagraphStyle('law-firm-details', fontSize=8, textColor='#000000')
price_style = ParagraphStyle('case-details', fontSize=8, textColor='#000000', alignment=TA_CENTER)
for invoice_line in invoice_lines:
# Derive the human-readable outcome for this case from the stored flags.
case_final_status = ''
case_cancelled_by = ''
case_status_additional_info = 'N.A'
if invoice_line.is_signature_obtained:
if invoice_line.number_of_adult_signatures_required <= invoice_line.number_of_adult_signatures_obtained and invoice_line.number_of_child_signatures_required <= invoice_line.number_of_child_signatures_obtained:
case_final_status = 'Signature Obtained'
else:
case_final_status = 'Signatures Partially obtained'
elif invoice_line.did_investigator_travel:
case_final_status = 'Signature Not Obtained'
else:
case_final_status = 'Client Cancelled'
case_cancelled_by = invoice_line.cancelled_by
if invoice_line.cancelled_reason_description:
case_status_additional_info = invoice_line.cancelled_reason_description
# Signature fee applies when signatures were obtained, otherwise the
# firm's basic fee is shown.
if invoice_line.is_signature_obtained:
signature_fee = Paragraph('Signature fees: $' + str(invoice_line.total_signature_fee), price_style)
else:
signature_fee = Paragraph('Basic fee: $' + str(invoice_line.basic_fee_law_firm), price_style)
if invoice_line.is_dol_provided:
try:
dol_value = invoice_line.dol.strftime('%m-%d-%y')
# NOTE(review): bare except — presumably guards against a non-date
# `dol`; falls back to d-m-Y assembled by hand. Worth narrowing.
except:
dol_value = str(invoice_line.dol.day) + "-" + str(invoice_line.dol.month) + "-" + str(invoice_line.dol.year)
else:
dol_value = 'Not Provided'
# if invoice_line.is_signature_obtained:
# is_signature_obtained = 'Yes'
# else:
# is_signature_obtained = 'No'
# Merge the firm's expense note with RSN's extra-expense note, if any.
additional_expenses_description = 'N.A'
if invoice_line.additional_expenses_description != '':
if invoice_line.rsn_extra_expenses_description != '':
additional_expenses_description = invoice_line.additional_expenses_description + " and " + invoice_line.rsn_extra_expenses_description
else:
additional_expenses_description = invoice_line.additional_expenses_description
elif invoice_line.rsn_extra_expenses_description != '':
additional_expenses_description = invoice_line.rsn_extra_expenses_description
travel_expenses_line = None
additional_expenses_line = None
# Cancelled cases show "N.A" amounts and the canceller's name as status.
if case_final_status.lower() == 'client cancelled':
travel_expenses_line = Paragraph('Travel expenses: N.A', price_style)
additional_expenses_line = Paragraph('Additional expenses: N.A', price_style)
signature_fee = Paragraph('Signature fee: N.A' , price_style)
case_final_status = case_cancelled_by
else:
travel_expenses_line = Paragraph('Travel expenses: $' + str(invoice_line.travel_expenses), price_style)
additional_expenses = invoice_line.additional_expenses + invoice_line.rsn_extra_expenses
additional_expenses_line = Paragraph('Additional expenses: $' + str(additional_expenses), price_style)
# Fall back to the case creation timestamp when no signup date was recorded.
if invoice_line.date_of_signup is not None:
date_of_signup = invoice_line.date_of_signup
else:
date_of_signup = invoice_line.case_created_at
# One table row per invoice line: details cell, amounts cell.
table_data.append([
[
[Paragraph('<b>Case name: </b>' + invoice_line.case_name, case_style)],
[Paragraph('<b>Investigator: </b>' + invoice_line.investigator_name, case_style)],
[Paragraph('<b>Location: </b>' + invoice_line.client_address, case_style)],
[Paragraph('<b>DOL: </b>' + dol_value, case_style)],
[Paragraph('<b>Date of Sign Up: </b>' + date_of_signup.strftime('%m-%d-%y'), case_style)],
[Paragraph('<b>Locality: </b>' + invoice_line.locality, case_style)],
[Paragraph('<b>No. of miles: </b>' + str(invoice_line.no_of_miles_travelled), case_style)],
[Paragraph('<b>Mileage rate: </b>' + str(invoice_line.mileage_rate_law_firm), case_style)],
[Paragraph('<b>Adult clients: </b>' + invoice_line.adult_clients, case_style)],
[Paragraph('<b>Child clients: </b>' + invoice_line.child_clients, case_style)],
[Paragraph('<b>Additional expenses desc: </b>' + additional_expenses_description, case_style)],
[Paragraph('<b>Final Status: </b>' + case_final_status, case_style)],
[Paragraph('<b>Status additional info : </b>' + case_status_additional_info, case_style)],
],
[
[signature_fee],
[travel_expenses_line],
[additional_expenses_line],
[Paragraph('Total price: $' + str(invoice_line.total_amount_billed_to_law_firm), price_style)]
]
])
# Spacer row, grand total row, trailing spacer row.
table_data.append([Paragraph('', style),
Paragraph('', style)])
table_data.append([Paragraph('<b>Total: </b>', style),
Paragraph('$' + str(invoice.total_billed_amount), style)])
table_data.append([Paragraph('<br/><br/>', style),
Paragraph('', style)])
law_firm_rates = law_firm.rates
# Client name lists are stored as comma-separated strings on the line.
adult_clients_full = invoice_line.adult_clients
adult_clients = adult_clients_full.split(',')
child_clients_full = invoice_line.child_clients
child_clients = child_clients_full.split(',')
# Signature blocks: one "name / ______" row per adult and child client.
if invoice_line.is_signature_obtained:
table_data.append([
[
[Paragraph('<br/><br/><br/><b>Clients Signed </b>', style)],
],
[
[Paragraph('<br/><br/><br/><b>Cost </b>', style)]
]
])
if(invoice_line.locality == 'In Area'):
# NOTE(review): the two fee variables computed below are never used
# afterwards; both locality branches then emit identical rows.
signature_fee_for_adult_clients = law_firm_rates.default_in_area_payment_for_one_signature + ((invoice_line.number_of_adult_signatures_obtained - 1) * law_firm_rates.default_in_area_payment_for_each_additional_adult_signature)
signature_fee_for_child_clients = law_firm_rates.default_in_area_payment_for_children * invoice_line.number_of_child_signatures_obtained
if invoice_line.adult_clients:
for adult_client in adult_clients:
table_data.append([
[
[Paragraph('<br/>'+str(adult_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
if invoice_line.child_clients:
for child_client in child_clients:
table_data.append([
[
[Paragraph('<br/>'+str(child_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
else:
signature_fee_for_adult_clients = law_firm_rates.default_out_of_area_payment_for_one_signature + ((invoice_line.number_of_adult_signatures_obtained - 1) * law_firm_rates.default_out_of_area_payment_for_each_additional_adult_signature)
signature_fee_for_child_clients = law_firm_rates.default_out_of_area_payment_for_children * invoice_line.number_of_child_signatures_obtained
if invoice_line.adult_clients:
for adult_client in adult_clients:
table_data.append([
[
[Paragraph('<br/>'+str(adult_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
if invoice_line.child_clients:
for child_client in child_clients:
table_data.append([
[
[Paragraph('<br/>'+str(child_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
elif (case_final_status == 'Signature Not Obtained'):
# Travelled but unsigned: list clients without fee computation.
table_data.append([
[
[Paragraph('<br/><br/><br/><b>Clients</b>', style)],
],
[
[Paragraph('<br/><br/><br/><b>Cost </b>', style)]
]
])
if(invoice_line.locality == 'In Area'):
if invoice_line.adult_clients:
for adult_client in adult_clients:
table_data.append([
[
[Paragraph('<br/>'+str(adult_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
if invoice_line.child_clients:
for child_client in child_clients:
table_data.append([
[
[Paragraph('<br/>'+str(child_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
else:
if invoice_line.adult_clients:
for adult_client in adult_clients:
table_data.append([
[
[Paragraph('<br/>'+str(adult_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
if invoice_line.child_clients:
for child_client in child_clients:
table_data.append([
[
[Paragraph('<br/>'+str(child_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
else:
# Cancelled case: zeroed detailed-invoice placeholder block.
table_data.append([
[ [Paragraph('<br/><br/><br/><b>Detailed Invoice</b>',style)],
[Paragraph('<b><br/>Basic Fee =</b> $ 0', law_firm_style)],
[Paragraph('<b>Miles travelled =</b> 0',law_firm_style)],
[Paragraph('<b>Travel Expenses =</b> $ 0',law_firm_style)],
[Paragraph('<b>Additional expenses =</b> $ 0',law_firm_style)],
[Paragraph('<b>RSN Extra expenses =</b> $ 0',law_firm_style)],
[Paragraph('<b>Total Price =</b> $ 0',law_firm_style)],
[Paragraph('<br/><br/>',style)]
],
[
[Paragraph('', style)],
[Paragraph('', style)]
]
])
table_content = Table(table_data)
# Header/banner backgrounds and top alignment for all cells.
table_content.setStyle(TableStyle([
('BACKGROUND', (0, 0), (0, 1), light_peacock_green),
('BACKGROUND', (1, 0), (1, 1), dark_peacock_green),
('BACKGROUND', (0, 3), (1, 3), dark_peacock_green),
('VALIGN',(0,0),(-1,-1),'TOP')
]))
# Zebra-stripe the per-line rows (they start at table index 4).
for loop in range(0, len(invoice_lines)):
table_index = loop + 4
if table_index % 2 != 0:
table_content.setStyle(TableStyle([
('BACKGROUND', (0, table_index), (1, table_index), light_peacock_green)
]))
# NOTE(review): rates_style is assigned but never used.
rates_style = ParagraphStyle('case-details', fontSize=10, textColor='#000000')
story.append(table_content)
doc.build(story, onFirstPage=my_first_page, onLaterPages=my_later_pages)
# Redundant trailing `pass`.
pass
# End Complete rewrite of invoicing
# Render an aggregate (multi-case) invoice PDF for `law_firm` into `output`.
# Near-duplicate of print_invoice_as_pdf; differences: an extra per-line
# spacer row, the grand total placed after the signature blocks, a
# different zebra-stripe stride (loop + 7, % 3), and a stray debug print.
# ReportLab flowables, `styles`, `my_first_page`/`my_later_pages` and the
# InvoiceLine model come from module scope outside this excerpt.
# NOTE(review): original indentation was lost in extraction; statements are
# kept byte-identical and flat, with comments only.
def print_aggregate_invoice_as_pdf(output,invoice, law_firm):
# output: file-like PDF target; invoice: Invoice instance; law_firm: firm billed.
invoice_number = invoice.id
invoice_lines = InvoiceLine.objects.filter(invoice=invoice).order_by('case_created_at')
# Corporate colours for the TableStyle background commands.
light_peacock_green = '#dbf2f9'
dark_peacock_green = '#166a83'
doc = SimpleDocTemplate(output)
story = []
style = styles["Normal"]
# Row 0: firm banner and "INVOICE" title.
table_data = [[[Paragraph('Rapid Sign Now', ParagraphStyle('heading', fontSize=15, textColor=dark_peacock_green)), Spacer(1,0.3*inch)],
[Paragraph('INVOICE', ParagraphStyle('heading', fontSize=13, textColor=light_peacock_green, alignment=TA_RIGHT)), Spacer(1,0.3*inch)]]]
# Row 1: remit-to address vs. invoice number/date; due in 10 days.
table_data.append([Paragraph('8 Corporate park suite 300 <br /> Irvine, CA 92606 <br /> customerservice@rapidsignnow.com <br /> www.rapidsignnow.com <br /> P: 310-892-2043',# <br /> F: 123-555-0124',
ParagraphStyle('address', fontSize=7, textColor=dark_peacock_green, leading=12)),
Paragraph('Invoice No.:' + str(invoice_number) + '<br /> Invoice Date: ' + datetime.datetime.now().strftime('%m-%d-%y')
+ ' <br /> Due Date: ' + (datetime.datetime.now() + datetime.timedelta(days=10)).strftime('%m-%d-%y'),
ParagraphStyle('meta', fontSize=7, textColor=light_peacock_green, leading=12))])
# Row 2: bill-to block.
table_data.append([Paragraph('<b>BILL TO:</b> ' + law_firm.name + '<br />' + law_firm.address.simple_address(), ParagraphStyle('address', fontSize=7, textColor='#000000', leading=12)), ''])
# Row 3: column headers.
table_data.append([Paragraph('<b>Case Details</b>', ParagraphStyle('table-header', fontSize=13, textColor=light_peacock_green, alignment=TA_CENTER )),
Paragraph('<b>Amount</b>', ParagraphStyle('table-header', fontSize=13, textColor=light_peacock_green, alignment=TA_CENTER ))])
case_style = ParagraphStyle('case-details', fontSize=8, textColor='#000000')
law_firm_style = ParagraphStyle('law-firm-details', fontSize=8, textColor='#000000')
price_style = ParagraphStyle('case-details', fontSize=8, textColor='#000000', alignment=TA_CENTER)
for invoice_line in invoice_lines:
# Derive the displayed outcome for this case from the stored flags.
case_final_status = ''
case_cancelled_by = ''
case_status_additional_info = 'N.A'
if invoice_line.is_signature_obtained:
if invoice_line.number_of_adult_signatures_required <= invoice_line.number_of_adult_signatures_obtained and invoice_line.number_of_child_signatures_required <= invoice_line.number_of_child_signatures_obtained:
case_final_status = 'Signature Obtained'
else:
case_final_status = 'Signatures Partially obtained'
elif invoice_line.did_investigator_travel:
case_final_status = 'Signature Not Obtained'
else:
case_final_status = 'Client Cancelled'
case_cancelled_by = invoice_line.cancelled_by
if invoice_line.cancelled_reason_description:
case_status_additional_info = invoice_line.cancelled_reason_description
# Signature fee when signed, otherwise the firm's basic fee.
if invoice_line.is_signature_obtained:
signature_fee = Paragraph('Signature fees: $' + str(invoice_line.total_signature_fee), price_style)
else:
signature_fee = Paragraph('Basic fee: $' + str(invoice_line.basic_fee_law_firm), price_style)
if invoice_line.is_dol_provided:
try:
dol_value = invoice_line.dol.strftime('%m-%d-%y')
# NOTE(review): bare except — falls back to hand-built d-m-Y string.
except:
dol_value = str(invoice_line.dol.day) + "-" + str(invoice_line.dol.month) + "-" + str(invoice_line.dol.year)
else:
dol_value = 'Not Provided'
# if invoice_line.is_signature_obtained:
# is_signature_obtained = 'Yes'
# else:
# is_signature_obtained = 'No'
# Merge the firm's and RSN's expense descriptions, if present.
additional_expenses_description = 'N.A'
if invoice_line.additional_expenses_description != '':
if invoice_line.rsn_extra_expenses_description != '':
additional_expenses_description = invoice_line.additional_expenses_description + " and " + invoice_line.rsn_extra_expenses_description
else:
additional_expenses_description = invoice_line.additional_expenses_description
elif invoice_line.rsn_extra_expenses_description != '':
additional_expenses_description = invoice_line.rsn_extra_expenses_description
travel_expenses_line = None
additional_expenses_line = None
# NOTE(review): rsn_extra_expenses is initialised but never used.
rsn_extra_expenses = None
# Cancelled cases show "N.A" amounts and the canceller as status.
if case_final_status.lower() == 'client cancelled':
travel_expenses_line = Paragraph('Travel expenses: N.A', price_style)
additional_expenses_line = Paragraph('Additional expenses: N.A', price_style)
signature_fee = Paragraph('Signature fee: N.A' , price_style)
case_final_status = case_cancelled_by
else:
travel_expenses_line = Paragraph('Travel expenses: $' + str(invoice_line.travel_expenses), price_style)
additional_expenses = invoice_line.additional_expenses + invoice_line.rsn_extra_expenses
# Debug output left in production code.
print str(additional_expenses)
additional_expenses_line = Paragraph('Additional expenses: $' + str(additional_expenses), price_style)
# Fall back to creation timestamp when no signup date was recorded.
if invoice_line.date_of_signup is not None:
date_of_signup = invoice_line.date_of_signup
else:
date_of_signup = invoice_line.case_created_at
# One table row per invoice line: details cell, amounts cell.
table_data.append([
[
[Paragraph('<b>Case name: </b>' + invoice_line.case_name, case_style)],
[Paragraph('<b>Investigator: </b>' + invoice_line.investigator_name, case_style)],
[Paragraph('<b>Location: </b>' + invoice_line.client_address, case_style)],
[Paragraph('<b>DOL: </b>' + dol_value, case_style)],
[Paragraph('<b>Date of Sign Up: </b>' + date_of_signup.strftime('%m-%d-%y'), case_style)],
[Paragraph('<b>Locality: </b>' + invoice_line.locality, case_style)],
[Paragraph('<b>No. of miles: </b>' + str(invoice_line.no_of_miles_travelled), case_style)],
[Paragraph('<b>Mileage rate: </b>' + str(invoice_line.mileage_rate_law_firm), case_style)],
[Paragraph('<b>Adult clients: </b>' + invoice_line.adult_clients, case_style)],
[Paragraph('<b>Child clients: </b>' + invoice_line.child_clients, case_style)],
[Paragraph('<b>Additional expenses desc: </b>' + additional_expenses_description, case_style)],
[Paragraph('<b>Final Status: </b>' + case_final_status, case_style)],
[Paragraph('<b>Status additional info : </b>' + case_status_additional_info, case_style)],
],
[
[signature_fee],
[travel_expenses_line],
[additional_expenses_line],
[Paragraph('Total price: $' + str(invoice_line.total_amount_billed_to_law_firm), price_style)]
]
])
# Spacer row between a line's detail row and its signature block.
table_data.append([Paragraph('<br/><br/>', style),
Paragraph('', style)])
law_firm_rates = law_firm.rates
# Client lists are comma-separated strings on the invoice line.
adult_clients_full = invoice_line.adult_clients
adult_clients = adult_clients_full.split(',')
child_clients_full = invoice_line.child_clients
child_clients = child_clients_full.split(',')
# Signature blocks: one "name / ______" row per adult and child client.
if invoice_line.is_signature_obtained:
table_data.append([
[
[Paragraph('<br/><br/><br/><b>Clients Signed </b>', style)],
],
[
[Paragraph('<br/><br/><br/><b>Cost </b>', style)]
]
])
if(invoice_line.locality == 'In Area'):
# NOTE(review): these two fee values are computed but never used,
# and both locality branches then emit identical rows.
signature_fee_for_adult_clients = law_firm_rates.default_in_area_payment_for_one_signature + ((invoice_line.number_of_adult_signatures_obtained - 1) * law_firm_rates.default_in_area_payment_for_each_additional_adult_signature)
signature_fee_for_child_clients = law_firm_rates.default_in_area_payment_for_children * invoice_line.number_of_child_signatures_obtained
if invoice_line.adult_clients:
for adult_client in adult_clients:
table_data.append([
[
[Paragraph('<br/>'+str(adult_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
if invoice_line.child_clients:
for child_client in child_clients:
table_data.append([
[
[Paragraph('<br/>'+str(child_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
else:
signature_fee_for_adult_clients = law_firm_rates.default_out_of_area_payment_for_one_signature + ((invoice_line.number_of_adult_signatures_obtained - 1) * law_firm_rates.default_out_of_area_payment_for_each_additional_adult_signature)
signature_fee_for_child_clients = law_firm_rates.default_out_of_area_payment_for_children * invoice_line.number_of_child_signatures_obtained
if invoice_line.adult_clients:
for adult_client in adult_clients:
table_data.append([
[
[Paragraph('<br/>'+str(adult_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
if invoice_line.child_clients:
for child_client in child_clients:
table_data.append([
[
[Paragraph('<br/>'+str(child_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
elif (case_final_status == 'Signature Not Obtained'):
# Travelled but unsigned: list clients without fee computation.
table_data.append([
[
[Paragraph('<br/><br/><br/><b>Clients</b>', style)],
],
[
[Paragraph('<br/><br/><br/><b>Cost </b>', style)]
]
])
if(invoice_line.locality == 'In Area'):
if invoice_line.adult_clients:
for adult_client in adult_clients:
table_data.append([
[
[Paragraph('<br/>'+str(adult_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
if invoice_line.child_clients:
for child_client in child_clients:
table_data.append([
[
[Paragraph('<br/>'+str(child_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
else:
if invoice_line.adult_clients:
for adult_client in adult_clients:
table_data.append([
[
[Paragraph('<br/>'+str(adult_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
if invoice_line.child_clients:
for child_client in child_clients:
table_data.append([
[
[Paragraph('<br/>'+str(child_client),law_firm_style)],
],
[
[Paragraph('<b><br/>_______________</b>',law_firm_style)],
]
])
else:
# Cancelled case: zeroed detailed-invoice placeholder block.
table_data.append([
[ [Paragraph('<br/><br/><br/><b>Detailed Invoice</b>',style)],
[Paragraph('<b><br/>Basic Fee =</b> $ 0', law_firm_style)],
[Paragraph('<b>Miles travelled =</b> 0',law_firm_style)],
[Paragraph('<b>Travel Expenses =</b> $ 0',law_firm_style)],
[Paragraph('<b>Additional expenses =</b> $ 0',law_firm_style)],
[Paragraph('<b>RSN Extra expenses =</b> $ 0',law_firm_style)],
[Paragraph('<b>Total Price =</b> $ 0',law_firm_style)],
[Paragraph('<br/><br/>',style)]
],
[
[Paragraph('', style)],
[Paragraph('', style)]
]
])
# Trailing spacer row and the grand total for the whole invoice.
table_data.append([Paragraph('<br/><br/>', law_firm_style),
Paragraph('<br/><br/>', law_firm_style)])
table_data.append([Paragraph('<b><br/> Total: </b>', style),
Paragraph('<br/>$' + str(invoice.total_billed_amount), style)])
table_content = Table(table_data)
# Header/banner backgrounds and top alignment for all cells.
table_content.setStyle(TableStyle([
('BACKGROUND', (0, 0), (0, 1), light_peacock_green),
('BACKGROUND', (1, 0), (1, 1), dark_peacock_green),
('BACKGROUND', (0, 3), (1, 3), dark_peacock_green),
('VALIGN',(0,0),(-1,-1),'TOP')
]))
# Zebra-stripe per-line rows; offsets differ from print_invoice_as_pdf
# because each line occupies more table rows here.
for loop in range(0, len(invoice_lines)):
table_index = loop + 7
if table_index % 3 != 0:
table_content.setStyle(TableStyle([
('BACKGROUND', (0, table_index), (1, table_index), light_peacock_green)
]))
# NOTE(review): rates_style is assigned but never used.
rates_style = ParagraphStyle('case-details', fontSize=10, textColor='#000000')
story.append(table_content)
doc.build(story, onFirstPage=my_first_page, onLaterPages=my_later_pages)
# Redundant trailing `pass`.
pass
def print_invoice_as_csv(output, invoice, law_firm):
    """Write a one-row-per-case CSV summary of *invoice* into *output*.

    output -- writable file-like object (file, StringIO, HttpResponse).
    invoice -- Invoice model instance whose InvoiceLines are exported.
    law_firm -- firm billed; supplies the BILL TO header fields.
    Returns *output* so callers can chain on it.

    Fixes vs. the original: removed a dead loop that computed per-line
    status strings and discarded them, removed debug prints and the
    unreachable trailing ``pass``, and collapsed the 'In Area'/other
    locality branches that wrote byte-identical rows.
    """
    import csv
    invoice_lines = InvoiceLine.objects.filter(invoice=invoice).order_by('case_created_at')
    # Re-load each underlying Case; skip lines whose case cannot be loaded
    # so one stale FK does not abort the whole export (best effort, as the
    # original's bare except intended).
    all_cases_in_range = []
    for invoice_line in invoice_lines:
        try:
            all_cases_in_range.append(Case.objects.get(pk=invoice_line.case.pk))
        except Exception:
            # Case row missing/deleted -- leave it out of the CSV.
            continue
    writer = csv.writer(output)
    # Billing header block. 'Milaege' (sic) kept byte-for-byte in case a
    # downstream consumer matches on the header text.
    writer.writerow(['BILL TO:', law_firm.name])
    writer.writerow(['', law_firm.address.simple_address()])
    writer.writerow(['', law_firm.phone_number_one])
    writer.writerow([''])
    writer.writerow(['Date of Signup', 'Date of Loss', 'Case Name', ' Adult Clients',
                     'Child Clients', 'Address', 'Milaege', 'Total', 'Investigator'])
    for case in all_cases_in_range:
        # Fall back to the creation timestamp when no signup date exists.
        if case.date_of_signup is not None:
            date_of_signup = case.date_of_signup
        else:
            date_of_signup = case.created_at
        investigator_name = case.investigator.user.first_name + ' ' + case.investigator.user.last_name
        total_payout = '$' + str(case.get_law_firm_price())
        writer.writerow([date_of_signup, case.dol, case.name, case.adult_clients,
                         case.child_clients, case.client_address.simple_address(),
                         case.no_of_miles_travelled, total_payout, investigator_name])
    return output
def generate_bulk_invoice_as_csv(output, law_firm, all_cases_in_range):
    """Write a bulk CSV invoice for *law_firm* covering *all_cases_in_range*.

    output -- writable file-like object the CSV rows are written to.
    law_firm -- firm whose name/address/phone head the document.
    all_cases_in_range -- iterable of Case-like objects to bill.
    Returns *output* for chaining.

    Fixes vs. the original: the 'In Area'/other locality branches wrote
    byte-identical rows and were collapsed; ``get_law_firm_price()`` is now
    called once per case instead of twice; the unused ``law_firm_rates``
    lookup and the stray debug print were removed.
    """
    import csv
    writer = csv.writer(output)
    # Billing header block ('Milaege' (sic) kept for downstream consumers).
    writer.writerow(['BILL TO:', law_firm.name])
    writer.writerow(['', law_firm.address.simple_address()])
    writer.writerow(['', law_firm.phone_number_one])
    writer.writerow([''])
    writer.writerow(['Date of Signup', 'Date of Loss', 'Case Name', ' Adult Clients',
                     'Child Clients', 'Address', 'Milaege', 'Total', 'Investigator'])
    total = 0
    for case in all_cases_in_range:
        investigator_name = case.investigator.user.first_name + ' ' + case.investigator.user.last_name
        case_payout = case.get_law_firm_price()
        total = total + case_payout
        # Fall back to the creation timestamp when no signup date exists.
        if case.date_of_signup is not None:
            date_of_signup = case.date_of_signup
        else:
            date_of_signup = case.created_at
        writer.writerow([date_of_signup, case.dol, case.name, case.adult_clients,
                         case.child_clients, case.client_address.simple_address(),
                         case.no_of_miles_travelled, '$ ' + str(case_payout),
                         investigator_name])
    writer.writerow([''])
    writer.writerow(['', '', '', '', '', '', 'Total', '$ ' + str(total)])
    return output
def fetch_resources(uri, rel):
    """Resolve a media *uri* to an absolute filesystem path.

    Strips the configured STATIC_URL prefix from *uri* and joins the
    remainder onto STATIC_ROOT; *rel* is accepted for the callback
    signature but unused. Returns the resulting path string.
    """
    relative_part = uri.replace(settings.STATIC_URL, "")
    return join(settings.STATIC_ROOT, relative_part)
def generate_the_invoice_as_excel(output, law_firm, cases):
if len(cases) < 1:
raise ValueError('Cases cannot be zero for invoicing')
for case in cases:
if case.invoice_as_excel is not None:
output = print_invoice_as_excel(output,case.invoice_as_excel, law_firm)
return output
entire_invoice_total = 0
law_firm_rates = law_firm.rates
default_in_area_payment_for_one_signature = law_firm_rates.default_in_area_payment_for_one_signature
default_in_area_payment_for_each_additional_adult_signature = law_firm_rates.default_in_area_payment_for_each_additional_adult_signature
default_in_area_payment_for_children = law_firm_rates.default_in_area_payment_for_children
maximum_in_area_payment_for_any_number_of_signatures = law_firm_rates.maximum_in_area_payment_for_any_number_of_signatures
default_out_of_area_payment_for_one_signature = law_firm_rates.default_out_of_area_payment_for_one_signature
default_out_of_area_payment_for_each_additional_adult_signature = law_firm_rates.default_out_of_area_payment_for_each_additional_adult_signature
default_out_of_area_payment_for_children = law_firm_rates.default_out_of_area_payment_for_children
maximum_out_of_area_payment_for_any_number_of_signatures = law_firm_rates.maximum_out_of_area_payment_for_any_number_of_signatures
invoice_as_excel = Invoice()
invoice_as_excel.law_firm_name = law_firm.name
invoice_as_excel.law_firm_address = law_firm.address.simple_address()
invoice_as_excel.law_firm_email = law_firm.email_one
invoice_lines = []
invoice_as_excel.save()
print "Newly created Invoice ID: %d"%invoice_as_excel.id
for case in cases:
case.invoice_as_excel = invoice_as_excel
case.save()
print "Invoice for case no: %d"%case.id
#create new invoice line
invoice_line = InvoiceLine()
#FK assignments
invoice_line.invoice = invoice_as_excel
invoice_line.case = case
number_of_adult_signatures_required = 0
number_of_child_signatures_required = 0
number_of_adult_signatures_obtained = 0
number_of_child_signatures_obtained = 0
is_signature_obtained = False
did_investigator_travel = False
case_name = case.name
case_created_at = case.created_at
investigator_name = case.investigator.user.first_name + ' '+ case.investigator.user.last_name
client_name = case.client_name
client_address = case.client_address.simple_address()
dol = case.dol
case_closing_date = case.closing_date
is_dol_provided = case.is_dol_provided
locality = case.locality
additional_expenses_description = case.additional_expenses_description
date_of_signup = case.date_of_signup
adult_clients = case.adult_clients
child_clients = case.child_clients
basic_fee_law_firm = case.basic_fee_law_firm
no_of_free_miles_law_firm = case.no_of_free_miles_law_firm
mileage_rate_law_firm = case.mileage_rate_law_firm
cancelled_by = case.cancelled_by
print "cancelled_by:%s"%case.cancelled_by
cancelled_reason_description = case.cancelled_reason_description
additional_expenses = case.additional_expenses
no_of_miles_travelled = case.no_of_miles_travelled
#Need to calculate these
travel_expenses = 0
total_signature_fee_for_adults = 0
total_signature_fee_for_children = 0
total_signature_fee = 0
total_amount_billed_to_law_firm = 0
if case.is_signature_obtained:
number_of_adult_signatures_required = case.number_of_adult_signatures_required
number_of_child_signatures_required = case.number_of_child_signatures_required
number_of_adult_signatures_obtained = case.number_of_adult_signatures_obtained
number_of_child_signatures_obtained = case.number_of_child_signatures_obtained
is_signature_obtained = True
did_investigator_travel = True
number_of_billed_adults = 0
number_of_billed_children = 0
if no_of_miles_travelled > no_of_free_miles_law_firm and int(no_of_miles_travelled) != 0:
travel_expenses = ((no_of_miles_travelled - no_of_free_miles_law_firm) * mileage_rate_law_firm)
print "Travel expenses:( %f free miles - %f miles travelled) * $%f per mile = $%f"%(float(no_of_free_miles_law_firm),float(no_of_miles_travelled), float(mileage_rate_law_firm),float(travel_expenses))
if number_of_adult_signatures_obtained < 1 and number_of_child_signatures_obtained >0:
number_of_billed_adults = 1
number_of_billed_children = number_of_child_signatures_obtained - 1
else:
number_of_billed_adults = number_of_adult_signatures_obtained
number_of_billed_children = number_of_child_signatures_obtained
if locality.lower() == 'in area':
total_signature_fee_for_adults = default_in_area_payment_for_one_signature
if (number_of_billed_adults - 1) > 0:
total_signature_fee_for_adults += (number_of_billed_adults - 1) * default_in_area_payment_for_each_additional_adult_signature
total_signature_fee_for_children = number_of_billed_children * default_in_area_payment_for_children
total_signature_fee = total_signature_fee_for_adults + total_signature_fee_for_children
if total_signature_fee > maximum_in_area_payment_for_any_number_of_signatures:
total_signature_fee = maximum_in_area_payment_for_any_number_of_signatures
total_amount_billed_to_law_firm = total_signature_fee + travel_expenses + additional_expenses
else:
# case out of area
total_signature_fee_for_adults = default_out_of_area_payment_for_one_signature
if (number_of_billed_adults - 1) > 0:
total_signature_fee_for_adults += (number_of_billed_adults - 1) * default_out_of_area_payment_for_each_additional_adult_signature
total_signature_fee_for_children = number_of_billed_children * default_out_of_area_payment_for_children
total_signature_fee = total_signature_fee_for_adults + total_signature_fee_for_children
if total_signature_fee > maximum_out_of_area_payment_for_any_number_of_signatures:
total_signature_fee = maximum_out_of_area_payment_for_any_number_of_signatures
total_amount_billed_to_law_firm = total_signature_fee + travel_expenses + additional_expenses
elif case.did_investigator_travel:
number_of_adult_signatures_required = case.number_of_adult_signatures_required
number_of_child_signatures_required = case.number_of_child_signatures_required
number_of_adult_signatures_obtained = 0
number_of_child_signatures_obtained = 0
is_signature_obtained = False
did_investigator_travel = True
if no_of_miles_travelled > no_of_free_miles_law_firm and int(no_of_miles_travelled) != 0:
travel_expenses = ((no_of_miles_travelled - no_of_free_miles_law_firm) * mileage_rate_law_firm)
print "Travel expenses:( %f free miles - %f miles travelled) * $%f per mile = $%f"%(float(no_of_free_miles_law_firm),float(no_of_miles_travelled), float(mileage_rate_law_firm),float(travel_expenses))
else:
print "Travel expenses is $0"
total_amount_billed_to_law_firm = basic_fee_law_firm + travel_expenses + additional_expenses
pass
else:
travel_expense = 0
total_signature_fee_for_adults = 0
total_signature_fee_for_children = 0
total_signature_fee = 0
total_amount_billed_to_law_firm = 0
pass
# Just in case someone enters a negative value
if total_signature_fee_for_children < 0:
total_signature_fee_for_children = 0
if total_signature_fee_for_adults < 0:
total_signature_fee_for_adults = 0
if travel_expenses< 0:
travel_expenses = 0
if additional_expenses < 0:
additional_expenses = 0
if no_of_miles_travelled < 0:
no_of_miles_travelled = 0
invoice_line.number_of_adult_signatures_required = number_of_adult_signatures_required
invoice_line.number_of_child_signatures_required = number_of_child_signatures_required
invoice_line.number_of_adult_signatures_obtained = number_of_adult_signatures_obtained
invoice_line.number_of_child_signatures_obtained = number_of_child_signatures_obtained
# other static assignments
invoice_line.basic_fee_law_firm = basic_fee_law_firm
invoice_line.no_of_free_miles_law_firm = no_of_free_miles_law_firm
invoice_line.mileage_rate_law_firm = mileage_rate_law_firm
invoice_line.case_name = case_name
invoice_line.investigator_name = investigator_name
invoice_line.client_name = client_name
invoice_line.client_address = client_address
invoice_line.case_created_at = case_created_at
invoice_line.dol = dol
invoice_line.is_dol_provided = is_dol_provided
invoice_line.case_closing_date = case_closing_date
invoice_line.date_of_signup = date_of_signup
invoice_line.locality = locality
invoice_line.adult_clients = adult_clients
invoice_line.child_clients = child_clients
invoice_line.cancelled_by = cancelled_by
invoice_line.cancelled_reason_description = cancelled_reason_description
invoice_line.is_signature_obtained = is_signature_obtained
invoice_line.did_investigator_travel = did_investigator_travel
invoice_line.additional_expenses = additional_expenses
invoice_line.no_of_miles_travelled = no_of_miles_travelled
invoice_line.travel_expenses = travel_expenses
invoice_line.total_signature_fee_for_adults = total_signature_fee_for_adults
invoice_line.total_signature_fee_for_children = total_signature_fee_for_children
invoice_line.total_signature_fee = total_signature_fee
invoice_line.total_amount_billed_to_law_firm = total_amount_billed_to_law_firm
invoice_line.additional_expenses_description = additional_expenses_description
print "is_signature_obtained: %r" %is_signature_obtained
print "did_investigator_travel: %r"%did_investigator_travel
print "travel_expense: %f"%travel_expenses
print "additional_expenses: %f"%additional_expenses
print "total_signature_fee_for_adults: %f"%total_signature_fee_for_adults
print "total_signature_fee_for_children: %f"%total_signature_fee_for_children
print "total_signature_fee: %f"%total_signature_fee
print "total_amount_billed_to_law_firm: %f"%total_amount_billed_to_law_firm
print "Cancelled_by:%s"%cancelled_by
#Save the invoice_line
invoice_line.save()
#add the case total to the invoice total
entire_invoice_total += total_amount_billed_to_law_firm
invoice_lines.append(invoice_line)
invoice_as_excel.total_billed_amount = entire_invoice_total
invoice_as_excel.save()
output = print_invoice_as_excel(output,invoice_as_excel, law_firm)
return output
def print_invoice_as_excel(output,invoice, law_firm):
import xlsxwriter
import datetime
invoice_number = invoice.id
invoice_lines = InvoiceLine.objects.filter(invoice=invoice).order_by('case_created_at')
law_firm_rates = law_firm.rates
workbook = xlsxwriter.Workbook(output, {'remove_timezone': True})
worksheet = workbook.add_worksheet()
bold = workbook.add_format({'bold': True})
money = workbook.add_format({'num_format': '$#,##0'})
wrap = workbook.add_format()
wrap.set_text_wrap()
date_format = workbook.add_format({'num_format': 'm/d/yyyy'})
bold_align = workbook.add_format({'bold':True,'align':'justify'})
num = workbook.add_format()
num.set_num_format('0.00')
data = []
print "hello"
for invoice_line in invoice_lines:
case_final_status = ''
case_cancelled_by = ''
case_status_additional_info = 'N.A'
if invoice_line.is_signature_obtained:
if invoice_line.number_of_adult_signatures_required <= invoice_line.number_of_adult_signatures_obtained and invoice_line.number_of_child_signatures_required <= invoice_line.number_of_child_signatures_obtained:
case_final_status = 'Signature Obtained'
# print str(case_final_status)
else:
case_final_status = 'Signatures Partially obtained'
# print (case_final_status)
elif invoice_line.did_investigator_travel:
case_final_status = 'Signature Not Obtained'
else:
case_final_status = 'Client Cancelled'
case_cancelled_by = invoice_line.cancelled_by
if invoice_line.cancelled_reason_description:
case_status_additional_info = invoice_line.cancelled_reason_description
all_cases_in_range = []
for invoice_line in invoice_lines:
try:
case_instance = Case.objects.get(pk=invoice_line.case.pk)
all_cases_in_range.append(case_instance)
except :
context = dict()
context['error'] = 'An error occurred while generating invoice'
pass
worksheet.write('A1','BILL TO:', bold)
worksheet.write('B1',law_firm.name, bold)
worksheet.write('B2',law_firm.address.simple_address(),bold)
worksheet.write('B3',law_firm.phone_number_one,bold)
worksheet.write('A5','Date Of Signup',bold_align)
worksheet.write('B5','Date Of Loss',bold_align)
worksheet.write('C5','Case Name',bold_align)
worksheet.write('D5','Adult Clients',bold_align)
worksheet.write('E5','Child Clients',bold_align)
worksheet.write('F5','Address',bold_align)
worksheet.write('G5','No of Miles',bold_align)
worksheet.write('H5','Total',bold_align)
worksheet.write('I5','Investigator',bold_align)
worksheet.set_column('A:I', 12)
count = 0
for case in all_cases_in_range:
if case.date_of_signup is not None:
date_of_signup = case.date_of_signup
else:
date_of_signup = case.created_at
investigator_name = case.investigator.user.first_name + ' ' + case.investigator.user.last_name
total_payout = case.get_law_firm_price()
if case.locality == 'In Area':
data.append([date_of_signup, case.dol, case.name, case.adult_clients, case.child_clients,case.client_address.simple_address().decode('latin-1'), case.no_of_miles_travelled,total_payout,investigator_name])
count = count + 1
else:
data.append([date_of_signup, case.dol, case.name, case.adult_clients, case.child_clients,case.client_address.simple_address(), case.no_of_miles_travelled,total_payout,investigator_name])
count = count + 1
print "printing"
row = 5
col = 0
for dos, dol, name, adult_clients, child_clients, client_address, miles, total, investigator in (data):
worksheet.write_datetime(row, col, dos, date_format )
worksheet.write_datetime(row, col + 1, dol, date_format )
worksheet.write_string(row, col + 2, name, wrap)
worksheet.write_string(row, col + 3, adult_clients, wrap)
worksheet.write_string(row, col + 4, child_clients, wrap)
worksheet.write_string(row, col + 5, client_address, wrap)
worksheet.write_number(row, col + 6, miles, num)
worksheet.write_number(row, col + 7, total, money)
worksheet.write_string(row, col + 8, investigator, wrap)
row += 1
workbook.close()
return output
pass
def generate_bulk_invoice_as_excel(output,law_firm,all_cases_in_range):
import csv
workbook = xlsxwriter.Workbook(output, {'remove_timezone': True})
worksheet = workbook.add_worksheet()
bold = workbook.add_format({'bold': True})
money = workbook.add_format({'num_format': '$#,##0'})
wrap = workbook.add_format()
wrap.set_text_wrap()
date_format = workbook.add_format({'num_format': 'm/d/yyyy'})
bold_align = workbook.add_format({'bold':True,'align':'justify'})
num = workbook.add_format()
num.set_num_format('0.00')
data = []
worksheet.write('A1','BILL TO:', bold)
worksheet.write('B1',law_firm.name, bold)
worksheet.write('B2',law_firm.address.simple_address(),bold)
worksheet.write('B3',law_firm.phone_number_one,bold)
worksheet.write('A5','Date Of Signup',bold_align)
worksheet.write('B5','Date Of Loss',bold_align)
worksheet.write('C5','Case Name',bold_align)
worksheet.write('D5','Adult Clients',bold_align)
worksheet.write('E5','Child Clients',bold_align)
worksheet.write('F5','Address',bold_align)
worksheet.write('G5','No of Miles',bold_align)
worksheet.write('H5','Total',bold_align)
worksheet.write('I5','Investigator',bold_align)
worksheet.set_column('A:I', 12)
row = 5
col = 0
total_bulk = 0
for case in all_cases_in_range:
if case.date_of_signup is not None:
date_of_signup = case.date_of_signup
else:
date_of_signup = case.created_at
investigator_name = case.investigator.user.first_name + ' ' + case.investigator.user.last_name
total_payout = case.get_law_firm_price()
total_bulk = total_bulk + total_payout
# print case.name
# print case.client_address.simple_address().decode('latin-1')
if case.locality == 'In Area':
data.append([date_of_signup, case.dol, case.name, case.adult_clients, case.child_clients,case.client_address.simple_address().decode('latin-1'), case.no_of_miles_travelled,total_payout,investigator_name])
else:
data.append([date_of_signup, case.dol, case.name, case.adult_clients, case.child_clients,case.client_address.simple_address().decode('latin-1'), case.no_of_miles_travelled,total_payout,investigator_name])
print "printing"
for dos, dol, name, adult_clients, child_clients, client_address, miles, total, investigator in (data):
worksheet.write_datetime(row, col, dos, date_format )
worksheet.write_datetime(row, col + 1, dol, date_format )
worksheet.write_string(row, col + 2, name, wrap)
worksheet.write_string(row, col + 3, adult_clients, wrap)
worksheet.write_string(row, col + 4, child_clients, wrap)
worksheet.write_string(row, col + 5, client_address, wrap)
worksheet.write_number(row, col + 6, miles, num)
worksheet.write_number(row, col + 7, total, money)
worksheet.write_string(row, col + 8, investigator, wrap)
row += 1
worksheet.write(row + 2, col+6,'Total',bold)
worksheet.write(row+2,col+7,total_bulk,money)
workbook.close()
return output
def download_doc(request,file_id):
    """Stream a stored Document back to the client as an attachment.

    Downloads the file to a local scratch file so python-magic can sniff its
    real MIME type from the bytes, then serves the original file with a
    Content-Disposition attachment header.
    """
    document = Document.objects.get(id = file_id)
    mime = magic.Magic(mime=True)
    # Fetch a local copy purely so libmagic can inspect the content.
    # NOTE(review): the scratch file "output" is left behind in the CWD and is
    # shared between concurrent requests -- consider tempfile. TODO confirm.
    output = "output"
    urllib.urlretrieve(document.file.url, output)
    mimes = mime.from_file(output)
    # (Removed: an unused extension lookup whose [0] index could raise
    # IndexError for unrecognized MIME types, plus dead commented-out
    # email-sending code.)
    response = HttpResponse(document.file, content_type=mimes)
    response['Content-Disposition'] = 'attachment; filename=%s' % document.file.name
    return response
def delete_attached_document(request):
    """Delete an AttachedDocument (system-admin only).

    Requires an authenticated system admin and a POST carrying
    ``attached_document_id``; otherwise redirects to '/'. Returns an empty
    200 response on success.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/')
    # Authorization gate: only system admins may delete attachments.
    try:
        SystemAdmin.objects.get(user=request.user)
    except Exception:
        # Was a bare except; narrowed so system exits still propagate.
        return HttpResponseRedirect('/')
    if not request.POST or 'attached_document_id' not in request.POST:
        return HttpResponseRedirect('/')
    attached_document_id = request.POST['attached_document_id']
    # NOTE(review): .get() will 500 on a stale/unknown id -- confirm whether
    # a redirect would be preferable here.
    attached_document_instance = AttachedDocument.objects.get(pk=attached_document_id)
    attached_document_instance.delete()
    return HttpResponse('')
def dashboard(request):
if not request.user.is_authenticated():
return HttpResponseRedirect('/')
try:
system_admin = SystemAdmin.objects.get(user=request.user)
except:
return HttpResponseRedirect('/')
context = dict()
# in_progress =
total_cases = Case.objects.filter(~Q(status="Duplicate delete"))
total_cases_count = total_cases.count()
# client_cancelled = Case.objects.filter(status = "Closed").filter(Q(is_signature_obtained = False) & Q(did_investigator_travel = False))
client_cancelled = total_cases.filter(Q(status = "Closed") | Q(status = "Client cancelled") ).filter(Q(is_signature_obtained = False) & Q(did_investigator_travel = False))
client_cancelled_count = client_cancelled.count()
client_cancelled_percent = float(client_cancelled_count * 100 ) / total_cases_count
signature_obtained = Case.objects.filter(is_signature_obtained = True)
signature_obtained_count = signature_obtained.count()
signature_obtained_percent = float(signature_obtained_count * 100 ) /total_cases_count
signature_not_obtained = Case.objects.filter(Q(is_signature_obtained = False) & Q(did_investigator_travel = True))
signature_not_obtained_count = signature_not_obtained.count()
signature_not_obtained_percent = float(signature_not_obtained_count * 100) / total_cases_count
others = round(100 - client_cancelled_percent - signature_not_obtained_percent - signature_obtained_percent,2)
context['others'] = others
print round(100 - client_cancelled_percent - signature_not_obtained_percent - signature_obtained_percent)
context['client_cancelled'] = round(client_cancelled_percent,2)
print client_cancelled_percent
context['total_cases_count'] = total_cases_count
context['signature_obtained'] = round(signature_obtained_percent,2)
context['signature_not_obtained'] = round(signature_not_obtained_percent,2)
case_status_closed = total_cases.filter(status = "Closed")
case_status_client_cancelled = total_cases.filter(status = "Client cancelled")
total_cases_for_case_status_donut = total_cases.count() - case_status_closed.count() - case_status_client_cancelled.count()
case_status_inactive = total_cases.filter(status = "Inactive")
case_status_in_progress = total_cases.filter(status = "In progress")
case_status_called_and_texted = total_cases.filter(status = "Called and texted")
case_status_client_contacted = total_cases.filter(status = "Client contacted")
case_status_client_meeting_set = total_cases.filter(status = "Client meeting set")
case_status_client_rescheduled = total_cases.filter(status = "Client rescheduled")
case_signature_obtained = total_cases.filter(status = "Signature obtained")
case_status_signature_not_obtained = total_cases.filter(status = "Signature not obtained")
context['total_cases_for_case_status_donut'] = total_cases_for_case_status_donut
context['case_status_inactive'] = round( (float(case_status_inactive.count() * 100) / total_cases_for_case_status_donut) ,2)
context['case_status_in_progress'] = round( (float(case_status_in_progress.count() * 100) / total_cases_for_case_status_donut) ,2)
context['case_status_called_and_texted'] = round( (float(case_status_called_and_texted.count() * 100) / total_cases_for_case_status_donut) ,2)
context['case_status_client_contacted'] = round( (float(case_status_client_contacted.count() * 100) / total_cases_for_case_status_donut) ,2)
context['case_status_client_meeting_set'] = round( (float(case_status_client_meeting_set.count() * 100) / total_cases_for_case_status_donut) ,2)
context['case_status_client_rescheduled'] = round( (float(case_status_client_rescheduled.count() * 100) / total_cases_for_case_status_donut) ,2)
# context['case_status_client_cancelled'] = round( (float(case_status_client_cancelled.count() * 100) / total_cases_for_case_status_donut) ,2)
context['case_signature_obtained'] = round( (float(case_signature_obtained.count() * 100) / total_cases_for_case_status_donut) ,2)
context['case_status_signature_not_obtained'] = round( (float(case_status_signature_not_obtained.count() * 100) / total_cases_for_case_status_donut) ,2)
# context['case_status_closed'] = round( (case_status_closed.count() * 100 / total_cases_count) ,2)
payment_paid_and_closed = total_cases.filter(is_investigator_paid = True)
payment_invoice_generated_but_not_paid = total_cases.filter(Q(is_investigator_paid = False) & ~Q(invoice = None))
payment_invoice_sent_but_not_paid = total_cases.filter(Q(is_investigator_paid = False) & Q(invoice = None) & (Q(is_invoice_mailed = True)|Q(is_invoice_as_csv_mailed = True)))
payment_closed_but_no_invoice = total_cases.filter(Q(is_investigator_paid = False) & Q(invoice = None) & Q(is_invoice_mailed = False) & Q(is_invoice_as_csv_mailed = False) & Q(status = "Closed") )
payment_in_progress = total_cases.filter(Q(is_investigator_paid = False) & Q(invoice = None) & Q(is_invoice_mailed = False) & Q(is_invoice_as_csv_mailed = False) & ~Q(status = "Closed") )
context["payment_paid_and_closed"] = round( (float(payment_paid_and_closed.count() * 100) / total_cases.count()) ,2)
context["payment_invoice_generated_but_not_paid"] = round( (float(payment_invoice_generated_but_not_paid.count() * 100) / total_cases.count()) ,2)
context["payment_invoice_sent_but_not_paid"] = round( (float(payment_invoice_sent_but_not_paid.count() * 100) / total_cases.count()) ,2)
context["payment_closed_but_no_invoice"] = round( (float(payment_closed_but_no_invoice.count() * 100) / total_cases.count()) ,2)
context["payment_in_progress"] = round( (float(payment_in_progress.count() * 100) / total_cases.count()) ,2)
#for invetigator table
investigators = Investigator.objects.all()
# sorted_results = sorted(investigators, key= lambda t: t.cases_in_month(), reverse=True)
# investigators = sorted_results[0:10]
investigators_for_top_table = []
cases_assigned_to_investigator_for_top_table = []
cases_with_signature_percent_for_top_table = []
for investigator in investigators:
cases_assigned_to_investigator = Case.objects.filter(investigator=investigator).filter(~Q(status= "Duplicate delete")).filter(created_at__gt=(timezone.now() - timedelta(days=30)))
cases_with_signature_obtained = cases_assigned_to_investigator.filter(is_signature_obtained = True)
cases_assigned_to_investigator_for_top_table.append(len(cases_assigned_to_investigator))
if len(cases_assigned_to_investigator) != 0 and len(cases_with_signature_obtained) != 0:
percent_signature_obtained = round(float(len(cases_with_signature_obtained)*100 / len(cases_assigned_to_investigator)),2)
cases_with_signature_percent_for_top_table.append(percent_signature_obtained)
investigators_for_top_table.append(investigator)
else:
percent_signature_obtained = 0
cases_with_signature_percent_for_top_table.append(percent_signature_obtained)
investigators_for_top_table.append(investigator)
investigator_data = zip(investigators_for_top_table,cases_assigned_to_investigator_for_top_table,cases_with_signature_percent_for_top_table)
investigator_data.sort(key = lambda t: t[1],reverse=True)
context["investigators_data"] = investigator_data[0:10]
#cases added and closed graph
cases_added_list = []
cases_closed_list = []
monthly_revenue_list = []
monthly_proposed_revenue_list = []
daily_revenue_list = []
daily_proposed_revenue_list = []
broker_revenue_list = []
cases_added = []
cases_closed = []
dates = []
date = timezone.localtime(timezone.now()).date() - timedelta(days=31)
for day in range(0, 32):
current_date = date + timedelta(days=day)
next_date = date + timedelta(days=day + 1)
cases_on_current_date = total_cases.filter(created_at__gte = current_date).filter(created_at__lt = next_date)
cases_added.append(cases_on_current_date.count())
dates.append(current_date)
date = timezone.localtime(timezone.now()).date() - timedelta(days=31)
for day in range(0, 32):
current_date = date + timedelta(days=day)
next_date = date + timedelta(days=day + 1)
cases_closed_on_current_date = total_cases.filter(closing_date__gte = current_date).filter(closing_date__lt = next_date)
cases_closed.append(cases_closed_on_current_date.count())
# context['cases_added_closed'] = zip(dates, cases_added, cases_closed)
cases_added_list = zip(dates, cases_added)
cases_closed_list = zip(dates, cases_closed)
brokers = Broker.objects.all()
context['brokers'] = brokers
#revenue for RSN graph
profit_per_month = []
proposed_profit_per_month = []
profit_per_day = []
proposed_profit_per_day = []
profit_per_day_for_broker = []
dates = []
months = []
weeks = []
broker_ids = []
data_for_broker = []
date = timezone.localtime(timezone.now()).date() - timedelta(days=31)
# daily revenue for RSN graph
for day in range(0, 32):
profit = 0
profit_including_client_cancelled = 0
current_date = date + timedelta(days=day)
next_date = date + timedelta(days=day + 1)
cases_on_current_date = total_cases.filter(created_at__gte = current_date).filter(created_at__lt = next_date).filter(status = "Closed")
for case in cases_on_current_date:
profit = profit + case.amount_billed_to_law_firm - case.amount_paid_to_investigator
cases_on_current_date_including_client_cancelled = total_cases.filter(created_at__gte = current_date).filter(created_at__lt = next_date).filter(status = "Client cancelled")
for case in cases_on_current_date_including_client_cancelled:
profit_including_client_cancelled = profit_including_client_cancelled + case.get_proposed_law_firm_price() - case.get_proposed_investigator_price()
profit_per_day.append(profit)
proposed_profit_per_day.append(profit_including_client_cancelled+profit)
dates.append(current_date)
daily_revenue_list = zip(dates, profit_per_day)
daily_proposed_revenue_list = zip(dates,proposed_profit_per_day)
# monthly revenue for RSN graph
date = timezone.localtime(timezone.now()).date()
date = date.replace(day=1)
month_days = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
no_of_months = range(0,12)
month_names = ['January','February','March','April','May','June','July','August','September','October','November','December']
for index in range(1,7):
profit = 0
profit_including_client_cancelled = 0
first_day_of_month = date
month = date.month
days = month_days[no_of_months[month-1]]
last_day_of_month = date.replace(day=days)
cases_on_current_date = total_cases.filter(created_at__gte = first_day_of_month).filter(created_at__lt = last_day_of_month).filter(status = "Closed")
for case in cases_on_current_date:
profit = profit + case.amount_billed_to_law_firm - case.amount_paid_to_investigator
cases_on_current_date_including_client_cancelled = total_cases.filter(created_at__gt = first_day_of_month).filter(created_at__lt = last_day_of_month).filter(status = "Client cancelled")
for case in cases_on_current_date_including_client_cancelled:
profit_including_client_cancelled = profit_including_client_cancelled + case.get_proposed_law_firm_price() - case.get_proposed_investigator_price()
proposed_profit_per_month.append(profit_including_client_cancelled+profit)
profit_per_month.append(profit)
months.append(first_day_of_month)
date = date - timedelta(days=month_days[no_of_months[month-2]])
# context['total_revenue_graph_monthly'] = zip(months, profit_per_month)
# context['total_proposed_revenue_graph_monthly'] = zip(months, proposed_profit_per_month)
monthly_revenue_list = zip(months, profit_per_month)
monthly_proposed_revenue_list = zip(months, proposed_profit_per_month)
#broker revenue graph
for broker in brokers:
zippped_data = []
dates = []
profit_per_day_for_broker = []
date = timezone.localtime(timezone.now()).date() - timedelta(days=31)
for day in range(0, 32):
profit = 0
current_date = date + timedelta(days=day)
next_date = date + timedelta(days=day + 1)
cases_on_current_date = total_cases.filter(created_by = broker).filter(created_at__gte = current_date).filter(created_at__lt = next_date).filter(status = "Closed")
for case in cases_on_current_date:
profit = profit + case.amount_billed_to_law_firm - case.amount_paid_to_investigator
profit_per_day_for_broker.append(profit)
dates.append(current_date)
zipped_data = zip(dates,profit_per_day_for_broker)
data_for_broker.append(zipped_data)
broker_revenue_list = data_for_broker
context["broker"] = brokers
context["brokers_count"] = brokers.count()
#USA map graph
states_array = []
cases_in_state = []
for short_form, state_name in states.iteritems():
cases = total_cases.filter(Q(client_address__state = state_name)|Q(client_address__state = short_form))
states_array.append(state_name)
cases_in_state.append(cases.count)
zipped_data_for_map_graph = zip(states_array,cases_in_state)
context["states"] = states_array
context["map_data"] = zipped_data_for_map_graph
#broker table data
broker_for_percentage_of_cases_closed = []
percentage_of_cases_closed_for_broker = []
broker_no_of_cases_added = []
broker_no_of_cases_closed = []
#<me
broker_avg_closing_time_seconds = []
#me>
for broker in brokers:
no_of_cases_added = 0
no_of_cases_closed = 0
date = timezone.localtime(timezone.now()).date() - timedelta(days=31)
cases_on_current_date = total_cases.filter(created_by = broker).filter(created_at__gte = date)
cases_on_current_date_closed = cases_on_current_date.filter(status = "Closed")
no_of_cases_added = cases_on_current_date.count()
no_of_cases_closed = cases_on_current_date_closed.count()
#<me
closed_cases_created_date = cases_on_current_date_closed.values_list('created_at', flat=True)
closed_cases_closing_date = cases_on_current_date_closed.values_list('closing_date', flat=True)
avg_closing_time=0
for i in range(0,no_of_cases_closed):
avg_closing_time += (closed_cases_closing_date[i]-closed_cases_created_date[i]).seconds
if avg_closing_time!=0:
avg_closing_time= avg_closing_time*1.0/no_of_cases_closed
broker_avg_closing_time_seconds.append(avg_closing_time)
#me>
if no_of_cases_added != 0 and no_of_cases_closed != 0:
closing_percent = round(float(no_of_cases_closed * 100) / no_of_cases_added , 2)
percentage_of_cases_closed_for_broker.append(closing_percent)
broker_for_percentage_of_cases_closed.append(broker)
broker_no_of_cases_added.append(no_of_cases_added)
broker_no_of_cases_closed.append(no_of_cases_closed)
else:
closing_percent = 0
percentage_of_cases_closed_for_broker.append(closing_percent)
broker_for_percentage_of_cases_closed.append(broker)
broker_no_of_cases_added.append(no_of_cases_added)
broker_no_of_cases_closed.append(no_of_cases_closed)
broker_closing_percent_data = zip(broker_for_percentage_of_cases_closed,broker_no_of_cases_added,broker_no_of_cases_closed,percentage_of_cases_closed_for_broker,broker_avg_closing_time_seconds)
#<me
mean_cases_added = sum(broker_no_of_cases_added)*1.0/len(broker_no_of_cases_added)
max_cases_added = max(broker_no_of_cases_added)*1.0
max_closing_time = max(broker_avg_closing_time_seconds)
def normalized_rate(broker_row):
closing_time = broker_row[4]
percent_cases_closed = broker_row[3]
no_cases_closed = broker_row[2]
if percent_cases_closed==0:
return -9999
else:
no_case_weight = numpy.log(no_cases_closed*max_cases_added/mean_cases_added)
closing_time_weight = numpy.log(max_closing_time/closing_time)+1
return no_case_weight*closing_time_weight*percent_cases_closed
broker_closing_percent_data.sort(key = (lambda t: normalized_rate(t)),reverse=True)
#me>
context["broker_closing_percent_data"] = broker_closing_percent_data
if request.POST:
if 'from' in request.POST and request.POST['from'] != '':
from_date = request.POST['from']
from_components = from_date.split('/')
from_date = datetime.datetime(int(from_components[2]), int(from_components[0]), int(from_components[1]))
context['from'] = request.POST['from']
if 'to' in request.POST and request.POST['to'] != '':
to_date = request.POST['to']
to_components = to_date.split('/')
to_date = datetime.datetime(int(to_components[2]), int(to_components[0]), int(to_components[1]))
context['to'] = request.POST['to']
if 'context' in request.POST and request.POST['context'] == 'cases-added-closed':
print "inside"
dates = []
cases_added = []
cases_closed = []
date = from_date
date_range = to_date - from_date
date_range = date_range.days
for day in range(0,date_range+1):
current_date = date + timedelta(days=day)
next_date = date + timedelta(days=day + 1)
cases_on_current_date = total_cases.filter(created_at__gte = current_date).filter(created_at__lt = next_date)
cases_added.append(cases_on_current_date.count())
dates.append(current_date.date())
cases_added_list = zip(dates,cases_added)
for day in range(0,date_range+1):
current_date = date + timedelta(days=day)
next_date = date + timedelta(days=day + 1)
cases_closed_on_current_date = total_cases.filter(closing_date__gte = current_date).filter(closing_date__lt = next_date)
cases_closed.append(cases_closed_on_current_date.count())
cases_closed_list = zip(dates,cases_closed)
context["range_for"] = "cases-added-closed"
elif 'context' in request.POST and request.POST['context'] == 'rsn-revenue':
profit_per_month = []
proposed_profit_per_month = []
profit_per_day = []
proposed_profit_per_day = []
profit_per_day_for_broker = []
dates = []
months = []
weeks = []
date = from_date
date_range = to_date - from_date
date_range = date_range.days
for day in range(0, date_range+1):
profit = 0
profit_including_client_cancelled = 0
current_date = date + timedelta(days=day)
next_date = date + timedelta(days=day + 1)
cases_on_current_date = total_cases.filter(created_at__gte = current_date).filter(created_at__lt = next_date).filter(status = "Closed")
for case in cases_on_current_date:
profit = profit + case.amount_billed_to_law_firm - case.amount_paid_to_investigator
cases_on_current_date_including_client_cancelled = total_cases.filter(created_at__gte = current_date).filter(created_at__lt = next_date).filter(status = "Client cancelled")
for case in cases_on_current_date_including_client_cancelled:
profit_including_client_cancelled = profit_including_client_cancelled + case.get_proposed_law_firm_price() - case.get_proposed_investigator_price()
profit_per_day.append(profit)
proposed_profit_per_day.append(profit_including_client_cancelled+profit)
dates.append(current_date.date())
# context['total_revenue_graph_daily'] = zip(dates, profit_per_day)
# context['total_proposed_revenue_graph_daily'] = zip(dates,proposed_profit_per_day)
daily_revenue_list = zip(dates, profit_per_day)
daily_proposed_revenue_list = zip(dates,proposed_profit_per_day)
# monthly revenue for RSN graph
date = to_date
# month_range = relativedelta.relativedelta(to_date, from_date)
month_range = (to_date.year - from_date.year)*12 + (to_date.month - from_date.month)
month_range = month_range + 1
# month_range = month_range.months
date = date.replace(day=1)
month_days = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
no_of_months = range(0,12)
month_names = ['January','February','March','April','May','June','July','August','September','October','November','December']
for index in range(1,month_range+1):
profit = 0
profit_including_client_cancelled = 0
first_day_of_month = date
month = date.month
days = month_days[no_of_months[month-1]]
last_day_of_month = date.replace(day=days)
cases_on_current_date = total_cases.filter(created_at__gte = first_day_of_month).filter(created_at__lt = last_day_of_month).filter(status = "Closed")
for case in cases_on_current_date:
profit = profit + case.amount_billed_to_law_firm - case.amount_paid_to_investigator
cases_on_current_date_including_client_cancelled = total_cases.filter(created_at__gt = first_day_of_month).filter(created_at__lt = last_day_of_month).filter(status = "Client cancelled")
for case in cases_on_current_date_including_client_cancelled:
profit_including_client_cancelled = profit_including_client_cancelled + case.get_proposed_law_firm_price() - case.get_proposed_investigator_price()
proposed_profit_per_month.append(profit_including_client_cancelled+profit)
profit_per_month.append(profit)
months.append(first_day_of_month.date())
date = date - timedelta(days=month_days[no_of_months[month-2]])
# context['total_revenue_graph_monthly'] = zip(months, profit_per_month)
# context['total_proposed_revenue_graph_monthly'] = zip(months, proposed_profit_per_month)
monthly_revenue_list = zip(months, profit_per_month)
monthly_proposed_revenue_list = zip(months, proposed_profit_per_month)
context["range_for"] = "rsn-revenue"
elif 'context' in request.POST and request.POST['context'] == 'broker-revenue':
broker_ids = []
data_for_broker = []
brokers = Broker.objects.all()
for broker in brokers:
zippped_data = []
dates = []
profit_per_day_for_broker = []
date = from_date
date_range = to_date - from_date
date_range = date_range.days
for day in range(0, date_range + 1):
profit = 0
current_date = date + timedelta(days=day)
next_date = date + timedelta(days=day + 1)
cases_on_current_date = total_cases.filter(created_by = broker).filter(created_at__gte = current_date).filter(created_at__lt = next_date).filter(status = "Closed")
for case in cases_on_current_date:
profit = profit + case.amount_billed_to_law_firm - case.amount_paid_to_investigator
profit_per_day_for_broker.append(profit)
dates.append(current_date.date())
zipped_data = zip(dates,profit_per_day_for_broker)
data_for_broker.append(zipped_data)
broker_revenue_list = data_for_broker
context["range_for"] = "broker-revenue"
context["broker_revenue"] = broker_revenue_list
context['total_revenue_graph_monthly'] = monthly_revenue_list
context['total_proposed_revenue_graph_monthly'] = monthly_proposed_revenue_list
context['cases_added'] = cases_added_list
context['cases_closed'] = cases_closed_list
context['total_revenue_graph_daily'] = daily_revenue_list
context['total_proposed_revenue_graph_daily'] = daily_proposed_revenue_list
return render(request, 'system_admin/dashboard.html',context) |
def end_list(a):
    """Return a two-element list [first, last] of sequence *a*.

    For a single-element sequence the same element appears twice.
    Raises IndexError for an empty sequence (no sensible endpoints exist).
    """
    return [a[0], a[-1]]


# Demo driver: endpoints of 1..10. Renamed from ``list`` to avoid
# shadowing the builtin of the same name.
numbers = [i for i in range(1, 11)]
print(end_list(numbers))
|
"""
๋ฌธ์ : X๋ณด๋ค ์์ ์
์ ์ N๊ฐ๋ก ์ด๋ฃจ์ด์ง ์์ด A์ ์ ์ X๊ฐ ์ฃผ์ด์ง๋ค.
์ด๋, A์์ X๋ณด๋ค ์์ ์๋ฅผ ๋ชจ๋ ์ถ๋ ฅํ๋ ํ๋ก๊ทธ๋จ์ ์์ฑํ์์ค.
์
๋ ฅ: ์ฒซ์งธ ์ค์ N๊ณผ X๊ฐ ์ฃผ์ด์ง๋ค. (1 โค N, X โค 10,000)
๋์งธ ์ค์ ์์ด A๋ฅผ ์ด๋ฃจ๋ ์ ์ N๊ฐ๊ฐ ์ฃผ์ด์ง๋ค.
์ฃผ์ด์ง๋ ์ ์๋ ๋ชจ๋ 1๋ณด๋ค ํฌ๊ฑฐ๋ ๊ฐ๊ณ , 10,000๋ณด๋ค ์๊ฑฐ๋ ๊ฐ์ ์ ์์ด๋ค.
์ถ๋ ฅ: X๋ณด๋ค ์์ ์๋ฅผ ์
๋ ฅ๋ฐ์ ์์๋๋ก ๊ณต๋ฐฑ์ผ๋ก ๊ตฌ๋ถํด ์ถ๋ ฅํ๋ค.
X๋ณด๋ค ์์ ์๋ ์ ์ด๋ ํ๋ ์กด์ฌํ๋ค.
์
๋ ฅ ์์ :
10 5
1 10 4 9 2 3 8 5 7 6
์ธจ๋ต ์์ :
1 4 2 3
meno
1. N๊ฐ์ ์ ์๋ ๋ฐ๋ณต๋ฌธ์ด ๋์๊ฐ๋ ํ์๋ฅผ ๊ฒฐ์ ํ๋ค.
2. X๋ ์
๋ ฅ๋ฐ์ ์ ์์ ๋น๊ต ๋๋ค.
3. N์ ๋ฐ๋ณต๋ฌธ์ผ๋ก A๋ฅผ ์
๋ ฅ ๋ฐ๋๋ค. (๋จ, N๋ณด๋ค ์๊ฑฐ๋ ๊ฐ์์ผํ๋ค.)
question
1. ์์๋๋ก ๊ณต๋ฐฑ์ผ๋ก ๊ตฌ๋ถ์ ์ด๋ป๊ฒ ํ๋ฉด ์ข์๊น?
solving now
๋ณ์: N(์ ์๊ฐ์), X(์ต๋๊ฐ), A(์์ด)
"""
N=int(input("N"))
X=int(input("X"))
A = list(map(int, input().split()))
i=0
# for i in range(0, N-1):
while 1:
# print("A[i]=%s"%A[i])
print("i=%s"%i)
if A[i] >= X:
del A[i]
N-=1
print("A=%s"%A)
elif len(A) == X:
break
print("len(A)=%s"%len(A))
print("N=%s" % N)
i+=1
print(A)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration: creates the ``UserInfo`` model/table."""

    # First migration of the app: nothing to depend on.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='UserInfo',
            fields=[
                # Standard auto-incrementing primary key.
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # NOTE: verbose_name values are bytestrings (b'...'),
                # a leftover Python 2-era convention.
                ('f_name', models.CharField(max_length=100, verbose_name=b'First Name')),
                ('l_name', models.CharField(max_length=200, verbose_name=b'Last Name')),
                # max_length=75 was the old Django default for EmailField.
                ('email', models.EmailField(max_length=75, verbose_name=b'Email')),
                # No auto_now_add: callers must set this timestamp explicitly.
                ('create_date', models.DateTimeField(verbose_name=b'Date Created')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
from django import forms
from .models import Team
class CreateTeamForm(forms.ModelForm):
    """ModelForm for creating a Team; exposes only the ``title`` field."""

    class Meta:
        model = Team
        fields = ['title']
class UserSearchForm(forms.Form):
    """Single-field form for searching users by username.

    The widget is styled as a Bootstrap input-group search box; the
    attribute order below matches the rendered HTML attribute order.
    """

    # Named once so the CharField declaration stays readable.
    _input_attrs = {
        'type': 'text',
        'class': 'form-control',
        'placeholder': 'Search users',
        'aria-label': 'Search users',
        'aria-describedby': 'button-addon2',
    }

    username = forms.CharField(
        label='',
        widget=forms.TextInput(attrs=_input_attrs),
    )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.